feat(backups,xo-web): add cleanVm warnings to task (#6225)

Thierry Goettelmann 2022-05-30 15:39:54 +02:00 committed by GitHub
parent d7d81431ef
commit c0b0ba433f
8 changed files with 166 additions and 64 deletions

View File

@@ -47,19 +47,19 @@ const computeVhdsSize = (handler, vhdPaths) =>
 //          |                             |
 //           \___________rename_____________/
-async function mergeVhdChain(chain, { handler, onLog, remove, merge }) {
+async function mergeVhdChain(chain, { handler, logInfo, remove, merge }) {
   assert(chain.length >= 2)
   const chainCopy = [...chain]
   const parent = chainCopy.pop()
   const children = chainCopy
   if (merge) {
-    onLog(`merging ${children.length} children into ${parent}`)
+    logInfo(`merging children into parent`, { childrenCount: children.length, parent })
     let done, total
     const handle = setInterval(() => {
       if (done !== undefined) {
-        onLog(`merging ${children.join(',')} into ${parent}: ${done}/${total}`)
+        logInfo(`merging children in progress`, { children, parent, doneCount: done, totalCount: total })
       }
     }, 10e3)
@@ -75,9 +75,9 @@ async function mergeVhdChain(chain, { handler, onLog, remove, merge }) {
     await Promise.all([
       VhdAbstract.rename(handler, parent, mergeTargetChild),
       asyncMap(children, child => {
-        onLog(`the VHD ${child} is already merged`)
+        logInfo(`the VHD child is already merged`, { child })
         if (remove) {
-          onLog(`deleting merged VHD ${child}`)
+          logInfo(`deleting merged VHD child`, { child })
           return VhdAbstract.unlink(handler, child)
         }
       }),
@@ -125,14 +125,19 @@ const listVhds = async (handler, vmDir) => {
   return { vhds, interruptedVhds, aliases }
 }
-async function checkAliases(aliasPaths, targetDataRepository, { handler, onLog = noop, remove = false }) {
+async function checkAliases(
+  aliasPaths,
+  targetDataRepository,
+  { handler, logInfo = noop, logWarn = console.warn, remove = false }
+) {
   const aliasFound = []
   for (const path of aliasPaths) {
     const target = await resolveVhdAlias(handler, path)
     if (!isVhdFile(target)) {
-      onLog(`Alias ${path} references a non vhd target: ${target}`)
+      logWarn('alias references non VHD target', { path, target })
       if (remove) {
+        logInfo('removing alias and non VHD target', { path, target })
        await handler.unlink(target)
        await handler.unlink(path)
      }
@@ -147,13 +152,13 @@ async function checkAliases(aliasPaths, targetDataRepository, { handler, onLog =
         // error during dispose should not trigger a deletion
       }
     } catch (error) {
-      onLog(`target ${target} of alias ${path} is missing or broken`, { error })
+      logWarn('missing or broken alias target', { target, path, error })
       if (remove) {
         try {
           await VhdAbstract.unlink(handler, path)
-        } catch (e) {
-          if (e.code !== 'ENOENT') {
-            onLog(`Error while deleting target ${target} of alias ${path}`, { error: e })
+        } catch (error) {
+          if (error.code !== 'ENOENT') {
+            logWarn('error deleting alias target', { target, path, error })
           }
         }
       }
@@ -170,20 +175,22 @@ async function checkAliases(aliasPaths, targetDataRepository, { handler, onLog =
   entries.forEach(async entry => {
     if (!aliasFound.includes(entry)) {
-      onLog(`the Vhd ${entry} is not referenced by a an alias`)
+      logWarn('no alias references VHD', { entry })
       if (remove) {
+        logInfo('deleting unaliased VHD')
         await VhdAbstract.unlink(handler, entry)
       }
     }
   })
 }
 exports.checkAliases = checkAliases
 const defaultMergeLimiter = limitConcurrency(1)
 exports.cleanVm = async function cleanVm(
   vmDir,
-  { fixMetadata, remove, merge, mergeLimiter = defaultMergeLimiter, onLog = noop }
+  { fixMetadata, remove, merge, mergeLimiter = defaultMergeLimiter, logInfo = noop, logWarn = console.warn }
 ) {
   const limitedMergeVhdChain = mergeLimiter(mergeVhdChain)
@@ -214,9 +221,9 @@ exports.cleanVm = async function cleanVm(
       })
     } catch (error) {
       vhds.delete(path)
-      onLog(`error while checking the VHD with path ${path}`, { error })
+      logWarn('VHD check error', { path, error })
       if (error?.code === 'ERR_ASSERTION' && remove) {
-        onLog(`deleting broken ${path}`)
+        logInfo('deleting broken path', { path })
         return VhdAbstract.unlink(handler, path)
       }
     }
@@ -228,12 +235,12 @@ exports.cleanVm = async function cleanVm(
     const statePath = interruptedVhds.get(interruptedVhd)
     interruptedVhds.delete(interruptedVhd)
-    onLog('orphan merge state', {
+    logWarn('orphan merge state', {
       mergeStatePath: statePath,
       missingVhdPath: interruptedVhd,
     })
     if (remove) {
-      onLog(`deleting orphan merge state ${statePath}`)
+      logInfo('deleting orphan merge state', { statePath })
       await handler.unlink(statePath)
     }
   }
@@ -242,7 +249,7 @@ exports.cleanVm = async function cleanVm(
   // check if alias are correct
   // check if all vhd in data subfolder have a corresponding alias
   await asyncMap(Object.keys(aliases), async dir => {
-    await checkAliases(aliases[dir], `${dir}/data`, { handler, onLog, remove })
+    await checkAliases(aliases[dir], `${dir}/data`, { handler, logInfo, logWarn, remove })
   })
   // remove VHDs with missing ancestors
@@ -264,9 +271,9 @@ exports.cleanVm = async function cleanVm(
       if (!vhds.has(parent)) {
         vhds.delete(vhdPath)
-        onLog(`the parent ${parent} of the VHD ${vhdPath} is missing`)
+        logWarn('parent VHD is missing', { parent, vhdPath })
         if (remove) {
-          onLog(`deleting orphan VHD ${vhdPath}`)
+          logInfo('deleting orphan VHD', { vhdPath })
           deletions.push(VhdAbstract.unlink(handler, vhdPath))
         }
       }
@@ -303,7 +310,7 @@ exports.cleanVm = async function cleanVm(
     // check is not good enough to delete the file, the best we can do is report
     // it
     if (!(await this.isValidXva(path))) {
-      onLog(`the XVA with path ${path} is potentially broken`)
+      logWarn('XVA might be broken', { path })
     }
   })
@@ -317,7 +324,7 @@ exports.cleanVm = async function cleanVm(
     try {
       metadata = JSON.parse(await handler.readFile(json))
     } catch (error) {
-      onLog(`failed to read metadata file ${json}`, { error })
+      logWarn('failed to read metadata file', { json, error })
       jsons.delete(json)
       return
     }
@@ -328,9 +335,9 @@ exports.cleanVm = async function cleanVm(
     if (xvas.has(linkedXva)) {
       unusedXvas.delete(linkedXva)
     } else {
-      onLog(`the XVA linked to the metadata ${json} is missing`)
+      logWarn('metadata XVA is missing', { json })
       if (remove) {
-        onLog(`deleting incomplete backup ${json}`)
+        logInfo('deleting incomplete backup', { json })
         jsons.delete(json)
         await handler.unlink(json)
       }
@@ -351,9 +358,9 @@ exports.cleanVm = async function cleanVm(
         vhdsToJSons[path] = json
       })
     } else {
-      onLog(`Some VHDs linked to the metadata ${json} are missing`, { missingVhds })
+      logWarn('some metadata VHDs are missing', { json, missingVhds })
      if (remove) {
-        onLog(`deleting incomplete backup ${json}`)
+        logInfo('deleting incomplete backup', { json })
        jsons.delete(json)
        await handler.unlink(json)
      }
@@ -394,9 +401,9 @@ exports.cleanVm = async function cleanVm(
       }
     }
-    onLog(`the VHD ${vhd} is unused`)
+    logWarn('unused VHD', { vhd })
     if (remove) {
-      onLog(`deleting unused VHD ${vhd}`)
+      logInfo('deleting unused VHD', { vhd })
       unusedVhdsDeletion.push(VhdAbstract.unlink(handler, vhd))
     }
   }
@@ -420,7 +427,7 @@ exports.cleanVm = async function cleanVm(
   const metadataWithMergedVhd = {}
   const doMerge = async () => {
     await asyncMap(toMerge, async chain => {
-      const merged = await limitedMergeVhdChain(chain, { handler, onLog, remove, merge })
+      const merged = await limitedMergeVhdChain(chain, { handler, logInfo, logWarn, remove, merge })
       if (merged !== undefined) {
         const metadataPath = vhdsToJSons[chain[0]] // all the chain should have the same metada file
         metadataWithMergedVhd[metadataPath] = true
@@ -432,18 +439,18 @@ exports.cleanVm = async function cleanVm(
     ...unusedVhdsDeletion,
     toMerge.length !== 0 && (merge ? Task.run({ name: 'merge' }, doMerge) : doMerge()),
     asyncMap(unusedXvas, path => {
-      onLog(`the XVA ${path} is unused`)
+      logWarn('unused XVA', { path })
       if (remove) {
-        onLog(`deleting unused XVA ${path}`)
+        logInfo('deleting unused XVA', { path })
         return handler.unlink(path)
       }
     }),
     asyncMap(xvaSums, path => {
       // no need to handle checksums for XVAs deleted by the script, they will be handled by `unlink()`
       if (!xvas.has(path.slice(0, -'.checksum'.length))) {
-        onLog(`the XVA checksum ${path} is unused`)
+        logInfo('unused XVA checksum', { path })
         if (remove) {
-          onLog(`deleting unused XVA checksum ${path}`)
+          logInfo('deleting unused XVA checksum', { path })
           return handler.unlink(path)
         }
       }
@@ -477,11 +484,11 @@ exports.cleanVm = async function cleanVm(
         // don't warn if the size has changed after a merge
         if (!merged && fileSystemSize !== size) {
-          onLog(`incorrect size in metadata: ${size ?? 'none'} instead of ${fileSystemSize}`)
+          logWarn('incorrect size in metadata', { size: size ?? 'none', fileSystemSize })
         }
       }
     } catch (error) {
-      onLog(`failed to get size of ${metadataPath}`, { error })
+      logWarn('failed to get metadata size', { metadataPath, error })
       return
     }
@@ -491,7 +498,7 @@ exports.cleanVm = async function cleanVm(
     try {
       await handler.writeFile(metadataPath, JSON.stringify(metadata), { flags: 'w' })
     } catch (error) {
-      onLog(`failed to update size in backup metadata ${metadataPath} after merge`, { error })
+      logWarn('metadata size update failed', { metadataPath, error })
     }
   }
 })
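The diff above replaces the free-form `onLog(string)` callback with two structured callbacks, `logInfo` and `logWarn`, that receive a constant message plus a `data` object. A minimal sketch of that contract, not part of the commit, with illustrative values:

// Not from the commit: minimal sketch of the new logging contract.
// Each callback gets a fixed message plus a structured `data` object,
// which is what lets the UI later render each field separately.
const logInfo = (message, data = {}) => console.log('INFO', message, data)
const logWarn = (message, data = {}) => console.warn('WARN', message, data)

// cleanVm reports findings through these callbacks, e.g. (messages taken
// from the diff above, paths are illustrative):
logWarn('unused VHD', { vhd: 'xo-vm-backups/example/vdis/example/data.vhd' })
logInfo('deleting unused VHD', { vhd: 'xo-vm-backups/example/vdis/example/data.vhd' })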

View File

@@ -69,6 +69,8 @@ job.start(data: { mode: Mode, reportWhen: ReportWhen })
 ├─ task.warning(message: string)
 ├─ task.start(data: { type: 'VM', id: string })
 │  ├─ task.warning(message: string)
+|  ├─ task.start(message: 'clean-vm')
+│  │  └─ task.end
 │  ├─ task.start(message: 'snapshot')
 │  │  └─ task.end
 │  ├─ task.start(message: 'export', data: { type: 'SR' | 'remote', id: string, isFull: boolean })
@@ -89,12 +91,8 @@ job.start(data: { mode: Mode, reportWhen: ReportWhen })
 │  │  ├─ task.start(message: 'clean')
 │  │  │  ├─ task.warning(message: string)
 │  │  │  └─ task.end
-│  │  │
-│  │  │  // in case of delta backup
-│  │  ├─ task.start(message: 'merge')
-│  │  │  ├─ task.warning(message: string)
-│  │  │  └─ task.end(result: { size: number })
-│  │  │
+│  │  └─ task.end
+|  ├─ task.start(message: 'clean-vm')
 │  │  └─ task.end
 │  └─ task.end
 └─ job.end
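The new `clean-vm` entries in this tree are produced by wrapping the adapter's clean-up in a named sub-task, as the MixinBackupWriter change later in this commit does. A hedged sketch of how such an entry and its warnings are emitted (assumes a surrounding VM task context; the require path is the one used inside the backups package):

const { Task } = require('../Task.js') // path as required from within @xen-orchestra/backups

async function cleanVmAsSubTask() {
  // runs the clean-up as a named sub-task of the current VM task; warnings raised
  // inside are attached to the 'clean-vm' log entry instead of staying only in server logs
  return Task.run({ name: 'clean-vm' }, async () => {
    Task.warning('unused VHD', { vhd: 'xo-vm-backups/example/data.vhd' }) // illustrative warning
  })
}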

View File

@@ -6,8 +6,9 @@ const { join } = require('path')
 const { getVmBackupDir } = require('../_getVmBackupDir.js')
 const MergeWorker = require('../merge-worker/index.js')
 const { formatFilenameDate } = require('../_filenameDate.js')
+const { Task } = require('../Task.js')
 
-const { warn } = createLogger('xo:backups:MixinBackupWriter')
+const { info, warn } = createLogger('xo:backups:MixinBackupWriter')
 
 exports.MixinBackupWriter = (BaseClass = Object) =>
   class MixinBackupWriter extends BaseClass {
@@ -25,11 +26,17 @@ exports.MixinBackupWriter = (BaseClass = Object) =>
     async _cleanVm(options) {
       try {
-        return await this._adapter.cleanVm(this.#vmBackupDir, {
-          ...options,
-          fixMetadata: true,
-          onLog: warn,
-          lock: false,
+        return await Task.run({ name: 'clean-vm' }, () => {
+          return this._adapter.cleanVm(this.#vmBackupDir, {
+            ...options,
+            fixMetadata: true,
+            logInfo: info,
+            logWarn: (message, data) => {
+              warn(message, data)
+              Task.warning(message, data)
+            },
+            lock: false,
+          })
         })
       } catch (error) {
         warn(error)

View File

@@ -13,6 +13,7 @@
 - [XO Web] Add ability to configure a default filter for Storage [#6236](https://github.com/vatesfr/xen-orchestra/issues/6236) (PR [#6237](https://github.com/vatesfr/xen-orchestra/pull/6237))
 - [VM migration] Ensure the VM can be migrated before performing the migration to avoid issues [#5301](https://github.com/vatesfr/xen-orchestra/issues/5301) (PR [#6245](https://github.com/vatesfr/xen-orchestra/pull/6245))
 - [Backup] VMs with USB Pass-through devices are now supported! The advanced _Offline Snapshot Mode_ setting must be enabled. For Full Backup or Disaster Recovery jobs, Rolling Snapshot needs to be anabled as well. (PR [#6239](https://github.com/vatesfr/xen-orchestra/pull/6239))
+- [Backup] Show any detected errors on existing backups instead of fixing them silently (PR [#6207](https://github.com/vatesfr/xen-orchestra/pull/6225))
 - [RPU/Host] If some backup jobs are running on the pool, ask for confirmation before starting an RPU, shutdown/rebooting a host or restarting a host's toolstack (PR [6232](https://github.com/vatesfr/xen-orchestra/pull/6232))
 
 ### Bug fixes

View File

@@ -745,6 +745,7 @@ const messages = {
   resumeVmLabel: 'Resume',
   copyVmLabel: 'Copy',
   cloneVmLabel: 'Clone',
+  cleanVm: 'Clean VM directory',
   fastCloneVmLabel: 'Fast clone',
   vmConsoleLabel: 'Console',
   vmExportUrlValidity: 'The URL is valid once for a short period of time.',

View File

@@ -23,6 +23,10 @@
     @extend .fa;
     @extend .fa-tasks;
   }
+  &-clean-vm {
+    @extend .fa;
+    @extend .fa-recycle;
+  }
   &-template {
     @extend .fa;
     @extend .fa-thumb-tack;

View File

@@ -256,3 +256,36 @@ $select-input-height: 40px; // Bootstrap input height
   -moz-user-select: none; /* Firefox */
   user-select: none; /* Chrome */
 }
+
+.message-expandable {
+  cursor: pointer;
+  span {
+    text-decoration: underline;
+  }
+}
+
+.task-warning {
+  padding: 2px 10px;
+  margin: 10px 0;
+  list-style-type: none;
+  border-radius: 10px;
+  border: 1px dashed #eca649;
+  li {
+    margin-bottom: 10px;
+    &:last-child {
+      margin-bottom: 0;
+    }
+    strong {
+      display: block;
+      color: #eca649;
+    }
+    span {
+      color: grey;
+    }
+  }
+}

View File

@@ -6,6 +6,7 @@ import decorate from 'apply-decorators'
 import defined, { get } from '@xen-orchestra/defined'
 import Icon from 'icon'
 import Pagination from 'pagination'
+import PropTypes from 'prop-types'
 import React from 'react'
 import SearchBar from 'search-bar'
 import Select from 'form/select'
@@ -110,17 +111,55 @@ const TaskError = ({ task }) => {
   )
 }
 
-const Warnings = ({ warnings }) =>
+class TaskWarning extends React.Component {
+  constructor(props) {
+    super(props)
+    this.state = {
+      expanded: false,
+    }
+  }
+
+  render() {
+    const className = `text-warning ${this.props.data ? 'message-expandable' : ''}`
+    return (
+      <div>
+        <span className={className} onClick={() => this.setState({ expanded: !this.state.expanded })}>
+          <Icon icon='alarm' /> {this.props.message}
+        </span>
+        {this.state.expanded && (
+          <ul className='task-warning'>
+            {Object.keys(this.props.data).map(key => (
+              <li key={key}>
+                <strong>{key}</strong>
+                <span>{JSON.stringify(this.props.data[key])}</span>
+              </li>
+            ))}
+          </ul>
+        )}
+      </div>
+    )
+  }
+}
+
+TaskWarning.propTypes = {
+  message: PropTypes.string.isRequired,
+  data: PropTypes.object,
+}
+
+const TaskWarnings = ({ warnings }) =>
   warnings !== undefined ? (
     <div>
-      {warnings.map(({ message }, key) => (
-        <div className='text-warning' key={key}>
-          <Icon icon='alarm' /> {message}
-        </div>
+      {warnings.map(({ message, data }, key) => (
+        <TaskWarning message={message} data={data} key={key} />
       ))}
     </div>
   ) : null
+
+TaskWarnings.propTypes = {
+  warnings: PropTypes.arrayOf(PropTypes.shape(TaskWarning.propTypes)),
+}
 
 const VmTask = ({ children, className, restartVmJob, task }) => (
   <li className={className}>
     <Vm id={task.data.id} link newTab /> <TaskStateInfos status={task.status} />{' '}
@@ -144,7 +183,7 @@ const VmTask = ({ children, className, restartVmJob, task }) => (
         />
       </ButtonGroup>
     )}
-    <Warnings warnings={task.warnings} />
+    <TaskWarnings warnings={task.warnings} />
     {children}
     <TaskStart task={task} />
     <TaskEnd task={task} />
@@ -171,7 +210,7 @@ const VmTask = ({ children, className, restartVmJob, task }) => (
 const PoolTask = ({ children, className, task }) => (
   <li className={className}>
     <Pool id={task.data.id} link newTab /> <TaskStateInfos status={task.status} />
-    <Warnings warnings={task.warnings} />
+    <TaskWarnings warnings={task.warnings} />
     {children}
     <TaskStart task={task} />
     <TaskEnd task={task} />
@@ -183,7 +222,7 @@ const PoolTask = ({ children, className, task }) => (
 const XoTask = ({ children, className, task }) => (
   <li className={className}>
     <Icon icon='menu-xoa' /> XO <TaskStateInfos status={task.status} />
-    <Warnings warnings={task.warnings} />
+    <TaskWarnings warnings={task.warnings} />
     {children}
     <TaskStart task={task} />
     <TaskEnd task={task} />
@@ -195,7 +234,18 @@ const XoTask = ({ children, className, task }) => (
 const SnapshotTask = ({ className, task }) => (
   <li className={className}>
     <Icon icon='task' /> {_('snapshotVmLabel')} <TaskStateInfos status={task.status} />
-    <Warnings warnings={task.warnings} />
+    <TaskWarnings warnings={task.warnings} />
+    <TaskStart task={task} />
+    <TaskEnd task={task} />
+    <TaskError task={task} />
+  </li>
+)
+
+const CleanVmTask = ({ children, className, task }) => (
+  <li className={className}>
+    <Icon icon='clean-vm' /> {_('cleanVm')} <TaskStateInfos status={task.status} />
+    <TaskWarnings warnings={task.warnings} />
+    {children}
     <TaskStart task={task} />
     <TaskEnd task={task} />
     <TaskError task={task} />
@@ -203,7 +253,7 @@ const SnapshotTask = ({ className, task }) => (
 )
 
 const HealthCheckTask = ({ children, className, task }) => (
   <li className={className}>
-    <Icon icon='health' /> {task.message} <TaskStateInfos status={task.status} /> <Warnings warnings={task.warnings} />
+    <Icon icon='health' /> {task.message} <TaskStateInfos status={task.status} /> <TaskWarnings warnings={task.warnings} />
     {children}
     <TaskStart task={task} />
     <TaskEnd task={task} />
@@ -222,7 +272,7 @@ const HealthCheckVmStartTask = ({ children, className, task }) => (
 const RemoteTask = ({ children, className, task }) => (
   <li className={className}>
     <Remote id={task.data.id} link newTab /> <TaskStateInfos status={task.status} />
-    <Warnings warnings={task.warnings} />
+    <TaskWarnings warnings={task.warnings} />
     {children}
     <TaskStart task={task} />
     <TaskEnd task={task} />
@@ -234,7 +284,7 @@ const RemoteTask = ({ children, className, task }) => (
 const SrTask = ({ children, className, task }) => (
   <li className={className}>
     <Sr id={task.data.id} link newTab /> <TaskStateInfos status={task.status} />
-    <Warnings warnings={task.warnings} />
+    <TaskWarnings warnings={task.warnings} />
     {children}
     <TaskStart task={task} />
     <TaskEnd task={task} />
@@ -245,7 +295,7 @@ const SrTask = ({ children, className, task }) => (
 const TransferMergeTask = ({ className, task }) => {
   const size = defined(() => task.result.size, 0)
-  if (task.status === 'success' && size === 0) {
+  if (task.status === 'success' && size === 0 && task.warnings?.length === 0) {
     return null
   }
@@ -262,7 +312,7 @@ const TransferMergeTask = ({ className, task }) => {
       )}{' '}
       {task.message}
       <TaskStateInfos status={task.status} />
-      <Warnings warnings={task.warnings} />
+      <TaskWarnings warnings={task.warnings} />
       <TaskStart task={task} />
       <TaskEnd task={task} />
       <TaskDuration task={task} />
@@ -292,6 +342,7 @@ const COMPONENT_BY_MESSAGE = {
   transfer: TransferMergeTask,
   'health check': HealthCheckTask,
   vmstart: HealthCheckVmStartTask,
+  'clean-vm': CleanVmTask,
 }
 
 const TaskLi = ({ task, ...props }) => {
@@ -474,7 +525,7 @@ export default decorate([
     const { scheduleId, warnings, tasks = [] } = state.log
     return tasks.length === 0 ? (
       <div>
-        <Warnings warnings={warnings} />
+        <TaskWarnings warnings={warnings} />
         <TaskError task={state.log} />
       </div>
     ) : (
@@ -495,7 +546,7 @@ export default decorate([
           value={state.status}
           valueKey='value'
         />
-        <Warnings warnings={warnings} />
+        <TaskWarnings warnings={warnings} />
         <br />
         <ul className='list-group'>
           {map(state.displayedTasks, taskLog => {
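For reference, a hedged sketch (not from the commit) of the `warnings` array shape this component now consumes: each entry carries the constant `message` plus the structured `data` emitted by `logWarn` on the backup side, matching the propTypes above.

// Illustrative warnings as attached to a task log entry after this change
const warnings = [
  { message: 'unused VHD', data: { vhd: 'xo-vm-backups/example/data.vhd' } },
  { message: 'incorrect size in metadata', data: { size: 1024, fileSystemSize: 2048 } },
]

// rendered by the component above, e.g.:
// <TaskWarnings warnings={warnings} />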