feat(backups,xo-web): add cleanVm warnings to task (#6225)
parent d7d81431ef
commit c0b0ba433f
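The heart of the change, visible throughout the first file below: `cleanVm`'s single `onLog(message)` callback is split into `logInfo(message, data)` and `logWarn(message, data)`, which keep the details in a structured `data` object instead of interpolating them into the string. A minimal sketch of the two shapes (the VHD name and error are made-up values, not taken from the diff):

// Minimal sketch, not the actual xo code.
// Before: a single callback, details baked into the message string.
const onLog = message => console.log(message)
onLog(`deleting unused VHD ${'20220101T000000Z.vhd'}`)

// After: severity-specific callbacks with structured data, so a consumer such as
// the task log can keep the details machine-readable.
const logInfo = (message, data) => console.log(message, data)
const logWarn = (message, data) => console.warn(message, data)
logInfo('deleting unused VHD', { vhd: '20220101T000000Z.vhd' })
logWarn('VHD check error', { path: '20220101T000000Z.vhd', error: new Error('ERR_ASSERTION') })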
@@ -47,19 +47,19 @@ const computeVhdsSize = (handler, vhdPaths) =>
 // | |
 // \___________rename_____________/

-async function mergeVhdChain(chain, { handler, onLog, remove, merge }) {
+async function mergeVhdChain(chain, { handler, logInfo, remove, merge }) {
   assert(chain.length >= 2)
   const chainCopy = [...chain]
   const parent = chainCopy.pop()
   const children = chainCopy

   if (merge) {
-    onLog(`merging ${children.length} children into ${parent}`)
+    logInfo(`merging children into parent`, { childrenCount: children.length, parent })

     let done, total
     const handle = setInterval(() => {
       if (done !== undefined) {
-        onLog(`merging ${children.join(',')} into ${parent}: ${done}/${total}`)
+        logInfo(`merging children in progress`, { children, parent, doneCount: done, totalCount: total })
       }
     }, 10e3)

@@ -75,9 +75,9 @@ async function mergeVhdChain(chain, { handler, onLog, remove, merge }) {
     await Promise.all([
       VhdAbstract.rename(handler, parent, mergeTargetChild),
       asyncMap(children, child => {
-        onLog(`the VHD ${child} is already merged`)
+        logInfo(`the VHD child is already merged`, { child })
         if (remove) {
-          onLog(`deleting merged VHD ${child}`)
+          logInfo(`deleting merged VHD child`, { child })
           return VhdAbstract.unlink(handler, child)
         }
       }),
@@ -125,14 +125,19 @@ const listVhds = async (handler, vmDir) => {
   return { vhds, interruptedVhds, aliases }
 }

-async function checkAliases(aliasPaths, targetDataRepository, { handler, onLog = noop, remove = false }) {
+async function checkAliases(
+  aliasPaths,
+  targetDataRepository,
+  { handler, logInfo = noop, logWarn = console.warn, remove = false }
+) {
   const aliasFound = []
   for (const path of aliasPaths) {
     const target = await resolveVhdAlias(handler, path)

     if (!isVhdFile(target)) {
-      onLog(`Alias ${path} references a non vhd target: ${target}`)
+      logWarn('alias references non VHD target', { path, target })
       if (remove) {
+        logInfo('removing alias and non VHD target', { path, target })
         await handler.unlink(target)
         await handler.unlink(path)
       }
@@ -147,13 +152,13 @@ async function checkAliases(aliasPaths, targetDataRepository, { handler, onLog =
         // error during dispose should not trigger a deletion
       }
     } catch (error) {
-      onLog(`target ${target} of alias ${path} is missing or broken`, { error })
+      logWarn('missing or broken alias target', { target, path, error })
       if (remove) {
         try {
           await VhdAbstract.unlink(handler, path)
-        } catch (e) {
-          if (e.code !== 'ENOENT') {
-            onLog(`Error while deleting target ${target} of alias ${path}`, { error: e })
+        } catch (error) {
+          if (error.code !== 'ENOENT') {
+            logWarn('error deleting alias target', { target, path, error })
           }
         }
       }
@@ -170,20 +175,22 @@ async function checkAliases(aliasPaths, targetDataRepository, { handler, onLog =

   entries.forEach(async entry => {
     if (!aliasFound.includes(entry)) {
-      onLog(`the Vhd ${entry} is not referenced by a an alias`)
+      logWarn('no alias references VHD', { entry })
       if (remove) {
+        logInfo('deleting unaliased VHD')
         await VhdAbstract.unlink(handler, entry)
       }
     }
   })
 }

 exports.checkAliases = checkAliases

 const defaultMergeLimiter = limitConcurrency(1)

 exports.cleanVm = async function cleanVm(
   vmDir,
-  { fixMetadata, remove, merge, mergeLimiter = defaultMergeLimiter, onLog = noop }
+  { fixMetadata, remove, merge, mergeLimiter = defaultMergeLimiter, logInfo = noop, logWarn = console.warn }
 ) {
   const limitedMergeVhdChain = mergeLimiter(mergeVhdChain)

@@ -214,9 +221,9 @@ exports.cleanVm = async function cleanVm(
       })
     } catch (error) {
       vhds.delete(path)
-      onLog(`error while checking the VHD with path ${path}`, { error })
+      logWarn('VHD check error', { path, error })
       if (error?.code === 'ERR_ASSERTION' && remove) {
-        onLog(`deleting broken ${path}`)
+        logInfo('deleting broken path', { path })
         return VhdAbstract.unlink(handler, path)
       }
     }
@@ -228,12 +235,12 @@ exports.cleanVm = async function cleanVm(
     const statePath = interruptedVhds.get(interruptedVhd)
     interruptedVhds.delete(interruptedVhd)

-    onLog('orphan merge state', {
+    logWarn('orphan merge state', {
       mergeStatePath: statePath,
       missingVhdPath: interruptedVhd,
     })
     if (remove) {
-      onLog(`deleting orphan merge state ${statePath}`)
+      logInfo('deleting orphan merge state', { statePath })
       await handler.unlink(statePath)
     }
   }
@@ -242,7 +249,7 @@ exports.cleanVm = async function cleanVm(
   // check if alias are correct
   // check if all vhd in data subfolder have a corresponding alias
   await asyncMap(Object.keys(aliases), async dir => {
-    await checkAliases(aliases[dir], `${dir}/data`, { handler, onLog, remove })
+    await checkAliases(aliases[dir], `${dir}/data`, { handler, logInfo, logWarn, remove })
   })

   // remove VHDs with missing ancestors
@@ -264,9 +271,9 @@ exports.cleanVm = async function cleanVm(
       if (!vhds.has(parent)) {
         vhds.delete(vhdPath)

-        onLog(`the parent ${parent} of the VHD ${vhdPath} is missing`)
+        logWarn('parent VHD is missing', { parent, vhdPath })
         if (remove) {
-          onLog(`deleting orphan VHD ${vhdPath}`)
+          logInfo('deleting orphan VHD', { vhdPath })
           deletions.push(VhdAbstract.unlink(handler, vhdPath))
         }
       }
@@ -303,7 +310,7 @@ exports.cleanVm = async function cleanVm(
     // check is not good enough to delete the file, the best we can do is report
     // it
     if (!(await this.isValidXva(path))) {
-      onLog(`the XVA with path ${path} is potentially broken`)
+      logWarn('XVA might be broken', { path })
     }
   })

@@ -317,7 +324,7 @@ exports.cleanVm = async function cleanVm(
     try {
       metadata = JSON.parse(await handler.readFile(json))
     } catch (error) {
-      onLog(`failed to read metadata file ${json}`, { error })
+      logWarn('failed to read metadata file', { json, error })
       jsons.delete(json)
       return
     }
@@ -328,9 +335,9 @@ exports.cleanVm = async function cleanVm(
     if (xvas.has(linkedXva)) {
       unusedXvas.delete(linkedXva)
     } else {
-      onLog(`the XVA linked to the metadata ${json} is missing`)
+      logWarn('metadata XVA is missing', { json })
       if (remove) {
-        onLog(`deleting incomplete backup ${json}`)
+        logInfo('deleting incomplete backup', { json })
         jsons.delete(json)
         await handler.unlink(json)
       }
@@ -351,9 +358,9 @@ exports.cleanVm = async function cleanVm(
        vhdsToJSons[path] = json
      })
    } else {
-      onLog(`Some VHDs linked to the metadata ${json} are missing`, { missingVhds })
+      logWarn('some metadata VHDs are missing', { json, missingVhds })
      if (remove) {
-        onLog(`deleting incomplete backup ${json}`)
+        logInfo('deleting incomplete backup', { json })
        jsons.delete(json)
        await handler.unlink(json)
      }
@@ -394,9 +401,9 @@ exports.cleanVm = async function cleanVm(
         }
       }

-      onLog(`the VHD ${vhd} is unused`)
+      logWarn('unused VHD', { vhd })
       if (remove) {
-        onLog(`deleting unused VHD ${vhd}`)
+        logInfo('deleting unused VHD', { vhd })
         unusedVhdsDeletion.push(VhdAbstract.unlink(handler, vhd))
       }
     }
@@ -420,7 +427,7 @@ exports.cleanVm = async function cleanVm(
   const metadataWithMergedVhd = {}
   const doMerge = async () => {
     await asyncMap(toMerge, async chain => {
-      const merged = await limitedMergeVhdChain(chain, { handler, onLog, remove, merge })
+      const merged = await limitedMergeVhdChain(chain, { handler, logInfo, logWarn, remove, merge })
       if (merged !== undefined) {
         const metadataPath = vhdsToJSons[chain[0]] // all the chain should have the same metada file
         metadataWithMergedVhd[metadataPath] = true
@@ -432,18 +439,18 @@ exports.cleanVm = async function cleanVm(
     ...unusedVhdsDeletion,
     toMerge.length !== 0 && (merge ? Task.run({ name: 'merge' }, doMerge) : doMerge()),
     asyncMap(unusedXvas, path => {
-      onLog(`the XVA ${path} is unused`)
+      logWarn('unused XVA', { path })
       if (remove) {
-        onLog(`deleting unused XVA ${path}`)
+        logInfo('deleting unused XVA', { path })
         return handler.unlink(path)
       }
     }),
     asyncMap(xvaSums, path => {
       // no need to handle checksums for XVAs deleted by the script, they will be handled by `unlink()`
       if (!xvas.has(path.slice(0, -'.checksum'.length))) {
-        onLog(`the XVA checksum ${path} is unused`)
+        logInfo('unused XVA checksum', { path })
         if (remove) {
-          onLog(`deleting unused XVA checksum ${path}`)
+          logInfo('deleting unused XVA checksum', { path })
           return handler.unlink(path)
         }
       }
@@ -477,11 +484,11 @@ exports.cleanVm = async function cleanVm(

         // don't warn if the size has changed after a merge
         if (!merged && fileSystemSize !== size) {
-          onLog(`incorrect size in metadata: ${size ?? 'none'} instead of ${fileSystemSize}`)
+          logWarn('incorrect size in metadata', { size: size ?? 'none', fileSystemSize })
         }
       }
     } catch (error) {
-      onLog(`failed to get size of ${metadataPath}`, { error })
+      logWarn('failed to get metadata size', { metadataPath, error })
       return
     }

@@ -491,7 +498,7 @@ exports.cleanVm = async function cleanVm(
      try {
        await handler.writeFile(metadataPath, JSON.stringify(metadata), { flags: 'w' })
      } catch (error) {
-        onLog(`failed to update size in backup metadata ${metadataPath} after merge`, { error })
+        logWarn('metadata size update failed', { metadataPath, error })
      }
    }
  })
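Because every message now carries a plain `data` object, a consumer only has to collect `(message, data)` pairs. A hedged sketch of such a collector; `adapter` and `vmDir` are placeholder names, the real call site is in the MixinBackupWriter diff further down:

// Sketch only: gather cleanVm warnings into an array instead of printing them.
const warnings = []
const logInfo = (message, data) => console.log(message, data)
const logWarn = (message, data) => warnings.push({ message, data })

// Hypothetical call through a RemoteAdapter instance, mirroring the new options:
// await adapter.cleanVm(vmDir, { remove: true, merge: true, logInfo, logWarn, lock: false })
console.log(warnings)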
@@ -69,6 +69,8 @@ job.start(data: { mode: Mode, reportWhen: ReportWhen })
 ├─ task.warning(message: string)
 ├─ task.start(data: { type: 'VM', id: string })
 │ ├─ task.warning(message: string)
+| ├─ task.start(message: 'clean-vm')
+│ │ └─ task.end
 │ ├─ task.start(message: 'snapshot')
 │ │ └─ task.end
 │ ├─ task.start(message: 'export', data: { type: 'SR' | 'remote', id: string, isFull: boolean })
@@ -89,12 +91,8 @@ job.start(data: { mode: Mode, reportWhen: ReportWhen })
-│ │ ├─ task.start(message: 'clean')
-│ │ │ ├─ task.warning(message: string)
-│ │ │ └─ task.end
-│ │ │
-│ │ │ // in case of delta backup
 │ │ ├─ task.start(message: 'merge')
 │ │ │ ├─ task.warning(message: string)
 │ │ │ └─ task.end(result: { size: number })
-│ │ │
 │ │ └─ task.end
+| ├─ task.start(message: 'clean-vm')
+│ │ └─ task.end
 │ └─ task.end
 └─ job.end
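For illustration only, a `clean-vm` sub-task produced by this tree could end up in the backup log looking roughly like this; the field names are inferred from the tree above and the xo-web components below, and the values are invented:

// Illustrative shape, not an actual log excerpt.
const cleanVmTaskLog = {
  message: 'clean-vm',
  status: 'success',
  warnings: [
    { message: 'unused VHD', data: { vhd: '20220101T000000Z.vhd' } },
    {
      message: 'orphan merge state',
      data: { mergeStatePath: '.20220101T000000Z.vhd.merge.json', missingVhdPath: '20220101T000000Z.vhd' },
    },
  ],
}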
@@ -6,8 +6,9 @@ const { join } = require('path')
 const { getVmBackupDir } = require('../_getVmBackupDir.js')
 const MergeWorker = require('../merge-worker/index.js')
 const { formatFilenameDate } = require('../_filenameDate.js')
+const { Task } = require('../Task.js')

-const { warn } = createLogger('xo:backups:MixinBackupWriter')
+const { info, warn } = createLogger('xo:backups:MixinBackupWriter')

 exports.MixinBackupWriter = (BaseClass = Object) =>
   class MixinBackupWriter extends BaseClass {
@@ -25,11 +26,17 @@ exports.MixinBackupWriter = (BaseClass = Object) =>

     async _cleanVm(options) {
       try {
-        return await this._adapter.cleanVm(this.#vmBackupDir, {
-          ...options,
-          fixMetadata: true,
-          onLog: warn,
-          lock: false,
+        return await Task.run({ name: 'clean-vm' }, () => {
+          return this._adapter.cleanVm(this.#vmBackupDir, {
+            ...options,
+            fixMetadata: true,
+            logInfo: info,
+            logWarn: (message, data) => {
+              warn(message, data)
+              Task.warning(message, data)
+            },
+            lock: false,
+          })
         })
       } catch (error) {
         warn(error)
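Callers outside this diff that still hold an old-style `onLog(message)` callback would need a small adapter to keep working against the new options. A hypothetical shim, not part of the commit:

// Hypothetical shim: fold the new (message, data) pairs back into a single string
// for code that still expects the old onLog(message) signature.
const fromStructured = onLog => (message, data) =>
  onLog(data === undefined ? message : `${message} ${JSON.stringify(data)}`)

// e.g. pass { logInfo: fromStructured(log), logWarn: fromStructured(warn) } to cleanVm.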
@@ -13,6 +13,7 @@
 - [XO Web] Add ability to configure a default filter for Storage [#6236](https://github.com/vatesfr/xen-orchestra/issues/6236) (PR [#6237](https://github.com/vatesfr/xen-orchestra/pull/6237))
 - [VM migration] Ensure the VM can be migrated before performing the migration to avoid issues [#5301](https://github.com/vatesfr/xen-orchestra/issues/5301) (PR [#6245](https://github.com/vatesfr/xen-orchestra/pull/6245))
 - [Backup] VMs with USB Pass-through devices are now supported! The advanced _Offline Snapshot Mode_ setting must be enabled. For Full Backup or Disaster Recovery jobs, Rolling Snapshot needs to be enabled as well. (PR [#6239](https://github.com/vatesfr/xen-orchestra/pull/6239))
+- [Backup] Show any detected errors on existing backups instead of fixing them silently (PR [#6207](https://github.com/vatesfr/xen-orchestra/pull/6225))
 - [RPU/Host] If some backup jobs are running on the pool, ask for confirmation before starting an RPU, shutdown/rebooting a host or restarting a host's toolstack (PR [6232](https://github.com/vatesfr/xen-orchestra/pull/6232))

 ### Bug fixes
@@ -745,6 +745,7 @@ const messages = {
   resumeVmLabel: 'Resume',
   copyVmLabel: 'Copy',
   cloneVmLabel: 'Clone',
+  cleanVm: 'Clean VM directory',
   fastCloneVmLabel: 'Fast clone',
   vmConsoleLabel: 'Console',
   vmExportUrlValidity: 'The URL is valid once for a short period of time.',
@@ -23,6 +23,10 @@
     @extend .fa;
     @extend .fa-tasks;
   }
+  &-clean-vm {
+    @extend .fa;
+    @extend .fa-recycle;
+  }
   &-template {
     @extend .fa;
     @extend .fa-thumb-tack;
@@ -256,3 +256,36 @@ $select-input-height: 40px; // Bootstrap input height
   -moz-user-select: none; /* Firefox */
   user-select: none; /* Chrome */
 }
+
+.message-expandable {
+  cursor: pointer;
+
+  span {
+    text-decoration: underline;
+  }
+}
+
+.task-warning {
+  padding: 2px 10px;
+  margin: 10px 0;
+  list-style-type: none;
+  border-radius: 10px;
+  border: 1px dashed #eca649;
+
+  li {
+    margin-bottom: 10px;
+
+    &:last-child {
+      margin-bottom: 0;
+    }
+
+    strong {
+      display: block;
+      color: #eca649;
+    }
+
+    span {
+      color: grey;
+    }
+  }
+}
@@ -6,6 +6,7 @@ import decorate from 'apply-decorators'
 import defined, { get } from '@xen-orchestra/defined'
 import Icon from 'icon'
 import Pagination from 'pagination'
+import PropTypes from 'prop-types'
 import React from 'react'
 import SearchBar from 'search-bar'
 import Select from 'form/select'
@@ -110,17 +111,55 @@ const TaskError = ({ task }) => {
   )
 }

-const Warnings = ({ warnings }) =>
+class TaskWarning extends React.Component {
+  constructor(props) {
+    super(props)
+    this.state = {
+      expanded: false,
+    }
+  }
+
+  render() {
+    const className = `text-warning ${this.props.data ? 'message-expandable' : ''}`
+
+    return (
+      <div>
+        <span className={className} onClick={() => this.setState({ expanded: !this.state.expanded })}>
+          <Icon icon='alarm' /> {this.props.message}
+        </span>
+        {this.state.expanded && (
+          <ul className='task-warning'>
+            {Object.keys(this.props.data).map(key => (
+              <li key={key}>
+                <strong>{key}</strong>
+                <span>{JSON.stringify(this.props.data[key])}</span>
+              </li>
+            ))}
+          </ul>
+        )}
+      </div>
+    )
+  }
+}
+
+TaskWarning.propTypes = {
+  message: PropTypes.string.isRequired,
+  data: PropTypes.object,
+}
+
+const TaskWarnings = ({ warnings }) =>
   warnings !== undefined ? (
     <div>
-      {warnings.map(({ message }, key) => (
-        <div className='text-warning' key={key}>
-          <Icon icon='alarm' /> {message}
-        </div>
+      {warnings.map(({ message, data }, key) => (
+        <TaskWarning message={message} data={data} key={key} />
       ))}
     </div>
   ) : null

+TaskWarnings.propTypes = {
+  warnings: PropTypes.arrayOf(PropTypes.shape(TaskWarning.propTypes)),
+}
+
 const VmTask = ({ children, className, restartVmJob, task }) => (
   <li className={className}>
     <Vm id={task.data.id} link newTab /> <TaskStateInfos status={task.status} />{' '}
@@ -144,7 +183,7 @@ const VmTask = ({ children, className, restartVmJob, task }) => (
         />
       </ButtonGroup>
     )}
-    <Warnings warnings={task.warnings} />
+    <TaskWarnings warnings={task.warnings} />
     {children}
     <TaskStart task={task} />
     <TaskEnd task={task} />
@@ -171,7 +210,7 @@ const VmTask = ({ children, className, restartVmJob, task }) => (
 const PoolTask = ({ children, className, task }) => (
   <li className={className}>
     <Pool id={task.data.id} link newTab /> <TaskStateInfos status={task.status} />
-    <Warnings warnings={task.warnings} />
+    <TaskWarnings warnings={task.warnings} />
     {children}
     <TaskStart task={task} />
     <TaskEnd task={task} />
@@ -183,7 +222,7 @@ const PoolTask = ({ children, className, task }) => (
 const XoTask = ({ children, className, task }) => (
   <li className={className}>
     <Icon icon='menu-xoa' /> XO <TaskStateInfos status={task.status} />
-    <Warnings warnings={task.warnings} />
+    <TaskWarnings warnings={task.warnings} />
     {children}
     <TaskStart task={task} />
     <TaskEnd task={task} />
@@ -195,7 +234,18 @@ const XoTask = ({ children, className, task }) => (
 const SnapshotTask = ({ className, task }) => (
   <li className={className}>
     <Icon icon='task' /> {_('snapshotVmLabel')} <TaskStateInfos status={task.status} />
-    <Warnings warnings={task.warnings} />
+    <TaskWarnings warnings={task.warnings} />
     <TaskStart task={task} />
     <TaskEnd task={task} />
     <TaskError task={task} />
+  </li>
+)
+
+const CleanVmTask = ({ children, className, task }) => (
+  <li className={className}>
+    <Icon icon='clean-vm' /> {_('cleanVm')} <TaskStateInfos status={task.status} />
+    <TaskWarnings warnings={task.warnings} />
+    {children}
+    <TaskStart task={task} />
+    <TaskEnd task={task} />
+    <TaskError task={task} />
@@ -203,7 +253,7 @@ const SnapshotTask = ({ className, task }) => (
 )
 const HealthCheckTask = ({ children, className, task }) => (
   <li className={className}>
-    <Icon icon='health' /> {task.message} <TaskStateInfos status={task.status} /> <Warnings warnings={task.warnings} />
+    <Icon icon='health' /> {task.message} <TaskStateInfos status={task.status} /> <TaskWarnings warnings={task.warnings} />
     {children}
     <TaskStart task={task} />
     <TaskEnd task={task} />
@@ -222,7 +272,7 @@ const HealthCheckVmStartTask = ({ children, className, task }) => (
 const RemoteTask = ({ children, className, task }) => (
   <li className={className}>
     <Remote id={task.data.id} link newTab /> <TaskStateInfos status={task.status} />
-    <Warnings warnings={task.warnings} />
+    <TaskWarnings warnings={task.warnings} />
     {children}
     <TaskStart task={task} />
     <TaskEnd task={task} />
@@ -234,7 +284,7 @@ const RemoteTask = ({ children, className, task }) => (
 const SrTask = ({ children, className, task }) => (
   <li className={className}>
     <Sr id={task.data.id} link newTab /> <TaskStateInfos status={task.status} />
-    <Warnings warnings={task.warnings} />
+    <TaskWarnings warnings={task.warnings} />
     {children}
     <TaskStart task={task} />
     <TaskEnd task={task} />
@@ -245,7 +295,7 @@ const SrTask = ({ children, className, task }) => (

 const TransferMergeTask = ({ className, task }) => {
   const size = defined(() => task.result.size, 0)
-  if (task.status === 'success' && size === 0) {
+  if (task.status === 'success' && size === 0 && task.warnings?.length === 0) {
     return null
   }

@@ -262,7 +312,7 @@ const TransferMergeTask = ({ className, task }) => {
       )}{' '}
       {task.message}
       <TaskStateInfos status={task.status} />
-      <Warnings warnings={task.warnings} />
+      <TaskWarnings warnings={task.warnings} />
       <TaskStart task={task} />
       <TaskEnd task={task} />
       <TaskDuration task={task} />
@@ -292,6 +342,7 @@ const COMPONENT_BY_MESSAGE = {
   transfer: TransferMergeTask,
   'health check': HealthCheckTask,
   vmstart: HealthCheckVmStartTask,
+  'clean-vm': CleanVmTask,
 }

 const TaskLi = ({ task, ...props }) => {
@@ -474,7 +525,7 @@ export default decorate([
       const { scheduleId, warnings, tasks = [] } = state.log
       return tasks.length === 0 ? (
         <div>
-          <Warnings warnings={warnings} />
+          <TaskWarnings warnings={warnings} />
           <TaskError task={state.log} />
         </div>
       ) : (
@@ -495,7 +546,7 @@ export default decorate([
             value={state.status}
             valueKey='value'
           />
-          <Warnings warnings={warnings} />
+          <TaskWarnings warnings={warnings} />
           <br />
           <ul className='list-group'>
             {map(state.displayedTasks, taskLog => {
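As a quick sanity check of the UI side, sample props for the new components; the values are invented, the shape matches what MixinBackupWriter forwards via Task.warning:

// Sketch: sample warnings for the TaskWarnings / TaskWarning components added above.
const sampleWarnings = [
  // with data: rendered by TaskWarning as an expandable entry listing each key/value
  { message: 'unused VHD', data: { vhd: '20220101T000000Z.vhd' } },
  // without data: rendered as a plain warning line, without the expandable style
  { message: 'orphan merge state' },
]

// In a parent component: <TaskWarnings warnings={sampleWarnings} />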