Compare commits

...

9 Commits

Author SHA1 Message Date
Florent BEAUCHAMP
6db4d21aca Update @xen-orchestra/backups/_cleanVm.js
Co-authored-by: Julien Fontanet <julien.fontanet@isonoe.net>
2022-05-12 14:59:31 +02:00
Florent BEAUCHAMP
ebee50cab1 Update @xen-orchestra/backups/_cleanVm.js
Co-authored-by: Julien Fontanet <julien.fontanet@isonoe.net>
2022-05-12 14:56:07 +02:00
Florent Beauchamp
73abf62707 add more debug 2022-05-12 14:50:59 +02:00
Julien Fontanet
29622856fa Merge branch 'master' into feat_add_debug_clean_vm 2022-05-11 15:23:11 +02:00
Florent Beauchamp
2882f0ba50 feat: add debug on deletedeltavmbackup and cleanvm 2022-04-29 16:24:23 +02:00
Florent Beauchamp
b867fdd2a7 feat: add more debug 2022-04-26 08:24:21 +02:00
Florent Beauchamp
a18d649605 feat: add additional warnings to delta continuous replication 2022-04-21 10:48:03 +02:00
Florent Beauchamp
c5c3df8776 fix following review 2022-04-21 10:19:48 +02:00
Florent Beauchamp
12d8e2f517 feat(backup): more log messages 2022-04-19 15:51:58 +02:00
6 changed files with 99 additions and 8 deletions

View File

@@ -32,7 +32,7 @@ exports.DIR_XO_CONFIG_BACKUPS = DIR_XO_CONFIG_BACKUPS
const DIR_XO_POOL_METADATA_BACKUPS = 'xo-pool-metadata-backups'
exports.DIR_XO_POOL_METADATA_BACKUPS = DIR_XO_POOL_METADATA_BACKUPS
const { warn } = createLogger('xo:backups:RemoteAdapter')
const { debug, warn } = createLogger('xo:backups:RemoteAdapter')
const compareTimestamp = (a, b) => a.timestamp - b.timestamp
@@ -224,7 +224,7 @@ class RemoteAdapter {
async deleteDeltaVmBackups(backups) {
const handler = this._handler
debug(`deleteDeltaVmBackups will delete ${backups.length} delta backups`, { backups })
// this will delete the json, unused VHDs will be detected by `cleanVm`
await asyncMapSettled(backups, ({ _filename }) => handler.unlink(_filename))
}

View File

@@ -5,7 +5,7 @@ const sum = require('lodash/sum')
const { asyncMap } = require('@xen-orchestra/async-map')
const { Constants, mergeVhd, openVhd, VhdAbstract, VhdFile } = require('vhd-lib')
const { isVhdAlias, resolveVhdAlias } = require('vhd-lib/aliases')
const { dirname, resolve } = require('path')
const { dirname, resolve, basename } = require('path')
const { DISK_TYPES } = Constants
const { isMetadataFile, isVhdFile, isXvaFile, isXvaSumFile } = require('./_backupType.js')
const { limitConcurrency } = require('limit-concurrency-decorator')
@@ -90,7 +90,7 @@ async function mergeVhdChain(chain, { handler, onLog, remove, merge }) {
asyncMap(children.slice(0, -1), child => {
onLog(`the VHD ${child} is unused`)
if (remove) {
onLog(`deleting unused VHD ${child}`)
onLog(`mergeVhdChain: deleting unused VHD ${child}`)
return VhdAbstract.unlink(handler, child)
}
}),
@@ -383,7 +383,7 @@ exports.cleanVm = async function cleanVm(
const vhdChainsToMerge = { __proto__: null }
const toCheck = new Set(unusedVhds)
let shouldDelete = false
const getUsedChildChainOrDelete = vhd => {
if (vhd in vhdChainsToMerge) {
const chain = vhdChainsToMerge[vhd]
@@ -409,8 +409,64 @@ exports.cleanVm = async function cleanVm(
onLog(`the VHD ${vhd} is unused`)
if (remove) {
onLog(`deleting unused VHD ${vhd}`)
unusedVhdsDeletion.push(VhdAbstract.unlink(handler, vhd))
onLog(`getUsedChildChainOrDelete: deleting unused VHD`, {
vhdChildren,
vhd,
})
// temporarily disabled
shouldDelete = true
// unusedVhdsDeletion.push(VhdAbstract.unlink(handler, vhd))
}
}
{
// eslint-disable-next-line no-console
const debug = console.debug
if (shouldDelete) {
const chains = { __proto__: null }
const queue = new Set(vhds)
function addChildren(parent, chain) {
queue.delete(parent)
const child = vhdChildren[parent]
if (child !== undefined) {
const childChain = chains[child]
if (childChain !== undefined) {
// if a chain already exists, use it
delete chains[child]
chain.push(...childChain)
} else {
chain.push(child)
addChildren(child, chain)
}
}
}
for (const vhd of queue) {
const chain = []
addChildren(vhd, chain)
chains[vhd] = chain
}
const entries = Object.entries(chains)
debug(`${vhds.size} VHDs (${unusedVhds.size} unused) found among ${entries.length} chains [`)
const decorateVhd = vhd => {
const shortPath = basename(vhd)
return unusedVhds.has(vhd) ? `${shortPath} [unused]` : shortPath
}
for (let i = 0, n = entries.length; i < n; ++i) {
debug(`in ${dirname(entries[i][0])}`)
debug(' [')
const [parent, children] = entries[i]
debug(' ' + decorateVhd(parent))
for (const child of children) {
debug(' ' + decorateVhd(child))
}
debug(' ]')
}
debug(']')
}
}

View File

@@ -3,9 +3,10 @@
const assert = require('assert')
const map = require('lodash/map.js')
const mapValues = require('lodash/mapValues.js')
const uuid = require('uuid')
const ignoreErrors = require('promise-toolbox/ignoreErrors')
const { asyncMap } = require('@xen-orchestra/async-map')
const { chainVhd, checkVhdChain, openVhd, VhdAbstract } = require('vhd-lib')
const { chainVhd, checkVhdChain, openVhd, VhdAbstract, VhdDirectory } = require('vhd-lib')
const { createLogger } = require('@xen-orchestra/log')
const { dirname } = require('path')
@@ -30,6 +31,7 @@ exports.DeltaBackupWriter = class DeltaBackupWriter extends MixinBackupWriter(Ab
const backupDir = getVmBackupDir(backup.vm.uuid)
const vdisDir = `${backupDir}/vdis/${backup.job.id}`
const vhdDebugData = {}
await asyncMap(baseUuidToSrcVdi, async ([baseUuid, srcVdi]) => {
let found = false
@@ -40,6 +42,16 @@ exports.DeltaBackupWriter = class DeltaBackupWriter extends MixinBackupWriter(Ab
})
const packedBaseUuid = packUuid(baseUuid)
await asyncMap(vhds, async path => {
await Disposable.use(openVhd(handler, path), async vhd => {
const isMergeable = await adapter.isMergeableParent(packedBaseUuid, path)
vhdDebugData[path] = {
uuid: uuid.stringify(vhd.footer.uuid),
parentUuid: uuid.stringify(vhd.header.parentUuid),
isVhdDirectory: vhd instanceof VhdDirectory,
disktype: vhd.footer.diskType,
isMergeable,
}
})
try {
await checkVhdChain(handler, path)
// Warning, this should not be written as found = found || await adapter.isMergeableParent(packedBaseUuid, path)
@@ -52,13 +64,31 @@ exports.DeltaBackupWriter = class DeltaBackupWriter extends MixinBackupWriter(Ab
found = found || isMergeable
} catch (error) {
warn('checkBaseVdis', { error })
Task.warning(
`Backup.checkBaseVdis: Error while checking existing VHD ${vdisDir}/${srcVdi.uuid} : ${error.toString()}`
)
await ignoreErrors.call(VhdAbstract.unlink(handler, path))
}
})
} catch (error) {
warn('checkBaseVdis', { error })
Task.warning(
`Backup.checkBaseVdis : Impossible to open ${vdisDir}/${
srcVdi.uuid
} folder to list precedent backups: ${error.toString()}`
)
}
if (!found) {
Task.warning(
`Backup.checkBaseVdis : Impossible to find the base of ${srcVdi.uuid} for a delta : fallback to a full `,
{
data: {
vhdDebugData,
baseUuid,
vdiuuid: srcVdi.uuid,
},
}
)
baseUuidToSrcVdi.delete(baseUuid)
}
})

View File

@@ -20,6 +20,7 @@ exports.DeltaReplicationWriter = class DeltaReplicationWriter extends MixinRepli
vm => vm.other_config[TAG_COPY_SRC] === baseVm.uuid
)
if (replicatedVm === undefined) {
Task.warning(`Replication.checkBaseVdis: no replicated VMs`)
return baseUuidToSrcVdi.clear()
}
@@ -33,6 +34,7 @@ exports.DeltaReplicationWriter = class DeltaReplicationWriter extends MixinRepli
for (const uuid of baseUuidToSrcVdi.keys()) {
if (!replicatedVdis.has(uuid)) {
Task.warning(`Replication.checkBaseVdis: VDI ${uuid} is not in the list of already replicated VDI`)
baseUuidToSrcVdi.delete(uuid)
}
}

View File

@@ -6,6 +6,7 @@ const { join } = require('path')
const { getVmBackupDir } = require('../_getVmBackupDir.js')
const MergeWorker = require('../merge-worker/index.js')
const { formatFilenameDate } = require('../_filenameDate.js')
const { Task } = require('../Task.js')
const { warn } = createLogger('xo:backups:MixinBackupWriter')
@@ -33,6 +34,7 @@ exports.MixinBackupWriter = (BaseClass = Object) =>
})
} catch (error) {
warn(error)
Task.warning(`error while cleaning the backup folder : ${error.toString()}`)
return {}
}
}

View File

@@ -34,6 +34,7 @@
<!--packages-start-->
- @xen-orchestra/xapi major
- @xen-orchestra/backups minor
- @xen-orchestra/mixins major
- xo-server patch
- @xen-orchestra/proxy patch