feat(deltaBackups): correctly chain VHDs (#406)

The goal is for a tool like vhdimount to be able to mount any backup file and use it as a disk, in order to recover specific files from it.
This commit is contained in:
Nicolas Raynaud
2016-09-29 08:31:36 -07:00
committed by Julien Fontanet
parent 57092ee788
commit 6e66cffb92
3 changed files with 52 additions and 16 deletions

View File

@@ -93,7 +93,7 @@
"semver": "^5.1.0",
"serve-static": "^1.9.2",
"stack-chain": "^1.3.3",
"struct-fu": "^1.0.0",
"@nraynaud/struct-fu": "^1.0.1",
"tar-stream": "^1.5.2",
"through2": "^2.0.0",
"trace": "^2.0.1",

View File

@@ -1,4 +1,4 @@
import fu from 'struct-fu'
import fu from '@nraynaud/struct-fu'
import {
noop,
@@ -91,7 +91,7 @@ const fuHeader = fu.struct([
fu.uint8('parentUuid', 16),
fu.uint32('parentTimestamp'),
fu.uint32('reserved1'),
fu.char('parentUnicodeName', 512),
fu.char16be('parentUnicodeName', 512),
fu.struct('parentLocatorEntry', [
fu.uint32('platformCode'),
fu.uint32('platformDataSpace'),
@@ -144,24 +144,22 @@ const unpackField = (field, buf) => {
}
// ===================================================================
// Returns the checksum of a raw footer.
// The raw footer is altered with the new sum.
function checksumFooter (rawFooter) {
const checksumField = fuFooter.fields.checksum
// Returns the checksum of a raw struct.
// The raw struct (footer or header) is altered with the new sum.
function checksumStruct (rawStruct, checksumField) {
let sum = 0
// Reset current sum.
packField(checksumField, 0, rawFooter)
packField(checksumField, 0, rawStruct)
for (let i = 0; i < VHD_FOOTER_SIZE; i++) {
sum = (sum + rawFooter[i]) & 0xFFFFFFFF
sum = (sum + rawStruct[i]) & 0xFFFFFFFF
}
sum = 0xFFFFFFFF - sum
// Write new sum.
packField(checksumField, sum, rawFooter)
packField(checksumField, sum, rawStruct)
return sum
}
@@ -257,7 +255,7 @@ class Vhd {
)
const sum = unpackField(fuFooter.fields.checksum, buf)
const sumToTest = checksumFooter(buf)
const sumToTest = checksumStruct(buf, fuFooter.fields.checksum)
// Checksum child & parent.
if (sumToTest !== sum) {
@@ -494,19 +492,28 @@ class Vhd {
}
}
// Write a context footer. (At the end and beggining of a vhd file.)
// Write a context footer. (At the end and beginning of a vhd file.)
async writeFooter () {
const { footer } = this
const offset = this.getEndOfData()
const rawFooter = fuFooter.pack(footer)
footer.checksum = checksumFooter(rawFooter)
footer.checksum = checksumStruct(rawFooter, fuFooter.fields.checksum)
debug(`Write footer at: ${offset} (checksum=${footer.checksum}). (data=${rawFooter.toString('hex')})`)
await this._write(rawFooter, 0)
await this._write(rawFooter, offset)
}
async writeHeader () {
const { header } = this
const rawHeader = fuHeader.pack(header)
header.checksum = checksumStruct(rawHeader, fuHeader.fields.checksum)
const offset = VHD_FOOTER_SIZE
debug(`Write header at: ${offset} (checksum=${header.checksum}). (data=${rawHeader.toString('hex')})`)
await this._write(rawHeader, offset)
}
}
// Merge vhd child into vhd parent.
@@ -564,3 +571,22 @@ export default async function vhdMerge (
await parentVhd.writeFooter()
}
// Chain `childPath` onto `parentPath`: make the child VHD's header
// reference the parent by UUID and by (unicode) file name, rewriting
// the child's header only when something actually changed.
//
// Fixes the parent UUID/filename in delta files after download from
// xapi or backup compression.
export async function chainVhd (
  parentHandler, parentPath,
  childHandler, childPath
) {
  const parentVhd = new Vhd(parentHandler, parentPath)
  const childVhd = new Vhd(childHandler, childPath)
  await Promise.all([
    parentVhd.readHeaderAndFooter(),
    childVhd.readHeaderAndFooter()
  ])
  const parentName = parentPath.split('/').pop()
  const parentUuid = parentVhd.footer.uuid

  // The UUID is unpacked as a 16-byte buffer/array, so `!==` would
  // compare object references and always be true (forcing a header
  // rewrite on every call). Compare byte contents instead.
  const sameUuid = (a, b) =>
    a != null && b != null &&
    a.length === b.length &&
    Array.prototype.every.call(a, (byte, i) => byte === b[i])

  if (
    !sameUuid(childVhd.header.parentUuid, parentUuid) ||
    childVhd.header.parentUnicodeName !== parentName
  ) {
    childVhd.header.parentUuid = parentUuid
    childVhd.header.parentUnicodeName = parentName
    await childVhd.writeHeader()
  }
}

View File

@@ -12,7 +12,7 @@ import {
} from 'path'
import { satisfies as versionSatisfies } from 'semver'
import vhdMerge from '../vhd-merge'
import vhdMerge, { chainVhd } from '../vhd-merge'
import xapiObjectToXo from '../xapi-object-to-xo'
import {
deferrable
@@ -291,6 +291,14 @@ export default class {
return backups.slice(i)
}
// fix the parent UUID and filename in delta files after download from xapi or backup compression
async _chainDeltaVdiBackups ({handler, dir}) {
const backups = await this._listVdiBackups(handler, dir)
for (let i = 1; i < backups.length; i++) {
await chainVhd(handler, dir + '/' + backups[i - 1], handler, dir + '/' + backups[i])
}
}
async _mergeDeltaVdiBackups ({handler, dir, depth}) {
const backups = await this._listVdiBackups(handler, dir)
let i = backups.length - depth
@@ -553,7 +561,9 @@ export default class {
mapToArray(vdiBackups, vdiBackup => {
const backupName = vdiBackup.value()
const backupDirectory = backupName.slice(0, backupName.lastIndexOf('/'))
return this._mergeDeltaVdiBackups({ handler, dir: `${dir}/${backupDirectory}`, depth })
const backupDir = `${dir}/${backupDirectory}`
return this._mergeDeltaVdiBackups({ handler, dir: backupDir, depth })
.then(() => { this._chainDeltaVdiBackups({ handler, dir: backupDir }) })
})
)