feat(backup): implement file restore (#461)

See vatesfr/xo-web#1590

The current implementation has the following limitations:

- only supports local and NFS remotes
- requires installation of libvhdi-utils
- files can only be recovered one by one
Julien Fontanet, 2016-12-20 12:18:22 +01:00 (committed by GitHub)
parent 34ff8b0f02
commit 1ac8af34ec
10 changed files with 422 additions and 23 deletions

package.json

@@ -59,7 +59,6 @@
"fatfs": "^0.10.3",
"fs-extra": "^1.0.0",
"fs-promise": "^1.0.0",
"get-stream": "^3.0.0",
"golike-defer": "^0.0.0",
"hashy": "~0.5.1",
"helmet": "^3.0.0",
@@ -88,6 +87,7 @@
"ms": "^0.7.1",
"multikey-hash": "^1.0.1",
"ndjson": "^1.4.3",
"parse-pairs": "^0.2.2",
"partial-stream": "0.0.0",
"passport": "^0.3.0",
"passport-local": "^1.0.0",
@@ -98,9 +98,11 @@
"schema-inspector": "^1.5.1",
"semver": "^5.1.0",
"serve-static": "^1.9.2",
"split-lines": "^1.1.0",
"stack-chain": "^1.3.3",
"tar-stream": "^1.5.2",
"through2": "^2.0.0",
"tmp": "^0.0.31",
"uuid": "^3.0.0",
"ws": "^1.1.1",
"xen-api": "^0.9.6",
@@ -135,8 +137,8 @@
"dev": "gulp build",
"dev-test": "jest --bail --watch",
"posttest": "standard && dependency-check ./package.json",
"predev": "npm run prebuild",
"prebuild": "index-modules src/api src/xapi/mixins src/xo-mixins",
"predev": "npm run prebuild",
"prepublish": "npm run build",
"start": "node bin/xo-server",
"test": "jest"

src/api/backup.js (new file, 72 lines)

@@ -0,0 +1,72 @@
import { basename } from 'path'
import { format } from 'json-rpc-peer'
// ===================================================================
export function list ({ remote }) {
return this.listVmBackups(remote)
}
list.permission = 'admin'
list.params = {
remote: { type: 'string' }
}
// -------------------------------------------------------------------
export function scanDisk ({ remote, disk }) {
return this.scanDiskBackup(remote, disk)
}
scanDisk.permission = 'admin'
scanDisk.params = {
remote: { type: 'string' },
disk: { type: 'string' }
}
// -------------------------------------------------------------------
export function scanFiles ({ remote, disk, partition, path }) {
return this.scanFilesInDiskBackup(remote, disk, partition, path)
}
scanFiles.permission = 'admin'
scanFiles.params = {
remote: { type: 'string' },
disk: { type: 'string' },
partition: { type: 'string', optional: true },
path: { type: 'string' }
}
// -------------------------------------------------------------------
function handleFetchFiles (req, res, { remote, disk, partition, paths }) {
this.fetchFilesInDiskBackup(remote, disk, partition, paths).then(files => {
res.setHeader('content-disposition', 'attachment')
res.setHeader('content-type', 'application/octet-stream')
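// only one file per request is supported for now (cf. the
// `maxLength: 1` TODO below), hence only the first entry is streamed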
files[0].pipe(res)
}).catch(error => {
console.error(error)
res.writeHead(500)
res.end(format.error(0, error))
})
}
export async function fetchFiles (params) {
return this.registerHttpRequest(handleFetchFiles, params, {
suffix: `/${basename(params.paths[0])}`
}).then(url => ({ $getFrom: url }))
}
fetchFiles.permission = 'admin'
fetchFiles.params = {
remote: { type: 'string' },
disk: { type: 'string' },
partition: { type: 'string', optional: true },
paths: {
type: 'array',
items: { type: 'string' },
minLength: 1,
maxLength: 1 // TODO: remove when able to tar
}
}
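
Together these methods cover the whole restore flow: list the VM backups on a remote, scan a disk backup for partitions, browse a partition, then fetch a file. A minimal client-side sketch, assuming a generic JSON-RPC client (`xo.call` and all identifiers below are illustrative, not part of this commit):

// hypothetical walk through the new backup.* API
const { partitions } = await xo.call('backup.scanDisk', { remote, disk })
const files = await xo.call('backup.scanFiles', {
  remote, disk, partition: partitions[0].id, path: '/etc'
})
const { $getFrom } = await xo.call('backup.fetchFiles', {
  remote, disk, partition: partitions[0].id, paths: [ '/etc/fstab' ]
})
// the file can then be downloaded from the `$getFrom` URL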

src/stream-to-existing-buffer.js (new file)

@@ -0,0 +1,44 @@
import assert from 'assert'
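// Copies the content of a stream into an existing buffer, starting at
// `offset` (default: 0) and refusing to write past `end` (default:
// buffer.length); resolves with the number of bytes written.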
const streamToExistingBuffer = (
stream,
buffer,
offset = 0,
end = buffer.length
) => new Promise((resolve, reject) => {
assert(offset >= 0)
assert(end > offset)
assert(end <= buffer.length)
let i = offset
const onData = chunk => {
const prev = i
i += chunk.length
if (i > end) {
return onError(new Error('too much data'))
}
chunk.copy(buffer, prev)
}
stream.on('data', onData)
const clean = () => {
stream.removeListener('data', onData)
stream.removeListener('end', onEnd)
stream.removeListener('error', onError)
}
const onEnd = () => {
resolve(i - offset)
clean()
}
stream.on('end', onEnd)
const onError = error => {
reject(error)
clean()
}
stream.on('error', onError)
})
export { streamToExistingBuffer as default }

src/stream-to-existing-buffer.spec.js (new file)

@@ -0,0 +1,20 @@
/* eslint-env jest */
import { createReadStream, readFile } from 'fs'
import { fromCallback } from 'promise-toolbox'
import streamToExistingBuffer from './stream-to-existing-buffer'
describe('streamToExistingBuffer()', () => {
it('read the content of a stream in a buffer', async () => {
const stream = createReadStream(__filename)
const expected = await fromCallback(cb => readFile(__filename, 'utf-8', cb))
const buf = Buffer.allocUnsafe(expected.length + 1)
buf[0] = 'A'.charCodeAt()
await streamToExistingBuffer(stream, buf, 1)
expect(String(buf)).toBe(`A${expected}`)
})
})

src/stream-to-new-buffer.js (new file)

@@ -0,0 +1,27 @@
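// Collects all the chunks of a stream and resolves with their
// concatenation; in-house replacement for get-stream's buffer()
// (see src/utils.js)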
const streamToNewBuffer = stream => new Promise((resolve, reject) => {
const chunks = []
let length = 0
const onData = chunk => {
chunks.push(chunk)
length += chunk.length
}
stream.on('data', onData)
const clean = () => {
stream.removeListener('data', onData)
stream.removeListener('end', onEnd)
stream.removeListener('error', onError)
}
const onEnd = () => {
resolve(Buffer.concat(chunks, length))
clean()
}
stream.on('end', onEnd)
const onError = error => {
reject(error)
clean()
}
stream.on('error', onError)
})
export { streamToNewBuffer as default }

src/utils.js

@@ -1,7 +1,6 @@
import base64url from 'base64url'
import eventToPromise from 'event-to-promise'
import forEach from 'lodash/forEach'
import getStream from 'get-stream'
import has from 'lodash/has'
import highland from 'highland'
import humanFormat from 'human-format'
@@ -12,7 +11,9 @@ import keys from 'lodash/keys'
import kindOf from 'kindof'
import multiKeyHashInt from 'multikey-hash'
import pick from 'lodash/pick'
import tmp from 'tmp'
import xml2js from 'xml2js'
import { resolve } from 'path'
// Moment timezone can be loaded only one time, it's a workaround to load
// the latest version because cron module uses an old version of moment which
@@ -26,6 +27,7 @@ import { utcFormat, utcParse } from 'd3-time-format'
import {
all as pAll,
defer,
fromCallback,
promisify,
reflect as pReflect
} from 'promise-toolbox'
@@ -54,7 +56,7 @@ export function bufferToStream (buf) {
return stream
}
export const streamToBuffer = getStream.buffer
export streamToBuffer from './stream-to-new-buffer'
// -------------------------------------------------------------------
@@ -164,7 +166,7 @@ export const validChecksumOfReadStream = (stream, expectedChecksum) => {
const checksum = `$${algorithmId}$$${hash.digest('hex')}`
callback(
checksum !== expectedChecksum
checksum.trim() !== expectedChecksum.trim()
? new Error(`Bad checksum (${checksum}), expected: ${expectedChecksum}`)
: null
)
@@ -460,6 +462,11 @@ export const multiKeyHash = (...args) => new Promise(resolve => {
// -------------------------------------------------------------------
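// Resolves `path` against `root`, ensuring the result stays inside
// `root`: the inner resolve('/', path) normalizes away any leading
// '..' segments. E.g. (illustrative):
//
//   resolveSubpath('/mnt/a', 'foo/bar')   → '/mnt/a/foo/bar'
//   resolveSubpath('/mnt/a', '../../etc') → '/mnt/a/etc'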
export const resolveSubpath = (root, path) =>
resolve(root, `./${resolve('/', path)}`)
// -------------------------------------------------------------------
export const streamToArray = (stream, {
filter,
mapper
@@ -531,7 +538,7 @@ export const thunkToArray = thunk => {
// ```js
// promise.catch(throwFn('an error has occurred'))
//
// function foo (param = throwFn('param is required')) {}
// function foo (param = throwFn('param is required')()) {}
// ```
export const throwFn = error => () => {
throw (
@@ -543,5 +550,9 @@ export const throwFn = error => () => {
// -------------------------------------------------------------------
export const tmpDir = () => fromCallback(cb => tmp.dir(cb))
// -------------------------------------------------------------------
// Wrap a value in a function.
export const wrap = value => () => value

src/vhd-merge.js

@@ -1,3 +1,5 @@
// TODO: remove once completely merged in vhd.js
import fu from '@nraynaud/struct-fu'
import isEqual from 'lodash/isEqual'

src/xo-mixins/backups.js

@@ -1,18 +1,25 @@
import deferrable from 'golike-defer'
import endsWith from 'lodash/endsWith'
import escapeStringRegexp from 'escape-string-regexp'
import eventToPromise from 'event-to-promise'
import filter from 'lodash/filter'
import find from 'lodash/find'
import findIndex from 'lodash/findIndex'
import sortBy from 'lodash/sortBy'
import startsWith from 'lodash/startsWith'
import execa from 'execa'
import splitLines from 'split-lines'
import { createParser as createPairsParser } from 'parse-pairs'
import { createReadStream, readdir, stat } from 'fs'
import { satisfies as versionSatisfies } from 'semver'
import { utcFormat } from 'd3-time-format'
import {
basename,
dirname
} from 'path'
import { satisfies as versionSatisfies } from 'semver'
import { utcFormat } from 'd3-time-format'
import {
endsWith,
filter,
find,
findIndex,
once,
sortBy,
startsWith
} from 'lodash'
import vhdMerge, { chainVhd } from '../vhd-merge'
import xapiObjectToXo from '../xapi-object-to-xo'
@@ -21,9 +28,12 @@ import {
mapToArray,
noop,
pCatch,
pFromCallback,
pSettle,
resolveSubpath,
safeDateFormat,
safeDateParse
safeDateParse,
tmpDir
} from '../utils'
import {
VDI_FORMAT_VHD
@@ -51,6 +61,7 @@ const parseVmBackupPath = name => {
if (baseMatches) {
return {
datetime: safeDateParse(baseMatches[1]),
id: name,
name: baseMatches[3],
tag: baseMatches[2],
type: 'xva'
@@ -64,10 +75,11 @@ const parseVmBackupPath = name => {
) {
return {
datetime: safeDateParse(baseMatches[1]),
id: name,
name: baseMatches[2],
uuid: dirMatches[2],
tag: dirMatches[1],
type: 'delta'
type: 'delta',
uuid: dirMatches[2]
}
}
@@ -107,6 +119,82 @@ async function checkFileIntegrity (handler, name) {
// await eventToPromise(stream, 'finish')
}
// -------------------------------------------------------------------
const listPartitions = (() => {
const IGNORED = {}
forEach([
// https://github.com/jhermsmeier/node-mbr/blob/master/lib/partition.js#L38
0x05, 0x0F, 0x85, 0x15, 0x91, 0x9B, 0x5E, 0x5F, 0xCF, 0xD5, 0xC5,
0x82 // swap
], type => {
IGNORED[type] = true
})
const TYPES = {
0x7: 'NTFS',
0x83: 'linux',
0xc: 'FAT'
}
const parseLine = createPairsParser({
keyTransform: key => key === 'UUID'
? 'id'
: key.toLowerCase(),
valueTransform: (value, key) => key === 'start' || key === 'size'
? +value
: key === 'type'
? TYPES[+value] || value
: value
})
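// Example `partx --pairs` line (illustrative values):
//
//   NR="1" START="2048" SIZE="10736369664" NAME="" UUID="cfe1…" TYPE="0x83"
//
// parseLine turns it into:
//
//   { nr: '1', start: 2048, size: 10736369664, name: '', id: 'cfe1…', type: 'linux' }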
return device => execa.stdout('partx', [
'--bytes',
'--output=NR,START,SIZE,NAME,UUID,TYPE',
'--pairs',
device.path
]).then(stdout => filter(
mapToArray(splitLines(stdout), parseLine),
({ type }) => type != null && !IGNORED[+type]
))
})()
const mountPartition = (device, partitionId) => Promise.all([
partitionId != null && listPartitions(device),
tmpDir()
]).then(([ partitions, path ]) => {
const partition = partitionId && find(partitions, { id: partitionId })
const options = [
'loop',
'ro'
]
if (partition) {
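// partx reports START in 512-byte sectors, while mount's `offset`
// option expects bytes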
options.push(`offset=${partition.start * 512}`)
if (partition.type === 'linux') {
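// the filesystem is mounted read-only, so tell ext3/ext4 not to
// load (replay) its journal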
options.push('noload')
}
}
return execa('mount', [
`--options=${options.join(',')}`,
`--source=${device.path}`,
`--target=${path}`
], {
timeout: 1e4
}).then(() => ({
path,
unmount: once(() => execa('umount', [ '--lazy', path ]))
})).catch(error => {
console.log(error)
throw error
})
})
// ===================================================================
export default class {
@@ -141,6 +229,37 @@ export default class {
return backups
}
async listVmBackups (remoteId) {
const handler = await this._xo.getRemoteHandler(remoteId)
const backups = []
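// full backups are .xva files at the root of the remote; delta
// backups live in vm_delta_* directories, with one JSON metadata
// file per backup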
await Promise.all(mapToArray(await handler.list(), entry => {
if (endsWith(entry, '.xva')) {
backups.push(parseVmBackupPath(entry))
} else if (startsWith(entry, 'vm_delta_')) {
return handler.list(entry).then(children => Promise.all(mapToArray(children, child => {
if (endsWith(child, '.json')) {
const path = `${entry}/${child}`
const record = parseVmBackupPath(path)
backups.push(record)
return handler.readFile(path).then(data => {
record.disks = mapToArray(JSON.parse(data).vdis, vdi => ({
id: `${entry}/${vdi.xoPath}`,
name: vdi.name_label,
uuid: vdi.uuid
}))
}).catch(noop)
}
})))
}
}))
return backups
}
async importVmBackup (remoteId, file, sr) {
const handler = await this._xo.getRemoteHandler(remoteId)
const stream = await handler.createReadStream(file)
@@ -781,4 +900,96 @@ export default class {
targetXapi.deleteVm(vm.$id, true)::pCatch(noop)
))
}
// -----------------------------------------------------------------
_mountVhd (remoteId, vhdPath) {
return Promise.all([
this._xo.getRemoteHandler(remoteId),
tmpDir()
]).then(([ handler, mountDir ]) => {
if (!handler._getRealPath) {
throw new Error(`this remote is not supported`)
}
const remotePath = handler._getRealPath()
return execa('vhdimount', [ resolveSubpath(remotePath, vhdPath), mountDir ]).then(() =>
pFromCallback(cb => readdir(mountDir, cb)).then(entries => {
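// vhdimount exposes the virtual disk content as vhdi<N> entries;
// keep the highest index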
let max = 0
forEach(entries, entry => {
const matches = /^vhdi(\d+)/.exec(entry)
if (matches) {
const value = +matches[1]
if (value > max) {
max = value
}
}
})
if (!max) {
throw new Error('no disks found')
}
return {
path: `${mountDir}/vhdi${max}`,
unmount: once(() => execa('fusermount', [ '-uz', mountDir ]))
}
})
)
})
}
_mountPartition (remoteId, vhdPath, partitionId) {
return this._mountVhd(remoteId, vhdPath).then(device =>
mountPartition(device, partitionId).then(partition => ({
...partition,
unmount: () => partition.unmount().then(device.unmount)
}))
)
}
@deferrable
async scanDiskBackup ($defer, remoteId, vhdPath) {
const device = await this._mountVhd(remoteId, vhdPath)
$defer(device.unmount)
return {
partitions: await listPartitions(device)
}
}
@deferrable
async scanFilesInDiskBackup ($defer, remoteId, vhdPath, partitionId, path) {
const partition = await this._mountPartition(remoteId, vhdPath, partitionId)
$defer(partition.unmount)
path = resolveSubpath(partition.path, path)
const entries = await pFromCallback(cb => readdir(path, cb))
const entriesMap = {}
await Promise.all(mapToArray(entries, async name => {
const stats = await pFromCallback(cb => stat(`${path}/${name}`, cb))::pCatch(noop)
if (stats) {
entriesMap[stats.isDirectory() ? `${name}/` : name] = {}
}
}))
return entriesMap
}
async fetchFilesInDiskBackup (remoteId, vhdPath, partitionId, paths) {
const partition = await this._mountPartition(remoteId, vhdPath, partitionId)
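// unmount the partition only once every returned file stream has ended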
let i = 0
const onEnd = () => {
if (!--i) {
partition.unmount()
}
}
return mapToArray(paths, path => {
++i
return createReadStream(resolveSubpath(partition.path, path)).once('end', onEnd)
})
}
}

src/xo.js

@@ -212,7 +212,7 @@ export default class Xo extends EventEmitter {
const {fn, data} = watcher
new Promise(resolve => {
resolve(fn(req, res, data, next))
resolve(fn.call(this, req, res, data, next))
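// `fn` is now called with `this` set to the Xo instance, so that
// HTTP handlers registered by API methods (e.g. handleFetchFiles in
// src/api/backup.js) can call mixin methods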
}).then(
result => {
if (result != null) {

yarn.lock

@@ -2590,10 +2590,6 @@ get-stream@^2.2.0:
object-assign "^4.0.1"
pinkie-promise "^2.0.0"
get-stream@^3.0.0:
version "3.0.0"
resolved "https://registry.yarnpkg.com/get-stream/-/get-stream-3.0.0.tgz#8e943d1358dc37555054ecbe2edb05aa174ede14"
get-uri@1:
version "1.1.0"
resolved "https://registry.yarnpkg.com/get-uri/-/get-uri-1.1.0.tgz#7375d04daf7fcb584b3632679cbdf339b51bb149"
@@ -4818,7 +4814,7 @@ os-locale@^1.4.0:
dependencies:
lcid "^1.0.0"
os-tmpdir@^1.0.0, os-tmpdir@^1.0.1:
os-tmpdir@^1.0.0, os-tmpdir@^1.0.1, os-tmpdir@~1.0.1:
version "1.0.2"
resolved "https://registry.yarnpkg.com/os-tmpdir/-/os-tmpdir-1.0.2.tgz#bbe67406c79aa85c5cfec766fe5734555dfa1274"
@@ -4880,6 +4876,10 @@ parse-json@^2.2.0:
dependencies:
error-ex "^1.2.0"
parse-pairs@^0.2.2:
version "0.2.2"
resolved "https://registry.yarnpkg.com/parse-pairs/-/parse-pairs-0.2.2.tgz#86d3cc90fa4d3acd403b5556f68d9fcd208c3abe"
parse-passwd@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/parse-passwd/-/parse-passwd-1.0.0.tgz#6d5b934a456993b23d37f40a382d6f1666a8e5c6"
@@ -5905,6 +5905,10 @@ spdx-license-ids@^1.0.2:
version "1.2.2"
resolved "https://registry.yarnpkg.com/spdx-license-ids/-/spdx-license-ids-1.2.2.tgz#c9df7a3424594ade6bd11900d596696dc06bac57"
split-lines@^1.1.0:
version "1.1.0"
resolved "https://registry.yarnpkg.com/split-lines/-/split-lines-1.1.0.tgz#3abba8f598614142f9db8d27ab6ab875662a1e09"
split2@^2.1.0:
version "2.1.0"
resolved "https://registry.yarnpkg.com/split2/-/split2-2.1.0.tgz#7382c148cb622c4b28af7c727f9673730b73f474"
@@ -6242,6 +6246,12 @@ time-stamp@^1.0.0:
version "1.0.1"
resolved "https://registry.yarnpkg.com/time-stamp/-/time-stamp-1.0.1.tgz#9f4bd23559c9365966f3302dbba2b07c6b99b151"
tmp@^0.0.31:
version "0.0.31"
resolved "https://registry.yarnpkg.com/tmp/-/tmp-0.0.31.tgz#8f38ab9438e17315e5dbd8b3657e8bfb277ae4a7"
dependencies:
os-tmpdir "~1.0.1"
tmpl@1.0.x:
version "1.0.4"
resolved "https://registry.yarnpkg.com/tmpl/-/tmpl-1.0.4.tgz#23640dd7b42d00433911140820e5cf440e521dd1"