* full and delta backup jobs are renamed to full and incremental * inside an incremental job, the full and delta transfers are renamed to base and delta transfer * the baseVm used for computing the snapshot is renamed to comparisonBasisVm * split a lot of files to extract reusable methods * use a factory to instantiate BackupJob, the Backup* classes are now private by convention * the VmBackups class is instantiated by a XapiBackupJob => move them to the _backup folder * the writers need a VmBackup in their constructor: move them to the vmBackup folder
27 lines
777 B
JavaScript
27 lines
777 B
JavaScript
'use strict'
|
|
|
|
const { DIR_XO_POOL_METADATA_BACKUPS } = require('./RemoteAdapter.js')
|
|
const { PATH_DB_DUMP } = require('./PoolMetadataBackup.js')
|
|
|
|
exports.RestoreMetadataBackup = class RestoreMetadataBackup {
|
|
constructor({ backupId, handler, xapi }) {
|
|
this._backupId = backupId
|
|
this._handler = handler
|
|
this._xapi = xapi
|
|
}
|
|
|
|
async run() {
|
|
const backupId = this._backupId
|
|
const handler = this._handler
|
|
const xapi = this._xapi
|
|
|
|
if (backupId.split('/')[0] === DIR_XO_POOL_METADATA_BACKUPS) {
|
|
return xapi.putResource(await handler.createReadStream(`${backupId}/data`), PATH_DB_DUMP, {
|
|
task: xapi.task_create('Import pool metadata'),
|
|
})
|
|
} else {
|
|
return String(await handler.readFile(`${backupId}/data.json`))
|
|
}
|
|
}
|
|
}
|