Compare commits

..

2 Commits

Author SHA1 Message Date
Thierry
72454ac593 add changelog 2023-05-12 09:30:44 +02:00
Thierry
4a45e78c1c feat(lite/console): add ability to open console in new window 2023-05-12 09:26:40 +02:00
197 changed files with 3657 additions and 4631 deletions

View File

@@ -28,7 +28,7 @@ module.exports = {
},
},
{
files: ['*.{integ,spec,test}.{,c,m}js'],
files: ['*.{spec,test}.{,c,m}js'],
rules: {
'n/no-unpublished-require': 'off',
'n/no-unpublished-import': 'off',

View File

@@ -21,7 +21,7 @@
"fuse-native": "^2.2.6",
"lru-cache": "^7.14.0",
"promise-toolbox": "^0.21.0",
"vhd-lib": "^4.4.1"
"vhd-lib": "^4.4.0"
},
"scripts": {
"postversion": "npm publish --access public"

View File

@@ -23,7 +23,7 @@
"@xen-orchestra/async-map": "^0.1.2",
"@xen-orchestra/log": "^0.6.0",
"promise-toolbox": "^0.21.0",
"xen-api": "^1.3.1"
"xen-api": "^1.3.0"
},
"devDependencies": {
"tap": "^16.3.0",
@@ -31,6 +31,6 @@
},
"scripts": {
"postversion": "npm publish --access public",
"test-integration": "tap --lines 70 --functions 36 --branches 54 --statements 69 *.integ.js"
"test-integration": "tap *.spec.js"
}
}

View File

@@ -13,7 +13,7 @@
"url": "https://vates.fr"
},
"license": "ISC",
"version": "0.1.2",
"version": "0.1.1",
"engines": {
"node": ">=14"
},

View File

@@ -7,8 +7,8 @@
"bugs": "https://github.com/vatesfr/xen-orchestra/issues",
"dependencies": {
"@xen-orchestra/async-map": "^0.1.2",
"@xen-orchestra/backups": "^0.37.0",
"@xen-orchestra/fs": "^4.0.0",
"@xen-orchestra/backups": "^0.36.0",
"@xen-orchestra/fs": "^3.3.4",
"filenamify": "^4.1.0",
"getopts": "^2.2.5",
"lodash": "^4.17.15",
@@ -27,7 +27,7 @@
"scripts": {
"postversion": "npm publish --access public"
},
"version": "1.0.7",
"version": "1.0.6",
"license": "AGPL-3.0-or-later",
"author": {
"name": "Vates SAS",

View File

@@ -1,16 +1,307 @@
'use strict'
const { Metadata } = require('./_runners/Metadata.js')
const { VmsXapi } = require('./_runners/VmsXapi.js')
const { asyncMap, asyncMapSettled } = require('@xen-orchestra/async-map')
const Disposable = require('promise-toolbox/Disposable')
const ignoreErrors = require('promise-toolbox/ignoreErrors')
const pTimeout = require('promise-toolbox/timeout')
const { compileTemplate } = require('@xen-orchestra/template')
const { limitConcurrency } = require('limit-concurrency-decorator')
exports.createRunner = function createRunner(opts) {
const { type } = opts.job
switch (type) {
case 'backup':
return new VmsXapi(opts)
case 'metadataBackup':
return new Metadata(opts)
default:
throw new Error(`No runner for the backup type ${type}`)
const { extractIdsFromSimplePattern } = require('./extractIdsFromSimplePattern.js')
const { PoolMetadataBackup } = require('./_PoolMetadataBackup.js')
const { Task } = require('./Task.js')
const { VmBackup } = require('./_VmBackup.js')
const { XoMetadataBackup } = require('./_XoMetadataBackup.js')
const createStreamThrottle = require('./_createStreamThrottle.js')
const noop = Function.prototype
// Indexes a list of `{ adapter, remoteId }` entries by their remote id.
// Returns a plain object mapping remoteId -> adapter (last entry wins on
// duplicate ids, same as the original forEach-based implementation).
const getAdaptersByRemote = adapters => {
  const byRemote = {}
  for (const { adapter, remoteId } of adapters) {
    byRemote[remoteId] = adapter
  }
  return byRemote
}
// Runs a Task and swallows any rejection: failures are already recorded in the
// task logs, so callers only care about the side effect of the run.
const runTask = (...args) => Task.run(...args).catch(noop) // errors are handled by logs
// Settings shared by every job type; overridable by config and job settings.
const DEFAULT_SETTINGS = {
// ms to wait for a remote adapter before giving up (300e3 = 5 minutes)
getRemoteTimeout: 300e3,
reportWhen: 'failure',
}
// Default per-VM backup settings; individual jobs/schedules/VMs may override
// any of these (see the settings layering in the Backup constructor).
const DEFAULT_VM_SETTINGS = {
bypassVdiChainsCheck: false,
checkpointSnapshot: false,
// number of VMs backed up in parallel (0 appears to mean unlimited — see
// _runVmBackup which only wraps handleVm when concurrency !== 0)
concurrency: 2,
copyRetention: 0,
deleteFirst: false,
exportRetention: 0,
// 0 disables forced full backups (see _selectBaseVm)
fullInterval: 0,
healthCheckSr: undefined,
healthCheckVmsWithTags: [],
// export throttling in bytes per second; 0 disables throttling — TODO confirm
maxExportRate: 0,
maxMergedDeltasPerRun: Infinity,
offlineBackup: false,
offlineSnapshot: false,
snapshotRetention: 0,
timeout: 0,
useNbd: false,
unconditionalSnapshot: false,
validateVhdStreams: false,
vmTimeout: 0,
}
// Defaults specific to metadata (pool/XO) backup jobs; a retention of 0
// disables the corresponding metadata mode.
const DEFAULT_METADATA_SETTINGS = {
retentionPoolMetadata: 0,
retentionXoMetadata: 0,
}
// Raised when acquiring a remote adapter exceeds the configured
// `getRemoteTimeout`; carries the offending remote's id for reporting.
class RemoteTimeoutError extends Error {
  constructor(remoteId) {
    super(`timeout while getting the remote ${remoteId}`)
    this.remoteId = remoteId
  }
}
// Entry point of a backup job run. Instantiated with accessors for the
// config, the remotes and the XAPI records; `run()` is bound in the
// constructor to either `_runVmBackup` or `_runMetadataBackup` depending on
// the job type.
exports.Backup = class Backup {
constructor({ config, getAdapter, getConnectedRecord, job, schedule }) {
this._config = config
this._getRecord = getConnectedRecord
this._job = job
this._schedule = schedule
// compiled template producing the name_label of snapshots created by this job
this._getSnapshotNameLabel = compileTemplate(config.snapshotNameLabelTpl, {
'{job.name}': job.name,
'{vm.name_label}': vm => vm.name_label,
})
const { type } = job
// settings layering, lowest to highest priority:
// hard-coded defaults -> config defaults (global, then per job type)
// -> job settings for all schedules ('') -> settings for this schedule
const baseSettings = { ...DEFAULT_SETTINGS }
if (type === 'backup') {
Object.assign(baseSettings, DEFAULT_VM_SETTINGS, config.defaultSettings, config.vm?.defaultSettings)
this.run = this._runVmBackup
} else if (type === 'metadataBackup') {
Object.assign(baseSettings, DEFAULT_METADATA_SETTINGS, config.defaultSettings, config.metadata?.defaultSettings)
this.run = this._runMetadataBackup
} else {
throw new Error(`No runner for the backup type ${type}`)
}
Object.assign(baseSettings, job.settings[''])
this._baseSettings = baseSettings
this._settings = { ...baseSettings, ...job.settings[schedule.id] }
const { getRemoteTimeout } = this._settings
// Resolves to a Disposable wrapping { adapter, remoteId }, or to undefined
// when the remote could not be acquired in time — the failure is then
// reported as a (failed) task instead of aborting the whole job.
this._getAdapter = async function (remoteId) {
try {
const disposable = await pTimeout.call(getAdapter(remoteId), getRemoteTimeout, new RemoteTimeoutError(remoteId))
return new Disposable(() => disposable.dispose(), {
adapter: disposable.value,
remoteId,
})
} catch (error) {
// See https://github.com/vatesfr/xen-orchestra/commit/6aa6cfba8ec939c0288f0fa740f6dfad98c43cbb
runTask(
{
name: 'get remote adapter',
data: { type: 'remote', id: remoteId },
},
() => Promise.reject(error)
)
}
}
}
// Backs up pool metadata (XAPI db dump) and/or XO metadata to the remotes.
// Validates the job configuration up front and throws when nothing can run.
async _runMetadataBackup() {
const schedule = this._schedule
const job = this._job
const remoteIds = extractIdsFromSimplePattern(job.remotes)
if (remoteIds.length === 0) {
throw new Error('metadata backup job cannot run without remotes')
}
const config = this._config
const poolIds = extractIdsFromSimplePattern(job.pools)
const isEmptyPools = poolIds.length === 0
const isXoMetadata = job.xoMetadata !== undefined
if (!isXoMetadata && isEmptyPools) {
throw new Error('no metadata mode found')
}
const settings = this._settings
const { retentionPoolMetadata, retentionXoMetadata } = settings
// at least one of the configured modes must have a non-zero retention
if (
(retentionPoolMetadata === 0 && retentionXoMetadata === 0) ||
(!isXoMetadata && retentionPoolMetadata === 0) ||
(isEmptyPools && retentionXoMetadata === 0)
) {
throw new Error('no retentions corresponding to the metadata modes found')
}
await Disposable.use(
Disposable.all(
poolIds.map(id =>
this._getRecord('pool', id).catch(error => {
// See https://github.com/vatesfr/xen-orchestra/commit/6aa6cfba8ec939c0288f0fa740f6dfad98c43cbb
runTask(
{
name: 'get pool record',
data: { type: 'pool', id },
},
() => Promise.reject(error)
)
})
)
),
Disposable.all(remoteIds.map(id => this._getAdapter(id))),
async (pools, remoteAdapters) => {
// remove adapters that failed (already handled)
remoteAdapters = remoteAdapters.filter(_ => _ !== undefined)
if (remoteAdapters.length === 0) {
return
}
remoteAdapters = getAdaptersByRemote(remoteAdapters)
// remove pools that failed (already handled)
pools = pools.filter(_ => _ !== undefined)
// pool metadata and XO metadata backups run concurrently
const promises = []
if (pools.length !== 0 && settings.retentionPoolMetadata !== 0) {
promises.push(
asyncMap(pools, async pool =>
runTask(
{
name: `Starting metadata backup for the pool (${pool.$id}). (${job.id})`,
data: {
id: pool.$id,
pool,
poolMaster: await ignoreErrors.call(pool.$xapi.getRecord('host', pool.master)),
type: 'pool',
},
},
() =>
new PoolMetadataBackup({
config,
job,
pool,
remoteAdapters,
schedule,
settings,
}).run()
)
)
)
}
if (job.xoMetadata !== undefined && settings.retentionXoMetadata !== 0) {
promises.push(
runTask(
{
name: `Starting XO metadata backup. (${job.id})`,
data: {
type: 'xo',
},
},
() =>
new XoMetadataBackup({
config,
job,
remoteAdapters,
schedule,
settings,
}).run()
)
)
}
await Promise.all(promises)
}
)
}
// Backs up / replicates the VMs matched by the job pattern: acquires SRs,
// remote adapters and the optional health-check SR, then runs one VmBackup
// per VM, with the configured concurrency.
async _runVmBackup() {
const job = this._job
// FIXME: proper SimpleIdPattern handling
const getSnapshotNameLabel = this._getSnapshotNameLabel
const schedule = this._schedule
const settings = this._settings
// shared throttle so the rate limit applies across all VM exports
const throttleStream = createStreamThrottle(settings.maxExportRate)
const config = this._config
await Disposable.use(
Disposable.all(
extractIdsFromSimplePattern(job.srs).map(id =>
this._getRecord('SR', id).catch(error => {
runTask(
{
name: 'get SR record',
data: { type: 'SR', id },
},
() => Promise.reject(error)
)
})
)
),
Disposable.all(extractIdsFromSimplePattern(job.remotes).map(id => this._getAdapter(id))),
() => (settings.healthCheckSr !== undefined ? this._getRecord('SR', settings.healthCheckSr) : undefined),
async (srs, remoteAdapters, healthCheckSr) => {
// remove adapters that failed (already handled)
remoteAdapters = remoteAdapters.filter(_ => _ !== undefined)
// remove srs that failed (already handled)
srs = srs.filter(_ => _ !== undefined)
// nothing to write to and no snapshots to keep: nothing to do
if (remoteAdapters.length === 0 && srs.length === 0 && settings.snapshotRetention === 0) {
return
}
const vmIds = extractIdsFromSimplePattern(job.vms)
Task.info('vms', { vms: vmIds })
remoteAdapters = getAdaptersByRemote(remoteAdapters)
const allSettings = this._job.settings
const baseSettings = this._baseSettings
// runs the backup of a single VM as its own task; failures to even fetch
// the VM record are reported as a failed task instead of aborting the job
const handleVm = vmUuid => {
const taskStart = { name: 'backup VM', data: { type: 'VM', id: vmUuid } }
return this._getRecord('VM', vmUuid).then(
disposableVm =>
Disposable.use(disposableVm, vm => {
taskStart.data.name_label = vm.name_label
return runTask(taskStart, () =>
new VmBackup({
baseSettings,
config,
getSnapshotNameLabel,
healthCheckSr,
job,
remoteAdapters,
schedule,
settings: { ...settings, ...allSettings[vm.uuid] },
srs,
throttleStream,
vm,
}).run()
)
}),
error =>
runTask(taskStart, () => {
throw error
})
)
}
const { concurrency } = settings
// concurrency === 0 appears to mean "unlimited" — TODO confirm
await asyncMapSettled(vmIds, concurrency === 0 ? handleVm : limitConcurrency(concurrency)(handleVm))
}
)
}
}

View File

@@ -3,14 +3,14 @@
const assert = require('assert')
const { formatFilenameDate } = require('./_filenameDate.js')
const { importIncrementalVm } = require('./_incrementalVm.js')
const { importDeltaVm } = require('./_deltaVm.js')
const { Task } = require('./Task.js')
const { watchStreamSize } = require('./_watchStreamSize.js')
exports.ImportVmBackup = class ImportVmBackup {
constructor({ adapter, metadata, srUuid, xapi, settings: { newMacAddresses, mapVdisSrs = {} } = {} }) {
this._adapter = adapter
this._importIncrementalVmSettings = { newMacAddresses, mapVdisSrs }
this._importDeltaVmSettings = { newMacAddresses, mapVdisSrs }
this._metadata = metadata
this._srUuid = srUuid
this._xapi = xapi
@@ -31,11 +31,11 @@ exports.ImportVmBackup = class ImportVmBackup {
assert.strictEqual(metadata.mode, 'delta')
const ignoredVdis = new Set(
Object.entries(this._importIncrementalVmSettings.mapVdisSrs)
Object.entries(this._importDeltaVmSettings.mapVdisSrs)
.filter(([_, srUuid]) => srUuid === null)
.map(([vdiUuid]) => vdiUuid)
)
backup = await adapter.readIncrementalVmBackup(metadata, ignoredVdis)
backup = await adapter.readDeltaVmBackup(metadata, ignoredVdis)
Object.values(backup.streams).forEach(stream => watchStreamSize(stream, sizeContainer))
}
@@ -49,8 +49,8 @@ exports.ImportVmBackup = class ImportVmBackup {
const vmRef = isFull
? await xapi.VM_import(backup, srRef)
: await importIncrementalVm(backup, await xapi.getRecord('SR', srRef), {
...this._importIncrementalVmSettings,
: await importDeltaVm(backup, await xapi.getRecord('SR', srRef), {
...this._importDeltaVmSettings,
detectBase: false,
})

View File

@@ -333,7 +333,7 @@ class RemoteAdapter {
const RE_VHDI = /^vhdi(\d+)$/
const handler = this._handler
const diskPath = handler.getFilePath('/' + diskId)
const diskPath = handler._getFilePath('/' + diskId)
const mountDir = yield getTmpDir()
await fromCallback(execFile, 'vhdimount', [diskPath, mountDir])
try {
@@ -404,27 +404,20 @@ class RemoteAdapter {
return `${baseName}.vhd`
}
async listAllVms() {
async listAllVmBackups() {
const handler = this._handler
const vmsUuids = []
await asyncEach(await handler.list(BACKUP_DIR), async entry => {
const backups = { __proto__: null }
await asyncMap(await handler.list(BACKUP_DIR), async entry => {
// ignore hidden and lock files
if (entry[0] !== '.' && !entry.endsWith('.lock')) {
vmsUuids.push(entry)
const vmBackups = await this.listVmBackups(entry)
if (vmBackups.length !== 0) {
backups[entry] = vmBackups
}
}
})
return vmsUuids
}
async listAllVmBackups() {
const vmsUuids = await this.listAllVms()
const backups = { __proto__: null }
await asyncEach(vmsUuids, async vmUuid => {
const vmBackups = await this.listVmBackups(vmUuid)
if (vmBackups.length !== 0) {
backups[vmUuid] = vmBackups
}
})
return backups
}
@@ -698,8 +691,8 @@ class RemoteAdapter {
}
// open the hierarchy of ancestors until we find a full one
async _createVhdStream(handler, path, { useChain }) {
const disposableSynthetic = useChain ? await VhdSynthetic.fromVhdChain(handler, path) : await openVhd(handler, path)
async _createSyntheticStream(handler, path) {
const disposableSynthetic = await VhdSynthetic.fromVhdChain(handler, path)
// I don't want the vhds to be disposed on return
// but only when the stream is done ( or failed )
@@ -724,7 +717,7 @@ class RemoteAdapter {
return stream
}
async readIncrementalVmBackup(metadata, ignoredVdis, { useChain = true } = {}) {
async readDeltaVmBackup(metadata, ignoredVdis) {
const handler = this._handler
const { vbds, vhds, vifs, vm, vmSnapshot } = metadata
const dir = dirname(metadata._filename)
@@ -732,7 +725,7 @@ class RemoteAdapter {
const streams = {}
await asyncMapSettled(Object.keys(vdis), async ref => {
streams[`${ref}.vhd`] = await this._createVhdStream(handler, join(dir, vhds[ref]), { useChain })
streams[`${ref}.vhd`] = await this._createSyntheticStream(handler, join(dir, vhds[ref]))
})
return {

View File

@@ -1,7 +1,7 @@
'use strict'
const { DIR_XO_POOL_METADATA_BACKUPS } = require('./RemoteAdapter.js')
const { PATH_DB_DUMP } = require('./_runners/_PoolMetadataBackup.js')
const { PATH_DB_DUMP } = require('./_PoolMetadataBackup.js')
exports.RestoreMetadataBackup = class RestoreMetadataBackup {
constructor({ backupId, handler, xapi }) {

View File

@@ -2,10 +2,10 @@
const { asyncMap } = require('@xen-orchestra/async-map')
const { DIR_XO_POOL_METADATA_BACKUPS } = require('../RemoteAdapter.js')
const { DIR_XO_POOL_METADATA_BACKUPS } = require('./RemoteAdapter.js')
const { forkStreamUnpipe } = require('./_forkStreamUnpipe.js')
const { formatFilenameDate } = require('../_filenameDate.js')
const { Task } = require('../Task.js')
const { formatFilenameDate } = require('./_filenameDate.js')
const { Task } = require('./Task.js')
const PATH_DB_DUMP = '/pool/xmldbdump'
exports.PATH_DB_DUMP = PATH_DB_DUMP

View File

@@ -0,0 +1,515 @@
'use strict'
const assert = require('assert')
const findLast = require('lodash/findLast.js')
const groupBy = require('lodash/groupBy.js')
const ignoreErrors = require('promise-toolbox/ignoreErrors')
const keyBy = require('lodash/keyBy.js')
const mapValues = require('lodash/mapValues.js')
const vhdStreamValidator = require('vhd-lib/vhdStreamValidator.js')
const { asyncMap } = require('@xen-orchestra/async-map')
const { createLogger } = require('@xen-orchestra/log')
const { decorateMethodsWith } = require('@vates/decorate-with')
const { defer } = require('golike-defer')
const { formatDateTime } = require('@xen-orchestra/xapi')
const { pipeline } = require('node:stream')
const { DeltaBackupWriter } = require('./writers/DeltaBackupWriter.js')
const { DeltaReplicationWriter } = require('./writers/DeltaReplicationWriter.js')
const { exportDeltaVm } = require('./_deltaVm.js')
const { forkStreamUnpipe } = require('./_forkStreamUnpipe.js')
const { FullBackupWriter } = require('./writers/FullBackupWriter.js')
const { FullReplicationWriter } = require('./writers/FullReplicationWriter.js')
const { getOldEntries } = require('./_getOldEntries.js')
const { Task } = require('./Task.js')
const { watchStreamSize } = require('./_watchStreamSize.js')
const { debug, warn } = createLogger('xo:backups:VmBackup')
// Error carrying the list of the individual failures that caused it
// (local shim shadowing the ES2021 global of the same name).
class AggregateError extends Error {
  /**
   * @param {Error[]} errors - the underlying failures being aggregated
   * @param {string} message - summary message for the aggregate
   */
  constructor(errors, message) {
    super(message)
    Object.assign(this, { errors })
  }
}
// Applies `fn` to each item of `iterable` SEQUENTIALLY, awaiting each call
// before starting the next. `thisArg` (defaulting to the iterable itself)
// is used as `this` when invoking `fn`. Stops at the first rejection.
const asyncEach = async (iterable, fn, thisArg = iterable) => {
  for (const value of iterable) {
    await Reflect.apply(fn, thisArg, [value])
  }
}
// Creates a shallow fork of a delta export: the returned object inherits
// every property of `deltaExport` but owns a `streams` property whose
// streams are forked with `forkStreamUnpipe`, so the export can be consumed
// by several writers independently. (`mapValues` / `forkStreamUnpipe` come
// from this file's top-level imports.)
const forkDeltaExport = deltaExport => {
  const forked = Object.create(deltaExport)
  Object.defineProperty(forked, 'streams', {
    value: mapValues(deltaExport.streams, forkStreamUnpipe),
  })
  return forked
}
const noop = Function.prototype
// Orchestrates the backup of a single VM for one job/schedule run:
// snapshotting, delta or full export, transfer to the configured writers
// (backup remotes and replication SRs), snapshot retention and health check.
// `run()` is decorated with golike-defer (see bottom of file) and therefore
// receives a `$defer` helper as first argument.
class VmBackup {
constructor({
config,
getSnapshotNameLabel,
healthCheckSr,
job,
remoteAdapters,
remotes,
schedule,
settings,
srs,
throttleStream,
vm,
}) {
if (vm.other_config['xo:backup:job'] === job.id && 'start' in vm.blocked_operations) {
// don't match replicated VMs created by this very job otherwise they
// will be replicated again and again
throw new Error('cannot backup a VM created by this very job')
}
this.config = config
this.job = job
this.remoteAdapters = remoteAdapters
this.scheduleId = schedule.id
// set when the snapshot (or export) actually starts
this.timestamp = undefined
// VM currently backed up
this.vm = vm
const { tags } = this.vm
// VM (snapshot) that is really exported
this.exportedVm = undefined
this._fullVdisRequired = undefined
this._getSnapshotNameLabel = getSnapshotNameLabel
this._isDelta = job.mode === 'delta'
this._healthCheckSr = healthCheckSr
this._jobId = job.id
this._jobSnapshots = undefined
this._throttleStream = throttleStream
this._xapi = vm.$xapi
// Base VM for the export
this._baseVm = undefined
// Settings for this specific run (job, schedule, VM)
// NOTE: special VM tags force snapshot behavior by mutating `settings`
if (tags.includes('xo-memory-backup')) {
settings.checkpointSnapshot = true
}
if (tags.includes('xo-offline-backup')) {
settings.offlineSnapshot = true
}
this._settings = settings
// Create writers
{
const writers = new Set()
this._writers = writers
const [BackupWriter, ReplicationWriter] = this._isDelta
? [DeltaBackupWriter, DeltaReplicationWriter]
: [FullBackupWriter, FullReplicationWriter]
const allSettings = job.settings
// one backup writer per remote with a non-zero export retention
Object.keys(remoteAdapters).forEach(remoteId => {
const targetSettings = {
...settings,
...allSettings[remoteId],
}
if (targetSettings.exportRetention !== 0) {
writers.add(new BackupWriter({ backup: this, remoteId, settings: targetSettings }))
}
})
// one replication writer per SR with a non-zero copy retention
srs.forEach(sr => {
const targetSettings = {
...settings,
...allSettings[sr.uuid],
}
if (targetSettings.copyRetention !== 0) {
writers.add(new ReplicationWriter({ backup: this, sr, settings: targetSettings }))
}
})
}
}
// calls fn for each function, warns of any errors, and throws only if there are no writers left
// A failing writer is removed from the set so later steps skip it.
async _callWriters(fn, step, parallel = true) {
const writers = this._writers
const n = writers.size
if (n === 0) {
return
}
async function callWriter(writer) {
const { name } = writer.constructor
try {
debug('writer step starting', { step, writer: name })
await fn(writer)
debug('writer step succeeded', { duration: step, writer: name })
} catch (error) {
writers.delete(writer)
warn('writer step failed', { error, step, writer: name })
// these two steps are the only one that are not already in their own sub tasks
if (step === 'writer.checkBaseVdis()' || step === 'writer.beforeBackup()') {
Task.warning(
`the writer ${name} has failed the step ${step} with error ${error.message}. It won't be used anymore in this job execution.`
)
}
throw error
}
}
// single writer: propagate its error directly
if (n === 1) {
const [writer] = writers
return callWriter(writer)
}
const errors = []
await (parallel ? asyncMap : asyncEach)(writers, async function (writer) {
try {
await callWriter(writer)
} catch (error) {
errors.push(error)
}
})
// throw only when EVERY writer failed, aggregating their errors
if (writers.size === 0) {
throw new AggregateError(errors, 'all targets have failed, step: ' + step)
}
}
// ensure the VM itself does not have any backup metadata which would be
// copied on manual snapshots and interfere with the backup jobs
async _cleanMetadata() {
const { vm } = this
if ('xo:backup:job' in vm.other_config) {
await vm.update_other_config({
'xo:backup:datetime': null,
'xo:backup:deltaChainLength': null,
'xo:backup:exported': null,
'xo:backup:job': null,
'xo:backup:schedule': null,
'xo:backup:vm': null,
})
}
}
// Snapshots (or checkpoints) the VM when required and tags the snapshot with
// this job's metadata; otherwise exports the VM itself.
// Sets `this.exportedVm` and `this.timestamp`.
async _snapshot() {
const { vm } = this
const xapi = this._xapi
const settings = this._settings
// a snapshot is needed for deltas, for running VMs (unless offline backup),
// when snapshots are retained, or when explicitly forced
const doSnapshot =
settings.unconditionalSnapshot ||
this._isDelta ||
(!settings.offlineBackup && vm.power_state === 'Running') ||
settings.snapshotRetention !== 0
if (doSnapshot) {
await Task.run({ name: 'snapshot' }, async () => {
if (!settings.bypassVdiChainsCheck) {
await vm.$assertHealthyVdiChains()
}
const snapshotRef = await vm[settings.checkpointSnapshot ? '$checkpoint' : '$snapshot']({
ignoreNobakVdis: true,
name_label: this._getSnapshotNameLabel(vm),
unplugVusbs: true,
})
this.timestamp = Date.now()
await xapi.setFieldEntries('VM', snapshotRef, 'other_config', {
'xo:backup:datetime': formatDateTime(this.timestamp),
'xo:backup:job': this._jobId,
'xo:backup:schedule': this.scheduleId,
'xo:backup:vm': vm.uuid,
})
this.exportedVm = await xapi.getRecord('VM', snapshotRef)
return this.exportedVm.uuid
})
} else {
this.exportedVm = vm
this.timestamp = Date.now()
}
}
// Exports a delta (incremental) backup against `this._baseVm` (full when no
// usable base was found) and hands the forked streams to every writer.
async _copyDelta() {
const { exportedVm } = this
const baseVm = this._baseVm
const fullVdisRequired = this._fullVdisRequired
const isFull = fullVdisRequired === undefined || fullVdisRequired.size !== 0
await this._callWriters(writer => writer.prepare({ isFull }), 'writer.prepare()')
const deltaExport = await exportDeltaVm(exportedVm, baseVm, {
fullVdisRequired,
})
// since NBD is network based, if one disk use nbd , all the disk use them
// except the suspended VDI
if (Object.values(deltaExport.streams).some(({ _nbd }) => _nbd)) {
Task.info('Transfer data using NBD')
}
const sizeContainers = mapValues(deltaExport.streams, stream => watchStreamSize(stream))
if (this._settings.validateVhdStreams) {
deltaExport.streams = mapValues(deltaExport.streams, stream => pipeline(stream, vhdStreamValidator, noop))
}
deltaExport.streams = mapValues(deltaExport.streams, this._throttleStream)
const timestamp = Date.now()
await this._callWriters(
writer =>
writer.transfer({
deltaExport: forkDeltaExport(deltaExport),
sizeContainers,
timestamp,
}),
'writer.transfer()'
)
this._baseVm = exportedVm
// track the delta chain length on the snapshot for fullInterval handling
if (baseVm !== undefined) {
await exportedVm.update_other_config(
'xo:backup:deltaChainLength',
String(+(baseVm.other_config['xo:backup:deltaChainLength'] ?? 0) + 1)
)
}
// not the case if offlineBackup
if (exportedVm.is_a_snapshot) {
await exportedVm.update_other_config('xo:backup:exported', 'true')
}
const size = Object.values(sizeContainers).reduce((sum, { size }) => sum + size, 0)
const end = Date.now()
const duration = end - timestamp
debug('transfer complete', {
duration,
// MiB/s — size is in bytes, duration in ms — TODO confirm units
speed: duration !== 0 ? (size * 1e3) / 1024 / 1024 / duration : 0,
size,
})
await this._callWriters(writer => writer.cleanup(), 'writer.cleanup()')
}
// Exports a full (XVA) backup stream and hands a fork of it to every writer.
async _copyFull() {
const { compression } = this.job
const stream = this._throttleStream(
await this._xapi.VM_export(this.exportedVm.$ref, {
compress: Boolean(compression) && (compression === 'native' ? 'gzip' : 'zstd'),
useSnapshot: false,
})
)
const sizeContainer = watchStreamSize(stream)
const timestamp = Date.now()
await this._callWriters(
writer =>
writer.run({
sizeContainer,
stream: forkStreamUnpipe(stream),
timestamp,
}),
'writer.run()'
)
const { size } = sizeContainer
const end = Date.now()
const duration = end - timestamp
debug('transfer complete', {
duration,
speed: duration !== 0 ? (size * 1e3) / 1024 / 1024 / duration : 0,
size,
})
}
// Refreshes `this._jobSnapshots`: the VM's snapshots created by THIS job,
// sorted by their backup datetime (oldest first).
async _fetchJobSnapshots() {
const jobId = this._jobId
const vmRef = this.vm.$ref
const xapi = this._xapi
const snapshotsRef = await xapi.getField('VM', vmRef, 'snapshots')
const snapshotsOtherConfig = await asyncMap(snapshotsRef, ref => xapi.getField('VM', ref, 'other_config'))
const snapshots = []
snapshotsOtherConfig.forEach((other_config, i) => {
if (other_config['xo:backup:job'] === jobId) {
snapshots.push({ other_config, $ref: snapshotsRef[i] })
}
})
snapshots.sort((a, b) => (a.other_config['xo:backup:datetime'] < b.other_config['xo:backup:datetime'] ? -1 : 1))
this._jobSnapshots = snapshots
}
// Destroys job snapshots beyond each schedule's snapshotRetention, keeping
// the current base VM alive even if it falls out of retention.
async _removeUnusedSnapshots() {
const allSettings = this.job.settings
const baseSettings = this._baseSettings
const baseVmRef = this._baseVm?.$ref
const snapshotsPerSchedule = groupBy(this._jobSnapshots, _ => _.other_config['xo:backup:schedule'])
const xapi = this._xapi
await asyncMap(Object.entries(snapshotsPerSchedule), ([scheduleId, snapshots]) => {
const settings = {
...baseSettings,
...allSettings[scheduleId],
...allSettings[this.vm.uuid],
}
return asyncMap(getOldEntries(settings.snapshotRetention, snapshots), ({ $ref }) => {
if ($ref !== baseVmRef) {
return xapi.VM_destroy($ref)
}
})
})
}
// Picks the most recent exported job snapshot as the base for a delta
// export, unless fullInterval forces a full, and determines per-VDI whether
// a full export is required (missing/mismatched base VDIs).
// Sets `this._baseVm` and `this._fullVdisRequired`.
async _selectBaseVm() {
const xapi = this._xapi
let baseVm = findLast(this._jobSnapshots, _ => 'xo:backup:exported' in _.other_config)
if (baseVm === undefined) {
debug('no base VM found')
return
}
const fullInterval = this._settings.fullInterval
const deltaChainLength = +(baseVm.other_config['xo:backup:deltaChainLength'] ?? 0) + 1
if (!(fullInterval === 0 || fullInterval > deltaChainLength)) {
debug('not using base VM becaust fullInterval reached')
return
}
const srcVdis = keyBy(await xapi.getRecords('VDI', await this.vm.$getDisks()), '$ref')
// resolve full record
baseVm = await xapi.getRecord('VM', baseVm.$ref)
const baseUuidToSrcVdi = new Map()
await asyncMap(await baseVm.$getDisks(), async baseRef => {
const [baseUuid, snapshotOf] = await Promise.all([
xapi.getField('VDI', baseRef, 'uuid'),
xapi.getField('VDI', baseRef, 'snapshot_of'),
])
const srcVdi = srcVdis[snapshotOf]
if (srcVdi !== undefined) {
baseUuidToSrcVdi.set(baseUuid, srcVdi)
} else {
debug('ignore snapshot VDI because no longer present on VM', {
vdi: baseUuid,
})
}
})
// let every writer veto base VDIs it cannot resume from (sequential call)
const presentBaseVdis = new Map(baseUuidToSrcVdi)
await this._callWriters(
writer => presentBaseVdis.size !== 0 && writer.checkBaseVdis(presentBaseVdis, baseVm),
'writer.checkBaseVdis()',
false
)
if (presentBaseVdis.size === 0) {
debug('no base VM found')
return
}
const fullVdisRequired = new Set()
baseUuidToSrcVdi.forEach((srcVdi, baseUuid) => {
if (presentBaseVdis.has(baseUuid)) {
debug('found base VDI', {
base: baseUuid,
vdi: srcVdi.uuid,
})
} else {
debug('missing base VDI', {
base: baseUuid,
vdi: srcVdi.uuid,
})
fullVdisRequired.add(srcVdi.uuid)
}
})
this._baseVm = baseVm
this._fullVdisRequired = fullVdisRequired
}
// Runs the writers' health check on the dedicated SR, when configured and
// when the VM matches the (optional) tag filter.
async _healthCheck() {
const settings = this._settings
if (this._healthCheckSr === undefined) {
return
}
// check if current VM has tags
const { tags } = this.vm
const intersect = settings.healthCheckVmsWithTags.some(t => tags.includes(t))
if (settings.healthCheckVmsWithTags.length !== 0 && !intersect) {
return
}
await this._callWriters(writer => writer.healthCheck(this._healthCheckSr), 'writer.healthCheck()')
}
// Full backup sequence for this VM. `$defer` is injected by the `defer`
// decorator (see bottom of file) and guarantees writer.afterBackup() runs.
async run($defer) {
const settings = this._settings
assert(
!settings.offlineBackup || settings.snapshotRetention === 0,
'offlineBackup is not compatible with snapshotRetention'
)
await this._callWriters(async writer => {
await writer.beforeBackup()
$defer(async () => {
await writer.afterBackup()
})
}, 'writer.beforeBackup()')
await this._fetchJobSnapshots()
if (this._isDelta) {
await this._selectBaseVm()
}
await this._cleanMetadata()
await this._removeUnusedSnapshots()
const { vm } = this
const isRunning = vm.power_state === 'Running'
// when to restart the VM: after the snapshot (offlineSnapshot) or after the
// whole backup (offlineBackup); false when the VM was not running
const startAfter = isRunning && (settings.offlineBackup ? 'backup' : settings.offlineSnapshot && 'snapshot')
if (startAfter) {
await vm.$callAsync('clean_shutdown')
}
try {
await this._snapshot()
if (startAfter === 'snapshot') {
// deliberately not awaited: the VM restarts while the backup continues
ignoreErrors.call(vm.$callAsync('start', false, false))
}
if (this._writers.size !== 0) {
await (this._isDelta ? this._copyDelta() : this._copyFull())
}
} finally {
if (startAfter) {
// safe to call even when already started above — errors are ignored
ignoreErrors.call(vm.$callAsync('start', false, false))
}
await this._fetchJobSnapshots()
await this._removeUnusedSnapshots()
}
await this._healthCheck()
}
}
exports.VmBackup = VmBackup
// Wrap run() with golike-defer so it receives `$defer` as its first argument,
// ensuring deferred cleanups (writer.afterBackup) execute when run() exits.
decorateMethodsWith(VmBackup, {
run: defer,
})

View File

@@ -2,9 +2,9 @@
const { asyncMap } = require('@xen-orchestra/async-map')
const { DIR_XO_CONFIG_BACKUPS } = require('../RemoteAdapter.js')
const { formatFilenameDate } = require('../_filenameDate.js')
const { Task } = require('../Task.js')
const { DIR_XO_CONFIG_BACKUPS } = require('./RemoteAdapter.js')
const { formatFilenameDate } = require('./_filenameDate.js')
const { Task } = require('./Task.js')
exports.XoMetadataBackup = class XoMetadataBackup {
constructor({ config, job, remoteAdapters, schedule, settings }) {

View File

@@ -13,10 +13,10 @@ const { createDebounceResource } = require('@vates/disposable/debounceResource.j
const { decorateMethodsWith } = require('@vates/decorate-with')
const { deduped } = require('@vates/disposable/deduped.js')
const { getHandler } = require('@xen-orchestra/fs')
const { createRunner } = require('./Backup.js')
const { parseDuration } = require('@vates/parse-duration')
const { Xapi } = require('@xen-orchestra/xapi')
const { Backup } = require('./Backup.js')
const { RemoteAdapter } = require('./RemoteAdapter.js')
const { Task } = require('./Task.js')
@@ -48,7 +48,7 @@ class BackupWorker {
}
run() {
return createRunner({
return new Backup({
config: this.#config,
getAdapter: remoteId => this.getAdapter(this.#remotes[remoteId]),
getConnectedRecord: Disposable.factory(async function* getConnectedRecord(type, uuid) {

View File

@@ -3,6 +3,7 @@
const { beforeEach, afterEach, test, describe } = require('test')
const assert = require('assert').strict
const rimraf = require('rimraf')
const tmp = require('tmp')
const fs = require('fs-extra')
const uuid = require('uuid')
@@ -13,7 +14,6 @@ const { VHDFOOTER, VHDHEADER } = require('./tests.fixtures.js')
const { VhdFile, Constants, VhdDirectory, VhdAbstract } = require('vhd-lib')
const { checkAliases } = require('./_cleanVm')
const { dirname, basename } = require('path')
const { rimraf } = require('rimraf')
let tempDir, adapter, handler, jobId, vdiId, basePath, relativePath
const rootPath = 'xo-vm-backups/VMUUID/'

View File

@@ -33,7 +33,7 @@ const resolveUuid = async (xapi, cache, uuid, type) => {
return ref
}
exports.exportIncrementalVm = async function exportIncrementalVm(
exports.exportDeltaVm = async function exportDeltaVm(
vm,
baseVm,
{
@@ -143,18 +143,18 @@ exports.exportIncrementalVm = async function exportIncrementalVm(
)
}
exports.importIncrementalVm = defer(async function importIncrementalVm(
exports.importDeltaVm = defer(async function importDeltaVm(
$defer,
incrementalVm,
deltaVm,
sr,
{ cancelToken = CancelToken.none, detectBase = true, mapVdisSrs = {}, newMacAddresses = false } = {}
) {
const { version } = incrementalVm
const { version } = deltaVm
if (compareVersions(version, '1.0.0') < 0) {
throw new Error(`Unsupported delta backup version: ${version}`)
}
const vmRecord = incrementalVm.vm
const vmRecord = deltaVm.vm
const xapi = sr.$xapi
let baseVm
@@ -183,7 +183,7 @@ exports.importIncrementalVm = defer(async function importIncrementalVm(
baseVdis[vbd.VDI] = vbd.$VDI
}
})
const vdiRecords = incrementalVm.vdis
const vdiRecords = deltaVm.vdis
// 0. Create suspend_VDI
let suspendVdi
@@ -240,7 +240,7 @@ exports.importIncrementalVm = defer(async function importIncrementalVm(
await asyncMap(await xapi.getField('VM', vmRef, 'VBDs'), ref => ignoreErrors.call(xapi.call('VBD.destroy', ref)))
// 3. Create VDIs & VBDs.
const vbdRecords = incrementalVm.vbds
const vbdRecords = deltaVm.vbds
const vbds = groupBy(vbdRecords, 'VDI')
const newVdis = {}
await asyncMap(Object.keys(vdiRecords), async vdiRef => {
@@ -309,7 +309,7 @@ exports.importIncrementalVm = defer(async function importIncrementalVm(
}
})
const { streams } = incrementalVm
const { streams } = deltaVm
await Promise.all([
// Import VDI contents.
@@ -326,7 +326,7 @@ exports.importIncrementalVm = defer(async function importIncrementalVm(
}),
// Create VIFs.
asyncMap(Object.values(incrementalVm.vifs), vif => {
asyncMap(Object.values(deltaVm.vifs), vif => {
let network = vif.$network$uuid && xapi.getObjectByUuid(vif.$network$uuid, undefined)
if (network === undefined) {
@@ -358,8 +358,8 @@ exports.importIncrementalVm = defer(async function importIncrementalVm(
])
await Promise.all([
incrementalVm.vm.ha_always_run && xapi.setField('VM', vmRef, 'ha_always_run', true),
xapi.setField('VM', vmRef, 'name_label', incrementalVm.vm.name_label),
deltaVm.vm.ha_always_run && xapi.setField('VM', vmRef, 'ha_always_run', true),
xapi.setField('VM', vmRef, 'name_label', deltaVm.vm.name_label),
])
return vmRef

View File

@@ -1,134 +0,0 @@
'use strict'
const { asyncMap } = require('@xen-orchestra/async-map')
const Disposable = require('promise-toolbox/Disposable')
const ignoreErrors = require('promise-toolbox/ignoreErrors')
const { extractIdsFromSimplePattern } = require('../extractIdsFromSimplePattern.js')
const { PoolMetadataBackup } = require('./_PoolMetadataBackup.js')
const { XoMetadataBackup } = require('./_XoMetadataBackup.js')
const { DEFAULT_SETTINGS, Abstract } = require('./_Abstract.js')
const { runTask } = require('./_runTask.js')
const { getAdaptersByRemote } = require('./_getAdaptersByRemote.js')
// Metadata jobs default both retentions to 0 (disabled): a mode only runs
// when its retention is explicitly configured to a non-zero value.
const DEFAULT_METADATA_SETTINGS = {
  retentionPoolMetadata: 0,
  retentionXoMetadata: 0,
}

// Runner for metadata backup jobs: pool metadata and/or XO config metadata,
// written to one or more remotes.
exports.Metadata = class MetadataBackupRunner extends Abstract {
  // Merge settings by increasing precedence: generic defaults, metadata
  // defaults, config-level defaults, then the job's '' (all-schedules) entry.
  _computeBaseSettings(config, job) {
    const baseSettings = { ...DEFAULT_SETTINGS }
    Object.assign(baseSettings, DEFAULT_METADATA_SETTINGS, config.defaultSettings, config.metadata?.defaultSettings)
    Object.assign(baseSettings, job.settings[''])
    return baseSettings
  }

  /**
   * Validates the job configuration, resolves pool records and remote
   * adapters, then runs one PoolMetadataBackup per pool and/or a single
   * XoMetadataBackup, all in parallel.
   *
   * @throws {Error} when no remotes, no metadata mode, or no matching
   *   retention is configured
   */
  async run() {
    const schedule = this._schedule
    const job = this._job
    const remoteIds = extractIdsFromSimplePattern(job.remotes)
    if (remoteIds.length === 0) {
      throw new Error('metadata backup job cannot run without remotes')
    }
    const config = this._config
    const poolIds = extractIdsFromSimplePattern(job.pools)
    const isEmptyPools = poolIds.length === 0
    const isXoMetadata = job.xoMetadata !== undefined
    if (!isXoMetadata && isEmptyPools) {
      throw new Error('no metadata mode found')
    }
    const settings = this._settings
    const { retentionPoolMetadata, retentionXoMetadata } = settings
    // each enabled mode must have a non-zero retention, otherwise nothing would be kept
    if (
      (retentionPoolMetadata === 0 && retentionXoMetadata === 0) ||
      (!isXoMetadata && retentionPoolMetadata === 0) ||
      (isEmptyPools && retentionXoMetadata === 0)
    ) {
      throw new Error('no retentions corresponding to the metadata modes found')
    }
    await Disposable.use(
      Disposable.all(
        poolIds.map(id =>
          this._getRecord('pool', id).catch(error => {
            // See https://github.com/vatesfr/xen-orchestra/commit/6aa6cfba8ec939c0288f0fa740f6dfad98c43cbb
            // report the failure as its own task instead of aborting the whole job;
            // the pool entry resolves to undefined and is filtered out below
            runTask(
              {
                name: 'get pool record',
                data: { type: 'pool', id },
              },
              () => Promise.reject(error)
            )
          })
        )
      ),
      Disposable.all(remoteIds.map(id => this._getAdapter(id))),
      async (pools, remoteAdapters) => {
        // remove adapters that failed (already handled)
        remoteAdapters = remoteAdapters.filter(_ => _ !== undefined)
        if (remoteAdapters.length === 0) {
          return
        }
        remoteAdapters = getAdaptersByRemote(remoteAdapters)

        // remove pools that failed (already handled)
        pools = pools.filter(_ => _ !== undefined)

        const promises = []
        if (pools.length !== 0 && settings.retentionPoolMetadata !== 0) {
          promises.push(
            asyncMap(pools, async pool =>
              runTask(
                {
                  name: `Starting metadata backup for the pool (${pool.$id}). (${job.id})`,
                  data: {
                    id: pool.$id,
                    pool,
                    // informational only: failure to fetch the master record is ignored
                    poolMaster: await ignoreErrors.call(pool.$xapi.getRecord('host', pool.master)),
                    type: 'pool',
                  },
                },
                () =>
                  new PoolMetadataBackup({
                    config,
                    job,
                    pool,
                    remoteAdapters,
                    schedule,
                    settings,
                  }).run()
              )
            )
          )
        }
        if (job.xoMetadata !== undefined && settings.retentionXoMetadata !== 0) {
          promises.push(
            runTask(
              {
                name: `Starting XO metadata backup. (${job.id})`,
                data: {
                  type: 'xo',
                },
              },
              () =>
                new XoMetadataBackup({
                  config,
                  job,
                  remoteAdapters,
                  schedule,
                  settings,
                }).run()
            )
          )
        }
        await Promise.all(promises)
      }
    )
  }
}

View File

@@ -1,138 +0,0 @@
'use strict'
const { asyncMapSettled } = require('@xen-orchestra/async-map')
const Disposable = require('promise-toolbox/Disposable')
const { limitConcurrency } = require('limit-concurrency-decorator')
const { extractIdsFromSimplePattern } = require('../extractIdsFromSimplePattern.js')
const { Task } = require('../Task.js')
const createStreamThrottle = require('./_createStreamThrottle.js')
const { DEFAULT_SETTINGS, Abstract } = require('./_Abstract.js')
const { runTask } = require('./_runTask.js')
const { getAdaptersByRemote } = require('./_getAdaptersByRemote.js')
const { IncrementalXapi } = require('./_vmRunners/IncrementalXapi.js')
const { FullXapi } = require('./_vmRunners/FullXapi.js')
// Per-VM defaults for XAPI-based backup jobs; overridable at the config, job,
// schedule and individual-VM levels (see _computeBaseSettings and run()).
const DEFAULT_XAPI_VM_SETTINGS = {
  bypassVdiChainsCheck: false,
  checkpointSnapshot: false,
  concurrency: 2, // number of VMs backed up in parallel (0 = unlimited)
  copyRetention: 0,
  deleteFirst: false,
  exportRetention: 0,
  fullInterval: 0, // force a full export every N deltas (0 = never)
  healthCheckSr: undefined,
  healthCheckVmsWithTags: [],
  maxExportRate: 0, // bytes/s throttle for exports (0 = unlimited)
  maxMergedDeltasPerRun: Infinity,
  offlineBackup: false,
  offlineSnapshot: false,
  snapshotRetention: 0,
  timeout: 0,
  useNbd: false,
  unconditionalSnapshot: false,
  validateVhdStreams: false,
  vmTimeout: 0,
}

// Runner for VM backup jobs against XAPI hosts: resolves SRs, remote adapters
// and the optional health-check SR, then backs up each VM with the runner
// matching the job mode (delta → IncrementalXapi, full → FullXapi).
exports.VmsXapi = class VmsXapiBackupRunner extends Abstract {
  // Merge settings by increasing precedence: generic defaults, XAPI VM
  // defaults, config-level defaults, then the job's '' (all-schedules) entry.
  _computeBaseSettings(config, job) {
    const baseSettings = { ...DEFAULT_SETTINGS }
    Object.assign(baseSettings, DEFAULT_XAPI_VM_SETTINGS, config.defaultSettings, config.vm?.defaultSettings)
    Object.assign(baseSettings, job.settings[''])
    return baseSettings
  }

  /**
   * Resolves target SRs/remotes, then runs one backup per VM, honoring the
   * `concurrency` setting. Failures to resolve an SR, remote or VM record are
   * reported as individual tasks and do not abort the whole job.
   */
  async run() {
    const job = this._job

    // FIXME: proper SimpleIdPattern handling
    const getSnapshotNameLabel = this._getSnapshotNameLabel
    const schedule = this._schedule
    const settings = this._settings

    const throttleStream = createStreamThrottle(settings.maxExportRate)

    const config = this._config
    await Disposable.use(
      Disposable.all(
        extractIdsFromSimplePattern(job.srs).map(id =>
          this._getRecord('SR', id).catch(error => {
            // report the failure as its own task; the entry resolves to
            // undefined and is filtered out below
            runTask(
              {
                name: 'get SR record',
                data: { type: 'SR', id },
              },
              () => Promise.reject(error)
            )
          })
        )
      ),
      Disposable.all(extractIdsFromSimplePattern(job.remotes).map(id => this._getAdapter(id))),
      () => (settings.healthCheckSr !== undefined ? this._getRecord('SR', settings.healthCheckSr) : undefined),
      async (srs, remoteAdapters, healthCheckSr) => {
        // remove adapters that failed (already handled)
        remoteAdapters = remoteAdapters.filter(_ => _ !== undefined)

        // remove srs that failed (already handled)
        srs = srs.filter(_ => _ !== undefined)

        // nothing to write and no snapshots to keep: nothing to do
        if (remoteAdapters.length === 0 && srs.length === 0 && settings.snapshotRetention === 0) {
          return
        }

        const vmIds = extractIdsFromSimplePattern(job.vms)
        Task.info('vms', { vms: vmIds })

        remoteAdapters = getAdaptersByRemote(remoteAdapters)

        const allSettings = this._job.settings
        const baseSettings = this._baseSettings

        // backs up a single VM inside its own "backup VM" task
        const handleVm = vmUuid => {
          const taskStart = { name: 'backup VM', data: { type: 'VM', id: vmUuid } }
          return this._getRecord('VM', vmUuid).then(
            disposableVm =>
              Disposable.use(disposableVm, vm => {
                taskStart.data.name_label = vm.name_label
                return runTask(taskStart, () => {
                  const opts = {
                    baseSettings,
                    config,
                    getSnapshotNameLabel,
                    healthCheckSr,
                    job,
                    remoteAdapters,
                    schedule,
                    // per-VM settings override schedule-level settings
                    settings: { ...settings, ...allSettings[vm.uuid] },
                    srs,
                    throttleStream,
                    vm,
                  }
                  let vmBackup
                  if (job.mode === 'delta') {
                    vmBackup = new IncrementalXapi(opts)
                  } else {
                    if (job.mode === 'full') {
                      vmBackup = new FullXapi(opts)
                    } else {
                      throw new Error(`Job mode ${job.mode} not implemented`)
                    }
                  }
                  return vmBackup.run()
                })
              }),
            error =>
              // could not get the VM record: report it as a failed task
              runTask(taskStart, () => {
                throw error
              })
          )
        }
        const { concurrency } = settings
        await asyncMapSettled(vmIds, concurrency === 0 ? handleVm : limitConcurrency(concurrency)(handleVm))
      }
    )
  }
}

View File

@@ -1,51 +0,0 @@
'use strict'
const Disposable = require('promise-toolbox/Disposable')
const pTimeout = require('promise-toolbox/timeout')
const { compileTemplate } = require('@xen-orchestra/template')
const { runTask } = require('./_runTask.js')
const { RemoteTimeoutError } = require('./_RemoteTimeoutError.js')
// Settings shared by every runner kind; merged (lowest precedence) into the
// job/schedule-specific settings by _computeBaseSettings implementations.
exports.DEFAULT_SETTINGS = {
  getRemoteTimeout: 300e3, // ms allowed to acquire a remote's adapter
  reportWhen: 'failure',
}

// Base class for backup job runners: resolves the effective settings and
// wraps remote adapter acquisition with a timeout and per-remote error
// reporting.
exports.Abstract = class AbstractRunner {
  /**
   * @param {object} deps
   * @param {object} deps.config - backup configuration (snapshotNameLabelTpl, defaults, …)
   * @param {Function} deps.getAdapter - async remoteId => disposable adapter
   * @param {Function} deps.getConnectedRecord - async (type, id) => disposable record
   * @param {object} deps.job - job definition
   * @param {object} deps.schedule - schedule triggering this run
   */
  constructor({ config, getAdapter, getConnectedRecord, job, schedule }) {
    this._config = config
    this._getRecord = getConnectedRecord
    this._job = job
    this._schedule = schedule

    this._getSnapshotNameLabel = compileTemplate(config.snapshotNameLabelTpl, {
      '{job.name}': job.name,
      '{vm.name_label}': vm => vm.name_label,
    })

    const baseSettings = this._computeBaseSettings(config, job)
    this._baseSettings = baseSettings
    // schedule-level settings override the computed base settings
    this._settings = { ...baseSettings, ...job.settings[schedule.id] }

    const { getRemoteTimeout } = this._settings
    // Resolves to undefined on failure: the error is reported as its own task
    // and callers are expected to filter out missing adapters.
    this._getAdapter = async function (remoteId) {
      try {
        // fail with RemoteTimeoutError if the adapter cannot be acquired in time
        const disposable = await pTimeout.call(getAdapter(remoteId), getRemoteTimeout, new RemoteTimeoutError(remoteId))

        return new Disposable(() => disposable.dispose(), {
          adapter: disposable.value,
          remoteId,
        })
      } catch (error) {
        // See https://github.com/vatesfr/xen-orchestra/commit/6aa6cfba8ec939c0288f0fa740f6dfad98c43cbb
        runTask(
          {
            name: 'get remote adapter',
            data: { type: 'remote', id: remoteId },
          },
          () => Promise.reject(error)
        )
      }
    }
  }
}

View File

@@ -1,8 +0,0 @@
'use strict'
/**
 * Error raised when acquiring a remote's adapter exceeds `getRemoteTimeout`.
 */
class RemoteTimeoutError extends Error {
  /**
   * @param {string} remoteId - identifier of the remote that timed out
   */
  constructor(remoteId) {
    super(`timeout while getting the remote ${remoteId}`)
    // keep the id around so callers can tell which remote failed
    this.remoteId = remoteId
  }
}
exports.RemoteTimeoutError = RemoteTimeoutError

View File

@@ -1,9 +0,0 @@
'use strict'
// Index a list of `{ adapter, remoteId }` entries by their remote identifier.
const getAdaptersByRemote = adapters => {
  const byRemote = {}
  for (const { adapter, remoteId } of adapters) {
    byRemote[remoteId] = adapter
  }
  return byRemote
}
exports.getAdaptersByRemote = getAdaptersByRemote

View File

@@ -1,6 +0,0 @@
'use strict'
const { Task } = require('../Task.js')

/**
 * Runs a Task and swallows any rejection: failures are already recorded by
 * the task log infrastructure, so callers get fire-and-forget semantics.
 */
function runTask(...args) {
  return Task.run(...args).catch(Function.prototype) // errors are handled by logs
}
exports.runTask = runTask

View File

@@ -1,61 +0,0 @@
'use strict'
const { createLogger } = require('@xen-orchestra/log')
const { forkStreamUnpipe } = require('../_forkStreamUnpipe.js')
const { FullRemoteWriter } = require('../_writers/FullRemoteWriter.js')
const { FullXapiWriter } = require('../_writers/FullXapiWriter.js')
const { watchStreamSize } = require('../../_watchStreamSize.js')
const { AbstractXapi } = require('./_AbstractXapi.js')
const { debug } = createLogger('xo:backups:FullXapiVmBackup')

/**
 * VM backup runner for full XAPI exports: streams a complete VM export to the
 * remote (FullRemoteWriter) and/or replication (FullXapiWriter) targets.
 */
exports.FullXapi = class FullXapiVmBackupRunner extends AbstractXapi {
  _getWriters() {
    return [FullRemoteWriter, FullXapiWriter]
  }

  // snapshot when explicitly requested, when exporting a running VM without
  // offline backup, or when snapshots themselves must be retained
  _mustDoSnapshot() {
    const { vm } = this

    const settings = this._settings
    return (
      settings.unconditionalSnapshot ||
      (!settings.offlineBackup && vm.power_state === 'Running') ||
      settings.snapshotRetention !== 0
    )
  }

  // full backups have no base VM: nothing to select
  _selectBaseVm() {}

  // Export the VM once, fork the stream to every writer, and log transfer stats.
  async _copy() {
    const { compression } = this.job
    const stream = this._throttleStream(
      await this._xapi.VM_export(this.exportedVm.$ref, {
        compress: Boolean(compression) && (compression === 'native' ? 'gzip' : 'zstd'),
        useSnapshot: false,
      })
    )
    const sizeContainer = watchStreamSize(stream)

    const timestamp = Date.now()

    await this._callWriters(
      writer =>
        writer.run({
          sizeContainer,
          // each writer gets its own fork so they can consume independently
          stream: forkStreamUnpipe(stream),
          timestamp,
        }),
      'writer.run()'
    )

    const { size } = sizeContainer
    const end = Date.now()
    const duration = end - timestamp
    debug('transfer complete', {
      duration,
      speed: duration !== 0 ? (size * 1e3) / 1024 / 1024 / duration : 0, // MiB/s
      size,
    })
  }
}

View File

@@ -1,163 +0,0 @@
'use strict'
const findLast = require('lodash/findLast.js')
const keyBy = require('lodash/keyBy.js')
const mapValues = require('lodash/mapValues.js')
const vhdStreamValidator = require('vhd-lib/vhdStreamValidator.js')
const { asyncMap } = require('@xen-orchestra/async-map')
const { createLogger } = require('@xen-orchestra/log')
const { pipeline } = require('node:stream')
const { IncrementalRemoteWriter } = require('../_writers/IncrementalRemoteWriter.js')
const { IncrementalXapiWriter } = require('../_writers/IncrementalXapiWriter.js')
const { exportIncrementalVm } = require('../../_incrementalVm.js')
const { Task } = require('../../Task.js')
const { watchStreamSize } = require('../../_watchStreamSize.js')
const { AbstractXapi } = require('./_AbstractXapi.js')
const { forkDeltaExport } = require('./_forkDeltaExport.js')
const { debug } = createLogger('xo:backups:IncrementalXapiVmBackup')
const noop = Function.prototype
exports.IncrementalXapi = class IncrementalXapiVmBackupRunner extends AbstractXapi {
_getWriters() {
return [IncrementalRemoteWriter, IncrementalXapiWriter]
}
_mustDoSnapshot() {
return true
}
async _copy() {
const { exportedVm } = this
const baseVm = this._baseVm
const fullVdisRequired = this._fullVdisRequired
const isFull = fullVdisRequired === undefined || fullVdisRequired.size !== 0
await this._callWriters(writer => writer.prepare({ isFull }), 'writer.prepare()')
const deltaExport = await exportIncrementalVm(exportedVm, baseVm, {
fullVdisRequired,
})
// since NBD is network based, if one disk use nbd , all the disk use them
// except the suspended VDI
if (Object.values(deltaExport.streams).some(({ _nbd }) => _nbd)) {
Task.info('Transfer data using NBD')
}
const sizeContainers = mapValues(deltaExport.streams, stream => watchStreamSize(stream))
if (this._settings.validateVhdStreams) {
deltaExport.streams = mapValues(deltaExport.streams, stream => pipeline(stream, vhdStreamValidator, noop))
}
deltaExport.streams = mapValues(deltaExport.streams, this._throttleStream)
const timestamp = Date.now()
await this._callWriters(
writer =>
writer.transfer({
deltaExport: forkDeltaExport(deltaExport),
sizeContainers,
timestamp,
}),
'writer.transfer()'
)
this._baseVm = exportedVm
if (baseVm !== undefined) {
await exportedVm.update_other_config(
'xo:backup:deltaChainLength',
String(+(baseVm.other_config['xo:backup:deltaChainLength'] ?? 0) + 1)
)
}
// not the case if offlineBackup
if (exportedVm.is_a_snapshot) {
await exportedVm.update_other_config('xo:backup:exported', 'true')
}
const size = Object.values(sizeContainers).reduce((sum, { size }) => sum + size, 0)
const end = Date.now()
const duration = end - timestamp
debug('transfer complete', {
duration,
speed: duration !== 0 ? (size * 1e3) / 1024 / 1024 / duration : 0,
size,
})
await this._callWriters(writer => writer.cleanup(), 'writer.cleanup()')
}
async _selectBaseVm() {
const xapi = this._xapi
let baseVm = findLast(this._jobSnapshots, _ => 'xo:backup:exported' in _.other_config)
if (baseVm === undefined) {
debug('no base VM found')
return
}
const fullInterval = this._settings.fullInterval
const deltaChainLength = +(baseVm.other_config['xo:backup:deltaChainLength'] ?? 0) + 1
if (!(fullInterval === 0 || fullInterval > deltaChainLength)) {
debug('not using base VM becaust fullInterval reached')
return
}
const srcVdis = keyBy(await xapi.getRecords('VDI', await this.vm.$getDisks()), '$ref')
// resolve full record
baseVm = await xapi.getRecord('VM', baseVm.$ref)
const baseUuidToSrcVdi = new Map()
await asyncMap(await baseVm.$getDisks(), async baseRef => {
const [baseUuid, snapshotOf] = await Promise.all([
xapi.getField('VDI', baseRef, 'uuid'),
xapi.getField('VDI', baseRef, 'snapshot_of'),
])
const srcVdi = srcVdis[snapshotOf]
if (srcVdi !== undefined) {
baseUuidToSrcVdi.set(baseUuid, srcVdi)
} else {
debug('ignore snapshot VDI because no longer present on VM', {
vdi: baseUuid,
})
}
})
const presentBaseVdis = new Map(baseUuidToSrcVdi)
await this._callWriters(
writer => presentBaseVdis.size !== 0 && writer.checkBaseVdis(presentBaseVdis, baseVm),
'writer.checkBaseVdis()',
false
)
if (presentBaseVdis.size === 0) {
debug('no base VM found')
return
}
const fullVdisRequired = new Set()
baseUuidToSrcVdi.forEach((srcVdi, baseUuid) => {
if (presentBaseVdis.has(baseUuid)) {
debug('found base VDI', {
base: baseUuid,
vdi: srcVdi.uuid,
})
} else {
debug('missing base VDI', {
base: baseUuid,
vdi: srcVdi.uuid,
})
fullVdisRequired.add(srcVdi.uuid)
}
})
this._baseVm = baseVm
this._fullVdisRequired = fullVdisRequired
}
}

View File

@@ -1,87 +0,0 @@
'use strict'
const { asyncMap } = require('@xen-orchestra/async-map')
const { createLogger } = require('@xen-orchestra/log')
const { Task } = require('../../Task.js')
const { debug, warn } = createLogger('xo:backups:AbstractVmRunner')
// Aggregates several writer failures into a single throwable error.
// NOTE(review): this shadows the built-in global `AggregateError` (available
// in modern Node), which has the same (errors, message) signature — confirm
// whether the local class can be removed.
class AggregateError extends Error {
  constructor(errors, message) {
    super(message)
    // individual errors that caused this aggregate failure
    this.errors = errors
  }
}
/**
 * Sequentially awaits `fn` for every item of `iterable`, one at a time
 * (unlike asyncMap, which runs callbacks concurrently).
 *
 * @param {Iterable} iterable - items to walk
 * @param {Function} fn - callback invoked as `fn.call(thisArg, item)`
 * @param {*} [thisArg=iterable] - `this` value for `fn`
 */
const asyncEach = async (iterable, fn, thisArg = iterable) => {
  const iterator = iterable[Symbol.iterator]()
  let cursor
  while (!(cursor = iterator.next()).done) {
    await fn.call(thisArg, cursor.value)
  }
}
exports.Abstract = class AbstractVmBackupRunner {
// calls fn for each function, warns of any errors, and throws only if there are no writers left
async _callWriters(fn, step, parallel = true) {
const writers = this._writers
const n = writers.size
if (n === 0) {
return
}
async function callWriter(writer) {
const { name } = writer.constructor
try {
debug('writer step starting', { step, writer: name })
await fn(writer)
debug('writer step succeeded', { duration: step, writer: name })
} catch (error) {
writers.delete(writer)
warn('writer step failed', { error, step, writer: name })
// these two steps are the only one that are not already in their own sub tasks
if (step === 'writer.checkBaseVdis()' || step === 'writer.beforeBackup()') {
Task.warning(
`the writer ${name} has failed the step ${step} with error ${error.message}. It won't be used anymore in this job execution.`
)
}
throw error
}
}
if (n === 1) {
const [writer] = writers
return callWriter(writer)
}
const errors = []
await (parallel ? asyncMap : asyncEach)(writers, async function (writer) {
try {
await callWriter(writer)
} catch (error) {
errors.push(error)
}
})
if (writers.size === 0) {
throw new AggregateError(errors, 'all targets have failed, step: ' + step)
}
}
async _healthCheck() {
const settings = this._settings
if (this._healthCheckSr === undefined) {
return
}
// check if current VM has tags
const { tags } = this.vm
const intersect = settings.healthCheckVmsWithTags.some(t => tags.includes(t))
if (settings.healthCheckVmsWithTags.length !== 0 && !intersect) {
return
}
await this._callWriters(writer => writer.healthCheck(this._healthCheckSr), 'writer.healthCheck()')
}
}

View File

@@ -1,258 +0,0 @@
'use strict'
const assert = require('assert')
const groupBy = require('lodash/groupBy.js')
const ignoreErrors = require('promise-toolbox/ignoreErrors')
const { asyncMap } = require('@xen-orchestra/async-map')
const { decorateMethodsWith } = require('@vates/decorate-with')
const { defer } = require('golike-defer')
const { formatDateTime } = require('@xen-orchestra/xapi')
const { getOldEntries } = require('../../_getOldEntries.js')
const { Task } = require('../../Task.js')
const { Abstract } = require('./_Abstract.js')
/**
 * Common logic for XAPI-based per-VM backup runners (full & incremental):
 * writer creation, snapshot lifecycle, backup metadata tagging and snapshot
 * retention. Subclasses implement _getWriters, _mustDoSnapshot, _selectBaseVm
 * and _copy.
 *
 * NOTE(review): the `remotes` constructor parameter is destructured but never
 * used in this class — confirm whether it can be dropped from callers.
 */
class AbstractXapiVmBackupRunner extends Abstract {
  constructor({
    config,
    getSnapshotNameLabel,
    healthCheckSr,
    job,
    remoteAdapters,
    remotes,
    schedule,
    settings,
    srs,
    throttleStream,
    vm,
  }) {
    super()
    if (vm.other_config['xo:backup:job'] === job.id && 'start' in vm.blocked_operations) {
      // don't match replicated VMs created by this very job otherwise they
      // will be replicated again and again
      throw new Error('cannot backup a VM created by this very job')
    }

    this.config = config
    this.job = job
    this.remoteAdapters = remoteAdapters
    this.scheduleId = schedule.id
    this.timestamp = undefined

    // VM currently backed up
    this.vm = vm
    const { tags } = this.vm

    // VM (snapshot) that is really exported
    this.exportedVm = undefined

    this._fullVdisRequired = undefined
    this._getSnapshotNameLabel = getSnapshotNameLabel
    this._isIncremental = job.mode === 'delta'
    this._healthCheckSr = healthCheckSr
    this._jobId = job.id
    this._jobSnapshots = undefined
    this._throttleStream = throttleStream
    this._xapi = vm.$xapi

    // Base VM for the export
    this._baseVm = undefined

    // Settings for this specific run (job, schedule, VM)
    if (tags.includes('xo-memory-backup')) {
      settings.checkpointSnapshot = true
    }
    if (tags.includes('xo-offline-backup')) {
      settings.offlineSnapshot = true
    }
    this._settings = settings

    // Create writers
    {
      const writers = new Set()
      this._writers = writers

      const [BackupWriter, ReplicationWriter] = this._getWriters()

      const allSettings = job.settings
      // one backup writer per remote whose effective exportRetention is non-zero
      Object.keys(remoteAdapters).forEach(remoteId => {
        const targetSettings = {
          ...settings,
          ...allSettings[remoteId],
        }
        if (targetSettings.exportRetention !== 0) {
          writers.add(new BackupWriter({ backup: this, remoteId, settings: targetSettings }))
        }
      })
      // one replication writer per SR whose effective copyRetention is non-zero
      srs.forEach(sr => {
        const targetSettings = {
          ...settings,
          ...allSettings[sr.uuid],
        }
        if (targetSettings.copyRetention !== 0) {
          writers.add(new ReplicationWriter({ backup: this, sr, settings: targetSettings }))
        }
      })
    }
  }

  // ensure the VM itself does not have any backup metadata which would be
  // copied on manual snapshots and interfere with the backup jobs
  async _cleanMetadata() {
    const { vm } = this
    if ('xo:backup:job' in vm.other_config) {
      await vm.update_other_config({
        'xo:backup:datetime': null,
        'xo:backup:deltaChainLength': null,
        'xo:backup:exported': null,
        'xo:backup:job': null,
        'xo:backup:schedule': null,
        'xo:backup:vm': null,
      })
    }
  }

  // Take (or skip) the snapshot to export: sets this.exportedVm and
  // this.timestamp either to a freshly tagged snapshot or to the VM itself.
  async _snapshot() {
    const { vm } = this
    const xapi = this._xapi

    const settings = this._settings

    if (this._mustDoSnapshot()) {
      await Task.run({ name: 'snapshot' }, async () => {
        if (!settings.bypassVdiChainsCheck) {
          await vm.$assertHealthyVdiChains()
        }

        const snapshotRef = await vm[settings.checkpointSnapshot ? '$checkpoint' : '$snapshot']({
          ignoreNobakVdis: true,
          name_label: this._getSnapshotNameLabel(vm),
          unplugVusbs: true,
        })
        this.timestamp = Date.now()

        // tag the snapshot so later runs can recognize it as a job snapshot
        await xapi.setFieldEntries('VM', snapshotRef, 'other_config', {
          'xo:backup:datetime': formatDateTime(this.timestamp),
          'xo:backup:job': this._jobId,
          'xo:backup:schedule': this.scheduleId,
          'xo:backup:vm': vm.uuid,
        })

        this.exportedVm = await xapi.getRecord('VM', snapshotRef)

        return this.exportedVm.uuid
      })
    } else {
      this.exportedVm = vm
      this.timestamp = Date.now()
    }
  }

  // Collect this job's snapshots of the VM, sorted by backup datetime.
  async _fetchJobSnapshots() {
    const jobId = this._jobId
    const vmRef = this.vm.$ref
    const xapi = this._xapi

    const snapshotsRef = await xapi.getField('VM', vmRef, 'snapshots')
    const snapshotsOtherConfig = await asyncMap(snapshotsRef, ref => xapi.getField('VM', ref, 'other_config'))

    const snapshots = []
    snapshotsOtherConfig.forEach((other_config, i) => {
      if (other_config['xo:backup:job'] === jobId) {
        snapshots.push({ other_config, $ref: snapshotsRef[i] })
      }
    })
    snapshots.sort((a, b) => (a.other_config['xo:backup:datetime'] < b.other_config['xo:backup:datetime'] ? -1 : 1))
    this._jobSnapshots = snapshots
  }

  // Destroy job snapshots beyond each schedule's snapshotRetention, never
  // destroying the current base VM.
  async _removeUnusedSnapshots() {
    const allSettings = this.job.settings
    const baseSettings = this._baseSettings
    const baseVmRef = this._baseVm?.$ref

    const snapshotsPerSchedule = groupBy(this._jobSnapshots, _ => _.other_config['xo:backup:schedule'])
    const xapi = this._xapi
    await asyncMap(Object.entries(snapshotsPerSchedule), ([scheduleId, snapshots]) => {
      const settings = {
        ...baseSettings,
        ...allSettings[scheduleId],
        ...allSettings[this.vm.uuid],
      }
      return asyncMap(getOldEntries(settings.snapshotRetention, snapshots), ({ $ref }) => {
        if ($ref !== baseVmRef) {
          return xapi.VM_destroy($ref)
        }
      })
    })
  }

  // abstract: must be provided by subclasses
  async copy() {
    throw new Error('Not implemented')
  }

  // abstract: must be provided by subclasses
  _getWriters() {
    throw new Error('Not implemented')
  }

  // abstract: must be provided by subclasses
  _mustDoSnapshot() {
    throw new Error('Not implemented')
  }

  // abstract: must be provided by subclasses
  async _selectBaseVm() {
    throw new Error('Not implemented')
  }

  /**
   * Full backup lifecycle for one VM: writer setup, snapshot selection and
   * creation, copy, retention cleanup and health check.
   *
   * `$defer` is injected by golike-defer (see decorateMethodsWith below).
   */
  async run($defer) {
    const settings = this._settings
    assert(
      !settings.offlineBackup || settings.snapshotRetention === 0,
      'offlineBackup is not compatible with snapshotRetention'
    )

    await this._callWriters(async writer => {
      await writer.beforeBackup()
      $defer(async () => {
        await writer.afterBackup()
      })
    }, 'writer.beforeBackup()')

    await this._fetchJobSnapshots()

    await this._selectBaseVm()

    await this._cleanMetadata()
    await this._removeUnusedSnapshots()

    const { vm } = this
    const isRunning = vm.power_state === 'Running'
    // offline backup shuts the VM down for the whole copy; offline snapshot
    // only for the snapshot itself
    const startAfter = isRunning && (settings.offlineBackup ? 'backup' : settings.offlineSnapshot && 'snapshot')
    if (startAfter) {
      await vm.$callAsync('clean_shutdown')
    }

    try {
      await this._snapshot()
      if (startAfter === 'snapshot') {
        // best effort: restart failures must not abort the backup
        ignoreErrors.call(vm.$callAsync('start', false, false))
      }

      if (this._writers.size !== 0) {
        await this._copy()
      }
    } finally {
      if (startAfter) {
        ignoreErrors.call(vm.$callAsync('start', false, false))
      }

      await this._fetchJobSnapshots()
      await this._removeUnusedSnapshots()
    }
    await this._healthCheck()
  }
}
exports.AbstractXapi = AbstractXapiVmBackupRunner

// wrap run() with golike-defer so it can register cleanup callbacks via $defer
decorateMethodsWith(AbstractXapiVmBackupRunner, {
  run: defer,
})

View File

@@ -1,12 +0,0 @@
'use strict'
const { mapValues } = require('lodash')
const { forkStreamUnpipe } = require('../_forkStreamUnpipe')
exports.forkDeltaExport = function forkDeltaExport(deltaExport) {
return Object.create(deltaExport, {
streams: {
value: mapValues(deltaExport.streams, forkStreamUnpipe),
},
})
}

View File

@@ -8,13 +8,13 @@
"type": "git",
"url": "https://github.com/vatesfr/xen-orchestra.git"
},
"version": "0.37.0",
"version": "0.36.0",
"engines": {
"node": ">=14.6"
},
"scripts": {
"postversion": "npm publish --access public",
"test-integration": "node--test *.integ.js"
"test": "node--test"
},
"dependencies": {
"@kldzj/stream-throttle": "^1.1.1",
@@ -27,7 +27,7 @@
"@vates/nbd-client": "^1.2.0",
"@vates/parse-duration": "^0.1.1",
"@xen-orchestra/async-map": "^0.1.2",
"@xen-orchestra/fs": "^4.0.0",
"@xen-orchestra/fs": "^3.3.4",
"@xen-orchestra/log": "^0.6.0",
"@xen-orchestra/template": "^0.1.0",
"compare-versions": "^5.0.1",
@@ -42,17 +42,17 @@
"promise-toolbox": "^0.21.0",
"proper-lockfile": "^4.1.2",
"uuid": "^9.0.0",
"vhd-lib": "^4.4.1",
"vhd-lib": "^4.4.0",
"yazl": "^2.5.1"
},
"devDependencies": {
"rimraf": "^5.0.1",
"rimraf": "^4.1.1",
"sinon": "^15.0.1",
"test": "^3.2.1",
"tmp": "^0.2.1"
},
"peerDependencies": {
"@xen-orchestra/xapi": "^2.2.1"
"@xen-orchestra/xapi": "^2.2.0"
},
"license": "AGPL-3.0-or-later",
"author": {

View File

@@ -11,19 +11,19 @@ const { decorateClass } = require('@vates/decorate-with')
const { defer } = require('golike-defer')
const { dirname } = require('path')
const { formatFilenameDate } = require('../../_filenameDate.js')
const { getOldEntries } = require('../../_getOldEntries.js')
const { Task } = require('../../Task.js')
const { formatFilenameDate } = require('../_filenameDate.js')
const { getOldEntries } = require('../_getOldEntries.js')
const { Task } = require('../Task.js')
const { MixinRemoteWriter } = require('./_MixinRemoteWriter.js')
const { AbstractIncrementalWriter } = require('./_AbstractIncrementalWriter.js')
const { MixinBackupWriter } = require('./_MixinBackupWriter.js')
const { AbstractDeltaWriter } = require('./_AbstractDeltaWriter.js')
const { checkVhd } = require('./_checkVhd.js')
const { packUuid } = require('./_packUuid.js')
const { Disposable } = require('promise-toolbox')
const { warn } = createLogger('xo:backups:DeltaBackupWriter')
class IncrementalRemoteWriter extends MixinRemoteWriter(AbstractIncrementalWriter) {
class DeltaBackupWriter extends MixinBackupWriter(AbstractDeltaWriter) {
async checkBaseVdis(baseUuidToSrcVdi) {
const { handler } = this._adapter
const backup = this._backup
@@ -227,6 +227,6 @@ class IncrementalRemoteWriter extends MixinRemoteWriter(AbstractIncrementalWrite
// TODO: run cleanup?
}
}
exports.IncrementalRemoteWriter = decorateClass(IncrementalRemoteWriter, {
exports.DeltaBackupWriter = decorateClass(DeltaBackupWriter, {
_transfer: defer,
})

View File

@@ -4,16 +4,16 @@ const { asyncMap, asyncMapSettled } = require('@xen-orchestra/async-map')
const ignoreErrors = require('promise-toolbox/ignoreErrors')
const { formatDateTime } = require('@xen-orchestra/xapi')
const { formatFilenameDate } = require('../../_filenameDate.js')
const { getOldEntries } = require('../../_getOldEntries.js')
const { importIncrementalVm, TAG_COPY_SRC } = require('../../_incrementalVm.js')
const { Task } = require('../../Task.js')
const { formatFilenameDate } = require('../_filenameDate.js')
const { getOldEntries } = require('../_getOldEntries.js')
const { importDeltaVm, TAG_COPY_SRC } = require('../_deltaVm.js')
const { Task } = require('../Task.js')
const { AbstractIncrementalWriter } = require('./_AbstractIncrementalWriter.js')
const { MixinXapiWriter } = require('./_MixinXapiWriter.js')
const { AbstractDeltaWriter } = require('./_AbstractDeltaWriter.js')
const { MixinReplicationWriter } = require('./_MixinReplicationWriter.js')
const { listReplicatedVms } = require('./_listReplicatedVms.js')
exports.IncrementalXapiWriter = class IncrementalXapiWriter extends MixinXapiWriter(AbstractIncrementalWriter) {
exports.DeltaReplicationWriter = class DeltaReplicationWriter extends MixinReplicationWriter(AbstractDeltaWriter) {
async checkBaseVdis(baseUuidToSrcVdi, baseVm) {
const sr = this._sr
const replicatedVm = listReplicatedVms(sr.$xapi, this._backup.job.id, sr.uuid, this._backup.vm.uuid).find(
@@ -50,8 +50,8 @@ exports.IncrementalXapiWriter = class IncrementalXapiWriter extends MixinXapiWri
},
})
this.transfer = task.wrapFn(this.transfer)
this.cleanup = task.wrapFn(this.cleanup)
this.healthCheck = task.wrapFn(this.healthCheck, true)
this.healthCheck = task.wrapFn(this.healthCheck)
this.cleanup = task.wrapFn(this.cleanup, true)
return task.run(() => this._prepare())
}
@@ -90,7 +90,7 @@ exports.IncrementalXapiWriter = class IncrementalXapiWriter extends MixinXapiWri
let targetVmRef
await Task.run({ name: 'transfer' }, async () => {
targetVmRef = await importIncrementalVm(
targetVmRef = await importDeltaVm(
{
__proto__: deltaExport,
vm: {

View File

@@ -1,13 +1,13 @@
'use strict'
const { formatFilenameDate } = require('../../_filenameDate.js')
const { getOldEntries } = require('../../_getOldEntries.js')
const { Task } = require('../../Task.js')
const { formatFilenameDate } = require('../_filenameDate.js')
const { getOldEntries } = require('../_getOldEntries.js')
const { Task } = require('../Task.js')
const { MixinRemoteWriter } = require('./_MixinRemoteWriter.js')
const { MixinBackupWriter } = require('./_MixinBackupWriter.js')
const { AbstractFullWriter } = require('./_AbstractFullWriter.js')
exports.FullRemoteWriter = class FullRemoteWriter extends MixinRemoteWriter(AbstractFullWriter) {
exports.FullBackupWriter = class FullBackupWriter extends MixinBackupWriter(AbstractFullWriter) {
constructor(props) {
super(props)

View File

@@ -4,15 +4,15 @@ const ignoreErrors = require('promise-toolbox/ignoreErrors')
const { asyncMap, asyncMapSettled } = require('@xen-orchestra/async-map')
const { formatDateTime } = require('@xen-orchestra/xapi')
const { formatFilenameDate } = require('../../_filenameDate.js')
const { getOldEntries } = require('../../_getOldEntries.js')
const { Task } = require('../../Task.js')
const { formatFilenameDate } = require('../_filenameDate.js')
const { getOldEntries } = require('../_getOldEntries.js')
const { Task } = require('../Task.js')
const { AbstractFullWriter } = require('./_AbstractFullWriter.js')
const { MixinXapiWriter } = require('./_MixinXapiWriter.js')
const { MixinReplicationWriter } = require('./_MixinReplicationWriter.js')
const { listReplicatedVms } = require('./_listReplicatedVms.js')
exports.FullXapiWriter = class FullXapiWriter extends MixinXapiWriter(AbstractFullWriter) {
exports.FullReplicationWriter = class FullReplicationWriter extends MixinReplicationWriter(AbstractFullWriter) {
constructor(props) {
super(props)

View File

@@ -2,7 +2,7 @@
const { AbstractWriter } = require('./_AbstractWriter.js')
exports.AbstractIncrementalWriter = class AbstractIncrementalWriter extends AbstractWriter {
exports.AbstractDeltaWriter = class AbstractDeltaWriter extends AbstractWriter {
checkBaseVdis(baseUuidToSrcVdi, baseVm) {
throw new Error('Not implemented')
}

View File

@@ -4,17 +4,17 @@ const { createLogger } = require('@xen-orchestra/log')
const { join } = require('path')
const assert = require('assert')
const { formatFilenameDate } = require('../../_filenameDate.js')
const { getVmBackupDir } = require('../../_getVmBackupDir.js')
const { HealthCheckVmBackup } = require('../../HealthCheckVmBackup.js')
const { ImportVmBackup } = require('../../ImportVmBackup.js')
const { Task } = require('../../Task.js')
const MergeWorker = require('../../merge-worker/index.js')
const { formatFilenameDate } = require('../_filenameDate.js')
const { getVmBackupDir } = require('../_getVmBackupDir.js')
const { HealthCheckVmBackup } = require('../HealthCheckVmBackup.js')
const { ImportVmBackup } = require('../ImportVmBackup.js')
const { Task } = require('../Task.js')
const MergeWorker = require('../merge-worker/index.js')
const { info, warn } = createLogger('xo:backups:MixinBackupWriter')
exports.MixinRemoteWriter = (BaseClass = Object) =>
class MixinRemoteWriter extends BaseClass {
exports.MixinBackupWriter = (BaseClass = Object) =>
class MixinBackupWriter extends BaseClass {
#lock
constructor({ remoteId, ...rest }) {
@@ -58,7 +58,7 @@ exports.MixinRemoteWriter = (BaseClass = Object) =>
const { disableMergeWorker } = this._backup.config
// merge worker only compatible with local remotes
const { handler } = this._adapter
const willMergeInWorker = !disableMergeWorker && typeof handler.getRealPath === 'function'
const willMergeInWorker = !disableMergeWorker && typeof handler._getRealPath === 'function'
const { merge } = await this._cleanVm({ remove: true, merge: !willMergeInWorker })
await this.#lock.dispose()
@@ -71,7 +71,7 @@ exports.MixinRemoteWriter = (BaseClass = Object) =>
Math.random().toString(36).slice(2)
await handler.outputFile(taskFile, this._backup.vm.uuid)
const remotePath = handler.getRealPath()
const remotePath = handler._getRealPath()
await MergeWorker.run(remotePath)
}
}

View File

@@ -1,13 +1,19 @@
'use strict'
const { extractOpaqueRef } = require('@xen-orchestra/xapi')
const { Task } = require('../../Task')
const { Task } = require('../Task')
const assert = require('node:assert/strict')
const { HealthCheckVmBackup } = require('../../HealthCheckVmBackup')
const { HealthCheckVmBackup } = require('../HealthCheckVmBackup')
exports.MixinXapiWriter = (BaseClass = Object) =>
class MixinXapiWriter extends BaseClass {
function extractOpaqueRef(str) {
const OPAQUE_REF_RE = /OpaqueRef:[0-9a-z-]+/
const matches = OPAQUE_REF_RE.exec(str)
if (!matches) {
throw new Error('no opaque ref found')
}
return matches[0]
}
exports.MixinReplicationWriter = (BaseClass = Object) =>
class MixinReplicationWriter extends BaseClass {
constructor({ sr, ...rest }) {
super(rest)

View File

@@ -18,7 +18,7 @@
"preferGlobal": true,
"dependencies": {
"golike-defer": "^0.5.1",
"xen-api": "^1.3.1"
"xen-api": "^1.3.0"
},
"scripts": {
"postversion": "npm publish"

View File

@@ -1,7 +1,7 @@
{
"private": false,
"name": "@xen-orchestra/fs",
"version": "4.0.0",
"version": "3.3.4",
"license": "AGPL-3.0-or-later",
"description": "The File System for Xen Orchestra backups.",
"homepage": "https://github.com/vatesfr/xen-orchestra/tree/master/@xen-orchestra/fs",
@@ -53,9 +53,7 @@
"@babel/preset-env": "^7.8.0",
"cross-env": "^7.0.2",
"dotenv": "^16.0.0",
"rimraf": "^5.0.1",
"sinon": "^15.0.4",
"test": "^3.3.0",
"rimraf": "^4.1.1",
"tmp": "^0.2.1"
},
"scripts": {
@@ -65,9 +63,7 @@
"prebuild": "yarn run clean",
"predev": "yarn run clean",
"prepublishOnly": "yarn run build",
"pretest": "yarn run build",
"postversion": "npm publish",
"test": "node--test ./dist/"
"postversion": "npm publish"
},
"author": {
"name": "Vates SAS",

View File

@@ -1,5 +1,4 @@
import { describe, it } from 'test'
import { strict as assert } from 'assert'
/* eslint-env jest */
import { Readable } from 'readable-stream'
import copyStreamToBuffer from './_copyStreamToBuffer.js'
@@ -17,6 +16,6 @@ describe('copyStreamToBuffer', () => {
await copyStreamToBuffer(stream, buffer)
assert.equal(buffer.toString(), 'hel')
expect(buffer.toString()).toBe('hel')
})
})

View File

@@ -1,5 +1,4 @@
import { describe, it } from 'test'
import { strict as assert } from 'assert'
/* eslint-env jest */
import { Readable } from 'readable-stream'
import createBufferFromStream from './_createBufferFromStream.js'
@@ -15,6 +14,6 @@ describe('createBufferFromStream', () => {
const buffer = await createBufferFromStream(stream)
assert.equal(buffer.toString(), 'hello')
expect(buffer.toString()).toBe('hello')
})
})

View File

@@ -1,6 +1,4 @@
import { describe, it } from 'test'
import { strict as assert } from 'assert'
/* eslint-env jest */
import { Readable } from 'node:stream'
import { _getEncryptor } from './_encryptor'
import crypto from 'crypto'
@@ -27,13 +25,13 @@ algorithms.forEach(algorithm => {
it('handle buffer', () => {
const encrypted = encryptor.encryptData(buffer)
if (algorithm !== 'none') {
assert.equal(encrypted.equals(buffer), false) // encrypted should be different
expect(encrypted.equals(buffer)).toEqual(false) // encrypted should be different
// ivlength, auth tag, padding
assert.notEqual(encrypted.length, buffer.length)
expect(encrypted.length).not.toEqual(buffer.length)
}
const decrypted = encryptor.decryptData(encrypted)
assert.equal(decrypted.equals(buffer), true)
expect(decrypted.equals(buffer)).toEqual(true)
})
it('handle stream', async () => {
@@ -41,12 +39,12 @@ algorithms.forEach(algorithm => {
stream.length = buffer.length
const encrypted = encryptor.encryptStream(stream)
if (algorithm !== 'none') {
assert.equal(encrypted.length, undefined)
expect(encrypted.length).toEqual(undefined)
}
const decrypted = encryptor.decryptStream(encrypted)
const decryptedBuffer = await streamToBuffer(decrypted)
assert.equal(decryptedBuffer.equals(buffer), true)
expect(decryptedBuffer.equals(buffer)).toEqual(true)
})
})
})

View File

@@ -1,5 +1,4 @@
import { describe, it } from 'test'
import { strict as assert } from 'assert'
/* eslint-env jest */
import guessAwsRegion from './_guessAwsRegion.js'
@@ -7,12 +6,12 @@ describe('guessAwsRegion', () => {
it('should return region from AWS URL', async () => {
const region = guessAwsRegion('s3.test-region.amazonaws.com')
assert.equal(region, 'test-region')
expect(region).toBe('test-region')
})
it('should return default region if none is found is AWS URL', async () => {
const region = guessAwsRegion('s3.amazonaws.com')
assert.equal(region, 'us-east-1')
expect(region).toBe('us-east-1')
})
})

View File

@@ -9,32 +9,28 @@ import LocalHandler from './local'
const sudoExeca = (command, args, opts) => execa('sudo', [command, ...args], opts)
export default class MountHandler extends LocalHandler {
#execa
#keeper
#params
#realPath
constructor(remote, { mountsDir = join(tmpdir(), 'xo-fs-mounts'), useSudo = false, ...opts } = {}, params) {
super(remote, opts)
this.#execa = useSudo ? sudoExeca : execa
this.#params = {
this._execa = useSudo ? sudoExeca : execa
this._keeper = undefined
this._params = {
...params,
options: [params.options, remote.options ?? params.defaultOptions].filter(_ => _ !== undefined).join(','),
}
this.#realPath = join(mountsDir, remote.id || Math.random().toString(36).slice(2))
this._realPath = join(mountsDir, remote.id || Math.random().toString(36).slice(2))
}
async _forget() {
const keeper = this.#keeper
const keeper = this._keeper
if (keeper === undefined) {
return
}
this.#keeper = undefined
this._keeper = undefined
await fs.close(keeper)
await ignoreErrors.call(
this.#execa('umount', [this.getRealPath()], {
this._execa('umount', [this._getRealPath()], {
env: {
LANG: 'C',
},
@@ -42,30 +38,30 @@ export default class MountHandler extends LocalHandler {
)
}
getRealPath() {
return this.#realPath
_getRealPath() {
return this._realPath
}
async _sync() {
// in case of multiple `sync`s, ensure we properly close previous keeper
{
const keeper = this.#keeper
const keeper = this._keeper
if (keeper !== undefined) {
this.#keeper = undefined
this._keeper = undefined
ignoreErrors.call(fs.close(keeper))
}
}
const realPath = this.getRealPath()
const realPath = this._getRealPath()
await fs.ensureDir(realPath)
try {
const { type, device, options, env } = this.#params
const { type, device, options, env } = this._params
// Linux mount is more flexible in which order the mount arguments appear.
// But FreeBSD requires this order of the arguments.
await this.#execa('mount', ['-o', options, '-t', type, device, realPath], {
await this._execa('mount', ['-o', options, '-t', type, device, realPath], {
env: {
LANG: 'C',
...env,
@@ -75,7 +71,7 @@ export default class MountHandler extends LocalHandler {
try {
// the failure may mean it's already mounted, use `findmnt` to check
// that's the case
await this.#execa('findmnt', [realPath], {
await this._execa('findmnt', [realPath], {
stdio: 'ignore',
})
} catch (_) {
@@ -86,7 +82,7 @@ export default class MountHandler extends LocalHandler {
// keep an open file on the mount to prevent it from being unmounted if used
// by another handler/process
const keeperPath = `${realPath}/.keeper_${Math.random().toString(36).slice(2)}`
this.#keeper = await fs.open(keeperPath, 'w')
this._keeper = await fs.open(keeperPath, 'w')
ignoreErrors.call(fs.unlink(keeperPath))
}
}

View File

@@ -37,13 +37,8 @@ const ignoreEnoent = error => {
const noop = Function.prototype
class PrefixWrapper {
#prefix
constructor(handler, prefix) {
this.#prefix = prefix
// cannot be a private field because used by methods dynamically added
// outside of the class
this._prefix = prefix
this._handler = handler
}
@@ -55,7 +50,7 @@ class PrefixWrapper {
async list(dir, opts) {
const entries = await this._handler.list(this._resolve(dir), opts)
if (opts != null && opts.prependDir) {
const n = this.#prefix.length
const n = this._prefix.length
entries.forEach((entry, i, entries) => {
entries[i] = entry.slice(n)
})
@@ -67,21 +62,19 @@ class PrefixWrapper {
return this._handler.rename(this._resolve(oldPath), this._resolve(newPath))
}
// cannot be a private method because used by methods dynamically added
// outside of the class
_resolve(path) {
return this.#prefix + normalizePath(path)
return this._prefix + normalizePath(path)
}
}
export default class RemoteHandlerAbstract {
#rawEncryptor
#encryptor
get #encryptor() {
if (this.#rawEncryptor === undefined) {
get _encryptor() {
if (this.#encryptor === undefined) {
throw new Error(`Can't access to encryptor before remote synchronization`)
}
return this.#rawEncryptor
return this.#encryptor
}
constructor(remote, options = {}) {
@@ -118,10 +111,6 @@ export default class RemoteHandlerAbstract {
}
// Public members
//
// Should not be called directly because:
// - some concurrency limits may be applied which may lead to deadlocks
// - some preprocessing may be applied on parameters that should not be done multiple times (e.g. prefixing paths)
get type() {
throw new Error('Not implemented')
@@ -132,6 +121,10 @@ export default class RemoteHandlerAbstract {
return prefix === '/' ? this : new PrefixWrapper(this, prefix)
}
async closeFile(fd) {
await this.__closeFile(fd)
}
async createReadStream(file, { checksum = false, ignoreMissingChecksum = false, ...options } = {}) {
if (options.end !== undefined || options.start !== undefined) {
assert.strictEqual(this.isEncrypted, false, `Can't read part of a file when encryption is active ${file}`)
@@ -164,7 +157,7 @@ export default class RemoteHandlerAbstract {
}
if (this.isEncrypted) {
stream = this.#encryptor.decryptStream(stream)
stream = this._encryptor.decryptStream(stream)
} else {
// try to add the length prop if missing and not a range stream
if (stream.length === undefined && options.end === undefined && options.start === undefined) {
@@ -193,7 +186,7 @@ export default class RemoteHandlerAbstract {
path = normalizePath(path)
let checksumStream
input = this.#encryptor.encryptStream(input)
input = this._encryptor.encryptStream(input)
if (checksum) {
checksumStream = createChecksumStream()
pipeline(input, checksumStream, noop)
@@ -231,10 +224,10 @@ export default class RemoteHandlerAbstract {
assert.strictEqual(this.isEncrypted, false, `Can't compute size of an encrypted file ${file}`)
const size = await timeout.call(this._getSize(typeof file === 'string' ? normalizePath(file) : file), this._timeout)
return size - this.#encryptor.ivLength
return size - this._encryptor.ivLength
}
async __list(dir, { filter, ignoreMissing = false, prependDir = false } = {}) {
async list(dir, { filter, ignoreMissing = false, prependDir = false } = {}) {
try {
const virtualDir = normalizePath(dir)
dir = normalizePath(dir)
@@ -264,12 +257,20 @@ export default class RemoteHandlerAbstract {
return { dispose: await this._lock(path) }
}
async mkdir(dir, { mode } = {}) {
await this.__mkdir(normalizePath(dir), { mode })
}
async mktree(dir, { mode } = {}) {
await this._mktree(normalizePath(dir), { mode })
}
openFile(path, flags) {
return this.__openFile(path, flags)
}
async outputFile(file, data, { dirMode, flags = 'wx' } = {}) {
const encryptedData = this.#encryptor.encryptData(data)
const encryptedData = this._encryptor.encryptData(data)
await this._outputFile(normalizePath(file), encryptedData, { dirMode, flags })
}
@@ -278,9 +279,9 @@ export default class RemoteHandlerAbstract {
return this._read(typeof file === 'string' ? normalizePath(file) : file, buffer, position)
}
async __readFile(file, { flags = 'r' } = {}) {
async readFile(file, { flags = 'r' } = {}) {
const data = await this._readFile(normalizePath(file), { flags })
return this.#encryptor.decryptData(data)
return this._encryptor.decryptData(data)
}
async #rename(oldPath, newPath, { checksum }, createTree = true) {
@@ -300,11 +301,11 @@ export default class RemoteHandlerAbstract {
}
}
__rename(oldPath, newPath, { checksum = false } = {}) {
rename(oldPath, newPath, { checksum = false } = {}) {
return this.#rename(normalizePath(oldPath), normalizePath(newPath), { checksum })
}
async __copy(oldPath, newPath, { checksum = false } = {}) {
async copy(oldPath, newPath, { checksum = false } = {}) {
oldPath = normalizePath(oldPath)
newPath = normalizePath(newPath)
@@ -331,33 +332,33 @@ export default class RemoteHandlerAbstract {
async sync() {
await this._sync()
try {
await this.#checkMetadata()
await this._checkMetadata()
} catch (error) {
await this._forget()
throw error
}
}
async #canWriteMetadata() {
const list = await this.__list('/', {
async _canWriteMetadata() {
const list = await this.list('/', {
filter: e => !e.startsWith('.') && e !== ENCRYPTION_DESC_FILENAME && e !== ENCRYPTION_METADATA_FILENAME,
})
return list.length === 0
}
async #createMetadata() {
async _createMetadata() {
const encryptionAlgorithm = this._remote.encryptionKey === undefined ? 'none' : DEFAULT_ENCRYPTION_ALGORITHM
this.#rawEncryptor = _getEncryptor(encryptionAlgorithm, this._remote.encryptionKey)
this.#encryptor = _getEncryptor(encryptionAlgorithm, this._remote.encryptionKey)
await Promise.all([
this._writeFile(normalizePath(ENCRYPTION_DESC_FILENAME), JSON.stringify({ algorithm: encryptionAlgorithm }), {
flags: 'w',
}), // not encrypted
this.__writeFile(ENCRYPTION_METADATA_FILENAME, `{"random":"${randomUUID()}"}`, { flags: 'w' }), // encrypted
this.writeFile(ENCRYPTION_METADATA_FILENAME, `{"random":"${randomUUID()}"}`, { flags: 'w' }), // encrypted
])
}
async #checkMetadata() {
async _checkMetadata() {
let encryptionAlgorithm = 'none'
let data
try {
@@ -373,18 +374,18 @@ export default class RemoteHandlerAbstract {
}
try {
this.#rawEncryptor = _getEncryptor(encryptionAlgorithm, this._remote.encryptionKey)
this.#encryptor = _getEncryptor(encryptionAlgorithm, this._remote.encryptionKey)
// this file is encrypted
const data = await this.__readFile(ENCRYPTION_METADATA_FILENAME, 'utf-8')
const data = await this.readFile(ENCRYPTION_METADATA_FILENAME, 'utf-8')
JSON.parse(data)
} catch (error) {
// can be enoent, bad algorithm, or broeken json ( bad key or algorithm)
if (encryptionAlgorithm !== 'none') {
if (await this.#canWriteMetadata()) {
if (await this._canWriteMetadata()) {
// any other error , but on empty remote => update with remote settings
info('will update metadata of this remote')
return this.#createMetadata()
return this._createMetadata()
} else {
warn(
`The encryptionKey settings of this remote does not match the key used to create it. You won't be able to read any data from this remote`,
@@ -437,7 +438,7 @@ export default class RemoteHandlerAbstract {
await this._truncate(file, len)
}
async __unlink(file, { checksum = true } = {}) {
async unlink(file, { checksum = true } = {}) {
file = normalizePath(file)
if (checksum) {
@@ -452,8 +453,8 @@ export default class RemoteHandlerAbstract {
await this._write(typeof file === 'string' ? normalizePath(file) : file, buffer, position)
}
async __writeFile(file, data, { flags = 'wx' } = {}) {
const encryptedData = this.#encryptor.encryptData(data)
async writeFile(file, data, { flags = 'wx' } = {}) {
const encryptedData = this._encryptor.encryptData(data)
await this._writeFile(normalizePath(file), encryptedData, { flags })
}
@@ -464,8 +465,6 @@ export default class RemoteHandlerAbstract {
}
async __mkdir(dir, { mode } = {}) {
dir = normalizePath(dir)
try {
await this._mkdir(dir, { mode })
} catch (error) {
@@ -587,9 +586,9 @@ export default class RemoteHandlerAbstract {
if (validator !== undefined) {
await validator.call(this, tmpPath)
}
await this.__rename(tmpPath, path)
await this.rename(tmpPath, path)
} catch (error) {
await this.__unlink(tmpPath)
await this.unlink(tmpPath)
throw error
}
}
@@ -666,22 +665,7 @@ export default class RemoteHandlerAbstract {
}
get isEncrypted() {
return this.#encryptor.id !== 'NULL_ENCRYPTOR'
}
}
// from implementation methods, which names start with `__`, create public
// accessors on which external behaviors can be added (e.g. concurrency limits, path rewriting)
{
const proto = RemoteHandlerAbstract.prototype
for (const method of Object.getOwnPropertyNames(proto)) {
if (method.startsWith('__')) {
const publicName = method.slice(2)
assert(!Object.hasOwn(proto, publicName))
Object.defineProperty(proto, publicName, Object.getOwnPropertyDescriptor(proto, method))
}
return this._encryptor.id !== 'NULL_ENCRYPTOR'
}
}

View File

@@ -1,13 +1,11 @@
import { after, beforeEach, describe, it } from 'test'
import { strict as assert } from 'assert'
import sinon from 'sinon'
/* eslint-env jest */
import { DEFAULT_ENCRYPTION_ALGORITHM, _getEncryptor } from './_encryptor'
import { Disposable, pFromCallback, TimeoutError } from 'promise-toolbox'
import { getSyncedHandler } from '.'
import { rimraf } from 'rimraf'
import AbstractHandler from './abstract'
import fs from 'fs-extra'
import rimraf from 'rimraf'
import tmp from 'tmp'
const TIMEOUT = 10e3
@@ -26,7 +24,7 @@ class TestHandler extends AbstractHandler {
const noop = Function.prototype
const clock = sinon.useFakeTimers()
jest.useFakeTimers()
describe('closeFile()', () => {
it(`throws in case of timeout`, async () => {
@@ -35,8 +33,8 @@ describe('closeFile()', () => {
})
const promise = testHandler.closeFile({ fd: undefined, path: '' })
clock.tick(TIMEOUT)
await assert.rejects(promise, TimeoutError)
jest.advanceTimersByTime(TIMEOUT)
await expect(promise).rejects.toThrowError(TimeoutError)
})
})
@@ -47,8 +45,8 @@ describe('getInfo()', () => {
})
const promise = testHandler.getInfo()
clock.tick(TIMEOUT)
await assert.rejects(promise, TimeoutError)
jest.advanceTimersByTime(TIMEOUT)
await expect(promise).rejects.toThrowError(TimeoutError)
})
})
@@ -59,8 +57,8 @@ describe('getSize()', () => {
})
const promise = testHandler.getSize('')
clock.tick(TIMEOUT)
await assert.rejects(promise, TimeoutError)
jest.advanceTimersByTime(TIMEOUT)
await expect(promise).rejects.toThrowError(TimeoutError)
})
})
@@ -71,8 +69,8 @@ describe('list()', () => {
})
const promise = testHandler.list('.')
clock.tick(TIMEOUT)
await assert.rejects(promise, TimeoutError)
jest.advanceTimersByTime(TIMEOUT)
await expect(promise).rejects.toThrowError(TimeoutError)
})
})
@@ -83,8 +81,8 @@ describe('openFile()', () => {
})
const promise = testHandler.openFile('path')
clock.tick(TIMEOUT)
await assert.rejects(promise, TimeoutError)
jest.advanceTimersByTime(TIMEOUT)
await expect(promise).rejects.toThrowError(TimeoutError)
})
})
@@ -95,8 +93,8 @@ describe('rename()', () => {
})
const promise = testHandler.rename('oldPath', 'newPath')
clock.tick(TIMEOUT)
await assert.rejects(promise, TimeoutError)
jest.advanceTimersByTime(TIMEOUT)
await expect(promise).rejects.toThrowError(TimeoutError)
})
})
@@ -107,8 +105,8 @@ describe('rmdir()', () => {
})
const promise = testHandler.rmdir('dir')
clock.tick(TIMEOUT)
await assert.rejects(promise, TimeoutError)
jest.advanceTimersByTime(TIMEOUT)
await expect(promise).rejects.toThrowError(TimeoutError)
})
})
@@ -117,14 +115,14 @@ describe('encryption', () => {
beforeEach(async () => {
dir = await pFromCallback(cb => tmp.dir(cb))
})
after(async () => {
afterAll(async () => {
await rimraf(dir)
})
it('sync should NOT create metadata if missing (not encrypted)', async () => {
await Disposable.use(getSyncedHandler({ url: `file://${dir}` }), noop)
assert.deepEqual(await fs.readdir(dir), [])
expect(await fs.readdir(dir)).toEqual([])
})
it('sync should create metadata if missing (encrypted)', async () => {
@@ -133,12 +131,12 @@ describe('encryption', () => {
noop
)
assert.deepEqual(await fs.readdir(dir), ['encryption.json', 'metadata.json'])
expect(await fs.readdir(dir)).toEqual(['encryption.json', 'metadata.json'])
const encryption = JSON.parse(await fs.readFile(`${dir}/encryption.json`, 'utf-8'))
assert.equal(encryption.algorithm, DEFAULT_ENCRYPTION_ALGORITHM)
expect(encryption.algorithm).toEqual(DEFAULT_ENCRYPTION_ALGORITHM)
// encrypted , should not be parsable
assert.rejects(async () => JSON.parse(await fs.readFile(`${dir}/metadata.json`)))
expect(async () => JSON.parse(await fs.readFile(`${dir}/metadata.json`))).rejects.toThrowError()
})
it('sync should not modify existing metadata', async () => {
@@ -148,9 +146,9 @@ describe('encryption', () => {
await Disposable.use(await getSyncedHandler({ url: `file://${dir}` }), noop)
const encryption = JSON.parse(await fs.readFile(`${dir}/encryption.json`, 'utf-8'))
assert.equal(encryption.algorithm, 'none')
expect(encryption.algorithm).toEqual('none')
const metadata = JSON.parse(await fs.readFile(`${dir}/metadata.json`, 'utf-8'))
assert.equal(metadata.random, 'NOTSORANDOM')
expect(metadata.random).toEqual('NOTSORANDOM')
})
it('should modify metadata if empty', async () => {
@@ -162,11 +160,11 @@ describe('encryption', () => {
noop
)
let encryption = JSON.parse(await fs.readFile(`${dir}/encryption.json`, 'utf-8'))
assert.equal(encryption.algorithm, DEFAULT_ENCRYPTION_ALGORITHM)
expect(encryption.algorithm).toEqual(DEFAULT_ENCRYPTION_ALGORITHM)
await Disposable.use(getSyncedHandler({ url: `file://${dir}` }), noop)
encryption = JSON.parse(await fs.readFile(`${dir}/encryption.json`, 'utf-8'))
assert.equal(encryption.algorithm, 'none')
expect(encryption.algorithm).toEqual('none')
})
it(
@@ -180,9 +178,9 @@ describe('encryption', () => {
const handler = yield getSyncedHandler({ url: `file://${dir}?encryptionKey="73c1838d7d8a6088ca2317fb5f29cd91"` })
const encryption = JSON.parse(await fs.readFile(`${dir}/encryption.json`, 'utf-8'))
assert.equal(encryption.algorithm, DEFAULT_ENCRYPTION_ALGORITHM)
expect(encryption.algorithm).toEqual(DEFAULT_ENCRYPTION_ALGORITHM)
const metadata = JSON.parse(await handler.readFile(`./metadata.json`))
assert.equal(metadata.random, 'NOTSORANDOM')
expect(metadata.random).toEqual('NOTSORANDOM')
})
)
@@ -200,9 +198,9 @@ describe('encryption', () => {
// remote is now non empty : can't modify key anymore
await fs.writeFile(`${dir}/nonempty.json`, 'content')
await assert.rejects(
await expect(
Disposable.use(getSyncedHandler({ url: `file://${dir}?encryptionKey="73c1838d7d8a6088ca2317fb5f29cd10"` }), noop)
)
).rejects.toThrowError()
})
it('sync should fail when changing algorithm', async () => {
@@ -215,8 +213,8 @@ describe('encryption', () => {
// remote is now non empty : can't modify key anymore
await fs.writeFile(`${dir}/nonempty.json`, 'content')
await assert.rejects(
await expect(
Disposable.use(getSyncedHandler({ url: `file://${dir}?encryptionKey="73c1838d7d8a6088ca2317fb5f29cd91"` }), noop)
)
).rejects.toThrowError()
})
})

View File

@@ -1,5 +1,4 @@
import { after, afterEach, before, beforeEach, describe, it } from 'test'
import { strict as assert } from 'assert'
/* eslint-env jest */
import 'dotenv/config'
import { forOwn, random } from 'lodash'
@@ -54,11 +53,11 @@ handlers.forEach(url => {
})
}
before(async () => {
beforeAll(async () => {
handler = getHandler({ url }).addPrefix(`xo-fs-tests-${Date.now()}`)
await handler.sync()
})
after(async () => {
afterAll(async () => {
await handler.forget()
handler = undefined
})
@@ -73,63 +72,67 @@ handlers.forEach(url => {
describe('#type', () => {
it('returns the type of the remote', () => {
assert.equal(typeof handler.type, 'string')
expect(typeof handler.type).toBe('string')
})
})
describe('#getInfo()', () => {
let info
before(async () => {
beforeAll(async () => {
info = await handler.getInfo()
})
it('should return an object with info', async () => {
assert.equal(typeof info, 'object')
expect(typeof info).toBe('object')
})
it('should return correct type of attribute', async () => {
if (info.size !== undefined) {
assert.equal(typeof info.size, 'number')
expect(typeof info.size).toBe('number')
}
if (info.used !== undefined) {
assert.equal(typeof info.used, 'number')
expect(typeof info.used).toBe('number')
}
})
})
describe('#getSize()', () => {
before(() => handler.outputFile('file', TEST_DATA))
beforeEach(() => handler.outputFile('file', TEST_DATA))
testWithFileDescriptor('file', 'r', async () => {
assert.equal(await handler.getSize('file'), TEST_DATA_LEN)
expect(await handler.getSize('file')).toEqual(TEST_DATA_LEN)
})
})
describe('#list()', () => {
it(`should list the content of folder`, async () => {
await handler.outputFile('file', TEST_DATA)
assert.deepEqual(await handler.list('.'), ['file'])
await expect(await handler.list('.')).toEqual(['file'])
})
it('can prepend the directory to entries', async () => {
await handler.outputFile('dir/file', '')
assert.deepEqual(await handler.list('dir', { prependDir: true }), ['/dir/file'])
expect(await handler.list('dir', { prependDir: true })).toEqual(['/dir/file'])
})
it('can prepend the directory to entries', async () => {
await handler.outputFile('dir/file', '')
expect(await handler.list('dir', { prependDir: true })).toEqual(['/dir/file'])
})
it('throws ENOENT if no such directory', async () => {
await handler.rmtree('dir')
assert.equal((await rejectionOf(handler.list('dir'))).code, 'ENOENT')
expect((await rejectionOf(handler.list('dir'))).code).toBe('ENOENT')
})
it('can returns empty for missing directory', async () => {
assert.deepEqual(await handler.list('dir', { ignoreMissing: true }), [])
expect(await handler.list('dir', { ignoreMissing: true })).toEqual([])
})
})
describe('#mkdir()', () => {
it('creates a directory', async () => {
await handler.mkdir('dir')
assert.deepEqual(await handler.list('.'), ['dir'])
await expect(await handler.list('.')).toEqual(['dir'])
})
it('does not throw on existing directory', async () => {
@@ -140,15 +143,15 @@ handlers.forEach(url => {
it('throws ENOTDIR on existing file', async () => {
await handler.outputFile('file', '')
const error = await rejectionOf(handler.mkdir('file'))
assert.equal(error.code, 'ENOTDIR')
expect(error.code).toBe('ENOTDIR')
})
})
describe('#mktree()', () => {
it('creates a tree of directories', async () => {
await handler.mktree('dir/dir')
assert.deepEqual(await handler.list('.'), ['dir'])
assert.deepEqual(await handler.list('dir'), ['dir'])
await expect(await handler.list('.')).toEqual(['dir'])
await expect(await handler.list('dir')).toEqual(['dir'])
})
it('does not throw on existing directory', async () => {
@@ -159,27 +162,26 @@ handlers.forEach(url => {
it('throws ENOTDIR on existing file', async () => {
await handler.outputFile('dir/file', '')
const error = await rejectionOf(handler.mktree('dir/file'))
assert.equal(error.code, 'ENOTDIR')
expect(error.code).toBe('ENOTDIR')
})
it('throws ENOTDIR on existing file in path', async () => {
await handler.outputFile('file', '')
const error = await rejectionOf(handler.mktree('file/dir'))
assert.equal(error.code, 'ENOTDIR')
expect(error.code).toBe('ENOTDIR')
})
})
describe('#outputFile()', () => {
it('writes data to a file', async () => {
await handler.outputFile('file', TEST_DATA)
assert.deepEqual(await handler.readFile('file'), TEST_DATA)
expect(await handler.readFile('file')).toEqual(TEST_DATA)
})
it('throws on existing files', async () => {
await handler.unlink('file')
await handler.outputFile('file', '')
const error = await rejectionOf(handler.outputFile('file', ''))
assert.equal(error.code, 'EEXIST')
expect(error.code).toBe('EEXIST')
})
it("shouldn't timeout in case of the respect of the parallel execution restriction", async () => {
@@ -190,7 +192,7 @@ handlers.forEach(url => {
})
describe('#read()', () => {
before(() => handler.outputFile('file', TEST_DATA))
beforeEach(() => handler.outputFile('file', TEST_DATA))
const start = random(TEST_DATA_LEN)
const size = random(TEST_DATA_LEN)
@@ -198,8 +200,8 @@ handlers.forEach(url => {
testWithFileDescriptor('file', 'r', async ({ file }) => {
const buffer = Buffer.alloc(size)
const result = await handler.read(file, buffer, start)
assert.deepEqual(result.buffer, buffer)
assert.deepEqual(result, {
expect(result.buffer).toBe(buffer)
expect(result).toEqual({
buffer,
bytesRead: Math.min(size, TEST_DATA_LEN - start),
})
@@ -209,13 +211,12 @@ handlers.forEach(url => {
describe('#readFile', () => {
it('returns a buffer containing the contents of the file', async () => {
await handler.outputFile('file', TEST_DATA)
assert.deepEqual(await handler.readFile('file'), TEST_DATA)
expect(await handler.readFile('file')).toEqual(TEST_DATA)
})
it('throws on missing file', async () => {
await handler.unlink('file')
const error = await rejectionOf(handler.readFile('file'))
assert.equal(error.code, 'ENOENT')
expect(error.code).toBe('ENOENT')
})
})
@@ -224,19 +225,19 @@ handlers.forEach(url => {
await handler.outputFile('file', TEST_DATA)
await handler.rename('file', `file2`)
assert.deepEqual(await handler.list('.'), ['file2'])
assert.deepEqual(await handler.readFile(`file2`), TEST_DATA)
expect(await handler.list('.')).toEqual(['file2'])
expect(await handler.readFile(`file2`)).toEqual(TEST_DATA)
})
it(`should rename the file and create dest directory`, async () => {
await handler.outputFile('file', TEST_DATA)
await handler.rename('file', `sub/file2`)
assert.deepEqual(await handler.list('sub'), ['file2'])
assert.deepEqual(await handler.readFile(`sub/file2`), TEST_DATA)
expect(await handler.list('sub')).toEqual(['file2'])
expect(await handler.readFile(`sub/file2`)).toEqual(TEST_DATA)
})
it(`should fail with enoent if source file is missing`, async () => {
const error = await rejectionOf(handler.rename('file', `sub/file2`))
assert.equal(error.code, 'ENOENT')
expect(error.code).toBe('ENOENT')
})
})
@@ -244,15 +245,14 @@ handlers.forEach(url => {
it('should remove an empty directory', async () => {
await handler.mkdir('dir')
await handler.rmdir('dir')
assert.deepEqual(await handler.list('.'), [])
expect(await handler.list('.')).toEqual([])
})
it(`should throw on non-empty directory`, async () => {
await handler.outputFile('dir/file', '')
const error = await rejectionOf(handler.rmdir('.'))
assert.equal(error.code, 'ENOTEMPTY')
await handler.unlink('dir/file')
await expect(error.code).toEqual('ENOTEMPTY')
})
it('does not throw on missing directory', async () => {
@@ -265,7 +265,7 @@ handlers.forEach(url => {
await handler.outputFile('dir/file', '')
await handler.rmtree('dir')
assert.deepEqual(await handler.list('.'), [])
expect(await handler.list('.')).toEqual([])
})
})
@@ -273,9 +273,9 @@ handlers.forEach(url => {
it('tests the remote appears to be working', async () => {
const answer = await handler.test()
assert.equal(answer.success, true)
assert.equal(typeof answer.writeRate, 'number')
assert.equal(typeof answer.readRate, 'number')
expect(answer.success).toBe(true)
expect(typeof answer.writeRate).toBe('number')
expect(typeof answer.readRate).toBe('number')
})
})
@@ -284,7 +284,7 @@ handlers.forEach(url => {
await handler.outputFile('file', TEST_DATA)
await handler.unlink('file')
assert.deepEqual(await handler.list('.'), [])
await expect(await handler.list('.')).toEqual([])
})
it('does not throw on missing file', async () => {
@@ -294,7 +294,6 @@ handlers.forEach(url => {
describe('#write()', () => {
beforeEach(() => handler.outputFile('file', TEST_DATA))
afterEach(() => handler.unlink('file'))
const PATCH_DATA_LEN = Math.ceil(TEST_DATA_LEN / 2)
const PATCH_DATA = unsecureRandomBytes(PATCH_DATA_LEN)
@@ -323,7 +322,7 @@ handlers.forEach(url => {
describe(title, () => {
testWithFileDescriptor('file', 'r+', async ({ file }) => {
await handler.write(file, PATCH_DATA, offset)
assert.deepEqual(await handler.readFile('file'), expected)
await expect(await handler.readFile('file')).toEqual(expected)
})
})
}
@@ -331,7 +330,6 @@ handlers.forEach(url => {
})
describe('#truncate()', () => {
afterEach(() => handler.unlink('file'))
forOwn(
{
'shrinks file': (() => {
@@ -350,7 +348,7 @@ handlers.forEach(url => {
it(title, async () => {
await handler.outputFile('file', TEST_DATA)
await handler.truncate('file', length)
assert.deepEqual(await handler.readFile('file'), expected)
await expect(await handler.readFile('file')).toEqual(expected)
})
}
)

View File

@@ -34,14 +34,11 @@ function dontAddSyncStackTrace(fn, ...args) {
}
export default class LocalHandler extends RemoteHandlerAbstract {
#addSyncStackTrace
#retriesOnEagain
constructor(remote, opts = {}) {
super(remote)
this.#addSyncStackTrace = opts.syncStackTraces ?? true ? addSyncStackTrace : dontAddSyncStackTrace
this.#retriesOnEagain = {
this._addSyncStackTrace = opts.syncStackTraces ?? true ? addSyncStackTrace : dontAddSyncStackTrace
this._retriesOnEagain = {
delay: 1e3,
retries: 9,
...opts.retriesOnEagain,
@@ -54,26 +51,26 @@ export default class LocalHandler extends RemoteHandlerAbstract {
return 'file'
}
getRealPath() {
_getRealPath() {
return this._remote.path
}
getFilePath(file) {
return this.getRealPath() + file
_getFilePath(file) {
return this._getRealPath() + file
}
async _closeFile(fd) {
return this.#addSyncStackTrace(fs.close, fd)
return this._addSyncStackTrace(fs.close, fd)
}
async _copy(oldPath, newPath) {
return this.#addSyncStackTrace(fs.copy, this.getFilePath(oldPath), this.getFilePath(newPath))
return this._addSyncStackTrace(fs.copy, this._getFilePath(oldPath), this._getFilePath(newPath))
}
async _createReadStream(file, options) {
if (typeof file === 'string') {
const stream = fs.createReadStream(this.getFilePath(file), options)
await this.#addSyncStackTrace(fromEvent, stream, 'open')
const stream = fs.createReadStream(this._getFilePath(file), options)
await this._addSyncStackTrace(fromEvent, stream, 'open')
return stream
}
return fs.createReadStream('', {
@@ -85,8 +82,8 @@ export default class LocalHandler extends RemoteHandlerAbstract {
async _createWriteStream(file, options) {
if (typeof file === 'string') {
const stream = fs.createWriteStream(this.getFilePath(file), options)
await this.#addSyncStackTrace(fromEvent, stream, 'open')
const stream = fs.createWriteStream(this._getFilePath(file), options)
await this._addSyncStackTrace(fromEvent, stream, 'open')
return stream
}
return fs.createWriteStream('', {
@@ -101,7 +98,7 @@ export default class LocalHandler extends RemoteHandlerAbstract {
// filesystem, type, size, used, available, capacity and mountpoint.
// size, used, available and capacity may be `NaN` so we remove any `NaN`
// value from the object.
const info = await df.file(this.getFilePath('/'))
const info = await df.file(this._getFilePath('/'))
Object.keys(info).forEach(key => {
if (Number.isNaN(info[key])) {
delete info[key]
@@ -112,16 +109,16 @@ export default class LocalHandler extends RemoteHandlerAbstract {
}
async _getSize(file) {
const stats = await this.#addSyncStackTrace(fs.stat, this.getFilePath(typeof file === 'string' ? file : file.path))
const stats = await this._addSyncStackTrace(fs.stat, this._getFilePath(typeof file === 'string' ? file : file.path))
return stats.size
}
async _list(dir) {
return this.#addSyncStackTrace(fs.readdir, this.getFilePath(dir))
return this._addSyncStackTrace(fs.readdir, this._getFilePath(dir))
}
async _lock(path) {
const acquire = lockfile.lock.bind(undefined, this.getFilePath(path), {
const acquire = lockfile.lock.bind(undefined, this._getFilePath(path), {
async onCompromised(error) {
warn('lock compromised', { error })
try {
@@ -133,11 +130,11 @@ export default class LocalHandler extends RemoteHandlerAbstract {
},
})
let release = await this.#addSyncStackTrace(acquire)
let release = await this._addSyncStackTrace(acquire)
return async () => {
try {
await this.#addSyncStackTrace(release)
await this._addSyncStackTrace(release)
} catch (error) {
warn('lock could not be released', { error })
}
@@ -145,18 +142,18 @@ export default class LocalHandler extends RemoteHandlerAbstract {
}
_mkdir(dir, { mode }) {
return this.#addSyncStackTrace(fs.mkdir, this.getFilePath(dir), { mode })
return this._addSyncStackTrace(fs.mkdir, this._getFilePath(dir), { mode })
}
async _openFile(path, flags) {
return this.#addSyncStackTrace(fs.open, this.getFilePath(path), flags)
return this._addSyncStackTrace(fs.open, this._getFilePath(path), flags)
}
async _read(file, buffer, position) {
const needsClose = typeof file === 'string'
file = needsClose ? await this.#addSyncStackTrace(fs.open, this.getFilePath(file), 'r') : file.fd
file = needsClose ? await this._addSyncStackTrace(fs.open, this._getFilePath(file), 'r') : file.fd
try {
return await this.#addSyncStackTrace(
return await this._addSyncStackTrace(
fs.read,
file,
buffer,
@@ -166,44 +163,44 @@ export default class LocalHandler extends RemoteHandlerAbstract {
)
} finally {
if (needsClose) {
await this.#addSyncStackTrace(fs.close, file)
await this._addSyncStackTrace(fs.close, file)
}
}
}
async _readFile(file, options) {
const filePath = this.getFilePath(file)
return await this.#addSyncStackTrace(retry, () => fs.readFile(filePath, options), this.#retriesOnEagain)
const filePath = this._getFilePath(file)
return await this._addSyncStackTrace(retry, () => fs.readFile(filePath, options), this._retriesOnEagain)
}
async _rename(oldPath, newPath) {
return this.#addSyncStackTrace(fs.rename, this.getFilePath(oldPath), this.getFilePath(newPath))
return this._addSyncStackTrace(fs.rename, this._getFilePath(oldPath), this._getFilePath(newPath))
}
async _rmdir(dir) {
return this.#addSyncStackTrace(fs.rmdir, this.getFilePath(dir))
return this._addSyncStackTrace(fs.rmdir, this._getFilePath(dir))
}
async _sync() {
const path = this.getRealPath('/')
await this.#addSyncStackTrace(fs.ensureDir, path)
await this.#addSyncStackTrace(fs.access, path, fs.R_OK | fs.W_OK)
const path = this._getRealPath('/')
await this._addSyncStackTrace(fs.ensureDir, path)
await this._addSyncStackTrace(fs.access, path, fs.R_OK | fs.W_OK)
}
_truncate(file, len) {
return this.#addSyncStackTrace(fs.truncate, this.getFilePath(file), len)
return this._addSyncStackTrace(fs.truncate, this._getFilePath(file), len)
}
async _unlink(file) {
const filePath = this.getFilePath(file)
return await this.#addSyncStackTrace(retry, () => fs.unlink(filePath), this.#retriesOnEagain)
const filePath = this._getFilePath(file)
return await this._addSyncStackTrace(retry, () => fs.unlink(filePath), this._retriesOnEagain)
}
_writeFd(file, buffer, position) {
return this.#addSyncStackTrace(fs.write, file.fd, buffer, 0, buffer.length, position)
return this._addSyncStackTrace(fs.write, file.fd, buffer, 0, buffer.length, position)
}
_writeFile(file, data, { flags }) {
return this.#addSyncStackTrace(fs.writeFile, this.getFilePath(file), data, { flag: flags })
return this._addSyncStackTrace(fs.writeFile, this._getFilePath(file), data, { flag: flags })
}
}

View File

@@ -34,10 +34,6 @@ const MAX_PART_SIZE = 1024 * 1024 * 1024 * 5 // 5GB
const { warn } = createLogger('xo:fs:s3')
export default class S3Handler extends RemoteHandlerAbstract {
#bucket
#dir
#s3
constructor(remote, _opts) {
super(remote)
const {
@@ -50,7 +46,7 @@ export default class S3Handler extends RemoteHandlerAbstract {
region = guessAwsRegion(host),
} = parse(remote.url)
this.#s3 = new S3Client({
this._s3 = new S3Client({
apiVersion: '2006-03-01',
endpoint: `${protocol}://${host}`,
forcePathStyle: true,
@@ -73,27 +69,27 @@ export default class S3Handler extends RemoteHandlerAbstract {
})
// Workaround for https://github.com/aws/aws-sdk-js-v3/issues/2673
this.#s3.middlewareStack.use(getApplyMd5BodyChecksumPlugin(this.#s3.config))
this._s3.middlewareStack.use(getApplyMd5BodyChecksumPlugin(this._s3.config))
const parts = split(path)
this.#bucket = parts.shift()
this.#dir = join(...parts)
this._bucket = parts.shift()
this._dir = join(...parts)
}
get type() {
return 's3'
}
#makeCopySource(path) {
return join(this.#bucket, this.#dir, path)
_makeCopySource(path) {
return join(this._bucket, this._dir, path)
}
#makeKey(file) {
return join(this.#dir, file)
_makeKey(file) {
return join(this._dir, file)
}
#makePrefix(dir) {
const prefix = join(this.#dir, dir, '/')
_makePrefix(dir) {
const prefix = join(this._dir, dir, '/')
// no prefix for root
if (prefix !== './') {
@@ -101,20 +97,20 @@ export default class S3Handler extends RemoteHandlerAbstract {
}
}
#createParams(file) {
return { Bucket: this.#bucket, Key: this.#makeKey(file) }
_createParams(file) {
return { Bucket: this._bucket, Key: this._makeKey(file) }
}
async #multipartCopy(oldPath, newPath) {
async _multipartCopy(oldPath, newPath) {
const size = await this._getSize(oldPath)
const CopySource = this.#makeCopySource(oldPath)
const multipartParams = await this.#s3.send(new CreateMultipartUploadCommand({ ...this.#createParams(newPath) }))
const CopySource = this._makeCopySource(oldPath)
const multipartParams = await this._s3.send(new CreateMultipartUploadCommand({ ...this._createParams(newPath) }))
try {
const parts = []
let start = 0
while (start < size) {
const partNumber = parts.length + 1
const upload = await this.#s3.send(
const upload = await this._s3.send(
new UploadPartCopyCommand({
...multipartParams,
CopySource,
@@ -125,31 +121,31 @@ export default class S3Handler extends RemoteHandlerAbstract {
parts.push({ ETag: upload.CopyPartResult.ETag, PartNumber: partNumber })
start += MAX_PART_SIZE
}
await this.#s3.send(
await this._s3.send(
new CompleteMultipartUploadCommand({
...multipartParams,
MultipartUpload: { Parts: parts },
})
)
} catch (e) {
await this.#s3.send(new AbortMultipartUploadCommand(multipartParams))
await this._s3.send(new AbortMultipartUploadCommand(multipartParams))
throw e
}
}
async _copy(oldPath, newPath) {
const CopySource = this.#makeCopySource(oldPath)
const CopySource = this._makeCopySource(oldPath)
try {
await this.#s3.send(
await this._s3.send(
new CopyObjectCommand({
...this.#createParams(newPath),
...this._createParams(newPath),
CopySource,
})
)
} catch (e) {
// object > 5GB must be copied part by part
if (e.name === 'EntityTooLarge') {
return this.#multipartCopy(oldPath, newPath)
return this._multipartCopy(oldPath, newPath)
}
// normalize this error code
if (e.name === 'NoSuchKey') {
@@ -163,20 +159,20 @@ export default class S3Handler extends RemoteHandlerAbstract {
}
}
async #isNotEmptyDir(path) {
const result = await this.#s3.send(
async _isNotEmptyDir(path) {
const result = await this._s3.send(
new ListObjectsV2Command({
Bucket: this.#bucket,
Bucket: this._bucket,
MaxKeys: 1,
Prefix: this.#makePrefix(path),
Prefix: this._makePrefix(path),
})
)
return result.Contents?.length > 0
}
async #isFile(path) {
async _isFile(path) {
try {
await this.#s3.send(new HeadObjectCommand(this.#createParams(path)))
await this._s3.send(new HeadObjectCommand(this._createParams(path)))
return true
} catch (error) {
if (error.name === 'NotFound') {
@@ -193,9 +189,9 @@ export default class S3Handler extends RemoteHandlerAbstract {
pipeline(input, Body, () => {})
const upload = new Upload({
client: this.#s3,
client: this._s3,
params: {
...this.#createParams(path),
...this._createParams(path),
Body,
},
})
@@ -206,7 +202,7 @@ export default class S3Handler extends RemoteHandlerAbstract {
try {
await validator.call(this, path)
} catch (error) {
await this.__unlink(path)
await this.unlink(path)
throw error
}
}
@@ -228,9 +224,9 @@ export default class S3Handler extends RemoteHandlerAbstract {
},
})
async _writeFile(file, data, options) {
return this.#s3.send(
return this._s3.send(
new PutObjectCommand({
...this.#createParams(file),
...this._createParams(file),
Body: data,
})
)
@@ -238,7 +234,7 @@ export default class S3Handler extends RemoteHandlerAbstract {
async _createReadStream(path, options) {
try {
return (await this.#s3.send(new GetObjectCommand(this.#createParams(path)))).Body
return (await this._s3.send(new GetObjectCommand(this._createParams(path)))).Body
} catch (e) {
if (e.name === 'NoSuchKey') {
const error = new Error(`ENOENT: no such file '${path}'`)
@@ -251,9 +247,9 @@ export default class S3Handler extends RemoteHandlerAbstract {
}
async _unlink(path) {
await this.#s3.send(new DeleteObjectCommand(this.#createParams(path)))
await this._s3.send(new DeleteObjectCommand(this._createParams(path)))
if (await this.#isNotEmptyDir(path)) {
if (await this._isNotEmptyDir(path)) {
const error = new Error(`EISDIR: illegal operation on a directory, unlink '${path}'`)
error.code = 'EISDIR'
error.path = path
@@ -264,12 +260,12 @@ export default class S3Handler extends RemoteHandlerAbstract {
async _list(dir) {
let NextContinuationToken
const uniq = new Set()
const Prefix = this.#makePrefix(dir)
const Prefix = this._makePrefix(dir)
do {
const result = await this.#s3.send(
const result = await this._s3.send(
new ListObjectsV2Command({
Bucket: this.#bucket,
Bucket: this._bucket,
Prefix,
Delimiter: '/',
// will only return path until delimiters
@@ -299,7 +295,7 @@ export default class S3Handler extends RemoteHandlerAbstract {
}
async _mkdir(path) {
if (await this.#isFile(path)) {
if (await this._isFile(path)) {
const error = new Error(`ENOTDIR: file already exists, mkdir '${path}'`)
error.code = 'ENOTDIR'
error.path = path
@@ -310,15 +306,15 @@ export default class S3Handler extends RemoteHandlerAbstract {
// s3 doesn't have a rename operation, so copy + delete source
async _rename(oldPath, newPath) {
await this.__copy(oldPath, newPath)
await this.#s3.send(new DeleteObjectCommand(this.#createParams(oldPath)))
await this.copy(oldPath, newPath)
await this._s3.send(new DeleteObjectCommand(this._createParams(oldPath)))
}
async _getSize(file) {
if (typeof file !== 'string') {
file = file.fd
}
const result = await this.#s3.send(new HeadObjectCommand(this.#createParams(file)))
const result = await this._s3.send(new HeadObjectCommand(this._createParams(file)))
return +result.ContentLength
}
@@ -326,15 +322,15 @@ export default class S3Handler extends RemoteHandlerAbstract {
if (typeof file !== 'string') {
file = file.fd
}
const params = this.#createParams(file)
const params = this._createParams(file)
params.Range = `bytes=${position}-${position + buffer.length - 1}`
try {
const result = await this.#s3.send(new GetObjectCommand(params))
const result = await this._s3.send(new GetObjectCommand(params))
const bytesRead = await copyStreamToBuffer(result.Body, buffer)
return { bytesRead, buffer }
} catch (e) {
if (e.name === 'NoSuchKey') {
if (await this.#isNotEmptyDir(file)) {
if (await this._isNotEmptyDir(file)) {
const error = new Error(`${file} is a directory`)
error.code = 'EISDIR'
error.path = file
@@ -346,7 +342,7 @@ export default class S3Handler extends RemoteHandlerAbstract {
}
async _rmdir(path) {
if (await this.#isNotEmptyDir(path)) {
if (await this._isNotEmptyDir(path)) {
const error = new Error(`ENOTEMPTY: directory not empty, rmdir '${path}`)
error.code = 'ENOTEMPTY'
error.path = path
@@ -360,11 +356,11 @@ export default class S3Handler extends RemoteHandlerAbstract {
// @todo : use parallel processing for unlink
async _rmtree(path) {
let NextContinuationToken
const Prefix = this.#makePrefix(path)
const Prefix = this._makePrefix(path)
do {
const result = await this.#s3.send(
const result = await this._s3.send(
new ListObjectsV2Command({
Bucket: this.#bucket,
Bucket: this._bucket,
Prefix,
ContinuationToken: NextContinuationToken,
})
@@ -376,9 +372,9 @@ export default class S3Handler extends RemoteHandlerAbstract {
async ({ Key }) => {
// _unlink will add the prefix, but Key contains everything
// also we don't need to check if we delete a directory, since the list only return files
await this.#s3.send(
await this._s3.send(
new DeleteObjectCommand({
Bucket: this.#bucket,
Bucket: this._bucket,
Key,
})
)

View File

@@ -1,5 +1,9 @@
# ChangeLog
## **next**
- Add ability to open VM console in new window (PR [#6827](https://github.com/vatesfr/xen-orchestra/pull/6827))
## **0.2.0**
- Invalidate sessionId token after logout (PR [#6480](https://github.com/vatesfr/xen-orchestra/pull/6480))

View File

@@ -19,8 +19,8 @@
"@types/d3-time-format": "^4.0.0",
"@types/lodash-es": "^4.17.6",
"@types/marked": "^4.0.8",
"@vueuse/core": "^10.1.2",
"@vueuse/math": "^10.1.2",
"@vueuse/core": "^9.5.0",
"@vueuse/math": "^9.5.0",
"complex-matcher": "^0.7.0",
"d3-time-format": "^4.1.0",
"decorator-synchronized": "^0.6.0",
@@ -34,19 +34,19 @@
"lodash-es": "^4.17.21",
"make-error": "^1.3.6",
"marked": "^4.2.12",
"pinia": "^2.1.2",
"pinia": "^2.0.14",
"placement.js": "^1.0.0-beta.5",
"vue": "^3.3.4",
"vue": "^3.2.37",
"vue-echarts": "^6.2.3",
"vue-i18n": "^9.2.2",
"vue-router": "^4.2.1"
"vue-i18n": "9",
"vue-router": "^4.0.16"
},
"devDependencies": {
"@intlify/unplugin-vue-i18n": "^0.10.0",
"@intlify/vite-plugin-vue-i18n": "^6.0.1",
"@limegrass/eslint-plugin-import-alias": "^1.0.5",
"@rushstack/eslint-patch": "^1.1.0",
"@types/node": "^16.11.41",
"@vitejs/plugin-vue": "^4.2.3",
"@vitejs/plugin-vue": "^3.2.0",
"@vue/eslint-config-prettier": "^7.0.0",
"@vue/eslint-config-typescript": "^11.0.0",
"@vue/tsconfig": "^0.1.3",
@@ -56,9 +56,9 @@
"postcss-custom-media": "^9.0.1",
"postcss-nested": "^6.0.0",
"typescript": "^4.9.3",
"vite": "^4.3.8",
"vite-plugin-pages": "^0.29.1",
"vue-tsc": "^1.6.5"
"vite": "^3.2.4",
"vite-plugin-pages": "^0.27.1",
"vue-tsc": "^1.0.9"
},
"private": true,
"homepage": "https://github.com/vatesfr/xen-orchestra/tree/master/@xen-orchestra/lite",

View File

@@ -1,12 +1,32 @@
<template>
<UnreachableHostsModal />
<UiModal
v-if="isSslModalOpen"
:icon="faServer"
color="error"
@close="clearUnreachableHostsUrls"
>
<template #title>{{ $t("unreachable-hosts") }}</template>
<template #subtitle>{{ $t("following-hosts-unreachable") }}</template>
<p>{{ $t("allow-self-signed-ssl") }}</p>
<ul>
<li v-for="url in unreachableHostsUrls" :key="url.hostname">
<a :href="url.href" rel="noopener" target="_blank">{{ url.href }}</a>
</li>
</ul>
<template #buttons>
<UiButton color="success" @click="reload">
{{ $t("unreachable-hosts-reload-page") }}
</UiButton>
<UiButton @click="clearUnreachableHostsUrls">{{ $t("cancel") }}</UiButton>
</template>
</UiModal>
<div v-if="!$route.meta.hasStoryNav && !xenApiStore.isConnected">
<AppLogin />
</div>
<div v-else>
<AppHeader />
<AppHeader v-if="uiStore.hasUi" />
<div style="display: flex">
<AppNavigation />
<AppNavigation v-if="uiStore.hasUi" />
<main class="main">
<RouterView />
</main>
@@ -21,14 +41,21 @@ import AppHeader from "@/components/AppHeader.vue";
import AppLogin from "@/components/AppLogin.vue";
import AppNavigation from "@/components/AppNavigation.vue";
import AppTooltips from "@/components/AppTooltips.vue";
import UnreachableHostsModal from "@/components/UnreachableHostsModal.vue";
import UiButton from "@/components/ui/UiButton.vue";
import UiModal from "@/components/ui/UiModal.vue";
import { useChartTheme } from "@/composables/chart-theme.composable";
import { useHostStore } from "@/stores/host.store";
import { usePoolStore } from "@/stores/pool.store";
import { useUiStore } from "@/stores/ui.store";
import { useXenApiStore } from "@/stores/xen-api.store";
import { faServer } from "@fortawesome/free-solid-svg-icons";
import { useActiveElement, useMagicKeys, whenever } from "@vueuse/core";
import { logicAnd } from "@vueuse/math";
import { computed } from "vue";
import { difference } from "lodash-es";
import { computed, ref, watch } from "vue";
const unreachableHostsUrls = ref<URL[]>([]);
const clearUnreachableHostsUrls = () => (unreachableHostsUrls.value = []);
let link = document.querySelector(
"link[rel~='icon']"
@@ -43,6 +70,7 @@ link.href = favicon;
document.title = "XO Lite";
const xenApiStore = useXenApiStore();
const { records: hosts } = useHostStore().subscribe();
const { pool } = usePoolStore().subscribe();
useChartTheme();
const uiStore = useUiStore();
@@ -65,6 +93,17 @@ if (import.meta.env.DEV) {
);
}
watch(hosts, (hosts, previousHosts) => {
difference(hosts, previousHosts).forEach((host) => {
const url = new URL("http://localhost");
url.protocol = window.location.protocol;
url.hostname = host.address;
fetch(url, { mode: "no-cors" }).catch(() =>
unreachableHostsUrls.value.push(url)
);
});
});
whenever(
() => pool.value?.$ref,
async (poolRef) => {
@@ -73,6 +112,9 @@ whenever(
await xenApi.startWatch();
}
);
const isSslModalOpen = computed(() => unreachableHostsUrls.value.length > 0);
const reload = () => window.location.reload();
</script>
<style lang="postcss">

View File

@@ -1,15 +1,15 @@
<template>
<div v-if="!isDisabled" ref="tooltipElement" class="app-tooltip">
<span class="triangle" />
<span class="label">{{ options.content }}</span>
<span class="label">{{ content }}</span>
</div>
</template>
<script lang="ts" setup>
import type { TooltipOptions } from "@/stores/tooltip.store";
import { isString } from "lodash-es";
import { isEmpty, isFunction, isString } from "lodash-es";
import place from "placement.js";
import { computed, ref, watchEffect } from "vue";
import type { TooltipOptions } from "@/stores/tooltip.store";
const props = defineProps<{
target: HTMLElement;
@@ -18,13 +18,29 @@ const props = defineProps<{
const tooltipElement = ref<HTMLElement>();
const isDisabled = computed(() =>
isString(props.options.content)
? props.options.content.trim() === ""
: props.options.content === false
const content = computed(() =>
isString(props.options) ? props.options : props.options.content
);
const placement = computed(() => props.options.placement ?? "top");
const isDisabled = computed(() => {
if (isEmpty(content.value)) {
return true;
}
if (isString(props.options)) {
return false;
}
if (isFunction(props.options.disabled)) {
return props.options.disabled(props.target);
}
return props.options.disabled ?? false;
});
const placement = computed(() =>
isString(props.options) ? "top" : props.options.placement ?? "top"
);
watchEffect(() => {
if (tooltipElement.value) {

View File

@@ -1,5 +1,5 @@
<template>
<div ref="vmConsoleContainer" class="vm-console" />
<div ref="consoleContainer" class="remote-console" />
</template>
<script lang="ts" setup>
@@ -19,7 +19,7 @@ const props = defineProps<{
isConsoleAvailable: boolean;
}>();
const vmConsoleContainer = ref<HTMLDivElement>();
const consoleContainer = ref<HTMLDivElement>();
const xenApiStore = useXenApiStore();
const url = computed(() => {
if (xenApiStore.currentSessionId == null) {
@@ -78,7 +78,7 @@ const createVncConnection = async () => {
await promiseTimeout(FIBONACCI_MS_ARRAY[nConnectionAttempts - 1]);
}
vncClient = new VncClient(vmConsoleContainer.value!, url.value!.toString(), {
vncClient = new VncClient(consoleContainer.value!, url.value!.toString(), {
wsProtocols: ["binary"],
});
vncClient.scaleViewport = true;
@@ -91,7 +91,7 @@ watch(url, clearVncClient);
watchEffect(() => {
if (
url.value === undefined ||
vmConsoleContainer.value === undefined ||
consoleContainer.value === undefined ||
!props.isConsoleAvailable
) {
return;
@@ -107,8 +107,8 @@ onBeforeUnmount(() => {
</script>
<style lang="postcss" scoped>
.vm-console {
height: 80rem;
.remote-console {
height: 100%;
& > :deep(div) {
background-color: transparent !important;

View File

@@ -1,59 +0,0 @@
<template>
<UiModal
v-if="isSslModalOpen"
:icon="faServer"
color="error"
@close="clearUnreachableHostsUrls"
>
<template #title>{{ $t("unreachable-hosts") }}</template>
<div class="description">
<p>{{ $t("following-hosts-unreachable") }}</p>
<p>{{ $t("allow-self-signed-ssl") }}</p>
<ul>
<li v-for="url in unreachableHostsUrls" :key="url">
<a :href="url" class="link" rel="noopener" target="_blank">{{
url
}}</a>
</li>
</ul>
</div>
<template #buttons>
<UiButton color="success" @click="reload">
{{ $t("unreachable-hosts-reload-page") }}
</UiButton>
<UiButton @click="clearUnreachableHostsUrls">{{ $t("cancel") }}</UiButton>
</template>
</UiModal>
</template>
<script lang="ts" setup>
import { faServer } from "@fortawesome/free-solid-svg-icons";
import UiModal from "@/components/ui/UiModal.vue";
import UiButton from "@/components/ui/UiButton.vue";
import { computed, ref, watch } from "vue";
import { difference } from "lodash";
import { useHostStore } from "@/stores/host.store";
const { records: hosts } = useHostStore().subscribe();
const unreachableHostsUrls = ref<Set<string>>(new Set());
const clearUnreachableHostsUrls = () => unreachableHostsUrls.value.clear();
const isSslModalOpen = computed(() => unreachableHostsUrls.value.size > 0);
const reload = () => window.location.reload();
watch(hosts, (nextHosts, previousHosts) => {
difference(nextHosts, previousHosts).forEach((host) => {
const url = new URL("http://localhost");
url.protocol = window.location.protocol;
url.hostname = host.address;
fetch(url, { mode: "no-cors" }).catch(() =>
unreachableHostsUrls.value.add(url.toString())
);
});
});
</script>
<style lang="postcss" scoped>
.description p {
margin: 1rem 0;
}
</style>

View File

@@ -4,11 +4,11 @@
<div
v-for="item in computedData.sortedArray"
:key="item.id"
class="progress-item"
:class="{
warning: item.value > MIN_WARNING_VALUE,
error: item.value > MIN_DANGEROUS_VALUE,
}"
class="progress-item"
>
<UiProgressBar :value="item.value" color="custom" />
<UiProgressLegend
@@ -18,15 +18,15 @@
</div>
<slot :total-percent="computedData.totalPercentUsage" name="footer" />
</template>
<UiCardSpinner v-else />
<UiSpinner v-else class="spinner" />
</div>
</template>
<script lang="ts" setup>
import { computed } from "vue";
import UiProgressBar from "@/components/ui/progress/UiProgressBar.vue";
import UiProgressLegend from "@/components/ui/progress/UiProgressLegend.vue";
import UiCardSpinner from "@/components/ui/UiCardSpinner.vue";
import { computed } from "vue";
import UiSpinner from "@/components/ui/UiSpinner.vue";
interface Data {
id: string;
@@ -67,6 +67,14 @@ const computedData = computed(() => {
</script>
<style lang="postcss" scoped>
.spinner {
color: var(--color-extra-blue-base);
display: flex;
margin: auto;
width: 40px;
height: 40px;
}
.progress-item:nth-child(1) {
--progress-bar-color: var(--color-extra-blue-d60);
}
@@ -83,11 +91,9 @@ const computedData = computed(() => {
--progress-bar-height: 1.2rem;
--progress-bar-color: var(--color-extra-blue-l20);
--progress-bar-background-color: var(--color-blue-scale-400);
&.warning {
--progress-bar-color: var(--color-orange-world-base);
}
&.error {
--progress-bar-color: var(--color-red-vates-base);
}

View File

@@ -18,19 +18,33 @@
</component>
</template>
<script lang="ts">
export default {
name: "FormCheckbox",
inheritAttrs: false,
};
</script>
<script lang="ts" setup>
import { type HTMLAttributes, computed, inject, ref } from "vue";
import {
type HTMLAttributes,
type InputHTMLAttributes,
computed,
inject,
ref,
} from "vue";
import { faCheck, faCircle, faMinus } from "@fortawesome/free-solid-svg-icons";
import { useVModel } from "@vueuse/core";
import UiIcon from "@/components/ui/icon/UiIcon.vue";
defineOptions({ inheritAttrs: false });
const props = defineProps<{
// Temporary workaround for https://github.com/vuejs/core/issues/4294
interface Props extends Omit<InputHTMLAttributes, ""> {
modelValue?: unknown;
disabled?: boolean;
wrapperAttrs?: HTMLAttributes;
}>();
}
const props = defineProps<Props>();
const emit = defineEmits<{
(event: "update:modelValue", value: boolean): void;

View File

@@ -44,9 +44,17 @@
</span>
</template>
<script lang="ts">
export default {
name: "FormInput",
inheritAttrs: false,
};
</script>
<script lang="ts" setup>
import {
type HTMLAttributes,
type InputHTMLAttributes,
computed,
inject,
nextTick,
@@ -59,22 +67,20 @@ import { faAngleDown } from "@fortawesome/free-solid-svg-icons";
import { useTextareaAutosize, useVModel } from "@vueuse/core";
import UiIcon from "@/components/ui/icon/UiIcon.vue";
defineOptions({ inheritAttrs: false });
// Temporary workaround for https://github.com/vuejs/core/issues/4294
interface Props extends Omit<InputHTMLAttributes, ""> {
modelValue?: unknown;
color?: Color;
before?: Omit<IconDefinition, ""> | string;
after?: Omit<IconDefinition, ""> | string;
beforeWidth?: string;
afterWidth?: string;
disabled?: boolean;
right?: boolean;
wrapperAttrs?: HTMLAttributes;
}
const props = withDefaults(
defineProps<{
modelValue?: any;
color?: Color;
before?: IconDefinition | string;
after?: IconDefinition | string;
beforeWidth?: string;
afterWidth?: string;
disabled?: boolean;
right?: boolean;
wrapperAttrs?: HTMLAttributes;
}>(),
{ color: "info" }
);
const props = withDefaults(defineProps<Props>(), { color: "info" });
const inputElement = ref();

View File

@@ -1,41 +0,0 @@
<template>
<div class="form-input-group">
<slot />
</div>
</template>
<style lang="postcss" scoped>
.form-input-group {
display: inline-flex;
align-items: center;
:slotted(.form-input),
:slotted(.form-select) {
&:hover {
z-index: 1;
}
&:focus-within {
z-index: 2;
}
&:not(:first-child) {
margin-left: -1px;
.input,
.select {
border-top-left-radius: 0;
border-bottom-left-radius: 0;
}
}
&:not(:last-child) {
.input,
.select {
border-top-right-radius: 0;
border-bottom-right-radius: 0;
}
}
}
}
</style>

View File

@@ -1,5 +1,12 @@
<template>
<li v-if="host !== undefined" class="infra-host-item">
<li
v-if="host !== undefined"
v-tooltip="{
content: host.name_label,
disabled: isTooltipDisabled,
}"
class="infra-host-item"
>
<InfraItemLabel
:active="isCurrentHost"
:icon="faServer"
@@ -29,6 +36,7 @@ import InfraAction from "@/components/infra/InfraAction.vue";
import InfraItemLabel from "@/components/infra/InfraItemLabel.vue";
import InfraVmList from "@/components/infra/InfraVmList.vue";
import { vTooltip } from "@/directives/tooltip.directive";
import { hasEllipsis } from "@/libs/utils";
import { useHostStore } from "@/stores/host.store";
import { usePoolStore } from "@/stores/pool.store";
import { useUiStore } from "@/stores/ui.store";
@@ -58,6 +66,9 @@ const isCurrentHost = computed(
() => props.hostOpaqueRef === uiStore.currentHostOpaqueRef
);
const [isExpanded, toggle] = useToggle(true);
const isTooltipDisabled = (target: HTMLElement) =>
!hasEllipsis(target.querySelector(".text"));
</script>
<style lang="postcss" scoped>

View File

@@ -7,9 +7,9 @@
class="infra-item-label"
v-bind="$attrs"
>
<a :href="href" class="link" @click="navigate" v-tooltip="hasTooltip">
<a :href="href" class="link" @click="navigate">
<UiIcon :icon="icon" class="icon" />
<div ref="textElement" class="text">
<div class="text">
<slot />
</div>
</a>
@@ -22,10 +22,7 @@
<script lang="ts" setup>
import UiIcon from "@/components/ui/icon/UiIcon.vue";
import { vTooltip } from "@/directives/tooltip.directive";
import { hasEllipsis } from "@/libs/utils";
import type { IconDefinition } from "@fortawesome/fontawesome-common-types";
import { computed, ref } from "vue";
import type { RouteLocationRaw } from "vue-router";
defineProps<{
@@ -33,9 +30,6 @@ defineProps<{
route: RouteLocationRaw;
active?: boolean;
}>();
const textElement = ref<HTMLElement>();
const hasTooltip = computed(() => hasEllipsis(textElement.value));
</script>
<style lang="postcss" scoped>

View File

@@ -1,5 +1,13 @@
<template>
<li v-if="vm !== undefined" ref="rootElement" class="infra-vm-item">
<li
v-if="vm !== undefined"
ref="rootElement"
v-tooltip="{
content: vm.name_label,
disabled: isTooltipDisabled,
}"
class="infra-vm-item"
>
<InfraItemLabel
v-if="isVisible"
:icon="faDisplay"
@@ -19,6 +27,8 @@
import InfraAction from "@/components/infra/InfraAction.vue";
import InfraItemLabel from "@/components/infra/InfraItemLabel.vue";
import PowerStateIcon from "@/components/PowerStateIcon.vue";
import { vTooltip } from "@/directives/tooltip.directive";
import { hasEllipsis } from "@/libs/utils";
import { useVmStore } from "@/stores/vm.store";
import { faDisplay } from "@fortawesome/free-solid-svg-icons";
import { useIntersectionObserver } from "@vueuse/core";
@@ -39,6 +49,9 @@ const { stop } = useIntersectionObserver(rootElement, ([entry]) => {
stop();
}
});
const isTooltipDisabled = (target: HTMLElement) =>
!hasEllipsis(target.querySelector(".text"));
</script>
<style lang="postcss" scoped>

View File

@@ -1,14 +1,13 @@
<template>
<UiCard :color="hasError ? 'error' : undefined">
<UiCard>
<UiCardTitle>
{{ $t("cpu-provisioning") }}
<template v-if="!hasError" #right>
<template #right>
<!-- TODO: add a tooltip for the warning icon -->
<UiStatusIcon v-if="state !== 'success'" :state="state" />
</template>
</UiCardTitle>
<NoDataError v-if="hasError" />
<div v-else-if="isReady" :class="state" class="progress-item">
<div v-if="isReady" :class="state" class="progress-item">
<UiProgressBar :max-value="maxValue" :value="value" color="custom" />
<UiProgressScale :max-value="maxValue" :steps="1" unit="%" />
<UiProgressLegend :label="$t('vcpus')" :value="`${value}%`" />
@@ -23,20 +22,19 @@
</template>
</UiCardFooter>
</div>
<UiCardSpinner v-else />
<UiSpinner v-else class="spinner" />
</UiCard>
</template>
<script lang="ts" setup>
import NoDataError from "@/components/NoDataError.vue";
import UiStatusIcon from "@/components/ui/icon/UiStatusIcon.vue";
import UiProgressBar from "@/components/ui/progress/UiProgressBar.vue";
import UiProgressLegend from "@/components/ui/progress/UiProgressLegend.vue";
import UiProgressScale from "@/components/ui/progress/UiProgressScale.vue";
import UiCard from "@/components/ui/UiCard.vue";
import UiCardFooter from "@/components/ui/UiCardFooter.vue";
import UiCardSpinner from "@/components/ui/UiCardSpinner.vue";
import UiCardTitle from "@/components/ui/UiCardTitle.vue";
import UiSpinner from "@/components/ui/UiSpinner.vue";
import { percent } from "@/libs/utils";
import { useHostMetricsStore } from "@/stores/host-metrics.store";
import { useHostStore } from "@/stores/host.store";
@@ -47,19 +45,11 @@ import { computed } from "vue";
const ACTIVE_STATES = new Set(["Running", "Paused"]);
const {
hasError: hostStoreHasError,
isReady: isHostStoreReady,
runningHosts,
} = useHostStore().subscribe({
const { isReady: isHostStoreReady, runningHosts } = useHostStore().subscribe({
hostMetricsSubscription: useHostMetricsStore().subscribe(),
});
const {
hasError: vmStoreHasError,
isReady: isVmStoreReady,
records: vms,
} = useVmStore().subscribe();
const { records: vms, isReady: isVmStoreReady } = useVmStore().subscribe();
const { getByOpaqueRef: getVmMetrics, isReady: isVmMetricsStoreReady } =
useVmMetricsStore().subscribe();
@@ -94,9 +84,6 @@ const isReady = logicAnd(
isHostStoreReady,
isVmMetricsStoreReady
);
const hasError = computed(
() => hostStoreHasError.value || vmStoreHasError.value
);
</script>
<style lang="postcss" scoped>
@@ -115,4 +102,12 @@ const hasError = computed(
color: var(--footer-value-color);
}
}
.spinner {
color: var(--color-extra-blue-base);
display: flex;
margin: 2.6rem auto auto auto;
width: 40px;
height: 40px;
}
</style>

View File

@@ -2,7 +2,7 @@
<UiCard :color="hasError ? 'error' : undefined">
<UiCardTitle>{{ $t("status") }}</UiCardTitle>
<NoDataError v-if="hasError" />
<UiCardSpinner v-else-if="!isReady" />
<UiSpinner v-else-if="!isReady" class="spinner" />
<template v-else>
<PoolDashboardStatusItem
:active="activeHostsCount"
@@ -23,9 +23,9 @@
import NoDataError from "@/components/NoDataError.vue";
import PoolDashboardStatusItem from "@/components/pool/dashboard/PoolDashboardStatusItem.vue";
import UiCard from "@/components/ui/UiCard.vue";
import UiCardSpinner from "@/components/ui/UiCardSpinner.vue";
import UiCardTitle from "@/components/ui/UiCardTitle.vue";
import UiSeparator from "@/components/ui/UiSeparator.vue";
import UiSpinner from "@/components/ui/UiSpinner.vue";
import { useHostMetricsStore } from "@/stores/host-metrics.store";
import { useVmStore } from "@/stores/vm.store";
import { computed } from "vue";
@@ -57,3 +57,13 @@ const totalVmsCount = computed(() => vms.value.length);
const activeVmsCount = computed(() => runningVms.value.length);
</script>
<style lang="postcss" scoped>
.spinner {
color: var(--color-extra-blue-base);
display: flex;
margin: auto;
width: 40px;
height: 40px;
}
</style>

View File

@@ -16,7 +16,6 @@ defineProps<{
<style lang="postcss" scoped>
.ui-badge {
white-space: nowrap;
display: inline-flex;
align-items: center;
gap: 0.4rem;

View File

@@ -1,23 +0,0 @@
<template>
<div class="ui-card-spinner">
<UiSpinner class="spinner" />
</div>
</template>
<script lang="ts" setup>
import UiSpinner from "@/components/ui/UiSpinner.vue";
</script>
<style lang="postcss" scoped>
.ui-card-spinner {
display: flex;
align-items: center;
justify-content: center;
padding: 4rem 0;
}
.spinner {
color: var(--color-extra-blue-base);
font-size: 4rem;
}
</style>

View File

@@ -1,13 +1,7 @@
<template>
<div class="legend">
<template v-if="$slots.label || label">
<span class="circle" />
<div class="label-container">
<div ref="labelElement" v-tooltip="isTooltipEnabled" class="label">
<slot name="label">{{ label }}</slot>
</div>
</div>
</template>
<span class="circle" />
<slot name="label">{{ label }}</slot>
<UiBadge class="badge">
<slot name="value">{{ value }}</slot>
</UiBadge>
@@ -16,23 +10,14 @@
<script lang="ts" setup>
import UiBadge from "@/components/ui/UiBadge.vue";
import { vTooltip } from "@/directives/tooltip.directive";
import { hasEllipsis } from "@/libs/utils";
import { computed, ref } from "vue";
defineProps<{
label?: string;
value?: string;
}>();
const labelElement = ref<HTMLElement>();
const isTooltipEnabled = computed(() =>
hasEllipsis(labelElement.value, { vertical: true })
);
</script>
<style lang="postcss" scoped>
<style scoped lang="postcss">
.badge {
font-size: 0.9em;
font-weight: 700;
@@ -40,8 +25,8 @@ const isTooltipEnabled = computed(() =>
.circle {
display: inline-block;
min-width: 1rem;
min-height: 1rem;
width: 1rem;
height: 1rem;
border-radius: 0.5rem;
background-color: var(--progress-bar-color);
}
@@ -53,14 +38,4 @@ const isTooltipEnabled = computed(() =>
gap: 0.5rem;
margin: 1.6em 0;
}
.label-container {
overflow: hidden;
}
.label {
display: -webkit-box;
-webkit-line-clamp: 2;
-webkit-box-orient: vertical;
}
</style>

View File

@@ -1,37 +0,0 @@
<template>
<UiTabBar>
<RouterTab :to="{ name: 'vm.dashboard', params: { uuid } }">
{{ $t("dashboard") }}
</RouterTab>
<RouterTab :to="{ name: 'vm.console', params: { uuid } }">
{{ $t("console") }}
</RouterTab>
<RouterTab :to="{ name: 'vm.alarms', params: { uuid } }">
{{ $t("alarms") }}
</RouterTab>
<RouterTab :to="{ name: 'vm.stats', params: { uuid } }">
{{ $t("stats") }}
</RouterTab>
<RouterTab :to="{ name: 'vm.system', params: { uuid } }">
{{ $t("system") }}
</RouterTab>
<RouterTab :to="{ name: 'vm.network', params: { uuid } }">
{{ $t("network") }}
</RouterTab>
<RouterTab :to="{ name: 'vm.storage', params: { uuid } }">
{{ $t("storage") }}
</RouterTab>
<RouterTab :to="{ name: 'vm.tasks', params: { uuid } }">
{{ $t("tasks") }}
</RouterTab>
</UiTabBar>
</template>
<script lang="ts" setup>
import RouterTab from "@/components/RouterTab.vue";
import UiTabBar from "@/components/ui/UiTabBar.vue";
defineProps<{
uuid: string;
}>();
</script>

View File

@@ -1,71 +1,36 @@
# Tooltip Directive
By default, the tooltip will appear centered above the target element.
## Directive argument
The directive argument can be either:
- The tooltip content
- An object containing the tooltip content and/or placement: `{ content: "...", placement: "..." }` (both optional)
## Tooltip content
The tooltip content can be either:
- `false` or an empty-string to disable the tooltip
- `true` or `undefined` to enable the tooltip and extract its content from the element's innerText.
- Non-empty string to enable the tooltip and use the string as content.
## Tooltip placement
Tooltip can be placed on the following positions:
- `top`
- `top-start`
- `top-end`
- `bottom`
- `bottom-start`
- `bottom-end`
- `left`
- `left-start`
- `left-end`
- `right`
- `right-start`
- `right-end`
By default, tooltip will appear centered above the target element.
## Usage
```vue
<template>
<!-- Boolean / Undefined -->
<span v-tooltip="true"
>This content will be ellipsized by CSS but displayed entirely in the
tooltip</span
>
<span v-tooltip
>This content will be ellipsized by CSS but displayed entirely in the
tooltip</span
>
<!-- String -->
<!-- Static -->
<span v-tooltip="'Tooltip content'">Item</span>
<!-- Object -->
<!-- Dynamic -->
<span v-tooltip="myTooltipContent">Item</span>
<!-- Placement -->
<span v-tooltip="{ content: 'Foobar', placement: 'left-end' }">Item</span>
<!-- Dynamic -->
<span v-tooltip="myTooltip">Item</span>
<!-- Disabling (variable) -->
<span v-tooltip="{ content: 'Foobar', disabled: isDisabled }">Item</span>
<!-- Conditional -->
<span v-tooltip="isTooltipEnabled && 'Foobar'">Item</span>
<!-- Disabling (function) -->
<span v-tooltip="{ content: 'Foobar', disabled: isDisabledFn }">Item</span>
</template>
<script setup>
import { ref } from "vue";
import { vTooltip } from "@/directives/tooltip.directive";
const myTooltip = ref("Content"); // or ref({ content: "Content", placement: "left-end" })
const isTooltipEnabled = ref(true);
const myTooltipContent = ref("Content");
const isDisabled = ref(true);
const isDisabledFn = (target: Element) => {
// return boolean;
};
</script>
```

View File

@@ -1,36 +1,8 @@
import type { Directive } from "vue";
import type { TooltipEvents, TooltipOptions } from "@/stores/tooltip.store";
import { useTooltipStore } from "@/stores/tooltip.store";
import { isObject } from "lodash-es";
import type { Options } from "placement.js";
import type { Directive } from "vue";
type TooltipDirectiveContent = undefined | boolean | string;
type TooltipDirectiveOptions =
| TooltipDirectiveContent
| {
content?: TooltipDirectiveContent;
placement?: Options["placement"];
};
const parseOptions = (
options: TooltipDirectiveOptions,
target: HTMLElement
): TooltipOptions => {
const { placement, content } = isObject(options)
? options
: { placement: undefined, content: options };
return {
placement,
content:
content === true || content === undefined
? target.innerText.trim()
: content,
};
};
export const vTooltip: Directive<HTMLElement, TooltipDirectiveOptions> = {
export const vTooltip: Directive<HTMLElement, TooltipOptions> = {
mounted(target, binding) {
const store = useTooltipStore();
@@ -38,11 +10,11 @@ export const vTooltip: Directive<HTMLElement, TooltipDirectiveOptions> = {
? { on: "focusin", off: "focusout" }
: { on: "mouseenter", off: "mouseleave" };
store.register(target, parseOptions(binding.value, target), events);
store.register(target, binding.value, events);
},
updated(target, binding) {
const store = useTooltipStore();
store.updateOptions(target, parseOptions(binding.value, target));
store.updateOptions(target, binding.value);
},
beforeUnmount(target) {
const store = useTooltipStore();

View File

@@ -1,5 +1,6 @@
import { createI18n } from "vue-i18n";
import messages from "@intlify/unplugin-vue-i18n/messages";
import en from "@/locales/en.json";
import fr from "@/locales/fr.json";
interface Locales {
[key: string]: {
@@ -19,10 +20,13 @@ export const locales: Locales = {
},
};
export default createI18n({
export default createI18n<[typeof en], "en" | "fr">({
locale: localStorage.getItem("lang") ?? "en",
fallbackLocale: "en",
messages,
messages: {
en,
fr,
},
datetimeFormats: {
en: {
date_short: {

View File

@@ -71,20 +71,8 @@ export function parseDateTime(dateTime: string) {
return date.getTime();
}
export const hasEllipsis = (
target: Element | undefined | null,
{ vertical = false }: { vertical?: boolean } = {}
) => {
if (target == null) {
return false;
}
if (vertical) {
return target.clientHeight < target.scrollHeight;
}
return target.clientWidth < target.scrollWidth;
};
export const hasEllipsis = (target: Element | undefined | null) =>
target != undefined && target.clientWidth < target.scrollWidth;
export function percent(currentValue: number, maxValue: number, precision = 2) {
return round((currentValue / maxValue) * 100, precision);

View File

@@ -17,7 +17,6 @@
"coming-soon": "Coming soon!",
"community": "Community",
"community-name": "{name} community",
"console": "Console",
"copy": "Copy",
"cpu-provisioning": "CPU provisioning",
"cpu-usage": "CPU usage",

View File

@@ -17,7 +17,6 @@
"coming-soon": "Bientôt disponible !",
"community": "Communauté",
"community-name": "Communauté {name}",
"console": "Console",
"copy": "Copier",
"cpu-provisioning": "Provisionnement CPU",
"cpu-usage": "Utilisation CPU",

View File

@@ -1,11 +1,12 @@
import pool from "@/router/pool";
import vm from "@/router/vm";
import HomeView from "@/views/HomeView.vue";
import HostDashboardView from "@/views/host/HostDashboardView.vue";
import HostRootView from "@/views/host/HostRootView.vue";
import PageNotFoundView from "@/views/PageNotFoundView.vue";
import SettingsView from "@/views/settings/SettingsView.vue";
import StoryView from "@/views/StoryView.vue";
import VmConsoleView from "@/views/vm/VmConsoleView.vue";
import VmRootView from "@/views/vm/VmRootView.vue";
import storiesRoutes from "virtual:stories";
import { createRouter, createWebHashHistory } from "vue-router";
@@ -30,7 +31,6 @@ const router = createRouter({
component: SettingsView,
},
pool,
vm,
{
path: "/host/:uuid",
component: HostRootView,
@@ -42,6 +42,17 @@ const router = createRouter({
},
],
},
{
path: "/vm/:uuid",
component: VmRootView,
children: [
{
path: "console",
name: "vm.console",
component: VmConsoleView,
},
],
},
{
path: "/:pathMatch(.*)*",
name: "notFound",

View File

@@ -1,47 +0,0 @@
export default {
path: "/vm/:uuid",
component: () => import("@/views/vm/VmRootView.vue"),
redirect: { name: "vm.console" },
children: [
{
path: "dashboard",
name: "vm.dashboard",
component: () => import("@/views/vm/VmDashboardView.vue"),
},
{
path: "console",
name: "vm.console",
component: () => import("@/views/vm/VmConsoleView.vue"),
},
{
path: "alarms",
name: "vm.alarms",
component: () => import("@/views/vm/VmAlarmsView.vue"),
},
{
path: "stats",
name: "vm.stats",
component: () => import("@/views/vm/VmStatsView.vue"),
},
{
path: "system",
name: "vm.system",
component: () => import("@/views/vm/VmSystemView.vue"),
},
{
path: "network",
name: "vm.network",
component: () => import("@/views/vm/VmNetworkView.vue"),
},
{
path: "storage",
name: "vm.storage",
component: () => import("@/views/vm/VmStorageView.vue"),
},
{
path: "tasks",
name: "vm.tasks",
component: () => import("@/views/vm/VmTasksView.vue"),
},
],
};

View File

@@ -4,10 +4,13 @@ import type { Options } from "placement.js";
import { type EffectScope, computed, effectScope, ref } from "vue";
import { type WindowEventName, useEventListener } from "@vueuse/core";
export type TooltipOptions = {
content: string | false;
placement: Options["placement"];
};
export type TooltipOptions =
| string
| {
content: string;
placement?: Options["placement"];
disabled?: boolean | ((target: HTMLElement) => boolean);
};
export type TooltipEvents = { on: WindowEventName; off: WindowEventName };

View File

@@ -1,11 +1,12 @@
import { useBreakpoints, useColorMode } from "@vueuse/core";
import { defineStore } from "pinia";
import { computed, ref } from "vue";
import { useRoute } from "vue-router";
export const useUiStore = defineStore("ui", () => {
const currentHostOpaqueRef = ref();
const { store: colorMode } = useColorMode({ initialValue: "dark" });
const colorMode = useColorMode({ emitAuto: true, initialValue: "dark" });
const { desktop: isDesktop } = useBreakpoints({
desktop: 1024,
@@ -13,10 +14,15 @@ export const useUiStore = defineStore("ui", () => {
const isMobile = computed(() => !isDesktop.value);
const route = useRoute();
const hasUi = computed(() => route.query.ui !== "0");
return {
colorMode,
currentHostOpaqueRef,
isDesktop,
isMobile,
hasUi,
};
});

View File

@@ -1,11 +0,0 @@
```vue-template
<FormInputGroup>
<FormInput />
<FormInput />
<FormSelect>
<option>Option 1</option>
<option>Option 2</option>
<option>Option 3</option>
</FormSelect>
</FormInputGroup>
```

View File

@@ -1,23 +0,0 @@
<template>
<ComponentStory
:params="[slot().help('Can contains multiple FormInput and FormSelect')]"
>
<FormInputGroup>
<FormInput />
<FormInput />
<FormSelect>
<option>Option 1</option>
<option>Option 2</option>
<option>Option 3</option>
</FormSelect>
</FormInputGroup>
</ComponentStory>
</template>
<script lang="ts" setup>
import ComponentStory from "@/components/component-story/ComponentStory.vue";
import FormInput from "@/components/form/FormInput.vue";
import FormInputGroup from "@/components/form/FormInputGroup.vue";
import FormSelect from "@/components/form/FormSelect.vue";
import { slot } from "@/libs/story/story-param";
</script>

View File

@@ -6,15 +6,15 @@
</UiTitle>
<TasksTable :finished-tasks="finishedTasks" :pending-tasks="pendingTasks" />
<UiCardSpinner v-if="!isReady" />
<UiSpinner v-if="!isReady" class="loader" />
</UiCard>
</template>
<script lang="ts" setup>
import TasksTable from "@/components/tasks/TasksTable.vue";
import UiCard from "@/components/ui/UiCard.vue";
import UiCardSpinner from "@/components/ui/UiCardSpinner.vue";
import UiCounter from "@/components/ui/UiCounter.vue";
import UiSpinner from "@/components/ui/UiSpinner.vue";
import UiTitle from "@/components/ui/UiTitle.vue";
import useArrayRemovedItemsHistory from "@/composables/array-removed-items-history.composable";
import useCollectionFilter from "@/composables/collection-filter.composable";
@@ -71,4 +71,11 @@ useTitle(
font-size: 1.4rem;
}
}
.loader {
color: var(--color-extra-blue-base);
display: block;
font-size: 4rem;
margin: 2rem auto 0;
}
</style>

View File

@@ -1,49 +1,32 @@
<template>
<UiCard class="home-view">
<UiCardTitle>Component Story skeleton generator</UiCardTitle>
<div class="row">
Choose a component
<FormSelect v-model="componentPath">
<div class="home-view">
<UiTitle type="h4">
This helper will generate a basic story component
</UiTitle>
<div>
Choose a component:
<select v-model="componentPath">
<option value="" />
<option v-for="path in componentPaths" :key="path">
<option v-for="(component, path) in componentsWithProps" :key="path">
{{ path }}
</option>
</FormSelect>
</select>
<div class="slots">
<label>
Slots names, separated by a comma
<input v-model="slots" />
</label>
<button @click="slots = 'default'">Default</button>
<button @click="slots = ''">Clear</button>
</div>
</div>
<div class="row">
Slot names, separated by comma
<span class="slots">
<FormInput v-model="slots" />
<UiButton @click="slots = 'default'">Default</UiButton>
<UiButton outlined @click="slots = ''">Clear</UiButton>
</span>
</div>
<p v-for="warning in warnings" :key="warning" class="row warning">
<UiIcon :icon="faWarning" />
{{ warning }}
</p>
<CodeHighlight
class="code-highlight"
v-if="componentPath"
:code="template"
/>
</UiCard>
<CodeHighlight v-if="componentPath" :code="template" />
</div>
</template>
<script lang="ts" setup>
import CodeHighlight from "@/components/CodeHighlight.vue";
import FormInput from "@/components/form/FormInput.vue";
import FormSelect from "@/components/form/FormSelect.vue";
import UiIcon from "@/components/ui/icon/UiIcon.vue";
import UiButton from "@/components/ui/UiButton.vue";
import UiCard from "@/components/ui/UiCard.vue";
import UiCardTitle from "@/components/ui/UiCardTitle.vue";
import { faWarning } from "@fortawesome/free-solid-svg-icons";
import { castArray } from "lodash-es";
import UiTitle from "@/components/ui/UiTitle.vue";
import { type ComponentOptions, computed, ref, watch } from "vue";
const componentPath = ref("");
@@ -61,14 +44,10 @@ const componentsWithProps = Object.fromEntries(
)
);
const componentPaths = Object.keys(componentsWithProps);
const lines = ref<string[]>([]);
const slots = ref("");
const quote = (str: string) => `'${str}'`;
const camel = (str: string) =>
str.replace(/-([a-z])/g, (match, letter) => letter.toUpperCase());
const paramsToImport = ref(new Set<string>());
const widgetsToImport = ref(new Set<string>());
@@ -82,15 +61,13 @@ const template = computed(() => {
.filter((name) => name !== "");
for (const slotName of slotsNames) {
paramsLines.push(
`slot(${slotName === "default" ? "" : quote(camel(slotName))})`
);
paramsLines.push(`slot(${slotName === "default" ? "" : quote(slotName)})`);
}
for (const slotName of slotsNames) {
paramsLines.push(
`setting(${quote(
`${camel(slotName)}SlotContent`
`${slotName}SlotContent`
)}).preset('Example content for ${slotName} slot').widget(text()).help('Content for ${slotName} slot')`
);
}
@@ -101,7 +78,7 @@ const template = computed(() => {
}
const paramsStr = paramsLines.join(",\n ");
const scriptEndTag = "</" + "script>";
return `<template>
<ComponentStory
v-slot="{ properties, settings }"
@@ -114,10 +91,8 @@ const template = computed(() => {
? `>\n ${slotsNames
.map((name) =>
name === "default"
? `{{ settings.${camel(name)}SlotContent }}`
: `<template #${name}>{{ settings.${camel(
name
)}SlotContent }}</template>`
? `{{ settings.${name}SlotContent }}`
: `<template #${name}>{{ settings.${name}SlotContent }}</template>`
)
.join("\n ")}
</${componentName}>`
@@ -143,30 +118,10 @@ ${
)} } from "@/libs/story/story-widget"`
: ""
}
${scriptEndTag}
${"<"}/script>
`;
});
const warnings = ref(new Set<string>());
const extractTypeFromConstructor = (
ctor: null | (new () => unknown),
propName: string
) => {
if (ctor == null) {
warnings.value.add(
`An unknown type has been detected for prop "${propName}"`
);
return "unknown";
}
if (ctor === Date) {
return "Date";
}
return ctor.name.toLocaleLowerCase();
};
watch(
componentPath,
(path: string) => {
@@ -178,7 +133,6 @@ watch(
slots.value = "";
widgetsToImport.value = new Set();
paramsToImport.value = new Set();
warnings.value = new Set();
lines.value = [];
for (const propName in component.props) {
@@ -193,14 +147,12 @@ watch(
current.push(`default(${quote(prop.default)})`);
}
if (prop.type !== undefined) {
const type = castArray(prop.type)
.map((ctor) => extractTypeFromConstructor(ctor, propName))
.join(" | ");
if (prop.type) {
const type = prop.type();
if (type !== "unknown") {
current.push(`type(${quote(type)})`);
}
current.push(
`type(${quote(Array.isArray(type) ? "array" : typeof type)})`
);
}
const isModel = component.emits?.includes(`update:${propName}`);
@@ -212,28 +164,16 @@ watch(
})`
);
if (!isModel) {
current.push("widget()");
}
current.push("widget()");
lines.value.push(current.join("."));
}
let shouldImportEvent = false;
if (component.emits) {
for (const eventName of component.emits) {
if (eventName.startsWith("update:")) {
continue;
}
shouldImportEvent = true;
lines.value.push(`event(${quote(eventName)})`);
}
}
if (shouldImportEvent) {
paramsToImport.value.add("event");
for (const eventName of component.emits) {
lines.value.push(`event("${eventName}")`);
}
}
},
{ immediate: true }
@@ -245,28 +185,11 @@ watch(
margin: 1rem;
}
.ui-title {
margin-bottom: 1rem;
}
.slots {
display: inline-flex;
align-items: stretch;
gap: 1rem;
:deep(input) {
height: 100%;
}
}
.row {
margin-bottom: 2rem;
font-size: 1.6rem;
}
.warning {
font-size: 1.6rem;
font-weight: 600;
color: var(--color-orange-world-base);
}
.code-highlight {
margin-top: 1rem;
}
</style>

View File

@@ -1,7 +0,0 @@
<template>
<PageUnderConstruction />
</template>
<script lang="ts" setup>
import PageUnderConstruction from "@/components/PageUnderConstruction.vue";
</script>

View File

@@ -1,20 +1,34 @@
<template>
<div v-if="!isReady">Loading...</div>
<div v-else-if="!isVmRunning">Console is only available for running VMs.</div>
<RemoteConsole
v-else-if="vm && vmConsole"
:location="vmConsole.location"
:is-console-available="!isOperationsPending(vm, STOP_OPERATIONS)"
/>
<template v-else-if="vm && vmConsole">
<RemoteConsole
:is-console-available="!isOperationsPending(vm, STOP_OPERATIONS)"
:location="vmConsole.location"
class="remote-console"
/>
<RouterLink
v-if="uiStore.hasUi"
:to="{ query: { ui: '0' } }"
class="open-link"
target="_blank"
>
<UiIcon :icon="faArrowUpRightFromSquare" />
Open in new window
</RouterLink>
</template>
</template>
<script lang="ts" setup>
import RemoteConsole from "@/components/RemoteConsole.vue";
import UiIcon from "@/components/ui/icon/UiIcon.vue";
import { isOperationsPending } from "@/libs/utils";
import { useConsoleStore } from "@/stores/console.store";
import { useUiStore } from "@/stores/ui.store";
import { useVmStore } from "@/stores/vm.store";
import { faArrowUpRightFromSquare } from "@fortawesome/free-solid-svg-icons";
import { computed } from "vue";
import { useRoute } from "vue-router";
import RemoteConsole from "@/components/RemoteConsole.vue";
import { useConsoleStore } from "@/stores/console.store";
import { useVmStore } from "@/stores/vm.store";
import { isOperationsPending } from "@/libs/utils";
const STOP_OPERATIONS = [
"shutdown",
@@ -26,6 +40,7 @@ const STOP_OPERATIONS = [
"suspend",
];
const uiStore = useUiStore();
const route = useRoute();
const { isReady: isVmReady, getByUuid: getVmByUuid } = useVmStore().subscribe();
@@ -49,3 +64,31 @@ const vmConsole = computed(() => {
return getConsoleByOpaqueRef(consoleOpaqueRef);
});
</script>
<style lang="postcss" scoped>
.open-link {
display: flex;
align-items: center;
gap: 1rem;
background-color: var(--color-extra-blue-base);
color: var(--color-blue-scale-500);
text-decoration: none;
padding: 1.5rem;
font-size: 1.6rem;
border-radius: 0 0 0 0.8rem;
position: absolute;
top: 8rem;
right: 0;
white-space: nowrap;
transform: translateX(calc(100% - 4.5rem));
transition: transform 0.2s ease-in-out;
&:hover {
transform: translateX(0);
}
}
.remote-console {
height: calc(100% - 8rem);
}
</style>

View File

@@ -1,7 +0,0 @@
<template>
<PageUnderConstruction />
</template>
<script lang="ts" setup>
import PageUnderConstruction from "@/components/PageUnderConstruction.vue";
</script>

View File

@@ -1,7 +0,0 @@
<template>
<PageUnderConstruction />
</template>
<script lang="ts" setup>
import PageUnderConstruction from "@/components/PageUnderConstruction.vue";
</script>

View File

@@ -1,7 +1,6 @@
<template>
<ObjectNotFoundWrapper :is-ready="isReady" :uuid-checker="hasUuid">
<VmHeader />
<VmTabBar :uuid="vm!.uuid" />
<VmHeader v-if="uiStore.hasUi" />
<RouterView />
</ObjectNotFoundWrapper>
</template>
@@ -9,16 +8,18 @@
<script lang="ts" setup>
import ObjectNotFoundWrapper from "@/components/ObjectNotFoundWrapper.vue";
import VmHeader from "@/components/vm/VmHeader.vue";
import VmTabBar from "@/components/vm/VmTabBar.vue";
import { useUiStore } from "@/stores/ui.store";
import { useVmStore } from "@/stores/vm.store";
import { whenever } from "@vueuse/core";
import { computed } from "vue";
import { watchEffect } from "vue";
import { useRoute } from "vue-router";
const route = useRoute();
const { getByUuid, hasUuid, isReady } = useVmStore().subscribe();
const uiStore = useUiStore();
const vm = computed(() => getByUuid(route.params.uuid as string));
whenever(vm, (vm) => (uiStore.currentHostOpaqueRef = vm.resident_on));
watchEffect(() => {
uiStore.currentHostOpaqueRef = getByUuid(
route.params.uuid as string
)?.resident_on;
});
</script>

View File

@@ -1,7 +0,0 @@
<template>
<PageUnderConstruction />
</template>
<script lang="ts" setup>
import PageUnderConstruction from "@/components/PageUnderConstruction.vue";
</script>

View File

@@ -1,7 +0,0 @@
<template>
<PageUnderConstruction />
</template>
<script lang="ts" setup>
import PageUnderConstruction from "@/components/PageUnderConstruction.vue";
</script>

View File

@@ -1,7 +0,0 @@
<template>
<PageUnderConstruction />
</template>
<script lang="ts" setup>
import PageUnderConstruction from "@/components/PageUnderConstruction.vue";
</script>

View File

@@ -1,7 +0,0 @@
<template>
<PageUnderConstruction />
</template>
<script lang="ts" setup>
import PageUnderConstruction from "@/components/PageUnderConstruction.vue";
</script>

View File

@@ -1,6 +1,6 @@
import vueI18n from "@intlify/unplugin-vue-i18n/vite";
import vueI18n from "@intlify/vite-plugin-vue-i18n";
import vue from "@vitejs/plugin-vue";
import { basename, resolve } from "path";
import { basename } from "path";
import { fileURLToPath, URL } from "url";
import { defineConfig } from "vite";
import pages from "vite-plugin-pages";
@@ -13,9 +13,7 @@ export default defineConfig({
},
plugins: [
vue(),
vueI18n({
include: resolve(__dirname, "src/locales/**"),
}),
vueI18n(),
pages({
moduleId: "virtual:stories",
dirs: [{ dir: "src/stories", baseRoute: "story" }],

View File

@@ -14,15 +14,6 @@ const formatId = timestamp => timestamp.toString(36).padStart(9, '0')
const noop = Function.prototype
// Create a serializable object from an error.
const serializeError = error => ({
...error, // Copy enumerable properties.
code: error.code,
message: error.message,
name: error.name,
stack: error.stack,
})
export default class Tasks extends EventEmitter {
// contains consolidated logs of all live and finished tasks
#store
@@ -36,12 +27,6 @@ export default class Tasks extends EventEmitter {
this.#tasks.delete(id)
},
onTaskUpdate: async taskLog => {
// Error objects are not JSON-ifiable by default
const { result } = taskLog
if (result instanceof Error && result.toJSON === undefined) {
taskLog.result = serializeError(result)
}
try {
const { $root } = taskLog
@@ -66,7 +51,6 @@ export default class Tasks extends EventEmitter {
for await (const taskLog of this.list({ filter: _ => _.status === 'pending' })) {
taskLog.status = 'interrupted'
taskLog.updatedAt = Date.now()
await this.#store.put(taskLog.id, taskLog)
}

View File

@@ -14,14 +14,14 @@
"url": "https://vates.fr"
},
"license": "AGPL-3.0-or-later",
"version": "0.10.1",
"version": "0.10.0",
"engines": {
"node": ">=15.6"
},
"dependencies": {
"@vates/event-listeners-manager": "^1.0.1",
"@vates/parse-duration": "^0.1.1",
"@vates/task": "^0.1.2",
"@vates/task": "^0.1.1",
"@xen-orchestra/log": "^0.6.0",
"acme-client": "^5.0.0",
"app-conf": "^2.3.0",

View File

@@ -27,7 +27,7 @@
"@babel/core": "^7.7.4",
"@babel/preset-env": "^7.7.4",
"cross-env": "^7.0.2",
"rimraf": "^5.0.1"
"rimraf": "^4.1.1"
},
"dependencies": {
"@vates/read-chunk": "^1.1.1"

Some files were not shown because too many files have changed in this diff Show More