Compare commits

..

2 Commits

Author             SHA1        Message                                                    Date
Florent BEAUCHAMP  d4ca0da8b9  fix(backups/cleanVm): vhd size computation                 2023-07-05 16:15:02 +02:00
Florent BEAUCHAMP  486d50f2f1  feat(@xen-orchestra/backups): store more detailled sizes   2023-07-05 14:38:17 +02:00
295 changed files with 4762 additions and 6809 deletions

View File

@@ -1,7 +1,9 @@
import LRU from 'lru-cache'
import Fuse from 'fuse-native'
import { VhdSynthetic } from 'vhd-lib'
import { Disposable, fromCallback } from 'promise-toolbox'
'use strict'
const LRU = require('lru-cache')
const Fuse = require('fuse-native')
const { VhdSynthetic } = require('vhd-lib')
const { Disposable, fromCallback } = require('promise-toolbox')
// build a stat object from https://github.com/fuse-friends/fuse-native/blob/master/test/fixtures/stat.js
const stat = st => ({
@@ -14,7 +16,7 @@ const stat = st => ({
gid: st.gid !== undefined ? st.gid : process.getgid(),
})
export const mount = Disposable.factory(async function* mount(handler, diskPath, mountDir) {
exports.mount = Disposable.factory(async function* mount(handler, diskPath, mountDir) {
const vhd = yield VhdSynthetic.fromVhdChain(handler, diskPath)
const cache = new LRU({
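
A minimal usage sketch of the mount helper above. Assumptions: `handler` is an @xen-orchestra/fs handler and both paths are placeholders; since mount() is a Disposable factory, it is consumed through Disposable.use so the FUSE mount is released when the callback settles.

const Disposable = require('promise-toolbox/Disposable')
const { mount } = require('@vates/fuse-vhd')

async function withMountedVhd(handler, diskPath, run) {
  // while `run` executes, the synthetic VHD chain resolved from diskPath
  // is exposed as a regular file tree under /tmp/vhd-mount
  await Disposable.use(mount(handler, diskPath, '/tmp/vhd-mount'), run)
}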

View File

@@ -1,6 +1,6 @@
{
"name": "@vates/fuse-vhd",
"version": "2.0.0",
"version": "1.0.0",
"license": "ISC",
"private": false,
"homepage": "https://github.com/vatesfr/xen-orchestra/tree/master/@vates/fuse-vhd",
@@ -15,9 +15,8 @@
"url": "https://vates.fr"
},
"engines": {
"node": ">=14"
"node": ">=10.0"
},
"main": "./index.mjs",
"dependencies": {
"fuse-native": "^2.2.6",
"lru-cache": "^7.14.0",

View File

@@ -0,0 +1,42 @@
'use strict'
exports.INIT_PASSWD = Buffer.from('NBDMAGIC') // "NBDMAGIC" ensures we're connected to an NBD server
exports.OPTS_MAGIC = Buffer.from('IHAVEOPT') // "IHAVEOPT" starts an option block
exports.NBD_OPT_REPLY_MAGIC = 1100100111001001n // magic received during negotiation
exports.NBD_OPT_EXPORT_NAME = 1
exports.NBD_OPT_ABORT = 2
exports.NBD_OPT_LIST = 3
exports.NBD_OPT_STARTTLS = 5
exports.NBD_OPT_INFO = 6
exports.NBD_OPT_GO = 7
exports.NBD_FLAG_HAS_FLAGS = 1 << 0
exports.NBD_FLAG_READ_ONLY = 1 << 1
exports.NBD_FLAG_SEND_FLUSH = 1 << 2
exports.NBD_FLAG_SEND_FUA = 1 << 3
exports.NBD_FLAG_ROTATIONAL = 1 << 4
exports.NBD_FLAG_SEND_TRIM = 1 << 5
exports.NBD_FLAG_FIXED_NEWSTYLE = 1 << 0
exports.NBD_CMD_FLAG_FUA = 1 << 0
exports.NBD_CMD_FLAG_NO_HOLE = 1 << 1
exports.NBD_CMD_FLAG_DF = 1 << 2
exports.NBD_CMD_FLAG_REQ_ONE = 1 << 3
exports.NBD_CMD_FLAG_FAST_ZERO = 1 << 4
exports.NBD_CMD_READ = 0
exports.NBD_CMD_WRITE = 1
exports.NBD_CMD_DISC = 2
exports.NBD_CMD_FLUSH = 3
exports.NBD_CMD_TRIM = 4
exports.NBD_CMD_CACHE = 5
exports.NBD_CMD_WRITE_ZEROES = 6
exports.NBD_CMD_BLOCK_STATUS = 7
exports.NBD_CMD_RESIZE = 8
exports.NBD_REQUEST_MAGIC = 0x25609513 // magic number to create a new NBD request to send to the server
exports.NBD_REPLY_MAGIC = 0x67446698 // magic number received from the server when reading response to a nbd request
exports.NBD_REPLY_ACK = 1
exports.NBD_DEFAULT_PORT = 10809
exports.NBD_DEFAULT_BLOCK_SIZE = 64 * 1024
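
A hedged sketch of how these handshake constants might be consumed (an illustration, not the client code from index.js): in the fixed-newstyle negotiation the server greets the client with the two 8-byte magics checked below.

const assert = require('node:assert')
const { readChunkStrict } = require('@vates/read-chunk')
const { INIT_PASSWD, OPTS_MAGIC } = require('./constants.js')

async function checkServerGreeting(socket) {
  // "NBDMAGIC": proves we are talking to an NBD server
  assert(INIT_PASSWD.equals(await readChunkStrict(socket, 8)))
  // "IHAVEOPT": the server is ready to receive option blocks
  assert(OPTS_MAGIC.equals(await readChunkStrict(socket, 8)))
}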

View File

@@ -1,41 +0,0 @@
export const INIT_PASSWD = Buffer.from('NBDMAGIC') // "NBDMAGIC" ensures we're connected to an NBD server
export const OPTS_MAGIC = Buffer.from('IHAVEOPT') // "IHAVEOPT" starts an option block
export const NBD_OPT_REPLY_MAGIC = 1100100111001001n // magic received during negotiation
export const NBD_OPT_EXPORT_NAME = 1
export const NBD_OPT_ABORT = 2
export const NBD_OPT_LIST = 3
export const NBD_OPT_STARTTLS = 5
export const NBD_OPT_INFO = 6
export const NBD_OPT_GO = 7
export const NBD_FLAG_HAS_FLAGS = 1 << 0
export const NBD_FLAG_READ_ONLY = 1 << 1
export const NBD_FLAG_SEND_FLUSH = 1 << 2
export const NBD_FLAG_SEND_FUA = 1 << 3
export const NBD_FLAG_ROTATIONAL = 1 << 4
export const NBD_FLAG_SEND_TRIM = 1 << 5
export const NBD_FLAG_FIXED_NEWSTYLE = 1 << 0
export const NBD_CMD_FLAG_FUA = 1 << 0
export const NBD_CMD_FLAG_NO_HOLE = 1 << 1
export const NBD_CMD_FLAG_DF = 1 << 2
export const NBD_CMD_FLAG_REQ_ONE = 1 << 3
export const NBD_CMD_FLAG_FAST_ZERO = 1 << 4
export const NBD_CMD_READ = 0
export const NBD_CMD_WRITE = 1
export const NBD_CMD_DISC = 2
export const NBD_CMD_FLUSH = 3
export const NBD_CMD_TRIM = 4
export const NBD_CMD_CACHE = 5
export const NBD_CMD_WRITE_ZEROES = 6
export const NBD_CMD_BLOCK_STATUS = 7
export const NBD_CMD_RESIZE = 8
export const NBD_REQUEST_MAGIC = 0x25609513 // magic number to create a new NBD request to send to the server
export const NBD_REPLY_MAGIC = 0x67446698 // magic number received from the server when reading response to a nbd request
export const NBD_REPLY_ACK = 1
export const NBD_DEFAULT_PORT = 10809
export const NBD_DEFAULT_BLOCK_SIZE = 64 * 1024

View File

@@ -1,11 +1,8 @@
import assert from 'node:assert'
import { Socket } from 'node:net'
import { connect } from 'node:tls'
import { fromCallback, pRetry, pDelay, pTimeout } from 'promise-toolbox'
import { readChunkStrict } from '@vates/read-chunk'
import { createLogger } from '@xen-orchestra/log'
import {
'use strict'
const assert = require('node:assert')
const { Socket } = require('node:net')
const { connect } = require('node:tls')
const {
INIT_PASSWD,
NBD_CMD_READ,
NBD_DEFAULT_BLOCK_SIZE,
@@ -20,13 +17,16 @@ import {
NBD_REQUEST_MAGIC,
OPTS_MAGIC,
NBD_CMD_DISC,
} from './constants.mjs'
} = require('./constants.js')
const { fromCallback, pRetry, pDelay, pTimeout } = require('promise-toolbox')
const { readChunkStrict } = require('@vates/read-chunk')
const { createLogger } = require('@xen-orchestra/log')
const { warn } = createLogger('vates:nbd-client')
// documentation is here: https://github.com/NetworkBlockDevice/nbd/blob/master/doc/proto.md
export default class NbdClient {
module.exports = class NbdClient {
#serverAddress
#serverCert
#serverPort

View File

@@ -13,18 +13,17 @@
"url": "https://vates.fr"
},
"license": "ISC",
"version": "2.0.0",
"version": "1.2.1",
"engines": {
"node": ">=14.0"
},
"main": "./index.mjs",
"dependencies": {
"@vates/async-each": "^1.0.0",
"@vates/read-chunk": "^1.2.0",
"@vates/read-chunk": "^1.1.1",
"@xen-orchestra/async-map": "^0.1.2",
"@xen-orchestra/log": "^0.6.0",
"promise-toolbox": "^0.21.0",
"xen-api": "^1.3.4"
"xen-api": "^1.3.3"
},
"devDependencies": {
"tap": "^16.3.0",
@@ -32,6 +31,6 @@
},
"scripts": {
"postversion": "npm publish --access public",
"test-integration": "tap --lines 97 --functions 95 --branches 74 --statements 97 tests/*.integ.mjs"
"test-integration": "tap --lines 97 --functions 95 --branches 74 --statements 97 tests/*.integ.js"
}
}

View File

@@ -1,12 +1,13 @@
import NbdClient from '../index.mjs'
import { spawn, exec } from 'node:child_process'
import fs from 'node:fs/promises'
import { test } from 'tap'
import tmp from 'tmp'
import { pFromCallback } from 'promise-toolbox'
import { Socket } from 'node:net'
import { NBD_DEFAULT_PORT } from '../constants.mjs'
import assert from 'node:assert'
'use strict'
const NbdClient = require('../index.js')
const { spawn, exec } = require('node:child_process')
const fs = require('node:fs/promises')
const { test } = require('tap')
const tmp = require('tmp')
const { pFromCallback } = require('promise-toolbox')
const { Socket } = require('node:net')
const { NBD_DEFAULT_PORT } = require('../constants.js')
const assert = require('node:assert')
const FILE_SIZE = 10 * 1024 * 1024

View File

@@ -1,3 +1,4 @@
'use strict'
/*
node-vsphere-soap
@@ -11,18 +12,17 @@
*/
import { EventEmitter } from 'events'
import axios from 'axios'
import https from 'node:https'
import util from 'util'
import soap from 'soap'
import Cookie from 'soap-cookie' // required for session persistence
const EventEmitter = require('events').EventEmitter
const axios = require('axios')
const https = require('node:https')
const util = require('util')
const soap = require('soap')
const Cookie = require('soap-cookie') // required for session persistence
// Client class
// inherits from EventEmitter
// possible events: connect, error, ready
export function Client(vCenterHostname, username, password, sslVerify) {
function Client(vCenterHostname, username, password, sslVerify) {
this.status = 'disconnected'
this.reconnectCount = 0
@@ -228,3 +228,4 @@ function _soapErrorHandler(self, emitter, command, args, err) {
}
// end
exports.Client = Client
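
A hedged usage sketch based on the constructor signature and the events listed in the comments above (hostname and credentials are placeholders):

const { Client } = require('@vates/node-vsphere-soap')

const client = new Client('vcsa.example.org', 'administrator@vsphere.local', 'secret', false)
client.once('ready', () => {
  // the SOAP session is established; vSphere API calls can now be issued
})
client.once('error', err => {
  console.error('vCenter connection failed', err)
})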

View File

@@ -1,8 +1,8 @@
{
"name": "@vates/node-vsphere-soap",
"version": "2.0.0",
"version": "1.0.0",
"description": "interface to vSphere SOAP/WSDL from node for interfacing with vCenter or ESXi, forked from node-vsphere-soap",
"main": "lib/client.mjs",
"main": "lib/client.js",
"author": "reedog117",
"repository": {
"directory": "@vates/node-vsphere-soap",
@@ -30,7 +30,7 @@
"private": false,
"homepage": "https://github.com/vatesfr/xen-orchestra/tree/master/@vates/node-vsphere-soap",
"engines": {
"node": ">=14"
"node": ">=8.10"
},
"scripts": {
"postversion": "npm publish --access public"

View File

@@ -1,11 +1,15 @@
'use strict'
// place your own credentials here for a vCenter or ESXi server
// this information will be used for connecting to a vCenter instance
// for module testing
// name the file config-test.js
export const vCenterTestCreds = {
const vCenterTestCreds = {
vCenterIP: 'vcsa',
vCenterUser: 'vcuser',
vCenterPassword: 'vcpw',
vCenter: true,
}
exports.vCenterTestCreds = vCenterTestCreds

View File

@@ -1,16 +1,18 @@
'use strict'
/*
vsphere-soap.test.js
tests for the vCenterConnectionInstance class
*/
import assert from 'assert'
import { describe, it } from 'test'
const assert = require('assert')
const { describe, it } = require('test')
import * as vc from '../lib/client.mjs'
const vc = require('../lib/client')
// eslint-disable-next-line n/no-missing-import
import { vCenterTestCreds as TestCreds } from '../config-test.mjs'
// eslint-disable-next-line n/no-missing-require
const TestCreds = require('../config-test.js').vCenterTestCreds
const VItest = new vc.Client(TestCreds.vCenterIP, TestCreds.vCenterUser, TestCreds.vCenterPassword, false)

View File

@@ -1,7 +1,6 @@
'use strict'
const assert = require('assert')
const isUtf8 = require('isutf8')
/**
* Read a chunk of data from a stream.
@@ -82,13 +81,6 @@ exports.readChunkStrict = async function readChunkStrict(stream, size) {
if (size !== undefined && chunk.length !== size) {
const error = new Error(`stream has ended with not enough data (actual: ${chunk.length}, expected: ${size})`)
// Buffer.isUtf8 is too recent for now
// @todo: replace the external package with Buffer.isUtf8 when the supported Node version reaches 18
if (chunk.length < 1024 && isUtf8(chunk)) {
error.text = chunk.toString('utf8')
}
Object.defineProperties(error, {
chunk: {
value: chunk,

View File

@@ -102,37 +102,12 @@ describe('readChunkStrict', function () {
assert.strictEqual(error.chunk, undefined)
})
it('throws if stream ends with not enough data, utf8', async () => {
it('throws if stream ends with not enough data', async () => {
const error = await rejectionOf(readChunkStrict(makeStream(['foo', 'bar']), 10))
assert(error instanceof Error)
assert.strictEqual(error.message, 'stream has ended with not enough data (actual: 6, expected: 10)')
assert.strictEqual(error.text, 'foobar')
assert.deepEqual(error.chunk, Buffer.from('foobar'))
})
it('throws if stream ends with not enough data, non utf8 ', async () => {
const source = [Buffer.alloc(10, 128), Buffer.alloc(10, 128)]
const error = await rejectionOf(readChunkStrict(makeStream(source), 30))
assert(error instanceof Error)
assert.strictEqual(error.message, 'stream has ended with not enough data (actual: 20, expected: 30)')
assert.strictEqual(error.text, undefined)
assert.deepEqual(error.chunk, Buffer.concat(source))
})
it('throws if stream ends with not enough data, utf8 , long data', async () => {
const source = Buffer.from('a'.repeat(1500))
const error = await rejectionOf(readChunkStrict(makeStream([source]), 2000))
assert(error instanceof Error)
assert.strictEqual(error.message, `stream has ended with not enough data (actual: 1500, expected: 2000)`)
assert.strictEqual(error.text, undefined)
assert.deepEqual(error.chunk, source)
})
it('succeed', async () => {
const source = Buffer.from('a'.repeat(20))
const chunk = await readChunkStrict(makeStream([source]), 10)
assert.deepEqual(source.subarray(10), chunk)
})
})
describe('skip', function () {
@@ -159,16 +134,6 @@ describe('skip', function () {
it('returns less size if stream ends', async () => {
assert.deepEqual(await skip(makeStream('foo bar'), 10), 7)
})
it('put back if it read too much', async () => {
let source = makeStream(['foo', 'bar'])
await skip(source, 1) // read part of data chunk
const chunk = (await readChunkStrict(source, 2)).toString('utf-8')
assert.strictEqual(chunk, 'oo')
source = makeStream(['foo', 'bar'])
assert.strictEqual(await skip(source, 3), 3) // read aligned with data chunk
})
})
describe('skipStrict', function () {
@@ -179,9 +144,4 @@ describe('skipStrict', function () {
assert.strictEqual(error.message, 'stream has ended with not enough data (actual: 7, expected: 10)')
assert.deepEqual(error.bytesSkipped, 7)
})
it('succeed', async () => {
const source = makeStream(['foo', 'bar', 'baz'])
const res = await skipStrict(source, 4)
assert.strictEqual(res, undefined)
})
})
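
A small sketch of the error contract exercised by these tests (the stream and the expected size are placeholders): readChunkStrict rejects when the stream ends early and attaches the partial data to the error.

const { readChunkStrict } = require('@vates/read-chunk')

async function readHeader(stream) {
  try {
    return await readChunkStrict(stream, 10)
  } catch (error) {
    // e.g. 'stream has ended with not enough data (actual: 6, expected: 10)'
    console.warn(error.message, error.chunk)
    throw error
  }
}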

View File

@@ -19,7 +19,7 @@
"type": "git",
"url": "https://github.com/vatesfr/xen-orchestra.git"
},
"version": "1.2.0",
"version": "1.1.1",
"engines": {
"node": ">=8.10"
},
@@ -33,8 +33,5 @@
},
"devDependencies": {
"test": "^3.2.1"
},
"dependencies": {
"isutf8": "^4.0.0"
}
}

View File

@@ -1,5 +1,5 @@
import { asyncMap } from '@xen-orchestra/async-map'
import { RemoteAdapter } from '@xen-orchestra/backups/RemoteAdapter.mjs'
import { RemoteAdapter } from '@xen-orchestra/backups/RemoteAdapter.js'
import { getSyncedHandler } from '@xen-orchestra/fs'
import getopts from 'getopts'
import { basename, dirname } from 'path'

View File

@@ -7,7 +7,7 @@
"bugs": "https://github.com/vatesfr/xen-orchestra/issues",
"dependencies": {
"@xen-orchestra/async-map": "^0.1.2",
"@xen-orchestra/backups": "^0.40.0",
"@xen-orchestra/backups": "^0.39.0",
"@xen-orchestra/fs": "^4.0.1",
"filenamify": "^4.1.0",
"getopts": "^2.2.5",
@@ -27,7 +27,7 @@
"scripts": {
"postversion": "npm publish --access public"
},
"version": "1.0.10",
"version": "1.0.9",
"license": "AGPL-3.0-or-later",
"author": {
"name": "Vates SAS",

View File

@@ -1,8 +1,10 @@
import { Metadata } from './_runners/Metadata.mjs'
import { VmsRemote } from './_runners/VmsRemote.mjs'
import { VmsXapi } from './_runners/VmsXapi.mjs'
'use strict'
export function createRunner(opts) {
const { Metadata } = require('./_runners/Metadata.js')
const { VmsRemote } = require('./_runners/VmsRemote.js')
const { VmsXapi } = require('./_runners/VmsXapi.js')
exports.createRunner = function createRunner(opts) {
const { type } = opts.job
switch (type) {
case 'backup':

View File

@@ -1,6 +1,8 @@
import { asyncMap } from '@xen-orchestra/async-map'
'use strict'
export class DurablePartition {
const { asyncMap } = require('@xen-orchestra/async-map')
exports.DurablePartition = class DurablePartition {
// private resource API is used exceptionally to be able to separate resource creation and release
#partitionDisposers = {}

View File

@@ -1,6 +1,8 @@
import { Task } from './Task.mjs'
'use strict'
export class HealthCheckVmBackup {
const { Task } = require('./Task')
exports.HealthCheckVmBackup = class HealthCheckVmBackup {
#restoredVm
#timeout
#xapi

View File

@@ -1,11 +1,13 @@
import assert from 'node:assert'
'use strict'
import { formatFilenameDate } from './_filenameDate.mjs'
import { importIncrementalVm } from './_incrementalVm.mjs'
import { Task } from './Task.mjs'
import { watchStreamSize } from './_watchStreamSize.mjs'
const assert = require('assert')
export class ImportVmBackup {
const { formatFilenameDate } = require('./_filenameDate.js')
const { importIncrementalVm } = require('./_incrementalVm.js')
const { Task } = require('./Task.js')
const { watchStreamSize } = require('./_watchStreamSize.js')
exports.ImportVmBackup = class ImportVmBackup {
constructor({ adapter, metadata, srUuid, xapi, settings: { newMacAddresses, mapVdisSrs = {} } = {} }) {
this._adapter = adapter
this._importIncrementalVmSettings = { newMacAddresses, mapVdisSrs }

View File

@@ -1,39 +1,43 @@
import { asyncEach } from '@vates/async-each'
import { asyncMap, asyncMapSettled } from '@xen-orchestra/async-map'
import { compose } from '@vates/compose'
import { createLogger } from '@xen-orchestra/log'
import { createVhdDirectoryFromStream, openVhd, VhdAbstract, VhdDirectory, VhdSynthetic } from 'vhd-lib'
import { decorateMethodsWith } from '@vates/decorate-with'
import { deduped } from '@vates/disposable/deduped.js'
import { dirname, join, resolve } from 'node:path'
import { execFile } from 'child_process'
import { mount } from '@vates/fuse-vhd'
import { readdir, lstat } from 'node:fs/promises'
import { synchronized } from 'decorator-synchronized'
import { v4 as uuidv4 } from 'uuid'
import { ZipFile } from 'yazl'
import Disposable from 'promise-toolbox/Disposable'
import fromCallback from 'promise-toolbox/fromCallback'
import fromEvent from 'promise-toolbox/fromEvent'
import groupBy from 'lodash/groupBy.js'
import pDefer from 'promise-toolbox/defer'
import pickBy from 'lodash/pickBy.js'
import tar from 'tar'
import zlib from 'zlib'
'use strict'
import { BACKUP_DIR } from './_getVmBackupDir.mjs'
import { cleanVm } from './_cleanVm.mjs'
import { formatFilenameDate } from './_filenameDate.mjs'
import { getTmpDir } from './_getTmpDir.mjs'
import { isMetadataFile } from './_backupType.mjs'
import { isValidXva } from './_isValidXva.mjs'
import { listPartitions, LVM_PARTITION_TYPE } from './_listPartitions.mjs'
import { lvs, pvs } from './_lvm.mjs'
import { watchStreamSize } from './_watchStreamSize.mjs'
const { asyncMap, asyncMapSettled } = require('@xen-orchestra/async-map')
const { synchronized } = require('decorator-synchronized')
const Disposable = require('promise-toolbox/Disposable')
const fromCallback = require('promise-toolbox/fromCallback')
const fromEvent = require('promise-toolbox/fromEvent')
const pDefer = require('promise-toolbox/defer')
const groupBy = require('lodash/groupBy.js')
const pickBy = require('lodash/pickBy.js')
const { dirname, join, normalize, resolve } = require('path')
const { createLogger } = require('@xen-orchestra/log')
const { createVhdDirectoryFromStream, openVhd, VhdAbstract, VhdDirectory, VhdSynthetic } = require('vhd-lib')
const { deduped } = require('@vates/disposable/deduped.js')
const { decorateMethodsWith } = require('@vates/decorate-with')
const { compose } = require('@vates/compose')
const { execFile } = require('child_process')
const { readdir, lstat } = require('fs-extra')
const { v4: uuidv4 } = require('uuid')
const { ZipFile } = require('yazl')
const zlib = require('zlib')
export const DIR_XO_CONFIG_BACKUPS = 'xo-config-backups'
const { BACKUP_DIR } = require('./_getVmBackupDir.js')
const { cleanVm } = require('./_cleanVm.js')
const { formatFilenameDate } = require('./_filenameDate.js')
const { getTmpDir } = require('./_getTmpDir.js')
const { isMetadataFile } = require('./_backupType.js')
const { isValidXva } = require('./_isValidXva.js')
const { listPartitions, LVM_PARTITION_TYPE } = require('./_listPartitions.js')
const { lvs, pvs } = require('./_lvm.js')
const { watchStreamSize } = require('./_watchStreamSize')
// @todo: this import is marked extraneous, should be fixed when the lib is published
const { mount } = require('@vates/fuse-vhd')
const { asyncEach } = require('@vates/async-each')
export const DIR_XO_POOL_METADATA_BACKUPS = 'xo-pool-metadata-backups'
const DIR_XO_CONFIG_BACKUPS = 'xo-config-backups'
exports.DIR_XO_CONFIG_BACKUPS = DIR_XO_CONFIG_BACKUPS
const DIR_XO_POOL_METADATA_BACKUPS = 'xo-pool-metadata-backups'
exports.DIR_XO_POOL_METADATA_BACKUPS = DIR_XO_POOL_METADATA_BACKUPS
const { debug, warn } = createLogger('xo:backups:RemoteAdapter')
@@ -42,23 +46,20 @@ const compareTimestamp = (a, b) => a.timestamp - b.timestamp
const noop = Function.prototype
const resolveRelativeFromFile = (file, path) => resolve('/', dirname(file), path).slice(1)
const makeRelative = path => resolve('/', path).slice(1)
const resolveSubpath = (root, path) => resolve(root, makeRelative(path))
async function addZipEntries(zip, realBasePath, virtualBasePath, relativePaths) {
for (const relativePath of relativePaths) {
const realPath = join(realBasePath, relativePath)
const virtualPath = join(virtualBasePath, relativePath)
const resolveSubpath = (root, path) => resolve(root, `.${resolve('/', path)}`)
const stats = await lstat(realPath)
const { mode, mtime } = stats
const opts = { mode, mtime }
if (stats.isDirectory()) {
zip.addEmptyDirectory(virtualPath, opts)
await addZipEntries(zip, realPath, virtualPath, await readdir(realPath))
} else if (stats.isFile()) {
zip.addFile(realPath, virtualPath, opts)
}
async function addDirectory(files, realPath, metadataPath) {
const stats = await lstat(realPath)
if (stats.isDirectory()) {
await asyncMap(await readdir(realPath), file =>
addDirectory(files, realPath + '/' + file, metadataPath + '/' + file)
)
} else if (stats.isFile()) {
files.push({
realPath,
metadataPath,
})
}
}
@@ -75,7 +76,7 @@ const debounceResourceFactory = factory =>
return this._debounceResource(factory.apply(this, arguments))
}
export class RemoteAdapter {
class RemoteAdapter {
constructor(
handler,
{ debounceResource = res => res, dirMode, vhdDirectoryCompression, useGetDiskLegacy = false } = {}
@@ -186,6 +187,17 @@ export class RemoteAdapter {
})
}
async *_usePartitionFiles(diskId, partitionId, paths) {
const path = yield this.getPartition(diskId, partitionId)
const files = []
await asyncMap(paths, file =>
addDirectory(files, resolveSubpath(path, file), normalize('./' + file).replace(/\/+$/, ''))
)
return files
}
// check if we will be allowed to merge a vhd created in this adapter
// with the vhd at path `path`
async isMergeableParent(packedParentUid, path) {
@@ -202,24 +214,15 @@ export class RemoteAdapter {
})
}
fetchPartitionFiles(diskId, partitionId, paths, format) {
fetchPartitionFiles(diskId, partitionId, paths) {
const { promise, reject, resolve } = pDefer()
Disposable.use(
async function* () {
const path = yield this.getPartition(diskId, partitionId)
let outputStream
if (format === 'tgz') {
outputStream = tar.c({ cwd: path, gzip: true }, paths.map(makeRelative))
} else if (format === 'zip') {
const zip = new ZipFile()
await addZipEntries(zip, path, '', paths.map(makeRelative))
zip.end()
;({ outputStream } = zip)
} else {
throw new Error('unsupported format ' + format)
}
const files = yield this._usePartitionFiles(diskId, partitionId, paths)
const zip = new ZipFile()
files.forEach(({ realPath, metadataPath }) => zip.addFile(realPath, metadataPath))
zip.end()
const { outputStream } = zip
resolve(outputStream)
await fromEvent(outputStream, 'end')
}.bind(this)
@@ -666,7 +669,7 @@ export class RemoteAdapter {
const handler = this._handler
if (this.useVhdDirectory()) {
const dataPath = `${dirname(path)}/data/${uuidv4()}.vhd`
const size = await createVhdDirectoryFromStream(handler, dataPath, input, {
const sizes = await createVhdDirectoryFromStream(handler, dataPath, input, {
concurrency: writeBlockConcurrency,
compression: this.#getCompressionType(),
async validator() {
@@ -675,9 +678,14 @@ export class RemoteAdapter {
},
})
await VhdAbstract.createAlias(handler, path, dataPath)
return size
return sizes
} else {
return this.outputStream(path, input, { checksum, validator })
const size = this.outputStream(path, input, { checksum, validator })
return {
compressedSize: size,
sourceSize: size,
writtenSize: size,
}
}
}
@@ -826,7 +834,11 @@ decorateMethodsWith(RemoteAdapter, {
debounceResourceFactory,
]),
_usePartitionFiles: Disposable.factory,
getDisk: compose([Disposable.factory, [deduped, diskId => [diskId]], debounceResourceFactory]),
getPartition: Disposable.factory,
})
exports.RemoteAdapter = RemoteAdapter
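
A condensed sketch of the yazl pattern used by fetchPartitionFiles above (the files array is hypothetical): entries are queued with addFile, end() is called, and the archive is consumed through outputStream.

const { ZipFile } = require('yazl')

function zipFiles(files) {
  const zip = new ZipFile()
  // each entry maps a real path on the mounted partition to its path inside the archive
  files.forEach(({ realPath, metadataPath }) => zip.addFile(realPath, metadataPath))
  zip.end()
  return zip.outputStream
}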

View File

@@ -1,9 +1,11 @@
import { join, resolve } from 'node:path/posix'
'use strict'
import { DIR_XO_POOL_METADATA_BACKUPS } from './RemoteAdapter.mjs'
import { PATH_DB_DUMP } from './_runners/_PoolMetadataBackup.mjs'
const { join, resolve } = require('node:path/posix')
export class RestoreMetadataBackup {
const { DIR_XO_POOL_METADATA_BACKUPS } = require('./RemoteAdapter.js')
const { PATH_DB_DUMP } = require('./_runners/_PoolMetadataBackup.js')
exports.RestoreMetadataBackup = class RestoreMetadataBackup {
constructor({ backupId, handler, xapi }) {
this._backupId = backupId
this._handler = handler

View File

@@ -1,5 +1,7 @@
import CancelToken from 'promise-toolbox/CancelToken'
import Zone from 'node-zone'
'use strict'
const CancelToken = require('promise-toolbox/CancelToken')
const Zone = require('node-zone')
const logAfterEnd = log => {
const error = new Error('task has already ended')
@@ -28,7 +30,7 @@ const serializeError = error =>
const $$task = Symbol('@xen-orchestra/backups/Task')
export class Task {
class Task {
static get cancelToken() {
const task = Zone.current.data[$$task]
return task !== undefined ? task.#cancelToken : CancelToken.none
@@ -149,6 +151,7 @@ export class Task {
})
}
}
exports.Task = Task
for (const method of ['info', 'warning']) {
Task[method] = (...args) => Zone.current.data[$$task]?.[method](...args)

View File

@@ -0,0 +1,6 @@
'use strict'
exports.isMetadataFile = filename => filename.endsWith('.json')
exports.isVhdFile = filename => filename.endsWith('.vhd')
exports.isXvaFile = filename => filename.endsWith('.xva')
exports.isXvaSumFile = filename => filename.endsWith('.xva.checksum')

View File

@@ -1,4 +0,0 @@
export const isMetadataFile = filename => filename.endsWith('.json')
export const isVhdFile = filename => filename.endsWith('.vhd')
export const isXvaFile = filename => filename.endsWith('.xva')
export const isXvaSumFile = filename => filename.endsWith('.xva.checksum')

View File

@@ -1,25 +1,25 @@
import { createLogger } from '@xen-orchestra/log'
import { catchGlobalErrors } from '@xen-orchestra/log/configure'
'use strict'
import Disposable from 'promise-toolbox/Disposable'
import ignoreErrors from 'promise-toolbox/ignoreErrors'
import { compose } from '@vates/compose'
import { createCachedLookup } from '@vates/cached-dns.lookup'
import { createDebounceResource } from '@vates/disposable/debounceResource.js'
import { createRunner } from './Backup.mjs'
import { decorateMethodsWith } from '@vates/decorate-with'
import { deduped } from '@vates/disposable/deduped.js'
import { getHandler } from '@xen-orchestra/fs'
import { parseDuration } from '@vates/parse-duration'
import { Xapi } from '@xen-orchestra/xapi'
const logger = require('@xen-orchestra/log').createLogger('xo:backups:worker')
import { RemoteAdapter } from './RemoteAdapter.mjs'
import { Task } from './Task.mjs'
require('@xen-orchestra/log/configure').catchGlobalErrors(logger)
createCachedLookup().patchGlobal()
require('@vates/cached-dns.lookup').createCachedLookup().patchGlobal()
const Disposable = require('promise-toolbox/Disposable')
const ignoreErrors = require('promise-toolbox/ignoreErrors')
const { compose } = require('@vates/compose')
const { createDebounceResource } = require('@vates/disposable/debounceResource.js')
const { decorateMethodsWith } = require('@vates/decorate-with')
const { deduped } = require('@vates/disposable/deduped.js')
const { getHandler } = require('@xen-orchestra/fs')
const { createRunner } = require('./Backup.js')
const { parseDuration } = require('@vates/parse-duration')
const { Xapi } = require('@xen-orchestra/xapi')
const { RemoteAdapter } = require('./RemoteAdapter.js')
const { Task } = require('./Task.js')
const logger = createLogger('xo:backups:worker')
catchGlobalErrors(logger)
const { debug } = logger
class BackupWorker {

View File

@@ -1,11 +1,13 @@
import cancelable from 'promise-toolbox/cancelable'
import CancelToken from 'promise-toolbox/CancelToken'
'use strict'
const cancelable = require('promise-toolbox/cancelable')
const CancelToken = require('promise-toolbox/CancelToken')
// Similar to `Promise.all` + `map` but pass a cancel token to the callback
//
// If any of the executions fails, the cancel token will be triggered and the
// first reason will be rejected.
export const cancelableMap = cancelable(async function cancelableMap($cancelToken, iterable, callback) {
exports.cancelableMap = cancelable(async function cancelableMap($cancelToken, iterable, callback) {
const { cancel, token } = CancelToken.source([$cancelToken])
try {
return await Promise.all(

View File

@@ -1,19 +1,19 @@
import test from 'test'
import { strict as assert } from 'node:assert'
'use strict'
import tmp from 'tmp'
import fs from 'fs-extra'
import * as uuid from 'uuid'
import { getHandler } from '@xen-orchestra/fs'
import { pFromCallback } from 'promise-toolbox'
import { RemoteAdapter } from './RemoteAdapter.mjs'
import { VHDFOOTER, VHDHEADER } from './tests.fixtures.mjs'
import { VhdFile, Constants, VhdDirectory, VhdAbstract } from 'vhd-lib'
import { checkAliases } from './_cleanVm.mjs'
import { dirname, basename } from 'node:path'
import { rimraf } from 'rimraf'
const { beforeEach, afterEach, test, describe } = require('test')
const assert = require('assert').strict
const { beforeEach, afterEach, describe } = test
const tmp = require('tmp')
const fs = require('fs-extra')
const uuid = require('uuid')
const { getHandler } = require('@xen-orchestra/fs')
const { pFromCallback } = require('promise-toolbox')
const { RemoteAdapter } = require('./RemoteAdapter')
const { VHDFOOTER, VHDHEADER } = require('./tests.fixtures.js')
const { VhdFile, Constants, VhdDirectory, VhdAbstract } = require('vhd-lib')
const { checkAliases } = require('./_cleanVm')
const { dirname, basename } = require('path')
const { rimraf } = require('rimraf')
let tempDir, adapter, handler, jobId, vdiId, basePath, relativePath
const rootPath = 'xo-vm-backups/VMUUID/'

View File

@@ -1,37 +1,28 @@
import * as UUID from 'uuid'
import sum from 'lodash/sum.js'
import { asyncMap } from '@xen-orchestra/async-map'
import { Constants, openVhd, VhdAbstract, VhdFile } from 'vhd-lib'
import { isVhdAlias, resolveVhdAlias } from 'vhd-lib/aliases.js'
import { dirname, resolve } from 'node:path'
import { isMetadataFile, isVhdFile, isXvaFile, isXvaSumFile } from './_backupType.mjs'
import { limitConcurrency } from 'limit-concurrency-decorator'
import { mergeVhdChain } from 'vhd-lib/merge.js'
import { Task } from './Task.mjs'
import { Disposable } from 'promise-toolbox'
import handlerPath from '@xen-orchestra/fs/path'
'use strict'
const sum = require('lodash/sum')
const UUID = require('uuid')
const { asyncMap } = require('@xen-orchestra/async-map')
const { Constants, openVhd, VhdAbstract } = require('vhd-lib')
const { isVhdAlias, resolveVhdAlias } = require('vhd-lib/aliases')
const { dirname, resolve } = require('path')
const { DISK_TYPES } = Constants
const { isMetadataFile, isVhdFile, isXvaFile, isXvaSumFile } = require('./_backupType.js')
const { limitConcurrency } = require('limit-concurrency-decorator')
const { mergeVhdChain } = require('vhd-lib/merge')
// checking the size of a vhd directory is costly
// 1 HTTP query per 1000 blocks
// we only check the size if all the vhds are VhdFiles
function shouldComputeVhdsSize(handler, vhds) {
if (handler.isEncrypted) {
return false
}
return vhds.every(vhd => vhd instanceof VhdFile)
}
const { Task } = require('./Task.js')
const { Disposable } = require('promise-toolbox')
const handlerPath = require('@xen-orchestra/fs/path')
const computeVhdsSize = (handler, vhdPaths) =>
Disposable.use(
vhdPaths.map(vhdPath => openVhd(handler, vhdPath)),
async vhds => {
if (shouldComputeVhdsSize(handler, vhds)) {
const sizes = await asyncMap(vhds, vhd => vhd.getSize())
return sum(sizes)
}
await Promise.all(vhds.map(vhd => vhd.readBlockAllocationTable()))
// get file size for vhdfile, computed size from bat for vhd directory
const sizes = await asyncMap(vhds, vhd => vhd.streamSize())
return sum(sizes)
}
)
@@ -116,7 +107,7 @@ const listVhds = async (handler, vmDir, logWarn) => {
return { vhds, interruptedVhds, aliases }
}
export async function checkAliases(
async function checkAliases(
aliasPaths,
targetDataRepository,
{ handler, logInfo = noop, logWarn = console.warn, remove = false }
@@ -175,9 +166,11 @@ export async function checkAliases(
})
}
exports.checkAliases = checkAliases
const defaultMergeLimiter = limitConcurrency(1)
export async function cleanVm(
exports.cleanVm = async function cleanVm(
vmDir,
{
fixMetadata,
@@ -531,11 +524,6 @@ export async function cleanVm(
const linkedVhds = Object.keys(vhds).map(key => resolve('/', vmDir, vhds[key]))
fileSystemSize = await computeVhdsSize(handler, linkedVhds)
// the size is not computed in some cases (e.g. VhdDirectory)
if (fileSystemSize === undefined) {
return
}
// don't warn if the size has changed after a merge
if (!merged && fileSystemSize !== size) {
// FIXME: figure out why it occurs so often and, once fixed, log the real problems with `logWarn`
@@ -553,6 +541,8 @@ export async function cleanVm(
// systematically update size after a merge
if ((merged || fixMetadata) && size !== fileSystemSize) {
// @todo add a cumulatedTransferSize property ?
// @todo update writtenSize, compressedSize
metadata.size = fileSystemSize
mustRegenerateCache = true
try {

View File

@@ -0,0 +1,8 @@
'use strict'
const { utcFormat, utcParse } = require('d3-time-format')
// Format a date in ISO 8601 in a safe way to be used in filenames
// (even on Windows).
exports.formatFilenameDate = utcFormat('%Y%m%dT%H%M%SZ')
exports.parseFilenameDate = utcParse('%Y%m%dT%H%M%SZ')
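
A quick illustration of the filename-safe format (the timestamp is arbitrary):

const { formatFilenameDate, parseFilenameDate } = require('./_filenameDate.js')

formatFilenameDate(new Date(Date.UTC(2023, 6, 5, 14, 38, 17))) // '20230705T143817Z'
parseFilenameDate('20230705T143817Z') // Date at the same UTC instant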

View File

@@ -1,6 +0,0 @@
import { utcFormat, utcParse } from 'd3-time-format'
// Format a date in ISO 8601 in a safe way to be used in filenames
// (even on Windows).
export const formatFilenameDate = utcFormat('%Y%m%dT%H%M%SZ')
export const parseFilenameDate = utcParse('%Y%m%dT%H%M%SZ')

View File

@@ -1,4 +1,6 @@
'use strict'
// returns all entries except the last `retention` ones
export function getOldEntries(retention, entries) {
exports.getOldEntries = function getOldEntries(retention, entries) {
return entries === undefined ? [] : retention > 0 ? entries.slice(0, -retention) : entries
}
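
A worked example of the retention logic, assuming entries are ordered oldest to newest (values are placeholders):

const { getOldEntries } = require('./_getOldEntries.js')

getOldEntries(2, ['a', 'b', 'c', 'd']) // ['a', 'b'] (the 2 newest are kept)
getOldEntries(0, ['a', 'b', 'c', 'd']) // ['a', 'b', 'c', 'd'] (retention 0 keeps nothing)
getOldEntries(2, undefined) // []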

View File

@@ -1,11 +1,13 @@
import Disposable from 'promise-toolbox/Disposable'
import { join } from 'node:path'
import { mkdir, rmdir } from 'node:fs/promises'
import { tmpdir } from 'os'
'use strict'
const Disposable = require('promise-toolbox/Disposable')
const { join } = require('path')
const { mkdir, rmdir } = require('fs-extra')
const { tmpdir } = require('os')
const MAX_ATTEMPTS = 3
export async function getTmpDir() {
exports.getTmpDir = async function getTmpDir() {
for (let i = 0; true; ++i) {
const path = join(tmpdir(), Math.random().toString(36).slice(2))
try {

View File

@@ -0,0 +1,8 @@
'use strict'
const BACKUP_DIR = 'xo-vm-backups'
exports.BACKUP_DIR = BACKUP_DIR
exports.getVmBackupDir = function getVmBackupDir(uuid) {
return `${BACKUP_DIR}/${uuid}`
}

View File

@@ -1,5 +0,0 @@
export const BACKUP_DIR = 'xo-vm-backups'
export function getVmBackupDir(uuid) {
return `${BACKUP_DIR}/${uuid}`
}

View File

@@ -1,22 +1,24 @@
import find from 'lodash/find.js'
import groupBy from 'lodash/groupBy.js'
import ignoreErrors from 'promise-toolbox/ignoreErrors'
import omit from 'lodash/omit.js'
import { asyncMap } from '@xen-orchestra/async-map'
import { CancelToken } from 'promise-toolbox'
import { compareVersions } from 'compare-versions'
import { createVhdStreamWithLength } from 'vhd-lib'
import { defer } from 'golike-defer'
'use strict'
import { cancelableMap } from './_cancelableMap.mjs'
import { Task } from './Task.mjs'
import pick from 'lodash/pick.js'
const find = require('lodash/find.js')
const groupBy = require('lodash/groupBy.js')
const ignoreErrors = require('promise-toolbox/ignoreErrors')
const omit = require('lodash/omit.js')
const { asyncMap } = require('@xen-orchestra/async-map')
const { CancelToken } = require('promise-toolbox')
const { compareVersions } = require('compare-versions')
const { createVhdStreamWithLength } = require('vhd-lib')
const { defer } = require('golike-defer')
export const TAG_BASE_DELTA = 'xo:base_delta'
const { cancelableMap } = require('./_cancelableMap.js')
const { Task } = require('./Task.js')
const pick = require('lodash/pick.js')
export const TAG_COPY_SRC = 'xo:copy_of'
const TAG_BASE_DELTA = 'xo:base_delta'
exports.TAG_BASE_DELTA = TAG_BASE_DELTA
const TAG_BACKUP_SR = 'xo:backup:sr'
const TAG_COPY_SRC = 'xo:copy_of'
exports.TAG_COPY_SRC = TAG_COPY_SRC
const ensureArray = value => (value === undefined ? [] : Array.isArray(value) ? value : [value])
const resolveUuid = async (xapi, cache, uuid, type) => {
@@ -31,7 +33,7 @@ const resolveUuid = async (xapi, cache, uuid, type) => {
return ref
}
export async function exportIncrementalVm(
exports.exportIncrementalVm = async function exportIncrementalVm(
vm,
baseVm,
{
@@ -141,7 +143,7 @@ export async function exportIncrementalVm(
)
}
export const importIncrementalVm = defer(async function importIncrementalVm(
exports.importIncrementalVm = defer(async function importIncrementalVm(
$defer,
incrementalVm,
sr,
@@ -159,10 +161,7 @@ export const importIncrementalVm = defer(async function importIncrementalVm(
if (detectBase) {
const remoteBaseVmUuid = vmRecord.other_config[TAG_BASE_DELTA]
if (remoteBaseVmUuid) {
baseVm = find(
xapi.objects.all,
obj => (obj = obj.other_config) && obj[TAG_COPY_SRC] === remoteBaseVmUuid && obj[TAG_BACKUP_SR] === sr.$id
)
baseVm = find(xapi.objects.all, obj => (obj = obj.other_config) && obj[TAG_COPY_SRC] === remoteBaseVmUuid)
if (!baseVm) {
throw new Error(`could not find the base VM (copy of ${remoteBaseVmUuid})`)

View File

@@ -1,4 +1,6 @@
import assert from 'node:assert'
'use strict'
const assert = require('assert')
const COMPRESSED_MAGIC_NUMBERS = [
// https://tools.ietf.org/html/rfc1952.html#page-5
@@ -45,7 +47,7 @@ const isValidTar = async (handler, size, fd) => {
}
// TODO: find a heuristic for compressed files
export async function isValidXva(path) {
async function isValidXva(path) {
const handler = this._handler
// size is longer when encrypted + reading part of an encrypted file is not implemented
@@ -72,5 +74,6 @@ export async function isValidXva(path) {
return true
}
}
exports.isValidXva = isValidXva
const noop = Function.prototype

View File

@@ -1,7 +1,9 @@
import fromCallback from 'promise-toolbox/fromCallback'
import { createLogger } from '@xen-orchestra/log'
import { createParser } from 'parse-pairs'
import { execFile } from 'child_process'
'use strict'
const fromCallback = require('promise-toolbox/fromCallback')
const { createLogger } = require('@xen-orchestra/log')
const { createParser } = require('parse-pairs')
const { execFile } = require('child_process')
const { debug } = createLogger('xo:backups:listPartitions')
@@ -22,7 +24,8 @@ const IGNORED_PARTITION_TYPES = {
0x82: true, // swap
}
export const LVM_PARTITION_TYPE = 0x8e
const LVM_PARTITION_TYPE = 0x8e
exports.LVM_PARTITION_TYPE = LVM_PARTITION_TYPE
const parsePartxLine = createParser({
keyTransform: key => (key === 'UUID' ? 'id' : key.toLowerCase()),
@@ -30,7 +33,7 @@ const parsePartxLine = createParser({
})
// returns an empty array in case of a non-partitioned disk
export async function listPartitions(devicePath) {
exports.listPartitions = async function listPartitions(devicePath) {
const parts = await fromCallback(execFile, 'partx', [
'--bytes',
'--output=NR,START,SIZE,NAME,UUID,TYPE',
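
A hedged usage sketch (the device path is a placeholder; it assumes the parsed `type` field is numeric, as suggested by the IGNORED_PARTITION_TYPES lookup above):

const { listPartitions, LVM_PARTITION_TYPE } = require('./_listPartitions.js')

async function listLvmPartitions(devicePath) {
  const partitions = await listPartitions(devicePath) // [] for a non-partitioned disk
  return partitions.filter(({ type }) => type === LVM_PARTITION_TYPE)
}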

View File

@@ -1,6 +1,8 @@
import fromCallback from 'promise-toolbox/fromCallback'
import { createParser } from 'parse-pairs'
import { execFile } from 'child_process'
'use strict'
const fromCallback = require('promise-toolbox/fromCallback')
const { createParser } = require('parse-pairs')
const { execFile } = require('child_process')
// ===================================================================
@@ -27,5 +29,5 @@ const makeFunction =
.map(Array.isArray(fields) ? parse : line => parse(line)[fields])
}
export const lvs = makeFunction('lvs')
export const pvs = makeFunction('pvs')
exports.lvs = makeFunction('lvs')
exports.pvs = makeFunction('pvs')

View File

@@ -1,20 +1,22 @@
import { asyncMap } from '@xen-orchestra/async-map'
import Disposable from 'promise-toolbox/Disposable'
import ignoreErrors from 'promise-toolbox/ignoreErrors'
'use strict'
import { extractIdsFromSimplePattern } from '../extractIdsFromSimplePattern.mjs'
import { PoolMetadataBackup } from './_PoolMetadataBackup.mjs'
import { XoMetadataBackup } from './_XoMetadataBackup.mjs'
import { DEFAULT_SETTINGS, Abstract } from './_Abstract.mjs'
import { runTask } from './_runTask.mjs'
import { getAdaptersByRemote } from './_getAdaptersByRemote.mjs'
const { asyncMap } = require('@xen-orchestra/async-map')
const Disposable = require('promise-toolbox/Disposable')
const ignoreErrors = require('promise-toolbox/ignoreErrors')
const { extractIdsFromSimplePattern } = require('../extractIdsFromSimplePattern.js')
const { PoolMetadataBackup } = require('./_PoolMetadataBackup.js')
const { XoMetadataBackup } = require('./_XoMetadataBackup.js')
const { DEFAULT_SETTINGS, Abstract } = require('./_Abstract.js')
const { runTask } = require('./_runTask.js')
const { getAdaptersByRemote } = require('./_getAdaptersByRemote.js')
const DEFAULT_METADATA_SETTINGS = {
retentionPoolMetadata: 0,
retentionXoMetadata: 0,
}
export const Metadata = class MetadataBackupRunner extends Abstract {
exports.Metadata = class MetadataBackupRunner extends Abstract {
_computeBaseSettings(config, job) {
const baseSettings = { ...DEFAULT_SETTINGS }
Object.assign(baseSettings, DEFAULT_METADATA_SETTINGS, config.defaultSettings, config.metadata?.defaultSettings)

View File

@@ -1,15 +1,17 @@
import { asyncMapSettled } from '@xen-orchestra/async-map'
import Disposable from 'promise-toolbox/Disposable'
import { limitConcurrency } from 'limit-concurrency-decorator'
'use strict'
import { extractIdsFromSimplePattern } from '../extractIdsFromSimplePattern.mjs'
import { Task } from '../Task.mjs'
import createStreamThrottle from './_createStreamThrottle.mjs'
import { DEFAULT_SETTINGS, Abstract } from './_Abstract.mjs'
import { runTask } from './_runTask.mjs'
import { getAdaptersByRemote } from './_getAdaptersByRemote.mjs'
import { FullRemote } from './_vmRunners/FullRemote.mjs'
import { IncrementalRemote } from './_vmRunners/IncrementalRemote.mjs'
const { asyncMapSettled } = require('@xen-orchestra/async-map')
const Disposable = require('promise-toolbox/Disposable')
const { limitConcurrency } = require('limit-concurrency-decorator')
const { extractIdsFromSimplePattern } = require('../extractIdsFromSimplePattern.js')
const { Task } = require('../Task.js')
const createStreamThrottle = require('./_createStreamThrottle.js')
const { DEFAULT_SETTINGS, Abstract } = require('./_Abstract.js')
const { runTask } = require('./_runTask.js')
const { getAdaptersByRemote } = require('./_getAdaptersByRemote.js')
const { FullRemote } = require('./_vmRunners/FullRemote.js')
const { IncrementalRemote } = require('./_vmRunners/IncrementalRemote.js')
const DEFAULT_REMOTE_VM_SETTINGS = {
concurrency: 2,
@@ -25,7 +27,7 @@ const DEFAULT_REMOTE_VM_SETTINGS = {
vmTimeout: 0,
}
export const VmsRemote = class RemoteVmsBackupRunner extends Abstract {
exports.VmsRemote = class RemoteVmsBackupRunner extends Abstract {
_computeBaseSettings(config, job) {
const baseSettings = { ...DEFAULT_SETTINGS }
Object.assign(baseSettings, DEFAULT_REMOTE_VM_SETTINGS, config.defaultSettings, config.vm?.defaultSettings)

View File

@@ -1,15 +1,17 @@
import { asyncMapSettled } from '@xen-orchestra/async-map'
import Disposable from 'promise-toolbox/Disposable'
import { limitConcurrency } from 'limit-concurrency-decorator'
'use strict'
import { extractIdsFromSimplePattern } from '../extractIdsFromSimplePattern.mjs'
import { Task } from '../Task.mjs'
import createStreamThrottle from './_createStreamThrottle.mjs'
import { DEFAULT_SETTINGS, Abstract } from './_Abstract.mjs'
import { runTask } from './_runTask.mjs'
import { getAdaptersByRemote } from './_getAdaptersByRemote.mjs'
import { IncrementalXapi } from './_vmRunners/IncrementalXapi.mjs'
import { FullXapi } from './_vmRunners/FullXapi.mjs'
const { asyncMapSettled } = require('@xen-orchestra/async-map')
const Disposable = require('promise-toolbox/Disposable')
const { limitConcurrency } = require('limit-concurrency-decorator')
const { extractIdsFromSimplePattern } = require('../extractIdsFromSimplePattern.js')
const { Task } = require('../Task.js')
const createStreamThrottle = require('./_createStreamThrottle.js')
const { DEFAULT_SETTINGS, Abstract } = require('./_Abstract.js')
const { runTask } = require('./_runTask.js')
const { getAdaptersByRemote } = require('./_getAdaptersByRemote.js')
const { IncrementalXapi } = require('./_vmRunners/IncrementalXapi.js')
const { FullXapi } = require('./_vmRunners/FullXapi.js')
const DEFAULT_XAPI_VM_SETTINGS = {
bypassVdiChainsCheck: false,
@@ -34,7 +36,7 @@ const DEFAULT_XAPI_VM_SETTINGS = {
vmTimeout: 0,
}
export const VmsXapi = class VmsXapiBackupRunner extends Abstract {
exports.VmsXapi = class VmsXapiBackupRunner extends Abstract {
_computeBaseSettings(config, job) {
const baseSettings = { ...DEFAULT_SETTINGS }
Object.assign(baseSettings, DEFAULT_XAPI_VM_SETTINGS, config.defaultSettings, config.vm?.defaultSettings)

View File

@@ -1,15 +1,17 @@
import Disposable from 'promise-toolbox/Disposable'
import pTimeout from 'promise-toolbox/timeout'
import { compileTemplate } from '@xen-orchestra/template'
import { runTask } from './_runTask.mjs'
import { RemoteTimeoutError } from './_RemoteTimeoutError.mjs'
'use strict'
export const DEFAULT_SETTINGS = {
const Disposable = require('promise-toolbox/Disposable')
const pTimeout = require('promise-toolbox/timeout')
const { compileTemplate } = require('@xen-orchestra/template')
const { runTask } = require('./_runTask.js')
const { RemoteTimeoutError } = require('./_RemoteTimeoutError.js')
exports.DEFAULT_SETTINGS = {
getRemoteTimeout: 300e3,
reportWhen: 'failure',
}
export const Abstract = class AbstractRunner {
exports.Abstract = class AbstractRunner {
constructor({ config, getAdapter, getConnectedRecord, job, schedule }) {
this._config = config
this._getRecord = getConnectedRecord

View File

@@ -1,13 +1,16 @@
import { asyncMap } from '@xen-orchestra/async-map'
'use strict'
import { DIR_XO_POOL_METADATA_BACKUPS } from '../RemoteAdapter.mjs'
import { forkStreamUnpipe } from './_forkStreamUnpipe.mjs'
import { formatFilenameDate } from '../_filenameDate.mjs'
import { Task } from '../Task.mjs'
const { asyncMap } = require('@xen-orchestra/async-map')
export const PATH_DB_DUMP = '/pool/xmldbdump'
const { DIR_XO_POOL_METADATA_BACKUPS } = require('../RemoteAdapter.js')
const { forkStreamUnpipe } = require('./_forkStreamUnpipe.js')
const { formatFilenameDate } = require('../_filenameDate.js')
const { Task } = require('../Task.js')
export class PoolMetadataBackup {
const PATH_DB_DUMP = '/pool/xmldbdump'
exports.PATH_DB_DUMP = PATH_DB_DUMP
exports.PoolMetadataBackup = class PoolMetadataBackup {
constructor({ config, job, pool, remoteAdapters, schedule, settings }) {
this._config = config
this._job = job

View File

@@ -1,6 +1,8 @@
export class RemoteTimeoutError extends Error {
'use strict'
class RemoteTimeoutError extends Error {
constructor(remoteId) {
super('timeout while getting the remote ' + remoteId)
this.remoteId = remoteId
}
}
exports.RemoteTimeoutError = RemoteTimeoutError

View File

@@ -1,11 +1,13 @@
import { asyncMap } from '@xen-orchestra/async-map'
import { join } from '@xen-orchestra/fs/path'
'use strict'
import { DIR_XO_CONFIG_BACKUPS } from '../RemoteAdapter.mjs'
import { formatFilenameDate } from '../_filenameDate.mjs'
import { Task } from '../Task.mjs'
const { asyncMap } = require('@xen-orchestra/async-map')
const { join } = require('@xen-orchestra/fs/path')
export class XoMetadataBackup {
const { DIR_XO_CONFIG_BACKUPS } = require('../RemoteAdapter.js')
const { formatFilenameDate } = require('../_filenameDate.js')
const { Task } = require('../Task.js')
exports.XoMetadataBackup = class XoMetadataBackup {
constructor({ config, job, remoteAdapters, schedule, settings }) {
this._config = config
this._job = job

View File

@@ -1,10 +1,12 @@
import { pipeline } from 'node:stream'
import { ThrottleGroup } from '@kldzj/stream-throttle'
import identity from 'lodash/identity.js'
'use strict'
const { pipeline } = require('node:stream')
const { ThrottleGroup } = require('@kldzj/stream-throttle')
const identity = require('lodash/identity.js')
const noop = Function.prototype
export default function createStreamThrottle(rate) {
module.exports = function createStreamThrottle(rate) {
if (rate === 0) {
return identity
}

View File

@@ -1,13 +1,14 @@
import { createLogger } from '@xen-orchestra/log'
import { finished, PassThrough } from 'node:stream'
'use strict'
const { debug } = createLogger('xo:backups:forkStreamUnpipe')
const { finished, PassThrough } = require('node:stream')
const { debug } = require('@xen-orchestra/log').createLogger('xo:backups:forkStreamUnpipe')
// create a new readable stream from an existing one which may be piped later
//
// in case of error in the new readable stream, it will simply be unpiped
// from the original one
export function forkStreamUnpipe(source) {
exports.forkStreamUnpipe = function forkStreamUnpipe(source) {
const { forks = 0 } = source
source.forks = forks + 1
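
A hedged usage sketch of the forking helper (the source and destination streams are placeholders): each call returns an independent readable fed by the same source, and an error in a fork only unpipes that fork from the original stream.

const { forkStreamUnpipe } = require('./_forkStreamUnpipe.js')

function tee(source, destA, destB) {
  // two independent forks of the same source, each piped to its own destination
  forkStreamUnpipe(source).pipe(destA)
  forkStreamUnpipe(source).pipe(destB)
}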

View File

@@ -1,7 +1,9 @@
export function getAdaptersByRemote(adapters) {
'use strict'
const getAdaptersByRemote = adapters => {
const adaptersByRemote = {}
adapters.forEach(({ adapter, remoteId }) => {
adaptersByRemote[remoteId] = adapter
})
return adaptersByRemote
}
exports.getAdaptersByRemote = getAdaptersByRemote

View File

@@ -0,0 +1,6 @@
'use strict'
const { Task } = require('../Task.js')
const noop = Function.prototype
const runTask = (...args) => Task.run(...args).catch(noop) // errors are handled by logs
exports.runTask = runTask

View File

@@ -1,5 +0,0 @@
import { Task } from '../Task.mjs'
const noop = Function.prototype
export const runTask = (...args) => Task.run(...args).catch(noop) // errors are handled by logs

View File

@@ -1,12 +1,14 @@
import { decorateMethodsWith } from '@vates/decorate-with'
import { defer } from 'golike-defer'
import { AbstractRemote } from './_AbstractRemote.mjs'
import { FullRemoteWriter } from '../_writers/FullRemoteWriter.mjs'
import { forkStreamUnpipe } from '../_forkStreamUnpipe.mjs'
import { watchStreamSize } from '../../_watchStreamSize.mjs'
import { Task } from '../../Task.mjs'
'use strict'
export const FullRemote = class FullRemoteVmBackupRunner extends AbstractRemote {
const { decorateMethodsWith } = require('@vates/decorate-with')
const { defer } = require('golike-defer')
const { AbstractRemote } = require('./_AbstractRemote')
const { FullRemoteWriter } = require('../_writers/FullRemoteWriter')
const { forkStreamUnpipe } = require('../_forkStreamUnpipe')
const { watchStreamSize } = require('../../_watchStreamSize')
const { Task } = require('../../Task')
class FullRemoteVmBackupRunner extends AbstractRemote {
_getRemoteWriter() {
return FullRemoteWriter
}
@@ -45,6 +47,7 @@ export const FullRemote = class FullRemoteVmBackupRunner extends AbstractRemote
}
}
decorateMethodsWith(FullRemote, {
exports.FullRemote = FullRemoteVmBackupRunner
decorateMethodsWith(FullRemoteVmBackupRunner, {
_run: defer,
})

View File

@@ -1,14 +1,16 @@
import { createLogger } from '@xen-orchestra/log'
'use strict'
import { forkStreamUnpipe } from '../_forkStreamUnpipe.mjs'
import { FullRemoteWriter } from '../_writers/FullRemoteWriter.mjs'
import { FullXapiWriter } from '../_writers/FullXapiWriter.mjs'
import { watchStreamSize } from '../../_watchStreamSize.mjs'
import { AbstractXapi } from './_AbstractXapi.mjs'
const { createLogger } = require('@xen-orchestra/log')
const { forkStreamUnpipe } = require('../_forkStreamUnpipe.js')
const { FullRemoteWriter } = require('../_writers/FullRemoteWriter.js')
const { FullXapiWriter } = require('../_writers/FullXapiWriter.js')
const { watchStreamSize } = require('../../_watchStreamSize.js')
const { AbstractXapi } = require('./_AbstractXapi.js')
const { debug } = createLogger('xo:backups:FullXapiVmBackup')
export const FullXapi = class FullXapiVmBackupRunner extends AbstractXapi {
exports.FullXapi = class FullXapiVmBackupRunner extends AbstractXapi {
_getWriters() {
return [FullRemoteWriter, FullXapiWriter]
}

View File

@@ -1,14 +1,15 @@
import { asyncEach } from '@vates/async-each'
import { decorateMethodsWith } from '@vates/decorate-with'
import { defer } from 'golike-defer'
import assert from 'node:assert'
import isVhdDifferencingDisk from 'vhd-lib/isVhdDifferencingDisk.js'
import mapValues from 'lodash/mapValues.js'
'use strict'
const assert = require('node:assert')
import { AbstractRemote } from './_AbstractRemote.mjs'
import { forkDeltaExport } from './_forkDeltaExport.mjs'
import { IncrementalRemoteWriter } from '../_writers/IncrementalRemoteWriter.mjs'
import { Task } from '../../Task.mjs'
const { decorateMethodsWith } = require('@vates/decorate-with')
const { defer } = require('golike-defer')
const { mapValues } = require('lodash')
const { Task } = require('../../Task')
const { AbstractRemote } = require('./_AbstractRemote')
const { IncrementalRemoteWriter } = require('../_writers/IncrementalRemoteWriter')
const { forkDeltaExport } = require('./_forkDeltaExport')
const isVhdDifferencingDisk = require('vhd-lib/isVhdDifferencingDisk')
const { asyncEach } = require('@vates/async-each')
class IncrementalRemoteVmBackupRunner extends AbstractRemote {
_getRemoteWriter() {
@@ -60,7 +61,7 @@ class IncrementalRemoteVmBackupRunner extends AbstractRemote {
}
}
export const IncrementalRemote = IncrementalRemoteVmBackupRunner
exports.IncrementalRemote = IncrementalRemoteVmBackupRunner
decorateMethodsWith(IncrementalRemoteVmBackupRunner, {
_run: defer,
})

View File

@@ -1,26 +1,28 @@
import { asyncEach } from '@vates/async-each'
import { asyncMap } from '@xen-orchestra/async-map'
import { createLogger } from '@xen-orchestra/log'
import { pipeline } from 'node:stream'
import findLast from 'lodash/findLast.js'
import isVhdDifferencingDisk from 'vhd-lib/isVhdDifferencingDisk.js'
import keyBy from 'lodash/keyBy.js'
import mapValues from 'lodash/mapValues.js'
import vhdStreamValidator from 'vhd-lib/vhdStreamValidator.js'
'use strict'
import { AbstractXapi } from './_AbstractXapi.mjs'
import { exportIncrementalVm } from '../../_incrementalVm.mjs'
import { forkDeltaExport } from './_forkDeltaExport.mjs'
import { IncrementalRemoteWriter } from '../_writers/IncrementalRemoteWriter.mjs'
import { IncrementalXapiWriter } from '../_writers/IncrementalXapiWriter.mjs'
import { Task } from '../../Task.mjs'
import { watchStreamSize } from '../../_watchStreamSize.mjs'
const findLast = require('lodash/findLast.js')
const keyBy = require('lodash/keyBy.js')
const mapValues = require('lodash/mapValues.js')
const vhdStreamValidator = require('vhd-lib/vhdStreamValidator.js')
const { asyncMap } = require('@xen-orchestra/async-map')
const { createLogger } = require('@xen-orchestra/log')
const { pipeline } = require('node:stream')
const { IncrementalRemoteWriter } = require('../_writers/IncrementalRemoteWriter.js')
const { IncrementalXapiWriter } = require('../_writers/IncrementalXapiWriter.js')
const { exportIncrementalVm } = require('../../_incrementalVm.js')
const { Task } = require('../../Task.js')
const { watchStreamSize } = require('../../_watchStreamSize.js')
const { AbstractXapi } = require('./_AbstractXapi.js')
const { forkDeltaExport } = require('./_forkDeltaExport.js')
const isVhdDifferencingDisk = require('vhd-lib/isVhdDifferencingDisk')
const { asyncEach } = require('@vates/async-each')
const { debug } = createLogger('xo:backups:IncrementalXapiVmBackup')
const noop = Function.prototype
export const IncrementalXapi = class IncrementalXapiVmBackupRunner extends AbstractXapi {
exports.IncrementalXapi = class IncrementalXapiVmBackupRunner extends AbstractXapi {
_getWriters() {
return [IncrementalRemoteWriter, IncrementalXapiWriter]
}

View File

@@ -1,6 +1,8 @@
import { asyncMap } from '@xen-orchestra/async-map'
import { createLogger } from '@xen-orchestra/log'
import { Task } from '../../Task.mjs'
'use strict'
const { asyncMap } = require('@xen-orchestra/async-map')
const { createLogger } = require('@xen-orchestra/log')
const { Task } = require('../../Task.js')
const { debug, warn } = createLogger('xo:backups:AbstractVmRunner')
@@ -17,7 +19,7 @@ const asyncEach = async (iterable, fn, thisArg = iterable) => {
}
}
export const Abstract = class AbstractVmBackupRunner {
exports.Abstract = class AbstractVmBackupRunner {
// calls fn for each function, warns of any errors, and throws only if there are no writers left
async _callWriters(fn, step, parallel = true) {
const writers = this._writers
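The `_callWriters` comment above sums up the writer error-handling contract. Below is a minimal sketch of that contract only, not the shipped implementation: it assumes the writers are kept in a `Set` and that a logger's `warn` is available, and it ignores the `parallel` flag visible in the signature above.

```typescript
// Hedged sketch of the contract described by the comment above: call `fn` on
// every writer, warn on individual failures, drop the failing writer, and only
// throw once no writer is left.
async function callWriters<W>(
  writers: Set<W>,
  fn: (writer: W) => Promise<void>,
  step: string,
  warn: (message: string, data?: unknown) => void
): Promise<void> {
  const errors: unknown[] = [];
  await Promise.all(
    [...writers].map(async writer => {
      try {
        await fn(writer);
      } catch (error) {
        errors.push(error);
        writers.delete(writer); // this writer is excluded from the remaining steps
        warn(`writer step "${step}" failed`, { error });
      }
    })
  );
  if (writers.size === 0) {
    // nothing left to back up to: surface the first failure
    throw errors[0] ?? new Error(`no writers available for step "${step}"`);
  }
}
```

Dropping a failed writer instead of aborting lets the backup keep going to the remaining targets, which matches the comment's "throws only if there are no writers left".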

View File

@@ -1,11 +1,11 @@
import { asyncEach } from '@vates/async-each'
import { Disposable } from 'promise-toolbox'
'use strict'
const { Abstract } = require('./_Abstract')
import { getVmBackupDir } from '../../_getVmBackupDir.mjs'
const { getVmBackupDir } = require('../../_getVmBackupDir')
const { asyncEach } = require('@vates/async-each')
const { Disposable } = require('promise-toolbox')
import { Abstract } from './_Abstract.mjs'
export const AbstractRemote = class AbstractRemoteVmBackupRunner extends Abstract {
exports.AbstractRemote = class AbstractRemoteVmBackupRunner extends Abstract {
constructor({
config,
job,

View File

@@ -1,16 +1,18 @@
import assert from 'node:assert'
import groupBy from 'lodash/groupBy.js'
import ignoreErrors from 'promise-toolbox/ignoreErrors'
import { asyncMap } from '@xen-orchestra/async-map'
import { decorateMethodsWith } from '@vates/decorate-with'
import { defer } from 'golike-defer'
import { formatDateTime } from '@xen-orchestra/xapi'
'use strict'
import { getOldEntries } from '../../_getOldEntries.mjs'
import { Task } from '../../Task.mjs'
import { Abstract } from './_Abstract.mjs'
const assert = require('assert')
const groupBy = require('lodash/groupBy.js')
const ignoreErrors = require('promise-toolbox/ignoreErrors')
const { asyncMap } = require('@xen-orchestra/async-map')
const { decorateMethodsWith } = require('@vates/decorate-with')
const { defer } = require('golike-defer')
const { formatDateTime } = require('@xen-orchestra/xapi')
export const AbstractXapi = class AbstractXapiVmBackupRunner extends Abstract {
const { getOldEntries } = require('../../_getOldEntries.js')
const { Task } = require('../../Task.js')
const { Abstract } = require('./_Abstract.js')
class AbstractXapiVmBackupRunner extends Abstract {
constructor({
config,
getSnapshotNameLabel,
@@ -269,7 +271,8 @@ export const AbstractXapi = class AbstractXapiVmBackupRunner extends Abstract {
await this._healthCheck()
}
}
exports.AbstractXapi = AbstractXapiVmBackupRunner
decorateMethodsWith(AbstractXapi, {
decorateMethodsWith(AbstractXapiVmBackupRunner, {
run: defer,
})

View File

@@ -0,0 +1,12 @@
'use strict'
const { mapValues } = require('lodash')
const { forkStreamUnpipe } = require('../_forkStreamUnpipe')
exports.forkDeltaExport = function forkDeltaExport(deltaExport) {
return Object.create(deltaExport, {
streams: {
value: mapValues(deltaExport.streams, forkStreamUnpipe),
},
})
}

View File

@@ -1,11 +0,0 @@
import mapValues from 'lodash/mapValues.js'
import { forkStreamUnpipe } from '../_forkStreamUnpipe.mjs'
export function forkDeltaExport(deltaExport) {
return Object.create(deltaExport, {
streams: {
value: mapValues(deltaExport.streams, forkStreamUnpipe),
},
})
}

View File

@@ -1,11 +1,13 @@
import { formatFilenameDate } from '../../_filenameDate.mjs'
import { getOldEntries } from '../../_getOldEntries.mjs'
import { Task } from '../../Task.mjs'
'use strict'
import { MixinRemoteWriter } from './_MixinRemoteWriter.mjs'
import { AbstractFullWriter } from './_AbstractFullWriter.mjs'
const { formatFilenameDate } = require('../../_filenameDate.js')
const { getOldEntries } = require('../../_getOldEntries.js')
const { Task } = require('../../Task.js')
export class FullRemoteWriter extends MixinRemoteWriter(AbstractFullWriter) {
const { MixinRemoteWriter } = require('./_MixinRemoteWriter.js')
const { AbstractFullWriter } = require('./_AbstractFullWriter.js')
exports.FullRemoteWriter = class FullRemoteWriter extends MixinRemoteWriter(AbstractFullWriter) {
constructor(props) {
super(props)

View File

@@ -1,16 +1,18 @@
import ignoreErrors from 'promise-toolbox/ignoreErrors'
import { asyncMap, asyncMapSettled } from '@xen-orchestra/async-map'
import { formatDateTime } from '@xen-orchestra/xapi'
'use strict'
import { formatFilenameDate } from '../../_filenameDate.mjs'
import { getOldEntries } from '../../_getOldEntries.mjs'
import { Task } from '../../Task.mjs'
const ignoreErrors = require('promise-toolbox/ignoreErrors')
const { asyncMap, asyncMapSettled } = require('@xen-orchestra/async-map')
const { formatDateTime } = require('@xen-orchestra/xapi')
import { AbstractFullWriter } from './_AbstractFullWriter.mjs'
import { MixinXapiWriter } from './_MixinXapiWriter.mjs'
import { listReplicatedVms } from './_listReplicatedVms.mjs'
const { formatFilenameDate } = require('../../_filenameDate.js')
const { getOldEntries } = require('../../_getOldEntries.js')
const { Task } = require('../../Task.js')
export class FullXapiWriter extends MixinXapiWriter(AbstractFullWriter) {
const { AbstractFullWriter } = require('./_AbstractFullWriter.js')
const { MixinXapiWriter } = require('./_MixinXapiWriter.js')
const { listReplicatedVms } = require('./_listReplicatedVms.js')
exports.FullXapiWriter = class FullXapiWriter extends MixinXapiWriter(AbstractFullWriter) {
constructor(props) {
super(props)

View File

@@ -1,27 +1,29 @@
import assert from 'node:assert'
import mapValues from 'lodash/mapValues.js'
import ignoreErrors from 'promise-toolbox/ignoreErrors'
import { asyncEach } from '@vates/async-each'
import { asyncMap } from '@xen-orchestra/async-map'
import { chainVhd, checkVhdChain, openVhd, VhdAbstract } from 'vhd-lib'
import { createLogger } from '@xen-orchestra/log'
import { decorateClass } from '@vates/decorate-with'
import { defer } from 'golike-defer'
import { dirname } from 'node:path'
'use strict'
import { formatFilenameDate } from '../../_filenameDate.mjs'
import { getOldEntries } from '../../_getOldEntries.mjs'
import { Task } from '../../Task.mjs'
const assert = require('assert')
const mapValues = require('lodash/mapValues.js')
const ignoreErrors = require('promise-toolbox/ignoreErrors')
const { asyncEach } = require('@vates/async-each')
const { asyncMap } = require('@xen-orchestra/async-map')
const { chainVhd, checkVhdChain, openVhd, VhdAbstract } = require('vhd-lib')
const { createLogger } = require('@xen-orchestra/log')
const { decorateClass } = require('@vates/decorate-with')
const { defer } = require('golike-defer')
const { dirname } = require('path')
import { MixinRemoteWriter } from './_MixinRemoteWriter.mjs'
import { AbstractIncrementalWriter } from './_AbstractIncrementalWriter.mjs'
import { checkVhd } from './_checkVhd.mjs'
import { packUuid } from './_packUuid.mjs'
import { Disposable } from 'promise-toolbox'
const { formatFilenameDate } = require('../../_filenameDate.js')
const { getOldEntries } = require('../../_getOldEntries.js')
const { Task } = require('../../Task.js')
const { MixinRemoteWriter } = require('./_MixinRemoteWriter.js')
const { AbstractIncrementalWriter } = require('./_AbstractIncrementalWriter.js')
const { checkVhd } = require('./_checkVhd.js')
const { packUuid } = require('./_packUuid.js')
const { Disposable } = require('promise-toolbox')
const { warn } = createLogger('xo:backups:DeltaBackupWriter')
export class IncrementalRemoteWriter extends MixinRemoteWriter(AbstractIncrementalWriter) {
class IncrementalRemoteWriter extends MixinRemoteWriter(AbstractIncrementalWriter) {
async checkBaseVdis(baseUuidToSrcVdi) {
const { handler } = this._adapter
const adapter = this._adapter
@@ -203,7 +205,7 @@ export class IncrementalRemoteWriter extends MixinRemoteWriter(AbstractIncrement
// TODO remove when this has been done before the export
await checkVhd(handler, parentPath)
}
// @todo : sum per property
transferSize += await adapter.writeVhd(path, deltaExport.streams[`${id}.vhd`], {
// no checksum for VHDs, because they will be invalidated by
// merges and chainings
@@ -230,12 +232,12 @@ export class IncrementalRemoteWriter extends MixinRemoteWriter(AbstractIncrement
return { size: transferSize }
})
metadataContent.size = size
metadataContent.size = size // @todo: transferSize
this._metadataFileName = await adapter.writeVmBackupMetadata(vm.uuid, metadataContent)
// TODO: run cleanup?
}
}
decorateClass(IncrementalRemoteWriter, {
exports.IncrementalRemoteWriter = decorateClass(IncrementalRemoteWriter, {
_transfer: defer,
})
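The `@todo : sum per property` and `@todo: transferSize` notes in the hunk above point at tracking transferred bytes per VHD rather than a single counter. The following is a hedged sketch of that idea, not the shipped code: `writeVhd` stands in for `adapter.writeVhd()` and is assumed to resolve with the number of bytes written.

```typescript
// Hedged sketch: aggregate a per-VHD size breakdown in addition to the global
// transfer size, so both could be stored in the backup metadata.
async function transferAll(
  streams: Record<string, NodeJS.ReadableStream>,
  writeVhd: (name: string, stream: NodeJS.ReadableStream) => Promise<number>
): Promise<{ transferSize: number; sizePerVhd: Record<string, number> }> {
  const sizePerVhd: Record<string, number> = {};
  let transferSize = 0;
  for (const [name, stream] of Object.entries(streams)) {
    const size = await writeVhd(name, stream); // bytes written for this VHD
    sizePerVhd[name] = size;
    transferSize += size;
  }
  return { transferSize, sizePerVhd };
}
```

Keeping the breakdown alongside the total would let `metadataContent` store detailed sizes without changing how the existing `size` field is consumed.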

View File

@@ -1,17 +1,19 @@
import { asyncMap, asyncMapSettled } from '@xen-orchestra/async-map'
import ignoreErrors from 'promise-toolbox/ignoreErrors'
import { formatDateTime } from '@xen-orchestra/xapi'
'use strict'
import { formatFilenameDate } from '../../_filenameDate.mjs'
import { getOldEntries } from '../../_getOldEntries.mjs'
import { importIncrementalVm, TAG_COPY_SRC } from '../../_incrementalVm.mjs'
import { Task } from '../../Task.mjs'
const { asyncMap, asyncMapSettled } = require('@xen-orchestra/async-map')
const ignoreErrors = require('promise-toolbox/ignoreErrors')
const { formatDateTime } = require('@xen-orchestra/xapi')
import { AbstractIncrementalWriter } from './_AbstractIncrementalWriter.mjs'
import { MixinXapiWriter } from './_MixinXapiWriter.mjs'
import { listReplicatedVms } from './_listReplicatedVms.mjs'
const { formatFilenameDate } = require('../../_filenameDate.js')
const { getOldEntries } = require('../../_getOldEntries.js')
const { importIncrementalVm, TAG_COPY_SRC } = require('../../_incrementalVm.js')
const { Task } = require('../../Task.js')
export class IncrementalXapiWriter extends MixinXapiWriter(AbstractIncrementalWriter) {
const { AbstractIncrementalWriter } = require('./_AbstractIncrementalWriter.js')
const { MixinXapiWriter } = require('./_MixinXapiWriter.js')
const { listReplicatedVms } = require('./_listReplicatedVms.js')
exports.IncrementalXapiWriter = class IncrementalXapiWriter extends MixinXapiWriter(AbstractIncrementalWriter) {
async checkBaseVdis(baseUuidToSrcVdi, baseVm) {
const sr = this._sr
const replicatedVm = listReplicatedVms(sr.$xapi, this._job.id, sr.uuid, this._vmUuid).find(

View File

@@ -1,6 +1,8 @@
import { AbstractWriter } from './_AbstractWriter.mjs'
'use strict'
export class AbstractFullWriter extends AbstractWriter {
const { AbstractWriter } = require('./_AbstractWriter.js')
exports.AbstractFullWriter = class AbstractFullWriter extends AbstractWriter {
async run({ timestamp, sizeContainer, stream, vm, vmSnapshot }) {
try {
return await this._run({ timestamp, sizeContainer, stream, vm, vmSnapshot })

View File

@@ -1,6 +1,8 @@
import { AbstractWriter } from './_AbstractWriter.mjs'
'use strict'
export class AbstractIncrementalWriter extends AbstractWriter {
const { AbstractWriter } = require('./_AbstractWriter.js')
exports.AbstractIncrementalWriter = class AbstractIncrementalWriter extends AbstractWriter {
checkBaseVdis(baseUuidToSrcVdi, baseVm) {
throw new Error('Not implemented')
}

View File

@@ -1,7 +1,9 @@
import { formatFilenameDate } from '../../_filenameDate.mjs'
import { getVmBackupDir } from '../../_getVmBackupDir.mjs'
'use strict'
export class AbstractWriter {
const { formatFilenameDate } = require('../../_filenameDate')
const { getVmBackupDir } = require('../../_getVmBackupDir')
exports.AbstractWriter = class AbstractWriter {
constructor({ config, healthCheckSr, job, vmUuid, scheduleId, settings }) {
this._config = config
this._healthCheckSr = healthCheckSr

View File

@@ -1,17 +1,19 @@
import { createLogger } from '@xen-orchestra/log'
import { join } from 'node:path'
import assert from 'node:assert'
'use strict'
import { formatFilenameDate } from '../../_filenameDate.mjs'
import { getVmBackupDir } from '../../_getVmBackupDir.mjs'
import { HealthCheckVmBackup } from '../../HealthCheckVmBackup.mjs'
import { ImportVmBackup } from '../../ImportVmBackup.mjs'
import { Task } from '../../Task.mjs'
import * as MergeWorker from '../../merge-worker/index.mjs'
const { createLogger } = require('@xen-orchestra/log')
const { join } = require('path')
const assert = require('assert')
const { formatFilenameDate } = require('../../_filenameDate.js')
const { getVmBackupDir } = require('../../_getVmBackupDir.js')
const { HealthCheckVmBackup } = require('../../HealthCheckVmBackup.js')
const { ImportVmBackup } = require('../../ImportVmBackup.js')
const { Task } = require('../../Task.js')
const MergeWorker = require('../../merge-worker/index.js')
const { info, warn } = createLogger('xo:backups:MixinBackupWriter')
export const MixinRemoteWriter = (BaseClass = Object) =>
exports.MixinRemoteWriter = (BaseClass = Object) =>
class MixinRemoteWriter extends BaseClass {
#lock

View File

@@ -1,10 +1,12 @@
import { extractOpaqueRef } from '@xen-orchestra/xapi'
import assert from 'node:assert/strict'
'use strict'
import { HealthCheckVmBackup } from '../../HealthCheckVmBackup.mjs'
import { Task } from '../../Task.mjs'
const { extractOpaqueRef } = require('@xen-orchestra/xapi')
export const MixinXapiWriter = (BaseClass = Object) =>
const { Task } = require('../../Task')
const assert = require('node:assert/strict')
const { HealthCheckVmBackup } = require('../../HealthCheckVmBackup')
exports.MixinXapiWriter = (BaseClass = Object) =>
class MixinXapiWriter extends BaseClass {
constructor({ sr, ...rest }) {
super(rest)

View File

@@ -0,0 +1,8 @@
'use strict'
const openVhd = require('vhd-lib').openVhd
const Disposable = require('promise-toolbox/Disposable')
exports.checkVhd = async function checkVhd(handler, path) {
await Disposable.use(openVhd(handler, path), () => {})
}

View File

@@ -1,6 +0,0 @@
import { openVhd } from 'vhd-lib'
import Disposable from 'promise-toolbox/Disposable'
export async function checkVhd(handler, path) {
await Disposable.use(openVhd(handler, path), () => {})
}

View File

@@ -1,3 +1,5 @@
'use strict'
const getReplicatedVmDatetime = vm => {
const { 'xo:backup:datetime': datetime = vm.name_label.slice(-17, -1) } = vm.other_config
return datetime
@@ -5,7 +7,7 @@ const getReplicatedVmDatetime = vm => {
const compareReplicatedVmDatetime = (a, b) => (getReplicatedVmDatetime(a) < getReplicatedVmDatetime(b) ? -1 : 1)
export function listReplicatedVms(xapi, scheduleOrJobId, srUuid, vmUuid) {
exports.listReplicatedVms = function listReplicatedVms(xapi, scheduleOrJobId, srUuid, vmUuid) {
const { all } = xapi.objects
const vms = {}
for (const key in all) {

View File

@@ -1,5 +1,7 @@
'use strict'
const PARSE_UUID_RE = /-/g
export function packUuid(uuid) {
exports.packUuid = function packUuid(uuid) {
return Buffer.from(uuid.replace(PARSE_UUID_RE, ''), 'hex')
}

View File

@@ -1,4 +1,6 @@
export function watchStreamSize(stream, container = { size: 0 }) {
'use strict'
exports.watchStreamSize = function watchStreamSize(stream, container = { size: 0 }) {
stream.on('data', data => {
container.size += data.length
})
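A minimal usage sketch for `watchStreamSize`, assuming only what the hunk shows: the size accumulates in the container object passed by the caller. The file path and relative import below are illustrative.

```typescript
// The caller owns the container, so the same object can be handed to a writer
// while the stream is being piped to the remote.
import { createReadStream } from "node:fs";
import { watchStreamSize } from "./_watchStreamSize.js"; // relative path assumed

const stream = createReadStream("/tmp/disk.vhd"); // hypothetical file
const sizeContainer = { size: 0 };
watchStreamSize(stream, sizeContainer);
stream.on("end", () => console.log("transferred bytes:", sizeContainer.size));
```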

View File

@@ -1,4 +1,6 @@
export function extractIdsFromSimplePattern(pattern) {
'use strict'
exports.extractIdsFromSimplePattern = function extractIdsFromSimplePattern(pattern) {
if (pattern === undefined) {
return []
}

View File

@@ -1,5 +1,7 @@
import mapValues from 'lodash/mapValues.js'
import { dirname } from 'node:path'
'use strict'
const mapValues = require('lodash/mapValues.js')
const { dirname } = require('path')
function formatVmBackup(backup) {
return {
@@ -29,6 +31,6 @@ function formatVmBackup(backup) {
}
// format all backups as returned by RemoteAdapter#listAllVmBackups()
export function formatVmBackups(backupsByVM) {
exports.formatVmBackups = function formatVmBackups(backupsByVM) {
return mapValues(backupsByVM, backups => backups.map(formatVmBackup))
}

View File

@@ -2,17 +2,19 @@
// eslint-disable-next-line eslint-comments/disable-enable-pair
/* eslint-disable n/shebang */
import { catchGlobalErrors } from '@xen-orchestra/log/configure'
import { createLogger } from '@xen-orchestra/log'
import { getSyncedHandler } from '@xen-orchestra/fs'
import { join } from 'node:path'
import Disposable from 'promise-toolbox/Disposable'
import min from 'lodash/min.js'
'use strict'
import { getVmBackupDir } from '../_getVmBackupDir.mjs'
import { RemoteAdapter } from '../RemoteAdapter.mjs'
const { catchGlobalErrors } = require('@xen-orchestra/log/configure')
const { createLogger } = require('@xen-orchestra/log')
const { getSyncedHandler } = require('@xen-orchestra/fs')
const { join } = require('path')
const Disposable = require('promise-toolbox/Disposable')
const min = require('lodash/min')
import { CLEAN_VM_QUEUE } from './index.mjs'
const { getVmBackupDir } = require('../_getVmBackupDir.js')
const { RemoteAdapter } = require('../RemoteAdapter.js')
const { CLEAN_VM_QUEUE } = require('./index.js')
// -------------------------------------------------------------------

View File

@@ -1,12 +1,13 @@
import { join } from 'node:path'
import { spawn } from 'child_process'
import { check } from 'proper-lockfile'
'use strict'
export const CLEAN_VM_QUEUE = '/xo-vm-backups/.queue/clean-vm/'
const { join, resolve } = require('path')
const { spawn } = require('child_process')
const { check } = require('proper-lockfile')
const CLI_PATH = new URL('cli.mjs', import.meta.url).pathname
const CLEAN_VM_QUEUE = (exports.CLEAN_VM_QUEUE = '/xo-vm-backups/.queue/clean-vm/')
export const run = async function runMergeWorker(remotePath) {
const CLI_PATH = resolve(__dirname, 'cli.js')
exports.run = async function runMergeWorker(remotePath) {
try {
// TODO: find a way to acquire the lock here and then pass it down to the worker

if (await check(join(remotePath, CLEAN_VM_QUEUE))) {

View File

@@ -8,13 +8,13 @@
"type": "git",
"url": "https://github.com/vatesfr/xen-orchestra.git"
},
"version": "0.40.0",
"version": "0.39.0",
"engines": {
"node": ">=14.18"
},
"scripts": {
"postversion": "npm publish --access public",
"test-integration": "node--test *.integ.mjs"
"test-integration": "node--test *.integ.js"
},
"dependencies": {
"@kldzj/stream-throttle": "^1.1.1",
@@ -23,8 +23,8 @@
"@vates/compose": "^2.1.0",
"@vates/decorate-with": "^2.0.0",
"@vates/disposable": "^0.1.4",
"@vates/fuse-vhd": "^2.0.0",
"@vates/nbd-client": "^2.0.0",
"@vates/fuse-vhd": "^1.0.0",
"@vates/nbd-client": "^1.2.1",
"@vates/parse-duration": "^0.1.1",
"@xen-orchestra/async-map": "^0.1.2",
"@xen-orchestra/fs": "^4.0.1",
@@ -33,6 +33,7 @@
"compare-versions": "^5.0.1",
"d3-time-format": "^3.0.0",
"decorator-synchronized": "^0.6.0",
"fs-extra": "^11.1.0",
"golike-defer": "^0.5.1",
"limit-concurrency-decorator": "^0.5.0",
"lodash": "^4.17.20",
@@ -40,21 +41,19 @@
"parse-pairs": "^2.0.0",
"promise-toolbox": "^0.21.0",
"proper-lockfile": "^4.1.2",
"tar": "^6.1.15",
"uuid": "^9.0.0",
"vhd-lib": "^4.5.0",
"xen-api": "^1.3.4",
"xen-api": "^1.3.3",
"yazl": "^2.5.1"
},
"devDependencies": {
"fs-extra": "^11.1.0",
"rimraf": "^5.0.1",
"sinon": "^15.0.1",
"test": "^3.2.1",
"tmp": "^0.2.1"
},
"peerDependencies": {
"@xen-orchestra/xapi": "^3.0.0"
"@xen-orchestra/xapi": "^2.2.1"
},
"license": "AGPL-3.0-or-later",
"author": {

View File

@@ -1,6 +1,8 @@
import { DIR_XO_CONFIG_BACKUPS, DIR_XO_POOL_METADATA_BACKUPS } from './RemoteAdapter.mjs'
'use strict'
export function parseMetadataBackupId(backupId) {
const { DIR_XO_CONFIG_BACKUPS, DIR_XO_POOL_METADATA_BACKUPS } = require('./RemoteAdapter.js')
exports.parseMetadataBackupId = function parseMetadataBackupId(backupId) {
const [dir, ...rest] = backupId.split('/')
if (dir === DIR_XO_CONFIG_BACKUPS) {
const [scheduleId, timestamp] = rest

View File

@@ -1,11 +1,14 @@
import { createLogger } from '@xen-orchestra/log'
import { fork } from 'child_process'
'use strict'
const path = require('path')
const { createLogger } = require('@xen-orchestra/log')
const { fork } = require('child_process')
const { warn } = createLogger('xo:backups:backupWorker')
const PATH = new URL('_backupWorker.mjs', import.meta.url).pathname
const PATH = path.resolve(__dirname, '_backupWorker.js')
export function runBackupWorker(params, onLog) {
exports.runBackupWorker = function runBackupWorker(params, onLog) {
return new Promise((resolve, reject) => {
const worker = fork(PATH)

View File

@@ -1,5 +1,7 @@
'use strict'
// a valid footer of a 2
export const VHDFOOTER = {
exports.VHDFOOTER = {
cookie: 'conectix',
features: 2,
fileFormatVersion: 65536,
@@ -18,7 +20,7 @@ export const VHDFOOTER = {
hidden: '',
reserved: '',
}
export const VHDHEADER = {
exports.VHDHEADER = {
cookie: 'cxsparse',
dataOffset: undefined,
tableOffset: 2048,

View File

@@ -18,7 +18,7 @@
"preferGlobal": true,
"dependencies": {
"golike-defer": "^0.5.1",
"xen-api": "^1.3.4"
"xen-api": "^1.3.3"
},
"scripts": {
"postversion": "npm publish"

View File

@@ -29,7 +29,7 @@
"@vates/async-each": "^1.0.0",
"@vates/coalesce-calls": "^0.1.0",
"@vates/decorate-with": "^2.0.0",
"@vates/read-chunk": "^1.2.0",
"@vates/read-chunk": "^1.1.1",
"@xen-orchestra/log": "^0.6.0",
"bind-property-descriptor": "^2.0.0",
"decorator-synchronized": "^0.6.0",

View File

@@ -209,7 +209,7 @@ describe('encryption', () => {
// encrypt with a non default algorithm
const encryptor = _getEncryptor('aes-256-cbc', '73c1838d7d8a6088ca2317fb5f29cd91')
await fs.writeFile(`${dir}/encryption.json`, `{"algorithm": "aes-256-gcm"}`)
await fs.writeFile(`${dir}/encryption.json`, `{"algorithm": "aes-256-gmc"}`)
await fs.writeFile(`${dir}/metadata.json`, encryptor.encryptData(`{"random": "NOTSORANDOM"}`))
// remote is now non empty : can't modify key anymore

View File

@@ -1,14 +1,6 @@
# ChangeLog
## **next**
## **0.1.2** (2023-07-28)
- Ability to export selected VMs as CSV file (PR [#6915](https://github.com/vatesfr/xen-orchestra/pull/6915))
- [Pool/VMs] Ability to export selected VMs as JSON file (PR [#6911](https://github.com/vatesfr/xen-orchestra/pull/6911))
- Add Tasks to Pool Dashboard (PR [#6713](https://github.com/vatesfr/xen-orchestra/pull/6713))
## **0.1.1** (2023-07-03)
## **0.2.0**
- Invalidate sessionId token after logout (PR [#6480](https://github.com/vatesfr/xen-orchestra/pull/6480))
- Settings page (PR [#6418](https://github.com/vatesfr/xen-orchestra/pull/6418))

View File

@@ -4,53 +4,6 @@ All collections of `XenApiRecord` are stored inside the `xapiCollectionStore`.
To retrieve a collection, invoke `useXapiCollectionStore().get(type)`.
## TL;DR - How to extend a subscription
_**Note:** Once the extension grows in complexity, it's recommended to create a dedicated file for it (e.g. `host.extension.ts` for `host.store.ts`)._
```typescript
type MyExtension1 = Extension<{ propA: string }>;
type MyExtension2 = Extension<{ propB: string }, { withB: true }>;
type Extensions = [
XenApiRecordExtension<XenApiHost>, // If needed
DeferExtension, // If needed
MyExtension1,
MyExtension2
];
export const useHostStore = defineStore("host", () => {
const hostCollection = useXapiCollectionStore().get("console");
const subscribe = <O extends Options<Extensions>>(options?: O) => {
const originalSubscription = hostCollection.subscribe(options);
const myExtension1: PartialSubscription<MyExtension1> = {
propA: "Hello",
};
const myExtension2: PartialSubscription<MyExtension2> | undefined =
options?.withB
? {
propB: "World",
}
: undefined;
return {
...originalSubscription,
...myExtension1,
...myExtension2,
};
};
return {
...hostCollection,
subscribe,
};
});
```
## Accessing a collection
In order to use a collection, you'll need to subscribe to it.
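A minimal usage sketch of that subscription step, assuming a store file that follows the `@/stores/*.store` naming used elsewhere in XO Lite and the `useConsoleStore` example defined further down in this document:

```typescript
// Hedged sketch: subscribe to a collection from a component and read its data.
// `records` (and `getByUuid`, `getByOpaqueRef`, etc.) come from the base
// XenApiRecordExtension described below.
import { useConsoleStore } from "@/stores/console.store";

const { records } = useConsoleStore().subscribe();

// `records` is reactive and fills up once the XAPI collection has been fetched.
console.log(records);
```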
@@ -87,102 +40,71 @@ export const useConsoleStore = defineStore("console", () =>
To extend the base Subscription, you'll need to override the `subscribe` method.
For that, you can use the `createSubscribe<XenApiRecord, Extensions>((options) => { /* ... */})` helper.
### Define the extensions
Subscription extensions are defined as a simple extension (`Extension<object>`) or as a conditional
extension (`Extension<object, object>`).
Subscription extensions are defined as `(object | [object, RequiredOptions])[]`.
When using a conditional extension, the corresponding `object` type will be added to the subscription only if
the options passed to `subscribe(options)` match the second argument of `Extension`.
There are two existing extensions:
- `XenApiRecordExtension<T extends XenApiRecord>`: a simple extension which defined all the base
properties and methods (`records`, `getByOpaqueRef`, `getByUuid`, etc.)
- `DeferExtension`: a conditional extension which add the `start` and `isStarted` properties if the
`immediate` option is set to `false`.
When using a tuple (`[object, RequiredOptions]`), the corresponding `object` type will be added to the subscription if
the `RequiredOptions` for that tuple are present in the options passed to `subscribe`.
```typescript
// Always present extension
type PropABExtension = Extension<{
type DefaultExtension = {
propA: string;
propB: ComputedRef<number>;
}>;
};
// Conditional extension 1
type PropCExtension = Extension<
type FirstConditionalExtension = [
{ propC: ComputedRef<string> }, // <- This signature will be added
{ optC: string } // <- if this condition is met
>;
];
// Conditional extension 2
type PropDExtension = Extension<
type SecondConditionalExtension = [
{ propD: () => void }, // <- This signature will be added
{ optD: number } // <- if this condition is met
>;
];
// Create the extensions array
type Extensions = [
XenApiRecordExtension<XenApiHost>,
DeferExtension,
PropABExtension,
PropCExtension,
PropDExtension
DefaultExtension,
FirstConditionalExtension,
SecondConditionalExtension
];
```
### Define the `subscribe` method
You can then create the `subscribe` function with the help of `Options` and `Subscription` helper types.
This will allow you to get correct completion and type checking for the `options` argument, and to get the correct return
type based on the passed options.
```typescript
const subscribe = <O extends Options<Extensions>>(options?: O) => {
return {
// ...
} as Subscription<Extensions, O>;
};
```
### Extend the subscription
The `PartialSubscription` type will help to define and check the data to add to subscription for each extension.
### Define the subscription
```typescript
export const useConsoleStore = defineStore("console", () => {
const consoleCollection = useXapiCollectionStore().get("console");
const subscribe = <O extends Options<Extensions>>(options?: O) => {
const subscribe = createSubscribe<XenApiConsole, Extensions>((options) => {
const originalSubscription = consoleCollection.subscribe(options);
const propABSubscription: PartialSubscription<PropABExtension> = {
const extendedSubscription = {
propA: "Some string",
propB: computed(() => 42),
};
const propCSubscription: PartialSubscription<PropCExtension> | undefined =
options?.optC !== undefined
? {
propC: computed(() => "Some other string"),
}
: undefined;
const propCSubscription = options?.optC !== undefined && {
propC: computed(() => "Some other string"),
};
const propDSubscription: PartialSubscription<PropDExtension> | undefined =
options?.optD !== undefined
? {
propD: () => console.log("Hello"),
}
: undefined;
const propDSubscription = options?.optD !== undefined && {
propD: () => console.log("Hello"),
};
return {
...originalSubscription,
...propABSubscription,
...extendedSubscription,
...propCSubscription,
...propDSubscription,
};
};
});
return {
...consoleCollection,
@@ -203,18 +125,20 @@ type Options = {
### Use the subscription
In each case, all the default properties (`records`, `getByUuid`, etc.) will be present.
```typescript
const store = useConsoleStore();
// No options (Contains common properties: `propA`, `propB`, `records`, `getByUuid`, etc.)
const subscription1 = store.subscribe();
// No options (propA and propB will be present)
const subscription = store.subscribe();
// optC option (Contains common properties + `propC`)
const subscription2 = store.subscribe({ optC: "Hello" });
// optC option (propA, propB and propC will be present)
const subscription = store.subscribe({ optC: "Hello" });
// optD option (Contains common properties + `propD`)
const subscription3 = store.subscribe({ optD: 12 });
// optD option (propA, propB and propD will be present)
const subscription = store.subscribe({ optD: 12 });
// optC and optD options (Contains common properties + `propC` + `propD`)
const subscription4 = store.subscribe({ optC: "Hello", optD: 12 });
// optC and optD options (propA, propB, propC and propD will be present)
const subscription = store.subscribe({ optC: "Hello", optD: 12 });
```

View File

@@ -4,7 +4,7 @@
<meta charset="UTF-8" />
<link rel="icon" href="/favicon.ico" />
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
<title>XO Lite</title>
<title>Vite App</title>
</head>
<body>
<div id="root"></div>

View File

@@ -1,6 +1,6 @@
{
"name": "@xen-orchestra/lite",
"version": "0.1.2",
"version": "0.1.0",
"scripts": {
"dev": "GIT_HEAD=$(git rev-parse HEAD) vite",
"build": "run-p type-check build-only",
@@ -17,16 +17,14 @@
"@fortawesome/vue-fontawesome": "^3.0.1",
"@novnc/novnc": "^1.3.0",
"@types/d3-time-format": "^4.0.0",
"@types/file-saver": "^2.0.5",
"@types/lodash-es": "^4.17.6",
"@types/marked": "^4.0.8",
"@vueuse/core": "^10.1.2",
"@vueuse/math": "^10.1.2",
"complex-matcher": "^0.7.1",
"complex-matcher": "^0.7.0",
"d3-time-format": "^4.1.0",
"decorator-synchronized": "^0.6.0",
"echarts": "^5.3.3",
"file-saver": "^2.0.5",
"highlight.js": "^11.6.0",
"human-format": "^1.1.0",
"iterable-backoff": "^0.1.0",
@@ -59,6 +57,7 @@
"postcss-nested": "^6.0.0",
"typescript": "^4.9.3",
"vite": "^4.3.8",
"vite-plugin-pages": "^0.29.1",
"vue-tsc": "^1.6.5"
},
"private": true,

View File

@@ -4,10 +4,10 @@
<AppLogin />
</div>
<div v-else>
<AppHeader v-if="uiStore.hasUi" />
<AppHeader />
<div style="display: flex">
<AppNavigation v-if="uiStore.hasUi" />
<main class="main" :class="{ 'no-ui': !uiStore.hasUi }">
<AppNavigation />
<main class="main">
<RouterView />
</main>
</div>
@@ -41,6 +41,8 @@ if (link == null) {
}
link.href = favicon;
document.title = "XO Lite";
const xenApiStore = useXenApiStore();
const { pool } = usePoolStore().subscribe();
useChartTheme();
@@ -90,9 +92,5 @@ whenever(
flex: 1;
height: calc(100vh - 8rem);
background-color: var(--background-color-secondary);
&.no-ui {
height: 100vh;
}
}
</style>

File diff suppressed because one or more lines are too long

[Image file changed; previous size: 63 KiB]

View File

@@ -24,7 +24,6 @@
</template>
<script lang="ts" setup>
import { usePageTitleStore } from "@/stores/page-title.store";
import { storeToRefs } from "pinia";
import { onMounted, ref, watch } from "vue";
import { useI18n } from "vue-i18n";
@@ -34,7 +33,6 @@ import UiButton from "@/components/ui/UiButton.vue";
import { useXenApiStore } from "@/stores/xen-api.store";
const { t } = useI18n();
usePageTitleStore().setTitle(t("login"));
const xenApiStore = useXenApiStore();
const { isConnecting } = storeToRefs(xenApiStore);
const login = ref("root");
@@ -64,7 +62,7 @@ async function handleSubmit() {
isInvalidPassword.value = true;
error.value = t("password-invalid");
} else {
error.value = t("error-occurred");
error.value = t("error-occured");
console.error(err);
}
}

View File

@@ -5,6 +5,7 @@
<script lang="ts" setup>
import markdown from "@/libs/markdown";
import { useEventListener } from "@vueuse/core";
import "highlight.js/styles/github-dark.css";
import { computed, type Ref, ref } from "vue";
const rootElement = ref() as Ref<HTMLElement>;

View File

@@ -3,13 +3,14 @@
</template>
<script lang="ts" setup>
import { type AcceptedLanguage, highlight } from "@/libs/highlight";
import HLJS from "highlight.js";
import { computed } from "vue";
import "highlight.js/styles/github-dark.css";
const props = withDefaults(
defineProps<{
code?: any;
lang?: AcceptedLanguage;
lang?: string;
}>(),
{ lang: "typescript" }
);
@@ -26,7 +27,7 @@ const codeAsText = computed(() => {
});
const codeAsHtml = computed(
() => highlight(codeAsText.value, { language: props.lang }).value
() => HLJS.highlight(codeAsText.value, { language: props.lang }).value
);
</script>

View File

@@ -7,12 +7,12 @@
</template>
<script
generic="T extends XenApiRecord<RawObjectType>, I extends T['uuid']"
generic="T extends XenApiRecord<string>, I extends T['uuid']"
lang="ts"
setup
>
import UiSpinner from "@/components/ui/UiSpinner.vue";
import type { RawObjectType, XenApiRecord } from "@/libs/xen-api";
import type { XenApiRecord } from "@/libs/xen-api";
import ObjectNotFoundView from "@/views/ObjectNotFoundView.vue";
import { computed } from "vue";
import { useRouter } from "vue-router";

View File

@@ -3,11 +3,11 @@
</template>
<script lang="ts" setup>
import { useXenApiStore } from "@/stores/xen-api.store";
import VncClient from "@novnc/novnc/core/rfb";
import { promiseTimeout } from "@vueuse/shared";
import { fibonacci } from "iterable-backoff";
import { computed, onBeforeUnmount, ref, watchEffect } from "vue";
import VncClient from "@novnc/novnc/core/rfb";
import { useXenApiStore } from "@/stores/xen-api.store";
import { promiseTimeout } from "@vueuse/shared";
const N_TOTAL_TRIES = 8;
const FIBONACCI_MS_ARRAY: number[] = Array.from(

View File

@@ -11,8 +11,7 @@
</template>
<script lang="ts" setup>
import { IK_TAB_BAR_DISABLED } from "@/types/injection-keys";
import { computed, inject } from "vue";
import { type ComputedRef, computed, inject } from "vue";
import type { RouteLocationRaw } from "vue-router";
import UiTab from "@/components/ui/UiTab.vue";
@@ -21,8 +20,8 @@ defineProps<{
disabled?: boolean;
}>();
const isTabBarDisabled = inject(
IK_TAB_BAR_DISABLED,
const isTabBarDisabled = inject<ComputedRef<boolean>>(
"isTabBarDisabled",
computed(() => false)
);
</script>

View File

@@ -31,7 +31,7 @@ import { faServer } from "@fortawesome/free-solid-svg-icons";
import UiModal from "@/components/ui/UiModal.vue";
import UiButton from "@/components/ui/UiButton.vue";
import { computed, ref, watch } from "vue";
import { difference } from "lodash-es";
import { difference } from "lodash";
import { useHostStore } from "@/stores/host.store";
const { records: hosts } = useHostStore().subscribe();

View File

@@ -0,0 +1,61 @@
<template>
<div class="chart-summary">
<div>
<div class="label">{{ $t("total-used") }}</div>
<div>
{{ usedPercent }}%
<br />
{{ valueFormatter(used) }}
</div>
</div>
<div>
<div class="label">{{ $t("total-free") }}</div>
<div>
{{ freePercent }}%
<br />
{{ valueFormatter(total - used) }}
</div>
</div>
</div>
</template>
<script lang="ts" setup>
import { percent } from "@/libs/utils";
import { computed, type ComputedRef, inject } from "vue";
const props = defineProps<{
total: number;
used: number;
}>();
const usedPercent = computed(() => percent(props.used, props.total));
const freePercent = computed(() =>
percent(props.total - props.used, props.total)
);
const valueFormatter = inject("valueFormatter") as ComputedRef<
(value: number) => string
>;
</script>
<style lang="postcss" scoped>
.chart-summary {
font-size: 1.4rem;
font-weight: 700;
display: flex;
margin-top: 2rem;
color: var(--color-blue-scale-200);
gap: 4rem;
& > div {
display: flex;
flex: 1;
justify-content: space-between;
}
}
.label {
text-transform: uppercase;
}
</style>

View File

@@ -6,11 +6,11 @@
</template>
<script lang="ts" setup>
import UiCard from "@/components/ui/UiCard.vue";
import type { LinearChartData, ValueFormatter } from "@/types/chart";
import { IK_CHART_VALUE_FORMATTER } from "@/types/injection-keys";
import { utcFormat } from "d3-time-format";
import type { EChartsOption } from "echarts";
import { computed, provide } from "vue";
import VueCharts from "vue-echarts";
import type { LinearChartData } from "@/types/chart";
import { LineChart } from "echarts/charts";
import {
GridComponent,
@@ -20,8 +20,8 @@ import {
} from "echarts/components";
import { use } from "echarts/core";
import { CanvasRenderer } from "echarts/renderers";
import { computed, provide } from "vue";
import VueCharts from "vue-echarts";
import type { OptionDataValue } from "echarts/types/src/util/types";
import UiCard from "@/components/ui/UiCard.vue";
const Y_AXIS_MAX_VALUE = 200;
@@ -29,23 +29,23 @@ const props = defineProps<{
title?: string;
subtitle?: string;
data: LinearChartData;
valueFormatter?: ValueFormatter;
valueFormatter?: (value: number) => string;
maxValue?: number;
}>();
const valueFormatter = computed<ValueFormatter>(() => {
const valueFormatter = computed(() => {
const formatter = props.valueFormatter;
return (value) => {
if (formatter === undefined) {
return value.toString();
return (value: OptionDataValue | OptionDataValue[]) => {
if (formatter) {
return formatter(value as number);
}
return formatter(value);
return value.toString();
};
});
provide(IK_CHART_VALUE_FORMATTER, valueFormatter);
provide("valueFormatter", valueFormatter);
use([
CanvasRenderer,
@@ -65,7 +65,7 @@ const option = computed<EChartsOption>(() => ({
data: props.data.map((series) => series.label),
},
tooltip: {
valueFormatter: (v) => valueFormatter.value(v as number),
valueFormatter: valueFormatter.value,
},
xAxis: {
type: "time",

Some files were not shown because too many files have changed in this diff.