Compare commits

..

1 Commits

Author SHA1 Message Date
Florent Beauchamp
f618fcdaf8 feat(vhd): implement encryption on vhd directory 2022-06-18 11:12:30 +02:00
146 changed files with 3480 additions and 4217 deletions

2
.gitignore vendored
View File

@@ -10,6 +10,8 @@
/packages/*/dist/
/packages/*/node_modules/
/packages/vhd-cli/src/commands/index.js
/packages/xen-api/examples/node_modules/
/packages/xen-api/plot.dat

View File

@@ -14,7 +14,7 @@ Returns a promise which rejects as soon as a call to `iteratee` throws or a promi
`opts` is an object that can contain the following options:
- `concurrency`: a number which indicates the maximum number of parallel call to `iteratee`, defaults to `10`. The value `0` means no concurrency limit.
- `concurrency`: a number which indicates the maximum number of parallel call to `iteratee`, defaults to `1`
- `signal`: an abort signal to stop the iteration
- `stopOnError`: whether to stop iteration on first error, or wait for all calls to finish and throw an `AggregateError`, defaults to `true`

View File

@@ -32,7 +32,7 @@ Returns a promise which rejects as soon as a call to `iteratee` throws or a promi
`opts` is an object that can contain the following options:
- `concurrency`: a number which indicates the maximum number of parallel call to `iteratee`, defaults to `10`. The value `0` means no concurrency limit.
- `concurrency`: a number which indicates the maximum number of parallel call to `iteratee`, defaults to `1`
- `signal`: an abort signal to stop the iteration
- `stopOnError`: whether to stop iteration on first error, or wait for all calls to finish and throw an `AggregateError`, defaults to `true`

View File

@@ -9,16 +9,7 @@ class AggregateError extends Error {
}
}
/**
* @template Item
* @param {Iterable<Item>} iterable
* @param {(item: Item, index: number, iterable: Iterable<Item>) => Promise<void>} iteratee
* @returns {Promise<void>}
*/
exports.asyncEach = function asyncEach(iterable, iteratee, { concurrency = 10, signal, stopOnError = true } = {}) {
if (concurrency === 0) {
concurrency = Infinity
}
exports.asyncEach = function asyncEach(iterable, iteratee, { concurrency = 1, signal, stopOnError = true } = {}) {
return new Promise((resolve, reject) => {
const it = (iterable[Symbol.iterator] || iterable[Symbol.asyncIterator]).call(iterable)
const errors = []

View File

@@ -36,7 +36,7 @@ describe('asyncEach', () => {
it('works', async () => {
const iteratee = jest.fn(async () => {})
await asyncEach.call(thisArg, iterable, iteratee, { concurrency: 1 })
await asyncEach.call(thisArg, iterable, iteratee)
expect(iteratee.mock.instances).toEqual(Array.from(values, () => thisArg))
expect(iteratee.mock.calls).toEqual(Array.from(values, (value, index) => [value, index, iterable]))
@@ -66,7 +66,7 @@ describe('asyncEach', () => {
}
})
expect(await rejectionOf(asyncEach(iterable, iteratee, { concurrency: 1, stopOnError: true }))).toBe(error)
expect(await rejectionOf(asyncEach(iterable, iteratee, { stopOnError: true }))).toBe(error)
expect(iteratee).toHaveBeenCalledTimes(2)
})
@@ -91,9 +91,7 @@ describe('asyncEach', () => {
}
})
await expect(asyncEach(iterable, iteratee, { concurrency: 1, signal: ac.signal })).rejects.toThrow(
'asyncEach aborted'
)
await expect(asyncEach(iterable, iteratee, { signal: ac.signal })).rejects.toThrow('asyncEach aborted')
expect(iteratee).toHaveBeenCalledTimes(2)
})
})

View File

@@ -35,7 +35,7 @@
"url": "https://vates.fr"
},
"license": "ISC",
"version": "1.0.1",
"version": "1.0.0",
"scripts": {
"postversion": "npm publish --access public",
"test": "tap --branches=72"

View File

@@ -19,7 +19,7 @@
"type": "git",
"url": "https://github.com/vatesfr/xen-orchestra.git"
},
"version": "1.0.0",
"version": "0.1.2",
"engines": {
"node": ">=8.10"
},

View File

@@ -7,8 +7,8 @@
"bugs": "https://github.com/vatesfr/xen-orchestra/issues",
"dependencies": {
"@xen-orchestra/async-map": "^0.1.2",
"@xen-orchestra/backups": "^0.27.0",
"@xen-orchestra/fs": "^1.1.0",
"@xen-orchestra/backups": "^0.25.0",
"@xen-orchestra/fs": "^1.0.3",
"filenamify": "^4.1.0",
"getopts": "^2.2.5",
"lodash": "^4.17.15",
@@ -27,7 +27,7 @@
"scripts": {
"postversion": "npm publish --access public"
},
"version": "0.7.5",
"version": "0.7.3",
"license": "AGPL-3.0-or-later",
"author": {
"name": "Vates SAS",

View File

@@ -15,7 +15,7 @@ const { deduped } = require('@vates/disposable/deduped.js')
const { decorateMethodsWith } = require('@vates/decorate-with')
const { compose } = require('@vates/compose')
const { execFile } = require('child_process')
const { readdir, lstat } = require('fs-extra')
const { readdir, stat } = require('fs-extra')
const { v4: uuidv4 } = require('uuid')
const { ZipFile } = require('yazl')
const zlib = require('zlib')
@@ -47,12 +47,13 @@ const resolveSubpath = (root, path) => resolve(root, `.${resolve('/', path)}`)
const RE_VHDI = /^vhdi(\d+)$/
async function addDirectory(files, realPath, metadataPath) {
const stats = await lstat(realPath)
if (stats.isDirectory()) {
await asyncMap(await readdir(realPath), file =>
addDirectory(files, realPath + '/' + file, metadataPath + '/' + file)
)
} else if (stats.isFile()) {
try {
const subFiles = await readdir(realPath)
await asyncMap(subFiles, file => addDirectory(files, realPath + '/' + file, metadataPath + '/' + file))
} catch (error) {
if (error == null || error.code !== 'ENOTDIR') {
throw error
}
files.push({
realPath,
metadataPath,
@@ -74,11 +75,15 @@ const debounceResourceFactory = factory =>
}
class RemoteAdapter {
constructor(handler, { debounceResource = res => res, dirMode, vhdDirectoryCompression } = {}) {
constructor(
handler,
{ debounceResource = res => res, dirMode, vhdDirectoryCompression, vhdDirectoryEncryption } = {}
) {
this._debounceResource = debounceResource
this._dirMode = dirMode
this._handler = handler
this._vhdDirectoryCompression = vhdDirectoryCompression
this._vhdDirectoryEncryption = vhdDirectoryEncryption
this._readCacheListVmBackups = synchronized.withKey()(this._readCacheListVmBackups)
}
@@ -200,7 +205,9 @@ class RemoteAdapter {
const isVhdDirectory = vhd instanceof VhdDirectory
return isVhdDirectory
? this.#useVhdDirectory() && this.#getCompressionType() === vhd.compressionType
? this.#useVhdDirectory() &&
this.#getCompressionType() === vhd.compressionType &&
this.#getEncryption() === vhd.encryption
: !this.#useVhdDirectory()
})
}
@@ -290,8 +297,12 @@ class RemoteAdapter {
return this._vhdDirectoryCompression
}
#getEncryption() {
return this._vhdDirectoryEncryption
}
#useVhdDirectory() {
return this.handler.useVhdDirectory()
return this.handler.type === 's3'
}
#useAlias() {
@@ -382,12 +393,8 @@ class RemoteAdapter {
const entriesMap = {}
await asyncMap(await readdir(path), async name => {
try {
const stats = await lstat(`${path}/${name}`)
if (stats.isDirectory()) {
entriesMap[name + '/'] = {}
} else if (stats.isFile()) {
entriesMap[name] = {}
}
const stats = await stat(`${path}/${name}`)
entriesMap[stats.isDirectory() ? `${name}/` : name] = {}
} catch (error) {
if (error == null || error.code !== 'ENOENT') {
throw error
@@ -579,6 +586,7 @@ class RemoteAdapter {
await createVhdDirectoryFromStream(handler, dataPath, input, {
concurrency: 16,
compression: this.#getCompressionType(),
encryption: this.#getEncryption(),
async validator() {
await input.task
return validator.apply(this, arguments)

View File

@@ -71,6 +71,7 @@ class BackupWorker {
debounceResource: this.debounceResource,
dirMode: this.#config.dirMode,
vhdDirectoryCompression: this.#config.vhdDirectoryCompression,
vhdDirectoryEncryption: this.#config.vhdDirectoryEncryption,
})
} finally {
await handler.forget()

View File

@@ -2,7 +2,6 @@
const assert = require('assert')
const sum = require('lodash/sum')
const UUID = require('uuid')
const { asyncMap } = require('@xen-orchestra/async-map')
const { Constants, mergeVhd, openVhd, VhdAbstract, VhdFile } = require('vhd-lib')
const { isVhdAlias, resolveVhdAlias } = require('vhd-lib/aliases')
@@ -51,7 +50,7 @@ const computeVhdsSize = (handler, vhdPaths) =>
async function mergeVhdChain(chain, { handler, logInfo, remove, merge }) {
assert(chain.length >= 2)
const chainCopy = [...chain]
const parent = chainCopy.shift()
const parent = chainCopy.pop()
const children = chainCopy
if (merge) {
@@ -60,20 +59,30 @@ async function mergeVhdChain(chain, { handler, logInfo, remove, merge }) {
let done, total
const handle = setInterval(() => {
if (done !== undefined) {
logInfo(`merging children in progress`, { children, parent, doneCount: done, totalCount: total })
logInfo(`merging children in progress`, { children, parent, doneCount: done, totalCount: total})
}
}, 10e3)
const mergedSize = await mergeVhd(handler, parent, handler, children, {
logInfo,
onProgress({ done: d, total: t }) {
done = d
total = t
},
remove,
})
clearInterval(handle)
const mergeTargetChild = children.shift()
await Promise.all([
VhdAbstract.rename(handler, parent, mergeTargetChild),
asyncMap(children, child => {
logInfo(`the VHD child is already merged`, { child })
if (remove) {
logInfo(`deleting merged VHD child`, { child })
return VhdAbstract.unlink(handler, child)
}
}),
])
return mergedSize
}
}
@@ -188,7 +197,6 @@ exports.cleanVm = async function cleanVm(
const handler = this._handler
const vhdsToJSons = new Set()
const vhdById = new Map()
const vhdParents = { __proto__: null }
const vhdChildren = { __proto__: null }
@@ -210,27 +218,6 @@ exports.cleanVm = async function cleanVm(
}
vhdChildren[parent] = path
}
// Detect VHDs with the same UUIDs
//
// Due to a bug introduced in a1bcd35e2
const duplicate = vhdById.get(UUID.stringify(vhd.footer.uuid))
let vhdKept = vhd
if (duplicate !== undefined) {
logWarn('uuid is duplicated', { uuid: UUID.stringify(vhd.footer.uuid) })
if (duplicate.containsAllDataOf(vhd)) {
logWarn(`should delete ${path}`)
vhdKept = duplicate
vhds.delete(path)
} else if (vhd.containsAllDataOf(duplicate)) {
logWarn(`should delete ${duplicate._path}`)
vhds.delete(duplicate._path)
} else {
logWarn(`same ids but different content`)
}
} else {
logInfo('not duplicate', UUID.stringify(vhd.footer.uuid), path)
}
vhdById.set(UUID.stringify(vhdKept.footer.uuid), vhdKept)
})
} catch (error) {
vhds.delete(path)
@@ -385,7 +372,7 @@ exports.cleanVm = async function cleanVm(
const unusedVhdsDeletion = []
const toMerge = []
{
// VHD chains (as list from oldest to most recent) to merge indexed by most recent
// VHD chains (as list from child to ancestor) to merge indexed by last
// ancestor
const vhdChainsToMerge = { __proto__: null }
@@ -409,7 +396,7 @@ exports.cleanVm = async function cleanVm(
if (child !== undefined) {
const chain = getUsedChildChainOrDelete(child)
if (chain !== undefined) {
chain.unshift(vhd)
chain.push(vhd)
return chain
}
}

View File

@@ -8,7 +8,7 @@
"type": "git",
"url": "https://github.com/vatesfr/xen-orchestra.git"
},
"version": "0.27.0",
"version": "0.25.0",
"engines": {
"node": ">=14.6"
},
@@ -22,7 +22,7 @@
"@vates/disposable": "^0.1.1",
"@vates/parse-duration": "^0.1.1",
"@xen-orchestra/async-map": "^0.1.2",
"@xen-orchestra/fs": "^1.1.0",
"@xen-orchestra/fs": "^1.0.3",
"@xen-orchestra/log": "^0.3.0",
"@xen-orchestra/template": "^0.1.0",
"compare-versions": "^4.0.1",
@@ -38,7 +38,7 @@
"promise-toolbox": "^0.21.0",
"proper-lockfile": "^4.1.2",
"uuid": "^8.3.2",
"vhd-lib": "^3.3.2",
"vhd-lib": "^3.2.0",
"yazl": "^2.5.1"
},
"devDependencies": {
@@ -46,7 +46,7 @@
"tmp": "^0.2.1"
},
"peerDependencies": {
"@xen-orchestra/xapi": "^1.4.0"
"@xen-orchestra/xapi": "^1.2.0"
},
"license": "AGPL-3.0-or-later",
"author": {

View File

@@ -1,7 +1,7 @@
{
"private": false,
"name": "@xen-orchestra/fs",
"version": "1.1.0",
"version": "1.0.3",
"license": "AGPL-3.0-or-later",
"description": "The File System for Xen Orchestra backups.",
"homepage": "https://github.com/vatesfr/xen-orchestra/tree/master/@xen-orchestra/fs",
@@ -42,7 +42,7 @@
"proper-lockfile": "^4.1.2",
"readable-stream": "^3.0.6",
"through2": "^4.0.2",
"xo-remote-parser": "^0.9.1"
"xo-remote-parser": "^0.8.0"
},
"devDependencies": {
"@babel/cli": "^7.0.0",

View File

@@ -424,10 +424,6 @@ export default class RemoteHandlerAbstract {
// Methods that can be implemented by inheriting classes
useVhdDirectory() {
return this._remote.useVhdDirectory ?? false
}
async _closeFile(fd) {
throw new Error('Not implemented')
}

View File

@@ -525,8 +525,4 @@ export default class S3Handler extends RemoteHandlerAbstract {
}
async _closeFile(fd) {}
useVhdDirectory() {
return true
}
}

View File

@@ -1,219 +0,0 @@
import { createLogger } from '@xen-orchestra/log'
import { genSelfSignedCert } from '@xen-orchestra/self-signed'
import pRetry from 'promise-toolbox/retry'
import { X509Certificate } from 'crypto'
import fs from 'node:fs/promises'
import { dirname } from 'path'
import pw from 'pw'
import tls from 'node:tls'
const { debug, info, warn } = createLogger('xo:mixins:sslCertificate')
async function outputFile(path, content) {
await fs.mkdir(dirname(path), { recursive: true })
await fs.writeFile(path, content, { flag: 'w', mode: 0o400 })
}
class SslCertificate {
#app
#configKey
#updateSslCertificatePromise
#secureContext
#validTo
constructor(app, configKey) {
this.#app = app
this.#configKey = configKey
}
#createSecureContext(cert, key, passphrase) {
return tls.createSecureContext({
cert,
key,
passphrase,
})
}
// load on register
async #loadSslCertificate(config) {
const certPath = config.cert
const keyPath = config.key
let key, cert, passphrase
try {
;[cert, key] = await Promise.all([fs.readFile(certPath), fs.readFile(keyPath)])
if (keyPath.includes('ENCRYPTED')) {
if (config.autoCert) {
throw new Error(`encrytped certificates aren't compatible with autoCert option`)
}
passphrase = await new Promise(resolve => {
// eslint-disable-next-line no-console
process.stdout.write(`Enter pass phrase: `)
pw(resolve)
})
}
} catch (error) {
if (!(config.autoCert && error.code === 'ENOENT')) {
throw error
}
// self signed certificate or let's encrypt will be generated on demand
}
// create secure context also make a validation of the certificate
const secureContext = this.#createSecureContext(cert, key, passphrase)
this.#secureContext = secureContext
// will be tested and eventually renewed on first query
const { validTo } = new X509Certificate(cert)
this.#validTo = new Date(validTo)
}
#getConfig() {
const config = this.#app.config.get(this.#configKey)
if (config === undefined) {
throw new Error(`config for key ${this.#configKey} is unavailable`)
}
return config
}
async #getSelfSignedContext(config) {
return pRetry(
async () => {
const { cert, key } = await genSelfSignedCert()
info('new certificates generated', { cert, key })
try {
await Promise.all([outputFile(config.cert, cert), outputFile(config.key, key)])
} catch (error) {
warn(`can't save self signed certificates `, { error, config })
}
// create secure context also make a validation of the certificate
const { validTo } = new X509Certificate(cert)
return { secureContext: this.#createSecureContext(cert, key), validTo: new Date(validTo) }
},
{
tries: 2,
when: e => e.code === 'ERR_SSL_EE_KEY_TOO_SMALL',
onRetry: () => {
warn('got ERR_SSL_EE_KEY_TOO_SMALL while generating self signed certificate ')
},
}
)
}
// get the current certificate for this hostname
async getSecureContext(hostName) {
const config = this.#getConfig()
if (config === undefined) {
throw new Error(`config for key ${this.#configKey} is unavailable`)
}
if (this.#updateSslCertificatePromise) {
debug('certificate is already refreshing')
return this.#updateSslCertificatePromise
}
let certificateIsValid = this.#validTo !== undefined
let shouldRenew = !certificateIsValid
if (certificateIsValid) {
certificateIsValid = this.#validTo >= new Date()
shouldRenew = !certificateIsValid || this.#validTo - new Date() < 30 * 24 * 60 * 60 * 1000
}
let promise = Promise.resolve()
if (shouldRenew) {
try {
// @todo : should also handle let's encrypt
if (config.autoCert === true) {
promise = promise.then(() => this.#getSelfSignedContext(config))
}
this.#updateSslCertificatePromise = promise
// cleanup and store
promise = promise.then(
({ secureContext, validTo }) => {
this.#validTo = validTo
this.#secureContext = secureContext
this.#updateSslCertificatePromise = undefined
return secureContext
},
async error => {
console.warn('error while updating ssl certificate', { error })
this.#updateSslCertificatePromise = undefined
if (!certificateIsValid) {
// we couldn't generate a valid certificate
// only throw if the current certificate is invalid
warn('deleting invalid certificate')
this.#secureContext = undefined
this.#validTo = undefined
await Promise.all([fs.unlink(config.cert), fs.unlink(config.key)])
throw error
}
}
)
} catch (error) {
warn('error while refreshing ssl certificate', { error })
throw error
}
}
if (certificateIsValid) {
// still valid : does not need to wait for the refresh
return this.#secureContext
}
if (this.#updateSslCertificatePromise === undefined) {
throw new Error(`Invalid certificate and no strategy defined to renew it. Try activating autoCert in the config`)
}
// invalid cert : wait for refresh
return this.#updateSslCertificatePromise
}
async register() {
await this.#loadSslCertificate(this.#getConfig())
}
}
export default class SslCertificates {
#app
#handlers = {}
constructor(app, { httpServer }) {
// don't setup the proxy if httpServer is not present
//
// that can happen when the app is instantiated in another context like xo-server-recover-account
if (httpServer === undefined) {
return
}
this.#app = app
httpServer.getSecureContext = this.getSecureContext.bind(this)
}
async getSecureContext(hostname, configKey) {
const config = this.#app.config.get(`http.listen.${configKey}`)
if (!config || !config.cert || !config.key) {
throw new Error(`HTTPS configuration does no exists for key http.listen.${configKey}`)
}
if (this.#handlers[configKey] === undefined) {
throw new Error(`the SslCertificate handler for key http.listen.${configKey} does not exists.`)
}
return this.#handlers[configKey].getSecureContext(hostname, config)
}
async register() {
// http.listen can be an array or an object
const configs = this.#app.config.get('http.listen') || []
const configKeys = Object.keys(configs) || []
await Promise.all(
configKeys
.filter(configKey => configs[configKey].cert !== undefined && configs[configKey].key !== undefined)
.map(async configKey => {
this.#handlers[configKey] = new SslCertificate(this.#app, `http.listen.${configKey}`)
return this.#handlers[configKey].register(configs[configKey])
})
)
}
}

View File

@@ -16,18 +16,16 @@
"license": "AGPL-3.0-or-later",
"version": "0.5.0",
"engines": {
"node": ">=14"
"node": ">=12"
},
"dependencies": {
"@vates/event-listeners-manager": "^1.0.1",
"@vates/event-listeners-manager": "^1.0.0",
"@vates/parse-duration": "^0.1.1",
"@xen-orchestra/emit-async": "^1.0.0",
"@xen-orchestra/log": "^0.3.0",
"@xen-orchestra/self-signed": "^0.1.3",
"app-conf": "^2.1.0",
"lodash": "^4.17.21",
"promise-toolbox": "^0.21.0",
"pw": "^0.0.4"
"promise-toolbox": "^0.21.0"
},
"scripts": {
"postversion": "npm publish --access public"

View File

@@ -9,7 +9,7 @@
"type": "git",
"url": "https://github.com/vatesfr/xen-orchestra.git"
},
"version": "0.1.2",
"version": "0.1.1",
"engines": {
"node": ">=8.10"
},
@@ -30,7 +30,7 @@
"rimraf": "^3.0.0"
},
"dependencies": {
"@vates/read-chunk": "^1.0.0"
"@vates/read-chunk": "^0.1.2"
},
"author": {
"name": "Vates SAS",

View File

@@ -1,7 +1,7 @@
{
"private": false,
"name": "@xen-orchestra/proxy-cli",
"version": "0.3.1",
"version": "0.3.0",
"license": "AGPL-3.0-or-later",
"description": "CLI for @xen-orchestra/proxy",
"keywords": [
@@ -26,7 +26,7 @@
},
"dependencies": {
"@iarna/toml": "^2.2.0",
"@vates/read-chunk": "^1.0.0",
"@vates/read-chunk": "^0.1.2",
"ansi-colors": "^4.1.1",
"app-conf": "^2.1.0",
"content-type": "^1.0.4",

View File

@@ -3,11 +3,13 @@
import forOwn from 'lodash/forOwn.js'
import fse from 'fs-extra'
import getopts from 'getopts'
import pRetry from 'promise-toolbox/retry'
import { catchGlobalErrors } from '@xen-orchestra/log/configure.js'
import { create as createServer } from 'http-server-plus'
import { createCachedLookup } from '@vates/cached-dns.lookup'
import { createLogger } from '@xen-orchestra/log'
import { createSecureServer } from 'http2'
import { genSelfSignedCert } from '@xen-orchestra/self-signed'
import { load as loadConfig } from 'app-conf'
// -------------------------------------------------------------------
@@ -54,21 +56,41 @@ ${APP_NAME} v${APP_VERSION}
createSecureServer: opts => createSecureServer({ ...opts, allowHTTP1: true }),
})
forOwn(config.http.listen, async ({ autoCert, cert, key, ...opts }, listenKey) => {
forOwn(config.http.listen, async ({ autoCert, cert, key, ...opts }) => {
try {
if (cert !== undefined && key !== undefined) {
opts.SNICallback = async (serverName, callback) => {
// injected by @xen-orchestr/mixins/sslCertificate.mjs
try {
const secureContext = await httpServer.getSecureContext(serverName, listenKey)
callback(null, secureContext)
} catch (error) {
warn('An error occured during certificate context creation', { error, listenKey, serverName })
callback(error)
const niceAddress = await pRetry(
async () => {
if (cert !== undefined && key !== undefined) {
try {
opts.cert = fse.readFileSync(cert)
opts.key = fse.readFileSync(key)
} catch (error) {
if (!(autoCert && error.code === 'ENOENT')) {
throw error
}
const pems = await genSelfSignedCert()
fse.outputFileSync(cert, pems.cert, { flag: 'wx', mode: 0o400 })
fse.outputFileSync(key, pems.key, { flag: 'wx', mode: 0o400 })
info('new certificate generated', { cert, key })
opts.cert = pems.cert
opts.key = pems.key
}
}
return httpServer.listen(opts)
},
{
tries: 2,
when: e => autoCert && e.code === 'ERR_SSL_EE_KEY_TOO_SMALL',
onRetry: () => {
warn('deleting invalid certificate')
fse.unlinkSync(cert)
fse.unlinkSync(key)
},
}
}
const niceAddress = httpServer.listen(opts)
)
info(`Web server listening on ${niceAddress}`)
} catch (error) {
if (error.niceAddress !== undefined) {
@@ -116,8 +138,6 @@ ${APP_NAME} v${APP_VERSION}
const { default: fromCallback } = await import('promise-toolbox/fromCallback')
app.hooks.on('stop', () => fromCallback(cb => httpServer.stop(cb)))
await app.sslCertificate.register()
await app.hooks.start()
// Gracefully shutdown on signals.
@@ -126,7 +146,6 @@ ${APP_NAME} v${APP_VERSION}
process.on(signal, () => {
if (alreadyCalled) {
warn('forced exit')
// eslint-disable-next-line n/no-process-exit
process.exit(1)
}
alreadyCalled = true
@@ -144,7 +163,7 @@ main(process.argv.slice(2)).then(
},
error => {
fatal(error)
// eslint-disable-next-line n/no-process-exit
process.exit(1)
}
)

View File

@@ -1,7 +1,7 @@
{
"private": true,
"name": "@xen-orchestra/proxy",
"version": "0.23.5",
"version": "0.23.2",
"license": "AGPL-3.0-or-later",
"description": "XO Proxy used to remotely execute backup jobs",
"keywords": [
@@ -26,18 +26,19 @@
},
"dependencies": {
"@iarna/toml": "^2.2.0",
"@koa/router": "^11.0.1",
"@koa/router": "^10.0.0",
"@vates/cached-dns.lookup": "^1.0.0",
"@vates/compose": "^2.1.0",
"@vates/decorate-with": "^2.0.0",
"@vates/disposable": "^0.1.1",
"@xen-orchestra/async-map": "^0.1.2",
"@xen-orchestra/backups": "^0.27.0",
"@xen-orchestra/fs": "^1.1.0",
"@xen-orchestra/backups": "^0.25.0",
"@xen-orchestra/fs": "^1.0.3",
"@xen-orchestra/log": "^0.3.0",
"@xen-orchestra/mixin": "^0.1.0",
"@xen-orchestra/mixins": "^0.5.0",
"@xen-orchestra/xapi": "^1.4.0",
"@xen-orchestra/self-signed": "^0.1.3",
"@xen-orchestra/xapi": "^1.2.0",
"ajv": "^8.0.3",
"app-conf": "^2.1.0",
"async-iterator-to-stream": "^1.1.0",

View File

@@ -0,0 +1,3 @@
'use strict'
module.exports = require('../../@xen-orchestra/babel-config')(require('./package.json'))

View File

@@ -0,0 +1 @@
../../scripts/babel-eslintrc.js

View File

@@ -14,13 +14,31 @@
"name": "Vates SAS",
"url": "https://vates.fr"
},
"preferGlobal": false,
"main": "dist/",
"browserslist": [
">2%"
],
"engines": {
"node": ">=6"
},
"dependencies": {
"lodash": "^4.17.15"
"devDependencies": {
"@babel/cli": "^7.0.0",
"@babel/core": "^7.0.0",
"@babel/preset-env": "^7.0.0",
"cross-env": "^7.0.2",
"rimraf": "^3.0.0"
},
"scripts": {
"build": "cross-env NODE_ENV=production babel --source-maps --out-dir=dist/ src/",
"clean": "rimraf dist/",
"dev": "cross-env NODE_ENV=development babel --watch --source-maps --out-dir=dist/ src/",
"prebuild": "yarn run clean",
"predev": "yarn run prebuild",
"prepublishOnly": "yarn run build",
"postversion": "npm publish --access public"
},
"dependencies": {
"lodash": "^4.17.15"
}
}

View File

@@ -1,10 +1,8 @@
'use strict'
const escapeRegExp = require('lodash/escapeRegExp')
import escapeRegExp from 'lodash/escapeRegExp'
const compareLengthDesc = (a, b) => b.length - a.length
exports.compileTemplate = function compileTemplate(pattern, rules) {
export function compileTemplate(pattern, rules) {
const matches = Object.keys(rules).sort(compareLengthDesc).map(escapeRegExp).join('|')
const regExp = new RegExp(`\\\\(?:\\\\|${matches})|${matches}`, 'g')
return (...params) =>

View File

@@ -1,8 +1,5 @@
/* eslint-env jest */
'use strict'
const { compileTemplate } = require('.')
import { compileTemplate } from '.'
it("correctly replaces the template's variables", () => {
const replacer = compileTemplate('{property}_\\{property}_\\\\{property}_{constant}_%_FOO', {

View File

@@ -43,7 +43,7 @@
"pw": "^0.0.4",
"xdg-basedir": "^4.0.0",
"xo-lib": "^0.11.1",
"xo-vmdk-to-vhd": "^2.4.2"
"xo-vmdk-to-vhd": "^2.4.1"
},
"devDependencies": {
"@babel/cli": "^7.0.0",

View File

@@ -1,9 +0,0 @@
'use strict'
// TODO: remove when Node >=15.0
module.exports = class AggregateError extends Error {
constructor(errors, message) {
super(message)
this.errors = errors
}
}

View File

@@ -1,6 +1,6 @@
{
"name": "@xen-orchestra/xapi",
"version": "1.4.0",
"version": "1.2.0",
"homepage": "https://github.com/vatesfr/xen-orchestra/tree/master/@xen-orchestra/xapi",
"bugs": "https://github.com/vatesfr/xen-orchestra/issues",
"repository": {
@@ -26,10 +26,9 @@
"@xen-orchestra/log": "^0.3.0",
"d3-time-format": "^3.0.0",
"golike-defer": "^0.5.1",
"json-rpc-protocol": "^0.13.2",
"lodash": "^4.17.15",
"promise-toolbox": "^0.21.0",
"vhd-lib": "^3.3.2",
"vhd-lib": "^3.2.0",
"xo-common": "^0.8.0"
},
"private": false,

View File

@@ -1,18 +1,12 @@
'use strict'
const { asyncMap, asyncMapSettled } = require('@xen-orchestra/async-map')
const { decorateClass } = require('@vates/decorate-with')
const { defer } = require('golike-defer')
const { incorrectState } = require('xo-common/api-errors')
const { VDI_FORMAT_RAW } = require('./index.js')
const peekFooterFromStream = require('vhd-lib/peekFooterFromVhdStream')
const AggregateError = require('./_AggregateError.js')
const { warn } = require('@xen-orchestra/log').createLogger('xo:xapi:sr')
const OC_MAINTENANCE = 'xo:maintenanceState'
class Sr {
async create({
content_type = 'user', // recommended by Citrix
@@ -44,108 +38,6 @@ class Sr {
return ref
}
// Switch the SR to maintenance mode:
// - shutdown all running VMs with a VDI on this SR
// - their UUID is saved into SR.other_config[OC_MAINTENANCE].shutdownVms
// - clean shutdown is attempted, and falls back to a hard shutdown
// - unplug all connected hosts from this SR
async enableMaintenanceMode($defer, ref, { vmsToShutdown = [] } = {}) {
const state = { timestamp: Date.now() }
// will throw if already in maintenance mode
await this.call('SR.add_to_other_config', ref, OC_MAINTENANCE, JSON.stringify(state))
await $defer.onFailure.call(this, 'call', 'SR.remove_from_other_config', ref, OC_MAINTENANCE)
const runningVms = new Map()
const handleVbd = async ref => {
const vmRef = await this.getField('VBD', ref, 'VM')
if (!runningVms.has(vmRef)) {
const power_state = await this.getField('VM', vmRef, 'power_state')
const isPaused = power_state === 'Paused'
if (isPaused || power_state === 'Running') {
runningVms.set(vmRef, isPaused)
}
}
}
await asyncMap(await this.getField('SR', ref, 'VDIs'), async ref => {
await asyncMap(await this.getField('VDI', ref, 'VBDs'), handleVbd)
})
{
const runningVmUuids = await asyncMap(runningVms.keys(), ref => this.getField('VM', ref, 'uuid'))
const set = new Set(vmsToShutdown)
for (const vmUuid of runningVmUuids) {
if (!set.has(vmUuid)) {
throw incorrectState({
actual: vmsToShutdown,
expected: runningVmUuids,
property: 'vmsToShutdown',
})
}
}
}
state.shutdownVms = {}
await asyncMapSettled(runningVms, async ([ref, isPaused]) => {
state.shutdownVms[await this.getField('VM', ref, 'uuid')] = isPaused
try {
await this.callAsync('VM.clean_shutdown', ref)
} catch (error) {
warn('SR_enableMaintenanceMode, VM clean shutdown', { error })
await this.callAsync('VM.hard_shutdown', ref)
}
$defer.onFailure.call(this, 'callAsync', 'VM.start', ref, isPaused, true)
})
state.unpluggedPbds = []
await asyncMapSettled(await this.getField('SR', ref, 'PBDs'), async ref => {
if (await this.getField('PBD', ref, 'currently_attached')) {
state.unpluggedPbds.push(await this.getField('PBD', ref, 'uuid'))
await this.callAsync('PBD.unplug', ref)
$defer.onFailure.call(this, 'callAsync', 'PBD.plug', ref)
}
})
await this.setFieldEntry('SR', ref, 'other_config', OC_MAINTENANCE, JSON.stringify(state))
}
// this method is best effort and will not stop on first error
async disableMaintenanceMode(ref) {
const state = JSON.parse((await this.getField('SR', ref, 'other_config'))[OC_MAINTENANCE])
// will throw if not in maintenance mode
await this.call('SR.remove_from_other_config', ref, OC_MAINTENANCE)
const errors = []
await asyncMap(state.unpluggedPbds, async uuid => {
try {
await this.callAsync('PBD.plug', await this.call('PBD.get_by_uuid', uuid))
} catch (error) {
errors.push(error)
}
})
await asyncMap(Object.entries(state.shutdownVms), async ([uuid, isPaused]) => {
try {
await this.callAsync('VM.start', await this.call('VM.get_by_uuid', uuid), isPaused, true)
} catch (error) {
errors.push(error)
}
})
if (errors.length !== 0) {
throw new AggregateError(errors)
}
}
async importVdi(
$defer,
ref,
@@ -161,4 +53,4 @@ class Sr {
}
module.exports = Sr
decorateClass(Sr, { enableMaintenanceMode: defer, importVdi: defer })
decorateClass(Sr, { importVdi: defer })

View File

@@ -12,7 +12,6 @@ const { createLogger } = require('@xen-orchestra/log')
const { decorateClass } = require('@vates/decorate-with')
const { defer } = require('golike-defer')
const { incorrectState, forbiddenOperation } = require('xo-common/api-errors.js')
const { JsonRpcError } = require('json-rpc-protocol')
const { Ref } = require('xen-api')
const extractOpaqueRef = require('./_extractOpaqueRef.js')
@@ -510,22 +509,6 @@ class Vm {
}
return ref
} catch (error) {
if (
// xxhash is the new form consistency hashing in CH 8.1 which uses a faster,
// more efficient hashing algorithm to generate the consistency checks
// in order to support larger files without the consistency checking process taking an incredibly long time
error.code === 'IMPORT_ERROR' &&
error.params?.some(
param =>
param.includes('INTERNAL_ERROR') &&
param.includes('Expected to find an inline checksum') &&
param.includes('.xxhash')
)
) {
warn('import', { error })
throw new JsonRpcError('Importing this VM requires XCP-ng or Citrix Hypervisor >=8.1')
}
// augment the error with as much relevant info as possible
const [poolMaster, sr] = await Promise.all([
safeGetRecord(this, 'host', this.pool.master),

View File

@@ -1,77 +1,8 @@
# ChangeLog
## **5.72.1** (2022-07-11)
<img id="latest" src="https://badgen.net/badge/channel/latest/yellow" alt="Channel: latest" />
### Enhancements
- [SR] When SR is in maintenance, add "Maintenance mode" badge next to its name (PR [#6313](https://github.com/vatesfr/xen-orchestra/pull/6313))
### Bug fixes
- [Tasks] Fix tasks not displayed when running CR backup job [Forum#6038](https://xcp-ng.org/forum/topic/6038/not-seeing-tasks-any-more-as-admin) (PR [#6315](https://github.com/vatesfr/xen-orchestra/pull/6315))
- [Backup] Fix failing merge multiple VHDs at once (PR [#6317](https://github.com/vatesfr/xen-orchestra/pull/6317))
- [VM/Console] Fix _Connect with SSH/RDP_ when address is IPv6
- [Audit] Ignore side-effects free API methods `xoa.check`, `xoa.clearCheckCache` and `xoa.getHVSupportedVersions`
### Released packages
- @xen-orchestra/backups 0.27.0
- @xen-orchestra/backups-cli 0.7.5
- @xen-orchestra/proxy 0.23.5
- vhd-lib 3.3.2
- xo-server 5.98.1
- xo-server-audit 0.10.0
- xo-web 5.100.0
## **5.72.0** (2022-06-30)
### Highlights
- [Backup] Merge delta backups without copying data when using VHD directories on NFS/SMB/local remote (PR [#6271](https://github.com/vatesfr/xen-orchestra/pull/6271))
- [Proxies] Ability to copy the proxy access URL (PR [#6287](https://github.com/vatesfr/xen-orchestra/pull/6287))
- [SR/Advanced] Ability to enable/disable _Maintenance Mode_ [#6215](https://github.com/vatesfr/xen-orchestra/issues/6215) (PRs [#6308](https://github.com/vatesfr/xen-orchestra/pull/6308), [#6297](https://github.com/vatesfr/xen-orchestra/pull/6297))
- [User] User tokens management through XO interface (PR [#6276](https://github.com/vatesfr/xen-orchestra/pull/6276))
- [Tasks, VM/General] Self Service users: show tasks related to their pools, hosts, SRs, networks and VMs (PR [#6217](https://github.com/vatesfr/xen-orchestra/pull/6217))
### Enhancements
> Users must be able to say: “Nice enhancement, I'm eager to test it”
- [Backup/Restore] Clearer error message when importing a VM backup requires XCP-ng/CH >= 8.1 (PR [#6304](https://github.com/vatesfr/xen-orchestra/pull/6304))
- [Backup] Users can use VHD directory on any remote type (PR [#6273](https://github.com/vatesfr/xen-orchestra/pull/6273))
### Bug fixes
> Users must be able to say: “I had this issue, happy to know it's fixed”
- [VDI Import] Fix `this._getOrWaitObject is not a function`
- [VM] Attempting to delete a protected VM should display a modal with the error and the ability to bypass it (PR [#6290](https://github.com/vatesfr/xen-orchestra/pull/6290))
- [OVA Import] Fix import stuck after first disk
- [File restore] Ignore symbolic links
### Released packages
- @vates/event-listeners-manager 1.0.1
- @vates/read-chunk 1.0.0
- @xen-orchestra/backups 0.26.0
- @xen-orchestra/backups-cli 0.7.4
- xo-remote-parser 0.9.1
- @xen-orchestra/fs 1.1.0
- @xen-orchestra/openflow 0.1.2
- @xen-orchestra/xapi 1.4.0
- @xen-orchestra/proxy 0.23.4
- @xen-orchestra/proxy-cli 0.3.1
- vhd-lib 3.3.1
- vhd-cli 0.8.0
- xo-vmdk-to-vhd 2.4.2
- xo-server 5.98.0
- xo-web 5.99.0
## **5.71.1 (2022-06-13)**
<img id="stable" src="https://badgen.net/badge/channel/stable/green" alt="Channel: stable" />
<img id="latest" src="https://badgen.net/badge/channel/latest/yellow" alt="Channel: latest" />
### Enhancements
@@ -183,6 +114,8 @@
## 5.70.0 (2022-04-29)
<img id="stable" src="https://badgen.net/badge/channel/stable/green" alt="Channel: stable" />
### Highlights
- [VM export] Feat export to `ova` format (PR [#6006](https://github.com/vatesfr/xen-orchestra/pull/6006))

View File

@@ -11,6 +11,9 @@
> Users must be able to say: “I had this issue, happy to know it's fixed”
- [VDI Import] Fix `this._getOrWaitObject is not a function`
- [VM] Attempting to delete a protected VM should display a modal with the error and the ability to bypass it (PR [#6290](https://github.com/vatesfr/xen-orchestra/pull/6290))
### Packages to release
> When modifying a package, add it here with its release type.
@@ -27,9 +30,9 @@
<!--packages-start-->
- @vates/async-each major
- @xen-orchestra/mixins minor
- @xen-orchestra/proxy patch
- @xen-orchestra/xo-server patch
- @vates/event-listeners-manager patch
- @vates/read-chunk major
- @xen-orchestra/xapi minor
- xo-server patch
<!--packages-end-->

View File

@@ -68,10 +68,9 @@ You shouldn't have to change this. It's the path where `xo-web` files are served
If you use certificates signed by an in-house CA for your XCP-ng or XenServer hosts, and want to have Xen Orchestra connect to them without rejection, you can use the [`NODE_EXTRA_CA_CERTS`](https://nodejs.org/api/cli.html#cli_node_extra_ca_certs_file) environment variable.
To enable this option in your XOA, create `/etc/systemd/system/xo-server.service.d/ca.conf` with the following content:
To enable this option in your XOA, edit the `/etc/systemd/system/xo-server.service` file and add this:
```
[Service]
Environment=NODE_EXTRA_CA_CERTS=/usr/local/share/ca-certificates/my-cert.crt
```
@@ -82,8 +81,6 @@ Don't forget to reload `systemd` conf and restart `xo-server`:
# systemctl restart xo-server.service
```
> For XO Proxy, the process is almost the same except the file to create is `/etc/systemd/system/xo-proxy.service.d/ca.conf` and the service to restart is `xo-proxy.service`.
## Redis server
By default, XO-server will try to contact Redis server on `localhost`, with the port `6379`. But you can define whatever you want:

View File

@@ -24,15 +24,16 @@ Please, do explain:
The best way to propose a change to the documentation or code is
to create a [GitHub pull request](https://help.github.com/articles/using-pull-requests/).
1. Fork the [Xen Orchestra repository](https://github.com/vatesfr/xen-orchestra) using the Fork button
2. Follow [the documentation](installation.md#from-the-sources) to install and run Xen Orchestra from the sources
3. Create a branch for your work
4. Edit the source files
5. Add a summary of your changes to `CHANGELOG.unreleased.md`, if your changes do not relate to an existing changelog item and update the list of packages that must be released to take your changes into account
6. [Create a pull request](https://github.com/vatesfr/xen-orchestra/compare) for this branch against the `master` branch
7. Push into the branch until the pull request is ready to merge
8. Avoid unnecessary merges: keep your branch up to date by regularly rebasing `git rebase origin/master`
9. When ready to merge, clean up the history (reorder commits, squash some of them together, rephrase messages): `git rebase -i origin/master`
:::tip
Your pull request should always be against the `master` branch and not against `stable` which is the stable branch!
:::
1. Create a branch for your work
2. Add a summary of your changes to `CHANGELOG.md` under the `next` section, if your changes do not relate to an existing changelog item
3. Create a pull request for this branch against the `master` branch
4. Push into the branch until the pull request is ready to merge
5. Avoid unnecessary merges: keep your branch up to date by regularly rebasing `git rebase origin/master`
6. When ready to merge, clean up the history (reorder commits, squash some of them together, rephrase messages): `git rebase -i origin/master`
### Issue triage

View File

@@ -3,7 +3,7 @@
"@babel/core": "^7.0.0",
"@babel/eslint-parser": "^7.13.8",
"@babel/register": "^7.0.0",
"babel-jest": "^28.1.2",
"babel-jest": "^27.3.1",
"benchmark": "^2.1.4",
"deptree": "^1.0.0",
"eslint": "^8.7.0",
@@ -19,7 +19,7 @@
"globby": "^13.1.1",
"handlebars": "^4.7.6",
"husky": "^4.2.5",
"jest": "^28.1.2",
"jest": "^27.3.1",
"lint-staged": "^12.0.3",
"lodash": "^4.17.4",
"prettier": "^2.0.5",

View File

@@ -0,0 +1,3 @@
'use strict'
module.exports = require('../../@xen-orchestra/babel-config')(require('./package.json'))

View File

@@ -0,0 +1 @@
../../scripts/babel-eslintrc.js

View File

@@ -1,14 +0,0 @@
'use strict'
const { parse } = require('./')
const { ast, pattern } = require('./index.fixtures')
module.exports = ({ benchmark }) => {
benchmark('parse', () => {
parse(pattern)
})
benchmark('toString', () => {
ast.toString()
})
}

View File

@@ -16,6 +16,7 @@
"url": "https://vates.fr"
},
"preferGlobal": false,
"main": "dist/",
"browserslist": [
">2%"
],
@@ -25,7 +26,21 @@
"dependencies": {
"lodash": "^4.17.4"
},
"devDependencies": {
"@babel/cli": "^7.0.0",
"@babel/core": "^7.0.0",
"@babel/preset-env": "^7.0.0",
"babel-plugin-lodash": "^3.3.2",
"cross-env": "^7.0.2",
"rimraf": "^3.0.0"
},
"scripts": {
"build": "cross-env NODE_ENV=production babel --source-maps --out-dir=dist/ src/",
"clean": "rimraf dist/",
"dev": "cross-env NODE_ENV=development babel --watch --source-maps --out-dir=dist/ src/",
"prebuild": "yarn run clean",
"predev": "yarn run prebuild",
"prepublishOnly": "yarn run build",
"postversion": "npm publish"
}
}

View File

@@ -0,0 +1,12 @@
import { parse } from './'
import { ast, pattern } from './index.fixtures'
export default ({ benchmark }) => {
benchmark('parse', () => {
parse(pattern)
})
benchmark('toString', () => {
ast.toString()
})
}

View File

@@ -1,10 +1,8 @@
'use strict'
import * as CM from './'
const CM = require('./')
export const pattern = 'foo !"\\\\ \\"" name:|(wonderwoman batman) hasCape? age:32 chi*go /^foo\\/bar\\./i'
exports.pattern = 'foo !"\\\\ \\"" name:|(wonderwoman batman) hasCape? age:32 chi*go /^foo\\/bar\\./i'
exports.ast = new CM.And([
export const ast = new CM.And([
new CM.String('foo'),
new CM.Not(new CM.String('\\ "')),
new CM.Property('name', new CM.Or([new CM.String('wonderwoman'), new CM.String('batman')])),

View File

@@ -1,6 +1,4 @@
'use strict'
const { escapeRegExp, isPlainObject, some } = require('lodash')
import { escapeRegExp, isPlainObject, some } from 'lodash'
// ===================================================================
@@ -25,7 +23,7 @@ class Node {
}
}
class Null extends Node {
export class Null extends Node {
match() {
return true
}
@@ -34,11 +32,10 @@ class Null extends Node {
return ''
}
}
exports.Null = Null
const formatTerms = terms => terms.map(term => term.toString(true)).join(' ')
class And extends Node {
export class And extends Node {
constructor(children) {
super()
@@ -57,9 +54,8 @@ class And extends Node {
return isNested ? `(${terms})` : terms
}
}
exports.And = And
class Comparison extends Node {
export class Comparison extends Node {
constructor(operator, value) {
super()
this._comparator = Comparison.comparators[operator]
@@ -75,7 +71,6 @@ class Comparison extends Node {
return this._operator + String(this._value)
}
}
exports.Comparison = Comparison
Comparison.comparators = {
'>': (a, b) => a > b,
'>=': (a, b) => a >= b,
@@ -83,7 +78,7 @@ Comparison.comparators = {
'<=': (a, b) => a <= b,
}
class Or extends Node {
export class Or extends Node {
constructor(children) {
super()
@@ -101,9 +96,8 @@ class Or extends Node {
return `|(${formatTerms(this.children)})`
}
}
exports.Or = Or
class Not extends Node {
export class Not extends Node {
constructor(child) {
super()
@@ -118,9 +112,8 @@ class Not extends Node {
return '!' + this.child.toString(true)
}
}
exports.Not = Not
exports.Number = exports.NumberNode = class NumberNode extends Node {
export class NumberNode extends Node {
constructor(value) {
super()
@@ -140,8 +133,9 @@ exports.Number = exports.NumberNode = class NumberNode extends Node {
return String(this.value)
}
}
export { NumberNode as Number }
class NumberOrStringNode extends Node {
export class NumberOrStringNode extends Node {
constructor(value) {
super()
@@ -166,9 +160,9 @@ class NumberOrStringNode extends Node {
return this.value
}
}
exports.NumberOrString = exports.NumberOrStringNode = NumberOrStringNode
export { NumberOrStringNode as NumberOrString }
class Property extends Node {
export class Property extends Node {
constructor(name, child) {
super()
@@ -184,13 +178,12 @@ class Property extends Node {
return `${formatString(this.name)}:${this.child.toString(true)}`
}
}
exports.Property = Property
const escapeChar = char => '\\' + char
const formatString = value =>
Number.isNaN(+value) ? (isRawString(value) ? value : `"${value.replace(/\\|"/g, escapeChar)}"`) : `"${value}"`
class GlobPattern extends Node {
export class GlobPattern extends Node {
constructor(value) {
// fallback to string node if no wildcard
if (value.indexOf('*') === -1) {
@@ -223,9 +216,8 @@ class GlobPattern extends Node {
return this.value
}
}
exports.GlobPattern = GlobPattern
class RegExpNode extends Node {
export class RegExpNode extends Node {
constructor(pattern, flags) {
super()
@@ -253,9 +245,9 @@ class RegExpNode extends Node {
return this.re.toString()
}
}
exports.RegExp = RegExpNode
export { RegExpNode as RegExp }
class StringNode extends Node {
export class StringNode extends Node {
constructor(value) {
super()
@@ -283,9 +275,9 @@ class StringNode extends Node {
return formatString(this.value)
}
}
exports.String = exports.StringNode = StringNode
export { StringNode as String }
class TruthyProperty extends Node {
export class TruthyProperty extends Node {
constructor(name) {
super()
@@ -300,7 +292,6 @@ class TruthyProperty extends Node {
return formatString(this.name) + '?'
}
}
exports.TruthyProperty = TruthyProperty
// -------------------------------------------------------------------
@@ -540,7 +531,7 @@ const parser = P.grammar({
),
ws: P.regex(/\s*/),
}).default
exports.parse = parser.parse.bind(parser)
export const parse = parser.parse.bind(parser)
// -------------------------------------------------------------------
@@ -582,7 +573,7 @@ const _getPropertyClauseStrings = ({ child }) => {
}
// Find possible values for property clauses in a and clause.
exports.getPropertyClausesStrings = function getPropertyClausesStrings(node) {
export const getPropertyClausesStrings = node => {
if (!node) {
return {}
}
@@ -614,7 +605,7 @@ exports.getPropertyClausesStrings = function getPropertyClausesStrings(node) {
// -------------------------------------------------------------------
exports.setPropertyClause = function setPropertyClause(node, name, child) {
export const setPropertyClause = (node, name, child) => {
const property = child && new Property(name, typeof child === 'string' ? new StringNode(child) : child)
if (node === undefined) {

View File

@@ -1,9 +1,7 @@
/* eslint-env jest */
'use strict'
const { ast, pattern } = require('./index.fixtures')
const {
import { ast, pattern } from './index.fixtures'
import {
getPropertyClausesStrings,
GlobPattern,
Null,
@@ -13,7 +11,7 @@ const {
Property,
setPropertyClause,
StringNode,
} = require('./')
} from './'
it('getPropertyClausesStrings', () => {
const tmp = getPropertyClausesStrings(parse('foo bar:baz baz:|(foo bar /^boo$/ /^far$/) foo:/^bar$/'))

View File

@@ -0,0 +1,3 @@
'use strict'
module.exports = require('../../@xen-orchestra/babel-config')(require('./package.json'))

View File

@@ -0,0 +1 @@
../../scripts/babel-eslintrc.js

View File

@@ -16,13 +16,27 @@
"url": "https://vates.fr"
},
"preferGlobal": false,
"main": "dist/",
"browserslist": [
">2%"
],
"engines": {
"node": ">=6"
},
"devDependencies": {
"@babel/cli": "^7.0.0",
"@babel/core": "^7.0.0",
"@babel/preset-env": "^7.0.0",
"cross-env": "^7.0.2",
"rimraf": "^3.0.0"
},
"scripts": {
"build": "cross-env NODE_ENV=production babel --source-maps --out-dir=dist/ src/",
"clean": "rimraf dist/",
"dev": "cross-env NODE_ENV=development babel --watch --source-maps --out-dir=dist/ src/",
"prebuild": "yarn run clean",
"predev": "yarn run prebuild",
"prepublishOnly": "yarn run build",
"postversion": "npm publish"
}
}

View File

@@ -1,5 +1,3 @@
'use strict'
const match = (pattern, value) => {
if (Array.isArray(pattern)) {
return (
@@ -45,6 +43,4 @@ const match = (pattern, value) => {
return pattern === value
}
exports.createPredicate = function createPredicate(pattern) {
return value => match(pattern, value)
}
export const createPredicate = pattern => value => match(pattern, value)

View File

@@ -0,0 +1,3 @@
'use strict'
module.exports = require('../../@xen-orchestra/babel-config')(require('./package.json'))

View File

@@ -0,0 +1 @@
../../scripts/babel-eslintrc.js

View File

@@ -1,43 +0,0 @@
//
// This file has been generated by [index-modules](https://npmjs.com/index-modules)
//
var d = Object.defineProperty
function de(o, n, v) {
d(o, n, { enumerable: true, value: v })
return v
}
function dl(o, n, g, a) {
d(o, n, {
configurable: true,
enumerable: true,
get: function () {
return de(o, n, g(a))
},
})
}
function r(p) {
var v = require(p)
return v && v.__esModule
? v
: typeof v === 'object' || typeof v === 'function'
? Object.create(v, { default: { enumerable: true, value: v } })
: { default: v }
}
function e(p, i) {
dl(defaults, i, function () {
return exports[i].default
})
dl(exports, i, r, p)
}
d(exports, '__esModule', { value: true })
var defaults = de(exports, 'default', {})
e('./check.js', 'check')
e('./compare.js', 'compare')
e('./copy.js', 'copy')
e('./info.js', 'info')
e('./merge.js', 'merge')
e('./raw.js', 'raw')
e('./repl.js', 'repl')
e('./synthetize.js', 'synthetize')

View File

@@ -1,7 +1,7 @@
{
"private": false,
"name": "vhd-cli",
"version": "0.8.0",
"version": "0.7.2",
"license": "ISC",
"description": "Tools to read/create and merge VHD files",
"homepage": "https://github.com/vatesfr/xen-orchestra/tree/master/packages/vhd-cli",
@@ -16,24 +16,40 @@
"url": "https://vates.fr"
},
"preferGlobal": true,
"main": "dist/",
"bin": {
"vhd-cli": "./index.js"
"vhd-cli": "dist/index.js"
},
"engines": {
"node": ">=10"
"node": ">=8.10"
},
"dependencies": {
"@xen-orchestra/fs": "^1.1.0",
"@xen-orchestra/fs": "^1.0.3",
"cli-progress": "^3.1.0",
"exec-promise": "^0.7.0",
"getopts": "^2.2.3",
"human-format": "^1.0.0",
"lodash": "^4.17.21",
"promise-toolbox": "^0.21.0",
"uuid": "^8.3.2",
"vhd-lib": "^3.3.2"
"vhd-lib": "^3.2.0"
},
"devDependencies": {
"@babel/cli": "^7.0.0",
"@babel/core": "^7.0.0",
"@babel/preset-env": "^7.0.0",
"cross-env": "^7.0.2",
"execa": "^5.0.0",
"index-modules": "^0.4.3",
"promise-toolbox": "^0.21.0",
"rimraf": "^3.0.0",
"tmp": "^0.2.1"
},
"scripts": {
"build": "cross-env NODE_ENV=production babel --source-maps --out-dir=dist/ src/",
"dev": "cross-env NODE_ENV=development babel --watch --source-maps --out-dir=dist/ src/",
"prebuild": "rimraf dist/ && index-modules --cjs-lazy src/commands",
"predev": "yarn run prebuild",
"prepublishOnly": "yarn run build",
"postversion": "npm publish"
}
}

View File

@@ -1,5 +1,3 @@
'use strict'
const { createWriteStream } = require('fs')
const { PassThrough } = require('stream')
@@ -14,7 +12,7 @@ const createOutputStream = path => {
return stream
}
exports.writeStream = function writeStream(input, path) {
export const writeStream = (input, path) => {
const output = createOutputStream(path)
return new Promise((resolve, reject) =>

View File

@@ -1,13 +1,11 @@
'use strict'
const { VhdFile, checkVhdChain } = require('vhd-lib')
const getopts = require('getopts')
const { getHandler } = require('@xen-orchestra/fs')
const { resolve } = require('path')
import { VhdFile, checkVhdChain } from 'vhd-lib'
import getopts from 'getopts'
import { getHandler } from '@xen-orchestra/fs'
import { resolve } from 'path'
const checkVhd = (handler, path) => new VhdFile(handler, path).readHeaderAndFooter()
module.exports = async function check(rawArgs) {
export default async rawArgs => {
const { chain, _: args } = getopts(rawArgs, {
boolean: ['chain'],
default: {

View File

@@ -1,11 +1,9 @@
'use strict'
import { getSyncedHandler } from '@xen-orchestra/fs'
import { openVhd, Constants } from 'vhd-lib'
import Disposable from 'promise-toolbox/Disposable'
import omit from 'lodash/omit'
const { getSyncedHandler } = require('@xen-orchestra/fs')
const { openVhd, Constants } = require('vhd-lib')
const Disposable = require('promise-toolbox/Disposable')
const omit = require('lodash/omit')
function deepCompareObjects(src, dest, path) {
const deepCompareObjects = function (src, dest, path) {
for (const key of Object.keys(src)) {
const srcValue = src[key]
const destValue = dest[key]
@@ -31,7 +29,7 @@ function deepCompareObjects(src, dest, path) {
}
}
module.exports = async function compare(args) {
export default async args => {
if (args.length < 4 || args.some(_ => _ === '-h' || _ === '--help')) {
return `Usage: compare <sourceRemoteUrl> <source VHD> <destionationRemoteUrl> <destination> `
}

View File

@@ -1,11 +1,9 @@
'use strict'
import { getSyncedHandler } from '@xen-orchestra/fs'
import { openVhd, VhdFile, VhdDirectory } from 'vhd-lib'
import Disposable from 'promise-toolbox/Disposable'
import getopts from 'getopts'
const { getSyncedHandler } = require('@xen-orchestra/fs')
const { openVhd, VhdFile, VhdDirectory } = require('vhd-lib')
const Disposable = require('promise-toolbox/Disposable')
const getopts = require('getopts')
module.exports = async function copy(rawArgs) {
export default async rawArgs => {
const {
directory,
help,

View File

@@ -1,13 +1,9 @@
'use strict'
const { Constants, VhdFile } = require('vhd-lib')
const { getHandler } = require('@xen-orchestra/fs')
const { openVhd } = require('vhd-lib/openVhd')
const { resolve } = require('path')
const Disposable = require('promise-toolbox/Disposable')
const humanFormat = require('human-format')
const invert = require('lodash/invert.js')
const UUID = require('uuid')
import { Constants, VhdFile } from 'vhd-lib'
import { getHandler } from '@xen-orchestra/fs'
import { resolve } from 'path'
import * as UUID from 'uuid'
import humanFormat from 'human-format'
import invert from 'lodash/invert.js'
const { PLATFORMS } = Constants
@@ -36,8 +32,8 @@ function mapProperties(object, mapping) {
return result
}
async function showDetails(handler, path) {
const vhd = new VhdFile(handler, resolve(path))
export default async args => {
const vhd = new VhdFile(getHandler({ url: 'file:///' }), resolve(args[0]))
try {
await vhd.readHeaderAndFooter()
@@ -71,29 +67,3 @@ async function showDetails(handler, path) {
})
)
}
async function showList(handler, paths) {
let previousUuid
for (const path of paths) {
await Disposable.use(openVhd(handler, resolve(path)), async vhd => {
const uuid = MAPPERS.uuid(vhd.footer.uuid)
const fields = [path, MAPPERS.bytes(vhd.footer.currentSize), uuid, MAPPERS.diskType(vhd.footer.diskType)]
if (vhd.footer.diskType === Constants.DISK_TYPES.DIFFERENCING) {
const parentUuid = MAPPERS.uuid(vhd.header.parentUuid)
fields.push(parentUuid === previousUuid ? '<above VHD>' : parentUuid)
}
previousUuid = uuid
console.log(fields.join(' | '))
})
}
}
module.exports = async function info(args) {
const handler = getHandler({ url: 'file:///' })
if (args.length === 1) {
return showDetails(handler, args[0])
}
return showList(handler, args)
}

View File

@@ -1,11 +1,9 @@
'use strict'
import { Bar } from 'cli-progress'
import { mergeVhd } from 'vhd-lib'
import { getHandler } from '@xen-orchestra/fs'
import { resolve } from 'path'
const { Bar } = require('cli-progress')
const { mergeVhd } = require('vhd-lib')
const { getHandler } = require('@xen-orchestra/fs')
const { resolve } = require('path')
module.exports = async function merge(args) {
export default async function main(args) {
if (args.length < 2 || args.some(_ => _ === '-h' || _ === '--help')) {
return `Usage: ${this.command} <child VHD> <parent VHD>`
}

View File

@@ -1,13 +1,11 @@
'use strict'
import { openVhd } from 'vhd-lib'
import { getSyncedHandler } from '@xen-orchestra/fs'
import { resolve } from 'path'
const { openVhd } = require('vhd-lib')
const { getSyncedHandler } = require('@xen-orchestra/fs')
const { resolve } = require('path')
import { writeStream } from '../_utils'
import { Disposable } from 'promise-toolbox'
const { writeStream } = require('../_utils')
const { Disposable } = require('promise-toolbox')
module.exports = async function raw(args) {
export default async args => {
if (args.length < 2 || args.some(_ => _ === '-h' || _ === '--help')) {
return `Usage: ${this.command} <input VHD> [<output raw>]`
}

View File

@@ -1,12 +1,10 @@
'use strict'
import { asCallback, fromCallback, fromEvent } from 'promise-toolbox'
import { getHandler } from '@xen-orchestra/fs'
import { relative } from 'path'
import { start as createRepl } from 'repl'
import * as vhdLib from 'vhd-lib'
const { asCallback, fromCallback, fromEvent } = require('promise-toolbox')
const { getHandler } = require('@xen-orchestra/fs')
const { relative } = require('path')
const { start: createRepl } = require('repl')
const vhdLib = require('vhd-lib')
module.exports = async function repl(args) {
export default async args => {
const cwd = process.cwd()
const handler = getHandler({ url: 'file://' + cwd })
await handler.sync()

View File

@@ -1,11 +1,9 @@
'use strict'
import path from 'path'
import { createSyntheticStream } from 'vhd-lib'
import { createWriteStream } from 'fs'
import { getHandler } from '@xen-orchestra/fs'
const path = require('path')
const { createSyntheticStream } = require('vhd-lib')
const { createWriteStream } = require('fs')
const { getHandler } = require('@xen-orchestra/fs')
module.exports = async function synthetize(args) {
export default async function main(args) {
if (args.length < 2 || args.some(_ => _ === '-h' || _ === '--help')) {
return `Usage: ${this.command} <input VHD> <output VHD>`
}

View File

@@ -1,11 +1,10 @@
#!/usr/bin/env node
'use strict'
import execPromise from 'exec-promise'
const execPromise = require('exec-promise')
import pkg from '../package.json'
const pkg = require('./package.json')
const commands = require('./commands').default
import commands from './commands'
function runCommand(commands, [command, ...args]) {
if (command === undefined || command === '-h' || command === '--help') {

View File

@@ -86,19 +86,10 @@ exports.VhdAbstract = class VhdAbstract {
}
/**
* @typedef {Object} BitmapBlock
* @property {number} id
* @property {Buffer} bitmap
*
* @typedef {Object} FullBlock
* @property {number} id
* @property {Buffer} bitmap
* @property {Buffer} data
* @property {Buffer} buffer - bitmap + data
*
* @param {number} blockId
* @param {boolean} onlyBitmap
* @returns {Promise<BitmapBlock | FullBlock>}
* @returns {Buffer}
*/
readBlock(blockId, onlyBitmap = false) {
throw new Error(`reading ${onlyBitmap ? 'bitmap of block' : 'block'} ${blockId} is not implemented`)
@@ -113,7 +104,7 @@ exports.VhdAbstract = class VhdAbstract {
*
* @returns {number} the merged data size
*/
async mergeBlock(child, blockId) {
async coalesceBlock(child, blockId) {
const block = await child.readBlock(blockId)
await this.writeEntireBlock(block)
return block.data.length
@@ -343,21 +334,4 @@ exports.VhdAbstract = class VhdAbstract {
stream.length = footer.currentSize
return stream
}
async containsAllDataOf(child) {
await this.readBlockAllocationTable()
await child.readBlockAllocationTable()
for await (const block of child.blocks()) {
const { id, data: childData } = block
// block is in child not in parent
if (!this.containsBlock(id)) {
return false
}
const { data: parentData } = await this.readBlock(id)
if (!childData.equals(parentData)) {
return false
}
}
return true
}
}

View File

@@ -53,25 +53,19 @@ test('Can coalesce block', async () => {
const childDirectoryVhd = yield openVhd(handler, childDirectoryName)
await childDirectoryVhd.readBlockAllocationTable()
let childBlockData = (await childDirectoryVhd.readBlock(0)).data
await parentVhd.mergeBlock(childDirectoryVhd, 0)
await parentVhd.coalesceBlock(childFileVhd, 0)
await parentVhd.writeFooter()
await parentVhd.writeBlockAllocationTable()
let parentBlockData = (await parentVhd.readBlock(0)).data
// block should be present in parent
let childBlockData = (await childFileVhd.readBlock(0)).data
expect(parentBlockData.equals(childBlockData)).toEqual(true)
// block should not be in child since it's a rename for vhd directory
await expect(childDirectoryVhd.readBlock(0)).rejects.toThrowError()
childBlockData = (await childFileVhd.readBlock(1)).data
await parentVhd.mergeBlock(childFileVhd, 1)
await parentVhd.coalesceBlock(childDirectoryVhd, 0)
await parentVhd.writeFooter()
await parentVhd.writeBlockAllocationTable()
parentBlockData = (await parentVhd.readBlock(1)).data
// block should be present in parent in case of mixed vhdfile/vhddirectory
expect(parentBlockData.equals(childBlockData)).toEqual(true)
// block should still be child
await childFileVhd.readBlock(1)
parentBlockData = (await parentVhd.readBlock(0)).data
childBlockData = (await childDirectoryVhd.readBlock(0)).data
expect(parentBlockData).toEqual(childBlockData)
})
})

View File

@@ -5,58 +5,11 @@ const { createLogger } = require('@xen-orchestra/log')
const { fuFooter, fuHeader, checksumStruct } = require('../_structs')
const { test, set: setBitmap } = require('../_bitmap')
const { VhdAbstract } = require('./VhdAbstract')
const { _getCompressor: getCompressor } = require('./_compressors')
const { _getEncryptor: getEncryptor } = require('./_encryptors')
const assert = require('assert')
const promisify = require('promise-toolbox/promisify')
const zlib = require('zlib')
const { debug } = createLogger('vhd-lib:VhdDirectory')
const NULL_COMPRESSOR = {
compress: buffer => buffer,
decompress: buffer => buffer,
baseOptions: {},
}
const COMPRESSORS = {
gzip: {
compress: (
gzip => buffer =>
gzip(buffer, { level: zlib.constants.Z_BEST_SPEED })
)(promisify(zlib.gzip)),
decompress: promisify(zlib.gunzip),
},
brotli: {
compress: (
brotliCompress => buffer =>
brotliCompress(buffer, {
params: {
[zlib.constants.BROTLI_PARAM_QUALITY]: zlib.constants.BROTLI_MIN_QUALITY,
},
})
)(promisify(zlib.brotliCompress)),
decompress: promisify(zlib.brotliDecompress),
},
}
// inject identifiers
for (const id of Object.keys(COMPRESSORS)) {
COMPRESSORS[id].id = id
}
function getCompressor(compressorType) {
if (compressorType === undefined) {
return NULL_COMPRESSOR
}
const compressor = COMPRESSORS[compressorType]
if (compressor === undefined) {
throw new Error(`Compression type ${compressorType} is not supported`)
}
return compressor
}
// ===================================================================
// Directory format
// <path>
@@ -77,10 +30,15 @@ exports.VhdDirectory = class VhdDirectory extends VhdAbstract {
#header
footer
#compressor
#encryptor
#encryption
get compressionType() {
return this.#compressor.id
}
get encryption() {
return this.#encryption
}
set header(header) {
this.#header = header
@@ -116,9 +74,9 @@ exports.VhdDirectory = class VhdDirectory extends VhdAbstract {
}
}
static async create(handler, path, { flags = 'wx+', compression } = {}) {
await handler.mktree(path)
const vhd = new VhdDirectory(handler, path, { flags, compression })
static async create(handler, path, { flags = 'wx+', compression, encryption } = {}) {
await handler.mkdir(path)
const vhd = new VhdDirectory(handler, path, { flags, compression, encryption })
return {
dispose: () => {},
value: vhd,
@@ -131,6 +89,8 @@ exports.VhdDirectory = class VhdDirectory extends VhdAbstract {
this._path = path
this._opts = opts
this.#compressor = getCompressor(opts?.compression)
this.#encryption = opts?.encryption
this.#encryptor = getEncryptor(opts?.encryption)
}
async readBlockAllocationTable() {
@@ -142,15 +102,16 @@ exports.VhdDirectory = class VhdDirectory extends VhdAbstract {
return test(this.#blockTable, blockId)
}
#getChunkPath(partName) {
_getChunkPath(partName) {
return this._path + '/' + partName
}
async _readChunk(partName) {
// here we can implement compression and / or crypto
const buffer = await this._handler.readFile(this.#getChunkPath(partName))
const buffer = await this._handler.readFile(this._getChunkPath(partName))
const uncompressed = await this.#compressor.decompress(buffer)
const decrypted = await this.#encryptor.decrypt(buffer)
const uncompressed = await this.#compressor.decompress(decrypted)
return {
buffer: uncompressed,
}
@@ -163,23 +124,18 @@ exports.VhdDirectory = class VhdDirectory extends VhdAbstract {
`Can't write a chunk ${partName} in ${this._path} with read permission`
)
// in case of VhdDirectory, we want to create the file if it does not exists
const flags = this._opts?.flags === 'r+' ? 'w' : this._opts?.flags
const compressed = await this.#compressor.compress(buffer)
return this._handler.outputFile(this.#getChunkPath(partName), compressed, { flags })
const encrypted = await this.#encryptor.encrypt(compressed)
return this._handler.outputFile(this._getChunkPath(partName), encrypted, this._opts)
}
// put block in subdirectories to limit impact when doing directory listing
#getBlockPath(blockId) {
_getBlockPath(blockId) {
const blockPrefix = Math.floor(blockId / 1e3)
const blockSuffix = blockId - blockPrefix * 1e3
return `blocks/${blockPrefix}/${blockSuffix}`
}
_getFullBlockPath(blockId) {
return this.#getChunkPath(this.#getBlockPath(blockId))
}
async readHeaderAndFooter() {
await this.#readChunkFilters()
@@ -206,7 +162,7 @@ exports.VhdDirectory = class VhdDirectory extends VhdAbstract {
if (onlyBitmap) {
throw new Error(`reading 'bitmap of block' ${blockId} in a VhdDirectory is not implemented`)
}
const { buffer } = await this._readChunk(this.#getBlockPath(blockId))
const { buffer } = await this._readChunk(this._getBlockPath(blockId))
return {
id: blockId,
bitmap: buffer.slice(0, this.bitmapSize),
@@ -246,39 +202,26 @@ exports.VhdDirectory = class VhdDirectory extends VhdAbstract {
}
// only works if data are in the same handler
// and if the full block is modified in child ( which is the case with xcp)
// and if the full block is modified in child ( which is the case with xcp)
// and if the compression type is same on both sides
async mergeBlock(child, blockId, isResumingMerge = false) {
const childBlockPath = child._getFullBlockPath?.(blockId)
async coalesceBlock(child, blockId) {
if (
childBlockPath !== undefined ||
!(child instanceof VhdDirectory) ||
this._handler !== child._handler ||
child.compressionType !== this.compressionType ||
child.compressionType === 'MIXED'
child.encryption !== this.encryption
) {
return super.mergeBlock(child, blockId)
return super.coalesceBlock(child, blockId)
}
try {
await this._handler.rename(childBlockPath, this._getFullBlockPath(blockId))
} catch (error) {
if (error.code === 'ENOENT' && isResumingMerge === true) {
// when resuming, the blocks moved since the last merge state write are
// not in the child anymore but it should be ok
// it will throw an error if block is missing in parent
// won't detect if the block was already in parent and is broken/missing in child
const { data } = await this.readBlock(blockId)
assert.strictEqual(data.length, this.header.blockSize)
} else {
throw error
}
}
setBitmap(this.#blockTable, blockId)
await this._handler.copy(
child._getChunkPath(child._getBlockPath(blockId)),
this._getChunkPath(this._getBlockPath(blockId))
)
return sectorsToBytes(this.sectorsPerBlock)
}
async writeEntireBlock(block) {
await this._writeChunk(this.#getBlockPath(block.id), block.buffer)
await this._writeChunk(this._getBlockPath(block.id), block.buffer)
setBitmap(this.#blockTable, block.id)
}
@@ -293,11 +236,12 @@ exports.VhdDirectory = class VhdDirectory extends VhdAbstract {
async #writeChunkFilters() {
const compressionType = this.compressionType
const encryption = this.encryption
const path = this._path + '/chunk-filters.json'
if (compressionType === undefined) {
if (compressionType === undefined && encryption === undefined) {
await this._handler.unlink(path)
} else {
await this._handler.writeFile(path, JSON.stringify([compressionType]), { flags: 'w' })
await this._handler.writeFile(path, JSON.stringify([compressionType, encryption]), { flags: 'w' })
}
}
@@ -308,6 +252,9 @@ exports.VhdDirectory = class VhdDirectory extends VhdAbstract {
}
throw error
})
assert(chunkFilters.length, 2)
this.#compressor = getCompressor(chunkFilters[0])
this.#encryption = chunkFilters[1]
this.#encryptor = getEncryptor(chunkFilters[1])
}
}

View File

@@ -222,14 +222,14 @@ test('Can coalesce block', async () => {
const childDirectoryVhd = yield openVhd(handler, childDirectoryName)
await childDirectoryVhd.readBlockAllocationTable()
await parentVhd.mergeBlock(childFileVhd, 0)
await parentVhd.coalesceBlock(childFileVhd, 0)
await parentVhd.writeFooter()
await parentVhd.writeBlockAllocationTable()
let parentBlockData = (await parentVhd.readBlock(0)).data
let childBlockData = (await childFileVhd.readBlock(0)).data
expect(parentBlockData).toEqual(childBlockData)
await parentVhd.mergeBlock(childDirectoryVhd, 0)
await parentVhd.coalesceBlock(childDirectoryVhd, 0)
await parentVhd.writeFooter()
await parentVhd.writeBlockAllocationTable()
parentBlockData = (await parentVhd.readBlock(0)).data

View File

@@ -55,7 +55,7 @@ test('It can read block and parent locator from a synthetic vhd', async () => {
await bigVhd.readHeaderAndFooter()
const syntheticVhd = yield VhdSynthetic.open(handler, [bigVhdFileName, smallVhdFileName])
const syntheticVhd = yield VhdSynthetic.open(handler, [smallVhdFileName, bigVhdFileName])
await syntheticVhd.readBlockAllocationTable()
expect(syntheticVhd.header.diskType).toEqual(bigVhd.header.diskType)

View File

@@ -15,13 +15,14 @@ const VhdSynthetic = class VhdSynthetic extends VhdAbstract {
#vhds = []
get header() {
// this the most recent vhd
const vhd = this.#vhds[this.#vhds.length - 1]
// this is the VHD we want to synthesize
const vhd = this.#vhds[0]
// this is the root VHD
const rootVhd = this.#vhds[0]
const rootVhd = this.#vhds[this.#vhds.length - 1]
// data of our synthetic VHD
// TODO: set parentLocatorEntry-s in header
return {
...vhd.header,
parentLocatorEntry: cloneDeep(rootVhd.header.parentLocatorEntry),
@@ -33,28 +34,15 @@ const VhdSynthetic = class VhdSynthetic extends VhdAbstract {
}
get footer() {
// this the most recent vhd
const vhd = this.#vhds[this.#vhds.length - 1]
// this is the oldest VHD
const rootVhd = this.#vhds[0]
// this is the root VHD
const rootVhd = this.#vhds[this.#vhds.length - 1]
return {
...vhd.footer,
...this.#vhds[0].footer,
dataOffset: FOOTER_SIZE,
diskType: rootVhd.footer.diskType,
}
}
get compressionType() {
const compressionType = this.vhds[0].compressionType
for (let i = 0; i < this.vhds.length; i++) {
if (compressionType !== this.vhds[i].compressionType) {
return 'MIXED'
}
}
return compressionType
}
/**
* @param {Array<VhdAbstract>} vhds the chain of Vhds used to compute this Vhd, from the deepest child (in position 0), to the root (in the last position)
* only the last one can have any type. Other must have type DISK_TYPES.DIFFERENCING (delta)
@@ -79,44 +67,24 @@ const VhdSynthetic = class VhdSynthetic extends VhdAbstract {
await asyncMap(vhds, vhd => vhd.readHeaderAndFooter())
for (let i = 0, n = vhds.length - 1; i < n; ++i) {
const parent = vhds[i]
const child = vhds[i + 1]
const child = vhds[i]
const parent = vhds[i + 1]
assert.strictEqual(child.footer.diskType, DISK_TYPES.DIFFERENCING)
assert.strictEqual(UUID.stringify(child.header.parentUuid), UUID.stringify(parent.footer.uuid))
}
}
#getVhdWithBlock(blockId) {
for (let i = this.#vhds.length - 1; i >= 0; i--) {
const vhd = this.#vhds[i]
if (vhd.containsBlock(blockId)) {
return vhd
}
}
assert(false, `no such block ${blockId}`)
}
async readBlock(blockId, onlyBitmap = false) {
// only read the content of the first vhd containing this block
return await this.#getVhdWithBlock(blockId).readBlock(blockId, onlyBitmap)
}
const index = this.#vhds.findIndex(vhd => vhd.containsBlock(blockId))
assert(index !== -1, `no such block ${blockId}`)
async mergeBlock(child, blockId) {
throw new Error(`can't coalesce block into a vhd synthetic`)
// only read the content of the first vhd containing this block
return await this.#vhds[index].readBlock(blockId, onlyBitmap)
}
_readParentLocatorData(id) {
return this.#vhds[this.#vhds.length - 1]._readParentLocatorData(id)
}
_getFullBlockPath(blockId) {
const vhd = this.#getVhdWithBlock(blockId)
return vhd?._getFullBlockPath(blockId)
}
// return true if all the vhds ar an instance of cls
checkVhdsClass(cls) {
return this.#vhds.every(vhd => vhd instanceof cls)
}
}
// add decorated static method
@@ -126,7 +94,7 @@ VhdSynthetic.fromVhdChain = Disposable.factory(async function* fromVhdChain(hand
const vhds = []
do {
vhd = yield openVhd(handler, vhdPath)
vhds.unshift(vhd) // from oldest to most recent
vhds.push(vhd)
vhdPath = resolveRelativeFromFile(vhdPath, vhd.header.parentUnicodeName)
} while (vhd.footer.diskType !== DISK_TYPES.DYNAMIC)

View File

@@ -0,0 +1,51 @@
'use strict'

const zlib = require('zlib')
const promisify = require('promise-toolbox/promisify')

// pass-through used when no compression is configured
const NULL_COMPRESSOR = {
  compress: buffer => buffer,
  decompress: buffer => buffer,
}

const COMPRESSORS = {
  gzip: {
    // Z_BEST_SPEED: favour throughput over ratio, chunks are (de)compressed often
    compress: (
      gzip => buffer =>
        gzip(buffer, { level: zlib.constants.Z_BEST_SPEED })
    )(promisify(zlib.gzip)),
    decompress: promisify(zlib.gunzip),
  },
  brotli: {
    compress: (
      brotliCompress => buffer =>
        brotliCompress(buffer, {
          params: {
            [zlib.constants.BROTLI_PARAM_QUALITY]: zlib.constants.BROTLI_MIN_QUALITY,
          },
        })
    )(promisify(zlib.brotliCompress)),
    decompress: promisify(zlib.brotliDecompress),
  },
}

// inject identifiers
for (const id of Object.keys(COMPRESSORS)) {
  COMPRESSORS[id].id = id
}

/**
 * Returns the compressor matching `compressorType`.
 *
 * @param {string|null|undefined} compressorType - `'gzip'`, `'brotli'`, or a
 *   nullish value for no compression. `null` must be accepted because
 *   `undefined` entries become `null` when the chunk-filters file is written
 *   with `JSON.stringify` and read back.
 * @returns {{ compress: Function, decompress: Function, id?: string }}
 * @throws {Error} when the type is defined but not supported
 */
function getCompressor(compressorType) {
  // `== null` matches both `undefined` and the `null` produced by a JSON round-trip
  if (compressorType == null) {
    return NULL_COMPRESSOR
  }
  const compressor = COMPRESSORS[compressorType]
  if (compressor === undefined) {
    throw new Error(`Compression type ${compressorType} is not supported`)
  }
  return compressor
}

exports._getCompressor = getCompressor

View File

@@ -0,0 +1,22 @@
'use strict'

const crypto = require('crypto')

const { _getEncryptor: getEncryptor } = require('./_encryptors')

/* eslint-env jest */

// round-trip check: decrypt(encrypt(x)) must yield x for AES-256-CBC
test('can encrypt and decrypt AES 256', async () => {
  // the encryptor id is the JSON serialization of the secret material
  const { encrypt, decrypt } = getEncryptor(
    JSON.stringify({
      algorithm: 'aes-256-cbc',
      key: crypto.randomBytes(32), // AES-256 requires a 32-byte key
      ivLength: 16, // AES block size
    })
  )
  const buffer = crypto.randomBytes(1024)
  const encrypted = encrypt(buffer)
  const decrypted = decrypt(encrypted)
  expect(buffer.equals(decrypted)).toEqual(true)
})

View File

@@ -0,0 +1,44 @@
'use strict'

const crypto = require('crypto')

const secretStore = require('./_secretStore.js')

/**
 * Builds an encryptor from the secret material registered under `id`.
 *
 * @param {string|null|undefined} id - JSON-serialized secret
 *   (`{ algorithm, key, ivLength }`). Nullish values (e.g. a `null` read back
 *   from a JSON file) yield a pass-through encryptor.
 * @returns {{ id, algorithm, key, ivLength, encrypt: Function, decrypt: Function }}
 */
function getEncryptor(id = '{}') {
  // the default parameter only covers `undefined`, not `null`:
  // normalize so a `null` produced by a JSON round-trip is also accepted
  const { algorithm, key, ivLength } = secretStore.get(id ?? '{}')
  if (algorithm === undefined) {
    // pass-through encryptor used when no algorithm is configured
    return {
      id: 'NULL_ENCRYPTOR',
      algorithm,
      key,
      ivLength,
      encrypt: buffer => buffer,
      decrypt: buffer => buffer,
    }
  }

  // a fresh random IV is generated per message and prepended to the ciphertext
  function encrypt(buffer) {
    const iv = crypto.randomBytes(ivLength)
    const cipher = crypto.createCipheriv(algorithm, Buffer.from(key), iv)

    const encrypted = cipher.update(buffer)
    return Buffer.concat([iv, encrypted, cipher.final()])
  }

  function decrypt(buffer) {
    // the IV occupies the first `ivLength` bytes, the rest is ciphertext
    const iv = buffer.slice(0, ivLength)
    const encrypted = buffer.slice(ivLength)
    const decipher = crypto.createDecipheriv(algorithm, Buffer.from(key), iv)
    const decrypted = decipher.update(encrypted)
    return Buffer.concat([decrypted, decipher.final()])
  }

  return {
    id,
    algorithm,
    key,
    ivLength,
    encrypt,
    decrypt,
  }
}

exports._getEncryptor = getEncryptor

View File

@@ -0,0 +1,4 @@
'use strict'

// @todo: should be moved to its own module
//
// Parses the JSON-serialized secret `id` into its secret material.
// Falls back to an empty object when `id` is nullish or empty
// (`JSON.parse('')` would throw) or when it parses to `null`,
// so callers always receive an object.
module.exports.get = id => JSON.parse(id || '{}') || {}

View File

@@ -8,7 +8,7 @@ const { Disposable } = require('promise-toolbox')
module.exports = async function chain(parentHandler, parentPath, childHandler, childPath, force = false) {
await Disposable.use(
[openVhd(parentHandler, parentPath), openVhd(childHandler, childPath, { flags: 'r+' })],
[openVhd(parentHandler, parentPath), openVhd(childHandler, childPath)],
async ([parentVhd, childVhd]) => {
await childVhd.readHeaderAndFooter()
const { header, footer } = childVhd

View File

@@ -1,15 +1,17 @@
'use strict'
const { createLogger } = require('@xen-orchestra/log')
const { parseVhdStream } = require('./parseVhdStream.js')
const { VhdDirectory } = require('./Vhd/VhdDirectory.js')
const { Disposable } = require('promise-toolbox')
const { asyncEach } = require('@vates/async-each')
const { warn } = createLogger('vhd-lib:createVhdDirectoryFromStream')
const buildVhd = Disposable.wrap(async function* (handler, path, inputStream, { concurrency, compression }) {
const vhd = yield VhdDirectory.create(handler, path, { compression })
const buildVhd = Disposable.wrap(async function* (
handler,
path,
inputStream,
{ concurrency, compression, encryption }
) {
const vhd = yield VhdDirectory.create(handler, path, { compression, encryption })
await asyncEach(
parseVhdStream(inputStream),
async function (item) {
@@ -44,16 +46,16 @@ exports.createVhdDirectoryFromStream = async function createVhdDirectoryFromStre
handler,
path,
inputStream,
{ validator, concurrency = 16, compression } = {}
{ validator, concurrency = 16, compression, encryption } = {}
) {
try {
await buildVhd(handler, path, inputStream, { concurrency, compression })
await buildVhd(handler, path, inputStream, { concurrency, compression, encryption })
if (validator !== undefined) {
await validator.call(this, path)
}
} catch (error) {
// cleanup on error
await handler.rmtree(path).catch(warn)
await handler.rmtree(path)
throw error
}
}

View File

@@ -6,8 +6,7 @@ exports.checkVhdChain = require('./checkChain')
exports.createReadableSparseStream = require('./createReadableSparseStream')
exports.createVhdStreamWithLength = require('./createVhdStreamWithLength')
exports.createVhdDirectoryFromStream = require('./createVhdDirectoryFromStream').createVhdDirectoryFromStream
const { mergeVhd } = require('./merge')
exports.mergeVhd = mergeVhd
exports.mergeVhd = require('./merge')
exports.peekFooterFromVhdStream = require('./peekFooterFromVhdStream')
exports.openVhd = require('./openVhd').openVhd
exports.VhdAbstract = require('./Vhd/VhdAbstract').VhdAbstract

View File

@@ -9,7 +9,6 @@ const { getHandler } = require('@xen-orchestra/fs')
const { pFromCallback } = require('promise-toolbox')
const { VhdFile, chainVhd, mergeVhd } = require('./index')
const { _cleanupVhds: cleanupVhds } = require('./merge')
const { checkFile, createRandomFile, convertFromRawToVhd } = require('./tests/utils')
@@ -39,15 +38,14 @@ test('merge works in normal cases', async () => {
await createRandomFile(`${tempDir}/${childRandomFileName}`, mbOfChildren)
await convertFromRawToVhd(`${tempDir}/${childRandomFileName}`, `${tempDir}/${child1FileName}`)
await chainVhd(handler, parentFileName, handler, child1FileName, true)
await checkFile(`${tempDir}/${parentFileName}`)
// merge
await mergeVhd(handler, parentFileName, handler, child1FileName)
// check that the merged vhd is still valid
await checkFile(`${tempDir}/${child1FileName}`)
// check that vhd is still valid
await checkFile(`${tempDir}/${parentFileName}`)
const parentVhd = new VhdFile(handler, child1FileName)
const parentVhd = new VhdFile(handler, parentFileName)
await parentVhd.readHeaderAndFooter()
await parentVhd.readBlockAllocationTable()
@@ -140,11 +138,11 @@ test('it can resume a merge ', async () => {
await mergeVhd(handler, 'parent.vhd', handler, 'child1.vhd')
// reload header footer and block allocation table , they should succed
await childVhd.readHeaderAndFooter()
await childVhd.readBlockAllocationTable()
await parentVhd.readHeaderAndFooter()
await parentVhd.readBlockAllocationTable()
let offset = 0
// check that the data are the same as source
for await (const block of childVhd.blocks()) {
for await (const block of parentVhd.blocks()) {
const blockContent = block.data
// first block is marked as already merged, should not be modified
// second block should come from children
@@ -155,7 +153,7 @@ test('it can resume a merge ', async () => {
await fs.read(fd, buffer, 0, buffer.length, offset)
expect(buffer.equals(blockContent)).toEqual(true)
offset += childVhd.header.blockSize
offset += parentVhd.header.blockSize
}
})
@@ -185,9 +183,9 @@ test('it merge multiple child in one pass ', async () => {
await mergeVhd(handler, parentFileName, handler, [grandChildFileName, childFileName])
// check that vhd is still valid
await checkFile(grandChildFileName)
await checkFile(parentFileName)
const parentVhd = new VhdFile(handler, grandChildFileName)
const parentVhd = new VhdFile(handler, parentFileName)
await parentVhd.readHeaderAndFooter()
await parentVhd.readBlockAllocationTable()
@@ -208,20 +206,3 @@ test('it merge multiple child in one pass ', async () => {
offset += parentVhd.header.blockSize
}
})
test('it cleans vhd mergedfiles', async () => {
const handler = getHandler({ url: `file://${tempDir}` })
await handler.writeFile('parent', 'parentData')
await handler.writeFile('child1', 'child1Data')
await handler.writeFile('child2', 'child2Data')
await handler.writeFile('child3', 'child3Data')
await cleanupVhds(handler, 'parent', ['child1', 'child2', 'child3'], { remove: true })
// only child3 should stay, with the data of parent
const [child3, ...other] = await handler.list('.')
expect(other.length).toEqual(0)
expect(child3).toEqual('child3')
expect((await handler.readFile('child3')).toString('utf8')).toEqual('parentData')
})

View File

@@ -4,7 +4,6 @@
const assert = require('assert')
const noop = require('./_noop')
const UUID = require('uuid')
const { createLogger } = require('@xen-orchestra/log')
const { limitConcurrency } = require('limit-concurrency-decorator')
@@ -13,34 +12,11 @@ const { basename, dirname } = require('path')
const { DISK_TYPES } = require('./_constants')
const { Disposable } = require('promise-toolbox')
const { asyncEach } = require('@vates/async-each')
const { VhdAbstract } = require('./Vhd/VhdAbstract')
const { VhdDirectory } = require('./Vhd/VhdDirectory')
const { VhdSynthetic } = require('./Vhd/VhdSynthetic')
const { asyncMap } = require('@xen-orchestra/async-map')
const { warn } = createLogger('vhd-lib:merge')
// The chain we want to merge is [ ancestor, child_1, ..., child_n]
//
// 1. Create a VhdSynthetic from all children if more than 1 child
// 2. Merge the resulting VHD into the ancestor
// 2.a if at least one is a file: copy file part from child to parent
// 2.b if they are all VhdDirectory: move blocks from children to the ancestor
// 3. Update the size, UUID and timestamp of the ancestor with those of child_n
// 3. Delete all (now) unused VHDs
// 4. Rename the ancestor to to child_n
//
// VhdSynthetic
// |
// /‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾\
// [ ancestor, child_1, ...,child_n-1, child_n ]
// | \____________________/ ^
// | | |
// | unused VHDs |
// | |
// \_____________rename_____________/
// write the merge progress file at most every `delay` seconds
function makeThrottledWriter(handler, path, delay) {
let lastWrite = Date.now()
return async json => {
@@ -52,45 +28,21 @@ function makeThrottledWriter(handler, path, delay) {
}
}
// make the rename / delete part of the merge process
// will fail if parent and children are in different remote
function cleanupVhds(handler, parent, children, { logInfo = noop, remove = false } = {}) {
if (!Array.isArray(children)) {
children = [children]
}
const mergeTargetChild = children.pop()
return Promise.all([
VhdAbstract.rename(handler, parent, mergeTargetChild),
asyncMap(children, child => {
logInfo(`the VHD child is already merged`, { child })
if (remove) {
logInfo(`deleting merged VHD child`, { child })
return VhdAbstract.unlink(handler, child)
}
}),
])
}
module.exports._cleanupVhds = cleanupVhds
// Merge one or multiple vhd child into vhd parent.
// childPath can be array to create a synthetic VHD from multiple VHDs
// childPath is from the grand children to the children
//
// TODO: rename the VHD file during the merge
module.exports.mergeVhd = limitConcurrency(2)(async function merge(
module.exports = limitConcurrency(2)(async function merge(
parentHandler,
parentPath,
childHandler,
childPath,
{ onProgress = noop, logInfo = noop, remove } = {}
{ onProgress = noop } = {}
) {
const mergeStatePath = dirname(parentPath) + '/' + '.' + basename(parentPath) + '.merge.json'
return await Disposable.use(async function* () {
let mergeState
let isResuming = false
try {
const mergeStateContent = await parentHandler.readFile(mergeStatePath)
mergeState = JSON.parse(mergeStateContent)
@@ -107,26 +59,22 @@ module.exports.mergeVhd = limitConcurrency(2)(async function merge(
checkSecondFooter: mergeState === undefined,
})
let childVhd
const parentIsVhdDirectory = parentVhd instanceof VhdDirectory
let childIsVhdDirectory
if (Array.isArray(childPath)) {
childVhd = yield VhdSynthetic.open(childHandler, childPath)
childIsVhdDirectory = childVhd.checkVhdsClass(VhdDirectory)
} else {
childVhd = yield openVhd(childHandler, childPath)
childIsVhdDirectory = childVhd instanceof VhdDirectory
}
const concurrency = parentIsVhdDirectory && childIsVhdDirectory ? 16 : 1
const concurrency = childVhd instanceof VhdDirectory ? 16 : 1
if (mergeState === undefined) {
// merge should be along a vhd chain
assert.strictEqual(UUID.stringify(childVhd.header.parentUuid), UUID.stringify(parentVhd.footer.uuid))
assert.strictEqual(childVhd.header.parentUuid.equals(parentVhd.footer.uuid), true)
const parentDiskType = parentVhd.footer.diskType
assert(parentDiskType === DISK_TYPES.DIFFERENCING || parentDiskType === DISK_TYPES.DYNAMIC)
assert.strictEqual(childVhd.footer.diskType, DISK_TYPES.DIFFERENCING)
assert.strictEqual(childVhd.header.blockSize, parentVhd.header.blockSize)
} else {
isResuming = true
// vhd should not have changed to resume
assert.strictEqual(parentVhd.header.checksum, mergeState.parent.header)
assert.strictEqual(childVhd.header.checksum, mergeState.child.header)
@@ -167,12 +115,12 @@ module.exports.mergeVhd = limitConcurrency(2)(async function merge(
let counter = 0
const mergeStateWriter = makeThrottledWriter(parentHandler, mergeStatePath, 10e3)
await asyncEach(
toMerge,
async blockId => {
merging.add(blockId)
mergeState.mergedDataSize += await parentVhd.mergeBlock(childVhd, blockId, isResuming)
mergeState.mergedDataSize += await parentVhd.coalesceBlock(childVhd, blockId)
merging.delete(blockId)
onProgress({
@@ -207,8 +155,6 @@ module.exports.mergeVhd = limitConcurrency(2)(async function merge(
// should be a disposable
parentHandler.unlink(mergeStatePath).catch(warn)
await cleanupVhds(parentHandler, parentPath, childPath, { logInfo, remove })
return mergeState.mergedDataSize
})
})

View File

@@ -1,7 +1,7 @@
{
"private": false,
"name": "vhd-lib",
"version": "3.3.2",
"version": "3.2.0",
"license": "AGPL-3.0-or-later",
"description": "Primitives for VHD file handling",
"homepage": "https://github.com/vatesfr/xen-orchestra/tree/master/packages/vhd-lib",
@@ -16,7 +16,7 @@
},
"dependencies": {
"@vates/async-each": "^0.1.0",
"@vates/read-chunk": "^1.0.0",
"@vates/read-chunk": "^0.1.2",
"@xen-orchestra/async-map": "^0.1.2",
"@xen-orchestra/log": "^0.3.0",
"async-iterator-to-stream": "^1.0.2",
@@ -29,7 +29,7 @@
"uuid": "^8.3.1"
},
"devDependencies": {
"@xen-orchestra/fs": "^1.1.0",
"@xen-orchestra/fs": "^1.0.3",
"execa": "^5.0.0",
"get-stream": "^6.0.0",
"rimraf": "^3.0.2",

View File

@@ -85,9 +85,10 @@ async function convertToVhdDirectory(rawFileName, vhdFileName, path) {
await fs.mkdir(path + '/blocks/0/')
const stats = await fs.stat(rawFileName)
for (let i = 0, offset = 0; offset < stats.size; i++, offset += blockDataSize) {
const sizeMB = stats.size / 1024 / 1024
for (let i = 0, offset = 0; i < sizeMB; i++, offset += blockDataSize) {
const blockData = Buffer.alloc(blockDataSize)
await fs.read(srcRaw, blockData, 0, blockData.length, offset)
await fs.read(srcRaw, blockData, offset)
await fs.writeFile(path + '/blocks/0/' + i, Buffer.concat([bitmap, blockData]))
}
await fs.close(srcRaw)

View File

@@ -8,6 +8,6 @@
"promise-toolbox": "^0.19.2",
"readable-stream": "^3.1.1",
"throttle": "^1.0.3",
"vhd-lib": "^3.3.2"
"vhd-lib": "^3.2.0"
}
}

View File

@@ -0,0 +1,3 @@
'use strict'

// delegate to the shared monorepo Babel configuration,
// parameterized by this package's own package.json
module.exports = require('../../@xen-orchestra/babel-config')(require('./package.json'))

View File

@@ -0,0 +1 @@
../../scripts/babel-eslintrc.js

View File

@@ -1,216 +1,3 @@
'use strict'
const { BaseError } = require('make-error')
const { iteratee } = require('lodash')
class XoError extends BaseError {
constructor({ code, message, data }) {
super(message)
this.code = code
this.data = data
}
toJsonRpcError() {
return {
message: this.message,
code: this.code,
data: this.data,
}
}
}
const create = (code, getProps) => {
const factory = (...args) => {
const props = getProps(...args)
props.code = code
throw new XoError(props)
}
factory.is = (error, predicate) => error.code === code && (predicate === undefined || iteratee(predicate)(error.data))
return factory
}
// =============================================================================
exports.notImplemented = create(0, () => ({
message: 'not implemented',
}))
exports.noSuchObject = create(1, (id, type) => ({
data: { id, type },
message: `no such ${type || 'object'} ${id}`,
}))
exports.unauthorized = create(2, (permission, objectId, objectType) => ({
data: {
permission,
object: {
id: objectId,
type: objectType,
},
},
message: 'not enough permissions',
}))
exports.invalidCredentials = create(3, () => ({
message: 'invalid credentials',
}))
// Deprecated alreadyAuthenticated (4)
exports.forbiddenOperation = create(5, (operation, reason) => ({
data: { operation, reason },
message: `forbidden operation: ${operation}`,
}))
// Deprecated GenericError (6)
exports.noHostsAvailable = create(7, () => ({
message: 'no hosts available',
}))
exports.authenticationFailed = create(8, () => ({
message: 'authentication failed',
}))
exports.serverUnreachable = create(9, objectId => ({
data: {
objectId,
},
message: 'server unreachable',
}))
exports.invalidParameters = create(10, (message, errors) => {
if (Array.isArray(message)) {
errors = message
message = undefined
}
return {
data: { errors },
message: message || 'invalid parameters',
}
})
exports.vmMissingPvDrivers = create(11, ({ vm }) => ({
data: {
objectId: vm,
},
message: 'missing PV drivers',
}))
exports.vmIsTemplate = create(12, ({ vm }) => ({
data: {
objectId: vm,
},
message: 'VM is a template',
}))
exports.vmBadPowerState = create(13, ({ vm, expected, actual }) => ({
data: {
objectId: vm,
expected,
actual,
},
message: `VM state is ${actual} but should be ${expected}`,
}))
exports.vmLacksFeature = create(14, ({ vm, feature }) => ({
data: {
objectId: vm,
feature,
},
message: `VM lacks feature ${feature || ''}`,
}))
exports.notSupportedDuringUpgrade = create(15, () => ({
message: 'not supported during upgrade',
}))
exports.objectAlreadyExists = create(16, ({ objectId, objectType }) => ({
data: {
objectId,
objectType,
},
message: `${objectType || 'object'} already exists`,
}))
exports.vdiInUse = create(17, ({ vdi, operation }) => ({
data: {
objectId: vdi,
operation,
},
message: 'VDI in use',
}))
exports.hostOffline = create(18, ({ host }) => ({
data: {
objectId: host,
},
message: 'host offline',
}))
exports.operationBlocked = create(19, ({ objectId, code }) => ({
data: {
objectId,
code,
},
message: 'operation blocked',
}))
exports.patchPrecheckFailed = create(20, ({ errorType, patch }) => ({
data: {
objectId: patch,
errorType,
},
message: `patch precheck failed: ${errorType}`,
}))
exports.operationFailed = create(21, ({ objectId, code }) => ({
data: {
objectId,
code,
},
message: 'operation failed',
}))
exports.missingAuditRecord = create(22, ({ id, nValid }) => ({
data: {
id,
nValid,
},
message: 'missing record',
}))
exports.alteredAuditRecord = create(23, ({ id, record, nValid }) => ({
data: {
id,
record,
nValid,
},
message: 'altered record',
}))
exports.notEnoughResources = create(24, data => ({
data, // [{ resourceSet, resourceType, available, requested }]
message: 'not enough resources in resource set',
}))
exports.incorrectState = create(25, ({ actual, expected, object, property }) => ({
data: {
actual,
expected,
object,
property,
},
message: 'incorrect state',
}))
exports.featureUnauthorized = create(26, ({ featureCode, currentPlan, minPlan }) => ({
data: {
featureCode,
currentPlan,
minPlan,
},
message: 'feature Unauthorized',
}))
module.exports = require('./dist/api-errors')

View File

@@ -15,6 +15,10 @@
"name": "Vates SAS",
"url": "https://vates.fr"
},
"preferGlobal": false,
"browserslist": [
"> 1%"
],
"engines": {
"node": ">=6"
},
@@ -22,7 +26,21 @@
"lodash": "^4.16.6",
"make-error": "^1.2.1"
},
"devDependencies": {
"@babel/cli": "^7.0.0",
"@babel/core": "^7.0.0",
"@babel/preset-env": "^7.0.0",
"babel-plugin-lodash": "^3.3.2",
"cross-env": "^7.0.2",
"rimraf": "^3.0.0"
},
"scripts": {
"build": "cross-env NODE_ENV=production babel --source-maps --out-dir=dist/ src/",
"clean": "rimraf dist/",
"dev": "cross-env NODE_ENV=development babel --watch --source-maps --out-dir=dist/ src/",
"prebuild": "yarn run clean",
"predev": "yarn run prebuild",
"prepublishOnly": "yarn run build",
"postversion": "npm publish"
}
}

View File

@@ -0,0 +1,210 @@
import { BaseError } from 'make-error'
import { iteratee } from 'lodash'
// Application-level error carrying a JSON-RPC compatible payload
// (numeric `code` plus arbitrary `data`).
class XoError extends BaseError {
  constructor({ code, message, data }) {
    super(message)
    this.code = code
    this.data = data
  }

  // serialize for transport as a JSON-RPC error object
  toJsonRpcError() {
    const { message, code, data } = this
    return { message, code, data }
  }
}
// Builds an error factory bound to `code`; the factory also exposes an
// `is(error, predicate)` guard that recognizes errors it produced and can
// further match `error.data` with a lodash `iteratee` predicate.
const create = (code, getProps) => {
  function factory(...args) {
    const props = getProps(...args)
    return new XoError({ ...props, code })
  }
  factory.is = (error, predicate) => {
    if (error.code !== code) {
      return false
    }
    return predicate === undefined || iteratee(predicate)(error.data)
  }
  return factory
}
// =============================================================================
export const notImplemented = create(0, () => ({
message: 'not implemented',
}))
export const noSuchObject = create(1, (id, type) => ({
data: { id, type },
message: `no such ${type || 'object'} ${id}`,
}))
export const unauthorized = create(2, (permission, objectId, objectType) => ({
data: {
permission,
object: {
id: objectId,
type: objectType,
},
},
message: 'not enough permissions',
}))
export const invalidCredentials = create(3, () => ({
message: 'invalid credentials',
}))
// Deprecated alreadyAuthenticated (4)
export const forbiddenOperation = create(5, (operation, reason) => ({
data: { operation, reason },
message: `forbidden operation: ${operation}`,
}))
// Deprecated GenericError (6)
export const noHostsAvailable = create(7, () => ({
message: 'no hosts available',
}))
export const authenticationFailed = create(8, () => ({
message: 'authentication failed',
}))
export const serverUnreachable = create(9, objectId => ({
data: {
objectId,
},
message: 'server unreachable',
}))
export const invalidParameters = create(10, (message, errors) => {
if (Array.isArray(message)) {
errors = message
message = undefined
}
return {
data: { errors },
message: message || 'invalid parameters',
}
})
export const vmMissingPvDrivers = create(11, ({ vm }) => ({
data: {
objectId: vm,
},
message: 'missing PV drivers',
}))
export const vmIsTemplate = create(12, ({ vm }) => ({
data: {
objectId: vm,
},
message: 'VM is a template',
}))
export const vmBadPowerState = create(13, ({ vm, expected, actual }) => ({
data: {
objectId: vm,
expected,
actual,
},
message: `VM state is ${actual} but should be ${expected}`,
}))
export const vmLacksFeature = create(14, ({ vm, feature }) => ({
data: {
objectId: vm,
feature,
},
message: `VM lacks feature ${feature || ''}`,
}))
export const notSupportedDuringUpgrade = create(15, () => ({
message: 'not supported during upgrade',
}))
export const objectAlreadyExists = create(16, ({ objectId, objectType }) => ({
data: {
objectId,
objectType,
},
message: `${objectType || 'object'} already exists`,
}))
export const vdiInUse = create(17, ({ vdi, operation }) => ({
data: {
objectId: vdi,
operation,
},
message: 'VDI in use',
}))
export const hostOffline = create(18, ({ host }) => ({
data: {
objectId: host,
},
message: 'host offline',
}))
export const operationBlocked = create(19, ({ objectId, code }) => ({
data: {
objectId,
code,
},
message: 'operation blocked',
}))
export const patchPrecheckFailed = create(20, ({ errorType, patch }) => ({
data: {
objectId: patch,
errorType,
},
message: `patch precheck failed: ${errorType}`,
}))
export const operationFailed = create(21, ({ objectId, code }) => ({
data: {
objectId,
code,
},
message: 'operation failed',
}))
export const missingAuditRecord = create(22, ({ id, nValid }) => ({
data: {
id,
nValid,
},
message: 'missing record',
}))
export const alteredAuditRecord = create(23, ({ id, record, nValid }) => ({
data: {
id,
record,
nValid,
},
message: 'altered record',
}))
// Error 24: a resource set cannot satisfy the request.
// The argument is forwarded as-is: [{ resourceSet, resourceType, available, requested }]
export const notEnoughResources = create(24, shortages => {
  return {
    data: shortages,
    message: 'not enough resources in resource set',
  }
})
// Error 25: an object property does not have the value the operation
// requires (reports both the actual and the expected value).
export const incorrectState = create(25, props => {
  const { actual, expected, object, property } = props
  return {
    data: { actual, expected, object, property },
    message: 'incorrect state',
  }
})
// Error 26: the current plan does not include the requested feature;
// reports the minimum plan that would unlock it.
export const featureUnauthorized = create(26, props => {
  const { featureCode, currentPlan, minPlan } = props
  return {
    data: { featureCode, currentPlan, minPlan },
    message: 'feature Unauthorized',
  }
})

View File

@@ -40,7 +40,7 @@
"xo-lib": "^0.11.1"
},
"devDependencies": {
"@types/node": "^18.0.1",
"@types/node": "^17.0.25",
"@types/through2": "^2.0.31",
"typescript": "^4.6.3"
},

View File

@@ -1,7 +1,7 @@
{
"private": false,
"name": "xo-remote-parser",
"version": "0.9.1",
"version": "0.8.0",
"license": "AGPL-3.0-or-later",
"description": "Parse and format XO remote URLs",
"homepage": "https://github.com/vatesfr/xen-orchestra/tree/master/packages/xo-remote-parser",

View File

@@ -2,54 +2,26 @@ import filter from 'lodash/filter'
import map from 'lodash/map'
import trim from 'lodash/trim'
import trimStart from 'lodash/trimStart'
import queryString from 'querystring'
import urlParser from 'url-parse'
const NFS_RE = /^([^:]+):(?:(\d+):)?([^:?]+)(\?[^?]*)?$/
const SMB_RE = /^([^:]+):(.+)@([^@]+)\\\\([^\0?]+)(?:\0([^?]*))?(\?[^?]*)?$/
const NFS_RE = /^([^:]+):(?:(\d+):)?([^:]+)$/
const SMB_RE = /^([^:]+):(.+)@([^@]+)\\\\([^\0]+)(?:\0(.*))?$/
const sanitizePath = (...paths) => filter(map(paths, s => s && filter(map(s.split('/'), trim)).join('/'))).join('/')
const parseOptionList = (optionList = '') => {
if (optionList.startsWith('?')) {
optionList = optionList.substring(1)
}
const parsed = queryString.parse(optionList)
Object.keys(parsed).forEach(key => {
const val = parsed[key]
// some incorrect values have been saved in users database (introduced by #6270)
parsed[key] = val === '' ? false : JSON.parse(val)
})
return parsed
}
const makeOptionList = options => {
const encoded = {}
Object.keys(options)
// don't save undefined options
.filter(key => options[key] !== undefined)
.forEach(key => {
const val = options[key]
encoded[key] = JSON.stringify(val)
})
return queryString.stringify(encoded)
}
export const parse = string => {
let object = {}
let [type, rest] = string.split('://')
const object = {}
const [type, rest] = string.split('://')
if (type === 'file') {
object.type = 'file'
let optionList
;[rest, optionList] = rest.split('?')
object.path = `/${trimStart(rest, '/')}` // the leading slash has been forgotten on client side first implementation
object = { ...parseOptionList(optionList), ...object }
} else if (type === 'nfs') {
object.type = 'nfs'
let host, port, path, optionList
let host, port, path
// Some users have a remote with a colon in the URL, which breaks the parsing since this commit: https://github.com/vatesfr/xen-orchestra/commit/fb1bf6a1e748b457f2d2b89ba02fa104554c03df
try {
;[, host, port, path, optionList] = NFS_RE.exec(rest)
;[, host, port, path] = NFS_RE.exec(rest)
} catch (err) {
;[host, path] = rest.split(':')
object.invalidUrl = true
@@ -57,18 +29,16 @@ export const parse = string => {
object.host = host
object.port = port
object.path = `/${trimStart(path, '/')}` // takes care of a missing leading slash coming from previous version format
object = { ...parseOptionList(optionList), ...object }
} else if (type === 'smb') {
object.type = 'smb'
const [, username, password, domain, host, path = '', optionList] = SMB_RE.exec(rest)
const [, username, password, domain, host, path = ''] = SMB_RE.exec(rest)
object.host = host
object.path = path
object.domain = domain
object.username = username
object.password = password
object = { ...parseOptionList(optionList), ...object }
} else if (type === 's3' || type === 's3+http') {
const parsed = urlParser(string, false)
const parsed = urlParser(string, true)
object.protocol = parsed.protocol === 's3:' ? 'https' : 'http'
object.type = 's3'
object.region = parsed.hash.length === 0 ? undefined : parsed.hash.slice(1) // remove '#'
@@ -76,12 +46,24 @@ export const parse = string => {
object.path = parsed.pathname
object.username = parsed.username
object.password = decodeURIComponent(parsed.password)
object = { ...parseOptionList(parsed.query), ...object }
const qs = parsed.query
object.allowUnauthorized = qs.allowUnauthorized === 'true'
}
return object
}
export const format = ({ type, host, path, port, username, password, domain, protocol = type, region, ...options }) => {
export const format = ({
type,
host,
path,
port,
username,
password,
domain,
protocol = type,
region,
allowUnauthorized = false,
}) => {
type === 'local' && (type = 'file')
let string = `${type}://`
if (type === 'nfs') {
@@ -103,10 +85,8 @@ export const format = ({ type, host, path, port, username, password, domain, pro
}
string += path
const optionsList = makeOptionList(options)
if (optionsList !== '') {
string += '?' + optionsList
if (type === 's3' && allowUnauthorized === true) {
string += `?allowUnauthorized=true`
}
if (type === 's3' && region !== undefined) {
string += `#${region}`

View File

@@ -15,14 +15,6 @@ const data = deepFreeze({
path: '/var/lib/xoa/backup',
},
},
'file with use vhd directory': {
string: 'file:///var/lib/xoa/backup?useVhdDirectory=true',
object: {
type: 'file',
path: '/var/lib/xoa/backup',
useVhdDirectory: true,
},
},
SMB: {
string: 'smb://Administrator:pas:sw@ord@toto\\\\192.168.100.225\\smb\0',
object: {
@@ -34,18 +26,6 @@ const data = deepFreeze({
password: 'pas:sw@ord',
},
},
'smb with directory': {
string: 'smb://Administrator:pas:sw@ord@toto\\\\192.168.100.225\\smb\0?useVhdDirectory=true',
object: {
type: 'smb',
host: '192.168.100.225\\smb',
path: '',
domain: 'toto',
username: 'Administrator',
password: 'pas:sw@ord',
useVhdDirectory: true,
},
},
NFS: {
string: 'nfs://192.168.100.225:/media/nfs',
object: {
@@ -64,18 +44,8 @@ const data = deepFreeze({
path: '/media/nfs',
},
},
'nfs with vhdDirectory': {
string: 'nfs://192.168.100.225:20:/media/nfs?useVhdDirectory=true',
object: {
type: 'nfs',
host: '192.168.100.225',
port: '20',
path: '/media/nfs',
useVhdDirectory: true,
},
},
S3: {
string: 's3://AKIAS:XSuBupZ0mJlu%2B@s3-us-west-2.amazonaws.com/test-bucket/dir?allowUnauthorized=false',
string: 's3://AKIAS:XSuBupZ0mJlu%2B@s3-us-west-2.amazonaws.com/test-bucket/dir',
object: {
type: 's3',
protocol: 'https',
@@ -100,21 +70,6 @@ const data = deepFreeze({
allowUnauthorized: true,
},
},
'S3 with brotli': {
string:
's3+http://Administrator:password@192.168.100.225/bucket/dir?compressionType=%22brotli%22&compressionOptions=%7B%22level%22%3A1%7D#reg1',
object: {
type: 's3',
host: '192.168.100.225',
protocol: 'http',
path: '/bucket/dir',
region: 'reg1',
username: 'Administrator',
password: 'password',
compressionType: 'brotli',
compressionOptions: { level: 1 },
},
},
})
const parseData = deepFreeze({
@@ -156,6 +111,7 @@ const parseData = deepFreeze({
region: 'reg1',
username: 'Administrator',
password: 'password',
allowUnauthorized: false,
},
},
'S3 accepting self signed certificate': {
@@ -170,6 +126,19 @@ const parseData = deepFreeze({
password: 'password',
allowUnauthorized: true,
},
'S3 with broken allowUnauthorized': {
string: 's3+http://Administrator:password@192.168.100.225/bucket/dir?allowUnauthorized=notTrue#reg1',
object: {
type: 's3',
host: '192.168.100.225',
protocol: 'http',
path: '/bucket/dir',
region: 'reg1',
username: 'Administrator',
password: 'password',
allowUnauthorized: false,
},
},
},
})
@@ -183,6 +152,19 @@ const formatData = deepFreeze({
path: '/var/lib/xoa/backup',
},
},
'S3 with broken allowUnauthorized': {
string: 's3+http://Administrator:password@192.168.100.225/bucket/dir#reg1',
object: {
type: 's3',
host: '192.168.100.225',
protocol: 'http',
path: '/bucket/dir',
region: 'reg1',
username: 'Administrator',
password: 'password',
allowUnauthorized: 'notTrue',
},
},
})
// -------------------------------------------------------------------

View File

@@ -1,6 +1,6 @@
{
"name": "xo-server-audit",
"version": "0.10.0",
"version": "0.9.3",
"license": "AGPL-3.0-or-later",
"description": "Audit plugin for XO-Server",
"keywords": [

View File

@@ -56,10 +56,7 @@ const DEFAULT_BLOCKED_LIST = {
'vm.getHaValues': true,
'vm.stats': true,
'xo.getAllObjects': true,
'xoa.check': true,
'xoa.clearCheckCache': true,
'xoa.getApplianceInfo': true,
'xoa.getHVSupportedVersions': true,
'xoa.licenses.get': true,
'xoa.licenses.getAll': true,
'xoa.licenses.getSelf': true,

View File

@@ -35,7 +35,7 @@
"ensure-array": "^1.0.0",
"exec-promise": "^0.7.0",
"inquirer": "^8.0.0",
"ldapts": "^4.1.0",
"ldapts": "^3.1.1",
"promise-toolbox": "^0.21.0"
},
"devDependencies": {

View File

@@ -29,7 +29,7 @@
"dependencies": {
"@vates/coalesce-calls": "^0.1.0",
"@xen-orchestra/log": "^0.3.0",
"@xen-orchestra/openflow": "^0.1.2",
"@xen-orchestra/openflow": "^0.1.1",
"ipaddr.js": "^2.0.1",
"lodash": "^4.17.11",
"node-openssl-cert": "^0.1.34",

View File

@@ -31,7 +31,7 @@
"app-conf": "^2.1.0",
"babel-plugin-lodash": "^3.2.11",
"golike-defer": "^0.5.1",
"jest": "^28.1.2",
"jest": "^27.3.1",
"lodash": "^4.17.11",
"promise-toolbox": "^0.21.0",
"xo-collection": "^0.5.0",

View File

@@ -88,6 +88,7 @@ snapshotNameLabelTpl = '[XO Backup {job.name}] {vm.name_label}'
listingDebounce = '1 min'
vhdDirectoryCompression = 'brotli'
vhdDirectoryEncryption = '{"algorithm": "aes-256-cbc", "key": "45eb3ffe48dd29e7bd04a7941ba425f2" ,"ivLength": 16}'
# This is a work-around.
#

View File

@@ -1,7 +1,7 @@
{
"private": true,
"name": "xo-server",
"version": "5.98.1",
"version": "5.96.0",
"license": "AGPL-3.0-or-later",
"description": "Server part of Xen-Orchestra",
"keywords": [
@@ -20,7 +20,6 @@
"preferGlobal": true,
"bin": {
"xo-server": "dist/cli.mjs",
"xo-server-db": "dist/db-cli.mjs",
"xo-server-logs": "dist/logs-cli.mjs",
"xo-server-recover-account": "dist/recover-account-cli.mjs"
},
@@ -34,22 +33,23 @@
"@vates/compose": "^2.1.0",
"@vates/decorate-with": "^2.0.0",
"@vates/disposable": "^0.1.1",
"@vates/event-listeners-manager": "^1.0.1",
"@vates/event-listeners-manager": "^1.0.0",
"@vates/multi-key-map": "^0.1.0",
"@vates/parse-duration": "^0.1.1",
"@vates/predicates": "^1.0.0",
"@vates/read-chunk": "^1.0.0",
"@vates/read-chunk": "^0.1.2",
"@xen-orchestra/async-map": "^0.1.2",
"@xen-orchestra/backups": "^0.27.0",
"@xen-orchestra/backups": "^0.25.0",
"@xen-orchestra/cron": "^1.0.6",
"@xen-orchestra/defined": "^0.0.1",
"@xen-orchestra/emit-async": "^1.0.0",
"@xen-orchestra/fs": "^1.1.0",
"@xen-orchestra/fs": "^1.0.3",
"@xen-orchestra/log": "^0.3.0",
"@xen-orchestra/mixin": "^0.1.0",
"@xen-orchestra/mixins": "^0.5.0",
"@xen-orchestra/self-signed": "^0.1.3",
"@xen-orchestra/template": "^0.1.0",
"@xen-orchestra/xapi": "^1.4.0",
"@xen-orchestra/xapi": "^1.2.0",
"ajv": "^8.0.3",
"app-conf": "^2.1.0",
"async-iterator-to-stream": "^1.0.1",
@@ -75,7 +75,7 @@
"fs-extra": "^10.0.0",
"get-stream": "^6.0.0",
"golike-defer": "^0.5.1",
"hashy": "^0.11.1",
"hashy": "^0.11.0",
"helmet": "^3.9.0",
"highland": "^2.11.1",
"http-proxy": "^1.16.2",
@@ -103,12 +103,13 @@
"openpgp": "^5.0.0",
"otplib": "^11.0.0",
"partial-stream": "0.0.0",
"passport": "^0.6.0",
"passport": "^0.5.2",
"passport-local": "^1.0.0",
"promise-toolbox": "^0.21.0",
"proxy-agent": "^5.0.0",
"pug": "^3.0.0",
"pumpify": "^2.0.0",
"pw": "^0.0.4",
"readable-stream": "^3.2.0",
"redis": "^3.0.2",
"schema-inspector": "^2.0.1",
@@ -124,15 +125,15 @@
"unzipper": "^0.10.5",
"uuid": "^8.3.1",
"value-matcher": "^0.2.0",
"vhd-lib": "^3.3.2",
"vhd-lib": "^3.2.0",
"ws": "^8.2.3",
"xdg-basedir": "^5.1.0",
"xen-api": "^1.2.1",
"xo-acl-resolver": "^0.4.1",
"xo-collection": "^0.5.0",
"xo-common": "^0.8.0",
"xo-remote-parser": "^0.9.1",
"xo-vmdk-to-vhd": "^2.4.2"
"xo-remote-parser": "^0.8.0",
"xo-vmdk-to-vhd": "^2.4.1"
},
"devDependencies": {
"@babel/cli": "^7.0.0",

View File

@@ -913,38 +913,3 @@ stats.params = {
stats.resolve = {
sr: ['id', 'SR', 'view'],
}
// -------------------------------------------------------------------
export function enableMaintenanceMode({ sr, vmsToShutdown }) {
return this.getXapiObject(sr).$enableMaintenanceMode({ vmsToShutdown })
}
enableMaintenanceMode.description = 'switch the SR into maintenance mode'
enableMaintenanceMode.params = {
id: { type: 'string' },
vmsToShutdown: { type: 'array', items: { type: 'string' }, optional: true },
}
enableMaintenanceMode.permission = 'admin'
enableMaintenanceMode.resolve = {
sr: ['id', 'SR', 'operate'],
}
export function disableMaintenanceMode({ sr }) {
return this.getXapiObject(sr).$disableMaintenanceMode()
}
disableMaintenanceMode.description = 'disable the maintenance of the SR'
disableMaintenanceMode.params = {
id: { type: 'string' },
}
disableMaintenanceMode.permission = 'admin'
disableMaintenanceMode.resolve = {
sr: ['id', 'SR', 'operate'],
}

View File

@@ -25,17 +25,38 @@ create.params = {
// -------------------------------------------------------------------
async function delete_({ pattern, tokens }) {
await this.deleteAuthenticationTokens({ filter: pattern ?? { id: { __or: tokens } } })
// TODO: an user should be able to delete its own tokens.
async function delete_({ token: id }) {
await this.deleteAuthenticationToken(id)
}
export { delete_ as delete }
delete_.description = 'delete an existing authentication token'
delete_.permission = 'admin'
delete_.params = {
tokens: { type: 'array', optional: true, items: { type: 'string' } },
pattern: { type: 'object', optional: true },
token: { type: 'string' },
}
// -------------------------------------------------------------------
export async function deleteAll({ except }) {
await this.deleteAuthenticationTokens({
filter: {
user_id: this.apiContext.user.id,
id: {
__not: except,
},
},
})
}
deleteAll.description = 'delete all tokens of the current user except the current one'
deleteAll.params = {
except: { type: 'string', optional: true },
}
// -------------------------------------------------------------------

View File

@@ -1,6 +1,7 @@
import Model from './model.mjs'
import { BaseError } from 'make-error'
import { EventEmitter } from 'events'
import { isObject } from './utils.mjs'
import { isObject, map } from './utils.mjs'
// ===================================================================
@@ -13,16 +14,34 @@ export class ModelAlreadyExists extends BaseError {
// ===================================================================
export default class Collection extends EventEmitter {
// Default value for Model.
get Model() {
return Model
}
// Make this property writable.
set Model(Model) {
Object.defineProperty(this, 'Model', {
configurable: true,
enumerale: true,
value: Model,
writable: true,
})
}
async add(models, opts) {
const array = Array.isArray(models)
if (!array) {
models = [models]
}
const { Model } = this
map(models, model => (model instanceof Model ? model.properties : model), models)
models = await this._add(models, opts)
this.emit('add', models)
return array ? models : models[0]
return array ? models : new this.Model(models[0])
}
async first(properties) {
@@ -30,7 +49,8 @@ export default class Collection extends EventEmitter {
properties = properties !== undefined ? { id: properties } : {}
}
return await this._first(properties)
const model = await this._first(properties)
return model && new this.Model(model)
}
async get(properties) {
@@ -41,29 +61,14 @@ export default class Collection extends EventEmitter {
return /* await */ this._get(properties)
}
// remove(id: string)
// remove(ids: string[])
// remove(properties: object)
async remove(properties) {
let ids
if (typeof properties === 'object') {
if (Array.isArray(properties)) {
ids = properties
} else {
ids = (await this.get(properties)).map(_ => _.id)
if (ids.length === 0) {
return false
}
}
} else {
ids = [properties]
async remove(ids) {
if (!Array.isArray(ids)) {
ids = [ids]
}
await this._remove(ids)
this.emit('remove', ids)
// FIXME: returns false if some ids were not removed
return true
}
@@ -73,18 +78,33 @@ export default class Collection extends EventEmitter {
models = [models]
}
models.forEach(model => {
// Missing models should be added not updated.
if (model.id === undefined) {
// FIXME: should not throw an exception but return a rejected promise.
throw new Error('a model without an id cannot be updated')
}
})
const { Model } = this
map(
models,
model => {
if (!(model instanceof Model)) {
// TODO: Problems, we may be mixing in some default
// properties which will overwrite existing ones.
model = new Model(model)
}
const id = model.get('id')
// Missing models should be added not updated.
if (id === undefined) {
// FIXME: should not throw an exception but return a rejected promise.
throw new Error('a model without an id cannot be updated')
}
return model.properties
},
models
)
models = await this._update(models)
this.emit('update', models)
return array ? models : models[0]
return array ? models : new this.Model(models[0])
}
// Methods to override in implementations.

Some files were not shown because too many files have changed in this diff Show More