Compare commits

..

24 Commits

Author SHA1 Message Date
Julien Fontanet
d32c5b31e7 WiP: feat(mixin): support for lazy mixins 2022-06-23 16:38:13 +02:00
Julien Fontanet
667d0724c3 docs(configuration/custom ca): fix systemd path
Introduced by 03a66e469
2022-06-22 11:32:24 +02:00
Julien Fontanet
a49395553a docs(configuration/custom ca): fix systemd path
Introduced by 03a66e469
2022-06-22 11:30:09 +02:00
Julien Fontanet
cce09bd9cc docs(configuration/custom ca): add note regarding XO Proxy 2022-06-22 10:44:25 +02:00
Julien Fontanet
03a66e4690 docs(configuration/custom ca): use separate systemd file
This is better as it avoids conflicts with existing config and is compatible with the way XO Proxy service is handled.
2022-06-22 10:44:25 +02:00
Florent BEAUCHAMP
fd752fee80 feat(backups,vhd-lib): implement copyless merge (#6271) 2022-06-22 10:36:57 +02:00
Julien Fontanet
8a71f84733 chore(xo-server): remove Model wrapping 2022-06-22 10:10:39 +02:00
Julien Fontanet
9ef2c7da4c chore(complex-matcher): remove build step 2022-06-22 09:55:59 +02:00
Julien Fontanet
8975073416 fix(xapi): add missing file
Introduced by b12c17947

Thanks @Danp2.
2022-06-22 00:07:32 +02:00
Julien Fontanet
d1c1378c9d feat(xo-server-db): minimal CLI to browser the DB 2022-06-21 18:11:44 +02:00
Julien Fontanet
7941284a1d feat(xo-server/collection/Redis): set of all indexes 2022-06-21 17:47:56 +02:00
Julien Fontanet
af2d17b7a5 feat(xo-server/collection/Redis): set of all namespaces 2022-06-21 17:29:19 +02:00
Julien Fontanet
3ca2b01d9a feat(xo-server/collection/Redis): assert namespace doesnt contain _ or : 2022-06-21 17:24:10 +02:00
Julien Fontanet
67193a2ab7 chore(xo-server/collection/Redis): replace prefix by namespace 2022-06-21 17:23:25 +02:00
Julien Fontanet
9757aa36de chore(xo-server/collection/Redis): _id field was never used 2022-06-21 17:23:18 +02:00
Julien Fontanet
29854a9f87 feat(xo-server): new sr.{enable,disable}MaintenanceMode methods 2022-06-21 15:07:09 +02:00
Julien Fontanet
b12c179470 feat(xapi): new SR_{enable,disable}MaintenanceMode methods 2022-06-21 15:07:09 +02:00
Julien Fontanet
bbef15e4e4 feat(xo-server/proxy.get{,All}); return associated URL(s) (#6291) 2022-06-21 11:33:25 +02:00
Florent BEAUCHAMP
c483929a0d fix(ova import): drain disk entry completly (#6284) 2022-06-20 16:09:20 +02:00
Julien Fontanet
1741f395dd chore(xo-server/deleteAuthenticationTokens): optimization
Don't use xo-server/deleteAuthenticationToken to avoid fetching the records twice.
2022-06-19 11:37:42 +02:00
Julien Fontanet
0f29262797 chore(value-matcher): remove build step 2022-06-19 11:28:11 +02:00
Julien Fontanet
31ed477b96 feat(xo-server/token.delete): available for non-admins 2022-06-17 11:59:29 +02:00
Julien Fontanet
9e5de5413d feat(xo-server/Collection#remove): accept a pattern 2022-06-17 11:59:29 +02:00
Florent BEAUCHAMP
0f297a81a4 feat(xo-remote-parser): additional parameters in URL (#6270) 2022-06-16 23:14:34 +02:00
66 changed files with 897 additions and 654 deletions

View File

@@ -75,15 +75,11 @@ const debounceResourceFactory = factory =>
}
class RemoteAdapter {
constructor(
handler,
{ debounceResource = res => res, dirMode, vhdDirectoryCompression, vhdDirectoryEncryption } = {}
) {
constructor(handler, { debounceResource = res => res, dirMode, vhdDirectoryCompression } = {}) {
this._debounceResource = debounceResource
this._dirMode = dirMode
this._handler = handler
this._vhdDirectoryCompression = vhdDirectoryCompression
this._vhdDirectoryEncryption = vhdDirectoryEncryption
this._readCacheListVmBackups = synchronized.withKey()(this._readCacheListVmBackups)
}
@@ -205,9 +201,7 @@ class RemoteAdapter {
const isVhdDirectory = vhd instanceof VhdDirectory
return isVhdDirectory
? this.#useVhdDirectory() &&
this.#getCompressionType() === vhd.compressionType &&
this.#getEncryption() === vhd.encryption
? this.#useVhdDirectory() && this.#getCompressionType() === vhd.compressionType
: !this.#useVhdDirectory()
})
}
@@ -297,10 +291,6 @@ class RemoteAdapter {
return this._vhdDirectoryCompression
}
#getEncryption() {
return this._vhdDirectoryEncryption
}
#useVhdDirectory() {
return this.handler.type === 's3'
}
@@ -586,7 +576,6 @@ class RemoteAdapter {
await createVhdDirectoryFromStream(handler, dataPath, input, {
concurrency: 16,
compression: this.#getCompressionType(),
encryption: this.#getEncryption(),
async validator() {
await input.task
return validator.apply(this, arguments)

View File

@@ -71,7 +71,6 @@ class BackupWorker {
debounceResource: this.debounceResource,
dirMode: this.#config.dirMode,
vhdDirectoryCompression: this.#config.vhdDirectoryCompression,
vhdDirectoryEncryption: this.#config.vhdDirectoryEncryption,
})
} finally {
await handler.forget()

View File

@@ -59,30 +59,20 @@ async function mergeVhdChain(chain, { handler, logInfo, remove, merge }) {
let done, total
const handle = setInterval(() => {
if (done !== undefined) {
logInfo(`merging children in progress`, { children, parent, doneCount: done, totalCount: total})
logInfo(`merging children in progress`, { children, parent, doneCount: done, totalCount: total })
}
}, 10e3)
const mergedSize = await mergeVhd(handler, parent, handler, children, {
logInfo,
onProgress({ done: d, total: t }) {
done = d
total = t
},
remove,
})
clearInterval(handle)
const mergeTargetChild = children.shift()
await Promise.all([
VhdAbstract.rename(handler, parent, mergeTargetChild),
asyncMap(children, child => {
logInfo(`the VHD child is already merged`, { child })
if (remove) {
logInfo(`deleting merged VHD child`, { child })
return VhdAbstract.unlink(handler, child)
}
}),
])
return mergedSize
}
}

View File

@@ -2,7 +2,12 @@
const camelCase = require('lodash/camelCase')
const { defineProperties, defineProperty, keys } = Object
const {
defineProperties,
defineProperty,
hasOwn = Function.prototype.call.bind(Object.prototype.hasOwnProperty),
keys,
} = Object
const noop = Function.prototype
const MIXIN_CYCLIC_DESCRIPTOR = {
@@ -13,23 +18,49 @@ const MIXIN_CYCLIC_DESCRIPTOR = {
}
module.exports = function mixin(object, mixins, args) {
const importing = { __proto__: null }
const importers = { __proto__: null }
function instantiateMixin(name, Mixin) {
defineProperty(object, name, MIXIN_CYCLIC_DESCRIPTOR)
const instance = new Mixin(object, ...args)
defineProperty(object, name, {
value: instance,
})
return instance
}
// add lazy property for each of the mixin, this allows mixins to depend on
// one another without any special ordering
const descriptors = {}
const descriptors = {
loadMixin(name) {
if (hasOwn(this, name)) {
return Promise.resolve(this[name])
}
let promise = importing[name]
if (promise === undefined) {
const clean = () => {
delete importing[name]
}
promise = importers[name]().then(Mixin => instantiateMixin(name, Mixin))
promise.then(clean, clean)
importing[name] = promise
}
return promise
},
}
keys(mixins).forEach(name => {
const Mixin = mixins[name]
name = camelCase(name)
descriptors[name] = {
configurable: true,
get: () => {
defineProperty(object, name, MIXIN_CYCLIC_DESCRIPTOR)
const instance = new Mixin(object, ...args)
defineProperty(object, name, {
value: instance,
})
return instance
},
if (Mixin.prototype === undefined) {
importers[name] = Mixin(name)
} else {
descriptors[name] = {
configurable: true,
get: () => instantiateMixin(name, Mixin),
}
}
})
defineProperties(object, descriptors)

View File

@@ -16,7 +16,7 @@
},
"preferGlobal": false,
"engines": {
"node": ">=6"
"node": ">=7.6"
},
"dependencies": {
"bind-property-descriptor": "^2.0.0",

View File

@@ -0,0 +1,9 @@
'use strict'
// TODO: remove when Node >=15.0
module.exports = class AggregateError extends Error {
constructor(errors, message) {
super(message)
this.errors = errors
}
}

View File

@@ -1,12 +1,18 @@
'use strict'
const { asyncMap, asyncMapSettled } = require('@xen-orchestra/async-map')
const { decorateClass } = require('@vates/decorate-with')
const { defer } = require('golike-defer')
const { incorrectState } = require('xo-common/api-errors')
const { VDI_FORMAT_RAW } = require('./index.js')
const peekFooterFromStream = require('vhd-lib/peekFooterFromVhdStream')
const AggregateError = require('./_AggregateError.js')
const { warn } = require('@xen-orchestra/log').createLogger('xo:xapi:sr')
const OC_MAINTENANCE = 'xo:maintenanceState'
class Sr {
async create({
content_type = 'user', // recommended by Citrix
@@ -38,6 +44,108 @@ class Sr {
return ref
}
// Switch the SR to maintenance mode:
// - shutdown all running VMs with a VDI on this SR
// - their UUID is saved into SR.other_config[OC_MAINTENANCE].shutdownVms
// - clean shutdown is attempted, and falls back to a hard shutdown
// - unplug all connected hosts from this SR
async enableMaintenanceMode($defer, ref, { vmsToShutdown = [] } = {}) {
const state = { timestamp: Date.now() }
// will throw if already in maintenance mode
await this.call('SR.add_to_other_config', ref, OC_MAINTENANCE, JSON.stringify(state))
await $defer.onFailure.call(this, 'call', 'SR.remove_from_other_config', ref, OC_MAINTENANCE)
const runningVms = new Map()
const handleVbd = async ref => {
const vmRef = await this.getField('VBD', ref, 'VM')
if (!runningVms.has(vmRef)) {
const power_state = await this.getField('VM', vmRef, 'power_state')
const isPaused = power_state === 'Paused'
if (isPaused || power_state === 'Running') {
runningVms.set(vmRef, isPaused)
}
}
}
await asyncMap(await this.getField('SR', ref, 'VDIs'), async ref => {
await asyncMap(await this.getField('VDI', ref, 'VBDs'), handleVbd)
})
{
const runningVmUuids = await asyncMap(runningVms.keys(), ref => this.getField('VM', ref, 'uuid'))
const set = new Set(vmsToShutdown)
for (const vmUuid of runningVmUuids) {
if (!set.has(vmUuid)) {
throw incorrectState({
actual: vmsToShutdown,
expected: runningVmUuids,
property: 'vmsToShutdown',
})
}
}
}
state.shutdownVms = {}
await asyncMapSettled(runningVms, async ([ref, isPaused]) => {
state.shutdownVms[await this.getField('VM', ref, 'uuid')] = isPaused
try {
await this.callAsync('VM.clean_shutdown', ref)
} catch (error) {
warn('SR_enableMaintenanceMode, VM clean shutdown', { error })
await this.callAsync('VM.hard_shutdown', ref)
}
$defer.onFailure.call(this, 'callAsync', 'VM.start', ref, isPaused, true)
})
state.unpluggedPbds = []
await asyncMapSettled(await this.getField('SR', ref, 'PBDs'), async ref => {
if (await this.getField('PBD', ref, 'currently_attached')) {
state.unpluggedPbds.push(await this.getField('PBD', ref, 'uuid'))
await this.callAsync('PBD.unplug', ref)
$defer.onFailure.call(this, 'callAsync', 'PBD.plug', ref)
}
})
await this.setFieldEntry('SR', ref, 'other_config', OC_MAINTENANCE, JSON.stringify(state))
}
// this method is best effort and will not stop on first error
async disableMaintenanceMode(ref) {
const state = JSON.parse((await this.getField('SR', ref, 'other_config'))[OC_MAINTENANCE])
// will throw if not in maintenance mode
await this.call('SR.remove_from_other_config', ref, OC_MAINTENANCE)
const errors = []
await asyncMap(state.unpluggedPbds, async uuid => {
try {
await this.callAsync('PBD.plug', await this.call('PBD.get_by_uuid', uuid))
} catch (error) {
errors.push(error)
}
})
await asyncMap(Object.entries(state.shutdownVms), async ([uuid, isPaused]) => {
try {
await this.callAsync('VM.start', await this.call('VM.get_by_uuid', uuid), isPaused, true)
} catch (error) {
errors.push(error)
}
})
if (errors.length !== 0) {
throw new AggregateError(errors)
}
}
async importVdi(
$defer,
ref,
@@ -53,4 +161,4 @@ class Sr {
}
module.exports = Sr
decorateClass(Sr, { importVdi: defer })
decorateClass(Sr, { enableMaintenanceMode: defer, importVdi: defer })

View File

@@ -7,12 +7,15 @@
> Users must be able to say: “Nice enhancement, I'm eager to test it”
- [Backup] Merge delta backups without copying data when using VHD directories on NFS/SMB/local remote (PR [#6271](https://github.com/vatesfr/xen-orchestra/pull/6271))
### Bug fixes
> Users must be able to say: “I had this issue, happy to know it's fixed”
- [VDI Import] Fix `this._getOrWaitObject is not a function`
- [VM] Attempting to delete a protected VM should display a modal with the error and the ability to bypass it (PR [#6290](https://github.com/vatesfr/xen-orchestra/pull/6290))
- [OVA Import] Fix import stuck after first disk
### Packages to release
@@ -32,7 +35,11 @@
- @vates/event-listeners-manager patch
- @vates/read-chunk major
- @xen-orchestra/backups minor
- @xen-orchestra/xapi minor
- xo-server patch
- vhd-lib minor
- xo-remote-parser minor
- xo-server minor
- xo-vmdk-to-vhd patch
<!--packages-end-->

View File

@@ -68,9 +68,10 @@ You shouldn't have to change this. It's the path where `xo-web` files are served
If you use certificates signed by an in-house CA for your XCP-ng or XenServer hosts, and want to have Xen Orchestra connect to them without rejection, you can use the [`NODE_EXTRA_CA_CERTS`](https://nodejs.org/api/cli.html#cli_node_extra_ca_certs_file) environment variable.
To enable this option in your XOA, edit the `/etc/systemd/system/xo-server.service` file and add this:
To enable this option in your XOA, create `/etc/systemd/system/xo-server.service.d/ca.conf` with the following content:
```
[Service]
Environment=NODE_EXTRA_CA_CERTS=/usr/local/share/ca-certificates/my-cert.crt
```
@@ -81,6 +82,8 @@ Don't forget to reload `systemd` conf and restart `xo-server`:
# systemctl restart xo-server.service
```
> For XO Proxy, the process is almost the same except the file to create is `/etc/systemd/system/xo-proxy.service.d/ca.conf` and the service to restart is `xo-proxy.service`.
## Redis server
By default, XO-server will try to contact Redis server on `localhost`, with the port `6379`. But you can define whatever you want:

View File

@@ -1,3 +0,0 @@
'use strict'
module.exports = require('../../@xen-orchestra/babel-config')(require('./package.json'))

View File

@@ -1 +0,0 @@
../../scripts/babel-eslintrc.js

View File

@@ -0,0 +1,14 @@
'use strict'

// Benchmark suite for complex-matcher, consumed by an external benchmark
// runner which injects the `benchmark(name, fn)` registration function.

const { parse } = require('./')
const { ast, pattern } = require('./index.fixtures')

module.exports = ({ benchmark }) => {
  // measure parsing of the reference pattern string into an AST
  benchmark('parse', () => {
    parse(pattern)
  })

  // measure serializing the reference AST back to a pattern string
  benchmark('toString', () => {
    ast.toString()
  })
}

View File

@@ -1,8 +1,10 @@
import * as CM from './'
'use strict'
export const pattern = 'foo !"\\\\ \\"" name:|(wonderwoman batman) hasCape? age:32 chi*go /^foo\\/bar\\./i'
const CM = require('./')
export const ast = new CM.And([
exports.pattern = 'foo !"\\\\ \\"" name:|(wonderwoman batman) hasCape? age:32 chi*go /^foo\\/bar\\./i'
exports.ast = new CM.And([
new CM.String('foo'),
new CM.Not(new CM.String('\\ "')),
new CM.Property('name', new CM.Or([new CM.String('wonderwoman'), new CM.String('batman')])),

View File

@@ -1,4 +1,6 @@
import { escapeRegExp, isPlainObject, some } from 'lodash'
'use strict'
const { escapeRegExp, isPlainObject, some } = require('lodash')
// ===================================================================
@@ -23,7 +25,7 @@ class Node {
}
}
export class Null extends Node {
class Null extends Node {
match() {
return true
}
@@ -32,10 +34,11 @@ export class Null extends Node {
return ''
}
}
exports.Null = Null
const formatTerms = terms => terms.map(term => term.toString(true)).join(' ')
export class And extends Node {
class And extends Node {
constructor(children) {
super()
@@ -54,8 +57,9 @@ export class And extends Node {
return isNested ? `(${terms})` : terms
}
}
exports.And = And
export class Comparison extends Node {
class Comparison extends Node {
constructor(operator, value) {
super()
this._comparator = Comparison.comparators[operator]
@@ -71,6 +75,7 @@ export class Comparison extends Node {
return this._operator + String(this._value)
}
}
exports.Comparison = Comparison
Comparison.comparators = {
'>': (a, b) => a > b,
'>=': (a, b) => a >= b,
@@ -78,7 +83,7 @@ Comparison.comparators = {
'<=': (a, b) => a <= b,
}
export class Or extends Node {
class Or extends Node {
constructor(children) {
super()
@@ -96,8 +101,9 @@ export class Or extends Node {
return `|(${formatTerms(this.children)})`
}
}
exports.Or = Or
export class Not extends Node {
class Not extends Node {
constructor(child) {
super()
@@ -112,8 +118,9 @@ export class Not extends Node {
return '!' + this.child.toString(true)
}
}
exports.Not = Not
export class NumberNode extends Node {
exports.Number = exports.NumberNode = class NumberNode extends Node {
constructor(value) {
super()
@@ -133,9 +140,8 @@ export class NumberNode extends Node {
return String(this.value)
}
}
export { NumberNode as Number }
export class NumberOrStringNode extends Node {
class NumberOrStringNode extends Node {
constructor(value) {
super()
@@ -160,9 +166,9 @@ export class NumberOrStringNode extends Node {
return this.value
}
}
export { NumberOrStringNode as NumberOrString }
exports.NumberOrString = exports.NumberOrStringNode = NumberOrStringNode
export class Property extends Node {
class Property extends Node {
constructor(name, child) {
super()
@@ -178,12 +184,13 @@ export class Property extends Node {
return `${formatString(this.name)}:${this.child.toString(true)}`
}
}
exports.Property = Property
const escapeChar = char => '\\' + char
const formatString = value =>
Number.isNaN(+value) ? (isRawString(value) ? value : `"${value.replace(/\\|"/g, escapeChar)}"`) : `"${value}"`
export class GlobPattern extends Node {
class GlobPattern extends Node {
constructor(value) {
// fallback to string node if no wildcard
if (value.indexOf('*') === -1) {
@@ -216,8 +223,9 @@ export class GlobPattern extends Node {
return this.value
}
}
exports.GlobPattern = GlobPattern
export class RegExpNode extends Node {
class RegExpNode extends Node {
constructor(pattern, flags) {
super()
@@ -245,9 +253,9 @@ export class RegExpNode extends Node {
return this.re.toString()
}
}
export { RegExpNode as RegExp }
exports.RegExp = RegExpNode
export class StringNode extends Node {
class StringNode extends Node {
constructor(value) {
super()
@@ -275,9 +283,9 @@ export class StringNode extends Node {
return formatString(this.value)
}
}
export { StringNode as String }
exports.String = exports.StringNode = StringNode
export class TruthyProperty extends Node {
class TruthyProperty extends Node {
constructor(name) {
super()
@@ -292,6 +300,7 @@ export class TruthyProperty extends Node {
return formatString(this.name) + '?'
}
}
exports.TruthyProperty = TruthyProperty
// -------------------------------------------------------------------
@@ -531,7 +540,7 @@ const parser = P.grammar({
),
ws: P.regex(/\s*/),
}).default
export const parse = parser.parse.bind(parser)
exports.parse = parser.parse.bind(parser)
// -------------------------------------------------------------------
@@ -573,7 +582,7 @@ const _getPropertyClauseStrings = ({ child }) => {
}
// Find possible values for property clauses in a and clause.
export const getPropertyClausesStrings = node => {
exports.getPropertyClausesStrings = function getPropertyClausesStrings(node) {
if (!node) {
return {}
}
@@ -605,7 +614,7 @@ export const getPropertyClausesStrings = node => {
// -------------------------------------------------------------------
export const setPropertyClause = (node, name, child) => {
exports.setPropertyClause = function setPropertyClause(node, name, child) {
const property = child && new Property(name, typeof child === 'string' ? new StringNode(child) : child)
if (node === undefined) {

View File

@@ -1,7 +1,9 @@
/* eslint-env jest */
import { ast, pattern } from './index.fixtures'
import {
'use strict'
const { ast, pattern } = require('./index.fixtures')
const {
getPropertyClausesStrings,
GlobPattern,
Null,
@@ -11,7 +13,7 @@ import {
Property,
setPropertyClause,
StringNode,
} from './'
} = require('./')
it('getPropertyClausesStrings', () => {
const tmp = getPropertyClausesStrings(parse('foo bar:baz baz:|(foo bar /^boo$/ /^far$/) foo:/^bar$/'))

View File

@@ -16,7 +16,6 @@
"url": "https://vates.fr"
},
"preferGlobal": false,
"main": "dist/",
"browserslist": [
">2%"
],
@@ -26,21 +25,7 @@
"dependencies": {
"lodash": "^4.17.4"
},
"devDependencies": {
"@babel/cli": "^7.0.0",
"@babel/core": "^7.0.0",
"@babel/preset-env": "^7.0.0",
"babel-plugin-lodash": "^3.3.2",
"cross-env": "^7.0.2",
"rimraf": "^3.0.0"
},
"scripts": {
"build": "cross-env NODE_ENV=production babel --source-maps --out-dir=dist/ src/",
"clean": "rimraf dist/",
"dev": "cross-env NODE_ENV=development babel --watch --source-maps --out-dir=dist/ src/",
"prebuild": "yarn run clean",
"predev": "yarn run prebuild",
"prepublishOnly": "yarn run build",
"postversion": "npm publish"
}
}

View File

@@ -1,12 +0,0 @@
import { parse } from './'
import { ast, pattern } from './index.fixtures'
export default ({ benchmark }) => {
benchmark('parse', () => {
parse(pattern)
})
benchmark('toString', () => {
ast.toString()
})
}

View File

@@ -1,3 +0,0 @@
'use strict'
module.exports = require('../../@xen-orchestra/babel-config')(require('./package.json'))

View File

@@ -1 +0,0 @@
../../scripts/babel-eslintrc.js

View File

@@ -1,3 +1,5 @@
'use strict'
const match = (pattern, value) => {
if (Array.isArray(pattern)) {
return (
@@ -43,4 +45,6 @@ const match = (pattern, value) => {
return pattern === value
}
export const createPredicate = pattern => value => match(pattern, value)
exports.createPredicate = function createPredicate(pattern) {
return value => match(pattern, value)
}

View File

@@ -16,27 +16,13 @@
"url": "https://vates.fr"
},
"preferGlobal": false,
"main": "dist/",
"browserslist": [
">2%"
],
"engines": {
"node": ">=6"
},
"devDependencies": {
"@babel/cli": "^7.0.0",
"@babel/core": "^7.0.0",
"@babel/preset-env": "^7.0.0",
"cross-env": "^7.0.2",
"rimraf": "^3.0.0"
},
"scripts": {
"build": "cross-env NODE_ENV=production babel --source-maps --out-dir=dist/ src/",
"clean": "rimraf dist/",
"dev": "cross-env NODE_ENV=development babel --watch --source-maps --out-dir=dist/ src/",
"prebuild": "yarn run clean",
"predev": "yarn run prebuild",
"prepublishOnly": "yarn run build",
"postversion": "npm publish"
}
}

View File

@@ -104,7 +104,7 @@ exports.VhdAbstract = class VhdAbstract {
*
* @returns {number} the merged data size
*/
async coalesceBlock(child, blockId) {
async mergeBlock(child, blockId) {
const block = await child.readBlock(blockId)
await this.writeEntireBlock(block)
return block.data.length

View File

@@ -53,19 +53,25 @@ test('Can coalesce block', async () => {
const childDirectoryVhd = yield openVhd(handler, childDirectoryName)
await childDirectoryVhd.readBlockAllocationTable()
await parentVhd.coalesceBlock(childFileVhd, 0)
let childBlockData = (await childDirectoryVhd.readBlock(0)).data
await parentVhd.mergeBlock(childDirectoryVhd, 0)
await parentVhd.writeFooter()
await parentVhd.writeBlockAllocationTable()
let parentBlockData = (await parentVhd.readBlock(0)).data
let childBlockData = (await childFileVhd.readBlock(0)).data
// block should be present in parent
expect(parentBlockData.equals(childBlockData)).toEqual(true)
// block should not be in child since it's a rename for vhd directory
await expect(childDirectoryVhd.readBlock(0)).rejects.toThrowError()
await parentVhd.coalesceBlock(childDirectoryVhd, 0)
childBlockData = (await childFileVhd.readBlock(1)).data
await parentVhd.mergeBlock(childFileVhd, 1)
await parentVhd.writeFooter()
await parentVhd.writeBlockAllocationTable()
parentBlockData = (await parentVhd.readBlock(0)).data
childBlockData = (await childDirectoryVhd.readBlock(0)).data
expect(parentBlockData).toEqual(childBlockData)
parentBlockData = (await parentVhd.readBlock(1)).data
// block should be present in parent in case of mixed vhdfile/vhddirectory
expect(parentBlockData.equals(childBlockData)).toEqual(true)
// block should still be child
await childFileVhd.readBlock(1)
})
})

View File

@@ -5,11 +5,58 @@ const { createLogger } = require('@xen-orchestra/log')
const { fuFooter, fuHeader, checksumStruct } = require('../_structs')
const { test, set: setBitmap } = require('../_bitmap')
const { VhdAbstract } = require('./VhdAbstract')
const { _getCompressor: getCompressor } = require('./_compressors')
const { _getEncryptor: getEncryptor } = require('./_encryptors')
const assert = require('assert')
const promisify = require('promise-toolbox/promisify')
const zlib = require('zlib')
const { debug } = createLogger('vhd-lib:VhdDirectory')
const NULL_COMPRESSOR = {
compress: buffer => buffer,
decompress: buffer => buffer,
baseOptions: {},
}
const COMPRESSORS = {
gzip: {
compress: (
gzip => buffer =>
gzip(buffer, { level: zlib.constants.Z_BEST_SPEED })
)(promisify(zlib.gzip)),
decompress: promisify(zlib.gunzip),
},
brotli: {
compress: (
brotliCompress => buffer =>
brotliCompress(buffer, {
params: {
[zlib.constants.BROTLI_PARAM_QUALITY]: zlib.constants.BROTLI_MIN_QUALITY,
},
})
)(promisify(zlib.brotliCompress)),
decompress: promisify(zlib.brotliDecompress),
},
}
// inject identifiers
for (const id of Object.keys(COMPRESSORS)) {
COMPRESSORS[id].id = id
}
function getCompressor(compressorType) {
if (compressorType === undefined) {
return NULL_COMPRESSOR
}
const compressor = COMPRESSORS[compressorType]
if (compressor === undefined) {
throw new Error(`Compression type ${compressorType} is not supported`)
}
return compressor
}
// ===================================================================
// Directory format
// <path>
@@ -30,15 +77,10 @@ exports.VhdDirectory = class VhdDirectory extends VhdAbstract {
#header
footer
#compressor
#encryptor
#encryption
get compressionType() {
return this.#compressor.id
}
get encryption() {
return this.#encryption
}
set header(header) {
this.#header = header
@@ -74,9 +116,9 @@ exports.VhdDirectory = class VhdDirectory extends VhdAbstract {
}
}
static async create(handler, path, { flags = 'wx+', compression, encryption } = {}) {
static async create(handler, path, { flags = 'wx+', compression } = {}) {
await handler.mkdir(path)
const vhd = new VhdDirectory(handler, path, { flags, compression, encryption })
const vhd = new VhdDirectory(handler, path, { flags, compression })
return {
dispose: () => {},
value: vhd,
@@ -89,8 +131,6 @@ exports.VhdDirectory = class VhdDirectory extends VhdAbstract {
this._path = path
this._opts = opts
this.#compressor = getCompressor(opts?.compression)
this.#encryption = opts?.encryption
this.#encryptor = getEncryptor(opts?.encryption)
}
async readBlockAllocationTable() {
@@ -102,16 +142,15 @@ exports.VhdDirectory = class VhdDirectory extends VhdAbstract {
return test(this.#blockTable, blockId)
}
_getChunkPath(partName) {
#getChunkPath(partName) {
return this._path + '/' + partName
}
async _readChunk(partName) {
// here we can implement compression and / or crypto
const buffer = await this._handler.readFile(this._getChunkPath(partName))
const buffer = await this._handler.readFile(this.#getChunkPath(partName))
const decrypted = await this.#encryptor.decrypt(buffer)
const uncompressed = await this.#compressor.decompress(decrypted)
const uncompressed = await this.#compressor.decompress(buffer)
return {
buffer: uncompressed,
}
@@ -125,17 +164,20 @@ exports.VhdDirectory = class VhdDirectory extends VhdAbstract {
)
const compressed = await this.#compressor.compress(buffer)
const encrypted = await this.#encryptor.encrypt(compressed)
return this._handler.outputFile(this._getChunkPath(partName), encrypted, this._opts)
return this._handler.outputFile(this.#getChunkPath(partName), compressed, this._opts)
}
// put block in subdirectories to limit impact when doing directory listing
_getBlockPath(blockId) {
#getBlockPath(blockId) {
const blockPrefix = Math.floor(blockId / 1e3)
const blockSuffix = blockId - blockPrefix * 1e3
return `blocks/${blockPrefix}/${blockSuffix}`
}
_getFullBlockPath(blockId) {
return this.#getChunkPath(this.#getBlockPath(blockId))
}
async readHeaderAndFooter() {
await this.#readChunkFilters()
@@ -162,7 +204,7 @@ exports.VhdDirectory = class VhdDirectory extends VhdAbstract {
if (onlyBitmap) {
throw new Error(`reading 'bitmap of block' ${blockId} in a VhdDirectory is not implemented`)
}
const { buffer } = await this._readChunk(this._getBlockPath(blockId))
const { buffer } = await this._readChunk(this.#getBlockPath(blockId))
return {
id: blockId,
bitmap: buffer.slice(0, this.bitmapSize),
@@ -202,26 +244,39 @@ exports.VhdDirectory = class VhdDirectory extends VhdAbstract {
}
// only works if data are in the same handler
// and if the full block is modified in child ( which is the case whit xcp)
// and if the full block is modified in child ( which is the case with xcp)
// and if the compression type is same on both sides
async coalesceBlock(child, blockId) {
async mergeBlock(child, blockId, isResumingMerge = false) {
const childBlockPath = child._getFullBlockPath?.(blockId)
if (
!(child instanceof VhdDirectory) ||
childBlockPath !== undefined ||
this._handler !== child._handler ||
child.compressionType !== this.compressionType ||
child.encryption !== this.encryption
child.compressionType === 'MIXED'
) {
return super.coalesceBlock(child, blockId)
return super.mergeBlock(child, blockId)
}
await this._handler.copy(
child._getChunkPath(child._getBlockPath(blockId)),
this._getChunkPath(this._getBlockPath(blockId))
)
try {
await this._handler.rename(childBlockPath, this._getFullBlockPath(blockId))
} catch (error) {
if (error.code === 'ENOENT' && isResumingMerge === true) {
// when resuming, the blocks moved since the last merge state write are
// not in the child anymore but it should be ok
// it will throw an error if block is missing in parent
// won't detect if the block was already in parent and is broken/missing in child
const { data } = await this.readBlock(blockId)
assert.strictEqual(data.length, this.header.blockSize)
} else {
throw error
}
}
setBitmap(this.#blockTable, blockId)
return sectorsToBytes(this.sectorsPerBlock)
}
async writeEntireBlock(block) {
await this._writeChunk(this._getBlockPath(block.id), block.buffer)
await this._writeChunk(this.#getBlockPath(block.id), block.buffer)
setBitmap(this.#blockTable, block.id)
}
@@ -236,12 +291,11 @@ exports.VhdDirectory = class VhdDirectory extends VhdAbstract {
async #writeChunkFilters() {
const compressionType = this.compressionType
const encryption = this.encryption
const path = this._path + '/chunk-filters.json'
if (compressionType === undefined && encryption === undefined) {
if (compressionType === undefined) {
await this._handler.unlink(path)
} else {
await this._handler.writeFile(path, JSON.stringify([compressionType, encryption]), { flags: 'w' })
await this._handler.writeFile(path, JSON.stringify([compressionType]), { flags: 'w' })
}
}
@@ -252,9 +306,6 @@ exports.VhdDirectory = class VhdDirectory extends VhdAbstract {
}
throw error
})
assert(chunkFilters.length, 2)
this.#compressor = getCompressor(chunkFilters[0])
this.#encryption = chunkFilters[1]
this.#encryptor = getEncryptor(chunkFilters[1])
}
}

View File

@@ -222,14 +222,14 @@ test('Can coalesce block', async () => {
const childDirectoryVhd = yield openVhd(handler, childDirectoryName)
await childDirectoryVhd.readBlockAllocationTable()
await parentVhd.coalesceBlock(childFileVhd, 0)
await parentVhd.mergeBlock(childFileVhd, 0)
await parentVhd.writeFooter()
await parentVhd.writeBlockAllocationTable()
let parentBlockData = (await parentVhd.readBlock(0)).data
let childBlockData = (await childFileVhd.readBlock(0)).data
expect(parentBlockData).toEqual(childBlockData)
await parentVhd.coalesceBlock(childDirectoryVhd, 0)
await parentVhd.mergeBlock(childDirectoryVhd, 0)
await parentVhd.writeFooter()
await parentVhd.writeBlockAllocationTable()
parentBlockData = (await parentVhd.readBlock(0)).data

View File

@@ -43,6 +43,16 @@ const VhdSynthetic = class VhdSynthetic extends VhdAbstract {
}
}
get compressionType() {
const compressionType = this.vhds[0].compressionType
for (let i = 0; i < this.vhds.length; i++) {
if (compressionType !== this.vhds[i].compressionType) {
return 'MIXED'
}
}
return compressionType
}
/**
* @param {Array<VhdAbstract>} vhds the chain of Vhds used to compute this Vhd, from the deepest child (in position 0), to the root (in the last position)
* only the last one can have any type. Other must have type DISK_TYPES.DIFFERENCING (delta)
@@ -74,17 +84,28 @@ const VhdSynthetic = class VhdSynthetic extends VhdAbstract {
}
}
async readBlock(blockId, onlyBitmap = false) {
#getVhdWithBlock(blockId) {
const index = this.#vhds.findIndex(vhd => vhd.containsBlock(blockId))
assert(index !== -1, `no such block ${blockId}`)
return this.#vhds[index]
}
async readBlock(blockId, onlyBitmap = false) {
// only read the content of the first vhd containing this block
return await this.#vhds[index].readBlock(blockId, onlyBitmap)
return await this.#getVhdWithBlock(blockId).readBlock(blockId, onlyBitmap)
}
async mergeBlock(child, blockId) {
throw new Error(`can't coalesce block into a vhd synthetic`)
}
_readParentLocatorData(id) {
return this.#vhds[this.#vhds.length - 1]._readParentLocatorData(id)
}
_getFullBlockPath(blockId) {
const vhd = this.#getVhdWithBlock(blockId)
return vhd?._getFullBlockPath(blockId)
}
}
// add decorated static method

View File

@@ -1,51 +0,0 @@
'use strict'
const zlib = require('zlib')
const promisify = require('promise-toolbox/promisify')
const NULL_COMPRESSOR = {
compress: buffer => buffer,
decompress: buffer => buffer,
}
const COMPRESSORS = {
gzip: {
compress: (
gzip => buffer =>
gzip(buffer, { level: zlib.constants.Z_BEST_SPEED })
)(promisify(zlib.gzip)),
decompress: promisify(zlib.gunzip),
},
brotli: {
compress: (
brotliCompress => buffer =>
brotliCompress(buffer, {
params: {
[zlib.constants.BROTLI_PARAM_QUALITY]: zlib.constants.BROTLI_MIN_QUALITY,
},
})
)(promisify(zlib.brotliCompress)),
decompress: promisify(zlib.brotliDecompress),
},
}
// inject identifiers
for (const id of Object.keys(COMPRESSORS)) {
COMPRESSORS[id].id = id
}
function getCompressor(compressorType) {
if (compressorType === undefined) {
return NULL_COMPRESSOR
}
const compressor = COMPRESSORS[compressorType]
if (compressor === undefined) {
throw new Error(`Compression type ${compressorType} is not supported`)
}
return compressor
}
exports._getCompressor = getCompressor

View File

@@ -1,22 +0,0 @@
'use strict'
const crypto = require('crypto')
const { _getEncryptor: getEncryptor } = require('./_encryptors')
/* eslint-env jest */
test('can encrypt and decryp AES 256', async () => {
const { encrypt, decrypt } = getEncryptor(
JSON.stringify({
algorithm: 'aes-256-cbc',
key: crypto.randomBytes(32),
ivLength: 16,
})
)
const buffer = crypto.randomBytes(1024)
const encrypted = encrypt(buffer)
const decrypted = decrypt(encrypted)
expect(buffer.equals(decrypted)).toEqual(true)
})

View File

@@ -1,44 +0,0 @@
'use strict'
const crypto = require('crypto')
const secretStore = require('./_secretStore.js')
function getEncryptor(id = '{}') {
const { algorithm, key, ivLength } = secretStore.get(id)
if (algorithm === undefined) {
return {
id: 'NULL_COMPRESSOR',
algorithm,
key,
ivLength,
encrypt: buffer => buffer,
decrypt: buffer => buffer,
}
}
function encrypt(buffer) {
const iv = crypto.randomBytes(ivLength)
const cipher = crypto.createCipheriv(algorithm, Buffer.from(key), iv)
const encrypted = cipher.update(buffer)
return Buffer.concat([iv, encrypted, cipher.final()])
}
function decrypt(buffer) {
const iv = buffer.slice(0, ivLength)
const encrypted = buffer.slice(ivLength)
const decipher = crypto.createDecipheriv(algorithm, Buffer.from(key), iv)
const decrypted = decipher.update(encrypted)
return Buffer.concat([decrypted, decipher.final()])
}
return {
id,
algorithm,
key,
ivLength,
encrypt,
decrypt,
}
}
exports._getEncryptor = getEncryptor

View File

@@ -1,4 +0,0 @@
'use strict'
// @todo : should be moved to his own module
module.exports.get = id => JSON.parse(id || '') || {}

View File

@@ -5,13 +5,8 @@ const { VhdDirectory } = require('./Vhd/VhdDirectory.js')
const { Disposable } = require('promise-toolbox')
const { asyncEach } = require('@vates/async-each')
const buildVhd = Disposable.wrap(async function* (
handler,
path,
inputStream,
{ concurrency, compression, encryption }
) {
const vhd = yield VhdDirectory.create(handler, path, { compression, encryption })
const buildVhd = Disposable.wrap(async function* (handler, path, inputStream, { concurrency, compression }) {
const vhd = yield VhdDirectory.create(handler, path, { compression })
await asyncEach(
parseVhdStream(inputStream),
async function (item) {
@@ -46,10 +41,10 @@ exports.createVhdDirectoryFromStream = async function createVhdDirectoryFromStre
handler,
path,
inputStream,
{ validator, concurrency = 16, compression, encryption } = {}
{ validator, concurrency = 16, compression } = {}
) {
try {
await buildVhd(handler, path, inputStream, { concurrency, compression, encryption })
await buildVhd(handler, path, inputStream, { concurrency, compression })
if (validator !== undefined) {
await validator.call(this, path)
}

View File

@@ -6,7 +6,8 @@ exports.checkVhdChain = require('./checkChain')
exports.createReadableSparseStream = require('./createReadableSparseStream')
exports.createVhdStreamWithLength = require('./createVhdStreamWithLength')
exports.createVhdDirectoryFromStream = require('./createVhdDirectoryFromStream').createVhdDirectoryFromStream
exports.mergeVhd = require('./merge')
const { mergeVhd } = require('./merge')
exports.mergeVhd = mergeVhd
exports.peekFooterFromVhdStream = require('./peekFooterFromVhdStream')
exports.openVhd = require('./openVhd').openVhd
exports.VhdAbstract = require('./Vhd/VhdAbstract').VhdAbstract

View File

@@ -9,6 +9,7 @@ const { getHandler } = require('@xen-orchestra/fs')
const { pFromCallback } = require('promise-toolbox')
const { VhdFile, chainVhd, mergeVhd } = require('./index')
const { _cleanupVhds: cleanupVhds } = require('./merge')
const { checkFile, createRandomFile, convertFromRawToVhd } = require('./tests/utils')
@@ -38,14 +39,15 @@ test('merge works in normal cases', async () => {
await createRandomFile(`${tempDir}/${childRandomFileName}`, mbOfChildren)
await convertFromRawToVhd(`${tempDir}/${childRandomFileName}`, `${tempDir}/${child1FileName}`)
await chainVhd(handler, parentFileName, handler, child1FileName, true)
await checkFile(`${tempDir}/${parentFileName}`)
// merge
await mergeVhd(handler, parentFileName, handler, child1FileName)
// check that vhd is still valid
await checkFile(`${tempDir}/${parentFileName}`)
// check that the merged vhd is still valid
await checkFile(`${tempDir}/${child1FileName}`)
const parentVhd = new VhdFile(handler, parentFileName)
const parentVhd = new VhdFile(handler, child1FileName)
await parentVhd.readHeaderAndFooter()
await parentVhd.readBlockAllocationTable()
@@ -138,11 +140,11 @@ test('it can resume a merge ', async () => {
await mergeVhd(handler, 'parent.vhd', handler, 'child1.vhd')
// reload header footer and block allocation table , they should succed
await parentVhd.readHeaderAndFooter()
await parentVhd.readBlockAllocationTable()
await childVhd.readHeaderAndFooter()
await childVhd.readBlockAllocationTable()
let offset = 0
// check that the data are the same as source
for await (const block of parentVhd.blocks()) {
for await (const block of childVhd.blocks()) {
const blockContent = block.data
// first block is marked as already merged, should not be modified
// second block should come from children
@@ -153,7 +155,7 @@ test('it can resume a merge ', async () => {
await fs.read(fd, buffer, 0, buffer.length, offset)
expect(buffer.equals(blockContent)).toEqual(true)
offset += parentVhd.header.blockSize
offset += childVhd.header.blockSize
}
})
@@ -183,9 +185,9 @@ test('it merge multiple child in one pass ', async () => {
await mergeVhd(handler, parentFileName, handler, [grandChildFileName, childFileName])
// check that vhd is still valid
await checkFile(parentFileName)
await checkFile(grandChildFileName)
const parentVhd = new VhdFile(handler, parentFileName)
const parentVhd = new VhdFile(handler, grandChildFileName)
await parentVhd.readHeaderAndFooter()
await parentVhd.readBlockAllocationTable()
@@ -206,3 +208,21 @@ test('it merge multiple child in one pass ', async () => {
offset += parentVhd.header.blockSize
}
})
test('it cleans vhd mergedfiles', async () => {
const handler = getHandler({ url: `file://${tempDir}` })
await handler.writeFile('parent', 'parentData')
await handler.writeFile('child1', 'child1Data')
await handler.writeFile('child2', 'child2Data')
await handler.writeFile('child3', 'child3Data')
// childPath is from the grand children to the children
await cleanupVhds(handler, 'parent', ['child3', 'child2', 'child1'], { remove: true })
// only child3 should stay, with the data of parent
const [child3, ...other] = await handler.list('.')
expect(other.length).toEqual(0)
expect(child3).toEqual('child3')
expect((await handler.readFile('child3')).toString('utf8')).toEqual('parentData')
})

View File

@@ -12,11 +12,35 @@ const { basename, dirname } = require('path')
const { DISK_TYPES } = require('./_constants')
const { Disposable } = require('promise-toolbox')
const { asyncEach } = require('@vates/async-each')
const { VhdAbstract } = require('./Vhd/VhdAbstract')
const { VhdDirectory } = require('./Vhd/VhdDirectory')
const { VhdSynthetic } = require('./Vhd/VhdSynthetic')
const { asyncMap } = require('@xen-orchestra/async-map')
const { warn } = createLogger('vhd-lib:merge')
// the chain we want to merge is [ ancestor, child1, ..., childn]
// this chain can have grand children or grand parent
//
// 1. Create a VhdSynthetic from all children if more than 1 child are merged
// 2. Merge the resulting vhd into the ancestor
// 2.a if at least one is a file : copy file part from child to parent
// 2.b if they are all vhd directory : move blocks from children to the ancestor
// 3. update the size, uuid and timestamp of the ancestor with those of child n
// 3. Delete all (now) unused VHDs
// 4. Rename the ancestor to to child n
//
// VhdSynthetic
// |
// /‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾\
// [ ancestor, child1, ...,child n-1, child n ]
// | \___________________/ ^
// | | |
// | unused VHDs |
// | |
// \___________rename_____________/
// write the merge progress file at most every `delay` seconds
function makeThrottledWriter(handler, path, delay) {
let lastWrite = Date.now()
return async json => {
@@ -28,21 +52,45 @@ function makeThrottledWriter(handler, path, delay) {
}
}
// make the rename / delete part of the merge process
// will fail if parent and children are in different remote
function cleanupVhds(handler, parent, children, { logInfo = noop, remove = false } = {}) {
if (!Array.isArray(children)) {
children = [children]
}
const mergeTargetChild = children.shift()
return Promise.all([
VhdAbstract.rename(handler, parent, mergeTargetChild),
asyncMap(children, child => {
logInfo(`the VHD child is already merged`, { child })
if (remove) {
logInfo(`deleting merged VHD child`, { child })
return VhdAbstract.unlink(handler, child)
}
}),
])
}
module.exports._cleanupVhds = cleanupVhds
// Merge one or multiple vhd child into vhd parent.
// childPath can be array to create a synthetic VHD from multiple VHDs
// childPath is from the grand children to the children
//
// TODO: rename the VHD file during the merge
module.exports = limitConcurrency(2)(async function merge(
module.exports.mergeVhd = limitConcurrency(2)(async function merge(
parentHandler,
parentPath,
childHandler,
childPath,
{ onProgress = noop } = {}
{ onProgress = noop, logInfo = noop, remove } = {}
) {
const mergeStatePath = dirname(parentPath) + '/' + '.' + basename(parentPath) + '.merge.json'
return await Disposable.use(async function* () {
let mergeState
let isResuming = false
try {
const mergeStateContent = await parentHandler.readFile(mergeStatePath)
mergeState = JSON.parse(mergeStateContent)
@@ -75,6 +123,7 @@ module.exports = limitConcurrency(2)(async function merge(
assert.strictEqual(childVhd.footer.diskType, DISK_TYPES.DIFFERENCING)
assert.strictEqual(childVhd.header.blockSize, parentVhd.header.blockSize)
} else {
isResuming = true
// vhd should not have changed to resume
assert.strictEqual(parentVhd.header.checksum, mergeState.parent.header)
assert.strictEqual(childVhd.header.checksum, mergeState.child.header)
@@ -115,12 +164,12 @@ module.exports = limitConcurrency(2)(async function merge(
let counter = 0
const mergeStateWriter = makeThrottledWriter(parentHandler, mergeStatePath, 10e3)
await asyncEach(
toMerge,
async blockId => {
merging.add(blockId)
mergeState.mergedDataSize += await parentVhd.coalesceBlock(childVhd, blockId)
mergeState.mergedDataSize += await parentVhd.mergeBlock(childVhd, blockId, isResuming)
merging.delete(blockId)
onProgress({
@@ -155,6 +204,8 @@ module.exports = limitConcurrency(2)(async function merge(
// should be a disposable
parentHandler.unlink(mergeStatePath).catch(warn)
await cleanupVhds(parentHandler, parentPath, childPath, { logInfo, remove })
return mergeState.mergedDataSize
})
})

View File

@@ -85,10 +85,9 @@ async function convertToVhdDirectory(rawFileName, vhdFileName, path) {
await fs.mkdir(path + '/blocks/0/')
const stats = await fs.stat(rawFileName)
const sizeMB = stats.size / 1024 / 1024
for (let i = 0, offset = 0; i < sizeMB; i++, offset += blockDataSize) {
for (let i = 0, offset = 0; offset < stats.size; i++, offset += blockDataSize) {
const blockData = Buffer.alloc(blockDataSize)
await fs.read(srcRaw, blockData, offset)
await fs.read(srcRaw, blockData, 0, blockData.length, offset)
await fs.writeFile(path + '/blocks/0/' + i, Buffer.concat([bitmap, blockData]))
}
await fs.close(srcRaw)

View File

@@ -2,26 +2,51 @@ import filter from 'lodash/filter'
import map from 'lodash/map'
import trim from 'lodash/trim'
import trimStart from 'lodash/trimStart'
import queryString from 'querystring'
import urlParser from 'url-parse'
const NFS_RE = /^([^:]+):(?:(\d+):)?([^:]+)$/
const SMB_RE = /^([^:]+):(.+)@([^@]+)\\\\([^\0]+)(?:\0(.*))?$/
const NFS_RE = /^([^:]+):(?:(\d+):)?([^:?]+)(\?[^?]*)?$/
const SMB_RE = /^([^:]+):(.+)@([^@]+)\\\\([^\0?]+)(?:\0([^?]*))?(\?[^?]*)?$/
const sanitizePath = (...paths) => filter(map(paths, s => s && filter(map(s.split('/'), trim)).join('/'))).join('/')
export const parse = string => {
const object = {}
const parseOptionList = (optionList = '') => {
if (optionList.startsWith('?')) {
optionList = optionList.substring(1)
}
const parsed = queryString.parse(optionList)
Object.keys(parsed).forEach(key => {
const val = parsed[key]
parsed[key] = JSON.parse(val)
})
return parsed
}
const [type, rest] = string.split('://')
const makeOptionList = options => {
const encoded = {}
Object.keys(options).forEach(key => {
const val = options[key]
encoded[key] = JSON.stringify(val)
})
return queryString.stringify(encoded)
}
export const parse = string => {
let object = {}
let [type, rest] = string.split('://')
if (type === 'file') {
object.type = 'file'
let optionList
;[rest, optionList] = rest.split('?')
object.path = `/${trimStart(rest, '/')}` // the leading slash has been forgotten on client side first implementation
object = { ...parseOptionList(optionList), ...object }
} else if (type === 'nfs') {
object.type = 'nfs'
let host, port, path
let host, port, path, optionList
// Some users have a remote with a colon in the URL, which breaks the parsing since this commit: https://github.com/vatesfr/xen-orchestra/commit/fb1bf6a1e748b457f2d2b89ba02fa104554c03df
try {
;[, host, port, path] = NFS_RE.exec(rest)
;[, host, port, path, optionList] = NFS_RE.exec(rest)
} catch (err) {
;[host, path] = rest.split(':')
object.invalidUrl = true
@@ -29,16 +54,18 @@ export const parse = string => {
object.host = host
object.port = port
object.path = `/${trimStart(path, '/')}` // takes care of a missing leading slash coming from previous version format
object = { ...parseOptionList(optionList), ...object }
} else if (type === 'smb') {
object.type = 'smb'
const [, username, password, domain, host, path = ''] = SMB_RE.exec(rest)
const [, username, password, domain, host, path = '', optionList] = SMB_RE.exec(rest)
object.host = host
object.path = path
object.domain = domain
object.username = username
object.password = password
object = { ...parseOptionList(optionList), ...object }
} else if (type === 's3' || type === 's3+http') {
const parsed = urlParser(string, true)
const parsed = urlParser(string, false)
object.protocol = parsed.protocol === 's3:' ? 'https' : 'http'
object.type = 's3'
object.region = parsed.hash.length === 0 ? undefined : parsed.hash.slice(1) // remove '#'
@@ -46,24 +73,12 @@ export const parse = string => {
object.path = parsed.pathname
object.username = parsed.username
object.password = decodeURIComponent(parsed.password)
const qs = parsed.query
object.allowUnauthorized = qs.allowUnauthorized === 'true'
object = { ...parseOptionList(parsed.query), ...object }
}
return object
}
export const format = ({
type,
host,
path,
port,
username,
password,
domain,
protocol = type,
region,
allowUnauthorized = false,
}) => {
export const format = ({ type, host, path, port, username, password, domain, protocol = type, region, ...options }) => {
type === 'local' && (type = 'file')
let string = `${type}://`
if (type === 'nfs') {
@@ -85,8 +100,10 @@ export const format = ({
}
string += path
if (type === 's3' && allowUnauthorized === true) {
string += `?allowUnauthorized=true`
const optionsList = makeOptionList(options)
if (optionsList !== '') {
string += '?' + optionsList
}
if (type === 's3' && region !== undefined) {
string += `#${region}`

View File

@@ -15,6 +15,14 @@ const data = deepFreeze({
path: '/var/lib/xoa/backup',
},
},
'file with use vhd directory': {
string: 'file:///var/lib/xoa/backup?useVhdDirectory=true',
object: {
type: 'file',
path: '/var/lib/xoa/backup',
useVhdDirectory: true,
},
},
SMB: {
string: 'smb://Administrator:pas:sw@ord@toto\\\\192.168.100.225\\smb\0',
object: {
@@ -26,6 +34,18 @@ const data = deepFreeze({
password: 'pas:sw@ord',
},
},
'smb with directory': {
string: 'smb://Administrator:pas:sw@ord@toto\\\\192.168.100.225\\smb\0?useVhdDirectory=true',
object: {
type: 'smb',
host: '192.168.100.225\\smb',
path: '',
domain: 'toto',
username: 'Administrator',
password: 'pas:sw@ord',
useVhdDirectory: true,
},
},
NFS: {
string: 'nfs://192.168.100.225:/media/nfs',
object: {
@@ -44,8 +64,18 @@ const data = deepFreeze({
path: '/media/nfs',
},
},
'nfs with vhdDirectory': {
string: 'nfs://192.168.100.225:20:/media/nfs?useVhdDirectory=true',
object: {
type: 'nfs',
host: '192.168.100.225',
port: '20',
path: '/media/nfs',
useVhdDirectory: true,
},
},
S3: {
string: 's3://AKIAS:XSuBupZ0mJlu%2B@s3-us-west-2.amazonaws.com/test-bucket/dir',
string: 's3://AKIAS:XSuBupZ0mJlu%2B@s3-us-west-2.amazonaws.com/test-bucket/dir?allowUnauthorized=false',
object: {
type: 's3',
protocol: 'https',
@@ -70,6 +100,21 @@ const data = deepFreeze({
allowUnauthorized: true,
},
},
'S3 with brotli': {
string:
's3+http://Administrator:password@192.168.100.225/bucket/dir?compressionType=%22brotli%22&compressionOptions=%7B%22level%22%3A1%7D#reg1',
object: {
type: 's3',
host: '192.168.100.225',
protocol: 'http',
path: '/bucket/dir',
region: 'reg1',
username: 'Administrator',
password: 'password',
compressionType: 'brotli',
compressionOptions: { level: 1 },
},
},
})
const parseData = deepFreeze({
@@ -111,7 +156,6 @@ const parseData = deepFreeze({
region: 'reg1',
username: 'Administrator',
password: 'password',
allowUnauthorized: false,
},
},
'S3 accepting self signed certificate': {
@@ -126,19 +170,6 @@ const parseData = deepFreeze({
password: 'password',
allowUnauthorized: true,
},
'S3 with broken allowUnauthorized': {
string: 's3+http://Administrator:password@192.168.100.225/bucket/dir?allowUnauthorized=notTrue#reg1',
object: {
type: 's3',
host: '192.168.100.225',
protocol: 'http',
path: '/bucket/dir',
region: 'reg1',
username: 'Administrator',
password: 'password',
allowUnauthorized: false,
},
},
},
})
@@ -152,19 +183,6 @@ const formatData = deepFreeze({
path: '/var/lib/xoa/backup',
},
},
'S3 with broken allowUnauthorized': {
string: 's3+http://Administrator:password@192.168.100.225/bucket/dir#reg1',
object: {
type: 's3',
host: '192.168.100.225',
protocol: 'http',
path: '/bucket/dir',
region: 'reg1',
username: 'Administrator',
password: 'password',
allowUnauthorized: 'notTrue',
},
},
})
// -------------------------------------------------------------------

View File

@@ -88,7 +88,6 @@ snapshotNameLabelTpl = '[XO Backup {job.name}] {vm.name_label}'
listingDebounce = '1 min'
vhdDirectoryCompression = 'brotli'
vhdDirectoryEncryption = '{"algorithm": "aes-256-cbc", "key": "45eb3ffe48dd29e7bd04a7941ba425f2" ,"ivLength": 16}'
# This is a work-around.
#

View File

@@ -20,6 +20,7 @@
"preferGlobal": true,
"bin": {
"xo-server": "dist/cli.mjs",
"xo-server-db": "dist/db-cli.mjs",
"xo-server-logs": "dist/logs-cli.mjs",
"xo-server-recover-account": "dist/recover-account-cli.mjs"
},

View File

@@ -913,3 +913,38 @@ stats.params = {
stats.resolve = {
sr: ['id', 'SR', 'view'],
}
// -------------------------------------------------------------------
export function enableMaintenanceMode({ sr, vmsToShutdown }) {
return this.getXapiObject(sr).$enableMaintenanceMode({ vmsToShutdown })
}
enableMaintenanceMode.description = 'switch the SR into maintenance mode'
enableMaintenanceMode.params = {
id: { type: 'string' },
vmsToShutdown: { type: 'array', items: { type: 'string' }, optional: true },
}
enableMaintenanceMode.permission = 'admin'
enableMaintenanceMode.resolve = {
sr: ['id', 'SR', 'operate'],
}
export function disableMaintenanceMode({ sr }) {
return this.getXapiObject(sr).$disableMaintenanceMode()
}
disableMaintenanceMode.description = 'disable the maintenance of the SR'
disableMaintenanceMode.params = {
id: { type: 'string' },
}
disableMaintenanceMode.permission = 'admin'
disableMaintenanceMode.resolve = {
sr: ['id', 'SR', 'operate'],
}

View File

@@ -25,7 +25,6 @@ create.params = {
// -------------------------------------------------------------------
// TODO: an user should be able to delete its own tokens.
async function delete_({ token: id }) {
await this.deleteAuthenticationToken(id)
}
@@ -34,8 +33,6 @@ export { delete_ as delete }
delete_.description = 'delete an existing authentication token'
delete_.permission = 'admin'
delete_.params = {
token: { type: 'string' },
}

View File

@@ -1,7 +1,6 @@
import Model from './model.mjs'
import { BaseError } from 'make-error'
import { EventEmitter } from 'events'
import { isObject, map } from './utils.mjs'
import { isObject } from './utils.mjs'
// ===================================================================
@@ -14,34 +13,16 @@ export class ModelAlreadyExists extends BaseError {
// ===================================================================
export default class Collection extends EventEmitter {
// Default value for Model.
get Model() {
return Model
}
// Make this property writable.
set Model(Model) {
Object.defineProperty(this, 'Model', {
configurable: true,
enumerale: true,
value: Model,
writable: true,
})
}
async add(models, opts) {
const array = Array.isArray(models)
if (!array) {
models = [models]
}
const { Model } = this
map(models, model => (model instanceof Model ? model.properties : model), models)
models = await this._add(models, opts)
this.emit('add', models)
return array ? models : new this.Model(models[0])
return array ? models : models[0]
}
async first(properties) {
@@ -49,8 +30,7 @@ export default class Collection extends EventEmitter {
properties = properties !== undefined ? { id: properties } : {}
}
const model = await this._first(properties)
return model && new this.Model(model)
return await this._first(properties)
}
async get(properties) {
@@ -61,14 +41,29 @@ export default class Collection extends EventEmitter {
return /* await */ this._get(properties)
}
async remove(ids) {
if (!Array.isArray(ids)) {
ids = [ids]
// remove(id: string)
// remove(ids: string[])
// remove(properties: object)
async remove(properties) {
let ids
if (typeof properties === 'object') {
if (Array.isArray(properties)) {
ids = properties
} else {
ids = (await this.get(properties)).map(_ => _.id)
if (ids.length === 0) {
return false
}
}
} else {
ids = [properties]
}
await this._remove(ids)
this.emit('remove', ids)
// FIXME: returns false if some ids were not removed
return true
}
@@ -78,33 +73,18 @@ export default class Collection extends EventEmitter {
models = [models]
}
const { Model } = this
map(
models,
model => {
if (!(model instanceof Model)) {
// TODO: Problems, we may be mixing in some default
// properties which will overwrite existing ones.
model = new Model(model)
}
const id = model.get('id')
// Missing models should be added not updated.
if (id === undefined) {
// FIXME: should not throw an exception but return a rejected promise.
throw new Error('a model without an id cannot be updated')
}
return model.properties
},
models
)
models.forEach(model => {
// Missing models should be added not updated.
if (model.id === undefined) {
// FIXME: should not throw an exception but return a rejected promise.
throw new Error('a model without an id cannot be updated')
}
})
models = await this._update(models)
this.emit('update', models)
return array ? models : new this.Model(models[0])
return array ? models : models[0]
}
// Methods to override in implementations.

View File

@@ -1,3 +1,4 @@
import assert from 'assert'
import asyncMapSettled from '@xen-orchestra/async-map/legacy.js'
import difference from 'lodash/difference.js'
import filter from 'lodash/filter.js'
@@ -16,7 +17,8 @@ import Collection, { ModelAlreadyExists } from '../collection.mjs'
// ///////////////////////////////////////////////////////////////////
// Data model:
// - prefix +'_id': value of the last generated identifier;
// - 'xo::namespaces': set of all available namespaces
// - prefix + '::indexes': set containing all indexes;
// - prefix +'_ids': set containing identifier of all models;
// - prefix +'_'+ index +':' + lowerCase(value): set of identifiers
// which have value for the given index.
@@ -34,13 +36,20 @@ import Collection, { ModelAlreadyExists } from '../collection.mjs'
const VERSION = '20170905'
export default class Redis extends Collection {
constructor({ connection, indexes = [], prefix, uri }) {
constructor({ connection, indexes = [], namespace, uri }) {
super()
assert(!namespace.includes(':'), 'namespace must not contains ":": ' + namespace)
assert(!namespace.includes('_'), 'namespace must not contains "_": ' + namespace)
const prefix = 'xo:' + namespace
this.indexes = indexes
this.prefix = prefix
const redis = (this.redis = promisifyAll(connection || createRedisClient(uri)))
redis.sadd('xo::namespaces', namespace)::ignoreErrors()
const key = `${prefix}:version`
redis
.get(key)
@@ -61,28 +70,32 @@ export default class Redis extends Collection {
::ignoreErrors()
}
rebuildIndexes() {
async rebuildIndexes() {
const { indexes, prefix, redis } = this
await redis.del(`${prefix}::indexes`)
if (indexes.length === 0) {
return Promise.resolve()
return
}
const idsIndex = `${prefix}_ids`
return asyncMapSettled(indexes, index =>
await redis.sadd(`${prefix}::indexes`, indexes)
await asyncMapSettled(indexes, index =>
redis.keys(`${prefix}_${index}:*`).then(keys => keys.length !== 0 && redis.del(keys))
).then(() =>
asyncMapSettled(redis.smembers(idsIndex), id =>
redis.hgetall(`${prefix}:${id}`).then(values =>
values == null
? redis.srem(idsIndex, id) // entry no longer exists
: asyncMapSettled(indexes, index => {
const value = values[index]
if (value !== undefined) {
return redis.sadd(`${prefix}_${index}:${String(value).toLowerCase()}`, id)
}
})
)
)
const idsIndex = `${prefix}_ids`
await asyncMapSettled(redis.smembers(idsIndex), id =>
redis.hgetall(`${prefix}:${id}`).then(values =>
values == null
? redis.srem(idsIndex, id) // entry no longer exists
: asyncMapSettled(indexes, index => {
const value = values[index]
if (value !== undefined) {
return redis.sadd(`${prefix}_${index}:${String(value).toLowerCase()}`, id)
}
})
)
)
}

117
packages/xo-server/src/db-cli.mjs Executable file
View File

@@ -0,0 +1,117 @@
#!/usr/bin/env node
import { createClient as createRedisClient } from 'redis'
import appConf from 'app-conf'
import fromCallback from 'promise-toolbox/fromCallback'
import fromEvent from 'promise-toolbox/fromEvent'
import RedisCollection from './collection/redis.mjs'
function assert(test, message) {
if (!test) {
console.error(message)
process.exit(1)
}
}
async function getDb(namespace) {
const { connection } = this
return new RedisCollection({
connection,
indexes: await fromCallback.call(connection, 'smembers', `xo:${namespace}::indexes`),
namespace,
})
}
function parseParam(args) {
const params = {}
for (const arg of args) {
const i = arg.indexOf('=')
if (i === -1) {
throw new Error('invalid arg: ' + arg)
}
const name = arg.slice(0, i)
let value = arg.slice(i + 1)
if (value.startsWith('json:')) {
value = JSON.parse(value.slice(5))
}
params[name] = value
}
return params
}
function sortKeys(object) {
const result = {}
const keys = Object.keys(object).sort()
for (const key of keys) {
result[key] = object[key]
}
return result
}
const COMMANDS = {
async ls(args) {
if (args.length === 0) {
const namespaces = await fromCallback.call(this.connection, 'smembers', 'xo::namespaces')
namespaces.sort()
for (const ns of namespaces) {
console.log(ns)
}
} else {
const db = await this.getDb(args.shift())
const records = await db.get(parseParam(args))
process.stdout.write(`${records.length} record(s) found\n`)
for (const record of records) {
console.log(sortKeys(record))
}
}
},
}
async function main(args) {
if (args.length === 0 || args.includes('-h') || args.includes('--help')) {
process.stdout.write(`
xo-server-db --help, -h
Display this help message.
xo-server-logs ls
List the available namespaces.
xo-server-logs ls <namespace> [<pattern>...]
List all entries in the given namespace.
<pattern>
Patterns can be used to filter entries.
Patterns have the following format \`<field>=<value>\`.
`)
return
}
const config = await appConf.load('xo-server', {
appDir: new URL('..', import.meta.url).pathname,
ignoreUnknownFormats: true,
})
const { renameCommands, socket: path, uri: url } = config.redis || {}
const connection = createRedisClient({
path,
rename_commands: renameCommands,
url,
})
try {
const fn = COMMANDS[args.shift()]
assert(fn !== undefined, 'command must be one of: ' + Object.keys(COMMANDS).join(', '))
await fn.call({ connection, getDb }, args)
} finally {
connection.quit()
await fromEvent(connection, 'end')
}
}
main(process.argv.slice(2))

View File

@@ -1,61 +0,0 @@
import { EventEmitter } from 'events'
import { forEach, isEmpty } from './utils.mjs'
// ===================================================================
export default class Model extends EventEmitter {
constructor(properties) {
super()
this.properties = { ...this.default }
if (properties) {
this.set(properties)
}
}
// Get a property.
get(name, def) {
const value = this.properties[name]
return value !== undefined ? value : def
}
// Check whether a property exists.
has(name) {
return this.properties[name] !== undefined
}
// Set properties.
set(properties, value) {
// This method can also be used with two arguments to set a single
// property.
if (typeof properties === 'string') {
properties = { [properties]: value }
}
const previous = {}
forEach(properties, (value, name) => {
const prev = this.properties[name]
if (value !== prev) {
previous[name] = prev
if (value === undefined) {
delete this.properties[name]
} else {
this.properties[name] = value
}
}
})
if (!isEmpty(previous)) {
this.emit('change', previous)
forEach(previous, (value, name) => {
this.emit('change:' + name, value)
})
}
}
}

View File

@@ -1,5 +1,4 @@
import Collection from '../collection/redis.mjs'
import Model from '../model.mjs'
import { forEach, multiKeyHash } from '../utils.mjs'
// ===================================================================
@@ -10,27 +9,16 @@ const DEFAULT_ACTION = 'admin'
// ===================================================================
export default class Acl extends Model {}
// -------------------------------------------------------------------
export class Acls extends Collection {
get Model() {
return Acl
}
create(subject, object, action) {
return multiKeyHash(subject, object, action)
.then(
hash =>
new Acl({
id: hash,
subject,
object,
action,
})
)
.then(acl => this.add(acl))
return multiKeyHash(subject, object, action).then(hash =>
this.add({
id: hash,
subject,
object,
action,
})
)
}
delete(subject, object, action) {

View File

@@ -1,7 +1,6 @@
import isEmpty from 'lodash/isEmpty.js'
import Collection from '../collection/redis.mjs'
import Model from '../model.mjs'
import { forEach } from '../utils.mjs'
@@ -9,17 +8,9 @@ import { parseProp } from './utils.mjs'
// ===================================================================
export default class Group extends Model {}
// ===================================================================
export class Groups extends Collection {
get Model() {
return Group
}
create(name, provider, providerGroupId) {
return this.add(new Group({ name, provider, providerGroupId }))
return this.add({ name, provider, providerGroupId })
}
async save(group) {

View File

@@ -1,5 +1,4 @@
import Collection from '../collection/redis.mjs'
import Model from '../model.mjs'
import { createLogger } from '@xen-orchestra/log'
import { forEach } from '../utils.mjs'
@@ -7,15 +6,7 @@ const log = createLogger('xo:plugin-metadata')
// ===================================================================
export default class PluginMetadata extends Model {}
// ===================================================================
export class PluginsMetadata extends Collection {
get Model() {
return PluginMetadata
}
async save({ id, autoload, configuration }) {
return /* await */ this.update({
id,
@@ -31,7 +22,7 @@ export class PluginsMetadata extends Collection {
}
return /* await */ this.save({
...pluginMetadata.properties,
...pluginMetadata,
...data,
})
}

View File

@@ -1,18 +1,11 @@
import Collection from '../collection/redis.mjs'
import Model from '../model.mjs'
import { forEach, serializeError } from '../utils.mjs'
import { parseProp } from './utils.mjs'
// ===================================================================
export default class Remote extends Model {}
export class Remotes extends Collection {
get Model() {
return Remote
}
async get(properties) {
const remotes = await super.get(properties)
forEach(remotes, remote => {

View File

@@ -1,20 +1,11 @@
import Collection from '../collection/redis.mjs'
import Model from '../model.mjs'
import { forEach, serializeError } from '../utils.mjs'
import { parseProp } from './utils.mjs'
// ===================================================================
export default class Server extends Model {}
// -------------------------------------------------------------------
export class Servers extends Collection {
get Model() {
return Server
}
async create(params) {
const { host } = params

View File

@@ -1,10 +1,5 @@
import Collection from '../collection/redis.mjs'
import Model from '../model.mjs'
// ===================================================================
// A single authentication token; plain Model wrapper with no extra behavior.
export default class Token extends Model {}
// -------------------------------------------------------------------
// Persistent collection of tokens, stored via the Redis-backed Collection.
export class Tokens extends Collection {}

View File

@@ -1,20 +1,11 @@
import isEmpty from 'lodash/isEmpty.js'
import Collection from '../collection/redis.mjs'
import Model from '../model.mjs'
import { parseProp } from './utils.mjs'
// ===================================================================
export default class User extends Model {}
User.prototype.default = {
permission: 'none',
}
// -------------------------------------------------------------------
const serialize = user => {
let tmp
return {
@@ -26,6 +17,7 @@ const serialize = user => {
}
const deserialize = user => ({
permission: 'none',
...user,
authProviders: parseProp('user', user, 'authProviders', undefined),
groups: parseProp('user', user, 'groups', []),
@@ -33,10 +25,6 @@ const deserialize = user => ({
})
export class Users extends Collection {
get Model() {
return User
}
async create(properties) {
const { email } = properties
@@ -45,11 +33,8 @@ export class Users extends Collection {
throw new Error(`the user ${email} already exists`)
}
// Create the user object.
const user = new User(serialize(properties))
// Adds the user to the collection.
return /* await */ this.add(user)
return /* await */ this.add(serialize(properties))
}
async save(user) {

View File

@@ -55,7 +55,7 @@ export function extractProperty(obj, prop) {
// -------------------------------------------------------------------
export const getUserPublicProperties = user =>
pick(user.properties || user, 'authProviders', 'id', 'email', 'groups', 'permission', 'preferences')
pick(user, 'authProviders', 'id', 'email', 'groups', 'permission', 'preferences')
// -------------------------------------------------------------------

View File

@@ -762,7 +762,8 @@ export default class Xapi extends XapiBase {
stream,
table.grainLogicalAddressList,
table.grainFileOffsetList,
compression[entry.name] === 'gzip'
compression[entry.name] === 'gzip',
entry.size
)
try {
await vdi.$importContent(vhdStream, { format: VDI_FORMAT_VHD })

View File

@@ -14,7 +14,7 @@ export default class {
const aclsDb = (this._acls = new Acls({
connection: app._redis,
prefix: 'xo:acl',
namespace: 'acl',
indexes: ['subject', 'object'],
}))

View File

@@ -5,7 +5,7 @@ import { invalidCredentials, noSuchObject } from 'xo-common/api-errors.js'
import { parseDuration } from '@vates/parse-duration'
import patch from '../patch.mjs'
import Token, { Tokens } from '../models/token.mjs'
import { Tokens } from '../models/token.mjs'
import { forEach, generateToken } from '../utils.mjs'
// ===================================================================
@@ -39,7 +39,7 @@ export default class {
// Creates persistent collections.
const tokensDb = (this._tokens = new Tokens({
connection: app._redis,
prefix: 'xo:token',
namespace: 'token',
indexes: ['user_id'],
}))
@@ -183,41 +183,52 @@ export default class {
}
const now = Date.now()
const token = new Token({
const token = {
created_at: now,
description,
id: await generateToken(),
user_id: userId,
expiration: now + duration,
})
}
await this._tokens.add(token)
// TODO: use plain properties directly.
return token.properties
return token
}
async deleteAuthenticationToken(id) {
if (!(await this._tokens.remove(id))) {
let predicate
const { apiContext } = this._app
if (apiContext === undefined || apiContext.permission === 'admin') {
predicate = id
} else {
predicate = { id, user_id: apiContext.user.id }
}
if (!(await this._tokens.remove(predicate))) {
throw noSuchAuthenticationToken(id)
}
}
async deleteAuthenticationTokens({ filter }) {
return Promise.all(
(await this._tokens.get()).filter(createPredicate(filter)).map(({ id }) => this.deleteAuthenticationToken(id))
)
let predicate
const { apiContext } = this._app
if (apiContext !== undefined && apiContext.permission !== 'admin') {
predicate = { user_id: apiContext.user.id }
}
const db = this._tokens
return db.remove((await db.get(predicate)).filter(createPredicate(filter)).map(({ id }) => id))
}
async getAuthenticationToken(properties) {
const id = typeof properties === 'string' ? properties : properties.id
let token = await this._tokens.first(properties)
const token = await this._tokens.first(properties)
if (token === undefined) {
throw noSuchAuthenticationToken(id)
}
token = token.properties
unserialize(token)
if (!(token.expiration > Date.now())) {

View File

@@ -14,7 +14,7 @@ export default class {
this._app = app
const db = (this._db = new CloudConfigs({
connection: app._redis,
prefix: 'xo:cloudConfig',
namespace: 'cloudConfig',
}))
app.hooks.on('clean', () => db.rebuildIndexes())
@@ -28,7 +28,7 @@ export default class {
}
createCloudConfig(cloudConfig) {
return this._db.add(cloudConfig).properties
return this._db.add(cloudConfig)
}
async updateCloudConfig({ id, name, template }) {
@@ -54,6 +54,6 @@ export default class {
if (cloudConfig === undefined) {
throw noSuchObject(id, 'cloud config')
}
return cloudConfig.properties
return cloudConfig
}
}

View File

@@ -49,7 +49,7 @@ const serialize = job => {
class JobsDb extends Collection {
async create(job) {
return normalize((await this.add(serialize(job))).properties)
return normalize(await this.add(serialize(job)))
}
async save(job) {
@@ -75,7 +75,7 @@ export default class Jobs {
const executors = (this._executors = { __proto__: null })
const jobsDb = (this._jobs = new JobsDb({
connection: app._redis,
prefix: 'xo:job',
namespace: 'job',
indexes: ['user_id', 'key'],
}))
this._logger = undefined
@@ -137,12 +137,11 @@ export default class Jobs {
}
async getJob(id, type) {
let job = await this._jobs.first(id)
if (job === undefined || (type !== undefined && job.properties.type !== type)) {
const job = await this._jobs.first(id)
if (job === undefined || (type !== undefined && job.type !== type)) {
throw noSuchObject(id, 'job')
}
job = job.properties
job.runId = this._runningJobs[id]
return job

View File

@@ -21,7 +21,7 @@ export default class {
this._pluginsMetadata = new PluginsMetadata({
connection: app._redis,
prefix: 'xo:plugin-metadata',
namespace: 'plugin-metadata',
})
app.hooks.on('start', () => {
@@ -49,9 +49,8 @@ export default class {
return plugin
}
async _getPluginMetadata(id) {
const metadata = await this._pluginsMetadata.first(id)
return metadata?.properties
_getPluginMetadata(id) {
return this._pluginsMetadata.first(id)
}
async registerPlugin(name, instance, configurationSchema, configurationPresets, description, testSchema, version) {

View File

@@ -29,7 +29,6 @@ import { generateToken } from '../utils.mjs'
const DEBOUNCE_TIME_PROXY_STATE = 60000
const extractProperties = _ => _.properties
const synchronizedWrite = synchronized()
const log = createLogger('xo:proxy')
@@ -44,6 +43,31 @@ const assertProxyAddress = (proxy, address) => {
throw error
}
// Attach a `url` property to `proxy`, embedding its authentication token as
// the URL's username, and remove the now-redundant `authenticationToken`
// property.
//
// The host comes from `proxy.address` when set, otherwise from the IP of the
// proxy VM (looked up through the XO app bound to `this`). When no host can
// be determined, `proxy` is left untouched.
function addProxyUrl(proxy) {
  const url = new URL('https://localhost')
  url.username = proxy.authenticationToken

  const { address } = proxy
  if (address === undefined) {
    try {
      const vm = this._app.getXapiObject(proxy.vmUuid, 'VM')
      const hostname = extractIpFromVmNetworks(vm.$guest_metrics?.networks)
      if (hostname === undefined) {
        // VM has no usable IP (yet): keep the proxy record as is.
        return
      }
      url.hostname = hostname
    } catch (error) {
      log.warn('addProxyUrl', { error, proxy })
      return
    }
  } else {
    url.host = address
  }

  delete proxy.authenticationToken
  proxy.url = url.href
}
export default class Proxy {
constructor(app) {
this._app = app
@@ -57,7 +81,7 @@ export default class Proxy {
const db = (this._db = new Collection({
connection: app._redis,
indexes: ['address', 'vmUuid'],
prefix: 'xo:proxy',
namespace: 'proxy',
}))
app.hooks.on('clean', () => db.rebuildIndexes())
@@ -106,7 +130,7 @@ export default class Proxy {
}
async unregisterProxy(id) {
const { vmUuid } = await this.getProxy(id)
const { vmUuid } = await this._getProxy(id)
await this._db.remove(id)
@@ -122,7 +146,7 @@ export default class Proxy {
}
async destroyProxy(id) {
const { vmUuid } = await this.getProxy(id)
const { vmUuid } = await this._getProxy(id)
if (vmUuid !== undefined) {
try {
await this._app.getXapiObject(vmUuid).$destroy()
@@ -135,28 +159,37 @@ export default class Proxy {
return this.unregisterProxy(id)
}
async getProxy(id) {
async _getProxy(id) {
const proxy = await this._db.first(id)
if (proxy === undefined) {
throw noSuchObject(id, 'proxy')
}
return extractProperties(proxy)
return proxy
}
getAllProxies() {
return this._db.get()
async getProxy(id) {
const proxy = await this._getProxy(id)
addProxyUrl.call(this, proxy)
return proxy
}
async getAllProxies() {
const proxies = await this._db.get()
proxies.forEach(addProxyUrl, this)
return proxies
}
@synchronizedWrite
async updateProxy(id, { address, authenticationToken, name, vmUuid }) {
const proxy = await this.getProxy(id)
const proxy = await this._getProxy(id)
await this._throwIfRegistered(
proxy.address !== address ? address : undefined,
proxy.vm !== vmUuid ? vmUuid : undefined
)
patch(proxy, { address, authenticationToken, name, vmUuid })
return this._db.update(proxy).then(extractProperties)
return this._db.update(proxy)
}
async upgradeProxyAppliance(id, ignoreRunningJobs = false) {
@@ -183,7 +216,7 @@ export default class Proxy {
xenstoreData['vm-data/xoa-updater-channel'] = JSON.stringify(await this._getChannel())
}
const { vmUuid } = await this.getProxy(id)
const { vmUuid } = await this._getProxy(id)
const xapi = this._app.getXapi(vmUuid)
await xapi.getObject(vmUuid).update_xenstore_data(xenstoreData)
@@ -285,7 +318,7 @@ export default class Proxy {
const redeploy = proxyId !== undefined
if (redeploy) {
const { vmUuid } = await this.getProxy(proxyId)
const { vmUuid } = await this._getProxy(proxyId)
if (vmUuid !== undefined) {
try {
await app.getXapiObject(vmUuid).$destroy()
@@ -364,7 +397,7 @@ export default class Proxy {
// enum assertType {iterator, scalar, stream}
async callProxyMethod(id, method, params, { assertType = 'scalar' } = {}) {
const proxy = await this.getProxy(id)
const proxy = await this._getProxy(id)
const request = {
body: format.request(0, method, params),

View File

@@ -23,7 +23,7 @@ export default class {
this._handlers = { __proto__: null }
this._remotes = new Remotes({
connection: app._redis,
prefix: 'xo:remote',
namespace: 'remote',
indexes: ['enabled'],
})
this._remotesInfo = {}
@@ -153,7 +153,7 @@ export default class {
if (remote === undefined) {
throw noSuchObject(id, 'remote')
}
return remote.properties
return remote
}
async getRemoteWithCredentials(id) {
@@ -184,7 +184,7 @@ export default class {
params.options = options
}
const remote = await this._remotes.add(params)
return /* await */ this.updateRemote(remote.get('id'), { enabled: true })
return /* await */ this.updateRemote(remote.id, { enabled: true })
}
updateRemote(id, { enabled, name, options, proxy, url }) {
@@ -215,7 +215,7 @@ export default class {
patch(remote, props)
return (await this._remotes.update(remote)).properties
return await this._remotes.update(remote)
}
async removeRemote(id) {

View File

@@ -33,7 +33,7 @@ export default class Scheduling {
const db = (this._db = new Schedules({
connection: app._redis,
prefix: 'xo:schedule',
namespace: 'schedule',
}))
this._runs = { __proto__: null }
@@ -73,16 +73,14 @@ export default class Scheduling {
}
async createSchedule({ cron, enabled, jobId, name = '', timezone, userId }) {
const schedule = (
await this._db.add({
cron,
enabled,
jobId,
name,
timezone,
userId,
})
).properties
const schedule = await this._db.add({
cron,
enabled,
jobId,
name,
timezone,
userId,
})
this._start(schedule)
return schedule
}
@@ -92,7 +90,7 @@ export default class Scheduling {
if (schedule === undefined) {
throw noSuchObject(id, 'schedule')
}
return schedule.properties
return schedule
}
async getAllSchedules() {

View File

@@ -26,11 +26,11 @@ export default class {
const groupsDb = (this._groups = new Groups({
connection: redis,
prefix: 'xo:group',
namespace: 'group',
}))
const usersDb = (this._users = new Users({
connection: redis,
prefix: 'xo:user',
namespace: 'user',
indexes: ['email'],
}))
@@ -85,7 +85,7 @@ export default class {
// TODO: use plain objects
const user = await this._users.create(properties)
return user.properties
return user
}
async deleteUser(id) {
@@ -183,7 +183,7 @@ export default class {
// TODO: this method will no longer be async when users are
// integrated to the main collection.
async getUser(id) {
const user = (await this._getUser(id)).properties
const user = await this._getUser(id)
// TODO: remove when the email property has been
// completely eradicated.
@@ -200,7 +200,7 @@ export default class {
// TODO: change `email` by `username`.
const user = await this._users.first({ email: username })
if (user !== undefined) {
return user.properties
return user
}
if (returnNullIfMissing) {
@@ -323,7 +323,7 @@ export default class {
async createGroup({ name, provider, providerGroupId }) {
// TODO: use plain objects.
const group = (await this._groups.create(name, provider, providerGroupId)).properties
const group = await this._groups.create(name, provider, providerGroupId)
return group
}
@@ -362,7 +362,7 @@ export default class {
throw noSuchObject(id, 'group')
}
return group.properties
return group
}
async getAllGroups() {

View File

@@ -44,7 +44,7 @@ export default class {
this._objectConflicts = { __proto__: null } // TODO: clean when a server is disconnected.
const serversDb = (this._servers = new Servers({
connection: app._redis,
prefix: 'xo:server',
namespace: 'server',
indexes: ['host'],
}))
this._serverIdsByPool = { __proto__: null }
@@ -120,7 +120,7 @@ export default class {
username,
})
return server.properties
return server
}
async unregisterXenServer(id) {
@@ -148,39 +148,39 @@ export default class {
throw new Error('this entry require disconnecting the server to update it')
}
if (label !== undefined) server.set('label', label || undefined)
if (host) server.set('host', host)
if (username) server.set('username', username)
if (password) server.set('password', password)
if (label !== undefined) server.label = label || undefined
if (host) server.host = host
if (username) server.username = username
if (password) server.password = password
if (error !== undefined) {
server.set('error', error)
server.error = error
}
if (enabled !== undefined) {
server.set('enabled', enabled)
server.enabled = enabled
}
if (readOnly !== undefined) {
server.set('readOnly', readOnly)
server.readOnly = readOnly
if (xapi !== undefined) {
xapi.readOnly = readOnly
}
}
if (allowUnauthorized !== undefined) {
server.set('allowUnauthorized', allowUnauthorized)
server.allowUnauthorized = allowUnauthorized
}
if (httpProxy !== undefined) {
// if value is null, pass undefined to the model, so it will delete this optional property from the Server object
server.set('httpProxy', httpProxy === null ? undefined : httpProxy)
server.httpProxy = httpProxy === null ? undefined : httpProxy
}
await this._servers.update(server)
}
async getXenServer(id) {
return (await this._getXenServer(id)).properties
return await this._getXenServer(id)
}
// TODO: this method will no longer be async when servers are

View File

@@ -16,8 +16,8 @@ export { default as readVmdkGrainTable, readCapacityAndGrainTable } from './vmdk
* @param gzipped
* @returns a stream whose bytes represent a VHD file containing the VMDK data
*/
async function vmdkToVhd(vmdkReadStream, grainLogicalAddressList, grainFileOffsetList, gzipped = false) {
const parser = new VMDKDirectParser(vmdkReadStream, grainLogicalAddressList, grainFileOffsetList, gzipped)
async function vmdkToVhd(vmdkReadStream, grainLogicalAddressList, grainFileOffsetList, gzipped = false, length) {
const parser = new VMDKDirectParser(vmdkReadStream, grainLogicalAddressList, grainFileOffsetList, gzipped, length)
const header = await parser.readHeader()
return createReadableSparseStream(
header.capacitySectors * 512,

View File

@@ -64,7 +64,7 @@ function alignSectors(number) {
}
export default class VMDKDirectParser {
constructor(readStream, grainLogicalAddressList, grainFileOffsetList, gzipped = false) {
constructor(readStream, grainLogicalAddressList, grainFileOffsetList, gzipped = false, length) {
if (gzipped) {
const unzipStream = zlib.createGunzip()
readStream.pipe(unzipStream)
@@ -74,6 +74,7 @@ export default class VMDKDirectParser {
this.grainFileOffsetList = grainFileOffsetList
this.virtualBuffer = new VirtualBuffer(readStream)
this.header = null
this._length = length
}
// I found a VMDK file whose L1 and L2 table did not have a marker, but they were at the top
@@ -195,6 +196,16 @@ export default class VMDKDirectParser {
}
yield { logicalAddressBytes: lba, data: grain }
}
console.log('yielded last VMDK block')
// drain remaining
// stream.resume does not seem to be enough to completely consume the stream
// especially when this stream is part of a tar (OVA), potentially gzipped
if (this._length !== undefined) {
while (this.virtualBuffer.position < this._length) {
await this.virtualBuffer.readChunk(
Math.min(this._length - this.virtualBuffer.position, 1024 * 1024),
'draining'
)
}
}
}
}