Compare commits

..

2 Commits

Author SHA1 Message Date
Florent Beauchamp
5299c101c2 fix: add missing service worker file 2021-10-07 17:55:20 +02:00
Florent Beauchamp
83ca34807d feat(xo-web): send notification 2021-10-07 10:27:52 +02:00
732 changed files with 15442 additions and 34570 deletions

View File

@@ -1,7 +1,5 @@
'use strict'
module.exports = {
extends: ['plugin:eslint-comments/recommended', 'plugin:n/recommended', 'standard', 'standard-jsx', 'prettier'],
extends: ['plugin:eslint-comments/recommended', 'standard', 'standard-jsx', 'prettier'],
globals: {
__DEV__: true,
$Dict: true,
@@ -17,40 +15,11 @@ module.exports = {
{
files: ['cli.{,c,m}js', '*-cli.{,c,m}js', '**/*cli*/**/*.{,c,m}js'],
rules: {
'n/no-process-exit': 'off',
'no-console': 'off',
},
},
{
files: ['*.mjs'],
parserOptions: {
sourceType: 'module',
},
},
{
files: ['*.spec.{,c,m}js'],
rules: {
'n/no-unsupported-features/node-builtins': [
'error',
{
version: '>=16',
},
],
'n/no-unsupported-features/es-syntax': [
'error',
{
version: '>=16',
},
],
},
},
],
parserOptions: {
ecmaVersion: 13,
sourceType: 'script',
},
rules: {
// disabled because XAPI objects are using camel case
camelcase: ['off'],
@@ -65,7 +34,5 @@ module.exports = {
'lines-between-class-members': 'off',
'no-console': ['error', { allow: ['warn', 'error'] }],
strict: 'error',
},
}

16
.flowconfig Normal file
View File

@@ -0,0 +1,16 @@
[ignore]
<PROJECT_ROOT>/node_modules/.*
[include]
[libs]
[lints]
[options]
esproposal.decorators=ignore
esproposal.optional_chaining=enable
include_warnings=true
module.use_strict=true
[strict]

View File

@@ -1,46 +0,0 @@
---
name: Bug report
about: Create a report to help us improve
title: ''
labels: 'status: triaging :triangular_flag_on_post:, type: bug :bug:'
assignees: ''
---
**XOA or XO from the sources?**
If XOA:
- which release channel? (`stable` vs `latest`)
- please consider creating a support ticket in [your dedicated support area](https://xen-orchestra.com/#!/member/support)
If XO from the sources:
- Don't forget to [read this first](https://xen-orchestra.com/docs/community.html)
- As well as follow [this guide](https://xen-orchestra.com/docs/community.html#report-a-bug)
**Describe the bug**
A clear and concise description of what the bug is.
**To Reproduce**
Steps to reproduce the behavior:
1. Go to '...'
2. Click on '....'
3. Scroll down to '....'
4. See error
**Expected behavior**
A clear and concise description of what you expected to happen.
**Screenshots**
If applicable, add screenshots to help explain your problem.
**Environment (please provide the following information):**
- Node: [e.g. 16.12.1]
- xo-server: [e.g. 5.82.3]
- xo-web: [e.g. 5.87.0]
- hypervisor: [e.g. XCP-ng 8.2.0]
**Additional context**
Add any other context about the problem here.

View File

@@ -1,19 +0,0 @@
---
name: Feature request
about: Suggest an idea for this project
title: ''
labels: ''
assignees: ''
---
**Is your feature request related to a problem? Please describe.**
A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
**Describe the solution you'd like**
A clear and concise description of what you want to happen.
**Describe alternatives you've considered**
A clear and concise description of any alternative solutions or features you've considered.
**Additional context**
Add any other context or screenshots about the feature request here.

View File

@@ -1,13 +0,0 @@
name: CI
on: [push]
jobs:
build:
name: Test
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: satackey/action-docker-layer-caching@v0.0.11
# Ignore the failure of a step and avoid terminating the job.
continue-on-error: true
- run: docker-compose -f docker/docker-compose.dev.yml build
- run: docker-compose -f docker/docker-compose.dev.yml up

9
.gitignore vendored
View File

@@ -1,4 +1,5 @@
/_book/
/coverage/
/node_modules/
/lerna-debug.log
/lerna-debug.log.*
@@ -10,6 +11,10 @@
/packages/*/dist/
/packages/*/node_modules/
/@xen-orchestra/proxy/src/app/mixins/index.mjs
/packages/vhd-cli/src/commands/index.js
/packages/xen-api/examples/node_modules/
/packages/xen-api/plot.dat
@@ -30,7 +35,3 @@ pnpm-debug.log.*
yarn-error.log
yarn-error.log.*
.env
# code coverage
.nyc_output/
coverage/

23
.travis.yml Normal file
View File

@@ -0,0 +1,23 @@
language: node_js
node_js:
- 14
# Use containers.
# http://docs.travis-ci.com/user/workers/container-based-infrastructure/
sudo: false
addons:
apt:
packages:
- qemu-utils
- blktap-utils
- vmdk-stream-converter
before_install:
- curl -o- -L https://yarnpkg.com/install.sh | bash
- export PATH="$HOME/.yarn/bin:$PATH"
cache:
yarn: true
script:
- yarn run travis-tests

View File

@@ -1,35 +0,0 @@
### `asyncEach(iterable, iteratee, [opts])`
Executes `iteratee` in order for each value yielded by `iterable`.
Returns a promise which rejects as soon as a call to `iteratee` throws or a promise returned by it rejects, and which resolves when all promises returned by `iteratee` have resolved.
`iterable` must be an iterable or async iterable.
`iteratee` is called with the same `this` value as `asyncEach`, and with the following arguments:
- `value`: the value yielded by `iterable`
- `index`: the 0-based index for this value
- `iterable`: the iterable itself
`opts` is an object that can contain the following options:
- `concurrency`: a number indicating the maximum number of parallel calls to `iteratee`, defaults to `10`. The value `0` means no concurrency limit.
- `signal`: an abort signal to stop the iteration
- `stopOnError`: whether to stop the iteration on the first error, or wait for all calls to finish and throw an `AggregateError`, defaults to `true`
```js
import { asyncEach } from '@vates/async-each'
const contents = []
await asyncEach(
['foo.txt', 'bar.txt', 'baz.txt'],
async function (filename, i) {
contents[i] = await readFile(filename)
},
{
// reads two files at a time
concurrency: 2,
}
)
```
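For the `signal` and `stopOnError` options, here is a small, non-authoritative sketch (assuming a recent Node providing `AbortController`; `readFile` simply stands in for any async work):
```js
import { readFile } from 'node:fs/promises'
import { asyncEach } from '@vates/async-each'

const ac = new AbortController()
setTimeout(() => ac.abort(), 1000) // abort the whole iteration from the outside after 1s

try {
  await asyncEach(
    ['foo.txt', 'bar.txt', 'baz.txt'],
    filename => readFile(filename),
    {
      signal: ac.signal,
      // keep iterating on errors and collect them instead of rejecting immediately
      stopOnError: false,
    }
  )
} catch (error) {
  // with `stopOnError: false`, individual failures are reported as a single `AggregateError`
  console.error(error.errors ?? error)
}
```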

View File

@@ -1 +0,0 @@
../../scripts/npmignore

View File

@@ -1,68 +0,0 @@
<!-- DO NOT EDIT MANUALLY, THIS FILE HAS BEEN GENERATED -->
# @vates/async-each
[![Package Version](https://badgen.net/npm/v/@vates/async-each)](https://npmjs.org/package/@vates/async-each) ![License](https://badgen.net/npm/license/@vates/async-each) [![PackagePhobia](https://badgen.net/bundlephobia/minzip/@vates/async-each)](https://bundlephobia.com/result?p=@vates/async-each) [![Node compatibility](https://badgen.net/npm/node/@vates/async-each)](https://npmjs.org/package/@vates/async-each)
> Run async fn for each item in (async) iterable
## Install
Installation of the [npm package](https://npmjs.org/package/@vates/async-each):
```
> npm install --save @vates/async-each
```
## Usage
### `asyncEach(iterable, iteratee, [opts])`
Executes `iteratee` in order for each value yielded by `iterable`.
Returns a promise which rejects as soon as a call to `iteratee` throws or a promise returned by it rejects, and which resolves when all promises returned by `iteratee` have resolved.
`iterable` must be an iterable or async iterable.
`iteratee` is called with the same `this` value as `asyncEach`, and with the following arguments:
- `value`: the value yielded by `iterable`
- `index`: the 0-based index for this value
- `iterable`: the iterable itself
`opts` is an object that can contain the following options:
- `concurrency`: a number indicating the maximum number of parallel calls to `iteratee`, defaults to `10`. The value `0` means no concurrency limit.
- `signal`: an abort signal to stop the iteration
- `stopOnError`: whether to stop the iteration on the first error, or wait for all calls to finish and throw an `AggregateError`, defaults to `true`
```js
import { asyncEach } from '@vates/async-each'
const contents = []
await asyncEach(
['foo.txt', 'bar.txt', 'baz.txt'],
async function (filename, i) {
contents[i] = await readFile(filename)
},
{
// reads two files at a time
concurrency: 2,
}
)
```
## Contributions
Contributions are _very_ welcomed, either on the documentation or on
the code.
You may:
- report any [issue](https://github.com/vatesfr/xen-orchestra/issues)
you've encountered;
- fork and create a pull request.
## License
[ISC](https://spdx.org/licenses/ISC) © [Vates SAS](https://vates.fr)

View File

@@ -1,108 +0,0 @@
'use strict'
const noop = Function.prototype
class AggregateError extends Error {
constructor(errors, message) {
super(message)
this.errors = errors
}
}
/**
* @template Item
* @param {Iterable<Item>} iterable
* @param {(item: Item, index: number, iterable: Iterable<Item>) => Promise<void>} iteratee
* @returns {Promise<void>}
*/
exports.asyncEach = function asyncEach(iterable, iteratee, { concurrency = 10, signal, stopOnError = true } = {}) {
if (concurrency === 0) {
concurrency = Infinity
}
return new Promise((resolve, reject) => {
const it = (iterable[Symbol.iterator] || iterable[Symbol.asyncIterator]).call(iterable)
const errors = []
let running = 0
let index = 0
let onAbort
if (signal !== undefined) {
onAbort = () => {
onRejectedWrapper(new Error('asyncEach aborted'))
}
signal.addEventListener('abort', onAbort)
}
const clean = () => {
onFulfilled = onRejected = noop
if (onAbort !== undefined) {
signal.removeEventListener('abort', onAbort)
}
}
resolve = (resolve =>
function resolveAndClean(value) {
resolve(value)
clean()
})(resolve)
reject = (reject =>
function rejectAndClean(reason) {
reject(reason)
clean()
})(reject)
let onFulfilled = value => {
--running
next()
}
const onFulfilledWrapper = value => onFulfilled(value)
let onRejected = stopOnError
? reject
: error => {
--running
errors.push(error)
next()
}
const onRejectedWrapper = reason => onRejected(reason)
let nextIsRunning = false
let next = async () => {
if (nextIsRunning) {
return
}
nextIsRunning = true
if (running < concurrency) {
const cursor = await it.next()
if (cursor.done) {
next = () => {
if (running === 0) {
if (errors.length === 0) {
resolve()
} else {
reject(new AggregateError(errors))
}
}
}
} else {
++running
try {
const result = iteratee.call(this, cursor.value, index++, iterable)
let then
if (result != null && typeof result === 'object' && typeof (then = result.then) === 'function') {
then.call(result, onFulfilledWrapper, onRejectedWrapper)
} else {
onFulfilled(result)
}
} catch (error) {
onRejected(error)
}
}
nextIsRunning = false
return next()
}
nextIsRunning = false
}
next()
})
}

View File

@@ -1,101 +0,0 @@
'use strict'
/* eslint-env jest */
const { asyncEach } = require('./')
const randomDelay = (max = 10) =>
new Promise(resolve => {
setTimeout(resolve, Math.floor(Math.random() * max + 1))
})
const rejectionOf = p =>
new Promise((resolve, reject) => {
p.then(reject, resolve)
})
describe('asyncEach', () => {
const thisArg = 'qux'
const values = ['foo', 'bar', 'baz']
Object.entries({
'sync iterable': () => values,
'async iterable': async function* () {
for (const value of values) {
await randomDelay()
yield value
}
},
}).forEach(([what, getIterable]) =>
describe('with ' + what, () => {
let iterable
beforeEach(() => {
iterable = getIterable()
})
it('works', async () => {
const iteratee = jest.fn(async () => {})
await asyncEach.call(thisArg, iterable, iteratee, { concurrency: 1 })
expect(iteratee.mock.instances).toEqual(Array.from(values, () => thisArg))
expect(iteratee.mock.calls).toEqual(Array.from(values, (value, index) => [value, index, iterable]))
})
;[1, 2, 4].forEach(concurrency => {
it('respects a concurrency of ' + concurrency, async () => {
let running = 0
await asyncEach(
values,
async () => {
++running
expect(running).toBeLessThanOrEqual(concurrency)
await randomDelay()
--running
},
{ concurrency }
)
})
})
it('stops on first error when stopOnError is true', async () => {
const error = new Error()
const iteratee = jest.fn((_, i) => {
if (i === 1) {
throw error
}
})
expect(await rejectionOf(asyncEach(iterable, iteratee, { concurrency: 1, stopOnError: true }))).toBe(error)
expect(iteratee).toHaveBeenCalledTimes(2)
})
it('rejects AggregateError when stopOnError is false', async () => {
const errors = []
const iteratee = jest.fn(() => {
const error = new Error()
errors.push(error)
throw error
})
const error = await rejectionOf(asyncEach(iterable, iteratee, { stopOnError: false }))
expect(error.errors).toEqual(errors)
expect(iteratee.mock.calls).toEqual(Array.from(values, (value, index) => [value, index, iterable]))
})
it('can be interrupted with an AbortSignal', async () => {
const ac = new AbortController()
const iteratee = jest.fn((_, i) => {
if (i === 1) {
ac.abort()
}
})
await expect(asyncEach(iterable, iteratee, { concurrency: 1, signal: ac.signal })).rejects.toThrow(
'asyncEach aborted'
)
expect(iteratee).toHaveBeenCalledTimes(2)
})
})
)
})

View File

@@ -1,34 +0,0 @@
{
"private": false,
"name": "@vates/async-each",
"homepage": "https://github.com/vatesfr/xen-orchestra/tree/master/@vates/async-each",
"description": "Run async fn for each item in (async) iterable",
"keywords": [
"array",
"async",
"collection",
"each",
"for",
"foreach",
"iterable",
"iterator"
],
"bugs": "https://github.com/vatesfr/xen-orchestra/issues",
"repository": {
"directory": "@vates/async-each",
"type": "git",
"url": "https://github.com/vatesfr/xen-orchestra.git"
},
"author": {
"name": "Vates SAS",
"url": "https://vates.fr"
},
"license": "ISC",
"version": "1.0.0",
"engines": {
"node": ">=8.10"
},
"scripts": {
"postversion": "npm publish --access public"
}
}

View File

@@ -1,30 +0,0 @@
Node does not cache queries to `dns.lookup`, which can lead applications that make a lot of connections to have performance issues and to saturate Node's thread pool.
This library attempts to mitigate these problems by providing a version of this function with a short cache, applied to both errors and results.
> Limitation: the `verbatim: false` option is not supported.
It has exactly the same API as the native method and can be used directly:
```js
import { createCachedLookup } from '@vates/cached-dns.lookup'
const lookup = createCachedLookup()
lookup('example.net', { all: true, family: 0 }, (error, result) => {
if (error != null) {
return console.warn(error)
}
console.log(result)
})
```
Or it can be used to replace the native implementation and speed up the whole app:
```js
// assign our cached implementation to dns.lookup
const restore = createCachedLookup().patchGlobal()
// to restore the previous implementation
restore()
```
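Since the returned function keeps `dns.lookup`'s callback signature, it can also be wrapped with Node's `util.promisify`; a hedged sketch (using `all: true` so the callback receives a single results array, which promisify resolves to directly):
```js
import { promisify } from 'node:util'
import { createCachedLookup } from '@vates/cached-dns.lookup'

const lookup = promisify(createCachedLookup())

// resolves to an array of { address, family } objects
const addresses = await lookup('example.net', { all: true, family: 0 })
console.log(addresses)
```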

View File

@@ -1 +0,0 @@
../../scripts/npmignore

View File

@@ -1,63 +0,0 @@
<!-- DO NOT EDIT MANUALLY, THIS FILE HAS BEEN GENERATED -->
# @vates/cached-dns.lookup
[![Package Version](https://badgen.net/npm/v/@vates/cached-dns.lookup)](https://npmjs.org/package/@vates/cached-dns.lookup) ![License](https://badgen.net/npm/license/@vates/cached-dns.lookup) [![PackagePhobia](https://badgen.net/bundlephobia/minzip/@vates/cached-dns.lookup)](https://bundlephobia.com/result?p=@vates/cached-dns.lookup) [![Node compatibility](https://badgen.net/npm/node/@vates/cached-dns.lookup)](https://npmjs.org/package/@vates/cached-dns.lookup)
> Cached implementation of dns.lookup
## Install
Installation of the [npm package](https://npmjs.org/package/@vates/cached-dns.lookup):
```
> npm install --save @vates/cached-dns.lookup
```
## Usage
Node does not cache queries to `dns.lookup`, which can lead applications that make a lot of connections to have performance issues and to saturate Node's thread pool.
This library attempts to mitigate these problems by providing a version of this function with a short cache, applied to both errors and results.
> Limitation: the `verbatim: false` option is not supported.
It has exactly the same API as the native method and can be used directly:
```js
import { createCachedLookup } from '@vates/cached-dns.lookup'
const lookup = createCachedLookup()
lookup('example.net', { all: true, family: 0 }, (error, result) => {
if (error != null) {
return console.warn(error)
}
console.log(result)
})
```
Or it can be used to replace the native implementation and speed up the whole app:
```js
// assign our cached implementation to dns.lookup
const restore = createCachedLookup().patchGlobal()
// to restore the previous implementation
restore()
```
## Contributions
Contributions are _very_ welcomed, either on the documentation or on
the code.
You may:
- report any [issue](https://github.com/vatesfr/xen-orchestra/issues)
you've encountered;
- fork and create a pull request.
## License
[ISC](https://spdx.org/licenses/ISC) © [Vates SAS](https://vates.fr)

View File

@@ -1,72 +0,0 @@
'use strict'
const assert = require('assert')
const dns = require('dns')
const LRU = require('lru-cache')
function reportResults(all, results, callback) {
if (all) {
callback(null, results)
} else {
const first = results[0]
callback(null, first.address, first.family)
}
}
exports.createCachedLookup = function createCachedLookup({ lookup = dns.lookup } = {}) {
const cache = new LRU({
max: 500,
// 1 minute: long enough to be effective, short enough so there is no need to bother with DNS TTLs
ttl: 60e3,
})
function cachedLookup(hostname, options, callback) {
let all = false
let family = 0
if (typeof options === 'function') {
callback = options
} else if (typeof options === 'number') {
family = options
} else if (options != null) {
assert.notStrictEqual(options.verbatim, false, 'not supported by this implementation')
;({ all = all, family = family } = options)
}
// cache by family option because there will be an error if there are no
// entries for the requested family, so we cannot easily cache all families
// and filter on reporting back
const key = hostname + '/' + family
const result = cache.get(key)
if (result !== undefined) {
setImmediate(reportResults, all, result, callback)
} else {
lookup(hostname, { all: true, family, verbatim: true }, function onLookup(error, results) {
// errors are not cached because that would delay recovery after DNS/network issues
//
// there is no reliable way to detect whether the error is real or simply
// that there are no results for the requested hostname
//
// there should be far fewer errors than successes, so it should
// not be a big deal to not cache them
if (error != null) {
return callback(error)
}
cache.set(key, results)
reportResults(all, results, callback)
})
}
}
cachedLookup.patchGlobal = function patchGlobal() {
const previous = dns.lookup
dns.lookup = cachedLookup
return function restoreGlobal() {
assert.strictEqual(dns.lookup, cachedLookup)
dns.lookup = previous
}
}
return cachedLookup
}

View File

@@ -1,32 +0,0 @@
{
"engines": {
"node": ">=8"
},
"dependencies": {
"lru-cache": "^7.0.4"
},
"private": false,
"name": "@vates/cached-dns.lookup",
"description": "Cached implementation of dns.lookup",
"keywords": [
"cache",
"dns",
"lookup"
],
"homepage": "https://github.com/vatesfr/xen-orchestra/tree/master/@vates/cached-dns.lookup",
"bugs": "https://github.com/vatesfr/xen-orchestra/issues",
"repository": {
"directory": "@vates/cached-dns.lookup",
"type": "git",
"url": "https://github.com/vatesfr/xen-orchestra.git"
},
"author": {
"name": "Vates SAS",
"url": "https://vates.fr"
},
"license": "ISC",
"version": "1.0.0",
"scripts": {
"postversion": "npm publish --access public"
}
}

View File

@@ -1,5 +1,3 @@
'use strict'
exports.coalesceCalls = function (fn) {
let promise
const clean = () => {

View File

@@ -1,5 +1,3 @@
'use strict'
/* eslint-env jest */
const { coalesceCalls } = require('./')

View File

@@ -65,23 +65,6 @@ const f = compose(
)
```
Functions can receive extra parameters:
```js
const isIn = (value, min, max) => min <= value && value <= max
// Only compatible when `fns` is passed as an array!
const f = compose([
[add, 2],
[isIn, 3, 10],
])
console.log(f(1))
// → true
```
> Note: if the first function is defined with extra parameters, it will only receive the first value passed to the composed function, instead of all the parameters.
## Contributions
Contributions are _very_ welcomed, either on the documentation or on

View File

@@ -46,20 +46,3 @@ const f = compose(
[add2, mul3]
)
```
Functions can receive extra parameters:
```js
const isIn = (value, min, max) => min <= value && value <= max
// Only compatible when `fns` is passed as an array!
const f = compose([
[add, 2],
[isIn, 3, 10],
])
console.log(f(1))
// → true
```
> Note: if the first function is defined with extra parameters, it will only receive the first value passed to the composed function, instead of all the parameters.

View File

@@ -4,13 +4,11 @@ const defaultOpts = { async: false, right: false }
exports.compose = function compose(opts, fns) {
if (Array.isArray(opts)) {
fns = opts.slice() // don't mutate passed array
fns = opts
opts = defaultOpts
} else if (typeof opts === 'object') {
opts = Object.assign({}, defaultOpts, opts)
if (Array.isArray(fns)) {
fns = fns.slice() // don't mutate passed array
} else {
if (!Array.isArray(fns)) {
fns = Array.prototype.slice.call(arguments, 1)
}
} else {
@@ -22,24 +20,6 @@ exports.compose = function compose(opts, fns) {
if (n === 0) {
throw new TypeError('at least one function must be passed')
}
for (let i = 0; i < n; ++i) {
const entry = fns[i]
if (Array.isArray(entry)) {
const fn = entry[0]
const args = entry.slice()
args[0] = undefined
fns[i] = function composeWithArgs(value) {
args[0] = value
try {
return fn.apply(this, args)
} finally {
args[0] = undefined
}
}
}
}
if (n === 1) {
return fns[0]
}

View File

@@ -1,5 +1,3 @@
'use strict'
/* eslint-env jest */
const { compose } = require('./')

View File

@@ -14,7 +14,7 @@
"url": "https://vates.fr"
},
"license": "ISC",
"version": "2.1.0",
"version": "2.0.0",
"engines": {
"node": ">=7.6"
},

View File

@@ -1,86 +0,0 @@
### `decorateWith(fn, ...args)`
Creates a new ([legacy](https://babeljs.io/docs/en/babel-plugin-syntax-decorators#legacy)) method decorator from a function decorator; for instance, it allows using Lodash's functions as decorators:
```js
import { decorateWith } from '@vates/decorate-with'
class Foo {
@decorateWith(lodash.debounce, 150)
bar() {
// body
}
}
```
### `decorateClass(class, map)`
Decorates a number of accessors and methods directly, without using the decorator syntax:
```js
import { decorateClass } from '@vates/decorate-with'
class Foo {
get bar() {
// body
}
set bar(value) {
// body
}
baz() {
// body
}
}
decorateClass(Foo, {
// getter and/or setter
bar: {
// without arguments
get: lodash.memoize,
// with arguments
set: [lodash.debounce, 150],
},
// method (with or without arguments)
baz: lodash.curry,
})
```
The decorated class is returned, so you can export it directly.
To apply multiple transforms to an accessor/method, you can either call `decorateClass` multiple times or use [`@vates/compose`](https://www.npmjs.com/package/@vates/compose):
```js
decorateClass(Foo, {
baz: compose([
[lodash.debounce, 150]
lodash.curry,
])
})
```
### `perInstance(fn, ...args)`
Helper to decorate a method per instance instead of once for the whole class.
This is often necessary for caching or deduplicating calls.
```js
import { perInstance } from '@vates/decorate-with'
class Foo {
@decorateWith(perInstance, lodash.memoize)
bar() {
// body
}
}
```
Because it's a normal function, it can also be used with `decorateClass`, with `compose` or even by itself.
### `decorateMethodsWith(class, map)`
> Deprecated alias for [`decorateClass(class, map)`](#decorateclassclass-map).

View File

@@ -31,19 +31,15 @@ class Foo {
}
```
### `decorateClass(class, map)`
### `decorateMethodsWith(class, map)`
Decorates a number of accessors and methods directly, without using the decorator syntax:
Decorates a number of methods directly, without using the decorator syntax:
```js
import { decorateClass } from '@vates/decorate-with'
import { decorateMethodsWith } from '@vates/decorate-with'
class Foo {
get bar() {
// body
}
set bar(value) {
bar() {
// body
}
@@ -52,57 +48,17 @@ class Foo {
}
}
decorateClass(Foo, {
// getter and/or setter
bar: {
// without arguments
get: lodash.memoize,
decorateMethodsWith(Foo, {
// without arguments
bar: lodash.curry,
// with arguments
set: [lodash.debounce, 150],
},
// method (with or without arguments)
baz: lodash.curry,
// with arguments
baz: [lodash.debounce, 150],
})
```
The decorated class is returned, so you can export it directly.
To apply multiple transforms to an accessor/method, you can either call `decorateClass` multiple times or use [`@vates/compose`](https://www.npmjs.com/package/@vates/compose):
```js
decorateClass(Foo, {
baz: compose([
[lodash.debounce, 150],
lodash.curry,
])
})
```
### `perInstance(fn, ...args)`
Helper to decorate a method per instance instead of once for the whole class.
This is often necessary for caching or deduplicating calls.
```js
import { perInstance } from '@vates/decorate-with'
class Foo {
@decorateWith(perInstance, lodash.memoize)
bar() {
// body
}
}
```
Because it's a normal function, it can also be used with `decorateClass`, with `compose` or even by itself.
### `decorateMethodsWith(class, map)`
> Deprecated alias for [`decorateClass(class, map)`](#decorateclassclass-map).
## Contributions
Contributions are _very_ welcomed, either on the documentation or on

View File

@@ -0,0 +1,42 @@
### `decorateWith(fn, ...args)`
Creates a new ([legacy](https://babeljs.io/docs/en/babel-plugin-syntax-decorators#legacy)) method decorator from a function decorator; for instance, it allows using Lodash's functions as decorators:
```js
import { decorateWith } from '@vates/decorate-with'
class Foo {
@decorateWith(lodash.debounce, 150)
bar() {
// body
}
}
```
### `decorateMethodsWith(class, map)`
Decorates a number of methods directly, without using the decorator syntax:
```js
import { decorateMethodsWith } from '@vates/decorate-with'
class Foo {
bar() {
// body
}
baz() {
// body
}
}
decorateMethodsWith(Foo, {
// without arguments
bar: lodash.curry,
// with arguments
baz: [lodash.debounce, 150],
})
```
The decorated class is returned, so you can export it directly.
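A minimal sketch of that export pattern (reusing the hypothetical `Foo` class and the lodash import from the example above):
```js
// foo.js — hypothetical module exporting the decorated class directly
export default decorateMethodsWith(Foo, {
  bar: lodash.curry,
})
```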

View File

@@ -1,5 +1,3 @@
'use strict'
exports.decorateWith = function decorateWith(fn, ...args) {
return (target, name, descriptor) => ({
...descriptor,
@@ -9,40 +7,15 @@ exports.decorateWith = function decorateWith(fn, ...args) {
const { getOwnPropertyDescriptor, defineProperty } = Object
function applyDecorator(decorator, value) {
return typeof decorator === 'function' ? decorator(value) : decorator[0](value, ...decorator.slice(1))
}
exports.decorateClass = exports.decorateMethodsWith = function decorateClass(klass, map) {
exports.decorateMethodsWith = function decorateMethodsWith(klass, map) {
const { prototype } = klass
for (const name of Object.keys(map)) {
const decorator = map[name]
const descriptor = getOwnPropertyDescriptor(prototype, name)
if (typeof decorator === 'function' || Array.isArray(decorator)) {
descriptor.value = applyDecorator(decorator, descriptor.value)
} else {
const { get, set } = decorator
if (get !== undefined) {
descriptor.get = applyDecorator(get, descriptor.get)
}
if (set !== undefined) {
descriptor.set = applyDecorator(set, descriptor.set)
}
}
const { value } = descriptor
const decorator = map[name]
descriptor.value = typeof decorator === 'function' ? decorator(value) : decorator[0](value, ...decorator.slice(1))
defineProperty(prototype, name, descriptor)
}
return klass
}
exports.perInstance = function perInstance(fn, decorator, ...args) {
const map = new WeakMap()
return function () {
let decorated = map.get(this)
if (decorated === undefined) {
decorated = decorator(fn, ...args)
map.set(this, decorated)
}
return decorated.apply(this, arguments)
}
}

View File

@@ -1,152 +0,0 @@
'use strict'
const assert = require('assert')
const { describe, it } = require('tap').mocha
const { decorateClass, decorateWith, decorateMethodsWith, perInstance } = require('./')
const identity = _ => _
describe('decorateWith', () => {
it('works', () => {
const expectedArgs = [Math.random(), Math.random()]
const expectedFn = Function.prototype
const newFn = () => {}
const decorator = decorateWith(function wrapper(fn, ...args) {
assert.deepStrictEqual(fn, expectedFn)
assert.deepStrictEqual(args, expectedArgs)
return newFn
}, ...expectedArgs)
const descriptor = {
configurable: true,
enumerable: false,
value: expectedFn,
writable: true,
}
assert.deepStrictEqual(decorator({}, 'foo', descriptor), {
...descriptor,
value: newFn,
})
})
})
describe('decorateClass', () => {
it('works', () => {
class C {
foo() {}
bar() {}
get baz() {}
// eslint-disable-next-line accessor-pairs
set qux(_) {}
}
const expectedArgs = [Math.random(), Math.random()]
const P = C.prototype
const descriptors = Object.getOwnPropertyDescriptors(P)
const newFoo = () => {}
const newBar = () => {}
const newGetBaz = () => {}
const newSetQux = _ => {}
decorateClass(C, {
foo(fn) {
assert.strictEqual(arguments.length, 1)
assert.strictEqual(fn, P.foo)
return newFoo
},
bar: [
function (fn, ...args) {
assert.strictEqual(fn, P.bar)
assert.deepStrictEqual(args, expectedArgs)
return newBar
},
...expectedArgs,
],
baz: {
get(fn) {
assert.strictEqual(arguments.length, 1)
assert.strictEqual(fn, descriptors.baz.get)
return newGetBaz
},
},
qux: {
set: [
function (fn, ...args) {
assert.strictEqual(fn, descriptors.qux.set)
assert.deepStrictEqual(args, expectedArgs)
return newSetQux
},
...expectedArgs,
],
},
})
const newDescriptors = Object.getOwnPropertyDescriptors(P)
assert.deepStrictEqual(newDescriptors.foo, { ...descriptors.foo, value: newFoo })
assert.deepStrictEqual(newDescriptors.bar, { ...descriptors.bar, value: newBar })
assert.deepStrictEqual(newDescriptors.baz, { ...descriptors.baz, get: newGetBaz })
assert.deepStrictEqual(newDescriptors.qux, { ...descriptors.qux, set: newSetQux })
})
it('throws if using an accessor decorator for a method', function () {
assert.throws(() =>
decorateClass(
class {
foo() {}
},
{ foo: { get: identity, set: identity } }
)
)
})
it('throws if using a method decorator for an accessor', function () {
assert.throws(() =>
decorateClass(
class {
get foo() {}
},
{ foo: identity }
)
)
})
})
it('decorateMethodsWith is an alias of decorateClass', function () {
assert.strictEqual(decorateMethodsWith, decorateClass)
})
describe('perInstance', () => {
it('works', () => {
let calls = 0
const expectedArgs = [Math.random(), Math.random()]
const expectedFn = Function.prototype
function wrapper(fn, ...args) {
assert.strictEqual(fn, expectedFn)
assert.deepStrictEqual(args, expectedArgs)
const i = ++calls
return () => i
}
const wrapped = perInstance(expectedFn, wrapper, ...expectedArgs)
// decorator is not called before decorated called
assert.strictEqual(calls, 0)
const o1 = {}
const o2 = {}
assert.strictEqual(wrapped.call(o1), 1)
// the same decorated function is returned for the same instance
assert.strictEqual(wrapped.call(o1), 1)
// a new decorated function is returned for another instance
assert.strictEqual(wrapped.call(o2), 2)
})
})

View File

@@ -20,15 +20,11 @@
"url": "https://vates.fr"
},
"license": "ISC",
"version": "2.0.0",
"version": "0.1.0",
"engines": {
"node": ">=8.10"
},
"scripts": {
"postversion": "npm publish --access public",
"test": "tap"
},
"devDependencies": {
"tap": "^16.0.1"
"postversion": "npm publish --access public"
}
}

View File

@@ -1,5 +1,3 @@
'use strict'
const { asyncMap } = require('@xen-orchestra/async-map')
const { createLogger } = require('@xen-orchestra/log')

View File

@@ -1,5 +1,3 @@
'use strict'
/* eslint-env jest */
const { createDebounceResource } = require('./debounceResource')

View File

@@ -1,5 +1,3 @@
'use strict'
const ensureArray = require('ensure-array')
const { MultiKeyMap } = require('@vates/multi-key-map')

View File

@@ -1,5 +1,3 @@
'use strict'
/* eslint-env jest */
const { deduped } = require('./deduped')

View File

@@ -1,50 +0,0 @@
> This library is compatible with Node's `EventEmitter` and web browsers' `EventTarget` APIs.
### API
```js
import { EventListenersManager } from '@vates/event-listeners-manager'
const events = new EventListenersManager(emitter)
// adding listeners
events.add('foo', onFoo).add('bar', onBar).add('baz', onBaz)
// removing a specific listener
events.remove('foo', onFoo)
// removing all listeners for a specific event
events.removeAll('foo')
// removing all listeners
events.removeAll()
```
### Typical use case
> Removing all listeners when no longer necessary.
Manually:
```js
const onFoo = () => {}
const onBar = () => {}
const onBaz = () => {}
emitter.on('foo', onFoo).on('bar', onBar).on('baz', onBaz)
// CODE LOGIC
emitter.off('foo', onFoo).off('bar', onBar).off('baz', onBaz)
```
With this library:
```js
const events = new EventListenersManager(emitter)
events.add('foo', () => {}).add('bar', () => {}).add('baz', () => {})
// CODE LOGIC
events.removeAll()
```
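Since the manager is also compatible with web browsers' `EventTarget` API, the same pattern works in a DOM environment; a hedged sketch (assumes a browser global `window`):
```js
const events = new EventListenersManager(window)
events.add('resize', () => console.log('resized')).add('scroll', () => console.log('scrolled'))
// e.g. when the component goes away
events.removeAll()
```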

View File

@@ -1 +0,0 @@
../../scripts/npmignore

View File

@@ -1,81 +0,0 @@
<!-- DO NOT EDIT MANUALLY, THIS FILE HAS BEEN GENERATED -->
# @vates/event-listeners-manager
[![Package Version](https://badgen.net/npm/v/@vates/event-listeners-manager)](https://npmjs.org/package/@vates/event-listeners-manager) ![License](https://badgen.net/npm/license/@vates/event-listeners-manager) [![PackagePhobia](https://badgen.net/bundlephobia/minzip/@vates/event-listeners-manager)](https://bundlephobia.com/result?p=@vates/event-listeners-manager) [![Node compatibility](https://badgen.net/npm/node/@vates/event-listeners-manager)](https://npmjs.org/package/@vates/event-listeners-manager)
## Install
Installation of the [npm package](https://npmjs.org/package/@vates/event-listeners-manager):
```
> npm install --save @vates/event-listeners-manager
```
## Usage
> This library is compatible with Node's `EventEmitter` and web browsers' `EventTarget` APIs.
### API
```js
import { EventListenersManager } from '@vates/event-listeners-manager'
const events = new EventListenersManager(emitter)
// adding listeners
events.add('foo', onFoo).add('bar', onBar).add('baz', onBaz)
// removing a specific listener
events.remove('foo', onFoo)
// removing all listeners for a specific event
events.removeAll('foo')
// removing all listeners
events.removeAll()
```
### Typical use case
> Removing all listeners when no longer necessary.
Manually:
```js
const onFoo = () => {}
const onBar = () => {}
const onBaz = () => {}
emitter.on('foo', onFoo).on('bar', onBar).on('baz', onBaz)
// CODE LOGIC
emitter.off('foo', onFoo).off('bar', onBar).off('baz', onBaz)
```
With this library:
```js
const events = new EventListenersManager(emitter)
events.add('foo', () => {}).add('bar', () => {}).add('baz', () => {})
// CODE LOGIC
events.removeAll()
```
## Contributions
Contributions are _very_ welcomed, either on the documentation or on
the code.
You may:
- report any [issue](https://github.com/vatesfr/xen-orchestra/issues)
you've encountered;
- fork and create a pull request.
## License
[ISC](https://spdx.org/licenses/ISC) © [Vates SAS](https://vates.fr)

View File

@@ -1,56 +0,0 @@
'use strict'
exports.EventListenersManager = class EventListenersManager {
constructor(emitter) {
this._listeners = new Map()
this._add = (emitter.addListener || emitter.addEventListener).bind(emitter)
this._remove = (emitter.removeListener || emitter.removeEventListener).bind(emitter)
}
add(type, listener) {
let listeners = this._listeners.get(type)
if (listeners === undefined) {
listeners = new Set()
this._listeners.set(type, listeners)
}
// don't add the same listener multiple times (allowed on Node.js)
if (!listeners.has(listener)) {
listeners.add(listener)
this._add(type, listener)
}
return this
}
remove(type, listener) {
const allListeners = this._listeners
const listeners = allListeners.get(type)
if (listeners !== undefined && listeners.delete(listener)) {
this._remove(type, listener)
if (listeners.size === 0) {
allListeners.delete(type)
}
}
return this
}
removeAll(type) {
const allListeners = this._listeners
const remove = this._remove
const types = type !== undefined ? [type] : allListeners.keys()
for (const type of types) {
const listeners = allListeners.get(type)
if (listeners !== undefined) {
allListeners.delete(type)
for (const listener of listeners) {
remove(type, listener)
}
}
}
return this
}
}

View File

@@ -1,67 +0,0 @@
'use strict'
const t = require('tap')
const { EventEmitter } = require('events')
const { EventListenersManager } = require('./')
const noop = Function.prototype
// function spy (impl = Function.prototype) {
// function spy() {
// spy.calls.push([Array.from(arguments), this])
// }
// spy.calls = []
// return spy
// }
function assertListeners(t, event, listeners) {
t.strictSame(t.context.ee.listeners(event), listeners)
}
t.beforeEach(function (t) {
t.context.ee = new EventEmitter()
t.context.em = new EventListenersManager(t.context.ee)
})
t.test('.add adds a listener', function (t) {
t.context.em.add('foo', noop)
assertListeners(t, 'foo', [noop])
t.end()
})
t.test('.add does not add a duplicate listener', function (t) {
t.context.em.add('foo', noop).add('foo', noop)
assertListeners(t, 'foo', [noop])
t.end()
})
t.test('.remove removes a listener', function (t) {
t.context.em.add('foo', noop).remove('foo', noop)
assertListeners(t, 'foo', [])
t.end()
})
t.test('.removeAll removes all listeners of a given type', function (t) {
t.context.em.add('foo', noop).add('bar', noop).removeAll('foo')
assertListeners(t, 'foo', [])
assertListeners(t, 'bar', [noop])
t.end()
})
t.test('.removeAll removes all listeners', function (t) {
t.context.em.add('foo', noop).add('bar', noop).removeAll()
assertListeners(t, 'foo', [])
assertListeners(t, 'bar', [])
t.end()
})

View File

@@ -1,46 +0,0 @@
{
"engines": {
"node": ">=6"
},
"private": false,
"name": "@vates/event-listeners-manager",
"descriptions": "Easy way to clean up event listeners",
"keywords": [
"add",
"addEventListener",
"addListener",
"browser",
"clear",
"DOM",
"emitter",
"event",
"EventEmitter",
"EventTarget",
"management",
"manager",
"node",
"remove",
"removeEventListener",
"removeListener"
],
"homepage": "https://github.com/vatesfr/xen-orchestra/tree/master/@vates/event-listeners-manager",
"bugs": "https://github.com/vatesfr/xen-orchestra/issues",
"repository": {
"directory": "@vates/event-listeners-manager",
"type": "git",
"url": "https://github.com/vatesfr/xen-orchestra.git"
},
"author": {
"name": "Vates SAS",
"url": "https://vates.fr"
},
"license": "ISC",
"version": "1.0.1",
"scripts": {
"postversion": "npm publish --access public",
"test": "tap --branches=72"
},
"devDependencies": {
"tap": "^16.2.0"
}
}

View File

@@ -1 +0,0 @@
../../scripts/npmignore

View File

@@ -1,71 +0,0 @@
'use strict'
const LRU = require('lru-cache')
const Fuse = require('fuse-native')
const { VhdSynthetic } = require('vhd-lib')
const { Disposable, fromCallback } = require('promise-toolbox')
const { createLogger } = require('@xen-orchestra/log')
const { warn } = createLogger('vates:fuse-vhd')
// build a stat object from https://github.com/fuse-friends/fuse-native/blob/master/test/fixtures/stat.js
const stat = st => ({
mtime: st.mtime || new Date(),
atime: st.atime || new Date(),
ctime: st.ctime || new Date(),
size: st.size !== undefined ? st.size : 0,
mode: st.mode === 'dir' ? 16877 : st.mode === 'file' ? 33188 : st.mode === 'link' ? 41453 : st.mode,
uid: st.uid !== undefined ? st.uid : process.getuid(),
gid: st.gid !== undefined ? st.gid : process.getgid(),
})
exports.mount = Disposable.factory(async function* mount(handler, diskPath, mountDir) {
const vhd = yield VhdSynthetic.fromVhdChain(handler, diskPath)
const cache = new LRU({
max: 16, // each cached block is 2MB in size
})
await vhd.readBlockAllocationTable()
const fuse = new Fuse(mountDir, {
async readdir(path, cb) {
if (path === '/') {
return cb(null, ['vhd0'])
}
cb(Fuse.ENOENT)
},
async getattr(path, cb) {
if (path === '/') {
return cb(
null,
stat({
mode: 'dir',
size: 4096,
})
)
}
if (path === '/vhd0') {
return cb(
null,
stat({
mode: 'file',
size: vhd.footer.currentSize,
})
)
}
cb(Fuse.ENOENT)
},
read(path, fd, buf, len, pos, cb) {
if (path === '/vhd0') {
return vhd
.readRawData(pos, len, cache, buf)
.then(cb)
}
throw new Error(`read file ${path} does not exist`)
},
})
return new Disposable(
() => fromCallback(() => fuse.unmount()),
fromCallback(() => fuse.mount())
)
})
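A hedged usage sketch of this disposable (the remote URL, the VHD path and the mount directory are placeholders; `getSyncedHandler` from `@xen-orchestra/fs` is assumed here to provide the remote handler):
```js
import { Disposable } from 'promise-toolbox'
import { getSyncedHandler } from '@xen-orchestra/fs'
import { readFile } from 'node:fs/promises'
import { mount } from '@vates/fuse-vhd'

await Disposable.use(async function* () {
  const handler = yield getSyncedHandler({ url: 'file:///path/to/remote' })
  yield mount(handler, 'xo-vm-backups/vm/vdis/disk.vhd', '/mnt/vhd')
  // while mounted, the VHD chain is exposed as a single raw file named vhd0
  const data = await readFile('/mnt/vhd/vhd0')
  console.log(data.length)
})
```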

View File

@@ -1,30 +0,0 @@
{
"name": "@vates/fuse-vhd",
"version": "1.0.0",
"license": "ISC",
"private": false,
"homepage": "https://github.com/vatesfr/xen-orchestra/tree/master/@vates/fuse-vhd",
"bugs": "https://github.com/vatesfr/xen-orchestra/issues",
"repository": {
"directory": "@vates/fuse-vhd",
"type": "git",
"url": "https://github.com/vatesfr/xen-orchestra.git"
},
"author": {
"name": "Vates SAS",
"url": "https://vates.fr"
},
"engines": {
"node": ">=10.0"
},
"dependencies": {
"@xen-orchestra/log": "^0.3.0",
"fuse-native": "^2.2.6",
"lru-cache": "^7.14.0",
"promise-toolbox": "^0.21.0",
"vhd-lib": "^4.1.0"
},
"scripts": {
"postversion": "npm publish --access public"
}
}

View File

@@ -1,5 +1,3 @@
'use strict'
class Node {
constructor(value) {
this.children = new Map()

View File

@@ -1,5 +1,3 @@
'use strict'
/* eslint-env jest */
const { MultiKeyMap } = require('./')

View File

@@ -1 +0,0 @@
../../scripts/npmignore

View File

@@ -1,41 +0,0 @@
export const INIT_PASSWD = 'NBDMAGIC' // "NBDMAGIC" ensures we're connected to an NBD server
export const OPTS_MAGIC = 'IHAVEOPT' // "IHAVEOPT" starts an option block
export const NBD_OPT_REPLY_MAGIC = 0x3e889045565a9 // magic received during negotiation
export const NBD_OPT_EXPORT_NAME = 1
export const NBD_OPT_ABORT = 2
export const NBD_OPT_LIST = 3
export const NBD_OPT_STARTTLS = 5
export const NBD_OPT_INFO = 6
export const NBD_OPT_GO = 7
export const NBD_FLAG_HAS_FLAGS = 1 << 0
export const NBD_FLAG_READ_ONLY = 1 << 1
export const NBD_FLAG_SEND_FLUSH = 1 << 2
export const NBD_FLAG_SEND_FUA = 1 << 3
export const NBD_FLAG_ROTATIONAL = 1 << 4
export const NBD_FLAG_SEND_TRIM = 1 << 5
export const NBD_FLAG_FIXED_NEWSTYLE = 1 << 0
export const NBD_CMD_FLAG_FUA = 1 << 0
export const NBD_CMD_FLAG_NO_HOLE = 1 << 1
export const NBD_CMD_FLAG_DF = 1 << 2
export const NBD_CMD_FLAG_REQ_ONE = 1 << 3
export const NBD_CMD_FLAG_FAST_ZERO = 1 << 4
export const NBD_CMD_READ = 0
export const NBD_CMD_WRITE = 1
export const NBD_CMD_DISC = 2
export const NBD_CMD_FLUSH = 3
export const NBD_CMD_TRIM = 4
export const NBD_CMD_CACHE = 5
export const NBD_CMD_WRITE_ZEROES = 6
export const NBD_CMD_BLOCK_STATUS = 7
export const NBD_CMD_RESIZE = 8
export const NBD_REQUEST_MAGIC = 0x25609513 // magic number to create a new NBD request to send to the server
export const NBD_REPLY_MAGIC = 0x67446698 // magic number received from the server when reading the response to an NBD request
export const NBD_DEFAULT_PORT = 10809
export const NBD_DEFAULT_BLOCK_SIZE = 64 * 1024
export const MAX_BUFFER_LENGTH = 10 * 1024 * 1024
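As a non-authoritative illustration (not part of this file), the transmission flags received during the handshake are typically tested against these constants with bitwise AND:
```js
import { NBD_FLAG_HAS_FLAGS, NBD_FLAG_READ_ONLY } from './constants.mjs'

// hypothetical helper: decode the 16-bit transmission flags sent by the server
export function parseTransmissionFlags(flags) {
  if ((flags & NBD_FLAG_HAS_FLAGS) === 0) {
    throw new Error('an NBD server must always set NBD_FLAG_HAS_FLAGS')
  }
  return { readOnly: (flags & NBD_FLAG_READ_ONLY) !== 0 }
}
```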

View File

@@ -1,272 +0,0 @@
import assert from 'node:assert'
import { Socket } from 'node:net'
import { connect } from 'node:tls'
import {
INIT_PASSWD,
MAX_BUFFER_LENGTH,
NBD_CMD_READ,
NBD_DEFAULT_BLOCK_SIZE,
NBD_DEFAULT_PORT,
NBD_FLAG_FIXED_NEWSTYLE,
NBD_FLAG_HAS_FLAGS,
NBD_OPT_EXPORT_NAME,
NBD_OPT_REPLY_MAGIC,
NBD_OPT_STARTTLS,
NBD_REPLY_MAGIC,
NBD_REQUEST_MAGIC,
OPTS_MAGIC,
} from './constants.mjs'
// documentation is here : https://github.com/NetworkBlockDevice/nbd/blob/master/doc/proto.md
export default class NbdClient {
_serverAddress
_serverCert
_serverPort
_serverSocket
_useSecureConnection = false
_exportname
_nbDiskBlocks = 0
_receptionBuffer = Buffer.alloc(0)
_sendingBuffer = Buffer.alloc(0)
// ensure the reads are resolved in the right order
_rawReadResolve = []
_rawReadLength = []
// AFAIK, there is no guarantee the server answers in the same order as the queries
_nextCommandQueryId = BigInt(0)
_commandQueries = {} // map of queries waiting for an answer
constructor({ address, port = NBD_DEFAULT_PORT, exportname, cert, secure = true }) {
this._address = address
this._serverPort = port
this._exportname = exportname
this._serverCert = cert
this._useSecureConnection = secure
}
get nbBlocks() {
return this._nbDiskBlocks
}
_handleData(data) {
if (data !== undefined) {
this._receptionBuffer = Buffer.concat([this._receptionBuffer, Buffer.from(data)])
}
if (this._receptionBuffer.length > MAX_BUFFER_LENGTH) {
throw new Error(
`Buffer grown too much with a total size of ${this._receptionBuffer.length} bytes (last chunk is ${data.length})`
)
}
// if we're waiting for a specific length (in the handshake, for example, or for block data)
while (this._rawReadResolve.length > 0 && this._receptionBuffer.length >= this._rawReadLength[0]) {
const resolve = this._rawReadResolve.shift()
const waitingForLength = this._rawReadLength.shift()
resolve(this._takeFromBuffer(waitingForLength))
}
if (this._rawReadResolve.length === 0 && this._receptionBuffer.length > 4) {
if (this._receptionBuffer.readInt32BE(0) === NBD_REPLY_MAGIC) {
this._readBlockResponse()
}
// keep the received bytes in the buffer for subsequent use
}
}
async _addListenners() {
const serverSocket = this._serverSocket
serverSocket.on('data', data => this._handleData(data))
serverSocket.on('close', function () {
console.log('Connection closed')
})
serverSocket.on('error', function (err) {
throw err
})
}
async _tlsConnect() {
return new Promise(resolve => {
this._serverSocket = connect(
{
socket: this._serverSocket,
rejectUnauthorized: false,
cert: this._serverCert,
},
resolve
)
this._addListenners()
})
}
async _unsecureConnect() {
this._serverSocket = new Socket()
this._addListenners()
return new Promise((resolve, reject) => {
this._serverSocket.connect(this._serverPort, this._serverAddress, () => {
resolve()
})
})
}
async connect() {
await this._unsecureConnect()
await this._handshake()
}
async disconnect() {
await this._serverSocket.destroy()
}
async _sendOption(option, buffer = Buffer.alloc(0)) {
await this._writeToSocket(OPTS_MAGIC)
await this._writeToSocketInt32(option)
await this._writeToSocketInt32(buffer.length)
await this._writeToSocket(buffer)
assert(await this._readFromSocketInt64(), NBD_OPT_REPLY_MAGIC) // magic number everywhere
assert(await this._readFromSocketInt32(), option) // the option passed
assert(await this._readFromSocketInt32(), 1) // ACK
const length = await this._readFromSocketInt32()
assert(length === 0) // length
}
async _handshake() {
assert(await this._readFromSocket(8), INIT_PASSWD)
assert(await this._readFromSocket(8), OPTS_MAGIC)
const flagsBuffer = await this._readFromSocket(2)
const flags = flagsBuffer.readInt16BE(0)
assert(flags & NBD_FLAG_FIXED_NEWSTYLE) // only FIXED_NEWSTYLE is supported among the server options
await this._writeToSocketInt32(NBD_FLAG_FIXED_NEWSTYLE) // the client also supports NBD_FLAG_C_FIXED_NEWSTYLE
if (this._useSecureConnection) {
// upgrade socket to TLS
await this._sendOption(NBD_OPT_STARTTLS)
await this._tlsConnect()
}
// send the export name (required); this also implicitly closes the negotiation phase
await this._writeToSocket(Buffer.from(OPTS_MAGIC))
await this._writeToSocketInt32(NBD_OPT_EXPORT_NAME)
await this._writeToSocketInt32(this._exportname.length)
await this._writeToSocket(Buffer.from(this._exportname))
// 8 + 2 + 124
const answer = await this._readFromSocket(134)
const exportSize = answer.readBigUInt64BE(0)
const transmissionFlags = answer.readInt16BE(8)
assert(transmissionFlags & NBD_FLAG_HAS_FLAGS, 'NBD_FLAG_HAS_FLAGS') // must always be 1 per the spec
// the XAPI server always sends NBD_FLAG_READ_ONLY (3) as a flag
this._nbDiskBlocks = Number(exportSize / BigInt(NBD_DEFAULT_BLOCK_SIZE))
this._exportSize = exportSize
}
_takeFromBuffer(length) {
const res = Buffer.from(this._receptionBuffer.slice(0, length))
this._receptionBuffer = this._receptionBuffer.slice(length)
return res
}
_readFromSocket(length) {
if (this._receptionBuffer.length >= length) {
return this._takeFromBuffer(length)
}
return new Promise(resolve => {
this._rawReadResolve.push(resolve)
this._rawReadLength.push(length)
})
}
_writeToSocket(buffer) {
return new Promise(resolve => {
this._serverSocket.write(buffer, resolve)
})
}
async _readFromSocketInt32() {
const buffer = await this._readFromSocket(4)
return buffer.readInt32BE(0)
}
async _readFromSocketInt64() {
const buffer = await this._readFromSocket(8)
return buffer.readBigUInt64BE(0)
}
_writeToSocketUInt32(int) {
const buffer = Buffer.alloc(4)
buffer.writeUInt32BE(int)
return this._writeToSocket(buffer)
}
_writeToSocketInt32(int) {
const buffer = Buffer.alloc(4)
buffer.writeInt32BE(int)
return this._writeToSocket(buffer)
}
_writeToSocketInt16(int) {
const buffer = Buffer.alloc(2)
buffer.writeInt16BE(int)
return this._writeToSocket(buffer)
}
_writeToSocketInt64(int) {
const buffer = Buffer.alloc(8)
buffer.writeBigUInt64BE(BigInt(int))
return this._writeToSocket(buffer)
}
async _readBlockResponse() {
const magic = await this._readFromSocketInt32()
if (magic !== NBD_REPLY_MAGIC) {
throw new Error(`magic number for block answer is wrong : ${magic}`)
}
// error
const error = await this._readFromSocketInt32()
if (error !== 0) {
throw new Error(`GOT ERROR CODE : ${error}`)
}
const blockQueryId = await this._readFromSocketInt64()
const query = this._commandQueries[blockQueryId]
if (!query) {
throw new Error(` no query associated with id ${blockQueryId} ${Object.keys(this._commandQueries)}`)
}
delete this._commandQueries[blockQueryId]
const data = await this._readFromSocket(query.size)
assert.strictEqual(data.length, query.size)
query.resolve(data)
this._handleData()
}
async readBlock(index, size = NBD_DEFAULT_BLOCK_SIZE) {
const queryId = this._nextCommandQueryId
this._nextCommandQueryId++
const buffer = Buffer.alloc(28)
buffer.writeInt32BE(NBD_REQUEST_MAGIC, 0)
buffer.writeInt16BE(0, 4) // no command flags for a simple block read
buffer.writeInt16BE(NBD_CMD_READ, 6)
buffer.writeBigUInt64BE(queryId, 8)
// byte offset in the raw disk
const offset = BigInt(index) * BigInt(size)
buffer.writeBigUInt64BE(offset, 16)
// ensure we do not read past the end of the export (which immediately disconnects us)
const maxSize = Math.min(Number(this._exportSize - offset), size)
// size wanted
buffer.writeInt32BE(maxSize, 24)
return new Promise(resolve => {
this._commandQueries[queryId] = {
size: maxSize,
resolve,
}
// write command at once to ensure no concurrency issue
this._writeToSocket(buffer)
})
}
}
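A condensed, hedged usage sketch of this client (the address and export name are placeholders; the test file further below shows a complete run against nbdkit):
```js
import NbdClient from './index.mjs'

const client = new NbdClient({
  address: '192.0.2.10', // placeholder NBD server address
  exportname: 'MY_EXPORT', // placeholder export name
  secure: false,
})
await client.connect()
// read the first block (64 KiB by default)
const block = await client.readBlock(0)
console.log(block.length)
await client.disconnect()
```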

View File

@@ -1,75 +0,0 @@
import assert from 'assert'
import NbdClient from './index.mjs'
import { spawn } from 'node:child_process'
import fs from 'node:fs/promises'
import { test } from 'tap'
import tmp from 'tmp'
import { pFromCallback } from 'promise-toolbox'
import { asyncEach } from '@vates/async-each'
const FILE_SIZE = 2 * 1024 * 1024
async function createTempFile(size) {
const tmpPath = await pFromCallback(cb => tmp.file(cb))
const data = Buffer.alloc(size, 0)
for (let i = 0; i < size; i += 4) {
data.writeUInt32BE(i, i)
}
await fs.writeFile(tmpPath, data)
return tmpPath
}
test('it works with unsecured network', async tap => {
const path = await createTempFile(FILE_SIZE)
const nbdServer = spawn(
'nbdkit',
[
'file',
path,
'--newstyle', //
'--exit-with-parent',
'--read-only',
'--export-name=MY_SECRET_EXPORT',
],
{
stdio: ['inherit', 'inherit', 'inherit'],
}
)
const client = new NbdClient({
address: 'localhost',
exportname: 'MY_SECRET_EXPORT',
secure: false,
})
await client.connect()
const CHUNK_SIZE = 32 * 1024 // non default size
const indexes = []
for (let i = 0; i < FILE_SIZE / CHUNK_SIZE; i++) {
indexes.push(i)
}
// read multiple blocks in parallel
await asyncEach(
indexes,
async i => {
const block = await client.readBlock(i, CHUNK_SIZE)
let blockOk = true
let firstFail
for (let j = 0; j < CHUNK_SIZE; j += 4) {
const wanted = i * CHUNK_SIZE + j
const found = block.readUInt32BE(j)
blockOk = blockOk && found === wanted
if (!blockOk && firstFail === undefined) {
firstFail = j
}
}
tap.ok(blockOk, `check block ${i} content`)
},
{ concurrency: 8 }
)
await client.disconnect()
nbdServer.kill()
await fs.unlink(path)
})

View File

@@ -1,30 +0,0 @@
{
"private": true,
"name": "@vates/nbd-client",
"homepage": "https://github.com/vatesfr/xen-orchestra/tree/master/@vates/nbd-client",
"bugs": "https://github.com/vatesfr/xen-orchestra/issues",
"repository": {
"directory": "@vates/nbd-client",
"type": "git",
"url": "https://github.com/vatesfr/xen-orchestra.git"
},
"author": {
"name": "Vates SAS",
"url": "https://vates.fr"
},
"license": "AGPL-3.0-or-later",
"version": "0.0.1",
"engines": {
"node": ">=14.0"
},
"dependencies": {
"@vates/async-each": "^1.0.0",
"@xen-orchestra/async-map": "^0.1.2",
"promise-toolbox": "^0.21.0",
"xen-api": "^1.2.2"
},
"devDependencies": {
"tap": "^16.3.0",
"tmp": "^0.2.1"
}
}

View File

@@ -1,152 +0,0 @@
import NbdClient from '../index.js'
import { Xapi } from 'xen-api'
import readline from 'node:readline'
import { stdin as input, stdout as output } from 'node:process'
import { asyncMap } from '@xen-orchestra/async-map'
import { downloadVhd, getFullBlocks, getChangedNbdBlocks } from './utils.mjs'
const xapi = new Xapi({
auth: {
user: 'root',
password: 'vateslab',
},
url: '172.16.210.11',
allowUnauthorized: true,
})
await xapi.connect()
const networks = await xapi.call('network.get_all_records')
const nbdNetworks = Object.values(networks).filter(
network => network.purpose.includes('nbd') || network.purpose.includes('insecure_nbd')
)
let secure = false
if (!nbdNetworks.length) {
console.log(`you don't have any NBD-enabled network`)
console.log(`please add a purpose of nbd (to use TLS) or insecure_nbd to one of the host networks`)
process.exit()
}
const network = nbdNetworks[0]
secure = network.purpose.includes('nbd')
console.log(`we will use network **${network.name_label}** ${secure ? 'with' : 'without'} TLS`)
const rl = readline.createInterface({ input, output })
const question = text => {
return new Promise(resolve => {
rl.question(text, resolve)
})
}
let vmuuid, vmRef
do {
vmuuid = await question('VM uuid ? ')
try {
vmRef = xapi.getObject(vmuuid).$ref
} catch (e) {
// console.log(e)
console.log('maybe the objects were not loaded, try again')
await new Promise(resolve => setTimeout(resolve, 1000))
}
} while (!vmRef)
const vdiRefs = (
await asyncMap(await xapi.call('VM.get_VBDs', vmRef), async vbd => {
const vdi = await xapi.call('VBD.get_VDI', vbd)
return vdi
})
).filter(vdiRef => vdiRef !== 'OpaqueRef:NULL')
const vdiRef = vdiRefs[0]
const vdi = xapi.getObject(vdiRef)
console.log('Will work on vdi [', vdi.name_label, ']')
const cbt_enabled = vdi.cbt_enabled
console.log('Change block tracking is [', cbt_enabled ? 'enabled' : 'disabled', ']')
if (!cbt_enabled) {
const shouldEnable = await question('would you like to enable it ? Y/n ')
if (shouldEnable === 'Y') {
await xapi.call('VDI.enable_cbt', vdiRef)
console.log('CBT is now enable for this VDI')
console.log('You must make a snapshot, write some data and relaunch this script to backup changes')
} else {
console.warn('did nothing')
}
process.exit()
}
console.log('will search for suitable snapshots')
const snapshots = vdi.snapshots.map(snapshotRef => xapi.getObject(snapshotRef)).filter(({ cbt_enabled }) => cbt_enabled)
if (snapshots.length < 2) {
throw new Error(`not enough snapshots with cbt enabled, found ${snapshots.length} and 2 are needed`)
}
console.log('found snapshots will compare last two snapshots with cbt_enabled')
const snapshotRef = xapi.getObject(snapshots[snapshots.length - 1].uuid).$ref
const snapshotTarget = xapi.getObject(snapshots[snapshots.length - 2].uuid).$ref
console.log('older snapshot is ', xapi.getObject(snapshotRef).snapshot_time)
console.log('newer one is ', xapi.getObject(snapshotTarget).snapshot_time)
console.log('## will get bitmap of changed blocks')
const cbt = Buffer.from(await xapi.call('VDI.list_changed_blocks', snapshotRef, snapshotTarget), 'base64')
console.log('got changes')
console.log('will connect to NBD server')
const nbd = (await xapi.call('VDI.get_nbd_info', snapshotTarget))[0]
if (!nbd) {
console.error('Nbd is not enabled on the host')
console.error('you should add `insecure_nbd` as the `purpose` of a network of this host')
process.exit()
}
nbd.secure = true
// console.log(nbd)
const client = new NbdClient(nbd)
await client.connect()
// @todo : should also handle last blocks that could be incomplete
const stats = {}
for (const nbBlocksRead of [32, 16, 8, 4, 2, 1]) {
const blockSize = nbBlocksRead * 64 * 1024
stats[blockSize] = {}
const MASK = 0x80
const test = (map, bit) => ((map[bit >> 3] << (bit & 7)) & MASK) !== 0
const changed = []
for (let i = 0; i < (cbt.length * 8) / nbBlocksRead; i++) {
let blockChanged = false
for (let j = 0; j < nbBlocksRead; j++) {
blockChanged = blockChanged || test(cbt, i * nbBlocksRead + j)
}
if (blockChanged) {
changed.push(i)
}
}
console.log(changed.length, 'block changed')
for (const concurrency of [32, 16, 8, 4, 2]) {
const { speed } = await getChangedNbdBlocks(client, changed, concurrency, blockSize)
stats[blockSize][concurrency] = speed
}
}
console.log('speed summary')
console.table(stats)
console.log('## will check full download of the base vdi ')
await getFullBlocks(client, 16, 512 * 1024) // a good sweet spot
console.log('## will check vhd delta export size and speed')
console.log('## will check full vhd export size and speed')
await downloadVhd({
xapi,
query: {
format: 'vhd',
vdi: snapshotTarget,
},
})

View File

@@ -1,97 +0,0 @@
import NbdClient from '../index.js'
import { Xapi } from 'xen-api'
import { asyncMap } from '@xen-orchestra/async-map'
import { downloadVhd, getFullBlocks } from './utils.mjs'
import fs from 'fs/promises'
const xapi = new Xapi({
auth: {
user: 'root',
password: 'vateslab',
},
url: '172.16.210.11',
allowUnauthorized: true,
})
await xapi.connect()
const vmuuid = '123e4f2b-498e-d0af-15ae-f835a1e9f59f'
let vmRef
do {
try {
vmRef = xapi.getObject(vmuuid).$ref
} catch (e) {
console.log('maybe the objects were not loaded yet, trying again ')
await new Promise(resolve => setTimeout(resolve, 1000))
}
} while (!vmRef)
const vdiRefs = (
await asyncMap(await xapi.call('VM.get_VBDs', vmRef), async vbd => {
const vdi = await xapi.call('VBD.get_VDI', vbd)
return vdi
})
).filter(vdiRef => vdiRef !== 'OpaqueRef:NULL')
const vdiRef = vdiRefs[0]
const vdi = xapi.getObject(vdiRef)
console.log('Will work on vdi [', vdi.name_label, ']')
console.log('will search for suitable snapshots')
const snapshots = vdi.snapshots.map(snapshotRef => xapi.getObject(snapshotRef))
console.log('found snapshots, will use the last one for tests')
const snapshotRef = xapi.getObject(snapshots[snapshots.length - 1].uuid).$ref
console.log('will connect to NBD server')
const nbd = (await xapi.call('VDI.get_nbd_info', snapshotRef))[0]
if (!nbd) {
console.error('NBD is not enabled on the host')
console.error('you should add `insecure_nbd` to the `purpose` of one of the host networks')
process.exit()
}
const nbdClient = new NbdClient(nbd)
await nbdClient.connect()
let fd = await fs.open('/tmp/nbd.raw', 'w')
await getFullBlocks({
nbdClient,
concurrency: 8,
nbBlocksRead: 16 /* 1MB block */,
fd,
})
console.log(' done nbd ')
await fd.close()
fd = await fs.open('/tmp/export.raw', 'w')
await downloadVhd({
xapi,
query: {
format: 'raw',
vdi: snapshotRef,
},
fd,
})
fd.close()
fd = await fs.open('/tmp/export.vhd', 'w')
await downloadVhd({
xapi,
query: {
format: 'vhd',
vdi: snapshotRef,
},
fd,
})
fd.close()

View File

@@ -1,117 +0,0 @@
import NbdClient from '../index.js'
import { Xapi } from 'xen-api'
import readline from 'node:readline'
import { stdin as input, stdout as output } from 'node:process'
import { asyncMap } from '@xen-orchestra/async-map'
import { downloadVhd, getFullBlocks } from './utils.mjs'
const xapi = new Xapi({
auth: {
user: 'root',
password: 'vateslab',
},
url: '172.16.210.11',
allowUnauthorized: true,
})
await xapi.connect()
const networks = await xapi.call('network.get_all_records')
console.log({ networks })
const nbdNetworks = Object.values(networks).filter(
network => network.purpose.includes('nbd') || network.purpose.includes('insecure_nbd')
)
let secure = false
if (!nbdNetworks.length) {
console.log(`you don't have any NBD-enabled network`)
console.log(`please add a purpose of nbd (to use TLS) or insecure_nbd to one of the host networks`)
process.exit()
}
const network = nbdNetworks[0]
secure = network.purpose.includes('nbd')
console.log(`we will use network **${network.name_label}** ${secure ? 'with' : 'without'} TLS`)
const rl = readline.createInterface({ input, output })
const question = text => {
return new Promise(resolve => {
rl.question(text, resolve)
})
}
let vmuuid, vmRef
do {
vmuuid = '123e4f2b-498e-d0af-15ae-f835a1e9f59f' // await question('VM uuid ? ')
try {
vmRef = xapi.getObject(vmuuid).$ref
} catch (e) {
console.log(e)
console.log('maybe the objects were not loaded yet, trying again ')
}
} while (!vmRef)
const vdiRefs = (
await asyncMap(await xapi.call('VM.get_VBDs', vmRef), async vbd => {
const vdi = await xapi.call('VBD.get_VDI', vbd)
return vdi
})
).filter(vdiRef => vdiRef !== 'OpaqueRef:NULL')
const vdiRef = vdiRefs[0]
const vdi = xapi.getObject(vdiRef)
console.log('Will work on vdi [', vdi.name_label, ']')
console.log('will search for suitable snapshots')
const snapshots = vdi.snapshots.map(snapshotRef => xapi.getObject(snapshotRef))
console.log('found snapshots, will use the last one for tests')
const snapshotRef = xapi.getObject(snapshots[snapshots.length - 1].uuid).$ref
console.log('will connect to NBD server')
const nbd = (await xapi.call('VDI.get_nbd_info', snapshotRef))[0]
if (!nbd) {
console.error('NBD is not enabled on the host')
console.error('you should add `insecure_nbd` to the `purpose` of one of the host networks')
process.exit()
}
nbd.secure = secure
const nbdClient = new NbdClient(nbd)
await nbdClient.connect()
const maxDuration =
parseInt(await question('Maximum duration per test in seconds? (-1 for unlimited, default 30) '), 10) || 30
console.log('Will start downloading blocks for ', maxDuration, 'seconds')
console.log('## will check the nbd download speed')
const stats = {}
for (const nbBlocksRead of [32, 16, 8, 4, 2, 1]) {
stats[nbBlocksRead * 64 * 1024] = {}
for (const concurrency of [32, 16, 8, 4, 2]) {
const { speed } = await getFullBlocks({ nbdClient, concurrency, nbBlocksRead, maxDuration })
stats[nbBlocksRead * 64 * 1024][concurrency] = speed
}
}
console.log('speed summary')
console.table(stats)
console.log('## will check full vhd export size and speed')
await downloadVhd({
xapi,
query: {
format: 'vhd',
vdi: snapshotRef,
},
})
console.log('## will check full raw export size and speed')
await downloadVhd({
xapi,
query: {
format: 'raw',
vdi: snapshotRef,
},
})
process.exit()

View File

@@ -1,116 +0,0 @@
import { asyncEach } from '@vates/async-each'
import { CancelToken } from 'promise-toolbox'
import zlib from 'node:zlib'
export async function getChangedNbdBlocks(nbdClient, changed, concurrency, blockSize) {
let nbModified = 0
let size = 0
let compressedSize = 0
const start = new Date()
console.log('### with concurrency ', concurrency, ' blockSize ', blockSize / 1024 / 1024, 'MB')
const interval = setInterval(() => {
console.log(`${nbModified} blocks handled in ${new Date() - start} ms`)
}, 5000)
await asyncEach(
changed,
async blockIndex => {
if (new Date() - start > 30000) {
return
}
const data = await nbdClient.readBlock(blockIndex, blockSize)
await new Promise(resolve => {
zlib.gzip(data, { level: zlib.constants.Z_BEST_SPEED }, (_, compressed) => {
compressedSize += compressed.length
resolve()
})
})
size += data?.length ?? 0
nbModified++
},
{
concurrency,
}
)
clearInterval(interval)
console.log('duration :', new Date() - start)
console.log('read : ', size, 'bytes, compressed: ', compressedSize, 'ratio ', size / compressedSize)
console.log('speed : ', Math.round(((size / 1024 / 1024) * 1000) / (new Date() - start)), 'MB/s')
return { speed: Math.round(((size / 1024 / 1024) * 1000) / (new Date() - start)) }
}
export async function getFullBlocks({ nbdClient, concurrency = 1, nbBlocksRead = 1, fd, maxDuration = -1 } = {}) {
const blockSize = nbBlocksRead * 64 * 1024
let nbModified = 0
let size = 0
console.log('### with concurrency ', concurrency)
const start = new Date()
console.log(' max nb blocks ', nbdClient.nbBlocks / nbBlocksRead)
function* blockIterator() {
for (let i = 0; i < nbdClient.nbBlocks / nbBlocksRead; i++) {
yield i
}
}
const interval = setInterval(() => {
console.log(`${nbModified} blocks handled in ${new Date() - start} ms`)
}, 5000)
await asyncEach(
blockIterator(),
async blockIndex => {
if (maxDuration > 0 && new Date() - start > maxDuration * 1000) {
return
}
const data = await nbdClient.readBlock(blockIndex, blockSize)
size += data?.length ?? 0
nbModified++
if (fd) {
await fd.write(data, 0, data.length, blockIndex * blockSize)
}
},
{
concurrency,
}
)
clearInterval(interval)
if (new Date() - start < 10000) {
console.warn(
`data set too small or performance too high, the result won't be useful. Please relaunch with a bigger snapshot or a higher maximum data size`
)
}
console.log('duration :', new Date() - start)
console.log('nb blocks : ', nbModified)
console.log('read : ', size, 'bytes')
const speed = Math.round(((size / 1024 / 1024) * 1000 * 100) / (new Date() - start)) / 100
console.log('speed : ', speed, 'MB/s')
return { speed }
}
export async function downloadVhd({ xapi, query, fd, maxDuration = -1 } = {}) {
const startStream = new Date()
let sizeStream = 0
let nbChunk = 0
const interval = setInterval(() => {
console.log(`${nbChunk} chunks, ${sizeStream} bytes handled in ${new Date() - startStream} ms`)
}, 5000)
const stream = await xapi.getResource(CancelToken.none, '/export_raw_vdi/', {
query,
})
for await (const chunk of stream) {
sizeStream += chunk.length
if (fd) {
await fd.write(chunk)
}
nbChunk++
if (maxDuration > 0 && new Date() - startStream > maxDuration * 1000) {
break
}
}
clearInterval(interval)
console.log('Stream duration :', new Date() - startStream)
console.log('Stream read : ', sizeStream, 'bytes')
const speed = Math.round(((sizeStream / 1024 / 1024) * 1000 * 100) / (new Date() - startStream)) / 100
console.log('speed : ', speed, 'MB/s')
}

View File

@@ -1,5 +1,3 @@
'use strict'
const ms = require('ms')
exports.parseDuration = value => {

View File

@@ -1,57 +0,0 @@
`undefined` predicates are ignored and `undefined` is returned if all predicates are `undefined`, this permits the most efficient composition:
```js
const compositePredicate = every(undefined, some(predicate2, undefined))
// ends up as
const compositePredicate = predicate2
```
Predicates can also be passed wrapped in an array:
```js
const compositePredicate = every([predicate1, some([predicate2, predicate3])])
```
`this` and all arguments are passed to the nested predicates.
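For instance (an illustrative sketch, not part of the original docs), a composed predicate can rely on `this` being forwarded:
```js
const inRange = every(
  function (n) {
    return n >= this.min
  },
  function (n) {
    return n <= this.max
  }
)
inRange.call({ min: 3, max: 7 }, 5)
// → true
```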
### `every(predicates)`
> Returns a predicate that returns `true` iff every predicate returns `true`.
```js
const isBetween3And7 = every(
n => n >= 3,
n => n <= 7
)
isBetween3And7(0)
// → false
isBetween3And7(5)
// → true
isBetween3And7(10)
// → false
```
### `some(predicates)`
> Returns a predicate that returns `true` iff some predicate returns `true`.
```js
const isAliceOrBob = some(
name => name === 'Alice',
name => name === 'Bob'
)
isAliceOrBob('Alice')
// → true
isAliceOrBob('Bob')
// → true
isAliceOrBob('Oscar')
// → false
```

View File

@@ -1 +0,0 @@
../../scripts/npmignore

View File

@@ -1,90 +0,0 @@
<!-- DO NOT EDIT MANUALLY, THIS FILE HAS BEEN GENERATED -->
# @vates/predicates
[![Package Version](https://badgen.net/npm/v/@vates/predicates)](https://npmjs.org/package/@vates/predicates) ![License](https://badgen.net/npm/license/@vates/predicates) [![PackagePhobia](https://badgen.net/bundlephobia/minzip/@vates/predicates)](https://bundlephobia.com/result?p=@vates/predicates) [![Node compatibility](https://badgen.net/npm/node/@vates/predicates)](https://npmjs.org/package/@vates/predicates)
> Utilities to compose predicates
## Install
Installation of the [npm package](https://npmjs.org/package/@vates/predicates):
```
> npm install --save @vates/predicates
```
## Usage
`undefined` predicates are ignored and `undefined` is returned if all predicates are `undefined`, this permits the most efficient composition:
```js
const compositePredicate = every(undefined, some(predicate2, undefined))
// ends up as
const compositePredicate = predicate2
```
Predicates can also be passed wrapped in an array:
```js
const compositePredicate = every([predicate1, some([predicate2, predicate3])])
```
`this` and all arguments are passed to the nested predicates.
### `every(predicates)`
> Returns a predicate that returns `true` iff every predicate returns `true`.
```js
const isBetween3And7 = every(
n => n >= 3,
n => n <= 7
)
isBetween3And7(0)
// → false
isBetween3And7(5)
// → true
isBetween3And7(10)
// → false
```
### `some(predicates)`
> Returns a predicate that returns `true` iff some predicate returns `true`.
```js
const isAliceOrBob = some(
name => name === 'Alice',
name => name === 'Bob'
)
isAliceOrBob('Alice')
// → true
isAliceOrBob('Bob')
// → true
isAliceOrBob('Oscar')
// → false
```
## Contributions
Contributions are _very_ welcomed, either on the documentation or on
the code.
You may:
- report any [issue](https://github.com/vatesfr/xen-orchestra/issues)
you've encountered;
- fork and create a pull request.
## License
[ISC](https://spdx.org/licenses/ISC) © [Vates SAS](https://vates.fr)

View File

@@ -1,71 +0,0 @@
'use strict'
const {
isArray,
prototype: { filter },
} = Array
class InvalidPredicate extends TypeError {
constructor(value) {
super('not a valid predicate')
this.value = value
}
}
function isDefinedPredicate(value) {
if (value === undefined) {
return false
}
if (typeof value !== 'function') {
throw new InvalidPredicate(value)
}
return true
}
function handleArgs() {
let predicates
if (!(arguments.length === 1 && isArray((predicates = arguments[0])))) {
predicates = arguments
}
return filter.call(predicates, isDefinedPredicate)
}
exports.every = function every() {
const predicates = handleArgs.apply(this, arguments)
const n = predicates.length
if (n === 0) {
return
}
if (n === 1) {
return predicates[0]
}
return function everyPredicate() {
for (let i = 0; i < n; ++i) {
if (!predicates[i].apply(this, arguments)) {
return false
}
}
return true
}
}
exports.some = function some() {
const predicates = handleArgs.apply(this, arguments)
const n = predicates.length
if (n === 0) {
return
}
if (n === 1) {
return predicates[0]
}
return function somePredicate() {
for (let i = 0; i < n; ++i) {
if (predicates[i].apply(this, arguments)) {
return true
}
}
return false
}
}

View File

@@ -1,65 +0,0 @@
'use strict'
const assert = require('assert/strict')
const { describe, it } = require('tap').mocha
const { every, some } = require('./')
const T = () => true
const F = () => false
const testArgsHandling = fn => {
it('returns undefined if all predicates are undefined', () => {
assert.equal(fn(undefined), undefined)
assert.equal(fn([undefined]), undefined)
})
it('returns the predicate if only a single one is passed', () => {
assert.equal(fn(undefined, T), T)
assert.equal(fn([undefined, T]), T)
})
it('throws if it receives a non-predicate', () => {
const error = new TypeError('not a valid predicate')
error.value = 3
assert.throws(() => fn(3), error)
})
it('forwards this and arguments to predicates', () => {
const thisArg = 'qux'
const args = ['foo', 'bar', 'baz']
const predicate = function () {
assert.equal(this, thisArg)
assert.deepEqual(Array.from(arguments), args)
}
fn(predicate, predicate).apply(thisArg, args)
})
}
const runTests = (fn, truthTable) =>
it('works', () => {
truthTable.forEach(([result, ...predicates]) => {
assert.equal(fn(...predicates)(), result)
assert.equal(fn(predicates)(), result)
})
})
describe('every', () => {
testArgsHandling(every)
runTests(every, [
[true, T, T],
[false, T, F],
[false, F, T],
[false, F, F],
])
})
describe('some', () => {
testArgsHandling(some)
runTests(some, [
[true, T, T],
[true, T, F],
[true, F, T],
[false, F, F],
])
})

View File

@@ -1,40 +0,0 @@
{
"private": false,
"name": "@vates/predicates",
"description": "Utilities to compose predicates",
"keywords": [
"and",
"combine",
"compose",
"every",
"function",
"functions",
"or",
"predicate",
"predicates",
"some"
],
"homepage": "https://github.com/vatesfr/xen-orchestra/tree/master/@vates/predicates",
"bugs": "https://github.com/vatesfr/xen-orchestra/issues",
"repository": {
"directory": "@vates/predicates",
"type": "git",
"url": "https://github.com/vatesfr/xen-orchestra.git"
},
"author": {
"name": "Vates SAS",
"url": "https://vates.fr"
},
"license": "ISC",
"version": "1.0.0",
"engines": {
"node": ">=6"
},
"scripts": {
"postversion": "npm publish --access public",
"test": "tap"
},
"devDependencies": {
"tap": "^16.0.1"
}
}

View File

@@ -1,26 +0,0 @@
### `readChunk(stream, [size])`
- returns the next available chunk of data
- like `stream.read()`, a number of bytes can be specified
- returns with less data than expected if the stream has ended
- returns `null` if the stream has ended and no data has been read
```js
import { readChunk } from '@vates/read-chunk'
;(async () => {
let chunk
while ((chunk = await readChunk(stream, 1024)) !== null) {
// do something with chunk
}
})()
```
### `readChunkStrict(stream, [size])`
Similar behavior to `readChunk` but throws if the stream ended before the requested data could be read.
```js
import { readChunkStrict } from '@vates/read-chunk'
const chunk = await readChunkStrict(stream, 1024)
```
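When the stream ends with some data but less than requested, the implementation (see the `index.js` changes further down) attaches the partial data to the thrown error as `error.chunk`; a minimal usage sketch (illustrative, not from the original docs):
```js
import { readChunkStrict } from '@vates/read-chunk'
try {
  const chunk = await readChunkStrict(stream, 1024)
  // use chunk
} catch (error) {
  if (error.chunk !== undefined) {
    // the stream ended early: error.chunk holds the bytes that were read
  }
  throw error
}
```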

View File

@@ -16,12 +16,9 @@ Installation of the [npm package](https://npmjs.org/package/@vates/read-chunk):
## Usage
### `readChunk(stream, [size])`
- returns the next available chunk of data
- like `stream.read()`, a number of bytes can be specified
- returns with less data than expected if stream has ended
- returns `null` if the stream has ended and no data has been read
- returns `null` if the stream has ended
```js
import { readChunk } from '@vates/read-chunk'
@@ -33,16 +30,6 @@ import { readChunk } from '@vates/read-chunk'
})()
```
### `readChunkStrict(stream, [size])`
Similar behavior to `readChunk` but throws if the stream ended before the requested data could be read.
```js
import { readChunkStrict } from '@vates/read-chunk'
const chunk = await readChunkStrict(stream, 1024)
```
## Contributions
Contributions are _very_ welcomed, either on the documentation or on

View File

@@ -0,0 +1,13 @@
- returns the next available chunk of data
- like `stream.read()`, a number of bytes can be specified
- returns `null` if the stream has ended
```js
import { readChunk } from '@vates/read-chunk'
;(async () => {
let chunk
while ((chunk = await readChunk(stream, 1024)) !== null) {
// do something with chunk
}
})()
```

View File

@@ -1,5 +1,3 @@
'use strict'
const readChunk = (stream, size) =>
size === 0
? Promise.resolve(Buffer.alloc(0))
@@ -30,22 +28,3 @@ const readChunk = (stream, size) =>
onReadable()
})
exports.readChunk = readChunk
exports.readChunkStrict = async function readChunkStrict(stream, size) {
const chunk = await readChunk(stream, size)
if (chunk === null) {
throw new Error('stream has ended without data')
}
if (size !== undefined && chunk.length !== size) {
const error = new Error('stream has ended with not enough data')
Object.defineProperties(error, {
chunk: {
value: chunk,
},
})
throw error
}
return chunk
}

View File

@@ -1,10 +1,8 @@
'use strict'
/* eslint-env jest */
const { Readable } = require('stream')
const { readChunk, readChunkStrict } = require('./')
const { readChunk } = require('./')
const makeStream = it => Readable.from(it, { objectMode: false })
makeStream.obj = Readable.from
@@ -43,27 +41,3 @@ describe('readChunk', () => {
})
})
})
const rejectionOf = promise =>
promise.then(
value => {
throw value
},
error => error
)
describe('readChunkStrict', function () {
it('throws if stream is empty', async () => {
const error = await rejectionOf(readChunkStrict(makeStream([])))
expect(error).toBeInstanceOf(Error)
expect(error.message).toBe('stream has ended without data')
expect(error.chunk).toEqual(undefined)
})
it('throws if stream ends with not enough data', async () => {
const error = await rejectionOf(readChunkStrict(makeStream(['foo', 'bar']), 10))
expect(error).toBeInstanceOf(Error)
expect(error.message).toBe('stream has ended with not enough data')
expect(error.chunk).toEqual(Buffer.from('foobar'))
})
})

View File

@@ -19,7 +19,7 @@
"type": "git",
"url": "https://github.com/vatesfr/xen-orchestra.git"
},
"version": "1.0.0",
"version": "0.1.2",
"engines": {
"node": ">=8.10"
},

View File

@@ -1,7 +1,5 @@
#!/usr/bin/env node
'use strict'
const fs = require('fs')
const mapKeys = (object, iteratee) => {

View File

@@ -1,5 +1,3 @@
'use strict'
const wrapCall = (fn, arg, thisArg) => {
try {
return Promise.resolve(fn.call(thisArg, arg))

View File

@@ -1,5 +1,3 @@
'use strict'
/* eslint-env jest */
const { asyncMapSettled } = require('./')

View File

@@ -1,5 +1,3 @@
'use strict'
// type MaybePromise<T> = Promise<T> | T
//
// declare export function asyncMap<T1, T2>(

View File

@@ -0,0 +1 @@
module.exports = require('../../@xen-orchestra/babel-config')(require('./package.json'))

View File

@@ -9,14 +9,28 @@
},
"version": "0.2.0",
"engines": {
"node": ">=14"
"node": ">=10"
},
"main": "dist/",
"scripts": {
"build": "cross-env NODE_ENV=production babel --source-maps --out-dir=dist/ src/",
"dev": "cross-env NODE_ENV=development babel --watch --source-maps --out-dir=dist/ src/",
"postversion": "npm publish --access public",
"test": "tap --lines 67 --functions 92 --branches 52 --statements 67"
"prebuild": "rimraf dist/",
"predev": "yarn run prebuild",
"prepublishOnly": "yarn run build"
},
"devDependencies": {
"@babel/cli": "^7.7.4",
"@babel/core": "^7.7.4",
"@babel/plugin-proposal-decorators": "^7.8.0",
"@babel/plugin-proposal-nullish-coalescing-operator": "^7.8.0",
"@babel/preset-env": "^7.7.4",
"cross-env": "^7.0.2",
"rimraf": "^3.0.0"
},
"dependencies": {
"@vates/decorate-with": "^2.0.0",
"@vates/decorate-with": "^0.1.0",
"@xen-orchestra/log": "^0.3.0",
"golike-defer": "^0.5.1",
"object-hash": "^2.0.1"
@@ -26,8 +40,5 @@
"author": {
"name": "Vates SAS",
"url": "https://vates.fr"
},
"devDependencies": {
"tap": "^16.0.1"
}
}

View File

@@ -1,14 +1,12 @@
'use strict'
const assert = require('assert')
const hash = require('object-hash')
const { createLogger } = require('@xen-orchestra/log')
const { decorateClass } = require('@vates/decorate-with')
const { defer } = require('golike-defer')
import assert from 'assert'
import hash from 'object-hash'
import { createLogger } from '@xen-orchestra/log'
import { decorateWith } from '@vates/decorate-with'
import { defer } from 'golike-defer'
const log = createLogger('xo:audit-core')
exports.Storage = class Storage {
export class Storage {
constructor() {
this._lock = Promise.resolve()
}
@@ -31,7 +29,7 @@ const ID_TO_ALGORITHM = {
5: 'sha256',
}
class AlteredRecordError extends Error {
export class AlteredRecordError extends Error {
constructor(id, nValid, record) {
super('altered record')
@@ -40,9 +38,8 @@ class AlteredRecordError extends Error {
this.record = record
}
}
exports.AlteredRecordError = AlteredRecordError
class MissingRecordError extends Error {
export class MissingRecordError extends Error {
constructor(id, nValid) {
super('missing record')
@@ -50,10 +47,8 @@ class MissingRecordError extends Error {
this.nValid = nValid
}
}
exports.MissingRecordError = MissingRecordError
const NULL_ID = 'nullId'
exports.NULL_ID = NULL_ID
export const NULL_ID = 'nullId'
const HASH_ALGORITHM_ID = '5'
const createHash = (data, algorithmId = HASH_ALGORITHM_ID) =>
@@ -62,12 +57,13 @@ const createHash = (data, algorithmId = HASH_ALGORITHM_ID) =>
excludeKeys: key => key === 'id',
})}`
class AuditCore {
export class AuditCore {
constructor(storage) {
assert.notStrictEqual(storage, undefined)
this._storage = storage
}
@decorateWith(defer)
async add($defer, subject, event, data) {
const time = Date.now()
$defer(await this._storage.acquireLock())
@@ -152,6 +148,7 @@ class AuditCore {
}
}
@decorateWith(defer)
async deleteRangeAndRewrite($defer, newest, oldest) {
assert.notStrictEqual(newest, undefined)
assert.notStrictEqual(oldest, undefined)
@@ -192,9 +189,3 @@ class AuditCore {
}
}
}
exports.AuditCore = AuditCore
decorateClass(AuditCore, {
add: defer,
deleteRangeAndRewrite: defer,
})

View File

@@ -1,9 +1,6 @@
'use strict'
/* eslint-env jest */
const assert = require('assert/strict')
const { afterEach, describe, it } = require('tap').mocha
const { AlteredRecordError, AuditCore, MissingRecordError, NULL_ID, Storage } = require('.')
import { AlteredRecordError, AuditCore, MissingRecordError, NULL_ID, Storage } from '.'
const asyncIteratorToArray = async asyncIterator => {
const array = []
@@ -75,7 +72,7 @@ const auditCore = new AuditCore(db)
const storeAuditRecords = async () => {
await Promise.all(DATA.map(data => auditCore.add(...data)))
const records = await asyncIteratorToArray(auditCore.getFrom())
assert.equal(records.length, DATA.length)
expect(records.length).toBe(DATA.length)
return records
}
@@ -86,11 +83,10 @@ describe('auditCore', () => {
const [newestRecord, deletedRecord] = await storeAuditRecords()
const nValidRecords = await auditCore.checkIntegrity(NULL_ID, newestRecord.id)
assert.equal(nValidRecords, DATA.length)
expect(nValidRecords).toBe(DATA.length)
await db.del(deletedRecord.id)
await assert.rejects(
auditCore.checkIntegrity(NULL_ID, newestRecord.id),
await expect(auditCore.checkIntegrity(NULL_ID, newestRecord.id)).rejects.toEqual(
new MissingRecordError(deletedRecord.id, 1)
)
})
@@ -101,8 +97,7 @@ describe('auditCore', () => {
alteredRecord.event = ''
await db.put(alteredRecord)
await assert.rejects(
auditCore.checkIntegrity(NULL_ID, newestRecord.id),
await expect(auditCore.checkIntegrity(NULL_ID, newestRecord.id)).rejects.toEqual(
new AlteredRecordError(alteredRecord.id, 1, alteredRecord)
)
})
@@ -112,8 +107,8 @@ describe('auditCore', () => {
await auditCore.deleteFrom(secondRecord.id)
assert.equal(await db.get(firstRecord.id), undefined)
assert.equal(await db.get(secondRecord.id), undefined)
expect(await db.get(firstRecord.id)).toBe(undefined)
expect(await db.get(secondRecord.id)).toBe(undefined)
await auditCore.checkIntegrity(secondRecord.id, thirdRecord.id)
})

View File

@@ -46,7 +46,7 @@ module.exports = function (pkg, configs = {}) {
return {
comments: !__PROD__,
ignore: __PROD__ ? [/\btests?\//, /\.spec\.js$/] : undefined,
ignore: __PROD__ ? [/\.spec\.js$/] : undefined,
plugins: Object.keys(plugins)
.map(plugin => [plugin, plugins[plugin]])
.sort(([a], [b]) => {

View File

@@ -10,7 +10,7 @@
"url": "https://github.com/vatesfr/xen-orchestra.git"
},
"engines": {
"node": ">=8.3"
"node": ">=6"
},
"license": "AGPL-3.0-or-later",
"author": {

View File

@@ -1,5 +1,3 @@
'use strict'
const getopts = require('getopts')
const { version } = require('./package.json')

View File

@@ -1,5 +1,3 @@
'use strict'
const { dirname } = require('path')
const fs = require('promise-toolbox/promisifyAll')(require('fs'))

View File

@@ -1,4 +1,4 @@
'use strict'
#!/usr/bin/env node
// -----------------------------------------------------------------------------
@@ -26,13 +26,7 @@ module.exports = async function main(args) {
await asyncMap(_, async vmDir => {
vmDir = resolve(vmDir)
try {
await adapter.cleanVm(vmDir, {
fixMetadata: fix,
remove,
merge,
logInfo: (...args) => console.log(...args),
logWarn: (...args) => console.warn(...args),
})
await adapter.cleanVm(vmDir, { fixMetadata: fix, remove, merge, onLog: (...args) => console.warn(...args) })
} catch (error) {
console.error('adapter.cleanVm', vmDir, error)
}

View File

@@ -1,5 +1,3 @@
'use strict'
const filenamify = require('filenamify')
const get = require('lodash/get')
const { asyncMap } = require('@xen-orchestra/async-map')

View File

@@ -1,5 +1,3 @@
'use strict'
const groupBy = require('lodash/groupBy')
const { asyncMap } = require('@xen-orchestra/async-map')
const { createHash } = require('crypto')

View File

@@ -1,7 +1,5 @@
#!/usr/bin/env node
'use strict'
require('./_composeCommands')({
'clean-vms': {
get main() {

View File

@@ -7,12 +7,12 @@
"bugs": "https://github.com/vatesfr/xen-orchestra/issues",
"dependencies": {
"@xen-orchestra/async-map": "^0.1.2",
"@xen-orchestra/backups": "^0.28.0",
"@xen-orchestra/fs": "^3.1.0",
"@xen-orchestra/backups": "^0.13.0",
"@xen-orchestra/fs": "^0.18.0",
"filenamify": "^4.1.0",
"getopts": "^2.2.5",
"lodash": "^4.17.15",
"promise-toolbox": "^0.21.0"
"promise-toolbox": "^0.19.2"
},
"engines": {
"node": ">=7.10.1"
@@ -27,7 +27,7 @@
"scripts": {
"postversion": "npm publish --access public"
},
"version": "0.7.8",
"version": "0.6.0",
"license": "AGPL-3.0-or-later",
"author": {
"name": "Vates SAS",

View File

@@ -1,12 +1,10 @@
'use strict'
const { asyncMap, asyncMapSettled } = require('@xen-orchestra/async-map')
const Disposable = require('promise-toolbox/Disposable')
const ignoreErrors = require('promise-toolbox/ignoreErrors')
const Disposable = require('promise-toolbox/Disposable.js')
const ignoreErrors = require('promise-toolbox/ignoreErrors.js')
const { compileTemplate } = require('@xen-orchestra/template')
const { limitConcurrency } = require('limit-concurrency-decorator')
const { extractIdsFromSimplePattern } = require('./extractIdsFromSimplePattern.js')
const { extractIdsFromSimplePattern } = require('./_extractIdsFromSimplePattern.js')
const { PoolMetadataBackup } = require('./_PoolMetadataBackup.js')
const { Task } = require('./Task.js')
const { VmBackup } = require('./_VmBackup.js')
@@ -24,34 +22,6 @@ const getAdaptersByRemote = adapters => {
const runTask = (...args) => Task.run(...args).catch(noop) // errors are handled by logs
const DEFAULT_SETTINGS = {
reportWhen: 'failure',
}
const DEFAULT_VM_SETTINGS = {
bypassVdiChainsCheck: false,
checkpointSnapshot: false,
concurrency: 2,
copyRetention: 0,
deleteFirst: false,
exportRetention: 0,
fullInterval: 0,
healthCheckSr: undefined,
healthCheckVmsWithTags: [],
maxMergedDeltasPerRun: 2,
offlineBackup: false,
offlineSnapshot: false,
snapshotRetention: 0,
timeout: 0,
unconditionalSnapshot: false,
vmTimeout: 0,
}
const DEFAULT_METADATA_SETTINGS = {
retentionPoolMetadata: 0,
retentionXoMetadata: 0,
}
exports.Backup = class Backup {
constructor({ config, getAdapter, getConnectedRecord, job, schedule }) {
this._config = config
@@ -70,22 +40,17 @@ exports.Backup = class Backup {
'{job.name}': job.name,
'{vm.name_label}': vm => vm.name_label,
})
}
const { type } = job
const baseSettings = { ...DEFAULT_SETTINGS }
run() {
const type = this._job.type
if (type === 'backup') {
Object.assign(baseSettings, DEFAULT_VM_SETTINGS, config.defaultSettings, config.vm?.defaultSettings)
this.run = this._runVmBackup
return this._runVmBackup()
} else if (type === 'metadataBackup') {
Object.assign(baseSettings, DEFAULT_METADATA_SETTINGS, config.defaultSettings, config.metadata?.defaultSettings)
this.run = this._runMetadataBackup
return this._runMetadataBackup()
} else {
throw new Error(`No runner for the backup type ${type}`)
}
Object.assign(baseSettings, job.settings[''])
this._baseSettings = baseSettings
this._settings = { ...baseSettings, ...job.settings[schedule.id] }
}
async _runMetadataBackup() {
@@ -97,6 +62,13 @@ exports.Backup = class Backup {
}
const config = this._config
const settings = {
...config.defaultSettings,
...config.metadata.defaultSettings,
...job.settings[''],
...job.settings[schedule.id],
}
const poolIds = extractIdsFromSimplePattern(job.pools)
const isEmptyPools = poolIds.length === 0
const isXoMetadata = job.xoMetadata !== undefined
@@ -104,8 +76,6 @@ exports.Backup = class Backup {
throw new Error('no metadata mode found')
}
const settings = this._settings
const { retentionPoolMetadata, retentionXoMetadata } = settings
if (
@@ -217,7 +187,14 @@ exports.Backup = class Backup {
const schedule = this._schedule
const config = this._config
const settings = this._settings
const { settings } = job
const scheduleSettings = {
...config.defaultSettings,
...config.vm.defaultSettings,
...settings[''],
...settings[schedule.id],
}
await Disposable.use(
Disposable.all(
extractIdsFromSimplePattern(job.srs).map(id =>
@@ -245,15 +222,14 @@ exports.Backup = class Backup {
})
)
),
() => (settings.healthCheckSr !== undefined ? this._getRecord('SR', settings.healthCheckSr) : undefined),
async (srs, remoteAdapters, healthCheckSr) => {
async (srs, remoteAdapters) => {
// remove adapters that failed (already handled)
remoteAdapters = remoteAdapters.filter(_ => _ !== undefined)
// remove srs that failed (already handled)
srs = srs.filter(_ => _ !== undefined)
if (remoteAdapters.length === 0 && srs.length === 0 && settings.snapshotRetention === 0) {
if (remoteAdapters.length === 0 && srs.length === 0 && scheduleSettings.snapshotRetention === 0) {
return
}
@@ -263,27 +239,23 @@ exports.Backup = class Backup {
remoteAdapters = getAdaptersByRemote(remoteAdapters)
const allSettings = this._job.settings
const baseSettings = this._baseSettings
const handleVm = vmUuid =>
runTask({ name: 'backup VM', data: { type: 'VM', id: vmUuid } }, () =>
Disposable.use(this._getRecord('VM', vmUuid), vm =>
new VmBackup({
baseSettings,
config,
getSnapshotNameLabel,
healthCheckSr,
job,
// remotes,
remoteAdapters,
schedule,
settings: { ...settings, ...allSettings[vm.uuid] },
settings: { ...scheduleSettings, ...settings[vmUuid] },
srs,
vm,
}).run()
)
)
const { concurrency } = settings
const { concurrency } = scheduleSettings
await asyncMapSettled(vmIds, concurrency === 0 ? handleVm : limitConcurrency(concurrency)(handleVm))
}
)

View File

@@ -1,5 +1,3 @@
'use strict'
const { asyncMap } = require('@xen-orchestra/async-map')
exports.DurablePartition = class DurablePartition {

View File

@@ -1,64 +0,0 @@
'use strict'
const { Task } = require('./Task')
exports.HealthCheckVmBackup = class HealthCheckVmBackup {
#xapi
#restoredVm
constructor({ restoredVm, xapi }) {
this.#restoredVm = restoredVm
this.#xapi = xapi
}
async run() {
return Task.run(
{
name: 'vmstart',
},
async () => {
let restoredVm = this.#restoredVm
const xapi = this.#xapi
const restoredId = restoredVm.uuid
// remove vifs
await Promise.all(restoredVm.$VIFs.map(vif => xapi.callAsync('VIF.destroy', vif.$ref)))
const start = new Date()
// start Vm
await xapi.callAsync(
'VM.start',
restoredVm.$ref,
false, // Start paused?
false // Skip pre-boot checks?
)
const started = new Date()
const timeout = 10 * 60 * 1000
const startDuration = started - start
let remainingTimeout = timeout - startDuration
if (remainingTimeout < 0) {
throw new Error(`VM ${restoredId} not started after ${timeout / 1000} seconds`)
}
// wait for the 'Running' state to actually be stored in the local xapi object cache
restoredVm = await xapi.waitObjectState(restoredVm.$ref, vm => vm.power_state === 'Running', {
timeout: remainingTimeout,
})
const running = new Date()
remainingTimeout -= running - started
if (remainingTimeout < 0) {
throw new Error(`local xapi did not get the Running state for VM ${restoredId} after ${timeout / 1000} seconds`)
}
// wait for the guest tool version to be defined
await xapi.waitObjectState(restoredVm.guest_metrics, gm => gm?.PV_drivers_version?.major !== undefined, {
timeout: remainingTimeout,
})
}
)
}
}

View File

@@ -1,5 +1,3 @@
'use strict'
const assert = require('assert')
const { formatFilenameDate } = require('./_filenameDate.js')
@@ -8,9 +6,9 @@ const { Task } = require('./Task.js')
const { watchStreamSize } = require('./_watchStreamSize.js')
exports.ImportVmBackup = class ImportVmBackup {
constructor({ adapter, metadata, srUuid, xapi, settings: { newMacAddresses, mapVdisSrs = {} } = {} }) {
constructor({ adapter, metadata, srUuid, xapi, settings: { newMacAddresses } = {} }) {
this._adapter = adapter
this._importDeltaVmSettings = { newMacAddresses, mapVdisSrs }
this._importDeltaVmSettings = { newMacAddresses }
this._metadata = metadata
this._srUuid = srUuid
this._xapi = xapi
@@ -30,12 +28,7 @@ exports.ImportVmBackup = class ImportVmBackup {
} else {
assert.strictEqual(metadata.mode, 'delta')
const ignoredVdis = new Set(
Object.entries(this._importDeltaVmSettings.mapVdisSrs)
.filter(([_, srUuid]) => srUuid === null)
.map(([vdiUuid]) => vdiUuid)
)
backup = await adapter.readDeltaVmBackup(metadata, ignoredVdis)
backup = await adapter.readDeltaVmBackup(metadata)
Object.values(backup.streams).forEach(stream => watchStreamSize(stream, sizeContainer))
}

Some files were not shown because too many files have changed in this diff