Compare commits

10 Commits: nr-delete-... → select-nfs

| Author | SHA1 | Date |
|---|---|---|
|  | d2951f617b |  |
|  | 0ea64bdca7 |  |
|  | f06bee3737 |  |
|  | 2693598ac8 |  |
|  | 19011ad372 |  |
|  | 86eb7744a1 |  |
|  | fe13ef6ff9 |  |
|  | 5607d34719 |  |
|  | 4501018dd6 |  |
|  | 9be9007fde |  |
.eslintrc.js (15 changed lines)
@@ -21,7 +21,7 @@ module.exports = {
  overrides: [
    {
      files: ['cli.js', '*-cli.js', '**/*cli*/**/*.js'],
      files: ['cli.js', '*-cli.js', 'packages/*cli*/**/*.js'],
      rules: {
        'no-console': 'off',
      },
@@ -38,15 +38,10 @@ module.exports = {
    // disabled because XAPI objects are using camel case
    camelcase: ['off'],

    'react/jsx-handler-names': 'off',

    // disabled because not always relevant, we might reconsider in the future
    //
    // enabled by https://github.com/standard/eslint-config-standard/commit/319b177750899d4525eb1210686f6aca96190b2f
    //
    // example: https://github.com/vatesfr/xen-orchestra/blob/31ed3767c67044ca445658eb6b560718972402f2/packages/xen-api/src/index.js#L156-L157
    'lines-between-class-members': 'off',

    'no-console': ['error', { allow: ['warn', 'error'] }],
    'no-var': 'error',
    'node/no-extraneous-import': 'error',
    'node/no-extraneous-require': 'error',
    'prefer-const': 'error',
  },
}
@@ -1,11 +1,6 @@
module.exports = {
  arrowParens: 'avoid',
  jsxSingleQuote: true,
  semi: false,
  singleQuote: true,

  // 2020-11-24: Requested by nraynaud and approved by the rest of the team
  //
  // https://team.vates.fr/vates/pl/a1i8af1b9id7pgzm3jcg4toacy
  printWidth: 120,
  trailingComma: 'es5',
}
@@ -1,6 +1,7 @@
language: node_js
node_js:
  - 12
  #- stable # disable for now due to an issue of indirect dep upath with Node 9
  - 8

# Use containers.
# http://docs.travis-ci.com/user/workers/container-based-infrastructure/
@@ -1,46 +0,0 @@
<!-- DO NOT EDIT MANUALLY, THIS FILE HAS BEEN GENERATED -->

# @vates/coalesce-calls

[](https://npmjs.org/package/@vates/coalesce-calls)  [](https://bundlephobia.com/result?p=@vates/coalesce-calls) [](https://npmjs.org/package/@vates/coalesce-calls)

> Wraps an async function so that concurrent calls will be coalesced

## Install

Installation of the [npm package](https://npmjs.org/package/@vates/coalesce-calls):

```
> npm install --save @vates/coalesce-calls
```

## Usage

```js
import { coalesceCalls } from '@vates/coalesce-calls'

const connect = coalesceCalls(async function () {
  // async operation
})

connect()

// the previous promise result will be returned if the operation is not
// complete yet
connect()
```

## Contributions

Contributions are _very_ welcomed, either on the documentation or on
the code.

You may:

- report any [issue](https://github.com/vatesfr/xen-orchestra/issues)
  you've encountered;
- fork and create a pull request.

## License

[ISC](https://spdx.org/licenses/ISC) © [Vates SAS](https://vates.fr)
@@ -1,13 +0,0 @@
```js
import { coalesceCalls } from '@vates/coalesce-calls'

const connect = coalesceCalls(async function () {
  // async operation
})

connect()

// the previous promise result will be returned if the operation is not
// complete yet
connect()
```
@@ -1,14 +0,0 @@
exports.coalesceCalls = function (fn) {
  let promise
  const clean = () => {
    promise = undefined
  }
  return function () {
    if (promise !== undefined) {
      return promise
    }
    promise = fn.apply(this, arguments)
    promise.then(clean, clean)
    return promise
  }
}
@@ -1,33 +0,0 @@
/* eslint-env jest */

const { coalesceCalls } = require('./')

const pDefer = () => {
  const r = {}
  r.promise = new Promise((resolve, reject) => {
    r.reject = reject
    r.resolve = resolve
  })
  return r
}

describe('coalesceCalls', () => {
  it('decorates an async function', async () => {
    const fn = coalesceCalls(promise => promise)

    const defer1 = pDefer()
    const promise1 = fn(defer1.promise)
    const defer2 = pDefer()
    const promise2 = fn(defer2.promise)

    defer1.resolve('foo')
    expect(await promise1).toBe('foo')
    expect(await promise2).toBe('foo')

    const defer3 = pDefer()
    const promise3 = fn(defer3.promise)

    defer3.resolve('bar')
    expect(await promise3).toBe('bar')
  })
})
@@ -1,38 +0,0 @@
{
  "private": false,
  "name": "@vates/coalesce-calls",
  "description": "Wraps an async function so that concurrent calls will be coalesced",
  "keywords": [
    "async",
    "calls",
    "coalesce",
    "decorate",
    "decorator",
    "merge",
    "promise",
    "wrap",
    "wrapper"
  ],
  "homepage": "https://github.com/vatesfr/xen-orchestra/tree/master/@vates/coalesce-calls",
  "bugs": "https://github.com/vatesfr/xen-orchestra/issues",
  "repository": {
    "directory": "@vates/coalesce-calls",
    "type": "git",
    "url": "https://github.com/vatesfr/xen-orchestra.git"
  },
  "files": [
    "index.js"
  ],
  "author": {
    "name": "Vates SAS",
    "url": "https://vates.fr"
  },
  "license": "ISC",
  "version": "0.1.0",
  "engines": {
    "node": ">=8.10"
  },
  "scripts": {
    "postversion": "npm publish --access public"
  }
}
@@ -1,45 +0,0 @@
<!-- DO NOT EDIT MANUALLY, THIS FILE HAS BEEN GENERATED -->

# @vates/decorate-with

[](https://npmjs.org/package/@vates/decorate-with)  [](https://bundlephobia.com/result?p=@vates/decorate-with) [](https://npmjs.org/package/@vates/decorate-with)

> Creates a decorator from a function wrapper

## Install

Installation of the [npm package](https://npmjs.org/package/@vates/decorate-with):

```
> npm install --save @vates/decorate-with
```

## Usage

For instance, allows using Lodash's functions as decorators:

```js
import { decorateWith } from '@vates/decorate-with'

class Foo {
  @decorateWith(lodash.debounce, 150)
  bar() {
    // body
  }
}
```

## Contributions

Contributions are _very_ welcomed, either on the documentation or on
the code.

You may:

- report any [issue](https://github.com/vatesfr/xen-orchestra/issues)
  you've encountered;
- fork and create a pull request.

## License

[ISC](https://spdx.org/licenses/ISC) © [Vates SAS](https://vates.fr)
@@ -1,12 +0,0 @@
For instance, allows using Lodash's functions as decorators:

```js
import { decorateWith } from '@vates/decorate-with'

class Foo {
  @decorateWith(lodash.debounce, 150)
  bar() {
    // body
  }
}
```
@@ -1,4 +0,0 @@
exports.decorateWith = (fn, ...args) => (target, name, descriptor) => ({
  ...descriptor,
  value: fn(descriptor.value, ...args),
})
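Read together with the README above, the decorator simply replaces the method's `descriptor.value` with `fn(value, ...args)`. A minimal sketch of pairing it with a hand-written wrapper (the `withLogging` helper below is hypothetical, not part of the package):

```js
import { decorateWith } from '@vates/decorate-with'

// hypothetical wrapper: log each call, then delegate to the wrapped method
const withLogging = (fn, label) =>
  function (...args) {
    console.log(label, 'called with', args)
    return fn.apply(this, args)
  }

class Foo {
  @decorateWith(withLogging, 'Foo#bar')
  bar(value) {
    return value * 2
  }
}
```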
@@ -1,30 +0,0 @@
{
  "private": false,
  "name": "@vates/decorate-with",
  "description": "Creates a decorator from a function wrapper",
  "keywords": [
    "apply",
    "decorator",
    "factory",
    "wrapper"
  ],
  "homepage": "https://github.com/vatesfr/xen-orchestra/tree/master/@vates/decorate-with",
  "bugs": "https://github.com/vatesfr/xen-orchestra/issues",
  "repository": {
    "directory": "@vates/decorate-with",
    "type": "git",
    "url": "https://github.com/vatesfr/xen-orchestra.git"
  },
  "author": {
    "name": "Vates SAS",
    "url": "https://vates.fr"
  },
  "license": "ISC",
  "version": "0.0.1",
  "engines": {
    "node": ">=8.10"
  },
  "scripts": {
    "postversion": "npm publish --access public"
  }
}
@@ -1,53 +0,0 @@
<!-- DO NOT EDIT MANUALLY, THIS FILE HAS BEEN GENERATED -->

# @vates/multi-key-map

[](https://npmjs.org/package/@vates/multi-key-map)  [](https://bundlephobia.com/result?p=@vates/multi-key-map) [](https://npmjs.org/package/@vates/multi-key-map)

> Create map with values affected to multiple keys

## Install

Installation of the [npm package](https://npmjs.org/package/@vates/multi-key-map):

```
> npm install --save @vates/multi-key-map
```

## Usage

```js
import { MultiKeyMap } from '@vates/multi-key-map'

const map = new MultiKeyMap()

const OBJ = {}
map.set([], 0)
map.set(['foo'], 1)
map.set(['foo', 'bar'], 2)
map.set(['bar', 'foo'], 3)
map.set([OBJ], 4)
map.set([{}], 5)

map.get([]) // 0
map.get(['foo']) // 1
map.get(['foo', 'bar']) // 2
map.get(['bar', 'foo']) // 3
map.get([OBJ]) // 4
map.get([{}]) // undefined
```

## Contributions

Contributions are _very_ welcomed, either on the documentation or on
the code.

You may:

- report any [issue](https://github.com/vatesfr/xen-orchestra/issues)
  you've encountered;
- fork and create a pull request.

## License

[ISC](https://spdx.org/licenses/ISC) © [Vates SAS](https://vates.fr)
@@ -1,20 +0,0 @@
```js
import { MultiKeyMap } from '@vates/multi-key-map'

const map = new MultiKeyMap()

const OBJ = {}
map.set([], 0)
map.set(['foo'], 1)
map.set(['foo', 'bar'], 2)
map.set(['bar', 'foo'], 3)
map.set([OBJ], 4)
map.set([{}], 5)

map.get([]) // 0
map.get(['foo']) // 1
map.get(['foo', 'bar']) // 2
map.get(['bar', 'foo']) // 3
map.get([OBJ]) // 4
map.get([{}]) // undefined
```
@@ -1,34 +0,0 @@
/* eslint-env jest */

const { MultiKeyMap } = require('./')

describe('MultiKeyMap', () => {
  it('works', () => {
    const map = new MultiKeyMap()

    const keys = [
      // null key
      [],
      // simple key
      ['foo'],
      // composite key
      ['foo', 'bar'],
      // reverse composite key
      ['bar', 'foo'],
    ]
    const values = keys.map(() => ({}))

    // set all values first to make sure they are all stored and not only the
    // last one
    keys.forEach((key, i) => {
      map.set(key, values[i])
    })

    keys.forEach((key, i) => {
      // copy the key to make sure the array itself is not the key
      expect(map.get(key.slice())).toBe(values[i])
      map.delete(key.slice())
      expect(map.get(key.slice())).toBe(undefined)
    })
  })
})
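The package's `index.js` is not part of this compare, so here is only a rough sketch of a structure that would satisfy the tests above: nested `Map`s keyed element by element, with a symbol slot for the stored value. This is an assumption, not the actual implementation:

```js
// a minimal sketch, not the actual @vates/multi-key-map source
const VALUE = Symbol('value')

exports.MultiKeyMap = class MultiKeyMap {
  constructor() {
    this._root = new Map()
  }

  set(keys, value) {
    let node = this._root
    for (const key of keys) {
      let child = node.get(key)
      if (child === undefined) {
        child = new Map()
        node.set(key, child)
      }
      node = child
    }
    node.set(VALUE, value)
  }

  get(keys) {
    const node = this._walk(keys)
    return node === undefined ? undefined : node.get(VALUE)
  }

  delete(keys) {
    const node = this._walk(keys)
    return node !== undefined && node.delete(VALUE)
  }

  // follow the key elements one by one; objects are compared by identity,
  // which is why a fresh {} never matches a previously used object key
  _walk(keys) {
    let node = this._root
    for (const key of keys) {
      node = node.get(key)
      if (node === undefined) {
        return undefined
      }
    }
    return node
  }
}
```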
@@ -1,28 +0,0 @@
{
  "private": false,
  "name": "@vates/multi-key-map",
  "description": "Create map with values affected to multiple keys",
  "keywords": [
    "cache",
    "map"
  ],
  "homepage": "https://github.com/vatesfr/xen-orchestra/tree/master/@vates/multi-key-map",
  "bugs": "https://github.com/vatesfr/xen-orchestra/issues",
  "repository": {
    "directory": "@vates/multi-key-map",
    "type": "git",
    "url": "https://github.com/vatesfr/xen-orchestra.git"
  },
  "author": {
    "name": "Vates SAS",
    "url": "https://vates.fr"
  },
  "license": "ISC",
  "version": "0.1.0",
  "engines": {
    "node": ">=8.10"
  },
  "scripts": {
    "postversion": "npm publish --access public"
  }
}
@@ -1,47 +0,0 @@
<!-- DO NOT EDIT MANUALLY, THIS FILE HAS BEEN GENERATED -->

# @vates/parse-duration

[](https://npmjs.org/package/@vates/parse-duration)  [](https://bundlephobia.com/result?p=@vates/parse-duration) [](https://npmjs.org/package/@vates/parse-duration)

> Small wrapper around ms to parse a duration

## Install

Installation of the [npm package](https://npmjs.org/package/@vates/parse-duration):

```
> npm install --save @vates/parse-duration
```

## Usage

`ms` without magic: always parse a duration and throws if invalid.

```js
import { parseDuration } from '@vates/parse-duration'

parseDuration('2 days')
// 172800000

parseDuration(172800000)
// 172800000

parseDuration(undefined)
// throws TypeError('not a valid duration: undefined')
```

## Contributions

Contributions are _very_ welcomed, either on the documentation or on
the code.

You may:

- report any [issue](https://github.com/vatesfr/xen-orchestra/issues)
  you've encountered;
- fork and create a pull request.

## License

[AGPL-3.0-or-later](https://spdx.org/licenses/AGPL-3.0-or-later) © [Vates SAS](https://vates.fr)
@@ -1,14 +0,0 @@
`ms` without magic: always parse a duration and throws if invalid.

```js
import { parseDuration } from '@vates/parse-duration'

parseDuration('2 days')
// 172800000

parseDuration(172800000)
// 172800000

parseDuration(undefined)
// throws TypeError('not a valid duration: undefined')
```
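The implementation itself is not shown in this compare. Given the documented behavior, a plausible sketch is a thin guard around `ms` (an assumption, not the package's actual source):

```js
// a minimal sketch, assuming the package simply wraps `ms`
const ms = require('ms')

exports.parseDuration = value => {
  // numbers are already durations in milliseconds
  if (typeof value === 'number') {
    return value
  }
  const duration = typeof value === 'string' ? ms(value) : undefined
  if (typeof duration !== 'number') {
    throw new TypeError(`not a valid duration: ${value}`)
  }
  return duration
}
```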
@@ -1,32 +0,0 @@
{
  "private": false,
  "name": "@vates/parse-duration",
  "description": "Small wrapper around ms to parse a duration",
  "keywords": [
    "duration",
    "ms",
    "parse"
  ],
  "homepage": "https://github.com/vatesfr/xen-orchestra/tree/master/@vates/parse-duration",
  "bugs": "https://github.com/vatesfr/xen-orchestra/issues",
  "repository": {
    "directory": "@vates/parse-duration",
    "type": "git",
    "url": "https://github.com/vatesfr/xen-orchestra.git"
  },
  "author": {
    "name": "Vates SAS",
    "url": "https://vates.fr"
  },
  "license": "AGPL-3.0-or-later",
  "version": "0.1.0",
  "engines": {
    "node": ">=8.10"
  },
  "dependencies": {
    "ms": "^2.1.2"
  },
  "scripts": {
    "postversion": "npm publish --access public"
  }
}
@@ -1,46 +0,0 @@
<!-- DO NOT EDIT MANUALLY, THIS FILE HAS BEEN GENERATED -->

# @vates/read-chunk

[](https://npmjs.org/package/@vates/read-chunk)  [](https://bundlephobia.com/result?p=@vates/read-chunk) [](https://npmjs.org/package/@vates/read-chunk)

> Read a chunk of a Node stream

## Install

Installation of the [npm package](https://npmjs.org/package/@vates/read-chunk):

```
> npm install --save @vates/read-chunk
```

## Usage

- returns the next available chunk of data
- like `stream.read()`, a number of bytes can be specified
- returns `null` if the stream has ended

```js
import { readChunk } from '@vates/read-chunk'
;(async () => {
  let chunk
  while ((chunk = await readChunk(stream, 1024)) !== null) {
    // do something with chunk
  }
})()
```

## Contributions

Contributions are _very_ welcomed, either on the documentation or on
the code.

You may:

- report any [issue](https://github.com/vatesfr/xen-orchestra/issues)
  you've encountered;
- fork and create a pull request.

## License

[ISC](https://spdx.org/licenses/ISC) © [Vates SAS](https://vates.fr)
@@ -1,13 +0,0 @@
- returns the next available chunk of data
- like `stream.read()`, a number of bytes can be specified
- returns `null` if the stream has ended

```js
import { readChunk } from '@vates/read-chunk'
;(async () => {
  let chunk
  while ((chunk = await readChunk(stream, 1024)) !== null) {
    // do something with chunk
  }
})()
```
@@ -1,27 +0,0 @@
exports.readChunk = (stream, size) =>
  new Promise((resolve, reject) => {
    function onEnd() {
      resolve(null)
      removeListeners()
    }
    function onError(error) {
      reject(error)
      removeListeners()
    }
    function onReadable() {
      const data = stream.read(size)
      if (data !== null) {
        resolve(data)
        removeListeners()
      }
    }
    function removeListeners() {
      stream.removeListener('end', onEnd)
      stream.removeListener('error', onError)
      stream.removeListener('readable', onReadable)
    }
    stream.on('end', onEnd)
    stream.on('error', onError)
    stream.on('readable', onReadable)
    onReadable()
  })
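As a usage illustration of the promise-based listener dance above, `readChunk` composes with any Node readable; the file path below is a placeholder:

```js
const { createReadStream } = require('fs')
const { readChunk } = require('@vates/read-chunk')

// read only the first 4 bytes of a file, e.g. to sniff a magic number
async function readMagic(path) {
  const stream = createReadStream(path)
  try {
    return await readChunk(stream, 4)
  } finally {
    stream.destroy()
  }
}

readMagic('./some-file.bin').then(buffer => console.log(buffer))
```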
@@ -1,33 +0,0 @@
{
  "private": false,
  "name": "@vates/read-chunk",
  "homepage": "https://github.com/vatesfr/xen-orchestra/tree/master/@vates/read-chunk",
  "description": "Read a chunk of a Node stream",
  "license": "ISC",
  "keywords": [
    "async",
    "chunk",
    "data",
    "node",
    "promise",
    "read",
    "stream"
  ],
  "bugs": "https://github.com/vatesfr/xen-orchestra/issues",
  "repository": {
    "directory": "@vates/read-chunk",
    "type": "git",
    "url": "https://github.com/vatesfr/xen-orchestra.git"
  },
  "version": "0.1.1",
  "engines": {
    "node": ">=8.10"
  },
  "scripts": {
    "postversion": "npm publish --access public"
  },
  "author": {
    "name": "Vates SAS",
    "url": "https://vates.fr"
  }
}
@@ -1 +1,3 @@
module.exports = require('../../@xen-orchestra/babel-config')(require('./package.json'))
module.exports = require('../../@xen-orchestra/babel-config')(
  require('./package.json')
)
@@ -1,10 +1,6 @@
<!-- DO NOT EDIT MANUALLY, THIS FILE HAS BEEN GENERATED -->
# @xen-orchestra/async-map [](https://travis-ci.org/vatesfr/xen-orchestra)

# @xen-orchestra/async-map

[](https://npmjs.org/package/@xen-orchestra/async-map)  [](https://bundlephobia.com/result?p=@xen-orchestra/async-map) [](https://npmjs.org/package/@xen-orchestra/async-map)

> Similar to Promise.all + lodash.map but wait for all promises to be settled
> ${pkg.description}

## Install

@@ -16,15 +12,30 @@ Installation of the [npm package](https://npmjs.org/package/@xen-orchestra/async

## Usage

```js
import asyncMap from '@xen-orchestra/async-map'

const array = await asyncMap(collection, iteratee)
```

**TODO**

## Development

```
# Install dependencies
> yarn

# Run the tests
> yarn test

# Continuously compile
> yarn dev

# Continuously run the tests
> yarn dev-test

# Build for production (automatically called by npm install)
> yarn build
```

## Contributions

Contributions are _very_ welcomed, either on the documentation or on
Contributions are *very* welcomed, either on the documentation or on
the code.

You may:

@@ -35,4 +46,4 @@ You may:

## License

[ISC](https://spdx.org/licenses/ISC) © [Vates SAS](https://vates.fr)
ISC © [Vates SAS](https://vates.fr)
@@ -1,5 +0,0 @@
```js
import asyncMap from '@xen-orchestra/async-map'

const array = await asyncMap(collection, iteratee)
```
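The description, "similar to Promise.all + lodash.map but wait for all promises to be settled", implies every iteratee runs to completion before the result (or the first error) is reported. A sketch of those semantics, assuming lodash is available (an illustration, not the package's actual source):

```js
// wait for every promise to settle; if any failed, rethrow the first error
const map = require('lodash/map')

module.exports = function asyncMap(collection, iteratee) {
  let firstError
  let errored = false
  return Promise.all(
    map(collection, (item, key) =>
      Promise.resolve(item)
        .then(value => iteratee(value, key, collection))
        .catch(error => {
          if (!errored) {
            errored = true
            firstError = error
          }
        })
    )
  ).then(values => {
    if (errored) {
      throw firstError
    }
    return values
  })
}
```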
@@ -1,9 +1,8 @@
{
  "private": false,
  "name": "@xen-orchestra/async-map",
  "version": "0.0.0",
  "license": "ISC",
  "description": "Similar to Promise.all + lodash.map but wait for all promises to be settled",
  "description": "",
  "keywords": [],
  "homepage": "https://github.com/vatesfr/xen-orchestra/tree/master/@xen-orchestra/async-map",
  "bugs": "https://github.com/vatesfr/xen-orchestra/issues",
@@ -13,8 +12,8 @@
    "url": "https://github.com/vatesfr/xen-orchestra.git"
  },
  "author": {
    "name": "Vates SAS",
    "url": "https://vates.fr"
    "name": "Julien Fontanet",
    "email": "julien.fontanet@isonoe.net"
  },
  "preferGlobal": false,
  "main": "dist/",
@@ -37,8 +36,8 @@
    "@babel/preset-env": "^7.0.0",
    "@babel/preset-flow": "^7.0.0",
    "babel-plugin-lodash": "^3.3.2",
    "cross-env": "^7.0.2",
    "rimraf": "^3.0.0"
    "cross-env": "^5.1.3",
    "rimraf": "^2.6.2"
  },
  "scripts": {
    "build": "cross-env NODE_ENV=production babel --source-maps --out-dir=dist/ src/",
@@ -46,6 +45,7 @@
    "dev": "cross-env NODE_ENV=development babel --watch --source-maps --out-dir=dist/ src/",
    "prebuild": "yarn run clean",
    "predev": "yarn run prebuild",
    "prepare": "yarn run build",
    "prepublishOnly": "yarn run build",
    "postversion": "npm publish"
  }
@@ -1 +0,0 @@
module.exports = require('../../@xen-orchestra/babel-config')(require('./package.json'))
@@ -1,24 +0,0 @@
/benchmark/
/benchmarks/
*.bench.js
*.bench.js.map

/examples/
example.js
example.js.map
*.example.js
*.example.js.map

/fixture/
/fixtures/
*.fixture.js
*.fixture.js.map
*.fixtures.js
*.fixtures.js.map

/test/
/tests/
*.spec.js
*.spec.js.map

__snapshots__/
@@ -1,28 +0,0 @@
<!-- DO NOT EDIT MANUALLY, THIS FILE HAS BEEN GENERATED -->

# @xen-orchestra/audit-core

[](https://npmjs.org/package/@xen-orchestra/audit-core)  [](https://bundlephobia.com/result?p=@xen-orchestra/audit-core) [](https://npmjs.org/package/@xen-orchestra/audit-core)

## Install

Installation of the [npm package](https://npmjs.org/package/@xen-orchestra/audit-core):

```
> npm install --save @xen-orchestra/audit-core
```

## Contributions

Contributions are _very_ welcomed, either on the documentation or on
the code.

You may:

- report any [issue](https://github.com/vatesfr/xen-orchestra/issues)
  you've encountered;
- fork and create a pull request.

## License

[AGPL-3.0-or-later](https://spdx.org/licenses/AGPL-3.0-or-later) © [Vates SAS](https://vates.fr)
@@ -1,45 +0,0 @@
{
  "name": "@xen-orchestra/audit-core",
  "homepage": "https://github.com/vatesfr/xen-orchestra/tree/master/@xen-orchestra/audit-core",
  "bugs": "https://github.com/vatesfr/xen-orchestra/issues",
  "repository": {
    "directory": "@xen-orchestra/audit-core",
    "type": "git",
    "url": "https://github.com/vatesfr/xen-orchestra.git"
  },
  "version": "0.2.0",
  "engines": {
    "node": ">=8.10"
  },
  "main": "dist/",
  "scripts": {
    "build": "cross-env NODE_ENV=production babel --source-maps --out-dir=dist/ src/",
    "dev": "cross-env NODE_ENV=development babel --watch --source-maps --out-dir=dist/ src/",
    "postversion": "npm publish --access public",
    "prebuild": "rimraf dist/",
    "predev": "yarn run prebuild",
    "prepublishOnly": "yarn run build"
  },
  "devDependencies": {
    "@babel/cli": "^7.7.4",
    "@babel/core": "^7.7.4",
    "@babel/plugin-proposal-decorators": "^7.8.0",
    "@babel/plugin-proposal-nullish-coalescing-operator": "^7.8.0",
    "@babel/preset-env": "^7.7.4",
    "cross": "^1.0.0",
    "rimraf": "^3.0.0"
  },
  "dependencies": {
    "@xen-orchestra/log": "^0.2.0",
    "core-js": "^3.6.4",
    "golike-defer": "^0.4.1",
    "lodash": "^4.17.15",
    "object-hash": "^2.0.1"
  },
  "private": false,
  "license": "AGPL-3.0-or-later",
  "author": {
    "name": "Vates SAS",
    "url": "https://vates.fr"
  }
}
@@ -1,193 +0,0 @@
// see https://github.com/babel/babel/issues/8450
import 'core-js/features/symbol/async-iterator'

import assert from 'assert'
import createLogger from '@xen-orchestra/log'
import defer from 'golike-defer'
import hash from 'object-hash'

const log = createLogger('xo:audit-core')

export class Storage {
  constructor() {
    this._lock = Promise.resolve()
  }

  async acquireLock() {
    const lock = this._lock
    let releaseLock
    this._lock = new Promise(resolve => {
      releaseLock = resolve
    })
    await lock
    return releaseLock
  }
}

// Format: $<algorithm>$<salt>$<encrypted>
//
// http://man7.org/linux/man-pages/man3/crypt.3.html#NOTES
const ID_TO_ALGORITHM = {
  5: 'sha256',
}

export class AlteredRecordError extends Error {
  constructor(id, nValid, record) {
    super('altered record')

    this.id = id
    this.nValid = nValid
    this.record = record
  }
}

export class MissingRecordError extends Error {
  constructor(id, nValid) {
    super('missing record')

    this.id = id
    this.nValid = nValid
  }
}

export const NULL_ID = 'nullId'

const HASH_ALGORITHM_ID = '5'
const createHash = (data, algorithmId = HASH_ALGORITHM_ID) =>
  `$${algorithmId}$$${hash(data, {
    algorithm: ID_TO_ALGORITHM[algorithmId],
    excludeKeys: key => key === 'id',
  })}`

export class AuditCore {
  constructor(storage) {
    assert.notStrictEqual(storage, undefined)
    this._storage = storage
  }

  @defer
  async add($defer, subject, event, data) {
    const time = Date.now()
    $defer(await this._storage.acquireLock())
    return this._addUnsafe({
      data,
      event,
      subject,
      time,
    })
  }

  async _addUnsafe({ data, event, subject, time }) {
    const storage = this._storage

    // delete "undefined" properties and normalize data with JSON.stringify
    const record = JSON.parse(
      JSON.stringify({
        data,
        event,
        previousId: (await storage.getLastId()) ?? NULL_ID,
        subject,
        time,
      })
    )
    record.id = createHash(record)
    await storage.put(record)
    await storage.setLastId(record.id)
    return record
  }

  async checkIntegrity(oldest, newest) {
    const storage = this._storage

    // handle separated chains case
    if (newest !== (await storage.getLastId())) {
      let isNewestAccessible = false
      for await (const { id } of this.getFrom()) {
        if (id === newest) {
          isNewestAccessible = true
          break
        }
      }
      if (!isNewestAccessible) {
        throw new MissingRecordError(newest, 0)
      }
    }

    let nValid = 0
    while (newest !== oldest) {
      const record = await storage.get(newest)
      if (record === undefined) {
        throw new MissingRecordError(newest, nValid)
      }
      if (newest !== createHash(record, newest.slice(1, newest.indexOf('$', 1)))) {
        throw new AlteredRecordError(newest, nValid, record)
      }
      newest = record.previousId
      nValid++
    }
    return nValid
  }

  async *getFrom(newest) {
    const storage = this._storage

    let id = newest ?? (await storage.getLastId())
    if (id === undefined) {
      return
    }

    let record
    while ((record = await storage.get(id)) !== undefined) {
      yield record
      id = record.previousId
    }
  }

  async deleteFrom(newest) {
    assert.notStrictEqual(newest, undefined)
    for await (const { id } of this.getFrom(newest)) {
      await this._storage.del(id)
    }
  }

  @defer
  async deleteRangeAndRewrite($defer, newest, oldest) {
    assert.notStrictEqual(newest, undefined)
    assert.notStrictEqual(oldest, undefined)

    const storage = this._storage
    $defer(await storage.acquireLock())

    assert.notStrictEqual(await storage.get(newest), undefined)
    const oldestRecord = await storage.get(oldest)
    assert.notStrictEqual(oldestRecord, undefined)

    const lastId = await storage.getLastId()
    const recentRecords = []
    for await (const record of this.getFrom(lastId)) {
      if (record.id === newest) {
        break
      }

      recentRecords.push(record)
    }

    for await (const record of this.getFrom(newest)) {
      await storage.del(record.id)
      if (record.id === oldest) {
        break
      }
    }

    await storage.setLastId(oldestRecord.previousId)

    for (const record of recentRecords) {
      try {
        await this._addUnsafe(record)
        await storage.del(record.id)
      } catch (error) {
        log.error(error)
      }
    }
  }
}
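Two details above are worth a concrete illustration. Record ids follow the crypt(3)-style `$<algorithm>$<salt>$<hash>` format with an empty salt (hence the `$$` in the template), which is how `checkIntegrity` recovers the algorithm id from the record id itself. And `Storage#acquireLock` is a promise-based mutex: each caller awaits the previous holder's promise and receives a release function. A sketch of the lock's intended use (illustrative only):

```js
// serialize writers with the promise-based mutex
const storage = new Storage()

async function withLock(fn) {
  const release = await storage.acquireLock()
  try {
    return await fn() // only one caller at a time reaches this point
  } finally {
    release()
  }
}
```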
@@ -1,115 +0,0 @@
/* eslint-env jest */

import { AlteredRecordError, AuditCore, MissingRecordError, NULL_ID, Storage } from '.'

const asyncIteratorToArray = async asyncIterator => {
  const array = []
  for await (const entry of asyncIterator) {
    array.push(entry)
  }
  return array
}

class DB extends Storage {
  constructor() {
    super()

    this._db = new Map()
    this._lastId = undefined
  }

  async put(record) {
    this._db.set(record.id, record)
  }

  async setLastId(id) {
    this._lastId = id
  }

  async getLastId() {
    return this._lastId
  }

  async del(id) {
    this._db.delete(id)
  }

  async get(id) {
    return this._db.get(id)
  }

  _clear() {
    return this._db.clear()
  }
}

const DATA = [
  [
    {
      name: 'subject0',
    },
    'event0',
    {},
  ],
  [
    {
      name: 'subject1',
    },
    'event1',
    {},
  ],
  [
    {
      name: 'subject2',
    },
    'event2',
    {},
  ],
]

const db = new DB()
const auditCore = new AuditCore(db)
const storeAuditRecords = async () => {
  await Promise.all(DATA.map(data => auditCore.add(...data)))
  const records = await asyncIteratorToArray(auditCore.getFrom())
  expect(records.length).toBe(DATA.length)
  return records
}

describe('auditCore', () => {
  afterEach(() => db._clear())

  it('detects that a record is missing', async () => {
    const [newestRecord, deletedRecord] = await storeAuditRecords()

    const nValidRecords = await auditCore.checkIntegrity(NULL_ID, newestRecord.id)
    expect(nValidRecords).toBe(DATA.length)

    await db.del(deletedRecord.id)
    await expect(auditCore.checkIntegrity(NULL_ID, newestRecord.id)).rejects.toEqual(
      new MissingRecordError(deletedRecord.id, 1)
    )
  })

  it('detects that a record has been altered', async () => {
    const [newestRecord, alteredRecord] = await storeAuditRecords()

    alteredRecord.event = ''
    await db.put(alteredRecord)

    await expect(auditCore.checkIntegrity(NULL_ID, newestRecord.id)).rejects.toEqual(
      new AlteredRecordError(alteredRecord.id, 1, alteredRecord)
    )
  })

  it('confirms interval integrity after deletion of records outside of the interval', async () => {
    const [thirdRecord, secondRecord, firstRecord] = await storeAuditRecords()

    await auditCore.deleteFrom(secondRecord.id)

    expect(await db.get(firstRecord.id)).toBe(undefined)
    expect(await db.get(secondRecord.id)).toBe(undefined)

    await auditCore.checkIntegrity(secondRecord.id, thirdRecord.id)
  })
})
@@ -1,26 +0,0 @@
class Storage {
  acquire: () => Promise<() => undefined>
  del: (id: string) => Promise<void>
  get: (id: string) => Promise<Record | void>
  getLastId: () => Promise<string | void>
  put: (record: Record) => Promise<void>
  setLastId: (id: string) => Promise<void>
}

interface Record {
  data: object
  event: string
  id: string
  previousId: string
  subject: object
  time: number
}

export class AuditCore {
  constructor(storage: Storage) { }
  public add(subject: any, event: string, data: any): Promise<Record> { }
  public checkIntegrity(oldest: string, newest: string): Promise<number> { }
  public getFrom(newest?: string): AsyncIterator { }
  public deleteFrom(newest: string): Promise<void> { }
  public deleteRangeAndRewrite(newest: string, oldest: string): Promise<void> { }
}
@@ -1,18 +0,0 @@
<!-- DO NOT EDIT MANUALLY, THIS FILE HAS BEEN GENERATED -->

# @xen-orchestra/babel-config

## Contributions

Contributions are _very_ welcomed, either on the documentation or on
the code.

You may:

- report any [issue](https://github.com/vatesfr/xen-orchestra/issues)
  you've encountered;
- fork and create a pull request.

## License

[AGPL-3.0-or-later](https://spdx.org/licenses/AGPL-3.0-or-later) © [Vates SAS](https://vates.fr)
@@ -32,19 +32,27 @@ const configs = {
      }
      return { browsers: pkg.browserslist, node }
    })(),
    useBuiltIns: '@babel/polyfill' in (pkg.dependencies || {}) && 'usage',
  }
  },
}

const getConfig = (key, ...args) => {
  const config = configs[key]
  return config === undefined ? {} : typeof config === 'function' ? config(...args) : config
  return config === undefined
    ? {}
    : typeof config === 'function'
    ? config(...args)
    : config
}

// some plugins must be used in a specific order
const pluginsOrder = ['@babel/plugin-proposal-decorators', '@babel/plugin-proposal-class-properties']
const pluginsOrder = [
  '@babel/plugin-proposal-decorators',
  '@babel/plugin-proposal-class-properties',
]

module.exports = function (pkg, plugins, presets) {
module.exports = function(pkg, plugins, presets) {
  plugins === undefined && (plugins = {})
  presets === undefined && (presets = {})
@@ -8,13 +8,5 @@
    "directory": "@xen-orchestra/babel-config",
    "type": "git",
    "url": "https://github.com/vatesfr/xen-orchestra.git"
  },
  "engines": {
    "node": ">=6"
  },
  "license": "AGPL-3.0-or-later",
  "author": {
    "name": "Vates SAS",
    "url": "https://vates.fr"
  }
}
@@ -1,28 +0,0 @@
<!-- DO NOT EDIT MANUALLY, THIS FILE HAS BEEN GENERATED -->

# @xen-orchestra/backups-cli

[](https://npmjs.org/package/@xen-orchestra/backups-cli)  [](https://bundlephobia.com/result?p=@xen-orchestra/backups-cli) [](https://npmjs.org/package/@xen-orchestra/backups-cli)

## Install

Installation of the [npm package](https://npmjs.org/package/@xen-orchestra/backups-cli):

```
> npm install --global @xen-orchestra/backups-cli
```

## Contributions

Contributions are _very_ welcomed, either on the documentation or on
the code.

You may:

- report any [issue](https://github.com/vatesfr/xen-orchestra/issues)
  you've encountered;
- fork and create a pull request.

## License

[AGPL-3.0-or-later](https://spdx.org/licenses/AGPL-3.0-or-later) © [Vates SAS](https://vates.fr)
@@ -1,5 +0,0 @@
const curryRight = require('lodash/curryRight')

module.exports = curryRight((iterable, fn) =>
  Promise.all(Array.isArray(iterable) ? iterable.map(fn) : Array.from(iterable, fn))
)
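Because of `curryRight`, this helper can be called directly or partially applied from the right, which is how the `pipe` in clean-vms chains it (illustrative calls):

```js
const asyncMap = require('./_asyncMap')

async function demo() {
  await asyncMap([1, 2, 3], async n => n * 2) // [2, 4, 6]

  // partially applied: pass only the iteratee, get back iterable => Promise
  const double = asyncMap(async n => n * 2)
  await double([1, 2, 3]) // [2, 4, 6]
}
```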
@@ -1,32 +0,0 @@
const getopts = require('getopts')

const { version } = require('./package.json')

module.exports = commands =>
  async function (args, prefix) {
    const opts = getopts(args, {
      alias: {
        help: 'h',
      },
      boolean: ['help'],
      stopEarly: true,
    })

    const commandName = opts.help || args.length === 0 ? 'help' : args[0]
    const command = commands[commandName]
    if (command === undefined) {
      process.stdout.write(`Usage:

${Object.keys(commands)
  .filter(command => command !== 'help')
  .map(command => `  ${prefix} ${command} ${commands[command].usage || ''}`)
  .join('\n\n')}

xo-backups v${version}
`)
      process.exitCode = commandName === 'help' ? 0 : 1
      return
    }

    return command.main(args.slice(1), prefix + ' ' + commandName)
  }
@@ -1,69 +0,0 @@
const { dirname } = require('path')

const fs = require('promise-toolbox/promisifyAll')(require('fs'))
module.exports = fs

fs.getSize = path =>
  fs.stat(path).then(
    _ => _.size,
    error => {
      if (error.code === 'ENOENT') {
        return 0
      }
      throw error
    }
  )

fs.mktree = async function mkdirp(path) {
  try {
    await fs.mkdir(path)
  } catch (error) {
    const { code } = error
    if (code === 'EEXIST') {
      await fs.readdir(path)
      return
    }
    if (code === 'ENOENT') {
      await mkdirp(dirname(path))
      return mkdirp(path)
    }
    throw error
  }
}

// - easier:
//   - single param for direct use in `Array#map`
//   - files are prefixed with directory path
// - safer: returns empty array if path is missing or not a directory
fs.readdir2 = path =>
  fs.readdir(path).then(
    entries => {
      entries.forEach((entry, i) => {
        entries[i] = `${path}/${entry}`
      })

      return entries
    },
    error => {
      const { code } = error
      if (code === 'ENOENT') {
        // do nothing
      } else if (code === 'ENOTDIR') {
        console.warn('WARN: readdir(%s)', path, error)
      } else {
        throw error
      }
      return []
    }
  )

fs.symlink2 = async (target, path) => {
  try {
    await fs.symlink(target, path)
  } catch (error) {
    if (error.code === 'EEXIST' && (await fs.readlink(path)) === target) {
      return
    }
    throw error
  }
}
@@ -1,336 +0,0 @@
#!/usr/bin/env node

// assigned when options are parsed by the main function
let merge, remove

// -----------------------------------------------------------------------------

const assert = require('assert')
const flatten = require('lodash/flatten')
const getopts = require('getopts')
const limitConcurrency = require('limit-concurrency-decorator').default
const lockfile = require('proper-lockfile')
const pipe = require('promise-toolbox/pipe')
const { default: Vhd, mergeVhd } = require('vhd-lib')
const { dirname, resolve } = require('path')
const { DISK_TYPE_DIFFERENCING } = require('vhd-lib/dist/_constants')
const { isValidXva } = require('@xen-orchestra/backups/isValidXva')

const asyncMap = require('../_asyncMap')
const fs = require('../_fs')

const handler = require('@xen-orchestra/fs').getHandler({ url: 'file://' })

// -----------------------------------------------------------------------------

// chain is an array of VHDs from child to parent
//
// the whole chain will be merged into parent, parent will be renamed to child
// and all the others will be deleted
const mergeVhdChain = limitConcurrency(1)(async function mergeVhdChain(chain) {
  assert(chain.length >= 2)

  let child = chain[0]
  const parent = chain[chain.length - 1]
  const children = chain.slice(0, -1).reverse()

  console.warn('Unused parents of VHD', child)
  chain
    .slice(1)
    .reverse()
    .forEach(parent => {
      console.warn('  ', parent)
    })
  merge && console.warn('  merging…')
  console.warn('')
  if (merge) {
    // `mergeVhd` does not work with a stream, either
    // - make it accept a stream
    // - or create synthetic VHD which is not a stream
    if (children.length !== 1) {
      console.warn('TODO: implement merging multiple children')
      children.length = 1
      child = children[0]
    }

    let done, total
    const handle = setInterval(() => {
      if (done !== undefined) {
        console.log('merging %s: %s/%s', child, done, total)
      }
    }, 10e3)

    await mergeVhd(
      handler,
      parent,
      handler,
      child,
      // children.length === 1
      //   ? child
      //   : await createSyntheticStream(handler, children),
      {
        onProgress({ done: d, total: t }) {
          done = d
          total = t
        },
      }
    )

    clearInterval(handle)
  }

  await Promise.all([
    remove && fs.rename(parent, child),
    asyncMap(children.slice(0, -1), child => {
      console.warn('Unused VHD', child)
      remove && console.warn('  deleting…')
      console.warn('')
      return remove && handler.unlink(child)
    }),
  ])
})

const listVhds = pipe([
  vmDir => vmDir + '/vdis',
  fs.readdir2,
  asyncMap(fs.readdir2),
  flatten,
  asyncMap(fs.readdir2),
  flatten,
  _ => _.filter(_ => _.endsWith('.vhd')),
])

async function handleVm(vmDir) {
  const vhds = new Set()
  const vhdParents = { __proto__: null }
  const vhdChildren = { __proto__: null }

  // remove broken VHDs
  await asyncMap(await listVhds(vmDir), async path => {
    try {
      const vhd = new Vhd(handler, path)
      await vhd.readHeaderAndFooter()
      vhds.add(path)
      if (vhd.footer.diskType === DISK_TYPE_DIFFERENCING) {
        const parent = resolve(dirname(path), vhd.header.parentUnicodeName)
        vhdParents[path] = parent
        if (parent in vhdChildren) {
          const error = new Error('this script does not support multiple VHD children')
          error.parent = parent
          error.child1 = vhdChildren[parent]
          error.child2 = path
          throw error // should we throw?
        }
        vhdChildren[parent] = path
      }
    } catch (error) {
      console.warn('Error while checking VHD', path)
      console.warn('  ', error)
      if (error != null && error.code === 'ERR_ASSERTION') {
        remove && console.warn('  deleting…')
        console.warn('')
        remove && (await handler.unlink(path))
      }
    }
  })

  // remove VHDs with missing ancestors
  {
    const deletions = []

    // return true if the VHD has been deleted or is missing
    const deleteIfOrphan = vhd => {
      const parent = vhdParents[vhd]
      if (parent === undefined) {
        return
      }

      // no longer needs to be checked
      delete vhdParents[vhd]

      deleteIfOrphan(parent)

      if (!vhds.has(parent)) {
        vhds.delete(vhd)

        console.warn('Error while checking VHD', vhd)
        console.warn('  missing parent', parent)
        remove && console.warn('  deleting…')
        console.warn('')
        remove && deletions.push(handler.unlink(vhd))
      }
    }

    // > A property that is deleted before it has been visited will not be
    // > visited later.
    // >
    // > -- https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/for...in#Deleted_added_or_modified_properties
    for (const child in vhdParents) {
      deleteIfOrphan(child)
    }

    await Promise.all(deletions)
  }

  const [jsons, xvas, xvaSums] = await fs
    .readdir2(vmDir)
    .then(entries => [
      entries.filter(_ => _.endsWith('.json')),
      new Set(entries.filter(_ => _.endsWith('.xva'))),
      entries.filter(_ => _.endsWith('.xva.checksum')),
    ])

  await asyncMap(xvas, async path => {
    // check is not good enough to delete the file, the best we can do is report
    // it
    if (!(await isValidXva(path))) {
      console.warn('Potential broken XVA', path)
      console.warn('')
    }
  })

  const unusedVhds = new Set(vhds)
  const unusedXvas = new Set(xvas)

  // compile the list of unused XVAs and VHDs, and remove backup metadata which
  // reference a missing XVA/VHD
  await asyncMap(jsons, async json => {
    const metadata = JSON.parse(await fs.readFile(json))
    const { mode } = metadata
    if (mode === 'full') {
      const linkedXva = resolve(vmDir, metadata.xva)

      if (xvas.has(linkedXva)) {
        unusedXvas.delete(linkedXva)
      } else {
        console.warn('Error while checking backup', json)
        console.warn('  missing file', linkedXva)
        remove && console.warn('  deleting…')
        console.warn('')
        remove && (await handler.unlink(json))
      }
    } else if (mode === 'delta') {
      const linkedVhds = (() => {
        const { vhds } = metadata
        return Object.keys(vhds).map(key => resolve(vmDir, vhds[key]))
      })()

      // FIXME: find better approach by keeping as much of the backup as
      // possible (existing disks) even if one disk is missing
      if (linkedVhds.every(_ => vhds.has(_))) {
        linkedVhds.forEach(_ => unusedVhds.delete(_))
      } else {
        console.warn('Error while checking backup', json)
        const missingVhds = linkedVhds.filter(_ => !vhds.has(_))
        console.warn('  %i/%i missing VHDs', missingVhds.length, linkedVhds.length)
        missingVhds.forEach(vhd => {
          console.warn('  ', vhd)
        })
        remove && console.warn('  deleting…')
        console.warn('')
        remove && (await handler.unlink(json))
      }
    }
  })

  // TODO: parallelize by vm/job/vdi
  const unusedVhdsDeletion = []
  {
    // VHD chains (as list from child to ancestor) to merge indexed by last
    // ancestor
    const vhdChainsToMerge = { __proto__: null }

    const toCheck = new Set(unusedVhds)

    const getUsedChildChainOrDelete = vhd => {
      if (vhd in vhdChainsToMerge) {
        const chain = vhdChainsToMerge[vhd]
        delete vhdChainsToMerge[vhd]
        return chain
      }

      if (!unusedVhds.has(vhd)) {
        return [vhd]
      }

      // no longer needs to be checked
      toCheck.delete(vhd)

      const child = vhdChildren[vhd]
      if (child !== undefined) {
        const chain = getUsedChildChainOrDelete(child)
        if (chain !== undefined) {
          chain.push(vhd)
          return chain
        }
      }

      console.warn('Unused VHD', vhd)
      remove && console.warn('  deleting…')
      console.warn('')
      remove && unusedVhdsDeletion.push(handler.unlink(vhd))
    }

    toCheck.forEach(vhd => {
      vhdChainsToMerge[vhd] = getUsedChildChainOrDelete(vhd)
    })

    Object.keys(vhdChainsToMerge).forEach(key => {
      const chain = vhdChainsToMerge[key]
      if (chain !== undefined) {
        unusedVhdsDeletion.push(mergeVhdChain(chain))
      }
    })
  }

  await Promise.all([
    unusedVhdsDeletion,
    asyncMap(unusedXvas, path => {
      console.warn('Unused XVA', path)
      remove && console.warn('  deleting…')
      console.warn('')
      return remove && handler.unlink(path)
    }),
    asyncMap(xvaSums, path => {
      // no need to handle checksums for XVAs deleted by the script, they will be handled by `unlink()`
      if (!xvas.has(path.slice(0, -'.checksum'.length))) {
        console.warn('Unused XVA checksum', path)
        remove && console.warn('  deleting…')
        console.warn('')
        return remove && handler.unlink(path)
      }
    }),
  ])
}

// -----------------------------------------------------------------------------

module.exports = async function main(args) {
  const opts = getopts(args, {
    alias: {
      remove: 'r',
      merge: 'm',
    },
    boolean: ['merge', 'remove'],
    default: {
      merge: false,
      remove: false,
    },
  })

  ;({ remove, merge } = opts)
  await asyncMap(opts._, async vmDir => {
    vmDir = resolve(vmDir)

    // TODO: implement this in `xo-server`, not easy because not compatible with
    // `@xen-orchestra/fs`.
    const release = await lockfile.lock(vmDir)
    try {
      await handleVm(vmDir)
    } catch (error) {
      console.error('handleVm', vmDir, error)
    } finally {
      await release()
    }
  })
}
@@ -1,28 +0,0 @@
const filenamify = require('filenamify')
const get = require('lodash/get')
const { dirname, join, relative } = require('path')

const asyncMap = require('../_asyncMap')
const { mktree, readdir2, readFile, symlink2 } = require('../_fs')

module.exports = async function createSymlinkIndex([backupDir, fieldPath]) {
  const indexDir = join(backupDir, 'indexes', filenamify(fieldPath))
  await mktree(indexDir)

  await asyncMap(await readdir2(backupDir), async vmDir =>
    asyncMap(
      (await readdir2(vmDir)).filter(_ => _.endsWith('.json')),
      async json => {
        const metadata = JSON.parse(await readFile(json))
        const value = get(metadata, fieldPath)
        if (value !== undefined) {
          const target = relative(indexDir, dirname(json))
          const path = join(indexDir, filenamify(String(value)))
          await symlink2(target, path).catch(error => {
            console.warn('symlink(%s, %s)', target, path, error)
          })
        }
      }
    )
  )
}
@@ -1,54 +0,0 @@
const groupBy = require('lodash/groupBy')
const { createHash } = require('crypto')
const { dirname, resolve } = require('path')

const asyncMap = require('../_asyncMap')
const { readdir2, readFile, getSize } = require('../_fs')

const sha512 = str => createHash('sha512').update(str).digest('hex')
const sum = values => values.reduce((a, b) => a + b)

module.exports = async function info(vmDirs) {
  const jsonFiles = (
    await asyncMap(vmDirs, async vmDir => (await readdir2(vmDir)).filter(_ => _.endsWith('.json')))
  ).flat()

  const hashes = { __proto__: null }

  const info = (
    await asyncMap(jsonFiles, async jsonFile => {
      try {
        const jsonDir = dirname(jsonFile)
        const json = await readFile(jsonFile)

        const hash = sha512(json)
        if (hash in hashes) {
          console.log(jsonFile, 'duplicate of', hashes[hash])
          return
        }
        hashes[hash] = jsonFile

        const metadata = JSON.parse(json)

        return {
          jsonDir,
          jsonFile,
          metadata,
          size:
            json.length +
            (await (metadata.mode === 'delta'
              ? asyncMap(Object.values(metadata.vhds), _ => getSize(resolve(jsonDir, _))).then(sum)
              : getSize(resolve(jsonDir, metadata.xva)))),
        }
      } catch (error) {
        console.error(jsonFile, error)
      }
    })
  ).filter(_ => _ !== undefined)
  const byJobs = groupBy(info, 'metadata.jobId')
  Object.keys(byJobs)
    .sort()
    .forEach(jobId => {
      console.log(jobId, sum(byJobs[jobId].map(_ => _.size)))
    })
}
@@ -1,25 +0,0 @@
#!/usr/bin/env node

require('./_composeCommands')({
  'clean-vms': {
    get main() {
      return require('./commands/clean-vms')
    },
    usage: '[--force] [--merge] xo-vm-backups/*',
  },
  'create-symlink-index': {
    get main() {
      return require('./commands/create-symlink-index')
    },
    usage: 'xo-vm-backups <field path>',
  },
  info: {
    get main() {
      return require('./commands/info')
    },
    usage: 'xo-vm-backups/*',
  },
})(process.argv.slice(2), 'xo-backups').catch(error => {
  console.error('main', error)
  process.exitCode = 1
})
@@ -1,42 +0,0 @@
{
  "private": false,
  "bin": {
    "xo-backups": "index.js"
  },
  "preferGlobal": true,
  "bugs": "https://github.com/vatesfr/xen-orchestra/issues",
  "dependencies": {
    "@xen-orchestra/backups": "^0.1.1",
    "@xen-orchestra/fs": "^0.12.1",
    "filenamify": "^4.1.0",
    "getopts": "^2.2.5",
    "limit-concurrency-decorator": "^0.4.0",
    "lodash": "^4.17.15",
    "promise-toolbox": "^0.16.0",
    "proper-lockfile": "^4.1.1",
    "vhd-lib": "^1.0.0"
  },
  "engines": {
    "node": ">=7.10.1"
  },
  "files": [
    "commands",
    "*.js"
  ],
  "homepage": "https://github.com/vatesfr/xen-orchestra/tree/master/@xen-orchestra/backups-cli",
  "name": "@xen-orchestra/backups-cli",
  "repository": {
    "directory": "@xen-orchestra/backups-cli",
    "type": "git",
    "url": "https://github.com/vatesfr/xen-orchestra.git"
  },
  "scripts": {
    "postversion": "npm publish --access public"
  },
  "version": "0.3.0",
  "license": "AGPL-3.0-or-later",
  "author": {
    "name": "Vates SAS",
    "url": "https://vates.fr"
  }
}
@@ -1,28 +0,0 @@
<!-- DO NOT EDIT MANUALLY, THIS FILE HAS BEEN GENERATED -->

# @xen-orchestra/backups

[](https://npmjs.org/package/@xen-orchestra/backups)  [](https://bundlephobia.com/result?p=@xen-orchestra/backups) [](https://npmjs.org/package/@xen-orchestra/backups)

## Install

Installation of the [npm package](https://npmjs.org/package/@xen-orchestra/backups):

```
> npm install --save @xen-orchestra/backups
```

## Contributions

Contributions are _very_ welcomed, either on the documentation or on
the code.

You may:

- report any [issue](https://github.com/vatesfr/xen-orchestra/issues)
  you've encountered;
- fork and create a pull request.

## License

[AGPL-3.0-or-later](https://spdx.org/licenses/AGPL-3.0-or-later) © [Vates SAS](https://vates.fr)
@@ -1,30 +0,0 @@
function extractIdsFromSimplePattern(pattern) {
  if (pattern === undefined) {
    return []
  }

  if (pattern !== null && typeof pattern === 'object') {
    let keys = Object.keys(pattern)

    if (keys.length === 1 && keys[0] === 'id') {
      pattern = pattern.id
      if (typeof pattern === 'string') {
        return [pattern]
      }
      if (pattern !== null && typeof pattern === 'object') {
        keys = Object.keys(pattern)
        if (
          keys.length === 1 &&
          keys[0] === '__or' &&
          Array.isArray((pattern = pattern.__or)) &&
          pattern.every(_ => typeof _ === 'string')
        ) {
          return pattern
        }
      }
    }
  }

  throw new Error('invalid pattern')
}
exports.extractIdsFromSimplePattern = extractIdsFromSimplePattern
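To make the accepted shapes concrete, here is a usage sketch of the helper above (the require path and ids are illustrative, not from the diff):

```js
const { extractIdsFromSimplePattern } = require('./extractIdsFromSimplePattern')

extractIdsFromSimplePattern(undefined) // [] — no pattern means no ids
extractIdsFromSimplePattern({ id: 'vm-1' }) // ['vm-1']
extractIdsFromSimplePattern({ id: { __or: ['vm-1', 'vm-2'] } }) // ['vm-1', 'vm-2']
extractIdsFromSimplePattern({ power_state: 'Running' }) // throws Error('invalid pattern')
```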
@@ -1,6 +0,0 @@
const { utcFormat, utcParse } = require('d3-time-format')

// Format a date in ISO 8601 in a safe way to be used in filenames
// (even on Windows).
exports.formatFilenameDate = utcFormat('%Y%m%dT%H%M%SZ')
exports.parseFilenameDate = utcParse('%Y%m%dT%H%M%SZ')
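A quick round-trip sketch of these two helpers (module path illustrative):

```js
const { formatFilenameDate, parseFilenameDate } = require('./_filenameDate')

const s = formatFilenameDate(new Date(Date.UTC(2020, 5, 1, 12, 34, 56)))
// '20200601T123456Z' — no colons, so the string is safe in filenames, even on Windows

+parseFilenameDate(s) === Date.UTC(2020, 5, 1, 12, 34, 56) // true
```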
@@ -1,3 +0,0 @@
// returns all entries but the last retention-th
exports.getOldEntries = (retention, entries) =>
  entries === undefined ? [] : retention > 0 ? entries.slice(0, -retention) : entries
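Concretely (module path illustrative): with a retention of 2, the two most recent entries are kept and everything older is returned for deletion.

```js
const { getOldEntries } = require('./_getOldEntries')

getOldEntries(2, ['2020-01', '2020-02', '2020-03', '2020-04']) // ['2020-01', '2020-02']
getOldEntries(0, ['2020-01', '2020-02']) // ['2020-01', '2020-02'] — retention 0 keeps nothing
getOldEntries(2, undefined) // []
```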
@@ -1,59 +0,0 @@
const assert = require('assert')
const fs = require('fs-extra')

const isGzipFile = async fd => {
  // https://tools.ietf.org/html/rfc1952.html#page-5
  const magicNumber = Buffer.allocUnsafe(2)
  assert.strictEqual((await fs.read(fd, magicNumber, 0, magicNumber.length, 0)).bytesRead, magicNumber.length)
  return magicNumber[0] === 31 && magicNumber[1] === 139
}

// TODO: better check?
//
// our heuristic is not good enough, there have been some false positives
// (detected as invalid by us but valid by `tar` and imported with success),
// either:
// - these files were normal but the check is incorrect
// - these files were invalid but without data loss
// - these files were invalid but with silent data loss
//
// maybe reading the end of the file looking for a file named
// /^Ref:\d+/\d+\.checksum$/ and then validating the tar structure from it
//
// https://github.com/npm/node-tar/issues/234#issuecomment-538190295
const isValidTar = async (size, fd) => {
  if (size <= 1024 || size % 512 !== 0) {
    return false
  }

  const buf = Buffer.allocUnsafe(1024)
  assert.strictEqual((await fs.read(fd, buf, 0, buf.length, size - buf.length)).bytesRead, buf.length)
  return buf.every(_ => _ === 0)
}

// TODO: find a heuristic for compressed files
const isValidXva = async path => {
  try {
    const fd = await fs.open(path, 'r')
    try {
      const { size } = await fs.fstat(fd)
      if (size < 20) {
        // neither a valid gzip nor a valid tar
        return false
      }

      return (await isGzipFile(fd))
        ? true // gzip files cannot be validated at this time
        : await isValidTar(size, fd)
    } finally {
      fs.close(fd).catch(noop)
    }
  } catch (error) {
    // never throw, log and report as valid to avoid side effects
    console.error('isValidXva', path, error)
    return true
  }
}
exports.isValidXva = isValidXva

const noop = Function.prototype
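A usage sketch of the exported helper (the path is hypothetical); note that, per the comment above, unexpected errors are deliberately reported as valid:

```js
const { isValidXva } = require('./_isValidXva')

isValidXva('/backups/vm.xva').then(valid => {
  if (!valid) {
    // the file is neither a plausible gzip nor a zero-padded tar
    console.warn('vm.xva failed the XVA sanity check')
  }
})
```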
@@ -1,27 +0,0 @@
{
  "private": false,
  "name": "@xen-orchestra/backups",
  "homepage": "https://github.com/vatesfr/xen-orchestra/tree/master/@xen-orchestra/backups",
  "bugs": "https://github.com/vatesfr/xen-orchestra/issues",
  "repository": {
    "directory": "@xen-orchestra/backups",
    "type": "git",
    "url": "https://github.com/vatesfr/xen-orchestra.git"
  },
  "version": "0.1.1",
  "engines": {
    "node": ">=8.10"
  },
  "scripts": {
    "postversion": "npm publish --access public"
  },
  "dependencies": {
    "d3-time-format": "^3.0.0",
    "fs-extra": "^9.0.0"
  },
  "license": "AGPL-3.0-or-later",
  "author": {
    "name": "Vates SAS",
    "url": "https://vates.fr"
  }
}
@@ -1,11 +0,0 @@
exports.watchStreamSize = stream => {
  const container = { size: 0 }
  const isPaused = stream.isPaused()
  stream.on('data', data => {
    container.size += data.length
  })
  if (isPaused) {
    stream.pause()
  }
  return container
}
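A sketch of how this helper is meant to be used: attach it before consuming the stream, read the accumulated total afterwards (the file path is illustrative). Attaching the `data` listener is what starts the flow, hence the pause/restore dance above.

```js
const { createReadStream } = require('fs')
const { watchStreamSize } = require('./_watchStreamSize')

const stream = createReadStream('/tmp/backup.vhd')
const container = watchStreamSize(stream)
stream.on('end', () => {
  console.log('transferred', container.size, 'bytes')
})
```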
@@ -1,28 +0,0 @@
<!-- DO NOT EDIT MANUALLY, THIS FILE HAS BEEN GENERATED -->

# @xen-orchestra/cr-seed-cli

[](https://npmjs.org/package/@xen-orchestra/cr-seed-cli)  [](https://bundlephobia.com/result?p=@xen-orchestra/cr-seed-cli) [](https://npmjs.org/package/@xen-orchestra/cr-seed-cli)

## Install

Installation of the [npm package](https://npmjs.org/package/@xen-orchestra/cr-seed-cli):

```
> npm install --global @xen-orchestra/cr-seed-cli
```

## Contributions

Contributions are _very_ welcomed, either on the documentation or on
the code.

You may:

- report any [issue](https://github.com/vatesfr/xen-orchestra/issues)
  you've encountered;
- fork and create a pull request.

## License

[AGPL-3.0-or-later](https://spdx.org/licenses/AGPL-3.0-or-later) © [Vates SAS](https://vates.fr)
@@ -5,7 +5,7 @@ const { NULL_REF, Xapi } = require('xen-api')

const pkg = require('./package.json')

Xapi.prototype.getVmDisks = async function (vm) {
Xapi.prototype.getVmDisks = async function(vm) {
  const disks = { __proto__: null }
  await Promise.all([
    ...vm.VBDs.map(async vbdRef => {
@@ -32,7 +32,14 @@ ${cliName} v${pkg.version}
    )
  }

  const [srcXapiUrl, srcSnapshotUuid, tgtXapiUrl, tgtVmUuid, jobId, scheduleId] = args
  const [
    srcXapiUrl,
    srcSnapshotUuid,
    tgtXapiUrl,
    tgtVmUuid,
    jobId,
    scheduleId,
  ] = args

  const srcXapi = new Xapi({
    allowUnauthorized: true,
@@ -63,10 +70,16 @@ ${cliName} v${pkg.version}
    'xo:backup:vm': srcVm.uuid,
  }

  const [srcDisks, tgtDisks] = await Promise.all([srcXapi.getVmDisks(srcSnapshot), tgtXapi.getVmDisks(tgtVm)])
  const [srcDisks, tgtDisks] = await Promise.all([
    srcXapi.getVmDisks(srcSnapshot),
    tgtXapi.getVmDisks(tgtVm),
  ])
  const userDevices = Object.keys(tgtDisks)

  const tgtSr = await tgtXapi.getRecord('SR', tgtDisks[Object.keys(tgtDisks)[0]].SR)
  const tgtSr = await tgtXapi.getRecord(
    'SR',
    tgtDisks[Object.keys(tgtDisks)[0]].SR
  )

  await Promise.all([
    srcSnapshot.update_other_config(metadata),
@@ -77,7 +90,10 @@ ${cliName} v${pkg.version}
      'xo:backup:sr': tgtSr.uuid,
      'xo:copy_of': srcSnapshotUuid,
    }),
    tgtVm.update_blocked_operations('start', 'Start operation for this vm is blocked, clone it if you want to use it.'),
    tgtVm.update_blocked_operations(
      'start',
      'Start operation for this vm is blocked, clone it if you want to use it.'
    ),
    Promise.all(
      userDevices.map(userDevice => {
        const srcDisk = srcDisks[userDevice]
@@ -1,5 +1,4 @@
{
  "private": false,
  "name": "@xen-orchestra/cr-seed-cli",
  "version": "0.2.0",
  "homepage": "https://github.com/vatesfr/xen-orchestra/tree/master/@xen-orchestra/cr-seed-cli",
@@ -15,17 +14,11 @@
  "bin": {
    "xo-cr-seed": "./index.js"
  },
  "preferGlobal": true,
  "dependencies": {
    "golike-defer": "^0.4.1",
    "xen-api": "^0.29.0"
    "xen-api": "^0.25.2"
  },
  "scripts": {
    "postversion": "npm publish"
  },
  "license": "AGPL-3.0-or-later",
  "author": {
    "name": "Vates SAS",
    "url": "https://vates.fr"
  }
}
@@ -1 +1,3 @@
module.exports = require('../../@xen-orchestra/babel-config')(require('./package.json'))
module.exports = require('../../@xen-orchestra/babel-config')(
  require('./package.json')
)
@@ -1,8 +1,4 @@
<!-- DO NOT EDIT MANUALLY, THIS FILE HAS BEEN GENERATED -->

# @xen-orchestra/cron

[](https://npmjs.org/package/@xen-orchestra/cron)  [](https://bundlephobia.com/result?p=@xen-orchestra/cron) [](https://npmjs.org/package/@xen-orchestra/cron)
# @xen-orchestra/cron [](https://travis-ci.org/vatesfr/xen-orchestra)

> Focused, well maintained, cron parser/scheduler

@@ -14,14 +10,13 @@ Installation of the [npm package](https://npmjs.org/package/@xen-orchestra/cron)
> npm install --save @xen-orchestra/cron
```

## Usage

### Pattern syntax

```
<minute> <hour> <day of month> <month> <day of week>
```

Each entry can be:

- a single value
@@ -34,12 +29,12 @@ A wildcard (`*`) can be used as a shortcut for the whole range
Step values can be used in conjunction with ranges. For instance,
`1-7/2` is the same as `1,3,5,7`.

| Field | Allowed values |
| ---------------- | ------------------------------------------------------------------ |
| minute | 0-59 |
| hour | 0-23 |
| day of the month | 1-31 or 3-letter names (`jan`, `feb`, …) |
| month | 0-11 |
| Field | Allowed values |
|------------------|----------------|
| minute | 0-59 |
| hour | 0-23 |
| day of the month | 1-31 or 3-letter names (`jan`, `feb`, …) |
| month | 0-11 |
| day of week | 0-7 (0 and 7 both mean Sunday) or 3-letter names (`mon`, `tue`, …) |

> Note: the month range is 0-11 to be compatible with
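Putting the table and the step syntax together, an example pattern (illustrative, not from the diff):

```js
import { createSchedule } from '@xen-orchestra/cron'

// every 15 minutes, between 08:00 and 18:00, Monday to Friday
const schedule = createSchedule('*/15 8-18 * * mon-fri')
```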
@@ -115,9 +110,28 @@ job.start()
job.stop()
```

## Development

```
# Install dependencies
> yarn

# Run the tests
> yarn test

# Continuously compile
> yarn dev

# Continuously run the tests
> yarn dev-test

# Build for production (automatically called by npm install)
> yarn build
```

## Contributions

Contributions are _very_ welcomed, either on the documentation or on
Contributions are *very* welcomed, either on the documentation or on
the code.

You may:
@@ -128,4 +142,4 @@ You may:

## License

[ISC](https://spdx.org/licenses/ISC) © [Vates SAS](https://vates.fr)
ISC © [Vates SAS](https://vates.fr)
@@ -1,98 +0,0 @@
### Pattern syntax

```
<minute> <hour> <day of month> <month> <day of week>
```

Each entry can be:

- a single value
- a range (`0-23` or `*/2`)
- a list of values/ranges (`1,8-12`)

A wildcard (`*`) can be used as a shortcut for the whole range
(`first-last`).

Step values can be used in conjunction with ranges. For instance,
`1-7/2` is the same as `1,3,5,7`.

| Field | Allowed values |
| ---------------- | ------------------------------------------------------------------ |
| minute | 0-59 |
| hour | 0-23 |
| day of the month | 1-31 or 3-letter names (`jan`, `feb`, …) |
| month | 0-11 |
| day of week | 0-7 (0 and 7 both mean Sunday) or 3-letter names (`mon`, `tue`, …) |

> Note: the month range is 0-11 to be compatible with
> [cron](https://github.com/kelektiv/node-cron), it does not appear to
> be very standard though.

### API

`createSchedule(pattern: string, zone: string = 'utc'): Schedule`

> Create a new schedule.

- `pattern`: the pattern to use, see [the syntax](#pattern-syntax)
- `zone`: the timezone to use, use `'local'` for the local timezone

```js
import { createSchedule } from '@xen-orchestra/cron'

const schedule = createSchedule('0 0 * * sun', 'America/New_York')
```

`Schedule#createJob(fn: Function): Job`

> Create a new job from this schedule.

- `fn`: function to execute, if it returns a promise, it will be
  awaited before scheduling the next run.

```js
const job = schedule.createJob(() => {
  console.log(new Date())
})
```

`Schedule#next(n: number): Array<Date>`

> Returns the next dates matching this schedule.

- `n`: number of dates to return

```js
schedule.next(2)
// [ 2018-02-11T05:00:00.000Z, 2018-02-18T05:00:00.000Z ]
```

`Schedule#startJob(fn: Function): () => void`

> Start a new job from this schedule and return a function to stop it.

- `fn`: function to execute, if it returns a promise, it will be
  awaited before scheduling the next run.

```js
const stopJob = schedule.startJob(() => {
  console.log(new Date())
})
stopJob()
```

`Job#start(): void`

> Start this job.

```js
job.start()
```

`Job#stop(): void`

> Stop this job.

```js
job.stop()
```
@@ -1,7 +1,6 @@
{
  "private": false,
  "name": "@xen-orchestra/cron",
  "version": "1.0.6",
  "version": "1.0.3",
  "license": "ISC",
  "description": "Focused, well maintained, cron parser/scheduler",
  "keywords": [
@@ -23,8 +22,8 @@
    "url": "https://github.com/vatesfr/xen-orchestra.git"
  },
  "author": {
    "name": "Vates SAS",
    "url": "https://vates.fr"
    "name": "Julien Fontanet",
    "email": "julien.fontanet@isonoe.net"
  },
  "preferGlobal": false,
  "main": "dist/",
@@ -47,8 +46,8 @@
    "@babel/core": "^7.0.0",
    "@babel/preset-env": "^7.0.0",
    "@babel/preset-flow": "^7.0.0",
    "cross-env": "^7.0.2",
    "rimraf": "^3.0.0"
    "cross-env": "^5.1.3",
    "rimraf": "^2.6.2"
  },
  "scripts": {
    "build": "cross-env NODE_ENV=production babel --source-maps --out-dir=dist/ src/",

@@ -7,21 +7,7 @@ const MAX_DELAY = 2 ** 31 - 1

class Job {
  constructor(schedule, fn) {
    let scheduledDate
    const wrapper = () => {
      const now = Date.now()
      if (scheduledDate > now) {
        // we're early, delay
        //
        // no need to check _isEnabled, we're just delaying the existing timeout
        //
        // see https://github.com/vatesfr/xen-orchestra/issues/4625
        this._timeout = setTimeout(wrapper, scheduledDate - now)
        return
      }

      this._isRunning = true

      let result
      try {
        result = fn()
@@ -36,33 +22,23 @@ class Job {
      }
    }
    const scheduleNext = () => {
      this._isRunning = false

      if (this._isEnabled) {
        const now = schedule._createDate()
        scheduledDate = +next(schedule._schedule, now)
        const delay = scheduledDate - now
        this._timeout = delay < MAX_DELAY ? setTimeout(wrapper, delay) : setTimeout(scheduleNext, MAX_DELAY)
      }
      const delay = schedule._nextDelay()
      this._timeout =
        delay < MAX_DELAY
          ? setTimeout(wrapper, delay)
          : setTimeout(scheduleNext, MAX_DELAY)
    }

    this._isEnabled = false
    this._isRunning = false
    this._scheduleNext = scheduleNext
    this._timeout = undefined
  }

  start() {
    this.stop()

    this._isEnabled = true
    if (!this._isRunning) {
      this._scheduleNext()
    }
    this._scheduleNext()
  }

  stop() {
    this._isEnabled = false
    clearTimeout(this._timeout)
  }
}
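The `MAX_DELAY` guard in both versions exists because `setTimeout` stores its delay as a signed 32-bit integer: anything larger fires (almost) immediately, so the code re-arms instead of overflowing. The same idea in isolation (a sketch, not the library code):

```js
const MAX_DELAY = 2 ** 31 - 1 // ≈ 24.8 days, setTimeout's upper bound

function setLongTimeout(fn, delay) {
  // chain timeouts so arbitrarily long delays never overflow
  return delay < MAX_DELAY
    ? setTimeout(fn, delay)
    : setTimeout(() => setLongTimeout(fn, delay - MAX_DELAY), MAX_DELAY)
}
```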
@@ -70,7 +46,12 @@ class Job {
class Schedule {
  constructor(pattern, zone = 'utc') {
    this._schedule = parse(pattern)
    this._createDate = zone.toLowerCase() === 'utc' ? moment.utc : zone === 'local' ? moment : () => moment.tz(zone)
    this._createDate =
      zone.toLowerCase() === 'utc'
        ? moment.utc
        : zone === 'local'
        ? moment
        : () => moment.tz(zone)
  }

  createJob(fn) {
@@ -87,6 +68,11 @@ class Schedule {
    return dates
  }

  _nextDelay() {
    const now = this._createDate()
    return next(this._schedule, now) - now
  }

  startJob(fn) {
    const job = this.createJob(fn)
    job.start()

@@ -1,78 +0,0 @@
/* eslint-env jest */

import { createSchedule } from './'

const wrap = value => () => value

describe('issues', () => {
  let originalDateNow
  beforeAll(() => {
    originalDateNow = Date.now
  })
  afterAll(() => {
    Date.now = originalDateNow
    originalDateNow = undefined
  })

  test('stop during async execution', async () => {
    let nCalls = 0
    let resolve, promise

    const schedule = createSchedule('* * * * *')
    const job = schedule.createJob(() => {
      ++nCalls

      // eslint-disable-next-line promise/param-names
      promise = new Promise(r => {
        resolve = r
      })
      return promise
    })

    job.start()
    Date.now = wrap(+schedule.next(1)[0])
    jest.runAllTimers()

    expect(nCalls).toBe(1)

    job.stop()

    resolve()
    await promise

    jest.runAllTimers()
    expect(nCalls).toBe(1)
  })

  test('stop then start during async job execution', async () => {
    let nCalls = 0
    let resolve, promise

    const schedule = createSchedule('* * * * *')
    const job = schedule.createJob(() => {
      ++nCalls

      // eslint-disable-next-line promise/param-names
      promise = new Promise(r => {
        resolve = r
      })
      return promise
    })

    job.start()
    Date.now = wrap(+schedule.next(1)[0])
    jest.runAllTimers()

    expect(nCalls).toBe(1)

    job.stop()
    job.start()

    resolve()
    await promise

    Date.now = wrap(+schedule.next(1)[0])
    jest.runAllTimers()
    expect(nCalls).toBe(2)
  })
})
@@ -37,7 +37,9 @@ describe('next()', () => {
  })

  it('fails when no solutions has been found', () => {
    expect(() => N('0 0 30 feb *')).toThrow('no solutions found for this schedule')
    expect(() => N('0 0 30 feb *')).toThrow(
      'no solutions found for this schedule'
    )
  })

  it('select the first sunday of the month', () => {

@@ -66,7 +66,9 @@ const createParser = ({ fields: [...fields], presets: { ...presets } }) => {
      aliasesRegExp.lastIndex = i
      const matches = aliasesRegExp.exec(pattern)
      if (matches === null) {
        throw new SyntaxError(`${field.name}: missing alias or integer at character ${i}`)
        throw new SyntaxError(
          `${field.name}: missing alias or integer at character ${i}`
        )
      }
      const [alias] = matches
      i += alias.length
@@ -75,7 +77,9 @@

    const { range } = field
    if (value < range[0] || value > range[1]) {
      throw new SyntaxError(`${field.name}: ${value} is not between ${range[0]} and ${range[1]}`)
      throw new SyntaxError(
        `${field.name}: ${value} is not between ${range[0]} and ${range[1]}`
      )
    }
    return value
  }
@@ -113,7 +117,9 @@
    {
      const schedule = presets[p]
      if (schedule !== undefined) {
        return typeof schedule === 'string' ? (presets[p] = parse(schedule)) : schedule
        return typeof schedule === 'string'
          ? (presets[p] = parse(schedule))
          : schedule
      }
    }

@@ -136,7 +142,9 @@

    consumeWhitespaces()
    if (i !== n) {
      throw new SyntaxError(`unexpected character at offset ${i}, expected end`)
      throw new SyntaxError(
        `unexpected character at offset ${i}, expected end`
      )
    }

    return schedule

@@ -33,7 +33,9 @@ describe('parse()', () => {
  })

  it('reports invalid aliases', () => {
    expect(() => parse('* * * jan-foo *')).toThrow('month: missing alias or integer at character 10')
    expect(() => parse('* * * jan-foo *')).toThrow(
      'month: missing alias or integer at character 10'
    )
  })

  it('dayOfWeek: 0 and 7 bind to sunday', () => {

@@ -1 +1,3 @@
module.exports = require('../../@xen-orchestra/babel-config')(require('./package.json'))
module.exports = require('../../@xen-orchestra/babel-config')(
  require('./package.json')
)
@@ -1,28 +1,49 @@
<!-- DO NOT EDIT MANUALLY, THIS FILE HAS BEEN GENERATED -->
# ${pkg.name} [](https://travis-ci.org/${pkg.shortGitHubPath})

# @xen-orchestra/defined

[](https://npmjs.org/package/@xen-orchestra/defined)  [](https://bundlephobia.com/result?p=@xen-orchestra/defined) [](https://npmjs.org/package/@xen-orchestra/defined)
> ${pkg.description}

## Install

Installation of the [npm package](https://npmjs.org/package/@xen-orchestra/defined):
Installation of the [npm package](https://npmjs.org/package/${pkg.name}):

```
> npm install --save @xen-orchestra/defined
> npm install --save ${pkg.name}
```

## Usage

**TODO**

## Development

```
# Install dependencies
> yarn

# Run the tests
> yarn test

# Continuously compile
> yarn dev

# Continuously run the tests
> yarn dev-test

# Build for production (automatically called by npm install)
> yarn build
```

## Contributions

Contributions are _very_ welcomed, either on the documentation or on
Contributions are *very* welcomed, either on the documentation or on
the code.

You may:

- report any [issue](https://github.com/vatesfr/xen-orchestra/issues)
- report any [issue](${pkg.bugs})
  you've encountered;
- fork and create a pull request.

## License

[ISC](https://spdx.org/licenses/ISC) © [Vates SAS](https://vates.fr)
${pkg.license} © [${pkg.author.name}](${pkg.author.url})

@@ -1,5 +1,4 @@
{
  "private": false,
  "name": "@xen-orchestra/defined",
  "version": "0.0.0",
  "license": "ISC",
@@ -13,8 +12,8 @@
    "url": "https://github.com/vatesfr/xen-orchestra.git"
  },
  "author": {
    "name": "Vates SAS",
    "url": "https://vates.fr"
    "name": "Julien Fontanet",
    "email": "julien.fontanet@vates.fr"
  },
  "preferGlobal": false,
  "main": "dist/",
@@ -35,8 +34,8 @@
    "@babel/preset-env": "^7.0.0",
    "@babel/preset-flow": "^7.0.0",
    "babel-plugin-lodash": "^3.3.2",
    "cross-env": "^7.0.2",
    "rimraf": "^3.0.0"
    "cross-env": "^5.1.3",
    "rimraf": "^2.6.2"
  },
  "scripts": {
    "build": "cross-env NODE_ENV=production babel --source-maps --out-dir=dist/ src/",

@@ -60,4 +60,5 @@ export const get = (accessor: (input: ?any) => any, arg: ?any) => {
//   _ => new ProxyAgent(_)
// )
// ```
export const ifDef = (value: ?any, thenFn: (value: any) => any) => (value !== undefined ? thenFn(value) : value)
export const ifDef = (value: ?any, thenFn: (value: any) => any) =>
  value !== undefined ? thenFn(value) : value

@@ -1 +1,3 @@
module.exports = require('../../@xen-orchestra/babel-config')(require('./package.json'))
module.exports = require('../../@xen-orchestra/babel-config')(
  require('./package.json')
)
@@ -1,8 +1,6 @@
<!-- DO NOT EDIT MANUALLY, THIS FILE HAS BEEN GENERATED -->
# @xen-orchestra/emit-async [](https://travis-ci.org/${pkg.shortGitHubPath})

# @xen-orchestra/emit-async

[](https://npmjs.org/package/@xen-orchestra/emit-async)  [](https://bundlephobia.com/result?p=@xen-orchestra/emit-async) [](https://npmjs.org/package/@xen-orchestra/emit-async)
> ${pkg.description}

## Install

@@ -19,11 +17,6 @@ import EE from 'events'
import emitAsync from '@xen-orchestra/emit-async'

const ee = new EE()

// exposing emitAsync on our event emitter
//
// it's not required though and we could have used directly via
// emitAsync.call(ee, event, args...)
ee.emitAsync = emitAsync

ee.on('start', async function () {
@@ -31,32 +24,48 @@ ee.on('start', async function () {
})

// similar to EventEmitter#emit() but returns a promise which resolves when all
// listeners have settled
// listeners have resolved
await ee.emitAsync('start')

// by default, it will reject as soon as one listener rejects, you can customise
// error handling though:
await ee.emitAsync(
  {
    onError(error) {
      console.warn(error)
    },
  },
  'start'
)
await ee.emitAsync({
  onError (error) {
    console.warn(error)
  }
}, 'start')
```
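For intuition, the coalescing of listener results described above behaves roughly like the sketch below — a simplification, not the package's actual implementation (it ignores the `onError` option):

```js
async function emitAsyncSketch(event, ...args) {
  // call as emitAsyncSketch.call(ee, 'start'); Promise.all rejects as soon as
  // any listener rejects, matching the default behaviour described above
  return Promise.all(this.listeners(event).map(listener => listener.apply(this, args)))
}
```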
## Development

```
# Install dependencies
> yarn

# Run the tests
> yarn test

# Continuously compile
> yarn dev

# Continuously run the tests
> yarn dev-test

# Build for production (automatically called by npm install)
> yarn build
```

## Contributions

Contributions are _very_ welcomed, either on the documentation or on
Contributions are *very* welcomed, either on the documentation or on
the code.

You may:

- report any [issue](https://github.com/vatesfr/xen-orchestra/issues)
- report any [issue](${pkg.bugs})
  you've encountered;
- fork and create a pull request.

## License

[ISC](https://spdx.org/licenses/ISC) © [Vates SAS](https://vates.fr)
${pkg.license} © [${pkg.author.name}](${pkg.author.url})

@@ -1,31 +0,0 @@
```js
import EE from 'events'
import emitAsync from '@xen-orchestra/emit-async'

const ee = new EE()

// exposing emitAsync on our event emitter
//
// it's not required though and we could have used directly via
// emitAsync.call(ee, event, args...)
ee.emitAsync = emitAsync

ee.on('start', async function () {
  // whatever
})

// similar to EventEmitter#emit() but returns a promise which resolves when all
// listeners have settled
await ee.emitAsync('start')

// by default, it will reject as soon as one listener rejects, you can customise
// error handling though:
await ee.emitAsync(
  {
    onError(error) {
      console.warn(error)
    },
  },
  'start'
)
```
|
||||
{
|
||||
"private": false,
|
||||
"name": "@xen-orchestra/emit-async",
|
||||
"version": "0.0.0",
|
||||
"license": "ISC",
|
||||
@@ -13,8 +12,8 @@
|
||||
"url": "https://github.com/vatesfr/xen-orchestra.git"
|
||||
},
|
||||
"author": {
|
||||
"name": "Vates SAS",
|
||||
"url": "https://vates.fr"
|
||||
"name": "Julien Fontanet",
|
||||
"email": "julien.fontanet@vates.fr"
|
||||
},
|
||||
"preferGlobal": false,
|
||||
"main": "dist/",
|
||||
@@ -34,8 +33,8 @@
|
||||
"@babel/core": "^7.0.0",
|
||||
"@babel/preset-env": "^7.0.0",
|
||||
"babel-plugin-lodash": "^3.3.2",
|
||||
"cross-env": "^7.0.2",
|
||||
"rimraf": "^3.0.0"
|
||||
"cross-env": "^5.1.3",
|
||||
"rimraf": "^2.6.2"
|
||||
},
|
||||
"scripts": {
|
||||
"build": "cross-env NODE_ENV=production babel --source-maps --out-dir=dist/ src/",
|
||||
|
||||
@@ -1 +1,3 @@
|
||||
module.exports = require('../../@xen-orchestra/babel-config')(require('./package.json'))
|
||||
module.exports = require('../../@xen-orchestra/babel-config')(
|
||||
require('./package.json')
|
||||
)
|
||||
|
||||
@@ -1,30 +0,0 @@
|
||||
<!-- DO NOT EDIT MANUALLY, THIS FILE HAS BEEN GENERATED -->
|
||||
|
||||
# @xen-orchestra/fs
|
||||
|
||||
[](https://npmjs.org/package/@xen-orchestra/fs)  [](https://bundlephobia.com/result?p=@xen-orchestra/fs) [](https://npmjs.org/package/@xen-orchestra/fs)
|
||||
|
||||
> The File System for Xen Orchestra backups.
|
||||
|
||||
## Install
|
||||
|
||||
Installation of the [npm package](https://npmjs.org/package/@xen-orchestra/fs):
|
||||
|
||||
```
|
||||
> npm install --global @xen-orchestra/fs
|
||||
```
|
||||
|
||||
## Contributions
|
||||
|
||||
Contributions are _very_ welcomed, either on the documentation or on
|
||||
the code.
|
||||
|
||||
You may:
|
||||
|
||||
- report any [issue](https://github.com/vatesfr/xen-orchestra/issues)
|
||||
you've encountered;
|
||||
- fork and create a pull request.
|
||||
|
||||
## License
|
||||
|
||||
[AGPL-3.0-or-later](https://spdx.org/licenses/AGPL-3.0-or-later) © [Vates SAS](https://vates.fr)
|
||||
@@ -1,8 +1,7 @@
|
||||
{
|
||||
"private": false,
|
||||
"name": "@xen-orchestra/fs",
|
||||
"version": "0.12.1",
|
||||
"license": "AGPL-3.0-or-later",
|
||||
"version": "0.9.0",
|
||||
"license": "AGPL-3.0",
|
||||
"description": "The File System for Xen Orchestra backups.",
|
||||
"keywords": [],
|
||||
"homepage": "https://github.com/vatesfr/xen-orchestra/tree/master/@xen-orchestra/fs",
|
||||
@@ -19,40 +18,36 @@
|
||||
"dist/"
|
||||
],
|
||||
"engines": {
|
||||
"node": ">=8.10"
|
||||
"node": ">=6"
|
||||
},
|
||||
"dependencies": {
|
||||
"@marsaud/smb2": "^0.17.2",
|
||||
"@sindresorhus/df": "^3.1.1",
|
||||
"@sullux/aws-sdk": "^1.0.5",
|
||||
"@marsaud/smb2": "^0.14.0",
|
||||
"@sindresorhus/df": "^2.1.0",
|
||||
"@xen-orchestra/async-map": "^0.0.0",
|
||||
"aws-sdk": "^2.686.0",
|
||||
"decorator-synchronized": "^0.5.0",
|
||||
"execa": "^5.0.0",
|
||||
"fs-extra": "^9.0.0",
|
||||
"get-stream": "^6.0.0",
|
||||
"limit-concurrency-decorator": "^0.4.0",
|
||||
"execa": "^1.0.0",
|
||||
"fs-extra": "^8.0.1",
|
||||
"get-stream": "^4.0.0",
|
||||
"lodash": "^4.17.4",
|
||||
"promise-toolbox": "^0.16.0",
|
||||
"promise-toolbox": "^0.12.1",
|
||||
"readable-stream": "^3.0.6",
|
||||
"through2": "^4.0.2",
|
||||
"tmp": "^0.2.1",
|
||||
"xo-remote-parser": "^0.6.0"
|
||||
"through2": "^3.0.0",
|
||||
"tmp": "^0.1.0",
|
||||
"xo-remote-parser": "^0.5.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@babel/cli": "^7.0.0",
|
||||
"@babel/core": "^7.0.0",
|
||||
"@babel/plugin-proposal-decorators": "^7.1.6",
|
||||
"@babel/plugin-proposal-function-bind": "^7.0.0",
|
||||
"@babel/plugin-proposal-nullish-coalescing-operator": "^7.4.4",
|
||||
"@babel/preset-env": "^7.0.0",
|
||||
"@babel/preset-flow": "^7.0.0",
|
||||
"async-iterator-to-stream": "^1.1.0",
|
||||
"babel-plugin-lodash": "^3.3.2",
|
||||
"cross-env": "^7.0.2",
|
||||
"cross-env": "^5.1.3",
|
||||
"dotenv": "^8.0.0",
|
||||
"index-modules": "^0.3.0",
|
||||
"rimraf": "^3.0.0"
|
||||
"rimraf": "^2.6.2"
|
||||
},
|
||||
"scripts": {
|
||||
"build": "cross-env NODE_ENV=production babel --source-maps --out-dir=dist/ src/",
|
||||
@@ -60,11 +55,7 @@
|
||||
"dev": "cross-env NODE_ENV=development babel --watch --source-maps --out-dir=dist/ src/",
|
||||
"prebuild": "yarn run clean",
|
||||
"predev": "yarn run clean",
|
||||
"prepublishOnly": "yarn run build",
|
||||
"prepare": "yarn run build",
|
||||
"postversion": "npm publish"
|
||||
},
|
||||
"author": {
|
||||
"name": "Vates SAS",
|
||||
"url": "https://vates.fr"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -6,19 +6,36 @@ import { tmpdir } from 'os'
|
||||
|
||||
import LocalHandler from './local'
|
||||
|
||||
const sudoExeca = (command, args, opts) => execa('sudo', [command, ...args], opts)
|
||||
const sudoExeca = (command, args, opts) =>
|
||||
execa('sudo', [command, ...args], opts)
|
||||
|
||||
export default class MountHandler extends LocalHandler {
|
||||
constructor(remote, { mountsDir = join(tmpdir(), 'xo-fs-mounts'), useSudo = false, ...opts } = {}, params) {
|
||||
constructor(
|
||||
remote,
|
||||
{
|
||||
mountsDir = join(tmpdir(), 'xo-fs-mounts'),
|
||||
useSudo = false,
|
||||
...opts
|
||||
} = {},
|
||||
params
|
||||
) {
|
||||
super(remote, opts)
|
||||
|
||||
this._execa = useSudo ? sudoExeca : execa
|
||||
this._keeper = undefined
|
||||
this._params = {
|
||||
...params,
|
||||
options: [params.options, remote.options ?? params.defaultOptions].filter(_ => _ !== undefined).join(','),
|
||||
options: [params.options, remote.options]
|
||||
.filter(_ => _ !== undefined)
|
||||
.join(','),
|
||||
}
|
||||
this._realPath = join(mountsDir, remote.id || Math.random().toString(36).slice(2))
|
||||
this._realPath = join(
|
||||
mountsDir,
|
||||
remote.id ||
|
||||
Math.random()
|
||||
.toString(36)
|
||||
.slice(2)
|
||||
)
|
||||
}
|
||||
|
||||
async _forget() {
|
||||
@@ -58,15 +75,16 @@ export default class MountHandler extends LocalHandler {
|
||||
|
||||
try {
|
||||
const { type, device, options, env } = this._params
|
||||
|
||||
// Linux mount is more flexible in which order the mount arguments appear.
|
||||
// But FreeBSD requires this order of the arguments.
|
||||
await this._execa('mount', ['-o', options, '-t', type, device, realPath], {
|
||||
env: {
|
||||
LANG: 'C',
|
||||
...env,
|
||||
},
|
||||
})
|
||||
await this._execa(
|
||||
'mount',
|
||||
['-t', type, device, realPath, '-o', options],
|
||||
{
|
||||
env: {
|
||||
LANG: 'C',
|
||||
...env,
|
||||
},
|
||||
}
|
||||
)
|
||||
} catch (error) {
|
||||
try {
|
||||
// the failure may mean it's already mounted, use `findmnt` to check
|
||||
@@ -81,7 +99,9 @@ export default class MountHandler extends LocalHandler {
|
||||
|
||||
// keep an open file on the mount to prevent it from being unmounted if used
|
||||
// by another handler/process
|
||||
const keeperPath = `${realPath}/.keeper_${Math.random().toString(36).slice(2)}`
|
||||
const keeperPath = `${realPath}/.keeper_${Math.random()
|
||||
.toString(36)
|
||||
.slice(2)}`
|
||||
this._keeper = await fs.open(keeperPath, 'w')
|
||||
ignoreErrors.call(fs.unlink(keeperPath))
|
||||
}
|
||||
|
||||
@@ -4,9 +4,7 @@
|
||||
import getStream from 'get-stream'
|
||||
|
||||
import asyncMap from '@xen-orchestra/async-map'
|
||||
import CancelToken from 'promise-toolbox/CancelToken'
|
||||
import limit from 'limit-concurrency-decorator'
|
||||
import path, { basename } from 'path'
|
||||
import path from 'path'
|
||||
import synchronized from 'decorator-synchronized'
|
||||
import { fromCallback, fromEvent, ignoreErrors, timeout } from 'promise-toolbox'
|
||||
import { parse } from 'xo-remote-parser'
|
||||
@@ -33,7 +31,6 @@ const computeRate = (hrtime: number[], size: number) => {
|
||||
}
|
||||
|
||||
const DEFAULT_TIMEOUT = 6e5 // 10 min
|
||||
const DEFAULT_MAX_PARALLEL_OPERATIONS = 10
|
||||
|
||||
const ignoreEnoent = error => {
|
||||
if (error == null || error.code !== 'ENOENT') {
|
||||
@@ -86,23 +83,6 @@ export default class RemoteHandlerAbstract {
|
||||
}
|
||||
}
|
||||
;({ timeout: this._timeout = DEFAULT_TIMEOUT } = options)
|
||||
|
||||
const sharedLimit = limit(options.maxParallelOperations ?? DEFAULT_MAX_PARALLEL_OPERATIONS)
|
||||
this.closeFile = sharedLimit(this.closeFile)
|
||||
this.getInfo = sharedLimit(this.getInfo)
|
||||
this.getSize = sharedLimit(this.getSize)
|
||||
this.list = sharedLimit(this.list)
|
||||
this.mkdir = sharedLimit(this.mkdir)
|
||||
this.openFile = sharedLimit(this.openFile)
|
||||
this.outputFile = sharedLimit(this.outputFile)
|
||||
this.read = sharedLimit(this.read)
|
||||
this.readFile = sharedLimit(this.readFile)
|
||||
this.rename = sharedLimit(this.rename)
|
||||
this.rmdir = sharedLimit(this.rmdir)
|
||||
this.truncate = sharedLimit(this.truncate)
|
||||
this.unlink = sharedLimit(this.unlink)
|
||||
this.write = sharedLimit(this.write)
|
||||
this.writeFile = sharedLimit(this.writeFile)
|
||||
}
|
||||
|
||||
// Public members
|
||||
@@ -117,7 +97,46 @@ export default class RemoteHandlerAbstract {
|
||||
}
|
||||
|
||||
async closeFile(fd: FileDescriptor): Promise<void> {
|
||||
await this.__closeFile(fd)
|
||||
await timeout.call(this._closeFile(fd.fd), this._timeout)
|
||||
}
|
||||
|
||||
async createOutputStream(
|
||||
file: File,
|
||||
{ checksum = false, ...options }: Object = {}
|
||||
): Promise<LaxWritable> {
|
||||
if (typeof file === 'string') {
|
||||
file = normalizePath(file)
|
||||
}
|
||||
const path = typeof file === 'string' ? file : file.path
|
||||
const streamP = timeout.call(
|
||||
this._createOutputStream(file, {
|
||||
flags: 'wx',
|
||||
...options,
|
||||
}),
|
||||
this._timeout
|
||||
)
|
||||
|
||||
if (!checksum) {
|
||||
return streamP
|
||||
}
|
||||
|
||||
const checksumStream = createChecksumStream()
|
||||
const forwardError = error => {
|
||||
checksumStream.emit('error', error)
|
||||
}
|
||||
|
||||
const stream = await streamP
|
||||
stream.on('error', forwardError)
|
||||
checksumStream.pipe(stream)
|
||||
|
||||
// $FlowFixMe
|
||||
checksumStream.checksumWritten = checksumStream.checksum
|
||||
.then(value =>
|
||||
this._outputFile(checksumFile(path), value, { flags: 'wx' })
|
||||
)
|
||||
.catch(forwardError)
|
||||
|
||||
return checksumStream
|
||||
}
|
||||
|
||||
createReadStream(
|
||||
@@ -128,24 +147,30 @@ export default class RemoteHandlerAbstract {
|
||||
file = normalizePath(file)
|
||||
}
|
||||
const path = typeof file === 'string' ? file : file.path
|
||||
const streamP = timeout.call(this._createReadStream(file, options), this._timeout).then(stream => {
|
||||
// detect early errors
|
||||
let promise = fromEvent(stream, 'readable')
|
||||
const streamP = timeout
|
||||
.call(this._createReadStream(file, options), this._timeout)
|
||||
.then(stream => {
|
||||
// detect early errors
|
||||
let promise = fromEvent(stream, 'readable')
|
||||
|
||||
// try to add the length prop if missing and not a range stream
|
||||
if (stream.length === undefined && options.end === undefined && options.start === undefined) {
|
||||
promise = Promise.all([
|
||||
promise,
|
||||
ignoreErrors.call(
|
||||
this._getSize(file).then(size => {
|
||||
stream.length = size
|
||||
})
|
||||
),
|
||||
])
|
||||
}
|
||||
// try to add the length prop if missing and not a range stream
|
||||
if (
|
||||
stream.length === undefined &&
|
||||
options.end === undefined &&
|
||||
options.start === undefined
|
||||
) {
|
||||
promise = Promise.all([
|
||||
promise,
|
||||
ignoreErrors.call(
|
||||
this._getSize(file).then(size => {
|
||||
stream.length = size
|
||||
})
|
||||
),
|
||||
])
|
||||
}
|
||||
|
||||
return promise.then(() => stream)
|
||||
})
|
||||
return promise.then(() => stream)
|
||||
})
|
||||
|
||||
if (!checksum) {
|
||||
return streamP
|
||||
@@ -158,7 +183,10 @@ export default class RemoteHandlerAbstract {
|
||||
checksum =>
|
||||
streamP.then(stream => {
|
||||
const { length } = stream
|
||||
stream = (validChecksumOfReadStream(stream, String(checksum).trim()): LaxReadable)
|
||||
stream = (validChecksumOfReadStream(
|
||||
stream,
|
||||
String(checksum).trim()
|
||||
): LaxReadable)
|
||||
stream.length = length
|
||||
|
||||
return stream
|
||||
@@ -172,18 +200,19 @@ export default class RemoteHandlerAbstract {
|
||||
)
|
||||
}
|
||||
|
||||
// write a stream to a file using a temporary file
|
||||
async outputStream(
|
||||
path: string,
|
||||
input: Readable | Promise<Readable>,
|
||||
{ checksum = true, dirMode, cancelToken = CancelToken.none }: { checksum?: boolean, dirMode?: number } = {}
|
||||
): Promise<void> {
|
||||
path = normalizePath(path)
|
||||
return this._outputStream(await input, normalizePath(path), {
|
||||
checksum,
|
||||
dirMode,
|
||||
cancelToken,
|
||||
})
|
||||
createWriteStream(
|
||||
file: File,
|
||||
options: { end?: number, flags?: string, start?: number } = {}
|
||||
): Promise<LaxWritable> {
|
||||
return timeout.call(
|
||||
this._createWriteStream(
|
||||
typeof file === 'string' ? normalizePath(file) : file,
|
||||
{
|
||||
flags: 'wx',
|
||||
...options,
|
||||
}
|
||||
)
|
||||
)
|
||||
}
|
||||
|
||||
// Free the resources possibly dedicated to put the remote at work, when it
|
||||
@@ -202,12 +231,18 @@ export default class RemoteHandlerAbstract {
|
||||
}
|
||||
|
||||
async getSize(file: File): Promise<number> {
|
||||
return timeout.call(this._getSize(typeof file === 'string' ? normalizePath(file) : file), this._timeout)
|
||||
return timeout.call(
|
||||
this._getSize(typeof file === 'string' ? normalizePath(file) : file),
|
||||
this._timeout
|
||||
)
|
||||
}
|
||||
|
||||
async list(
|
||||
dir: string,
|
||||
{ filter, prependDir = false }: { filter?: (name: string) => boolean, prependDir?: boolean } = {}
|
||||
{
|
||||
filter,
|
||||
prependDir = false,
|
||||
}: { filter?: (name: string) => boolean, prependDir?: boolean } = {}
|
||||
): Promise<string[]> {
|
||||
const virtualDir = normalizePath(dir)
|
||||
dir = normalizePath(dir)
|
||||
@@ -226,47 +261,95 @@ export default class RemoteHandlerAbstract {
|
||||
return entries
|
||||
}
|
||||
|
||||
async mkdir(dir: string, { mode }: { mode?: number } = {}): Promise<void> {
|
||||
await this.__mkdir(normalizePath(dir), { mode })
|
||||
async mkdir(dir: string): Promise<void> {
|
||||
dir = normalizePath(dir)
|
||||
try {
|
||||
await this._mkdir(dir)
|
||||
} catch (error) {
|
||||
if (error == null || error.code !== 'EEXIST') {
|
||||
throw error
|
||||
}
|
||||
|
||||
// this operation will throw if it's not already a directory
|
||||
await this._list(dir)
|
||||
}
|
||||
}
|
||||
|
||||
async mktree(dir: string, { mode }: { mode?: number } = {}): Promise<void> {
|
||||
await this._mktree(normalizePath(dir), { mode })
|
||||
async mktree(dir: string): Promise<void> {
|
||||
await this._mktree(normalizePath(dir))
|
||||
}
|
||||
|
||||
openFile(path: string, flags: string): Promise<FileDescriptor> {
|
||||
return this.__openFile(path, flags)
|
||||
async openFile(path: string, flags: string): Promise<FileDescriptor> {
|
||||
path = normalizePath(path)
|
||||
|
||||
return {
|
||||
fd: await timeout.call(this._openFile(path, flags), this._timeout),
|
||||
path,
|
||||
}
|
||||
}
|
||||
|
||||
async outputFile(
|
||||
file: string,
|
||||
data: Data,
|
||||
{ dirMode, flags = 'wx' }: { dirMode?: number, flags?: string } = {}
|
||||
{ flags = 'wx' }: { flags?: string } = {}
|
||||
): Promise<void> {
|
||||
await this._outputFile(normalizePath(file), data, { dirMode, flags })
|
||||
await this._outputFile(normalizePath(file), data, { flags })
|
||||
}
|
||||
|
||||
async read(file: File, buffer: Buffer, position?: number): Promise<{| bytesRead: number, buffer: Buffer |}> {
|
||||
return this._read(typeof file === 'string' ? normalizePath(file) : file, buffer, position)
|
||||
async read(
|
||||
file: File,
|
||||
buffer: Buffer,
|
||||
position?: number
|
||||
): Promise<{| bytesRead: number, buffer: Buffer |}> {
|
||||
return this._read(
|
||||
typeof file === 'string' ? normalizePath(file) : file,
|
||||
buffer,
|
||||
position
|
||||
)
|
||||
}
|
||||
|
||||
async readFile(file: string, { flags = 'r' }: { flags?: string } = {}): Promise<Buffer> {
|
||||
async readFile(
|
||||
file: string,
|
||||
{ flags = 'r' }: { flags?: string } = {}
|
||||
): Promise<Buffer> {
|
||||
return this._readFile(normalizePath(file), { flags })
|
||||
}
|
||||
|
||||
async rename(oldPath: string, newPath: string, { checksum = false }: Object = {}) {
|
||||
async refreshChecksum(path: string): Promise<void> {
|
||||
path = normalizePath(path)
|
||||
|
||||
const stream = (await this._createReadStream(path, { flags: 'r' })).pipe(
|
||||
createChecksumStream()
|
||||
)
|
||||
stream.resume() // start reading the whole file
|
||||
await this._outputFile(checksumFile(path), await stream.checksum, {
|
||||
flags: 'wx',
|
||||
})
|
||||
}
|
||||
|
||||
async rename(
|
||||
oldPath: string,
|
||||
newPath: string,
|
||||
{ checksum = false }: Object = {}
|
||||
) {
|
||||
oldPath = normalizePath(oldPath)
|
||||
newPath = normalizePath(newPath)
|
||||
|
||||
let p = timeout.call(this._rename(oldPath, newPath), this._timeout)
|
||||
if (checksum) {
|
||||
p = Promise.all([p, this._rename(checksumFile(oldPath), checksumFile(newPath))])
|
||||
p = Promise.all([
|
||||
p,
|
||||
this._rename(checksumFile(oldPath), checksumFile(newPath)),
|
||||
])
|
||||
}
|
||||
return p
|
||||
}
|
||||
|
||||
async rmdir(dir: string): Promise<void> {
|
||||
await timeout.call(this._rmdir(normalizePath(dir)).catch(ignoreEnoent), this._timeout)
|
||||
await timeout.call(
|
||||
this._rmdir(normalizePath(dir)).catch(ignoreEnoent),
|
||||
this._timeout
|
||||
)
|
||||
}
|
||||
|
||||
async rmtree(dir: string): Promise<void> {
|
||||
@@ -285,7 +368,7 @@ export default class RemoteHandlerAbstract {
|
||||
async test(): Promise<Object> {
|
||||
const SIZE = 1024 * 1024 * 10
|
||||
const testFileName = normalizePath(`${Date.now()}.test`)
|
||||
const data = await fromCallback(randomBytes, SIZE)
|
||||
const data = await fromCallback(cb => randomBytes(SIZE, cb))
|
||||
let step = 'write'
|
||||
try {
|
||||
const writeStart = process.hrtime()
|
||||
@@ -331,49 +414,33 @@ export default class RemoteHandlerAbstract {
|
||||
await this._unlink(file).catch(ignoreEnoent)
|
||||
}
|
||||
|
||||
async write(file: File, buffer: Buffer, position: number): Promise<{| bytesWritten: number, buffer: Buffer |}> {
|
||||
await this._write(typeof file === 'string' ? normalizePath(file) : file, buffer, position)
|
||||
async write(
|
||||
file: File,
|
||||
buffer: Buffer,
|
||||
position: number
|
||||
): Promise<{| bytesWritten: number, buffer: Buffer |}> {
|
||||
await this._write(
|
||||
typeof file === 'string' ? normalizePath(file) : file,
|
||||
buffer,
|
||||
position
|
||||
)
|
||||
}
|
||||
|
||||
async writeFile(file: string, data: Data, { flags = 'wx' }: { flags?: string } = {}): Promise<void> {
|
||||
async writeFile(
|
||||
file: string,
|
||||
data: Data,
|
||||
{ flags = 'wx' }: { flags?: string } = {}
|
||||
): Promise<void> {
|
||||
await this._writeFile(normalizePath(file), data, { flags })
|
||||
}
|
||||
|
||||
// Methods that can be called by private methods to avoid parallel limit on public methods
|
||||
|
||||
async __closeFile(fd: FileDescriptor): Promise<void> {
|
||||
await timeout.call(this._closeFile(fd.fd), this._timeout)
|
||||
}
|
||||
|
||||
async __mkdir(dir: string, { mode }: { mode?: number } = {}): Promise<void> {
|
||||
try {
|
||||
await this._mkdir(dir, { mode })
|
||||
} catch (error) {
|
||||
if (error == null || error.code !== 'EEXIST') {
|
||||
throw error
|
||||
}
|
||||
|
||||
// this operation will throw if it's not already a directory
|
||||
await this._list(dir)
|
||||
}
|
||||
}
|
||||
|
||||
async __openFile(path: string, flags: string): Promise<FileDescriptor> {
|
||||
path = normalizePath(path)
|
||||
|
||||
return {
|
||||
fd: await timeout.call(this._openFile(path, flags), this._timeout),
|
||||
path,
|
||||
}
|
||||
}
|
||||
|
||||
// Methods that can be implemented by inheriting classes
|
||||
|
||||
async _closeFile(fd: mixed): Promise<void> {
|
||||
throw new Error('Not implemented')
|
||||
}
|
||||
|
||||
async _createOutputStream(file: File, { dirMode, ...options }: Object = {}): Promise<LaxWritable> {
|
||||
async _createOutputStream(file: File, options: Object): Promise<LaxWritable> {
|
||||
try {
|
||||
return await this._createWriteStream(file, options)
|
||||
} catch (error) {
|
||||
@@ -382,7 +449,7 @@ export default class RemoteHandlerAbstract {
|
||||
}
|
||||
}
|
||||
|
||||
await this._mktree(dirname(file), { mode: dirMode })
|
||||
await this._mktree(dirname(file))
|
||||
return this._createOutputStream(file, options)
|
||||
}
|
||||
|
||||
@@ -413,94 +480,45 @@ export default class RemoteHandlerAbstract {
|
||||
throw new Error('Not implemented')
|
||||
}
|
||||
|
||||
async _mktree(dir: string, { mode }: { mode?: number } = {}): Promise<void> {
|
||||
async _mktree(dir: string): Promise<void> {
|
||||
try {
|
||||
return await this.__mkdir(dir, { mode })
|
||||
return await this.mkdir(dir)
|
||||
} catch (error) {
|
||||
if (error.code !== 'ENOENT') {
|
||||
throw error
|
||||
}
|
||||
}
|
||||
|
||||
await this._mktree(dirname(dir), { mode })
|
||||
return this._mktree(dir, { mode })
|
||||
await this._mktree(dirname(dir))
|
||||
return this._mktree(dir)
|
||||
}
|
||||
|
||||
async _openFile(path: string, flags: string): Promise<mixed> {
|
||||
throw new Error('Not implemented')
|
||||
}
|
||||
|
||||
async _outputFile(file: string, data: Data, { dirMode, flags }: { dirMode?: number, flags?: string }): Promise<void> {
|
||||
async _outputFile(
|
||||
file: string,
|
||||
data: Data,
|
||||
options: { flags?: string }
|
||||
): Promise<void> {
|
||||
try {
|
||||
return await this._writeFile(file, data, { flags })
|
||||
return await this._writeFile(file, data, options)
|
||||
} catch (error) {
|
||||
if (error.code !== 'ENOENT') {
|
||||
throw error
|
||||
}
|
||||
}
|
||||
|
||||
await this._mktree(dirname(file), { mode: dirMode })
|
||||
return this._outputFile(file, data, { flags })
|
||||
await this._mktree(dirname(file))
|
||||
return this._outputFile(file, data, options)
|
||||
}
|
||||
|
||||
  async _createOutputStreamChecksum(file: File, { checksum = false, ...options }: Object = {}): Promise<LaxWritable> {
    if (typeof file === 'string') {
      file = normalizePath(file)
    }
    const path = typeof file === 'string' ? file : file.path
    const streamP = timeout.call(
      this._createOutputStream(file, {
        flags: 'wx',
        ...options,
      }),
      this._timeout
    )

    if (!checksum) {
      return streamP
    }

    const checksumStream = createChecksumStream()
    const forwardError = error => {
      checksumStream.emit('error', error)
    }

    const stream = await streamP
    stream.on('error', forwardError)
    checksumStream.pipe(stream)

    // $FlowFixMe
    checksumStream.checksumWritten = checksumStream.checksum
      .then(value => this._outputFile(checksumFile(path), value, { flags: 'wx' }))
      .catch(forwardError)

    return checksumStream
  }

  async _outputStream(
    input: Readable,
    path: string,
    { checksum, dirMode, cancelToken = CancelToken.none }: { checksum?: boolean, dirMode?: number }
  ) {
    const tmpPath = `${dirname(path)}/.${basename(path)}`
    const output = await this._createOutputStreamChecksum(tmpPath, { checksum })
    try {
      cancelToken.promise.then(reason => {
        input.destroy(reason)
      })
      input.pipe(output)
      await fromEvent(output, 'finish')
      await output.checksumWritten
      // $FlowFixMe
      await input.task
      await this.rename(tmpPath, path, { checksum })
    } catch (error) {
      await this.unlink(tmpPath, { checksum })
      throw error
    }
  }

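`_outputStream` above is the classic atomic-write pattern: data lands in a hidden sibling file, and only a complete, checksummed transfer is renamed into place, so readers never observe a partially written backup. A hypothetical call, inside an async function:

```js
const { createReadStream } = require('fs')

// writes backups/.vm.xva first, then renames it to backups/vm.xva on success
await handler.outputStream('backups/vm.xva', createReadStream('/tmp/vm.xva'), {
  checksum: true,
})
```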
  _read(file: File, buffer: Buffer, position?: number): Promise<{| bytesRead: number, buffer: Buffer |}> {
  _read(
    file: File,
    buffer: Buffer,
    position?: number
  ): Promise<{| bytesRead: number, buffer: Buffer |}> {
    throw new Error('Not implemented')
  }

@@ -547,22 +565,30 @@ export default class RemoteHandlerAbstract {
  async _write(file: File, buffer: Buffer, position: number): Promise<void> {
    const isPath = typeof file === 'string'
    if (isPath) {
      file = await this.__openFile(file, 'r+')
      file = await this.openFile(file, 'r+')
    }
    try {
      return await this._writeFd(file, buffer, position)
    } finally {
      if (isPath) {
        await this.__closeFile(file)
        await this.closeFile(file)
      }
    }
  }

  async _writeFd(fd: FileDescriptor, buffer: Buffer, position: number): Promise<void> {
  async _writeFd(
    fd: FileDescriptor,
    buffer: Buffer,
    position: number
  ): Promise<void> {
    throw new Error('Not implemented')
  }

  async _writeFile(file: string, data: Data, options: { flags?: string }): Promise<void> {
  async _writeFile(
    file: string,
    data: Data,
    options: { flags?: string }
  ): Promise<void> {
    throw new Error('Not implemented')
  }
}
@@ -582,12 +608,13 @@ function createPrefixWrapperMethods() {
    if (
      hasOwnProperty.call(pPw, name) ||
      name[0] === '_' ||
      typeof (value = (descriptor = getOwnPropertyDescriptor(pRha, name)).value) !== 'function'
      typeof (value = (descriptor = getOwnPropertyDescriptor(pRha, name))
        .value) !== 'function'
    ) {
      return
    }

    descriptor.value = function () {
    descriptor.value = function() {
      let path
      if (arguments.length !== 0 && typeof (path = arguments[0]) === 'string') {
        arguments[0] = this._resolve(path)

@@ -30,6 +30,30 @@ describe('closeFile()', () => {
  })
})

describe('createOutputStream()', () => {
  it(`throws in case of timeout`, async () => {
    const testHandler = new TestHandler({
      createOutputStream: () => new Promise(() => {}),
    })

    const promise = testHandler.createOutputStream('File')
    jest.advanceTimersByTime(TIMEOUT)
    await expect(promise).rejects.toThrowError(TimeoutError)
  })
})

describe('createReadStream()', () => {
  it(`throws in case of timeout`, async () => {
    const testHandler = new TestHandler({
      createReadStream: () => new Promise(() => {}),
    })

    const promise = testHandler.createReadStream('file')
    jest.advanceTimersByTime(TIMEOUT)
    await expect(promise).rejects.toThrowError(TimeoutError)
  })
})

describe('getInfo()', () => {
  it('throws in case of timeout', async () => {
    const testHandler = new TestHandler({

@@ -27,7 +27,9 @@ const ID_TO_ALGORITHM = invert(ALGORITHM_TO_ID)
// const checksumStream = source.pipe(createChecksumStream())
// checksumStream.resume() // make the data flow without an output
// console.log(await checksumStream.checksum)
export const createChecksumStream = (algorithm: string = 'md5'): Transform & { checksum: Promise<string> } => {
export const createChecksumStream = (
  algorithm: string = 'md5'
): Transform & { checksum: Promise<string> } => {
  const algorithmId = ALGORITHM_TO_ID[algorithm]

  if (!algorithmId) {
@@ -58,7 +60,10 @@ export const validChecksumOfReadStream = (
  stream: Readable,
  expectedChecksum: string
): Readable & { checksumVerified: Promise<void> } => {
  const algorithmId = expectedChecksum.slice(1, expectedChecksum.indexOf('$', 1))
  const algorithmId = expectedChecksum.slice(
    1,
    expectedChecksum.indexOf('$', 1)
  )

  if (!algorithmId) {
    throw new Error(`unknown algorithm: ${algorithmId}`)
@@ -77,7 +82,11 @@ export const validChecksumOfReadStream = (
      const checksum = `$${algorithmId}$$${hash.digest('hex')}`

      callback(
        checksum !== expectedChecksum ? new Error(`Bad checksum (${checksum}), expected: ${expectedChecksum}`) : null
        checksum !== expectedChecksum
          ? new Error(
              `Bad checksum (${checksum}), expected: ${expectedChecksum}`
            )
          : null
      )
    }
  )
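Editor's note: a hedged usage sketch of the two helpers above, based only on the signatures shown in this hunk (`source` and `stream` are assumed readable streams; checksum strings have the `$<algorithm id>$$<hex digest>` form built above):

```js
// compute a checksum while the data flows through
const checksumStream = source.pipe(createChecksumStream('md5'))
checksumStream.resume() // no consumer, just let the data flow
const checksum = await checksumStream.checksum // '$<algorithm id>$$<hex digest>'

// later, verify another read of the same data against that value
const verified = validChecksumOfReadStream(stream, checksum)
verified.resume()
await verified.checksumVerified // rejects with a "Bad checksum" error on mismatch
```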

@@ -2,10 +2,13 @@

import 'dotenv/config'
import asyncIteratorToStream from 'async-iterator-to-stream'
import getStream from 'get-stream'
import { forOwn, random } from 'lodash'
import { fromCallback } from 'promise-toolbox'
import { pipeline } from 'readable-stream'
import { tmpdir } from 'os'

import { getHandler } from './'
import { getHandler } from '.'

// https://gist.github.com/julien-f/3228c3f34fdac01ade09
const unsecureRandomBytes = n => {
@@ -25,7 +28,7 @@ const unsecureRandomBytes = n => {

const TEST_DATA_LEN = 1024
const TEST_DATA = unsecureRandomBytes(TEST_DATA_LEN)
const createTestDataStream = asyncIteratorToStream(function* () {
const createTestDataStream = asyncIteratorToStream(function*() {
  yield TEST_DATA
})

@@ -80,13 +83,39 @@ handlers.forEach(url => {
    })
  })

  describe('#outputStream()', () => {
  describe('#createOutputStream()', () => {
    it('creates parent dir if missing', async () => {
      await handler.outputStream('dir/file', createTestDataStream())
      const stream = await handler.createOutputStream('dir/file')
      await fromCallback(cb => pipeline(createTestDataStream(), stream, cb))
      await expect(await handler.readFile('dir/file')).toEqual(TEST_DATA)
    })
  })

  describe('#createReadStream()', () => {
    beforeEach(() => handler.outputFile('file', TEST_DATA))

    testWithFileDescriptor('file', 'r', async ({ file, flags }) => {
      await expect(
        await getStream.buffer(
          await handler.createReadStream(file, { flags })
        )
      ).toEqual(TEST_DATA)
    })
  })

  describe('#createWriteStream()', () => {
    testWithFileDescriptor('file', 'wx', async ({ file, flags }) => {
      const stream = await handler.createWriteStream(file, { flags })
      await fromCallback(cb => pipeline(createTestDataStream(), stream, cb))
      await expect(await handler.readFile('file')).toEqual(TEST_DATA)
    })

    it('fails if parent dir is missing', async () => {
      const error = await rejectionOf(handler.createWriteStream('dir/file'))
      expect(error.code).toBe('ENOENT')
    })
  })

  describe('#getInfo()', () => {
    let info
    beforeAll(async () => {
@@ -123,12 +152,16 @@ handlers.forEach(url => {

    it('can prepend the directory to entries', async () => {
      await handler.outputFile('dir/file', '')
      expect(await handler.list('dir', { prependDir: true })).toEqual(['/dir/file'])
      expect(await handler.list('dir', { prependDir: true })).toEqual([
        '/dir/file',
      ])
    })

    it('can prepend the directory to entries', async () => {
      await handler.outputFile('dir/file', '')
      expect(await handler.list('dir', { prependDir: true })).toEqual(['/dir/file'])
      expect(await handler.list('dir', { prependDir: true })).toEqual([
        '/dir/file',
      ])
    })
  })

@@ -186,12 +219,6 @@ handlers.forEach(url => {
      const error = await rejectionOf(handler.outputFile('file', ''))
      expect(error.code).toBe('EEXIST')
    })

it("shouldn't timeout in case of the respect of the parallel execution restriction", async () => {
|
||||
const handler = getHandler({ url }, { maxParallelOperations: 1 })
|
||||
await handler.sync()
|
||||
await handler.outputFile(`xo-fs-tests-${Date.now()}/test`, '')
|
||||
}, 40)
|
||||
})

  describe('#read()', () => {
@@ -301,7 +328,10 @@ handlers.forEach(url => {
        return { offset, expected }
      })(),
      'increase file size': (() => {
        const offset = random(TEST_DATA_LEN - PATCH_DATA_LEN + 1, TEST_DATA_LEN)
        const offset = random(
          TEST_DATA_LEN - PATCH_DATA_LEN + 1,
          TEST_DATA_LEN
        )

        const expected = Buffer.alloc(offset + PATCH_DATA_LEN)
        TEST_DATA.copy(expected)

@@ -4,7 +4,6 @@ import execa from 'execa'
import type RemoteHandler from './abstract'
import RemoteHandlerLocal from './local'
import RemoteHandlerNfs from './nfs'
import RemoteHandlerS3 from './s3'
import RemoteHandlerSmb from './smb'
import RemoteHandlerSmbMount from './smb-mount'

@@ -14,7 +13,6 @@ export type Remote = { url: string }
const HANDLERS = {
  file: RemoteHandlerLocal,
  nfs: RemoteHandlerNfs,
  s3: RemoteHandlerS3,
}
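// (editor's note) getHandler dispatches on the URL scheme through this map,
// e.g. getHandler({ url: 'nfs://host/export' }) instantiates RemoteHandlerNfs.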

try {

@@ -47,23 +47,14 @@ export default class LocalHandler extends RemoteHandlerAbstract {
    })
  }

  async _getInfo() {
    // df.file() resolves with an object with the following properties:
    // filesystem, type, size, used, available, capacity and mountpoint.
    // size, used, available and capacity may be `NaN` so we remove any `NaN`
    // value from the object.
    const info = await df.file(this._getFilePath('/'))
    Object.keys(info).forEach(key => {
      if (Number.isNaN(info[key])) {
        delete info[key]
      }
    })

    return info
  _getInfo() {
    return df.file(this._getFilePath('/'))
  }

  async _getSize(file) {
    const stats = await fs.stat(this._getFilePath(typeof file === 'string' ? file : file.path))
    const stats = await fs.stat(
      this._getFilePath(typeof file === 'string' ? file : file.path)
    )
    return stats.size
  }

@@ -71,8 +62,8 @@ export default class LocalHandler extends RemoteHandlerAbstract {
    return fs.readdir(this._getFilePath(dir))
  }

  _mkdir(dir, { mode }) {
    return fs.mkdir(this._getFilePath(dir), { mode })
  _mkdir(dir) {
    return fs.mkdir(this._getFilePath(dir))
  }

  async _openFile(path, flags) {
@@ -83,7 +74,13 @@ export default class LocalHandler extends RemoteHandlerAbstract {
    const needsClose = typeof file === 'string'
    file = needsClose ? await fs.open(this._getFilePath(file), 'r') : file.fd
    try {
      return await fs.read(file, buffer, 0, buffer.length, position === undefined ? null : position)
      return await fs.read(
        file,
        buffer,
        0,
        buffer.length,
        position === undefined ? null : position
      )
    } finally {
      if (needsClose) {
        await fs.close(file)

@@ -2,13 +2,15 @@ import { parse } from 'xo-remote-parser'

import MountHandler from './_mount'

const DEFAULT_NFS_OPTIONS = 'vers=3'

export default class NfsHandler extends MountHandler {
  constructor(remote, opts) {
    const { host, port, path } = parse(remote.url)
    super(remote, opts, {
      type: 'nfs',
      device: `${host}${port !== undefined ? ':' + port : ''}:${path}`,
      defaultOptions: 'vers=3',
      options: DEFAULT_NFS_OPTIONS,
    })
  }
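  // (editor's note) for example, a remote URL such as `nfs://192.168.0.1:2050/media/nfs`
  // yields the mount device `192.168.0.1:2050:/media/nfs`; without an explicit
  // port the device is simply `192.168.0.1:/media/nfs`.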

@@ -1,303 +0,0 @@
import aws from '@sullux/aws-sdk'
import assert from 'assert'
import http from 'http'
import { parse } from 'xo-remote-parser'

import RemoteHandlerAbstract from './abstract'
import { createChecksumStream } from './checksum'
import CancelToken from 'promise-toolbox/CancelToken'

// endpoints https://docs.aws.amazon.com/general/latest/gr/s3.html

// limits: https://docs.aws.amazon.com/AmazonS3/latest/dev/qfacts.html
const MIN_PART_SIZE = 1024 * 1024 * 5 // 5MB
const MAX_PART_SIZE = 1024 * 1024 * 1024 * 5 // 5GB
const MAX_PARTS_COUNT = 10000
const MAX_OBJECT_SIZE = 1024 * 1024 * 1024 * 1024 * 5 // 5TB
const IDEAL_FRAGMENT_SIZE = Math.ceil(MAX_OBJECT_SIZE / MAX_PARTS_COUNT) // the smallest fragment size that still allows a 5TB upload in 10000 fragments, about 524MB
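// (editor's note) i.e. Math.ceil(5 * 1024 ** 4 / 10000) = 549755814 bytes, about 524 MiB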

const USE_SSL = true
export default class S3Handler extends RemoteHandlerAbstract {
  constructor(remote, _opts) {
    super(remote)
    const { host, path, username, password } = parse(remote.url)
    // https://www.zenko.io/blog/first-things-first-getting-started-scality-s3-server/
    const params = {
      accessKeyId: username,
      apiVersion: '2006-03-01',
      endpoint: host,
      s3ForcePathStyle: true,
      secretAccessKey: password,
      signatureVersion: 'v4',
      httpOptions: {
        timeout: 600000,
      },
    }
    if (!USE_SSL) {
      params.httpOptions.agent = new http.Agent()
      params.sslEnabled = false
    }
    this._s3 = aws(params).s3

    const splitPath = path.split('/').filter(s => s.length)
    this._bucket = splitPath.shift()
    this._dir = splitPath.join('/')
  }

  get type() {
    return 's3'
  }

  _createParams(file) {
    return { Bucket: this._bucket, Key: this._dir + file }
  }

  async _outputStream(input, path, { checksum, cancelToken = CancelToken.none }) {
    cancelToken.promise.then(reason => {
      input.destroy(reason)
    })
    let inputStream = input
    if (checksum) {
      const checksumStream = createChecksumStream()
      const forwardError = error => {
        checksumStream.emit('error', error)
      }
      input.pipe(checksumStream)
      input.on('error', forwardError)
      inputStream = checksumStream
    }
    await this._s3.upload(
      {
        ...this._createParams(path),
        Body: inputStream,
      },
      { partSize: IDEAL_FRAGMENT_SIZE, queueSize: 1 }
    )
    if (checksum) {
      const checksum = await inputStream.checksum
      const params = {
        ...this._createParams(path + '.checksum'),
        Body: checksum,
      }
      await this._s3.upload(params)
    }
    await input.task
  }
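  // (editor's note) when `checksum` is set, the digest computed on the fly is
  // stored next to the object as `<path>.checksum`, using the same
  // `$<algorithm id>$$<hex>` format that validChecksumOfReadStream() expects.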

  async _writeFile(file, data, options) {
    return this._s3.putObject({ ...this._createParams(file), Body: data })
  }

  async _createReadStream(file, options) {
    // https://github.com/Sullux/aws-sdk/issues/11
    return this._s3.getObject.raw(this._createParams(file)).createReadStream()
  }

  async _unlink(file) {
    return this._s3.deleteObject(this._createParams(file))
  }

  async _list(dir) {
    function splitPath(path) {
      return path.split('/').filter(d => d.length)
    }

    const prefix = [this._dir, dir].join('/')
    const splitPrefix = splitPath(prefix)
    const result = await this._s3.listObjectsV2({
      Bucket: this._bucket,
      Prefix: splitPrefix.join('/'),
    })
    const uniq = new Set()
    for (const entry of result.Contents) {
      const line = splitPath(entry.Key)
      if (line.length > splitPrefix.length) {
        uniq.add(line[splitPrefix.length])
      }
    }
    return [...uniq]
  }

  async _rename(oldPath, newPath) {
    const size = await this._getSize(oldPath)
    const multipartParams = await this._s3.createMultipartUpload({ ...this._createParams(newPath) })
    const param2 = { ...multipartParams, CopySource: `/${this._bucket}/${this._dir}${oldPath}` }
    try {
      const parts = []
      let start = 0
      while (start < size) {
        const range = `bytes=${start}-${Math.min(start + MAX_PART_SIZE, size) - 1}`
        const partParams = { ...param2, PartNumber: parts.length + 1, CopySourceRange: range }
        const upload = await this._s3.uploadPartCopy(partParams)
        parts.push({ ETag: upload.CopyPartResult.ETag, PartNumber: partParams.PartNumber })
        start += MAX_PART_SIZE
      }
      await this._s3.completeMultipartUpload({ ...multipartParams, MultipartUpload: { Parts: parts } })
    } catch (e) {
      await this._s3.abortMultipartUpload(multipartParams)
      throw e
    }
    await this._s3.deleteObject(this._createParams(oldPath))
  }

  async _getSize(file) {
    if (typeof file !== 'string') {
      file = file.fd
    }
    const result = await this._s3.headObject(this._createParams(file))
    return +result.ContentLength
  }

  async _read(file, buffer, position = 0) {
    if (typeof file !== 'string') {
      file = file.fd
    }
    const params = this._createParams(file)
    params.Range = `bytes=${position}-${position + buffer.length - 1}`
    const result = await this._s3.getObject(params)
    result.Body.copy(buffer)
    return { bytesRead: result.Body.length, buffer }
  }

  async _write(file, buffer, position) {
    if (typeof file !== 'string') {
      file = file.fd
    }
    const uploadParams = this._createParams(file)
    let fileSize
    try {
      fileSize = +(await this._s3.headObject(uploadParams)).ContentLength
    } catch (e) {
      if (e.code === 'NotFound') {
        fileSize = 0
      } else {
        throw e
      }
    }
    if (fileSize < MIN_PART_SIZE) {
      const resultBuffer = Buffer.alloc(Math.max(fileSize, position + buffer.length))
      const fileContent = fileSize !== 0 ? (await this._s3.getObject(uploadParams)).Body : Buffer.alloc(0)
      fileContent.copy(resultBuffer)
      buffer.copy(resultBuffer, position)
      await this._s3.putObject({ ...uploadParams, Body: resultBuffer })
      return { buffer, bytesWritten: buffer.length }
    } else {
      // using this trick: https://stackoverflow.com/a/38089437/72637
      // multipart fragments have a minimum size of 5 MB and a max of 5 GB unless they are last
      // splitting the file in 3 parts: [prefix, edit, suffix]
      // if `prefix` is bigger than 5 MB, it will be sourced from uploadPartCopy()
      // otherwise it will be downloaded and concatenated to `edit`
      // `edit` will always be an upload part
      // `suffix` will always be sourced from uploadPartCopy()
      // Then everything will be sliced in 5 GB parts before getting uploaded
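      // (editor's worked example) fileSize = 100 MiB, position = 50 MiB,
      // buffer = 6 MiB: the 50 MiB prefix is copied server-side as one part
      // (it is >= 5 MB and <= 5 GB), the 6 MiB buffer is uploaded as the edit
      // part, and the remaining 44 MiB suffix is copied as a single last part.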
      const multipartParams = await this._s3.createMultipartUpload(uploadParams)
      const copyMultipartParams = {
        ...multipartParams,
        CopySource: `/${this._bucket}/${this._dir + file}`,
      }
      try {
        const parts = []
        const prefixSize = position
        let suffixOffset = prefixSize + buffer.length
        let suffixSize = Math.max(0, fileSize - suffixOffset)
        let hasSuffix = suffixSize > 0
        let editBuffer = buffer
        let editBufferOffset = position
        let partNumber = 1
        let prefixPosition = 0
        // use floor() so that the last fragment is handled in the if below
        let fragmentsCount = Math.floor(prefixSize / MAX_PART_SIZE)
        const prefixFragmentSize = MAX_PART_SIZE
        let prefixLastFragmentSize = prefixSize - prefixFragmentSize * fragmentsCount
        if (prefixLastFragmentSize >= MIN_PART_SIZE) {
          // the last fragment of the prefix is smaller than MAX_PART_SIZE, but bigger than the minimum
          // so we can copy it too
          fragmentsCount++
          prefixLastFragmentSize = 0
        }
        for (let i = 0; i < fragmentsCount; i++) {
          const fragmentEnd = Math.min(prefixPosition + prefixFragmentSize, prefixSize)
          assert.strictEqual(fragmentEnd - prefixPosition <= MAX_PART_SIZE, true)
          const range = `bytes=${prefixPosition}-${fragmentEnd - 1}`
          const copyPrefixParams = { ...copyMultipartParams, PartNumber: partNumber++, CopySourceRange: range }
          const part = await this._s3.uploadPartCopy(copyPrefixParams)
          parts.push({ ETag: part.CopyPartResult.ETag, PartNumber: copyPrefixParams.PartNumber })
          prefixPosition += prefixFragmentSize
        }
        if (prefixLastFragmentSize) {
          // grab everything from the prefix that was too small to be copied, download and merge to the edit buffer.
          const downloadParams = { ...uploadParams, Range: `bytes=${prefixPosition}-${prefixSize - 1}` }
          const prefixBuffer = prefixSize > 0 ? (await this._s3.getObject(downloadParams)).Body : Buffer.alloc(0)
          editBuffer = Buffer.concat([prefixBuffer, buffer])
          editBufferOffset -= prefixLastFragmentSize
        }
        if (hasSuffix && editBuffer.length < MIN_PART_SIZE) {
          // the edit fragment is too short and is not the last fragment
          // let's steal from the suffix fragment to reach the minimum size
          // the suffix might be too short and itself entirely absorbed in the edit fragment, making it the last one.
          const complementSize = Math.min(MIN_PART_SIZE - editBuffer.length, suffixSize)
          const complementOffset = editBufferOffset + editBuffer.length
          suffixOffset += complementSize
          suffixSize -= complementSize
          hasSuffix = suffixSize > 0
          const prefixRange = `bytes=${complementOffset}-${complementOffset + complementSize - 1}`
          const downloadParams = { ...uploadParams, Range: prefixRange }
          const complementBuffer = (await this._s3.getObject(downloadParams)).Body
          editBuffer = Buffer.concat([editBuffer, complementBuffer])
        }
        const editParams = { ...multipartParams, Body: editBuffer, PartNumber: partNumber++ }
        const editPart = await this._s3.uploadPart(editParams)
        parts.push({ ETag: editPart.ETag, PartNumber: editParams.PartNumber })
        if (hasSuffix) {
          // use ceil because the last fragment can be arbitrarily small.
          const suffixFragments = Math.ceil(suffixSize / MAX_PART_SIZE)
          let suffixFragmentOffset = suffixOffset
          for (let i = 0; i < suffixFragments; i++) {
            const fragmentEnd = suffixFragmentOffset + MAX_PART_SIZE
            assert.strictEqual(Math.min(fileSize, fragmentEnd) - suffixFragmentOffset <= MAX_PART_SIZE, true)
            const suffixRange = `bytes=${suffixFragmentOffset}-${Math.min(fileSize, fragmentEnd) - 1}`
            const copySuffixParams = { ...copyMultipartParams, PartNumber: partNumber++, CopySourceRange: suffixRange }
            const suffixPart = (await this._s3.uploadPartCopy(copySuffixParams)).CopyPartResult
            parts.push({ ETag: suffixPart.ETag, PartNumber: copySuffixParams.PartNumber })
            suffixFragmentOffset = fragmentEnd
          }
        }
        await this._s3.completeMultipartUpload({
          ...multipartParams,
          MultipartUpload: { Parts: parts },
        })
      } catch (e) {
        await this._s3.abortMultipartUpload(multipartParams)
        throw e
      }
    }
  }

  async _openFile(path, flags) {
    return path
  }

  async _closeFile(fd) {}

  // https://stackoverflow.com/a/48955582/72637
  async _rmtree(dir) {
    const listParams = {
      Bucket: this._bucket,
      Prefix: this._dir + dir,
    }
    let listedObjects = {}
    do {
      listedObjects = await this._s3.listObjectsV2({
        ...listParams,
        ContinuationToken: listedObjects.NextContinuationToken,
      })
      if (listedObjects.Contents.length === 0) {
        return
      }
      await this._s3.deleteObjects({
        Bucket: this._bucket,
        Delete: { Objects: listedObjects.Contents.map(({ Key }) => ({ Key })) },
      })
    } while (listedObjects.IsTruncated)
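    // (editor's note) listObjectsV2() returns at most 1000 keys per call, so
    // this loop deletes one page at a time and follows NextContinuationToken
    // while IsTruncated is set.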
  }
}

@@ -5,7 +5,9 @@ import normalizePath from './_normalizePath'

export default class SmbMountHandler extends MountHandler {
  constructor(remote, opts) {
    const { domain = 'WORKGROUP', host, password, path, username } = parse(remote.url)
    const { domain = 'WORKGROUP', host, password, path, username } = parse(
      remote.url
    )
    super(remote, opts, {
      type: 'cifs',
      device: '//' + host + normalizePath(path),

@@ -17,7 +17,8 @@ const normalizeError = (error, shouldBeDirectory) => {
    ? wrapError(error, 'EISDIR')
    : code === 'STATUS_NOT_A_DIRECTORY'
    ? wrapError(error, 'ENOTDIR')
    : code === 'STATUS_OBJECT_NAME_NOT_FOUND' || code === 'STATUS_OBJECT_PATH_NOT_FOUND'
    : code === 'STATUS_OBJECT_NAME_NOT_FOUND' ||
      code === 'STATUS_OBJECT_PATH_NOT_FOUND'
    ? wrapError(error, 'ENOENT')
    : code === 'STATUS_OBJECT_NAME_COLLISION'
    ? wrapError(error, 'EEXIST')
@@ -43,7 +44,12 @@ export default class SmbHandler extends RemoteHandlerAbstract {
  }

  _getFilePath(file) {
    return this._prefix + (typeof file === 'string' ? file : file.path).slice(1).replace(/\//g, '\\')
    return (
      this._prefix +
      (typeof file === 'string' ? file : file.path)
        .slice(1)
        .replace(/\//g, '\\')
    )
  }
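  // (editor's note) i.e. the leading '/' is dropped and the remaining
  // separators are flipped: '/dir/sub/file' maps to this._prefix followed
  // by dir\sub\file.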

  _dirname(file) {
@@ -90,13 +96,15 @@ export default class SmbHandler extends RemoteHandlerAbstract {
    return this._client.readdir(this._getFilePath(dir)).catch(normalizeDirError)
  }

  _mkdir(dir, { mode }) {
    return this._client.mkdir(this._getFilePath(dir), mode).catch(normalizeDirError)
  _mkdir(dir) {
    return this._client.mkdir(this._getFilePath(dir)).catch(normalizeDirError)
  }

  // TODO: add flags
  _openFile(path, flags) {
    return this._client.open(this._getFilePath(path), flags).catch(normalizeError)
    return this._client
      .open(this._getFilePath(path), flags)
      .catch(normalizeError)
  }

  async _read(file, buffer, position) {
@@ -115,7 +123,9 @@ export default class SmbHandler extends RemoteHandlerAbstract {
  }

  _readFile(file, options) {
    return this._client.readFile(this._getFilePath(file), options).catch(normalizeError)
    return this._client
      .readFile(this._getFilePath(file), options)
      .catch(normalizeError)
  }

  _rename(oldPath, newPath) {
@@ -146,7 +156,9 @@ export default class SmbHandler extends RemoteHandlerAbstract {
  }

  _truncate(file, len) {
    return this._client.truncate(this._getFilePath(file), len).catch(normalizeError)
    return this._client
      .truncate(this._getFilePath(file), len)
      .catch(normalizeError)
  }

  _unlink(file) {
@@ -158,6 +170,8 @@ export default class SmbHandler extends RemoteHandlerAbstract {
  }

  _writeFile(file, data, options) {
    return this._client.writeFile(this._getFilePath(file), data, options).catch(normalizeError)
    return this._client
      .writeFile(this._getFilePath(file), data, options)
      .catch(normalizeError)
  }
}

@@ -1 +1,3 @@
module.exports = require('../../@xen-orchestra/babel-config')(require('./package.json'))
module.exports = require('../../@xen-orchestra/babel-config')(
  require('./package.json')
)

@@ -1,8 +1,6 @@
<!-- DO NOT EDIT MANUALLY, THIS FILE HAS BEEN GENERATED -->
# @xen-orchestra/log [](https://travis-ci.org/vatesfr/xen-orchestra)

# @xen-orchestra/log

[](https://npmjs.org/package/@xen-orchestra/log)  [](https://bundlephobia.com/result?p=@xen-orchestra/log) [](https://npmjs.org/package/@xen-orchestra/log)
> ${pkg.description}

## Install

@@ -17,7 +15,7 @@ Installation of the [npm package](https://npmjs.org/package/@xen-orchestra/log):
Everywhere something should be logged:

```js
import { createLogger } from '@xen-orchestra/log'
import createLogger from '@xen-orchestra/log'

const log = createLogger('my-module')

@@ -26,25 +24,11 @@ log.info('this information is relevant to the user')
log.warn('something went wrong but did not prevent current action')
log.error('something went wrong')
log.fatal('service/app is going down')

// you can add contextual info
log.debug('new API request', {
  method: 'foo',
  params: ['bar', 'baz'],
  user: 'qux'
})

// by convention, errors go into the `error` field
log.error('could not join server', {
  error,
  server: 'example.org',
})
```

Then, at the application level, configure how the logs are handled:

```js
import { createLogger } from '@xen-orchestra/log'
import { configure, catchGlobalErrors } from '@xen-orchestra/log/configure'
import transportConsole from '@xen-orchestra/log/transports/console'
import transportEmail from '@xen-orchestra/log/transports/email'
@@ -53,10 +37,13 @@ const transport = transportEmail({
  service: 'gmail',
  auth: {
    user: 'jane.smith@gmail.com',
    pass: 'H&NbECcpXF|pyXe#%ZEb',
    pass: 'H&NbECcpXF|pyXe#%ZEb'
  },
  from: 'jane.smith@gmail.com',
  to: ['jane.smith@gmail.com', 'sam.doe@yahoo.com'],
  to: [
    'jane.smith@gmail.com',
    'sam.doe@yahoo.com'
  ]
})

configure([
@@ -66,19 +53,19 @@ configure([
    // matched against the namespace of the logs
    filter: process.env.DEBUG,

    transport: transportConsole(),
    transport: transportConsole()
  },
  {
    // only levels >= warn
    level: 'warn',

    transport,
  },
    transport
  }
])

// send all global errors (uncaught exceptions, warnings, unhandled rejections)
// to this logger
catchGlobalErrors(createLogger('app'))
// to this transport
catchGlobalErrors(transport)
```

### Transports
@@ -104,17 +91,18 @@ Configuration:

```js
import transportEmail from '@xen-orchestra/log/transports/email'

configure(
  transportEmail({
    service: 'gmail',
    auth: {
      user: 'jane.smith@gmail.com',
      pass: 'H&NbECcpXF|pyXe#%ZEb',
    },
    from: 'jane.smith@gmail.com',
    to: ['jane.smith@gmail.com', 'sam.doe@yahoo.com'],
  })
)
configure(transportEmail({
  service: 'gmail',
  auth: {
    user: 'jane.smith@gmail.com',
    pass: 'H&NbECcpXF|pyXe#%ZEb'
  },
  from: 'jane.smith@gmail.com',
  to: [
    'jane.smith@gmail.com',
    'sam.doe@yahoo.com'
  ]
}))
```

#### Syslog
@@ -137,17 +125,36 @@ configure(transportSyslog())
configure(transportSyslog('tcp://syslog.company.lan'))
```

## Development

```
# Install dependencies
> yarn

# Run the tests
> yarn test

# Continuously compile
> yarn dev

# Continuously run the tests
> yarn dev-test

# Build for production (automatically called by npm install)
> yarn build
```

## Contributions

Contributions are _very_ welcomed, either on the documentation or on
Contributions are *very* welcomed, either on the documentation or on
the code.

You may:

- report any [issue](https://github.com/vatesfr/xen-orchestra/issues)
- report any [issue](https://github.com/vatesfr/xo-web/issues/)
  you've encountered;
- fork and create a pull request.

## License

[ISC](https://spdx.org/licenses/ISC) © [Vates SAS](https://vates.fr)
ISC © [Vates SAS](https://vates.fr)

@@ -1,122 +0,0 @@
Everywhere something should be logged:

```js
import { createLogger } from '@xen-orchestra/log'

const log = createLogger('my-module')

log.debug('only useful for debugging')
log.info('this information is relevant to the user')
log.warn('something went wrong but did not prevent current action')
log.error('something went wrong')
log.fatal('service/app is going down')

// you can add contextual info
log.debug('new API request', {
  method: 'foo',
  params: ['bar', 'baz'],
  user: 'qux'
})

// by convention, errors go into the `error` field
log.error('could not join server', {
  error,
  server: 'example.org',
})
```

Then, at the application level, configure how the logs are handled:

```js
import { createLogger } from '@xen-orchestra/log'
import { configure, catchGlobalErrors } from '@xen-orchestra/log/configure'
import transportConsole from '@xen-orchestra/log/transports/console'
import transportEmail from '@xen-orchestra/log/transports/email'

const transport = transportEmail({
  service: 'gmail',
  auth: {
    user: 'jane.smith@gmail.com',
    pass: 'H&NbECcpXF|pyXe#%ZEb',
  },
  from: 'jane.smith@gmail.com',
  to: ['jane.smith@gmail.com', 'sam.doe@yahoo.com'],
})

configure([
  {
    // if filter is a string, then it is pattern
    // (https://github.com/visionmedia/debug#wildcards) which is
    // matched against the namespace of the logs
    filter: process.env.DEBUG,

    transport: transportConsole(),
  },
  {
    // only levels >= warn
    level: 'warn',

    transport,
  },
])

// send all global errors (uncaught exceptions, warnings, unhandled rejections)
// to this logger
catchGlobalErrors(createLogger('app'))
```

### Transports

#### Console

```js
import transportConsole from '@xen-orchestra/log/transports/console'

configure(transportConsole())
```

#### Email

Optional dependency:

```
> yarn add nodemailer pretty-format
```

Configuration:

```js
import transportEmail from '@xen-orchestra/log/transports/email'

configure(
  transportEmail({
    service: 'gmail',
    auth: {
      user: 'jane.smith@gmail.com',
      pass: 'H&NbECcpXF|pyXe#%ZEb',
    },
    from: 'jane.smith@gmail.com',
    to: ['jane.smith@gmail.com', 'sam.doe@yahoo.com'],
  })
)
```

#### Syslog

Optional dependency:

```
> yarn add split-host syslog-client
```

Configuration:

```js
import transportSyslog from '@xen-orchestra/log/transports/syslog'

// By default, log to udp://localhost:514
configure(transportSyslog())

// But TCP, a different host, or a different port can be used
configure(transportSyslog('tcp://syslog.company.lan'))
```

@@ -1,7 +1,6 @@
{
  "private": false,
  "name": "@xen-orchestra/log",
  "version": "0.2.0",
  "version": "0.1.4",
  "license": "ISC",
  "description": "",
  "keywords": [],
@@ -13,8 +12,8 @@
    "url": "https://github.com/vatesfr/xen-orchestra.git"
  },
  "author": {
    "name": "Vates SAS",
    "url": "https://vates.fr"
    "name": "Julien Fontanet",
    "email": "julien.fontanet@vates.fr"
  },
  "preferGlobal": false,
  "main": "dist/",
@@ -32,16 +31,16 @@
  },
  "dependencies": {
    "lodash": "^4.17.4",
    "promise-toolbox": "^0.16.0"
    "promise-toolbox": "^0.12.1"
  },
  "devDependencies": {
    "@babel/cli": "^7.0.0",
    "@babel/core": "^7.0.0",
    "@babel/preset-env": "^7.0.0",
    "babel-plugin-lodash": "^3.3.2",
    "cross-env": "^7.0.2",
    "cross-env": "^5.1.3",
    "index-modules": "^0.3.0",
    "rimraf": "^3.0.0"
    "rimraf": "^2.6.2"
  },
  "scripts": {
    "build": "cross-env NODE_ENV=production babel --source-maps --out-dir=dist/ src/",

@@ -12,15 +12,14 @@ const createTransport = config => {
  if (Array.isArray(config)) {
    const transports = config.map(createTransport)
    const { length } = transports
    return function () {
    return function() {
      for (let i = 0; i < length; ++i) {
        transports[i].apply(this, arguments)
      }
    }
  }

  let { filter } = config
  let transport = createTransport(config.transport)
  let { filter, transport } = config
  const level = resolve(config.level)

  if (filter !== undefined) {
@@ -30,14 +29,14 @@ const createTransport = config => {
    }

    const orig = transport
    transport = function (log) {
    transport = function(log) {
      if ((level !== undefined && log.level >= level) || filter(log)) {
        return orig.apply(this, arguments)
      }
    }
  } else if (level !== undefined) {
    const orig = transport
    transport = function (log) {
    transport = function(log) {
      if (log.level >= level) {
        return orig.apply(this, arguments)
      }
@@ -47,14 +46,16 @@ const createTransport = config => {
  return transport
}

const symbol = typeof Symbol !== 'undefined' ? Symbol.for('@xen-orchestra/log') : '@@@xen-orchestra/log'
const symbol =
  typeof Symbol !== 'undefined'
    ? Symbol.for('@xen-orchestra/log')
    : '@@@xen-orchestra/log'

const { env } = process
global[symbol] = createTransport({
  // display warnings or above, and all that are enabled via DEBUG or
  // NODE_DEBUG env
  filter: [env.DEBUG, env.NODE_DEBUG].filter(Boolean).join(','),
  level: resolve(env.LOG_LEVEL, LEVELS.INFO),
  filter: process.env.DEBUG || process.env.NODE_DEBUG,
  level: LEVELS.INFO,

  transport: createConsoleTransport(),
})

@@ -1,13 +1,15 @@
import createTransport from './transports/console'
import LEVELS, { resolve } from './levels'
import LEVELS from './levels'

const symbol = typeof Symbol !== 'undefined' ? Symbol.for('@xen-orchestra/log') : '@@@xen-orchestra/log'
const symbol =
  typeof Symbol !== 'undefined'
    ? Symbol.for('@xen-orchestra/log')
    : '@@@xen-orchestra/log'
if (!(symbol in global)) {
  // the default behavior, without requiring `configure` is to avoid
  // logging anything unless it's a real error
  const transport = createTransport()
  const level = resolve(process.env.LOG_LEVEL, LEVELS.WARN)
  global[symbol] = log => log.level >= level && transport(log)
  global[symbol] = log => log.level > LEVELS.WARN && transport(log)
}

// -------------------------------------------------------------------
@@ -35,7 +37,7 @@ const { prototype } = Logger
for (const name in LEVELS) {
  const level = LEVELS[name]

  prototype[name.toLowerCase()] = function (message, data) {
  prototype[name.toLowerCase()] = function(message, data) {
    if (typeof message !== 'string') {
      if (message instanceof Error) {
        data = { error: message }
@@ -51,22 +53,24 @@ for (const name in LEVELS) {
  }
}

prototype.wrap = function (message, fn) {
prototype.wrap = function(message, fn) {
  const logger = this
  const warnAndRethrow = error => {
    logger.warn(message, { error })
    throw error
  }
  return function () {
  return function() {
    try {
      const result = fn.apply(this, arguments)
      const then = result != null && result.then
      return typeof then === 'function' ? then.call(result, warnAndRethrow) : result
      return typeof then === 'function'
        ? then.call(result, warnAndRethrow)
        : result
    } catch (error) {
      warnAndRethrow(error)
    }
  }
}
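// (editor's usage sketch, with a hypothetical `startService`)
//   const guardedStart = log.wrap('could not start the service', startService)
//   guardedStart() // a synchronous failure is logged as a warning, then rethrown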

export const createLogger = namespace => new Logger(namespace)
const createLogger = namespace => new Logger(namespace)
export { createLogger as default }
Some files were not shown because too many files have changed in this diff.