Compare commits
354 Commits
vhd-lib-v0...xen-api-v0
*(Commit table: 354 entries with Author, SHA1 and Date columns; only the abbreviated SHA1 values are present, the author, date and message data are empty.)*
@@ -1,9 +1,10 @@
module.exports = {
  extends: ['standard', 'standard-jsx'],
  extends: ['standard', 'standard-jsx', 'prettier'],
  globals: {
    __DEV__: true,
    $Dict: true,
    $Diff: true,
    $ElementType: true,
    $Exact: true,
    $Keys: true,
    $PropertyType: true,
@@ -16,12 +17,9 @@ module.exports = {
    },
  },
  rules: {
    'comma-dangle': ['error', 'always-multiline'],
    indent: 'off',
    'no-var': 'error',
    'node/no-extraneous-import': 'error',
    'node/no-extraneous-require': 'error',
    'prefer-const': 'error',
    'react/jsx-indent': 'off',
  },
}
@@ -1,4 +1,5 @@
module.exports = {
  jsxSingleQuote: true,
  semi: false,
  singleQuote: true,
  trailingComma: 'es5',
`.travis.yml` (10 changes)

@@ -2,7 +2,6 @@ language: node_js
node_js:
  #- stable # disable for now due to an issue of indirect dep upath with Node 9
  - 8
  - 6

# Use containers.
# http://docs.travis-ci.com/user/workers/container-based-infrastructure/
@@ -10,9 +9,9 @@ sudo: false
addons:
  apt:
    packages:
      - qemu-utils
      - blktap-utils
      - vmdk-stream-converter
      - qemu-utils
      - blktap-utils
      - vmdk-stream-converter

before_install:
  - curl -o- -L https://yarnpkg.com/install.sh | bash
@@ -22,5 +21,4 @@ cache:
  yarn: true

script:
  - yarn run test
  - yarn run test-integration
  - yarn run travis-tests
@@ -44,6 +44,7 @@
    "dev": "cross-env NODE_ENV=development babel --watch --source-maps --out-dir=dist/ src/",
    "prebuild": "yarn run clean",
    "predev": "yarn run prebuild",
    "prepare": "yarn run build",
    "prepublishOnly": "yarn run build"
  }
}
@@ -42,8 +42,8 @@ const getConfig = (key, ...args) => {
  return config === undefined
    ? {}
    : typeof config === 'function'
      ? config(...args)
      : config
    ? config(...args)
    : config
}

module.exports = function (pkg, plugins, presets) {
@@ -83,6 +83,9 @@ ${cliName} v${pkg.version}

  await Promise.all([
    srcXapi.setFieldEntries(srcSnapshot, 'other_config', metadata),
    srcXapi.setFieldEntries(srcSnapshot, 'other_config', {
      'xo:backup:exported': 'true',
    }),
    tgtXapi.setField(
      tgtVm,
      'name_label',
@@ -1,6 +1,6 @@
{
  "name": "@xen-orchestra/cr-seed-cli",
  "version": "0.1.0",
  "version": "0.2.0",
  "homepage": "https://github.com/vatesfr/xen-orchestra/tree/master/@xen-orchestra/cr-seed-cli",
  "bugs": "https://github.com/vatesfr/xen-orchestra/issues",
  "repository": {
@@ -15,6 +15,6 @@
  },
  "dependencies": {
    "golike-defer": "^0.4.1",
    "xen-api": "^0.19.0"
    "xen-api": "^0.22.0"
  }
}
@@ -8,7 +8,12 @@ const MAX_DELAY = 2 ** 31 - 1
class Job {
  constructor (schedule, fn) {
    const wrapper = () => {
      const result = fn()
      let result
      try {
        result = fn()
      } catch (_) {
        // catch any thrown value to ensure it does not break the job
      }
      let then
      if (result != null && typeof (then = result.then) === 'function') {
        then.call(result, scheduleNext, scheduleNext)
@@ -45,8 +50,8 @@ class Schedule {
    zone.toLowerCase() === 'utc'
      ? moment.utc
      : zone === 'local'
        ? moment
        : () => moment.tz(zone)
      ? moment
      : () => moment.tz(zone)
  }

  createJob (fn) {
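The cron hunk above guards the scheduled callback so that neither a synchronous throw nor a rejected promise can stop future runs. A minimal standalone sketch of that pattern (the names here are illustrative, not taken from the package):

```js
// Run `fn` on a fixed interval and keep scheduling the next run no matter how `fn` fails.
function createResilientJob (fn, delay) {
  const scheduleNext = () => setTimeout(wrapper, delay)
  const wrapper = () => {
    let result
    try {
      result = fn() // a synchronous throw is swallowed so the timer chain survives
    } catch (_) {}
    // if `fn` returned a thenable, wait for it either way; otherwise reschedule immediately
    if (result != null && typeof result.then === 'function') {
      result.then(scheduleNext, scheduleNext)
    } else {
      scheduleNext()
    }
  }
  return { start: scheduleNext }
}

// usage: createResilientJob(() => runSomePeriodicTask(), 60e3).start()
```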
@@ -1,6 +1,6 @@
{
  "name": "@xen-orchestra/fs",
  "version": "0.3.1",
  "version": "0.4.1",
  "license": "AGPL-3.0",
  "description": "The File System for Xen Orchestra backups.",
  "keywords": [],
@@ -21,12 +21,13 @@
  },
  "dependencies": {
    "@marsaud/smb2": "^0.9.0",
    "@xen-orchestra/async-map": "^0.0.0",
    "execa": "^1.0.0",
    "fs-extra": "^7.0.0",
    "get-stream": "^4.0.0",
    "lodash": "^4.17.4",
    "promise-toolbox": "^0.10.1",
    "through2": "^2.0.3",
    "promise-toolbox": "^0.11.0",
    "through2": "^3.0.0",
    "tmp": "^0.0.33",
    "xo-remote-parser": "^0.5.0"
  },
@@ -1,10 +1,13 @@
|
||||
// @flow
|
||||
|
||||
// $FlowFixMe
|
||||
import asyncMap from '@xen-orchestra/async-map'
|
||||
import getStream from 'get-stream'
|
||||
import { randomBytes } from 'crypto'
|
||||
import { fromCallback, fromEvent, ignoreErrors } from 'promise-toolbox'
|
||||
import { type Readable, type Writable } from 'stream'
|
||||
import { fromCallback, fromEvent, ignoreErrors, timeout } from 'promise-toolbox'
|
||||
import { parse } from 'xo-remote-parser'
|
||||
import { randomBytes } from 'crypto'
|
||||
import { resolve } from 'path'
|
||||
import { type Readable, type Writable } from 'stream'
|
||||
|
||||
import { createChecksumStream, validChecksumOfReadStream } from './checksum'
|
||||
|
||||
@@ -17,43 +20,57 @@ type File = FileDescriptor | string
|
||||
|
||||
const checksumFile = file => file + '.checksum'
|
||||
|
||||
// normalize the path:
|
||||
// - does not contains `.` or `..` (cannot escape root dir)
|
||||
// - always starts with `/`
|
||||
const normalizePath = path => resolve('/', path)
|
||||
|
||||
const DEFAULT_TIMEOUT = 6e5 // 10 min
|
||||
|
||||
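The `normalizePath` helper introduced above leans on Node's `path.resolve` to force every user-supplied path under the remote's root. A quick, self-contained sketch of what that buys (the handler itself is not involved):

```js
const { resolve } = require('path')

const normalizePath = path => resolve('/', path)

normalizePath('foo/bar')          // => '/foo/bar'     (always absolute)
normalizePath('./foo//bar/')      // => '/foo/bar'     (redundant segments collapsed)
normalizePath('../../etc/passwd') // => '/etc/passwd'  (`..` cannot climb above the root)
```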
export default class RemoteHandlerAbstract {
|
||||
_remote: Object
|
||||
constructor (remote: any) {
|
||||
this._remote = { ...remote, ...parse(remote.url) }
|
||||
if (this._remote.type !== this.type) {
|
||||
throw new Error('Incorrect remote type')
|
||||
_timeout: number
|
||||
|
||||
constructor(remote: any, options: Object = {}) {
|
||||
if (remote.url === 'test://') {
|
||||
this._remote = remote
|
||||
} else {
|
||||
this._remote = { ...remote, ...parse(remote.url) }
|
||||
if (this._remote.type !== this.type) {
|
||||
throw new Error('Incorrect remote type')
|
||||
}
|
||||
}
|
||||
;({ timeout: this._timeout = DEFAULT_TIMEOUT } = options)
|
||||
}
|
||||
|
||||
get type (): string {
|
||||
get type(): string {
|
||||
throw new Error('Not implemented')
|
||||
}
|
||||
|
||||
/**
|
||||
* Asks the handler to sync the state of the effective remote with its' metadata
|
||||
*/
|
||||
async sync (): Promise<mixed> {
|
||||
async sync(): Promise<mixed> {
|
||||
return this._sync()
|
||||
}
|
||||
|
||||
async _sync (): Promise<mixed> {
|
||||
async _sync(): Promise<mixed> {
|
||||
throw new Error('Not implemented')
|
||||
}
|
||||
|
||||
/**
|
||||
* Free the resources possibly dedicated to put the remote at work, when it is no more needed
|
||||
*/
|
||||
async forget (): Promise<void> {
|
||||
async forget(): Promise<void> {
|
||||
await this._forget()
|
||||
}
|
||||
|
||||
async _forget (): Promise<void> {
|
||||
async _forget(): Promise<void> {
|
||||
throw new Error('Not implemented')
|
||||
}
|
||||
|
||||
async test (): Promise<Object> {
|
||||
const testFileName = `${Date.now()}.test`
|
||||
async test(): Promise<Object> {
|
||||
const testFileName = `/${Date.now()}.test`
|
||||
const data = await fromCallback(cb => randomBytes(1024 * 1024, cb))
|
||||
let step = 'write'
|
||||
try {
|
||||
@@ -78,29 +95,33 @@ export default class RemoteHandlerAbstract {
|
||||
}
|
||||
}
|
||||
|
||||
async outputFile (file: string, data: Data, options?: Object): Promise<void> {
|
||||
return this._outputFile(file, data, {
|
||||
async outputFile(file: string, data: Data, options?: Object): Promise<void> {
|
||||
return this._outputFile(normalizePath(file), data, {
|
||||
flags: 'wx',
|
||||
...options,
|
||||
})
|
||||
}
|
||||
|
||||
async _outputFile (file: string, data: Data, options?: Object): Promise<void> {
|
||||
const stream = await this.createOutputStream(file, options)
|
||||
async _outputFile(file: string, data: Data, options?: Object): Promise<void> {
|
||||
const stream = await this.createOutputStream(normalizePath(file), options)
|
||||
const promise = fromEvent(stream, 'finish')
|
||||
stream.end(data)
|
||||
await promise
|
||||
}
|
||||
|
||||
async read (
|
||||
async read(
|
||||
file: File,
|
||||
buffer: Buffer,
|
||||
position?: number
|
||||
): Promise<{| bytesRead: number, buffer: Buffer |}> {
|
||||
return this._read(file, buffer, position)
|
||||
return this._read(
|
||||
typeof file === 'string' ? normalizePath(file) : file,
|
||||
buffer,
|
||||
position
|
||||
)
|
||||
}
|
||||
|
||||
_read (
|
||||
_read(
|
||||
file: File,
|
||||
buffer: Buffer,
|
||||
position?: number
|
||||
@@ -108,20 +129,23 @@ export default class RemoteHandlerAbstract {
|
||||
throw new Error('Not implemented')
|
||||
}
|
||||
|
||||
async readFile (file: string, options?: Object): Promise<Buffer> {
|
||||
return this._readFile(file, options)
|
||||
async readFile(file: string, options?: Object): Promise<Buffer> {
|
||||
return this._readFile(normalizePath(file), options)
|
||||
}
|
||||
|
||||
_readFile (file: string, options?: Object): Promise<Buffer> {
|
||||
_readFile(file: string, options?: Object): Promise<Buffer> {
|
||||
return this.createReadStream(file, options).then(getStream.buffer)
|
||||
}
|
||||
|
||||
async rename (
|
||||
async rename(
|
||||
oldPath: string,
|
||||
newPath: string,
|
||||
{ checksum = false }: Object = {}
|
||||
) {
|
||||
let p = this._rename(oldPath, newPath)
|
||||
oldPath = normalizePath(oldPath)
|
||||
newPath = normalizePath(newPath)
|
||||
|
||||
let p = timeout.call(this._rename(oldPath, newPath), this._timeout)
|
||||
if (checksum) {
|
||||
p = Promise.all([
|
||||
p,
|
||||
@@ -131,18 +155,53 @@ export default class RemoteHandlerAbstract {
|
||||
return p
|
||||
}
|
||||
|
||||
async _rename (oldPath: string, newPath: string) {
|
||||
async _rename(oldPath: string, newPath: string) {
|
||||
throw new Error('Not implemented')
|
||||
}
|
||||
|
||||
async list (
|
||||
async rmdir(
|
||||
dir: string,
|
||||
{ recursive = false }: { recursive?: boolean } = {}
|
||||
) {
|
||||
dir = normalizePath(dir)
|
||||
await (recursive ? this._rmtree(dir) : this._rmdir(dir))
|
||||
}
|
||||
|
||||
async _rmdir(dir: string) {
|
||||
throw new Error('Not implemented')
|
||||
}
|
||||
|
||||
async _rmtree(dir: string) {
|
||||
try {
|
||||
return await this._rmdir(dir)
|
||||
} catch (error) {
|
||||
if (error.code !== 'ENOTEMPTY') {
|
||||
throw error
|
||||
}
|
||||
}
|
||||
|
||||
const files = await this._list(dir)
|
||||
await asyncMap(files, file =>
|
||||
this._unlink(`${dir}/${file}`).catch(error => {
|
||||
if (error.code === 'EISDIR') {
|
||||
return this._rmtree(`${dir}/${file}`)
|
||||
}
|
||||
throw error
|
||||
})
|
||||
)
|
||||
return this._rmtree(dir)
|
||||
}
|
||||
|
||||
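The new `rmdir()`/`_rmtree()` pair above gives every handler a recursive delete: try a plain rmdir first, and only on `ENOTEMPTY` list the directory, unlink each entry (recursing into sub-directories on `EISDIR`), then retry. A hedged usage sketch, assuming a handler instance obtained elsewhere and an illustrative directory name:

```js
// `handler` is any RemoteHandler subclass instance (file, NFS, SMB…).
await handler.rmdir('/xo-vm-backups/old-job')                      // rejects with ENOTEMPTY if not empty
await handler.rmdir('/xo-vm-backups/old-job', { recursive: true }) // walks and removes the whole tree
```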
async list(
|
||||
dir: string = '.',
|
||||
{
|
||||
filter,
|
||||
prependDir = false,
|
||||
}: { filter?: (name: string) => boolean, prependDir?: boolean } = {}
|
||||
): Promise<string[]> {
|
||||
let entries = await this._list(dir)
|
||||
dir = normalizePath(dir)
|
||||
|
||||
let entries = await timeout.call(this._list(dir), this._timeout)
|
||||
if (filter !== undefined) {
|
||||
entries = entries.filter(filter)
|
||||
}
|
||||
@@ -156,37 +215,42 @@ export default class RemoteHandlerAbstract {
|
||||
return entries
|
||||
}
|
||||
|
||||
async _list (dir: string): Promise<string[]> {
|
||||
async _list(dir: string): Promise<string[]> {
|
||||
throw new Error('Not implemented')
|
||||
}
|
||||
|
||||
createReadStream (
|
||||
file: string,
|
||||
createReadStream(
|
||||
file: File,
|
||||
{ checksum = false, ignoreMissingChecksum = false, ...options }: Object = {}
|
||||
): Promise<LaxReadable> {
|
||||
if (typeof file === 'string') {
|
||||
file = normalizePath(file)
|
||||
}
|
||||
const path = typeof file === 'string' ? file : file.path
|
||||
const streamP = this._createReadStream(file, options).then(stream => {
|
||||
// detect early errors
|
||||
let promise = fromEvent(stream, 'readable')
|
||||
const streamP = timeout
|
||||
.call(this._createReadStream(file, options), this._timeout)
|
||||
.then(stream => {
|
||||
// detect early errors
|
||||
let promise = fromEvent(stream, 'readable')
|
||||
|
||||
// try to add the length prop if missing and not a range stream
|
||||
if (
|
||||
stream.length === undefined &&
|
||||
options.end === undefined &&
|
||||
options.start === undefined
|
||||
) {
|
||||
promise = Promise.all([
|
||||
promise,
|
||||
ignoreErrors.call(
|
||||
this.getSize(file).then(size => {
|
||||
stream.length = size
|
||||
})
|
||||
),
|
||||
])
|
||||
}
|
||||
// try to add the length prop if missing and not a range stream
|
||||
if (
|
||||
stream.length === undefined &&
|
||||
options.end === undefined &&
|
||||
options.start === undefined
|
||||
) {
|
||||
promise = Promise.all([
|
||||
promise,
|
||||
ignoreErrors.call(
|
||||
this.getSize(file).then(size => {
|
||||
stream.length = size
|
||||
})
|
||||
),
|
||||
])
|
||||
}
|
||||
|
||||
return promise.then(() => stream)
|
||||
})
|
||||
return promise.then(() => stream)
|
||||
})
|
||||
|
||||
if (!checksum) {
|
||||
return streamP
|
||||
@@ -216,30 +280,34 @@ export default class RemoteHandlerAbstract {
|
||||
)
|
||||
}
|
||||
|
||||
async _createReadStream (
|
||||
file: string,
|
||||
options?: Object
|
||||
): Promise<LaxReadable> {
|
||||
async _createReadStream(file: File, options?: Object): Promise<LaxReadable> {
|
||||
throw new Error('Not implemented')
|
||||
}
|
||||
|
||||
async openFile (path: string, flags?: string): Promise<FileDescriptor> {
|
||||
return { fd: await this._openFile(path, flags), path }
|
||||
async openFile(path: string, flags?: string): Promise<FileDescriptor> {
|
||||
path = normalizePath(path)
|
||||
|
||||
return {
|
||||
fd: await timeout.call(this._openFile(path, flags), this._timeout),
|
||||
path,
|
||||
}
|
||||
}
|
||||
|
||||
async _openFile (path: string, flags?: string): Promise<mixed> {
|
||||
async _openFile(path: string, flags?: string): Promise<mixed> {
|
||||
throw new Error('Not implemented')
|
||||
}
|
||||
|
||||
async closeFile (fd: FileDescriptor): Promise<void> {
|
||||
await this._closeFile(fd.fd)
|
||||
async closeFile(fd: FileDescriptor): Promise<void> {
|
||||
await timeout.call(this._closeFile(fd.fd), this._timeout)
|
||||
}
|
||||
|
||||
async _closeFile (fd: mixed): Promise<void> {
|
||||
async _closeFile(fd: mixed): Promise<void> {
|
||||
throw new Error('Not implemented')
|
||||
}
|
||||
|
||||
async refreshChecksum (path: string): Promise<void> {
|
||||
async refreshChecksum(path: string): Promise<void> {
|
||||
path = normalizePath(path)
|
||||
|
||||
const stream = (await this.createReadStream(path)).pipe(
|
||||
createChecksumStream()
|
||||
)
|
||||
@@ -247,15 +315,21 @@ export default class RemoteHandlerAbstract {
|
||||
await this.outputFile(checksumFile(path), await stream.checksum)
|
||||
}
|
||||
|
||||
async createOutputStream (
|
||||
async createOutputStream(
|
||||
file: File,
|
||||
{ checksum = false, ...options }: Object = {}
|
||||
): Promise<LaxWritable> {
|
||||
if (typeof file === 'string') {
|
||||
file = normalizePath(file)
|
||||
}
|
||||
const path = typeof file === 'string' ? file : file.path
|
||||
const streamP = this._createOutputStream(file, {
|
||||
flags: 'wx',
|
||||
...options,
|
||||
})
|
||||
const streamP = timeout.call(
|
||||
this._createOutputStream(file, {
|
||||
flags: 'wx',
|
||||
...options,
|
||||
}),
|
||||
this._timeout
|
||||
)
|
||||
|
||||
if (!checksum) {
|
||||
return streamP
|
||||
@@ -278,30 +352,35 @@ export default class RemoteHandlerAbstract {
|
||||
return checksumStream
|
||||
}
|
||||
|
||||
async _createOutputStream (
|
||||
file: mixed,
|
||||
async _createOutputStream(
|
||||
file: File,
|
||||
options?: Object
|
||||
): Promise<LaxWritable> {
|
||||
throw new Error('Not implemented')
|
||||
}
|
||||
|
||||
async unlink (file: string, { checksum = true }: Object = {}): Promise<void> {
|
||||
async unlink(file: string, { checksum = true }: Object = {}): Promise<void> {
|
||||
file = normalizePath(file)
|
||||
|
||||
if (checksum) {
|
||||
ignoreErrors.call(this._unlink(checksumFile(file)))
|
||||
}
|
||||
|
||||
await this._unlink(file)
|
||||
await timeout.call(this._unlink(file), this._timeout)
|
||||
}
|
||||
|
||||
async _unlink (file: mixed): Promise<void> {
|
||||
async _unlink(file: string): Promise<void> {
|
||||
throw new Error('Not implemented')
|
||||
}
|
||||
|
||||
async getSize (file: mixed): Promise<number> {
|
||||
return this._getSize(file)
|
||||
async getSize(file: File): Promise<number> {
|
||||
return timeout.call(
|
||||
this._getSize(typeof file === 'string' ? normalizePath(file) : file),
|
||||
this._timeout
|
||||
)
|
||||
}
|
||||
|
||||
async _getSize (file: mixed): Promise<number> {
|
||||
async _getSize(file: File): Promise<number> {
|
||||
throw new Error('Not implemented')
|
||||
}
|
||||
}
|
||||
|
||||
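Throughout `abstract.js` the public methods are now wrapped with `timeout.call(promise, this._timeout)` from `promise-toolbox`, with `DEFAULT_TIMEOUT = 6e5` (10 minutes), so a stalled remote turns into a rejected `TimeoutError` instead of hanging a backup forever. A minimal sketch of the same idea, built only from the pieces visible in this diff (the helper name is illustrative):

```js
import { timeout, TimeoutError } from 'promise-toolbox'

const DEFAULT_TIMEOUT = 6e5 // 10 min

// Reject with TimeoutError if the underlying operation does not settle in time.
async function listWithTimeout (handler, dir) {
  try {
    return await timeout.call(handler._list(dir), DEFAULT_TIMEOUT)
  } catch (error) {
    if (error instanceof TimeoutError) {
      throw new Error(`remote did not answer within ${DEFAULT_TIMEOUT / 1e3}s while listing ${dir}`)
    }
    throw error
  }
}
```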
`@xen-orchestra/fs/src/abstract.spec.js` (new file, 113 lines)
@@ -0,0 +1,113 @@
|
||||
/* eslint-env jest */
|
||||
|
||||
import { TimeoutError } from 'promise-toolbox'
|
||||
|
||||
import AbstractHandler from './abstract'
|
||||
|
||||
const TIMEOUT = 10e3
|
||||
|
||||
class TestHandler extends AbstractHandler {
|
||||
constructor(impl) {
|
||||
super({ url: 'test://' }, { timeout: TIMEOUT })
|
||||
|
||||
Object.keys(impl).forEach(method => {
|
||||
this[`_${method}`] = impl[method]
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
describe('rename()', () => {
|
||||
it(`throws in case of timeout`, async () => {
|
||||
const testHandler = new TestHandler({
|
||||
rename: () => new Promise(() => {}),
|
||||
})
|
||||
|
||||
const promise = testHandler.rename('oldPath', 'newPath')
|
||||
jest.advanceTimersByTime(TIMEOUT)
|
||||
await expect(promise).rejects.toThrowError(TimeoutError)
|
||||
})
|
||||
})
|
||||
|
||||
describe('list()', () => {
|
||||
it(`throws in case of timeout`, async () => {
|
||||
const testHandler = new TestHandler({
|
||||
list: () => new Promise(() => {}),
|
||||
})
|
||||
|
||||
const promise = testHandler.list()
|
||||
jest.advanceTimersByTime(TIMEOUT)
|
||||
await expect(promise).rejects.toThrowError(TimeoutError)
|
||||
})
|
||||
})
|
||||
|
||||
describe('createReadStream()', () => {
|
||||
it(`throws in case of timeout`, async () => {
|
||||
const testHandler = new TestHandler({
|
||||
createReadStream: () => new Promise(() => {}),
|
||||
})
|
||||
|
||||
const promise = testHandler.createReadStream('file')
|
||||
jest.advanceTimersByTime(TIMEOUT)
|
||||
await expect(promise).rejects.toThrowError(TimeoutError)
|
||||
})
|
||||
})
|
||||
|
||||
describe('openFile()', () => {
|
||||
it(`throws in case of timeout`, async () => {
|
||||
const testHandler = new TestHandler({
|
||||
openFile: () => new Promise(() => {}),
|
||||
})
|
||||
|
||||
const promise = testHandler.openFile('path')
|
||||
jest.advanceTimersByTime(TIMEOUT)
|
||||
await expect(promise).rejects.toThrowError(TimeoutError)
|
||||
})
|
||||
})
|
||||
|
||||
describe('closeFile()', () => {
|
||||
it(`throws in case of timeout`, async () => {
|
||||
const testHandler = new TestHandler({
|
||||
closeFile: () => new Promise(() => {}),
|
||||
})
|
||||
|
||||
const promise = testHandler.closeFile({ fd: undefined, path: '' })
|
||||
jest.advanceTimersByTime(TIMEOUT)
|
||||
await expect(promise).rejects.toThrowError(TimeoutError)
|
||||
})
|
||||
})
|
||||
|
||||
describe('createOutputStream()', () => {
|
||||
it(`throws in case of timeout`, async () => {
|
||||
const testHandler = new TestHandler({
|
||||
createOutputStream: () => new Promise(() => {}),
|
||||
})
|
||||
|
||||
const promise = testHandler.createOutputStream('File')
|
||||
jest.advanceTimersByTime(TIMEOUT)
|
||||
await expect(promise).rejects.toThrowError(TimeoutError)
|
||||
})
|
||||
})
|
||||
|
||||
describe('unlink()', () => {
|
||||
it(`throws in case of timeout`, async () => {
|
||||
const testHandler = new TestHandler({
|
||||
unlink: () => new Promise(() => {}),
|
||||
})
|
||||
|
||||
const promise = testHandler.unlink('')
|
||||
jest.advanceTimersByTime(TIMEOUT)
|
||||
await expect(promise).rejects.toThrowError(TimeoutError)
|
||||
})
|
||||
})
|
||||
|
||||
describe('getSize()', () => {
|
||||
it(`throws in case of timeout`, async () => {
|
||||
const testHandler = new TestHandler({
|
||||
getSize: () => new Promise(() => {}),
|
||||
})
|
||||
|
||||
const promise = testHandler.getSize('')
|
||||
jest.advanceTimersByTime(TIMEOUT)
|
||||
await expect(promise).rejects.toThrowError(TimeoutError)
|
||||
})
|
||||
})
|
||||
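The new spec relies on `jest.advanceTimersByTime`, which only has an effect when Jest's fake timers are active; the file itself never enables them, so they are presumably turned on globally in the project's Jest configuration. A hedged sketch of what an explicit, self-contained setup would look like:

```js
/* eslint-env jest */
// Assumption: without a global `timers: 'fake'` setting, each spec would need this call
// before constructing the handler so that advanceTimersByTime() can fire the timeout.
jest.useFakeTimers()
```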
@@ -1,6 +1,5 @@
// @flow

// $FlowFixMe
import through2 from 'through2'
import { createHash } from 'crypto'
import { defer, fromEvent } from 'promise-toolbox'

@@ -14,7 +14,7 @@ const HANDLERS = {
  nfs: RemoteHandlerNfs,
}

export const getHandler = (remote: Remote): RemoteHandler => {
export const getHandler = (remote: Remote, ...rest: any): RemoteHandler => {
  // FIXME: should be done in xo-remote-parser.
  const type = remote.url.split('://')[0]

@@ -22,5 +22,5 @@ export const getHandler = (remote: Remote): RemoteHandler => {
  if (!Handler) {
    throw new Error('Unhandled remote type')
  }
  return new Handler(remote)
  return new Handler(remote, ...rest)
}
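With `getHandler` now forwarding its extra arguments to the handler constructor, callers can pass the new options (such as the timeout) without knowing the concrete class. A small usage sketch; the remote object shown here is a made-up example, real ones come from xo-remote-parser/xo-server:

```js
import { getHandler } from '@xen-orchestra/fs'

// hypothetical remote description; the URL scheme (file://, nfs://, smb://) picks the handler
const remote = { url: 'file:///var/backups/xo' }

const handler = getHandler(remote, { timeout: 60e3 }) // forwarded to the constructor
await handler.sync()
const entries = await handler.list('/', { prependDir: true })
```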
@@ -1,51 +1,41 @@
|
||||
import fs from 'fs-extra'
|
||||
import { dirname, resolve } from 'path'
|
||||
import { noop, startsWith } from 'lodash'
|
||||
import { dirname } from 'path'
|
||||
import { noop } from 'lodash'
|
||||
|
||||
import RemoteHandlerAbstract from './abstract'
|
||||
|
||||
export default class LocalHandler extends RemoteHandlerAbstract {
|
||||
get type () {
|
||||
get type() {
|
||||
return 'file'
|
||||
}
|
||||
|
||||
_getRealPath () {
|
||||
_getRealPath() {
|
||||
return this._remote.path
|
||||
}
|
||||
|
||||
_getFilePath (file) {
|
||||
const realPath = this._getRealPath()
|
||||
const parts = [realPath]
|
||||
if (file) {
|
||||
parts.push(file)
|
||||
}
|
||||
const path = resolve.apply(null, parts)
|
||||
if (!startsWith(path, realPath)) {
|
||||
throw new Error('Remote path is unavailable')
|
||||
}
|
||||
return path
|
||||
_getFilePath(file) {
|
||||
return this._getRealPath() + file
|
||||
}
|
||||
|
||||
async _sync () {
|
||||
if (this._remote.enabled) {
|
||||
const path = this._getRealPath()
|
||||
await fs.ensureDir(path)
|
||||
await fs.access(path, fs.R_OK | fs.W_OK)
|
||||
}
|
||||
async _sync() {
|
||||
const path = this._getRealPath()
|
||||
await fs.ensureDir(path)
|
||||
await fs.access(path, fs.R_OK | fs.W_OK)
|
||||
|
||||
return this._remote
|
||||
}
|
||||
|
||||
async _forget () {
|
||||
async _forget() {
|
||||
return noop()
|
||||
}
|
||||
|
||||
async _outputFile (file, data, options) {
|
||||
async _outputFile(file, data, options) {
|
||||
const path = this._getFilePath(file)
|
||||
await fs.ensureDir(dirname(path))
|
||||
await fs.writeFile(path, data, options)
|
||||
}
|
||||
|
||||
async _read (file, buffer, position) {
|
||||
async _read(file, buffer, position) {
|
||||
const needsClose = typeof file === 'string'
|
||||
file = needsClose ? await fs.open(this._getFilePath(file), 'r') : file.fd
|
||||
try {
|
||||
@@ -63,19 +53,19 @@ export default class LocalHandler extends RemoteHandlerAbstract {
|
||||
}
|
||||
}
|
||||
|
||||
async _readFile (file, options) {
|
||||
async _readFile(file, options) {
|
||||
return fs.readFile(this._getFilePath(file), options)
|
||||
}
|
||||
|
||||
async _rename (oldPath, newPath) {
|
||||
async _rename(oldPath, newPath) {
|
||||
return fs.rename(this._getFilePath(oldPath), this._getFilePath(newPath))
|
||||
}
|
||||
|
||||
async _list (dir = '.') {
|
||||
async _list(dir = '.') {
|
||||
return fs.readdir(this._getFilePath(dir))
|
||||
}
|
||||
|
||||
async _createReadStream (file, options) {
|
||||
async _createReadStream(file, options) {
|
||||
return typeof file === 'string'
|
||||
? fs.createReadStream(this._getFilePath(file), options)
|
||||
: fs.createReadStream('', {
|
||||
@@ -85,7 +75,7 @@ export default class LocalHandler extends RemoteHandlerAbstract {
|
||||
})
|
||||
}
|
||||
|
||||
async _createOutputStream (file, options) {
|
||||
async _createOutputStream(file, options) {
|
||||
if (typeof file === 'string') {
|
||||
const path = this._getFilePath(file)
|
||||
await fs.ensureDir(dirname(path))
|
||||
@@ -98,7 +88,7 @@ export default class LocalHandler extends RemoteHandlerAbstract {
|
||||
})
|
||||
}
|
||||
|
||||
async _unlink (file) {
|
||||
async _unlink(file) {
|
||||
return fs.unlink(this._getFilePath(file)).catch(error => {
|
||||
// do not throw if the file did not exist
|
||||
if (error == null || error.code !== 'ENOENT') {
|
||||
@@ -107,18 +97,22 @@ export default class LocalHandler extends RemoteHandlerAbstract {
|
||||
})
|
||||
}
|
||||
|
||||
async _getSize (file) {
|
||||
async _getSize(file) {
|
||||
const stats = await fs.stat(
|
||||
this._getFilePath(typeof file === 'string' ? file : file.path)
|
||||
)
|
||||
return stats.size
|
||||
}
|
||||
|
||||
async _openFile (path, flags) {
|
||||
async _openFile(path, flags) {
|
||||
return fs.open(this._getFilePath(path), flags)
|
||||
}
|
||||
|
||||
async _closeFile (fd) {
|
||||
async _closeFile(fd) {
|
||||
return fs.close(fd)
|
||||
}
|
||||
|
||||
async _rmdir(dir) {
|
||||
return fs.rmdir(dir)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,20 +1,31 @@
|
||||
import execa from 'execa'
|
||||
import fs from 'fs-extra'
|
||||
import { join } from 'path'
|
||||
import { tmpdir } from 'os'
|
||||
|
||||
import LocalHandler from './local'
|
||||
|
||||
const DEFAULT_NFS_OPTIONS = 'vers=3'
|
||||
|
||||
export default class NfsHandler extends LocalHandler {
|
||||
get type () {
|
||||
constructor(
|
||||
remote,
|
||||
{ mountsDir = join(tmpdir(), 'xo-fs-mounts'), ...opts } = {}
|
||||
) {
|
||||
super(remote, opts)
|
||||
|
||||
this._realPath = join(mountsDir, remote.id)
|
||||
}
|
||||
|
||||
get type() {
|
||||
return 'nfs'
|
||||
}
|
||||
|
||||
_getRealPath () {
|
||||
return `/run/xo-server/mounts/${this._remote.id}`
|
||||
_getRealPath() {
|
||||
return this._realPath
|
||||
}
|
||||
|
||||
async _mount () {
|
||||
async _mount() {
|
||||
await fs.ensureDir(this._getRealPath())
|
||||
const { host, path, port, options } = this._remote
|
||||
return execa(
|
||||
@@ -33,23 +44,23 @@ export default class NfsHandler extends LocalHandler {
|
||||
},
|
||||
}
|
||||
).catch(error => {
|
||||
if (!error.stderr.includes('already mounted')) {
|
||||
if (
|
||||
error == null ||
|
||||
typeof error.stderr !== 'string' ||
|
||||
!error.stderr.includes('already mounted')
|
||||
) {
|
||||
throw error
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
async _sync () {
|
||||
if (this._remote.enabled) {
|
||||
await this._mount()
|
||||
} else {
|
||||
await this._umount()
|
||||
}
|
||||
async _sync() {
|
||||
await this._mount()
|
||||
|
||||
return this._remote
|
||||
}
|
||||
|
||||
async _forget () {
|
||||
async _forget() {
|
||||
try {
|
||||
await this._umount(this._remote)
|
||||
} catch (_) {
|
||||
@@ -57,13 +68,17 @@ export default class NfsHandler extends LocalHandler {
|
||||
}
|
||||
}
|
||||
|
||||
async _umount () {
|
||||
async _umount() {
|
||||
await execa('umount', ['--force', this._getRealPath()], {
|
||||
env: {
|
||||
LANG: 'C',
|
||||
},
|
||||
}).catch(error => {
|
||||
if (!error.stderr.includes('not mounted')) {
|
||||
if (
|
||||
error == null ||
|
||||
typeof error.stderr !== 'string' ||
|
||||
!error.stderr.includes('not mounted')
|
||||
) {
|
||||
throw error
|
||||
}
|
||||
})
|
||||
|
||||
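The NFS handler's `_mount()` above shells out to `mount` through `execa`, with `DEFAULT_NFS_OPTIONS = 'vers=3'`; the exact argument list is cut off in this hunk, so the following is only a plausible reconstruction of the call, not the package's verbatim code:

```js
import execa from 'execa'

const DEFAULT_NFS_OPTIONS = 'vers=3'

// Assumed shape of the mount invocation: host, path and options come from the remote definition.
function mountNfs ({ host, path, options }, mountPoint) {
  return execa(
    'mount',
    ['-t', 'nfs', '-o', options !== undefined ? options : DEFAULT_NFS_OPTIONS, `${host}:${path}`, mountPoint],
    { env: { LANG: 'C' } } // matching the `LANG: 'C'` environment visible in the `_umount()` hunk above
  )
}
```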
@@ -6,33 +6,38 @@ import RemoteHandlerAbstract from './abstract'
|
||||
const noop = () => {}
|
||||
|
||||
// Normalize the error code for file not found.
|
||||
const normalizeError = error => {
|
||||
class ErrorWrapper extends Error {
|
||||
constructor(error, newCode) {
|
||||
super(error.message)
|
||||
this.cause = error
|
||||
this.code = newCode
|
||||
}
|
||||
}
|
||||
const normalizeError = (error, shouldBeDirectory) => {
|
||||
const { code } = error
|
||||
|
||||
return code === 'STATUS_OBJECT_NAME_NOT_FOUND' ||
|
||||
code === 'STATUS_OBJECT_PATH_NOT_FOUND'
|
||||
? Object.create(error, {
|
||||
code: {
|
||||
configurable: true,
|
||||
readable: true,
|
||||
value: 'ENOENT',
|
||||
writable: true,
|
||||
},
|
||||
})
|
||||
? new ErrorWrapper(error, 'ENOENT')
|
||||
: code === 'STATUS_NOT_SUPPORTED' || code === 'STATUS_INVALID_PARAMETER'
|
||||
? new ErrorWrapper(error, shouldBeDirectory ? 'ENOTDIR' : 'EISDIR')
|
||||
: error
|
||||
}
|
||||
|
||||
export default class SmbHandler extends RemoteHandlerAbstract {
|
||||
constructor (remote) {
|
||||
super(remote)
|
||||
constructor(remote, opts) {
|
||||
super(remote, opts)
|
||||
this._forget = noop
|
||||
|
||||
const prefix = this._remote.path
|
||||
this._prefix = prefix !== '' ? prefix + '\\' : prefix
|
||||
}
|
||||
|
||||
get type () {
|
||||
get type() {
|
||||
return 'smb'
|
||||
}
|
||||
|
||||
_getClient () {
|
||||
_getClient() {
|
||||
const remote = this._remote
|
||||
|
||||
return new Smb2({
|
||||
@@ -44,40 +49,24 @@ export default class SmbHandler extends RemoteHandlerAbstract {
|
||||
})
|
||||
}
|
||||
|
||||
_getFilePath (file) {
|
||||
if (file === '.') {
|
||||
file = undefined
|
||||
}
|
||||
|
||||
let path = this._remote.path !== '' ? this._remote.path : ''
|
||||
|
||||
// Ensure remote path is a directory.
|
||||
if (path !== '' && path[path.length - 1] !== '\\') {
|
||||
path += '\\'
|
||||
}
|
||||
|
||||
if (file) {
|
||||
path += file.replace(/\//g, '\\')
|
||||
}
|
||||
|
||||
return path
|
||||
_getFilePath(file) {
|
||||
return this._prefix + file.slice(1).replace(/\//g, '\\')
|
||||
}
|
||||
|
||||
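The rewritten `_getFilePath()` assumes it now always receives a normalized path (leading `/`), drops that first character and maps the remaining separators to backslashes under the share prefix. A tiny worked example (values invented for illustration):

```js
// _prefix is the remote's sub-path plus a trailing backslash, e.g. 'backups\\'
const prefix = 'backups\\'
const getFilePath = file => prefix + file.slice(1).replace(/\//g, '\\')

getFilePath('/xo-vm-backups/vm1/2018.vhd') // => 'backups\\xo-vm-backups\\vm1\\2018.vhd'
```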
_dirname (file) {
|
||||
_dirname(file) {
|
||||
const parts = file.split('\\')
|
||||
parts.pop()
|
||||
return parts.join('\\')
|
||||
}
|
||||
|
||||
async _sync () {
|
||||
if (this._remote.enabled) {
|
||||
// Check access (smb2 does not expose connect in public so far...)
|
||||
await this.list()
|
||||
}
|
||||
async _sync() {
|
||||
// Check access (smb2 does not expose connect in public so far...)
|
||||
await this.list()
|
||||
|
||||
return this._remote
|
||||
}
|
||||
|
||||
async _outputFile (file, data, options = {}) {
|
||||
async _outputFile(file, data, options = {}) {
|
||||
const client = this._getClient()
|
||||
const path = this._getFilePath(file)
|
||||
const dir = this._dirname(path)
|
||||
@@ -91,7 +80,7 @@ export default class SmbHandler extends RemoteHandlerAbstract {
|
||||
})
|
||||
}
|
||||
|
||||
async _read (file, buffer, position) {
|
||||
async _read(file, buffer, position) {
|
||||
const needsClose = typeof file === 'string'
|
||||
|
||||
let client
|
||||
@@ -112,7 +101,7 @@ export default class SmbHandler extends RemoteHandlerAbstract {
|
||||
}
|
||||
}
|
||||
|
||||
async _readFile (file, options = {}) {
|
||||
async _readFile(file, options = {}) {
|
||||
const client = this._getClient()
|
||||
let content
|
||||
|
||||
@@ -129,7 +118,7 @@ export default class SmbHandler extends RemoteHandlerAbstract {
|
||||
return content
|
||||
}
|
||||
|
||||
async _rename (oldPath, newPath) {
|
||||
async _rename(oldPath, newPath) {
|
||||
const client = this._getClient()
|
||||
|
||||
try {
|
||||
@@ -145,7 +134,7 @@ export default class SmbHandler extends RemoteHandlerAbstract {
|
||||
}
|
||||
}
|
||||
|
||||
async _list (dir = '.') {
|
||||
async _list(dir = '.') {
|
||||
const client = this._getClient()
|
||||
let list
|
||||
|
||||
@@ -154,13 +143,13 @@ export default class SmbHandler extends RemoteHandlerAbstract {
|
||||
client.disconnect()
|
||||
})
|
||||
} catch (error) {
|
||||
throw normalizeError(error)
|
||||
throw normalizeError(error, true)
|
||||
}
|
||||
|
||||
return list
|
||||
}
|
||||
|
||||
async _createReadStream (file, options = {}) {
|
||||
async _createReadStream(file, options = {}) {
|
||||
if (typeof file !== 'string') {
|
||||
file = file.path
|
||||
}
|
||||
@@ -178,7 +167,7 @@ export default class SmbHandler extends RemoteHandlerAbstract {
|
||||
return stream
|
||||
}
|
||||
|
||||
async _createOutputStream (file, options = {}) {
|
||||
async _createOutputStream(file, options = {}) {
|
||||
if (typeof file !== 'string') {
|
||||
file = file.path
|
||||
}
|
||||
@@ -199,7 +188,7 @@ export default class SmbHandler extends RemoteHandlerAbstract {
|
||||
return stream
|
||||
}
|
||||
|
||||
async _unlink (file) {
|
||||
async _unlink(file) {
|
||||
const client = this._getClient()
|
||||
|
||||
try {
|
||||
@@ -211,7 +200,7 @@ export default class SmbHandler extends RemoteHandlerAbstract {
|
||||
}
|
||||
}
|
||||
|
||||
async _getSize (file) {
|
||||
async _getSize(file) {
|
||||
const client = await this._getClient()
|
||||
let size
|
||||
|
||||
@@ -229,7 +218,7 @@ export default class SmbHandler extends RemoteHandlerAbstract {
|
||||
}
|
||||
|
||||
// TODO: add flags
|
||||
async _openFile (path) {
|
||||
async _openFile(path) {
|
||||
const client = this._getClient()
|
||||
return {
|
||||
client,
|
||||
@@ -237,7 +226,7 @@ export default class SmbHandler extends RemoteHandlerAbstract {
|
||||
}
|
||||
}
|
||||
|
||||
async _closeFile ({ client, file }) {
|
||||
async _closeFile({ client, file }) {
|
||||
await client.close(file)
|
||||
client.disconnect()
|
||||
}
|
||||
|
||||
@@ -1,7 +1,6 @@
{
  "private": true,
  "name": "@xen-orchestra/log",
  "version": "0.0.0",
  "version": "0.1.4",
  "license": "ISC",
  "description": "",
  "keywords": [],
@@ -19,7 +18,9 @@
  "main": "dist/",
  "bin": {},
  "files": [
    "dist/"
    "configure.js",
    "dist/",
    "transports/"
  ],
  "browserslist": [
    ">2%"
@@ -29,7 +30,7 @@
  },
  "dependencies": {
    "lodash": "^4.17.4",
    "promise-toolbox": "^0.10.1"
    "promise-toolbox": "^0.11.0"
  },
  "devDependencies": {
    "@babel/cli": "^7.0.0",
@@ -86,7 +86,7 @@ export const catchGlobalErrors = logger => {
  const { prototype } = EventEmitter
  const { emit } = prototype
  function patchedEmit (event, error) {
    if (event === 'error' && !this.listenerCount(event)) {
    if (event === 'error' && this.listenerCount(event) === 0) {
      logger.error('unhandled error event', { error })
      return false
    }
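`catchGlobalErrors` patches `EventEmitter.prototype.emit` so that an `'error'` event with no listener is logged instead of crashing the process. A hedged usage sketch; the exact import paths and export shapes are assumptions based on the files this diff touches, not something it shows directly:

```js
// Assumed entry points: the logger factory from the package root and
// catchGlobalErrors from its configure module.
const createLogger = require('@xen-orchestra/log').default
const { catchGlobalErrors } = require('@xen-orchestra/log/configure')

catchGlobalErrors(createLogger('xo:global'))
// From now on, an EventEmitter 'error' event without a listener ends up in the log
// (as "unhandled error event") instead of throwing.
```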
@@ -38,6 +38,17 @@ for (const name in LEVELS) {
  const level = LEVELS[name]

  prototype[name.toLowerCase()] = function (message, data) {
    if (typeof message !== 'string') {
      if (message instanceof Error) {
        data = { error: message }
        ;({ message = 'an error has occured' } = message)
      } else {
        return this.warn('incorrect value passed to logger', {
          level,
          value: message,
        })
      }
    }
    global[symbol](new Log(data, level, this._namespace, message, new Date()))
  }
}
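With this change each level method accepts an `Error` directly: the error becomes `data.error` and its own `message` (or a fallback) becomes the log message, while any other non-string value is downgraded to a warning. A short usage sketch, assuming the package's default export is the logger factory:

```js
import createLogger from '@xen-orchestra/log'

const log = createLogger('xo:backups')

log.error(new Error('NFS remote unreachable')) // logged as message + { error } payload
log.warn('job retried', { attempt: 2 })        // the usual (message, data) form still works
log.info({ not: 'a string' })                  // rejected value: re-routed through warn()
```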
@@ -13,10 +13,10 @@ const consoleTransport = ({ data, level, namespace, message, time }) => {
  level < INFO
    ? debugConsole
    : level < WARN
      ? infoConsole
      : level < ERROR
        ? warnConsole
        : errorConsole
    ? infoConsole
    : level < ERROR
    ? warnConsole
    : errorConsole

  fn('%s - %s - [%s] %s', time.toISOString(), namespace, NAMES[level], message)
  data != null && fn(data)
@@ -53,14 +53,12 @@ export default ({
      fromCallback(cb =>
        transporter.sendMail(
          {
            subject: evalTemplate(
              subject,
              key =>
                key === 'level'
                  ? NAMES[log.level]
                  : key === 'time'
                    ? log.time.toISOString()
                    : log[key]
            subject: evalTemplate(subject, key =>
              key === 'level'
                ? NAMES[log.level]
                : key === 'time'
                ? log.time.toISOString()
                : log[key]
            ),
            text: prettyFormat(log.data),
          },
@@ -1 +0,0 @@
dist/transports
`@xen-orchestra/log/transports/console.js` (new file, 1 line)
@@ -0,0 +1 @@
module.exports = require('../dist/transports/console.js')

`@xen-orchestra/log/transports/email.js` (new file, 1 line)
@@ -0,0 +1 @@
module.exports = require('../dist/transports/email.js')

`@xen-orchestra/log/transports/memory.js` (new file, 1 line)
@@ -0,0 +1 @@
module.exports = require('../dist/transports/memory.js')

`@xen-orchestra/log/transports/syslog.js` (new file, 1 line)
@@ -0,0 +1 @@
module.exports = require('../dist/transports/syslog.js')
`CHANGELOG.md` (160 changes)
@@ -4,6 +4,162 @@
|
||||
|
||||
### Enhancements
|
||||
|
||||
- [Perf alert] Ability to trigger an alarm if a host/VM/SR usage value is below the threshold [#3612](https://github.com/vatesfr/xen-orchestra/issues/3612) (PR [#3675](https://github.com/vatesfr/xen-orchestra/pull/3675))
|
||||
- [Home/VMs] Display pool's name [#2226](https://github.com/vatesfr/xen-orchestra/issues/2226) (PR [#3709](https://github.com/vatesfr/xen-orchestra/pull/3709))
|
||||
- [Servers] Prevent new connection if pool is already connected [#2238](https://github.com/vatesfr/xen-orchestra/issues/2238) (PR [#3724](https://github.com/vatesfr/xen-orchestra/pull/3724))
|
||||
- [VM] Pause (like Suspend but doesn't copy RAM on disk) [#3727](https://github.com/vatesfr/xen-orchestra/issues/3727) (PR [#3731](https://github.com/vatesfr/xen-orchestra/pull/3731))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- [Servers] Fix deleting server on joining a pool [#2238](https://github.com/vatesfr/xen-orchestra/issues/2238) (PR [#3728](https://github.com/vatesfr/xen-orchestra/pull/3728))
|
||||
|
||||
### Released packages
|
||||
|
||||
- xen-api v0.22.0
|
||||
- xo-server-perf-alert v0.2.0
|
||||
- xo-server-usage-report v0.7.1
|
||||
- xo-server v5.31.0
|
||||
- xo-web v5.31.0
|
||||
|
||||
## **5.28.2** (2018-11-16)
|
||||
|
||||
### Enhancements
|
||||
|
||||
- [VM] Ability to set nested virtualization in settings [#3619](https://github.com/vatesfr/xen-orchestra/issues/3619) (PR [#3625](https://github.com/vatesfr/xen-orchestra/pull/3625))
|
||||
- [Legacy Backup] Restore and File restore functionalities moved to the Backup NG view [#3499](https://github.com/vatesfr/xen-orchestra/issues/3499) (PR [#3610](https://github.com/vatesfr/xen-orchestra/pull/3610))
|
||||
- [Backup NG logs] Display warning in case of missing VMs instead of a ghosts VMs tasks (PR [#3647](https://github.com/vatesfr/xen-orchestra/pull/3647))
|
||||
- [VM] On migration, automatically selects the host and SR when only one is available [#3502](https://github.com/vatesfr/xen-orchestra/issues/3502) (PR [#3654](https://github.com/vatesfr/xen-orchestra/pull/3654))
|
||||
- [VM] Display VGA and video RAM for PVHVM guests [#3576](https://github.com/vatesfr/xen-orchestra/issues/3576) (PR [#3664](https://github.com/vatesfr/xen-orchestra/pull/3664))
|
||||
- [Backup NG form] Display a warning to let the user know that the Delta Backup and the Continuous Replication are not supported on XenServer < 6.5 [#3540](https://github.com/vatesfr/xen-orchestra/issues/3540) (PR [#3668](https://github.com/vatesfr/xen-orchestra/pull/3668))
|
||||
- [Backup NG form] Omit VMs(Simple Backup)/pools(Smart Backup/Resident on) with XenServer < 6.5 from the selection when the Delta Backup mode or the Continuous Replication mode are selected [#3540](https://github.com/vatesfr/xen-orchestra/issues/3540) (PR [#3668](https://github.com/vatesfr/xen-orchestra/pull/3668))
|
||||
- [VM] Allow to switch the Virtualization mode [#2372](https://github.com/vatesfr/xen-orchestra/issues/2372) (PR [#3669](https://github.com/vatesfr/xen-orchestra/pull/3669))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- [Backup ng logs] Fix restarting VMs with concurrency issue [#3603](https://github.com/vatesfr/xen-orchestra/issues/3603) (PR [#3634](https://github.com/vatesfr/xen-orchestra/pull/3634))
|
||||
- Validate modal containing a confirm text input by pressing the Enter key [#2735](https://github.com/vatesfr/xen-orchestra/issues/2735) (PR [#2890](https://github.com/vatesfr/xen-orchestra/pull/2890))
|
||||
- [Patches] Bulk install correctly ignores upgrade patches on licensed hosts (PR [#3651](https://github.com/vatesfr/xen-orchestra/pull/3651))
|
||||
- [Backup NG logs] Handle failed restores (PR [#3648](https://github.com/vatesfr/xen-orchestra/pull/3648))
|
||||
- [Self/New VM] Incorrect limit computation [#3658](https://github.com/vatesfr/xen-orchestra/issues/3658) (PR [#3666](https://github.com/vatesfr/xen-orchestra/pull/3666))
|
||||
- [Plugins] Don't expose credentials in config to users (PR [#3671](https://github.com/vatesfr/xen-orchestra/pull/3671))
|
||||
- [Self/New VM] `not enough … available in the set …` error in some cases (PR [#3667](https://github.com/vatesfr/xen-orchestra/pull/3667))
|
||||
- [XOSAN] Creation stuck at "Configuring VMs" [#3688](https://github.com/vatesfr/xen-orchestra/issues/3688) (PR [#3689](https://github.com/vatesfr/xen-orchestra/pull/3689))
|
||||
- [Backup NG] Errors listing backups on SMB remotes with extraneous files (PR [#3685](https://github.com/vatesfr/xen-orchestra/pull/3685))
|
||||
- [Remotes] Don't expose credentials to users [#3682](https://github.com/vatesfr/xen-orchestra/issues/3682) (PR [#3687](https://github.com/vatesfr/xen-orchestra/pull/3687))
|
||||
- [VM] Correctly display guest metrics updates (tools, network, etc.) [#3533](https://github.com/vatesfr/xen-orchestra/issues/3533) (PR [#3694](https://github.com/vatesfr/xen-orchestra/pull/3694))
|
||||
- [VM Templates] Fix deletion [#3498](https://github.com/vatesfr/xen-orchestra/issues/3498) (PR [#3695](https://github.com/vatesfr/xen-orchestra/pull/3695))
|
||||
|
||||
### Released packages
|
||||
|
||||
- xen-api v0.21.0
|
||||
- xo-common v0.2.0
|
||||
- xo-acl-resolver v0.4.0
|
||||
- xo-server v5.30.1
|
||||
- xo-web v5.30.0
|
||||
|
||||
## **5.28.1** (2018-11-05)
|
||||
|
||||
### Enhancements
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- [Backup NG] Increase timeout in stale remotes detection to limit false positives (PR [#3632](https://github.com/vatesfr/xen-orchestra/pull/3632))
|
||||
- Fix re-registration issue ([4e35b19ac](https://github.com/vatesfr/xen-orchestra/commit/4e35b19ac56c60f61c0e771cde70a50402797b8a))
|
||||
- [Backup NG logs] Fix started jobs filter [#3636](https://github.com/vatesfr/xen-orchestra/issues/3636) (PR [#3641](https://github.com/vatesfr/xen-orchestra/pull/3641))
|
||||
- [New VM] CPU and memory user inputs were ignored since previous release [#3644](https://github.com/vatesfr/xen-orchestra/issues/3644) (PR [#3646](https://github.com/vatesfr/xen-orchestra/pull/3646))
|
||||
|
||||
### Released packages
|
||||
|
||||
- @xen-orchestra/fs v0.4.1
|
||||
- xo-server v5.29.4
|
||||
- xo-web v5.29.3
|
||||
|
||||
## **5.28.0** (2018-10-31)
|
||||
|
||||
### Enhancements
|
||||
|
||||
- [Usage Report] Add IOPS read/write/total per VM [#3309](https://github.com/vatesfr/xen-orchestra/issues/3309) (PR [#3455](https://github.com/vatesfr/xen-orchestra/pull/3455))
|
||||
- [Self service] Sort resource sets by name (PR [#3507](https://github.com/vatesfr/xen-orchestra/pull/3507))
|
||||
- [Usage Report] Add top 3 SRs which use the most IOPS read/write/total [#3306](https://github.com/vatesfr/xen-orchestra/issues/3306) (PR [#3508](https://github.com/vatesfr/xen-orchestra/pull/3508))
|
||||
- [New VM] Display a warning when the memory is below the template memory static min [#3496](https://github.com/vatesfr/xen-orchestra/issues/3496) (PR [#3513](https://github.com/vatesfr/xen-orchestra/pull/3513))
|
||||
- [Backup NG form] Add link to plugins setting [#3457](https://github.com/vatesfr/xen-orchestra/issues/3457) (PR [#3514](https://github.com/vatesfr/xen-orchestra/pull/3514))
|
||||
- [Backup reports] Add job and run ID [#3488](https://github.com/vatesfr/xen-orchestra/issues/3488) (PR [#3516](https://github.com/vatesfr/xen-orchestra/pull/3516))
|
||||
- [Usage Report] Add top 3 VMs which use the most IOPS read/write/total [#3308](https://github.com/vatesfr/xen-orchestra/issues/3308) (PR [#3463](https://github.com/vatesfr/xen-orchestra/pull/3463))
|
||||
- [Settings/logs] Homogenize action buttons in table and enable bulk deletion [#3179](https://github.com/vatesfr/xen-orchestra/issues/3179) (PR [#3528](https://github.com/vatesfr/xen-orchestra/pull/3528))
|
||||
- [Settings/acls] Add bulk deletion [#3179](https://github.com/vatesfr/xen-orchestra/issues/3179) (PR [#3536](https://github.com/vatesfr/xen-orchestra/pull/3536))
|
||||
- [Home] Improve search usage: raw numbers also match in names [#2906](https://github.com/vatesfr/xen-orchestra/issues/2906) (PR [#3552](https://github.com/vatesfr/xen-orchestra/pull/3552))
|
||||
- [Backup NG] Timeout of a job is now in hours [#3550](https://github.com/vatesfr/xen-orchestra/issues/3550) (PR [#3553](https://github.com/vatesfr/xen-orchestra/pull/3553))
|
||||
- [Backup NG] Explicit error if a VM is missing [#3434](https://github.com/vatesfr/xen-orchestra/issues/3434) (PR [#3522](https://github.com/vatesfr/xen-orchestra/pull/3522))
|
||||
- [Backup NG] Show all advanced settings with non-default values in overview [#3549](https://github.com/vatesfr/xen-orchestra/issues/3549) (PR [#3554](https://github.com/vatesfr/xen-orchestra/pull/3554))
|
||||
- [Backup NG] Collapse advanced settings by default [#3551](https://github.com/vatesfr/xen-orchestra/issues/3551) (PR [#3559](https://github.com/vatesfr/xen-orchestra/pull/3559))
|
||||
- [Scheduling] Merge selection and interval tabs [#1902](https://github.com/vatesfr/xen-orchestra/issues/1902) (PR [#3519](https://github.com/vatesfr/xen-orchestra/pull/3519))
|
||||
- [Backup NG/Restore] The backup selector now also shows the job name [#3366](https://github.com/vatesfr/xen-orchestra/issues/3366) (PR [#3564](https://github.com/vatesfr/xen-orchestra/pull/3564))
|
||||
- Sort buttons by criticality in tables [#3168](https://github.com/vatesfr/xen-orchestra/issues/3168) (PR [#3545](https://github.com/vatesfr/xen-orchestra/pull/3545))
|
||||
- [Usage Report] Ability to send a daily report [#3544](https://github.com/vatesfr/xen-orchestra/issues/3544) (PR [#3582](https://github.com/vatesfr/xen-orchestra/pull/3582))
|
||||
- [Backup NG logs] Disable state filters with no entries [#3438](https://github.com/vatesfr/xen-orchestra/issues/3438) (PR [#3442](https://github.com/vatesfr/xen-orchestra/pull/3442))
|
||||
- [ACLs] Global performance improvement on UI for non-admin users [#3578](https://github.com/vatesfr/xen-orchestra/issues/3578) (PR [#3584](https://github.com/vatesfr/xen-orchestra/pull/3584))
|
||||
- [Backup NG] Improve the Schedule's view (Replace table by list) [#3491](https://github.com/vatesfr/xen-orchestra/issues/3491) (PR [#3586](https://github.com/vatesfr/xen-orchestra/pull/3586))
|
||||
- ([Host/Storage], [Sr/hosts]) add bulk deletion [#3179](https://github.com/vatesfr/xen-orchestra/issues/3179) (PR [#3539](https://github.com/vatesfr/xen-orchestra/pull/3539))
|
||||
- [xo-server] Use @xen-orchestra/log for basic logging [#3555](https://github.com/vatesfr/xen-orchestra/issues/3555) (PR [#3579](https://github.com/vatesfr/xen-orchestra/pull/3579))
|
||||
- [Backup Report] Log error when job failed [#3458](https://github.com/vatesfr/xen-orchestra/issues/3458) (PR [#3593](https://github.com/vatesfr/xen-orchestra/pull/3593))
|
||||
- [Backup NG] Display logs for backup restoration [#2511](https://github.com/vatesfr/xen-orchestra/issues/2511) (PR [#3609](https://github.com/vatesfr/xen-orchestra/pull/3609))
|
||||
- [XOA] Display product version and list of all installed packages [#3560](https://github.com/vatesfr/xen-orchestra/issues/3560) (PR [#3621](https://github.com/vatesfr/xen-orchestra/pull/3621))
|
||||
|
||||
### Bug fixes

- [Remotes] Fix removal of broken remotes [#3327](https://github.com/vatesfr/xen-orchestra/issues/3327) (PR [#3521](https://github.com/vatesfr/xen-orchestra/pull/3521))
- [Backups] Fix stuck backups due to broken NFS remotes [#3467](https://github.com/vatesfr/xen-orchestra/issues/3467) (PR [#3534](https://github.com/vatesfr/xen-orchestra/pull/3534))
- [New VM] Fix missing cloud config when creating multiple VMs at once in some cases [#3532](https://github.com/vatesfr/xen-orchestra/issues/3532) (PR [#3535](https://github.com/vatesfr/xen-orchestra/pull/3535))
- [VM] Fix an error when an admin tried to add a disk to a Self Service VM whose resource set had been deleted [#2814](https://github.com/vatesfr/xen-orchestra/issues/2814) (PR [#3530](https://github.com/vatesfr/xen-orchestra/pull/3530))
- [Self/Create VM] Fix some quotas being computed from the template instead of the user inputs [#2683](https://github.com/vatesfr/xen-orchestra/issues/2683) (PR [#3546](https://github.com/vatesfr/xen-orchestra/pull/3546))
- [Self] Ignore DR and CR VMs when computing quotas [#3064](https://github.com/vatesfr/xen-orchestra/issues/3064) (PR [#3561](https://github.com/vatesfr/xen-orchestra/pull/3561))
- [Patches] No longer wrongly require ejecting CDs from halted VMs and snapshots before installing patches (PR [#3611](https://github.com/vatesfr/xen-orchestra/pull/3611))
- [Jobs] Ensure the scheduling is not interrupted in rare cases (PR [#3617](https://github.com/vatesfr/xen-orchestra/pull/3617))
- [Home] Fix `server.getAll` error at login when the user is not an admin [#2335](https://github.com/vatesfr/xen-orchestra/issues/2335) (PR [#3613](https://github.com/vatesfr/xen-orchestra/pull/3613))

### Released packages

- xo-server-backup-reports v0.15.0
- xo-common v0.1.2
- @xen-orchestra/log v0.1.0
- @xen-orchestra/fs v0.4.0
- complex-matcher v0.5.0
- vhd-lib v0.4.0
- xen-api v0.20.0
- xo-server-usage-report v0.7.0
- xo-server v5.29.0
- xo-web v5.29.0

## **5.27.2** (2018-10-05)

### Enhancements

- [Host/Networks] Remove "Add network" button [#3386](https://github.com/vatesfr/xen-orchestra/issues/3386) (PR [#3478](https://github.com/vatesfr/xen-orchestra/pull/3478))
- [Host/Networks] Private networks table [#3387](https://github.com/vatesfr/xen-orchestra/issues/3387) (PR [#3481](https://github.com/vatesfr/xen-orchestra/pull/3481))
- [Home/Pool] The patch count pill now shows the number of unique patches in the pool [#3321](https://github.com/vatesfr/xen-orchestra/issues/3321) (PR [#3483](https://github.com/vatesfr/xen-orchestra/pull/3483))
- [Patches] Pre-install checks to avoid errors [#3252](https://github.com/vatesfr/xen-orchestra/issues/3252) (PR [#3484](https://github.com/vatesfr/xen-orchestra/pull/3484))
- [VM/Snapshots] Allow VM operators to create snapshots and delete those they created [#3443](https://github.com/vatesfr/xen-orchestra/issues/3443) (PR [#3482](https://github.com/vatesfr/xen-orchestra/pull/3482))
- [VM/Clone] Handle ACLs and Self Service [#3139](https://github.com/vatesfr/xen-orchestra/issues/3139) (PR [#3493](https://github.com/vatesfr/xen-orchestra/pull/3493))

### Bug fixes

- [Backup NG] Fix `Cannot read property 'uuid' of undefined` when a disk is removed from a VM to back up (PR [#3479](https://github.com/vatesfr/xen-orchestra/pull/3479))
- [Backup NG] Fix unexpected full backup after a failure, an interruption or a basic rolling snapshot (PR [#3485](https://github.com/vatesfr/xen-orchestra/pull/3485))
- [Usage report] Display the top 3 most used SRs instead of the top 3 biggest SRs [#3307](https://github.com/vatesfr/xen-orchestra/issues/3307) (PR [#3475](https://github.com/vatesfr/xen-orchestra/pull/3475))

### Released packages

- vhd-lib v0.3.2
- xo-vmdk-to-vhd v0.1.5
- xo-server-usage-report v0.6.0
- xo-acl-resolver v0.3.0
- xo-server v5.28.0
- xo-web v5.28.0

## **5.27.1** (2018-09-28)

### Enhancements

### Bug fixes

- [OVA Import] Allow import of files bigger than 127GB (PR [#3451](https://github.com/vatesfr/xen-orchestra/pull/3451))

@@ -18,8 +174,8 @@

- @xen-orchestra/fs v0.3.1
- vhd-lib v0.3.1
- xo-vmdk-to-vhd v0.1.4
- xo-server v5.28.0
- xo-web v5.28.0
- xo-server v5.27.2
- xo-web v5.27.1

## **5.27.0** (2018-09-24)

@@ -39,6 +39,10 @@ You can check if a coalesce job is currently active by running `ps axf | grep vh

If you don't see any running coalesce jobs, and can't find any other reason that XenServer has not started one, you can attempt to make it start a coalesce job by rescanning the SR. This is harmless to try, but will not always result in a coalesce. Visit the problematic SR in the XOA UI, then click the "Rescan All Disks" button towards the top right: it looks like a refresh circle icon. This should begin the coalesce process; if you click the Advanced tab in the SR view, the "disks needing to be coalesced" list should become smaller and smaller.

### Parse Error

This is most likely due to running a backup job that uses delta functionality (e.g. delta backups or continuous replication) on a version of XenServer older than 6.5. To use delta functionality you must run [XenServer 6.5 or later](https://xen-orchestra.com/docs/supported-version.html).

### SR_BACKEND_FAILURE_44 (insufficient space)

> This message can be triggered by any backup method.

@@ -72,4 +76,4 @@ To check your free space, enter your XOA and run `xoa check` to check free syste

This happens when you have a *smart backup job* that doesn't match any VMs. For example: you created a job to back up all running VMs, but no VMs are running when the backup is scheduled. It can also happen if you lost the connection to your pool master (the VMs aren't visible anymore from Xen Orchestra).

Edit your job and check that it matches some VMs, or check that your pool is connected to XOA.

@@ -14,13 +14,13 @@ As you may have seen, in other parts of the documentation, XO is composed of two

### NodeJS

XO needs Node.js. **Please always use the LTS version of Node**.
XO needs Node.js. **Please use Node 8**.

We'll consider at this point that you've got a working Node installation on your box, e.g.:

```
$ node -v
v8.9.1
v8.12.0
```

If not, see [this page](https://nodejs.org/en/download/package-manager/) for instructions on how to install Node.

@@ -1,4 +1,3 @@

# xo-cli

This is another client for `xo-server`, this time in command-line form.

@@ -104,5 +103,6 @@ encoding by prefixing with `json:`:

##### VM import

```
> xo-cli vm.import host=60a6939e-8b0a-4352-9954-5bde44bcdf7d @=vm.xva
> xo-cli vm.import sr=60a6939e-8b0a-4352-9954-5bde44bcdf7d @=vm.xva
```

> Note: `xo-cli` only supports the import of XVA files. It will not import OVA files. To import OVA images, you must use the XOA web UI.

package.json
@@ -3,25 +3,26 @@
|
||||
"@babel/core": "^7.0.0",
|
||||
"@babel/register": "^7.0.0",
|
||||
"babel-core": "^7.0.0-0",
|
||||
"babel-eslint": "^9.0.0",
|
||||
"babel-eslint": "^10.0.1",
|
||||
"babel-jest": "^23.0.1",
|
||||
"benchmark": "^2.1.4",
|
||||
"eslint": "^5.1.0",
|
||||
"eslint-config-prettier": "^3.3.0",
|
||||
"eslint-config-standard": "12.0.0",
|
||||
"eslint-config-standard-jsx": "^6.0.2",
|
||||
"eslint-plugin-import": "^2.8.0",
|
||||
"eslint-plugin-node": "^7.0.1",
|
||||
"eslint-plugin-node": "^8.0.0",
|
||||
"eslint-plugin-promise": "^4.0.0",
|
||||
"eslint-plugin-react": "^7.6.1",
|
||||
"eslint-plugin-standard": "^4.0.0",
|
||||
"exec-promise": "^0.7.0",
|
||||
"flow-bin": "^0.81.0",
|
||||
"flow-bin": "^0.86.0",
|
||||
"globby": "^8.0.0",
|
||||
"husky": "^1.0.0-rc.15",
|
||||
"jest": "^23.0.1",
|
||||
"lodash": "^4.17.4",
|
||||
"prettier": "^1.10.2",
|
||||
"promise-toolbox": "^0.10.1",
|
||||
"promise-toolbox": "^0.11.0",
|
||||
"sorted-object": "^2.0.1"
|
||||
},
|
||||
"engines": {
|
||||
@@ -33,6 +34,7 @@
|
||||
}
|
||||
},
|
||||
"jest": {
|
||||
"timers": "fake",
|
||||
"collectCoverage": true,
|
||||
"projects": [
|
||||
"<rootDir>"
|
||||
@@ -57,7 +59,8 @@
|
||||
"prepare": "scripts/run-script prepare",
|
||||
"pretest": "eslint --ignore-path .gitignore .",
|
||||
"test": "jest \"^(?!.*\\.integ\\.spec\\.js$)\"",
|
||||
"test-integration": "jest \".integ\\.spec\\.js$\""
|
||||
"test-integration": "jest \".integ\\.spec\\.js$\"",
|
||||
"travis-tests": "scripts/travis-tests"
|
||||
},
|
||||
"workspaces": [
|
||||
"@xen-orchestra/*",
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "complex-matcher",
|
||||
"version": "0.4.0",
|
||||
"version": "0.5.0",
|
||||
"license": "ISC",
|
||||
"description": "",
|
||||
"keywords": [],
|
||||
|
||||
@@ -11,7 +11,7 @@ export const ast = new CM.And([
|
||||
new CM.Or([new CM.String('wonderwoman'), new CM.String('batman')])
|
||||
),
|
||||
new CM.TruthyProperty('hasCape'),
|
||||
new CM.Property('age', new CM.Number(32)),
|
||||
new CM.Property('age', new CM.NumberOrStringNode('32')),
|
||||
new CM.GlobPattern('chi*go'),
|
||||
new CM.RegExp('^foo/bar\\.', 'i'),
|
||||
])
|
||||
|
||||
@@ -153,6 +153,34 @@ export class NumberNode extends Node {
|
||||
}
|
||||
export { NumberNode as Number }
|
||||
|
||||
export class NumberOrStringNode extends Node {
|
||||
constructor (value) {
|
||||
super()
|
||||
|
||||
this.value = value
|
||||
|
||||
// should not be enumerable for the tests
|
||||
Object.defineProperty(this, 'match', {
|
||||
value: this.match.bind(this, value.toLowerCase(), +value),
|
||||
})
|
||||
}
|
||||
|
||||
match (lcValue, numValue, value) {
|
||||
return (
|
||||
value === numValue ||
|
||||
(typeof value === 'string'
|
||||
? value.toLowerCase().indexOf(lcValue) !== -1
|
||||
: (Array.isArray(value) || isPlainObject(value)) &&
|
||||
some(value, this.match))
|
||||
)
|
||||
}
|
||||
|
||||
toString () {
|
||||
return this.value
|
||||
}
|
||||
}
|
||||
export { NumberOrStringNode as NumberOrString }
|
||||
|
||||
export class Property extends Node {
|
||||
constructor (name, child) {
|
||||
super()
|
||||
@@ -382,11 +410,10 @@ class P {
|
||||
|
||||
static text (text) {
|
||||
const { length } = text
|
||||
return new P(
|
||||
(input, pos) =>
|
||||
input.startsWith(text, pos)
|
||||
? new Success(pos + length, text)
|
||||
: new Failure(pos, `'${text}'`)
|
||||
return new P((input, pos) =>
|
||||
input.startsWith(text, pos)
|
||||
? new Success(pos + length, text)
|
||||
: new Failure(pos, `'${text}'`)
|
||||
)
|
||||
}
|
||||
|
||||
@@ -450,17 +477,16 @@ class P {
|
||||
}
|
||||
}
|
||||
|
||||
P.eof = new P(
|
||||
(input, pos, end) =>
|
||||
pos < end ? new Failure(pos, 'end of input') : new Success(pos)
|
||||
P.eof = new P((input, pos, end) =>
|
||||
pos < end ? new Failure(pos, 'end of input') : new Success(pos)
|
||||
)
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
const parser = P.grammar({
|
||||
default: r =>
|
||||
P.seq(r.ws, r.term.repeat(), P.eof).map(
|
||||
([, terms]) => (terms.length === 0 ? new Null() : new And(terms))
|
||||
P.seq(r.ws, r.term.repeat(), P.eof).map(([, terms]) =>
|
||||
terms.length === 0 ? new Null() : new And(terms)
|
||||
),
|
||||
globPattern: new P((input, pos, end) => {
|
||||
let value = ''
|
||||
@@ -564,7 +590,7 @@ const parser = P.grammar({
|
||||
const asNum = +str
|
||||
return Number.isNaN(asNum)
|
||||
? new GlobPattern(str)
|
||||
: new NumberNode(asNum)
|
||||
: new NumberOrStringNode(str)
|
||||
})
|
||||
),
|
||||
ws: P.regex(/\s*/),
|
||||
|
||||
@@ -6,6 +6,7 @@ import {
|
||||
GlobPattern,
|
||||
Null,
|
||||
NumberNode,
|
||||
NumberOrStringNode,
|
||||
parse,
|
||||
setPropertyClause,
|
||||
} from './'
|
||||
@@ -32,7 +33,7 @@ describe('parse', () => {
|
||||
|
||||
node = parse('32')
|
||||
expect(node.match(32)).toBe(true)
|
||||
expect(node.match('32')).toBe(false)
|
||||
expect(node.match('32')).toBe(true)
|
||||
expect(node.toString()).toBe('32')
|
||||
|
||||
node = parse('"32"')
|
||||
@@ -54,6 +55,12 @@ describe('Number', () => {
|
||||
})
|
||||
})
|
||||
|
||||
describe('NumberOrStringNode', () => {
|
||||
it('match a string', () => {
|
||||
expect(new NumberOrStringNode('123').match([{ foo: '123' }])).toBe(true)
|
||||
})
|
||||
})
|
||||
|
||||
describe('setPropertyClause', () => {
|
||||
it('creates a node if none passed', () => {
|
||||
expect(setPropertyClause(undefined, 'foo', 'bar').toString()).toBe(
|
||||
|
||||
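The hunks above swap `NumberNode` for the new `NumberOrStringNode` in the parser, so a bare numeric term now matches string values too and still searches nested arrays and objects. A minimal usage sketch, assuming only the public `parse` export of `complex-matcher` v0.5.0 as shown in the spec imports:

```js
import { parse } from 'complex-matcher'

const predicate = parse('32')

predicate.match(32) // true
predicate.match('32') // true (was false before this change)
predicate.match([{ foo: '32' }]) // true: nested values are searched as well
```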
@@ -26,11 +26,11 @@
|
||||
"node": ">=6"
|
||||
},
|
||||
"dependencies": {
|
||||
"@xen-orchestra/fs": "^0.3.0",
|
||||
"@xen-orchestra/fs": "^0.4.1",
|
||||
"cli-progress": "^2.0.0",
|
||||
"exec-promise": "^0.7.0",
|
||||
"struct-fu": "^1.2.0",
|
||||
"vhd-lib": "^0.3.0"
|
||||
"vhd-lib": "^0.4.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@babel/cli": "^7.0.0",
|
||||
@@ -40,7 +40,7 @@
|
||||
"cross-env": "^5.1.3",
|
||||
"execa": "^1.0.0",
|
||||
"index-modules": "^0.3.0",
|
||||
"promise-toolbox": "^0.10.1",
|
||||
"promise-toolbox": "^0.11.0",
|
||||
"rimraf": "^2.6.1",
|
||||
"tmp": "^0.0.33"
|
||||
},
|
||||
|
||||
@@ -9,13 +9,12 @@ export default async function main (args) {
|
||||
}
|
||||
|
||||
const handler = getHandler({ url: 'file:///' })
|
||||
const stream = await createSyntheticStream(handler, path.resolve(args[0]))
|
||||
return new Promise((resolve, reject) => {
|
||||
createSyntheticStream(handler, path.resolve(args[0]))
|
||||
.on('error', reject)
|
||||
.pipe(
|
||||
createWriteStream(args[1])
|
||||
.on('error', reject)
|
||||
.on('finish', resolve)
|
||||
)
|
||||
stream.on('error', reject).pipe(
|
||||
createWriteStream(args[1])
|
||||
.on('error', reject)
|
||||
.on('finish', resolve)
|
||||
)
|
||||
})
|
||||
}
|
||||
|
||||
@@ -9,11 +9,9 @@ import { fromEvent, pFromCallback } from 'promise-toolbox'
|
||||
import { getHandler } from '@xen-orchestra/fs'
|
||||
import { randomBytes } from 'crypto'
|
||||
|
||||
import chainVhd from './chain'
|
||||
import createReadStream from './createSyntheticStream'
|
||||
import Vhd from './vhd'
|
||||
import vhdMerge from './merge'
|
||||
import { SECTOR_SIZE } from './_constants'
|
||||
import Vhd, { chainVhd, createSyntheticStream, mergeVhd as vhdMerge } from './'
|
||||
|
||||
import { SECTOR_SIZE } from './src/_constants'
|
||||
|
||||
const initialDir = process.cwd()
|
||||
|
||||
@@ -270,14 +268,18 @@ test('coalesce works in normal cases', async () => {
|
||||
|
||||
test('createSyntheticStream passes vhd-util check', async () => {
|
||||
const initalSize = 4
|
||||
const expectedVhdSize = 4197888
|
||||
await createRandomFile('randomfile', initalSize)
|
||||
await convertFromRawToVhd('randomfile', 'randomfile.vhd')
|
||||
const handler = getHandler({ url: 'file://' + process.cwd() })
|
||||
const stream = createReadStream(handler, 'randomfile.vhd')
|
||||
const stream = await createSyntheticStream(handler, 'randomfile.vhd')
|
||||
expect(stream.length).toEqual(expectedVhdSize)
|
||||
await fromEvent(
|
||||
stream.pipe(await fs.createWriteStream('recovered.vhd')),
|
||||
'finish'
|
||||
)
|
||||
await checkFile('recovered.vhd')
|
||||
const stats = await fs.stat('recovered.vhd')
|
||||
expect(stats.size).toEqual(expectedVhdSize)
|
||||
await execa('qemu-img', ['compare', 'recovered.vhd', 'randomfile'])
|
||||
})
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "vhd-lib",
|
||||
"version": "0.3.1",
|
||||
"version": "0.4.0",
|
||||
"license": "AGPL-3.0",
|
||||
"description": "Primitives for VHD file handling",
|
||||
"keywords": [],
|
||||
@@ -21,10 +21,11 @@
|
||||
},
|
||||
"dependencies": {
|
||||
"async-iterator-to-stream": "^1.0.2",
|
||||
"core-js": "3.0.0-beta.3",
|
||||
"from2": "^2.3.0",
|
||||
"fs-extra": "^7.0.0",
|
||||
"limit-concurrency-decorator": "^0.4.0",
|
||||
"promise-toolbox": "^0.10.1",
|
||||
"promise-toolbox": "^0.11.0",
|
||||
"struct-fu": "^1.2.0",
|
||||
"uuid": "^3.0.1"
|
||||
},
|
||||
@@ -33,7 +34,7 @@
|
||||
"@babel/core": "^7.0.0",
|
||||
"@babel/preset-env": "^7.0.0",
|
||||
"@babel/preset-flow": "^7.0.0",
|
||||
"@xen-orchestra/fs": "^0.3.1",
|
||||
"@xen-orchestra/fs": "^0.4.1",
|
||||
"babel-plugin-lodash": "^3.3.2",
|
||||
"cross-env": "^5.1.3",
|
||||
"execa": "^1.0.0",
|
||||
|
||||
@@ -17,7 +17,7 @@ import { set as setBitmap } from './_bitmap'
|
||||
const VHD_BLOCK_SIZE_SECTORS = VHD_BLOCK_SIZE_BYTES / SECTOR_SIZE
|
||||
|
||||
/**
|
||||
* @returns {Array} an array of occupation bitmap, each bit mapping an input block size of bytes
|
||||
* @returns currentVhdPositionSector the first free sector after the data
|
||||
*/
|
||||
function createBAT (
|
||||
firstBlockPosition,
|
||||
@@ -36,9 +36,10 @@ function createBAT (
|
||||
(bitmapSize + VHD_BLOCK_SIZE_BYTES) / SECTOR_SIZE
|
||||
}
|
||||
})
|
||||
return currentVhdPositionSector
|
||||
}
|
||||
|
||||
export default asyncIteratorToStream(async function * (
|
||||
export default async function createReadableStream (
|
||||
diskSize,
|
||||
incomingBlockSize,
|
||||
blockAddressList,
|
||||
@@ -79,7 +80,14 @@ export default asyncIteratorToStream(async function * (
|
||||
const bitmapSize =
|
||||
Math.ceil(VHD_BLOCK_SIZE_SECTORS / 8 / SECTOR_SIZE) * SECTOR_SIZE
|
||||
const bat = Buffer.alloc(tablePhysicalSizeBytes, 0xff)
|
||||
createBAT(firstBlockPosition, blockAddressList, ratio, bat, bitmapSize)
|
||||
const endOfData = createBAT(
|
||||
firstBlockPosition,
|
||||
blockAddressList,
|
||||
ratio,
|
||||
bat,
|
||||
bitmapSize
|
||||
)
|
||||
const fileSize = endOfData * SECTOR_SIZE + FOOTER_SIZE
|
||||
let position = 0
|
||||
function * yieldAndTrack (buffer, expectedPosition) {
|
||||
if (expectedPosition !== undefined) {
|
||||
@@ -120,9 +128,16 @@ export default asyncIteratorToStream(async function * (
|
||||
}
|
||||
yield * yieldAndTrack(currentBlockWithBitmap)
|
||||
}
|
||||
yield * yieldAndTrack(footer, 0)
|
||||
yield * yieldAndTrack(header, FOOTER_SIZE)
|
||||
yield * yieldAndTrack(bat, FOOTER_SIZE + HEADER_SIZE)
|
||||
yield * generateFileContent(blockIterator, bitmapSize, ratio)
|
||||
yield * yieldAndTrack(footer)
|
||||
})
|
||||
|
||||
async function * iterator () {
|
||||
yield * yieldAndTrack(footer, 0)
|
||||
yield * yieldAndTrack(header, FOOTER_SIZE)
|
||||
yield * yieldAndTrack(bat, FOOTER_SIZE + HEADER_SIZE)
|
||||
yield * generateFileContent(blockIterator, bitmapSize, ratio)
|
||||
yield * yieldAndTrack(footer)
|
||||
}
|
||||
|
||||
const stream = asyncIteratorToStream(iterator())
|
||||
stream.length = fileSize
|
||||
return stream
|
||||
}
|
||||
|
||||
@@ -15,18 +15,24 @@ import { test as mapTestBit } from './_bitmap'
|
||||
const resolveRelativeFromFile = (file, path) =>
|
||||
resolve('/', dirname(file), path).slice(1)
|
||||
|
||||
export default asyncIteratorToStream(function * (handler, path) {
|
||||
export default async function createSyntheticStream (handler, path) {
|
||||
const fds = []
|
||||
|
||||
const cleanup = () => {
|
||||
for (let i = 0, n = fds.length; i < n; ++i) {
|
||||
handler.closeFile(fds[i]).catch(error => {
|
||||
console.warn('createReadStream, closeFd', i, error)
|
||||
})
|
||||
}
|
||||
}
|
||||
try {
|
||||
const vhds = []
|
||||
while (true) {
|
||||
const fd = yield handler.openFile(path, 'r')
|
||||
const fd = await handler.openFile(path, 'r')
|
||||
fds.push(fd)
|
||||
const vhd = new Vhd(handler, fd)
|
||||
vhds.push(vhd)
|
||||
yield vhd.readHeaderAndFooter()
|
||||
yield vhd.readBlockAllocationTable()
|
||||
await vhd.readHeaderAndFooter()
|
||||
await vhd.readBlockAllocationTable()
|
||||
|
||||
if (vhd.footer.diskType === DISK_TYPE_DYNAMIC) {
|
||||
break
|
||||
@@ -64,14 +70,8 @@ export default asyncIteratorToStream(function * (handler, path) {
|
||||
const nBlocks = Math.ceil(footer.currentSize / header.blockSize)
|
||||
|
||||
const blocksOwner = new Array(nBlocks)
|
||||
for (
|
||||
let iBlock = 0,
|
||||
blockOffset = Math.ceil(
|
||||
(header.tableOffset + bat.length) / SECTOR_SIZE
|
||||
);
|
||||
iBlock < nBlocks;
|
||||
++iBlock
|
||||
) {
|
||||
let blockOffset = Math.ceil((header.tableOffset + bat.length) / SECTOR_SIZE)
|
||||
for (let iBlock = 0; iBlock < nBlocks; ++iBlock) {
|
||||
let blockSector = BLOCK_UNUSED
|
||||
for (let i = 0; i < nVhds; ++i) {
|
||||
if (vhds[i].containsBlock(iBlock)) {
|
||||
@@ -83,71 +83,78 @@ export default asyncIteratorToStream(function * (handler, path) {
|
||||
}
|
||||
bat.writeUInt32BE(blockSector, iBlock * 4)
|
||||
}
|
||||
const fileSize = blockOffset * SECTOR_SIZE + FOOTER_SIZE
|
||||
|
||||
footer = fuFooter.pack(footer)
|
||||
checksumStruct(footer, fuFooter)
|
||||
yield footer
|
||||
const iterator = function * () {
|
||||
try {
|
||||
footer = fuFooter.pack(footer)
|
||||
checksumStruct(footer, fuFooter)
|
||||
yield footer
|
||||
|
||||
header = fuHeader.pack(header)
|
||||
checksumStruct(header, fuHeader)
|
||||
yield header
|
||||
header = fuHeader.pack(header)
|
||||
checksumStruct(header, fuHeader)
|
||||
yield header
|
||||
|
||||
yield bat
|
||||
yield bat
|
||||
|
||||
// TODO: for generic usage the bitmap needs to be properly computed for each block
|
||||
const bitmap = Buffer.alloc(vhd.bitmapSize, 0xff)
|
||||
for (let iBlock = 0; iBlock < nBlocks; ++iBlock) {
|
||||
const owner = blocksOwner[iBlock]
|
||||
if (owner === undefined) {
|
||||
continue
|
||||
}
|
||||
|
||||
yield bitmap
|
||||
|
||||
const blocksByVhd = new Map()
|
||||
const emitBlockSectors = function * (iVhd, i, n) {
|
||||
const vhd = vhds[iVhd]
|
||||
const isRootVhd = vhd === rootVhd
|
||||
if (!vhd.containsBlock(iBlock)) {
|
||||
if (isRootVhd) {
|
||||
yield Buffer.alloc((n - i) * SECTOR_SIZE)
|
||||
} else {
|
||||
yield * emitBlockSectors(iVhd + 1, i, n)
|
||||
// TODO: for generic usage the bitmap needs to be properly computed for each block
|
||||
const bitmap = Buffer.alloc(vhd.bitmapSize, 0xff)
|
||||
for (let iBlock = 0; iBlock < nBlocks; ++iBlock) {
|
||||
const owner = blocksOwner[iBlock]
|
||||
if (owner === undefined) {
|
||||
continue
|
||||
}
|
||||
return
|
||||
}
|
||||
let block = blocksByVhd.get(vhd)
|
||||
if (block === undefined) {
|
||||
block = yield vhd._readBlock(iBlock)
|
||||
blocksByVhd.set(vhd, block)
|
||||
}
|
||||
const { bitmap, data } = block
|
||||
if (isRootVhd) {
|
||||
yield data.slice(i * SECTOR_SIZE, n * SECTOR_SIZE)
|
||||
return
|
||||
}
|
||||
while (i < n) {
|
||||
const hasData = mapTestBit(bitmap, i)
|
||||
const start = i
|
||||
do {
|
||||
++i
|
||||
} while (i < n && mapTestBit(bitmap, i) === hasData)
|
||||
if (hasData) {
|
||||
yield data.slice(start * SECTOR_SIZE, i * SECTOR_SIZE)
|
||||
} else {
|
||||
yield * emitBlockSectors(iVhd + 1, start, i)
|
||||
|
||||
yield bitmap
|
||||
|
||||
const blocksByVhd = new Map()
|
||||
const emitBlockSectors = function * (iVhd, i, n) {
|
||||
const vhd = vhds[iVhd]
|
||||
const isRootVhd = vhd === rootVhd
|
||||
if (!vhd.containsBlock(iBlock)) {
|
||||
if (isRootVhd) {
|
||||
yield Buffer.alloc((n - i) * SECTOR_SIZE)
|
||||
} else {
|
||||
yield * emitBlockSectors(iVhd + 1, i, n)
|
||||
}
|
||||
return
|
||||
}
|
||||
let block = blocksByVhd.get(vhd)
|
||||
if (block === undefined) {
|
||||
block = yield vhd._readBlock(iBlock)
|
||||
blocksByVhd.set(vhd, block)
|
||||
}
|
||||
const { bitmap, data } = block
|
||||
if (isRootVhd) {
|
||||
yield data.slice(i * SECTOR_SIZE, n * SECTOR_SIZE)
|
||||
return
|
||||
}
|
||||
while (i < n) {
|
||||
const hasData = mapTestBit(bitmap, i)
|
||||
const start = i
|
||||
do {
|
||||
++i
|
||||
} while (i < n && mapTestBit(bitmap, i) === hasData)
|
||||
if (hasData) {
|
||||
yield data.slice(start * SECTOR_SIZE, i * SECTOR_SIZE)
|
||||
} else {
|
||||
yield * emitBlockSectors(iVhd + 1, start, i)
|
||||
}
|
||||
}
|
||||
}
|
||||
yield * emitBlockSectors(owner, 0, sectorsPerBlockData)
|
||||
}
|
||||
yield footer
|
||||
} finally {
|
||||
cleanup()
|
||||
}
|
||||
yield * emitBlockSectors(owner, 0, sectorsPerBlockData)
|
||||
}
|
||||
|
||||
yield footer
|
||||
} finally {
|
||||
for (let i = 0, n = fds.length; i < n; ++i) {
|
||||
handler.closeFile(fds[i]).catch(error => {
|
||||
console.warn('createReadStream, closeFd', i, error)
|
||||
})
|
||||
}
|
||||
const stream = asyncIteratorToStream(iterator())
|
||||
stream.length = fileSize
|
||||
return stream
|
||||
} catch (e) {
|
||||
cleanup()
|
||||
throw e
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
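With the change above, `createSyntheticStream` is now an `async` function that resolves to a stream whose total `length` is precomputed, so callers must `await` it before piping (as the updated CLI and test do). A hedged usage sketch; the file paths are placeholders, and the input path is expected to be absolute since the handler is rooted at `file:///`:

```js
import { createWriteStream } from 'fs'
import { getHandler } from '@xen-orchestra/fs'
import { createSyntheticStream } from 'vhd-lib'

async function flattenVhdChain (absoluteVhdPath, outputPath) {
  const handler = getHandler({ url: 'file:///' })

  // the factory is now asynchronous: it opens and reads the whole VHD chain
  // before returning the stream
  const stream = await createSyntheticStream(handler, absoluteVhdPath)
  console.log('synthetic VHD size:', stream.length)

  await new Promise((resolve, reject) => {
    stream
      .on('error', reject)
      .pipe(createWriteStream(outputPath).on('error', reject).on('finish', resolve))
  })
}
```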
@@ -1,3 +1,6 @@
|
||||
// see https://github.com/babel/babel/issues/8450
|
||||
import 'core-js/features/symbol/async-iterator'
|
||||
|
||||
export { default } from './vhd'
|
||||
export { default as chainVhd } from './chain'
|
||||
export { default as createContentStream } from './createContentStream'
|
||||
|
||||
@@ -228,16 +228,15 @@ export default class Vhd {
|
||||
return this._read(
|
||||
sectorsToBytes(blockAddr),
|
||||
onlyBitmap ? this.bitmapSize : this.fullBlockSize
|
||||
).then(
|
||||
buf =>
|
||||
onlyBitmap
|
||||
? { id: blockId, bitmap: buf }
|
||||
: {
|
||||
id: blockId,
|
||||
bitmap: buf.slice(0, this.bitmapSize),
|
||||
data: buf.slice(this.bitmapSize),
|
||||
buffer: buf,
|
||||
}
|
||||
).then(buf =>
|
||||
onlyBitmap
|
||||
? { id: blockId, bitmap: buf }
|
||||
: {
|
||||
id: blockId,
|
||||
bitmap: buf.slice(0, this.bitmapSize),
|
||||
data: buf.slice(this.bitmapSize),
|
||||
buffer: buf,
|
||||
}
|
||||
)
|
||||
}
|
||||
|
||||
|
||||
@@ -5,9 +5,9 @@ import tmp from 'tmp'
|
||||
import { createWriteStream, readFile } from 'fs-promise'
|
||||
import { fromEvent, pFromCallback } from 'promise-toolbox'
|
||||
|
||||
import { createFooter } from './_createFooterHeader'
|
||||
import createReadableRawVHDStream from './createReadableRawStream'
|
||||
import createReadableSparseVHDStream from './createReadableSparseStream'
|
||||
import { createReadableRawStream, createReadableSparseStream } from './'
|
||||
|
||||
import { createFooter } from './src/_createFooterHeader'
|
||||
|
||||
const initialDir = process.cwd()
|
||||
|
||||
@@ -54,7 +54,7 @@ test('ReadableRawVHDStream does not crash', async () => {
|
||||
},
|
||||
}
|
||||
const fileSize = 1000
|
||||
const stream = createReadableRawVHDStream(fileSize, mockParser)
|
||||
const stream = createReadableRawStream(fileSize, mockParser)
|
||||
const pipe = stream.pipe(createWriteStream('output.vhd'))
|
||||
await fromEvent(pipe, 'finish')
|
||||
await execa('vhd-util', ['check', '-t', '-i', '-n', 'output.vhd'])
|
||||
@@ -85,7 +85,7 @@ test('ReadableRawVHDStream detects when blocks are out of order', async () => {
|
||||
}
|
||||
return expect(
|
||||
new Promise((resolve, reject) => {
|
||||
const stream = createReadableRawVHDStream(100000, mockParser)
|
||||
const stream = createReadableRawStream(100000, mockParser)
|
||||
stream.on('error', reject)
|
||||
const pipe = stream.pipe(createWriteStream('outputStream'))
|
||||
pipe.on('finish', resolve)
|
||||
@@ -107,12 +107,13 @@ test('ReadableSparseVHDStream can handle a sparse file', async () => {
|
||||
},
|
||||
]
|
||||
const fileSize = blockSize * 110
|
||||
const stream = createReadableSparseVHDStream(
|
||||
const stream = await createReadableSparseStream(
|
||||
fileSize,
|
||||
blockSize,
|
||||
blocks.map(b => b.offsetBytes),
|
||||
blocks
|
||||
)
|
||||
expect(stream.length).toEqual(4197888)
|
||||
const pipe = stream.pipe(createWriteStream('output.vhd'))
|
||||
await fromEvent(pipe, 'finish')
|
||||
await execa('vhd-util', ['check', '-t', '-i', '-n', 'output.vhd'])
|
||||
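The test above shows that `createReadableSparseStream` is now awaitable as well and exposes the final VHD size as `stream.length`. A small sketch along the same lines; the shape of the block objects beyond `offsetBytes` is an assumption based on this test:

```js
import { createWriteStream } from 'fs'
import { fromEvent } from 'promise-toolbox'
import { createReadableSparseStream } from 'vhd-lib'

async function writeSparseVhd (diskSize, blockSize, blocks, outputPath) {
  // blocks: array of { offsetBytes, ... } objects, as built by the test above
  const stream = await createReadableSparseStream(
    diskSize,
    blockSize,
    blocks.map(b => b.offsetBytes),
    blocks
  )
  console.log('VHD file size to be produced:', stream.length)

  await fromEvent(stream.pipe(createWriteStream(outputPath)), 'finish')
}
```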
packages/xapi-explore-sr/.babelrc.js (new file)
@@ -0,0 +1,3 @@
|
||||
module.exports = require('../../@xen-orchestra/babel-config')(
|
||||
require('./package.json')
|
||||
)
|
||||
packages/xapi-explore-sr/.npmignore (new file)
@@ -0,0 +1,24 @@
|
||||
/benchmark/
|
||||
/benchmarks/
|
||||
*.bench.js
|
||||
*.bench.js.map
|
||||
|
||||
/examples/
|
||||
example.js
|
||||
example.js.map
|
||||
*.example.js
|
||||
*.example.js.map
|
||||
|
||||
/fixture/
|
||||
/fixtures/
|
||||
*.fixture.js
|
||||
*.fixture.js.map
|
||||
*.fixtures.js
|
||||
*.fixtures.js.map
|
||||
|
||||
/test/
|
||||
/tests/
|
||||
*.spec.js
|
||||
*.spec.js.map
|
||||
|
||||
__snapshots__/
|
||||
packages/xapi-explore-sr/README.md (new file)
@@ -0,0 +1,52 @@
# xapi-explore-sr [](https://travis-ci.org/vatesfr/xen-orchestra)

> Display the list of VDIs (unmanaged and snapshots included) of an SR

## Install

Installation of the [npm package](https://npmjs.org/package/xapi-explore-sr):

```
> npm install --global xapi-explore-sr
```

## Usage

```
> xapi-explore-sr
Usage: xapi-explore-sr [--full] <SR UUID> <XenServer URL> <XenServer user> [<XenServer password>]
```

## Development

```
# Install dependencies
> npm install

# Run the tests
> npm test

# Continuously compile
> npm run dev

# Continuously run the tests
> npm run dev-test

# Build for production (automatically called by npm install)
> npm run build
```

## Contributions

Contributions are *very* welcome, either on the documentation or on the code.

You may:

- report any [issue](https://github.com/vatesfr/xen-orchestra/issues) you've encountered;
- fork and create a pull request.

## License

ISC © [Vates SAS](https://vates.fr)

packages/xapi-explore-sr/package.json (new file)
@@ -0,0 +1,60 @@
|
||||
{
|
||||
"name": "xapi-explore-sr",
|
||||
"version": "0.2.1",
|
||||
"license": "ISC",
|
||||
"description": "Display the list of VDIs (unmanaged and snapshots included) of a SR",
|
||||
"keywords": [
|
||||
"api",
|
||||
"sr",
|
||||
"vdi",
|
||||
"vdis",
|
||||
"xen",
|
||||
"xen-api",
|
||||
"xenapi"
|
||||
],
|
||||
"homepage": "https://github.com/vatesfr/xen-orchestra/tree/master/packages/xapi-explore-sr",
|
||||
"bugs": "https://github.com/vatesfr/xen-orchestra/issues",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://github.com/vatesfr/xen-orchestra.git"
|
||||
},
|
||||
"author": {
|
||||
"name": "Julien Fontanet",
|
||||
"email": "julien.fontanet@isonoe.net"
|
||||
},
|
||||
"preferGlobal": true,
|
||||
"main": "dist/",
|
||||
"bin": {
|
||||
"xapi-explore-sr": "dist/index.js"
|
||||
},
|
||||
"files": [
|
||||
"dist/"
|
||||
],
|
||||
"engines": {
|
||||
"node": ">=8"
|
||||
},
|
||||
"dependencies": {
|
||||
"archy": "^1.0.0",
|
||||
"chalk": "^2.3.2",
|
||||
"exec-promise": "^0.7.0",
|
||||
"human-format": "^0.10.0",
|
||||
"lodash": "^4.17.4",
|
||||
"pw": "^0.0.4",
|
||||
"xen-api": "^0.22.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@babel/cli": "^7.1.5",
|
||||
"@babel/core": "^7.1.5",
|
||||
"@babel/preset-env": "^7.1.5",
|
||||
"babel-plugin-lodash": "^3.2.11",
|
||||
"cross-env": "^5.1.4",
|
||||
"rimraf": "^2.6.1"
|
||||
},
|
||||
"scripts": {
|
||||
"build": "cross-env NODE_ENV=production babel --source-maps --out-dir=dist/ src/",
|
||||
"dev": "cross-env NODE_ENV=development babel --watch --source-maps --out-dir=dist/ src/",
|
||||
"prebuild": "rimraf dist/",
|
||||
"predev": "yarn run prebuild",
|
||||
"prepublishOnly": "yarn run build"
|
||||
}
|
||||
}
|
||||
packages/xapi-explore-sr/src/index.js (new file, executable)
@@ -0,0 +1,161 @@
|
||||
#!/usr/bin/env node
|
||||
|
||||
import archy from 'archy'
|
||||
import chalk from 'chalk'
|
||||
import execPromise from 'exec-promise'
|
||||
import humanFormat from 'human-format'
|
||||
import pw from 'pw'
|
||||
import { createClient } from 'xen-api'
|
||||
import { forEach, map, orderBy } from 'lodash'
|
||||
|
||||
// ===================================================================
|
||||
|
||||
const askPassword = prompt =>
|
||||
new Promise(resolve => {
|
||||
prompt && process.stderr.write(`${prompt}: `)
|
||||
pw(resolve)
|
||||
})
|
||||
|
||||
const formatSize = bytes =>
|
||||
humanFormat(bytes, {
|
||||
prefix: 'Gi',
|
||||
scale: 'binary',
|
||||
})
|
||||
|
||||
const required = name => {
|
||||
const e = `missing required argument <${name}>`
|
||||
throw e
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
const STYLES = [
|
||||
[
|
||||
vdi => !vdi.managed,
|
||||
chalk.enabled ? chalk.red : label => `[unmanaged] ${label}`,
|
||||
],
|
||||
[
|
||||
vdi => vdi.is_a_snapshot,
|
||||
chalk.enabled ? chalk.yellow : label => `[snapshot] ${label}`,
|
||||
],
|
||||
]
|
||||
const getStyle = vdi => {
|
||||
for (let i = 0, n = STYLES.length; i < n; ++i) {
|
||||
const entry = STYLES[i]
|
||||
if (entry[0](vdi)) {
|
||||
return entry[1]
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const mapFilter = (collection, iteratee, results = []) => {
|
||||
forEach(collection, function () {
|
||||
const result = iteratee.apply(this, arguments)
|
||||
if (result !== undefined) {
|
||||
results.push(result)
|
||||
}
|
||||
})
|
||||
return results
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
execPromise(async args => {
|
||||
if (args.length === 0 || args[0] === '-h' || args[0] === '--help') {
|
||||
return `Usage: xapi-explore-sr [--full] <SR UUID> <XenServer URL> <XenServer user> [<XenServer password>]`
|
||||
}
|
||||
|
||||
const full = args[0] === '--full'
|
||||
if (full) {
|
||||
args.shift()
|
||||
}
|
||||
|
||||
const [
|
||||
srUuid = required('SR UUID'),
|
||||
url = required('XenServer URL'),
|
||||
user = required('XenServer user'),
|
||||
password = await askPassword('XenServer password'),
|
||||
] = args
|
||||
|
||||
const xapi = createClient({
|
||||
allowUnauthorized: true,
|
||||
auth: { user, password },
|
||||
readOnly: true,
|
||||
url,
|
||||
watchEvents: false,
|
||||
})
|
||||
await xapi.connect()
|
||||
|
||||
const srRef = await xapi.call('SR.get_by_uuid', srUuid)
|
||||
const sr = await xapi.call('SR.get_record', srRef)
|
||||
|
||||
const vdisByRef = {}
|
||||
await Promise.all(
|
||||
map(sr.VDIs, async ref => {
|
||||
const vdi = await xapi.call('VDI.get_record', ref)
|
||||
vdisByRef[ref] = vdi
|
||||
})
|
||||
)
|
||||
|
||||
const hasParents = {}
|
||||
const vhdChildrenByUuid = {}
|
||||
forEach(vdisByRef, vdi => {
|
||||
const vhdParent = vdi.sm_config['vhd-parent']
|
||||
if (vhdParent) {
|
||||
;(
|
||||
vhdChildrenByUuid[vhdParent] || (vhdChildrenByUuid[vhdParent] = [])
|
||||
).push(vdi)
|
||||
} else if (!(vdi.snapshot_of in vdisByRef)) {
|
||||
return
|
||||
}
|
||||
|
||||
hasParents[vdi.uuid] = true
|
||||
})
|
||||
|
||||
const makeVdiNode = vdi => {
|
||||
const { uuid } = vdi
|
||||
|
||||
let label = `${vdi.name_label} - ${uuid} - ${formatSize(
|
||||
+vdi.physical_utilisation
|
||||
)}`
|
||||
const nodes = []
|
||||
|
||||
const vhdChildren = vhdChildrenByUuid[uuid]
|
||||
if (vhdChildren) {
|
||||
mapFilter(
|
||||
orderBy(vhdChildren, 'is_a_snapshot', 'desc'),
|
||||
makeVdiNode,
|
||||
nodes
|
||||
)
|
||||
}
|
||||
|
||||
mapFilter(
|
||||
vdi.snapshots,
|
||||
ref => {
|
||||
const vdi = vdisByRef[ref]
|
||||
if (full || !vdi.sm_config['vhd-parent']) {
|
||||
return makeVdiNode(vdi)
|
||||
}
|
||||
},
|
||||
nodes
|
||||
)
|
||||
|
||||
const style = getStyle(vdi)
|
||||
if (style) {
|
||||
label = style(label)
|
||||
}
|
||||
|
||||
return { label, nodes }
|
||||
}
|
||||
|
||||
const nodes = mapFilter(orderBy(vdisByRef, ['name_label', 'uuid']), vdi => {
|
||||
if (!hasParents[vdi.uuid]) {
|
||||
return makeVdiNode(vdi)
|
||||
}
|
||||
})
|
||||
|
||||
return archy({
|
||||
label: `${sr.name_label} (${sr.VDIs.length} VDIs)`,
|
||||
nodes,
|
||||
})
|
||||
})
|
||||
@@ -4,6 +4,9 @@
|
||||
|
||||
Tested with:
|
||||
|
||||
- XenServer 7.6
|
||||
- XenServer 7.5
|
||||
- XenServer 7.4
|
||||
- XenServer 7.3
|
||||
- XenServer 7.2
|
||||
- XenServer 7.1
|
||||
@@ -44,6 +47,7 @@ Options:
|
||||
- `allowUnauthorized`: whether to accept self-signed certificates
|
||||
- `auth`: credentials used to sign in (can also be specified in the URL)
|
||||
- `readOnly = false`: if true, no methods with side-effects can be called
|
||||
- `callTimeout`: number of milliseconds after which a call is considered failed (can also be a map of timeouts by methods)
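A hedged sketch of the new `callTimeout` option documented above; the per-method map with a `'*'` fallback mirrors the `makeCallSetting` helper added to `packages/xen-api/src/index.js` later in this diff. The URL, credentials and timeout values are placeholders, and the method keys are assumed to match the XAPI method names passed to `call()`.

```js
import { createClient } from 'xen-api'

const xapi = createClient({
  url: 'https://xenserver.example.org',
  auth: { user: 'root', password: 'secret' },
  allowUnauthorized: true,
  // either a single number of milliseconds applied to every call...
  // callTimeout: 60e3,
  // ...or a map of timeouts by method, '*' being the default
  callTimeout: {
    '*': 60e3,
    'VM.start': 5 * 60e3,
  },
})

xapi.connect().then(() => console.log('connected'))
```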
|
||||
|
||||
```js
|
||||
// Force connection.
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "xen-api",
|
||||
"version": "0.19.0",
|
||||
"version": "0.22.0",
|
||||
"license": "ISC",
|
||||
"description": "Connector to the Xen API",
|
||||
"keywords": [
|
||||
@@ -39,13 +39,13 @@
|
||||
"http-request-plus": "^0.6.0",
|
||||
"iterable-backoff": "^0.0.0",
|
||||
"jest-diff": "^23.5.0",
|
||||
"json-rpc-protocol": "^0.12.0",
|
||||
"json-rpc-protocol": "^0.13.1",
|
||||
"kindof": "^2.0.0",
|
||||
"lodash": "^4.17.4",
|
||||
"make-error": "^1.3.0",
|
||||
"minimist": "^1.2.0",
|
||||
"ms": "^2.1.1",
|
||||
"promise-toolbox": "^0.10.1",
|
||||
"promise-toolbox": "^0.11.0",
|
||||
"pw": "0.0.4",
|
||||
"xmlrpc": "^1.3.2",
|
||||
"xo-collection": "^0.4.1"
|
||||
|
||||
packages/xen-api/src/_replaceSensitiveValues.js (new file)
@@ -0,0 +1,17 @@
import mapValues from 'lodash/mapValues'

export default function replaceSensitiveValues (value, replacement) {
  function helper (value, name) {
    if (name === 'password' && typeof value === 'string') {
      return replacement
    }

    if (typeof value !== 'object' || value === null) {
      return value
    }

    return Array.isArray(value) ? value.map(helper) : mapValues(value, helper)
  }

  return helper(value)
}
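A small usage sketch of the helper above; this is how `_transportCall` uses it further down in this diff (`replaceSensitiveValues(args, '* obfuscated *')`). The sample payload is illustrative.

```js
import replaceSensitiveValues from './_replaceSensitiveValues'

const call = {
  method: 'session.login_with_password',
  params: [{ user: 'root', password: 'hunter2' }],
}

// any string property named `password`, however deeply nested, is replaced
console.log(replaceSensitiveValues(call, '* obfuscated *'))
// { method: 'session.login_with_password',
//   params: [ { user: 'root', password: '* obfuscated *' } ] }
```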
@@ -11,7 +11,6 @@ import {
|
||||
forEach,
|
||||
isArray,
|
||||
isInteger,
|
||||
isObject,
|
||||
map,
|
||||
noop,
|
||||
omit,
|
||||
@@ -23,6 +22,7 @@ import {
|
||||
cancelable,
|
||||
defer,
|
||||
fromEvents,
|
||||
ignoreErrors,
|
||||
pCatch,
|
||||
pDelay,
|
||||
pFinally,
|
||||
@@ -31,6 +31,7 @@ import {
|
||||
} from 'promise-toolbox'
|
||||
|
||||
import autoTransport from './transports/auto'
|
||||
import replaceSensitiveValues from './_replaceSensitiveValues'
|
||||
|
||||
const debug = createDebug('xen-api')
|
||||
|
||||
@@ -87,14 +88,14 @@ const isSessionInvalid = ({ code }) => code === 'SESSION_INVALID'
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
class XapiError extends BaseError {
|
||||
constructor (code, params) {
|
||||
constructor(code, params) {
|
||||
super(`${code}(${params.join(', ')})`)
|
||||
|
||||
this.code = code
|
||||
this.params = params
|
||||
|
||||
// slots than can be assigned later
|
||||
this.method = undefined
|
||||
this.call = undefined
|
||||
this.url = undefined
|
||||
this.task = undefined
|
||||
}
|
||||
@@ -137,8 +138,8 @@ const parseUrl = url => {
|
||||
const {
|
||||
create: createObject,
|
||||
defineProperties,
|
||||
defineProperty,
|
||||
freeze: freezeObject,
|
||||
keys: getKeys,
|
||||
} = Object
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
@@ -190,10 +191,6 @@ const getKey = o => o.$id
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
const EMPTY_ARRAY = freezeObject([])
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
const getTaskResult = task => {
|
||||
const { status } = task
|
||||
if (status === 'cancelled') {
|
||||
@@ -213,6 +210,33 @@ const getTaskResult = task => {
|
||||
}
|
||||
}
|
||||
|
||||
function defined() {
|
||||
for (let i = 0, n = arguments.length; i < n; ++i) {
|
||||
const arg = arguments[i]
|
||||
if (arg !== undefined) {
|
||||
return arg
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const makeCallSetting = (setting, defaultValue) =>
|
||||
setting === undefined
|
||||
? () => defaultValue
|
||||
: typeof setting === 'function'
|
||||
? setting
|
||||
: typeof setting !== 'object'
|
||||
? () => setting
|
||||
: method => defined(setting[method], setting['*'], defaultValue)
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
const RESERVED_FIELDS = {
|
||||
id: true,
|
||||
pool: true,
|
||||
ref: true,
|
||||
type: true,
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
const CONNECTED = 'connected'
|
||||
@@ -222,14 +246,19 @@ const DISCONNECTED = 'disconnected'
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
export class Xapi extends EventEmitter {
|
||||
constructor (opts) {
|
||||
constructor(opts) {
|
||||
super()
|
||||
|
||||
this._allowUnauthorized = opts.allowUnauthorized
|
||||
this._auth = opts.auth
|
||||
this._callTimeout = makeCallSetting(opts.callTimeout, 0)
|
||||
this._debounce = opts.debounce == null ? 200 : opts.debounce
|
||||
this._pool = null
|
||||
this._readOnly = Boolean(opts.readOnly)
|
||||
this._RecordsByType = createObject(null)
|
||||
this._sessionId = null
|
||||
;(this._objects = new Collection()).getKey = getKey
|
||||
;(this._objectsByRef = createObject(null))[NULL_REF] = undefined
|
||||
const url = (this._url = parseUrl(opts.url))
|
||||
|
||||
if (this._auth === undefined) {
|
||||
@@ -244,39 +273,39 @@ export class Xapi extends EventEmitter {
|
||||
}
|
||||
}
|
||||
|
||||
// Memoize this function _addObject().
|
||||
this._getPool = () => this._pool
|
||||
|
||||
if (opts.watchEvents !== false) {
|
||||
this._debounce = opts.debounce == null ? 200 : opts.debounce
|
||||
|
||||
this._eventWatchers = createObject(null)
|
||||
|
||||
this._fromToken = ''
|
||||
|
||||
// Memoize this function _addObject().
|
||||
this._getPool = () => this._pool
|
||||
|
||||
this._nTasks = 0
|
||||
|
||||
const objects = (this._objects = new Collection())
|
||||
objects.getKey = getKey
|
||||
|
||||
this._objectsByRefs = createObject(null)
|
||||
this._objectsByRefs[NULL_REF] = undefined
|
||||
|
||||
this._taskWatchers = Object.create(null)
|
||||
|
||||
this.on('connected', this._watchEvents)
|
||||
this.on('disconnected', () => {
|
||||
this._fromToken = ''
|
||||
objects.clear()
|
||||
})
|
||||
this.watchEvents()
|
||||
}
|
||||
}
|
||||
|
||||
get _url () {
|
||||
watchEvents() {
|
||||
this._eventWatchers = createObject(null)
|
||||
|
||||
this._fromToken = ''
|
||||
|
||||
this._nTasks = 0
|
||||
|
||||
this._taskWatchers = Object.create(null)
|
||||
|
||||
if (this.status === CONNECTED) {
|
||||
ignoreErrors.call(this._watchEvents())
|
||||
}
|
||||
|
||||
this.on('connected', this._watchEvents)
|
||||
this.on('disconnected', () => {
|
||||
this._fromToken = ''
|
||||
this._objects.clear()
|
||||
})
|
||||
}
|
||||
|
||||
get _url() {
|
||||
return this.__url
|
||||
}
|
||||
|
||||
set _url (url) {
|
||||
set _url(url) {
|
||||
this.__url = url
|
||||
this._call = autoTransport({
|
||||
allowUnauthorized: this._allowUnauthorized,
|
||||
@@ -284,15 +313,15 @@ export class Xapi extends EventEmitter {
|
||||
})
|
||||
}
|
||||
|
||||
get readOnly () {
|
||||
get readOnly() {
|
||||
return this._readOnly
|
||||
}
|
||||
|
||||
set readOnly (ro) {
|
||||
set readOnly(ro) {
|
||||
this._readOnly = Boolean(ro)
|
||||
}
|
||||
|
||||
get sessionId () {
|
||||
get sessionId() {
|
||||
const id = this._sessionId
|
||||
|
||||
if (!id || id === CONNECTING) {
|
||||
@@ -302,20 +331,20 @@ export class Xapi extends EventEmitter {
|
||||
return id
|
||||
}
|
||||
|
||||
get status () {
|
||||
get status() {
|
||||
const id = this._sessionId
|
||||
|
||||
return id ? (id === CONNECTING ? CONNECTING : CONNECTED) : DISCONNECTED
|
||||
}
|
||||
|
||||
get _humanId () {
|
||||
get _humanId() {
|
||||
return `${this._auth.user}@${this._url.hostname}`
|
||||
}
|
||||
|
||||
// ensure we have received all events up to this call
|
||||
//
|
||||
// optionally returns the up to date object for the given ref
|
||||
barrier (ref) {
|
||||
barrier(ref) {
|
||||
const eventWatchers = this._eventWatchers
|
||||
if (eventWatchers === undefined) {
|
||||
return Promise.reject(
|
||||
@@ -356,7 +385,7 @@ export class Xapi extends EventEmitter {
|
||||
)
|
||||
}
|
||||
|
||||
connect () {
|
||||
connect() {
|
||||
const { status } = this
|
||||
|
||||
if (status === CONNECTED) {
|
||||
@@ -393,7 +422,7 @@ export class Xapi extends EventEmitter {
|
||||
)
|
||||
}
|
||||
|
||||
disconnect () {
|
||||
disconnect() {
|
||||
return Promise.resolve().then(() => {
|
||||
const { status } = this
|
||||
|
||||
@@ -412,14 +441,14 @@ export class Xapi extends EventEmitter {
|
||||
}
|
||||
|
||||
// High level calls.
|
||||
call (method, ...args) {
|
||||
call(method, ...args) {
|
||||
return this._readOnly && !isReadOnlyCall(method, args)
|
||||
? Promise.reject(new Error(`cannot call ${method}() in read only mode`))
|
||||
: this._sessionCall(method, prepareParam(args))
|
||||
}
|
||||
|
||||
@cancelable
|
||||
callAsync ($cancelToken, method, ...args) {
|
||||
callAsync($cancelToken, method, ...args) {
|
||||
return this._readOnly && !isReadOnlyCall(method, args)
|
||||
? Promise.reject(new Error(`cannot call ${method}() in read only mode`))
|
||||
: this._sessionCall(`Async.${method}`, args).then(taskRef => {
|
||||
@@ -438,7 +467,7 @@ export class Xapi extends EventEmitter {
|
||||
//
|
||||
// allowed even in read-only mode because it does not have impact on the
|
||||
// XenServer and it's necessary for getResource()
|
||||
createTask (nameLabel, nameDescription = '') {
|
||||
createTask(nameLabel, nameDescription = '') {
|
||||
const promise = this._sessionCall('task.create', [
|
||||
nameLabel,
|
||||
nameDescription,
|
||||
@@ -456,13 +485,13 @@ export class Xapi extends EventEmitter {
|
||||
// Nice getter which returns the object for a given $id (internal to
|
||||
// this lib), UUID (unique identifier that some objects have) or
|
||||
// opaque reference (internal to XAPI).
|
||||
getObject (idOrUuidOrRef, defaultValue) {
|
||||
getObject(idOrUuidOrRef, defaultValue) {
|
||||
if (typeof idOrUuidOrRef === 'object') {
|
||||
idOrUuidOrRef = idOrUuidOrRef.$id
|
||||
}
|
||||
|
||||
const object =
|
||||
this._objects.all[idOrUuidOrRef] || this._objectsByRefs[idOrUuidOrRef]
|
||||
this._objects.all[idOrUuidOrRef] || this._objectsByRef[idOrUuidOrRef]
|
||||
|
||||
if (object !== undefined) return object
|
||||
|
||||
@@ -473,8 +502,8 @@ export class Xapi extends EventEmitter {
|
||||
|
||||
// Returns the object for a given opaque reference (internal to
|
||||
// XAPI).
|
||||
getObjectByRef (ref, defaultValue) {
|
||||
const object = this._objectsByRefs[ref]
|
||||
getObjectByRef(ref, defaultValue) {
|
||||
const object = this._objectsByRef[ref]
|
||||
|
||||
if (object !== undefined) return object
|
||||
|
||||
@@ -485,7 +514,7 @@ export class Xapi extends EventEmitter {
|
||||
|
||||
// Returns the object for a given UUID (unique identifier that some
|
||||
// objects have).
|
||||
getObjectByUuid (uuid, defaultValue) {
|
||||
getObjectByUuid(uuid, defaultValue) {
|
||||
// Objects ids are already UUIDs if they have one.
|
||||
const object = this._objects.all[uuid]
|
||||
|
||||
@@ -496,20 +525,20 @@ export class Xapi extends EventEmitter {
|
||||
throw new Error('no object with UUID: ' + uuid)
|
||||
}
|
||||
|
||||
async getRecord (type, ref) {
|
||||
const record = await this._sessionCall(`${type}.get_record`, [ref])
|
||||
|
||||
// All custom properties are read-only and non enumerable.
|
||||
defineProperties(record, {
|
||||
$id: { value: record.uuid || ref },
|
||||
$ref: { value: ref },
|
||||
$type: { value: type },
|
||||
})
|
||||
|
||||
return record
|
||||
async getRecord(type, ref) {
|
||||
return this._wrapRecord(
|
||||
await this._sessionCall(`${type}.get_record`, [ref])
|
||||
)
|
||||
}
|
||||
|
||||
async getRecordByUuid (type, uuid) {
|
||||
async getAllRecords(type) {
|
||||
return map(
|
||||
await this._sessionCall(`${type}.get_all_records`),
|
||||
(record, ref) => this._wrapRecord(type, ref, record)
|
||||
)
|
||||
}
|
||||
|
||||
async getRecordByUuid(type, uuid) {
|
||||
return this.getRecord(
|
||||
type,
|
||||
await this._sessionCall(`${type}.get_by_uuid`, [uuid])
|
||||
@@ -517,7 +546,7 @@ export class Xapi extends EventEmitter {
|
||||
}
|
||||
|
||||
@cancelable
|
||||
getResource ($cancelToken, pathname, { host, query, task }) {
|
||||
getResource($cancelToken, pathname, { host, query, task }) {
|
||||
return this._autoTask(task, `Xapi#getResource ${pathname}`).then(
|
||||
taskRef => {
|
||||
query = { ...query, session_id: this.sessionId }
|
||||
@@ -557,7 +586,7 @@ export class Xapi extends EventEmitter {
|
||||
}
|
||||
|
||||
@cancelable
|
||||
putResource ($cancelToken, body, pathname, { host, query, task } = {}) {
|
||||
putResource($cancelToken, body, pathname, { host, query, task } = {}) {
|
||||
if (this._readOnly) {
|
||||
return Promise.reject(
|
||||
new Error(new Error('cannot put resource in read only mode'))
|
||||
@@ -663,13 +692,13 @@ export class Xapi extends EventEmitter {
|
||||
)
|
||||
}
|
||||
|
||||
setField ({ $type, $ref }, field, value) {
|
||||
setField({ $type, $ref }, field, value) {
|
||||
return this.call(`${$type}.set_${field}`, $ref, value).then(noop)
|
||||
}
|
||||
|
||||
setFieldEntries (record, field, entries) {
|
||||
setFieldEntries(record, field, entries) {
|
||||
return Promise.all(
|
||||
Object.keys(entries).map(entry => {
|
||||
getKeys(entries).map(entry => {
|
||||
const value = entries[entry]
|
||||
if (value !== undefined) {
|
||||
return value === null
|
||||
@@ -680,7 +709,7 @@ export class Xapi extends EventEmitter {
|
||||
).then(noop)
|
||||
}
|
||||
|
||||
async setFieldEntry ({ $type, $ref }, field, entry, value) {
|
||||
async setFieldEntry({ $type, $ref }, field, entry, value) {
|
||||
while (true) {
|
||||
try {
|
||||
await this.call(`${$type}.add_to_${field}`, $ref, entry, value)
|
||||
@@ -694,11 +723,11 @@ export class Xapi extends EventEmitter {
|
||||
}
|
||||
}
|
||||
|
||||
unsetFieldEntry ({ $type, $ref }, field, entry) {
|
||||
unsetFieldEntry({ $type, $ref }, field, entry) {
|
||||
return this.call(`${$type}.remove_from_${field}`, $ref, entry)
|
||||
}
|
||||
|
||||
watchTask (ref) {
|
||||
watchTask(ref) {
|
||||
const watchers = this._taskWatchers
|
||||
if (watchers === undefined) {
|
||||
throw new Error('Xapi#watchTask() requires events watching')
|
||||
@@ -710,7 +739,7 @@ export class Xapi extends EventEmitter {
|
||||
let watcher = watchers[ref]
|
||||
if (watcher === undefined) {
|
||||
// sync check if the task is already settled
|
||||
const task = this._objectsByRefs[ref]
|
||||
const task = this._objectsByRef[ref]
|
||||
if (task !== undefined) {
|
||||
const result = getTaskResult(task)
|
||||
if (result !== undefined) {
|
||||
@@ -723,16 +752,16 @@ export class Xapi extends EventEmitter {
|
||||
return watcher.promise
|
||||
}
|
||||
|
||||
get pool () {
|
||||
get pool() {
|
||||
return this._pool
|
||||
}
|
||||
|
||||
get objects () {
|
||||
get objects() {
|
||||
return this._objects
|
||||
}
|
||||
|
||||
// return a promise which resolves to a task ref or undefined
|
||||
_autoTask (task = this._taskWatchers !== undefined, name) {
|
||||
_autoTask(task = this._taskWatchers !== undefined, name) {
|
||||
if (task === false) {
|
||||
return Promise.resolve()
|
||||
}
|
||||
@@ -746,7 +775,7 @@ export class Xapi extends EventEmitter {
|
||||
}
|
||||
|
||||
// Medium level call: handle session errors.
|
||||
_sessionCall (method, args) {
|
||||
_sessionCall(method, args) {
|
||||
try {
|
||||
if (startsWith(method, 'session.')) {
|
||||
throw new Error('session.*() methods are disabled from this interface')
|
||||
@@ -757,96 +786,50 @@ export class Xapi extends EventEmitter {
|
||||
newArgs.push.apply(newArgs, args)
|
||||
}
|
||||
|
||||
return pCatch.call(
|
||||
this._transportCall(method, newArgs),
|
||||
isSessionInvalid,
|
||||
() => {
|
||||
// XAPI is sometimes reinitialized and sessions are lost.
|
||||
// Try to login again.
|
||||
debug('%s: the session has been reinitialized', this._humanId)
|
||||
return pTimeout.call(
|
||||
pCatch.call(
|
||||
this._transportCall(method, newArgs),
|
||||
isSessionInvalid,
|
||||
() => {
|
||||
// XAPI is sometimes reinitialized and sessions are lost.
|
||||
// Try to login again.
|
||||
debug('%s: the session has been reinitialized', this._humanId)
|
||||
|
||||
this._sessionId = null
|
||||
return this.connect().then(() => this._sessionCall(method, args))
|
||||
}
|
||||
this._sessionId = null
|
||||
return this.connect().then(() => this._sessionCall(method, args))
|
||||
}
|
||||
),
|
||||
this._callTimeout(method, args)
|
||||
)
|
||||
} catch (error) {
|
||||
return Promise.reject(error)
|
||||
}
|
||||
}
|
||||
|
||||
_addObject (type, ref, object) {
|
||||
const { _objectsByRefs: objectsByRefs } = this
|
||||
|
||||
const reservedKeys = {
|
||||
id: true,
|
||||
pool: true,
|
||||
ref: true,
|
||||
type: true,
|
||||
}
|
||||
const getKey = (key, obj) =>
|
||||
reservedKeys[key] && obj === object ? `$$${key}` : `$${key}`
|
||||
|
||||
// Creates resolved properties.
|
||||
forEach(object, function resolveObject (value, key, object) {
|
||||
if (isArray(value)) {
|
||||
if (!value.length) {
|
||||
// If the array is empty, it isn't possible to be sure that
|
||||
// it is not supposed to contain links, therefore, in
|
||||
// benefice of the doubt, a resolved property is defined.
|
||||
defineProperty(object, getKey(key, object), {
|
||||
value: EMPTY_ARRAY,
|
||||
})
|
||||
|
||||
// Minor memory optimization, use the same empty array for
|
||||
// everyone.
|
||||
object[key] = EMPTY_ARRAY
|
||||
} else if (isOpaqueRef(value[0])) {
|
||||
// This is an array of refs.
|
||||
defineProperty(object, getKey(key, object), {
|
||||
get: () => freezeObject(map(value, ref => objectsByRefs[ref])),
|
||||
})
|
||||
|
||||
freezeObject(value)
|
||||
}
|
||||
} else if (isObject(value)) {
|
||||
forEach(value, resolveObject)
|
||||
|
||||
freezeObject(value)
|
||||
} else if (isOpaqueRef(value)) {
|
||||
defineProperty(object, getKey(key, object), {
|
||||
get: () => objectsByRefs[value],
|
||||
})
|
||||
}
|
||||
})
|
||||
|
||||
// All custom properties are read-only and non enumerable.
|
||||
defineProperties(object, {
|
||||
$id: { value: object.uuid || ref },
|
||||
$pool: { get: this._getPool },
|
||||
$ref: { value: ref },
|
||||
$type: { value: type },
|
||||
})
|
||||
_addObject(type, ref, object) {
|
||||
object = this._wrapRecord(type, ref, object)
|
||||
|
||||
// Finally freezes the object.
|
||||
freezeObject(object)
|
||||
|
||||
const objects = this._objects
|
||||
const objectsByRef = this._objectsByRef
|
||||
|
||||
// An object's UUID can change during its life.
|
||||
const prev = objectsByRefs[ref]
|
||||
const prev = objectsByRef[ref]
|
||||
let prevUuid
|
||||
if (prev && (prevUuid = prev.uuid) && prevUuid !== object.uuid) {
|
||||
objects.remove(prevUuid)
|
||||
}
|
||||
|
||||
this._objects.set(object)
|
||||
objectsByRefs[ref] = object
|
||||
objectsByRef[ref] = object
|
||||
|
||||
if (type === 'pool') {
|
||||
this._pool = object
|
||||
|
||||
const eventWatchers = this._eventWatchers
|
||||
Object.keys(object.other_config).forEach(key => {
|
||||
getKeys(object.other_config).forEach(key => {
|
||||
const eventWatcher = eventWatchers[key]
|
||||
if (eventWatcher !== undefined) {
|
||||
delete eventWatchers[key]
|
||||
@@ -870,8 +853,8 @@ export class Xapi extends EventEmitter {
|
||||
}
|
||||
}
|
||||
|
||||
_removeObject (type, ref) {
|
||||
const byRefs = this._objectsByRefs
|
||||
_removeObject(type, ref) {
|
||||
const byRefs = this._objectsByRef
|
||||
const object = byRefs[ref]
|
||||
if (object !== undefined) {
|
||||
this._objects.unset(object.$id)
|
||||
@@ -893,7 +876,7 @@ export class Xapi extends EventEmitter {
|
||||
}
|
||||
}
|
||||
|
||||
_processEvents (events) {
|
||||
_processEvents(events) {
|
||||
forEach(events, event => {
|
||||
const { class: type, ref } = event
|
||||
if (event.operation === 'del') {
|
||||
@@ -904,7 +887,7 @@ export class Xapi extends EventEmitter {
|
||||
})
|
||||
}
|
||||
|
||||
_watchEvents () {
|
||||
_watchEvents() {
|
||||
const loop = () =>
|
||||
this.status === CONNECTED &&
|
||||
pTimeout
|
||||
@@ -977,7 +960,7 @@ export class Xapi extends EventEmitter {
|
||||
// methods.
|
||||
//
|
||||
// It also has to manually get all objects first.
|
||||
_watchEventsLegacy () {
|
||||
_watchEventsLegacy() {
|
||||
const getAllObjects = () => {
|
||||
return this._sessionCall('system.listMethods').then(methods => {
|
||||
// Uses introspection to determine the methods to use to get
|
||||
@@ -1028,22 +1011,109 @@ export class Xapi extends EventEmitter {
|
||||
|
||||
return getAllObjects().then(watchEvents)
|
||||
}
|
||||
|
||||
_wrapRecord(type, ref, data) {
|
||||
const RecordsByType = this._RecordsByType
|
||||
let Record = RecordsByType[type]
|
||||
if (Record === undefined) {
|
||||
const fields = getKeys(data)
|
||||
const nFields = fields.length
|
||||
const xapi = this
|
||||
|
||||
const objectsByRef = this._objectsByRef
|
||||
const getObjectByRef = ref => objectsByRef[ref]
|
||||
|
||||
Record = function(ref, data) {
|
||||
defineProperties(this, {
|
||||
$id: { value: data.uuid || ref },
|
||||
$ref: { value: ref },
|
||||
})
|
||||
for (let i = 0; i < nFields; ++i) {
|
||||
const field = fields[i]
|
||||
this[field] = data[field]
|
||||
}
|
||||
}
|
||||
|
||||
const getters = { $pool: this._getPool }
|
||||
const props = { $type: type }
|
||||
fields.forEach(field => {
|
||||
props[`set_${field}`] = function(value) {
|
||||
return xapi.setField(this, field, value)
|
||||
}
|
||||
|
||||
const $field = (field in RESERVED_FIELDS ? '$$' : '$') + field
|
||||
|
||||
const value = data[field]
|
||||
if (isArray(value)) {
|
||||
if (value.length === 0 || isOpaqueRef(value[0])) {
|
||||
getters[$field] = function() {
|
||||
const value = this[field]
|
||||
return value.length === 0 ? value : value.map(getObjectByRef)
|
||||
}
|
||||
}
|
||||
|
||||
props[`add_to_${field}`] = function(...values) {
|
||||
return xapi
|
||||
.call(`${type}.add_${field}`, this.$ref, values)
|
||||
.then(noop)
|
||||
}
|
||||
} else if (value !== null && typeof value === 'object') {
|
||||
getters[$field] = function() {
|
||||
const value = this[field]
|
||||
const result = {}
|
||||
getKeys(value).forEach(key => {
|
||||
result[key] = objectsByRef[value[key]]
|
||||
})
|
||||
return result
|
||||
}
|
||||
props[`update_${field}`] = function(entries) {
|
||||
return xapi.setFieldEntries(this, field, entries)
|
||||
}
|
||||
} else if (isOpaqueRef(value)) {
|
||||
getters[$field] = function() {
|
||||
return objectsByRef[this[field]]
|
||||
}
|
||||
}
|
||||
})
|
||||
const descriptors = {}
|
||||
getKeys(getters).forEach(key => {
|
||||
descriptors[key] = {
|
||||
configurable: true,
|
||||
get: getters[key],
|
||||
}
|
||||
})
|
||||
getKeys(props).forEach(key => {
|
||||
descriptors[key] = {
|
||||
configurable: true,
|
||||
value: props[key],
|
||||
writable: true,
|
||||
}
|
||||
})
|
||||
defineProperties(Record.prototype, descriptors)
|
||||
|
||||
RecordsByType[type] = Record
|
||||
}
|
||||
return new Record(ref, data)
|
||||
}
|
||||
}
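For orientation (not part of the diff itself), here is a minimal sketch of what a record produced by `_wrapRecord` exposes to callers; the VM record and field names are illustrative assumptions, only the accessor shapes come from the code above:

```js
// Sketch only — `vm` stands for any wrapped record taken from the cache;
// the field names are typical XAPI VM fields used for illustration.
async function example (vm) {
  vm.$id    // uuid when available, otherwise the OpaqueRef
  vm.$ref   // the OpaqueRef of the record
  vm.$type  // the record's XAPI class
  vm.$pool  // resolved via the shared _getPool getter

  // $-prefixed getters resolve OpaqueRef fields to cached records
  const vbds = vm.$VBDs                 // array of refs -> array of records

  // per-field helpers generated above
  await vm.set_name_label('new name')            // -> xapi.setField(...)
  await vm.update_other_config({ foo: 'bar' })   // -> xapi.setFieldEntries(...)
  return vbds
}
```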
Xapi.prototype._transportCall = reduce(
|
||||
[
|
||||
function (method, args) {
|
||||
function(method, args) {
|
||||
return this._call(method, args).catch(error => {
|
||||
if (!(error instanceof Error)) {
|
||||
error = wrapError(error)
|
||||
}
|
||||
|
||||
error.method = method
|
||||
error.call = {
|
||||
method,
|
||||
params: replaceSensitiveValues(args, '* obfuscated *'),
|
||||
}
|
||||
throw error
|
||||
})
|
||||
},
|
||||
call =>
|
||||
function () {
|
||||
function() {
|
||||
let iterator // lazily created
|
||||
const loop = () =>
|
||||
pCatch.call(
|
||||
@@ -1084,7 +1154,7 @@ Xapi.prototype._transportCall = reduce(
|
||||
return loop()
|
||||
},
|
||||
call =>
|
||||
function loop () {
|
||||
function loop() {
|
||||
return pCatch.call(
|
||||
call.apply(this, arguments),
|
||||
isHostSlave,
|
||||
@@ -1107,7 +1177,7 @@ Xapi.prototype._transportCall = reduce(
|
||||
)
|
||||
},
|
||||
call =>
|
||||
function (method) {
|
||||
function(method) {
|
||||
const startTime = Date.now()
|
||||
return call.apply(this, arguments).then(
|
||||
result => {
|
||||
|
||||
@@ -1,5 +1,9 @@
|
||||
'use strict'
|
||||
|
||||
const { unauthorized } = require('xo-common/api-errors')
|
||||
|
||||
// ===================================================================
|
||||
|
||||
// These global variables are not a problem because the algorithm is
|
||||
// synchronous.
|
||||
let permissionsByObject
|
||||
@@ -86,7 +90,7 @@ const checkAuthorizationByTypes = {
|
||||
|
||||
'VM-controller': checkMember('$container'),
|
||||
|
||||
'VM-snapshot': checkMember('$snapshot_of'),
|
||||
'VM-snapshot': or(checkSelf, checkMember('$snapshot_of')),
|
||||
|
||||
'VM-template': or(checkSelf, checkMember('$pool')),
|
||||
}
|
||||
@@ -105,23 +109,26 @@ function checkAuthorization (objectId, permission) {
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
module.exports = (
|
||||
function assertPermissions (
|
||||
permissionsByObject_,
|
||||
getObject_,
|
||||
permissions,
|
||||
permission
|
||||
) => {
|
||||
) {
|
||||
// Assign global variables.
|
||||
permissionsByObject = permissionsByObject_
|
||||
getObject = getObject_
|
||||
|
||||
try {
|
||||
if (permission) {
|
||||
return checkAuthorization(permissions, permission)
|
||||
if (permission !== undefined) {
|
||||
const objectId = permissions
|
||||
if (!checkAuthorization(objectId, permission)) {
|
||||
throw unauthorized(permission, objectId)
|
||||
}
|
||||
} else {
|
||||
for (const [objectId, permission] of permissions) {
|
||||
if (!checkAuthorization(objectId, permission)) {
|
||||
return false
|
||||
throw unauthorized(permission, objectId)
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -132,3 +139,16 @@ module.exports = (
|
||||
permissionsByObject = getObject = null
|
||||
}
|
||||
}
|
||||
exports.assert = assertPermissions
|
||||
|
||||
exports.check = function checkPermissions () {
|
||||
try {
|
||||
assertPermissions.apply(undefined, arguments)
|
||||
return true
|
||||
} catch (error) {
|
||||
if (unauthorized.is(error)) {
|
||||
return false
|
||||
}
|
||||
throw error
|
||||
}
|
||||
}
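A hedged usage sketch of the reworked `assert`/`check` API, based only on the signatures visible in this hunk; the shape of `permissionsByObject` and the `getObject` resolver are assumptions for illustration:

```js
// Sketch only — the data shapes below are hypothetical stand-ins.
const { assert, check } = require('xo-acl-resolver')

const permissionsByObject = { 'vm-1': { view: true } } // hypothetical ACL map
const getObject = id => ({ id, type: 'VM' })           // hypothetical resolver

// Single-object form: throws an `unauthorized` error when the check fails.
assert(permissionsByObject, getObject, 'vm-1', 'view')

// Batch form: an array of [objectId, permission] pairs.
const ok = check(permissionsByObject, getObject, [['vm-1', 'view']])
// `check` wraps `assert` and returns true/false instead of throwing.
```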
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "xo-acl-resolver",
|
||||
"version": "0.2.4",
|
||||
"version": "0.4.0",
|
||||
"license": "ISC",
|
||||
"description": "Xen-Orchestra internal: do ACLs resolution",
|
||||
"keywords": [],
|
||||
@@ -21,5 +21,8 @@
|
||||
],
|
||||
"engines": {
|
||||
"node": ">=6"
|
||||
},
|
||||
"dependencies": {
|
||||
"xo-common": "^0.2.0"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -120,7 +120,7 @@ encoding by prefixing with `json:`:
|
||||
##### VM import
|
||||
|
||||
```
|
||||
> xo-cli vm.import host=60a6939e-8b0a-4352-9954-5bde44bcdf7d @=vm.xva
|
||||
> xo-cli vm.import sr=60a6939e-8b0a-4352-9954-5bde44bcdf7d @=vm.xva
|
||||
```
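As a side note (not part of this change), the hunk header above refers to the `json:` prefix, which lets a parameter value be passed as raw JSON; a placeholder invocation might look like the following, where `method.name` and `param` are hypothetical:

```
> xo-cli method.name param=json:'{"any": "valid JSON value"}'
```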
## Development
|
||||
|
||||
@@ -31,10 +31,9 @@
|
||||
"@babel/polyfill": "^7.0.0",
|
||||
"bluebird": "^3.5.1",
|
||||
"chalk": "^2.2.0",
|
||||
"event-to-promise": "^0.8.0",
|
||||
"exec-promise": "^0.7.0",
|
||||
"fs-promise": "^2.0.3",
|
||||
"got": "^8.0.1",
|
||||
"http-request-plus": "^0.6.0",
|
||||
"human-format": "^0.10.0",
|
||||
"l33teral": "^3.0.3",
|
||||
"lodash": "^4.17.4",
|
||||
@@ -43,6 +42,8 @@
|
||||
"nice-pipe": "0.0.0",
|
||||
"pretty-ms": "^4.0.0",
|
||||
"progress-stream": "^2.0.0",
|
||||
"promise-toolbox": "^0.11.0",
|
||||
"pump": "^3.0.0",
|
||||
"pw": "^0.0.4",
|
||||
"strip-indent": "^2.0.0",
|
||||
"xdg-basedir": "^3.0.0",
|
||||
|
||||
@@ -11,10 +11,10 @@ const resolveUrl = require('url').resolve
|
||||
const stat = require('fs-promise').stat
|
||||
|
||||
const chalk = require('chalk')
|
||||
const eventToPromise = require('event-to-promise')
|
||||
const forEach = require('lodash/forEach')
|
||||
const fromCallback = require('promise-toolbox/fromCallback')
|
||||
const getKeys = require('lodash/keys')
|
||||
const got = require('got')
|
||||
const hrp = require('http-request-plus').default
|
||||
const humanFormat = require('human-format')
|
||||
const identity = require('lodash/identity')
|
||||
const isArray = require('lodash/isArray')
|
||||
@@ -23,6 +23,7 @@ const micromatch = require('micromatch')
|
||||
const nicePipe = require('nice-pipe')
|
||||
const pairs = require('lodash/toPairs')
|
||||
const pick = require('lodash/pick')
|
||||
const pump = require('pump')
|
||||
const startsWith = require('lodash/startsWith')
|
||||
const prettyMs = require('pretty-ms')
|
||||
const progressStream = require('progress-stream')
|
||||
@@ -362,50 +363,43 @@ async function call (args) {
|
||||
ensurePathParam(method, file)
|
||||
url = resolveUrl(baseUrl, result[key])
|
||||
const output = createWriteStream(file)
|
||||
const response = await hrp(url)
|
||||
|
||||
const progress = progressStream({ time: 1e3 }, printProgress)
|
||||
|
||||
return eventToPromise(
|
||||
nicePipe([
|
||||
got.stream(url).on('response', function (response) {
|
||||
const length = response.headers['content-length']
|
||||
if (length !== undefined) {
|
||||
progress.length(length)
|
||||
}
|
||||
}),
|
||||
progress,
|
||||
output,
|
||||
]),
|
||||
'finish'
|
||||
const progress = progressStream(
|
||||
{
|
||||
length: response.headers['content-length'],
|
||||
time: 1e3,
|
||||
},
|
||||
printProgress
|
||||
)
|
||||
|
||||
return fromCallback(cb => pump(response, progress, output, cb))
|
||||
}
|
||||
|
||||
if (key === '$sendTo') {
|
||||
ensurePathParam(method, file)
|
||||
url = resolveUrl(baseUrl, result[key])
|
||||
|
||||
const stats = await stat(file)
|
||||
const length = stats.size
|
||||
|
||||
const { size: length } = await stat(file)
|
||||
const input = nicePipe([
|
||||
createReadStream(file),
|
||||
progressStream(
|
||||
{
|
||||
length: length,
|
||||
length,
|
||||
time: 1e3,
|
||||
},
|
||||
printProgress
|
||||
),
|
||||
])
|
||||
|
||||
const response = await got.post(url, {
|
||||
body: input,
|
||||
headers: {
|
||||
'content-length': length,
|
||||
},
|
||||
method: 'POST',
|
||||
})
|
||||
return response.body
|
||||
return hrp
|
||||
.post(url, {
|
||||
body: input,
|
||||
headers: {
|
||||
'content-length': length,
|
||||
},
|
||||
})
|
||||
.readAll('utf-8')
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "xo-common",
|
||||
"version": "0.1.1",
|
||||
"version": "0.2.0",
|
||||
"license": "AGPL-3.0",
|
||||
"description": "Code shared between [XO](https://xen-orchestra.com) server and clients",
|
||||
"keywords": [],
|
||||
|
||||
@@ -20,7 +20,8 @@ class XoError extends BaseError {
|
||||
const create = (code, getProps) => {
|
||||
const factory = (...args) => new XoError({ ...getProps(...args), code })
|
||||
factory.is = (error, predicate) =>
|
||||
error.code === code && iteratee(predicate)(error)
|
||||
error.code === code &&
|
||||
(predicate === undefined || iteratee(predicate)(error))
|
||||
|
||||
return factory
|
||||
}
|
||||
@@ -33,11 +34,18 @@ export const notImplemented = create(0, () => ({
|
||||
|
||||
export const noSuchObject = create(1, (id, type) => ({
|
||||
data: { id, type },
|
||||
message: 'no such object',
|
||||
message: `no such ${type || 'object'} ${id}`,
|
||||
}))
|
||||
|
||||
export const unauthorized = create(2, () => ({
|
||||
message: 'not authenticated or not enough permissions',
|
||||
export const unauthorized = create(2, (permission, objectId, objectType) => ({
|
||||
data: {
|
||||
permission,
|
||||
object: {
|
||||
id: objectId,
|
||||
type: objectType,
|
||||
},
|
||||
},
|
||||
message: 'not enough permissions',
|
||||
}))
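A short sketch of how the enriched `unauthorized` factory can be consumed (the permission name and object id are placeholders):

```js
// Sketch only — ids and permission names are placeholders.
import { unauthorized } from 'xo-common/api-errors'

const error = unauthorized('administrate', 'some-vm-id', 'VM')
error.data    // { permission: 'administrate', object: { id: 'some-vm-id', type: 'VM' } }
error.message // 'not enough permissions'

unauthorized.is(error)             // true
unauthorized.is(new Error('nope')) // false (no matching error code)
```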
export const invalidCredentials = create(3, () => ({
|
||||
|
||||
packages/xo-import-servers-csv/.npmignore (new file, 10 lines)
@@ -0,0 +1,10 @@
|
||||
/examples/
|
||||
example.js
|
||||
example.js.map
|
||||
*.example.js
|
||||
*.example.js.map
|
||||
|
||||
/test/
|
||||
/tests/
|
||||
*.spec.js
|
||||
*.spec.js.map
|
||||
packages/xo-import-servers-csv/README.md (new file, 64 lines)
@@ -0,0 +1,64 @@
|
||||
# xo-import-servers-csv [](https://travis-ci.org/vatesfr/xen-orchestra)
|
||||
|
||||
> CLI to import servers in XO from a CSV file
|
||||
|
||||
## Install
|
||||
|
||||
Installation of the [npm package](https://npmjs.org/package/xo-import-servers-csv):
|
||||
|
||||
```
|
||||
> npm install --global xo-import-servers-csv
|
||||
```
|
||||
|
||||
## Usage
|
||||
|
||||
`servers.csv`:
|
||||
|
||||
```csv
|
||||
host,username,password
|
||||
xs1.company.net,user1,password1
|
||||
xs2.company.net:8080,user2,password2
|
||||
http://xs3.company.net,user3,password3
|
||||
```
|
||||
|
||||
> The CSV file can also contain these optional fields: `label`, `autoConnect`, `allowUnauthorized`.
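For instance (illustrative values), a file using those optional fields could look like:

```csv
host,username,password,label,autoConnect,allowUnauthorized
xs1.company.net,user1,password1,Main pool,true,false
```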
Shell command:
|
||||
|
||||
```
|
||||
> xo-import-servers-csv 'https://xo.company.tld' admin@admin.net admin < servers.csv
|
||||
```
|
||||
|
||||
## Development
|
||||
|
||||
```
|
||||
# Install dependencies
|
||||
> npm install
|
||||
|
||||
# Run the tests
|
||||
> npm test
|
||||
|
||||
# Continuously compile
|
||||
> npm run dev
|
||||
|
||||
# Continuously run the tests
|
||||
> npm run dev-test
|
||||
|
||||
# Build for production (automatically called by npm install)
|
||||
> npm run build
|
||||
```
|
||||
|
||||
## Contributions
|
||||
|
||||
Contributions are *very* welcome, either on the documentation or on
|
||||
the code.
|
||||
|
||||
You may:
|
||||
|
||||
- report any [issue](https://github.com/vatesfr/xen-orchestra/issues)
|
||||
you've encountered;
|
||||
- fork and create a pull request.
|
||||
|
||||
## License
|
||||
|
||||
ISC © [Vates SAS](http://vates.fr)
|
||||
packages/xo-import-servers-csv/package.json (new file, 59 lines)
@@ -0,0 +1,59 @@
|
||||
{
|
||||
"name": "xo-import-servers-csv",
|
||||
"version": "1.1.0",
|
||||
"license": "ISC",
|
||||
"description": "CLI to import servers in XO from a CSV file",
|
||||
"keywords": [
|
||||
"csv",
|
||||
"host",
|
||||
"import",
|
||||
"orchestra",
|
||||
"pool",
|
||||
"server",
|
||||
"xen",
|
||||
"xen-orchestra"
|
||||
],
|
||||
"homepage": "https://github.com/vatesfr/xen-orchestra/tree/master/packages/xo-import-servers-csv",
|
||||
"bugs": "https://github.com/vatesfr/xen-orchestra/issues",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://github.com/vatesfr/xen-orchestra.git"
|
||||
},
|
||||
"author": {
|
||||
"name": "Julien Fontanet",
|
||||
"email": "julien.fontanet@vates.fr"
|
||||
},
|
||||
"main": "dist/",
|
||||
"bin": {
|
||||
"xo-import-servers-csv": "dist/index.js"
|
||||
},
|
||||
"files": [
|
||||
"dist/"
|
||||
],
|
||||
"engines": {
|
||||
"node": ">=4"
|
||||
},
|
||||
"dependencies": {
|
||||
"csv-parser": "^2.1.0",
|
||||
"end-of-stream": "^1.1.0",
|
||||
"exec-promise": "^0.7.0",
|
||||
"highland": "^2.10.1",
|
||||
"through2": "^3.0.0",
|
||||
"xo-lib": "^0.9.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/node": "^10.12.2",
|
||||
"@types/through2": "^2.0.31",
|
||||
"tslint": "^5.9.1",
|
||||
"tslint-config-standard": "^8.0.1",
|
||||
"typescript": "^3.1.6"
|
||||
},
|
||||
"scripts": {
|
||||
"build": "tsc",
|
||||
"dev": "tsc -w",
|
||||
"lint": "tslint 'src/*.ts'",
|
||||
"posttest": "yarn run lint",
|
||||
"prepublishOnly": "yarn run build",
|
||||
"start": "node dist/index.js"
|
||||
}
|
||||
}
|
||||
packages/xo-import-servers-csv/src/index.d.ts (new file, 23 lines)
@@ -0,0 +1,23 @@
|
||||
declare module 'csv-parser' {
|
||||
function csvParser(opts?: Object): any
|
||||
export = csvParser
|
||||
}
|
||||
|
||||
declare module 'exec-promise' {
|
||||
function execPromise(cb: (args: string[]) => any): void
|
||||
export = execPromise
|
||||
}
|
||||
|
||||
declare module 'xo-lib' {
|
||||
export default class Xo {
|
||||
user?: { email: string }
|
||||
|
||||
constructor(opts?: { credentials?: {}; url: string })
|
||||
|
||||
call(method: string, ...params: any[]): Promise<any>
|
||||
|
||||
open(): Promise<void>
|
||||
|
||||
signIn(credentials: {}): Promise<void>
|
||||
}
|
||||
}
|
||||
packages/xo-import-servers-csv/src/index.ts (new executable file, 87 lines)
@@ -0,0 +1,87 @@
|
||||
#!/usr/bin/env node
|
||||
|
||||
/// <reference path="./index.d.ts" />
|
||||
|
||||
import csvParser = require('csv-parser')
|
||||
import execPromise = require('exec-promise')
|
||||
import through2 = require('through2')
|
||||
import Xo from 'xo-lib'
|
||||
|
||||
const parseBoolean = (
|
||||
value: string,
|
||||
defaultValue?: boolean
|
||||
): boolean | undefined => {
|
||||
if (value === undefined || value === '') {
|
||||
return defaultValue
|
||||
}
|
||||
|
||||
const lcValue = value.toLocaleLowerCase()
|
||||
|
||||
if (value === '0' || lcValue === 'false') {
|
||||
return false
|
||||
}
|
||||
|
||||
if (value === '1' || lcValue === 'true') {
|
||||
return true
|
||||
}
|
||||
|
||||
throw new Error(`invalid boolean value: ${value}`)
|
||||
}
|
||||
|
||||
const requiredParam = (name: string) => {
|
||||
throw `missing param: ${name}
|
||||
|
||||
Usage: xo-import-servers-csv $url $username $password < $csvFile`
|
||||
}
|
||||
|
||||
execPromise(
|
||||
async ([
|
||||
url = requiredParam('url'),
|
||||
username = requiredParam('username'),
|
||||
password = requiredParam('password'),
|
||||
]): Promise<void> => {
|
||||
const xo = new Xo({ url })
|
||||
|
||||
await xo.open()
|
||||
await xo.signIn({ username, password })
|
||||
console.log('connected as', xo.user!.email)
|
||||
|
||||
const errors: any[] = []
|
||||
|
||||
const stream = process.stdin.pipe(csvParser()).pipe(
|
||||
through2.obj(
|
||||
(
|
||||
{ allowUnauthorized, autoConnect, host, label, password, username },
|
||||
_,
|
||||
next
|
||||
) => {
|
||||
console.log('server', host)
|
||||
|
||||
xo.call('server.add', {
|
||||
allowUnauthorized: parseBoolean(allowUnauthorized),
|
||||
autoConnect: parseBoolean(autoConnect, false),
|
||||
host,
|
||||
label,
|
||||
password,
|
||||
username,
|
||||
}).then(
|
||||
() => next(),
|
||||
(error: any) => {
|
||||
errors.push({ host, error })
|
||||
return next()
|
||||
}
|
||||
)
|
||||
}
|
||||
)
|
||||
)
|
||||
|
||||
await new Promise((resolve, reject) => {
|
||||
stream.on('error', reject)
|
||||
stream.on('finish', resolve)
|
||||
})
|
||||
|
||||
if (errors.length) {
|
||||
console.error(errors)
|
||||
}
|
||||
}
|
||||
)
|
||||
packages/xo-import-servers-csv/tsconfig.json (new file, 14 lines)
@@ -0,0 +1,14 @@
|
||||
{
|
||||
"compilerOptions": {
|
||||
"module": "commonjs",
|
||||
"moduleResolution": "node",
|
||||
"newLine": "lf",
|
||||
"noImplicitAny": true,
|
||||
"outDir": "dist/",
|
||||
"removeComments": true,
|
||||
"sourceMap": true,
|
||||
"strictNullChecks": true,
|
||||
"target": "es2015"
|
||||
},
|
||||
"includes": "src/**/*"
|
||||
}
|
||||
packages/xo-import-servers-csv/tslint.json (new file, 3 lines)
@@ -0,0 +1,3 @@
|
||||
{
|
||||
"extends": "tslint-config-standard"
|
||||
}
|
||||
@@ -38,7 +38,7 @@
|
||||
"inquirer": "^6.0.0",
|
||||
"ldapjs": "^1.0.1",
|
||||
"lodash": "^4.17.4",
|
||||
"promise-toolbox": "^0.10.1"
|
||||
"promise-toolbox": "^0.11.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@babel/cli": "^7.0.0",
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "xo-server-backup-reports",
|
||||
"version": "0.14.0",
|
||||
"version": "0.15.0",
|
||||
"license": "AGPL-3.0",
|
||||
"description": "Backup reports plugin for XO-Server",
|
||||
"keywords": [
|
||||
|
||||
@@ -50,6 +50,7 @@ const ICON_FAILURE = '🚨'
|
||||
const ICON_INTERRUPTED = '⚠️'
|
||||
const ICON_SKIPPED = '⏩'
|
||||
const ICON_SUCCESS = '✔'
|
||||
const ICON_WARNING = '⚠️'
|
||||
|
||||
const STATUS_ICON = {
|
||||
failure: ICON_FAILURE,
|
||||
@@ -99,12 +100,13 @@ const isSkippedError = error =>
|
||||
error.message === UNHEALTHY_VDI_CHAIN_ERROR ||
|
||||
error.message === NO_SUCH_OBJECT_ERROR
|
||||
|
||||
const INDENT = ' '
|
||||
const createGetTemporalDataMarkdown = formatDate => (
|
||||
start,
|
||||
end,
|
||||
nbIndent = 0
|
||||
) => {
|
||||
const indent = ' '.repeat(nbIndent)
|
||||
const indent = INDENT.repeat(nbIndent)
|
||||
|
||||
const markdown = [`${indent}- **Start time**: ${formatDate(start)}`]
|
||||
if (end !== undefined) {
|
||||
@@ -117,6 +119,17 @@ const createGetTemporalDataMarkdown = formatDate => (
|
||||
return markdown
|
||||
}
|
||||
|
||||
const addWarnings = (text, warnings, nbIndent = 0) => {
|
||||
if (warnings === undefined) {
|
||||
return
|
||||
}
|
||||
|
||||
const indent = INDENT.repeat(nbIndent)
|
||||
warnings.forEach(({ message }) => {
|
||||
text.push(`${indent}- **${ICON_WARNING} ${message}**`)
|
||||
})
|
||||
}
|
||||
|
||||
class BackupReportsXoPlugin {
|
||||
constructor (xo) {
|
||||
this._xo = xo
|
||||
@@ -180,13 +193,14 @@ class BackupReportsXoPlugin {
|
||||
let markdown = [
|
||||
`## Global status: ${log.status}`,
|
||||
'',
|
||||
`- **Job ID**: ${log.jobId}`,
|
||||
`- **Run ID**: ${runJobId}`,
|
||||
`- **mode**: ${mode}`,
|
||||
...getTemporalDataMarkdown(log.start, log.end),
|
||||
`- **Error**: ${log.result.message}`,
|
||||
'---',
|
||||
'',
|
||||
`*${pkg.name} v${pkg.version}*`,
|
||||
]
|
||||
addWarnings(markdown, log.warnings)
|
||||
markdown.push('---', '', `*${pkg.name} v${pkg.version}*`)
|
||||
|
||||
markdown = markdown.join('\n')
|
||||
return this._sendReport({
|
||||
@@ -228,6 +242,7 @@ class BackupReportsXoPlugin {
|
||||
`- **UUID**: ${vm !== undefined ? vm.uuid : vmId}`,
|
||||
...getTemporalDataMarkdown(taskLog.start, taskLog.end),
|
||||
]
|
||||
addWarnings(text, taskLog.warnings)
|
||||
|
||||
const failedSubTasks = []
|
||||
const snapshotText = []
|
||||
@@ -262,6 +277,7 @@ class BackupReportsXoPlugin {
|
||||
}** (${id}) ${icon}`,
|
||||
...getTemporalDataMarkdown(subTaskLog.start, subTaskLog.end, 2)
|
||||
)
|
||||
addWarnings(remotesText, subTaskLog.warnings, 2)
|
||||
if (subTaskLog.status === 'failure') {
|
||||
failedSubTasks.push(remote !== undefined ? remote.name : id)
|
||||
remotesText.push('', errorMessage)
|
||||
@@ -278,6 +294,7 @@ class BackupReportsXoPlugin {
|
||||
` - **${srName}** (${srUuid}) ${icon}`,
|
||||
...getTemporalDataMarkdown(subTaskLog.start, subTaskLog.end, 2)
|
||||
)
|
||||
addWarnings(srsText, subTaskLog.warnings, 2)
|
||||
if (subTaskLog.status === 'failure') {
|
||||
failedSubTasks.push(sr !== undefined ? sr.name_label : id)
|
||||
srsText.push('', errorMessage)
|
||||
@@ -293,6 +310,7 @@ class BackupReportsXoPlugin {
|
||||
}
|
||||
|
||||
const operationInfoText = []
|
||||
addWarnings(operationInfoText, operationLog.warnings, 3)
|
||||
if (operationLog.status === 'success') {
|
||||
const size = operationLog.result.size
|
||||
if (operationLog.message === 'merge') {
|
||||
@@ -395,6 +413,8 @@ class BackupReportsXoPlugin {
|
||||
let markdown = [
|
||||
`## Global status: ${log.status}`,
|
||||
'',
|
||||
`- **Job ID**: ${log.jobId}`,
|
||||
`- **Run ID**: ${runJobId}`,
|
||||
`- **mode**: ${mode}`,
|
||||
...getTemporalDataMarkdown(log.start, log.end),
|
||||
`- **Successes**: ${nSuccesses} / ${nVms}`,
|
||||
@@ -406,6 +426,7 @@ class BackupReportsXoPlugin {
|
||||
if (globalMergeSize !== 0) {
|
||||
markdown.push(`- **Merge size**: ${formatSize(globalMergeSize)}`)
|
||||
}
|
||||
addWarnings(markdown, log.warnings)
|
||||
markdown.push('')
|
||||
|
||||
if (nFailures !== 0) {
|
||||
@@ -646,8 +667,8 @@ class BackupReportsXoPlugin {
|
||||
const globalStatus = globalSuccess
|
||||
? `Success`
|
||||
: nFailures !== 0
|
||||
? `Failure`
|
||||
: `Skipped`
|
||||
? `Failure`
|
||||
: `Skipped`
|
||||
|
||||
let markdown = [
|
||||
`## Global status: ${globalStatus}`,
|
||||
@@ -706,8 +727,8 @@ class BackupReportsXoPlugin {
|
||||
globalSuccess
|
||||
? ICON_SUCCESS
|
||||
: nFailures !== 0
|
||||
? ICON_FAILURE
|
||||
: ICON_SKIPPED
|
||||
? ICON_FAILURE
|
||||
: ICON_SKIPPED
|
||||
}`,
|
||||
nagiosStatus: globalSuccess ? 0 : 2,
|
||||
nagiosMarkdown: globalSuccess
|
||||
|
||||
@@ -31,9 +31,8 @@
|
||||
"node": ">=6"
|
||||
},
|
||||
"dependencies": {
|
||||
"event-to-promise": "^0.8.0",
|
||||
"jsonrpc-websocket-client": "^0.4.1",
|
||||
"superagent": "^3.8.2"
|
||||
"http-request-plus": "^0.6.0",
|
||||
"jsonrpc-websocket-client": "^0.4.1"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@babel/cli": "^7.0.0",
|
||||
|
||||
@@ -1,11 +1,8 @@
|
||||
import Client, { createBackoff } from 'jsonrpc-websocket-client'
|
||||
import eventToPromise from 'event-to-promise'
|
||||
import request from 'superagent'
|
||||
import { PassThrough } from 'stream'
|
||||
import hrp from 'http-request-plus'
|
||||
|
||||
const UPDATER_URL = 'localhost'
|
||||
const WS_PORT = 9001
|
||||
const HTTP_PORT = 9002
|
||||
const WS_URL = 'ws://localhost:9001'
|
||||
const HTTP_URL = 'http://localhost:9002'
|
||||
|
||||
// ===================================================================
|
||||
|
||||
@@ -49,7 +46,7 @@ class XoServerCloud {
|
||||
this
|
||||
)
|
||||
|
||||
const updater = (this._updater = new Client(`${UPDATER_URL}:${WS_PORT}`))
|
||||
const updater = (this._updater = new Client(WS_URL))
|
||||
const connect = () =>
|
||||
updater.open(createBackoff()).catch(error => {
|
||||
console.error('xo-server-cloud: fail to connect to updater', error)
|
||||
@@ -145,17 +142,16 @@ class XoServerCloud {
|
||||
throw new Error('cannot get download token')
|
||||
}
|
||||
|
||||
const req = request
|
||||
.get(`${UPDATER_URL}:${HTTP_PORT}/`)
|
||||
.set('Authorization', `Bearer ${downloadToken}`)
|
||||
const response = await hrp(HTTP_URL, {
|
||||
headers: {
|
||||
Authorization: `Bearer ${downloadToken}`,
|
||||
},
|
||||
})
|
||||
|
||||
// Impossible to pipe the response directly: https://github.com/visionmedia/superagent/issues/1187
|
||||
const pt = new PassThrough()
|
||||
req.pipe(pt)
|
||||
const { headers } = await eventToPromise(req, 'response')
|
||||
pt.length = headers['content-length']
|
||||
// currently needed for XenApi#putResource()
|
||||
response.length = response.headers['content-length']
|
||||
|
||||
return pt
|
||||
return response
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -3,14 +3,18 @@ import { createSchedule } from '@xen-orchestra/cron'
|
||||
import { assign, forOwn, map, mean } from 'lodash'
|
||||
import { utcParse } from 'd3-time-format'
|
||||
|
||||
const COMPARATOR_FN = {
|
||||
'>': (a, b) => a > b,
|
||||
'<': (a, b) => a < b,
|
||||
}
|
||||
|
||||
const VM_FUNCTIONS = {
|
||||
cpuUsage: {
|
||||
name: 'VM CPU usage',
|
||||
description:
|
||||
'Raises an alarm when the average usage of any CPU is higher than the threshold',
|
||||
'Raises an alarm when the average usage of any CPU is higher/lower than the threshold',
|
||||
unit: '%',
|
||||
comparator: '>',
|
||||
createParser: (legend, threshold) => {
|
||||
createParser: (comparator, legend, threshold) => {
|
||||
const regex = /cpu[0-9]+/
|
||||
const filteredLegends = legend.filter(l => l.name.match(regex))
|
||||
const accumulator = Object.assign(
|
||||
@@ -27,17 +31,17 @@ const VM_FUNCTIONS = {
|
||||
})
|
||||
},
|
||||
getDisplayableValue,
|
||||
shouldAlarm: () => getDisplayableValue() > threshold,
|
||||
shouldAlarm: () =>
|
||||
COMPARATOR_FN[comparator](getDisplayableValue(), threshold),
|
||||
}
|
||||
},
|
||||
},
|
||||
memoryUsage: {
|
||||
name: 'VM memory usage',
|
||||
description:
|
||||
'Raises an alarm when the used memory % is higher than the threshold',
|
||||
'Raises an alarm when the used memory % is higher/lower than the threshold',
|
||||
unit: '% used',
|
||||
comparator: '>',
|
||||
createParser: (legend, threshold) => {
|
||||
createParser: (comparator, legend, threshold) => {
|
||||
const memoryBytesLegend = legend.find(l => l.name === 'memory')
|
||||
const memoryKBytesFreeLegend = legend.find(
|
||||
l => l.name === 'memory_internal_free'
|
||||
@@ -52,9 +56,8 @@ const VM_FUNCTIONS = {
|
||||
)
|
||||
},
|
||||
getDisplayableValue,
|
||||
shouldAlarm: () => {
|
||||
return getDisplayableValue() > threshold
|
||||
},
|
||||
shouldAlarm: () =>
|
||||
COMPARATOR_FN[comparator](getDisplayableValue(), threshold),
|
||||
}
|
||||
},
|
||||
},
|
||||
@@ -64,10 +67,9 @@ const HOST_FUNCTIONS = {
|
||||
cpuUsage: {
|
||||
name: 'host CPU usage',
|
||||
description:
|
||||
'Raises an alarm when the average usage of any CPU is higher than the threshold',
|
||||
'Raises an alarm when the average usage of any CPU is higher/lower than the threshold',
|
||||
unit: '%',
|
||||
comparator: '>',
|
||||
createParser: (legend, threshold) => {
|
||||
createParser: (comparator, legend, threshold) => {
|
||||
const regex = /^cpu[0-9]+$/
|
||||
const filteredLegends = legend.filter(l => l.name.match(regex))
|
||||
const accumulator = Object.assign(
|
||||
@@ -84,17 +86,17 @@ const HOST_FUNCTIONS = {
|
||||
})
|
||||
},
|
||||
getDisplayableValue,
|
||||
shouldAlarm: () => getDisplayableValue() > threshold,
|
||||
shouldAlarm: () =>
|
||||
COMPARATOR_FN[comparator](getDisplayableValue(), threshold),
|
||||
}
|
||||
},
|
||||
},
|
||||
memoryUsage: {
|
||||
name: 'host memory usage',
|
||||
description:
|
||||
'Raises an alarm when the used memory % is higher than the threshold',
|
||||
'Raises an alarm when the used memory % is higher/lower than the threshold',
|
||||
unit: '% used',
|
||||
comparator: '>',
|
||||
createParser: (legend, threshold) => {
|
||||
createParser: (comparator, legend, threshold) => {
|
||||
const memoryKBytesLegend = legend.find(l => l.name === 'memory_total_kib')
|
||||
const memoryKBytesFreeLegend = legend.find(
|
||||
l => l.name === 'memory_free_kib'
|
||||
@@ -109,7 +111,8 @@ const HOST_FUNCTIONS = {
|
||||
)
|
||||
},
|
||||
getDisplayableValue,
|
||||
shouldAlarm: () => getDisplayableValue() > threshold,
|
||||
shouldAlarm: () =>
|
||||
COMPARATOR_FN[comparator](getDisplayableValue(), threshold),
|
||||
}
|
||||
},
|
||||
},
|
||||
@@ -119,15 +122,15 @@ const SR_FUNCTIONS = {
|
||||
storageUsage: {
|
||||
name: 'SR storage usage',
|
||||
description:
|
||||
'Raises an alarm when the used disk space % is higher than the threshold',
|
||||
'Raises an alarm when the used disk space % is higher/lower than the threshold',
|
||||
unit: '% used',
|
||||
comparator: '>',
|
||||
createGetter: threshold => sr => {
|
||||
createGetter: (comparator, threshold) => sr => {
|
||||
const getDisplayableValue = () =>
|
||||
(sr.physical_utilisation * 100) / sr.physical_size
|
||||
return {
|
||||
getDisplayableValue,
|
||||
shouldAlarm: () => getDisplayableValue() > threshold,
|
||||
shouldAlarm: () =>
|
||||
COMPARATOR_FN[comparator](getDisplayableValue(), threshold),
|
||||
}
|
||||
},
|
||||
},
|
||||
@@ -139,6 +142,13 @@ const TYPE_FUNCTION_MAP = {
|
||||
sr: SR_FUNCTIONS,
|
||||
}
|
||||
|
||||
const COMPARATOR_ENTRY = {
|
||||
title: 'Comparator',
|
||||
type: 'string',
|
||||
default: Object.keys(COMPARATOR_FN)[0],
|
||||
enum: Object.keys(COMPARATOR_FN),
|
||||
}
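To make the comparator plumbing concrete, here is a tiny standalone sketch (not taken from the plugin) of how a chosen comparator and threshold become the `shouldAlarm` test used by the parsers above:

```js
// Sketch only — mirrors how COMPARATOR_FN is applied above.
const COMPARATOR_FN = {
  '>': (a, b) => a > b,
  '<': (a, b) => a < b,
}

const shouldAlarm = (comparator, value, threshold) =>
  COMPARATOR_FN[comparator](value, threshold)

shouldAlarm('>', 70, 40) // true  -> raise the alarm
shouldAlarm('<', 70, 40) // false -> stay quiet
```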
// list of currently ringing alarms, to avoid double notification
|
||||
const currentAlarms = {}
|
||||
|
||||
@@ -182,10 +192,11 @@ export const configurationSchema = {
|
||||
default: Object.keys(HOST_FUNCTIONS)[0],
|
||||
enum: Object.keys(HOST_FUNCTIONS),
|
||||
},
|
||||
comparator: COMPARATOR_ENTRY,
|
||||
alarmTriggerLevel: {
|
||||
title: 'Threshold',
|
||||
description:
|
||||
'The direction of the crossing is given by the Alarm type',
|
||||
'The direction of the crossing is given by the comparator type',
|
||||
type: 'number',
|
||||
default: 40,
|
||||
},
|
||||
@@ -222,7 +233,7 @@ export const configurationSchema = {
|
||||
description: Object.keys(VM_FUNCTIONS)
|
||||
.map(
|
||||
k =>
|
||||
` * ${k} (${VM_FUNCTIONS[k].unit}):${
|
||||
` * ${k} (${VM_FUNCTIONS[k].unit}): ${
|
||||
VM_FUNCTIONS[k].description
|
||||
}`
|
||||
)
|
||||
@@ -231,10 +242,11 @@ export const configurationSchema = {
|
||||
default: Object.keys(VM_FUNCTIONS)[0],
|
||||
enum: Object.keys(VM_FUNCTIONS),
|
||||
},
|
||||
comparator: COMPARATOR_ENTRY,
|
||||
alarmTriggerLevel: {
|
||||
title: 'Threshold',
|
||||
description:
|
||||
'The direction of the crossing is given by the Alarm type',
|
||||
'The direction of the crossing is given by the comparator type',
|
||||
type: 'number',
|
||||
default: 40,
|
||||
},
|
||||
@@ -281,10 +293,11 @@ export const configurationSchema = {
|
||||
default: Object.keys(SR_FUNCTIONS)[0],
|
||||
enum: Object.keys(SR_FUNCTIONS),
|
||||
},
|
||||
comparator: COMPARATOR_ENTRY,
|
||||
alarmTriggerLevel: {
|
||||
title: 'Threshold',
|
||||
description:
|
||||
'The direction of the crossing is given by the Alarm type',
|
||||
'The direction of the crossing is given by the comparator type',
|
||||
type: 'number',
|
||||
default: 80,
|
||||
},
|
||||
@@ -440,6 +453,7 @@ ${monitorBodies.join('\n')}`
|
||||
relatedNode[l.name] = l
|
||||
})
|
||||
const parser = typeFunction.createParser(
|
||||
definition.comparator,
|
||||
parsedLegend.filter(l => l.uuid === uuid),
|
||||
definition.alarmTriggerLevel
|
||||
)
|
||||
@@ -454,7 +468,7 @@ ${monitorBodies.join('\n')}`
|
||||
...definition,
|
||||
alarmId,
|
||||
vmFunction: typeFunction,
|
||||
title: `${typeFunction.name} ${typeFunction.comparator} ${
|
||||
title: `${typeFunction.name} ${definition.comparator} ${
|
||||
definition.alarmTriggerLevel
|
||||
}${typeFunction.unit}`,
|
||||
snapshot: async () => {
|
||||
@@ -463,7 +477,6 @@ ${monitorBodies.join('\n')}`
|
||||
try {
|
||||
const result = {
|
||||
uuid,
|
||||
name: definition.name,
|
||||
object: this._xo.getXapi(uuid).getObject(uuid),
|
||||
}
|
||||
|
||||
@@ -489,6 +502,7 @@ ${monitorBodies.join('\n')}`
|
||||
} else {
|
||||
// Stats via XAPI
|
||||
const getter = typeFunction.createGetter(
|
||||
definition.comparator,
|
||||
definition.alarmTriggerLevel
|
||||
)
|
||||
const data = getter(result.object)
|
||||
@@ -535,6 +549,40 @@ ${monitorBodies.join('\n')}`
|
||||
)
|
||||
}
|
||||
|
||||
// Sample of a monitor
|
||||
// {
|
||||
// uuids: ['8485ea1f-b475-f6f2-58a7-895ab626ce5d'],
|
||||
// variableName: 'cpuUsage',
|
||||
// comparator: '>',
|
||||
// alarmTriggerLevel: 50,
|
||||
// alarmTriggerPeriod: 60,
|
||||
// objectType: 'host',
|
||||
// alarmId: 'host|cpuUsage|50',
|
||||
// title: 'host CPU usage > 50',
|
||||
// vmFunction: {
|
||||
// name: 'host CPU usage',
|
||||
// description: 'Raises an alarm when the average usage of any CPU is higher/lower than the threshold',
|
||||
// unit: '%',
|
||||
// createParser: [Function: createParser],
|
||||
// },
|
||||
// snapshot: [Function: snapshot],
|
||||
// }
|
||||
//
|
||||
// Sample of an entry of a snapshot
|
||||
// {
|
||||
// uuid: '8485ea1f-b475-f6f2-58a7-895ab626ce5d',
|
||||
// object: host,
|
||||
// objectLink: '[lab1](localhost:3000#/hosts/485ea1f-b475-f6f2-58a7-895ab626ce5d/stats)'
|
||||
// rrd: stats,
|
||||
// data: {
|
||||
// parseRow: [Function: parseRow],
|
||||
// getDisplayableValue: [Function: getDisplayableValue],
|
||||
// shouldAlarm: [Function: shouldAlarm],
|
||||
// },
|
||||
// value: 70,
|
||||
// shouldAlarm: true,
|
||||
// listItem: ' * [lab1](localhost:3000#/hosts/485ea1f-b475-f6f2-58a7-895ab626ce5d/stats): 70%\n'
|
||||
// }
|
||||
async _checkMonitors () {
|
||||
const monitors = this._getMonitors()
|
||||
for (const monitor of monitors) {
|
||||
|
||||
@@ -33,7 +33,7 @@
|
||||
"dependencies": {
|
||||
"nodemailer": "^4.4.1",
|
||||
"nodemailer-markdown": "^1.0.1",
|
||||
"promise-toolbox": "^0.10.1"
|
||||
"promise-toolbox": "^0.11.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@babel/cli": "^7.0.0",
|
||||
|
||||
@@ -32,7 +32,7 @@
|
||||
"node": ">=6"
|
||||
},
|
||||
"dependencies": {
|
||||
"promise-toolbox": "^0.10.1",
|
||||
"promise-toolbox": "^0.11.0",
|
||||
"slack-node": "^0.1.8"
|
||||
},
|
||||
"devDependencies": {
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "xo-server-usage-report",
|
||||
"version": "0.5.0",
|
||||
"version": "0.7.0",
|
||||
"license": "AGPL-3.0",
|
||||
"description": "",
|
||||
"keywords": [
|
||||
@@ -34,11 +34,14 @@
|
||||
"node": ">=6"
|
||||
},
|
||||
"dependencies": {
|
||||
"@xen-orchestra/async-map": "^0.0.0",
|
||||
"@xen-orchestra/cron": "^1.0.3",
|
||||
"@xen-orchestra/log": "^0.1.4",
|
||||
"handlebars": "^4.0.6",
|
||||
"html-minifier": "^3.5.8",
|
||||
"human-format": "^0.10.0",
|
||||
"lodash": "^4.17.4",
|
||||
"promise-toolbox": "^0.10.1"
|
||||
"promise-toolbox": "^0.11.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@babel/cli": "^7.0.0",
|
||||
|
||||
@@ -83,10 +83,6 @@
|
||||
border-top: 1px solid #95a5a6;
|
||||
}
|
||||
|
||||
.page .global tr:nth-last-child(2) td {
|
||||
border-bottom: 1px solid #95a5a6;
|
||||
}
|
||||
|
||||
.top table{
|
||||
margin: auto;
|
||||
margin-top: 20px;
|
||||
@@ -149,9 +145,9 @@
|
||||
|
||||
<div class="page">
|
||||
|
||||
<table class ="global">
|
||||
<table class="global">
|
||||
<tr>
|
||||
<td id="title" rowspan="13">VMs</td>
|
||||
<td id="title" rowspan="8">VMs</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Number:</td>
|
||||
@@ -160,37 +156,37 @@
|
||||
<tr>
|
||||
<td>CPU:</td>
|
||||
<td>{{normaliseValue global.vms.cpu}} % {{normaliseEvolution global.vmsEvolution.cpu}}</td>
|
||||
<tr>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>RAM:</td>
|
||||
<td>{{normaliseValue global.vms.ram}} GiB {{normaliseEvolution global.vmsEvolution.ram}}</td>
|
||||
<tr>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Disk read:</td>
|
||||
<td>{{normaliseValue global.vms.diskRead}} MiB {{normaliseEvolution global.vmsEvolution.diskRead}}
|
||||
</td>
|
||||
<tr>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Disk write:</td>
|
||||
<td>{{normaliseValue global.vms.diskWrite}} MiB {{normaliseEvolution global.vmsEvolution.diskWrite}}
|
||||
</td>
|
||||
<tr>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Network RX:</td>
|
||||
<td>{{normaliseValue global.vms.netReception}} KiB {{normaliseEvolution global.vmsEvolution.netReception}}
|
||||
</td>
|
||||
<tr>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Network TX:</td>
|
||||
<td>{{normaliseValue global.vms.netTransmission}} KiB {{normaliseEvolution global.vmsEvolution.netTransmission}}
|
||||
</td>
|
||||
<tr>
|
||||
</tr>
|
||||
</table>
|
||||
|
||||
<div class="top">
|
||||
|
||||
<table>
|
||||
<caption>3rd top usages</caption>
|
||||
<caption>Top VMs</caption>
|
||||
<tr>
|
||||
<th></th>
|
||||
<th>UUID</th>
|
||||
@@ -239,6 +235,9 @@
|
||||
<td>{{normaliseValue this.value}} MiB</td>
|
||||
</tr>
|
||||
{{/each}}
|
||||
|
||||
{{getTopIops topVms}}
|
||||
|
||||
<tr>
|
||||
<td rowspan='{{math topVms.netReception.length "+" 1}}' class="tableHeader">Network RX</td>
|
||||
</tr>
|
||||
@@ -264,9 +263,9 @@
|
||||
</div>
|
||||
|
||||
<div class="page">
|
||||
<table class ="global">
|
||||
<table class="global">
|
||||
<tr>
|
||||
<td id="title" rowspan="13">Hosts</td>
|
||||
<td id="title" rowspan="7">Hosts</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Number:</td>
|
||||
@@ -277,34 +276,33 @@
|
||||
<td>CPU:</td>
|
||||
<td>{{normaliseValue global.hosts.cpu}} % {{normaliseEvolution global.hostsEvolution.cpu}}
|
||||
</td>
|
||||
<tr>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>RAM:</td>
|
||||
<td>{{normaliseValue global.hosts.ram}} GiB {{normaliseEvolution global.hostsEvolution.ram}}
|
||||
</td>
|
||||
</td>
|
||||
<tr>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Load average:</td>
|
||||
<td>{{normaliseValue global.hosts.load}} {{normaliseEvolution global.hostsEvolution.load}}
|
||||
</td>
|
||||
<tr>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Network RX:</td>
|
||||
<td>{{normaliseValue global.hosts.netReception}} KiB {{normaliseEvolution global.hostsEvolution.netReception}}
|
||||
</td>
|
||||
<tr>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Network TX:</td>
|
||||
<td>{{normaliseValue global.hosts.netTransmission}} KiB {{normaliseEvolution global.hostsEvolution.netTransmission}}
|
||||
</td>
|
||||
<tr>
|
||||
</tr>
|
||||
</table>
|
||||
|
||||
<div class="top">
|
||||
|
||||
<table>
|
||||
<caption>3rd top usages</caption>
|
||||
<caption>Top hosts</caption>
|
||||
<tr>
|
||||
<th></th>
|
||||
<th>UUID</th>
|
||||
@@ -368,19 +366,14 @@
|
||||
<div class="page">
|
||||
<div class="top">
|
||||
<table>
|
||||
<caption>Most used storages </caption>
|
||||
<caption>Top SRs</caption>
|
||||
<tr>
|
||||
<th />
|
||||
<th>UUID</th>
|
||||
<th>Name</th>
|
||||
<th>value</th>
|
||||
</tr>
|
||||
{{#each topSrs}}
|
||||
<tr>
|
||||
<td>{{shortUUID this.uuid}}</td>
|
||||
<td>{{this.name}}</td>
|
||||
<td>{{normaliseValue this.value}} GiB</td>
|
||||
</tr>
|
||||
{{/each}}
|
||||
{{getTopSrs topSrs}}
|
||||
</table>
|
||||
<table>
|
||||
<caption>Hosts missing patches</caption>
|
||||
@@ -531,6 +524,9 @@
|
||||
<th>RAM (GiB)</th>
|
||||
<th>Disk read (MiB)</th>
|
||||
<th>Disk write (MiB)</th>
|
||||
<th>IOPS read</th>
|
||||
<th>IOPS write</th>
|
||||
<th>IOPS total</th>
|
||||
<th>Network RX (KiB)</th>
|
||||
<th>Network TX (KiB)</th>
|
||||
</tr>
|
||||
@@ -542,6 +538,9 @@
|
||||
<td>{{normaliseValue this.ram}} {{normaliseEvolution this.evolution.ram}}</td>
|
||||
<td>{{normaliseValue this.diskRead}} {{normaliseEvolution this.evolution.diskRead}}</td>
|
||||
<td>{{normaliseValue this.diskWrite}} {{normaliseEvolution this.evolution.diskWrite}}</td>
|
||||
<td>{{formatIops this.iopsRead}} {{normaliseEvolution this.evolution.iopsRead}}</td>
|
||||
<td>{{formatIops this.iopsWrite}} {{normaliseEvolution this.evolution.iopsWrite}}</td>
|
||||
<td>{{formatIops this.iopsTotal}} {{normaliseEvolution this.evolution.iopsTotal}}</td>
|
||||
<td>{{normaliseValue this.netReception}} {{normaliseEvolution this.evolution.netReception}}</td>
|
||||
<td>{{normaliseValue this.netTransmission}} {{normaliseEvolution this.evolution.netTransmission}}</td>
|
||||
</tr>
|
||||
@@ -584,8 +583,8 @@
|
||||
<td>{{shortUUID this.uuid}}</td>
|
||||
<td>{{this.name}}</td>
|
||||
<td>{{normaliseValue this.total}} {{normaliseEvolution this.evolution.total}}</td>
|
||||
<td>{{normaliseValue this.used}}</td>
|
||||
<td>{{normaliseValue this.free}}</td>
|
||||
<td>{{normaliseValue this.usedSpace}}</td>
|
||||
<td>{{normaliseValue this.freeSpace}}</td>
|
||||
</tr>
|
||||
{{/each}}
|
||||
</table>
|
||||
|
||||
@@ -1,4 +1,7 @@
|
||||
import asyncMap from '@xen-orchestra/async-map'
|
||||
import createLogger from '@xen-orchestra/log'
|
||||
import Handlebars from 'handlebars'
|
||||
import humanFormat from 'human-format'
|
||||
import { createSchedule } from '@xen-orchestra/cron'
|
||||
import { minify } from 'html-minifier'
|
||||
import {
|
||||
@@ -21,6 +24,10 @@ import { readFile, writeFile } from 'fs'
|
||||
|
||||
// ===================================================================
|
||||
|
||||
const log = createLogger('xo:xo-server-usage-report')
|
||||
|
||||
const GRANULARITY = 'days'
|
||||
|
||||
const pReadFile = promisify(readFile)
|
||||
const pWriteFile = promisify(writeFile)
|
||||
|
||||
@@ -75,7 +82,7 @@ export const configurationSchema = {
|
||||
},
|
||||
periodicity: {
|
||||
type: 'string',
|
||||
enum: ['monthly', 'weekly'],
|
||||
enum: ['monthly', 'weekly', 'daily'],
|
||||
description:
|
||||
'If you choose daily you will receive the report every day, if you choose weekly every Sunday, and if you choose monthly every first day of the month.',
|
||||
},
|
||||
@@ -87,6 +94,24 @@ export const configurationSchema = {
|
||||
|
||||
// ===================================================================
|
||||
|
||||
const shortUuid = uuid => {
|
||||
if (typeof uuid === 'string') {
|
||||
return uuid.split('-')[0]
|
||||
}
|
||||
}
|
||||
|
||||
const formatIops = value =>
|
||||
isFinite(value)
|
||||
? humanFormat(value, {
|
||||
unit: 'IOPS',
|
||||
decimals: 2,
|
||||
})
|
||||
: '-'
|
||||
|
||||
const normaliseValue = value => (isFinite(value) ? round(value, 2) : '-')
|
||||
|
||||
// ===================================================================
|
||||
|
||||
Handlebars.registerHelper('compare', function (
|
||||
lvalue,
|
||||
operator,
|
||||
@@ -122,29 +147,62 @@ Handlebars.registerHelper('math', function (lvalue, operator, rvalue, options) {
|
||||
return mathOperators[operator](+lvalue, +rvalue)
|
||||
})
|
||||
|
||||
Handlebars.registerHelper('shortUUID', uuid => {
|
||||
if (typeof uuid === 'string') {
|
||||
return uuid.split('-')[0]
|
||||
}
|
||||
})
|
||||
Handlebars.registerHelper('shortUUID', shortUuid)
|
||||
|
||||
Handlebars.registerHelper(
|
||||
'normaliseValue',
|
||||
value => (isFinite(value) ? round(value, 2) : '-')
|
||||
)
|
||||
Handlebars.registerHelper('normaliseValue', normaliseValue)
|
||||
|
||||
Handlebars.registerHelper(
|
||||
'normaliseEvolution',
|
||||
value =>
|
||||
new Handlebars.SafeString(
|
||||
isFinite(+value) && +value !== 0
|
||||
? (value = round(value, 2)) > 0
|
||||
isFinite((value = round(value, 2))) && value !== 0
|
||||
? value > 0
|
||||
? `(<b style="color: green;">▲ ${value}%</b>)`
|
||||
: `(<b style="color: red;">▼ ${String(value).slice(1)}%</b>)`
|
||||
: ''
|
||||
)
|
||||
)
|
||||
|
||||
Handlebars.registerHelper('formatIops', formatIops)
|
||||
|
||||
const getHeader = (label, size) => `
|
||||
<tr>
|
||||
<td rowspan='${size + 1}' class="tableHeader">${label}</td>
|
||||
</tr>
|
||||
`
|
||||
|
||||
const getBody = ({ uuid, name, value }, transformValue, unit) => `
|
||||
<tr>
|
||||
<td>${shortUuid(uuid)}</td>
|
||||
<td>${name}</td>
|
||||
<td>${transformValue(value)}${unit !== undefined ? ` ${unit}` : ''}</td>
|
||||
</tr>
|
||||
`
|
||||
|
||||
const getTopIops = ({ iopsRead, iopsWrite, iopsTotal }) => `
|
||||
${getHeader('IOPS read', iopsRead.length)}
|
||||
${iopsRead.map(obj => getBody(obj, formatIops)).join('')}
|
||||
${getHeader('IOPS write', iopsWrite.length)}
|
||||
${iopsWrite.map(obj => getBody(obj, formatIops)).join('')}
|
||||
${getHeader('IOPS total', iopsTotal.length)}
|
||||
${iopsTotal.map(obj => getBody(obj, formatIops)).join('')}
|
||||
`
|
||||
|
||||
Handlebars.registerHelper(
|
||||
'getTopSrs',
|
||||
({ usedSpace, iopsRead, iopsWrite, iopsTotal }) =>
|
||||
new Handlebars.SafeString(`
|
||||
${getHeader('Used space', usedSpace.length)}
|
||||
${usedSpace.map(obj => getBody(obj, normaliseValue, 'GiB')).join('')}
|
||||
${getTopIops({ iopsRead, iopsWrite, iopsTotal })}
|
||||
`)
|
||||
)
|
||||
|
||||
Handlebars.registerHelper(
|
||||
'getTopIops',
|
||||
props => new Handlebars.SafeString(getTopIops(props))
|
||||
)
|
||||
|
||||
// ===================================================================
|
||||
|
||||
function computeMean (values) {
|
||||
@@ -196,12 +254,10 @@ function getTop (objects, options) {
|
||||
function computePercentage (curr, prev, options) {
|
||||
return zipObject(
|
||||
options,
|
||||
map(
|
||||
options,
|
||||
opt =>
|
||||
prev[opt] === 0 || prev[opt] === null
|
||||
? 'NONE'
|
||||
: `${((curr[opt] - prev[opt]) * 100) / prev[opt]}`
|
||||
map(options, opt =>
|
||||
prev[opt] === 0 || prev[opt] === null
|
||||
? 'NONE'
|
||||
: `${((curr[opt] - prev[opt]) * 100) / prev[opt]}`
|
||||
)
|
||||
)
|
||||
}
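A quick worked example of the evolution formula above, with purely illustrative numbers:

```js
// ((curr - prev) * 100) / prev, 'NONE' when prev is 0 or null (as above)
const evolution = (curr, prev) =>
  prev === 0 || prev === null ? 'NONE' : `${((curr - prev) * 100) / prev}`

evolution(75, 50) // '50'  -> rendered as a green ▲ 50% by normaliseEvolution
evolution(40, 50) // '-20' -> rendered as a red ▼ 20%
evolution(10, 0)  // 'NONE'
```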
@@ -217,26 +273,47 @@ function getMemoryUsedMetric ({ memory, memoryFree = memory }) {
|
||||
return map(memory, (value, key) => value - memoryFree[key])
|
||||
}
|
||||
|
||||
const METRICS_MEAN = {
|
||||
cpu: computeDoubleMean,
|
||||
disk: value => computeDoubleMean(values(value)) / mibPower,
|
||||
iops: value => computeDoubleMean(values(value)),
|
||||
load: computeMean,
|
||||
net: value => computeDoubleMean(value) / kibPower,
|
||||
ram: stats => computeMean(getMemoryUsedMetric(stats)) / gibPower,
|
||||
}
|
||||
|
||||
// ===================================================================
|
||||
|
||||
async function getVmsStats ({ runningVms, xo }) {
|
||||
return orderBy(
|
||||
await Promise.all(
|
||||
map(runningVms, async vm => {
|
||||
const vmStats = await xo.getXapiVmStats(vm, 'days')
|
||||
const { stats } = await xo
|
||||
.getXapiVmStats(vm, GRANULARITY)
|
||||
.catch(error => {
|
||||
log.warn('Error on fetching VM stats', {
|
||||
error,
|
||||
vmId: vm.id,
|
||||
})
|
||||
return {
|
||||
stats: {},
|
||||
}
|
||||
})
|
||||
|
||||
const iopsRead = METRICS_MEAN.iops(get(stats.iops, 'r'))
|
||||
const iopsWrite = METRICS_MEAN.iops(get(stats.iops, 'w'))
|
||||
return {
|
||||
uuid: vm.uuid,
|
||||
name: vm.name_label,
|
||||
cpu: computeDoubleMean(vmStats.stats.cpus),
|
||||
ram: computeMean(getMemoryUsedMetric(vmStats.stats)) / gibPower,
|
||||
diskRead:
|
||||
computeDoubleMean(values(get(vmStats.stats.xvds, 'r'))) / mibPower,
|
||||
diskWrite:
|
||||
computeDoubleMean(values(get(vmStats.stats.xvds, 'w'))) / mibPower,
|
||||
netReception:
|
||||
computeDoubleMean(get(vmStats.stats.vifs, 'rx')) / kibPower,
|
||||
netTransmission:
|
||||
computeDoubleMean(get(vmStats.stats.vifs, 'tx')) / kibPower,
|
||||
cpu: METRICS_MEAN.cpu(stats.cpus),
|
||||
ram: METRICS_MEAN.ram(stats),
|
||||
diskRead: METRICS_MEAN.disk(get(stats.xvds, 'r')),
|
||||
diskWrite: METRICS_MEAN.disk(get(stats.xvds, 'w')),
|
||||
iopsRead,
|
||||
iopsWrite,
|
||||
iopsTotal: iopsRead + iopsWrite,
|
||||
netReception: METRICS_MEAN.net(get(stats.vifs, 'rx')),
|
||||
netTransmission: METRICS_MEAN.net(get(stats.vifs, 'tx')),
|
||||
}
|
||||
})
|
||||
),
|
||||
@@ -249,17 +326,26 @@ async function getHostsStats ({ runningHosts, xo }) {
|
||||
return orderBy(
|
||||
await Promise.all(
|
||||
map(runningHosts, async host => {
|
||||
const hostStats = await xo.getXapiHostStats(host, 'days')
|
||||
const { stats } = await xo
|
||||
.getXapiHostStats(host, GRANULARITY)
|
||||
.catch(error => {
|
||||
log.warn('Error on fetching host stats', {
|
||||
error,
|
||||
hostId: host.id,
|
||||
})
|
||||
return {
|
||||
stats: {},
|
||||
}
|
||||
})
|
||||
|
||||
return {
|
||||
uuid: host.uuid,
|
||||
name: host.name_label,
|
||||
cpu: computeDoubleMean(hostStats.stats.cpus),
|
||||
ram: computeMean(getMemoryUsedMetric(hostStats.stats)) / gibPower,
|
||||
load: computeMean(hostStats.stats.load),
|
||||
netReception:
|
||||
computeDoubleMean(get(hostStats.stats.pifs, 'rx')) / kibPower,
|
||||
netTransmission:
|
||||
computeDoubleMean(get(hostStats.stats.pifs, 'tx')) / kibPower,
|
||||
cpu: METRICS_MEAN.cpu(stats.cpus),
|
||||
ram: METRICS_MEAN.ram(stats),
|
||||
load: METRICS_MEAN.load(stats.load),
|
||||
netReception: METRICS_MEAN.net(get(stats.pifs, 'rx')),
|
||||
netTransmission: METRICS_MEAN.net(get(stats.pifs, 'tx')),
|
||||
}
|
||||
})
|
||||
),
|
||||
@@ -268,24 +354,54 @@ async function getHostsStats ({ runningHosts, xo }) {
|
||||
)
|
||||
}
|
||||
|
||||
function getSrsStats (xoObjects) {
|
||||
async function getSrsStats ({ xo, xoObjects }) {
|
||||
return orderBy(
|
||||
map(filter(xoObjects, obj => obj.type === 'SR' && obj.size > 0), sr => {
|
||||
const total = sr.size / gibPower
|
||||
const used = sr.physical_usage / gibPower
|
||||
let name = sr.name_label
|
||||
if (!sr.shared) {
|
||||
name += ` (${find(xoObjects, { id: sr.$container }).name_label})`
|
||||
await asyncMap(
|
||||
filter(
|
||||
xoObjects,
|
||||
obj => obj.type === 'SR' && obj.size > 0 && obj.$PBDs.length > 0
|
||||
),
|
||||
async sr => {
|
||||
const totalSpace = sr.size / gibPower
|
||||
const usedSpace = sr.physical_usage / gibPower
|
||||
let name = sr.name_label
|
||||
// [Bug in XO] an SR with no container can be found (SR attached to a PBD with no host attached)
|
||||
let container
|
||||
if (
|
||||
!sr.shared &&
|
||||
(container = find(xoObjects, { id: sr.$container })) !== undefined
|
||||
) {
|
||||
name += ` (${container.name_label})`
|
||||
}
|
||||
|
||||
const { stats } = await xo
|
||||
.getXapiSrStats(sr.id, GRANULARITY)
|
||||
.catch(error => {
|
||||
log.warn('Error on fetching SR stats', {
|
||||
error,
|
||||
srId: sr.id,
|
||||
})
|
||||
return {
|
||||
stats: {},
|
||||
}
|
||||
})
|
||||
|
||||
const iopsRead = computeMean(get(stats.iops, 'r'))
|
||||
const iopsWrite = computeMean(get(stats.iops, 'w'))
|
||||
|
||||
return {
|
||||
uuid: sr.uuid,
|
||||
name,
|
||||
total: totalSpace,
|
||||
usedSpace,
|
||||
freeSpace: totalSpace - usedSpace,
|
||||
iopsRead,
|
||||
iopsWrite,
|
||||
iopsTotal: iopsRead + iopsWrite,
|
||||
}
|
||||
}
|
||||
return {
|
||||
uuid: sr.uuid,
|
||||
name,
|
||||
total,
|
||||
used,
|
||||
free: total - used,
|
||||
}
|
||||
}),
|
||||
'total',
|
||||
),
|
||||
'name',
|
||||
'desc'
|
||||
)
|
||||
}
|
||||
@@ -351,6 +467,9 @@ function getTopVms ({ vmsStats, xo }) {
|
||||
'ram',
|
||||
'diskRead',
|
||||
'diskWrite',
|
||||
'iopsRead',
|
||||
'iopsWrite',
|
||||
'iopsTotal',
|
||||
'netReception',
|
||||
'netTransmission',
|
||||
])
|
||||
@@ -366,8 +485,8 @@ function getTopHosts ({ hostsStats, xo }) {
|
||||
])
|
||||
}
|
||||
|
||||
function getTopSrs ({ srsStats, xo }) {
|
||||
return getTop(srsStats, ['total']).total
|
||||
function getTopSrs (srsStats) {
|
||||
return getTop(srsStats, ['usedSpace', 'iopsRead', 'iopsWrite', 'iopsTotal'])
|
||||
}
|
||||
|
||||
async function getHostsMissingPatches ({ runningHosts, xo }) {
|
||||
@@ -376,6 +495,13 @@ async function getHostsMissingPatches ({ runningHosts, xo }) {
|
||||
let hostsPatches = await xo
|
||||
.getXapi(host)
|
||||
.listMissingPoolPatchesOnHost(host._xapiId)
|
||||
.catch(error => {
|
||||
console.error(
|
||||
'[WARN] error on fetching hosts missing patches:',
|
||||
JSON.stringify(error)
|
||||
)
|
||||
return []
|
||||
})
|
||||
|
||||
if (host.license_params.sku_type === 'free') {
|
||||
hostsPatches = filter(hostsPatches, { paid: false })
|
||||
@@ -417,6 +543,9 @@ async function computeEvolution ({ storedStatsPath, ...newStats }) {
|
||||
'ram',
|
||||
'diskRead',
|
||||
'diskWrite',
|
||||
'iopsRead',
|
||||
'iopsWrite',
|
||||
'iopsTotal',
|
||||
'netReception',
|
||||
'netTransmission',
|
||||
],
|
||||
@@ -506,7 +635,7 @@ async function dataBuilder ({ xo, storedStatsPath, all }) {
|
||||
xo.getAllUsers(),
|
||||
getVmsStats({ xo, runningVms }),
|
||||
getHostsStats({ xo, runningHosts }),
|
||||
getSrsStats(xoObjects),
|
||||
getSrsStats({ xo, xoObjects }),
|
||||
getHostsMissingPatches({ xo, runningHosts }),
|
||||
])
|
||||
|
||||
@@ -522,7 +651,7 @@ async function dataBuilder ({ xo, storedStatsPath, all }) {
|
||||
computeGlobalHostsStats({ xo, hostsStats, haltedHosts }),
|
||||
getTopVms({ xo, vmsStats }),
|
||||
getTopHosts({ xo, hostsStats }),
|
||||
getTopSrs({ xo, srsStats }),
|
||||
getTopSrs(srsStats),
|
||||
getAllUsersEmail(users),
|
||||
])
|
||||
|
||||
@@ -571,6 +700,12 @@ async function dataBuilder ({ xo, storedStatsPath, all }) {
|
||||
|
||||
// ===================================================================
|
||||
|
||||
const CRON_BY_PERIODICITY = {
|
||||
monthly: '0 6 1 * *',
|
||||
weekly: '0 6 * * 0',
|
||||
daily: '0 6 * * *',
|
||||
}
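For reference, with the standard five-field cron format used by `@xen-orchestra/cron`, these expressions fire at 06:00 on the first day of each month, at 06:00 every Sunday, and at 06:00 every day respectively. A minimal sketch of how the plugin consumes them (mirroring the `createSchedule` call further down in this diff; starting the job from `load()` is assumed):

```js
// Sketch only.
import { createSchedule } from '@xen-orchestra/cron'

const job = createSchedule(CRON_BY_PERIODICITY['daily']).createJob(async () => {
  // build and send the usage report…
})
job.start() // started/stopped from the plugin's load()/unload() hooks
```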
class UsageReportPlugin {
|
||||
constructor ({ xo, getDataDir }) {
|
||||
this._xo = xo
|
||||
@@ -591,7 +726,7 @@ class UsageReportPlugin {
|
||||
}
|
||||
|
||||
this._job = createSchedule(
|
||||
configuration.periodicity === 'monthly' ? '00 06 1 * *' : '00 06 * * 0'
|
||||
CRON_BY_PERIODICITY[configuration.periodicity]
|
||||
).createJob(async () => {
|
||||
try {
|
||||
await this._sendReport(true)
|
||||
|
||||
@@ -11,21 +11,8 @@ require('../better-stacks')
|
||||
// less memory usage.
|
||||
global.Promise = require('bluebird')
|
||||
|
||||
// Make unhandled rejected promises visible.
|
||||
process.on('unhandledRejection', function (reason) {
|
||||
console.warn('[Warn] Possibly unhandled rejection:', reason && reason.stack || reason)
|
||||
})
|
||||
|
||||
;(function (EE) {
|
||||
var proto = EE.prototype
|
||||
var emit = proto.emit
|
||||
proto.emit = function patchedError (event, error) {
|
||||
if (event === 'error' && !this.listenerCount(event)) {
|
||||
return console.warn('[Warn] Unhandled error event:', error && error.stack || error)
|
||||
}
|
||||
|
||||
return emit.apply(this, arguments)
|
||||
}
|
||||
})(require('events').EventEmitter)
|
||||
require('@xen-orchestra/log/configure').catchGlobalErrors(
|
||||
require('@xen-orchestra/log').default('xo:xo-server')
|
||||
)
|
||||
|
||||
require('exec-promise')(require('../'))
|
||||
|
@@ -2,6 +2,13 @@
//
// See sample.config.yaml to override.
{
"apiWebSocketOptions": {
// https://github.com/websockets/ws#websocket-compression
// "perMessageDeflate": {
// "threshold": 524288 // 512kiB
// }
},

"http": {
"listen": [
{
@@ -27,6 +34,7 @@

"mounts": {}
},

"datadir": "/var/lib/xo-server/data",

// Should users be created on first sign in?
@@ -34,6 +42,12 @@
// Necessary for external authentication providers.
"createUserOnFirstSignin": true,

"remoteOptions": {
"mountsDir": "/run/xo-server/mounts",

"timeout": 600e3
},

// Whether API logs should contains the full request/response on
// errors.
//

@@ -1,6 +1,6 @@
{
"name": "xo-server",
"version": "5.27.1",
"version": "5.30.1",
"license": "AGPL-3.0",
"description": "Server part of Xen-Orchestra",
"keywords": [
@@ -34,10 +34,11 @@
"@xen-orchestra/async-map": "^0.0.0",
"@xen-orchestra/cron": "^1.0.3",
"@xen-orchestra/emit-async": "^0.0.0",
"@xen-orchestra/fs": "^0.3.1",
"@xen-orchestra/fs": "^0.4.1",
"@xen-orchestra/log": "^0.1.4",
"@xen-orchestra/mixin": "^0.0.0",
"ajv": "^6.1.1",
"app-conf": "^0.5.0",
"app-conf": "^0.6.0",
"archiver": "^3.0.0",
"async-iterator-to-stream": "^1.0.1",
"base64url": "^3.0.0",
@@ -94,7 +95,7 @@
"passport": "^0.4.0",
"passport-local": "^1.0.0",
"pretty-format": "^23.0.0",
"promise-toolbox": "^0.10.1",
"promise-toolbox": "^0.11.0",
"proxy-agent": "^3.0.0",
"pug": "^2.0.0-rc.4",
"pump": "^3.0.0",
@@ -108,19 +109,19 @@
"stoppable": "^1.0.5",
"struct-fu": "^1.2.0",
"tar-stream": "^1.5.5",
"through2": "^2.0.3",
"through2": "^3.0.0",
"tmp": "^0.0.33",
"uuid": "^3.0.1",
"value-matcher": "^0.2.0",
"vhd-lib": "^0.3.1",
"vhd-lib": "^0.4.0",
"ws": "^6.0.0",
"xen-api": "^0.19.0",
"xen-api": "^0.22.0",
"xml2js": "^0.4.19",
"xo-acl-resolver": "^0.2.4",
"xo-acl-resolver": "^0.4.0",
"xo-collection": "^0.4.1",
"xo-common": "^0.1.1",
"xo-common": "^0.2.0",
"xo-remote-parser": "^0.5.0",
"xo-vmdk-to-vhd": "^0.1.3",
"xo-vmdk-to-vhd": "^0.1.5",
"yazl": "^2.4.3"
},
"devDependencies": {
@@ -130,12 +131,14 @@
"@babel/plugin-proposal-export-default-from": "^7.0.0",
"@babel/plugin-proposal-export-namespace-from": "^7.0.0",
"@babel/plugin-proposal-function-bind": "^7.0.0",
"@babel/plugin-proposal-nullish-coalescing-operator": "^7.0.0",
"@babel/plugin-proposal-optional-chaining": "^7.0.0",
"@babel/plugin-proposal-pipeline-operator": "^7.0.0",
"@babel/plugin-proposal-throw-expressions": "^7.0.0",
"@babel/preset-env": "^7.0.0",
"@babel/preset-flow": "^7.0.0",
"babel-plugin-lodash": "^3.3.2",
"babel-plugin-transform-dev": "^2.0.1",
"cross-env": "^5.1.3",
"index-modules": "^0.3.0",
"rimraf": "^2.6.2"

@@ -46,14 +46,12 @@

# Configuration of the embedded HTTP server.
http:

# Hosts & ports on which to listen.
#
# By default, the server listens on [::]:80.
listen:
# Basic HTTP.
-
# Address on which the server is listening on.
- # Address on which the server is listening on.
#
# Sets it to 'localhost' for IP to listen only on the local host.
#
@@ -124,23 +122,20 @@ http:

# Connection to the Redis server.
redis:
# Unix sockets can be used
#
# Default: undefined
#socket: /var/run/redis/redis.sock

# Syntax: redis://[db[:password]@]hostname[:port][/db-number]
#
# Default: redis://localhost:6379/0
#uri: redis://redis.company.lan/42

# List of aliased commands.
#
# See http://redis.io/topics/security#disabling-of-specific-commands
#renameCommands:
# del: '3dda29ad-3015-44f9-b13b-fa570de92489'
# srem: '3fd758c9-5610-4e9d-a058-dbf4cb6d8bf0'

# Unix sockets can be used
#
# Default: undefined
#socket: /var/run/redis/redis.sock
# Syntax: redis://[db[:password]@]hostname[:port][/db-number]
#
# Default: redis://localhost:6379/0
#uri: redis://redis.company.lan/42
# List of aliased commands.
#
# See http://redis.io/topics/security#disabling-of-specific-commands
#renameCommands:
# del: '3dda29ad-3015-44f9-b13b-fa570de92489'
# srem: '3fd758c9-5610-4e9d-a058-dbf4cb6d8bf0'

# Directory containing the database of XO.
# Currently used for logs.

@@ -1,8 +1,11 @@
import archiver from 'archiver'
import createLogger from '@xen-orchestra/log'
import { basename } from 'path'
import { format } from 'json-rpc-peer'
import { forEach } from 'lodash'

const log = createLogger('xo:backup')

// ===================================================================

export function list ({ remote }) {
@@ -62,7 +65,7 @@ function handleFetchFiles (

const archive = archiver(archiveFormat)
archive.on('error', error => {
console.error(error)
log.error(error)
res.end(format.error(0, error))
})

@@ -74,7 +77,7 @@ function handleFetchFiles (
archive.pipe(res)
})
.catch(error => {
console.error(error)
log.error(error)
res.writeHead(500)
res.end(format.error(0, error))
})

@@ -1,23 +1,38 @@
import createLogger from '@xen-orchestra/log'
import pump from 'pump'
import { format } from 'json-rpc-peer'
import { unauthorized } from 'xo-common/api-errors'
import { noSuchObject } from 'xo-common/api-errors'

import { parseSize } from '../utils'

const log = createLogger('xo:disk')

// ===================================================================

export async function create ({ name, size, sr, vm, bootable, position, mode }) {
const attach = vm !== undefined

let resourceSet
if (attach && (resourceSet = vm.resourceSet) != null) {
await this.checkResourceSetConstraints(resourceSet, this.user.id, [sr.id])
await this.allocateLimitsInResourceSet({ disk: size }, resourceSet)
} else if (
!(await this.hasPermissions(this.user.id, [[sr.id, 'administrate']]))
) {
throw unauthorized()
}
do {
let resourceSet
if (attach && (resourceSet = vm.resourceSet) != null) {
try {
await this.checkResourceSetConstraints(resourceSet, this.user.id, [
sr.id,
])
await this.allocateLimitsInResourceSet({ disk: size }, resourceSet)

break
} catch (error) {
if (!noSuchObject.is(error, { data: { id: resourceSet } })) {
throw error
}
}

// the resource set does not exist, falls back to normal check
}

await this.checkPermissions(this.user.id, [[sr.id, 'administrate']])
} while (false)

const xapi = this.getXapi(sr)
const vdi = await xapi.createVdi({
@@ -72,7 +87,7 @@ async function handleExportContent (req, res, { xapi, id }) {
)
pump(stream, res, error => {
if (error != null) {
console.warn('disk.exportContent', error)
log.warn('disk.exportContent', { error })
}
})
}
@@ -108,6 +123,7 @@ async function handleImportContent (req, res, { xapi, id }) {
req.setTimeout(43200000) // 12 hours

try {
req.length = +req.headers['content-length']
await xapi.importVdiContent(id, req)
res.end(format.response(0, true))
} catch (e) {

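The rewritten disk.create above uses a single-pass do … while so that a stale resource set can fall back to a plain ACL check instead of failing the call. The control flow, reduced to a hedged sketch (the xo/user/vm/sr parameters and the reserveDisk name are placeholders; the helper names follow the diff):

import { noSuchObject } from 'xo-common/api-errors'

async function reserveDisk (xo, { user, vm, sr, size }) {
  const resourceSet = vm !== undefined ? vm.resourceSet : undefined
  if (resourceSet != null) {
    try {
      await xo.checkResourceSetConstraints(resourceSet, user.id, [sr.id])
      await xo.allocateLimitsInResourceSet({ disk: size }, resourceSet)
      return // limits reserved against the resource set, nothing else to check
    } catch (error) {
      // only swallow "this resource set no longer exists"
      if (!noSuchObject.is(error, { data: { id: resourceSet } })) {
        throw error
      }
    }
  }
  // fall back to a normal permission check on the target SR
  await xo.checkPermissions(user.id, [[sr.id, 'administrate']])
}
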
@@ -1,5 +1,3 @@
import { unauthorized } from 'xo-common/api-errors'

export function create (props) {
return this.createIpPool(props)
}
@@ -22,15 +20,12 @@ delete_.description = 'Delete an ipPool'
export function getAll (params) {
const { user } = this

if (!user) {
throw unauthorized()
}

return this.getAllIpPools(
user.permission === 'admin' ? params && params.userId : user.id
)
}

getAll.permission = ''
getAll.description = 'List all ipPools'

// -------------------------------------------------------------------

@@ -1,7 +1,3 @@
import { unauthorized } from 'xo-common/api-errors'

// ===================================================================

export function create ({ name, subjects, objects, limits }) {
return this.createResourceSet(name, subjects, objects, limits)
}
@@ -99,14 +95,10 @@ set.params = {
// -------------------------------------------------------------------

export function get ({ id }) {
const { user } = this
if (!user) {
throw unauthorized()
}

return this.getResourceSet(id)
}

get.permission = ''
get.params = {
id: {
type: 'string',
@@ -116,14 +108,10 @@ get.params = {
// -------------------------------------------------------------------

export async function getAll () {
const { user } = this
if (!user) {
throw unauthorized()
}

return this.getAllResourceSets(user.id)
return this.getAllResourceSets(this.user.id)
}

getAll.permission = ''
getAll.description = 'Get the list of all existing resource set'

// -------------------------------------------------------------------

@@ -4,7 +4,7 @@ export async function add ({ autoConnect = true, ...props }) {
const server = await this.registerXenServer(props)

if (autoConnect) {
;this.connectXenServer(server.id)::ignoreErrors()
this.connectXenServer(server.id)::ignoreErrors()
}

return server.id
@@ -105,7 +105,7 @@ set.params = {
// -------------------------------------------------------------------

export async function connect ({ id }) {
;this.updateXenServer(id, { enabled: true })::ignoreErrors()
this.updateXenServer(id, { enabled: true })::ignoreErrors()
await this.connectXenServer(id)
}

@@ -122,7 +122,7 @@ connect.params = {
// -------------------------------------------------------------------

export async function disconnect ({ id }) {
;this.updateXenServer(id, { enabled: false })::ignoreErrors()
this.updateXenServer(id, { enabled: false })::ignoreErrors()
await this.disconnectXenServer(id)
}


@@ -1,6 +1,6 @@
// FIXME: rename to disk.*

import { invalidParameters, unauthorized } from 'xo-common/api-errors'
import { invalidParameters } from 'xo-common/api-errors'
import { isArray, reduce } from 'lodash'

import { parseSize } from '../utils'
@@ -67,13 +67,8 @@ export async function set (params) {
{ disk: size - vdi.size },
resourceSetId
)
} else if (
!(
this.user.permission === 'admin' ||
(await this.hasPermissions(this.user.id, [[vdi.$SR, 'operate']]))
)
) {
throw unauthorized()
} else {
await this.checkPermissions(this.user.id, [[vdi.$SR, 'operate']])
}

await xapi.resizeVdi(ref, size)

@@ -6,7 +6,7 @@ import { diffItems } from '../utils'

// TODO: move into vm and rename to removeInterface
async function delete_ ({ vif }) {
;this.allocIpAddresses(
this.allocIpAddresses(
vif.id,
null,
vif.allowedIpv4Addresses.concat(vif.allowedIpv6Addresses)

@@ -1,6 +1,7 @@
import concat from 'lodash/concat'
import defer from 'golike-defer'
import { format } from 'json-rpc-peer'
import { ignoreErrors } from 'promise-toolbox'
import { assignWith, concat } from 'lodash'
import {
forbiddenOperation,
invalidParameters,
@@ -12,11 +13,11 @@ import { forEach, map, mapFilter, parseSize } from '../utils'

// ===================================================================

export function getHaValues () {
export function getHaValues() {
return ['best-effort', 'restart', '']
}

function checkPermissionOnSrs (vm, permission = 'operate') {
function checkPermissionOnSrs(vm, permission = 'operate') {
const permissions = []
forEach(vm.$VBDs, vbdId => {
const vbd = this.getObject(vbdId, 'VBD')
@@ -31,13 +32,7 @@ function checkPermissionOnSrs (vm, permission = 'operate') {
])
})

return this.hasPermissions(this.session.get('user_id'), permissions).then(
success => {
if (!success) {
throw unauthorized()
}
}
)
return this.checkPermissions(this.session.get('user_id'), permissions)
}

// ===================================================================
@@ -49,17 +44,14 @@ const extract = (obj, prop) => {
}

// TODO: Implement ACLs
export async function create (params) {
export async function create(params) {
const { user } = this
const resourceSet = extract(params, 'resourceSet')
const template = extract(params, 'template')
if (
resourceSet === undefined &&
!(await this.hasPermissions(this.user.id, [
if (resourceSet === undefined) {
await this.checkPermissions(this.user.id, [
[template.$pool, 'administrate'],
]))
) {
throw unauthorized()
])
}

params.template = template._xapiId
@@ -150,8 +142,10 @@ export async function create (params) {
if (resourceSet) {
await this.checkResourceSetConstraints(resourceSet, user.id, objectIds)
checkLimits = async limits2 => {
await this.allocateLimitsInResourceSet(limits, resourceSet)
await this.allocateLimitsInResourceSet(limits2, resourceSet)
await this.allocateLimitsInResourceSet(
assignWith({}, limits, limits2, (l1 = 0, l2) => l1 + l2),
resourceSet
)
}
}

@@ -331,7 +325,7 @@ create.resolve = {

// -------------------------------------------------------------------

async function delete_ ({
async function delete_({
delete_disks, // eslint-disable-line camelcase
force,
forceDeleteDefaultTemplate,
@@ -372,7 +366,7 @@ async function delete_ ({
vm.type === 'VM' && // only regular VMs
xapi.xo.getData(vm._xapiId, 'resourceSet') != null
) {
;this.setVmResourceSet(vm._xapiId, null)::ignoreErrors()
this.setVmResourceSet(vm._xapiId, null)::ignoreErrors()
}

return xapi.deleteVm(
@@ -409,7 +403,7 @@ export { delete_ as delete }

// -------------------------------------------------------------------

export async function ejectCd ({ vm }) {
export async function ejectCd({ vm }) {
await this.getXapi(vm).ejectCdFromVm(vm._xapiId)
}

@@ -423,14 +417,14 @@ ejectCd.resolve = {

// -------------------------------------------------------------------

export async function insertCd ({ vm, vdi, force }) {
export async function insertCd({ vm, vdi, force = true }) {
await this.getXapi(vm).insertCdIntoVm(vdi._xapiId, vm._xapiId, { force })
}

insertCd.params = {
id: { type: 'string' },
cd_id: { type: 'string' },
force: { type: 'boolean' },
force: { type: 'boolean', optional: true },
}

insertCd.resolve = {
@@ -442,7 +436,7 @@ insertCd.resolve = {

// -------------------------------------------------------------------

export async function migrate ({
export async function migrate({
vm,
host,
sr,
@@ -474,9 +468,7 @@ export async function migrate ({
})
}

if (!(await this.hasPermissions(this.session.get('user_id'), permissions))) {
throw unauthorized()
}
await this.checkPermissions(this.user.id, permissions)

await this.getXapi(vm).migrateVm(
vm._xapiId,
@@ -520,7 +512,7 @@ migrate.resolve = {

// -------------------------------------------------------------------

export async function set (params) {
export async function set(params) {
const VM = extract(params, 'VM')
const xapi = this.getXapi(VM)
const vmId = VM._xapiId
@@ -611,6 +603,8 @@ set.params = {
// Emulate HVM C000 PCI device for Windows Update to fetch or update PV drivers
hasVendorDevice: { type: 'boolean', optional: true },

expNestedHvm: { type: 'boolean', optional: true },

// Move the vm In to/Out of Self Service
resourceSet: { type: ['string', 'null'], optional: true },

@@ -626,7 +620,7 @@ set.resolve = {

// -------------------------------------------------------------------

export async function restart ({ vm, force }) {
export async function restart({ vm, force = false }) {
const xapi = this.getXapi(vm)

if (force) {
@@ -638,7 +632,7 @@ export async function restart ({ vm, force }) {

restart.params = {
id: { type: 'string' },
force: { type: 'boolean' },
force: { type: 'boolean', optional: true },
}

restart.resolve = {
@@ -647,17 +641,34 @@ restart.resolve = {

// -------------------------------------------------------------------

// TODO: implement resource sets
export async function clone ({ vm, name, full_copy: fullCopy }) {
export const clone = defer(async function(
$defer,
{ vm, name, full_copy: fullCopy }
) {
await checkPermissionOnSrs.call(this, vm)
const xapi = this.getXapi(vm)

return this.getXapi(vm)
.cloneVm(vm._xapiRef, {
nameLabel: name,
fast: !fullCopy,
})
.then(vm => vm.$id)
}
const { $id: cloneId } = await xapi.cloneVm(vm._xapiRef, {
nameLabel: name,
fast: !fullCopy,
})
$defer.onFailure(() => xapi.deleteVm(cloneId))

const isAdmin = this.user.permission === 'admin'
if (!isAdmin) {
await this.addAcl(this.user.id, cloneId, 'admin')
}

if (vm.resourceSet !== undefined) {
await this.allocateLimitsInResourceSet(
await this.computeVmResourcesUsage(vm),
vm.resourceSet,
isAdmin
)
}

return cloneId
})

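clone (and snapshot further down) are now wrapped with golike-defer so that a failure in the ACL or resource-set steps rolls back the VM that was just created. The shape of that pattern in isolation, assuming only the defer/$defer.onFailure API used in this diff (registerClone is a placeholder for those follow-up steps):

import defer from 'golike-defer'

const cloneAndRegister = defer(async function ($defer, xapi, ref) {
  const { $id: cloneId } = await xapi.cloneVm(ref, { fast: true })
  // if anything after this point throws, undo the clone
  $defer.onFailure(() => xapi.deleteVm(cloneId))

  await registerClone(cloneId) // placeholder: ACLs, resource-set accounting, etc.
  return cloneId
})
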
clone.params = {
id: { type: 'string' },
@@ -672,7 +683,7 @@ clone.resolve = {
// -------------------------------------------------------------------

// TODO: implement resource sets
export async function copy ({ compress, name: nameLabel, sr, vm }) {
export async function copy({ compress, name: nameLabel, sr, vm }) {
if (vm.$pool === sr.$pool) {
if (vm.power_state === 'Running') {
await checkPermissionOnSrs.call(this, vm)
@@ -713,15 +724,9 @@ copy.resolve = {

// -------------------------------------------------------------------

export async function convertToTemplate ({ vm }) {
export async function convertToTemplate({ vm }) {
// Convert to a template requires pool admin permission.
if (
!(await this.hasPermissions(this.session.get('user_id'), [
[vm.$pool, 'administrate'],
]))
) {
throw unauthorized()
}
await this.checkPermissions(this.user.id, [[vm.$pool, 'administrate']])

await this.getXapi(vm).call('VM.set_is_a_template', vm._xapiRef, true)
}
@@ -740,14 +745,22 @@ export { convertToTemplate as convert }
// -------------------------------------------------------------------

// TODO: implement resource sets
export async function snapshot ({
vm,
name = `${vm.name_label}_${new Date().toISOString()}`,
}) {
export const snapshot = defer(async function(
$defer,
{ vm, name = `${vm.name_label}_${new Date().toISOString()}` }
) {
await checkPermissionOnSrs.call(this, vm)

return (await this.getXapi(vm).snapshotVm(vm._xapiRef, name)).$id
}
const xapi = this.getXapi(vm)
const { $id: snapshotId } = await xapi.snapshotVm(vm._xapiRef, name)
$defer.onFailure(() => xapi.deleteVm(snapshotId))

const { user } = this
if (user.permission !== 'admin') {
await this.addAcl(user.id, snapshotId, 'admin')
}
return snapshotId
})

snapshot.params = {
id: { type: 'string' },
@@ -755,12 +768,12 @@ snapshot.params = {
}

snapshot.resolve = {
vm: ['id', 'VM', 'administrate'],
vm: ['id', 'VM', 'operate'],
}

// -------------------------------------------------------------------

export function rollingDeltaBackup ({
export function rollingDeltaBackup({
vm,
remote,
tag,
@@ -792,7 +805,7 @@ rollingDeltaBackup.permission = 'admin'

// -------------------------------------------------------------------

export function importDeltaBackup ({ sr, remote, filePath, mapVdisSrs }) {
export function importDeltaBackup({ sr, remote, filePath, mapVdisSrs }) {
const mapVdisSrsXapi = {}

forEach(mapVdisSrs, (srId, vdiId) => {
@@ -823,7 +836,7 @@ importDeltaBackup.permission = 'admin'

// -------------------------------------------------------------------

export function deltaCopy ({ force, vm, retention, sr }) {
export function deltaCopy({ force, vm, retention, sr }) {
return this.deltaCopyVm(vm, sr, force, retention)
}

@@ -841,7 +854,7 @@ deltaCopy.resolve = {

// -------------------------------------------------------------------

export async function rollingSnapshot ({ vm, tag, depth, retention = depth }) {
export async function rollingSnapshot({ vm, tag, depth, retention = depth }) {
await checkPermissionOnSrs.call(this, vm)
return this.rollingSnapshotVm(vm, tag, retention)
}
@@ -863,7 +876,7 @@ rollingSnapshot.description =

// -------------------------------------------------------------------

export function backup ({ vm, remoteId, file, compress }) {
export function backup({ vm, remoteId, file, compress }) {
return this.backupVm({ vm, remoteId, file, compress })
}

@@ -884,7 +897,7 @@ backup.description = 'Exports a VM to the file system'

// -------------------------------------------------------------------

export function importBackup ({ remote, file, sr }) {
export function importBackup({ remote, file, sr }) {
return this.importVmBackup(remote, file, sr)
}

@@ -905,7 +918,7 @@ importBackup.permission = 'admin'

// -------------------------------------------------------------------

export function rollingBackup ({
export function rollingBackup({
vm,
remoteId,
tag,
@@ -943,7 +956,7 @@ rollingBackup.description =

// -------------------------------------------------------------------

export function rollingDrCopy ({
export function rollingDrCopy({
vm,
pool,
sr,
@@ -997,7 +1010,7 @@ rollingDrCopy.description =

// -------------------------------------------------------------------

export function start ({ vm, force, host }) {
export function start({ vm, force, host }) {
return this.getXapi(vm).startVm(vm._xapiId, host?._xapiId, force)
}

@@ -1018,7 +1031,7 @@ start.resolve = {
// - if !force → clean shutdown
// - if force is true → hard shutdown
// - if force is integer → clean shutdown and after force seconds, hard shutdown.
export async function stop ({ vm, force }) {
export async function stop({ vm, force }) {
const xapi = this.getXapi(vm)

// Hard shutdown
@@ -1053,7 +1066,7 @@ stop.resolve = {

// -------------------------------------------------------------------

export async function suspend ({ vm }) {
export async function suspend({ vm }) {
await this.getXapi(vm).call('VM.suspend', vm._xapiRef)
}

@@ -1067,7 +1080,21 @@ suspend.resolve = {

// -------------------------------------------------------------------

export function resume ({ vm }) {
export async function pause({ vm }) {
await this.getXapi(vm).call('VM.pause', vm._xapiRef)
}

pause.params = {
id: { type: 'string' },
}

pause.resolve = {
vm: ['id', 'VM', 'operate'],
}

// -------------------------------------------------------------------

export function resume({ vm }) {
return this.getXapi(vm).resumeVm(vm._xapiId)
}

@@ -1081,7 +1108,7 @@ resume.resolve = {

// -------------------------------------------------------------------

export function revert ({ snapshot, snapshotBefore }) {
export function revert({ snapshot, snapshotBefore }) {
return this.getXapi(snapshot).revertVm(snapshot._xapiId, snapshotBefore)
}

@@ -1096,7 +1123,7 @@ revert.resolve = {

// -------------------------------------------------------------------

async function handleExport (req, res, { xapi, id, compress }) {
async function handleExport(req, res, { xapi, id, compress }) {
const stream = await xapi.exportVm(id, {
compress: compress != null ? compress : true,
})
@@ -1113,7 +1140,7 @@ async function handleExport (req, res, { xapi, id, compress }) {
}

// TODO: integrate in xapi.js
async function export_ ({ vm, compress }) {
async function export_({ vm, compress }) {
if (vm.power_state === 'Running') {
await checkPermissionOnSrs.call(this, vm)
}
@@ -1144,7 +1171,7 @@ export { export_ as export }

// -------------------------------------------------------------------

async function handleVmImport (req, res, { data, srId, type, xapi }) {
async function handleVmImport(req, res, { data, srId, type, xapi }) {
// Timeout seems to be broken in Node 4.
// See https://github.com/nodejs/node/issues/3319
req.setTimeout(43200000) // 12 hours
@@ -1159,34 +1186,17 @@ async function handleVmImport (req, res, { data, srId, type, xapi }) {
}

// TODO: "sr_id" can be passed in URL to target a specific SR
async function import_ ({ data, host, sr, type }) {
let xapi
async function import_({ data, sr, type }) {
if (data && type === 'xva') {
throw invalidParameters('unsupported field data for the file type xva')
}

if (!sr) {
if (!host) {
throw invalidParameters('you must provide either host or SR')
}

xapi = this.getXapi(host)
sr = xapi.pool.$default_SR
if (!sr) {
throw invalidParameters('there is not default SR in this pool')
}

// FIXME: must have administrate permission on default SR.
} else {
xapi = this.getXapi(sr)
}

return {
$sendTo: await this.registerHttpRequest(handleVmImport, {
data,
srId: sr._xapiId,
type,
xapi,
xapi: this.getXapi(sr),
}),
}
}
@@ -1221,13 +1231,11 @@ import_.params = {
},
},
},
host: { type: 'string', optional: true },
type: { type: 'string', optional: true },
sr: { type: 'string', optional: true },
sr: { type: 'string' },
}

import_.resolve = {
host: ['host', 'host', 'administrate'],
sr: ['sr', 'SR', 'administrate'],
}

@@ -1237,7 +1245,7 @@ export { import_ as import }

// FIXME: if position is used, all other disks after this position
// should be shifted.
export async function attachDisk ({ vm, vdi, position, mode, bootable }) {
export async function attachDisk({ vm, vdi, position, mode, bootable }) {
await this.getXapi(vm).createVbd({
bootable,
mode,
@@ -1266,7 +1274,7 @@ attachDisk.resolve = {
// -------------------------------------------------------------------

// TODO: implement resource sets
export async function createInterface ({
export async function createInterface({
vm,
network,
position,
@@ -1279,10 +1287,8 @@ export async function createInterface ({
await this.checkResourceSetConstraints(resourceSet, this.user.id, [
network.id,
])
} else if (
!(await this.hasPermissions(this.user.id, [[network.id, 'view']]))
) {
throw unauthorized()
} else {
await this.checkPermissions(this.user.id, [[network.id, 'view']])
}

let ipAddresses
@@ -1337,7 +1343,7 @@ createInterface.resolve = {

// -------------------------------------------------------------------

export async function attachPci ({ vm, pciId }) {
export async function attachPci({ vm, pciId }) {
const xapi = this.getXapi(vm)

await xapi.call('VM.add_to_other_config', vm._xapiRef, 'pci', pciId)
@@ -1354,7 +1360,7 @@ attachPci.resolve = {

// -------------------------------------------------------------------

export async function detachPci ({ vm }) {
export async function detachPci({ vm }) {
const xapi = this.getXapi(vm)

await xapi.call('VM.remove_from_other_config', vm._xapiRef, 'pci')
@@ -1369,7 +1375,7 @@ detachPci.resolve = {
}
// -------------------------------------------------------------------

export function stats ({ vm, granularity }) {
export function stats({ vm, granularity }) {
return this.getXapiVmStats(vm._xapiId, granularity)
}

@@ -1389,7 +1395,7 @@ stats.resolve = {

// -------------------------------------------------------------------

export async function setBootOrder ({ vm, order }) {
export async function setBootOrder({ vm, order }) {
const xapi = this.getXapi(vm)

order = { order }
@@ -1412,7 +1418,7 @@ setBootOrder.resolve = {

// -------------------------------------------------------------------

export function recoveryStart ({ vm }) {
export function recoveryStart({ vm }) {
return this.getXapi(vm).startVmOnCd(vm._xapiId)
}

@@ -1426,7 +1432,7 @@ recoveryStart.resolve = {

// -------------------------------------------------------------------

export function getCloudInitConfig ({ template }) {
export function getCloudInitConfig({ template }) {
return this.getXapi(template).getCloudInitConfig(template._xapiId)
}

@@ -1440,7 +1446,7 @@ getCloudInitConfig.resolve = {

// -------------------------------------------------------------------

export async function createCloudInitConfigDrive ({ vm, sr, config, coreos }) {
export async function createCloudInitConfigDrive({ vm, sr, config, coreos }) {
const xapi = this.getXapi(vm)
if (coreos) {
// CoreOS is a special CloudConfig drive created by XS plugin
@@ -1467,7 +1473,7 @@ createCloudInitConfigDrive.resolve = {

// -------------------------------------------------------------------

export async function createVgpu ({ vm, gpuGroup, vgpuType }) {
export async function createVgpu({ vm, gpuGroup, vgpuType }) {
// TODO: properly handle device. Can a VM have 2 vGPUS?
await this.getXapi(vm).createVgpu(
vm._xapiId,
@@ -1490,7 +1496,7 @@ createVgpu.resolve = {

// -------------------------------------------------------------------

export async function deleteVgpu ({ vgpu }) {
export async function deleteVgpu({ vgpu }) {
await this.getXapi(vgpu).deleteVgpu(vgpu._xapiId)
}

@@ -1,10 +1,11 @@
import asyncMap from '@xen-orchestra/async-map'
import createLogger from 'debug'
import createLogger from '@xen-orchestra/log'
import defer from 'golike-defer'
import execa from 'execa'
import fs from 'fs-extra'
import map from 'lodash/map'
import { tap, delay } from 'promise-toolbox'
import { NULL_REF } from 'xen-api'
import { invalidParameters } from 'xo-common/api-errors'
import { v4 as generateUuid } from 'uuid'
import { includes, remove, filter, find, range } from 'lodash'
@@ -12,7 +13,7 @@ import { includes, remove, filter, find, range } from 'lodash'
import { asInteger } from '../xapi/utils'
import { parseXml, ensureArray } from '../utils'

const debug = createLogger('xo:xosan')
const log = createLogger('xo:xosan')

const SSH_KEY_FILE = 'id_rsa_xosan'
const DEFAULT_NETWORK_PREFIX = '172.31.100.'
@@ -25,7 +26,7 @@ const XOSAN_LICENSE_QUOTA = 50 * GIGABYTE

const CURRENT_POOL_OPERATIONS = {}

function getXosanConfig (xosansr, xapi = this.getXapi(xosansr)) {
function getXosanConfig(xosansr, xapi = this.getXapi(xosansr)) {
const data = xapi.xo.getData(xosansr, 'xosan_config')
if (data && data.networkPrefix === undefined) {
// some xosan might have been created before this field was added
@@ -36,7 +37,7 @@ function getXosanConfig (xosansr, xapi = this.getXapi(xosansr)) {
return data
}

function _getIPToVMDict (xapi, sr) {
function _getIPToVMDict(xapi, sr) {
const dict = {}
const data = getXosanConfig(sr, xapi)
if (data && data.nodes) {
@@ -54,7 +55,7 @@ function _getIPToVMDict (xapi, sr) {
return dict
}

function _getGlusterEndpoint (sr) {
function _getGlusterEndpoint(sr) {
const xapi = this.getXapi(sr)
const data = getXosanConfig(sr, xapi)
if (!data || !data.nodes) {
@@ -68,20 +69,20 @@ function _getGlusterEndpoint (sr) {
}
}

async function rateLimitedRetry (action, shouldRetry, retryCount = 20) {
async function rateLimitedRetry(action, shouldRetry, retryCount = 20) {
let retryDelay = 500 * (1 + Math.random() / 20)
let result
while (retryCount > 0 && (result = await action()) && shouldRetry(result)) {
retryDelay *= 1.1
debug('waiting ' + retryDelay + 'ms and retrying')
log.debug(`waiting ${retryDelay} ms and retrying`)
await delay(retryDelay)
retryCount--
}
return result
}

function createVolumeInfoTypes () {
function parseHeal (parsed) {
function createVolumeInfoTypes() {
function parseHeal(parsed) {
const bricks = []
parsed['healInfo']['bricks']['brick'].forEach(brick => {
bricks.push(brick)
@@ -92,7 +93,7 @@ function createVolumeInfoTypes () {
return { commandStatus: true, result: { bricks } }
}

function parseStatus (parsed) {
function parseStatus(parsed) {
const brickDictByUuid = {}
const volume = parsed['volStatus']['volumes']['volume']
volume['node'].forEach(node => {
@@ -105,7 +106,7 @@ function createVolumeInfoTypes () {
}
}

async function parseInfo (parsed) {
async function parseInfo(parsed) {
const volume = parsed['volInfo']['volumes']['volume']
volume['bricks'] = volume['bricks']['brick']
volume['options'] = volume['options']['option']
@@ -113,7 +114,7 @@ function createVolumeInfoTypes () {
}

const sshInfoType = (command, handler) => {
return async function (sr) {
return async function(sr) {
const glusterEndpoint = this::_getGlusterEndpoint(sr)
const cmdShouldRetry = result =>
!result['commandStatus'] &&
@@ -128,8 +129,8 @@ function createVolumeInfoTypes () {
}
}

async function profileType (sr) {
async function parseProfile (parsed) {
async function profileType(sr) {
async function parseProfile(parsed) {
const volume = parsed['volProfile']
volume['bricks'] = ensureArray(volume['brick'])
delete volume['brick']
@@ -139,8 +140,8 @@ function createVolumeInfoTypes () {
return this::sshInfoType('profile xosan info', parseProfile)(sr)
}

async function profileTopType (sr) {
async function parseTop (parsed) {
async function profileTopType(sr) {
async function parseTop(parsed) {
const volume = parsed['volTop']
volume['bricks'] = ensureArray(volume['brick'])
delete volume['brick']
@@ -154,7 +155,7 @@ function createVolumeInfoTypes () {
}))
}

function checkHosts (sr) {
function checkHosts(sr) {
const xapi = this.getXapi(sr)
const data = getXosanConfig(sr, xapi)
const network = xapi.getObject(data.network)
@@ -179,7 +180,7 @@ function createVolumeInfoTypes () {

const VOLUME_INFO_TYPES = createVolumeInfoTypes()

export async function getVolumeInfo ({ sr, infoType }) {
export async function getVolumeInfo({ sr, infoType }) {
await this.checkXosanLicense({ srId: sr.uuid })

const glusterEndpoint = this::_getGlusterEndpoint(sr)
@@ -210,7 +211,7 @@ getVolumeInfo.resolve = {
sr: ['sr', 'SR', 'administrate'],
}

export async function profileStatus ({ sr, changeStatus = null }) {
export async function profileStatus({ sr, changeStatus = null }) {
await this.checkXosanLicense({ srId: sr.uuid })

const glusterEndpoint = this::_getGlusterEndpoint(sr)
@@ -239,7 +240,7 @@ profileStatus.resolve = {
sr: ['sr', 'SR', 'administrate'],
}

function reconfigurePifIP (xapi, pif, newIP) {
function reconfigurePifIP(xapi, pif, newIP) {
xapi.call(
'PIF.reconfigure_ip',
pif.$ref,
@@ -252,7 +253,7 @@ function reconfigurePifIP (xapi, pif, newIP) {
}

// this function should probably become fixSomething(thingToFix, parmas)
export async function fixHostNotInNetwork ({ xosanSr, host }) {
export async function fixHostNotInNetwork({ xosanSr, host }) {
await this.checkXosanLicense({ srId: xosanSr.uuid })

const xapi = this.getXapi(xosanSr)
@@ -296,22 +297,22 @@ fixHostNotInNetwork.resolve = {
sr: ['sr', 'SR', 'administrate'],
}

function floor2048 (value) {
function floor2048(value) {
return 2048 * Math.floor(value / 2048)
}

async function copyVm (xapi, originalVm, sr) {
async function copyVm(xapi, originalVm, sr) {
return { sr, vm: await xapi.copyVm(originalVm, sr) }
}

async function callPlugin (xapi, host, command, params) {
debug('calling plugin', host.address, command)
async function callPlugin(xapi, host, command, params) {
log.debug(`calling plugin ${host.address} ${command}`)
return JSON.parse(
await xapi.call('host.call_plugin', host.$ref, 'xosan.py', command, params)
)
}

async function remoteSsh (glusterEndpoint, cmd, ignoreError = false) {
async function remoteSsh(glusterEndpoint, cmd, ignoreError = false) {
let result
const formatSshError = result => {
const messageArray = []
@@ -346,15 +347,12 @@ async function remoteSsh (glusterEndpoint, cmd, ignoreError = false) {
}
}
}
debug(
result.command.join(' '),
'\n =>exit:',
result.exit,
'\n =>err :',
result.stderr,
'\n =>out (1000 chars) :',
result.stdout.substring(0, 1000)
)

log.debug(`result of ${result.command.join(' ')}`, {
exit: result.exit,
err: result.stderr,
out: result.stdout.substring(0, 1000),
})
// 255 seems to be ssh's own error codes.
if (result.exit !== 255) {
if (!ignoreError && result.exit !== 0) {
@@ -370,7 +368,7 @@ async function remoteSsh (glusterEndpoint, cmd, ignoreError = false) {
)
}

function findErrorMessage (commandResut) {
function findErrorMessage(commandResut) {
if (commandResut['exit'] === 0 && commandResut.parsed) {
const cliOut = commandResut.parsed['cliOutput']
if (cliOut['opErrstr'] && cliOut['opErrstr'].length) {
@@ -386,7 +384,7 @@ function findErrorMessage (commandResut) {
: commandResut['stdout']
}

async function glusterCmd (glusterEndpoint, cmd, ignoreError = false) {
async function glusterCmd(glusterEndpoint, cmd, ignoreError = false) {
const result = await remoteSsh(
glusterEndpoint,
`gluster --mode=script --xml ${cmd}`,
@@ -416,7 +414,7 @@ async function glusterCmd (glusterEndpoint, cmd, ignoreError = false) {
return result
}

const createNetworkAndInsertHosts = defer(async function (
const createNetworkAndInsertHosts = defer(async function(
$defer,
xapi,
pif,
@@ -456,7 +454,7 @@ const createNetworkAndInsertHosts = defer(async function (
return xosanNetwork
})

async function getOrCreateSshKey (xapi) {
async function getOrCreateSshKey(xapi) {
let sshKey = xapi.xo.getData(xapi.pool, 'xosan_ssh_key')

if (!sshKey) {
@@ -489,7 +487,7 @@ async function getOrCreateSshKey (xapi) {
return sshKey
}

const _probePoolAndWaitForPresence = defer(async function (
const _probePoolAndWaitForPresence = defer(async function(
$defer,
glusterEndpoint,
addresses
@@ -501,7 +499,7 @@ const _probePoolAndWaitForPresence = defer(async function (
)
})

function shouldRetry (peers) {
function shouldRetry(peers) {
for (const peer of peers) {
if (peer.state === '4') {
return true
@@ -519,7 +517,7 @@ const _probePoolAndWaitForPresence = defer(async function (
return rateLimitedRetry(getPoolStatus, shouldRetry)
})

async function configureGluster (
async function configureGluster(
redundancy,
ipAndHosts,
glusterEndpoint,
@@ -552,7 +550,7 @@ async function configureGluster (
creation +
' ' +
brickVms.map(ipAndHost => ipAndHost.brickName).join(' ')
debug('creating volume: ', volumeCreation)
log.debug(`creating volume: ${volumeCreation}`)
await glusterCmd(glusterEndpoint, volumeCreation)
await glusterCmd(
glusterEndpoint,
@@ -606,7 +604,7 @@ async function configureGluster (
await _setQuota(glusterEndpoint)
}

async function _setQuota (glusterEndpoint) {
async function _setQuota(glusterEndpoint) {
await glusterCmd(glusterEndpoint, 'volume quota xosan enable', true)
await glusterCmd(
glusterEndpoint,
@@ -620,11 +618,11 @@ async function _setQuota (glusterEndpoint) {
)
}

async function _removeQuota (glusterEndpoint) {
async function _removeQuota(glusterEndpoint) {
await glusterCmd(glusterEndpoint, 'volume quota xosan disable', true)
}

export const createSR = defer(async function (
export const createSR = defer(async function(
$defer,
{
template,
@@ -762,7 +760,7 @@ export const createSR = defer(async function (
glusterType,
arbiter
)
debug('xosan gluster volume started')
log.debug('xosan gluster volume started')
// We use 10 IPs of the gluster VM range as backup, in the hope that even if the first VM gets destroyed we find at least
// one VM to give mount the volfile.
// It is not possible to edit the device_config after the SR is created and this data is only used at mount time when rebooting
@@ -785,7 +783,7 @@ export const createSR = defer(async function (
true,
{}
)
debug('sr created')
log.debug('sr created')
// we just forget because the cleanup actions are stacked in the $onFailure system
$defer.onFailure(() => xapi.forgetSr(xosanSrRef))
if (arbiter) {
@@ -809,7 +807,7 @@ export const createSR = defer(async function (
redundancy,
})
CURRENT_POOL_OPERATIONS[poolId] = { ...OPERATION_OBJECT, state: 6 }
debug('scanning new SR')
log.debug('scanning new SR')
await xapi.call('SR.scan', xosanSrRef)
await this.rebindLicense({
licenseId: license.id,
@@ -857,7 +855,7 @@ createSR.resolve = {
pif: ['pif', 'PIF', 'administrate'],
}

async function umountDisk (localEndpoint, diskMountPoint) {
async function umountDisk(localEndpoint, diskMountPoint) {
await remoteSsh(
localEndpoint,
`killall -v -w /usr/sbin/xfs_growfs; fuser -v ${diskMountPoint}; umount ${diskMountPoint} && sed -i '\\_${diskMountPoint}\\S_d' /etc/fstab && rm -rf ${diskMountPoint}`
@@ -865,7 +863,7 @@ async function umountDisk (localEndpoint, diskMountPoint) {
}

// this is mostly what the LVM SR driver does, but we are avoiding the 2To limit it imposes.
async function createVDIOnLVMWithoutSizeLimit (xapi, lvmSr, diskSize) {
async function createVDIOnLVMWithoutSizeLimit(xapi, lvmSr, diskSize) {
const VG_PREFIX = 'VG_XenStorage-'
const LV_PREFIX = 'LV-'
const { type, uuid: srUuid, $PBDs } = xapi.getObject(lvmSr)
@@ -896,7 +894,7 @@ async function createVDIOnLVMWithoutSizeLimit (xapi, lvmSr, diskSize) {
}
}

async function createNewDisk (xapi, sr, vm, diskSize) {
async function createNewDisk(xapi, sr, vm, diskSize) {
const newDisk = await createVDIOnLVMWithoutSizeLimit(xapi, sr, diskSize)
await xapi.createVbd({ vdi: newDisk, vm })
let vbd = await xapi._waitObjectState(newDisk.$id, disk =>
@@ -906,7 +904,7 @@ async function createNewDisk (xapi, sr, vm, diskSize) {
return '/dev/' + vbd.device
}

async function mountNewDisk (localEndpoint, hostname, newDeviceFiledeviceFile) {
async function mountNewDisk(localEndpoint, hostname, newDeviceFiledeviceFile) {
const brickRootCmd =
'bash -c \'mkdir -p /bricks; for TESTVAR in {1..9}; do TESTDIR="/bricks/xosan$TESTVAR" ;if mkdir $TESTDIR; then echo $TESTDIR; exit 0; fi ; done ; exit 1\''
const newBrickRoot = (await remoteSsh(
@@ -919,7 +917,7 @@ async function mountNewDisk (localEndpoint, hostname, newDeviceFiledeviceFile) {
return brickName
}

async function replaceBrickOnSameVM (
async function replaceBrickOnSameVM(
xosansr,
previousBrick,
newLvmSr,
@@ -996,7 +994,7 @@ async function replaceBrickOnSameVM (
}
}

export async function replaceBrick ({
export async function replaceBrick({
xosansr,
previousBrick,
newLvmSr,
@@ -1088,7 +1086,7 @@ replaceBrick.resolve = {
xosansr: ['sr', 'SR', 'administrate'],
}

async function _prepareGlusterVm (
async function _prepareGlusterVm(
xapi,
lvmSr,
newVM,
@@ -1125,7 +1123,7 @@ async function _prepareGlusterVm (
}
}
}
await xapi.addTag(newVM.$id, `XOSAN-${xapi.pool.name_label}`)
await xapi.addTag(newVM.$id, 'XOSAN')
await xapi.editVm(newVM, {
name_label: `XOSAN - ${lvmSr.name_label} - ${
host.name_label
@@ -1139,12 +1137,15 @@ async function _prepareGlusterVm (
.find(vdi => vdi && vdi.name_label === 'xosan_root')
const rootDiskSize = rootDisk.virtual_size
await xapi.startVm(newVM)
debug('waiting for boot of ', ip)
log.debug(`waiting for boot of ${ip}`)
// wait until we find the assigned IP in the networks, we are just checking the boot is complete
const vmIsUp = vm =>
Boolean(vm.$guest_metrics && includes(vm.$guest_metrics.networks, ip))
const vm = await xapi._waitObjectState(newVM.$id, vmIsUp)
debug('booted ', ip)
// fix #3688
const vm = await xapi._waitObjectState(
newVM.$id,
_ => _.guest_metrics !== NULL_REF
)
await xapi._waitObjectState(vm.guest_metrics, _ => includes(_.networks, ip))
log.debug(`booted ${ip}`)
const localEndpoint = { xapi: xapi, hosts: [host], addresses: [ip] }
const srFreeSpace = sr.physical_size - sr.physical_utilisation
// we use a percentage because it looks like the VDI overhead is proportional
@@ -1165,7 +1166,7 @@ async function _prepareGlusterVm (
return { address: ip, host, vm, underlyingSr: lvmSr, brickName }
}

async function _importGlusterVM (xapi, template, lvmsrId) {
async function _importGlusterVM(xapi, template, lvmsrId) {
const templateStream = await this.requestResource(
'xosan',
template.id,
@@ -1183,11 +1184,11 @@ async function _importGlusterVM (xapi, template, lvmsrId) {
return xapi.barrier(newVM.$ref)
}

function _findAFreeIPAddress (nodes, networkPrefix) {
function _findAFreeIPAddress(nodes, networkPrefix) {
return _findIPAddressOutsideList(map(nodes, n => n.vm.ip), networkPrefix)
}

function _findIPAddressOutsideList (
function _findIPAddressOutsideList(
reservedList,
networkPrefix,
vmIpLastNumber = 101
@@ -1206,7 +1207,7 @@ const _median = arr => {
return arr[Math.floor(arr.length / 2)]
}

const insertNewGlusterVm = defer(async function (
const insertNewGlusterVm = defer(async function(
$defer,
xapi,
xosansr,
@@ -1256,7 +1257,7 @@ const insertNewGlusterVm = defer(async function (
return { data, newVM, addressAndHost, glusterEndpoint }
})

export const addBricks = defer(async function (
export const addBricks = defer(async function(
$defer,
{ xosansr, lvmsrs, brickSize }
) {
@@ -1352,7 +1353,7 @@ addBricks.resolve = {
lvmsrs: ['sr', 'SR', 'administrate'],
}

export const removeBricks = defer(async function ($defer, { xosansr, bricks }) {
export const removeBricks = defer(async function($defer, { xosansr, bricks }) {
await this.checkXosanLicense({ srId: xosansr.uuid })

const xapi = this.getXapi(xosansr)
@@ -1398,7 +1399,7 @@ removeBricks.params = {
}
removeBricks.resolve = { xosansr: ['sr', 'SR', 'administrate'] }

export function checkSrCurrentState ({ poolId }) {
export function checkSrCurrentState({ poolId }) {
return CURRENT_POOL_OPERATIONS[poolId]
}

@@ -1458,7 +1459,7 @@ POSSIBLE_CONFIGURATIONS[16] = [
{ layout: 'replica', redundancy: 2, capacity: 8 },
]

function computeBrickSize (srs, brickSize = Infinity) {
function computeBrickSize(srs, brickSize = Infinity) {
const xapi = this.getXapi(srs[0])
const srsObjects = map(srs, srId => xapi.getObject(srId))
const srSizes = map(
@@ -1471,7 +1472,7 @@ function computeBrickSize (srs, brickSize = Infinity) {
)
}

export async function computeXosanPossibleOptions ({
export async function computeXosanPossibleOptions({
lvmSrs,
brickSize = Infinity,
}) {
@@ -1504,7 +1505,7 @@ computeXosanPossibleOptions.params = {

// ---------------------------------------------------------------------

export async function unlock ({ licenseId, sr }) {
export async function unlock({ licenseId, sr }) {
await this.unlockXosanLicense({ licenseId, srId: sr.id })

const glusterEndpoint = this::_getGlusterEndpoint(sr.id)
@@ -1531,7 +1532,7 @@ unlock.resolve = {

// ---------------------------------------------------------------------

export async function downloadAndInstallXosanPack ({ id, version, pool }) {
export async function downloadAndInstallXosanPack({ id, version, pool }) {
if (!this.requestResource) {
throw new Error('requestResource is not a function')
}

@@ -78,19 +78,18 @@ export default class Redis extends Collection {
.then(keys => keys.length !== 0 && redis.del(keys))
).then(() =>
asyncMap(redis.smembers(idsIndex), id =>
redis.hgetall(`${prefix}:${id}`).then(
values =>
values == null
? redis.srem(idsIndex, id) // entry no longer exists
: asyncMap(indexes, index => {
const value = values[index]
if (value !== undefined) {
return redis.sadd(
`${prefix}_${index}:${String(value).toLowerCase()}`,
id
)
}
})
redis.hgetall(`${prefix}:${id}`).then(values =>
values == null
? redis.srem(idsIndex, id) // entry no longer exists
: asyncMap(indexes, index => {
const value = values[index]
if (value !== undefined) {
return redis.sadd(
`${prefix}_${index}:${String(value).toLowerCase()}`,
id
)
}
})
)
)
)

@@ -3,7 +3,7 @@ import assert from 'assert'
import bind from 'lodash/bind'
import blocked from 'blocked'
import createExpress from 'express'
import createLogger from 'debug'
import createLogger from '@xen-orchestra/log'
import has from 'lodash/has'
import helmet from 'helmet'
import includes from 'lodash/includes'
@@ -40,30 +40,37 @@ import passport from 'passport'
import { parse as parseCookies } from 'cookie'
import { Strategy as LocalStrategy } from 'passport-local'

import transportConsole from '@xen-orchestra/log/transports/console'
import { configure } from '@xen-orchestra/log/configure'

// ===================================================================

const debug = createLogger('xo:main')
configure([
{
filter: process.env.DEBUG,
level: 'info',
transport: transportConsole(),
},
])

const warn = (...args) => {
console.warn('[Warn]', ...args)
}
const log = createLogger('xo:main')

// ===================================================================

const DEPRECATED_ENTRIES = ['users', 'servers']

async function loadConfiguration () {
async function loadConfiguration() {
const config = await appConf.load('xo-server', {
appDir: joinPath(__dirname, '..'),
ignoreUnknownFormats: true,
})

debug('Configuration loaded.')
log.info('Configuration loaded.')

// Print a message if deprecated entries are specified.
forEach(DEPRECATED_ENTRIES, entry => {
if (has(config, entry)) {
warn(`${entry} configuration is deprecated.`)
log.warn(`${entry} configuration is deprecated.`)
}
})

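The hunk above swaps the debug-based logger and the hand-rolled warn helper for @xen-orchestra/log. A minimal standalone version of the same bootstrap, using only the calls visible in this diff (the 'xo:example' namespace and the sample messages are placeholders):

import createLogger from '@xen-orchestra/log'
import transportConsole from '@xen-orchestra/log/transports/console'
import { configure } from '@xen-orchestra/log/configure'

// send records at level info and above to the console,
// honouring the usual DEBUG-style filter from the environment
configure([
  {
    filter: process.env.DEBUG,
    level: 'info',
    transport: transportConsole(),
  },
])

const log = createLogger('xo:example')
log.info('Configuration loaded.')
log.warn('web server could not listen', { error: new Error('placeholder') })
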
@@ -72,7 +79,7 @@ async function loadConfiguration () {

// ===================================================================

function createExpressApp () {
function createExpressApp() {
const app = createExpress()

app.use(helmet())
@@ -104,7 +111,7 @@ function createExpressApp () {
return app
}

async function setUpPassport (express, xo) {
async function setUpPassport(express, xo) {
const strategies = { __proto__: null }
xo.registerPassportStrategy = strategy => {
passport.use(strategy)
@@ -207,7 +214,7 @@ async function setUpPassport (express, xo) {

// ===================================================================

async function registerPlugin (pluginPath, pluginName) {
async function registerPlugin(pluginPath, pluginName) {
const plugin = require(pluginPath)
const { description, version = 'unknown' } = (() => {
try {
@@ -248,18 +255,18 @@ async function registerPlugin (pluginPath, pluginName) {
)
}

const debugPlugin = createLogger('xo:plugin')
const logPlugin = createLogger('xo:plugin')

function registerPluginWrapper (pluginPath, pluginName) {
debugPlugin('register %s', pluginName)
function registerPluginWrapper(pluginPath, pluginName) {
logPlugin.info(`register ${pluginName}`)

return registerPlugin.call(this, pluginPath, pluginName).then(
() => {
debugPlugin(`successfully register ${pluginName}`)
logPlugin.info(`successfully register ${pluginName}`)
},
error => {
debugPlugin(`failed register ${pluginName}`)
debugPlugin(error)
logPlugin.info(`failed register ${pluginName}`)
logPlugin.info(error)
}
)
}
@@ -267,7 +274,7 @@ function registerPluginWrapper (pluginPath, pluginName) {
const PLUGIN_PREFIX = 'xo-server-'
const PLUGIN_PREFIX_LENGTH = PLUGIN_PREFIX.length

async function registerPluginsInPath (path) {
async function registerPluginsInPath(path) {
const files = await readdir(path).catch(error => {
if (error.code === 'ENOENT') {
return []
@@ -288,7 +295,7 @@ async function registerPluginsInPath (path) {
)
}

async function registerPlugins (xo) {
async function registerPlugins(xo) {
await Promise.all(
mapToArray(
[`${__dirname}/../node_modules/`, '/usr/local/lib/node_modules/'],
@@ -299,7 +306,7 @@ async function registerPlugins (xo) {

// ===================================================================

async function makeWebServerListen (
async function makeWebServerListen(
webServer,
{
certificate,
@@ -323,25 +330,25 @@ async function makeWebServerListen (
}
try {
const niceAddress = await webServer.listen(opts)
debug(`Web server listening on ${niceAddress}`)
log.info(`Web server listening on ${niceAddress}`)
} catch (error) {
if (error.niceAddress) {
warn(`Web server could not listen on ${error.niceAddress}`)
log.warn(`Web server could not listen on ${error.niceAddress}`, { error })

const { code } = error
if (code === 'EACCES') {
warn(' Access denied.')
warn(' Ports < 1024 are often reserved to privileges users.')
log.warn(' Access denied.')
log.warn(' Ports < 1024 are often reserved to privileges users.')
} else if (code === 'EADDRINUSE') {
warn(' Address already in use.')
log.warn(' Address already in use.')
}
} else {
warn('Web server could not listen:', error.message)
log.warn('Web server could not listen:', { error })
}
}
}

async function createWebServer ({ listen, listenOptions }) {
async function createWebServer({ listen, listenOptions }) {
const webServer = stoppable(new WebServer())

await Promise.all(
@@ -417,7 +424,7 @@ const setUpStaticFiles = (express, opts) => {
}

forEach(paths, path => {
debug('Setting up %s → %s', url, path)
log.info(`Setting up ${url} → ${path}`)

express.use(url, serveStatic(path))
})
@@ -426,8 +433,10 @@

// ===================================================================

const setUpApi = (webServer, xo, verboseLogsOnErrors) => {
|
||||
const setUpApi = (webServer, xo, config) => {
|
||||
const webSocketServer = new WebSocket.Server({
|
||||
...config.apiWebSocketOptions,
|
||||
|
||||
noServer: true,
|
||||
})
|
||||
xo.on('stop', () => pFromCallback(cb => webSocketServer.close(cb)))
|
||||
@@ -435,7 +444,7 @@ const setUpApi = (webServer, xo, verboseLogsOnErrors) => {
|
||||
const onConnection = (socket, upgradeReq) => {
|
||||
const { remoteAddress } = upgradeReq.socket
|
||||
|
||||
debug('+ WebSocket connection (%s)', remoteAddress)
|
||||
log.info(`+ WebSocket connection (${remoteAddress})`)
|
||||
|
||||
// Create the abstract XO object for this connection.
|
||||
const connection = xo.createUserConnection()
|
||||
@@ -453,7 +462,7 @@ const setUpApi = (webServer, xo, verboseLogsOnErrors) => {
|
||||
|
||||
// Close the XO connection with this WebSocket.
|
||||
socket.once('close', () => {
|
||||
debug('- WebSocket connection (%s)', remoteAddress)
|
||||
log.info(`- WebSocket connection (${remoteAddress})`)
|
||||
|
||||
connection.close()
|
||||
})
|
||||
@@ -465,7 +474,7 @@ const setUpApi = (webServer, xo, verboseLogsOnErrors) => {
|
||||
|
||||
const onSend = error => {
|
||||
if (error) {
|
||||
warn('WebSocket send:', error.stack)
|
||||
log.warn('WebSocket send:', { error })
|
||||
}
|
||||
}
|
||||
jsonRpc.on('data', data => {
|
||||
@@ -513,9 +522,9 @@ const setUpConsoleProxy = (webServer, xo) => {
|
||||
}
|
||||
|
||||
const { remoteAddress } = socket
|
||||
debug('+ Console proxy (%s - %s)', user.name, remoteAddress)
|
||||
log.info(`+ Console proxy (${user.name} - ${remoteAddress})`)
|
||||
socket.on('close', () => {
|
||||
debug('- Console proxy (%s - %s)', user.name, remoteAddress)
|
||||
log.info(`- Console proxy (${user.name} - ${remoteAddress})`)
|
||||
})
|
||||
}
|
||||
|
||||
@@ -540,7 +549,7 @@ ${name} v${version}`)(require('../package.json'))
|
||||
|
||||
// ===================================================================
|
||||
|
||||
export default async function main (args) {
|
||||
export default async function main(args) {
|
||||
// makes sure the global Promise has not been changed by a lib
|
||||
assert(global.Promise === require('bluebird'))
|
||||
|
||||
@@ -549,10 +558,10 @@ export default async function main (args) {
|
||||
}
|
||||
|
||||
{
|
||||
const debug = createLogger('xo:perf')
|
||||
const logPerf = createLogger('xo:perf')
|
||||
blocked(
|
||||
ms => {
|
||||
debug('blocked for %sms', ms | 0)
|
||||
logPerf.info(`blocked for ${ms | 0}ms`)
|
||||
},
|
||||
{
|
||||
threshold: 500,
|
||||
@@ -569,14 +578,14 @@ export default async function main (args) {
|
||||
const { user, group } = config
|
||||
if (group) {
|
||||
process.setgid(group)
|
||||
debug('Group changed to', group)
|
||||
log.info(`Group changed to ${group}`)
|
||||
}
|
||||
if (user) {
|
||||
process.setuid(user)
|
||||
debug('User changed to', user)
|
||||
log.info(`User changed to ${user}`)
|
||||
}
|
||||
} catch (error) {
|
||||
warn('Failed to change user/group:', error)
|
||||
log.warn('Failed to change user/group:', { error })
|
||||
}
|
||||
|
||||
// Creates main object.
|
||||
@@ -604,7 +613,7 @@ export default async function main (args) {
|
||||
})
|
||||
|
||||
if (port === undefined) {
|
||||
warn('Could not setup HTTPs redirection: no HTTPs port found')
|
||||
log.warn('Could not setup HTTPs redirection: no HTTPs port found')
|
||||
} else {
|
||||
express.use((req, res, next) => {
|
||||
if (req.secure) {
|
||||
@@ -633,7 +642,7 @@ export default async function main (args) {
|
||||
})
|
||||
|
||||
// Must be set up before the static files.
|
||||
setUpApi(webServer, xo, config.verboseApiLogsOnErrors)
|
||||
setUpApi(webServer, xo, config)
|
||||
|
||||
setUpProxies(express, config.http.proxies, xo)
|
||||
|
||||
@@ -653,17 +662,17 @@ export default async function main (args) {
|
||||
|
||||
process.on(signal, () => {
|
||||
if (alreadyCalled) {
|
||||
warn('forced exit')
|
||||
log.warn('forced exit')
|
||||
process.exit(1)
|
||||
}
|
||||
alreadyCalled = true
|
||||
|
||||
debug('%s caught, closing…', signal)
|
||||
log.info(`${signal} caught, closing…`)
|
||||
xo.stop()
|
||||
})
|
||||
})
|
||||
|
||||
await fromEvent(xo, 'stopped')
|
||||
|
||||
debug('bye :-)')
|
||||
log.info('bye :-)')
|
||||
}
|
||||
|
||||
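Taken together, the hunks above replace the ad-hoc `debug`/`warn` helpers with namespaced loggers from `@xen-orchestra/log`, configured once with a console transport. A minimal sketch of the pattern, using only the API visible in this diff (the `xo:example` namespace and the sample messages are made up for illustration):

import createLogger from '@xen-orchestra/log'
import transportConsole from '@xen-orchestra/log/transports/console'
import { configure } from '@xen-orchestra/log/configure'

// send records at level `info` and above to the console,
// optionally narrowed by the DEBUG environment variable
configure([
  {
    filter: process.env.DEBUG,
    level: 'info',
    transport: transportConsole(),
  },
])

const log = createLogger('xo:example')

log.info('Configuration loaded.')
log.warn('something went wrong', { error: new Error('boom') })

Note that the diff consistently passes `{ error }` as a second argument instead of interpolating `error.stack` into the message, which keeps the error object attached to the log record.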
@@ -1,7 +1,10 @@
import Collection from '../collection/redis'
import createLogger from '@xen-orchestra/log'
import Model from '../model'
import { forEach } from '../utils'

const log = createLogger('xo:plugin-metadata')

// ===================================================================

export default class PluginMetadata extends Model {}
@@ -44,10 +47,7 @@ export class PluginsMetadata extends Collection {
pluginMetadata.configuration =
configuration && JSON.parse(configuration)
} catch (error) {
console.warn(
'cannot parse pluginMetadata.configuration:',
configuration
)
log.warn(`cannot parse pluginMetadata.configuration: ${configuration}`)
pluginMetadata.configuration = []
}
})

@@ -1,22 +1,22 @@
import createDebug from 'debug'
import createLogger from '@xen-orchestra/log'
import partialStream from 'partial-stream'
import { connect } from 'tls'
import { parse } from 'url'

const debug = createDebug('xo:proxy-console')
const log = createLogger('xo:proxy-console')

export default function proxyConsole (ws, vmConsole, sessionId) {
const url = parse(vmConsole.location)
let { hostname } = url
if (hostname === null || hostname === '') {
console.warn(
'host is missing in console (%s) URI (%s)',
vmConsole.uuid,
vmConsole.location
)
const { address } = vmConsole.$VM.$resident_on
console.warn(' using host address (%s) as fallback', address)
hostname = address

log.warn(
`host is missing in console (${vmConsole.uuid}) URI (${
vmConsole.location
}) using host address (${address}) as fallback`
)
}

let closed = false
@@ -41,10 +41,7 @@ export default function proxyConsole (ws, vmConsole, sessionId) {

const onSend = error => {
if (error) {
debug(
'error sending to the XO client: %s',
error.stack || error.message || error
)
log.debug('error sending to the XO client:', { error })
}
}

@@ -52,7 +49,7 @@ export default function proxyConsole (ws, vmConsole, sessionId) {
.pipe(
partialStream('\r\n\r\n', headers => {
// TODO: check status code 200.
debug('connected')
log.debug('connected')
})
)
.on('data', data => {
@@ -63,7 +60,7 @@ export default function proxyConsole (ws, vmConsole, sessionId) {
.on('end', () => {
if (!closed) {
closed = true
debug('disconnected from the console')
log.debug('disconnected from the console')
}

ws.close()
@@ -71,10 +68,7 @@ export default function proxyConsole (ws, vmConsole, sessionId) {

ws.on('error', error => {
closed = true
debug(
'error from the XO client: %s',
error.stack || error.message || error
)
log.debug('error from the XO client:', { error })

socket.end()
})
@@ -86,7 +80,7 @@ export default function proxyConsole (ws, vmConsole, sessionId) {
.on('close', () => {
if (!closed) {
closed = true
debug('disconnected from the XO client')
log.debug('disconnected from the XO client')
}

socket.end()
@@ -94,7 +88,7 @@ export default function proxyConsole (ws, vmConsole, sessionId) {
}
).on('error', error => {
closed = true
debug('error from the console: %s', error.stack || error.message || error)
log.debug('error from the console:', { error })

ws.close()
})
15  packages/xo-server/src/schemas/log/taskInfo.js  Normal file
@@ -0,0 +1,15 @@
export default {
  $schema: 'http://json-schema.org/draft-04/schema#',
  type: 'object',
  properties: {
    event: {
      enum: ['task.info'],
    },
    taskId: {
      type: 'string',
      description: 'identifier of the parent task or job',
    },
    data: {},
  },
  required: ['event', 'taskId'],
}

15  packages/xo-server/src/schemas/log/taskWarning.js  Normal file
@@ -0,0 +1,15 @@
export default {
  $schema: 'http://json-schema.org/draft-04/schema#',
  type: 'object',
  properties: {
    event: {
      enum: ['task.warning'],
    },
    taskId: {
      type: 'string',
      description: 'identifier of the parent task or job',
    },
    data: {},
  },
  required: ['event', 'taskId'],
}
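Both new schemas describe events emitted while a task runs: a fixed `event` discriminator, the `taskId` of the parent task or job, and a free-form `data` payload. Log entries matching them would look roughly like this (the ids and payloads are made up for illustration):

// matches schemas/log/taskInfo.js
{ event: 'task.info', taskId: '0b1d4c2e', data: { message: 'snapshot created' } }

// matches schemas/log/taskWarning.js
{ event: 'task.warning', taskId: '0b1d4c2e', data: { message: 'VDI chain is getting long' } }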
43  packages/xo-server/src/sensitive-values.js  Normal file
@@ -0,0 +1,43 @@
import mapValues from 'lodash/mapValues'

// this random value is used to obfuscate real data
const OBFUSCATED_VALUE = 'q3oi6d9X8uenGvdLnHk2'

export const merge = (newValue, oldValue) => {
  if (newValue === OBFUSCATED_VALUE) {
    return oldValue
  }

  let isArray

  if (
    newValue === null ||
    oldValue === null ||
    typeof newValue !== 'object' ||
    typeof oldValue !== 'object' ||
    (isArray = Array.isArray(newValue)) !== Array.isArray(oldValue)
  ) {
    return newValue
  }

  const iteratee = (v, k) => merge(v, oldValue[k])
  return isArray ? newValue.map(iteratee) : mapValues(newValue, iteratee)
}

export const obfuscate = value => replace(value, OBFUSCATED_VALUE)

export function replace (value, replacement) {
  function helper (value, name) {
    if (name === 'password' && typeof value === 'string') {
      return replacement
    }

    if (typeof value !== 'object' || value === null) {
      return value
    }

    return Array.isArray(value) ? value.map(helper) : mapValues(value, helper)
  }

  return helper(value)
}
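These helpers keep secrets out of data exposed to clients: `replace`/`obfuscate` swap any string stored under a `password` key for the obfuscation marker, and `merge` restores the original value when an edited object still carries the untouched marker. A minimal usage sketch (the server record below is made up for illustration):

import * as sensitiveValues from './sensitive-values'

const stored = { host: 'xen1.example.org', password: 'hunter2' }

// what the client receives: the password is replaced by the marker
const exposed = sensitiveValues.obfuscate(stored)

// the client edits the host but sends the marker back unchanged;
// merge() restores the real password before the record is saved
const edited = { ...exposed, host: 'xen2.example.org' }
const toSave = sensitiveValues.merge(edited, stored)
// → { host: 'xen2.example.org', password: 'hunter2' }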
@@ -1,6 +1,5 @@
// @flow

// $FlowFixMe
import through2 from 'through2'
import { type Readable } from 'stream'

@@ -6,7 +6,6 @@ import humanFormat from 'human-format'
import isArray from 'lodash/isArray'
import isString from 'lodash/isString'
import keys from 'lodash/keys'
import kindOf from 'kindof'
import multiKeyHashInt from 'multikey-hash'
import pick from 'lodash/pick'
import tmp from 'tmp'
@@ -192,35 +191,6 @@ export const noop = () => {}

// -------------------------------------------------------------------

// Usage: pDebug(promise, name) or promise::pDebug(name)
export function pDebug (promise, name) {
if (arguments.length === 1) {
name = promise
promise = this
}

Promise.resolve(promise).then(
value => {
console.log(
'%s',
`Promise ${name} resolved${
value !== undefined ? ` with ${kindOf(value)}` : ''
}`
)
},
reason => {
console.log(
'%s',
`Promise ${name} rejected${
reason !== undefined ? ` with ${kindOf(reason)}` : ''
}`
)
}
)

return promise
}

// Given a collection (array or object) which contains promises,
// return a promise that is fulfilled when all the items in the
// collection are either fulfilled or rejected.
Some files were not shown because too many files have changed in this diff.