Compare commits

...

67 Commits

Author SHA1 Message Date
Pierre Donias
1efe1d82cf listMissingPatchesFailed 2019-10-14 15:59:38 +02:00
Pierre Donias
c4b4ee6476 fix(xo-server,xo-web,xo-common): list missing patches error handling 2019-10-14 15:59:37 +02:00
Pierre Donias
211ede92cc fix(xo-web/new/sr): reattach SR (#4550)
See #4546
2019-10-14 14:57:24 +02:00
Julien Fontanet
256af03772 chore: use typeof instead of lodash/isString (#4603) 2019-10-14 13:58:58 +02:00
Julien Fontanet
654fd5a4f9 chore: use typeof instead of lodash/isBoolean (#4604) 2019-10-14 13:51:45 +02:00
Julien Fontanet
541d90e49f fix(xo-server-recover-account): explicit appDir 2019-10-12 17:07:14 +02:00
Julien Fontanet
974e7038e7 chore(backups-cli): dont fail on missing dirs 2019-10-12 00:49:16 +02:00
Pierre Donias
e2f5b30aa9 fix(xo-server/patching): install XCP-ng patches host by host (#4532)
Fixes #4468

To prevent issues where 2 hosts try to modify the XAPI DB at the same time
2019-10-12 00:39:17 +02:00
Julien Fontanet
3483e7d9e0 chore: update dependencies 2019-10-11 17:15:25 +02:00
Julien Fontanet
56cb20a1af chore: use typeof instead of lodash/isFunction (#4587) 2019-10-11 15:16:37 +02:00
Rajaa.BARHTAOUI
64929653dd feat(xo-server): upgrade to execa@2.0.5 (#4584) 2019-10-11 15:01:56 +02:00
Julien Fontanet
c955da9bc6 feat(backups-cli): lowlevel tool to help with backups (#4556) 2019-10-11 14:48:37 +02:00
Julien Fontanet
291354fa8e chore: use Object.assign instead of lodash/assign (#4585)
Support is good enough:
- https://node.green/#ES2015-built-in-extensions-Object-static-methods-Object-assign
- https://kangax.github.io/compat-table/es6/#test-Object_static_methods
2019-10-11 14:43:38 +02:00
Julien Fontanet
905d736512 chore: use Array.isArray instead of lodash/isArray (#4586)
Support is good enough:
- https://node.green/#ES2015-built-ins-Proxy-Array-isArray-support
- https://kangax.github.io/compat-table/es5/#test-Array_methods
2019-10-11 14:29:01 +02:00
Rajaa.BARHTAOUI
3406d6e2a9 chore(CHANGELOG): 5.39.1 (#4597) 2019-10-11 10:33:18 +02:00
BARHTAOUI
fc10b5ffb9 chore(CHANGELOG): update next 2019-10-10 16:03:20 +02:00
BARHTAOUI
f89c313166 feat(xo-web): 5.50.3 2019-10-10 16:03:20 +02:00
Rajaa.BARHTAOUI
7c734168d0 feat(xo-web/xoa): expose 'xoa check' on the UI (#4574)
See #4513
2019-10-10 13:49:17 +02:00
HamadaBrest
1e7bfec2ce feat(xo-web/hub): delete template by namespace instead of ID (#4594) 2019-10-10 10:36:07 +02:00
badrAZ
1eb0603b4e chore(xo-server-test/backup-ng): consolidate default values (#4544)
Required for #4470
2019-10-08 14:34:11 +02:00
badrAZ
4b32730ce8 feat(xo-web/vm): improve invalid cores per socket feedback (#4187)
Fixes #4120
2019-10-08 11:05:11 +02:00
BenjiReis
ad083c1d9b chore(xo-server-sdn-controller): better cert creation code (#4582) 2019-10-07 12:12:01 +02:00
BenjiReis
b4f84c2de2 chore(xo-server-sdn-controller): arrow functions when possible (#4583) 2019-10-07 11:29:30 +02:00
badrAZ
fc17443ce4 fix(xo-web/vm/advanced): error on displaying ACL users (#4578) 2019-10-07 11:08:11 +02:00
Julien Fontanet
342ae06b21 chore(xo-sdn-controller): minor formatting fix 2019-10-07 10:19:34 +02:00
Julien Fontanet
093fb7f959 fix(xo-server-logs): explicit appDir
May fix #4576
2019-10-03 16:02:10 +02:00
Julien Fontanet
f6472424ad fix(eslint): disable lines-between-class-members rule 2019-10-02 15:52:51 +02:00
Julien Fontanet
31ed3767c6 chore: fix some lint
Mainly: `obj['prop']` → `obj.prop`
2019-10-02 15:45:32 +02:00
marcpezin
366acb65ea doc(updater): release channels (#4572) 2019-10-02 15:24:45 +02:00
Pierre Donias
7c6946931b chore(xo-web): PascalCase NotFound component (#4567)
https://github.com/yannickcr/eslint-plugin-react/blob/master/docs/rules/jsx-pascal-case.md
2019-10-01 17:21:00 +02:00
HamadaBrest
5d971433a5 fix(xo-web/new-vm/self): template selection (#4568)
Fixes #4565
Introduced by 9efc3dd1fb
2019-10-01 16:59:59 +02:00
Rajaa.BARHTAOUI
05264b326b chore(CHANGELOG): 5.39.0 (#4561) 2019-09-30 16:08:56 +02:00
BARHTAOUI
fdd5c6bfd8 chore(CHANGELOG): update next 2019-09-30 15:13:05 +02:00
BARHTAOUI
42c3528c2f feat(xo-web): 5.50.2 2019-09-30 15:13:05 +02:00
HamadaBrest
18640714f1 fix(xo-web/hub/resource): "remove" button icon (#4559) 2019-09-30 14:43:31 +02:00
HamadaBrest
cda4d3399b fix(xo-web/hub): responsive hub VM templates (#4558)
Fixes #4557
2019-09-30 14:42:01 +02:00
Julien Fontanet
4da8af6e69 fix(log/configure): process nested transport 2019-09-27 14:55:26 +02:00
Julien Fontanet
b535565612 feat(@xen-orchestra/log): 0.2.0 2019-09-27 14:29:34 +02:00
Julien Fontanet
bef39b8a96 fix(log): support DEBUG and NODE_DEBUG when both defined 2019-09-27 14:26:19 +02:00
Julien Fontanet
fb2502a031 feat(log): support env var LOG_LEVEL 2019-09-27 14:23:27 +02:00
Julien Fontanet
0b90befda1 feat(log): also export createLogger symbol 2019-09-27 13:59:44 +02:00
Julien Fontanet
f9800f104a feat(log/console): display data inline 2019-09-27 13:59:03 +02:00
Julien Fontanet
99134cc381 Merge pull request #4554 from vatesfr/release
Technical release
2019-09-27 10:22:49 +02:00
BARHTAOUI
66ca08da6d chore(CHANGELOG): update next 2019-09-27 10:15:46 +02:00
BARHTAOUI
5eb7ece6ba feat(xo-web): 5.50.1 2019-09-27 09:59:05 +02:00
BARHTAOUI
6b3d334e76 feat(xo-server): 5.50.1 2019-09-27 09:58:51 +02:00
badrAZ
14f5fd8f73 feat(xen-api): 0.27.2 2019-09-27 09:54:47 +02:00
BARHTAOUI
5f73aee0df feat(xo-server-cloud): 0.3.0 2019-09-27 09:47:16 +02:00
HamadaBrest
f8666ba367 fix(xo-web/hub): properly display community info (#4552) 2019-09-26 16:57:53 +02:00
badrAZ
9e80f76dd8 fix(xo-server-test/backup-ng): force fetching uptodate logs (#4551)
See a6d182e

The issue:

xo-server-test executes multiple backups sequentially and validates their execution using logs. The issue is that the logs are not up-to-date in the second fetch because the method returns the previous cached response.

The solution:

Add _forceRefresh param to backupNg.getLogs to be able to clear the method cache. It's just a work-around which will be removed once the consolidated logs will be stored in the DB.
2019-09-26 16:42:12 +02:00
HamadaBrest
c76a5eaf67 feat: hub: XVA templates store (#4442)
Fixes #1918
2019-09-26 16:36:32 +02:00
Julien Fontanet
cd378f0168 chore(PULL_REQUEST_TEMPLATE): strikethrough not relevant items 2019-09-26 14:24:54 +02:00
badrAZ
7d51ff0cf5 feat(xo-server/_pDebounceWithKey): ability to clear cache entry (#4549)
Fixes #4548
2019-09-26 14:17:33 +02:00
Julien Fontanet
47819ea956 chore(PULL_REQUEST_TEMPLATE): better checklist + 4 agreements (#4547) 2019-09-25 17:41:32 +02:00
Julien Fontanet
c7e3560c98 chore(normalize-packages): add --access public to npm publish
Only for non-private packages without existing `postversion` script.
2019-09-25 17:36:36 +02:00
Julien Fontanet
b24400b21d feat(xen-api): support IPv6 addresses (#4521)
Fixes #4520
2019-09-24 13:59:49 +02:00
Pierre Donias
6c1d651687 fix(xo-server/host.isHostServerTimeConsistent): dont return false on check failure (#4540) 2019-09-24 10:46:43 +02:00
Pierre Donias
e7757b53e7 feat(xo-web/new-vm): remove cloud init plan limitation (#4543) 2019-09-24 10:24:27 +02:00
Julien Fontanet
a6d182e92d feat(xo-server/getBackupNgLogs): implement debounce (#4509) (#4541)
Similar to #4509

Fixes xoa-support#1676

For now, the delay is set to 10s which is the duration used by xo-web's
subscription, which makes it enough to make it independent of the number of
clients.

In the future, this could be configurable, but we may simply do the
consolidation only once during the backup execution.
2019-09-23 16:20:17 +02:00
Julien Fontanet
925eca1463 chore(xo-server/api): context has methods bound to XO
This makes sure the correct context is used, which is necessary for properties write and for debouncing.
2019-09-23 15:59:45 +02:00
Julien Fontanet
8b454f0d39 chore(xo-server/MultiKeyMap): add basic tests 2019-09-23 15:14:54 +02:00
Pierre Donias
7c4d110353 feat(xo-web/settings/logs): identify XAPI errors (#4385)
Fixes #4101
2019-09-23 10:31:34 +02:00
Julien Fontanet
6df55523b6 feat(xo-web): display node in list of packages 2019-09-20 17:22:21 +02:00
badrAZ
3ec6a24634 chore(CHANGELOG): update next 2019-09-20 16:36:04 +02:00
badrAZ
164b4218c4 feat(xo-web): 5.50.0 2019-09-20 16:21:38 +02:00
badrAZ
56df8a6477 feat(xo-server): 5.50.0 2019-09-20 16:21:22 +02:00
badrAZ
47a83b312d feat(@xen-orchestra/template): 0.1.0 2019-09-20 16:20:26 +02:00
149 changed files with 3117 additions and 1900 deletions

View File

@@ -21,7 +21,7 @@ module.exports = {
overrides: [
{
files: ['cli.js', '*-cli.js', 'packages/*cli*/**/*.js'],
files: ['cli.js', '*-cli.js', '**/*cli*/**/*.js'],
rules: {
'no-console': 'off',
},
@@ -40,6 +40,13 @@ module.exports = {
'react/jsx-handler-names': 'off',
// disabled because not always relevant, we might reconsider in the future
//
// enabled by https://github.com/standard/eslint-config-standard/commit/319b177750899d4525eb1210686f6aca96190b2f
//
// example: https://github.com/vatesfr/xen-orchestra/blob/31ed3767c67044ca445658eb6b560718972402f2/packages/xen-api/src/index.js#L156-L157
'lines-between-class-members': 'off',
'no-console': ['error', { allow: ['warn', 'error'] }],
'no-var': 'error',
'node/no-extraneous-import': 'error',

View File

@@ -36,7 +36,7 @@
"@babel/preset-env": "^7.0.0",
"@babel/preset-flow": "^7.0.0",
"babel-plugin-lodash": "^3.3.2",
"cross-env": "^5.1.3",
"cross-env": "^6.0.3",
"rimraf": "^3.0.0"
},
"scripts": {

View File

@@ -0,0 +1,378 @@
#!/usr/bin/env node

// CLI entry point: validate the command line *before* loading any dependency
// so that `--help`/bad usage stays instantaneous.
const args = process.argv.slice(2)
if (
  args.length === 0 ||
  // NOTE(review): this -h/--help test is redundant — any args[0] !==
  // 'clean-vms' (including '-h') already triggers the usage message below
  /^(?:-h|--help)$/.test(args[0]) ||
  args[0] !== 'clean-vms'
) {
  console.log('Usage: xo-backups clean-vms [--force] xo-vm-backups/*')

  // top-level `return` is valid here because CommonJS wraps the file in a
  // function
  // eslint-disable-next-line no-process-exit
  return process.exit(1)
}

// remove `clean-vms` arg which is the only available command ATM
args.splice(0, 1)

// only act (ie delete files) if `--force` is present; it must immediately
// follow the command, remaining args are treated as VM directories
const force = args[0] === '--force'
if (force) {
  args.splice(0, 1)
}
// -----------------------------------------------------------------------------
const assert = require('assert')
const lockfile = require('proper-lockfile')
const { default: Vhd } = require('vhd-lib')
const { curryRight, flatten } = require('lodash')
const { dirname, resolve } = require('path')
const { DISK_TYPE_DIFFERENCING } = require('vhd-lib/dist/_constants')
const { pipe, promisifyAll } = require('promise-toolbox')
const fs = promisifyAll(require('fs'))
const handler = require('@xen-orchestra/fs').getHandler({ url: 'file://' })
// -----------------------------------------------------------------------------
// map an array or iterable with an (async) function and wait for all results
//
// curried on the right (lodash `curryRight`): `asyncMap(fn)` returns a
// function taking the iterable — handy in `pipe` chains below
const asyncMap = curryRight((iterable, fn) =>
  Promise.all(
    Array.isArray(iterable) ? iterable.map(fn) : Array.from(iterable, fn)
  )
)
const filter = (...args) => thisArg => thisArg.filter(...args)
// TODO: better check?
// our heuristic is not good enough, there has been some false positives
// (detected as invalid by us but valid by `tar` and imported with success),
// either:
// - these files were normal but the check is incorrect
// - these files were invalid but without data loss
// - these files were invalid but with silent data loss
//
// FIXME: the heuristic does not work if the XVA is compressed, we need to
// implement a specific test for it
//
// maybe reading the end of the file looking for a file named
// /^Ref:\d+/\d+\.checksum$/ and then validating the tar structure from it
//
// https://github.com/npm/node-tar/issues/234#issuecomment-538190295
// Heuristic validity check for an XVA (tar) file: a well-formed tar is a
// multiple of 512 bytes, ends with two 512-byte zero blocks and is therefore
// strictly larger than 1024 bytes.
//
// On any unexpected error the file is reported as valid: this check is only
// advisory and must never cause deletions.
const isValidTar = async path => {
  try {
    const fd = await fs.open(path, 'r')
    try {
      const { size } = await fs.fstat(fd)
      if (size <= 1024 || size % 512 !== 0) {
        return false
      }

      // read the last 1024 bytes and ensure they are all zeros
      const tail = Buffer.allocUnsafe(1024)
      const nRead = await fs.read(fd, tail, 0, tail.length, size - tail.length)
      assert.strictEqual(nRead, tail.length)
      return tail.every(byte => byte === 0)
    } finally {
      // fire-and-forget close, errors ignored on purpose
      fs.close(fd).catch(noop)
    }
  } catch (error) {
    // never throw, log and report as valid to avoid side effects
    console.error('isValidTar', path, error)
    return true
  }
}
const noop = Function.prototype

// list the entries of a directory, prefixed with the directory path;
// a missing directory is reported as empty instead of failing
const readDir = path =>
  fs.readdir(path).then(
    names => names.map(name => `${path}/${name}`),
    error => {
      // a missing dir is by definition empty
      if (error != null && error.code === 'ENOENT') {
        return []
      }
      throw error
    }
  )
// -----------------------------------------------------------------------------
// chain is an array of VHDs from child to parent
//
// the whole chain will be merged into parent, parent will be renamed to child
// and all the others will be deleted
async function mergeVhdChain(chain) {
  // a chain is at least a child and its parent
  assert(chain.length >= 2)

  const child = chain[0]
  const parent = chain[chain.length - 1]
  // every VHD of the chain except the final parent, ordered parent-ward first
  const children = chain.slice(0, -1).reverse()

  console.warn('Unused parents of VHD', child)
  chain
    .slice(1)
    .reverse()
    .forEach(parent => {
      console.warn(' ', parent)
    })
  force && console.warn(' merging…')
  console.warn('')
  if (force) {
    // NOTE(review): the actual merge is not implemented yet — with --force we
    // return here, so the rename/unlink below is effectively never executed
    // (its `force &&` guards are always false when reached)
    //
    // `mergeVhd` does not work with a stream, either
    // - make it accept a stream
    // - or create synthetic VHD which is not a stream
    return console.warn('TODO: implement merge')
    // await mergeVhd(
    //   handler,
    //   parent,
    //   handler,
    //   children.length === 1
    //     ? child
    //     : await createSyntheticStream(handler, children)
    // )
  }

  // dry-run path: report (and, were force set, rename the merged parent over
  // the child and delete the intermediate VHDs)
  await Promise.all([
    force && fs.rename(parent, child),
    asyncMap(children.slice(0, -1), child => {
      console.warn('Unused VHD', child)
      force && console.warn(' deleting…')
      console.warn('')
      return force && handler.unlink(child)
    }),
  ])
}
// collect all VHD file paths of a VM directory, i.e. every `*.vhd` under
// `<vmDir>/vdis/<job>/<vdi>/`
const listVhds = async vmDir => {
  const jobDirs = await readDir(vmDir + '/vdis')
  const vdiDirs = flatten(await asyncMap(jobDirs, readDir))
  const entries = flatten(await asyncMap(vdiDirs, readDir))
  return entries.filter(entry => entry.endsWith('.vhd'))
}
// Check and clean up a single VM backup directory:
// 1. delete VHDs whose header/footer are broken
// 2. delete VHDs whose parent chain is broken
// 3. report potentially broken XVAs (never deleted, check is heuristic)
// 4. delete backup metadata (JSON) referencing missing XVAs/VHDs
// 5. delete (or merge) VHDs referenced by no metadata, and delete unused XVAs
//
// Deletions only happen when the global `force` flag is set, otherwise the
// function only reports what it would do.
async function handleVm(vmDir) {
  const vhds = new Set()
  const vhdParents = { __proto__: null } // child path → parent path
  const vhdChildren = { __proto__: null } // parent path → child path

  // remove broken VHDs
  await asyncMap(await listVhds(vmDir), async path => {
    try {
      const vhd = new Vhd(handler, path)
      await vhd.readHeaderAndFooter()
      vhds.add(path)
      if (vhd.footer.diskType === DISK_TYPE_DIFFERENCING) {
        // the parent path stored in the header is relative to the VHD
        const parent = resolve(dirname(path), vhd.header.parentUnicodeName)
        vhdParents[path] = parent
        if (parent in vhdChildren) {
          const error = new Error(
            'this script does not support multiple VHD children'
          )
          error.parent = parent
          error.child1 = vhdChildren[parent]
          error.child2 = path
          throw error // should we throw?
        }
        vhdChildren[parent] = path
      }
    } catch (error) {
      console.warn('Error while checking VHD', path)
      console.warn(' ', error)
      // only assertion errors denote a broken VHD, anything else is left alone
      if (error != null && error.code === 'ERR_ASSERTION') {
        force && console.warn(' deleting…')
        console.warn('')
        force && (await handler.unlink(path))
      }
    }
  })

  // remove VHDs with missing ancestors
  {
    const deletions = []

    // recursively delete (from `vhds` and, with force, from disk) any VHD
    // whose parent is missing or has itself been deleted
    const deleteIfOrphan = vhd => {
      const parent = vhdParents[vhd]
      if (parent === undefined) {
        return
      }

      // no longer needs to be checked
      delete vhdParents[vhd]

      // handle the ancestors first so a deleted ancestor cascades down
      deleteIfOrphan(parent)

      if (!vhds.has(parent)) {
        vhds.delete(vhd)

        console.warn('Error while checking VHD', vhd)
        console.warn(' missing parent', parent)
        force && console.warn(' deleting…')
        console.warn('')
        force && deletions.push(handler.unlink(vhd))
      }
    }

    // > A property that is deleted before it has been visited will not be
    // > visited later.
    // >
    // > -- https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/for...in#Deleted_added_or_modified_properties
    for (const child in vhdParents) {
      deleteIfOrphan(child)
    }

    await Promise.all(deletions)
  }

  const [jsons, xvas] = await readDir(vmDir).then(entries => [
    entries.filter(_ => _.endsWith('.json')),
    new Set(entries.filter(_ => _.endsWith('.xva'))),
  ])

  await asyncMap(xvas, async path => {
    // check is not good enough to delete the file, the best we can do is report
    // it
    if (!(await isValidTar(path))) {
      console.warn('Potential broken XVA', path)
      console.warn('')
    }
  })

  const unusedVhds = new Set(vhds)
  const unusedXvas = new Set(xvas)

  // compile the list of unused XVAs and VHDs, and remove backup metadata which
  // reference a missing XVA/VHD
  await asyncMap(jsons, async json => {
    const metadata = JSON.parse(await fs.readFile(json))
    const { mode } = metadata
    if (mode === 'full') {
      const linkedXva = resolve(vmDir, metadata.xva)
      if (xvas.has(linkedXva)) {
        unusedXvas.delete(linkedXva)
      } else {
        console.warn('Error while checking backup', json)
        console.warn(' missing file', linkedXva)
        force && console.warn(' deleting…')
        console.warn('')
        force && (await handler.unlink(json))
      }
    } else if (mode === 'delta') {
      // `metadata.vhds` maps disk ids to paths relative to the VM directory
      const linkedVhds = (() => {
        const { vhds } = metadata
        return Object.keys(vhds).map(key => resolve(vmDir, vhds[key]))
      })()

      // FIXME: find better approach by keeping as much of the backup as
      // possible (existing disks) even if one disk is missing
      if (linkedVhds.every(_ => vhds.has(_))) {
        linkedVhds.forEach(_ => unusedVhds.delete(_))
      } else {
        console.warn('Error while checking backup', json)
        const missingVhds = linkedVhds.filter(_ => !vhds.has(_))
        console.warn(
          ' %i/%i missing VHDs',
          missingVhds.length,
          linkedVhds.length
        )
        missingVhds.forEach(vhd => {
          console.warn(' ', vhd)
        })
        force && console.warn(' deleting…')
        console.warn('')
        force && (await handler.unlink(json))
      }
    }
  })

  // TODO: parallelize by vm/job/vdi
  const unusedVhdsDeletion = []
  {
    // VHD chains (as list from child to ancestor) to merge indexed by last
    // ancestor
    const vhdChainsToMerge = { __proto__: null }

    const toCheck = new Set(unusedVhds)

    // walks down the child chain of an unused VHD:
    // - returns the chain [usedChild, …, vhd] if it ends on a used VHD
    // - otherwise deletes the VHD (whole chain unused) and returns undefined
    const getUsedChildChainOrDelete = vhd => {
      if (vhd in vhdChainsToMerge) {
        const chain = vhdChainsToMerge[vhd]
        delete vhdChainsToMerge[vhd]
        return chain
      }

      if (!unusedVhds.has(vhd)) {
        return [vhd]
      }

      // no longer needs to be checked
      toCheck.delete(vhd)

      const child = vhdChildren[vhd]
      if (child !== undefined) {
        const chain = getUsedChildChainOrDelete(child)
        if (chain !== undefined) {
          chain.push(vhd)
          return chain
        }
      }

      console.warn('Unused VHD', vhd)
      force && console.warn(' deleting…')
      console.warn('')
      force && unusedVhdsDeletion.push(handler.unlink(vhd))
    }

    toCheck.forEach(vhd => {
      vhdChainsToMerge[vhd] = getUsedChildChainOrDelete(vhd)
    })

    Object.keys(vhdChainsToMerge).forEach(key => {
      const chain = vhdChainsToMerge[key]
      if (chain !== undefined) {
        unusedVhdsDeletion.push(mergeVhdChain(chain))
      }
    })
  }

  await Promise.all([
    // BUGFIX: the array of deletion/merge promises must be spread —
    // `Promise.all` resolves a non-thenable element (like an array) as-is,
    // so the previous `[unusedVhdsDeletion, …]` neither awaited the deletions
    // nor surfaced their errors before the caller released its lock
    ...unusedVhdsDeletion,
    asyncMap(unusedXvas, path => {
      console.warn('Unused XVA', path)
      force && console.warn(' deleting…')
      console.warn('')
      return force && handler.unlink(path)
    }),
  ])
}
// -----------------------------------------------------------------------------
// main: handle every VM directory given on the command line, in parallel,
// each protected by an on-disk lock against concurrent executions
asyncMap(args, async vmDir => {
  vmDir = resolve(vmDir)

  // TODO: implement this in `xo-server`, not easy because not compatible with
  // `@xen-orchestra/fs`.
  const release = await lockfile.lock(vmDir)
  try {
    await handleVm(vmDir)
  } catch (error) {
    // a failure on one directory must not prevent handling the others
    console.error('handleVm', vmDir, error)
  } finally {
    await release()
  }
}).catch(error => console.error('main', error))

View File

@@ -0,0 +1,27 @@
{
"bin": {
"xo-backups": "index.js"
},
"bugs": "https://github.com/vatesfr/xen-orchestra/issues",
"dependencies": {
"@xen-orchestra/fs": "^0.10.1",
"lodash": "^4.17.15",
"promise-toolbox": "^0.14.0",
"proper-lockfile": "^4.1.1",
"vhd-lib": "^0.7.0"
},
"engines": {
"node": ">=8.16.1"
},
"homepage": "https://github.com/vatesfr/xen-orchestra/tree/master/@xen-orchestra/backups-cli",
"name": "@xen-orchestra/backups-cli",
"repository": {
"directory": "@xen-orchestra/backups-cli",
"type": "git",
"url": "https://github.com/vatesfr/xen-orchestra.git"
},
"scripts": {
"postversion": "npm publish --access public"
},
"version": "0.0.0"
}

View File

@@ -16,7 +16,7 @@
},
"dependencies": {
"golike-defer": "^0.4.1",
"xen-api": "^0.27.1"
"xen-api": "^0.27.2"
},
"scripts": {
"postversion": "npm publish"

View File

@@ -46,7 +46,7 @@
"@babel/core": "^7.0.0",
"@babel/preset-env": "^7.0.0",
"@babel/preset-flow": "^7.0.0",
"cross-env": "^5.1.3",
"cross-env": "^6.0.3",
"rimraf": "^3.0.0"
},
"scripts": {

View File

@@ -34,7 +34,7 @@
"@babel/preset-env": "^7.0.0",
"@babel/preset-flow": "^7.0.0",
"babel-plugin-lodash": "^3.3.2",
"cross-env": "^5.1.3",
"cross-env": "^6.0.3",
"rimraf": "^3.0.0"
},
"scripts": {

View File

@@ -33,7 +33,7 @@
"@babel/core": "^7.0.0",
"@babel/preset-env": "^7.0.0",
"babel-plugin-lodash": "^3.3.2",
"cross-env": "^5.1.3",
"cross-env": "^6.0.3",
"rimraf": "^3.0.0"
},
"scripts": {

View File

@@ -30,7 +30,7 @@
"get-stream": "^4.0.0",
"limit-concurrency-decorator": "^0.4.0",
"lodash": "^4.17.4",
"promise-toolbox": "^0.13.0",
"promise-toolbox": "^0.14.0",
"readable-stream": "^3.0.6",
"through2": "^3.0.0",
"tmp": "^0.1.0",
@@ -46,7 +46,7 @@
"@babel/preset-flow": "^7.0.0",
"async-iterator-to-stream": "^1.1.0",
"babel-plugin-lodash": "^3.3.2",
"cross-env": "^5.1.3",
"cross-env": "^6.0.3",
"dotenv": "^8.0.0",
"index-modules": "^0.3.0",
"rimraf": "^3.0.0"

View File

@@ -1,6 +1,6 @@
{
"name": "@xen-orchestra/log",
"version": "0.1.4",
"version": "0.2.0",
"license": "ISC",
"description": "",
"keywords": [],
@@ -31,14 +31,14 @@
},
"dependencies": {
"lodash": "^4.17.4",
"promise-toolbox": "^0.13.0"
"promise-toolbox": "^0.14.0"
},
"devDependencies": {
"@babel/cli": "^7.0.0",
"@babel/core": "^7.0.0",
"@babel/preset-env": "^7.0.0",
"babel-plugin-lodash": "^3.3.2",
"cross-env": "^5.1.3",
"cross-env": "^6.0.3",
"index-modules": "^0.3.0",
"rimraf": "^3.0.0"
},

View File

@@ -19,7 +19,8 @@ const createTransport = config => {
}
}
let { filter, transport } = config
let { filter } = config
let transport = createTransport(config.transport)
const level = resolve(config.level)
if (filter !== undefined) {
@@ -51,11 +52,12 @@ const symbol =
? Symbol.for('@xen-orchestra/log')
: '@@@xen-orchestra/log'
const { env } = process
global[symbol] = createTransport({
// display warnings or above, and all that are enabled via DEBUG or
// NODE_DEBUG env
filter: process.env.DEBUG || process.env.NODE_DEBUG,
level: LEVELS.INFO,
filter: [env.DEBUG, env.NODE_DEBUG].filter(Boolean).join(','),
level: resolve(env.LOG_LEVEL, LEVELS.INFO),
transport: createConsoleTransport(),
})

View File

@@ -1,5 +1,5 @@
import createTransport from './transports/console'
import LEVELS from './levels'
import LEVELS, { resolve } from './levels'
const symbol =
typeof Symbol !== 'undefined'
@@ -9,7 +9,8 @@ if (!(symbol in global)) {
// the default behavior, without requiring `configure` is to avoid
// logging anything unless it's a real error
const transport = createTransport()
global[symbol] = log => log.level > LEVELS.WARN && transport(log)
const level = resolve(process.env.LOG_LEVEL, LEVELS.WARN)
global[symbol] = log => log.level >= level && transport(log)
}
// -------------------------------------------------------------------
@@ -72,5 +73,5 @@ prototype.wrap = function(message, fn) {
}
}
const createLogger = namespace => new Logger(namespace)
export const createLogger = namespace => new Logger(namespace)
export { createLogger as default }

View File

@@ -13,11 +13,22 @@ for (const name in LEVELS) {
NAMES[LEVELS[name]] = name
}
export const resolve = level => {
if (typeof level === 'string') {
level = LEVELS[level.toUpperCase()]
// resolves to the number representation of a level
//
// returns `defaultLevel` if invalid
export const resolve = (level, defaultLevel) => {
const type = typeof level
if (type === 'number') {
if (level in NAMES) {
return level
}
} else if (type === 'string') {
const nLevel = LEVELS[level.toUpperCase()]
if (nLevel !== undefined) {
return nLevel
}
}
return level
return defaultLevel
}
Object.freeze(LEVELS)

View File

@@ -1,26 +1,23 @@
import LEVELS, { NAMES } from '../levels'
// Bind console methods (necessary for browsers)
/* eslint-disable no-console */
const debugConsole = console.log.bind(console)
const infoConsole = console.info.bind(console)
const warnConsole = console.warn.bind(console)
const errorConsole = console.error.bind(console)
/* eslint-enable no-console */
const { ERROR, INFO, WARN } = LEVELS
const consoleTransport = ({ data, level, namespace, message, time }) => {
const fn =
/* eslint-disable no-console */
level < INFO
? debugConsole
? console.log
: level < WARN
? infoConsole
? console.info
: level < ERROR
? warnConsole
: errorConsole
? console.warn
: console.error
/* eslint-enable no-console */
fn('%s - %s - [%s] %s', time.toISOString(), namespace, NAMES[level], message)
data != null && fn(data)
const args = [time.toISOString(), namespace, NAMES[level], message]
if (data != null) {
args.push(data)
}
fn.apply(console, args)
}
export default () => consoleTransport

View File

@@ -36,7 +36,7 @@
"@babel/preset-env": "^7.0.0",
"babel-plugin-dev": "^1.0.0",
"babel-plugin-lodash": "^3.3.2",
"cross-env": "^5.1.3",
"cross-env": "^6.0.3",
"rimraf": "^3.0.0"
},
"scripts": {

View File

@@ -1,6 +1,6 @@
{
"name": "@xen-orchestra/template",
"version": "0.0.0",
"version": "0.1.0",
"license": "ISC",
"homepage": "https://github.com/vatesfr/xen-orchestra/tree/master/@xen-orchestra/template",
"bugs": "https://github.com/vatesfr/xen-orchestra/issues",
@@ -28,7 +28,7 @@
"@babel/cli": "^7.0.0",
"@babel/core": "^7.0.0",
"@babel/preset-env": "^7.0.0",
"cross-env": "^5.1.3",
"cross-env": "^6.0.3",
"rimraf": "^3.0.0"
},
"scripts": {
@@ -38,7 +38,7 @@
"prebuild": "yarn run clean",
"predev": "yarn run prebuild",
"prepublishOnly": "yarn run build",
"postversion": "npm publish"
"postversion": "npm publish --access public"
},
"dependencies": {
"lodash": "^4.17.15"

View File

@@ -4,8 +4,50 @@
### Enhancements
### Bug fixes
### Released packages
## **5.39.1** (2019-10-11)
![Channel: latest](https://badgen.net/badge/channel/latest/yellow)
### Enhancements
- [Support] Ability to check the XOA on the user interface [#4513](https://github.com/vatesfr/xen-orchestra/issues/4513) (PR [#4574](https://github.com/vatesfr/xen-orchestra/pull/4574))
### Bug fixes
- [VM/new-vm] Fix template selection on creating new VM for resource sets [#4565](https://github.com/vatesfr/xen-orchestra/issues/4565) (PR [#4568](https://github.com/vatesfr/xen-orchestra/pull/4568))
- [VM] Clearer invalid cores per socket error [#4120](https://github.com/vatesfr/xen-orchestra/issues/4120) (PR [#4187](https://github.com/vatesfr/xen-orchestra/pull/4187))
### Released packages
- xo-web v5.50.3
## **5.39.0** (2019-09-30)
### Highlights
- [VM/console] Add a button to connect to the VM via the local SSH client (PR [#4415](https://github.com/vatesfr/xen-orchestra/pull/4415))
- [SDN Controller] Add possibility to encrypt private networks (PR [#4441](https://github.com/vatesfr/xen-orchestra/pull/4441))
- [Backups] Improve performance by caching VM backups listing (PR [#4509](https://github.com/vatesfr/xen-orchestra/pull/4509))
- [HUB] VM template store [#1918](https://github.com/vatesfr/xen-orchestra/issues/1918) (PR [#4442](https://github.com/vatesfr/xen-orchestra/pull/4442))
### Enhancements
- [SR/new] Clarify address formats [#4450](https://github.com/vatesfr/xen-orchestra/issues/4450) (PR [#4460](https://github.com/vatesfr/xen-orchestra/pull/4460))
- [Backup NG/New] Show warning if zstd compression is not supported on a VM [#3892](https://github.com/vatesfr/xen-orchestra/issues/3892) (PRs [#4411](https://github.com/vatesfr/xen-orchestra/pull/4411))
- [VM/disks] Don't hide disks that are attached to the same VM twice [#4400](https://github.com/vatesfr/xen-orchestra/issues/4400) (PR [#4414](https://github.com/vatesfr/xen-orchestra/pull/4414))
- [SDN Controller] Ability to configure MTU for private networks (PR [#4491](https://github.com/vatesfr/xen-orchestra/pull/4491))
- [VM Export] Filenames are now prefixed with datetime [#4503](https://github.com/vatesfr/xen-orchestra/issues/4503)
- [Settings/Logs] Differentiate XS/XCP-ng errors from XO errors [#4101](https://github.com/vatesfr/xen-orchestra/issues/4101) (PR [#4385](https://github.com/vatesfr/xen-orchestra/pull/4385))
- [Backups] Improve performance by caching logs consolidation (PR [#4541](https://github.com/vatesfr/xen-orchestra/pull/4541))
- [New VM] Cloud Init available for all plans (PR [#4543](https://github.com/vatesfr/xen-orchestra/pull/4543))
- [Servers] IPv6 addresses can be used [#4520](https://github.com/vatesfr/xen-orchestra/issues/4520) (PR [#4521](https://github.com/vatesfr/xen-orchestra/pull/4521)) \
Note: They must be enclosed in brackets to differentiate them from the port, e.g.: `[2001:db8::7334]` or `[ 2001:db8::7334]:4343`
### Bug fixes
@@ -15,16 +57,32 @@
- [Network] Fix inability to create a bonded network (PR [#4489](https://github.com/vatesfr/xen-orchestra/pull/4489))
- [Backup restore & Replication] Don't copy `sm_config` to new VDIs which might leads to useless coalesces [#4482](https://github.com/vatesfr/xen-orchestra/issues/4482) (PR [#4484](https://github.com/vatesfr/xen-orchestra/pull/4484))
- [Home] Fix intermediary "no results" display shown on filtering items [#4420](https://github.com/vatesfr/xen-orchestra/issues/4420) (PR [#4456](https://github.com/vatesfr/xen-orchestra/pull/4456))
- [Backup NG/New schedule] Properly show user errors in the form [#3831](https://github.com/vatesfr/xen-orchestra/issues/3831) (PR [#4131](https://github.com/vatesfr/xen-orchestra/pull/4131))
- [VM/Advanced] Fix `"vm.set_domain_type" is not a function` error on switching virtualization mode (PV/HVM) [#4348](https://github.com/vatesfr/xen-orchestra/issues/4348) (PR [#4504](https://github.com/vatesfr/xen-orchestra/pull/4504))
- [Backup NG/logs] Show warning when zstd compression is selected but not supported [#3892](https://github.com/vatesfr/xen-orchestra/issues/3892) (PR [#4375](https://github.com/vatesfr/xen-orchestra/pull/4375)
- [Patches] Fix patches installation for CH 8.0 (PR [#4511](https://github.com/vatesfr/xen-orchestra/pull/4511))
- [Network] Fix inability to set a network name [#4514](https://github.com/vatesfr/xen-orchestra/issues/4514) (PR [4510](https://github.com/vatesfr/xen-orchestra/pull/4510))
- [Backup NG] Fix race conditions that could lead to disabled jobs still running (PR [4510](https://github.com/vatesfr/xen-orchestra/pull/4510))
- [XOA] Remove "Updates" and "Licenses" tabs for non admin users (PR [#4526](https://github.com/vatesfr/xen-orchestra/pull/4526))
- [New VM] Ability to escape [cloud config template](https://xen-orchestra.com/blog/xen-orchestra-5-21/#cloudconfigtemplates) variables [#4486](https://github.com/vatesfr/xen-orchestra/issues/4486) (PR [#4501](https://github.com/vatesfr/xen-orchestra/pull/4501))
- [Backup NG] Properly log and report if job is already running [#4497](https://github.com/vatesfr/xen-orchestra/issues/4497) (PR [4534](https://github.com/vatesfr/xen-orchestra/pull/4534))
- [Host] Fix an issue where host was wrongly reporting time inconsistency (PR [#4540](https://github.com/vatesfr/xen-orchestra/pull/4540))
### Released packages
- xo-server-sdn-controller v0.2.1
- xo-server v5.49.0
- xo-web v5.49.0
- xen-api v0.27.2
- xo-server-cloud v0.3.0
- @xen-orchestra/cron v1.0.4
- xo-server-sdn-controller v0.3.0
- @xen-orchestra/template v0.1.0
- xo-server v5.50.1
- xo-web v5.50.2
## **5.38.0** (2019-08-29)
![Channel: latest](https://badgen.net/badge/channel/latest/yellow)
![Channel: stable](https://badgen.net/badge/channel/stable/green)
### Enhancements
@@ -52,8 +110,6 @@
## **5.37.1** (2019-08-06)
![Channel: stable](https://badgen.net/badge/channel/stable/green)
### Enhancements
- [SDN Controller] Let the user choose on which PIF to create a private network (PR [#4379](https://github.com/vatesfr/xen-orchestra/pull/4379))

View File

@@ -3,31 +3,21 @@
> Keep in mind the changelog is addressed to **users** and should be
> understandable by them.
### Breaking changes
- `xo-server` requires Node 8.
### Enhancements
> Users must be able to say: “Nice enhancement, I'm eager to test it”
- [VM/disks] Don't hide disks that are attached to the same VM twice [#4400](https://github.com/vatesfr/xen-orchestra/issues/4400) (PR [#4414](https://github.com/vatesfr/xen-orchestra/pull/4414))
- [VM/console] Add a button to connect to the VM via the local SSH client (PR [#4415](https://github.com/vatesfr/xen-orchestra/pull/4415))
- [SDN Controller] Add possibility to encrypt private networks (PR [#4441](https://github.com/vatesfr/xen-orchestra/pull/4441))
- [SDN Controller] Ability to configure MTU for private networks (PR [#4491](https://github.com/vatesfr/xen-orchestra/pull/4491))
- [VM Export] Filenames are now prefixed with datetime [#4503](https://github.com/vatesfr/xen-orchestra/issues/4503)
- [Backups] Improve performance by caching VM backups listing (PR [#4509](https://github.com/vatesfr/xen-orchestra/pull/4509))
### Bug fixes
- [Backup NG/New schedule] Properly show user errors in the form [#3831](https://github.com/vatesfr/xen-orchestra/issues/3831) (PR [#4131](https://github.com/vatesfr/xen-orchestra/pull/4131))
> Users must be able to say: “I had this issue, happy to know it's fixed”
- [VM/Advanced] Fix `"vm.set_domain_type" is not a function` error on switching virtualization mode (PV/HVM) [#4348](https://github.com/vatesfr/xen-orchestra/issues/4348) (PR [#4504](https://github.com/vatesfr/xen-orchestra/pull/4504))
- [Backup NG/logs] Show warning when zstd compression is selected but not supported [#3892](https://github.com/vatesfr/xen-orchestra/issues/3892) (PR [#4375](https://github.com/vatesfr/xen-orchestra/pull/4375))
- [Patches] Fix patches installation for CH 8.0 (PR [#4511](https://github.com/vatesfr/xen-orchestra/pull/4511))
- [Network] Fix inability to set a network name [#4514](https://github.com/vatesfr/xen-orchestra/issues/4514) (PR [#4510](https://github.com/vatesfr/xen-orchestra/pull/4510))
- [Backup NG] Fix race conditions that could lead to disabled jobs still running (PR [#4510](https://github.com/vatesfr/xen-orchestra/pull/4510))
- [XOA] Remove "Updates" and "Licenses" tabs for non-admin users (PR [#4526](https://github.com/vatesfr/xen-orchestra/pull/4526))
- [New VM] Ability to escape [cloud config template](https://xen-orchestra.com/blog/xen-orchestra-5-21/#cloudconfigtemplates) variables [#4486](https://github.com/vatesfr/xen-orchestra/issues/4486) (PR [#4501](https://github.com/vatesfr/xen-orchestra/pull/4501))
- [Backup NG] Properly log and report if job is already running [#4497](https://github.com/vatesfr/xen-orchestra/issues/4497) (PR [#4534](https://github.com/vatesfr/xen-orchestra/pull/4534))
- [SR] Fix `[object HTMLInputElement]` name after re-attaching a SR [#4546](https://github.com/vatesfr/xen-orchestra/issues/4546) (PR [#4550](https://github.com/vatesfr/xen-orchestra/pull/4550))
- [Patches] Better error handling when fetching missing patches (PR [#4519](https://github.com/vatesfr/xen-orchestra/pull/4519))
### Released packages
@@ -36,8 +26,5 @@
>
> Rule of thumb: add packages on top.
- @xen-orchestra/template v0.0.0
- @xen-orchestra/cron v1.0.4
- xo-server-sdn-controller v0.3.0
- xo-server v5.50.0
- xo-web v5.50.0
- xo-server v5.51.0
- xo-web v5.51.0

View File

@@ -1,15 +1,19 @@
### Check list
> Check items when done or if not relevant
> Check if done.
>
> Strikethrough if not relevant: ~~example~~ ([doc](https://help.github.com/en/articles/basic-writing-and-formatting-syntax)).
- [ ] PR reference the relevant issue (e.g. `Fixes #007`)
- [ ] PR reference the relevant issue (e.g. `Fixes #007` or `See xoa-support#42`)
- [ ] if UI changes, a screenshot has been added to the PR
- [ ] if `xo-server` API changes, the corresponding test has been added to/updated on [`xo-server-test`](https://github.com/vatesfr/xen-orchestra/tree/master/packages/xo-server-test)
- [ ] `CHANGELOG.unreleased.md`:
- enhancement/bug fix entry added
- list of packages to release updated (`${name} v${new version}`)
- [ ] documentation updated
- [ ] **I have tested added/updated features** (and impacted code)
- `CHANGELOG.unreleased.md`:
- [ ] enhancement/bug fix entry added
- [ ] list of packages to release updated (`${name} v${new version}`)
- **I have tested added/updated features** (and impacted code)
- [ ] unit tests (e.g. [`cron/parse.spec.js`](https://github.com/vatesfr/xen-orchestra/blob/b24400b21de1ebafa1099c56bac1de5c988d9202/%40xen-orchestra/cron/src/parse.spec.js))
- [ ] if `xo-server` API changes, the corresponding test has been added to/updated on [`xo-server-test`](https://github.com/vatesfr/xen-orchestra/tree/master/packages/xo-server-test)
- [ ] at least manual testing
### Process
@@ -17,3 +21,10 @@
1. mark it as `WiP:` (Work in Progress) if not ready to be merged
1. when you want a review, add a reviewer (and only one)
1. if necessary, update your PR, and re-add a reviewer
From [_the Four Agreements_](https://en.wikipedia.org/wiki/Don_Miguel_Ruiz#The_Four_Agreements):
1. Be impeccable with your word.
1. Don't take anything personally.
1. Don't make assumptions.
1. Always do your best.

Binary file not shown.

After

Width:  |  Height:  |  Size: 99 KiB

View File

@@ -41,6 +41,20 @@ However, if you want to start a manual check, you can do it by clicking on the "
![](./assets/xo5updatebutton.png)
#### Release channel
In Xen Orchestra, you can make a choice between two different release channels.
##### Stable
The stable channel is intended to be a version of Xen Orchestra that is already **one month old** (and therefore will benefit from one month of community feedback and various fixes). This way, users more concerned with the stability of their appliance will have the option to stay on a slightly older (and tested) version of XO (still supported by our pro support).
##### Latest
The latest channel will include all the latest improvements available in Xen Orchestra. The version available in latest has already been QA'd by our team, but issues may still occur once deployed in the vastly varying environments of our user base.
> To select the release channel of your choice, go to the XOA > Updates view.
![](./assets/release-channels.png)
#### Upgrade
If a new version is found, you'll have an upgrade button and its tooltip displayed:

View File

@@ -12,18 +12,18 @@
"eslint-config-standard-jsx": "^8.1.0",
"eslint-plugin-eslint-comments": "^3.1.1",
"eslint-plugin-import": "^2.8.0",
"eslint-plugin-node": "^9.0.1",
"eslint-plugin-node": "^10.0.0",
"eslint-plugin-promise": "^4.0.0",
"eslint-plugin-react": "^7.6.1",
"eslint-plugin-standard": "^4.0.0",
"exec-promise": "^0.7.0",
"flow-bin": "^0.106.3",
"flow-bin": "^0.109.0",
"globby": "^10.0.0",
"husky": "^3.0.0",
"jest": "^24.1.0",
"lodash": "^4.17.4",
"prettier": "^1.10.2",
"promise-toolbox": "^0.13.0",
"promise-toolbox": "^0.14.0",
"sorted-object": "^2.0.1"
},
"engines": {

View File

@@ -35,7 +35,7 @@
"@babel/core": "^7.0.0",
"@babel/preset-env": "^7.0.0",
"babel-plugin-lodash": "^3.3.2",
"cross-env": "^5.1.1",
"cross-env": "^6.0.3",
"rimraf": "^3.0.0"
},
"scripts": {

View File

@@ -33,7 +33,7 @@
"@babel/core": "^7.0.0",
"@babel/preset-env": "^7.0.0",
"@babel/preset-flow": "^7.0.0",
"cross-env": "^5.1.3",
"cross-env": "^6.0.3",
"rimraf": "^3.0.0"
},
"scripts": {

View File

@@ -39,10 +39,10 @@
"@babel/core": "^7.0.0",
"@babel/preset-env": "^7.0.0",
"babel-plugin-lodash": "^3.3.2",
"cross-env": "^5.1.3",
"cross-env": "^6.0.3",
"execa": "^2.0.2",
"index-modules": "^0.3.0",
"promise-toolbox": "^0.13.0",
"promise-toolbox": "^0.14.0",
"rimraf": "^3.0.0",
"tmp": "^0.1.0"
},

View File

@@ -26,7 +26,7 @@
"from2": "^2.3.0",
"fs-extra": "^8.0.1",
"limit-concurrency-decorator": "^0.4.0",
"promise-toolbox": "^0.13.0",
"promise-toolbox": "^0.14.0",
"struct-fu": "^1.2.0",
"uuid": "^3.0.1"
},
@@ -37,7 +37,7 @@
"@babel/preset-flow": "^7.0.0",
"@xen-orchestra/fs": "^0.10.1",
"babel-plugin-lodash": "^3.3.2",
"cross-env": "^5.1.3",
"cross-env": "^6.0.3",
"execa": "^2.0.2",
"fs-promise": "^2.0.0",
"get-stream": "^5.1.0",

View File

@@ -13,7 +13,7 @@ import {
import { fuFooter, fuHeader, checksumStruct } from './_structs'
import { test as mapTestBit } from './_bitmap'
export default async function createSyntheticStream(handler, path) {
export default async function createSyntheticStream(handler, paths) {
const fds = []
const cleanup = () => {
for (let i = 0, n = fds.length; i < n; ++i) {
@@ -24,7 +24,7 @@ export default async function createSyntheticStream(handler, path) {
}
try {
const vhds = []
while (true) {
const open = async path => {
const fd = await handler.openFile(path, 'r')
fds.push(fd)
const vhd = new Vhd(handler, fd)
@@ -32,11 +32,18 @@ export default async function createSyntheticStream(handler, path) {
await vhd.readHeaderAndFooter()
await vhd.readBlockAllocationTable()
if (vhd.footer.diskType === DISK_TYPE_DYNAMIC) {
break
return vhd
}
if (typeof paths === 'string') {
let path = paths
let vhd
while ((vhd = await open(path)).footer.diskType !== DISK_TYPE_DYNAMIC) {
path = resolveRelativeFromFile(path, vhd.header.parentUnicodeName)
}
} else {
for (const path of paths) {
await open(path)
}
path = resolveRelativeFromFile(path, vhd.header.parentUnicodeName)
}
const nVhds = vhds.length

View File

@@ -41,14 +41,14 @@
"human-format": "^0.10.0",
"lodash": "^4.17.4",
"pw": "^0.0.4",
"xen-api": "^0.27.1"
"xen-api": "^0.27.2"
},
"devDependencies": {
"@babel/cli": "^7.1.5",
"@babel/core": "^7.1.5",
"@babel/preset-env": "^7.1.5",
"babel-plugin-lodash": "^3.2.11",
"cross-env": "^5.1.4",
"cross-env": "^6.0.3",
"rimraf": "^3.0.0"
},
"scripts": {

View File

@@ -1,6 +1,6 @@
{
"name": "xen-api",
"version": "0.27.1",
"version": "0.27.2",
"license": "ISC",
"description": "Connector to the Xen API",
"keywords": [
@@ -46,7 +46,7 @@
"make-error": "^1.3.0",
"minimist": "^1.2.0",
"ms": "^2.1.1",
"promise-toolbox": "^0.13.0",
"promise-toolbox": "^0.14.0",
"pw": "0.0.4",
"xmlrpc": "^1.3.2",
"xo-collection": "^0.4.1"
@@ -60,7 +60,7 @@
"@babel/plugin-proposal-optional-chaining": "^7.2.0",
"@babel/preset-env": "^7.0.0",
"babel-plugin-lodash": "^3.3.2",
"cross-env": "^5.1.3",
"cross-env": "^6.0.3",
"rimraf": "^3.0.0"
},
"scripts": {

View File

@@ -1,4 +1,4 @@
const URL_RE = /^(?:(https?:)\/*)?(?:([^:]+):([^@]+)@)?([^/]+?)(?::([0-9]+))?\/?$/
const URL_RE = /^(?:(https?:)\/*)?(?:([^:]+):([^@]+)@)?(?:\[([^\]]+)\]|([^:/]+))(?::([0-9]+))?\/?$/
export default url => {
const matches = URL_RE.exec(url)
@@ -6,7 +6,15 @@ export default url => {
throw new Error('invalid URL: ' + url)
}
const [, protocol = 'https:', username, password, hostname, port] = matches
const [
,
protocol = 'https:',
username,
password,
ipv6,
hostname = ipv6,
port,
] = matches
const parsedUrl = { protocol, hostname, port }
if (username !== undefined) {
parsedUrl.username = decodeURIComponent(username)

View File

@@ -8,7 +8,7 @@ import execPromise from 'exec-promise'
import minimist from 'minimist'
import pw from 'pw'
import { asCallback, fromCallback } from 'promise-toolbox'
import { filter, find, isArray } from 'lodash'
import { filter, find } from 'lodash'
import { getBoundPropertyDescriptor } from 'bind-property-descriptor'
import { start as createRepl } from 'repl'
@@ -110,7 +110,7 @@ const main = async args => {
asCallback.call(
fromCallback(cb => {
evaluate.call(repl, cmd, context, filename, cb)
}).then(value => (isArray(value) ? Promise.all(value) : value)),
}).then(value => (Array.isArray(value) ? Promise.all(value) : value)),
cb
)
})(repl.eval)

View File

@@ -4,7 +4,7 @@ import kindOf from 'kindof'
import ms from 'ms'
import httpRequest from 'http-request-plus'
import { EventEmitter } from 'events'
import { isArray, map, noop, omit } from 'lodash'
import { map, noop, omit } from 'lodash'
import {
cancelable,
defer,
@@ -113,7 +113,7 @@ export class Xapi extends EventEmitter {
this._watchedTypes = undefined
const { watchEvents } = opts
if (watchEvents !== false) {
if (isArray(watchEvents)) {
if (Array.isArray(watchEvents)) {
this._watchedTypes = watchEvents
}
this.watchEvents()
@@ -1075,7 +1075,7 @@ export class Xapi extends EventEmitter {
const $field = (field in RESERVED_FIELDS ? '$$' : '$') + field
const value = data[field]
if (isArray(value)) {
if (Array.isArray(value)) {
if (value.length === 0 || isOpaqueRef(value[0])) {
getters[$field] = function() {
const value = this[field]

View File

@@ -38,16 +38,16 @@
"human-format": "^0.10.0",
"l33teral": "^3.0.3",
"lodash": "^4.17.4",
"micromatch": "^3.1.3",
"micromatch": "^4.0.2",
"mkdirp": "^0.5.1",
"nice-pipe": "0.0.0",
"pretty-ms": "^4.0.0",
"pretty-ms": "^5.0.0",
"progress-stream": "^2.0.0",
"promise-toolbox": "^0.13.0",
"promise-toolbox": "^0.14.0",
"pump": "^3.0.0",
"pw": "^0.0.4",
"strip-indent": "^2.0.0",
"xdg-basedir": "^3.0.0",
"strip-indent": "^3.0.0",
"xdg-basedir": "^4.0.0",
"xo-lib": "^0.9.0"
},
"devDependencies": {
@@ -56,7 +56,7 @@
"@babel/preset-env": "^7.0.0",
"@babel/preset-flow": "^7.0.0",
"babel-plugin-lodash": "^3.3.2",
"cross-env": "^5.1.3",
"cross-env": "^6.0.3",
"rimraf": "^3.0.0"
},
"scripts": {

View File

@@ -7,7 +7,6 @@ const promisify = require('bluebird').promisify
const readFile = promisify(require('fs').readFile)
const writeFile = promisify(require('fs').writeFile)
const assign = require('lodash/assign')
const l33t = require('l33teral')
const mkdirp = promisify(require('mkdirp'))
const xdgBasedir = require('xdg-basedir')
@@ -41,7 +40,7 @@ const save = (exports.save = function(config) {
exports.set = function(data) {
return load().then(function(config) {
return save(assign(config, data))
return save(Object.assign(config, data))
})
}

View File

@@ -17,7 +17,6 @@ const getKeys = require('lodash/keys')
const hrp = require('http-request-plus').default
const humanFormat = require('human-format')
const identity = require('lodash/identity')
const isArray = require('lodash/isArray')
const isObject = require('lodash/isObject')
const micromatch = require('micromatch')
const nicePipe = require('nice-pipe')
@@ -298,7 +297,11 @@ async function listCommands(args) {
str.push(
name,
'=<',
type == null ? 'unknown type' : isArray(type) ? type.join('|') : type,
type == null
? 'unknown type'
: Array.isArray(type)
? type.join('|')
: type,
'>'
)

View File

@@ -34,7 +34,7 @@
"@babel/cli": "^7.0.0",
"@babel/core": "^7.0.0",
"@babel/preset-env": "^7.0.0",
"cross-env": "^5.1.3",
"cross-env": "^6.0.3",
"event-to-promise": "^0.8.0",
"rimraf": "^3.0.0"
},

View File

@@ -36,7 +36,7 @@
"@babel/core": "^7.0.0",
"@babel/preset-env": "^7.0.0",
"babel-plugin-lodash": "^3.3.2",
"cross-env": "^5.1.3",
"cross-env": "^6.0.3",
"rimraf": "^3.0.0"
},
"scripts": {

View File

@@ -1,5 +1,5 @@
import { BaseError } from 'make-error'
import { isArray, iteratee } from 'lodash'
import { iteratee } from 'lodash'
class XoError extends BaseError {
constructor({ code, message, data }) {
@@ -77,7 +77,7 @@ export const serverUnreachable = create(9, objectId => ({
}))
export const invalidParameters = create(10, (message, errors) => {
if (isArray(message)) {
if (Array.isArray(message)) {
errors = message
message = undefined
}
@@ -172,3 +172,11 @@ export const patchPrecheckFailed = create(20, ({ errorType, patch }) => ({
},
message: `patch precheck failed: ${errorType}`,
}))
export const listMissingPatchesFailed = create(21, ({ host, reason }) => ({
data: {
host,
reason,
},
message: 'could not fetch missing patches',
}))

View File

@@ -41,7 +41,7 @@
"@babel/core": "^7.0.0",
"@babel/preset-env": "^7.0.0",
"babel-plugin-lodash": "^3.3.2",
"cross-env": "^5.1.3",
"cross-env": "^6.0.3",
"rimraf": "^3.0.0"
},
"scripts": {

View File

@@ -32,7 +32,7 @@
"@babel/core": "^7.0.0",
"@babel/preset-env": "^7.0.0",
"babel-plugin-lodash": "^3.3.2",
"cross-env": "^5.1.3",
"cross-env": "^6.0.3",
"deep-freeze": "^0.0.1",
"rimraf": "^3.0.0"
},

View File

@@ -40,7 +40,7 @@
"@babel/core": "^7.0.0",
"@babel/preset-env": "^7.0.0",
"babel-plugin-lodash": "^3.3.2",
"cross-env": "^5.1.3",
"cross-env": "^6.0.3",
"rimraf": "^3.0.0"
},
"scripts": {

View File

@@ -39,14 +39,14 @@
"inquirer": "^7.0.0",
"ldapjs": "^1.0.1",
"lodash": "^4.17.4",
"promise-toolbox": "^0.13.0"
"promise-toolbox": "^0.14.0"
},
"devDependencies": {
"@babel/cli": "^7.0.0",
"@babel/core": "^7.0.0",
"@babel/preset-env": "^7.0.0",
"babel-plugin-lodash": "^3.3.2",
"cross-env": "^5.1.3",
"cross-env": "^6.0.3",
"rimraf": "^3.0.0"
},
"scripts": {

View File

@@ -40,7 +40,7 @@
"@babel/core": "^7.0.0",
"@babel/preset-env": "^7.0.0",
"babel-preset-env": "^1.6.1",
"cross-env": "^5.1.3",
"cross-env": "^6.0.3",
"rimraf": "^3.0.0"
},
"scripts": {

View File

@@ -36,7 +36,7 @@
"node": ">=6"
},
"dependencies": {
"@xen-orchestra/log": "^0.1.4",
"@xen-orchestra/log": "^0.2.0",
"human-format": "^0.10.0",
"lodash": "^4.13.1",
"moment-timezone": "^0.5.13"
@@ -48,7 +48,7 @@
"@babel/plugin-proposal-optional-chaining": "^7.2.0",
"@babel/preset-env": "^7.0.0",
"babel-plugin-lodash": "^3.3.2",
"cross-env": "^5.1.3",
"cross-env": "^6.0.3",
"rimraf": "^3.0.0"
},
"scripts": {

View File

@@ -1,6 +1,6 @@
{
"name": "xo-server-cloud",
"version": "0.2.4",
"version": "0.3.0",
"license": "ISC",
"description": "",
"keywords": [
@@ -39,7 +39,7 @@
"@babel/cli": "^7.0.0",
"@babel/core": "^7.0.0",
"@babel/preset-env": "^7.0.0",
"cross-env": "^5.1.3",
"cross-env": "^6.0.3",
"rimraf": "^3.0.0"
},
"scripts": {

View File

@@ -20,9 +20,13 @@ class XoServerCloud {
}
async load() {
const getResourceCatalog = () => this._getCatalog()
getResourceCatalog.description = 'Get the list of all available resources'
const getResourceCatalog = this._getCatalog.bind(this)
getResourceCatalog.description =
"Get the list of user's available resources"
getResourceCatalog.permission = 'admin'
getResourceCatalog.params = {
filters: { type: 'object', optional: true },
}
const registerResource = ({ namespace }) =>
this._registerResource(namespace)
@@ -34,8 +38,29 @@ class XoServerCloud {
}
registerResource.permission = 'admin'
const downloadAndInstallResource = this._downloadAndInstallResource.bind(
this
)
downloadAndInstallResource.description =
'Download and install a resource via cloud plugin'
downloadAndInstallResource.params = {
id: { type: 'string' },
namespace: { type: 'string' },
version: { type: 'string' },
sr: { type: 'string' },
}
downloadAndInstallResource.resolve = {
sr: ['sr', 'SR', 'administrate'],
}
downloadAndInstallResource.permission = 'admin'
this._unsetApiMethods = this._xo.addApiMethods({
cloud: {
downloadAndInstallResource,
getResourceCatalog,
registerResource,
},
@@ -66,8 +91,8 @@ class XoServerCloud {
// ----------------------------------------------------------------
async _getCatalog() {
const catalog = await this._updater.call('getResourceCatalog')
async _getCatalog({ filters } = {}) {
const catalog = await this._updater.call('getResourceCatalog', { filters })
if (!catalog) {
throw new Error('cannot get catalog')
@@ -90,6 +115,26 @@ class XoServerCloud {
// ----------------------------------------------------------------
async _downloadAndInstallResource({ id, namespace, sr, version }) {
const stream = await this._requestResource({
hub: true,
id,
namespace,
version,
})
const vm = await this._xo.getXapi(sr.$poolId).importVm(stream, {
srId: sr.id,
type: 'xva',
})
await vm.update_other_config({
'xo:resource:namespace': namespace,
'xo:resource:xva:version': version,
'xo:resource:xva:id': id,
})
}
// ----------------------------------------------------------------
async _registerResource(namespace) {
const _namespace = (await this._getNamespaces())[namespace]
@@ -106,8 +151,10 @@ class XoServerCloud {
// ----------------------------------------------------------------
async _getNamespaceCatalog(namespace) {
const namespaceCatalog = (await this._getCatalog())[namespace]
async _getNamespaceCatalog({ hub, namespace }) {
const namespaceCatalog = (await this._getCatalog({ filters: { hub } }))[
namespace
]
if (!namespaceCatalog) {
throw new Error(`cannot get catalog: ${namespace} not registered`)
@@ -118,14 +165,17 @@ class XoServerCloud {
// ----------------------------------------------------------------
async _requestResource(namespace, id, version) {
async _requestResource({ hub = false, id, namespace, version }) {
const _namespace = (await this._getNamespaces())[namespace]
if (!_namespace || !_namespace.registered) {
if (!hub && (!_namespace || !_namespace.registered)) {
throw new Error(`cannot get resource: ${namespace} not registered`)
}
const { _token: token } = await this._getNamespaceCatalog(namespace)
const { _token: token } = await this._getNamespaceCatalog({
hub,
namespace,
})
// 2018-03-20 Extra check: getResourceDownloadToken seems to be called without a token in some cases
if (token === undefined) {

View File

@@ -32,7 +32,7 @@
"@babel/preset-env": "^7.0.0",
"@babel/preset-flow": "^7.0.0",
"babel-plugin-lodash": "^3.3.2",
"cross-env": "^5.1.3",
"cross-env": "^6.0.3",
"rimraf": "^3.0.0"
},
"scripts": {

View File

@@ -1,6 +1,6 @@
import JSON5 from 'json5'
import { createSchedule } from '@xen-orchestra/cron'
import { assign, forOwn, map, mean } from 'lodash'
import { forOwn, map, mean } from 'lodash'
import { utcParse } from 'd3-time-format'
const COMPARATOR_FN = {
@@ -483,7 +483,7 @@ ${monitorBodies.join('\n')}`
result.rrd = await this.getRrd(result.object, observationPeriod)
if (result.rrd !== null) {
const data = parseData(result.rrd, result.object.uuid)
assign(result, {
Object.assign(result, {
data,
value: data.getDisplayableValue(),
shouldAlarm: data.shouldAlarm(),
@@ -496,7 +496,7 @@ ${monitorBodies.join('\n')}`
definition.alarmTriggerLevel
)
const data = getter(result.object)
assign(result, {
Object.assign(result, {
value: data.getDisplayableValue(),
shouldAlarm: data.shouldAlarm(),
})
@@ -680,7 +680,7 @@ ${entry.listItem}
},
}
if (xapiObject.$type === 'VM') {
payload['vm_uuid'] = xapiObject.uuid
payload.vm_uuid = xapiObject.uuid
}
// JSON is not well formed, can't use the default node parser
return JSON5.parse(

View File

@@ -25,13 +25,13 @@
"@babel/plugin-proposal-nullish-coalescing-operator": "^7.4.4",
"@babel/plugin-proposal-optional-chaining": "^7.2.0",
"@babel/preset-env": "^7.4.4",
"cross-env": "^5.2.0"
"cross-env": "^6.0.3"
},
"dependencies": {
"@xen-orchestra/log": "^0.1.4",
"@xen-orchestra/log": "^0.2.0",
"lodash": "^4.17.11",
"node-openssl-cert": "^0.0.97",
"promise-toolbox": "^0.13.0",
"node-openssl-cert": "^0.0.98",
"promise-toolbox": "^0.14.0",
"uuid": "^3.3.2"
},
"private": true

View File

@@ -5,7 +5,7 @@ import uuidv4 from 'uuid/v4'
import { access, constants, readFile, writeFile } from 'fs'
import { EventEmitter } from 'events'
import { filter, find, forOwn, map, omitBy, sample } from 'lodash'
import { fromCallback, fromEvent } from 'promise-toolbox'
import { fromCallback, promisify } from 'promise-toolbox'
import { join } from 'path'
import { OvsdbClient } from './ovsdb-client'
@@ -47,15 +47,8 @@ export const configurationSchema = {
// =============================================================================
async function fileWrite(path, data) {
await fromCallback(writeFile, path, data)
}
async function fileRead(path) {
const result = await fromCallback(readFile, path)
return result
}
const fileWrite = promisify(writeFile)
const fileRead = promisify(readFile)
async function fileExists(path) {
try {
await fromCallback(access, path, constants.F_OK)
@@ -74,8 +67,8 @@ async function fileExists(path) {
// 2019-09-03
// Compatibility code, to be removed in 1 year.
function updateNetworkOtherConfig(network) {
return Promise.all(
const updateNetworkOtherConfig = network =>
Promise.all(
map(
{
'cross-pool-network-uuid': 'cross_pool_network_uuid',
@@ -101,14 +94,107 @@ function updateNetworkOtherConfig(network) {
}
)
)
}
// -----------------------------------------------------------------------------
function createPassword() {
const chars =
'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789?!'
return Array.from({ length: 16 }, _ => sample(chars)).join('')
const CHARS = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789?!'
const createPassword = () =>
Array.from({ length: 16 }, _ => sample(CHARS)).join('')
// -----------------------------------------------------------------------------
async function generateCertificatesAndKey(dataDir) {
const openssl = new NodeOpenssl()
const rsaKeyOptions = {
rsa_keygen_bits: 4096,
format: 'PKCS8',
}
const subject = {
countryName: 'XX',
localityName: 'Default City',
organizationName: 'Default Company LTD',
}
const csrOptions = {
hash: 'sha256',
startdate: new Date('1984-02-04 00:00:00'),
enddate: new Date('2143-06-04 04:16:23'),
subject: subject,
}
const caCsrOptions = {
hash: 'sha256',
days: NB_DAYS,
subject: subject,
}
let operation
try {
// CA Cert
operation = 'Generating CA private key'
const caKey = await fromCallback.call(
openssl,
'generateRSAPrivateKey',
rsaKeyOptions
)
operation = 'Generating CA certificate'
const caCsr = await fromCallback.call(
openssl,
'generateCSR',
caCsrOptions,
caKey,
null
)
operation = 'Signing CA certificate'
const caCrt = await fromCallback.call(
openssl,
'selfSignCSR',
caCsr,
caCsrOptions,
caKey,
null
)
await fileWrite(join(dataDir, CA_CERT), caCrt)
// Cert
operation = 'Generating private key'
const key = await fromCallback.call(
openssl,
'generateRSAPrivateKey',
rsaKeyOptions
)
await fileWrite(join(dataDir, CLIENT_KEY), key)
operation = 'Generating certificate'
const csr = await fromCallback.call(
openssl,
'generateCSR',
csrOptions,
key,
null
)
operation = 'Signing certificate'
const crt = await fromCallback.call(
openssl,
'CASignCSR',
csr,
caCsrOptions,
false,
caCrt,
caKey,
null
)
await fileWrite(join(dataDir, CLIENT_CERT), crt)
} catch (error) {
log.error('Error while generating certificates and keys', {
operation,
error,
})
throw error
}
log.debug('All certificates have been successfully written')
}
// =============================================================================
@@ -177,7 +263,7 @@ class SDNController extends EventEmitter {
)
log.debug(`No default self-signed certificates exists, creating them`)
await this._generateCertificatesAndKey(certDirectory)
await generateCertificatesAndKey(certDirectory)
}
}
// TODO: verify certificates and create new certificates if needed
@@ -231,7 +317,11 @@ class SDNController extends EventEmitter {
// Expose method to create cross-pool private network
const createCrossPoolPrivateNetwork = params =>
this._createCrossPoolPrivateNetwork({ encrypted: false, mtu: 0, ...params })
this._createCrossPoolPrivateNetwork({
encrypted: false,
mtu: 0,
...params,
})
createCrossPoolPrivateNetwork.description =
'Creates a cross-pool private network on selected pools'
@@ -1430,119 +1520,6 @@ class SDNController extends EventEmitter {
this._ovsdbClients.push(client)
return client
}
// ---------------------------------------------------------------------------
async _generateCertificatesAndKey(dataDir) {
const openssl = new NodeOpenssl()
const rsakeyoptions = {
rsa_keygen_bits: 4096,
format: 'PKCS8',
}
const subject = {
countryName: 'XX',
localityName: 'Default City',
organizationName: 'Default Company LTD',
}
const csroptions = {
hash: 'sha256',
startdate: new Date('1984-02-04 00:00:00'),
enddate: new Date('2143-06-04 04:16:23'),
subject: subject,
}
const cacsroptions = {
hash: 'sha256',
days: NB_DAYS,
subject: subject,
}
// In all the following callbacks, `error` is:
// - either an error object if there was an error
// - or a boolean set to `false` if no error occurred
openssl.generateRSAPrivateKey(rsakeyoptions, (error, cakey, cmd) => {
if (error !== false) {
log.error('Error while generating CA private key', {
error,
})
return
}
openssl.generateCSR(cacsroptions, cakey, null, (error, csr, cmd) => {
if (error !== false) {
log.error('Error while generating CA certificate', {
error,
})
return
}
openssl.selfSignCSR(
csr,
cacsroptions,
cakey,
null,
async (error, cacrt, cmd) => {
if (error !== false) {
log.error('Error while signing CA certificate', {
error,
})
return
}
await fileWrite(join(dataDir, CA_CERT), cacrt)
openssl.generateRSAPrivateKey(
rsakeyoptions,
async (error, key, cmd) => {
if (error !== false) {
log.error('Error while generating private key', {
error,
})
return
}
await fileWrite(join(dataDir, CLIENT_KEY), key)
openssl.generateCSR(
csroptions,
key,
null,
(error, csr, cmd) => {
if (error !== false) {
log.error('Error while generating certificate', {
error,
})
return
}
openssl.CASignCSR(
csr,
cacsroptions,
false,
cacrt,
cakey,
null,
async (error, crt, cmd) => {
if (error !== false) {
log.error('Error while signing certificate', {
error,
})
return
}
await fileWrite(join(dataDir, CLIENT_CERT), crt)
this.emit('certWritten')
}
)
}
)
}
)
}
)
})
})
await fromEvent(this, 'certWritten', {})
log.debug('All certificates have been successfully written')
}
}
export default opts => new SDNController(opts)

View File

@@ -36,7 +36,7 @@
"golike-defer": "^0.4.1",
"jest": "^24.8.0",
"lodash": "^4.17.11",
"promise-toolbox": "^0.13.0",
"promise-toolbox": "^0.14.0",
"xo-collection": "^0.4.1",
"xo-common": "^0.2.0",
"xo-lib": "^0.9.0"

View File

@@ -0,0 +1,6 @@
export const getDefaultName = () => `xo-server-test ${new Date().toISOString()}`
export const getDefaultSchedule = () => ({
name: getDefaultName(),
cron: '0 * * * * *',
})

View File

@@ -2,15 +2,11 @@
import defer from 'golike-defer'
import Xo from 'xo-lib'
import XoCollection from 'xo-collection'
import { find, forOwn } from 'lodash'
import { defaultsDeep, find, forOwn, pick } from 'lodash'
import { fromEvent } from 'promise-toolbox'
import config from './_config'
const getDefaultCredentials = () => {
const { email, password } = config.xoConnection
return { email, password }
}
import { getDefaultName } from './_defaultValues'
class XoConnection extends Xo {
constructor(opts) {
@@ -72,7 +68,10 @@ class XoConnection extends Xo {
}
@defer
async connect($defer, credentials = getDefaultCredentials()) {
async connect(
$defer,
credentials = pick(config.xoConnection, 'email', 'password')
) {
await this.open()
$defer.onFailure(() => this.close())
@@ -111,9 +110,26 @@ class XoConnection extends Xo {
}
async createTempBackupNgJob(params) {
const job = await this.call('backupNg.createJob', params)
this._tempResourceDisposers.push('backupNg.deleteJob', { id: job.id })
return job
// mutate and inject default values
defaultsDeep(params, {
mode: 'full',
name: getDefaultName(),
settings: {
'': {
// it must be enabled because the XAPI might be not able to coalesce VDIs
// as fast as the tests run
//
// see https://xen-orchestra.com/docs/backup_troubleshooting.html#vdi-chain-protection
bypassVdiChainsCheck: true,
// it must be 'never' to avoid race conditions with the plugin `backup-reports`
reportWhen: 'never',
},
},
})
const id = await this.call('backupNg.createJob', params)
this._tempResourceDisposers.push('backupNg.deleteJob', { id })
return this.call('backupNg.getJob', { id })
}
async createTempNetwork(params) {
@@ -128,7 +144,7 @@ class XoConnection extends Xo {
async createTempVm(params) {
const id = await this.call('vm.create', {
name_label: 'XO Test',
name_label: getDefaultName(),
template: config.templates.templateWithoutDisks,
...params,
})
@@ -210,6 +226,10 @@ class XoConnection extends Xo {
return backups
}
getBackupLogs(filter) {
return this.call('backupNg.getLogs', { _forceRefresh: true, ...filter })
}
async _cleanDisposers(disposers) {
for (let n = disposers.length - 1; n > 0; ) {
const params = disposers[n--]

View File

@@ -1,61 +1,6 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP
exports[`backupNg .createJob() : creates a new backup job with schedules 1`] = `
Object {
"id": Any<String>,
"mode": "full",
"name": "default-backupNg",
"settings": Any<Object>,
"type": "backup",
"userId": Any<String>,
"vms": Any<Object>,
}
`;
exports[`backupNg .createJob() : creates a new backup job with schedules 2`] = `
Object {
"cron": "0 * * * * *",
"enabled": false,
"id": Any<String>,
"jobId": Any<String>,
"name": "scheduleTest",
}
`;
exports[`backupNg .createJob() : creates a new backup job without schedules 1`] = `
Object {
"id": Any<String>,
"mode": "full",
"name": "default-backupNg",
"settings": Object {
"": Object {
"reportWhen": "never",
},
},
"type": "backup",
"userId": Any<String>,
"vms": Any<Object>,
}
`;
exports[`backupNg .runJob() : fails trying to run a backup job with a VM without disks 1`] = `
Object {
"data": Object {
"mode": "full",
"reportWhen": "never",
},
"end": Any<Number>,
"id": Any<String>,
"jobId": Any<String>,
"jobName": "default-backupNg",
"message": "backup",
"scheduleId": Any<String>,
"start": Any<Number>,
"status": "skipped",
}
`;
exports[`backupNg .runJob() : fails trying to run a backup job with a VM without disks 2`] = `
Object {
"data": Object {
"id": Any<String>,
@@ -92,23 +37,6 @@ Array [
exports[`backupNg .runJob() : fails trying to run a backup job without schedule 1`] = `[JsonRpcError: invalid parameters]`;
exports[`backupNg .runJob() : fails trying to run backup job without retentions 1`] = `
Object {
"data": Object {
"mode": "full",
"reportWhen": "never",
},
"end": Any<Number>,
"id": Any<String>,
"jobId": Any<String>,
"jobName": "default-backupNg",
"message": "backup",
"scheduleId": Any<String>,
"start": Any<Number>,
"status": "failure",
}
`;
exports[`backupNg .runJob() : fails trying to run backup job without retentions 2`] = `
Object {
"data": Object {
"id": Any<String>,
@@ -128,22 +56,6 @@ Object {
`;
exports[`backupNg execute three times a delta backup with 2 remotes, 2 as retention and 2 as fullInterval 1`] = `
Object {
"data": Object {
"mode": "delta",
"reportWhen": "never",
},
"end": Any<Number>,
"id": Any<String>,
"jobId": Any<String>,
"message": "backup",
"scheduleId": Any<String>,
"start": Any<Number>,
"status": "success",
}
`;
exports[`backupNg execute three times a delta backup with 2 remotes, 2 as retention and 2 as fullInterval 2`] = `
Object {
"data": Object {
"id": Any<String>,
@@ -157,7 +69,7 @@ Object {
}
`;
exports[`backupNg execute three times a delta backup with 2 remotes, 2 as retention and 2 as fullInterval 3`] = `
exports[`backupNg execute three times a delta backup with 2 remotes, 2 as retention and 2 as fullInterval 2`] = `
Object {
"end": Any<Number>,
"id": Any<String>,
@@ -168,7 +80,7 @@ Object {
}
`;
exports[`backupNg execute three times a delta backup with 2 remotes, 2 as retention and 2 as fullInterval 4`] = `
exports[`backupNg execute three times a delta backup with 2 remotes, 2 as retention and 2 as fullInterval 3`] = `
Object {
"data": Object {
"id": Any<String>,
@@ -183,6 +95,19 @@ Object {
}
`;
exports[`backupNg execute three times a delta backup with 2 remotes, 2 as retention and 2 as fullInterval 4`] = `
Object {
"end": Any<Number>,
"id": Any<String>,
"message": Any<String>,
"result": Object {
"size": Any<Number>,
},
"start": Any<Number>,
"status": "success",
}
`;
exports[`backupNg execute three times a delta backup with 2 remotes, 2 as retention and 2 as fullInterval 5`] = `
Object {
"end": Any<Number>,
@@ -197,19 +122,6 @@ Object {
`;
exports[`backupNg execute three times a delta backup with 2 remotes, 2 as retention and 2 as fullInterval 6`] = `
Object {
"end": Any<Number>,
"id": Any<String>,
"message": Any<String>,
"result": Object {
"size": Any<Number>,
},
"start": Any<Number>,
"status": "success",
}
`;
exports[`backupNg execute three times a delta backup with 2 remotes, 2 as retention and 2 as fullInterval 7`] = `
Object {
"data": Object {
"id": Any<String>,
@@ -224,6 +136,19 @@ Object {
}
`;
exports[`backupNg execute three times a delta backup with 2 remotes, 2 as retention and 2 as fullInterval 7`] = `
Object {
"end": Any<Number>,
"id": Any<String>,
"message": Any<String>,
"result": Object {
"size": Any<Number>,
},
"start": Any<Number>,
"status": "success",
}
`;
exports[`backupNg execute three times a delta backup with 2 remotes, 2 as retention and 2 as fullInterval 8`] = `
Object {
"end": Any<Number>,
@@ -238,35 +163,6 @@ Object {
`;
exports[`backupNg execute three times a delta backup with 2 remotes, 2 as retention and 2 as fullInterval 9`] = `
Object {
"end": Any<Number>,
"id": Any<String>,
"message": Any<String>,
"result": Object {
"size": Any<Number>,
},
"start": Any<Number>,
"status": "success",
}
`;
exports[`backupNg execute three times a delta backup with 2 remotes, 2 as retention and 2 as fullInterval 10`] = `
Object {
"data": Object {
"mode": "delta",
"reportWhen": "never",
},
"end": Any<Number>,
"id": Any<String>,
"jobId": Any<String>,
"message": "backup",
"scheduleId": Any<String>,
"start": Any<Number>,
"status": "success",
}
`;
exports[`backupNg execute three times a delta backup with 2 remotes, 2 as retention and 2 as fullInterval 11`] = `
Object {
"data": Object {
"id": Any<String>,
@@ -280,7 +176,7 @@ Object {
}
`;
exports[`backupNg execute three times a delta backup with 2 remotes, 2 as retention and 2 as fullInterval 12`] = `
exports[`backupNg execute three times a delta backup with 2 remotes, 2 as retention and 2 as fullInterval 10`] = `
Object {
"end": Any<Number>,
"id": Any<String>,
@@ -291,7 +187,7 @@ Object {
}
`;
exports[`backupNg execute three times a delta backup with 2 remotes, 2 as retention and 2 as fullInterval 13`] = `
exports[`backupNg execute three times a delta backup with 2 remotes, 2 as retention and 2 as fullInterval 11`] = `
Object {
"data": Object {
"id": Any<String>,
@@ -306,7 +202,7 @@ Object {
}
`;
exports[`backupNg execute three times a delta backup with 2 remotes, 2 as retention and 2 as fullInterval 14`] = `
exports[`backupNg execute three times a delta backup with 2 remotes, 2 as retention and 2 as fullInterval 12`] = `
Object {
"end": Any<Number>,
"id": Any<String>,
@@ -319,6 +215,34 @@ Object {
}
`;
exports[`backupNg execute three times a delta backup with 2 remotes, 2 as retention and 2 as fullInterval 13`] = `
Object {
"end": Any<Number>,
"id": Any<String>,
"message": Any<String>,
"result": Object {
"size": Any<Number>,
},
"start": Any<Number>,
"status": "success",
}
`;
exports[`backupNg execute three times a delta backup with 2 remotes, 2 as retention and 2 as fullInterval 14`] = `
Object {
"data": Object {
"id": Any<String>,
"isFull": false,
"type": "remote",
},
"end": Any<Number>,
"id": Any<String>,
"message": Any<String>,
"start": Any<Number>,
"status": "success",
}
`;
exports[`backupNg execute three times a delta backup with 2 remotes, 2 as retention and 2 as fullInterval 15`] = `
Object {
"end": Any<Number>,
@@ -334,62 +258,18 @@ Object {
exports[`backupNg execute three times a delta backup with 2 remotes, 2 as retention and 2 as fullInterval 16`] = `
Object {
"data": Object {
"id": Any<String>,
"isFull": false,
"type": "remote",
},
"end": Any<Number>,
"id": Any<String>,
"message": Any<String>,
"result": Object {
"size": Any<Number>,
},
"start": Any<Number>,
"status": "success",
}
`;
exports[`backupNg execute three times a delta backup with 2 remotes, 2 as retention and 2 as fullInterval 17`] = `
Object {
"end": Any<Number>,
"id": Any<String>,
"message": Any<String>,
"result": Object {
"size": Any<Number>,
},
"start": Any<Number>,
"status": "success",
}
`;
exports[`backupNg execute three times a delta backup with 2 remotes, 2 as retention and 2 as fullInterval 18`] = `
Object {
"end": Any<Number>,
"id": Any<String>,
"message": Any<String>,
"result": Object {
"size": Any<Number>,
},
"start": Any<Number>,
"status": "success",
}
`;
exports[`backupNg execute three times a delta backup with 2 remotes, 2 as retention and 2 as fullInterval 19`] = `
Object {
"data": Object {
"mode": "delta",
"reportWhen": "never",
},
"end": Any<Number>,
"id": Any<String>,
"jobId": Any<String>,
"message": "backup",
"scheduleId": Any<String>,
"start": Any<Number>,
"status": "success",
}
`;
exports[`backupNg execute three times a delta backup with 2 remotes, 2 as retention and 2 as fullInterval 20`] = `
Object {
"data": Object {
"id": Any<String>,
@@ -403,7 +283,7 @@ Object {
}
`;
exports[`backupNg execute three times a delta backup with 2 remotes, 2 as retention and 2 as fullInterval 21`] = `
exports[`backupNg execute three times a delta backup with 2 remotes, 2 as retention and 2 as fullInterval 18`] = `
Object {
"end": Any<Number>,
"id": Any<String>,
@@ -414,6 +294,47 @@ Object {
}
`;
exports[`backupNg execute three times a delta backup with 2 remotes, 2 as retention and 2 as fullInterval 19`] = `
Object {
"data": Object {
"id": Any<String>,
"isFull": true,
"type": "remote",
},
"end": Any<Number>,
"id": Any<String>,
"message": Any<String>,
"start": Any<Number>,
"status": "success",
}
`;
exports[`backupNg execute three times a delta backup with 2 remotes, 2 as retention and 2 as fullInterval 20`] = `
Object {
"end": Any<Number>,
"id": Any<String>,
"message": Any<String>,
"result": Object {
"size": Any<Number>,
},
"start": Any<Number>,
"status": "success",
}
`;
exports[`backupNg execute three times a delta backup with 2 remotes, 2 as retention and 2 as fullInterval 21`] = `
Object {
"end": Any<Number>,
"id": Any<String>,
"message": Any<String>,
"result": Object {
"size": Any<Number>,
},
"start": Any<Number>,
"status": "success",
}
`;
exports[`backupNg execute three times a delta backup with 2 remotes, 2 as retention and 2 as fullInterval 22`] = `
Object {
"data": Object {
@@ -455,65 +376,7 @@ Object {
}
`;
exports[`backupNg execute three times a delta backup with 2 remotes, 2 as retention and 2 as fullInterval 25`] = `
Object {
"data": Object {
"id": Any<String>,
"isFull": true,
"type": "remote",
},
"end": Any<Number>,
"id": Any<String>,
"message": Any<String>,
"start": Any<Number>,
"status": "success",
}
`;
exports[`backupNg execute three times a delta backup with 2 remotes, 2 as retention and 2 as fullInterval 26`] = `
Object {
"end": Any<Number>,
"id": Any<String>,
"message": Any<String>,
"result": Object {
"size": Any<Number>,
},
"start": Any<Number>,
"status": "success",
}
`;
exports[`backupNg execute three times a delta backup with 2 remotes, 2 as retention and 2 as fullInterval 27`] = `
Object {
"end": Any<Number>,
"id": Any<String>,
"message": Any<String>,
"result": Object {
"size": Any<Number>,
},
"start": Any<Number>,
"status": "success",
}
`;
exports[`backupNg execute three times a rolling snapshot with 2 as retention & revert to an old state 1`] = `
Object {
"data": Object {
"mode": "full",
"reportWhen": "never",
},
"end": Any<Number>,
"id": Any<String>,
"jobId": Any<String>,
"jobName": "default-backupNg",
"message": "backup",
"scheduleId": Any<String>,
"start": Any<Number>,
"status": "success",
}
`;
exports[`backupNg execute three times a rolling snapshot with 2 as retention & revert to an old state 2`] = `
Object {
"end": Any<Number>,
"id": Any<String>,
@@ -524,7 +387,7 @@ Object {
}
`;
exports[`backupNg execute three times a rolling snapshot with 2 as retention & revert to an old state 3`] = `
exports[`backupNg execute three times a rolling snapshot with 2 as retention & revert to an old state 2`] = `
Object {
"data": Object {
"id": Any<String>,

View File

@@ -6,20 +6,44 @@ import { noSuchObject } from 'xo-common/api-errors'
import config from '../_config'
import randomId from '../_randomId'
import xo from '../_xoConnection'
import { getDefaultName, getDefaultSchedule } from '../_defaultValues'
const DEFAULT_SCHEDULE = {
name: 'scheduleTest',
cron: '0 * * * * *',
// Assert that a created backup job (`jobOutput`) matches the parameters it was
// created with (`jobInput`).
//
// When `jobInput.schedules` is provided, `createdSchedule` must be the
// schedule object actually created by the server: it is checked against the
// single schedule template in `jobInput.schedules`, and its generated id is
// expected to key the corresponding per-schedule settings in the job.
const validateBackupJob = (jobInput, jobOutput, createdSchedule) => {
  const expectedObj = {
    id: expect.any(String),
    mode: jobInput.mode,
    name: jobInput.name,
    type: 'backup',
    settings: {
      '': jobInput.settings[''],
    },
    // the job must belong to the user who created it
    userId: xo._user.id,
    vms: jobInput.vms,
  }
  const schedules = jobInput.schedules
  if (schedules !== undefined) {
    // tests create at most one schedule per job, keyed by a temporary id
    const scheduleTmpId = Object.keys(schedules)[0]
    expect(createdSchedule).toEqual({
      ...schedules[scheduleTmpId],
      // newly created schedules are disabled by default
      enabled: false,
      id: expect.any(String),
      jobId: jobOutput.id,
    })
    // per-schedule settings are re-keyed from the temporary id to the real one
    expectedObj.settings[createdSchedule.id] = jobInput.settings[scheduleTmpId]
  }
  expect(jobOutput).toEqual(expectedObj)
}
const validateRootTask = (log, props) =>
expect(log).toMatchSnapshot({
const validateRootTask = (log, expected) =>
expect(log).toEqual({
end: expect.any(Number),
id: expect.any(String),
jobId: expect.any(String),
scheduleId: expect.any(String),
message: 'backup',
start: expect.any(Number),
...props,
...expected,
})
const validateVmTask = (task, vmId, props) => {
@@ -66,88 +90,55 @@ const validateOperationTask = (task, props) => {
})
}
// Note: `bypassVdiChainsCheck` must be enabled because the XAPI might be not
// able to coalesce VDIs as fast as the tests run.
//
// See https://xen-orchestra.com/docs/backup_troubleshooting.html#vdi-chain-protection
describe('backupNg', () => {
let defaultBackupNg
beforeAll(() => {
defaultBackupNg = {
name: 'default-backupNg',
mode: 'full',
vms: {
id: config.vms.default,
},
settings: {
'': {
reportWhen: 'never',
},
},
}
})
describe('.createJob() :', () => {
it('creates a new backup job without schedules', async () => {
const backupNg = await xo.createTempBackupNgJob(defaultBackupNg)
expect(backupNg).toMatchSnapshot({
id: expect.any(String),
userId: expect.any(String),
vms: expect.any(Object),
})
expect(backupNg.vms).toEqual(defaultBackupNg.vms)
expect(backupNg.userId).toBe(xo._user.id)
const jobInput = {
mode: 'full',
vms: {
id: config.vms.default,
},
}
const jobOutput = await xo.createTempBackupNgJob(jobInput)
validateBackupJob(jobInput, jobOutput)
})
it('creates a new backup job with schedules', async () => {
const scheduleTempId = randomId()
const { id: jobId } = await xo.createTempBackupNgJob({
...defaultBackupNg,
const jobInput = {
mode: 'full',
schedules: {
[scheduleTempId]: DEFAULT_SCHEDULE,
[scheduleTempId]: getDefaultSchedule(),
},
settings: {
...defaultBackupNg.settings,
[scheduleTempId]: { snapshotRetention: 1 },
},
})
const backupNgJob = await xo.call('backupNg.getJob', { id: jobId })
expect(backupNgJob).toMatchSnapshot({
id: expect.any(String),
userId: expect.any(String),
settings: expect.any(Object),
vms: expect.any(Object),
})
expect(backupNgJob.vms).toEqual(defaultBackupNg.vms)
expect(backupNgJob.userId).toBe(xo._user.id)
expect(Object.keys(backupNgJob.settings).length).toBe(2)
const schedule = await xo.getSchedule({ jobId })
expect(typeof schedule).toBe('object')
expect(backupNgJob.settings[schedule.id]).toEqual({
snapshotRetention: 1,
})
expect(schedule).toMatchSnapshot({
id: expect.any(String),
jobId: expect.any(String),
})
vms: {
id: config.vms.default,
},
}
const jobOutput = await xo.createTempBackupNgJob(jobInput)
validateBackupJob(
jobInput,
jobOutput,
await xo.getSchedule({ jobId: jobOutput.id })
)
})
})
describe('.delete() :', () => {
it('deletes a backup job', async () => {
const scheduleTempId = randomId()
const { id: jobId } = await xo.call('backupNg.createJob', {
...defaultBackupNg,
const jobId = await xo.call('backupNg.createJob', {
mode: 'full',
name: getDefaultName(),
vms: {
id: config.vms.default,
},
schedules: {
[scheduleTempId]: DEFAULT_SCHEDULE,
[scheduleTempId]: getDefaultSchedule(),
},
settings: {
...defaultBackupNg.settings,
[scheduleTempId]: { snapshotRetention: 1 },
},
})
@@ -173,16 +164,19 @@ describe('backupNg', () => {
describe('.runJob() :', () => {
it('fails trying to run a backup job without schedule', async () => {
const { id } = await xo.createTempBackupNgJob(defaultBackupNg)
const { id } = await xo.createTempBackupNgJob({
vms: {
id: config.vms.default,
},
})
await expect(xo.call('backupNg.runJob', { id })).rejects.toMatchSnapshot()
})
it('fails trying to run a backup job with no matching VMs', async () => {
const scheduleTempId = randomId()
const { id: jobId } = await xo.createTempBackupNgJob({
...defaultBackupNg,
schedules: {
[scheduleTempId]: DEFAULT_SCHEDULE,
[scheduleTempId]: getDefaultSchedule(),
},
settings: {
[scheduleTempId]: { snapshotRetention: 1 },
@@ -205,9 +199,8 @@ describe('backupNg', () => {
jest.setTimeout(7e3)
const scheduleTempId = randomId()
const { id: jobId } = await xo.createTempBackupNgJob({
...defaultBackupNg,
schedules: {
[scheduleTempId]: DEFAULT_SCHEDULE,
[scheduleTempId]: getDefaultSchedule(),
},
settings: {
[scheduleTempId]: { snapshotRetention: 1 },
@@ -221,7 +214,7 @@ describe('backupNg', () => {
expect(typeof schedule).toBe('object')
await xo.call('backupNg.runJob', { id: jobId, schedule: schedule.id })
const [log] = await xo.call('backupNg.getLogs', {
const [log] = await xo.getBackupLogs({
scheduleId: schedule.id,
})
expect(log.warnings).toMatchSnapshot()
@@ -231,25 +224,23 @@ describe('backupNg', () => {
jest.setTimeout(8e3)
await xo.createTempServer(config.servers.default)
const { id: vmIdWithoutDisks } = await xo.createTempVm({
name_label: 'XO Test Without Disks',
name_description: 'Creating a vm without disks',
template: config.templates.templateWithoutDisks,
})
const scheduleTempId = randomId()
const { id: jobId } = await xo.createTempBackupNgJob({
...defaultBackupNg,
const jobInput = {
schedules: {
[scheduleTempId]: DEFAULT_SCHEDULE,
[scheduleTempId]: getDefaultSchedule(),
},
settings: {
...defaultBackupNg.settings,
[scheduleTempId]: { snapshotRetention: 1 },
},
vms: {
id: vmIdWithoutDisks,
},
})
}
const { id: jobId } = await xo.createTempBackupNgJob(jobInput)
const schedule = await xo.getSchedule({ jobId })
expect(typeof schedule).toBe('object')
@@ -260,16 +251,20 @@ describe('backupNg', () => {
tasks: [vmTask],
...log
},
] = await xo.call('backupNg.getLogs', {
] = await xo.getBackupLogs({
jobId,
scheduleId: schedule.id,
})
expect(log).toMatchSnapshot({
end: expect.any(Number),
id: expect.any(String),
jobId: expect.any(String),
scheduleId: expect.any(String),
start: expect.any(Number),
validateRootTask(log, {
data: {
mode: jobInput.mode,
reportWhen: jobInput.settings[''].reportWhen,
},
jobId,
jobName: jobInput.name,
scheduleId: schedule.id,
status: 'skipped',
})
expect(vmTask).toMatchSnapshot({
@@ -293,22 +288,24 @@ describe('backupNg', () => {
const scheduleTempId = randomId()
await xo.createTempServer(config.servers.default)
const { id: remoteId } = await xo.createTempRemote(config.remotes.default)
const { id: jobId } = await xo.createTempBackupNgJob({
...defaultBackupNg,
const jobInput = {
remotes: {
id: remoteId,
},
schedules: {
[scheduleTempId]: DEFAULT_SCHEDULE,
[scheduleTempId]: getDefaultSchedule(),
},
settings: {
...defaultBackupNg.settings,
[scheduleTempId]: {},
},
srs: {
id: config.srs.default,
},
})
vms: {
id: config.vms.default,
},
}
const { id: jobId } = await xo.createTempBackupNgJob(jobInput)
const schedule = await xo.getSchedule({ jobId })
expect(typeof schedule).toBe('object')
@@ -319,17 +316,20 @@ describe('backupNg', () => {
tasks: [task],
...log
},
] = await xo.call('backupNg.getLogs', {
] = await xo.getBackupLogs({
jobId,
scheduleId: schedule.id,
})
expect(log).toMatchSnapshot({
end: expect.any(Number),
id: expect.any(String),
jobId: expect.any(String),
scheduleId: expect.any(String),
start: expect.any(Number),
validateRootTask(log, {
data: {
mode: jobInput.mode,
reportWhen: jobInput.settings[''].reportWhen,
},
jobId,
jobName: jobInput.name,
scheduleId: schedule.id,
status: 'failure',
})
expect(task).toMatchSnapshot({
@@ -352,7 +352,6 @@ describe('backupNg', () => {
jest.setTimeout(6e4)
await xo.createTempServer(config.servers.default)
let vm = await xo.createTempVm({
name_label: 'XO Test Temp',
name_description: 'Creating a temporary vm',
template: config.templates.default,
VDIs: [
@@ -365,22 +364,18 @@ describe('backupNg', () => {
})
const scheduleTempId = randomId()
const { id: jobId } = await xo.createTempBackupNgJob({
...defaultBackupNg,
const jobInput = {
vms: {
id: vm.id,
},
schedules: {
[scheduleTempId]: DEFAULT_SCHEDULE,
[scheduleTempId]: getDefaultSchedule(),
},
settings: {
'': {
bypassVdiChainsCheck: true,
reportWhen: 'never',
},
[scheduleTempId]: { snapshotRetention: 2 },
},
})
}
const { id: jobId } = await xo.createTempBackupNgJob(jobInput)
const schedule = await xo.getSchedule({ jobId })
expect(typeof schedule).toBe('object')
@@ -415,17 +410,20 @@ describe('backupNg', () => {
tasks: [{ tasks: subTasks, ...vmTask }],
...log
},
] = await xo.call('backupNg.getLogs', {
] = await xo.getBackupLogs({
jobId,
scheduleId: schedule.id,
})
expect(log).toMatchSnapshot({
end: expect.any(Number),
id: expect.any(String),
jobId: expect.any(String),
scheduleId: expect.any(String),
start: expect.any(Number),
validateRootTask(log, {
data: {
mode: jobInput.mode,
reportWhen: jobInput.settings[''].reportWhen,
},
jobId,
jobName: jobInput.name,
scheduleId: schedule.id,
status: 'success',
})
const subTaskSnapshot = subTasks.find(
@@ -470,7 +468,7 @@ describe('backupNg', () => {
const exportRetention = 2
const fullInterval = 2
const scheduleTempId = randomId()
const { id: jobId } = await xo.createTempBackupNgJob({
const jobInput = {
mode: 'delta',
remotes: {
id: {
@@ -478,13 +476,11 @@ describe('backupNg', () => {
},
},
schedules: {
[scheduleTempId]: DEFAULT_SCHEDULE,
[scheduleTempId]: getDefaultSchedule(),
},
settings: {
'': {
bypassVdiChainsCheck: true,
fullInterval,
reportWhen: 'never',
},
[remoteId1]: { deleteFirst: true },
[scheduleTempId]: { exportRetention },
@@ -492,7 +488,8 @@ describe('backupNg', () => {
vms: {
id: vmToBackup,
},
})
}
const { id: jobId } = await xo.createTempBackupNgJob(jobInput)
const schedule = await xo.getSchedule({ jobId })
expect(typeof schedule).toBe('object')
@@ -506,7 +503,7 @@ describe('backupNg', () => {
expect(backups.length).toBe(exportRetention)
)
const backupLogs = await xo.call('backupNg.getLogs', {
const backupLogs = await xo.getBackupLogs({
jobId,
scheduleId: schedule.id,
})
@@ -515,10 +512,12 @@ describe('backupNg', () => {
backupLogs.forEach(({ tasks = [], ...log }, key) => {
validateRootTask(log, {
data: {
mode: 'delta',
reportWhen: 'never',
mode: jobInput.mode,
reportWhen: jobInput.settings[''].reportWhen,
},
message: 'backup',
jobId,
jobName: jobInput.name,
scheduleId: schedule.id,
status: 'success',
})

View File

@@ -6,7 +6,7 @@ import expect from 'must'
// ===================================================================
import { getConfig, getMainConnection, getSrId, waitObjectState } from './util'
import { map, assign } from 'lodash'
import { map } from 'lodash'
import eventToPromise from 'event-to-promise'
// ===================================================================
@@ -27,7 +27,7 @@ describe('disk', () => {
const config = await getConfig()
serverId = await xo.call(
'server.add',
assign({ autoConnect: false }, config.xenServer1)
Object.assign({ autoConnect: false }, config.xenServer1)
)
await xo.call('server.connect', { id: serverId })
await eventToPromise(xo.objects, 'finish')

View File

@@ -1,6 +1,6 @@
/* eslint-env jest */
import { assign, find, map } from 'lodash'
import { find, map } from 'lodash'
import { config, rejectionOf, xo } from './util'
@@ -151,7 +151,7 @@ describe('server', () => {
it('connects to a Xen server', async () => {
const serverId = await addServer(
assign({ autoConnect: false }, config.xenServer1)
Object.assign({ autoConnect: false }, config.xenServer1)
)
await xo.call('server.connect', {
@@ -184,7 +184,7 @@ describe('server', () => {
let serverId
beforeEach(async () => {
serverId = await addServer(
assign({ autoConnect: false }, config.xenServer1)
Object.assign({ autoConnect: false }, config.xenServer1)
)
await xo.call('server.connect', {
id: serverId,

View File

@@ -12,7 +12,7 @@ import {
getOneHost,
waitObjectState,
} from './util'
import { assign, map } from 'lodash'
import { map } from 'lodash'
import eventToPromise from 'event-to-promise'
// ===================================================================
@@ -33,7 +33,7 @@ describe('vbd', () => {
serverId = await xo.call(
'server.add',
assign({ autoConnect: false }, config.xenServer1)
Object.assign({ autoConnect: false }, config.xenServer1)
)
await xo.call('server.connect', { id: serverId })
await eventToPromise(xo.objects, 'finish')

View File

@@ -34,14 +34,14 @@
"dependencies": {
"nodemailer": "^6.1.0",
"nodemailer-markdown": "^1.0.1",
"promise-toolbox": "^0.13.0"
"promise-toolbox": "^0.14.0"
},
"devDependencies": {
"@babel/cli": "^7.0.0",
"@babel/core": "^7.0.0",
"@babel/preset-env": "^7.0.0",
"babel-plugin-lodash": "^3.3.2",
"cross-env": "^5.1.3",
"cross-env": "^6.0.3",
"rimraf": "^3.0.0"
},
"scripts": {

View File

@@ -39,7 +39,7 @@
"@babel/core": "^7.0.0",
"@babel/preset-env": "^7.0.0",
"babel-preset-env": "^1.5.2",
"cross-env": "^5.1.3",
"cross-env": "^6.0.3",
"rimraf": "^3.0.0"
},
"scripts": {

View File

@@ -33,14 +33,14 @@
"node": ">=6"
},
"dependencies": {
"promise-toolbox": "^0.13.0",
"promise-toolbox": "^0.14.0",
"slack-node": "^0.1.8"
},
"devDependencies": {
"@babel/cli": "^7.0.0",
"@babel/core": "^7.0.0",
"@babel/preset-env": "^7.0.0",
"cross-env": "^5.1.3",
"cross-env": "^6.0.3",
"rimraf": "^3.0.0"
},
"scripts": {

View File

@@ -40,7 +40,7 @@
"@babel/cli": "^7.0.0",
"@babel/core": "^7.0.0",
"@babel/preset-env": "^7.0.0",
"cross-env": "^5.1.3",
"cross-env": "^6.0.3",
"rimraf": "^3.0.0"
},
"scripts": {

View File

@@ -37,19 +37,19 @@
"dependencies": {
"@xen-orchestra/async-map": "^0.0.0",
"@xen-orchestra/cron": "^1.0.4",
"@xen-orchestra/log": "^0.1.4",
"@xen-orchestra/log": "^0.2.0",
"handlebars": "^4.0.6",
"html-minifier": "^4.0.0",
"human-format": "^0.10.0",
"lodash": "^4.17.4",
"promise-toolbox": "^0.13.0"
"promise-toolbox": "^0.14.0"
},
"devDependencies": {
"@babel/cli": "^7.0.0",
"@babel/core": "^7.0.0",
"@babel/preset-env": "^7.0.0",
"babel-plugin-lodash": "^3.3.2",
"cross-env": "^5.1.3",
"cross-env": "^6.0.3",
"rimraf": "^3.0.0"
},
"scripts": {

View File

@@ -5,7 +5,6 @@ import humanFormat from 'human-format'
import { createSchedule } from '@xen-orchestra/cron'
import { minify } from 'html-minifier'
import {
assign,
concat,
differenceBy,
filter,
@@ -418,7 +417,7 @@ function computeGlobalVmsStats({ haltedVms, vmsStats, xo }) {
}))
)
return assign(
return Object.assign(
computeMeans(vmsStats, [
'cpu',
'ram',
@@ -446,7 +445,7 @@ function computeGlobalHostsStats({ haltedHosts, hostsStats, xo }) {
}))
)
return assign(
return Object.assign(
computeMeans(hostsStats, [
'cpu',
'ram',

View File

@@ -1,7 +1,7 @@
{
"private": true,
"name": "xo-server",
"version": "5.49.0",
"version": "5.50.1",
"license": "AGPL-3.0",
"description": "Server part of Xen-Orchestra",
"keywords": [
@@ -30,7 +30,7 @@
"bin": "bin"
},
"engines": {
"node": ">=6"
"node": ">=8"
},
"dependencies": {
"@iarna/toml": "^2.2.1",
@@ -39,13 +39,14 @@
"@xen-orchestra/defined": "^0.0.0",
"@xen-orchestra/emit-async": "^0.0.0",
"@xen-orchestra/fs": "^0.10.1",
"@xen-orchestra/log": "^0.1.4",
"@xen-orchestra/log": "^0.2.0",
"@xen-orchestra/mixin": "^0.0.0",
"ajv": "^6.1.1",
"app-conf": "^0.7.0",
"archiver": "^3.0.0",
"async-iterator-to-stream": "^1.0.1",
"base64url": "^3.0.0",
"bind-property-descriptor": "^1.0.0",
"blocked": "^1.2.1",
"bluebird": "^3.5.1",
"body-parser": "^1.18.2",
@@ -57,16 +58,15 @@
"debug": "^4.0.1",
"decorator-synchronized": "^0.5.0",
"deptree": "^1.0.0",
"escape-string-regexp": "^1.0.5",
"event-to-promise": "^0.8.0",
"exec-promise": "^0.7.0",
"execa": "^1.0.0",
"execa": "^2.0.5",
"express": "^4.16.2",
"express-session": "^1.15.6",
"fatfs": "^0.10.4",
"from2": "^2.3.0",
"fs-extra": "^8.0.1",
"get-stream": "^4.0.0",
"get-stream": "^5.1.0",
"golike-defer": "^0.4.1",
"hashy": "^0.7.1",
"helmet": "^3.9.0",
@@ -90,7 +90,7 @@
"limit-concurrency-decorator": "^0.4.0",
"lodash": "^4.17.4",
"make-error": "^1",
"micromatch": "^3.1.4",
"micromatch": "^4.0.2",
"minimist": "^1.2.0",
"moment-timezone": "^0.5.14",
"ms": "^2.1.1",
@@ -102,7 +102,7 @@
"passport": "^0.4.0",
"passport-local": "^1.0.0",
"pretty-format": "^24.0.0",
"promise-toolbox": "^0.13.0",
"promise-toolbox": "^0.14.0",
"proxy-agent": "^3.0.0",
"pug": "^2.0.0-rc.4",
"pump": "^3.0.0",
@@ -122,8 +122,8 @@
"uuid": "^3.0.1",
"value-matcher": "^0.2.0",
"vhd-lib": "^0.7.0",
"ws": "^6.0.0",
"xen-api": "^0.27.1",
"ws": "^7.1.2",
"xen-api": "^0.27.2",
"xml2js": "^0.4.19",
"xo-acl-resolver": "^0.4.1",
"xo-collection": "^0.4.1",
@@ -147,7 +147,7 @@
"@babel/preset-flow": "^7.0.0",
"babel-plugin-lodash": "^3.3.2",
"babel-plugin-transform-dev": "^2.0.1",
"cross-env": "^5.1.3",
"cross-env": "^6.0.3",
"index-modules": "^0.3.0",
"rimraf": "^3.0.0"
},

View File

@@ -0,0 +1,34 @@
/* eslint-env jest */

// Unit tests for MultiKeyMap: a map whose keys are *sequences* of values,
// compared element-wise (not by array identity).

import MultiKeyMap from './_MultiKeyMap'

describe('MultiKeyMap', () => {
  it('works', () => {
    const map = new MultiKeyMap()

    const keys = [
      // null key
      [],
      // simple key
      ['foo'],
      // composite key
      ['foo', 'bar'],
      // reverse composite key
      ['bar', 'foo'],
    ]

    // one distinct value object per key
    const values = keys.map(() => ({}))

    // set all values first to make sure they are all stored and not only the
    // last one
    keys.forEach((key, i) => {
      map.set(key, values[i])
    })

    keys.forEach((key, i) => {
      // copy the key to make sure the array itself is not the key
      expect(map.get(key.slice())).toBe(values[i])
      // entries can be removed individually, again via a copied key
      map.delete(key.slice())
      expect(map.get(key.slice())).toBe(undefined)
    })
  })
})

View File

@@ -16,6 +16,10 @@ function scheduleRemoveCacheEntry(keys, expires) {
const defaultKeyFn = () => []
const { slice } = Array.prototype
export const REMOVE_CACHE_ENTRY = {}
// debounce an async function so that all subsequent calls in a delay receive
// the same result
//
@@ -26,7 +30,14 @@ const defaultKeyFn = () => []
export const debounceWithKey = (fn, delay, keyFn = defaultKeyFn) => {
const cache = new MultiKeyMap()
const delayFn = typeof delay === 'number' ? () => delay : delay
return function() {
return function(arg) {
if (arg === REMOVE_CACHE_ENTRY) {
return removeCacheEntry(
cache,
ensureArray(keyFn.apply(this, slice.call(arguments, 1)))
)
}
const keys = ensureArray(keyFn.apply(this, arguments))
let promise = cache.get(keys)
if (promise === undefined) {

View File

@@ -0,0 +1,29 @@
/* eslint-env jest */

// Tests the REMOVE_CACHE_ENTRY sentinel of debounceWithKey: passing it as the
// first argument evicts the cached promise for the key computed from the
// remaining arguments, forcing the next call to re-run the wrapped function.

import { debounceWithKey, REMOVE_CACHE_ENTRY } from './_pDebounceWithKey'

describe('REMOVE_CACHE_ENTRY', () => {
  it('clears the cache', async () => {
    let i = 0
    const debouncedFn = debounceWithKey(
      function() {
        // counter increments only when the underlying function actually runs
        return Promise.resolve(++i)
      },
      // Infinity delay: entries never expire on their own in this test
      Infinity,
      id => id
    )

    // not cached accross keys
    expect(await debouncedFn(1)).toBe(1)
    expect(await debouncedFn(2)).toBe(2)

    // retrieve the already cached values
    expect(await debouncedFn(1)).toBe(1)
    expect(await debouncedFn(2)).toBe(2)

    // an entry for a specific key can be removed
    debouncedFn(REMOVE_CACHE_ENTRY, 1)
    expect(await debouncedFn(1)).toBe(3)
    // other keys keep their cached value
    expect(await debouncedFn(2)).toBe(2)
  })
})

View File

@@ -3,11 +3,12 @@ import { fromCallback } from 'promise-toolbox'
import { pipeline } from 'readable-stream'
import createNdJsonStream from '../_createNdJsonStream'
import { REMOVE_CACHE_ENTRY } from '../_pDebounceWithKey'
import { safeDateFormat } from '../utils'
export function createJob({ schedules, ...job }) {
job.userId = this.user.id
return this.createBackupNgJob(job, schedules)
return this.createBackupNgJob(job, schedules).then(({ id }) => id)
}
createJob.permission = 'admin'
@@ -184,7 +185,20 @@ getAllLogs.params = {
ndjson: { type: 'boolean', optional: true },
}
export function getLogs({ after, before, limit, ...filter }) {
// API method: return consolidated backup-NG logs, sorted, with optional
// paging (`after`/`before`/`limit`); the remaining properties form a filter.
export function getLogs({
  after,
  before,
  limit,
  // TODO: it's a temporary work-around which should be removed
  // when the consolidated logs will be stored in the DB
  _forceRefresh = false,
  ...filter
}) {
  if (_forceRefresh) {
    // evict the memoized logs so the next read rebuilds them from scratch
    this.getBackupNgLogs(REMOVE_CACHE_ENTRY)
  }
  return this.getBackupNgLogsSorted({ after, before, limit, filter })
}

View File

@@ -221,12 +221,7 @@ emergencyShutdownHost.resolve = {
// -------------------------------------------------------------------
export async function isHostServerTimeConsistent({ host }) {
try {
await this.getXapi(host).assertConsistentHostServerTime(host._xapiRef)
return true
} catch (e) {
return false
}
return this.getXapi(host).isHostServerTimeConsistent(host._xapiRef)
}
isHostServerTimeConsistent.params = {

View File

@@ -777,7 +777,7 @@ export async function probeIscsiExists({
)
const srs = []
forEach(ensureArray(xml['SRlist'].SR), sr => {
forEach(ensureArray(xml.SRlist.SR), sr => {
// get the UUID of SR connected to this LUN
srs.push({ uuid: sr.UUID.trim() })
})
@@ -845,7 +845,7 @@ export async function probeNfsExists({ host, server, serverPath }) {
const srs = []
forEach(ensureArray(xml['SRlist'].SR), sr => {
forEach(ensureArray(xml.SRlist.SR), sr => {
// get the UUID of SR connected to this LUN
srs.push({ uuid: sr.UUID.trim() })
})

View File

@@ -1,7 +1,7 @@
// FIXME: rename to disk.*
import { invalidParameters } from 'xo-common/api-errors'
import { isArray, reduce } from 'lodash'
import { reduce } from 'lodash'
import { parseSize } from '../utils'
@@ -85,7 +85,7 @@ export async function set(params) {
continue
}
for (const field of isArray(fields) ? fields : [fields]) {
for (const field of Array.isArray(fields) ? fields : [fields]) {
await xapi.call(`VDI.set_${field}`, ref, `${params[param]}`)
}
}

View File

@@ -85,7 +85,7 @@ async function rateLimitedRetry(action, shouldRetry, retryCount = 20) {
function createVolumeInfoTypes() {
function parseHeal(parsed) {
const bricks = []
parsed['healInfo']['bricks']['brick'].forEach(brick => {
parsed.healInfo.bricks.brick.forEach(brick => {
bricks.push(brick)
if (brick.file) {
brick.file = ensureArray(brick.file)
@@ -96,21 +96,21 @@ function createVolumeInfoTypes() {
function parseStatus(parsed) {
const brickDictByUuid = {}
const volume = parsed['volStatus']['volumes']['volume']
volume['node'].forEach(node => {
const volume = parsed.volStatus.volumes.volume
volume.node.forEach(node => {
brickDictByUuid[node.peerid] = brickDictByUuid[node.peerid] || []
brickDictByUuid[node.peerid].push(node)
})
return {
commandStatus: true,
result: { nodes: brickDictByUuid, tasks: volume['tasks'] },
result: { nodes: brickDictByUuid, tasks: volume.tasks },
}
}
async function parseInfo(parsed) {
const volume = parsed['volInfo']['volumes']['volume']
volume['bricks'] = volume['bricks']['brick']
volume['options'] = volume['options']['option']
const volume = parsed.volInfo.volumes.volume
volume.bricks = volume.bricks.brick
volume.options = volume.options.option
return { commandStatus: true, result: volume }
}
@@ -118,23 +118,23 @@ function createVolumeInfoTypes() {
return async function(sr) {
const glusterEndpoint = this::_getGlusterEndpoint(sr)
const cmdShouldRetry = result =>
!result['commandStatus'] &&
((result.parsed && result.parsed['cliOutput']['opErrno'] === '30802') ||
!result.commandStatus &&
((result.parsed && result.parsed.cliOutput.opErrno === '30802') ||
result.stderr.match(/Another transaction is in progress/))
const runCmd = async () =>
glusterCmd(glusterEndpoint, 'volume ' + command, true)
const commandResult = await rateLimitedRetry(runCmd, cmdShouldRetry, 30)
return commandResult['commandStatus']
? this::handler(commandResult.parsed['cliOutput'], sr)
return commandResult.commandStatus
? this::handler(commandResult.parsed.cliOutput, sr)
: commandResult
}
}
async function profileType(sr) {
async function parseProfile(parsed) {
const volume = parsed['volProfile']
volume['bricks'] = ensureArray(volume['brick'])
delete volume['brick']
const volume = parsed.volProfile
volume.bricks = ensureArray(volume.brick)
delete volume.brick
return { commandStatus: true, result: volume }
}
@@ -143,9 +143,9 @@ function createVolumeInfoTypes() {
async function profileTopType(sr) {
async function parseTop(parsed) {
const volume = parsed['volTop']
volume['bricks'] = ensureArray(volume['brick'])
delete volume['brick']
const volume = parsed.volTop
volume.bricks = ensureArray(volume.brick)
delete volume.brick
return { commandStatus: true, result: volume }
}
@@ -326,7 +326,7 @@ async function remoteSsh(glusterEndpoint, cmd, ignoreError = false) {
}
messageArray.push(`${key}: ${result[key]}`)
}
messageArray.push('command: ' + result['command'].join(' '))
messageArray.push('command: ' + result.command.join(' '))
messageKeys.splice(messageKeys.indexOf('command'), 1)
for (const key of messageKeys) {
messageArray.push(`${key}: ${JSON.stringify(result[key])}`)
@@ -343,7 +343,7 @@ async function remoteSsh(glusterEndpoint, cmd, ignoreError = false) {
})
break
} catch (exception) {
if (exception['code'] !== 'HOST_OFFLINE') {
if (exception.code !== 'HOST_OFFLINE') {
throw exception
}
}
@@ -370,19 +370,17 @@ async function remoteSsh(glusterEndpoint, cmd, ignoreError = false) {
}
function findErrorMessage(commandResut) {
if (commandResut['exit'] === 0 && commandResut.parsed) {
const cliOut = commandResut.parsed['cliOutput']
if (cliOut['opErrstr'] && cliOut['opErrstr'].length) {
return cliOut['opErrstr']
if (commandResut.exit === 0 && commandResut.parsed) {
const cliOut = commandResut.parsed.cliOutput
if (cliOut.opErrstr && cliOut.opErrstr.length) {
return cliOut.opErrstr
}
// "peer probe" returns it's "already in peer" error in cliOutput/output
if (cliOut['output'] && cliOut['output'].length) {
return cliOut['output']
if (cliOut.output && cliOut.output.length) {
return cliOut.output
}
}
return commandResut['stderr'].length
? commandResut['stderr']
: commandResut['stdout']
return commandResut.stderr.length ? commandResut.stderr : commandResut.stdout
}
async function glusterCmd(glusterEndpoint, cmd, ignoreError = false) {
@@ -392,15 +390,15 @@ async function glusterCmd(glusterEndpoint, cmd, ignoreError = false) {
true
)
try {
result.parsed = parseXml(result['stdout'])
result.parsed = parseXml(result.stdout)
} catch (e) {
// pass, we never know if a message can be parsed or not, so we just try
}
if (result['exit'] === 0) {
const cliOut = result.parsed['cliOutput']
if (result.exit === 0) {
const cliOut = result.parsed.cliOutput
// we have found cases where opErrno is !=0 and opRet was 0, albeit the operation was an error.
result.commandStatus =
cliOut['opRet'].trim() === '0' && cliOut['opErrno'].trim() === '0'
cliOut.opRet.trim() === '0' && cliOut.opErrno.trim() === '0'
result.error = findErrorMessage(result)
} else {
result.commandStatus = false
@@ -793,7 +791,7 @@ export const createSR = defer(async function(
host: param.host.$id,
vm: { id: param.vm.$id, ip: param.address },
underlyingSr: param.underlyingSr.$id,
arbiter: !!param['arbiter'],
arbiter: !!param.arbiter,
}))
await xapi.xo.setData(xosanSrRef, 'xosan_config', {
version: 'beta2',
@@ -1164,11 +1162,11 @@ async function _prepareGlusterVm(
}
async function _importGlusterVM(xapi, template, lvmsrId) {
const templateStream = await this.requestResource(
'xosan',
template.id,
template.version
)
const templateStream = await this.requestResource({
id: template.id,
namespace: 'xosan',
version: template.version,
})
const newVM = await xapi.importVm(templateStream, {
srId: lvmsrId,
type: 'xva',
@@ -1300,7 +1298,7 @@ export const addBricks = defer(async function(
underlyingSr: newSr,
})
}
const arbiterNode = data.nodes.find(n => n['arbiter'])
const arbiterNode = data.nodes.find(n => n.arbiter)
if (arbiterNode) {
await glusterCmd(
glusterEndpoint,
@@ -1535,8 +1533,11 @@ export async function downloadAndInstallXosanPack({ id, version, pool }) {
}
const xapi = this.getXapi(pool.id)
const res = await this.requestResource('xosan', id, version)
const res = await this.requestResource({
id,
namespace: 'xosan',
version,
})
await xapi.installSupplementalPackOnAllHosts(res)
await xapi.pool.update_other_config(
'xosan_pack_installation_time',

View File

@@ -1,7 +1,7 @@
import Model from './model'
import { BaseError } from 'make-error'
import { EventEmitter } from 'events'
import { isArray, isObject, map } from './utils'
import { isObject, map } from './utils'
// ===================================================================
@@ -30,7 +30,7 @@ export default class Collection extends EventEmitter {
}
async add(models, opts) {
const array = isArray(models)
const array = Array.isArray(models)
if (!array) {
models = [models]
}
@@ -66,7 +66,7 @@ export default class Collection extends EventEmitter {
}
async remove(ids) {
if (!isArray(ids)) {
if (!Array.isArray(ids)) {
ids = [ids]
}
@@ -77,8 +77,8 @@ export default class Collection extends EventEmitter {
}
async update(models) {
const array = isArray(models)
if (!isArray(models)) {
const array = Array.isArray(models)
if (!array) {
models = [models]
}

View File

@@ -29,13 +29,7 @@ import { ensureDir, readdir, readFile } from 'fs-extra'
import parseDuration from './_parseDuration'
import Xo from './xo'
import {
forEach,
isArray,
isFunction,
mapToArray,
pFromCallback,
} from './utils'
import { forEach, mapToArray, pFromCallback } from './utils'
import bodyParser from 'body-parser'
import connectFlash from 'connect-flash'
@@ -281,15 +275,16 @@ async function registerPlugin(pluginPath, pluginName) {
// The default export can be either a factory or directly a plugin
// instance.
const instance = isFunction(factory)
? factory({
xo: this,
getDataDir: () => {
const dir = `${this._config.datadir}/${pluginName}`
return ensureDir(dir).then(() => dir)
},
})
: factory
const instance =
typeof factory === 'function'
? factory({
xo: this,
getDataDir: () => {
const dir = `${this._config.datadir}/${pluginName}`
return ensureDir(dir).then(() => dir)
},
})
: factory
await this.registerPlugin(
pluginName,
@@ -468,7 +463,7 @@ const setUpProxies = (express, opts, xo) => {
const setUpStaticFiles = (express, opts) => {
forEach(opts, (paths, url) => {
if (!isArray(paths)) {
if (!Array.isArray(paths)) {
paths = [paths]
}

View File

@@ -6,6 +6,7 @@ import ndjson from 'ndjson'
import parseArgs from 'minimist'
import sublevel from 'level-sublevel'
import util from 'util'
import { join as joinPath } from 'path'
import { repair as repairDb } from 'level'
import { forEach } from './utils'
@@ -174,6 +175,7 @@ export default async function main() {
}
const config = await appConf.load('xo-server', {
appDir: joinPath(__dirname, '..'),
ignoreUnknownFormats: true,
})

View File

@@ -8,19 +8,21 @@ const parse = createParser({
keyTransform: key => key.slice(5).toLowerCase(),
})
const makeFunction = command => async (fields, ...args) => {
return splitLines(
await execa.stdout(command, [
'--noheading',
'--nosuffix',
'--nameprefixes',
'--unbuffered',
'--units',
'b',
'-o',
String(fields),
...args,
])
).map(Array.isArray(fields) ? parse : line => parse(line)[fields])
const { stdout } = await execa(command, [
'--noheading',
'--nosuffix',
'--nameprefixes',
'--unbuffered',
'--units',
'b',
'-o',
String(fields),
...args,
])
return splitLines(stdout).map(
Array.isArray(fields) ? parse : line => parse(line)[fields]
)
}
export const lvs = makeFunction('lvs')

View File

@@ -1,5 +1,3 @@
import assign from 'lodash/assign'
const _combine = (vectors, n, cb) => {
if (!n) {
return
@@ -35,7 +33,7 @@ export const combine = vectors => cb => _combine(vectors, vectors.length, cb)
// Merge the properties of an objects set in one object.
//
// Ex: mergeObjects([ { a: 1 }, { b: 2 } ]) => { a: 1, b: 2 }
export const mergeObjects = objects => assign({}, ...objects)
export const mergeObjects = objects => Object.assign({}, ...objects)
// Compute a cross product between vectors.
//

View File

@@ -1,6 +1,6 @@
import { EventEmitter } from 'events'
import { forEach, isEmpty, isString } from './utils'
import { forEach, isEmpty } from './utils'
// ===================================================================
@@ -30,7 +30,7 @@ export default class Model extends EventEmitter {
set(properties, value) {
// This method can also be used with two arguments to set a single
// property.
if (isString(properties)) {
if (typeof properties === 'string') {
properties = { [properties]: value }
}

View File

@@ -1,5 +1,6 @@
import appConf from 'app-conf'
import pw from 'pw'
import { join as joinPath } from 'path'
import Xo from './xo'
import { generateToken } from './utils'
@@ -26,6 +27,7 @@ xo-server-recover-account <user name or email>
const xo = new Xo(
await appConf.load('xo-server', {
appDir: joinPath(__dirname, '..'),
ignoreUnknownFormats: true,
})
)

View File

@@ -3,7 +3,6 @@ import forEach from 'lodash/forEach'
import has from 'lodash/has'
import highland from 'highland'
import humanFormat from 'human-format'
import isString from 'lodash/isString'
import keys from 'lodash/keys'
import multiKeyHashInt from 'multikey-hash'
import pick from 'lodash/pick'
@@ -208,7 +207,7 @@ export {
// -------------------------------------------------------------------
export function parseSize(size) {
if (!isString(size)) {
if (typeof size !== 'string') {
return size
}
@@ -256,13 +255,9 @@ export const safeDateParse = utcParse('%Y%m%dT%H%M%SZ')
//
// Exports them from here to avoid direct dependencies on lodash/
export { default as forEach } from 'lodash/forEach'
export { default as isArray } from 'lodash/isArray'
export { default as isBoolean } from 'lodash/isBoolean'
export { default as isEmpty } from 'lodash/isEmpty'
export { default as isFunction } from 'lodash/isFunction'
export { default as isInteger } from 'lodash/isInteger'
export { default as isObject } from 'lodash/isObject'
export { default as isString } from 'lodash/isString'
export { default as mapToArray } from 'lodash/map'
// -------------------------------------------------------------------
@@ -364,7 +359,7 @@ export const thunkToArray = thunk => {
// function foo (param = throwFn('param is required')()) {}
// ```
export const throwFn = error => () => {
throw isString(error) ? new Error(error) : error
throw typeof error === 'string' ? new Error(error) : error
}
// -------------------------------------------------------------------

View File

@@ -3,7 +3,6 @@ import ensureArray from './_ensureArray'
import {
extractProperty,
forEach,
isArray,
isEmpty,
mapFilter,
mapToArray,
@@ -27,7 +26,7 @@ function link(obj, prop, idField = '$id') {
return dynamicValue // Properly handles null and undefined.
}
if (isArray(dynamicValue)) {
if (Array.isArray(dynamicValue)) {
return mapToArray(dynamicValue, idField)
}

View File

@@ -42,7 +42,6 @@ import pRetry from '../_pRetry'
import {
camelToSnakeCase,
forEach,
isFunction,
map,
mapToArray,
pAll,
@@ -82,7 +81,7 @@ export const TAG_COPY_SRC = 'xo:copy_of'
// FIXME: remove this work around when fixed, https://phabricator.babeljs.io/T2877
// export * from './utils'
require('lodash/assign')(module.exports, require('./utils'))
Object.assign(module.exports, require('./utils'))
// VDI formats. (Raw is not available for delta vdi.)
export const VDI_FORMAT_VHD = 'vhd'
@@ -174,7 +173,7 @@ export default class Xapi extends XapiBase {
//
// TODO: implements a timeout.
_waitObject(predicate) {
if (isFunction(predicate)) {
if (typeof predicate === 'function') {
const { promise, resolve } = defer()
const unregister = this._registerGenericWatcher(obj => {
@@ -1576,7 +1575,7 @@ export default class Xapi extends XapiBase {
}
} else {
// Find the original template by name (*sigh*).
const templateNameLabel = vm.other_config['base_template_name']
const templateNameLabel = vm.other_config.base_template_name
const template =
templateNameLabel &&
find(
@@ -2041,6 +2040,7 @@ export default class Xapi extends XapiBase {
)
)
}
@deferrable
async createNetwork(
$defer,
@@ -2358,14 +2358,22 @@ export default class Xapi extends XapiBase {
)
}
async assertConsistentHostServerTime(hostRef) {
const delta =
async _getHostServerTimeShift(hostRef) {
return Math.abs(
parseDateTime(await this.call('host.get_servertime', hostRef)).getTime() -
Date.now()
if (Math.abs(delta) > 30e3) {
Date.now()
)
}
async isHostServerTimeConsistent(hostRef) {
return (await this._getHostServerTimeShift(hostRef)) < 30e3
}
async assertConsistentHostServerTime(hostRef) {
if (!(await this.isHostServerTimeConsistent(hostRef))) {
throw new Error(
`host server time and XOA date are not consistent with each other (${ms(
delta
await this._getHostServerTimeShift(hostRef)
)})`
)
}

View File

@@ -1,8 +1,8 @@
import asyncMap from '@xen-orchestra/async-map'
import createLogger from '@xen-orchestra/log'
import deferrable from 'golike-defer'
import unzip from 'julien-f-unzip'
import { filter, find, pickBy, some } from 'lodash'
import { listMissingPatchesFailed } from 'xo-common/api-errors'
import ensureArray from '../../_ensureArray'
import { debounce } from '../../decorators'
@@ -41,7 +41,7 @@ const XCP_NG_DEBOUNCE_TIME_MS = 60000
// list all yum updates available for a XCP-ng host
// (hostObject) → { uuid: patchObject }
async function _listXcpUpdates(host) {
return JSON.parse(
const patches = JSON.parse(
await this.call(
'host.call_plugin',
host.$ref,
@@ -50,6 +50,15 @@ async function _listXcpUpdates(host) {
{}
)
)
if (patches.error !== undefined) {
throw listMissingPatchesFailed({
host: host.$id,
reason: patches.error,
})
}
return patches
}
const _listXcpUpdateDebounced = debounceWithKey(
@@ -337,7 +346,7 @@ export default {
// INSTALL -------------------------------------------------------------------
_xcpUpdate(hosts) {
async _xcpUpdate(hosts) {
if (hosts === undefined) {
hosts = filter(this.objects.all, { $type: 'host' })
} else {
@@ -347,7 +356,10 @@ export default {
)
}
return asyncMap(hosts, async host => {
// XCP-ng hosts need to be updated one at a time starting with the pool master
// https://github.com/vatesfr/xen-orchestra/issues/4468
hosts = hosts.sort(({ $ref }) => ($ref === this.pool.master ? -1 : 1))
for (const host of hosts) {
const update = await this.call(
'host.call_plugin',
host.$ref,
@@ -364,7 +376,7 @@ export default {
String(Date.now() / 1000)
)
}
})
}
},
// Legacy XS patches: upload a patch on a pool before installing it

View File

@@ -9,11 +9,7 @@ import { satisfies as versionSatisfies } from 'semver'
import {
camelToSnakeCase,
forEach,
isArray,
isBoolean,
isFunction,
isInteger,
isString,
map,
mapFilter,
mapToArray,
@@ -45,10 +41,10 @@ export const prepareXapiParam = param => {
if (isInteger(param)) {
return asInteger(param)
}
if (isBoolean(param)) {
if (typeof param === 'boolean') {
return asBoolean(param)
}
if (isArray(param)) {
if (Array.isArray(param)) {
return map(param, prepareXapiParam)
}
if (isPlainObject(param)) {
@@ -135,14 +131,14 @@ export const makeEditObject = specs => {
return object => object[prop]
}
if (isString(get)) {
if (typeof get === 'string') {
return object => object[get]
}
return get
}
const normalizeSet = (set, name) => {
if (isFunction(set)) {
if (typeof set === 'function') {
return set
}
@@ -153,7 +149,7 @@ export const makeEditObject = specs => {
}
}
if (isString(set)) {
if (typeof set === 'string') {
const index = set.indexOf('.')
if (index === -1) {
const prop = camelToSnakeCase(set)
@@ -176,7 +172,7 @@ export const makeEditObject = specs => {
}
}
if (!isArray(set)) {
if (!Array.isArray(set)) {
throw new Error('must be an array, a function or a string')
}
@@ -212,7 +208,7 @@ export const makeEditObject = specs => {
}
forEach(spec.constraints, (constraint, constraintName) => {
if (!isFunction(constraint)) {
if (typeof constraint !== 'function') {
throw new Error('constraint must be a function')
}
@@ -234,15 +230,15 @@ export const makeEditObject = specs => {
return spec
}
forEach(specs, (spec, name) => {
isString(spec) || (specs[name] = normalizeSpec(spec, name))
typeof spec === 'string' || (specs[name] = normalizeSpec(spec, name))
})
// Resolves aliases and add camelCase and snake_case aliases.
forEach(specs, (spec, name) => {
if (isString(spec)) {
if (typeof spec === 'string') {
do {
spec = specs[spec]
} while (isString(spec))
} while (typeof spec === 'string')
specs[name] = spec
}

View File

@@ -2,7 +2,8 @@ import createLogger from '@xen-orchestra/log'
import kindOf from 'kindof'
import ms from 'ms'
import schemaInspector from 'schema-inspector'
import { forEach, isFunction } from 'lodash'
import { forEach } from 'lodash'
import { getBoundPropertyDescriptor } from 'bind-property-descriptor'
import { MethodNotFound } from 'json-rpc-peer'
import * as methods from '../api'
@@ -182,7 +183,7 @@ export default class Api {
const addMethod = (method, name) => {
name = base + name
if (isFunction(method)) {
if (typeof method === 'function') {
removes.push(this.addApiMethod(name, method))
return
}
@@ -219,17 +220,29 @@ export default class Api {
throw new MethodNotFound(name)
}
// FIXME: it can cause issues if there any property assignments in
// XO methods called from the API.
const context = Object.create(xo, {
api: {
// Used by system.*().
value: this,
},
session: {
value: session,
},
})
// create the context which is an augmented XO
const context = (() => {
const descriptors = {
api: {
// Used by system.*().
value: this,
},
session: {
value: session,
},
}
let obj = xo
do {
Object.getOwnPropertyNames(obj).forEach(name => {
if (!(name in descriptors)) {
descriptors[name] = getBoundPropertyDescriptor(obj, name, xo)
}
})
} while ((obj = Reflect.getPrototypeOf(obj)) !== null)
return Object.create(null, descriptors)
})()
// Fetch and inject the current user.
const userId = session.get('user_id', undefined)

View File

@@ -1,6 +1,8 @@
import ms from 'ms'
import { forEach, isEmpty, iteratee, sortedIndexBy } from 'lodash'
import { debounceWithKey } from '../_pDebounceWithKey'
const isSkippedError = error =>
error.message === 'no disks found' ||
error.message === 'no VMs match this pattern' ||
@@ -64,131 +66,138 @@ const taskTimeComparator = ({ start: s1, end: e1 }, { start: s2, end: e2 }) => {
// tasks?: Task[],
// }
export default {
async getBackupNgLogs(runId?: string) {
const [jobLogs, restoreLogs, restoreMetadataLogs] = await Promise.all([
this.getLogs('jobs'),
this.getLogs('restore'),
this.getLogs('metadataRestore'),
])
getBackupNgLogs: debounceWithKey(
async function getBackupNgLogs(runId?: string) {
const [jobLogs, restoreLogs, restoreMetadataLogs] = await Promise.all([
this.getLogs('jobs'),
this.getLogs('restore'),
this.getLogs('metadataRestore'),
])
const { runningJobs, runningRestores, runningMetadataRestores } = this
const consolidated = {}
const started = {}
const { runningJobs, runningRestores, runningMetadataRestores } = this
const consolidated = {}
const started = {}
const handleLog = ({ data, time, message }, id) => {
const { event } = data
if (event === 'job.start') {
if (
(data.type === 'backup' || data.key === undefined) &&
(runId === undefined || runId === id)
) {
const { scheduleId, jobId } = data
consolidated[id] = started[id] = {
const handleLog = ({ data, time, message }, id) => {
const { event } = data
if (event === 'job.start') {
if (
(data.type === 'backup' || data.key === undefined) &&
(runId === undefined || runId === id)
) {
const { scheduleId, jobId } = data
consolidated[id] = started[id] = {
data: data.data,
id,
jobId,
jobName: data.jobName,
message: 'backup',
scheduleId,
start: time,
status: runningJobs[jobId] === id ? 'pending' : 'interrupted',
}
}
} else if (event === 'job.end') {
const { runJobId } = data
const log = started[runJobId]
if (log !== undefined) {
delete started[runJobId]
log.end = time
log.status = computeStatusAndSortTasks(
getStatus((log.result = data.error)),
log.tasks
)
}
} else if (event === 'task.start') {
const task = {
data: data.data,
id,
jobId,
jobName: data.jobName,
message: 'backup',
scheduleId,
message,
start: time,
status: runningJobs[jobId] === id ? 'pending' : 'interrupted',
}
const { parentId } = data
let parent
if (parentId === undefined && (runId === undefined || runId === id)) {
// top level task
task.status =
(message === 'restore' && !runningRestores.has(id)) ||
(message === 'metadataRestore' &&
!runningMetadataRestores.has(id))
? 'interrupted'
: 'pending'
consolidated[id] = started[id] = task
} else if ((parent = started[parentId]) !== undefined) {
// sub-task for which the parent exists
task.status = parent.status
started[id] = task
;(parent.tasks || (parent.tasks = [])).push(task)
}
} else if (event === 'task.end') {
const { taskId } = data
const log = started[taskId]
if (log !== undefined) {
// TODO: merge/transfer work-around
delete started[taskId]
log.end = time
log.status = computeStatusAndSortTasks(
getStatus((log.result = data.result), data.status),
log.tasks
)
}
} else if (event === 'task.warning') {
const parent = started[data.taskId]
parent !== undefined &&
(parent.warnings || (parent.warnings = [])).push({
data: data.data,
message,
})
} else if (event === 'task.info') {
const parent = started[data.taskId]
parent !== undefined &&
(parent.infos || (parent.infos = [])).push({
data: data.data,
message,
})
} else if (event === 'jobCall.start') {
const parent = started[data.runJobId]
if (parent !== undefined) {
;(parent.tasks || (parent.tasks = [])).push(
(started[id] = {
data: {
type: 'VM',
id: data.params.id,
},
id,
start: time,
status: parent.status,
})
)
}
} else if (event === 'jobCall.end') {
const { runCallId } = data
const log = started[runCallId]
if (log !== undefined) {
delete started[runCallId]
log.end = time
log.status = computeStatusAndSortTasks(
getStatus((log.result = data.error)),
log.tasks
)
}
}
} else if (event === 'job.end') {
const { runJobId } = data
const log = started[runJobId]
if (log !== undefined) {
delete started[runJobId]
log.end = time
log.status = computeStatusAndSortTasks(
getStatus((log.result = data.error)),
log.tasks
)
}
} else if (event === 'task.start') {
const task = {
data: data.data,
id,
message,
start: time,
}
const { parentId } = data
let parent
if (parentId === undefined && (runId === undefined || runId === id)) {
// top level task
task.status =
(message === 'restore' && !runningRestores.has(id)) ||
(message === 'metadataRestore' && !runningMetadataRestores.has(id))
? 'interrupted'
: 'pending'
consolidated[id] = started[id] = task
} else if ((parent = started[parentId]) !== undefined) {
// sub-task for which the parent exists
task.status = parent.status
started[id] = task
;(parent.tasks || (parent.tasks = [])).push(task)
}
} else if (event === 'task.end') {
const { taskId } = data
const log = started[taskId]
if (log !== undefined) {
// TODO: merge/transfer work-around
delete started[taskId]
log.end = time
log.status = computeStatusAndSortTasks(
getStatus((log.result = data.result), data.status),
log.tasks
)
}
} else if (event === 'task.warning') {
const parent = started[data.taskId]
parent !== undefined &&
(parent.warnings || (parent.warnings = [])).push({
data: data.data,
message,
})
} else if (event === 'task.info') {
const parent = started[data.taskId]
parent !== undefined &&
(parent.infos || (parent.infos = [])).push({
data: data.data,
message,
})
} else if (event === 'jobCall.start') {
const parent = started[data.runJobId]
if (parent !== undefined) {
;(parent.tasks || (parent.tasks = [])).push(
(started[id] = {
data: {
type: 'VM',
id: data.params.id,
},
id,
start: time,
status: parent.status,
})
)
}
} else if (event === 'jobCall.end') {
const { runCallId } = data
const log = started[runCallId]
if (log !== undefined) {
delete started[runCallId]
log.end = time
log.status = computeStatusAndSortTasks(
getStatus((log.result = data.error)),
log.tasks
)
}
}
forEach(jobLogs, handleLog)
forEach(restoreLogs, handleLog)
forEach(restoreMetadataLogs, handleLog)
return runId === undefined ? consolidated : consolidated[runId]
},
10e3,
function keyFn(runId) {
return [this, runId]
}
forEach(jobLogs, handleLog)
forEach(restoreLogs, handleLog)
forEach(restoreMetadataLogs, handleLog)
return runId === undefined ? consolidated : consolidated[runId]
},
),
async getBackupNgLogsSorted({ after, before, filter, limit }) {
let logs = await this.getBackupNgLogs()

View File

@@ -1,7 +1,6 @@
import asyncMap from '@xen-orchestra/async-map'
import createLogger from '@xen-orchestra/log'
import deferrable from 'golike-defer'
import escapeStringRegexp from 'escape-string-regexp'
import execa from 'execa'
import splitLines from 'split-lines'
import { CancelToken, fromEvent, ignoreErrors } from 'promise-toolbox'
@@ -10,7 +9,16 @@ import { createReadStream, readdir, stat } from 'fs'
import { satisfies as versionSatisfies } from 'semver'
import { utcFormat } from 'd3-time-format'
import { basename, dirname } from 'path'
import { filter, find, includes, once, range, sortBy, trim } from 'lodash'
import {
escapeRegExp,
filter,
find,
includes,
once,
range,
sortBy,
trim,
} from 'lodash'
import {
chainVhd,
createSyntheticStream as createVhdReadStream,
@@ -139,22 +147,20 @@ const listPartitions = (() => {
})
return device =>
execa
.stdout('partx', [
'--bytes',
'--output=NR,START,SIZE,NAME,UUID,TYPE',
'--pairs',
device.path,
])
.then(stdout =>
mapFilter(splitLines(stdout), line => {
const partition = parseLine(line)
const { type } = partition
if (type != null && !IGNORED[+type]) {
return partition
}
})
)
execa('partx', [
'--bytes',
'--output=NR,START,SIZE,NAME,UUID,TYPE',
'--pairs',
device.path,
]).then(({ stdout }) =>
mapFilter(splitLines(stdout), line => {
const partition = parseLine(line)
const { type } = partition
if (type != null && !IGNORED[+type]) {
return partition
}
})
)
})()
// handle LVM logical volumes automatically
@@ -271,7 +277,7 @@ const mountLvmPv = (device, partition) => {
}
args.push('--show', '-f', device.path)
return execa.stdout('losetup', args).then(stdout => {
return execa('losetup', args).then(({ stdout }) => {
const path = trim(stdout)
return {
path,
@@ -862,7 +868,7 @@ export default class {
const files = await handler.list('.')
const reg = new RegExp(
'^[^_]+_' + escapeStringRegexp(`${tag}_${vm.name_label}.xva`)
'^[^_]+_' + escapeRegExp(`${tag}_${vm.name_label}.xva`)
)
const backups = sortBy(filter(files, fileName => reg.test(fileName)))
@@ -887,9 +893,7 @@ export default class {
xapi._assertHealthyVdiChains(vm)
const reg = new RegExp(
'^rollingSnapshot_[^_]+_' + escapeStringRegexp(tag) + '_'
)
const reg = new RegExp('^rollingSnapshot_[^_]+_' + escapeRegExp(tag) + '_')
const snapshots = sortBy(
filter(vm.$snapshots, snapshot => reg.test(snapshot.name_label)),
'name_label'
@@ -926,9 +930,7 @@ export default class {
const transferStart = Date.now()
tag = 'DR_' + tag
const reg = new RegExp(
'^' +
escapeStringRegexp(`${vm.name_label}_${tag}_`) +
'[0-9]{8}T[0-9]{6}Z$'
'^' + escapeRegExp(`${vm.name_label}_${tag}_`) + '[0-9]{8}T[0-9]{6}Z$'
)
const targetXapi = this._xo.getXapi(sr)

View File

@@ -87,7 +87,7 @@ async function mountLvmPhysicalVolume(devicePath, partition) {
args.push('-o', partition.start * 512)
}
args.push('--show', '-f', devicePath)
const path = (await execa.stdout('losetup', args)).trim()
const path = (await execa('losetup', args)).stdout.trim()
await execa('pvscan', ['--cache', path])
return {
@@ -251,7 +251,7 @@ export default class BackupNgFileRestore {
}
async _listPartitions(devicePath, inspectLvmPv = true) {
const stdout = await execa.stdout('partx', [
const { stdout } = await execa('partx', [
'--bytes',
'--output=NR,START,SIZE,NAME,UUID,TYPE',
'--pairs',

View File

@@ -1,7 +1,7 @@
import asyncMap from '@xen-orchestra/async-map'
import { createPredicate } from 'value-matcher'
import { timeout } from 'promise-toolbox'
import { assign, filter, isEmpty, map, mapValues } from 'lodash'
import { filter, isEmpty, map, mapValues } from 'lodash'
import { crossProduct } from '../../math'
import { serializeError, thunkToArray } from '../../utils'
@@ -82,7 +82,11 @@ export default async function executeJobCall({
params,
start: Date.now(),
})
let promise = app.callApiMethod(session, job.method, assign({}, params))
let promise = app.callApiMethod(
session,
job.method,
Object.assign({}, params)
)
if (job.timeout) {
promise = promise::timeout(job.timeout)
}

View File

@@ -4,7 +4,7 @@ import { invalidParameters, noSuchObject } from 'xo-common/api-errors'
import * as sensitiveValues from '../sensitive-values'
import { PluginsMetadata } from '../models/plugin-metadata'
import { isFunction, mapToArray } from '../utils'
import { mapToArray } from '../utils'
// ===================================================================
@@ -65,9 +65,9 @@ export default class {
id,
instance,
name,
testable: isFunction(instance.test),
testable: typeof instance.test === 'function',
testSchema,
unloadable: isFunction(instance.unload),
unloadable: typeof instance.unload === 'function',
version,
})

View File

@@ -1,7 +1,6 @@
import asyncMap from '@xen-orchestra/async-map'
import synchronized from 'decorator-synchronized'
import {
assign,
every,
forEach,
isObject,
@@ -123,7 +122,7 @@ export default class {
}
async computeVmResourcesUsage(vm) {
return assign(
return Object.assign(
computeVmResourcesUsage(this._xo.getXapi(vm).getObject(vm._xapiId)),
await this._xo.computeVmIpPoolsUsage(vm)
)

View File

@@ -2,7 +2,7 @@ import levelup from 'level-party'
import sublevel from 'level-sublevel'
import { ensureDir } from 'fs-extra'
import { forEach, isFunction, promisify } from '../utils'
import { forEach, promisify } from '../utils'
// ===================================================================
@@ -32,7 +32,7 @@ const levelHas = db => {
const levelPromise = db => {
const dbP = {}
forEach(db, (value, name) => {
if (!isFunction(value)) {
if (typeof value !== 'function') {
return
}

View File

@@ -10,13 +10,7 @@ import parseDuration from '../_parseDuration'
import Xapi from '../xapi'
import xapiObjectToXo from '../xapi-object-to-xo'
import XapiStats from '../xapi-stats'
import {
camelToSnakeCase,
forEach,
isEmpty,
isString,
popProperty,
} from '../utils'
import { camelToSnakeCase, forEach, isEmpty, popProperty } from '../utils'
import { Servers } from '../models/server'
// ===================================================================
@@ -461,7 +455,7 @@ export default class {
// Returns the XAPI connection associated to an object.
getXapi(object, type) {
if (isString(object)) {
if (typeof object === 'string') {
object = this._xo.getObject(object, type)
}

View File

@@ -9,8 +9,6 @@ import {
forEach,
includes,
isEmpty,
isFunction,
isString,
iteratee,
map as mapToArray,
stubTrue,
@@ -73,7 +71,8 @@ export default class Xo extends EventEmitter {
if (
type != null &&
((isString(type) && type !== obj.type) || !includes(type, obj.type)) // Array
((typeof type === 'string' && type !== obj.type) ||
!includes(type, obj.type)) // Array
) {
throw noSuchObject(key, type)
}
@@ -210,7 +209,7 @@ export default class Xo extends EventEmitter {
}
// For security, prevent from accessing `this`.
if (isFunction(value)) {
if (typeof value === 'function') {
value = (value =>
function() {
return value.apply(thisArg, arguments)

View File

@@ -27,7 +27,7 @@
"child-process-promise": "^2.0.3",
"core-js": "^3.0.0",
"pipette": "^0.9.3",
"promise-toolbox": "^0.13.0",
"promise-toolbox": "^0.14.0",
"tmp": "^0.1.0",
"vhd-lib": "^0.7.0"
},
@@ -36,7 +36,7 @@
"@babel/core": "^7.0.0",
"@babel/preset-env": "^7.0.0",
"babel-plugin-lodash": "^3.3.2",
"cross-env": "^5.1.3",
"cross-env": "^6.0.3",
"event-to-promise": "^0.8.0",
"execa": "^2.0.2",
"fs-extra": "^8.0.1",

View File

@@ -43,6 +43,6 @@ test('VMDKDirectParser reads OK', async () => {
}
expect(harvested.length).toEqual(2)
expect(harvested[0].offsetBytes).toEqual(0)
expect(harvested[0].data.length).toEqual(header['grainSizeSectors'] * 512)
expect(harvested[1].offsetBytes).toEqual(header['grainSizeSectors'] * 512)
expect(harvested[0].data.length).toEqual(header.grainSizeSectors * 512)
expect(harvested[1].offsetBytes).toEqual(header.grainSizeSectors * 512)
})

View File

@@ -1,7 +1,7 @@
{
"private": true,
"name": "xo-web",
"version": "5.49.0",
"version": "5.50.3",
"license": "AGPL-3.0",
"description": "Web interface client for Xen-Orchestra",
"keywords": [
@@ -34,7 +34,7 @@
"@nraynaud/novnc": "0.6.1",
"@xen-orchestra/cron": "^1.0.4",
"@xen-orchestra/defined": "^0.0.0",
"@xen-orchestra/template": "^0.0.0",
"@xen-orchestra/template": "^0.1.0",
"ansi_up": "^4.0.3",
"asap": "^2.0.6",
"babel-core": "^6.26.0",
@@ -97,7 +97,7 @@
"moment-timezone": "^0.5.14",
"notifyjs": "^3.0.0",
"otplib": "^11.0.0",
"promise-toolbox": "^0.13.0",
"promise-toolbox": "^0.14.0",
"prop-types": "^15.6.0",
"qrcode": "^1.3.2",
"random-password": "^0.1.2",

View File

@@ -1,6 +1,5 @@
import PropTypes from 'prop-types'
import React from 'react'
import { isFunction } from 'lodash'
import Button from './button'
import Component from './base-component'
@@ -93,9 +92,10 @@ export default class ActionButton extends Component {
const { redirectOnSuccess } = props
if (redirectOnSuccess !== undefined) {
const to = isFunction(redirectOnSuccess)
? redirectOnSuccess(result, handlerParam)
: redirectOnSuccess
const to =
typeof redirectOnSuccess === 'function'
? redirectOnSuccess(result, handlerParam)
: redirectOnSuccess
if (to !== undefined) {
return this.context.router.push(to)
}

View File

@@ -1,6 +1,6 @@
import { PureComponent } from 'react'
import { cowSet } from 'utils'
import { includes, isArray, forEach, map } from 'lodash'
import { includes, forEach, map } from 'lodash'
import getEventValue from './get-event-value'
@@ -15,7 +15,7 @@ const get = (object, path, depth) => {
}
const prop = path[depth++]
return isArray(object) && prop === '*'
return Array.isArray(object) && prop === '*'
? map(object, value => get(value, path, depth))
: get(object[prop], path, depth)
}

Some files were not shown because too many files have changed in this diff Show More