Compare commits

32 Commits

xo-web-v5. ... @xen-orche

| Author | SHA1 | Date |
|---|---|---|
|  | 62a8b94221 |  |
|  | 21faaeb33d |  |
|  | 0525fc5909 |  |
|  | a1a53bb285 |  |
|  | 0c453c4415 |  |
|  | d0406f9736 |  |
|  | ba74b8603d |  |
|  | c675a4d61d |  |
|  | 965c45bc70 |  |
|  | 139a22602a |  |
|  | e0e4969198 |  |
|  | 08d69d95b3 |  |
|  | 4e6c507ba9 |  |
|  | fd06374365 |  |
|  | a07ebc636a |  |
|  | 4c151ac9aa |  |
|  | 05c425698f |  |
|  | 2a961979e6 |  |
|  | 211ede92cc |  |
|  | 256af03772 |  |
|  | 654fd5a4f9 |  |
|  | 541d90e49f |  |
|  | 974e7038e7 |  |
|  | e2f5b30aa9 |  |
|  | 3483e7d9e0 |  |
|  | 56cb20a1af |  |
|  | 64929653dd |  |
|  | c955da9bc6 |  |
|  | 291354fa8e |  |
|  | 905d736512 |  |
|  | 3406d6e2a9 |  |
|  | fc10b5ffb9 |  |
```diff
@@ -21,7 +21,7 @@ module.exports = {
   overrides: [
     {
-      files: ['cli.js', '*-cli.js', 'packages/*cli*/**/*.js'],
+      files: ['cli.js', '*-cli.js', '**/*cli*/**/*.js'],
       rules: {
         'no-console': 'off',
       },
```
```diff
@@ -36,7 +36,7 @@
     "@babel/preset-env": "^7.0.0",
     "@babel/preset-flow": "^7.0.0",
     "babel-plugin-lodash": "^3.3.2",
-    "cross-env": "^5.1.3",
+    "cross-env": "^6.0.3",
     "rimraf": "^3.0.0"
   },
   "scripts": {
```
@xen-orchestra/backups-cli/index.js (new executable file, 378 lines)

```js
#!/usr/bin/env node

const args = process.argv.slice(2)

if (
  args.length === 0 ||
  /^(?:-h|--help)$/.test(args[0]) ||
  args[0] !== 'clean-vms'
) {
  console.log('Usage: xo-backups clean-vms [--force] xo-vm-backups/*')
  // eslint-disable-next-line no-process-exit
  return process.exit(1)
}

// remove `clean-vms` arg which is the only available command ATM
args.splice(0, 1)

// only act (ie delete files) if `--force` is present
const force = args[0] === '--force'
if (force) {
  args.splice(0, 1)
}

// -----------------------------------------------------------------------------

const assert = require('assert')
const lockfile = require('proper-lockfile')
const { default: Vhd } = require('vhd-lib')
const { curryRight, flatten } = require('lodash')
const { dirname, resolve } = require('path')
const { DISK_TYPE_DIFFERENCING } = require('vhd-lib/dist/_constants')
const { pipe, promisifyAll } = require('promise-toolbox')

const fs = promisifyAll(require('fs'))
const handler = require('@xen-orchestra/fs').getHandler({ url: 'file://' })

// -----------------------------------------------------------------------------

const asyncMap = curryRight((iterable, fn) =>
  Promise.all(
    Array.isArray(iterable) ? iterable.map(fn) : Array.from(iterable, fn)
  )
)

const filter = (...args) => thisArg => thisArg.filter(...args)

// TODO: better check?

// our heuristic is not good enough, there has been some false positives
// (detected as invalid by us but valid by `tar` and imported with success),
// either:
// - these files were normal but the check is incorrect
// - these files were invalid but without data loss
// - these files were invalid but with silent data loss
//
// FIXME: the heuristic does not work if the XVA is compressed, we need to
// implement a specific test for it
//
// maybe reading the end of the file looking for a file named
// /^Ref:\d+/\d+\.checksum$/ and then validating the tar structure from it
//
// https://github.com/npm/node-tar/issues/234#issuecomment-538190295
const isValidTar = async path => {
  try {
    const fd = await fs.open(path, 'r')
    try {
      const { size } = await fs.fstat(fd)
      if (size <= 1024 || size % 512 !== 0) {
        return false
      }

      const buf = Buffer.allocUnsafe(1024)
      assert.strictEqual(
        await fs.read(fd, buf, 0, buf.length, size - buf.length),
        buf.length
      )
      return buf.every(_ => _ === 0)
    } finally {
      fs.close(fd).catch(noop)
    }
  } catch (error) {
    // never throw, log and report as valid to avoid side effects
    console.error('isValidTar', path, error)
    return true
  }
}

const noop = Function.prototype

const readDir = path =>
  fs.readdir(path).then(
    entries => {
      entries.forEach((entry, i) => {
        entries[i] = `${path}/${entry}`
      })

      return entries
    },
    error => {
      // a missing dir is by definition empty
      if (error != null && error.code === 'ENOENT') {
        return []
      }
      throw error
    }
  )

// -----------------------------------------------------------------------------

// chain is an array of VHDs from child to parent
//
// the whole chain will be merged into parent, parent will be renamed to child
// and all the others will deleted
async function mergeVhdChain(chain) {
  assert(chain.length >= 2)

  const child = chain[0]
  const parent = chain[chain.length - 1]
  const children = chain.slice(0, -1).reverse()

  console.warn('Unused parents of VHD', child)
  chain
    .slice(1)
    .reverse()
    .forEach(parent => {
      console.warn(' ', parent)
    })
  force && console.warn(' merging…')
  console.warn('')
  if (force) {
    // `mergeVhd` does not work with a stream, either
    // - make it accept a stream
    // - or create synthetic VHD which is not a stream
    return console.warn('TODO: implement merge')
    // await mergeVhd(
    //   handler,
    //   parent,
    //   handler,
    //   children.length === 1
    //     ? child
    //     : await createSyntheticStream(handler, children)
    // )
  }

  await Promise.all([
    force && fs.rename(parent, child),
    asyncMap(children.slice(0, -1), child => {
      console.warn('Unused VHD', child)
      force && console.warn(' deleting…')
      console.warn('')
      return force && handler.unlink(child)
    }),
  ])
}

const listVhds = pipe([
  vmDir => vmDir + '/vdis',
  readDir,
  asyncMap(readDir),
  flatten,
  asyncMap(readDir),
  flatten,
  filter(_ => _.endsWith('.vhd')),
])

async function handleVm(vmDir) {
  const vhds = new Set()
  const vhdParents = { __proto__: null }
  const vhdChildren = { __proto__: null }

  // remove broken VHDs
  await asyncMap(await listVhds(vmDir), async path => {
    try {
      const vhd = new Vhd(handler, path)
      await vhd.readHeaderAndFooter()
      vhds.add(path)
      if (vhd.footer.diskType === DISK_TYPE_DIFFERENCING) {
        const parent = resolve(dirname(path), vhd.header.parentUnicodeName)
        vhdParents[path] = parent
        if (parent in vhdChildren) {
          const error = new Error(
            'this script does not support multiple VHD children'
          )
          error.parent = parent
          error.child1 = vhdChildren[parent]
          error.child2 = path
          throw error // should we throw?
        }
        vhdChildren[parent] = path
      }
    } catch (error) {
      console.warn('Error while checking VHD', path)
      console.warn(' ', error)
      if (error != null && error.code === 'ERR_ASSERTION') {
        force && console.warn(' deleting…')
        console.warn('')
        force && (await handler.unlink(path))
      }
    }
  })

  // remove VHDs with missing ancestors
  {
    const deletions = []

    // return true if the VHD has been deleted or is missing
    const deleteIfOrphan = vhd => {
      const parent = vhdParents[vhd]
      if (parent === undefined) {
        return
      }

      // no longer needs to be checked
      delete vhdParents[vhd]

      deleteIfOrphan(parent)

      if (!vhds.has(parent)) {
        vhds.delete(vhd)

        console.warn('Error while checking VHD', vhd)
        console.warn(' missing parent', parent)
        force && console.warn(' deleting…')
        console.warn('')
        force && deletions.push(handler.unlink(vhd))
      }
    }

    // > A property that is deleted before it has been visited will not be
    // > visited later.
    // >
    // > -- https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/for...in#Deleted_added_or_modified_properties
    for (const child in vhdParents) {
      deleteIfOrphan(child)
    }

    await Promise.all(deletions)
  }

  const [jsons, xvas] = await readDir(vmDir).then(entries => [
    entries.filter(_ => _.endsWith('.json')),
    new Set(entries.filter(_ => _.endsWith('.xva'))),
  ])

  await asyncMap(xvas, async path => {
    // check is not good enough to delete the file, the best we can do is report
    // it
    if (!(await isValidTar(path))) {
      console.warn('Potential broken XVA', path)
      console.warn('')
    }
  })

  const unusedVhds = new Set(vhds)
  const unusedXvas = new Set(xvas)

  // compile the list of unused XVAs and VHDs, and remove backup metadata which
  // reference a missing XVA/VHD
  await asyncMap(jsons, async json => {
    const metadata = JSON.parse(await fs.readFile(json))
    const { mode } = metadata
    if (mode === 'full') {
      const linkedXva = resolve(vmDir, metadata.xva)

      if (xvas.has(linkedXva)) {
        unusedXvas.delete(linkedXva)
      } else {
        console.warn('Error while checking backup', json)
        console.warn(' missing file', linkedXva)
        force && console.warn(' deleting…')
        console.warn('')
        force && (await handler.unlink(json))
      }
    } else if (mode === 'delta') {
      const linkedVhds = (() => {
        const { vhds } = metadata
        return Object.keys(vhds).map(key => resolve(vmDir, vhds[key]))
      })()

      // FIXME: find better approach by keeping as much of the backup as
      // possible (existing disks) even if one disk is missing
      if (linkedVhds.every(_ => vhds.has(_))) {
        linkedVhds.forEach(_ => unusedVhds.delete(_))
      } else {
        console.warn('Error while checking backup', json)
        const missingVhds = linkedVhds.filter(_ => !vhds.has(_))
        console.warn(
          ' %i/%i missing VHDs',
          missingVhds.length,
          linkedVhds.length
        )
        missingVhds.forEach(vhd => {
          console.warn(' ', vhd)
        })
        force && console.warn(' deleting…')
        console.warn('')
        force && (await handler.unlink(json))
      }
    }
  })

  // TODO: parallelize by vm/job/vdi
  const unusedVhdsDeletion = []
  {
    // VHD chains (as list from child to ancestor) to merge indexed by last
    // ancestor
    const vhdChainsToMerge = { __proto__: null }

    const toCheck = new Set(unusedVhds)

    const getUsedChildChainOrDelete = vhd => {
      if (vhd in vhdChainsToMerge) {
        const chain = vhdChainsToMerge[vhd]
        delete vhdChainsToMerge[vhd]
        return chain
      }

      if (!unusedVhds.has(vhd)) {
        return [vhd]
      }

      // no longer needs to be checked
      toCheck.delete(vhd)

      const child = vhdChildren[vhd]
      if (child !== undefined) {
        const chain = getUsedChildChainOrDelete(child)
        if (chain !== undefined) {
          chain.push(vhd)
          return chain
        }
      }

      console.warn('Unused VHD', vhd)
      force && console.warn(' deleting…')
      console.warn('')
      force && unusedVhdsDeletion.push(handler.unlink(vhd))
    }

    toCheck.forEach(vhd => {
      vhdChainsToMerge[vhd] = getUsedChildChainOrDelete(vhd)
    })

    Object.keys(vhdChainsToMerge).forEach(key => {
      const chain = vhdChainsToMerge[key]
      if (chain !== undefined) {
        unusedVhdsDeletion.push(mergeVhdChain(chain))
      }
    })
  }

  await Promise.all([
    unusedVhdsDeletion,
    asyncMap(unusedXvas, path => {
      console.warn('Unused XVA', path)
      force && console.warn(' deleting…')
      console.warn('')
      return force && handler.unlink(path)
    }),
  ])
}

// -----------------------------------------------------------------------------

asyncMap(args, async vmDir => {
  vmDir = resolve(vmDir)

  // TODO: implement this in `xo-server`, not easy because not compatible with
  // `@xen-orchestra/fs`.
  const release = await lockfile.lock(vmDir)
  try {
    await handleVm(vmDir)
  } catch (error) {
    console.error('handleVm', vmDir, error)
  } finally {
    await release()
  }
}).catch(error => console.error('main', error))
```
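The curried `asyncMap` defined near the top of this file is what lets `listVhds` read as a flat `pipe` of steps: partially applied with only a callback, it returns a function waiting for the iterable. A small self-contained sketch of that composition, using the same promise-toolbox and lodash helpers; the fake `readDir` and directory names are illustrative only, not taken from the script:

```js
// Sketch of how the curried asyncMap composes inside pipe. The directory
// layout here is made up for the example.
const { pipe } = require('promise-toolbox')
const { curryRight, flatten } = require('lodash')

const asyncMap = curryRight((iterable, fn) =>
  Promise.all(
    Array.isArray(iterable) ? iterable.map(fn) : Array.from(iterable, fn)
  )
)

// fake readDir for the example: maps a dir to two child paths
const readDir = async dir => [`${dir}/a`, `${dir}/b`]

const listLeaves = pipe([
  readDir, //            ['root/a', 'root/b']
  asyncMap(readDir), //  [['root/a/a', 'root/a/b'], ['root/b/a', 'root/b/b']]
  flatten, //            one flat array of leaf paths
])

listLeaves('root').then(console.log)
```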
@xen-orchestra/backups-cli/package.json (new file, 27 lines)

```json
{
  "bin": {
    "xo-backups": "index.js"
  },
  "bugs": "https://github.com/vatesfr/xen-orchestra/issues",
  "dependencies": {
    "@xen-orchestra/fs": "^0.10.1",
    "lodash": "^4.17.15",
    "promise-toolbox": "^0.14.0",
    "proper-lockfile": "^4.1.1",
    "vhd-lib": "^0.7.0"
  },
  "engines": {
    "node": ">=7.10.1"
  },
  "homepage": "https://github.com/vatesfr/xen-orchestra/tree/master/@xen-orchestra/backups-cli",
  "name": "@xen-orchestra/backups-cli",
  "repository": {
    "directory": "@xen-orchestra/backups-cli",
    "type": "git",
    "url": "https://github.com/vatesfr/xen-orchestra.git"
  },
  "scripts": {
    "postversion": "npm publish --access public"
  },
  "version": "0.0.0"
}
```
In `@xen-orchestra/cron`, the package version and `cross-env` are bumped:

```diff
@@ -1,6 +1,6 @@
 {
   "name": "@xen-orchestra/cron",
-  "version": "1.0.4",
+  "version": "1.0.5",
   "license": "ISC",
   "description": "Focused, well maintained, cron parser/scheduler",
   "keywords": [
@@ -46,7 +46,7 @@
     "@babel/core": "^7.0.0",
     "@babel/preset-env": "^7.0.0",
     "@babel/preset-flow": "^7.0.0",
-    "cross-env": "^5.1.3",
+    "cross-env": "^6.0.3",
     "rimraf": "^3.0.0"
   },
   "scripts": {
```
The `Job` scheduling wrapper now re-arms its timer when it fires early, preventing double runs ([#4625](https://github.com/vatesfr/xen-orchestra/issues/4625)):

```diff
@@ -5,14 +5,21 @@ import parse from './parse'

 const MAX_DELAY = 2 ** 31 - 1

-function nextDelay(schedule) {
-  const now = schedule._createDate()
-  return next(schedule._schedule, now) - now
-}
-
 class Job {
   constructor(schedule, fn) {
+    let scheduledDate
     const wrapper = () => {
+      const now = Date.now()
+      if (scheduledDate > now) {
+        // we're early, delay
+        //
+        // no need to check _isEnabled, we're just delaying the existing timeout
+        //
+        // see https://github.com/vatesfr/xen-orchestra/issues/4625
+        this._timeout = setTimeout(wrapper, scheduledDate - now)
+        return
+      }
+
       this._isRunning = true

       let result
@@ -32,7 +39,9 @@ class Job {
       this._isRunning = false

       if (this._isEnabled) {
-        const delay = nextDelay(schedule)
+        const now = Date.now()
+        scheduledDate = +schedule._createDate()
+        const delay = scheduledDate - now
         this._timeout =
           delay < MAX_DELAY
             ? setTimeout(wrapper, delay)
```
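The guard above handles timers that wake up slightly before the scheduled date, which is what caused the double runs: instead of executing the job twice, the wrapper re-arms the timeout for the remaining delay. A standalone sketch of the same idea, with illustrative names (`scheduleAt`, `task`) that are not part of the cron package:

```js
// Standalone sketch of the early-fire guard; scheduleAt and task are
// illustrative names, not the @xen-orchestra/cron API.
function scheduleAt(scheduledDate, task) {
  const wrapper = () => {
    const now = Date.now()
    if (scheduledDate > now) {
      // the timer fired early: re-arm for the remaining delay instead of
      // running the task a second time
      setTimeout(wrapper, scheduledDate - now)
      return
    }
    task()
  }
  setTimeout(wrapper, Math.max(0, scheduledDate - Date.now()))
}

// usage: run roughly five seconds from now
scheduleAt(Date.now() + 5000, () => console.log('tick'))
```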
@@ -34,7 +34,7 @@
|
||||
"@babel/preset-env": "^7.0.0",
|
||||
"@babel/preset-flow": "^7.0.0",
|
||||
"babel-plugin-lodash": "^3.3.2",
|
||||
"cross-env": "^5.1.3",
|
||||
"cross-env": "^6.0.3",
|
||||
"rimraf": "^3.0.0"
|
||||
},
|
||||
"scripts": {
|
||||
|
||||
@@ -33,7 +33,7 @@
|
||||
"@babel/core": "^7.0.0",
|
||||
"@babel/preset-env": "^7.0.0",
|
||||
"babel-plugin-lodash": "^3.3.2",
|
||||
"cross-env": "^5.1.3",
|
||||
"cross-env": "^6.0.3",
|
||||
"rimraf": "^3.0.0"
|
||||
},
|
||||
"scripts": {
|
||||
|
||||
@@ -30,7 +30,7 @@
|
||||
"get-stream": "^4.0.0",
|
||||
"limit-concurrency-decorator": "^0.4.0",
|
||||
"lodash": "^4.17.4",
|
||||
"promise-toolbox": "^0.13.0",
|
||||
"promise-toolbox": "^0.14.0",
|
||||
"readable-stream": "^3.0.6",
|
||||
"through2": "^3.0.0",
|
||||
"tmp": "^0.1.0",
|
||||
@@ -46,7 +46,7 @@
|
||||
"@babel/preset-flow": "^7.0.0",
|
||||
"async-iterator-to-stream": "^1.1.0",
|
||||
"babel-plugin-lodash": "^3.3.2",
|
||||
"cross-env": "^5.1.3",
|
||||
"cross-env": "^6.0.3",
|
||||
"dotenv": "^8.0.0",
|
||||
"index-modules": "^0.3.0",
|
||||
"rimraf": "^3.0.0"
|
||||
|
||||
@@ -31,14 +31,14 @@
|
||||
},
|
||||
"dependencies": {
|
||||
"lodash": "^4.17.4",
|
||||
"promise-toolbox": "^0.13.0"
|
||||
"promise-toolbox": "^0.14.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@babel/cli": "^7.0.0",
|
||||
"@babel/core": "^7.0.0",
|
||||
"@babel/preset-env": "^7.0.0",
|
||||
"babel-plugin-lodash": "^3.3.2",
|
||||
"cross-env": "^5.1.3",
|
||||
"cross-env": "^6.0.3",
|
||||
"index-modules": "^0.3.0",
|
||||
"rimraf": "^3.0.0"
|
||||
},
|
||||
|
||||
@@ -36,7 +36,7 @@
|
||||
"@babel/preset-env": "^7.0.0",
|
||||
"babel-plugin-dev": "^1.0.0",
|
||||
"babel-plugin-lodash": "^3.3.2",
|
||||
"cross-env": "^5.1.3",
|
||||
"cross-env": "^6.0.3",
|
||||
"rimraf": "^3.0.0"
|
||||
},
|
||||
"scripts": {
|
||||
|
||||
@@ -28,7 +28,7 @@
|
||||
"@babel/cli": "^7.0.0",
|
||||
"@babel/core": "^7.0.0",
|
||||
"@babel/preset-env": "^7.0.0",
|
||||
"cross-env": "^5.1.3",
|
||||
"cross-env": "^6.0.3",
|
||||
"rimraf": "^3.0.0"
|
||||
},
|
||||
"scripts": {
|
||||
|
||||
CHANGELOG.md (21 lines changed)

```
@@ -8,14 +8,27 @@

### Released packages

- xo-server v5.51.0
- xo-web v5.51.0

## **5.39.1** (2019-10-11)



### Enhancements

- [Support] Ability to check the XOA on the user interface [#4513](https://github.com/vatesfr/xen-orchestra/issues/4513) (PR [#4574](https://github.com/vatesfr/xen-orchestra/pull/4574))

### Bug fixes

- [VM/new-vm] Fix template selection on creating new VM for resource sets [#4565](https://github.com/vatesfr/xen-orchestra/issues/4565) (PR [#4568](https://github.com/vatesfr/xen-orchestra/pull/4568))
- [VM] Clearer invalid cores per socket error [#4120](https://github.com/vatesfr/xen-orchestra/issues/4120) (PR [#4187](https://github.com/vatesfr/xen-orchestra/pull/4187))

### Released packages

- xo-web v5.50.3


## **5.39.0** (2019-09-30)



### Highlights

- [VM/console] Add a button to connect to the VM via the local SSH client (PR [#4415](https://github.com/vatesfr/xen-orchestra/pull/4415))
```
```
@@ -3,18 +3,29 @@
> Keep in mind the changelog is addressed to **users** and should be
> understandable by them.

### Breaking changes

- `xo-server` requires Node 8.

### Enhancements

> Users must be able to say: “Nice enhancement, I'm eager to test it”

- [Support] Ability to check the XOA on the user interface [#4513](https://github.com/vatesfr/xen-orchestra/issues/4513) (PR [#4574](https://github.com/vatesfr/xen-orchestra/pull/4574))
- [Hub] Ability to select SR in hub VM installation (PR [#4571](https://github.com/vatesfr/xen-orchestra/pull/4571))
- [Hub] Display more info about downloadable templates (PR [#4593](https://github.com/vatesfr/xen-orchestra/pull/4593))
- [Support] Ability to open and close support tunnel from the user interface [#4513](https://github.com/vatesfr/xen-orchestra/issues/4513) (PR [#4616](https://github.com/vatesfr/xen-orchestra/pull/4616))
- [xo-server-transport-icinga2] Add support of [icinga2](https://icinga.com/docs/icinga2/latest/doc/12-icinga2-api/) for reporting services status [#4563](https://github.com/vatesfr/xen-orchestra/issues/4563) (PR [#4573](https://github.com/vatesfr/xen-orchestra/pull/4573))
- [Hub] Ability to update existing template (PR [#4613](https://github.com/vatesfr/xen-orchestra/pull/4613))
- [Menu] Remove legacy backup entry [#4467](https://github.com/vatesfr/xen-orchestra/issues/4467) (PR [#4476](https://github.com/vatesfr/xen-orchestra/pull/4476))
- [Backup NG] Offline backup feature [#3449](https://github.com/vatesfr/xen-orchestra/issues/3449) (PR [#4470](https://github.com/vatesfr/xen-orchestra/pull/4470))

### Bug fixes

> Users must be able to say: “I had this issue, happy to know it's fixed”

- [VM/new-vm] Fix template selection on creating new VM for resource sets [#4565](https://github.com/vatesfr/xen-orchestra/issues/4565) (PR [#4568](https://github.com/vatesfr/xen-orchestra/pull/4568))
- [VM] Clearer invalid cores per socket error [#4120](https://github.com/vatesfr/xen-orchestra/issues/4120) (PR [#4187](https://github.com/vatesfr/xen-orchestra/pull/4187))
- [SR] Fix `[object HTMLInputElement]` name after re-attaching a SR [#4546](https://github.com/vatesfr/xen-orchestra/issues/4546) (PR [#4550](https://github.com/vatesfr/xen-orchestra/pull/4550))
- [Schedules] Prevent double runs [#4625](https://github.com/vatesfr/xen-orchestra/issues/4625) (PR [#4626](https://github.com/vatesfr/xen-orchestra/pull/4626))
- [Schedules] Properly enable/disable on config import (PR [#4624](https://github.com/vatesfr/xen-orchestra/pull/4624))

### Released packages

@@ -23,5 +34,12 @@
>
> Rule of thumb: add packages on top.

- @xen-orchestra/cron v1.0.5
- xo-server-transport-icinga2 v0.1.0
- xo-server-sdn-controller v0.3.1
- xo-server v5.51.0
- xo-web v5.51.0

### Dropped packages

- xo-server-cloud : this package was useless for OpenSource installations because it required a complete XOA environment
```
````diff
@@ -20,7 +20,7 @@ We'll consider at this point that you've got a working node on your box. E.g:

 ```
 $ node -v
-v8.12.0
+v8.16.2
 ```

 If not, see [this page](https://nodejs.org/en/download/package-manager/) for instructions on how to install Node.
````
@@ -12,18 +12,18 @@
|
||||
"eslint-config-standard-jsx": "^8.1.0",
|
||||
"eslint-plugin-eslint-comments": "^3.1.1",
|
||||
"eslint-plugin-import": "^2.8.0",
|
||||
"eslint-plugin-node": "^9.0.1",
|
||||
"eslint-plugin-node": "^10.0.0",
|
||||
"eslint-plugin-promise": "^4.0.0",
|
||||
"eslint-plugin-react": "^7.6.1",
|
||||
"eslint-plugin-standard": "^4.0.0",
|
||||
"exec-promise": "^0.7.0",
|
||||
"flow-bin": "^0.106.3",
|
||||
"flow-bin": "^0.109.0",
|
||||
"globby": "^10.0.0",
|
||||
"husky": "^3.0.0",
|
||||
"jest": "^24.1.0",
|
||||
"lodash": "^4.17.4",
|
||||
"prettier": "^1.10.2",
|
||||
"promise-toolbox": "^0.13.0",
|
||||
"promise-toolbox": "^0.14.0",
|
||||
"sorted-object": "^2.0.1"
|
||||
},
|
||||
"engines": {
|
||||
|
||||
@@ -35,7 +35,7 @@
|
||||
"@babel/core": "^7.0.0",
|
||||
"@babel/preset-env": "^7.0.0",
|
||||
"babel-plugin-lodash": "^3.3.2",
|
||||
"cross-env": "^5.1.1",
|
||||
"cross-env": "^6.0.3",
|
||||
"rimraf": "^3.0.0"
|
||||
},
|
||||
"scripts": {
|
||||
|
||||
@@ -33,7 +33,7 @@
|
||||
"@babel/core": "^7.0.0",
|
||||
"@babel/preset-env": "^7.0.0",
|
||||
"@babel/preset-flow": "^7.0.0",
|
||||
"cross-env": "^5.1.3",
|
||||
"cross-env": "^6.0.3",
|
||||
"rimraf": "^3.0.0"
|
||||
},
|
||||
"scripts": {
|
||||
|
||||
@@ -39,10 +39,10 @@
|
||||
"@babel/core": "^7.0.0",
|
||||
"@babel/preset-env": "^7.0.0",
|
||||
"babel-plugin-lodash": "^3.3.2",
|
||||
"cross-env": "^5.1.3",
|
||||
"cross-env": "^6.0.3",
|
||||
"execa": "^2.0.2",
|
||||
"index-modules": "^0.3.0",
|
||||
"promise-toolbox": "^0.13.0",
|
||||
"promise-toolbox": "^0.14.0",
|
||||
"rimraf": "^3.0.0",
|
||||
"tmp": "^0.1.0"
|
||||
},
|
||||
|
||||
@@ -21,12 +21,13 @@
|
||||
"node": ">=6"
|
||||
},
|
||||
"dependencies": {
|
||||
"@xen-orchestra/log": "^0.2.0",
|
||||
"async-iterator-to-stream": "^1.0.2",
|
||||
"core-js": "^3.0.0",
|
||||
"from2": "^2.3.0",
|
||||
"fs-extra": "^8.0.1",
|
||||
"limit-concurrency-decorator": "^0.4.0",
|
||||
"promise-toolbox": "^0.13.0",
|
||||
"promise-toolbox": "^0.14.0",
|
||||
"struct-fu": "^1.2.0",
|
||||
"uuid": "^3.0.1"
|
||||
},
|
||||
@@ -37,7 +38,7 @@
|
||||
"@babel/preset-flow": "^7.0.0",
|
||||
"@xen-orchestra/fs": "^0.10.1",
|
||||
"babel-plugin-lodash": "^3.3.2",
|
||||
"cross-env": "^5.1.3",
|
||||
"cross-env": "^6.0.3",
|
||||
"execa": "^2.0.2",
|
||||
"fs-promise": "^2.0.0",
|
||||
"get-stream": "^5.1.0",
|
||||
|
||||
In vhd-lib, `createSyntheticStream(handler, path)` becomes `createSyntheticStream(handler, paths)`: it accepts either a single VHD path (walking up the parent chain itself) or an explicit list of paths, and error logging moves to `@xen-orchestra/log`:

```
@@ -1,4 +1,5 @@
import asyncIteratorToStream from 'async-iterator-to-stream'
import { createLogger } from '@xen-orchestra/log'

import resolveRelativeFromFile from './_resolveRelativeFromFile'

@@ -13,18 +14,23 @@ import {
import { fuFooter, fuHeader, checksumStruct } from './_structs'
import { test as mapTestBit } from './_bitmap'

export default async function createSyntheticStream(handler, path) {
const { warn } = createLogger('vhd-lib:createSyntheticStream')

export default async function createSyntheticStream(handler, paths) {
const fds = []
const cleanup = () => {
for (let i = 0, n = fds.length; i < n; ++i) {
handler.closeFile(fds[i]).catch(error => {
console.warn('createReadStream, closeFd', i, error)
warn('error while closing file', {
error,
fd: fds[i],
})
})
}
}
try {
const vhds = []
while (true) {
const open = async path => {
const fd = await handler.openFile(path, 'r')
fds.push(fd)
const vhd = new Vhd(handler, fd)

@@ -32,11 +38,18 @@ export default async function createSyntheticStream(handler, path) {
await vhd.readHeaderAndFooter()
await vhd.readBlockAllocationTable()

if (vhd.footer.diskType === DISK_TYPE_DYNAMIC) {
break
return vhd
}
if (typeof paths === 'string') {
let path = paths
let vhd
while ((vhd = await open(path)).footer.diskType !== DISK_TYPE_DYNAMIC) {
path = resolveRelativeFromFile(path, vhd.header.parentUnicodeName)
}
} else {
for (const path of paths) {
await open(path)
}

path = resolveRelativeFromFile(path, vhd.header.parentUnicodeName)
}
const nVhds = vhds.length
```
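A hedged usage sketch of the new signature: the handler comes from `@xen-orchestra/fs` as elsewhere in this compare, `createSyntheticStream` is assumed to be available as a named export of `vhd-lib`, and the VHD paths are placeholders:

```js
// Sketch only: assumes the vhd-lib named export and placeholder VHD paths.
import { getHandler } from '@xen-orchestra/fs'
import { createSyntheticStream } from 'vhd-lib'

async function main() {
  const handler = getHandler({ url: 'file://' })

  // single path: the parent chain is resolved from each VHD's header
  const fromChild = await createSyntheticStream(
    handler,
    '/backups/vdis/disk/child.vhd'
  )

  // explicit chain, ordered child to ancestor
  const fromChain = await createSyntheticStream(handler, [
    '/backups/vdis/disk/child.vhd',
    '/backups/vdis/disk/parent.vhd',
  ])

  return [fromChild, fromChain]
}

main().catch(console.error)
```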
In vhd-lib's `Vhd` module, the local `VHD_UTIL_DEBUG` switch is replaced by the shared logger:

```diff
@@ -1,4 +1,5 @@
 import assert from 'assert'
+import { createLogger } from '@xen-orchestra/log'

 import checkFooter from './_checkFooter'
 import checkHeader from './_checkHeader'
@@ -15,10 +16,7 @@ import {
   SECTOR_SIZE,
 } from './_constants'

-const VHD_UTIL_DEBUG = 0
-const debug = VHD_UTIL_DEBUG
-  ? str => console.log(`[vhd-merge]${str}`)
-  : () => null
+const { debug } = createLogger('vhd-lib:Vhd')

 // ===================================================================
 //
```
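The same `createLogger` helper appears in both vhd-lib diffs; a minimal sketch of the pattern as used here:

```js
// Pattern from the two diffs above: a namespaced logger instead of ad-hoc
// console.log / console.warn calls.
import { createLogger } from '@xen-orchestra/log'

const { debug, warn } = createLogger('vhd-lib:Vhd')

debug('reading block allocation table')
warn('error while closing file', { fd: 3 })
```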
@@ -48,7 +48,7 @@
|
||||
"@babel/core": "^7.1.5",
|
||||
"@babel/preset-env": "^7.1.5",
|
||||
"babel-plugin-lodash": "^3.2.11",
|
||||
"cross-env": "^5.1.4",
|
||||
"cross-env": "^6.0.3",
|
||||
"rimraf": "^3.0.0"
|
||||
},
|
||||
"scripts": {
|
||||
|
||||
@@ -46,7 +46,7 @@
|
||||
"make-error": "^1.3.0",
|
||||
"minimist": "^1.2.0",
|
||||
"ms": "^2.1.1",
|
||||
"promise-toolbox": "^0.13.0",
|
||||
"promise-toolbox": "^0.14.0",
|
||||
"pw": "0.0.4",
|
||||
"xmlrpc": "^1.3.2",
|
||||
"xo-collection": "^0.4.1"
|
||||
@@ -60,7 +60,7 @@
|
||||
"@babel/plugin-proposal-optional-chaining": "^7.2.0",
|
||||
"@babel/preset-env": "^7.0.0",
|
||||
"babel-plugin-lodash": "^3.3.2",
|
||||
"cross-env": "^5.1.3",
|
||||
"cross-env": "^6.0.3",
|
||||
"rimraf": "^3.0.0"
|
||||
},
|
||||
"scripts": {
|
||||
|
||||
@@ -8,7 +8,7 @@ import execPromise from 'exec-promise'
|
||||
import minimist from 'minimist'
|
||||
import pw from 'pw'
|
||||
import { asCallback, fromCallback } from 'promise-toolbox'
|
||||
import { filter, find, isArray } from 'lodash'
|
||||
import { filter, find } from 'lodash'
|
||||
import { getBoundPropertyDescriptor } from 'bind-property-descriptor'
|
||||
import { start as createRepl } from 'repl'
|
||||
|
||||
@@ -110,7 +110,7 @@ const main = async args => {
|
||||
asCallback.call(
|
||||
fromCallback(cb => {
|
||||
evaluate.call(repl, cmd, context, filename, cb)
|
||||
}).then(value => (isArray(value) ? Promise.all(value) : value)),
|
||||
}).then(value => (Array.isArray(value) ? Promise.all(value) : value)),
|
||||
cb
|
||||
)
|
||||
})(repl.eval)
|
||||
|
||||
@@ -4,7 +4,7 @@ import kindOf from 'kindof'
|
||||
import ms from 'ms'
|
||||
import httpRequest from 'http-request-plus'
|
||||
import { EventEmitter } from 'events'
|
||||
import { isArray, map, noop, omit } from 'lodash'
|
||||
import { map, noop, omit } from 'lodash'
|
||||
import {
|
||||
cancelable,
|
||||
defer,
|
||||
@@ -113,7 +113,7 @@ export class Xapi extends EventEmitter {
|
||||
this._watchedTypes = undefined
|
||||
const { watchEvents } = opts
|
||||
if (watchEvents !== false) {
|
||||
if (isArray(watchEvents)) {
|
||||
if (Array.isArray(watchEvents)) {
|
||||
this._watchedTypes = watchEvents
|
||||
}
|
||||
this.watchEvents()
|
||||
@@ -1075,7 +1075,7 @@ export class Xapi extends EventEmitter {
|
||||
const $field = (field in RESERVED_FIELDS ? '$$' : '$') + field
|
||||
|
||||
const value = data[field]
|
||||
if (isArray(value)) {
|
||||
if (Array.isArray(value)) {
|
||||
if (value.length === 0 || isOpaqueRef(value[0])) {
|
||||
getters[$field] = function() {
|
||||
const value = this[field]
|
||||
|
||||
@@ -38,16 +38,16 @@
|
||||
"human-format": "^0.10.0",
|
||||
"l33teral": "^3.0.3",
|
||||
"lodash": "^4.17.4",
|
||||
"micromatch": "^3.1.3",
|
||||
"micromatch": "^4.0.2",
|
||||
"mkdirp": "^0.5.1",
|
||||
"nice-pipe": "0.0.0",
|
||||
"pretty-ms": "^4.0.0",
|
||||
"pretty-ms": "^5.0.0",
|
||||
"progress-stream": "^2.0.0",
|
||||
"promise-toolbox": "^0.13.0",
|
||||
"promise-toolbox": "^0.14.0",
|
||||
"pump": "^3.0.0",
|
||||
"pw": "^0.0.4",
|
||||
"strip-indent": "^2.0.0",
|
||||
"xdg-basedir": "^3.0.0",
|
||||
"strip-indent": "^3.0.0",
|
||||
"xdg-basedir": "^4.0.0",
|
||||
"xo-lib": "^0.9.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
@@ -56,7 +56,7 @@
|
||||
"@babel/preset-env": "^7.0.0",
|
||||
"@babel/preset-flow": "^7.0.0",
|
||||
"babel-plugin-lodash": "^3.3.2",
|
||||
"cross-env": "^5.1.3",
|
||||
"cross-env": "^6.0.3",
|
||||
"rimraf": "^3.0.0"
|
||||
},
|
||||
"scripts": {
|
||||
|
||||
@@ -7,7 +7,6 @@ const promisify = require('bluebird').promisify
|
||||
const readFile = promisify(require('fs').readFile)
|
||||
const writeFile = promisify(require('fs').writeFile)
|
||||
|
||||
const assign = require('lodash/assign')
|
||||
const l33t = require('l33teral')
|
||||
const mkdirp = promisify(require('mkdirp'))
|
||||
const xdgBasedir = require('xdg-basedir')
|
||||
@@ -41,7 +40,7 @@ const save = (exports.save = function(config) {
|
||||
|
||||
exports.set = function(data) {
|
||||
return load().then(function(config) {
|
||||
return save(assign(config, data))
|
||||
return save(Object.assign(config, data))
|
||||
})
|
||||
}
|
||||
|
||||
|
||||
@@ -17,7 +17,6 @@ const getKeys = require('lodash/keys')
|
||||
const hrp = require('http-request-plus').default
|
||||
const humanFormat = require('human-format')
|
||||
const identity = require('lodash/identity')
|
||||
const isArray = require('lodash/isArray')
|
||||
const isObject = require('lodash/isObject')
|
||||
const micromatch = require('micromatch')
|
||||
const nicePipe = require('nice-pipe')
|
||||
@@ -298,7 +297,11 @@ async function listCommands(args) {
|
||||
str.push(
|
||||
name,
|
||||
'=<',
|
||||
type == null ? 'unknown type' : isArray(type) ? type.join('|') : type,
|
||||
type == null
|
||||
? 'unknown type'
|
||||
: Array.isArray(type)
|
||||
? type.join('|')
|
||||
: type,
|
||||
'>'
|
||||
)
|
||||
|
||||
|
||||
@@ -34,7 +34,7 @@
|
||||
"@babel/cli": "^7.0.0",
|
||||
"@babel/core": "^7.0.0",
|
||||
"@babel/preset-env": "^7.0.0",
|
||||
"cross-env": "^5.1.3",
|
||||
"cross-env": "^6.0.3",
|
||||
"event-to-promise": "^0.8.0",
|
||||
"rimraf": "^3.0.0"
|
||||
},
|
||||
|
||||
@@ -36,7 +36,7 @@
|
||||
"@babel/core": "^7.0.0",
|
||||
"@babel/preset-env": "^7.0.0",
|
||||
"babel-plugin-lodash": "^3.3.2",
|
||||
"cross-env": "^5.1.3",
|
||||
"cross-env": "^6.0.3",
|
||||
"rimraf": "^3.0.0"
|
||||
},
|
||||
"scripts": {
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import { BaseError } from 'make-error'
|
||||
import { isArray, iteratee } from 'lodash'
|
||||
import { iteratee } from 'lodash'
|
||||
|
||||
class XoError extends BaseError {
|
||||
constructor({ code, message, data }) {
|
||||
@@ -77,7 +77,7 @@ export const serverUnreachable = create(9, objectId => ({
|
||||
}))
|
||||
|
||||
export const invalidParameters = create(10, (message, errors) => {
|
||||
if (isArray(message)) {
|
||||
if (Array.isArray(message)) {
|
||||
errors = message
|
||||
message = undefined
|
||||
}
|
||||
|
||||
@@ -41,7 +41,7 @@
|
||||
"@babel/core": "^7.0.0",
|
||||
"@babel/preset-env": "^7.0.0",
|
||||
"babel-plugin-lodash": "^3.3.2",
|
||||
"cross-env": "^5.1.3",
|
||||
"cross-env": "^6.0.3",
|
||||
"rimraf": "^3.0.0"
|
||||
},
|
||||
"scripts": {
|
||||
|
||||
@@ -32,7 +32,7 @@
|
||||
"@babel/core": "^7.0.0",
|
||||
"@babel/preset-env": "^7.0.0",
|
||||
"babel-plugin-lodash": "^3.3.2",
|
||||
"cross-env": "^5.1.3",
|
||||
"cross-env": "^6.0.3",
|
||||
"deep-freeze": "^0.0.1",
|
||||
"rimraf": "^3.0.0"
|
||||
},
|
||||
|
||||
@@ -40,7 +40,7 @@
|
||||
"@babel/core": "^7.0.0",
|
||||
"@babel/preset-env": "^7.0.0",
|
||||
"babel-plugin-lodash": "^3.3.2",
|
||||
"cross-env": "^5.1.3",
|
||||
"cross-env": "^6.0.3",
|
||||
"rimraf": "^3.0.0"
|
||||
},
|
||||
"scripts": {
|
||||
|
||||
@@ -39,14 +39,14 @@
|
||||
"inquirer": "^7.0.0",
|
||||
"ldapjs": "^1.0.1",
|
||||
"lodash": "^4.17.4",
|
||||
"promise-toolbox": "^0.13.0"
|
||||
"promise-toolbox": "^0.14.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@babel/cli": "^7.0.0",
|
||||
"@babel/core": "^7.0.0",
|
||||
"@babel/preset-env": "^7.0.0",
|
||||
"babel-plugin-lodash": "^3.3.2",
|
||||
"cross-env": "^5.1.3",
|
||||
"cross-env": "^6.0.3",
|
||||
"rimraf": "^3.0.0"
|
||||
},
|
||||
"scripts": {
|
||||
|
||||
@@ -40,7 +40,7 @@
|
||||
"@babel/core": "^7.0.0",
|
||||
"@babel/preset-env": "^7.0.0",
|
||||
"babel-preset-env": "^1.6.1",
|
||||
"cross-env": "^5.1.3",
|
||||
"cross-env": "^6.0.3",
|
||||
"rimraf": "^3.0.0"
|
||||
},
|
||||
"scripts": {
|
||||
|
||||
@@ -48,7 +48,7 @@
|
||||
"@babel/plugin-proposal-optional-chaining": "^7.2.0",
|
||||
"@babel/preset-env": "^7.0.0",
|
||||
"babel-plugin-lodash": "^3.3.2",
|
||||
"cross-env": "^5.1.3",
|
||||
"cross-env": "^6.0.3",
|
||||
"rimraf": "^3.0.0"
|
||||
},
|
||||
"scripts": {
|
||||
|
||||
@@ -354,7 +354,7 @@ class BackupReportsXoPlugin {
|
||||
log.jobName
|
||||
} ${STATUS_ICON[log.status]}`,
|
||||
markdown: toMarkdown(markdown),
|
||||
nagiosStatus: log.status === 'success' ? 0 : 2,
|
||||
success: log.status === 'success',
|
||||
nagiosMarkdown:
|
||||
log.status === 'success'
|
||||
? `[Xen Orchestra] [Success] Metadata backup report for ${log.jobName}`
|
||||
@@ -390,7 +390,7 @@ class BackupReportsXoPlugin {
|
||||
log.status
|
||||
} − Backup report for ${jobName} ${STATUS_ICON[log.status]}`,
|
||||
markdown: toMarkdown(markdown),
|
||||
nagiosStatus: 2,
|
||||
success: false,
|
||||
nagiosMarkdown: `[Xen Orchestra] [${log.status}] Backup report for ${jobName} - Error : ${log.result.message}`,
|
||||
})
|
||||
}
|
||||
@@ -646,7 +646,7 @@ class BackupReportsXoPlugin {
|
||||
subject: `[Xen Orchestra] ${log.status} − Backup report for ${jobName} ${
|
||||
STATUS_ICON[log.status]
|
||||
}`,
|
||||
nagiosStatus: log.status === 'success' ? 0 : 2,
|
||||
success: log.status === 'success',
|
||||
nagiosMarkdown:
|
||||
log.status === 'success'
|
||||
? `[Xen Orchestra] [Success] Backup report for ${jobName}`
|
||||
@@ -656,7 +656,7 @@ class BackupReportsXoPlugin {
|
||||
})
|
||||
}
|
||||
|
||||
_sendReport({ markdown, subject, nagiosStatus, nagiosMarkdown }) {
|
||||
_sendReport({ markdown, subject, success, nagiosMarkdown }) {
|
||||
const xo = this._xo
|
||||
return Promise.all([
|
||||
xo.sendEmail !== undefined &&
|
||||
@@ -676,9 +676,14 @@ class BackupReportsXoPlugin {
|
||||
}),
|
||||
xo.sendPassiveCheck !== undefined &&
|
||||
xo.sendPassiveCheck({
|
||||
status: nagiosStatus,
|
||||
status: success ? 0 : 2,
|
||||
message: nagiosMarkdown,
|
||||
}),
|
||||
xo.sendIcinga2Status !== undefined &&
|
||||
xo.sendIcinga2Status({
|
||||
status: success ? 'OK' : 'CRITICAL',
|
||||
message: markdown,
|
||||
}),
|
||||
])
|
||||
}
|
||||
|
||||
@@ -708,7 +713,7 @@ class BackupReportsXoPlugin {
|
||||
return this._sendReport({
|
||||
subject: `[Xen Orchestra] ${globalStatus} ${icon}`,
|
||||
markdown,
|
||||
nagiosStatus: 2,
|
||||
success: false,
|
||||
nagiosMarkdown: `[Xen Orchestra] [${globalStatus}] Error : ${error.message}`,
|
||||
})
|
||||
}
|
||||
@@ -904,7 +909,7 @@ class BackupReportsXoPlugin {
|
||||
? ICON_FAILURE
|
||||
: ICON_SKIPPED
|
||||
}`,
|
||||
nagiosStatus: globalSuccess ? 0 : 2,
|
||||
success: globalSuccess,
|
||||
nagiosMarkdown: globalSuccess
|
||||
? `[Xen Orchestra] [Success] Backup report for ${tag}`
|
||||
: `[Xen Orchestra] [${
|
||||
|
||||
@@ -1,10 +0,0 @@
|
||||
/examples/
|
||||
example.js
|
||||
example.js.map
|
||||
*.example.js
|
||||
*.example.js.map
|
||||
|
||||
/test/
|
||||
/tests/
|
||||
*.spec.js
|
||||
*.spec.js.map
|
||||
@@ -1,54 +0,0 @@
|
||||
{
|
||||
"name": "xo-server-cloud",
|
||||
"version": "0.3.0",
|
||||
"license": "ISC",
|
||||
"description": "",
|
||||
"keywords": [
|
||||
"cloud",
|
||||
"orchestra",
|
||||
"plugin",
|
||||
"xen",
|
||||
"xen-orchestra",
|
||||
"xo-server"
|
||||
],
|
||||
"homepage": "https://github.com/vatesfr/xen-orchestra/tree/master/packages/xo-server-cloud",
|
||||
"bugs": "https://github.com/vatesfr/xen-orchestra/issues",
|
||||
"repository": {
|
||||
"directory": "packages/xo-server-cloud",
|
||||
"type": "git",
|
||||
"url": "https://github.com/vatesfr/xen-orchestra.git"
|
||||
},
|
||||
"author": {
|
||||
"name": "Pierre Donias",
|
||||
"email": "pierre.donias@gmail.com"
|
||||
},
|
||||
"preferGlobal": false,
|
||||
"main": "dist/",
|
||||
"bin": {},
|
||||
"files": [
|
||||
"dist/"
|
||||
],
|
||||
"engines": {
|
||||
"node": ">=6"
|
||||
},
|
||||
"dependencies": {
|
||||
"http-request-plus": "^0.8.0",
|
||||
"jsonrpc-websocket-client": "^0.5.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@babel/cli": "^7.0.0",
|
||||
"@babel/core": "^7.0.0",
|
||||
"@babel/preset-env": "^7.0.0",
|
||||
"cross-env": "^5.1.3",
|
||||
"rimraf": "^3.0.0"
|
||||
},
|
||||
"scripts": {
|
||||
"build": "cross-env NODE_ENV=production babel --source-maps --out-dir=dist/ src/",
|
||||
"clean": "rimraf dist/",
|
||||
"dev": "cross-env NODE_ENV=development babel --watch --source-maps --out-dir=dist/ src/",
|
||||
"prebuild": "yarn run clean",
|
||||
"predev": "yarn run prebuild",
|
||||
"prepublishOnly": "yarn run build"
|
||||
},
|
||||
"private": true
|
||||
}
|
||||
@@ -1,208 +0,0 @@
|
||||
import Client, { createBackoff } from 'jsonrpc-websocket-client'
|
||||
import hrp from 'http-request-plus'
|
||||
|
||||
const WS_URL = 'ws://localhost:9001'
|
||||
const HTTP_URL = 'http://localhost:9002'
|
||||
|
||||
// ===================================================================
|
||||
|
||||
class XoServerCloud {
|
||||
constructor({ xo }) {
|
||||
this._xo = xo
|
||||
|
||||
// Defined in configure().
|
||||
this._conf = null
|
||||
this._key = null
|
||||
}
|
||||
|
||||
configure(configuration) {
|
||||
this._conf = configuration
|
||||
}
|
||||
|
||||
async load() {
|
||||
const getResourceCatalog = this._getCatalog.bind(this)
|
||||
getResourceCatalog.description =
|
||||
"Get the list of user's available resources"
|
||||
getResourceCatalog.permission = 'admin'
|
||||
getResourceCatalog.params = {
|
||||
filters: { type: 'object', optional: true },
|
||||
}
|
||||
|
||||
const registerResource = ({ namespace }) =>
|
||||
this._registerResource(namespace)
|
||||
registerResource.description = 'Register a resource via cloud plugin'
|
||||
registerResource.params = {
|
||||
namespace: {
|
||||
type: 'string',
|
||||
},
|
||||
}
|
||||
registerResource.permission = 'admin'
|
||||
|
||||
const downloadAndInstallResource = this._downloadAndInstallResource.bind(
|
||||
this
|
||||
)
|
||||
|
||||
downloadAndInstallResource.description =
|
||||
'Download and install a resource via cloud plugin'
|
||||
|
||||
downloadAndInstallResource.params = {
|
||||
id: { type: 'string' },
|
||||
namespace: { type: 'string' },
|
||||
version: { type: 'string' },
|
||||
sr: { type: 'string' },
|
||||
}
|
||||
|
||||
downloadAndInstallResource.resolve = {
|
||||
sr: ['sr', 'SR', 'administrate'],
|
||||
}
|
||||
|
||||
downloadAndInstallResource.permission = 'admin'
|
||||
|
||||
this._unsetApiMethods = this._xo.addApiMethods({
|
||||
cloud: {
|
||||
downloadAndInstallResource,
|
||||
getResourceCatalog,
|
||||
registerResource,
|
||||
},
|
||||
})
|
||||
this._unsetRequestResource = this._xo.defineProperty(
|
||||
'requestResource',
|
||||
this._requestResource,
|
||||
this
|
||||
)
|
||||
|
||||
const updater = (this._updater = new Client(WS_URL))
|
||||
const connect = () =>
|
||||
updater.open(createBackoff()).catch(error => {
|
||||
console.error('xo-server-cloud: fail to connect to updater', error)
|
||||
|
||||
return connect()
|
||||
})
|
||||
updater.on('closed', connect).on('scheduledAttempt', ({ delay }) => {
|
||||
console.warn('xo-server-cloud: next attempt in %s ms', delay)
|
||||
})
|
||||
connect()
|
||||
}
|
||||
|
||||
unload() {
|
||||
this._unsetApiMethods()
|
||||
this._unsetRequestResource()
|
||||
}
|
||||
|
||||
// ----------------------------------------------------------------
|
||||
|
||||
async _getCatalog({ filters } = {}) {
|
||||
const catalog = await this._updater.call('getResourceCatalog', { filters })
|
||||
|
||||
if (!catalog) {
|
||||
throw new Error('cannot get catalog')
|
||||
}
|
||||
|
||||
return catalog
|
||||
}
|
||||
|
||||
// ----------------------------------------------------------------
|
||||
|
||||
async _getNamespaces() {
|
||||
const catalog = await this._getCatalog()
|
||||
|
||||
if (!catalog._namespaces) {
|
||||
throw new Error('cannot get namespaces')
|
||||
}
|
||||
|
||||
return catalog._namespaces
|
||||
}
|
||||
|
||||
// ----------------------------------------------------------------
|
||||
|
||||
async _downloadAndInstallResource({ id, namespace, sr, version }) {
|
||||
const stream = await this._requestResource({
|
||||
hub: true,
|
||||
id,
|
||||
namespace,
|
||||
version,
|
||||
})
|
||||
const vm = await this._xo.getXapi(sr.$poolId).importVm(stream, {
|
||||
srId: sr.id,
|
||||
type: 'xva',
|
||||
})
|
||||
await vm.update_other_config({
|
||||
'xo:resource:namespace': namespace,
|
||||
'xo:resource:xva:version': version,
|
||||
'xo:resource:xva:id': id,
|
||||
})
|
||||
}
|
||||
|
||||
// ----------------------------------------------------------------
|
||||
|
||||
async _registerResource(namespace) {
|
||||
const _namespace = (await this._getNamespaces())[namespace]
|
||||
|
||||
if (_namespace === undefined) {
|
||||
throw new Error(`${namespace} is not available`)
|
||||
}
|
||||
|
||||
if (_namespace.registered || _namespace.pending) {
|
||||
throw new Error(`already registered for ${namespace}`)
|
||||
}
|
||||
|
||||
return this._updater.call('registerResource', { namespace })
|
||||
}
|
||||
|
||||
// ----------------------------------------------------------------
|
||||
|
||||
async _getNamespaceCatalog({ hub, namespace }) {
|
||||
const namespaceCatalog = (await this._getCatalog({ filters: { hub } }))[
|
||||
namespace
|
||||
]
|
||||
|
||||
if (!namespaceCatalog) {
|
||||
throw new Error(`cannot get catalog: ${namespace} not registered`)
|
||||
}
|
||||
|
||||
return namespaceCatalog
|
||||
}
|
||||
|
||||
// ----------------------------------------------------------------
|
||||
|
||||
async _requestResource({ hub = false, id, namespace, version }) {
|
||||
const _namespace = (await this._getNamespaces())[namespace]
|
||||
|
||||
if (!hub && (!_namespace || !_namespace.registered)) {
|
||||
throw new Error(`cannot get resource: ${namespace} not registered`)
|
||||
}
|
||||
|
||||
const { _token: token } = await this._getNamespaceCatalog({
|
||||
hub,
|
||||
namespace,
|
||||
})
|
||||
|
||||
// 2018-03-20 Extra check: getResourceDownloadToken seems to be called without a token in some cases
|
||||
if (token === undefined) {
|
||||
throw new Error(`${namespace} namespace token is undefined`)
|
||||
}
|
||||
|
||||
const downloadToken = await this._updater.call('getResourceDownloadToken', {
|
||||
token,
|
||||
id,
|
||||
version,
|
||||
})
|
||||
|
||||
if (!downloadToken) {
|
||||
throw new Error('cannot get download token')
|
||||
}
|
||||
|
||||
const response = await hrp(HTTP_URL, {
|
||||
headers: {
|
||||
Authorization: `Bearer ${downloadToken}`,
|
||||
},
|
||||
})
|
||||
|
||||
// currently needed for XenApi#putResource()
|
||||
response.length = response.headers['content-length']
|
||||
|
||||
return response
|
||||
}
|
||||
}
|
||||
|
||||
export default opts => new XoServerCloud(opts)
|
||||
@@ -31,7 +31,7 @@
|
||||
"node": ">=6"
|
||||
},
|
||||
"dependencies": {
|
||||
"@xen-orchestra/cron": "^1.0.4",
|
||||
"@xen-orchestra/cron": "^1.0.5",
|
||||
"lodash": "^4.16.2"
|
||||
},
|
||||
"devDependencies": {
|
||||
|
||||
@@ -21,7 +21,7 @@
|
||||
"node": ">=6"
|
||||
},
|
||||
"dependencies": {
|
||||
"@xen-orchestra/cron": "^1.0.4",
|
||||
"@xen-orchestra/cron": "^1.0.5",
|
||||
"d3-time-format": "^2.1.1",
|
||||
"json5": "^2.0.1",
|
||||
"lodash": "^4.17.4"
|
||||
@@ -32,7 +32,7 @@
|
||||
"@babel/preset-env": "^7.0.0",
|
||||
"@babel/preset-flow": "^7.0.0",
|
||||
"babel-plugin-lodash": "^3.3.2",
|
||||
"cross-env": "^5.1.3",
|
||||
"cross-env": "^6.0.3",
|
||||
"rimraf": "^3.0.0"
|
||||
},
|
||||
"scripts": {
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import JSON5 from 'json5'
|
||||
import { createSchedule } from '@xen-orchestra/cron'
|
||||
import { assign, forOwn, map, mean } from 'lodash'
|
||||
import { forOwn, map, mean } from 'lodash'
|
||||
import { utcParse } from 'd3-time-format'
|
||||
|
||||
const COMPARATOR_FN = {
|
||||
@@ -483,7 +483,7 @@ ${monitorBodies.join('\n')}`
|
||||
result.rrd = await this.getRrd(result.object, observationPeriod)
|
||||
if (result.rrd !== null) {
|
||||
const data = parseData(result.rrd, result.object.uuid)
|
||||
assign(result, {
|
||||
Object.assign(result, {
|
||||
data,
|
||||
value: data.getDisplayableValue(),
|
||||
shouldAlarm: data.shouldAlarm(),
|
||||
@@ -496,7 +496,7 @@ ${monitorBodies.join('\n')}`
|
||||
definition.alarmTriggerLevel
|
||||
)
|
||||
const data = getter(result.object)
|
||||
assign(result, {
|
||||
Object.assign(result, {
|
||||
value: data.getDisplayableValue(),
|
||||
shouldAlarm: data.shouldAlarm(),
|
||||
})
|
||||
|
||||
@@ -25,12 +25,12 @@
|
||||
"@babel/plugin-proposal-nullish-coalescing-operator": "^7.4.4",
|
||||
"@babel/plugin-proposal-optional-chaining": "^7.2.0",
|
||||
"@babel/preset-env": "^7.4.4",
|
||||
"cross-env": "^5.2.0"
|
||||
"cross-env": "^6.0.3"
|
||||
},
|
||||
"dependencies": {
|
||||
"@xen-orchestra/log": "^0.2.0",
|
||||
"lodash": "^4.17.11",
|
||||
"node-openssl-cert": "^0.0.97",
|
||||
"node-openssl-cert": "^0.0.98",
|
||||
"promise-toolbox": "^0.14.0",
|
||||
"uuid": "^3.3.2"
|
||||
},
|
||||
|
||||
File diff suppressed because it is too large
@@ -0,0 +1,202 @@
|
||||
import createLogger from '@xen-orchestra/log'
|
||||
import { filter, find, forOwn, map, sample } from 'lodash'
|
||||
|
||||
// =============================================================================
|
||||
|
||||
const log = createLogger('xo:xo-server:sdn-controller:private-network')
|
||||
|
||||
// =============================================================================
|
||||
|
||||
const CHARS = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789?!'
|
||||
const createPassword = () =>
|
||||
Array.from({ length: 16 }, _ => sample(CHARS)).join('')
|
||||
|
||||
// =============================================================================
|
||||
|
||||
export class PrivateNetwork {
|
||||
constructor(controller, uuid) {
|
||||
this.controller = controller
|
||||
this.uuid = uuid
|
||||
this.networks = {}
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
async addHost(host) {
|
||||
if (host.$ref === this.center?.$ref) {
|
||||
// Nothing to do
|
||||
return
|
||||
}
|
||||
|
||||
const hostClient = this.controller.ovsdbClients[host.$ref]
|
||||
if (hostClient === undefined) {
|
||||
log.error('No OVSDB client found', {
|
||||
host: host.name_label,
|
||||
pool: host.$pool.name_label,
|
||||
})
|
||||
return
|
||||
}
|
||||
|
||||
const centerClient = this.controller.ovsdbClients[this.center.$ref]
|
||||
if (centerClient === undefined) {
|
||||
log.error('No OVSDB client found for star-center', {
|
||||
privateNetwork: this.uuid,
|
||||
host: this.center.name_label,
|
||||
pool: this.center.$pool.name_label,
|
||||
})
|
||||
return
|
||||
}
|
||||
|
||||
const network = this.networks[host.$pool.uuid]
|
||||
const centerNetwork = this.networks[this.center.$pool.uuid]
|
||||
const otherConfig = network.other_config
|
||||
const encapsulation =
|
||||
otherConfig['xo:sdn-controller:encapsulation'] ?? 'gre'
|
||||
const vni = otherConfig['xo:sdn-controller:vni'] ?? '0'
|
||||
const password =
|
||||
otherConfig['xo:sdn-controller:encrypted'] === 'true'
|
||||
? createPassword()
|
||||
: undefined
|
||||
|
||||
let bridgeName
|
||||
try {
|
||||
;[bridgeName] = await Promise.all([
|
||||
hostClient.addInterfaceAndPort(
|
||||
network,
|
||||
centerClient.host.address,
|
||||
encapsulation,
|
||||
vni,
|
||||
password,
|
||||
this.uuid
|
||||
),
|
||||
centerClient.addInterfaceAndPort(
|
||||
centerNetwork,
|
||||
hostClient.host.address,
|
||||
encapsulation,
|
||||
vni,
|
||||
password,
|
||||
this.uuid
|
||||
),
|
||||
])
|
||||
} catch (error) {
|
||||
log.error('Error while connecting host to private network', {
|
||||
error,
|
||||
privateNetwork: this.uuid,
|
||||
network: network.name_label,
|
||||
host: host.name_label,
|
||||
pool: host.$pool.name_label,
|
||||
})
|
||||
return
|
||||
}
|
||||
log.info('Host added', {
|
||||
privateNetwork: this.uuid,
|
||||
network: network.name_label,
|
||||
host: host.name_label,
|
||||
pool: host.$pool.name_label,
|
||||
})
|
||||
|
||||
return bridgeName
|
||||
}
|
||||
|
||||
addNetwork(network) {
|
||||
this.networks[network.$pool.uuid] = network
|
||||
log.info('Adding network', {
|
||||
privateNetwork: this.uuid,
|
||||
network: network.name_label,
|
||||
pool: network.$pool.name_label,
|
||||
})
|
||||
if (this.center === undefined) {
|
||||
return this.electNewCenter()
|
||||
}
|
||||
|
||||
const hosts = filter(network.$pool.$xapi.objects.all, { $type: 'host' })
|
||||
return Promise.all(
|
||||
map(hosts, async host => {
|
||||
const hostClient = this.controller.ovsdbClients[host.$ref]
|
||||
const network = this.networks[host.$pool.uuid]
|
||||
await hostClient.resetForNetwork(network, this.uuid)
|
||||
await this.addHost(host)
|
||||
})
|
||||
)
|
||||
}
|
||||
|
||||
async electNewCenter() {
|
||||
delete this.center
|
||||
|
||||
// TODO: make it random
|
||||
const hosts = this._getHosts()
|
||||
for (const host of hosts) {
|
||||
const pif = find(host.$PIFs, {
|
||||
network: this.networks[host.$pool.uuid].$ref,
|
||||
})
|
||||
if (pif?.currently_attached && host.$metrics.live) {
|
||||
this.center = host
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
if (this.center === undefined) {
|
||||
log.error('No available host to elect new star-center', {
|
||||
privateNetwork: this.uuid,
|
||||
})
|
||||
return
|
||||
}
|
||||
|
||||
await this._reset()
|
||||
|
||||
// Recreate star topology
|
||||
await Promise.all(map(hosts, host => this.addHost(host)))
|
||||
|
||||
log.info('New star-center elected', {
|
||||
center: this.center.name_label,
|
||||
privateNetwork: this.uuid,
|
||||
})
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
getPools() {
|
||||
const pools = []
|
||||
forOwn(this.networks, network => {
|
||||
pools.push(network.$pool)
|
||||
})
|
||||
return pools
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
_reset() {
|
||||
return Promise.all(
|
||||
map(this._getHosts(), async host => {
|
||||
// Clean old ports and interfaces
|
||||
const hostClient = this.controller.ovsdbClients[host.$ref]
|
||||
if (hostClient === undefined) {
|
||||
return
|
||||
}
|
||||
|
||||
const network = this.networks[host.$pool.uuid]
|
||||
try {
|
||||
await hostClient.resetForNetwork(network, this.uuid)
|
||||
} catch (error) {
|
||||
log.error('Error while resetting private network', {
|
||||
error,
|
||||
privateNetwork: this.uuid,
|
||||
network: network.name_label,
|
||||
host: host.name_label,
|
||||
pool: network.$pool.name_label,
|
||||
})
|
||||
}
|
||||
})
|
||||
)
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
_getHosts() {
|
||||
const hosts = []
|
||||
forOwn(this.networks, network => {
|
||||
hosts.push(...filter(network.$pool.$xapi.objects.all, { $type: 'host' }))
|
||||
})
|
||||
return hosts
|
||||
}
|
||||
}
|
||||
@@ -28,8 +28,7 @@ export class OvsdbClient {
|
||||
|
||||
Attributes on created OVS ports (corresponds to a XAPI `PIF` or `VIF`):
|
||||
- `other_config`:
|
||||
- `xo:sdn-controller:cross-pool` : UUID of the remote network connected by the tunnel
|
||||
- `xo:sdn-controller:private-pool-wide`: `true` if created (and managed) by a SDN Controller
|
||||
- `xo:sdn-controller:private-network-uuid`: UUID of the private network
|
||||
|
||||
Attributes on created OVS interfaces:
|
||||
- `options`:
|
||||
@@ -67,55 +66,49 @@ export class OvsdbClient {
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
async addInterfaceAndPort(
|
||||
networkUuid,
|
||||
networkName,
|
||||
network,
|
||||
remoteAddress,
|
||||
encapsulation,
|
||||
key,
|
||||
password,
|
||||
remoteNetwork
|
||||
privateNetworkUuid
|
||||
) {
|
||||
if (
|
||||
this._adding.find(
|
||||
elem => elem.id === networkUuid && elem.addr === remoteAddress
|
||||
elem => elem.id === network.uuid && elem.addr === remoteAddress
|
||||
) !== undefined
|
||||
) {
|
||||
return
|
||||
}
|
||||
const adding = { id: networkUuid, addr: remoteAddress }
|
||||
const adding = { id: network.uuid, addr: remoteAddress }
|
||||
this._adding.push(adding)
|
||||
|
||||
const socket = await this._connect()
|
||||
const [bridgeUuid, bridgeName] = await this._getBridgeUuidForNetwork(
|
||||
networkUuid,
|
||||
networkName,
|
||||
socket
|
||||
)
|
||||
if (bridgeUuid === undefined) {
|
||||
const bridge = await this._getBridgeForNetwork(network, socket)
|
||||
if (bridge.uuid === undefined) {
|
||||
socket.destroy()
|
||||
this._adding = this._adding.filter(
|
||||
elem => elem.id !== networkUuid || elem.addr !== remoteAddress
|
||||
elem => elem.id !== network.uuid || elem.addr !== remoteAddress
|
||||
)
|
||||
return
|
||||
}
|
||||
|
||||
const alreadyExist = await this._interfaceAndPortAlreadyExist(
|
||||
bridgeUuid,
|
||||
bridgeName,
|
||||
bridge,
|
||||
remoteAddress,
|
||||
socket
|
||||
)
|
||||
if (alreadyExist) {
|
||||
socket.destroy()
|
||||
this._adding = this._adding.filter(
|
||||
elem => elem.id !== networkUuid || elem.addr !== remoteAddress
|
||||
elem => elem.id !== network.uuid || elem.addr !== remoteAddress
|
||||
)
|
||||
return bridgeName
|
||||
return bridge.name
|
||||
}
|
||||
|
||||
const index = ++this._numberOfPortAndInterface
|
||||
const interfaceName = bridgeName + '_iface' + index
|
||||
const portName = bridgeName + '_port' + index
|
||||
const interfaceName = bridge.name + '_iface' + index
|
||||
const portName = bridge.name + '_port' + index
|
||||
|
||||
// Add interface and port to the bridge
|
||||
const options = { remote_ip: remoteAddress, key: key }
|
||||
@@ -139,11 +132,9 @@ export class OvsdbClient {
|
||||
row: {
|
||||
name: portName,
|
||||
interfaces: ['set', [['named-uuid', 'new_iface']]],
|
||||
other_config: toMap(
|
||||
remoteNetwork !== undefined
|
||||
? { 'xo:sdn-controller:cross-pool': remoteNetwork }
|
||||
: { 'xo:sdn-controller:private-pool-wide': 'true' }
|
||||
),
|
||||
other_config: toMap({
|
||||
'xo:sdn-controller:private-network-uuid': privateNetworkUuid,
|
||||
}),
|
||||
},
|
||||
'uuid-name': 'new_port',
|
||||
}
|
||||
@@ -151,7 +142,7 @@ export class OvsdbClient {
|
||||
const mutateBridgeOperation = {
|
||||
op: 'mutate',
|
||||
table: 'Bridge',
|
||||
where: [['_uuid', '==', ['uuid', bridgeUuid]]],
|
||||
where: [['_uuid', '==', ['uuid', bridge.uuid]]],
|
||||
mutations: [['ports', 'insert', ['set', [['named-uuid', 'new_port']]]]],
|
||||
}
|
||||
const params = [
|
||||
@@ -163,7 +154,7 @@ export class OvsdbClient {
|
||||
const jsonObjects = await this._sendOvsdbTransaction(params, socket)
|
||||
|
||||
this._adding = this._adding.filter(
|
||||
elem => elem.id !== networkUuid || elem.addr !== remoteAddress
|
||||
elem => elem.id !== network.uuid || elem.addr !== remoteAddress
|
||||
)
|
||||
if (jsonObjects === undefined) {
|
||||
socket.destroy()
|
||||
@@ -189,8 +180,8 @@ export class OvsdbClient {
|
||||
details,
|
||||
port: portName,
|
||||
interface: interfaceName,
|
||||
bridge: bridgeName,
|
||||
network: networkName,
|
||||
bridge: bridge.name,
|
||||
network: network.name_label,
|
||||
host: this.host.name_label,
|
||||
})
|
||||
socket.destroy()
|
||||
@@ -200,33 +191,24 @@ export class OvsdbClient {
|
||||
log.debug('Port and interface added to bridge', {
|
||||
port: portName,
|
||||
interface: interfaceName,
|
||||
bridge: bridgeName,
|
||||
network: networkName,
|
||||
bridge: bridge.name,
|
||||
network: network.name_label,
|
||||
host: this.host.name_label,
|
||||
})
|
||||
socket.destroy()
|
||||
return bridgeName
|
||||
return bridge.name
|
||||
}
|
||||
|
||||
async resetForNetwork(
|
||||
networkUuid,
|
||||
networkName,
|
||||
crossPoolOnly,
|
||||
remoteNetwork
|
||||
) {
|
||||
async resetForNetwork(network, privateNetworkUuid) {
|
||||
const socket = await this._connect()
|
||||
const [bridgeUuid, bridgeName] = await this._getBridgeUuidForNetwork(
|
||||
networkUuid,
|
||||
networkName,
|
||||
socket
|
||||
)
|
||||
if (bridgeUuid === undefined) {
|
||||
const bridge = await this._getBridgeForNetwork(network, socket)
|
||||
if (bridge.uuid === undefined) {
|
||||
socket.destroy()
|
||||
return
|
||||
}
|
||||
|
||||
// Delete old ports created by a SDN controller
|
||||
const ports = await this._getBridgePorts(bridgeUuid, bridgeName, socket)
|
||||
const ports = await this._getBridgePorts(bridge, socket)
|
||||
if (ports === undefined) {
|
||||
socket.destroy()
|
||||
return
|
||||
@@ -250,15 +232,14 @@ export class OvsdbClient {
|
||||
// 2019-09-03
|
||||
// Compatibility code, to be removed in 1 year.
|
||||
const oldShouldDelete =
|
||||
(config[0] === 'private_pool_wide' && !crossPoolOnly) ||
|
||||
(config[0] === 'cross_pool' &&
|
||||
(remoteNetwork === undefined || remoteNetwork === config[1]))
|
||||
config[0] === 'private_pool_wide' ||
|
||||
config[0] === 'cross_pool' ||
|
||||
config[0] === 'xo:sdn-controller:private-pool-wide' ||
|
||||
config[0] === 'xo:sdn-controller:cross-pool'
|
||||
|
||||
const shouldDelete =
|
||||
(config[0] === 'xo:sdn-controller:private-pool-wide' &&
|
||||
!crossPoolOnly) ||
|
||||
(config[0] === 'xo:sdn-controller:cross-pool' &&
|
||||
(remoteNetwork === undefined || remoteNetwork === config[1]))
|
||||
config[0] === 'xo:sdn-controller:private-network-uuid' &&
|
||||
config[1] === privateNetworkUuid
|
||||
|
||||
if (shouldDelete || oldShouldDelete) {
|
||||
portsToDelete.push(['uuid', portUuid])
|
||||
@@ -275,7 +256,7 @@ export class OvsdbClient {
|
||||
const mutateBridgeOperation = {
|
||||
op: 'mutate',
|
||||
table: 'Bridge',
|
||||
where: [['_uuid', '==', ['uuid', bridgeUuid]]],
|
||||
where: [['_uuid', '==', ['uuid', bridge.uuid]]],
|
||||
mutations: [['ports', 'delete', ['set', portsToDelete]]],
|
||||
}
|
||||
|
||||
@@ -288,7 +269,7 @@ export class OvsdbClient {
|
||||
if (jsonObjects[0].error != null) {
|
||||
log.error('Error while deleting ports from bridge', {
|
||||
error: jsonObjects[0].error,
|
||||
bridge: bridgeName,
|
||||
bridge: bridge.name,
|
||||
host: this.host.name_label,
|
||||
})
|
||||
socket.destroy()
|
||||
@@ -297,7 +278,7 @@ export class OvsdbClient {
|
||||
|
||||
log.debug('Ports deleted from bridge', {
|
||||
nPorts: jsonObjects[0].result[0].count,
|
||||
bridge: bridgeName,
|
||||
bridge: bridge.name,
|
||||
host: this.host.name_label,
|
||||
})
|
||||
socket.destroy()
|
||||
@@ -335,9 +316,9 @@ export class OvsdbClient {
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
async _getBridgeUuidForNetwork(networkUuid, networkName, socket) {
|
||||
async _getBridgeForNetwork(network, socket) {
|
||||
const where = [
|
||||
['external_ids', 'includes', toMap({ 'xs-network-uuids': networkUuid })],
|
||||
['external_ids', 'includes', toMap({ 'xs-network-uuids': network.uuid })],
|
||||
]
|
||||
const selectResult = await this._select(
|
||||
'Bridge',
|
||||
@@ -347,25 +328,17 @@ export class OvsdbClient {
|
||||
)
|
||||
if (selectResult === undefined) {
|
||||
log.error('No bridge found for network', {
|
||||
network: networkName,
|
||||
network: network.name_label,
|
||||
host: this.host.name_label,
|
||||
})
|
||||
return []
|
||||
return {}
|
||||
}
|
||||
|
||||
const bridgeUuid = selectResult._uuid[1]
|
||||
const bridgeName = selectResult.name
|
||||
|
||||
return [bridgeUuid, bridgeName]
|
||||
return { uuid: selectResult._uuid[1], name: selectResult.name }
|
||||
}
|
||||
|
||||
async _interfaceAndPortAlreadyExist(
|
||||
bridgeUuid,
|
||||
bridgeName,
|
||||
remoteAddress,
|
||||
socket
|
||||
) {
|
||||
const ports = await this._getBridgePorts(bridgeUuid, bridgeName, socket)
|
||||
async _interfaceAndPortAlreadyExist(bridge, remoteAddress, socket) {
|
||||
const ports = await this._getBridgePorts(bridge, socket)
|
||||
if (ports === undefined) {
|
||||
return false
|
||||
}
|
||||
@@ -393,8 +366,8 @@ export class OvsdbClient {
|
||||
return false
|
||||
}
|
||||
|
||||
async _getBridgePorts(bridgeUuid, bridgeName, socket) {
|
||||
const where = [['_uuid', '==', ['uuid', bridgeUuid]]]
|
||||
async _getBridgePorts(bridge, socket) {
|
||||
const where = [['_uuid', '==', ['uuid', bridge.uuid]]]
|
||||
const selectResult = await this._select('Bridge', ['ports'], where, socket)
|
||||
if (selectResult === undefined) {
|
||||
return
|
||||
@@ -36,7 +36,7 @@
|
||||
"golike-defer": "^0.4.1",
|
||||
"jest": "^24.8.0",
|
||||
"lodash": "^4.17.11",
|
||||
"promise-toolbox": "^0.13.0",
|
||||
"promise-toolbox": "^0.14.0",
|
||||
"xo-collection": "^0.4.1",
|
||||
"xo-common": "^0.2.0",
|
||||
"xo-lib": "^0.9.0"
|
||||
|
||||
@@ -14,6 +14,7 @@
|
||||
|
||||
[vms]
|
||||
default = ''
|
||||
withOsAndXenTools = ''
|
||||
# vmToBackup = ''
|
||||
|
||||
[templates]
|
||||
|
||||
@@ -154,6 +154,19 @@ class XoConnection extends Xo {
|
||||
})
|
||||
}
|
||||
|
||||
async startTempVm(id, params, withXenTools = false) {
|
||||
await this.call('vm.start', { id, ...params })
|
||||
this._tempResourceDisposers.push('vm.stop', { id, force: true })
|
||||
return this.waitObjectState(id, vm => {
|
||||
if (
|
||||
vm.power_state !== 'Running' ||
|
||||
(withXenTools && vm.xenTools === false)
|
||||
) {
|
||||
throw new Error('retry')
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
async createTempRemote(params) {
|
||||
const remote = await this.call('remote.create', params)
|
||||
this._tempResourceDisposers.push('remote.delete', { id: remote.id })
|
||||
|
||||
@@ -55,6 +55,68 @@ Object {
|
||||
}
|
||||
`;
|
||||
|
||||
exports[`backupNg create and execute backup with enabled offline backup 1`] = `
|
||||
Object {
|
||||
"data": Object {
|
||||
"id": Any<String>,
|
||||
"type": "VM",
|
||||
},
|
||||
"end": Any<Number>,
|
||||
"id": Any<String>,
|
||||
"message": Any<String>,
|
||||
"start": Any<Number>,
|
||||
"status": "success",
|
||||
}
|
||||
`;
|
||||
|
||||
exports[`backupNg create and execute backup with enabled offline backup 2`] = `
|
||||
Object {
|
||||
"data": Any<Object>,
|
||||
"end": Any<Number>,
|
||||
"id": Any<String>,
|
||||
"message": Any<String>,
|
||||
"start": Any<Number>,
|
||||
"status": "success",
|
||||
}
|
||||
`;
|
||||
|
||||
exports[`backupNg create and execute backup with enabled offline backup 3`] = `
|
||||
Object {
|
||||
"end": Any<Number>,
|
||||
"id": Any<String>,
|
||||
"message": Any<String>,
|
||||
"result": Object {
|
||||
"size": Any<Number>,
|
||||
},
|
||||
"start": Any<Number>,
|
||||
"status": "success",
|
||||
}
|
||||
`;
|
||||
|
||||
exports[`backupNg create and execute backup with enabled offline backup 4`] = `
|
||||
Object {
|
||||
"data": Any<Object>,
|
||||
"end": Any<Number>,
|
||||
"id": Any<String>,
|
||||
"message": Any<String>,
|
||||
"start": Any<Number>,
|
||||
"status": "success",
|
||||
}
|
||||
`;
|
||||
|
||||
exports[`backupNg create and execute backup with enabled offline backup 5`] = `
|
||||
Object {
|
||||
"end": Any<Number>,
|
||||
"id": Any<String>,
|
||||
"message": Any<String>,
|
||||
"result": Object {
|
||||
"size": Any<Number>,
|
||||
},
|
||||
"start": Any<Number>,
|
||||
"status": "success",
|
||||
}
|
||||
`;
|
||||
|
||||
exports[`backupNg execute three times a delta backup with 2 remotes, 2 as retention and 2 as fullInterval 1`] = `
|
||||
Object {
|
||||
"data": Object {
|
||||
|
||||
@@ -584,4 +584,110 @@ describe('backupNg', () => {
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
test('create and execute backup with enabled offline backup', async () => {
|
||||
const vm = xo.objects.all[config.vms.withOsAndXenTools]
|
||||
if (vm.power_state !== 'Running') {
|
||||
await xo.startTempVm(vm.id, { force: true }, true)
|
||||
}
|
||||
|
||||
const scheduleTempId = randomId()
|
||||
const srId = config.srs.default
|
||||
const { id: remoteId } = await xo.createTempRemote(config.remotes.default)
|
||||
const backupInput = {
|
||||
mode: 'full',
|
||||
remotes: {
|
||||
id: remoteId,
|
||||
},
|
||||
schedules: {
|
||||
[scheduleTempId]: getDefaultSchedule(),
|
||||
},
|
||||
settings: {
|
||||
'': {
|
||||
offlineBackup: true,
|
||||
},
|
||||
[scheduleTempId]: {
|
||||
copyRetention: 1,
|
||||
exportRetention: 1,
|
||||
},
|
||||
},
|
||||
srs: {
|
||||
id: srId,
|
||||
},
|
||||
vms: {
|
||||
id: vm.id,
|
||||
},
|
||||
}
|
||||
const backup = await xo.createTempBackupNgJob(backupInput)
|
||||
expect(backup.settings[''].offlineBackup).toBe(true)
|
||||
|
||||
const schedule = await xo.getSchedule({ jobId: backup.id })
|
||||
|
||||
await Promise.all([
|
||||
xo.runBackupJob(backup.id, schedule.id, { remotes: [remoteId] }),
|
||||
xo.waitObjectState(vm.id, vm => {
|
||||
if (vm.power_state !== 'Halted') {
|
||||
throw new Error('retry')
|
||||
}
|
||||
}),
|
||||
])
|
||||
|
||||
await xo.waitObjectState(vm.id, vm => {
|
||||
if (vm.power_state !== 'Running') {
|
||||
throw new Error('retry')
|
||||
}
|
||||
})
|
||||
|
||||
const backupLogs = await xo.getBackupLogs({
|
||||
jobId: backup.id,
|
||||
scheduleId: schedule.id,
|
||||
})
|
||||
expect(backupLogs.length).toBe(1)
|
||||
|
||||
const { tasks, ...log } = backupLogs[0]
|
||||
validateRootTask(log, {
|
||||
data: {
|
||||
mode: backupInput.mode,
|
||||
reportWhen: backupInput.settings[''].reportWhen,
|
||||
},
|
||||
jobId: backup.id,
|
||||
jobName: backupInput.name,
|
||||
scheduleId: schedule.id,
|
||||
status: 'success',
|
||||
})
|
||||
|
||||
expect(Array.isArray(tasks)).toBe(true)
|
||||
tasks.forEach(({ tasks, ...vmTask }) => {
|
||||
validateVmTask(vmTask, vm.id, { status: 'success' })
|
||||
|
||||
expect(Array.isArray(tasks)).toBe(true)
|
||||
tasks.forEach(({ tasks, ...subTask }) => {
|
||||
expect(subTask.message).not.toBe('snapshot')
|
||||
|
||||
if (subTask.message === 'export') {
|
||||
validateExportTask(
|
||||
subTask,
|
||||
subTask.data.type === 'remote' ? remoteId : srId,
|
||||
{
|
||||
data: expect.any(Object),
|
||||
status: 'success',
|
||||
}
|
||||
)
|
||||
|
||||
expect(Array.isArray(tasks)).toBe(true)
|
||||
tasks.forEach(operationTask => {
|
||||
if (
|
||||
operationTask.message === 'transfer' ||
|
||||
operationTask.message === 'merge'
|
||||
) {
|
||||
validateOperationTask(operationTask, {
|
||||
result: { size: expect.any(Number) },
|
||||
status: 'success',
|
||||
})
|
||||
}
|
||||
})
|
||||
}
|
||||
})
|
||||
})
|
||||
}, 200e3)
|
||||
})
|
||||
|
||||
@@ -6,7 +6,7 @@ import expect from 'must'
|
||||
// ===================================================================
|
||||
|
||||
import { getConfig, getMainConnection, getSrId, waitObjectState } from './util'
|
||||
import { map, assign } from 'lodash'
|
||||
import { map } from 'lodash'
|
||||
import eventToPromise from 'event-to-promise'
|
||||
|
||||
// ===================================================================
|
||||
@@ -27,7 +27,7 @@ describe('disk', () => {
|
||||
const config = await getConfig()
|
||||
serverId = await xo.call(
|
||||
'server.add',
|
||||
assign({ autoConnect: false }, config.xenServer1)
|
||||
Object.assign({ autoConnect: false }, config.xenServer1)
|
||||
)
|
||||
await xo.call('server.connect', { id: serverId })
|
||||
await eventToPromise(xo.objects, 'finish')
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
/* eslint-env jest */
|
||||
|
||||
import { assign, find, map } from 'lodash'
|
||||
import { find, map } from 'lodash'
|
||||
|
||||
import { config, rejectionOf, xo } from './util'
|
||||
|
||||
@@ -151,7 +151,7 @@ describe('server', () => {
|
||||
|
||||
it('connects to a Xen server', async () => {
|
||||
const serverId = await addServer(
|
||||
assign({ autoConnect: false }, config.xenServer1)
|
||||
Object.assign({ autoConnect: false }, config.xenServer1)
|
||||
)
|
||||
|
||||
await xo.call('server.connect', {
|
||||
@@ -184,7 +184,7 @@ describe('server', () => {
|
||||
let serverId
|
||||
beforeEach(async () => {
|
||||
serverId = await addServer(
|
||||
assign({ autoConnect: false }, config.xenServer1)
|
||||
Object.assign({ autoConnect: false }, config.xenServer1)
|
||||
)
|
||||
await xo.call('server.connect', {
|
||||
id: serverId,
|
||||
|
||||
@@ -12,7 +12,7 @@ import {
|
||||
getOneHost,
|
||||
waitObjectState,
|
||||
} from './util'
|
||||
import { assign, map } from 'lodash'
|
||||
import { map } from 'lodash'
|
||||
import eventToPromise from 'event-to-promise'
|
||||
|
||||
// ===================================================================
|
||||
@@ -33,7 +33,7 @@ describe('vbd', () => {
|
||||
|
||||
serverId = await xo.call(
|
||||
'server.add',
|
||||
assign({ autoConnect: false }, config.xenServer1)
|
||||
Object.assign({ autoConnect: false }, config.xenServer1)
|
||||
)
|
||||
await xo.call('server.connect', { id: serverId })
|
||||
await eventToPromise(xo.objects, 'finish')
|
||||
|
||||
@@ -34,14 +34,14 @@
|
||||
"dependencies": {
|
||||
"nodemailer": "^6.1.0",
|
||||
"nodemailer-markdown": "^1.0.1",
|
||||
"promise-toolbox": "^0.13.0"
|
||||
"promise-toolbox": "^0.14.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@babel/cli": "^7.0.0",
|
||||
"@babel/core": "^7.0.0",
|
||||
"@babel/preset-env": "^7.0.0",
|
||||
"babel-plugin-lodash": "^3.3.2",
|
||||
"cross-env": "^5.1.3",
|
||||
"cross-env": "^6.0.3",
|
||||
"rimraf": "^3.0.0"
|
||||
},
|
||||
"scripts": {
|
||||
|
||||
@@ -1,4 +1,6 @@
|
||||
# xo-server-cloud [](https://travis-ci.org/vatesfr/xen-orchestra)
|
||||
# xo-server-transport-icinga2 [](https://travis-ci.org/vatesfr/xen-orchestra)
|
||||
|
||||
> xo-server plugin to send status to icinga2 server
|
||||
|
||||
## Install
|
||||
|
||||
@@ -11,6 +13,13 @@ the web interface, see [the plugin documentation](https://xen-orchestra.com/docs
|
||||
|
||||
## Development
|
||||
|
||||
### `Xo#sendIcinga2Status({ status, message })`
|
||||
|
||||
This xo method is called to send a passive check to icinga2 and change the status of a service.
|
||||
It has two parameters:
|
||||
- status: the service status in icinga2 (0: OK | 1: WARNING | 2: CRITICAL | 3: UNKNOWN).
- message: the status information shown in icinga2.
|
||||
|
||||
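A minimal usage sketch, assuming an `xo` instance on which the plugin has registered the method (the call site and values are illustrative; status strings map to the icinga2 exit codes listed above):

```
// hedged example: report the xo-backup service as WARNING on the icinga2 side
await xo.sendIcinga2Status({
  status: 'WARNING', // one of 'OK' | 'WARNING' | 'CRITICAL' | 'UNKNOWN'
  message: 'Backup job finished with warnings',
})
```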
```
|
||||
# Install dependencies
|
||||
> npm install
|
||||
packages/xo-server-transport-icinga2/package.json (new file, 32 lines)
@@ -0,0 +1,32 @@
|
||||
{
|
||||
"name": "xo-server-transport-icinga2",
|
||||
"homepage": "https://github.com/vatesfr/xen-orchestra/tree/master/packages/xo-server-transport-icinga2",
|
||||
"bugs": "https://github.com/vatesfr/xen-orchestra/issues",
|
||||
"repository": {
|
||||
"directory": "packages/xo-server-transport-icinga2",
|
||||
"type": "git",
|
||||
"url": "https://github.com/vatesfr/xen-orchestra.git"
|
||||
},
|
||||
"main": "./dist",
|
||||
"scripts": {
|
||||
"build": "cross-env NODE_ENV=production babel --source-maps --out-dir=dist/ src/",
|
||||
"dev": "cross-env NODE_ENV=development babel --watch --source-maps --out-dir=dist/ src/",
|
||||
"prebuild": "rimraf dist/",
|
||||
"predev": "yarn run prebuild",
|
||||
"prepublishOnly": "yarn run build"
|
||||
},
|
||||
"version": "0.1.0",
|
||||
"engines": {
|
||||
"node": ">=8.9.4"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@babel/cli": "^7.4.4",
|
||||
"@babel/core": "^7.4.4",
|
||||
"@babel/preset-env": "^7.4.4",
|
||||
"cross-env": "^6.0.3"
|
||||
},
|
||||
"dependencies": {
|
||||
"@xen-orchestra/log": "^0.2.0"
|
||||
},
|
||||
"private": true
|
||||
}
|
||||
packages/xo-server-transport-icinga2/src/index.js (new file, 136 lines)
@@ -0,0 +1,136 @@
|
||||
import assert from 'assert'
|
||||
import { URL } from 'url'
|
||||
|
||||
// =============================================================================
|
||||
|
||||
export const configurationSchema = {
|
||||
type: 'object',
|
||||
|
||||
properties: {
|
||||
server: {
|
||||
type: 'string',
|
||||
description: `
|
||||
The icinga2 server http/https address.
|
||||
|
||||
*If no port is provided in the URL, 5665 will be used.*
|
||||
|
||||
Examples:
|
||||
- https://icinga2.example.com
|
||||
- http://192.168.0.1:1234
|
||||
`.trim(),
|
||||
},
|
||||
user: {
|
||||
type: 'string',
|
||||
description: 'The icinga2 server username',
|
||||
},
|
||||
password: {
|
||||
type: 'string',
|
||||
description: 'The icinga2 server password',
|
||||
},
|
||||
filter: {
|
||||
type: 'string',
|
||||
description: `
|
||||
The filter to use
|
||||
|
||||
See: https://icinga.com/docs/icinga2/latest/doc/12-icinga2-api/#filters
|
||||
|
||||
Example:
|
||||
- Monitor the backup jobs of the VMs of a specific host:
|
||||
|
||||
\`host.name=="xoa.example.com" && service.name=="xo-backup"\`
|
||||
`.trim(),
|
||||
},
|
||||
acceptUnauthorized: {
|
||||
type: 'boolean',
|
||||
description: 'Accept unauthorized certificates',
|
||||
default: false,
|
||||
},
|
||||
},
|
||||
additionalProperties: false,
|
||||
required: ['server'],
|
||||
}
|
||||
|
||||
// =============================================================================
|
||||
|
||||
const STATUS_MAP = {
|
||||
OK: 0,
|
||||
WARNING: 1,
|
||||
CRITICAL: 2,
|
||||
UNKNOWN: 3,
|
||||
}
|
||||
|
||||
// =============================================================================
|
||||
|
||||
class XoServerIcinga2 {
|
||||
constructor({ xo }) {
|
||||
this._xo = xo
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
configure(configuration) {
|
||||
const serverUrl = new URL(configuration.server)
|
||||
if (configuration.user !== '') {
|
||||
serverUrl.username = configuration.user
|
||||
}
|
||||
if (configuration.password !== '') {
|
||||
serverUrl.password = configuration.password
|
||||
}
|
||||
if (serverUrl.port === '') {
|
||||
serverUrl.port = '5665' // Default icinga2 access port
|
||||
}
|
||||
serverUrl.pathname = '/v1/actions/process-check-result'
|
||||
this._url = serverUrl.href
|
||||
|
||||
this._filter =
|
||||
configuration.filter !== undefined ? configuration.filter : ''
|
||||
this._acceptUnauthorized = configuration.acceptUnauthorized
|
||||
}
|
||||
|
||||
load() {
|
||||
this._unset = this._xo.defineProperty(
|
||||
'sendIcinga2Status',
|
||||
this._sendIcinga2Status,
|
||||
this
|
||||
)
|
||||
}
|
||||
|
||||
unload() {
|
||||
this._unset()
|
||||
}
|
||||
|
||||
test() {
|
||||
return this._sendIcinga2Status({
|
||||
message:
|
||||
'The server-icinga2 plugin for Xen Orchestra server seems to be working fine, nicely done :)',
|
||||
status: 'OK',
|
||||
})
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
_sendIcinga2Status({ message, status }) {
|
||||
const icinga2Status = STATUS_MAP[status]
|
||||
assert(icinga2Status !== undefined, `Invalid icinga2 status: ${status}`)
|
||||
return this._xo
|
||||
.httpRequest(this._url, {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
Accept: 'application/json',
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
rejectUnauthorized: !this._acceptUnauthorized,
|
||||
body: JSON.stringify({
|
||||
type: 'Service',
|
||||
filter: this._filter,
|
||||
plugin_output: message,
|
||||
exit_status: icinga2Status,
|
||||
}),
|
||||
})
|
||||
.readAll()
|
||||
}
|
||||
}
|
||||
|
||||
// =============================================================================
|
||||
|
||||
export default opts => new XoServerIcinga2(opts)
|
||||
@@ -39,7 +39,7 @@
|
||||
"@babel/core": "^7.0.0",
|
||||
"@babel/preset-env": "^7.0.0",
|
||||
"babel-preset-env": "^1.5.2",
|
||||
"cross-env": "^5.1.3",
|
||||
"cross-env": "^6.0.3",
|
||||
"rimraf": "^3.0.0"
|
||||
},
|
||||
"scripts": {
|
||||
|
||||
@@ -33,14 +33,14 @@
|
||||
"node": ">=6"
|
||||
},
|
||||
"dependencies": {
|
||||
"promise-toolbox": "^0.13.0",
|
||||
"promise-toolbox": "^0.14.0",
|
||||
"slack-node": "^0.1.8"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@babel/cli": "^7.0.0",
|
||||
"@babel/core": "^7.0.0",
|
||||
"@babel/preset-env": "^7.0.0",
|
||||
"cross-env": "^5.1.3",
|
||||
"cross-env": "^6.0.3",
|
||||
"rimraf": "^3.0.0"
|
||||
},
|
||||
"scripts": {
|
||||
|
||||
@@ -40,7 +40,7 @@
|
||||
"@babel/cli": "^7.0.0",
|
||||
"@babel/core": "^7.0.0",
|
||||
"@babel/preset-env": "^7.0.0",
|
||||
"cross-env": "^5.1.3",
|
||||
"cross-env": "^6.0.3",
|
||||
"rimraf": "^3.0.0"
|
||||
},
|
||||
"scripts": {
|
||||
|
||||
@@ -36,20 +36,20 @@
|
||||
},
|
||||
"dependencies": {
|
||||
"@xen-orchestra/async-map": "^0.0.0",
|
||||
"@xen-orchestra/cron": "^1.0.4",
|
||||
"@xen-orchestra/cron": "^1.0.5",
|
||||
"@xen-orchestra/log": "^0.2.0",
|
||||
"handlebars": "^4.0.6",
|
||||
"html-minifier": "^4.0.0",
|
||||
"human-format": "^0.10.0",
|
||||
"lodash": "^4.17.4",
|
||||
"promise-toolbox": "^0.13.0"
|
||||
"promise-toolbox": "^0.14.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@babel/cli": "^7.0.0",
|
||||
"@babel/core": "^7.0.0",
|
||||
"@babel/preset-env": "^7.0.0",
|
||||
"babel-plugin-lodash": "^3.3.2",
|
||||
"cross-env": "^5.1.3",
|
||||
"cross-env": "^6.0.3",
|
||||
"rimraf": "^3.0.0"
|
||||
},
|
||||
"scripts": {
|
||||
|
||||
@@ -5,7 +5,6 @@ import humanFormat from 'human-format'
|
||||
import { createSchedule } from '@xen-orchestra/cron'
|
||||
import { minify } from 'html-minifier'
|
||||
import {
|
||||
assign,
|
||||
concat,
|
||||
differenceBy,
|
||||
filter,
|
||||
@@ -418,7 +417,7 @@ function computeGlobalVmsStats({ haltedVms, vmsStats, xo }) {
|
||||
}))
|
||||
)
|
||||
|
||||
return assign(
|
||||
return Object.assign(
|
||||
computeMeans(vmsStats, [
|
||||
'cpu',
|
||||
'ram',
|
||||
@@ -446,7 +445,7 @@ function computeGlobalHostsStats({ haltedHosts, hostsStats, xo }) {
|
||||
}))
|
||||
)
|
||||
|
||||
return assign(
|
||||
return Object.assign(
|
||||
computeMeans(hostsStats, [
|
||||
'cpu',
|
||||
'ram',
|
||||
|
||||
@@ -30,12 +30,12 @@
|
||||
"bin": "bin"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=6"
|
||||
"node": ">=8"
|
||||
},
|
||||
"dependencies": {
|
||||
"@iarna/toml": "^2.2.1",
|
||||
"@xen-orchestra/async-map": "^0.0.0",
|
||||
"@xen-orchestra/cron": "^1.0.4",
|
||||
"@xen-orchestra/cron": "^1.0.5",
|
||||
"@xen-orchestra/defined": "^0.0.0",
|
||||
"@xen-orchestra/emit-async": "^0.0.0",
|
||||
"@xen-orchestra/fs": "^0.10.1",
|
||||
@@ -58,16 +58,15 @@
|
||||
"debug": "^4.0.1",
|
||||
"decorator-synchronized": "^0.5.0",
|
||||
"deptree": "^1.0.0",
|
||||
"escape-string-regexp": "^1.0.5",
|
||||
"event-to-promise": "^0.8.0",
|
||||
"exec-promise": "^0.7.0",
|
||||
"execa": "^1.0.0",
|
||||
"execa": "^2.0.5",
|
||||
"express": "^4.16.2",
|
||||
"express-session": "^1.15.6",
|
||||
"fatfs": "^0.10.4",
|
||||
"from2": "^2.3.0",
|
||||
"fs-extra": "^8.0.1",
|
||||
"get-stream": "^4.0.0",
|
||||
"get-stream": "^5.1.0",
|
||||
"golike-defer": "^0.4.1",
|
||||
"hashy": "^0.7.1",
|
||||
"helmet": "^3.9.0",
|
||||
@@ -91,7 +90,7 @@
|
||||
"limit-concurrency-decorator": "^0.4.0",
|
||||
"lodash": "^4.17.4",
|
||||
"make-error": "^1",
|
||||
"micromatch": "^3.1.4",
|
||||
"micromatch": "^4.0.2",
|
||||
"minimist": "^1.2.0",
|
||||
"moment-timezone": "^0.5.14",
|
||||
"ms": "^2.1.1",
|
||||
@@ -103,7 +102,7 @@
|
||||
"passport": "^0.4.0",
|
||||
"passport-local": "^1.0.0",
|
||||
"pretty-format": "^24.0.0",
|
||||
"promise-toolbox": "^0.13.0",
|
||||
"promise-toolbox": "^0.14.0",
|
||||
"proxy-agent": "^3.0.0",
|
||||
"pug": "^2.0.0-rc.4",
|
||||
"pump": "^3.0.0",
|
||||
@@ -123,7 +122,7 @@
|
||||
"uuid": "^3.0.1",
|
||||
"value-matcher": "^0.2.0",
|
||||
"vhd-lib": "^0.7.0",
|
||||
"ws": "^6.0.0",
|
||||
"ws": "^7.1.2",
|
||||
"xen-api": "^0.27.2",
|
||||
"xml2js": "^0.4.19",
|
||||
"xo-acl-resolver": "^0.4.1",
|
||||
@@ -148,7 +147,7 @@
|
||||
"@babel/preset-flow": "^7.0.0",
|
||||
"babel-plugin-lodash": "^3.3.2",
|
||||
"babel-plugin-transform-dev": "^2.0.1",
|
||||
"cross-env": "^5.1.3",
|
||||
"cross-env": "^6.0.3",
|
||||
"index-modules": "^0.3.0",
|
||||
"rimraf": "^3.0.0"
|
||||
},
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
// FIXME: rename to disk.*
|
||||
|
||||
import { invalidParameters } from 'xo-common/api-errors'
|
||||
import { isArray, reduce } from 'lodash'
|
||||
import { reduce } from 'lodash'
|
||||
|
||||
import { parseSize } from '../utils'
|
||||
|
||||
@@ -85,7 +85,7 @@ export async function set(params) {
|
||||
continue
|
||||
}
|
||||
|
||||
for (const field of isArray(fields) ? fields : [fields]) {
|
||||
for (const field of Array.isArray(fields) ? fields : [fields]) {
|
||||
await xapi.call(`VDI.set_${field}`, ref, `${params[param]}`)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
import Model from './model'
|
||||
import { BaseError } from 'make-error'
|
||||
import { EventEmitter } from 'events'
|
||||
import { isArray, isObject, map } from './utils'
|
||||
import { isObject, map } from './utils'
|
||||
|
||||
// ===================================================================
|
||||
|
||||
@@ -30,7 +30,7 @@ export default class Collection extends EventEmitter {
|
||||
}
|
||||
|
||||
async add(models, opts) {
|
||||
const array = isArray(models)
|
||||
const array = Array.isArray(models)
|
||||
if (!array) {
|
||||
models = [models]
|
||||
}
|
||||
@@ -66,7 +66,7 @@ export default class Collection extends EventEmitter {
|
||||
}
|
||||
|
||||
async remove(ids) {
|
||||
if (!isArray(ids)) {
|
||||
if (!Array.isArray(ids)) {
|
||||
ids = [ids]
|
||||
}
|
||||
|
||||
@@ -77,8 +77,8 @@ export default class Collection extends EventEmitter {
|
||||
}
|
||||
|
||||
async update(models) {
|
||||
const array = isArray(models)
|
||||
if (!isArray(models)) {
|
||||
const array = Array.isArray(models)
|
||||
if (!array) {
|
||||
models = [models]
|
||||
}
|
||||
|
||||
|
||||
@@ -29,13 +29,7 @@ import { ensureDir, readdir, readFile } from 'fs-extra'
|
||||
|
||||
import parseDuration from './_parseDuration'
|
||||
import Xo from './xo'
|
||||
import {
|
||||
forEach,
|
||||
isArray,
|
||||
isFunction,
|
||||
mapToArray,
|
||||
pFromCallback,
|
||||
} from './utils'
|
||||
import { forEach, mapToArray, pFromCallback } from './utils'
|
||||
|
||||
import bodyParser from 'body-parser'
|
||||
import connectFlash from 'connect-flash'
|
||||
@@ -281,15 +275,16 @@ async function registerPlugin(pluginPath, pluginName) {
|
||||
|
||||
// The default export can be either a factory or directly a plugin
|
||||
// instance.
|
||||
const instance = isFunction(factory)
|
||||
? factory({
|
||||
xo: this,
|
||||
getDataDir: () => {
|
||||
const dir = `${this._config.datadir}/${pluginName}`
|
||||
return ensureDir(dir).then(() => dir)
|
||||
},
|
||||
})
|
||||
: factory
|
||||
const instance =
|
||||
typeof factory === 'function'
|
||||
? factory({
|
||||
xo: this,
|
||||
getDataDir: () => {
|
||||
const dir = `${this._config.datadir}/${pluginName}`
|
||||
return ensureDir(dir).then(() => dir)
|
||||
},
|
||||
})
|
||||
: factory
|
||||
|
||||
await this.registerPlugin(
|
||||
pluginName,
|
||||
@@ -468,7 +463,7 @@ const setUpProxies = (express, opts, xo) => {
|
||||
|
||||
const setUpStaticFiles = (express, opts) => {
|
||||
forEach(opts, (paths, url) => {
|
||||
if (!isArray(paths)) {
|
||||
if (!Array.isArray(paths)) {
|
||||
paths = [paths]
|
||||
}
|
||||
|
||||
|
||||
@@ -8,19 +8,21 @@ const parse = createParser({
|
||||
keyTransform: key => key.slice(5).toLowerCase(),
|
||||
})
|
||||
const makeFunction = command => async (fields, ...args) => {
|
||||
return splitLines(
|
||||
await execa.stdout(command, [
|
||||
'--noheading',
|
||||
'--nosuffix',
|
||||
'--nameprefixes',
|
||||
'--unbuffered',
|
||||
'--units',
|
||||
'b',
|
||||
'-o',
|
||||
String(fields),
|
||||
...args,
|
||||
])
|
||||
).map(Array.isArray(fields) ? parse : line => parse(line)[fields])
|
||||
const { stdout } = await execa(command, [
|
||||
'--noheading',
|
||||
'--nosuffix',
|
||||
'--nameprefixes',
|
||||
'--unbuffered',
|
||||
'--units',
|
||||
'b',
|
||||
'-o',
|
||||
String(fields),
|
||||
...args,
|
||||
])
|
||||
|
||||
return splitLines(stdout).map(
|
||||
Array.isArray(fields) ? parse : line => parse(line)[fields]
|
||||
)
|
||||
}
|
||||
|
||||
export const lvs = makeFunction('lvs')
|
||||
|
||||
@@ -1,5 +1,3 @@
|
||||
import assign from 'lodash/assign'
|
||||
|
||||
const _combine = (vectors, n, cb) => {
|
||||
if (!n) {
|
||||
return
|
||||
@@ -35,7 +33,7 @@ export const combine = vectors => cb => _combine(vectors, vectors.length, cb)
|
||||
// Merge the properties of an objects set in one object.
|
||||
//
|
||||
// Ex: mergeObjects([ { a: 1 }, { b: 2 } ]) => { a: 1, b: 2 }
|
||||
export const mergeObjects = objects => assign({}, ...objects)
|
||||
export const mergeObjects = objects => Object.assign({}, ...objects)
|
||||
|
||||
// Compute a cross product between vectors.
|
||||
//
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import { EventEmitter } from 'events'
|
||||
|
||||
import { forEach, isEmpty, isString } from './utils'
|
||||
import { forEach, isEmpty } from './utils'
|
||||
|
||||
// ===================================================================
|
||||
|
||||
@@ -30,7 +30,7 @@ export default class Model extends EventEmitter {
|
||||
set(properties, value) {
|
||||
// This method can also be used with two arguments to set a single
|
||||
// property.
|
||||
if (isString(properties)) {
|
||||
if (typeof properties === 'string') {
|
||||
properties = { [properties]: value }
|
||||
}
|
||||
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
import appConf from 'app-conf'
|
||||
import pw from 'pw'
|
||||
import { join as joinPath } from 'path'
|
||||
|
||||
import Xo from './xo'
|
||||
import { generateToken } from './utils'
|
||||
@@ -26,6 +27,7 @@ xo-server-recover-account <user name or email>
|
||||
|
||||
const xo = new Xo(
|
||||
await appConf.load('xo-server', {
|
||||
appDir: joinPath(__dirname, '..'),
|
||||
ignoreUnknownFormats: true,
|
||||
})
|
||||
)
|
||||
|
||||
@@ -3,7 +3,6 @@ import forEach from 'lodash/forEach'
|
||||
import has from 'lodash/has'
|
||||
import highland from 'highland'
|
||||
import humanFormat from 'human-format'
|
||||
import isString from 'lodash/isString'
|
||||
import keys from 'lodash/keys'
|
||||
import multiKeyHashInt from 'multikey-hash'
|
||||
import pick from 'lodash/pick'
|
||||
@@ -208,7 +207,7 @@ export {
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
export function parseSize(size) {
|
||||
if (!isString(size)) {
|
||||
if (typeof size !== 'string') {
|
||||
return size
|
||||
}
|
||||
|
||||
@@ -256,13 +255,9 @@ export const safeDateParse = utcParse('%Y%m%dT%H%M%SZ')
|
||||
//
|
||||
// Exports them from here to avoid direct dependencies on lodash/
|
||||
export { default as forEach } from 'lodash/forEach'
|
||||
export { default as isArray } from 'lodash/isArray'
|
||||
export { default as isBoolean } from 'lodash/isBoolean'
|
||||
export { default as isEmpty } from 'lodash/isEmpty'
|
||||
export { default as isFunction } from 'lodash/isFunction'
|
||||
export { default as isInteger } from 'lodash/isInteger'
|
||||
export { default as isObject } from 'lodash/isObject'
|
||||
export { default as isString } from 'lodash/isString'
|
||||
export { default as mapToArray } from 'lodash/map'
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
@@ -364,7 +359,7 @@ export const thunkToArray = thunk => {
|
||||
// function foo (param = throwFn('param is required')()) {}
|
||||
// ```
|
||||
export const throwFn = error => () => {
|
||||
throw isString(error) ? new Error(error) : error
|
||||
throw typeof error === 'string' ? new Error(error) : error
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
@@ -3,7 +3,6 @@ import ensureArray from './_ensureArray'
|
||||
import {
|
||||
extractProperty,
|
||||
forEach,
|
||||
isArray,
|
||||
isEmpty,
|
||||
mapFilter,
|
||||
mapToArray,
|
||||
@@ -27,7 +26,7 @@ function link(obj, prop, idField = '$id') {
|
||||
return dynamicValue // Properly handles null and undefined.
|
||||
}
|
||||
|
||||
if (isArray(dynamicValue)) {
|
||||
if (Array.isArray(dynamicValue)) {
|
||||
return mapToArray(dynamicValue, idField)
|
||||
}
|
||||
|
||||
|
||||
@@ -42,7 +42,6 @@ import pRetry from '../_pRetry'
|
||||
import {
|
||||
camelToSnakeCase,
|
||||
forEach,
|
||||
isFunction,
|
||||
map,
|
||||
mapToArray,
|
||||
pAll,
|
||||
@@ -82,7 +81,7 @@ export const TAG_COPY_SRC = 'xo:copy_of'
|
||||
|
||||
// FIXME: remove this work around when fixed, https://phabricator.babeljs.io/T2877
|
||||
// export * from './utils'
|
||||
require('lodash/assign')(module.exports, require('./utils'))
|
||||
Object.assign(module.exports, require('./utils'))
|
||||
|
||||
// VDI formats. (Raw is not available for delta vdi.)
|
||||
export const VDI_FORMAT_VHD = 'vhd'
|
||||
@@ -174,7 +173,7 @@ export default class Xapi extends XapiBase {
|
||||
//
|
||||
// TODO: implements a timeout.
|
||||
_waitObject(predicate) {
|
||||
if (isFunction(predicate)) {
|
||||
if (typeof predicate === 'function') {
|
||||
const { promise, resolve } = defer()
|
||||
|
||||
const unregister = this._registerGenericWatcher(obj => {
|
||||
|
||||
@@ -1,4 +1,3 @@
|
||||
import asyncMap from '@xen-orchestra/async-map'
|
||||
import createLogger from '@xen-orchestra/log'
|
||||
import deferrable from 'golike-defer'
|
||||
import unzip from 'julien-f-unzip'
|
||||
@@ -337,7 +336,7 @@ export default {
|
||||
|
||||
// INSTALL -------------------------------------------------------------------
|
||||
|
||||
_xcpUpdate(hosts) {
|
||||
async _xcpUpdate(hosts) {
|
||||
if (hosts === undefined) {
|
||||
hosts = filter(this.objects.all, { $type: 'host' })
|
||||
} else {
|
||||
@@ -347,7 +346,10 @@ export default {
|
||||
)
|
||||
}
|
||||
|
||||
return asyncMap(hosts, async host => {
|
||||
// XCP-ng hosts need to be updated one at a time starting with the pool master
|
||||
// https://github.com/vatesfr/xen-orchestra/issues/4468
|
||||
hosts = hosts.sort(({ $ref }) => ($ref === this.pool.master ? -1 : 1))
|
||||
for (const host of hosts) {
|
||||
const update = await this.call(
|
||||
'host.call_plugin',
|
||||
host.$ref,
|
||||
@@ -364,7 +366,7 @@ export default {
|
||||
String(Date.now() / 1000)
|
||||
)
|
||||
}
|
||||
})
|
||||
}
|
||||
},
|
||||
|
||||
// Legacy XS patches: upload a patch on a pool before installing it
|
||||
|
||||
@@ -9,11 +9,7 @@ import { satisfies as versionSatisfies } from 'semver'
|
||||
import {
|
||||
camelToSnakeCase,
|
||||
forEach,
|
||||
isArray,
|
||||
isBoolean,
|
||||
isFunction,
|
||||
isInteger,
|
||||
isString,
|
||||
map,
|
||||
mapFilter,
|
||||
mapToArray,
|
||||
@@ -45,10 +41,10 @@ export const prepareXapiParam = param => {
|
||||
if (isInteger(param)) {
|
||||
return asInteger(param)
|
||||
}
|
||||
if (isBoolean(param)) {
|
||||
if (typeof param === 'boolean') {
|
||||
return asBoolean(param)
|
||||
}
|
||||
if (isArray(param)) {
|
||||
if (Array.isArray(param)) {
|
||||
return map(param, prepareXapiParam)
|
||||
}
|
||||
if (isPlainObject(param)) {
|
||||
@@ -135,14 +131,14 @@ export const makeEditObject = specs => {
|
||||
return object => object[prop]
|
||||
}
|
||||
|
||||
if (isString(get)) {
|
||||
if (typeof get === 'string') {
|
||||
return object => object[get]
|
||||
}
|
||||
|
||||
return get
|
||||
}
|
||||
const normalizeSet = (set, name) => {
|
||||
if (isFunction(set)) {
|
||||
if (typeof set === 'function') {
|
||||
return set
|
||||
}
|
||||
|
||||
@@ -153,7 +149,7 @@ export const makeEditObject = specs => {
|
||||
}
|
||||
}
|
||||
|
||||
if (isString(set)) {
|
||||
if (typeof set === 'string') {
|
||||
const index = set.indexOf('.')
|
||||
if (index === -1) {
|
||||
const prop = camelToSnakeCase(set)
|
||||
@@ -176,7 +172,7 @@ export const makeEditObject = specs => {
|
||||
}
|
||||
}
|
||||
|
||||
if (!isArray(set)) {
|
||||
if (!Array.isArray(set)) {
|
||||
throw new Error('must be an array, a function or a string')
|
||||
}
|
||||
|
||||
@@ -212,7 +208,7 @@ export const makeEditObject = specs => {
|
||||
}
|
||||
|
||||
forEach(spec.constraints, (constraint, constraintName) => {
|
||||
if (!isFunction(constraint)) {
|
||||
if (typeof constraint !== 'function') {
|
||||
throw new Error('constraint must be a function')
|
||||
}
|
||||
|
||||
@@ -234,15 +230,15 @@ export const makeEditObject = specs => {
|
||||
return spec
|
||||
}
|
||||
forEach(specs, (spec, name) => {
|
||||
isString(spec) || (specs[name] = normalizeSpec(spec, name))
|
||||
typeof spec === 'string' || (specs[name] = normalizeSpec(spec, name))
|
||||
})
|
||||
|
||||
// Resolves aliases and add camelCase and snake_case aliases.
|
||||
forEach(specs, (spec, name) => {
|
||||
if (isString(spec)) {
|
||||
if (typeof spec === 'string') {
|
||||
do {
|
||||
spec = specs[spec]
|
||||
} while (isString(spec))
|
||||
} while (typeof spec === 'string')
|
||||
specs[name] = spec
|
||||
}
|
||||
|
||||
|
||||
@@ -2,7 +2,7 @@ import createLogger from '@xen-orchestra/log'
|
||||
import kindOf from 'kindof'
|
||||
import ms from 'ms'
|
||||
import schemaInspector from 'schema-inspector'
|
||||
import { forEach, isFunction } from 'lodash'
|
||||
import { forEach } from 'lodash'
|
||||
import { getBoundPropertyDescriptor } from 'bind-property-descriptor'
|
||||
import { MethodNotFound } from 'json-rpc-peer'
|
||||
|
||||
@@ -183,7 +183,7 @@ export default class Api {
|
||||
const addMethod = (method, name) => {
|
||||
name = base + name
|
||||
|
||||
if (isFunction(method)) {
|
||||
if (typeof method === 'function') {
|
||||
removes.push(this.addApiMethod(name, method))
|
||||
return
|
||||
}
|
||||
|
||||
@@ -53,7 +53,7 @@ import {
|
||||
type Xapi,
|
||||
TAG_COPY_SRC,
|
||||
} from '../../xapi'
|
||||
import { getVmDisks } from '../../xapi/utils'
|
||||
import { formatDateTime, getVmDisks } from '../../xapi/utils'
|
||||
import {
|
||||
resolveRelativeFromFile,
|
||||
safeDateFormat,
|
||||
@@ -75,6 +75,7 @@ type Settings = {|
|
||||
deleteFirst?: boolean,
|
||||
copyRetention?: number,
|
||||
exportRetention?: number,
|
||||
offlineBackup?: boolean,
|
||||
offlineSnapshot?: boolean,
|
||||
reportWhen?: ReportWhen,
|
||||
snapshotRetention?: number,
|
||||
@@ -147,6 +148,7 @@ const defaultSettings: Settings = {
|
||||
deleteFirst: false,
|
||||
exportRetention: 0,
|
||||
fullInterval: 0,
|
||||
offlineBackup: false,
|
||||
offlineSnapshot: false,
|
||||
reportWhen: 'failure',
|
||||
snapshotRetention: 0,
|
||||
@@ -188,7 +190,7 @@ const getJobCompression = ({ compression: c }) =>
|
||||
const listReplicatedVms = (
|
||||
xapi: Xapi,
|
||||
scheduleOrJobId: string,
|
||||
srId?: string,
|
||||
srUuid?: string,
|
||||
vmUuid?: string
|
||||
): Vm[] => {
|
||||
const { all } = xapi.objects
|
||||
@@ -203,7 +205,7 @@ const listReplicatedVms = (
|
||||
'start' in object.blocked_operations &&
|
||||
(oc['xo:backup:job'] === scheduleOrJobId ||
|
||||
oc['xo:backup:schedule'] === scheduleOrJobId) &&
|
||||
oc['xo:backup:sr'] === srId &&
|
||||
oc['xo:backup:sr'] === srUuid &&
|
||||
(oc['xo:backup:vm'] === vmUuid ||
|
||||
// 2018-03-28, JFT: to catch VMs replicated before this fix
|
||||
oc['xo:backup:vm'] === undefined)
|
||||
@@ -479,16 +481,21 @@ const disableVmHighAvailability = async (xapi: Xapi, vm: Vm) => {
|
||||
// Attributes on created VM snapshots:
|
||||
//
|
||||
// - `other_config`:
|
||||
// - `xo:backup:datetime` = snapshot.snapshot_time (allow sorting replicated VMs)
|
||||
// - `xo:backup:deltaChainLength` = n (number of delta copies/replicated since a full)
|
||||
// - `xo:backup:exported` = 'true' (added at the end of the backup)
|
||||
//
|
||||
// Attributes on created VMs and created snapshots:
|
||||
//
|
||||
// - `other_config`:
|
||||
// - `xo:backup:datetime`: format is UTC %Y%m%dT%H:%M:%SZ
|
||||
// - from snapshots: snapshot.snapshot_time
|
||||
// - with offline backup: formatDateTime(Date.now())
|
||||
// - `xo:backup:job` = job.id
|
||||
// - `xo:backup:schedule` = schedule.id
|
||||
// - `xo:backup:vm` = vm.uuid
|
||||
//
|
||||
// Attributes of created VMs:
|
||||
//
|
||||
// - all snapshots attributes (see above)
|
||||
// - `name_label`: `${original name} - ${job name} - (${safeDateFormat(backup timestamp)})`
|
||||
// - tag:
|
||||
// - copy in delta mode: `Continuous Replication`
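Taken together, a snapshot created by a backup job might carry `other_config` entries along these lines (a hedged illustration, the values are placeholders, not taken from a real job):

```
// illustrative other_config written on a backup snapshot
{
  'xo:backup:datetime': '20191014T15:32:10Z', // snapshot_time, or formatDateTime(Date.now()) for offline backup
  'xo:backup:job': '<job id>',
  'xo:backup:schedule': '<schedule id>',
  'xo:backup:vm': '<vm uuid>',
  'xo:backup:deltaChainLength': '2',           // delta mode only
  'xo:backup:exported': 'true',                // added once the export has succeeded
}
```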
|
||||
@@ -1023,6 +1030,12 @@ export default class BackupNg {
|
||||
throw new Error('copy, export and snapshot retentions cannot both be 0')
|
||||
}
|
||||
|
||||
const isOfflineBackup =
|
||||
mode === 'full' && getSetting(settings, 'offlineBackup', [vmUuid, ''])
|
||||
if (isOfflineBackup && snapshotRetention > 0) {
|
||||
throw new Error('offline backup is not compatible with rolling snapshot')
|
||||
}
|
||||
|
||||
if (
|
||||
!some(
|
||||
vm.$VBDs,
|
||||
@@ -1032,110 +1045,139 @@ export default class BackupNg {
|
||||
throw new Error('no disks found')
|
||||
}
|
||||
|
||||
const snapshots = vm.$snapshots
|
||||
.filter(_ => _.other_config['xo:backup:job'] === jobId)
|
||||
.sort(compareSnapshotTime)
|
||||
let baseSnapshot, exported: Vm, exportDateTime
|
||||
if (isOfflineBackup) {
|
||||
exported = vm
|
||||
exportDateTime = formatDateTime(Date.now())
|
||||
if (vm.power_state === 'Running') {
|
||||
await wrapTask(
|
||||
{
|
||||
logger,
|
||||
message: 'shutdown VM',
|
||||
parentId: taskId,
|
||||
},
|
||||
xapi.shutdownVm(vm)
|
||||
)
|
||||
$defer(() => xapi.startVm(vm))
|
||||
}
|
||||
} else {
|
||||
const snapshots = vm.$snapshots
|
||||
.filter(_ => _.other_config['xo:backup:job'] === jobId)
|
||||
.sort(compareSnapshotTime)
|
||||
|
||||
const bypassVdiChainsCheck: boolean = getSetting(
|
||||
settings,
|
||||
'bypassVdiChainsCheck',
|
||||
[vmUuid, '']
|
||||
)
|
||||
if (!bypassVdiChainsCheck) {
|
||||
xapi._assertHealthyVdiChains(vm)
|
||||
}
|
||||
const bypassVdiChainsCheck: boolean = getSetting(
|
||||
settings,
|
||||
'bypassVdiChainsCheck',
|
||||
[vmUuid, '']
|
||||
)
|
||||
if (!bypassVdiChainsCheck) {
|
||||
xapi._assertHealthyVdiChains(vm)
|
||||
}
|
||||
|
||||
const offlineSnapshot: boolean = getSetting(settings, 'offlineSnapshot', [
|
||||
vmUuid,
|
||||
'',
|
||||
])
|
||||
const startAfterSnapshot = offlineSnapshot && vm.power_state === 'Running'
|
||||
if (startAfterSnapshot) {
|
||||
await wrapTask(
|
||||
{
|
||||
logger,
|
||||
message: 'shutdown VM',
|
||||
parentId: taskId,
|
||||
},
|
||||
xapi.shutdownVm(vm)
|
||||
)
|
||||
}
|
||||
|
||||
exported = (await wrapTask(
|
||||
{
|
||||
logger,
|
||||
message: 'snapshot',
|
||||
parentId: taskId,
|
||||
result: _ => _.uuid,
|
||||
},
|
||||
xapi._snapshotVm(
|
||||
$cancelToken,
|
||||
vm,
|
||||
`[XO Backup ${job.name}] ${vm.name_label}`
|
||||
)
|
||||
): any)
|
||||
|
||||
if (startAfterSnapshot) {
|
||||
ignoreErrors.call(xapi.startVm(vm))
|
||||
}
|
||||
|
||||
const offlineSnapshot: boolean = getSetting(settings, 'offlineSnapshot', [
|
||||
vmUuid,
|
||||
'',
|
||||
])
|
||||
const startAfterSnapshot = offlineSnapshot && vm.power_state === 'Running'
|
||||
if (startAfterSnapshot) {
|
||||
await wrapTask(
|
||||
{
|
||||
logger,
|
||||
message: 'shutdown VM',
|
||||
message: 'add metadata to snapshot',
|
||||
parentId: taskId,
|
||||
},
|
||||
xapi.shutdownVm(vm)
|
||||
)
|
||||
}
|
||||
|
||||
let snapshot: Vm = (await wrapTask(
|
||||
{
|
||||
logger,
|
||||
message: 'snapshot',
|
||||
parentId: taskId,
|
||||
result: _ => _.uuid,
|
||||
},
|
||||
xapi._snapshotVm(
|
||||
$cancelToken,
|
||||
vm,
|
||||
`[XO Backup ${job.name}] ${vm.name_label}`
|
||||
)
|
||||
): any)
|
||||
|
||||
if (startAfterSnapshot) {
|
||||
ignoreErrors.call(xapi.startVm(vm))
|
||||
}
|
||||
|
||||
await wrapTask(
|
||||
{
|
||||
logger,
|
||||
message: 'add metadata to snapshot',
|
||||
parentId: taskId,
|
||||
},
|
||||
snapshot.update_other_config({
|
||||
'xo:backup:datetime': snapshot.snapshot_time,
|
||||
'xo:backup:job': jobId,
|
||||
'xo:backup:schedule': scheduleId,
|
||||
'xo:backup:vm': vmUuid,
|
||||
})
|
||||
)
|
||||
|
||||
snapshot = await xapi.barrier(snapshot.$ref)
|
||||
|
||||
let baseSnapshot
|
||||
if (mode === 'delta') {
|
||||
baseSnapshot = findLast(
|
||||
snapshots,
|
||||
_ => 'xo:backup:exported' in _.other_config
|
||||
exported.update_other_config({
|
||||
'xo:backup:datetime': exported.snapshot_time,
|
||||
'xo:backup:job': jobId,
|
||||
'xo:backup:schedule': scheduleId,
|
||||
'xo:backup:vm': vmUuid,
|
||||
})
|
||||
)
|
||||
|
||||
// JFT 2018-10-02: support previous snapshots which did not have this
|
||||
// entry, can be removed after 2018-12.
|
||||
if (baseSnapshot === undefined) {
|
||||
baseSnapshot = last(snapshots)
|
||||
}
|
||||
}
|
||||
snapshots.push(snapshot)
|
||||
exported = await xapi.barrier(exported.$ref)
|
||||
|
||||
// snapshots to delete due to the snapshot retention settings
|
||||
const snapshotsToDelete = flatMap(
|
||||
groupBy(snapshots, _ => _.other_config['xo:backup:schedule']),
|
||||
(snapshots, scheduleId) =>
|
||||
getOldEntries(
|
||||
getSetting(settings, 'snapshotRetention', [scheduleId]),
|
||||
snapshots
|
||||
if (mode === 'delta') {
|
||||
baseSnapshot = findLast(
|
||||
snapshots,
|
||||
_ => 'xo:backup:exported' in _.other_config
|
||||
)
|
||||
)
|
||||
|
||||
// delete unused snapshots
|
||||
await asyncMap(snapshotsToDelete, vm => {
|
||||
// snapshot and baseSnapshot should not be deleted right now
|
||||
if (vm !== snapshot && vm !== baseSnapshot) {
|
||||
return xapi.deleteVm(vm)
|
||||
// JFT 2018-10-02: support previous snapshots which did not have this
|
||||
// entry, can be removed after 2018-12.
|
||||
if (baseSnapshot === undefined) {
|
||||
baseSnapshot = last(snapshots)
|
||||
}
|
||||
}
|
||||
})
|
||||
snapshots.push(exported)
|
||||
|
||||
snapshot = ((await wrapTask(
|
||||
{
|
||||
logger,
|
||||
message: 'waiting for uptodate snapshot record',
|
||||
parentId: taskId,
|
||||
},
|
||||
xapi.barrier(snapshot.$ref)
|
||||
): any): Vm)
|
||||
// snapshots to delete due to the snapshot retention settings
|
||||
const snapshotsToDelete = flatMap(
|
||||
groupBy(snapshots, _ => _.other_config['xo:backup:schedule']),
|
||||
(snapshots, scheduleId) =>
|
||||
getOldEntries(
|
||||
getSetting(settings, 'snapshotRetention', [scheduleId]),
|
||||
snapshots
|
||||
)
|
||||
)
|
||||
|
||||
// delete unused snapshots
|
||||
await asyncMap(snapshotsToDelete, vm => {
|
||||
// snapshot and baseSnapshot should not be deleted right now
|
||||
if (vm !== exported && vm !== baseSnapshot) {
|
||||
return xapi.deleteVm(vm)
|
||||
}
|
||||
})
|
||||
|
||||
exported = ((await wrapTask(
|
||||
{
|
||||
logger,
|
||||
message: 'waiting for uptodate snapshot record',
|
||||
parentId: taskId,
|
||||
},
|
||||
xapi.barrier(exported.$ref)
|
||||
): any): Vm)
|
||||
|
||||
if (mode === 'full' && snapshotsToDelete.includes(exported)) {
|
||||
// TODO: do not create the snapshot if there are no snapshotRetention and
|
||||
// the VM is not running
|
||||
$defer.call(xapi, 'deleteVm', exported)
|
||||
} else if (mode === 'delta') {
|
||||
if (snapshotsToDelete.includes(exported)) {
|
||||
$defer.onFailure.call(xapi, 'deleteVm', exported)
|
||||
}
|
||||
if (snapshotsToDelete.includes(baseSnapshot)) {
|
||||
$defer.onSuccess.call(xapi, 'deleteVm', baseSnapshot)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (copyRetention === 0 && exportRetention === 0) {
|
||||
return
|
||||
@@ -1151,14 +1193,8 @@ export default class BackupNg {
|
||||
const metadataFilename = `${vmDir}/${basename}.json`
|
||||
|
||||
if (mode === 'full') {
|
||||
// TODO: do not create the snapshot if there are no snapshotRetention and
|
||||
// the VM is not running
|
||||
if (snapshotsToDelete.includes(snapshot)) {
|
||||
$defer.call(xapi, 'deleteVm', snapshot)
|
||||
}
|
||||
|
||||
let compress = getJobCompression(job)
|
||||
const pool = snapshot.$pool
|
||||
const pool = exported.$pool
|
||||
if (
|
||||
compress === 'zstd' &&
|
||||
pool.restrictions.restrict_zstd_export !== 'false'
|
||||
@@ -1175,10 +1211,10 @@ export default class BackupNg {
|
||||
let xva: any = await wrapTask(
|
||||
{
|
||||
logger,
|
||||
message: 'start snapshot export',
|
||||
message: 'start VM export',
|
||||
parentId: taskId,
|
||||
},
|
||||
xapi.exportVm($cancelToken, snapshot, {
|
||||
xapi.exportVm($cancelToken, exported, {
|
||||
compress,
|
||||
})
|
||||
)
|
||||
@@ -1203,7 +1239,7 @@ export default class BackupNg {
|
||||
timestamp: now,
|
||||
version: '2.0.0',
|
||||
vm,
|
||||
vmSnapshot: snapshot,
|
||||
vmSnapshot: exported.id !== vm.id ? exported : undefined,
|
||||
xva: `./${dataBasename}`,
|
||||
}
|
||||
const dataFilename = `${vmDir}/${dataBasename}`
|
||||
@@ -1287,7 +1323,7 @@ export default class BackupNg {
|
||||
async (taskId, sr) => {
|
||||
const fork = forkExport()
|
||||
|
||||
const { $id: srId, xapi } = sr
|
||||
const { uuid: srUuid, xapi } = sr
|
||||
|
||||
// delete previous interrupted copies
|
||||
ignoreErrors.call(
|
||||
@@ -1299,7 +1335,7 @@ export default class BackupNg {
|
||||
|
||||
const oldVms = getOldEntries(
|
||||
copyRetention - 1,
|
||||
listReplicatedVms(xapi, scheduleId, srId, vmUuid)
|
||||
listReplicatedVms(xapi, scheduleId, srUuid, vmUuid)
|
||||
)
|
||||
|
||||
const deleteOldBackups = () =>
|
||||
@@ -1311,7 +1347,9 @@ export default class BackupNg {
|
||||
},
|
||||
this._deleteVms(xapi, oldVms)
|
||||
)
|
||||
const deleteFirst = getSetting(settings, 'deleteFirst', [srId])
|
||||
const deleteFirst = getSetting(settings, 'deleteFirst', [
|
||||
srUuid,
|
||||
])
|
||||
if (deleteFirst) {
|
||||
await deleteOldBackups()
|
||||
}
|
||||
@@ -1341,7 +1379,15 @@ export default class BackupNg {
|
||||
'start',
|
||||
'Start operation for this vm is blocked, clone it if you want to use it.'
|
||||
),
|
||||
vm.update_other_config('xo:backup:sr', srId),
|
||||
!isOfflineBackup
|
||||
? vm.update_other_config('xo:backup:sr', srUuid)
|
||||
: vm.update_other_config({
|
||||
'xo:backup:datetime': exportDateTime,
|
||||
'xo:backup:job': jobId,
|
||||
'xo:backup:schedule': scheduleId,
|
||||
'xo:backup:sr': srUuid,
|
||||
'xo:backup:vm': exported.uuid,
|
||||
}),
|
||||
])
|
||||
|
||||
if (!deleteFirst) {
|
||||
@@ -1354,13 +1400,6 @@ export default class BackupNg {
|
||||
noop // errors are handled in logs
|
||||
)
|
||||
} else if (mode === 'delta') {
|
||||
if (snapshotsToDelete.includes(snapshot)) {
|
||||
$defer.onFailure.call(xapi, 'deleteVm', snapshot)
|
||||
}
|
||||
if (snapshotsToDelete.includes(baseSnapshot)) {
|
||||
$defer.onSuccess.call(xapi, 'deleteVm', baseSnapshot)
|
||||
}
|
||||
|
||||
let deltaChainLength = 0
|
||||
let fullVdisRequired
|
||||
await (async () => {
|
||||
@@ -1398,11 +1437,11 @@ export default class BackupNg {
|
||||
}
|
||||
})
|
||||
|
||||
for (const { $id: srId, xapi } of srs) {
|
||||
for (const { uuid: srUuid, xapi } of srs) {
|
||||
const replicatedVm = listReplicatedVms(
|
||||
xapi,
|
||||
jobId,
|
||||
srId,
|
||||
srUuid,
|
||||
vmUuid
|
||||
).find(vm => vm.other_config[TAG_COPY_SRC] === baseSnapshot.uuid)
|
||||
if (replicatedVm === undefined) {
|
||||
@@ -1468,7 +1507,7 @@ export default class BackupNg {
|
||||
message: 'start snapshot export',
|
||||
parentId: taskId,
|
||||
},
|
||||
xapi.exportDeltaVm($cancelToken, snapshot, baseSnapshot, {
|
||||
xapi.exportDeltaVm($cancelToken, exported, baseSnapshot, {
|
||||
fullVdisRequired,
|
||||
})
|
||||
)
|
||||
@@ -1490,7 +1529,7 @@ export default class BackupNg {
|
||||
}/${basename}.vhd`
|
||||
),
|
||||
vm,
|
||||
vmSnapshot: snapshot,
|
||||
vmSnapshot: exported,
|
||||
}
|
||||
|
||||
const jsonMetadata = JSON.stringify(metadata)
|
||||
@@ -1656,7 +1695,7 @@ export default class BackupNg {
|
||||
async (taskId, sr) => {
|
||||
const fork = forkExport()
|
||||
|
||||
const { $id: srId, xapi } = sr
|
||||
const { uuid: srUuid, xapi } = sr
|
||||
|
||||
// delete previous interrupted copies
|
||||
ignoreErrors.call(
|
||||
@@ -1668,7 +1707,7 @@ export default class BackupNg {
|
||||
|
||||
const oldVms = getOldEntries(
|
||||
copyRetention - 1,
|
||||
listReplicatedVms(xapi, scheduleId, srId, vmUuid)
|
||||
listReplicatedVms(xapi, scheduleId, srUuid, vmUuid)
|
||||
)
|
||||
|
||||
const deleteOldBackups = () =>
|
||||
@@ -1681,7 +1720,9 @@ export default class BackupNg {
|
||||
this._deleteVms(xapi, oldVms)
|
||||
)
|
||||
|
||||
const deleteFirst = getSetting(settings, 'deleteFirst', [srId])
|
||||
const deleteFirst = getSetting(settings, 'deleteFirst', [
|
||||
srUuid,
|
||||
])
|
||||
if (deleteFirst) {
|
||||
await deleteOldBackups()
|
||||
}
|
||||
@@ -1698,7 +1739,7 @@ export default class BackupNg {
|
||||
name_label: `${metadata.vm.name_label} - ${
|
||||
job.name
|
||||
} - (${safeDateFormat(metadata.timestamp)})`,
|
||||
srId,
|
||||
srId: sr.$id,
|
||||
})
|
||||
)
|
||||
|
||||
@@ -1709,7 +1750,7 @@ export default class BackupNg {
|
||||
'start',
|
||||
'Start operation for this vm is blocked, clone it if you want to use it.'
|
||||
),
|
||||
vm.update_other_config('xo:backup:sr', srId),
|
||||
vm.update_other_config('xo:backup:sr', srUuid),
|
||||
])
|
||||
|
||||
if (!deleteFirst) {
|
||||
@@ -1724,7 +1765,7 @@ export default class BackupNg {
|
||||
|
||||
if (!isFull) {
|
||||
ignoreErrors.call(
|
||||
snapshot.update_other_config(
|
||||
exported.update_other_config(
|
||||
'xo:backup:deltaChainLength',
|
||||
String(deltaChainLength)
|
||||
)
|
||||
@@ -1734,14 +1775,16 @@ export default class BackupNg {
|
||||
throw new Error(`no exporter for backup mode ${mode}`)
|
||||
}
|
||||
|
||||
await wrapTask(
|
||||
{
|
||||
logger,
|
||||
message: 'set snapshot.other_config[xo:backup:exported]',
|
||||
parentId: taskId,
|
||||
},
|
||||
snapshot.update_other_config('xo:backup:exported', 'true')
|
||||
)
|
||||
if (!isOfflineBackup) {
|
||||
await wrapTask(
|
||||
{
|
||||
logger,
|
||||
message: 'set snapshot.other_config[xo:backup:exported]',
|
||||
parentId: taskId,
|
||||
},
|
||||
exported.update_other_config('xo:backup:exported', 'true')
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
async _deleteDeltaVmBackups(
|
||||
|
||||
@@ -1,7 +1,6 @@
import asyncMap from '@xen-orchestra/async-map'
import createLogger from '@xen-orchestra/log'
import deferrable from 'golike-defer'
import escapeStringRegexp from 'escape-string-regexp'
import execa from 'execa'
import splitLines from 'split-lines'
import { CancelToken, fromEvent, ignoreErrors } from 'promise-toolbox'
@@ -10,7 +9,15 @@ import { createReadStream, readdir, stat } from 'fs'
import { satisfies as versionSatisfies } from 'semver'
import { utcFormat } from 'd3-time-format'
import { basename, dirname } from 'path'
import { filter, find, includes, once, range, sortBy, trim } from 'lodash'
import {
escapeRegExp,
filter,
find,
includes,
once,
range,
sortBy,
} from 'lodash'
import {
chainVhd,
createSyntheticStream as createVhdReadStream,
@@ -19,6 +26,7 @@ import {

import createSizeStream from '../size-stream'
import xapiObjectToXo from '../xapi-object-to-xo'
import { debounceWithKey } from '../_pDebounceWithKey'
import { lvs, pvs } from '../lvm'
import {
forEach,
@@ -36,6 +44,7 @@ import {

// ===================================================================

const DEBOUNCE_DELAY = 10e3
const DELTA_BACKUP_EXT = '.json'
const DELTA_BACKUP_EXT_LENGTH = DELTA_BACKUP_EXT.length
const TAG_SOURCE_VM = 'xo:source_vm'
@@ -139,22 +148,20 @@ const listPartitions = (() => {
})

return device =>
execa
.stdout('partx', [
'--bytes',
'--output=NR,START,SIZE,NAME,UUID,TYPE',
'--pairs',
device.path,
])
.then(stdout =>
mapFilter(splitLines(stdout), line => {
const partition = parseLine(line)
const { type } = partition
if (type != null && !IGNORED[+type]) {
return partition
}
})
)
execa('partx', [
'--bytes',
'--output=NR,START,SIZE,NAME,UUID,TYPE',
'--pairs',
device.path,
]).then(({ stdout }) =>
mapFilter(splitLines(stdout), line => {
const partition = parseLine(line)
const { type } = partition
if (type != null && !IGNORED[+type]) {
return partition
}
})
)
})()

// handle LVM logical volumes automatically
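The `partx` listing is rewritten because `execa.stdout()` no longer exists in execa 2 (listed as "^2.0.2" in the xo-server package.json hunk further down): calling `execa()` now returns a promise that resolves to a result object, and the captured output has to be read from its `stdout` property. A minimal sketch of that migration, assuming only a `devicePath` argument; the helper name is made up for illustration.

const execa = require('execa')

// Hypothetical helper, showing the execa >= 2 API only.
async function listPartitionPairs(devicePath) {
  // execa < 2 offered `await execa.stdout(cmd, args)`; with execa >= 2 the
  // promise resolves to a result object, so destructure `stdout` instead.
  const { stdout } = await execa('partx', [
    '--bytes',
    '--output=NR,START,SIZE,NAME,UUID,TYPE',
    '--pairs',
    devicePath,
  ])
  return stdout.split('\n').filter(line => line !== '')
}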
@@ -271,8 +278,8 @@ const mountLvmPv = (device, partition) => {
}
args.push('--show', '-f', device.path)

return execa.stdout('losetup', args).then(stdout => {
const path = trim(stdout)
return execa('losetup', args).then(({ stdout }) => {
const path = stdout.trim()
return {
path,
unmount: once(() =>
@@ -294,6 +301,9 @@ export default class {
this._xo = xo
}

@debounceWithKey.decorate(DEBOUNCE_DELAY, function keyFn(remoteId) {
return [this, remoteId]
})
async listRemoteBackups(remoteId) {
const handler = await this._xo.getRemoteHandler(remoteId)

@@ -320,6 +330,9 @@ export default class {
return backups
}

@debounceWithKey.decorate(DEBOUNCE_DELAY, function keyFn(remoteId) {
return [this, remoteId]
})
async listVmBackups(remoteId) {
const handler = await this._xo.getRemoteHandler(remoteId)
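Both listing methods are now debounced per remote: within `DEBOUNCE_DELAY` (the 10-second constant added earlier in this diff) repeated calls for the same `remoteId` share one result instead of re-reading the remote. `_pDebounceWithKey` is internal to xo-server, so the following is only a rough sketch of a promise debounce keyed by an argument, not the actual implementation; all names are illustrative.

// Assumed sketch of a keyed promise debounce.
function debounceByKey(fn, delay, keyFn = (...args) => args[0]) {
  const cache = new Map()
  return function(...args) {
    const key = String(keyFn.apply(this, args))
    let result = cache.get(key)
    if (result === undefined) {
      result = fn.apply(this, args)
      cache.set(key, result)
      setTimeout(() => cache.delete(key), delay)
    }
    return result
  }
}

// e.g. const listRemoteBackups = debounceByKey(id => readRemote(id), 10e3)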
@@ -862,7 +875,7 @@ export default class {
const files = await handler.list('.')

const reg = new RegExp(
'^[^_]+_' + escapeStringRegexp(`${tag}_${vm.name_label}.xva`)
'^[^_]+_' + escapeRegExp(`${tag}_${vm.name_label}.xva`)
)
const backups = sortBy(filter(files, fileName => reg.test(fileName)))

@@ -887,9 +900,7 @@ export default class {

xapi._assertHealthyVdiChains(vm)

const reg = new RegExp(
'^rollingSnapshot_[^_]+_' + escapeStringRegexp(tag) + '_'
)
const reg = new RegExp('^rollingSnapshot_[^_]+_' + escapeRegExp(tag) + '_')
const snapshots = sortBy(
filter(vm.$snapshots, snapshot => reg.test(snapshot.name_label)),
'name_label'
@@ -926,9 +937,7 @@ export default class {
const transferStart = Date.now()
tag = 'DR_' + tag
const reg = new RegExp(
'^' +
escapeStringRegexp(`${vm.name_label}_${tag}_`) +
'[0-9]{8}T[0-9]{6}Z$'
'^' + escapeRegExp(`${vm.name_label}_${tag}_`) + '[0-9]{8}T[0-9]{6}Z$'
)

const targetXapi = this._xo.getXapi(sr)
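These hunks drop the `escape-string-regexp` dependency in favour of lodash's `escapeRegExp`, which escapes the same regular-expression metacharacters, so the generated patterns do not change. A small sketch of the equivalence, with a made-up tag and timestamp:

const { escapeRegExp } = require('lodash')

// escapeRegExp('vm.name (prod)') === 'vm\\.name \\(prod\\)'
const tag = 'DR_nightly'
const reg = new RegExp('^' + escapeRegExp(`${tag}_`) + '[0-9]{8}T[0-9]{6}Z$')
console.log(reg.test('DR_nightly_20191104T120000Z')) // true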
@@ -87,7 +87,7 @@ async function mountLvmPhysicalVolume(devicePath, partition) {
args.push('-o', partition.start * 512)
}
args.push('--show', '-f', devicePath)
const path = (await execa.stdout('losetup', args)).trim()
const path = (await execa('losetup', args)).stdout.trim()
await execa('pvscan', ['--cache', path])

return {
@@ -251,7 +251,7 @@ export default class BackupNgFileRestore {
}

async _listPartitions(devicePath, inspectLvmPv = true) {
const stdout = await execa.stdout('partx', [
const { stdout } = await execa('partx', [
'--bytes',
'--output=NR,START,SIZE,NAME,UUID,TYPE',
'--pairs',

@@ -1,7 +1,7 @@
import asyncMap from '@xen-orchestra/async-map'
import { createPredicate } from 'value-matcher'
import { timeout } from 'promise-toolbox'
import { assign, filter, isEmpty, map, mapValues } from 'lodash'
import { filter, isEmpty, map, mapValues } from 'lodash'

import { crossProduct } from '../../math'
import { serializeError, thunkToArray } from '../../utils'
@@ -82,7 +82,11 @@ export default async function executeJobCall({
params,
start: Date.now(),
})
let promise = app.callApiMethod(session, job.method, assign({}, params))
let promise = app.callApiMethod(
session,
job.method,
Object.assign({}, params)
)
if (job.timeout) {
promise = promise::timeout(job.timeout)
}
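Two small modernisations here: lodash `assign` is replaced by the native `Object.assign`, which shallow-copies own enumerable properties just the same, and the existing `promise::timeout(...)` call keeps using promise-toolbox's `timeout` through the bind operator. A short sketch of both, assuming nothing beyond promise-toolbox itself; the values are placeholders.

import { timeout } from 'promise-toolbox'

// Native shallow clone, equivalent to lodash assign({}, params) here.
const params = Object.assign({}, { vm: 'vm-id', force: true })

// `promise::timeout(ms)` is sugar for calling the helper with the promise as
// `this`; without the bind-operator syntax it reads:
const slow = new Promise(resolve => setTimeout(resolve, 60e3, params))
timeout.call(slow, 1e3).catch(error => {
  console.error(error) // rejected with a timeout error after one second
})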
@@ -4,7 +4,7 @@ import { invalidParameters, noSuchObject } from 'xo-common/api-errors'

import * as sensitiveValues from '../sensitive-values'
import { PluginsMetadata } from '../models/plugin-metadata'
import { isFunction, mapToArray } from '../utils'
import { mapToArray } from '../utils'

// ===================================================================

@@ -65,9 +65,9 @@ export default class {
id,
instance,
name,
testable: isFunction(instance.test),
testable: typeof instance.test === 'function',
testSchema,
unloadable: isFunction(instance.unload),
unloadable: typeof instance.unload === 'function',
version,
})
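This is one of many hunks in this diff that swap the `isFunction`/`isString` helpers (from lodash or the local `utils`) for plain `typeof` checks, which behave the same for the values involved and remove an import. The equivalences, as a quick sketch:

// typeof covers every use these helpers had in the touched files:
const isFunction = value => typeof value === 'function'
const isString = value => typeof value === 'string'

console.log(isFunction(() => {})) // true
console.log(isFunction(class A {})) // true - classes are functions too
console.log(isString('xo')) // true
console.log(isString(new String('xo'))) // false - boxed strings are objects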
@@ -1,7 +1,6 @@
import asyncMap from '@xen-orchestra/async-map'
import synchronized from 'decorator-synchronized'
import {
assign,
every,
forEach,
isObject,
@@ -123,7 +122,7 @@ export default class {
}

async computeVmResourcesUsage(vm) {
return assign(
return Object.assign(
computeVmResourcesUsage(this._xo.getXapi(vm).getObject(vm._xapiId)),
await this._xo.computeVmIpPoolsUsage(vm)
)

@@ -77,7 +77,10 @@ export default class Scheduling {
'schedules',
() => db.get(),
schedules =>
asyncMap(schedules, schedule => db.update(normalize(schedule))),
asyncMap(schedules, async schedule => {
await db.update(normalize(schedule))
this._start(schedule.id)
}),
['jobs']
)

@@ -2,7 +2,7 @@ import levelup from 'level-party'
import sublevel from 'level-sublevel'
import { ensureDir } from 'fs-extra'

import { forEach, isFunction, promisify } from '../utils'
import { forEach, promisify } from '../utils'

// ===================================================================

@@ -32,7 +32,7 @@ const levelHas = db => {
const levelPromise = db => {
const dbP = {}
forEach(db, (value, name) => {
if (!isFunction(value)) {
if (typeof value !== 'function') {
return
}

@@ -10,13 +10,7 @@ import parseDuration from '../_parseDuration'
import Xapi from '../xapi'
import xapiObjectToXo from '../xapi-object-to-xo'
import XapiStats from '../xapi-stats'
import {
camelToSnakeCase,
forEach,
isEmpty,
isString,
popProperty,
} from '../utils'
import { camelToSnakeCase, forEach, isEmpty, popProperty } from '../utils'
import { Servers } from '../models/server'

// ===================================================================
@@ -461,7 +455,7 @@ export default class {

// Returns the XAPI connection associated to an object.
getXapi(object, type) {
if (isString(object)) {
if (typeof object === 'string') {
object = this._xo.getObject(object, type)
}

@@ -9,8 +9,6 @@ import {
forEach,
includes,
isEmpty,
isFunction,
isString,
iteratee,
map as mapToArray,
stubTrue,
@@ -73,7 +71,8 @@ export default class Xo extends EventEmitter {

if (
type != null &&
((isString(type) && type !== obj.type) || !includes(type, obj.type)) // Array
((typeof type === 'string' && type !== obj.type) ||
!includes(type, obj.type)) // Array
) {
throw noSuchObject(key, type)
}
@@ -210,7 +209,7 @@ export default class Xo extends EventEmitter {
}

// For security, prevent from accessing `this`.
if (isFunction(value)) {
if (typeof value === 'function') {
value = (value =>
function() {
return value.apply(thisArg, arguments)

@@ -27,7 +27,7 @@
"child-process-promise": "^2.0.3",
"core-js": "^3.0.0",
"pipette": "^0.9.3",
"promise-toolbox": "^0.13.0",
"promise-toolbox": "^0.14.0",
"tmp": "^0.1.0",
"vhd-lib": "^0.7.0"
},
@@ -36,7 +36,7 @@
"@babel/core": "^7.0.0",
"@babel/preset-env": "^7.0.0",
"babel-plugin-lodash": "^3.3.2",
"cross-env": "^5.1.3",
"cross-env": "^6.0.3",
"event-to-promise": "^0.8.0",
"execa": "^2.0.2",
"fs-extra": "^8.0.1",

@@ -32,7 +32,7 @@
},
"devDependencies": {
"@nraynaud/novnc": "0.6.1",
"@xen-orchestra/cron": "^1.0.4",
"@xen-orchestra/cron": "^1.0.5",
"@xen-orchestra/defined": "^0.0.0",
"@xen-orchestra/template": "^0.1.0",
"ansi_up": "^4.0.3",
@@ -97,7 +97,7 @@
"moment-timezone": "^0.5.14",
"notifyjs": "^3.0.0",
"otplib": "^11.0.0",
"promise-toolbox": "^0.13.0",
"promise-toolbox": "^0.14.0",
"prop-types": "^15.6.0",
"qrcode": "^1.3.2",
"random-password": "^0.1.2",

@@ -1,6 +1,5 @@
import PropTypes from 'prop-types'
import React from 'react'
import { isFunction } from 'lodash'

import Button from './button'
import Component from './base-component'
@@ -93,9 +92,10 @@ export default class ActionButton extends Component {

const { redirectOnSuccess } = props
if (redirectOnSuccess !== undefined) {
const to = isFunction(redirectOnSuccess)
? redirectOnSuccess(result, handlerParam)
: redirectOnSuccess
const to =
typeof redirectOnSuccess === 'function'
? redirectOnSuccess(result, handlerParam)
: redirectOnSuccess
if (to !== undefined) {
return this.context.router.push(to)
}
@@ -1,6 +1,6 @@
import { PureComponent } from 'react'
import { cowSet } from 'utils'
import { includes, isArray, forEach, map } from 'lodash'
import { includes, forEach, map } from 'lodash'

import getEventValue from './get-event-value'

@@ -15,7 +15,7 @@ const get = (object, path, depth) => {
}

const prop = path[depth++]
return isArray(object) && prop === '*'
return Array.isArray(object) && prop === '*'
? map(object, value => get(value, path, depth))
: get(object[prop], path, depth)
}
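Only `isArray` becomes `Array.isArray` here, but the `'*'` branch it guards is easy to miss: when the current value is an array and the path segment is `'*'`, the lookup fans out over every element. The following is a minimal, self-contained re-implementation for illustration only; the real helper's base case sits outside this hunk, and the data is made up.

// Assumed sketch of the wildcard path lookup shown above.
const get = (object, path, depth = 0) => {
  if (depth === path.length) {
    return object
  }
  const prop = path[depth++]
  return Array.isArray(object) && prop === '*'
    ? object.map(value => get(value, path, depth))
    : get(object[prop], path, depth)
}

const vms = [{ name: 'web' }, { name: 'db' }]
console.log(get({ vms }, ['vms', '*', 'name'])) // [ 'web', 'db' ]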
@@ -1,7 +1,7 @@
import classNames from 'classnames'
import React from 'react'
import PropTypes from 'prop-types'
import { isEmpty, isFunction, isString, map, pick } from 'lodash'
import { isEmpty, map, pick } from 'lodash'

import _ from '../intl'
import Component from '../base-component'
@@ -100,7 +100,7 @@ class Editable extends Component {

return this.__save(
() => this.state.previous,
isFunction(onUndo) ? onUndo : props.onChange
typeof onUndo === 'function' ? onUndo : props.onChange
)
}

@@ -132,7 +132,9 @@ class Editable extends Component {
} catch (error) {
this.setState({
// `error` may be undefined if the action has been cancelled
error: error !== undefined && (isString(error) ? error : error.message),
error:
error !== undefined &&
(typeof error === 'string' ? error : error.message),
saving: false,
})
logError(error)

@@ -3,7 +3,7 @@ import PropTypes from 'prop-types'
import React, { Component } from 'react'
import { connect } from 'react-redux'
import { FormattedMessage, IntlProvider as IntlProvider_ } from 'react-intl'
import { every, isFunction, isString } from 'lodash'
import { every } from 'lodash'

import locales from './locales'
import messages from './messages'
@@ -20,7 +20,7 @@ import { createSelector } from '.././selectors'
// - render (optional): a function receiving the React nodes of the
// translated message and returning the React node to render
const getMessage = (props, messageId, values, render) => {
if (isString(props)) {
if (typeof props === 'string') {
render = values
values = messageId
messageId = props
@@ -32,7 +32,7 @@ const getMessage = (props, messageId, values, render) => {
throw new Error(`no message defined for ${messageId}`)
}

if (isFunction(values)) {
if (typeof values === 'function') {
render = values
values = undefined
}
@@ -2,7 +2,6 @@
// `create-locale`.

const forEach = require('lodash/forEach')
const isString = require('lodash/isString')

const messages = {
keyValue: '{key}: {value}',
@@ -18,9 +17,11 @@ const messages = {
notifications: 'Notifications',
noNotifications: 'No notifications so far.',
notificationNew: 'NEW!',
moreDetails: 'More details',
messageSubject: 'Subject',
messageFrom: 'From',
messageReply: 'Reply',
sr: 'SR',
tryXoa: 'Try XOA for free and deploy it here.',

editableLongClickPlaceholder: 'Long click to edit',
@@ -122,11 +123,7 @@ const messages = {
newServerPage: 'Server',
newImport: 'Import',
xosan: 'XOSAN',
backupDeprecatedMessage:
'Warning: Backup is deprecated, use Backup NG instead.',
moveRestoreLegacyMessage: 'Warning: Your legacy backups can be found here',
backupMigrationLink: 'How to migrate to Backup NG',
backupNgNewPage: 'Create a new backup with Backup NG',
backupMigrationLink: 'How to migrate to the new backup system',
backupOverviewPage: 'Overview',
backupNewPage: 'New',
backupRemotesPage: 'Remotes',
@@ -134,7 +131,6 @@ const messages = {
backupFileRestorePage: 'File restore',
schedule: 'Schedule',
newVmBackup: 'New VM backup',
editVmBackup: 'Edit VM backup',
backup: 'Backup',
rollingSnapshot: 'Rolling Snapshot',
deltaBackup: 'Delta Backup',
@@ -157,7 +153,12 @@ const messages = {
freeUpgrade: 'Free upgrade!',
checkXoa: 'Check XOA',
xoaCheck: 'XOA check',
checkXoaCommunity: 'XOA check is available in XOA.',
closeTunnel: 'Close tunnel',
openTunnel: 'Open tunnel',
supportCommunity:
'The XOA check and the support tunnel are available in XOA.',
supportTunnel: 'Support tunnel',
supportTunnelClosed: 'The support tunnel is closed.',

// ----- Sign out -----
signOut: 'Sign out',
@@ -425,13 +426,12 @@ const messages = {
jobUserNotFound: "This job's creator no longer exists",
backupUserNotFound: "This backup's creator no longer exists",
redirectToMatchingVms: 'Click here to see the matching VMs',
migrateToBackupNg: 'Migrate to Backup NG',
noMatchingVms: 'There are no matching VMs!',
allMatchingVms: '{icon} See the matching VMs ({nMatchingVms, number})',
backupOwner: 'Backup owner',
migrateBackupSchedule: 'Migrate to Backup NG',
migrateBackupSchedule: 'Migrate to the new backup system',
migrateBackupScheduleMessage:
'This will convert the old backup job to a Backup NG job. This operation is not reversible. Do you want to continue?',
'This will convert the legacy backup job to the new backup system. This operation is not reversible. Do you want to continue?',
runBackupNgJobConfirm: 'Are you sure you want to run {name} ({id})?',
cancelJobConfirm: 'Are you sure you want to cancel {name} ({id})?',
scheduleDstWarning:
@@ -453,6 +453,9 @@ const messages = {
backupName: 'Name',
offlineSnapshot: 'Offline snapshot',
offlineSnapshotInfo: 'Shutdown VMs before snapshotting them',
offlineBackup: 'Offline backup',
offlineBackupInfo:
'Export VMs without snapshotting them. The VMs will be shutdown during the export.',
timeout: 'Timeout',
timeoutInfo: 'Number of hours after which a job is considered failed',
fullBackupInterval: 'Full backup interval',
@@ -2164,8 +2167,9 @@ const messages = {
size: 'Size',
totalDiskSize: 'Total disk size',
hideInstalledPool: 'Already installed templates are hidden',
hubSrErrorTitle: 'Missing property',
hubImportNotificationTitle: 'XVA import',
hubTemplateDescriptionNotAvailable:
'No description available for this template',

// Licenses
xosanUnregisteredDisclaimer:
@@ -2214,7 +2218,7 @@ const messages = {
'{days, plural, =0 {} one {# day } other {# days }}{hours, plural, =0 {} one {# hour } other {# hours }}{minutes, plural, =0 {} one {# minute } other {# minutes }}{seconds, plural, =0 {} one {# second} other {# seconds}}',
}
forEach(messages, function(message, id) {
if (isString(message)) {
if (typeof message === 'string') {
messages[id] = {
id,
defaultMessage: message,
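The closing loop in the last hunk turns every plain-string entry of `messages` into the `{ id, defaultMessage }` descriptor shape used by react-intl, with the `typeof` check now standing in for `lodash/isString`. A self-contained restatement of that normalization, using native iteration and a couple of sample entries taken from the table above:

const messages = { sr: 'SR', keyValue: '{key}: {value}' }

Object.keys(messages).forEach(id => {
  const message = messages[id]
  if (typeof message === 'string') {
    messages[id] = { id, defaultMessage: message }
  }
})

console.log(messages.sr) // { id: 'sr', defaultMessage: 'SR' }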
@@ -1,6 +1,5 @@
import forEachRight from 'lodash/forEachRight'
import forEach from 'lodash/forEach'
import isArray from 'lodash/isArray'
import isIp from 'is-ip'
import some from 'lodash/some'

@@ -76,7 +75,7 @@ export const getNextIpV4 = ip => {
}

export const formatIps = ips => {
if (!isArray(ips)) {
if (!Array.isArray(ips)) {
throw new Error('ips must be an array')
}
if (ips.length === 0) {

@@ -1,6 +1,5 @@
import React from 'react'
import includes from 'lodash/includes'
import isArray from 'lodash/isArray'
import marked from 'marked'

import { Col, Row } from 'grid'
@@ -14,7 +13,7 @@ export const getType = schema => {

const type = schema.type

if (isArray(type)) {
if (Array.isArray(type)) {
if (includes(type, 'integer')) {
return 'integer'
}
Some files were not shown because too many files have changed in this diff.