Merge branch 'next-release' into stable
commit 1d8341eb27
.babelrc | 12

@@ -1,12 +0,0 @@
{
  "comments": false,
  "compact": true,
  "plugins": [
    "transform-decorators-legacy",
    "transform-runtime"
  ],
  "presets": [
    "stage-0",
    "es2015"
  ]
}
.gitignore (vendored) | 1

@@ -1,3 +1,4 @@
/.nyc_output/
/dist/
/node_modules/
/src/api/index.js

@@ -1,5 +1,6 @@
/examples/
example.js
example.js.map
*.example.js
*.example.js.map

@@ -7,8 +7,12 @@
// Better stack traces if possible.
require('../better-stacks')

// Use Bluebird for all promises as it provides better performance and
// less memory usage.
global.Promise = require('bluebird')

// Make unhandled rejected promises visible.
process.on('unhandledRejection', (reason) => {
process.on('unhandledRejection', function (reason) {
  console.warn('[Warn] Possibly unhandled rejection:', reason && reason.stack || reason)
})
@@ -10,10 +10,12 @@ var plumber = require('gulp-plumber')
var sourceMaps = require('gulp-sourcemaps')
var watch = require('gulp-watch')

var join = require('path').join

// ===================================================================

var SRC_DIR = __dirname + '/src'
var DIST_DIR = __dirname + '/dist'
var SRC_DIR = join(__dirname, 'src')
var DIST_DIR = join(__dirname, 'dist')

var PRODUCTION = process.argv.indexOf('--production') !== -1
package.json | 47

@@ -13,6 +13,10 @@
  "bugs": {
    "url": "https://github.com/vatesfr/xo-web/issues"
  },
  "repository": {
    "type": "git",
    "url": "git://github.com/vatesfr/xo-server.git"
  },
  "author": "Julien Fontanet <julien.fontanet@vates.fr>",
  "preferGlobal": true,
  "files": [

@@ -21,17 +25,13 @@
    "dist/",
    "config.json",
    "index.js",
    "signin.jade"
    "signin.pug"
  ],
  "directories": {
    "bin": "bin"
  },
  "repository": {
    "type": "git",
    "url": "git://github.com/vatesfr/xo-server.git"
  },
  "engines": {
    "node": ">=0.12 <5"
    "node": ">=0.12"
  },
  "dependencies": {
    "@marsaud/smb2-promise": "^0.2.0",

@@ -58,13 +58,12 @@
    "fs-promise": "^0.4.1",
    "get-stream": "^2.1.0",
    "hashy": "~0.4.2",
    "helmet": "^1.1.0",
    "helmet": "^2.0.0",
    "highland": "^2.5.1",
    "http-proxy": "^1.13.2",
    "http-server-plus": "^0.6.4",
    "human-format": "^0.6.0",
    "is-my-json-valid": "^2.12.2",
    "jade": "^1.11.0",
    "js-yaml": "^3.2.7",
    "json-rpc-peer": "^0.11.0",
    "json5": "^0.4.0",

@@ -115,6 +114,7 @@
    "passport-local": "^1.0.0",
    "promise-toolbox": "^0.3.2",
    "proxy-agent": "^2.0.0",
    "pug": "^2.0.0-alpha6",
    "redis": "^2.0.1",
    "schema-inspector": "^1.5.1",
    "semver": "^5.1.0",

@@ -124,14 +124,14 @@
    "struct-fu": "^1.0.0",
    "trace": "^2.0.1",
    "ws": "~1.0.1",
    "xen-api": "^0.8.0",
    "xen-api": "^0.9.0",
    "xml2js": "~0.4.6",
    "xo-acl-resolver": "0.1.0",
    "xo-collection": "^0.4.0",
    "xo-remote-parser": "^0.1.0"
    "xo-remote-parser": "^0.2.0"
  },
  "devDependencies": {
    "babel-eslint": "^5.0.0",
    "babel-eslint": "^6.0.4",
    "babel-plugin-transform-decorators-legacy": "^1.3.4",
    "babel-plugin-transform-runtime": "^6.5.2",
    "babel-preset-es2015": "^6.5.0",

@@ -148,21 +148,34 @@
    "leche": "^2.1.1",
    "mocha": "^2.2.1",
    "must": "^0.13.1",
    "nyc": "^6.4.2",
    "sinon": "^1.14.1",
    "standard": "^5.2.1"
    "standard": "^7.0.0"
  },
  "scripts": {
    "build": "npm run build-indexes && gulp build --production",
    "depcheck": "dependency-check ./package.json",
    "build-indexes": "./tools/generate-index src/api src/xo-mixins",
    "dev": "npm run build-indexes && gulp build",
    "dev-test": "mocha --opts .mocha.opts --watch --reporter=min \"dist/**/*.spec.js\"",
    "lint": "standard",
    "postrelease": "git checkout master && git merge --ff-only stable && git checkout next-release && git merge --ff-only stable",
    "posttest": "npm run lint && npm run depcheck",
    "prepublish": "npm run build",
    "start": "node bin/xo-server",
    "test": "mocha --opts .mocha.opts \"dist/**/*.spec.js\"",
    "posttest": "npm run lint && dependency-check ./package.json",
    "prerelease": "git checkout next-release && git pull --ff-only && git checkout stable && git pull --ff-only && git merge next-release",
    "release": "npm version",
    "postrelease": "git checkout master && git merge --ff-only stable && git checkout next-release && git merge --ff-only stable"
    "start": "node bin/xo-server",
    "test": "nyc mocha --opts .mocha.opts \"dist/**/*.spec.js\""
  },
  "babel": {
    "plugins": [
      "transform-decorators-legacy",
      "transform-runtime"
    ],
    "presets": [
      "stage-0",
      "es2015"
    ]
  },
  "config": {
    "ghooks": {

@@ -171,7 +184,7 @@
  },
  "standard": {
    "ignore": [
      "dist/**"
      "dist"
    ],
    "parser": "babel-eslint"
  }
@@ -3,7 +3,7 @@ import {JsonRpcError} from 'json-rpc-peer'
// ===================================================================

// Export standard JSON-RPC errors.
export {
export { // eslint-disable-line no-duplicate-imports
  InvalidJson,
  InvalidParameters,
  InvalidRequest,
|
14
src/api.js
14
src/api.js
@ -272,6 +272,20 @@ export default class Api {
|
||||
|
||||
try {
|
||||
await checkPermission.call(context, method)
|
||||
|
||||
// API methods are in a namespace.
|
||||
// Some methods use the namespace or an id parameter like:
|
||||
//
|
||||
// vm.detachPci vm=<string>
|
||||
// vm.ejectCd id=<string>
|
||||
//
|
||||
// The goal here is to standardize the calls by always providing
|
||||
// an id parameter when possible to simplify calls to the API.
|
||||
if (params && params.id === undefined) {
|
||||
const namespace = name.slice(0, name.indexOf('.'))
|
||||
params.id = params[namespace]
|
||||
}
|
||||
|
||||
checkParams(method, params)
|
||||
|
||||
const resolvedParams = await resolveParams.call(context, method, params)
|
||||
|
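A minimal sketch of the aliasing this hunk introduces; the method name and the parameter value below are made up for illustration:

  // Hypothetical call: vm.ejectCd invoked with the legacy "vm" parameter.
  const name = 'vm.ejectCd'
  const params = { vm: 'some-vm-uuid' } // no explicit id

  // Same defaulting as the hunk above: reuse the namespace parameter as id.
  if (params && params.id === undefined) {
    const namespace = name.slice(0, name.indexOf('.'))
    params.id = params[namespace] // params.id === 'some-vm-uuid'
  }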
@@ -16,13 +16,23 @@ export async function get ({namespace}) {
}

get.description = 'returns logs list for one namespace'
get.params = {
  namespace: { type: 'string' }
}
get.permission = 'admin'

// -------------------------------------------------------------------

async function delete_ ({namespace, id}) {
  const logger = await this.getLogger(namespace)
  logger.del(id)
}

delete_.description = 'deletes on or several logs from a namespace'
delete_.description = 'deletes one or several logs from a namespace'
delete_.params = {
  id: { type: 'string' },
  namespace: { type: 'string' }
}
delete_.permission = 'admin'

export {delete_ as delete}

@@ -74,7 +74,7 @@ installPatch.resolve = {
// -------------------------------------------------------------------

async function handlePatchUpload (req, res, {pool}) {
  const {headers: {['content-length']: contentLength}} = req
  const contentLength = req.headers['content-length']
  if (!contentLength) {
    res.writeHead(411)
    res.end('Content length is mandatory')

@@ -27,7 +27,7 @@ startsWith = require 'lodash.startswith'
  pCatch,
  pFinally
} = require '../utils'
{isVmRunning: $isVMRunning} = require('../xapi')
{isVmRunning: $isVmRunning} = require('../xapi')

#=====================================================================
@@ -426,15 +426,18 @@ set = $coroutine (params) ->
      "cannot set memory below the static minimum (#{VM.memory.static[0]})"
    )

  if ($isVMRunning VM) and memory > VM.memory.static[1]
    @throw(
      'INVALID_PARAMS'
      "cannot set memory above the static maximum (#{VM.memory.static[1]}) "+
      "for a running VM"
    )
  if memory < VM.memory.dynamic[0]
    yield xapi.call 'VM.set_memory_dynamic_min', ref, "#{memory}"
  else if memory > VM.memory.static[1]
    if $isVmRunning VM
      @throw(
        'INVALID_PARAMS'
        "cannot set memory above the static maximum (#{VM.memory.static[1]}) "+
        "for a running VM"
      )

  if memory > VM.memory.static[1]
    yield xapi.call 'VM.set_memory_static_max', ref, "#{memory}"

  if resourceSet?
    yield @allocateLimitsInResourceSet({
      memory: memory - VM.memory.size

@@ -449,7 +452,7 @@ set = $coroutine (params) ->
    yield @allocateLimitsInResourceSet({
      cpus: CPUs - VM.CPUs.number
    }, resourceSet)
  if $isVMRunning VM
  if $isVmRunning VM
    if CPUs > VM.CPUs.max
      @throw(
        'INVALID_PARAMS'

@@ -664,12 +667,12 @@ exports.convert = convertToTemplate
snapshot = $coroutine ({vm, name}) ->
  yield checkPermissionOnSrs.call(this, vm)

  snapshot = yield @getXapi(vm).snapshotVm(vm._xapiRef, name)
  snapshot = yield @getXapi(vm).snapshotVm(vm._xapiRef, name ? "#{vm.name_label}_#{new Date().toISOString()}")
  return snapshot.$id

snapshot.params = {
  id: { type: 'string' }
  name: { type: 'string' }
  name: { type: 'string', optional: true }
}

snapshot.resolve = {

@@ -688,14 +691,14 @@ rollingDeltaBackup = $coroutine ({vm, remote, tag, depth}) ->
  })

rollingDeltaBackup.params = {
  vm: { type: 'string' }
  id: { type: 'string' }
  remote: { type: 'string' }
  tag: { type: 'string'}
  depth: { type: ['string', 'number'] }
}

rollingDeltaBackup.resolve = {
  vm: ['vm', ['VM', 'VM-snapshot'], 'administrate']
  vm: ['id', ['VM', 'VM-snapshot'], 'administrate']
}

rollingDeltaBackup.permission = 'admin'

@@ -726,12 +729,12 @@ exports.importDeltaBackup = importDeltaBackup
deltaCopy = ({ vm, sr }) -> @deltaCopyVm(vm, sr)

deltaCopy.params = {
  vm: { type: 'string' },
  id: { type: 'string' },
  sr: { type: 'string' }
}

deltaCopy.resolve = {
  vm: [ 'vm', 'VM', 'operate'],
  vm: [ 'id', 'VM', 'operate'],
  sr: [ 'sr', 'SR', 'operate']
}
@@ -834,21 +837,30 @@ exports.rollingBackup = rollingBackup

#---------------------------------------------------------------------

rollingDrCopy = ({vm, pool, tag, depth}) ->
  if vm.$pool is pool.id
    throw new GenericError('Disaster Recovery attempts to copy on the same pool')
  return @rollingDrCopyVm({vm, sr: @getObject(pool.default_SR, 'SR'), tag, depth})
rollingDrCopy = ({vm, pool, sr, tag, depth}) ->
  unless sr
    unless pool
      throw new InvalidParameters('either pool or sr param should be specified')

    if vm.$pool is pool.id
      throw new GenericError('Disaster Recovery attempts to copy on the same pool')

    sr = @getObject(pool.default_SR, 'SR')

  return @rollingDrCopyVm({vm, sr, tag, depth})

rollingDrCopy.params = {
  id: { type: 'string' }
  pool: { type: 'string' }
  tag: { type: 'string'}
  depth: { type: 'number' }
  id: { type: 'string' }
  pool: { type: 'string', optional: true }
  sr: { type: 'string', optional: true }
  tag: { type: 'string'}
}

rollingDrCopy.resolve = {
  vm: ['id', ['VM', 'VM-snapshot'], 'administrate'],
  pool: ['pool', 'pool', 'administrate']
  sr: ['sr', 'SR', 'administrate']
}

rollingDrCopy.description = 'Copies a VM to a different pool, with a tagged name, and removes the oldest VM with the same tag from this pool, according to depth'

@@ -1092,8 +1104,6 @@ exports.attachDisk = attachDisk

#---------------------------------------------------------------------

# FIXME: position should be optional and default to last.

# TODO: implement resource sets
createInterface = $coroutine ({vm, network, position, mtu, mac}) ->
  vif = yield @getXapi(vm).createVif(vm._xapiId, network._xapiId, {

@@ -1107,7 +1117,7 @@ createInterface = $coroutine ({vm, network, position, mtu, mac}) ->
createInterface.params = {
  vm: { type: 'string' }
  network: { type: 'string' }
  position: { type: 'string' }
  position: { type: 'string', optional: true }
  mtu: { type: 'string', optional: true }
  mac: { type: 'string', optional: true }
}
@@ -33,10 +33,6 @@ export default class Collection extends EventEmitter {
    })
  }

  constructor () {
    super()
  }

  async add (models, opts) {
    const array = isArray(models)
    if (!array) {
@@ -1,5 +1,4 @@
import assign from 'lodash.assign'
import getStream from 'get-stream'
import startsWith from 'lodash.startswith'
import { parse as parseUrl } from 'url'
import { request as httpRequest } from 'http'

@@ -7,7 +6,8 @@ import { request as httpsRequest } from 'https'
import { stringify as formatQueryString } from 'querystring'

import {
  isString
  isString,
  streamToBuffer
} from './utils'

// -------------------------------------------------------------------

@@ -90,7 +90,7 @@ export default (...args) => {
    response.cancel = () => {
      req.abort()
    }
    response.readAll = () => getStream(response)
    response.readAll = () => streamToBuffer(response)

    const length = response.headers['content-length']
    if (length) {
@@ -14,8 +14,9 @@ import proxyConsole from './proxy-console'
import serveStatic from 'serve-static'
import startsWith from 'lodash.startswith'
import WebSocket from 'ws'
import { compile as compilePug } from 'pug'
import { createServer as createProxyServer } from 'http-proxy'
import {compile as compileJade} from 'jade'
import { join as joinPath } from 'path'

import {
  AlreadyAuthenticated,

@@ -128,8 +129,8 @@ async function setUpPassport (express, xo) {
  }

  // Registers the sign in form.
  const signInPage = compileJade(
    await readFile(__dirname + '/../signin.jade')
  const signInPage = compilePug(
    await readFile(joinPath(__dirname, '..', 'signin.pug'))
  )
  express.get('/signin', (req, res, next) => {
    res.send(signInPage({

@@ -515,7 +516,7 @@ const setUpConsoleProxy = (webServer, xo) => {
      const { token } = parseCookies(req.headers.cookie)

      const user = await xo.authenticateUser({ token })
      if (!await xo.hasPermissions(user.id, [ [ id, 'operate' ] ])) { // eslint-disable-line space-before-keywords
      if (!await xo.hasPermissions(user.id, [ [ id, 'operate' ] ])) {
        throw new InvalidCredential()
      }
@@ -3,8 +3,10 @@
import {expect} from 'chai'
import leche from 'leche'

import {productParams} from './job-executor'
import {_computeCrossProduct} from './job-executor'
import {
  _computeCrossProduct,
  productParams
} from './job-executor'

describe('productParams', function () {
  leche.withData({
@@ -1,5 +1,4 @@
import eventToPromise from 'event-to-promise'
import getStream from 'get-stream'
import through2 from 'through2'

import {

@@ -10,6 +9,7 @@ import {
  addChecksumToReadStream,
  noop,
  pCatch,
  streamToBuffer,
  validChecksumOfReadStream
} from '../utils'

@@ -48,11 +48,14 @@ export default class RemoteHandlerAbstract {
  }

  async outputFile (file, data, options) {
    return this._outputFile(file, data, options)
    return this._outputFile(file, data, {
      flags: 'wx',
      ...options
    })
  }

  async _outputFile (file, data, options) {
    const stream = await this.createOutputStream(file)
    const stream = await this.createOutputStream(file, options)
    const promise = eventToPromise(stream, 'finish')
    stream.end(data)
    return promise

@@ -62,8 +65,8 @@ export default class RemoteHandlerAbstract {
    return this._readFile(file, options)
  }

  async _readFile (file, options) {
    return getStream(await this.createReadStream(file, options))
  _readFile (file, options) {
    return this.createReadStream(file, options).then(streamToBuffer)
  }

  async rename (oldPath, newPath) {

@@ -128,7 +131,10 @@ export default class RemoteHandlerAbstract {
    checksum = false,
    ...options
  } = {}) {
    const streamP = this._createOutputStream(file, options)
    const streamP = this._createOutputStream(file, {
      flags: 'wx',
      ...options
    })

    if (!checksum) {
      return streamP
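The 'wx' default used in the two hunks above is Node's write-exclusive fs flag: the write fails with EEXIST if the file already exists, so an existing file is never silently overwritten. A small sketch of the defaulting pattern (the helper name is made up):

  // Explicit caller options win because the spread comes after the default.
  const withDefaultFlags = options => ({ flags: 'wx', ...options })

  withDefaultFlags()               // { flags: 'wx' }
  withDefaultFlags({ flags: 'w' }) // { flags: 'w' } (caller override)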
src/utils.js | 31

@@ -1,6 +1,7 @@
import base64url from 'base64url'
import eventToPromise from 'event-to-promise'
import forEach from 'lodash.foreach'
import getStream from 'get-stream'
import has from 'lodash.has'
import highland from 'highland'
import humanFormat from 'human-format'

@@ -45,17 +46,7 @@ export function bufferToStream (buf) {
  return stream
}

export async function streamToBuffer (stream) {
  return new Promise((resolve, reject) => {
    const bufs = []

    stream.on('error', reject)
    stream.on('data', data => {
      bufs.push(data)
    })
    stream.on('end', () => resolve(Buffer.concat(bufs)))
  })
}
export const streamToBuffer = getStream.buffer

// -------------------------------------------------------------------
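For context: get-stream's buffer() helper, which streamToBuffer now aliases, resolves with the concatenated contents of a stream, mirroring the hand-rolled implementation removed above. A minimal usage sketch (the file name is arbitrary):

  import getStream from 'get-stream'
  import { createReadStream } from 'fs'

  const streamToBuffer = getStream.buffer

  // Resolves with a Buffer holding the whole stream.
  streamToBuffer(createReadStream('example.bin')).then(buf => {
    console.log(buf.length)
  })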
@@ -245,14 +236,20 @@ export const lightSet = collection => {
  collection = null

  const set = {
    add: value => (data[value] = true, set),
    add: value => {
      data[value] = true
      return set
    },
    clear: () => {
      for (const value in data) {
        delete data[value]
      }
      return set
    },
    delete: value => (delete data[value], set),
    delete: value => {
      delete data[value]
      return set
    },
    has: value => data[value]
  }
  return set
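A quick usage sketch of the lightSet helper whose methods are rewritten above; it relies only on what the hunk exposes (add, clear and delete return the set so calls chain, has returns the stored value), and it assumes the initial collection argument is optional:

  const tags = lightSet()

  tags.add('prod').add('backup') // each mutator returns the set
  tags.has('prod')               // truthy
  tags.delete('backup')
  tags.clear()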
@@ -306,7 +303,7 @@ export function pSettle (promises) {

// -------------------------------------------------------------------

export {
export { // eslint-disable-line no-duplicate-imports
  all as pAll,
  catchPlus as pCatch,
  delay as pDelay,

@@ -360,14 +357,14 @@ export const safeDateFormat = d3TimeFormat('%Y%m%dT%H%M%SZ')
// This functions are often used throughout xo-server.
//
// Exports them from here to avoid direct dependencies on lodash.
export { default as forEach } from 'lodash.foreach'
export { default as isArray } from 'lodash.isarray'
export { default as forEach } from 'lodash.foreach' // eslint-disable-line no-duplicate-imports
export { default as isArray } from 'lodash.isarray' // eslint-disable-line no-duplicate-imports
export { default as isBoolean } from 'lodash.isboolean'
export { default as isEmpty } from 'lodash.isempty'
export { default as isFunction } from 'lodash.isfunction'
export { default as isInteger } from 'lodash.isinteger'
export { default as isObject } from 'lodash.isobject'
export { default as isString } from 'lodash.isstring'
export { default as isString } from 'lodash.isstring' // eslint-disable-line no-duplicate-imports
export { default as mapToArray } from 'lodash.map'

// -------------------------------------------------------------------
@@ -528,12 +528,12 @@ export default async function vhdMerge (

  // Child must be a delta.
  if (childVhd.footer.diskType !== HARD_DISK_TYPE_DIFFERENCING) {
    throw new Error(`Unable to merge, child is not a delta backup.`)
    throw new Error('Unable to merge, child is not a delta backup.')
  }

  // Merging in differencing disk is prohibited in our case.
  if (parentVhd.footer.diskType !== HARD_DISK_TYPE_DYNAMIC) {
    throw new Error(`Unable to merge, parent is not a full backup.`)
    throw new Error('Unable to merge, parent is not a full backup.')
  }

  // Allocation table map is not yet implemented.

@@ -541,7 +541,7 @@ export default async function vhdMerge (
    parentVhd.hasBlockAllocationTableMap() ||
    childVhd.hasBlockAllocationTableMap()
  ) {
    throw new Error(`Unsupported allocation table map.`)
    throw new Error('Unsupported allocation table map.')
  }

  // Read allocation table of child/parent.
@@ -126,6 +126,9 @@ const TRANSFORMS = {

      return {
        usage: 0,
        size: 0,

        // Deprecated
        total: 0
      }
    })(),

@@ -322,7 +325,7 @@ const TRANSFORMS = {
      return disks
    })(),
    install_methods: (function () {
      const {['install-methods']: methods} = otherConfig
      const methods = otherConfig['install-methods']

      return methods ? methods.split(',') : []
    })(),

@@ -361,9 +364,9 @@ const TRANSFORMS = {
      VDIs: link(obj, 'VDIs'),

      $container: (
        obj.shared
        obj.shared || !obj.$PBDs[0]
          ? link(obj, 'pool')
          : obj.$PBDs[0] && link(obj.$PBDs[0], 'host')
          : link(obj.$PBDs[0], 'host')
      ),
      $PBDs: link(obj, 'PBDs')
    }
@@ -32,11 +32,7 @@ export class UnknownLegendFormat extends XapiStatsError {
  }
}

export class FaultyGranularity extends XapiStatsError {
  constructor (msg) {
    super(msg)
  }
}
export class FaultyGranularity extends XapiStatsError {}

// -------------------------------------------------------------------
// Utils
src/xapi.js | 44

@@ -1,3 +1,5 @@
/* eslint-disable camelcase */

import createDebug from 'debug'
import every from 'lodash.every'
import fatfs from 'fatfs'

@@ -8,6 +10,7 @@ import includes from 'lodash.includes'
import pickBy from 'lodash.pickby'
import sortBy from 'lodash.sortby'
import unzip from 'julien-f-unzip'
import { defer } from 'promise-toolbox'
import { utcFormat, utcParse } from 'd3-time-format'
import {
  wrapError as wrapXapiError,

@@ -113,6 +116,10 @@ const asBoolean = value => Boolean(value)
// }
const asInteger = value => String(value)

const optional = (value, fn) => value == null
  ? undefined
  : fn ? fn(value) : value

const filterUndefineds = obj => pickBy(obj, value => value !== undefined)

const prepareXapiParam = param => {

@@ -274,8 +281,7 @@ export default class Xapi extends XapiBase {
  // TODO: implements a timeout.
  _waitObject (predicate) {
    if (isFunction(predicate)) {
      let resolve
      const promise = new Promise(resolve_ => resolve = resolve_)
      const { promise, resolve } = defer()

      const unregister = this._registerGenericWatcher(obj => {
        if (predicate(obj)) {

@@ -290,10 +296,7 @@ export default class Xapi extends XapiBase {

    let watcher = this._objectWatchers[predicate]
    if (!watcher) {
      let resolve
      const promise = new Promise(resolve_ => {
        resolve = resolve_
      })
      const { promise, resolve } = defer()

      // Register the watcher.
      watcher = this._objectWatchers[predicate] = {

@@ -352,18 +355,8 @@ export default class Xapi extends XapiBase {

    let watcher = this._taskWatchers[ref]
    if (!watcher) {
      let resolve, reject
      const promise = new Promise((resolve_, reject_) => {
        resolve = resolve_
        reject = reject_
      })

      // Register the watcher.
      watcher = this._taskWatchers[ref] = {
        promise,
        resolve,
        reject
      }
      watcher = this._taskWatchers[ref] = defer()
    }

    return watcher.promise
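The deferred objects introduced above come from promise-toolbox; based on how these hunks use them, defer() bundles a promise with its resolve and reject functions, replacing the manual `let resolve; new Promise(resolve_ => { resolve = resolve_ })` dance. A rough sketch:

  import { defer } from 'promise-toolbox'

  // resolve and reject settle the exposed promise.
  const { promise, resolve, reject } = defer()

  promise.then(value => console.log('settled with', value))
  resolve(42)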
@@ -435,7 +428,7 @@ export default class Xapi extends XapiBase {
        nameDescription
      }),
      autoPoweron != null && this._updateObjectMapProperty(pool, 'other_config', {
        autoPoweron: autoPoweron ? 'on' : null
        autoPoweron: autoPoweron ? 'true' : null
      })
    ])
  }

@@ -976,6 +969,7 @@ export default class Xapi extends XapiBase {
    generation_id,
    ha_always_run,
    ha_restart_priority,
    has_vendor_device = false, // Avoid issue with some Dundee builds.
    hardware_platform_version,
    HVM_boot_params,
    HVM_boot_policy,

@@ -1044,11 +1038,12 @@ export default class Xapi extends XapiBase {
      generation_id,
      ha_always_run: asBoolean(ha_always_run),
      ha_restart_priority,
      hardware_platform_version,
      has_vendor_device,
      hardware_platform_version: optional(hardware_platform_version, asInteger),
      // HVM_shadow_multiplier: asFloat(HVM_shadow_multiplier), // FIXME: does not work FIELD_TYPE_ERROR(hVM_shadow_multiplier)
      name_description,
      name_label,
      order,
      order: optional(order, asInteger),
      protection_policy,
      shutdown_delay: asInteger(shutdown_delay),
      start_delay: asInteger(start_delay),

@@ -1384,10 +1379,7 @@ export default class Xapi extends XapiBase {
      vifs[vif.$ref] = vif
    })

    return {
      // TODO: make non-enumerable?
      streams: await streams::pAll(),

    return Object.defineProperty({
      version: '1.0.0',
      vbds,
      vdis,

@@ -1401,7 +1393,9 @@ export default class Xapi extends XapiBase {
          }
        }
        : vm
    }
    }, 'streams', {
      value: await streams::pAll()
    })
  }

  @deferrable.onFailure
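Why the streams hunks above pair with the simpler JSON.stringify(delta) call in the backups mixin further down: properties attached with Object.defineProperty are non-enumerable by default, so JSON.stringify skips them while direct access still works. A small illustration, not taken from the codebase:

  const delta = Object.defineProperty({ version: '1.0.0' }, 'streams', {
    value: ['...'] // non-enumerable unless stated otherwise
  })

  JSON.stringify(delta) // '{"version":"1.0.0"}' (streams omitted)
  delta.streams         // ['...'] (still reachable in code)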
@@ -151,7 +151,7 @@ export default class {
  }

  async deleteAuthenticationToken (id) {
    if (!await this._tokens.remove(id)) { // eslint-disable-line space-before-keywords
    if (!await this._tokens.remove(id)) {
      throw new NoSuchAuthenticationToken(id)
    }
  }
@@ -391,8 +391,7 @@ export default class {
      // The problem is in the merge case, a delta merged in a full vdi
      // backup forces us to browse the resulting file =>
      // Significant transfer time on the network !
      checksum: !isFull,
      flags: 'wx'
      checksum: !isFull
    })

    stream.on('error', error => targetStream.emit('error', error))

@@ -545,12 +544,8 @@ export default class {

    $onFailure(() => handler.unlink(infoPath)::pCatch(noop))

    const { streams,
      ...infos
    } = delta

    // Write Metadata.
    await handler.outputFile(infoPath, JSON.stringify(infos, null, 2), {flag: 'wx'})
    await handler.outputFile(infoPath, JSON.stringify(delta, null, 2))

    // Here we have a completed backup. We can merge old vdis.
    await Promise.all(

@@ -631,7 +626,7 @@ export default class {
  }

  async _backupVm (vm, handler, file, {compress, onlyMetadata}) {
    const targetStream = await handler.createOutputStream(file, { flags: 'wx' })
    const targetStream = await handler.createOutputStream(file)
    const promise = eventToPromise(targetStream, 'finish')

    const sourceStream = await this._xo.getXapi(vm).exportVm(vm._xapiId, {
@@ -57,7 +57,7 @@ export default class {
    })

    xo.on('start', async () => {
      if (!(await users.exists())) {
      if (!await users.exists()) {
        const email = 'admin@admin.net'
        const password = 'admin'

@@ -246,7 +246,7 @@ export default class {
  }

  async getGroup (id) {
    const group = (await this._groups.first(id))
    const group = await this._groups.first(id)
    if (!group) {
      throw new NoSuchGroup(id)
    }
@@ -78,7 +78,7 @@ export default class {
  async unregisterXenServer (id) {
    this.disconnectXenServer(id)::pCatch(noop)

    if (!await this._servers.remove(id)) { // eslint-disable-line space-before-keywords
    if (!await this._servers.remove(id)) {
      throw new NoSuchXenServer(id)
    }
  }