diff --git a/@xen-orchestra/cron/src/index.js b/@xen-orchestra/cron/src/index.js index a81928830..ea5de6d45 100644 --- a/@xen-orchestra/cron/src/index.js +++ b/@xen-orchestra/cron/src/index.js @@ -18,9 +18,10 @@ class Job { } const scheduleNext = () => { const delay = schedule._nextDelay() - this._timeout = delay < MAX_DELAY - ? setTimeout(wrapper, delay) - : setTimeout(scheduleNext, MAX_DELAY) + this._timeout = + delay < MAX_DELAY + ? setTimeout(wrapper, delay) + : setTimeout(scheduleNext, MAX_DELAY) } this._scheduleNext = scheduleNext diff --git a/@xen-orchestra/cron/src/next.js b/@xen-orchestra/cron/src/next.js index 88e859418..20448d5a2 100644 --- a/@xen-orchestra/cron/src/next.js +++ b/@xen-orchestra/cron/src/next.js @@ -9,7 +9,7 @@ const NEXT_MAPPING = { minute: { hour: 1 }, } -const getFirst = values => values !== undefined ? values[0] : 0 +const getFirst = values => (values !== undefined ? values[0] : 0) const setFirstAvailable = (date, unit, values) => { if (values === undefined) { diff --git a/@xen-orchestra/cron/src/parse.js b/@xen-orchestra/cron/src/parse.js index a5d7714b2..9ee9569af 100644 --- a/@xen-orchestra/cron/src/parse.js +++ b/@xen-orchestra/cron/src/parse.js @@ -90,7 +90,7 @@ const createParser = ({ fields: [...fields], presets: { ...presets } }) => { if (!match('/')) { return } - [start, end] = field.range + ;[start, end] = field.range step = parseInteger() } else { start = parseValue() diff --git a/@xen-orchestra/cron/src/parse.spec.js b/@xen-orchestra/cron/src/parse.spec.js index f06e660b6..a09266ad4 100644 --- a/@xen-orchestra/cron/src/parse.spec.js +++ b/@xen-orchestra/cron/src/parse.spec.js @@ -28,9 +28,7 @@ describe('parse()', () => { }) it('reports missing integer', () => { - expect(() => parse('*/a')).toThrow( - 'minute: missing integer at character 2' - ) + expect(() => parse('*/a')).toThrow('minute: missing integer at character 2') expect(() => parse('*')).toThrow('hour: missing integer at character 1') }) diff --git a/packages/complex-matcher/src/index.js b/packages/complex-matcher/src/index.js index f2c63f614..d540864a8 100644 --- a/packages/complex-matcher/src/index.js +++ b/packages/complex-matcher/src/index.js @@ -325,7 +325,10 @@ class P { value.push(result.value) pos = result.pos } - while (i < max && (result = this._parse(input, pos, end)) instanceof Success) { + while ( + i < max && + (result = this._parse(input, pos, end)) instanceof Success + ) { ++i value.push(result.value) pos = result.pos @@ -359,8 +362,9 @@ P.eof = new P( const parser = P.grammar({ default: r => - P.seq(r.ws, r.term.repeat(), P.eof) - .map(([, terms]) => (terms.length === 0 ? new Null() : new And(terms))), + P.seq(r.ws, r.term.repeat(), P.eof).map( + ([, terms]) => (terms.length === 0 ? new Null() : new And(terms)) + ), quotedString: new P((input, pos, end) => { if (input[pos] !== '"') { return new Failure(pos, '"') @@ -416,7 +420,7 @@ const parser = P.grammar({ ? new StringNode(str) : new NumberNode(asNum) }) - ), + ) ).skip(r.ws), ws: P.regex(/\s*/), }).default @@ -476,17 +480,19 @@ export const getPropertyClausesStrings = node => { // ------------------------------------------------------------------- export const setPropertyClause = (node, name, child) => { - const property = child && new Property( - name, - typeof child === 'string' ? new StringNode(child) : child - ) + const property = + child && + new Property( + name, + typeof child === 'string' ? new StringNode(child) : child + ) if (node === undefined) { return property } - const children = (node instanceof And ? 
node.children : [node]).filter(child => - !(child instanceof Property && child.name === name) + const children = (node instanceof And ? node.children : [node]).filter( + child => !(child instanceof Property && child.name === name) ) if (property !== undefined) { children.push(property) diff --git a/packages/complex-matcher/src/index.spec.js b/packages/complex-matcher/src/index.spec.js index edc21419b..d1cd019ab 100644 --- a/packages/complex-matcher/src/index.spec.js +++ b/packages/complex-matcher/src/index.spec.js @@ -49,13 +49,15 @@ describe('Number', () => { describe('setPropertyClause', () => { it('creates a node if none passed', () => { - expect(setPropertyClause(undefined, 'foo', 'bar').toString()).toBe('foo:bar') + expect(setPropertyClause(undefined, 'foo', 'bar').toString()).toBe( + 'foo:bar' + ) }) it('adds a property clause if there was none', () => { - expect( - setPropertyClause(parse('baz'), 'foo', 'bar').toString() - ).toBe('baz foo:bar') + expect(setPropertyClause(parse('baz'), 'foo', 'bar').toString()).toBe( + 'baz foo:bar' + ) }) it('replaces the property clause if there was one', () => { diff --git a/packages/value-matcher/src/index.js b/packages/value-matcher/src/index.js index 19ca6a6ef..1db9d90c0 100644 --- a/packages/value-matcher/src/index.js +++ b/packages/value-matcher/src/index.js @@ -26,13 +26,16 @@ type ObjectPattern = { [string]: Pattern } type ArrayPattern = Array // value equals the pattern -type ValuePattern = bool | number | string +type ValuePattern = boolean | number | string const match = (pattern: Pattern, value: any) => { if (Array.isArray(pattern)) { - return Array.isArray(value) && pattern.every((subpattern, i) => - // FIXME: subpatterns should match different subvalues - value.some(subvalue => match(subpattern, subvalue)) + return ( + Array.isArray(value) && + pattern.every((subpattern, i) => + // FIXME: subpatterns should match different subvalues + value.some(subvalue => match(subpattern, subvalue)) + ) ) } @@ -41,7 +44,7 @@ const match = (pattern: Pattern, value: any) => { const { length } = keys if (length === 1) { - const [ key ] = keys + const [key] = keys if (key === '__and') { const andPattern: AndPattern = (pattern: any) return andPattern.__and.every(subpattern => match(subpattern, value)) @@ -74,4 +77,5 @@ const match = (pattern: Pattern, value: any) => { return pattern === value } -export const createPredicate = (pattern: Pattern) => (value: any) => match(pattern, value) +export const createPredicate = (pattern: Pattern) => (value: any) => + match(pattern, value) diff --git a/packages/vhd-cli/src/vhd.js b/packages/vhd-cli/src/vhd.js index 5a2a03d08..8f2098a02 100644 --- a/packages/vhd-cli/src/vhd.js +++ b/packages/vhd-cli/src/vhd.js @@ -36,11 +36,13 @@ const fuFooter = fu.struct([ fu.char('creatorApplication', 4), // 28 fu.uint32('creatorVersion'), // 32 fu.uint32('creatorHostOs'), // 36 - fu.struct('originalSize', [ // At the creation, current size of the hard disk. + fu.struct('originalSize', [ + // At the creation, current size of the hard disk. fu.uint32('high'), // 40 fu.uint32('low'), // 44 ]), - fu.struct('currentSize', [ // Current size of the virtual disk. At the creation: currentSize = originalSize. + fu.struct('currentSize', [ + // Current size of the virtual disk. At the creation: currentSize = originalSize. 
fu.uint32('high'), // 48 fu.uint32('low'), // 52 ]), @@ -60,11 +62,9 @@ const FOOTER_SIZE = fuFooter.size const fuHeader = fu.struct([ fu.char('cookie', 8), - fu.struct('dataOffset', [ - fu.uint32('high'), - fu.uint32('low'), - ]), - fu.struct('tableOffset', [ // Absolute byte offset of the Block Allocation Table. + fu.struct('dataOffset', [fu.uint32('high'), fu.uint32('low')]), + fu.struct('tableOffset', [ + // Absolute byte offset of the Block Allocation Table. fu.uint32('high'), fu.uint32('low'), ]), @@ -76,16 +76,21 @@ const fuHeader = fu.struct([ fu.uint32('parentTimestamp'), fu.byte('reserved1', 4), fu.char16be('parentUnicodeName', 512), - fu.struct('parentLocatorEntry', [ - fu.uint32('platformCode'), - fu.uint32('platformDataSpace'), - fu.uint32('platformDataLength'), - fu.uint32('reserved'), - fu.struct('platformDataOffset', [ // Absolute byte offset of the locator data. - fu.uint32('high'), - fu.uint32('low'), - ]), - ], 8), + fu.struct( + 'parentLocatorEntry', + [ + fu.uint32('platformCode'), + fu.uint32('platformDataSpace'), + fu.uint32('platformDataLength'), + fu.uint32('reserved'), + fu.struct('platformDataOffset', [ + // Absolute byte offset of the locator data. + fu.uint32('high'), + fu.uint32('low'), + ]), + ], + 8 + ), fu.byte('reserved2', 256), ]) const HEADER_SIZE = fuHeader.size @@ -98,10 +103,10 @@ const SIZE_OF_32_BITS = Math.pow(2, 32) const uint32ToUint64 = fu => fu.high * SIZE_OF_32_BITS + fu.low // Returns a 32 bits integer corresponding to a Vhd version. -const getVhdVersion = (major, minor) => (major << 16) | (minor & 0x0000FFFF) +const getVhdVersion = (major, minor) => (major << 16) | (minor & 0x0000ffff) // bytes[] bit manipulation -const testBit = (map, bit) => map[bit >> 3] & 1 << (bit & 7) +const testBit = (map, bit) => map[bit >> 3] & (1 << (bit & 7)) const setBit = (map, bit) => { map[bit >> 3] |= 1 << (bit & 7) } @@ -109,98 +114,95 @@ const unsetBit = (map, bit) => { map[bit >> 3] &= ~(1 << (bit & 7)) } -const addOffsets = (...offsets) => offsets.reduce( - (a, b) => b == null - ? a - : typeof b === 'object' - ? { bytes: a.bytes + b.bytes, bits: a.bits + b.bits } - : { bytes: a.bytes + b, bits: a.bits }, - { bytes: 0, bits: 0 } -) +const addOffsets = (...offsets) => + offsets.reduce( + (a, b) => + b == null + ? a + : typeof b === 'object' + ? 
{ bytes: a.bytes + b.bytes, bits: a.bits + b.bits } + : { bytes: a.bytes + b, bits: a.bits }, + { bytes: 0, bits: 0 } + ) const pack = (field, value, buf, offset) => { - field.pack( - value, - buf, - addOffsets(field.offset, offset) - ) + field.pack(value, buf, addOffsets(field.offset, offset)) } const unpack = (field, buf, offset) => - field.unpack( - buf, - addOffsets(field.offset, offset) - ) + field.unpack(buf, addOffsets(field.offset, offset)) // =================================================================== -const streamToNewBuffer = stream => new Promise((resolve, reject) => { - const chunks = [] - let length = 0 +const streamToNewBuffer = stream => + new Promise((resolve, reject) => { + const chunks = [] + let length = 0 - const onData = chunk => { - chunks.push(chunk) - length += chunk.length - } - stream.on('data', onData) + const onData = chunk => { + chunks.push(chunk) + length += chunk.length + } + stream.on('data', onData) - const clean = () => { - stream.removeListener('data', onData) - stream.removeListener('end', onEnd) - stream.removeListener('error', onError) - } - const onEnd = () => { - resolve(Buffer.concat(chunks, length)) - clean() - } - stream.on('end', onEnd) - const onError = error => { - reject(error) - clean() - } - stream.on('error', onError) -}) + const clean = () => { + stream.removeListener('data', onData) + stream.removeListener('end', onEnd) + stream.removeListener('error', onError) + } + const onEnd = () => { + resolve(Buffer.concat(chunks, length)) + clean() + } + stream.on('end', onEnd) + const onError = error => { + reject(error) + clean() + } + stream.on('error', onError) + }) const streamToExistingBuffer = ( stream, buffer, offset = 0, end = buffer.length -) => new Promise((resolve, reject) => { - assert(offset >= 0) - assert(end > offset) - assert(end <= buffer.length) +) => + new Promise((resolve, reject) => { + assert(offset >= 0) + assert(end > offset) + assert(end <= buffer.length) - let i = offset + let i = offset - const onData = chunk => { - const prev = i - i += chunk.length + const onData = chunk => { + const prev = i + i += chunk.length - if (i > end) { - return onError(new Error('too much data')) + if (i > end) { + return onError(new Error('too much data')) + } + + chunk.copy(buffer, prev) } + stream.on('data', onData) - chunk.copy(buffer, prev) - } - stream.on('data', onData) - - const clean = () => { - stream.removeListener('data', onData) - stream.removeListener('end', onEnd) - stream.removeListener('error', onError) - } - const onEnd = () => { - resolve(i - offset) - clean() - } - stream.on('end', onEnd) - const onError = error => { - reject(error) - clean() - } - stream.on('error', onError) -}) + const clean = () => { + stream.removeListener('data', onData) + stream.removeListener('end', onEnd) + stream.removeListener('error', onError) + } + const onEnd = () => { + resolve(i - offset) + clean() + } + stream.on('end', onEnd) + const onError = error => { + reject(error) + clean() + } + stream.on('error', onError) + }) // =================================================================== @@ -214,7 +216,11 @@ const computeChecksum = (struct, buf, offset = 0) => { for (let i = offset, n = checksumOffset; i < n; ++i) { sum += buf[i] } - for (let i = checksumOffset + checksumField.size, n = offset + struct.size; i < n; ++i) { + for ( + let i = checksumOffset + checksumField.size, n = offset + struct.size; + i < n; + ++i + ) { sum += buf[i] } @@ -222,7 +228,8 @@ const computeChecksum = (struct, buf, offset = 0) => { } const 
verifyChecksum = (struct, buf, offset) => - unpack(struct.fields.checksum, buf, offset) === computeChecksum(struct, buf, offset) + unpack(struct.fields.checksum, buf, offset) === + computeChecksum(struct, buf, offset) const getParentLocatorSize = parentLocatorEntry => { const { platformDataSpace } = parentLocatorEntry @@ -231,15 +238,13 @@ const getParentLocatorSize = parentLocatorEntry => { return platformDataSpace * SECTOR_SIZE } - return (platformDataSpace % SECTOR_SIZE === 0) - ? platformDataSpace - : 0 + return platformDataSpace % SECTOR_SIZE === 0 ? platformDataSpace : 0 } // =================================================================== // Euclidean division, returns the quotient and the remainder of a / b. -const div = (a, b) => [ Math.floor(a / b), a % b ] +const div = (a, b) => [Math.floor(a / b), a % b] export default class Vhd { constructor (handler, path) { @@ -263,13 +268,22 @@ export default class Vhd { assert(begin >= 0) assert(length > 0) - return this._handler.createReadStream(this._path, { - end: begin + length - 1, - start: begin, - }).then(buf - ? stream => streamToExistingBuffer(stream, buf, offset, (offset || 0) + length) - : streamToNewBuffer - ) + return this._handler + .createReadStream(this._path, { + end: begin + length - 1, + start: begin, + }) + .then( + buf + ? stream => + streamToExistingBuffer( + stream, + buf, + offset, + (offset || 0) + length + ) + : streamToNewBuffer + ) } // - if `buffer`: it is filled with 0 starting from `offset`, and @@ -296,7 +310,7 @@ export default class Vhd { assert(block < this._header.maxTableEntries) const blockAddr = this._blockAllocationTable[block] - if (blockAddr !== 0xFFFFFFFF) { + if (blockAddr !== 0xffffffff) { return blockAddr * SECTOR_SIZE } } @@ -325,7 +339,8 @@ export default class Vhd { assert(sectorsPerBlock % 1 === 0) // 1 bit per sector, rounded up to full sectors - this._blockBitmapSize = Math.ceil(sectorsPerBlock / 8 / SECTOR_SIZE) * SECTOR_SIZE + this._blockBitmapSize = + Math.ceil(sectorsPerBlock / 8 / SECTOR_SIZE) * SECTOR_SIZE assert(this._blockBitmapSize === SECTOR_SIZE) this._footer = footer @@ -368,10 +383,10 @@ export default class Vhd { const blockBitmapSize = this._blockBitmapSize const parent = this._parent - if (blockAddr && ( - !parent || - testBit(await this._read(blockAddr, blockBitmapSize), sector) - )) { + if ( + blockAddr && + (!parent || testBit(await this._read(blockAddr, blockBitmapSize), sector)) + ) { return this._read( blockAddr + blockBitmapSize + sector * SECTOR_SIZE + begin, length, @@ -402,12 +417,17 @@ export default class Vhd { } if (!parent) { - return this._read(blockAddr + this._blockBitmapSize + begin, length, buf, offset) + return this._read( + blockAddr + this._blockBitmapSize + begin, + length, + buf, + offset + ) } // FIXME: we should read as many sectors in a single pass as // possible for maximum perf. 
- const [ sector, beginInSector ] = div(begin, SECTOR_SIZE) + const [sector, beginInSector] = div(begin, SECTOR_SIZE) return this._readBlockSector( block, sector, @@ -428,7 +448,7 @@ export default class Vhd { } const { blockSize } = this._header - const [ block, beginInBlock ] = div(begin, blockSize) + const [block, beginInBlock] = div(begin, blockSize) return this._readBlock( block, diff --git a/packages/xen-api/examples/utils.js b/packages/xen-api/examples/utils.js index d7509e4b2..688ae15cb 100644 --- a/packages/xen-api/examples/utils.js +++ b/packages/xen-api/examples/utils.js @@ -29,13 +29,15 @@ exports.createOutputStream = path => { exports.resolveRef = (xapi, type, refOrUuidOrNameLabel) => isOpaqueRef(refOrUuidOrNameLabel) ? refOrUuidOrNameLabel - : xapi.call(`${type}.get_by_uuid`, refOrUuidOrNameLabel).catch( - () => xapi.call(`${type}.get_by_name_label`, refOrUuidOrNameLabel).then( - refs => { + : xapi.call(`${type}.get_by_uuid`, refOrUuidOrNameLabel).catch(() => + xapi + .call(`${type}.get_by_name_label`, refOrUuidOrNameLabel) + .then(refs => { if (refs.length === 1) { return refs[0] } - throw new Error(`no single match for ${type} with name label ${refOrUuidOrNameLabel}`) - } - ) + throw new Error( + `no single match for ${type} with name label ${refOrUuidOrNameLabel}` + ) + }) ) diff --git a/packages/xen-api/src/cli.js b/packages/xen-api/src/cli.js index 25efc2258..c15821d4e 100755 --- a/packages/xen-api/src/cli.js +++ b/packages/xen-api/src/cli.js @@ -56,7 +56,7 @@ const main = async args => { let auth if (opts._.length > 1) { - const [ , user, password = await askPassword() ] = opts._ + const [, user, password = await askPassword()] = opts._ auth = { user, password } } @@ -86,11 +86,11 @@ const main = async args => { // Make the REPL waits for promise completion. repl.eval = (evaluate => (cmd, context, filename, cb) => { - fromCallback(cb => { + ;fromCallback(cb => { evaluate.call(repl, cmd, context, filename, cb) - }).then(value => - isArray(value) ? Promise.all(value) : value - )::asCallback(cb) + }) + .then(value => (isArray(value) ? 
Promise.all(value) : value)) + ::asCallback(cb) })(repl.eval) await eventToPromise(repl, 'exit') diff --git a/packages/xen-api/src/index.js b/packages/xen-api/src/index.js index 837435f95..451cc68cc 100644 --- a/packages/xen-api/src/index.js +++ b/packages/xen-api/src/index.js @@ -55,7 +55,7 @@ const NETWORK_ERRORS = { ETIMEDOUT: true, } -const isNetworkError = ({code}) => NETWORK_ERRORS[code] +const isNetworkError = ({ code }) => NETWORK_ERRORS[code] // ------------------------------------------------------------------- @@ -64,17 +64,17 @@ const XAPI_NETWORK_ERRORS = { HOST_HAS_NO_MANAGEMENT_IP: true, } -const isXapiNetworkError = ({code}) => XAPI_NETWORK_ERRORS[code] +const isXapiNetworkError = ({ code }) => XAPI_NETWORK_ERRORS[code] // ------------------------------------------------------------------- -const areEventsLost = ({code}) => code === 'EVENTS_LOST' +const areEventsLost = ({ code }) => code === 'EVENTS_LOST' -const isHostSlave = ({code}) => code === 'HOST_IS_SLAVE' +const isHostSlave = ({ code }) => code === 'HOST_IS_SLAVE' -const isMethodUnknown = ({code}) => code === 'MESSAGE_METHOD_UNKNOWN' +const isMethodUnknown = ({ code }) => code === 'MESSAGE_METHOD_UNKNOWN' -const isSessionInvalid = ({code}) => code === 'SESSION_INVALID' +const isSessionInvalid = ({ code }) => code === 'SESSION_INVALID' // ------------------------------------------------------------------- @@ -93,8 +93,9 @@ class XapiError extends BaseError { export const wrapError = error => { let code, params - if (isArray(error)) { // < XenServer 7.3 - [ code, ...params ] = error + if (isArray(error)) { + // < XenServer 7.3 + ;[code, ...params] = error } else { code = error.message params = error.data @@ -111,7 +112,7 @@ const parseUrl = url => { throw new Error('invalid URL: ' + url) } - const [ , protocol = 'https:', username, password, hostname, port ] = matches + const [, protocol = 'https:', username, password, hostname, port] = matches return { protocol, username, password, hostname, port } } @@ -128,17 +129,13 @@ const { const OPAQUE_REF_PREFIX = 'OpaqueRef:' export const isOpaqueRef = value => - typeof value === 'string' && - startsWith(value, OPAQUE_REF_PREFIX) + typeof value === 'string' && startsWith(value, OPAQUE_REF_PREFIX) // ------------------------------------------------------------------- const RE_READ_ONLY_METHOD = /^[^.]+\.get_/ -const isReadOnlyCall = (method, args) => ( - args.length === 1 && - isOpaqueRef(args[0]) && - RE_READ_ONLY_METHOD.test(method) -) +const isReadOnlyCall = (method, args) => + args.length === 1 && isOpaqueRef(args[0]) && RE_READ_ONLY_METHOD.test(method) // Prepare values before passing them to the XenAPI: // @@ -178,17 +175,17 @@ const EMPTY_ARRAY = freezeObject([]) const getTaskResult = (task, onSuccess, onFailure) => { const { status } = task if (status === 'cancelled') { - return [ onFailure(new Cancel('task canceled')) ] + return [onFailure(new Cancel('task canceled'))] } if (status === 'failure') { - return [ onFailure(wrapError(task.error_info)) ] + return [onFailure(wrapError(task.error_info))] } if (status === 'success') { // the result might be: // - empty string // - an opaque reference // - an XML-RPC value - return [ onSuccess(task.result) ] + return [onSuccess(task.result)] } } @@ -209,7 +206,7 @@ export class Xapi extends EventEmitter { this._pool = null this._readOnly = Boolean(opts.readOnly) this._sessionId = null - const url = this._url = parseUrl(opts.url) + const url = (this._url = parseUrl(opts.url)) if (this._auth === undefined) { const user = 
url.username @@ -224,9 +221,7 @@ export class Xapi extends EventEmitter { } if (opts.watchEvents !== false) { - this._debounce = opts.debounce == null - ? 200 - : opts.debounce + this._debounce = opts.debounce == null ? 200 : opts.debounce this._eventWatchers = createObject(null) @@ -237,7 +232,7 @@ export class Xapi extends EventEmitter { this._nTasks = 0 - const objects = this._objects = new Collection() + const objects = (this._objects = new Collection()) objects.getKey = getKey this._objectsByRefs = createObject(null) @@ -286,13 +281,7 @@ export class Xapi extends EventEmitter { get status () { const id = this._sessionId - return id - ? ( - id === CONNECTING - ? CONNECTING - : CONNECTED - ) - : DISCONNECTED + return id ? (id === CONNECTING ? CONNECTING : CONNECTED) : DISCONNECTED } get _humanId () { @@ -305,36 +294,46 @@ export class Xapi extends EventEmitter { barrier (ref) { const eventWatchers = this._eventWatchers if (eventWatchers === undefined) { - return Promise.reject(new Error('Xapi#barrier() requires events watching')) + return Promise.reject( + new Error('Xapi#barrier() requires events watching') + ) } - const key = `xo:barrier:${Math.random().toString(36).slice(2)}` + const key = `xo:barrier:${Math.random() + .toString(36) + .slice(2)}` const poolRef = this._pool.$ref const { promise, resolve } = defer() eventWatchers[key] = resolve - return this._sessionCall( - 'pool.add_to_other_config', - [ poolRef, key, '' ] - ).then(() => promise.then(() => { - this._sessionCall('pool.remove_from_other_config', [ poolRef, key ]).catch(noop) + return this._sessionCall('pool.add_to_other_config', [ + poolRef, + key, + '', + ]).then(() => + promise.then(() => { + this._sessionCall('pool.remove_from_other_config', [ + poolRef, + key, + ]).catch(noop) - if (ref === undefined) { - return - } + if (ref === undefined) { + return + } - // support legacy params (type, ref) - if (arguments.length === 2) { - ref = arguments[1] - } + // support legacy params (type, ref) + if (arguments.length === 2) { + ref = arguments[1] + } - return this.getObjectByRef(ref) - })) + return this.getObjectByRef(ref) + }) + ) } connect () { - const {status} = this + const { status } = this if (status === CONNECTED) { return Promise.reject(new Error('already connected')) @@ -378,7 +377,7 @@ export class Xapi extends EventEmitter { return Promise.reject(new Error('already disconnected')) } - this._transportCall('session.logout', [ this._sessionId ]).catch(noop) + this._transportCall('session.logout', [this._sessionId]).catch(noop) this._sessionId = null @@ -434,13 +433,10 @@ export class Xapi extends EventEmitter { // this lib), UUID (unique identifier that some objects have) or // opaque reference (internal to XAPI). getObject (idOrUuidOrRef, defaultValue) { - const object = typeof idOrUuidOrRef === 'string' - ? ( - // if there is an UUID, it is also the $id. - this._objects.all[idOrUuidOrRef] || - this._objectsByRefs[idOrUuidOrRef] - ) - : this._objects.all[idOrUuidOrRef.$id] + const object = + typeof idOrUuidOrRef === 'string' + ? 
this._objects.all[idOrUuidOrRef] || this._objectsByRefs[idOrUuidOrRef] + : this._objects.all[idOrUuidOrRef.$id] if (object) return object @@ -479,158 +475,147 @@ export class Xapi extends EventEmitter { } @cancelable - getResource ($cancelToken, pathname, { - host, - query, - task, - }) { - return this._autoTask( - task, - `Xapi#getResource ${pathname}` - ).then(taskRef => { - query = { ...query, session_id: this.sessionId } - let taskResult - if (taskRef !== undefined) { - query.task_id = taskRef - taskResult = this.watchTask(taskRef) + getResource ($cancelToken, pathname, { host, query, task }) { + return this._autoTask(task, `Xapi#getResource ${pathname}`).then( + taskRef => { + query = { ...query, session_id: this.sessionId } + let taskResult + if (taskRef !== undefined) { + query.task_id = taskRef + taskResult = this.watchTask(taskRef) - if (typeof $cancelToken.addHandler === 'function') { - $cancelToken.addHandler(() => taskResult) + if (typeof $cancelToken.addHandler === 'function') { + $cancelToken.addHandler(() => taskResult) + } } - } - let promise = httpRequest( - $cancelToken, - this._url, - host && { - hostname: this.getObject(host).address, - }, - { - pathname, - query, - rejectUnauthorized: !this._allowUnauthorized, - } - ) - - if (taskResult !== undefined) { - promise = promise.then(response => { - response.task = taskResult - return response - }) - } - - return promise - }) - } - - @cancelable - putResource ($cancelToken, body, pathname, { - host, - query, - task, - } = {}) { - if (this._readOnly) { - return Promise.reject(new Error(new Error('cannot put resource in read only mode'))) - } - - return this._autoTask( - task, - `Xapi#putResource ${pathname}` - ).then(taskRef => { - query = { ...query, session_id: this.sessionId } - - let taskResult - if (taskRef !== undefined) { - query.task_id = taskRef - taskResult = this.watchTask(taskRef) - - if (typeof $cancelToken.addHandler === 'function') { - $cancelToken.addHandler(() => taskResult) - } - } - - const headers = {} - - // Xen API does not support chunk encoding. - const isStream = typeof body.pipe === 'function' - const { length } = body - if (isStream && length === undefined) { - // add a fake huge content length (1 PiB) - headers['content-length'] = '1125899906842624' - } - - const doRequest = override => httpRequest.put( - $cancelToken, - this._url, - host && { - hostname: this.getObject(host).address, - }, - { - body, - headers, - pathname, - query, - rejectUnauthorized: !this._allowUnauthorized, - }, - override - ) - - const promise = isStream - - // dummy request to probe for a redirection before consuming body - ? doRequest({ - body: '', - - // omit task_id because this request will fail on purpose - query: 'task_id' in query - ? 
omit(query, 'task_id') - : query, - - maxRedirects: 0, - }).then( - response => { - response.req.abort() - return doRequest() + let promise = httpRequest( + $cancelToken, + this._url, + host && { + hostname: this.getObject(host).address, }, - error => { - let response - if (error != null && (response = error.response) != null) { - response.req.abort() - - const { headers: { location }, statusCode } = response - if (statusCode === 302 && location !== undefined) { - return doRequest(location) - } - } - - throw error + { + pathname, + query, + rejectUnauthorized: !this._allowUnauthorized, } ) - // http-request-plus correctly handle redirects if body is not a stream - : doRequest() - - return promise.then(response => { - const { req } = response - if (taskResult !== undefined) { - taskResult = taskResult.catch(error => { - error.url = response.url - throw error + promise = promise.then(response => { + response.task = taskResult + return response }) } - if (req.finished) { - req.abort() - return taskResult + return promise + } + ) + } + + @cancelable + putResource ($cancelToken, body, pathname, { host, query, task } = {}) { + if (this._readOnly) { + return Promise.reject( + new Error(new Error('cannot put resource in read only mode')) + ) + } + + return this._autoTask(task, `Xapi#putResource ${pathname}`).then( + taskRef => { + query = { ...query, session_id: this.sessionId } + + let taskResult + if (taskRef !== undefined) { + query.task_id = taskRef + taskResult = this.watchTask(taskRef) + + if (typeof $cancelToken.addHandler === 'function') { + $cancelToken.addHandler(() => taskResult) + } } - return fromEvents(req, ['close', 'finish']).then(() => { - req.abort() - return taskResult + const headers = {} + + // Xen API does not support chunk encoding. + const isStream = typeof body.pipe === 'function' + const { length } = body + if (isStream && length === undefined) { + // add a fake huge content length (1 PiB) + headers['content-length'] = '1125899906842624' + } + + const doRequest = override => + httpRequest.put( + $cancelToken, + this._url, + host && { + hostname: this.getObject(host).address, + }, + { + body, + headers, + pathname, + query, + rejectUnauthorized: !this._allowUnauthorized, + }, + override + ) + + // if a stream, sends a dummy request to probe for a + // redirection before consuming body + const promise = isStream + ? doRequest({ + body: '', + + // omit task_id because this request will fail on purpose + query: 'task_id' in query ? 
omit(query, 'task_id') : query, + + maxRedirects: 0, + }).then( + response => { + response.req.abort() + return doRequest() + }, + error => { + let response + if (error != null && (response = error.response) != null) { + response.req.abort() + + const { headers: { location }, statusCode } = response + if (statusCode === 302 && location !== undefined) { + return doRequest(location) + } + } + + throw error + } + ) + : doRequest() + + return promise.then(response => { + const { req } = response + + if (taskResult !== undefined) { + taskResult = taskResult.catch(error => { + error.url = response.url + throw error + }) + } + + if (req.finished) { + req.abort() + return taskResult + } + + return fromEvents(req, ['close', 'finish']).then(() => { + req.abort() + return taskResult + }) }) - }) - }) + } + ) } watchTask (ref) { @@ -692,22 +677,24 @@ export class Xapi extends EventEmitter { newArgs.push.apply(newArgs, args) } - return this._transportCall(method, newArgs) - ::pCatch(isSessionInvalid, () => { + return this._transportCall(method, newArgs)::pCatch( + isSessionInvalid, + () => { // XAPI is sometimes reinitialized and sessions are lost. // Try to login again. debug('%s: the session has been reinitialized', this._humanId) this._sessionId = null return this.connect().then(() => this._sessionCall(method, args)) - }) + } + ) } catch (error) { return Promise.reject(error) } } _addObject (type, ref, object) { - const {_objectsByRefs: objectsByRefs} = this + const { _objectsByRefs: objectsByRefs } = this const reservedKeys = { id: true, @@ -715,9 +702,8 @@ export class Xapi extends EventEmitter { ref: true, type: true, } - const getKey = (key, obj) => reservedKeys[key] && obj === object - ? `$$${key}` - : `$${key}` + const getKey = (key, obj) => + reservedKeys[key] && obj === object ? `$$${key}` : `$${key}` // Creates resolved properties. forEach(object, function resolveObject (value, key, object) { @@ -736,7 +722,7 @@ export class Xapi extends EventEmitter { } else if (isOpaqueRef(value[0])) { // This is an array of refs. defineProperty(object, getKey(key, object), { - get: () => freezeObject(map(value, (ref) => objectsByRefs[ref])), + get: () => freezeObject(map(value, ref => objectsByRefs[ref])), }) freezeObject(value) @@ -836,38 +822,40 @@ export class Xapi extends EventEmitter { } _watchEvents () { - const loop = () => this.status === CONNECTED && this._sessionCall('event.from', [ - ['*'], - this._fromToken, - 60 + 0.1, // Force float. - ]).then(onSuccess, onFailure) + const loop = () => + this.status === CONNECTED && + this._sessionCall('event.from', [ + ['*'], + this._fromToken, + 60 + 0.1, // Force float. 
+ ]).then(onSuccess, onFailure) const onSuccess = ({ events, token, valid_ref_counts: { task } }) => { this._fromToken = token this._processEvents(events) if (task !== this._nTasks) { - this._sessionCall('task.get_all_records').then(tasks => { - const toRemove = new Set() - forEach(this.objects.all, object => { - if (object.$type === 'task') { - toRemove.add(object.$ref) - } + this._sessionCall('task.get_all_records') + .then(tasks => { + const toRemove = new Set() + forEach(this.objects.all, object => { + if (object.$type === 'task') { + toRemove.add(object.$ref) + } + }) + forEach(tasks, (task, ref) => { + toRemove.delete(ref) + this._addObject('task', ref, task) + }) + toRemove.forEach(ref => { + this._removeObject('task', ref) + }) }) - forEach(tasks, (task, ref) => { - toRemove.delete(ref) - this._addObject('task', ref, task) - }) - toRemove.forEach(ref => { - this._removeObject('task', ref) - }) - }).catch(noop) + .catch(noop) } const debounce = this._debounce - return debounce != null - ? pDelay(debounce).then(loop) - : loop() + return debounce != null ? pDelay(debounce).then(loop) : loop() } const onFailure = error => { if (areEventsLost(error)) { @@ -906,41 +894,43 @@ export class Xapi extends EventEmitter { ::/\.get_all_records$/.test ) - return Promise.all(map( - getAllRecordsMethods, - method => this._sessionCall(method).then( - objects => { - const type = method.slice(0, method.indexOf('.')).toLowerCase() - forEach(objects, (object, ref) => { - this._addObject(type, ref, object) - }) - }, - error => { - if (error.code !== 'MESSAGE_REMOVED') { - throw error + return Promise.all( + map(getAllRecordsMethods, method => + this._sessionCall(method).then( + objects => { + const type = method.slice(0, method.indexOf('.')).toLowerCase() + forEach(objects, (object, ref) => { + this._addObject(type, ref, object) + }) + }, + error => { + if (error.code !== 'MESSAGE_REMOVED') { + throw error + } } - } + ) ) - )) + ) }) } - const watchEvents = () => this._sessionCall('event.register', [ ['*'] ]).then(loop) + const watchEvents = () => + this._sessionCall('event.register', [['*']]).then(loop) - const loop = () => this.status === CONNECTED && this._sessionCall('event.next').then(onSuccess, onFailure) + const loop = () => + this.status === CONNECTED && + this._sessionCall('event.next').then(onSuccess, onFailure) const onSuccess = events => { this._processEvents(events) const debounce = this._debounce - return debounce == null - ? loop() - : pDelay(debounce).then(loop) + return debounce == null ? 
loop() : pDelay(debounce).then(loop) } const onFailure = error => { if (areEventsLost(error)) { - return this._sessionCall('event.unregister', [ ['*'] ]).then(watchEvents) + return this._sessionCall('event.unregister', [['*']]).then(watchEvents) } throw error @@ -950,85 +940,106 @@ export class Xapi extends EventEmitter { } } -Xapi.prototype._transportCall = reduce([ - function (method, args) { - return this._call(method, args).catch(error => { - if (!(error instanceof Error)) { - error = wrapError(error) - } - - error.method = method - throw error - }) - }, - call => function () { - let iterator // lazily created - const loop = () => call.apply(this, arguments) - ::pCatch(isNetworkError, isXapiNetworkError, error => { - if (iterator === undefined) { - iterator = fibonacci().clamp(undefined, 60).take(10).toMs() +Xapi.prototype._transportCall = reduce( + [ + function (method, args) { + return this._call(method, args).catch(error => { + if (!(error instanceof Error)) { + error = wrapError(error) } - const cursor = iterator.next() - if (!cursor.done) { - // TODO: ability to cancel the connection - // TODO: ability to force immediate reconnection - - const delay = cursor.value - debug('%s: network error %s, next try in %s ms', this._humanId, error.code, delay) - return pDelay(delay).then(loop) - } - - debug('%s: network error %s, aborting', this._humanId, error.code) - - // mark as disconnected - this.disconnect()::pCatch(noop) - + error.method = method throw error }) - return loop() - }, - call => function loop () { - return call.apply(this, arguments) - ::pCatch(isHostSlave, ({params: [master]}) => { - debug('%s: host is slave, attempting to connect at %s', this._humanId, master) + }, + call => + function () { + let iterator // lazily created + const loop = () => + call + .apply(this, arguments) + ::pCatch(isNetworkError, isXapiNetworkError, error => { + if (iterator === undefined) { + iterator = fibonacci() + .clamp(undefined, 60) + .take(10) + .toMs() + } - const newUrl = { - ...this._url, - hostname: master, - } - this.emit('redirect', newUrl) - this._url = newUrl + const cursor = iterator.next() + if (!cursor.done) { + // TODO: ability to cancel the connection + // TODO: ability to force immediate reconnection - return loop.apply(this, arguments) - }) - }, - call => function (method) { - const startTime = Date.now() - return call.apply(this, arguments).then( - result => { - debug( - '%s: %s(...) [%s] ==> %s', - this._humanId, - method, - ms(Date.now() - startTime), - kindOf(result) - ) - return result + const delay = cursor.value + debug( + '%s: network error %s, next try in %s ms', + this._humanId, + error.code, + delay + ) + return pDelay(delay).then(loop) + } + + debug('%s: network error %s, aborting', this._humanId, error.code) + + // mark as disconnected + this.disconnect()::pCatch(noop) + + throw error + }) + return loop() }, - error => { - debug( - '%s: %s(...) [%s] =!> %s', - this._humanId, - method, - ms(Date.now() - startTime), - error + call => + function loop () { + return call + .apply(this, arguments) + ::pCatch(isHostSlave, ({ params: [master] }) => { + debug( + '%s: host is slave, attempting to connect at %s', + this._humanId, + master + ) + + const newUrl = { + ...this._url, + hostname: master, + } + this.emit('redirect', newUrl) + this._url = newUrl + + return loop.apply(this, arguments) + }) + }, + call => + function (method) { + const startTime = Date.now() + return call.apply(this, arguments).then( + result => { + debug( + '%s: %s(...) 
[%s] ==> %s', + this._humanId, + method, + ms(Date.now() - startTime), + kindOf(result) + ) + return result + }, + error => { + debug( + '%s: %s(...) [%s] =!> %s', + this._humanId, + method, + ms(Date.now() - startTime), + error + ) + throw error + } ) - throw error - } - ) - }, -], (call, decorator) => decorator(call)) + }, + ], + (call, decorator) => decorator(call) +) // =================================================================== diff --git a/packages/xen-api/src/inject-events.js b/packages/xen-api/src/inject-events.js index 3c1801b86..c62b0aa26 100755 --- a/packages/xen-api/src/inject-events.js +++ b/packages/xen-api/src/inject-events.js @@ -5,7 +5,7 @@ import { delay as pDelay } from 'promise-toolbox' import { createClient } from './' const xapi = (() => { - const [ , , url, user, password ] = process.argv + const [, , url, user, password] = process.argv return createClient({ auth: { user, password }, @@ -14,16 +14,19 @@ const xapi = (() => { }) })() -xapi.connect() +xapi + .connect() // Get the pool record's ref. .then(() => xapi.call('pool.get_all')) // Injects lots of events. - .then(([ poolRef ]) => { - const loop = () => xapi.call('event.inject', 'pool', poolRef) - ::pDelay(10) // A small delay is required to avoid overloading the Xen API. - .then(loop) + .then(([poolRef]) => { + const loop = () => + xapi + .call('event.inject', 'pool', poolRef) + ::pDelay(10) // A small delay is required to avoid overloading the Xen API. + .then(loop) return loop() }) diff --git a/packages/xen-api/src/memory-test.js b/packages/xen-api/src/memory-test.js index 33a389cc2..1f5ebed4a 100755 --- a/packages/xen-api/src/memory-test.js +++ b/packages/xen-api/src/memory-test.js @@ -14,7 +14,7 @@ setInterval(() => { ) }, 1e2) -const [ , , url, user, password ] = process.argv +const [, , url, user, password] = process.argv createClient({ auth: { user, password }, readOnly: true, diff --git a/packages/xen-api/src/transports/auto.js b/packages/xen-api/src/transports/auto.js index 47b927463..845e9987d 100644 --- a/packages/xen-api/src/transports/auto.js +++ b/packages/xen-api/src/transports/auto.js @@ -3,7 +3,7 @@ import xmlRpc from './xml-rpc' import xmlRpcJson from './xml-rpc-json' import { UnsupportedTransport } from './_utils' -const factories = [ jsonRpc, xmlRpcJson, xmlRpc ] +const factories = [jsonRpc, xmlRpcJson, xmlRpc] const { length } = factories export default opts => { @@ -14,18 +14,18 @@ export default opts => { const current = factories[i++](opts) if (i < length) { const currentI = i - call = (method, args) => current(method, args).catch( - error => { + call = (method, args) => + current(method, args).catch(error => { if (error instanceof UnsupportedTransport) { - if (currentI === i) { // not changed yet + if (currentI === i) { + // not changed yet create() } return call(method, args) } throw error - } - ) + }) } else { call = current } diff --git a/packages/xen-api/src/transports/json-rpc.js b/packages/xen-api/src/transports/json-rpc.js index 86725e829..3e6072a0a 100644 --- a/packages/xen-api/src/transports/json-rpc.js +++ b/packages/xen-api/src/transports/json-rpc.js @@ -4,35 +4,40 @@ import { format, parse } from 'json-rpc-protocol' import { UnsupportedTransport } from './_utils' export default ({ allowUnauthorized, url }) => { - return (method, args) => httpRequestPlus.post(url, { - rejectUnauthorized: !allowUnauthorized, - body: format.request(0, method, args), - headers: { - 'Accept': 'application/json', - 'Content-Type': 'application/json', - }, - path: '/jsonrpc', - 
}).readAll('utf8').then( - text => { - let response - try { - response = parse(text) - } catch (error) { - throw new UnsupportedTransport() - } + return (method, args) => + httpRequestPlus + .post(url, { + rejectUnauthorized: !allowUnauthorized, + body: format.request(0, method, args), + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + }, + path: '/jsonrpc', + }) + .readAll('utf8') + .then( + text => { + let response + try { + response = parse(text) + } catch (error) { + throw new UnsupportedTransport() + } - if (response.type === 'response') { - return response.result - } + if (response.type === 'response') { + return response.result + } - throw response.error - }, - error => { - if (error.response !== undefined) { // HTTP error - throw new UnsupportedTransport() - } + throw response.error + }, + error => { + if (error.response !== undefined) { + // HTTP error + throw new UnsupportedTransport() + } - throw error - } - ) + throw error + } + ) } diff --git a/packages/xen-api/src/transports/xml-rpc-json.js b/packages/xen-api/src/transports/xml-rpc-json.js index 49add5620..8210ef682 100644 --- a/packages/xen-api/src/transports/xml-rpc-json.js +++ b/packages/xen-api/src/transports/xml-rpc-json.js @@ -20,10 +20,7 @@ const SPECIAL_CHARS = { '\r': '\\r', '\t': '\\t', } -const SPECIAL_CHARS_RE = new RegExp( - Object.keys(SPECIAL_CHARS).join('|'), - 'g' -) +const SPECIAL_CHARS_RE = new RegExp(Object.keys(SPECIAL_CHARS).join('|'), 'g') const parseResult = result => { const status = result.Status @@ -78,11 +75,7 @@ export default ({ allowUnauthorized, url: { hostname, path, port, protocol }, }) => { - const client = ( - protocol === 'https:' - ? createSecureClient - : createClient - )({ + const client = (protocol === 'https:' ? createSecureClient : createClient)({ host: hostname, path: '/json', port, @@ -90,8 +83,5 @@ export default ({ }) const call = promisify(client.methodCall, client) - return (method, args) => call(method, args).then( - parseResult, - logError - ) + return (method, args) => call(method, args).then(parseResult, logError) } diff --git a/packages/xen-api/src/transports/xml-rpc.js b/packages/xen-api/src/transports/xml-rpc.js index d99033603..e88350cb2 100644 --- a/packages/xen-api/src/transports/xml-rpc.js +++ b/packages/xen-api/src/transports/xml-rpc.js @@ -34,19 +34,12 @@ export default ({ allowUnauthorized, url: { hostname, path, port, protocol }, }) => { - const client = ( - protocol === 'https:' - ? createSecureClient - : createClient - )({ + const client = (protocol === 'https:' ? 
createSecureClient : createClient)({ host: hostname, port, rejectUnauthorized: !allowUnauthorized, }) const call = promisify(client.methodCall, client) - return (method, args) => call(method, args).then( - parseResult, - logError - ) + return (method, args) => call(method, args).then(parseResult, logError) } diff --git a/packages/xo-acl-resolver/src/index.js b/packages/xo-acl-resolver/src/index.js index 86e3cbd93..f48c37e9a 100644 --- a/packages/xo-acl-resolver/src/index.js +++ b/packages/xo-acl-resolver/src/index.js @@ -8,7 +8,8 @@ let getObject const authorized = () => true // eslint-disable-line no-unused-vars const forbiddden = () => false // eslint-disable-line no-unused-vars -const and = (...checkers) => (object, permission) => { // eslint-disable-line no-unused-vars +// eslint-disable-next-line no-unused-vars +const and = (...checkers) => (object, permission) => { for (const checker of checkers) { if (!checker(object, permission)) { return false @@ -17,7 +18,8 @@ const and = (...checkers) => (object, permission) => { // eslint-disable-line no return true } -const or = (...checkers) => (object, permission) => { // eslint-disable-line no-unused-vars +// eslint-disable-next-line no-unused-vars +const or = (...checkers) => (object, permission) => { for (const checker of checkers) { if (checker(object, permission)) { return true @@ -28,7 +30,7 @@ const or = (...checkers) => (object, permission) => { // eslint-disable-line no- // ------------------------------------------------------------------- -const checkMember = (memberName) => (object, permission) => { +const checkMember = memberName => (object, permission) => { const member = object[memberName] return member !== object.id && checkAuthorization(member, permission) } @@ -36,10 +38,7 @@ const checkMember = (memberName) => (object, permission) => { const checkSelf = ({ id }, permission) => { const permissionsForObject = permissionsByObject[id] - return ( - permissionsForObject && - permissionsForObject[permission] - ) + return permissionsForObject && permissionsForObject[permission] } // =================================================================== @@ -102,12 +101,7 @@ function checkAuthorization (objectId, permission) { // ------------------------------------------------------------------- -export default ( - permissionsByObject_, - getObject_, - permissions, - permission -) => { +export default (permissionsByObject_, getObject_, permissions, permission) => { // Assign global variables. permissionsByObject = permissionsByObject_ getObject = getObject_ diff --git a/packages/xo-cli/.babelrc.js b/packages/xo-cli/.babelrc.js index ffa5e0cc0..845b33558 100644 --- a/packages/xo-cli/.babelrc.js +++ b/packages/xo-cli/.babelrc.js @@ -5,7 +5,7 @@ const __TEST__ = NODE_ENV === 'test' module.exports = { comments: !__PROD__, compact: __PROD__, - ignore: __TEST__ ? undefined : [ /\.spec\.js$/ ], + ignore: __TEST__ ? undefined : [/\.spec\.js$/], plugins: ['lodash'], presets: [ [ @@ -14,9 +14,7 @@ module.exports = { debug: !__TEST__, loose: true, shippedProposals: true, - targets: __PROD__ - ? { node: '6' } - : { node: 'current' }, + targets: __PROD__ ? 
{ node: '6' } : { node: 'current' }, useBuiltIns: 'usage', }, ], diff --git a/packages/xo-cli/src/config.js b/packages/xo-cli/src/config.js index 17bf2050a..2f6575c18 100644 --- a/packages/xo-cli/src/config.js +++ b/packages/xo-cli/src/config.js @@ -19,11 +19,13 @@ const configFile = configPath + '/config.json' // =================================================================== -const load = exports.load = function () { - return readFile(configFile).then(JSON.parse).catch(function () { - return {} - }) -} +const load = (exports.load = function () { + return readFile(configFile) + .then(JSON.parse) + .catch(function () { + return {} + }) +}) exports.get = function (path) { return load().then(function (config) { @@ -31,11 +33,11 @@ exports.get = function (path) { }) } -const save = exports.save = function (config) { +const save = (exports.save = function (config) { return mkdirp(configPath).then(function () { return writeFile(configFile, JSON.stringify(config)) }) -} +}) exports.set = function (data) { return load().then(function (config) { diff --git a/packages/xo-cli/src/index.js b/packages/xo-cli/src/index.js index fbaceff42..475807e31 100755 --- a/packages/xo-cli/src/index.js +++ b/packages/xo-cli/src/index.js @@ -108,14 +108,16 @@ const humanFormatOpts = { function printProgress (progress) { if (progress.length) { - console.warn('%s% of %s @ %s/s - ETA %s', + console.warn( + '%s% of %s @ %s/s - ETA %s', Math.round(progress.percentage), humanFormat(progress.length, humanFormatOpts), humanFormat(progress.speed, humanFormatOpts), prettyMs(progress.eta * 1e3) ) } else { - console.warn('%s @ %s/s', + console.warn( + '%s @ %s/s', humanFormat(progress.transferred, humanFormatOpts), humanFormat(progress.speed, humanFormatOpts) ) @@ -130,8 +132,10 @@ function wrap (val) { // =================================================================== -const help = wrap((function (pkg) { - return require('strip-indent')(` +const help = wrap( + (function (pkg) { + return require('strip-indent')( + ` Usage: $name --register [--expiresIn duration] [] @@ -162,18 +166,20 @@ const help = wrap((function (pkg) { Executes a command on the current XO instance. $name v$version - `).replace(/<([^>]+)>|\$(\w+)/g, function (_, arg, key) { - if (arg) { - return '<' + chalk.yellow(arg) + '>' - } + ` + ).replace(/<([^>]+)>|\$(\w+)/g, function (_, arg, key) { + if (arg) { + return '<' + chalk.yellow(arg) + '>' + } - if (key === 'name') { - return chalk.bold(pkg[key]) - } + if (key === 'name') { + return chalk.bold(pkg[key]) + } - return pkg[key] - }) -})(require('../package'))) + return pkg[key] + }) + })(require('../package')) +) // ------------------------------------------------------------------- @@ -230,10 +236,7 @@ async function register (args) { exports.register = register function unregister () { - return config.unset([ - 'server', - 'token', - ]) + return config.unset(['server', 'token']) } exports.unregister = unregister @@ -284,11 +287,7 @@ async function listCommands (args) { str.push( name, '=<', - type == null - ? 'unknown type' - : isArray(type) - ? type.join('|') - : type, + type == null ? 'unknown type' : isArray(type) ? 
type.join('|') : type, '>' ) @@ -347,10 +346,7 @@ async function call (args) { const result = await xo.call(method, params) let keys, key, url - if ( - isObject(result) && - (keys = getKeys(result)).length === 1 - ) { + if (isObject(result) && (keys = getKeys(result)).length === 1) { key = keys[0] if (key === '$getFrom') { @@ -359,16 +355,19 @@ async function call (args) { const progress = progressStream({ time: 1e3 }, printProgress) - return eventToPromise(nicePipe([ - got.stream(url).on('response', function (response) { - const length = response.headers['content-length'] - if (length !== undefined) { - progress.length(length) - } - }), - progress, - output, - ]), 'finish') + return eventToPromise( + nicePipe([ + got.stream(url).on('response', function (response) { + const length = response.headers['content-length'] + if (length !== undefined) { + progress.length(length) + } + }), + progress, + output, + ]), + 'finish' + ) } if (key === '$sendTo') { @@ -379,10 +378,13 @@ async function call (args) { const input = nicePipe([ createReadStream(file), - progressStream({ - length: length, - time: 1e3, - }, printProgress), + progressStream( + { + length: length, + time: 1e3, + }, + printProgress + ), ]) const response = await got.post(url, { diff --git a/packages/xo-collection/src/collection.js b/packages/xo-collection/src/collection.js index eacb5e00e..6b7f6cb29 100644 --- a/packages/xo-collection/src/collection.js +++ b/packages/xo-collection/src/collection.js @@ -1,17 +1,14 @@ import kindOf from 'kindof' -import {BaseError} from 'make-error' -import {EventEmitter} from 'events' -import {forEach} from 'lodash' +import { BaseError } from 'make-error' +import { EventEmitter } from 'events' +import { forEach } from 'lodash' import isEmpty from './is-empty' import isObject from './is-object' // =================================================================== -const { - create: createObject, - prototype: { hasOwnProperty }, -} = Object +const { create: createObject, prototype: { hasOwnProperty } } = Object export const ACTION_ADD = 'add' export const ACTION_UPDATE = 'update' @@ -189,7 +186,7 @@ export default class Collection extends EventEmitter { // ----------------------------------------------------------------- createIndex (name, index) { - const {_indexes: indexes} = this + const { _indexes: indexes } = this if (hasOwnProperty.call(indexes, name)) { throw new DuplicateIndex(name) } @@ -201,7 +198,7 @@ export default class Collection extends EventEmitter { } deleteIndex (name) { - const {_indexes: indexes} = this + const { _indexes: indexes } = this if (!hasOwnProperty.call(indexes, name)) { throw new NoSuchIndex(name) } @@ -218,7 +215,7 @@ export default class Collection extends EventEmitter { // ----------------------------------------------------------------- * [Symbol.iterator] () { - const {_items: items} = this + const { _items: items } = this for (const key in items) { yield [key, items[key]] @@ -226,7 +223,7 @@ export default class Collection extends EventEmitter { } * keys () { - const {_items: items} = this + const { _items: items } = this for (const key in items) { yield key @@ -234,7 +231,7 @@ export default class Collection extends EventEmitter { } * values () { - const {_items: items} = this + const { _items: items } = this for (const key in items) { yield items[key] @@ -259,7 +256,7 @@ export default class Collection extends EventEmitter { return } - const {_buffer: buffer} = this + const { _buffer: buffer } = this // Due to deduplication there could be nothing in the 
buffer. if (isEmpty(buffer)) { @@ -354,7 +351,8 @@ export default class Collection extends EventEmitter { } else { this._buffer[key] = ACTION_REMOVE } - } else { // update + } else { + // update if (!this._buffer[key]) { this._buffer[key] = ACTION_UPDATE } diff --git a/packages/xo-collection/src/collection.spec.js b/packages/xo-collection/src/collection.spec.js index 939056a40..deb681437 100644 --- a/packages/xo-collection/src/collection.spec.js +++ b/packages/xo-collection/src/collection.spec.js @@ -3,15 +3,15 @@ import eventToPromise from 'event-to-promise' import { forEach } from 'lodash' -import Collection, {DuplicateItem, NoSuchItem} from './collection' +import Collection, { DuplicateItem, NoSuchItem } from './collection' // =================================================================== function waitTicks (n = 2) { - const {nextTick} = process + const { nextTick } = process return new Promise(function (resolve) { - (function waitNextTick () { + ;(function waitNextTick () { // The first tick is handled by Promise#then() if (--n) { nextTick(waitNextTick) @@ -34,16 +34,16 @@ describe('Collection', function () { it('is iterable', function () { const iterator = col[Symbol.iterator]() - expect(iterator.next()).toEqual({done: false, value: ['bar', 0]}) - expect(iterator.next()).toEqual({done: true, value: undefined}) + expect(iterator.next()).toEqual({ done: false, value: ['bar', 0] }) + expect(iterator.next()).toEqual({ done: true, value: undefined }) }) describe('#keys()', function () { it('returns an iterator over the keys', function () { const iterator = col.keys() - expect(iterator.next()).toEqual({done: false, value: 'bar'}) - expect(iterator.next()).toEqual({done: true, value: undefined}) + expect(iterator.next()).toEqual({ done: false, value: 'bar' }) + expect(iterator.next()).toEqual({ done: true, value: undefined }) }) }) @@ -51,8 +51,8 @@ describe('Collection', function () { it('returns an iterator over the values', function () { const iterator = col.values() - expect(iterator.next()).toEqual({done: false, value: 0}) - expect(iterator.next()).toEqual({done: true, value: undefined}) + expect(iterator.next()).toEqual({ done: false, value: 0 }) + expect(iterator.next()).toEqual({ done: true, value: undefined }) }) }) @@ -70,7 +70,7 @@ describe('Collection', function () { // Async event. 
return eventToPromise(col, 'add').then(function (added) { - expect(Object.keys(added)).toEqual([ 'foo' ]) + expect(Object.keys(added)).toEqual(['foo']) expect(added.foo).toBe(true) }) }) @@ -216,7 +216,7 @@ describe('Collection', function () { }) it('accepts an object with an id property', function () { - col.unset({id: 'bar'}) + col.unset({ id: 'bar' }) expect(col.has('bar')).toBe(false) @@ -235,7 +235,7 @@ describe('Collection', function () { return waitTicks().then(() => { col.touch(foo) - return eventToPromise(col, 'update', (items) => { + return eventToPromise(col, 'update', items => { expect(Object.keys(items)).toEqual(['foo']) expect(items.foo).toBe(foo) }) @@ -249,7 +249,7 @@ describe('Collection', function () { expect(col.size).toBe(0) - return eventToPromise(col, 'remove').then((items) => { + return eventToPromise(col, 'remove').then(items => { expect(Object.keys(items)).toEqual(['bar']) expect(items.bar).toBeUndefined() }) @@ -257,84 +257,69 @@ describe('Collection', function () { }) describe('deduplicates events', function () { - forEach({ - 'add & update → add': [ - [ - ['add', 'foo', 0], - ['update', 'foo', 1], - ], - { - add: { - foo: 1, + forEach( + { + 'add & update → add': [ + [['add', 'foo', 0], ['update', 'foo', 1]], + { + add: { + foo: 1, + }, }, - }, - ], - - 'add & remove → ∅': [ - [ - ['add', 'foo', 0], - ['remove', 'foo'], ], - {}, - ], - 'update & update → update': [ - [ - ['update', 'bar', 1], - ['update', 'bar', 2], - ], - { - update: { - bar: 2, + 'add & remove → ∅': [[['add', 'foo', 0], ['remove', 'foo']], {}], + + 'update & update → update': [ + [['update', 'bar', 1], ['update', 'bar', 2]], + { + update: { + bar: 2, + }, }, - }, - ], - - 'update & remove → remove': [ - [ - ['update', 'bar', 1], - ['remove', 'bar'], ], - { - remove: { - bar: undefined, - }, - }, - ], - 'remove & add → update': [ - [ - ['remove', 'bar'], - ['add', 'bar', 0], + 'update & remove → remove': [ + [['update', 'bar', 1], ['remove', 'bar']], + { + remove: { + bar: undefined, + }, + }, ], - { - update: { - bar: 0, + + 'remove & add → update': [ + [['remove', 'bar'], ['add', 'bar', 0]], + { + update: { + bar: 0, + }, }, - }, - ], - }, ([operations, results], label) => { - it(label, function () { - forEach(operations, ([method, ...args]) => { - col[method](...args) - }) + ], + }, + ([operations, results], label) => { + it(label, function () { + forEach(operations, ([method, ...args]) => { + col[method](...args) + }) - const spies = Object.create(null) - forEach(['add', 'update', 'remove'], event => { - col.on(event, (spies[event] = jest.fn())) - }) + const spies = Object.create(null) + forEach(['add', 'update', 'remove'], event => { + col.on(event, (spies[event] = jest.fn())) + }) - return waitTicks().then(() => { - forEach(spies, (spy, event) => { - const items = results[event] - if (items) { - expect(spy.mock.calls).toEqual([ [ items ] ]) - } else { - expect(spy).not.toHaveBeenCalled() - } + return waitTicks().then(() => { + forEach(spies, (spy, event) => { + const items = results[event] + if (items) { + expect(spy.mock.calls).toEqual([[items]]) + } else { + expect(spy).not.toHaveBeenCalled() + } + }) }) }) - }) - }) + } + ) }) }) diff --git a/packages/xo-collection/src/index.js b/packages/xo-collection/src/index.js index 01ddfd7b8..8e1e62e48 100644 --- a/packages/xo-collection/src/index.js +++ b/packages/xo-collection/src/index.js @@ -3,11 +3,7 @@ import { bind, iteratee } from 'lodash' import clearObject from './clear-object' import isEmpty from './is-empty' import NotImplemented from 
'./not-implemented' -import { - ACTION_ADD, - ACTION_UPDATE, - ACTION_REMOVE, -} from './collection' +import { ACTION_ADD, ACTION_UPDATE, ACTION_REMOVE } from './collection' // =================================================================== @@ -34,7 +30,7 @@ export default class Index { // Remove empty items lists. sweep () { - const {_itemsByHash: itemsByHash} = this + const { _itemsByHash: itemsByHash } = this for (const hash in itemsByHash) { if (isEmpty(itemsByHash[hash])) { delete itemsByHash[hash] @@ -86,14 +82,11 @@ export default class Index { const hash = computeHash(value, key) if (hash != null) { - ( - itemsByHash[hash] || - + ;(itemsByHash[hash] || // FIXME: We do not use objects without prototype for now // because it breaks Angular in xo-web, change it back when // this is fixed. - (itemsByHash[hash] = {}) - )[key] = value + (itemsByHash[hash] = {}))[key] = value keysToHash[key] = hash } @@ -118,12 +111,9 @@ export default class Index { // Inserts item into the new hash's list if any. if (hash != null) { - ( - itemsByHash[hash] || - + ;(itemsByHash[hash] || // FIXME: idem: change back to Object.create(null) - (itemsByHash[hash] = {}) - )[key] = value + (itemsByHash[hash] = {}))[key] = value keysToHash[key] = hash } else { @@ -133,10 +123,7 @@ export default class Index { } _onRemove (items) { - const { - _itemsByHash: itemsByHash, - _keysToHash: keysToHash, - } = this + const { _itemsByHash: itemsByHash, _keysToHash: keysToHash } = this for (const key in items) { const prev = keysToHash[key] diff --git a/packages/xo-collection/src/index.spec.js b/packages/xo-collection/src/index.spec.js index 74bd0dfff..8cfdfda67 100644 --- a/packages/xo-collection/src/index.spec.js +++ b/packages/xo-collection/src/index.spec.js @@ -9,10 +9,10 @@ import Index from './index' // =================================================================== const waitTicks = (n = 2) => { - const {nextTick} = process + const { nextTick } = process return new Promise(resolve => { - (function waitNextTick () { + ;(function waitNextTick () { // The first tick is handled by Promise#then() if (--n) { nextTick(waitNextTick) diff --git a/packages/xo-collection/src/is-object.js b/packages/xo-collection/src/is-object.js index 92961e81c..1453776e7 100644 --- a/packages/xo-collection/src/is-object.js +++ b/packages/xo-collection/src/is-object.js @@ -1,3 +1,3 @@ export default function isObject (value) { - return (value !== null) && (typeof value === 'object') + return value !== null && typeof value === 'object' } diff --git a/packages/xo-collection/src/not-implemented.js b/packages/xo-collection/src/not-implemented.js index 52e507e37..ddb70c6a1 100644 --- a/packages/xo-collection/src/not-implemented.js +++ b/packages/xo-collection/src/not-implemented.js @@ -1,4 +1,4 @@ -import {BaseError} from 'make-error' +import { BaseError } from 'make-error' export default class NotImplemented extends BaseError { constructor (message) { diff --git a/packages/xo-collection/src/unique-index.js b/packages/xo-collection/src/unique-index.js index 96c90964c..e3a9114b7 100644 --- a/packages/xo-collection/src/unique-index.js +++ b/packages/xo-collection/src/unique-index.js @@ -2,11 +2,7 @@ import { bind, iteratee } from 'lodash' import clearObject from './clear-object' import NotImplemented from './not-implemented' -import { - ACTION_ADD, - ACTION_UPDATE, - ACTION_REMOVE, -} from './collection' +import { ACTION_ADD, ACTION_UPDATE, ACTION_REMOVE } from './collection' // 
=================================================================== @@ -108,10 +104,7 @@ export default class UniqueIndex { } _onRemove (items) { - const { - _itemByHash: itemByHash, - _keysToHash: keysToHash, - } = this + const { _itemByHash: itemByHash, _keysToHash: keysToHash } = this for (const key in items) { const prev = keysToHash[key] diff --git a/packages/xo-collection/src/unique-index.spec.js b/packages/xo-collection/src/unique-index.spec.js index 3b0005b1e..ed4ab3b2d 100644 --- a/packages/xo-collection/src/unique-index.spec.js +++ b/packages/xo-collection/src/unique-index.spec.js @@ -9,10 +9,10 @@ import Index from './unique-index' // =================================================================== const waitTicks = (n = 2) => { - const {nextTick} = process + const { nextTick } = process return new Promise(resolve => { - (function waitNextTick () { + ;(function waitNextTick () { // The first tick is handled by Promise#then() if (--n) { nextTick(waitNextTick) diff --git a/packages/xo-collection/src/view.example.js b/packages/xo-collection/src/view.example.js index 4cd60610a..2b45345fe 100644 --- a/packages/xo-collection/src/view.example.js +++ b/packages/xo-collection/src/view.example.js @@ -7,7 +7,7 @@ import View from './view' // Create the collection. const users = new Collection() -users.getKey = (user) => user.name +users.getKey = user => user.name // Inserts some data. users.add({ diff --git a/packages/xo-collection/src/view.js b/packages/xo-collection/src/view.js index cb10cd260..821ca9f25 100644 --- a/packages/xo-collection/src/view.js +++ b/packages/xo-collection/src/view.js @@ -54,7 +54,7 @@ export default class View extends Collection { } _onAdd (items) { - const {_predicate: predicate} = this + const { _predicate: predicate } = this forEach(items, (value, key) => { if (predicate(value, key, this)) { @@ -67,7 +67,7 @@ export default class View extends Collection { } _onUpdate (items) { - const {_predicate: predicate} = this + const { _predicate: predicate } = this forEach(items, (value, key) => { if (predicate(value, key, this)) { diff --git a/packages/xo-lib/example.js b/packages/xo-lib/example.js index 5ab7de8c8..1a3d0d6ad 100644 --- a/packages/xo-lib/example.js +++ b/packages/xo-lib/example.js @@ -10,36 +10,53 @@ const xo = new Xo({ url: 'localhost:9000', }) -xo.open().then(function () { - return xo.call('acl.get', {}).then(function (result) { - console.log('success:', result) - }).catch(function (error) { - console.log('failure:', error) +xo + .open() + .then(function () { + return xo + .call('acl.get', {}) + .then(function (result) { + console.log('success:', result) + }) + .catch(function (error) { + console.log('failure:', error) + }) }) -}).then(function () { - return xo.signIn({ - email: 'admin@admin.net', - password: 'admin', - }).then(function () { - console.log('connected as ', xo.user) - }).catch(function (error) { - console.log('failure:', error) + .then(function () { + return xo + .signIn({ + email: 'admin@admin.net', + password: 'admin', + }) + .then(function () { + console.log('connected as ', xo.user) + }) + .catch(function (error) { + console.log('failure:', error) + }) }) -}).then(function () { - return xo.signIn({ - email: 'tom', - password: 'tom', - }).then(function () { - console.log('connected as', xo.user) + .then(function () { + return xo + .signIn({ + email: 'tom', + password: 'tom', + }) + .then(function () { + console.log('connected as', xo.user) - return xo.call('acl.get', {}).then(function (result) { - console.log('success:', result) - 
}).catch(function (error) { - console.log('failure:', error) - }) - }).catch(function (error) { - console.log('failure', error) + return xo + .call('acl.get', {}) + .then(function (result) { + console.log('success:', result) + }) + .catch(function (error) { + console.log('failure:', error) + }) + }) + .catch(function (error) { + console.log('failure', error) + }) + }) + .then(function () { + return xo.close() }) -}).then(function () { - return xo.close() -}) diff --git a/packages/xo-lib/src/index.js b/packages/xo-lib/src/index.js index 63d3f4111..ac085fa8e 100644 --- a/packages/xo-lib/src/index.js +++ b/packages/xo-lib/src/index.js @@ -1,7 +1,4 @@ -import JsonRpcWebSocketClient, { - OPEN, - CLOSED, -} from 'jsonrpc-websocket-client' +import JsonRpcWebSocketClient, { OPEN, CLOSED } from 'jsonrpc-websocket-client' import { BaseError } from 'make-error' import { startsWith } from 'lodash' @@ -20,7 +17,7 @@ export default class Xo extends JsonRpcWebSocketClient { const url = opts != null ? opts.url : '.' super(`${url === '/' ? '' : url}/api/`) - this._credentials = (opts != null ? opts.credentials : null) + this._credentials = opts != null ? opts.credentials : null this._user = null this.on(OPEN, () => { @@ -45,12 +42,13 @@ export default class Xo extends JsonRpcWebSocketClient { } const promise = super.call(method, args) - promise.retry = (predicate) => promise.catch((error) => { - i = (i || 0) + 1 - if (predicate(error, i)) { - return this.call(method, args, i) - } - }) + promise.retry = predicate => + promise.catch(error => { + i = (i || 0) + 1 + if (predicate(error, i)) { + return this.call(method, args, i) + } + }) return promise } diff --git a/packages/xo-remote-parser/src/index.js b/packages/xo-remote-parser/src/index.js index 68993294d..98d0262bb 100644 --- a/packages/xo-remote-parser/src/index.js +++ b/packages/xo-remote-parser/src/index.js @@ -3,10 +3,13 @@ import map from 'lodash/map' import trim from 'lodash/trim' import trimStart from 'lodash/trimStart' -const sanitizePath = (...paths) => filter(map(paths, s => s && filter(map(s.split('/'), trim)).join('/'))).join('/') +const sanitizePath = (...paths) => + filter(map(paths, s => s && filter(map(s.split('/'), trim)).join('/'))).join( + '/' + ) export const parse = string => { - const object = { } + const object = {} const [type, rest] = string.split('://') if (type === 'file') { @@ -36,7 +39,7 @@ export const parse = string => { return object } -export const format = ({type, host, path, username, password, domain}) => { +export const format = ({ type, host, path, username, password, domain }) => { type === 'local' && (type = 'file') let string = `${type}://` if (type === 'nfs') { diff --git a/packages/xo-server-auth-github/src/index.js b/packages/xo-server-auth-github/src/index.js index a9d96930b..4a71a35e0 100644 --- a/packages/xo-server-auth-github/src/index.js +++ b/packages/xo-server-auth-github/src/index.js @@ -1,4 +1,4 @@ -import {Strategy} from 'passport-github' +import { Strategy } from 'passport-github' // =================================================================== @@ -27,18 +27,23 @@ class AuthGitHubXoPlugin { } load () { - const {_xo: xo} = this + const { _xo: xo } = this - xo.registerPassportStrategy(new Strategy(this._conf, async (accessToken, refreshToken, profile, done) => { - try { - done(null, await xo.registerUser('github', profile.username)) - } catch (error) { - done(error.message) - } - })) + xo.registerPassportStrategy( + new Strategy( + this._conf, + async (accessToken, refreshToken, profile, done) => 
{ + try { + done(null, await xo.registerUser('github', profile.username)) + } catch (error) { + done(error.message) + } + } + ) + ) } } // =================================================================== -export default ({xo}) => new AuthGitHubXoPlugin(xo) +export default ({ xo }) => new AuthGitHubXoPlugin(xo) diff --git a/packages/xo-server-auth-google/src/index.js b/packages/xo-server-auth-google/src/index.js index e05d737b3..d8c099c48 100644 --- a/packages/xo-server-auth-google/src/index.js +++ b/packages/xo-server-auth-google/src/index.js @@ -7,7 +7,8 @@ export const configurationSchema = { properties: { callbackURL: { type: 'string', - description: 'Must be exactly the same as specified on the Google developer console.', + description: + 'Must be exactly the same as specified on the Google developer console.', }, clientID: { type: 'string', @@ -18,8 +19,8 @@ export const configurationSchema = { scope: { default: 'https://www.googleapis.com/auth/plus.login', description: 'Note that changing this value will break existing users.', - enum: [ 'https://www.googleapis.com/auth/plus.login', 'email' ], - enumNames: [ 'Google+ name', 'Simple email address' ], + enum: ['https://www.googleapis.com/auth/plus.login', 'email'], + enumNames: ['Google+ name', 'Simple email address'], }, }, required: ['callbackURL', 'clientID', 'clientSecret'], @@ -41,18 +42,23 @@ class AuthGoogleXoPlugin { const conf = this._conf const xo = this._xo - xo.registerPassportStrategy(new Strategy(conf, async (accessToken, refreshToken, profile, done) => { - try { - done(null, await xo.registerUser( - 'google', - conf.scope === 'email' - ? profile.emails[0].value - : profile.displayName - )) - } catch (error) { - done(error.message) - } - })) + xo.registerPassportStrategy( + new Strategy(conf, async (accessToken, refreshToken, profile, done) => { + try { + done( + null, + await xo.registerUser( + 'google', + conf.scope === 'email' + ? profile.emails[0].value + : profile.displayName + ) + ) + } catch (error) { + done(error.message) + } + }) + ) } } diff --git a/packages/xo-server-auth-ldap/src/index.js b/packages/xo-server-auth-ldap/src/index.js index 46b0e5f6b..777a85a6f 100644 --- a/packages/xo-server-auth-ldap/src/index.js +++ b/packages/xo-server-auth-ldap/src/index.js @@ -10,15 +10,16 @@ import { readFile } from 'fs' // =================================================================== const VAR_RE = /\{\{([^}]+)\}\}/g -const evalFilter = (filter, vars) => filter.replace(VAR_RE, (_, name) => { - const value = vars[name] +const evalFilter = (filter, vars) => + filter.replace(VAR_RE, (_, name) => { + const value = vars[name] - if (value === undefined) { - throw new Error('invalid variable: ' + name) - } + if (value === undefined) { + throw new Error('invalid variable: ' + name) + } - return escape(value) -}) + return escape(value) + }) export const configurationSchema = { type: 'object', @@ -39,7 +40,8 @@ If not specified, it will use a default set of well-known CAs. }, }, checkCertificate: { - description: 'Enforce the validity of the server\'s certificates. You can disable it when connecting to servers that use a self-signed certificate.', + description: + "Enforce the validity of the server's certificates. You can disable it when connecting to servers that use a self-signed certificate.", type: 'boolean', default: true, }, @@ -58,14 +60,16 @@ For Microsoft Active Directory, it can also be \`@\`. 
type: 'string',
       },
       password: {
-        description: 'Password of the user permitted of search the LDAP directory.',
+        description:
+          'Password of the user permitted to search the LDAP directory.',
         type: 'string',
       },
     },
     required: ['dn', 'password'],
   },
   base: {
-    description: 'The base is the part of the description tree where the users are looked for.',
+    description:
+      'The base is the part of the description tree where the users are looked for.',
     type: 'string',
   },
   filter: {
@@ -116,25 +120,21 @@ class AuthLdap {
   }
 
   async configure (conf) {
-    const clientOpts = this._clientOpts = {
+    const clientOpts = (this._clientOpts = {
       url: conf.uri,
       maxConnections: 5,
       tlsOptions: {},
-    }
+    })
 
     {
-      const {
-        bind,
-        checkCertificate = true,
-        certificateAuthorities,
-      } = conf
+      const { bind, checkCertificate = true, certificateAuthorities } = conf
 
       if (bind) {
         clientOpts.bindDN = bind.dn
         clientOpts.bindCredentials = bind.password
       }
 
-      const {tlsOptions} = clientOpts
+      const { tlsOptions } = clientOpts
 
       tlsOptions.rejectUnauthorized = checkCertificate
       if (certificateAuthorities) {
@@ -192,7 +192,7 @@ class AuthLdap {
 
     // Bind if necessary.
     {
-      const {_credentials: credentials} = this
+      const { _credentials: credentials } = this
       if (credentials) {
         logger(`attempting to bind with as ${credentials.dn}...`)
         await bind(credentials.dn, credentials.password)
@@ -216,7 +216,7 @@ class AuthLdap {
         entries.push(entry.json)
       })
 
-      const {status} = await eventToPromise(response, 'end')
+      const { status } = await eventToPromise(response, 'end')
       if (status) {
         throw new Error('unexpected search response status: ' + status)
       }
@@ -229,7 +229,11 @@ class AuthLdap {
       try {
         logger(`attempting to bind as ${entry.objectName}`)
         await bind(entry.objectName, password)
-        logger(`successfully bound as ${entry.objectName} => ${username} authenticated`)
+        logger(
+          `successfully bound as ${
+            entry.objectName
+          } => ${username} authenticated`
+        )
         return { username }
       } catch (error) {
         logger(`failed to bind as ${entry.objectName}: ${error.message}`)
@@ -246,4 +250,4 @@ class AuthLdap {
 
 // ===================================================================
 
-export default ({xo}) => new AuthLdap(xo)
+export default ({ xo }) => new AuthLdap(xo)
diff --git a/packages/xo-server-auth-ldap/src/prompt-schema.js b/packages/xo-server-auth-ldap/src/prompt-schema.js
index be124a92f..c51cb8939 100644
--- a/packages/xo-server-auth-ldap/src/prompt-schema.js
+++ b/packages/xo-server-auth-ldap/src/prompt-schema.js
@@ -8,48 +8,61 @@ const EMPTY_OBJECT = Object.freeze({ __proto__: null })
 
 const _extractValue = ({ value }) => value
 
-export const confirm = (message, {
-  default: defaultValue = null,
-} = EMPTY_OBJECT) => prompt({
-  default: defaultValue,
+export const confirm = (
   message,
-  name: 'value',
-  type: 'confirm',
-}).then(_extractValue)
+  { default: defaultValue = null } = EMPTY_OBJECT
+) =>
+  prompt({
+    default: defaultValue,
+    message,
+    name: 'value',
+    type: 'confirm',
+  }).then(_extractValue)
 
-export const input = (message, {
-  default: defaultValue = null,
-  filter = undefined,
-  validate = undefined,
-} = EMPTY_OBJECT) => prompt({
-  default: defaultValue,
+export const input = (
   message,
-  name: 'value',
-  type: 'input',
-  validate,
-}).then(_extractValue)
+  {
+    default: defaultValue = null,
+    filter = undefined,
+    validate = undefined,
+  } = EMPTY_OBJECT
+) =>
+  prompt({
+    default: defaultValue,
+    message,
+    name: 'value',
+    type: 'input',
+    validate,
+  }).then(_extractValue)
 
-export const list = (message, choices, {
-  default: defaultValue = 
null, -} = EMPTY_OBJECT) => prompt({ - default: defaultValue, +export const list = ( + message, choices, - message, - name: 'value', - type: 'list', -}).then(_extractValue) + { default: defaultValue = null } = EMPTY_OBJECT +) => + prompt({ + default: defaultValue, + choices, + message, + name: 'value', + type: 'list', + }).then(_extractValue) -export const password = (message, { - default: defaultValue = null, - filter = undefined, - validate = undefined, -} = EMPTY_OBJECT) => prompt({ - default: defaultValue, +export const password = ( message, - name: 'value', - type: 'password', - validate, -}).then(_extractValue) + { + default: defaultValue = null, + filter = undefined, + validate = undefined, + } = EMPTY_OBJECT +) => + prompt({ + default: defaultValue, + message, + name: 'value', + type: 'password', + validate, + }).then(_extractValue) // =================================================================== @@ -69,25 +82,25 @@ const promptByType = { items[i] = await promptGeneric( itemSchema, defaultValue[i], - path - ? `${path} [${i}]` - : `[${i}]` + path ? `${path} [${i}]` : `[${i}]` ) ++i } let n = schema.minItems || 0 - while (i < n) { // eslint-disable-line no-unmodified-loop-condition + // eslint-disable-next-line no-unmodified-loop-condition + while (i < n) { await promptItem() } n = schema.maxItems || Infinity while ( - i < n && // eslint-disable-line no-unmodified-loop-condition - await confirm('additional item?', { + // eslint-disable-next-line no-unmodified-loop-condition + i < n && + (await confirm('additional item?', { default: false, - }) + })) ) { await promptItem() } @@ -95,33 +108,38 @@ const promptByType = { return items }, - boolean: (schema, defaultValue, path) => confirm(path, { - default: defaultValue != null ? defaultValue : schema.default, - }), + boolean: (schema, defaultValue, path) => + confirm(path, { + default: defaultValue != null ? 
defaultValue : schema.default, + }), - enum: (schema, defaultValue, path) => list(path, schema.enum, { - defaultValue: defaultValue || schema.defaultValue, - }), + enum: (schema, defaultValue, path) => + list(path, schema.enum, { + defaultValue: defaultValue || schema.defaultValue, + }), - integer: (schema, defaultValue, path) => input(path, { - default: defaultValue || schema.default, - filter: input => +input, - validate: input => isInteger(+input), - }), + integer: (schema, defaultValue, path) => + input(path, { + default: defaultValue || schema.default, + filter: input => +input, + validate: input => isInteger(+input), + }), - number: (schema, defaultValue, path) => input(path, { - default: defaultValue || schema.default, - filter: input => +input, - validate: input => isFinite(+input), - }), + number: (schema, defaultValue, path) => + input(path, { + default: defaultValue || schema.default, + filter: input => +input, + validate: input => isFinite(+input), + }), object: async (schema, defaultValue, path) => { const value = {} const required = {} - schema.required && forEach(schema.required, name => { - required[name] = true - }) + schema.required && + forEach(schema.required, name => { + required[name] = true + }) const promptProperty = async (schema, name) => { const subpath = path @@ -130,9 +148,9 @@ const promptByType = { if ( required[name] || - await confirm(`fill optional ${subpath}?`, { + (await confirm(`fill optional ${subpath}?`, { default: Boolean(defaultValue && name in defaultValue), - }) + })) ) { value[name] = await promptGeneric( schema, @@ -147,15 +165,14 @@ const promptByType = { return value }, - string: (schema, defaultValue, path) => input(path, { - default: defaultValue || schema.default, - }), + string: (schema, defaultValue, path) => + input(path, { + default: defaultValue || schema.default, + }), } export default function promptGeneric (schema, defaultValue, path) { - const type = schema.enum - ? 'enum' - : schema.type + const type = schema.enum ? 
'enum' : schema.type const prompt = promptByType[type.toLowerCase()] if (!prompt) { diff --git a/packages/xo-server-auth-ldap/src/test-cli.js b/packages/xo-server-auth-ldap/src/test-cli.js index 785f5e504..ee6d52500 100755 --- a/packages/xo-server-auth-ldap/src/test-cli.js +++ b/packages/xo-server-auth-ldap/src/test-cli.js @@ -5,13 +5,8 @@ import { bind } from 'lodash' import { fromCallback } from 'promise-toolbox' import { readFile, writeFile } from 'fs' -import promptSchema, { - input, - password, -} from './prompt-schema' -import createPlugin, { - configurationSchema, -} from './' +import promptSchema, { input, password } from './prompt-schema' +import createPlugin, { configurationSchema } from './' // =================================================================== @@ -27,7 +22,9 @@ execPromise(async args => { () => ({}) ) ) - await fromCallback(cb => writeFile(CACHE_FILE, JSON.stringify(config, null, 2), cb)).then( + await fromCallback(cb => + writeFile(CACHE_FILE, JSON.stringify(config, null, 2), cb) + ).then( () => { console.log('configuration saved in %s', CACHE_FILE) }, @@ -40,10 +37,13 @@ execPromise(async args => { const plugin = createPlugin({}) await plugin.configure(config) - await plugin._authenticate({ - username: await input('Username', { - validate: input => !!input.length, - }), - password: await password('Password'), - }, bind(console.log, console)) + await plugin._authenticate( + { + username: await input('Username', { + validate: input => !!input.length, + }), + password: await password('Password'), + }, + bind(console.log, console) + ) }) diff --git a/packages/xo-server-auth-saml/src/index.js b/packages/xo-server-auth-saml/src/index.js index a3f7d9571..45b92a20e 100644 --- a/packages/xo-server-auth-saml/src/index.js +++ b/packages/xo-server-auth-saml/src/index.js @@ -1,4 +1,4 @@ -import {Strategy} from 'passport-saml' +import { Strategy } from 'passport-saml' // =================================================================== @@ -38,19 +38,21 @@ class AuthSamlXoPlugin { load () { const xo = this._xo - xo.registerPassportStrategy(new Strategy(this._conf, async (profile, done) => { - const name = profile[this._usernameField] - if (!name) { - done('no name found for this user') - return - } + xo.registerPassportStrategy( + new Strategy(this._conf, async (profile, done) => { + const name = profile[this._usernameField] + if (!name) { + done('no name found for this user') + return + } - try { - done(null, await xo.registerUser('saml', name)) - } catch (error) { - done(error.message) - } - })) + try { + done(null, await xo.registerUser('saml', name)) + } catch (error) { + done(error.message) + } + }) + ) } } diff --git a/packages/xo-server-backup-reports/src/index.js b/packages/xo-server-backup-reports/src/index.js index a73a840fc..827ba5991 100644 --- a/packages/xo-server-backup-reports/src/index.js +++ b/packages/xo-server-backup-reports/src/index.js @@ -37,15 +37,17 @@ const ICON_FAILURE = '🚨' const ICON_SUCCESS = '✔' const DATE_FORMAT = 'dddd, MMMM Do YYYY, h:mm:ss a' -const createDateFormater = timezone => timezone !== undefined - ? timestamp => moment(timestamp).tz(timezone).format(DATE_FORMAT) - : timestamp => moment(timestamp).format(DATE_FORMAT) +const createDateFormater = timezone => + timezone !== undefined + ? 
timestamp =>
+        moment(timestamp)
+          .tz(timezone)
+          .format(DATE_FORMAT)
+    : timestamp => moment(timestamp).format(DATE_FORMAT)
 
-const formatDuration = milliseconds =>
-  moment.duration(milliseconds).humanize()
+const formatDuration = milliseconds => moment.duration(milliseconds).humanize()
 
-const formatMethod = method =>
-  startCase(method.slice(method.indexOf('.') + 1))
+const formatMethod = method => startCase(method.slice(method.indexOf('.') + 1))
 
 const formatSize = bytes =>
   humanFormat(bytes, {
@@ -83,7 +85,9 @@ class BackupReportsXoPlugin {
   }
 
   _wrapper (status) {
-    return new Promise(resolve => resolve(this._listener(status))).catch(logError)
+    return new Promise(resolve => resolve(this._listener(status))).catch(
+      logError
+    )
   }
 
   _listener (status) {
@@ -114,8 +118,7 @@ class BackupReportsXoPlugin {
     }
 
     const reportOnFailure =
-      reportWhen === 'fail' || // xo-web < 5
-      reportWhen === 'failure' // xo-web >= 5
+      reportWhen === 'fail' || reportWhen === 'failure' // 'fail': xo-web < 5, 'failure': xo-web >= 5
 
     let globalMergeSize = 0
     let globalTransferSize = 0
@@ -152,11 +155,7 @@
 
       const { message } = error
 
-      failedBackupsText.push(
-        ...text,
-        `- **Error**: ${message}`,
-        ''
-      )
+      failedBackupsText.push(...text, `- **Error**: ${message}`, '')
 
       nagiosText.push(
         `[ ${vm !== undefined ? vm.name_label : 'undefined'} : ${message} ]`
@@ -169,22 +168,25 @@
         globalTransferSize += transferSize
         text.push(
           `- **Transfer size**: ${formatSize(transferSize)}`,
-          `- **Transfer speed**: ${formatSpeed(transferSize, returnedValue.transferDuration)}`
+          `- **Transfer speed**: ${formatSpeed(
+            transferSize,
+            returnedValue.transferDuration
+          )}`
         )
       }
       if (mergeSize !== undefined) {
         globalMergeSize += mergeSize
         text.push(
           `- **Merge size**: ${formatSize(mergeSize)}`,
-          `- **Merge speed**: ${formatSpeed(mergeSize, returnedValue.mergeDuration)}`
+          `- **Merge speed**: ${formatSpeed(
+            mergeSize,
+            returnedValue.mergeDuration
+          )}`
         )
       }
     }
 
-    successfulBackupText.push(
-      ...text,
-      ''
-    )
+    successfulBackupText.push(...text, '')
   }
 })
@@ -208,14 +210,10 @@
       `- **Successes**: ${nSuccesses} / ${nCalls}`,
     ]
     if (globalTransferSize !== 0) {
-      markdown.push(
-        `- **Transfer size**: ${formatSize(globalTransferSize)}`
-      )
+      markdown.push(`- **Transfer size**: ${formatSize(globalTransferSize)}`)
     }
     if (globalMergeSize !== 0) {
-      markdown.push(
-        `- **Merge size**: ${formatSize(globalMergeSize)}`
-      )
+      markdown.push(`- **Merge size**: ${formatSize(globalMergeSize)}`)
    }
     markdown.push('')
@@ -239,38 +237,40 @@
       )
     }
 
-    markdown.push(
-      '---',
-      '',
-      `*${pkg.name} v${pkg.version}*`
-    )
+    markdown.push('---', '', `*${pkg.name} v${pkg.version}*`)
 
     markdown = markdown.join('\n')
     const xo = this._xo
     return Promise.all([
-      xo.sendEmail !== undefined && xo.sendEmail({
-        to: this._mailsReceivers,
-        subject: `[Xen Orchestra] ${
-          globalSuccess ? 'Success' : 'Failure'
-        } − Backup report for ${tag} ${
-          globalSuccess ? ICON_SUCCESS : ICON_FAILURE
-        }`,
-        markdown,
-      }),
-      xo.sendToXmppClient !== undefined && xo.sendToXmppClient({
-        to: this._xmppReceivers,
-        message: markdown,
-      }),
-      xo.sendSlackMessage !== undefined && xo.sendSlackMessage({
-        message: markdown,
-      }),
-      xo.sendPassiveCheck !== undefined && xo.sendPassiveCheck({
-        status: globalSuccess ? 0 : 2,
-        message: globalSuccess
-          ? 
`[Xen Orchestra] [Success] Backup report for ${tag}` - : `[Xen Orchestra] [Failure] Backup report for ${tag} - VMs : ${nagiosText.join(' ')}`, - }), + xo.sendEmail !== undefined && + xo.sendEmail({ + to: this._mailsReceivers, + subject: `[Xen Orchestra] ${ + globalSuccess ? 'Success' : 'Failure' + } − Backup report for ${tag} ${ + globalSuccess ? ICON_SUCCESS : ICON_FAILURE + }`, + markdown, + }), + xo.sendToXmppClient !== undefined && + xo.sendToXmppClient({ + to: this._xmppReceivers, + message: markdown, + }), + xo.sendSlackMessage !== undefined && + xo.sendSlackMessage({ + message: markdown, + }), + xo.sendPassiveCheck !== undefined && + xo.sendPassiveCheck({ + status: globalSuccess ? 0 : 2, + message: globalSuccess + ? `[Xen Orchestra] [Success] Backup report for ${tag}` + : `[Xen Orchestra] [Failure] Backup report for ${tag} - VMs : ${nagiosText.join( + ' ' + )}`, + }), ]) } } diff --git a/packages/xo-server-cloud/src/index.js b/packages/xo-server-cloud/src/index.js index 46bc8459b..e054f7715 100644 --- a/packages/xo-server-cloud/src/index.js +++ b/packages/xo-server-cloud/src/index.js @@ -27,7 +27,8 @@ class XoServerCloud { getResourceCatalog.description = 'Get the list of all available resources' getResourceCatalog.permission = 'admin' - const registerResource = ({ namespace }) => this._registerResource(namespace) + const registerResource = ({ namespace }) => + this._registerResource(namespace) registerResource.description = 'Register a resource via cloud plugin' registerResource.params = { namespace: { @@ -42,21 +43,22 @@ class XoServerCloud { registerResource, }, }) - this._unsetRequestResource = this._xo.defineProperty('requestResource', this._requestResource, this) + this._unsetRequestResource = this._xo.defineProperty( + 'requestResource', + this._requestResource, + this + ) - const updater = this._updater = new Client(`${UPDATER_URL}:${WS_PORT}`) - const connect = () => updater.open(createBackoff()).catch( - error => { + const updater = (this._updater = new Client(`${UPDATER_URL}:${WS_PORT}`)) + const connect = () => + updater.open(createBackoff()).catch(error => { console.error('xo-server-cloud: fail to connect to updater', error) return connect() - } - ) - updater - .on('closed', connect) - .on('scheduledAttempt', ({ delay }) => { - console.warn('xo-server-cloud: next attempt in %s ms', delay) }) + updater.on('closed', connect).on('scheduledAttempt', ({ delay }) => { + console.warn('xo-server-cloud: next attempt in %s ms', delay) + }) connect() } @@ -138,13 +140,15 @@ class XoServerCloud { throw new Error('cannot get download token') } - const req = request.get(`${UPDATER_URL}:${HTTP_PORT}/`) + const req = request + .get(`${UPDATER_URL}:${HTTP_PORT}/`) .set('Authorization', `Bearer ${downloadToken}`) // Impossible to pipe the response directly: https://github.com/visionmedia/superagent/issues/1187 const pt = new PassThrough() req.pipe(pt) - pt.length = (await eventToPromise(req, 'response')).headers['content-length'] + const { headers } = await eventToPromise(req, 'response') + pt.length = headers['content-length'] return pt } diff --git a/packages/xo-server-load-balancer/src/density-plan.js b/packages/xo-server-load-balancer/src/density-plan.js index af4d79101..e57438262 100644 --- a/packages/xo-server-load-balancer/src/density-plan.js +++ b/packages/xo-server-load-balancer/src/density-plan.js @@ -7,8 +7,9 @@ import { debug } from './utils' export default class DensityPlan extends Plan { _checkRessourcesThresholds (objects, averages) { - return filter(objects, object => - 
averages[object.id].memoryFree > this._thresholds.memoryFree.low + return filter( + objects, + object => averages[object.id].memoryFree > this._thresholds.memoryFree.low ) } @@ -19,27 +20,17 @@ export default class DensityPlan extends Plan { return } - const { - hosts, - toOptimize, - } = results + const { hosts, toOptimize } = results - let { - averages: hostsAverages, - } = results + let { averages: hostsAverages } = results const pools = await this._getPlanPools() let optimizationsCount = 0 for (const hostToOptimize of toOptimize) { - const { - id: hostId, - $poolId: poolId, - } = hostToOptimize + const { id: hostId, $poolId: poolId } = hostToOptimize - const { - master: masterId, - } = pools[poolId] + const { master: masterId } = pools[poolId] // Avoid master optimization. if (masterId === hostId) { @@ -58,10 +49,7 @@ export default class DensityPlan extends Plan { const otherHosts = [] for (const dest of hosts) { - const { - id: destId, - $poolId: destPoolId, - } = dest + const { id: destId, $poolId: destPoolId } = dest // Destination host != Host to optimize! if (destId === hostId) { @@ -83,12 +71,7 @@ export default class DensityPlan extends Plan { const simulResults = await this._simulate({ host: hostToOptimize, - destinations: [ - [ poolMaster ], - poolHosts, - masters, - otherHosts, - ], + destinations: [[poolMaster], poolHosts, masters, otherHosts], hostsAverages: clone(hostsAverages), }) @@ -115,15 +98,15 @@ export default class DensityPlan extends Plan { for (const vm of vms) { if (!vm.xenTools) { - debug(`VM (${vm.id}) of Host (${hostId}) does not support pool migration.`) + debug( + `VM (${vm.id}) of Host (${hostId}) does not support pool migration.` + ) return } } // Sort vms by amount of memory. (+ -> -) - vms.sort((a, b) => - vmsAverages[b.id].memory - vmsAverages[a.id].memory - ) + vms.sort((a, b) => vmsAverages[b.id].memory - vmsAverages[a.id].memory) const simulResults = { hostsAverages, @@ -162,15 +145,11 @@ export default class DensityPlan extends Plan { // Test if a VM migration on a destination (of a destinations set) is possible. _testMigration ({ vm, destinations, hostsAverages, vmsAverages }) { - const { - _thresholds: { - critical: criticalThreshold, - }, - } = this + const { _thresholds: { critical: criticalThreshold } } = this // Sort the destinations by available memory. 
(- -> +) - destinations.sort((a, b) => - hostsAverages[a.id].memoryFree - hostsAverages[b.id].memoryFree + destinations.sort( + (a, b) => hostsAverages[a.id].memoryFree - hostsAverages[b.id].memoryFree ) for (const destination of destinations) { @@ -204,13 +183,18 @@ export default class DensityPlan extends Plan { await Promise.all( mapToArray(moves, move => { - const { - vm, - destination, - } = move + const { vm, destination } = move const xapiDest = this.xo.getXapi(destination) - debug(`Migrate VM (${vm.id}) to Host (${destination.id}) from Host (${vm.$container}).`) - return xapiDest.migrateVm(vm._xapiId, this.xo.getXapi(destination), destination._xapiId) + debug( + `Migrate VM (${vm.id}) to Host (${destination.id}) from Host (${ + vm.$container + }).` + ) + return xapiDest.migrateVm( + vm._xapiId, + this.xo.getXapi(destination), + destination._xapiId + ) }) ) diff --git a/packages/xo-server-load-balancer/src/index.js b/packages/xo-server-load-balancer/src/index.js index 2d8125d23..fc5976078 100644 --- a/packages/xo-server-load-balancer/src/index.js +++ b/packages/xo-server-load-balancer/src/index.js @@ -9,10 +9,7 @@ import { DEFAULT_CRITICAL_THRESHOLD_CPU, DEFAULT_CRITICAL_THRESHOLD_MEMORY_FREE, } from './plan' -import { - EXECUTION_DELAY, - debug, -} from './utils' +import { EXECUTION_DELAY, debug } from './utils' // =================================================================== @@ -41,7 +38,7 @@ export const configurationSchema = { }, mode: { - enum: [ 'Performance mode', 'Density mode' ], + enum: ['Performance mode', 'Density mode'], title: 'Mode', }, @@ -85,7 +82,7 @@ export const configurationSchema = { }, }, - required: [ 'name', 'mode', 'pools' ], + required: ['name', 'mode', 'pools'], }, minItems: 1, @@ -115,7 +112,10 @@ const makeJob = (cronPattern, fn) => { try { await fn() } catch (error) { - console.error('[WARN] scheduled function:', (error && error.stack) || error) + console.error( + '[WARN] scheduled function:', + (error && error.stack) || error + ) } finally { job.running = false job.emitter.emit('finish') @@ -133,7 +133,10 @@ const makeJob = (cronPattern, fn) => { class LoadBalancerPlugin { constructor (xo) { this.xo = xo - this._job = makeJob(`*/${EXECUTION_DELAY} * * * *`, this._executePlans.bind(this)) + this._job = makeJob( + `*/${EXECUTION_DELAY} * * * *`, + this._executePlans.bind(this) + ) } async configure ({ plans }) { @@ -154,7 +157,10 @@ class LoadBalancerPlugin { if (plans) { for (const plan of plans) { - this._addPlan(plan.mode === 'Performance mode' ? PERFORMANCE_MODE : DENSITY_MODE, plan) + this._addPlan( + plan.mode === 'Performance mode' ? PERFORMANCE_MODE : DENSITY_MODE, + plan + ) } } @@ -180,18 +186,17 @@ class LoadBalancerPlugin { } this._poolIds = this._poolIds.concat(pools) - this._plans.push(mode === PERFORMANCE_MODE - ? new PerformancePlan(this.xo, name, pools, options) - : new DensityPlan(this.xo, name, pools, options) + this._plans.push( + mode === PERFORMANCE_MODE + ? 
new PerformancePlan(this.xo, name, pools, options) + : new DensityPlan(this.xo, name, pools, options) ) } _executePlans () { debug('Execute plans!') - return Promise.all( - mapToArray(this._plans, plan => plan.execute()) - ) + return Promise.all(mapToArray(this._plans, plan => plan.execute())) } } diff --git a/packages/xo-server-load-balancer/src/performance-plan.js b/packages/xo-server-load-balancer/src/performance-plan.js index 8aedd8f19..f6d35868b 100644 --- a/packages/xo-server-load-balancer/src/performance-plan.js +++ b/packages/xo-server-load-balancer/src/performance-plan.js @@ -35,7 +35,10 @@ export default class PerformancePlan extends Plan { try { await Promise.all( mapToArray( - filter(this._getHosts({ powerState: 'Halted' }), host => host.powerOnMode !== ''), + filter( + this._getHosts({ powerState: 'Halted' }), + host => host.powerOnMode !== '' + ), host => { const { id } = host return this.xo.getXapi(id).powerOnHost(id) @@ -52,17 +55,14 @@ export default class PerformancePlan extends Plan { return } - const { - averages, - toOptimize, - } = results + const { averages, toOptimize } = results let { hosts } = results toOptimize.sort((a, b) => { a = averages[a.id] b = averages[b.id] - return (b.cpu - a.cpu) || (a.memoryFree - b.memoryFree) + return b.cpu - a.cpu || a.memoryFree - b.memoryFree }) for (const exceededHost of toOptimize) { @@ -85,9 +85,7 @@ export default class PerformancePlan extends Plan { const vmsAverages = await this._getVmsAverages(vms, exceededHost) // Sort vms by cpu usage. (lower to higher) - vms.sort((a, b) => - vmsAverages[b.id].cpu - vmsAverages[a.id].cpu - ) + vms.sort((a, b) => vmsAverages[b.id].cpu - vmsAverages[a.id].cpu) const exceededAverages = hostsAverages[exceededHost.id] const promises = [] @@ -95,11 +93,15 @@ export default class PerformancePlan extends Plan { const xapiSrc = this.xo.getXapi(exceededHost) let optimizationsCount = 0 - const searchFunction = (a, b) => hostsAverages[b.id].cpu - hostsAverages[a.id].cpu + const searchFunction = (a, b) => + hostsAverages[b.id].cpu - hostsAverages[a.id].cpu for (const vm of vms) { // Search host with lower cpu usage in the same pool first. In other pool if necessary. - let destination = searchBestObject(find(hosts, host => host.$poolId === vm.$poolId), searchFunction) + let destination = searchBestObject( + find(hosts, host => host.$poolId === vm.$poolId), + searchFunction + ) if (!destination) { destination = searchBestObject(hosts, searchFunction) @@ -110,7 +112,8 @@ export default class PerformancePlan extends Plan { // Unable to move the vm. 
if ( - exceededAverages.cpu - vmAverages.cpu < destinationAverages.cpu + vmAverages.cpu || + exceededAverages.cpu - vmAverages.cpu < + destinationAverages.cpu + vmAverages.cpu || destinationAverages.memoryFree > vmAverages.memory ) { continue @@ -122,15 +125,27 @@ export default class PerformancePlan extends Plan { exceededAverages.memoryFree += vmAverages.memory destinationAverages.memoryFree -= vmAverages.memory - debug(`Migrate VM (${vm.id}) to Host (${destination.id}) from Host (${exceededHost.id}).`) + debug( + `Migrate VM (${vm.id}) to Host (${destination.id}) from Host (${ + exceededHost.id + }).` + ) optimizationsCount++ promises.push( - xapiSrc.migrateVm(vm._xapiId, this.xo.getXapi(destination), destination._xapiId) + xapiSrc.migrateVm( + vm._xapiId, + this.xo.getXapi(destination), + destination._xapiId + ) ) } await Promise.all(promises) - debug(`Performance mode: ${optimizationsCount} optimizations for Host (${exceededHost.id}).`) + debug( + `Performance mode: ${optimizationsCount} optimizations for Host (${ + exceededHost.id + }).` + ) } } diff --git a/packages/xo-server-load-balancer/src/plan.js b/packages/xo-server-load-balancer/src/plan.js index 98e6982e2..a2f50a81d 100644 --- a/packages/xo-server-load-balancer/src/plan.js +++ b/packages/xo-server-load-balancer/src/plan.js @@ -1,9 +1,6 @@ import { filter, includes, map as mapToArray } from 'lodash' -import { - EXECUTION_DELAY, - debug, -} from './utils' +import { EXECUTION_DELAY, debug } from './utils' const MINUTES_OF_HISTORICAL_DATA = 30 @@ -20,7 +17,7 @@ const LOW_THRESHOLD_FACTOR = 0.25 const HIGH_THRESHOLD_MEMORY_FREE_FACTOR = 1.25 const LOW_THRESHOLD_MEMORY_FREE_FACTOR = 20.0 -const numberOrDefault = (value, def) => (value >= 0) ? value : def +const numberOrDefault = (value, def) => (value >= 0 ? value : def) // =================================================================== // Averages. @@ -69,10 +66,12 @@ function computeRessourcesAverageWithWeight (averages1, averages2, ratio) { const averages = {} for (const id in averages1) { - const objectAverages = averages[id] = {} + const objectAverages = (averages[id] = {}) for (const averageName in averages1[id]) { - objectAverages[averageName] = averages1[id][averageName] * ratio + averages2[id][averageName] * (1 - ratio) + objectAverages[averageName] = + averages1[id][averageName] * ratio + + averages2[id][averageName] * (1 - ratio) } } @@ -89,20 +88,24 @@ function setRealCpuAverageOfVms (vms, vmsAverages, nCpus) { // =================================================================== export default class Plan { - constructor (xo, name, poolIds, { - excludedHosts, - thresholds, - } = {}) { + constructor (xo, name, poolIds, { excludedHosts, thresholds } = {}) { this.xo = xo this._name = name this._poolIds = poolIds this._excludedHosts = excludedHosts this._thresholds = { cpu: { - critical: numberOrDefault(thresholds && thresholds.cpu, DEFAULT_CRITICAL_THRESHOLD_CPU), + critical: numberOrDefault( + thresholds && thresholds.cpu, + DEFAULT_CRITICAL_THRESHOLD_CPU + ), }, memoryFree: { - critical: numberOrDefault(thresholds && thresholds.memoryFree, DEFAULT_CRITICAL_THRESHOLD_MEMORY_FREE) * 1024, + critical: + numberOrDefault( + thresholds && thresholds.memoryFree, + DEFAULT_CRITICAL_THRESHOLD_MEMORY_FREE + ) * 1024, }, } @@ -143,8 +146,16 @@ export default class Plan { } // Check in the last 30 min interval with ratio. 
- const avgBefore = computeRessourcesAverage(hosts, hostsStats, MINUTES_OF_HISTORICAL_DATA) - const avgWithRatio = computeRessourcesAverageWithWeight(avgNow, avgBefore, 0.75) + const avgBefore = computeRessourcesAverage( + hosts, + hostsStats, + MINUTES_OF_HISTORICAL_DATA + ) + const avgWithRatio = computeRessourcesAverageWithWeight( + avgNow, + avgBefore, + 0.75 + ) toOptimize = this._checkRessourcesThresholds(toOptimize, avgWithRatio) @@ -185,19 +196,23 @@ export default class Plan { // Compute hosts for each pool. They can change over time. _getHosts ({ powerState = 'Running' } = {}) { - return filter(this.xo.getObjects(), object => ( - object.type === 'host' && - includes(this._poolIds, object.$poolId) && - object.power_state === powerState && - !includes(this._excludedHosts, object.id) - )) + return filter( + this.xo.getObjects(), + object => + object.type === 'host' && + includes(this._poolIds, object.$poolId) && + object.power_state === powerState && + !includes(this._excludedHosts, object.id) + ) } async _getVms (hostId) { - return filter(this.xo.getObjects(), object => - object.type === 'VM' && - object.power_state === 'Running' && - object.$container === hostId + return filter( + this.xo.getObjects(), + object => + object.type === 'VM' && + object.power_state === 'Running' && + object.$container === hostId ) } @@ -208,15 +223,17 @@ export default class Plan { async _getHostsStats (hosts, granularity) { const hostsStats = {} - await Promise.all(mapToArray(hosts, host => - this.xo.getXapiHostStats(host, granularity).then(hostStats => { - hostsStats[host.id] = { - nPoints: hostStats.stats.cpus[0].length, - stats: hostStats.stats, - averages: {}, - } - }) - )) + await Promise.all( + mapToArray(hosts, host => + this.xo.getXapiHostStats(host, granularity).then(hostStats => { + hostsStats[host.id] = { + nPoints: hostStats.stats.cpus[0].length, + stats: hostStats.stats, + averages: {}, + } + }) + ) + ) return hostsStats } @@ -224,15 +241,17 @@ export default class Plan { async _getVmsStats (vms, granularity) { const vmsStats = {} - await Promise.all(mapToArray(vms, vm => - this.xo.getXapiVmStats(vm, granularity).then(vmStats => { - vmsStats[vm.id] = { - nPoints: vmStats.stats.cpus[0].length, - stats: vmStats.stats, - averages: {}, - } - }) - )) + await Promise.all( + mapToArray(vms, vm => + this.xo.getXapiVmStats(vm, granularity).then(vmStats => { + vmsStats[vm.id] = { + nPoints: vmStats.stats.cpus[0].length, + stats: vmStats.stats, + averages: {}, + } + }) + ) + ) return vmsStats } diff --git a/packages/xo-server-perf-alert/.babelrc.js b/packages/xo-server-perf-alert/.babelrc.js index 5fdaef4ec..3d0c8e5eb 100644 --- a/packages/xo-server-perf-alert/.babelrc.js +++ b/packages/xo-server-perf-alert/.babelrc.js @@ -27,9 +27,7 @@ module.exports = { debug: !__TEST__, loose: true, shippedProposals: true, - targets: __PROD__ - ? { node: nodeCompat } - : { node: 'current' }, + targets: __PROD__ ? 
{ node: nodeCompat } : { node: 'current' }, useBuiltIns: '@babel/polyfill' in (pkg.dependencies || {}) && 'usage', }, ], diff --git a/packages/xo-server-perf-alert/src/index.js b/packages/xo-server-perf-alert/src/index.js index fe07a5997..c6eeedc58 100644 --- a/packages/xo-server-perf-alert/src/index.js +++ b/packages/xo-server-perf-alert/src/index.js @@ -91,9 +91,7 @@ const HOST_FUNCTIONS = { unit: '% used', comparator: '>', createParser: (legend, threshold) => { - const memoryKBytesLegend = legend.find( - l => l.name === 'memory_total_kib' - ) + const memoryKBytesLegend = legend.find(l => l.name === 'memory_total_kib') const memoryKBytesFreeLegend = legend.find( l => l.name === 'memory_free_kib' ) diff --git a/packages/xo-server-test-plugin/index.js b/packages/xo-server-test-plugin/index.js index cf6e03f98..2b6fbdf6f 100644 --- a/packages/xo-server-test-plugin/index.js +++ b/packages/xo-server-test-plugin/index.js @@ -37,7 +37,6 @@ exports.default = function (opts) { // For simplicity's sake, this plugin returns a plain object, but // usually it returns a new instance of an existing class. return { - // This (optional) method is called each time the plugin is // (re-)configured. // diff --git a/packages/xo-server-transport-email/src/index.js b/packages/xo-server-transport-email/src/index.js index 7b9d74a3c..47c9d3ab7 100644 --- a/packages/xo-server-transport-email/src/index.js +++ b/packages/xo-server-transport-email/src/index.js @@ -16,7 +16,10 @@ const removeUndefined = obj => { const markdownCompiler = nodemailerMarkdown() const logAndRethrow = error => { - console.error('[WARN] plugin transport-email:', (error && error.stack) || error) + console.error( + '[WARN] plugin transport-email:', + (error && error.stack) || error + ) throw error } @@ -59,7 +62,7 @@ export const configurationSchema = { }, secure: { default: false, - enum: [ false, 'force', 'disabled', true ], + enum: [false, 'force', 'disabled', true], enumNames: [ 'auto (uses STARTTLS if available)', 'force (requires STARTTLS or fail)', @@ -70,7 +73,8 @@ export const configurationSchema = { }, ignoreUnauthorized: { type: 'boolean', - description: 'ignore certificates error (e.g. self-signed certificate)', + description: + 'ignore certificates error (e.g. 
self-signed certificate)', }, // FIXME: xo-web does not support edition of too nested @@ -138,18 +142,11 @@ class TransportEmailPlugin { configure ({ from, - transport: { - ignoreUnauthorized, - password, - secure, - user, - ...transportConf - }, + transport: { ignoreUnauthorized, password, secure, user, ...transportConf }, }) { if (ignoreUnauthorized != null) { - ( - transportConf.tls || - (transportConf.tls = {}) + ;( + transportConf.tls || (transportConf.tls = {}) ).rejectUnauthorized = !ignoreUnauthorized } @@ -159,11 +156,14 @@ class TransportEmailPlugin { switch (secure) { case true: - transportConf.secure = true; break + transportConf.secure = true + break case 'disabled': - transportConf.ignoreTLS = true; break + transportConf.ignoreTLS = true + break case 'required': - transportConf.requireTLS = true; break + transportConf.requireTLS = true + break } const transport = createTransport(transportConf, { from }) @@ -180,7 +180,7 @@ class TransportEmailPlugin { this._unset() } - test ({to}) { + test ({ to }) { return this._sendEmail({ to, subject: '[Xen Orchestra] Test of transport-email plugin', @@ -188,29 +188,27 @@ class TransportEmailPlugin { The transport-email plugin for Xen Orchestra server seems to be working fine, nicely done :) `, - attachments: [ { - filename: 'example.txt', - content: 'Attachments are working too, great!\n', - } ], + attachments: [ + { + filename: 'example.txt', + content: 'Attachments are working too, great!\n', + }, + ], }) } - _sendEmail ({ - from, - to, cc, bcc, - subject, - markdown, - attachments, - }) { - return this._send(removeUndefined({ - from, - to, - cc, - bcc, - subject, - markdown, - attachments, - })).catch(logAndRethrow) + _sendEmail ({ from, to, cc, bcc, subject, markdown, attachments }) { + return this._send( + removeUndefined({ + from, + to, + cc, + bcc, + subject, + markdown, + attachments, + }) + ).catch(logAndRethrow) } } diff --git a/packages/xo-server-transport-nagios/src/index.js b/packages/xo-server-transport-nagios/src/index.js index 0a939e370..3f4b2ef63 100644 --- a/packages/xo-server-transport-nagios/src/index.js +++ b/packages/xo-server-transport-nagios/src/index.js @@ -35,18 +35,12 @@ export const configurationSchema = { // =================================================================== -const bind = (fn, thisArg) => function __bound__ () { - return fn.apply(thisArg, arguments) -} +const bind = (fn, thisArg) => + function __bound__ () { + return fn.apply(thisArg, arguments) + } -function nscaPacketBuilder ({ - host, - iv, - message, - service, - status, - timestamp, -}) { +function nscaPacketBuilder ({ host, iv, message, service, status, timestamp }) { // Building NSCA packet const SIZE = 720 const packet = Buffer.alloc(SIZE) @@ -112,15 +106,13 @@ class XoServerNagios { test () { return this._sendPassiveCheck({ - message: 'The server-nagios plugin for Xen Orchestra server seems to be working fine, nicely done :)', + message: + 'The server-nagios plugin for Xen Orchestra server seems to be working fine, nicely done :)', status: OK, }) } - _sendPassiveCheck ({ - message, - status, - }) { + _sendPassiveCheck ({ message, status }) { return new Promise((resolve, reject) => { if (/\r|\n/.test(message)) { throw new Error('the message must not contain a line break') @@ -145,13 +137,7 @@ class XoServerNagios { // 1) Using xor between the NSCA packet and the initialization vector // 2) Using xor between the result of the first operation and the encryption key - const xorPacketBuffer = xor( - xor( - packet, - iv - ), - this._key - ) 
+ const xorPacketBuffer = xor(xor(packet, iv), this._key) client.write(xorPacketBuffer, res => { client.destroy() diff --git a/packages/xo-server-transport-slack/src/index.js b/packages/xo-server-transport-slack/src/index.js index 6c3939c4e..cec681c1c 100644 --- a/packages/xo-server-transport-slack/src/index.js +++ b/packages/xo-server-transport-slack/src/index.js @@ -4,7 +4,10 @@ import { promisify } from 'promise-toolbox' // =================================================================== const logAndRethrow = error => { - console.error('[WARN] plugin transport-slack:', (error != null && error.stack) || error) + console.error( + '[WARN] plugin transport-slack:', + (error != null && error.stack) || error + ) throw error } @@ -48,10 +51,7 @@ class XoServerTransportSlack { this._send = null } - configure ({ - webhookUri, - ...conf - }) { + configure ({ webhookUri, ...conf }) { const slack = new Slack() slack.setWebhook(webhookUri) this._conf = conf @@ -74,9 +74,7 @@ The transport-slack plugin for Xen Orchestra server seems to be working fine, ni }) } - _sendSlack ({ - message, - }) { + _sendSlack ({ message }) { // TODO: handle errors return this._send({ ...this._conf, text: message }).catch(logAndRethrow) } diff --git a/packages/xo-server-transport-xmpp/src/index.js b/packages/xo-server-transport-xmpp/src/index.js index 3185af1de..2a02bb933 100644 --- a/packages/xo-server-transport-xmpp/src/index.js +++ b/packages/xo-server-transport-xmpp/src/index.js @@ -68,13 +68,15 @@ class TransportXmppPlugin { this._unset = this._client = null } - _sendToXmppClient ({to, message}) { + _sendToXmppClient ({ to, message }) { for (const receiver of to) { this._client.send( new XmppClient.Stanza('message', { to: receiver, type: 'chat', - }).c('body').t(message) + }) + .c('body') + .t(message) ) } } diff --git a/packages/xo-server-usage-report/src/index.js b/packages/xo-server-usage-report/src/index.js index 687048ff0..05107f19d 100644 --- a/packages/xo-server-usage-report/src/index.js +++ b/packages/xo-server-usage-report/src/index.js @@ -14,13 +14,8 @@ import { values, zipObject, } from 'lodash' -import { - promisify, -} from 'promise-toolbox' -import { - readFile, - writeFile, -} from 'fs' +import { promisify } from 'promise-toolbox' +import { readFile, writeFile } from 'fs' // =================================================================== @@ -41,9 +36,9 @@ const mibPower = Math.pow(2, 20) const kibPower = Math.pow(2, 10) let template = null -pReadFile(`${__dirname}/../report.html.tpl`, 'utf8') - .then(tpl => { - template = Handlebars.compile(minify(tpl, { +pReadFile(`${__dirname}/../report.html.tpl`, 'utf8').then(tpl => { + template = Handlebars.compile( + minify(tpl, { collapseBooleanAttributes: true, collapseWhitespace: true, minifyCSS: true, @@ -51,14 +46,14 @@ pReadFile(`${__dirname}/../report.html.tpl`, 'utf8') removeComments: true, removeOptionalTags: true, removeRedundantAttributes: true, - })) - }) + }) + ) +}) let imgXo = null -pReadFile(`${__dirname}/../images/xo.png`, 'base64') - .then(data => { - imgXo = `data:image/png;base64,${data}` - }) +pReadFile(`${__dirname}/../images/xo.png`, 'base64').then(data => { + imgXo = `data:image/png;base64,${data}` +}) // =================================================================== @@ -75,26 +70,36 @@ export const configurationSchema = { periodicity: { type: 'string', enum: ['monthly', 'weekly'], - description: 'If you choose weekly you will receive the report every sunday and if you choose monthly you will receive it every first day of the 
month.',
+      description:
+        'If you choose weekly you will receive the report every Sunday and if you choose monthly you will receive it every first day of the month.',
     },
   },
   additionalProperties: false,
-  required: [ 'emails', 'periodicity' ],
+  required: ['emails', 'periodicity'],
 }
 
 // ===================================================================
 
-Handlebars.registerHelper('compare', function (lvalue, operator, rvalue, options) {
+Handlebars.registerHelper('compare', function (
+  lvalue,
+  operator,
+  rvalue,
+  options
+) {
   if (arguments.length < 3) {
     throw new Error('Handlerbars Helper "compare" needs 2 parameters')
   }
 
   if (!compareOperators[operator]) {
-    throw new Error(`Handlerbars Helper "compare" doesn't know the operator ${operator}`)
+    throw new Error(
+      `Handlebars Helper "compare" doesn't know the operator ${operator}`
+    )
  }
 
-  return compareOperators[operator](lvalue, rvalue) ? options.fn(this) : options.inverse(this)
+  return compareOperators[operator](lvalue, rvalue)
+    ? options.fn(this)
+    : options.inverse(this)
 })
 
 Handlebars.registerHelper('math', function (lvalue, operator, rvalue, options) {
@@ -103,7 +108,9 @@ Handlebars.registerHelper('math', function (lvalue, operator, rvalue, options) {
   }
 
   if (!mathOperators[operator]) {
-    throw new Error(`Handlerbars Helper "math" doesn't know the operator ${operator}`)
+    throw new Error(
+      `Handlebars Helper "math" doesn't know the operator ${operator}`
+    )
   }
 
   return mathOperators[operator](+lvalue, +rvalue)
@@ -135,24 +142,24 @@ const computeDoubleMean = val => computeMean(val.map(computeMean))
 function computeMeans (objects, options) {
   return zipObject(
     options,
-    map(
-      options,
-      opt => round(computeMean(map(objects, opt)), 2)
-    )
+    map(options, opt => round(computeMean(map(objects, opt)), 2))
   )
 }
 
 function getTop (objects, options) {
   return zipObject(
     options,
-    map(
-      options,
-      opt => map(
-        orderBy(objects, object => {
-          const value = object[opt]
+    map(options, opt =>
+      map(
+        orderBy(
+          objects,
+          object => {
+            const value = object[opt]
 
-          return isNaN(value) ? -Infinity : value
-        }, 'desc').slice(0, 3),
+            return isNaN(value) ? -Infinity : value
+          },
+          'desc'
+        ).slice(0, 3),
         obj => ({
          uuid: obj.uuid,
          name: obj.name,
@@ -168,7 +175,10 @@ function conputePercentage (curr, prev, options) {
     options,
     map(
       options,
-      opt => prev[opt] === 0 ? 'NONE' : `${round((curr[opt] - prev[opt]) * 100 / prev[opt], 2)}`
+      opt =>
+        prev[opt] === 0
+          ? 
'NONE' + : `${round((curr[opt] - prev[opt]) * 100 / prev[opt], 2)}` ) ) } @@ -182,48 +192,42 @@ function getDiff (oldElements, newElements) { // =================================================================== -function getVmsStats ({ - runningVms, - xo, -}) { - return Promise.all(map(runningVms, async vm => { - const vmStats = await xo.getXapiVmStats(vm, 'days') - return { - uuid: vm.uuid, - name: vm.name_label, - cpu: computeDoubleMean(vmStats.stats.cpus), - ram: computeMean(vmStats.stats.memoryUsed) / gibPower, - diskRead: computeDoubleMean(values(vmStats.stats.xvds.r)) / mibPower, - diskWrite: computeDoubleMean(values(vmStats.stats.xvds.w)) / mibPower, - netReception: computeDoubleMean(vmStats.stats.vifs.rx) / kibPower, - netTransmission: computeDoubleMean(vmStats.stats.vifs.tx) / kibPower, - } - })) +function getVmsStats ({ runningVms, xo }) { + return Promise.all( + map(runningVms, async vm => { + const vmStats = await xo.getXapiVmStats(vm, 'days') + return { + uuid: vm.uuid, + name: vm.name_label, + cpu: computeDoubleMean(vmStats.stats.cpus), + ram: computeMean(vmStats.stats.memoryUsed) / gibPower, + diskRead: computeDoubleMean(values(vmStats.stats.xvds.r)) / mibPower, + diskWrite: computeDoubleMean(values(vmStats.stats.xvds.w)) / mibPower, + netReception: computeDoubleMean(vmStats.stats.vifs.rx) / kibPower, + netTransmission: computeDoubleMean(vmStats.stats.vifs.tx) / kibPower, + } + }) + ) } -function getHostsStats ({ - runningHosts, - xo, -}) { - return Promise.all(map(runningHosts, async host => { - const hostStats = await xo.getXapiHostStats(host, 'days') - return { - uuid: host.uuid, - name: host.name_label, - cpu: computeDoubleMean(hostStats.stats.cpus), - ram: computeMean(hostStats.stats.memoryUsed) / gibPower, - load: computeMean(hostStats.stats.load), - netReception: computeDoubleMean(hostStats.stats.pifs.rx) / kibPower, - netTransmission: computeDoubleMean(hostStats.stats.pifs.tx) / kibPower, - } - })) +function getHostsStats ({ runningHosts, xo }) { + return Promise.all( + map(runningHosts, async host => { + const hostStats = await xo.getXapiHostStats(host, 'days') + return { + uuid: host.uuid, + name: host.name_label, + cpu: computeDoubleMean(hostStats.stats.cpus), + ram: computeMean(hostStats.stats.memoryUsed) / gibPower, + load: computeMean(hostStats.stats.load), + netReception: computeDoubleMean(hostStats.stats.pifs.rx) / kibPower, + netTransmission: computeDoubleMean(hostStats.stats.pifs.tx) / kibPower, + } + }) + ) } -function computeGlobalVmsStats ({ - haltedVms, - vmsStats, - xo, -}) { +function computeGlobalVmsStats ({ haltedVms, vmsStats, xo }) { const allVms = concat( map(vmsStats, vm => ({ uuid: vm.uuid, @@ -235,17 +239,23 @@ function computeGlobalVmsStats ({ })) ) - return assign(computeMeans(vmsStats, ['cpu', 'ram', 'diskRead', 'diskWrite', 'netReception', 'netTransmission']), { - number: allVms.length, - allVms, - }) + return assign( + computeMeans(vmsStats, [ + 'cpu', + 'ram', + 'diskRead', + 'diskWrite', + 'netReception', + 'netTransmission', + ]), + { + number: allVms.length, + allVms, + } + ) } -function computeGlobalHostsStats ({ - haltedHosts, - hostsStats, - xo, -}) { +function computeGlobalHostsStats ({ haltedHosts, hostsStats, xo }) { const allHosts = concat( map(hostsStats, host => ({ uuid: host.uuid, @@ -257,52 +267,65 @@ function computeGlobalHostsStats ({ })) ) - return assign(computeMeans(hostsStats, ['cpu', 'ram', 'load', 'netReception', 'netTransmission']), { - number: allHosts.length, - allHosts, - }) -} - -function getTopVms ({ - 
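
The evolution figure whose ternary is reformatted here, restated as a self-contained sketch; plain Math.round stands in for lodash's round(x, 2), and note the function name conputePercentage is spelled that way in the source:

const evolution = (curr, prev) =>
  prev === 0 ? 'NONE' : `${Math.round(((curr - prev) * 100 / prev) * 100) / 100}`
console.log(evolution(75, 50)) // → '50', i.e. +50 %
console.log(evolution(10, 0)) // → 'NONE', zero baseline, no division
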
vmsStats, - xo, -}) { - return getTop(vmsStats, ['cpu', 'ram', 'diskRead', 'diskWrite', 'netReception', 'netTransmission']) -} - -function getTopHosts ({ - hostsStats, - xo, -}) { - return getTop(hostsStats, ['cpu', 'ram', 'load', 'netReception', 'netTransmission']) -} - -function getMostAllocatedSpaces ({ - disks, - xo, -}) { - return map( - orderBy(disks, ['size'], ['desc']).slice(0, 3), disk => ({ - uuid: disk.uuid, - name: disk.name_label, - size: round(disk.size / gibPower, 2), - })) -} - -async function getHostsMissingPatches ({ - runningHosts, - xo, -}) { - const hostsMissingPatches = await Promise.all(map(runningHosts, async host => { - const hostsPatches = await xo.getXapi(host).listMissingPoolPatchesOnHost(host._xapiId) - if (hostsPatches.length > 0) { - return { - uuid: host.uuid, - name: host.name_label, - patches: map(hostsPatches, 'name'), - } + return assign( + computeMeans(hostsStats, [ + 'cpu', + 'ram', + 'load', + 'netReception', + 'netTransmission', + ]), + { + number: allHosts.length, + allHosts, } + ) +} + +function getTopVms ({ vmsStats, xo }) { + return getTop(vmsStats, [ + 'cpu', + 'ram', + 'diskRead', + 'diskWrite', + 'netReception', + 'netTransmission', + ]) +} + +function getTopHosts ({ hostsStats, xo }) { + return getTop(hostsStats, [ + 'cpu', + 'ram', + 'load', + 'netReception', + 'netTransmission', + ]) +} + +function getMostAllocatedSpaces ({ disks, xo }) { + return map(orderBy(disks, ['size'], ['desc']).slice(0, 3), disk => ({ + uuid: disk.uuid, + name: disk.name_label, + size: round(disk.size / gibPower, 2), })) +} + +async function getHostsMissingPatches ({ runningHosts, xo }) { + const hostsMissingPatches = await Promise.all( + map(runningHosts, async host => { + const hostsPatches = await xo + .getXapi(host) + .listMissingPoolPatchesOnHost(host._xapiId) + if (hostsPatches.length > 0) { + return { + uuid: host.uuid, + name: host.name_label, + patches: map(hostsPatches, 'name'), + } + } + }) + ) return filter(hostsMissingPatches, host => host !== undefined) } @@ -310,17 +333,11 @@ function getAllUsersEmail (users) { return map(users, 'email') } -async function storeStats ({ - data, - storedStatsPath, -}) { +async function storeStats ({ data, storedStatsPath }) { await pWriteFile(storedStatsPath, JSON.stringify(data)) } -async function computeEvolution ({ - storedStatsPath, - ...newStats -}) { +async function computeEvolution ({ storedStatsPath, ...newStats }) { try { const oldStats = JSON.parse(await pReadFile(storedStatsPath, 'utf8')) const newStatsVms = newStats.vms @@ -332,16 +349,35 @@ async function computeEvolution ({ const vmsEvolution = { number: newStatsVms.number - oldStatsVms.number, - ...conputePercentage(newStatsVms, oldStatsVms, ['cpu', 'ram', 'diskRead', 'diskWrite', 'netReception', 'netTransmission']), + ...conputePercentage(newStatsVms, oldStatsVms, [ + 'cpu', + 'ram', + 'diskRead', + 'diskWrite', + 'netReception', + 'netTransmission', + ]), } const hostsEvolution = { number: newStatsHosts.number - oldStatsHosts.number, - ...conputePercentage(newStatsHosts, oldStatsHosts, ['cpu', 'ram', 'load', 'netReception', 'netTransmission']), + ...conputePercentage(newStatsHosts, oldStatsHosts, [ + 'cpu', + 'ram', + 'load', + 'netReception', + 'netTransmission', + ]), } - const vmsRessourcesEvolution = getDiff(oldStatsVms.allVms, newStatsVms.allVms) - const hostsRessourcesEvolution = getDiff(oldStatsHosts.allHosts, newStatsHosts.allHosts) + const vmsRessourcesEvolution = getDiff( + oldStatsVms.allVms, + newStatsVms.allVms + ) + const 
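
A self-contained restatement of getTop() (reformatted earlier in this file) showing the shape consumed by getTopVms()/getTopHosts() above; the `value` field is an assumption, since the tail of the mapper falls outside these hunks:

const { map, orderBy, zipObject } = require('lodash')
const getTop = (objects, options) =>
  zipObject(
    options,
    map(options, opt =>
      map(
        orderBy(objects, o => (isNaN(o[opt]) ? -Infinity : o[opt]), 'desc')
          .slice(0, 3),
        o => ({ uuid: o.uuid, name: o.name, value: o[opt] })
      )
    )
  )
getTop([{ uuid: 'a', name: 'vm-a', cpu: 12 }], ['cpu'])
// → { cpu: [{ uuid: 'a', name: 'vm-a', value: 12 }] }
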
hostsRessourcesEvolution = getDiff( + oldStatsHosts.allHosts, + newStatsHosts.allHosts + ) const usersEvolution = getDiff(oldStats.users, newStats.users) @@ -358,29 +394,41 @@ async function computeEvolution ({ } } -async function dataBuilder ({ - xo, - storedStatsPath, -}) { +async function dataBuilder ({ xo, storedStatsPath }) { const xoObjects = values(xo.getObjects()) - const runningVms = filter(xoObjects, {type: 'VM', power_state: 'Running'}) - const haltedVms = filter(xoObjects, {type: 'VM', power_state: 'Halted'}) - const runningHosts = filter(xoObjects, {type: 'host', power_state: 'Running'}) - const haltedHosts = filter(xoObjects, {type: 'host', power_state: 'Halted'}) - const disks = filter(xoObjects, {type: 'SR'}) - const [users, vmsStats, hostsStats, topAllocation, hostsMissingPatches] = await Promise.all([ + const runningVms = filter(xoObjects, { type: 'VM', power_state: 'Running' }) + const haltedVms = filter(xoObjects, { type: 'VM', power_state: 'Halted' }) + const runningHosts = filter(xoObjects, { + type: 'host', + power_state: 'Running', + }) + const haltedHosts = filter(xoObjects, { type: 'host', power_state: 'Halted' }) + const disks = filter(xoObjects, { type: 'SR' }) + const [ + users, + vmsStats, + hostsStats, + topAllocation, + hostsMissingPatches, + ] = await Promise.all([ xo.getAllUsers(), - getVmsStats({xo, runningVms}), - getHostsStats({xo, runningHosts}), - getMostAllocatedSpaces({xo, disks}), - getHostsMissingPatches({xo, runningHosts}), + getVmsStats({ xo, runningVms }), + getHostsStats({ xo, runningHosts }), + getMostAllocatedSpaces({ xo, disks }), + getHostsMissingPatches({ xo, runningHosts }), ]) - const [globalVmsStats, globalHostsStats, topVms, topHosts, usersEmail] = await Promise.all([ - computeGlobalVmsStats({xo, vmsStats, haltedVms}), - computeGlobalHostsStats({xo, hostsStats, haltedHosts}), - getTopVms({xo, vmsStats}), - getTopHosts({xo, hostsStats}), + const [ + globalVmsStats, + globalHostsStats, + topVms, + topHosts, + usersEmail, + ] = await Promise.all([ + computeGlobalVmsStats({ xo, vmsStats, haltedVms }), + computeGlobalHostsStats({ xo, hostsStats, haltedHosts }), + getTopVms({ xo, vmsStats }), + getTopHosts({ xo, hostsStats }), getAllUsersEmail(users), ]) const evolution = await computeEvolution({ @@ -419,7 +467,7 @@ async function dataBuilder ({ // =================================================================== class UsageReportPlugin { - constructor ({xo, getDataDir}) { + constructor ({ xo, getDataDir }) { this._xo = xo this._dir = getDataDir // Defined in configure(). @@ -430,7 +478,8 @@ class UsageReportPlugin { this._conf = configuration this._job = new CronJob({ - cronTime: configuration.periodicity === 'monthly' ? '00 06 1 * *' : '00 06 * * 0', + cronTime: + configuration.periodicity === 'monthly' ? '00 06 1 * *' : '00 06 * * 0', onTick: () => this._sendReport(), start: false, }) @@ -467,10 +516,12 @@ class UsageReportPlugin { Please, find the attached report. 
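
The two cron expressions configured above, decoded field by field (minute, hour, day of month, month, day of week):

// '00 06 1 * *' : monthly, 06:00 on the first day of every month
// '00 06 * * 0' : weekly, 06:00 every Sunday (0 = Sunday)
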
best regards.`, - attachments: [{ - filename: `xoReport_${currDate}.html`, - content: template(data), - }], + attachments: [ + { + filename: `xoReport_${currDate}.html`, + content: template(data), + }, + ], }), storeStats({ data, diff --git a/packages/xo-server/better-stacks.js b/packages/xo-server/better-stacks.js index 979006f62..20cf47146 100644 --- a/packages/xo-server/better-stacks.js +++ b/packages/xo-server/better-stacks.js @@ -8,25 +8,21 @@ try { const filtered = frames.filter(function (frame) { const name = frame && frame.getFileName() - return ( - // has a filename + return (// has a filename name && - // contains a separator (no internal modules) name.indexOf(sep) !== -1 && - // does not start with `internal` - name.lastIndexOf('internal', 0) !== -1 - ) + name.lastIndexOf('internal', 0) !== -1) }) // depd (used amongst other by express requires at least 3 frames // in the stack. - return filtered.length > 2 - ? filtered - : frames + return filtered.length > 2 ? filtered : frames }) } catch (_) {} // Source maps. -try { require('julien-f-source-map-support/register') } catch (_) {} +try { + require('julien-f-source-map-support/register') +} catch (_) {} diff --git a/packages/xo-server/src/api/acl.js b/packages/xo-server/src/api/acl.js index 840e9165b..929f1257b 100644 --- a/packages/xo-server/src/api/acl.js +++ b/packages/xo-server/src/api/acl.js @@ -14,11 +14,12 @@ export async function getCurrentPermissions () { getCurrentPermissions.permission = '' -getCurrentPermissions.description = 'get (explicit) permissions by object for the current user' +getCurrentPermissions.description = + 'get (explicit) permissions by object for the current user' // ------------------------------------------------------------------- -export async function add ({subject, object, action}) { +export async function add ({ subject, object, action }) { await this.addAcl(subject, object, action) } @@ -34,7 +35,7 @@ add.description = 'add a new ACL entry' // ------------------------------------------------------------------- -export async function remove ({subject, object, action}) { +export async function remove ({ subject, object, action }) { await this.removeAcl(subject, object, action) } diff --git a/packages/xo-server/src/api/backup.js b/packages/xo-server/src/api/backup.js index df6162907..7cf8f11e3 100644 --- a/packages/xo-server/src/api/backup.js +++ b/packages/xo-server/src/api/backup.js @@ -42,46 +42,57 @@ scanFiles.params = { // ------------------------------------------------------------------- -function handleFetchFiles (req, res, { remote, disk, partition, paths, format: archiveFormat }) { - this.fetchFilesInDiskBackup(remote, disk, partition, paths).then(files => { - res.setHeader('content-disposition', 'attachment') - res.setHeader('content-type', 'application/octet-stream') +function handleFetchFiles ( + req, + res, + { remote, disk, partition, paths, format: archiveFormat } +) { + this.fetchFilesInDiskBackup(remote, disk, partition, paths) + .then(files => { + res.setHeader('content-disposition', 'attachment') + res.setHeader('content-type', 'application/octet-stream') - const nFiles = paths.length + const nFiles = paths.length - // Send lone file directly - if (nFiles === 1) { - files[0].pipe(res) - return - } + // Send lone file directly + if (nFiles === 1) { + files[0].pipe(res) + return + } - const archive = archiver(archiveFormat) - archive.on('error', error => { + const archive = archiver(archiveFormat) + archive.on('error', error => { + console.error(error) + res.end(format.error(0, 
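
A condensed sketch of the archiver flow inside handleFetchFiles() above, under the handler's own assumptions (`files` are readable streams carrying a `path`, `res` is the HTTP response, archiveFormat defaults to 'zip'):

const archive = archiver(archiveFormat)
archive.on('error', error => res.end(format.error(0, error)))
forEach(files, file => archive.append(file, { name: basename(file.path) }))
archive.finalize() // no more entries will be appended
archive.pipe(res) // stream the archive out as it is built
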
error)) + }) + + forEach(files, file => { + archive.append(file, { name: basename(file.path) }) + }) + archive.finalize() + + archive.pipe(res) + }) + .catch(error => { console.error(error) + res.writeHead(500) res.end(format.error(0, error)) }) - - forEach(files, file => { - archive.append(file, { name: basename(file.path) }) - }) - archive.finalize() - - archive.pipe(res) - }).catch(error => { - console.error(error) - res.writeHead(500) - res.end(format.error(0, error)) - }) } export async function fetchFiles ({ format = 'zip', ...params }) { - const fileName = params.paths.length > 1 - ? `restore_${new Date().toJSON()}.${format}` - : basename(params.paths[0]) + const fileName = + params.paths.length > 1 + ? `restore_${new Date().toJSON()}.${format}` + : basename(params.paths[0]) - return this.registerHttpRequest(handleFetchFiles, { ...params, format }, { - suffix: encodeURI(`/${fileName}`), - }).then(url => ({ $getFrom: url })) + return this.registerHttpRequest( + handleFetchFiles, + { ...params, format }, + { + suffix: encodeURI(`/${fileName}`), + } + ).then(url => ({ $getFrom: url })) } fetchFiles.permission = 'admin' diff --git a/packages/xo-server/src/api/disk.js b/packages/xo-server/src/api/disk.js index 69f0a8b5a..a5c8e76cb 100644 --- a/packages/xo-server/src/api/disk.js +++ b/packages/xo-server/src/api/disk.js @@ -8,9 +8,11 @@ export async function create ({ name, size, sr, vm, bootable, position, mode }) let resourceSet if (attach && (resourceSet = vm.resourceSet) != null) { - await this.checkResourceSetConstraints(resourceSet, this.user.id, [ sr.id ]) + await this.checkResourceSetConstraints(resourceSet, this.user.id, [sr.id]) await this.allocateLimitsInResourceSet({ disk: size }, resourceSet) - } else if (!(await this.hasPermissions(this.user.id, [ [ sr.id, 'administrate' ] ]))) { + } else if ( + !await this.hasPermissions(this.user.id, [[sr.id, 'administrate']]) + ) { throw unauthorized() } diff --git a/packages/xo-server/src/api/docker.js b/packages/xo-server/src/api/docker.js index 0b79239f1..f3bcdd66d 100644 --- a/packages/xo-server/src/api/docker.js +++ b/packages/xo-server/src/api/docker.js @@ -1,4 +1,4 @@ -export async function register ({vm}) { +export async function register ({ vm }) { await this.getXapi(vm).registerDockerContainer(vm._xapiId) } register.description = 'Register the VM for Docker management' @@ -13,7 +13,7 @@ register.resolve = { // ----------------------------------------------------------------------------- -export async function deregister ({vm}) { +export async function deregister ({ vm }) { await this.getXapi(vm).unregisterDockerContainer(vm._xapiId) } deregister.description = 'Deregister the VM for Docker management' @@ -28,23 +28,23 @@ deregister.resolve = { // ----------------------------------------------------------------------------- -export async function start ({vm, container}) { +export async function start ({ vm, container }) { await this.getXapi(vm).startDockerContainer(vm._xapiId, container) } -export async function stop ({vm, container}) { +export async function stop ({ vm, container }) { await this.getXapi(vm).stopDockerContainer(vm._xapiId, container) } -export async function restart ({vm, container}) { +export async function restart ({ vm, container }) { await this.getXapi(vm).restartDockerContainer(vm._xapiId, container) } -export async function pause ({vm, container}) { +export async function pause ({ vm, container }) { await this.getXapi(vm).pauseDockerContainer(vm._xapiId, container) } -export async function unpause ({vm, 
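
Note how fetchFiles above hands the result back: not the file itself but a one-shot URL under `$getFrom`, whose suffix keeps a lone file's base name and wraps several paths in a dated archive. The naming rule, isolated:

const fileName = paths.length > 1
  ? `restore_${new Date().toJSON()}.${format}` // e.g. restore_2018-01-30T06:00:00.000Z.zip (date illustrative)
  : basename(paths[0])
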
container}) { +export async function unpause ({ vm, container }) { await this.getXapi(vm).unpauseDockerContainer(vm._xapiId, container) } diff --git a/packages/xo-server/src/api/group.js b/packages/xo-server/src/api/group.js index ec208d3ff..bf54aca4d 100644 --- a/packages/xo-server/src/api/group.js +++ b/packages/xo-server/src/api/group.js @@ -1,27 +1,27 @@ -export async function create ({name}) { - return (await this.createGroup({name})).id +export async function create ({ name }) { + return (await this.createGroup({ name })).id } create.description = 'creates a new group' create.permission = 'admin' create.params = { - name: {type: 'string'}, + name: { type: 'string' }, } // ------------------------------------------------------------------- // Deletes an existing group. -async function delete_ ({id}) { +async function delete_ ({ id }) { await this.deleteGroup(id) } // delete is not a valid identifier. -export {delete_ as delete} +export { delete_ as delete } delete_.description = 'deletes an existing group' delete_.permission = 'admin' delete_.params = { - id: {type: 'string'}, + id: { type: 'string' }, } // ------------------------------------------------------------------- @@ -36,35 +36,35 @@ getAll.permission = 'admin' // ------------------------------------------------------------------- // sets group.users with an array of user ids -export async function setUsers ({id, userIds}) { +export async function setUsers ({ id, userIds }) { await this.setGroupUsers(id, userIds) } setUsers.description = 'sets the users belonging to a group' setUsers.permission = 'admin' setUsers.params = { - id: {type: 'string'}, + id: { type: 'string' }, userIds: {}, } // ------------------------------------------------------------------- // adds the user id to group.users -export async function addUser ({id, userId}) { +export async function addUser ({ id, userId }) { await this.addUserToGroup(userId, id) } addUser.description = 'adds a user to a group' addUser.permission = 'admin' addUser.params = { - id: {type: 'string'}, - userId: {type: 'string'}, + id: { type: 'string' }, + userId: { type: 'string' }, } // ------------------------------------------------------------------- // remove the user id from group.users -export async function removeUser ({id, userId}) { +export async function removeUser ({ id, userId }) { await this.removeUserFromGroup(userId, id) } @@ -73,14 +73,14 @@ export async function removeUser ({id, userId}) { removeUser.description = 'removes a user from a group' removeUser.permission = 'admin' removeUser.params = { - id: {type: 'string'}, - userId: {type: 'string'}, + id: { type: 'string' }, + userId: { type: 'string' }, } // ------------------------------------------------------------------- -export async function set ({id, name}) { - await this.updateGroup(id, {name}) +export async function set ({ id, name }) { + await this.updateGroup(id, { name }) } set.description = 'changes the properties of an existing group' diff --git a/packages/xo-server/src/api/host.js b/packages/xo-server/src/api/host.js index 08ccef37d..9f884fa4e 100644 --- a/packages/xo-server/src/api/host.js +++ b/packages/xo-server/src/api/host.js @@ -1,5 +1,4 @@ - -import {format} from 'json-rpc-peer' +import { format } from 'json-rpc-peer' // =================================================================== @@ -58,7 +57,7 @@ restart.resolve = { // ------------------------------------------------------------------- -export function restartAgent ({host}) { +export function restartAgent ({ host }) { return 
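
The export rename in group.js above (and in many modules below) works around a keyword: `delete` cannot be declared as a function name, so the handler is defined as delete_ and re-exported under its public name:

async function delete_ ({ id }) {
  await this.deleteGroup(id)
}
export { delete_ as delete } // callers still see group.delete
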
this.getXapi(host).restartHostAgent(host._xapiId) } @@ -77,7 +76,7 @@ export { restartAgent as restart_agent } // eslint-disable-line camelcase // ------------------------------------------------------------------- -export function start ({host}) { +export function start ({ host }) { return this.getXapi(host).powerOnHost(host._xapiId) } @@ -93,7 +92,7 @@ start.resolve = { // ------------------------------------------------------------------- -export function stop ({host}) { +export function stop ({ host }) { return this.getXapi(host).shutdownHost(host._xapiId) } @@ -109,7 +108,7 @@ stop.resolve = { // ------------------------------------------------------------------- -export function detach ({host}) { +export function detach ({ host }) { return this.getXapi(host).ejectHostFromPool(host._xapiId) } @@ -125,7 +124,7 @@ detach.resolve = { // ------------------------------------------------------------------- -export function enable ({host}) { +export function enable ({ host }) { return this.getXapi(host).enableHost(host._xapiId) } @@ -141,7 +140,7 @@ enable.resolve = { // ------------------------------------------------------------------- -export function disable ({host}) { +export function disable ({ host }) { return this.getXapi(host).disableHost(host._xapiId) } @@ -157,7 +156,7 @@ disable.resolve = { // ------------------------------------------------------------------- -export function forget ({host}) { +export function forget ({ host }) { return this.getXapi(host).forgetHost(host._xapiId) } @@ -176,11 +175,12 @@ forget.resolve = { // Returns an array of missing new patches in the host // Returns an empty array if up-to-date // Throws an error if the host is not running the latest XS version -export function listMissingPatches ({host}) { +export function listMissingPatches ({ host }) { return this.getXapi(host).listMissingPoolPatchesOnHost(host._xapiId) } -listMissingPatches.description = 'return an array of missing new patches in the host' +listMissingPatches.description = + 'return an array of missing new patches in the host' listMissingPatches.params = { host: { type: 'string' }, @@ -192,7 +192,7 @@ listMissingPatches.resolve = { // ------------------------------------------------------------------- -export function installPatch ({host, patch: patchUuid}) { +export function installPatch ({ host, patch: patchUuid }) { return this.getXapi(host).installPoolPatchOnHost(patchUuid, host._xapiId) } @@ -209,7 +209,7 @@ installPatch.resolve = { // ------------------------------------------------------------------- -export function installAllPatches ({host}) { +export function installAllPatches ({ host }) { return this.getXapi(host).installAllPoolPatchesOnHost(host._xapiId) } @@ -225,7 +225,7 @@ installAllPatches.resolve = { // ------------------------------------------------------------------- -export function emergencyShutdownHost ({host}) { +export function emergencyShutdownHost ({ host }) { return this.getXapi(host).emergencyShutdownHost(host._xapiId) } @@ -241,7 +241,7 @@ emergencyShutdownHost.resolve = { // ------------------------------------------------------------------- -export function stats ({host, granularity}) { +export function stats ({ host, granularity }) { return this.getXapiHostStats(host, granularity) } @@ -278,9 +278,11 @@ async function handleInstallSupplementalPack (req, res, { hostId }) { } } -export async function installSupplementalPack ({host}) { +export async function installSupplementalPack ({ host }) { return { - $sendTo: (await 
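
These host methods all follow the same metadata convention: `params` declares the wire schema and `resolve` swaps an id for the XO object before the handler runs. The tuple shape below is inferred from the resolve entries visible later in this diff (pool.js, test.js), so treat the permission value as an assumption:

export function stop ({ host }) {
  return this.getXapi(host).shutdownHost(host._xapiId)
}
stop.params = { id: { type: 'string' } }
stop.resolve = { host: ['id', 'host', 'operate'] } // [id param, type, permission]
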
this.registerHttpRequest(handleInstallSupplementalPack, { hostId: host.id })), + $sendTo: await this.registerHttpRequest(handleInstallSupplementalPack, { + hostId: host.id, + }), } } diff --git a/packages/xo-server/src/api/ip-pool.js b/packages/xo-server/src/api/ip-pool.js index 6a84a8c23..de73c6e09 100644 --- a/packages/xo-server/src/api/ip-pool.js +++ b/packages/xo-server/src/api/ip-pool.js @@ -26,9 +26,8 @@ export function getAll (params) { throw unauthorized() } - return this.getAllIpPools(user.permission === 'admin' - ? params && params.userId - : user.id + return this.getAllIpPools( + user.permission === 'admin' ? params && params.userId : user.id ) } diff --git a/packages/xo-server/src/api/job.js b/packages/xo-server/src/api/job.js index 1e0aefd3c..806e76142 100644 --- a/packages/xo-server/src/api/job.js +++ b/packages/xo-server/src/api/job.js @@ -14,10 +14,10 @@ export async function get (id) { get.permission = 'admin' get.description = 'Gets an existing job' get.params = { - id: {type: 'string'}, + id: { type: 'string' }, } -export async function create ({job}) { +export async function create ({ job }) { if (!job.userId) { job.userId = this.session.get('user_id') } @@ -31,16 +31,16 @@ create.params = { job: { type: 'object', properties: { - userId: {type: 'string', optional: true}, - name: {type: 'string', optional: true}, - timeout: {type: 'number', optional: true}, - type: {type: 'string'}, - key: {type: 'string'}, - method: {type: 'string'}, + userId: { type: 'string', optional: true }, + name: { type: 'string', optional: true }, + timeout: { type: 'number', optional: true }, + type: { type: 'string' }, + key: { type: 'string' }, + method: { type: 'string' }, paramsVector: { type: 'object', properties: { - type: {type: 'string'}, + type: { type: 'string' }, items: { type: 'array', items: { @@ -54,7 +54,7 @@ create.params = { }, } -export async function set ({job}) { +export async function set ({ job }) { await this.updateJob(job) } @@ -64,16 +64,16 @@ set.params = { job: { type: 'object', properties: { - id: {type: 'string'}, - name: {type: 'string', optional: true}, - timeout: {type: ['number', 'null'], optional: true}, - type: {type: 'string', optional: true}, - key: {type: 'string', optional: true}, - method: {type: 'string', optional: true}, + id: { type: 'string' }, + name: { type: 'string', optional: true }, + timeout: { type: ['number', 'null'], optional: true }, + type: { type: 'string', optional: true }, + key: { type: 'string', optional: true }, + method: { type: 'string', optional: true }, paramsVector: { type: 'object', properties: { - type: {type: 'string'}, + type: { type: 'string' }, items: { type: 'array', items: { @@ -87,24 +87,24 @@ set.params = { }, } -async function delete_ ({id}) { +async function delete_ ({ id }) { await this.removeJob(id) } delete_.permission = 'admin' delete_.description = 'Deletes an existing job' delete_.params = { - id: {type: 'string'}, + id: { type: 'string' }, } -export {delete_ as delete} +export { delete_ as delete } -export async function runSequence ({idSequence}) { +export async function runSequence ({ idSequence }) { await this.runJobSequence(idSequence) } runSequence.permission = 'admin' runSequence.description = 'Runs jobs sequentially, in the provided order' runSequence.params = { - idSequence: {type: 'array', items: {type: 'string'}}, + idSequence: { type: 'array', items: { type: 'string' } }, } diff --git a/packages/xo-server/src/api/log.js b/packages/xo-server/src/api/log.js index 3d3e79a43..941685284 100644 --- 
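
The `$sendTo` convention above is the mirror image of `$getFrom`: the method registers a one-off HTTP handler and returns its URL, and the client then uploads the payload there. Hypothetical client side (names illustrative):

const { $sendTo } = await callXoMethod('host.installSupplementalPack', { host: hostId })
await fetch($sendTo, { method: 'POST', body: isoStream })
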
a/packages/xo-server/src/api/log.js +++ b/packages/xo-server/src/api/log.js @@ -1,11 +1,12 @@ -export async function get ({namespace}) { +export async function get ({ namespace }) { const logger = await this.getLogger(namespace) return new Promise((resolve, reject) => { const logs = {} - logger.createReadStream() - .on('data', (data) => { + logger + .createReadStream() + .on('data', data => { logs[data.key] = data.value }) .on('end', () => { @@ -23,16 +24,16 @@ get.permission = 'admin' // ------------------------------------------------------------------- -async function delete_ ({namespace, id}) { +async function delete_ ({ namespace, id }) { const logger = await this.getLogger(namespace) logger.del(id) } delete_.description = 'deletes one or several logs from a namespace' delete_.params = { - id: { type: [ 'array', 'string' ] }, + id: { type: ['array', 'string'] }, namespace: { type: 'string' }, } delete_.permission = 'admin' -export {delete_ as delete} +export { delete_ as delete } diff --git a/packages/xo-server/src/api/message.js b/packages/xo-server/src/api/message.js index 6e881206d..a559bc39b 100644 --- a/packages/xo-server/src/api/message.js +++ b/packages/xo-server/src/api/message.js @@ -1,7 +1,7 @@ async function delete_ ({ message }) { await this.getXapi(message).call('message.destroy', message._xapiRef) } -export {delete_ as delete} +export { delete_ as delete } delete_.params = { id: { type: 'string' }, diff --git a/packages/xo-server/src/api/network.js b/packages/xo-server/src/api/network.js index ca90e9f56..e689aa049 100644 --- a/packages/xo-server/src/api/network.js +++ b/packages/xo-server/src/api/network.js @@ -4,7 +4,14 @@ export function getBondModes () { return ['balance-slb', 'active-backup', 'lacp'] } -export async function create ({ pool, name, description, pif, mtu = 1500, vlan = 0 }) { +export async function create ({ + pool, + name, + description, + pif, + mtu = 1500, + vlan = 0, +}) { return this.getXapi(pool).createNetwork({ name, description, @@ -30,13 +37,19 @@ create.permission = 'admin' // ================================================================= -export async function createBonded ({ pool, name, description, pifs, mtu = 1500, mac, bondMode }) { +export async function createBonded ({ + pool, + name, + description, + pifs, + mtu = 1500, + mac, + bondMode, +}) { return this.getXapi(pool).createBondedNetwork({ name, description, - pifIds: mapToArray(pifs, pif => - this.getObject(pif, 'PIF')._xapiId - ), + pifIds: mapToArray(pifs, pif => this.getObject(pif, 'PIF')._xapiId), mtu: +mtu, mac, bondMode, @@ -56,14 +69,18 @@ createBonded.params = { mtu: { type: ['integer', 'string'], optional: true }, mac: { type: 'string', optional: true }, // RegExp since schema-inspector does not provide a param check based on an enumeration - bondMode: { type: 'string', pattern: new RegExp(`^(${getBondModes().join('|')})$`) }, + bondMode: { + type: 'string', + pattern: new RegExp(`^(${getBondModes().join('|')})$`), + }, } createBonded.resolve = { pool: ['pool', 'pool', 'administrate'], } createBonded.permission = 'admin' -createBonded.description = 'Create a bonded network. bondMode can be balance-slb, active-backup or lacp' +createBonded.description = + 'Create a bonded network. 
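
As the comment above notes, schema-inspector provides no enum check for this parameter, so the allowed bond modes are compiled into an anchored pattern instead:

new RegExp(`^(${getBondModes().join('|')})$`)
// → /^(balance-slb|active-backup|lacp)$/, exactly the getBondModes() values
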
bondMode can be balance-slb, active-backup or lacp' // =================================================================== @@ -109,7 +126,7 @@ set.resolve = { export async function delete_ ({ network }) { return this.getXapi(network).deleteNetwork(network._xapiId) } -export {delete_ as delete} +export { delete_ as delete } delete_.params = { id: { type: 'string' }, diff --git a/packages/xo-server/src/api/pbd.js b/packages/xo-server/src/api/pbd.js index 855a87bbe..3ba4be422 100644 --- a/packages/xo-server/src/api/pbd.js +++ b/packages/xo-server/src/api/pbd.js @@ -3,11 +3,11 @@ // =================================================================== // Delete -async function delete_ ({PBD}) { +async function delete_ ({ PBD }) { // TODO: check if PBD is attached before await this.getXapi(PBD).call('PBD.destroy', PBD._xapiRef) } -export {delete_ as delete} +export { delete_ as delete } delete_.params = { id: { type: 'string' }, @@ -35,7 +35,7 @@ disconnect.resolve = { // =================================================================== // Connect -export async function connect ({PBD}) { +export async function connect ({ PBD }) { // TODO: check if PBD is attached before await this.getXapi(PBD).call('PBD.plug', PBD._xapiRef) } diff --git a/packages/xo-server/src/api/pif.js b/packages/xo-server/src/api/pif.js index 198fd0edb..f48c3cfad 100644 --- a/packages/xo-server/src/api/pif.js +++ b/packages/xo-server/src/api/pif.js @@ -13,11 +13,11 @@ export function getIpv6ConfigurationModes () { // =================================================================== // Delete -async function delete_ ({pif}) { +async function delete_ ({ pif }) { // TODO: check if PIF is attached before await this.getXapi(pif).call('PIF.destroy', pif._xapiRef) } -export {delete_ as delete} +export { delete_ as delete } delete_.params = { id: { type: 'string' }, @@ -30,7 +30,7 @@ delete_.resolve = { // =================================================================== // Disconnect -export async function disconnect ({pif}) { +export async function disconnect ({ pif }) { // TODO: check if PIF is attached before await this.getXapi(pif).call('PIF.unplug', pif._xapiRef) } @@ -45,7 +45,7 @@ disconnect.resolve = { // =================================================================== // Connect -export async function connect ({pif}) { +export async function connect ({ pif }) { // TODO: check if PIF is attached before await this.getXapi(pif).call('PIF.plug', pif._xapiRef) } @@ -60,8 +60,23 @@ connect.resolve = { // =================================================================== // Reconfigure IP -export async function reconfigureIp ({ pif, mode = 'DHCP', ip = '', netmask = '', gateway = '', dns = '' }) { - await this.getXapi(pif).call('PIF.reconfigure_ip', pif._xapiRef, mode, ip, netmask, gateway, dns) +export async function reconfigureIp ({ + pif, + mode = 'DHCP', + ip = '', + netmask = '', + gateway = '', + dns = '', +}) { + await this.getXapi(pif).call( + 'PIF.reconfigure_ip', + pif._xapiRef, + mode, + ip, + netmask, + gateway, + dns + ) } reconfigureIp.params = { diff --git a/packages/xo-server/src/api/pool.js b/packages/xo-server/src/api/pool.js index 4802bbd44..204ef5837 100644 --- a/packages/xo-server/src/api/pool.js +++ b/packages/xo-server/src/api/pool.js @@ -38,7 +38,7 @@ set.resolve = { // ------------------------------------------------------------------- export async function setDefaultSr ({ sr }) { - await this.hasPermissions(this.user.id, [ [ sr.$pool, 'administrate' ] ]) + await this.hasPermissions(this.user.id, 
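
The destructured defaults of reconfigureIp() above make the bare call a reset: every omitted field becomes an empty string and the mode falls back to DHCP. Hypothetical calls (addresses from the documentation range):

reconfigureIp({ pif })
// → PIF.reconfigure_ip(ref, 'DHCP', '', '', '', '')
reconfigureIp({ pif, mode: 'Static', ip: '192.0.2.10', netmask: '255.255.255.0' })
// gateway and dns still default to ''
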
[[sr.$pool, 'administrate']]) await this.getXapi(sr).setDefaultSr(sr._xapiId) } @@ -58,7 +58,7 @@ setDefaultSr.resolve = { // ------------------------------------------------------------------- export async function setPoolMaster ({ host }) { - await this.hasPermissions(this.user.id, [ [ host.$pool, 'administrate' ] ]) + await this.hasPermissions(this.user.id, [[host.$pool, 'administrate']]) await this.getXapi(host).setPoolMaster(host._xapiId) } @@ -75,7 +75,7 @@ setPoolMaster.resolve = { // ------------------------------------------------------------------- -export async function installPatch ({pool, patch: patchUuid}) { +export async function installPatch ({ pool, patch: patchUuid }) { await this.getXapi(pool).installPoolPatchOnAllHosts(patchUuid) } @@ -107,11 +107,12 @@ installAllPatches.resolve = { pool: ['pool', 'pool', 'administrate'], } -installAllPatches.description = 'Install automatically all patches for every hosts of a pool' +installAllPatches.description = + 'Install automatically all patches for every hosts of a pool' // ------------------------------------------------------------------- -async function handlePatchUpload (req, res, {pool}) { +async function handlePatchUpload (req, res, { pool }) { const contentLength = req.headers['content-length'] if (!contentLength) { res.writeHead(411) @@ -122,9 +123,9 @@ async function handlePatchUpload (req, res, {pool}) { await this.getXapi(pool).uploadPoolPatch(req, contentLength) } -export async function uploadPatch ({pool}) { +export async function uploadPatch ({ pool }) { return { - $sendTo: await this.registerHttpRequest(handlePatchUpload, {pool}), + $sendTo: await this.registerHttpRequest(handlePatchUpload, { pool }), } } @@ -139,7 +140,7 @@ uploadPatch.resolve = { // Compatibility // // TODO: remove when no longer used in xo-web -export {uploadPatch as patch} +export { uploadPatch as patch } // ------------------------------------------------------------------- @@ -177,11 +178,8 @@ mergeInto.resolve = { // ------------------------------------------------------------------- -export async function getLicenseState ({pool}) { - return this.getXapi(pool).call( - 'pool.get_license_state', - pool._xapiId.$ref - ) +export async function getLicenseState ({ pool }) { + return this.getXapi(pool).call('pool.get_license_state', pool._xapiId.$ref) } getLicenseState.params = { @@ -215,11 +213,14 @@ async function handleInstallSupplementalPack (req, res, { poolId }) { export async function installSupplementalPack ({ pool }) { return { - $sendTo: await this.registerHttpRequest(handleInstallSupplementalPack, { poolId: pool.id }), + $sendTo: await this.registerHttpRequest(handleInstallSupplementalPack, { + poolId: pool.id, + }), } } -installSupplementalPack.description = 'installs supplemental pack from ISO file on all hosts' +installSupplementalPack.description = + 'installs supplemental pack from ISO file on all hosts' installSupplementalPack.params = { pool: { type: 'string' }, diff --git a/packages/xo-server/src/api/remote.js b/packages/xo-server/src/api/remote.js index 54333809c..36f8672ac 100644 --- a/packages/xo-server/src/api/remote.js +++ b/packages/xo-server/src/api/remote.js @@ -5,68 +5,68 @@ export async function getAll () { getAll.permission = 'admin' getAll.description = 'Gets all existing fs remote points' -export async function get ({id}) { +export async function get ({ id }) { return this.getRemote(id) } get.permission = 'admin' get.description = 'Gets an existing fs remote point' get.params = { - id: {type: 'string'}, + id: { 
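
The guard at the top of handlePatchUpload() above exists because the XAPI upload needs its size up front: a request without Content-Length is refused with 411 Length Required before uploadPoolPatch(req, contentLength) ever runs. Condensed:

const contentLength = req.headers['content-length']
if (!contentLength) {
  res.writeHead(411) // 411 Length Required
  return // the rest of the error response sits in the elided context
}
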
type: 'string' }, } -export async function test ({id}) { +export async function test ({ id }) { return this.testRemote(id) } test.permission = 'admin' test.description = 'Performs a read/write matching test on a remote point' test.params = { - id: {type: 'string'}, + id: { type: 'string' }, } -export async function list ({id}) { +export async function list ({ id }) { return this.listRemoteBackups(id) } list.permission = 'admin' list.description = 'Lists the files found in a remote point' list.params = { - id: {type: 'string'}, + id: { type: 'string' }, } -export async function create ({name, url}) { - return this.createRemote({name, url}) +export async function create ({ name, url }) { + return this.createRemote({ name, url }) } create.permission = 'admin' create.description = 'Creates a new fs remote point' create.params = { - name: {type: 'string'}, - url: {type: 'string'}, + name: { type: 'string' }, + url: { type: 'string' }, } -export async function set ({id, name, url, enabled}) { - await this.updateRemote(id, {name, url, enabled}) +export async function set ({ id, name, url, enabled }) { + await this.updateRemote(id, { name, url, enabled }) } set.permission = 'admin' set.description = 'Modifies an existing fs remote point' set.params = { - id: {type: 'string'}, - name: {type: 'string', optional: true}, - url: {type: 'string', optional: true}, - enabled: {type: 'boolean', optional: true}, + id: { type: 'string' }, + name: { type: 'string', optional: true }, + url: { type: 'string', optional: true }, + enabled: { type: 'boolean', optional: true }, } -async function delete_ ({id}) { +async function delete_ ({ id }) { await this.removeRemote(id) } delete_.permission = 'admin' delete_.description = 'Deletes an existing fs remote point' delete_.params = { - id: {type: 'string'}, + id: { type: 'string' }, } -export {delete_ as delete} +export { delete_ as delete } diff --git a/packages/xo-server/src/api/resource-set.js b/packages/xo-server/src/api/resource-set.js index 79f67bb84..4943131cf 100644 --- a/packages/xo-server/src/api/resource-set.js +++ b/packages/xo-server/src/api/resource-set.js @@ -1,6 +1,4 @@ -import { - unauthorized, -} from 'xo-common/api-errors' +import { unauthorized } from 'xo-common/api-errors' // =================================================================== @@ -237,4 +235,5 @@ export function recomputeAllLimits () { } recomputeAllLimits.permission = 'admin' -recomputeAllLimits.description = 'Recompute manually the current resource set usage' +recomputeAllLimits.description = + 'Recompute manually the current resource set usage' diff --git a/packages/xo-server/src/api/schedule.js b/packages/xo-server/src/api/schedule.js index bb2fc1204..c343184a5 100644 --- a/packages/xo-server/src/api/schedule.js +++ b/packages/xo-server/src/api/schedule.js @@ -14,20 +14,26 @@ export async function get (id) { get.permission = 'admin' get.description = 'Gets an existing schedule' get.params = { - id: {type: 'string'}, + id: { type: 'string' }, } export async function create ({ jobId, cron, enabled, name, timezone }) { - return /* await */ this.createSchedule(this.session.get('user_id'), { job: jobId, cron, enabled, name, timezone }) + return /* await */ this.createSchedule(this.session.get('user_id'), { + job: jobId, + cron, + enabled, + name, + timezone, + }) } create.permission = 'admin' create.description = 'Creates a new schedule' create.params = { - jobId: {type: 'string'}, - cron: {type: 'string'}, - enabled: {type: 'boolean', optional: true}, - name: {type: 'string', 
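
One detail worth noting in remote.set above: the full { name, url, enabled } object is forwarded even when the caller sent only some fields, so the omitted ones arrive as undefined; updateRemote is presumably tolerant of that (its implementation is outside this diff). Hypothetical call updating a single field:

await callXoMethod('remote.set', { id: remoteId, enabled: false })
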
optional: true}, + jobId: { type: 'string' }, + cron: { type: 'string' }, + enabled: { type: 'boolean', optional: true }, + name: { type: 'string', optional: true }, } export async function set ({ id, jobId, cron, enabled, name, timezone }) { @@ -37,21 +43,21 @@ export async function set ({ id, jobId, cron, enabled, name, timezone }) { set.permission = 'admin' set.description = 'Modifies an existing schedule' set.params = { - id: {type: 'string'}, - jobId: {type: 'string', optional: true}, - cron: {type: 'string', optional: true}, - enabled: {type: 'boolean', optional: true}, - name: {type: 'string', optional: true}, + id: { type: 'string' }, + jobId: { type: 'string', optional: true }, + cron: { type: 'string', optional: true }, + enabled: { type: 'boolean', optional: true }, + name: { type: 'string', optional: true }, } -async function delete_ ({id}) { +async function delete_ ({ id }) { await this.removeSchedule(id) } delete_.permission = 'admin' delete_.description = 'Deletes an existing schedule' delete_.params = { - id: {type: 'string'}, + id: { type: 'string' }, } -export {delete_ as delete} +export { delete_ as delete } diff --git a/packages/xo-server/src/api/scheduler.js b/packages/xo-server/src/api/scheduler.js index 7488e5682..0a93c46e7 100644 --- a/packages/xo-server/src/api/scheduler.js +++ b/packages/xo-server/src/api/scheduler.js @@ -1,16 +1,16 @@ -export async function enable ({id}) { +export async function enable ({ id }) { const schedule = await this.getSchedule(id) schedule.enabled = true await this.updateSchedule(id, schedule) } enable.permission = 'admin' -enable.description = 'Enables a schedule to run it\'s job as scheduled' +enable.description = "Enables a schedule to run it's job as scheduled" enable.params = { - id: {type: 'string'}, + id: { type: 'string' }, } -export async function disable ({id}) { +export async function disable ({ id }) { const schedule = await this.getSchedule(id) schedule.enabled = false await this.updateSchedule(id, schedule) @@ -19,7 +19,7 @@ export async function disable ({id}) { disable.permission = 'admin' disable.description = 'Disables a schedule' disable.params = { - id: {type: 'string'}, + id: { type: 'string' }, } export function getScheduleTable () { diff --git a/packages/xo-server/src/api/server.js b/packages/xo-server/src/api/server.js index 08fedf925..655652f14 100644 --- a/packages/xo-server/src/api/server.js +++ b/packages/xo-server/src/api/server.js @@ -1,10 +1,10 @@ import { ignoreErrors } from 'promise-toolbox' -export async function add ({autoConnect = true, ...props}) { +export async function add ({ autoConnect = true, ...props }) { const server = await this.registerXenServer(props) if (autoConnect) { - this.connectXenServer(server.id)::ignoreErrors() + ;this.connectXenServer(server.id)::ignoreErrors() } return server.id @@ -40,7 +40,7 @@ add.params = { // ------------------------------------------------------------------- -export async function remove ({id}) { +export async function remove ({ id }) { await this.unregisterXenServer(id) } @@ -68,7 +68,7 @@ getAll.permission = 'admin' // ------------------------------------------------------------------- -export async function set ({id, ...props}) { +export async function set ({ id, ...props }) { await this.updateXenServer(id, props) } @@ -104,8 +104,8 @@ set.params = { // ------------------------------------------------------------------- -export async function connect ({id}) { - this.updateXenServer(id, {enabled: true})::ignoreErrors() +export async function connect ({ id 
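
scheduler.enable/disable above are a read-modify-write on the stored schedule rather than a dedicated flag setter:

const schedule = await this.getSchedule(id)
schedule.enabled = true // false in disable()
await this.updateSchedule(id, schedule)
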
}) { + ;this.updateXenServer(id, { enabled: true })::ignoreErrors() await this.connectXenServer(id) } @@ -121,8 +121,8 @@ connect.params = { // ------------------------------------------------------------------- -export async function disconnect ({id}) { - this.updateXenServer(id, {enabled: false})::ignoreErrors() +export async function disconnect ({ id }) { + ;this.updateXenServer(id, { enabled: false })::ignoreErrors() await this.disconnectXenServer(id) } diff --git a/packages/xo-server/src/api/session.js b/packages/xo-server/src/api/session.js index ae731a25b..a2caad4e1 100644 --- a/packages/xo-server/src/api/session.js +++ b/packages/xo-server/src/api/session.js @@ -1,7 +1,7 @@ -import {deprecate} from 'util' +import { deprecate } from 'util' import { getUserPublicProperties } from '../utils' -import {invalidCredentials} from 'xo-common/api-errors' +import { invalidCredentials } from 'xo-common/api-errors' // =================================================================== @@ -19,7 +19,10 @@ signIn.description = 'sign in' // ------------------------------------------------------------------- -export const signInWithPassword = deprecate(signIn, 'use session.signIn() instead') +export const signInWithPassword = deprecate( + signIn, + 'use session.signIn() instead' +) signInWithPassword.params = { email: { type: 'string' }, diff --git a/packages/xo-server/src/api/sr.js b/packages/xo-server/src/api/sr.js index dcde4d61f..4d0952512 100644 --- a/packages/xo-server/src/api/sr.js +++ b/packages/xo-server/src/api/sr.js @@ -1,12 +1,7 @@ import { some } from 'lodash' import { asInteger } from '../xapi/utils' -import { - asyncMap, - ensureArray, - forEach, - parseXml, -} from '../utils' +import { asyncMap, ensureArray, forEach, parseXml } from '../utils' // =================================================================== @@ -50,10 +45,11 @@ scan.resolve = { } // ------------------------------------------------------------------- -const srIsBackingHa = (sr) => sr.$pool.ha_enabled && some(sr.$pool.$ha_statefiles, f => f.$SR === sr) +const srIsBackingHa = sr => + sr.$pool.ha_enabled && some(sr.$pool.$ha_statefiles, f => f.$SR === sr) // TODO: find a way to call this "delete" and not destroy -export async function destroy ({sr}) { +export async function destroy ({ sr }) { const xapi = this.getXapi(sr) if (sr.SR_type !== 'xosan') { await xapi.destroySr(sr._xapiId) @@ -61,7 +57,9 @@ export async function destroy ({sr}) { } const xapiSr = xapi.getObject(sr) if (srIsBackingHa(xapiSr)) { - throw new Error('You tried to remove a SR the High Availability is relying on. Please disable HA first.') + throw new Error( + 'You tried to remove a SR the High Availability is relying on. Please disable HA first.' 
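
signInWithPassword above relies on Node's util.deprecate: the alias keeps delegating to signIn but emits a deprecation warning, roughly as below, once per process:

signInWithPassword({ email: 'admin@example.net', password: 'secret' }) // hypothetical
// (node:1234) DeprecationWarning: use session.signIn() instead
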
+ ) } const config = xapi.xo.getData(sr, 'xosan_config') // we simply forget because the hosted disks are being destroyed with the VMs @@ -239,12 +237,7 @@ createNfs.resolve = { // This functions creates an HBA SR -export async function createHba ({ - host, - nameLabel, - nameDescription, - scsiId, -}) { +export async function createHba ({ host, nameLabel, nameDescription, scsiId }) { const xapi = this.getXapi(host) const deviceConfig = { @@ -284,12 +277,7 @@ createHba.resolve = { // This functions creates a local LVM SR -export async function createLvm ({ - host, - nameLabel, - nameDescription, - device, -}) { +export async function createLvm ({ host, nameLabel, nameDescription, device }) { const xapi = this.getXapi(host) const deviceConfig = { @@ -328,10 +316,7 @@ createLvm.resolve = { // This function helps to detect all NFS shares (exports) on a NFS server // Return a table of exports with their paths and ACLs -export async function probeNfs ({ - host, - server, -}) { +export async function probeNfs ({ host, server }) { const xapi = this.getXapi(host) const deviceConfig = { @@ -341,13 +326,7 @@ export async function probeNfs ({ let xml try { - await xapi.call( - 'SR.probe', - host._xapiRef, - deviceConfig, - 'nfs', - {} - ) + await xapi.call('SR.probe', host._xapiRef, deviceConfig, 'nfs', {}) throw new Error('the call above should have thrown an error') } catch (error) { @@ -381,20 +360,13 @@ probeNfs.resolve = { // ------------------------------------------------------------------- // This function helps to detect all HBA devices on the host -export async function probeHba ({ - host, -}) { +export async function probeHba ({ host }) { const xapi = this.getXapi(host) let xml try { - await xapi.call( - 'SR.probe', - host._xapiRef, - 'type', - {} - ) + await xapi.call('SR.probe', host._xapiRef, 'type', {}) throw new Error('the call above should have thrown an error') } catch (error) { @@ -527,13 +499,7 @@ export async function probeIscsiIqns ({ let xml try { - await xapi.call( - 'SR.probe', - host._xapiRef, - deviceConfig, - 'lvmoiscsi', - {} - ) + await xapi.call('SR.probe', host._xapiRef, deviceConfig, 'lvmoiscsi', {}) throw new Error('the call above should have thrown an error') } catch (error) { @@ -605,13 +571,7 @@ export async function probeIscsiLuns ({ let xml try { - await xapi.call( - 'SR.probe', - host._xapiRef, - deviceConfig, - 'lvmoiscsi', - {} - ) + await xapi.call('SR.probe', host._xapiRef, deviceConfig, 'lvmoiscsi', {}) throw new Error('the call above should have thrown an error') } catch (error) { @@ -681,7 +641,9 @@ export async function probeIscsiExists ({ deviceConfig.port = asInteger(port) } - const xml = parseXml(await xapi.call('SR.probe', host._xapiRef, deviceConfig, 'lvmoiscsi', {})) + const xml = parseXml( + await xapi.call('SR.probe', host._xapiRef, deviceConfig, 'lvmoiscsi', {}) + ) const srs = [] forEach(ensureArray(xml['SRlist'].SR), sr => { @@ -710,11 +672,7 @@ probeIscsiExists.resolve = { // This function helps to detect if this NFS SR already exists in XAPI // It returns a table of SR UUID, empty if no existing connections -export async function probeNfsExists ({ - host, - server, - serverPath, -}) { +export async function probeNfsExists ({ host, server, serverPath }) { const xapi = this.getXapi(host) const deviceConfig = { @@ -722,7 +680,9 @@ export async function probeNfsExists ({ serverpath: serverPath, } - const xml = parseXml(await xapi.call('SR.probe', host._xapiRef, deviceConfig, 'nfs', {})) + const xml = parseXml( + await xapi.call('SR.probe', 
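
All the probe functions above share one idiom: SR.probe is expected to fail, and the useful payload (an XML listing) rides on the error, so a call that returns normally is itself an error. Condensed:

try {
  await xapi.call('SR.probe', host._xapiRef, deviceConfig, 'nfs', {})
  throw new Error('the call above should have thrown an error')
} catch (error) {
  // the XML is pulled out of the XAPI error and handed to parseXml();
  // the extraction itself sits in the elided context of these hunks
}
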
host._xapiRef, deviceConfig, 'nfs', {}) + ) const srs = [] diff --git a/packages/xo-server/src/api/system.js b/packages/xo-server/src/api/system.js index 352eb82d3..ad2494134 100644 --- a/packages/xo-server/src/api/system.js +++ b/packages/xo-server/src/api/system.js @@ -20,7 +20,8 @@ export function getMethodsInfo () { return methods } -getMethodsInfo.description = 'returns the signatures of all available API methods' +getMethodsInfo.description = + 'returns the signatures of all available API methods' // ------------------------------------------------------------------- @@ -46,7 +47,7 @@ listMethods.description = 'returns the name of all available API methods' // ------------------------------------------------------------------- -export function methodSignature ({method: name}) { +export function methodSignature ({ method: name }) { const method = this.apiMethods[name] if (!method) { diff --git a/packages/xo-server/src/api/tag.js b/packages/xo-server/src/api/tag.js index 52f1cb16a..5a8c4cd22 100644 --- a/packages/xo-server/src/api/tag.js +++ b/packages/xo-server/src/api/tag.js @@ -1,4 +1,4 @@ -export async function add ({tag, object}) { +export async function add ({ tag, object }) { await this.getXapi(object).addTag(object._xapiId, tag) } @@ -15,7 +15,7 @@ add.params = { // ------------------------------------------------------------------- -export async function remove ({tag, object}) { +export async function remove ({ tag, object }) { await this.getXapi(object).removeTag(object._xapiId, tag) } diff --git a/packages/xo-server/src/api/task.js b/packages/xo-server/src/api/task.js index f24438778..71fe4a48c 100644 --- a/packages/xo-server/src/api/task.js +++ b/packages/xo-server/src/api/task.js @@ -1,4 +1,4 @@ -export async function cancel ({task}) { +export async function cancel ({ task }) { await this.getXapi(task).call('task.cancel', task._xapiRef) } @@ -12,7 +12,7 @@ cancel.resolve = { // ------------------------------------------------------------------- -export async function destroy ({task}) { +export async function destroy ({ task }) { await this.getXapi(task).call('task.destroy', task._xapiRef) } diff --git a/packages/xo-server/src/api/test.js b/packages/xo-server/src/api/test.js index 0eb4d3882..ef9d8dd02 100644 --- a/packages/xo-server/src/api/test.js +++ b/packages/xo-server/src/api/test.js @@ -13,9 +13,7 @@ getPermissionsForUser.params = { // ------------------------------------------------------------------- export function hasPermission ({ userId, objectId, permission }) { - return this.hasPermissions(userId, [ - [ objectId, permission ], - ]) + return this.hasPermissions(userId, [[objectId, permission]]) } hasPermission.permission = 'admin' @@ -34,7 +32,7 @@ hasPermission.params = { // ------------------------------------------------------------------- -export function wait ({duration, returnValue}) { +export function wait ({ duration, returnValue }) { return new Promise(resolve => { setTimeout(() => { resolve(returnValue) @@ -81,6 +79,6 @@ copyVm.params = { } copyVm.resolve = { - vm: [ 'vm', 'VM' ], - sr: [ 'sr', 'SR' ], + vm: ['vm', 'VM'], + sr: ['sr', 'SR'], } diff --git a/packages/xo-server/src/api/token.js b/packages/xo-server/src/api/token.js index 0497ccebc..08bf9284e 100644 --- a/packages/xo-server/src/api/token.js +++ b/packages/xo-server/src/api/token.js @@ -12,7 +12,7 @@ create.description = 'create a new authentication token' create.params = { expiresIn: { optional: true, - type: [ 'number', 'string' ], + type: ['number', 'string'], }, } @@ -21,11 +21,11 @@ 
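
The permission checks seen throughout this diff (pool.setDefaultSr, disk.create, test.hasPermission above) all take an array of [objectId, permission] pairs, every one of which must hold:

await this.hasPermissions(userId, [
  [srId, 'administrate'], // ids hypothetical
  [vmId, 'operate'],
])
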
create.permission = '' // sign in // ------------------------------------------------------------------- // TODO: an user should be able to delete its own tokens. -async function delete_ ({token: id}) { +async function delete_ ({ token: id }) { await this.deleteAuthenticationToken(id) } -export {delete_ as delete} +export { delete_ as delete } delete_.description = 'delete an existing authentication token' diff --git a/packages/xo-server/src/api/user.js b/packages/xo-server/src/api/user.js index 93284f96a..c4e6fe093 100644 --- a/packages/xo-server/src/api/user.js +++ b/packages/xo-server/src/api/user.js @@ -1,10 +1,10 @@ -import {invalidParameters} from 'xo-common/api-errors' +import { invalidParameters } from 'xo-common/api-errors' import { getUserPublicProperties, mapToArray } from '../utils' // =================================================================== -export async function create ({email, password, permission}) { - return (await this.createUser({email, password, permission})).id +export async function create ({ email, password, permission }) { + return (await this.createUser({ email, password, permission })).id } create.description = 'creates a new user' @@ -20,7 +20,7 @@ create.params = { // ------------------------------------------------------------------- // Deletes an existing user. -async function delete_ ({id}) { +async function delete_ ({ id }) { if (id === this.session.get('user_id')) { throw invalidParameters('a user cannot delete itself') } @@ -29,7 +29,7 @@ async function delete_ ({id}) { } // delete is not a valid identifier. -export {delete_ as delete} +export { delete_ as delete } delete_.description = 'deletes an existing user' @@ -57,17 +57,19 @@ getAll.permission = 'admin' // ------------------------------------------------------------------- -export async function set ({id, email, password, permission, preferences}) { +export async function set ({ id, email, password, permission, preferences }) { const isAdmin = this.user && this.user.permission === 'admin' if (isAdmin) { if (permission && id === this.session.get('user_id')) { throw invalidParameters('a user cannot change its own permission') } } else if (email || password || permission) { - throw invalidParameters('this properties can only changed by an administrator') + throw invalidParameters( + 'this properties can only changed by an administrator' + ) } - await this.updateUser(id, {email, password, permission, preferences}) + await this.updateUser(id, { email, password, permission, preferences }) } set.description = 'changes the properties of an existing user' @@ -84,16 +86,17 @@ set.params = { // ------------------------------------------------------------------- -export async function changePassword ({oldPassword, newPassword}) { +export async function changePassword ({ oldPassword, newPassword }) { const id = this.session.get('user_id') await this.changeUserPassword(id, oldPassword, newPassword) } -changePassword.description = 'change password after checking old password (user function)' +changePassword.description = + 'change password after checking old password (user function)' changePassword.permission = '' changePassword.params = { - oldPassword: {type: 'string'}, - newPassword: {type: 'string'}, + oldPassword: { type: 'string' }, + newPassword: { type: 'string' }, } diff --git a/packages/xo-server/src/api/vbd.js b/packages/xo-server/src/api/vbd.js index 63d7fd1e9..db825a187 100644 --- a/packages/xo-server/src/api/vbd.js +++ b/packages/xo-server/src/api/vbd.js @@ -1,6 +1,6 @@ // FIXME: too low 
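
user.js above carries two self-service guards, both keyed on the session's own user id:

const selfId = this.session.get('user_id')
// delete: a user may not remove its own account
if (id === selfId) throw invalidParameters('a user cannot delete itself')
// set: even an admin may not alter its own permission
if (permission && id === selfId) {
  throw invalidParameters('a user cannot change its own permission')
}
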
level, should be removed. -async function delete_ ({vbd}) { +async function delete_ ({ vbd }) { await this.getXapi(vbd).deleteVbd(vbd) } @@ -16,7 +16,7 @@ export { delete_ as delete } // ------------------------------------------------------------------- -export async function disconnect ({vbd}) { +export async function disconnect ({ vbd }) { const xapi = this.getXapi(vbd) await xapi.disconnectVbd(vbd._xapiRef) } @@ -31,7 +31,7 @@ disconnect.resolve = { // ------------------------------------------------------------------- -export async function connect ({vbd}) { +export async function connect ({ vbd }) { const xapi = this.getXapi(vbd) await xapi.connectVbd(vbd._xapiRef) } @@ -46,7 +46,7 @@ connect.resolve = { // ------------------------------------------------------------------- -export async function set ({position, vbd}) { +export async function set ({ position, vbd }) { if (position !== undefined) { const xapi = this.getXapi(vbd) await xapi.call('VBD.set_userdevice', vbd._xapiRef, String(position)) @@ -66,7 +66,7 @@ set.resolve = { // ------------------------------------------------------------------- -export async function setBootable ({vbd, bootable}) { +export async function setBootable ({ vbd, bootable }) { const xapi = this.getXapi(vbd) await xapi.call('VBD.set_bootable', vbd._xapiRef, bootable) diff --git a/packages/xo-server/src/api/vdi.js b/packages/xo-server/src/api/vdi.js index d4399da30..91d6150e9 100644 --- a/packages/xo-server/src/api/vdi.js +++ b/packages/xo-server/src/api/vdi.js @@ -7,10 +7,11 @@ import { parseSize } from '../utils' // ==================================================================== -export async function delete_ ({vdi}) { +export async function delete_ ({ vdi }) { const resourceSet = reduce( vdi.$VBDs, - (resourceSet, vbd) => resourceSet || this.getObject(this.getObject(vbd, 'VBD').VM).resourceSet, + (resourceSet, vbd) => + resourceSet || this.getObject(this.getObject(vbd, 'VBD').VM).resourceSet, undefined ) @@ -35,7 +36,7 @@ export { delete_ as delete } // FIXME: human readable strings should be handled. export async function set (params) { - const {vdi} = params + const { vdi } = params const xapi = this.getXapi(vdi) const ref = vdi._xapiRef @@ -52,18 +53,26 @@ export async function set (params) { const vbds = vdi.$VBDs if ( - (vbds.length === 1) && - ((resourceSetId = xapi.xo.getData(this.getObject(vbds[0], 'VBD').VM, 'resourceSet')) !== undefined) + vbds.length === 1 && + (resourceSetId = xapi.xo.getData( + this.getObject(vbds[0], 'VBD').VM, + 'resourceSet' + )) !== undefined ) { if (this.user.permission !== 'admin') { await this.checkResourceSetConstraints(resourceSetId, this.user.id) } - await this.allocateLimitsInResourceSet({ disk: size - vdi.size }, resourceSetId) - } else if (!( - (this.user.permission === 'admin') || - (await this.hasPermissions(this.user.id, [ [ vdi.$SR, 'operate' ] ])) - )) { + await this.allocateLimitsInResourceSet( + { disk: size - vdi.size }, + resourceSetId + ) + } else if ( + !( + this.user.permission === 'admin' || + (await this.hasPermissions(this.user.id, [[vdi.$SR, 'operate']])) + ) + ) { throw unauthorized() } @@ -72,14 +81,16 @@ export async function set (params) { // Other fields. 
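
Just above, vdi.set charges only the size difference (size - vdi.size) against the VM's resource set when a disk grows. A minimal standalone sketch of that delta-accounting idea, with hypothetical names (the limits/available shape is illustrative, not xo-server's actual data model):

    function allocateDiskDelta (resourceSet, oldSize, newSize) {
      const delta = newSize - oldSize
      // shrinking or unchanged: nothing new to reserve
      if (delta <= 0) {
        return
      }
      if (resourceSet.limits.disk.available < delta) {
        throw new Error('not enough disk quota left in the resource set')
      }
      resourceSet.limits.disk.available -= delta
    }
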
const object = { - 'name_label': 'name_label', - 'name_description': 'name_description', + name_label: 'name_label', + name_description: 'name_description', } for (const param in object) { const fields = object[param] - if (!(param in params)) { continue } + if (!(param in params)) { + continue + } - for (const field of (isArray(fields) ? fields : [fields])) { + for (const field of isArray(fields) ? fields : [fields]) { await xapi.call(`VDI.set_${field}`, ref, `${params[param]}`) } } @@ -103,7 +114,7 @@ set.resolve = { // ------------------------------------------------------------------- -export async function migrate ({vdi, sr}) { +export async function migrate ({ vdi, sr }) { const xapi = this.getXapi(vdi) await xapi.moveVdi(vdi._xapiRef, sr._xapiRef) diff --git a/packages/xo-server/src/api/vif.js b/packages/xo-server/src/api/vif.js index 039222502..91a333213 100644 --- a/packages/xo-server/src/api/vif.js +++ b/packages/xo-server/src/api/vif.js @@ -5,8 +5,8 @@ import { diffItems } from '../utils' // =================================================================== // TODO: move into vm and rename to removeInterface -async function delete_ ({vif}) { - this.allocIpAddresses( +async function delete_ ({ vif }) { + ;this.allocIpAddresses( vif.id, null, vif.allowedIpv4Addresses.concat(vif.allowedIpv6Addresses) @@ -14,7 +14,7 @@ async function delete_ ({vif}) { await this.getXapi(vif).deleteVif(vif._xapiId) } -export {delete_ as delete} +export { delete_ as delete } delete_.params = { id: { type: 'string' }, @@ -27,7 +27,7 @@ delete_.resolve = { // ------------------------------------------------------------------- // TODO: move into vm and rename to disconnectInterface -export async function disconnect ({vif}) { +export async function disconnect ({ vif }) { // TODO: check if VIF is attached before await this.getXapi(vif).disconnectVif(vif._xapiId) } @@ -42,7 +42,7 @@ disconnect.resolve = { // ------------------------------------------------------------------- // TODO: move into vm and rename to connectInterface -export async function connect ({vif}) { +export async function connect ({ vif }) { // TODO: check if VIF is attached before await this.getXapi(vif).connectVif(vif._xapiId) } @@ -65,7 +65,9 @@ export async function set ({ allowedIpv6Addresses, attached, }) { - const oldIpAddresses = vif.allowedIpv4Addresses.concat(vif.allowedIpv6Addresses) + const oldIpAddresses = vif.allowedIpv4Addresses.concat( + vif.allowedIpv6Addresses + ) const newIpAddresses = [] { const { push } = newIpAddresses @@ -96,15 +98,11 @@ export async function set ({ return } - const [ addAddresses, removeAddresses ] = diffItems( + const [addAddresses, removeAddresses] = diffItems( newIpAddresses, oldIpAddresses ) - await this.allocIpAddresses( - vif.id, - addAddresses, - removeAddresses - ) + await this.allocIpAddresses(vif.id, addAddresses, removeAddresses) return this.getXapi(vif).editVif(vif._xapiId, { ipv4Allowed: allowedIpv4Addresses, diff --git a/packages/xo-server/src/api/vm.js b/packages/xo-server/src/api/vm.js index b09a94446..4aa24d6a0 100644 --- a/packages/xo-server/src/api/vm.js +++ b/packages/xo-server/src/api/vm.js @@ -25,14 +25,13 @@ function checkPermissionOnSrs (vm, permission = 'operate') { return permissions.push([this.getObject(vdiId, 'VDI').$SR, permission]) }) - return this.hasPermissions( - this.session.get('user_id'), - permissions - ).then(success => { - if (!success) { - throw unauthorized() + return this.hasPermissions(this.session.get('user_id'), permissions).then( + success => { + if 
(!success) { + throw unauthorized() + } } - }) + ) } // =================================================================== @@ -359,7 +358,7 @@ async function delete_ ({ // Update resource sets const resourceSet = xapi.xo.getData(vm._xapiId, 'resourceSet') if (resourceSet != null) { - this.setVmResourceSet(vm._xapiId, null)::ignoreErrors() + ;this.setVmResourceSet(vm._xapiId, null)::ignoreErrors() } return xapi.deleteVm(vm._xapiId, deleteDisks, force) @@ -1239,8 +1238,10 @@ export async function createInterface ({ }) { const { resourceSet } = vm if (resourceSet != null) { - await this.checkResourceSetConstraints(resourceSet, this.user.id, [ network.id ]) - } else if (!(await this.hasPermissions(this.user.id, [ [ network.id, 'view' ] ]))) { + await this.checkResourceSetConstraints(resourceSet, this.user.id, [ + network.id, + ]) + } else if (!await this.hasPermissions(this.user.id, [[network.id, 'view']])) { throw unauthorized() } diff --git a/packages/xo-server/src/api/xo.js b/packages/xo-server/src/api/xo.js index 557d05af6..41f514419 100644 --- a/packages/xo-server/src/api/xo.js +++ b/packages/xo-server/src/api/xo.js @@ -12,15 +12,17 @@ clean.permission = 'admin' export async function exportConfig () { return { - $getFrom: await this.registerHttpRequest((req, res) => { - res.writeHead(200, 'OK', { - 'content-disposition': 'attachment', - }) + $getFrom: await this.registerHttpRequest( + (req, res) => { + res.writeHead(200, 'OK', { + 'content-disposition': 'attachment', + }) - return this.exportConfig() - }, - undefined, - { suffix: '/config.json' }), + return this.exportConfig() + }, + undefined, + { suffix: '/config.json' } + ), } } diff --git a/packages/xo-server/src/api/xosan.js b/packages/xo-server/src/api/xosan.js index 813d0eebd..350b2c070 100644 --- a/packages/xo-server/src/api/xosan.js +++ b/packages/xo-server/src/api/xosan.js @@ -6,13 +6,7 @@ import map from 'lodash/map' import { tap, delay } from 'promise-toolbox' import { invalidParameters } from 'xo-common/api-errors' import { v4 as generateUuid } from 'uuid' -import { - includes, - remove, - filter, - find, - range, -} from 'lodash' +import { includes, remove, filter, find, range } from 'lodash' import { asInteger } from '../xapi/utils' import { asyncMap, parseXml, ensureArray } from '../utils' @@ -47,7 +41,10 @@ function _getIPToVMDict (xapi, sr) { if (data && data.nodes) { data.nodes.forEach(conf => { try { - dict[conf.brickName] = {vm: xapi.getObject(conf.vm.id), sr: conf.underlyingSr} + dict[conf.brickName] = { + vm: xapi.getObject(conf.vm.id), + sr: conf.underlyingSr, + } } catch (e) { // pass } @@ -91,7 +88,7 @@ function createVolumeInfoTypes () { brick.file = ensureArray(brick.file) } }) - return {commandStatus: true, result: {bricks}} + return { commandStatus: true, result: { bricks } } } function parseStatus (parsed) { @@ -103,7 +100,7 @@ function createVolumeInfoTypes () { }) return { commandStatus: true, - result: {nodes: brickDictByUuid, tasks: volume['tasks']}, + result: { nodes: brickDictByUuid, tasks: volume['tasks'] }, } } @@ -111,7 +108,7 @@ function createVolumeInfoTypes () { const volume = parsed['volInfo']['volumes']['volume'] volume['bricks'] = volume['bricks']['brick'] volume['options'] = volume['options']['option'] - return {commandStatus: true, result: volume} + return { commandStatus: true, result: volume } } const sshInfoType = (command, handler) => { @@ -121,9 +118,12 @@ function createVolumeInfoTypes () { !result['commandStatus'] && ((result.parsed && result.parsed['cliOutput']['opErrno'] === 
'30802') || result.stderr.match(/Another transaction is in progress/)) - const runCmd = async () => glusterCmd(glusterEndpoint, 'volume ' + command, true) + const runCmd = async () => + glusterCmd(glusterEndpoint, 'volume ' + command, true) const commandResult = await rateLimitedRetry(runCmd, cmdShouldRetry, 30) - return commandResult['commandStatus'] ? this::handler(commandResult.parsed['cliOutput'], sr) : commandResult + return commandResult['commandStatus'] + ? this::handler(commandResult.parsed['cliOutput'], sr) + : commandResult } } @@ -132,10 +132,10 @@ function createVolumeInfoTypes () { const volume = parsed['volProfile'] volume['bricks'] = ensureArray(volume['brick']) delete volume['brick'] - return {commandStatus: true, result: volume} + return { commandStatus: true, result: volume } } - return this::(sshInfoType('profile xosan info', parseProfile))(sr) + return this::sshInfoType('profile xosan info', parseProfile)(sr) } async function profileTopType (sr) { @@ -143,12 +143,13 @@ function createVolumeInfoTypes () { const volume = parsed['volTop'] volume['bricks'] = ensureArray(volume['brick']) delete volume['brick'] - return {commandStatus: true, result: volume} + return { commandStatus: true, result: volume } } const topTypes = ['open', 'read', 'write', 'opendir', 'readdir'] return asyncMap(topTypes, async type => ({ - type, result: await this::(sshInfoType(`top xosan ${type}`, parseTop))(sr), + type, + result: await this::sshInfoType(`top xosan ${type}`, parseTop)(sr), })) } @@ -156,8 +157,11 @@ function createVolumeInfoTypes () { const xapi = this.getXapi(sr) const data = getXosanConfig(sr, xapi) const network = xapi.getObject(data.network) - const badPifs = filter(network.$PIFs, pif => pif.ip_configuration_mode !== 'Static') - return badPifs.map(pif => ({pif, host: pif.$host.$id})) + const badPifs = filter( + network.$PIFs, + pif => pif.ip_configuration_mode !== 'Static' + ) + return badPifs.map(pif => ({ pif, host: pif.$host.$id })) } return { @@ -174,7 +178,7 @@ function createVolumeInfoTypes () { const VOLUME_INFO_TYPES = createVolumeInfoTypes() -export async function getVolumeInfo ({sr, infoType}) { +export async function getVolumeInfo ({ sr, infoType }) { await this.checkXosanLicense({ srId: sr.uuid }) const glusterEndpoint = this::_getGlusterEndpoint(sr) @@ -205,7 +209,7 @@ getVolumeInfo.resolve = { sr: ['sr', 'SR', 'administrate'], } -export async function profileStatus ({sr, changeStatus = null}) { +export async function profileStatus ({ sr, changeStatus = null }) { await this.checkXosanLicense({ srId: sr.uuid }) const glusterEndpoint = this::_getGlusterEndpoint(sr) @@ -216,7 +220,7 @@ export async function profileStatus ({sr, changeStatus = null}) { if (changeStatus === true) { await glusterCmd(glusterEndpoint, 'volume profile xosan start') } - return this::getVolumeInfo({sr: sr, infoType: 'profile'}) + return this::getVolumeInfo({ sr: sr, infoType: 'profile' }) } profileStatus.description = 'activate, deactivate, or interrogate profile data' @@ -226,7 +230,8 @@ profileStatus.params = { type: 'string', }, changeStatus: { - type: 'bool', optional: true, + type: 'bool', + optional: true, }, } profileStatus.resolve = { @@ -234,18 +239,30 @@ profileStatus.resolve = { } function reconfigurePifIP (xapi, pif, newIP) { - xapi.call('PIF.reconfigure_ip', pif.$ref, 'Static', newIP, '255.255.255.0', '', '') + xapi.call( + 'PIF.reconfigure_ip', + pif.$ref, + 'Static', + newIP, + '255.255.255.0', + '', + '' + ) } // this function should probably become fixSomething(thingToFix, 
parmas) -export async function fixHostNotInNetwork ({xosanSr, host}) { +export async function fixHostNotInNetwork ({ xosanSr, host }) { await this.checkXosanLicense({ srId: xosanSr.uuid }) const xapi = this.getXapi(xosanSr) const data = getXosanConfig(xosanSr, xapi) const network = xapi.getObject(data.network) - const usedAddresses = network.$PIFs.filter(pif => pif.ip_configuration_mode === 'Static').map(pif => pif.IP) - const pif = network.$PIFs.find(pif => pif.ip_configuration_mode !== 'Static' && pif.$host.$id === host) + const usedAddresses = network.$PIFs + .filter(pif => pif.ip_configuration_mode === 'Static') + .map(pif => pif.IP) + const pif = network.$PIFs.find( + pif => pif.ip_configuration_mode !== 'Static' && pif.$host.$id === host + ) if (pif) { const newIP = _findIPAddressOutsideList(usedAddresses, HOST_FIRST_NUMBER) reconfigurePifIP(xapi, pif, newIP) @@ -283,17 +300,19 @@ function floor2048 (value) { } async function copyVm (xapi, originalVm, sr) { - return {sr, vm: await xapi.copyVm(originalVm, sr)} + return { sr, vm: await xapi.copyVm(originalVm, sr) } } async function callPlugin (xapi, host, command, params) { debug('calling plugin', host.address, command) - return JSON.parse(await xapi.call('host.call_plugin', host.$ref, 'xosan.py', command, params)) + return JSON.parse( + await xapi.call('host.call_plugin', host.$ref, 'xosan.py', command, params) + ) } async function remoteSsh (glusterEndpoint, cmd, ignoreError = false) { let result - const formatSshError = (result) => { + const formatSshError = result => { const messageArray = [] const messageKeys = Object.keys(result) const orderedKeys = ['stderr', 'stdout', 'exit'] @@ -315,7 +334,10 @@ async function remoteSsh (glusterEndpoint, cmd, ignoreError = false) { for (const address of glusterEndpoint.addresses) { for (const host of glusterEndpoint.hosts) { try { - result = await callPlugin(glusterEndpoint.xapi, host, 'run_ssh', {destination: 'root@' + address, cmd: cmd}) + result = await callPlugin(glusterEndpoint.xapi, host, 'run_ssh', { + destination: 'root@' + address, + cmd: cmd, + }) break } catch (exception) { if (exception['code'] !== 'HOST_OFFLINE') { @@ -323,8 +345,15 @@ async function remoteSsh (glusterEndpoint, cmd, ignoreError = false) { } } } - debug(result.command.join(' '), '\n =>exit:', result.exit, '\n =>err :', result.stderr, - '\n =>out (1000 chars) :', result.stdout.substring(0, 1000)) + debug( + result.command.join(' '), + '\n =>exit:', + result.exit, + '\n =>err :', + result.stderr, + '\n =>out (1000 chars) :', + result.stdout.substring(0, 1000) + ) // 255 seems to be ssh's own error codes. if (result.exit !== 255) { if (!ignoreError && result.exit !== 0) { @@ -333,8 +362,11 @@ async function remoteSsh (glusterEndpoint, cmd, ignoreError = false) { return result } } - throw new Error(result != null ? formatSshError(result) : 'no suitable SSH host: ' + - JSON.stringify(glusterEndpoint)) + throw new Error( + result != null + ? formatSshError(result) + : 'no suitable SSH host: ' + JSON.stringify(glusterEndpoint) + ) } function findErrorMessage (commandResut) { @@ -348,11 +380,17 @@ function findErrorMessage (commandResut) { return cliOut['output'] } } - return commandResut['stderr'].length ? commandResut['stderr'] : commandResut['stdout'] + return commandResut['stderr'].length + ? 
commandResut['stderr'] + : commandResut['stdout'] } async function glusterCmd (glusterEndpoint, cmd, ignoreError = false) { - const result = await remoteSsh(glusterEndpoint, `gluster --mode=script --xml ${cmd}`, true) + const result = await remoteSsh( + glusterEndpoint, + `gluster --mode=script --xml ${cmd}`, + true + ) try { result.parsed = parseXml(result['stdout']) } catch (e) { @@ -361,7 +399,8 @@ async function glusterCmd (glusterEndpoint, cmd, ignoreError = false) { if (result['exit'] === 0) { const cliOut = result.parsed['cliOutput'] // we have found cases where opErrno is !=0 and opRet was 0, albeit the operation was an error. - result.commandStatus = cliOut['opRet'].trim() === '0' && cliOut['opErrno'].trim() === '0' + result.commandStatus = + cliOut['opRet'].trim() === '0' && cliOut['opErrno'].trim() === '0' result.error = findErrorMessage(result) } else { result.commandStatus = false @@ -376,7 +415,13 @@ async function glusterCmd (glusterEndpoint, cmd, ignoreError = false) { return result } -const createNetworkAndInsertHosts = defer(async function ($defer, xapi, pif, vlan, networkPrefix) { +const createNetworkAndInsertHosts = defer(async function ( + $defer, + xapi, + pif, + vlan, + networkPrefix +) { let hostIpLastNumber = HOST_FIRST_NUMBER const xosanNetwork = await xapi.createNetwork({ name: 'XOSAN network', @@ -386,14 +431,25 @@ const createNetworkAndInsertHosts = defer(async function ($defer, xapi, pif, vla vlan: +vlan, }) $defer.onFailure(() => xapi.deleteNetwork(xosanNetwork)) - const addresses = xosanNetwork.$PIFs.map(pif => ({pif, address: networkPrefix + (hostIpLastNumber++)})) - await asyncMap(addresses, addressAndPif => reconfigurePifIP(xapi, addressAndPif.pif, addressAndPif.address)) + const addresses = xosanNetwork.$PIFs.map(pif => ({ + pif, + address: networkPrefix + hostIpLastNumber++, + })) + await asyncMap(addresses, addressAndPif => + reconfigurePifIP(xapi, addressAndPif.pif, addressAndPif.address) + ) const master = xapi.pool.$master const otherAddresses = addresses.filter(addr => addr.pif.$host !== master) - await asyncMap(otherAddresses, async (address) => { - const result = await callPlugin(xapi, master, 'run_ping', {address: address.address}) + await asyncMap(otherAddresses, async address => { + const result = await callPlugin(xapi, master, 'run_ping', { + address: address.address, + }) if (result.exit !== 0) { - throw invalidParameters(`Could not ping ${master.name_label}->${address.pif.$host.name_label} (${address.address}) \n${result.stdout}`) + throw invalidParameters( + `Could not ping ${master.name_label}->${ + address.pif.$host.name_label + } (${address.address}) \n${result.stdout}` + ) } }) return xosanNetwork @@ -414,7 +470,17 @@ async function getOrCreateSshKey (xapi) { try { await readKeys() } catch (e) { - await execa('ssh-keygen', ['-q', '-f', SSH_KEY_FILE, '-t', 'rsa', '-b', '4096', '-N', '']) + await execa('ssh-keygen', [ + '-q', + '-f', + SSH_KEY_FILE, + '-t', + 'rsa', + '-b', + '4096', + '-N', + '', + ]) await readKeys() } } @@ -422,10 +488,16 @@ async function getOrCreateSshKey (xapi) { return sshKey } -const _probePoolAndWaitForPresence = defer(async function ($defer, glusterEndpoint, addresses) { - await asyncMap(addresses, async (address) => { +const _probePoolAndWaitForPresence = defer(async function ( + $defer, + glusterEndpoint, + addresses +) { + await asyncMap(addresses, async address => { await glusterCmd(glusterEndpoint, 'peer probe ' + address) - $defer.onFailure(() => glusterCmd(glusterEndpoint, 'peer detach ' + address, 
true)) + $defer.onFailure(() => + glusterCmd(glusterEndpoint, 'peer detach ' + address, true) + ) }) function shouldRetry (peers) { @@ -440,11 +512,19 @@ const _probePoolAndWaitForPresence = defer(async function ($defer, glusterEndpoi return false } - const getPoolStatus = async () => (await glusterCmd(glusterEndpoint, 'pool list')).parsed.cliOutput.peerStatus.peer + const getPoolStatus = async () => + (await glusterCmd(glusterEndpoint, 'pool list')).parsed.cliOutput.peerStatus + .peer return rateLimitedRetry(getPoolStatus, shouldRetry) }) -async function configureGluster (redundancy, ipAndHosts, glusterEndpoint, glusterType, arbiter = null) { +async function configureGluster ( + redundancy, + ipAndHosts, + glusterEndpoint, + glusterType, + arbiter = null +) { const configByType = { replica_arbiter: { creation: 'replica 3 arbiter 1', @@ -455,31 +535,68 @@ async function configureGluster (redundancy, ipAndHosts, glusterEndpoint, gluste extra: ['volume set xosan cluster.data-self-heal on'], }, disperse: { - creation: 'disperse ' + ipAndHosts.length + ' redundancy ' + redundancy + ' ', + creation: + 'disperse ' + ipAndHosts.length + ' redundancy ' + redundancy + ' ', extra: [], }, } const brickVms = arbiter ? ipAndHosts.concat(arbiter) : ipAndHosts - await _probePoolAndWaitForPresence(glusterEndpoint, map(brickVms.slice(1), bv => bv.address)) + await _probePoolAndWaitForPresence( + glusterEndpoint, + map(brickVms.slice(1), bv => bv.address) + ) const creation = configByType[glusterType].creation - const volumeCreation = 'volume create xosan ' + creation + ' ' + + const volumeCreation = + 'volume create xosan ' + + creation + + ' ' + brickVms.map(ipAndHost => ipAndHost.brickName).join(' ') debug('creating volume: ', volumeCreation) await glusterCmd(glusterEndpoint, volumeCreation) - await glusterCmd(glusterEndpoint, 'volume set xosan network.remote-dio enable') - await glusterCmd(glusterEndpoint, 'volume set xosan cluster.eager-lock enable') - await glusterCmd(glusterEndpoint, 'volume set xosan cluster.locking-scheme granular') + await glusterCmd( + glusterEndpoint, + 'volume set xosan network.remote-dio enable' + ) + await glusterCmd( + glusterEndpoint, + 'volume set xosan cluster.eager-lock enable' + ) + await glusterCmd( + glusterEndpoint, + 'volume set xosan cluster.locking-scheme granular' + ) await glusterCmd(glusterEndpoint, 'volume set xosan performance.io-cache off') - await glusterCmd(glusterEndpoint, 'volume set xosan performance.read-ahead off') - await glusterCmd(glusterEndpoint, 'volume set xosan performance.quick-read off') - await glusterCmd(glusterEndpoint, 'volume set xosan performance.strict-write-ordering off') + await glusterCmd( + glusterEndpoint, + 'volume set xosan performance.read-ahead off' + ) + await glusterCmd( + glusterEndpoint, + 'volume set xosan performance.quick-read off' + ) + await glusterCmd( + glusterEndpoint, + 'volume set xosan performance.strict-write-ordering off' + ) await glusterCmd(glusterEndpoint, 'volume set xosan client.event-threads 8') await glusterCmd(glusterEndpoint, 'volume set xosan server.event-threads 8') - await glusterCmd(glusterEndpoint, 'volume set xosan performance.io-thread-count 64') - await glusterCmd(glusterEndpoint, 'volume set xosan performance.stat-prefetch on') - await glusterCmd(glusterEndpoint, 'volume set xosan performance.low-prio-threads 32') + await glusterCmd( + glusterEndpoint, + 'volume set xosan performance.io-thread-count 64' + ) + await glusterCmd( + glusterEndpoint, + 'volume set xosan performance.stat-prefetch 
on' + ) + await glusterCmd( + glusterEndpoint, + 'volume set xosan performance.low-prio-threads 32' + ) await glusterCmd(glusterEndpoint, 'volume set xosan features.shard on') - await glusterCmd(glusterEndpoint, 'volume set xosan features.shard-block-size 512MB') + await glusterCmd( + glusterEndpoint, + 'volume set xosan features.shard-block-size 512MB' + ) await glusterCmd(glusterEndpoint, 'volume set xosan user.cifs off') for (const confChunk of configByType[glusterType].extra) { await glusterCmd(glusterEndpoint, confChunk) @@ -490,22 +607,47 @@ async function configureGluster (redundancy, ipAndHosts, glusterEndpoint, gluste async function _setQuota (glusterEndpoint) { await glusterCmd(glusterEndpoint, 'volume quota xosan enable', true) - await glusterCmd(glusterEndpoint, 'volume set xosan quota-deem-statfs on', true) - await glusterCmd(glusterEndpoint, `volume quota xosan limit-usage / ${XOSAN_LICENSE_QUOTA}B`, true) + await glusterCmd( + glusterEndpoint, + 'volume set xosan quota-deem-statfs on', + true + ) + await glusterCmd( + glusterEndpoint, + `volume quota xosan limit-usage / ${XOSAN_LICENSE_QUOTA}B`, + true + ) } async function _removeQuota (glusterEndpoint) { await glusterCmd(glusterEndpoint, 'volume quota xosan disable', true) } -export const createSR = defer(async function ($defer, { - template, pif, vlan, srs, glusterType, - redundancy, brickSize = this::computeBrickSize(srs), memorySize = 4 * GIGABYTE, ipRange = DEFAULT_NETWORK_PREFIX + '.0', -}) { +export const createSR = defer(async function ( + $defer, + { + template, + pif, + vlan, + srs, + glusterType, + redundancy, + brickSize = this::computeBrickSize(srs), + memorySize = 4 * GIGABYTE, + ipRange = DEFAULT_NETWORK_PREFIX + '.0', + } +) { const OPERATION_OBJECT = { operation: 'createSr', - states: ['configuringNetwork', 'importingVm', 'copyingVms', - 'configuringVms', 'configuringGluster', 'creatingSr', 'scanningSr'], + states: [ + 'configuringNetwork', + 'importingVm', + 'copyingVms', + 'configuringVms', + 'configuringGluster', + 'creatingSr', + 'scanningSr', + ], } if (!this.requestResource) { throw new Error('requestResource is not a function') @@ -521,70 +663,125 @@ export const createSR = defer(async function ($defer, { throw new Error('createSR is already running for this pool') } - CURRENT_POOL_OPERATIONS[poolId] = {...OPERATION_OBJECT, state: 0} + CURRENT_POOL_OPERATIONS[poolId] = { ...OPERATION_OBJECT, state: 0 } const tmpBoundObjectId = srs.join(',') - const license = await this.createBoundXosanTrialLicense({ boundObjectId: tmpBoundObjectId }) + const license = await this.createBoundXosanTrialLicense({ + boundObjectId: tmpBoundObjectId, + }) $defer.onFailure(() => this.unbindXosanLicense({ srId: tmpBoundObjectId })) // '172.31.100.0' -> '172.31.100.' - const networkPrefix = ipRange.split('.').slice(0, 3).join('.') + '.' + const networkPrefix = + ipRange + .split('.') + .slice(0, 3) + .join('.') + '.' 
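
The hunk above turns the ipRange parameter into a textual /24 prefix; host and VM addresses are then formed by appending a last byte starting at HOST_FIRST_NUMBER and VM_FIRST_NUMBER respectively. A minimal sketch of that derivation, mirroring the reformatted code:

    const toNetworkPrefix = ipRange =>
      ipRange
        .split('.')
        .slice(0, 3)
        .join('.') + '.'

    toNetworkPrefix('172.31.100.0') // => '172.31.100.'
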
let vmIpLastNumber = VM_FIRST_NUMBER try { - const xosanNetwork = await createNetworkAndInsertHosts(xapi, pif, vlan, networkPrefix) + const xosanNetwork = await createNetworkAndInsertHosts( + xapi, + pif, + vlan, + networkPrefix + ) $defer.onFailure(() => xapi.deleteNetwork(xosanNetwork)) const sshKey = await getOrCreateSshKey(xapi) const srsObjects = map(srs, srId => xapi.getObject(srId)) - await Promise.all(srsObjects.map(sr => callPlugin(xapi, sr.$PBDs[0].$host, 'receive_ssh_keys', { - private_key: sshKey.private, - public_key: sshKey.public, - force: 'true', - }))) + await Promise.all( + srsObjects.map(sr => + callPlugin(xapi, sr.$PBDs[0].$host, 'receive_ssh_keys', { + private_key: sshKey.private, + public_key: sshKey.public, + force: 'true', + }) + ) + ) const firstSr = srsObjects[0] - CURRENT_POOL_OPERATIONS[poolId] = {...OPERATION_OBJECT, state: 1} + CURRENT_POOL_OPERATIONS[poolId] = { ...OPERATION_OBJECT, state: 1 } const firstVM = await this::_importGlusterVM(xapi, template, firstSr) $defer.onFailure(() => xapi.deleteVm(firstVM, true)) - CURRENT_POOL_OPERATIONS[poolId] = {...OPERATION_OBJECT, state: 2} + CURRENT_POOL_OPERATIONS[poolId] = { ...OPERATION_OBJECT, state: 2 } const copiedVms = await asyncMap(srsObjects.slice(1), sr => - copyVm(xapi, firstVM, sr)::tap(({vm}) => + copyVm(xapi, firstVM, sr)::tap(({ vm }) => $defer.onFailure(() => xapi.deleteVm(vm)) ) ) - const vmsAndSrs = [{ - vm: firstVM, - sr: firstSr, - }].concat(copiedVms) + const vmsAndSrs = [ + { + vm: firstVM, + sr: firstSr, + }, + ].concat(copiedVms) let arbiter = null - CURRENT_POOL_OPERATIONS[poolId] = {...OPERATION_OBJECT, state: 3} + CURRENT_POOL_OPERATIONS[poolId] = { ...OPERATION_OBJECT, state: 3 } if (srs.length === 2) { const sr = firstSr - const arbiterIP = networkPrefix + (vmIpLastNumber++) + const arbiterIP = networkPrefix + vmIpLastNumber++ const arbiterVm = await xapi.copyVm(firstVM, sr) $defer.onFailure(() => xapi.deleteVm(arbiterVm, true)) - arbiter = await _prepareGlusterVm(xapi, sr, arbiterVm, xosanNetwork, arbiterIP, { - labelSuffix: '_arbiter', - increaseDataDisk: false, - memorySize, - }) + arbiter = await _prepareGlusterVm( + xapi, + sr, + arbiterVm, + xosanNetwork, + arbiterIP, + { + labelSuffix: '_arbiter', + increaseDataDisk: false, + memorySize, + } + ) arbiter.arbiter = true } - const ipAndHosts = await asyncMap(vmsAndSrs, vmAndSr => _prepareGlusterVm(xapi, vmAndSr.sr, vmAndSr.vm, xosanNetwork, - networkPrefix + (vmIpLastNumber++), {maxDiskSize: brickSize, memorySize})) - const glusterEndpoint = {xapi, hosts: map(ipAndHosts, ih => ih.host), addresses: map(ipAndHosts, ih => ih.address)} - CURRENT_POOL_OPERATIONS[poolId] = {...OPERATION_OBJECT, state: 4} - await configureGluster(redundancy, ipAndHosts, glusterEndpoint, glusterType, arbiter) + const ipAndHosts = await asyncMap(vmsAndSrs, vmAndSr => + _prepareGlusterVm( + xapi, + vmAndSr.sr, + vmAndSr.vm, + xosanNetwork, + networkPrefix + vmIpLastNumber++, + { maxDiskSize: brickSize, memorySize } + ) + ) + const glusterEndpoint = { + xapi, + hosts: map(ipAndHosts, ih => ih.host), + addresses: map(ipAndHosts, ih => ih.address), + } + CURRENT_POOL_OPERATIONS[poolId] = { ...OPERATION_OBJECT, state: 4 } + await configureGluster( + redundancy, + ipAndHosts, + glusterEndpoint, + glusterType, + arbiter + ) debug('xosan gluster volume started') // We use 10 IPs of the gluster VM range as backup, in the hope that even if the first VM gets destroyed we find at least // one VM to give mount the volfile. 
// It is not possible to edit the device_config after the SR is created and this data is only used at mount time when rebooting // the hosts. - const backupservers = map(range(VM_FIRST_NUMBER, VM_FIRST_NUMBER + 10), ipLastByte => networkPrefix + ipLastByte).join(':') - const config = {server: ipAndHosts[0].address + ':/xosan', backupservers} - CURRENT_POOL_OPERATIONS[poolId] = {...OPERATION_OBJECT, state: 5} - const xosanSrRef = await xapi.call('SR.create', firstSr.$PBDs[0].$host.$ref, config, 0, 'XOSAN', 'XOSAN', - 'xosan', '', true, {}) + const backupservers = map( + range(VM_FIRST_NUMBER, VM_FIRST_NUMBER + 10), + ipLastByte => networkPrefix + ipLastByte + ).join(':') + const config = { server: ipAndHosts[0].address + ':/xosan', backupservers } + CURRENT_POOL_OPERATIONS[poolId] = { ...OPERATION_OBJECT, state: 5 } + const xosanSrRef = await xapi.call( + 'SR.create', + firstSr.$PBDs[0].$host.$ref, + config, + 0, + 'XOSAN', + 'XOSAN', + 'xosan', + '', + true, + {} + ) debug('sr created') // we just forget because the cleanup actions are stacked in the $onFailure system $defer.onFailure(() => xapi.forgetSr(xosanSrRef)) @@ -594,7 +791,7 @@ export const createSR = defer(async function ($defer, { const nodes = ipAndHosts.map(param => ({ brickName: param.brickName, host: param.host.$id, - vm: {id: param.vm.$id, ip: param.address}, + vm: { id: param.vm.$id, ip: param.address }, underlyingSr: param.underlyingSr.$id, arbiter: !!param['arbiter'], })) @@ -608,7 +805,7 @@ export const createSR = defer(async function ($defer, { networkPrefix, redundancy, }) - CURRENT_POOL_OPERATIONS[poolId] = {...OPERATION_OBJECT, state: 6} + CURRENT_POOL_OPERATIONS[poolId] = { ...OPERATION_OBJECT, state: 6 } debug('scanning new SR') await xapi.call('SR.scan', xosanSrRef) await this.rebindLicense({ @@ -643,10 +840,12 @@ createSR.params = { type: 'number', }, memorySize: { - type: 'number', optional: true, + type: 'number', + optional: true, }, ipRange: { - type: 'string', optional: true, + type: 'string', + optional: true, }, } @@ -656,7 +855,10 @@ createSR.resolve = { } async function umountDisk (localEndpoint, diskMountPoint) { - await remoteSsh(localEndpoint, `killall -v -w /usr/sbin/xfs_growfs; fuser -v ${diskMountPoint}; umount ${diskMountPoint} && sed -i '\\_${diskMountPoint}\\S_d' /etc/fstab && rm -rf ${diskMountPoint}`) + await remoteSsh( + localEndpoint, + `killall -v -w /usr/sbin/xfs_growfs; fuser -v ${diskMountPoint}; umount ${diskMountPoint} && sed -i '\\_${diskMountPoint}\\S_d' /etc/fstab && rm -rf ${diskMountPoint}` + ) } // this is mostly what the LVM SR driver does, but we are avoiding the 2To limit it imposes. 
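
Earlier in this hunk, SR.create receives a device_config whose backupservers entry lists ten fallback gluster VM addresses joined by ':' — as the comment notes, this value cannot be edited after the SR is created and is only read when hosts mount the volume. A minimal sketch of its shape, using lodash's range as the file does:

    import { range } from 'lodash'

    const backupservers = range(VM_FIRST_NUMBER, VM_FIRST_NUMBER + 10)
      .map(ipLastByte => networkPrefix + ipLastByte)
      .join(':')
    const config = {
      server: ipAndHosts[0].address + ':/xosan',
      backupservers,
    }
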
@@ -672,14 +874,21 @@ async function createVDIOnLVMWithoutSizeLimit (xapi, lvmSr, diskSize) { const vgName = VG_PREFIX + srUuid const host = $PBDs[0].$host const sizeMb = Math.ceil(diskSize / 1024 / 1024) - const result = await callPlugin(xapi, host, 'run_lvcreate', {sizeMb: asInteger(sizeMb), lvName, vgName}) + const result = await callPlugin(xapi, host, 'run_lvcreate', { + sizeMb: asInteger(sizeMb), + lvName, + vgName, + }) if (result.exit !== 0) { throw Error('Could not create volume ->' + result.stdout) } await xapi.call('SR.scan', xapi.getObject(lvmSr).$ref) const vdi = find(xapi.getObject(lvmSr).$VDIs, vdi => vdi.uuid === uuid) if (vdi != null) { - await xapi.setSrProperties(vdi.$ref, {nameLabel: 'xosan_data', nameDescription: 'Created by XO'}) + await xapi.setSrProperties(vdi.$ref, { + nameLabel: 'xosan_data', + nameDescription: 'Created by XO', + }) return vdi } } @@ -687,29 +896,46 @@ async function createVDIOnLVMWithoutSizeLimit (xapi, lvmSr, diskSize) { async function createNewDisk (xapi, sr, vm, diskSize) { const newDisk = await createVDIOnLVMWithoutSizeLimit(xapi, sr, diskSize) await xapi.createVbd({ vdi: newDisk, vm }) - let vbd = await xapi._waitObjectState(newDisk.$id, disk => Boolean(disk.$VBDs.length)).$VBDs[0] + let vbd = await xapi._waitObjectState(newDisk.$id, disk => + Boolean(disk.$VBDs.length) + ).$VBDs[0] vbd = await xapi._waitObjectState(vbd.$id, vbd => Boolean(vbd.device.length)) return '/dev/' + vbd.device } async function mountNewDisk (localEndpoint, hostname, newDeviceFiledeviceFile) { - const brickRootCmd = 'bash -c \'mkdir -p /bricks; for TESTVAR in {1..9}; do TESTDIR="/bricks/xosan$TESTVAR" ;if mkdir $TESTDIR; then echo $TESTDIR; exit 0; fi ; done ; exit 1\'' - const newBrickRoot = (await remoteSsh(localEndpoint, brickRootCmd)).stdout.trim() + const brickRootCmd = + 'bash -c \'mkdir -p /bricks; for TESTVAR in {1..9}; do TESTDIR="/bricks/xosan$TESTVAR" ;if mkdir $TESTDIR; then echo $TESTDIR; exit 0; fi ; done ; exit 1\'' + const newBrickRoot = (await remoteSsh( + localEndpoint, + brickRootCmd + )).stdout.trim() const brickName = `${hostname}:${newBrickRoot}/xosandir` const mountBrickCmd = `mkfs.xfs -i size=512 ${newDeviceFiledeviceFile}; mkdir -p ${newBrickRoot}; echo "${newDeviceFiledeviceFile} ${newBrickRoot} xfs defaults 0 0" >> /etc/fstab; mount -a` await remoteSsh(localEndpoint, mountBrickCmd) return brickName } -async function replaceBrickOnSameVM (xosansr, previousBrick, newLvmSr, brickSize) { +async function replaceBrickOnSameVM ( + xosansr, + previousBrick, + newLvmSr, + brickSize +) { const OPERATION_OBJECT = { operation: 'replaceBrick', - states: ['creatingNewDisk', 'mountingDisk', 'swappingBrick', 'disconnectingOldDisk', 'scanningSr'], + states: [ + 'creatingNewDisk', + 'mountingDisk', + 'swappingBrick', + 'disconnectingOldDisk', + 'scanningSr', + ], } const xapi = this.getXapi(xosansr) const poolId = xapi.pool.$id try { - CURRENT_POOL_OPERATIONS[poolId] = {...OPERATION_OBJECT, state: 0} + CURRENT_POOL_OPERATIONS[poolId] = { ...OPERATION_OBJECT, state: 0 } // TODO: a bit of user input validation on 'previousBrick', it's going to ssh const previousIp = previousBrick.split(':')[0] @@ -719,34 +945,61 @@ async function replaceBrickOnSameVM (xosansr, previousBrick, newLvmSr, brickSize const nodeIndex = nodes.findIndex(node => node.vm.ip === previousIp) const glusterEndpoint = this::_getGlusterEndpoint(xosansr) const previousVM = _getIPToVMDict(xapi, xosansr)[previousBrick].vm - const newDeviceFile = await createNewDisk(xapi, newLvmSr, previousVM, 
brickSize) + const newDeviceFile = await createNewDisk( + xapi, + newLvmSr, + previousVM, + brickSize + ) const localEndpoint = { xapi, hosts: map(nodes, node => xapi.getObject(node.host)), addresses: [previousIp], } - const previousBrickRoot = previousBrick.split(':')[1].split('/').slice(0, 3).join('/') - const previousBrickDevice = (await remoteSsh(localEndpoint, `grep " ${previousBrickRoot} " /proc/mounts | cut -d ' ' -f 1 | sed 's_/dev/__'`)).stdout.trim() - CURRENT_POOL_OPERATIONS[poolId] = {...OPERATION_OBJECT, state: 1} - const brickName = await mountNewDisk(localEndpoint, previousIp, newDeviceFile) - CURRENT_POOL_OPERATIONS[poolId] = {...OPERATION_OBJECT, state: 2} - await glusterCmd(glusterEndpoint, `volume replace-brick xosan ${previousBrick} ${brickName} commit force`) + const previousBrickRoot = previousBrick + .split(':')[1] + .split('/') + .slice(0, 3) + .join('/') + const previousBrickDevice = (await remoteSsh( + localEndpoint, + `grep " ${previousBrickRoot} " /proc/mounts | cut -d ' ' -f 1 | sed 's_/dev/__'` + )).stdout.trim() + CURRENT_POOL_OPERATIONS[poolId] = { ...OPERATION_OBJECT, state: 1 } + const brickName = await mountNewDisk( + localEndpoint, + previousIp, + newDeviceFile + ) + CURRENT_POOL_OPERATIONS[poolId] = { ...OPERATION_OBJECT, state: 2 } + await glusterCmd( + glusterEndpoint, + `volume replace-brick xosan ${previousBrick} ${brickName} commit force` + ) nodes[nodeIndex].brickName = brickName nodes[nodeIndex].underlyingSr = newLvmSr await xapi.xo.setData(xosansr, 'xosan_config', data) - CURRENT_POOL_OPERATIONS[poolId] = {...OPERATION_OBJECT, state: 3} + CURRENT_POOL_OPERATIONS[poolId] = { ...OPERATION_OBJECT, state: 3 } await umountDisk(localEndpoint, previousBrickRoot) - const previousVBD = previousVM.$VBDs.find(vbd => vbd.device === previousBrickDevice) + const previousVBD = previousVM.$VBDs.find( + vbd => vbd.device === previousBrickDevice + ) await xapi.disconnectVbd(previousVBD) await xapi.deleteVdi(previousVBD.VDI) - CURRENT_POOL_OPERATIONS[poolId] = {...OPERATION_OBJECT, state: 4} + CURRENT_POOL_OPERATIONS[poolId] = { ...OPERATION_OBJECT, state: 4 } await xapi.call('SR.scan', xapi.getObject(xosansr).$ref) } finally { delete CURRENT_POOL_OPERATIONS[poolId] } } -export async function replaceBrick ({xosansr, previousBrick, newLvmSr, brickSize, onSameVM = true}) { +export async function replaceBrick ({ + xosansr, + previousBrick, + newLvmSr, + brickSize, + onSameVM = true, +}) { await this.checkXosanLicense({ srId: xosansr.uuid }) const OPERATION_OBJECT = { @@ -754,7 +1007,12 @@ export async function replaceBrick ({xosansr, previousBrick, newLvmSr, brickSize states: ['insertingNewVm', 'swapingBrick', 'deletingVm', 'scaningSr'], } if (onSameVM) { - return this::replaceBrickOnSameVM(xosansr, previousBrick, newLvmSr, brickSize) + return this::replaceBrickOnSameVM( + xosansr, + previousBrick, + newLvmSr, + brickSize + ) } const xapi = this.getXapi(xosansr) const poolId = xapi.pool.$id @@ -774,25 +1032,40 @@ export async function replaceBrick ({xosansr, previousBrick, newLvmSr, brickSize } const previousVMEntry = _getIPToVMDict(xapi, xosansr)[previousBrick] const arbiter = nodes[nodeIndex].arbiter - CURRENT_POOL_OPERATIONS[poolId] = {...OPERATION_OBJECT, state: 0} - const {newVM, addressAndHost} = await this::insertNewGlusterVm(xapi, xosansr, newLvmSr, - {labelSuffix: arbiter ? 
'_arbiter' : '', glusterEndpoint, newIpAddress, increaseDataDisk: !arbiter, brickSize}) - CURRENT_POOL_OPERATIONS[poolId] = {...OPERATION_OBJECT, state: 1} - await glusterCmd(glusterEndpoint, `volume replace-brick xosan ${previousBrick} ${addressAndHost.brickName} commit force`) + CURRENT_POOL_OPERATIONS[poolId] = { ...OPERATION_OBJECT, state: 0 } + const { newVM, addressAndHost } = await this::insertNewGlusterVm( + xapi, + xosansr, + newLvmSr, + { + labelSuffix: arbiter ? '_arbiter' : '', + glusterEndpoint, + newIpAddress, + increaseDataDisk: !arbiter, + brickSize, + } + ) + CURRENT_POOL_OPERATIONS[poolId] = { ...OPERATION_OBJECT, state: 1 } + await glusterCmd( + glusterEndpoint, + `volume replace-brick xosan ${previousBrick} ${ + addressAndHost.brickName + } commit force` + ) await glusterCmd(glusterEndpoint, 'peer detach ' + previousIp) data.nodes.splice(nodeIndex, 1, { brickName: addressAndHost.brickName, host: addressAndHost.host.$id, arbiter: arbiter, - vm: {ip: addressAndHost.address, id: newVM.$id}, + vm: { ip: addressAndHost.address, id: newVM.$id }, underlyingSr: newLvmSr, }) await xapi.xo.setData(xosansr, 'xosan_config', data) - CURRENT_POOL_OPERATIONS[poolId] = {...OPERATION_OBJECT, state: 2} + CURRENT_POOL_OPERATIONS[poolId] = { ...OPERATION_OBJECT, state: 2 } if (previousVMEntry) { await xapi.deleteVm(previousVMEntry.vm, true) } - CURRENT_POOL_OPERATIONS[poolId] = {...OPERATION_OBJECT, state: 3} + CURRENT_POOL_OPERATIONS[poolId] = { ...OPERATION_OBJECT, state: 3 } await xapi.call('SR.scan', xapi.getObject(xosansr).$ref) } finally { delete CURRENT_POOL_OPERATIONS[poolId] @@ -802,20 +1075,29 @@ export async function replaceBrick ({xosansr, previousBrick, newLvmSr, brickSize replaceBrick.description = 'replaceBrick brick in gluster volume' replaceBrick.permission = 'admin' replaceBrick.params = { - xosansr: {type: 'string'}, - previousBrick: {type: 'string'}, - newLvmSr: {type: 'string'}, - brickSize: {type: 'number'}, + xosansr: { type: 'string' }, + previousBrick: { type: 'string' }, + newLvmSr: { type: 'string' }, + brickSize: { type: 'number' }, } replaceBrick.resolve = { xosansr: ['sr', 'SR', 'administrate'], } -async function _prepareGlusterVm (xapi, lvmSr, newVM, xosanNetwork, ipAddress, { - labelSuffix = '', increaseDataDisk = true, - maxDiskSize = Infinity, memorySize = 2 * GIGABYTE, -}) { +async function _prepareGlusterVm ( + xapi, + lvmSr, + newVM, + xosanNetwork, + ipAddress, + { + labelSuffix = '', + increaseDataDisk = true, + maxDiskSize = Infinity, + memorySize = 2 * GIGABYTE, + } +) { const host = lvmSr.$PBDs[0].$host const xenstoreData = { 'vm-data/hostname': 'XOSAN' + lvmSr.name_label + labelSuffix, @@ -842,32 +1124,54 @@ async function _prepareGlusterVm (xapi, lvmSr, newVM, xosanNetwork, ipAddress, { } await xapi.addTag(newVM.$id, `XOSAN-${xapi.pool.name_label}`) await xapi.editVm(newVM, { - name_label: `XOSAN - ${lvmSr.name_label} - ${host.name_label} ${labelSuffix}`, + name_label: `XOSAN - ${lvmSr.name_label} - ${ + host.name_label + } ${labelSuffix}`, name_description: 'Xosan VM storage', memory: memorySize, }) await xapi.call('VM.set_xenstore_data', newVM.$ref, xenstoreData) - const rootDisk = newVM.$VBDs.map(vbd => vbd && vbd.$VDI).find(vdi => vdi && vdi.name_label === 'xosan_root') + const rootDisk = newVM.$VBDs + .map(vbd => vbd && vbd.$VDI) + .find(vdi => vdi && vdi.name_label === 'xosan_root') const rootDiskSize = rootDisk.virtual_size await xapi.startVm(newVM) debug('waiting for boot of ', ip) // wait until we find the assigned IP in the networks, we 
are just checking the boot is complete - const vmIsUp = vm => Boolean(vm.$guest_metrics && includes(vm.$guest_metrics.networks, ip)) + const vmIsUp = vm => + Boolean(vm.$guest_metrics && includes(vm.$guest_metrics.networks, ip)) const vm = await xapi._waitObjectState(newVM.$id, vmIsUp) debug('booted ', ip) - const localEndpoint = {xapi: xapi, hosts: [host], addresses: [ip]} + const localEndpoint = { xapi: xapi, hosts: [host], addresses: [ip] } const srFreeSpace = sr.physical_size - sr.physical_utilisation // we use a percentage because it looks like the VDI overhead is proportional - const newSize = floor2048(Math.min(maxDiskSize - rootDiskSize, srFreeSpace * XOSAN_DATA_DISK_USEAGE_RATIO)) + const newSize = floor2048( + Math.min( + maxDiskSize - rootDiskSize, + srFreeSpace * XOSAN_DATA_DISK_USEAGE_RATIO + ) + ) const smallDiskSize = 1073741824 - const deviceFile = await createNewDisk(xapi, lvmSr, newVM, increaseDataDisk ? newSize : smallDiskSize) + const deviceFile = await createNewDisk( + xapi, + lvmSr, + newVM, + increaseDataDisk ? newSize : smallDiskSize + ) const brickName = await mountNewDisk(localEndpoint, ip, deviceFile) - return {address: ip, host, vm, underlyingSr: lvmSr, brickName} + return { address: ip, host, vm, underlyingSr: lvmSr, brickName } } async function _importGlusterVM (xapi, template, lvmsrId) { - const templateStream = await this.requestResource('xosan', template.id, template.version) - const newVM = await xapi.importVm(templateStream, {srId: lvmsrId, type: 'xva'}) + const templateStream = await this.requestResource( + 'xosan', + template.id, + template.version + ) + const newVM = await xapi.importVm(templateStream, { + srId: lvmsrId, + type: 'xva', + }) await xapi.editVm(newVM, { autoPoweron: true, name_label: 'XOSAN imported VM', @@ -880,7 +1184,11 @@ function _findAFreeIPAddress (nodes, networkPrefix) { return _findIPAddressOutsideList(map(nodes, n => n.vm.ip), networkPrefix) } -function _findIPAddressOutsideList (reservedList, networkPrefix, vmIpLastNumber = 101) { +function _findIPAddressOutsideList ( + reservedList, + networkPrefix, + vmIpLastNumber = 101 +) { for (let i = vmIpLastNumber; i < 255; i++) { const candidate = networkPrefix + i if (!reservedList.find(a => a === candidate)) { @@ -895,10 +1203,19 @@ const _median = arr => { return arr[Math.floor(arr.length / 2)] } -const insertNewGlusterVm = defer(async function ($defer, xapi, xosansr, lvmsrId, { - labelSuffix = '', - glusterEndpoint = null, ipAddress = null, increaseDataDisk = true, brickSize = Infinity, -}) { +const insertNewGlusterVm = defer(async function ( + $defer, + xapi, + xosansr, + lvmsrId, + { + labelSuffix = '', + glusterEndpoint = null, + ipAddress = null, + increaseDataDisk = true, + brickSize = Infinity, + } +) { const data = getXosanConfig(xosansr, xapi) if (ipAddress === null) { ipAddress = _findAFreeIPAddress(data.nodes, data.networkPrefix) @@ -916,20 +1233,30 @@ const insertNewGlusterVm = defer(async function ($defer, xapi, xosansr, lvmsrId, // can't really copy an existing VM, because existing gluster VMs disks might too large to be copied. const newVM = await this::_importGlusterVM(xapi, data.template, lvmsrId) $defer.onFailure(() => xapi.deleteVm(newVM, true)) - const addressAndHost = await _prepareGlusterVm(xapi, srObject, newVM, xosanNetwork, ipAddress, { - labelSuffix, - increaseDataDisk, - maxDiskSize: brickSize, - memorySize: vmsMemories.length ? 
_median(vmsMemories) : 2 * GIGABYTE, - }) + const addressAndHost = await _prepareGlusterVm( + xapi, + srObject, + newVM, + xosanNetwork, + ipAddress, + { + labelSuffix, + increaseDataDisk, + maxDiskSize: brickSize, + memorySize: vmsMemories.length ? _median(vmsMemories) : 2 * GIGABYTE, + } + ) if (!glusterEndpoint) { glusterEndpoint = this::_getGlusterEndpoint(xosansr) } await _probePoolAndWaitForPresence(glusterEndpoint, [addressAndHost.address]) - return {data, newVM, addressAndHost, glusterEndpoint} + return { data, newVM, addressAndHost, glusterEndpoint } }) -export const addBricks = defer(async function ($defer, {xosansr, lvmsrs, brickSize}) { +export const addBricks = defer(async function ( + $defer, + { xosansr, lvmsrs, brickSize } +) { await this.checkXosanLicense({ srId: xosansr.uuid }) const OPERATION_OBJECT = { @@ -941,7 +1268,7 @@ export const addBricks = defer(async function ($defer, {xosansr, lvmsrs, brickSi if (CURRENT_POOL_OPERATIONS[poolId]) { throw new Error('createSR is already running for this pool') } - CURRENT_POOL_OPERATIONS[poolId] = {...OPERATION_OBJECT, state: 0} + CURRENT_POOL_OPERATIONS[poolId] = { ...OPERATION_OBJECT, state: 0 } try { const data = getXosanConfig(xosansr, xapi) const usedAddresses = map(data.nodes, n => n.vm.ip) @@ -949,29 +1276,55 @@ export const addBricks = defer(async function ($defer, {xosansr, lvmsrs, brickSi const newAddresses = [] const newNodes = [] for (const newSr of lvmsrs) { - const ipAddress = _findIPAddressOutsideList(usedAddresses.concat(newAddresses), data.networkPrefix) + const ipAddress = _findIPAddressOutsideList( + usedAddresses.concat(newAddresses), + data.networkPrefix + ) newAddresses.push(ipAddress) - const {newVM, addressAndHost} = await this::insertNewGlusterVm(xapi, xosansr, newSr, {ipAddress, brickSize}) - $defer.onFailure(() => glusterCmd(glusterEndpoint, 'peer detach ' + ipAddress, true)) + const { newVM, addressAndHost } = await this::insertNewGlusterVm( + xapi, + xosansr, + newSr, + { ipAddress, brickSize } + ) + $defer.onFailure(() => + glusterCmd(glusterEndpoint, 'peer detach ' + ipAddress, true) + ) $defer.onFailure(() => xapi.deleteVm(newVM, true)) const brickName = addressAndHost.brickName - newNodes.push({brickName, host: addressAndHost.host.$id, vm: {id: newVM.$id, ip: ipAddress}, underlyingSr: newSr}) + newNodes.push({ + brickName, + host: addressAndHost.host.$id, + vm: { id: newVM.$id, ip: ipAddress }, + underlyingSr: newSr, + }) } const arbiterNode = data.nodes.find(n => n['arbiter']) if (arbiterNode) { - await glusterCmd(glusterEndpoint, - `volume remove-brick xosan replica ${data.nodes.length - 1} ${arbiterNode.brickName} force`) + await glusterCmd( + glusterEndpoint, + `volume remove-brick xosan replica ${data.nodes.length - 1} ${ + arbiterNode.brickName + } force` + ) data.nodes = data.nodes.filter(n => n !== arbiterNode) data.type = 'replica' await xapi.xo.setData(xosansr, 'xosan_config', data) - await glusterCmd(glusterEndpoint, 'peer detach ' + arbiterNode.vm.ip, true) + await glusterCmd( + glusterEndpoint, + 'peer detach ' + arbiterNode.vm.ip, + true + ) await xapi.deleteVm(arbiterNode.vm.id, true) } - CURRENT_POOL_OPERATIONS[poolId] = {...OPERATION_OBJECT, state: 1} - await glusterCmd(glusterEndpoint, `volume add-brick xosan ${newNodes.map(n => n.brickName).join(' ')}`) + CURRENT_POOL_OPERATIONS[poolId] = { ...OPERATION_OBJECT, state: 1 } + await glusterCmd( + glusterEndpoint, + `volume add-brick xosan ${newNodes.map(n => n.brickName).join(' ')}` + ) data.nodes = data.nodes.concat(newNodes) 
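
In the addBricks hunk above, every brick is addressed as '<ip>:<path>' and all new bricks are appended to the volume with a single gluster CLI call. A minimal sketch of the command string being assembled (the sample brick name in the comment is hypothetical):

    const cmd = `volume add-brick xosan ${newNodes
      .map(n => n.brickName)
      .join(' ')}`
    // e.g. 'volume add-brick xosan 172.31.100.101:/bricks/xosan1/xosandir'
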
await xapi.xo.setData(xosansr, 'xosan_config', data)
-    CURRENT_POOL_OPERATIONS[poolId] = {...OPERATION_OBJECT, state: 2}
+    CURRENT_POOL_OPERATIONS[poolId] = { ...OPERATION_OBJECT, state: 2 }
     await xapi.call('SR.scan', xapi.getObject(xosansr).$ref)
   } finally {
     delete CURRENT_POOL_OPERATIONS[poolId]
@@ -981,14 +1334,14 @@ addBricks.description = 'add brick to XOSAN SR'
 addBricks.permission = 'admin'
 addBricks.params = {
-  xosansr: {type: 'string'},
+  xosansr: { type: 'string' },
   lvmsrs: {
     type: 'array',
     items: {
       type: 'string',
     },
   },
-  brickSize: {type: 'number'},
+  brickSize: { type: 'number' },
 }
 
 addBricks.resolve = {
@@ -996,12 +1349,14 @@ addBricks.resolve = {
   lvmsrs: ['sr', 'SR', 'administrate'],
 }
 
-export const removeBricks = defer(async function ($defer, {xosansr, bricks}) {
+export const removeBricks = defer(async function ($defer, { xosansr, bricks }) {
   await this.checkXosanLicense({ srId: xosansr.uuid })
 
   const xapi = this.getXapi(xosansr)
   if (CURRENT_POOL_OPERATIONS[xapi.pool.$id]) {
-    throw new Error('this there is already a XOSAN operation running on this pool')
+    throw new Error(
+      'there is already a XOSAN operation running on this pool'
+    )
   }
   CURRENT_POOL_OPERATIONS[xapi.pool.$id] = true
   try {
@@ -1013,8 +1368,13 @@ export const removeBricks = defer(async function ($defer, {xosansr, bricks}) {
     remove(glusterEndpoint.addresses, ip => ips.includes(ip))
     const dict = _getIPToVMDict(xapi, xosansr.id)
     const brickVMs = map(bricks, b => dict[b])
-    await glusterCmd(glusterEndpoint, `volume remove-brick xosan ${bricks.join(' ')} force`)
-    await asyncMap(ips, ip => glusterCmd(glusterEndpoint, 'peer detach ' + ip, true))
+    await glusterCmd(
+      glusterEndpoint,
+      `volume remove-brick xosan ${bricks.join(' ')} force`
+    )
+    await asyncMap(ips, ip =>
+      glusterCmd(glusterEndpoint, 'peer detach ' + ip, true)
+    )
     remove(data.nodes, node => ips.includes(node.vm.ip))
     await xapi.xo.setData(xosansr.id, 'xosan_config', data)
     await xapi.call('SR.scan', xapi.getObject(xosansr._xapiId).$ref)
@@ -1027,70 +1387,91 @@ export const removeBricks = defer(async function ($defer, {xosansr, bricks}) {
 removeBricks.description = 'remove brick from XOSAN SR'
 removeBricks.permission = 'admin'
 removeBricks.params = {
-  xosansr: {type: 'string'},
+  xosansr: { type: 'string' },
   bricks: {
     type: 'array',
-    items: {type: 'string'},
+    items: { type: 'string' },
   },
 }
 
 removeBricks.resolve = { xosansr: ['sr', 'SR', 'administrate'] }
 
-export function checkSrCurrentState ({poolId}) {
+export function checkSrCurrentState ({ poolId }) {
   return CURRENT_POOL_OPERATIONS[poolId]
 }
 
-checkSrCurrentState.description = 'checks if there is an operation currently running on the SR'
+checkSrCurrentState.description =
+  'checks if there is an operation currently running on the SR'
 checkSrCurrentState.permission = 'admin'
-checkSrCurrentState.params = {poolId: {type: 'string'}}
+checkSrCurrentState.params = { poolId: { type: 'string' } }
 
 const POSSIBLE_CONFIGURATIONS = {}
-POSSIBLE_CONFIGURATIONS[2] = [{layout: 'replica_arbiter', redundancy: 3, capacity: 1}]
+POSSIBLE_CONFIGURATIONS[2] = [
+  { layout: 'replica_arbiter', redundancy: 3, capacity: 1 },
+]
 POSSIBLE_CONFIGURATIONS[3] = [
-  {layout: 'replica', redundancy: 3, capacity: 1},
-  {layout: 'disperse', redundancy: 1, capacity: 2},
+  { layout: 'replica', redundancy: 3, capacity: 1 },
+  { layout: 'disperse', redundancy: 1, capacity: 2 },
+]
+POSSIBLE_CONFIGURATIONS[4] = [{ layout: 'replica', redundancy: 2, 
capacity: 2 }] +POSSIBLE_CONFIGURATIONS[5] = [ + { layout: 'disperse', redundancy: 1, capacity: 4 }, ] -POSSIBLE_CONFIGURATIONS[4] = [{layout: 'replica', redundancy: 2, capacity: 2}] -POSSIBLE_CONFIGURATIONS[5] = [{layout: 'disperse', redundancy: 1, capacity: 4}] POSSIBLE_CONFIGURATIONS[6] = [ - {layout: 'replica', redundancy: 2, capacity: 3}, - {layout: 'replica', redundancy: 3, capacity: 2}, - {layout: 'disperse', redundancy: 2, capacity: 4}, + { layout: 'replica', redundancy: 2, capacity: 3 }, + { layout: 'replica', redundancy: 3, capacity: 2 }, + { layout: 'disperse', redundancy: 2, capacity: 4 }, ] -POSSIBLE_CONFIGURATIONS[7] = [{layout: 'disperse', redundancy: 3, capacity: 4}] -POSSIBLE_CONFIGURATIONS[8] = [{layout: 'replica', redundancy: 2, capacity: 4}] +POSSIBLE_CONFIGURATIONS[7] = [ + { layout: 'disperse', redundancy: 3, capacity: 4 }, +] +POSSIBLE_CONFIGURATIONS[8] = [{ layout: 'replica', redundancy: 2, capacity: 4 }] POSSIBLE_CONFIGURATIONS[9] = [ - {layout: 'replica', redundancy: 3, capacity: 3}, - {layout: 'disperse', redundancy: 1, capacity: 8}, + { layout: 'replica', redundancy: 3, capacity: 3 }, + { layout: 'disperse', redundancy: 1, capacity: 8 }, ] POSSIBLE_CONFIGURATIONS[10] = [ - {layout: 'replica', redundancy: 2, capacity: 5}, - {layout: 'disperse', redundancy: 2, capacity: 8}, + { layout: 'replica', redundancy: 2, capacity: 5 }, + { layout: 'disperse', redundancy: 2, capacity: 8 }, +] +POSSIBLE_CONFIGURATIONS[11] = [ + { layout: 'disperse', redundancy: 3, capacity: 8 }, ] -POSSIBLE_CONFIGURATIONS[11] = [{layout: 'disperse', redundancy: 3, capacity: 8}] POSSIBLE_CONFIGURATIONS[12] = [ - {layout: 'replica', redundancy: 2, capacity: 6}, - {layout: 'disperse', redundancy: 4, capacity: 8}, + { layout: 'replica', redundancy: 2, capacity: 6 }, + { layout: 'disperse', redundancy: 4, capacity: 8 }, +] +POSSIBLE_CONFIGURATIONS[13] = [ + { layout: 'disperse', redundancy: 5, capacity: 8 }, ] -POSSIBLE_CONFIGURATIONS[13] = [{layout: 'disperse', redundancy: 5, capacity: 8}] POSSIBLE_CONFIGURATIONS[14] = [ - {layout: 'replica', redundancy: 2, capacity: 7}, - {layout: 'disperse', redundancy: 6, capacity: 8}, + { layout: 'replica', redundancy: 2, capacity: 7 }, + { layout: 'disperse', redundancy: 6, capacity: 8 }, ] POSSIBLE_CONFIGURATIONS[15] = [ - {layout: 'replica', redundancy: 3, capacity: 5}, - {layout: 'disperse', redundancy: 7, capacity: 8}, + { layout: 'replica', redundancy: 3, capacity: 5 }, + { layout: 'disperse', redundancy: 7, capacity: 8 }, +] +POSSIBLE_CONFIGURATIONS[16] = [ + { layout: 'replica', redundancy: 2, capacity: 8 }, ] -POSSIBLE_CONFIGURATIONS[16] = [{layout: 'replica', redundancy: 2, capacity: 8}] function computeBrickSize (srs, brickSize = Infinity) { const xapi = this.getXapi(srs[0]) const srsObjects = map(srs, srId => xapi.getObject(srId)) - const srSizes = map(srsObjects, sr => sr.physical_size - sr.physical_utilisation) + const srSizes = map( + srsObjects, + sr => sr.physical_size - sr.physical_utilisation + ) const minSize = Math.min(brickSize, ...srSizes) - return Math.floor((minSize - XOSAN_VM_SYSTEM_DISK_SIZE) * XOSAN_DATA_DISK_USEAGE_RATIO) + return Math.floor( + (minSize - XOSAN_VM_SYSTEM_DISK_SIZE) * XOSAN_DATA_DISK_USEAGE_RATIO + ) } -export async function computeXosanPossibleOptions ({lvmSrs, brickSize = Infinity}) { +export async function computeXosanPossibleOptions ({ + lvmSrs, + brickSize = Infinity, +}) { const count = lvmSrs.length const configurations = POSSIBLE_CONFIGURATIONS[count] if (!configurations) { @@ -1098,7 +1479,10 @@ export 
async function computeXosanPossibleOptions ({lvmSrs, brickSize = Infinity } if (count > 0) { const finalBrickSize = this::computeBrickSize(lvmSrs, brickSize) - return configurations.map(conf => ({...conf, availableSpace: Math.max(0, finalBrickSize * conf.capacity)})) + return configurations.map(conf => ({ + ...conf, + availableSpace: Math.max(0, finalBrickSize * conf.capacity), + })) } } @@ -1110,7 +1494,8 @@ computeXosanPossibleOptions.params = { }, }, brickSize: { - type: 'number', optional: true, + type: 'number', + optional: true, }, } @@ -1121,10 +1506,14 @@ export async function unlock ({ licenseId, sr }) { const glusterEndpoint = this::_getGlusterEndpoint(sr.id) await _removeQuota(glusterEndpoint) - await glusterEndpoint.xapi.call('SR.scan', glusterEndpoint.xapi.getObject(sr).$ref) + await glusterEndpoint.xapi.call( + 'SR.scan', + glusterEndpoint.xapi.getObject(sr).$ref + ) } -unlock.description = 'Unlock XOSAN SR functionalities by binding it to a paid license' +unlock.description = + 'Unlock XOSAN SR functionalities by binding it to a paid license' unlock.permission = 'admin' @@ -1139,7 +1528,7 @@ unlock.resolve = { // --------------------------------------------------------------------- -export async function downloadAndInstallXosanPack ({id, version, pool}) { +export async function downloadAndInstallXosanPack ({ id, version, pool }) { if (!this.requestResource) { throw new Error('requestResource is not a function') } @@ -1149,16 +1538,16 @@ export async function downloadAndInstallXosanPack ({id, version, pool}) { await xapi.installSupplementalPackOnAllHosts(res) await xapi._updateObjectMapProperty(xapi.pool, 'other_config', { - 'xosan_pack_installation_time': String(Math.floor(Date.now() / 1e3)), + xosan_pack_installation_time: String(Math.floor(Date.now() / 1e3)), }) } downloadAndInstallXosanPack.description = 'Register a resource via cloud plugin' downloadAndInstallXosanPack.params = { - id: {type: 'string'}, - version: {type: 'string'}, - pool: {type: 'string'}, + id: { type: 'string' }, + version: { type: 'string' }, + pool: { type: 'string' }, } downloadAndInstallXosanPack.resolve = { diff --git a/packages/xo-server/src/collection.js b/packages/xo-server/src/collection.js index 184fc749c..79748d163 100644 --- a/packages/xo-server/src/collection.js +++ b/packages/xo-server/src/collection.js @@ -1,11 +1,7 @@ import Model from './model' -import {BaseError} from 'make-error' -import {EventEmitter} from 'events' -import { - isArray, - isObject, - map, -} from './utils' +import { BaseError } from 'make-error' +import { EventEmitter } from 'events' +import { isArray, isObject, map } from './utils' // =================================================================== @@ -39,34 +35,34 @@ export default class Collection extends EventEmitter { models = [models] } - const {Model} = this - map(models, model => { - if (!(model instanceof Model)) { - model = new Model(model) - } + const { Model } = this + map( + models, + model => { + if (!(model instanceof Model)) { + model = new Model(model) + } - const error = model.validate() - if (error) { - // TODO: Better system inspired by Backbone.js - throw error - } + const error = model.validate() + if (error) { + // TODO: Better system inspired by Backbone.js + throw error + } - return model.properties - }, models) + return model.properties + }, + models + ) models = await this._add(models, opts) this.emit('add', models) - return array - ? models - : new this.Model(models[0]) + return array ? 
models : new this.Model(models[0]) } async first (properties) { if (!isObject(properties)) { - properties = (properties !== undefined) - ? { id: properties } - : {} + properties = properties !== undefined ? { id: properties } : {} } const model = await this._first(properties) @@ -75,9 +71,7 @@ export default class Collection extends EventEmitter { async get (properties) { if (!isObject(properties)) { - properties = (properties !== undefined) - ? { id: properties } - : {} + properties = properties !== undefined ? { id: properties } : {} } return /* await */ this._get(properties) @@ -100,37 +94,39 @@ export default class Collection extends EventEmitter { models = [models] } - const {Model} = this - map(models, model => { - if (!(model instanceof Model)) { - // TODO: Problems, we may be mixing in some default - // properties which will overwrite existing ones. - model = new Model(model) - } + const { Model } = this + map( + models, + model => { + if (!(model instanceof Model)) { + // TODO: Problems, we may be mixing in some default + // properties which will overwrite existing ones. + model = new Model(model) + } - const id = model.get('id') + const id = model.get('id') - // Missing models should be added not updated. - if (id === undefined) { - // FIXME: should not throw an exception but return a rejected promise. - throw new Error('a model without an id cannot be updated') - } + // Missing models should be added not updated. + if (id === undefined) { + // FIXME: should not throw an exception but return a rejected promise. + throw new Error('a model without an id cannot be updated') + } - const error = model.validate() - if (error !== undefined) { - // TODO: Better system inspired by Backbone.js. - throw error - } + const error = model.validate() + if (error !== undefined) { + // TODO: Better system inspired by Backbone.js. + throw error + } - return model.properties - }, models) + return model.properties + }, + models + ) models = await this._update(models) this.emit('update', models) - return array - ? models - : new this.Model(models[0]) + return array ? models : new this.Model(models[0]) } // Methods to override in implementations. @@ -165,8 +161,6 @@ export default class Collection extends EventEmitter { async _first (properties) { const models = await this.get(properties) - return models.length - ? models[0] - : null + return models.length ? 
models[0] : null } } diff --git a/packages/xo-server/src/collection/redis.js b/packages/xo-server/src/collection/redis.js index d83260dfd..26dd386b9 100644 --- a/packages/xo-server/src/collection/redis.js +++ b/packages/xo-server/src/collection/redis.js @@ -1,5 +1,12 @@ import { createClient as createRedisClient } from 'redis' -import { difference, filter, forEach, isEmpty, keys as getKeys, map } from 'lodash' +import { + difference, + filter, + forEach, + isEmpty, + keys as getKeys, + map, +} from 'lodash' import { ignoreErrors, promisifyAll } from 'promise-toolbox' import { v4 as generateUuid } from 'uuid' @@ -28,33 +35,33 @@ import { asyncMap } from '../utils' const VERSION = '20170905' export default class Redis extends Collection { - constructor ({ - connection, - indexes = [], - prefix, - uri, - }) { + constructor ({ connection, indexes = [], prefix, uri }) { super() this.indexes = indexes this.prefix = prefix - const redis = this.redis = promisifyAll(connection || createRedisClient(uri)) + const redis = (this.redis = promisifyAll( + connection || createRedisClient(uri) + )) const key = `${prefix}:version` - redis.get(key).then(version => { - if (version === VERSION) { - return - } + redis + .get(key) + .then(version => { + if (version === VERSION) { + return + } - let p = redis.set(`${prefix}:version`, VERSION) - switch (version) { - case undefined: - // - clean indexes - // - indexes are now case insensitive - p = p.then(() => this.rebuildIndexes()) - } - return p - })::ignoreErrors() + let p = redis.set(`${prefix}:version`, VERSION) + switch (version) { + case undefined: + // - clean indexes + // - indexes are now case insensitive + p = p.then(() => this.rebuildIndexes()) + } + return p + }) + ::ignoreErrors() } rebuildIndexes () { @@ -66,113 +73,120 @@ export default class Redis extends Collection { const idsIndex = `${prefix}_ids` return asyncMap(indexes, index => - redis.keys(`${prefix}_${index}:*`).then(keys => - keys.length !== 0 && redis.del(keys) + redis + .keys(`${prefix}_${index}:*`) + .then(keys => keys.length !== 0 && redis.del(keys)) + ).then(() => + asyncMap(redis.smembers(idsIndex), id => + redis.hgetall(`${prefix}:${id}`).then( + values => + values == null + ? redis.srem(idsIndex, id) // entry no longer exists + : asyncMap(indexes, index => { + const value = values[index] + if (value !== undefined) { + return redis.sadd( + `${prefix}_${index}:${String(value).toLowerCase()}`, + id + ) + } + }) + ) ) - ).then(() => asyncMap(redis.smembers(idsIndex), id => - redis.hgetall(`${prefix}:${id}`).then(values => - values == null - ? redis.srem(idsIndex, id) // entry no longer exists - : asyncMap(indexes, index => { - const value = values[index] - if (value !== undefined) { - return redis.sadd( - `${prefix}_${index}:${String(value).toLowerCase()}`, - id - ) - } - }) - ) - )) + ) } _extract (ids) { const prefix = this.prefix + ':' - const {redis} = this + const { redis } = this const models = [] - return Promise.all(map(ids, id => { - return redis.hgetall(prefix + id).then(model => { - // If empty, consider it a no match. - if (isEmpty(model)) { - return - } + return Promise.all( + map(ids, id => { + return redis.hgetall(prefix + id).then(model => { + // If empty, consider it a no match. + if (isEmpty(model)) { + return + } - // Mix the identifier in. - model.id = id + // Mix the identifier in. 
+ model.id = id - models.push(model) + models.push(model) + }) }) - })).then(() => models) + ).then(() => models) } - _add (models, {replace = false} = {}) { + _add (models, { replace = false } = {}) { // TODO: remove “replace” which is a temporary measure, implement // “set()” instead. - const {indexes, prefix, redis} = this + const { indexes, prefix, redis } = this - return Promise.all(map(models, async model => { - // Generate a new identifier if necessary. - if (model.id === undefined) { - model.id = generateUuid() - } - const { id } = model + return Promise.all( + map(models, async model => { + // Generate a new identifier if necessary. + if (model.id === undefined) { + model.id = generateUuid() + } + const { id } = model - const newEntry = await redis.sadd(prefix + '_ids', id) + const newEntry = await redis.sadd(prefix + '_ids', id) - if (!newEntry) { - if (!replace) { - throw new ModelAlreadyExists(id) + if (!newEntry) { + if (!replace) { + throw new ModelAlreadyExists(id) + } + + // remove the previous values from indexes + if (indexes.length !== 0) { + const previous = await redis.hgetall(`${prefix}:${id}`) + await asyncMap(indexes, index => { + const value = previous[index] + if (value !== undefined) { + return redis.srem( + `${prefix}_${index}:${String(value).toLowerCase()}`, + id + ) + } + }) + } } - // remove the previous values from indexes - if (indexes.length !== 0) { - const previous = await redis.hgetall(`${prefix}:${id}`) - await asyncMap(indexes, index => { - const value = previous[index] - if (value !== undefined) { - return redis.srem(`${prefix}_${index}:${String(value).toLowerCase()}`, id) - } - }) - } - } + const params = [] + forEach(model, (value, name) => { + // No need to store the identifier (already in the key). + if (name === 'id') { + return + } - const params = [] - forEach(model, (value, name) => { - // No need to store the identifier (already in the key). - if (name === 'id') { - return - } + params.push(name, value) + }) - params.push(name, value) + const key = `${prefix}:${id}` + const promises = [redis.del(key), redis.hmset(key, ...params)] + + // Update indexes. + forEach(indexes, index => { + const value = model[index] + if (value === undefined) { + return + } + + const key = prefix + '_' + index + ':' + String(value).toLowerCase() + promises.push(redis.sadd(key, id)) + }) + + await Promise.all(promises) + + return model }) - - const key = `${prefix}:${id}` - const promises = [ - redis.del(key), - redis.hmset(key, ...params), - ] - - // Update indexes. - forEach(indexes, (index) => { - const value = model[index] - if (value === undefined) { - return - } - - const key = prefix + '_' + index + ':' + String(value).toLowerCase() - promises.push(redis.sadd(key, id)) - }) - - await Promise.all(promises) - - return model - })) + ) } _get (properties) { - const {prefix, redis} = this + const { prefix, redis } = this if (isEmpty(properties)) { return redis.smembers(prefix + '_ids').then(ids => this._extract(ids)) @@ -183,13 +197,11 @@ export default class Redis extends Collection { if (id !== undefined) { delete properties.id return this._extract([id]).then(models => { - return (models.length && !isEmpty(properties)) - ? filter(models) - : models + return models.length && !isEmpty(properties) ? filter(models) : models }) } - const {indexes} = this + const { indexes } = this // Check for non indexed fields. 
const unfit = difference(getKeys(properties), indexes) @@ -197,7 +209,10 @@ export default class Redis extends Collection { throw new Error('fields not indexed: ' + unfit.join()) } - const keys = map(properties, (value, index) => `${prefix}_${index}:${String(value).toLowerCase()}`) + const keys = map( + properties, + (value, index) => `${prefix}_${index}:${String(value).toLowerCase()}` + ) return redis.sinter(...keys).then(ids => this._extract(ids)) } @@ -213,16 +228,24 @@ export default class Redis extends Collection { // update other indexes if (indexes.length !== 0) { - promise = Promise.all([ promise, asyncMap(ids, id => - redis.hgetall(`${prefix}:${id}`).then(values => - values != null && asyncMap(indexes, index => { - const value = values[index] - if (value !== undefined) { - return redis.srem(`${prefix}_${index}:${String(value).toLowerCase()}`, id) - } - }) - ) - ) ]) + promise = Promise.all([ + promise, + asyncMap(ids, id => + redis.hgetall(`${prefix}:${id}`).then( + values => + values != null && + asyncMap(indexes, index => { + const value = values[index] + if (value !== undefined) { + return redis.srem( + `${prefix}_${index}:${String(value).toLowerCase()}`, + id + ) + } + }) + ) + ), + ]) } return promise.then(() => diff --git a/packages/xo-server/src/connection.js b/packages/xo-server/src/connection.js index 22d30cfac..66ff8b0b9 100644 --- a/packages/xo-server/src/connection.js +++ b/packages/xo-server/src/connection.js @@ -1,6 +1,6 @@ -import {EventEmitter} from 'events' +import { EventEmitter } from 'events' -import {createRawObject, noop} from './utils' +import { createRawObject, noop } from './utils' // =================================================================== @@ -21,7 +21,7 @@ export default class Connection extends EventEmitter { // Gets the value for this key. 
get (key, defaultValue) { - const {_data: data} = this + const { _data: data } = this if (key in data) { return data[key] diff --git a/packages/xo-server/src/decorators.js b/packages/xo-server/src/decorators.js index 87df27291..4def99c02 100644 --- a/packages/xo-server/src/decorators.js +++ b/packages/xo-server/src/decorators.js @@ -1,16 +1,10 @@ import { getBoundPropertyDescriptor } from 'bind-property-descriptor' -import { - isArray, - isFunction, -} from './utils' +import { isArray, isFunction } from './utils' // =================================================================== -const { - defineProperties, - getOwnPropertyDescriptor, -} = Object +const { defineProperties, getOwnPropertyDescriptor } = Object // =================================================================== @@ -27,10 +21,12 @@ export const debounce = duration => (target, name, descriptor) => { const s = Symbol(`debounced ${name} data`) function debounced () { - const data = this[s] || (this[s] = { - lastCall: 0, - wrapper: null, - }) + const data = + this[s] || + (this[s] = { + lastCall: 0, + wrapper: null, + }) const now = Date.now() if (now > data.lastCall + duration) { @@ -39,12 +35,16 @@ export const debounce = duration => (target, name, descriptor) => { const result = fn.apply(this, arguments) data.wrapper = () => result } catch (error) { - data.wrapper = () => { throw error } + data.wrapper = () => { + throw error + } } } return data.wrapper() } - debounced.reset = obj => { delete obj[s] } + debounced.reset = obj => { + delete obj[s] + } descriptor.value = debounced return descriptor @@ -52,21 +52,12 @@ export const debounce = duration => (target, name, descriptor) => { // ------------------------------------------------------------------- -const _ownKeys = ( +const _ownKeys = (typeof Reflect !== 'undefined' && Reflect.ownKeys) || - (({ - getOwnPropertyNames: names, - getOwnPropertySymbols: symbols, - }) => symbols - ? obj => names(obj).concat(symbols(obj)) - : names - )(Object) -) + (({ getOwnPropertyNames: names, getOwnPropertySymbols: symbols }) => + symbols ? obj => names(obj).concat(symbols(obj)) : names)(Object) -const _isIgnoredProperty = name => ( - name[0] === '_' || - name === 'constructor' -) +const _isIgnoredProperty = name => name[0] === '_' || name === 'constructor' const _IGNORED_STATIC_PROPERTIES = { __proto__: null, @@ -81,7 +72,7 @@ const _isIgnoredStaticProperty = name => _IGNORED_STATIC_PROPERTIES[name] export const mixin = MixIns => Class => { if (!isArray(MixIns)) { - MixIns = [ MixIns ] + MixIns = [MixIns] } const { name } = Class @@ -103,9 +94,10 @@ export const mixin = MixIns => Class => { throw new Error(`${name}#${prop} is already defined`) } - ( - descriptors[prop] = getOwnPropertyDescriptor(MixIn, prop) - ).enumerable = false // Object methods are enumerable but class methods are not. + ;(descriptors[prop] = getOwnPropertyDescriptor( + MixIn, + prop + )).enumerable = false // Object methods are enumerable but class methods are not. } } defineProperties(prototype, descriptors) @@ -143,16 +135,15 @@ export const mixin = MixIns => Class => { const descriptors = { __proto__: null } for (const prop of _ownKeys(Class)) { let descriptor - if (!( - // Special properties are not defined... - _isIgnoredStaticProperty(prop) && - - // if they already exist... - (descriptor = getOwnPropertyDescriptor(Decorator, prop)) && - - // and are not configurable. - !descriptor.configurable - )) { + if ( + !( + _isIgnoredStaticProperty(prop) && + // if they already exist... 
+ (descriptor = getOwnPropertyDescriptor(Decorator, prop)) && + // and are not configurable. + !descriptor.configurable + ) + ) { descriptors[prop] = getOwnPropertyDescriptor(Class, prop) } } diff --git a/packages/xo-server/src/decorators.spec.js b/packages/xo-server/src/decorators.spec.js index 2fcaa693e..f3aecd443 100644 --- a/packages/xo-server/src/decorators.spec.js +++ b/packages/xo-server/src/decorators.spec.js @@ -1,6 +1,6 @@ /* eslint-env jest */ -import {debounce} from './decorators' +import { debounce } from './decorators' // =================================================================== diff --git a/packages/xo-server/src/fatfs-buffer.js b/packages/xo-server/src/fatfs-buffer.js index 2c0241fd9..8620a058d 100644 --- a/packages/xo-server/src/fatfs-buffer.js +++ b/packages/xo-server/src/fatfs-buffer.js @@ -27,28 +27,31 @@ export function init () { const buf = Buffer.alloc(TEN_MIB) // https://github.com/natevw/fatfs/blob/master/structs.js - fat16.pack({ - jmpBoot: Buffer.from('eb3c90', 'hex'), - OEMName: 'mkfs.fat', - BytsPerSec: SECTOR_SIZE, - SecPerClus: 4, - ResvdSecCnt: 1, - NumFATs: 2, - RootEntCnt: 512, - TotSec16: 20480, - Media: 248, - FATSz16: 20, - SecPerTrk: 32, - NumHeads: 64, - HiddSec: 0, - TotSec32: 0, - DrvNum: 128, - Reserved1: 0, - BootSig: 41, - VolID: 895111106, - VolLab: 'NO NAME ', - FilSysType: 'FAT16 ', - }, buf) + fat16.pack( + { + jmpBoot: Buffer.from('eb3c90', 'hex'), + OEMName: 'mkfs.fat', + BytsPerSec: SECTOR_SIZE, + SecPerClus: 4, + ResvdSecCnt: 1, + NumFATs: 2, + RootEntCnt: 512, + TotSec16: 20480, + Media: 248, + FATSz16: 20, + SecPerTrk: 32, + NumHeads: 64, + HiddSec: 0, + TotSec32: 0, + DrvNum: 128, + Reserved1: 0, + BootSig: 41, + VolID: 895111106, + VolLab: 'NO NAME ', + FilSysType: 'FAT16 ', + }, + buf + ) // End of sector. buf[0x1fe] = 0x55 diff --git a/packages/xo-server/src/index.js b/packages/xo-server/src/index.js index 852f626d5..7e25548af 100644 --- a/packages/xo-server/src/index.js +++ b/packages/xo-server/src/index.js @@ -17,11 +17,7 @@ import { join as joinPath } from 'path' import JsonRpcPeer from 'json-rpc-peer' import { invalidCredentials } from 'xo-common/api-errors' -import { - ensureDir, - readdir, - readFile, -} from 'fs-extra' +import { ensureDir, readdir, readFile } from 'fs-extra' import WebServer from 'http-server-plus' import Xo from './xo' @@ -52,10 +48,7 @@ const warn = (...args) => { // =================================================================== -const DEPRECATED_ENTRIES = [ - 'users', - 'servers', -] +const DEPRECATED_ENTRIES = ['users', 'servers'] async function loadConfiguration () { const config = await appConf.load('xo-server', { @@ -85,13 +78,15 @@ function createExpressApp () { // Registers the cookie-parser and express-session middlewares, // necessary for connect-flash. app.use(cookieParser()) - app.use(expressSession({ - resave: false, - saveUninitialized: false, + app.use( + expressSession({ + resave: false, + saveUninitialized: false, - // TODO: should be in the config file. - secret: 'CLWguhRZAZIXZcbrMzHCYmefxgweItKnS', - })) + // TODO: should be in the config file. + secret: 'CLWguhRZAZIXZcbrMzHCYmefxgweItKnS', + }) + ) // Registers the connect-flash middleware, necessary for Passport to // display error messages. 
@@ -112,7 +107,7 @@ async function setUpPassport (express, xo) { xo.registerPassportStrategy = strategy => { passport.use(strategy) - const {name} = strategy + const { name } = strategy if (name !== 'local') { strategies[name] = strategy.label || name } @@ -123,10 +118,12 @@ async function setUpPassport (express, xo) { await readFile(joinPath(__dirname, '..', 'signin.pug')) ) express.get('/signin', (req, res, next) => { - res.send(signInPage({ - error: req.flash('error')[0], - strategies, - })) + res.send( + signInPage({ + error: req.flash('error')[0], + strategies, + }) + ) }) express.get('/signout', (req, res) => { @@ -154,7 +151,7 @@ async function setUpPassport (express, xo) { // browsers do not save cookies on redirect. req.flash( 'token', - (await xo.createAuthenticationToken({userId: user.id})).id + (await xo.createAuthenticationToken({ userId: user.id })).id ) // The session is only persistent for internal provider and if 'Remember me' box is checked @@ -183,7 +180,9 @@ async function setUpPassport (express, xo) { next() } else if (req.cookies.token) { next() - } else if (/favicon|fontawesome|images|styles|\.(?:css|jpg|png)$/.test(url)) { + } else if ( + /favicon|fontawesome|images|styles|\.(?:css|jpg|png)$/.test(url) + ) { next() } else { req.flash('return-url', url) @@ -192,16 +191,16 @@ async function setUpPassport (express, xo) { }) // Install the local strategy. - xo.registerPassportStrategy(new LocalStrategy( - async (username, password, done) => { + xo.registerPassportStrategy( + new LocalStrategy(async (username, password, done) => { try { - const user = await xo.authenticateUser({username, password}) + const user = await xo.authenticateUser({ username, password }) done(null, user) } catch (error) { done(null, false, { message: error.message }) } - } - )) + }) + ) } // =================================================================== @@ -274,40 +273,44 @@ async function registerPluginsInPath (path) { throw error }) - await Promise.all(mapToArray(files, name => { - if (startsWith(name, PLUGIN_PREFIX)) { - return registerPluginWrapper.call( - this, - `${path}/${name}`, - name.slice(PLUGIN_PREFIX_LENGTH) - ) - } - })) + await Promise.all( + mapToArray(files, name => { + if (startsWith(name, PLUGIN_PREFIX)) { + return registerPluginWrapper.call( + this, + `${path}/${name}`, + name.slice(PLUGIN_PREFIX_LENGTH) + ) + } + }) + ) } async function registerPlugins (xo) { - await Promise.all(mapToArray([ - `${__dirname}/../node_modules/`, - '/usr/local/lib/node_modules/', - ], xo::registerPluginsInPath)) + await Promise.all( + mapToArray( + [`${__dirname}/../node_modules/`, '/usr/local/lib/node_modules/'], + xo::registerPluginsInPath + ) + ) } // =================================================================== -async function makeWebServerListen (webServer, { - certificate, +async function makeWebServerListen ( + webServer, + { + certificate, - // The properties was called `certificate` before. - cert = certificate, + // The properties was called `certificate` before. 
+ cert = certificate, - key, - ...opts -}) { + key, + ...opts + } +) { if (cert && key) { - [opts.cert, opts.key] = await Promise.all([ - readFile(cert), - readFile(key), - ]) + ;[opts.cert, opts.key] = await Promise.all([readFile(cert), readFile(key)]) } try { const niceAddress = await webServer.listen(opts) @@ -316,7 +319,7 @@ async function makeWebServerListen (webServer, { if (error.niceAddress) { warn(`Web server could not listen on ${error.niceAddress}`) - const {code} = error + const { code } = error if (code === 'EACCES') { warn(' Access denied.') warn(' Ports < 1024 are often reserved to privileges users.') @@ -332,9 +335,11 @@ async function makeWebServerListen (webServer, { async function createWebServer ({ listen, listenOptions }) { const webServer = new WebServer() - await Promise.all(mapToArray(listen, - opts => makeWebServerListen(webServer, { ...listenOptions, ...opts }) - )) + await Promise.all( + mapToArray(listen, opts => + makeWebServerListen(webServer, { ...listenOptions, ...opts }) + ) + ) return webServer } @@ -348,7 +353,7 @@ const setUpProxies = (express, opts, xo) => { const proxy = createProxyServer({ ignorePath: true, - }).on('error', (error) => console.error(error)) + }).on('error', error => console.error(error)) // TODO: sort proxies by descending prefix length. @@ -464,7 +469,9 @@ const setUpApi = (webServer, xo, verboseLogsOnErrors) => { } webServer.on('upgrade', (req, socket, head) => { if (req.url === '/api/') { - webSocketServer.handleUpgrade(req, socket, head, ws => onConnection(ws, req)) + webSocketServer.handleUpgrade(req, socket, head, ws => + onConnection(ws, req) + ) } }) } @@ -492,7 +499,7 @@ const setUpConsoleProxy = (webServer, xo) => { const { token } = parseCookies(req.headers.cookie) const user = await xo.authenticateUser({ token }) - if (!await xo.hasPermissions(user.id, [ [ id, 'operate' ] ])) { + if (!await xo.hasPermissions(user.id, [[id, 'operate']])) { throw invalidCredentials() } @@ -518,10 +525,7 @@ const setUpConsoleProxy = (webServer, xo) => { // =================================================================== -const USAGE = (({ - name, - version, -}) => `Usage: ${name} [--safe-mode] +const USAGE = (({ name, version }) => `Usage: ${name} [--safe-mode] ${name} v${version}`)(require('../package.json')) @@ -545,7 +549,7 @@ export default async function main (args) { // Now the web server is listening, drop privileges. try { - const {user, group} = config + const { user, group } = config if (group) { process.setgid(group) debug('Group changed to', group) @@ -576,10 +580,7 @@ export default async function main (args) { if (config.http.redirectToHttps) { let port forEach(config.http.listen, listen => { - if ( - listen.port && - (listen.cert || listen.certificate) - ) { + if (listen.port && (listen.cert || listen.certificate)) { port = listen.port return false } @@ -629,7 +630,7 @@ export default async function main (args) { // // TODO: implements a timeout? (or maybe it is the services launcher // responsibility?) 
- forEach([ 'SIGINT', 'SIGTERM' ], signal => { + forEach(['SIGINT', 'SIGTERM'], signal => { let alreadyCalled = false process.on(signal, () => { diff --git a/packages/xo-server/src/job-executor.js b/packages/xo-server/src/job-executor.js index 4dc608ba7..c01f776cc 100644 --- a/packages/xo-server/src/job-executor.js +++ b/packages/xo-server/src/job-executor.js @@ -2,20 +2,10 @@ import Bluebird from 'bluebird' import { BaseError } from 'make-error' import { createPredicate } from 'value-matcher' import { timeout } from 'promise-toolbox' -import { - assign, - filter, - find, - isEmpty, - map, - mapValues, -} from 'lodash' +import { assign, filter, find, isEmpty, map, mapValues } from 'lodash' import { crossProduct } from './math' -import { - serializeError, - thunkToArray, -} from './utils' +import { serializeError, thunkToArray } from './utils' export class JobExecutorError extends BaseError {} export class UnsupportedJobType extends JobExecutorError { @@ -36,9 +26,9 @@ const paramsVectorActionsMap = { return mapValues(mapping, key => value[key]) }, crossProduct ({ items }) { - return thunkToArray(crossProduct( - map(items, value => resolveParamsVector.call(this, value)) - )) + return thunkToArray( + crossProduct(map(items, value => resolveParamsVector.call(this, value))) + ) }, fetchObjects ({ pattern }) { const objects = filter(this.xo.getObjects(), createPredicate(pattern)) @@ -74,9 +64,11 @@ export default class JobExecutor { this.xo = xo // The logger is not available until Xo has started. - xo.on('start', () => xo.getLogger('jobs').then(logger => { - this._logger = logger - })) + xo.on('start', () => + xo.getLogger('jobs').then(logger => { + this._logger = logger + }) + ) } async exec (job) { @@ -130,51 +122,68 @@ export default class JobExecutor { timezone: schedule !== undefined ? schedule.timezone : undefined, } - await Bluebird.map(paramsFlatVector, params => { - const runCallId = this._logger.notice(`Starting ${job.method} call. (${job.id})`, { - event: 'jobCall.start', - runJobId, - method: job.method, - params, - }) - - const call = execStatus.calls[runCallId] = { - method: job.method, - params, - start: Date.now(), - } - let promise = this.xo.callApiMethod(connection, job.method, assign({}, params)) - if (job.timeout) { - promise = promise::timeout(job.timeout) - } - - return promise.then( - value => { - this._logger.notice(`Call ${job.method} (${runCallId}) is a success. (${job.id})`, { - event: 'jobCall.end', + await Bluebird.map( + paramsFlatVector, + params => { + const runCallId = this._logger.notice( + `Starting ${job.method} call. (${job.id})`, + { + event: 'jobCall.start', runJobId, - runCallId, - returnedValue: value, - }) + method: job.method, + params, + } + ) - call.returnedValue = value - call.end = Date.now() - }, - reason => { - this._logger.notice(`Call ${job.method} (${runCallId}) has failed. (${job.id})`, { - event: 'jobCall.end', - runJobId, - runCallId, - error: serializeError(reason), - }) - - call.error = reason - call.end = Date.now() + const call = (execStatus.calls[runCallId] = { + method: job.method, + params, + start: Date.now(), + }) + let promise = this.xo.callApiMethod( + connection, + job.method, + assign({}, params) + ) + if (job.timeout) { + promise = promise::timeout(job.timeout) } - ) - }, { - concurrency: 2, - }) + + return promise.then( + value => { + this._logger.notice( + `Call ${job.method} (${runCallId}) is a success. 
(${job.id})`, + { + event: 'jobCall.end', + runJobId, + runCallId, + returnedValue: value, + } + ) + + call.returnedValue = value + call.end = Date.now() + }, + reason => { + this._logger.notice( + `Call ${job.method} (${runCallId}) has failed. (${job.id})`, + { + event: 'jobCall.end', + runJobId, + runCallId, + error: serializeError(reason), + } + ) + + call.error = reason + call.end = Date.now() + } + ) + }, + { + concurrency: 2, + } + ) connection.close() execStatus.end = Date.now() diff --git a/packages/xo-server/src/job-executor.spec.js b/packages/xo-server/src/job-executor.spec.js index b47ce0569..f879d0a2a 100644 --- a/packages/xo-server/src/job-executor.spec.js +++ b/packages/xo-server/src/job-executor.spec.js @@ -4,97 +4,113 @@ import { forEach } from 'lodash' import { resolveParamsVector } from './job-executor' describe('resolveParamsVector', function () { - forEach({ - 'cross product with three sets': [ - // Expected result. - [ { id: 3, value: 'foo', remote: 'local' }, - { id: 7, value: 'foo', remote: 'local' }, - { id: 10, value: 'foo', remote: 'local' }, - { id: 3, value: 'bar', remote: 'local' }, - { id: 7, value: 'bar', remote: 'local' }, - { id: 10, value: 'bar', remote: 'local' } ], - // Entry. - { - type: 'crossProduct', - items: [{ - type: 'set', - values: [ { id: 3 }, { id: 7 }, { id: 10 } ], - }, { - type: 'set', - values: [ { value: 'foo' }, { value: 'bar' } ], - }, { - type: 'set', - values: [ { remote: 'local' } ], - }], - }, - ], - 'cross product with `set` and `map`': [ - // Expected result. - [ - { remote: 'local', id: 'vm:2' }, - { remote: 'smb', id: 'vm:2' }, + forEach( + { + 'cross product with three sets': [ + // Expected result. + [ + { id: 3, value: 'foo', remote: 'local' }, + { id: 7, value: 'foo', remote: 'local' }, + { id: 10, value: 'foo', remote: 'local' }, + { id: 3, value: 'bar', remote: 'local' }, + { id: 7, value: 'bar', remote: 'local' }, + { id: 10, value: 'bar', remote: 'local' }, + ], + // Entry. + { + type: 'crossProduct', + items: [ + { + type: 'set', + values: [{ id: 3 }, { id: 7 }, { id: 10 }], + }, + { + type: 'set', + values: [{ value: 'foo' }, { value: 'bar' }], + }, + { + type: 'set', + values: [{ remote: 'local' }], + }, + ], + }, ], + 'cross product with `set` and `map`': [ + // Expected result. + [{ remote: 'local', id: 'vm:2' }, { remote: 'smb', id: 'vm:2' }], - // Entry. - { - type: 'crossProduct', - items: [{ - type: 'set', - values: [ { remote: 'local' }, { remote: 'smb' } ], - }, { - type: 'map', - collection: { - type: 'fetchObjects', - pattern: { - $pool: { __or: [ 'pool:1', 'pool:8', 'pool:12' ] }, - power_state: 'Running', - tags: [ 'foo' ], - type: 'VM', + // Entry. + { + type: 'crossProduct', + items: [ + { + type: 'set', + values: [{ remote: 'local' }, { remote: 'smb' }], + }, + { + type: 'map', + collection: { + type: 'fetchObjects', + pattern: { + $pool: { __or: ['pool:1', 'pool:8', 'pool:12'] }, + power_state: 'Running', + tags: ['foo'], + type: 'VM', + }, + }, + iteratee: { + type: 'extractProperties', + mapping: { id: 'id' }, + }, + }, + ], + }, + + // Context. 
+ { + xo: { + getObjects: function () { + return [ + { + id: 'vm:1', + $pool: 'pool:1', + tags: [], + type: 'VM', + power_state: 'Halted', + }, + { + id: 'vm:2', + $pool: 'pool:1', + tags: ['foo'], + type: 'VM', + power_state: 'Running', + }, + { + id: 'host:1', + type: 'host', + power_state: 'Running', + }, + { + id: 'vm:3', + $pool: 'pool:8', + tags: ['foo'], + type: 'VM', + power_state: 'Halted', + }, + ] }, }, - iteratee: { - type: 'extractProperties', - mapping: { id: 'id' }, - }, - }], - }, - - // Context. - { - xo: { - getObjects: function () { - return [{ - id: 'vm:1', - $pool: 'pool:1', - tags: [], - type: 'VM', - power_state: 'Halted', - }, { - id: 'vm:2', - $pool: 'pool:1', - tags: [ 'foo' ], - type: 'VM', - power_state: 'Running', - }, { - id: 'host:1', - type: 'host', - power_state: 'Running', - }, { - id: 'vm:3', - $pool: 'pool:8', - tags: [ 'foo' ], - type: 'VM', - power_state: 'Halted', - }] - }, }, - }, - ], - }, ([ expectedResult, entry, context ], name) => { - describe(`with ${name}`, () => { - it('Resolves params vector', () => { - expect(resolveParamsVector.call(context, entry)).toEqual(expectedResult) + ], + }, + ([expectedResult, entry, context], name) => { + describe(`with ${name}`, () => { + it('Resolves params vector', () => { + expect(resolveParamsVector.call(context, entry)).toEqual( + expectedResult + ) + }) }) - }) - }) + } + ) }) diff --git a/packages/xo-server/src/logs-cli.js b/packages/xo-server/src/logs-cli.js index a657f5284..917892bb9 100644 --- a/packages/xo-server/src/logs-cli.js +++ b/packages/xo-server/src/logs-cli.js @@ -8,26 +8,26 @@ import sublevel from 'level-sublevel' import util from 'util' import { repair as repairDb } from 'level' -import {forEach} from './utils' +import { forEach } from './utils' import globMatcher from './glob-matcher' // =================================================================== async function printLogs (db, args) { - let stream = highland(db.createReadStream({reverse: true})) + let stream = highland(db.createReadStream({ reverse: true })) if (args.since) { - stream = stream.filter(({value}) => (value.time >= args.since)) + stream = stream.filter(({ value }) => value.time >= args.since) } if (args.until) { - stream = stream.filter(({value}) => (value.time <= args.until)) + stream = stream.filter(({ value }) => value.time <= args.until) } const fields = Object.keys(args.matchers) if (fields.length > 0) { - stream = stream.filter(({value}) => { + stream = stream.filter(({ value }) => { for (const field of fields) { const fieldValue = get(value, field) if (fieldValue === undefined || !args.matchers[field](fieldValue)) { @@ -42,10 +42,9 @@ async function printLogs (db, args) { stream = stream.take(args.limit) if (args.json) { - stream = highland(stream.pipe(ndjson.serialize())) - .each(value => { - process.stdout.write(value) - }) + stream = highland(stream.pipe(ndjson.serialize())).each(value => { + process.stdout.write(value) + }) } else { stream = stream.each(value => { console.log(util.inspect(value, { depth: null })) @@ -126,7 +125,7 @@ function getArgs () { patterns[pattern] ? patterns[field].push(pattern) - : patterns[field] = [ pattern ] + : (patterns[field] = [pattern]) } else if (!patterns[value]) { patterns[value] = null } @@ -137,7 +136,7 @@ function getArgs () { for (const field in patterns) { const values = patterns[field] - args.matchers[field] = (values === null) ? trueFunction : globMatcher(values) + args.matchers[field] = values === null ? 
trueFunction : globMatcher(values) } // Warning: minimist makes one array of values if the same option is used many times. @@ -147,7 +146,6 @@ function getArgs () { throw new Error(`error: too many values for ${arg} argument`) } }) - ;['since', 'until'].forEach(arg => { if (args[arg] !== undefined) { args[arg] = Date.parse(args[arg]) @@ -158,7 +156,7 @@ function getArgs () { } }) - if (isNaN(args.limit = +args.limit)) { + if (isNaN((args.limit = +args.limit))) { throw new Error('error: limit is not a valid number') } @@ -193,10 +191,9 @@ export default async function main () { return } - const db = sublevel(levelup( - `${config.datadir}/leveldb`, - { valueEncoding: 'json' } - )).sublevel('logs') + const db = sublevel( + levelup(`${config.datadir}/leveldb`, { valueEncoding: 'json' }) + ).sublevel('logs') return printLogs(db, args) } diff --git a/packages/xo-server/src/lvm.js b/packages/xo-server/src/lvm.js index 3e2fd0530..d05a35e41 100644 --- a/packages/xo-server/src/lvm.js +++ b/packages/xo-server/src/lvm.js @@ -9,25 +9,29 @@ const parse = createParser({ keyTransform: key => key.slice(5).toLowerCase(), }) const makeFunction = command => (fields, ...args) => - execa.stdout(command, [ - '--noheading', - '--nosuffix', - '--nameprefixes', - '--unbuffered', - '--units', - 'b', - '-o', - String(fields), - ...args, - ]).then(stdout => map( - splitLines(stdout), - isArray(fields) - ? parse - : line => { - const data = parse(line) - return data[fields] - } - )) + execa + .stdout(command, [ + '--noheading', + '--nosuffix', + '--nameprefixes', + '--unbuffered', + '--units', + 'b', + '-o', + String(fields), + ...args, + ]) + .then(stdout => + map( + splitLines(stdout), + isArray(fields) + ? parse + : line => { + const data = parse(line) + return data[fields] + } + ) + ) export const lvs = makeFunction('lvs') export const pvs = makeFunction('pvs') diff --git a/packages/xo-server/src/math.js b/packages/xo-server/src/math.js index 81063a679..5bb52dfa3 100644 --- a/packages/xo-server/src/math.js +++ b/packages/xo-server/src/math.js @@ -11,7 +11,7 @@ const _combine = (vectors, n, cb) => { const m = vector.length if (n === 1) { for (let i = 0; i < m; ++i) { - cb([ vector[i] ]) // eslint-disable-line standard/no-callback-literal + cb([vector[i]]) // eslint-disable-line standard/no-callback-literal } return } @@ -19,7 +19,7 @@ const _combine = (vectors, n, cb) => { for (let i = 0; i < m; ++i) { const value = vector[i] - _combine(vectors, nLast, (vector) => { + _combine(vectors, nLast, vector => { vector.push(value) cb(vector) }) @@ -41,8 +41,7 @@ export const mergeObjects = objects => assign({}, ...objects) // // Ex: crossProduct([ [ { a: 2 }, { b: 3 } ], [ { c: 5 }, { d: 7 } ] ] ) // => [ { a: 2, c: 5 }, { b: 3, c: 5 }, { a: 2, d: 7 }, { b: 3, d: 7 } ] -export const crossProduct = (vectors, mergeFn = mergeObjects) => cb => ( +export const crossProduct = (vectors, mergeFn = mergeObjects) => cb => combine(vectors)(vector => { cb(mergeFn(vector)) }) -) diff --git a/packages/xo-server/src/math.spec.js b/packages/xo-server/src/math.spec.js index 588d4ac9f..88cb83a01 100644 --- a/packages/xo-server/src/math.spec.js +++ b/packages/xo-server/src/math.spec.js @@ -2,41 +2,36 @@ import { forEach } from 'lodash' import { thunkToArray } from './utils' -import { - crossProduct, - mergeObjects, -} from './math' +import { crossProduct, mergeObjects } from './math' describe('mergeObjects', function () { - forEach({ - 'Two sets of one': [ - {a: 1, b: 2}, {a: 1}, {b: 2}, - ], - 'Two sets of two': [ - {a: 1, b: 2, c: 3, d: 4}, 
{a: 1, b: 2}, {c: 3, d: 4},
-    ],
-    'Three sets': [
-      {a: 1, b: 2, c: 3, d: 4, e: 5, f: 6}, {a: 1}, {b: 2, c: 3}, {d: 4, e: 5, f: 6},
-    ],
-    'One set': [
-      {a: 1, b: 2}, {a: 1, b: 2},
-    ],
-    'Empty set': [
-      {a: 1}, {a: 1}, {},
-    ],
-    'All empty': [
-      {}, {}, {},
-    ],
-    'No set': [
-      {},
-    ],
-  }, ([ resultSet, ...sets ], name) => {
-    describe(`with ${name}`, () => {
-      it('Assembles all given param sets in on set', function () {
-        expect(mergeObjects(sets)).toEqual(resultSet)
+  forEach(
+    {
+      'Two sets of one': [{ a: 1, b: 2 }, { a: 1 }, { b: 2 }],
+      'Two sets of two': [
+        { a: 1, b: 2, c: 3, d: 4 },
+        { a: 1, b: 2 },
+        { c: 3, d: 4 },
+      ],
+      'Three sets': [
+        { a: 1, b: 2, c: 3, d: 4, e: 5, f: 6 },
+        { a: 1 },
+        { b: 2, c: 3 },
+        { d: 4, e: 5, f: 6 },
+      ],
+      'One set': [{ a: 1, b: 2 }, { a: 1, b: 2 }],
+      'Empty set': [{ a: 1 }, { a: 1 }, {}],
+      'All empty': [{}, {}, {}],
+      'No set': [{}],
+    },
+    ([resultSet, ...sets], name) => {
+      describe(`with ${name}`, () => {
+        it('Assembles all given param sets in one set', function () {
+          expect(mergeObjects(sets)).toEqual(resultSet)
+        })
       })
-    })
-  })
+    }
+  )
 })
 
 describe('crossProduct', function () {
@@ -45,30 +40,43 @@ describe('crossProduct', function () {
 
   // Gives the product of all args
   const multiplyTest = args => args.reduce((prev, curr) => prev * curr, 1)
 
-  forEach({
-    '2 sets of 2 items to multiply': [
-      [10, 14, 15, 21], [[2, 3], [5, 7]], multiplyTest,
-    ],
-    '3 sets of 2 items to multiply': [
-      [110, 130, 154, 182, 165, 195, 231, 273], [[2, 3], [5, 7], [11, 13]], multiplyTest,
-    ],
-    '2 sets of 3 items to multiply': [
-      [14, 22, 26, 21, 33, 39, 35, 55, 65], [[2, 3, 5], [7, 11, 13]], multiplyTest,
-    ],
-    '2 sets of 2 items to add': [
-      [7, 9, 8, 10], [[2, 3], [5, 7]], addTest,
-    ],
-    '3 sets of 2 items to add': [
-      [18, 20, 20, 22, 19, 21, 21, 23], [[2, 3], [5, 7], [11, 13]], addTest,
-    ],
-    '2 sets of 3 items to add': [
-      [9, 13, 15, 10, 14, 16, 12, 16, 18], [[2, 3, 5], [7, 11, 13]], addTest,
-    ],
-  }, ([ product, items, cb ], name) => {
-    describe(`with ${name}`, () => {
-      it('Crosses sets of values with a crossProduct callback', function () {
-        expect(thunkToArray(crossProduct(items, cb)).sort()).toEqual(product.sort())
+  forEach(
+    {
+      '2 sets of 2 items to multiply': [
+        [10, 14, 15, 21],
+        [[2, 3], [5, 7]],
+        multiplyTest,
+      ],
+      '3 sets of 2 items to multiply': [
+        [110, 130, 154, 182, 165, 195, 231, 273],
+        [[2, 3], [5, 7], [11, 13]],
+        multiplyTest,
+      ],
+      '2 sets of 3 items to multiply': [
+        [14, 22, 26, 21, 33, 39, 35, 55, 65],
+        [[2, 3, 5], [7, 11, 13]],
+        multiplyTest,
+      ],
+      '2 sets of 2 items to add': [[7, 9, 8, 10], [[2, 3], [5, 7]], addTest],
+      '3 sets of 2 items to add': [
+        [18, 20, 20, 22, 19, 21, 21, 23],
+        [[2, 3], [5, 7], [11, 13]],
+        addTest,
+      ],
+      '2 sets of 3 items to add': [
+        [9, 13, 15, 10, 14, 16, 12, 16, 18],
+        [[2, 3, 5], [7, 11, 13]],
+        addTest,
+      ],
+    },
+    ([product, items, cb], name) => {
+      describe(`with ${name}`, () => {
+        it('Crosses sets of values with a crossProduct callback', function () {
+          expect(thunkToArray(crossProduct(items, cb)).sort()).toEqual(
+            product.sort()
+          )
+        })
       })
-    })
-  })
+    }
+  )
 })
diff --git a/packages/xo-server/src/model.js b/packages/xo-server/src/model.js
index 658a7d3d0..0fcc08b2c 100644
--- a/packages/xo-server/src/model.js
+++ b/packages/xo-server/src/model.js
@@ -1,10 +1,6 @@
-import {EventEmitter} from 'events'
+import { EventEmitter } from 'events'
 
-import {
-  forEach,
-  isEmpty,
-  isString,
-} from './utils'
+import { forEach, isEmpty, isString } from './utils'
 
 // 
=================================================================== @@ -35,7 +31,7 @@ export default class Model extends EventEmitter { // Check whether a property exists. has (name) { - return (this.properties[name] !== undefined) + return this.properties[name] !== undefined } // Set properties. diff --git a/packages/xo-server/src/models/acl.js b/packages/xo-server/src/models/acl.js index 0bc9aa7be..e20941999 100644 --- a/packages/xo-server/src/models/acl.js +++ b/packages/xo-server/src/models/acl.js @@ -1,10 +1,6 @@ import Collection from '../collection/redis' import Model from '../model' -import { - forEach, - mapToArray, - multiKeyHash, -} from '../utils' +import { forEach, mapToArray, multiKeyHash } from '../utils' // =================================================================== @@ -17,12 +13,15 @@ const DEFAULT_ACTION = 'admin' export default class Acl extends Model {} Acl.create = (subject, object, action) => { - return Acl.hash(subject, object, action).then(hash => new Acl({ - id: hash, - subject, - object, - action, - })) + return Acl.hash(subject, object, action).then( + hash => + new Acl({ + id: hash, + subject, + object, + action, + }) + ) } Acl.hash = (subject, object, action) => multiKeyHash(subject, object, action) @@ -62,13 +61,14 @@ export class Acls extends Collection { await this.remove(mapToArray(toUpdate, 'id')) // Compute the new ids (new hashes). - const {hash} = Acl - await Promise.all(mapToArray( - toUpdate, - (acl) => hash(acl.subject, acl.object, acl.action).then(id => { - acl.id = id - }) - )) + const { hash } = Acl + await Promise.all( + mapToArray(toUpdate, acl => + hash(acl.subject, acl.object, acl.action).then(id => { + acl.id = id + }) + ) + ) // Inserts the new (updated) entries. await this.add(toUpdate) diff --git a/packages/xo-server/src/models/group.js b/packages/xo-server/src/models/group.js index bb5914c3f..225705109 100644 --- a/packages/xo-server/src/models/group.js +++ b/packages/xo-server/src/models/group.js @@ -25,9 +25,7 @@ export class Groups extends Collection { async save (group) { // Serializes. let tmp - group.users = isEmpty(tmp = group.users) - ? undefined - : JSON.stringify(tmp) + group.users = isEmpty((tmp = group.users)) ? 
undefined : JSON.stringify(tmp) return /* await */ this.update(group) } diff --git a/packages/xo-server/src/models/plugin-metadata.js b/packages/xo-server/src/models/plugin-metadata.js index f35312148..a89424a3d 100644 --- a/packages/xo-server/src/models/plugin-metadata.js +++ b/packages/xo-server/src/models/plugin-metadata.js @@ -41,9 +41,13 @@ export class PluginsMetadata extends Collection { const { autoload, configuration } = pluginMetadata pluginMetadata.autoload = autoload === 'true' try { - pluginMetadata.configuration = configuration && JSON.parse(configuration) + pluginMetadata.configuration = + configuration && JSON.parse(configuration) } catch (error) { - console.warn('cannot parse pluginMetadata.configuration:', configuration) + console.warn( + 'cannot parse pluginMetadata.configuration:', + configuration + ) pluginMetadata.configuration = [] } }) diff --git a/packages/xo-server/src/models/remote.js b/packages/xo-server/src/models/remote.js index 74c795acf..6c56f3f97 100644 --- a/packages/xo-server/src/models/remote.js +++ b/packages/xo-server/src/models/remote.js @@ -1,8 +1,6 @@ import Collection from '../collection/redis' import Model from '../model' -import { - forEach, -} from '../utils' +import { forEach } from '../utils' // =================================================================== @@ -14,12 +12,14 @@ export class Remotes extends Collection { } create (name, url) { - return this.add(new Remote({ - name, - url, - enabled: false, - error: '', - })) + return this.add( + new Remote({ + name, + url, + enabled: false, + error: '', + }) + ) } async save (remote) { @@ -29,7 +29,7 @@ export class Remotes extends Collection { async get (properties) { const remotes = await super.get(properties) forEach(remotes, remote => { - remote.enabled = (remote.enabled === 'true') + remote.enabled = remote.enabled === 'true' }) return remotes } diff --git a/packages/xo-server/src/models/schedule.js b/packages/xo-server/src/models/schedule.js index a2b3184db..52fc52281 100644 --- a/packages/xo-server/src/models/schedule.js +++ b/packages/xo-server/src/models/schedule.js @@ -12,14 +12,16 @@ export class Schedules extends Collection { } create (userId, job, cron, enabled, name = undefined, timezone = undefined) { - return this.add(new Schedule({ - userId, - job, - cron, - enabled, - name, - timezone, - })) + return this.add( + new Schedule({ + userId, + job, + cron, + enabled, + name, + timezone, + }) + ) } async save (schedule) { @@ -29,7 +31,7 @@ export class Schedules extends Collection { async get (properties) { const schedules = await super.get(properties) forEach(schedules, schedule => { - schedule.enabled = (schedule.enabled === 'true') + schedule.enabled = schedule.enabled === 'true' }) return schedules } diff --git a/packages/xo-server/src/models/server.js b/packages/xo-server/src/models/server.js index 3fd176954..47090ed6a 100644 --- a/packages/xo-server/src/models/server.js +++ b/packages/xo-server/src/models/server.js @@ -18,7 +18,7 @@ export class Servers extends Collection { async create (params) { const { host } = params - if (await this.exists({host})) { + if (await this.exists({ host })) { throw new Error('server already exists') } diff --git a/packages/xo-server/src/models/user.js b/packages/xo-server/src/models/user.js index b4e28ab6e..d51924a1e 100644 --- a/packages/xo-server/src/models/user.js +++ b/packages/xo-server/src/models/user.js @@ -25,7 +25,7 @@ export class Users extends Collection { const { email } = properties // Avoid duplicates. 
- if (await this.exists({email})) { + if (await this.exists({ email })) { throw new Error(`the user ${email} already exists`) } @@ -39,10 +39,8 @@ export class Users extends Collection { async save (user) { // Serializes. let tmp - user.groups = isEmpty(tmp = user.groups) - ? undefined - : JSON.stringify(tmp) - user.preferences = isEmpty(tmp = user.preferences) + user.groups = isEmpty((tmp = user.groups)) ? undefined : JSON.stringify(tmp) + user.preferences = isEmpty((tmp = user.preferences)) ? undefined : JSON.stringify(tmp) diff --git a/packages/xo-server/src/proxy-console.js b/packages/xo-server/src/proxy-console.js index dfcaa714d..676cf13f1 100644 --- a/packages/xo-server/src/proxy-console.js +++ b/packages/xo-server/src/proxy-console.js @@ -1,7 +1,7 @@ import createDebug from 'debug' import partialStream from 'partial-stream' -import {connect} from 'tls' -import {parse} from 'url' +import { connect } from 'tls' +import { parse } from 'url' const debug = createDebug('xo:proxy-console') @@ -10,62 +10,79 @@ export default function proxyConsole (ws, vmConsole, sessionId) { let closed = false - const socket = connect({ - host: url.host, - port: url.port || 443, - rejectUnauthorized: false, - }, () => { - // Write headers. - socket.write([ - `CONNECT ${url.path} HTTP/1.0`, - `Host: ${url.hostname}`, - `Cookie: session_id=${sessionId}`, - '', '', - ].join('\r\n')) + const socket = connect( + { + host: url.host, + port: url.port || 443, + rejectUnauthorized: false, + }, + () => { + // Write headers. + socket.write( + [ + `CONNECT ${url.path} HTTP/1.0`, + `Host: ${url.hostname}`, + `Cookie: session_id=${sessionId}`, + '', + '', + ].join('\r\n') + ) - const onSend = (error) => { - if (error) { - debug('error sending to the XO client: %s', error.stack || error.message || error) - } - } - - socket.pipe(partialStream('\r\n\r\n', headers => { - // TODO: check status code 200. - debug('connected') - })).on('data', data => { - if (!closed) { - ws.send(data, onSend) - } - }).on('end', () => { - if (!closed) { - closed = true - debug('disconnected from the console') - } - - ws.close() - }) - - ws - .on('error', error => { - closed = true - debug('error from the XO client: %s', error.stack || error.message || error) - - socket.end() - }) - .on('message', data => { - if (!closed) { - socket.write(data) + const onSend = error => { + if (error) { + debug( + 'error sending to the XO client: %s', + error.stack || error.message || error + ) } - }) - .on('close', () => { - if (!closed) { + } + + socket + .pipe( + partialStream('\r\n\r\n', headers => { + // TODO: check status code 200. 
+ debug('connected') + }) + ) + .on('data', data => { + if (!closed) { + ws.send(data, onSend) + } + }) + .on('end', () => { + if (!closed) { + closed = true + debug('disconnected from the console') + } + + ws.close() + }) + + ws + .on('error', error => { closed = true - debug('disconnected from the XO client') - } + debug( + 'error from the XO client: %s', + error.stack || error.message || error + ) - socket.end() - }) - }).on('error', error => { + socket.end() + }) + .on('message', data => { + if (!closed) { + socket.write(data) + } + }) + .on('close', () => { + if (!closed) { + closed = true + debug('disconnected from the XO client') + } + + socket.end() + }) + } + ).on('error', error => { closed = true debug('error from the console: %s', error.stack || error.message || error) diff --git a/packages/xo-server/src/recover-account-cli.js b/packages/xo-server/src/recover-account-cli.js index 82c09a74c..eee93a8ab 100644 --- a/packages/xo-server/src/recover-account-cli.js +++ b/packages/xo-server/src/recover-account-cli.js @@ -4,12 +4,8 @@ import pw from 'pw' import Xo from './xo' import { generateToken } from './utils' -const recoverAccount = async ([ name ]) => { - if ( - name === undefined || - name === '--help' || - name === '-h' - ) { +const recoverAccount = async ([name]) => { + if (name === undefined || name === '--help' || name === '-h') { return ` xo-server-recover-account @@ -28,9 +24,11 @@ xo-server-recover-account console.log('The generated password is', password) } - const xo = new Xo(await appConf.load('xo-server', { - ignoreUnknownFormats: true, - })) + const xo = new Xo( + await appConf.load('xo-server', { + ignoreUnknownFormats: true, + }) + ) const user = await xo.getUserByName(name, true) if (user !== null) { diff --git a/packages/xo-server/src/remote-handlers/abstract.js b/packages/xo-server/src/remote-handlers/abstract.js index ca322d1e1..408afa553 100644 --- a/packages/xo-server/src/remote-handlers/abstract.js +++ b/packages/xo-server/src/remote-handlers/abstract.js @@ -12,7 +12,7 @@ import { export default class RemoteHandlerAbstract { constructor (remote) { - this._remote = {...remote, ...parse(remote.url)} + this._remote = { ...remote, ...parse(remote.url) } if (this._remote.type !== this.type) { throw new Error('Incorrect remote type') } @@ -66,7 +66,7 @@ export default class RemoteHandlerAbstract { error: error.message || String(error), } } finally { - this.unlink(testFileName)::ignoreErrors() + ;this.unlink(testFileName)::ignoreErrors() } } @@ -108,11 +108,10 @@ export default class RemoteHandlerAbstract { throw new Error('Not implemented') } - createReadStream (file, { - checksum = false, - ignoreMissingChecksum = false, - ...options - } = {}) { + createReadStream ( + file, + { checksum = false, ignoreMissingChecksum = false, ...options } = {} + ) { const streamP = this._createReadStream(file, options).then(stream => { // detect early errors let promise = eventToPromise(stream, 'readable') @@ -125,9 +124,11 @@ export default class RemoteHandlerAbstract { ) { promise = Promise.all([ promise, - this.getSize(file).then(size => { - stream.length = size - })::ignoreErrors(), + this.getSize(file) + .then(size => { + stream.length = size + }) + ::ignoreErrors(), ]) } @@ -139,16 +140,17 @@ export default class RemoteHandlerAbstract { } // avoid a unhandled rejection warning - streamP::ignoreErrors() + ;streamP::ignoreErrors() return this.readFile(`${file}.checksum`).then( - checksum => streamP.then(stream => { - const { length } = stream - stream = 
validChecksumOfReadStream(stream, String(checksum).trim()) - stream.length = length + checksum => + streamP.then(stream => { + const { length } = stream + stream = validChecksumOfReadStream(stream, String(checksum).trim()) + stream.length = length - return stream - }), + return stream + }), error => { if (ignoreMissingChecksum && error && error.code === 'ENOENT') { return streamP @@ -169,10 +171,7 @@ export default class RemoteHandlerAbstract { await this.outputFile(`${path}.checksum`, checksum) } - async createOutputStream (file, { - checksum = false, - ...options - } = {}) { + async createOutputStream (file, { checksum = false, ...options } = {}) { const streamP = this._createOutputStream(file, { flags: 'wx', ...options, @@ -201,11 +200,9 @@ export default class RemoteHandlerAbstract { throw new Error('Not implemented') } - async unlink (file, { - checksum = true, - } = {}) { + async unlink (file, { checksum = true } = {}) { if (checksum) { - this._unlink(`${file}.checksum`)::ignoreErrors() + ;this._unlink(`${file}.checksum`)::ignoreErrors() } return this._unlink(file) diff --git a/packages/xo-server/src/remote-handlers/nfs.js b/packages/xo-server/src/remote-handlers/nfs.js index a36717637..d60686f57 100644 --- a/packages/xo-server/src/remote-handlers/nfs.js +++ b/packages/xo-server/src/remote-handlers/nfs.js @@ -17,7 +17,14 @@ export default class NfsHandler extends LocalHandler { let stdout const mounted = {} try { - stdout = await execa.stdout('findmnt', ['-P', '-t', 'nfs,nfs4', '--output', 'SOURCE,TARGET', '--noheadings']) + stdout = await execa.stdout('findmnt', [ + '-P', + '-t', + 'nfs,nfs4', + '--output', + 'SOURCE,TARGET', + '--noheadings', + ]) const regex = /^SOURCE="([^:]*):(.*)" TARGET="(.*)"$/ forEach(stdout.split('\n'), m => { if (m) { @@ -45,7 +52,14 @@ export default class NfsHandler extends LocalHandler { async _mount () { await fs.ensureDir(this._getRealPath()) - return execa('mount', ['-t', 'nfs', '-o', 'vers=3', `${this._remote.host}:${this._remote.path}`, this._getRealPath()]) + return execa('mount', [ + '-t', + 'nfs', + '-o', + 'vers=3', + `${this._remote.host}:${this._remote.path}`, + this._getRealPath(), + ]) } async _sync () { diff --git a/packages/xo-server/src/remote-handlers/smb.js b/packages/xo-server/src/remote-handlers/smb.js index 4ab6685bd..03b48cba3 100644 --- a/packages/xo-server/src/remote-handlers/smb.js +++ b/packages/xo-server/src/remote-handlers/smb.js @@ -1,19 +1,14 @@ import Smb2 from '@marsaud/smb2-promise' import RemoteHandlerAbstract from './abstract' -import { - noop, - pFinally, -} from '../utils' +import { noop, pFinally } from '../utils' // Normalize the error code for file not found. const normalizeError = error => { const { code } = error - return ( - code === 'STATUS_OBJECT_NAME_NOT_FOUND' || + return code === 'STATUS_OBJECT_NAME_NOT_FOUND' || code === 'STATUS_OBJECT_PATH_NOT_FOUND' - ) ? Object.create(error, { code: { configurable: true, @@ -50,9 +45,7 @@ export default class SmbHandler extends RemoteHandlerAbstract { file = undefined } - let path = (this._remote.path !== '') - ? this._remote.path - : '' + let path = this._remote.path !== '' ? this._remote.path : '' // Ensure remote path is a directory. 
if (path !== '' && path[path.length - 1] !== '\\') { @@ -94,7 +87,9 @@ export default class SmbHandler extends RemoteHandlerAbstract { await client.ensureDir(dir) } - return client.writeFile(path, data, options)::pFinally(() => { client.close() }) + return client.writeFile(path, data, options)::pFinally(() => { + client.close() + }) } async _readFile (file, options = {}) { @@ -102,7 +97,11 @@ export default class SmbHandler extends RemoteHandlerAbstract { let content try { - content = await client.readFile(this._getFilePath(file), options)::pFinally(() => { client.close() }) + content = await client + .readFile(this._getFilePath(file), options) + ::pFinally(() => { + client.close() + }) } catch (error) { throw normalizeError(error) } @@ -114,7 +113,11 @@ export default class SmbHandler extends RemoteHandlerAbstract { const client = this._getClient(this._remote) try { - await client.rename(this._getFilePath(oldPath), this._getFilePath(newPath))::pFinally(() => { client.close() }) + await client + .rename(this._getFilePath(oldPath), this._getFilePath(newPath)) + ::pFinally(() => { + client.close() + }) } catch (error) { throw normalizeError(error) } @@ -125,7 +128,9 @@ export default class SmbHandler extends RemoteHandlerAbstract { let list try { - list = await client.readdir(this._getFilePath(dir))::pFinally(() => { client.close() }) + list = await client.readdir(this._getFilePath(dir))::pFinally(() => { + client.close() + }) } catch (error) { throw normalizeError(error) } @@ -170,7 +175,9 @@ export default class SmbHandler extends RemoteHandlerAbstract { const client = this._getClient(this._remote) try { - await client.unlink(this._getFilePath(file))::pFinally(() => { client.close() }) + await client.unlink(this._getFilePath(file))::pFinally(() => { + client.close() + }) } catch (error) { throw normalizeError(error) } @@ -181,7 +188,9 @@ export default class SmbHandler extends RemoteHandlerAbstract { let size try { - size = await client.getSize(this._getFilePath(file))::pFinally(() => { client.close() }) + size = await client.getSize(this._getFilePath(file))::pFinally(() => { + client.close() + }) } catch (error) { throw normalizeError(error) } diff --git a/packages/xo-server/src/schemas/acl.js b/packages/xo-server/src/schemas/acl.js index 84362c289..54435cb98 100644 --- a/packages/xo-server/src/schemas/acl.js +++ b/packages/xo-server/src/schemas/acl.js @@ -19,10 +19,5 @@ export default { description: 'user (or group)', }, }, - required: [ - 'id', - 'action', - 'object', - 'subject', - ], + required: ['id', 'action', 'object', 'subject'], } diff --git a/packages/xo-server/src/schemas/job.js b/packages/xo-server/src/schemas/job.js index 7da1fe069..e65662259 100644 --- a/packages/xo-server/src/schemas/job.js +++ b/packages/xo-server/src/schemas/job.js @@ -15,7 +15,8 @@ export default { }, userId: { type: 'string', - description: 'identifier of the user who have created the job (the permissions of the user are used by the job)', + description: + 'identifier of the user who have created the job (the permissions of the user are used by the job)', }, key: { type: 'string', @@ -30,14 +31,9 @@ export default { }, timeout: { type: 'number', - description: 'number of milliseconds after which the job is considered failed', + description: + 'number of milliseconds after which the job is considered failed', }, }, - required: [ - 'type', - 'id', - 'userId', - 'key', - 'method', - ], + required: ['type', 'id', 'userId', 'key', 'method'], } diff --git a/packages/xo-server/src/schemas/log.js 
b/packages/xo-server/src/schemas/log.js index 9778ae969..99240e159 100644 --- a/packages/xo-server/src/schemas/log.js +++ b/packages/xo-server/src/schemas/log.js @@ -20,10 +20,5 @@ export default { }, data: {}, }, - required: [ - 'id', - 'time', - 'message', - 'namespace', - ], + required: ['id', 'time', 'message', 'namespace'], } diff --git a/packages/xo-server/src/schemas/log/jobCallEnd.js b/packages/xo-server/src/schemas/log/jobCallEnd.js index a1f32afe1..c0b85019e 100644 --- a/packages/xo-server/src/schemas/log/jobCallEnd.js +++ b/packages/xo-server/src/schemas/log/jobCallEnd.js @@ -18,16 +18,9 @@ export default { description: 'describe one failure, exists if the call has failed', }, returnedValue: { - description: 'call\'s result, exists if the call is a success', + description: "call's result, exists if the call is a success", }, }, - required: [ - 'event', - 'runJobId', - 'runCallId', - ], - oneOf: [ - { required: ['error'] }, - { required: ['returnedValue'] }, - ], + required: ['event', 'runJobId', 'runCallId'], + oneOf: [{ required: ['error'] }, { required: ['returnedValue'] }], } diff --git a/packages/xo-server/src/schemas/log/jobCallStart.js b/packages/xo-server/src/schemas/log/jobCallStart.js index 8e5e6af49..07f12b8b7 100644 --- a/packages/xo-server/src/schemas/log/jobCallStart.js +++ b/packages/xo-server/src/schemas/log/jobCallStart.js @@ -18,10 +18,5 @@ export default { description: 'params of the called method', }, }, - required: [ - 'event', - 'runJobId', - 'method', - 'params', - ], + required: ['event', 'runJobId', 'method', 'params'], } diff --git a/packages/xo-server/src/schemas/log/jobEnd.js b/packages/xo-server/src/schemas/log/jobEnd.js index a74aa3089..c50dc6062 100644 --- a/packages/xo-server/src/schemas/log/jobEnd.js +++ b/packages/xo-server/src/schemas/log/jobEnd.js @@ -14,8 +14,5 @@ export default { description: 'describe one failure, exists if no call has been made', }, }, - required: [ - 'event', - 'runJobId', - ], + required: ['event', 'runJobId'], } diff --git a/packages/xo-server/src/schemas/log/jobStart.js b/packages/xo-server/src/schemas/log/jobStart.js index 1c29df2aa..13bab541d 100644 --- a/packages/xo-server/src/schemas/log/jobStart.js +++ b/packages/xo-server/src/schemas/log/jobStart.js @@ -17,10 +17,5 @@ export default { type: 'string', }, }, - required: [ - 'event', - 'userId', - 'jobId', - 'key', - ], + required: ['event', 'userId', 'jobId', 'key'], } diff --git a/packages/xo-server/src/schemas/plugin.js b/packages/xo-server/src/schemas/plugin.js index a6a364cbf..330cccaa5 100644 --- a/packages/xo-server/src/schemas/plugin.js +++ b/packages/xo-server/src/schemas/plugin.js @@ -29,7 +29,8 @@ export default { }, configurationSchema: { $ref: 'http://json-schema.org/draft-04/schema#', - description: 'configuration schema for this plugin (not present if not configurable)', + description: + 'configuration schema for this plugin (not present if not configurable)', }, testable: { type: 'boolean', @@ -40,10 +41,5 @@ export default { description: 'test schema for this plugin', }, }, - required: [ - 'id', - 'name', - 'autoload', - 'loaded', - ], + required: ['id', 'name', 'autoload', 'loaded'], } diff --git a/packages/xo-server/src/schemas/user.js b/packages/xo-server/src/schemas/user.js index aa61c586c..9ed845c5b 100644 --- a/packages/xo-server/src/schemas/user.js +++ b/packages/xo-server/src/schemas/user.js @@ -19,7 +19,8 @@ export default { }, permission: { enum: ['none', 'read', 'write', 'admin'], - description: 'root permission for this user, none and 
admin are the only significant ones', + description: + 'root permission for this user, none and admin are the only significant ones', }, preferences: { type: 'object', @@ -33,18 +34,12 @@ export default { key: { type: 'string' }, title: { type: 'string' }, }, - required: [ - 'key', - 'title', - ], + required: ['key', 'title'], }, }, }, description: 'various user preferences', }, }, - required: [ - 'id', - 'email', - ], + required: ['id', 'email'], } diff --git a/packages/xo-server/src/size-stream.js b/packages/xo-server/src/size-stream.js index acc31903b..aa8693376 100644 --- a/packages/xo-server/src/size-stream.js +++ b/packages/xo-server/src/size-stream.js @@ -1,12 +1,10 @@ import through2 from 'through2' const createSizeStream = () => { - const wrapper = through2( - (chunk, enc, cb) => { - wrapper.size += chunk.length - cb(null, chunk) - } - ) + const wrapper = through2((chunk, enc, cb) => { + wrapper.size += chunk.length + cb(null, chunk) + }) wrapper.size = 0 return wrapper } diff --git a/packages/xo-server/src/stream-to-existing-buffer.js b/packages/xo-server/src/stream-to-existing-buffer.js index 1e11725bb..15664503f 100644 --- a/packages/xo-server/src/stream-to-existing-buffer.js +++ b/packages/xo-server/src/stream-to-existing-buffer.js @@ -5,40 +5,41 @@ const streamToExistingBuffer = ( buffer, offset = 0, end = buffer.length -) => new Promise((resolve, reject) => { - assert(offset >= 0) - assert(end > offset) - assert(end <= buffer.length) +) => + new Promise((resolve, reject) => { + assert(offset >= 0) + assert(end > offset) + assert(end <= buffer.length) - let i = offset + let i = offset - const onData = chunk => { - const prev = i - i += chunk.length + const onData = chunk => { + const prev = i + i += chunk.length - if (i > end) { - return onError(new Error('too much data')) + if (i > end) { + return onError(new Error('too much data')) + } + + chunk.copy(buffer, prev) } + stream.on('data', onData) - chunk.copy(buffer, prev) - } - stream.on('data', onData) - - const clean = () => { - stream.removeListener('data', onData) - stream.removeListener('end', onEnd) - stream.removeListener('error', onError) - } - const onEnd = () => { - resolve(i - offset) - clean() - } - stream.on('end', onEnd) - const onError = error => { - reject(error) - clean() - } - stream.on('error', onError) -}) + const clean = () => { + stream.removeListener('data', onData) + stream.removeListener('end', onEnd) + stream.removeListener('error', onError) + } + const onEnd = () => { + resolve(i - offset) + clean() + } + stream.on('end', onEnd) + const onError = error => { + reject(error) + clean() + } + stream.on('error', onError) + }) export { streamToExistingBuffer as default } diff --git a/packages/xo-server/src/stream-to-new-buffer.js b/packages/xo-server/src/stream-to-new-buffer.js index 4018b9979..8ca1b519a 100644 --- a/packages/xo-server/src/stream-to-new-buffer.js +++ b/packages/xo-server/src/stream-to-new-buffer.js @@ -1,27 +1,28 @@ -const streamToNewBuffer = stream => new Promise((resolve, reject) => { - const chunks = [] - let length = 0 +const streamToNewBuffer = stream => + new Promise((resolve, reject) => { + const chunks = [] + let length = 0 - const onData = chunk => { - chunks.push(chunk) - length += chunk.length - } - stream.on('data', onData) + const onData = chunk => { + chunks.push(chunk) + length += chunk.length + } + stream.on('data', onData) - const clean = () => { - stream.removeListener('data', onData) - stream.removeListener('end', onEnd) - stream.removeListener('error', onError) - } - 
const onEnd = () => { - resolve(Buffer.concat(chunks, length)) - clean() - } - stream.on('end', onEnd) - const onError = error => { - reject(error) - clean() - } - stream.on('error', onError) -}) + const clean = () => { + stream.removeListener('data', onData) + stream.removeListener('end', onEnd) + stream.removeListener('error', onError) + } + const onEnd = () => { + resolve(Buffer.concat(chunks, length)) + clean() + } + stream.on('end', onEnd) + const onError = error => { + reject(error) + clean() + } + stream.on('error', onError) + }) export { streamToNewBuffer as default } diff --git a/packages/xo-server/src/utils.js b/packages/xo-server/src/utils.js index 8a311ce92..49674949d 100644 --- a/packages/xo-server/src/utils.js +++ b/packages/xo-server/src/utils.js @@ -32,10 +32,7 @@ import { promisify, reflect as pReflect, } from 'promise-toolbox' -import { - createHash, - randomBytes, -} from 'crypto' +import { createHash, randomBytes } from 'crypto' // =================================================================== @@ -53,11 +50,13 @@ export const asyncMap = (collection, iteratee) => { } } - return Promise.all(mapToArray(collection, (item, key, collection) => - new Promise(resolve => { - resolve(iteratee(item, key, collection)) - }).catch(onError) - )).then(values => { + return Promise.all( + mapToArray(collection, (item, key, collection) => + new Promise(resolve => { + resolve(iteratee(item, key, collection)) + }).catch(onError) + ) + ).then(values => { if (errorContainer !== undefined) { throw errorContainer.error } @@ -103,7 +102,7 @@ export const diffItems = (coll1, coll2) => { } }) - return [ added, keys(removed) ] + return [added, keys(removed)] } // ------------------------------------------------------------------- @@ -138,16 +137,18 @@ export const addChecksumToReadStream = (stream, algorithm = 'md5') => { const hash = createHash(algorithm) const { promise, resolve } = defer() - const wrapper = stream.pipe(through2( - (chunk, enc, callback) => { - hash.update(chunk) - callback(null, chunk) - }, - callback => { - resolve(hash.digest('hex')) - callback() - } - )) + const wrapper = stream.pipe( + through2( + (chunk, enc, callback) => { + hash.update(chunk) + callback(null, chunk) + }, + callback => { + resolve(hash.digest('hex')) + callback() + } + ) + ) stream.on('error', error => wrapper.emit('error', error)) wrapper.checksum = promise.then(hash => `$${algorithmId}$$${hash}`) @@ -159,7 +160,10 @@ export const addChecksumToReadStream = (stream, algorithm = 'md5') => { // The given stream is wrapped in a stream which emits an error event // if the computed checksum is not equals to the expected checksum. 
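 // (Editorial note: the checksum strings handled here have the shape
 // '$<algorithmId>$$<hexDigest>', as produced by addChecksumToReadStream
 // above; slicing from position 1 to the next '$' recovers the algorithm
 // id, which ID_TO_ALGORITHM maps back to a Node crypto algorithm name.)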
export const validChecksumOfReadStream = (stream, expectedChecksum) => { - const algorithmId = expectedChecksum.slice(1, expectedChecksum.indexOf('$', 1)) + const algorithmId = expectedChecksum.slice( + 1, + expectedChecksum.indexOf('$', 1) + ) if (!algorithmId) { throw new Error(`unknown algorithm: ${algorithmId}`) @@ -167,22 +171,26 @@ export const validChecksumOfReadStream = (stream, expectedChecksum) => { const hash = createHash(ID_TO_ALGORITHM[algorithmId]) - const wrapper = stream.pipe(through2( - { highWaterMark: 0 }, - (chunk, enc, callback) => { - hash.update(chunk) - callback(null, chunk) - }, - callback => { - const checksum = `$${algorithmId}$$${hash.digest('hex')}` + const wrapper = stream.pipe( + through2( + { highWaterMark: 0 }, + (chunk, enc, callback) => { + hash.update(chunk) + callback(null, chunk) + }, + callback => { + const checksum = `$${algorithmId}$$${hash.digest('hex')}` - callback( - checksum !== expectedChecksum - ? new Error(`Bad checksum (${checksum}), expected: ${expectedChecksum}`) - : null - ) - } - )) + callback( + checksum !== expectedChecksum + ? new Error( + `Bad checksum (${checksum}), expected: ${expectedChecksum}` + ) + : null + ) + } + ) + ) stream.on('error', error => wrapper.emit('error', error)) wrapper.checksumVerified = eventToPromise(wrapper, 'end') @@ -225,10 +233,16 @@ export const firstDefined = function () { // ------------------------------------------------------------------- -export const getUserPublicProperties = user => pick( - user.properties || user, - 'id', 'email', 'groups', 'permission', 'preferences', 'provider' -) +export const getUserPublicProperties = user => + pick( + user.properties || user, + 'id', + 'email', + 'groups', + 'permission', + 'preferences', + 'provider' + ) // ------------------------------------------------------------------- @@ -237,17 +251,18 @@ export const getPseudoRandomBytes = n => { const odd = n & 1 for (let i = 0, m = n - odd; i < m; i += 2) { - bytes.writeUInt16BE(Math.random() * 65536 | 0, i) + bytes.writeUInt16BE((Math.random() * 65536) | 0, i) } if (odd) { - bytes.writeUInt8(Math.random() * 256 | 0, n - 1) + bytes.writeUInt8((Math.random() * 256) | 0, n - 1) } return bytes } -export const generateUnsecureToken = (n = 32) => base64url(getPseudoRandomBytes(n)) +export const generateUnsecureToken = (n = 32) => + base64url(getPseudoRandomBytes(n)) // Generate a secure random Base64 string. export const generateToken = (randomBytes => { @@ -270,7 +285,7 @@ export const parseXml = (function () { explicitArray: false, } - return (xml) => { + return xml => { let result // xml2js.parseString() use a callback for synchronous code. @@ -340,13 +355,17 @@ export function pDebug (promise, name) { value => { console.log( '%s', - `Promise ${name} resolved${value !== undefined ? ` with ${kindOf(value)}` : ''}` + `Promise ${name} resolved${ + value !== undefined ? ` with ${kindOf(value)}` : '' + }` ) }, reason => { console.log( '%s', - `Promise ${name} rejected${reason !== undefined ? ` with ${kindOf(reason)}` : ''}` + `Promise ${name} rejected${ + reason !== undefined ? ` with ${kindOf(reason)}` : '' + }` ) } ) @@ -471,14 +490,15 @@ export function map ( // ------------------------------------------------------------------- // Create a hash from multiple values. 
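 // (Editorial note: multiKeyHashInt yields a 32-bit integer; the promise
 // below writes it into a 4-byte little-endian buffer and base64url-encodes
 // it, so every multi-key hash becomes a short URL-safe string.)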
-export const multiKeyHash = (...args) => new Promise(resolve => { - const hash = multiKeyHashInt(...args) +export const multiKeyHash = (...args) => + new Promise(resolve => { + const hash = multiKeyHashInt(...args) - const buf = Buffer.allocUnsafe(4) - buf.writeUInt32LE(hash, 0) + const buf = Buffer.allocUnsafe(4) + buf.writeUInt32LE(hash, 0) - resolve(base64url(buf)) -}) + resolve(base64url(buf)) + }) // ------------------------------------------------------------------- @@ -487,19 +507,17 @@ export const resolveSubpath = (root, path) => // ------------------------------------------------------------------- -export const streamToArray = (stream, { - filter, - mapper, -} = {}) => new Promise((resolve, reject) => { - stream = highland(stream).stopOnError(reject) - if (filter) { - stream = stream.filter(filter) - } - if (mapper) { - stream = stream.map(mapper) - } - stream.toArray(resolve) -}) +export const streamToArray = (stream, { filter, mapper } = {}) => + new Promise((resolve, reject) => { + stream = highland(stream).stopOnError(reject) + if (filter) { + stream = stream.filter(filter) + } + if (mapper) { + stream = stream.map(mapper) + } + stream.toArray(resolve) + }) // ------------------------------------------------------------------- @@ -520,7 +538,10 @@ export const scheduleFn = (cronTime, fn, timeZone) => { try { await fn() } catch (error) { - console.error('[WARN] scheduled function:', (error && error.stack) || error) + console.error( + '[WARN] scheduled function:', + (error && error.stack) || error + ) } finally { running = false } @@ -563,11 +584,7 @@ export const thunkToArray = thunk => { // function foo (param = throwFn('param is required')()) {} // ``` export const throwFn = error => () => { - throw ( - isString(error) - ? new Error(error) - : error - ) + throw isString(error) ? new Error(error) : error } // ------------------------------------------------------------------- @@ -596,10 +613,9 @@ export const mapFilter = (collection, iteratee) => { export const splitFirst = (string, separator) => { const i = string.indexOf(separator) - return i === -1 ? null : [ - string.slice(0, i), - string.slice(i + separator.length), - ] + return i === -1 + ? 
null + : [string.slice(0, i), string.slice(i + separator.length)] } // ------------------------------------------------------------------- diff --git a/packages/xo-server/src/utils.spec.js b/packages/xo-server/src/utils.spec.js index a2bb80594..179026ace 100644 --- a/packages/xo-server/src/utils.spec.js +++ b/packages/xo-server/src/utils.spec.js @@ -52,10 +52,7 @@ describe('createRawObject()', () => { describe('diffItems', () => { it('computes the added/removed items between 2 iterables', () => { - expect(diffItems( - ['foo', 'bar'], - ['baz', 'foo'] - )).toEqual([ + expect(diffItems(['foo', 'bar'], ['baz', 'foo'])).toEqual([ ['bar'], ['baz'], ]) @@ -105,14 +102,13 @@ describe('extractProperty()', function () { describe('formatXml()', function () { it('formats a JS object to an XML string', function () { - expect(formatXml({ - foo: { - bar: [ - {$: {baz: 'plop'}}, - {$: {baz: 'plip'}}, - ], - }, - })).toBe(` + expect( + formatXml({ + foo: { + bar: [{ $: { baz: 'plop' } }, { $: { baz: 'plip' } }], + }, + }) + ).toBe(` `) @@ -154,11 +150,7 @@ describe('parseSize()', function () { describe('pSettle()', () => { it('works with arrays', async () => { const rejection = 'fatality' - const [ - status1, - status2, - status3, - ] = await pSettle([ + const [status1, status2, status3] = await pSettle([ Promise.resolve(42), Math.PI, Promise.reject(rejection), @@ -184,11 +176,7 @@ describe('pSettle()', () => { it('works with objects', async () => { const rejection = 'fatality' - const { - a: status1, - b: status2, - c: status3, - } = await pSettle({ + const { a: status1, b: status2, c: status3 } = await pSettle({ a: Promise.resolve(42), b: Math.PI, c: Promise.reject(rejection), diff --git a/packages/xo-server/src/vhd-merge.js b/packages/xo-server/src/vhd-merge.js index 425874645..f34b529e1 100644 --- a/packages/xo-server/src/vhd-merge.js +++ b/packages/xo-server/src/vhd-merge.js @@ -6,15 +6,10 @@ import fu from '@nraynaud/struct-fu' import isEqual from 'lodash/isEqual' import constantStream from './constant-stream' -import { - noop, - streamToBuffer, -} from './utils' +import { noop, streamToBuffer } from './utils' const VHD_UTIL_DEBUG = 0 -const debug = VHD_UTIL_DEBUG - ? str => console.log(`[vhd-util]${str}`) - : noop +const debug = VHD_UTIL_DEBUG ? str => console.log(`[vhd-util]${str}`) : noop // =================================================================== // @@ -42,7 +37,7 @@ const HARD_DISK_TYPE_DYNAMIC = 3 // Full backup. const HARD_DISK_TYPE_DIFFERENCING = 4 // Delta backup. // Other. -const BLOCK_UNUSED = 0xFFFFFFFF +const BLOCK_UNUSED = 0xffffffff const BIT_MASK = 0x80 // unused block as buffer containing a uint32BE @@ -63,11 +58,13 @@ const fuFooter = fu.struct([ fu.char('creatorApplication', 4), // 28 fu.uint32('creatorVersion'), // 32 fu.uint32('creatorHostOs'), // 36 - fu.struct('originalSize', [ // At the creation, current size of the hard disk. + fu.struct('originalSize', [ + // At the creation, current size of the hard disk. fu.uint32('high'), // 40 fu.uint32('low'), // 44 ]), - fu.struct('currentSize', [ // Current size of the virtual disk. At the creation: currentSize = originalSize. + fu.struct('currentSize', [ + // Current size of the virtual disk. At the creation: currentSize = originalSize. 
fu.uint32('high'), // 48 fu.uint32('low'), // 52 ]), @@ -86,11 +83,9 @@ const fuFooter = fu.struct([ const fuHeader = fu.struct([ fu.char('cookie', 8), - fu.struct('dataOffset', [ - fu.uint32('high'), - fu.uint32('low'), - ]), - fu.struct('tableOffset', [ // Absolute byte offset of the Block Allocation Table. + fu.struct('dataOffset', [fu.uint32('high'), fu.uint32('low')]), + fu.struct('tableOffset', [ + // Absolute byte offset of the Block Allocation Table. fu.uint32('high'), fu.uint32('low'), ]), @@ -102,16 +97,21 @@ const fuHeader = fu.struct([ fu.uint32('parentTimestamp'), fu.uint32('reserved1'), fu.char16be('parentUnicodeName', 512), - fu.struct('parentLocatorEntry', [ - fu.uint32('platformCode'), - fu.uint32('platformDataSpace'), - fu.uint32('platformDataLength'), - fu.uint32('reserved'), - fu.struct('platformDataOffset', [ // Absolute byte offset of the locator data. - fu.uint32('high'), - fu.uint32('low'), - ]), - ], VHD_PARENT_LOCATOR_ENTRIES), + fu.struct( + 'parentLocatorEntry', + [ + fu.uint32('platformCode'), + fu.uint32('platformDataSpace'), + fu.uint32('platformDataLength'), + fu.uint32('reserved'), + fu.struct('platformDataOffset', [ + // Absolute byte offset of the locator data. + fu.uint32('high'), + fu.uint32('low'), + ]), + ], + VHD_PARENT_LOCATOR_ENTRIES + ), fu.char('reserved2', 256), ]) @@ -120,19 +120,22 @@ const fuHeader = fu.struct([ // =================================================================== const SIZE_OF_32_BITS = Math.pow(2, 32) -const uint32ToUint64 = (fu) => fu.high * SIZE_OF_32_BITS + fu.low +const uint32ToUint64 = fu => fu.high * SIZE_OF_32_BITS + fu.low // Returns a 32 bits integer corresponding to a Vhd version. -const getVhdVersion = (major, minor) => (major << 16) | (minor & 0x0000FFFF) +const getVhdVersion = (major, minor) => (major << 16) | (minor & 0x0000ffff) // Sectors conversions. -const sectorsRoundUp = bytes => Math.floor((bytes + VHD_SECTOR_SIZE - 1) / VHD_SECTOR_SIZE) +const sectorsRoundUp = bytes => + Math.floor((bytes + VHD_SECTOR_SIZE - 1) / VHD_SECTOR_SIZE) const sectorsRoundUpNoZero = bytes => sectorsRoundUp(bytes) || 1 const sectorsToBytes = sectors => sectors * VHD_SECTOR_SIZE // Check/Set a bit on a vhd map. const mapTestBit = (map, bit) => ((map[bit >> 3] << (bit & 7)) & BIT_MASK) !== 0 -const mapSetBit = (map, bit) => { map[bit >> 3] |= (BIT_MASK >> (bit & 7)) } +const mapSetBit = (map, bit) => { + map[bit >> 3] |= BIT_MASK >> (bit & 7) +} const packField = (field, value, buf) => { const { offset } = field @@ -140,7 +143,7 @@ const packField = (field, value, buf) => { field.pack( value, buf, - (typeof offset !== 'object') ? { bytes: offset, bits: 0 } : offset + typeof offset !== 'object' ? { bytes: offset, bits: 0 } : offset ) } @@ -149,7 +152,7 @@ const unpackField = (field, buf) => { return field.unpack( buf, - (typeof offset !== 'object') ? { bytes: offset, bits: 0 } : offset + typeof offset !== 'object' ? { bytes: offset, bits: 0 } : offset ) } // =================================================================== @@ -165,10 +168,10 @@ function checksumStruct (rawStruct, struct) { packField(checksumField, 0, rawStruct) for (let i = 0, n = struct.size; i < n; i++) { - sum = (sum + rawStruct[i]) & 0xFFFFFFFF + sum = (sum + rawStruct[i]) & 0xffffffff } - sum = 0xFFFFFFFF - sum + sum = 0xffffffff - sum // Write new sum. 
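   // (Editorial note: this is the one's-complement checksum from the VHD
   // specification: sum every byte of the structure with the checksum field
   // zeroed out, then invert; packField below writes the result back into
   // place.)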
packField(checksumField, sum, rawStruct) @@ -210,15 +213,19 @@ class Vhd { ) // Max(end, block allocation table end) - end = Math.max(end, uint32ToUint64(header.tableOffset) + blockAllocationTableSize) + end = Math.max( + end, + uint32ToUint64(header.tableOffset) + blockAllocationTableSize + ) for (let i = 0; i < VHD_PARENT_LOCATOR_ENTRIES; i++) { const entry = header.parentLocatorEntry[i] if (entry.platformCode !== VHD_PLATFORM_CODE_NONE) { - end = Math.max(end, + end = Math.max( + end, uint32ToUint64(entry.platformDataOffset) + - sectorsToBytes(entry.platformDataSpace) + sectorsToBytes(entry.platformDataSpace) ) } } @@ -256,19 +263,27 @@ class Vhd { // Checksum child & parent. if (sumToTest !== sum) { - throw new Error(`Bad checksum in vhd. Expected: ${sum}. Given: ${sumToTest}. (data=${buf.toString('hex')})`) + throw new Error( + `Bad checksum in vhd. Expected: ${sum}. Given: ${sumToTest}. (data=${buf.toString( + 'hex' + )})` + ) } - const header = this.header = fuHeader.unpack(buf.slice(VHD_FOOTER_SIZE)) + const header = (this.header = fuHeader.unpack(buf.slice(VHD_FOOTER_SIZE))) this.footer = fuFooter.unpack(buf) // Compute the number of sectors in one block. // Default: One block contains 4096 sectors of 512 bytes. - const sectorsPerBlock = this.sectorsPerBlock = Math.floor(header.blockSize / VHD_SECTOR_SIZE) + const sectorsPerBlock = (this.sectorsPerBlock = Math.floor( + header.blockSize / VHD_SECTOR_SIZE + )) // Compute bitmap size in sectors. // Default: 1. - const sectorsOfBitmap = this.sectorsOfBitmap = sectorsRoundUpNoZero(sectorsPerBlock >> 3) + const sectorsOfBitmap = (this.sectorsOfBitmap = sectorsRoundUpNoZero( + sectorsPerBlock >> 3 + )) // Full block size => data block size + bitmap size. this.fullBlockSize = sectorsToBytes(sectorsPerBlock + sectorsOfBitmap) @@ -309,12 +324,14 @@ class Vhd { return this._read( sectorsToBytes(blockAddr), onlyBitmap ? this.bitmapSize : this.fullBlockSize - ).then(buf => onlyBitmap - ? { bitmap: buf } - : { - bitmap: buf.slice(0, this.bitmapSize), - data: buf.slice(this.bitmapSize), - } + ).then( + buf => + onlyBitmap + ? { bitmap: buf } + : { + bitmap: buf.slice(0, this.bitmapSize), + data: buf.slice(this.bitmapSize), + } ) } @@ -366,19 +383,26 @@ class Vhd { // Write a buffer/stream at a given position in a vhd file. _write (data, offset) { - debug(`_write offset=${offset} size=${Buffer.isBuffer(data) ? data.length : '???'}`) - // TODO: could probably be merged in remote handlers. - return this._handler.createOutputStream(this._path, { - flags: 'r+', - start: offset, - }).then( - Buffer.isBuffer(data) - ? stream => new Promise((resolve, reject) => { - stream.on('error', reject) - stream.end(data, resolve) - }) - : stream => eventToPromise(data.pipe(stream), 'finish') + debug( + `_write offset=${offset} size=${ + Buffer.isBuffer(data) ? data.length : '???' + }` ) + // TODO: could probably be merged in remote handlers. + return this._handler + .createOutputStream(this._path, { + flags: 'r+', + start: offset, + }) + .then( + Buffer.isBuffer(data) + ? 
stream => + new Promise((resolve, reject) => { + stream.on('error', reject) + stream.end(data, resolve) + }) + : stream => eventToPromise(data.pipe(stream), 'finish') + ) } async ensureBatSize (size) { @@ -393,16 +417,20 @@ class Vhd { const { first, firstSector, lastSector } = this._getFirstAndLastBlocks() // extend BAT - const maxTableEntries = header.maxTableEntries = size + const maxTableEntries = (header.maxTableEntries = size) const batSize = maxTableEntries * VHD_ENTRY_SIZE const prevBat = this.blockTable - const bat = this.blockTable = Buffer.allocUnsafe(batSize) + const bat = (this.blockTable = Buffer.allocUnsafe(batSize)) prevBat.copy(bat) bat.fill(BUF_BLOCK_UNUSED, prevBat.length) - debug(`ensureBatSize: extend in memory BAT ${prevMaxTableEntries} -> ${maxTableEntries}`) + debug( + `ensureBatSize: extend in memory BAT ${prevMaxTableEntries} -> ${maxTableEntries}` + ) const extendBat = () => { - debug(`ensureBatSize: extend in file BAT ${prevMaxTableEntries} -> ${maxTableEntries}`) + debug( + `ensureBatSize: extend in file BAT ${prevMaxTableEntries} -> ${maxTableEntries}` + ) return this._write( constantStream(BUF_BLOCK_UNUSED, maxTableEntries - prevMaxTableEntries), @@ -411,10 +439,7 @@ class Vhd { } if (tableOffset + batSize < sectorsToBytes(firstSector)) { - return Promise.all([ - extendBat(), - this.writeHeader(), - ]) + return Promise.all([extendBat(), this.writeHeader()]) } const { fullBlockSize } = this @@ -423,9 +448,9 @@ class Vhd { return Promise.all([ // copy the first block at the end - this._readStream(sectorsToBytes(firstSector), fullBlockSize).then(stream => - this._write(stream, sectorsToBytes(newFirstSector)) - ).then(extendBat), + this._readStream(sectorsToBytes(firstSector), fullBlockSize) + .then(stream => this._write(stream, sectorsToBytes(newFirstSector))) + .then(extendBat), this._setBatEntry(first, newFirstSector), this.writeHeader(), @@ -456,7 +481,7 @@ class Vhd { await Promise.all([ // Write an empty block and addr in vhd file. this._write( - constantStream([ 0 ], this.fullBlockSize), + constantStream([0], this.fullBlockSize), sectorsToBytes(blockAddr) ), @@ -476,7 +501,11 @@ class Vhd { const offset = sectorsToBytes(blockAddr) - debug(`Write bitmap at: ${offset}. (size=${bitmapSize}, data=${bitmap.toString('hex')})`) + debug( + `Write bitmap at: ${offset}. (size=${bitmapSize}, data=${bitmap.toString( + 'hex' + )})` + ) await this._write(bitmap, sectorsToBytes(blockAddr)) } @@ -489,7 +518,11 @@ class Vhd { const offset = blockAddr + this.sectorsOfBitmap + beginSectorId - debug(`writeBlockSectors at ${offset} block=${block.id}, sectors=${beginSectorId}...${endSectorId}`) + debug( + `writeBlockSectors at ${offset} block=${ + block.id + }, sectors=${beginSectorId}...${endSectorId}` + ) await this._write( block.data.slice( @@ -532,11 +565,7 @@ class Vhd { // Write n sectors into parent. debug(`coalesceBlock: write sectors=${i}...${endSector}`) - await this.writeBlockSectors( - { id: blockId, data }, - i, - endSector - ) + await this.writeBlockSectors({ id: blockId, data }, i, endSector) i = endSector } @@ -553,7 +582,11 @@ class Vhd { const rawFooter = fuFooter.pack(footer) footer.checksum = checksumStruct(rawFooter, fuFooter) - debug(`Write footer at: ${offset} (checksum=${footer.checksum}). (data=${rawFooter.toString('hex')})`) + debug( + `Write footer at: ${offset} (checksum=${ + footer.checksum + }). 
(data=${rawFooter.toString('hex')})` + ) await this._write(rawFooter, 0) await this._write(rawFooter, offset) @@ -564,7 +597,11 @@ class Vhd { const rawHeader = fuHeader.pack(header) header.checksum = checksumStruct(rawHeader, fuHeader) const offset = VHD_FOOTER_SIZE - debug(`Write header at: ${offset} (checksum=${header.checksum}). (data=${rawHeader.toString('hex')})`) + debug( + `Write header at: ${offset} (checksum=${ + header.checksum + }). (data=${rawHeader.toString('hex')})` + ) return this._write(rawHeader, offset) } } @@ -576,8 +613,10 @@ class Vhd { // // TODO: update the identifier of the parent VHD. export default async function vhdMerge ( - parentHandler, parentPath, - childHandler, childPath + parentHandler, + parentPath, + childHandler, + childPath ) { const parentVhd = new Vhd(parentHandler, parentPath) const childVhd = new Vhd(childHandler, childPath) @@ -609,10 +648,7 @@ export default async function vhdMerge ( } // Read allocation table of child/parent. - await Promise.all([ - parentVhd.readBlockTable(), - childVhd.readBlockTable(), - ]) + await Promise.all([parentVhd.readBlockTable(), childVhd.readBlockTable()]) await parentVhd.ensureBatSize(childVhd.header.maxTableEntries) @@ -641,8 +677,10 @@ export default async function vhdMerge ( // returns true if the child was actually modified export async function chainVhd ( - parentHandler, parentPath, - childHandler, childPath + parentHandler, + parentPath, + childHandler, + childPath ) { const parentVhd = new Vhd(parentHandler, parentPath) const childVhd = new Vhd(childHandler, childPath) diff --git a/packages/xo-server/src/xapi-object-to-xo.js b/packages/xo-server/src/xapi-object-to-xo.js index a2ebbedb8..39e518d38 100644 --- a/packages/xo-server/src/xapi-object-to-xo.js +++ b/packages/xo-server/src/xapi-object-to-xo.js @@ -1,6 +1,4 @@ -import { - startsWith, -} from 'lodash' +import { startsWith } from 'lodash' import { ensureArray, @@ -12,22 +10,12 @@ import { mapToArray, parseXml, } from './utils' -import { - isHostRunning, - isVmHvm, - isVmRunning, - parseDateTime, -} from './xapi' -import { - useUpdateSystem, -} from './xapi/utils' +import { isHostRunning, isVmHvm, isVmRunning, parseDateTime } from './xapi' +import { useUpdateSystem } from './xapi/utils' // =================================================================== -const { - defineProperties, - freeze, -} = Object +const { defineProperties, freeze } = Object function link (obj, prop, idField = '$id') { const dynamicValue = obj[`$${prop}`] @@ -56,7 +44,8 @@ function toTimestamp (date) { const timestamp = +date // Not NaN. - if (timestamp === timestamp) { // eslint-disable-line no-self-compare + // eslint-disable-next-line no-self-compare + if (timestamp === timestamp) { return timestamp } @@ -80,7 +69,9 @@ const TRANSFORMS = { tags: obj.tags, name_description: obj.name_description, name_label: obj.name_label || obj.$master.name_label, - xosanPackInstallationTime: toTimestamp(obj.other_config.xosan_pack_installation_time), + xosanPackInstallationTime: toTimestamp( + obj.other_config.xosan_pack_installation_time + ), cpus: { cores: cpuInfo && +cpuInfo.cpu_count, sockets: cpuInfo && +cpuInfo.socket_count, @@ -178,15 +169,14 @@ const TRANSFORMS = { })(), patches: patches || link(obj, 'patches'), powerOnMode: obj.power_on_mode, - power_state: metrics - ? (isRunning ? 'Running' : 'Halted') - : 'Unknown', + power_state: metrics ? (isRunning ? 
'Running' : 'Halted') : 'Unknown', startTime: toTimestamp(otherConfig.boot_time), - supplementalPacks: supplementalPacks || + supplementalPacks: + supplementalPacks || mapFilter(softwareVersion, (value, key) => { let author, name - if (([ author, name ] = key.split(':')).length === 2) { - const [ description, version ] = value.split(', ') + if (([author, name] = key.split(':')).length === 2) { + const [description, version] = value.split(', ') return { name, description, @@ -242,10 +232,9 @@ const TRANSFORMS = { } const { major, minor } = guestMetrics.PV_drivers_version - const [ hostMajor, hostMinor ] = (obj.$resident_on || obj.$pool.$master) - .software_version - .product_version - .split('.') + const [hostMajor, hostMinor] = ( + obj.$resident_on || obj.$pool.$master + ).software_version.product_version.split('.') return major >= hostMajor && minor >= hostMinor ? 'up to date' @@ -272,11 +261,10 @@ const TRANSFORMS = { boot: obj.HVM_boot_params, CPUs: { max: +obj.VCPUs_max, - number: ( + number: isRunning && metrics && xenTools ? +metrics.VCPUs_number - : +obj.VCPUs_at_startup - ), + : +obj.VCPUs_at_startup, }, current_operations: obj.current_operations, docker: (function () { @@ -316,8 +304,8 @@ const TRANSFORMS = { const staticMax = +obj.memory_static_max const memory = { - dynamic: [ dynamicMin, dynamicMax ], - static: [ staticMin, staticMax ], + dynamic: [dynamicMin, dynamicMax], + static: [staticMin, staticMax], } const gmMemory = guestMetrics && guestMetrics.memory @@ -356,11 +344,8 @@ const TRANSFORMS = { // - 'up to date': optimized xenTools, - $container: ( - isRunning - ? link(obj, 'resident_on') - : link(obj, 'pool') // TODO: handle local VMs (`VM.get_possible_hosts()`). - ), + // TODO: handle local VMs (`VM.get_possible_hosts()`). + $container: isRunning ? link(obj, 'resident_on') : link(obj, 'pool'), $VBDs: link(obj, 'VBDs'), // TODO: dedupe @@ -369,10 +354,7 @@ const TRANSFORMS = { } if (isHvm) { - ({ - vga: vm.vga = 'cirrus', - videoram: vm.videoram = 4, - } = obj.platform) + ;({ vga: vm.vga = 'cirrus', videoram: vm.videoram = 4 } = obj.platform) } const coresPerSocket = obj.platform['cores-per-socket'] @@ -398,7 +380,7 @@ const TRANSFORMS = { vm.template_info = { arch: otherConfig['install-arch'], disks: (function () { - const {disks: xml} = otherConfig + const { disks: xml } = otherConfig let data if (!xml || !(data = parseXml(xml)).provision) { return [] @@ -457,11 +439,10 @@ const TRANSFORMS = { other_config: obj.other_config, sm_config: obj.sm_config, - $container: ( + $container: obj.shared || !obj.$PBDs[0] ? 
link(obj, 'pool') - : link(obj.$PBDs[0], 'host') - ), + : link(obj.$PBDs[0], 'host'), $PBDs: link(obj, 'PBDs'), } }, @@ -762,10 +743,7 @@ export default xapiObj => { if (!('type' in xoObj)) { xoObj.type = xapiObj.$type } - if ( - 'uuid' in xapiObj && - !('uuid' in xoObj) - ) { + if ('uuid' in xapiObj && !('uuid' in xoObj)) { xoObj.uuid = xapiObj.uuid } xoObj.$pool = xapiObj.$pool.$id diff --git a/packages/xo-server/src/xapi-stats.js b/packages/xo-server/src/xapi-stats.js index 7812e4111..f0619f3d7 100644 --- a/packages/xo-server/src/xapi-stats.js +++ b/packages/xo-server/src/xapi-stats.js @@ -11,10 +11,10 @@ const RRD_STEP_HOURS = 3600 const RRD_STEP_DAYS = 86400 const RRD_STEP_FROM_STRING = { - 'seconds': RRD_STEP_SECONDS, - 'minutes': RRD_STEP_MINUTES, - 'hours': RRD_STEP_HOURS, - 'days': RRD_STEP_DAYS, + seconds: RRD_STEP_SECONDS, + minutes: RRD_STEP_MINUTES, + hours: RRD_STEP_HOURS, + days: RRD_STEP_DAYS, } const RRD_POINTS_PER_STEP = { @@ -178,7 +178,7 @@ function parseLegends (json) { throw new UnknownLegendFormat(value) } - const [ , name, uuid, type, , ] = parsedLine + const [, name, uuid, type] = parsedLine if (name !== 'vm') { parseOneHostLegend(hostLegends, type, index) @@ -208,7 +208,10 @@ export default class XapiStats { for (const key in source) { if (key === 'cpus') { for (const cpuIndex in source.cpus) { - dest.cpus[cpuIndex].splice(0, dest.cpus[cpuIndex].length - pointsPerStep) + dest.cpus[cpuIndex].splice( + 0, + dest.cpus[cpuIndex].length - pointsPerStep + ) } // If the number of cpus has been decreased, remove ! @@ -221,20 +224,28 @@ export default class XapiStats { // For each pif or vif for (const ifType in source[key]) { for (const pifIndex in source[key][ifType]) { - dest[key][ifType][pifIndex].splice(0, dest[key][ifType][pifIndex].length - pointsPerStep) + dest[key][ifType][pifIndex].splice( + 0, + dest[key][ifType][pifIndex].length - pointsPerStep + ) } // If the number of pifs has been decreased, remove ! let offset - if ((offset = dest[key][ifType].length - source[key][ifType].length) > 0) { + if ( + (offset = dest[key][ifType].length - source[key][ifType].length) > 0 + ) { dest[key][ifType].splice(-offset) } } } else if (key === 'xvds') { for (const xvdType in source.xvds) { for (const xvdLetter in source.xvds[xvdType]) { - dest.xvds[xvdType][xvdLetter].splice(0, dest.xvds[xvdType][xvdLetter].length - pointsPerStep) + dest.xvds[xvdType][xvdLetter].splice( + 0, + dest.xvds[xvdType][xvdLetter].length - pointsPerStep + ) } // If the number of xvds has been decreased, remove ! 
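Editorial note: the splice calls re-wrapped in the hunks above all implement the same sliding-window trim: each per-metric series (cpus, pif/vif interfaces, xvds) keeps at most RRD_POINTS_PER_STEP[step] samples and drops the oldest first. A minimal standalone sketch of that pattern, for illustration only (trimSeries is not part of xo-server):

const trimSeries = (series, maxPoints) => {
  // Drop the oldest samples so that at most `maxPoints` remain. When the
  // series is already short enough, the delete count is negative and
  // splice removes nothing.
  series.splice(0, series.length - maxPoints)
  return series
}

// trimSeries([1, 2, 3, 4, 5], 3) leaves [3, 4, 5]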
@@ -340,7 +351,9 @@ export default class XapiStats { vmStats.xvds[xvdType][index] = [] } - vmStats.xvds[xvdType][index].push(convertNanToNull(values[vmLegends.xvds[xvdType][index]])) + vmStats.xvds[xvdType][index].push( + convertNanToNull(values[vmLegends.xvds[xvdType][index]]) + ) } } @@ -385,15 +398,17 @@ export default class XapiStats { // Return stats (Json format) or throws got exception @limitConcurrency(3) _getJson (xapi, host, timestamp) { - return xapi.getResource('/rrd_updates', { - host, - query: { - cf: 'AVERAGE', - host: 'true', - json: 'true', - start: timestamp, - }, - }).then(response => response.readAll().then(JSON5.parse)) + return xapi + .getResource('/rrd_updates', { + host, + query: { + cf: 'AVERAGE', + host: 'true', + json: 'true', + start: timestamp, + }, + }) + .then(response => response.readAll().then(JSON5.parse)) } async _getLastTimestamp (xapi, host, step) { @@ -428,11 +443,15 @@ export default class XapiStats { async _getAndUpdatePoints (xapi, host, vmId, granularity) { // Get granularity to use - const step = (granularity === undefined || granularity === 0) - ? RRD_STEP_SECONDS : RRD_STEP_FROM_STRING[granularity] + const step = + granularity === undefined || granularity === 0 + ? RRD_STEP_SECONDS + : RRD_STEP_FROM_STRING[granularity] if (step === undefined) { - throw new FaultyGranularity(`Unknown granularity: '${granularity}'. Use 'seconds', 'minutes', 'hours', or 'days'.`) + throw new FaultyGranularity( + `Unknown granularity: '${granularity}'. Use 'seconds', 'minutes', 'hours', or 'days'.` + ) } // Limit the number of http requests @@ -443,8 +462,10 @@ export default class XapiStats { this._vms[hostname] = {} } - if (this._hosts[hostname][step] !== undefined && - this._hosts[hostname][step].localTimestamp + step > getCurrentTimestamp()) { + if ( + this._hosts[hostname][step] !== undefined && + this._hosts[hostname][step].localTimestamp + step > getCurrentTimestamp() + ) { return this._getPoints(hostname, step, vmId) } @@ -459,15 +480,25 @@ export default class XapiStats { // Check if the granularity is linked to 'step' // If it's not the case, we retry other url with the json timestamp if (json.meta.step !== step) { - console.log(`RRD call: Expected step: ${step}, received step: ${json.meta.step}. Retry with other timestamp`) + console.log( + `RRD call: Expected step: ${step}, received step: ${ + json.meta.step + }. 
Retry with other timestamp` + ) const serverTimestamp = await getServerTimestamp(xapi, host) // Approximately: half points are asked // FIXME: Not the best solution - json = await this._getJson(xapi, host, serverTimestamp - step * (RRD_POINTS_PER_STEP[step] / 2) + step) + json = await this._getJson( + xapi, + host, + serverTimestamp - step * (RRD_POINTS_PER_STEP[step] / 2) + step + ) if (json.meta.step !== step) { - throw new FaultyGranularity(`Unable to get the true granularity: ${json.meta.step}`) + throw new FaultyGranularity( + `Unable to get the true granularity: ${json.meta.step}` + ) } } @@ -488,7 +519,9 @@ export default class XapiStats { // Remove useless data and reorder // Note: Older values are at end of json.data.row - const parseOffset = (this._hosts[hostname][step].endTimestamp - startTimestamp + step) / step + const parseOffset = + (this._hosts[hostname][step].endTimestamp - startTimestamp + step) / + step json.data.splice(json.data.length - parseOffset) json.data.reverse() @@ -502,10 +535,18 @@ export default class XapiStats { this._parseHostStats(json, hostname, hostLegends, step) // Remove older stats - this._removeOlderStats(hostLegends, this._hosts[hostname][step].stats, RRD_POINTS_PER_STEP[step]) + this._removeOlderStats( + hostLegends, + this._hosts[hostname][step].stats, + RRD_POINTS_PER_STEP[step] + ) for (const uuid in vmsLegends) { - this._removeOlderStats(vmsLegends[uuid], this._vms[hostname][step][uuid], RRD_POINTS_PER_STEP[step]) + this._removeOlderStats( + vmsLegends[uuid], + this._vms[hostname][step][uuid], + RRD_POINTS_PER_STEP[step] + ) } } } diff --git a/packages/xo-server/src/xapi/index.js b/packages/xo-server/src/xapi/index.js index 947dabc47..d974ee1cf 100644 --- a/packages/xo-server/src/xapi/index.js +++ b/packages/xo-server/src/xapi/index.js @@ -4,7 +4,12 @@ import fatfs from 'fatfs' import synchronized from 'decorator-synchronized' import tarStream from 'tar-stream' import vmdkToVhd from 'xo-vmdk-to-vhd' -import { cancellable, catchPlus as pCatch, defer, ignoreErrors } from 'promise-toolbox' +import { + cancellable, + catchPlus as pCatch, + defer, + ignoreErrors, +} from 'promise-toolbox' import { PassThrough } from 'stream' import { forbiddenOperation } from 'xo-common/api-errors' import { Xapi as XapiBase } from 'xen-api' @@ -20,9 +25,7 @@ import { startsWith, uniq, } from 'lodash' -import { - satisfies as versionSatisfies, -} from 'semver' +import { satisfies as versionSatisfies } from 'semver' import createSizeStream from '../size-stream' import fatfsBuffer, { init as fatfsBufferInit } from '../fatfs-buffer' @@ -96,15 +99,12 @@ export default class Xapi extends XapiBase { return getObject.apply(this, args) })(this.getObject) - const genericWatchers = this._genericWatchers = createRawObject() - const objectsWatchers = this._objectWatchers = createRawObject() + const genericWatchers = (this._genericWatchers = createRawObject()) + const objectsWatchers = (this._objectWatchers = createRawObject()) const onAddOrUpdate = objects => { forEach(objects, object => { - const { - $id: id, - $ref: ref, - } = object + const { $id: id, $ref: ref } = object // Run generic watchers. 
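       // (Editorial note: two watcher maps are kept: genericWatchers, run
       // on every object update as below, and objectsWatchers, which
       // _waitObject appears to use further down to resolve a pending wait
       // for one specific object.)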
for (const watcherId in genericWatchers) { @@ -129,9 +129,13 @@ export default class Xapi extends XapiBase { call (...args) { const fn = super.call - const loop = () => fn.apply(this, args)::pCatch({ - code: 'TOO_MANY_PENDING_TASKS', - }, () => pDelay(5e3).then(loop)) + const loop = () => + fn.apply(this, args)::pCatch( + { + code: 'TOO_MANY_PENDING_TASKS', + }, + () => pDelay(5e3).then(loop) + ) return loop() } @@ -197,9 +201,10 @@ export default class Xapi extends XapiBase { return object } - const loop = () => this._waitObject(idOrUuidOrRef).then( - (object) => predicate(object) ? object : loop() - ) + const loop = () => + this._waitObject(idOrUuidOrRef).then( + object => (predicate(object) ? object : loop()) + ) return loop() } @@ -207,8 +212,7 @@ export default class Xapi extends XapiBase { // Returns the objects if already presents or waits for it. async _getOrWaitObject (idOrUuidOrRef) { return ( - this.getObject(idOrUuidOrRef, null) || - this._waitObject(idOrUuidOrRef) + this.getObject(idOrUuidOrRef, null) || this._waitObject(idOrUuidOrRef) ) } @@ -223,27 +227,27 @@ export default class Xapi extends XapiBase { } _setObjectProperties (object, props) { - const { - $ref: ref, - $type: type, - } = object + const { $ref: ref, $type: type } = object const namespace = getNamespaceForType(type) // TODO: the thrown error should contain the name of the // properties that failed to be set. - return Promise.all(mapToArray(props, (value, name) => { - if (value != null) { - return this.call(`${namespace}.set_${camelToSnakeCase(name)}`, ref, prepareXapiParam(value)) - } - }))::ignoreErrors() + return Promise.all( + mapToArray(props, (value, name) => { + if (value != null) { + return this.call( + `${namespace}.set_${camelToSnakeCase(name)}`, + ref, + prepareXapiParam(value) + ) + } + }) + )::ignoreErrors() } async _updateObjectMapProperty (object, prop, values) { - const { - $ref: ref, - $type: type, - } = object + const { $ref: ref, $type: type } = object prop = camelToSnakeCase(prop) @@ -251,33 +255,30 @@ export default class Xapi extends XapiBase { const add = `${namespace}.add_to_${prop}` const remove = `${namespace}.remove_from_${prop}` - await Promise.all(mapToArray(values, (value, name) => { - if (value !== undefined) { - name = camelToSnakeCase(name) - const removal = this.call(remove, ref, name) + await Promise.all( + mapToArray(values, (value, name) => { + if (value !== undefined) { + name = camelToSnakeCase(name) + const removal = this.call(remove, ref, name) - return value === null - ? removal - : removal::ignoreErrors().then(() => this.call(add, ref, name, prepareXapiParam(value))) - } - })) + return value === null + ? removal + : removal + ::ignoreErrors() + .then(() => this.call(add, ref, name, prepareXapiParam(value))) + } + }) + ) } - async setHostProperties (id, { - nameLabel, - nameDescription, - }) { + async setHostProperties (id, { nameLabel, nameDescription }) { await this._setObjectProperties(this.getObject(id), { nameLabel, nameDescription, }) } - async setPoolProperties ({ - autoPoweron, - nameLabel, - nameDescription, - }) { + async setPoolProperties ({ autoPoweron, nameLabel, nameDescription }) { const { pool } = this await Promise.all([ @@ -285,27 +286,24 @@ export default class Xapi extends XapiBase { nameLabel, nameDescription, }), - autoPoweron != null && this._updateObjectMapProperty(pool, 'other_config', { - autoPoweron: autoPoweron ? 'true' : null, - }), + autoPoweron != null && + this._updateObjectMapProperty(pool, 'other_config', { + autoPoweron: autoPoweron ? 
'true' : null, + }), ]) } - async setSrProperties (id, { - nameLabel, - nameDescription, - }) { + async setSrProperties (id, { nameLabel, nameDescription }) { await this._setObjectProperties(this.getObject(id), { nameLabel, nameDescription, }) } - async setNetworkProperties (id, { - nameLabel, - nameDescription, - defaultIsLocked, - }) { + async setNetworkProperties ( + id, + { nameLabel, nameDescription, defaultIsLocked } + ) { let defaultLockingMode if (defaultIsLocked != null) { defaultLockingMode = defaultIsLocked ? 'disabled' : 'unlocked' @@ -320,20 +318,14 @@ export default class Xapi extends XapiBase { // ================================================================= async addTag (id, tag) { - const { - $ref: ref, - $type: type, - } = this.getObject(id) + const { $ref: ref, $type: type } = this.getObject(id) const namespace = getNamespaceForType(type) await this.call(`${namespace}.add_tags`, ref, tag) } async removeTag (id, tag) { - const { - $ref: ref, - $type: type, - } = this.getObject(id) + const { $ref: ref, $type: type } = this.getObject(id) const namespace = getNamespaceForType(type) await this.call(`${namespace}.remove_tags`, ref, tag) @@ -444,11 +436,11 @@ export default class Xapi extends XapiBase { // Clone a VM: make a fast copy by fast copying each of its VDIs // (using snapshots where possible) on the same SRs. _cloneVm (vm, nameLabel = vm.name_label) { - debug(`Cloning VM ${vm.name_label}${ - nameLabel !== vm.name_label - ? ` as ${nameLabel}` - : '' - }`) + debug( + `Cloning VM ${vm.name_label}${ + nameLabel !== vm.name_label ? ` as ${nameLabel}` : '' + }` + ) return this.call('VM.clone', vm.$ref, nameLabel) } @@ -463,15 +455,11 @@ export default class Xapi extends XapiBase { snapshot = await this._snapshotVm(vm) } - debug(`Copying VM ${vm.name_label}${ - nameLabel !== vm.name_label - ? ` as ${nameLabel}` - : '' - }${ - sr - ? ` on ${sr.name_label}` - : '' - }`) + debug( + `Copying VM ${vm.name_label}${ + nameLabel !== vm.name_label ? ` as ${nameLabel}` : '' + }${sr ? ` on ${sr.name_label}` : ''}` + ) try { return await this.call( @@ -487,37 +475,28 @@ export default class Xapi extends XapiBase { } } - async cloneVm (vmId, { - nameLabel = undefined, - fast = true, - } = {}) { + async cloneVm (vmId, { nameLabel = undefined, fast = true } = {}) { const vm = this.getObject(vmId) - const cloneRef = await ( - fast - ? this._cloneVm(vm, nameLabel) - : this._copyVm(vm, nameLabel) - ) + const cloneRef = await (fast + ? this._cloneVm(vm, nameLabel) + : this._copyVm(vm, nameLabel)) return /* await */ this._getOrWaitObject(cloneRef) } - async copyVm (vmId, srId, { - nameLabel = undefined, - } = {}) { + async copyVm (vmId, srId, { nameLabel = undefined } = {}) { return /* await */ this._getOrWaitObject( - await this._copyVm( - this.getObject(vmId), - nameLabel, - this.getObject(srId) - ) + await this._copyVm(this.getObject(vmId), nameLabel, this.getObject(srId)) ) } - async remoteCopyVm (vmId, targetXapi, targetSrId, { - compress = true, - nameLabel = undefined, - } = {}) { + async remoteCopyVm ( + vmId, + targetXapi, + targetSrId, + { compress = true, nameLabel = undefined } = {} + ) { // Fall back on local copy if possible. if (targetXapi === this) { return { @@ -533,18 +512,16 @@ export default class Xapi extends XapiBase { const sizeStream = createSizeStream() stream = stream.pipe(sizeStream) - const onVmCreation = nameLabel !== undefined - ? vm => targetXapi._setObjectProperties(vm, { - nameLabel, - }) - : null + const onVmCreation = + nameLabel !== undefined + ? 
vm => + targetXapi._setObjectProperties(vm, { + nameLabel, + }) + : null const vm = await targetXapi._getOrWaitObject( - await targetXapi._importVm( - stream, - sr, - onVmCreation - ) + await targetXapi._importVm(stream, sr, onVmCreation) ) return { @@ -601,51 +578,57 @@ export default class Xapi extends XapiBase { }) { debug(`Creating VM ${name_label}`) - return this.call('VM.create', filterUndefineds({ - actions_after_crash, - actions_after_reboot, - actions_after_shutdown, - affinity: affinity == null ? NULL_REF : affinity, - HVM_boot_params, - HVM_boot_policy, - is_a_template: asBoolean(is_a_template), - memory_dynamic_max: asInteger(memory_dynamic_max), - memory_dynamic_min: asInteger(memory_dynamic_min), - memory_static_max: asInteger(memory_static_max), - memory_static_min: asInteger(memory_static_min), - other_config, - PCI_bus, - platform, - PV_args, - PV_bootloader, - PV_bootloader_args, - PV_kernel, - PV_legacy_args, - PV_ramdisk, - recommendations, - user_version: asInteger(user_version), - VCPUs_at_startup: asInteger(VCPUs_at_startup), - VCPUs_max: asInteger(VCPUs_max), - VCPUs_params, + return this.call( + 'VM.create', + filterUndefineds({ + actions_after_crash, + actions_after_reboot, + actions_after_shutdown, + affinity: affinity == null ? NULL_REF : affinity, + HVM_boot_params, + HVM_boot_policy, + is_a_template: asBoolean(is_a_template), + memory_dynamic_max: asInteger(memory_dynamic_max), + memory_dynamic_min: asInteger(memory_dynamic_min), + memory_static_max: asInteger(memory_static_max), + memory_static_min: asInteger(memory_static_min), + other_config, + PCI_bus, + platform, + PV_args, + PV_bootloader, + PV_bootloader_args, + PV_kernel, + PV_legacy_args, + PV_ramdisk, + recommendations, + user_version: asInteger(user_version), + VCPUs_at_startup: asInteger(VCPUs_at_startup), + VCPUs_max: asInteger(VCPUs_max), + VCPUs_params, - // Optional fields. - blocked_operations, - generation_id, - ha_always_run: asBoolean(ha_always_run), - ha_restart_priority, - has_vendor_device, - hardware_platform_version: optional(hardware_platform_version, asInteger), - // HVM_shadow_multiplier: asFloat(HVM_shadow_multiplier), // FIXME: does not work FIELD_TYPE_ERROR(hVM_shadow_multiplier) - name_description, - name_label, - order: optional(order, asInteger), - protection_policy, - shutdown_delay: asInteger(shutdown_delay), - start_delay: asInteger(start_delay), - tags, - version: asInteger(version), - xenstore_data, - })) + // Optional fields. 
+ blocked_operations, + generation_id, + ha_always_run: asBoolean(ha_always_run), + ha_restart_priority, + has_vendor_device, + hardware_platform_version: optional( + hardware_platform_version, + asInteger + ), + // HVM_shadow_multiplier: asFloat(HVM_shadow_multiplier), // FIXME: does not work FIELD_TYPE_ERROR(hVM_shadow_multiplier) + name_description, + name_label, + order: optional(order, asInteger), + protection_policy, + shutdown_delay: asInteger(shutdown_delay), + start_delay: asInteger(start_delay), + tags, + version: asInteger(version), + xenstore_data, + }) + ) } async _deleteVm (vm, deleteDisks = true, force = false) { @@ -675,43 +658,41 @@ export default class Xapi extends XapiBase { this._deleteVm(snapshot) )::ignoreErrors(), - deleteDisks && asyncMap(getVmDisks(vm), ({ $ref: vdiRef }) => { - let onFailure = () => { - onFailure = vdi => { - console.error(`cannot delete VDI ${vdi.name_label} (from VM ${vm.name_label})`) - forEach(vdi.$VBDs, vbd => { - if (vbd.VM !== $ref) { - const vm = vbd.$VM - console.error('- %s (%s)', vm.name_label, vm.uuid) - } - }) - } + deleteDisks && + asyncMap(getVmDisks(vm), ({ $ref: vdiRef }) => { + let onFailure = () => { + onFailure = vdi => { + console.error( + `cannot delete VDI ${vdi.name_label} (from VM ${vm.name_label})` + ) + forEach(vdi.$VBDs, vbd => { + if (vbd.VM !== $ref) { + const vm = vbd.$VM + console.error('- %s (%s)', vm.name_label, vm.uuid) + } + }) + } - // maybe the control domain has not yet unmounted the VDI, - // check and retry after 5 seconds - return pDelay(5e3).then(test) - } - const test = () => { - const vdi = this.getObjectByRef(vdiRef) - return ( - // Only remove VBDs not attached to other VMs. - vdi.VBDs.length < 2 || - every(vdi.$VBDs, vbd => vbd.VM === $ref) - ) - ? this._deleteVdi(vdi) - : onFailure(vdi) - } - return test() - })::ignoreErrors(), + // maybe the control domain has not yet unmounted the VDI, + // check and retry after 5 seconds + return pDelay(5e3).then(test) + } + const test = () => { + const vdi = this.getObjectByRef(vdiRef) + return ( + // Only remove VBDs not attached to other VMs. + vdi.VBDs.length < 2 || every(vdi.$VBDs, vbd => vbd.VM === $ref) + ? this._deleteVdi(vdi) + : onFailure(vdi) + ) + } + return test() + })::ignoreErrors(), ]) } async deleteVm (vmId, deleteDisks, force) { - return /* await */ this._deleteVm( - this.getObject(vmId), - deleteDisks, - force - ) + return /* await */ this._deleteVm(this.getObject(vmId), deleteDisks, force) } getVmConsole (vmId) { @@ -726,9 +707,7 @@ export default class Xapi extends XapiBase { } // Returns a stream to the exported VM. 
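   // (Editorial note: if a temporary snapshot was taken for this export,
   // the snapshotRef guard below chains a pFinally handler that deletes it
   // once the export task settles, so the returned stream cleans up after
   // itself.)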
-  async exportVm (vmId, {
-    compress = true,
-  } = {}) {
+  async exportVm (vmId, { compress = true } = {}) {
     const vm = this.getObject(vmId)

     let host
@@ -748,9 +727,9 @@
     })

     if (snapshotRef !== undefined) {
-      promise.then(_ => _.task::pFinally(() =>
-        this.deleteVm(snapshotRef)::ignoreErrors()
-      ))
+      promise.then(_ =>
+        _.task::pFinally(() => this.deleteVm(snapshotRef)::ignoreErrors())
+      )
     }

     return promise
@@ -765,7 +744,10 @@ export default class Xapi extends XapiBase {
     const { SR } = vdi
     let childrenMap = cache[SR]
     if (childrenMap === undefined) {
-      childrenMap = cache[SR] = groupBy(vdi.$SR.$VDIs, _ => _.sm_config['vhd-parent'])
+      childrenMap = cache[SR] = groupBy(
+        vdi.$SR.$VDIs,
+        _ => _.sm_config['vhd-parent']
+      )
     }

     // an unmanaged VDI should not have exactly one child: they
@@ -795,15 +777,21 @@ export default class Xapi extends XapiBase {
   // Create a snapshot of the VM and returns a delta export object.
   @cancellable
   @deferrable
-  async exportDeltaVm ($defer, $cancelToken, vmId, baseVmId = undefined, {
-    bypassVdiChainsCheck = false,
+  async exportDeltaVm (
+    $defer,
+    $cancelToken,
+    vmId,
+    baseVmId = undefined,
+    {
+      bypassVdiChainsCheck = false,

-    // Contains a vdi.$id set of vmId.
-    fullVdisRequired = [],
+      // Contains a vdi.$id set of vmId.
+      fullVdisRequired = [],

-    disableBaseTags = false,
-    snapshotNameLabel = undefined,
-  } = {}) {
+      disableBaseTags = false,
+      snapshotNameLabel = undefined,
+    } = {}
+  ) {
     if (!bypassVdiChainsCheck) {
       this._assertHealthyVdiChains(this.getObject(vmId))
     }
@@ -811,7 +799,7 @@ export default class Xapi extends XapiBase {
     const vm = await this.snapshotVm(vmId)
     $defer.onFailure(() => this._deleteVm(vm))
     if (snapshotNameLabel) {
-      this._setObjectProperties(vm, {
+      ;this._setObjectProperties(vm, {
        nameLabel: snapshotNameLabel,
      })::ignoreErrors()
    }
@@ -820,26 +808,24 @@ export default class Xapi extends XapiBase {
     // refs of VM's VDIs → base's VDIs.
     const baseVdis = {}
-    baseVm && forEach(baseVm.$VBDs, vbd => {
-      let vdi, snapshotOf
-      if (
-        (vdi = vbd.$VDI) &&
-        (snapshotOf = vdi.$snapshot_of) &&
-        !find(fullVdisRequired, id => snapshotOf.$id === id)
-      ) {
-        baseVdis[vdi.snapshot_of] = vdi
-      }
-    })
+    baseVm &&
+      forEach(baseVm.$VBDs, vbd => {
+        let vdi, snapshotOf
+        if (
+          (vdi = vbd.$VDI) &&
+          (snapshotOf = vdi.$snapshot_of) &&
+          !find(fullVdisRequired, id => snapshotOf.$id === id)
+        ) {
+          baseVdis[vdi.snapshot_of] = vdi
+        }
+      })

     const streams = {}
     const vdis = {}
     const vbds = {}
     forEach(vm.$VBDs, vbd => {
       let vdi
-      if (
-        vbd.type !== 'Disk' ||
-        !(vdi = vbd.$VDI)
-      ) {
+      if (vbd.type !== 'Disk' || !(vdi = vbd.$VDI)) {
         // Ignore this VBD.
         return
       }
@@ -851,7 +837,7 @@
       //
       // The snapshot must not exist otherwise it could break the
       // next export.
-        this._deleteVdi(vdi)::ignoreErrors()
+        ;this._deleteVdi(vdi)::ignoreErrors()

        return
      }
@@ -866,20 +852,26 @@
       // Look for a snapshot of this vdi in the base VM.
       const baseVdi = baseVdis[vdi.snapshot_of]

-      vdis[vdiRef] = baseVdi && !disableBaseTags
-        ? {
-          ...vdi,
-          other_config: {
-            ...vdi.other_config,
-            [TAG_BASE_DELTA]: baseVdi.uuid,
-          },
-          $SR$uuid: vdi.$SR.uuid,
-        }
-        : {
-          ...vdi,
-          $SR$uuid: vdi.$SR.uuid,
-        }
-      const stream = streams[`${vdiRef}.vhd`] = this._exportVdi($cancelToken, vdi, baseVdi, VDI_FORMAT_VHD)
+      vdis[vdiRef] =
+        baseVdi && !disableBaseTags
+          ? {
+              ...vdi,
+              other_config: {
+                ...vdi.other_config,
+                [TAG_BASE_DELTA]: baseVdi.uuid,
+              },
+              $SR$uuid: vdi.$SR.uuid,
+            }
+          : {
+              ...vdi,
+              $SR$uuid: vdi.$SR.uuid,
+            }
+      const stream = (streams[`${vdiRef}.vhd`] = this._exportVdi(
+        $cancelToken,
+        vdi,
+        baseVdi,
+        VDI_FORMAT_VHD
+      ))

       $defer.onFailure(stream.cancel)
     })
@@ -891,33 +883,42 @@ export default class Xapi extends XapiBase {
      }
    })

-    return Object.defineProperty({
-      version: '1.1.0',
-      vbds,
-      vdis,
-      vifs,
-      vm: {
-        ...vm,
-        other_config: baseVm && !disableBaseTags
-          ? {
-            ...vm.other_config,
-            [TAG_BASE_DELTA]: baseVm.uuid,
-          }
-          : omit(vm.other_config, TAG_BASE_DELTA),
+    return Object.defineProperty(
+      {
+        version: '1.1.0',
+        vbds,
+        vdis,
+        vifs,
+        vm: {
+          ...vm,
+          other_config:
+            baseVm && !disableBaseTags
+              ? {
+                  ...vm.other_config,
+                  [TAG_BASE_DELTA]: baseVm.uuid,
+                }
+              : omit(vm.other_config, TAG_BASE_DELTA),
+        },
       },
-    }, 'streams', {
-      value: await streams::pAll(),
-    })
+      'streams',
+      {
+        value: await streams::pAll(),
+      }
+    )
   }

   @deferrable
-  async importDeltaVm ($defer, delta, {
-    deleteBase = false,
-    disableStartAfterImport = true,
-    mapVdisSrs = {},
-    name_label = delta.vm.name_label,
-    srId = this.pool.default_SR,
-  } = {}) {
+  async importDeltaVm (
+    $defer,
+    delta,
+    {
+      deleteBase = false,
+      disableStartAfterImport = true,
+      mapVdisSrs = {},
+      name_label = delta.vm.name_label,
+      srId = this.pool.default_SR,
+    } = {}
+  ) {
     const { version } = delta

     if (!versionSatisfies(version, '^1')) {
@@ -927,10 +928,11 @@
     const remoteBaseVmUuid = delta.vm.other_config[TAG_BASE_DELTA]
     let baseVm
     if (remoteBaseVmUuid) {
-      baseVm = find(this.objects.all, obj => (
-        (obj = obj.other_config) &&
-        obj[TAG_COPY_SRC] === remoteBaseVmUuid
-      ))
+      baseVm = find(
+        this.objects.all,
+        obj =>
+          (obj = obj.other_config) && obj[TAG_COPY_SRC] === remoteBaseVmUuid
+      )

       if (!baseVm) {
         throw new Error('could not find the base VM')
@@ -938,9 +940,10 @@
     }

     const baseVdis = {}
-    baseVm && forEach(baseVm.$VBDs, vbd => {
-      baseVdis[vbd.VDI] = vbd.$VDI
-    })
+    baseVm &&
+      forEach(baseVm.$VBDs, vbd => {
+        baseVdis[vbd.VDI] = vbd.$VDI
+      })

     const { streams } = delta

@@ -967,10 +970,7 @@
     ])

     // 2. Delete all VBDs which may have been created by the import.
-    await asyncMap(
-      vm.$VBDs,
-      vbd => this._deleteVbd(vbd)
-    )::ignoreErrors()
+    await asyncMap(vm.$VBDs, vbd => this._deleteVbd(vbd))::ignoreErrors()

     // 3. Create VDIs.
     const newVdis = await map(delta.vdis, async vdi => {
@@ -998,9 +998,7 @@
         throw new Error(`missing base VDI (copy of ${remoteBaseVdiUuid})`)
       }

-      const newVdi = await this._getOrWaitObject(
-        await this._cloneVdi(baseVdi)
-      )
+      const newVdi = await this._getOrWaitObject(await this._cloneVdi(baseVdi))
       $defer.onFailure(() => this._deleteVdi(newVdi))

       await this._updateObjectMapProperty(newVdi, 'other_config', {
@@ -1018,9 +1016,8 @@
     await Promise.all([
       // Create VBDs.
-      asyncMap(
-        delta.vbds,
-        vbd => this.createVbd({
+      asyncMap(delta.vbds, vbd =>
+        this.createVbd({
           ...vbd,
           vdi: newVdis[vbd.VDI],
           vm,
         })
       ),

       // Import VDI contents.
-      asyncMap(
-        newVdis,
-        async (vdi, id) => {
-          for (const stream of ensureArray(streams[`${id}.vhd`])) {
-            await this._importVdiContent(vdi, stream, VDI_FORMAT_VHD)
-          }
+      asyncMap(newVdis, async (vdi, id) => {
+        for (const stream of ensureArray(streams[`${id}.vhd`])) {
+          await this._importVdiContent(vdi, stream, VDI_FORMAT_VHD)
         }
-      ),
+      }),

       // Wait for VDI export tasks (if any) termination.
-      asyncMap(
-        streams,
-        stream => stream.task
-      ),
+      asyncMap(streams, stream => stream.task),

       // Create VIFs.
       asyncMap(delta.vifs, vif => {
@@ -1051,17 +1042,13 @@
         defaultNetwork

         if (network) {
-          return this._createVif(
-            vm,
-            network,
-            vif
-          )
+          return this._createVif(vm, network, vif)
         }
       }),
     ])

     if (deleteBase && baseVm) {
-      this._deleteVm(baseVm)::ignoreErrors()
+      ;this._deleteVm(baseVm)::ignoreErrors()
     }

     await Promise.all([
@@ -1079,34 +1066,40 @@ export default class Xapi extends XapiBase {
     return vm
   }

-  async _migrateVmWithStorageMotion (vm, hostXapi, host, {
-    migrationNetwork = find(host.$PIFs, pif => pif.management).$network, // TODO: handle not found
-    sr,
-    mapVdisSrs,
-    mapVifsNetworks,
-  }) {
+  async _migrateVmWithStorageMotion (
+    vm,
+    hostXapi,
+    host,
+    {
+      migrationNetwork = find(host.$PIFs, pif => pif.management).$network, // TODO: handle not found
+      sr,
+      mapVdisSrs,
+      mapVifsNetworks,
+    }
+  ) {
     // VDIs/SRs mapping
     const vdis = {}
     const defaultSr = host.$pool.$default_SR
     for (const vbd of vm.$VBDs) {
       const vdi = vbd.$VDI
       if (vbd.type === 'Disk') {
-        vdis[vdi.$ref] = mapVdisSrs && mapVdisSrs[vdi.$id]
-          ? hostXapi.getObject(mapVdisSrs[vdi.$id]).$ref
-          : sr !== undefined
-            ? hostXapi.getObject(sr).$ref
-            : defaultSr.$ref // Will error if there are no default SR.
+        vdis[vdi.$ref] =
+          mapVdisSrs && mapVdisSrs[vdi.$id]
+            ? hostXapi.getObject(mapVdisSrs[vdi.$id]).$ref
+            : sr !== undefined ? hostXapi.getObject(sr).$ref : defaultSr.$ref // Will error if there are no default SR.
       }
     }

     // VIFs/Networks mapping
     const vifsMap = {}
     if (vm.$pool !== host.$pool) {
-      const defaultNetworkRef = find(host.$PIFs, pif => pif.management).$network.$ref
+      const defaultNetworkRef = find(host.$PIFs, pif => pif.management).$network
+        .$ref
       for (const vif of vm.$VIFs) {
-        vifsMap[vif.$ref] = mapVifsNetworks && mapVifsNetworks[vif.$id]
-          ? hostXapi.getObject(mapVifsNetworks[vif.$id]).$ref
-          : defaultNetworkRef
+        vifsMap[vif.$ref] =
+          mapVifsNetworks && mapVifsNetworks[vif.$id]
+            ? hostXapi.getObject(mapVifsNetworks[vif.$id]).$ref
+            : defaultNetworkRef
       }
     }

@@ -1117,27 +1110,33 @@ export default class Xapi extends XapiBase {
       {}
     )

-    const loop = () => this.call(
-      'VM.migrate_send',
-      vm.$ref,
-      token,
-      true, // Live migration.
-      vdis,
-      vifsMap,
-      {
-        force: 'true',
-      }
-    )::pCatch(
-      { code: 'TOO_MANY_STORAGE_MIGRATES' },
-      () => pDelay(1e4).then(loop)
-    )
+    const loop = () =>
+      this.call(
+        'VM.migrate_send',
+        vm.$ref,
+        token,
+        true, // Live migration.
+        vdis,
+        vifsMap,
+        {
+          force: 'true',
+        }
+      )::pCatch({ code: 'TOO_MANY_STORAGE_MIGRATES' }, () =>
+        pDelay(1e4).then(loop)
+      )

     return loop()
   }

   @synchronized
   _callInstallationPlugin (hostRef, vdi) {
-    return this.call('host.call_plugin', hostRef, 'install-supp-pack', 'install', { vdi }).catch(error => {
+    return this.call(
+      'host.call_plugin',
+      hostRef,
+      'install-supp-pack',
+      'install',
+      { vdi }
+    ).catch(error => {
       if (error.code !== 'XENAPI_PLUGIN_FAILURE') {
         console.warn('_callInstallationPlugin', error)
         throw error
@@ -1151,7 +1150,12 @@ export default class Xapi extends XapiBase {
       throw new Error('stream must have a length')
     }

-    const vdi = await this.createTemporaryVdiOnHost(stream, hostId, '[XO] Supplemental pack ISO', 'small temporary VDI to store a supplemental pack ISO')
+    const vdi = await this.createTemporaryVdiOnHost(
+      stream,
+      hostId,
+      '[XO] Supplemental pack ISO',
+      'small temporary VDI to store a supplemental pack ISO'
+    )
     $defer(() => this._deleteVdi(vdi))

     await this._callInstallationPlugin(this.getObject(hostId).$ref, vdi.uuid)
@@ -1164,7 +1168,9 @@ export default class Xapi extends XapiBase {
     }

     const isSrAvailable = sr =>
-      sr && sr.content_type === 'user' && sr.physical_size - sr.physical_utilisation >= stream.length
+      sr &&
+      sr.content_type === 'user' &&
+      sr.physical_size - sr.physical_utilisation >= stream.length

     const hosts = filter(this.objects.all, { $type: 'host' })

@@ -1172,7 +1178,12 @@ export default class Xapi extends XapiBase {

     // Shared SR available: create only 1 VDI for all the installations
     if (sr) {
-      const vdi = await this.createTemporaryVdiOnSr(stream, sr, '[XO] Supplemental pack ISO', 'small temporary VDI to store a supplemental pack ISO')
+      const vdi = await this.createTemporaryVdiOnSr(
+        stream,
+        sr,
+        '[XO] Supplemental pack ISO',
+        'small temporary VDI to store a supplemental pack ISO'
+      )
       $defer(() => this._deleteVdi(vdi))

       // Install pack sequentially to prevent concurrent access to the unique VDI
@@ -1184,25 +1195,32 @@ export default class Xapi extends XapiBase {
     }

     // No shared SR available: find an available local SR on each host
-    return Promise.all(mapToArray(hosts, deferrable(async ($defer, host) => {
-      // pipe stream synchronously to several PassThroughs to be able to pipe them asynchronously later
-      const pt = stream.pipe(new PassThrough())
-      pt.length = stream.length
+    return Promise.all(
+      mapToArray(
+        hosts,
+        deferrable(async ($defer, host) => {
+          // pipe stream synchronously to several PassThroughs to be able to pipe them asynchronously later
+          const pt = stream.pipe(new PassThrough())
+          pt.length = stream.length

-      const sr = find(
-        mapToArray(host.$PBDs, '$SR'),
-        isSrAvailable
+          const sr = find(mapToArray(host.$PBDs, '$SR'), isSrAvailable)
+
+          if (!sr) {
+            throw new Error('no SR available to store installation file')
+          }
+
+          const vdi = await this.createTemporaryVdiOnSr(
+            pt,
+            sr,
+            '[XO] Supplemental pack ISO',
+            'small temporary VDI to store a supplemental pack ISO'
+          )
+          $defer(() => this._deleteVdi(vdi))
+
+          await this._callInstallationPlugin(host.$ref, vdi.uuid)
+        })
       )
-
-      if (!sr) {
-        throw new Error('no SR available to store installation file')
-      }
-
-      const vdi = await this.createTemporaryVdiOnSr(pt, sr, '[XO] Supplemental pack ISO', 'small temporary VDI to store a supplemental pack ISO')
-      $defer(() => this._deleteVdi(vdi))
-
-      await this._callInstallationPlugin(host.$ref, vdi.uuid)
-    })))
+    )
   }

   async _importVm (stream, sr, onVmCreation = undefined) {
@@ -1216,33 +1234,30 @@ export default class Xapi extends XapiBase {
     }

     if (onVmCreation) {
-      this._waitObject(
-        obj => obj && obj.current_operations && taskRef in obj.current_operations
-      ).then(onVmCreation)::ignoreErrors()
+      ;this._waitObject(
+        obj =>
+          obj && obj.current_operations && taskRef in obj.current_operations
+      )
+        .then(onVmCreation)
+        ::ignoreErrors()
     }

-    const vmRef = await this.putResource(
-      stream,
-      '/import/',
-      {
-        host,
-        query,
-        task: taskRef,
-      }
-    ).then(extractOpaqueRef)
+    const vmRef = await this.putResource(stream, '/import/', {
+      host,
+      query,
+      task: taskRef,
+    }).then(extractOpaqueRef)

     return vmRef
   }

   @deferrable
-  async _importOvaVm ($defer, stream, {
-    descriptionLabel,
-    disks,
-    memory,
-    nameLabel,
-    networks,
-    nCpus,
-  }, sr) {
+  async _importOvaVm (
+    $defer,
+    stream,
+    { descriptionLabel, disks, memory, nameLabel, networks, nCpus },
+    sr
+  ) {
     // 1. Create VM.
     const vm = await this._getOrWaitObject(
       await this._createVmRecord({
@@ -1259,8 +1274,14 @@ export default class Xapi extends XapiBase {
     $defer.onFailure(() => this._deleteVm(vm))
     // Disable start and change the VM name label during import.
     await Promise.all([
-      this.addForbiddenOperationToVm(vm.$id, 'start', 'OVA import in progress...'),
-      this._setObjectProperties(vm, { name_label: `[Importing...] ${nameLabel}` }),
+      this.addForbiddenOperationToVm(
+        vm.$id,
+        'start',
+        'OVA import in progress...'
+      ),
+      this._setObjectProperties(vm, {
+        name_label: `[Importing...] ${nameLabel}`,
+      }),
     ])

     // 2. Create VDIs & Vifs.
@@ -1268,12 +1289,12 @@ export default class Xapi extends XapiBase {
     const vifDevices = await this.call('VM.get_allowed_VIF_devices', vm.$ref)
     await Promise.all(
       map(disks, async disk => {
-        const vdi = vdis[disk.path] = await this.createVdi({
+        const vdi = (vdis[disk.path] = await this.createVdi({
           name_description: disk.descriptionLabel,
           name_label: disk.nameLabel,
           size: disk.capacity,
           sr: sr.$ref,
-        })
+        }))
         $defer.onFailure(() => this._deleteVdi(vdi))

         return this.createVbd({
@@ -1281,11 +1302,13 @@ export default class Xapi extends XapiBase {
           vdi,
           vm,
         })
-      }).concat(map(networks, (networkId, i) => (
-        this._createVif(vm, this.getObject(networkId), {
-          device: vifDevices[i],
-        })
-      )))
+      }).concat(
+        map(networks, (networkId, i) =>
+          this._createVif(vm, this.getObject(networkId), {
+            device: vifDevices[i],
+          })
+        )
+      )
     )

     // 3. Import VDIs contents.
@@ -1324,18 +1347,11 @@ export default class Xapi extends XapiBase {
   }

   // TODO: an XVA can contain multiple VMs
-  async importVm (stream, {
-    data,
-    srId,
-    type = 'xva',
-  } = {}) {
+  async importVm (stream, { data, srId, type = 'xva' } = {}) {
     const sr = srId && this.getObject(srId)

     if (type === 'xva') {
-      return /* await */ this._getOrWaitObject(await this._importVm(
-        stream,
-        sr,
-      ))
+      return /* await */ this._getOrWaitObject(await this._importVm(stream, sr))
     }

     if (type === 'ova') {
@@ -1345,34 +1361,36 @@ export default class Xapi extends XapiBase {
     throw new Error(`unsupported type: '${type}'`)
   }

-  async migrateVm (vmId, hostXapi, hostId, {
-    sr,
-    migrationNetworkId,
-    mapVifsNetworks,
-    mapVdisSrs,
-  } = {}) {
+  async migrateVm (
+    vmId,
+    hostXapi,
+    hostId,
+    { sr, migrationNetworkId, mapVifsNetworks, mapVdisSrs } = {}
+  ) {
     const vm = this.getObject(vmId)
     const host = hostXapi.getObject(hostId)

     const accrossPools = vm.$pool !== host.$pool
-    const useStorageMotion = (
+    const useStorageMotion =
       accrossPools ||
       sr !== undefined ||
       migrationNetworkId !== undefined ||
       !isEmpty(mapVifsNetworks) ||
       !isEmpty(mapVdisSrs)
-    )

     if (useStorageMotion) {
       await this._migrateVmWithStorageMotion(vm, hostXapi, host, {
-        migrationNetwork: migrationNetworkId && hostXapi.getObject(migrationNetworkId),
+        migrationNetwork:
+          migrationNetworkId && hostXapi.getObject(migrationNetworkId),
         sr,
         mapVdisSrs,
         mapVifsNetworks,
       })
     } else {
       try {
-        await this.call('VM.pool_migrate', vm.$ref, host.$ref, { force: 'true' })
+        await this.call('VM.pool_migrate', vm.$ref, host.$ref, {
+          force: 'true',
+        })
       } catch (error) {
         if (error.code !== 'VM_REQUIRES_SR') {
           throw error
@@ -1385,11 +1403,11 @@ export default class Xapi extends XapiBase {
   }

   async _snapshotVm (vm, nameLabel = vm.name_label) {
-    debug(`Snapshotting VM ${vm.name_label}${
-      nameLabel !== vm.name_label
-        ? ` as ${nameLabel}`
-        : ''
-    }`)
+    debug(
+      `Snapshotting VM ${vm.name_label}${
+        nameLabel !== vm.name_label ? ` as ${nameLabel}` : ''
+      }`
+    )

     let ref
     try {
@@ -1401,10 +1419,8 @@ export default class Xapi extends XapiBase {
       const { code } = error
       if (
         code !== 'VM_SNAPSHOT_WITH_QUIESCE_NOT_SUPPORTED' &&
-
         // quiesce only work on a running VM
         code !== 'VM_BAD_POWER_STATE' &&
-
         // quiesce failed, fallback on standard snapshot
         // TODO: emit warning
         code !== 'VM_SNAPSHOT_WITH_QUIESCE_FAILED'
@@ -1415,7 +1431,7 @@ export default class Xapi extends XapiBase {
     }
     // Convert the template to a VM and wait to have receive the up-
     // to-date object.
-    const [ , snapshot ] = await Promise.all([
+    const [, snapshot] = await Promise.all([
       this.call('VM.set_is_a_template', ref, false),
       this._waitObjectState(ref, snapshot => !snapshot.is_a_template),
     ])
@@ -1424,16 +1440,13 @@ export default class Xapi extends XapiBase {
   }

   async snapshotVm (vmId, nameLabel = undefined) {
-    return /* await */ this._snapshotVm(
-      this.getObject(vmId),
-      nameLabel
-    )
+    return /* await */ this._snapshotVm(this.getObject(vmId), nameLabel)
   }

   async setVcpuWeight (vmId, weight) {
     weight = weight || null // Take all falsy values as a removal (0 included)
     const vm = this.getObject(vmId)
-    await this._updateObjectMapProperty(vm, 'VCPUs_params', {weight})
+    await this._updateObjectMapProperty(vm, 'VCPUs_params', { weight })
   }

   async _startVm (vm, force) {
@@ -1487,12 +1500,15 @@ export default class Xapi extends XapiBase {
     } else {
       // Find the original template by name (*sigh*).
      const templateNameLabel = vm.other_config['base_template_name']
-      const template = templateNameLabel &&
-        find(this.objects.all, obj => (
-          obj.$type === 'vm' &&
-          obj.is_a_template &&
-          obj.name_label === templateNameLabel
-        ))
+      const template =
+        templateNameLabel &&
+        find(
+          this.objects.all,
+          obj =>
+            obj.$type === 'vm' &&
+            obj.is_a_template &&
+            obj.name_label === templateNameLabel
+        )

       const bootloader = vm.PV_bootloader
       const bootables = []
@@ -1507,7 +1523,7 @@ export default class Xapi extends XapiBase {
            })
          )

-          bootables.push([ vbd, Boolean(vbd.bootable) ])
+          bootables.push([vbd, Boolean(vbd.bootable)])
        })

        promises.push(
@@ -1515,7 +1531,8 @@ export default class Xapi extends XapiBase {
            PV_bootloader: 'eliloader',
          }),
          this._updateObjectMapProperty(vm, 'other_config', {
-            'install-distro': template && template.other_config['install-distro'],
+            'install-distro':
+              template && template.other_config['install-distro'],
            'install-repository': 'cdrom',
          })
        )
@@ -1524,12 +1541,12 @@ export default class Xapi extends XapiBase {
      await this._startVm(vm)
    } finally {
-      this._setObjectProperties(vm, {
+      ;this._setObjectProperties(vm, {
        PV_bootloader: bootloader,
      })::ignoreErrors()

-      forEach(bootables, ([ vbd, bootable ]) => {
-        this._setObjectProperties(vbd, { bootable })::ignoreErrors()
+      forEach(bootables, ([vbd, bootable]) => {
+        ;this._setObjectProperties(vbd, { bootable })::ignoreErrors()
      })
    }
  }
@@ -1537,11 +1554,20 @@ export default class Xapi extends XapiBase {

   // vm_operations: http://xapi-project.github.io/xen-api/classes/vm.html
   async addForbiddenOperationToVm (vmId, operation, reason) {
-    await this.call('VM.add_to_blocked_operations', this.getObject(vmId).$ref, operation, `[XO] ${reason}`)
+    await this.call(
+      'VM.add_to_blocked_operations',
+      this.getObject(vmId).$ref,
+      operation,
+      `[XO] ${reason}`
+    )
   }

   async removeForbiddenOperationFromVm (vmId, operation) {
-    await this.call('VM.remove_from_blocked_operations', this.getObject(vmId).$ref, operation)
+    await this.call(
+      'VM.remove_from_blocked_operations',
+      this.getObject(vmId).$ref,
+      operation
+    )
   }

   // =================================================================
@@ -1560,7 +1586,7 @@ export default class Xapi extends XapiBase {
    vdi = VDI,
    empty = vdi === undefined,
-    mode = (type === 'Disk') ? 'RW' : 'RO',
+    mode = type === 'Disk' ? 'RW' : 'RO',
    vm = VM,
  }) {
    vdi = this.getObject(vdi)
@@ -1570,16 +1596,14 @@ export default class Xapi extends XapiBase {

     if (userdevice == null) {
       const allowed = await this.call('VM.get_allowed_VBD_devices', vm.$ref)
-      const {length} = allowed
+      const { length } = allowed
       if (length === 0) {
         throw new Error('no allowed VBD devices')
       }

       if (type === 'CD') {
         // Choose position 3 if allowed.
-        userdevice = includes(allowed, '3')
-          ? '3'
-          : allowed[0]
+        userdevice = includes(allowed, '3') ? '3' : allowed[0]
       } else {
         userdevice = allowed[0]

@@ -1635,19 +1659,21 @@ export default class Xapi extends XapiBase {
     sr = this.getObject(sr)
     debug(`Creating VDI ${name_label} on ${sr.name_label}`)

-    return this._getOrWaitObject(await this.call('VDI.create', {
-      name_description,
-      name_label,
-      other_config,
-      read_only: Boolean(read_only),
-      sharable: Boolean(sharable),
-      sm_config,
-      SR: sr.$ref,
-      tags,
-      type,
-      virtual_size: size !== undefined ? parseSize(size) : virtual_size,
-      xenstore_data,
-    }))
+    return this._getOrWaitObject(
+      await this.call('VDI.create', {
+        name_description,
+        name_label,
+        other_config,
+        read_only: Boolean(read_only),
+        sharable: Boolean(sharable),
+        sm_config,
+        SR: sr.$ref,
+        tags,
+        type,
+        virtual_size: size !== undefined ? parseSize(size) : virtual_size,
+        xenstore_data,
+      })
+    )
   }

   async moveVdi (vdiId, srId) {
@@ -1658,24 +1684,33 @@ export default class Xapi extends XapiBase {
       return // nothing to do
     }

-    debug(`Moving VDI ${vdi.name_label} from ${vdi.$SR.name_label} to ${sr.name_label}`)
+    debug(
+      `Moving VDI ${vdi.name_label} from ${vdi.$SR.name_label} to ${
+        sr.name_label
+      }`
+    )

     try {
       await this.call('VDI.pool_migrate', vdi.$ref, sr.$ref, {})
     } catch (error) {
       const { code } = error
-      if (code !== 'LICENCE_RESTRICTION' && code !== 'VDI_NEEDS_VM_FOR_MIGRATE') {
+      if (
+        code !== 'LICENCE_RESTRICTION' &&
+        code !== 'VDI_NEEDS_VM_FOR_MIGRATE'
+      ) {
         throw error
       }
       const newVdi = await this.barrier(
         await this.call('VDI.copy', vdi.$ref, sr.$ref)
       )
-      await asyncMap(vdi.$VBDs, vbd => Promise.all([
-        this.call('VBD.destroy', vbd.$ref),
-        this.createVbd({
-          ...vbd,
-          vdi: newVdi,
-        }),
-      ]))
+      await asyncMap(vdi.$VBDs, vbd =>
+        Promise.all([
+          this.call('VBD.destroy', vbd.$ref),
+          this.createVbd({
+            ...vbd,
+            vdi: newVdi,
+          }),
+        ])
+      )
       await this._deleteVdi(vdi)
     }
   }
@@ -1708,10 +1743,7 @@ export default class Xapi extends XapiBase {
     }
   }

-  async _insertCdIntoVm (cd, vm, {
-    bootable = false,
-    force = false,
-  } = {}) {
+  async _insertCdIntoVm (cd, vm, { bootable = false, force = false } = {}) {
     const cdDrive = await this._getVmCdDrive(vm)
     if (cdDrive) {
       try {
@@ -1728,7 +1760,7 @@ export default class Xapi extends XapiBase {
       }

       if (bootable !== Boolean(cdDrive.bootable)) {
-        await this._setObjectProperties(cdDrive, {bootable})
+        await this._setObjectProperties(cdDrive, { bootable })
       }
     } else {
       await this.createVbd({
@@ -1792,11 +1824,7 @@ export default class Xapi extends XapiBase {
   }

   async insertCdIntoVm (cdId, vmId, opts = undefined) {
-    await this._insertCdIntoVm(
-      this.getObject(cdId),
-      this.getObject(vmId),
-      opts
-    )
+    await this._insertCdIntoVm(this.getObject(cdId), this.getObject(vmId), opts)
   }

   // -----------------------------------------------------------------
@@ -1827,10 +1855,11 @@ export default class Xapi extends XapiBase {
       query.base = base.$ref
     }

-    debug(`exporting VDI ${vdi.name_label}${base
-      ? ` (from base ${vdi.name_label})`
-      : ''
-    }`)
+    debug(
+      `exporting VDI ${vdi.name_label}${
+        base ? ` (from base ${vdi.name_label})` : ''
+      }`
+    )

     return this.getResource($cancelToken, '/export_raw_vdi/', {
       host,
@@ -1840,10 +1869,7 @@ export default class Xapi extends XapiBase {
   }

   // Returns a stream to the exported VDI.
-  exportVdi (vdiId, {
-    baseId,
-    format,
-  } = {}) {
+  exportVdi (vdiId, { baseId, format } = {}) {
     return this._exportVdi(
       this.getObject(vdiId),
       baseId && this.getObject(baseId),
@@ -1873,51 +1899,54 @@ export default class Xapi extends XapiBase {
     ])
   }

-  importVdiContent (vdiId, body, {
-    format,
-  } = {}) {
-    return this._importVdiContent(
-      this.getObject(vdiId),
-      body,
-      format
-    )
+  importVdiContent (vdiId, body, { format } = {}) {
+    return this._importVdiContent(this.getObject(vdiId), body, format)
   }

   // =================================================================

-  async _createVif (vm, network, {
-    mac = '',
-    position = undefined,
+  async _createVif (
+    vm,
+    network,
+    {
+      mac = '',
+      position = undefined,

-    currently_attached = true,
-    device = position != null ? String(position) : undefined,
-    ipv4_allowed = undefined,
-    ipv6_allowed = undefined,
-    locking_mode = undefined,
-    MAC = mac,
-    other_config = {},
-    qos_algorithm_params = {},
-    qos_algorithm_type = '',
-  } = {}) {
-    debug(`Creating VIF for VM ${vm.name_label} on network ${network.name_label}`)
+      currently_attached = true,
+      device = position != null ? String(position) : undefined,
+      ipv4_allowed = undefined,
+      ipv6_allowed = undefined,
+      locking_mode = undefined,
+      MAC = mac,
+      other_config = {},
+      qos_algorithm_params = {},
+      qos_algorithm_type = '',
+    } = {}
+  ) {
+    debug(
+      `Creating VIF for VM ${vm.name_label} on network ${network.name_label}`
+    )

     if (device == null) {
       device = (await this.call('VM.get_allowed_VIF_devices', vm.$ref))[0]
     }

-    const vifRef = await this.call('VIF.create', filterUndefineds({
-      device,
-      ipv4_allowed,
-      ipv6_allowed,
-      locking_mode,
-      MAC,
-      MTU: asInteger(network.MTU),
-      network: network.$ref,
-      other_config,
-      qos_algorithm_params,
-      qos_algorithm_type,
-      VM: vm.$ref,
-    }))
+    const vifRef = await this.call(
+      'VIF.create',
+      filterUndefineds({
+        device,
+        ipv4_allowed,
+        ipv6_allowed,
+        locking_mode,
+        MAC,
+        MTU: asInteger(network.MTU),
+        network: network.$ref,
+        other_config,
+        qos_algorithm_params,
+        qos_algorithm_type,
+        VM: vm.$ref,
+      })
+    )

     if (currently_attached && isVmRunning(vm)) {
       await this.call('VIF.plug', vifRef)
@@ -1936,13 +1965,10 @@ export default class Xapi extends XapiBase {
    )
  }

   @deferrable
-  async createNetwork ($defer, {
-    name,
-    description = 'Created with Xen Orchestra',
-    pifId,
-    mtu,
-    vlan,
-  }) {
+  async createNetwork (
+    $defer,
+    { name, description = 'Created with Xen Orchestra', pifId, mtu, vlan }
+  ) {
     const networkRef = await this.call('network.create', {
       name_label: name,
       name_description: description,
@@ -1951,23 +1977,27 @@ export default class Xapi extends XapiBase {
     })
     $defer.onFailure(() => this.call('network.destroy', networkRef))
     if (pifId) {
-      await this.call('pool.create_VLAN_from_PIF', this.getObject(pifId).$ref, networkRef, asInteger(vlan))
+      await this.call(
+        'pool.create_VLAN_from_PIF',
+        this.getObject(pifId).$ref,
+        networkRef,
+        asInteger(vlan)
+      )
     }

     return this._getOrWaitObject(networkRef)
   }

-  async editPif (
-    pifId,
-    { vlan }
-  ) {
+  async editPif (pifId, { vlan }) {
     const pif = this.getObject(pifId)
-    const physPif = find(this.objects.all, obj => (
-      obj.$type === 'pif' &&
-      (obj.physical || !isEmpty(obj.bond_master_of)) &&
-      obj.$pool === pif.$pool &&
-      obj.device === pif.device
-    ))
+    const physPif = find(
+      this.objects.all,
+      obj =>
+        obj.$type === 'pif' &&
+        (obj.physical || !isEmpty(obj.bond_master_of)) &&
+        obj.$pool === pif.$pool &&
+        obj.device === pif.device
+    )

     if (!physPif) {
       throw new Error('PIF not found')
@@ -1982,30 +2012,42 @@ export default class Xapi extends XapiBase {
     const vlans = uniq(mapToArray(pifs, pif => pif.VLAN_master_of))
     await Promise.all(
-      mapToArray(vlans, vlan => vlan !== NULL_REF && this.call('VLAN.destroy', vlan))
+      mapToArray(
+        vlans,
+        vlan => vlan !== NULL_REF && this.call('VLAN.destroy', vlan)
+      )
     )

-    const newPifs = await this.call('pool.create_VLAN_from_PIF', physPif.$ref, pif.network, asInteger(vlan))
+    const newPifs = await this.call(
+      'pool.create_VLAN_from_PIF',
+      physPif.$ref,
+      pif.network,
+      asInteger(vlan)
+    )
     await Promise.all(
-      mapToArray(newPifs, pifRef =>
-        !wasAttached[this.getObject(pifRef).host] && this.call('PIF.unplug', pifRef)::ignoreErrors()
+      mapToArray(
+        newPifs,
+        pifRef =>
+          !wasAttached[this.getObject(pifRef).host] &&
+          this.call('PIF.unplug', pifRef)::ignoreErrors()
       )
     )
   }

   @deferrable
-  async createBondedNetwork ($defer, {
-    bondMode,
-    mac = '',
-    pifIds,
-    ...params
-  }) {
+  async createBondedNetwork ($defer, { bondMode, mac = '', pifIds, ...params }) {
     const network = await this.createNetwork(params)
     $defer.onFailure(() => this.deleteNetwork(network))
     // TODO: test and confirm:
     // Bond.create is called here with PIFs from one host but XAPI should then replicate the
     // bond on each host in the same pool with the corresponding PIFs (ie same interface names?).
-    await this.call('Bond.create', network.$ref, map(pifIds, pifId => this.getObject(pifId).$ref), mac, bondMode)
+    await this.call(
+      'Bond.create',
+      network.$ref,
+      map(pifIds, pifId => this.getObject(pifId).$ref),
+      mac,
+      bondMode
+    )

     return network
   }
@@ -2016,7 +2058,10 @@ export default class Xapi extends XapiBase {
     const vlans = uniq(mapToArray(pifs, pif => pif.VLAN_master_of))
     await Promise.all(
-      mapToArray(vlans, vlan => vlan !== NULL_REF && this.call('VLAN.destroy', vlan))
+      mapToArray(
+        vlans,
+        vlan => vlan !== NULL_REF && this.call('VLAN.destroy', vlan)
+      )
     )

     const bonds = uniq(flatten(mapToArray(pifs, pif => pif.bond_master_of)))
@@ -2033,10 +2078,16 @@ export default class Xapi extends XapiBase {
     const vm = this.getObject(vmId)
     const host = vm.$resident_on || this.pool.$master

-    return /* await */ this.call('host.call_plugin', host.$ref, 'xscontainer', action, {
-      vmuuid: vm.uuid,
-      container: containerId,
-    })
+    return /* await */ this.call(
+      'host.call_plugin',
+      host.$ref,
+      'xscontainer',
+      action,
+      {
+        vmuuid: vm.uuid,
+        container: containerId,
+      }
+    )
   }

   async registerDockerContainer (vmId) {
@@ -2071,9 +2122,15 @@ export default class Xapi extends XapiBase {
     const template = this.getObject(templateId)
     const host = this.pool.$master

-    const config = await this.call('host.call_plugin', host.$ref, 'xscontainer', 'get_config_drive_default', {
-      templateuuid: template.uuid,
-    })
+    const config = await this.call(
+      'host.call_plugin',
+      host.$ref,
+      'xscontainer',
+      'get_config_drive_default',
+      {
+        templateuuid: template.uuid,
+      }
+    )
     return config.slice(4) // FIXME remove the "True" string on the begining
   }

@@ -2083,11 +2140,17 @@ export default class Xapi extends XapiBase {
     const host = this.pool.$master
     const sr = this.getObject(srId)

-    await this.call('host.call_plugin', host.$ref, 'xscontainer', 'create_config_drive', {
-      vmuuid: vm.uuid,
-      sruuid: sr.uuid,
-      configuration: config,
-    })
+    await this.call(
+      'host.call_plugin',
+      host.$ref,
+      'xscontainer',
+      'create_config_drive',
+      {
+        vmuuid: vm.uuid,
+        sruuid: sr.uuid,
+        configuration: config,
+      }
+    )
     await this.registerDockerContainer(vmId)
   }

@@ -2121,13 +2184,21 @@ export default class Xapi extends XapiBase {
     // ignore errors, I (JFT) don't understand why they are emitted
     // because it works
-    await this._importVdiContent(vdi, buffer, VDI_FORMAT_RAW).catch(console.warn)
+    await this._importVdiContent(vdi, buffer, VDI_FORMAT_RAW).catch(
+      console.warn
+    )

     await this.createVbd({ vdi, vm })
   }

   @deferrable
-  async createTemporaryVdiOnSr ($defer, stream, sr, name_label, name_description) {
+  async createTemporaryVdiOnSr (
+    $defer,
+    stream,
+    sr,
+    name_label,
+    name_description
+  ) {
     const vdi = await this.createVdi({
       name_description,
       name_label,
@@ -2143,22 +2214,27 @@ export default class Xapi extends XapiBase {

   // Create VDI on an adequate local SR
   async createTemporaryVdiOnHost (stream, hostId, name_label, name_description) {
-    const pbd = find(
-      this.getObject(hostId).$PBDs,
-      pbd => canSrHaveNewVdiOfSize(pbd.$SR, stream.length)
+    const pbd = find(this.getObject(hostId).$PBDs, pbd =>
+      canSrHaveNewVdiOfSize(pbd.$SR, stream.length)
     )

     if (pbd == null) {
       throw new Error('no SR available')
     }

-    return this.createTemporaryVdiOnSr(stream, pbd.SR, name_label, name_description)
+    return this.createTemporaryVdiOnSr(
+      stream,
+      pbd.SR,
+      name_label,
+      name_description
+    )
   }

   findAvailableSharedSr (minSize) {
     return find(
       this.objects.all,
-      obj => obj.$type === 'sr' && obj.shared && canSrHaveNewVdiOfSize(obj, minSize)
+      obj =>
+        obj.$type === 'sr' && obj.shared && canSrHaveNewVdiOfSize(obj, minSize)
     )
   }
diff --git a/packages/xo-server/src/xapi/mixins/gpu.js b/packages/xo-server/src/xapi/mixins/gpu.js
index 7df8b9bef..860551168 100644
--- a/packages/xo-server/src/xapi/mixins/gpu.js
+++ b/packages/xo-server/src/xapi/mixins/gpu.js
@@ -1,7 +1,14 @@
 export default {
   createVgpu (vm, gpuGroup, vgpuType) {
     // TODO: properly handle device. Can a VM have 2 vGPUS?
-    return this.call('VGPU.create', this.getObject(vm).$ref, this.getObject(gpuGroup).$ref, '0', {}, this.getObject(vgpuType).$ref)
+    return this.call(
+      'VGPU.create',
+      this.getObject(vm).$ref,
+      this.getObject(gpuGroup).$ref,
+      '0',
+      {},
+      this.getObject(vgpuType).$ref
+    )
   },

   deleteVgpu (vgpu) {
     return this.call('VGPU.destroy', this.getObject(vgpu).$ref)
diff --git a/packages/xo-server/src/xapi/mixins/networking.js b/packages/xo-server/src/xapi/mixins/networking.js
index 2e9952124..a9ccd3da0 100644
--- a/packages/xo-server/src/xapi/mixins/networking.js
+++ b/packages/xo-server/src/xapi/mixins/networking.js
@@ -31,9 +31,10 @@ export default {
     set: [
       'ipv4Allowed',
       function (value, vif) {
-        const lockingMode = isEmpty(value) && isEmpty(vif.ipv6_allowed)
-          ? 'network_default'
-          : 'locked'
+        const lockingMode =
+          isEmpty(value) && isEmpty(vif.ipv6_allowed)
+            ? 'network_default'
+            : 'locked'

         if (lockingMode !== vif.locking_mode) {
           return this._set('locking_mode', lockingMode)
@@ -46,9 +47,10 @@ export default {
     set: [
       'ipv6Allowed',
       function (value, vif) {
-        const lockingMode = isEmpty(value) && isEmpty(vif.ipv4_allowed)
-          ? 'network_default'
-          : 'locked'
+        const lockingMode =
+          isEmpty(value) && isEmpty(vif.ipv4_allowed)
+            ? 'network_default'
+            : 'locked'

         if (lockingMode !== vif.locking_mode) {
           return this._set('locking_mode', lockingMode)
diff --git a/packages/xo-server/src/xapi/mixins/patching.js b/packages/xo-server/src/xapi/mixins/patching.js
index a5f954e8c..0b433b116 100644
--- a/packages/xo-server/src/xapi/mixins/patching.js
+++ b/packages/xo-server/src/xapi/mixins/patching.js
@@ -19,11 +19,7 @@ import {
   parseXml,
 } from '../../utils'

-import {
-  debug,
-  extractOpaqueRef,
-  useUpdateSystem,
-} from '../utils'
+import { debug, extractOpaqueRef, useUpdateSystem } from '../utils'

 export default {
   // FIXME: should be static
@@ -71,7 +67,7 @@ export default {
     const resolveVersionPatches = function (uuids) {
       const versionPatches = createRawObject()

-      forEach(ensureArray(uuids), ({uuid}) => {
+      forEach(ensureArray(uuids), ({ uuid }) => {
         versionPatches[uuid] = patches[uuid]
       })

@@ -112,9 +108,7 @@ export default {
       versions[hostVersions.product_version] ||
       versions[hostVersions.product_version_text]

-    return version
-      ? version.patches
-      : []
+    return version ? version.patches : []
   },

   _getInstalledPoolPatchesOnHost (host) {
@@ -163,9 +157,8 @@ export default {
   },

   async _ejectToolsIsos (hostRef) {
-    return Promise.all(mapFilter(
-      this.objects.all,
-      vm => {
+    return Promise.all(
+      mapFilter(this.objects.all, vm => {
         if (vm.$type !== 'vm' || (hostRef && vm.resident_on !== hostRef)) {
           return
         }
@@ -179,8 +172,8 @@ export default {
         if (shouldEjectCd) {
           return this.ejectCdFromVm(vm.$id)
         }
-      }
-    ))
+      })
+    )
   },

   // -----------------------------------------------------------------
@@ -208,7 +201,9 @@ export default {
   _isPoolPatchInstallableOnPool (patchUuid) {
     return every(
       this.objects.all,
-      obj => obj.$type !== 'host' || this._isPoolPatchInstallableOnHost(patchUuid, obj)
+      obj =>
+        obj.$type !== 'host' ||
+        this._isPoolPatchInstallableOnHost(patchUuid, obj)
     )
   },

@@ -216,13 +211,9 @@ export default {
   // platform_version < 2.1.1 ----------------------------------------

   async uploadPoolPatch (stream, patchName) {
-    const patchRef = await this.putResource(
-      stream,
-      '/pool_patch_upload',
-      {
-        task: this.createTask('Patch upload', patchName),
-      }
-    ).then(extractOpaqueRef)
+    const patchRef = await this.putResource(stream, '/pool_patch_upload', {
+      task: this.createTask('Patch upload', patchName),
+    }).then(extractOpaqueRef)

     return this._getOrWaitObject(patchRef)
   },
@@ -242,14 +233,17 @@ export default {
     let stream = await this.xo.httpRequest(patchInfo.url)
     stream = await new Promise((resolve, reject) => {
       const PATCH_RE = /\.xsupdate$/
-      stream.pipe(unzip.Parse()).on('entry', entry => {
-        if (PATCH_RE.test(entry.path)) {
-          entry.length = entry.size
-          resolve(entry)
-        } else {
-          entry.autodrain()
-        }
-      }).on('error', reject)
+      stream
+        .pipe(unzip.Parse())
+        .on('entry', entry => {
+          if (PATCH_RE.test(entry.path)) {
+            entry.length = entry.size
+            resolve(entry)
+          } else {
+            entry.autodrain()
+          }
+        })
+        .on('error', reject)
     })

     return this.uploadPoolPatch(stream, patchInfo.name)
@@ -266,10 +260,13 @@ export default {

     let stream = await this.xo.httpRequest(patchInfo.url)
     stream = await new Promise((resolve, reject) => {
-      stream.pipe(unzip.Parse()).on('entry', entry => {
-        entry.length = entry.size
-        resolve(entry)
-      }).on('error', reject)
+      stream
+        .pipe(unzip.Parse())
+        .on('entry', entry => {
+          entry.length = entry.size
+          resolve(entry)
+        })
+        .on('error', reject)
     })

     let vdi
@@ -282,9 +279,19 @@ export default {
       return
     }

-      vdi = await this.createTemporaryVdiOnSr(stream, sr, '[XO] Patch ISO', 'small temporary VDI to store a patch ISO')
+      vdi = await this.createTemporaryVdiOnSr(
+        stream,
+        sr,
+        '[XO] Patch ISO',
+        'small temporary VDI to store a patch ISO'
+      )
     } else {
-      vdi = await this.createTemporaryVdiOnHost(stream, hostId, '[XO] Patch ISO', 'small temporary VDI to store a patch ISO')
+      vdi = await this.createTemporaryVdiOnHost(
+        stream,
+        hostId,
+        '[XO] Patch ISO',
+        'small temporary VDI to store a patch ISO'
+      )
     }
     $defer(() => this._deleteVdi(vdi))

@@ -295,14 +302,21 @@ export default {
   // patform_version < 2.1.1 -----------------------------------------

   async _installPoolPatchOnHost (patchUuid, host) {
-    const [ patch ] = await Promise.all([ this._getOrUploadPoolPatch(patchUuid), this._ejectToolsIsos(host.$ref) ])
+    const [patch] = await Promise.all([
+      this._getOrUploadPoolPatch(patchUuid),
+      this._ejectToolsIsos(host.$ref),
+    ])

     await this.call('pool_patch.apply', patch.$ref, host.$ref)
   },

   // patform_version >= 2.1.1
-  _installPatchUpdateOnHost: deferrable(async function ($defer, patchUuid, host) {
-    const [ vdi ] = await Promise.all([
+  _installPatchUpdateOnHost: deferrable(async function (
+    $defer,
+    patchUuid,
+    host
+  ) {
+    const [vdi] = await Promise.all([
       this._getUpdateVdi($defer, patchUuid, host.$id),
       this._ejectToolsIsos(host.$ref),
     ])
@@ -333,7 +347,7 @@ export default {

   // platform_version < 2.1.1
   async _installPoolPatchOnAllHosts (patchUuid) {
-    const [ patch ] = await Promise.all([
+    const [patch] = await Promise.all([
       this._getOrUploadPoolPatch(patchUuid),
       this._ejectToolsIsos(),
     ])
@@ -343,7 +357,7 @@ export default {

   // platform_version >= 2.1.1
   _installPatchUpdateOnAllHosts: deferrable(async function ($defer, patchUuid) {
-    let [ vdi ] = await Promise.all([
+    let [vdi] = await Promise.all([
       this._getUpdateVdi($defer, patchUuid),
       this._ejectToolsIsos(),
     ])
@@ -369,9 +383,10 @@ export default {

   // If no host is provided, install on pool
   async _installPoolPatchAndRequirements (patch, patchesByUuid, host) {
-    if (host == null
-      ? !this._isPoolPatchInstallableOnPool(patch.uuid)
-      : !this._isPoolPatchInstallableOnHost(patch.uuid, host)
+    if (
+      host == null
+        ? !this._isPoolPatchInstallableOnPool(patch.uuid)
+        : !this._isPoolPatchInstallableOnHost(patch.uuid, host)
     ) {
       return
     }
@@ -383,7 +398,11 @@ export default {
       const requirement = patchesByUuid[requirementUuid]

       if (requirement != null) {
-        await this._installPoolPatchAndRequirements(requirement, patchesByUuid, host)
+        await this._installPoolPatchAndRequirements(
+          requirement,
+          patchesByUuid,
+          host
+        )
         host = host && this.getObject(host.$id)
       }
     }
@@ -408,21 +427,29 @@ export default {
       }
     })
   }
-    addPatchesToList(mapToArray(patchNames, name =>
-      find(missingPatches, { name })
-    ))
+    addPatchesToList(
+      mapToArray(patchNames, name => find(missingPatches, { name }))
+    )

     for (let i = 0, n = patchesToInstall.length; i < n; i++) {
-      await this._installPoolPatchAndRequirements(patchesToInstall[i], missingPatches, host)
+      await this._installPoolPatchAndRequirements(
+        patchesToInstall[i],
+        missingPatches,
+        host
+      )
     }
   },

   async installAllPoolPatchesOnHost (hostId) {
     let host = this.getObject(hostId)

-    const installableByUuid = host.license_params.sku_type !== 'free'
-      ? await this._listMissingPoolPatchesOnHost(host)
-      : filter(await this._listMissingPoolPatchesOnHost(host), { paid: false, upgrade: false })
+    const installableByUuid =
+      host.license_params.sku_type !== 'free'
+        ? await this._listMissingPoolPatchesOnHost(host)
+        : filter(await this._listMissingPoolPatchesOnHost(host), {
+            paid: false,
+            upgrade: false,
+          })

     // List of all installable patches sorted from the newest to the
     // oldest.
@@ -435,8 +462,15 @@ export default {
       const patch = installable[i]

       if (this._isPoolPatchInstallableOnHost(patch.uuid, host)) {
-        await this._installPoolPatchAndRequirements(patch, installableByUuid, host).catch(error => {
-          if (error.code !== 'PATCH_ALREADY_APPLIED' && error.code !== 'UPDATE_ALREADY_APPLIED') {
+        await this._installPoolPatchAndRequirements(
+          patch,
+          installableByUuid,
+          host
+        ).catch(error => {
+          if (
+            error.code !== 'PATCH_ALREADY_APPLIED' &&
+            error.code !== 'UPDATE_ALREADY_APPLIED'
+          ) {
             throw error
           }
         })
@@ -448,14 +482,18 @@ export default {
   async installAllPoolPatchesOnAllHosts () {
     const installableByUuid = assign(
       {},
-      ...await Promise.all(mapFilter(this.objects.all, host => {
-        if (host.$type === 'host') {
-          return this._listMissingPoolPatchesOnHost(host).then(patches => host.license_params.sku_type !== 'free'
-            ? patches
-            : filter(patches, { paid: false, upgrade: false })
-          )
-        }
-      }))
+      ...(await Promise.all(
+        mapFilter(this.objects.all, host => {
+          if (host.$type === 'host') {
+            return this._listMissingPoolPatchesOnHost(host).then(
+              patches =>
+                host.license_params.sku_type !== 'free'
+                  ? patches
+                  : filter(patches, { paid: false, upgrade: false })
+            )
+          }
+        })
+      ))
     )

     // List of all installable patches sorted from the newest to the
@@ -468,8 +506,14 @@ export default {
     for (let i = 0, n = installable.length; i < n; ++i) {
       const patch = installable[i]

-      await this._installPoolPatchAndRequirements(patch, installableByUuid).catch(error => {
-        if (error.code !== 'PATCH_ALREADY_APPLIED' && error.code !== 'UPDATE_ALREADY_APPLIED_IN_POOL') {
+      await this._installPoolPatchAndRequirements(
+        patch,
+        installableByUuid
+      ).catch(error => {
+        if (
+          error.code !== 'PATCH_ALREADY_APPLIED' &&
+          error.code !== 'UPDATE_ALREADY_APPLIED_IN_POOL'
+        ) {
           throw error
         }
       })
diff --git a/packages/xo-server/src/xapi/mixins/storage.js b/packages/xo-server/src/xapi/mixins/storage.js
index bfd003b33..0b258e27f 100644
--- a/packages/xo-server/src/xapi/mixins/storage.js
+++ b/packages/xo-server/src/xapi/mixins/storage.js
@@ -1,18 +1,10 @@
-import {
-  forEach,
-  groupBy,
-} from 'lodash'
+import { forEach, groupBy } from 'lodash'

-import {
-  createRawObject,
-  mapToArray,
-} from '../../utils'
+import { createRawObject, mapToArray } from '../../utils'

 export default {
   _connectAllSrPbds (sr) {
-    return Promise.all(
-      mapToArray(sr.$PBDs, pbd => this._plugPbd(pbd))
-    )
+    return Promise.all(mapToArray(sr.$PBDs, pbd => this._plugPbd(pbd)))
   },

   async connectAllSrPbds (id) {
@@ -20,9 +12,7 @@ export default {
   },

   _disconnectAllSrPbds (sr) {
-    return Promise.all(
-      mapToArray(sr.$PBDs, pbd => this._unplugPbd(pbd))
-    )
+    return Promise.all(mapToArray(sr.$PBDs, pbd => this._unplugPbd(pbd)))
   },

   async disconnectAllSrPbds (id) {
@@ -61,9 +51,7 @@ export default {
     let length = cache[uuid]
     if (length === undefined) {
       const children = childrenMap[uuid]
-      length = children !== undefined && children.length === 1
-        ? 1
-        : 0
+      length = children !== undefined && children.length === 1 ? 1 : 0
       try {
         const parent = this.getObjectByUuid(uuid).sm_config['vhd-parent']
         if (parent !== undefined) {
diff --git a/packages/xo-server/src/xapi/mixins/vm.js b/packages/xo-server/src/xapi/mixins/vm.js
index bd628e4aa..fe5298353 100644
--- a/packages/xo-server/src/xapi/mixins/vm.js
+++ b/packages/xo-server/src/xapi/mixins/vm.js
@@ -1,25 +1,10 @@
 import deferrable from 'golike-defer'
 import { catchPlus as pCatch, ignoreErrors } from 'promise-toolbox'
-import {
-  find,
-  gte,
-  includes,
-  isEmpty,
-  lte,
-} from 'lodash'
+import { find, gte, includes, isEmpty, lte } from 'lodash'

-import {
-  forEach,
-  mapToArray,
-  parseSize,
-} from '../../utils'
+import { forEach, mapToArray, parseSize } from '../../utils'

-import {
-  isVmHvm,
-  isVmRunning,
-  makeEditObject,
-  NULL_REF,
-} from '../utils'
+import { isVmHvm, isVmRunning, makeEditObject, NULL_REF } from '../utils'

 // According to: https://xenserver.org/blog/entry/vga-over-cirrus-in-xenserver-6-2.html.
 const XEN_VGA_VALUES = ['std', 'cirrus']
@@ -28,24 +13,29 @@ const XEN_VIDEORAM_VALUES = [1, 2, 4, 8, 16]
 export default {
   // TODO: clean up on error.
   @deferrable
-  async createVm ($defer, templateId, {
-    name_label, // eslint-disable-line camelcase
-    nameLabel = name_label, // eslint-disable-line camelcase
+  async createVm (
+    $defer,
+    templateId,
+    {
+      name_label, // eslint-disable-line camelcase
+      nameLabel = name_label, // eslint-disable-line camelcase

-    clone = true,
-    installRepository = undefined,
-    vdis = undefined,
-    vifs = undefined,
-    existingVdis = undefined,
+      clone = true,
+      installRepository = undefined,
+      vdis = undefined,
+      vifs = undefined,
+      existingVdis = undefined,

-    coreOs = false,
-    cloudConfig = undefined,
+      coreOs = false,
+      cloudConfig = undefined,

-    vgpuType = undefined,
-    gpuGroup = undefined,
+      vgpuType = undefined,
+      gpuGroup = undefined,

-    ...props
-  } = {}, checkLimits) {
+      ...props
+    } = {},
+    checkLimits
+  ) {
     const installMethod = (() => {
       if (installRepository == null) {
         return 'none'
@@ -61,14 +51,21 @@ export default {
     const template = this.getObject(templateId)

     // Clones the template.
-    const vmRef = await this[clone ? '_cloneVm' : '_copyVm'](template, nameLabel)
+    const vmRef = await this[clone ? '_cloneVm' : '_copyVm'](
+      template,
+      nameLabel
+    )
     $defer.onFailure(() => this.deleteVm(vmRef))

     // TODO: copy BIOS strings?

     // Removes disks from the provision XML, we will create them by
     // ourselves.
-    await this.call('VM.remove_from_other_config', vmRef, 'disks')::ignoreErrors()
+    await this.call(
+      'VM.remove_from_other_config',
+      vmRef,
+      'disks'
+    )::ignoreErrors()

     // Creates the VDIs and executes the initial steps of the
     // installation.
@@ -97,7 +94,8 @@ export default {
           HVM_boot_params: { ...bootParams, order },
         })
       }
-    } else { // PV
+    } else {
+      // PV
       if (vm.PV_bootloader === 'eliloader') {
         if (installMethod === 'network') {
           // TODO: normalize RHEL URL?
@@ -127,47 +125,54 @@ export default {
     }

     // Modify existing (previous template) disks if necessary
-    existingVdis && await Promise.all(mapToArray(existingVdis, async ({ size, $SR: srId, ...properties }, userdevice) => {
-      const vbd = find(vm.$VBDs, { userdevice })
-      if (!vbd) {
-        return
-      }
-      const vdi = vbd.$VDI
-      await this._setObjectProperties(vdi, properties)
+    existingVdis &&
+      (await Promise.all(
+        mapToArray(
+          existingVdis,
+          async ({ size, $SR: srId, ...properties }, userdevice) => {
+            const vbd = find(vm.$VBDs, { userdevice })
+            if (!vbd) {
+              return
+            }
+            const vdi = vbd.$VDI
+            await this._setObjectProperties(vdi, properties)

-      // if the disk is bigger
-      if (
-        size != null &&
-        size > vdi.virtual_size
-      ) {
-        await this.resizeVdi(vdi.$id, size)
-      }
-      // if another SR is set, move it there
-      if (srId) {
-        await this.moveVdi(vdi.$id, srId)
-      }
-    }))
+            // if the disk is bigger
+            if (size != null && size > vdi.virtual_size) {
+              await this.resizeVdi(vdi.$id, size)
+            }
+            // if another SR is set, move it there
+            if (srId) {
+              await this.moveVdi(vdi.$id, srId)
+            }
+          }
+        )
+      ))

     // Creates the user defined VDIs.
     //
     // TODO: set vm.suspend_SR
     if (!isEmpty(vdis)) {
       const devices = await this.call('VM.get_allowed_VBD_devices', vm.$ref)
-      await Promise.all(mapToArray(vdis, (vdiDescription, i) => this.createVdi({
-        name_description: vdiDescription.name_description,
-        name_label: vdiDescription.name_label,
-        size: vdiDescription.size,
-        sr: vdiDescription.sr || vdiDescription.SR,
-      })
-        .then(vdi => this.createVbd({
-          // Either the CD or the 1st disk is bootable (only useful for PV VMs)
-          bootable: !(hasBootableDisk || i),
+      await Promise.all(
+        mapToArray(vdis, (vdiDescription, i) =>
+          this.createVdi({
+            name_description: vdiDescription.name_description,
+            name_label: vdiDescription.name_label,
+            size: vdiDescription.size,
+            sr: vdiDescription.sr || vdiDescription.SR,
+          }).then(vdi =>
+            this.createVbd({
+              // Either the CD or the 1st disk is bootable (only useful for PV VMs)
+              bootable: !(hasBootableDisk || i),

-          userdevice: devices[i],
-          vdi,
-          vm,
-        }))
-      ))
+              userdevice: devices[i],
+              vdi,
+              vm,
+            })
+          )
+        )
+      )
     }

     // Destroys the VIFs cloned from the template.
@@ -176,18 +181,21 @@ export default {
     // Creates the VIFs specified by the user.
     if (vifs) {
       const devices = await this.call('VM.get_allowed_VIF_devices', vm.$ref)
-      await Promise.all(mapToArray(vifs, (vif, index) => this._createVif(
-        vm,
-        this.getObject(vif.network),
-        {
-          ipv4_allowed: vif.ipv4_allowed,
-          ipv6_allowed: vif.ipv6_allowed,
-          device: devices[index],
-          locking_mode: isEmpty(vif.ipv4_allowed) && isEmpty(vif.ipv6_allowed) ? 'network_default' : 'locked',
-          mac: vif.mac,
-          mtu: vif.mtu,
-        }
-      )))
+      await Promise.all(
+        mapToArray(vifs, (vif, index) =>
+          this._createVif(vm, this.getObject(vif.network), {
+            ipv4_allowed: vif.ipv4_allowed,
+            ipv6_allowed: vif.ipv6_allowed,
+            device: devices[index],
+            locking_mode:
+              isEmpty(vif.ipv4_allowed) && isEmpty(vif.ipv6_allowed)
+                ? 'network_default'
+                : 'locked',
+            mac: vif.mac,
+            mtu: vif.mtu,
+          })
+        )
+      )
     }

     if (vgpuType !== undefined && gpuGroup !== undefined) {
@@ -203,10 +211,7 @@ export default {
     let srRef
     forEach(vm.$VBDs, vbd => {
       let vdi
-      if (
-        vbd.type === 'Disk' &&
-        (vdi = vbd.$VDI)
-      ) {
+      if (vbd.type === 'Disk' && (vdi = vbd.$VDI)) {
        srRef = vdi.SR
        return false
      }
@@ -243,16 +248,19 @@ export default {
        this._updateObjectMapProperty(vm, 'other_config', {
          autoPoweron: value ? 'true' : null,
        }),
-        value && this.setPoolProperties({
-          autoPoweron: true,
-        }),
+        value &&
+          this.setPoolProperties({
+            autoPoweron: true,
+          }),
      ])
    },
  },

   coresPerSocket: {
     set (coresPerSocket, vm) {
-      return this._updateObjectMapProperty(vm, 'platform', {'cores-per-socket': coresPerSocket})
+      return this._updateObjectMapProperty(vm, 'platform', {
+        'cores-per-socket': coresPerSocket,
+      })
     },
   },

@@ -302,7 +310,11 @@ export default {

   highAvailability: {
     set (ha, vm) {
-      return this.call('VM.set_ha_restart_priority', vm.$ref, ha ? 'restart' : '')
+      return this.call(
+        'VM.set_ha_restart_priority',
+        vm.$ref,
+        ha ? 'restart' : ''
+      )
     },
   },

@@ -375,7 +387,9 @@ export default {
   vga: {
     set (vga, vm) {
       if (!includes(XEN_VGA_VALUES, vga)) {
-        throw new Error(`The different values that the VGA can take are: ${XEN_VGA_VALUES}`)
+        throw new Error(
+          `The different values that the VGA can take are: ${XEN_VGA_VALUES}`
+        )
       }
       return this._updateObjectMapProperty(vm, 'platform', { vga })
     },
@@ -384,7 +398,9 @@ export default {
   videoram: {
     set (videoram, vm) {
       if (!includes(XEN_VIDEORAM_VALUES, videoram)) {
-        throw new Error(`The different values that the video RAM can take are: ${XEN_VIDEORAM_VALUES}`)
+        throw new Error(
+          `The different values that the video RAM can take are: ${XEN_VIDEORAM_VALUES}`
+        )
       }
       return this._updateObjectMapProperty(vm, 'platform', { videoram })
     },
@@ -404,9 +420,9 @@ export default {
     if (snapshot.snapshot_info['power-state-at-snapshot'] === 'Running') {
       const vm = snapshot.$snapshot_of
       if (vm.power_state === 'Halted') {
-        this.startVm(vm.$id)::ignoreErrors()
+        ;this.startVm(vm.$id)::ignoreErrors()
       } else if (vm.power_state === 'Suspended') {
-        this.resumeVm(vm.$id)::ignoreErrors()
+        ;this.resumeVm(vm.$id)::ignoreErrors()
       }
     }
   },
diff --git a/packages/xo-server/src/xapi/other-config-template.js b/packages/xo-server/src/xapi/other-config-template.js
index 4aa74fa74..a53d019a2 100644
--- a/packages/xo-server/src/xapi/other-config-template.js
+++ b/packages/xo-server/src/xapi/other-config-template.js
@@ -41,7 +41,8 @@ const OTHER_CONFIG_TEMPLATE = {
     PV_kernel: '',
     PV_legacy_args: '',
     PV_ramdisk: '',
-    recommendations: '',
+    recommendations:
+      '',
     shutdown_delay: 0,
     start_delay: 0,
     user_version: 1,
diff --git a/packages/xo-server/src/xapi/utils.js b/packages/xo-server/src/xapi/utils.js
index 435343081..9513d1a91 100644
--- a/packages/xo-server/src/xapi/utils.js
+++ b/packages/xo-server/src/xapi/utils.js
@@ -37,9 +37,8 @@ export const asInteger = value => String(value)

 export const filterUndefineds = obj => pickBy(obj, value => value !== undefined)

-export const optional = (value, fn) => value == null
-  ? undefined
-  : fn ? fn(value) : value
+export const optional = (value, fn) =>
+  value == null ? undefined : fn ? fn(value) : value

 export const prepareXapiParam = param => {
   // if (isFinite(param) && !isInteger(param)) {
@@ -79,33 +78,36 @@ export const extractOpaqueRef = str => {
 // -------------------------------------------------------------------

 const TYPE_TO_NAMESPACE = createRawObject()
-forEach([
-  'Bond',
-  'DR_task',
-  'GPU_group',
-  'PBD',
-  'PCI',
-  'PGPU',
-  'PIF',
-  'PIF_metrics',
-  'SM',
-  'SR',
-  'VBD',
-  'VBD_metrics',
-  'VDI',
-  'VGPU',
-  'VGPU_type',
-  'VIF',
-  'VLAN',
-  'VM',
-  'VM_appliance',
-  'VM_guest_metrics',
-  'VM_metrics',
-  'VMPP',
-  'VTPM',
-], namespace => {
-  TYPE_TO_NAMESPACE[namespace.toLowerCase()] = namespace
-})
+forEach(
+  [
+    'Bond',
+    'DR_task',
+    'GPU_group',
+    'PBD',
+    'PCI',
+    'PGPU',
+    'PIF',
+    'PIF_metrics',
+    'SM',
+    'SR',
+    'VBD',
+    'VBD_metrics',
+    'VDI',
+    'VGPU',
+    'VGPU_type',
+    'VIF',
+    'VLAN',
+    'VM',
+    'VM_appliance',
+    'VM_guest_metrics',
+    'VM_metrics',
+    'VMPP',
+    'VTPM',
+  ],
+  namespace => {
+    TYPE_TO_NAMESPACE[namespace.toLowerCase()] = namespace
+  }
+)

 // Object types given by `xen-api` are always lowercase but the
 // namespaces in the Xen API can have a different casing.
@@ -120,7 +122,6 @@ export const getVmDisks = vm => {
     if (
       // Do not remove CDs and Floppies.
       vbd.type === 'Disk' &&
-
       // Ignore VBD without VDI.
       (vdi = vbd.$VDI)
     ) {
@@ -273,8 +274,8 @@ export const makeEditObject = specs => {
     }

     let tmp
-    specs[tmp = camelCase(name)] || (specs[tmp] = spec)
-    specs[tmp = camelToSnakeCase(name)] || (specs[tmp] = spec)
+    specs[(tmp = camelCase(name))] || (specs[tmp] = spec)
+    specs[(tmp = camelToSnakeCase(name))] || (specs[tmp] = spec)
   })

   return async function _editObject_ (id, values, checkLimits) {
@@ -287,7 +288,8 @@ export const makeEditObject = specs => {
     // Context used to execute functions.
     const context = {
       __proto__: this,
-      _set: (prop, value) => this.call(_setMethodPrefix + prop, _objectRef, prepareXapiParam(value)),
+      _set: (prop, value) =>
+        this.call(_setMethodPrefix + prop, _objectRef, prepareXapiParam(value)),
     }

     const set = (value, name) => {
@@ -335,7 +337,10 @@ export const makeEditObject = specs => {
        const constraintNewValue = values[constraintName]

        if (!constraint(constraintCurrentValue, value)) {
-          const cb = set(constraintNewValue == null ? value : constraintNewValue, constraintName)
+          const cb = set(
+            constraintNewValue == null ? value : constraintNewValue,
+            constraintName
+          )
          if (cb) {
            cbs.push(cb)
          }
diff --git a/packages/xo-server/src/xo-mixins/acls.js b/packages/xo-server/src/xo-mixins/acls.js
index 502261380..859ca54ae 100644
--- a/packages/xo-server/src/xo-mixins/acls.js
+++ b/packages/xo-server/src/xo-mixins/acls.js
@@ -1,15 +1,9 @@
 import checkAuthorization from 'xo-acl-resolver'
 import { forEach, includes, map } from 'lodash'

-import {
-  ModelAlreadyExists,
-} from '../collection'
-import {
-  Acls,
-} from '../models/acl'
-import {
-  createRawObject,
-} from '../utils'
+import { ModelAlreadyExists } from '../collection'
+import { Acls } from '../models/acl'
+import { createRawObject } from '../utils'

 // ===================================================================

@@ -17,17 +11,18 @@ export default class {
   constructor (xo) {
     this._xo = xo

-    const aclsDb = this._acls = new Acls({
+    const aclsDb = (this._acls = new Acls({
       connection: xo._redis,
       prefix: 'xo:acl',
       indexes: ['subject', 'object'],
-    })
+    }))

     xo.on('start', () => {
-      xo.addConfigManager('acls',
+      xo.addConfigManager(
+        'acls',
         () => aclsDb.get(),
         acls => aclsDb.update(acls),
-        [ 'groups', 'users' ]
+        ['groups', 'users']
       )
     })

@@ -48,19 +43,16 @@ export default class {
     const user = await this._xo.getUser(userId)
     const { groups } = user

-    const subjects = groups
-      ? groups.concat(userId)
-      : [ userId ]
+    const subjects = groups ? groups.concat(userId) : [userId]

     const acls = []
     const pushAcls = (push => entries => {
       push.apply(acls, entries)
     })(acls.push)

-    await Promise.all(map(
-      subjects,
-      subject => this.getAclsForSubject(subject).then(pushAcls)
-    ))
+    await Promise.all(
+      map(subjects, subject => this.getAclsForSubject(subject).then(pushAcls))
+    )

     return acls
   }
@@ -89,20 +81,15 @@ export default class {
   }

   async getPermissionsForUser (userId) {
-    const [
-      acls,
-      permissionsByRole,
-    ] = await Promise.all([
+    const [acls, permissionsByRole] = await Promise.all([
       this._getAclsForUser(userId),
       this._getPermissionsByRole(),
     ])

     const permissions = createRawObject()
     for (const { action, object: objectId } of acls) {
-      const current = (
-        permissions[objectId] ||
-        (permissions[objectId] = createRawObject())
-      )
+      const current =
+        permissions[objectId] || (permissions[objectId] = createRawObject())

       const permissionsForRole = permissionsByRole[action]
       if (permissionsForRole) {
@@ -154,26 +141,17 @@ export default class {
      {
        id: 'viewer',
        name: 'Viewer',
-        permissions: [
-          'view',
-        ],
+        permissions: ['view'],
      },
      {
        id: 'operator',
        name: 'Operator',
-        permissions: [
-          'view',
-          'operate',
-        ],
+        permissions: ['view', 'operate'],
      },
      {
        id: 'admin',
        name: 'Admin',
-        permissions: [
-          'view',
-          'operate',
-          'administrate',
-        ],
+        permissions: ['view', 'operate', 'administrate'],
      },
    ]
  }
diff --git a/packages/xo-server/src/xo-mixins/api.js b/packages/xo-server/src/xo-mixins/api.js
index 52afb013e..10a5540c1 100644
--- a/packages/xo-server/src/xo-mixins/api.js
+++ b/packages/xo-server/src/xo-mixins/api.js
@@ -2,23 +2,11 @@ import createDebug from 'debug'
 import kindOf from 'kindof'
 import ms from 'ms'
 import schemaInspector from 'schema-inspector'
-import {
-  forEach,
-  isArray,
-  isFunction,
-  map,
-  mapValues,
-} from 'lodash'
+import { forEach, isArray, isFunction, map, mapValues } from 'lodash'

 import * as methods from '../api'
-import {
-  MethodNotFound,
-} from 'json-rpc-peer'
-import {
-  createRawObject,
-  noop,
-  serializeError,
-} from '../utils'
+import { MethodNotFound } from 'json-rpc-peer'
+import { createRawObject, noop, serializeError } from '../utils'

 import * as errors from 'xo-common/api-errors'

@@ -38,24 +26,30 @@ const PERMISSIONS = {
 // - error when halted VM migration failure is due to XS < 7
 const XAPI_ERROR_TO_XO_ERROR = {
   EHOSTUNREACH: errors.serverUnreachable,
-  HOST_OFFLINE: ([ host ], getId) => errors.hostOffline({ host: getId(host) }),
+  HOST_OFFLINE: ([host], getId) => errors.hostOffline({ host: getId(host) }),
   NO_HOSTS_AVAILABLE: errors.noHostsAvailable,
   NOT_SUPPORTED_DURING_UPGRADE: errors.notSupportedDuringUpgrade,
-  OPERATION_BLOCKED: ([ ref, code ], getId) => errors.operationBlocked({ objectId: getId(ref), code }),
-  PATCH_PRECHECK_FAILED_ISO_MOUNTED: ([ patch ]) => errors.patchPrecheck({ errorType: 'isoMounted', patch }),
-  PIF_VLAN_EXISTS: ([ pif ], getId) => errors.objectAlreadyExists({ objectId: getId(pif), objectType: 'PIF' }),
+  OPERATION_BLOCKED: ([ref, code], getId) =>
+    errors.operationBlocked({ objectId: getId(ref), code }),
+  PATCH_PRECHECK_FAILED_ISO_MOUNTED: ([patch]) =>
+    errors.patchPrecheck({ errorType: 'isoMounted', patch }),
+  PIF_VLAN_EXISTS: ([pif], getId) =>
+    errors.objectAlreadyExists({ objectId: getId(pif), objectType: 'PIF' }),
   SESSION_AUTHENTICATION_FAILED: errors.authenticationFailed,
-  VDI_IN_USE: ([ vdi, operation ], getId) => errors.vdiInUse({ vdi: getId(vdi), operation }),
-  VM_BAD_POWER_STATE: ([ vm, expected, actual ], getId) => errors.vmBadPowerState({ vm: getId(vm), expected, actual }),
+  VDI_IN_USE: ([vdi, operation], getId) =>
+    errors.vdiInUse({ vdi: getId(vdi), operation }),
+  VM_BAD_POWER_STATE: ([vm, expected, actual], getId) =>
+    errors.vmBadPowerState({ vm: getId(vm), expected, actual }),
   VM_IS_TEMPLATE: errors.vmIsTemplate,
-  VM_LACKS_FEATURE: ([ vm ], getId) => errors.vmLacksFeature({ vm: getId(vm) }),
-  VM_LACKS_FEATURE_SHUTDOWN: ([ vm ], getId) => errors.vmLacksFeature({ vm: getId(vm), feature: 'shutdown' }),
-  VM_MISSING_PV_DRIVERS: ([ vm ], getId) => errors.vmMissingPvDrivers({ vm: getId(vm) }),
+  VM_LACKS_FEATURE: ([vm], getId) => errors.vmLacksFeature({ vm: getId(vm) }),
+  VM_LACKS_FEATURE_SHUTDOWN: ([vm], getId) =>
+    errors.vmLacksFeature({ vm: getId(vm), feature: 'shutdown' }),
+  VM_MISSING_PV_DRIVERS: ([vm], getId) =>
+    errors.vmMissingPvDrivers({ vm: getId(vm) }),
 }

-const hasPermission = (user, permission) => (
+const hasPermission = (user, permission) =>
   PERMISSIONS[user.permission] >= PERMISSIONS[permission]
-)

 function checkParams (method, params) {
   const schema = method.params
@@ -63,10 +57,13 @@ function checkParams (method, params) {
     return
   }

-  const result = schemaInspector.validate({
-    type: 'object',
-    properties: schema,
-  }, params)
+  const result = schemaInspector.validate(
+    {
+      type: 'object',
+      properties: schema,
+    },
+    params
+  )

   if (!result.valid) {
     throw errors.invalidParameters(result.error)
@@ -76,14 +73,14 @@ function checkParams (method, params) {

 function checkPermission (method) {
   /* jshint validthis: true */

-  const {permission} = method
+  const { permission } = method

   // No requirement.
   if (permission === undefined) {
     return
   }

-  const {user} = this
+  const { user } = this
   if (!user) {
     throw errors.unauthorized()
   }
@@ -104,7 +101,7 @@ function resolveParams (method, params) {
     return params
   }

-  const {user} = this
+  const { user } = this
   if (!user) {
     throw errors.unauthorized()
   }
@@ -133,7 +130,7 @@
   // value (except null or undefined which trigger the default
   // value) to simply do a resolve without checking any permissions.
       if (permission) {
-        permissions.push([ object.id, permission ])
+        permissions.push([object.id, permission])
       }
     })
 
@@ -242,7 +239,8 @@ export default class Api {
     // FIXME: it can cause issues if there any property assignments in
     // XO methods called from the API.
     const context = Object.create(this._xo, {
-      api: { // Used by system.*().
+      api: {
+        // Used by system.*().
         value: this,
       },
       session: {
@@ -252,10 +250,8 @@ export default class Api {
 
     // Fetch and inject the current user.
     const userId = session.get('user_id', undefined)
-    context.user = userId && await this._xo.getUser(userId)
-    const userName = context.user
-      ? context.user.email
-      : '(unknown user)'
+    context.user = userId && (await this._xo.getUser(userId))
+    const userName = context.user ? context.user.email : '(unknown user)'
 
     try {
       await checkPermission.call(context, method)
@@ -305,7 +301,9 @@ export default class Api {
         duration: Date.now() - startTime,
         error: serializeError(error),
       }
-      const message = `${userName} | ${name}(${JSON.stringify(params)}) [${ms(Date.now() - startTime)}] =!> ${error}`
+      const message = `${userName} | ${name}(${JSON.stringify(params)}) [${ms(
+        Date.now() - startTime
+      )}] =!> ${error}`
 
       this._logger.error(message, data)
diff --git a/packages/xo-server/src/xo-mixins/authentication.js b/packages/xo-server/src/xo-mixins/authentication.js
index ea276fe0f..9fb206caf 100644
--- a/packages/xo-server/src/xo-mixins/authentication.js
+++ b/packages/xo-server/src/xo-mixins/authentication.js
@@ -3,16 +3,11 @@ import { noSuchObject } from 'xo-common/api-errors'
 import { ignoreErrors } from 'promise-toolbox'
 
 import Token, { Tokens } from '../models/token'
-import {
-  createRawObject,
-  forEach,
-  generateToken,
-} from '../utils'
+import { createRawObject, forEach, generateToken } from '../utils'
 
 // ===================================================================
 
-const noSuchAuthenticationToken = id =>
-  noSuchObject(id, 'authenticationToken')
+const noSuchAuthenticationToken = id => noSuchObject(id, 'authenticationToken')
 
 const ONE_MONTH = 1e3 * 60 * 60 * 24 * 30
 
@@ -27,31 +22,26 @@ export default class {
     this._providers = new Set()
 
     // Creates persistent collections.
-    const tokensDb = this._tokens = new Tokens({
+    const tokensDb = (this._tokens = new Tokens({
       connection: xo._redis,
       prefix: 'xo:token',
       indexes: ['user_id'],
-    })
+    }))
 
     // Password authentication provider.
-    this.registerAuthenticationProvider(async ({
-      username,
-      password,
-    }) => {
+    this.registerAuthenticationProvider(async ({ username, password }) => {
       if (username === undefined || password === undefined) {
         return
       }
 
       const user = await xo.getUserByName(username, true)
-      if (user && await xo.checkUserPassword(user.id, password)) {
+      if (user && (await xo.checkUserPassword(user.id, password))) {
         return user.id
       }
     })
 
    // Token authentication provider.
-    this.registerAuthenticationProvider(async ({
-      token: tokenId,
-    }) => {
+    this.registerAuthenticationProvider(async ({ token: tokenId }) => {
       if (!tokenId) {
         return
       }
@@ -75,7 +65,8 @@ export default class {
     })
 
     xo.on('start', () => {
-      xo.addConfigManager('authTokens',
+      xo.addConfigManager(
+        'authTokens',
         () => tokensDb.get(),
         tokens => tokensDb.update(tokens)
       )
@@ -135,7 +126,7 @@ export default class {
     if (
       username &&
       (lastFailure = failures[username]) &&
-      (lastFailure + 2e3) > now
+      lastFailure + 2e3 > now
     ) {
       throw new Error('too fast authentication tries')
     }
@@ -152,18 +143,12 @@ export default class {
 
   // -----------------------------------------------------------------
 
-  async createAuthenticationToken ({
-    expiresIn = ONE_MONTH,
-    userId,
-  }) {
+  async createAuthenticationToken ({ expiresIn = ONE_MONTH, userId }) {
     const token = new Token({
       id: await generateToken(),
       user_id: userId,
-      expiration: Date.now() + (
-        typeof expiresIn === 'string'
-          ? ms(expiresIn)
-          : expiresIn
-      ),
+      expiration:
+        Date.now() + (typeof expiresIn === 'string' ? ms(expiresIn) : expiresIn),
     })
 
     await this._tokens.add(token)
@@ -186,10 +171,8 @@ export default class {
 
     token = token.properties
 
-    if (!(
-      token.expiration > Date.now()
-    )) {
-      this._tokens.remove(id)::ignoreErrors()
+    if (!(token.expiration > Date.now())) {
+      ;this._tokens.remove(id)::ignoreErrors()
       throw noSuchAuthenticationToken(id)
     }
diff --git a/packages/xo-server/src/xo-mixins/backups.js b/packages/xo-server/src/xo-mixins/backups.js
index 695233c40..b153df60d 100644
--- a/packages/xo-server/src/xo-mixins/backups.js
+++ b/packages/xo-server/src/xo-mixins/backups.js
@@ -8,10 +8,7 @@ import { createParser as createPairsParser } from 'parse-pairs'
 import { createReadStream, readdir, stat } from 'fs'
 import { satisfies as versionSatisfies } from 'semver'
 import { utcFormat } from 'd3-time-format'
-import {
-  basename,
-  dirname,
-} from 'path'
+import { basename, dirname } from 'path'
 import {
   endsWith,
   filter,
@@ -101,7 +98,8 @@ const getVdiTimestamp = name => {
   return arr[1]
 }
 
-const getDeltaBackupNameWithoutExt = name => name.slice(0, -DELTA_BACKUP_EXT_LENGTH)
+const getDeltaBackupNameWithoutExt = name =>
+  name.slice(0, -DELTA_BACKUP_EXT_LENGTH)
 const isDeltaBackup = name => endsWith(name, DELTA_BACKUP_EXT)
 
 // Checksums have been corrupted between 5.2.6 and 5.2.7.
@@ -132,14 +130,27 @@ async function checkFileIntegrity (handler, name) {
 
 const listPartitions = (() => {
   const IGNORED = {}
-  forEach([
-    // https://github.com/jhermsmeier/node-mbr/blob/master/lib/partition.js#L38
-    0x05, 0x0F, 0x85, 0x15, 0x91, 0x9B, 0x5E, 0x5F, 0xCF, 0xD5, 0xC5,
+  forEach(
+    [
+      // https://github.com/jhermsmeier/node-mbr/blob/master/lib/partition.js#L38
+      0x05,
+      0x0f,
+      0x85,
+      0x15,
+      0x91,
+      0x9b,
+      0x5e,
+      0x5f,
+      0xcf,
+      0xd5,
+      0xc5,
 
-    0x82, // swap
-  ], type => {
-    IGNORED[type] = true
-  })
+      0x82, // swap
+    ],
+    type => {
+      IGNORED[type] = true
+    }
+  )
 
   const TYPES = {
     0x7: 'NTFS',
@@ -148,154 +159,158 @@ const listPartitions = (() => {
   }
 
   const parseLine = createPairsParser({
-    keyTransform: key => key === 'UUID'
-      ? 'id'
-      : key.toLowerCase(),
-    valueTransform: (value, key) => key === 'start' || key === 'size'
-      ? +value
-      : key === 'type'
-        ? TYPES[+value] || value
-        : value,
+    keyTransform: key => (key === 'UUID' ? 'id' : key.toLowerCase()),
+    valueTransform: (value, key) =>
+      key === 'start' || key === 'size'
+        ? +value
+        : key === 'type' ? TYPES[+value] || value : value,
   })
 
-  return device => execa.stdout('partx', [
-    '--bytes',
-    '--output=NR,START,SIZE,NAME,UUID,TYPE',
-    '--pairs',
-    device.path,
-  ]).then(stdout => mapFilter(splitLines(stdout), line => {
-    const partition = parseLine(line)
-    const { type } = partition
-    if (type != null && !IGNORED[+type]) {
-      return partition
-    }
-  }))
+  return device =>
+    execa
+      .stdout('partx', [
+        '--bytes',
+        '--output=NR,START,SIZE,NAME,UUID,TYPE',
+        '--pairs',
+        device.path,
+      ])
+      .then(stdout =>
+        mapFilter(splitLines(stdout), line => {
+          const partition = parseLine(line)
+          const { type } = partition
+          if (type != null && !IGNORED[+type]) {
+            return partition
+          }
+        })
+      )
 })()
 
 // handle LVM logical volumes automatically
-const listPartitions2 = device => listPartitions(device).then(partitions => {
-  const partitions2 = []
-  const promises = []
-  forEach(partitions, partition => {
-    if (+partition.type === 0x8e) {
-      promises.push(mountLvmPv(device, partition).then(device => {
-        const promise = listLvmLvs(device).then(lvs => {
-          forEach(lvs, lv => {
-            partitions2.push({
-              name: lv.lv_name,
-              size: +lv.lv_size,
-              id: `${partition.id}/${lv.vg_name}/${lv.lv_name}`,
-            })
-          })
-        })
-        promise::pFinally(device.unmount)
-        return promise
-      }))
-    } else {
-      partitions2.push(partition)
-    }
-  })
-  return Promise.all(promises).then(() => partitions2)
-})
+const listPartitions2 = device =>
+  listPartitions(device).then(partitions => {
+    const partitions2 = []
+    const promises = []
+    forEach(partitions, partition => {
+      if (+partition.type === 0x8e) {
+        promises.push(
+          mountLvmPv(device, partition).then(device => {
+            const promise = listLvmLvs(device).then(lvs => {
+              forEach(lvs, lv => {
+                partitions2.push({
+                  name: lv.lv_name,
+                  size: +lv.lv_size,
+                  id: `${partition.id}/${lv.vg_name}/${lv.lv_name}`,
+                })
+              })
            })
+            promise::pFinally(device.unmount)
+            return promise
+          })
+        )
+      } else {
+        partitions2.push(partition)
+      }
+    })
+    return Promise.all(promises).then(() => partitions2)
+  })
 
-const mountPartition = (device, partitionId) => Promise.all([
-  partitionId != null && listPartitions(device),
-  tmpDir(),
-]).then(([ partitions, path ]) => {
-  const options = [
-    'loop',
-    'ro',
-  ]
-
-  if (partitions) {
-    const partition = find(partitions, { id: partitionId })
-
-    const { start } = partition
-    if (start != null) {
-      options.push(`offset=${start * 512}`)
-    }
-  }
-
-  const mount = options => execa('mount', [
-    `--options=${options.join(',')}`,
-    `--source=${device.path}`,
-    `--target=${path}`,
-  ])
-
-  // `norecovery` option is used for ext3/ext4/xfs, if it fails it
-  // might be another fs, try without
-  return mount([ ...options, 'norecovery' ]).catch(() =>
-    mount(options)
-  ).then(() => ({
-    path,
-    unmount: once(() => execa('umount', [ '--lazy', path ])),
-  }), error => {
-    console.log(error)
-
-    throw error
-  })
-})
+const mountPartition = (device, partitionId) =>
+  Promise.all([partitionId != null && listPartitions(device), tmpDir()]).then(
+    ([partitions, path]) => {
+      const options = ['loop', 'ro']
+
+      if (partitions) {
+        const partition = find(partitions, { id: partitionId })
+
+        const { start } = partition
+        if (start != null) {
+          options.push(`offset=${start * 512}`)
+        }
+      }
+
+      const mount = options =>
+        execa('mount', [
+          `--options=${options.join(',')}`,
+          `--source=${device.path}`,
+          `--target=${path}`,
+        ])
+
+      // `norecovery` option is used for ext3/ext4/xfs, if it fails it
+      // might be another fs, try without
+      return mount([...options, 'norecovery'])
+        .catch(() => mount(options))
+        .then(
+          () => ({
+            path,
+            unmount: once(() => execa('umount', ['--lazy', path])),
+          }),
+          error => {
+            console.log(error)
+
+            throw error
+          }
+        )
    }
+  )
 
 // handle LVM logical volumes automatically
 const mountPartition2 = (device, partitionId) => {
-  if (
-    partitionId == null ||
-    !includes(partitionId, '/')
-  ) {
+  if (partitionId == null || !includes(partitionId, '/')) {
     return mountPartition(device, partitionId)
   }
 
-  const [ pvId, vgName, lvName ] = partitionId.split('/')
+  const [pvId, vgName, lvName] = partitionId.split('/')
 
-  return listPartitions(device).then(partitions =>
-    find(partitions, { id: pvId })
-  ).then(pvId => mountLvmPv(device, pvId)).then(device1 =>
-    execa('vgchange', [ '-ay', vgName ]).then(() =>
-      lvs([ 'lv_name', 'lv_path' ], vgName).then(lvs =>
-        find(lvs, { lv_name: lvName }).lv_path
-      )
-    ).then(path =>
-      mountPartition({ path }).then(device2 => ({
-        ...device2,
-        unmount: () => device2.unmount().then(device1.unmount),
-      }))
-    ).catch(error => device1.unmount().then(() => {
-      throw error
-    }))
-  )
+  return listPartitions(device)
+    .then(partitions => find(partitions, { id: pvId }))
+    .then(pvId => mountLvmPv(device, pvId))
+    .then(device1 =>
+      execa('vgchange', ['-ay', vgName])
+        .then(() =>
+          lvs(['lv_name', 'lv_path'], vgName).then(
+            lvs => find(lvs, { lv_name: lvName }).lv_path
+          )
+        )
+        .then(path =>
+          mountPartition({ path }).then(device2 => ({
+            ...device2,
+            unmount: () => device2.unmount().then(device1.unmount),
+          }))
+        )
+        .catch(error =>
+          device1.unmount().then(() => {
+            throw error
+          })
+        )
+    )
 }
 
 // -------------------------------------------------------------------
 
-const listLvmLvs = device => pvs([
-  'lv_name',
-  'lv_path',
-  'lv_size',
-  'vg_name',
-], device.path).then(pvs => filter(pvs, 'lv_name'))
+const listLvmLvs = device =>
+  pvs(['lv_name', 'lv_path', 'lv_size', 'vg_name'], device.path).then(pvs =>
+    filter(pvs, 'lv_name')
+  )
 
 const mountLvmPv = (device, partition) => {
   const args = []
   if (partition) {
     args.push('-o', partition.start * 512)
   }
-  args.push(
-    '--show',
-    '-f',
-    device.path
-  )
+  args.push('--show', '-f', device.path)
 
   return execa.stdout('losetup', args).then(stdout => {
     const path = trim(stdout)
     return {
       path,
-      unmount: once(() => Promise.all([
-        execa('losetup', [ '-d', path ]),
-        pvs('vg_name', path).then(vgNames => execa('vgchange', [
-          '-an',
-          ...vgNames,
-        ])),
-      ])),
+      unmount: once(() =>
+        Promise.all([
+          execa('losetup', ['-d', path]),
+          pvs('vg_name', path).then(vgNames =>
+            execa('vgchange', ['-an', ...vgNames])
+          ),
+        ])
+      ),
     }
   })
 }
@@ -308,12 +323,11 @@ export default class {
 
     // clean any LVM volumes that might have not been properly
     // unmounted
-    xo.on('start', () => Promise.all([
-      execa('losetup', [ '-D' ]),
-      execa('vgchange', [ '-an' ]),
-    ]).then(() =>
-      execa('pvscan', [ '--cache' ])
-    ))
+    xo.on('start', () =>
+      Promise.all([execa('losetup', ['-D']), execa('vgchange', ['-an'])]).then(
+        () => execa('pvscan', ['--cache'])
+      )
+    )
   }
 
   async listRemoteBackups (remoteId) {
@@ -332,12 +346,11 @@ export default class {
       const files = await handler.list(deltaDir)
       const deltaBackups = filter(files, isDeltaBackup)
 
-      backups.push(...mapToArray(
-        deltaBackups,
-        deltaBackup => {
+      backups.push(
+        ...mapToArray(deltaBackups, deltaBackup => {
           return `${deltaDir}/${getDeltaBackupNameWithoutExt(deltaBackup)}`
-        }
-      ))
+        })
+      )
     }
 
     return backups
@@ -403,7 +416,7 @@ export default class {
     const targetXapi = this._xo.getXapi(targetSr)
 
    // Get Xen objects from XO objects.
-    const { uuid } = srcVm = srcXapi.getObject(srcVm._xapiId)
+    const { uuid } = (srcVm = srcXapi.getObject(srcVm._xapiId))
     targetSr = targetXapi.getObject(targetSr._xapiId)
 
     // 1. Find the local base for this SR (if any).
@@ -419,10 +432,17 @@ export default class {
     let size = 0
     const dstVm = await (async () => {
       const { cancel, token } = CancelToken.source()
-      const delta = await srcXapi.exportDeltaVm(token, srcVm.$id, localBaseUuid, {
-        bypassVdiChainsCheck: force,
-        snapshotNameLabel: `XO_DELTA_EXPORT: ${targetSr.name_label} (${targetSr.uuid})`,
-      })
+      const delta = await srcXapi.exportDeltaVm(
+        token,
+        srcVm.$id,
+        localBaseUuid,
+        {
+          bypassVdiChainsCheck: force,
+          snapshotNameLabel: `XO_DELTA_EXPORT: ${targetSr.name_label} (${
+            targetSr.uuid
+          })`,
+        }
+      )
       $defer.onFailure(() => srcXapi.deleteVm(delta.vm.uuid))
       $defer.onFailure(cancel)
 
@@ -430,7 +450,7 @@ export default class {
       delta.vm.name_label += ` (${date})`
       delta.vm.other_config[TAG_SOURCE_VM] = uuid
       delta.vm.other_config[TAG_EXPORT_TIME] = date
-      delta.vm.tags = [ ...delta.vm.tags, 'Continuous Replication' ]
+      delta.vm.tags = [...delta.vm.tags, 'Continuous Replication']
 
       const { streams } = delta
       forEach(delta.vdis, (vdi, key) => {
@@ -443,49 +463,49 @@ export default class {
         streams[id] = stream.pipe(sizeStream)
       })
 
-      let toRemove = filter(targetXapi.objects.all, obj =>
-        obj.$type === 'vm' &&
-        obj.other_config[TAG_SOURCE_VM] === uuid
+      let toRemove = filter(
+        targetXapi.objects.all,
+        obj => obj.$type === 'vm' && obj.other_config[TAG_SOURCE_VM] === uuid
       )
       const { length } = toRemove
       const deleteBase = length === 0 // old replications are not captured in toRemove
       const n = length - retention + 1 // take into account the future copy
-      toRemove = n > 0
-        ? sortBy(toRemove, _ => _.other_config[TAG_EXPORT_TIME]).slice(0, n)
-        : undefined
+      toRemove =
+        n > 0
+          ? sortBy(toRemove, _ => _.other_config[TAG_EXPORT_TIME]).slice(0, n)
+          : undefined
 
-      const promise = targetXapi.importDeltaVm(
-        delta,
-        {
-          deleteBase,
-          srId: targetSr.$id,
-        }
-      )
+      const promise = targetXapi.importDeltaVm(delta, {
+        deleteBase,
+        srId: targetSr.$id,
+      })
 
       // Once done, (asynchronously) remove the (now obsolete) local
       // base.
       if (localBaseUuid) {
-        promise.then(() => srcXapi.deleteVm(localBaseUuid))::ignoreErrors()
+        ;promise.then(() => srcXapi.deleteVm(localBaseUuid))::ignoreErrors()
       }
 
       if (toRemove !== undefined) {
-        promise.then(() => asyncMap(toRemove, _ =>
-          targetXapi.deleteVm(_.$id))
-        )::ignoreErrors()
+        ;promise
+          .then(() => asyncMap(toRemove, _ => targetXapi.deleteVm(_.$id)))
+          ::ignoreErrors()
       }
 
       // (Asynchronously) Identify snapshot as future base.
-      promise.then(() => {
-        return srcXapi._updateObjectMapProperty(srcVm, 'other_config', {
-          [TAG_LAST_BASE_DELTA]: delta.vm.uuid,
+      ;promise
+        .then(() => {
+          return srcXapi._updateObjectMapProperty(srcVm, 'other_config', {
+            [TAG_LAST_BASE_DELTA]: delta.vm.uuid,
+          })
         })
-      })::ignoreErrors()
+        ::ignoreErrors()
 
       return promise
     })()
 
     return {
-      // 5. Return the identifier of the new XO VM object.
+      // 5. Return the identifier of the new XO VM object.
       id: xapiObjectToXo(dstVm).id,
       transferDuration: Date.now() - transferStart,
       transferSize: size,
@@ -501,7 +521,7 @@ export default class {
       return
     }
 
-    const getPath = (file, dir) => dir ? `${dir}/${file}` : file
+    const getPath = (file, dir) => (dir ? `${dir}/${file}` : file)
 
     await asyncMap(backups.slice(0, n), backup =>
       handler.unlink(getPath(backup, dir))
     )
@@ -534,18 +554,23 @@ export default class {
   }
 
   // fix the parent UUID and filename in delta files after download from xapi or backup compression
-  async _chainDeltaVdiBackups ({handler, dir}) {
+  async _chainDeltaVdiBackups ({ handler, dir }) {
     const backups = await this._listVdiBackups(handler, dir)
     for (let i = 1; i < backups.length; i++) {
       const childPath = dir + '/' + backups[i]
-      const modified = await chainVhd(handler, dir + '/' + backups[i - 1], handler, childPath)
+      const modified = await chainVhd(
+        handler,
+        dir + '/' + backups[i - 1],
+        handler,
+        childPath
+      )
       if (modified) {
         await handler.refreshChecksum(childPath)
       }
     }
   }
 
-  async _mergeDeltaVdiBackups ({handler, dir, retention}) {
+  async _mergeDeltaVdiBackups ({ handler, dir, retention }) {
     const backups = await this._listVdiBackups(handler, dir)
     const i = backups.length - retention
 
@@ -564,9 +589,7 @@ export default class {
     const fullBackupId = j
 
     // Remove old backups before the most recent full.
-    await asyncMap(range(0, j), i =>
-      handler.unlink(`${dir}/${backups[i]}`)
-    )
+    await asyncMap(range(0, j), i => handler.unlink(`${dir}/${backups[i]}`))
 
     const parent = `${dir}/${backups[fullBackupId]}`
 
@@ -597,8 +620,9 @@ export default class {
     const backups = await this._listVdiBackups(handler, dir)
 
     // Search file. (delta or full backup)
-    const i = findIndex(backups, backup =>
-      getVdiTimestamp(backup) === getVdiTimestamp(filename)
+    const i = findIndex(
+      backups,
+      backup => getVdiTimestamp(backup) === getVdiTimestamp(filename)
     )
 
     if (i === -1) {
@@ -624,7 +648,10 @@ export default class {
     return sortBy(filter(files, isDeltaBackup))
   }
 
-  async _saveDeltaVdiBackup (xapi, { vdiParent, isFull, handler, stream, dir, retention }) {
+  async _saveDeltaVdiBackup (
+    xapi,
+    { vdiParent, isFull, handler, stream, dir, retention }
+  ) {
     const backupDirectory = `vdi_${vdiParent.uuid}`
     dir = `${dir}/${backupDirectory}`
 
@@ -632,10 +659,14 @@ export default class {
 
     // For old versions: remove old bases if exists.
     const bases = sortBy(
-      filter(vdiParent.$snapshots, { name_label: 'XO_DELTA_BASE_VDI_SNAPSHOT' }),
+      filter(vdiParent.$snapshots, {
+        name_label: 'XO_DELTA_BASE_VDI_SNAPSHOT',
+      }),
      base => base.snapshot_time
    )
-    forEach(bases, base => { xapi.deleteVdi(base.$id)::ignoreErrors() })
+    forEach(bases, base => {
+      ;xapi.deleteVdi(base.$id)::ignoreErrors()
+    })
 
     // Export full or delta backup.
     const vdiFilename = `${date}_${isFull ? 'full' : 'delta'}.vhd`
@@ -655,12 +686,7 @@ export default class {
       stream.on('error', error => targetStream.emit('error', error))
 
       await Promise.all([
-        eventToPromise(
-          stream
-            .pipe(sizeStream)
-            .pipe(targetStream),
-          'finish'
-        ),
+        eventToPromise(stream.pipe(sizeStream).pipe(targetStream), 'finish'),
         stream.task,
       ])
     } catch (error) {
@@ -690,7 +716,7 @@ export default class {
   }
 
   @deferrable
-  async rollingDeltaVmBackup ($defer, {vm, remoteId, tag, retention}) {
+  async rollingDeltaVmBackup ($defer, { vm, remoteId, tag, retention }) {
     const transferStart = Date.now()
     const handler = await this._xo.getRemoteHandler(remoteId)
     const xapi = this._xo.getXapi(vm)
@@ -703,7 +729,9 @@ export default class {
       base => base.snapshot_time
     )
     const baseVm = bases.pop()
-    forEach(bases, base => { xapi.deleteVm(base.$id)::ignoreErrors() })
+    forEach(bases, base => {
+      ;xapi.deleteVm(base.$id)::ignoreErrors()
+    })
 
    // Check backup dirs.
    const dir = `vm_delta_${tag}_${vm.uuid}`
@@ -716,7 +744,10 @@ export default class {
       }
 
       const vdi = vbd.$VDI
-      const backups = await this._listVdiBackups(handler, `${dir}/vdi_${vdi.uuid}`)
+      const backups = await this._listVdiBackups(
+        handler,
+        `${dir}/vdi_${vdi.uuid}`
+      )
 
       // Force full if missing full.
       if (!find(backups, isFullVdiBackup)) {
@@ -727,11 +758,16 @@ export default class {
 
     // Export...
     const { cancel, token } = CancelToken.source()
-    const delta = await xapi.exportDeltaVm(token, vm.$id, baseVm && baseVm.$id, {
-      snapshotNameLabel: `XO_DELTA_BASE_VM_SNAPSHOT_${tag}`,
-      fullVdisRequired,
-      disableBaseTags: true,
-    })
+    const delta = await xapi.exportDeltaVm(
+      token,
+      vm.$id,
+      baseVm && baseVm.$id,
+      {
+        snapshotNameLabel: `XO_DELTA_BASE_VM_SNAPSHOT_${tag}`,
+        fullVdisRequired,
+        disableBaseTags: true,
+      }
+    )
     $defer.onFailure(() => xapi.deleteVm(delta.vm.uuid))
     $defer.onFailure(cancel)
 
@@ -747,15 +783,14 @@ export default class {
           stream: delta.streams[`${key}.vhd`],
           dir,
           retention,
-        })
-          .then(data => {
-            delta.vdis[key] = {
-              ...delta.vdis[key],
-              xoPath: data.path,
-            }
+        }).then(data => {
+          delta.vdis[key] = {
+            ...delta.vdis[key],
+            xoPath: data.path,
+          }
 
-            return data
-          })
+          return data
+        })
       })
     )
 
@@ -772,9 +807,11 @@ export default class {
       }
     }
 
-    $defer.onFailure(() => asyncMap(fulFilledVdiBackups, vdiBackup =>
-      handler.unlink(`${dir}/${vdiBackup.value()}`)::ignoreErrors()
-    ))
+    $defer.onFailure(() =>
+      asyncMap(fulFilledVdiBackups, vdiBackup =>
+        handler.unlink(`${dir}/${vdiBackup.value()}`)::ignoreErrors()
+      )
+    )
 
     if (error) {
       throw error
@@ -801,14 +838,17 @@ export default class {
         const backupDir = `${dir}/${backupDirectory}`
         dataSize += vdiBackup.value().size
 
-        return this._mergeDeltaVdiBackups({ handler, dir: backupDir, retention })
-          .then(size => {
-            this._chainDeltaVdiBackups({ handler, dir: backupDir })
+        return this._mergeDeltaVdiBackups({
+          handler,
+          dir: backupDir,
+          retention,
+        }).then(size => {
+          this._chainDeltaVdiBackups({ handler, dir: backupDir })
 
-            if (size !== undefined) {
-              mergedDataSize += size
-            }
-          })
+          if (size !== undefined) {
+            mergedDataSize += size
+          }
+        })
       })
     )
 
@@ -818,7 +858,7 @@ export default class {
     await this._removeOldDeltaVmBackups(xapi, { vm, handler, dir, retention })
 
     if (baseVm) {
-      xapi.deleteVm(baseVm.$id)::ignoreErrors()
+      ;xapi.deleteVm(baseVm.$id)::ignoreErrors()
    }
 
     return {
@@ -831,12 +871,14 @@ export default class {
     }
   }
 
-  async importDeltaVmBackup ({sr, remoteId, filePath, mapVdisSrs = {}}) {
+  async importDeltaVmBackup ({ sr, remoteId, filePath, mapVdisSrs = {} }) {
     filePath = `${filePath}${DELTA_BACKUP_EXT}`
     const { datetime } = parseVmBackupPath(filePath)
 
     const handler = await this._xo.getRemoteHandler(remoteId)
-    const xapi = this._xo.getXapi(sr || mapVdisSrs[getFirstPropertyName(mapVdisSrs)])
+    const xapi = this._xo.getXapi(
+      sr || mapVdisSrs[getFirstPropertyName(mapVdisSrs)]
+    )
 
     const delta = JSON.parse(await handler.readFile(filePath))
     let vm
@@ -844,20 +886,25 @@ export default class {
     if (versionSatisfies(version, '^1')) {
       const basePath = dirname(filePath)
-      const streams = delta.streams = {}
+      const streams = (delta.streams = {})
 
       await Promise.all(
-        mapToArray(
-          delta.vdis,
-          async (vdi, id) => {
-            const vdisFolder = `${basePath}/${dirname(vdi.xoPath)}`
-            const backups = await this._listDeltaVdiDependencies(handler, `${basePath}/${vdi.xoPath}`)
+        mapToArray(delta.vdis, async (vdi, id) => {
+          const vdisFolder = `${basePath}/${dirname(vdi.xoPath)}`
+          const backups = await this._listDeltaVdiDependencies(
+            handler,
+            `${basePath}/${vdi.xoPath}`
+          )
 
-            streams[`${id}.vhd`] = await Promise.all(mapToArray(backups, async backup =>
-              handler.createReadStream(`${vdisFolder}/${backup}`, { checksum: true, ignoreMissingChecksum: true })
-            ))
-          }
-        )
+          streams[`${id}.vhd`] = await Promise.all(
+            mapToArray(backups, async backup =>
+              handler.createReadStream(`${vdisFolder}/${backup}`, {
+                checksum: true,
+                ignoreMissingChecksum: true,
+              })
+            )
+          )
+        })
       )
 
       delta.vm.name_label += ` (${shortDate(datetime * 1e3)})`
@@ -877,13 +924,13 @@ export default class {
 
   // -----------------------------------------------------------------
 
-  async backupVm ({vm, remoteId, file, compress}) {
+  async backupVm ({ vm, remoteId, file, compress }) {
     const handler = await this._xo.getRemoteHandler(remoteId)
-    return this._backupVm(vm, handler, file, {compress})
+    return this._backupVm(vm, handler, file, { compress })
   }
 
   @deferrable
-  async _backupVm ($defer, vm, handler, file, {compress}) {
+  async _backupVm ($defer, vm, handler, file, { compress }) {
     const targetStream = await handler.createOutputStream(file)
     $defer.onFailure.call(handler, 'unlink', file)
     $defer.onFailure.call(targetStream, 'close')
@@ -896,9 +943,7 @@ export default class {
 
     const sizeStream = createSizeStream()
 
-    sourceStream
-      .pipe(sizeStream)
-      .pipe(targetStream)
+    sourceStream.pipe(sizeStream).pipe(targetStream)
 
     await promise
 
@@ -907,20 +952,27 @@ export default class {
     }
   }
 
-  async rollingBackupVm ({vm, remoteId, tag, retention, compress}) {
+  async rollingBackupVm ({ vm, remoteId, tag, retention, compress }) {
     const transferStart = Date.now()
     const handler = await this._xo.getRemoteHandler(remoteId)
 
     const files = await handler.list()
 
-    const reg = new RegExp('^[^_]+_' + escapeStringRegexp(`${tag}_${vm.name_label}.xva`))
-    const backups = sortBy(filter(files, (fileName) => reg.test(fileName)))
+    const reg = new RegExp(
+      '^[^_]+_' + escapeStringRegexp(`${tag}_${vm.name_label}.xva`)
+    )
+    const backups = sortBy(filter(files, fileName => reg.test(fileName)))
 
     const date = safeDateFormat(new Date())
     const file = `${date}_${tag}_${vm.name_label}.xva`
 
-    const data = await this._backupVm(vm, handler, file, {compress})
-    await this._removeOldBackups(backups, handler, undefined, backups.length - (retention - 1))
+    const data = await this._backupVm(vm, handler, file, { compress })
+    await this._removeOldBackups(
+      backups,
+      handler,
+      undefined,
+      backups.length - (retention - 1)
+    )
 
     data.transferDuration = Date.now() - transferStart
 
     return data
@@ -930,14 +982,26 @@ export default class {
     const xapi = this._xo.getXapi(vm)
 
     vm = xapi.getObject(vm._xapiId)
 
-    const reg = new RegExp('^rollingSnapshot_[^_]+_' + escapeStringRegexp(tag) + '_')
-    const snapshots = sortBy(filter(vm.$snapshots, snapshot => reg.test(snapshot.name_label)), 'name_label')
+    const reg = new RegExp(
+      '^rollingSnapshot_[^_]+_' + escapeStringRegexp(tag) + '_'
+    )
+    const snapshots = sortBy(
+      filter(vm.$snapshots, snapshot => reg.test(snapshot.name_label)),
+      'name_label'
+    )
     const date = safeDateFormat(new Date())
 
-    await xapi.snapshotVm(vm.$id, `rollingSnapshot_${date}_${tag}_${vm.name_label}`)
+    await xapi.snapshotVm(
+      vm.$id,
+      `rollingSnapshot_${date}_${tag}_${vm.name_label}`
+    )
 
     const promises = []
-    for (let surplus = snapshots.length - (retention - 1); surplus > 0; surplus--) {
+    for (
+      let surplus = snapshots.length - (retention - 1);
+      surplus > 0;
+      surplus--
+    ) {
       const oldSnap = snapshots.shift()
       promises.push(xapi.deleteVm(oldSnap.uuid))
     }
@@ -945,16 +1009,22 @@ export default class {
   }
 
   _removeVms (xapi, vms) {
-    return Promise.all(mapToArray(vms, vm =>
-      // Do not consider a failure to delete an old copy as a fatal error.
-      xapi.deleteVm(vm.$id)::ignoreErrors()
-    ))
+    return Promise.all(
+      mapToArray(vms, vm =>
+        // Do not consider a failure to delete an old copy as a fatal error.
+        xapi.deleteVm(vm.$id)::ignoreErrors()
+      )
+    )
   }
 
-  async rollingDrCopyVm ({vm, sr, tag, retention, deleteOldBackupsFirst}) {
+  async rollingDrCopyVm ({ vm, sr, tag, retention, deleteOldBackupsFirst }) {
     const transferStart = Date.now()
     tag = 'DR_' + tag
-    const reg = new RegExp('^' + escapeStringRegexp(`${vm.name_label}_${tag}_`) + '[0-9]{8}T[0-9]{6}Z$')
+    const reg = new RegExp(
+      '^' +
+        escapeStringRegexp(`${vm.name_label}_${tag}_`) +
+        '[0-9]{8}T[0-9]{6}Z$'
+    )
 
     const targetXapi = this._xo.getXapi(sr)
     sr = targetXapi.getObject(sr._xapiId)
@@ -986,7 +1056,8 @@ export default class {
     })
 
     targetXapi._updateObjectMapProperty(data.vm, 'blocked_operations', {
-      start: 'Start operation for this vm is blocked, clone it if you want to use it.',
+      start:
+        'Start operation for this vm is blocked, clone it if you want to use it.',
     })
 
     await targetXapi.addTag(data.vm.$id, 'Disaster Recovery')
@@ -1004,67 +1075,73 @@ export default class {
 
   // -----------------------------------------------------------------
 
   _mountVhd (remoteId, vhdPath) {
-    return Promise.all([
-      this._xo.getRemoteHandler(remoteId),
-      tmpDir(),
-    ]).then(([ handler, mountDir ]) => {
-      if (!handler._getRealPath) {
-        throw new Error(`this remote is not supported`)
-      }
+    return Promise.all([this._xo.getRemoteHandler(remoteId), tmpDir()]).then(
+      ([handler, mountDir]) => {
+        if (!handler._getRealPath) {
+          throw new Error(`this remote is not supported`)
+        }
 
-      const remotePath = handler._getRealPath()
-      vhdPath = resolveSubpath(remotePath, vhdPath)
+        const remotePath = handler._getRealPath()
+        vhdPath = resolveSubpath(remotePath, vhdPath)
 
-      return Promise.resolve().then(() => {
-        // TODO: remove when no longer necessary.
-        //
-        // Currently, the filenames of the VHD changes over time
-        // (delta → full), but the JSON is not updated, therefore the
-        // VHD path may need to be fixed.
-        return endsWith(vhdPath, '_delta.vhd')
-          ? pFromCallback(cb => stat(vhdPath, cb)).then(
-            () => vhdPath,
-            error => {
-              if (error && error.code === 'ENOENT') {
-                return `${vhdPath.slice(0, -10)}_full.vhd`
-              }
-            }
-          )
-          : vhdPath
-      }).then(vhdPath => execa('vhdimount', [ vhdPath, mountDir ])).then(() =>
-        pFromCallback(cb => readdir(mountDir, cb)).then(entries => {
-          let max = 0
-          forEach(entries, entry => {
-            const matches = /^vhdi(\d+)/.exec(entry)
-            if (matches) {
-              const value = +matches[1]
-              if (value > max) {
-                max = value
-              }
-            }
-          })
+        return Promise.resolve()
+          .then(() => {
+            // TODO: remove when no longer necessary.
+            //
+            // Currently, the filenames of the VHD changes over time
+            // (delta → full), but the JSON is not updated, therefore the
+            // VHD path may need to be fixed.
+            return endsWith(vhdPath, '_delta.vhd')
+              ? pFromCallback(cb => stat(vhdPath, cb)).then(
+                () => vhdPath,
+                error => {
+                  if (error && error.code === 'ENOENT') {
+                    return `${vhdPath.slice(0, -10)}_full.vhd`
+                  }
+                }
+              )
+              : vhdPath
          })
+          .then(vhdPath => execa('vhdimount', [vhdPath, mountDir]))
+          .then(() =>
+            pFromCallback(cb => readdir(mountDir, cb)).then(entries => {
+              let max = 0
+              forEach(entries, entry => {
+                const matches = /^vhdi(\d+)/.exec(entry)
+                if (matches) {
+                  const value = +matches[1]
+                  if (value > max) {
+                    max = value
+                  }
+                }
+              })
 
-          if (!max) {
-            throw new Error('no disks found')
-          }
+              if (!max) {
+                throw new Error('no disks found')
+              }
 
-          return {
-            path: `${mountDir}/vhdi${max}`,
-            unmount: once(() => execa('fusermount', [ '-uz', mountDir ])),
-          }
-        })
-      )
-    })
+              return {
+                path: `${mountDir}/vhdi${max}`,
+                unmount: once(() => execa('fusermount', ['-uz', mountDir])),
+              }
+            })
+          )
+      }
+    )
   }
 
   _mountPartition (remoteId, vhdPath, partitionId) {
     return this._mountVhd(remoteId, vhdPath).then(device =>
-      mountPartition2(device, partitionId).then(partition => ({
-        ...partition,
-        unmount: () => partition.unmount().then(device.unmount),
-      })).catch(error => device.unmount().then(() => {
-        throw error
-      }))
+      mountPartition2(device, partitionId)
+        .then(partition => ({
+          ...partition,
+          unmount: () => partition.unmount().then(device.unmount),
+        }))
+        .catch(error =>
+          device.unmount().then(() => {
+            throw error
+          })
+        )
     )
   }
 
@@ -1088,12 +1165,16 @@ export default class {
     const entries = await pFromCallback(cb => readdir(path, cb))
 
     const entriesMap = {}
-    await Promise.all(mapToArray(entries, async name => {
-      const stats = await pFromCallback(cb => stat(`${path}/${name}`, cb))::ignoreErrors()
-      if (stats) {
-        entriesMap[stats.isDirectory() ? `${name}/` : name] = {}
-      }
-    }))
+    await Promise.all(
+      mapToArray(entries, async name => {
+        const stats = await pFromCallback(cb =>
+          stat(`${path}/${name}`, cb)
+        )::ignoreErrors()
+        if (stats) {
+          entriesMap[stats.isDirectory() ? `${name}/` : name] = {}
+        }
+      })
+    )
 
     return entriesMap
   }
@@ -1108,7 +1189,10 @@ export default class {
     }
     return mapToArray(paths, path => {
       ++i
-      return createReadStream(resolveSubpath(partition.path, path)).once('end', onEnd)
+      return createReadStream(resolveSubpath(partition.path, path)).once(
+        'end',
+        onEnd
+      )
     })
   }
 }
diff --git a/packages/xo-server/src/xo-mixins/hooks.js b/packages/xo-server/src/xo-mixins/hooks.js
index bfe10201d..ce6565b6b 100644
--- a/packages/xo-server/src/xo-mixins/hooks.js
+++ b/packages/xo-server/src/xo-mixins/hooks.js
@@ -20,11 +20,13 @@ function emitAsync (event) {
 
   const onError = opts != null && opts.onError
 
-  return Promise.all(this.listeners(event).map(
-    listener => new Promise(resolve => {
-      resolve(listener.apply(this, args))
-    }).catch(onError)
-  ))
+  return Promise.all(
+    this.listeners(event).map(listener =>
+      new Promise(resolve => {
+        resolve(listener.apply(this, args))
+      }).catch(onError)
+    )
+  )
 }
 
 const makeSingletonHook = (hook, postEvent) => {
@@ -44,12 +46,17 @@ const makeSingletonHook = (hook, postEvent) => {
 
 const runHook = (app, hook) => {
   debug(`${hook} start…`)
-  const promise = emitAsync.call(app, {
-    onError: error => console.error(
-      `[WARN] hook ${hook} failure:`,
-      (error != null && error.stack) || error
-    ),
-  }, hook)
+  const promise = emitAsync.call(
+    app,
+    {
+      onError: error =>
+        console.error(
+          `[WARN] hook ${hook} failure:`,
+          (error != null && error.stack) || error
+        ),
+    },
+    hook
+  )
   promise.then(() => {
     debug(`${hook} finished`)
   })
diff --git a/packages/xo-server/src/xo-mixins/http.js b/packages/xo-server/src/xo-mixins/http.js
index b814f38c7..5ba2f1056 100644
--- a/packages/xo-server/src/xo-mixins/http.js
+++ b/packages/xo-server/src/xo-mixins/http.js
@@ -1,23 +1,22 @@
 import hrp from 'http-request-plus'
 import ProxyAgent from 'proxy-agent'
 
-import {
-  firstDefined,
-} from '../utils'
+import { firstDefined } from '../utils'
 
 export default class Http {
-  constructor (_, {
-    httpProxy = firstDefined(
-      process.env.http_proxy,
-      process.env.HTTP_PROXY
-    ),
-  }) {
+  constructor (
+    _,
+    { httpProxy = firstDefined(process.env.http_proxy, process.env.HTTP_PROXY) }
+  ) {
     this._proxy = httpProxy && new ProxyAgent(httpProxy)
   }
 
   httpRequest (...args) {
-    return hrp({
-      agent: this._proxy,
-    }, ...args)
+    return hrp(
+      {
+        agent: this._proxy,
+      },
+      ...args
+    )
   }
 }
diff --git a/packages/xo-server/src/xo-mixins/ip-pools.js b/packages/xo-server/src/xo-mixins/ip-pools.js
index 283feb548..2be6af2fc 100644
--- a/packages/xo-server/src/xo-mixins/ip-pools.js
+++ b/packages/xo-server/src/xo-mixins/ip-pools.js
@@ -40,10 +40,10 @@ const normalize = ({
   resourceSets,
 })
 
-const _isAddressInIpPool = (address, network, ipPool) => (
-  ipPool.addresses && (address in ipPool.addresses) &&
+const _isAddressInIpPool = (address, network, ipPool) =>
+  ipPool.addresses &&
+  address in ipPool.addresses &&
   includes(ipPool.networks, isObject(network) ? network.id : network)
-)
 
 // ===================================================================
 
@@ -57,9 +57,11 @@ export default class IpPools {
     xo.on('start', async () => {
       this._store = await xo.getStore('ipPools')
 
-      xo.addConfigManager('ipPools',
+      xo.addConfigManager(
+        'ipPools',
         () => this.getAllIpPools(),
-        ipPools => Promise.all(mapToArray(ipPools, ipPool => this._save(ipPool)))
+        ipPools =>
+          Promise.all(mapToArray(ipPools, ipPool => this._save(ipPool)))
       )
     })
   }
@@ -81,10 +83,12 @@ export default class IpPools {
     const store = this._store
 
     if (await store.has(id)) {
-      await Promise.all(mapToArray(await this._xo.getAllResourceSets(), async set => {
-        await this._xo.removeLimitFromResourceSet(`ipPool:${id}`, set.id)
-        return this._xo.removeIpPoolFromResourceSet(id, set.id)
-      }))
+      await Promise.all(
+        mapToArray(await this._xo.getAllResourceSets(), async set => {
+          await this._xo.removeLimitFromResourceSet(`ipPool:${id}`, set.id)
+          return this._xo.removeIpPoolFromResourceSet(id, set.id)
+        })
+      )
       await this._removeIpAddressesFromVifs(
         mapValues((await this.getIpPool(id)).addresses, 'vifs')
       )
@@ -123,7 +127,9 @@ export default class IpPools {
   }
 
   async _getAddressIpPool (address, network) {
-    const ipPools = await this._getAllIpPools(ipPool => _isAddressInIpPool(address, network, ipPool))
+    const ipPools = await this._getAllIpPools(ipPool =>
+      _isAddressInIpPool(address, network, ipPool)
+    )
 
     return ipPools && ipPools[0]
   }
@@ -134,9 +140,16 @@ export default class IpPools {
     const vifs = vm.VIFs
     const ipPools = []
     for (const vifId of vifs) {
-      const { allowedIpv4Addresses, allowedIpv6Addresses, $network } = this._xo.getObject(vifId)
+      const {
+        allowedIpv4Addresses,
+        allowedIpv6Addresses,
+        $network,
+      } = this._xo.getObject(vifId)
 
-      for (const address of concat(allowedIpv4Addresses, allowedIpv6Addresses)) {
+      for (const address of concat(
+        allowedIpv4Addresses,
+        allowedIpv6Addresses
+      )) {
         const ipPool = await this._getAddressIpPool(address, $network)
         ipPool && ipPools.push(ipPool.id)
       }
@@ -158,11 +171,12 @@ export default class IpPools {
       const resourseSetId = xapi.xo.getData(vif.VM, 'resourceSet')
 
       return () => {
-        const saveIpPools = () => Promise.all(mapToArray(updatedIpPools, ipPool => this._save(ipPool)))
+        const saveIpPools = () =>
+          Promise.all(mapToArray(updatedIpPools, ipPool => this._save(ipPool)))
         return resourseSetId
-          ? this._xo.allocateLimitsInResourceSet(limits, resourseSetId).then(
-            saveIpPools
-          )
+          ? this._xo
+            .allocateLimitsInResourceSet(limits, resourseSetId)
+            .then(saveIpPools)
           : saveIpPools()
       }
     })()
@@ -173,45 +187,47 @@ export default class IpPools {
 
       const isVif = id => id === vifId
 
-      highland(this._store.createValueStream()).each(ipPool => {
-        const { addresses, networks } = updatedIpPools[ipPool.id] || ipPool
-        if (!(addresses && networks && includes(networks, networkId))) {
-          return false
-        }
+      highland(this._store.createValueStream())
+        .each(ipPool => {
+          const { addresses, networks } = updatedIpPools[ipPool.id] || ipPool
+          if (!(addresses && networks && includes(networks, networkId))) {
+            return false
+          }
 
-        let allocations = 0
-        let changed = false
-        forEach(removeAddresses, address => {
-          let vifs, i
-          if (
-            (vifs = addresses[address]) &&
-            (vifs = vifs.vifs) &&
-            (i = findIndex(vifs, isVif)) !== -1
-          ) {
-            vifs.splice(i, 1)
-            --allocations
-            changed = true
+          let allocations = 0
+          let changed = false
+          forEach(removeAddresses, address => {
+            let vifs, i
+            if (
+              (vifs = addresses[address]) &&
+              (vifs = vifs.vifs) &&
+              (i = findIndex(vifs, isVif)) !== -1
+            ) {
+              vifs.splice(i, 1)
+              --allocations
+              changed = true
+            }
+          })
+          forEach(addAddresses, address => {
+            const data = addresses[address]
+            if (!data) {
+              return
+            }
+            const vifs = data.vifs || (data.vifs = [])
+            if (!includes(vifs, vifId)) {
+              vifs.push(vifId)
+              ++allocations
+              changed = true
+            }
+          })
+
+          if (changed) {
+            const { id } = ipPool
+            updatedIpPools[id] = ipPool
+            limits[`ipPool:${id}`] = (limits[`ipPool:${id}`] || 0) + allocations
           }
         })
-        forEach(addAddresses, address => {
-          const data = addresses[address]
-          if (!data) {
-            return
-          }
-          const vifs = data.vifs || (data.vifs = [])
-          if (!includes(vifs, vifId)) {
-            vifs.push(vifId)
-            ++allocations
-            changed = true
-          }
-        })
-
-        if (changed) {
-          const { id } = ipPool
-          updatedIpPools[id] = ipPool
-          limits[`ipPool:${id}`] = (limits[`ipPool:${id}`] || 0) + allocations
-        }
-      }).toCallback(cb)
+        .toCallback(cb)
     }).then(allocAndSave)
   }
 
@@ -222,38 +238,39 @@ export default class IpPools {
         if (mapVifAddresses[vifId]) {
           mapVifAddresses[vifId].push(address)
         } else {
-          mapVifAddresses[vifId] = [ address ]
+          mapVifAddresses[vifId] = [address]
        }
      })
    })
 
     const { getXapi } = this._xo
-    return Promise.all(mapToArray(mapVifAddresses, (addresses, vifId) => {
-      let vif
-      try {
-        // The IP may not have been correctly deallocated from the IP pool when the VIF was deleted
-        vif = this._xo.getObject(vifId)
-      } catch (error) {
-        return
-      }
-      const { allowedIpv4Addresses, allowedIpv6Addresses } = vif
-      remove(allowedIpv4Addresses, address => includes(addresses, address))
-      remove(allowedIpv6Addresses, address => includes(addresses, address))
-      this.allocIpAddresses(vifId, undefined, concat(allowedIpv4Addresses, allowedIpv6Addresses))
+    return Promise.all(
+      mapToArray(mapVifAddresses, (addresses, vifId) => {
+        let vif
+        try {
+          // The IP may not have been correctly deallocated from the IP pool when the VIF was deleted
+          vif = this._xo.getObject(vifId)
+        } catch (error) {
+          return
+        }
+        const { allowedIpv4Addresses, allowedIpv6Addresses } = vif
+        remove(allowedIpv4Addresses, address => includes(addresses, address))
+        remove(allowedIpv6Addresses, address => includes(addresses, address))
+        this.allocIpAddresses(
+          vifId,
+          undefined,
+          concat(allowedIpv4Addresses, allowedIpv6Addresses)
+        )
 
-      return getXapi(vif).editVif(vif._xapiId, {
-        ipv4Allowed: allowedIpv4Addresses,
-        ipv6Allowed: allowedIpv6Addresses,
+        return getXapi(vif).editVif(vif._xapiId, {
+          ipv4Allowed: allowedIpv4Addresses,
+          ipv6Allowed: allowedIpv6Addresses,
+        })
       })
-    }))
+    )
   }
 
-  async updateIpPool (id, {
-    addresses,
-    name,
-    networks,
-    resourceSets,
-  }) {
+  async updateIpPool (id, { addresses, name, networks, resourceSets }) {
     const ipPool = await this.getIpPool(id)
     const previousAddresses = { ...ipPool.addresses }
 
@@ -270,7 +287,9 @@ export default class IpPools {
 
     // Remove the addresses that are no longer in the IP pool from the concerned VIFs
     const deletedAddresses = diff(keys(previousAddresses), keys(addresses_))
-    await this._removeIpAddressesFromVifs(pick(previousAddresses, deletedAddresses))
+    await this._removeIpAddressesFromVifs(
+      pick(previousAddresses, deletedAddresses)
+    )
 
     if (isEmpty(addresses_)) {
       delete ipPool.addresses
diff --git a/packages/xo-server/src/xo-mixins/jobs.js b/packages/xo-server/src/xo-mixins/jobs.js
index 4f2bea755..40dfa4d29 100644
--- a/packages/xo-server/src/xo-mixins/jobs.js
+++ b/packages/xo-server/src/xo-mixins/jobs.js
@@ -11,21 +11,20 @@ import { mapToArray } from '../utils'
 export default class Jobs {
   constructor (xo) {
     this._executor = new JobExecutor(xo)
-    const jobsDb = this._jobs = new JobsDb({
+    const jobsDb = (this._jobs = new JobsDb({
       connection: xo._redis,
       prefix: 'xo:job',
       indexes: ['user_id', 'key'],
-    })
+    }))
     this._runningJobs = Object.create(null)
 
     xo.on('clean', () => jobsDb.rebuildIndexes())
     xo.on('start', () => {
-      xo.addConfigManager('jobs',
+      xo.addConfigManager(
+        'jobs',
         () => jobsDb.get(),
-        jobs => Promise.all(mapToArray(jobs, job =>
-          jobsDb.save(job)
-        )),
-        [ 'users' ]
+        jobs => Promise.all(mapToArray(jobs, job => jobsDb.save(job))),
+        ['users']
       )
     })
   }
@@ -49,7 +48,7 @@ export default class Jobs {
     return job_.properties
   }
 
-  async updateJob ({id, ...props}) {
+  async updateJob ({ id, ...props }) {
     const job = await this.getJob(id)
 
     assign(job, props)
@@ -77,7 +76,9 @@ export default class Jobs {
   }
 
   async runJobSequence (idSequence) {
-    const jobs = await Promise.all(mapToArray(idSequence, id => this.getJob(id)))
+    const jobs = await Promise.all(
+      mapToArray(idSequence, id => this.getJob(id))
+    )
 
     for (const job of jobs) {
       await this._runJob(job)
diff --git a/packages/xo-server/src/xo-mixins/logs/index.js b/packages/xo-server/src/xo-mixins/logs/index.js
index f2c669806..a10aeb8fb 100644
--- a/packages/xo-server/src/xo-mixins/logs/index.js
+++ b/packages/xo-server/src/xo-mixins/logs/index.js
@@ -29,14 +29,15 @@ export default class Logs {
       db.del(key, cb)
     }
 
-    const onData = keep !== 0
-      ? () => {
-        if (--keep === 0) {
-          stream.on('data', deleteEntry)
-          stream.removeListener('data', onData)
+    const onData =
+      keep !== 0
+        ? () => {
+          if (--keep === 0) {
+            stream.on('data', deleteEntry)
+            stream.removeListener('data', onData)
+          }
         }
-      }
-      : deleteEntry
+        : deleteEntry
     stream.on('data', onData)
 
     await fromEvent(stream, 'end')
@@ -46,9 +47,8 @@ export default class Logs {
   }
 
   getLogger (namespace) {
-    return this._app.getStore('logs').then(store => new LevelDbLogger(
-      store,
-      namespace
-    ))
+    return this._app
+      .getStore('logs')
+      .then(store => new LevelDbLogger(store, namespace))
   }
 }
diff --git a/packages/xo-server/src/xo-mixins/logs/loggers/leveldb.js b/packages/xo-server/src/xo-mixins/logs/loggers/leveldb.js
index afc13c06b..418c0d83f 100644
--- a/packages/xo-server/src/xo-mixins/logs/loggers/leveldb.js
+++ b/packages/xo-server/src/xo-mixins/logs/loggers/leveldb.js
@@ -13,7 +13,7 @@ function generateUniqueKey (date) {
   }
 
   increment = 0
-  return String(lastDate = date)
+  return String((lastDate = date))
 }
 
 export default class LevelDbLogger extends AbstractLogger {
@@ -41,8 +41,9 @@ export default class LevelDbLogger extends AbstractLogger {
   }
 
   createReadStream () {
-    return highland(this._db.createReadStream())
-      .filter(({value}) => value.namespace === this._namespace)
+    return highland(this._db.createReadStream()).filter(
+      ({ value }) => value.namespace === this._namespace
+    )
  }
 
   del (id) {
diff --git a/packages/xo-server/src/xo-mixins/plugins.js b/packages/xo-server/src/xo-mixins/plugins.js
index caac19743..ef130d408 100644
--- a/packages/xo-server/src/xo-mixins/plugins.js
+++ b/packages/xo-server/src/xo-mixins/plugins.js
@@ -1,15 +1,8 @@
 import Ajv from 'ajv'
 
 import { PluginsMetadata } from '../models/plugin-metadata'
-import {
-  invalidParameters,
-  noSuchObject,
-} from 'xo-common/api-errors'
-import {
-  createRawObject,
-  isFunction,
-  mapToArray,
-} from '../utils'
+import { invalidParameters, noSuchObject } from 'xo-common/api-errors'
+import { createRawObject, isFunction, mapToArray } from '../utils'
 
 // ===================================================================
 
@@ -26,11 +19,13 @@ export default class {
     })
 
     xo.on('start', () => {
-      xo.addConfigManager('plugins',
+      xo.addConfigManager(
+        'plugins',
         () => this._pluginsMetadata.get(),
-        plugins => Promise.all(mapToArray(plugins, plugin =>
-          this._pluginsMetadata.save(plugin)
-        ))
+        plugins =>
+          Promise.all(
+            mapToArray(plugins, plugin => this._pluginsMetadata.save(plugin))
+          )
       )
     })
  }
@@ -45,9 +40,7 @@ export default class {
   async _getPluginMetadata (id) {
     const metadata = await this._pluginsMetadata.first(id)
 
-    return metadata
-      ? metadata.properties
-      : null
+    return metadata ? metadata.properties : null
   }
 
   async registerPlugin (
@@ -60,7 +53,7 @@ export default class {
     version
   ) {
     const id = name
-    const plugin = this._plugins[id] = {
+    const plugin = (this._plugins[id] = {
       configurationPresets,
       configurationSchema,
       configured: !configurationSchema,
@@ -72,16 +65,13 @@ export default class {
       testSchema,
       unloadable: isFunction(instance.unload),
       version,
-    }
+    })
 
     const metadata = await this._getPluginMetadata(id)
     let autoload = true
     let configuration
 
     if (metadata) {
-      ({
-        autoload,
-        configuration,
-      } = metadata)
+      ;({ autoload, configuration } = metadata)
     } else {
       console.log(`[NOTICE] register plugin ${name} for the first time`)
       await this._pluginsMetadata.save({
@@ -115,10 +105,8 @@ export default class {
       unloadable,
       version,
     } = this._getRawPlugin(id)
-    const {
-      autoload,
-      configuration,
-    } = (await this._getPluginMetadata(id)) || {}
+    const { autoload, configuration } =
+      (await this._getPluginMetadata(id)) || {}
 
     return {
       id,
@@ -232,10 +220,12 @@ export default class {
     const { testSchema } = plugin
     if (testSchema) {
       if (data == null) {
-        throw invalidParameters([{
-          field: 'data',
-          message: 'is the wrong type',
-        }])
+        throw invalidParameters([
+          {
+            field: 'data',
+            message: 'is the wrong type',
+          },
+        ])
       }
 
       const validate = this._ajv.compile(testSchema)
diff --git a/packages/xo-server/src/xo-mixins/remotes.js b/packages/xo-server/src/xo-mixins/remotes.js
index 8c1fd2b2d..82f576007 100644
--- a/packages/xo-server/src/xo-mixins/remotes.js
+++ b/packages/xo-server/src/xo-mixins/remotes.js
@@ -3,13 +3,8 @@ import { noSuchObject } from 'xo-common/api-errors'
 import RemoteHandlerLocal from '../remote-handlers/local'
 import RemoteHandlerNfs from '../remote-handlers/nfs'
 import RemoteHandlerSmb from '../remote-handlers/smb'
-import {
-  forEach,
-  mapToArray,
-} from '../utils'
-import {
-  Remotes,
-} from '../models/remote'
+import { forEach, mapToArray } from '../utils'
+import { Remotes } from '../models/remote'
 
 // ===================================================================
 
@@ -23,11 +18,11 @@ export default class {
     xo.on('clean', () => this._remotes.rebuildIndexes())
     xo.on('start', async () => {
-      xo.addConfigManager('remotes',
+      xo.addConfigManager(
+        'remotes',
         () => this._remotes.get(),
-        remotes => Promise.all(mapToArray(remotes, remote =>
-          this._remotes.save(remote)
-        ))
+        remotes =>
+          Promise.all(mapToArray(remotes, remote => this._remotes.save(remote)))
       )
 
       await this.initRemotes()
@@ -83,21 +78,21 @@ export default class {
     return (await this._getRemote(id)).properties
   }
 
-  async createRemote ({name, url}) {
+  async createRemote ({ name, url }) {
     const remote = await this._remotes.create(name, url)
-    return /* await */ this.updateRemote(remote.get('id'), {enabled: true})
+    return /* await */ this.updateRemote(remote.get('id'), { enabled: true })
   }
 
-  async updateRemote (id, {name, url, enabled, error}) {
+  async updateRemote (id, { name, url, enabled, error }) {
     const remote = await this._getRemote(id)
-    this._updateRemote(remote, {name, url, enabled, error})
+    this._updateRemote(remote, { name, url, enabled, error })
     const handler = await this.getRemoteHandler(remote.properties, true)
     const props = await handler.sync()
     this._updateRemote(remote, props)
     return (await this._remotes.save(remote)).properties
  }
 
-  _updateRemote (remote, {name, url, enabled, error}) {
+  _updateRemote (remote, { name, url, enabled, error }) {
     if (name) remote.set('name', name)
     if (url) remote.set('url', url)
     if (enabled !== undefined) remote.set('enabled', enabled)
@@ -127,7 +122,7 @@ export default class {
     const remotes = await this.getAllRemotes()
     for (const remote of remotes) {
       try {
-        (await this.getRemoteHandler(remote, true)).forget()
+        ;(await this.getRemoteHandler(remote, true)).forget()
       } catch (_) {}
     }
   }
@@ -136,7 +131,10 @@ export default class {
   async initRemotes () {
     const remotes = await this.getAllRemotes()
     if (!remotes || !remotes.length) {
-      await this.createRemote({name: 'default', url: 'file://var/lib/xoa-backups'})
+      await this.createRemote({
+        name: 'default',
+        url: 'file://var/lib/xoa-backups',
+      })
     }
   }
 }
diff --git a/packages/xo-server/src/xo-mixins/resource-sets.js b/packages/xo-server/src/xo-mixins/resource-sets.js
index 75e5af8d2..a5dec8e9a 100644
--- a/packages/xo-server/src/xo-mixins/resource-sets.js
+++ b/packages/xo-server/src/xo-mixins/resource-sets.js
@@ -9,10 +9,7 @@ import {
   remove,
   some,
 } from 'lodash'
-import {
-  noSuchObject,
-  unauthorized,
-} from 'xo-common/api-errors'
+import { noSuchObject, unauthorized } from 'xo-common/api-errors'
 
 import {
   asyncMap,
@@ -41,7 +38,7 @@ const computeVmResourcesUsage = vm => {
     let vdi, vdiId
     if (
       vbd.type === 'Disk' &&
-      !processed[vdiId = vbd.VDI] &&
+      !processed[(vdiId = vbd.VDI)] &&
       (vdi = vbd.$VDI)
     ) {
       processed[vdiId] = true
@@ -63,12 +60,15 @@ const normalize = set => ({
   id: set.id,
   ipPools: set.ipPools || [],
   limits: set.limits
-    ? map(set.limits, limit => isObject(limit)
-      ? limit
-      : {
-        available: limit,
-        total: limit,
-      }
+    ? map(
+      set.limits,
+      limit =>
+        isObject(limit)
+          ? limit
+          : {
+            available: limit,
+            total: limit,
+          }
     )
     : {},
   name: set.name || '',
@@ -84,12 +84,14 @@ export default class {
     this._store = null
     xo.on('start', async () => {
-      xo.addConfigManager('resourceSets',
+      xo.addConfigManager(
+        'resourceSets',
         () => this.getAllResourceSets(),
-        resourceSets => Promise.all(mapToArray(resourceSets, resourceSet =>
-          this._save(resourceSet)
-        )),
-        [ 'groups', 'users' ]
+        resourceSets =>
+          Promise.all(
+            mapToArray(resourceSets, resourceSet => this._save(resourceSet))
+          ),
+        ['groups', 'users']
       )
 
       this._store = await xo.getStore('resourceSets')
@@ -112,32 +114,32 @@ export default class {
     const set = await this.getResourceSet(id)
     const user = await this._xo.getUser(userId)
 
-    if ((
-      user.permission !== 'admin' &&
-
-      // The set does not contains ANY subjects related to this user
-      // (itself or its groups).
-      !some(set.subjects, lightSet(user.groups).add(user.id).has)
-    ) || (
-      objectIds &&
-
-      // The set does not contains ALL objects.
-      !every(objectIds, lightSet(set.objects).has)
-    )) {
+    if (
+      (user.permission !== 'admin' &&
+        // The set does not contains ANY subjects related to this user
+        // (itself or its groups).
+        !some(set.subjects, lightSet(user.groups).add(user.id).has)) ||
+      (objectIds &&
+        // The set does not contains ALL objects.
+        !every(objectIds, lightSet(set.objects).has))
+    ) {
       throw unauthorized()
     }
   }
 
   async computeVmResourcesUsage (vm) {
     return assign(
-      computeVmResourcesUsage(
-        this._xo.getXapi(vm).getObject(vm._xapiId)
-      ),
+      computeVmResourcesUsage(this._xo.getXapi(vm).getObject(vm._xapiId)),
       await this._xo.computeVmIpPoolsUsage(vm)
     )
   }
 
-  async createResourceSet (name, subjects = undefined, objects = undefined, limits = undefined) {
+  async createResourceSet (
+    name,
+    subjects = undefined,
+    objects = undefined,
+    limits = undefined
+  ) {
     const id = await this._generateId()
     const set = normalize({
       id,
@@ -162,13 +164,16 @@ export default class {
     throw noSuchObject(id, 'resourceSet')
   }
 
-  async updateResourceSet (id, {
-    name = undefined,
-    subjects = undefined,
-    objects = undefined,
-    limits = undefined,
-    ipPools = undefined,
-  }) {
+  async updateResourceSet (
+    id,
+    {
+      name = undefined,
+      subjects = undefined,
+      objects = undefined,
+      limits = undefined,
+      ipPools = undefined,
+    }
+  ) {
     const set = await this.getResourceSet(id)
     if (name) {
       set.name = name
@@ -317,7 +322,8 @@ export default class {
     const sets = keyBy(await this.getAllResourceSets(), 'id')
     forEach(sets, ({ limits }) => {
       forEach(limits, (limit, id) => {
-        if (VM_RESOURCES[id]) { // only reset VMs related limits
+        if (VM_RESOURCES[id]) {
+          // only reset VMs related limits
          limit.available = limit.total
        }
      })
@@ -329,10 +335,8 @@ export default class {
      let set
      if (
        object.$type !== 'vm' ||
-
         // No set for this VM.
         !(id = xapi.xo.getData(object, 'resourceSet')) ||
-
         // Not our set.
         !(set = sets[id])
       ) {
@@ -356,20 +360,32 @@ export default class {
     const xapi = this._xo.getXapi(vmId)
     const previousResourceSetId = xapi.xo.getData(vmId, 'resourceSet')
 
-    if (resourceSetId === previousResourceSetId || (previousResourceSetId === undefined && resourceSetId === null)) {
+    if (
+      resourceSetId === previousResourceSetId ||
+      (previousResourceSetId === undefined && resourceSetId === null)
+    ) {
       return
     }
 
-    const resourcesUsage = await this.computeVmResourcesUsage(this._xo.getObject(vmId))
+    const resourcesUsage = await this.computeVmResourcesUsage(
+      this._xo.getObject(vmId)
+    )
 
     if (resourceSetId != null) {
       await this.allocateLimitsInResourceSet(resourcesUsage, resourceSetId)
     }
 
     if (previousResourceSetId !== undefined) {
-      await this.releaseLimitsInResourceSet(resourcesUsage, previousResourceSetId)
+      await this.releaseLimitsInResourceSet(
+        resourcesUsage,
+        previousResourceSetId
+      )
    }
 
-    await xapi.xo.setData(vmId, 'resourceSet', resourceSetId === undefined ? null : resourceSetId)
+    await xapi.xo.setData(
+      vmId,
+      'resourceSet',
+      resourceSetId === undefined ?
null : resourceSetId + ) if (previousResourceSetId !== undefined) { await this._xo.removeAclsForObject(vmId) diff --git a/packages/xo-server/src/xo-mixins/scheduling.js b/packages/xo-server/src/xo-mixins/scheduling.js index 3236010f4..816317534 100644 --- a/packages/xo-server/src/xo-mixins/scheduling.js +++ b/packages/xo-server/src/xo-mixins/scheduling.js @@ -2,11 +2,7 @@ import { BaseError } from 'make-error' import { noSuchObject } from 'xo-common/api-errors.js' import { Schedules } from '../models/schedule' -import { - forEach, - mapToArray, - scheduleFn, -} from '../utils' +import { forEach, mapToArray, scheduleFn } from '../utils' // =================================================================== @@ -37,21 +33,23 @@ export class ScheduleAlreadyEnabled extends SchedulerError { export default class { constructor (xo) { this.xo = xo - const schedules = this._redisSchedules = new Schedules({ + const schedules = (this._redisSchedules = new Schedules({ connection: xo._redis, prefix: 'xo:schedule', indexes: ['user_id', 'job'], - }) + })) this._scheduleTable = undefined xo.on('clean', () => schedules.rebuildIndexes()) xo.on('start', () => { - xo.addConfigManager('schedules', + xo.addConfigManager( + 'schedules', () => schedules.get(), - schedules_ => Promise.all(mapToArray(schedules_, schedule => - schedules.save(schedule) - )), - [ 'jobs' ] + schedules_ => + Promise.all( + mapToArray(schedules_, schedule => schedules.save(schedule)) + ), + ['jobs'] ) return this._loadSchedules() @@ -86,7 +84,7 @@ export default class { const stopSchedule = scheduleFn( schedule.cron, - () => this.xo.runJobSequence([ schedule.job ]), + () => this.xo.runJobSequence([schedule.job]), schedule.timezone ) @@ -150,7 +148,14 @@ export default class { } async createSchedule (userId, { job, cron, enabled, name, timezone }) { - const schedule_ = await this._redisSchedules.create(userId, job, cron, enabled, name, timezone) + const schedule_ = await this._redisSchedules.create( + userId, + job, + cron, + enabled, + name, + timezone + ) const schedule = schedule_.properties this._add(schedule) diff --git a/packages/xo-server/src/xo-mixins/store.js b/packages/xo-server/src/xo-mixins/store.js index 98a9e7d74..ef231a992 100644 --- a/packages/xo-server/src/xo-mixins/store.js +++ b/packages/xo-server/src/xo-mixins/store.js @@ -4,23 +4,16 @@ import startsWith from 'lodash/startsWith' import sublevel from 'level-sublevel' import { ensureDir } from 'fs-extra' -import { - forEach, - isFunction, - promisify, -} from '../utils' +import { forEach, isFunction, promisify } from '../utils' // =================================================================== const _levelHas = function has (key, cb) { if (cb) { - return this.get(key, (error, value) => error - ? ( - error.notFound - ? cb(null, false) - : cb(error) - ) - : cb(null, true) + return this.get( + key, + (error, value) => + error ? (error.notFound ? 
cb(null, false) : cb(error)) : cb(null, true) ) } @@ -47,10 +40,7 @@ const levelPromise = db => { return } - if ( - endsWith(name, 'Stream') || - startsWith(name, 'is') - ) { + if (endsWith(name, 'Stream') || startsWith(name, 'is')) { dbP[name] = db::value } else { dbP[`${name}Sync`] = db::value @@ -67,15 +57,15 @@ export default class { constructor (xo) { const dir = `${xo._config.datadir}/leveldb` this._db = ensureDir(dir).then(() => { - return sublevel(levelup(dir, { - valueEncoding: 'json', - })) + return sublevel( + levelup(dir, { + valueEncoding: 'json', + }) + ) }) } getStore (namespace) { - return this._db.then(db => levelPromise( - levelHas(db.sublevel(namespace)) - )) + return this._db.then(db => levelPromise(levelHas(db.sublevel(namespace)))) } } diff --git a/packages/xo-server/src/xo-mixins/subjects.js b/packages/xo-server/src/xo-mixins/subjects.js index c5bf20435..7cce3f95c 100644 --- a/packages/xo-server/src/xo-mixins/subjects.js +++ b/packages/xo-server/src/xo-mixins/subjects.js @@ -1,34 +1,18 @@ import { filter, includes } from 'lodash' import { ignoreErrors } from 'promise-toolbox' -import { - hash, - needsRehash, - verify, -} from 'hashy' -import { - invalidCredentials, - noSuchObject, -} from 'xo-common/api-errors' +import { hash, needsRehash, verify } from 'hashy' +import { invalidCredentials, noSuchObject } from 'xo-common/api-errors' -import { - Groups, -} from '../models/group' -import { - Users, -} from '../models/user' -import { - forEach, - isEmpty, - lightSet, - mapToArray, -} from '../utils' +import { Groups } from '../models/group' +import { Users } from '../models/user' +import { forEach, isEmpty, lightSet, mapToArray } from '../utils' // =================================================================== -const addToArraySet = (set, value) => set && !includes(set, value) - ? set.concat(value) - : [ value ] -const removeFromArraySet = (set, value) => set && filter(set, current => current !== value) +const addToArraySet = (set, value) => + set && !includes(set, value) ? 
set.concat(value) : [value] +const removeFromArraySet = (set, value) => + set && filter(set, current => current !== value) // =================================================================== @@ -38,46 +22,59 @@ export default class { const redis = xo._redis - const groupsDb = this._groups = new Groups({ + const groupsDb = (this._groups = new Groups({ connection: redis, prefix: 'xo:group', - }) - const usersDb = this._users = new Users({ + })) + const usersDb = (this._users = new Users({ connection: redis, prefix: 'xo:user', indexes: ['email'], - }) + })) - xo.on('clean', () => Promise.all([ - groupsDb.rebuildIndexes(), - usersDb.rebuildIndexes(), - ])) + xo.on('clean', () => + Promise.all([groupsDb.rebuildIndexes(), usersDb.rebuildIndexes()]) + ) xo.on('start', async () => { - xo.addConfigManager('groups', + xo.addConfigManager( + 'groups', () => groupsDb.get(), - groups => Promise.all(mapToArray(groups, group => groupsDb.save(group))), - [ 'users' ] + groups => + Promise.all(mapToArray(groups, group => groupsDb.save(group))), + ['users'] ) - xo.addConfigManager('users', + xo.addConfigManager( + 'users', () => usersDb.get(), - users => Promise.all(mapToArray(users, async user => { - const userId = user.id - const conflictUsers = await usersDb.get({ email: user.email }) - if (!isEmpty(conflictUsers)) { - await Promise.all(mapToArray(conflictUsers, ({ id }) => - (id !== userId) && this.deleteUser(id) - )) - } - return usersDb.save(user) - })) + users => + Promise.all( + mapToArray(users, async user => { + const userId = user.id + const conflictUsers = await usersDb.get({ email: user.email }) + if (!isEmpty(conflictUsers)) { + await Promise.all( + mapToArray( + conflictUsers, + ({ id }) => id !== userId && this.deleteUser(id) + ) + ) + } + return usersDb.save(user) + }) + ) ) if (!await usersDb.exists()) { const email = 'admin@admin.net' const password = 'admin' - await this.createUser({email, password, permission: 'admin'}) - console.log('[INFO] Default user created:', email, ' with password', password) + await this.createUser({ email, password, permission: 'admin' }) + console.log( + '[INFO] Default user created:', + email, + ' with password', + password + ) } }) } @@ -105,10 +102,11 @@ export default class { await this._users.remove(id) // Remove tokens of user. - this._xo.getAuthenticationTokensForUser(id) + this._xo + .getAuthenticationTokensForUser(id) .then(tokens => { forEach(tokens, token => { - this._xo.deleteAuthenticationToken(id)::ignoreErrors() + ;this._xo.deleteAuthenticationToken(id)::ignoreErrors() }) }) ::ignoreErrors() @@ -116,27 +114,30 @@ export default class { // Remove ACLs for this user. this._xo.getAclsForSubject(id).then(acls => { forEach(acls, acl => { - this._xo.removeAcl(id, acl.object, acl.action)::ignoreErrors() + ;this._xo.removeAcl(id, acl.object, acl.action)::ignoreErrors() }) }) // Remove the user from all its groups. forEach(user.groups, groupId => { - this.getGroup(groupId) + ;this.getGroup(groupId) .then(group => this._removeUserFromGroup(id, group)) ::ignoreErrors() }) } - async updateUser (id, { - // TODO: remove - email, + async updateUser ( + id, + { + // TODO: remove + email, - name = email, - password, - permission, - preferences, - }) { + name = email, + password, + permission, + preferences, + } + ) { const user = await this.getUser(id) if (name) { @@ -157,9 +158,7 @@ export default class { newPreferences[name] = value } }) - user.preferences = isEmpty(newPreferences) - ? 
undefined - : newPreferences + user.preferences = isEmpty(newPreferences) ? undefined : newPreferences // TODO: remove user.email = user.name @@ -230,7 +229,7 @@ export default class { } async changeUserPassword (userId, oldPassword, newPassword) { - if (!(await this.checkUserPassword(userId, oldPassword, false))) { + if (!await this.checkUserPassword(userId, oldPassword, false)) { throw invalidCredentials() } @@ -239,10 +238,7 @@ export default class { async checkUserPassword (userId, password, updateIfNecessary = true) { const { pw_hash: hash } = await this.getUser(userId) - if (!( - hash && - await verify(password, hash) - )) { + if (!(hash && (await verify(password, hash)))) { return false } @@ -255,7 +251,7 @@ export default class { // ----------------------------------------------------------------- - async createGroup ({name}) { + async createGroup ({ name }) { // TODO: use plain objects. const group = (await this._groups.create(name)).properties @@ -270,19 +266,19 @@ export default class { // Remove ACLs for this group. this._xo.getAclsForSubject(id).then(acls => { forEach(acls, acl => { - this._xo.removeAcl(id, acl.object, acl.action)::ignoreErrors() + ;this._xo.removeAcl(id, acl.object, acl.action)::ignoreErrors() }) }) // Remove the group from all its users. forEach(group.users, userId => { - this.getUser(userId) + ;this.getUser(userId) .then(user => this._removeGroupFromUser(id, user)) ::ignoreErrors() }) } - async updateGroup (id, {name}) { + async updateGroup (id, { name }) { const group = await this.getGroup(id) if (name) group.name = name @@ -312,10 +308,7 @@ export default class { user.groups = addToArraySet(user.groups, groupId) group.users = addToArraySet(group.users, userId) - await Promise.all([ - this._users.save(user), - this._groups.save(group), - ]) + await Promise.all([this._users.save(user), this._groups.save(group)]) } async _removeUserFromGroup (userId, group) { diff --git a/packages/xo-server/src/xo-mixins/xen-servers.js b/packages/xo-server/src/xo-mixins/xen-servers.js index 9bd94d0ab..75edc94c2 100644 --- a/packages/xo-server/src/xo-mixins/xen-servers.js +++ b/packages/xo-server/src/xo-mixins/xen-servers.js @@ -13,20 +13,18 @@ import { popProperty, serializeError, } from '../utils' -import { - Servers, -} from '../models/server' +import { Servers } from '../models/server' // =================================================================== export default class { constructor (xo) { this._objectConflicts = createRawObject() // TODO: clean when a server is disconnected. 
- const serversDb = this._servers = new Servers({ + const serversDb = (this._servers = new Servers({ connection: xo._redis, prefix: 'xo:server', indexes: ['host'], - }) + })) this._stats = new XapiStats() this._xapis = createRawObject() this._xapisByPool = createRawObject() @@ -34,7 +32,8 @@ export default class { xo.on('clean', () => serversDb.rebuildIndexes()) xo.on('start', async () => { - xo.addConfigManager('xenServers', + xo.addConfigManager( + 'xenServers', () => serversDb.get(), servers => serversDb.update(servers) ) @@ -81,23 +80,26 @@ export default class { } async unregisterXenServer (id) { - this.disconnectXenServer(id)::ignoreErrors() + ;this.disconnectXenServer(id)::ignoreErrors() if (!await this._servers.remove(id)) { throw noSuchObject(id, 'xenServer') } } - async updateXenServer (id, { - allowUnauthorized, - enabled, - error, - host, - label, - password, - readOnly, - username, - }) { + async updateXenServer ( + id, + { + allowUnauthorized, + enabled, + error, + host, + label, + password, + readOnly, + username, + } + ) { const server = await this._getXenServer(id) const xapi = this._xapis[id] const requireDisconnected = @@ -111,7 +113,9 @@ export default class { xapi !== undefined && xapi.status !== 'disconnected' ) { - throw new Error('this entry require disconnecting the server to update it') + throw new Error( + 'this entry require disconnecting the server to update it' + ) } if (label !== undefined) server.set('label', label || undefined) @@ -167,14 +171,10 @@ export default class { xapiIdsToXo[xapiId] = xoId const previous = objects.get(xoId, undefined) - if ( - previous && - previous._xapiRef !== xapiObject.$ref - ) { - ( - conflicts[xoId] || - (conflicts[xoId] = createRawObject()) - )[conId] = xoObject + if (previous && previous._xapiRef !== xapiObject.$ref) { + const conflicts_ = + conflicts[xoId] || (conflicts[xoId] = createRawObject()) + conflicts_[conId] = xoObject } else { objects.set(xoId, xoObject) } @@ -221,7 +221,7 @@ export default class { async connectXenServer (id) { const server = (await this._getXenServer(id)).properties - const xapi = this._xapis[server.id] = new Xapi({ + const xapi = (this._xapis[server.id] = new Xapi({ allowUnauthorized: Boolean(server.allowUnauthorized), auth: { user: server.username, @@ -229,7 +229,7 @@ export default class { }, readOnly: Boolean(server.readOnly), url: server.host, - }) + })) xapi.xo = (() => { const conId = server.id @@ -300,18 +300,18 @@ export default class { addObject, getData: (id, key) => { - const value = ( - typeof id === 'string' - ? xapi.getObject(id) - : id - ).other_config[`xo:${camelToSnakeCase(key)}`] + const value = (typeof id === 'string' ? xapi.getObject(id) : id) + .other_config[`xo:${camelToSnakeCase(key)}`] return value && JSON.parse(value) }, setData: async (id, key, value) => { await xapi._updateObjectMapProperty( xapi.getObject(id), 'other_config', - { [`xo:${camelToSnakeCase(key)}`]: value !== null ? JSON.stringify(value) : value } + { + [`xo:${camelToSnakeCase(key)}`]: + value !== null ? JSON.stringify(value) : value, + } ) // Register the updated object. 
@@ -381,10 +381,7 @@ export default class { server.status = xapi.status let pool - if ( - server.label === undefined && - (pool = xapi.pool) != null - ) { + if (server.label === undefined && (pool = xapi.pool) != null) { server.label = pool.name_label } } @@ -408,10 +405,9 @@ export default class { async mergeXenPools (sourceId, targetId, force = false) { const sourceXapi = this.getXapi(sourceId) - const { - _auth: { user, password }, - _url: { hostname }, - } = this.getXapi(targetId) + const { _auth: { user, password }, _url: { hostname } } = this.getXapi( + targetId + ) // We don't want the events of the source XAPI to interfere with // the events of the new XAPI. diff --git a/packages/xo-server/src/xo.js b/packages/xo-server/src/xo.js index 5d998cf26..e8e059b5e 100644 --- a/packages/xo-server/src/xo.js +++ b/packages/xo-server/src/xo.js @@ -1,7 +1,7 @@ import XoCollection from 'xo-collection' import XoUniqueIndex from 'xo-collection/unique-index' -import {createClient as createRedisClient} from 'redis' -import {EventEmitter} from 'events' +import { createClient as createRedisClient } from 'redis' +import { EventEmitter } from 'events' import { noSuchObject } from 'xo-common/api-errors' import { forEach, @@ -16,14 +16,8 @@ import { import mixins from './xo-mixins' import Connection from './connection' -import { - mixin, -} from './decorators' -import { - createRawObject, - generateToken, - noop, -} from './utils' +import { mixin } from './decorators' +import { createRawObject, generateToken, noop } from './utils' // =================================================================== @@ -48,13 +42,13 @@ export default class Xo extends EventEmitter { // Connects to Redis. { - const { - renameCommands, - socket: path, - uri: url, - } = config.redis || {} + const { renameCommands, socket: path, uri: url } = config.redis || {} - this._redis = createRedisClient({ path, rename_commands: renameCommands, url }) + this._redis = createRedisClient({ + path, + rename_commands: renameCommands, + url, + }) } this.on('start', () => this._watchObjects()) @@ -64,22 +58,17 @@ export default class Xo extends EventEmitter { // Returns an object from its key or UUID. 
getObject (key, type) { - const { - all, - indexes: { - byRef, - }, - } = this._objects + const { all, indexes: { byRef } } = this._objects const obj = all[key] || byRef[key] if (!obj) { throw noSuchObject(key, type) } - if (type != null && ( - (isString(type) && type !== obj.type) || - !includes(type, obj.type) // Array - )) { + if ( + type != null && + ((isString(type) && type !== obj.type) || !includes(type, obj.type)) // Array + ) { throw noSuchObject(key, type) } @@ -117,10 +106,10 @@ export default class Xo extends EventEmitter { // ----------------------------------------------------------------- createUserConnection () { - const {_connections: connections} = this + const { _connections: connections } = this const connection = new Connection() - const id = connection.id = this._nextConId++ + const id = (connection.id = this._nextConId++) connections[id] = connection connection.on('close', () => { @@ -133,9 +122,9 @@ export default class Xo extends EventEmitter { // ----------------------------------------------------------------- _handleHttpRequest (req, res, next) { - const {url} = req + const { url } = req - const {_httpRequestWatchers: watchers} = this + const { _httpRequestWatchers: watchers } = this const watcher = watchers[url] if (!watcher) { next() @@ -145,7 +134,7 @@ export default class Xo extends EventEmitter { delete watchers[url] } - const {fn, data} = watcher + const { fn, data } = watcher new Promise(resolve => { resolve(fn.call(this, req, res, data, next)) }).then( @@ -166,15 +155,13 @@ export default class Xo extends EventEmitter { } async registerHttpRequest (fn, data, { suffix = '' } = {}) { - const {_httpRequestWatchers: watchers} = this + const { _httpRequestWatchers: watchers } = this const url = await (function generateUniqueUrl () { return generateToken().then(token => { const url = `/api/${token}${suffix}` - return url in watchers - ? generateUniqueUrl() - : url + return url in watchers ? generateUniqueUrl() : url }) })() @@ -186,11 +173,12 @@ export default class Xo extends EventEmitter { return url } - async registerHttpRequestHandler (url, fn, { - data = undefined, - persistent = true, - } = {}) { - const {_httpRequestWatchers: watchers} = this + async registerHttpRequestHandler ( + url, + fn, + { data = undefined, persistent = true } = {} + ) { + const { _httpRequestWatchers: watchers } = this if (url in watchers) { throw new Error(`a handler is already registered for ${url}`) @@ -217,9 +205,10 @@ export default class Xo extends EventEmitter { // For security, prevent from accessing `this`. if (isFunction(value)) { - value = (value => function () { - return value.apply(thisArg, arguments) - })(value) + value = (value => + function () { + return value.apply(thisArg, arguments) + })(value) } Object.defineProperty(this, name, { @@ -258,10 +247,7 @@ export default class Xo extends EventEmitter { // Some should be forwarded to connected clients. // Some should be persistently saved. _watchObjects () { - const { - _connections: connections, - _objects: objects, - } = this + const { _connections: connections, _objects: objects } = this let entered, exited function reset () { @@ -278,7 +264,7 @@ export default class Xo extends EventEmitter { objects.on('add', onAdd) objects.on('update', onAdd) - objects.on('remove', (items) => { + objects.on('remove', items => { forEach(items, (_, id) => { // We don't care about the value here, so we choose `0` // because it is small in JSON. 
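A recurring change in the hunks above is the new leading `;` before lines that start with `(`, `[` or a bound-call expression. This is the formatter guarding against JavaScript's automatic semicolon insertion in this semicolon-free codebase. A minimal standalone sketch of the hazard (hypothetical values, not taken from the patch):

// Without the leading `;`, a line that starts with `(` or `[` is parsed as
// a continuation of the previous statement.
const log = () => console.log('called')
const xs = [1, 2]

log()
// If the `;` below were omitted, this would parse as `log()[(3, 4)]...`,
// i.e. an index access on `undefined` — a TypeError at runtime.
;[3, 4].forEach(x => xs.push(x))

// Same guard before a parenthesized expression statement, as in
// `;(await this.getRemoteHandler(remote, true)).forget()` above.
;(xs.length > 2 ? xs : []).forEach(x => console.log(x))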
diff --git a/packages/xo-vmdk-to-vhd/src/vhd-write.js b/packages/xo-vmdk-to-vhd/src/vhd-write.js
index 10b7483ab..b83ccee76 100644
--- a/packages/xo-vmdk-to-vhd/src/vhd-write.js
+++ b/packages/xo-vmdk-to-vhd/src/vhd-write.js
@@ -1,7 +1,7 @@
 'use strict'
-import {open, write} from 'fs-promise'
+import { open, write } from 'fs-promise'
 import stream from 'stream'
-import {VMDKDirectParser} from './vmdk-read'
+import { VMDKDirectParser } from './vmdk-read'

 const footerCookie = 'conectix'
 const creatorApp = 'xo '
@@ -19,13 +19,14 @@ export function computeChecksum (buffer) {
     sum += buffer[i]
   }
   // http://stackoverflow.com/a/1908655/72637 the >>> prevents the number from going negative
-  return (~sum) >>> 0
+  return ~sum >>> 0
 }

 class Block {
   constructor (blockSize) {
     const bitmapSize = blockSize / sectorSize / 8
-    const bufferSize = Math.ceil((blockSize + bitmapSize) / sectorSize) * sectorSize
+    const bufferSize =
+      Math.ceil((blockSize + bitmapSize) / sectorSize) * sectorSize
     this.buffer = Buffer.alloc(bufferSize)
     this.bitmapBuffer = this.buffer.slice(0, bitmapSize)
     this.dataBuffer = this.buffer.slice(bitmapSize)
@@ -68,13 +69,16 @@
     const startBlock = Math.floor(offset / this.blockSize)
     const endBlock = Math.ceil((offset + buffer.length) / this.blockSize)
     for (let i = startBlock; i < endBlock; i++) {
-      const blockDelta = offset - (i * this.blockSize)
+      const blockDelta = offset - i * this.blockSize
       let blockBuffer, blockOffset
       if (blockDelta > 0) {
         blockBuffer = buffer.slice(0, (i + 1) * this.blockSize - offset)
         blockOffset = blockDelta
       } else {
-        blockBuffer = buffer.slice(-blockDelta, (i + 1) * this.blockSize - offset)
+        blockBuffer = buffer.slice(
+          -blockDelta,
+          (i + 1) * this.blockSize - offset
+        )
         blockOffset = 0
       }
       this._writeBlock(blockBuffer, i, blockOffset)
@@ -105,7 +109,11 @@
     this.geomtry = computeGeometryForSize(virtualSize)
     this.timestamp = timestamp
     this.blockSize = 0x00200000
-    this.sparseFile = new SparseExtent(this.geomtry.actualSize, this.blockSize, sectorSize * 3)
+    this.sparseFile = new SparseExtent(
+      this.geomtry.actualSize,
+      this.blockSize,
+      sectorSize * 3
+    )
   }

   writeBuffer (buffer, offset = 0) {
@@ -113,8 +121,18 @@
   }

   async writeFile (fileName) {
-    const fileFooter = createFooter(this.geomtry.actualSize, this.timestamp, this.geomtry, dynamicHardDiskType, 512, 0)
-    const diskHeader = createDynamicDiskHeader(this.sparseFile.entryCount, this.blockSize)
+    const fileFooter = createFooter(
+      this.geomtry.actualSize,
+      this.timestamp,
+      this.geomtry,
+      dynamicHardDiskType,
+      512,
+      0
+    )
+    const diskHeader = createDynamicDiskHeader(
+      this.sparseFile.entryCount,
+      this.blockSize
+    )
     const file = await open(fileName, 'w')
     await write(file, fileFooter, 0, fileFooter.length)
     await write(file, diskHeader, 0, diskHeader.length)
@@ -143,12 +161,12 @@ export function computeGeometryForSize (size) {
     if (heads < 4) {
       heads = 4
     }
-    if (cylinderTimesHeads >= (heads * 1024) || heads > 16) {
+    if (cylinderTimesHeads >= heads * 1024 || heads > 16) {
       sectorsPerTrack = 31
       heads = 16
       cylinderTimesHeads = totalSectors / sectorsPerTrack
     }
-    if (cylinderTimesHeads >= (heads * 1024)) {
+    if (cylinderTimesHeads >= heads * 1024) {
       sectorsPerTrack = 63
       heads = 16
       cylinderTimesHeads = totalSectors / sectorsPerTrack
@@ -156,10 +174,17 @@ export function computeGeometryForSize (size) {
   }
   const cylinders = Math.floor(cylinderTimesHeads / heads)
   const actualSize = cylinders * heads * sectorsPerTrack * sectorSize
-  return {cylinders, heads, sectorsPerTrack, actualSize}
+  return { cylinders, heads, sectorsPerTrack, actualSize }
 }

-export function createFooter (size, timestamp, geometry, diskType, dataOffsetLow = 0xFFFFFFFF, dataOffsetHigh = 0xFFFFFFFF) {
+export function createFooter (
+  size,
+  timestamp,
+  geometry,
+  diskType,
+  dataOffsetLow = 0xffffffff,
+  dataOffsetHigh = 0xffffffff
+) {
   const footer = Buffer.alloc(512)
   Buffer.from(footerCookie, 'ascii').copy(footer)
   footer.writeUInt32BE(2, 8)
@@ -189,8 +214,8 @@ export function createDynamicDiskHeader (tableEntries, blockSize) {
   const header = Buffer.alloc(1024)
   Buffer.from(headerCookie, 'ascii').copy(header)
   // hard code no next data
-  header.writeUInt32BE(0xFFFFFFFF, 8)
-  header.writeUInt32BE(0xFFFFFFFF, 12)
+  header.writeUInt32BE(0xffffffff, 8)
+  header.writeUInt32BE(0xffffffff, 12)
   // hard code table offset
   header.writeUInt32BE(0, 16)
   header.writeUInt32BE(sectorSize * 3, 20)
@@ -206,7 +231,7 @@ export function createEmptyTable (dataSize, blockSize) {
   const blockCount = Math.ceil(dataSize / blockSize)
   const tableSizeSectors = Math.ceil(blockCount * 4 / sectorSize)
   const buffer = Buffer.alloc(tableSizeSectors * sectorSize, 0xff)
-  return {entryCount: blockCount, buffer: buffer, entries: []}
+  return { entryCount: blockCount, buffer: buffer, entries: [] }
 }

 export class ReadableRawVHDStream extends stream.Readable {
@@ -214,7 +239,12 @@ export class ReadableRawVHDStream extends stream.Readable {
     super()
     this.size = size
     const geometry = computeGeometryForSize(size)
-    this.footer = createFooter(size, Math.floor(Date.now() / 1000), geometry, fixedHardDiskType)
+    this.footer = createFooter(
+      size,
+      Math.floor(Date.now() / 1000),
+      geometry,
+      fixedHardDiskType
+    )
     this.position = 0
     this.vmdkParser = vmdkParser
     this.done = false
@@ -254,7 +284,12 @@ export class ReadableRawVHDStream extends stream.Readable {
       const buffer = next.grain
       const paddingLength = offset - this.position
       if (paddingLength < 0) {
-        process.nextTick(() => this.emit('error', 'This VMDK file does not have its blocks in the correct order'))
+        process.nextTick(() =>
+          this.emit(
+            'error',
+            'This VMDK file does not have its blocks in the correct order'
+          )
+        )
       }
       this.filePadding(paddingLength)
       this.currentFile.push(() => buffer)
@@ -278,8 +313,7 @@ export class ReadableRawVHDStream extends stream.Readable {
   }

   async pushNextUntilFull () {
-    while (!this.done && await this.pushNextBlock()) {
-    }
+    while (!this.done && (await this.pushNextBlock())) {}
   }

   _read () {
@@ -288,11 +322,13 @@ export class ReadableRawVHDStream extends stream.Readable {
     }
     if (this.pushFileUntilFull()) {
       this.busy = true
-      this.pushNextUntilFull().then(() => {
-        this.busy = false
-      }).catch((error) => {
-        process.nextTick(() => this.emit('error', error))
-      })
+      this.pushNextUntilFull()
+        .then(() => {
+          this.busy = false
+        })
+        .catch(error => {
+          process.nextTick(() => this.emit('error', error))
+        })
     }
   }
 }
diff --git a/packages/xo-vmdk-to-vhd/src/vhd-write.spec.js b/packages/xo-vmdk-to-vhd/src/vhd-write.spec.js
index e69929c17..7475b03cf 100644
--- a/packages/xo-vmdk-to-vhd/src/vhd-write.spec.js
+++ b/packages/xo-vmdk-to-vhd/src/vhd-write.spec.js
@@ -1,10 +1,10 @@
 'use strict'

 import expect from 'must'
-import {createWriteStream} from 'fs'
-import {describe, it} from 'mocha'
-import {exec} from 'child-process-promise'
-import {readFile} from 'fs-promise'
+import { createWriteStream } from 'fs'
+import { describe, it } from 'mocha'
+import { exec } from 'child-process-promise'
+import { readFile } from 'fs-promise'

 import {
   computeChecksum,
@@ -18,16 +18,26 @@ import {
 describe('VHD writing', () => {
   it('computeChecksum() is correct against some reference values', () => {
     // those values were taken from a file generated by qemu
-    const testValue1 = '636F6E6563746978000000020001000000000000000002001F34DB9F71656D75000500035769326B0000000000019800000000000001980000030411000000030000000033B3A5E17F94433498376740246E5660'
-    const expectedChecksum1 = 0xFFFFEFB2
-    const testValue2 = '6378737061727365FFFFFFFFFFFFFFFF0000000000000600000100000000000100200000'
-    const expectedChecksum2 = 0xFFFFF476
-    expect(computeChecksum(Buffer.from(testValue1, 'hex'))).to.equal(expectedChecksum1)
-    expect(computeChecksum(Buffer.from(testValue2, 'hex'))).to.equal(expectedChecksum2)
+    const testValue1 =
+      '636F6E6563746978000000020001000000000000000002001F34DB9F71656D75000500035769326B0000000000019800000000000001980000030411000000030000000033B3A5E17F94433498376740246E5660'
+    const expectedChecksum1 = 0xffffefb2
+    const testValue2 =
+      '6378737061727365FFFFFFFFFFFFFFFF0000000000000600000100000000000100200000'
+    const expectedChecksum2 = 0xfffff476
+    expect(computeChecksum(Buffer.from(testValue1, 'hex'))).to.equal(
+      expectedChecksum1
+    )
+    expect(computeChecksum(Buffer.from(testValue2, 'hex'))).to.equal(
+      expectedChecksum2
+    )
   })

   it('createFooter() does not crash', () => {
-    createFooter(104448, Math.floor(Date.now() / 1000), {cylinders: 3, heads: 4, sectorsPerTrack: 17})
+    createFooter(104448, Math.floor(Date.now() / 1000), {
+      cylinders: 3,
+      heads: 4,
+      sectorsPerTrack: 17,
+    })
   })

   it('createDynamicDiskHeader() does not crash', () => {
@@ -35,13 +45,16 @@ describe('VHD writing', () => {
   })

   it('ReadableRawVHDStream does not crash', () => {
-    const data = [{
-      lbaBytes: 100,
-      grain: Buffer.from('azerzaerazeraze', 'ascii'),
-    }, {
-      lbaBytes: 700,
-      grain: Buffer.from('gdfslkdfguer', 'ascii'),
-    }]
+    const data = [
+      {
+        lbaBytes: 100,
+        grain: Buffer.from('azerzaerazeraze', 'ascii'),
+      },
+      {
+        lbaBytes: 700,
+        grain: Buffer.from('gdfslkdfguer', 'ascii'),
+      },
+    ]
     let index = 0
     const mockParser = {
       next: () => {
@@ -63,13 +76,16 @@ describe('VHD writing', () => {
   })

   it('ReadableRawVHDStream detects when blocks are out of order', () => {
-    const data = [{
-      lbaBytes: 700,
-      grain: Buffer.from('azerzaerazeraze', 'ascii'),
-    }, {
-      lbaBytes: 100,
-      grain: Buffer.from('gdfslkdfguer', 'ascii'),
-    }]
+    const data = [
+      {
+        lbaBytes: 700,
+        grain: Buffer.from('azerzaerazeraze', 'ascii'),
+      },
+      {
+        lbaBytes: 100,
+        grain: Buffer.from('gdfslkdfguer', 'ascii'),
+      },
+    ]
     let index = 0
     const mockParser = {
       next: () => {
@@ -82,13 +98,17 @@ describe('VHD writing', () => {
         }
       },
     }
-    return expect(new Promise((resolve, reject) => {
-      const stream = new ReadableRawVHDStream(100000, mockParser)
-      stream.on('error', reject)
-      const pipe = stream.pipe(createWriteStream('outputStream'))
-      pipe.on('finish', resolve)
-      pipe.on('error', reject)
-    })).to.reject.to.equal('This VMDK file does not have its blocks in the correct order')
+    return expect(
+      new Promise((resolve, reject) => {
+        const stream = new ReadableRawVHDStream(100000, mockParser)
+        stream.on('error', reject)
+        const pipe = stream.pipe(createWriteStream('outputStream'))
+        pipe.on('finish', resolve)
+        pipe.on('error', reject)
+      })
+    ).to.reject.to.equal(
+      'This VMDK file does not have its blocks in the correct order'
+    )
   })

   it('writing a known file with VHDFile is successful', async () => {
@@ -97,7 +117,9 @@ describe('VHD writing', () => {
     const randomFileName = 'random.raw'
     const geometry = computeGeometryForSize(1024 * 1024 * 8)
     const dataSize = geometry.actualSize
-    await exec('base64 /dev/urandom | head -c ' + dataSize + ' > ' + randomFileName)
+    await exec(
+      'base64 /dev/urandom | head -c ' + dataSize + ' > ' + randomFileName
+    )
     const buffer = await readFile(randomFileName)
     const f = new VHDFile(buffer.length, 523557791)
     const splitPoint = Math.floor(Math.random() * buffer.length)
diff --git a/packages/xo-vmdk-to-vhd/src/virtual-buffer.js b/packages/xo-vmdk-to-vhd/src/virtual-buffer.js
index 5df1823f1..92036bdc5 100644
--- a/packages/xo-vmdk-to-vhd/src/virtual-buffer.js
+++ b/packages/xo-vmdk-to-vhd/src/virtual-buffer.js
@@ -1,6 +1,6 @@
 'use strict'

-import {Slicer} from 'pipette'
+import { Slicer } from 'pipette'

 const chunkSize = 1024 * 1024

@@ -30,7 +30,7 @@ export class VirtualBuffer {
         if (error !== false && error !== true) {
           reject(error)
         } else {
-          resolve({error, data})
+          resolve({ error, data })
         }
       })
     })
diff --git a/packages/xo-vmdk-to-vhd/src/virtual-buffer.spec.js b/packages/xo-vmdk-to-vhd/src/virtual-buffer.spec.js
index de6fd99c2..b2fec1a2d 100644
--- a/packages/xo-vmdk-to-vhd/src/virtual-buffer.spec.js
+++ b/packages/xo-vmdk-to-vhd/src/virtual-buffer.spec.js
@@ -1,9 +1,9 @@
 import expect from 'must'
-import {createReadStream, readFile} from 'fs-promise'
-import {describe, it} from 'mocha'
-import {exec} from 'child-process-promise'
+import { createReadStream, readFile } from 'fs-promise'
+import { describe, it } from 'mocha'
+import { exec } from 'child-process-promise'

-import {VirtualBuffer} from './virtual-buffer'
+import { VirtualBuffer } from './virtual-buffer'

 describe('Virtual Buffer', function () {
   it('can read a file correctly', async () => {
@@ -14,6 +14,8 @@ describe('Virtual Buffer', function () {
     const part2 = await buffer.readChunk(-1)
     const original = await readFile(rawFileName)
     expect(buffer.isDepleted).to.be.true()
-    expect(Buffer.concat([part1, part2]).toString('ascii')).to.equal(original.toString('ascii'))
+    expect(Buffer.concat([part1, part2]).toString('ascii')).to.equal(
+      original.toString('ascii')
+    )
   })
 })
diff --git a/packages/xo-vmdk-to-vhd/src/vmdk-read.js b/packages/xo-vmdk-to-vhd/src/vmdk-read.js
index 6215482fb..6ce55f3dc 100644
--- a/packages/xo-vmdk-to-vhd/src/vmdk-read.js
+++ b/packages/xo-vmdk-to-vhd/src/vmdk-read.js
@@ -1,7 +1,7 @@
 'use strict'

 import zlib from 'zlib'
-import {VirtualBuffer} from './virtual-buffer'
+import { VirtualBuffer } from './virtual-buffer'

 const sectorSize = 512
 const compressionDeflate = 'COMPRESSION_DEFLATE'
@@ -12,7 +12,7 @@ function parseS64b (buffer, offset, valueName) {
   const low = buffer.readInt32LE(offset)
   const high = buffer.readInt32LE(offset + 4)
   // here there might be a surprise because we are reading 64 integers into double floats (53 bits mantissa)
-  const value = low | high << 32
+  const value = low | (high << 32)
   if ((value & (Math.pow(2, 32) - 1)) !== low) {
     throw new Error('Unsupported VMDK, ' + valueName + ' is too big')
   }
@@ -23,7 +23,7 @@ function parseU64b (buffer, offset, valueName) {
   const low = buffer.readUInt32LE(offset)
   const high = buffer.readUInt32LE(offset + 4)
   // here there might be a surprise because we are reading 64 integers into double floats (53 bits mantissa)
-  const value = low | high << 32
+  const value = low | (high << 32)
   if ((value & (Math.pow(2, 32) - 1)) !== low) {
     throw new Error('Unsupported VMDK, ' + valueName + ' is too big')
   }
@@ -34,7 +34,7 @@ function parseDescriptor (descriptorSlice) {
   const descriptorText = descriptorSlice.toString('ascii').replace(/\x00+$/, '') // eslint-disable-line no-control-regex
   const descriptorDict = {}
   const extentList = []
-  const lines = descriptorText.split(/\r?\n/).filter((line) => {
+  const lines = descriptorText.split(/\r?\n/).filter(line => {
     return line.trim().length > 0 && line[0] !== '#'
   })
   for (const line of lines) {
@@ -53,7 +53,7 @@ function parseDescriptor (descriptorSlice) {
       })
     }
   }
-  return {descriptor: descriptorDict, extents: extentList}
+  return { descriptor: descriptorDict, extents: extentList }
 }

 function parseFlags (flagBuffer) {
@@ -74,16 +74,32 @@ function parseHeader (buffer) {
   }
   const version = buffer.readUInt32LE(4)
   if (version !== 1 && version !== 3) {
-    throw new Error('unsupported VMDK version ' + version + ', only version 1 and 3 are supported')
+    throw new Error(
+      'unsupported VMDK version ' +
+        version +
+        ', only version 1 and 3 are supported'
+    )
   }
   const flags = parseFlags(buffer.slice(8, 12))
   const capacitySectors = parseU64b(buffer, 12, 'capacitySectors')
   const grainSizeSectors = parseU64b(buffer, 20, 'grainSizeSectors')
-  const descriptorOffsetSectors = parseU64b(buffer, 28, 'descriptorOffsetSectors')
+  const descriptorOffsetSectors = parseU64b(
+    buffer,
+    28,
+    'descriptorOffsetSectors'
+  )
   const descriptorSizeSectors = parseU64b(buffer, 36, 'descriptorSizeSectors')
   const numGTEsPerGT = buffer.readUInt32LE(44)
-  const rGrainDirectoryOffsetSectors = parseS64b(buffer, 48, 'rGrainDirectoryOffsetSectors')
-  const grainDirectoryOffsetSectors = parseS64b(buffer, 56, 'grainDirectoryOffsetSectors')
+  const rGrainDirectoryOffsetSectors = parseS64b(
+    buffer,
+    48,
+    'rGrainDirectoryOffsetSectors'
+  )
+  const grainDirectoryOffsetSectors = parseS64b(
+    buffer,
+    56,
+    'grainDirectoryOffsetSectors'
+  )
   const overheadSectors = parseS64b(buffer, 64, 'overheadSectors')
   const compressionMethod = compressionMap[buffer.readUInt16LE(77)]
   const l1EntrySectors = numGTEsPerGT * grainSizeSectors
@@ -105,7 +121,9 @@ async function readGrain (offsetSectors, buffer, compressed) {
   const offset = offsetSectors * sectorSize
   const size = buffer.readUInt32LE(offset + 8)
   const grainBuffer = buffer.slice(offset + 12, offset + 12 + size)
-  const grainContent = compressed ? await zlib.inflateSync(grainBuffer) : grainBuffer
+  const grainContent = compressed
+    ? await zlib.inflateSync(grainBuffer)
+    : grainBuffer
   const lba = parseU64b(buffer, offset, 'l2Lba')
   return {
     offsetSectors: offsetSectors,
@@ -123,7 +141,7 @@ function tryToParseMarker (buffer) {
   const value = buffer.readUInt32LE(0)
   const size = buffer.readUInt32LE(8)
   const type = buffer.readUInt32LE(12)
-  return {value, size, type}
+  return { value, size, type }
 }

 function alignSectors (number) {
@@ -140,18 +158,26 @@ export class VMDKDirectParser {
   // I detect this case and eat those tables first then let the normal loop go over the grains.
   async _readL1 () {
     const position = this.virtualBuffer.position
-    const l1entries = Math.floor((this.header.capacitySectors + this.header.l1EntrySectors - 1) / this.header.l1EntrySectors)
+    const l1entries = Math.floor(
+      (this.header.capacitySectors + this.header.l1EntrySectors - 1) /
+        this.header.l1EntrySectors
+    )
     const sectorAlignedL1Bytes = alignSectors(l1entries * 4)
-    const l1Buffer = await this.virtualBuffer.readChunk(sectorAlignedL1Bytes, 'L1 table ' + position)
+    const l1Buffer = await this.virtualBuffer.readChunk(
+      sectorAlignedL1Bytes,
+      'L1 table ' + position
+    )
     let l2Start = 0
     let l2IsContiguous = true
     for (let i = 0; i < l1entries; i++) {
       const l1Entry = l1Buffer.readUInt32LE(i * 4)
       if (i > 0) {
         const previousL1Entry = l1Buffer.readUInt32LE((i - 1) * 4)
-        l2IsContiguous = l2IsContiguous && ((l1Entry - previousL1Entry) === 4)
+        l2IsContiguous = l2IsContiguous && l1Entry - previousL1Entry === 4
       } else {
-        l2IsContiguous = (l1Entry * sectorSize === this.virtualBuffer.position) || (l1Entry * sectorSize === this.virtualBuffer.position + 512)
+        l2IsContiguous =
+          l1Entry * sectorSize === this.virtualBuffer.position ||
+          l1Entry * sectorSize === this.virtualBuffer.position + 512
         l2Start = l1Entry * sectorSize
       }
     }
@@ -160,18 +186,27 @@ export class VMDKDirectParser {
     }
     const l1L2FreeSpace = l2Start - this.virtualBuffer.position
     if (l1L2FreeSpace > 0) {
-      await this.virtualBuffer.readChunk(l1L2FreeSpace, 'freeSpace between L1 and L2')
+      await this.virtualBuffer.readChunk(
+        l1L2FreeSpace,
+        'freeSpace between L1 and L2'
+      )
     }
-    const l2entries = Math.ceil(this.header.capacitySectors / this.header.grainSizeSectors)
+    const l2entries = Math.ceil(
+      this.header.capacitySectors / this.header.grainSizeSectors
+    )
     const l2ByteSize = alignSectors(l1entries * this.header.numGTEsPerGT * 4)
-    const l2Buffer = await this.virtualBuffer.readChunk(l2ByteSize, 'L2 table ' + position)
+    const l2Buffer = await this.virtualBuffer.readChunk(
+      l2ByteSize,
+      'L2 table ' + position
+    )
     let grainsAreInAscendingOrder = true
     let previousL2Entry = 0
    let firstGrain = null
     for (let i = 0; i < l2entries; i++) {
       const l2Entry = l2Buffer.readUInt32LE(i * 4)
       if (i > 0 && previousL2Entry !== 0 && l2Entry !== 0) {
-        grainsAreInAscendingOrder = grainsAreInAscendingOrder && (previousL2Entry < l2Entry)
+        grainsAreInAscendingOrder =
+          grainsAreInAscendingOrder && previousL2Entry < l2Entry
       }
       previousL2Entry = l2Entry
       if (firstGrain === null) {
@@ -196,19 +231,33 @@ export class VMDKDirectParser {
     }
     const version = headerBuffer.readUInt32LE(4)
     if (version !== 1 && version !== 3) {
-      throw new Error('unsupported VMDK version ' + version + ', only version 1 and 3 are supported')
+      throw new Error(
+        'unsupported VMDK version ' +
+          version +
+          ', only version 1 and 3 are supported'
+      )
     }
     this.header = parseHeader(headerBuffer)
     // I think the multiplications are OK, because the descriptor is always at the beginning of the file
     const descriptorLength = this.header.descriptorSizeSectors * sectorSize
-    const descriptorBuffer = await this.virtualBuffer.readChunk(descriptorLength, 'descriptor')
+    const descriptorBuffer = await this.virtualBuffer.readChunk(
+      descriptorLength,
+      'descriptor'
+    )
     this.descriptor = parseDescriptor(descriptorBuffer)
     let l1PositionBytes = null
-    if (this.header.grainDirectoryOffsetSectors !== -1 && this.header.grainDirectoryOffsetSectors !== 0) {
+    if (
+      this.header.grainDirectoryOffsetSectors !== -1 &&
+      this.header.grainDirectoryOffsetSectors !== 0
+    ) {
       l1PositionBytes = this.header.grainDirectoryOffsetSectors * sectorSize
     }
     const endOfDescriptor = this.virtualBuffer.position
-    if (l1PositionBytes !== null && (l1PositionBytes === endOfDescriptor || l1PositionBytes === endOfDescriptor + sectorSize)) {
+    if (
+      l1PositionBytes !== null &&
+      (l1PositionBytes === endOfDescriptor ||
+        l1PositionBytes === endOfDescriptor + sectorSize)
+    ) {
       if (l1PositionBytes === endOfDescriptor + sectorSize) {
         await this.virtualBuffer.readChunk(sectorSize, 'skipping L1 marker')
       }
@@ -220,25 +269,39 @@ export class VMDKDirectParser {
   async next () {
     while (!this.virtualBuffer.isDepleted) {
       const position = this.virtualBuffer.position
-      const sector = await this.virtualBuffer.readChunk(512, 'marker start ' + position)
+      const sector = await this.virtualBuffer.readChunk(
+        512,
+        'marker start ' + position
+      )
       if (sector.length === 0) {
         break
       }
       const marker = tryToParseMarker(sector)
       if (marker.size === 0) {
         if (marker.value !== 0) {
-          await this.virtualBuffer.readChunk(marker.value * sectorSize, 'other marker value ' + this.virtualBuffer.position)
+          await this.virtualBuffer.readChunk(
+            marker.value * sectorSize,
+            'other marker value ' + this.virtualBuffer.position
+          )
         }
       } else if (marker.size > 10) {
         const grainDiskSize = marker.size + 12
         const alignedGrainDiskSize = alignSectors(grainDiskSize)
         const remainOfBufferSize = alignedGrainDiskSize - sectorSize
-        const remainderOfGrainBuffer = await this.virtualBuffer.readChunk(remainOfBufferSize, 'grain remainder ' + this.virtualBuffer.position)
+        const remainderOfGrainBuffer = await this.virtualBuffer.readChunk(
+          remainOfBufferSize,
+          'grain remainder ' + this.virtualBuffer.position
+        )
         const grainBuffer = Buffer.concat([sector, remainderOfGrainBuffer])
-        return readGrain(0, grainBuffer, this.header.compressionMethod === compressionDeflate && this.header.flags.compressedGrains)
+        return readGrain(
+          0,
+          grainBuffer,
+          this.header.compressionMethod === compressionDeflate &&
+            this.header.flags.compressedGrains
+        )
       }
     }
-    return new Promise((resolve) => resolve(null))
+    return new Promise(resolve => resolve(null))
   }
 }

@@ -249,28 +312,43 @@ export async function readRawContent (readStream) {

   // I think the multiplications are OK, because the descriptor is always at the beginning of the file
   const descriptorLength = header.descriptorSizeSectors * sectorSize
-  const descriptorBuffer = await virtualBuffer.readChunk(descriptorLength, 'descriptor')
+  const descriptorBuffer = await virtualBuffer.readChunk(
+    descriptorLength,
+    'descriptor'
+  )
   const descriptor = parseDescriptor(descriptorBuffer)

   // TODO: we concat them back for now so that the indices match, we'll have to introduce a bias later
   const remainingBuffer = await virtualBuffer.readChunk(-1, 'remainder')
-  const buffer = Buffer.concat([headerBuffer, descriptorBuffer, remainingBuffer])
+  const buffer = Buffer.concat([
+    headerBuffer,
+    descriptorBuffer,
+    remainingBuffer,
+  ])
   if (header.grainDirectoryOffsetSectors === -1) {
     header = parseHeader(buffer.slice(-1024, -1024 + sectorSize))
   }
   const rawOutputBuffer = Buffer.alloc(header.capacitySectors * sectorSize)
-  const l1Size = Math.floor((header.capacitySectors + header.l1EntrySectors - 1) / header.l1EntrySectors)
+  const l1Size = Math.floor(
+    (header.capacitySectors + header.l1EntrySectors - 1) / header.l1EntrySectors
+  )
   const l2Size = header.numGTEsPerGT
   const l1 = []
   for (let i = 0; i < l1Size; i++) {
-    const l1Entry = buffer.readUInt32LE(header.grainDirectoryOffsetSectors * sectorSize + 4 * i)
+    const l1Entry = buffer.readUInt32LE(
+      header.grainDirectoryOffsetSectors * sectorSize + 4 * i
+    )
     if (l1Entry !== 0) {
       l1.push(l1Entry)
       const l2 = []
       for (let j = 0; j < l2Size; j++) {
         const l2Entry = buffer.readUInt32LE(l1Entry * sectorSize + 4 * j)
         if (l2Entry !== 0 && l2Entry !== 1) {
-          const grain = await readGrain(l2Entry, buffer, header['flags']['compressedGrains'])
+          const grain = await readGrain(
+            l2Entry,
+            buffer,
+            header['flags']['compressedGrains']
+          )
           grain.grain.copy(rawOutputBuffer, grain.lba * sectorSize)
           l2[j] = grain
         }
@@ -279,7 +357,15 @@ export async function readRawContent (readStream) {
   }
   const vmdkType = descriptor['descriptor']['createType']
   if (!vmdkType || vmdkType.toLowerCase() !== 'streamOptimized'.toLowerCase()) {
-    throw new Error('unsupported VMDK type "' + vmdkType + '", only streamOptimized is supported')
+    throw new Error(
+      'unsupported VMDK type "' +
+        vmdkType +
+        '", only streamOptimized is supported'
+    )
+  }
+  return {
+    descriptor: descriptor.descriptor,
+    extents: descriptor.extents,
+    rawFile: rawOutputBuffer,
   }
-  return {descriptor: descriptor.descriptor, extents: descriptor.extents, rawFile: rawOutputBuffer}
 }
diff --git a/packages/xo-vmdk-to-vhd/src/vmdk-read.spec.js b/packages/xo-vmdk-to-vhd/src/vmdk-read.spec.js
index 9d4dcfb65..7925c2477 100644
--- a/packages/xo-vmdk-to-vhd/src/vmdk-read.spec.js
+++ b/packages/xo-vmdk-to-vhd/src/vmdk-read.spec.js
@@ -1,16 +1,23 @@
 import expect from 'must'
-import {createReadStream} from 'fs-promise'
-import {describe, it} from 'mocha'
-import {exec} from 'child-process-promise'
+import { createReadStream } from 'fs-promise'
+import { describe, it } from 'mocha'
+import { exec } from 'child-process-promise'

-import {VMDKDirectParser} from './vmdk-read'
+import { VMDKDirectParser } from './vmdk-read'

 describe('VMDK reading', () => {
   it('VMDKDirectParser reads OK', async () => {
     const rawFileName = 'random-data'
     const fileName = 'random-data.vmdk'
     await exec('base64 /dev/urandom | head -c 104448 > ' + rawFileName)
-    await exec('rm -f ' + fileName + '&& VBoxManage convertfromraw --format VMDK --variant Stream ' + rawFileName + ' ' + fileName)
+    await exec(
+      'rm -f ' +
+        fileName +
+        '&& VBoxManage convertfromraw --format VMDK --variant Stream ' +
+        rawFileName +
+        ' ' +
+        fileName
+    )
     const parser = new VMDKDirectParser(createReadStream(fileName))
     const header = await parser.readHeader()
     const harvested = []
diff --git a/packages/xo-vmdk-to-vhd/src/vmdk-to-vhd.spec.js b/packages/xo-vmdk-to-vhd/src/vmdk-to-vhd.spec.js
index a53ac451c..83fa2e5aa 100644
--- a/packages/xo-vmdk-to-vhd/src/vmdk-to-vhd.spec.js
+++ b/packages/xo-vmdk-to-vhd/src/vmdk-to-vhd.spec.js
@@ -1,11 +1,11 @@
 'use strict'

-import {describe, it} from 'mocha'
-import {exec} from 'child-process-promise'
-import {createReadStream, createWriteStream} from 'fs-promise'
+import { describe, it } from 'mocha'
+import { exec } from 'child-process-promise'
+import { createReadStream, createWriteStream } from 'fs-promise'

-import {readRawContent} from './vmdk-read'
-import {VHDFile, convertFromVMDK, computeGeometryForSize} from './vhd-write'
+import { readRawContent } from './vmdk-read'
+import { VHDFile, convertFromVMDK, computeGeometryForSize } from './vhd-write'

 describe('VMDK to VHD conversion', () => {
   it('can convert a random data file with readRawContent()', async () => {
@@ -14,22 +14,44 @@ describe('VMDK to VHD conversion', () => {
     const vhdFileName = 'from-vmdk-readRawContent.vhd'
     const reconvertedRawFilemane = 'from-vhd.raw'
     const dataSize = 5222400
-    await exec('rm -f ' + [inputRawFileName, vmdkFileName, vhdFileName, reconvertedRawFilemane].join(' '))
-    await exec('base64 /dev/urandom | head -c ' + dataSize + ' > ' + inputRawFileName)
-    await exec('VBoxManage convertfromraw --format VMDK --variant Stream ' + inputRawFileName + ' ' + vmdkFileName)
-    const rawContent = (await readRawContent(createReadStream(vmdkFileName))).rawFile
+    await exec(
+      'rm -f ' +
+        [
+          inputRawFileName,
+          vmdkFileName,
+          vhdFileName,
+          reconvertedRawFilemane,
+        ].join(' ')
+    )
+    await exec(
+      'base64 /dev/urandom | head -c ' + dataSize + ' > ' + inputRawFileName
+    )
+    await exec(
+      'VBoxManage convertfromraw --format VMDK --variant Stream ' +
+        inputRawFileName +
+        ' ' +
+        vmdkFileName
+    )
+    const rawContent = (await readRawContent(createReadStream(vmdkFileName)))
+      .rawFile
     const f = new VHDFile(rawContent.length, 523557791)
     await f.writeBuffer(rawContent)
     await f.writeFile(vhdFileName)
-    await exec('qemu-img convert -fvpc -Oraw ' + vhdFileName + ' ' + reconvertedRawFilemane)
-    return exec('qemu-img compare ' + vmdkFileName + ' ' + vhdFileName)
-      .catch((error) => {
+    await exec(
+      'qemu-img convert -fvpc -Oraw ' +
+        vhdFileName +
+        ' ' +
+        reconvertedRawFilemane
+    )
+    return exec('qemu-img compare ' + vmdkFileName + ' ' + vhdFileName).catch(
+      error => {
         console.error(error.stdout)
         console.error(error.stderr)
         console.error(vhdFileName, vmdkFileName, error.message)
         throw error
-      })
+      }
+    )
   })

   it('can convert a random data file with VMDKDirectParser', async () => {
@@ -39,23 +61,55 @@ describe('VMDK to VHD conversion', () => {
     const reconvertedRawFilemane = 'from-vhd.raw'
     const reconvertedByVBoxRawFilemane = 'from-vhd-by-vbox.raw'
     const dataSize = computeGeometryForSize(8 * 1024 * 1024).actualSize
-    await exec('rm -f ' + [inputRawFileName, vmdkFileName, vhdFileName, reconvertedRawFilemane, reconvertedByVBoxRawFilemane].join(' '))
-    await exec('base64 /dev/urandom | head -c ' + dataSize + ' > ' + inputRawFileName)
-    await exec('VBoxManage convertfromraw --format VMDK --variant Stream ' + inputRawFileName + ' ' + vmdkFileName)
-    const pipe = (await convertFromVMDK(createReadStream(vmdkFileName))).pipe(createWriteStream(vhdFileName))
+    await exec(
+      'rm -f ' +
+        [
+          inputRawFileName,
+          vmdkFileName,
+          vhdFileName,
+          reconvertedRawFilemane,
+          reconvertedByVBoxRawFilemane,
+        ].join(' ')
+    )
+    await exec(
+      'base64 /dev/urandom | head -c ' + dataSize + ' > ' + inputRawFileName
+    )
+    await exec(
+      'VBoxManage convertfromraw --format VMDK --variant Stream ' +
+        inputRawFileName +
+        ' ' +
+        vmdkFileName
+    )
+    const pipe = (await convertFromVMDK(createReadStream(vmdkFileName))).pipe(
+      createWriteStream(vhdFileName)
+    )
     await new Promise((resolve, reject) => {
       pipe.on('finish', resolve)
       pipe.on('error', reject)
     })
-    await exec('qemu-img convert -fvmdk -Oraw ' + vmdkFileName + ' ' + reconvertedByVBoxRawFilemane)
-    await exec('qemu-img convert -fvpc -Oraw ' + vhdFileName + ' ' + reconvertedRawFilemane)
-    return exec('qemu-img compare ' + reconvertedByVBoxRawFilemane + ' ' + reconvertedRawFilemane)
-      .catch((error) => {
-        console.error(error.stdout)
-        console.error(error.stderr)
-        console.error(vhdFileName, vmdkFileName, error.message)
+    await exec(
+      'qemu-img convert -fvmdk -Oraw ' +
+        vmdkFileName +
+        ' ' +
+        reconvertedByVBoxRawFilemane
+    )
+    await exec(
+      'qemu-img convert -fvpc -Oraw ' +
+        vhdFileName +
+        ' ' +
+        reconvertedRawFilemane
+    )
+    return exec(
+      'qemu-img compare ' +
+        reconvertedByVBoxRawFilemane +
+        ' ' +
+        reconvertedRawFilemane
+    ).catch(error => {
+      console.error(error.stdout)
+      console.error(error.stderr)
+      console.error(vhdFileName, vmdkFileName, error.message)

-        throw error
-      })
+      throw error
+    })
   })
 })
diff --git a/packages/xo-web/src/common/add-subscriptions.js b/packages/xo-web/src/common/add-subscriptions.js
index ed361ecbf..0f14f7209 100644
--- a/packages/xo-web/src/common/add-subscriptions.js
+++ b/packages/xo-web/src/common/add-subscriptions.js
@@ -11,7 +11,9 @@ const addSubscriptions = subscriptions => Component =>
     componentWillMount () {
       this._unsubscribes = map(
-        typeof subscriptions === 'function' ? subscriptions(this.props) : subscriptions,
+        typeof subscriptions === 'function'
+          ? subscriptions(this.props)
+          : subscriptions,
         (subscribe, prop) =>
           subscribe(value => this.setState({ [prop]: value }))
       )
diff --git a/packages/xo-web/src/xo-app/dashboard/health/index.js b/packages/xo-web/src/xo-app/dashboard/health/index.js
index 1fe71e73e..e78636f2c 100644
--- a/packages/xo-web/src/xo-app/dashboard/health/index.js
+++ b/packages/xo-web/src/xo-app/dashboard/health/index.js
@@ -457,10 +457,7 @@ export default class Health extends Component {
     isEmpty(poolIds) ? undefined : item => includes(poolIds, item.$pool)
   )

-  _getUserSrs = createFilter(
-    () => this.props.userSrs,
-    this._getPoolPredicate
-  )
+  _getUserSrs = createFilter(() => this.props.userSrs, this._getPoolPredicate)

   _getVdiOrphaned = createFilter(
     () => this.props.vdiOrphaned,
@@ -482,10 +479,7 @@ export default class Health extends Component {
     this._getPoolPredicate
   )

-  _getMessages = createFilter(
-    () => this.state.messages,
-    this._getPoolPredicate
-  )
+  _getMessages = createFilter(() => this.state.messages, this._getPoolPredicate)

   render () {
     const { props, state } = this
@@ -577,7 +571,9 @@ export default class Health extends Component {
                     {() => (
diff --git a/scripts/normalize-packages b/scripts/normalize-packages
index 6d62d406d..314505d7b 100755
--- a/scripts/normalize-packages
+++ b/scripts/normalize-packages
@@ -29,7 +29,7 @@ require('exec-promise')(() =>

       delete pkg.standard

-      deleteProperties(pkg, 'config', [ 'commitizen' ])
+      deleteProperties(pkg, 'config', ['commitizen'])
       deleteProperties(pkg, 'devDependencies', [
         'babel-7-jest',
         'babel-eslint',
@@ -51,15 +51,12 @@ require('exec-promise')(() =>
         'prettier',
         'standard',
       ])
-      deleteProperties(pkg, 'scripts', [ 'commitmsg', 'cz' ])
+      deleteProperties(pkg, 'scripts', ['commitmsg', 'cz'])

       const { scripts } = pkg
       if (scripts !== undefined) {
         const prepublish = scripts.prepublish
-        if (
-          prepublish !== undefined &&
-          !('prepublishOnly' in scripts)
-        ) {
+        if (prepublish !== undefined && !('prepublishOnly' in scripts)) {
           delete scripts.prepublish
           scripts.prepublishOnly = prepublish
           pkg.scripts = sortedObject(scripts)
@@ -67,10 +64,7 @@ require('exec-promise')(() =>
       }

       return Promise.all([
-        writeFile(
-          `${dir}/package.json`,
-          JSON.stringify(pkg, null, 2) + '\n'
-        ),
+        writeFile(`${dir}/package.json`, JSON.stringify(pkg, null, 2) + '\n'),
         unlink(`${dir}/.editorconfig`),
         unlink(`${dir}/.eslintrc.js`),
         unlink(`${dir}/.flowconfig`),
diff --git a/scripts/run b/scripts/run
index ea538502f..7eb50b7f7 100755
--- a/scripts/run
+++ b/scripts/run
@@ -5,13 +5,16 @@ const { spawn } = require('child_process')

 const { getPackages } = require('./utils')

-require('exec-promise')(([ command, ...args ]) =>
+require('exec-promise')(([command, ...args]) =>
   getPackages().forEach(({ dir, name }) => {
     console.log('*', name)

-    return fromEvent(spawn(command, args, {
-      cwd: dir,
-      stdio: 'inherit',
-    }), 'exit').then(code => {
+    return fromEvent(
+      spawn(command, args, {
+        cwd: dir,
+        stdio: 'inherit',
+      }),
+      'exit'
+    ).then(code => {
       if (code !== 0) {
         throw code
       }
diff --git a/scripts/run-script b/scripts/run-script
index 5f240f50d..66c20d0e8 100755
--- a/scripts/run-script
+++ b/scripts/run-script
@@ -13,25 +13,24 @@ const { env } = process
 // TODO: https://docs.npmjs.com/misc/scripts#environment
 require('exec-promise')(args => {
   const parallel = args[0] === '--parallel'
-  let script = args[parallel ? 1 : 0]
+  const script = args[parallel ? 1 : 0]

   let errors = 0

-  return getPackages(true)[parallel ? 'map' : 'forEach'](({ dir, name, package: { scripts } }) => {
-    if (scripts == null) {
-      return
-    }
+  return getPackages(true)
+    [parallel ? 'map' : 'forEach'](({ dir, name, package: { scripts } }) => {
+      if (scripts == null) {
+        return
+      }

-    const spawnOpts = {
-      cwd: dir,
-      env: Object.assign({}, env, {
-        PATH: `${dir}/node_modules/.bin${delimiter}${env.PATH}`,
-      }),
-      shell: true,
-      stdio: 'inherit',
-    }
-    return forEach.call(
-      [ `pre${script}`, script, `post${script}` ],
-      script => {
+      const spawnOpts = {
+        cwd: dir,
+        env: Object.assign({}, env, {
+          PATH: `${dir}/node_modules/.bin${delimiter}${env.PATH}`,
+        }),
+        shell: true,
+        stdio: 'inherit',
+      }
+      return forEach.call([`pre${script}`, script, `post${script}`], script => {
         const command = scripts[script]
         if (command !== undefined) {
           console.log(`* ${name}:${script} −`, command)
@@ -42,11 +41,11 @@ require('exec-promise')(args => {
           }
         })
       }
+      })
+    })
+    .then(() => {
+      if (errors !== 0) {
+        throw errors
       }
-    )
-  }).then(() => {
-    if (errors !== 0) {
-      throw errors
-    }
-  })
+    })
 })
diff --git a/scripts/utils.js b/scripts/utils.js
index be37a98a2..c5e3bd2b0 100644
--- a/scripts/utils.js
+++ b/scripts/utils.js
@@ -15,39 +15,37 @@ const _getPackages = scope => {
 }

 exports.getPackages = (readPackageJson = false) => {
-  const p = Promise.all([
-    _getPackages(),
-    _getPackages('@xen-orchestra'),
-  ]).then(pkgs => {
-    pkgs = [].concat(...pkgs) // flatten
-    return readPackageJson
-      ? Promise.all(pkgs.map(pkg =>
-        readFile(`${pkg.dir}/package.json`).then(data => {
-          pkg.package = JSON.parse(data)
-          return pkg
-        }, noop)
-      )).then(pkgs => pkgs.filter(pkg => pkg !== undefined))
-      : pkgs
-  })
+  const p = Promise.all([_getPackages(), _getPackages('@xen-orchestra')]).then(
+    pkgs => {
+      pkgs = [].concat(...pkgs) // flatten
+      return readPackageJson
+        ? Promise.all(
+            pkgs.map(pkg =>
+              readFile(`${pkg.dir}/package.json`).then(data => {
+                pkg.package = JSON.parse(data)
+                return pkg
+              }, noop)
+            )
+          ).then(pkgs => pkgs.filter(pkg => pkg !== undefined))
+        : pkgs
+    }
+  )
   p.forEach = fn => p.then(pkgs => forEach.call(pkgs, fn))
   p.map = fn => p.then(pkgs => Promise.all(pkgs.map(fn))).then(noop)
   return p
 }

-const noop = exports.noop = () => {}
+const noop = (exports.noop = () => {})

-const readFile = exports.readFile = file => fromCallback(cb =>
-  fs.readFile(file, 'utf8', cb)
-)
+const readFile = (exports.readFile = file =>
+  fromCallback(cb => fs.readFile(file, 'utf8', cb)))

-exports.unlink = path => fromCallback(cb =>
-  fs.unlink(path, cb)
-).catch(error => {
-  if (error.code !== 'ENOENT') {
-    throw error
-  }
-})
+exports.unlink = path =>
+  fromCallback(cb => fs.unlink(path, cb)).catch(error => {
+    if (error.code !== 'ENOENT') {
+      throw error
+    }
+  })

-exports.writeFile = (file, data) => fromCallback(cb =>
-  fs.writeFile(file, data, cb)
-)
+exports.writeFile = (file, data) =>
+  fromCallback(cb => fs.writeFile(file, data, cb))
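A closing note on two idioms the reformatted scripts/utils.js relies on: `fromCallback` from promise-toolbox to promisify Node's callback APIs one call at a time, and a parenthesized assignment expression to export a value while keeping a local binding in a single statement. A small standalone sketch along the same lines (not part of the patch itself):

// The assignment expression `exports.noop = ...` evaluates to the function
// being assigned, so the same value is bound locally and exported at once.
// The surrounding parentheses are what the formatter adds in this commit.
const fs = require('fs')
const { fromCallback } = require('promise-toolbox')

const noop = (exports.noop = () => {})

// fromCallback invokes its argument with a node-style callback and returns
// a promise, promisifying a single call without wrapping the whole module.
const readFile = (exports.readFile = file =>
  fromCallback(cb => fs.readFile(file, 'utf8', cb)))

// Example usage: readFile('package.json').then(data => console.log(data.length), noop)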