chore: format all code (#2632)
commit 7cb720b11f (parent 8bf35b2a63)
@@ -18,7 +18,8 @@ class Job {
}
const scheduleNext = () => {
const delay = schedule._nextDelay()
this._timeout = delay < MAX_DELAY
this._timeout =
delay < MAX_DELAY
? setTimeout(wrapper, delay)
: setTimeout(scheduleNext, MAX_DELAY)
}
@@ -9,7 +9,7 @@ const NEXT_MAPPING = {
minute: { hour: 1 },
}

const getFirst = values => values !== undefined ? values[0] : 0
const getFirst = values => (values !== undefined ? values[0] : 0)

const setFirstAvailable = (date, unit, values) => {
if (values === undefined) {
@@ -90,7 +90,7 @@ const createParser = ({ fields: [...fields], presets: { ...presets } }) => {
if (!match('/')) {
return
}
[start, end] = field.range
;[start, end] = field.range
step = parseInteger()
} else {
start = parseValue()
@@ -28,9 +28,7 @@ describe('parse()', () => {
})

it('reports missing integer', () => {
expect(() => parse('*/a')).toThrow(
'minute: missing integer at character 2'
)
expect(() => parse('*/a')).toThrow('minute: missing integer at character 2')
expect(() => parse('*')).toThrow('hour: missing integer at character 1')
})
@@ -325,7 +325,10 @@ class P {
value.push(result.value)
pos = result.pos
}
while (i < max && (result = this._parse(input, pos, end)) instanceof Success) {
while (
i < max &&
(result = this._parse(input, pos, end)) instanceof Success
) {
++i
value.push(result.value)
pos = result.pos
@@ -359,8 +362,9 @@ P.eof = new P(

const parser = P.grammar({
default: r =>
P.seq(r.ws, r.term.repeat(), P.eof)
.map(([, terms]) => (terms.length === 0 ? new Null() : new And(terms))),
P.seq(r.ws, r.term.repeat(), P.eof).map(
([, terms]) => (terms.length === 0 ? new Null() : new And(terms))
),
quotedString: new P((input, pos, end) => {
if (input[pos] !== '"') {
return new Failure(pos, '"')
@@ -416,7 +420,7 @@ const parser = P.grammar({
? new StringNode(str)
: new NumberNode(asNum)
})
),
)
).skip(r.ws),
ws: P.regex(/\s*/),
}).default
@@ -476,7 +480,9 @@ export const getPropertyClausesStrings = node => {
// -------------------------------------------------------------------

export const setPropertyClause = (node, name, child) => {
const property = child && new Property(
const property =
child &&
new Property(
name,
typeof child === 'string' ? new StringNode(child) : child
)
@@ -485,8 +491,8 @@ export const setPropertyClause = (node, name, child) => {
return property
}

const children = (node instanceof And ? node.children : [node]).filter(child =>
!(child instanceof Property && child.name === name)
const children = (node instanceof And ? node.children : [node]).filter(
child => !(child instanceof Property && child.name === name)
)
if (property !== undefined) {
children.push(property)
@@ -49,13 +49,15 @@ describe('Number', () => {

describe('setPropertyClause', () => {
it('creates a node if none passed', () => {
expect(setPropertyClause(undefined, 'foo', 'bar').toString()).toBe('foo:bar')
expect(setPropertyClause(undefined, 'foo', 'bar').toString()).toBe(
'foo:bar'
)
})

it('adds a property clause if there was none', () => {
expect(
setPropertyClause(parse('baz'), 'foo', 'bar').toString()
).toBe('baz foo:bar')
expect(setPropertyClause(parse('baz'), 'foo', 'bar').toString()).toBe(
'baz foo:bar'
)
})

it('replaces the property clause if there was one', () => {
@@ -26,14 +26,17 @@ type ObjectPattern = { [string]: Pattern }
type ArrayPattern = Array<Pattern>

// value equals the pattern
type ValuePattern = bool | number | string
type ValuePattern = boolean | number | string

const match = (pattern: Pattern, value: any) => {
if (Array.isArray(pattern)) {
return Array.isArray(value) && pattern.every((subpattern, i) =>
return (
Array.isArray(value) &&
pattern.every((subpattern, i) =>
// FIXME: subpatterns should match different subvalues
value.some(subvalue => match(subpattern, subvalue))
)
)
}

if (pattern !== null && typeof pattern === 'object') {
@@ -74,4 +77,5 @@ const match = (pattern: Pattern, value: any) => {
return pattern === value
}

export const createPredicate = (pattern: Pattern) => (value: any) => match(pattern, value)
export const createPredicate = (pattern: Pattern) => (value: any) =>
match(pattern, value)
@@ -36,11 +36,13 @@ const fuFooter = fu.struct([
fu.char('creatorApplication', 4), // 28
fu.uint32('creatorVersion'), // 32
fu.uint32('creatorHostOs'), // 36
fu.struct('originalSize', [ // At the creation, current size of the hard disk.
fu.struct('originalSize', [
// At the creation, current size of the hard disk.
fu.uint32('high'), // 40
fu.uint32('low'), // 44
]),
fu.struct('currentSize', [ // Current size of the virtual disk. At the creation: currentSize = originalSize.
fu.struct('currentSize', [
// Current size of the virtual disk. At the creation: currentSize = originalSize.
fu.uint32('high'), // 48
fu.uint32('low'), // 52
]),
@@ -60,11 +62,9 @@ const FOOTER_SIZE = fuFooter.size

const fuHeader = fu.struct([
fu.char('cookie', 8),
fu.struct('dataOffset', [
fu.uint32('high'),
fu.uint32('low'),
]),
fu.struct('tableOffset', [ // Absolute byte offset of the Block Allocation Table.
fu.struct('dataOffset', [fu.uint32('high'), fu.uint32('low')]),
fu.struct('tableOffset', [
// Absolute byte offset of the Block Allocation Table.
fu.uint32('high'),
fu.uint32('low'),
]),
@@ -76,16 +76,21 @@ const fuHeader = fu.struct([
fu.uint32('parentTimestamp'),
fu.byte('reserved1', 4),
fu.char16be('parentUnicodeName', 512),
fu.struct('parentLocatorEntry', [
fu.struct(
'parentLocatorEntry',
[
fu.uint32('platformCode'),
fu.uint32('platformDataSpace'),
fu.uint32('platformDataLength'),
fu.uint32('reserved'),
fu.struct('platformDataOffset', [ // Absolute byte offset of the locator data.
fu.struct('platformDataOffset', [
// Absolute byte offset of the locator data.
fu.uint32('high'),
fu.uint32('low'),
]),
], 8),
],
8
),
fu.byte('reserved2', 256),
])
const HEADER_SIZE = fuHeader.size
@@ -98,10 +103,10 @@ const SIZE_OF_32_BITS = Math.pow(2, 32)
const uint32ToUint64 = fu => fu.high * SIZE_OF_32_BITS + fu.low

// Returns a 32 bits integer corresponding to a Vhd version.
const getVhdVersion = (major, minor) => (major << 16) | (minor & 0x0000FFFF)
const getVhdVersion = (major, minor) => (major << 16) | (minor & 0x0000ffff)

// bytes[] bit manipulation
const testBit = (map, bit) => map[bit >> 3] & 1 << (bit & 7)
const testBit = (map, bit) => map[bit >> 3] & (1 << (bit & 7))
const setBit = (map, bit) => {
map[bit >> 3] |= 1 << (bit & 7)
}
@@ -109,8 +114,10 @@ const unsetBit = (map, bit) => {
map[bit >> 3] &= ~(1 << (bit & 7))
}

const addOffsets = (...offsets) => offsets.reduce(
(a, b) => b == null
const addOffsets = (...offsets) =>
offsets.reduce(
(a, b) =>
b == null
? a
: typeof b === 'object'
? { bytes: a.bytes + b.bytes, bits: a.bits + b.bits }
@@ -119,22 +126,16 @@ const addOffsets = (...offsets) => offsets.reduce(
)

const pack = (field, value, buf, offset) => {
field.pack(
value,
buf,
addOffsets(field.offset, offset)
)
field.pack(value, buf, addOffsets(field.offset, offset))
}

const unpack = (field, buf, offset) =>
field.unpack(
buf,
addOffsets(field.offset, offset)
)
field.unpack(buf, addOffsets(field.offset, offset))

// ===================================================================

const streamToNewBuffer = stream => new Promise((resolve, reject) => {
const streamToNewBuffer = stream =>
new Promise((resolve, reject) => {
const chunks = []
let length = 0
@@ -166,7 +167,8 @@ const streamToExistingBuffer = (
buffer,
offset = 0,
end = buffer.length
) => new Promise((resolve, reject) => {
) =>
new Promise((resolve, reject) => {
assert(offset >= 0)
assert(end > offset)
assert(end <= buffer.length)
@@ -214,7 +216,11 @@ const computeChecksum = (struct, buf, offset = 0) => {
for (let i = offset, n = checksumOffset; i < n; ++i) {
sum += buf[i]
}
for (let i = checksumOffset + checksumField.size, n = offset + struct.size; i < n; ++i) {
for (
let i = checksumOffset + checksumField.size, n = offset + struct.size;
i < n;
++i
) {
sum += buf[i]
}
@@ -222,7 +228,8 @@ const computeChecksum = (struct, buf, offset = 0) => {
}

const verifyChecksum = (struct, buf, offset) =>
unpack(struct.fields.checksum, buf, offset) === computeChecksum(struct, buf, offset)
unpack(struct.fields.checksum, buf, offset) ===
computeChecksum(struct, buf, offset)

const getParentLocatorSize = parentLocatorEntry => {
const { platformDataSpace } = parentLocatorEntry
@@ -231,9 +238,7 @@ const getParentLocatorSize = parentLocatorEntry => {
return platformDataSpace * SECTOR_SIZE
}

return (platformDataSpace % SECTOR_SIZE === 0)
? platformDataSpace
: 0
return platformDataSpace % SECTOR_SIZE === 0 ? platformDataSpace : 0
}

// ===================================================================
@@ -263,11 +268,20 @@ export default class Vhd {
assert(begin >= 0)
assert(length > 0)

return this._handler.createReadStream(this._path, {
return this._handler
.createReadStream(this._path, {
end: begin + length - 1,
start: begin,
}).then(buf
? stream => streamToExistingBuffer(stream, buf, offset, (offset || 0) + length)
})
.then(
buf
? stream =>
streamToExistingBuffer(
stream,
buf,
offset,
(offset || 0) + length
)
: streamToNewBuffer
)
}
@@ -296,7 +310,7 @@ export default class Vhd {
assert(block < this._header.maxTableEntries)

const blockAddr = this._blockAllocationTable[block]
if (blockAddr !== 0xFFFFFFFF) {
if (blockAddr !== 0xffffffff) {
return blockAddr * SECTOR_SIZE
}
}
@@ -325,7 +339,8 @@ export default class Vhd {
assert(sectorsPerBlock % 1 === 0)

// 1 bit per sector, rounded up to full sectors
this._blockBitmapSize = Math.ceil(sectorsPerBlock / 8 / SECTOR_SIZE) * SECTOR_SIZE
this._blockBitmapSize =
Math.ceil(sectorsPerBlock / 8 / SECTOR_SIZE) * SECTOR_SIZE
assert(this._blockBitmapSize === SECTOR_SIZE)

this._footer = footer
@@ -368,10 +383,10 @@ export default class Vhd {
const blockBitmapSize = this._blockBitmapSize
const parent = this._parent

if (blockAddr && (
!parent ||
testBit(await this._read(blockAddr, blockBitmapSize), sector)
)) {
if (
blockAddr &&
(!parent || testBit(await this._read(blockAddr, blockBitmapSize), sector))
) {
return this._read(
blockAddr + blockBitmapSize + sector * SECTOR_SIZE + begin,
length,
@@ -402,7 +417,12 @@ export default class Vhd {
}

if (!parent) {
return this._read(blockAddr + this._blockBitmapSize + begin, length, buf, offset)
return this._read(
blockAddr + this._blockBitmapSize + begin,
length,
buf,
offset
)
}

// FIXME: we should read as many sectors in a single pass as
@@ -29,13 +29,15 @@ exports.createOutputStream = path => {
exports.resolveRef = (xapi, type, refOrUuidOrNameLabel) =>
isOpaqueRef(refOrUuidOrNameLabel)
? refOrUuidOrNameLabel
: xapi.call(`${type}.get_by_uuid`, refOrUuidOrNameLabel).catch(
() => xapi.call(`${type}.get_by_name_label`, refOrUuidOrNameLabel).then(
refs => {
: xapi.call(`${type}.get_by_uuid`, refOrUuidOrNameLabel).catch(() =>
xapi
.call(`${type}.get_by_name_label`, refOrUuidOrNameLabel)
.then(refs => {
if (refs.length === 1) {
return refs[0]
}
throw new Error(`no single match for ${type} with name label ${refOrUuidOrNameLabel}`)
}
throw new Error(
`no single match for ${type} with name label ${refOrUuidOrNameLabel}`
)
})
)
@@ -86,11 +86,11 @@ const main = async args => {

// Make the REPL waits for promise completion.
repl.eval = (evaluate => (cmd, context, filename, cb) => {
fromCallback(cb => {
;fromCallback(cb => {
evaluate.call(repl, cmd, context, filename, cb)
}).then(value =>
isArray(value) ? Promise.all(value) : value
)::asCallback(cb)
})
.then(value => (isArray(value) ? Promise.all(value) : value))
::asCallback(cb)
})(repl.eval)

await eventToPromise(repl, 'exit')
@ -93,8 +93,9 @@ class XapiError extends BaseError {
|
||||
|
||||
export const wrapError = error => {
|
||||
let code, params
|
||||
if (isArray(error)) { // < XenServer 7.3
|
||||
[ code, ...params ] = error
|
||||
if (isArray(error)) {
|
||||
// < XenServer 7.3
|
||||
;[code, ...params] = error
|
||||
} else {
|
||||
code = error.message
|
||||
params = error.data
|
||||
@ -128,17 +129,13 @@ const {
|
||||
|
||||
const OPAQUE_REF_PREFIX = 'OpaqueRef:'
|
||||
export const isOpaqueRef = value =>
|
||||
typeof value === 'string' &&
|
||||
startsWith(value, OPAQUE_REF_PREFIX)
|
||||
typeof value === 'string' && startsWith(value, OPAQUE_REF_PREFIX)
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
const RE_READ_ONLY_METHOD = /^[^.]+\.get_/
|
||||
const isReadOnlyCall = (method, args) => (
|
||||
args.length === 1 &&
|
||||
isOpaqueRef(args[0]) &&
|
||||
RE_READ_ONLY_METHOD.test(method)
|
||||
)
|
||||
const isReadOnlyCall = (method, args) =>
|
||||
args.length === 1 && isOpaqueRef(args[0]) && RE_READ_ONLY_METHOD.test(method)
|
||||
|
||||
// Prepare values before passing them to the XenAPI:
|
||||
//
|
||||
@ -209,7 +206,7 @@ export class Xapi extends EventEmitter {
|
||||
this._pool = null
|
||||
this._readOnly = Boolean(opts.readOnly)
|
||||
this._sessionId = null
|
||||
const url = this._url = parseUrl(opts.url)
|
||||
const url = (this._url = parseUrl(opts.url))
|
||||
|
||||
if (this._auth === undefined) {
|
||||
const user = url.username
|
||||
@ -224,9 +221,7 @@ export class Xapi extends EventEmitter {
|
||||
}
|
||||
|
||||
if (opts.watchEvents !== false) {
|
||||
this._debounce = opts.debounce == null
|
||||
? 200
|
||||
: opts.debounce
|
||||
this._debounce = opts.debounce == null ? 200 : opts.debounce
|
||||
|
||||
this._eventWatchers = createObject(null)
|
||||
|
||||
@ -237,7 +232,7 @@ export class Xapi extends EventEmitter {
|
||||
|
||||
this._nTasks = 0
|
||||
|
||||
const objects = this._objects = new Collection()
|
||||
const objects = (this._objects = new Collection())
|
||||
objects.getKey = getKey
|
||||
|
||||
this._objectsByRefs = createObject(null)
|
||||
@ -286,13 +281,7 @@ export class Xapi extends EventEmitter {
|
||||
get status () {
|
||||
const id = this._sessionId
|
||||
|
||||
return id
|
||||
? (
|
||||
id === CONNECTING
|
||||
? CONNECTING
|
||||
: CONNECTED
|
||||
)
|
||||
: DISCONNECTED
|
||||
return id ? (id === CONNECTING ? CONNECTING : CONNECTED) : DISCONNECTED
|
||||
}
|
||||
|
||||
get _humanId () {
|
||||
@ -305,20 +294,29 @@ export class Xapi extends EventEmitter {
|
||||
barrier (ref) {
|
||||
const eventWatchers = this._eventWatchers
|
||||
if (eventWatchers === undefined) {
|
||||
return Promise.reject(new Error('Xapi#barrier() requires events watching'))
|
||||
return Promise.reject(
|
||||
new Error('Xapi#barrier() requires events watching')
|
||||
)
|
||||
}
|
||||
|
||||
const key = `xo:barrier:${Math.random().toString(36).slice(2)}`
|
||||
const key = `xo:barrier:${Math.random()
|
||||
.toString(36)
|
||||
.slice(2)}`
|
||||
const poolRef = this._pool.$ref
|
||||
|
||||
const { promise, resolve } = defer()
|
||||
eventWatchers[key] = resolve
|
||||
|
||||
return this._sessionCall(
|
||||
'pool.add_to_other_config',
|
||||
[ poolRef, key, '' ]
|
||||
).then(() => promise.then(() => {
|
||||
this._sessionCall('pool.remove_from_other_config', [ poolRef, key ]).catch(noop)
|
||||
return this._sessionCall('pool.add_to_other_config', [
|
||||
poolRef,
|
||||
key,
|
||||
'',
|
||||
]).then(() =>
|
||||
promise.then(() => {
|
||||
this._sessionCall('pool.remove_from_other_config', [
|
||||
poolRef,
|
||||
key,
|
||||
]).catch(noop)
|
||||
|
||||
if (ref === undefined) {
|
||||
return
|
||||
@ -330,7 +328,8 @@ export class Xapi extends EventEmitter {
|
||||
}
|
||||
|
||||
return this.getObjectByRef(ref)
|
||||
}))
|
||||
})
|
||||
)
|
||||
}
|
||||
|
||||
connect () {
|
||||
@ -434,12 +433,9 @@ export class Xapi extends EventEmitter {
|
||||
// this lib), UUID (unique identifier that some objects have) or
|
||||
// opaque reference (internal to XAPI).
|
||||
getObject (idOrUuidOrRef, defaultValue) {
|
||||
const object = typeof idOrUuidOrRef === 'string'
|
||||
? (
|
||||
// if there is an UUID, it is also the $id.
|
||||
this._objects.all[idOrUuidOrRef] ||
|
||||
this._objectsByRefs[idOrUuidOrRef]
|
||||
)
|
||||
const object =
|
||||
typeof idOrUuidOrRef === 'string'
|
||||
? this._objects.all[idOrUuidOrRef] || this._objectsByRefs[idOrUuidOrRef]
|
||||
: this._objects.all[idOrUuidOrRef.$id]
|
||||
|
||||
if (object) return object
|
||||
@ -479,15 +475,9 @@ export class Xapi extends EventEmitter {
|
||||
}
|
||||
|
||||
@cancelable
|
||||
getResource ($cancelToken, pathname, {
|
||||
host,
|
||||
query,
|
||||
task,
|
||||
}) {
|
||||
return this._autoTask(
|
||||
task,
|
||||
`Xapi#getResource ${pathname}`
|
||||
).then(taskRef => {
|
||||
getResource ($cancelToken, pathname, { host, query, task }) {
|
||||
return this._autoTask(task, `Xapi#getResource ${pathname}`).then(
|
||||
taskRef => {
|
||||
query = { ...query, session_id: this.sessionId }
|
||||
let taskResult
|
||||
if (taskRef !== undefined) {
|
||||
@ -520,23 +510,20 @@ export class Xapi extends EventEmitter {
|
||||
}
|
||||
|
||||
return promise
|
||||
})
|
||||
}
|
||||
)
|
||||
}
|
||||
|
||||
@cancelable
|
||||
putResource ($cancelToken, body, pathname, {
|
||||
host,
|
||||
query,
|
||||
task,
|
||||
} = {}) {
|
||||
putResource ($cancelToken, body, pathname, { host, query, task } = {}) {
|
||||
if (this._readOnly) {
|
||||
return Promise.reject(new Error(new Error('cannot put resource in read only mode')))
|
||||
return Promise.reject(
|
||||
new Error(new Error('cannot put resource in read only mode'))
|
||||
)
|
||||
}
|
||||
|
||||
return this._autoTask(
|
||||
task,
|
||||
`Xapi#putResource ${pathname}`
|
||||
).then(taskRef => {
|
||||
return this._autoTask(task, `Xapi#putResource ${pathname}`).then(
|
||||
taskRef => {
|
||||
query = { ...query, session_id: this.sessionId }
|
||||
|
||||
let taskResult
|
||||
@ -559,7 +546,8 @@ export class Xapi extends EventEmitter {
|
||||
headers['content-length'] = '1125899906842624'
|
||||
}
|
||||
|
||||
const doRequest = override => httpRequest.put(
|
||||
const doRequest = override =>
|
||||
httpRequest.put(
|
||||
$cancelToken,
|
||||
this._url,
|
||||
host && {
|
||||
@ -575,16 +563,14 @@ export class Xapi extends EventEmitter {
|
||||
override
|
||||
)
|
||||
|
||||
// if a stream, sends a dummy request to probe for a
|
||||
// redirection before consuming body
|
||||
const promise = isStream
|
||||
|
||||
// dummy request to probe for a redirection before consuming body
|
||||
? doRequest({
|
||||
body: '',
|
||||
|
||||
// omit task_id because this request will fail on purpose
|
||||
query: 'task_id' in query
|
||||
? omit(query, 'task_id')
|
||||
: query,
|
||||
query: 'task_id' in query ? omit(query, 'task_id') : query,
|
||||
|
||||
maxRedirects: 0,
|
||||
}).then(
|
||||
@ -606,8 +592,6 @@ export class Xapi extends EventEmitter {
|
||||
throw error
|
||||
}
|
||||
)
|
||||
|
||||
// http-request-plus correctly handle redirects if body is not a stream
|
||||
: doRequest()
|
||||
|
||||
return promise.then(response => {
|
||||
@ -630,7 +614,8 @@ export class Xapi extends EventEmitter {
|
||||
return taskResult
|
||||
})
|
||||
})
|
||||
})
|
||||
}
|
||||
)
|
||||
}
|
||||
|
||||
watchTask (ref) {
|
||||
@ -692,15 +677,17 @@ export class Xapi extends EventEmitter {
|
||||
newArgs.push.apply(newArgs, args)
|
||||
}
|
||||
|
||||
return this._transportCall(method, newArgs)
|
||||
::pCatch(isSessionInvalid, () => {
|
||||
return this._transportCall(method, newArgs)::pCatch(
|
||||
isSessionInvalid,
|
||||
() => {
|
||||
// XAPI is sometimes reinitialized and sessions are lost.
|
||||
// Try to login again.
|
||||
debug('%s: the session has been reinitialized', this._humanId)
|
||||
|
||||
this._sessionId = null
|
||||
return this.connect().then(() => this._sessionCall(method, args))
|
||||
})
|
||||
}
|
||||
)
|
||||
} catch (error) {
|
||||
return Promise.reject(error)
|
||||
}
|
||||
@ -715,9 +702,8 @@ export class Xapi extends EventEmitter {
|
||||
ref: true,
|
||||
type: true,
|
||||
}
|
||||
const getKey = (key, obj) => reservedKeys[key] && obj === object
|
||||
? `$$${key}`
|
||||
: `$${key}`
|
||||
const getKey = (key, obj) =>
|
||||
reservedKeys[key] && obj === object ? `$$${key}` : `$${key}`
|
||||
|
||||
// Creates resolved properties.
|
||||
forEach(object, function resolveObject (value, key, object) {
|
||||
@ -736,7 +722,7 @@ export class Xapi extends EventEmitter {
|
||||
} else if (isOpaqueRef(value[0])) {
|
||||
// This is an array of refs.
|
||||
defineProperty(object, getKey(key, object), {
|
||||
get: () => freezeObject(map(value, (ref) => objectsByRefs[ref])),
|
||||
get: () => freezeObject(map(value, ref => objectsByRefs[ref])),
|
||||
})
|
||||
|
||||
freezeObject(value)
|
||||
@ -836,7 +822,9 @@ export class Xapi extends EventEmitter {
|
||||
}
|
||||
|
||||
_watchEvents () {
|
||||
const loop = () => this.status === CONNECTED && this._sessionCall('event.from', [
|
||||
const loop = () =>
|
||||
this.status === CONNECTED &&
|
||||
this._sessionCall('event.from', [
|
||||
['*'],
|
||||
this._fromToken,
|
||||
60 + 0.1, // Force float.
|
||||
@ -847,7 +835,8 @@ export class Xapi extends EventEmitter {
|
||||
this._processEvents(events)
|
||||
|
||||
if (task !== this._nTasks) {
|
||||
this._sessionCall('task.get_all_records').then(tasks => {
|
||||
this._sessionCall('task.get_all_records')
|
||||
.then(tasks => {
|
||||
const toRemove = new Set()
|
||||
forEach(this.objects.all, object => {
|
||||
if (object.$type === 'task') {
|
||||
@ -861,13 +850,12 @@ export class Xapi extends EventEmitter {
|
||||
toRemove.forEach(ref => {
|
||||
this._removeObject('task', ref)
|
||||
})
|
||||
}).catch(noop)
|
||||
})
|
||||
.catch(noop)
|
||||
}
|
||||
|
||||
const debounce = this._debounce
|
||||
return debounce != null
|
||||
? pDelay(debounce).then(loop)
|
||||
: loop()
|
||||
return debounce != null ? pDelay(debounce).then(loop) : loop()
|
||||
}
|
||||
const onFailure = error => {
|
||||
if (areEventsLost(error)) {
|
||||
@ -906,9 +894,9 @@ export class Xapi extends EventEmitter {
|
||||
::/\.get_all_records$/.test
|
||||
)
|
||||
|
||||
return Promise.all(map(
|
||||
getAllRecordsMethods,
|
||||
method => this._sessionCall(method).then(
|
||||
return Promise.all(
|
||||
map(getAllRecordsMethods, method =>
|
||||
this._sessionCall(method).then(
|
||||
objects => {
|
||||
const type = method.slice(0, method.indexOf('.')).toLowerCase()
|
||||
forEach(objects, (object, ref) => {
|
||||
@ -921,21 +909,23 @@ export class Xapi extends EventEmitter {
|
||||
}
|
||||
}
|
||||
)
|
||||
))
|
||||
)
|
||||
)
|
||||
})
|
||||
}
|
||||
|
||||
const watchEvents = () => this._sessionCall('event.register', [ ['*'] ]).then(loop)
|
||||
const watchEvents = () =>
|
||||
this._sessionCall('event.register', [['*']]).then(loop)
|
||||
|
||||
const loop = () => this.status === CONNECTED && this._sessionCall('event.next').then(onSuccess, onFailure)
|
||||
const loop = () =>
|
||||
this.status === CONNECTED &&
|
||||
this._sessionCall('event.next').then(onSuccess, onFailure)
|
||||
|
||||
const onSuccess = events => {
|
||||
this._processEvents(events)
|
||||
|
||||
const debounce = this._debounce
|
||||
return debounce == null
|
||||
? loop()
|
||||
: pDelay(debounce).then(loop)
|
||||
return debounce == null ? loop() : pDelay(debounce).then(loop)
|
||||
}
|
||||
|
||||
const onFailure = error => {
|
||||
@ -950,7 +940,8 @@ export class Xapi extends EventEmitter {
|
||||
}
|
||||
}
|
||||
|
||||
Xapi.prototype._transportCall = reduce([
|
||||
Xapi.prototype._transportCall = reduce(
|
||||
[
|
||||
function (method, args) {
|
||||
return this._call(method, args).catch(error => {
|
||||
if (!(error instanceof Error)) {
|
||||
@ -961,12 +952,18 @@ Xapi.prototype._transportCall = reduce([
|
||||
throw error
|
||||
})
|
||||
},
|
||||
call => function () {
|
||||
call =>
|
||||
function () {
|
||||
let iterator // lazily created
|
||||
const loop = () => call.apply(this, arguments)
|
||||
const loop = () =>
|
||||
call
|
||||
.apply(this, arguments)
|
||||
::pCatch(isNetworkError, isXapiNetworkError, error => {
|
||||
if (iterator === undefined) {
|
||||
iterator = fibonacci().clamp(undefined, 60).take(10).toMs()
|
||||
iterator = fibonacci()
|
||||
.clamp(undefined, 60)
|
||||
.take(10)
|
||||
.toMs()
|
||||
}
|
||||
|
||||
const cursor = iterator.next()
|
||||
@ -975,7 +972,12 @@ Xapi.prototype._transportCall = reduce([
|
||||
// TODO: ability to force immediate reconnection
|
||||
|
||||
const delay = cursor.value
|
||||
debug('%s: network error %s, next try in %s ms', this._humanId, error.code, delay)
|
||||
debug(
|
||||
'%s: network error %s, next try in %s ms',
|
||||
this._humanId,
|
||||
error.code,
|
||||
delay
|
||||
)
|
||||
return pDelay(delay).then(loop)
|
||||
}
|
||||
|
||||
@ -988,10 +990,16 @@ Xapi.prototype._transportCall = reduce([
|
||||
})
|
||||
return loop()
|
||||
},
|
||||
call => function loop () {
|
||||
return call.apply(this, arguments)
|
||||
call =>
|
||||
function loop () {
|
||||
return call
|
||||
.apply(this, arguments)
|
||||
::pCatch(isHostSlave, ({ params: [master] }) => {
|
||||
debug('%s: host is slave, attempting to connect at %s', this._humanId, master)
|
||||
debug(
|
||||
'%s: host is slave, attempting to connect at %s',
|
||||
this._humanId,
|
||||
master
|
||||
)
|
||||
|
||||
const newUrl = {
|
||||
...this._url,
|
||||
@ -1003,7 +1011,8 @@ Xapi.prototype._transportCall = reduce([
|
||||
return loop.apply(this, arguments)
|
||||
})
|
||||
},
|
||||
call => function (method) {
|
||||
call =>
|
||||
function (method) {
|
||||
const startTime = Date.now()
|
||||
return call.apply(this, arguments).then(
|
||||
result => {
|
||||
@ -1028,7 +1037,9 @@ Xapi.prototype._transportCall = reduce([
|
||||
}
|
||||
)
|
||||
},
|
||||
], (call, decorator) => decorator(call))
|
||||
],
|
||||
(call, decorator) => decorator(call)
|
||||
)
|
||||
|
||||
// ===================================================================
|
||||
|
||||
|
@ -14,14 +14,17 @@ const xapi = (() => {
|
||||
})
|
||||
})()
|
||||
|
||||
xapi.connect()
|
||||
xapi
|
||||
.connect()
|
||||
|
||||
// Get the pool record's ref.
|
||||
.then(() => xapi.call('pool.get_all'))
|
||||
|
||||
// Injects lots of events.
|
||||
.then(([poolRef]) => {
|
||||
const loop = () => xapi.call('event.inject', 'pool', poolRef)
|
||||
const loop = () =>
|
||||
xapi
|
||||
.call('event.inject', 'pool', poolRef)
|
||||
::pDelay(10) // A small delay is required to avoid overloading the Xen API.
|
||||
.then(loop)
|
||||
|
||||
|
@ -14,18 +14,18 @@ export default opts => {
|
||||
const current = factories[i++](opts)
|
||||
if (i < length) {
|
||||
const currentI = i
|
||||
call = (method, args) => current(method, args).catch(
|
||||
error => {
|
||||
call = (method, args) =>
|
||||
current(method, args).catch(error => {
|
||||
if (error instanceof UnsupportedTransport) {
|
||||
if (currentI === i) { // not changed yet
|
||||
if (currentI === i) {
|
||||
// not changed yet
|
||||
create()
|
||||
}
|
||||
return call(method, args)
|
||||
}
|
||||
|
||||
throw error
|
||||
}
|
||||
)
|
||||
})
|
||||
} else {
|
||||
call = current
|
||||
}
|
||||
|
@ -4,15 +4,19 @@ import { format, parse } from 'json-rpc-protocol'
|
||||
import { UnsupportedTransport } from './_utils'
|
||||
|
||||
export default ({ allowUnauthorized, url }) => {
|
||||
return (method, args) => httpRequestPlus.post(url, {
|
||||
return (method, args) =>
|
||||
httpRequestPlus
|
||||
.post(url, {
|
||||
rejectUnauthorized: !allowUnauthorized,
|
||||
body: format.request(0, method, args),
|
||||
headers: {
|
||||
'Accept': 'application/json',
|
||||
Accept: 'application/json',
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
path: '/jsonrpc',
|
||||
}).readAll('utf8').then(
|
||||
})
|
||||
.readAll('utf8')
|
||||
.then(
|
||||
text => {
|
||||
let response
|
||||
try {
|
||||
@ -28,7 +32,8 @@ export default ({ allowUnauthorized, url }) => {
|
||||
throw response.error
|
||||
},
|
||||
error => {
|
||||
if (error.response !== undefined) { // HTTP error
|
||||
if (error.response !== undefined) {
|
||||
// HTTP error
|
||||
throw new UnsupportedTransport()
|
||||
}
|
||||
|
||||
|
@ -20,10 +20,7 @@ const SPECIAL_CHARS = {
|
||||
'\r': '\\r',
|
||||
'\t': '\\t',
|
||||
}
|
||||
const SPECIAL_CHARS_RE = new RegExp(
|
||||
Object.keys(SPECIAL_CHARS).join('|'),
|
||||
'g'
|
||||
)
|
||||
const SPECIAL_CHARS_RE = new RegExp(Object.keys(SPECIAL_CHARS).join('|'), 'g')
|
||||
|
||||
const parseResult = result => {
|
||||
const status = result.Status
|
||||
@ -78,11 +75,7 @@ export default ({
|
||||
allowUnauthorized,
|
||||
url: { hostname, path, port, protocol },
|
||||
}) => {
|
||||
const client = (
|
||||
protocol === 'https:'
|
||||
? createSecureClient
|
||||
: createClient
|
||||
)({
|
||||
const client = (protocol === 'https:' ? createSecureClient : createClient)({
|
||||
host: hostname,
|
||||
path: '/json',
|
||||
port,
|
||||
@ -90,8 +83,5 @@ export default ({
|
||||
})
|
||||
const call = promisify(client.methodCall, client)
|
||||
|
||||
return (method, args) => call(method, args).then(
|
||||
parseResult,
|
||||
logError
|
||||
)
|
||||
return (method, args) => call(method, args).then(parseResult, logError)
|
||||
}
|
||||
|
@ -34,19 +34,12 @@ export default ({
|
||||
allowUnauthorized,
|
||||
url: { hostname, path, port, protocol },
|
||||
}) => {
|
||||
const client = (
|
||||
protocol === 'https:'
|
||||
? createSecureClient
|
||||
: createClient
|
||||
)({
|
||||
const client = (protocol === 'https:' ? createSecureClient : createClient)({
|
||||
host: hostname,
|
||||
port,
|
||||
rejectUnauthorized: !allowUnauthorized,
|
||||
})
|
||||
const call = promisify(client.methodCall, client)
|
||||
|
||||
return (method, args) => call(method, args).then(
|
||||
parseResult,
|
||||
logError
|
||||
)
|
||||
return (method, args) => call(method, args).then(parseResult, logError)
|
||||
}
|
||||
|
@ -8,7 +8,8 @@ let getObject
|
||||
const authorized = () => true // eslint-disable-line no-unused-vars
|
||||
const forbiddden = () => false // eslint-disable-line no-unused-vars
|
||||
|
||||
const and = (...checkers) => (object, permission) => { // eslint-disable-line no-unused-vars
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
const and = (...checkers) => (object, permission) => {
|
||||
for (const checker of checkers) {
|
||||
if (!checker(object, permission)) {
|
||||
return false
|
||||
@ -17,7 +18,8 @@ const and = (...checkers) => (object, permission) => { // eslint-disable-line no
|
||||
return true
|
||||
}
|
||||
|
||||
const or = (...checkers) => (object, permission) => { // eslint-disable-line no-unused-vars
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
const or = (...checkers) => (object, permission) => {
|
||||
for (const checker of checkers) {
|
||||
if (checker(object, permission)) {
|
||||
return true
|
||||
@ -28,7 +30,7 @@ const or = (...checkers) => (object, permission) => { // eslint-disable-line no-
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
const checkMember = (memberName) => (object, permission) => {
|
||||
const checkMember = memberName => (object, permission) => {
|
||||
const member = object[memberName]
|
||||
return member !== object.id && checkAuthorization(member, permission)
|
||||
}
|
||||
@ -36,10 +38,7 @@ const checkMember = (memberName) => (object, permission) => {
|
||||
const checkSelf = ({ id }, permission) => {
|
||||
const permissionsForObject = permissionsByObject[id]
|
||||
|
||||
return (
|
||||
permissionsForObject &&
|
||||
permissionsForObject[permission]
|
||||
)
|
||||
return permissionsForObject && permissionsForObject[permission]
|
||||
}
|
||||
|
||||
// ===================================================================
|
||||
@ -102,12 +101,7 @@ function checkAuthorization (objectId, permission) {
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
export default (
|
||||
permissionsByObject_,
|
||||
getObject_,
|
||||
permissions,
|
||||
permission
|
||||
) => {
|
||||
export default (permissionsByObject_, getObject_, permissions, permission) => {
|
||||
// Assign global variables.
|
||||
permissionsByObject = permissionsByObject_
|
||||
getObject = getObject_
|
||||
|
@ -14,9 +14,7 @@ module.exports = {
|
||||
debug: !__TEST__,
|
||||
loose: true,
|
||||
shippedProposals: true,
|
||||
targets: __PROD__
|
||||
? { node: '6' }
|
||||
: { node: 'current' },
|
||||
targets: __PROD__ ? { node: '6' } : { node: 'current' },
|
||||
useBuiltIns: 'usage',
|
||||
},
|
||||
],
|
||||
|
@ -19,11 +19,13 @@ const configFile = configPath + '/config.json'
|
||||
|
||||
// ===================================================================
|
||||
|
||||
const load = exports.load = function () {
|
||||
return readFile(configFile).then(JSON.parse).catch(function () {
|
||||
const load = (exports.load = function () {
|
||||
return readFile(configFile)
|
||||
.then(JSON.parse)
|
||||
.catch(function () {
|
||||
return {}
|
||||
})
|
||||
}
|
||||
})
|
||||
|
||||
exports.get = function (path) {
|
||||
return load().then(function (config) {
|
||||
@ -31,11 +33,11 @@ exports.get = function (path) {
|
||||
})
|
||||
}
|
||||
|
||||
const save = exports.save = function (config) {
|
||||
const save = (exports.save = function (config) {
|
||||
return mkdirp(configPath).then(function () {
|
||||
return writeFile(configFile, JSON.stringify(config))
|
||||
})
|
||||
}
|
||||
})
|
||||
|
||||
exports.set = function (data) {
|
||||
return load().then(function (config) {
|
||||
|
@ -108,14 +108,16 @@ const humanFormatOpts = {
|
||||
|
||||
function printProgress (progress) {
|
||||
if (progress.length) {
|
||||
console.warn('%s% of %s @ %s/s - ETA %s',
|
||||
console.warn(
|
||||
'%s% of %s @ %s/s - ETA %s',
|
||||
Math.round(progress.percentage),
|
||||
humanFormat(progress.length, humanFormatOpts),
|
||||
humanFormat(progress.speed, humanFormatOpts),
|
||||
prettyMs(progress.eta * 1e3)
|
||||
)
|
||||
} else {
|
||||
console.warn('%s @ %s/s',
|
||||
console.warn(
|
||||
'%s @ %s/s',
|
||||
humanFormat(progress.transferred, humanFormatOpts),
|
||||
humanFormat(progress.speed, humanFormatOpts)
|
||||
)
|
||||
@ -130,8 +132,10 @@ function wrap (val) {
|
||||
|
||||
// ===================================================================
|
||||
|
||||
const help = wrap((function (pkg) {
|
||||
return require('strip-indent')(`
|
||||
const help = wrap(
|
||||
(function (pkg) {
|
||||
return require('strip-indent')(
|
||||
`
|
||||
Usage:
|
||||
|
||||
$name --register [--expiresIn duration] <XO-Server URL> <username> [<password>]
|
||||
@ -162,7 +166,8 @@ const help = wrap((function (pkg) {
|
||||
Executes a command on the current XO instance.
|
||||
|
||||
$name v$version
|
||||
`).replace(/<([^>]+)>|\$(\w+)/g, function (_, arg, key) {
|
||||
`
|
||||
).replace(/<([^>]+)>|\$(\w+)/g, function (_, arg, key) {
|
||||
if (arg) {
|
||||
return '<' + chalk.yellow(arg) + '>'
|
||||
}
|
||||
@ -173,7 +178,8 @@ const help = wrap((function (pkg) {
|
||||
|
||||
return pkg[key]
|
||||
})
|
||||
})(require('../package')))
|
||||
})(require('../package'))
|
||||
)
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
@ -230,10 +236,7 @@ async function register (args) {
|
||||
exports.register = register
|
||||
|
||||
function unregister () {
|
||||
return config.unset([
|
||||
'server',
|
||||
'token',
|
||||
])
|
||||
return config.unset(['server', 'token'])
|
||||
}
|
||||
exports.unregister = unregister
|
||||
|
||||
@ -284,11 +287,7 @@ async function listCommands (args) {
|
||||
str.push(
|
||||
name,
|
||||
'=<',
|
||||
type == null
|
||||
? 'unknown type'
|
||||
: isArray(type)
|
||||
? type.join('|')
|
||||
: type,
|
||||
type == null ? 'unknown type' : isArray(type) ? type.join('|') : type,
|
||||
'>'
|
||||
)
|
||||
|
||||
@ -347,10 +346,7 @@ async function call (args) {
|
||||
|
||||
const result = await xo.call(method, params)
|
||||
let keys, key, url
|
||||
if (
|
||||
isObject(result) &&
|
||||
(keys = getKeys(result)).length === 1
|
||||
) {
|
||||
if (isObject(result) && (keys = getKeys(result)).length === 1) {
|
||||
key = keys[0]
|
||||
|
||||
if (key === '$getFrom') {
|
||||
@ -359,7 +355,8 @@ async function call (args) {
|
||||
|
||||
const progress = progressStream({ time: 1e3 }, printProgress)
|
||||
|
||||
return eventToPromise(nicePipe([
|
||||
return eventToPromise(
|
||||
nicePipe([
|
||||
got.stream(url).on('response', function (response) {
|
||||
const length = response.headers['content-length']
|
||||
if (length !== undefined) {
|
||||
@ -368,7 +365,9 @@ async function call (args) {
|
||||
}),
|
||||
progress,
|
||||
output,
|
||||
]), 'finish')
|
||||
]),
|
||||
'finish'
|
||||
)
|
||||
}
|
||||
|
||||
if (key === '$sendTo') {
|
||||
@ -379,10 +378,13 @@ async function call (args) {
|
||||
|
||||
const input = nicePipe([
|
||||
createReadStream(file),
|
||||
progressStream({
|
||||
progressStream(
|
||||
{
|
||||
length: length,
|
||||
time: 1e3,
|
||||
}, printProgress),
|
||||
},
|
||||
printProgress
|
||||
),
|
||||
])
|
||||
|
||||
const response = await got.post(url, {
|
||||
|
@ -8,10 +8,7 @@ import isObject from './is-object'
|
||||
|
||||
// ===================================================================
|
||||
|
||||
const {
|
||||
create: createObject,
|
||||
prototype: { hasOwnProperty },
|
||||
} = Object
|
||||
const { create: createObject, prototype: { hasOwnProperty } } = Object
|
||||
|
||||
export const ACTION_ADD = 'add'
|
||||
export const ACTION_UPDATE = 'update'
|
||||
@ -354,7 +351,8 @@ export default class Collection extends EventEmitter {
|
||||
} else {
|
||||
this._buffer[key] = ACTION_REMOVE
|
||||
}
|
||||
} else { // update
|
||||
} else {
|
||||
// update
|
||||
if (!this._buffer[key]) {
|
||||
this._buffer[key] = ACTION_UPDATE
|
||||
}
|
||||
|
@ -11,7 +11,7 @@ function waitTicks (n = 2) {
|
||||
const { nextTick } = process
|
||||
|
||||
return new Promise(function (resolve) {
|
||||
(function waitNextTick () {
|
||||
;(function waitNextTick () {
|
||||
// The first tick is handled by Promise#then()
|
||||
if (--n) {
|
||||
nextTick(waitNextTick)
|
||||
@ -235,7 +235,7 @@ describe('Collection', function () {
|
||||
return waitTicks().then(() => {
|
||||
col.touch(foo)
|
||||
|
||||
return eventToPromise(col, 'update', (items) => {
|
||||
return eventToPromise(col, 'update', items => {
|
||||
expect(Object.keys(items)).toEqual(['foo'])
|
||||
expect(items.foo).toBe(foo)
|
||||
})
|
||||
@ -249,7 +249,7 @@ describe('Collection', function () {
|
||||
|
||||
expect(col.size).toBe(0)
|
||||
|
||||
return eventToPromise(col, 'remove').then((items) => {
|
||||
return eventToPromise(col, 'remove').then(items => {
|
||||
expect(Object.keys(items)).toEqual(['bar'])
|
||||
expect(items.bar).toBeUndefined()
|
||||
})
|
||||
@ -257,12 +257,10 @@ describe('Collection', function () {
|
||||
})
|
||||
|
||||
describe('deduplicates events', function () {
|
||||
forEach({
|
||||
forEach(
|
||||
{
|
||||
'add & update → add': [
|
||||
[
|
||||
['add', 'foo', 0],
|
||||
['update', 'foo', 1],
|
||||
],
|
||||
[['add', 'foo', 0], ['update', 'foo', 1]],
|
||||
{
|
||||
add: {
|
||||
foo: 1,
|
||||
@ -270,19 +268,10 @@ describe('Collection', function () {
|
||||
},
|
||||
],
|
||||
|
||||
'add & remove → ∅': [
|
||||
[
|
||||
['add', 'foo', 0],
|
||||
['remove', 'foo'],
|
||||
],
|
||||
{},
|
||||
],
|
||||
'add & remove → ∅': [[['add', 'foo', 0], ['remove', 'foo']], {}],
|
||||
|
||||
'update & update → update': [
|
||||
[
|
||||
['update', 'bar', 1],
|
||||
['update', 'bar', 2],
|
||||
],
|
||||
[['update', 'bar', 1], ['update', 'bar', 2]],
|
||||
{
|
||||
update: {
|
||||
bar: 2,
|
||||
@ -291,10 +280,7 @@ describe('Collection', function () {
|
||||
],
|
||||
|
||||
'update & remove → remove': [
|
||||
[
|
||||
['update', 'bar', 1],
|
||||
['remove', 'bar'],
|
||||
],
|
||||
[['update', 'bar', 1], ['remove', 'bar']],
|
||||
{
|
||||
remove: {
|
||||
bar: undefined,
|
||||
@ -303,17 +289,15 @@ describe('Collection', function () {
|
||||
],
|
||||
|
||||
'remove & add → update': [
|
||||
[
|
||||
['remove', 'bar'],
|
||||
['add', 'bar', 0],
|
||||
],
|
||||
[['remove', 'bar'], ['add', 'bar', 0]],
|
||||
{
|
||||
update: {
|
||||
bar: 0,
|
||||
},
|
||||
},
|
||||
],
|
||||
}, ([operations, results], label) => {
|
||||
},
|
||||
([operations, results], label) => {
|
||||
it(label, function () {
|
||||
forEach(operations, ([method, ...args]) => {
|
||||
col[method](...args)
|
||||
@ -335,6 +319,7 @@ describe('Collection', function () {
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
}
|
||||
)
|
||||
})
|
||||
})
|
||||
|
@ -3,11 +3,7 @@ import { bind, iteratee } from 'lodash'
|
||||
import clearObject from './clear-object'
|
||||
import isEmpty from './is-empty'
|
||||
import NotImplemented from './not-implemented'
|
||||
import {
|
||||
ACTION_ADD,
|
||||
ACTION_UPDATE,
|
||||
ACTION_REMOVE,
|
||||
} from './collection'
|
||||
import { ACTION_ADD, ACTION_UPDATE, ACTION_REMOVE } from './collection'
|
||||
|
||||
// ===================================================================
|
||||
|
||||
@ -86,14 +82,11 @@ export default class Index {
|
||||
const hash = computeHash(value, key)
|
||||
|
||||
if (hash != null) {
|
||||
(
|
||||
itemsByHash[hash] ||
|
||||
|
||||
;(itemsByHash[hash] ||
|
||||
// FIXME: We do not use objects without prototype for now
|
||||
// because it breaks Angular in xo-web, change it back when
|
||||
// this is fixed.
|
||||
(itemsByHash[hash] = {})
|
||||
)[key] = value
|
||||
(itemsByHash[hash] = {}))[key] = value
|
||||
|
||||
keysToHash[key] = hash
|
||||
}
|
||||
@ -118,12 +111,9 @@ export default class Index {
|
||||
|
||||
// Inserts item into the new hash's list if any.
|
||||
if (hash != null) {
|
||||
(
|
||||
itemsByHash[hash] ||
|
||||
|
||||
;(itemsByHash[hash] ||
|
||||
// FIXME: idem: change back to Object.create(null)
|
||||
(itemsByHash[hash] = {})
|
||||
)[key] = value
|
||||
(itemsByHash[hash] = {}))[key] = value
|
||||
|
||||
keysToHash[key] = hash
|
||||
} else {
|
||||
@ -133,10 +123,7 @@ export default class Index {
|
||||
}
|
||||
|
||||
_onRemove (items) {
|
||||
const {
|
||||
_itemsByHash: itemsByHash,
|
||||
_keysToHash: keysToHash,
|
||||
} = this
|
||||
const { _itemsByHash: itemsByHash, _keysToHash: keysToHash } = this
|
||||
|
||||
for (const key in items) {
|
||||
const prev = keysToHash[key]
|
||||
|
@ -12,7 +12,7 @@ const waitTicks = (n = 2) => {
|
||||
const { nextTick } = process
|
||||
|
||||
return new Promise(resolve => {
|
||||
(function waitNextTick () {
|
||||
;(function waitNextTick () {
|
||||
// The first tick is handled by Promise#then()
|
||||
if (--n) {
|
||||
nextTick(waitNextTick)
|
||||
|
@ -1,3 +1,3 @@
|
||||
export default function isObject (value) {
|
||||
return (value !== null) && (typeof value === 'object')
|
||||
return value !== null && typeof value === 'object'
|
||||
}
|
||||
|
@ -2,11 +2,7 @@ import { bind, iteratee } from 'lodash'
|
||||
|
||||
import clearObject from './clear-object'
|
||||
import NotImplemented from './not-implemented'
|
||||
import {
|
||||
ACTION_ADD,
|
||||
ACTION_UPDATE,
|
||||
ACTION_REMOVE,
|
||||
} from './collection'
|
||||
import { ACTION_ADD, ACTION_UPDATE, ACTION_REMOVE } from './collection'
|
||||
|
||||
// ===================================================================
|
||||
|
||||
@ -108,10 +104,7 @@ export default class UniqueIndex {
|
||||
}
|
||||
|
||||
_onRemove (items) {
|
||||
const {
|
||||
_itemByHash: itemByHash,
|
||||
_keysToHash: keysToHash,
|
||||
} = this
|
||||
const { _itemByHash: itemByHash, _keysToHash: keysToHash } = this
|
||||
|
||||
for (const key in items) {
|
||||
const prev = keysToHash[key]
|
||||
|
@ -12,7 +12,7 @@ const waitTicks = (n = 2) => {
|
||||
const { nextTick } = process
|
||||
|
||||
return new Promise(resolve => {
|
||||
(function waitNextTick () {
|
||||
;(function waitNextTick () {
|
||||
// The first tick is handled by Promise#then()
|
||||
if (--n) {
|
||||
nextTick(waitNextTick)
|
||||
|
@ -7,7 +7,7 @@ import View from './view'
|
||||
|
||||
// Create the collection.
|
||||
const users = new Collection()
|
||||
users.getKey = (user) => user.name
|
||||
users.getKey = user => user.name
|
||||
|
||||
// Inserts some data.
|
||||
users.add({
|
||||
|
@ -10,36 +10,53 @@ const xo = new Xo({
|
||||
url: 'localhost:9000',
|
||||
})
|
||||
|
||||
xo.open().then(function () {
|
||||
return xo.call('acl.get', {}).then(function (result) {
|
||||
xo
|
||||
.open()
|
||||
.then(function () {
|
||||
return xo
|
||||
.call('acl.get', {})
|
||||
.then(function (result) {
|
||||
console.log('success:', result)
|
||||
}).catch(function (error) {
|
||||
})
|
||||
.catch(function (error) {
|
||||
console.log('failure:', error)
|
||||
})
|
||||
}).then(function () {
|
||||
return xo.signIn({
|
||||
})
|
||||
.then(function () {
|
||||
return xo
|
||||
.signIn({
|
||||
email: 'admin@admin.net',
|
||||
password: 'admin',
|
||||
}).then(function () {
|
||||
})
|
||||
.then(function () {
|
||||
console.log('connected as ', xo.user)
|
||||
}).catch(function (error) {
|
||||
})
|
||||
.catch(function (error) {
|
||||
console.log('failure:', error)
|
||||
})
|
||||
}).then(function () {
|
||||
return xo.signIn({
|
||||
})
|
||||
.then(function () {
|
||||
return xo
|
||||
.signIn({
|
||||
email: 'tom',
|
||||
password: 'tom',
|
||||
}).then(function () {
|
||||
})
|
||||
.then(function () {
|
||||
console.log('connected as', xo.user)
|
||||
|
||||
return xo.call('acl.get', {}).then(function (result) {
|
||||
return xo
|
||||
.call('acl.get', {})
|
||||
.then(function (result) {
|
||||
console.log('success:', result)
|
||||
}).catch(function (error) {
|
||||
})
|
||||
.catch(function (error) {
|
||||
console.log('failure:', error)
|
||||
})
|
||||
}).catch(function (error) {
|
||||
})
|
||||
.catch(function (error) {
|
||||
console.log('failure', error)
|
||||
})
|
||||
}).then(function () {
|
||||
})
|
||||
.then(function () {
|
||||
return xo.close()
|
||||
})
|
||||
|
@ -1,7 +1,4 @@
|
||||
import JsonRpcWebSocketClient, {
|
||||
OPEN,
|
||||
CLOSED,
|
||||
} from 'jsonrpc-websocket-client'
|
||||
import JsonRpcWebSocketClient, { OPEN, CLOSED } from 'jsonrpc-websocket-client'
|
||||
import { BaseError } from 'make-error'
|
||||
import { startsWith } from 'lodash'
|
||||
|
||||
@ -20,7 +17,7 @@ export default class Xo extends JsonRpcWebSocketClient {
|
||||
const url = opts != null ? opts.url : '.'
|
||||
super(`${url === '/' ? '' : url}/api/`)
|
||||
|
||||
this._credentials = (opts != null ? opts.credentials : null)
|
||||
this._credentials = opts != null ? opts.credentials : null
|
||||
this._user = null
|
||||
|
||||
this.on(OPEN, () => {
|
||||
@ -45,7 +42,8 @@ export default class Xo extends JsonRpcWebSocketClient {
|
||||
}
|
||||
|
||||
const promise = super.call(method, args)
|
||||
promise.retry = (predicate) => promise.catch((error) => {
|
||||
promise.retry = predicate =>
|
||||
promise.catch(error => {
|
||||
i = (i || 0) + 1
|
||||
if (predicate(error, i)) {
|
||||
return this.call(method, args, i)
|
||||
|
@ -3,7 +3,10 @@ import map from 'lodash/map'
|
||||
import trim from 'lodash/trim'
|
||||
import trimStart from 'lodash/trimStart'
|
||||
|
||||
const sanitizePath = (...paths) => filter(map(paths, s => s && filter(map(s.split('/'), trim)).join('/'))).join('/')
|
||||
const sanitizePath = (...paths) =>
|
||||
filter(map(paths, s => s && filter(map(s.split('/'), trim)).join('/'))).join(
|
||||
'/'
|
||||
)
|
||||
|
||||
export const parse = string => {
|
||||
const object = {}
|
||||
|
@ -29,13 +29,18 @@ class AuthGitHubXoPlugin {
|
||||
load () {
|
||||
const { _xo: xo } = this
|
||||
|
||||
xo.registerPassportStrategy(new Strategy(this._conf, async (accessToken, refreshToken, profile, done) => {
|
||||
xo.registerPassportStrategy(
|
||||
new Strategy(
|
||||
this._conf,
|
||||
async (accessToken, refreshToken, profile, done) => {
|
||||
try {
|
||||
done(null, await xo.registerUser('github', profile.username))
|
||||
} catch (error) {
|
||||
done(error.message)
|
||||
}
|
||||
}))
|
||||
}
|
||||
)
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -7,7 +7,8 @@ export const configurationSchema = {
|
||||
properties: {
|
||||
callbackURL: {
|
||||
type: 'string',
|
||||
description: 'Must be exactly the same as specified on the Google developer console.',
|
||||
description:
|
||||
'Must be exactly the same as specified on the Google developer console.',
|
||||
},
|
||||
clientID: {
|
||||
type: 'string',
|
||||
@ -41,18 +42,23 @@ class AuthGoogleXoPlugin {
|
||||
const conf = this._conf
|
||||
const xo = this._xo
|
||||
|
||||
xo.registerPassportStrategy(new Strategy(conf, async (accessToken, refreshToken, profile, done) => {
|
||||
xo.registerPassportStrategy(
|
||||
new Strategy(conf, async (accessToken, refreshToken, profile, done) => {
|
||||
try {
|
||||
done(null, await xo.registerUser(
|
||||
done(
|
||||
null,
|
||||
await xo.registerUser(
|
||||
'google',
|
||||
conf.scope === 'email'
|
||||
? profile.emails[0].value
|
||||
: profile.displayName
|
||||
))
|
||||
)
|
||||
)
|
||||
} catch (error) {
|
||||
done(error.message)
|
||||
}
|
||||
}))
|
||||
})
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -10,7 +10,8 @@ import { readFile } from 'fs'
|
||||
// ===================================================================
|
||||
|
||||
const VAR_RE = /\{\{([^}]+)\}\}/g
|
||||
const evalFilter = (filter, vars) => filter.replace(VAR_RE, (_, name) => {
|
||||
const evalFilter = (filter, vars) =>
|
||||
filter.replace(VAR_RE, (_, name) => {
|
||||
const value = vars[name]
|
||||
|
||||
if (value === undefined) {
|
||||
@ -39,7 +40,8 @@ If not specified, it will use a default set of well-known CAs.
|
||||
},
|
||||
},
|
||||
checkCertificate: {
|
||||
description: 'Enforce the validity of the server\'s certificates. You can disable it when connecting to servers that use a self-signed certificate.',
|
||||
description:
|
||||
"Enforce the validity of the server's certificates. You can disable it when connecting to servers that use a self-signed certificate.",
|
||||
type: 'boolean',
|
||||
default: true,
|
||||
},
|
||||
@ -58,14 +60,16 @@ For Microsoft Active Directory, it can also be \`<user>@<domain>\`.
|
||||
type: 'string',
|
||||
},
|
||||
password: {
|
||||
description: 'Password of the user permitted of search the LDAP directory.',
|
||||
description:
|
||||
'Password of the user permitted of search the LDAP directory.',
|
||||
type: 'string',
|
||||
},
|
||||
},
|
||||
required: ['dn', 'password'],
|
||||
},
|
||||
base: {
|
||||
description: 'The base is the part of the description tree where the users are looked for.',
|
||||
description:
|
||||
'The base is the part of the description tree where the users are looked for.',
|
||||
type: 'string',
|
||||
},
|
||||
filter: {
|
||||
@ -116,18 +120,14 @@ class AuthLdap {
|
||||
}
|
||||
|
||||
async configure (conf) {
|
||||
const clientOpts = this._clientOpts = {
|
||||
const clientOpts = (this._clientOpts = {
|
||||
url: conf.uri,
|
||||
maxConnections: 5,
|
||||
tlsOptions: {},
|
||||
}
|
||||
})
|
||||
|
||||
{
|
||||
const {
|
||||
bind,
|
||||
checkCertificate = true,
|
||||
certificateAuthorities,
|
||||
} = conf
|
||||
const { bind, checkCertificate = true, certificateAuthorities } = conf
|
||||
|
||||
if (bind) {
|
||||
clientOpts.bindDN = bind.dn
|
||||
@ -229,7 +229,11 @@ class AuthLdap {
|
||||
try {
|
||||
logger(`attempting to bind as ${entry.objectName}`)
|
||||
await bind(entry.objectName, password)
|
||||
logger(`successfully bound as ${entry.objectName} => ${username} authenticated`)
|
||||
logger(
|
||||
`successfully bound as ${
|
||||
entry.objectName
|
||||
} => ${username} authenticated`
|
||||
)
|
||||
return { username }
|
||||
} catch (error) {
|
||||
logger(`failed to bind as ${entry.objectName}: ${error.message}`)
|
||||
|
@ -8,20 +8,26 @@ const EMPTY_OBJECT = Object.freeze({ __proto__: null })
|
||||
|
||||
const _extractValue = ({ value }) => value
|
||||
|
||||
export const confirm = (message, {
|
||||
default: defaultValue = null,
|
||||
} = EMPTY_OBJECT) => prompt({
|
||||
export const confirm = (
|
||||
message,
|
||||
{ default: defaultValue = null } = EMPTY_OBJECT
|
||||
) =>
|
||||
prompt({
|
||||
default: defaultValue,
|
||||
message,
|
||||
name: 'value',
|
||||
type: 'confirm',
|
||||
}).then(_extractValue)
|
||||
|
||||
export const input = (message, {
|
||||
export const input = (
|
||||
message,
|
||||
{
|
||||
default: defaultValue = null,
|
||||
filter = undefined,
|
||||
validate = undefined,
|
||||
} = EMPTY_OBJECT) => prompt({
|
||||
} = EMPTY_OBJECT
|
||||
) =>
|
||||
prompt({
|
||||
default: defaultValue,
|
||||
message,
|
||||
name: 'value',
|
||||
@ -29,9 +35,12 @@ export const input = (message, {
|
||||
validate,
|
||||
}).then(_extractValue)
|
||||
|
||||
export const list = (message, choices, {
|
||||
default: defaultValue = null,
|
||||
} = EMPTY_OBJECT) => prompt({
|
||||
export const list = (
|
||||
message,
|
||||
choices,
|
||||
{ default: defaultValue = null } = EMPTY_OBJECT
|
||||
) =>
|
||||
prompt({
|
||||
default: defaultValue,
|
||||
choices,
|
||||
message,
|
||||
@ -39,11 +48,15 @@ export const list = (message, choices, {
|
||||
type: 'list',
|
||||
}).then(_extractValue)
|
||||
|
||||
export const password = (message, {
|
||||
export const password = (
|
||||
message,
|
||||
{
|
||||
default: defaultValue = null,
|
||||
filter = undefined,
|
||||
validate = undefined,
|
||||
} = EMPTY_OBJECT) => prompt({
|
||||
} = EMPTY_OBJECT
|
||||
) =>
|
||||
prompt({
|
||||
default: defaultValue,
|
||||
message,
|
||||
name: 'value',
|
||||
@ -69,25 +82,25 @@ const promptByType = {
|
||||
items[i] = await promptGeneric(
|
||||
itemSchema,
|
||||
defaultValue[i],
|
||||
path
|
||||
? `${path} [${i}]`
|
||||
: `[${i}]`
|
||||
path ? `${path} [${i}]` : `[${i}]`
|
||||
)
|
||||
|
||||
++i
|
||||
}
|
||||
|
||||
let n = schema.minItems || 0
|
||||
while (i < n) { // eslint-disable-line no-unmodified-loop-condition
|
||||
// eslint-disable-next-line no-unmodified-loop-condition
|
||||
while (i < n) {
|
||||
await promptItem()
|
||||
}
|
||||
|
||||
n = schema.maxItems || Infinity
|
||||
while (
|
||||
i < n && // eslint-disable-line no-unmodified-loop-condition
|
||||
await confirm('additional item?', {
|
||||
// eslint-disable-next-line no-unmodified-loop-condition
|
||||
i < n &&
|
||||
(await confirm('additional item?', {
|
||||
default: false,
|
||||
})
|
||||
}))
|
||||
) {
|
||||
await promptItem()
|
||||
}
|
||||
@ -95,21 +108,25 @@ const promptByType = {
|
||||
return items
|
||||
},
|
||||
|
||||
boolean: (schema, defaultValue, path) => confirm(path, {
|
||||
boolean: (schema, defaultValue, path) =>
|
||||
confirm(path, {
|
||||
default: defaultValue != null ? defaultValue : schema.default,
|
||||
}),
|
||||
|
||||
enum: (schema, defaultValue, path) => list(path, schema.enum, {
|
||||
enum: (schema, defaultValue, path) =>
|
||||
list(path, schema.enum, {
|
||||
defaultValue: defaultValue || schema.defaultValue,
|
||||
}),
|
||||
|
||||
integer: (schema, defaultValue, path) => input(path, {
|
||||
integer: (schema, defaultValue, path) =>
|
||||
input(path, {
|
||||
default: defaultValue || schema.default,
|
||||
filter: input => +input,
|
||||
validate: input => isInteger(+input),
|
||||
}),
|
||||
|
||||
number: (schema, defaultValue, path) => input(path, {
|
||||
number: (schema, defaultValue, path) =>
|
||||
input(path, {
|
||||
default: defaultValue || schema.default,
|
||||
filter: input => +input,
|
||||
validate: input => isFinite(+input),
|
||||
@ -119,7 +136,8 @@ const promptByType = {
|
||||
const value = {}
|
||||
|
||||
const required = {}
|
||||
schema.required && forEach(schema.required, name => {
|
||||
schema.required &&
|
||||
forEach(schema.required, name => {
|
||||
required[name] = true
|
||||
})
|
||||
|
||||
@ -130,9 +148,9 @@ const promptByType = {
|
||||
|
||||
if (
|
||||
required[name] ||
|
||||
await confirm(`fill optional ${subpath}?`, {
|
||||
(await confirm(`fill optional ${subpath}?`, {
|
||||
default: Boolean(defaultValue && name in defaultValue),
|
||||
})
|
||||
}))
|
||||
) {
|
||||
value[name] = await promptGeneric(
|
||||
schema,
|
||||
@ -147,15 +165,14 @@ const promptByType = {
|
||||
return value
|
||||
},
|
||||
|
||||
string: (schema, defaultValue, path) => input(path, {
|
||||
string: (schema, defaultValue, path) =>
|
||||
input(path, {
|
||||
default: defaultValue || schema.default,
|
||||
}),
|
||||
}
|
||||
|
||||
export default function promptGeneric (schema, defaultValue, path) {
|
||||
const type = schema.enum
|
||||
? 'enum'
|
||||
: schema.type
|
||||
const type = schema.enum ? 'enum' : schema.type
|
||||
|
||||
const prompt = promptByType[type.toLowerCase()]
|
||||
if (!prompt) {
|
||||
|
@ -5,13 +5,8 @@ import { bind } from 'lodash'
|
||||
import { fromCallback } from 'promise-toolbox'
|
||||
import { readFile, writeFile } from 'fs'
|
||||
|
||||
import promptSchema, {
|
||||
input,
|
||||
password,
|
||||
} from './prompt-schema'
|
||||
import createPlugin, {
|
||||
configurationSchema,
|
||||
} from './'
|
||||
import promptSchema, { input, password } from './prompt-schema'
|
||||
import createPlugin, { configurationSchema } from './'
|
||||
|
||||
// ===================================================================
|
||||
|
||||
@ -27,7 +22,9 @@ execPromise(async args => {
|
||||
() => ({})
|
||||
)
|
||||
)
|
||||
await fromCallback(cb => writeFile(CACHE_FILE, JSON.stringify(config, null, 2), cb)).then(
|
||||
await fromCallback(cb =>
|
||||
writeFile(CACHE_FILE, JSON.stringify(config, null, 2), cb)
|
||||
).then(
|
||||
() => {
|
||||
console.log('configuration saved in %s', CACHE_FILE)
|
||||
},
|
||||
@ -40,10 +37,13 @@ execPromise(async args => {
|
||||
const plugin = createPlugin({})
|
||||
await plugin.configure(config)
|
||||
|
||||
await plugin._authenticate({
|
||||
await plugin._authenticate(
|
||||
{
|
||||
username: await input('Username', {
|
||||
validate: input => !!input.length,
|
||||
}),
|
||||
password: await password('Password'),
|
||||
}, bind(console.log, console))
|
||||
},
|
||||
bind(console.log, console)
|
||||
)
|
||||
})
|
||||
|
@ -38,7 +38,8 @@ class AuthSamlXoPlugin {
|
||||
load () {
|
||||
const xo = this._xo
|
||||
|
||||
xo.registerPassportStrategy(new Strategy(this._conf, async (profile, done) => {
|
||||
xo.registerPassportStrategy(
|
||||
new Strategy(this._conf, async (profile, done) => {
|
||||
const name = profile[this._usernameField]
|
||||
if (!name) {
|
||||
done('no name found for this user')
|
||||
@ -50,7 +51,8 @@ class AuthSamlXoPlugin {
|
||||
} catch (error) {
|
||||
done(error.message)
|
||||
}
|
||||
}))
|
||||
})
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -37,15 +37,17 @@ const ICON_FAILURE = '🚨'
const ICON_SUCCESS = '✔'

const DATE_FORMAT = 'dddd, MMMM Do YYYY, h:mm:ss a'
const createDateFormater = timezone => timezone !== undefined
? timestamp => moment(timestamp).tz(timezone).format(DATE_FORMAT)
const createDateFormater = timezone =>
timezone !== undefined
? timestamp =>
moment(timestamp)
.tz(timezone)
.format(DATE_FORMAT)
: timestamp => moment(timestamp).format(DATE_FORMAT)

const formatDuration = milliseconds =>
moment.duration(milliseconds).humanize()
const formatDuration = milliseconds => moment.duration(milliseconds).humanize()

const formatMethod = method =>
startCase(method.slice(method.indexOf('.') + 1))
const formatMethod = method => startCase(method.slice(method.indexOf('.') + 1))

const formatSize = bytes =>
humanFormat(bytes, {
@ -83,7 +85,9 @@ class BackupReportsXoPlugin {
}

_wrapper (status) {
return new Promise(resolve => resolve(this._listener(status))).catch(logError)
return new Promise(resolve => resolve(this._listener(status))).catch(
logError
)
}

_listener (status) {
@ -114,8 +118,7 @@ class BackupReportsXoPlugin {
}

const reportOnFailure =
reportWhen === 'fail' || // xo-web < 5
reportWhen === 'failure' // xo-web >= 5
reportWhen === 'fail' || reportWhen === 'failure' // xo-web < 5 // xo-web >= 5

let globalMergeSize = 0
let globalTransferSize = 0
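createDateFormater above returns a formatter closed over the optional timezone, so every timestamp in one report is rendered the same way. A short sketch of how it would be called (moment/moment-timezone are the libraries this plugin already uses; the timezone string is only an example):

const formatDate = createDateFormater('Europe/Paris')
formatDate(Date.now()) // e.g. 'Sunday, January 7th 2018, 3:04:05 pm'

const formatLocalDate = createDateFormater(undefined)
formatLocalDate(Date.now()) // same format, server-local time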
@ -152,11 +155,7 @@ class BackupReportsXoPlugin {
|
||||
|
||||
const { message } = error
|
||||
|
||||
failedBackupsText.push(
|
||||
...text,
|
||||
`- **Error**: ${message}`,
|
||||
''
|
||||
)
|
||||
failedBackupsText.push(...text, `- **Error**: ${message}`, '')
|
||||
|
||||
nagiosText.push(
|
||||
`[ ${vm !== undefined ? vm.name_label : 'undefined'} : ${message} ]`
|
||||
@ -169,22 +168,25 @@ class BackupReportsXoPlugin {
|
||||
globalTransferSize += transferSize
|
||||
text.push(
|
||||
`- **Transfer size**: ${formatSize(transferSize)}`,
|
||||
`- **Transfer speed**: ${formatSpeed(transferSize, returnedValue.transferDuration)}`
|
||||
`- **Transfer speed**: ${formatSpeed(
|
||||
transferSize,
|
||||
returnedValue.transferDuration
|
||||
)}`
|
||||
)
|
||||
}
|
||||
if (mergeSize !== undefined) {
|
||||
globalMergeSize += mergeSize
|
||||
text.push(
|
||||
`- **Merge size**: ${formatSize(mergeSize)}`,
|
||||
`- **Merge speed**: ${formatSpeed(mergeSize, returnedValue.mergeDuration)}`
|
||||
`- **Merge speed**: ${formatSpeed(
|
||||
mergeSize,
|
||||
returnedValue.mergeDuration
|
||||
)}`
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
successfulBackupText.push(
|
||||
...text,
|
||||
''
|
||||
)
|
||||
successfulBackupText.push(...text, '')
|
||||
}
|
||||
})
|
||||
|
||||
@ -208,14 +210,10 @@ class BackupReportsXoPlugin {
|
||||
`- **Successes**: ${nSuccesses} / ${nCalls}`,
|
||||
]
|
||||
if (globalTransferSize !== 0) {
|
||||
markdown.push(
|
||||
`- **Transfer size**: ${formatSize(globalTransferSize)}`
|
||||
)
|
||||
markdown.push(`- **Transfer size**: ${formatSize(globalTransferSize)}`)
|
||||
}
|
||||
if (globalMergeSize !== 0) {
|
||||
markdown.push(
|
||||
`- **Merge size**: ${formatSize(globalMergeSize)}`
|
||||
)
|
||||
markdown.push(`- **Merge size**: ${formatSize(globalMergeSize)}`)
|
||||
}
|
||||
markdown.push('')
|
||||
|
||||
@ -239,17 +237,14 @@ class BackupReportsXoPlugin {
|
||||
)
|
||||
}
|
||||
|
||||
markdown.push(
|
||||
'---',
|
||||
'',
|
||||
`*${pkg.name} v${pkg.version}*`
|
||||
)
|
||||
markdown.push('---', '', `*${pkg.name} v${pkg.version}*`)
|
||||
|
||||
markdown = markdown.join('\n')
|
||||
|
||||
const xo = this._xo
|
||||
return Promise.all([
|
||||
xo.sendEmail !== undefined && xo.sendEmail({
|
||||
xo.sendEmail !== undefined &&
|
||||
xo.sendEmail({
|
||||
to: this._mailsReceivers,
|
||||
subject: `[Xen Orchestra] ${
|
||||
globalSuccess ? 'Success' : 'Failure'
|
||||
@ -258,18 +253,23 @@ class BackupReportsXoPlugin {
|
||||
}`,
|
||||
markdown,
|
||||
}),
|
||||
xo.sendToXmppClient !== undefined && xo.sendToXmppClient({
|
||||
xo.sendToXmppClient !== undefined &&
|
||||
xo.sendToXmppClient({
|
||||
to: this._xmppReceivers,
|
||||
message: markdown,
|
||||
}),
|
||||
xo.sendSlackMessage !== undefined && xo.sendSlackMessage({
|
||||
xo.sendSlackMessage !== undefined &&
|
||||
xo.sendSlackMessage({
|
||||
message: markdown,
|
||||
}),
|
||||
xo.sendPassiveCheck !== undefined && xo.sendPassiveCheck({
|
||||
xo.sendPassiveCheck !== undefined &&
|
||||
xo.sendPassiveCheck({
|
||||
status: globalSuccess ? 0 : 2,
|
||||
message: globalSuccess
|
||||
? `[Xen Orchestra] [Success] Backup report for ${tag}`
|
||||
: `[Xen Orchestra] [Failure] Backup report for ${tag} - VMs : ${nagiosText.join(' ')}`,
|
||||
: `[Xen Orchestra] [Failure] Backup report for ${tag} - VMs : ${nagiosText.join(
|
||||
' '
|
||||
)}`,
|
||||
}),
|
||||
])
|
||||
}
|
||||
|
@ -27,7 +27,8 @@ class XoServerCloud {
|
||||
getResourceCatalog.description = 'Get the list of all available resources'
|
||||
getResourceCatalog.permission = 'admin'
|
||||
|
||||
const registerResource = ({ namespace }) => this._registerResource(namespace)
|
||||
const registerResource = ({ namespace }) =>
|
||||
this._registerResource(namespace)
|
||||
registerResource.description = 'Register a resource via cloud plugin'
|
||||
registerResource.params = {
|
||||
namespace: {
|
||||
@ -42,19 +43,20 @@ class XoServerCloud {
|
||||
registerResource,
|
||||
},
|
||||
})
|
||||
this._unsetRequestResource = this._xo.defineProperty('requestResource', this._requestResource, this)
|
||||
this._unsetRequestResource = this._xo.defineProperty(
|
||||
'requestResource',
|
||||
this._requestResource,
|
||||
this
|
||||
)
|
||||
|
||||
const updater = this._updater = new Client(`${UPDATER_URL}:${WS_PORT}`)
|
||||
const connect = () => updater.open(createBackoff()).catch(
|
||||
error => {
|
||||
const updater = (this._updater = new Client(`${UPDATER_URL}:${WS_PORT}`))
|
||||
const connect = () =>
|
||||
updater.open(createBackoff()).catch(error => {
|
||||
console.error('xo-server-cloud: fail to connect to updater', error)
|
||||
|
||||
return connect()
|
||||
}
|
||||
)
|
||||
updater
|
||||
.on('closed', connect)
|
||||
.on('scheduledAttempt', ({ delay }) => {
|
||||
})
|
||||
updater.on('closed', connect).on('scheduledAttempt', ({ delay }) => {
|
||||
console.warn('xo-server-cloud: next attempt in %s ms', delay)
|
||||
})
|
||||
connect()
|
||||
@ -138,13 +140,15 @@ class XoServerCloud {
|
||||
throw new Error('cannot get download token')
|
||||
}
|
||||
|
||||
const req = request.get(`${UPDATER_URL}:${HTTP_PORT}/`)
|
||||
const req = request
|
||||
.get(`${UPDATER_URL}:${HTTP_PORT}/`)
|
||||
.set('Authorization', `Bearer ${downloadToken}`)
|
||||
|
||||
// Impossible to pipe the response directly: https://github.com/visionmedia/superagent/issues/1187
|
||||
const pt = new PassThrough()
|
||||
req.pipe(pt)
|
||||
pt.length = (await eventToPromise(req, 'response')).headers['content-length']
|
||||
const { headers } = await eventToPromise(req, 'response')
|
||||
pt.length = headers['content-length']
|
||||
|
||||
return pt
|
||||
}
|
||||
|
@ -7,8 +7,9 @@ import { debug } from './utils'
|
||||
|
||||
export default class DensityPlan extends Plan {
|
||||
_checkRessourcesThresholds (objects, averages) {
|
||||
return filter(objects, object =>
|
||||
averages[object.id].memoryFree > this._thresholds.memoryFree.low
|
||||
return filter(
|
||||
objects,
|
||||
object => averages[object.id].memoryFree > this._thresholds.memoryFree.low
|
||||
)
|
||||
}
|
||||
|
||||
@ -19,27 +20,17 @@ export default class DensityPlan extends Plan {
|
||||
return
|
||||
}
|
||||
|
||||
const {
|
||||
hosts,
|
||||
toOptimize,
|
||||
} = results
|
||||
const { hosts, toOptimize } = results
|
||||
|
||||
let {
|
||||
averages: hostsAverages,
|
||||
} = results
|
||||
let { averages: hostsAverages } = results
|
||||
|
||||
const pools = await this._getPlanPools()
|
||||
let optimizationsCount = 0
|
||||
|
||||
for (const hostToOptimize of toOptimize) {
|
||||
const {
|
||||
id: hostId,
|
||||
$poolId: poolId,
|
||||
} = hostToOptimize
|
||||
const { id: hostId, $poolId: poolId } = hostToOptimize
|
||||
|
||||
const {
|
||||
master: masterId,
|
||||
} = pools[poolId]
|
||||
const { master: masterId } = pools[poolId]
|
||||
|
||||
// Avoid master optimization.
|
||||
if (masterId === hostId) {
|
||||
@ -58,10 +49,7 @@ export default class DensityPlan extends Plan {
|
||||
const otherHosts = []
|
||||
|
||||
for (const dest of hosts) {
|
||||
const {
|
||||
id: destId,
|
||||
$poolId: destPoolId,
|
||||
} = dest
|
||||
const { id: destId, $poolId: destPoolId } = dest
|
||||
|
||||
// Destination host != Host to optimize!
|
||||
if (destId === hostId) {
|
||||
@ -83,12 +71,7 @@ export default class DensityPlan extends Plan {
|
||||
|
||||
const simulResults = await this._simulate({
|
||||
host: hostToOptimize,
|
||||
destinations: [
|
||||
[ poolMaster ],
|
||||
poolHosts,
|
||||
masters,
|
||||
otherHosts,
|
||||
],
|
||||
destinations: [[poolMaster], poolHosts, masters, otherHosts],
|
||||
hostsAverages: clone(hostsAverages),
|
||||
})
|
||||
|
||||
@ -115,15 +98,15 @@ export default class DensityPlan extends Plan {
|
||||
|
||||
for (const vm of vms) {
|
||||
if (!vm.xenTools) {
|
||||
debug(`VM (${vm.id}) of Host (${hostId}) does not support pool migration.`)
|
||||
debug(
|
||||
`VM (${vm.id}) of Host (${hostId}) does not support pool migration.`
|
||||
)
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
// Sort vms by amount of memory. (+ -> -)
|
||||
vms.sort((a, b) =>
|
||||
vmsAverages[b.id].memory - vmsAverages[a.id].memory
|
||||
)
|
||||
vms.sort((a, b) => vmsAverages[b.id].memory - vmsAverages[a.id].memory)
|
||||
|
||||
const simulResults = {
|
||||
hostsAverages,
|
||||
@ -162,15 +145,11 @@ export default class DensityPlan extends Plan {
|
||||
|
||||
// Test if a VM migration on a destination (of a destinations set) is possible.
|
||||
_testMigration ({ vm, destinations, hostsAverages, vmsAverages }) {
|
||||
const {
|
||||
_thresholds: {
|
||||
critical: criticalThreshold,
|
||||
},
|
||||
} = this
|
||||
const { _thresholds: { critical: criticalThreshold } } = this
|
||||
|
||||
// Sort the destinations by available memory. (- -> +)
|
||||
destinations.sort((a, b) =>
|
||||
hostsAverages[a.id].memoryFree - hostsAverages[b.id].memoryFree
|
||||
destinations.sort(
|
||||
(a, b) => hostsAverages[a.id].memoryFree - hostsAverages[b.id].memoryFree
|
||||
)
|
||||
|
||||
for (const destination of destinations) {
|
||||
@ -204,13 +183,18 @@ export default class DensityPlan extends Plan {
|
||||
|
||||
await Promise.all(
|
||||
mapToArray(moves, move => {
|
||||
const {
|
||||
vm,
|
||||
destination,
|
||||
} = move
|
||||
const { vm, destination } = move
|
||||
const xapiDest = this.xo.getXapi(destination)
|
||||
debug(`Migrate VM (${vm.id}) to Host (${destination.id}) from Host (${vm.$container}).`)
|
||||
return xapiDest.migrateVm(vm._xapiId, this.xo.getXapi(destination), destination._xapiId)
|
||||
debug(
|
||||
`Migrate VM (${vm.id}) to Host (${destination.id}) from Host (${
|
||||
vm.$container
|
||||
}).`
|
||||
)
|
||||
return xapiDest.migrateVm(
|
||||
vm._xapiId,
|
||||
this.xo.getXapi(destination),
|
||||
destination._xapiId
|
||||
)
|
||||
})
|
||||
)
|
||||
|
||||
|
@ -9,10 +9,7 @@ import {
DEFAULT_CRITICAL_THRESHOLD_CPU,
DEFAULT_CRITICAL_THRESHOLD_MEMORY_FREE,
} from './plan'
import {
EXECUTION_DELAY,
debug,
} from './utils'
import { EXECUTION_DELAY, debug } from './utils'

// ===================================================================

@ -115,7 +112,10 @@ const makeJob = (cronPattern, fn) => {
|
||||
try {
|
||||
await fn()
|
||||
} catch (error) {
|
||||
console.error('[WARN] scheduled function:', (error && error.stack) || error)
|
||||
console.error(
|
||||
'[WARN] scheduled function:',
|
||||
(error && error.stack) || error
|
||||
)
|
||||
} finally {
|
||||
job.running = false
|
||||
job.emitter.emit('finish')
|
||||
@ -133,7 +133,10 @@ const makeJob = (cronPattern, fn) => {
|
||||
class LoadBalancerPlugin {
|
||||
constructor (xo) {
|
||||
this.xo = xo
|
||||
this._job = makeJob(`*/${EXECUTION_DELAY} * * * *`, this._executePlans.bind(this))
|
||||
this._job = makeJob(
|
||||
`*/${EXECUTION_DELAY} * * * *`,
|
||||
this._executePlans.bind(this)
|
||||
)
|
||||
}
|
||||
|
||||
async configure ({ plans }) {
|
||||
@ -154,7 +157,10 @@ class LoadBalancerPlugin {
|
||||
|
||||
if (plans) {
|
||||
for (const plan of plans) {
|
||||
this._addPlan(plan.mode === 'Performance mode' ? PERFORMANCE_MODE : DENSITY_MODE, plan)
|
||||
this._addPlan(
|
||||
plan.mode === 'Performance mode' ? PERFORMANCE_MODE : DENSITY_MODE,
|
||||
plan
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
@ -180,7 +186,8 @@ class LoadBalancerPlugin {
|
||||
}
|
||||
|
||||
this._poolIds = this._poolIds.concat(pools)
|
||||
this._plans.push(mode === PERFORMANCE_MODE
|
||||
this._plans.push(
|
||||
mode === PERFORMANCE_MODE
|
||||
? new PerformancePlan(this.xo, name, pools, options)
|
||||
: new DensityPlan(this.xo, name, pools, options)
|
||||
)
|
||||
@ -189,9 +196,7 @@ class LoadBalancerPlugin {
|
||||
_executePlans () {
|
||||
debug('Execute plans!')
|
||||
|
||||
return Promise.all(
|
||||
mapToArray(this._plans, plan => plan.execute())
|
||||
)
|
||||
return Promise.all(mapToArray(this._plans, plan => plan.execute()))
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -35,7 +35,10 @@ export default class PerformancePlan extends Plan {
|
||||
try {
|
||||
await Promise.all(
|
||||
mapToArray(
|
||||
filter(this._getHosts({ powerState: 'Halted' }), host => host.powerOnMode !== ''),
|
||||
filter(
|
||||
this._getHosts({ powerState: 'Halted' }),
|
||||
host => host.powerOnMode !== ''
|
||||
),
|
||||
host => {
|
||||
const { id } = host
|
||||
return this.xo.getXapi(id).powerOnHost(id)
|
||||
@ -52,17 +55,14 @@ export default class PerformancePlan extends Plan {
|
||||
return
|
||||
}
|
||||
|
||||
const {
|
||||
averages,
|
||||
toOptimize,
|
||||
} = results
|
||||
const { averages, toOptimize } = results
|
||||
let { hosts } = results
|
||||
|
||||
toOptimize.sort((a, b) => {
|
||||
a = averages[a.id]
|
||||
b = averages[b.id]
|
||||
|
||||
return (b.cpu - a.cpu) || (a.memoryFree - b.memoryFree)
|
||||
return b.cpu - a.cpu || a.memoryFree - b.memoryFree
|
||||
})
|
||||
|
||||
for (const exceededHost of toOptimize) {
|
||||
@ -85,9 +85,7 @@ export default class PerformancePlan extends Plan {
|
||||
const vmsAverages = await this._getVmsAverages(vms, exceededHost)
|
||||
|
||||
// Sort vms by cpu usage. (lower to higher)
|
||||
vms.sort((a, b) =>
|
||||
vmsAverages[b.id].cpu - vmsAverages[a.id].cpu
|
||||
)
|
||||
vms.sort((a, b) => vmsAverages[b.id].cpu - vmsAverages[a.id].cpu)
|
||||
|
||||
const exceededAverages = hostsAverages[exceededHost.id]
|
||||
const promises = []
|
||||
@ -95,11 +93,15 @@ export default class PerformancePlan extends Plan {
|
||||
const xapiSrc = this.xo.getXapi(exceededHost)
|
||||
let optimizationsCount = 0
|
||||
|
||||
const searchFunction = (a, b) => hostsAverages[b.id].cpu - hostsAverages[a.id].cpu
|
||||
const searchFunction = (a, b) =>
|
||||
hostsAverages[b.id].cpu - hostsAverages[a.id].cpu
|
||||
|
||||
for (const vm of vms) {
|
||||
// Search host with lower cpu usage in the same pool first. In other pool if necessary.
|
||||
let destination = searchBestObject(find(hosts, host => host.$poolId === vm.$poolId), searchFunction)
|
||||
let destination = searchBestObject(
|
||||
find(hosts, host => host.$poolId === vm.$poolId),
|
||||
searchFunction
|
||||
)
|
||||
|
||||
if (!destination) {
|
||||
destination = searchBestObject(hosts, searchFunction)
|
||||
@ -110,7 +112,8 @@ export default class PerformancePlan extends Plan {
|
||||
|
||||
// Unable to move the vm.
|
||||
if (
|
||||
exceededAverages.cpu - vmAverages.cpu < destinationAverages.cpu + vmAverages.cpu ||
|
||||
exceededAverages.cpu - vmAverages.cpu <
|
||||
destinationAverages.cpu + vmAverages.cpu ||
|
||||
destinationAverages.memoryFree > vmAverages.memory
|
||||
) {
|
||||
continue
|
||||
@ -122,15 +125,27 @@ export default class PerformancePlan extends Plan {
|
||||
exceededAverages.memoryFree += vmAverages.memory
|
||||
destinationAverages.memoryFree -= vmAverages.memory
|
||||
|
||||
debug(`Migrate VM (${vm.id}) to Host (${destination.id}) from Host (${exceededHost.id}).`)
|
||||
debug(
|
||||
`Migrate VM (${vm.id}) to Host (${destination.id}) from Host (${
|
||||
exceededHost.id
|
||||
}).`
|
||||
)
|
||||
optimizationsCount++
|
||||
|
||||
promises.push(
|
||||
xapiSrc.migrateVm(vm._xapiId, this.xo.getXapi(destination), destination._xapiId)
|
||||
xapiSrc.migrateVm(
|
||||
vm._xapiId,
|
||||
this.xo.getXapi(destination),
|
||||
destination._xapiId
|
||||
)
|
||||
)
|
||||
}
|
||||
|
||||
await Promise.all(promises)
|
||||
debug(`Performance mode: ${optimizationsCount} optimizations for Host (${exceededHost.id}).`)
|
||||
debug(
|
||||
`Performance mode: ${optimizationsCount} optimizations for Host (${
|
||||
exceededHost.id
|
||||
}).`
|
||||
)
|
||||
}
|
||||
}
|
||||
|
@ -1,9 +1,6 @@
|
||||
import { filter, includes, map as mapToArray } from 'lodash'
|
||||
|
||||
import {
|
||||
EXECUTION_DELAY,
|
||||
debug,
|
||||
} from './utils'
|
||||
import { EXECUTION_DELAY, debug } from './utils'
|
||||
|
||||
const MINUTES_OF_HISTORICAL_DATA = 30
|
||||
|
||||
@ -20,7 +17,7 @@ const LOW_THRESHOLD_FACTOR = 0.25
|
||||
const HIGH_THRESHOLD_MEMORY_FREE_FACTOR = 1.25
|
||||
const LOW_THRESHOLD_MEMORY_FREE_FACTOR = 20.0
|
||||
|
||||
const numberOrDefault = (value, def) => (value >= 0) ? value : def
|
||||
const numberOrDefault = (value, def) => (value >= 0 ? value : def)
|
||||
|
||||
// ===================================================================
|
||||
// Averages.
|
||||
@ -69,10 +66,12 @@ function computeRessourcesAverageWithWeight (averages1, averages2, ratio) {
|
||||
const averages = {}
|
||||
|
||||
for (const id in averages1) {
|
||||
const objectAverages = averages[id] = {}
|
||||
const objectAverages = (averages[id] = {})
|
||||
|
||||
for (const averageName in averages1[id]) {
|
||||
objectAverages[averageName] = averages1[id][averageName] * ratio + averages2[id][averageName] * (1 - ratio)
|
||||
objectAverages[averageName] =
|
||||
averages1[id][averageName] * ratio +
|
||||
averages2[id][averageName] * (1 - ratio)
|
||||
}
|
||||
}
|
||||
|
||||
@ -89,20 +88,24 @@ function setRealCpuAverageOfVms (vms, vmsAverages, nCpus) {
|
||||
// ===================================================================
|
||||
|
||||
export default class Plan {
|
||||
constructor (xo, name, poolIds, {
|
||||
excludedHosts,
|
||||
thresholds,
|
||||
} = {}) {
|
||||
constructor (xo, name, poolIds, { excludedHosts, thresholds } = {}) {
|
||||
this.xo = xo
|
||||
this._name = name
|
||||
this._poolIds = poolIds
|
||||
this._excludedHosts = excludedHosts
|
||||
this._thresholds = {
|
||||
cpu: {
|
||||
critical: numberOrDefault(thresholds && thresholds.cpu, DEFAULT_CRITICAL_THRESHOLD_CPU),
|
||||
critical: numberOrDefault(
|
||||
thresholds && thresholds.cpu,
|
||||
DEFAULT_CRITICAL_THRESHOLD_CPU
|
||||
),
|
||||
},
|
||||
memoryFree: {
|
||||
critical: numberOrDefault(thresholds && thresholds.memoryFree, DEFAULT_CRITICAL_THRESHOLD_MEMORY_FREE) * 1024,
|
||||
critical:
|
||||
numberOrDefault(
|
||||
thresholds && thresholds.memoryFree,
|
||||
DEFAULT_CRITICAL_THRESHOLD_MEMORY_FREE
|
||||
) * 1024,
|
||||
},
|
||||
}
|
||||
|
||||
@ -143,8 +146,16 @@ export default class Plan {
|
||||
}
|
||||
|
||||
// Check in the last 30 min interval with ratio.
|
||||
const avgBefore = computeRessourcesAverage(hosts, hostsStats, MINUTES_OF_HISTORICAL_DATA)
|
||||
const avgWithRatio = computeRessourcesAverageWithWeight(avgNow, avgBefore, 0.75)
|
||||
const avgBefore = computeRessourcesAverage(
|
||||
hosts,
|
||||
hostsStats,
|
||||
MINUTES_OF_HISTORICAL_DATA
|
||||
)
|
||||
const avgWithRatio = computeRessourcesAverageWithWeight(
|
||||
avgNow,
|
||||
avgBefore,
|
||||
0.75
|
||||
)
|
||||
|
||||
toOptimize = this._checkRessourcesThresholds(toOptimize, avgWithRatio)
|
||||
|
||||
@ -185,16 +196,20 @@ export default class Plan {
|
||||
|
||||
// Compute hosts for each pool. They can change over time.
|
||||
_getHosts ({ powerState = 'Running' } = {}) {
|
||||
return filter(this.xo.getObjects(), object => (
|
||||
return filter(
|
||||
this.xo.getObjects(),
|
||||
object =>
|
||||
object.type === 'host' &&
|
||||
includes(this._poolIds, object.$poolId) &&
|
||||
object.power_state === powerState &&
|
||||
!includes(this._excludedHosts, object.id)
|
||||
))
|
||||
)
|
||||
}
|
||||
|
||||
async _getVms (hostId) {
|
||||
return filter(this.xo.getObjects(), object =>
|
||||
return filter(
|
||||
this.xo.getObjects(),
|
||||
object =>
|
||||
object.type === 'VM' &&
|
||||
object.power_state === 'Running' &&
|
||||
object.$container === hostId
|
||||
@ -208,7 +223,8 @@ export default class Plan {
|
||||
async _getHostsStats (hosts, granularity) {
|
||||
const hostsStats = {}
|
||||
|
||||
await Promise.all(mapToArray(hosts, host =>
|
||||
await Promise.all(
|
||||
mapToArray(hosts, host =>
|
||||
this.xo.getXapiHostStats(host, granularity).then(hostStats => {
|
||||
hostsStats[host.id] = {
|
||||
nPoints: hostStats.stats.cpus[0].length,
|
||||
@ -216,7 +232,8 @@ export default class Plan {
|
||||
averages: {},
|
||||
}
|
||||
})
|
||||
))
|
||||
)
|
||||
)
|
||||
|
||||
return hostsStats
|
||||
}
|
||||
@ -224,7 +241,8 @@ export default class Plan {
|
||||
async _getVmsStats (vms, granularity) {
|
||||
const vmsStats = {}
|
||||
|
||||
await Promise.all(mapToArray(vms, vm =>
|
||||
await Promise.all(
|
||||
mapToArray(vms, vm =>
|
||||
this.xo.getXapiVmStats(vm, granularity).then(vmStats => {
|
||||
vmsStats[vm.id] = {
|
||||
nPoints: vmStats.stats.cpus[0].length,
|
||||
@ -232,7 +250,8 @@ export default class Plan {
|
||||
averages: {},
|
||||
}
|
||||
})
|
||||
))
|
||||
)
|
||||
)
|
||||
|
||||
return vmsStats
|
||||
}
|
||||
|
@ -27,9 +27,7 @@ module.exports = {
|
||||
debug: !__TEST__,
|
||||
loose: true,
|
||||
shippedProposals: true,
|
||||
targets: __PROD__
|
||||
? { node: nodeCompat }
|
||||
: { node: 'current' },
|
||||
targets: __PROD__ ? { node: nodeCompat } : { node: 'current' },
|
||||
useBuiltIns: '@babel/polyfill' in (pkg.dependencies || {}) && 'usage',
|
||||
},
|
||||
],
|
||||
|
@ -91,9 +91,7 @@ const HOST_FUNCTIONS = {
|
||||
unit: '% used',
|
||||
comparator: '>',
|
||||
createParser: (legend, threshold) => {
|
||||
const memoryKBytesLegend = legend.find(
|
||||
l => l.name === 'memory_total_kib'
|
||||
)
|
||||
const memoryKBytesLegend = legend.find(l => l.name === 'memory_total_kib')
|
||||
const memoryKBytesFreeLegend = legend.find(
|
||||
l => l.name === 'memory_free_kib'
|
||||
)
|
||||
|
@ -37,7 +37,6 @@ exports.default = function (opts) {
|
||||
// For simplicity's sake, this plugin returns a plain object, but
|
||||
// usually it returns a new instance of an existing class.
|
||||
return {
|
||||
|
||||
// This (optional) method is called each time the plugin is
|
||||
// (re-)configured.
|
||||
//
|
||||
|
@ -16,7 +16,10 @@ const removeUndefined = obj => {
|
||||
const markdownCompiler = nodemailerMarkdown()
|
||||
|
||||
const logAndRethrow = error => {
|
||||
console.error('[WARN] plugin transport-email:', (error && error.stack) || error)
|
||||
console.error(
|
||||
'[WARN] plugin transport-email:',
|
||||
(error && error.stack) || error
|
||||
)
|
||||
|
||||
throw error
|
||||
}
|
||||
@ -70,7 +73,8 @@ export const configurationSchema = {
|
||||
},
|
||||
ignoreUnauthorized: {
|
||||
type: 'boolean',
|
||||
description: 'ignore certificates error (e.g. self-signed certificate)',
|
||||
description:
|
||||
'ignore certificates error (e.g. self-signed certificate)',
|
||||
},
|
||||
|
||||
// FIXME: xo-web does not support edition of too nested
|
||||
@ -138,18 +142,11 @@ class TransportEmailPlugin {
|
||||
|
||||
configure ({
|
||||
from,
|
||||
transport: {
|
||||
ignoreUnauthorized,
|
||||
password,
|
||||
secure,
|
||||
user,
|
||||
...transportConf
|
||||
},
|
||||
transport: { ignoreUnauthorized, password, secure, user, ...transportConf },
|
||||
}) {
|
||||
if (ignoreUnauthorized != null) {
|
||||
(
|
||||
transportConf.tls ||
|
||||
(transportConf.tls = {})
|
||||
;(
|
||||
transportConf.tls || (transportConf.tls = {})
|
||||
).rejectUnauthorized = !ignoreUnauthorized
|
||||
}
|
||||
|
||||
@ -159,11 +156,14 @@ class TransportEmailPlugin {
|
||||
|
||||
switch (secure) {
|
||||
case true:
|
||||
transportConf.secure = true; break
|
||||
transportConf.secure = true
|
||||
break
|
||||
case 'disabled':
|
||||
transportConf.ignoreTLS = true; break
|
||||
transportConf.ignoreTLS = true
|
||||
break
|
||||
case 'required':
|
||||
transportConf.requireTLS = true; break
|
||||
transportConf.requireTLS = true
|
||||
break
|
||||
}
|
||||
|
||||
const transport = createTransport(transportConf, { from })
|
||||
@ -188,21 +188,18 @@ class TransportEmailPlugin {
|
||||
|
||||
The transport-email plugin for Xen Orchestra server seems to be working fine, nicely done :)
|
||||
`,
|
||||
attachments: [ {
|
||||
attachments: [
|
||||
{
|
||||
filename: 'example.txt',
|
||||
content: 'Attachments are working too, great!\n',
|
||||
} ],
|
||||
},
|
||||
],
|
||||
})
|
||||
}
|
||||
|
||||
_sendEmail ({
|
||||
from,
|
||||
to, cc, bcc,
|
||||
subject,
|
||||
markdown,
|
||||
attachments,
|
||||
}) {
|
||||
return this._send(removeUndefined({
|
||||
_sendEmail ({ from, to, cc, bcc, subject, markdown, attachments }) {
|
||||
return this._send(
|
||||
removeUndefined({
|
||||
from,
|
||||
to,
|
||||
cc,
|
||||
@ -210,7 +207,8 @@ The transport-email plugin for Xen Orchestra server seems to be working fine, ni
|
||||
subject,
|
||||
markdown,
|
||||
attachments,
|
||||
})).catch(logAndRethrow)
|
||||
})
|
||||
).catch(logAndRethrow)
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -35,18 +35,12 @@ export const configurationSchema = {
|
||||
|
||||
// ===================================================================
|
||||
|
||||
const bind = (fn, thisArg) => function __bound__ () {
|
||||
const bind = (fn, thisArg) =>
|
||||
function __bound__ () {
|
||||
return fn.apply(thisArg, arguments)
|
||||
}
|
||||
|
||||
function nscaPacketBuilder ({
|
||||
host,
|
||||
iv,
|
||||
message,
|
||||
service,
|
||||
status,
|
||||
timestamp,
|
||||
}) {
|
||||
function nscaPacketBuilder ({ host, iv, message, service, status, timestamp }) {
|
||||
// Building NSCA packet
|
||||
const SIZE = 720
|
||||
const packet = Buffer.alloc(SIZE)
|
||||
@ -112,15 +106,13 @@ class XoServerNagios {
|
||||
|
||||
test () {
|
||||
return this._sendPassiveCheck({
|
||||
message: 'The server-nagios plugin for Xen Orchestra server seems to be working fine, nicely done :)',
|
||||
message:
|
||||
'The server-nagios plugin for Xen Orchestra server seems to be working fine, nicely done :)',
|
||||
status: OK,
|
||||
})
|
||||
}
|
||||
|
||||
_sendPassiveCheck ({
|
||||
message,
|
||||
status,
|
||||
}) {
|
||||
_sendPassiveCheck ({ message, status }) {
|
||||
return new Promise((resolve, reject) => {
|
||||
if (/\r|\n/.test(message)) {
|
||||
throw new Error('the message must not contain a line break')
|
||||
@ -145,13 +137,7 @@ class XoServerNagios {
|
||||
|
||||
// 1) Using xor between the NSCA packet and the initialization vector
|
||||
// 2) Using xor between the result of the first operation and the encryption key
|
||||
const xorPacketBuffer = xor(
|
||||
xor(
|
||||
packet,
|
||||
iv
|
||||
),
|
||||
this._key
|
||||
)
|
||||
const xorPacketBuffer = xor(xor(packet, iv), this._key)
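// Sketch (not part of the diff): the collapsed line above is NSCA's obfuscation
// step – XOR the packet with the server-provided IV, then with the shared key.
// A generic byte-wise XOR against a repeating pad would look like this; the
// real `xor` used above comes from this plugin's own imports.
const xorWith = (data, pad) =>
  Buffer.from(data.map((byte, i) => byte ^ pad[i % pad.length]))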
|
||||
|
||||
client.write(xorPacketBuffer, res => {
|
||||
client.destroy()
|
||||
|
@ -4,7 +4,10 @@ import { promisify } from 'promise-toolbox'
|
||||
// ===================================================================
|
||||
|
||||
const logAndRethrow = error => {
|
||||
console.error('[WARN] plugin transport-slack:', (error != null && error.stack) || error)
|
||||
console.error(
|
||||
'[WARN] plugin transport-slack:',
|
||||
(error != null && error.stack) || error
|
||||
)
|
||||
|
||||
throw error
|
||||
}
|
||||
@ -48,10 +51,7 @@ class XoServerTransportSlack {
|
||||
this._send = null
|
||||
}
|
||||
|
||||
configure ({
|
||||
webhookUri,
|
||||
...conf
|
||||
}) {
|
||||
configure ({ webhookUri, ...conf }) {
|
||||
const slack = new Slack()
|
||||
slack.setWebhook(webhookUri)
|
||||
this._conf = conf
|
||||
@ -74,9 +74,7 @@ The transport-slack plugin for Xen Orchestra server seems to be working fine, ni
|
||||
})
|
||||
}
|
||||
|
||||
_sendSlack ({
|
||||
message,
|
||||
}) {
|
||||
_sendSlack ({ message }) {
|
||||
// TODO: handle errors
|
||||
return this._send({ ...this._conf, text: message }).catch(logAndRethrow)
|
||||
}
|
||||
|
@ -74,7 +74,9 @@ class TransportXmppPlugin {
|
||||
new XmppClient.Stanza('message', {
|
||||
to: receiver,
|
||||
type: 'chat',
|
||||
}).c('body').t(message)
|
||||
})
|
||||
.c('body')
|
||||
.t(message)
|
||||
)
|
||||
}
|
||||
}
|
||||
|
@ -14,13 +14,8 @@ import {
|
||||
values,
|
||||
zipObject,
|
||||
} from 'lodash'
|
||||
import {
|
||||
promisify,
|
||||
} from 'promise-toolbox'
|
||||
import {
|
||||
readFile,
|
||||
writeFile,
|
||||
} from 'fs'
|
||||
import { promisify } from 'promise-toolbox'
|
||||
import { readFile, writeFile } from 'fs'
|
||||
|
||||
// ===================================================================
|
||||
|
||||
@ -41,9 +36,9 @@ const mibPower = Math.pow(2, 20)
|
||||
const kibPower = Math.pow(2, 10)
|
||||
|
||||
let template = null
|
||||
pReadFile(`${__dirname}/../report.html.tpl`, 'utf8')
|
||||
.then(tpl => {
|
||||
template = Handlebars.compile(minify(tpl, {
|
||||
pReadFile(`${__dirname}/../report.html.tpl`, 'utf8').then(tpl => {
|
||||
template = Handlebars.compile(
|
||||
minify(tpl, {
|
||||
collapseBooleanAttributes: true,
|
||||
collapseWhitespace: true,
|
||||
minifyCSS: true,
|
||||
@ -51,12 +46,12 @@ pReadFile(`${__dirname}/../report.html.tpl`, 'utf8')
|
||||
removeComments: true,
|
||||
removeOptionalTags: true,
|
||||
removeRedundantAttributes: true,
|
||||
}))
|
||||
})
|
||||
)
|
||||
})
|
||||
|
||||
let imgXo = null
|
||||
pReadFile(`${__dirname}/../images/xo.png`, 'base64')
|
||||
.then(data => {
|
||||
pReadFile(`${__dirname}/../images/xo.png`, 'base64').then(data => {
|
||||
imgXo = `data:image/png;base64,${data}`
|
||||
})
|
||||
|
||||
@ -75,7 +70,8 @@ export const configurationSchema = {
|
||||
periodicity: {
|
||||
type: 'string',
|
||||
enum: ['monthly', 'weekly'],
|
||||
description: 'If you choose weekly you will receive the report every sunday and if you choose monthly you will receive it every first day of the month.',
|
||||
description:
|
||||
'If you choose weekly you will receive the report every sunday and if you choose monthly you will receive it every first day of the month.',
|
||||
},
|
||||
},
|
||||
|
||||
@ -85,16 +81,25 @@ export const configurationSchema = {
|
||||
|
||||
// ===================================================================
|
||||
|
||||
Handlebars.registerHelper('compare', function (lvalue, operator, rvalue, options) {
|
||||
Handlebars.registerHelper('compare', function (
|
||||
lvalue,
|
||||
operator,
|
||||
rvalue,
|
||||
options
|
||||
) {
|
||||
if (arguments.length < 3) {
|
||||
throw new Error('Handlerbars Helper "compare" needs 2 parameters')
|
||||
}
|
||||
|
||||
if (!compareOperators[operator]) {
|
||||
throw new Error(`Handlerbars Helper "compare" doesn't know the operator ${operator}`)
|
||||
throw new Error(
|
||||
`Handlerbars Helper "compare" doesn't know the operator ${operator}`
|
||||
)
|
||||
}
|
||||
|
||||
return compareOperators[operator](lvalue, rvalue) ? options.fn(this) : options.inverse(this)
|
||||
return compareOperators[operator](lvalue, rvalue)
|
||||
? options.fn(this)
|
||||
: options.inverse(this)
|
||||
})
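'compare' above is a Handlebars block helper: the truthy branch renders options.fn, the else branch options.inverse. An illustrative way to exercise it (not quoted from report.html.tpl, and assuming '>' is among the compareOperators defined earlier in this file):

const tpl = Handlebars.compile(
  '{{#compare newValue ">" oldValue}}increasing{{else}}decreasing{{/compare}}'
)
tpl({ newValue: 3, oldValue: 2 }) // => 'increasing'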
|
||||
|
||||
Handlebars.registerHelper('math', function (lvalue, operator, rvalue, options) {
|
||||
@ -103,7 +108,9 @@ Handlebars.registerHelper('math', function (lvalue, operator, rvalue, options) {
|
||||
}
|
||||
|
||||
if (!mathOperators[operator]) {
|
||||
throw new Error(`Handlerbars Helper "math" doesn't know the operator ${operator}`)
|
||||
throw new Error(
|
||||
`Handlerbars Helper "math" doesn't know the operator ${operator}`
|
||||
)
|
||||
}
|
||||
|
||||
return mathOperators[operator](+lvalue, +rvalue)
|
||||
@ -135,24 +142,24 @@ const computeDoubleMean = val => computeMean(val.map(computeMean))
|
||||
function computeMeans (objects, options) {
|
||||
return zipObject(
|
||||
options,
|
||||
map(
|
||||
options,
|
||||
opt => round(computeMean(map(objects, opt)), 2)
|
||||
)
|
||||
map(options, opt => round(computeMean(map(objects, opt)), 2))
|
||||
)
|
||||
}
|
||||
|
||||
function getTop (objects, options) {
|
||||
return zipObject(
|
||||
options,
|
||||
map(options, opt =>
|
||||
map(
|
||||
options,
|
||||
opt => map(
|
||||
orderBy(objects, object => {
|
||||
orderBy(
|
||||
objects,
|
||||
object => {
|
||||
const value = object[opt]
|
||||
|
||||
return isNaN(value) ? -Infinity : value
|
||||
}, 'desc').slice(0, 3),
|
||||
},
|
||||
'desc'
|
||||
).slice(0, 3),
|
||||
obj => ({
|
||||
uuid: obj.uuid,
|
||||
name: obj.name,
|
||||
@ -168,7 +175,10 @@ function conputePercentage (curr, prev, options) {
|
||||
options,
|
||||
map(
|
||||
options,
|
||||
opt => prev[opt] === 0 ? 'NONE' : `${round((curr[opt] - prev[opt]) * 100 / prev[opt], 2)}`
|
||||
opt =>
|
||||
prev[opt] === 0
|
||||
? 'NONE'
|
||||
: `${round((curr[opt] - prev[opt]) * 100 / prev[opt], 2)}`
|
||||
)
|
||||
)
|
||||
}
|
||||
@ -182,11 +192,9 @@ function getDiff (oldElements, newElements) {
|
||||
|
||||
// ===================================================================
|
||||
|
||||
function getVmsStats ({
|
||||
runningVms,
|
||||
xo,
|
||||
}) {
|
||||
return Promise.all(map(runningVms, async vm => {
|
||||
function getVmsStats ({ runningVms, xo }) {
|
||||
return Promise.all(
|
||||
map(runningVms, async vm => {
|
||||
const vmStats = await xo.getXapiVmStats(vm, 'days')
|
||||
return {
|
||||
uuid: vm.uuid,
|
||||
@ -198,14 +206,13 @@ function getVmsStats ({
|
||||
netReception: computeDoubleMean(vmStats.stats.vifs.rx) / kibPower,
|
||||
netTransmission: computeDoubleMean(vmStats.stats.vifs.tx) / kibPower,
|
||||
}
|
||||
}))
|
||||
})
|
||||
)
|
||||
}
|
||||
|
||||
function getHostsStats ({
|
||||
runningHosts,
|
||||
xo,
|
||||
}) {
|
||||
return Promise.all(map(runningHosts, async host => {
|
||||
function getHostsStats ({ runningHosts, xo }) {
|
||||
return Promise.all(
|
||||
map(runningHosts, async host => {
|
||||
const hostStats = await xo.getXapiHostStats(host, 'days')
|
||||
return {
|
||||
uuid: host.uuid,
|
||||
@ -216,14 +223,11 @@ function getHostsStats ({
|
||||
netReception: computeDoubleMean(hostStats.stats.pifs.rx) / kibPower,
|
||||
netTransmission: computeDoubleMean(hostStats.stats.pifs.tx) / kibPower,
|
||||
}
|
||||
}))
|
||||
})
|
||||
)
|
||||
}
|
||||
|
||||
function computeGlobalVmsStats ({
|
||||
haltedVms,
|
||||
vmsStats,
|
||||
xo,
|
||||
}) {
|
||||
function computeGlobalVmsStats ({ haltedVms, vmsStats, xo }) {
|
||||
const allVms = concat(
|
||||
map(vmsStats, vm => ({
|
||||
uuid: vm.uuid,
|
||||
@ -235,17 +239,23 @@ function computeGlobalVmsStats ({
|
||||
}))
|
||||
)
|
||||
|
||||
return assign(computeMeans(vmsStats, ['cpu', 'ram', 'diskRead', 'diskWrite', 'netReception', 'netTransmission']), {
|
||||
return assign(
|
||||
computeMeans(vmsStats, [
|
||||
'cpu',
|
||||
'ram',
|
||||
'diskRead',
|
||||
'diskWrite',
|
||||
'netReception',
|
||||
'netTransmission',
|
||||
]),
|
||||
{
|
||||
number: allVms.length,
|
||||
allVms,
|
||||
})
|
||||
}
|
||||
)
|
||||
}
|
||||
|
||||
function computeGlobalHostsStats ({
|
||||
haltedHosts,
|
||||
hostsStats,
|
||||
xo,
|
||||
}) {
|
||||
function computeGlobalHostsStats ({ haltedHosts, hostsStats, xo }) {
|
||||
const allHosts = concat(
|
||||
map(hostsStats, host => ({
|
||||
uuid: host.uuid,
|
||||
@ -257,44 +267,56 @@ function computeGlobalHostsStats ({
|
||||
}))
|
||||
)
|
||||
|
||||
return assign(computeMeans(hostsStats, ['cpu', 'ram', 'load', 'netReception', 'netTransmission']), {
|
||||
return assign(
|
||||
computeMeans(hostsStats, [
|
||||
'cpu',
|
||||
'ram',
|
||||
'load',
|
||||
'netReception',
|
||||
'netTransmission',
|
||||
]),
|
||||
{
|
||||
number: allHosts.length,
|
||||
allHosts,
|
||||
})
|
||||
}
|
||||
)
|
||||
}
|
||||
|
||||
function getTopVms ({
|
||||
vmsStats,
|
||||
xo,
|
||||
}) {
|
||||
return getTop(vmsStats, ['cpu', 'ram', 'diskRead', 'diskWrite', 'netReception', 'netTransmission'])
|
||||
function getTopVms ({ vmsStats, xo }) {
|
||||
return getTop(vmsStats, [
|
||||
'cpu',
|
||||
'ram',
|
||||
'diskRead',
|
||||
'diskWrite',
|
||||
'netReception',
|
||||
'netTransmission',
|
||||
])
|
||||
}
|
||||
|
||||
function getTopHosts ({
|
||||
hostsStats,
|
||||
xo,
|
||||
}) {
|
||||
return getTop(hostsStats, ['cpu', 'ram', 'load', 'netReception', 'netTransmission'])
|
||||
function getTopHosts ({ hostsStats, xo }) {
|
||||
return getTop(hostsStats, [
|
||||
'cpu',
|
||||
'ram',
|
||||
'load',
|
||||
'netReception',
|
||||
'netTransmission',
|
||||
])
|
||||
}
|
||||
|
||||
function getMostAllocatedSpaces ({
|
||||
disks,
|
||||
xo,
|
||||
}) {
|
||||
return map(
|
||||
orderBy(disks, ['size'], ['desc']).slice(0, 3), disk => ({
|
||||
function getMostAllocatedSpaces ({ disks, xo }) {
|
||||
return map(orderBy(disks, ['size'], ['desc']).slice(0, 3), disk => ({
|
||||
uuid: disk.uuid,
|
||||
name: disk.name_label,
|
||||
size: round(disk.size / gibPower, 2),
|
||||
}))
|
||||
}
|
||||
|
||||
async function getHostsMissingPatches ({
|
||||
runningHosts,
|
||||
xo,
|
||||
}) {
|
||||
const hostsMissingPatches = await Promise.all(map(runningHosts, async host => {
|
||||
const hostsPatches = await xo.getXapi(host).listMissingPoolPatchesOnHost(host._xapiId)
|
||||
async function getHostsMissingPatches ({ runningHosts, xo }) {
|
||||
const hostsMissingPatches = await Promise.all(
|
||||
map(runningHosts, async host => {
|
||||
const hostsPatches = await xo
|
||||
.getXapi(host)
|
||||
.listMissingPoolPatchesOnHost(host._xapiId)
|
||||
if (hostsPatches.length > 0) {
|
||||
return {
|
||||
uuid: host.uuid,
|
||||
@ -302,7 +324,8 @@ async function getHostsMissingPatches ({
|
||||
patches: map(hostsPatches, 'name'),
|
||||
}
|
||||
}
|
||||
}))
|
||||
})
|
||||
)
|
||||
return filter(hostsMissingPatches, host => host !== undefined)
|
||||
}
|
||||
|
||||
@ -310,17 +333,11 @@ function getAllUsersEmail (users) {
|
||||
return map(users, 'email')
|
||||
}
|
||||
|
||||
async function storeStats ({
|
||||
data,
|
||||
storedStatsPath,
|
||||
}) {
|
||||
async function storeStats ({ data, storedStatsPath }) {
|
||||
await pWriteFile(storedStatsPath, JSON.stringify(data))
|
||||
}
|
||||
|
||||
async function computeEvolution ({
|
||||
storedStatsPath,
|
||||
...newStats
|
||||
}) {
|
||||
async function computeEvolution ({ storedStatsPath, ...newStats }) {
|
||||
try {
|
||||
const oldStats = JSON.parse(await pReadFile(storedStatsPath, 'utf8'))
|
||||
const newStatsVms = newStats.vms
|
||||
@ -332,16 +349,35 @@ async function computeEvolution ({
|
||||
|
||||
const vmsEvolution = {
|
||||
number: newStatsVms.number - oldStatsVms.number,
|
||||
...conputePercentage(newStatsVms, oldStatsVms, ['cpu', 'ram', 'diskRead', 'diskWrite', 'netReception', 'netTransmission']),
|
||||
...conputePercentage(newStatsVms, oldStatsVms, [
|
||||
'cpu',
|
||||
'ram',
|
||||
'diskRead',
|
||||
'diskWrite',
|
||||
'netReception',
|
||||
'netTransmission',
|
||||
]),
|
||||
}
|
||||
|
||||
const hostsEvolution = {
|
||||
number: newStatsHosts.number - oldStatsHosts.number,
|
||||
...conputePercentage(newStatsHosts, oldStatsHosts, ['cpu', 'ram', 'load', 'netReception', 'netTransmission']),
|
||||
...conputePercentage(newStatsHosts, oldStatsHosts, [
|
||||
'cpu',
|
||||
'ram',
|
||||
'load',
|
||||
'netReception',
|
||||
'netTransmission',
|
||||
]),
|
||||
}
|
||||
|
||||
const vmsRessourcesEvolution = getDiff(oldStatsVms.allVms, newStatsVms.allVms)
|
||||
const hostsRessourcesEvolution = getDiff(oldStatsHosts.allHosts, newStatsHosts.allHosts)
|
||||
const vmsRessourcesEvolution = getDiff(
|
||||
oldStatsVms.allVms,
|
||||
newStatsVms.allVms
|
||||
)
|
||||
const hostsRessourcesEvolution = getDiff(
|
||||
oldStatsHosts.allHosts,
|
||||
newStatsHosts.allHosts
|
||||
)
|
||||
|
||||
const usersEvolution = getDiff(oldStats.users, newStats.users)
|
||||
|
||||
@ -358,17 +394,23 @@ async function computeEvolution ({
|
||||
}
|
||||
}
|
||||
|
||||
async function dataBuilder ({
|
||||
xo,
|
||||
storedStatsPath,
|
||||
}) {
|
||||
async function dataBuilder ({ xo, storedStatsPath }) {
|
||||
const xoObjects = values(xo.getObjects())
|
||||
const runningVms = filter(xoObjects, { type: 'VM', power_state: 'Running' })
|
||||
const haltedVms = filter(xoObjects, { type: 'VM', power_state: 'Halted' })
|
||||
const runningHosts = filter(xoObjects, {type: 'host', power_state: 'Running'})
|
||||
const runningHosts = filter(xoObjects, {
|
||||
type: 'host',
|
||||
power_state: 'Running',
|
||||
})
|
||||
const haltedHosts = filter(xoObjects, { type: 'host', power_state: 'Halted' })
|
||||
const disks = filter(xoObjects, { type: 'SR' })
|
||||
const [users, vmsStats, hostsStats, topAllocation, hostsMissingPatches] = await Promise.all([
|
||||
const [
|
||||
users,
|
||||
vmsStats,
|
||||
hostsStats,
|
||||
topAllocation,
|
||||
hostsMissingPatches,
|
||||
] = await Promise.all([
|
||||
xo.getAllUsers(),
|
||||
getVmsStats({ xo, runningVms }),
|
||||
getHostsStats({ xo, runningHosts }),
|
||||
@ -376,7 +418,13 @@ async function dataBuilder ({
|
||||
getHostsMissingPatches({ xo, runningHosts }),
|
||||
])
|
||||
|
||||
const [globalVmsStats, globalHostsStats, topVms, topHosts, usersEmail] = await Promise.all([
|
||||
const [
|
||||
globalVmsStats,
|
||||
globalHostsStats,
|
||||
topVms,
|
||||
topHosts,
|
||||
usersEmail,
|
||||
] = await Promise.all([
|
||||
computeGlobalVmsStats({ xo, vmsStats, haltedVms }),
|
||||
computeGlobalHostsStats({ xo, hostsStats, haltedHosts }),
|
||||
getTopVms({ xo, vmsStats }),
|
||||
@ -430,7 +478,8 @@ class UsageReportPlugin {
|
||||
this._conf = configuration
|
||||
|
||||
this._job = new CronJob({
|
||||
cronTime: configuration.periodicity === 'monthly' ? '00 06 1 * *' : '00 06 * * 0',
|
||||
cronTime:
|
||||
configuration.periodicity === 'monthly' ? '00 06 1 * *' : '00 06 * * 0',
|
||||
onTick: () => this._sendReport(),
|
||||
start: false,
|
||||
})
|
||||
@ -467,10 +516,12 @@ class UsageReportPlugin {
|
||||
Please, find the attached report.
|
||||
|
||||
best regards.`,
|
||||
attachments: [{
|
||||
attachments: [
|
||||
{
|
||||
filename: `xoReport_${currDate}.html`,
|
||||
content: template(data),
|
||||
}],
|
||||
},
|
||||
],
|
||||
}),
|
||||
storeStats({
|
||||
data,
|
||||
|
@ -8,25 +8,21 @@ try {
const filtered = frames.filter(function (frame) {
const name = frame && frame.getFileName()

return (
// has a filename
return (// has a filename
name &&

// contains a separator (no internal modules)
name.indexOf(sep) !== -1 &&

// does not start with `internal`
name.lastIndexOf('internal', 0) !== -1
)
name.lastIndexOf('internal', 0) !== -1)
})

// depd (used amongst other by express requires at least 3 frames
// in the stack.
return filtered.length > 2
? filtered
: frames
return filtered.length > 2 ? filtered : frames
})
} catch (_) {}

// Source maps.
try { require('julien-f-source-map-support/register') } catch (_) {}
try {
require('julien-f-source-map-support/register')
} catch (_) {}
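The hunk above only re-wraps the stack-frame filter. A standalone sketch of that predicate, written to follow the inline comments (note the committed code itself keeps the original `!== -1` comparison on the 'internal' check):

const { sep } = require('path')

// true for frames that belong to application code rather than Node internals
const isAppFrame = frame => {
  const name = frame && frame.getFileName()
  return (
    name &&                                // has a filename
    name.indexOf(sep) !== -1 &&            // contains a path separator
    name.lastIndexOf('internal', 0) === -1 // does not start with 'internal'
  )
}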
@ -14,7 +14,8 @@ export async function getCurrentPermissions () {
|
||||
|
||||
getCurrentPermissions.permission = ''
|
||||
|
||||
getCurrentPermissions.description = 'get (explicit) permissions by object for the current user'
|
||||
getCurrentPermissions.description =
|
||||
'get (explicit) permissions by object for the current user'
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
|
@ -42,8 +42,13 @@ scanFiles.params = {
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
function handleFetchFiles (req, res, { remote, disk, partition, paths, format: archiveFormat }) {
|
||||
this.fetchFilesInDiskBackup(remote, disk, partition, paths).then(files => {
|
||||
function handleFetchFiles (
|
||||
req,
|
||||
res,
|
||||
{ remote, disk, partition, paths, format: archiveFormat }
|
||||
) {
|
||||
this.fetchFilesInDiskBackup(remote, disk, partition, paths)
|
||||
.then(files => {
|
||||
res.setHeader('content-disposition', 'attachment')
|
||||
res.setHeader('content-type', 'application/octet-stream')
|
||||
|
||||
@ -67,7 +72,8 @@ function handleFetchFiles (req, res, { remote, disk, partition, paths, format: a
|
||||
archive.finalize()
|
||||
|
||||
archive.pipe(res)
|
||||
}).catch(error => {
|
||||
})
|
||||
.catch(error => {
|
||||
console.error(error)
|
||||
res.writeHead(500)
|
||||
res.end(format.error(0, error))
|
||||
@ -75,13 +81,18 @@ function handleFetchFiles (req, res, { remote, disk, partition, paths, format: a
|
||||
}
|
||||
|
||||
export async function fetchFiles ({ format = 'zip', ...params }) {
|
||||
const fileName = params.paths.length > 1
|
||||
const fileName =
|
||||
params.paths.length > 1
|
||||
? `restore_${new Date().toJSON()}.${format}`
|
||||
: basename(params.paths[0])
|
||||
|
||||
return this.registerHttpRequest(handleFetchFiles, { ...params, format }, {
|
||||
return this.registerHttpRequest(
|
||||
handleFetchFiles,
|
||||
{ ...params, format },
|
||||
{
|
||||
suffix: encodeURI(`/${fileName}`),
|
||||
}).then(url => ({ $getFrom: url }))
|
||||
}
|
||||
).then(url => ({ $getFrom: url }))
|
||||
}
|
||||
|
||||
fetchFiles.permission = 'admin'
|
||||
|
@ -10,7 +10,9 @@ export async function create ({ name, size, sr, vm, bootable, position, mode })
|
||||
if (attach && (resourceSet = vm.resourceSet) != null) {
|
||||
await this.checkResourceSetConstraints(resourceSet, this.user.id, [sr.id])
|
||||
await this.allocateLimitsInResourceSet({ disk: size }, resourceSet)
|
||||
} else if (!(await this.hasPermissions(this.user.id, [ [ sr.id, 'administrate' ] ]))) {
|
||||
} else if (
|
||||
!await this.hasPermissions(this.user.id, [[sr.id, 'administrate']])
|
||||
) {
|
||||
throw unauthorized()
|
||||
}
|
||||
|
||||
|
@ -1,4 +1,3 @@
|
||||
|
||||
import { format } from 'json-rpc-peer'
|
||||
|
||||
// ===================================================================
|
||||
@ -180,7 +179,8 @@ export function listMissingPatches ({host}) {
|
||||
return this.getXapi(host).listMissingPoolPatchesOnHost(host._xapiId)
|
||||
}
|
||||
|
||||
listMissingPatches.description = 'return an array of missing new patches in the host'
|
||||
listMissingPatches.description =
|
||||
'return an array of missing new patches in the host'
|
||||
|
||||
listMissingPatches.params = {
|
||||
host: { type: 'string' },
|
||||
@ -280,7 +280,9 @@ async function handleInstallSupplementalPack (req, res, { hostId }) {
|
||||
|
||||
export async function installSupplementalPack ({ host }) {
|
||||
return {
|
||||
$sendTo: (await this.registerHttpRequest(handleInstallSupplementalPack, { hostId: host.id })),
|
||||
$sendTo: await this.registerHttpRequest(handleInstallSupplementalPack, {
|
||||
hostId: host.id,
|
||||
}),
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -26,9 +26,8 @@ export function getAll (params) {
|
||||
throw unauthorized()
|
||||
}
|
||||
|
||||
return this.getAllIpPools(user.permission === 'admin'
|
||||
? params && params.userId
|
||||
: user.id
|
||||
return this.getAllIpPools(
|
||||
user.permission === 'admin' ? params && params.userId : user.id
|
||||
)
|
||||
}
|
||||
|
||||
|
@ -4,8 +4,9 @@ export async function get ({namespace}) {
|
||||
return new Promise((resolve, reject) => {
|
||||
const logs = {}
|
||||
|
||||
logger.createReadStream()
|
||||
.on('data', (data) => {
|
||||
logger
|
||||
.createReadStream()
|
||||
.on('data', data => {
|
||||
logs[data.key] = data.value
|
||||
})
|
||||
.on('end', () => {
|
||||
|
@ -4,7 +4,14 @@ export function getBondModes () {
|
||||
return ['balance-slb', 'active-backup', 'lacp']
|
||||
}
|
||||
|
||||
export async function create ({ pool, name, description, pif, mtu = 1500, vlan = 0 }) {
|
||||
export async function create ({
|
||||
pool,
|
||||
name,
|
||||
description,
|
||||
pif,
|
||||
mtu = 1500,
|
||||
vlan = 0,
|
||||
}) {
|
||||
return this.getXapi(pool).createNetwork({
|
||||
name,
|
||||
description,
|
||||
@ -30,13 +37,19 @@ create.permission = 'admin'
|
||||
|
||||
// =================================================================
|
||||
|
||||
export async function createBonded ({ pool, name, description, pifs, mtu = 1500, mac, bondMode }) {
|
||||
export async function createBonded ({
|
||||
pool,
|
||||
name,
|
||||
description,
|
||||
pifs,
|
||||
mtu = 1500,
|
||||
mac,
|
||||
bondMode,
|
||||
}) {
|
||||
return this.getXapi(pool).createBondedNetwork({
|
||||
name,
|
||||
description,
|
||||
pifIds: mapToArray(pifs, pif =>
|
||||
this.getObject(pif, 'PIF')._xapiId
|
||||
),
|
||||
pifIds: mapToArray(pifs, pif => this.getObject(pif, 'PIF')._xapiId),
|
||||
mtu: +mtu,
|
||||
mac,
|
||||
bondMode,
|
||||
@ -56,14 +69,18 @@ createBonded.params = {
|
||||
mtu: { type: ['integer', 'string'], optional: true },
|
||||
mac: { type: 'string', optional: true },
|
||||
// RegExp since schema-inspector does not provide a param check based on an enumeration
|
||||
bondMode: { type: 'string', pattern: new RegExp(`^(${getBondModes().join('|')})$`) },
|
||||
bondMode: {
|
||||
type: 'string',
|
||||
pattern: new RegExp(`^(${getBondModes().join('|')})$`),
|
||||
},
|
||||
}
|
||||
|
||||
createBonded.resolve = {
|
||||
pool: ['pool', 'pool', 'administrate'],
|
||||
}
|
||||
createBonded.permission = 'admin'
|
||||
createBonded.description = 'Create a bonded network. bondMode can be balance-slb, active-backup or lacp'
|
||||
createBonded.description =
|
||||
'Create a bonded network. bondMode can be balance-slb, active-backup or lacp'
|
||||
|
||||
// ===================================================================
|
||||
|
||||
|
@ -60,8 +60,23 @@ connect.resolve = {
|
||||
// ===================================================================
|
||||
// Reconfigure IP
|
||||
|
||||
export async function reconfigureIp ({ pif, mode = 'DHCP', ip = '', netmask = '', gateway = '', dns = '' }) {
|
||||
await this.getXapi(pif).call('PIF.reconfigure_ip', pif._xapiRef, mode, ip, netmask, gateway, dns)
|
||||
export async function reconfigureIp ({
|
||||
pif,
|
||||
mode = 'DHCP',
|
||||
ip = '',
|
||||
netmask = '',
|
||||
gateway = '',
|
||||
dns = '',
|
||||
}) {
|
||||
await this.getXapi(pif).call(
|
||||
'PIF.reconfigure_ip',
|
||||
pif._xapiRef,
|
||||
mode,
|
||||
ip,
|
||||
netmask,
|
||||
gateway,
|
||||
dns
|
||||
)
|
||||
}
|
||||
|
||||
reconfigureIp.params = {
|
||||
|
@ -107,7 +107,8 @@ installAllPatches.resolve = {
  pool: ['pool', 'pool', 'administrate'],
}

installAllPatches.description = 'Install automatically all patches for every hosts of a pool'
installAllPatches.description =
  'Install automatically all patches for every hosts of a pool'

// -------------------------------------------------------------------

@ -178,10 +179,7 @@ mergeInto.resolve = {
// -------------------------------------------------------------------

export async function getLicenseState ({ pool }) {
  return this.getXapi(pool).call(
    'pool.get_license_state',
    pool._xapiId.$ref
  )
  return this.getXapi(pool).call('pool.get_license_state', pool._xapiId.$ref)
}

getLicenseState.params = {
@ -215,11 +213,14 @@ async function handleInstallSupplementalPack (req, res, { poolId }) {

export async function installSupplementalPack ({ pool }) {
  return {
    $sendTo: await this.registerHttpRequest(handleInstallSupplementalPack, { poolId: pool.id }),
    $sendTo: await this.registerHttpRequest(handleInstallSupplementalPack, {
      poolId: pool.id,
    }),
  }
}

installSupplementalPack.description = 'installs supplemental pack from ISO file on all hosts'
installSupplementalPack.description =
  'installs supplemental pack from ISO file on all hosts'

installSupplementalPack.params = {
  pool: { type: 'string' },
@ -1,6 +1,4 @@
import {
  unauthorized,
} from 'xo-common/api-errors'
import { unauthorized } from 'xo-common/api-errors'

// ===================================================================

@ -237,4 +235,5 @@ export function recomputeAllLimits () {
}

recomputeAllLimits.permission = 'admin'
recomputeAllLimits.description = 'Recompute manually the current resource set usage'
recomputeAllLimits.description =
  'Recompute manually the current resource set usage'
@ -18,7 +18,13 @@ get.params = {
}

export async function create ({ jobId, cron, enabled, name, timezone }) {
  return /* await */ this.createSchedule(this.session.get('user_id'), { job: jobId, cron, enabled, name, timezone })
  return /* await */ this.createSchedule(this.session.get('user_id'), {
    job: jobId,
    cron,
    enabled,
    name,
    timezone,
  })
}

create.permission = 'admin'

@ -5,7 +5,7 @@ export async function enable ({id}) {
}

enable.permission = 'admin'
enable.description = 'Enables a schedule to run it\'s job as scheduled'
enable.description = "Enables a schedule to run it's job as scheduled"
enable.params = {
  id: { type: 'string' },
}
@ -4,7 +4,7 @@ export async function add ({autoConnect = true, ...props}) {
  const server = await this.registerXenServer(props)

  if (autoConnect) {
    this.connectXenServer(server.id)::ignoreErrors()
    ;this.connectXenServer(server.id)::ignoreErrors()
  }

  return server.id
@ -105,7 +105,7 @@ set.params = {
// -------------------------------------------------------------------

export async function connect ({ id }) {
  this.updateXenServer(id, {enabled: true})::ignoreErrors()
  ;this.updateXenServer(id, { enabled: true })::ignoreErrors()
  await this.connectXenServer(id)
}

@ -122,7 +122,7 @@ connect.params = {
// -------------------------------------------------------------------

export async function disconnect ({ id }) {
  this.updateXenServer(id, {enabled: false})::ignoreErrors()
  ;this.updateXenServer(id, { enabled: false })::ignoreErrors()
  await this.disconnectXenServer(id)
}
@ -19,7 +19,10 @@ signIn.description = 'sign in'

// -------------------------------------------------------------------

export const signInWithPassword = deprecate(signIn, 'use session.signIn() instead')
export const signInWithPassword = deprecate(
  signIn,
  'use session.signIn() instead'
)

signInWithPassword.params = {
  email: { type: 'string' },
@ -1,12 +1,7 @@
import { some } from 'lodash'

import { asInteger } from '../xapi/utils'
import {
  asyncMap,
  ensureArray,
  forEach,
  parseXml,
} from '../utils'
import { asyncMap, ensureArray, forEach, parseXml } from '../utils'

// ===================================================================

@ -50,7 +45,8 @@ scan.resolve = {
}

// -------------------------------------------------------------------
const srIsBackingHa = (sr) => sr.$pool.ha_enabled && some(sr.$pool.$ha_statefiles, f => f.$SR === sr)
const srIsBackingHa = sr =>
  sr.$pool.ha_enabled && some(sr.$pool.$ha_statefiles, f => f.$SR === sr)

// TODO: find a way to call this "delete" and not destroy
export async function destroy ({ sr }) {
@ -61,7 +57,9 @@ export async function destroy ({sr}) {
  }
  const xapiSr = xapi.getObject(sr)
  if (srIsBackingHa(xapiSr)) {
    throw new Error('You tried to remove a SR the High Availability is relying on. Please disable HA first.')
    throw new Error(
      'You tried to remove a SR the High Availability is relying on. Please disable HA first.'
    )
  }
  const config = xapi.xo.getData(sr, 'xosan_config')
  // we simply forget because the hosted disks are being destroyed with the VMs
@ -239,12 +237,7 @@ createNfs.resolve = {

// This functions creates an HBA SR

export async function createHba ({
  host,
  nameLabel,
  nameDescription,
  scsiId,
}) {
export async function createHba ({ host, nameLabel, nameDescription, scsiId }) {
  const xapi = this.getXapi(host)

  const deviceConfig = {
@ -284,12 +277,7 @@ createHba.resolve = {

// This functions creates a local LVM SR

export async function createLvm ({
  host,
  nameLabel,
  nameDescription,
  device,
}) {
export async function createLvm ({ host, nameLabel, nameDescription, device }) {
  const xapi = this.getXapi(host)

  const deviceConfig = {
@ -328,10 +316,7 @@ createLvm.resolve = {
// This function helps to detect all NFS shares (exports) on a NFS server
// Return a table of exports with their paths and ACLs

export async function probeNfs ({
  host,
  server,
}) {
export async function probeNfs ({ host, server }) {
  const xapi = this.getXapi(host)

  const deviceConfig = {
@ -341,13 +326,7 @@ export async function probeNfs ({
  let xml

  try {
    await xapi.call(
      'SR.probe',
      host._xapiRef,
      deviceConfig,
      'nfs',
      {}
    )
    await xapi.call('SR.probe', host._xapiRef, deviceConfig, 'nfs', {})

    throw new Error('the call above should have thrown an error')
  } catch (error) {
@ -381,20 +360,13 @@ probeNfs.resolve = {
// -------------------------------------------------------------------
// This function helps to detect all HBA devices on the host

export async function probeHba ({
  host,
}) {
export async function probeHba ({ host }) {
  const xapi = this.getXapi(host)

  let xml

  try {
    await xapi.call(
      'SR.probe',
      host._xapiRef,
      'type',
      {}
    )
    await xapi.call('SR.probe', host._xapiRef, 'type', {})

    throw new Error('the call above should have thrown an error')
  } catch (error) {
@ -527,13 +499,7 @@ export async function probeIscsiIqns ({
  let xml

  try {
    await xapi.call(
      'SR.probe',
      host._xapiRef,
      deviceConfig,
      'lvmoiscsi',
      {}
    )
    await xapi.call('SR.probe', host._xapiRef, deviceConfig, 'lvmoiscsi', {})

    throw new Error('the call above should have thrown an error')
  } catch (error) {
@ -605,13 +571,7 @@ export async function probeIscsiLuns ({
  let xml

  try {
    await xapi.call(
      'SR.probe',
      host._xapiRef,
      deviceConfig,
      'lvmoiscsi',
      {}
    )
    await xapi.call('SR.probe', host._xapiRef, deviceConfig, 'lvmoiscsi', {})

    throw new Error('the call above should have thrown an error')
  } catch (error) {
@ -681,7 +641,9 @@ export async function probeIscsiExists ({
    deviceConfig.port = asInteger(port)
  }

  const xml = parseXml(await xapi.call('SR.probe', host._xapiRef, deviceConfig, 'lvmoiscsi', {}))
  const xml = parseXml(
    await xapi.call('SR.probe', host._xapiRef, deviceConfig, 'lvmoiscsi', {})
  )

  const srs = []
  forEach(ensureArray(xml['SRlist'].SR), sr => {
@ -710,11 +672,7 @@ probeIscsiExists.resolve = {
// This function helps to detect if this NFS SR already exists in XAPI
// It returns a table of SR UUID, empty if no existing connections

export async function probeNfsExists ({
  host,
  server,
  serverPath,
}) {
export async function probeNfsExists ({ host, server, serverPath }) {
  const xapi = this.getXapi(host)

  const deviceConfig = {
@ -722,7 +680,9 @@ export async function probeNfsExists ({
    serverpath: serverPath,
  }

  const xml = parseXml(await xapi.call('SR.probe', host._xapiRef, deviceConfig, 'nfs', {}))
  const xml = parseXml(
    await xapi.call('SR.probe', host._xapiRef, deviceConfig, 'nfs', {})
  )

  const srs = []
@ -20,7 +20,8 @@ export function getMethodsInfo () {

  return methods
}
getMethodsInfo.description = 'returns the signatures of all available API methods'
getMethodsInfo.description =
  'returns the signatures of all available API methods'

// -------------------------------------------------------------------

@ -13,9 +13,7 @@ getPermissionsForUser.params = {
// -------------------------------------------------------------------

export function hasPermission ({ userId, objectId, permission }) {
  return this.hasPermissions(userId, [
    [ objectId, permission ],
  ])
  return this.hasPermissions(userId, [[objectId, permission]])
}

hasPermission.permission = 'admin'
@ -64,7 +64,9 @@ export async function set ({id, email, password, permission, preferences}) {
      throw invalidParameters('a user cannot change its own permission')
    }
  } else if (email || password || permission) {
    throw invalidParameters('this properties can only changed by an administrator')
    throw invalidParameters(
      'this properties can only changed by an administrator'
    )
  }

  await this.updateUser(id, { email, password, permission, preferences })
@ -89,7 +91,8 @@ export async function changePassword ({oldPassword, newPassword}) {
  await this.changeUserPassword(id, oldPassword, newPassword)
}

changePassword.description = 'change password after checking old password (user function)'
changePassword.description =
  'change password after checking old password (user function)'

changePassword.permission = ''
@ -10,7 +10,8 @@ import { parseSize } from '../utils'
export async function delete_ ({ vdi }) {
  const resourceSet = reduce(
    vdi.$VBDs,
    (resourceSet, vbd) => resourceSet || this.getObject(this.getObject(vbd, 'VBD').VM).resourceSet,
    (resourceSet, vbd) =>
      resourceSet || this.getObject(this.getObject(vbd, 'VBD').VM).resourceSet,
    undefined
  )

@ -52,18 +53,26 @@ export async function set (params) {

  const vbds = vdi.$VBDs
  if (
    (vbds.length === 1) &&
    ((resourceSetId = xapi.xo.getData(this.getObject(vbds[0], 'VBD').VM, 'resourceSet')) !== undefined)
    vbds.length === 1 &&
    (resourceSetId = xapi.xo.getData(
      this.getObject(vbds[0], 'VBD').VM,
      'resourceSet'
    )) !== undefined
  ) {
    if (this.user.permission !== 'admin') {
      await this.checkResourceSetConstraints(resourceSetId, this.user.id)
    }

    await this.allocateLimitsInResourceSet({ disk: size - vdi.size }, resourceSetId)
  } else if (!(
    (this.user.permission === 'admin') ||
    await this.allocateLimitsInResourceSet(
      { disk: size - vdi.size },
      resourceSetId
    )
  } else if (
    !(
      this.user.permission === 'admin' ||
      (await this.hasPermissions(this.user.id, [[vdi.$SR, 'operate']]))
    )) {
    )
  ) {
    throw unauthorized()
  }
@ -72,14 +81,16 @@ export async function set (params) {

  // Other fields.
  const object = {
    'name_label': 'name_label',
    'name_description': 'name_description',
    name_label: 'name_label',
    name_description: 'name_description',
  }
  for (const param in object) {
    const fields = object[param]
    if (!(param in params)) { continue }
    if (!(param in params)) {
      continue
    }

    for (const field of (isArray(fields) ? fields : [fields])) {
    for (const field of isArray(fields) ? fields : [fields]) {
      await xapi.call(`VDI.set_${field}`, ref, `${params[param]}`)
    }
  }
@ -6,7 +6,7 @@ import { diffItems } from '../utils'

// TODO: move into vm and rename to removeInterface
async function delete_ ({ vif }) {
  this.allocIpAddresses(
  ;this.allocIpAddresses(
    vif.id,
    null,
    vif.allowedIpv4Addresses.concat(vif.allowedIpv6Addresses)
@ -65,7 +65,9 @@ export async function set ({
  allowedIpv6Addresses,
  attached,
}) {
  const oldIpAddresses = vif.allowedIpv4Addresses.concat(vif.allowedIpv6Addresses)
  const oldIpAddresses = vif.allowedIpv4Addresses.concat(
    vif.allowedIpv6Addresses
  )
  const newIpAddresses = []
  {
    const { push } = newIpAddresses
@ -100,11 +102,7 @@ export async function set ({
    newIpAddresses,
    oldIpAddresses
  )
  await this.allocIpAddresses(
    vif.id,
    addAddresses,
    removeAddresses
  )
  await this.allocIpAddresses(vif.id, addAddresses, removeAddresses)

  return this.getXapi(vif).editVif(vif._xapiId, {
    ipv4Allowed: allowedIpv4Addresses,
@ -25,14 +25,13 @@ function checkPermissionOnSrs (vm, permission = 'operate') {
    return permissions.push([this.getObject(vdiId, 'VDI').$SR, permission])
  })

  return this.hasPermissions(
    this.session.get('user_id'),
    permissions
  ).then(success => {
  return this.hasPermissions(this.session.get('user_id'), permissions).then(
    success => {
      if (!success) {
        throw unauthorized()
      }
    })
    }
  )
}

// ===================================================================
@ -359,7 +358,7 @@ async function delete_ ({
  // Update resource sets
  const resourceSet = xapi.xo.getData(vm._xapiId, 'resourceSet')
  if (resourceSet != null) {
    this.setVmResourceSet(vm._xapiId, null)::ignoreErrors()
    ;this.setVmResourceSet(vm._xapiId, null)::ignoreErrors()
  }

  return xapi.deleteVm(vm._xapiId, deleteDisks, force)
@ -1239,8 +1238,10 @@ export async function createInterface ({
}) {
  const { resourceSet } = vm
  if (resourceSet != null) {
    await this.checkResourceSetConstraints(resourceSet, this.user.id, [ network.id ])
  } else if (!(await this.hasPermissions(this.user.id, [ [ network.id, 'view' ] ]))) {
    await this.checkResourceSetConstraints(resourceSet, this.user.id, [
      network.id,
    ])
  } else if (!await this.hasPermissions(this.user.id, [[network.id, 'view']])) {
    throw unauthorized()
  }
@ -12,7 +12,8 @@ clean.permission = 'admin'

export async function exportConfig () {
  return {
    $getFrom: await this.registerHttpRequest((req, res) => {
    $getFrom: await this.registerHttpRequest(
      (req, res) => {
        res.writeHead(200, 'OK', {
          'content-disposition': 'attachment',
        })
@ -20,7 +21,8 @@ export async function exportConfig () {
        return this.exportConfig()
      },
      undefined,
      { suffix: '/config.json' }),
      { suffix: '/config.json' }
    ),
  }
}
File diff suppressed because it is too large
@ -1,11 +1,7 @@
|
||||
import Model from './model'
|
||||
import { BaseError } from 'make-error'
|
||||
import { EventEmitter } from 'events'
|
||||
import {
|
||||
isArray,
|
||||
isObject,
|
||||
map,
|
||||
} from './utils'
|
||||
import { isArray, isObject, map } from './utils'
|
||||
|
||||
// ===================================================================
|
||||
|
||||
@ -40,7 +36,9 @@ export default class Collection extends EventEmitter {
|
||||
}
|
||||
|
||||
const { Model } = this
|
||||
map(models, model => {
|
||||
map(
|
||||
models,
|
||||
model => {
|
||||
if (!(model instanceof Model)) {
|
||||
model = new Model(model)
|
||||
}
|
||||
@ -52,21 +50,19 @@ export default class Collection extends EventEmitter {
|
||||
}
|
||||
|
||||
return model.properties
|
||||
}, models)
|
||||
},
|
||||
models
|
||||
)
|
||||
|
||||
models = await this._add(models, opts)
|
||||
this.emit('add', models)
|
||||
|
||||
return array
|
||||
? models
|
||||
: new this.Model(models[0])
|
||||
return array ? models : new this.Model(models[0])
|
||||
}
|
||||
|
||||
async first (properties) {
|
||||
if (!isObject(properties)) {
|
||||
properties = (properties !== undefined)
|
||||
? { id: properties }
|
||||
: {}
|
||||
properties = properties !== undefined ? { id: properties } : {}
|
||||
}
|
||||
|
||||
const model = await this._first(properties)
|
||||
@ -75,9 +71,7 @@ export default class Collection extends EventEmitter {
|
||||
|
||||
async get (properties) {
|
||||
if (!isObject(properties)) {
|
||||
properties = (properties !== undefined)
|
||||
? { id: properties }
|
||||
: {}
|
||||
properties = properties !== undefined ? { id: properties } : {}
|
||||
}
|
||||
|
||||
return /* await */ this._get(properties)
|
||||
@ -101,7 +95,9 @@ export default class Collection extends EventEmitter {
|
||||
}
|
||||
|
||||
const { Model } = this
|
||||
map(models, model => {
|
||||
map(
|
||||
models,
|
||||
model => {
|
||||
if (!(model instanceof Model)) {
|
||||
// TODO: Problems, we may be mixing in some default
|
||||
// properties which will overwrite existing ones.
|
||||
@ -123,14 +119,14 @@ export default class Collection extends EventEmitter {
|
||||
}
|
||||
|
||||
return model.properties
|
||||
}, models)
|
||||
},
|
||||
models
|
||||
)
|
||||
|
||||
models = await this._update(models)
|
||||
this.emit('update', models)
|
||||
|
||||
return array
|
||||
? models
|
||||
: new this.Model(models[0])
|
||||
return array ? models : new this.Model(models[0])
|
||||
}
|
||||
|
||||
// Methods to override in implementations.
|
||||
@ -165,8 +161,6 @@ export default class Collection extends EventEmitter {
|
||||
async _first (properties) {
|
||||
const models = await this.get(properties)
|
||||
|
||||
return models.length
|
||||
? models[0]
|
||||
: null
|
||||
return models.length ? models[0] : null
|
||||
}
|
||||
}
|
||||
|
@ -1,5 +1,12 @@
|
||||
import { createClient as createRedisClient } from 'redis'
|
||||
import { difference, filter, forEach, isEmpty, keys as getKeys, map } from 'lodash'
|
||||
import {
|
||||
difference,
|
||||
filter,
|
||||
forEach,
|
||||
isEmpty,
|
||||
keys as getKeys,
|
||||
map,
|
||||
} from 'lodash'
|
||||
import { ignoreErrors, promisifyAll } from 'promise-toolbox'
|
||||
import { v4 as generateUuid } from 'uuid'
|
||||
|
||||
@ -28,20 +35,19 @@ import { asyncMap } from '../utils'
|
||||
const VERSION = '20170905'
|
||||
|
||||
export default class Redis extends Collection {
|
||||
constructor ({
|
||||
connection,
|
||||
indexes = [],
|
||||
prefix,
|
||||
uri,
|
||||
}) {
|
||||
constructor ({ connection, indexes = [], prefix, uri }) {
|
||||
super()
|
||||
|
||||
this.indexes = indexes
|
||||
this.prefix = prefix
|
||||
const redis = this.redis = promisifyAll(connection || createRedisClient(uri))
|
||||
const redis = (this.redis = promisifyAll(
|
||||
connection || createRedisClient(uri)
|
||||
))
|
||||
|
||||
const key = `${prefix}:version`
|
||||
redis.get(key).then(version => {
|
||||
redis
|
||||
.get(key)
|
||||
.then(version => {
|
||||
if (version === VERSION) {
|
||||
return
|
||||
}
|
||||
@ -54,7 +60,8 @@ export default class Redis extends Collection {
|
||||
p = p.then(() => this.rebuildIndexes())
|
||||
}
|
||||
return p
|
||||
})::ignoreErrors()
|
||||
})
|
||||
::ignoreErrors()
|
||||
}
|
||||
|
||||
rebuildIndexes () {
|
||||
@ -66,11 +73,13 @@ export default class Redis extends Collection {
|
||||
|
||||
const idsIndex = `${prefix}_ids`
|
||||
return asyncMap(indexes, index =>
|
||||
redis.keys(`${prefix}_${index}:*`).then(keys =>
|
||||
keys.length !== 0 && redis.del(keys)
|
||||
)
|
||||
).then(() => asyncMap(redis.smembers(idsIndex), id =>
|
||||
redis.hgetall(`${prefix}:${id}`).then(values =>
|
||||
redis
|
||||
.keys(`${prefix}_${index}:*`)
|
||||
.then(keys => keys.length !== 0 && redis.del(keys))
|
||||
).then(() =>
|
||||
asyncMap(redis.smembers(idsIndex), id =>
|
||||
redis.hgetall(`${prefix}:${id}`).then(
|
||||
values =>
|
||||
values == null
|
||||
? redis.srem(idsIndex, id) // entry no longer exists
|
||||
: asyncMap(indexes, index => {
|
||||
@ -83,7 +92,8 @@ export default class Redis extends Collection {
|
||||
}
|
||||
})
|
||||
)
|
||||
))
|
||||
)
|
||||
)
|
||||
}
|
||||
|
||||
_extract (ids) {
|
||||
@ -91,7 +101,8 @@ export default class Redis extends Collection {
|
||||
const { redis } = this
|
||||
|
||||
const models = []
|
||||
return Promise.all(map(ids, id => {
|
||||
return Promise.all(
|
||||
map(ids, id => {
|
||||
return redis.hgetall(prefix + id).then(model => {
|
||||
// If empty, consider it a no match.
|
||||
if (isEmpty(model)) {
|
||||
@ -103,7 +114,8 @@ export default class Redis extends Collection {
|
||||
|
||||
models.push(model)
|
||||
})
|
||||
})).then(() => models)
|
||||
})
|
||||
).then(() => models)
|
||||
}
|
||||
|
||||
_add (models, { replace = false } = {}) {
|
||||
@ -112,7 +124,8 @@ export default class Redis extends Collection {
|
||||
|
||||
const { indexes, prefix, redis } = this
|
||||
|
||||
return Promise.all(map(models, async model => {
|
||||
return Promise.all(
|
||||
map(models, async model => {
|
||||
// Generate a new identifier if necessary.
|
||||
if (model.id === undefined) {
|
||||
model.id = generateUuid()
|
||||
@ -132,7 +145,10 @@ export default class Redis extends Collection {
|
||||
await asyncMap(indexes, index => {
|
||||
const value = previous[index]
|
||||
if (value !== undefined) {
|
||||
return redis.srem(`${prefix}_${index}:${String(value).toLowerCase()}`, id)
|
||||
return redis.srem(
|
||||
`${prefix}_${index}:${String(value).toLowerCase()}`,
|
||||
id
|
||||
)
|
||||
}
|
||||
})
|
||||
}
|
||||
@ -149,13 +165,10 @@ export default class Redis extends Collection {
|
||||
})
|
||||
|
||||
const key = `${prefix}:${id}`
|
||||
const promises = [
|
||||
redis.del(key),
|
||||
redis.hmset(key, ...params),
|
||||
]
|
||||
const promises = [redis.del(key), redis.hmset(key, ...params)]
|
||||
|
||||
// Update indexes.
|
||||
forEach(indexes, (index) => {
|
||||
forEach(indexes, index => {
|
||||
const value = model[index]
|
||||
if (value === undefined) {
|
||||
return
|
||||
@ -168,7 +181,8 @@ export default class Redis extends Collection {
|
||||
await Promise.all(promises)
|
||||
|
||||
return model
|
||||
}))
|
||||
})
|
||||
)
|
||||
}
|
||||
|
||||
_get (properties) {
|
||||
@ -183,9 +197,7 @@ export default class Redis extends Collection {
|
||||
if (id !== undefined) {
|
||||
delete properties.id
|
||||
return this._extract([id]).then(models => {
|
||||
return (models.length && !isEmpty(properties))
|
||||
? filter(models)
|
||||
: models
|
||||
return models.length && !isEmpty(properties) ? filter(models) : models
|
||||
})
|
||||
}
|
||||
|
||||
@ -197,7 +209,10 @@ export default class Redis extends Collection {
|
||||
throw new Error('fields not indexed: ' + unfit.join())
|
||||
}
|
||||
|
||||
const keys = map(properties, (value, index) => `${prefix}_${index}:${String(value).toLowerCase()}`)
|
||||
const keys = map(
|
||||
properties,
|
||||
(value, index) => `${prefix}_${index}:${String(value).toLowerCase()}`
|
||||
)
|
||||
return redis.sinter(...keys).then(ids => this._extract(ids))
|
||||
}
|
||||
|
||||
@ -213,16 +228,24 @@ export default class Redis extends Collection {
|
||||
|
||||
// update other indexes
|
||||
if (indexes.length !== 0) {
|
||||
promise = Promise.all([ promise, asyncMap(ids, id =>
|
||||
redis.hgetall(`${prefix}:${id}`).then(values =>
|
||||
values != null && asyncMap(indexes, index => {
|
||||
promise = Promise.all([
|
||||
promise,
|
||||
asyncMap(ids, id =>
|
||||
redis.hgetall(`${prefix}:${id}`).then(
|
||||
values =>
|
||||
values != null &&
|
||||
asyncMap(indexes, index => {
|
||||
const value = values[index]
|
||||
if (value !== undefined) {
|
||||
return redis.srem(`${prefix}_${index}:${String(value).toLowerCase()}`, id)
|
||||
return redis.srem(
|
||||
`${prefix}_${index}:${String(value).toLowerCase()}`,
|
||||
id
|
||||
)
|
||||
}
|
||||
})
|
||||
)
|
||||
) ])
|
||||
),
|
||||
])
|
||||
}
|
||||
|
||||
return promise.then(() =>
|
||||
|
@ -1,16 +1,10 @@
|
||||
import { getBoundPropertyDescriptor } from 'bind-property-descriptor'
|
||||
|
||||
import {
|
||||
isArray,
|
||||
isFunction,
|
||||
} from './utils'
|
||||
import { isArray, isFunction } from './utils'
|
||||
|
||||
// ===================================================================
|
||||
|
||||
const {
|
||||
defineProperties,
|
||||
getOwnPropertyDescriptor,
|
||||
} = Object
|
||||
const { defineProperties, getOwnPropertyDescriptor } = Object
|
||||
|
||||
// ===================================================================
|
||||
|
||||
@ -27,7 +21,9 @@ export const debounce = duration => (target, name, descriptor) => {
|
||||
const s = Symbol(`debounced ${name} data`)
|
||||
|
||||
function debounced () {
|
||||
const data = this[s] || (this[s] = {
|
||||
const data =
|
||||
this[s] ||
|
||||
(this[s] = {
|
||||
lastCall: 0,
|
||||
wrapper: null,
|
||||
})
|
||||
@ -39,12 +35,16 @@ export const debounce = duration => (target, name, descriptor) => {
|
||||
const result = fn.apply(this, arguments)
|
||||
data.wrapper = () => result
|
||||
} catch (error) {
|
||||
data.wrapper = () => { throw error }
|
||||
data.wrapper = () => {
|
||||
throw error
|
||||
}
|
||||
}
|
||||
}
|
||||
return data.wrapper()
|
||||
}
|
||||
debounced.reset = obj => { delete obj[s] }
|
||||
debounced.reset = obj => {
|
||||
delete obj[s]
|
||||
}
|
||||
|
||||
descriptor.value = debounced
|
||||
return descriptor
|
||||
@ -52,21 +52,12 @@ export const debounce = duration => (target, name, descriptor) => {
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
const _ownKeys = (
|
||||
const _ownKeys =
|
||||
(typeof Reflect !== 'undefined' && Reflect.ownKeys) ||
|
||||
(({
|
||||
getOwnPropertyNames: names,
|
||||
getOwnPropertySymbols: symbols,
|
||||
}) => symbols
|
||||
? obj => names(obj).concat(symbols(obj))
|
||||
: names
|
||||
)(Object)
|
||||
)
|
||||
(({ getOwnPropertyNames: names, getOwnPropertySymbols: symbols }) =>
|
||||
symbols ? obj => names(obj).concat(symbols(obj)) : names)(Object)
|
||||
|
||||
const _isIgnoredProperty = name => (
|
||||
name[0] === '_' ||
|
||||
name === 'constructor'
|
||||
)
|
||||
const _isIgnoredProperty = name => name[0] === '_' || name === 'constructor'
|
||||
|
||||
const _IGNORED_STATIC_PROPERTIES = {
|
||||
__proto__: null,
|
||||
@ -103,9 +94,10 @@ export const mixin = MixIns => Class => {
|
||||
throw new Error(`${name}#${prop} is already defined`)
|
||||
}
|
||||
|
||||
(
|
||||
descriptors[prop] = getOwnPropertyDescriptor(MixIn, prop)
|
||||
).enumerable = false // Object methods are enumerable but class methods are not.
|
||||
;(descriptors[prop] = getOwnPropertyDescriptor(
|
||||
MixIn,
|
||||
prop
|
||||
)).enumerable = false // Object methods are enumerable but class methods are not.
|
||||
}
|
||||
}
|
||||
defineProperties(prototype, descriptors)
|
||||
@ -143,16 +135,15 @@ export const mixin = MixIns => Class => {
|
||||
const descriptors = { __proto__: null }
|
||||
for (const prop of _ownKeys(Class)) {
|
||||
let descriptor
|
||||
if (!(
|
||||
// Special properties are not defined...
|
||||
if (
|
||||
!(
|
||||
_isIgnoredStaticProperty(prop) &&
|
||||
|
||||
// if they already exist...
|
||||
(descriptor = getOwnPropertyDescriptor(Decorator, prop)) &&
|
||||
|
||||
// and are not configurable.
|
||||
!descriptor.configurable
|
||||
)) {
|
||||
)
|
||||
) {
|
||||
descriptors[prop] = getOwnPropertyDescriptor(Class, prop)
|
||||
}
|
||||
}
|
||||
|
@ -27,7 +27,8 @@ export function init () {
|
||||
const buf = Buffer.alloc(TEN_MIB)
|
||||
|
||||
// https://github.com/natevw/fatfs/blob/master/structs.js
|
||||
fat16.pack({
|
||||
fat16.pack(
|
||||
{
|
||||
jmpBoot: Buffer.from('eb3c90', 'hex'),
|
||||
OEMName: 'mkfs.fat',
|
||||
BytsPerSec: SECTOR_SIZE,
|
||||
@ -48,7 +49,9 @@ export function init () {
|
||||
VolID: 895111106,
|
||||
VolLab: 'NO NAME ',
|
||||
FilSysType: 'FAT16 ',
|
||||
}, buf)
|
||||
},
|
||||
buf
|
||||
)
|
||||
|
||||
// End of sector.
|
||||
buf[0x1fe] = 0x55
|
||||
|
@ -17,11 +17,7 @@ import { join as joinPath } from 'path'
|
||||
|
||||
import JsonRpcPeer from 'json-rpc-peer'
|
||||
import { invalidCredentials } from 'xo-common/api-errors'
|
||||
import {
|
||||
ensureDir,
|
||||
readdir,
|
||||
readFile,
|
||||
} from 'fs-extra'
|
||||
import { ensureDir, readdir, readFile } from 'fs-extra'
|
||||
|
||||
import WebServer from 'http-server-plus'
|
||||
import Xo from './xo'
|
||||
@ -52,10 +48,7 @@ const warn = (...args) => {
|
||||
|
||||
// ===================================================================
|
||||
|
||||
const DEPRECATED_ENTRIES = [
|
||||
'users',
|
||||
'servers',
|
||||
]
|
||||
const DEPRECATED_ENTRIES = ['users', 'servers']
|
||||
|
||||
async function loadConfiguration () {
|
||||
const config = await appConf.load('xo-server', {
|
||||
@ -85,13 +78,15 @@ function createExpressApp () {
|
||||
// Registers the cookie-parser and express-session middlewares,
|
||||
// necessary for connect-flash.
|
||||
app.use(cookieParser())
|
||||
app.use(expressSession({
|
||||
app.use(
|
||||
expressSession({
|
||||
resave: false,
|
||||
saveUninitialized: false,
|
||||
|
||||
// TODO: should be in the config file.
|
||||
secret: 'CLWguhRZAZIXZcbrMzHCYmefxgweItKnS',
|
||||
}))
|
||||
})
|
||||
)
|
||||
|
||||
// Registers the connect-flash middleware, necessary for Passport to
|
||||
// display error messages.
|
||||
@ -123,10 +118,12 @@ async function setUpPassport (express, xo) {
|
||||
await readFile(joinPath(__dirname, '..', 'signin.pug'))
|
||||
)
|
||||
express.get('/signin', (req, res, next) => {
|
||||
res.send(signInPage({
|
||||
res.send(
|
||||
signInPage({
|
||||
error: req.flash('error')[0],
|
||||
strategies,
|
||||
}))
|
||||
})
|
||||
)
|
||||
})
|
||||
|
||||
express.get('/signout', (req, res) => {
|
||||
@ -183,7 +180,9 @@ async function setUpPassport (express, xo) {
|
||||
next()
|
||||
} else if (req.cookies.token) {
|
||||
next()
|
||||
} else if (/favicon|fontawesome|images|styles|\.(?:css|jpg|png)$/.test(url)) {
|
||||
} else if (
|
||||
/favicon|fontawesome|images|styles|\.(?:css|jpg|png)$/.test(url)
|
||||
) {
|
||||
next()
|
||||
} else {
|
||||
req.flash('return-url', url)
|
||||
@ -192,16 +191,16 @@ async function setUpPassport (express, xo) {
|
||||
})
|
||||
|
||||
// Install the local strategy.
|
||||
xo.registerPassportStrategy(new LocalStrategy(
|
||||
async (username, password, done) => {
|
||||
xo.registerPassportStrategy(
|
||||
new LocalStrategy(async (username, password, done) => {
|
||||
try {
|
||||
const user = await xo.authenticateUser({ username, password })
|
||||
done(null, user)
|
||||
} catch (error) {
|
||||
done(null, false, { message: error.message })
|
||||
}
|
||||
}
|
||||
))
|
||||
})
|
||||
)
|
||||
}
|
||||
|
||||
// ===================================================================
|
||||
@ -274,7 +273,8 @@ async function registerPluginsInPath (path) {
|
||||
throw error
|
||||
})
|
||||
|
||||
await Promise.all(mapToArray(files, name => {
|
||||
await Promise.all(
|
||||
mapToArray(files, name => {
|
||||
if (startsWith(name, PLUGIN_PREFIX)) {
|
||||
return registerPluginWrapper.call(
|
||||
this,
|
||||
@ -282,19 +282,24 @@ async function registerPluginsInPath (path) {
|
||||
name.slice(PLUGIN_PREFIX_LENGTH)
|
||||
)
|
||||
}
|
||||
}))
|
||||
})
|
||||
)
|
||||
}
|
||||
|
||||
async function registerPlugins (xo) {
|
||||
await Promise.all(mapToArray([
|
||||
`${__dirname}/../node_modules/`,
|
||||
'/usr/local/lib/node_modules/',
|
||||
], xo::registerPluginsInPath))
|
||||
await Promise.all(
|
||||
mapToArray(
|
||||
[`${__dirname}/../node_modules/`, '/usr/local/lib/node_modules/'],
|
||||
xo::registerPluginsInPath
|
||||
)
|
||||
)
|
||||
}
|
||||
|
||||
// ===================================================================
|
||||
|
||||
async function makeWebServerListen (webServer, {
|
||||
async function makeWebServerListen (
|
||||
webServer,
|
||||
{
|
||||
certificate,
|
||||
|
||||
// The properties was called `certificate` before.
|
||||
@ -302,12 +307,10 @@ async function makeWebServerListen (webServer, {
|
||||
|
||||
key,
|
||||
...opts
|
||||
}) {
|
||||
}
|
||||
) {
|
||||
if (cert && key) {
|
||||
[opts.cert, opts.key] = await Promise.all([
|
||||
readFile(cert),
|
||||
readFile(key),
|
||||
])
|
||||
;[opts.cert, opts.key] = await Promise.all([readFile(cert), readFile(key)])
|
||||
}
|
||||
try {
|
||||
const niceAddress = await webServer.listen(opts)
|
||||
@ -332,9 +335,11 @@ async function makeWebServerListen (webServer, {
|
||||
async function createWebServer ({ listen, listenOptions }) {
|
||||
const webServer = new WebServer()
|
||||
|
||||
await Promise.all(mapToArray(listen,
|
||||
opts => makeWebServerListen(webServer, { ...listenOptions, ...opts })
|
||||
))
|
||||
await Promise.all(
|
||||
mapToArray(listen, opts =>
|
||||
makeWebServerListen(webServer, { ...listenOptions, ...opts })
|
||||
)
|
||||
)
|
||||
|
||||
return webServer
|
||||
}
|
||||
@ -348,7 +353,7 @@ const setUpProxies = (express, opts, xo) => {
|
||||
|
||||
const proxy = createProxyServer({
|
||||
ignorePath: true,
|
||||
}).on('error', (error) => console.error(error))
|
||||
}).on('error', error => console.error(error))
|
||||
|
||||
// TODO: sort proxies by descending prefix length.
|
||||
|
||||
@ -464,7 +469,9 @@ const setUpApi = (webServer, xo, verboseLogsOnErrors) => {
|
||||
}
|
||||
webServer.on('upgrade', (req, socket, head) => {
|
||||
if (req.url === '/api/') {
|
||||
webSocketServer.handleUpgrade(req, socket, head, ws => onConnection(ws, req))
|
||||
webSocketServer.handleUpgrade(req, socket, head, ws =>
|
||||
onConnection(ws, req)
|
||||
)
|
||||
}
|
||||
})
|
||||
}
|
||||
@ -518,10 +525,7 @@ const setUpConsoleProxy = (webServer, xo) => {
|
||||
|
||||
// ===================================================================
|
||||
|
||||
const USAGE = (({
|
||||
name,
|
||||
version,
|
||||
}) => `Usage: ${name} [--safe-mode]
|
||||
const USAGE = (({ name, version }) => `Usage: ${name} [--safe-mode]
|
||||
|
||||
${name} v${version}`)(require('../package.json'))
|
||||
|
||||
@ -576,10 +580,7 @@ export default async function main (args) {
|
||||
if (config.http.redirectToHttps) {
|
||||
let port
|
||||
forEach(config.http.listen, listen => {
|
||||
if (
|
||||
listen.port &&
|
||||
(listen.cert || listen.certificate)
|
||||
) {
|
||||
if (listen.port && (listen.cert || listen.certificate)) {
|
||||
port = listen.port
|
||||
return false
|
||||
}
|
||||
|
@ -2,20 +2,10 @@ import Bluebird from 'bluebird'
|
||||
import { BaseError } from 'make-error'
|
||||
import { createPredicate } from 'value-matcher'
|
||||
import { timeout } from 'promise-toolbox'
|
||||
import {
|
||||
assign,
|
||||
filter,
|
||||
find,
|
||||
isEmpty,
|
||||
map,
|
||||
mapValues,
|
||||
} from 'lodash'
|
||||
import { assign, filter, find, isEmpty, map, mapValues } from 'lodash'
|
||||
|
||||
import { crossProduct } from './math'
|
||||
import {
|
||||
serializeError,
|
||||
thunkToArray,
|
||||
} from './utils'
|
||||
import { serializeError, thunkToArray } from './utils'
|
||||
|
||||
export class JobExecutorError extends BaseError {}
|
||||
export class UnsupportedJobType extends JobExecutorError {
|
||||
@ -36,9 +26,9 @@ const paramsVectorActionsMap = {
|
||||
return mapValues(mapping, key => value[key])
|
||||
},
|
||||
crossProduct ({ items }) {
|
||||
return thunkToArray(crossProduct(
|
||||
map(items, value => resolveParamsVector.call(this, value))
|
||||
))
|
||||
return thunkToArray(
|
||||
crossProduct(map(items, value => resolveParamsVector.call(this, value)))
|
||||
)
|
||||
},
|
||||
fetchObjects ({ pattern }) {
|
||||
const objects = filter(this.xo.getObjects(), createPredicate(pattern))
|
||||
@ -74,9 +64,11 @@ export default class JobExecutor {
|
||||
this.xo = xo
|
||||
|
||||
// The logger is not available until Xo has started.
|
||||
xo.on('start', () => xo.getLogger('jobs').then(logger => {
|
||||
xo.on('start', () =>
|
||||
xo.getLogger('jobs').then(logger => {
|
||||
this._logger = logger
|
||||
}))
|
||||
})
|
||||
)
|
||||
}
|
||||
|
||||
async exec (job) {
|
||||
@ -130,51 +122,68 @@ export default class JobExecutor {
|
||||
timezone: schedule !== undefined ? schedule.timezone : undefined,
|
||||
}
|
||||
|
||||
await Bluebird.map(paramsFlatVector, params => {
|
||||
const runCallId = this._logger.notice(`Starting ${job.method} call. (${job.id})`, {
|
||||
await Bluebird.map(
|
||||
paramsFlatVector,
|
||||
params => {
|
||||
const runCallId = this._logger.notice(
|
||||
`Starting ${job.method} call. (${job.id})`,
|
||||
{
|
||||
event: 'jobCall.start',
|
||||
runJobId,
|
||||
method: job.method,
|
||||
params,
|
||||
})
|
||||
}
|
||||
)
|
||||
|
||||
const call = execStatus.calls[runCallId] = {
|
||||
const call = (execStatus.calls[runCallId] = {
|
||||
method: job.method,
|
||||
params,
|
||||
start: Date.now(),
|
||||
}
|
||||
let promise = this.xo.callApiMethod(connection, job.method, assign({}, params))
|
||||
})
|
||||
let promise = this.xo.callApiMethod(
|
||||
connection,
|
||||
job.method,
|
||||
assign({}, params)
|
||||
)
|
||||
if (job.timeout) {
|
||||
promise = promise::timeout(job.timeout)
|
||||
}
|
||||
|
||||
return promise.then(
|
||||
value => {
|
||||
this._logger.notice(`Call ${job.method} (${runCallId}) is a success. (${job.id})`, {
|
||||
this._logger.notice(
|
||||
`Call ${job.method} (${runCallId}) is a success. (${job.id})`,
|
||||
{
|
||||
event: 'jobCall.end',
|
||||
runJobId,
|
||||
runCallId,
|
||||
returnedValue: value,
|
||||
})
|
||||
}
|
||||
)
|
||||
|
||||
call.returnedValue = value
|
||||
call.end = Date.now()
|
||||
},
|
||||
reason => {
|
||||
this._logger.notice(`Call ${job.method} (${runCallId}) has failed. (${job.id})`, {
|
||||
this._logger.notice(
|
||||
`Call ${job.method} (${runCallId}) has failed. (${job.id})`,
|
||||
{
|
||||
event: 'jobCall.end',
|
||||
runJobId,
|
||||
runCallId,
|
||||
error: serializeError(reason),
|
||||
})
|
||||
}
|
||||
)
|
||||
|
||||
call.error = reason
|
||||
call.end = Date.now()
|
||||
}
|
||||
)
|
||||
}, {
|
||||
},
|
||||
{
|
||||
concurrency: 2,
|
||||
})
|
||||
}
|
||||
)
|
||||
|
||||
connection.close()
|
||||
execStatus.end = Date.now()
|
||||
|
@ -4,44 +4,50 @@ import { forEach } from 'lodash'
|
||||
import { resolveParamsVector } from './job-executor'
|
||||
|
||||
describe('resolveParamsVector', function () {
|
||||
forEach({
|
||||
forEach(
|
||||
{
|
||||
'cross product with three sets': [
|
||||
// Expected result.
|
||||
[ { id: 3, value: 'foo', remote: 'local' },
|
||||
[
|
||||
{ id: 3, value: 'foo', remote: 'local' },
|
||||
{ id: 7, value: 'foo', remote: 'local' },
|
||||
{ id: 10, value: 'foo', remote: 'local' },
|
||||
{ id: 3, value: 'bar', remote: 'local' },
|
||||
{ id: 7, value: 'bar', remote: 'local' },
|
||||
{ id: 10, value: 'bar', remote: 'local' } ],
|
||||
{ id: 10, value: 'bar', remote: 'local' },
|
||||
],
|
||||
// Entry.
|
||||
{
|
||||
type: 'crossProduct',
|
||||
items: [{
|
||||
items: [
|
||||
{
|
||||
type: 'set',
|
||||
values: [{ id: 3 }, { id: 7 }, { id: 10 }],
|
||||
}, {
|
||||
},
|
||||
{
|
||||
type: 'set',
|
||||
values: [{ value: 'foo' }, { value: 'bar' }],
|
||||
}, {
|
||||
},
|
||||
{
|
||||
type: 'set',
|
||||
values: [{ remote: 'local' }],
|
||||
}],
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
'cross product with `set` and `map`': [
|
||||
// Expected result.
|
||||
[
|
||||
{ remote: 'local', id: 'vm:2' },
|
||||
{ remote: 'smb', id: 'vm:2' },
|
||||
],
|
||||
[{ remote: 'local', id: 'vm:2' }, { remote: 'smb', id: 'vm:2' }],
|
||||
|
||||
// Entry.
|
||||
{
|
||||
type: 'crossProduct',
|
||||
items: [{
|
||||
items: [
|
||||
{
|
||||
type: 'set',
|
||||
values: [{ remote: 'local' }, { remote: 'smb' }],
|
||||
}, {
|
||||
},
|
||||
{
|
||||
type: 'map',
|
||||
collection: {
|
||||
type: 'fetchObjects',
|
||||
@ -56,45 +62,55 @@ describe('resolveParamsVector', function () {
|
||||
type: 'extractProperties',
|
||||
mapping: { id: 'id' },
|
||||
},
|
||||
}],
|
||||
},
|
||||
],
|
||||
},
|
||||
|
||||
// Context.
|
||||
{
|
||||
xo: {
|
||||
getObjects: function () {
|
||||
return [{
|
||||
return [
|
||||
{
|
||||
id: 'vm:1',
|
||||
$pool: 'pool:1',
|
||||
tags: [],
|
||||
type: 'VM',
|
||||
power_state: 'Halted',
|
||||
}, {
|
||||
},
|
||||
{
|
||||
id: 'vm:2',
|
||||
$pool: 'pool:1',
|
||||
tags: ['foo'],
|
||||
type: 'VM',
|
||||
power_state: 'Running',
|
||||
}, {
|
||||
},
|
||||
{
|
||||
id: 'host:1',
|
||||
type: 'host',
|
||||
power_state: 'Running',
|
||||
}, {
|
||||
},
|
||||
{
|
||||
id: 'vm:3',
|
||||
$pool: 'pool:8',
|
||||
tags: ['foo'],
|
||||
type: 'VM',
|
||||
power_state: 'Halted',
|
||||
}]
|
||||
},
|
||||
]
|
||||
},
|
||||
},
|
||||
},
|
||||
],
|
||||
}, ([ expectedResult, entry, context ], name) => {
|
||||
},
|
||||
([expectedResult, entry, context], name) => {
|
||||
describe(`with ${name}`, () => {
|
||||
it('Resolves params vector', () => {
|
||||
expect(resolveParamsVector.call(context, entry)).toEqual(expectedResult)
|
||||
})
|
||||
expect(resolveParamsVector.call(context, entry)).toEqual(
|
||||
expectedResult
|
||||
)
|
||||
})
|
||||
})
|
||||
}
|
||||
)
|
||||
})
|
||||
|
@ -17,11 +17,11 @@ async function printLogs (db, args) {
|
||||
let stream = highland(db.createReadStream({ reverse: true }))
|
||||
|
||||
if (args.since) {
|
||||
stream = stream.filter(({value}) => (value.time >= args.since))
|
||||
stream = stream.filter(({ value }) => value.time >= args.since)
|
||||
}
|
||||
|
||||
if (args.until) {
|
||||
stream = stream.filter(({value}) => (value.time <= args.until))
|
||||
stream = stream.filter(({ value }) => value.time <= args.until)
|
||||
}
|
||||
|
||||
const fields = Object.keys(args.matchers)
|
||||
@ -42,8 +42,7 @@ async function printLogs (db, args) {
|
||||
stream = stream.take(args.limit)
|
||||
|
||||
if (args.json) {
|
||||
stream = highland(stream.pipe(ndjson.serialize()))
|
||||
.each(value => {
|
||||
stream = highland(stream.pipe(ndjson.serialize())).each(value => {
|
||||
process.stdout.write(value)
|
||||
})
|
||||
} else {
|
||||
@ -126,7 +125,7 @@ function getArgs () {
|
||||
|
||||
patterns[pattern]
|
||||
? patterns[field].push(pattern)
|
||||
: patterns[field] = [ pattern ]
|
||||
: (patterns[field] = [pattern])
|
||||
} else if (!patterns[value]) {
|
||||
patterns[value] = null
|
||||
}
|
||||
@ -137,7 +136,7 @@ function getArgs () {
|
||||
|
||||
for (const field in patterns) {
|
||||
const values = patterns[field]
|
||||
args.matchers[field] = (values === null) ? trueFunction : globMatcher(values)
|
||||
args.matchers[field] = values === null ? trueFunction : globMatcher(values)
|
||||
}
|
||||
|
||||
// Warning: minimist makes one array of values if the same option is used many times.
|
||||
@ -147,7 +146,6 @@ function getArgs () {
|
||||
throw new Error(`error: too many values for ${arg} argument`)
|
||||
}
|
||||
})
|
||||
|
||||
;['since', 'until'].forEach(arg => {
|
||||
if (args[arg] !== undefined) {
|
||||
args[arg] = Date.parse(args[arg])
|
||||
@ -158,7 +156,7 @@ function getArgs () {
|
||||
}
|
||||
})
|
||||
|
||||
if (isNaN(args.limit = +args.limit)) {
|
||||
if (isNaN((args.limit = +args.limit))) {
|
||||
throw new Error('error: limit is not a valid number')
|
||||
}
|
||||
|
||||
@ -193,10 +191,9 @@ export default async function main () {
|
||||
return
|
||||
}
|
||||
|
||||
const db = sublevel(levelup(
|
||||
`${config.datadir}/leveldb`,
|
||||
{ valueEncoding: 'json' }
|
||||
)).sublevel('logs')
|
||||
const db = sublevel(
|
||||
levelup(`${config.datadir}/leveldb`, { valueEncoding: 'json' })
|
||||
).sublevel('logs')
|
||||
|
||||
return printLogs(db, args)
|
||||
}
|
||||
|
@ -9,7 +9,8 @@ const parse = createParser({
|
||||
keyTransform: key => key.slice(5).toLowerCase(),
|
||||
})
|
||||
const makeFunction = command => (fields, ...args) =>
|
||||
execa.stdout(command, [
|
||||
execa
|
||||
.stdout(command, [
|
||||
'--noheading',
|
||||
'--nosuffix',
|
||||
'--nameprefixes',
|
||||
@ -19,7 +20,9 @@ const makeFunction = command => (fields, ...args) =>
|
||||
'-o',
|
||||
String(fields),
|
||||
...args,
|
||||
]).then(stdout => map(
|
||||
])
|
||||
.then(stdout =>
|
||||
map(
|
||||
splitLines(stdout),
|
||||
isArray(fields)
|
||||
? parse
|
||||
@ -27,7 +30,8 @@ const makeFunction = command => (fields, ...args) =>
|
||||
const data = parse(line)
|
||||
return data[fields]
|
||||
}
|
||||
))
|
||||
)
|
||||
)
|
||||
|
||||
export const lvs = makeFunction('lvs')
|
||||
export const pvs = makeFunction('pvs')
|
||||
|
@ -19,7 +19,7 @@ const _combine = (vectors, n, cb) => {
|
||||
for (let i = 0; i < m; ++i) {
|
||||
const value = vector[i]
|
||||
|
||||
_combine(vectors, nLast, (vector) => {
|
||||
_combine(vectors, nLast, vector => {
|
||||
vector.push(value)
|
||||
cb(vector)
|
||||
})
|
||||
@ -41,8 +41,7 @@ export const mergeObjects = objects => assign({}, ...objects)
|
||||
//
|
||||
// Ex: crossProduct([ [ { a: 2 }, { b: 3 } ], [ { c: 5 }, { d: 7 } ] ] )
|
||||
// => [ { a: 2, c: 5 }, { b: 3, c: 5 }, { a: 2, d: 7 }, { b: 3, d: 7 } ]
|
||||
export const crossProduct = (vectors, mergeFn = mergeObjects) => cb => (
|
||||
export const crossProduct = (vectors, mergeFn = mergeObjects) => cb =>
|
||||
combine(vectors)(vector => {
|
||||
cb(mergeFn(vector))
|
||||
})
|
||||
)
|
||||
|
@ -2,41 +2,36 @@
|
||||
|
||||
import { forEach } from 'lodash'
|
||||
import { thunkToArray } from './utils'
|
||||
import {
|
||||
crossProduct,
|
||||
mergeObjects,
|
||||
} from './math'
|
||||
import { crossProduct, mergeObjects } from './math'
|
||||
|
||||
describe('mergeObjects', function () {
|
||||
forEach({
|
||||
'Two sets of one': [
|
||||
{a: 1, b: 2}, {a: 1}, {b: 2},
|
||||
],
|
||||
forEach(
|
||||
{
|
||||
'Two sets of one': [{ a: 1, b: 2 }, { a: 1 }, { b: 2 }],
|
||||
'Two sets of two': [
|
||||
{a: 1, b: 2, c: 3, d: 4}, {a: 1, b: 2}, {c: 3, d: 4},
|
||||
{ a: 1, b: 2, c: 3, d: 4 },
|
||||
{ a: 1, b: 2 },
|
||||
{ c: 3, d: 4 },
|
||||
],
|
||||
'Three sets': [
|
||||
{a: 1, b: 2, c: 3, d: 4, e: 5, f: 6}, {a: 1}, {b: 2, c: 3}, {d: 4, e: 5, f: 6},
|
||||
{ a: 1, b: 2, c: 3, d: 4, e: 5, f: 6 },
|
||||
{ a: 1 },
|
||||
{ b: 2, c: 3 },
|
||||
{ d: 4, e: 5, f: 6 },
|
||||
],
|
||||
'One set': [
|
||||
{a: 1, b: 2}, {a: 1, b: 2},
|
||||
],
|
||||
'Empty set': [
|
||||
{a: 1}, {a: 1}, {},
|
||||
],
|
||||
'All empty': [
|
||||
{}, {}, {},
|
||||
],
|
||||
'No set': [
|
||||
{},
|
||||
],
|
||||
}, ([ resultSet, ...sets ], name) => {
|
||||
'One set': [{ a: 1, b: 2 }, { a: 1, b: 2 }],
|
||||
'Empty set': [{ a: 1 }, { a: 1 }, {}],
|
||||
'All empty': [{}, {}, {}],
|
||||
'No set': [{}],
|
||||
},
|
||||
([resultSet, ...sets], name) => {
|
||||
describe(`with ${name}`, () => {
|
||||
it('Assembles all given param sets in on set', function () {
|
||||
expect(mergeObjects(sets)).toEqual(resultSet)
|
||||
})
|
||||
})
|
||||
})
|
||||
}
|
||||
)
|
||||
})
|
||||
|
||||
describe('crossProduct', function () {
|
||||
@ -45,30 +40,43 @@ describe('crossProduct', function () {
|
||||
// Gives the product of all args
|
||||
const multiplyTest = args => args.reduce((prev, curr) => prev * curr, 1)
|
||||
|
||||
forEach({
|
||||
forEach(
|
||||
{
|
||||
'2 sets of 2 items to multiply': [
|
||||
[10, 14, 15, 21], [[2, 3], [5, 7]], multiplyTest,
|
||||
[10, 14, 15, 21],
|
||||
[[2, 3], [5, 7]],
|
||||
multiplyTest,
|
||||
],
|
||||
'3 sets of 2 items to multiply': [
|
||||
[110, 130, 154, 182, 165, 195, 231, 273], [[2, 3], [5, 7], [11, 13]], multiplyTest,
|
||||
[110, 130, 154, 182, 165, 195, 231, 273],
|
||||
[[2, 3], [5, 7], [11, 13]],
|
||||
multiplyTest,
|
||||
],
|
||||
'2 sets of 3 items to multiply': [
|
||||
[14, 22, 26, 21, 33, 39, 35, 55, 65], [[2, 3, 5], [7, 11, 13]], multiplyTest,
|
||||
],
|
||||
'2 sets of 2 items to add': [
|
||||
[7, 9, 8, 10], [[2, 3], [5, 7]], addTest,
|
||||
[14, 22, 26, 21, 33, 39, 35, 55, 65],
|
||||
[[2, 3, 5], [7, 11, 13]],
|
||||
multiplyTest,
|
||||
],
|
||||
'2 sets of 2 items to add': [[7, 9, 8, 10], [[2, 3], [5, 7]], addTest],
|
||||
'3 sets of 2 items to add': [
|
||||
[18, 20, 20, 22, 19, 21, 21, 23], [[2, 3], [5, 7], [11, 13]], addTest,
|
||||
[18, 20, 20, 22, 19, 21, 21, 23],
|
||||
[[2, 3], [5, 7], [11, 13]],
|
||||
addTest,
|
||||
],
|
||||
'2 sets of 3 items to add': [
|
||||
[9, 13, 15, 10, 14, 16, 12, 16, 18], [[2, 3, 5], [7, 11, 13]], addTest,
|
||||
[9, 13, 15, 10, 14, 16, 12, 16, 18],
|
||||
[[2, 3, 5], [7, 11, 13]],
|
||||
addTest,
|
||||
],
|
||||
}, ([ product, items, cb ], name) => {
|
||||
},
|
||||
([product, items, cb], name) => {
|
||||
describe(`with ${name}`, () => {
|
||||
it('Crosses sets of values with a crossProduct callback', function () {
|
||||
expect(thunkToArray(crossProduct(items, cb)).sort()).toEqual(product.sort())
|
||||
})
|
||||
expect(thunkToArray(crossProduct(items, cb)).sort()).toEqual(
|
||||
product.sort()
|
||||
)
|
||||
})
|
||||
})
|
||||
}
|
||||
)
|
||||
})
|
||||
|
@ -1,10 +1,6 @@
|
||||
import { EventEmitter } from 'events'
|
||||
|
||||
import {
|
||||
forEach,
|
||||
isEmpty,
|
||||
isString,
|
||||
} from './utils'
|
||||
import { forEach, isEmpty, isString } from './utils'
|
||||
|
||||
// ===================================================================
|
||||
|
||||
@ -35,7 +31,7 @@ export default class Model extends EventEmitter {
|
||||
|
||||
// Check whether a property exists.
|
||||
has (name) {
|
||||
return (this.properties[name] !== undefined)
|
||||
return this.properties[name] !== undefined
|
||||
}
|
||||
|
||||
// Set properties.
|
||||
|
@ -1,10 +1,6 @@
|
||||
import Collection from '../collection/redis'
|
||||
import Model from '../model'
|
||||
import {
|
||||
forEach,
|
||||
mapToArray,
|
||||
multiKeyHash,
|
||||
} from '../utils'
|
||||
import { forEach, mapToArray, multiKeyHash } from '../utils'
|
||||
|
||||
// ===================================================================
|
||||
|
||||
@ -17,12 +13,15 @@ const DEFAULT_ACTION = 'admin'
|
||||
export default class Acl extends Model {}
|
||||
|
||||
Acl.create = (subject, object, action) => {
|
||||
return Acl.hash(subject, object, action).then(hash => new Acl({
|
||||
return Acl.hash(subject, object, action).then(
|
||||
hash =>
|
||||
new Acl({
|
||||
id: hash,
|
||||
subject,
|
||||
object,
|
||||
action,
|
||||
}))
|
||||
})
|
||||
)
|
||||
}
|
||||
|
||||
Acl.hash = (subject, object, action) => multiKeyHash(subject, object, action)
|
||||
@ -63,12 +62,13 @@ export class Acls extends Collection {
|
||||
|
||||
// Compute the new ids (new hashes).
|
||||
const { hash } = Acl
|
||||
await Promise.all(mapToArray(
|
||||
toUpdate,
|
||||
(acl) => hash(acl.subject, acl.object, acl.action).then(id => {
|
||||
await Promise.all(
|
||||
mapToArray(toUpdate, acl =>
|
||||
hash(acl.subject, acl.object, acl.action).then(id => {
|
||||
acl.id = id
|
||||
})
|
||||
))
|
||||
)
|
||||
)
|
||||
|
||||
// Inserts the new (updated) entries.
|
||||
await this.add(toUpdate)
|
||||
|
@ -25,9 +25,7 @@ export class Groups extends Collection {
|
||||
async save (group) {
|
||||
// Serializes.
|
||||
let tmp
|
||||
group.users = isEmpty(tmp = group.users)
|
||||
? undefined
|
||||
: JSON.stringify(tmp)
|
||||
group.users = isEmpty((tmp = group.users)) ? undefined : JSON.stringify(tmp)
|
||||
|
||||
return /* await */ this.update(group)
|
||||
}
|
||||
|
@ -41,9 +41,13 @@ export class PluginsMetadata extends Collection {
|
||||
const { autoload, configuration } = pluginMetadata
|
||||
pluginMetadata.autoload = autoload === 'true'
|
||||
try {
|
||||
pluginMetadata.configuration = configuration && JSON.parse(configuration)
|
||||
pluginMetadata.configuration =
|
||||
configuration && JSON.parse(configuration)
|
||||
} catch (error) {
|
||||
console.warn('cannot parse pluginMetadata.configuration:', configuration)
|
||||
console.warn(
|
||||
'cannot parse pluginMetadata.configuration:',
|
||||
configuration
|
||||
)
|
||||
pluginMetadata.configuration = []
|
||||
}
|
||||
})
|
||||
|
@ -1,8 +1,6 @@
|
||||
import Collection from '../collection/redis'
|
||||
import Model from '../model'
|
||||
import {
|
||||
forEach,
|
||||
} from '../utils'
|
||||
import { forEach } from '../utils'
|
||||
|
||||
// ===================================================================
|
||||
|
||||
@ -14,12 +12,14 @@ export class Remotes extends Collection {
|
||||
}
|
||||
|
||||
create (name, url) {
|
||||
return this.add(new Remote({
|
||||
return this.add(
|
||||
new Remote({
|
||||
name,
|
||||
url,
|
||||
enabled: false,
|
||||
error: '',
|
||||
}))
|
||||
})
|
||||
)
|
||||
}
|
||||
|
||||
async save (remote) {
|
||||
@ -29,7 +29,7 @@ export class Remotes extends Collection {
|
||||
async get (properties) {
|
||||
const remotes = await super.get(properties)
|
||||
forEach(remotes, remote => {
|
||||
remote.enabled = (remote.enabled === 'true')
|
||||
remote.enabled = remote.enabled === 'true'
|
||||
})
|
||||
return remotes
|
||||
}
|
||||
|
@ -12,14 +12,16 @@ export class Schedules extends Collection {
|
||||
}
|
||||
|
||||
create (userId, job, cron, enabled, name = undefined, timezone = undefined) {
|
||||
return this.add(new Schedule({
|
||||
return this.add(
|
||||
new Schedule({
|
||||
userId,
|
||||
job,
|
||||
cron,
|
||||
enabled,
|
||||
name,
|
||||
timezone,
|
||||
}))
|
||||
})
|
||||
)
|
||||
}
|
||||
|
||||
async save (schedule) {
|
||||
@ -29,7 +31,7 @@ export class Schedules extends Collection {
|
||||
async get (properties) {
|
||||
const schedules = await super.get(properties)
|
||||
forEach(schedules, schedule => {
|
||||
schedule.enabled = (schedule.enabled === 'true')
|
||||
schedule.enabled = schedule.enabled === 'true'
|
||||
})
|
||||
return schedules
|
||||
}
|
||||
|
@ -39,10 +39,8 @@ export class Users extends Collection {
|
||||
async save (user) {
|
||||
// Serializes.
|
||||
let tmp
|
||||
user.groups = isEmpty(tmp = user.groups)
|
||||
? undefined
|
||||
: JSON.stringify(tmp)
|
||||
user.preferences = isEmpty(tmp = user.preferences)
|
||||
user.groups = isEmpty((tmp = user.groups)) ? undefined : JSON.stringify(tmp)
|
||||
user.preferences = isEmpty((tmp = user.preferences))
|
||||
? undefined
|
||||
: JSON.stringify(tmp)
|
||||
|
||||
|
@ -10,33 +10,46 @@ export default function proxyConsole (ws, vmConsole, sessionId) {

let closed = false

const socket = connect({
const socket = connect(
{
host: url.host,
port: url.port || 443,
rejectUnauthorized: false,
}, () => {
},
() => {
// Write headers.
socket.write([
socket.write(
[
`CONNECT ${url.path} HTTP/1.0`,
`Host: ${url.hostname}`,
`Cookie: session_id=${sessionId}`,
'', '',
].join('\r\n'))
'',
'',
].join('\r\n')
)

const onSend = (error) => {
const onSend = error => {
if (error) {
debug('error sending to the XO client: %s', error.stack || error.message || error)
debug(
'error sending to the XO client: %s',
error.stack || error.message || error
)
}
}

socket.pipe(partialStream('\r\n\r\n', headers => {
socket
.pipe(
partialStream('\r\n\r\n', headers => {
// TODO: check status code 200.
debug('connected')
})).on('data', data => {
})
)
.on('data', data => {
if (!closed) {
ws.send(data, onSend)
}
}).on('end', () => {
})
.on('end', () => {
if (!closed) {
closed = true
debug('disconnected from the console')
@ -48,7 +61,10 @@ export default function proxyConsole (ws, vmConsole, sessionId) {
ws
.on('error', error => {
closed = true
debug('error from the XO client: %s', error.stack || error.message || error)
debug(
'error from the XO client: %s',
error.stack || error.message || error
)

socket.end()
})
@ -65,7 +81,8 @@ export default function proxyConsole (ws, vmConsole, sessionId) {

socket.end()
})
}).on('error', error => {
}
).on('error', error => {
closed = true
debug('error from the console: %s', error.stack || error.message || error)

|
@ -5,11 +5,7 @@ import Xo from './xo'
import { generateToken } from './utils'

const recoverAccount = async ([name]) => {
if (
name === undefined ||
name === '--help' ||
name === '-h'
) {
if (name === undefined || name === '--help' || name === '-h') {
return `
xo-server-recover-account <user name or email>

@ -28,9 +24,11 @@ xo-server-recover-account <user name or email>
console.log('The generated password is', password)
}

const xo = new Xo(await appConf.load('xo-server', {
const xo = new Xo(
await appConf.load('xo-server', {
ignoreUnknownFormats: true,
}))
})
)

const user = await xo.getUserByName(name, true)
if (user !== null) {
|
@ -66,7 +66,7 @@ export default class RemoteHandlerAbstract {
error: error.message || String(error),
}
} finally {
this.unlink(testFileName)::ignoreErrors()
;this.unlink(testFileName)::ignoreErrors()
}
}

@ -108,11 +108,10 @@ export default class RemoteHandlerAbstract {
throw new Error('Not implemented')
}

createReadStream (file, {
checksum = false,
ignoreMissingChecksum = false,
...options
} = {}) {
createReadStream (
file,
{ checksum = false, ignoreMissingChecksum = false, ...options } = {}
) {
const streamP = this._createReadStream(file, options).then(stream => {
// detect early errors
let promise = eventToPromise(stream, 'readable')
@ -125,9 +124,11 @@ export default class RemoteHandlerAbstract {
) {
promise = Promise.all([
promise,
this.getSize(file).then(size => {
this.getSize(file)
.then(size => {
stream.length = size
})::ignoreErrors(),
})
::ignoreErrors(),
])
}

@ -139,10 +140,11 @@ export default class RemoteHandlerAbstract {
}

// avoid a unhandled rejection warning
streamP::ignoreErrors()
;streamP::ignoreErrors()

return this.readFile(`${file}.checksum`).then(
checksum => streamP.then(stream => {
checksum =>
streamP.then(stream => {
const { length } = stream
stream = validChecksumOfReadStream(stream, String(checksum).trim())
stream.length = length
@ -169,10 +171,7 @@ export default class RemoteHandlerAbstract {
await this.outputFile(`${path}.checksum`, checksum)
}

async createOutputStream (file, {
checksum = false,
...options
} = {}) {
async createOutputStream (file, { checksum = false, ...options } = {}) {
const streamP = this._createOutputStream(file, {
flags: 'wx',
...options,
@ -201,11 +200,9 @@ export default class RemoteHandlerAbstract {
throw new Error('Not implemented')
}

async unlink (file, {
checksum = true,
} = {}) {
async unlink (file, { checksum = true } = {}) {
if (checksum) {
this._unlink(`${file}.checksum`)::ignoreErrors()
;this._unlink(`${file}.checksum`)::ignoreErrors()
}

return this._unlink(file)
|
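For context: the leading `;` now emitted before the `::ignoreErrors()` statements above appears to be the formatter's guard for a semicolon-less codebase, so a statement cannot be glued onto the previous expression by automatic semicolon insertion. A hypothetical stand-alone example of the hazard this kind of guard prevents (values assumed, not taken from this commit):

// Hypothetical illustration of the ASI hazard behind a leading ';'.
const first = [1, 2]
const second = [3, 4]
// Without the ';', the next line would parse as `[3, 4][first, second].forEach(...)`,
// i.e. as an index into the previous array literal rather than a new statement.
;[first, second].forEach(pair => console.log(pair))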
@ -17,7 +17,14 @@ export default class NfsHandler extends LocalHandler {
let stdout
const mounted = {}
try {
stdout = await execa.stdout('findmnt', ['-P', '-t', 'nfs,nfs4', '--output', 'SOURCE,TARGET', '--noheadings'])
stdout = await execa.stdout('findmnt', [
'-P',
'-t',
'nfs,nfs4',
'--output',
'SOURCE,TARGET',
'--noheadings',
])
const regex = /^SOURCE="([^:]*):(.*)" TARGET="(.*)"$/
forEach(stdout.split('\n'), m => {
if (m) {
@ -45,7 +52,14 @@ export default class NfsHandler extends LocalHandler {

async _mount () {
await fs.ensureDir(this._getRealPath())
return execa('mount', ['-t', 'nfs', '-o', 'vers=3', `${this._remote.host}:${this._remote.path}`, this._getRealPath()])
return execa('mount', [
'-t',
'nfs',
'-o',
'vers=3',
`${this._remote.host}:${this._remote.path}`,
this._getRealPath(),
])
}

async _sync () {
|
@ -1,19 +1,14 @@
import Smb2 from '@marsaud/smb2-promise'

import RemoteHandlerAbstract from './abstract'
import {
noop,
pFinally,
} from '../utils'
import { noop, pFinally } from '../utils'

// Normalize the error code for file not found.
const normalizeError = error => {
const { code } = error

return (
code === 'STATUS_OBJECT_NAME_NOT_FOUND' ||
return code === 'STATUS_OBJECT_NAME_NOT_FOUND' ||
code === 'STATUS_OBJECT_PATH_NOT_FOUND'
)
? Object.create(error, {
code: {
configurable: true,
@ -50,9 +45,7 @@ export default class SmbHandler extends RemoteHandlerAbstract {
file = undefined
}

let path = (this._remote.path !== '')
? this._remote.path
: ''
let path = this._remote.path !== '' ? this._remote.path : ''

// Ensure remote path is a directory.
if (path !== '' && path[path.length - 1] !== '\\') {
@ -94,7 +87,9 @@ export default class SmbHandler extends RemoteHandlerAbstract {
await client.ensureDir(dir)
}

return client.writeFile(path, data, options)::pFinally(() => { client.close() })
return client.writeFile(path, data, options)::pFinally(() => {
client.close()
})
}

async _readFile (file, options = {}) {
@ -102,7 +97,11 @@ export default class SmbHandler extends RemoteHandlerAbstract {
let content

try {
content = await client.readFile(this._getFilePath(file), options)::pFinally(() => { client.close() })
content = await client
.readFile(this._getFilePath(file), options)
::pFinally(() => {
client.close()
})
} catch (error) {
throw normalizeError(error)
}
@ -114,7 +113,11 @@ export default class SmbHandler extends RemoteHandlerAbstract {
const client = this._getClient(this._remote)

try {
await client.rename(this._getFilePath(oldPath), this._getFilePath(newPath))::pFinally(() => { client.close() })
await client
.rename(this._getFilePath(oldPath), this._getFilePath(newPath))
::pFinally(() => {
client.close()
})
} catch (error) {
throw normalizeError(error)
}
@ -125,7 +128,9 @@ export default class SmbHandler extends RemoteHandlerAbstract {
let list

try {
list = await client.readdir(this._getFilePath(dir))::pFinally(() => { client.close() })
list = await client.readdir(this._getFilePath(dir))::pFinally(() => {
client.close()
})
} catch (error) {
throw normalizeError(error)
}
@ -170,7 +175,9 @@ export default class SmbHandler extends RemoteHandlerAbstract {
const client = this._getClient(this._remote)

try {
await client.unlink(this._getFilePath(file))::pFinally(() => { client.close() })
await client.unlink(this._getFilePath(file))::pFinally(() => {
client.close()
})
} catch (error) {
throw normalizeError(error)
}
@ -181,7 +188,9 @@ export default class SmbHandler extends RemoteHandlerAbstract {
let size

try {
size = await client.getSize(this._getFilePath(file))::pFinally(() => { client.close() })
size = await client.getSize(this._getFilePath(file))::pFinally(() => {
client.close()
})
} catch (error) {
throw normalizeError(error)
}
|
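For context: every SMB operation above funnels through `::pFinally(() => { client.close() })`, so the connection is released whether the call resolves or rejects. A hypothetical equivalent written without the experimental bind operator (helper name assumed, not taken from this commit):

// Hypothetical sketch: run a cleanup callback on both success and failure,
// preserving the original value or error, which is the role pFinally plays here.
const pFinally = (promise, cb) =>
  promise.then(
    value => Promise.resolve(cb()).then(() => value),
    error =>
      Promise.resolve(cb()).then(() => {
        throw error
      })
  )

// usage: pFinally(client.readdir(path), () => client.close())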
@ -19,10 +19,5 @@ export default {
description: 'user (or group)',
},
},
required: [
'id',
'action',
'object',
'subject',
],
required: ['id', 'action', 'object', 'subject'],
}
|
@ -15,7 +15,8 @@ export default {
},
userId: {
type: 'string',
description: 'identifier of the user who have created the job (the permissions of the user are used by the job)',
description:
'identifier of the user who have created the job (the permissions of the user are used by the job)',
},
key: {
type: 'string',
@ -30,14 +31,9 @@ export default {
},
timeout: {
type: 'number',
description: 'number of milliseconds after which the job is considered failed',
description:
'number of milliseconds after which the job is considered failed',
},
},
required: [
'type',
'id',
'userId',
'key',
'method',
],
required: ['type', 'id', 'userId', 'key', 'method'],
}