chore: change print width to 120 chars

Julien Fontanet 2020-11-24 10:50:40 +01:00
parent fdf52a3d59
commit 7a8ca2f068
497 changed files with 5504 additions and 17434 deletions


@ -3,4 +3,9 @@ module.exports = {
jsxSingleQuote: true,
semi: false,
singleQuote: true,
// 2020-11-24: Requested by nraynaud and approved by the rest of the team
//
// https://team.vates.fr/vates/pl/a1i8af1b9id7pgzm3jcg4toacy
printWidth: 120,
}
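
Note: the effect of printWidth can be previewed with Prettier's Node API (an illustrative sketch, not part of this commit; Prettier 2.x is assumed, where format() is synchronous):

const prettier = require('prettier')
const code = 'foo(veryLongArgumentOne, veryLongArgumentTwo, veryLongArgumentThree, veryLongArgumentFour)'
// at the old width this ~90-character call gets wrapped onto several lines...
prettier.format(code, { parser: 'babel', printWidth: 80 })
// ...while printWidth: 120 keeps it on a single line
prettier.format(code, { parser: 'babel', printWidth: 120 })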


@ -1,3 +1 @@
module.exports = require('../../@xen-orchestra/babel-config')(
require('./package.json')
)
module.exports = require('../../@xen-orchestra/babel-config')(require('./package.json'))


@ -1,3 +1 @@
module.exports = require('../../@xen-orchestra/babel-config')(
require('./package.json')
)
module.exports = require('../../@xen-orchestra/babel-config')(require('./package.json'))


@ -119,9 +119,7 @@ export class AuditCore {
if (record === undefined) {
throw new MissingRecordError(newest, nValid)
}
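// ids look like `$<algorithmId>$...`: extract the algorithm id so the record is re-hashed with the same algorithm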
if (
newest !== createHash(record, newest.slice(1, newest.indexOf('$', 1)))
) {
if (newest !== createHash(record, newest.slice(1, newest.indexOf('$', 1)))) {
throw new AlteredRecordError(newest, nValid, record)
}
newest = record.previousId


@ -1,12 +1,6 @@
/* eslint-env jest */
import {
AlteredRecordError,
AuditCore,
MissingRecordError,
NULL_ID,
Storage,
} from '.'
import { AlteredRecordError, AuditCore, MissingRecordError, NULL_ID, Storage } from '.'
const asyncIteratorToArray = async asyncIterator => {
const array = []
@ -88,16 +82,13 @@ describe('auditCore', () => {
it('detects that a record is missing', async () => {
const [newestRecord, deletedRecord] = await storeAuditRecords()
const nValidRecords = await auditCore.checkIntegrity(
NULL_ID,
newestRecord.id
)
const nValidRecords = await auditCore.checkIntegrity(NULL_ID, newestRecord.id)
expect(nValidRecords).toBe(DATA.length)
await db.del(deletedRecord.id)
await expect(
auditCore.checkIntegrity(NULL_ID, newestRecord.id)
).rejects.toEqual(new MissingRecordError(deletedRecord.id, 1))
await expect(auditCore.checkIntegrity(NULL_ID, newestRecord.id)).rejects.toEqual(
new MissingRecordError(deletedRecord.id, 1)
)
})
it('detects that a record has been altered', async () => {
@ -106,9 +97,7 @@ describe('auditCore', () => {
alteredRecord.event = ''
await db.put(alteredRecord)
await expect(
auditCore.checkIntegrity(NULL_ID, newestRecord.id)
).rejects.toEqual(
await expect(auditCore.checkIntegrity(NULL_ID, newestRecord.id)).rejects.toEqual(
new AlteredRecordError(alteredRecord.id, 1, alteredRecord)
)
})


@ -38,18 +38,11 @@ const configs = {
const getConfig = (key, ...args) => {
const config = configs[key]
return config === undefined
? {}
: typeof config === 'function'
? config(...args)
: config
return config === undefined ? {} : typeof config === 'function' ? config(...args) : config
}
// some plugins must be used in a specific order
const pluginsOrder = [
'@babel/plugin-proposal-decorators',
'@babel/plugin-proposal-class-properties',
]
const pluginsOrder = ['@babel/plugin-proposal-decorators', '@babel/plugin-proposal-class-properties']
module.exports = function (pkg, plugins, presets) {
plugins === undefined && (plugins = {})


@ -1,7 +1,5 @@
const curryRight = require('lodash/curryRight')
module.exports = curryRight((iterable, fn) =>
Promise.all(
Array.isArray(iterable) ? iterable.map(fn) : Array.from(iterable, fn)
)
Promise.all(Array.isArray(iterable) ? iterable.map(fn) : Array.from(iterable, fn))
)
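
Note: being curried on the right (lodash curryRight), the export supports both call styles; a usage sketch, with asyncMap as an assumed name for this export:

await asyncMap([1, 2, 3], async n => n * 2) // → [2, 4, 6]
const double = asyncMap(async n => n * 2) // partial application: the iterable comes later
await double(new Set([1, 2, 3])) // → [2, 4, 6]; non-array iterables go through Array.from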


@ -115,9 +115,7 @@ async function handleVm(vmDir) {
const parent = resolve(dirname(path), vhd.header.parentUnicodeName)
vhdParents[path] = parent
if (parent in vhdChildren) {
const error = new Error(
'this script does not support multiple VHD children'
)
const error = new Error('this script does not support multiple VHD children')
error.parent = parent
error.child1 = vhdChildren[parent]
error.child2 = path
@ -224,11 +222,7 @@ async function handleVm(vmDir) {
} else {
console.warn('Error while checking backup', json)
const missingVhds = linkedVhds.filter(_ => !vhds.has(_))
console.warn(
' %i/%i missing VHDs',
missingVhds.length,
linkedVhds.length
)
console.warn(' %i/%i missing VHDs', missingVhds.length, linkedVhds.length)
missingVhds.forEach(vhd => {
console.warn(' ', vhd)
})


@ -10,9 +10,7 @@ const sum = values => values.reduce((a, b) => a + b)
module.exports = async function info(vmDirs) {
const jsonFiles = (
await asyncMap(vmDirs, async vmDir =>
(await readdir2(vmDir)).filter(_ => _.endsWith('.json'))
)
await asyncMap(vmDirs, async vmDir => (await readdir2(vmDir)).filter(_ => _.endsWith('.json')))
).flat()
const hashes = { __proto__: null }
@ -39,9 +37,7 @@ module.exports = async function info(vmDirs) {
size:
json.length +
(await (metadata.mode === 'delta'
? asyncMap(Object.values(metadata.vhds), _ =>
getSize(resolve(jsonDir, _))
).then(sum)
? asyncMap(Object.values(metadata.vhds), _ => getSize(resolve(jsonDir, _))).then(sum)
: getSize(resolve(jsonDir, metadata.xva)))),
}
} catch (error) {


@ -1,7 +1,3 @@
// returns all entries but the last retention-th
exports.getOldEntries = (retention, entries) =>
entries === undefined
? []
: retention > 0
? entries.slice(0, -retention)
: entries
entries === undefined ? [] : retention > 0 ? entries.slice(0, -retention) : entries
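
The semantics are unchanged, for instance:

getOldEntries(2, ['a', 'b', 'c', 'd']) // → ['a', 'b'] (everything but the 2 most recent)
getOldEntries(0, ['a', 'b']) // → ['a', 'b'] (no retention: every entry is old)
getOldEntries(3, undefined) // → []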


@ -4,10 +4,7 @@ const fs = require('fs-extra')
const isGzipFile = async fd => {
// https://tools.ietf.org/html/rfc1952.html#page-5
const magicNumber = Buffer.allocUnsafe(2)
assert.strictEqual(
(await fs.read(fd, magicNumber, 0, magicNumber.length, 0)).bytesRead,
magicNumber.length
)
assert.strictEqual((await fs.read(fd, magicNumber, 0, magicNumber.length, 0)).bytesRead, magicNumber.length)
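// per RFC 1952, a gzip stream starts with the magic bytes 0x1f (31) and 0x8b (139)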
return magicNumber[0] === 31 && magicNumber[1] === 139
}
@ -30,10 +27,7 @@ const isValidTar = async (size, fd) => {
}
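// a tar archive ends with two 512-byte blocks of zeros, hence checking the last 1024 bytes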
const buf = Buffer.allocUnsafe(1024)
assert.strictEqual(
(await fs.read(fd, buf, 0, buf.length, size - buf.length)).bytesRead,
buf.length
)
assert.strictEqual((await fs.read(fd, buf, 0, buf.length, size - buf.length)).bytesRead, buf.length)
return buf.every(_ => _ === 0)
}


@ -32,14 +32,7 @@ ${cliName} v${pkg.version}
)
}
const [
srcXapiUrl,
srcSnapshotUuid,
tgtXapiUrl,
tgtVmUuid,
jobId,
scheduleId,
] = args
const [srcXapiUrl, srcSnapshotUuid, tgtXapiUrl, tgtVmUuid, jobId, scheduleId] = args
const srcXapi = new Xapi({
allowUnauthorized: true,
@ -70,16 +63,10 @@ ${cliName} v${pkg.version}
'xo:backup:vm': srcVm.uuid,
}
const [srcDisks, tgtDisks] = await Promise.all([
srcXapi.getVmDisks(srcSnapshot),
tgtXapi.getVmDisks(tgtVm),
])
const [srcDisks, tgtDisks] = await Promise.all([srcXapi.getVmDisks(srcSnapshot), tgtXapi.getVmDisks(tgtVm)])
const userDevices = Object.keys(tgtDisks)
const tgtSr = await tgtXapi.getRecord(
'SR',
tgtDisks[Object.keys(tgtDisks)[0]].SR
)
const tgtSr = await tgtXapi.getRecord('SR', tgtDisks[Object.keys(tgtDisks)[0]].SR)
await Promise.all([
srcSnapshot.update_other_config(metadata),
@ -90,10 +77,7 @@ ${cliName} v${pkg.version}
'xo:backup:sr': tgtSr.uuid,
'xo:copy_of': srcSnapshotUuid,
}),
tgtVm.update_blocked_operations(
'start',
'Start operation for this vm is blocked, clone it if you want to use it.'
),
tgtVm.update_blocked_operations('start', 'Start operation for this vm is blocked, clone it if you want to use it.'),
Promise.all(
userDevices.map(userDevice => {
const srcDisk = srcDisks[userDevice]


@ -1,3 +1 @@
module.exports = require('../../@xen-orchestra/babel-config')(
require('./package.json')
)
module.exports = require('../../@xen-orchestra/babel-config')(require('./package.json'))


@ -42,10 +42,7 @@ class Job {
const now = schedule._createDate()
scheduledDate = +next(schedule._schedule, now)
const delay = scheduledDate - now
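// setTimeout delays are capped (at 2^31 - 1 ms in practice), so for far-away dates wait MAX_DELAY and then reschedule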
this._timeout =
delay < MAX_DELAY
? setTimeout(wrapper, delay)
: setTimeout(scheduleNext, MAX_DELAY)
this._timeout = delay < MAX_DELAY ? setTimeout(wrapper, delay) : setTimeout(scheduleNext, MAX_DELAY)
}
}
@ -73,12 +70,7 @@ class Job {
class Schedule {
constructor(pattern, zone = 'utc') {
this._schedule = parse(pattern)
this._createDate =
zone.toLowerCase() === 'utc'
? moment.utc
: zone === 'local'
? moment
: () => moment.tz(zone)
this._createDate = zone.toLowerCase() === 'utc' ? moment.utc : zone === 'local' ? moment : () => moment.tz(zone)
}
createJob(fn) {


@ -37,9 +37,7 @@ describe('next()', () => {
})
it('fails when no solutions has been found', () => {
expect(() => N('0 0 30 feb *')).toThrow(
'no solutions found for this schedule'
)
expect(() => N('0 0 30 feb *')).toThrow('no solutions found for this schedule')
})
it('select the first sunday of the month', () => {


@ -66,9 +66,7 @@ const createParser = ({ fields: [...fields], presets: { ...presets } }) => {
aliasesRegExp.lastIndex = i
const matches = aliasesRegExp.exec(pattern)
if (matches === null) {
throw new SyntaxError(
`${field.name}: missing alias or integer at character ${i}`
)
throw new SyntaxError(`${field.name}: missing alias or integer at character ${i}`)
}
const [alias] = matches
i += alias.length
@ -77,9 +75,7 @@ const createParser = ({ fields: [...fields], presets: { ...presets } }) => {
const { range } = field
if (value < range[0] || value > range[1]) {
throw new SyntaxError(
`${field.name}: ${value} is not between ${range[0]} and ${range[1]}`
)
throw new SyntaxError(`${field.name}: ${value} is not between ${range[0]} and ${range[1]}`)
}
return value
}
@ -117,9 +113,7 @@ const createParser = ({ fields: [...fields], presets: { ...presets } }) => {
{
const schedule = presets[p]
if (schedule !== undefined) {
return typeof schedule === 'string'
? (presets[p] = parse(schedule))
: schedule
return typeof schedule === 'string' ? (presets[p] = parse(schedule)) : schedule
}
}
@ -142,9 +136,7 @@ const createParser = ({ fields: [...fields], presets: { ...presets } }) => {
consumeWhitespaces()
if (i !== n) {
throw new SyntaxError(
`unexpected character at offset ${i}, expected end`
)
throw new SyntaxError(`unexpected character at offset ${i}, expected end`)
}
return schedule


@ -33,9 +33,7 @@ describe('parse()', () => {
})
it('reports invalid aliases', () => {
expect(() => parse('* * * jan-foo *')).toThrow(
'month: missing alias or integer at character 10'
)
expect(() => parse('* * * jan-foo *')).toThrow('month: missing alias or integer at character 10')
})
it('dayOfWeek: 0 and 7 bind to sunday', () => {


@ -1,3 +1 @@
module.exports = require('../../@xen-orchestra/babel-config')(
require('./package.json')
)
module.exports = require('../../@xen-orchestra/babel-config')(require('./package.json'))


@ -60,5 +60,4 @@ export const get = (accessor: (input: ?any) => any, arg: ?any) => {
// _ => new ProxyAgent(_)
// )
// ```
export const ifDef = (value: ?any, thenFn: (value: any) => any) =>
value !== undefined ? thenFn(value) : value
export const ifDef = (value: ?any, thenFn: (value: any) => any) => (value !== undefined ? thenFn(value) : value)


@ -1,3 +1 @@
module.exports = require('../../@xen-orchestra/babel-config')(
require('./package.json')
)
module.exports = require('../../@xen-orchestra/babel-config')(require('./package.json'))


@ -1,3 +1 @@
module.exports = require('../../@xen-orchestra/babel-config')(
require('./package.json')
)
module.exports = require('../../@xen-orchestra/babel-config')(require('./package.json'))


@ -6,33 +6,19 @@ import { tmpdir } from 'os'
import LocalHandler from './local'
const sudoExeca = (command, args, opts) =>
execa('sudo', [command, ...args], opts)
const sudoExeca = (command, args, opts) => execa('sudo', [command, ...args], opts)
export default class MountHandler extends LocalHandler {
constructor(
remote,
{
mountsDir = join(tmpdir(), 'xo-fs-mounts'),
useSudo = false,
...opts
} = {},
params
) {
constructor(remote, { mountsDir = join(tmpdir(), 'xo-fs-mounts'), useSudo = false, ...opts } = {}, params) {
super(remote, opts)
this._execa = useSudo ? sudoExeca : execa
this._keeper = undefined
this._params = {
...params,
options: [params.options, remote.options ?? params.defaultOptions]
.filter(_ => _ !== undefined)
.join(','),
options: [params.options, remote.options ?? params.defaultOptions].filter(_ => _ !== undefined).join(','),
}
this._realPath = join(
mountsDir,
remote.id || Math.random().toString(36).slice(2)
)
this._realPath = join(mountsDir, remote.id || Math.random().toString(36).slice(2))
}
async _forget() {
@ -75,16 +61,12 @@ export default class MountHandler extends LocalHandler {
// Linux mount is flexible about the order in which the mount arguments appear,
// but FreeBSD requires exactly this order.
await this._execa(
'mount',
['-o', options, '-t', type, device, realPath],
{
env: {
LANG: 'C',
...env,
},
}
)
await this._execa('mount', ['-o', options, '-t', type, device, realPath], {
env: {
LANG: 'C',
...env,
},
})
} catch (error) {
try {
// the failure may mean it's already mounted, use `findmnt` to check
@ -99,9 +81,7 @@ export default class MountHandler extends LocalHandler {
// keep an open file on the mount to prevent it from being unmounted if used
// by another handler/process
const keeperPath = `${realPath}/.keeper_${Math.random()
.toString(36)
.slice(2)}`
const keeperPath = `${realPath}/.keeper_${Math.random().toString(36).slice(2)}`
this._keeper = await fs.open(keeperPath, 'w')
ignoreErrors.call(fs.unlink(keeperPath))
}


@ -86,9 +86,7 @@ export default class RemoteHandlerAbstract {
}
;({ timeout: this._timeout = DEFAULT_TIMEOUT } = options)
const sharedLimit = limit(
options.maxParallelOperations ?? DEFAULT_MAX_PARALLEL_OPERATIONS
)
const sharedLimit = limit(options.maxParallelOperations ?? DEFAULT_MAX_PARALLEL_OPERATIONS)
this.closeFile = sharedLimit(this.closeFile)
this.getInfo = sharedLimit(this.getInfo)
this.getSize = sharedLimit(this.getSize)
@ -122,10 +120,7 @@ export default class RemoteHandlerAbstract {
}
// TODO: remove method
async createOutputStream(
file: File,
{ checksum = false, ...options }: Object = {}
): Promise<LaxWritable> {
async createOutputStream(file: File, { checksum = false, ...options }: Object = {}): Promise<LaxWritable> {
if (typeof file === 'string') {
file = normalizePath(file)
}
@ -153,9 +148,7 @@ export default class RemoteHandlerAbstract {
// $FlowFixMe
checksumStream.checksumWritten = checksumStream.checksum
.then(value =>
this._outputFile(checksumFile(path), value, { flags: 'wx' })
)
.then(value => this._outputFile(checksumFile(path), value, { flags: 'wx' }))
.catch(forwardError)
return checksumStream
@ -169,30 +162,24 @@ export default class RemoteHandlerAbstract {
file = normalizePath(file)
}
const path = typeof file === 'string' ? file : file.path
const streamP = timeout
.call(this._createReadStream(file, options), this._timeout)
.then(stream => {
// detect early errors
let promise = fromEvent(stream, 'readable')
const streamP = timeout.call(this._createReadStream(file, options), this._timeout).then(stream => {
// detect early errors
let promise = fromEvent(stream, 'readable')
// try to add the length prop if missing and not a range stream
if (
stream.length === undefined &&
options.end === undefined &&
options.start === undefined
) {
promise = Promise.all([
promise,
ignoreErrors.call(
this._getSize(file).then(size => {
stream.length = size
})
),
])
}
// try to add the length prop if missing and not a range stream
if (stream.length === undefined && options.end === undefined && options.start === undefined) {
promise = Promise.all([
promise,
ignoreErrors.call(
this._getSize(file).then(size => {
stream.length = size
})
),
])
}
return promise.then(() => stream)
})
return promise.then(() => stream)
})
if (!checksum) {
return streamP
@ -205,10 +192,7 @@ export default class RemoteHandlerAbstract {
checksum =>
streamP.then(stream => {
const { length } = stream
stream = (validChecksumOfReadStream(
stream,
String(checksum).trim()
): LaxReadable)
stream = (validChecksumOfReadStream(stream, String(checksum).trim()): LaxReadable)
stream.length = length
return stream
@ -249,18 +233,12 @@ export default class RemoteHandlerAbstract {
}
async getSize(file: File): Promise<number> {
return timeout.call(
this._getSize(typeof file === 'string' ? normalizePath(file) : file),
this._timeout
)
return timeout.call(this._getSize(typeof file === 'string' ? normalizePath(file) : file), this._timeout)
}
async list(
dir: string,
{
filter,
prependDir = false,
}: { filter?: (name: string) => boolean, prependDir?: boolean } = {}
{ filter, prependDir = false }: { filter?: (name: string) => boolean, prependDir?: boolean } = {}
): Promise<string[]> {
const virtualDir = normalizePath(dir)
dir = normalizePath(dir)
@ -291,56 +269,31 @@ export default class RemoteHandlerAbstract {
return this.__openFile(path, flags)
}
async outputFile(
file: string,
data: Data,
{ flags = 'wx' }: { flags?: string } = {}
): Promise<void> {
async outputFile(file: string, data: Data, { flags = 'wx' }: { flags?: string } = {}): Promise<void> {
await this._outputFile(normalizePath(file), data, { flags })
}
async read(
file: File,
buffer: Buffer,
position?: number
): Promise<{| bytesRead: number, buffer: Buffer |}> {
return this._read(
typeof file === 'string' ? normalizePath(file) : file,
buffer,
position
)
async read(file: File, buffer: Buffer, position?: number): Promise<{| bytesRead: number, buffer: Buffer |}> {
return this._read(typeof file === 'string' ? normalizePath(file) : file, buffer, position)
}
async readFile(
file: string,
{ flags = 'r' }: { flags?: string } = {}
): Promise<Buffer> {
async readFile(file: string, { flags = 'r' }: { flags?: string } = {}): Promise<Buffer> {
return this._readFile(normalizePath(file), { flags })
}
async rename(
oldPath: string,
newPath: string,
{ checksum = false }: Object = {}
) {
async rename(oldPath: string, newPath: string, { checksum = false }: Object = {}) {
oldPath = normalizePath(oldPath)
newPath = normalizePath(newPath)
let p = timeout.call(this._rename(oldPath, newPath), this._timeout)
if (checksum) {
p = Promise.all([
p,
this._rename(checksumFile(oldPath), checksumFile(newPath)),
])
p = Promise.all([p, this._rename(checksumFile(oldPath), checksumFile(newPath))])
}
return p
}
async rmdir(dir: string): Promise<void> {
await timeout.call(
this._rmdir(normalizePath(dir)).catch(ignoreEnoent),
this._timeout
)
await timeout.call(this._rmdir(normalizePath(dir)).catch(ignoreEnoent), this._timeout)
}
async rmtree(dir: string): Promise<void> {
@ -405,23 +358,11 @@ export default class RemoteHandlerAbstract {
await this._unlink(file).catch(ignoreEnoent)
}
async write(
file: File,
buffer: Buffer,
position: number
): Promise<{| bytesWritten: number, buffer: Buffer |}> {
await this._write(
typeof file === 'string' ? normalizePath(file) : file,
buffer,
position
)
async write(file: File, buffer: Buffer, position: number): Promise<{| bytesWritten: number, buffer: Buffer |}> {
await this._write(typeof file === 'string' ? normalizePath(file) : file, buffer, position)
}
async writeFile(
file: string,
data: Data,
{ flags = 'wx' }: { flags?: string } = {}
): Promise<void> {
async writeFile(file: string, data: Data, { flags = 'wx' }: { flags?: string } = {}): Promise<void> {
await this._writeFile(normalizePath(file), data, { flags })
}
@ -516,11 +457,7 @@ export default class RemoteHandlerAbstract {
throw new Error('Not implemented')
}
async _outputFile(
file: string,
data: Data,
options: { flags?: string }
): Promise<void> {
async _outputFile(file: string, data: Data, options: { flags?: string }): Promise<void> {
try {
return await this._writeFile(file, data, options)
} catch (error) {
@ -549,11 +486,7 @@ export default class RemoteHandlerAbstract {
}
}
_read(
file: File,
buffer: Buffer,
position?: number
): Promise<{| bytesRead: number, buffer: Buffer |}> {
_read(file: File, buffer: Buffer, position?: number): Promise<{| bytesRead: number, buffer: Buffer |}> {
throw new Error('Not implemented')
}
@ -611,19 +544,11 @@ export default class RemoteHandlerAbstract {
}
}
async _writeFd(
fd: FileDescriptor,
buffer: Buffer,
position: number
): Promise<void> {
async _writeFd(fd: FileDescriptor, buffer: Buffer, position: number): Promise<void> {
throw new Error('Not implemented')
}
async _writeFile(
file: string,
data: Data,
options: { flags?: string }
): Promise<void> {
async _writeFile(file: string, data: Data, options: { flags?: string }): Promise<void> {
throw new Error('Not implemented')
}
}
@ -643,8 +568,7 @@ function createPrefixWrapperMethods() {
if (
hasOwnProperty.call(pPw, name) ||
name[0] === '_' ||
typeof (value = (descriptor = getOwnPropertyDescriptor(pRha, name))
.value) !== 'function'
typeof (value = (descriptor = getOwnPropertyDescriptor(pRha, name)).value) !== 'function'
) {
return
}


@ -27,9 +27,7 @@ const ID_TO_ALGORITHM = invert(ALGORITHM_TO_ID)
// const checksumStream = source.pipe(createChecksumStream())
// checksumStream.resume() // make the data flow without an output
// console.log(await checksumStream.checksum)
export const createChecksumStream = (
algorithm: string = 'md5'
): Transform & { checksum: Promise<string> } => {
export const createChecksumStream = (algorithm: string = 'md5'): Transform & { checksum: Promise<string> } => {
const algorithmId = ALGORITHM_TO_ID[algorithm]
if (!algorithmId) {
@ -60,10 +58,7 @@ export const validChecksumOfReadStream = (
stream: Readable,
expectedChecksum: string
): Readable & { checksumVerified: Promise<void> } => {
const algorithmId = expectedChecksum.slice(
1,
expectedChecksum.indexOf('$', 1)
)
const algorithmId = expectedChecksum.slice(1, expectedChecksum.indexOf('$', 1))
if (!algorithmId) {
throw new Error(`unknown algorithm: ${algorithmId}`)
@ -82,11 +77,7 @@ export const validChecksumOfReadStream = (
const checksum = `$${algorithmId}$$${hash.digest('hex')}`
callback(
checksum !== expectedChecksum
? new Error(
`Bad checksum (${checksum}), expected: ${expectedChecksum}`
)
: null
checksum !== expectedChecksum ? new Error(`Bad checksum (${checksum}), expected: ${expectedChecksum}`) : null
)
}
)


@ -126,16 +126,12 @@ handlers.forEach(url => {
it('can prepend the directory to entries', async () => {
await handler.outputFile('dir/file', '')
expect(await handler.list('dir', { prependDir: true })).toEqual([
'/dir/file',
])
expect(await handler.list('dir', { prependDir: true })).toEqual(['/dir/file'])
})
it('can prepend the directory to entries', async () => {
await handler.outputFile('dir/file', '')
expect(await handler.list('dir', { prependDir: true })).toEqual([
'/dir/file',
])
expect(await handler.list('dir', { prependDir: true })).toEqual(['/dir/file'])
})
})
@ -308,10 +304,7 @@ handlers.forEach(url => {
return { offset, expected }
})(),
'increase file size': (() => {
const offset = random(
TEST_DATA_LEN - PATCH_DATA_LEN + 1,
TEST_DATA_LEN
)
const offset = random(TEST_DATA_LEN - PATCH_DATA_LEN + 1, TEST_DATA_LEN)
const expected = Buffer.alloc(offset + PATCH_DATA_LEN)
TEST_DATA.copy(expected)


@ -63,9 +63,7 @@ export default class LocalHandler extends RemoteHandlerAbstract {
}
async _getSize(file) {
const stats = await fs.stat(
this._getFilePath(typeof file === 'string' ? file : file.path)
)
const stats = await fs.stat(this._getFilePath(typeof file === 'string' ? file : file.path))
return stats.size
}
@ -85,13 +83,7 @@ export default class LocalHandler extends RemoteHandlerAbstract {
const needsClose = typeof file === 'string'
file = needsClose ? await fs.open(this._getFilePath(file), 'r') : file.fd
try {
return await fs.read(
file,
buffer,
0,
buffer.length,
position === undefined ? null : position
)
return await fs.read(file, buffer, 0, buffer.length, position === undefined ? null : position)
} finally {
if (needsClose) {
await fs.close(file)


@ -70,9 +70,7 @@ export default class S3Handler extends RemoteHandlerAbstract {
}
async _writeFile(file, data, options) {
return this._s3
.putObject({ ...this._createParams(file), Body: data })
.promise()
return this._s3.putObject({ ...this._createParams(file), Body: data }).promise()
}
async _createReadStream(file, options) {
@ -138,19 +136,13 @@ export default class S3Handler extends RemoteHandlerAbstract {
file = file.fd
}
const uploadParams = this._createParams(file)
const fileSize = +(await this._s3.headObject(uploadParams).promise())
.ContentLength
const fileSize = +(await this._s3.headObject(uploadParams).promise()).ContentLength
if (fileSize < MIN_PART_SIZE) {
const resultBuffer = Buffer.alloc(
Math.max(fileSize, position + buffer.length)
)
const fileContent = (await this._s3.getObject(uploadParams).promise())
.Body
const resultBuffer = Buffer.alloc(Math.max(fileSize, position + buffer.length))
const fileContent = (await this._s3.getObject(uploadParams).promise()).Body
fileContent.copy(resultBuffer)
buffer.copy(resultBuffer, position)
await this._s3
.putObject({ ...uploadParams, Body: resultBuffer })
.promise()
await this._s3.putObject({ ...uploadParams, Body: resultBuffer }).promise()
return { buffer, bytesWritten: buffer.length }
} else {
// using this trick: https://stackoverflow.com/a/38089437/72637
@ -160,9 +152,7 @@ export default class S3Handler extends RemoteHandlerAbstract {
// otherwise it will be downloaded and concatenated to `edit`
// `edit` will always be an upload part
// `suffix` will always be sourced from uploadPartCopy()
const multipartParams = await this._s3
.createMultipartUpload(uploadParams)
.promise()
const multipartParams = await this._s3.createMultipartUpload(uploadParams).promise()
try {
const parts = []
const prefixSize = position
@ -178,29 +168,22 @@ export default class S3Handler extends RemoteHandlerAbstract {
Range: `bytes=0-${prefixSize - 1}`,
}
const prefixBuffer =
prefixSize > 0
? (await this._s3.getObject(downloadParams).promise()).Body
: Buffer.alloc(0)
prefixSize > 0 ? (await this._s3.getObject(downloadParams).promise()).Body : Buffer.alloc(0)
editBuffer = Buffer.concat([prefixBuffer, buffer])
editBufferOffset = 0
} else {
const fragmentsCount = Math.ceil(prefixSize / MAX_PART_SIZE)
const prefixFragmentSize = Math.ceil(prefixSize / fragmentsCount)
const lastFragmentSize =
prefixFragmentSize * fragmentsCount - prefixSize
const lastFragmentSize = prefixFragmentSize * fragmentsCount - prefixSize
let prefixPosition = 0
for (let i = 0; i < fragmentsCount; i++) {
const copyPrefixParams = {
...multipartParams,
PartNumber: partNumber++,
CopySource: `/${this._bucket}/${this._dir + file}`,
CopySourceRange: `bytes=${prefixPosition}-${
prefixPosition + prefixFragmentSize - 1
}`,
CopySourceRange: `bytes=${prefixPosition}-${prefixPosition + prefixFragmentSize - 1}`,
}
const prefixPart = (
await this._s3.uploadPartCopy(copyPrefixParams).promise()
).CopyPartResult
const prefixPart = (await this._s3.uploadPartCopy(copyPrefixParams).promise()).CopyPartResult
parts.push({
ETag: prefixPart.ETag,
PartNumber: copyPrefixParams.PartNumber,
@ -214,21 +197,14 @@ export default class S3Handler extends RemoteHandlerAbstract {
// the edit fragment is too short and is not the last fragment
// let's steal from the suffix fragment to reach the minimum size
// the suffix might be too short and itself entirely absorbed in the edit fragment, making it the last one.
const complementSize = Math.min(
MIN_PART_SIZE - editBuffer.length,
suffixSize
)
const complementSize = Math.min(MIN_PART_SIZE - editBuffer.length, suffixSize)
const complementOffset = editBufferOffset + editBuffer.length
suffixOffset += complementSize
suffixSize -= complementSize
hasSuffix = suffixSize > 0
const prefixRange = `bytes=${complementOffset}-${
complementOffset + complementSize - 1
}`
const prefixRange = `bytes=${complementOffset}-${complementOffset + complementSize - 1}`
const downloadParams = { ...uploadParams, Range: prefixRange }
const complementBuffer = (
await this._s3.getObject(downloadParams).promise()
).Body
const complementBuffer = (await this._s3.getObject(downloadParams).promise()).Body
editBuffer = Buffer.concat([editBuffer, complementBuffer])
}
const editParams = {
@ -244,18 +220,14 @@ export default class S3Handler extends RemoteHandlerAbstract {
let suffixFragmentOffset = suffixOffset
for (let i = 0; i < suffixFragments; i++) {
const fragmentEnd = suffixFragmentOffset + suffixFragmentsSize
const suffixRange = `bytes=${suffixFragmentOffset}-${
Math.min(fileSize, fragmentEnd) - 1
}`
const suffixRange = `bytes=${suffixFragmentOffset}-${Math.min(fileSize, fragmentEnd) - 1}`
const copySuffixParams = {
...multipartParams,
PartNumber: partNumber++,
CopySource: `/${this._bucket}/${this._dir + file}`,
CopySourceRange: suffixRange,
}
const suffixPart = (
await this._s3.uploadPartCopy(copySuffixParams).promise()
).CopyPartResult
const suffixPart = (await this._s3.uploadPartCopy(copySuffixParams).promise()).CopyPartResult
parts.push({
ETag: suffixPart.ETag,
PartNumber: copySuffixParams.PartNumber,


@ -5,9 +5,7 @@ import normalizePath from './_normalizePath'
export default class SmbMountHandler extends MountHandler {
constructor(remote, opts) {
const { domain = 'WORKGROUP', host, password, path, username } = parse(
remote.url
)
const { domain = 'WORKGROUP', host, password, path, username } = parse(remote.url)
super(remote, opts, {
type: 'cifs',
device: '//' + host + normalizePath(path),


@ -17,8 +17,7 @@ const normalizeError = (error, shouldBeDirectory) => {
? wrapError(error, 'EISDIR')
: code === 'STATUS_NOT_A_DIRECTORY'
? wrapError(error, 'ENOTDIR')
: code === 'STATUS_OBJECT_NAME_NOT_FOUND' ||
code === 'STATUS_OBJECT_PATH_NOT_FOUND'
: code === 'STATUS_OBJECT_NAME_NOT_FOUND' || code === 'STATUS_OBJECT_PATH_NOT_FOUND'
? wrapError(error, 'ENOENT')
: code === 'STATUS_OBJECT_NAME_COLLISION'
? wrapError(error, 'EEXIST')
@ -44,12 +43,7 @@ export default class SmbHandler extends RemoteHandlerAbstract {
}
_getFilePath(file) {
return (
this._prefix +
(typeof file === 'string' ? file : file.path)
.slice(1)
.replace(/\//g, '\\')
)
return this._prefix + (typeof file === 'string' ? file : file.path).slice(1).replace(/\//g, '\\')
}
_dirname(file) {
@ -102,9 +96,7 @@ export default class SmbHandler extends RemoteHandlerAbstract {
// TODO: add flags
_openFile(path, flags) {
return this._client
.open(this._getFilePath(path), flags)
.catch(normalizeError)
return this._client.open(this._getFilePath(path), flags).catch(normalizeError)
}
async _read(file, buffer, position) {
@ -123,9 +115,7 @@ export default class SmbHandler extends RemoteHandlerAbstract {
}
_readFile(file, options) {
return this._client
.readFile(this._getFilePath(file), options)
.catch(normalizeError)
return this._client.readFile(this._getFilePath(file), options).catch(normalizeError)
}
_rename(oldPath, newPath) {
@ -156,9 +146,7 @@ export default class SmbHandler extends RemoteHandlerAbstract {
}
_truncate(file, len) {
return this._client
.truncate(this._getFilePath(file), len)
.catch(normalizeError)
return this._client.truncate(this._getFilePath(file), len).catch(normalizeError)
}
_unlink(file) {
@ -170,8 +158,6 @@ export default class SmbHandler extends RemoteHandlerAbstract {
}
_writeFile(file, data, options) {
return this._client
.writeFile(this._getFilePath(file), data, options)
.catch(normalizeError)
return this._client.writeFile(this._getFilePath(file), data, options).catch(normalizeError)
}
}


@ -1,3 +1 @@
module.exports = require('../../@xen-orchestra/babel-config')(
require('./package.json')
)
module.exports = require('../../@xen-orchestra/babel-config')(require('./package.json'))


@ -47,10 +47,7 @@ const createTransport = config => {
return transport
}
const symbol =
typeof Symbol !== 'undefined'
? Symbol.for('@xen-orchestra/log')
: '@@@xen-orchestra/log'
const symbol = typeof Symbol !== 'undefined' ? Symbol.for('@xen-orchestra/log') : '@@@xen-orchestra/log'
const { env } = process
global[symbol] = createTransport({


@ -1,10 +1,7 @@
import createTransport from './transports/console'
import LEVELS, { resolve } from './levels'
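// Symbol.for uses the runtime-wide symbol registry, so every copy of this module resolves the same global slot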
const symbol =
typeof Symbol !== 'undefined'
? Symbol.for('@xen-orchestra/log')
: '@@@xen-orchestra/log'
const symbol = typeof Symbol !== 'undefined' ? Symbol.for('@xen-orchestra/log') : '@@@xen-orchestra/log'
if (!(symbol in global)) {
// the default behavior, without requiring `configure` is to avoid
// logging anything unless it's a real error
@ -64,9 +61,7 @@ prototype.wrap = function (message, fn) {
try {
const result = fn.apply(this, arguments)
const then = result != null && result.then
return typeof then === 'function'
? then.call(result, warnAndRethrow)
: result
return typeof then === 'function' ? then.call(result, warnAndRethrow) : result
} catch (error) {
warnAndRethrow(error)
}


@ -3,12 +3,7 @@ import LEVELS, { NAMES } from '../levels'
const { DEBUG, ERROR, FATAL, INFO, WARN } = LEVELS
let formatLevel, formatNamespace
if (
process.stdout !== undefined &&
process.stdout.isTTY &&
process.stderr !== undefined &&
process.stderr.isTTY
) {
if (process.stdout !== undefined && process.stdout.isTTY && process.stderr !== undefined && process.stderr.isTTY) {
const ansi = (style, str) => `\x1b[${style}m${str}\x1b[0m`
const LEVEL_STYLES = {
@ -71,10 +66,7 @@ if (
// const g = f(3)
// const b = f(1)
// return ansi(`38;2;${r};${g};${b}`, namespace)
return ansi(
`1;38;5;${NAMESPACE_COLORS[Math.abs(hash) % NAMESPACE_COLORS.length]}`,
namespace
)
return ansi(`1;38;5;${NAMESPACE_COLORS[Math.abs(hash) % NAMESPACE_COLORS.length]}`, namespace)
}
} else {
formatLevel = str => NAMES[str]
@ -84,21 +76,10 @@ if (
const consoleTransport = ({ data, level, namespace, message, time }) => {
const fn =
/* eslint-disable no-console */
level < INFO
? console.log
: level < WARN
? console.info
: level < ERROR
? console.warn
: console.error
level < INFO ? console.log : level < WARN ? console.info : level < ERROR ? console.warn : console.error
/* eslint-enable no-console */
const args = [
time.toISOString(),
formatNamespace(namespace),
formatLevel(level),
message,
]
const args = [time.toISOString(), formatNamespace(namespace), formatLevel(level), message]
if (data != null) {
args.push(data)
}


@ -54,11 +54,7 @@ export default ({
transporter.sendMail(
{
subject: evalTemplate(subject, key =>
key === 'level'
? NAMES[log.level]
: key === 'time'
? log.time.toISOString()
: log[key]
key === 'level' ? NAMES[log.level] : key === 'time' ? log.time.toISOString() : log[key]
),
text: prettyFormat(log.data),
},


@ -4,16 +4,14 @@ import escapeRegExp from 'lodash/escapeRegExp'
const TPL_RE = /\{\{(.+?)\}\}/g
export const evalTemplate = (tpl, data) => {
const getData =
typeof data === 'function' ? (_, key) => data(key) : (_, key) => data[key]
const getData = typeof data === 'function' ? (_, key) => data(key) : (_, key) => data[key]
return tpl.replace(TPL_RE, getData)
}
// -------------------------------------------------------------------
const compileGlobPatternFragment = pattern =>
pattern.split('*').map(escapeRegExp).join('.*')
const compileGlobPatternFragment = pattern => pattern.split('*').map(escapeRegExp).join('.*')
export const compileGlobPattern = pattern => {
const no = []


@ -1,3 +1 @@
module.exports = require('../../@xen-orchestra/babel-config')(
require('./package.json')
)
module.exports = require('../../@xen-orchestra/babel-config')(require('./package.json'))


@ -20,9 +20,7 @@ const isIgnoredStaticProperty = name => name in IGNORED_STATIC_PROPERTIES
const ownKeys =
(typeof Reflect !== 'undefined' && Reflect.ownKeys) ||
(({ getOwnPropertyNames: names, getOwnPropertySymbols: symbols }) =>
symbols !== undefined ? obj => names(obj).concat(symbols(obj)) : names)(
Object
)
symbols !== undefined ? obj => names(obj).concat(symbols(obj)) : names)(Object)
// -------------------------------------------------------------------
@ -50,10 +48,7 @@ const mixin = Mixins => Class => {
throw new Error(`${name}#${prop} is already defined`)
}
;(descriptors[prop] = getOwnPropertyDescriptor(
Mixin,
prop
)).enumerable = false // Object methods are enumerable but class methods are not.
;(descriptors[prop] = getOwnPropertyDescriptor(Mixin, prop)).enumerable = false // Object methods are enumerable but class methods are not.
}
})
defineProperties(prototype, descriptors)
@ -81,11 +76,7 @@ const mixin = Mixins => Class => {
throw new Error(`${name}#${prop} is already defined`)
}
descriptors[prop] = getBoundPropertyDescriptor(
prototype,
prop,
mixinInstance
)
descriptors[prop] = getBoundPropertyDescriptor(prototype, prop, mixinInstance)
}
defineProperties(instance, descriptors)
}
@ -101,8 +92,7 @@ const mixin = Mixins => Class => {
!(
isIgnoredStaticProperty(prop) &&
// if they already exist...
(descriptor = getOwnPropertyDescriptor(DecoratedClass, prop)) !==
undefined &&
(descriptor = getOwnPropertyDescriptor(DecoratedClass, prop)) !== undefined &&
// and are not configurable.
!descriptor.configurable
)


@ -1,3 +1 @@
module.exports = require('../../@xen-orchestra/babel-config')(
require('./package.json')
)
module.exports = require('../../@xen-orchestra/babel-config')(require('./package.json'))


@ -20,19 +20,11 @@ export default {
pack: object => {
const version = object.header.version
return get(
OPENFLOW,
version,
`Unsupported OpenFlow version: ${version}`
).pack(object)
return get(OPENFLOW, version, `Unsupported OpenFlow version: ${version}`).pack(object)
},
unpack: (buffer, offset = 0) => {
const version = buffer.readUInt8(offset + scheme.offsets.version)
return get(
OPENFLOW,
version,
`Unsupported OpenFlow version: ${version}`
).unpack(buffer, offset)
return get(OPENFLOW, version, `Unsupported OpenFlow version: ${version}`).unpack(buffer, offset)
},
}


@ -41,18 +41,11 @@ const ACTION = {
export default {
pack: (object, buffer = undefined, offset = 0) => {
const { type } = object
return get(ACTION, type, `Invalid action type: ${type}`).pack(
object,
buffer,
offset
)
return get(ACTION, type, `Invalid action type: ${type}`).pack(object, buffer, offset)
},
unpack: (buffer, offset = 0) => {
const type = buffer.readUInt16BE(offset + of.offsets.actionHeader.type)
return get(ACTION, type, `Invalid action type: ${type}`).unpack(
buffer,
offset
)
return get(ACTION, type, `Invalid action type: ${type}`).unpack(buffer, offset)
},
}


@ -34,16 +34,11 @@ export default {
pack: object => {
const type = object.header.type
return get(MESSAGE, type, `Invalid OpenFlow message type: ${type}`).pack(
object
)
return get(MESSAGE, type, `Invalid OpenFlow message type: ${type}`).pack(object)
},
unpack: (buffer, offset = 0) => {
const type = buffer.readUInt8(offset + of.offsets.header.type)
return get(MESSAGE, type, `Invalid OpenFlow message type: ${type}`).unpack(
buffer,
offset
)
return get(MESSAGE, type, `Invalid OpenFlow message type: ${type}`).unpack(buffer, offset)
},
}


@ -37,11 +37,7 @@ const SIZES = {
// -----------------------------------------------------------------------------
const TYPES = [
of.instructionType.clearActions,
of.instructionType.writeActions,
of.instructionType.applyActions,
]
const TYPES = [of.instructionType.clearActions, of.instructionType.writeActions, of.instructionType.applyActions]
const OFFSETS = of.offsets.instructionActions
const PAD_LENGTH = 4
@ -57,11 +53,7 @@ export default {
actions.forEach(action => {
assert(Object.values(of.actionType).includes(action.type))
// TODO: manage experimenter
object.len += get(
SIZES,
action.type,
`Invalid action type: ${action.type}`
)
object.len += get(SIZES, action.type, `Invalid action type: ${action.type}`)
})
buffer = buffer !== undefined ? buffer : Buffer.alloc(object.len)


@ -26,18 +26,11 @@ const OFFSETS = of.offsets.instruction
export default {
pack: (object, buffer = undefined, offset = 0) => {
const { type } = object
return get(INSTRUCTION, type, `Invalid instruction type: ${type}`).pack(
object,
buffer,
offset
)
return get(INSTRUCTION, type, `Invalid instruction type: ${type}`).pack(object, buffer, offset)
},
unpack: (buffer = undefined, offset = 0) => {
const type = buffer.readUInt16BE(offset + OFFSETS.type)
return get(INSTRUCTION, type, `Invalid instruction type: ${type}`).unpack(
buffer,
offset
)
return get(INSTRUCTION, type, `Invalid instruction type: ${type}`).unpack(buffer, offset)
},
}


@ -33,12 +33,7 @@ export default {
const dataSize = header.length - of.sizes.header
if (dataSize > 0) {
object.data = Buffer.alloc(dataSize)
buffer.copy(
object.data,
0,
offset + OFFSETS.data,
offset + OFFSETS.data + dataSize
)
buffer.copy(object.data, 0, offset + OFFSETS.data, offset + OFFSETS.data + dataSize)
}
return object


@ -66,12 +66,7 @@ export default {
const dataSize = header.length - of.sizes.errorMsg
if (dataSize > 0) {
object.data = Buffer.alloc(dataSize)
buffer.copy(
object.data,
0,
offset + OFFSETS.data,
offset + OFFSETS.data + dataSize
)
buffer.copy(object.data, 0, offset + OFFSETS.data, offset + OFFSETS.data + dataSize)
}
return object


@ -13,15 +13,7 @@ const PAD_LENGTH = 3
export default {
pack: object => {
const {
header,
datapath_id: did,
n_buffers: nBufs,
n_tables: nTables,
capabilities,
reserved,
ports,
} = object
const { header, datapath_id: did, n_buffers: nBufs, n_tables: nTables, capabilities, reserved, ports } = object
assert(header.type === of.type.featuresReply)
header.length = of.sizes.switchFeatures + ports.length * of.sizes.port
@ -49,11 +41,7 @@ export default {
assert(header.type === of.type.featuresReply)
const object = { header }
object.datapath_id = buffer.toString(
'hex',
offset + OFFSETS.datapathId,
offset + OFFSETS.datapathId + 8
)
object.datapath_id = buffer.toString('hex', offset + OFFSETS.datapathId, offset + OFFSETS.datapathId + 8)
object.n_buffers = buffer.readUInt32BE(offset + OFFSETS.nBuffers)
object.n_tables = buffer.readUInt8(offset + OFFSETS.nTables)
@ -63,9 +51,7 @@ export default {
object.ports = []
const nPorts = (header.length - of.sizes.switchFeatures) / of.sizes.port
for (let i = 0; i < nPorts; ++i) {
object.ports.push(
ofPort.unpack(buffer, offset + OFFSETS.ports + i * of.sizes.port)
)
object.ports.push(ofPort.unpack(buffer, offset + OFFSETS.ports + i * of.sizes.port))
}
return object


@ -76,18 +76,10 @@ export default {
// fill header length
header.length = of.sizes.flowMod
instructions.forEach(instruction => {
header.length += get(
INSTRUCTION_SIZE,
instruction.type,
`Invalid instruction type: ${instruction.type}`
)
header.length += get(INSTRUCTION_SIZE, instruction.type, `Invalid instruction type: ${instruction.type}`)
const { actions = [] } = instruction
actions.forEach(action => {
header.length += get(
ACTION_SIZE,
action.type,
`Invalid instruction type: ${action.type}`
)
header.length += get(ACTION_SIZE, action.type, `Invalid instruction type: ${action.type}`)
})
})
@ -99,24 +91,12 @@ export default {
if (cookie_mask !== undefined) {
cookie_mask.copy(buffer, offset + OFFSETS.cookieMask)
} else {
buffer.fill(
0x00,
offset + OFFSETS.cookie_mask,
offset + OFFSETS.cookieMask + COOKIE_LENGTH
)
buffer.fill(0x00, offset + OFFSETS.cookie_mask, offset + OFFSETS.cookieMask + COOKIE_LENGTH)
}
cookie.copy(buffer, offset + OFFSETS.cookie)
} else {
buffer.fill(
0x00,
offset + OFFSETS.cookie,
offset + OFFSETS.cookie + COOKIE_LENGTH
)
buffer.fill(
0xff,
offset + OFFSETS.cookieMask,
offset + OFFSETS.cookieMask + COOKIE_LENGTH
)
buffer.fill(0x00, offset + OFFSETS.cookie, offset + OFFSETS.cookie + COOKIE_LENGTH)
buffer.fill(0xff, offset + OFFSETS.cookieMask, offset + OFFSETS.cookieMask + COOKIE_LENGTH)
}
buffer.writeUInt8(table_id, offset + OFFSETS.tableId)
@ -149,12 +129,7 @@ export default {
const object = { header }
object.cookie = Buffer.alloc(COOKIE_LENGTH)
buffer.copy(
object.cookie,
0,
offset + OFFSETS.cookie,
offset + OFFSETS.cookie + COOKIE_LENGTH
)
buffer.copy(object.cookie, 0, offset + OFFSETS.cookie, offset + OFFSETS.cookie + COOKIE_LENGTH)
if (
!uIntHelper.isUInt64None([
buffer.readUInt32BE(offset + OFFSETS.cookieMask),
@ -162,12 +137,7 @@ export default {
])
) {
object.cookie_mask = Buffer.alloc(COOKIE_LENGTH)
buffer.copy(
object.cookie_mask,
0,
offset + OFFSETS.cookieMask,
offset + OFFSETS.cookieMask + COOKIE_LENGTH
)
buffer.copy(object.cookie_mask, 0, offset + OFFSETS.cookieMask, offset + OFFSETS.cookieMask + COOKIE_LENGTH)
}
object.table_id = buffer.readUInt8(offset + OFFSETS.tableId)


@ -35,58 +35,26 @@ export default {
if (object.dl_src !== undefined) {
if (object.dl_src_mask !== undefined) {
addressParser.stringToEth(
object.dl_src_mask,
buffer,
offset + OFFSETS.dlSrcMask
)
addressParser.stringToEth(object.dl_src_mask, buffer, offset + OFFSETS.dlSrcMask)
} else {
buffer.fill(
0x00,
offset + OFFSETS.dlSrcMask,
offset + OFFSETS.dlSrcMask + of.ethAddrLen
)
buffer.fill(0x00, offset + OFFSETS.dlSrcMask, offset + OFFSETS.dlSrcMask + of.ethAddrLen)
}
addressParser.stringToEth(object.dl_src, buffer, offset + OFFSETS.dlSrc)
} else {
buffer.fill(
0x00,
offset + OFFSETS.dlSrc,
offset + OFFSETS.dlSrc + of.ethAddrLen
)
buffer.fill(
0xff,
offset + OFFSETS.dlSrcMask,
offset + OFFSETS.dlSrcMask + of.ethAddrLen
)
buffer.fill(0x00, offset + OFFSETS.dlSrc, offset + OFFSETS.dlSrc + of.ethAddrLen)
buffer.fill(0xff, offset + OFFSETS.dlSrcMask, offset + OFFSETS.dlSrcMask + of.ethAddrLen)
}
if (object.dl_dst !== undefined) {
if (object.dl_dst_mask !== undefined) {
addressParser.stringToEth(
object.dl_dst_mask,
buffer,
offset + OFFSETS.dlDstMask
)
addressParser.stringToEth(object.dl_dst_mask, buffer, offset + OFFSETS.dlDstMask)
} else {
buffer.fill(
0x00,
offset + OFFSETS.dlDstMask,
offset + OFFSETS.dlDstMask + of.ethAddrLen
)
buffer.fill(0x00, offset + OFFSETS.dlDstMask, offset + OFFSETS.dlDstMask + of.ethAddrLen)
}
addressParser.stringToEth(object.dl_dst, buffer, offset + OFFSETS.dlDst)
} else {
buffer.fill(
0x00,
offset + OFFSETS.dlDst,
offset + OFFSETS.dlDst + of.ethAddrLen
)
buffer.fill(
0xff,
offset + OFFSETS.dlDstMask,
offset + OFFSETS.dlDstMask + of.ethAddrLen
)
buffer.fill(0x00, offset + OFFSETS.dlDst, offset + OFFSETS.dlDst + of.ethAddrLen)
buffer.fill(0xff, offset + OFFSETS.dlDstMask, offset + OFFSETS.dlDstMask + of.ethAddrLen)
}
let dlVlan = 0
@ -133,58 +101,26 @@ export default {
if (object.nw_src !== undefined) {
if (object.nw_src_mask !== undefined) {
addressParser.stringToip4(
object.nw_src_mask,
buffer,
offset + OFFSETS.nwSrcMask
)
addressParser.stringToip4(object.nw_src_mask, buffer, offset + OFFSETS.nwSrcMask)
} else {
buffer.fill(
0x00,
offset + OFFSETS.nwSrcMask,
offset + OFFSETS.nwSrcMask + IP4_ADDR_LEN
)
buffer.fill(0x00, offset + OFFSETS.nwSrcMask, offset + OFFSETS.nwSrcMask + IP4_ADDR_LEN)
}
addressParser.stringToip4(object.nw_src, buffer, offset + OFFSETS.nwSrc)
} else {
buffer.fill(
0x00,
offset + OFFSETS.nwSrc,
offset + OFFSETS.nwSrc + IP4_ADDR_LEN
)
buffer.fill(
0xff,
offset + OFFSETS.nwSrcMask,
offset + OFFSETS.nwSrcMask + IP4_ADDR_LEN
)
buffer.fill(0x00, offset + OFFSETS.nwSrc, offset + OFFSETS.nwSrc + IP4_ADDR_LEN)
buffer.fill(0xff, offset + OFFSETS.nwSrcMask, offset + OFFSETS.nwSrcMask + IP4_ADDR_LEN)
}
if (object.nw_dst !== undefined) {
if (object.nw_dst_mask !== undefined) {
addressParser.stringToip4(
object.nw_dst_mask,
buffer,
offset + OFFSETS.nwDstMask
)
addressParser.stringToip4(object.nw_dst_mask, buffer, offset + OFFSETS.nwDstMask)
} else {
buffer.fill(
0x00,
offset + OFFSETS.nwDstMask,
offset + OFFSETS.nwDstMask + IP4_ADDR_LEN
)
buffer.fill(0x00, offset + OFFSETS.nwDstMask, offset + OFFSETS.nwDstMask + IP4_ADDR_LEN)
}
addressParser.stringToip4(object.nw_dst, buffer, offset + OFFSETS.nwDst)
} else {
buffer.fill(
0x00,
offset + OFFSETS.nwDst,
offset + OFFSETS.nwDst + IP4_ADDR_LEN
)
buffer.fill(
0xff,
offset + OFFSETS.nwDstMask,
offset + OFFSETS.nwDstMask + IP4_ADDR_LEN
)
buffer.fill(0x00, offset + OFFSETS.nwDst, offset + OFFSETS.nwDst + IP4_ADDR_LEN)
buffer.fill(0xff, offset + OFFSETS.nwDstMask, offset + OFFSETS.nwDstMask + IP4_ADDR_LEN)
}
let tpSrc = 0
@ -230,29 +166,12 @@ export default {
offset + OFFSETS.metadataMask + METADATA_LENGTH
)
} else {
buffer.fill(
0x00,
offset + OFFSETS.metadataMask,
offset + OFFSETS.metadataMask + METADATA_LENGTH
)
buffer.fill(0x00, offset + OFFSETS.metadataMask, offset + OFFSETS.metadataMask + METADATA_LENGTH)
}
buffer.copy(
object.metadata,
0,
offset + OFFSETS.metadata,
offset + OFFSETS.metadata + METADATA_LENGTH
)
buffer.copy(object.metadata, 0, offset + OFFSETS.metadata, offset + OFFSETS.metadata + METADATA_LENGTH)
} else {
buffer.fill(
0x00,
offset + OFFSETS.metadata,
offset + OFFSETS.metadata + METADATA_LENGTH
)
buffer.fill(
0xff,
offset + OFFSETS.metadataMask,
offset + OFFSETS.metadataMask + METADATA_LENGTH
)
buffer.fill(0x00, offset + OFFSETS.metadata, offset + OFFSETS.metadata + METADATA_LENGTH)
buffer.fill(0xff, offset + OFFSETS.metadataMask, offset + OFFSETS.metadataMask + METADATA_LENGTH)
}
buffer.writeUInt32BE(wildcards, offset + OFFSETS.wildcards)
@ -270,28 +189,20 @@ export default {
// Wildcards indicate which value to use for the match.
// if `wildcards & of.wildcards.<value>` === 0 then `value` is not wildcarded and must be used.
const wildcards = (object.wildcards = buffer.readUInt32BE(
offset + OFFSETS.wildcards
))
const wildcards = (object.wildcards = buffer.readUInt32BE(offset + OFFSETS.wildcards))
if ((wildcards & WILDCARDS.inPort) === 0) {
object.in_port = buffer.readUInt32BE(offset + OFFSETS.inPort)
}
if (!addressParser.isEthMaskAll(buffer, offset + OFFSETS.dlSrcMask)) {
if (!addressParser.isEthMaskNone(buffer, offset + OFFSETS.dlSrcMask)) {
object.dl_src_mask = addressParser.ethToString(
buffer,
offset + OFFSETS.dlSrcMask
)
object.dl_src_mask = addressParser.ethToString(buffer, offset + OFFSETS.dlSrcMask)
}
object.dl_src = addressParser.ethToString(buffer, offset + OFFSETS.dlSrc)
}
if (!addressParser.isEthMaskAll(buffer, offset + OFFSETS.dlDstMask)) {
if (!addressParser.isEthMaskNone(buffer, offset + OFFSETS.dlDstMask)) {
object.dl_dst_mask = addressParser.ethToString(
buffer,
offset + OFFSETS.dlDstMask
)
object.dl_dst_mask = addressParser.ethToString(buffer, offset + OFFSETS.dlDstMask)
}
object.dl_dst = addressParser.ethToString(buffer, offset + OFFSETS.dlDst)
}
@ -315,19 +226,13 @@ export default {
if (!addressParser.isIp4MaskAll(buffer, offset + OFFSETS.nwSrcMask)) {
if (!addressParser.isIp4MaskNone(buffer, offset + OFFSETS.nwSrcMask)) {
object.nw_src_mask = addressParser.ip4ToString(
buffer,
offset + OFFSETS.nwSrcMask
)
object.nw_src_mask = addressParser.ip4ToString(buffer, offset + OFFSETS.nwSrcMask)
}
object.nw_src = addressParser.ip4ToString(buffer, offset + OFFSETS.nwSrc)
}
if (!addressParser.isIp4MaskAll(buffer, offset + OFFSETS.nwDstMask)) {
if (!addressParser.isIp4MaskNone(buffer, offset + OFFSETS.nwDstMask)) {
object.nw_dst_mask = addressParser.ip4ToString(
buffer,
offset + OFFSETS.nwDstMask
)
object.nw_dst_mask = addressParser.ip4ToString(buffer, offset + OFFSETS.nwDstMask)
}
object.nw_dst = addressParser.ip4ToString(buffer, offset + OFFSETS.nwDst)
}
@ -361,12 +266,7 @@ export default {
)
}
object.metadata = Buffer.alloc(METADATA_LENGTH)
buffer.copy(
object.metadata,
0,
offset + OFFSETS.metadata,
offset + OFFSETS.metadata + METADATA_LENGTH
)
buffer.copy(object.metadata, 0, offset + OFFSETS.metadata, offset + OFFSETS.metadata + METADATA_LENGTH)
}
return object


@ -32,11 +32,7 @@ export default {
buffer.fill(0, offset + OFFSETS.pad, offset + OFFSETS.pad + PAD2_LENGTH)
buffer.write(name, offset + OFFSETS.name, of.maxPortNameLen)
if (name.length < of.maxPortNameLen) {
buffer.fill(
0,
offset + OFFSETS.name + name.length,
offset + OFFSETS.name + of.maxPortNameLen
)
buffer.fill(0, offset + OFFSETS.name + name.length, offset + OFFSETS.name + of.maxPortNameLen)
}
buffer.writeUInt32BE(config, offset + OFFSETS.config)
@ -56,11 +52,7 @@ export default {
body.port_no = buffer.readUInt32BE(offset + OFFSETS.portNo)
body.hw_addr = addressParser.ethToString(buffer, offset + OFFSETS.hwAddr)
const name = buffer.toString(
'utf8',
offset + OFFSETS.name,
offset + OFFSETS.name + of.maxPortNameLen
)
const name = buffer.toString('utf8', offset + OFFSETS.name, offset + OFFSETS.name + of.maxPortNameLen)
body.name = name.substr(0, name.indexOf('\0')) // Remove useless 0 if name.length < of.maxPortNameLen
body.config = buffer.readUInt32BE(offset + OFFSETS.config)


@ -5,12 +5,10 @@ import util from 'util'
export default {
isEthMaskNone: (buffer, offset) =>
buffer.readUInt32BE(offset) === 0x00000000 &&
buffer.readUInt16BE(offset + 4) === 0x0000,
buffer.readUInt32BE(offset) === 0x00000000 && buffer.readUInt16BE(offset + 4) === 0x0000,
isEthMaskAll: (buffer, offset) =>
buffer.readUInt32BE(offset) === 0xffffffff &&
buffer.readUInt16BE(offset + 4) === 0xffff,
buffer.readUInt32BE(offset) === 0xffffffff && buffer.readUInt16BE(offset + 4) === 0xffff,
isIp4MaskNone: (buffer, offset) => buffer.readUInt32BE(offset) === 0x00000000,


@ -2,10 +2,6 @@ import assert from 'assert'
export default function get(map, key, errorMsg = undefined) {
const value = map[String(key)]
assert.notStrictEqual(
value,
undefined,
errorMsg !== undefined ? errorMsg : `${key} is invalid`
)
assert.notStrictEqual(value, undefined, errorMsg !== undefined ? errorMsg : `${key} is invalid`)
return value
}


@ -13,13 +13,9 @@ const openssl = (cmd, args, { input, ...opts } = {}) =>
exports.genSelfSignedCert = async ({ days = 360 } = {}) => {
const key = await openssl('genrsa', ['2048'])
return {
cert: await openssl(
'req',
['-batch', '-new', '-key', '-', '-x509', '-days', String(days), '-nodes'],
{
input: key,
}
),
cert: await openssl('req', ['-batch', '-new', '-key', '-', '-x509', '-days', String(days), '-nodes'], {
input: key,
}),
key,
}
}
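
For reference, a usage sketch:

const { cert, key } = await genSelfSignedCert({ days: 365 })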


@ -1,3 +1 @@
module.exports = require('../../@xen-orchestra/babel-config')(
require('./package.json')
)
module.exports = require('../../@xen-orchestra/babel-config')(require('./package.json'))


@ -3,10 +3,7 @@ import escapeRegExp from 'lodash/escapeRegExp'
const compareLengthDesc = (a, b) => b.length - a.length
export function compileTemplate(pattern, rules) {
const matches = Object.keys(rules)
.sort(compareLengthDesc)
.map(escapeRegExp)
.join('|')
const matches = Object.keys(rules).sort(compareLengthDesc).map(escapeRegExp).join('|')
const regExp = new RegExp(`\\\\(?:\\\\|${matches})|${matches}`, 'g')
return (...params) =>
pattern.replace(regExp, match => {


@ -2,13 +2,10 @@
import { compileTemplate } from '.'
it("correctly replaces the template's variables", () => {
const replacer = compileTemplate(
'{property}_\\{property}_\\\\{property}_{constant}_%_FOO',
{
'{property}': obj => obj.name,
'{constant}': 1235,
'%': (_, i) => i,
}
)
const replacer = compileTemplate('{property}_\\{property}_\\\\{property}_{constant}_%_FOO', {
'{property}': obj => obj.name,
'{constant}': 1235,
'%': (_, i) => i,
})
expect(replacer({ name: 'bar' }, 5)).toBe('bar_{property}_\\bar_1235_5_FOO')
})


@ -1,3 +1 @@
module.exports = require('../../@xen-orchestra/babel-config')(
require('./package.json')
)
module.exports = require('../../@xen-orchestra/babel-config')(require('./package.json'))


@ -23,11 +23,7 @@ import Xo from 'xo-lib'
import { parseOVAFile } from 'xo-vmdk-to-vhd'
import pkg from '../package'
import {
load as loadConfig,
set as setConfig,
unset as unsetConfig,
} from './config'
import { load as loadConfig, set as setConfig, unset as unsetConfig } from './config'
function help() {
return stripIndent(
@ -121,11 +117,7 @@ function nodeStringDecoder(buffer, encoder) {
export async function inspect(args) {
const file = args[0]
const data = await parseOVAFile(
new NodeParsableFile(file, (await stat(file)).size),
nodeStringDecoder,
true
)
const data = await parseOVAFile(new NodeParsableFile(file, (await stat(file)).size), nodeStringDecoder, true)
console.log('file metadata:', data)
}
@ -159,14 +151,10 @@ export async function upload(args) {
overrides = parseOverride(args)
}
const data = await parseOVAFile(
new NodeParsableFile(file, (await stat(file)).size),
nodeStringDecoder
)
const data = await parseOVAFile(new NodeParsableFile(file, (await stat(file)).size), nodeStringDecoder)
const params = { sr: srId }
const xo = await connect()
const getXoObject = async filter =>
Object.values(await xo.call('xo.getAllObjects', { filter }))[0]
const getXoObject = async filter => Object.values(await xo.call('xo.getAllObjects', { filter }))[0]
const sr = await getXoObject({ id: srId })
const pool = await getXoObject({ id: sr.$poolId })
const master = await getXoObject({ id: pool.master })
@ -260,10 +248,7 @@ export class NodeParsableFile {
)
// crazy stuff to get a browser-compatible ArrayBuffer from a node buffer
// https://stackoverflow.com/a/31394257/72637
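// (a Node Buffer may be a view onto a larger shared allocation, hence slicing out the exact byte range)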
return result.buffer.slice(
result.byteOffset,
result.byteOffset + result.byteLength
)
return result.buffer.slice(result.byteOffset, result.byteOffset + result.byteLength)
}
}
@ -294,9 +279,7 @@ export default async function main(args) {
if (!args || !args.length || args[0] === '-h' || args[0] === '--help') {
return help()
}
const fnName = args[0].replace(/^--|-\w/g, match =>
match === '--' ? '' : match[1].toUpperCase()
)
const fnName = args[0].replace(/^--|-\w/g, match => (match === '--' ? '' : match[1].toUpperCase()))
if (fnName in exports) {
return exports[fnName](args.slice(1))
}

View File

@ -92,10 +92,7 @@ module.exports = {
collapsable: false, // optional, defaults to true
sidebarDepth: 1, // optional, defaults to 1
children: [
[
'https://github.com/vatesfr/xen-orchestra/blob/master/CHANGELOG.md#changelog',
'Changelog',
],
['https://github.com/vatesfr/xen-orchestra/blob/master/CHANGELOG.md#changelog', 'Changelog'],
['/code_of_conduct', 'Code of Conduct'],
['/contributing', 'Contributing'],
['/licenses', 'Licenses'],

View File

@ -172,11 +172,7 @@ const handleHook = data => {
const { method, params, type, result, error, timestamp } = JSON.parse(data)
// Log it
console.log(
`${new Date(timestamp).toISOString()} [${method}|${type}] ${params} → ${
result || error
}`
)
console.log(`${new Date(timestamp).toISOString()} [${method}|${type}] ${params} → ${result || error}`)
// Run scripts
exec(`./hook-scripts/${method}-${type}.sh`)

View File

@ -292,7 +292,7 @@ Stored records are secured by:
- structure: records are chained using the hash chain structure which means that each record is linked to its parent in a cryptographically secure way. This structure prevents the alteration of old records.
- hash upload: the hash chain structure has limits: it does not protect against the rewrite of recent or even all records. To reduce this risk, the Audit log plugin regularly uploads the last record hash to our database after checking the integrity of the whole record chain (see the sketch below). This functionality keeps the records safe by notifying users in case of alteration of the records.
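
To make the chaining concrete, here is a minimal verification sketch, assuming a hypothetical record shape where each record stores the hash of its predecessor; the `hashRecord` and `verifyChain` helpers below are illustrative stand-ins, not the plugin's actual API:

```js
// Minimal hash-chain verification sketch (hypothetical record shape).
// Tampering with any old record changes its hash, which breaks the
// `previousHash` link of every record that follows it.
const { createHash } = require('crypto')

const hashRecord = record =>
  createHash('sha256')
    .update(JSON.stringify({ event: record.event, previousHash: record.previousHash }))
    .digest('hex')

function verifyChain(records) {
  // `records` are ordered from oldest to newest
  let previousHash = null
  for (const record of records) {
    if (record.previousHash !== previousHash) {
      throw new Error(`broken chain at record ${record.id}`)
    }
    previousHash = hashRecord(record)
  }
  return records.length // number of valid records
}
```

Uploading the hash of the newest record then pins the whole chain: as long as that single hash is stored out of reach, none of the earlier records can be rewritten without detection.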
### Configuration

View File

@ -1,6 +1,4 @@
declare module 'limit-concurrency-decorator' {
declare function limitConcurrencyDecorator(
concurrency: number
): <T: Function>(T) => T
declare function limitConcurrencyDecorator(concurrency: number): <T: Function>(T) => T
declare export default typeof limitConcurrencyDecorator
}

flow-typed/lodash.js
View File

@ -1,33 +1,15 @@
declare module 'lodash' {
declare export function countBy<K, V>(
object: { [K]: V },
iteratee: K | ((V, K) => string)
): { [string]: number }
declare export function forEach<K, V>(
object: { [K]: V },
iteratee: (V, K) => void
): void
declare export function groupBy<K, V>(
object: { [K]: V },
iteratee: K | ((V, K) => string)
): { [string]: V[] }
declare export function countBy<K, V>(object: { [K]: V }, iteratee: K | ((V, K) => string)): { [string]: number }
declare export function forEach<K, V>(object: { [K]: V }, iteratee: (V, K) => void): void
declare export function groupBy<K, V>(object: { [K]: V }, iteratee: K | ((V, K) => string)): { [string]: V[] }
declare export function invert<K, V>(object: { [K]: V }): { [V]: K }
declare export function isEmpty(mixed): boolean
declare export function keyBy<T>(array: T[], iteratee: string): boolean
declare export function last<T>(array?: T[]): T | void
declare export function map<T1, T2>(
collection: T1[],
iteratee: (T1) => T2
): T2[]
declare export function mapValues<K, V1, V2>(
object: { [K]: V1 },
iteratee: (V1, K) => V2
): { [K]: V2 }
declare export function map<T1, T2>(collection: T1[], iteratee: (T1) => T2): T2[]
declare export function mapValues<K, V1, V2>(object: { [K]: V1 }, iteratee: (V1, K) => V2): { [K]: V2 }
declare export function noop(...args: mixed[]): void
declare export function some<T>(
collection: T[],
iteratee: (T, number) => boolean
): boolean
declare export function some<T>(collection: T[], iteratee: (T, number) => boolean): boolean
declare export function sum(values: number[]): number
declare export function values<K, V>(object: { [K]: V }): V[]
}

View File

@ -8,9 +8,7 @@ declare module 'promise-toolbox' {
reject: T => void,
resolve: T => void,
|}
declare export function fromCallback<T>(
(cb: (error: any, value: T) => void) => void
): Promise<T>
declare export function fromCallback<T>((cb: (error: any, value: T) => void) => void): Promise<T>
declare export function fromEvent(emitter: mixed, string): Promise<mixed>
declare export function ignoreErrors(): Promise<void>
declare export function timeout<T>(delay: number): Promise<T>

View File

@ -1,3 +1 @@
module.exports = require('../../@xen-orchestra/babel-config')(
require('./package.json')
)
module.exports = require('../../@xen-orchestra/babel-config')(require('./package.json'))

View File

@ -1,15 +1,11 @@
import * as CM from './'
export const pattern =
'foo !"\\\\ \\"" name:|(wonderwoman batman) hasCape? age:32 chi*go /^foo\\/bar\\./i'
export const pattern = 'foo !"\\\\ \\"" name:|(wonderwoman batman) hasCape? age:32 chi*go /^foo\\/bar\\./i'
export const ast = new CM.And([
new CM.String('foo'),
new CM.Not(new CM.String('\\ "')),
new CM.Property(
'name',
new CM.Or([new CM.String('wonderwoman'), new CM.String('batman')])
),
new CM.Property('name', new CM.Or([new CM.String('wonderwoman'), new CM.String('batman')])),
new CM.TruthyProperty('hasCape'),
new CM.Property('age', new CM.NumberOrStringNode('32')),
new CM.GlobPattern('chi*go'),

View File

@ -141,10 +141,7 @@ export class NumberNode extends Node {
}
match(value) {
return (
value === this.value ||
(value !== null && typeof value === 'object' && some(value, this.match))
)
return value === this.value || (value !== null && typeof value === 'object' && some(value, this.match))
}
toString() {
@ -170,8 +167,7 @@ export class NumberOrStringNode extends Node {
value === numValue ||
(typeof value === 'string'
? value.toLowerCase().indexOf(lcValue) !== -1
: (Array.isArray(value) || isPlainObject(value)) &&
some(value, this.match))
: (Array.isArray(value) || isPlainObject(value)) && some(value, this.match))
)
}
@ -200,11 +196,7 @@ export class Property extends Node {
const escapeChar = char => '\\' + char
const formatString = value =>
Number.isNaN(+value)
? isRawString(value)
? value
: `"${value.replace(/\\|"/g, escapeChar)}"`
: `"${value}"`
Number.isNaN(+value) ? (isRawString(value) ? value : `"${value.replace(/\\|"/g, escapeChar)}"`) : `"${value}"`
export class GlobPattern extends Node {
constructor(value) {
@ -219,10 +211,7 @@ export class GlobPattern extends Node {
// should not be enumerable for the tests
Object.defineProperty(this, 'match', {
value: this.match.bind(
this,
new RegExp(value.split('*').map(escapeRegExp).join('.*'), 'i')
),
value: this.match.bind(this, new RegExp(value.split('*').map(escapeRegExp).join('.*'), 'i')),
})
}
@ -330,9 +319,7 @@ class Failure {
}
get value() {
throw new Error(
`parse error: expected ${this.expected} at position ${this.pos}`
)
throw new Error(`parse error: expected ${this.expected} at position ${this.pos}`)
}
}
@ -369,9 +356,7 @@ class P {
}
static lazy(parserCreator, arg) {
const parser = new P((input, pos, end) =>
(parser._parse = parserCreator(arg)._parse)(input, pos, end)
)
const parser = new P((input, pos, end) => (parser._parse = parserCreator(arg)._parse)(input, pos, end))
return parser
}
@ -380,9 +365,7 @@ class P {
return new P((input, pos) => {
regex.lastIndex = pos
const matches = regex.exec(input)
return matches !== null
? new Success(regex.lastIndex, matches[0])
: new Failure(pos, regex)
return matches !== null ? new Success(regex.lastIndex, matches[0]) : new Failure(pos, regex)
})
}
@ -405,9 +388,7 @@ class P {
static text(text) {
const { length } = text
return new P((input, pos) =>
input.startsWith(text, pos)
? new Success(pos + length, text)
: new Failure(pos, `'${text}'`)
input.startsWith(text, pos) ? new Success(pos + length, text) : new Failure(pos, `'${text}'`)
)
}
@ -443,10 +424,7 @@ class P {
value.push(result.value)
pos = result.pos
}
while (
i < max &&
(result = this._parse(input, pos, end)) instanceof Success
) {
while (i < max && (result = this._parse(input, pos, end)) instanceof Success) {
++i
value.push(result.value)
pos = result.pos
@ -471,17 +449,13 @@ class P {
}
}
P.eof = new P((input, pos, end) =>
pos < end ? new Failure(pos, 'end of input') : new Success(pos)
)
P.eof = new P((input, pos, end) => (pos < end ? new Failure(pos, 'end of input') : new Success(pos)))
// -------------------------------------------------------------------
const parser = P.grammar({
default: r =>
P.seq(r.ws, r.term.repeat(), P.eof).map(([, terms]) =>
terms.length === 0 ? new Null() : new And(terms)
),
P.seq(r.ws, r.term.repeat(), P.eof).map(([, terms]) => (terms.length === 0 ? new Null() : new And(terms))),
globPattern: new P((input, pos, end) => {
let value = ''
let c
@ -489,9 +463,7 @@ const parser = P.grammar({
++pos
value += c
}
return value.length === 0
? new Failure(pos, 'a raw string')
: new Success(pos, value)
return value.length === 0 ? new Failure(pos, 'a raw string') : new Success(pos, value)
}),
quotedString: new P((input, pos, end) => {
if (input[pos] !== '"') {
@ -518,9 +490,7 @@ const parser = P.grammar({
++pos
value += c
}
return value.length === 0
? new Failure(pos, 'a raw string')
: new Success(pos, value)
return value.length === 0 ? new Failure(pos, 'a raw string') : new Success(pos, value)
}),
regex: new P((input, pos, end) => {
if (input[pos] !== '/') {
@ -551,17 +521,8 @@ const parser = P.grammar({
}),
term: r =>
P.alt(
P.seq(P.text('('), r.ws, r.term.repeat(1), P.text(')')).map(
_ => new And(_[2])
),
P.seq(
P.text('|'),
r.ws,
P.text('('),
r.ws,
r.term.repeat(1),
P.text(')')
).map(_ => new Or(_[4])),
P.seq(P.text('('), r.ws, r.term.repeat(1), P.text(')')).map(_ => new And(_[2])),
P.seq(P.text('|'), r.ws, P.text('('), r.ws, r.term.repeat(1), P.text(')')).map(_ => new Or(_[4])),
P.seq(P.text('!'), r.ws, r.term).map(_ => new Not(_[2])),
P.seq(P.regex(/[<>]=?/), r.rawString).map(([op, val]) => {
val = +val
@ -570,9 +531,7 @@ const parser = P.grammar({
}
return new Comparison(op, val)
}),
P.seq(r.property, r.ws, P.text(':'), r.ws, r.term).map(
_ => new Property(_[0], _[4])
),
P.seq(r.property, r.ws, P.text(':'), r.ws, r.term).map(_ => new Property(_[0], _[4])),
P.seq(r.property, P.text('?')).map(_ => new TruthyProperty(_[0])),
r.value
).skip(r.ws),
@ -582,9 +541,7 @@ const parser = P.grammar({
r.regex,
r.globPattern.map(str => {
const asNum = +str
return Number.isNaN(asNum)
? new GlobPattern(str)
: new NumberOrStringNode(str)
return Number.isNaN(asNum) ? new GlobPattern(str) : new NumberOrStringNode(str)
})
),
ws: P.regex(/\s*/),
@ -664,12 +621,7 @@ export const getPropertyClausesStrings = node => {
// -------------------------------------------------------------------
export const setPropertyClause = (node, name, child) => {
const property =
child &&
new Property(
name,
typeof child === 'string' ? new StringNode(child) : child
)
const property = child && new Property(name, typeof child === 'string' ? new StringNode(child) : child)
if (node === undefined) {
return property

View File

@ -12,9 +12,7 @@ import {
} from './'
it('getPropertyClausesStrings', () => {
const tmp = getPropertyClausesStrings(
parse('foo bar:baz baz:|(foo bar /^boo$/ /^far$/) foo:/^bar$/')
)
const tmp = getPropertyClausesStrings(parse('foo bar:baz baz:|(foo bar /^boo$/ /^far$/) foo:/^bar$/'))
expect(tmp).toEqual({
bar: ['baz'],
baz: ['foo', 'bar', 'boo', 'far'],
@ -66,35 +64,23 @@ describe('NumberOrStringNode', () => {
describe('setPropertyClause', () => {
it('creates a node if none passed', () => {
expect(setPropertyClause(undefined, 'foo', 'bar').toString()).toBe(
'foo:bar'
)
expect(setPropertyClause(undefined, 'foo', 'bar').toString()).toBe('foo:bar')
})
it('adds a property clause if there was none', () => {
expect(setPropertyClause(parse('baz'), 'foo', 'bar').toString()).toBe(
'baz foo:bar'
)
expect(setPropertyClause(parse('baz'), 'foo', 'bar').toString()).toBe('baz foo:bar')
})
it('replaces the property clause if there was one', () => {
expect(
setPropertyClause(parse('plip foo:baz plop'), 'foo', 'bar').toString()
).toBe('plip plop foo:bar')
expect(setPropertyClause(parse('plip foo:baz plop'), 'foo', 'bar').toString()).toBe('plip plop foo:bar')
expect(
setPropertyClause(parse('foo:|(baz plop)'), 'foo', 'bar').toString()
).toBe('foo:bar')
expect(setPropertyClause(parse('foo:|(baz plop)'), 'foo', 'bar').toString()).toBe('foo:bar')
})
it('removes the property clause if no child is passed', () => {
expect(
setPropertyClause(parse('foo bar:baz qux'), 'bar', undefined).toString()
).toBe('foo qux')
expect(setPropertyClause(parse('foo bar:baz qux'), 'bar', undefined).toString()).toBe('foo qux')
expect(
setPropertyClause(parse('foo bar:baz qux'), 'baz', undefined).toString()
).toBe('foo bar:baz qux')
expect(setPropertyClause(parse('foo bar:baz qux'), 'baz', undefined).toString()).toBe('foo bar:baz qux')
})
})

View File

@ -1,3 +1 @@
module.exports = require('../../@xen-orchestra/babel-config')(
require('./package.json')
)
module.exports = require('../../@xen-orchestra/babel-config')(require('./package.json'))

View File

@ -1,13 +1,7 @@
// @flow
/* eslint-disable no-use-before-define */
export type Pattern =
| AndPattern
| OrPattern
| NotPattern
| ObjectPattern
| ArrayPattern
| ValuePattern
export type Pattern = AndPattern | OrPattern | NotPattern | ObjectPattern | ArrayPattern | ValuePattern
/* eslint-enable no-use-before-define */
// all patterns must match
@ -77,5 +71,4 @@ const match = (pattern: Pattern, value: any) => {
return pattern === value
}
export const createPredicate = (pattern: Pattern) => (value: any) =>
match(pattern, value)
export const createPredicate = (pattern: Pattern) => (value: any) => match(pattern, value)

View File

@ -1,3 +1 @@
module.exports = require('../../@xen-orchestra/babel-config')(
require('./package.json')
)
module.exports = require('../../@xen-orchestra/babel-config')(require('./package.json'))

View File

@ -16,8 +16,6 @@ export const writeStream = (input, path) => {
const output = createOutputStream(path)
return new Promise((resolve, reject) =>
input
.on('error', reject)
.pipe(output.on('error', reject).on('finish', resolve))
input.on('error', reject).pipe(output.on('error', reject).on('finish', resolve))
)
}

View File

@ -14,8 +14,7 @@ export default async function main(args) {
onProgress({ done, total }) {
if (bar === undefined) {
bar = new Bar({
format:
'merging [{bar}] {percentage}% | ETA: {eta}s | {value}/{total}',
format: 'merging [{bar}] {percentage}% | ETA: {eta}s | {value}/{total}',
})
bar.start(total, done)
} else {

View File

@ -9,8 +9,5 @@ export default async args => {
return `Usage: ${this.command} <input VHD> [<output raw>]`
}
await writeStream(
createContentStream(getHandler({ url: 'file:///' }), resolve(args[0])),
args[1]
)
await writeStream(createContentStream(getHandler({ url: 'file:///' }), resolve(args[0])), args[1])
}

View File

@ -11,10 +11,6 @@ export default async function main(args) {
const handler = getHandler({ url: 'file:///' })
const stream = await createSyntheticStream(handler, path.resolve(args[0]))
return new Promise((resolve, reject) => {
stream
.on('error', reject)
.pipe(
createWriteStream(args[1]).on('error', reject).on('finish', resolve)
)
stream.on('error', reject).pipe(createWriteStream(args[1]).on('error', reject).on('finish', resolve))
})
}

View File

@ -1,3 +1 @@
module.exports = require('../../@xen-orchestra/babel-config')(
require('./package.json')
)
module.exports = require('../../@xen-orchestra/babel-config')(require('./package.json'))

View File

@ -67,9 +67,7 @@ test('blocks can be moved', async () => {
await newVhd._freeFirstBlockSpace(8000000)
const recoveredFileName = `${tempDir}/recovered`
await recoverRawContent(vhdFileName, recoveredFileName, originalSize)
expect(await fs.readFile(recoveredFileName)).toEqual(
await fs.readFile(rawFileName)
)
expect(await fs.readFile(recoveredFileName)).toEqual(await fs.readFile(rawFileName))
})
test('the BAT MSB is not used for sign', async () => {
@ -101,13 +99,7 @@ test('the BAT MSB is not used for sign', async () => {
const entry = vhd._getBatEntry(i)
if (entry !== 0xffffffff) {
const block = (await vhd2._readBlock(i)).data
await fs.write(
recoveredFile,
block,
0,
block.length,
vhd2.header.blockSize * i
)
await fs.write(recoveredFile, block, 0, block.length, vhd2.header.blockSize * i)
}
}
} finally {
@ -155,10 +147,7 @@ test('writeData in 2 non-overlapping operations', async () => {
await newVhd.readBlockAllocationTable()
const splitPointSectors = 2
await newVhd.writeData(0, randomData.slice(0, splitPointSectors * 512))
await newVhd.writeData(
splitPointSectors,
randomData.slice(splitPointSectors * 512)
)
await newVhd.writeData(splitPointSectors, randomData.slice(splitPointSectors * 512))
await recoverRawContent(emptyFileName, recoveredFileName, originalSize)
expect(await fs.readFile(recoveredFileName)).toEqual(randomData)
})
@ -179,10 +168,7 @@ test('writeData in 2 overlapping operations', async () => {
const endFirstWrite = 3
const startSecondWrite = 2
await newVhd.writeData(0, randomData.slice(0, endFirstWrite * 512))
await newVhd.writeData(
startSecondWrite,
randomData.slice(startSecondWrite * 512)
)
await newVhd.writeData(startSecondWrite, randomData.slice(startSecondWrite * 512))
await recoverRawContent(emptyFileName, recoveredFileName, originalSize)
expect(await fs.readFile(recoveredFileName)).toEqual(randomData)
})
@ -201,9 +187,7 @@ test('BAT can be extended and blocks moved', async () => {
await newVhd.readBlockAllocationTable()
await newVhd.ensureBatSize(2000)
await recoverRawContent(vhdFileName, recoveredFileName, originalSize)
expect(await fs.readFile(recoveredFileName)).toEqual(
await fs.readFile(rawFileName)
)
expect(await fs.readFile(recoveredFileName)).toEqual(await fs.readFile(rawFileName))
})
test('coalesce works with empty parent files', async () => {
@ -214,12 +198,7 @@ test('coalesce works with empty parent files', async () => {
const recoveredFileName = `${tempDir}/recovered`
await createRandomFile(rawFileName, mbOfRandom)
await convertFromRawToVhd(rawFileName, vhdFileName)
await execa('qemu-img', [
'create',
'-fvpc',
emptyFileName,
mbOfRandom + 1 + 'M',
])
await execa('qemu-img', ['create', '-fvpc', emptyFileName, mbOfRandom + 1 + 'M'])
await checkFile(vhdFileName)
await checkFile(emptyFileName)
const handler = getHandler({ url: 'file://' })
@ -229,9 +208,7 @@ test('coalesce works with empty parent files', async () => {
await checkFile(emptyFileName)
await vhdMerge(handler, emptyFileName, handler, vhdFileName)
await recoverRawContent(emptyFileName, recoveredFileName, originalSize)
expect(await fs.readFile(recoveredFileName)).toEqual(
await fs.readFile(rawFileName)
)
expect(await fs.readFile(recoveredFileName)).toEqual(await fs.readFile(rawFileName))
})
test('coalesce works in normal cases', async () => {
@ -245,21 +222,10 @@ test('coalesce works in normal cases', async () => {
const recoveredFileName = `${tempDir}/recovered`
await createRandomFile(randomFileName, mbOfRandom)
await createRandomFile(smallRandomFileName, Math.ceil(mbOfRandom / 2))
await execa('qemu-img', [
'create',
'-fvpc',
parentFileName,
mbOfRandom + 1 + 'M',
])
await execa('qemu-img', ['create', '-fvpc', parentFileName, mbOfRandom + 1 + 'M'])
await convertFromRawToVhd(randomFileName, child1FileName)
const handler = getHandler({ url: 'file://' })
await execa('vhd-util', [
'snapshot',
'-n',
child2FileName,
'-p',
child1FileName,
])
await execa('vhd-util', ['snapshot', '-n', child2FileName, '-p', child1FileName])
const vhd = new Vhd(handler, child2FileName)
await vhd.readHeaderAndFooter()
await vhd.readBlockAllocationTable()
@ -293,9 +259,7 @@ test('coalesce works in normal cases', async () => {
} finally {
await fs.close(fd)
}
expect(await fs.readFile(recoveredFileName)).toEqual(
await fs.readFile(random2FileName)
)
expect(await fs.readFile(recoveredFileName)).toEqual(await fs.readFile(random2FileName))
})
test.only('createSyntheticStream passes vhd-util check', async () => {
@ -310,9 +274,7 @@ test.only('createSyntheticStream passes vhd-util check', async () => {
const stream = await createSyntheticStream(handler, vhdFileName)
const expectedVhdSize = (await fs.stat(vhdFileName)).size
expect(stream.length).toEqual((await fs.stat(vhdFileName)).size)
await pFromCallback(cb =>
pipeline(stream, fs.createWriteStream(recoveredVhdFileName), cb)
)
await pFromCallback(cb => pipeline(stream, fs.createWriteStream(recoveredVhdFileName), cb))
await checkFile(recoveredVhdFileName)
const stats = await fs.stat(recoveredVhdFileName)
expect(stats.size).toEqual(expectedVhdSize)

View File

@ -13,8 +13,5 @@ export default footer => {
assert.strictEqual(footer.dataOffset, FOOTER_SIZE)
assert.strictEqual(footer.fileFormatVersion, FILE_FORMAT_VERSION)
assert(footer.originalSize <= footer.currentSize)
assert(
footer.diskType === DISK_TYPE_DIFFERENCING ||
footer.diskType === DISK_TYPE_DYNAMIC
)
assert(footer.diskType === DISK_TYPE_DIFFERENCING || footer.diskType === DISK_TYPE_DYNAMIC)
}

View File

@ -14,13 +14,7 @@ import {
PLATFORM_WI2K,
} from './_constants'
export function createFooter(
size,
timestamp,
geometry,
dataOffset,
diskType = DISK_TYPE_FIXED
) {
export function createFooter(size, timestamp, geometry, dataOffset, diskType = DISK_TYPE_FIXED) {
const footer = fuFooter.pack({
cookie: FOOTER_COOKIE,
features: 2,

View File

@ -1,6 +1,5 @@
import { dirname, resolve } from 'path'
const resolveRelativeFromFile = (file, path) =>
resolve('/', dirname(file), path).slice(1)
const resolveRelativeFromFile = (file, path) => resolve('/', dirname(file), path).slice(1)
export { resolveRelativeFromFile as default }

View File

@ -13,13 +13,8 @@ const uint64 = fu.derive(
const uint64Undefinable = fu.derive(
fu.uint32(2),
number =>
number === undefined
? [0xffffffff, 0xffffffff]
: [Math.floor(number / SIZE_OF_32_BITS), number % SIZE_OF_32_BITS],
_ =>
_[0] === 0xffffffff && _[1] === 0xffffffff
? undefined
: _[0] * SIZE_OF_32_BITS + _[1]
number === undefined ? [0xffffffff, 0xffffffff] : [Math.floor(number / SIZE_OF_32_BITS), number % SIZE_OF_32_BITS],
_ => (_[0] === 0xffffffff && _[1] === 0xffffffff ? undefined : _[0] * SIZE_OF_32_BITS + _[1])
)
export const fuFooter = fu.struct([
@ -77,20 +72,13 @@ assert.strictEqual(fuHeader.size, HEADER_SIZE)
export const packField = (field, value, buf) => {
const { offset } = field
field.pack(
value,
buf,
typeof offset !== 'object' ? { bytes: offset, bits: 0 } : offset
)
field.pack(value, buf, typeof offset !== 'object' ? { bytes: offset, bits: 0 } : offset)
}
export const unpackField = (field, buf) => {
const { offset } = field
return field.unpack(
buf,
typeof offset !== 'object' ? { bytes: offset, bits: 0 } : offset
)
return field.unpack(buf, typeof offset !== 'object' ? { bytes: offset, bits: 0 } : offset)
}
// Returns the checksum of a raw struct.
@ -104,11 +92,7 @@ export function checksumStruct(buf, struct) {
for (let i = 0, n = checksumOffset; i < n; ++i) {
sum += buf[i]
}
for (
let i = checksumOffset + checksumField.size, n = struct.size;
i < n;
++i
) {
for (let i = checksumOffset + checksumField.size, n = struct.size; i < n; ++i) {
sum += buf[i]
}

View File

@ -3,13 +3,7 @@ import { dirname, relative } from 'path'
import Vhd from './vhd'
import { DISK_TYPE_DIFFERENCING } from './_constants'
export default async function chain(
parentHandler,
parentPath,
childHandler,
childPath,
force = false
) {
export default async function chain(parentHandler, parentPath, childHandler, childPath, force = false) {
const parentVhd = new Vhd(parentHandler, parentPath)
const childVhd = new Vhd(childHandler, childPath)
@ -23,10 +17,7 @@ export default async function chain(
footer.diskType = DISK_TYPE_DIFFERENCING
}
await Promise.all([
childVhd.readBlockAllocationTable(),
parentVhd.readHeaderAndFooter(),
])
await Promise.all([childVhd.readBlockAllocationTable(), parentVhd.readHeaderAndFooter()])
const parentName = relative(dirname(childPath), parentPath)
header.parentUuid = parentVhd.footer.uuid

View File

@ -20,10 +20,10 @@ export default asyncIteratorToStream(async function* (handler, path) {
yield vhd.containsBlock(i) ? (await vhd._readBlock(i)).data : emptyBlock
}
if (nLeftoverBytes !== 0) {
yield (vhd.containsBlock(nFullBlocks)
? (await vhd._readBlock(nFullBlocks)).data
: emptyBlock
).slice(0, nLeftoverBytes)
yield (vhd.containsBlock(nFullBlocks) ? (await vhd._readBlock(nFullBlocks)).data : emptyBlock).slice(
0,
nLeftoverBytes
)
}
} finally {
await handler.closeFile(fd)

View File

@ -6,21 +6,13 @@ import { createFooter } from './_createFooterHeader'
export default asyncIteratorToStream(async function* (size, blockParser) {
const geometry = computeGeometryForSize(size)
const actualSize = geometry.actualSize
const footer = createFooter(
actualSize,
Math.floor(Date.now() / 1000),
geometry
)
const footer = createFooter(actualSize, Math.floor(Date.now() / 1000), geometry)
let position = 0
function* filePadding(paddingLength) {
if (paddingLength > 0) {
const chunkSize = 1024 * 1024 // 1 MiB
for (
let paddingPosition = 0;
paddingPosition + chunkSize < paddingLength;
paddingPosition += chunkSize
) {
for (let paddingPosition = 0; paddingPosition + chunkSize < paddingLength; paddingPosition += chunkSize) {
yield Buffer.alloc(chunkSize)
}
yield Buffer.alloc(paddingLength % chunkSize)

View File

@ -22,39 +22,24 @@ const VHD_BLOCK_SIZE_SECTORS = VHD_BLOCK_SIZE_BYTES / SECTOR_SIZE
* then allocates the blocks in a forwards pass.
* @returns currentVhdPositionSector the first free sector after the data
*/
function createBAT({
firstBlockPosition,
fragmentLogicAddressList,
fragmentSize,
bat,
bitmapSize,
}) {
function createBAT({ firstBlockPosition, fragmentLogicAddressList, fragmentSize, bat, bitmapSize }) {
let currentVhdPositionSector = firstBlockPosition / SECTOR_SIZE
const lastFragmentPerBlock = new Map()
forEachRight(fragmentLogicAddressList, fragmentLogicAddress => {
assert.strictEqual((fragmentLogicAddress * fragmentSize) % SECTOR_SIZE, 0)
const vhdTableIndex = Math.floor(
(fragmentLogicAddress * fragmentSize) / VHD_BLOCK_SIZE_BYTES
)
const vhdTableIndex = Math.floor((fragmentLogicAddress * fragmentSize) / VHD_BLOCK_SIZE_BYTES)
if (!lastFragmentPerBlock.has(vhdTableIndex)) {
lastFragmentPerBlock.set(
vhdTableIndex,
fragmentLogicAddress * fragmentSize
)
lastFragmentPerBlock.set(vhdTableIndex, fragmentLogicAddress * fragmentSize)
}
})
const lastFragmentPerBlockArray = [...lastFragmentPerBlock]
// lastFragmentPerBlock is from last to first, so we go the other way around
forEachRight(
lastFragmentPerBlockArray,
([vhdTableIndex, _fragmentVirtualAddress]) => {
if (bat.readUInt32BE(vhdTableIndex * 4) === BLOCK_UNUSED) {
bat.writeUInt32BE(currentVhdPositionSector, vhdTableIndex * 4)
currentVhdPositionSector +=
(bitmapSize + VHD_BLOCK_SIZE_BYTES) / SECTOR_SIZE
}
forEachRight(lastFragmentPerBlockArray, ([vhdTableIndex, _fragmentVirtualAddress]) => {
if (bat.readUInt32BE(vhdTableIndex * 4) === BLOCK_UNUSED) {
bat.writeUInt32BE(currentVhdPositionSector, vhdTableIndex * 4)
currentVhdPositionSector += (bitmapSize + VHD_BLOCK_SIZE_BYTES) / SECTOR_SIZE
}
)
})
return [currentVhdPositionSector, lastFragmentPerBlock]
}
@ -70,12 +55,7 @@ function createBAT({
* @returns {Promise<Function>}
*/
export default async function createReadableStream(
diskSize,
fragmentSize,
fragmentLogicAddressList,
fragmentIterator
) {
export default async function createReadableStream(diskSize, fragmentSize, fragmentLogicAddressList, fragmentIterator) {
const ratio = VHD_BLOCK_SIZE_BYTES / fragmentSize
if (ratio % 1 !== 0) {
throw new Error(
@ -83,33 +63,19 @@ export default async function createReadableStream(
)
}
if (ratio > 53) {
throw new Error(
`Can't import file, grain size / block size ratio is > 53 (${ratio})`
)
throw new Error(`Can't import file, grain size / block size ratio is > 53 (${ratio})`)
}
const maxTableEntries = Math.ceil(diskSize / VHD_BLOCK_SIZE_BYTES) + 1
const tablePhysicalSizeBytes =
Math.ceil((maxTableEntries * 4) / SECTOR_SIZE) * SECTOR_SIZE
const tablePhysicalSizeBytes = Math.ceil((maxTableEntries * 4) / SECTOR_SIZE) * SECTOR_SIZE
const batPosition = FOOTER_SIZE + HEADER_SIZE
const firstBlockPosition = batPosition + tablePhysicalSizeBytes
const geometry = computeGeometryForSize(diskSize)
const actualSize = geometry.actualSize
const footer = createFooter(
actualSize,
Math.floor(Date.now() / 1000),
geometry,
FOOTER_SIZE,
DISK_TYPE_DYNAMIC
)
const header = createHeader(
maxTableEntries,
batPosition,
VHD_BLOCK_SIZE_BYTES
)
const bitmapSize =
Math.ceil(VHD_BLOCK_SIZE_SECTORS / 8 / SECTOR_SIZE) * SECTOR_SIZE
const footer = createFooter(actualSize, Math.floor(Date.now() / 1000), geometry, FOOTER_SIZE, DISK_TYPE_DYNAMIC)
const header = createHeader(maxTableEntries, batPosition, VHD_BLOCK_SIZE_BYTES)
const bitmapSize = Math.ceil(VHD_BLOCK_SIZE_SECTORS / 8 / SECTOR_SIZE) * SECTOR_SIZE
const bat = Buffer.alloc(tablePhysicalSizeBytes, 0xff)
const [endOfData, lastFragmentPerBlock] = createBAT({
firstBlockPosition,
@ -123,11 +89,7 @@ export default async function createReadableStream(
function* yieldAndTrack(buffer, expectedPosition, reason) {
if (expectedPosition !== undefined) {
assert.strictEqual(
position,
expectedPosition,
`${reason} (${position}|${expectedPosition})`
)
assert.strictEqual(position, expectedPosition, `${reason} (${position}|${expectedPosition})`)
}
if (buffer.length > 0) {
yield buffer
@ -136,15 +98,11 @@ export default async function createReadableStream(
}
function insertFragmentInBlock(fragment, blockWithBitmap) {
const fragmentOffsetInBlock =
(fragment.logicalAddressBytes / SECTOR_SIZE) % VHD_BLOCK_SIZE_SECTORS
const fragmentOffsetInBlock = (fragment.logicalAddressBytes / SECTOR_SIZE) % VHD_BLOCK_SIZE_SECTORS
for (let bitPos = 0; bitPos < VHD_BLOCK_SIZE_SECTORS / ratio; bitPos++) {
setBitmap(blockWithBitmap, fragmentOffsetInBlock + bitPos)
}
fragment.data.copy(
blockWithBitmap,
bitmapSize + (fragment.logicalAddressBytes % VHD_BLOCK_SIZE_BYTES)
)
fragment.data.copy(blockWithBitmap, bitmapSize + (fragment.logicalAddressBytes % VHD_BLOCK_SIZE_BYTES))
}
async function* generateBlocks(fragmentIterator, bitmapSize) {
@ -153,9 +111,7 @@ export default async function createReadableStream(
const batIndexToBlockMap = new Map()
for await (const fragment of fragmentIterator) {
currentFragmentIndex++
const batIndex = Math.floor(
fragment.logicalAddressBytes / VHD_BLOCK_SIZE_BYTES
)
const batIndex = Math.floor(fragment.logicalAddressBytes / VHD_BLOCK_SIZE_BYTES)
let currentBlockWithBitmap = batIndexToBlockMap.get(batIndex)
if (currentBlockWithBitmap === undefined) {
currentBlockWithBitmap = Buffer.alloc(bitmapSize + VHD_BLOCK_SIZE_BYTES)
@ -167,11 +123,7 @@ export default async function createReadableStream(
const batPosition = batEntry * SECTOR_SIZE
if (lastFragmentPerBlock.get(batIndex) === fragment.logicalAddressBytes) {
batIndexToBlockMap.delete(batIndex)
yield* yieldAndTrack(
currentBlockWithBitmap,
batPosition,
`VHD block start index: ${currentFragmentIndex}`
)
yield* yieldAndTrack(currentBlockWithBitmap, batPosition, `VHD block start index: ${currentFragmentIndex}`)
}
}
}

View File

@ -4,13 +4,7 @@ import { createLogger } from '@xen-orchestra/log'
import resolveRelativeFromFile from './_resolveRelativeFromFile'
import Vhd from './vhd'
import {
BLOCK_UNUSED,
DISK_TYPE_DYNAMIC,
FOOTER_SIZE,
HEADER_SIZE,
SECTOR_SIZE,
} from './_constants'
import { BLOCK_UNUSED, DISK_TYPE_DYNAMIC, FOOTER_SIZE, HEADER_SIZE, SECTOR_SIZE } from './_constants'
import { fuFooter, fuHeader, checksumStruct } from './_structs'
import { test as mapTestBit } from './_bitmap'

View File

@ -26,15 +26,7 @@ afterEach(async () => {
const RAW = 'raw'
const VHD = 'vpc'
const convert = (inputFormat, inputFile, outputFormat, outputFile) =>
execa('qemu-img', [
'convert',
'-f',
inputFormat,
'-O',
outputFormat,
inputFile,
outputFile,
])
execa('qemu-img', ['convert', '-f', inputFormat, '-O', outputFormat, inputFile, outputFile])
const createRandomStream = asyncIteratorToStream(function* (size) {
let requested = Math.min(size, yield)
@ -52,8 +44,7 @@ async function createRandomFile(name, size) {
await pFromCallback(cb => pipeline(input, fs.createWriteStream(name), cb))
}
const forOwn = (object, cb) =>
Object.keys(object).forEach(key => cb(object[key], key, object))
const forOwn = (object, cb) => Object.keys(object).forEach(key => cb(object[key], key, object))
describe('createVhdStreamWithLength', () => {
forOwn(
@ -72,15 +63,11 @@ describe('createVhdStreamWithLength', () => {
const inputVhd = `${tempDir}/input.vhd`
await convert(RAW, inputRaw, VHD, inputVhd)
const result = await createVhdStreamWithLength(
await createReadStream(inputVhd)
)
const result = await createVhdStreamWithLength(await createReadStream(inputVhd))
const { length } = result
const outputVhd = `${tempDir}/output.vhd`
await pFromCallback(
pipeline.bind(undefined, result, await createWriteStream(outputVhd))
)
await pFromCallback(pipeline.bind(undefined, result, await createWriteStream(outputVhd)))
// ensure the guessed length corresponds to the stream length
const { size: outputSize } = await fs.stat(outputVhd)
@ -102,9 +89,7 @@ describe('createVhdStreamWithLength', () => {
await convert(RAW, rawFileName, VHD, vhdName)
const { size: vhdSize } = await fs.stat(vhdName)
// read file footer
const footer = await getStream.buffer(
createReadStream(vhdName, { start: vhdSize - FOOTER_SIZE })
)
const footer = await getStream.buffer(createReadStream(vhdName, { start: vhdSize - FOOTER_SIZE }))
// we'll override the footer
const endOfFile = await createWriteStream(vhdName, {
@ -118,9 +103,7 @@ describe('createVhdStreamWithLength', () => {
const { size: longerSize } = await fs.stat(vhdName)
// check input file has been lengthened
expect(longerSize).toEqual(vhdSize + FOOTER_SIZE)
const result = await createVhdStreamWithLength(
await createReadStream(vhdName)
)
const result = await createVhdStreamWithLength(await createReadStream(vhdName))
expect(result.length).toEqual(vhdSize)
const outputFileStream = await createWriteStream(outputVhdName)
await pFromCallback(cb => pipeline(result, outputFileStream, cb))

View File

@ -67,8 +67,7 @@ export default async function createVhdStreamWithLength(stream) {
const footerOffset =
firstAndLastBlocks !== undefined
? firstAndLastBlocks.lastSector * SECTOR_SIZE +
Math.ceil(header.blockSize / SECTOR_SIZE / 8 / SECTOR_SIZE) *
SECTOR_SIZE +
Math.ceil(header.blockSize / SECTOR_SIZE / 8 / SECTOR_SIZE) * SECTOR_SIZE +
header.blockSize
: Math.ceil(streamPosition / SECTOR_SIZE) * SECTOR_SIZE

View File

@ -25,25 +25,16 @@ export default concurrency(2)(async function merge(
const childVhd = new Vhd(childHandler, childFd)
// Reading footer and header.
await Promise.all([
parentVhd.readHeaderAndFooter(),
childVhd.readHeaderAndFooter(),
])
await Promise.all([parentVhd.readHeaderAndFooter(), childVhd.readHeaderAndFooter()])
assert(childVhd.header.blockSize === parentVhd.header.blockSize)
const parentDiskType = parentVhd.footer.diskType
assert(
parentDiskType === DISK_TYPE_DIFFERENCING ||
parentDiskType === DISK_TYPE_DYNAMIC
)
assert(parentDiskType === DISK_TYPE_DIFFERENCING || parentDiskType === DISK_TYPE_DYNAMIC)
assert.strictEqual(childVhd.footer.diskType, DISK_TYPE_DIFFERENCING)
// Read allocation table of child/parent.
await Promise.all([
parentVhd.readBlockAllocationTable(),
childVhd.readBlockAllocationTable(),
])
await Promise.all([parentVhd.readBlockAllocationTable(), childVhd.readBlockAllocationTable()])
const { maxTableEntries } = childVhd.header
@ -51,10 +42,7 @@ export default concurrency(2)(async function merge(
// finds first allocated block for the 2 following loops
let firstBlock = 0
while (
firstBlock < maxTableEntries &&
!childVhd.containsBlock(firstBlock)
) {
while (firstBlock < maxTableEntries && !childVhd.containsBlock(firstBlock)) {
++firstBlock
}

View File

@ -28,8 +28,7 @@ const { debug } = createLogger('vhd-lib:Vhd')
//
// ===================================================================
const computeBatSize = entries =>
sectorsToBytes(sectorsRoundUpNoZero(entries * 4))
const computeBatSize = entries => sectorsToBytes(sectorsRoundUpNoZero(entries * 4))
// Sectors conversions.
const sectorsRoundUpNoZero = bytes => Math.ceil(bytes / SECTOR_SIZE) || 1
@ -38,11 +37,7 @@ const sectorsToBytes = sectors => sectors * SECTOR_SIZE
const assertChecksum = (name, buf, struct) => {
const actual = unpackField(struct.fields.checksum, buf)
const expected = checksumStruct(buf, struct)
assert.strictEqual(
actual,
expected,
`invalid ${name} checksum ${actual}, expected ${expected}`
)
assert.strictEqual(actual, expected, `invalid ${name} checksum ${actual}, expected ${expected}`)
}
// unused block as buffer containing a uint32BE
@ -88,11 +83,7 @@ export default class Vhd {
// =================================================================
async _read(start, n) {
const { bytesRead, buffer } = await this._handler.read(
this._path,
Buffer.alloc(n),
start
)
const { bytesRead, buffer } = await this._handler.read(this._path, Buffer.alloc(n), start)
assert.strictEqual(bytesRead, n)
return buffer
}
@ -114,10 +105,7 @@ export default class Vhd {
const entry = header.parentLocatorEntry[i]
if (entry.platformCode !== PLATFORM_NONE) {
end = Math.max(
end,
entry.platformDataOffset + sectorsToBytes(entry.platformDataSpace)
)
end = Math.max(end, entry.platformDataOffset + sectorsToBytes(entry.platformDataSpace))
}
}
@ -158,10 +146,7 @@ export default class Vhd {
if (checkSecondFooter) {
const size = await this._handler.getSize(this._path)
assert(
bufFooter.equals(await this._read(size - FOOTER_SIZE, FOOTER_SIZE)),
'footer1 !== footer2'
)
assert(bufFooter.equals(await this._read(size - FOOTER_SIZE, FOOTER_SIZE)), 'footer1 !== footer2')
}
const footer = (this.footer = fuFooter.unpack(bufFooter))
@ -172,14 +157,11 @@ export default class Vhd {
// Compute the number of sectors in one block.
// Default: One block contains 4096 sectors of 512 bytes.
const sectorsPerBlock = (this.sectorsPerBlock =
header.blockSize / SECTOR_SIZE)
const sectorsPerBlock = (this.sectorsPerBlock = header.blockSize / SECTOR_SIZE)
// Compute bitmap size in sectors.
// Default: 1.
const sectorsOfBitmap = (this.sectorsOfBitmap = sectorsRoundUpNoZero(
sectorsPerBlock >> 3
))
const sectorsOfBitmap = (this.sectorsOfBitmap = sectorsRoundUpNoZero(sectorsPerBlock >> 3))
// Full block size => data block size + bitmap size.
this.fullBlockSize = sectorsToBytes(sectorsPerBlock + sectorsOfBitmap)
@ -192,10 +174,7 @@ export default class Vhd {
// Returns a buffer that contains the block allocation table of a vhd file.
async readBlockAllocationTable() {
const { header } = this
this.blockTable = await this._read(
header.tableOffset,
header.maxTableEntries * 4
)
this.blockTable = await this._read(header.tableOffset, header.maxTableEntries * 4)
}
// return the first sector (bitmap) of a block
@ -211,10 +190,7 @@ export default class Vhd {
throw new Error(`no such block ${blockId}`)
}
return this._read(
sectorsToBytes(blockAddr),
onlyBitmap ? this.bitmapSize : this.fullBlockSize
).then(buf =>
return this._read(sectorsToBytes(blockAddr), onlyBitmap ? this.bitmapSize : this.fullBlockSize).then(buf =>
onlyBitmap
? { id: blockId, bitmap: buf }
: {
@ -246,21 +222,11 @@ export default class Vhd {
const { first, firstSector, lastSector } = firstAndLastBlocks
const tableOffset = this.header.tableOffset
const { batSize } = this
const newMinSector = Math.ceil(
(tableOffset + batSize + spaceNeededBytes) / SECTOR_SIZE
)
if (
tableOffset + batSize + spaceNeededBytes >=
sectorsToBytes(firstSector)
) {
const newMinSector = Math.ceil((tableOffset + batSize + spaceNeededBytes) / SECTOR_SIZE)
if (tableOffset + batSize + spaceNeededBytes >= sectorsToBytes(firstSector)) {
const { fullBlockSize } = this
const newFirstSector = Math.max(
lastSector + fullBlockSize / SECTOR_SIZE,
newMinSector
)
debug(
`freeFirstBlockSpace: move first block ${firstSector} -> ${newFirstSector}`
)
const newFirstSector = Math.max(lastSector + fullBlockSize / SECTOR_SIZE, newMinSector)
debug(`freeFirstBlockSpace: move first block ${firstSector} -> ${newFirstSector}`)
// copy the first block at the end
const block = await this._read(sectorsToBytes(firstSector), fullBlockSize)
await this._write(block, sectorsToBytes(newFirstSector))
@ -287,9 +253,7 @@ export default class Vhd {
const bat = (this.blockTable = Buffer.allocUnsafe(newBatSize))
prevBat.copy(bat)
bat.fill(BUF_BLOCK_UNUSED, prevMaxTableEntries * 4)
debug(
`ensureBatSize: extend BAT ${prevMaxTableEntries} -> ${maxTableEntries}`
)
debug(`ensureBatSize: extend BAT ${prevMaxTableEntries} -> ${maxTableEntries}`)
await this._write(
Buffer.alloc(maxTableEntries - prevMaxTableEntries, BUF_BLOCK_UNUSED),
header.tableOffset + prevBat.length
@ -330,11 +294,7 @@ export default class Vhd {
const offset = sectorsToBytes(blockAddr)
debug(
`Write bitmap at: ${offset}. (size=${bitmapSize}, data=${bitmap.toString(
'hex'
)})`
)
debug(`Write bitmap at: ${offset}. (size=${bitmapSize}, data=${bitmap.toString('hex')})`)
await this._write(bitmap, sectorsToBytes(blockAddr))
}
@ -359,9 +319,7 @@ export default class Vhd {
const offset = blockAddr + this.sectorsOfBitmap + beginSectorId
debug(
`_writeBlockSectors at ${offset} block=${block.id}, sectors=${beginSectorId}...${endSectorId}`
)
debug(`_writeBlockSectors at ${offset} block=${block.id}, sectors=${beginSectorId}...${endSectorId}`)
for (let i = beginSectorId; i < endSectorId; ++i) {
mapSetBit(parentBitmap, i)
@ -369,10 +327,7 @@ export default class Vhd {
await this._writeBlockBitmap(blockAddr, parentBitmap)
await this._write(
block.data.slice(
sectorsToBytes(beginSectorId),
sectorsToBytes(endSectorId)
),
block.data.slice(sectorsToBytes(beginSectorId), sectorsToBytes(endSectorId)),
sectorsToBytes(offset)
)
}
@ -428,11 +383,7 @@ export default class Vhd {
const offset = Math.max(this._getEndOfData(), eof - rawFooter.length)
footer.checksum = checksumStruct(rawFooter, fuFooter)
debug(
`Write footer at: ${offset} (checksum=${
footer.checksum
}). (data=${rawFooter.toString('hex')})`
)
debug(`Write footer at: ${offset} (checksum=${footer.checksum}). (data=${rawFooter.toString('hex')})`)
if (!onlyEndFooter) {
await this._write(rawFooter, 0)
}
@ -444,11 +395,7 @@ export default class Vhd {
const rawHeader = fuHeader.pack(header)
header.checksum = checksumStruct(rawHeader, fuHeader)
const offset = FOOTER_SIZE
debug(
`Write header at: ${offset} (checksum=${
header.checksum
}). (data=${rawHeader.toString('hex')})`
)
debug(`Write header at: ${offset} (checksum=${header.checksum}). (data=${rawHeader.toString('hex')})`)
return this._write(rawHeader, offset)
}
@ -462,26 +409,12 @@ export default class Vhd {
const coversWholeBlock = (offsetInBlockSectors, endInBlockSectors) =>
offsetInBlockSectors === 0 && endInBlockSectors === this.sectorsPerBlock
for (
let currentBlock = startBlock;
currentBlock <= lastBlock;
currentBlock++
) {
const offsetInBlockSectors = Math.max(
0,
offsetSectors - currentBlock * this.sectorsPerBlock
)
const endInBlockSectors = Math.min(
endBufferSectors - currentBlock * this.sectorsPerBlock,
this.sectorsPerBlock
)
const startInBuffer = Math.max(
0,
(currentBlock * this.sectorsPerBlock - offsetSectors) * SECTOR_SIZE
)
for (let currentBlock = startBlock; currentBlock <= lastBlock; currentBlock++) {
const offsetInBlockSectors = Math.max(0, offsetSectors - currentBlock * this.sectorsPerBlock)
const endInBlockSectors = Math.min(endBufferSectors - currentBlock * this.sectorsPerBlock, this.sectorsPerBlock)
const startInBuffer = Math.max(0, (currentBlock * this.sectorsPerBlock - offsetSectors) * SECTOR_SIZE)
const endInBuffer = Math.min(
((currentBlock + 1) * this.sectorsPerBlock - offsetSectors) *
SECTOR_SIZE,
((currentBlock + 1) * this.sectorsPerBlock - offsetSectors) * SECTOR_SIZE,
buffer.length
)
let inputBuffer
@ -489,27 +422,16 @@ export default class Vhd {
inputBuffer = buffer.slice(startInBuffer, endInBuffer)
} else {
inputBuffer = Buffer.alloc(blockSizeBytes, 0)
buffer.copy(
inputBuffer,
offsetInBlockSectors * SECTOR_SIZE,
startInBuffer,
endInBuffer
)
buffer.copy(inputBuffer, offsetInBlockSectors * SECTOR_SIZE, startInBuffer, endInBuffer)
}
await this._writeBlockSectors(
{ id: currentBlock, data: inputBuffer },
offsetInBlockSectors,
endInBlockSectors
)
await this._writeBlockSectors({ id: currentBlock, data: inputBuffer }, offsetInBlockSectors, endInBlockSectors)
}
await this.writeFooter()
}
async _ensureSpaceForParentLocators(neededSectors) {
const firstLocatorOffset = FOOTER_SIZE + HEADER_SIZE
const currentSpace =
Math.floor(this.header.tableOffset / SECTOR_SIZE) -
firstLocatorOffset / SECTOR_SIZE
const currentSpace = Math.floor(this.header.tableOffset / SECTOR_SIZE) - firstLocatorOffset / SECTOR_SIZE
if (currentSpace < neededSectors) {
const deltaSectors = neededSectors - currentSpace
await this._freeFirstBlockSpace(sectorsToBytes(deltaSectors))
@ -526,8 +448,7 @@ export default class Vhd {
const dataSpaceSectors = Math.ceil(encodedFilename.length / SECTOR_SIZE)
const position = await this._ensureSpaceForParentLocators(dataSpaceSectors)
await this._write(encodedFilename, position)
header.parentLocatorEntry[0].platformDataSpace =
dataSpaceSectors * SECTOR_SIZE
header.parentLocatorEntry[0].platformDataSpace = dataSpaceSectors * SECTOR_SIZE
header.parentLocatorEntry[0].platformDataLength = encodedFilename.length
header.parentLocatorEntry[0].platformDataOffset = position
for (let i = 1; i < 8; i++) {

View File

@ -53,9 +53,7 @@ test('ReadableRawVHDStream does not crash', async () => {
}
const fileSize = 1000
const stream = createReadableRawStream(fileSize, mockParser)
await pFromCallback(cb =>
pipeline(stream, createWriteStream(`${tempDir}/output.vhd`), cb)
)
await pFromCallback(cb => pipeline(stream, createWriteStream(`${tempDir}/output.vhd`), cb))
await execa('vhd-util', ['check', '-t', '-i', '-n', `${tempDir}/output.vhd`])
})
@ -86,9 +84,7 @@ test('ReadableRawVHDStream detects when blocks are out of order', async () => {
new Promise((resolve, reject) => {
const stream = createReadableRawStream(100000, mockParser)
stream.on('error', reject)
pipeline(stream, createWriteStream(`${tempDir}/outputStream`), err =>
err ? reject(err) : resolve()
)
pipeline(stream, createWriteStream(`${tempDir}/outputStream`), err => (err ? reject(err) : resolve()))
})
).rejects.toThrow('Received out of order blocks')
})
@ -116,15 +112,7 @@ test('ReadableSparseVHDStream can handle a sparse file', async () => {
const pipe = stream.pipe(createWriteStream(`${tempDir}/output.vhd`))
await fromEvent(pipe, 'finish')
await execa('vhd-util', ['check', '-t', '-i', '-n', `${tempDir}/output.vhd`])
await execa('qemu-img', [
'convert',
'-f',
'vpc',
'-O',
'raw',
`${tempDir}/output.vhd`,
`${tempDir}/out1.raw`,
])
await execa('qemu-img', ['convert', '-f', 'vpc', '-O', 'raw', `${tempDir}/output.vhd`, `${tempDir}/out1.raw`])
const out1 = await readFile(`${tempDir}/out1.raw`)
const expected = Buffer.alloc(fileSize)
blocks.forEach(b => {

View File

@ -1,3 +1 @@
module.exports = require('../../@xen-orchestra/babel-config')(
require('./package.json')
)
module.exports = require('../../@xen-orchestra/babel-config')(require('./package.json'))

View File

@ -30,14 +30,8 @@ const required = name => {
// -------------------------------------------------------------------
const STYLES = [
[
vdi => !vdi.managed,
chalk.enabled ? chalk.red : label => `[unmanaged] ${label}`,
],
[
vdi => vdi.is_a_snapshot,
chalk.enabled ? chalk.yellow : label => `[snapshot] ${label}`,
],
[vdi => !vdi.managed, chalk.enabled ? chalk.red : label => `[unmanaged] ${label}`],
[vdi => vdi.is_a_snapshot, chalk.enabled ? chalk.yellow : label => `[snapshot] ${label}`],
]
const getStyle = vdi => {
for (let i = 0, n = STYLES.length; i < n; ++i) {
@ -102,9 +96,7 @@ execPromise(async args => {
forEach(vdisByRef, vdi => {
const vhdParent = vdi.sm_config['vhd-parent']
if (vhdParent) {
;(
vhdChildrenByUuid[vhdParent] || (vhdChildrenByUuid[vhdParent] = [])
).push(vdi)
;(vhdChildrenByUuid[vhdParent] || (vhdChildrenByUuid[vhdParent] = [])).push(vdi)
} else if (!(vdi.snapshot_of in vdisByRef)) {
return
}
@ -115,18 +107,12 @@ execPromise(async args => {
const makeVdiNode = vdi => {
const { uuid } = vdi
let label = `${vdi.name_label} - ${uuid} - ${formatSize(
+vdi.physical_utilisation
)}`
let label = `${vdi.name_label} - ${uuid} - ${formatSize(+vdi.physical_utilisation)}`
const nodes = []
const vhdChildren = vhdChildrenByUuid[uuid]
if (vhdChildren) {
mapFilter(
orderBy(vhdChildren, 'is_a_snapshot', 'desc'),
makeVdiNode,
nodes
)
mapFilter(orderBy(vhdChildren, 'is_a_snapshot', 'desc'), makeVdiNode, nodes)
}
mapFilter(

View File

@ -1,3 +1 @@
module.exports = require('../../@xen-orchestra/babel-config')(
require('./package.json')
)
module.exports = require('../../@xen-orchestra/babel-config')(require('./package.json'))

View File

@ -58,16 +58,12 @@ const resolveRef = (xapi, type, refOrUuidOrNameLabel) =>
isOpaqueRef(refOrUuidOrNameLabel)
? refOrUuidOrNameLabel
: xapi.call(`${type}.get_by_uuid`, refOrUuidOrNameLabel).catch(() =>
xapi
.call(`${type}.get_by_name_label`, refOrUuidOrNameLabel)
.then(refs => {
if (refs.length === 1) {
return refs[0]
}
throw new Error(
`no single match for ${type} with name label ${refOrUuidOrNameLabel}`
)
})
xapi.call(`${type}.get_by_name_label`, refOrUuidOrNameLabel).then(refs => {
if (refs.length === 1) {
return refs[0]
}
throw new Error(`no single match for ${type} with name label ${refOrUuidOrNameLabel}`)
})
)
exports.resolveRecord = async (xapi, type, refOrUuidOrNameLabel) =>

View File

@ -1,4 +1,3 @@
const RE = /^[^.]+\.get_/
export default (method, args) =>
args.length === 1 && typeof args[0] === 'string' && RE.test(method)
export default (method, args) => args.length === 1 && typeof args[0] === 'string' && RE.test(method)

View File

@ -6,15 +6,7 @@ export default url => {
throw new Error('invalid URL: ' + url)
}
const [
,
protocol = 'https:',
username,
password,
ipv6,
hostname = ipv6,
port,
] = matches
const [, protocol = 'https:', username, password, ipv6, hostname = ipv6, port] = matches
const parsedUrl = { protocol, hostname, port }
if (username !== undefined) {
parsedUrl.username = decodeURIComponent(username)

View File

@ -5,15 +5,7 @@ import ms from 'ms'
import httpRequest from 'http-request-plus'
import { EventEmitter } from 'events'
import { map, noop, omit } from 'lodash'
import {
cancelable,
defer,
fromEvents,
ignoreErrors,
pDelay,
pRetry,
pTimeout,
} from 'promise-toolbox'
import { cancelable, defer, fromEvents, ignoreErrors, pDelay, pRetry, pTimeout } from 'promise-toolbox'
import autoTransport from './transports/auto'
import coalesceCalls from './_coalesceCalls'
@ -88,9 +80,7 @@ export class Xapi extends EventEmitter {
super()
this._addSyncStackTrace =
opts.syncStackTraces ?? process.env.NODE_ENV === 'development'
? addSyncStackTrace
: identity
opts.syncStackTraces ?? process.env.NODE_ENV === 'development' ? addSyncStackTrace : identity
this._callTimeout = makeCallSetting(opts.callTimeout, 60 * 60 * 1e3) // 1 hour but will be reduced in the future
this._httpInactivityTimeout = opts.httpInactivityTimeout ?? 5 * 60 * 1e3 // 5 mins
this._eventPollDelay = opts.eventPollDelay ?? 60 * 1e3 // 1 min
@ -262,8 +252,7 @@ export class Xapi extends EventEmitter {
const promise = this.watchTask(taskRef)
const destroyTask = () =>
ignoreErrors.call(this._sessionCall('task.destroy', [taskRef]))
const destroyTask = () => ignoreErrors.call(this._sessionCall('task.destroy', [taskRef]))
promise.then(destroyTask, destroyTask)
return promise
@ -274,25 +263,15 @@ export class Xapi extends EventEmitter {
// ===========================================================================
async getAllRecords(type) {
return map(
await this._sessionCall(`${type}.get_all_records`),
(record, ref) => this._wrapRecord(type, ref, record)
)
return map(await this._sessionCall(`${type}.get_all_records`), (record, ref) => this._wrapRecord(type, ref, record))
}
async getRecord(type, ref) {
return this._wrapRecord(
type,
ref,
await this._sessionCall(`${type}.get_record`, [ref])
)
return this._wrapRecord(type, ref, await this._sessionCall(`${type}.get_record`, [ref]))
}
async getRecordByUuid(type, uuid) {
return this.getRecord(
type,
await this._sessionCall(`${type}.get_by_uuid`, [uuid])
)
return this.getRecord(type, await this._sessionCall(`${type}.get_by_uuid`, [uuid]))
}
getRecords(type, refs) {
@ -423,12 +402,7 @@ export class Xapi extends EventEmitter {
const isStream = typeof body.pipe === 'function'
const useHack = isStream && body.length === undefined
if (useHack) {
console.warn(
this._humanId,
'Xapi#putResource',
pathname,
'missing length'
)
console.warn(this._humanId, 'Xapi#putResource', pathname, 'missing length')
headers['content-length'] = '1125899906842624'
}
@ -557,9 +531,7 @@ export class Xapi extends EventEmitter {
await this._addSyncStackTrace(promise)
ignoreErrors.call(
this._sessionCall('pool.remove_from_other_config', [poolRef, key])
)
ignoreErrors.call(this._sessionCall('pool.remove_from_other_config', [poolRef, key]))
if (ref !== undefined) {
return this.getObjectByRef(ref)
@ -571,13 +543,9 @@ export class Xapi extends EventEmitter {
// allowed even in read-only mode because it does not have impact on the
// XenServer and it's necessary for getResource()
async createTask(nameLabel, nameDescription = '') {
const taskRef = await this._sessionCall('task.create', [
nameLabel,
nameDescription,
])
const taskRef = await this._sessionCall('task.create', [nameLabel, nameDescription])
const destroyTask = () =>
ignoreErrors.call(this._sessionCall('task.destroy', [taskRef]))
const destroyTask = () => ignoreErrors.call(this._sessionCall('task.destroy', [taskRef]))
this.watchTask(taskRef).then(destroyTask, destroyTask)
return taskRef
@ -591,8 +559,7 @@ export class Xapi extends EventEmitter {
idOrUuidOrRef = idOrUuidOrRef.$id
}
const object =
this._objects.all[idOrUuidOrRef] || this._objectsByRef[idOrUuidOrRef]
const object = this._objects.all[idOrUuidOrRef] || this._objectsByRef[idOrUuidOrRef]
if (object !== undefined) return object
@ -663,17 +630,8 @@ export class Xapi extends EventEmitter {
async _call(method, args, timeout = this._callTimeout(method, args)) {
const startTime = Date.now()
try {
const result = await pTimeout.call(
this._addSyncStackTrace(this._transport(method, args)),
timeout
)
debug(
'%s: %s(...) [%s] ==> %s',
this._humanId,
method,
ms(Date.now() - startTime),
kindOf(result)
)
const result = await pTimeout.call(this._addSyncStackTrace(this._transport(method, args)), timeout)
debug('%s: %s(...) [%s] ==> %s', this._humanId, method, ms(Date.now() - startTime), kindOf(result))
return result
} catch (error) {
// do not log the session ID
@ -691,13 +649,7 @@ export class Xapi extends EventEmitter {
: replaceSensitiveValues(params, '* obfuscated *'),
}
debug(
'%s: %s(...) [%s] =!> %s',
this._humanId,
method,
ms(Date.now() - startTime),
error
)
debug('%s: %s(...) [%s] =!> %s', this._humanId, method, ms(Date.now() - startTime), error)
throw error
}
@ -737,15 +689,12 @@ export class Xapi extends EventEmitter {
_sessionCallRetryOptions = {
tries: 2,
when: error =>
this._status !== DISCONNECTED && error?.code === 'SESSION_INVALID',
when: error => this._status !== DISCONNECTED && error?.code === 'SESSION_INVALID',
onRetry: () => this._sessionOpen(),
}
_sessionCall(method, args, timeout) {
if (method.startsWith('session.')) {
return Promise.reject(
new Error('session.*() methods are disabled from this interface')
)
return Promise.reject(new Error('session.*() methods are disabled from this interface'))
}
return pRetry(() => {
@ -773,10 +722,7 @@ export class Xapi extends EventEmitter {
const { user, password } = this._auth
const params = [user, password]
this._sessionId = await pRetry(
() =>
this._interruptOnDisconnect(
this._call('session.login_with_password', params)
),
() => this._interruptOnDisconnect(this._call('session.login_with_password', params)),
{
tries: 2,
when: { code: 'HOST_IS_SLAVE' },
@ -794,9 +740,7 @@ export class Xapi extends EventEmitter {
// the event loop in that case
if (this._pool.$ref !== oldPoolRef) {
// Uses introspection to list available types.
const types = (this._types = (
await this._interruptOnDisconnect(this._call('system.listMethods'))
)
const types = (this._types = (await this._interruptOnDisconnect(this._call('system.listMethods')))
.filter(isGetAllRecordsMethod)
.map(method => method.slice(0, method.indexOf('.'))))
this._lcToTypes = { __proto__: null }
@ -833,11 +777,7 @@ export class Xapi extends EventEmitter {
// An object's UUID can change during its life.
const prev = objectsByRef[ref]
let prevUuid
if (
prev !== undefined &&
(prevUuid = prev.uuid) !== undefined &&
prevUuid !== object.uuid
) {
if (prev !== undefined && (prevUuid = prev.uuid) !== undefined && prevUuid !== object.uuid) {
objects.remove(prevUuid)
}
@ -977,10 +917,7 @@ export class Xapi extends EventEmitter {
// we need to do this before the initial fetch to avoid losing events
let fromToken
try {
fromToken = await this._sessionCall('event.inject', [
'pool',
this._pool.$ref,
])
fromToken = await this._sessionCall('event.inject', ['pool', this._pool.$ref])
} catch (error) {
if (error?.code === 'MESSAGE_METHOD_UNKNOWN') {
return this._watchEventsLegacy()
@ -1076,14 +1013,10 @@ export class Xapi extends EventEmitter {
try {
await this._connected
this._processEvents(
await this._sessionCall('event.next', undefined, EVENT_TIMEOUT * 1e3)
)
this._processEvents(await this._sessionCall('event.next', undefined, EVENT_TIMEOUT * 1e3))
} catch (error) {
if (error?.code === 'EVENTS_LOST') {
await ignoreErrors.call(
this._sessionCall('event.unregister', [types])
)
await ignoreErrors.call(this._sessionCall('event.unregister', [types]))
return this._watchEventsLegacy()
}
@ -1165,14 +1098,10 @@ export class Xapi extends EventEmitter {
}
props[`add_${field}`] = function (value) {
return xapi
.call(`${type}.add_${field}`, this.$ref, value)
.then(noop)
return xapi.call(`${type}.add_${field}`, this.$ref, value).then(noop)
}
props[`remove_${field}`] = function (value) {
return xapi
.call(`${type}.remove_${field}`, this.$ref, value)
.then(noop)
return xapi.call(`${type}.remove_${field}`, this.$ref, value).then(noop)
}
} else if (value !== null && typeof value === 'object') {
getters[$field] = function () {

View File

@ -20,10 +20,7 @@ async function main([url]) {
const { pool } = xapi
// eslint-disable-next-line no-unmodified-loop-condition
while (loop) {
await pool.update_other_config(
'xo:injectEvents',
Math.random().toString(36).slice(2)
)
await pool.update_other_config('xo:injectEvents', Math.random().toString(36).slice(2))
await pDelay(1e2)
}

View File

@ -8,11 +8,7 @@ import UnsupportedTransport from './_UnsupportedTransport'
const logError = error => {
if (error.res) {
console.error(
'XML-RPC Error: %s (response status %s)',
error.message,
error.res.statusCode
)
console.error('XML-RPC Error: %s (response status %s)', error.message, error.res.statusCode)
console.error('%s', error.body)
}
@ -84,6 +80,5 @@ export default ({ secureOptions, url: { hostname, port, protocol } }) => {
})
const call = promisify(client.methodCall, client)
return (method, args) =>
call(method, prepareXmlRpcParams(args)).then(parseResult, logError)
return (method, args) => call(method, prepareXmlRpcParams(args)).then(parseResult, logError)
}

View File

@ -7,11 +7,7 @@ import prepareXmlRpcParams from './_prepareXmlRpcParams'
const logError = error => {
if (error.res) {
console.error(
'XML-RPC Error: %s (response status %s)',
error.message,
error.res.statusCode
)
console.error('XML-RPC Error: %s (response status %s)', error.message, error.res.statusCode)
console.error('%s', error.body)
}
@ -43,6 +39,5 @@ export default ({ secureOptions, url: { hostname, port, protocol } }) => {
})
const call = promisify(client.methodCall, client)
return (method, args) =>
call(method, prepareXmlRpcParams(args)).then(parseResult, logError)
return (method, args) => call(method, prepareXmlRpcParams(args)).then(parseResult, logError)
}

Some files were not shown because too many files have changed in this diff.