Compare commits


3 Commits

Author SHA1 Message Date
florent
93e7bdaeb0 add propTypes 2016-02-02 20:59:08 +01:00
florent
877e471b10 working on the create vm form 2016-02-01 22:29:48 +01:00
florent
32478e470b FB proposal for React (WIP)
After a few tries, I think it can work this way:

* everything is in one store: xoApi, session, even component state; use booleans when possible to simplify usage in components
* component state should rarely be used: let components be stateless, a pure function of their props
* use connect on high-level components to give them access to a part of the store and to the actions; a component should not have access to the full store and all the actions
* use events to get data back from child components (if they can't dispatch actions themselves)
* redux-thunk allows dispatching multiple actions in cascade (even async ones), e.g. want to save → saving → saved (or errored); see the sketch below
* use immutable objects; this will allow better performance in the long run
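
A minimal sketch of that dispatch cascade with redux-thunk (the action types and the stub API are illustrative assumptions, not code from this changeset):

```js
// Illustrative only: the action types and this stub API are assumptions.
const api = { save: async vm => { /* hypothetical backend call */ } }

const saveVm = vm => async dispatch => {
  dispatch({ type: 'SAVE_REQUESTED', vm })
  try {
    await api.save(vm)
    dispatch({ type: 'SAVE_SUCCEEDED', vm })
  } catch (error) {
    dispatch({ type: 'SAVE_FAILED', vm, error })
  }
}

// usage, with the redux-thunk middleware installed:
// store.dispatch(saveVm(vm))
```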
2016-01-31 22:06:12 +01:00
665 changed files with 1394 additions and 131542 deletions

.babelrc Normal file

@@ -0,0 +1,12 @@
{
"comments": false,
"compact": true,
"plugins": [
"transform-runtime"
],
"presets": [
"es2015",
"stage-0",
"react"
]
}

.bowerrc Normal file

@@ -0,0 +1,3 @@
{
"directory": "dist/bower_components"
}

.editorconfig

@@ -11,7 +11,7 @@ root = true
charset = utf-8
end_of_line = lf
insert_final_newline = true
trim_trailing_whitespace = true
trim_trailing_whitespaces = true
# CoffeeScript
#
@@ -28,12 +28,12 @@ indent_style = space
# Package.json
#
# This indentation style is the one used by npm.
[package.json]
[/package.json]
indent_size = 2
indent_style = space
# Pug (Jade)
[*.{jade,pug}]
# Jade
[*.jade]
indent_size = 2
indent_style = space
@@ -41,7 +41,7 @@ indent_style = space
#
# Two spaces seems to be the standard most common style, at least in
# Node.js (http://nodeguide.com/style.html#tabs-vs-spaces).
[*.{js,jsx,ts,tsx}]
[*.js]
indent_size = 2
indent_style = space

.eslintrc.js

@@ -1,22 +0,0 @@
module.exports = {
extends: ['standard', 'standard-jsx'],
globals: {
__DEV__: true,
$Dict: true,
$Diff: true,
$Exact: true,
$Keys: true,
$PropertyType: true,
$Shape: true,
},
parser: 'babel-eslint',
rules: {
'comma-dangle': ['error', 'always-multiline'],
indent: 'off',
'no-var': 'error',
'node/no-extraneous-import': 'error',
'node/no-extraneous-require': 'error',
'prefer-const': 'error',
'react/jsx-indent': 'off',
},
}

.flowconfig

@@ -1,15 +0,0 @@
[ignore]
<PROJECT_ROOT>/node_modules/.*
[include]
[libs]
[lints]
[options]
esproposal.decorators=ignore
include_warnings=true
module.use_strict=true
[strict]

.gitignore vendored

@@ -1,30 +1,9 @@
/coverage/
/node_modules/
/lerna-debug.log
/lerna-debug.log.*
/@xen-orchestra/*/dist/
/@xen-orchestra/*/node_modules/
/packages/*/dist/
/packages/*/node_modules/
/packages/vhd-cli/src/commands/index.js
/packages/xen-api/plot.dat
/packages/xo-server/.xo-server.*
/packages/xo-server/src/api/index.js
/packages/xo-server/src/xapi/mixins/index.js
/packages/xo-server/src/xo-mixins/index.js
/packages/xo-server-auth-ldap/ldap.cache.conf
/packages/xo-web/src/common/intl/locales/index.js
/packages/xo-web/src/common/themes/index.js
/.nyc_output/
/bower_components/
/dist/
npm-debug.log
npm-debug.log.*
pnpm-debug.log
pnpm-debug.log.*
yarn-error.log
yarn-error.log.*
!node_modules/*
node_modules/*/

.mocha.js Normal file

@@ -0,0 +1,5 @@
Error.stackTraceLimit = 100
try { require('trace') } catch (_) {}
try { require('clarify') } catch (_) {}
try { require('source-map-support/register') } catch (_) {}

.mocha.opts Normal file

@@ -0,0 +1 @@
--require ./.mocha.js

.prettierrc.js

@@ -1,5 +0,0 @@
module.exports = {
semi: false,
singleQuote: true,
trailingComma: 'es5',
}

.travis.yml

@@ -1,26 +1,10 @@
language: node_js
node_js:
#- stable # disable for now due to an issue of indirect dep upath with Node 9
- 8
- 6
- 'stable'
- '4'
- '0.12'
- '0.10'
# Use containers.
# http://docs.travis-ci.com/user/workers/container-based-infrastructure/
sudo: false
addons:
apt:
packages:
- qemu-utils
- blktap-utils
- vmdk-stream-converter
before_install:
- curl -o- -L https://yarnpkg.com/install.sh | bash
- export PATH="$HOME/.yarn/bin:$PATH"
cache:
yarn: true
script:
- yarn run test
- yarn run test-integration

@xen-orchestra/babel-config/index.js

@@ -1,47 +0,0 @@
'use strict'
const PLUGINS_RE = /^(?:@babel\/plugin-.+|babel-plugin-lodash)$/
const PRESETS_RE = /^@babel\/preset-.+$/
const NODE_ENV = process.env.NODE_ENV || 'development'
const __PROD__ = NODE_ENV === 'production'
const __TEST__ = NODE_ENV === 'test'
module.exports = function (pkg, plugins, presets) {
plugins === undefined && (plugins = {})
presets === undefined && (presets = {})
presets['@babel/preset-env'] = {
debug: !__TEST__,
loose: true,
shippedProposals: true,
targets: __PROD__
? (() => {
let node = (pkg.engines || {}).node
if (node !== undefined) {
const trimChars = '^=>~'
while (trimChars.includes(node[0])) {
node = node.slice(1)
}
return { node: node }
}
})()
: { browsers: '', node: 'current' },
useBuiltIns: '@babel/polyfill' in (pkg.dependencies || {}) && 'usage',
}
Object.keys(pkg.devDependencies || {}).forEach(name => {
if (!(name in presets) && PLUGINS_RE.test(name)) {
plugins[name] = {}
} else if (!(name in presets) && PRESETS_RE.test(name)) {
presets[name] = {}
}
})
return {
comments: !__PROD__,
ignore: __TEST__ ? undefined : [/\.spec\.js$/],
plugins: Object.keys(plugins).map(plugin => [plugin, plugins[plugin]]),
presets: Object.keys(presets).map(preset => [preset, presets[preset]]),
}
}

@xen-orchestra/babel-config/package.json

@@ -1,11 +0,0 @@
{
"private": true,
"name": "@xen-orchestra/babel-config",
"version": "0.0.0",
"homepage": "https://github.com/vatesfr/xen-orchestra/tree/master/@xen-orchestra/babel-config",
"bugs": "https://github.com/vatesfr/xen-orchestra/issues",
"repository": {
"type": "git",
"url": "https://github.com/vatesfr/xen-orchestra.git"
}
}

@xen-orchestra/cron/.babelrc.js

@@ -1,3 +0,0 @@
module.exports = require('../../@xen-orchestra/babel-config')(
require('./package.json')
)

@xen-orchestra/cron/.npmignore

@@ -1,24 +0,0 @@
/benchmark/
/benchmarks/
*.bench.js
*.bench.js.map
/examples/
example.js
example.js.map
*.example.js
*.example.js.map
/fixture/
/fixtures/
*.fixture.js
*.fixture.js.map
*.fixtures.js
*.fixtures.js.map
/test/
/tests/
*.spec.js
*.spec.js.map
__snapshots__/

@xen-orchestra/cron/README.md

@@ -1,145 +0,0 @@
# @xen-orchestra/cron [![Build Status](https://travis-ci.org/vatesfr/xen-orchestra.png?branch=master)](https://travis-ci.org/vatesfr/xen-orchestra)
> Focused, well maintained, cron parser/scheduler
## Install
Installation of the [npm package](https://npmjs.org/package/@xen-orchestra/cron):
```
> npm install --save @xen-orchestra/cron
```
### Pattern syntax
```
<minute> <hour> <day of month> <month> <day of week>
```
Each entry can be:
- a single value
- a range (`0-23` or `*/2`)
- a list of values/ranges (`1,8-12`)
A wildcard (`*`) can be used as a shortcut for the whole range
(`first-last`).
Step values can be used in conjunction with ranges. For instance,
`1-7/2` is the same as `1,3,5,7`.
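For example, here is how a composite pattern reads, field by field (an illustrative annotation, not output from the library):

```
30 8-18/2 * * mon-fri
│  │      │ │ └─ Monday through Friday
│  │      │ └─── every month
│  │      └───── every day of the month
│  └──────────── every 2 hours from 8 to 18 (8, 10, 12, 14, 16, 18)
└─────────────── at minute 30
```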
| Field            | Allowed values |
|------------------|----------------|
| minute           | 0-59 |
| hour             | 0-23 |
| day of the month | 1-31 |
| month            | 0-11 or 3-letter names (`jan`, `feb`, …) |
| day of week      | 0-7 (0 and 7 both mean Sunday) or 3-letter names (`mon`, `tue`, …) |
> Note: the month range is 0-11 to be compatible with
> [cron](https://github.com/kelektiv/node-cron), it does not appear to
> be very standard though.
### API
`createSchedule(pattern: string, zone: string = 'utc'): Schedule`
> Create a new schedule.
- `pattern`: the pattern to use, see [the syntax](#pattern-syntax)
- `zone`: the timezone to use, use `'local'` for the local timezone
```js
import { createSchedule } from '@xen-orchestra/cron'
const schedule = createSchedule('0 0 * * sun', 'America/New_York')
```
`Schedule#createJob(fn: Function): Job`
> Create a new job from this schedule.
- `fn`: function to execute, if it returns a promise, it will be
awaited before scheduling the next run.
```js
const job = schedule.createJob(() => {
console.log(new Date())
})
```
`Schedule#next(n: number): Array<Date>`
> Returns the next dates matching this schedule.
- `n`: number of dates to return
```js
schedule.next(2)
// [ 2018-02-11T05:00:00.000Z, 2018-02-18T05:00:00.000Z ]
```
`Schedule#startJob(fn: Function): () => void`
> Start a new job from this schedule and return a function to stop it.
- `fn`: function to execute, if it returns a promise, it will be
awaited before scheduling the next run.
```js
const stopJob = schedule.startJob(() => {
console.log(new Date())
})
stopJob()
```
`Job#start(): void`
> Start this job.
```js
job.start()
```
`Job#stop(): void`
> Stop this job.
```js
job.stop()
```
## Development
```
# Install dependencies
> yarn
# Run the tests
> yarn test
# Continuously compile
> yarn dev
# Continuously run the tests
> yarn dev-test
# Build for production (automatically called by npm install)
> yarn build
```
## Contributions
Contributions are *very* welcome, either on the documentation or on
the code.
You may:
- report any [issue](https://github.com/vatesfr/xen-orchestra/issues)
you've encountered;
- fork and create a pull request.
## License
ISC © [Vates SAS](https://vates.fr)

@xen-orchestra/cron/package.json

@@ -1,59 +0,0 @@
{
"name": "@xen-orchestra/cron",
"version": "1.0.3",
"license": "ISC",
"description": "Focused, well maintained, cron parser/scheduler",
"keywords": [
"cron",
"cronjob",
"crontab",
"job",
"parser",
"pattern",
"schedule",
"scheduling",
"task"
],
"homepage": "https://github.com/vatesfr/xen-orchestra/tree/master/@xen-orchestra/cron",
"bugs": "https://github.com/vatesfr/xen-orchestra/issues",
"repository": {
"type": "git",
"url": "https://github.com/vatesfr/xen-orchestra.git"
},
"author": {
"name": "Julien Fontanet",
"email": "julien.fontanet@isonoe.net"
},
"preferGlobal": false,
"main": "dist/",
"bin": {},
"files": [
"dist/"
],
"browserslist": [
">2%"
],
"engines": {
"node": ">=6"
},
"dependencies": {
"lodash": "^4.17.4",
"moment-timezone": "^0.5.14"
},
"devDependencies": {
"@babel/cli": "7.0.0-beta.44",
"@babel/core": "7.0.0-beta.44",
"@babel/preset-env": "7.0.0-beta.44",
"@babel/preset-flow": "7.0.0-beta.44",
"cross-env": "^5.1.3",
"rimraf": "^2.6.2"
},
"scripts": {
"build": "cross-env NODE_ENV=production babel --source-maps --out-dir=dist/ src/",
"clean": "rimraf dist/",
"dev": "cross-env NODE_ENV=development babel --watch --source-maps --out-dir=dist/ src/",
"prebuild": "yarn run clean",
"predev": "yarn run clean",
"prepublishOnly": "yarn run build"
}
}

@xen-orchestra/cron/src/index.js

@@ -1,76 +0,0 @@
import moment from 'moment-timezone'
import next from './next'
import parse from './parse'
const MAX_DELAY = 2 ** 31 - 1
class Job {
constructor (schedule, fn) {
const wrapper = () => {
const result = fn()
let then
if (result != null && typeof (then = result.then) === 'function') {
then.call(result, scheduleNext, scheduleNext)
} else {
scheduleNext()
}
}
const scheduleNext = () => {
const delay = schedule._nextDelay()
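// setTimeout delays are capped at 2^31 - 1 ms; for longer waits, sleep
// MAX_DELAY, then compute the remaining delay again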
this._timeout =
delay < MAX_DELAY
? setTimeout(wrapper, delay)
: setTimeout(scheduleNext, MAX_DELAY)
}
this._scheduleNext = scheduleNext
this._timeout = undefined
}
start () {
this.stop()
this._scheduleNext()
}
stop () {
clearTimeout(this._timeout)
}
}
class Schedule {
constructor (pattern, zone = 'utc') {
this._schedule = parse(pattern)
this._createDate =
zone.toLowerCase() === 'utc'
? moment.utc
: zone === 'local' ? moment : () => moment.tz(zone)
}
createJob (fn) {
return new Job(this, fn)
}
next (n) {
const dates = new Array(n)
const schedule = this._schedule
let date = this._createDate()
for (let i = 0; i < n; ++i) {
dates[i] = (date = next(schedule, date)).toDate()
}
return dates
}
_nextDelay () {
const now = this._createDate()
return next(this._schedule, now) - now
}
startJob (fn) {
const job = this.createJob(fn)
job.start()
return job.stop.bind(job)
}
}
export const createSchedule = (...args) => new Schedule(...args)

@xen-orchestra/cron/src/next.js

@@ -1,89 +0,0 @@
import moment from 'moment-timezone'
import sortedIndex from 'lodash/sortedIndex'
const NEXT_MAPPING = {
month: { year: 1 },
date: { month: 1 },
day: { week: 1 },
hour: { day: 1 },
minute: { hour: 1 },
}
const getFirst = values => (values !== undefined ? values[0] : 0)
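// Advance `date` to the next allowed value for `unit`, carrying into the
// next larger unit when wrapping around; returns true if the date changed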
const setFirstAvailable = (date, unit, values) => {
if (values === undefined) {
return
}
const curr = date.get(unit)
const next = values[sortedIndex(values, curr) % values.length]
if (curr === next) {
return
}
const timestamp = +date
date.set(unit, next)
if (timestamp > +date) {
date.add(NEXT_MAPPING[unit])
}
return true
}
// returns the next run, after the passed date
export default (schedule, fromDate) => {
let date = moment(fromDate)
.set({
second: 0,
millisecond: 0,
})
.add({ minute: 1 })
const { minute, hour, dayOfMonth, month, dayOfWeek } = schedule
setFirstAvailable(date, 'minute', minute)
if (setFirstAvailable(date, 'hour', hour)) {
date.set('minute', getFirst(minute))
}
let loop
let i = 0
do {
loop = false
if (setFirstAvailable(date, 'month', month)) {
date.set({
date: 1,
hour: getFirst(hour),
minute: getFirst(minute),
})
}
let newDate = date.clone()
if (dayOfMonth === undefined) {
if (dayOfWeek !== undefined) {
setFirstAvailable(newDate, 'day', dayOfWeek)
}
} else if (dayOfWeek === undefined) {
setFirstAvailable(newDate, 'date', dayOfMonth)
} else {
const dateDay = newDate.clone()
setFirstAvailable(dateDay, 'date', dayOfMonth)
setFirstAvailable(newDate, 'day', dayOfWeek)
newDate = moment.min(dateDay, newDate)
}
if (+date !== +newDate) {
loop = date.month() !== newDate.month()
date = newDate.set({
hour: getFirst(hour),
minute: getFirst(minute),
})
}
} while (loop && ++i < 5)
if (loop) {
throw new Error('no solutions found for this schedule')
}
return date
}

@xen-orchestra/cron/src/next.spec.js

@@ -1,48 +0,0 @@
/* eslint-env jest */
import mapValues from 'lodash/mapValues'
import moment from 'moment-timezone'
import next from './next'
import parse from './parse'
const N = (pattern, fromDate = '2018-04-09T06:25') => {
const iso = next(parse(pattern), moment.utc(fromDate)).toISOString()
return iso.slice(0, iso.lastIndexOf(':'))
}
describe('next()', () => {
mapValues(
{
minutely: ['* * * * *', '2018-04-09T06:26'],
hourly: ['@hourly', '2018-04-09T07:00'],
daily: ['@daily', '2018-04-10T00:00'],
monthly: ['@monthly', '2018-05-01T00:00'],
yearly: ['@yearly', '2019-01-01T00:00'],
weekly: ['@weekly', '2018-04-15T00:00'],
},
([pattern, result], title) =>
it(title, () => {
expect(N(pattern)).toBe(result)
})
)
it('select first between month-day and week-day', () => {
expect(N('* * 10 * wen')).toBe('2018-04-10T00:00')
expect(N('* * 12 * wen')).toBe('2018-04-11T00:00')
})
it('select the last available day of a month', () => {
expect(N('* * 29 feb *')).toBe('2020-02-29T00:00')
})
it('fails when no solutions has been found', () => {
expect(() => N('0 0 30 feb *')).toThrow(
'no solutions found for this schedule'
)
})
it('select the first sunday of the month', () => {
expect(N('* * * * 0', '2018-03-31T00:00')).toBe('2018-04-01T00:00')
})
})

@xen-orchestra/cron/src/parse.js

@@ -1,193 +0,0 @@
const compareNumbers = (a, b) => a - b
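// Build a hand-rolled recursive-descent parser from the field definitions
// and presets given below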
const createParser = ({ fields: [...fields], presets: { ...presets } }) => {
const m = fields.length
for (let j = 0; j < m; ++j) {
const field = fields[j]
let { aliases } = field
if (aliases !== undefined) {
let symbols = aliases
if (Array.isArray(aliases)) {
aliases = {}
const [start] = field.range
symbols.forEach((alias, i) => {
aliases[alias] = start + i
})
} else {
symbols = Object.keys(aliases)
}
fields[j] = {
...field,
aliases,
aliasesRegExp: new RegExp(symbols.join('|'), 'y'),
}
}
}
let field, i, n, pattern, schedule, values
const isDigit = c => c >= '0' && c <= '9'
const match = c => pattern[i] === c && (++i, true)
const consumeWhitespaces = () => {
let c
while ((c = pattern[i]) === ' ' || c === '\t') {
++i
}
}
const parseInteger = () => {
let c
const digits = []
while (isDigit((c = pattern[i]))) {
++i
digits.push(c)
}
if (digits.length === 0) {
throw new SyntaxError(`${field.name}: missing integer at character ${i}`)
}
return Number.parseInt(digits.join(''), 10)
}
const parseValue = () => {
let value
const { aliasesRegExp } = field
if (aliasesRegExp === undefined || isDigit(pattern[i])) {
value = parseInteger()
const { post } = field
if (post !== undefined) {
value = post(value)
}
} else {
aliasesRegExp.lastIndex = i
const matches = aliasesRegExp.exec(pattern)
if (matches === null) {
throw new SyntaxError(
`${field.name}: missing alias or integer at character ${i}`
)
}
const [alias] = matches
i += alias.length
value = field.aliases[alias]
}
const { range } = field
if (value < range[0] || value > range[1]) {
throw new SyntaxError(
`${field.name}: ${value} is not between ${range[0]} and ${range[1]}`
)
}
return value
}
const parseRange = () => {
let end, start, step
if (match('*')) {
if (!match('/')) {
return
}
;[start, end] = field.range
step = parseInteger()
} else {
start = parseValue()
if (!match('-')) {
values.add(start)
return
}
end = parseValue()
step = match('/') ? parseInteger() : 1
}
for (let i = start; i <= end; i += step) {
values.add(i)
}
}
const parseSequence = () => {
do {
parseRange()
} while (match(','))
}
const parse = p => {
{
const schedule = presets[p]
if (schedule !== undefined) {
return typeof schedule === 'string'
? (presets[p] = parse(schedule))
: schedule
}
}
try {
i = 0
n = p.length
pattern = p
schedule = {}
for (let j = 0; j < m; ++j) {
consumeWhitespaces()
field = fields[j]
values = new Set()
parseSequence()
if (values.size !== 0) {
schedule[field.name] = Array.from(values).sort(compareNumbers)
}
}
consumeWhitespaces()
if (i !== n) {
throw new SyntaxError(
`unexpected character at offset ${i}, expected end`
)
}
return schedule
} finally {
field = pattern = schedule = values = undefined
}
}
return parse
}
export default createParser({
fields: [
{
name: 'minute',
range: [0, 59],
},
{
name: 'hour',
range: [0, 23],
},
{
name: 'dayOfMonth',
range: [1, 31],
},
{
aliases: 'jan feb mar apr may jun jul aug sep oct nov dec'.split(' '),
name: 'month',
range: [0, 11],
},
{
aliases: 'sun mon tue wen thu fri sat'.split(' '),
name: 'dayOfWeek',
post: value => (value === 7 ? 0 : value),
range: [0, 6],
},
],
presets: {
'@annually': '0 0 1 jan *',
'@daily': '0 0 * * *',
'@hourly': '0 * * * *',
'@monthly': '0 0 1 * *',
'@weekly': '0 0 * * sun',
'@yearly': '0 0 1 jan *',
},
})

@xen-orchestra/cron/src/parse.spec.js

@@ -1,49 +0,0 @@
/* eslint-env jest */
import parse from './parse'
describe('parse()', () => {
it('works', () => {
expect(parse('0 0-10 */10 jan,2,4-11/3 *')).toEqual({
minute: [0],
hour: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10],
dayOfMonth: [1, 11, 21, 31],
month: [0, 2, 4, 7, 10],
})
})
it('correctly parse months', () => {
expect(parse('* * * 0,11 *')).toEqual({
month: [0, 11],
})
expect(parse('* * * jan,dec *')).toEqual({
month: [0, 11],
})
})
it('correctly parse days', () => {
expect(parse('* * * * mon,sun')).toEqual({
dayOfWeek: [0, 1],
})
})
it('reports missing integer', () => {
expect(() => parse('*/a')).toThrow('minute: missing integer at character 2')
expect(() => parse('*')).toThrow('hour: missing integer at character 1')
})
it('reports invalid aliases', () => {
expect(() => parse('* * * jan-foo *')).toThrow(
'month: missing alias or integer at character 10'
)
})
it('dayOfWeek: 0 and 7 bind to sunday', () => {
expect(parse('* * * * 0')).toEqual({
dayOfWeek: [0],
})
expect(parse('* * * * 7')).toEqual({
dayOfWeek: [0],
})
})
})

@xen-orchestra/fs/.babelrc.js

@@ -1,3 +0,0 @@
module.exports = require('../../@xen-orchestra/babel-config')(
require('./package.json')
)

@xen-orchestra/fs/package.json

@@ -1,54 +0,0 @@
{
"name": "@xen-orchestra/fs",
"version": "0.0.0",
"license": "AGPL-3.0",
"description": "The File System for Xen Orchestra backups.",
"keywords": [],
"homepage": "https://github.com/vatesfr/xen-orchestra/tree/master/@xen-orchestra/fs",
"bugs": "https://github.com/vatesfr/xen-orchestra/issues",
"repository": {
"type": "git",
"url": "https://github.com/vatesfr/xen-orchestra.git"
},
"preferGlobal": true,
"main": "dist/",
"bin": {},
"files": [
"dist/"
],
"engines": {
"node": ">=6"
},
"dependencies": {
"@babel/runtime": "^7.0.0-beta.44",
"@marsaud/smb2-promise": "^0.2.1",
"execa": "^0.10.0",
"fs-extra": "^5.0.0",
"get-stream": "^3.0.0",
"lodash": "^4.17.4",
"promise-toolbox": "^0.9.5",
"through2": "^2.0.3",
"tmp": "^0.0.33",
"xo-remote-parser": "^0.3"
},
"devDependencies": {
"@babel/cli": "7.0.0-beta.44",
"@babel/core": "7.0.0-beta.44",
"@babel/plugin-proposal-function-bind": "7.0.0-beta.44",
"@babel/plugin-transform-runtime": "^7.0.0-beta.44",
"@babel/preset-env": "7.0.0-beta.44",
"@babel/preset-flow": "7.0.0-beta.44",
"babel-plugin-lodash": "^3.3.2",
"cross-env": "^5.1.3",
"index-modules": "^0.3.0",
"rimraf": "^2.6.2"
},
"scripts": {
"build": "cross-env NODE_ENV=production babel --source-maps --out-dir=dist/ src/",
"clean": "rimraf dist/",
"dev": "cross-env NODE_ENV=development babel --watch --source-maps --out-dir=dist/ src/",
"prebuild": "yarn run clean",
"predev": "yarn run clean",
"prepare": "yarn run build"
}
}

@xen-orchestra/fs/src/abstract.js

@@ -1,288 +0,0 @@
// @flow
import getStream from 'get-stream'
import { randomBytes } from 'crypto'
import { fromCallback, fromEvent, ignoreErrors } from 'promise-toolbox'
import { type Readable, type Writable } from 'stream'
import { parse } from 'xo-remote-parser'
import { createChecksumStream, validChecksumOfReadStream } from './checksum'
type Data = Buffer | Readable | string
type FileDescriptor = {| fd: mixed, path: string |}
type LaxReadable = Readable & Object
type LaxWritable = Writable & Object
type File = FileDescriptor | string
const checksumFile = file => file + '.checksum'
export default class RemoteHandlerAbstract {
_remote: Object
constructor (remote: any) {
this._remote = { ...remote, ...parse(remote.url) }
if (this._remote.type !== this.type) {
throw new Error('Incorrect remote type')
}
}
get type (): string {
throw new Error('Not implemented')
}
/**
* Asks the handler to sync the state of the effective remote with its metadata
*/
async sync (): Promise<mixed> {
return this._sync()
}
async _sync (): Promise<mixed> {
throw new Error('Not implemented')
}
/**
* Free the resources possibly dedicated to putting the remote to work, once it is no longer needed
*/
async forget (): Promise<void> {
await this._forget()
}
async _forget (): Promise<void> {
throw new Error('Not implemented')
}
async test (): Promise<Object> {
const testFileName = `${Date.now()}.test`
const data = await fromCallback(cb => randomBytes(1024 * 1024, cb))
let step = 'write'
try {
await this.outputFile(testFileName, data)
step = 'read'
const read = await this.readFile(testFileName)
if (data.compare(read) !== 0) {
throw new Error('output and input did not match')
}
return {
success: true,
}
} catch (error) {
return {
success: false,
step,
file: testFileName,
error: error.message || String(error),
}
} finally {
ignoreErrors.call(this.unlink(testFileName))
}
}
async outputFile (file: string, data: Data, options?: Object): Promise<void> {
return this._outputFile(file, data, {
flags: 'wx',
...options,
})
}
async _outputFile (file: string, data: Data, options?: Object): Promise<void> {
const stream = await this.createOutputStream(file, options)
const promise = fromEvent(stream, 'finish')
stream.end(data)
await promise
}
async readFile (file: string, options?: Object): Promise<Buffer> {
return this._readFile(file, options)
}
_readFile (file: string, options?: Object): Promise<Buffer> {
return this.createReadStream(file, options).then(getStream.buffer)
}
async rename (
oldPath: string,
newPath: string,
{ checksum = false }: Object = {}
) {
let p = this._rename(oldPath, newPath)
if (checksum) {
p = Promise.all([
p,
this._rename(checksumFile(oldPath), checksumFile(newPath)),
])
}
return p
}
async _rename (oldPath: string, newPath: string) {
throw new Error('Not implemented')
}
async list (
dir: string = '.',
{
filter,
prependDir = false,
}: { filter?: (name: string) => boolean, prependDir?: boolean } = {}
): Promise<string[]> {
const entries = await this._list(dir)
if (prependDir) {
entries.forEach((entry, i) => {
entries[i] = dir + '/' + entry
})
}
return filter === undefined ? entries : entries.filter(filter)
}
async _list (dir: string): Promise<string[]> {
throw new Error('Not implemented')
}
createReadStream (
file: string,
{ checksum = false, ignoreMissingChecksum = false, ...options }: Object = {}
): Promise<LaxReadable> {
const path = typeof file === 'string' ? file : file.path
const streamP = this._createReadStream(file, options).then(stream => {
// detect early errors
let promise = fromEvent(stream, 'readable')
// try to add the length prop if missing and not a range stream
if (
stream.length === undefined &&
options.end === undefined &&
options.start === undefined
) {
promise = Promise.all([
promise,
ignoreErrors.call(
this.getSize(file).then(size => {
stream.length = size
})
),
])
}
return promise.then(() => stream)
})
if (!checksum) {
return streamP
}
// avoid an unhandled rejection warning
ignoreErrors.call(streamP)
return this.readFile(checksumFile(path)).then(
checksum =>
streamP.then(stream => {
const { length } = stream
stream = (validChecksumOfReadStream(
stream,
String(checksum).trim()
): LaxReadable)
stream.length = length
return stream
}),
error => {
if (ignoreMissingChecksum && error && error.code === 'ENOENT') {
return streamP
}
throw error
}
)
}
async _createReadStream (
file: string,
options?: Object
): Promise<LaxReadable> {
throw new Error('Not implemented')
}
async openFile (path: string, flags?: string): Promise<FileDescriptor> {
return { fd: await this._openFile(path, flags), path }
}
async _openFile (path: string, flags?: string): Promise<mixed> {
throw new Error('Not implemented')
}
async closeFile (fd: FileDescriptor): Promise<void> {
await this._closeFile(fd.fd)
}
async _closeFile (fd: mixed): Promise<void> {
throw new Error('Not implemented')
}
async refreshChecksum (path: string): Promise<void> {
const stream = (await this.createReadStream(path)).pipe(
createChecksumStream()
)
stream.resume() // start reading the whole file
await this.outputFile(checksumFile(path), await stream.checksum)
}
async createOutputStream (
file: File,
{ checksum = false, ...options }: Object = {}
): Promise<LaxWritable> {
const path = typeof file === 'string' ? file : file.path
const streamP = this._createOutputStream(file, {
flags: 'wx',
...options,
})
if (!checksum) {
return streamP
}
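// pipe the data through a checksum stream into the real output stream and
// write the checksum to a companion `.checksum` file once it resolves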
const checksumStream = createChecksumStream()
const forwardError = error => {
checksumStream.emit('error', error)
}
const stream = await streamP
stream.on('error', forwardError)
checksumStream.pipe(stream)
// $FlowFixMe
checksumStream.checksumWritten = checksumStream.checksum
.then(value => this.outputFile(checksumFile(path), value))
.catch(forwardError)
return checksumStream
}
async _createOutputStream (
file: mixed,
options?: Object
): Promise<LaxWritable> {
throw new Error('Not implemented')
}
async unlink (file: string, { checksum = true }: Object = {}): Promise<void> {
if (checksum) {
ignoreErrors.call(this._unlink(checksumFile(file)))
}
await this._unlink(file)
}
async _unlink (file: mixed): Promise<void> {
throw new Error('Not implemented')
}
async getSize (file: mixed): Promise<number> {
return this._getSize(file)
}
async _getSize (file: mixed): Promise<number> {
throw new Error('Not implemented')
}
}

@xen-orchestra/fs/src/checksum.js

@@ -1,100 +0,0 @@
// @flow
// $FlowFixMe
import through2 from 'through2'
import { createHash } from 'crypto'
import { defer, fromEvent } from 'promise-toolbox'
import { invert } from 'lodash'
import { type Readable, type Transform } from 'stream'
// Format: $<algorithm>$<salt>$<encrypted>
//
// http://man7.org/linux/man-pages/man3/crypt.3.html#NOTES
const ALGORITHM_TO_ID = {
md5: '1',
sha256: '5',
sha512: '6',
}
const ID_TO_ALGORITHM = invert(ALGORITHM_TO_ID)
// Create a through stream which computes the checksum of all data going
// through.
//
// The `checksum` attribute is a promise which resolves at the end of the stream
// with a string representation of the checksum.
//
// const source = ...
// const checksumStream = source.pipe(createChecksumStream())
// checksumStream.resume() // make the data flow without an output
// console.log(await checksumStream.checksum)
export const createChecksumStream = (
algorithm: string = 'md5'
): Transform & { checksum: Promise<string> } => {
const algorithmId = ALGORITHM_TO_ID[algorithm]
if (!algorithmId) {
throw new Error(`unknown algorithm: ${algorithm}`)
}
const hash = createHash(algorithm)
const { promise, resolve, reject } = defer()
const stream = through2(
(chunk, enc, callback) => {
hash.update(chunk)
callback(null, chunk)
},
callback => {
resolve(`$${algorithmId}$$${hash.digest('hex')}`)
callback()
}
).once('error', reject)
stream.checksum = promise
return stream
}
// Check if the checksum of a readable stream is equal to an expected checksum.
// The given stream is wrapped in a stream which emits an error event
// if the computed checksum does not equal the expected checksum.
export const validChecksumOfReadStream = (
stream: Readable,
expectedChecksum: string
): Readable & { checksumVerified: Promise<void> } => {
const algorithmId = expectedChecksum.slice(
1,
expectedChecksum.indexOf('$', 1)
)
if (!algorithmId) {
throw new Error(`unknown algorithm: ${algorithmId}`)
}
const hash = createHash(ID_TO_ALGORITHM[algorithmId])
const wrapper: any = stream.pipe(
through2(
{ highWaterMark: 0 },
(chunk, enc, callback) => {
hash.update(chunk)
callback(null, chunk)
},
callback => {
const checksum = `$${algorithmId}$$${hash.digest('hex')}`
callback(
checksum !== expectedChecksum
? new Error(
`Bad checksum (${checksum}), expected: ${expectedChecksum}`
)
: null
)
}
)
)
stream.on('error', error => wrapper.emit('error', error))
wrapper.checksumVerified = fromEvent(wrapper, 'end')
return wrapper
}

@xen-orchestra/fs/src/fs.spec.js

@@ -1,26 +0,0 @@
/* eslint-env jest */
import rimraf from 'rimraf'
import tmp from 'tmp'
import { fromCallback as pFromCallback } from 'promise-toolbox'
import { getHandler } from '.'
const initialDir = process.cwd()
beforeEach(async () => {
const dir = await pFromCallback(cb => tmp.dir(cb))
process.chdir(dir)
})
afterEach(async () => {
const tmpDir = process.cwd()
process.chdir(initialDir)
await pFromCallback(cb => rimraf(tmpDir, cb))
})
test("fs test doesn't crash", async () => {
const handler = getHandler({ url: 'file://' + process.cwd() })
const result = await handler.test()
expect(result.success).toBeTruthy()
})

@xen-orchestra/fs/src/index.js

@@ -1,26 +0,0 @@
// @flow
import type RemoteHandler from './abstract'
import RemoteHandlerLocal from './local'
import RemoteHandlerNfs from './nfs'
import RemoteHandlerSmb from './smb'
export type { default as RemoteHandler } from './abstract'
export type Remote = { url: string }
const HANDLERS = {
file: RemoteHandlerLocal,
smb: RemoteHandlerSmb,
nfs: RemoteHandlerNfs,
}
export const getHandler = (remote: Remote): RemoteHandler => {
// FIXME: should be done in xo-remote-parser.
const type = remote.url.split('://')[0]
const Handler = HANDLERS[type]
if (!Handler) {
throw new Error('Unhandled remote type')
}
return new Handler(remote)
}

@xen-orchestra/fs/src/local.js

@@ -1,114 +0,0 @@
import fs from 'fs-extra'
import { dirname, resolve } from 'path'
import { noop, startsWith } from 'lodash'
import RemoteHandlerAbstract from './abstract'
export default class LocalHandler extends RemoteHandlerAbstract {
get type () {
return 'file'
}
_getRealPath () {
return this._remote.path
}
_getFilePath (file) {
const realPath = this._getRealPath()
const parts = [realPath]
if (file) {
parts.push(file)
}
const path = resolve.apply(null, parts)
if (!startsWith(path, realPath)) {
throw new Error('Remote path is unavailable')
}
return path
}
async _sync () {
if (this._remote.enabled) {
try {
const path = this._getRealPath()
await fs.ensureDir(path)
await fs.access(path, fs.R_OK | fs.W_OK)
} catch (exc) {
this._remote.enabled = false
this._remote.error = exc.message
}
}
return this._remote
}
async _forget () {
return noop()
}
async _outputFile (file, data, options) {
const path = this._getFilePath(file)
await fs.ensureDir(dirname(path))
await fs.writeFile(path, data, options)
}
async _readFile (file, options) {
return fs.readFile(this._getFilePath(file), options)
}
async _rename (oldPath, newPath) {
return fs.rename(this._getFilePath(oldPath), this._getFilePath(newPath))
}
async _list (dir = '.') {
return fs.readdir(this._getFilePath(dir))
}
async _createReadStream (file, options) {
if (typeof file === 'string') {
return fs.createReadStream(this._getFilePath(file), options)
} else {
return fs.createReadStream('', {
autoClose: false,
...options,
fd: file.fd,
})
}
}
async _createOutputStream (file, options) {
if (typeof file === 'string') {
const path = this._getFilePath(file)
await fs.ensureDir(dirname(path))
return fs.createWriteStream(path, options)
} else {
return fs.createWriteStream('', {
autoClose: false,
...options,
fd: file.fd,
})
}
}
async _unlink (file) {
return fs.unlink(this._getFilePath(file)).catch(error => {
// do not throw if the file did not exist
if (error == null || error.code !== 'ENOENT') {
throw error
}
})
}
async _getSize (file) {
const stats = await fs.stat(
this._getFilePath(typeof file === 'string' ? file : file.path)
)
return stats.size
}
async _openFile (path, flags) {
return fs.open(this._getFilePath(path), flags)
}
async _closeFile (fd) {
return fs.close(fd)
}
}

@xen-orchestra/fs/src/nfs.js

@@ -1,96 +0,0 @@
import execa from 'execa'
import fs from 'fs-extra'
import { forEach } from 'lodash'
import LocalHandler from './local'
export default class NfsHandler extends LocalHandler {
get type () {
return 'nfs'
}
_getRealPath () {
return `/run/xo-server/mounts/${this._remote.id}`
}
async _loadRealMounts () {
let stdout
const mounted = {}
try {
stdout = await execa.stdout('findmnt', [
'-P',
'-t',
'nfs,nfs4',
'--output',
'SOURCE,TARGET',
'--noheadings',
])
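// with -P (--pairs), findmnt prints one mount per line, e.g.
// SOURCE="host:path" TARGET="mountpoint"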
const regex = /^SOURCE="([^:]*):(.*)" TARGET="(.*)"$/
forEach(stdout.split('\n'), m => {
if (m) {
const match = regex.exec(m)
mounted[match[3]] = {
host: match[1],
share: match[2],
}
}
})
} catch (exc) {
// When no mounts are found, the call pretends to fail...
if (exc.stderr !== '') {
throw exc
}
}
this._realMounts = mounted
return mounted
}
_matchesRealMount () {
return this._getRealPath() in this._realMounts
}
async _mount () {
await fs.ensureDir(this._getRealPath())
return execa('mount', [
'-t',
'nfs',
'-o',
'vers=3',
`${this._remote.host}:${this._remote.path}`,
this._getRealPath(),
])
}
async _sync () {
await this._loadRealMounts()
if (this._matchesRealMount() && !this._remote.enabled) {
try {
await this._umount(this._remote)
} catch (exc) {
this._remote.enabled = true
this._remote.error = exc.message
}
} else if (!this._matchesRealMount() && this._remote.enabled) {
try {
await this._mount()
} catch (exc) {
this._remote.enabled = false
this._remote.error = exc.message
}
}
return this._remote
}
async _forget () {
try {
await this._umount(this._remote)
} catch (_) {
// We have to go on...
}
}
async _umount (remote) {
await execa('umount', ['--force', this._getRealPath()])
}
}

@xen-orchestra/fs/src/smb.js

@@ -1,217 +0,0 @@
import Smb2 from '@marsaud/smb2-promise'
import { lastly as pFinally } from 'promise-toolbox'
import RemoteHandlerAbstract from './abstract'
const noop = () => {}
// Normalize the error code for file not found.
const normalizeError = error => {
const { code } = error
return code === 'STATUS_OBJECT_NAME_NOT_FOUND' ||
code === 'STATUS_OBJECT_PATH_NOT_FOUND'
? Object.create(error, {
code: {
configurable: true,
readable: true,
value: 'ENOENT',
writable: true,
},
})
: error
}
export default class SmbHandler extends RemoteHandlerAbstract {
constructor (remote) {
super(remote)
this._forget = noop
}
get type () {
return 'smb'
}
_getClient (remote) {
return new Smb2({
share: `\\\\${remote.host}`,
domain: remote.domain,
username: remote.username,
password: remote.password,
autoCloseTimeout: 0,
})
}
_getFilePath (file) {
if (file === '.') {
file = undefined
}
let path = this._remote.path !== '' ? this._remote.path : ''
// Ensure remote path is a directory.
if (path !== '' && path[path.length - 1] !== '\\') {
path += '\\'
}
if (file) {
path += file.replace(/\//g, '\\')
}
return path
}
_dirname (file) {
const parts = file.split('\\')
parts.pop()
return parts.join('\\')
}
async _sync () {
if (this._remote.enabled) {
try {
// Check access (smb2 does not expose connect in public so far...)
await this.list()
} catch (error) {
this._remote.enabled = false
this._remote.error = error.message
}
}
return this._remote
}
async _outputFile (file, data, options = {}) {
const client = this._getClient(this._remote)
const path = this._getFilePath(file)
const dir = this._dirname(path)
if (dir) {
await client.ensureDir(dir)
}
return client.writeFile(path, data, options)::pFinally(() => {
client.close()
})
}
async _readFile (file, options = {}) {
const client = this._getClient(this._remote)
let content
try {
content = await client
.readFile(this._getFilePath(file), options)
::pFinally(() => {
client.close()
})
} catch (error) {
throw normalizeError(error)
}
return content
}
async _rename (oldPath, newPath) {
const client = this._getClient(this._remote)
try {
await client
.rename(this._getFilePath(oldPath), this._getFilePath(newPath))
::pFinally(() => {
client.close()
})
} catch (error) {
throw normalizeError(error)
}
}
async _list (dir = '.') {
const client = this._getClient(this._remote)
let list
try {
list = await client.readdir(this._getFilePath(dir))::pFinally(() => {
client.close()
})
} catch (error) {
throw normalizeError(error)
}
return list
}
async _createReadStream (file, options = {}) {
if (typeof file !== 'string') {
file = file.path
}
const client = this._getClient(this._remote)
let stream
try {
// FIXME ensure that options are properly handled by @marsaud/smb2
stream = await client.createReadStream(this._getFilePath(file), options)
stream.on('end', () => client.close())
} catch (error) {
throw normalizeError(error)
}
return stream
}
async _createOutputStream (file, options = {}) {
if (typeof file !== 'string') {
file = file.path
}
const client = this._getClient(this._remote)
const path = this._getFilePath(file)
const dir = this._dirname(path)
let stream
try {
if (dir) {
await client.ensureDir(dir)
}
stream = await client.createWriteStream(path, options) // FIXME ensure that options are properly handled by @marsaud/smb2
} catch (err) {
client.close()
throw err
}
stream.on('finish', () => client.close())
return stream
}
async _unlink (file) {
const client = this._getClient(this._remote)
try {
await client.unlink(this._getFilePath(file))::pFinally(() => {
client.close()
})
} catch (error) {
throw normalizeError(error)
}
}
async _getSize (file) {
const client = await this._getClient(this._remote)
let size
try {
size = await client
.getSize(this._getFilePath(typeof file === 'string' ? file : file.path))
::pFinally(() => {
client.close()
})
} catch (error) {
throw normalizeError(error)
}
return size
}
// this is a fake
async _openFile (path) {
return this._getFilePath(path)
}
async _closeFile (fd) {}
}

File diff suppressed because it is too large

CODE_OF_CONDUCT.md

@@ -1,46 +0,0 @@
# Contributor Covenant Code of Conduct
## Our Pledge
In the interest of fostering an open and welcoming environment, we as contributors and maintainers pledge to making participation in our project and our community a harassment-free experience for everyone, regardless of age, body size, disability, ethnicity, gender identity and expression, level of experience, nationality, personal appearance, race, religion, or sexual identity and orientation.
## Our Standards
Examples of behavior that contributes to creating a positive environment include:
* Using welcoming and inclusive language
* Being respectful of differing viewpoints and experiences
* Gracefully accepting constructive criticism
* Focusing on what is best for the community
* Showing empathy towards other community members
Examples of unacceptable behavior by participants include:
* The use of sexualized language or imagery and unwelcome sexual attention or advances
* Trolling, insulting/derogatory comments, and personal or political attacks
* Public or private harassment
* Publishing others' private information, such as a physical or electronic address, without explicit permission
* Other conduct which could reasonably be considered inappropriate in a professional setting
## Our Responsibilities
Project maintainers are responsible for clarifying the standards of acceptable behavior and are expected to take appropriate and fair corrective action in response to any instances of unacceptable behavior.
Project maintainers have the right and responsibility to remove, edit, or reject comments, commits, code, wiki edits, issues, and other contributions that are not aligned to this Code of Conduct, or to ban temporarily or permanently any contributor for other behaviors that they deem inappropriate, threatening, offensive, or harmful.
## Scope
This Code of Conduct applies both within project spaces and in public spaces when an individual is representing the project or its community. Examples of representing a project or community include using an official project e-mail address, posting via an official social media account, or acting as an appointed representative at an online or offline event. Representation of a project may be further defined and clarified by project maintainers.
## Enforcement
Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by contacting the project team at julien.fontanet@vates.fr. The project team will review and investigate all complaints, and will respond in a way that it deems appropriate to the circumstances. The project team is obligated to maintain confidentiality with regard to the reporter of an incident. Further details of specific enforcement policies may be posted separately.
Project maintainers who do not follow or enforce the Code of Conduct in good faith may face temporary or permanent repercussions as determined by other members of the project's leadership.
## Attribution
This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, available at [http://contributor-covenant.org/version/1/4][version]
[homepage]: http://contributor-covenant.org
[version]: http://contributor-covenant.org/version/1/4/


@@ -1,27 +0,0 @@
<!--
Welcome to the issue section of Xen Orchestra!
Here you can:
- report an issue
- propose an enhancement
- ask a question
Please, respect this template as much as possible, it helps us sort
the issues :)
-->
### Context
- **XO origin**: the sources / XO Appliance
- **Versions**:
- Node: **FILL HERE**
- xo-web: **FILL HERE**
- xo-server: **FILL HERE**
### Expected behavior
<!-- What you expect to happen -->
### Current behavior
<!-- What is actually happening -->

README.md

@@ -1,10 +1,68 @@
# Xen Orchestra [![Chat with us](https://storage.crisp.im/plugins/images/936925df-f37b-4ba8-bab0-70cd2edcb0be/badge.svg)](https://go.crisp.im/chat/embed/?website_id=-JzqzzwddSV7bKGtEyAQ) [![Build Status](https://travis-ci.org/vatesfr/xen-orchestra.png?branch=master)](https://travis-ci.org/vatesfr/xen-orchestra)
# Xen Orchestra Web
![](http://i.imgur.com/tRffA5y.png)
XO-Web is part of [Xen Orchestra](https://github.com/vatesfr/xo), a web interface for XenServer or XAPI enabled hosts.
It is a web client for [XO-Server](https://github.com/vatesfr/xo-server).
[![Dependency Status](https://david-dm.org/vatesfr/xo-web.svg?theme=shields.io)](https://david-dm.org/vatesfr/xo-web)
[![devDependency Status](https://david-dm.org/vatesfr/xo-web/dev-status.svg?theme=shields.io)](https://david-dm.org/vatesfr/xo-web#info=devDependencies)
___
## Installation
XOA or manual install procedure is [available here](https://xen-orchestra.com/docs/installation.html)
XOA or manual install procedure is [available here](https://github.com/vatesfr/xo/blob/master/doc/installation/README.md)
## Compilation
Production build:
```
$ npm run build
```
Development build:
```
$ npm run dev
```
## How to report a bug?
If you are certain the bug is exclusively related to XO-Web, you may use the [bugtracker of this repository](https://github.com/vatesfr/xo-web/issues).
Otherwise, please consider using the [bugtracker of the general repository](https://github.com/vatesfr/xo/issues).
## Process for new release
```bash
# Switch to the master branch.
git checkout master
# Fetch the latest changes.
git pull --ff-only
# Merge changes of the next-release branch.
git merge next-release
# Increment the version (patch, minor or major).
npm version minor
# Go back to the next-release branch.
git checkout next-release
# Fetch the latest changes (the merge and version bump) from master into
# next-release.
git merge --ff-only master
# Push the changes on git.
git push --follow-tags origin master next-release
# Publish this release to npm.
npm publish
```
## License

flow-typed/lodash.js vendored

@@ -1,24 +0,0 @@
declare module 'lodash' {
declare export function forEach<K, V>(
object: { [K]: V },
iteratee: (V, K) => void
): void
declare export function groupBy<K, V>(
object: { [K]: V },
iteratee: K | ((V, K) => string)
): { [string]: V[] }
declare export function invert<K, V>(object: { [K]: V }): { [V]: K }
declare export function isEmpty(mixed): boolean
declare export function keyBy<T>(array: T[], iteratee: string): boolean
declare export function last<T>(array?: T[]): T | void
declare export function map<T1, T2>(
collection: T1[],
iteratee: (T1) => T2
): T2[]
declare export function mapValues<K, V1, V2>(
object: { [K]: V1 },
iteratee: (V1, K) => V2
): { [K]: V2 }
declare export function noop(...args: mixed[]): void
declare export function values<K, V>(object: { [K]: V }): V[]
}

flow-typed/promise-toolbox.js

@@ -1,14 +0,0 @@
declare module 'promise-toolbox' {
declare export function cancelable(Function): Function
declare export function defer<T>(): {|
promise: Promise<T>,
reject: T => void,
resolve: T => void,
|}
declare export function fromCallback<T>(
(cb: (error: any, value: T) => void) => void
): Promise<T>
declare export function fromEvent(emitter: mixed, string): Promise<mixed>
declare export function ignoreErrors(): Promise<void>
declare export function timeout<T>(delay: number): Promise<T>
}

flow-typed/xo.js vendored

@@ -1,2 +0,0 @@
// eslint-disable-next-line no-undef
declare type $Dict<T, K = string> = { [K]: T }

gulpfile.js Normal file

@@ -0,0 +1,291 @@
'use strict'
// ===================================================================
var SRC_DIR = __dirname + '/src'
var DIST_DIR = __dirname + '/dist'
// Port to use for the livereload server.
//
// It must be available and if possible unique to not conflict with other projects.
// http://www.random.org/integers/?num=1&min=1024&max=65535&col=1&base=10&format=plain&rnd=new
var LIVERELOAD_PORT = 26242
// Port to use for the embedded web server.
//
// Set to 0 to choose a random port at each run.
var SERVER_PORT = LIVERELOAD_PORT + 1
// Address the server should bind to.
//
// - `'localhost'` to make it accessible from this host only
// - `null` to make it accessible for the whole network
var SERVER_ADDR = 'localhost'
var PRODUCTION = process.argv.indexOf('--production') !== -1
var DEVELOPMENT = !PRODUCTION
// ===================================================================
var gulp = require('gulp')
// ===================================================================
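// Wraps a factory so the real function is only built on the first call and
// then reused for subsequent calls.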
function lazyFn (factory) {
var fn = function () {
fn = factory()
return fn.apply(this, arguments)
}
return function () {
return fn.apply(this, arguments)
}
}
// -------------------------------------------------------------------
var livereload = lazyFn(function () {
var livereload = require('gulp-livereload')
livereload.listen(LIVERELOAD_PORT)
return livereload
})
var pipe = lazyFn(function () {
var pipe = require('nice-pipe')
return PRODUCTION
? pipe
: function () {
return require('gulp-plumber')().pipe(pipe.apply(this, arguments))
}
})
var resolvePath = lazyFn(function () {
return require('path').resolve
})
// -------------------------------------------------------------------
// Similar to `gulp.src()` but the pattern is relative to `SRC_DIR`
// and files are automatically watched when not in production mode.
var src = lazyFn(function () {
function resolve (path) {
return path
? resolvePath(SRC_DIR, path)
: SRC_DIR
}
return PRODUCTION
? function src (pattern, base) {
base = resolve(base)
return gulp.src(pattern, {
base: base,
cwd: base,
sourcemaps: true
})
}
: function src (pattern, base) {
base = resolve(base)
return pipe(
gulp.src(pattern, {
base: base,
cwd: base,
sourcemaps: true
}),
require('gulp-watch')(pattern, {
base: base,
cwd: base
}),
require('gulp-plumber')()
)
}
})
// Similar to `gulp.dest()` but the output directory is relative to
// `DIST_DIR` and defaults to `./`, and files are automatically live-
// reloaded when not in production mode.
var dest = lazyFn(function () {
function resolve (path) {
return path
? resolvePath(DIST_DIR, path)
: DIST_DIR
}
var opts = {
sourcemaps: {
path: '.'
}
}
return PRODUCTION
? function dest (path) {
return gulp.dest(resolve(path), opts)
}
: function dest (path) {
return pipe(
gulp.dest(resolve(path), opts),
livereload()
)
}
})
// ===================================================================
function browserify (path, opts) {
if (opts == null) {
opts = {}
}
var bundler = require('browserify')(path, {
basedir: SRC_DIR,
debug: DEVELOPMENT,
extensions: opts.extensions,
fullPaths: DEVELOPMENT,
standalone: opts.standalone,
// Required by Watchify.
cache: {},
packageCache: {}
})
if (PRODUCTION) {
bundler.plugin('bundle-collapser/plugin')
} else {
bundler = require('watchify')(bundler)
}
// Append the extension if necessary.
if (!/\.js$/.test(path)) {
path += '.js'
}
path = resolvePath(SRC_DIR, path)
var stream = new (require('readable-stream'))({
objectMode: true
})
var write
function bundle () {
bundler.bundle(function onBundle (error, buffer) {
if (error) {
stream.emit('error', error)
return
}
write(new (require('vinyl'))({
base: SRC_DIR,
contents: buffer,
path: path
}))
})
}
if (PRODUCTION) {
write = function (data) {
stream.push(data)
stream.push(null)
}
} else {
stream = require('gulp-plumber')().pipe(stream)
write = function (data) {
stream.push(data)
}
bundler.on('update', bundle)
}
stream._read = function () {
this._read = function () {}
bundle()
}
return stream
}
// ===================================================================
gulp.task(function buildPages () {
return pipe(
src('index.jade'),
require('gulp-jade')(),
DEVELOPMENT && require('gulp-embedlr')({
port: LIVERELOAD_PORT
}),
dest()
)
})
gulp.task(function buildScripts () {
return pipe(
browserify('./index.js'),
PRODUCTION && require('gulp-uglify')({
mangle: false // Avoid breaking Angular deps injector.
}),
dest()
)
})
gulp.task(function buildStyles () {
return pipe(
src('index.scss'),
require('gulp-sass')(),
require('gulp-autoprefixer')([
'last 1 version',
'> 1%'
]),
PRODUCTION && require('gulp-csso')(),
dest()
)
})
gulp.task(function copyAssets () {
return pipe(
src(['assets/**/*', 'favicon.*']),
src(
'fontawesome-webfont.*',
__dirname + '/node_modules/font-awesome/fonts'
),
dest()
)
})
gulp.task('build', gulp.parallel(
'buildPages',
'buildScripts',
'buildStyles',
'copyAssets'
))
// -------------------------------------------------------------------
gulp.task(function clean (done) {
require('rimraf')(DIST_DIR, done)
})
// -------------------------------------------------------------------
gulp.task(function server (done) {
require('connect')()
.use(require('serve-static')(DIST_DIR))
.listen(SERVER_PORT, SERVER_ADDR, function onListen () {
var address = this.address()
var port = address.port
address = address.address
// Correctly handle IPv6 addresses.
if (address.indexOf(':') !== -1) {
address = '[' + address + ']'
}
/* jshint devel: true*/
console.log('Listening on http://' + address + ':' + port)
})
.on('error', done)
.on('close', function onClose () {
done()
})
})

package.json

@@ -1,69 +1,111 @@
{
"devDependencies": {
"@babel/register": "^7.0.0-beta.44",
"babel-7-jest": "^21.3.2",
"babel-eslint": "^8.1.2",
"benchmark": "^2.1.4",
"eslint": "^4.14.0",
"eslint-config-standard": "^11.0.0-beta.0",
"eslint-config-standard-jsx": "^5.0.0",
"eslint-plugin-import": "^2.8.0",
"eslint-plugin-node": "^6.0.0",
"eslint-plugin-promise": "^3.6.0",
"eslint-plugin-react": "^7.6.1",
"eslint-plugin-standard": "^3.0.1",
"exec-promise": "^0.7.0",
"flow-bin": "^0.69.0",
"globby": "^8.0.0",
"husky": "^0.14.3",
"jest": "^22.0.4",
"lodash": "^4.17.4",
"prettier": "^1.10.2",
"promise-toolbox": "^0.9.5",
"sorted-object": "^2.0.1"
},
"engines": {
"yarn": "^1.2.1"
},
"jest": {
"collectCoverage": true,
"projects": [
"<rootDir>",
"<rootDir>/packages/xo-web"
],
"testEnvironment": "node",
"testPathIgnorePatterns": [
"/dist/",
"/xo-web/"
],
"testRegex": "\\.spec\\.js$",
"transform": {
"/@xen-orchestra/cron/.+\\.jsx?$": "babel-7-jest",
"/@xen-orchestra/fs/.+\\.jsx?$": "babel-7-jest",
"/packages/complex-matcher/.+\\.jsx?$": "babel-7-jest",
"/packages/value-matcher/.+\\.jsx?$": "babel-7-jest",
"/packages/vhd-lib/.+\\.jsx?$": "babel-7-jest",
"/packages/xo-cli/.+\\.jsx?$": "babel-7-jest",
"/packages/xo-server/.+\\.jsx?$": "babel-7-jest",
"/packages/xo-vmdk-to-vhd/.+\\.jsx?$": "babel-7-jest",
"\\.jsx?$": "babel-jest"
}
},
"private": true,
"scripts": {
"build": "scripts/run-script --parallel build",
"clean": "scripts/run-script --parallel clean",
"dev": "scripts/run-script --parallel dev",
"dev-test": "jest --bail --watch \"^(?!.*\\.integ\\.spec\\.js$)\"",
"posttest": "scripts/run-script test",
"precommit": "scripts/lint-staged",
"prepare": "scripts/run-script prepare",
"pretest": "eslint --ignore-path .gitignore .",
"test": "jest \"^(?!.*\\.integ\\.spec\\.js$)\"",
"test-integration": "jest \".integ\\.spec\\.js$\""
"name": "xo-web",
"version": "5.0.0",
"license": "AGPL-3.0",
"description": "Web interface client for Xen-Orchestra",
"keywords": [
"xen",
"orchestra",
"xen-orchestra",
"web"
],
"homepage": "https://github.com/vatesfr/xo-web",
"bugs": "https://github.com/vatesfr/xo-web/issues",
"repository": {
"type": "git",
"url": "https://github.com/vatesfr/xo-web"
},
"workspaces": [
"@xen-orchestra/*",
"packages/*"
]
"author": {
"name": "Julien Fontanet",
"email": "julien.fontanet@vates.fr"
},
"preferGlobal": false,
"main": "dist/",
"bin": {},
"files": [
"dist/"
],
"engines": {
"node": ">=0.12"
},
"devDependencies": {
"babel-eslint": "^5.0.0-beta6",
"babel-plugin-transform-runtime": "^6.4.3",
"babel-preset-es2015": "^6.3.13",
"babel-preset-react": "^6.3.13",
"babel-preset-stage-0": "^6.3.13",
"babelify": "^7.2.0",
"bootstrap": "github:twbs/bootstrap#v4-dev",
"browserify": "^13.0.0",
"browserify-plain-jade": "^0.2.2",
"bundle-collapser": "^1.2.1",
"clarify": "^1.0.5",
"connect": "^3.4.0",
"dependency-check": "^2.5.1",
"font-awesome": "^4.5.0",
"gulp": "github:gulpjs/gulp#4.0",
"gulp-autoprefixer": "^3.1.0",
"gulp-csso": "^1.0.1",
"gulp-embedlr": "^0.5.2",
"gulp-jade": "^1.1.0",
"gulp-livereload": "^3.8.1",
"gulp-plumber": "^1.0.1",
"gulp-sass": "^2.1.1",
"gulp-uglify": "^1.5.1",
"gulp-watch": "^4.3.5",
"history": "^2.0.0-rc2",
"jsonrpc-websocket-client": "0.0.1-4",
"mocha": "^2.3.4",
"must": "^0.13.1",
"nice-pipe": "^0.3.4",
"nyc": "^5.3.0",
"react": "^0.14.6",
"react-dom": "^0.14.6",
"react-intl": "^1.2.2",
"react-redux": "^4.0.6",
"react-router": "^2.0.0-rc5",
"react-router-redux": "^2.1.0",
"readable-stream": "^2.0.5",
"redux": "^3.0.5",
"redux-devtools": "^3.0.1",
"redux-router": "^1.0.0-beta7",
"redux-thunk": "^1.0.3",
"serve-static": "^1.10.2",
"source-map-support": "^0.4.0",
"standard": "^5.4.1",
"trace": "^2.0.2",
"vinyl": "^1.1.1",
"watchify": "^3.7.0",
"xo-lib": "^0.8.0-1"
},
"scripts": {
"build": "gulp build --production",
"dev": "gulp build server",
"dev-test": "mocha --opts .mocha.opts --watch --reporter=min \"dist/**/*.spec.js\"",
"lint": "standard",
"depcheck": "dependency-check ./package.json",
"posttest": "npm run lint && npm run depcheck",
"prepublish": "npm run build",
"test": "nyc mocha --opts .mocha.opts \"dist/**/*.spec.js\""
},
"browser": {
"node_modules/ws/index.js": "./ws.js"
},
"browserify": {
"transform": [
"babelify",
"browserify-plain-jade"
]
},
"standard": {
"ignore": [
"dist"
],
"parser": "babel-eslint"
},
"dependencies": {
"babel-runtime": "^6.3.19",
"redux-promise": "^0.5.1"
}
}

packages/complex-matcher/.babelrc.js

@@ -1,3 +0,0 @@
module.exports = require('../../@xen-orchestra/babel-config')(
require('./package.json')
)

packages/complex-matcher/.npmignore

@@ -1,24 +0,0 @@
/benchmark/
/benchmarks/
*.bench.js
*.bench.js.map
/examples/
example.js
example.js.map
*.example.js
*.example.js.map
/fixture/
/fixtures/
*.fixture.js
*.fixture.js.map
*.fixtures.js
*.fixtures.js.map
/test/
/tests/
*.spec.js
*.spec.js.map
__snapshots__/

packages/complex-matcher/README.md

@@ -1,66 +0,0 @@
# complex-matcher [![Build Status](https://travis-ci.org/vatesfr/xen-orchestra.png?branch=master)](https://travis-ci.org/vatesfr/xen-orchestra)
> ${pkg.description}
## Install
Installation of the [npm package](https://npmjs.org/package/complex-matcher):
```
> npm install --save complex-matcher
```
## Usage
```js
import * as CM from 'complex-matcher'
const characters = [
{ name: 'Catwoman', costumeColor: 'black' },
{ name: 'Superman', costumeColor: 'blue', hasCape: true },
{ name: 'Wonder Woman', costumeColor: 'blue' },
]
const predicate = CM.parse('costumeColor:blue hasCape?').createPredicate()
characters.filter(predicate)
// [
// { name: 'Superman', costumeColor: 'blue', hasCape: true },
// ]
new CM.String('foo').createPredicate()
```
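Nodes can also be built programmatically (as with `new CM.String('foo').createPredicate()` above), and any parsed or hand-built tree serializes back to the query syntax with `toString()`. A small sketch of the round-trip, which is what the package's tests assert:
```js
import * as CM from 'complex-matcher'

CM.parse('costumeColor:blue hasCape?').toString()
// → 'costumeColor:blue hasCape?'
```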
## Development
```
# Install dependencies
> yarn
# Run the tests
> yarn test
# Continuously compile
> yarn dev
# Continuously run the tests
> yarn dev-test
# Build for production (automatically called by npm install)
> yarn build
```
## Contributions
Contributions are *very* welcome, either on the documentation or on
the code.
You may:
- report any [issue](https://github.com/vatesfr/xen-orchestra/issues)
you've encountered;
- fork and create a pull request.
## License
ISC © [Vates SAS](https://vates.fr)

View File

@@ -1,48 +0,0 @@
{
"name": "complex-matcher",
"version": "0.3.0",
"license": "ISC",
"description": "",
"keywords": [],
"homepage": "https://github.com/vatesfr/xen-orchestra/tree/master/packages/complex-matcher",
"bugs": "https://github.com/vatesfr/xen-orchestra/issues",
"repository": {
"type": "git",
"url": "https://github.com/vatesfr/xen-orchestra.git"
},
"author": {
"name": "Julien Fontanet",
"email": "julien.fontanet@isonoe.net"
},
"preferGlobal": false,
"main": "dist/",
"bin": {},
"files": [
"dist/"
],
"browserslist": [
">2%"
],
"engines": {
"node": ">=4"
},
"dependencies": {
"lodash": "^4.17.4"
},
"devDependencies": {
"@babel/cli": "7.0.0-beta.44",
"@babel/core": "7.0.0-beta.44",
"@babel/preset-env": "7.0.0-beta.44",
"babel-plugin-lodash": "^3.3.2",
"cross-env": "^5.1.1",
"rimraf": "^2.6.2"
},
"scripts": {
"build": "cross-env NODE_ENV=production babel --source-maps --out-dir=dist/ src/",
"clean": "rimraf dist/",
"dev": "cross-env NODE_ENV=development babel --watch --source-maps --out-dir=dist/ src/",
"prebuild": "yarn run clean",
"predev": "yarn run prebuild",
"prepublishOnly": "yarn run build"
}
}

View File

@@ -1,12 +0,0 @@
import { parse } from './'
import { ast, pattern } from './index.fixtures'
export default ({ benchmark }) => {
benchmark('parse', () => {
parse(pattern)
})
benchmark('toString', () => {
ast.toString()
})
}

View File

@@ -1,15 +0,0 @@
import * as CM from './'
export const pattern =
'foo !"\\\\ \\"" name:|(wonderwoman batman) hasCape? age:32'
export const ast = new CM.And([
new CM.String('foo'),
new CM.Not(new CM.String('\\ "')),
new CM.Property(
'name',
new CM.Or([new CM.String('wonderwoman'), new CM.String('batman')])
),
new CM.TruthyProperty('hasCape'),
new CM.Property('age', new CM.Number(32)),
])

View File

@@ -1,531 +0,0 @@
import { isPlainObject, some } from 'lodash'
// ===================================================================
const RAW_STRING_CHARS = (() => {
const chars = { __proto__: null }
const add = (a, b = a) => {
let i = a.charCodeAt(0)
const j = b.charCodeAt(0)
while (i <= j) {
chars[String.fromCharCode(i++)] = true
}
}
add('$')
add('-')
add('.')
add('0', '9')
add('_')
add('A', 'Z')
add('a', 'z')
return chars
})()
const isRawString = string => {
const { length } = string
for (let i = 0; i < length; ++i) {
if (!(string[i] in RAW_STRING_CHARS)) {
return false
}
}
return true
}
// -------------------------------------------------------------------
class Node {
createPredicate () {
return value => this.match(value)
}
}
export class Null extends Node {
match () {
return true
}
toString () {
return ''
}
}
const formatTerms = terms => terms.map(term => term.toString(true)).join(' ')
export class And extends Node {
constructor (children) {
super()
if (children.length === 1) {
return children[0]
}
this.children = children
}
match (value) {
return this.children.every(child => child.match(value))
}
toString (isNested) {
const terms = formatTerms(this.children)
return isNested ? `(${terms})` : terms
}
}
export class Comparison extends Node {
constructor (operator, value) {
super()
this._comparator = Comparison.comparators[operator]
this._operator = operator
this._value = value
}
match (value) {
return typeof value === 'number' && this._comparator(value, this._value)
}
toString () {
return this._operator + String(this._value)
}
}
Comparison.comparators = {
'>': (a, b) => a > b,
'>=': (a, b) => a >= b,
'<': (a, b) => a < b,
'<=': (a, b) => a <= b,
}
export class Or extends Node {
constructor (children) {
super()
if (children.length === 1) {
return children[0]
}
this.children = children
}
match (value) {
return this.children.some(child => child.match(value))
}
toString () {
return `|(${formatTerms(this.children)})`
}
}
export class Not extends Node {
constructor (child) {
super()
this.child = child
}
match (value) {
return !this.child.match(value)
}
toString () {
return '!' + this.child.toString(true)
}
}
export class NumberNode extends Node {
constructor (value) {
super()
this.value = value
// should not be enumerable for the tests
Object.defineProperty(this, 'match', {
value: this.match.bind(this),
})
}
match (value) {
return (
value === this.value ||
(value !== null && typeof value === 'object' && some(value, this.match))
)
}
toString () {
return String(this.value)
}
}
export { NumberNode as Number }
export class Property extends Node {
constructor (name, child) {
super()
this.name = name
this.child = child
}
match (value) {
return value != null && this.child.match(value[this.name])
}
toString () {
return `${formatString(this.name)}:${this.child.toString(true)}`
}
}
const escapeChar = char => '\\' + char
const formatString = value =>
Number.isNaN(+value)
? isRawString(value) ? value : `"${value.replace(/\\|"/g, escapeChar)}"`
: `"${value}"`
export class StringNode extends Node {
constructor (value) {
super()
this.lcValue = value.toLowerCase()
this.value = value
// should not be enumerable for the tests
Object.defineProperty(this, 'match', {
value: this.match.bind(this),
})
}
match (value) {
if (typeof value === 'string') {
return value.toLowerCase().indexOf(this.lcValue) !== -1
}
if (Array.isArray(value) || isPlainObject(value)) {
return some(value, this.match)
}
return false
}
toString () {
return formatString(this.value)
}
}
export { StringNode as String }
export class TruthyProperty extends Node {
constructor (name) {
super()
this.name = name
}
match (value) {
return value != null && !!value[this.name]
}
toString () {
return formatString(this.name) + '?'
}
}
// -------------------------------------------------------------------
// https://gist.github.com/yelouafi/556e5159e869952335e01f6b473c4ec1
class Failure {
constructor (pos, expected) {
this.expected = expected
this.pos = pos
}
get value () {
throw new Error(
`parse error: expected ${this.expected} at position ${this.pos}`
)
}
}
class Success {
constructor (pos, value) {
this.pos = pos
this.value = value
}
}
// -------------------------------------------------------------------
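// Minimal parser-combinator toolkit: a parser wraps a function from
// (input, pos, end) to either a Success (value + new position) or a Failure.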
class P {
static alt (...parsers) {
const { length } = parsers
return new P((input, pos, end) => {
for (let i = 0; i < length; ++i) {
const result = parsers[i]._parse(input, pos, end)
if (result instanceof Success) {
return result
}
}
return new Failure(pos, 'alt')
})
}
static grammar (rules) {
const grammar = {}
Object.keys(rules).forEach(k => {
const rule = rules[k]
grammar[k] = rule instanceof P ? rule : P.lazy(rule, grammar)
})
return grammar
}
static lazy (parserCreator, arg) {
const parser = new P((input, pos, end) =>
(parser._parse = parserCreator(arg)._parse)(input, pos, end)
)
return parser
}
static regex (regex) {
regex = new RegExp(regex.source, 'y')
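// the sticky ('y') flag anchors each match exactly at lastIndex,
// which is set to `pos` below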
return new P((input, pos) => {
regex.lastIndex = pos
const matches = regex.exec(input)
return matches !== null
? new Success(regex.lastIndex, matches[0])
: new Failure(pos, regex)
})
}
static seq (...parsers) {
const { length } = parsers
return new P((input, pos, end) => {
const values = new Array(length)
for (let i = 0; i < length; ++i) {
const result = parsers[i]._parse(input, pos, end)
if (result instanceof Failure) {
return result
}
pos = result.pos
values[i] = result.value
}
return new Success(pos, values)
})
}
static text (text) {
const { length } = text
return new P(
(input, pos) =>
input.startsWith(text, pos)
? new Success(pos + length, text)
: new Failure(pos, `'${text}'`)
)
}
constructor (parse) {
this._parse = parse
}
map (fn) {
return new P((input, pos, end) => {
const result = this._parse(input, pos, end)
if (result instanceof Success) {
result.value = fn(result.value)
}
return result
})
}
parse (input, pos = 0, end = input.length) {
return this._parse(input, pos, end).value
}
repeat (min = 0, max = Infinity) {
return new P((input, pos, end) => {
const value = []
let result
let i = 0
while (i < min) {
++i
result = this._parse(input, pos, end)
if (result instanceof Failure) {
return result
}
value.push(result.value)
pos = result.pos
}
while (
i < max &&
(result = this._parse(input, pos, end)) instanceof Success
) {
++i
value.push(result.value)
pos = result.pos
}
return new Success(pos, value)
})
}
skip (otherParser) {
return new P((input, pos, end) => {
const result = this._parse(input, pos, end)
if (result instanceof Failure) {
return result
}
const otherResult = otherParser._parse(input, result.pos, end)
if (otherResult instanceof Failure) {
return otherResult
}
result.pos = otherResult.pos
return result
})
}
}
P.eof = new P(
(input, pos, end) =>
pos < end ? new Failure(pos, 'end of input') : new Success(pos)
)
// -------------------------------------------------------------------
const parser = P.grammar({
default: r =>
P.seq(r.ws, r.term.repeat(), P.eof).map(
([, terms]) => (terms.length === 0 ? new Null() : new And(terms))
),
quotedString: new P((input, pos, end) => {
if (input[pos] !== '"') {
return new Failure(pos, '"')
}
++pos
const value = []
let char
while (pos < end && (char = input[pos++]) !== '"') {
if (char === '\\') {
char = input[pos++]
}
value.push(char)
}
return new Success(pos, value.join(''))
}),
rawString: new P((input, pos, end) => {
let value = ''
let c
while (pos < end && RAW_STRING_CHARS[(c = input[pos])]) {
++pos
value += c
}
return value.length === 0
? new Failure(pos, 'a raw string')
: new Success(pos, value)
}),
string: r => P.alt(r.quotedString, r.rawString),
term: r =>
P.alt(
P.seq(P.text('('), r.ws, r.term.repeat(1), P.text(')')).map(
_ => new And(_[2])
),
P.seq(
P.text('|'),
r.ws,
P.text('('),
r.ws,
r.term.repeat(1),
P.text(')')
).map(_ => new Or(_[4])),
P.seq(P.text('!'), r.ws, r.term).map(_ => new Not(_[2])),
P.seq(P.regex(/[<>]=?/), r.rawString).map(([op, val]) => {
val = +val
if (Number.isNaN(val)) {
throw new TypeError('value must be a number')
}
return new Comparison(op, val)
}),
P.seq(r.string, r.ws, P.text(':'), r.ws, r.term).map(
_ => new Property(_[0], _[4])
),
P.seq(r.string, P.text('?')).map(_ => new TruthyProperty(_[0])),
P.alt(
r.quotedString.map(_ => new StringNode(_)),
r.rawString.map(str => {
const asNum = +str
return Number.isNaN(asNum)
? new StringNode(str)
: new NumberNode(asNum)
})
)
).skip(r.ws),
ws: P.regex(/\s*/),
}).default
export const parse = parser.parse.bind(parser)
// -------------------------------------------------------------------
const _getPropertyClauseStrings = ({ child }) => {
if (child instanceof Or) {
const strings = []
child.children.forEach(child => {
if (child instanceof StringNode) {
strings.push(child.value)
}
})
return strings
}
if (child instanceof StringNode) {
return [child.value]
}
return []
}
// Find the possible values of property clauses in an And clause.
export const getPropertyClausesStrings = node => {
if (!node) {
return {}
}
if (node instanceof Property) {
return {
[node.name]: _getPropertyClauseStrings(node),
}
}
if (node instanceof And) {
const strings = {}
node.children.forEach(node => {
if (node instanceof Property) {
const { name } = node
const values = strings[name]
if (values) {
values.push.apply(values, _getPropertyClauseStrings(node))
} else {
strings[name] = _getPropertyClauseStrings(node)
}
}
})
return strings
}
return {}
}
// -------------------------------------------------------------------
export const setPropertyClause = (node, name, child) => {
const property =
child &&
new Property(
name,
typeof child === 'string' ? new StringNode(child) : child
)
if (node === undefined) {
return property
}
const children = (node instanceof And ? node.children : [node]).filter(
child => !(child instanceof Property && child.name === name)
)
if (property !== undefined) {
children.push(property)
}
return new And(children)
}

View File

@@ -1,86 +0,0 @@
/* eslint-env jest */
import { ast, pattern } from './index.fixtures'
import {
getPropertyClausesStrings,
Null,
NumberNode,
parse,
setPropertyClause,
} from './'
it('getPropertyClausesStrings', () => {
const tmp = getPropertyClausesStrings(parse('foo bar:baz baz:|(foo bar)'))
expect(tmp).toEqual({
bar: ['baz'],
baz: ['foo', 'bar'],
})
})
describe('parse', () => {
it('analyses a string and returns a node/tree', () => {
expect(parse(pattern)).toEqual(ast)
})
it('supports an empty string', () => {
expect(parse('')).toEqual(new Null())
})
it('differentiates between numbers and numbers in strings', () => {
let node
node = parse('32')
expect(node.match(32)).toBe(true)
expect(node.match('32')).toBe(false)
expect(node.toString()).toBe('32')
node = parse('"32"')
expect(node.match(32)).toBe(false)
expect(node.match('32')).toBe(true)
expect(node.toString()).toBe('"32"')
})
})
describe('Number', () => {
it('matches a number recursively', () => {
expect(new NumberNode(3).match([{ foo: 3 }])).toBe(true)
})
})
describe('setPropertyClause', () => {
it('creates a node if none passed', () => {
expect(setPropertyClause(undefined, 'foo', 'bar').toString()).toBe(
'foo:bar'
)
})
it('adds a property clause if there was none', () => {
expect(setPropertyClause(parse('baz'), 'foo', 'bar').toString()).toBe(
'baz foo:bar'
)
})
it('replaces the property clause if there was one', () => {
expect(
setPropertyClause(parse('plip foo:baz plop'), 'foo', 'bar').toString()
).toBe('plip plop foo:bar')
expect(
setPropertyClause(parse('foo:|(baz plop)'), 'foo', 'bar').toString()
).toBe('foo:bar')
})
it('removes the property clause if no child is passed', () => {
expect(
setPropertyClause(parse('foo bar:baz qux'), 'bar', undefined).toString()
).toBe('foo qux')
expect(
setPropertyClause(parse('foo bar:baz qux'), 'baz', undefined).toString()
).toBe('foo bar:baz qux')
})
})
it('toString', () => {
expect(pattern).toBe(ast.toString())
})

View File

@@ -1,3 +0,0 @@
module.exports = require('../../@xen-orchestra/babel-config')(
require('./package.json')
)

View File

@@ -1,24 +0,0 @@
/benchmark/
/benchmarks/
*.bench.js
*.bench.js.map
/examples/
example.js
example.js.map
*.example.js
*.example.js.map
/fixture/
/fixtures/
*.fixture.js
*.fixture.js.map
*.fixtures.js
*.fixtures.js.map
/test/
/tests/
*.spec.js
*.spec.js.map
__snapshots__/

View File

@@ -1,66 +0,0 @@
# value-matcher [![Build Status](https://travis-ci.org/vatesfr/xen-orchestra.png?branch=master)](https://travis-ci.org/vatesfr/xen-orchestra)
> ${pkg.description}
## Install
Installation of the [npm package](https://npmjs.org/package/value-matcher):
```
> npm install --save value-matcher
```
## Usage
```js
import { createPredicate } from 'value-matcher'
[
{ user: 'sam', age: 65, active: false },
{ user: 'barney', age: 36, active: true },
{ user: 'fred', age: 40, active: false },
].filter(createPredicate({
__or: [
{ user: 'sam' },
{ active: true },
],
}))
// [
// { user: 'sam', age: 65, active: false },
// { user: 'barney', age: 36, active: true },
// ]
```
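Patterns nest: a plain object matches properties, an array matches items of an array value, and the reserved `__and` / `__or` / `__not` keys combine sub-patterns. A small sketch (the predicate name and sample values are illustrative):
```js
import { createPredicate } from 'value-matcher'

const isActiveAdmin = createPredicate({
  __and: [{ active: true }, { roles: ['admin'] }],
})

isActiveAdmin({ active: true, roles: ['admin', 'user'] }) // true
isActiveAdmin({ active: true, roles: ['user'] }) // false
```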
## Development
```
# Install dependencies
> yarn
# Run the tests
> yarn test
# Continuously compile
> yarn dev
# Continuously run the tests
> yarn dev-test
# Build for production (automatically called by npm install)
> yarn build
```
## Contributions
Contributions are *very* welcome, either on the documentation or on
the code.
You may:
- report any [issue](https://github.com/vatesfr/xen-orchestra/issues)
you've encountered;
- fork and create a pull request.
## License
ISC © [Vates SAS](https://vates.fr)

View File

@@ -1,47 +0,0 @@
{
"name": "value-matcher",
"version": "0.2.0",
"license": "ISC",
"description": "",
"keywords": [],
"homepage": "https://github.com/vatesfr/xen-orchestra/tree/master/packages/value-matcher",
"bugs": "https://github.com/vatesfr/xen-orchestra/issues",
"repository": {
"type": "git",
"url": "https://github.com/vatesfr/xen-orchestra.git"
},
"author": {
"name": "Julien Fontanet",
"email": "julien.fontanet@isonoe.net"
},
"preferGlobal": false,
"main": "dist/",
"bin": {},
"files": [
"dist/"
],
"browserslist": [
">2%"
],
"engines": {
"node": ">=4"
},
"dependencies": {},
"devDependencies": {
"@babel/cli": "7.0.0-beta.44",
"@babel/core": "7.0.0-beta.44",
"@babel/preset-env": "7.0.0-beta.44",
"@babel/preset-flow": "7.0.0-beta.44",
"cross-env": "^5.1.3",
"rimraf": "^2.6.2"
},
"scripts": {
"build": "cross-env NODE_ENV=production babel --source-maps --out-dir=dist/ src/",
"clean": "rimraf dist/",
"dev": "cross-env NODE_ENV=development babel --watch --source-maps --out-dir=dist/ src/",
"prebuild": "yarn run clean",
"predev": "yarn run prebuild",
"prepare": "yarn run build",
"prepublishOnly": "yarn run build"
}
}

View File

@@ -1,81 +0,0 @@
// @flow
/* eslint-disable no-use-before-define */
export type Pattern =
| AndPattern
| OrPattern
| NotPattern
| ObjectPattern
| ArrayPattern
| ValuePattern
/* eslint-enable no-use-before-define */
// all patterns must match
type AndPattern = {| __and: Array<Pattern> |}
// one of the pattern must match
type OrPattern = {| __or: Array<Pattern> |}
// the pattern must not match
type NotPattern = {| __not: Pattern |}
// value is an object with properties matching the patterns
type ObjectPattern = { [string]: Pattern }
// value is an array and each patterns must match a different item
type ArrayPattern = Array<Pattern>
// value equals the pattern
type ValuePattern = boolean | number | string
const match = (pattern: Pattern, value: any) => {
if (Array.isArray(pattern)) {
return (
Array.isArray(value) &&
pattern.every((subpattern, i) =>
// FIXME: subpatterns should match different subvalues
value.some(subvalue => match(subpattern, subvalue))
)
)
}
if (pattern !== null && typeof pattern === 'object') {
const keys = Object.keys(pattern)
const { length } = keys
if (length === 1) {
const [key] = keys
if (key === '__and') {
const andPattern: AndPattern = (pattern: any)
return andPattern.__and.every(subpattern => match(subpattern, value))
}
if (key === '__or') {
const orPattern: OrPattern = (pattern: any)
return orPattern.__or.some(subpattern => match(subpattern, value))
}
if (key === '__not') {
const notPattern: NotPattern = (pattern: any)
return !match(notPattern.__not, value)
}
}
if (value === null || typeof value !== 'object') {
return false
}
const objectPattern: ObjectPattern = (pattern: any)
for (let i = 0; i < length; ++i) {
const key = keys[i]
const subvalue = value[key]
if (subvalue === undefined || !match(objectPattern[key], subvalue)) {
return false
}
}
return true
}
return pattern === value
}
export const createPredicate = (pattern: Pattern) => (value: any) =>
match(pattern, value)

View File

@@ -1,24 +0,0 @@
/benchmark/
/benchmarks/
*.bench.js
*.bench.js.map
/examples/
example.js
example.js.map
*.example.js
*.example.js.map
/fixture/
/fixtures/
*.fixture.js
*.fixture.js.map
*.fixtures.js
*.fixtures.js.map
/test/
/tests/
*.spec.js
*.spec.js.map
__snapshots__/

View File

@@ -1,51 +0,0 @@
# vhd-cli [![Build Status](https://travis-ci.org/vatesfr/xen-orchestra.png?branch=master)](https://travis-ci.org/vatesfr/xen-orchestra)
> ${pkg.description}
## Install
Installation of the [npm package](https://npmjs.org/package/vhd-cli):
```
> npm install --global vhd-cli
```
## Usage
```
> vhd-cli <VHD file>
```
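The CLI is organized into subcommands generated from the files in `src/commands/`; for example, with the `info` command (command names follow the source file names):
```
> vhd-cli info <VHD file>
```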
## Development
```
# Install dependencies
> npm install
# Run the tests
> npm test
# Continuously compile
> npm run dev
# Continuously run the tests
> npm run dev-test
# Build for production (automatically called by npm install)
> npm run build
```
## Contributions
Contributions are *very* welcome, either on the documentation or on
the code.
You may:
- report any [issue](https://github.com/vatesfr/xen-orchestra/issues)
you've encountered;
- fork and create a pull request.
## License
ISC © [Vates SAS](https://vates.fr)

View File

@@ -1,72 +0,0 @@
{
"name": "vhd-cli",
"version": "0.0.1",
"license": "ISC",
"description": "",
"keywords": [],
"homepage": "https://github.com/vatesfr/xen-orchestra/tree/master/packages/vhd-cli",
"bugs": "https://github.com/vatesfr/xen-orchestra/issues",
"repository": {
"type": "git",
"url": "https://github.com/vatesfr/xen-orchestra.git"
},
"author": {
"name": "Julien Fontanet",
"email": "julien.fontanet@isonoe.net"
},
"preferGlobal": false,
"main": "dist/",
"bin": {
"vhd-cli": "dist/index.js"
},
"files": [
"dist/"
],
"engines": {
"node": ">=4"
},
"dependencies": {
"@xen-orchestra/fs": "^0.0.0",
"babel-runtime": "^6.22.0",
"exec-promise": "^0.7.0",
"struct-fu": "^1.2.0",
"vhd-lib": "^0.0.0"
},
"devDependencies": {
"babel-cli": "^6.24.1",
"babel-plugin-lodash": "^3.3.2",
"babel-plugin-transform-runtime": "^6.23.0",
"babel-preset-env": "^1.5.2",
"babel-preset-stage-3": "^6.24.1",
"cross-env": "^5.1.3",
"execa": "^0.10.0",
"index-modules": "^0.3.0",
"promise-toolbox": "^0.9.5",
"rimraf": "^2.6.1",
"tmp": "^0.0.33"
},
"scripts": {
"build": "cross-env NODE_ENV=production babel --source-maps --out-dir=dist/ src/",
"dev": "cross-env NODE_ENV=development babel --watch --source-maps --out-dir=dist/ src/",
"prebuild": "rimraf dist/ && index-modules --cjs-lazy src/commands",
"predev": "yarn run prebuild",
"prepare": "yarn run build"
},
"babel": {
"plugins": [
"lodash",
"transform-runtime"
],
"presets": [
[
"env",
{
"targets": {
"node": 4
}
}
],
"stage-3"
]
}
}

View File

@@ -1,15 +0,0 @@
import Vhd from 'vhd-lib'
import { getHandler } from '@xen-orchestra/fs'
import { resolve } from 'path'
export default async args => {
const handler = getHandler({ url: 'file:///' })
for (const vhd of args) {
try {
await new Vhd(handler, resolve(vhd)).readHeaderAndFooter()
console.log('ok:', vhd)
} catch (error) {
console.error('nok:', vhd, error)
}
}
}

View File

@@ -1,12 +0,0 @@
import Vhd from 'vhd-lib'
import { getHandler } from '@xen-orchestra/fs'
import { resolve } from 'path'
export default async args => {
const vhd = new Vhd(getHandler({ url: 'file:///' }), resolve(args[0]))
await vhd.readHeaderAndFooter()
console.log(vhd.header)
console.log(vhd.footer)
}

View File

@@ -1,21 +0,0 @@
import path from 'path'
import { createSyntheticStream } from 'vhd-lib'
import { createWriteStream } from 'fs'
import { getHandler } from '@xen-orchestra/fs'
export default async function main (args) {
if (args.length < 2 || args.some(_ => _ === '-h' || _ === '--help')) {
return `Usage: ${this.command} <input VHD> <output VHD>`
}
const handler = getHandler({ url: 'file:///' })
return new Promise((resolve, reject) => {
createSyntheticStream(handler, path.resolve(args[0]))
.on('error', reject)
.pipe(
createWriteStream(args[1])
.on('error', reject)
.on('finish', resolve)
)
})
}

View File

@@ -1,44 +0,0 @@
#!/usr/bin/env node
import execPromise from 'exec-promise'
import commands from './commands'
function runCommand (commands, [command, ...args]) {
if (command === undefined || command === '-h' || command === '--help') {
command = 'help'
}
const fn = commands[command]
if (fn === undefined) {
if (command === 'help') {
return `Usage:
${Object.keys(commands)
.filter(command => command !== 'help')
.map(command => ` ${this.command} ${command}`)
.join('\n\n')}`
}
throw `invalid command ${command}` // eslint-disable-line no-throw-literal
}
return fn.call(
{
__proto__: this,
command: `${this.command} ${command}`,
},
args
)
}
execPromise(
runCommand.bind(
{
command: 'vhd-cli',
runCommand,
},
commands
)
)

View File

@@ -1,28 +0,0 @@
/* eslint-env jest */
import execa from 'execa'
import rimraf from 'rimraf'
import tmp from 'tmp'
import { fromCallback as pFromCallback } from 'promise-toolbox'
import command from './commands/info'
const initialDir = process.cwd()
jest.setTimeout(10000)
beforeEach(async () => {
const dir = await pFromCallback(cb => tmp.dir(cb))
process.chdir(dir)
})
afterEach(async () => {
const tmpDir = process.cwd()
process.chdir(initialDir)
await pFromCallback(cb => rimraf(tmpDir, cb))
})
test('can run the command', async () => {
await execa('qemu-img', ['create', '-fvpc', 'empty.vhd', '1G'])
await command(['empty.vhd'])
})

View File

@@ -1,3 +0,0 @@
module.exports = require('../../@xen-orchestra/babel-config')(
require('./package.json')
)

View File

@@ -1,56 +0,0 @@
{
"name": "vhd-lib",
"version": "0.0.0",
"license": "AGPL-3.0",
"description": "Primitives for VHD file handling",
"keywords": [],
"homepage": "https://github.com/vatesfr/xen-orchestra/tree/master/packages/vhd-lib",
"bugs": "https://github.com/vatesfr/xen-orchestra/issues",
"repository": {
"type": "git",
"url": "https://github.com/vatesfr/xen-orchestra.git"
},
"preferGlobal": true,
"main": "dist/",
"bin": {},
"files": [
"dist/"
],
"engines": {
"node": ">=6"
},
"dependencies": {
"@babel/runtime": "^7.0.0-beta.44",
"@xen-orchestra/fs": "^0.0.0",
"async-iterator-to-stream": "^1.0.2",
"execa": "^0.10.0",
"from2": "^2.3.0",
"fs-extra": "^5.0.0",
"get-stream": "^3.0.0",
"limit-concurrency-decorator": "^0.4.0",
"promise-toolbox": "^0.9.5",
"struct-fu": "^1.2.0",
"uuid": "^3.0.1",
"tmp": "^0.0.33"
},
"devDependencies": {
"@babel/cli": "7.0.0-beta.44",
"@babel/core": "7.0.0-beta.44",
"@babel/plugin-transform-runtime": "^7.0.0-beta.44",
"@babel/preset-env": "7.0.0-beta.44",
"@babel/preset-flow": "7.0.0-beta.44",
"babel-plugin-lodash": "^3.3.2",
"cross-env": "^5.1.3",
"fs-promise": "^2.0.0",
"index-modules": "^0.3.0",
"rimraf": "^2.6.2"
},
"scripts": {
"build": "cross-env NODE_ENV=production babel --source-maps --out-dir=dist/ src/",
"clean": "rimraf dist/",
"dev": "cross-env NODE_ENV=development babel --watch --source-maps --out-dir=dist/ src/",
"prebuild": "yarn run clean",
"predev": "yarn run clean",
"prepare": "yarn run build"
}
}

View File

@@ -1,7 +0,0 @@
const MASK = 0x80
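// Bits are numbered from the most significant bit of each byte:
// bit 0 is the MSB of map[0], bit 7 its LSB, matching the sector
// bitmap layout of the VHD blocks handled in this lib.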
export const set = (map, bit) => {
map[bit >> 3] |= MASK >> (bit & 7)
}
export const test = (map, bit) => ((map[bit >> 3] << (bit & 7)) & MASK) !== 0

View File

@@ -1,37 +0,0 @@
import { SECTOR_SIZE } from './_constants'
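// Computes a CHS geometry providing at least `size` bytes, following the
// algorithm of the VHD specification appendix. For instance, a 10 MiB
// request (20480 sectors) resolves to 302 cylinders, 4 heads and 17
// sectors/track, i.e. actualSize = 302 * 4 * 17 * 512 = 10514432 bytes
// (slightly above the request).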
export default function computeGeometryForSize (size) {
const totalSectors = Math.ceil(size / 512)
let sectorsPerTrackCylinder
let heads
let cylinderTimesHeads
if (totalSectors > 65535 * 16 * 255) {
throw Error('disk is too big')
}
// straight copypasta from the file spec appendix on CHS Calculation
if (totalSectors >= 65535 * 16 * 63) {
sectorsPerTrackCylinder = 255
heads = 16
cylinderTimesHeads = totalSectors / sectorsPerTrackCylinder
} else {
sectorsPerTrackCylinder = 17
cylinderTimesHeads = totalSectors / sectorsPerTrackCylinder
heads = Math.floor((cylinderTimesHeads + 1023) / 1024)
if (heads < 4) {
heads = 4
}
if (cylinderTimesHeads >= heads * 1024 || heads > 16) {
sectorsPerTrackCylinder = 31
heads = 16
cylinderTimesHeads = totalSectors / sectorsPerTrackCylinder
}
if (cylinderTimesHeads >= heads * 1024) {
sectorsPerTrackCylinder = 63
heads = 16
cylinderTimesHeads = totalSectors / sectorsPerTrackCylinder
}
}
const cylinders = Math.ceil(cylinderTimesHeads / heads)
const actualSize = cylinders * heads * sectorsPerTrackCylinder * SECTOR_SIZE
return { cylinders, heads, sectorsPerTrackCylinder, actualSize }
}

View File

@@ -1,42 +0,0 @@
import from2 from 'from2'
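// Readable stream which repeats `data` `n` times, generating the content
// lazily. Illustrative: constantStream(Buffer.alloc(512), 8) streams 4 KiB
// of zeros without building the whole buffer up front.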
const constantStream = (data, n = 1) => {
if (!Buffer.isBuffer(data)) {
data = Buffer.from(data)
}
const { length } = data
if (!length) {
throw new Error('data should not be empty')
}
n *= length
let currentLength = length
return from2((size, next) => {
if (n <= 0) {
return next(null, null)
}
if (n < size) {
size = n
}
if (size < currentLength) {
const m = Math.floor(size / length) * length || length
n -= m
return next(null, data.slice(0, m))
}
// if more than twice the data length is requested, repeat the data
if (size > currentLength * 2) {
currentLength = Math.floor(size / length) * length
data = Buffer.alloc(currentLength, data)
}
n -= currentLength
return next(null, data)
})
}
export { constantStream as default }

View File

@@ -1,30 +0,0 @@
export const BLOCK_UNUSED = 0xffffffff
// This lib has been extracted from the Xen Orchestra project.
export const CREATOR_APPLICATION = 'xo '
// Sizes in bytes.
export const FOOTER_SIZE = 512
export const HEADER_SIZE = 1024
export const SECTOR_SIZE = 512
export const DEFAULT_BLOCK_SIZE = 0x00200000 // from the spec
export const FOOTER_COOKIE = 'conectix'
export const HEADER_COOKIE = 'cxsparse'
export const DISK_TYPE_FIXED = 2
export const DISK_TYPE_DYNAMIC = 3
export const DISK_TYPE_DIFFERENCING = 4
export const PARENT_LOCATOR_ENTRIES = 8
export const PLATFORM_NONE = 0
export const PLATFORM_WI2R = 0x57693272
export const PLATFORM_WI2K = 0x5769326b
export const PLATFORM_W2RU = 0x57327275
export const PLATFORM_W2KU = 0x57326b75
export const PLATFORM_MAC = 0x4d616320
export const PLATFORM_MACX = 0x4d616358
export const FILE_FORMAT_VERSION = 1 << 16
export const HEADER_VERSION = 1 << 16

View File

@@ -1,56 +0,0 @@
import { v4 as generateUuid } from 'uuid'
import { checksumStruct, fuFooter, fuHeader } from './_structs'
import {
CREATOR_APPLICATION,
DEFAULT_BLOCK_SIZE as VHD_BLOCK_SIZE_BYTES,
DISK_TYPE_FIXED,
FILE_FORMAT_VERSION,
FOOTER_COOKIE,
FOOTER_SIZE,
HEADER_COOKIE,
HEADER_SIZE,
HEADER_VERSION,
PLATFORM_WI2K,
} from './_constants'
export function createFooter (
size,
timestamp,
geometry,
dataOffset,
diskType = DISK_TYPE_FIXED
) {
const footer = fuFooter.pack({
cookie: FOOTER_COOKIE,
features: 2,
fileFormatVersion: FILE_FORMAT_VERSION,
dataOffset,
timestamp,
creatorApplication: CREATOR_APPLICATION,
creatorHostOs: PLATFORM_WI2K, // it looks like everybody is using Wi2k
originalSize: size,
currentSize: size,
diskGeometry: geometry,
diskType,
uuid: generateUuid(null, []),
})
checksumStruct(footer, fuFooter)
return footer
}
export function createHeader (
maxTableEntries,
tableOffset = HEADER_SIZE + FOOTER_SIZE,
blockSize = VHD_BLOCK_SIZE_BYTES
) {
const header = fuHeader.pack({
cookie: HEADER_COOKIE,
tableOffset,
headerVersion: HEADER_VERSION,
maxTableEntries,
blockSize,
})
checksumStruct(header, fuHeader)
return header
}

View File

@@ -1,121 +0,0 @@
import assert from 'assert'
import fu from 'struct-fu'
import { FOOTER_SIZE, HEADER_SIZE, PARENT_LOCATOR_ENTRIES } from './_constants'
const SIZE_OF_32_BITS = Math.pow(2, 32)
const uint64 = fu.derive(
fu.uint32(2),
number => [Math.floor(number / SIZE_OF_32_BITS), number % SIZE_OF_32_BITS],
_ => _[0] * SIZE_OF_32_BITS + _[1]
)
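// e.g. 2 ** 32 + 5 packs to the pair [1, 5] and unpacks back to 4294967301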
const uint64Undefinable = fu.derive(
fu.uint32(2),
number =>
number === undefined
? [0xffffffff, 0xffffffff]
: [Math.floor(number / SIZE_OF_32_BITS), number % SIZE_OF_32_BITS],
_ =>
_[0] === 0xffffffff && _[1] === 0xffffffff
? undefined
: _[0] * SIZE_OF_32_BITS + _[1]
)
export const fuFooter = fu.struct([
fu.char('cookie', 8), // 0
fu.uint32('features'), // 8
fu.uint32('fileFormatVersion'), // 12
uint64Undefinable('dataOffset'), // offset of the header
fu.uint32('timestamp'), // 24
fu.char('creatorApplication', 4), // 28
fu.uint32('creatorVersion'), // 32
fu.uint32('creatorHostOs'), // 36
uint64('originalSize'),
uint64('currentSize'),
fu.struct('diskGeometry', [
fu.uint16('cylinders'), // 56
fu.uint8('heads'), // 58
fu.uint8('sectorsPerTrackCylinder'), // 59
]),
fu.uint32('diskType'), // 60 Disk type, must be equal to HARD_DISK_TYPE_DYNAMIC/HARD_DISK_TYPE_DIFFERENCING.
fu.uint32('checksum'), // 64
fu.uint8('uuid', 16), // 68
fu.char('saved'), // 84
fu.char('hidden'), // 85 TODO: should probably be merged in reserved
fu.char('reserved', 426), // 86
])
assert.strictEqual(fuFooter.size, FOOTER_SIZE)
export const fuHeader = fu.struct([
fu.char('cookie', 8),
uint64Undefinable('dataOffset'),
uint64('tableOffset'),
fu.uint32('headerVersion'),
fu.uint32('maxTableEntries'), // Max entries in the Block Allocation Table.
fu.uint32('blockSize'), // Block size in bytes. Default (2097152 => 2MB)
fu.uint32('checksum'),
fu.uint8('parentUuid', 16),
fu.uint32('parentTimestamp'),
fu.uint32('reserved1'),
fu.char16be('parentUnicodeName', 512),
fu.struct(
'parentLocatorEntry',
[
fu.uint32('platformCode'),
fu.uint32('platformDataSpace'),
fu.uint32('platformDataLength'),
fu.uint32('reserved'),
uint64('platformDataOffset'), // Absolute byte offset of the locator data.
],
PARENT_LOCATOR_ENTRIES
),
fu.char('reserved2', 256),
])
assert.strictEqual(fuHeader.size, HEADER_SIZE)
export const packField = (field, value, buf) => {
const { offset } = field
field.pack(
value,
buf,
typeof offset !== 'object' ? { bytes: offset, bits: 0 } : offset
)
}
export const unpackField = (field, buf) => {
const { offset } = field
return field.unpack(
buf,
typeof offset !== 'object' ? { bytes: offset, bits: 0 } : offset
)
}
// Returns the checksum of a raw struct.
// The raw struct (footer or header) is altered with the new sum.
export function checksumStruct (buf, struct) {
const checksumField = struct.fields.checksum
let sum = 0
// Do not use the stored checksum to compute the new checksum.
const checksumOffset = checksumField.offset
for (let i = 0, n = checksumOffset; i < n; ++i) {
sum += buf[i]
}
for (
let i = checksumOffset + checksumField.size, n = struct.size;
i < n;
++i
) {
sum += buf[i]
}
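// the VHD checksum is the one's complement of the byte sum;
// >>> 0 coerces the result to an unsigned 32-bit integer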
sum = ~sum >>> 0
// Write new sum.
packField(checksumField, sum, buf)
return sum
}

View File

@@ -1,37 +0,0 @@
import { dirname, relative } from 'path'
import Vhd from './vhd'
import { DISK_TYPE_DIFFERENCING } from './_constants'
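// Turns the VHD at `childPath` into a differencing disk whose parent is
// `parentPath`: updates the parent UUID, unicode name and locator, then
// rewrites the header and footer.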
export default async function chain (
parentHandler,
parentPath,
childHandler,
childPath,
force = false
) {
const parentVhd = new Vhd(parentHandler, parentPath)
const childVhd = new Vhd(childHandler, childPath)
await childVhd.readHeaderAndFooter()
const { header, footer } = childVhd
if (footer.diskType !== DISK_TYPE_DIFFERENCING) {
if (!force) {
throw new Error('cannot chain disk of type ' + footer.diskType)
}
footer.diskType = DISK_TYPE_DIFFERENCING
}
await Promise.all([
childVhd.readBlockAllocationTable(),
parentVhd.readHeaderAndFooter(),
])
const parentName = relative(dirname(childPath), parentPath)
header.parentUuid = parentVhd.footer.uuid
header.parentUnicodeName = parentName
await childVhd.setUniqueParentLocator(parentName)
await childVhd.writeHeader()
await childVhd.writeFooter()
}

View File

@@ -1,42 +0,0 @@
import asyncIteratorToStream from 'async-iterator-to-stream'
import computeGeometryForSize from './_computeGeometryForSize'
import { createFooter } from './_createFooterHeader'
export default asyncIteratorToStream(async function * (size, blockParser) {
const geometry = computeGeometryForSize(size)
const actualSize = geometry.actualSize
const footer = createFooter(
actualSize,
Math.floor(Date.now() / 1000),
geometry
)
let position = 0
function * filePadding (paddingLength) {
if (paddingLength > 0) {
const chunkSize = 1024 * 1024 // 1 MiB
for (
let paddingPosition = 0;
paddingPosition + chunkSize < paddingLength;
paddingPosition += chunkSize
) {
yield Buffer.alloc(chunkSize)
}
yield Buffer.alloc(paddingLength % chunkSize)
}
}
let next
while ((next = await blockParser.next()) !== null) {
const paddingLength = next.offsetBytes - position
if (paddingLength < 0) {
throw new Error('Received out of order blocks')
}
yield * filePadding(paddingLength)
yield next.data
position = next.offsetBytes + next.data.length
}
yield * filePadding(actualSize - position)
yield footer
})

View File

@@ -1,143 +0,0 @@
import asyncIteratorToStream from 'async-iterator-to-stream'
import computeGeometryForSize from './_computeGeometryForSize'
import { createFooter, createHeader } from './_createFooterHeader'
import {
BLOCK_UNUSED,
DEFAULT_BLOCK_SIZE as VHD_BLOCK_SIZE_BYTES,
DISK_TYPE_DYNAMIC,
FOOTER_SIZE,
HEADER_SIZE,
SECTOR_SIZE,
} from './_constants'
import { set as setBitmap } from './_bitmap'
const VHD_BLOCK_SIZE_SECTORS = VHD_BLOCK_SIZE_BYTES / SECTOR_SIZE
/**
 * Fills `bat` in place and returns the VHD occupation table: one entry per
 * VHD block, each entry being a number whose bits mark which input blocks
 * occupy that VHD block.
 */
function createBAT (
firstBlockPosition,
blockAddressList,
ratio,
bat,
bitmapSize
) {
const vhdOccupationTable = []
let currentVhdPositionSector = firstBlockPosition / SECTOR_SIZE
blockAddressList.forEach(blockPosition => {
const scaled = blockPosition / VHD_BLOCK_SIZE_BYTES
const vhdTableIndex = Math.floor(scaled)
if (bat.readUInt32BE(vhdTableIndex * 4) === BLOCK_UNUSED) {
bat.writeUInt32BE(currentVhdPositionSector, vhdTableIndex * 4)
currentVhdPositionSector +=
(bitmapSize + VHD_BLOCK_SIZE_BYTES) / SECTOR_SIZE
}
// not using bit operators to avoid int32 coercion; that way we can go up to 53 bits
vhdOccupationTable[vhdTableIndex] =
(vhdOccupationTable[vhdTableIndex] || 0) +
Math.pow(2, (scaled % 1) * ratio)
})
return vhdOccupationTable
}
function createBitmap (bitmapSize, ratio, vhdOccupationBucket) {
const bitmap = Buffer.alloc(bitmapSize)
for (let i = 0; i < VHD_BLOCK_SIZE_SECTORS / ratio; i++) {
// do not shift to avoid int32 coercion
if ((vhdOccupationBucket * Math.pow(2, -i)) & 1) {
for (let j = 0; j < ratio; j++) {
setBitmap(bitmap, i * ratio + j)
}
}
}
return bitmap
}
function * yieldIfNotEmpty (buffer) {
if (buffer.length > 0) {
yield buffer
}
}
async function * generateFileContent (
blockIterator,
bitmapSize,
ratio,
vhdOccupationTable
) {
let currentVhdBlockIndex = -1
let currentBlockBuffer = Buffer.alloc(0)
for await (const next of blockIterator) {
const batEntry = Math.floor(next.offsetBytes / VHD_BLOCK_SIZE_BYTES)
if (batEntry !== currentVhdBlockIndex) {
yield * yieldIfNotEmpty(currentBlockBuffer)
currentBlockBuffer = Buffer.alloc(VHD_BLOCK_SIZE_BYTES)
currentVhdBlockIndex = batEntry
yield createBitmap(bitmapSize, ratio, vhdOccupationTable[batEntry])
}
next.data.copy(currentBlockBuffer, next.offsetBytes % VHD_BLOCK_SIZE_BYTES)
}
yield * yieldIfNotEmpty(currentBlockBuffer)
}
export default asyncIteratorToStream(async function * (
diskSize,
incomingBlockSize,
blockAddressList,
blockIterator
) {
const ratio = VHD_BLOCK_SIZE_BYTES / incomingBlockSize
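// number of incoming blocks per VHD block; must be a whole number and at
// most 53 so each occupation-table entry stays an exact integer in a double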
if (ratio % 1 !== 0) {
throw new Error(
`Can't import file, grain size (${incomingBlockSize}) is not a divisor of VHD block size ${VHD_BLOCK_SIZE_BYTES}`
)
}
if (ratio > 53) {
throw new Error(
`Can't import file, grain size / block size ratio is > 53 (${ratio})`
)
}
const maxTableEntries = Math.ceil(diskSize / VHD_BLOCK_SIZE_BYTES) + 1
const tablePhysicalSizeBytes = Math.ceil(maxTableEntries * 4 / 512) * 512
const batPosition = FOOTER_SIZE + HEADER_SIZE
const firstBlockPosition = batPosition + tablePhysicalSizeBytes
const geometry = computeGeometryForSize(diskSize)
const actualSize = geometry.actualSize
const footer = createFooter(
actualSize,
Math.floor(Date.now() / 1000),
geometry,
FOOTER_SIZE,
DISK_TYPE_DYNAMIC
)
const header = createHeader(
maxTableEntries,
batPosition,
VHD_BLOCK_SIZE_BYTES
)
const bitmapSize =
Math.ceil(VHD_BLOCK_SIZE_SECTORS / 8 / SECTOR_SIZE) * SECTOR_SIZE
const bat = Buffer.alloc(tablePhysicalSizeBytes, 0xff)
const vhdOccupationTable = createBAT(
firstBlockPosition,
blockAddressList,
ratio,
bat,
bitmapSize
)
yield footer
yield header
yield bat
yield * generateFileContent(
blockIterator,
bitmapSize,
ratio,
vhdOccupationTable
)
yield footer
})

View File

@@ -1,153 +0,0 @@
import asyncIteratorToStream from 'async-iterator-to-stream'
import { dirname, resolve } from 'path'
import Vhd from './vhd'
import {
BLOCK_UNUSED,
DISK_TYPE_DYNAMIC,
FOOTER_SIZE,
HEADER_SIZE,
SECTOR_SIZE,
} from './_constants'
import { fuFooter, fuHeader, checksumStruct } from './_structs'
import { test as mapTestBit } from './_bitmap'
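// resolves `path` against the directory of `file` while keeping the result
// relative (the leading '/' is stripped)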
const resolveRelativeFromFile = (file, path) =>
resolve('/', dirname(file), path).slice(1)
export default asyncIteratorToStream(function * (handler, path) {
const fds = []
try {
const vhds = []
while (true) {
const fd = yield handler.openFile(path, 'r')
fds.push(fd)
const vhd = new Vhd(handler, fd)
vhds.push(vhd)
yield vhd.readHeaderAndFooter()
yield vhd.readBlockAllocationTable()
if (vhd.footer.diskType === DISK_TYPE_DYNAMIC) {
break
}
path = resolveRelativeFromFile(path, vhd.header.parentUnicodeName)
}
const nVhds = vhds.length
// this is the VHD we want to synthesize
const vhd = vhds[0]
// this is the root VHD
const rootVhd = vhds[nVhds - 1]
// data of our synthetic VHD
// TODO: set parentLocatorEntry-s in header
let header = {
...vhd.header,
tableOffset: FOOTER_SIZE + HEADER_SIZE,
parentTimestamp: rootVhd.header.parentTimestamp,
parentUnicodeName: rootVhd.header.parentUnicodeName,
parentUuid: rootVhd.header.parentUuid,
}
const bat = Buffer.allocUnsafe(vhd.batSize)
let footer = {
...vhd.footer,
dataOffset: FOOTER_SIZE,
diskType: rootVhd.footer.diskType,
}
const sectorsPerBlockData = vhd.sectorsPerBlock
const sectorsPerBlock = sectorsPerBlockData + vhd.bitmapSize / SECTOR_SIZE
const nBlocks = Math.ceil(footer.currentSize / header.blockSize)
const blocksOwner = new Array(nBlocks)
for (
let iBlock = 0,
blockOffset = Math.ceil(
(header.tableOffset + bat.length) / SECTOR_SIZE
);
iBlock < nBlocks;
++iBlock
) {
let blockSector = BLOCK_UNUSED
for (let i = 0; i < nVhds; ++i) {
if (vhds[i].containsBlock(iBlock)) {
blocksOwner[iBlock] = i
blockSector = blockOffset
blockOffset += sectorsPerBlock
break
}
}
bat.writeUInt32BE(blockSector, iBlock * 4)
}
footer = fuFooter.pack(footer)
checksumStruct(footer, fuFooter)
yield footer
header = fuHeader.pack(header)
checksumStruct(header, fuHeader)
yield header
yield bat
// TODO: for generic usage the bitmap needs to be properly computed for each block
const bitmap = Buffer.alloc(vhd.bitmapSize, 0xff)
for (let iBlock = 0; iBlock < nBlocks; ++iBlock) {
const owner = blocksOwner[iBlock]
if (owner === undefined) {
continue
}
yield bitmap
const blocksByVhd = new Map()
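// emits sectors [i, n) of block iBlock, sourcing each sector from the
// youngest VHD of the chain that contains it, and falling back to zeros
// (root VHD) when none does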
const emitBlockSectors = function * (iVhd, i, n) {
const vhd = vhds[iVhd]
const isRootVhd = vhd === rootVhd
if (!vhd.containsBlock(iBlock)) {
if (isRootVhd) {
yield Buffer.alloc((n - i) * SECTOR_SIZE)
} else {
yield * emitBlockSectors(iVhd + 1, i, n)
}
return
}
let block = blocksByVhd.get(vhd)
if (block === undefined) {
block = yield vhd._readBlock(iBlock)
blocksByVhd.set(vhd, block)
}
const { bitmap, data } = block
if (isRootVhd) {
yield data.slice(i * SECTOR_SIZE, n * SECTOR_SIZE)
return
}
while (i < n) {
const hasData = mapTestBit(bitmap, i)
const start = i
do {
++i
} while (i < n && mapTestBit(bitmap, i) === hasData)
if (hasData) {
yield data.slice(start * SECTOR_SIZE, i * SECTOR_SIZE)
} else {
yield * emitBlockSectors(iVhd + 1, start, i)
}
}
}
yield * emitBlockSectors(owner, 0, sectorsPerBlockData)
}
yield footer
} finally {
for (let i = 0, n = fds.length; i < n; ++i) {
handler.closeFile(fds[i]).catch(error => {
console.warn('createReadStream, closeFd', i, error)
})
}
}
})

View File

@@ -1,8 +0,0 @@
export { default } from './vhd'
export { default as chainVhd } from './chain'
export { default as createReadableRawStream } from './createReadableRawStream'
export {
default as createReadableSparseStream,
} from './createReadableSparseStream'
export { default as createSyntheticStream } from './createSyntheticStream'
export { default as mergeVhd } from './merge'

View File

@@ -1,283 +0,0 @@
/* eslint-env jest */
import execa from 'execa'
import fs from 'fs-extra'
import getStream from 'get-stream'
import rimraf from 'rimraf'
import tmp from 'tmp'
import { getHandler } from '@xen-orchestra/fs'
import { randomBytes } from 'crypto'
import { fromEvent, fromCallback as pFromCallback } from 'promise-toolbox'
import chainVhd from './chain'
import createReadStream from './createSyntheticStream'
import Vhd from './vhd'
import vhdMerge from './merge'
import { SECTOR_SIZE } from './_constants'
const initialDir = process.cwd()
jest.setTimeout(60000)
beforeEach(async () => {
const dir = await pFromCallback(cb => tmp.dir(cb))
process.chdir(dir)
})
afterEach(async () => {
const tmpDir = process.cwd()
process.chdir(initialDir)
await pFromCallback(cb => rimraf(tmpDir, cb))
})
async function createRandomFile (name, sizeMb) {
await execa('bash', [
'-c',
`< /dev/urandom tr -dc "\\t\\n [:alnum:]" | head -c ${sizeMb}M >${name}`,
])
}
async function checkFile (vhdName) {
await execa('vhd-util', ['check', '-p', '-b', '-t', '-n', vhdName])
}
async function recoverRawContent (vhdName, rawName, originalSize) {
await checkFile(vhdName)
await execa('qemu-img', ['convert', '-fvpc', '-Oraw', vhdName, rawName])
if (originalSize !== undefined) {
await execa('truncate', ['-s', originalSize, rawName])
}
}
async function convertFromRawToVhd (rawName, vhdName) {
await execa('qemu-img', ['convert', '-f', 'raw', '-Ovpc', rawName, vhdName])
}
test('blocks can be moved', async () => {
const initialSize = 4
await createRandomFile('randomfile', initialSize)
await convertFromRawToVhd('randomfile', 'randomfile.vhd')
const handler = getHandler({ url: 'file://' + process.cwd() })
const originalSize = await handler.getSize('randomfile')
const newVhd = new Vhd(handler, 'randomfile.vhd')
await newVhd.readHeaderAndFooter()
await newVhd.readBlockAllocationTable()
await newVhd._freeFirstBlockSpace(8000000)
await recoverRawContent('randomfile.vhd', 'recovered', originalSize)
expect(await fs.readFile('recovered')).toEqual(
await fs.readFile('randomfile')
)
})
test('the BAT MSB is not used for sign', async () => {
const randomBuffer = await pFromCallback(cb => randomBytes(SECTOR_SIZE, cb))
await execa('qemu-img', ['create', '-fvpc', 'empty.vhd', '1.8T'])
const handler = getHandler({ url: 'file://' + process.cwd() })
const vhd = new Vhd(handler, 'empty.vhd')
await vhd.readHeaderAndFooter()
await vhd.readBlockAllocationTable()
// we want bit 31 to be set, to prove it is not used as a sign bit
const hugeWritePositionSectors = Math.pow(2, 31) + 200
await vhd.writeData(hugeWritePositionSectors, randomBuffer)
await checkFile('empty.vhd')
// here we are moving the first sector very far in the VHD to prove the BAT doesn't use signed int32
const hugePositionBytes = hugeWritePositionSectors * SECTOR_SIZE
await vhd._freeFirstBlockSpace(hugePositionBytes)
// we recover the data manually for speed reasons.
// fs.write() with offset is way faster than qemu-img when there is a 1.5 TB
// hole before the block of data
const recoveredFile = await fs.open('recovered', 'w')
try {
const vhd2 = new Vhd(handler, 'empty.vhd')
await vhd2.readHeaderAndFooter()
await vhd2.readBlockAllocationTable()
for (let i = 0; i < vhd.header.maxTableEntries; i++) {
const entry = vhd._getBatEntry(i)
if (entry !== 0xffffffff) {
const block = (await vhd2._readBlock(i)).data
await fs.write(
recoveredFile,
block,
0,
block.length,
vhd2.header.blockSize * i
)
}
}
} finally {
fs.close(recoveredFile)
}
const recovered = await getStream.buffer(
await fs.createReadStream('recovered', {
start: hugePositionBytes,
end: hugePositionBytes + randomBuffer.length - 1,
})
)
expect(recovered).toEqual(randomBuffer)
})
test('writeData on empty file', async () => {
const mbOfRandom = 3
await createRandomFile('randomfile', mbOfRandom)
await execa('qemu-img', ['create', '-fvpc', 'empty.vhd', mbOfRandom + 'M'])
const randomData = await fs.readFile('randomfile')
const handler = getHandler({ url: 'file://' + process.cwd() })
const originalSize = await handler.getSize('randomfile')
const newVhd = new Vhd(handler, 'empty.vhd')
await newVhd.readHeaderAndFooter()
await newVhd.readBlockAllocationTable()
await newVhd.writeData(0, randomData)
await recoverRawContent('empty.vhd', 'recovered', originalSize)
expect(await fs.readFile('recovered')).toEqual(randomData)
})
test('writeData in 2 non-overlapping operations', async () => {
const mbOfRandom = 3
await createRandomFile('randomfile', mbOfRandom)
await execa('qemu-img', ['create', '-fvpc', 'empty.vhd', mbOfRandom + 'M'])
const randomData = await fs.readFile('randomfile')
const handler = getHandler({ url: 'file://' + process.cwd() })
const originalSize = await handler.getSize('randomfile')
const newVhd = new Vhd(handler, 'empty.vhd')
await newVhd.readHeaderAndFooter()
await newVhd.readBlockAllocationTable()
const splitPointSectors = 2
await newVhd.writeData(0, randomData.slice(0, splitPointSectors * 512))
await newVhd.writeData(
splitPointSectors,
randomData.slice(splitPointSectors * 512)
)
await recoverRawContent('empty.vhd', 'recovered', originalSize)
expect(await fs.readFile('recovered')).toEqual(randomData)
})
test('writeData in 2 overlapping operations', async () => {
const mbOfRandom = 3
await createRandomFile('randomfile', mbOfRandom)
await execa('qemu-img', ['create', '-fvpc', 'empty.vhd', mbOfRandom + 'M'])
const randomData = await fs.readFile('randomfile')
const handler = getHandler({ url: 'file://' + process.cwd() })
const originalSize = await handler.getSize('randomfile')
const newVhd = new Vhd(handler, 'empty.vhd')
await newVhd.readHeaderAndFooter()
await newVhd.readBlockAllocationTable()
const endFirstWrite = 3
const startSecondWrite = 2
await newVhd.writeData(0, randomData.slice(0, endFirstWrite * 512))
await newVhd.writeData(
startSecondWrite,
randomData.slice(startSecondWrite * 512)
)
await recoverRawContent('empty.vhd', 'recovered', originalSize)
expect(await fs.readFile('recovered')).toEqual(randomData)
})
test('BAT can be extended and blocks moved', async () => {
const initialSize = 4
await createRandomFile('randomfile', initialSize)
await convertFromRawToVhd('randomfile', 'randomfile.vhd')
const handler = getHandler({ url: 'file://' + process.cwd() })
const originalSize = await handler.getSize('randomfile')
const newVhd = new Vhd(handler, 'randomfile.vhd')
await newVhd.readHeaderAndFooter()
await newVhd.readBlockAllocationTable()
await newVhd.ensureBatSize(2000)
await recoverRawContent('randomfile.vhd', 'recovered', originalSize)
expect(await fs.readFile('recovered')).toEqual(
await fs.readFile('randomfile')
)
})
test('coalesce works with empty parent files', async () => {
const mbOfRandom = 2
await createRandomFile('randomfile', mbOfRandom)
await convertFromRawToVhd('randomfile', 'randomfile.vhd')
await execa('qemu-img', [
'create',
'-fvpc',
'empty.vhd',
mbOfRandom + 1 + 'M',
])
await checkFile('randomfile.vhd')
await checkFile('empty.vhd')
const handler = getHandler({ url: 'file://' + process.cwd() })
const originalSize = await handler._getSize('randomfile')
await chainVhd(handler, 'empty.vhd', handler, 'randomfile.vhd', true)
await checkFile('randomfile.vhd')
await checkFile('empty.vhd')
await vhdMerge(handler, 'empty.vhd', handler, 'randomfile.vhd')
await recoverRawContent('empty.vhd', 'recovered', originalSize)
expect(await fs.readFile('recovered')).toEqual(
await fs.readFile('randomfile')
)
})
test('coalesce works in normal cases', async () => {
const mbOfRandom = 5
await createRandomFile('randomfile', mbOfRandom)
await createRandomFile('small_randomfile', Math.ceil(mbOfRandom / 2))
await execa('qemu-img', [
'create',
'-fvpc',
'parent.vhd',
mbOfRandom + 1 + 'M',
])
await convertFromRawToVhd('randomfile', 'child1.vhd')
const handler = getHandler({ url: 'file://' + process.cwd() })
await execa('vhd-util', ['snapshot', '-n', 'child2.vhd', '-p', 'child1.vhd'])
const vhd = new Vhd(handler, 'child2.vhd')
await vhd.readHeaderAndFooter()
await vhd.readBlockAllocationTable()
vhd.footer.creatorApplication = 'xoa'
await vhd.writeFooter()
const originalSize = await handler._getSize('randomfile')
await chainVhd(handler, 'parent.vhd', handler, 'child1.vhd', true)
await execa('vhd-util', ['check', '-t', '-n', 'child1.vhd'])
await chainVhd(handler, 'child1.vhd', handler, 'child2.vhd', true)
await execa('vhd-util', ['check', '-t', '-n', 'child2.vhd'])
const smallRandom = await fs.readFile('small_randomfile')
const newVhd = new Vhd(handler, 'child2.vhd')
await newVhd.readHeaderAndFooter()
await newVhd.readBlockAllocationTable()
await newVhd.writeData(5, smallRandom)
await checkFile('child2.vhd')
await checkFile('child1.vhd')
await checkFile('parent.vhd')
await vhdMerge(handler, 'parent.vhd', handler, 'child1.vhd')
await checkFile('parent.vhd')
await chainVhd(handler, 'parent.vhd', handler, 'child2.vhd', true)
await checkFile('child2.vhd')
await vhdMerge(handler, 'parent.vhd', handler, 'child2.vhd')
await checkFile('parent.vhd')
await recoverRawContent(
'parent.vhd',
'recovered_from_coalescing',
originalSize
)
await execa('cp', ['randomfile', 'randomfile2'])
const fd = await fs.open('randomfile2', 'r+')
try {
await fs.write(fd, smallRandom, 0, smallRandom.length, 5 * SECTOR_SIZE)
} finally {
await fs.close(fd)
}
expect(await fs.readFile('recovered_from_coalescing')).toEqual(
await fs.readFile('randomfile2')
)
})
test('createSyntheticStream passes vhd-util check', async () => {
const initialSize = 4
await createRandomFile('randomfile', initialSize)
await convertFromRawToVhd('randomfile', 'randomfile.vhd')
const handler = getHandler({ url: 'file://' + process.cwd() })
const stream = createReadStream(handler, 'randomfile.vhd')
await fromEvent(
stream.pipe(await fs.createWriteStream('recovered.vhd')),
'finish'
)
await checkFile('recovered.vhd')
await execa('qemu-img', ['compare', 'recovered.vhd', 'randomfile'])
})

View File

@@ -1,77 +0,0 @@
// TODO: remove once completely merged in vhd.js
import assert from 'assert'
import concurrency from 'limit-concurrency-decorator'
import Vhd from './vhd'
import { DISK_TYPE_DIFFERENCING, DISK_TYPE_DYNAMIC } from './_constants'
// Merge vhd child into vhd parent.
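// At most two merges run concurrently, enforced by limit-concurrency-decorator.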
export default concurrency(2)(async function merge (
parentHandler,
parentPath,
childHandler,
childPath
) {
const parentFd = await parentHandler.openFile(parentPath, 'r+')
try {
const parentVhd = new Vhd(parentHandler, parentFd)
const childFd = await childHandler.openFile(childPath, 'r')
try {
const childVhd = new Vhd(childHandler, childFd)
// Reading footer and header.
await Promise.all([
parentVhd.readHeaderAndFooter(),
childVhd.readHeaderAndFooter(),
])
assert(childVhd.header.blockSize === parentVhd.header.blockSize)
const parentDiskType = parentVhd.footer.diskType
assert(
parentDiskType === DISK_TYPE_DIFFERENCING ||
parentDiskType === DISK_TYPE_DYNAMIC
)
assert.strictEqual(childVhd.footer.diskType, DISK_TYPE_DIFFERENCING)
// Read allocation table of child/parent.
await Promise.all([
parentVhd.readBlockAllocationTable(),
childVhd.readBlockAllocationTable(),
])
await parentVhd.ensureBatSize(childVhd.header.maxTableEntries)
let mergedDataSize = 0
for (
let blockId = 0;
blockId < childVhd.header.maxTableEntries;
blockId++
) {
if (childVhd.containsBlock(blockId)) {
mergedDataSize += await parentVhd.coalesceBlock(childVhd, blockId)
}
}
const cFooter = childVhd.footer
const pFooter = parentVhd.footer
pFooter.currentSize = cFooter.currentSize
pFooter.diskGeometry = { ...cFooter.diskGeometry }
pFooter.originalSize = cFooter.originalSize
pFooter.timestamp = cFooter.timestamp
pFooter.uuid = cFooter.uuid
// necessary to update values and to recreate the footer after block
// creation
await parentVhd.writeFooter()
return mergedDataSize
} finally {
await childHandler.closeFile(childFd)
}
} finally {
await parentHandler.closeFile(parentFd)
}
})

View File

@@ -1,134 +0,0 @@
/* eslint-env jest */
import execa from 'execa'
import rimraf from 'rimraf'
import tmp from 'tmp'
import { createWriteStream, readFile } from 'fs-promise'
import { fromCallback as pFromCallback, fromEvent } from 'promise-toolbox'
import { createFooter } from './_createFooterHeader'
import createReadableRawVHDStream from './createReadableRawStream'
import createReadableSparseVHDStream from './createReadableSparseStream'
const initialDir = process.cwd()
beforeEach(async () => {
const dir = await pFromCallback(cb => tmp.dir(cb))
process.chdir(dir)
})
afterEach(async () => {
const tmpDir = process.cwd()
process.chdir(initialDir)
await pFromCallback(cb => rimraf(tmpDir, cb))
})
test('createFooter() does not crash', () => {
createFooter(104448, Math.floor(Date.now() / 1000), {
cylinders: 3,
heads: 4,
sectorsPerTrack: 17,
})
})
test('ReadableRawVHDStream does not crash', async () => {
const data = [
{
offsetBytes: 100,
data: Buffer.from('azerzaerazeraze', 'ascii'),
},
{
offsetBytes: 700,
data: Buffer.from('gdfslkdfguer', 'ascii'),
},
]
let index = 0
const mockParser = {
next: () => {
if (index < data.length) {
const result = data[index]
index++
return result
} else {
return null
}
},
}
const fileSize = 1000
const stream = createReadableRawVHDStream(fileSize, mockParser)
const pipe = stream.pipe(createWriteStream('output.vhd'))
await fromEvent(pipe, 'finish')
await execa('vhd-util', ['check', '-t', '-i', '-n', 'output.vhd'])
})
test('ReadableRawVHDStream detects when blocks are out of order', async () => {
const data = [
{
offsetBytes: 700,
data: Buffer.from('azerzaerazeraze', 'ascii'),
},
{
offsetBytes: 100,
data: Buffer.from('gdfslkdfguer', 'ascii'),
},
]
let index = 0
const mockParser = {
next: () => {
if (index < data.length) {
const result = data[index]
index++
return result
} else {
return null
}
},
}
return expect(
new Promise((resolve, reject) => {
const stream = createReadableRawVHDStream(100000, mockParser)
stream.on('error', reject)
const pipe = stream.pipe(createWriteStream('outputStream'))
pipe.on('finish', resolve)
pipe.on('error', reject)
})
).rejects.toThrow('Received out of order blocks')
})
test('ReadableSparseVHDStream can handle a sparse file', async () => {
const blockSize = Math.pow(2, 16)
const blocks = [
{
offsetBytes: blockSize * 3,
data: Buffer.alloc(blockSize, 'azerzaerazeraze', 'ascii'),
},
{
offsetBytes: blockSize * 5,
data: Buffer.alloc(blockSize, 'gdfslkdfguer', 'ascii'),
},
]
const fileSize = blockSize * 10
const stream = createReadableSparseVHDStream(
fileSize,
blockSize,
[100, 700],
blocks
)
const pipe = stream.pipe(createWriteStream('output.vhd'))
await fromEvent(pipe, 'finish')
await execa('vhd-util', ['check', '-t', '-i', '-n', 'output.vhd'])
await execa('qemu-img', [
'convert',
'-f',
'vpc',
'-O',
'raw',
'output.vhd',
'out1.raw',
])
const out1 = await readFile('out1.raw')
const expected = Buffer.alloc(fileSize)
blocks.forEach(b => {
b.data.copy(expected, b.offsetBytes)
})
expect(out1.slice(0, expected.length)).toEqual(expected)
})

View File

@@ -1,631 +0,0 @@
import assert from 'assert'
import getStream from 'get-stream'
import { fromEvent } from 'promise-toolbox'
import constantStream from './_constant-stream'
import { fuFooter, fuHeader, checksumStruct, unpackField } from './_structs'
import { set as mapSetBit, test as mapTestBit } from './_bitmap'
import {
BLOCK_UNUSED,
DISK_TYPE_DIFFERENCING,
DISK_TYPE_DYNAMIC,
FILE_FORMAT_VERSION,
FOOTER_COOKIE,
FOOTER_SIZE,
HEADER_COOKIE,
HEADER_SIZE,
HEADER_VERSION,
PARENT_LOCATOR_ENTRIES,
PLATFORM_NONE,
PLATFORM_W2KU,
SECTOR_SIZE,
} from './_constants'
const VHD_UTIL_DEBUG = 0
const debug = VHD_UTIL_DEBUG
? str => console.log(`[vhd-merge]${str}`)
: () => null
// ===================================================================
//
// Spec:
// https://www.microsoft.com/en-us/download/details.aspx?id=23850
//
// C implementation:
// https://github.com/rubiojr/vhd-util-convert
//
// ===================================================================
const computeBatSize = entries =>
sectorsToBytes(sectorsRoundUpNoZero(entries * 4))
// Sectors conversions.
const sectorsRoundUpNoZero = bytes => Math.ceil(bytes / SECTOR_SIZE) || 1
const sectorsToBytes = sectors => sectors * SECTOR_SIZE
const assertChecksum = (name, buf, struct) => {
const actual = unpackField(struct.fields.checksum, buf)
const expected = checksumStruct(buf, struct)
if (actual !== expected) {
throw new Error(`invalid ${name} checksum ${actual}, expected ${expected}`)
}
}
// unused block as buffer containing a uint32BE
const BUF_BLOCK_UNUSED = Buffer.allocUnsafe(4)
BUF_BLOCK_UNUSED.writeUInt32BE(BLOCK_UNUSED, 0)
// ===================================================================
// Format:
//
// 1. Footer (512)
// 2. Header (1024)
// 3. Unordered entries
// - BAT (batSize @ header.tableOffset)
// - Blocks (@ blockOffset(i))
// - bitmap (blockBitmapSize)
// - data (header.blockSize)
// - Parent locators (parentLocatorSize(i) @ parentLocatorOffset(i))
// 4. Footer (512 @ vhdSize - 512)
//
// Variables:
//
// - batSize = max(1, ceil(header.maxTableEntries * 4 / sectorSize)) * sectorSize
// - blockBitmapSize = ceil(header.blockSize / sectorSize / 8 / sectorSize) * sectorSize
// - blockOffset(i) = bat[i] * sectorSize
// - nBlocks = ceil(footer.currentSize / header.blockSize)
// - parentLocatorOffset(i) = header.parentLocatorEntry[i].platformDataOffset
// - parentLocatorSize(i) = header.parentLocatorEntry[i].platformDataSpace * sectorSize
// - sectorSize = 512
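//
// Worked example (hypothetical values, assuming the default 2 MiB block
// size and 12000 BAT entries):
//
// - batSize = ceil(12000 * 4 / 512) * 512 = 48128 bytes
// - blockBitmapSize = ceil(2097152 / 512 / 8 / 512) * 512 = 512 bytes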
export default class Vhd {
get batSize () {
return computeBatSize(this.header.maxTableEntries)
}
constructor (handler, path) {
this._handler = handler
this._path = path
}
// =================================================================
// Read functions.
// =================================================================
_readStream (start, n) {
return this._handler.createReadStream(this._path, {
start,
end: start + n - 1, // end is inclusive
})
}
_read (start, n) {
return this._readStream(start, n)
.then(getStream.buffer)
.then(buf => {
assert.strictEqual(buf.length, n)
return buf
})
}
containsBlock (id) {
return this._getBatEntry(id) !== BLOCK_UNUSED
}
// Returns the first address after metadata. (In bytes)
getEndOfHeaders () {
const { header } = this
let end = FOOTER_SIZE + HEADER_SIZE
// Max(end, block allocation table end)
end = Math.max(end, header.tableOffset + this.batSize)
for (let i = 0; i < PARENT_LOCATOR_ENTRIES; i++) {
const entry = header.parentLocatorEntry[i]
if (entry.platformCode !== PLATFORM_NONE) {
end = Math.max(
end,
entry.platformDataOffset + sectorsToBytes(entry.platformDataSpace)
)
}
}
debug(`End of headers: ${end}.`)
return end
}
// Returns the address (in bytes) of the first sector after data.
getEndOfData () {
let end = Math.ceil(this.getEndOfHeaders() / SECTOR_SIZE)
const fullBlockSize = this.sectorsOfBitmap + this.sectorsPerBlock
const { maxTableEntries } = this.header
for (let i = 0; i < maxTableEntries; i++) {
const blockAddr = this._getBatEntry(i)
if (blockAddr !== BLOCK_UNUSED) {
end = Math.max(end, blockAddr + fullBlockSize)
}
}
debug(`End of data: ${end}.`)
return sectorsToBytes(end)
}
// TODO: extract the checks into reusable functions:
// - better human reporting
// - auto repair if possible
async readHeaderAndFooter (checkSecondFooter = true) {
const buf = await this._read(0, FOOTER_SIZE + HEADER_SIZE)
const bufFooter = buf.slice(0, FOOTER_SIZE)
const bufHeader = buf.slice(FOOTER_SIZE)
assertChecksum('footer', bufFooter, fuFooter)
assertChecksum('header', bufHeader, fuHeader)
if (checkSecondFooter) {
const size = await this._handler.getSize(this._path)
assert(
bufFooter.equals(await this._read(size - FOOTER_SIZE, FOOTER_SIZE)),
'footer1 !== footer2'
)
}
const footer = (this.footer = fuFooter.unpack(bufFooter))
assert.strictEqual(footer.cookie, FOOTER_COOKIE, 'footer cookie')
assert.strictEqual(footer.dataOffset, FOOTER_SIZE)
assert.strictEqual(footer.fileFormatVersion, FILE_FORMAT_VERSION)
assert(footer.originalSize <= footer.currentSize)
assert(
footer.diskType === DISK_TYPE_DIFFERENCING ||
footer.diskType === DISK_TYPE_DYNAMIC
)
const header = (this.header = fuHeader.unpack(bufHeader))
assert.strictEqual(header.cookie, HEADER_COOKIE)
assert.strictEqual(header.dataOffset, undefined)
assert.strictEqual(header.headerVersion, HEADER_VERSION)
assert(header.maxTableEntries >= footer.currentSize / header.blockSize)
assert(Number.isInteger(Math.log2(header.blockSize / SECTOR_SIZE)))
// Compute the number of sectors in one block.
// Default: One block contains 4096 sectors of 512 bytes.
const sectorsPerBlock = (this.sectorsPerBlock =
header.blockSize / SECTOR_SIZE)
// Compute bitmap size in sectors.
// Default: 1.
const sectorsOfBitmap = (this.sectorsOfBitmap = sectorsRoundUpNoZero(
sectorsPerBlock >> 3
))
// Full block size => data block size + bitmap size.
this.fullBlockSize = sectorsToBytes(sectorsPerBlock + sectorsOfBitmap)
// In bytes.
// Default: 512.
this.bitmapSize = sectorsToBytes(sectorsOfBitmap)
}
// Returns a buffer that contains the block allocation table of a vhd file.
async readBlockAllocationTable () {
const { header } = this
this.blockTable = await this._read(
header.tableOffset,
header.maxTableEntries * 4
)
}
// return the first sector (bitmap) of a block
_getBatEntry (block) {
return this.blockTable.readUInt32BE(block * 4)
}
_readBlock (blockId, onlyBitmap = false) {
const blockAddr = this._getBatEntry(blockId)
if (blockAddr === BLOCK_UNUSED) {
throw new Error(`no such block ${blockId}`)
}
return this._read(
sectorsToBytes(blockAddr),
onlyBitmap ? this.bitmapSize : this.fullBlockSize
).then(
buf =>
onlyBitmap
? { id: blockId, bitmap: buf }
: {
id: blockId,
bitmap: buf.slice(0, this.bitmapSize),
data: buf.slice(this.bitmapSize),
buffer: buf,
}
)
}
// get the identifiers and first sectors of the first and last block
// in the file
//
_getFirstAndLastBlocks () {
const n = this.header.maxTableEntries
const bat = this.blockTable
let i = 0
let j = 0
let first, firstSector, last, lastSector
// get first allocated block for initialization
while ((firstSector = bat.readUInt32BE(j)) === BLOCK_UNUSED) {
i += 1
j += 4
if (i === n) {
const error = new Error('no allocated block found')
error.noBlock = true
throw error
}
}
lastSector = firstSector
first = last = i
while (i < n) {
const sector = bat.readUInt32BE(j)
if (sector !== BLOCK_UNUSED) {
if (sector < firstSector) {
first = i
firstSector = sector
} else if (sector > lastSector) {
last = i
lastSector = sector
}
}
i += 1
j += 4
}
return { first, firstSector, last, lastSector }
}
// =================================================================
// Write functions.
// =================================================================
// Write a buffer/stream at a given position in a vhd file.
async _write (data, offset) {
debug(
`_write offset=${offset} size=${
Buffer.isBuffer(data) ? data.length : '???'
}`
)
// TODO: could probably be merged in remote handlers.
const stream = await this._handler.createOutputStream(this._path, {
flags: 'r+',
start: offset,
})
return Buffer.isBuffer(data)
? new Promise((resolve, reject) => {
stream.on('error', reject)
stream.end(data, resolve)
})
: fromEvent(data.pipe(stream), 'finish')
}
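// Move the first allocated block(s) to the end of the file until at least
// `spaceNeededBytes` are free right after the BAT; used to grow the BAT or
// the parent locators without overwriting block data.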
async _freeFirstBlockSpace (spaceNeededBytes) {
try {
const { first, firstSector, lastSector } = this._getFirstAndLastBlocks()
const tableOffset = this.header.tableOffset
const { batSize } = this
const newMinSector = Math.ceil(
(tableOffset + batSize + spaceNeededBytes) / SECTOR_SIZE
)
if (
tableOffset + batSize + spaceNeededBytes >=
sectorsToBytes(firstSector)
) {
const { fullBlockSize } = this
const newFirstSector = Math.max(
lastSector + fullBlockSize / SECTOR_SIZE,
newMinSector
)
debug(
`freeFirstBlockSpace: move first block ${firstSector} -> ${newFirstSector}`
)
// copy the first block at the end
const stream = await this._readStream(
sectorsToBytes(firstSector),
fullBlockSize
)
await this._write(stream, sectorsToBytes(newFirstSector))
await this._setBatEntry(first, newFirstSector)
await this.writeFooter(true)
spaceNeededBytes -= this.fullBlockSize
if (spaceNeededBytes > 0) {
return this._freeFirstBlockSpace(spaceNeededBytes)
}
}
} catch (e) {
if (!e.noBlock) {
throw e
}
}
}
async ensureBatSize (entries) {
const { header } = this
const prevMaxTableEntries = header.maxTableEntries
if (prevMaxTableEntries >= entries) {
return
}
const newBatSize = computeBatSize(entries)
await this._freeFirstBlockSpace(newBatSize - this.batSize)
const maxTableEntries = (header.maxTableEntries = entries)
const prevBat = this.blockTable
const bat = (this.blockTable = Buffer.allocUnsafe(newBatSize))
prevBat.copy(bat)
bat.fill(BUF_BLOCK_UNUSED, prevMaxTableEntries * 4)
debug(
`ensureBatSize: extend BAT ${prevMaxTableEntries} -> ${maxTableEntries}`
)
await this._write(
constantStream(BUF_BLOCK_UNUSED, maxTableEntries - prevMaxTableEntries),
header.tableOffset + prevBat.length
)
await this.writeHeader()
}
// set the first sector (bitmap) of a block
_setBatEntry (block, blockSector) {
const i = block * 4
const { blockTable } = this
blockTable.writeUInt32BE(blockSector, i)
return this._write(blockTable.slice(i, i + 4), this.header.tableOffset + i)
}
// Make a new empty block at vhd end.
// Update block allocation table in context and in file.
async createBlock (blockId) {
const blockAddr = Math.ceil(this.getEndOfData() / SECTOR_SIZE)
debug(`create block ${blockId} at ${blockAddr}`)
await Promise.all([
// Write an empty block and addr in vhd file.
this._write(
constantStream([0], this.fullBlockSize),
sectorsToBytes(blockAddr)
),
this._setBatEntry(blockId, blockAddr),
])
return blockAddr
}
// Write a bitmap at a block address.
async writeBlockBitmap (blockAddr, bitmap) {
const { bitmapSize } = this
if (bitmap.length !== bitmapSize) {
throw new Error(`Bitmap length is not correct: ${bitmap.length}`)
}
const offset = sectorsToBytes(blockAddr)
debug(
`Write bitmap at: ${offset}. (size=${bitmapSize}, data=${bitmap.toString(
'hex'
)})`
)
await this._write(bitmap, sectorsToBytes(blockAddr))
}
async writeEntireBlock (block) {
let blockAddr = this._getBatEntry(block.id)
if (blockAddr === BLOCK_UNUSED) {
blockAddr = await this.createBlock(block.id)
}
await this._write(block.buffer, sectorsToBytes(blockAddr))
}
async writeBlockSectors (block, beginSectorId, endSectorId, parentBitmap) {
let blockAddr = this._getBatEntry(block.id)
if (blockAddr === BLOCK_UNUSED) {
blockAddr = await this.createBlock(block.id)
parentBitmap = Buffer.alloc(this.bitmapSize, 0)
} else if (parentBitmap === undefined) {
parentBitmap = (await this._readBlock(block.id, true)).bitmap
}
const offset = blockAddr + this.sectorsOfBitmap + beginSectorId
debug(
`writeBlockSectors at ${offset} block=${
block.id
}, sectors=${beginSectorId}...${endSectorId}`
)
for (let i = beginSectorId; i < endSectorId; ++i) {
mapSetBit(parentBitmap, i)
}
await this.writeBlockBitmap(blockAddr, parentBitmap)
await this._write(
block.data.slice(
sectorsToBytes(beginSectorId),
sectorsToBytes(endSectorId)
),
sectorsToBytes(offset)
)
}
async coalesceBlock (child, blockId) {
const block = await child._readBlock(blockId)
const { bitmap, data } = block
debug(`coalesceBlock block=${blockId}`)
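// Scan the child's sector bitmap for runs of dirty sectors: each run is
// written into the parent in a single pass, and a fully dirty block is
// written whole without reading the parent's bitmap first.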
// For each sector of block data...
const { sectorsPerBlock } = child
for (let i = 0; i < sectorsPerBlock; i++) {
// If no changes on one sector, skip.
if (!mapTestBit(bitmap, i)) {
continue
}
let parentBitmap = null
let endSector = i + 1
// Count changed sectors.
while (endSector < sectorsPerBlock && mapTestBit(bitmap, endSector)) {
++endSector
}
// Write n sectors into parent.
debug(`coalesceBlock: write sectors=${i}...${endSector}`)
const isFullBlock = i === 0 && endSector === sectorsPerBlock
if (isFullBlock) {
await this.writeEntireBlock(block)
} else {
if (parentBitmap === null) {
parentBitmap = (await this._readBlock(blockId, true)).bitmap
}
await this.writeBlockSectors(block, i, endSector, parentBitmap)
}
i = endSector
}
// Return the merged data size
return data.length
}
// Write a context footer. (At the end and beginning of a vhd file.)
async writeFooter (onlyEndFooter = false) {
const { footer } = this
const rawFooter = fuFooter.pack(footer)
const eof = await this._handler.getSize(this._path)
// sometimes the file is longer than anticipated; the footer still needs to be written at the very end
const offset = Math.max(this.getEndOfData(), eof - rawFooter.length)
footer.checksum = checksumStruct(rawFooter, fuFooter)
debug(
`Write footer at: ${offset} (checksum=${
footer.checksum
}). (data=${rawFooter.toString('hex')})`
)
if (!onlyEndFooter) {
await this._write(rawFooter, 0)
}
await this._write(rawFooter, offset)
}
writeHeader () {
const { header } = this
const rawHeader = fuHeader.pack(header)
header.checksum = checksumStruct(rawHeader, fuHeader)
const offset = FOOTER_SIZE
debug(
`Write header at: ${offset} (checksum=${
header.checksum
}). (data=${rawHeader.toString('hex')})`
)
return this._write(rawHeader, offset)
}
async writeData (offsetSectors, buffer) {
const bufferSizeSectors = Math.ceil(buffer.length / SECTOR_SIZE)
const startBlock = Math.floor(offsetSectors / this.sectorsPerBlock)
const endBufferSectors = offsetSectors + bufferSizeSectors
const lastBlock = Math.ceil(endBufferSectors / this.sectorsPerBlock) - 1
await this.ensureBatSize(lastBlock + 1) // the BAT needs an entry for every block up to and including lastBlock
const blockSizeBytes = this.sectorsPerBlock * SECTOR_SIZE
const coversWholeBlock = (offsetInBlockSectors, endInBlockSectors) =>
offsetInBlockSectors === 0 && endInBlockSectors === this.sectorsPerBlock
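// Worked example (hypothetical values): with sectorsPerBlock = 4096,
// offsetSectors = 4000 and a 2048-sector buffer, endBufferSectors = 6048,
// so startBlock = 0 and lastBlock = 1; block 0 receives its sectors
// 4000...4096 and block 1 its sectors 0...1952.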
for (
let currentBlock = startBlock;
currentBlock <= lastBlock;
currentBlock++
) {
const offsetInBlockSectors = Math.max(
0,
offsetSectors - currentBlock * this.sectorsPerBlock
)
const endInBlockSectors = Math.min(
endBufferSectors - currentBlock * this.sectorsPerBlock,
this.sectorsPerBlock
)
const startInBuffer = Math.max(
0,
(currentBlock * this.sectorsPerBlock - offsetSectors) * SECTOR_SIZE
)
const endInBuffer = Math.min(
((currentBlock + 1) * this.sectorsPerBlock - offsetSectors) *
SECTOR_SIZE,
buffer.length
)
let inputBuffer
if (coversWholeBlock(offsetInBlockSectors, endInBlockSectors)) {
inputBuffer = buffer.slice(startInBuffer, endInBuffer)
} else {
inputBuffer = Buffer.alloc(blockSizeBytes, 0)
buffer.copy(
inputBuffer,
offsetInBlockSectors * SECTOR_SIZE,
startInBuffer,
endInBuffer
)
}
await this.writeBlockSectors(
{ id: currentBlock, data: inputBuffer },
offsetInBlockSectors,
endInBlockSectors
)
}
await this.writeFooter()
}
async ensureSpaceForParentLocators (neededSectors) {
const firstLocatorOffset = FOOTER_SIZE + HEADER_SIZE
const currentSpace =
Math.floor(this.header.tableOffset / SECTOR_SIZE) -
firstLocatorOffset / SECTOR_SIZE
if (currentSpace < neededSectors) {
const deltaSectors = neededSectors - currentSpace
await this._freeFirstBlockSpace(sectorsToBytes(deltaSectors))
this.header.tableOffset += sectorsToBytes(deltaSectors)
await this._write(this.blockTable, this.header.tableOffset)
}
return firstLocatorOffset
}
async setUniqueParentLocator (fileNameString) {
const { header } = this
header.parentLocatorEntry[0].platformCode = PLATFORM_W2KU
const encodedFilename = Buffer.from(fileNameString, 'utf16le')
const dataSpaceSectors = Math.ceil(encodedFilename.length / SECTOR_SIZE)
const position = await this.ensureSpaceForParentLocators(dataSpaceSectors)
await this._write(encodedFilename, position)
header.parentLocatorEntry[0].platformDataSpace =
dataSpaceSectors * SECTOR_SIZE
header.parentLocatorEntry[0].platformDataLength = encodedFilename.length
header.parentLocatorEntry[0].platformDataOffset = position
for (let i = 1; i < PARENT_LOCATOR_ENTRIES; i++) {
header.parentLocatorEntry[i].platformCode = PLATFORM_NONE
header.parentLocatorEntry[i].platformDataSpace = 0
header.parentLocatorEntry[i].platformDataLength = 0
header.parentLocatorEntry[i].platformDataOffset = 0
}
}
}

View File

@@ -1,24 +0,0 @@
/benchmark/
/benchmarks/
*.bench.js
*.bench.js.map
/examples/
example.js
example.js.map
*.example.js
*.example.js.map
/fixture/
/fixtures/
*.fixture.js
*.fixture.js.map
*.fixtures.js
*.fixtures.js.map
/test/
/tests/
*.spec.js
*.spec.js.map
__snapshots__/

View File

@@ -1,130 +0,0 @@
# xen-api [![Build Status](https://travis-ci.org/vatesfr/xen-orchestra.png?branch=master)](https://travis-ci.org/vatesfr/xen-orchestra)
> Connector to the Xen API
Tested with:
- XenServer 7.3
- XenServer 7.2
- XenServer 7.1
- XenServer 7
- XenServer 6.5
- XenServer 6.2
- XenServer 5.6
## Install
Installation of the [npm package](https://npmjs.org/package/xen-api):
```
> npm install --save xen-api
```
## Usage
### Library
```javascript
const { createClient } = require('xen-api')
const xapi = createClient({
url: 'https://xen1.company.net',
allowUnauthorized: false,
auth: {
user: 'root',
password: 'important secret password'
},
readOnly: false
})
```
Options:
- `url`: address of a host in the pool we are trying to connect to
- `allowUnauthorized`: whether to accept self-signed certificates
- `auth`: credentials used to sign in (can also be specified in the URL)
- `readOnly = false`: if true, no methods with side-effects can be called
```js
// Force connection.
xapi.connect().catch(error => {
console.error(error)
})
// Watch objects.
xapi.objects.on('add', objects => {
console.log('new objects:', objects)
})
```
> Note: all objects are frozen and cannot be altered!
Custom fields on objects (hidden, i.e. non-enumerable):
- `$type`: the type of the object (`VM`, `task`, …);
- `$ref`: the (opaque) reference of the object;
- `$id`: the identifier of this object (its UUID if any, otherwise its reference);
- `$pool`: the pool object this object belongs to.
Furthermore, any field containing a reference (or references if an
array) can be resolved by prepending the field name with a `$`:
```javascript
console.log(xapi.pool.$master.$resident_VMs[0].name_label)
// vm1
```
### CLI
A CLI is provided to help exploration and discovery of the XAPI.
```
> xen-api https://xen1.company.net root
Password: ******
root@xen1.company.net> xapi.status
'connected'
root@xen1.company.net> xapi.pool.master
'OpaqueRef:ec7c5147-8aee-990f-c70b-0de916a8e993'
root@xen1.company.net> xapi.pool.$master.name_label
'xen1'
```
To ease searches, `find()` and `findAll()` functions are available:
```
root@xen1.company.net> findAll({ $type: 'vm' }).length
183
```
## Development
```
# Install dependencies
> npm install
# Run the tests
> npm test
# Continuously compile
> npm run dev
# Continuously run the tests
> npm run dev-test
# Build for production (automatically called by npm install)
> npm run build
```
## Contributions
Contributions are *very* welcome, whether on the documentation or on
the code.
You may:
- report any [issue](https://github.com/xen-api/issues)
you've encountered;
- fork and create a pull request.
## License
ISC © [Julien Fontanet](https://github.com/julien-f)

View File

@@ -1,50 +0,0 @@
#!/usr/bin/env node
process.env.DEBUG = '*'
const defer = require('golike-defer').default
const pump = require('pump')
const { fromCallback } = require('promise-toolbox')
const { createClient } = require('../')
const { createOutputStream, resolveRef } = require('./utils')
defer(async ($defer, args) => {
let raw = false
if (args[0] === '--raw') {
raw = true
args.shift()
}
if (args.length < 2) {
return console.log('Usage: export-vdi [--raw] <XS URL> <VDI identifier> [<VHD file>]')
}
const xapi = createClient({
allowUnauthorized: true,
url: args[0],
watchEvents: false
})
await xapi.connect()
$defer(() => xapi.disconnect())
// https://xapi-project.github.io/xen-api/snapshots.html#downloading-a-disk-or-snapshot
const exportStream = await xapi.getResource('/export_raw_vdi/', {
query: {
format: raw ? 'raw' : 'vhd',
vdi: await resolveRef(xapi, 'VDI', args[1])
}
})
console.warn('Export task:', exportStream.headers['task-id'])
await fromCallback(cb => pump(
exportStream,
createOutputStream(args[2]),
cb
))
})(process.argv.slice(2)).catch(
console.error.bind(console, 'error')
)

View File

@@ -1,44 +0,0 @@
#!/usr/bin/env node
process.env.DEBUG = '*'
const defer = require('golike-defer').default
const pump = require('pump')
const { fromCallback } = require('promise-toolbox')
const { createClient } = require('../')
const { createOutputStream, resolveRef } = require('./utils')
defer(async ($defer, args) => {
if (args.length < 2) {
return console.log('Usage: export-vm <XS URL> <VM identifier> [<XVA file>]')
}
const xapi = createClient({
allowUnauthorized: true,
url: args[0],
watchEvents: false
})
await xapi.connect()
$defer(() => xapi.disconnect())
// https://xapi-project.github.io/xen-api/importexport.html
const exportStream = await xapi.getResource('/export/', {
query: {
ref: await resolveRef(xapi, 'VM', args[1]),
use_compression: 'true'
}
})
console.warn('Export task:', exportStream.headers['task-id'])
await fromCallback(cb => pump(
exportStream,
createOutputStream(args[2]),
cb
))
})(process.argv.slice(2)).catch(
console.error.bind(console, 'error')
)

View File

@@ -1,40 +0,0 @@
#!/usr/bin/env node
process.env.DEBUG = '*'
const defer = require('golike-defer').default
const { createClient } = require('../')
const { createInputStream, resolveRef } = require('./utils')
defer(async ($defer, args) => {
let raw = false
if (args[0] === '--raw') {
raw = true
args.shift()
}
if (args.length < 2) {
return console.log('Usage: import-vdi [--raw] <XS URL> <VDI identifier> [<VHD file>]')
}
const xapi = createClient({
allowUnauthorized: true,
url: args[0],
watchEvents: false
})
await xapi.connect()
$defer(() => xapi.disconnect())
// https://xapi-project.github.io/xen-api/snapshots.html#uploading-a-disk-or-snapshot
await xapi.putResource(createInputStream(args[2]), '/import_raw_vdi/', {
query: {
format: raw ? 'raw' : 'vhd',
vdi: await resolveRef(xapi, 'VDI', args[1])
}
})
})(process.argv.slice(2)).catch(
console.error.bind(console, 'error')
)

View File

@@ -1,31 +0,0 @@
#!/usr/bin/env node
process.env.DEBUG = '*'
const defer = require('golike-defer').default
const { createClient } = require('../')
const { createInputStream, resolveRef } = require('./utils')
defer(async ($defer, args) => {
if (args.length < 1) {
return console.log('Usage: import-vm <XS URL> [<XVA file>] [<SR identifier>]')
}
const xapi = createClient({
allowUnauthorized: true,
url: args[0],
watchEvents: false
})
await xapi.connect()
$defer(() => xapi.disconnect())
// https://xapi-project.github.io/xen-api/importexport.html
await xapi.putResource(createInputStream(args[1]), '/import/', {
query: args[2] && { sr_id: await resolveRef(xapi, 'SR', args[2]) }
})
})(process.argv.slice(2)).catch(
console.error.bind(console, 'error')
)

View File

@@ -1,57 +0,0 @@
#!/usr/bin/env node
require('source-map-support').install()
const { forEach, size } = require('lodash')
const { createClient } = require('../')
// ===================================================================
if (process.argv.length < 3) {
return console.log('Usage: log-events <XS URL>')
}
// ===================================================================
// Creation
const xapi = createClient({
allowUnauthorized: true,
url: process.argv[2]
})
// ===================================================================
// Method call
xapi.connect().then(() => {
xapi.call('VM.get_all_records')
.then(function (vms) {
console.log('%s VMs fetched', size(vms))
})
.catch(function (error) {
console.error(error)
})
})
// ===================================================================
// Objects
const objects = xapi.objects
objects.on('add', objects => {
forEach(objects, object => {
console.log('+ %s: %s', object.$type, object.$id)
})
})
objects.on('update', objects => {
forEach(objects, object => {
console.log('± %s: %s', object.$type, object.$id)
})
})
objects.on('remove', objects => {
forEach(objects, (value, id) => {
console.log('- %s', id)
})
})

View File

@@ -1,6 +0,0 @@
{
"dependencies": {
"golike-defer": "^0.1.0",
"pump": "^1.0.2"
}
}

View File

@@ -1,43 +0,0 @@
const { createReadStream, createWriteStream, statSync } = require('fs')
const { PassThrough } = require('stream')
const { isOpaqueRef } = require('../')
exports.createInputStream = path => {
if (path === undefined || path === '-') {
return process.stdin
}
const { size } = statSync(path)
const stream = createReadStream(path)
stream.length = size
return stream
}
exports.createOutputStream = path => {
if (path !== undefined && path !== '-') {
return createWriteStream(path)
}
// introduce a through stream because stdout is not a normal stream!
const stream = new PassThrough()
stream.pipe(process.stdout)
return stream
}
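// Resolve an opaque ref, a UUID or a unique name label to an opaque ref:
// refs are returned as is, otherwise a UUID lookup is attempted first,
// then a name label lookup which must match exactly one object.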
exports.resolveRef = (xapi, type, refOrUuidOrNameLabel) =>
isOpaqueRef(refOrUuidOrNameLabel)
? refOrUuidOrNameLabel
: xapi.call(`${type}.get_by_uuid`, refOrUuidOrNameLabel).catch(() =>
xapi
.call(`${type}.get_by_name_label`, refOrUuidOrNameLabel)
.then(refs => {
if (refs.length === 1) {
return refs[0]
}
throw new Error(
`no single match for ${type} with name label ${refOrUuidOrNameLabel}`
)
})
)

View File

@@ -1,4 +0,0 @@
set yrange [ 0 : ]
set grid
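# columns 2 to 4 of plot.dat are RSS, heap total and heap used (in MB),
# as logged by memory-test.js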
plot for [i=2:4] "plot.dat" using 1:i with lines

View File

@@ -1,89 +0,0 @@
{
"name": "xen-api",
"version": "0.16.9",
"license": "ISC",
"description": "Connector to the Xen API",
"keywords": [
"xen",
"api",
"xen-api",
"xenapi",
"xapi"
],
"homepage": "https://github.com/vatesfr/xen-orchestra/tree/master/packages/xen-api",
"bugs": "https://github.com/vatesfr/xen-orchestra/issues",
"repository": {
"type": "git",
"url": "https://github.com/vatesfr/xen-orchestra.git"
},
"author": {
"name": "Julien Fontanet",
"email": "julien.fontanet@vates.fr"
},
"preferGlobal": false,
"main": "dist/",
"bin": {
"xen-api": "dist/cli.js"
},
"files": [
"dist/"
],
"engines": {
"node": ">=4"
},
"dependencies": {
"babel-polyfill": "^6.23.0",
"blocked": "^1.2.1",
"debug": "^3.1.0",
"event-to-promise": "^0.8.0",
"exec-promise": "^0.7.0",
"http-request-plus": "^0.5.0",
"iterable-backoff": "^0.0.0",
"json-rpc-protocol": "^0.11.2",
"kindof": "^2.0.0",
"lodash": "^4.17.4",
"make-error": "^1.3.0",
"minimist": "^1.2.0",
"ms": "^2.1.1",
"promise-toolbox": "^0.9.5",
"pw": "0.0.4",
"xmlrpc": "^1.3.2",
"xo-collection": "^0.4.1"
},
"devDependencies": {
"babel-cli": "^6.24.1",
"babel-plugin-lodash": "^3.3.2",
"babel-plugin-transform-decorators-legacy": "^1.3.4",
"babel-plugin-transform-function-bind": "^6.22.0",
"babel-preset-env": "^1.6.0",
"babel-preset-stage-3": "^6.24.1",
"cross-env": "^5.1.3",
"rimraf": "^2.6.1"
},
"scripts": {
"build": "cross-env NODE_ENV=production babel --source-maps --out-dir=dist/ src/",
"dev": "cross-env NODE_ENV=development babel --watch --source-maps --out-dir=dist/ src/",
"plot": "gnuplot -p memory-test.gnu",
"prebuild": "rimraf dist/",
"predev": "yarn run prebuild",
"prepublishOnly": "yarn run build"
},
"babel": {
"plugins": [
"lodash",
"transform-decorators-legacy",
"transform-function-bind"
],
"presets": [
[
"env",
{
"targets": {
"node": 4
}
}
],
"stage-3"
]
}
}

View File

@@ -1,106 +0,0 @@
#!/usr/bin/env node
import 'babel-polyfill'
import blocked from 'blocked'
import createDebug from 'debug'
import eventToPromise from 'event-to-promise'
import execPromise from 'exec-promise'
import minimist from 'minimist'
import pw from 'pw'
import { asCallback, fromCallback } from 'promise-toolbox'
import { filter, find, isArray } from 'lodash'
import { start as createRepl } from 'repl'
import { createClient } from './'
// ===================================================================
function askPassword (prompt = 'Password: ') {
if (prompt) {
process.stdout.write(prompt)
}
return new Promise(resolve => {
pw(resolve)
})
}
// ===================================================================
const usage = 'Usage: xen-api <url> [<user> [<password>]]'
const main = async args => {
const opts = minimist(args, {
boolean: ['allow-unauthorized', 'help', 'read-only', 'verbose'],
alias: {
'allow-unauthorized': 'au',
debounce: 'd',
help: 'h',
'read-only': 'ro',
verbose: 'v',
},
})
if (opts.help) {
return usage
}
if (opts.verbose) {
// Does not work perfectly.
//
// https://github.com/visionmedia/debug/pull/156
createDebug.enable('xen-api,xen-api:*')
}
let auth
if (opts._.length > 1) {
const [, user, password = await askPassword()] = opts._
auth = { user, password }
}
{
const debug = createDebug('xen-api:perf')
blocked(ms => {
debug('blocked for %sms', ms | 0)
})
}
const xapi = createClient({
url: opts._[0],
allowUnauthorized: opts.au,
auth,
debounce: opts.debounce != null ? +opts.debounce : null,
readOnly: opts.ro,
})
await xapi.connect()
const repl = createRepl({
prompt: `${xapi._humanId}> `,
})
repl.context.xapi = xapi
repl.context.find = predicate => find(xapi.objects.all, predicate)
repl.context.findAll = predicate => filter(xapi.objects.all, predicate)
// Make the REPL wait for promise completion.
repl.eval = (evaluate => (cmd, context, filename, cb) => {
;fromCallback(cb => {
evaluate.call(repl, cmd, context, filename, cb)
})
.then(value => (isArray(value) ? Promise.all(value) : value))
::asCallback(cb)
})(repl.eval)
await eventToPromise(repl, 'exit')
try {
await xapi.disconnect()
} catch (error) {}
}
export default main
if (!module.parent) {
execPromise(main)
}

File diff suppressed because it is too large

View File

@@ -1,32 +0,0 @@
#!/usr/bin/env node
import { delay as pDelay } from 'promise-toolbox'
import { createClient } from './'
const xapi = (() => {
const [, , url, user, password] = process.argv
return createClient({
auth: { user, password },
url,
watchEvents: false,
})
})()
xapi
.connect()
// Get the pool record's ref.
.then(() => xapi.call('pool.get_all'))
// Injects lots of events.
.then(([poolRef]) => {
const loop = () =>
xapi
.call('event.inject', 'pool', poolRef)
::pDelay(10) // A small delay is required to avoid overloading the Xen API.
.then(loop)
return loop()
})

View File

@@ -1,22 +0,0 @@
#!/usr/bin/env node
import { createClient } from './'
let i = 0
setInterval(() => {
const usage = process.memoryUsage()
console.log(
'%s %s %s %s',
i++,
Math.round(usage.rss / 1e6),
Math.round(usage.heapTotal / 1e6),
Math.round(usage.heapUsed / 1e6)
)
}, 1e2)
const [, , url, user, password] = process.argv
createClient({
auth: { user, password },
readOnly: true,
url,
}).connect()

View File

@@ -1,3 +0,0 @@
import makeError from 'make-error'
export const UnsupportedTransport = makeError('UnsupportedTransport')

View File

@@ -1,36 +0,0 @@
import jsonRpc from './json-rpc'
import xmlRpc from './xml-rpc'
import xmlRpcJson from './xml-rpc-json'
import { UnsupportedTransport } from './_utils'
const factories = [jsonRpc, xmlRpcJson, xmlRpc]
const { length } = factories
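// Usage sketch (hypothetical name for the default export): the returned
// function tries JSON-RPC first, then falls back to the next transport
// whenever an UnsupportedTransport error is raised.
//
//   const call = createTransport({ url, allowUnauthorized: false })
//   const result = await call('session.login_with_password', [user, password])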
export default opts => {
let i = 0
let call
function create () {
const current = factories[i++](opts)
if (i < length) {
const currentI = i
call = (method, args) =>
current(method, args).catch(error => {
if (error instanceof UnsupportedTransport) {
if (currentI === i) {
// not changed yet
create()
}
return call(method, args)
}
throw error
})
} else {
call = current
}
}
create()
return (method, args) => call(method, args)
}

View File

@@ -1,43 +0,0 @@
import httpRequestPlus from 'http-request-plus'
import { format, parse } from 'json-rpc-protocol'
import { UnsupportedTransport } from './_utils'
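// Probes the XAPI JSON-RPC endpoint (/jsonrpc); an HTTP error or an
// unparsable body is reported as UnsupportedTransport so that the factory
// in ./index can fall back to an XML-RPC based transport.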
export default ({ allowUnauthorized, url }) => {
return (method, args) =>
httpRequestPlus
.post(url, {
rejectUnauthorized: !allowUnauthorized,
body: format.request(0, method, args),
headers: {
Accept: 'application/json',
'Content-Type': 'application/json',
},
path: '/jsonrpc',
})
.readAll('utf8')
.then(
text => {
let response
try {
response = parse(text)
} catch (error) {
throw new UnsupportedTransport()
}
if (response.type === 'response') {
return response.result
}
throw response.error
},
error => {
if (error.response !== undefined) {
// HTTP error
throw new UnsupportedTransport()
}
throw error
}
)
}

View File

@@ -1,87 +0,0 @@
import { createClient, createSecureClient } from 'xmlrpc'
import { promisify } from 'promise-toolbox'
import { UnsupportedTransport } from './_utils'
const logError = error => {
if (error.res) {
console.error(
'XML-RPC Error: %s (response status %s)',
error.message,
error.res.statusCode
)
console.error('%s', error.body)
}
throw error
}
const SPECIAL_CHARS = {
'\r': '\\r',
'\t': '\\t',
}
const SPECIAL_CHARS_RE = new RegExp(Object.keys(SPECIAL_CHARS).join('|'), 'g')
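// e.g. (hypothetical payload): '{"name":"a\r"}' containing a literal
// carriage return is invalid JSON; after replacement it becomes
// '{"name":"a\\r"}', which parses.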
const parseResult = result => {
const status = result.Status
// Return the plain result if it does not have a valid XAPI
// format.
if (status === undefined) {
return result
}
if (status !== 'Success') {
throw result.ErrorDescription
}
const value = result.Value
// XAPI returns an empty string (invalid JSON) for an empty
// result.
if (value === '') {
return ''
}
try {
return JSON.parse(value)
} catch (error) {
// XAPI JSON sometimes contains invalid characters.
if (!(error instanceof SyntaxError)) {
throw error
}
}
let replaced = false
const fixedValue = value.replace(SPECIAL_CHARS_RE, match => {
replaced = true
return SPECIAL_CHARS[match]
})
if (replaced) {
try {
return JSON.parse(fixedValue)
} catch (error) {
if (!(error instanceof SyntaxError)) {
throw error
}
}
}
throw new UnsupportedTransport()
}
export default ({
allowUnauthorized,
url: { hostname, path, port, protocol },
}) => {
const client = (protocol === 'https:' ? createSecureClient : createClient)({
host: hostname,
path: '/json',
port,
rejectUnauthorized: !allowUnauthorized,
})
const call = promisify(client.methodCall, client)
return (method, args) => call(method, args).then(parseResult, logError)
}

View File

@@ -1,45 +0,0 @@
import { createClient, createSecureClient } from 'xmlrpc'
import { promisify } from 'promise-toolbox'
const logError = error => {
if (error.res) {
console.error(
'XML-RPC Error: %s (response status %s)',
error.message,
error.res.statusCode
)
console.error('%s', error.body)
}
throw error
}
const parseResult = result => {
const status = result.Status
// Return the plain result if it does not have a valid XAPI
// format.
if (status === undefined) {
return result
}
if (status !== 'Success') {
throw result.ErrorDescription
}
return result.Value
}
export default ({
allowUnauthorized,
url: { hostname, path, port, protocol },
}) => {
const client = (protocol === 'https:' ? createSecureClient : createClient)({
host: hostname,
port,
rejectUnauthorized: !allowUnauthorized,
})
const call = promisify(client.methodCall, client)
return (method, args) => call(method, args).then(parseResult, logError)
}

View File

@@ -1,13 +0,0 @@
{
"comments": false,
"compact": true,
"presets": [
[
"env", {
"targets": {
"node": 4
}
}
]
]
}

Some files were not shown because too many files have changed in this diff.