Compare commits

..

1 Commits

Author SHA1 Message Date
Julien Fontanet
766175b4a0 feat(xo-server): multi processes 2018-05-15 15:47:32 +02:00
137 changed files with 5630 additions and 6544 deletions

1
.gitignore vendored
View File

@@ -10,7 +10,6 @@
/packages/vhd-cli/src/commands/index.js
/packages/xen-api/examples/node_modules/
/packages/xen-api/plot.dat
/packages/xo-server/.xo-server.*

View File

@@ -1,56 +1,40 @@
'use strict'
const PLUGINS_RE = /^(?:@babel\/|babel-)plugin-.+$/
const PLUGINS_RE = /^(?:@babel\/plugin-.+|babel-plugin-lodash)$/
const PRESETS_RE = /^@babel\/preset-.+$/
const NODE_ENV = process.env.NODE_ENV || 'development'
const __PROD__ = NODE_ENV === 'production'
const __TEST__ = NODE_ENV === 'test'
const configs = {
'@babel/plugin-proposal-decorators': {
legacy: true,
},
'@babel/preset-env' (pkg) {
return {
debug: !__TEST__,
loose: true,
shippedProposals: true,
targets: __PROD__
? (() => {
let node = (pkg.engines || {}).node
if (node !== undefined) {
const trimChars = '^=>~'
while (trimChars.includes(node[0])) {
node = node.slice(1)
}
return { node: node }
}
})()
: { browsers: '', node: 'current' },
useBuiltIns: '@babel/polyfill' in (pkg.dependencies || {}) && 'usage',
}
},
}
const getConfig = (key, ...args) => {
const config = configs[key]
return config === undefined
? {}
: typeof config === 'function'
? config(...args)
: config
}
module.exports = function (pkg, plugins, presets) {
plugins === undefined && (plugins = {})
presets === undefined && (presets = {})
presets['@babel/preset-env'] = {
debug: !__TEST__,
loose: true,
shippedProposals: true,
targets: __PROD__
? (() => {
let node = (pkg.engines || {}).node
if (node !== undefined) {
const trimChars = '^=>~'
while (trimChars.includes(node[0])) {
node = node.slice(1)
}
return { node: node }
}
})()
: { browsers: '', node: 'current' },
useBuiltIns: '@babel/polyfill' in (pkg.dependencies || {}) && 'usage',
}
Object.keys(pkg.devDependencies || {}).forEach(name => {
if (!(name in presets) && PLUGINS_RE.test(name)) {
plugins[name] = getConfig(name, pkg)
plugins[name] = {}
} else if (!(name in presets) && PRESETS_RE.test(name)) {
presets[name] = getConfig(name, pkg)
presets[name] = {}
}
})

View File

@@ -41,10 +41,10 @@
"moment-timezone": "^0.5.14"
},
"devDependencies": {
"@babel/cli": "7.0.0-beta.49",
"@babel/core": "7.0.0-beta.49",
"@babel/preset-env": "7.0.0-beta.49",
"@babel/preset-flow": "7.0.0-beta.49",
"@babel/cli": "7.0.0-beta.44",
"@babel/core": "7.0.0-beta.44",
"@babel/preset-env": "7.0.0-beta.44",
"@babel/preset-flow": "7.0.0-beta.44",
"cross-env": "^5.1.3",
"rimraf": "^2.6.2"
},

View File

@@ -1,6 +1,6 @@
{
"name": "@xen-orchestra/fs",
"version": "0.1.0",
"version": "0.0.0",
"license": "AGPL-3.0",
"description": "The File System for Xen Orchestra backups.",
"keywords": [],
@@ -20,10 +20,10 @@
"node": ">=6"
},
"dependencies": {
"@babel/runtime": "^7.0.0-beta.49",
"@babel/runtime": "^7.0.0-beta.44",
"@marsaud/smb2-promise": "^0.2.1",
"execa": "^0.10.0",
"fs-extra": "^6.0.1",
"fs-extra": "^5.0.0",
"get-stream": "^3.0.0",
"lodash": "^4.17.4",
"promise-toolbox": "^0.9.5",
@@ -32,12 +32,12 @@
"xo-remote-parser": "^0.3"
},
"devDependencies": {
"@babel/cli": "7.0.0-beta.49",
"@babel/core": "7.0.0-beta.49",
"@babel/plugin-proposal-function-bind": "7.0.0-beta.49",
"@babel/plugin-transform-runtime": "^7.0.0-beta.49",
"@babel/preset-env": "7.0.0-beta.49",
"@babel/preset-flow": "7.0.0-beta.49",
"@babel/cli": "7.0.0-beta.44",
"@babel/core": "7.0.0-beta.44",
"@babel/plugin-proposal-function-bind": "7.0.0-beta.44",
"@babel/plugin-transform-runtime": "^7.0.0-beta.44",
"@babel/preset-env": "7.0.0-beta.44",
"@babel/preset-flow": "7.0.0-beta.44",
"babel-plugin-lodash": "^3.3.2",
"cross-env": "^5.1.3",
"index-modules": "^0.3.0",

View File

@@ -92,22 +92,6 @@ export default class RemoteHandlerAbstract {
await promise
}
async read (
file: File,
buffer: Buffer,
position?: number
): Promise<{| bytesRead: number, buffer: Buffer |}> {
return this._read(file, buffer, position)
}
_read (
file: File,
buffer: Buffer,
position?: number
): Promise<{| bytesRead: number, buffer: Buffer |}> {
throw new Error('Not implemented')
}
async readFile (file: string, options?: Object): Promise<Buffer> {
return this._readFile(file, options)
}
@@ -142,10 +126,7 @@ export default class RemoteHandlerAbstract {
prependDir = false,
}: { filter?: (name: string) => boolean, prependDir?: boolean } = {}
): Promise<string[]> {
let entries = await this._list(dir)
if (filter !== undefined) {
entries = entries.filter(filter)
}
const entries = await this._list(dir)
if (prependDir) {
entries.forEach((entry, i) => {
@@ -153,7 +134,7 @@ export default class RemoteHandlerAbstract {
})
}
return entries
return filter === undefined ? entries : entries.filter(filter)
}
async _list (dir: string): Promise<string[]> {

View File

@@ -50,24 +50,6 @@ export default class LocalHandler extends RemoteHandlerAbstract {
await fs.writeFile(path, data, options)
}
async _read (file, buffer, position) {
const needsClose = typeof file === 'string'
file = needsClose ? await fs.open(this._getFilePath(file), 'r') : file.fd
try {
return await fs.read(
file,
buffer,
0,
buffer.length,
position === undefined ? null : position
)
} finally {
if (needsClose) {
await fs.close(file)
}
}
}
async _readFile (file, options) {
return fs.readFile(this._getFilePath(file), options)
}

View File

@@ -0,0 +1,24 @@
/benchmark/
/benchmarks/
*.bench.js
*.bench.js.map
/examples/
example.js
example.js.map
*.example.js
*.example.js.map
/fixture/
/fixtures/
*.fixture.js
*.fixture.js.map
*.fixtures.js
*.fixtures.js.map
/test/
/tests/
*.spec.js
*.spec.js.map
__snapshots__/

View File

@@ -0,0 +1,149 @@
# @xen-orchestra/log [![Build Status](https://travis-ci.org/vatesfr/xen-orchestra.png?branch=master)](https://travis-ci.org/vatesfr/xen-orchestra)
> Logging system for Xen Orchestra
## Install
Installation of the [npm package](https://npmjs.org/package/@xen-orchestra/log):
```
> npm install --save @xen-orchestra/log
```
## Usage
Everywhere something should be logged:
```js
import { createLogger } from '@xen-orchestra/log'
const log = createLogger('xo-server-api')
log.warn('foo')
```
Then at application level you can choose how to handle these logs:
```js
import configure from '@xen-orchestra/log/configure'
import createConsoleTransport from '@xen-orchestra/log/transports/console'
import createEmailTransport from '@xen-orchestra/log/transports/email'
configure([
{
// if filter is a string, then it is pattern
// (https://github.com/visionmedia/debug#wildcards) which is
// matched against the namespace of the logs
filter: process.env.DEBUG,
transport: createConsoleTransport()
},
{
// only levels >= warn
level: 'warn',
transport: createEmailTransport({
service: 'gmail',
auth: {
user: 'jane.smith@gmail.com',
pass: 'H&NbECcpXF|pyXe#%ZEb'
},
from: 'jane.smith@gmail.com',
to: [
'jane.smith@gmail.com',
'sam.doe@yahoo.com'
]
})
}
])
```
### Transports
#### Console
```js
import createConsoleTransport from '@xen-orchestra/log/transports/console'
configure(createConsoleTransport())
```
#### Email
Optional dependency:
```
> yarn add nodemailer pretty-format
```
Configuration:
```js
import createEmailTransport from '@xen-orchestra/log/transports/email'
configure(createEmailTransport({
service: 'gmail',
auth: {
user: 'jane.smith@gmail.com',
pass: 'H&NbECcpXF|pyXe#%ZEb'
},
from: 'jane.smith@gmail.com',
to: [
'jane.smith@gmail.com',
'sam.doe@yahoo.com'
]
}))
```
#### Syslog
Optional dependency:
```
> yarn add split-host syslog-client
```
Configuration:
```js
import createSyslogTransport from '@xen-orchestra/log/transports/syslog'
// By default, log to udp://localhost:514
configure(createSyslogTransport())
// But TCP, a different host, or a different port can be used
configure(createSyslogTransport('tcp://syslog.company.lan'))
```
## Development
```
# Install dependencies
> yarn
# Run the tests
> yarn test
# Continuously compile
> yarn dev
# Continuously run the tests
> yarn dev-test
# Build for production (automatically called by npm install)
> yarn build
```
## Contributions
Contributions are *very* welcome, either on the documentation or on
the code.
You may:
- report any [issue](https://github.com/vatesfr/xo-web/issues/)
you've encountered;
- fork and create a pull request.
## License
ISC © [Vates SAS](https://vates.fr)

View File

@@ -0,0 +1 @@
module.exports = require('./dist/configure')

View File

@@ -0,0 +1,52 @@
{
"private": true,
"name": "@xen-orchestra/log",
"version": "0.0.0",
"license": "ISC",
"description": "",
"keywords": [],
"homepage": "https://github.com/vatesfr/xen-orchestra/tree/master/packages/@xen-orchestra/log",
"bugs": "https://github.com/vatesfr/xen-orchestra/issues",
"repository": {
"type": "git",
"url": "https://github.com/vatesfr/xen-orchestra.git"
},
"author": {
"name": "Julien Fontanet",
"email": "julien.fontanet@vates.fr"
},
"preferGlobal": false,
"main": "dist/",
"bin": {},
"files": [
"dist/"
],
"browserslist": [
">2%"
],
"engines": {
"node": ">=4"
},
"dependencies": {
"@babel/polyfill": "7.0.0-beta.42",
"lodash": "^4.17.4",
"promise-toolbox": "^0.9.5"
},
"devDependencies": {
"@babel/cli": "7.0.0-beta.42",
"@babel/core": "7.0.0-beta.42",
"@babel/preset-env": "7.0.0-beta.42",
"@babel/preset-flow": "7.0.0-beta.42",
"babel-plugin-lodash": "^3.3.2",
"cross-env": "^5.1.3",
"rimraf": "^2.6.2"
},
"scripts": {
"build": "cross-env NODE_ENV=production babel --source-maps --out-dir=dist/ src/",
"clean": "rimraf dist/",
"dev": "cross-env NODE_ENV=development babel --watch --source-maps --out-dir=dist/ src/",
"prebuild": "yarn run clean",
"predev": "yarn run prebuild",
"prepublishOnly": "yarn run build"
}
}

View File

@@ -0,0 +1,105 @@
import createConsoleTransport from './transports/console'
import LEVELS, { resolve } from './levels'
import { compileGlobPattern } from './utils'
// ===================================================================
// Normalizes a transport configuration into a single transport function.
//
// Accepted forms:
// - a function: used as is
// - an array: every entry is normalized, and the resulting transports are
//   all invoked in order
// - an object `{ transport, filter?, level? }`: the transport is wrapped so
//   that it only receives logs matching the filter and/or reaching the level
const createTransport = config => {
  if (typeof config === 'function') {
    return config
  }

  if (Array.isArray(config)) {
    const transports = config.map(createTransport)
    return function () {
      const n = transports.length
      for (let i = 0; i < n; ++i) {
        transports[i].apply(this, arguments)
      }
    }
  }

  let { filter, transport } = config
  const level = resolve(config.level)

  if (filter === undefined) {
    if (level === undefined) {
      return transport
    }

    const unguarded = transport
    return function (log) {
      if (log.level >= level) {
        return unguarded.apply(this, arguments)
      }
    }
  }

  if (typeof filter === 'string') {
    // string filters are debug-style glob patterns matched against the
    // namespace of the logs
    const re = compileGlobPattern(filter)
    filter = log => re.test(log.namespace)
  }

  const unguarded = transport
  return function (log) {
    // a log goes through either because it reaches the level threshold (if
    // any) or because it matches the filter
    if ((level !== undefined && log.level >= level) || filter(log)) {
      return unguarded.apply(this, arguments)
    }
  }
}
// Default application-level transport: log everything at INFO or above to
// the console, plus any namespace enabled through the DEBUG or NODE_DEBUG
// environment variables.
let transport = createTransport({
  // display warnings or above, and all that are enabled via DEBUG or
  // NODE_DEBUG env
  filter: process.env.DEBUG || process.env.NODE_DEBUG,
  level: LEVELS.INFO,
  transport: createConsoleTransport(),
})

// Well-known key under which the active transport is exposed on `global`
// (Symbol.for uses the global symbol registry — presumably so that loggers
// and configuration still find each other across duplicated copies of this
// module; TODO confirm). Falls back to a plain string when Symbol is not
// available.
const symbol =
  typeof Symbol !== 'undefined'
    ? Symbol.for('@xen-orchestra/log')
    : '@@@xen-orchestra/log'

// note: the wrapper reads the current `transport` binding at call time,
// which is what allows `configure()` to replace it later
global[symbol] = log => transport(log)

// Replaces the active transport with one built from the given configuration
// (see createTransport above for the accepted forms).
export const configure = config => {
  transport = createTransport(config)
}
// -------------------------------------------------------------------
// Routes process-wide uncaught errors to the given logger:
// - process `uncaughtException`, `unhandledRejection` and `warning` events
// - `error` events emitted on any EventEmitter without an error listener
//
// Returns a function which undoes the patching.
export const catchGlobalErrors = logger => {
  // patch process
  const onUncaughtException = error => {
    logger.error('uncaught exception', { error })
  }
  const onUnhandledRejection = error => {
    logger.warn('possibly unhandled rejection', { error })
  }
  const onWarning = error => {
    logger.warn('Node warning', { error })
  }
  process.on('uncaughtException', onUncaughtException)
  process.on('unhandledRejection', onUnhandledRejection)
  process.on('warning', onWarning)

  // patch EventEmitter: an `error` event with no listener is logged instead
  // of following the default emit behavior
  const EventEmitter = require('events')
  const { prototype } = EventEmitter
  const { emit } = prototype
  function patchedEmit (event, error) {
    event === 'error' && !this.listenerCount(event)
      ? logger.error('unhandled error event', { error })
      : emit.apply(this, arguments)
  }
  prototype.emit = patchedEmit

  return () => {
    process.removeListener('uncaughtException', onUncaughtException)
    process.removeListener('unhandledRejection', onUnhandledRejection)
    process.removeListener('warning', onWarning)

    // only restore the original emit if nothing else patched it in between
    if (prototype.emit === patchedEmit) {
      prototype.emit = emit
    }
  }
}

View File

@@ -0,0 +1,65 @@
import createTransport from './transports/console'
import LEVELS from './levels'
// Well-known key under which the application-configured transport is exposed
// on `global` (same key as in the configure module), with a string fallback
// when Symbol is not available.
const symbol =
  typeof Symbol !== 'undefined'
    ? Symbol.for('@xen-orchestra/log')
    : '@@@xen-orchestra/log'

// Install a default transport when none has been configured yet.
if (!(symbol in global)) {
  // the default behavior, without requiring `configure`, is to avoid
  // logging anything unless it's a real error (strictly above WARN)
  const transport = createTransport()
  global[symbol] = log => log.level > LEVELS.WARN && transport(log)
}
// -------------------------------------------------------------------
// Plain record describing a single log entry.
//
// `data` is an arbitrary payload, `level` a numeric level (see ./levels),
// `namespace` the logger name, `message` the log text and `time` a Date.
function Log (data, level, namespace, message, time) {
  // same property creation order as listing them one by one
  Object.assign(this, { data, level, namespace, message, time })
}
// A logger tags every entry with its namespace.
function Logger (namespace) {
  this._namespace = namespace

  // eagerly bind every level method (fatal/error/…) so they can be passed
  // around detached from the instance
  Object.keys(LEVELS).forEach(name => {
    const method = name.toLowerCase()
    this[method] = this[method].bind(this)
  })
}
const { prototype } = Logger

// Generate one logging method per level (fatal, error, warn, info, debug);
// each builds a Log record and hands it to the globally registered
// transport.
for (const name in LEVELS) {
  const level = LEVELS[name]
  prototype[name.toLowerCase()] = function (message, data) {
    global[symbol](new Log(data, level, this._namespace, message, new Date()))
  }
}
// Wraps `fn` so that any failure — a synchronous throw or a rejected
// promise — is logged through this logger at the warn level with the given
// message, then rethrown/repropagated to the caller.
prototype.wrap = function (message, fn) {
  const logger = this
  const warnAndRethrow = error => {
    logger.warn(message, { error })
    throw error
  }
  return function () {
    try {
      const result = fn.apply(this, arguments)

      // if the result is thenable, attach the handler to rejections only:
      // passing it as the first argument (onFulfilled) would log and throw
      // on *success*
      const then = result != null && result.then
      return typeof then === 'function'
        ? then.call(result, undefined, warnAndRethrow)
        : result
    } catch (error) {
      warnAndRethrow(error)
    }
  }
}
// Public factory: returns a logger whose entries are tagged with `namespace`.
const createLogger = namespace => new Logger(namespace)
export { createLogger }

View File

@@ -0,0 +1,24 @@
// Numeric log levels, keyed by name (prototype-less so names can be safely
// enumerated).
const LEVELS = Object.create(null)
export { LEVELS as default }

// Values follow node-bunyan:
// https://github.com/trentm/node-bunyan#levels
const DEFINITIONS = [
  ['FATAL', 60], // service/app is about to go down
  ['ERROR', 50], // fatal for the current action
  ['WARN', 40], // something went wrong but it's not fatal
  ['INFO', 30], // detail on unusual but normal operation
  ['DEBUG', 20],
]
DEFINITIONS.forEach(([name, value]) => {
  LEVELS[name] = value
})

// Reverse mapping: numeric value → level name.
export const NAMES = Object.create(null)
Object.keys(LEVELS).forEach(name => {
  NAMES[LEVELS[name]] = name
})

// Accepts either a numeric level (returned unchanged) or a
// case-insensitive level name (resolved to its numeric value, undefined if
// unknown).
export const resolve = level =>
  typeof level === 'string' ? LEVELS[level.toUpperCase()] : level

Object.freeze(LEVELS)
Object.freeze(NAMES)

View File

@@ -0,0 +1,32 @@
/* eslint-env jest */

import { forEach, isInteger } from 'lodash'

import LEVELS, { NAMES, resolve } from './levels'

describe('LEVELS', () => {
  it('maps level names to their integer values', () => {
    forEach(LEVELS, (value, name) => {
      expect(isInteger(value)).toBe(true)
    })
  })
})

describe('NAMES', () => {
  it('maps level values to their names', () => {
    forEach(LEVELS, (value, name) => {
      expect(NAMES[value]).toBe(name)
    })
  })
})

describe('resolve()', () => {
  it('returns level values either from values or names', () => {
    // numeric values are returned unchanged
    forEach(LEVELS, value => {
      expect(resolve(value)).toBe(value)
    })
    // names resolve to numbers (NAMES keys are strings, hence the +value)
    forEach(NAMES, (name, value) => {
      expect(resolve(name)).toBe(+value)
    })
  })
})

View File

@@ -0,0 +1,20 @@
import LEVELS, { NAMES } from '../levels'
// Bind console methods (necessary for browsers)
// Bind console methods once (necessary for browsers, where they must be
// invoked with `console` as their receiver)
const boundDebug = console.log.bind(console)
const boundInfo = console.info.bind(console)
const boundWarn = console.warn.bind(console)
const boundError = console.error.bind(console)

const { ERROR, INFO, WARN } = LEVELS

// Formats a log entry and writes it with the console method matching its
// level; the data payload (if any) is written as a second entry.
const consoleTransport = ({ data, level, namespace, message, time }) => {
  let fn
  if (level < INFO) {
    fn = boundDebug
  } else if (level < WARN) {
    fn = boundInfo
  } else if (level < ERROR) {
    fn = boundWarn
  } else {
    fn = boundError
  }

  fn('%s - %s - [%s] %s', time.toISOString(), namespace, NAMES[level], message)
  if (data != null) {
    fn(data)
  }
}

// The factory always hands out the same shared transport function.
export default () => consoleTransport

View File

@@ -0,0 +1,68 @@
import prettyFormat from 'pretty-format' // eslint-disable-line node/no-extraneous-import
import { createTransport } from 'nodemailer' // eslint-disable-line node/no-extraneous-import
import { fromCallback } from 'promise-toolbox'
import { evalTemplate, required } from '../utils'
import { NAMES } from '../levels'
// Creates a transport sending each log by email through nodemailer.
//
// `from` and `to` are required; `subject` is a template where `{{level}}`,
// `{{namespace}}`, `{{time}}` and `{{message}}` are substituted for each
// log entry.
export default ({
  // transport options (https://nodemailer.com/smtp/)
  auth,
  authMethod,
  host,
  ignoreTLS,
  port,
  proxy,
  requireTLS,
  secure,
  service,
  tls,

  // message options (https://nodemailer.com/message/)
  bcc,
  cc,
  from = required('from'),
  to = required('to'),
  subject = '[{{level}} - {{namespace}}] {{time}} {{message}}',
}) => {
  const transporter = createTransport(
    {
      auth,
      authMethod,
      host,
      ignoreTLS,
      port,
      proxy,
      requireTLS,
      secure,
      service,
      tls,

      // hardening: keep nodemailer from accessing files/URLs referenced by
      // message content (see nodemailer security options)
      disableFileAccess: true,
      disableUrlAccess: true,
    },
    // message defaults merged into every sendMail() call below
    {
      bcc,
      cc,
      from,
      to,
    }
  )

  return log =>
    fromCallback(cb =>
      transporter.sendMail(
        {
          subject: evalTemplate(
            subject,
            key =>
              key === 'level'
                ? NAMES[log.level]
                : key === 'time' ? log.time.toISOString() : log[key]
          ),
          text: prettyFormat(log.data),
        },
        cb
      )
    )
}

View File

@@ -0,0 +1,42 @@
import splitHost from 'split-host' // eslint-disable-line node/no-extraneous-import node/no-missing-import
import { createClient, Facility, Severity, Transport } from 'syslog-client' // eslint-disable-line node/no-extraneous-import node/no-missing-import
import { fromCallback } from 'promise-toolbox'
import { startsWith } from 'lodash'
import LEVELS from '../levels'
// https://github.com/paulgrove/node-syslog-client#syslogseverity
const LEVEL_TO_SEVERITY = {
  [LEVELS.FATAL]: Severity.Critical,
  [LEVELS.ERROR]: Severity.Error,
  [LEVELS.WARN]: Severity.Warning,
  [LEVELS.INFO]: Severity.Informational,
  [LEVELS.DEBUG]: Severity.Debug,
}

const facility = Facility.User

// Creates a transport sending each log to a syslog server.
//
// `target` is optional (defaults to udp://localhost:514) and may be a
// `tcp://host[:port]` or `udp://host[:port]` URL.
export default target => {
  const opts = {}
  if (target !== undefined) {
    if (startsWith(target, 'tcp://')) {
      target = target.slice(6)
      opts.transport = Transport.Tcp
    } else if (startsWith(target, 'udp://')) {
      target = target.slice(6)
      // the syslog-client constant is `Udp` (`Ucp` does not exist)
      opts.transport = Transport.Udp
    }
    ;({ host: target, port: opts.port } = splitHost(target))
  }

  const client = createClient(target, opts)

  return log =>
    fromCallback(cb =>
      // the callback must be forwarded to client.log, otherwise the promise
      // returned by fromCallback would never settle
      client.log(
        log.message,
        {
          facility,
          severity: LEVEL_TO_SEVERITY[log.level],
        },
        cb
      )
    )
}

View File

@@ -0,0 +1,62 @@
import escapeRegExp from 'lodash/escapeRegExp'
// ===================================================================
// Matches `{{key}}` placeholders (non-greedy so multiple placeholders on
// one line are handled independently).
const TPL_RE = /\{\{(.+?)\}\}/g

// Replaces every `{{key}}` in `tpl`, resolving keys either through a lookup
// function or through a plain object of values.
export const evalTemplate = (tpl, data) => {
  const lookup =
    typeof data === 'function' ? key => data(key) : key => data[key]
  return tpl.replace(TPL_RE, (_, key) => lookup(key))
}
// -------------------------------------------------------------------
// Compiles a single glob fragment to regexp source: literal characters are
// escaped and `*` becomes `.*`.
const compileGlobPatternFragment = fragment =>
  fragment
    .split('*')
    .map(escapeRegExp)
    .join('.*')

// Compiles a debug-style pattern list
// (https://github.com/visionmedia/debug#wildcards) into a single anchored
// RegExp: comma/whitespace-separated globs, a leading `-` excludes a glob.
export const compileGlobPattern = pattern => {
  const excluded = []
  const included = []
  for (const fragment of pattern.split(/[\s,]+/)) {
    if (fragment[0] === '-') {
      excluded.push(fragment.slice(1))
    } else {
      included.push(fragment)
    }
  }

  let source = '^'
  if (excluded.length !== 0) {
    // negative lookahead rejects anything matching an excluded glob
    source += '(?!' + excluded.map(compileGlobPatternFragment).join('|') + ')'
  }
  source +=
    included.length !== 0
      ? '(?:' + included.map(compileGlobPatternFragment).join('|') + ')'
      : '.*'
  source += '$'

  return new RegExp(source)
}
// -------------------------------------------------------------------
// Marks a parameter as required: used as a default value, it throws as soon
// as the caller omits that parameter.
export const required = name => {
  const error = new Error(`missing required arg ${name}`)
  throw error
}
// -------------------------------------------------------------------
// Converts an Error into a plain serializable object: `message`, `name` and
// `stack` are non-enumerable own properties and would otherwise be dropped
// by spreading / JSON serialization.
export const serializeError = error => {
  const serialized = { ...error }
  serialized.message = error.message
  serialized.name = error.name
  serialized.stack = error.stack
  return serialized
}

View File

@@ -0,0 +1,13 @@
/* eslint-env jest */

import { compileGlobPattern } from './utils'

describe('compileGlobPattern()', () => {
  it('works', () => {
    // 'foo' and 'ba*' are included, 'bar' is explicitly excluded
    const re = compileGlobPattern('foo, ba*, -bar')
    expect(re.test('foo')).toBe(true)
    expect(re.test('bar')).toBe(false)
    expect(re.test('baz')).toBe(true)
    expect(re.test('qux')).toBe(false)
  })
})

View File

@@ -0,0 +1 @@
dist/transports

View File

@@ -1,26 +1,6 @@
# ChangeLog
## *next*
### Enhancements
- Hide legacy backup creation view [#2956](https://github.com/vatesfr/xen-orchestra/issues/2956)
- [Delta Backup NG logs] Display whether the export is a full or a delta [#2711](https://github.com/vatesfr/xen-orchestra/issues/2711)
- Copy VDIs' UUID from SR/disks view [#3051](https://github.com/vatesfr/xen-orchestra/issues/3051)
- [Backup NG] New option to shutdown VMs before snapshotting them [#3058](https://github.com/vatesfr/xen-orchestra/issues/3058#event-1673756438)
- [Backup NG form] Improve feedback [#2711](https://github.com/vatesfr/xen-orchestra/issues/2711)
- [Backup NG] Different retentions for backup and replication [#2895](https://github.com/vatesfr/xen-orchestra/issues/2895)
- Possibility to use a fast clone when creating a VM from a snapshot [#2937](https://github.com/vatesfr/xen-orchestra/issues/2937)
### Bugs
- update the xentools search item to return the version number of installed xentools [#3015](https://github.com/vatesfr/xen-orchestra/issues/3015)
- Fix Nagios backup reports [#2991](https://github.com/vatesfr/xen-orchestra/issues/2991)
- Fix the retry of a single failed/interrupted VM backup [#2912](https://github.com/vatesfr/xen-orchestra/issues/2912#issuecomment-395480321)
- New VM with Self: filter out networks that are not in the template's pool [#3011](https://github.com/vatesfr/xen-orchestra/issues/3011)
- [Backup NG] Auto-detect when a full export is necessary.
## **5.20.0** (2018-05-31)
## **5.20.0** (planned 2018-05-31)
### Enhancements
@@ -29,6 +9,8 @@
- [Patches] ignore XS upgrade in missing patches counter [#2866](https://github.com/vatesfr/xen-orchestra/issues/2866)
- [Health] List VM snapshots related to non-existing backup jobs/schedules [#2828](https://github.com/vatesfr/xen-orchestra/issues/2828)
### Bugs
## **5.19.0** (2018-05-01)
### Enhancements

View File

@@ -1,19 +0,0 @@
### Check list
> Check items when done or if not relevant
- [ ] PR reference the relevant issue (e.g. `Fixes #007`)
- [ ] if UI changes, a screenshot has been added to the PR
- [ ] CHANGELOG updated
- [ ] documentation updated
### Process
1. create a PR as soon as possible
1. mark it as `WiP:` (Work in Progress) if not ready to be merged
1. when you want a review, add a reviewer
1. if necessary, update your PR, and re-add a reviewer
### List of packages to release
> No need to mention xo-server and xo-web.

View File

@@ -1,5 +0,0 @@
module.exports = {
// Necessary for jest to be able to find the `.babelrc.js` closest to the file
// instead of only the one in this directory.
babelrcRoots: true,
}

View File

@@ -1,6 +0,0 @@
declare module 'limit-concurrency-decorator' {
declare function limitConcurrencyDecorator(
concurrency: number
): <T: Function>(T) => T
declare export default typeof limitConcurrencyDecorator
}

View File

@@ -1,8 +1,4 @@
declare module 'lodash' {
declare export function countBy<K, V>(
object: { [K]: V },
iteratee: K | ((V, K) => string)
): { [string]: number }
declare export function forEach<K, V>(
object: { [K]: V },
iteratee: (V, K) => void
@@ -24,10 +20,5 @@ declare module 'lodash' {
iteratee: (V1, K) => V2
): { [K]: V2 }
declare export function noop(...args: mixed[]): void
declare export function some<T>(
collection: T[],
iteratee: (T, number) => boolean
): boolean
declare export function sum(values: number[]): number
declare export function values<K, V>(object: { [K]: V }): V[]
}

View File

@@ -1,10 +1,8 @@
{
"devDependencies": {
"@babel/core": "^7.0.0-beta.49",
"@babel/register": "^7.0.0-beta.49",
"babel-core": "^7.0.0-0",
"@babel/register": "^7.0.0-beta.44",
"babel-7-jest": "^21.3.2",
"babel-eslint": "^8.1.2",
"babel-jest": "^23.0.1",
"benchmark": "^2.1.4",
"eslint": "^4.14.0",
"eslint-config-standard": "^11.0.0-beta.0",
@@ -15,22 +13,23 @@
"eslint-plugin-react": "^7.6.1",
"eslint-plugin-standard": "^3.0.1",
"exec-promise": "^0.7.0",
"flow-bin": "^0.73.0",
"flow-bin": "^0.69.0",
"globby": "^8.0.0",
"husky": "^0.14.3",
"jest": "^23.0.1",
"jest": "^22.0.4",
"lodash": "^4.17.4",
"prettier": "^1.10.2",
"promise-toolbox": "^0.9.5",
"sorted-object": "^2.0.1"
},
"engines": {
"yarn": "^1.7.0"
"yarn": "^1.2.1"
},
"jest": {
"collectCoverage": true,
"projects": [
"<rootDir>"
"<rootDir>",
"<rootDir>/packages/xo-web"
],
"testEnvironment": "node",
"testPathIgnorePatterns": [
@@ -39,6 +38,14 @@
],
"testRegex": "\\.spec\\.js$",
"transform": {
"/@xen-orchestra/cron/.+\\.jsx?$": "babel-7-jest",
"/@xen-orchestra/fs/.+\\.jsx?$": "babel-7-jest",
"/packages/complex-matcher/.+\\.jsx?$": "babel-7-jest",
"/packages/value-matcher/.+\\.jsx?$": "babel-7-jest",
"/packages/vhd-lib/.+\\.jsx?$": "babel-7-jest",
"/packages/xo-cli/.+\\.jsx?$": "babel-7-jest",
"/packages/xo-server/.+\\.jsx?$": "babel-7-jest",
"/packages/xo-vmdk-to-vhd/.+\\.jsx?$": "babel-7-jest",
"\\.jsx?$": "babel-jest"
}
},

View File

@@ -30,9 +30,9 @@
"lodash": "^4.17.4"
},
"devDependencies": {
"@babel/cli": "7.0.0-beta.49",
"@babel/core": "7.0.0-beta.49",
"@babel/preset-env": "7.0.0-beta.49",
"@babel/cli": "7.0.0-beta.44",
"@babel/core": "7.0.0-beta.44",
"@babel/preset-env": "7.0.0-beta.44",
"babel-plugin-lodash": "^3.3.2",
"cross-env": "^5.1.1",
"rimraf": "^2.6.2"

View File

@@ -28,10 +28,10 @@
},
"dependencies": {},
"devDependencies": {
"@babel/cli": "7.0.0-beta.49",
"@babel/core": "7.0.0-beta.49",
"@babel/preset-env": "7.0.0-beta.49",
"@babel/preset-flow": "7.0.0-beta.49",
"@babel/cli": "7.0.0-beta.44",
"@babel/core": "7.0.0-beta.44",
"@babel/preset-env": "7.0.0-beta.44",
"@babel/preset-flow": "7.0.0-beta.44",
"cross-env": "^5.1.3",
"rimraf": "^2.6.2"
},

View File

@@ -23,20 +23,21 @@
"dist/"
],
"engines": {
"node": ">=6"
"node": ">=4"
},
"dependencies": {
"@xen-orchestra/fs": "^0.1.0",
"@xen-orchestra/fs": "^0.0.0",
"babel-runtime": "^6.22.0",
"exec-promise": "^0.7.0",
"struct-fu": "^1.2.0",
"vhd-lib": "^0.1.3"
"vhd-lib": "^0.0.0"
},
"devDependencies": {
"@babel/cli": "^7.0.0-beta.49",
"@babel/core": "^7.0.0-beta.49",
"@babel/plugin-transform-runtime": "^7.0.0-beta.49",
"@babel/preset-env": "^7.0.0-beta.49",
"babel-cli": "^6.24.1",
"babel-plugin-lodash": "^3.3.2",
"babel-plugin-transform-runtime": "^6.23.0",
"babel-preset-env": "^1.5.2",
"babel-preset-stage-3": "^6.24.1",
"cross-env": "^5.1.3",
"execa": "^0.10.0",
"index-modules": "^0.3.0",
@@ -50,5 +51,22 @@
"prebuild": "rimraf dist/ && index-modules --cjs-lazy src/commands",
"predev": "yarn run prebuild",
"prepare": "yarn run build"
},
"babel": {
"plugins": [
"lodash",
"transform-runtime"
],
"presets": [
[
"env",
{
"targets": {
"node": 4
}
}
],
"stage-3"
]
}
}

View File

@@ -1,6 +1,6 @@
{
"name": "vhd-lib",
"version": "0.1.3",
"version": "0.0.0",
"license": "AGPL-3.0",
"description": "Primitives for VHD file handling",
"keywords": [],
@@ -20,30 +20,30 @@
"node": ">=6"
},
"dependencies": {
"@babel/runtime": "^7.0.0-beta.49",
"@babel/runtime": "^7.0.0-beta.44",
"@xen-orchestra/fs": "^0.0.0",
"async-iterator-to-stream": "^1.0.2",
"execa": "^0.10.0",
"from2": "^2.3.0",
"fs-extra": "^6.0.1",
"fs-extra": "^5.0.0",
"get-stream": "^3.0.0",
"limit-concurrency-decorator": "^0.4.0",
"promise-toolbox": "^0.9.5",
"struct-fu": "^1.2.0",
"uuid": "^3.0.1"
"uuid": "^3.0.1",
"tmp": "^0.0.33"
},
"devDependencies": {
"@babel/cli": "7.0.0-beta.49",
"@babel/core": "7.0.0-beta.49",
"@babel/plugin-transform-runtime": "^7.0.0-beta.49",
"@babel/preset-env": "7.0.0-beta.49",
"@babel/preset-flow": "7.0.0-beta.49",
"@xen-orchestra/fs": "^0.1.0",
"@babel/cli": "7.0.0-beta.44",
"@babel/core": "7.0.0-beta.44",
"@babel/plugin-transform-runtime": "^7.0.0-beta.44",
"@babel/preset-env": "7.0.0-beta.44",
"@babel/preset-flow": "7.0.0-beta.44",
"babel-plugin-lodash": "^3.3.2",
"cross-env": "^5.1.3",
"execa": "^0.10.0",
"fs-promise": "^2.0.0",
"get-stream": "^3.0.0",
"index-modules": "^0.3.0",
"rimraf": "^2.6.2",
"tmp": "^0.0.33"
"rimraf": "^2.6.2"
},
"scripts": {
"build": "cross-env NODE_ENV=production babel --source-maps --out-dir=dist/ src/",

View File

@@ -33,7 +33,7 @@ export function createFooter (
currentSize: size,
diskGeometry: geometry,
diskType,
uuid: generateUuid(null, Buffer.allocUnsafe(16)),
uuid: generateUuid(null, []),
})
checksumStruct(footer, fuFooter)
return footer

View File

@@ -40,7 +40,7 @@ export const fuFooter = fu.struct([
]),
fu.uint32('diskType'), // 60 Disk type, must be equal to HARD_DISK_TYPE_DYNAMIC/HARD_DISK_TYPE_DIFFERENCING.
fu.uint32('checksum'), // 64
fu.byte('uuid', 16), // 68
fu.uint8('uuid', 16), // 68
fu.char('saved'), // 84
fu.char('hidden'), // 85 TODO: should probably be merged in reserved
fu.char('reserved', 426), // 86
@@ -55,7 +55,7 @@ export const fuHeader = fu.struct([
fu.uint32('maxTableEntries'), // Max entries in the Block Allocation Table.
fu.uint32('blockSize'), // Block size in bytes. Default (2097152 => 2MB)
fu.uint32('checksum'),
fu.byte('parentUuid', 16),
fu.uint8('parentUuid', 16),
fu.uint32('parentTimestamp'),
fu.uint32('reserved1'),
fu.char16be('parentUnicodeName', 512),

View File

@@ -1,4 +1,3 @@
import assert from 'assert'
import asyncIteratorToStream from 'async-iterator-to-stream'
import computeGeometryForSize from './_computeGeometryForSize'
@@ -26,16 +25,62 @@ function createBAT (
bat,
bitmapSize
) {
const vhdOccupationTable = []
let currentVhdPositionSector = firstBlockPosition / SECTOR_SIZE
blockAddressList.forEach(blockPosition => {
assert.strictEqual(blockPosition % SECTOR_SIZE, 0)
const vhdTableIndex = Math.floor(blockPosition / VHD_BLOCK_SIZE_BYTES)
const scaled = blockPosition / VHD_BLOCK_SIZE_BYTES
const vhdTableIndex = Math.floor(scaled)
if (bat.readUInt32BE(vhdTableIndex * 4) === BLOCK_UNUSED) {
bat.writeUInt32BE(currentVhdPositionSector, vhdTableIndex * 4)
currentVhdPositionSector +=
(bitmapSize + VHD_BLOCK_SIZE_BYTES) / SECTOR_SIZE
}
// not using bit operators to avoid the int32 coercion, that way we can go to 53 bits
vhdOccupationTable[vhdTableIndex] =
(vhdOccupationTable[vhdTableIndex] || 0) +
Math.pow(2, (scaled % 1) * ratio)
})
return vhdOccupationTable
}
function createBitmap (bitmapSize, ratio, vhdOccupationBucket) {
const bitmap = Buffer.alloc(bitmapSize)
for (let i = 0; i < VHD_BLOCK_SIZE_SECTORS / ratio; i++) {
// do not shift to avoid int32 coercion
if ((vhdOccupationBucket * Math.pow(2, -i)) & 1) {
for (let j = 0; j < ratio; j++) {
setBitmap(bitmap, i * ratio + j)
}
}
}
return bitmap
}
function * yieldIfNotEmpty (buffer) {
if (buffer.length > 0) {
yield buffer
}
}
async function * generateFileContent (
blockIterator,
bitmapSize,
ratio,
vhdOccupationTable
) {
let currentVhdBlockIndex = -1
let currentBlockBuffer = Buffer.alloc(0)
for await (const next of blockIterator) {
const batEntry = Math.floor(next.offsetBytes / VHD_BLOCK_SIZE_BYTES)
if (batEntry !== currentVhdBlockIndex) {
yield * yieldIfNotEmpty(currentBlockBuffer)
currentBlockBuffer = Buffer.alloc(VHD_BLOCK_SIZE_BYTES)
currentVhdBlockIndex = batEntry
yield createBitmap(bitmapSize, ratio, vhdOccupationTable[batEntry])
}
next.data.copy(currentBlockBuffer, next.offsetBytes % VHD_BLOCK_SIZE_BYTES)
}
yield * yieldIfNotEmpty(currentBlockBuffer)
}
export default asyncIteratorToStream(async function * (
@@ -57,8 +102,7 @@ export default asyncIteratorToStream(async function * (
}
const maxTableEntries = Math.ceil(diskSize / VHD_BLOCK_SIZE_BYTES) + 1
const tablePhysicalSizeBytes =
Math.ceil(maxTableEntries * 4 / SECTOR_SIZE) * SECTOR_SIZE
const tablePhysicalSizeBytes = Math.ceil(maxTableEntries * 4 / 512) * 512
const batPosition = FOOTER_SIZE + HEADER_SIZE
const firstBlockPosition = batPosition + tablePhysicalSizeBytes
@@ -79,50 +123,21 @@ export default asyncIteratorToStream(async function * (
const bitmapSize =
Math.ceil(VHD_BLOCK_SIZE_SECTORS / 8 / SECTOR_SIZE) * SECTOR_SIZE
const bat = Buffer.alloc(tablePhysicalSizeBytes, 0xff)
createBAT(firstBlockPosition, blockAddressList, ratio, bat, bitmapSize)
let position = 0
function * yieldAndTrack (buffer, expectedPosition) {
if (expectedPosition !== undefined) {
assert.strictEqual(position, expectedPosition)
}
if (buffer.length > 0) {
yield buffer
position += buffer.length
}
}
async function * generateFileContent (blockIterator, bitmapSize, ratio) {
let currentBlock = -1
let currentVhdBlockIndex = -1
let currentBlockWithBitmap = Buffer.alloc(0)
for await (const next of blockIterator) {
currentBlock++
assert.strictEqual(blockAddressList[currentBlock], next.offsetBytes)
const batIndex = Math.floor(next.offsetBytes / VHD_BLOCK_SIZE_BYTES)
if (batIndex !== currentVhdBlockIndex) {
if (currentVhdBlockIndex >= 0) {
yield * yieldAndTrack(
currentBlockWithBitmap,
bat.readUInt32BE(currentVhdBlockIndex * 4) * SECTOR_SIZE
)
}
currentBlockWithBitmap = Buffer.alloc(bitmapSize + VHD_BLOCK_SIZE_BYTES)
currentVhdBlockIndex = batIndex
}
const blockOffset =
(next.offsetBytes / SECTOR_SIZE) % VHD_BLOCK_SIZE_SECTORS
for (let bitPos = 0; bitPos < VHD_BLOCK_SIZE_SECTORS / ratio; bitPos++) {
setBitmap(currentBlockWithBitmap, blockOffset + bitPos)
}
next.data.copy(
currentBlockWithBitmap,
bitmapSize + next.offsetBytes % VHD_BLOCK_SIZE_BYTES
)
}
yield * yieldAndTrack(currentBlockWithBitmap)
}
yield * yieldAndTrack(footer, 0)
yield * yieldAndTrack(header, FOOTER_SIZE)
yield * yieldAndTrack(bat, FOOTER_SIZE + HEADER_SIZE)
yield * generateFileContent(blockIterator, bitmapSize, ratio)
yield * yieldAndTrack(footer)
const vhdOccupationTable = createBAT(
firstBlockPosition,
blockAddressList,
ratio,
bat,
bitmapSize
)
yield footer
yield header
yield bat
yield * generateFileContent(
blockIterator,
bitmapSize,
ratio,
vhdOccupationTable
)
yield footer
})

View File

@@ -102,15 +102,15 @@ test('ReadableSparseVHDStream can handle a sparse file', async () => {
data: Buffer.alloc(blockSize, 'azerzaerazeraze', 'ascii'),
},
{
offsetBytes: blockSize * 100,
offsetBytes: blockSize * 5,
data: Buffer.alloc(blockSize, 'gdfslkdfguer', 'ascii'),
},
]
const fileSize = blockSize * 110
const fileSize = blockSize * 10
const stream = createReadableSparseVHDStream(
fileSize,
blockSize,
blocks.map(b => b.offsetBytes),
[100, 700],
blocks
)
const pipe = stream.pipe(createWriteStream('output.vhd'))

View File

@@ -1,4 +1,5 @@
import assert from 'assert'
import getStream from 'get-stream'
import { fromEvent } from 'promise-toolbox'
import constantStream from './_constant-stream'
@@ -92,14 +93,20 @@ export default class Vhd {
// Read functions.
// =================================================================
async _read (start, n) {
const { bytesRead, buffer } = await this._handler.read(
this._path,
Buffer.alloc(n),
start
)
assert.equal(bytesRead, n)
return buffer
_readStream (start, n) {
return this._handler.createReadStream(this._path, {
start,
end: start + n - 1, // end is inclusive
})
}
_read (start, n) {
return this._readStream(start, n)
.then(getStream.buffer)
.then(buf => {
assert.equal(buf.length, n)
return buf
})
}
containsBlock (id) {
@@ -329,11 +336,11 @@ export default class Vhd {
`freeFirstBlockSpace: move first block ${firstSector} -> ${newFirstSector}`
)
// copy the first block at the end
const block = await this._read(
const stream = await this._readStream(
sectorsToBytes(firstSector),
fullBlockSize
)
await this._write(block, sectorsToBytes(newFirstSector))
await this._write(stream, sectorsToBytes(newFirstSector))
await this._setBatEntry(first, newFirstSector)
await this.writeFooter(true)
spaceNeededBytes -= this.fullBlockSize
@@ -469,12 +476,12 @@ export default class Vhd {
// For each sector of block data...
const { sectorsPerBlock } = child
let parentBitmap = null
for (let i = 0; i < sectorsPerBlock; i++) {
// If no changes on one sector, skip.
if (!mapTestBit(bitmap, i)) {
continue
}
let parentBitmap = null
let endSector = i + 1
// Count changed sectors.

View File

@@ -4,7 +4,7 @@ process.env.DEBUG = '*'
const defer = require('golike-defer').default
const pump = require('pump')
const { CancelToken, fromCallback } = require('promise-toolbox')
const { fromCallback } = require('promise-toolbox')
const { createClient } = require('../')
@@ -30,11 +30,8 @@ defer(async ($defer, args) => {
await xapi.connect()
$defer(() => xapi.disconnect())
const { cancel, token } = CancelToken.source()
process.on('SIGINT', cancel)
// https://xapi-project.github.io/xen-api/snapshots.html#downloading-a-disk-or-snapshot
const exportStream = await xapi.getResource(token, '/export_raw_vdi/', {
const exportStream = await xapi.getResource('/export_raw_vdi/', {
query: {
format: raw ? 'raw' : 'vhd',
vdi: await resolveRef(xapi, 'VDI', args[1])

View File

@@ -4,7 +4,7 @@ process.env.DEBUG = '*'
const defer = require('golike-defer').default
const pump = require('pump')
const { CancelToken, fromCallback } = require('promise-toolbox')
const { fromCallback } = require('promise-toolbox')
const { createClient } = require('../')
@@ -24,11 +24,8 @@ defer(async ($defer, args) => {
await xapi.connect()
$defer(() => xapi.disconnect())
const { cancel, token } = CancelToken.source()
process.on('SIGINT', cancel)
// https://xapi-project.github.io/xen-api/importexport.html
const exportStream = await xapi.getResource(token, '/export/', {
const exportStream = await xapi.getResource('/export/', {
query: {
ref: await resolveRef(xapi, 'VM', args[1]),
use_compression: 'true'

View File

@@ -3,7 +3,6 @@
process.env.DEBUG = '*'
const defer = require('golike-defer').default
const { CancelToken } = require('promise-toolbox')
const { createClient } = require('../')
@@ -29,11 +28,8 @@ defer(async ($defer, args) => {
await xapi.connect()
$defer(() => xapi.disconnect())
const { cancel, token } = CancelToken.source()
process.on('SIGINT', cancel)
// https://xapi-project.github.io/xen-api/snapshots.html#uploading-a-disk-or-snapshot
await xapi.putResource(token, createInputStream(args[2]), '/import_raw_vdi/', {
await xapi.putResource(createInputStream(args[2]), '/import_raw_vdi/', {
query: {
format: raw ? 'raw' : 'vhd',
vdi: await resolveRef(xapi, 'VDI', args[1])

View File

@@ -3,7 +3,6 @@
process.env.DEBUG = '*'
const defer = require('golike-defer').default
const { CancelToken } = require('promise-toolbox')
const { createClient } = require('../')
@@ -23,11 +22,8 @@ defer(async ($defer, args) => {
await xapi.connect()
$defer(() => xapi.disconnect())
const { cancel, token } = CancelToken.source()
process.on('SIGINT', cancel)
// https://xapi-project.github.io/xen-api/importexport.html
await xapi.putResource(token, createInputStream(args[1]), '/import/', {
await xapi.putResource(createInputStream(args[1]), '/import/', {
query: args[2] && { sr_id: await resolveRef(xapi, 'SR', args[2]) }
})
})(process.argv.slice(2)).catch(

View File

@@ -1,6 +1,6 @@
{
"dependencies": {
"golike-defer": "^0.4.1",
"pump": "^3.0.0"
"golike-defer": "^0.1.0",
"pump": "^1.0.2"
}
}

View File

@@ -1,30 +0,0 @@
# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY.
# yarn lockfile v1
end-of-stream@^1.1.0:
version "1.4.1"
resolved "https://registry.yarnpkg.com/end-of-stream/-/end-of-stream-1.4.1.tgz#ed29634d19baba463b6ce6b80a37213eab71ec43"
dependencies:
once "^1.4.0"
golike-defer@^0.4.1:
version "0.4.1"
resolved "https://registry.yarnpkg.com/golike-defer/-/golike-defer-0.4.1.tgz#7a1cd435d61e461305805d980b133a0f3db4e1cc"
once@^1.3.1, once@^1.4.0:
version "1.4.0"
resolved "https://registry.yarnpkg.com/once/-/once-1.4.0.tgz#583b1aa775961d4b113ac17d9c50baef9dd76bd1"
dependencies:
wrappy "1"
pump@^3.0.0:
version "3.0.0"
resolved "https://registry.yarnpkg.com/pump/-/pump-3.0.0.tgz#b4a2116815bde2f4e1ea602354e8c75565107a64"
dependencies:
end-of-stream "^1.1.0"
once "^1.3.1"
wrappy@1:
version "1.0.2"
resolved "https://registry.yarnpkg.com/wrappy/-/wrappy-1.0.2.tgz#b5243d8f3ec1aa35f1364605bc0d1036e30ab69f"

View File

@@ -1,6 +1,6 @@
{
"name": "xen-api",
"version": "0.16.10",
"version": "0.16.9",
"license": "ISC",
"description": "Connector to the Xen API",
"keywords": [

View File

@@ -96,7 +96,6 @@ class XapiError extends BaseError {
// slots than can be assigned later
this.method = undefined
this.url = undefined
this.task = undefined
}
}
@@ -189,9 +188,7 @@ const getTaskResult = task => {
return Promise.reject(new Cancel('task canceled'))
}
if (status === 'failure') {
const error = wrapError(task.error_info)
error.task = task
return Promise.reject(error)
return Promise.reject(wrapError(task.error_info))
}
if (status === 'success') {
// the result might be:
@@ -598,10 +595,7 @@ export class Xapi extends EventEmitter {
if (error != null && (response = error.response) != null) {
response.req.abort()
const {
headers: { location },
statusCode,
} = response
const { headers: { location }, statusCode } = response
if (statusCode === 302 && location !== undefined) {
return doRequest(location)
}
@@ -783,13 +777,15 @@ export class Xapi extends EventEmitter {
this._pool = object
const eventWatchers = this._eventWatchers
Object.keys(object.other_config).forEach(key => {
const eventWatcher = eventWatchers[key]
if (eventWatcher !== undefined) {
delete eventWatchers[key]
eventWatcher(object)
}
})
if (eventWatchers !== undefined) {
forEach(object.other_config, (_, key) => {
const eventWatcher = eventWatchers[key]
if (eventWatcher !== undefined) {
delete eventWatchers[key]
eventWatcher(object)
}
})
}
} else if (type === 'task') {
if (prev === undefined) {
++this._nTasks

View File

@@ -1,6 +1,6 @@
{
"name": "xo-acl-resolver",
"version": "0.2.4",
"version": "0.2.3",
"license": "ISC",
"description": "Xen-Orchestra internal: do ACLs resolution",
"keywords": [],

View File

@@ -50,9 +50,7 @@ const checkAuthorizationByTypes = {
network: or(checkSelf, checkMember('$pool')),
PIF: checkMember('$host'),
SR: or(checkSelf, checkMember('$container')),
SR: or(checkSelf, checkMember('$pool')),
task: checkMember('$host'),

View File

@@ -28,7 +28,7 @@
"node": ">=6"
},
"dependencies": {
"@babel/polyfill": "7.0.0-beta.49",
"@babel/polyfill": "7.0.0-beta.44",
"bluebird": "^3.5.1",
"chalk": "^2.2.0",
"event-to-promise": "^0.8.0",
@@ -49,10 +49,10 @@
"xo-lib": "^0.9.0"
},
"devDependencies": {
"@babel/cli": "7.0.0-beta.49",
"@babel/core": "7.0.0-beta.49",
"@babel/preset-env": "7.0.0-beta.49",
"@babel/preset-flow": "7.0.0-beta.49",
"@babel/cli": "7.0.0-beta.44",
"@babel/core": "7.0.0-beta.44",
"@babel/preset-env": "7.0.0-beta.44",
"@babel/preset-flow": "7.0.0-beta.44",
"babel-plugin-lodash": "^3.3.2",
"cross-env": "^5.1.3",
"rimraf": "^2.6.2"

View File

@@ -1,3 +0,0 @@
module.exports = require('../../@xen-orchestra/babel-config')(
require('./package.json')
)

View File

@@ -25,16 +25,17 @@
"node": ">=4"
},
"dependencies": {
"@babel/runtime": "^7.0.0-beta.49",
"babel-runtime": "^6.18.0",
"kindof": "^2.0.0",
"lodash": "^4.17.2",
"make-error": "^1.0.2"
},
"devDependencies": {
"@babel/cli": "^7.0.0-beta.49",
"@babel/core": "^7.0.0-beta.49",
"@babel/plugin-transform-runtime": "^7.0.0-beta.49",
"@babel/preset-env": "^7.0.0-beta.49",
"babel-cli": "^6.24.1",
"babel-plugin-lodash": "^3.3.2",
"babel-plugin-transform-runtime": "^6.23.0",
"babel-preset-env": "^1.5.2",
"babel-preset-stage-3": "^6.24.1",
"cross-env": "^5.1.3",
"event-to-promise": "^0.8.0",
"rimraf": "^2.6.1"
@@ -45,5 +46,22 @@
"prebuild": "rimraf dist/",
"predev": "yarn run prebuild",
"prepublishOnly": "yarn run build"
},
"babel": {
"plugins": [
"lodash",
"transform-runtime"
],
"presets": [
[
"env",
{
"targets": {
"node": 4
}
}
],
"stage-3"
]
}
}

View File

@@ -1,3 +0,0 @@
module.exports = require('../../@xen-orchestra/babel-config')(
require('./package.json')
)

View File

@@ -27,10 +27,10 @@
"lodash": "^4.13.1"
},
"devDependencies": {
"@babel/cli": "^7.0.0-beta.49",
"@babel/core": "^7.0.0-beta.49",
"@babel/preset-env": "^7.0.0-beta.49",
"babel-cli": "^6.24.1",
"babel-plugin-lodash": "^3.3.2",
"babel-preset-env": "^1.5.2",
"babel-preset-stage-3": "^6.24.1",
"cross-env": "^5.1.3",
"deep-freeze": "^0.0.1",
"rimraf": "^2.6.1"
@@ -41,5 +41,22 @@
"prebuild": "rimraf dist/",
"predev": "yarn run prebuild",
"prepare": "yarn run build"
},
"babel": {
"plugins": [
"lodash"
],
"presets": [
[
"env",
{
"targets": {
"browsers": "> 5%",
"node": 4
}
}
],
"stage-3"
]
}
}

View File

@@ -1,6 +1,6 @@
{
"name": "xo-server-backup-reports",
"version": "0.12.2",
"version": "0.11.0",
"license": "AGPL-3.0",
"description": "Backup reports plugin for XO-Server",
"keywords": [

View File

@@ -1,6 +1,7 @@
import humanFormat from 'human-format'
import moment from 'moment-timezone'
import { forEach, get, startCase } from 'lodash'
import { find, forEach, get, startCase } from 'lodash'
import pkg from '../package'
export const configurationSchema = {
@@ -36,12 +37,6 @@ const ICON_FAILURE = '🚨'
const ICON_SKIPPED = '⏩'
const ICON_SUCCESS = '✔'
const STATUS_ICON = {
skipped: ICON_SKIPPED,
success: ICON_SUCCESS,
failure: ICON_FAILURE,
}
const DATE_FORMAT = 'dddd, MMMM Do YYYY, h:mm:ss a'
const createDateFormater = timezone =>
timezone !== undefined
@@ -62,12 +57,10 @@ const formatSize = bytes =>
})
const formatSpeed = (bytes, milliseconds) =>
milliseconds > 0
? humanFormat((bytes * 1e3) / milliseconds, {
scale: 'binary',
unit: 'B/s',
})
: 'N/A'
humanFormat(bytes * 1e3 / milliseconds, {
scale: 'binary',
unit: 'B/s',
})
const logError = e => {
console.error('backup report error:', e)
@@ -102,42 +95,43 @@ class BackupReportsXoPlugin {
this._xo.removeListener('job:terminated', this._report)
}
_wrapper (status, job, schedule, runJobId) {
_wrapper (status, job, schedule) {
return new Promise(resolve =>
resolve(
job.type === 'backup'
? this._backupNgListener(status, job, schedule, runJobId)
: this._listener(status, job, schedule, runJobId)
? this._backupNgListener(status, job, schedule)
: this._listener(status, job, schedule)
)
).catch(logError)
}
async _backupNgListener (_1, _2, { timezone }, runJobId) {
async _backupNgListener (runJobId, _, { timezone }) {
const xo = this._xo
const log = await xo.getBackupNgLogs(runJobId)
const logs = await xo.getBackupNgLogs(runJobId)
const jobLog = logs['roots'][0]
const vmsTaskLog = logs[jobLog.id]
const { reportWhen, mode } = log.data || {}
if (
reportWhen === 'never' ||
(log.status === 'success' && reportWhen === 'failure')
) {
const { reportWhen, mode } = jobLog.data || {}
if (reportWhen === 'never') {
return
}
const jobName = (await xo.getJob(log.jobId, 'backup')).name
const formatDate = createDateFormater(timezone)
if (
(log.status === 'failure' || log.status === 'skipped') &&
log.result !== undefined
) {
const jobName = (await xo.getJob(jobLog.jobId, 'backup')).name
if (jobLog.error !== undefined) {
const [globalStatus, icon] =
jobLog.error.message === NO_VMS_MATCH_THIS_PATTERN
? ['Skipped', ICON_SKIPPED]
: ['Failure', ICON_FAILURE]
let markdown = [
`## Global status: ${log.status}`,
`## Global status: ${globalStatus}`,
'',
`- **mode**: ${mode}`,
`- **Start time**: ${formatDate(log.start)}`,
`- **End time**: ${formatDate(log.end)}`,
`- **Duration**: ${formatDuration(log.end - log.start)}`,
`- **Error**: ${log.result.message}`,
`- **Start time**: ${formatDate(jobLog.start)}`,
`- **End time**: ${formatDate(jobLog.end)}`,
`- **Duration**: ${formatDuration(jobLog.duration)}`,
`- **Error**: ${jobLog.error.message}`,
'---',
'',
`*${pkg.name} v${pkg.version}*`,
@@ -145,14 +139,12 @@ class BackupReportsXoPlugin {
markdown = markdown.join('\n')
return this._sendReport({
subject: `[Xen Orchestra] ${
log.status
} Backup report for ${jobName} ${STATUS_ICON[log.status]}`,
subject: `[Xen Orchestra] ${globalStatus} Backup report for ${jobName} ${icon}`,
markdown,
nagiosStatus: 2,
nagiosMarkdown: `[Xen Orchestra] [${
log.status
}] Backup report for ${jobName} - Error : ${log.result.message}`,
nagiosMarkdown: `[Xen Orchestra] [${globalStatus}] Backup report for ${jobName} - Error : ${
jobLog.error.message
}`,
})
}
@@ -165,12 +157,14 @@ class BackupReportsXoPlugin {
let globalTransferSize = 0
let nFailures = 0
let nSkipped = 0
for (const taskLog of log.tasks) {
if (taskLog.status === 'success' && reportWhen === 'failure') {
for (const vmTaskLog of vmsTaskLog || []) {
const vmTaskStatus = vmTaskLog.status
if (vmTaskStatus === 'success' && reportWhen === 'failure') {
return
}
const vmId = taskLog.data.id
const vmId = vmTaskLog.data.id
let vm
try {
vm = xo.getObject(vmId)
@@ -179,170 +173,136 @@ class BackupReportsXoPlugin {
`### ${vm !== undefined ? vm.name_label : 'VM not found'}`,
'',
`- **UUID**: ${vm !== undefined ? vm.uuid : vmId}`,
`- **Start time**: ${formatDate(taskLog.start)}`,
`- **End time**: ${formatDate(taskLog.end)}`,
`- **Duration**: ${formatDuration(taskLog.end - taskLog.start)}`,
`- **Start time**: ${formatDate(vmTaskLog.start)}`,
`- **End time**: ${formatDate(vmTaskLog.end)}`,
`- **Duration**: ${formatDuration(vmTaskLog.duration)}`,
]
const failedSubTasks = []
const snapshotText = []
const operationsText = []
const srsText = []
const remotesText = []
for (const subTaskLog of logs[vmTaskLog.taskId] || []) {
const { data, status, result, message } = subTaskLog
const icon =
subTaskLog.status === 'success' ? ICON_SUCCESS : ICON_FAILURE
const errorMessage = ` **Error**: ${get(result, 'message')}`
for (const subTaskLog of taskLog.tasks || []) {
if (
subTaskLog.message !== 'export' &&
subTaskLog.message !== 'snapshot'
) {
continue
}
const icon = STATUS_ICON[subTaskLog.status]
const errorMessage = ` - **Error**: ${get(
subTaskLog.result,
'message'
)}`
if (subTaskLog.message === 'snapshot') {
snapshotText.push(
`- **Snapshot** ${icon}`,
` - **Start time**: ${formatDate(subTaskLog.start)}`,
` - **End time**: ${formatDate(subTaskLog.end)}`
)
} else if (subTaskLog.data.type === 'remote') {
const id = subTaskLog.data.id
const remote = await xo.getRemote(id).catch(() => {})
if (message === 'snapshot') {
operationsText.push(`- **Snapshot** ${icon}`)
if (status === 'failure') {
failedSubTasks.push('Snapshot')
operationsText.push('', errorMessage)
}
} else if (data.type === 'remote') {
const remoteId = data.id
const remote = await xo.getRemote(remoteId).catch(() => {})
remotesText.push(
` - **${
`- **${
remote !== undefined ? remote.name : `Remote Not found`
}** (${id}) ${icon}`,
` - **Start time**: ${formatDate(subTaskLog.start)}`,
` - **End time**: ${formatDate(subTaskLog.end)}`,
` - **Duration**: ${formatDuration(
subTaskLog.end - subTaskLog.start
)}`
}** (${remoteId}) ${icon}`
)
if (subTaskLog.status === 'failure') {
failedSubTasks.push(remote !== undefined ? remote.name : id)
if (status === 'failure') {
failedSubTasks.push(remote !== undefined ? remote.name : remoteId)
remotesText.push('', errorMessage)
}
} else {
const id = subTaskLog.data.id
const srId = data.id
let sr
try {
sr = xo.getObject(id)
sr = xo.getObject(srId)
} catch (e) {}
const [srName, srUuid] =
sr !== undefined ? [sr.name_label, sr.uuid] : [`SR Not found`, id]
srsText.push(
` - **${srName}** (${srUuid}) ${icon}`,
` - **Start time**: ${formatDate(subTaskLog.start)}`,
` - **End time**: ${formatDate(subTaskLog.end)}`,
` - **Duration**: ${formatDuration(
subTaskLog.end - subTaskLog.start
)}`
)
if (subTaskLog.status === 'failure') {
failedSubTasks.push(sr !== undefined ? sr.name_label : id)
sr !== undefined ? [sr.name_label, sr.uuid] : [`SR Not found`, srId]
srsText.push(`- **${srName}** (${srUuid}) ${icon}`)
if (status === 'failure') {
failedSubTasks.push(sr !== undefined ? sr.name_label : srId)
srsText.push('', errorMessage)
}
}
forEach(subTaskLog.tasks, operationLog => {
if (
operationLog.message !== 'merge' &&
operationLog.message !== 'transfer'
) {
return
}
const operationInfoText = []
if (operationLog.status === 'success') {
const size = operationLog.result.size
if (operationLog.message === 'merge') {
globalMergeSize += size
} else {
globalTransferSize += size
}
operationInfoText.push(
` - **Size**: ${formatSize(size)}`,
` - **Speed**: ${formatSpeed(
size,
operationLog.end - operationLog.start
)}`
)
} else {
operationInfoText.push(
` - **Error**: ${get(operationLog.result, 'message')}`
)
}
const operationText = [
` - **${operationLog.message}** ${
STATUS_ICON[operationLog.status]
}`,
` - **Start time**: ${formatDate(operationLog.start)}`,
` - **End time**: ${formatDate(operationLog.end)}`,
` - **Duration**: ${formatDuration(
operationLog.end - operationLog.start
)}`,
...operationInfoText,
].join('\n')
if (get(subTaskLog, 'data.type') === 'remote') {
remotesText.push(operationText)
remotesText.join('\n')
}
if (get(subTaskLog, 'data.type') === 'SR') {
srsText.push(operationText)
srsText.join('\n')
}
})
}
if (operationsText.length !== 0) {
operationsText.unshift(`#### Operations`, '')
}
if (srsText.length !== 0) {
srsText.unshift(`- **SRs**`)
srsText.unshift(`#### SRs`, '')
}
if (remotesText.length !== 0) {
remotesText.unshift(`- **Remotes**`)
remotesText.unshift(`#### remotes`, '')
}
const subText = [...snapshotText, '', ...srsText, '', ...remotesText]
if (taskLog.result !== undefined) {
if (taskLog.status === 'skipped') {
const subText = [...operationsText, '', ...srsText, '', ...remotesText]
const result = vmTaskLog.result
if (vmTaskStatus === 'failure' && result !== undefined) {
const { message } = result
if (isSkippedError(result)) {
++nSkipped
skippedVmsText.push(
...text,
`- **Reason**: ${
taskLog.result.message === UNHEALTHY_VDI_CHAIN_ERROR
message === UNHEALTHY_VDI_CHAIN_ERROR
? UNHEALTHY_VDI_CHAIN_MESSAGE
: taskLog.result.message
: message
}`,
''
)
nagiosText.push(
`[(Skipped) ${vm !== undefined ? vm.name_label : 'undefined'} : ${
taskLog.result.message
} ]`
`[(Skipped) ${
vm !== undefined ? vm.name_label : 'undefined'
} : ${message} ]`
)
} else {
++nFailures
failedVmsText.push(
...text,
`- **Error**: ${taskLog.result.message}`,
''
)
failedVmsText.push(...text, `- **Error**: ${message}`, '')
nagiosText.push(
`[(Failed) ${vm !== undefined ? vm.name_label : 'undefined'} : ${
taskLog.result.message
} ]`
`[(Failed) ${
vm !== undefined ? vm.name_label : 'undefined'
} : ${message} ]`
)
}
} else {
if (taskLog.status === 'failure') {
let transferSize, transferDuration, mergeSize, mergeDuration
forEach(logs[vmTaskLog.taskId], ({ taskId }) => {
if (transferSize !== undefined) {
return false
}
const transferTask = find(logs[taskId], { message: 'transfer' })
if (transferTask !== undefined) {
transferSize = transferTask.result.size
transferDuration = transferTask.end - transferTask.start
}
const mergeTask = find(logs[taskId], { message: 'merge' })
if (mergeTask !== undefined) {
mergeSize = mergeTask.result.size
mergeDuration = mergeTask.end - mergeTask.start
}
})
if (transferSize !== undefined) {
globalTransferSize += transferSize
text.push(
`- **Transfer size**: ${formatSize(transferSize)}`,
`- **Transfer speed**: ${formatSpeed(
transferSize,
transferDuration
)}`
)
}
if (mergeSize !== undefined) {
globalMergeSize += mergeSize
text.push(
`- **Merge size**: ${formatSize(mergeSize)}`,
`- **Merge speed**: ${formatSpeed(mergeSize, mergeDuration)}`
)
}
if (vmTaskStatus === 'failure') {
++nFailures
failedVmsText.push(...text, '', '', ...subText, '')
nagiosText.push(
`[${
`[(Failed) ${
vm !== undefined ? vm.name_label : 'undefined'
}: (failed)[${failedSubTasks.toString()}]]`
)
@@ -351,16 +311,23 @@ class BackupReportsXoPlugin {
}
}
}
const globalSuccess = nFailures === 0 && nSkipped === 0
if (reportWhen === 'failure' && globalSuccess) {
return
}
const nVms = log.tasks.length
const nVms = vmsTaskLog.length
const nSuccesses = nVms - nFailures - nSkipped
const globalStatus = globalSuccess
? `Success`
: nFailures !== 0 ? `Failure` : `Skipped`
let markdown = [
`## Global status: ${log.status}`,
`## Global status: ${globalStatus}`,
'',
`- **mode**: ${mode}`,
`- **Start time**: ${formatDate(log.start)}`,
`- **End time**: ${formatDate(log.end)}`,
`- **Duration**: ${formatDuration(log.start - log.end)}`,
`- **Start time**: ${formatDate(jobLog.start)}`,
`- **End time**: ${formatDate(jobLog.end)}`,
`- **Duration**: ${formatDuration(jobLog.duration)}`,
`- **Successes**: ${nSuccesses} / ${nVms}`,
]
@@ -400,16 +367,17 @@ class BackupReportsXoPlugin {
markdown = markdown.join('\n')
return this._sendReport({
markdown,
subject: `[Xen Orchestra] ${log.status} Backup report for ${jobName} ${
STATUS_ICON[log.status]
subject: `[Xen Orchestra] ${globalStatus} Backup report for ${jobName} ${
globalSuccess
? ICON_SUCCESS
: nFailures !== 0 ? ICON_FAILURE : ICON_SKIPPED
}`,
nagiosStatus: log.status === 'success' ? 0 : 2,
nagiosMarkdown:
log.status === 'success'
? `[Xen Orchestra] [Success] Backup report for ${jobName}`
: `[Xen Orchestra] [${
nFailures !== 0 ? 'Failure' : 'Skipped'
}] Backup report for ${jobName} - VMs : ${nagiosText.join(' ')}`,
nagiosStatus: globalSuccess ? 0 : 2,
nagiosMarkdown: globalSuccess
? `[Xen Orchestra] [Success] Backup report for ${jobName}`
: `[Xen Orchestra] [${
nFailures !== 0 ? 'Failure' : 'Skipped'
}] Backup report for ${jobName} - VMs : ${nagiosText.join(' ')}`,
})
}
@@ -433,7 +401,7 @@ class BackupReportsXoPlugin {
}),
xo.sendPassiveCheck !== undefined &&
xo.sendPassiveCheck({
status: nagiosStatus,
nagiosStatus,
message: nagiosMarkdown,
}),
])
@@ -599,9 +567,7 @@ class BackupReportsXoPlugin {
const nSuccesses = nCalls - nFailures - nSkipped
const globalStatus = globalSuccess
? `Success`
: nFailures !== 0
? `Failure`
: `Skipped`
: nFailures !== 0 ? `Failure` : `Skipped`
let markdown = [
`## Global status: ${globalStatus}`,
@@ -659,9 +625,7 @@ class BackupReportsXoPlugin {
subject: `[Xen Orchestra] ${globalStatus} Backup report for ${tag} ${
globalSuccess
? ICON_SUCCESS
: nFailures !== 0
? ICON_FAILURE
: ICON_SKIPPED
: nFailures !== 0 ? ICON_FAILURE : ICON_SKIPPED
}`,
nagiosStatus: globalSuccess ? 0 : 2,
nagiosMarkdown: globalSuccess

View File

@@ -26,10 +26,10 @@
"lodash": "^4.17.4"
},
"devDependencies": {
"@babel/cli": "7.0.0-beta.49",
"@babel/core": "7.0.0-beta.49",
"@babel/preset-env": "7.0.0-beta.49",
"@babel/preset-flow": "^7.0.0-beta.49",
"@babel/cli": "7.0.0-beta.44",
"@babel/core": "7.0.0-beta.44",
"@babel/preset-env": "7.0.0-beta.44",
"@babel/preset-flow": "^7.0.0-beta.44",
"babel-plugin-lodash": "^3.3.2",
"cross-env": "^5.1.3",
"rimraf": "^2.6.2"

View File

@@ -1,6 +1,6 @@
{
"name": "xo-server-usage-report",
"version": "0.5.0",
"version": "0.4.2",
"license": "AGPL-3.0",
"description": "",
"keywords": [

View File

@@ -139,8 +139,8 @@ Handlebars.registerHelper(
new Handlebars.SafeString(
isFinite(+value) && +value !== 0
? (value = round(value, 2)) > 0
? `(<b style="color: green;">▲ ${value}%</b>)`
: `(<b style="color: red;">▼ ${String(value).slice(1)}%</b>)`
? `(<b style="color: green;">▲ ${value}</b>)`
: `(<b style="color: red;">▼ ${String(value).slice(1)}</b>)`
: ''
)
)
@@ -270,16 +270,12 @@ async function getHostsStats ({ runningHosts, xo }) {
function getSrsStats (xoObjects) {
return orderBy(
map(filter(xoObjects, obj => obj.type === 'SR' && obj.size > 0), sr => {
map(filter(xoObjects, { type: 'SR' }), sr => {
const total = sr.size / gibPower
const used = sr.physical_usage / gibPower
let name = sr.name_label
if (!sr.shared) {
name += ` (${find(xoObjects, { id: sr.$container }).name_label})`
}
return {
uuid: sr.uuid,
name,
name: sr.name_label,
total,
used,
free: total - used,

View File

@@ -1,31 +0,0 @@
#!/usr/bin/env node
'use strict'
// ===================================================================
// Better stack traces if possible.
require('../better-stacks')
// Use Bluebird for all promises as it provides better performance and
// less memory usage.
global.Promise = require('bluebird')
// Make unhandled rejected promises visible.
process.on('unhandledRejection', function (reason) {
console.warn('[Warn] Possibly unhandled rejection:', reason && reason.stack || reason)
})
;(function (EE) {
var proto = EE.prototype
var emit = proto.emit
proto.emit = function patchedError (event, error) {
if (event === 'error' && !this.listenerCount(event)) {
return console.warn('[Warn] Unhandled error event:', error && error.stack || error)
}
return emit.apply(this, arguments)
}
})(require('events').EventEmitter)
require('exec-promise')(require('../'))

View File

@@ -1,11 +0,0 @@
'use strict'
// ===================================================================
// Enable xo logs by default.
if (process.env.DEBUG === undefined) {
process.env.DEBUG = 'app-conf,xo:*,-xo:api'
}
// Import the real main module.
module.exports = require('./dist').default

View File

@@ -1,6 +1,6 @@
{
"name": "xo-server",
"version": "5.20.2",
"version": "5.19.4",
"license": "AGPL-3.0",
"description": "Server part of Xen-Orchestra",
"keywords": [
@@ -16,6 +16,9 @@
"url": "https://github.com/vatesfr/xen-orchestra.git"
},
"preferGlobal": true,
"bin": {
"xo-server": "dist/cli"
},
"files": [
"better-stacks.js",
"bin/",
@@ -31,15 +34,15 @@
"node": ">=6"
},
"dependencies": {
"@babel/polyfill": "7.0.0-beta.49",
"@babel/polyfill": "7.0.0-beta.44",
"@marsaud/smb2-promise": "^0.2.1",
"@xen-orchestra/cron": "^1.0.3",
"@xen-orchestra/fs": "^0.1.0",
"@xen-orchestra/fs": "^0.0.0",
"ajv": "^6.1.1",
"app-conf": "^0.5.0",
"archiver": "^2.1.0",
"async-iterator-to-stream": "^1.0.1",
"base64url": "^3.0.0",
"base64url": "^2.0.0",
"bind-property-descriptor": "^1.0.0",
"blocked": "^1.2.1",
"bluebird": "^3.5.1",
@@ -59,10 +62,10 @@
"express-session": "^1.15.6",
"fatfs": "^0.10.4",
"from2": "^2.3.0",
"fs-extra": "^6.0.1",
"fs-extra": "^5.0.0",
"get-stream": "^3.0.0",
"golike-defer": "^0.4.1",
"hashy": "^0.7.1",
"hashy": "^0.6.2",
"helmet": "^3.9.0",
"highland": "^2.11.1",
"http-proxy": "^1.16.2",
@@ -70,14 +73,14 @@
"http-server-plus": "^0.10.0",
"human-format": "^0.10.0",
"is-redirect": "^1.0.0",
"jest-worker": "^23.0.0",
"jest-worker": "^22.4.3",
"js-yaml": "^3.10.0",
"json-rpc-peer": "^0.15.3",
"json5": "^1.0.0",
"julien-f-source-map-support": "0.1.0",
"julien-f-unzip": "^0.2.1",
"kindof": "^2.0.0",
"level": "^4.0.0",
"level": "^3.0.0",
"level-party": "^3.0.4",
"level-sublevel": "^6.6.1",
"limit-concurrency-decorator": "^0.4.0",
@@ -93,16 +96,16 @@
"partial-stream": "0.0.0",
"passport": "^0.4.0",
"passport-local": "^1.0.0",
"pretty-format": "^23.0.0",
"pretty-format": "^22.0.3",
"promise-toolbox": "^0.9.5",
"proxy-agent": "^3.0.0",
"proxy-agent": "^2.1.0",
"pug": "^2.0.0-rc.4",
"pw": "^0.0.4",
"redis": "^2.8.0",
"schema-inspector": "^1.6.8",
"semver": "^5.4.1",
"serve-static": "^1.13.1",
"split-lines": "^2.0.0",
"split-lines": "^1.1.0",
"stack-chain": "^2.0.0",
"stoppable": "^1.0.5",
"struct-fu": "^1.2.0",
@@ -111,29 +114,29 @@
"tmp": "^0.0.33",
"uuid": "^3.0.1",
"value-matcher": "^0.2.0",
"vhd-lib": "^0.1.3",
"vhd-lib": "^0.0.0",
"ws": "^5.0.0",
"xen-api": "^0.16.10",
"xen-api": "^0.16.9",
"xml2js": "^0.4.19",
"xo-acl-resolver": "^0.2.4",
"xo-acl-resolver": "^0.2.3",
"xo-collection": "^0.4.1",
"xo-common": "^0.1.1",
"xo-remote-parser": "^0.3",
"xo-vmdk-to-vhd": "^0.1.3",
"xo-vmdk-to-vhd": "0.1.0",
"yazl": "^2.4.3"
},
"devDependencies": {
"@babel/cli": "7.0.0-beta.49",
"@babel/core": "7.0.0-beta.49",
"@babel/plugin-proposal-decorators": "7.0.0-beta.49",
"@babel/plugin-proposal-export-default-from": "7.0.0-beta.49",
"@babel/plugin-proposal-export-namespace-from": "7.0.0-beta.49",
"@babel/plugin-proposal-function-bind": "7.0.0-beta.49",
"@babel/plugin-proposal-optional-chaining": "^7.0.0-beta.49",
"@babel/plugin-proposal-pipeline-operator": "^7.0.0-beta.49",
"@babel/plugin-proposal-throw-expressions": "^7.0.0-beta.49",
"@babel/preset-env": "7.0.0-beta.49",
"@babel/preset-flow": "7.0.0-beta.49",
"@babel/cli": "7.0.0-beta.44",
"@babel/core": "7.0.0-beta.44",
"@babel/plugin-proposal-decorators": "7.0.0-beta.44",
"@babel/plugin-proposal-export-default-from": "7.0.0-beta.44",
"@babel/plugin-proposal-export-namespace-from": "7.0.0-beta.44",
"@babel/plugin-proposal-function-bind": "7.0.0-beta.44",
"@babel/plugin-proposal-optional-chaining": "^7.0.0-beta.44",
"@babel/plugin-proposal-pipeline-operator": "^7.0.0-beta.44",
"@babel/plugin-proposal-throw-expressions": "^7.0.0-beta.44",
"@babel/preset-env": "7.0.0-beta.44",
"@babel/preset-flow": "7.0.0-beta.44",
"babel-plugin-lodash": "^3.3.2",
"cross-env": "^5.1.3",
"index-modules": "^0.3.0",

View File

@@ -1,5 +1,4 @@
import { basename } from 'path'
import { isEmpty, pickBy } from 'lodash'
import { safeDateFormat } from '../utils'
@@ -118,8 +117,8 @@ getJob.params = {
},
}
export async function runJob ({ id, schedule, vm }) {
return this.runJobSequence([id], await this.getSchedule(schedule), vm)
export async function runJob ({ id, schedule }) {
return this.runJobSequence([id], await this.getSchedule(schedule))
}
runJob.permission = 'admin'
@@ -131,17 +130,12 @@ runJob.params = {
schedule: {
type: 'string',
},
vm: {
type: 'string',
optional: true,
},
}
// -----------------------------------------------------------------------------
export async function getAllLogs (filter) {
const logs = await this.getBackupNgLogs()
return isEmpty(filter) ? logs : pickBy(logs, filter)
export function getAllLogs () {
return this.getBackupNgLogs()
}
getAllLogs.permission = 'admin'

View File

@@ -76,21 +76,6 @@ export { restartAgent as restart_agent } // eslint-disable-line camelcase
// -------------------------------------------------------------------
export function setRemoteSyslogHost ({ host, syslogDestination }) {
return this.getXapi(host).setRemoteSyslogHost(host._xapiId, syslogDestination)
}
setRemoteSyslogHost.params = {
id: { type: 'string' },
syslogDestination: { type: 'string' },
}
setRemoteSyslogHost.resolve = {
host: ['id', 'host', 'administrate'],
}
// -------------------------------------------------------------------
export function start ({ host }) {
return this.getXapi(host).powerOnHost(host._xapiId)
}

View File

@@ -1,12 +1,5 @@
// FIXME so far, no acls for jobs
export function cancel ({ runId }) {
return this.cancelJobRun(runId)
}
cancel.permission = 'admin'
cancel.description = 'Cancel a current run'
export async function getAll () {
return /* await */ this.getAllJobs('call')
}

View File

@@ -204,8 +204,8 @@ export async function createNfs ({
}
// if NFS options given
if (nfsOptions) {
deviceConfig.options = nfsOptions
if (nfsVersion) {
deviceConfig.options = nfsVersion
}
const srRef = await xapi.call(

View File

@@ -12,10 +12,6 @@ import { forEach, map, mapFilter, parseSize } from '../utils'
// ===================================================================
export function getHaValues () {
return ['best-effort', 'restart', '']
}
function checkPermissionOnSrs (vm, permission = 'operate') {
const permissions = []
forEach(vm.$VBDs, vbdId => {
@@ -25,10 +21,8 @@ function checkPermissionOnSrs (vm, permission = 'operate') {
if (vbd.is_cd_drive || !vdiId) {
return
}
return permissions.push([
this.getObject(vdiId, ['VDI', 'VDI-snapshot']).$SR,
permission,
])
return permissions.push([this.getObject(vdiId, 'VDI').$SR, permission])
})
return this.hasPermissions(this.session.get('user_id'), permissions).then(
@@ -52,16 +46,11 @@ const extract = (obj, prop) => {
export async function create (params) {
const { user } = this
const resourceSet = extract(params, 'resourceSet')
const template = extract(params, 'template')
if (
resourceSet === undefined &&
!(await this.hasPermissions(this.user.id, [
[template.$pool, 'administrate'],
]))
) {
if (resourceSet === undefined && user.permission !== 'admin') {
throw unauthorized()
}
const template = extract(params, 'template')
params.template = template._xapiId
const xapi = this.getXapi(template)
@@ -474,7 +463,7 @@ export async function migrate ({
})
}
if (!(await this.hasPermissions(this.session.get('user_id'), permissions))) {
if (!await this.hasPermissions(this.session.get('user_id'), permissions)) {
throw unauthorized()
}
@@ -567,11 +556,11 @@ set.params = {
name_description: { type: 'string', optional: true },
high_availability: {
optional: true,
pattern: new RegExp(`^(${getHaValues().join('|')})$`),
type: 'string',
},
// TODO: provides better filtering of values for HA possible values: "best-
// effort" meaning "try to restart this VM if possible but don't consider the
// Pool to be overcommitted if this is not possible"; "restart" meaning "this
// VM should be restarted"; "" meaning "do not try to restart this VM"
high_availability: { type: 'boolean', optional: true },
// Number of virtual CPUs to allocate.
CPUs: { type: 'integer', optional: true },
@@ -663,7 +652,8 @@ clone.params = {
}
clone.resolve = {
vm: ['id', ['VM', 'VM-snapshot'], 'administrate'],
// TODO: is it necessary for snapshots?
vm: ['id', 'VM', 'administrate'],
}
// -------------------------------------------------------------------
@@ -713,9 +703,9 @@ copy.resolve = {
export async function convertToTemplate ({ vm }) {
// Convert to a template requires pool admin permission.
if (
!(await this.hasPermissions(this.session.get('user_id'), [
!await this.hasPermissions(this.session.get('user_id'), [
[vm.$pool, 'administrate'],
]))
])
) {
throw unauthorized()
}
@@ -1018,12 +1008,13 @@ export async function stop ({ vm, force }) {
// Hard shutdown
if (force) {
return xapi.shutdownVm(vm._xapiRef, { hard: true })
await xapi.call('VM.hard_shutdown', vm._xapiRef)
return
}
// Clean shutdown
try {
await xapi.shutdownVm(vm._xapiRef)
await xapi.call('VM.clean_shutdown', vm._xapiRef)
} catch (error) {
const { code } = error
if (
@@ -1274,9 +1265,7 @@ export async function createInterface ({
await this.checkResourceSetConstraints(resourceSet, this.user.id, [
network.id,
])
} else if (
!(await this.hasPermissions(this.user.id, [[network.id, 'view']]))
) {
} else if (!await this.hasPermissions(this.user.id, [[network.id, 'view']])) {
throw unauthorized()
}

176
packages/xo-server/src/cli.js Executable file
View File

@@ -0,0 +1,176 @@
#!/usr/bin/env node
const APP_NAME = 'xo-server'

// Enable xo logs by default.
if (process.env.DEBUG === undefined) {
  process.env.DEBUG = 'app-conf,xo:*,-xo:api'
}

// -------------------------------------------------------------------

// Route all application logs through the console transport; only entries at
// `warn` level or above (or matching the DEBUG filter) are emitted.
require('@xen-orchestra/log/configure').configure([
  {
    filter: process.env.DEBUG,
    level: 'warn',
    transport: require('@xen-orchestra/log/transports/console').default(),
  },
])

const { info, warn } = require('@xen-orchestra/log').createLogger('bootstrap')

// Log (instead of crashing on) unhandled promise rejections and Node
// warnings emitted by the runtime.
process.on('unhandledRejection', reason => {
  warn('possibly unhandled rejection', reason)
})
process.on('warning', warning => {
  warn('Node warning', warning)
})

// Monkey-patch EventEmitter so that an 'error' event without any listener is
// logged instead of throwing and killing the process (the default Node
// behavior).
;(({ prototype }) => {
  const { emit } = prototype
  prototype.emit = function (event, error) {
    event === 'error' && !this.listenerCount(event)
      ? warn('unhandled error event', error)
      : emit.apply(this, arguments)
  }
})(require('events').EventEmitter)

// Use Bluebird for all promises as it provides better performance and
// less memory usage.
const Bluebird = require('bluebird')
Bluebird.config({
  longStackTraces: true,
  warnings: true,
})
global.Promise = Bluebird
// -------------------------------------------------------------------
// Application entry point: loads the configuration, starts the HTTP
// listener(s), drops privileges, forks the worker process, starts the XO
// application and installs graceful-shutdown signal handlers.
//
// `args` is the raw CLI argument list (process.argv minus node + script).
const main = async args => {
  if (args.includes('--help') || args.includes('-h')) {
    const { name, version } = require('../package.json')
    return console.log(`Usage: ${name} [--safe-mode]

${name} v${version}`)
  }

  info('starting')

  const config = await require('app-conf').load(APP_NAME, {
    appDir: `${__dirname}/..`,
    ignoreUnknownFormats: true,
  })

  // Print a message if deprecated entries are specified.
  ;['users', 'servers'].forEach(entry => {
    if (entry in config) {
      warn(`${entry} configuration is deprecated`)
    }
  })

  // `stoppable` adds a `stop()` method which closes the server AND destroys
  // idle keep-alive connections.
  const httpServer = require('stoppable')(new (require('http-server-plus'))())

  const readFile = Bluebird.promisify(require('fs').readFile)
  // Start listening on every configured address; a failure on one address is
  // logged but does not prevent the others from being bound.
  await Promise.all(
    config.http.listen.map(
      async ({
        certificate,

        // The properties was called `certificate` before.
        cert = certificate,

        key,
        ...opts
      }) => {
        if (cert !== undefined && key !== undefined) {
          ;[opts.cert, opts.key] = await Promise.all([
            readFile(cert),
            readFile(key),
          ])
        }

        try {
          const niceAddress = await httpServer.listen(opts)
          info(`web server listening on ${niceAddress}`)
        } catch (error) {
          if (error.niceAddress !== undefined) {
            warn(`web server could not listen on ${error.niceAddress}`)

            const { code } = error
            if (code === 'EACCES') {
              warn('  access denied.')
              warn('  ports < 1024 are often reserved to privileges users.')
            } else if (code === 'EADDRINUSE') {
              warn('  address already in use.')
            }
          } else {
            warn('web server could not listen', error)
          }
        }
      }
    )
  )

  // Now the web server is listening, drop privileges.
  try {
    const { group, user } = config
    if (group !== undefined) {
      process.setgid(group)
      info('group changed to', group)
    }
    if (user !== undefined) {
      process.setuid(user)
      info('user changed to', user)
    }
  } catch (error) {
    warn('failed to change group/user', error)
  }

  // Fork the worker process and send it an initial (empty-action) message.
  // NOTE(review): the worker currently only logs received messages — the IPC
  // protocol appears to be a work in progress, confirm before relying on it.
  const child = require('child_process').fork(require.resolve('./worker.js'))
  child.send([''])

  const App = require('./xo').default
  const app = new App({
    appName: APP_NAME,
    config,
    httpServer,
    safeMode: require('lodash/includes')(args, '--safe-mode'),
  })

  // Register web server close on XO stop.
  app.on('stop', () => Bluebird.fromCallback(cb => httpServer.stop(cb)))

  await app.start()

  // Trigger a clean job.
  await app.clean()

  // Gracefully shutdown on signals.
  //
  // TODO: implements a timeout? (or maybe it is the services launcher
  // responsibility?)
  require('lodash/forEach')(['SIGINT', 'SIGTERM'], signal => {
    let alreadyCalled = false
    process.on(signal, () => {
      if (alreadyCalled) {
        warn('forced exit')
        process.exit(1)
      }
      alreadyCalled = true

      info(`${signal} caught, closing…`)
      app.stop()
    })
  })

  // Resolve only once the application has fully stopped.
  await require('event-to-promise')(app, 'stopped')
}
// Run `main` with the CLI arguments; a numeric rejection is used as the
// process exit code, anything else is logged as a fatal error.
main(process.argv.slice(2)).then(
  () => info('bye :-)'),
  error => {
    if (typeof error === 'number') {
      process.exitCode = error
    } else {
      warn('fatal error', error)
    }
  }
)

View File

@@ -0,0 +1,348 @@
const compilePug = require('pug').compile
const createProxyServer = require('http-proxy').createServer
const JsonRpcPeer = require('json-rpc-peer')
const LocalStrategy = require('passport-local').Strategy
const parseCookies = require('cookie').parse
const Passport = require('passport')
const serveStatic = require('serve-static')
const WebSocket = require('ws')
const { fromCallback } = require('promise-toolbox')
const { invalidCredentials } = require('xo-common/api-errors')
const { readFile } = require('fs')
const proxyConsole = require('../proxy-console')
const { debug, warn } = require('@xen-orchestra/log').createLogger('front')
// Builds the Express application: security headers (helmet), optional
// HTTP → HTTPS redirection, and the static file mounts declared under
// `http.mounts` in the configuration.
function createExpressApp ({ http: config }, httpServer) {
  const app = require('express')()

  app.use(require('helmet')())

  if (config.redirectToHttps) {
    // Find the first listen entry which provides an HTTPS port.
    const httpsConfig = config.listen.find(
      entry =>
        entry.port !== undefined &&
        (entry.cert !== undefined || entry.certificate !== undefined)
    )

    if (httpsConfig === undefined) {
      warn('could not setup HTTPs redirection: no HTTPs config found')
    } else {
      const { port } = httpsConfig
      app.use((req, res, next) => {
        if (req.secure) {
          return next()
        }

        res.redirect(`https://${req.hostname}:${port}${req.originalUrl}`)
      })
    }
  }

  // Each mount point maps a URL to one or more local directories.
  for (const url of Object.keys(config.mounts)) {
    const paths = config.mounts[url]
    const pathList = Array.isArray(paths) ? paths : [paths]
    for (const path of pathList) {
      debug('Setting up %s → %s', url, path)
      app.use(url, serveStatic(path))
    }
  }

  return app
}
// Exposes the JSON-RPC API over WebSocket on `/api/`: each connection gets
// its own XO user connection and its own JSON-RPC peer.
//
// `config` is currently unused.
function setUpApi (config, httpServer, xo) {
  const webSocketServer = new WebSocket.Server({
    noServer: true,
  })
  // Stop accepting API connections when the application stops.
  xo.on('stop', () => fromCallback(cb => webSocketServer.close(cb)))

  const onConnection = (socket, upgradeReq) => {
    const { remoteAddress } = upgradeReq.socket
    debug('+ WebSocket connection (%s)', remoteAddress)

    // Create the abstract XO object for this connection.
    const connection = xo.createUserConnection()
    connection.once('close', () => {
      socket.close()
    })

    // Create the JSON-RPC server for this connection.
    const jsonRpc = new JsonRpcPeer(message => {
      if (message.type === 'request') {
        return xo.callApiMethod(connection, message.method, message.params)
      }
    })
    connection.notify = jsonRpc.notify.bind(jsonRpc)

    // Close the XO connection with this WebSocket.
    socket.once('close', () => {
      debug('- WebSocket connection (%s)', remoteAddress)
      connection.close()
    })

    // Connect the WebSocket to the JSON-RPC server.
    socket.on('message', message => {
      jsonRpc.write(message)
    })

    const onSend = error => {
      if (error) {
        warn('WebSocket send:', error.stack)
      }
    }
    jsonRpc.on('data', data => {
      // The socket may have been closed during the API method
      // execution.
      if (socket.readyState === WebSocket.OPEN) {
        socket.send(data, onSend)
      }
    })
  }

  // Only handle upgrades targeting the API endpoint; other paths are left for
  // the other 'upgrade' listeners (console proxy, WebSocket proxies).
  httpServer.on('upgrade', (req, socket, head) => {
    if (req.url === '/api/') {
      webSocketServer.handleUpgrade(req, socket, head, ws =>
        onConnection(ws, req)
      )
    }
  })
}
// Proxies VM console connections: WebSocket upgrades on
// `/api/consoles/<id>` are authenticated via the `token` cookie, checked for
// the 'operate' permission on the target object, then piped to the XAPI VM
// console.
function setUpConsoleProxy (httpServer, xo) {
  const webSocketServer = new WebSocket.Server({
    noServer: true,
  })

  const CONSOLE_PROXY_PATH_RE = /^\/api\/consoles\/(.*)$/
  httpServer.on('upgrade', async (req, socket, head) => {
    const matches = CONSOLE_PROXY_PATH_RE.exec(req.url)
    if (!matches) {
      return
    }
    const [, id] = matches

    try {
      // TODO: factorize permissions checking in an Express middleware.
      {
        const { token } = parseCookies(req.headers.cookie)
        const user = await xo.authenticateUser({ token })
        if (!await xo.hasPermissions(user.id, [[id, 'operate']])) {
          throw invalidCredentials()
        }

        const { remoteAddress } = socket
        debug('+ Console proxy (%s - %s)', user.name, remoteAddress)
        socket.on('close', () => {
          debug('- Console proxy (%s - %s)', user.name, remoteAddress)
        })
      }

      const xapi = xo.getXapi(id, ['VM', 'VM-controller'])
      const vmConsole = xapi.getVmConsole(id)

      // FIXME: lost connection due to VM restart is not detected.
      webSocketServer.handleUpgrade(req, socket, head, connection => {
        proxyConsole(connection, vmConsole, xapi.sessionId)
      })
    } catch (error) {
      // Errors here abort the upgrade silently for the client; they are only
      // logged server side.
      console.error((error && error.stack) || error)
    }
  })
}
// Sets up authentication: session/cookie middlewares, the sign in/out pages,
// the Passport strategy registration hook (`xo.registerPassportStrategy`) and
// the token-cookie gate protecting every other route.
//
// Middleware registration order matters: cookies/session/flash/body-parser
// must run before the sign-in routes below.
async function setUpPassport (express, xo) {
  // necessary for connect-flash
  express.use(require('cookie-parser')())
  express.use(
    require('express-session')({
      resave: false,
      saveUninitialized: false,

      // TODO: should be in the config file.
      secret: 'CLWguhRZAZIXZcbrMzHCYmefxgweItKnS',
    })
  )

  // necessary for Passport to display error messages
  express.use(require('connect-flash')())

  // necessary for Passport to access the username and password from the sign
  // in form
  express.use(require('body-parser').urlencoded({ extended: false }))

  express.use(Passport.initialize())

  // Map of non-local strategy name → display label, rendered on the sign-in
  // page. Null prototype to avoid prototype-pollution lookups.
  const strategies = { __proto__: null }
  xo.registerPassportStrategy = strategy => {
    Passport.use(strategy)

    const { name } = strategy
    if (name !== 'local') {
      strategies[name] = strategy.label || name
    }
  }

  // Registers the sign in form.
  const signInPage = compilePug(
    await fromCallback(cb => readFile(`${__dirname}/../signin.pug`, cb))
  )
  express.get('/signin', (req, res, next) => {
    res.send(
      signInPage({
        error: req.flash('error')[0],
        strategies,
      })
    )
  })

  express.get('/signout', (req, res) => {
    res.clearCookie('token')
    res.redirect('/')
  })

  const SIGNIN_STRATEGY_RE = /^\/signin\/([^/]+)(\/callback)?(:?\?.*)?$/
  express.use(async (req, res, next) => {
    const { url } = req
    const matches = url.match(SIGNIN_STRATEGY_RE)

    if (matches !== null) {
      return Passport.authenticate(matches[1], async (err, user, info) => {
        if (err) {
          return next(err)
        }

        if (!user) {
          req.flash('error', info ? info.message : 'Invalid credentials')
          return res.redirect('/signin')
        }

        // The cookie will be set in via the next request because some
        // browsers do not save cookies on redirect.
        req.flash(
          'token',
          (await xo.createAuthenticationToken({ userId: user.id })).id
        )

        // The session is only persistent for internal provider and if 'Remember me' box is checked
        req.flash(
          'session-is-persistent',
          matches[1] === 'local' && req.body['remember-me'] === 'on'
        )

        res.redirect(req.flash('return-url')[0] || '/')
      })(req, res, next)
    }

    const token = req.flash('token')[0]
    if (token) {
      const isPersistent = req.flash('session-is-persistent')[0]

      if (isPersistent) {
        // Persistent cookie ? => 1 year
        res.cookie('token', token, { maxAge: 1000 * 60 * 60 * 24 * 365 })
      } else {
        // Non-persistent : external provider as Github, Twitter...
        res.cookie('token', token)
      }

      next()
    } else if (req.cookies.token) {
      // Already authenticated.
      next()
    } else if (
      /favicon|fontawesome|images|styles|\.(?:css|jpg|png)$/.test(url)
    ) {
      // Static assets needed by the sign-in page itself are not protected.
      next()
    } else {
      // Remember the requested URL to come back to it after sign in.
      req.flash('return-url', url)
      return res.redirect('/signin')
    }
  })

  // Install the local strategy.
  xo.registerPassportStrategy(
    new LocalStrategy(async (username, password, done) => {
      try {
        const user = await xo.authenticateUser({ username, password })
        done(null, user)
      } catch (error) {
        done(null, false, { message: error.message })
      }
    })
  )
}
// Proxies HTTP requests and WebSocket upgrades whose URL starts with one of
// the prefixes configured under `http.proxies` to the associated target URL.
function setUpProxies ({ http: { proxies } }, httpServer, express, xo) {
  if (proxies === undefined) {
    return
  }

  const proxy = createProxyServer({
    ignorePath: true,
  }).on('error', error => console.error(error))

  // Sort by DESCENDING prefix length so that the most specific (longest)
  // prefix wins when several prefixes match the same URL.
  //
  // The previous ascending sort made a short prefix (e.g. `/a`) always shadow
  // a longer one (e.g. `/a/b`).
  const prefixes = Object.keys(proxies).sort((a, b) => b.length - a.length)
  const n = prefixes.length

  // HTTP request proxy.
  express.use((req, res, next) => {
    const { url } = req

    for (let i = 0; i < n; ++i) {
      const prefix = prefixes[i]
      if (url.startsWith(prefix)) {
        const target = proxies[prefix]
        proxy.web(req, res, {
          target: target + url.slice(prefix.length),
        })
        return
      }
    }

    next()
  })

  // WebSocket proxy.
  const webSocketServer = new WebSocket.Server({
    noServer: true,
  })
  xo.on('stop', () => fromCallback(cb => webSocketServer.close(cb)))

  httpServer.on('upgrade', (req, socket, head) => {
    const { url } = req

    for (let i = 0; i < n; ++i) {
      const prefix = prefixes[i]
      if (url.startsWith(prefix)) {
        const target = proxies[prefix]
        proxy.ws(req, socket, head, {
          target: target + url.slice(prefix.length),
        })
        return
      }
    }
  })
}
// Web front-end entry point: wires the Express app, the reverse proxies, the
// console proxy, the JSON-RPC API and the authentication layer on top of the
// HTTP server.
//
// `xo` (the application object) is now taken from the options; it was
// previously referenced as an undeclared global, which threw a
// ReferenceError at run time.
export default async function main ({ config, httpServer, safeMode, xo }) {
  const express = createExpressApp(config, httpServer)

  setUpProxies(config, httpServer, express, xo)

  // must be set up before the API
  setUpConsoleProxy(httpServer, xo)

  setUpApi(config, httpServer, xo)

  await setUpPassport(express, xo)

  // TODO: express.use(xo._handleHttpRequest.bind(xo))
}

View File

@@ -1,664 +0,0 @@
import appConf from 'app-conf'
import bind from 'lodash/bind'
import blocked from 'blocked'
import createExpress from 'express'
import createLogger from 'debug'
import has from 'lodash/has'
import helmet from 'helmet'
import includes from 'lodash/includes'
import proxyConsole from './proxy-console'
import pw from 'pw'
import serveStatic from 'serve-static'
import startsWith from 'lodash/startsWith'
import stoppable from 'stoppable'
import WebSocket from 'ws'
import { compile as compilePug } from 'pug'
import { createServer as createProxyServer } from 'http-proxy'
import { fromEvent } from 'promise-toolbox'
import { join as joinPath } from 'path'
import JsonRpcPeer from 'json-rpc-peer'
import { invalidCredentials } from 'xo-common/api-errors'
import { ensureDir, readdir, readFile } from 'fs-extra'
import WebServer from 'http-server-plus'
import Xo from './xo'
import {
forEach,
isArray,
isFunction,
mapToArray,
pFromCallback,
} from './utils'
import bodyParser from 'body-parser'
import connectFlash from 'connect-flash'
import cookieParser from 'cookie-parser'
import expressSession from 'express-session'
import passport from 'passport'
import { parse as parseCookies } from 'cookie'
import { Strategy as LocalStrategy } from 'passport-local'
// ===================================================================
const debug = createLogger('xo:main')
const warn = (...args) => {
console.warn('[Warn]', ...args)
}
// ===================================================================
const DEPRECATED_ENTRIES = ['users', 'servers']
async function loadConfiguration () {
const config = await appConf.load('xo-server', {
appDir: joinPath(__dirname, '..'),
ignoreUnknownFormats: true,
})
debug('Configuration loaded.')
// Print a message if deprecated entries are specified.
forEach(DEPRECATED_ENTRIES, entry => {
if (has(config, entry)) {
warn(`${entry} configuration is deprecated.`)
}
})
return config
}
// ===================================================================
function createExpressApp () {
const app = createExpress()
app.use(helmet())
// Registers the cookie-parser and express-session middlewares,
// necessary for connect-flash.
app.use(cookieParser())
app.use(
expressSession({
resave: false,
saveUninitialized: false,
// TODO: should be in the config file.
secret: 'CLWguhRZAZIXZcbrMzHCYmefxgweItKnS',
})
)
// Registers the connect-flash middleware, necessary for Passport to
// display error messages.
app.use(connectFlash())
// Registers the body-parser middleware, necessary for Passport to
// access the username and password from the sign in form.
app.use(bodyParser.urlencoded({ extended: false }))
// Registers Passport's middlewares.
app.use(passport.initialize())
return app
}
async function setUpPassport (express, xo) {
const strategies = { __proto__: null }
xo.registerPassportStrategy = strategy => {
passport.use(strategy)
const { name } = strategy
if (name !== 'local') {
strategies[name] = strategy.label || name
}
}
// Registers the sign in form.
const signInPage = compilePug(
await readFile(joinPath(__dirname, '..', 'signin.pug'))
)
express.get('/signin', (req, res, next) => {
res.send(
signInPage({
error: req.flash('error')[0],
strategies,
})
)
})
express.get('/signout', (req, res) => {
res.clearCookie('token')
res.redirect('/')
})
const SIGNIN_STRATEGY_RE = /^\/signin\/([^/]+)(\/callback)?(:?\?.*)?$/
express.use(async (req, res, next) => {
const { url } = req
const matches = url.match(SIGNIN_STRATEGY_RE)
if (matches) {
return passport.authenticate(matches[1], async (err, user, info) => {
if (err) {
return next(err)
}
if (!user) {
req.flash('error', info ? info.message : 'Invalid credentials')
return res.redirect('/signin')
}
// The cookie will be set in via the next request because some
// browsers do not save cookies on redirect.
req.flash(
'token',
(await xo.createAuthenticationToken({ userId: user.id })).id
)
// The session is only persistent for internal provider and if 'Remember me' box is checked
req.flash(
'session-is-persistent',
matches[1] === 'local' && req.body['remember-me'] === 'on'
)
res.redirect(req.flash('return-url')[0] || '/')
})(req, res, next)
}
const token = req.flash('token')[0]
if (token) {
const isPersistent = req.flash('session-is-persistent')[0]
if (isPersistent) {
// Persistent cookie ? => 1 year
res.cookie('token', token, { maxAge: 1000 * 60 * 60 * 24 * 365 })
} else {
// Non-persistent : external provider as Github, Twitter...
res.cookie('token', token)
}
next()
} else if (req.cookies.token) {
next()
} else if (
/favicon|fontawesome|images|styles|\.(?:css|jpg|png)$/.test(url)
) {
next()
} else {
req.flash('return-url', url)
return res.redirect('/signin')
}
})
// Install the local strategy.
xo.registerPassportStrategy(
new LocalStrategy(async (username, password, done) => {
try {
const user = await xo.authenticateUser({ username, password })
done(null, user)
} catch (error) {
done(null, false, { message: error.message })
}
})
)
}
// ===================================================================
async function registerPlugin (pluginPath, pluginName) {
const plugin = require(pluginPath)
const { description, version = 'unknown' } = (() => {
try {
return require(pluginPath + '/package.json')
} catch (_) {
return {}
}
})()
// Supports both “normal” CommonJS and Babel's ES2015 modules.
const {
default: factory = plugin,
configurationSchema,
configurationPresets,
testSchema,
} = plugin
// The default export can be either a factory or directly a plugin
// instance.
const instance = isFunction(factory)
? factory({
xo: this,
getDataDir: () => {
const dir = `${this._config.datadir}/${pluginName}`
return ensureDir(dir).then(() => dir)
},
})
: factory
await this.registerPlugin(
pluginName,
instance,
configurationSchema,
configurationPresets,
description,
testSchema,
version
)
}
const debugPlugin = createLogger('xo:plugin')
function registerPluginWrapper (pluginPath, pluginName) {
debugPlugin('register %s', pluginName)
return registerPlugin.call(this, pluginPath, pluginName).then(
() => {
debugPlugin(`successfully register ${pluginName}`)
},
error => {
debugPlugin(`failed register ${pluginName}`)
debugPlugin(error)
}
)
}
const PLUGIN_PREFIX = 'xo-server-'
const PLUGIN_PREFIX_LENGTH = PLUGIN_PREFIX.length
async function registerPluginsInPath (path) {
const files = await readdir(path).catch(error => {
if (error.code === 'ENOENT') {
return []
}
throw error
})
await Promise.all(
mapToArray(files, name => {
if (startsWith(name, PLUGIN_PREFIX)) {
return registerPluginWrapper.call(
this,
`${path}/${name}`,
name.slice(PLUGIN_PREFIX_LENGTH)
)
}
})
)
}
async function registerPlugins (xo) {
await Promise.all(
mapToArray(
[`${__dirname}/../node_modules/`, '/usr/local/lib/node_modules/'],
xo::registerPluginsInPath
)
)
}
// ===================================================================
async function makeWebServerListen (
webServer,
{
certificate,
// The properties was called `certificate` before.
cert = certificate,
key,
...opts
}
) {
if (cert && key) {
;[opts.cert, opts.key] = await Promise.all([readFile(cert), readFile(key)])
if (opts.key.includes('ENCRYPTED')) {
opts.passphrase = await new Promise(resolve => {
console.log('Encrypted key %s', key)
process.stdout.write(`Enter pass phrase: `)
pw(resolve)
})
}
}
try {
const niceAddress = await webServer.listen(opts)
debug(`Web server listening on ${niceAddress}`)
} catch (error) {
if (error.niceAddress) {
warn(`Web server could not listen on ${error.niceAddress}`)
const { code } = error
if (code === 'EACCES') {
warn(' Access denied.')
warn(' Ports < 1024 are often reserved to privileges users.')
} else if (code === 'EADDRINUSE') {
warn(' Address already in use.')
}
} else {
warn('Web server could not listen:', error.message)
}
}
}
async function createWebServer ({ listen, listenOptions }) {
const webServer = stoppable(new WebServer())
await Promise.all(
mapToArray(listen, opts =>
makeWebServerListen(webServer, { ...listenOptions, ...opts })
)
)
return webServer
}
// ===================================================================
const setUpProxies = (express, opts, xo) => {
if (!opts) {
return
}
const proxy = createProxyServer({
ignorePath: true,
}).on('error', error => console.error(error))
// TODO: sort proxies by descending prefix length.
// HTTP request proxy.
express.use((req, res, next) => {
const { url } = req
for (const prefix in opts) {
if (startsWith(url, prefix)) {
const target = opts[prefix]
proxy.web(req, res, {
target: target + url.slice(prefix.length),
})
return
}
}
next()
})
// WebSocket proxy.
const webSocketServer = new WebSocket.Server({
noServer: true,
})
xo.on('stop', () => pFromCallback(cb => webSocketServer.close(cb)))
express.on('upgrade', (req, socket, head) => {
const { url } = req
for (const prefix in opts) {
if (startsWith(url, prefix)) {
const target = opts[prefix]
proxy.ws(req, socket, head, {
target: target + url.slice(prefix.length),
})
return
}
}
})
}
// ===================================================================
const setUpStaticFiles = (express, opts) => {
forEach(opts, (paths, url) => {
if (!isArray(paths)) {
paths = [paths]
}
forEach(paths, path => {
debug('Setting up %s → %s', url, path)
express.use(url, serveStatic(path))
})
})
}
// ===================================================================
const setUpApi = (webServer, xo, verboseLogsOnErrors) => {
const webSocketServer = new WebSocket.Server({
noServer: true,
})
xo.on('stop', () => pFromCallback(cb => webSocketServer.close(cb)))
const onConnection = (socket, upgradeReq) => {
const { remoteAddress } = upgradeReq.socket
debug('+ WebSocket connection (%s)', remoteAddress)
// Create the abstract XO object for this connection.
const connection = xo.createUserConnection()
connection.once('close', () => {
socket.close()
})
// Create the JSON-RPC server for this connection.
const jsonRpc = new JsonRpcPeer(message => {
if (message.type === 'request') {
return xo.callApiMethod(connection, message.method, message.params)
}
})
connection.notify = bind(jsonRpc.notify, jsonRpc)
// Close the XO connection with this WebSocket.
socket.once('close', () => {
debug('- WebSocket connection (%s)', remoteAddress)
connection.close()
})
// Connect the WebSocket to the JSON-RPC server.
socket.on('message', message => {
jsonRpc.write(message)
})
const onSend = error => {
if (error) {
warn('WebSocket send:', error.stack)
}
}
jsonRpc.on('data', data => {
// The socket may have been closed during the API method
// execution.
if (socket.readyState === WebSocket.OPEN) {
socket.send(data, onSend)
}
})
}
webServer.on('upgrade', (req, socket, head) => {
if (req.url === '/api/') {
webSocketServer.handleUpgrade(req, socket, head, ws =>
onConnection(ws, req)
)
}
})
}
// ===================================================================
const CONSOLE_PROXY_PATH_RE = /^\/api\/consoles\/(.*)$/
const setUpConsoleProxy = (webServer, xo) => {
const webSocketServer = new WebSocket.Server({
noServer: true,
})
xo.on('stop', () => pFromCallback(cb => webSocketServer.close(cb)))
webServer.on('upgrade', async (req, socket, head) => {
const matches = CONSOLE_PROXY_PATH_RE.exec(req.url)
if (!matches) {
return
}
const [, id] = matches
try {
// TODO: factorize permissions checking in an Express middleware.
{
const { token } = parseCookies(req.headers.cookie)
const user = await xo.authenticateUser({ token })
if (!(await xo.hasPermissions(user.id, [[id, 'operate']]))) {
throw invalidCredentials()
}
const { remoteAddress } = socket
debug('+ Console proxy (%s - %s)', user.name, remoteAddress)
socket.on('close', () => {
debug('- Console proxy (%s - %s)', user.name, remoteAddress)
})
}
const xapi = xo.getXapi(id, ['VM', 'VM-controller'])
const vmConsole = xapi.getVmConsole(id)
// FIXME: lost connection due to VM restart is not detected.
webSocketServer.handleUpgrade(req, socket, head, connection => {
proxyConsole(connection, vmConsole, xapi.sessionId)
})
} catch (error) {
console.error((error && error.stack) || error)
}
})
}
// ===================================================================
const USAGE = (({ name, version }) => `Usage: ${name} [--safe-mode]
${name} v${version}`)(require('../package.json'))
// ===================================================================
export default async function main (args) {
if (includes(args, '--help') || includes(args, '-h')) {
return USAGE
}
{
const debug = createLogger('xo:perf')
blocked(
ms => {
debug('blocked for %sms', ms | 0)
},
{
threshold: 500,
}
)
}
const config = await loadConfiguration()
const webServer = await createWebServer(config.http)
// Now the web server is listening, drop privileges.
try {
const { user, group } = config
if (group) {
process.setgid(group)
debug('Group changed to', group)
}
if (user) {
process.setuid(user)
debug('User changed to', user)
}
} catch (error) {
warn('Failed to change user/group:', error)
}
// Creates main object.
const xo = new Xo(config)
// Register web server close on XO stop.
xo.on('stop', () => pFromCallback(cb => webServer.stop(cb)))
// Connects to all registered servers.
await xo.start()
// Trigger a clean job.
await xo.clean()
// Express is used to manage non WebSocket connections.
const express = createExpressApp()
if (config.http.redirectToHttps) {
let port
forEach(config.http.listen, listen => {
if (listen.port && (listen.cert || listen.certificate)) {
port = listen.port
return false
}
})
if (port === undefined) {
warn('Could not setup HTTPs redirection: no HTTPs port found')
} else {
express.use((req, res, next) => {
if (req.secure) {
return next()
}
res.redirect(`https://${req.hostname}:${port}${req.originalUrl}`)
})
}
}
// Must be set up before the API.
setUpConsoleProxy(webServer, xo)
// Must be set up before the API.
express.use(bind(xo._handleHttpRequest, xo))
// Everything above is not protected by the sign in, allowing xo-cli
// to work properly.
await setUpPassport(express, xo)
// Attaches express to the web server.
webServer.on('request', express)
webServer.on('upgrade', (req, socket, head) => {
express.emit('upgrade', req, socket, head)
})
// Must be set up before the static files.
setUpApi(webServer, xo, config.verboseApiLogsOnErrors)
setUpProxies(express, config.http.proxies, xo)
setUpStaticFiles(express, config.http.mounts)
if (!includes(args, '--safe-mode')) {
await registerPlugins(xo)
}
// Gracefully shutdown on signals.
//
// TODO: implements a timeout? (or maybe it is the services launcher
// responsibility?)
forEach(['SIGINT', 'SIGTERM'], signal => {
let alreadyCalled = false
process.on(signal, () => {
if (alreadyCalled) {
warn('forced exit')
process.exit(1)
}
alreadyCalled = true
debug('%s caught, closing…', signal)
xo.stop()
})
})
await fromEvent(xo, 'stopped')
debug('bye :-)')
}

View File

@@ -13,10 +13,6 @@ export default {
type: 'string',
description: 'identifier of this job',
},
scheduleId: {
type: 'string',
description: 'identifier of the schedule which ran the job',
},
key: {
type: 'string',
},

View File

@@ -0,0 +1,3 @@
process.on('message', ([action, ...args]) => {
console.log(action, args)
})

View File

@@ -0,0 +1,143 @@
import blocked from 'blocked'
import { createLogger } from '@xen-orchestra/log'
import { fromEvent } from 'promise-toolbox'
import { ensureDir, readdir } from 'fs-extra'
import Xo from './xo'
// ===================================================================
const { debug } = createLogger('xo:main')
// ===================================================================
// Load the plugin module at `pluginPath` and register it on the Xo
// instance bound to `this`, under the name `pluginName`.
async function registerPlugin (pluginPath, pluginName) {
  const plugin = require(pluginPath)

  // Best-effort read of the plugin's metadata from its package.json
  // (a missing/unreadable package.json is tolerated).
  let description
  let version = 'unknown'
  try {
    const pkg = require(pluginPath + '/package.json')
    description = pkg.description
    if (pkg.version !== undefined) {
      version = pkg.version
    }
  } catch (_) {}

  // Supports both plain CommonJS exports and Babel ES2015 modules: the
  // module itself is used when there is no default export.
  const { configurationSchema, configurationPresets, testSchema } = plugin
  const factory = plugin.default !== undefined ? plugin.default : plugin

  // The default export can be either a factory or directly a plugin
  // instance.
  let instance
  if (typeof factory === 'function') {
    instance = factory({
      xo: this,
      // creates (if necessary) and returns the plugin's dedicated data dir
      getDataDir: () => {
        const dir = `${this._config.datadir}/${pluginName}`
        return ensureDir(dir).then(() => dir)
      },
    })
  } else {
    instance = factory
  }

  await this.registerPlugin(
    pluginName,
    instance,
    configurationSchema,
    configurationPresets,
    description,
    testSchema,
    version
  )
}
// Dedicated logger for plugin registration.
const debugPlugin = createLogger('xo:plugin')

// Register a single plugin, logging the outcome; failures are logged and
// swallowed so one broken plugin does not prevent the others from loading.
async function registerPluginWrapper (pluginPath, pluginName) {
  debugPlugin('register %s', pluginName)
  try {
    await registerPlugin.call(this, pluginPath, pluginName)
    debugPlugin(`successfully register ${pluginName}`)
  } catch (error) {
    debugPlugin(`failed register ${pluginName}`)
    debugPlugin(error)
  }
}
const PLUGIN_PREFIX = 'xo-server-'
const PLUGIN_PREFIX_LENGTH = PLUGIN_PREFIX.length

// Scan `path` and register every entry whose name starts with `xo-server-`.
// A non-existing directory simply contains no plugins and is not an error.
async function registerPluginsInPath (path) {
  let files
  try {
    files = await readdir(path)
  } catch (error) {
    if (error.code !== 'ENOENT') {
      throw error
    }
    files = []
  }

  const registrations = []
  for (const name of files) {
    if (name.startsWith(PLUGIN_PREFIX)) {
      registrations.push(
        registerPluginWrapper.call(
          this,
          `${path}/${name}`,
          name.slice(PLUGIN_PREFIX_LENGTH)
        )
      )
    }
  }
  await Promise.all(registrations)
}
// Register plugins found in the local node_modules directory as well as in
// the global one. (`.call(xo, …)` replaces the Babel bind operator `::`,
// with identical behavior.)
async function registerPlugins (xo) {
  const paths = [
    `${__dirname}/../node_modules/`,
    '/usr/local/lib/node_modules/',
  ]
  await Promise.all(paths.map(path => registerPluginsInPath.call(xo, path)))
}
// ===================================================================
// Worker entry point: create and start a Xo instance, then run until the
// parent process sends a 'STOP' IPC message.
//
// `config`: configuration object passed as-is to the Xo constructor.
// `safeMode`: when true, plugins are not registered.
//
// NOTE(review): the module-level caller invokes `main()` without any
// argument, which used to crash when destructuring the parameter — default
// to an empty object so the destructuring cannot throw.
async function main ({ config, safeMode } = {}) {
  {
    // report event loop stalls
    const debug = createLogger('xo:perf')
    blocked(ms => {
      debug('blocked for %sms', ms | 0)
    })
  }

  // Creates main object.
  const xo = new Xo(config)

  // Connects to all registered servers.
  await xo.start()

  // Trigger a clean job.
  await xo.clean()

  if (!safeMode) {
    await registerPlugins(xo)
  }

  // Wait for the parent process to request a shutdown.
  await new Promise(resolve => {
    const onMessage = message => {
      if (message[0] === 'STOP') {
        process.removeListener('message', onMessage)
        resolve()
      }
    }
    process.on('message', onMessage)
  })

  // Actually stop the instance: without this the 'stopped' event below is
  // never emitted and the worker hangs forever (same pattern as the signal
  // handlers of the non-worker entry point).
  xo.stop()

  await fromEvent(xo, 'stopped')
}
// Run the worker and report the outcome to the parent process over IPC.
main().then(
  () => process.send(['STOPPED']),
  error => {
    // Error instances are not preserved by the IPC JSON serialization
    // (they would arrive as empty objects): send their useful properties
    // as a plain object instead.
    process.send([
      'STOPPED_WITH_ERROR',
      error instanceof Error
        ? { name: error.name, message: error.message, stack: error.stack }
        : error,
    ])
  }
)

View File

@@ -146,7 +146,6 @@ const TRANSFORMS = {
license_params: obj.license_params,
license_server: obj.license_server,
license_expiry: toTimestamp(obj.license_params.expiry),
logging: obj.logging,
name_description: obj.name_description,
name_label: obj.name_label,
memory: (function () {
@@ -187,14 +186,9 @@ const TRANSFORMS = {
}
}),
agentStartTime: toTimestamp(otherConfig.agent_start_time),
rebootRequired:
softwareVersion.product_brand === 'XCP-ng'
? toTimestamp(otherConfig.boot_time) <
+otherConfig.rpm_patch_installation_time
: !isEmpty(obj.updates_requiring_reboot),
rebootRequired: !isEmpty(obj.updates_requiring_reboot),
tags: obj.tags,
version: softwareVersion.product_version,
productBrand: softwareVersion.product_brand,
// TODO: dedupe.
PIFs: link(obj, 'PIFs'),
@@ -233,20 +227,15 @@ const TRANSFORMS = {
return
}
if (guestMetrics === undefined) {
if (!guestMetrics) {
return false
}
const { major, minor } = guestMetrics.PV_drivers_version
if (major === undefined || minor === undefined) {
return false
}
return {
major: +major,
minor: +minor,
version: +`${major}.${minor}`,
major,
minor,
}
})()
@@ -303,7 +292,8 @@ const TRANSFORMS = {
}
})(),
high_availability: obj.ha_restart_priority,
// TODO: there is two possible value: "best-effort" and "restart"
high_availability: Boolean(obj.ha_restart_priority),
memory: (function () {
const dynamicMin = +obj.memory_dynamic_min

View File

@@ -70,7 +70,7 @@ import {
// ===================================================================
const TAG_BASE_DELTA = 'xo:base_delta'
export const TAG_COPY_SRC = 'xo:copy_of'
const TAG_COPY_SRC = 'xo:copy_of'
// ===================================================================
@@ -426,14 +426,6 @@ export default class Xapi extends XapiBase {
await this.call('host.restart_agent', this.getObject(hostId).$ref)
}
async setRemoteSyslogHost (hostId, syslogDestination) {
const host = this.getObject(hostId)
await this.call('host.set_logging', host.$ref, {
syslog_destination: syslogDestination,
})
await this.call('host.syslog_reconfigure', host.$ref)
}
async shutdownHost (hostId, force = false) {
const host = this.getObject(hostId)
@@ -664,7 +656,7 @@ export default class Xapi extends XapiBase {
}
// ensure the vm record is up-to-date
vm = await this.barrier($ref)
vm = await this.barrier('VM', $ref)
return Promise.all([
forceDeleteDefaultTemplate &&
@@ -824,14 +816,12 @@ export default class Xapi extends XapiBase {
} = {}
): Promise<DeltaVmExport> {
let vm = this.getObject(vmId)
if (!bypassVdiChainsCheck) {
this._assertHealthyVdiChains(vm)
}
// do not use the snapshot name in the delta export
const exportedNameLabel = vm.name_label
if (!vm.is_a_snapshot) {
if (!bypassVdiChainsCheck) {
this._assertHealthyVdiChains(vm)
}
vm = await this._snapshotVm($cancelToken, vm, snapshotNameLabel)
$defer.onFailure(() => this._deleteVm(vm))
}
@@ -968,9 +958,7 @@ export default class Xapi extends XapiBase {
)
if (!baseVm) {
throw new Error(
`could not find the base VM (copy of ${remoteBaseVmUuid})`
)
throw new Error('could not find the base VM')
}
}
}
@@ -1083,7 +1071,7 @@ export default class Xapi extends XapiBase {
.once('finish', () => {
transferSize += sizeStream.size
})
sizeStream.task = stream.task
stream.task = sizeStream.task
await this._importVdiContent(vdi, sizeStream, VDI_FORMAT_VHD)
}
}),
@@ -1154,9 +1142,7 @@ export default class Xapi extends XapiBase {
vdis[vdi.$ref] =
mapVdisSrs && mapVdisSrs[vdi.$id]
? hostXapi.getObject(mapVdisSrs[vdi.$id]).$ref
: sr !== undefined
? hostXapi.getObject(sr).$ref
: defaultSr.$ref // Will error if there are no default SR.
: sr !== undefined ? hostXapi.getObject(sr).$ref : defaultSr.$ref // Will error if there are no default SR.
}
}

View File

@@ -35,24 +35,11 @@ declare class XapiObject {
}
type Id = string | XapiObject
declare export class Vbd extends XapiObject {
type: string;
VDI: string;
}
declare export class Vdi extends XapiObject {
$snapshot_of: Vdi;
uuid: string;
}
declare export class Vm extends XapiObject {
$snapshots: Vm[];
$VBDs: Vbd[];
is_a_snapshot: boolean;
is_a_template: boolean;
name_label: string;
power_state: 'Running' | 'Halted' | 'Paused' | 'Suspended';
other_config: $Dict<string>;
snapshot_time: number;
uuid: string;
@@ -80,24 +67,21 @@ declare export class Xapi {
_snapshotVm(cancelToken: mixed, vm: Vm, nameLabel?: string): Promise<Vm>;
addTag(object: Id, tag: string): Promise<void>;
barrier(): Promise<void>;
barrier(ref: string): Promise<XapiObject>;
barrier(): void;
barrier(ref: string): XapiObject;
deleteVm(vm: Id): Promise<void>;
editVm(vm: Id, $Dict<mixed>): Promise<void>;
exportDeltaVm(
cancelToken: mixed,
snapshot: Id,
baseSnapshot ?: Id,
opts?: { fullVdisRequired?: string[] }
): Promise<DeltaVmExport>;
exportVm(
cancelToken: mixed,
vm: Vm,
options ?: Object
): Promise<AugmentedReadable>;
getObject(object: Id): XapiObject;
importDeltaVm(data: DeltaVmImport, options: Object): Promise<{ vm: Vm }>;
importVm(stream: AugmentedReadable, options: Object): Promise<Vm>;
shutdownVm(object: Id): Promise<void>;
startVm(object: Id): Promise<void>;
exportDeltaVm(
cancelToken: mixed,
snapshot: Id,
baseSnapshot ?: Id
): Promise<DeltaVmExport>;
exportVm(
cancelToken: mixed,
vm: Vm,
options ?: Object
): Promise<AugmentedReadable>;
getObject(object: Id): XapiObject;
importDeltaVm(data: DeltaVmImport, options: Object): Promise<{ vm: Vm }>;
importVm(stream: AugmentedReadable, options: Object): Promise<Vm>;
}

View File

@@ -1,6 +1,5 @@
import deferrable from 'golike-defer'
import every from 'lodash/every'
import filter from 'lodash/filter'
import find from 'lodash/find'
import includes from 'lodash/includes'
import isObject from 'lodash/isObject'
@@ -12,7 +11,6 @@ import unzip from 'julien-f-unzip'
import { debounce } from '../../decorators'
import {
asyncMap,
ensureArray,
forEach,
mapFilter,
@@ -151,12 +149,9 @@ export default {
},
async listMissingPoolPatchesOnHost (hostId) {
const host = this.getObject(hostId)
// Returns an array to not break compatibility.
return mapToArray(
await (host.software_version.product_brand === 'XCP-ng'
? this._xcpListHostUpdates(host)
: this._listMissingPoolPatchesOnHost(host))
await this._listMissingPoolPatchesOnHost(this.getObject(hostId))
)
},
@@ -445,14 +440,8 @@ export default {
},
async installAllPoolPatchesOnHost (hostId) {
const host = this.getObject(hostId)
if (host.software_version.product_brand === 'XCP-ng') {
return this._xcpInstallHostUpdates(host)
}
return this._installAllPoolPatchesOnHost(host)
},
let host = this.getObject(hostId)
async _installAllPoolPatchesOnHost (host) {
const installableByUuid =
host.license_params.sku_type !== 'free'
? await this._listMissingPoolPatchesOnHost(host)
@@ -490,13 +479,6 @@ export default {
},
async installAllPoolPatchesOnAllHosts () {
if (this.pool.$master.software_version.product_brand === 'XCP-ng') {
return this._xcpInstallAllPoolUpdatesOnHost()
}
return this._installAllPoolPatchesOnAllHosts()
},
async _installAllPoolPatchesOnAllHosts () {
const installableByUuid = assign(
{},
...(await Promise.all(
@@ -536,47 +518,4 @@ export default {
})
}
},
// ----------------------------------
// XCP-ng dedicated zone for patching
// ----------------------------------
// list all yum updates available for a XCP-ng host
async _xcpListHostUpdates (host) {
return JSON.parse(
await this.call(
'host.call_plugin',
host.$ref,
'updater.py',
'check_update',
{}
)
)
},
// install all yum updates for a XCP-ng host
async _xcpInstallHostUpdates (host) {
const update = await this.call(
'host.call_plugin',
host.$ref,
'updater.py',
'update',
{}
)
if (JSON.parse(update).exit !== 0) {
throw new Error('Update install failed')
} else {
await this._updateObjectMapProperty(host, 'other_config', {
rpm_patch_installation_time: String(Date.now() / 1000),
})
}
},
// install all yum updates for all XCP-ng hosts in a give pool
async _xcpInstallAllPoolUpdatesOnHost () {
await asyncMap(filter(this.objects.all, { $type: 'host' }), host =>
this._xcpInstallHostUpdates(host)
)
},
}

View File

@@ -1,6 +1,6 @@
import deferrable from 'golike-defer'
import { catchPlus as pCatch, ignoreErrors } from 'promise-toolbox'
import { find, gte, includes, isEmpty, lte, noop } from 'lodash'
import { find, gte, includes, isEmpty, lte } from 'lodash'
import { forEach, mapToArray, parseSize } from '../../utils'
@@ -204,7 +204,7 @@ export default {
if (cloudConfig != null) {
// Refresh the record.
await this.barrier(vm.$ref)
await this.barrier('VM', vm.$ref)
vm = this.getObjectByRef(vm.$ref)
// Find the SR of the first VDI.
@@ -224,7 +224,7 @@ export default {
}
// wait for the record with all the VBDs and VIFs
return this.barrier(vm.$ref)
return this.barrier('VM', vm.$ref)
},
// High level method to edit a VM.
@@ -310,7 +310,11 @@ export default {
highAvailability: {
set (ha, vm) {
return this.call('VM.set_ha_restart_priority', vm.$ref, ha)
return this.call(
'VM.set_ha_restart_priority',
vm.$ref,
ha ? 'restart' : ''
)
},
},
@@ -429,11 +433,4 @@ export default {
// the force parameter is always true
return this.call('VM.resume', this.getObject(vmId).$ref, false, true)
},
shutdownVm (vmId, { hard = false } = {}) {
return this.call(
`VM.${hard ? 'hard' : 'clean'}_shutdown`,
this.getObject(vmId).$ref
).then(noop)
},
}

View File

@@ -1,138 +0,0 @@
import { forEach } from 'lodash'
const isSkippedError = error =>
error.message === 'no disks found' ||
error.message === 'no such object' ||
error.message === 'no VMs match this pattern' ||
error.message === 'unhealthy VDI chain'
const getStatus = (
error,
status = error === undefined ? 'success' : 'failure'
) => (status === 'failure' && isSkippedError(error) ? 'skipped' : status)
const computeStatusAndSortTasks = (status, tasks) => {
if (status === 'failure' || tasks === undefined) {
return status
}
for (let i = 0, n = tasks.length; i < n; ++i) {
const taskStatus = tasks[i].status
if (taskStatus === 'failure') {
return taskStatus
}
if (taskStatus === 'skipped') {
status = taskStatus
}
}
tasks.sort(taskTimeComparator)
return status
}
const taskTimeComparator = ({ start: s1, end: e1 }, { start: s2, end: e2 }) => {
if (e1 !== undefined) {
if (e2 !== undefined) {
// finished tasks are ordered by their end times
return e1 - e2
}
// finished task before unfinished tasks
return -1
} else if (e2 === undefined) {
// unfinished tasks are ordered by their start times
return s1 - s2
}
// unfinished task after finished tasks
return 1
}
export default {
  // Consolidate the raw backup-ng job/task log entries into a tree of runs.
  //
  // When `runId` is given, only that run is consolidated and the single
  // entry is returned (possibly undefined); otherwise a map of all runs
  // indexed by their run id is returned.
  async getBackupNgLogs (runId?: string) {
    const { runningJobs } = this

    // all consolidated run entries, indexed by run id
    const consolidated = {}

    // entries (runs and tasks) whose end event has not been seen yet,
    // indexed by log entry id
    const started = {}
    forEach(await this.getLogs('jobs'), ({ data, time, message }, id) => {
      const { event } = data
      if (event === 'job.start') {
        // only keep backup jobs (`key === undefined` presumably matches
        // legacy entries without a type — TODO confirm) and, when a runId
        // filter is given, only that run
        if (
          (data.type === 'backup' || data.key === undefined) &&
          (runId === undefined || runId === id)
        ) {
          const { scheduleId, jobId } = data
          consolidated[id] = started[id] = {
            data: data.data,
            id,
            jobId,
            scheduleId,
            start: time,
            // a run with no matching end event is 'pending' if it is still
            // the currently running instance of the job, 'interrupted'
            // otherwise
            status: runningJobs[jobId] === id ? 'pending' : 'interrupted',
          }
        }
      } else if (event === 'job.end') {
        const { runJobId } = data
        const log = started[runJobId]
        if (log !== undefined) {
          delete started[runJobId]
          log.end = time
          log.status = computeStatusAndSortTasks(
            getStatus((log.result = data.error)),
            log.tasks
          )
        }
      } else if (event === 'task.start') {
        // attach the task to its parent (run or task); orphan tasks are
        // silently dropped
        const parent = started[data.parentId]
        if (parent !== undefined) {
          ;(parent.tasks || (parent.tasks = [])).push(
            (started[id] = {
              data: data.data,
              id,
              message,
              start: time,
              // inherit the parent's provisional status until the end
              // event is seen
              status: parent.status,
            })
          )
        }
      } else if (event === 'task.end') {
        const { taskId } = data
        const log = started[taskId]
        if (log !== undefined) {
          // TODO: merge/transfer work-around
          delete started[taskId]
          log.end = time
          log.status = computeStatusAndSortTasks(
            getStatus((log.result = data.result), data.status),
            log.tasks
          )
        }
      } else if (event === 'jobCall.start') {
        // legacy per-VM call events: represented as tasks targeting a VM
        const parent = started[data.runJobId]
        if (parent !== undefined) {
          ;(parent.tasks || (parent.tasks = [])).push(
            (started[id] = {
              data: {
                type: 'VM',
                id: data.params.id,
              },
              id,
              start: time,
              status: parent.status,
            })
          )
        }
      } else if (event === 'jobCall.end') {
        const { runCallId } = data
        const log = started[runCallId]
        if (log !== undefined) {
          delete started[runCallId]
          log.end = time
          log.status = computeStatusAndSortTasks(
            getStatus((log.result = data.error)),
            log.tasks
          )
        }
      }
    })
    return runId === undefined ? consolidated : consolidated[runId]
  },
}

View File

@@ -3,27 +3,19 @@
// $FlowFixMe
import type RemoteHandler from '@xen-orchestra/fs'
import defer from 'golike-defer'
import limitConcurrency from 'limit-concurrency-decorator'
import { type Pattern, createPredicate } from 'value-matcher'
import { type Readable, PassThrough } from 'stream'
import { AssertionError } from 'assert'
import { basename, dirname } from 'path'
import {
countBy,
forEach,
groupBy,
isEmpty,
last,
mapValues,
noop,
some,
sum,
values,
} from 'lodash'
import {
fromEvent as pFromEvent,
ignoreErrors,
timeout as pTimeout,
} from 'promise-toolbox'
import { timeout as pTimeout } from 'promise-toolbox'
import Vhd, {
chainVhd,
createSyntheticStream as createVhdReadStream,
@@ -36,12 +28,9 @@ import createSizeStream from '../../size-stream'
import {
type DeltaVmExport,
type DeltaVmImport,
type Vdi,
type Vm,
type Xapi,
TAG_COPY_SRC,
} from '../../xapi'
import { getVmDisks } from '../../xapi/utils'
import {
asyncMap,
resolveRelativeFromFile,
@@ -51,15 +40,12 @@ import {
import { translateLegacyJob } from './migration'
export type Mode = 'full' | 'delta'
export type ReportWhen = 'always' | 'failure' | 'never'
type Mode = 'full' | 'delta'
type ReportWhen = 'always' | 'failure' | 'never'
type Settings = {|
concurrency?: number,
deleteFirst?: boolean,
copyRetention?: number,
exportRetention?: number,
offlineSnapshot?: boolean,
reportWhen?: ReportWhen,
snapshotRetention?: number,
vmTimeout?: number,
@@ -104,6 +90,33 @@ type MetadataFull = {|
|}
type Metadata = MetadataDelta | MetadataFull
type ConsolidatedJob = {|
duration?: number,
end?: number,
error?: Object,
id: string,
jobId: string,
mode: Mode,
start: number,
type: 'backup' | 'call',
userId: string,
|}
type ConsolidatedTask = {|
data?: Object,
duration?: number,
end?: number,
parentId: string,
message: string,
result?: Object,
start: number,
status: 'canceled' | 'failure' | 'success',
taskId: string,
|}
type ConsolidatedBackupNgLog = {
roots: Array<ConsolidatedJob>,
[parentId: string]: Array<ConsolidatedTask>,
}
const compareSnapshotTime = (a: Vm, b: Vm): number =>
a.snapshot_time < b.snapshot_time ? -1 : 1
@@ -117,25 +130,20 @@ const compareTimestamp = (a: Metadata, b: Metadata): number =>
const getOldEntries = <T>(retention: number, entries?: T[]): T[] =>
entries === undefined
? []
: --retention > 0
? entries.slice(0, -retention)
: entries
: --retention > 0 ? entries.slice(0, -retention) : entries
const defaultSettings: Settings = {
concurrency: 0,
deleteFirst: false,
exportRetention: 0,
offlineSnapshot: false,
reportWhen: 'failure',
snapshotRetention: 0,
vmTimeout: 0,
}
const getSetting = <T>(
const getSetting = (
settings: $Dict<Settings>,
name: $Keys<Settings>,
keys: string[],
defaultValue?: T
): T | any => {
...keys: string[]
): any => {
for (let i = 0, n = keys.length; i < n; ++i) {
const objectSettings = settings[keys[i]]
if (objectSettings !== undefined) {
@@ -145,16 +153,12 @@ const getSetting = <T>(
}
}
}
if (defaultValue !== undefined) {
return defaultValue
}
return defaultSettings[name]
}
const BACKUP_DIR = 'xo-vm-backups'
const getVmBackupDir = (uuid: string) => `${BACKUP_DIR}/${uuid}`
const isHiddenFile = (filename: string) => filename[0] === '.'
const isMetadataFile = (filename: string) => filename.endsWith('.json')
const isVhd = (filename: string) => filename.endsWith('.vhd')
@@ -255,10 +259,6 @@ const importers: $Dict<
},
}
const PARSE_UUID_RE = /-/g
const parseUuid = (uuid: string) =>
Buffer.from(uuid.replace(PARSE_UUID_RE, ''), 'hex')
const parseVmBackupId = (id: string) => {
const i = id.indexOf('/')
return {
@@ -304,7 +304,6 @@ const writeStream = async (
const output = await handler.createOutputStream(tmpPath, { checksum })
try {
input.pipe(output)
await pFromEvent(output, 'finish')
await output.checksumWritten
// $FlowFixMe
await input.task
@@ -328,7 +327,10 @@ const wrapTask = async <T>(opts: any, task: Promise<T>): Promise<T> => {
value => {
logger.notice(message, {
event: 'task.end',
result: typeof result === 'function' ? result(value) : result,
result:
result === undefined
? value
: typeof result === 'function' ? result(value) : result,
status: 'success',
taskId,
})
@@ -364,7 +366,10 @@ const wrapTaskFn = <T>(
const value = await task.apply(this, [taskId, ...arguments])
logger.notice(message, {
event: 'task.end',
result: typeof result === 'function' ? result(value) : result,
result:
result === undefined
? value
: typeof result === 'function' ? result(value) : result,
status: 'success',
taskId,
})
@@ -426,7 +431,6 @@ export default class BackupNg {
app.on('start', () => {
const executor: Executor = async ({
cancelToken,
data: vmId,
job: job_,
logger,
runJobId,
@@ -437,36 +441,18 @@ export default class BackupNg {
}
const job: BackupJob = (job_: any)
let vms: $Dict<Vm> | void
if (vmId === undefined) {
vms = app.getObjects({
filter: createPredicate({
type: 'VM',
...job.vms,
}),
})
if (isEmpty(vms)) {
throw new Error('no VMs match this pattern')
}
const vms: $Dict<Vm> = app.getObjects({
filter: createPredicate({
type: 'VM',
...job.vms,
}),
})
if (isEmpty(vms)) {
throw new Error('no VMs match this pattern')
}
const jobId = job.id
const scheduleId = schedule.id
const srs = unboxIds(job.srs).map(id => {
const xapi = app.getXapi(id)
return {
__proto__: xapi.getObject(id),
xapi,
}
})
const remotes = await Promise.all(
unboxIds(job.remotes).map(async id => ({
id,
handler: await app.getRemoteHandler(id),
}))
)
let handleVm = async vm => {
await asyncMap(vms, async vm => {
const { name_label: name, uuid } = vm
const taskId: string = logger.notice(
`Starting backup of ${name}. (${jobId})`,
@@ -488,14 +474,16 @@ export default class BackupNg {
job,
schedule,
logger,
taskId,
srs,
remotes
taskId
)
const vmTimeout: number = getSetting(job.settings, 'vmTimeout', [
const vmTimeout: number = getSetting(
job.settings,
'vmTimeout',
uuid,
scheduleId,
])
logger,
taskId
)
if (vmTimeout !== 0) {
p = pTimeout.call(p, vmTimeout)
}
@@ -516,19 +504,7 @@ export default class BackupNg {
: serializeError(error),
})
}
}
if (vms === undefined) {
return handleVm(await app.getObject(vmId))
}
const concurrency: number = getSetting(job.settings, 'concurrency', [
'',
])
if (concurrency !== 0) {
handleVm = limitConcurrency(concurrency)(handleVm)
}
await asyncMap(vms, handleVm)
})
}
app.registerJobExecutor('backup', executor)
})
@@ -683,7 +659,7 @@ export default class BackupNg {
// 2. next run should be a full
// - [ ] add a lock on the job/VDI during merge which should prevent other merges and restoration
// - [ ] check merge/transfert duration/size are what we want for delta
// - [ ] in case of failure, correctly clean VHDs for all VDIs
// - [ ] fix backup reports
//
// Low:
// - [ ] jobs should be cancelable
@@ -691,7 +667,7 @@ export default class BackupNg {
// - [ ] display queued VMs
// - [ ] snapshots and files of an old job should be detected and removed
// - [ ] delta import should support mapVdisSrs
// - [ ] size of the path? (base64url(parseUuid(uuid)))
// - [ ] size of the path? (base64url(Buffer.from(uuid.split('-').join(''), 'hex')))
// - [ ] what does mean the vmTimeout with the new concurrency? a VM can take
// a very long time to finish if there are other VMs before…
// - [ ] detect and gc uncomplete replications
@@ -716,7 +692,6 @@ export default class BackupNg {
// - [x] replicated VMs should be discriminated by VM (vatesfr/xen-orchestra#2807)
// - [x] clones of replicated VMs should not be garbage collected
// - [x] import for delta
// - [x] fix backup reports
@defer
async _backupVm (
$defer: any,
@@ -725,9 +700,7 @@ export default class BackupNg {
job: BackupJob,
schedule: Schedule,
logger: any,
taskId: string,
srs: any[],
remotes: any[]
taskId: string
): Promise<void> {
const app = this._app
const xapi = app.getXapi(vmUuid)
@@ -736,104 +709,44 @@ export default class BackupNg {
// ensure the VM itself does not have any backup metadata which would be
// copied on manual snapshots and interfere with the backup jobs
if ('xo:backup:job' in vm.other_config) {
await wrapTask(
{
logger,
message: 'clean backup metadata on VM',
parentId: taskId,
},
xapi._updateObjectMapProperty(vm, 'other_config', {
'xo:backup:job': null,
'xo:backup:schedule': null,
'xo:backup:vm': null,
})
)
await xapi._updateObjectMapProperty(vm, 'other_config', {
'xo:backup:job': null,
'xo:backup:schedule': null,
'xo:backup:vm': null,
})
}
const { id: jobId, settings } = job
const { id: scheduleId } = schedule
let exportRetention: number = getSetting(settings, 'exportRetention', [
scheduleId,
])
let copyRetention: number | void = getSetting(settings, 'copyRetention', [
scheduleId,
])
if (copyRetention === undefined) {
// if copyRetention is not defined, it uses exportRetention's value due to
// previous implementation which did not support copyRetention
copyRetention = srs.length === 0 ? 0 : exportRetention
if (remotes.length === 0) {
exportRetention = 0
}
} else if (exportRetention !== 0 && remotes.length === 0) {
throw new Error('export retention must be 0 without remotes')
}
if (copyRetention !== 0 && srs.length === 0) {
throw new Error('copy retention must be 0 without SRs')
}
if (
remotes.length !== 0 &&
srs.length !== 0 &&
(copyRetention === 0) !== (exportRetention === 0)
) {
throw new Error('both or neither copy and export retentions must be 0')
}
const exportRetention: number = getSetting(
settings,
'exportRetention',
scheduleId
)
const snapshotRetention: number = getSetting(
settings,
'snapshotRetention',
[scheduleId]
scheduleId
)
if (
copyRetention === 0 &&
exportRetention === 0 &&
snapshotRetention === 0
) {
throw new Error('copy, export and snapshot retentions cannot both be 0')
}
if (
!some(
vm.$VBDs,
vbd => vbd.type === 'Disk' && vbd.VDI !== 'OpaqueRef:NULL'
)
) {
throw new Error('no disks found')
if (exportRetention === 0) {
if (snapshotRetention === 0) {
throw new Error('export and snapshots retentions cannot both be 0')
}
}
const snapshots = vm.$snapshots
.filter(_ => _.other_config['xo:backup:job'] === jobId)
.sort(compareSnapshotTime)
xapi._assertHealthyVdiChains(vm)
const offlineSnapshot: boolean = getSetting(settings, 'offlineSnapshot', [
vmUuid,
'',
])
const startAfterSnapshot = offlineSnapshot && vm.power_state === 'Running'
if (startAfterSnapshot) {
await wrapTask(
{
logger,
message: 'shutdown VM',
parentId: taskId,
},
xapi.shutdownVm(vm)
)
}
await xapi._assertHealthyVdiChains(vm)
let snapshot: Vm = (await wrapTask(
{
parentId: taskId,
logger,
message: 'snapshot',
parentId: taskId,
result: _ => _.uuid,
},
xapi._snapshotVm(
@@ -842,23 +755,11 @@ export default class BackupNg {
`[XO Backup ${job.name}] ${vm.name_label}`
)
): any)
if (startAfterSnapshot) {
ignoreErrors.call(xapi.startVm(vm))
}
await wrapTask(
{
logger,
message: 'add metadata to snapshot',
parentId: taskId,
},
xapi._updateObjectMapProperty(snapshot, 'other_config', {
'xo:backup:job': jobId,
'xo:backup:schedule': scheduleId,
'xo:backup:vm': vmUuid,
})
)
await xapi._updateObjectMapProperty(snapshot, 'other_config', {
'xo:backup:job': jobId,
'xo:backup:schedule': scheduleId,
'xo:backup:vm': vmUuid,
})
$defer(() =>
asyncMap(
@@ -872,20 +773,18 @@ export default class BackupNg {
)
)
snapshot = ((await wrapTask(
{
logger,
message: 'waiting for uptodate snapshot record',
parentId: taskId,
},
xapi.barrier(snapshot.$ref)
): any): Vm)
snapshot = ((await xapi.barrier(snapshot.$ref): any): Vm)
if (copyRetention === 0 && exportRetention === 0) {
if (exportRetention === 0) {
return
}
const remotes = unboxIds(job.remotes)
const srs = unboxIds(job.srs)
const nTargets = remotes.length + srs.length
if (nTargets === 0) {
throw new Error('export retention must be 0 without remotes and SRs')
}
const now = Date.now()
const vmDir = getVmBackupDir(vmUuid)
@@ -901,21 +800,14 @@ export default class BackupNg {
$defer.call(xapi, 'deleteVm', snapshot)
}
let xva: any = await wrapTask(
{
logger,
message: 'start snapshot export',
parentId: taskId,
},
xapi.exportVm($cancelToken, snapshot, {
compress: job.compression === 'native',
})
)
let xva: any = await xapi.exportVm($cancelToken, snapshot, {
compress: job.compression === 'native',
})
const exportTask = xva.task
xva = xva.pipe(createSizeStream())
const forkExport =
nTargets === 1
nTargets === 0
? () => xva
: () => {
const fork = xva.pipe(new PassThrough())
@@ -943,15 +835,17 @@ export default class BackupNg {
[
...remotes.map(
wrapTaskFn(
({ id }) => ({
id => ({
data: { id, type: 'remote' },
logger,
message: 'export',
parentId: taskId,
}),
async (taskId, { handler, id: remoteId }) => {
async (taskId, remoteId) => {
const fork = forkExport()
const handler = await app.getRemoteHandler(remoteId)
const oldBackups: MetadataFull[] = (getOldEntries(
exportRetention,
await this._listVmBackups(
@@ -961,9 +855,11 @@ export default class BackupNg {
)
): any)
const deleteFirst = getSetting(settings, 'deleteFirst', [
remoteId,
])
const deleteFirst = getSetting(
settings,
'deleteFirst',
remoteId
)
if (deleteFirst) {
await this._deleteFullVmBackups(handler, oldBackups)
}
@@ -973,7 +869,9 @@ export default class BackupNg {
logger,
message: 'transfer',
parentId: taskId,
result: () => ({ size: xva.size }),
result: {
size: 0,
},
},
writeStream(fork, handler, dataFilename)
)
@@ -988,23 +886,24 @@ export default class BackupNg {
),
...srs.map(
wrapTaskFn(
({ $id: id }) => ({
id => ({
data: { id, type: 'SR' },
logger,
message: 'export',
parentId: taskId,
}),
async (taskId, sr) => {
async (taskId, srId) => {
const fork = forkExport()
const { $id: srId, xapi } = sr
const xapi = app.getXapi(srId)
const sr = xapi.getObject(srId)
const oldVms = getOldEntries(
copyRetention,
exportRetention,
listReplicatedVms(xapi, scheduleId, srId, vmUuid)
)
const deleteFirst = getSetting(settings, 'deleteFirst', [srId])
const deleteFirst = getSetting(settings, 'deleteFirst', srId)
if (deleteFirst) {
await this._deleteVms(xapi, oldVms)
}
@@ -1015,7 +914,9 @@ export default class BackupNg {
logger,
message: 'transfer',
parentId: taskId,
result: () => ({ size: xva.size }),
result: {
size: 0,
},
},
xapi._importVm($cancelToken, fork, sr, vm =>
xapi._setObjectProperties(vm, {
@@ -1053,108 +954,17 @@ export default class BackupNg {
$defer.onFailure.call(xapi, 'deleteVm', snapshot)
}
// JFT: TODO: remove when enough time has passed (~2018-09)
//
// Fix VHDs UUID (= VDI.uuid), which was not done before 2018-06-16.
await asyncMap(remotes, async ({ handler }) =>
asyncMap(
this._listVmBackups(handler, vmUuid, _ => _.mode === 'delta'),
({ _filename, vdis, vhds }) => {
const vmDir = dirname(_filename)
return asyncMap(vhds, async (vhdPath, vdiId) => {
const uuid = parseUuid(vdis[vdiId].uuid)
const baseSnapshot = last(snapshots)
if (baseSnapshot !== undefined) {
console.log(baseSnapshot.$id) // TODO: remove
// check current state
// await Promise.all([asyncMap(remotes, remoteId => {})])
}
const vhd = new Vhd(handler, `${vmDir}/${vhdPath}`)
await vhd.readHeaderAndFooter()
if (!vhd.footer.uuid.equals(uuid)) {
vhd.footer.uuid = uuid
await vhd.readBlockAllocationTable()
await vhd.writeFooter()
}
})
}
)
)
let baseSnapshot, fullVdisRequired
await (async () => {
baseSnapshot = (last(snapshots): Vm | void)
if (baseSnapshot === undefined) {
return
}
const fullRequired = { __proto__: null }
const vdis: $Dict<Vdi> = getVmDisks(baseSnapshot)
for (const { $id: srId, xapi } of srs) {
const replicatedVm = listReplicatedVms(
xapi,
scheduleId,
srId,
vmUuid
).find(vm => vm.other_config[TAG_COPY_SRC] === baseSnapshot.uuid)
if (replicatedVm === undefined) {
baseSnapshot = undefined
return
}
const replicatedVdis = countBy(
getVmDisks(replicatedVm),
vdi => vdi.other_config[TAG_COPY_SRC]
)
forEach(vdis, vdi => {
if (!(vdi.uuid in replicatedVdis)) {
fullRequired[vdi.$snapshot_of.$id] = true
}
})
}
await asyncMap(remotes, ({ handler }) => {
return asyncMap(vdis, async vdi => {
const snapshotOf = vdi.$snapshot_of
const dir = `${vmDir}/vdis/${jobId}/${snapshotOf.uuid}`
const files = await handler
.list(dir, { filter: isVhd })
.catch(_ => [])
let full = true
await asyncMap(files, async file => {
if (file[0] !== '.') {
try {
const vhd = new Vhd(handler, `${dir}/${file}`)
await vhd.readHeaderAndFooter()
if (vhd.footer.uuid.equals(parseUuid(vdi.uuid))) {
full = false
}
return
} catch (error) {
if (!(error instanceof AssertionError)) {
throw error
}
}
}
// either a temporary file or an invalid VHD
await handler.unlink(`${dir}/${file}`)
})
if (full) {
fullRequired[snapshotOf.$id] = true
}
})
})
fullVdisRequired = Object.keys(fullRequired)
})()
const deltaExport = await wrapTask(
{
logger,
message: 'start snapshot export',
parentId: taskId,
},
xapi.exportDeltaVm($cancelToken, snapshot, baseSnapshot, {
fullVdisRequired,
})
const deltaExport = await xapi.exportDeltaVm(
$cancelToken,
snapshot,
baseSnapshot
)
const metadata: MetadataDelta = {
@@ -1209,23 +1019,21 @@ export default class BackupNg {
}
})()
const isFull = some(
deltaExport.vdis,
vdi => vdi.other_config['xo:base_delta'] === undefined
)
await waitAll(
[
...remotes.map(
wrapTaskFn(
({ id }) => ({
data: { id, isFull, type: 'remote' },
id => ({
data: { id, type: 'remote' },
logger,
message: 'export',
parentId: taskId,
}),
async (taskId, { handler, id: remoteId }) => {
async (taskId, remoteId) => {
const fork = forkExport()
const handler = await app.getRemoteHandler(remoteId)
const oldBackups: MetadataDelta[] = (getOldEntries(
exportRetention,
await this._listVmBackups(
@@ -1240,14 +1048,16 @@ export default class BackupNg {
logger,
message: 'merge',
parentId: taskId,
result: size => ({ size }),
result: {
size: 0,
},
},
this._deleteDeltaVmBackups(handler, oldBackups)
)
const deleteFirst =
exportRetention > 1 &&
getSetting(settings, 'deleteFirst', [remoteId])
getSetting(settings, 'deleteFirst', remoteId)
if (deleteFirst) {
await deleteOldBackups()
}
@@ -1257,7 +1067,9 @@ export default class BackupNg {
logger,
message: 'transfer',
parentId: taskId,
result: size => ({ size }),
result: {
size: 0,
},
},
asyncMap(
fork.vdis,
@@ -1269,19 +1081,13 @@ export default class BackupNg {
let parentPath
if (isDelta) {
const vdiDir = dirname(path)
parentPath = (await handler.list(vdiDir, {
filter: filename =>
!isHiddenFile(filename) && isVhd(filename),
prependDir: true,
}))
const parent = (await handler.list(vdiDir))
.filter(isVhd)
.sort()
.pop()
// ensure parent exists and is a valid VHD
await new Vhd(handler, parentPath).readHeaderAndFooter()
parentPath = `${vdiDir}/${parent}`
}
// FIXME: should only be renamed after the metadata file has been written
await writeStream(
fork.streams[`${id}.vhd`](),
handler,
@@ -1297,18 +1103,10 @@ export default class BackupNg {
if (isDelta) {
await chainVhd(handler, parentPath, handler, path)
}
// set the correct UUID in the VHD
const vhd = new Vhd(handler, path)
await vhd.readHeaderAndFooter()
vhd.footer.uuid = parseUuid(vdi.uuid)
await vhd.readBlockAllocationTable() // required by writeFooter()
await vhd.writeFooter()
return handler.getSize(path)
})
).then(sum)
)
)
await handler.outputFile(metadataFilename, jsonMetadata)
if (!deleteFirst) {
@@ -1319,23 +1117,24 @@ export default class BackupNg {
),
...srs.map(
wrapTaskFn(
({ $id: id }) => ({
data: { id, isFull, type: 'SR' },
id => ({
data: { id, type: 'SR' },
logger,
message: 'export',
parentId: taskId,
}),
async (taskId, sr) => {
async (taskId, srId) => {
const fork = forkExport()
const { $id: srId, xapi } = sr
const xapi = app.getXapi(srId)
const sr = xapi.getObject(srId)
const oldVms = getOldEntries(
copyRetention,
exportRetention,
listReplicatedVms(xapi, scheduleId, srId, vmUuid)
)
const deleteFirst = getSetting(settings, 'deleteFirst', [srId])
const deleteFirst = getSetting(settings, 'deleteFirst', srId)
if (deleteFirst) {
await this._deleteVms(xapi, oldVms)
}
@@ -1345,14 +1144,16 @@ export default class BackupNg {
logger,
message: 'transfer',
parentId: taskId,
result: ({ transferSize }) => ({ size: transferSize }),
result: {
size: 0,
},
},
xapi.importDeltaVm(fork, {
disableStartAfterImport: false, // we'll take care of that
name_label: `${metadata.vm.name_label} (${safeDateFormat(
metadata.timestamp
)})`,
srId,
srId: sr.$id,
})
)
@@ -1384,17 +1185,19 @@ export default class BackupNg {
async _deleteDeltaVmBackups (
handler: RemoteHandler,
backups: MetadataDelta[]
): Promise<number> {
return asyncMap(backups, async backup => {
): Promise<void> {
// TODO: remove VHD as well
await asyncMap(backups, async backup => {
const filename = ((backup._filename: any): string)
await handler.unlink(filename)
return asyncMap(backup.vhds, _ =>
// $FlowFixMe injected $defer param
this._deleteVhd(handler, resolveRelativeFromFile(filename, _))
).then(sum)
}).then(sum)
return Promise.all([
handler.unlink(filename),
asyncMap(backup.vhds, _ =>
// $FlowFixMe injected $defer param
this._deleteVhd(handler, resolveRelativeFromFile(filename, _))
),
])
})
}
async _deleteFullVmBackups (
@@ -1412,50 +1215,35 @@ export default class BackupNg {
// FIXME: synchronize by job/VDI, otherwise it can cause issues with the merge
@defer
async _deleteVhd (
$defer: any,
handler: RemoteHandler,
path: string
): Promise<number> {
async _deleteVhd ($defer: any, handler: RemoteHandler, path: string) {
const vhds = await asyncMap(
await handler.list(dirname(path), { filter: isVhd, prependDir: true }),
async path => {
try {
const vhd = new Vhd(handler, path)
await vhd.readHeaderAndFooter()
return {
footer: vhd.footer,
header: vhd.header,
path,
}
} catch (error) {
// Do not fail on corrupted VHDs (usually uncleaned temporary files),
// they are probably inconsequent to the backup process and should not
// fail it.
console.warn('BackupNg#_deleteVhd', path, error)
const vhd = new Vhd(handler, path)
await vhd.readHeaderAndFooter()
return {
footer: vhd.footer,
header: vhd.header,
path,
}
}
)
const base = basename(path)
const child = vhds.find(
_ => _ !== undefined && _.header.parentUnicodeName === base
)
const child = vhds.find(_ => _.header.parentUnicodeName === base)
if (child === undefined) {
await handler.unlink(path)
return 0
return handler.unlink(path)
}
$defer.onFailure.call(handler, 'unlink', path)
const childPath = child.path
const mergedDataSize: number = await this._app.worker.mergeVhd(
await this._app.worker.mergeVhd(
handler._remote,
path,
handler._remote,
childPath
)
await handler.rename(path, childPath)
return mergedDataSize
}
async _deleteVms (xapi: Xapi, vms: Vm[]): Promise<void> {
@@ -1500,4 +1288,54 @@ export default class BackupNg {
return backups.sort(compareTimestamp)
}
  // Builds a consolidated view of the backup-ng job logs: raw log entries
  // ('job.start'/'job.end'/'task.start'/'task.end' events) are folded into
  // per-run objects, then grouped under their parent id ('roots' for
  // top-level job runs).
  //
  // NOTE(review): the shape of `data` for each event (runJobId, parentId,
  // taskId, status, result…) is defined by the logger calls elsewhere in the
  // code base — confirm against the job runner before relying on it.
  async getBackupNgLogs (runId?: string): Promise<ConsolidatedBackupNgLog> {
    const rawLogs = await this._app.getLogs('jobs')
    // Consolidated entries, indexed by the log id of their start event.
    const logs: $Dict<ConsolidatedJob & ConsolidatedTask> = {}
    forEach(rawLogs, (log, id) => {
      const { data, time, message } = log
      const { event } = data
      // `event` is only used for dispatch below, don't keep it in the entry.
      delete data.event
      switch (event) {
        case 'job.start':
          // Only backup jobs are consolidated; when a specific runId is
          // requested, every other run is ignored.
          if (data.type === 'backup' && (runId === undefined || runId === id)) {
            logs[id] = {
              ...data,
              id,
              start: time,
            }
          }
          break
        case 'job.end':
          // The end event references its start entry via runJobId; entries
          // filtered out above are silently skipped.
          const job = logs[data.runJobId]
          if (job !== undefined) {
            job.end = time
            job.duration = time - job.start
            job.error = data.error
          }
          break
        case 'task.start':
          // Keep a task only if its parent (job run or parent task) was kept.
          if (logs[data.parentId] !== undefined) {
            logs[id] = {
              ...data,
              start: time,
              message,
            }
          }
          break
        case 'task.end':
          const task = logs[data.taskId]
          if (task !== undefined) {
            task.status = data.status
            task.taskId = data.taskId
            task.result = data.result
            task.end = time
            task.duration = time - task.start
          }
      }
    })
    // Group children under their parent id; entries without a parent (the
    // job runs themselves) end up under the special 'roots' key.
    return groupBy(logs, log => log.parentId || 'roots')
  }
}

View File

@@ -141,9 +141,7 @@ const listPartitions = (() => {
valueTransform: (value, key) =>
key === 'start' || key === 'size'
? +value
: key === 'type'
? TYPES[+value] || value
: value,
: key === 'type' ? TYPES[+value] || value : value,
})
return device =>
@@ -905,8 +903,6 @@ export default class {
const xapi = this._xo.getXapi(vm)
vm = xapi.getObject(vm._xapiId)
xapi._assertHealthyVdiChains(vm)
const reg = new RegExp(
'^rollingSnapshot_[^_]+_' + escapeStringRegexp(tag) + '_'
)

View File

@@ -2,7 +2,7 @@
import type { Pattern } from 'value-matcher'
import { CancelToken } from 'promise-toolbox'
import { cancelable } from 'promise-toolbox'
import { map as mapToArray } from 'lodash'
import { noSuchObject } from 'xo-common/api-errors'
@@ -60,7 +60,6 @@ export type CallJob = {|
export type Executor = ({|
app: Object,
cancelToken: any,
data: any,
job: Job,
logger: Logger,
runJobId: string,
@@ -121,12 +120,7 @@ export default class Jobs {
_executors: { __proto__: null, [string]: Executor }
_jobs: JobsDb
_logger: Logger
_runningJobs: { __proto__: null, [string]: string }
_runs: { __proto__: null, [string]: () => void }
get runningJobs () {
return this._runningJobs
}
_runningJobs: { __proto__: null, [string]: boolean }
constructor (xo: any) {
this._app = xo
@@ -138,7 +132,6 @@ export default class Jobs {
}))
this._logger = undefined
this._runningJobs = { __proto__: null }
this._runs = { __proto__: null }
executors.call = executeCall
@@ -157,13 +150,6 @@ export default class Jobs {
})
}
cancelJobRun (id: string) {
const run = this._runs[id]
if (run !== undefined) {
return run.cancel()
}
}
async getAllJobs (type?: string): Promise<Array<Job>> {
// $FlowFixMe don't know what is the problem (JFT)
const jobs = await this._jobs.get()
@@ -215,7 +201,7 @@ export default class Jobs {
return /* await */ this._jobs.remove(id)
}
async _runJob (job: Job, schedule?: Schedule, data_?: any) {
async _runJob (cancelToken: any, job: Job, schedule?: Schedule) {
const { id } = job
const runningJobs = this._runningJobs
@@ -246,7 +232,6 @@ export default class Jobs {
event: 'job.start',
userId: job.userId,
jobId: id,
scheduleId: schedule?.id,
// $FlowFixMe only defined for CallJob
key: job.key,
type,
@@ -254,21 +239,15 @@ export default class Jobs {
runningJobs[id] = runJobId
const runs = this._runs
const { cancel, token } = CancelToken.source()
runs[runJobId] = { cancel }
let session
try {
const app = this._app
session = app.createUserConnection()
session.set('user_id', job.userId)
const status = await executor({
await executor({
app,
cancelToken: token,
data: data_,
cancelToken,
job,
logger,
runJobId,
@@ -280,7 +259,7 @@ export default class Jobs {
runJobId,
})
app.emit('job:terminated', status, job, schedule, runJobId)
app.emit('job:terminated', runJobId, job, schedule)
} catch (error) {
logger.error(`The execution of ${id} has failed.`, {
event: 'job.end',
@@ -290,24 +269,27 @@ export default class Jobs {
throw error
} finally {
delete runningJobs[id]
delete runs[runJobId]
if (session !== undefined) {
session.close()
}
}
}
@cancelable
async runJobSequence (
$cancelToken: any,
idSequence: Array<string>,
schedule?: Schedule,
data?: any
schedule?: Schedule
) {
const jobs = await Promise.all(
mapToArray(idSequence, id => this.getJob(id))
)
for (const job of jobs) {
await this._runJob(job, schedule, data)
if ($cancelToken.requested) {
break
}
await this._runJob($cancelToken, job, schedule)
}
}
}

View File

@@ -1,6 +1,6 @@
{
"name": "xo-vmdk-to-vhd",
"version": "0.1.3",
"version": "0.1.0",
"license": "AGPL-3.0",
"description": "JS lib streaming a vmdk file to a vhd",
"keywords": [
@@ -23,23 +23,24 @@
"node": ">=4"
},
"dependencies": {
"@babel/runtime": "^7.0.0-beta.49",
"@babel/runtime": "^7.0.0-beta.44",
"child-process-promise": "^2.0.3",
"fs-promise": "^2.0.0",
"pipette": "^0.9.3",
"promise-toolbox": "^0.9.5",
"tmp": "^0.0.33",
"vhd-lib": "^0.1.3"
"vhd-lib": "^0.0.0"
},
"devDependencies": {
"@babel/cli": "7.0.0-beta.49",
"@babel/core": "7.0.0-beta.49",
"@babel/plugin-transform-runtime": "^7.0.0-beta.49",
"@babel/preset-env": "7.0.0-beta.49",
"@babel/cli": "7.0.0-beta.44",
"@babel/core": "7.0.0-beta.44",
"@babel/plugin-transform-runtime": "^7.0.0-beta.44",
"@babel/preset-env": "7.0.0-beta.44",
"babel-plugin-lodash": "^3.3.2",
"cross-env": "^5.1.3",
"event-to-promise": "^0.8.0",
"execa": "^0.10.0",
"fs-extra": "^6.0.1",
"fs-extra": "^5.0.0",
"get-stream": "^3.0.0",
"index-modules": "^0.3.0",
"rimraf": "^2.6.2"

View File

@@ -1,7 +1,6 @@
import { createReadableSparseStream } from 'vhd-lib'
import VMDKDirectParser from './vmdk-read'
import readVmdkGrainTable from './vmdk-read-table'
import { VMDKDirectParser, readVmdkGrainTable } from './vmdk-read'
async function convertFromVMDK (vmdkReadStream, table) {
const parser = new VMDKDirectParser(vmdkReadStream)

View File

@@ -1,6 +1,6 @@
/* eslint-env jest */
import { createReadStream, readFile } from 'fs-extra'
import { createReadStream, readFile } from 'fs-promise'
import { exec } from 'child-process-promise'
import { fromCallback as pFromCallback } from 'promise-toolbox'
import rimraf from 'rimraf'

View File

@@ -1,97 +0,0 @@
const SECTOR_SIZE = 512
const HEADER_SIZE = 512
const FOOTER_POSITION = -1024
const DISK_CAPACITY_OFFSET = 12
const GRAIN_SIZE_OFFSET = 20
const NUM_GTE_PER_GT_OFFSET = 44
const GRAIN_ADDRESS_OFFSET = 56
/**
*
* the grain table is the array of LBAs (in byte, not in sector) ordered by their position in the VDMK file
* THIS CODE RUNS ON THE BROWSER
*/
export default async function readVmdkGrainTable (fileAccessor) {
const getLongLong = (buffer, offset, name) => {
if (buffer.length < offset + 8) {
throw new Error(
`buffer ${name} is too short, expecting ${offset + 8} minimum, got ${
buffer.length
}`
)
}
const dataView = new DataView(buffer)
const res = dataView.getUint32(offset, true)
const highBits = dataView.getUint32(offset + 4, true)
const MANTISSA_BITS_IN_DOUBLE = 53
if (highBits >= Math.pow(2, MANTISSA_BITS_IN_DOUBLE - 32)) {
throw new Error(
'Unsupported file, high order bits are to high in field ' + name
)
}
return res + highBits * Math.pow(2, 32)
}
let headerBuffer = await fileAccessor(0, HEADER_SIZE)
let grainAddrBuffer = headerBuffer.slice(
GRAIN_ADDRESS_OFFSET,
GRAIN_ADDRESS_OFFSET + 8
)
if (
new Int8Array(grainAddrBuffer).reduce((acc, val) => acc && val === -1, true)
) {
headerBuffer = await fileAccessor(
FOOTER_POSITION,
FOOTER_POSITION + HEADER_SIZE
)
grainAddrBuffer = headerBuffer.slice(
GRAIN_ADDRESS_OFFSET,
GRAIN_ADDRESS_OFFSET + 8
)
}
const grainDirPosBytes =
getLongLong(grainAddrBuffer, 0, 'grain directory address') * SECTOR_SIZE
const capacity =
getLongLong(headerBuffer, DISK_CAPACITY_OFFSET, 'capacity') * SECTOR_SIZE
const grainSize =
getLongLong(headerBuffer, GRAIN_SIZE_OFFSET, 'grain size') * SECTOR_SIZE
const grainCount = Math.ceil(capacity / grainSize)
const numGTEsPerGT = getLongLong(
headerBuffer,
NUM_GTE_PER_GT_OFFSET,
'num GTE per GT'
)
const grainTablePhysicalSize = numGTEsPerGT * 4
const grainDirectoryEntries = Math.ceil(grainCount / numGTEsPerGT)
const grainDirectoryPhysicalSize = grainDirectoryEntries * 4
const grainDirBuffer = await fileAccessor(
grainDirPosBytes,
grainDirPosBytes + grainDirectoryPhysicalSize
)
const grainDir = new Uint32Array(grainDirBuffer)
const cachedGrainTables = []
for (let i = 0; i < grainDirectoryEntries; i++) {
const grainTableAddr = grainDir[i] * SECTOR_SIZE
if (grainTableAddr !== 0) {
cachedGrainTables[i] = new Uint32Array(
await fileAccessor(
grainTableAddr,
grainTableAddr + grainTablePhysicalSize
)
)
}
}
const extractedGrainTable = []
for (let i = 0; i < grainCount; i++) {
const directoryEntry = Math.floor(i / numGTEsPerGT)
const grainTable = cachedGrainTables[directoryEntry]
if (grainTable !== undefined) {
const grainAddr = grainTable[i % numGTEsPerGT]
if (grainAddr !== 0) {
extractedGrainTable.push([i, grainAddr])
}
}
}
extractedGrainTable.sort(
([i1, grainAddress1], [i2, grainAddress2]) => grainAddress1 - grainAddress2
)
return extractedGrainTable.map(([index, grainAddress]) => index * grainSize)
}

View File

@@ -1,12 +1,12 @@
/* eslint-env jest */
import { createReadStream } from 'fs-extra'
import { createReadStream } from 'fs-promise'
import { exec } from 'child-process-promise'
import { fromCallback as pFromCallback } from 'promise-toolbox'
import rimraf from 'rimraf'
import tmp from 'tmp'
import VMDKDirectParser from './vmdk-read'
import { VMDKDirectParser } from './vmdk-read'
jest.setTimeout(10000)

View File

@@ -4,9 +4,7 @@ import zlib from 'zlib'
import { VirtualBuffer } from './virtual-buffer'
const SECTOR_SIZE = 512
const HEADER_SIZE = 512
const VERSION_OFFSET = 4
const sectorSize = 512
const compressionDeflate = 'COMPRESSION_DEFLATE'
const compressionNone = 'COMPRESSION_NONE'
const compressionMap = [compressionNone, compressionDeflate]
@@ -121,7 +119,7 @@ function parseHeader (buffer) {
}
}
async function readGrain (offsetSectors, buffer, compressed) {
const offset = offsetSectors * SECTOR_SIZE
const offset = offsetSectors * sectorSize
const size = buffer.readUInt32LE(offset + 8)
const grainBuffer = buffer.slice(offset + 12, offset + 12 + size)
const grainContent = compressed
@@ -132,7 +130,7 @@ async function readGrain (offsetSectors, buffer, compressed) {
offsetSectors: offsetSectors,
offset,
lba,
lbaBytes: lba * SECTOR_SIZE,
lbaBytes: lba * sectorSize,
size,
buffer: grainBuffer,
grain: grainContent,
@@ -148,10 +146,10 @@ function tryToParseMarker (buffer) {
}
function alignSectors (number) {
return Math.ceil(number / SECTOR_SIZE) * SECTOR_SIZE
return Math.ceil(number / sectorSize) * sectorSize
}
export default class VMDKDirectParser {
export class VMDKDirectParser {
constructor (readStream) {
this.virtualBuffer = new VirtualBuffer(readStream)
this.header = null
@@ -179,9 +177,9 @@ export default class VMDKDirectParser {
l2IsContiguous = l2IsContiguous && l1Entry - previousL1Entry === 4
} else {
l2IsContiguous =
l1Entry * SECTOR_SIZE === this.virtualBuffer.position ||
l1Entry * SECTOR_SIZE === this.virtualBuffer.position + SECTOR_SIZE
l2Start = l1Entry * SECTOR_SIZE
l1Entry * sectorSize === this.virtualBuffer.position ||
l1Entry * sectorSize === this.virtualBuffer.position + 512
l2Start = l1Entry * sectorSize
}
}
if (!l2IsContiguous) {
@@ -202,29 +200,37 @@ export default class VMDKDirectParser {
l2ByteSize,
'L2 table ' + position
)
let grainsAreInAscendingOrder = true
let previousL2Entry = 0
let firstGrain = null
for (let i = 0; i < l2entries; i++) {
const l2Entry = l2Buffer.readUInt32LE(i * 4)
if (i > 0 && previousL2Entry !== 0 && l2Entry !== 0) {
grainsAreInAscendingOrder =
grainsAreInAscendingOrder && previousL2Entry < l2Entry
}
previousL2Entry = l2Entry
if (firstGrain === null) {
firstGrain = l2Entry
}
}
const freeSpace = firstGrain * SECTOR_SIZE - this.virtualBuffer.position
if (!grainsAreInAscendingOrder) {
// TODO: here we could transform the file to a sparse VHD on the fly because we have the complete table
throw new Error('Unsupported file format')
}
const freeSpace = firstGrain * sectorSize - this.virtualBuffer.position
if (freeSpace > 0) {
await this.virtualBuffer.readChunk(freeSpace, 'freeSpace after L2')
}
}
async readHeader () {
const headerBuffer = await this.virtualBuffer.readChunk(
HEADER_SIZE,
'readHeader'
)
const headerBuffer = await this.virtualBuffer.readChunk(512, 'readHeader')
const magicString = headerBuffer.slice(0, 4).toString('ascii')
if (magicString !== 'KDMV') {
throw new Error('not a VMDK file')
}
const version = headerBuffer.readUInt32LE(VERSION_OFFSET)
const version = headerBuffer.readUInt32LE(4)
if (version !== 1 && version !== 3) {
throw new Error(
'unsupported VMDK version ' +
@@ -234,7 +240,7 @@ export default class VMDKDirectParser {
}
this.header = parseHeader(headerBuffer)
// I think the multiplications are OK, because the descriptor is always at the beginning of the file
const descriptorLength = this.header.descriptorSizeSectors * SECTOR_SIZE
const descriptorLength = this.header.descriptorSizeSectors * sectorSize
const descriptorBuffer = await this.virtualBuffer.readChunk(
descriptorLength,
'descriptor'
@@ -245,16 +251,16 @@ export default class VMDKDirectParser {
this.header.grainDirectoryOffsetSectors !== -1 &&
this.header.grainDirectoryOffsetSectors !== 0
) {
l1PositionBytes = this.header.grainDirectoryOffsetSectors * SECTOR_SIZE
l1PositionBytes = this.header.grainDirectoryOffsetSectors * sectorSize
}
const endOfDescriptor = this.virtualBuffer.position
if (
l1PositionBytes !== null &&
(l1PositionBytes === endOfDescriptor ||
l1PositionBytes === endOfDescriptor + SECTOR_SIZE)
l1PositionBytes === endOfDescriptor + sectorSize)
) {
if (l1PositionBytes === endOfDescriptor + SECTOR_SIZE) {
await this.virtualBuffer.readChunk(SECTOR_SIZE, 'skipping L1 marker')
if (l1PositionBytes === endOfDescriptor + sectorSize) {
await this.virtualBuffer.readChunk(sectorSize, 'skipping L1 marker')
}
await this._readL1()
}
@@ -265,7 +271,7 @@ export default class VMDKDirectParser {
while (!this.virtualBuffer.isDepleted) {
const position = this.virtualBuffer.position
const sector = await this.virtualBuffer.readChunk(
SECTOR_SIZE,
512,
'marker start ' + position
)
if (sector.length === 0) {
@@ -275,14 +281,14 @@ export default class VMDKDirectParser {
if (marker.size === 0) {
if (marker.value !== 0) {
await this.virtualBuffer.readChunk(
marker.value * SECTOR_SIZE,
marker.value * sectorSize,
'other marker value ' + this.virtualBuffer.position
)
}
} else if (marker.size > 10) {
const grainDiskSize = marker.size + 12
const alignedGrainDiskSize = alignSectors(grainDiskSize)
const remainOfBufferSize = alignedGrainDiskSize - SECTOR_SIZE
const remainOfBufferSize = alignedGrainDiskSize - sectorSize
const remainderOfGrainBuffer = await this.virtualBuffer.readChunk(
remainOfBufferSize,
'grain remainder ' + this.virtualBuffer.position
@@ -299,3 +305,62 @@ export default class VMDKDirectParser {
}
}
}
export async function readVmdkGrainTable (fileAccessor) {
let headerBuffer = await fileAccessor(0, 512)
let grainDirAddr = headerBuffer.slice(56, 56 + 8)
if (
new Int8Array(grainDirAddr).reduce((acc, val) => acc && val === -1, true)
) {
headerBuffer = await fileAccessor(-1024, -1024 + 512)
grainDirAddr = new DataView(headerBuffer.slice(56, 56 + 8)).getUint32(
0,
true
)
}
const grainDirPosBytes = grainDirAddr * 512
const capacity =
new DataView(headerBuffer.slice(12, 12 + 8)).getUint32(0, true) * 512
const grainSize =
new DataView(headerBuffer.slice(20, 20 + 8)).getUint32(0, true) * 512
const grainCount = Math.ceil(capacity / grainSize)
const numGTEsPerGT = new DataView(headerBuffer.slice(44, 44 + 8)).getUint32(
0,
true
)
const grainTablePhysicalSize = numGTEsPerGT * 4
const grainDirectoryEntries = Math.ceil(grainCount / numGTEsPerGT)
const grainDirectoryPhysicalSize = grainDirectoryEntries * 4
const grainDirBuffer = await fileAccessor(
grainDirPosBytes,
grainDirPosBytes + grainDirectoryPhysicalSize
)
const grainDir = new Uint32Array(grainDirBuffer)
const cachedGrainTables = []
for (let i = 0; i < grainDirectoryEntries; i++) {
const grainTableAddr = grainDir[i] * 512
if (grainTableAddr !== 0) {
cachedGrainTables[i] = new Uint32Array(
await fileAccessor(
grainTableAddr,
grainTableAddr + grainTablePhysicalSize
)
)
}
}
const extractedGrainTable = []
for (let i = 0; i < grainCount; i++) {
const directoryEntry = Math.floor(i / numGTEsPerGT)
const grainTable = cachedGrainTables[directoryEntry]
if (grainTable !== undefined) {
const grainAddr = grainTable[i % numGTEsPerGT]
if (grainAddr !== 0) {
extractedGrainTable.push([i, grainAddr])
}
}
}
extractedGrainTable.sort(
([i1, grainAddress1], [i2, grainAddress2]) => grainAddress1 - grainAddress2
)
return extractedGrainTable.map(([index, grainAddress]) => index * grainSize)
}

View File

@@ -6,7 +6,7 @@ import getStream from 'get-stream'
import rimraf from 'rimraf'
import tmp from 'tmp'
import { createReadStream, createWriteStream, stat } from 'fs-extra'
import { createReadStream, createWriteStream, stat } from 'fs-promise'
import { fromCallback as pFromCallback } from 'promise-toolbox'
import convertFromVMDK, { readVmdkGrainTable } from '.'
@@ -49,7 +49,7 @@ test('VMDK to VHD can convert a random data file with VMDKDirectParser', async (
const vhdFileName = 'from-vmdk-VMDKDirectParser.vhd'
const reconvertedFromVhd = 'from-vhd.raw'
const reconvertedFromVmdk = 'from-vhd-by-vbox.raw'
const dataSize = 100 * 1024 * 1024 // this number is an integer head/cylinder/count equation solution
const dataSize = 8355840 // this number is an integer head/cylinder/count equation solution
try {
await execa.shell(
'base64 /dev/urandom | head -c ' + dataSize + ' > ' + inputRawFileName
@@ -82,7 +82,6 @@ test('VMDK to VHD can convert a random data file with VMDKDirectParser', async (
reconvertedFromVhd,
])
await execa('qemu-img', ['compare', inputRawFileName, vhdFileName])
await execa('qemu-img', ['compare', vmdkFileName, vhdFileName])
} catch (error) {
console.error(error.stdout)
console.error(error.stderr)

View File

@@ -1,7 +1,7 @@
{
"private": false,
"name": "xo-web",
"version": "5.20.2",
"version": "5.19.2",
"license": "AGPL-3.0",
"description": "Web interface client for Xen-Orchestra",
"keywords": [
@@ -30,9 +30,10 @@
"node": ">=6"
},
"devDependencies": {
"@julien-f/freactal": "0.1.1",
"@julien-f/freactal": "0.1.0",
"@nraynaud/novnc": "0.6.1",
"@xen-orchestra/cron": "^1.0.3",
"xo-vmdk-to-vhd": "0.1.0",
"ansi_up": "^3.0.0",
"asap": "^2.0.6",
"babel-core": "^6.26.0",
@@ -59,7 +60,6 @@
"classnames": "^2.2.3",
"complex-matcher": "^0.3.0",
"cookies-js": "^1.2.2",
"copy-to-clipboard": "^3.0.8",
"d3": "^5.0.0",
"debounce-input-decorator": "^0.1.0",
"enzyme": "^3.3.0",
@@ -89,8 +89,8 @@
"lodash": "^4.6.1",
"loose-envify": "^1.1.0",
"make-error": "^1.3.2",
"marked": "^0.4.0",
"modular-cssify": "^10.0.0",
"marked": "^0.3.9",
"modular-cssify": "^8.0.0",
"moment": "^2.20.1",
"moment-timezone": "^0.5.14",
"notifyjs": "^3.0.0",
@@ -120,7 +120,7 @@
"react-test-renderer": "^15.6.2",
"react-virtualized": "^9.15.0",
"readable-stream": "^2.3.3",
"redux": "^4.0.0",
"redux": "^3.7.2",
"redux-thunk": "^2.0.1",
"reselect": "^2.5.4",
"rimraf": "^2.6.2",
@@ -134,11 +134,10 @@
"watchify": "^3.7.0",
"whatwg-fetch": "^2.0.3",
"xml2js": "^0.4.19",
"xo-acl-resolver": "^0.2.4",
"xo-acl-resolver": "^0.2.3",
"xo-common": "^0.1.1",
"xo-lib": "^0.8.0",
"xo-remote-parser": "^0.3",
"xo-vmdk-to-vhd": "^0.1.3"
"xo-remote-parser": "^0.3"
},
"scripts": {
"build": "NODE_ENV=production gulp build",

View File

@@ -7,22 +7,26 @@ const call = fn => fn()
// callbacks have been correctly initialized when there are circular dependencies
const addSubscriptions = subscriptions => Component =>
class SubscriptionWrapper extends React.PureComponent {
constructor () {
super()
// provide all props since the beginning (better behavior with Freactal)
const state = (this.state = {})
Object.keys(subscriptions).forEach(key => {
state[key] = undefined
})
}
_unsubscribes = null
componentWillMount () {
const state = {}
this._unsubscribes = map(
typeof subscriptions === 'function'
? subscriptions(this.props)
: subscriptions,
(subscribe, prop) => {
state[prop] = undefined
return subscribe(value => this.setState({ [prop]: value }))
}
(subscribe, prop) =>
subscribe(value => this.setState({ [prop]: value }))
)
// provide all props since the beginning (better behavior with Freactal)
this.setState(state)
}
componentWillUnmount () {

View File

@@ -20,10 +20,7 @@ export const Card = propTypes({
shadow: propTypes.bool,
})(({ shadow, ...props }) => {
props.className = 'card'
props.style = {
...props.style,
...(shadow ? CARD_STYLE_WITH_SHADOW : CARD_STYLE),
}
props.style = shadow ? CARD_STYLE_WITH_SHADOW : CARD_STYLE
return <div {...props} />
})

View File

@@ -142,15 +142,15 @@ export default class Select extends React.PureComponent {
simpleValue,
value,
} = props
let option
if (
autoSelectSingleOption &&
options != null &&
options.length === 1 &&
(value == null ||
(simpleValue && value === '') ||
(multi && value.length === 0)) &&
([option] = options.filter(_ => !_.disabled)).length === 1
(multi && value.length === 0))
) {
const option = options[0]
props.onChange(
simpleValue ? option[props.valueKey] : multi ? [option] : option
)

View File

@@ -1,9 +1,10 @@
import isFunction from 'lodash/isFunction'
import isString from 'lodash/isString'
import moment from 'moment'
import PropTypes from 'prop-types'
import React, { Component } from 'react'
import { connect } from 'react-redux'
import { FormattedMessage, IntlProvider as IntlProvider_ } from 'react-intl'
import { every, isFunction, isString } from 'lodash'
import locales from './locales'
import messages from './messages'
@@ -101,16 +102,8 @@ export class FormattedDuration extends Component {
)
render () {
const parsedDuration = this._parseDuration()
return (
<Tooltip
content={getMessage(
every(parsedDuration, n => n === 0)
? 'secondsFormat'
: 'durationFormat',
parsedDuration
)}
>
<Tooltip content={getMessage('durationFormat', this._parseDuration())}>
<span>{this._humanizeDuration()}</span>
</Tooltip>
)

View File

@@ -3857,8 +3857,7 @@ export default {
xosanUsedSpace: 'Espace utilisé',
// Original text: "XOSAN pack needs to be installed on each host of the pool."
xosanNeedPack:
'Le pack XOSAN doit être installé et à jour sur tous les hôtes du pool.',
xosanNeedPack: 'La pack XOSAN doit être installé sur tous les hôtes du pool.',
// Original text: "Install it now!"
xosanInstallIt: 'Installer maintenant !',

View File

@@ -41,7 +41,6 @@ const messages = {
// ----- Copiable component -----
copyToClipboard: 'Copy to clipboard',
copyUuid: 'Copy {uuid}',
// ----- Pills -----
pillMaster: 'Master',
@@ -83,9 +82,6 @@ const messages = {
newServerPage: 'Server',
newImport: 'Import',
xosan: 'XOSAN',
backupDeprecatedMessage:
'Backup is deprecated, use Backup NG instead to create new backups.',
backupNgNewPage: 'New backup NG',
backupOverviewPage: 'Overview',
backupNewPage: 'New',
backupRemotesPage: 'Remotes',
@@ -191,7 +187,6 @@ const messages = {
// ----- Forms -----
formCancel: 'Cancel',
formCreate: 'Create',
formEdit: 'Edit',
formReset: 'Reset',
formSave: 'Save',
add: 'Add',
@@ -263,9 +258,6 @@ const messages = {
jobCallInProgess: 'In progress',
jobTransferredDataSize: 'Transfer size:',
jobTransferredDataSpeed: 'Transfer speed:',
operationSize: 'Size',
operationSpeed: 'Speed',
exportType: 'Type',
jobMergedDataSize: 'Merge size:',
jobMergedDataSpeed: 'Merge speed:',
allJobCalls: 'All',
@@ -313,7 +305,6 @@ const messages = {
taskMergedDataSize: 'Merge size',
taskMergedDataSpeed: 'Merge speed',
taskError: 'Error',
taskReason: 'Reason',
saveBackupJob: 'Save',
deleteBackupSchedule: 'Remove backup job',
deleteBackupScheduleQuestion:
@@ -325,19 +316,6 @@ const messages = {
jobEditMessage:
'You are editing job {name} ({id}). Saving will override previous job state.',
scheduleEdit: 'Edit schedule',
missingBackupName: "A name is required to create the backup's job!",
missingVms: 'Missing VMs!',
missingBackupMode: 'You need to choose a backup mode!',
missingRemotes: 'Missing remotes!',
missingSrs: 'Missing SRs!',
missingSchedules: 'Missing schedules!',
missingExportRetention:
'The Backup mode and The Delta Backup mode require export retention to be higher than 0!',
missingCopyRetention:
'The CR mode and The DR mode require copy retention to be higher than 0!',
missingSnapshotRetention:
'The Rolling Snapshot mode requires snapshot retention to be higher than 0!',
retentionNeeded: 'One of the retentions needs to be higher than 0!',
scheduleAdd: 'Add a schedule',
scheduleDelete: 'Delete',
scheduleRun: 'Run schedule',
@@ -356,7 +334,6 @@ const messages = {
jobUserNotFound: "This job's creator no longer exists",
backupUserNotFound: "This backup's creator no longer exists",
redirectToMatchingVms: 'Click here to see the matching VMs',
migrateToBackupNg: 'Migrate to backup NG',
noMatchingVms: 'There are no matching VMs!',
allMatchingVms: '{icon} See the matching VMs ({nMatchingVms, number})',
backupOwner: 'Backup owner',
@@ -371,18 +348,15 @@ const messages = {
reportWhenFailure: 'Failure',
reportWhenNever: 'Never',
reportWhen: 'Report when',
concurrency: 'Concurrency',
newBackupSelection: 'Select your backup type:',
smartBackupModeSelection: 'Select backup mode:',
normalBackup: 'Normal backup',
smartBackup: 'Smart backup',
exportRetention: 'Export retention',
copyRetention: 'Copy retention',
snapshotRetention: 'Snapshot retention',
backupName: 'Name',
useDelta: 'Use delta',
useCompression: 'Use compression',
offlineSnapshot: 'Offline snapshot',
dbAndDrRequireEntreprisePlan: 'Delta Backup and DR require Entreprise plan',
crRequiresPremiumPlan: 'CR requires Premium plan',
smartBackupModeTitle: 'Smart mode',
@@ -622,15 +596,11 @@ const messages = {
vmsTabName: 'Vms',
srsTabName: 'Srs',
// ----- Pool advanced tab -----
poolEditAll: 'Edit all',
poolEditRemoteSyslog: 'Edit remote syslog for all hosts',
poolHaStatus: 'High Availability',
poolHaEnabled: 'Enabled',
poolHaDisabled: 'Disabled',
poolGpuGroups: 'GPU groups',
poolRemoteSyslogPlaceHolder: 'Logging host',
setpoolMaster: 'Master',
syslogRemoteHost: 'Remote syslog host',
poolGpuGroups: 'GPU groups',
// ----- Pool host tab -----
hostNameLabel: 'Name',
hostDescription: 'Description',
@@ -710,7 +680,6 @@ const messages = {
hostLicenseType: 'Type',
hostLicenseSocket: 'Socket',
hostLicenseExpiry: 'Expiry',
hostRemoteSyslog: 'Remote syslog',
supplementalPacks: 'Installed supplemental packs',
supplementalPackNew: 'Install new supplemental pack',
supplementalPackPoolNew: 'Install supplemental pack on every host',
@@ -765,7 +734,6 @@ const messages = {
patchNameLabel: 'Name',
patchUpdateButton: 'Install all patches',
patchDescription: 'Description',
patchVersion: 'Version',
patchApplied: 'Applied date',
patchSize: 'Size',
patchStatus: 'Status',
@@ -783,15 +751,6 @@ const messages = {
'This will install a patch only on this host. This is NOT the recommended way: please go into the Pool patch view and follow instructions there. If you are sure about this, you can continue anyway',
installPatchWarningReject: 'Go to pool',
installPatchWarningResolve: 'Install',
patchRelease: 'Release',
updatePluginNotInstalled:
'Update plugin is not installed on this host. Please run `yum install xcp-ng-updater` first.',
showChangelog: 'Show changelog',
changelog: 'Changelog',
changelogPatch: 'Patch',
changelogAuthor: 'Author',
changelogDate: 'Date',
changelogDescription: 'Description',
// ----- Pool patch tabs -----
refreshPatches: 'Refresh patches',
installPoolPatches: 'Install pool patches',
@@ -973,7 +932,6 @@ const messages = {
defaultCpuCap: 'Default ({value, number})',
pvArgsLabel: 'PV args',
xenToolsStatus: 'Xen tools version',
xenToolsNotInstalled: 'Not installed',
osName: 'OS name',
osKernel: 'OS kernel',
autoPowerOn: 'Auto power on',
@@ -998,7 +956,6 @@ const messages = {
vmCoresPerSocketIncorrectValue: 'Incorrect cores per socket value',
vmCoresPerSocketIncorrectValueSolution:
'Please change the selected value to fix it.',
vmHaDisabled: 'disabled',
vmMemoryLimitsLabel: 'Memory limits (min/max)',
vmMaxVcpus: 'vCPUs max:',
vmMaxRam: 'Memory max:',
@@ -1147,11 +1104,6 @@ const messages = {
newVmSshKey: 'SSH key',
newVmConfigDrive: 'Config drive',
newVmCustomConfig: 'Custom config',
availableTemplateVarsInfo:
'Click here to see the available template variables',
availableTemplateVarsTitle: 'Available template variables',
templateNameInfo: 'the VM\'s name. It must not contain "_"',
templateIndexInfo: "the VM's index, it will take 0 in case of single VM",
newVmBootAfterCreate: 'Boot VM after creation',
newVmMacPlaceholder: 'Auto-generated if empty',
newVmCpuWeightLabel: 'CPU weight',
@@ -1258,7 +1210,6 @@ const messages = {
scheduleName: 'Name',
scheduleTimezone: 'Timezone',
scheduleExportRetention: 'Export ret.',
scheduleCopyRetention: 'Copy ret.',
scheduleSnapshotRetention: 'Snapshot ret.',
getRemote: 'Get remote',
listRemote: 'List Remote',
@@ -1721,7 +1672,6 @@ const messages = {
logIndicationToDisable: 'Click to disable',
reportBug: 'Report a bug',
unhealthyVdiChainError: 'Job canceled to protect the VDI chain',
backupRestartVm: "Restart VM's backup",
clickForMoreInformation: 'Click for more information',
// ----- IPs ------
@@ -1816,8 +1766,7 @@ const messages = {
xosanUsedSpace: 'Used space',
xosanLicense: 'License',
xosanMultipleLicenses: 'This XOSAN has more than 1 license!',
xosanNeedPack:
'XOSAN pack needs to be installed and up to date on each host of the pool.',
xosanNeedPack: 'XOSAN pack needs to be installed on each host of the pool.',
xosanInstallIt: 'Install it now!',
xosanNeedRestart:
'Some hosts need their toolstack to be restarted before you can create an XOSAN',
@@ -1845,14 +1794,6 @@ const messages = {
xosanPbdsDetached: 'Some SRs are detached from the XOSAN',
xosanBadStatus: 'Something is wrong with: {badStatuses}',
xosanRunning: 'Running',
xosanUpdatePacks: 'Update packs',
xosanPackUpdateChecking: 'Checking for updates',
xosanPackUpdateError:
'Error while checking XOSAN packs. Please make sure that the Cloud plugin is installed and loaded and that the updater is reachable.',
xosanPackUpdateUnavailable: 'XOSAN resources are unavailable',
xosanPackUpdateUnregistered: 'Not registered for XOSAN resources',
xosanPackUpdateUpToDate: "✓ This pool's XOSAN packs are up to date!",
xosanPackUpdateVersion: 'Update pool with latest pack v{version}',
xosanDelete: 'Delete XOSAN',
xosanFixIssue: 'Fix',
xosanCreatingOn: 'Creating XOSAN on {pool}',
@@ -1869,8 +1810,12 @@ const messages = {
xosanRegister: 'Register your appliance first',
xosanLoading: 'Loading…',
xosanNotAvailable: 'XOSAN is not available at the moment',
xosanInstallPackOnHosts: 'Install XOSAN pack on these hosts:',
xosanInstallPack: 'Install {pack} v{version}?',
xosanNoPackFound:
'No compatible XOSAN pack found for your XenServer versions.',
xosanPackRequirements:
'At least one of these version requirements must be satisfied by all the hosts in this pool:',
// SR tab XOSAN
xosanVmsNotRunning: 'Some XOSAN Virtual Machines are not running',
xosanVmsNotFound: 'Some XOSAN Virtual Machines could not be found',
@@ -1953,7 +1898,6 @@ const messages = {
xosanLoadXoaPlugin: 'Load XOA plugin first',
// ----- Utils -----
secondsFormat: '{seconds, plural, one {# second} other {# seconds}}',
durationFormat:
'{days, plural, =0 {} one {# day } other {# days }}{hours, plural, =0 {} one {# hour } other {# hours }}{minutes, plural, =0 {} one {# minute } other {# minutes }}{seconds, plural, =0 {} one {# second} other {# seconds}}',
}

View File

@@ -50,17 +50,19 @@ const SrItem = propTypes({
return (state, props) => ({
container: getContainer(state, props),
})
})(({ sr, container }) => (
<span>
<Icon icon='sr' /> {sr.name_label || sr.id}
{container !== undefined && (
<span className='text-muted'> - {container.name_label}</span>
)}
{isSrWritable(sr) && (
<span>{` (${formatSize(sr.size - sr.physical_usage)} free)`}</span>
)}
</span>
))
})(({ sr, container }) => {
let label = `${sr.name_label || sr.id}`
if (isSrWritable(sr)) {
label += ` (${formatSize(sr.size - sr.physical_usage)} free)`
}
return (
<span>
<Icon icon='sr' /> {label}
</span>
)
})
)
// VM.

View File

@@ -6,6 +6,7 @@ import {
filter,
flatten,
forEach,
get,
groupBy,
includes,
isArray,
@@ -35,7 +36,6 @@ import {
createGetObjectsOfType,
createGetTags,
createSelector,
createSort,
getObject,
} from './selectors'
import { addSubscriptions, connectStore, resolveResourceSets } from './utils'
@@ -61,9 +61,7 @@ const ADDON_BUTTON_STYLE = { lineHeight: '1.4' }
const getIds = value =>
value == null || isString(value) || isInteger(value)
? value
: isArray(value)
? map(value, getIds)
: value.id
: isArray(value) ? map(value, getIds) : value.id
const getOption = (object, container) => ({
label: container
@@ -364,10 +362,40 @@ export const SelectSr = makeStoreSelect(
const getPools = createGetObjectsOfType('pool')
const getHosts = createGetObjectsOfType('host')
const getSrsByContainer = createGetObjectsOfType('SR')
.filter((_, { predicate }) => predicate || isSrWritable)
.sort()
.groupBy('$container')
const getSrsByContainer = createSelector(
createGetObjectsOfType('SR')
.filter((_, { predicate }) => predicate || isSrWritable)
.sort(),
createSelector(getHosts, getPools, (hosts, pools) => id =>
hosts[id] || pools[id]
),
(srs, containerFinder) => {
const { length } = srs
if (length >= 2) {
let sr1, sr2
const srsToModify = {}
for (let i = 1; i < length; ++i) {
sr1 = srs[i]
for (let j = 0; j < i; ++j) {
sr2 = srs[j]
if (sr1.name_label === sr2.name_label) {
srsToModify[sr1.id] = sr1
srsToModify[sr2.id] = sr2
}
}
}
forEach(srsToModify, sr => {
sr.name_label = `(${get(
containerFinder(sr.$container),
'name_label'
)}) ${sr.name_label}`
})
}
return groupBy(srs, '$container')
}
)
const getContainerIds = createSelector(getSrsByContainer, srsByContainer =>
keys(srsByContainer)
@@ -860,15 +888,16 @@ export class SelectResourceSetsNetwork extends React.PureComponent {
this.refs.select.value = value
}
_getNetworks = createSort(
createFilter(
() => this.props.resourceSet.objectsByType.network,
createSelector(
() => this.props.predicate,
predicate => predicate || (() => true)
_getNetworks = createSelector(
() => this.props.resourceSet,
({ objectsByType }) => {
const { predicate } = this.props
const networks = objectsByType['network']
return sortBy(
predicate ? filter(networks, predicate) : networks,
'name_label'
)
),
'name_label'
}
)
render () {

View File

@@ -15,7 +15,6 @@ import {
pickBy,
size,
slice,
some,
} from 'lodash'
import invoke from './invoke'
@@ -148,9 +147,7 @@ export const createFilter = (collection, predicate) =>
_createCollectionWrapper(
(collection, predicate) =>
predicate === false
? isArrayLike(collection)
? EMPTY_ARRAY
: EMPTY_OBJECT
? isArrayLike(collection) ? EMPTY_ARRAY : EMPTY_OBJECT
: predicate
? (isArrayLike(collection) ? filter : pickBy)(collection, predicate)
: collection
@@ -544,9 +541,3 @@ export const createGetVmDisks = vmSelector =>
)
)
)
export const getIsPoolAdmin = create(
create(createGetObjectsOfType('pool'), _createCollectionWrapper(Object.keys)),
getCheckPermissions,
(poolsIds, check) => some(poolsIds, poolId => check(poolId, 'administrate'))
)

View File

@@ -167,10 +167,7 @@ class ColumnHead extends Component {
})
class Checkbox extends Component {
componentDidUpdate () {
const {
props: { indeterminate },
ref,
} = this
const { props: { indeterminate }, ref } = this
if (ref !== null) {
ref.indeterminate = indeterminate
}
@@ -212,21 +209,13 @@ class IndividualAction extends Component {
(disabled, item, userData) =>
isFunction(disabled) ? disabled(item, userData) : disabled
)
_getLabel = createSelector(
() => this.props.label,
() => this.props.item,
() => this.props.userData,
(label, item, userData) =>
isFunction(label) ? label(item, userData) : label
)
_executeAction = () => {
const p = this.props
return p.handler(p.item, p.userData)
}
render () {
const { icon, item, level, redirectOnSuccess, userData } = this.props
const { icon, item, label, level, redirectOnSuccess, userData } = this.props
return (
<ActionRowButton
@@ -237,7 +226,7 @@ class IndividualAction extends Component {
handler={this._executeAction}
icon={icon}
redirectOnSuccess={redirectOnSuccess}
tooltip={this._getLabel()}
tooltip={label}
/>
)
}
@@ -251,13 +240,6 @@ class GroupedAction extends Component {
(disabled, selectedItems, userData) =>
isFunction(disabled) ? disabled(selectedItems, userData) : disabled
)
_getLabel = createSelector(
() => this.props.label,
() => this.props.selectedItems,
() => this.props.userData,
(label, selectedItems, userData) =>
isFunction(label) ? label(selectedItems, userData) : label
)
_executeAction = () => {
const p = this.props
@@ -265,7 +247,7 @@ class GroupedAction extends Component {
}
render () {
const { icon, level } = this.props
const { icon, label, level } = this.props
return (
<ActionRowButton
@@ -273,7 +255,7 @@ class GroupedAction extends Component {
disabled={this._getIsDisabled()}
handler={this._executeAction}
icon={icon}
tooltip={this._getLabel()}
tooltip={label}
/>
)
}
@@ -490,8 +472,8 @@ export default class SortedTable extends Component {
) {
this.setState({
highlighted:
(itemIndex + visibleItems.length + 1) %
visibleItems.length || 0,
(itemIndex + visibleItems.length + 1) % visibleItems.length ||
0,
})
}
break
@@ -503,8 +485,8 @@ export default class SortedTable extends Component {
) {
this.setState({
highlighted:
(itemIndex + visibleItems.length - 1) %
visibleItems.length || 0,
(itemIndex + visibleItems.length - 1) % visibleItems.length ||
0,
})
}
break
@@ -896,7 +878,7 @@ export default class SortedTable extends Component {
</span>
)
)}
{(nSelectedItems !== 0 || all) && (
{nSelectedItems !== 0 && (
<div className='pull-right'>
<ButtonGroup>
{map(groupedActions, (props, key) => (

View File

@@ -20,7 +20,6 @@ import {
mapValues,
replace,
sample,
some,
startsWith,
} from 'lodash'
@@ -29,7 +28,6 @@ import * as actions from './store/actions'
import invoke from './invoke'
import store from './store'
import { getObject } from './selectors'
import { satisfies as versionSatisfies } from 'semver'
export const EMPTY_ARRAY = Object.freeze([])
export const EMPTY_OBJECT = Object.freeze({})
@@ -525,40 +523,6 @@ export const ShortDate = ({ timestamp }) => (
<FormattedDate value={timestamp} month='short' day='numeric' year='numeric' />
)
export const findLatestPack = (packs, hostsVersions) => {
const checkVersion = version =>
!version ||
every(hostsVersions, hostVersion => versionSatisfies(hostVersion, version))
let latestPack = { version: '0' }
forEach(packs, pack => {
if (
pack.type === 'iso' &&
compareVersions(pack.version, '>', latestPack.version) &&
checkVersion(pack.requirements && pack.requirements.xenserver)
) {
latestPack = pack
}
})
if (latestPack.version === '0') {
// No compatible pack was found
return
}
return latestPack
}
export const isLatestXosanPackInstalled = (latestXosanPack, hosts) =>
latestXosanPack !== undefined &&
every(hosts, host =>
some(
host.supplementalPacks,
({ name, version }) =>
name === 'XOSAN' && version === latestXosanPack.version
)
)
// ===================================================================
export const getMemoryUsedMetric = ({ memory, memoryFree = memory }) =>

View File

@@ -23,7 +23,7 @@ class CreateNetworkModalBody extends Component {
pool: container.$pool,
name: refs.name.value,
description: refs.description.value,
pif: refs.pif.value && refs.pif.value.id,
pif: refs.pif.value.id,
mtu: refs.mtu.value,
vlan: refs.vlan.value,
}

Some files were not shown because too many files have changed in this diff Show More