Merge branch 'xo-server/master'

This commit is contained in:
Julien Fontanet 2018-02-01 14:17:39 +01:00
commit 50b081608f
134 changed files with 28524 additions and 0 deletions

View File

@ -0,0 +1,65 @@
# http://EditorConfig.org
#
# Julien Fontanet's configuration
# https://gist.github.com/julien-f/8096213
# Top-most EditorConfig file.
root = true
# Common config.
[*]
charset = utf-8
end_of_line = lf
insert_final_newline = true
trim_trailing_whitespace = true
# CoffeeScript
#
# https://github.com/polarmobile/coffeescript-style-guide/blob/master/README.md
[*.{,lit}coffee]
indent_size = 2
indent_style = space
# Markdown
[*.{md,mdwn,mdown,markdown}]
indent_size = 4
indent_style = space
# Package.json
#
# This indentation style is the one used by npm.
[package.json]
indent_size = 2
indent_style = space
# Pug (Jade)
[*.{jade,pug}]
indent_size = 2
indent_style = space
# JavaScript
#
# Two spaces seems to be the standard most common style, at least in
# Node.js (http://nodeguide.com/style.html#tabs-vs-spaces).
[*.{js,jsx,ts,tsx}]
indent_size = 2
indent_style = space
# Less
[*.less]
indent_size = 2
indent_style = space
# Sass
#
# Style used for http://libsass.com
[*.s[ac]ss]
indent_size = 2
indent_style = space
# YAML
#
# Only spaces are allowed.
[*.yaml]
indent_size = 2
indent_style = space

View File

@ -0,0 +1,15 @@
// ESLint configuration for xo-server.
//
// Based on JavaScript Standard Style, with a few stricter rules on
// top: mandatory trailing commas on multiline literals, no `var`,
// `const` where possible, and the node plugin's import checks.
module.exports = {
  extends: [
    'standard',
  ],
  // babel-eslint lets ESLint parse the Babel syntax used in src/
  // (see the "babel" section of package.json).
  parser: 'babel-eslint',
  rules: {
    'comma-dangle': ['error', 'always-multiline'],
    'no-var': 'error',
    'node/no-extraneous-import': 'error',
    'node/no-extraneous-require': 'error',
    'node/no-missing-require': 'error',
    'node/no-missing-import': 'error',
    'prefer-const': 'error',
  },
}

14
packages/xo-server/.gitignore vendored Normal file
View File

@ -0,0 +1,14 @@
/coverage/
/dist/
/node_modules/
/src/api/index.js
/src/xapi/mixins/index.js
/src/xo-mixins/index.js
npm-debug.log
npm-debug.log.*
pnpm-debug.log
pnpm-debug.log.*
yarn-error.log
.xo-server.*

View File

@ -0,0 +1,24 @@
/benchmark/
/benchmarks/
*.bench.js
*.bench.js.map
/examples/
example.js
example.js.map
*.example.js
*.example.js.map
/fixture/
/fixtures/
*.fixture.js
*.fixture.js.map
*.fixtures.js
*.fixtures.js.map
/test/
/tests/
*.spec.js
*.spec.js.map
__snapshots__/

View File

@ -0,0 +1,9 @@
language: node_js
node_js:
- stable
- 8
- 6
# Use containers.
# http://docs.travis-ci.com/user/workers/container-based-infrastructure/
sudo: false

View File

@ -0,0 +1,3 @@
# ALL ISSUES SHOULD BE CREATED IN XO-WEB'S TRACKER!
https://github.com/vatesfr/xo-web/issues

View File

@ -0,0 +1,40 @@
# Xen Orchestra Server
![](http://i.imgur.com/HVFMrTk.png)
XO-Server is part of [Xen Orchestra](https://github.com/vatesfr/xo), a web interface for XenServer or XAPI enabled hosts.
It contains all the logic of XO and handles:
- connections to all XAPI servers/pools;
- a cache system to provide the best response time possible;
- user authentication and authorization (work in progress);
- a JSON-RPC based interface for XO clients (i.e. [XO-Web](https://github.com/vatesfr/xo-web)).
[![Build Status](https://travis-ci.org/vatesfr/xo-server.svg?branch=next-release)](https://travis-ci.org/vatesfr/xo-server)
[![Dependency Status](https://david-dm.org/vatesfr/xo-server.svg?theme=shields.io)](https://david-dm.org/vatesfr/xo-server)
[![devDependency Status](https://david-dm.org/vatesfr/xo-server/dev-status.svg?theme=shields.io)](https://david-dm.org/vatesfr/xo-server#info=devDependencies)
___
## Installation
Manual install procedure is [available here](https://xen-orchestra.com/docs/from_the_sources.html).
## Compilation
Production build:
```
$ yarn run build
```
Development build:
```
$ yarn run dev
```
## How to report a bug?
All bug reports should go into the [bugtracker of xo-web](https://github.com/vatesfr/xo-web/issues).

View File

@ -0,0 +1,32 @@
Error.stackTraceLimit = 100

// Returns true when `frame` belongs to application code: it must have
// a file name, that name must contain a path separator (frames from
// evaluated or native code don't) and it must not start with
// `internal` (Node's built-in modules).
//
// Note: `name.lastIndexOf(prefix, 0) === 0` is the classic ES5
// "startsWith" idiom.  The previous code tested `!== -1`, which kept
// exactly the internal frames it was supposed to drop.
function isAppFrame (frame, sep) {
  const name = frame && frame.getFileName()
  return Boolean(
    name &&
    name.indexOf(sep) !== -1 &&
    name.lastIndexOf('internal', 0) !== 0
  )
}

// Removes internal modules from stack traces.
try {
  const sep = require('path').sep

  require('stack-chain').filter.attach(function (_, frames) {
    const filtered = frames.filter(function (frame) {
      return isAppFrame(frame, sep)
    })

    // depd (used amongst others by express) requires at least 3
    // frames in the stack.
    return filtered.length > 2
      ? filtered
      : frames
  })
} catch (_) {}

// Source maps.
try { require('julien-f-source-map-support/register') } catch (_) {}

View File

@ -0,0 +1,31 @@
#!/usr/bin/env node

'use strict'

// ===================================================================

// Better stack traces if possible.
require('../better-stacks')

// Use Bluebird for all promises as it provides better performance and
// less memory usage.
global.Promise = require('bluebird')

// Make unhandled rejected promises visible.
process.on('unhandledRejection', function (reason) {
  console.warn('[Warn] Possibly unhandled rejection:', reason && reason.stack || reason)
})

// Patch EventEmitter so that an `error` event without any listener is
// logged as a warning instead of being thrown (which would crash the
// process, Node's default behavior).
;(function (EE) {
  var proto = EE.prototype
  var emit = proto.emit
  proto.emit = function patchedError (event, error) {
    if (event === 'error' && !this.listenerCount(event)) {
      return console.warn('[Warn] Unhandled error event:', error && error.stack || error)
    }

    return emit.apply(this, arguments)
  }
})(require('events').EventEmitter)

// exec-promise handles the process exit code and error reporting;
// `require('../')` is xo-server's main entry point.
require('exec-promise')(require('../'))

View File

@ -0,0 +1,10 @@
#!/usr/bin/env node

'use strict'

// ===================================================================

// Better stack traces if possible.
require('../better-stacks')

// CLI to inspect the logs stored by xo-server; exec-promise handles
// the process exit code and error reporting.
require('exec-promise')(require('../dist/logs-cli').default)

View File

@ -0,0 +1,3 @@
#!/usr/bin/env node

// CLI to recover an account (e.g. a lost admin password); exec-promise
// handles the process exit code and error reporting.
require('exec-promise')(require('../dist/recover-account-cli').default)

View File

@ -0,0 +1,44 @@
// Vendor config: DO NOT TOUCH!
//
// See sample.config.yaml to override.
{
"http": {
"listen": [
{
"port": 80
}
],
// These options are applied to all listen entries.
"listenOptions": {
// Ciphers to use.
//
// These are the default ciphers in Node 4.2.6, we are setting
// them explicitly for older Node versions.
"ciphers": "ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-AES256-GCM-SHA384:DHE-RSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-SHA256:DHE-RSA-AES128-SHA256:ECDHE-RSA-AES256-SHA384:DHE-RSA-AES256-SHA384:ECDHE-RSA-AES256-SHA256:DHE-RSA-AES256-SHA256:HIGH:!aNULL:!eNULL:!EXPORT:!DES:!RC4:!MD5:!PSK:!SRP:!CAMELLIA",
// Tell Node to respect the cipher order.
"honorCipherOrder": true,
// Specify to use at least TLSv1.1.
// See: https://github.com/certsimple/minimum-tls-version
"secureOptions": 117440512
},
"mounts": {}
},
"datadir": "/var/lib/xo-server/data",
// Should users be created on first sign in?
//
// Necessary for external authentication providers.
"createUserOnFirstSignin": true,
// Whether API logs should contains the full request/response on
// errors.
//
// This is disabled by default for performance (lots of data) and
// security concerns (avoiding sensitive data in the logs) but can
// be turned on for investigation by the administrator.
"verboseApiLogsOnErrors": false
}

View File

@ -0,0 +1,11 @@
'use strict'

// ===================================================================

// Enable xo logs by default (can be overridden by setting the DEBUG
// environment variable before starting xo-server).
if (process.env.DEBUG === undefined) {
  process.env.DEBUG = 'app-conf,xo:*,-xo:api'
}

// Import the real main module (compiled from src/ into dist/).
module.exports = require('./dist').default // eslint-disable-line node/no-missing-require

View File

@ -0,0 +1,181 @@
{
"name": "xo-server",
"version": "5.16.0",
"license": "AGPL-3.0",
"description": "Server part of Xen-Orchestra",
"keywords": [
"xen",
"orchestra",
"xen-orchestra",
"server"
],
"homepage": "http://github.com/vatesfr/xo-server/",
"bugs": {
"url": "https://github.com/vatesfr/xo-web/issues"
},
"repository": {
"type": "git",
"url": "git://github.com/vatesfr/xo-server.git"
},
"author": "Julien Fontanet <julien.fontanet@vates.fr>",
"preferGlobal": true,
"files": [
"better-stacks.js",
"bin/",
"dist/",
"config.json",
"index.js",
"signin.pug"
],
"directories": {
"bin": "bin"
},
"engines": {
"node": ">=4.5"
},
"dependencies": {
"@marsaud/smb2-promise": "^0.2.1",
"@nraynaud/struct-fu": "^1.0.1",
"ajv": "^6.1.1",
"app-conf": "^0.5.0",
"archiver": "^2.1.0",
"babel-runtime": "^6.26.0",
"base64url": "^2.0.0",
"bind-property-descriptor": "^1.0.0",
"blocked": "^1.2.1",
"bluebird": "^3.5.1",
"body-parser": "^1.18.2",
"connect-flash": "^0.1.1",
"cookie": "^0.3.1",
"cookie-parser": "^1.4.3",
"cron": "^1.3.0",
"d3-time-format": "^2.1.1",
"debug": "^3.1.0",
"decorator-synchronized": "^0.3.0",
"deptree": "^1.0.0",
"escape-string-regexp": "^1.0.5",
"event-to-promise": "^0.8.0",
"exec-promise": "^0.7.0",
"execa": "^0.9.0",
"express": "^4.16.2",
"express-session": "^1.15.6",
"fatfs": "^0.10.4",
"from2": "^2.3.0",
"fs-extra": "^5.0.0",
"golike-defer": "^0.4.1",
"hashy": "^0.6.2",
"helmet": "^3.9.0",
"highland": "^2.11.1",
"http-proxy": "^1.16.2",
"http-request-plus": "^0.5.0",
"http-server-plus": "^0.8.0",
"human-format": "^0.10.0",
"is-redirect": "^1.0.0",
"js-yaml": "^3.10.0",
"json-rpc-peer": "0.14",
"json5": "^0.5.1",
"julien-f-source-map-support": "0.1.0",
"julien-f-unzip": "^0.2.1",
"kindof": "^2.0.0",
"level": "^2.1.1",
"level-party": "^3.0.4",
"level-sublevel": "^6.6.1",
"limit-concurrency-decorator": "^0.2.0",
"lodash": "^4.17.4",
"make-error": "^1",
"micromatch": "^3.1.4",
"minimist": "^1.2.0",
"moment-timezone": "^0.5.14",
"ms": "^2.1.1",
"multikey-hash": "^1.0.4",
"ndjson": "^1.5.0",
"parse-pairs": "^0.2.2",
"partial-stream": "0.0.0",
"passport": "^0.4.0",
"passport-local": "^1.0.0",
"pretty-format": "^22.0.3",
"promise-toolbox": "^0.9.5",
"proxy-agent": "^2.1.0",
"pug": "^2.0.0-rc.4",
"pw": "^0.0.4",
"redis": "^2.8.0",
"schema-inspector": "^1.6.8",
"semver": "^5.4.1",
"serve-static": "^1.13.1",
"split-lines": "^1.1.0",
"stack-chain": "^2.0.0",
"tar-stream": "^1.5.5",
"through2": "^2.0.3",
"tmp": "^0.0.33",
"uuid": "^3.0.1",
"value-matcher": "^0.0.0",
"ws": "^4.0.0",
"xen-api": "^0.16.4",
"xml2js": "^0.4.19",
"xo-acl-resolver": "^0.2.3",
"xo-collection": "^0.4.1",
"xo-common": "^0.1.1",
"xo-remote-parser": "^0.3",
"xo-vmdk-to-vhd": "0.0.12"
},
"devDependencies": {
"babel-cli": "^6.26.0",
"babel-core": "^6.26.0",
"babel-eslint": "^8.0.3",
"babel-plugin-lodash": "^3.3.2",
"babel-plugin-transform-decorators-legacy": "^1.3.4",
"babel-plugin-transform-runtime": "^6.23.0",
"babel-preset-env": "^1.6.1",
"babel-preset-stage-0": "^6.24.1",
"cross-env": "^5.1.3",
"eslint": "^4.13.1",
"eslint-config-standard": "^11.0.0-beta.0",
"eslint-plugin-import": "^2.8.0",
"eslint-plugin-node": "^5.2.1",
"eslint-plugin-promise": "^3.6.0",
"eslint-plugin-standard": "^3.0.1",
"husky": "^0.14.3",
"index-modules": "^0.3.0",
"jest": "^22.0.3",
"rimraf": "^2.6.2"
},
"scripts": {
"build": "cross-env NODE_ENV=production babel --source-maps --out-dir=dist/ src/",
"clean": "rimraf dist/",
"commitmsg": "yarn run test",
"dev": "cross-env NODE_ENV=development babel --watch --source-maps --out-dir=dist/ src/",
"dev-test": "jest --bail --watch",
"prebuild": "index-modules src/api src/xapi/mixins src/xo-mixins && yarn run clean",
"predev": "yarn run prebuild",
"prepublishOnly": "yarn run build",
"pretest": "eslint --ignore-path .gitignore --fix .",
"start": "node bin/xo-server",
"test": "jest"
},
"babel": {
"plugins": [
"lodash",
"transform-decorators-legacy",
"transform-runtime"
],
"presets": [
[
"env",
{
"targets": {
"node": 4
}
}
],
"stage-0"
]
},
"jest": {
"collectCoverage": true,
"testEnvironment": "node",
"roots": [
"<rootDir>/src"
],
"testRegex": "\\.spec\\.js$"
}
}

View File

@ -0,0 +1,149 @@
# BE *VERY* CAREFUL WHEN EDITING!
# YAML FILES ARE SUPER SUPER SENSITIVE TO MISTAKES IN WHITESPACE OR ALIGNMENT!
# visit http://www.yamllint.com/ to validate this file as needed
#=====================================================================
# Example XO-Server configuration.
#
# This file is automatically searched for at the following places:
# - `$HOME/.config/xo-server/config.yaml`
# - `/etc/xo-server/config.yaml`
#
# The first entries have priority.
#
# Note: paths are relative to the configuration file.
#=====================================================================
# It may be necessary to run XO-Server as a privileged user (e.g.
# `root`) for instance to allow the HTTP server to listen on a
# [privileged ports](http://www.w3.org/Daemon/User/Installation/PrivilegedPorts.html).
#
# To avoid security issues, XO-Server can drop its privileges by
# changing the user and the group it is running with.
#
# Note: XO-Server will change them just after reading the
# configuration.
# User to run XO-Server as.
#
# Note: The user can be specified using either its name or its numeric
# identifier.
#
# Default: undefined
#user: 'nobody'
# Group to run XO-Server as.
#
# Note: The group can be specified using either its name or its
# numeric identifier.
#
# Default: undefined
#group: 'nogroup'
#=====================================================================
# Configuration of the embedded HTTP server.
http:
# Hosts & ports on which to listen.
#
# By default, the server listens on [::]:80.
listen:
# Basic HTTP.
-
# Address on which the server is listening on.
#
# Set it to 'localhost' to listen only on the local host.
#
# Default: all IPv6 addresses if available, otherwise all IPv4
# addresses.
#hostname: 'localhost'
# Port on which the server is listening on.
#
# Default: undefined
port: 80
# Instead of `host` and `port` a path to a UNIX socket may be
# specified (overrides `host` and `port`).
#
# Default: undefined
#socket: './http.sock'
# Basic HTTPS.
#
# You can find the list of possible options there https://nodejs.org/docs/latest/api/tls.html#tls.createServer
# -
# # The only difference is the presence of the certificate and the
# # key.
# #
# #hostname: '127.0.0.1'
# port: 443
# # File containing the certificate (PEM format).
#
# # If a chain of certificates authorities is needed, you may bundle
# # them directly in the certificate.
# #
# # Note: the order of certificates does matter, your certificate
# # should come first followed by the certificate of the above
# # certificate authority up to the root.
# #
# # Default: undefined
# cert: './certificate.pem'
# # File containing the private key (PEM format).
# #
# # If the key is encrypted, the passphrase will be asked at
# # server startup.
# #
# # Default: undefined
# key: './key.pem'
# If set to true, all HTTP traffic will be redirected to the first
# HTTPs configuration.
#redirectToHttps: true
# List of files/directories which will be served.
mounts:
#'/': '/path/to/xo-web/dist/'
# List of proxied URLs (HTTP & WebSockets).
proxies:
# '/any/url': 'http://localhost:54722'
# HTTP proxy configuration used by xo-server to fetch resources on the
# Internet.
#
# See: https://github.com/TooTallNate/node-proxy-agent#maps-proxy-protocols-to-httpagent-implementations
#httpProxy: 'http://jsmith:qwerty@proxy.lan:3128'
#=====================================================================
# Connection to the Redis server.
redis:
# Unix sockets can be used
#
# Default: undefined
#socket: /var/run/redis/redis.sock
# Syntax: redis://[db[:password]@]hostname[:port][/db-number]
#
# Default: redis://localhost:6379/0
#uri: redis://redis.company.lan/42
# List of aliased commands.
#
# See http://redis.io/topics/security#disabling-of-specific-commands
#renameCommands:
# del: '3dda29ad-3015-44f9-b13b-fa570de92489'
# srem: '3fd758c9-5610-4e9d-a058-dbf4cb6d8bf0'
# Directory containing the database of XO.
# Currently used for logs.
#
# Default: '/var/lib/xo-server/data'
#datadir: '/var/lib/xo-server/data'

View File

@ -0,0 +1,50 @@
doctype html
html
head
meta(charset = 'utf-8')
meta(http-equiv = 'X-UA-Compatible' content = 'IE=edge,chrome=1')
meta(name = 'viewport' content = 'width=device-width, initial-scale=1.0')
title Xen Orchestra
meta(name = 'author' content = 'Vates SAS')
link(rel = 'stylesheet' href = 'index.css')
body(style = 'display: flex; height: 100vh;')
div(style = 'margin: auto; width: 20em;')
div.mb-2(style = 'display: flex;')
img(src = 'assets/logo.png' style = 'margin: auto;')
h2.text-xs-center.mb-2 Xen Orchestra
form(action = 'signin/local' method = 'post')
fieldset
if error
p.text-danger #{error}
.input-group.mb-1
span.input-group-addon
i.xo-icon-user.fa-fw
input.form-control(
name = 'username'
type = 'text'
placeholder = 'Username'
required
)
.input-group.mb-1
span.input-group-addon
i.fa.fa-key.fa-fw
input.form-control(
name = 'password'
type = 'password'
placeholder = 'Password'
required
)
.checkbox
label
input(
name = 'remember-me'
type = 'checkbox'
)
| &nbsp;
| Remember me
div
button.btn.btn-block.btn-info
i.fa.fa-sign-in
| Sign in
each label, id in strategies
div: a(href = 'signin/' + id) Sign in with #{label}

View File

@ -0,0 +1,49 @@
// Returns every ACL entry known to xo-server.
export async function get () {
  const acls = await this.getAllAcls()
  return acls
}

get.permission = 'admin'
get.description = 'get existing ACLs'

// -------------------------------------------------------------------

// Returns the explicit permissions, grouped by object, of the user
// attached to the current session.
export async function getCurrentPermissions () {
  const userId = this.session.get('user_id')
  return this.getPermissionsForUser(userId)
}

getCurrentPermissions.permission = ''
getCurrentPermissions.description = 'get (explicit) permissions by object for the current user'
// -------------------------------------------------------------------
// Grants `action` on `object` to `subject` by creating an ACL entry.
export async function add (params) {
  const { subject, object, action } = params
  await this.addAcl(subject, object, action)
}

add.permission = 'admin'
add.params = {
  subject: { type: 'string' },
  object: { type: 'string' },
  action: { type: 'string' },
}
add.description = 'add a new ACL entry'

// -------------------------------------------------------------------

// Deletes a previously created ACL entry.
export async function remove (params) {
  const { subject, object, action } = params
  await this.removeAcl(subject, object, action)
}

remove.permission = 'admin'
remove.params = {
  subject: { type: 'string' },
  object: { type: 'string' },
  action: { type: 'string' },
}
remove.description = 'remove an existing ACL entry'

View File

@ -0,0 +1,98 @@
import archiver from 'archiver'
import { basename } from 'path'
import { format } from 'json-rpc-peer'
import { forEach } from 'lodash'
// ===================================================================
// Returns the VM backups present on the given remote.
export function list (params) {
  return this.listVmBackups(params.remote)
}

list.permission = 'admin'
list.params = {
  remote: { type: 'string' },
}

// -------------------------------------------------------------------

// Scans a backed-up disk and returns its partitions.
export function scanDisk (params) {
  return this.scanDiskBackup(params.remote, params.disk)
}

scanDisk.permission = 'admin'
scanDisk.params = {
  remote: { type: 'string' },
  disk: { type: 'string' },
}

// -------------------------------------------------------------------

// Lists the files under `path` inside a (possibly partitioned) backed
// up disk.
export function scanFiles (params) {
  const { remote, disk, partition, path } = params
  return this.scanFilesInDiskBackup(remote, disk, partition, path)
}

scanFiles.permission = 'admin'
scanFiles.params = {
  remote: { type: 'string' },
  disk: { type: 'string' },
  partition: { type: 'string', optional: true },
  path: { type: 'string' },
}
// -------------------------------------------------------------------
// HTTP handler registered by `fetchFiles`: streams the requested
// files out of a disk backup into the HTTP response.
//
// A single file is sent as-is; several files are bundled into an
// archive of the requested `format` (e.g. zip).
function handleFetchFiles (req, res, { remote, disk, partition, paths, format: archiveFormat }) {
  this.fetchFilesInDiskBackup(remote, disk, partition, paths).then(files => {
    res.setHeader('content-disposition', 'attachment')
    res.setHeader('content-type', 'application/octet-stream')

    const nFiles = paths.length

    // Send lone file directly
    if (nFiles === 1) {
      files[0].pipe(res)
      return
    }

    const archive = archiver(archiveFormat)
    // On archiving failure, report a JSON-RPC error to the client.
    archive.on('error', error => {
      console.error(error)
      res.end(format.error(0, error))
    })

    forEach(files, file => {
      archive.append(file, { name: basename(file.path) })
    })
    archive.finalize()

    archive.pipe(res)
  }).catch(error => {
    // Fetching the files themselves failed: plain HTTP 500 with a
    // JSON-RPC formatted error body.
    console.error(error)
    res.writeHead(500)
    res.end(format.error(0, error))
  })
}
// Makes files from a disk backup available over HTTP and returns the
// URL to fetch them from.  A lone file keeps its own name; several
// files are served as a timestamped archive (zip by default).
export async function fetchFiles ({ format = 'zip', ...params }) {
  const { paths } = params

  let fileName
  if (paths.length > 1) {
    fileName = `restore_${new Date().toJSON()}.${format}`
  } else {
    fileName = basename(paths[0])
  }

  const url = await this.registerHttpRequest(
    handleFetchFiles,
    { ...params, format },
    { suffix: encodeURI(`/${fileName}`) }
  )
  return { $getFrom: url }
}

fetchFiles.permission = 'admin'
fetchFiles.params = {
  remote: { type: 'string' },
  disk: { type: 'string' },
  format: { type: 'string', optional: true },
  partition: { type: 'string', optional: true },
  paths: {
    type: 'array',
    items: { type: 'string' },
    minLength: 1,
  },
}

View File

@ -0,0 +1,69 @@
import { parseSize } from '../utils'
import { unauthorized } from 'xo-common/api-errors'
// ===================================================================
// Creates a new VDI on a SR and, when `vm` is given, attaches it to
// that VM through a new VBD.
//
// If the target VM belongs to a resource set, the disk space is
// checked and accounted against that set; otherwise the user must be
// allowed to administrate the SR.
export async function create ({ name, size, sr, vm, bootable, position, mode }) {
  const attach = vm !== undefined
  const resourceSet = attach ? vm.resourceSet : undefined
  const userId = this.user.id

  if (resourceSet != null) {
    await this.checkResourceSetConstraints(resourceSet, userId, [ sr.id ])
    await this.allocateLimitsInResourceSet({ disk: size }, resourceSet)
  } else {
    const allowed = await this.hasPermissions(userId, [ [ sr.id, 'administrate' ] ])
    if (!allowed) {
      throw unauthorized()
    }
  }

  const xapi = this.getXapi(sr)
  const vdi = await xapi.createVdi({
    name_label: name,
    size,
    sr: sr._xapiId,
  })

  if (attach) {
    await xapi.createVbd({
      bootable,
      mode,
      userdevice: position,
      vdi: vdi.$id,
      vm: vm._xapiId,
    })
  }

  return vdi.$id
}

create.description = 'create a new disk on a SR'
create.params = {
  name: { type: 'string' },
  size: { type: ['integer', 'string'] },
  sr: { type: 'string' },
  vm: { type: 'string', optional: true },
  bootable: { type: 'boolean', optional: true },
  mode: { type: 'string', optional: true },
  position: { type: 'string', optional: true },
}
create.resolve = {
  vm: ['vm', 'VM', 'administrate'],
  sr: ['sr', 'SR', false],
}
// -------------------------------------------------------------------
// Grows an existing VDI to the requested size (integer bytes or a
// human-readable string accepted by `parseSize`).
export async function resize ({ vdi, size }) {
  const bytes = parseSize(size)
  await this.getXapi(vdi).resizeVdi(vdi._xapiId, bytes)
}

resize.description = 'resize an existing VDI'
resize.params = {
  id: { type: 'string' },
  size: { type: ['integer', 'string'] },
}
resize.resolve = {
  vdi: ['id', ['VDI', 'VDI-snapshot'], 'administrate'],
}

View File

@ -0,0 +1,60 @@
// Enables Docker management for the given VM.
export async function register (params) {
  const { vm } = params
  await this.getXapi(vm).registerDockerContainer(vm._xapiId)
}

register.description = 'Register the VM for Docker management'
register.params = {
  vm: { type: 'string' },
}
register.resolve = {
  vm: ['vm', 'VM', 'administrate'],
}

// -----------------------------------------------------------------------------

// Disables Docker management for the given VM.
export async function deregister (params) {
  const { vm } = params
  await this.getXapi(vm).unregisterDockerContainer(vm._xapiId)
}

deregister.description = 'Deregister the VM for Docker management'
deregister.params = {
  vm: { type: 'string' },
}
deregister.resolve = {
  vm: ['vm', 'VM', 'administrate'],
}
// -----------------------------------------------------------------------------
// Container lifecycle actions — each one delegates to the
// corresponding Docker plugin call through XAPI.

export async function start (params) {
  const { vm, container } = params
  await this.getXapi(vm).startDockerContainer(vm._xapiId, container)
}

export async function stop (params) {
  const { vm, container } = params
  await this.getXapi(vm).stopDockerContainer(vm._xapiId, container)
}

export async function restart (params) {
  const { vm, container } = params
  await this.getXapi(vm).restartDockerContainer(vm._xapiId, container)
}

export async function pause (params) {
  const { vm, container } = params
  await this.getXapi(vm).pauseDockerContainer(vm._xapiId, container)
}

export async function unpause (params) {
  const { vm, container } = params
  await this.getXapi(vm).unpauseDockerContainer(vm._xapiId, container)
}

// All lifecycle actions share the same parameters and permission.
;[start, stop, restart, pause, unpause].forEach(fn => {
  fn.params = {
    vm: { type: 'string' },
    container: { type: 'string' },
  }
  fn.resolve = {
    vm: ['vm', 'VM', 'operate'],
  }
})

View File

@ -0,0 +1,91 @@
// Creates a new group and returns its identifier.
export async function create ({ name }) {
  const group = await this.createGroup({ name })
  return group.id
}

create.description = 'creates a new group'
create.permission = 'admin'
create.params = {
  name: { type: 'string' },
}

// -------------------------------------------------------------------

// Deletes an existing group.  `delete` is a reserved word, hence the
// internal `delete_` name.
async function delete_ ({ id }) {
  await this.deleteGroup(id)
}
export { delete_ as delete }

delete_.description = 'deletes an existing group'
delete_.permission = 'admin'
delete_.params = {
  id: { type: 'string' },
}
// -------------------------------------------------------------------
// Returns the list of every existing group.
export async function getAll () {
  return this.getAllGroups()
}

getAll.description = 'returns all the existing group'
getAll.permission = 'admin'

// -------------------------------------------------------------------

// Replaces the whole member list of a group with the given user ids.
export async function setUsers (params) {
  const { id, userIds } = params
  await this.setGroupUsers(id, userIds)
}

setUsers.description = 'sets the users belonging to a group'
setUsers.permission = 'admin'
setUsers.params = {
  id: { type: 'string' },
  userIds: {},
}
// -------------------------------------------------------------------
// Adds the user to the group's member list.
export async function addUser (params) {
  const { id, userId } = params
  await this.addUserToGroup(userId, id)
}

addUser.description = 'adds a user to a group'
addUser.permission = 'admin'
addUser.params = {
  id: { type: 'string' },
  userId: { type: 'string' },
}

// -------------------------------------------------------------------

// Removes the user from the group's member list.
export async function removeUser (params) {
  const { id, userId } = params
  await this.removeUserFromGroup(userId, id)
}

removeUser.description = 'removes a user from a group'
removeUser.permission = 'admin'
removeUser.params = {
  id: { type: 'string' },
  userId: { type: 'string' },
}

// -------------------------------------------------------------------

// Updates the properties (currently only the name) of a group.
export async function set (params) {
  const { id, name } = params
  await this.updateGroup(id, { name })
}

set.description = 'changes the properties of an existing group'
set.permission = 'admin'
set.params = {
  id: { type: 'string' },
  name: { type: 'string', optional: true },
}

View File

@ -0,0 +1,295 @@
import {format} from 'json-rpc-peer'
// ===================================================================
// Updates the editable properties (label, description) of a host.
export function set ({
  host,

  // TODO: use camel case.
  name_label: nameLabel,
  name_description: nameDescription,
}) {
  const properties = { nameLabel, nameDescription }
  return this.getXapi(host).setHostProperties(host._xapiId, properties)
}

set.description = 'changes the properties of an host'
set.params = {
  id: { type: 'string' },
  name_label: {
    type: 'string',
    optional: true,
  },
  name_description: {
    type: 'string',
    optional: true,
  },
}
set.resolve = {
  host: ['id', 'host', 'administrate'],
}
// -------------------------------------------------------------------
// FIXME: set force to false per default when correctly implemented in
// UI.

// Reboots the host; by default it is forced, i.e. done even with VMs
// still resident.
export function restart (params) {
  const { host } = params
  const force = params.force === undefined ? true : params.force
  return this.getXapi(host).rebootHost(host._xapiId, force)
}

restart.description = 'restart the host'
restart.params = {
  id: { type: 'string' },
  force: {
    type: 'boolean',
    optional: true,
  },
}
restart.resolve = {
  host: ['id', 'host', 'operate'],
}

// -------------------------------------------------------------------

// Restarts the Xen agent (toolstack) on the host.
export function restartAgent (params) {
  const { host } = params
  return this.getXapi(host).restartHostAgent(host._xapiId)
}

restartAgent.description = 'restart the Xen agent on the host'
restartAgent.params = {
  id: { type: 'string' },
}
restartAgent.resolve = {
  host: ['id', 'host', 'administrate'],
}

// TODO: remove deprecated alias
export { restartAgent as restart_agent } // eslint-disable-line camelcase
// -------------------------------------------------------------------
// Powers the host on.
export function start (params) {
  const { host } = params
  return this.getXapi(host).powerOnHost(host._xapiId)
}

start.description = 'start the host'
start.params = {
  id: { type: 'string' },
}
start.resolve = {
  host: ['id', 'host', 'operate'],
}

// -------------------------------------------------------------------

// Shuts the host down.
export function stop (params) {
  const { host } = params
  return this.getXapi(host).shutdownHost(host._xapiId)
}

stop.description = 'stop the host'
stop.params = {
  id: { type: 'string' },
}
stop.resolve = {
  host: ['id', 'host', 'operate'],
}

// -------------------------------------------------------------------

// Ejects the host from its pool.
export function detach (params) {
  const { host } = params
  return this.getXapi(host).ejectHostFromPool(host._xapiId)
}

detach.description = 'eject the host of a pool'
detach.params = {
  id: { type: 'string' },
}
detach.resolve = {
  host: ['id', 'host', 'administrate'],
}
// -------------------------------------------------------------------
// Allows new VMs to be started on the host.
export function enable ({ host }) {
  return this.getXapi(host).enableHost(host._xapiId)
}

enable.description = 'enable to create VM on the host'
enable.params = {
  id: { type: 'string' },
}
enable.resolve = {
  host: ['id', 'host', 'administrate'],
}

// -------------------------------------------------------------------

// Prevents new VMs from being started on the host.
export function disable ({ host }) {
  return this.getXapi(host).disableHost(host._xapiId)
}

// Fixed typo in the description: 'hsot' -> 'host'.
disable.description = 'disable to create VM on the host'
disable.params = {
  id: { type: 'string' },
}
disable.resolve = {
  host: ['id', 'host', 'administrate'],
}

// -------------------------------------------------------------------

// Removes the host record from the XAPI database.
export function forget ({ host }) {
  return this.getXapi(host).forgetHost(host._xapiId)
}

forget.description = 'remove the host record from XAPI database'
forget.params = {
  id: { type: 'string' },
}
forget.resolve = {
  host: ['id', 'host', 'administrate'],
}
// -------------------------------------------------------------------
// Returns an array of missing new patches in the host.
// Returns an empty array if up-to-date.
// Throws an error if the host is not running the latest XS version.
export function listMissingPatches (params) {
  const { host } = params
  const xapi = this.getXapi(host)
  return xapi.listMissingPoolPatchesOnHost(host._xapiId)
}

listMissingPatches.description = 'return an array of missing new patches in the host'
listMissingPatches.params = {
  host: { type: 'string' },
}
listMissingPatches.resolve = {
  host: ['host', 'host', 'view'],
}

// -------------------------------------------------------------------

// Installs a single patch, identified by its UUID, on the host.
export function installPatch (params) {
  const { host, patch: patchUuid } = params
  return this.getXapi(host).installPoolPatchOnHost(patchUuid, host._xapiId)
}

installPatch.description = 'install a patch on an host'
installPatch.params = {
  host: { type: 'string' },
  patch: { type: 'string' },
}
installPatch.resolve = {
  host: ['host', 'host', 'administrate'],
}

// -------------------------------------------------------------------

// Installs every missing pool patch on the host.
export function installAllPatches (params) {
  const { host } = params
  return this.getXapi(host).installAllPoolPatchesOnHost(host._xapiId)
}

installAllPatches.description = 'install all the missing patches on a host'
installAllPatches.params = {
  host: { type: 'string' },
}
installAllPatches.resolve = {
  host: ['host', 'host', 'administrate'],
}
// -------------------------------------------------------------------
// Suspends every resident VM then shuts the host down.
export function emergencyShutdownHost (params) {
  const { host } = params
  return this.getXapi(host).emergencyShutdownHost(host._xapiId)
}

emergencyShutdownHost.description = 'suspend all VMs and shutdown host'
emergencyShutdownHost.params = {
  host: { type: 'string' },
}
emergencyShutdownHost.resolve = {
  host: ['host', 'host', 'administrate'],
}

// -------------------------------------------------------------------

// Returns usage statistics of the host at the given granularity.
export function stats (params) {
  const { host, granularity } = params
  return this.getXapiHostStats(host, granularity)
}

stats.description = 'returns statistic of the host'
stats.params = {
  host: { type: 'string' },
  granularity: {
    type: 'string',
    optional: true,
  },
}
stats.resolve = {
  host: ['host', 'host', 'view'],
}
// -------------------------------------------------------------------
// HTTP handler registered by `installSupplementalPack`: forwards the
// uploaded ISO stream to XAPI and reports the result as a JSON-RPC
// response.
async function handleInstallSupplementalPack (req, res, { hostId }) {
  const xapi = this.getXapi(hostId)

  // Timeout seems to be broken in Node 4.
  // See https://github.com/nodejs/node/issues/3319
  req.setTimeout(43200000) // 12 hours
  // Expose the upload size on the stream — presumably so the XAPI
  // call can send a content length; confirm against installSupplementalPack.
  req.length = req.headers['content-length']

  try {
    await xapi.installSupplementalPack(req, { hostId })
    res.end(format.response(0))
  } catch (e) {
    res.writeHead(500)
    res.end(format.error(0, new Error(e.message)))
  }
}

// Returns a URL the client can upload (POST) the supplemental pack
// ISO to.
export async function installSupplementalPack ({host}) {
  return {
    $sendTo: (await this.registerHttpRequest(handleInstallSupplementalPack, { hostId: host.id })),
  }
}

installSupplementalPack.description = 'installs supplemental pack from ISO file'
installSupplementalPack.params = {
  host: { type: 'string' },
}
installSupplementalPack.resolve = {
  // NOTE(review): sibling methods use 'administrate' here — confirm
  // 'admin' is the intended permission level.
  host: ['host', 'host', 'admin'],
}

View File

@ -0,0 +1,44 @@
import { unauthorized } from 'xo-common/api-errors'
// Creates a new IP pool from the raw properties object (validated downstream).
export function create (props) {
  return this.createIpPool(props)
}
create.permission = 'admin'
create.description = 'Creates a new ipPool'
// -------------------------------------------------------------------
// Deletes an IP pool by id ("delete" is reserved, hence the delete_ alias).
function delete_ ({ id }) {
  return this.deleteIpPool(id)
}
export { delete_ as delete }
delete_.permission = 'admin'
delete_.description = 'Delete an ipPool'
// -------------------------------------------------------------------
// Lists IP pools. Admins may list on behalf of another user via
// `params.userId`; non-admins are restricted to their own pools.
export function getAll (params) {
  const { user } = this
  if (!user) {
    throw unauthorized()
  }
  return this.getAllIpPools(user.permission === 'admin'
    ? params && params.userId
    : user.id
  )
}
getAll.description = 'List all ipPools'
// -------------------------------------------------------------------
// Updates an existing IP pool; all properties except `id` are forwarded.
export function set ({ id, ...props }) {
  return this.updateIpPool(id, props)
}
set.permission = 'admin'
set.description = 'Allow to modify an existing ipPool'

View File

@ -0,0 +1,110 @@
// FIXME so far, no acls for jobs
// Returns every job known to the server.
export async function getAll () {
  return /* await */ this.getAllJobs()
}
getAll.permission = 'admin'
getAll.description = 'Gets all available jobs'
// Fetches a single job by id.
export async function get (id) {
  return /* await */ this.getJob(id)
}
get.permission = 'admin'
get.description = 'Gets an existing job'
get.params = {
  id: {type: 'string'},
}
// Creates a job; defaults its owner to the currently signed-in user.
export async function create ({job}) {
  if (!job.userId) {
    job.userId = this.session.get('user_id')
  }
  return (await this.createJob(job)).id
}
create.permission = 'admin'
create.description = 'Creates a new job from description object'
create.params = {
  job: {
    type: 'object',
    properties: {
      userId: {type: 'string', optional: true},
      name: {type: 'string', optional: true},
      timeout: {type: 'number', optional: true},
      type: {type: 'string'},
      key: {type: 'string'},
      method: {type: 'string'},
      paramsVector: {
        type: 'object',
        properties: {
          type: {type: 'string'},
          items: {
            type: 'array',
            items: {
              type: 'object',
            },
          },
        },
        optional: true,
      },
    },
  },
}
// Updates an existing job; `id` is required, other fields are optional.
export async function set ({job}) {
  await this.updateJob(job)
}
set.permission = 'admin'
set.description = 'Modifies an existing job from a description object'
set.params = {
  job: {
    type: 'object',
    properties: {
      id: {type: 'string'},
      name: {type: 'string', optional: true},
      // `null` clears a previously set timeout.
      timeout: {type: ['number', 'null'], optional: true},
      type: {type: 'string', optional: true},
      key: {type: 'string', optional: true},
      method: {type: 'string', optional: true},
      paramsVector: {
        type: 'object',
        properties: {
          type: {type: 'string'},
          items: {
            type: 'array',
            items: {
              type: 'object',
            },
          },
        },
        optional: true,
      },
    },
  },
}
// Removes a job by id ("delete" is reserved, hence the delete_ alias).
async function delete_ ({id}) {
  await this.removeJob(id)
}
delete_.permission = 'admin'
delete_.description = 'Deletes an existing job'
delete_.params = {
  id: {type: 'string'},
}
export {delete_ as delete}
// Runs the given jobs one after another, in the provided order.
export async function runSequence ({idSequence}) {
  await this.runJobSequence(idSequence)
}
runSequence.permission = 'admin'
runSequence.description = 'Runs jobs sequentially, in the provided order'
runSequence.params = {
  idSequence: {type: 'array', items: {type: 'string'}},
}

View File

@ -0,0 +1,38 @@
// Collects all log entries of a namespace into a single { key: value } map
// by draining the logger's read stream.
export async function get ({namespace}) {
  const logger = await this.getLogger(namespace)

  return new Promise((resolve, reject) => {
    const logs = {}

    logger.createReadStream()
      .on('data', (data) => {
        logs[data.key] = data.value
      })
      .on('end', () => {
        resolve(logs)
      })
      .on('error', reject)
  })
}

get.description = 'returns logs list for one namespace'
get.params = {
  namespace: { type: 'string' },
}
get.permission = 'admin'
// -------------------------------------------------------------------
// Deletes one or several log entries from a namespace.
//
// `id` may be a single key or an array of keys, as declared in the params
// schema below — the previous implementation passed a possible array
// straight to `logger.del()` and never awaited the deletion, so failures
// were silently dropped.
async function delete_ ({namespace, id}) {
  const logger = await this.getLogger(namespace)

  const ids = Array.isArray(id) ? id : [ id ]

  // Await the deletions so that errors are reported to the API caller.
  await Promise.all(ids.map(id => logger.del(id)))
}

delete_.description = 'deletes one or several logs from a namespace'
delete_.params = {
  id: { type: [ 'array', 'string' ] },
  namespace: { type: 'string' },
}
delete_.permission = 'admin'

export {delete_ as delete}

View File

@ -0,0 +1,12 @@
// Destroys a XAPI message object ("delete" is reserved, hence the alias).
async function delete_ ({ message }) {
  const xapi = this.getXapi(message)
  await xapi.call('message.destroy', message._xapiRef)
}
export {delete_ as delete}

delete_.params = {
  id: { type: 'string' },
}

delete_.resolve = {
  message: ['id', 'message', 'administrate'],
}

View File

@ -0,0 +1,120 @@
import { mapToArray } from '../utils'
/**
 * Lists the network bond modes supported by XenServer.
 *
 * A fresh array is returned on each call so callers may freely mutate it.
 *
 * @returns {string[]} the supported bond modes
 */
export function getBondModes () {
  const modes = ['balance-slb', 'active-backup', 'lacp']
  return modes
}
// Creates a plain (optionally VLAN-tagged) network on the pool.
// `mtu` and `vlan` are coerced to numbers because they may arrive as strings.
export async function create ({ pool, name, description, pif, mtu = 1500, vlan = 0 }) {
  return this.getXapi(pool).createNetwork({
    name,
    description,
    pifId: pif && this.getObject(pif, 'PIF')._xapiId,
    mtu: +mtu,
    vlan: +vlan,
  })
}

create.params = {
  pool: { type: 'string' },
  name: { type: 'string' },
  description: { type: 'string', optional: true },
  pif: { type: 'string', optional: true },
  mtu: { type: ['integer', 'string'], optional: true },
  vlan: { type: ['integer', 'string'], optional: true },
}
create.resolve = {
  pool: ['pool', 'pool', 'administrate'],
}
create.permission = 'admin'

// =================================================================

// Creates a bonded network over several PIFs.
export async function createBonded ({ pool, name, description, pifs, mtu = 1500, mac, bondMode }) {
  return this.getXapi(pool).createBondedNetwork({
    name,
    description,
    pifIds: mapToArray(pifs, pif =>
      this.getObject(pif, 'PIF')._xapiId
    ),
    mtu: +mtu,
    mac,
    bondMode,
  })
}

createBonded.params = {
  pool: { type: 'string' },
  name: { type: 'string' },
  description: { type: 'string', optional: true },
  pifs: {
    type: 'array',
    items: {
      type: 'string',
    },
  },
  mtu: { type: ['integer', 'string'], optional: true },
  mac: { type: 'string', optional: true },
  // RegExp since schema-inspector does not provide a param check based on an enumeration
  bondMode: { type: 'string', pattern: new RegExp(`^(${getBondModes().join('|')})$`) },
}
createBonded.resolve = {
  pool: ['pool', 'pool', 'administrate'],
}
createBonded.permission = 'admin'
createBonded.description = 'Create a bonded network. bondMode can be balance-slb, active-backup or lacp'
// ===================================================================
// Updates a network's label, description and/or default locking mode.
// The snake_case parameter names mirror the XAPI field names.
export async function set ({
  network,

  // TODO: use camel case.
  name_description: nameDescription,
  name_label: nameLabel,
  defaultIsLocked,
  id,
}) {
  await this.getXapi(network).setNetworkProperties(network._xapiId, {
    nameDescription,
    nameLabel,
    defaultIsLocked,
  })
}

set.params = {
  id: {
    type: 'string',
  },
  name_label: {
    type: 'string',
    optional: true,
  },
  name_description: {
    type: 'string',
    optional: true,
  },
  defaultIsLocked: {
    type: 'boolean',
    optional: true,
  },
}

set.resolve = {
  network: ['id', 'network', 'administrate'],
}

// =================================================================

// Deletes the network ("delete" is reserved, hence the delete_ alias).
export async function delete_ ({ network }) {
  return this.getXapi(network).deleteNetwork(network._xapiId)
}
export {delete_ as delete}

delete_.params = {
  id: { type: 'string' },
}

delete_.resolve = {
  network: ['id', 'network', 'administrate'],
}

View File

@ -0,0 +1,49 @@
// FIXME: too low level, should be removed.
// ===================================================================
// Delete
// Destroys a PBD ("delete" is reserved, hence the delete_ alias).
async function delete_ ({PBD}) {
  // TODO: check if PBD is attached before
  await this.getXapi(PBD).call('PBD.destroy', PBD._xapiRef)
}
export {delete_ as delete}

delete_.params = {
  id: { type: 'string' },
}

delete_.resolve = {
  PBD: ['id', 'PBD', 'administrate'],
}

// ===================================================================
// Disconnect

// Unplugs the PBD, detaching the SR from its host.
export async function disconnect ({ pbd }) {
  return this.getXapi(pbd).unplugPbd(pbd._xapiId)
}

disconnect.params = {
  id: { type: 'string' },
}

disconnect.resolve = {
  pbd: ['id', 'PBD', 'administrate'],
}

// ===================================================================
// Connect

// Plugs the PBD, attaching the SR to its host.
export async function connect ({PBD}) {
  // TODO: check if PBD is attached before
  await this.getXapi(PBD).call('PBD.plug', PBD._xapiRef)
}

connect.params = {
  id: { type: 'string' },
}

connect.resolve = {
  PBD: ['id', 'PBD', 'administrate'],
}

View File

@ -0,0 +1,93 @@
// TODO: too low level, move into host.
import { IPV4_CONFIG_MODES, IPV6_CONFIG_MODES } from '../xapi'
// Returns the IPv4 configuration modes supported by XAPI (imported constants).
export function getIpv4ConfigurationModes () {
  return IPV4_CONFIG_MODES
}

// Returns the IPv6 configuration modes supported by XAPI (imported constants).
export function getIpv6ConfigurationModes () {
  return IPV6_CONFIG_MODES
}

// ===================================================================
// Delete

// Destroys the PIF ("delete" is reserved, hence the delete_ alias).
async function delete_ ({pif}) {
  // TODO: check if PIF is attached before
  await this.getXapi(pif).call('PIF.destroy', pif._xapiRef)
}
export {delete_ as delete}

delete_.params = {
  id: { type: 'string' },
}

delete_.resolve = {
  pif: ['id', 'PIF', 'administrate'],
}

// ===================================================================
// Disconnect

// Unplugs the PIF.
export async function disconnect ({pif}) {
  // TODO: check if PIF is attached before
  await this.getXapi(pif).call('PIF.unplug', pif._xapiRef)
}

disconnect.params = {
  id: { type: 'string' },
}

disconnect.resolve = {
  pif: ['id', 'PIF', 'administrate'],
}
// ===================================================================
// Connect

// Plugs the PIF.
export async function connect ({pif}) {
  // TODO: check if PIF is attached before
  await this.getXapi(pif).call('PIF.plug', pif._xapiRef)
}

connect.params = {
  id: { type: 'string' },
}

connect.resolve = {
  pif: ['id', 'PIF', 'administrate'],
}
// ===================================================================
// Reconfigure IP

// Reconfigures the PIF's IP settings; empty strings are valid XAPI values
// meaning "unset", hence the '' defaults.
export async function reconfigureIp ({ pif, mode = 'DHCP', ip = '', netmask = '', gateway = '', dns = '' }) {
  await this.getXapi(pif).call('PIF.reconfigure_ip', pif._xapiRef, mode, ip, netmask, gateway, dns)
}

reconfigureIp.params = {
  id: { type: 'string', optional: true },
  mode: { type: 'string', optional: true },
  ip: { type: 'string', optional: true },
  netmask: { type: 'string', optional: true },
  gateway: { type: 'string', optional: true },
  dns: { type: 'string', optional: true },
}

reconfigureIp.resolve = {
  pif: ['id', 'PIF', 'administrate'],
}

// ===================================================================

// Changes the VLAN of the PIF.
export async function editPif ({ pif, vlan }) {
  await this.getXapi(pif).editPif(pif._xapiId, { vlan })
}

editPif.params = {
  id: { type: 'string' },
  vlan: { type: ['integer', 'string'] },
}

editPif.resolve = {
  pif: ['id', 'PIF', 'administrate'],
}

View File

@ -0,0 +1,125 @@
// Lists every installed plugin with its metadata and configuration.
export async function get () {
  return /* await */ this.getPlugins()
}

get.description = 'returns a list of all installed plugins'

get.permission = 'admin'

// -------------------------------------------------------------------

// Replaces a plugin's configuration; `configuration` is free-form and
// validated against the plugin's own schema downstream.
export async function configure ({ id, configuration }) {
  await this.configurePlugin(id, configuration)
}

configure.description = 'sets the configuration of a plugin'

configure.params = {
  id: {
    type: 'string',
  },
  configuration: {},
}

configure.permission = 'admin'
// -------------------------------------------------------------------
// Disables automatic loading of a plugin at server start.
export async function disableAutoload ({ id }) {
  await this.disablePluginAutoload(id)
}

// Fix: the description was an empty string; mirror enableAutoload's wording.
disableAutoload.description = 'disables a plugin, preventing it to be loaded automatically'

disableAutoload.params = {
  id: {
    type: 'string',
  },
}

disableAutoload.permission = 'admin'
// -------------------------------------------------------------------
// Enables automatic loading of a plugin at server start.
export async function enableAutoload ({ id }) {
  await this.enablePluginAutoload(id)
}

enableAutoload.description = 'enables a plugin, allowing it to be loaded'

enableAutoload.params = {
  id: {
    type: 'string',
  },
}

enableAutoload.permission = 'admin'

// -------------------------------------------------------------------

// Loads a plugin immediately (independent of its autoload flag).
export async function load ({ id }) {
  await this.loadPlugin(id)
}

load.description = 'loads a plugin'

load.params = {
  id: {
    type: 'string',
  },
}

load.permission = 'admin'

// -------------------------------------------------------------------

// Unloads a currently loaded plugin.
export async function unload ({ id }) {
  await this.unloadPlugin(id)
}

unload.description = 'unloads a plugin'

unload.params = {
  id: {
    type: 'string',
  },
}

unload.permission = 'admin'

// -------------------------------------------------------------------

// Removes a plugin's stored configuration entirely.
export async function purgeConfiguration ({ id }) {
  await this.purgePluginConfiguration(id)
}

purgeConfiguration.description = 'removes a plugin configuration'

purgeConfiguration.params = {
  id: {
    type: 'string',
  },
}

purgeConfiguration.permission = 'admin'

// ---------------------------------------------------------------------

// Runs a plugin's self-test with optional free-form test data.
export async function test ({ id, data }) {
  await this.testPlugin(id, data)
}

test.description = 'Test a plugin with its current configuration'

test.params = {
  id: {
    type: 'string',
  },
  data: {
    optional: true,
  },
}

test.permission = 'admin'
// ---------------------------------------------------------------------

View File

@ -0,0 +1,230 @@
import { format } from 'json-rpc-peer'
import { differenceBy } from 'lodash'
import { mapToArray } from '../utils'
// ===================================================================
// Updates the pool's label and/or description.
// The snake_case parameter names mirror the XAPI field names.
export async function set ({
  pool,

  // TODO: use camel case.
  name_description: nameDescription,
  name_label: nameLabel,
}) {
  await this.getXapi(pool).setPoolProperties({
    nameDescription,
    nameLabel,
  })
}

set.params = {
  id: {
    type: 'string',
  },
  name_label: {
    type: 'string',
    optional: true,
  },
  name_description: {
    type: 'string',
    optional: true,
  },
}

set.resolve = {
  pool: ['id', 'pool', 'administrate'],
}

// -------------------------------------------------------------------

// Sets the pool's default SR; requires administrate on the SR's pool
// (checked manually here since the resolve gives no permission).
export async function setDefaultSr ({ sr }) {
  await this.hasPermissions(this.user.id, [ [ sr.$pool, 'administrate' ] ])

  await this.getXapi(sr).setDefaultSr(sr._xapiId)
}

setDefaultSr.permission = '' // signed in

setDefaultSr.params = {
  sr: {
    type: 'string',
  },
}

setDefaultSr.resolve = {
  sr: ['sr', 'SR'],
}

// -------------------------------------------------------------------

// Designates a new pool master; permission checked manually as above.
export async function setPoolMaster ({ host }) {
  await this.hasPermissions(this.user.id, [ [ host.$pool, 'administrate' ] ])

  await this.getXapi(host).setPoolMaster(host._xapiId)
}

setPoolMaster.params = {
  host: {
    type: 'string',
  },
}

setPoolMaster.resolve = {
  host: ['host', 'host'],
}
// -------------------------------------------------------------------
// Installs a single pool patch (by UUID) on every host of the pool.
export async function installPatch ({pool, patch: patchUuid}) {
  await this.getXapi(pool).installPoolPatchOnAllHosts(patchUuid)
}

installPatch.params = {
  pool: {
    type: 'string',
  },
  patch: {
    type: 'string',
  },
}

installPatch.resolve = {
  pool: ['pool', 'pool', 'administrate'],
}

// -------------------------------------------------------------------

// Installs every missing patch on every host of the pool.
export async function installAllPatches ({ pool }) {
  await this.getXapi(pool).installAllPoolPatchesOnAllHosts()
}

installAllPatches.params = {
  pool: {
    type: 'string',
  },
}

installAllPatches.resolve = {
  pool: ['pool', 'pool', 'administrate'],
}

installAllPatches.description = 'Install automatically all patches for every hosts of a pool'
// -------------------------------------------------------------------
// HTTP handler: streams an uploaded patch file to the pool.
// Content-Length is mandatory because XAPI needs the upload size upfront.
async function handlePatchUpload (req, res, {pool}) {
  const contentLength = req.headers['content-length']
  if (!contentLength) {
    res.writeHead(411)
    res.end('Content length is mandatory')
    return
  }

  await this.getXapi(pool).uploadPoolPatch(req, contentLength)
}

// Registers a one-shot HTTP upload URL; the client PUTs the patch to `$sendTo`.
export async function uploadPatch ({pool}) {
  return {
    $sendTo: await this.registerHttpRequest(handlePatchUpload, {pool}),
  }
}

uploadPatch.params = {
  pool: { type: 'string' },
}

uploadPatch.resolve = {
  pool: ['pool', 'pool', 'administrate'],
}

// Compatibility
//
// TODO: remove when no longer used in xo-web
export {uploadPatch as patch}
// -------------------------------------------------------------------
// Merges the source pool into the target pool.
//
// Preconditions: the source master must not have patches missing on the
// target; any patches present on the target but missing on the source are
// installed on the source master first so patch levels match.
export async function mergeInto ({ source, target, force }) {
  const sourceHost = this.getObject(source.master)
  const sourcePatches = sourceHost.patches
  const targetPatches = this.getObject(target.master).patches

  // Patches applied on the source but not on the target block the merge.
  const counterDiff = differenceBy(sourcePatches, targetPatches, 'name')
  if (counterDiff.length > 0) {
    throw new Error('host has patches that are not applied on target pool')
  }

  // Patches the source is missing relative to the target.
  const diff = differenceBy(targetPatches, sourcePatches, 'name')

  // TODO: compare UUIDs
  await this.getXapi(source).installSpecificPatchesOnHost(
    mapToArray(diff, 'name'),
    sourceHost._xapiId
  )

  await this.mergeXenPools(source._xapiId, target._xapiId, force)
}

mergeInto.params = {
  force: { type: 'boolean', optional: true },
  source: { type: 'string' },
  target: { type: 'string' },
}

mergeInto.resolve = {
  source: ['source', 'pool', 'administrate'],
  target: ['target', 'pool', 'administrate'],
}
// -------------------------------------------------------------------
// Returns the pool's license state as reported by XAPI.
export async function getLicenseState ({pool}) {
  return this.getXapi(pool).call(
    'pool.get_license_state',

    // Fix: `pool._xapiId` is a plain identifier string, so the previous
    // `pool._xapiId.$ref` was always `undefined`; the XAPI call expects the
    // pool's opaque reference.
    pool._xapiRef
  )
}

getLicenseState.params = {
  pool: {
    type: 'string',
  },
}

getLicenseState.resolve = {
  pool: ['pool', 'pool', 'administrate'],
}
// -------------------------------------------------------------------
// HTTP handler: streams the uploaded supplemental pack ISO to every host
// of the pool.
async function handleInstallSupplementalPack (req, res, { poolId }) {
  const xapi = this.getXapi(poolId)

  // Timeout seems to be broken in Node 4.
  // See https://github.com/nodejs/node/issues/3319
  req.setTimeout(43200000) // 12 hours
  // Expose the upload size so the XAPI PUT request can set Content-Length.
  req.length = req.headers['content-length']

  try {
    await xapi.installSupplementalPackOnAllHosts(req)
    res.end(format.response(0))
  } catch (e) {
    // Report the failure as a JSON-RPC error with a 500 status.
    res.writeHead(500)
    res.end(format.error(0, new Error(e.message)))
  }
}

// Registers a one-shot HTTP upload URL; the client PUTs the ISO to `$sendTo`.
export async function installSupplementalPack ({ pool }) {
  return {
    $sendTo: await this.registerHttpRequest(handleInstallSupplementalPack, { poolId: pool.id }),
  }
}

installSupplementalPack.description = 'installs supplemental pack from ISO file on all hosts'

installSupplementalPack.params = {
  pool: { type: 'string' },
}

installSupplementalPack.resolve = {
  pool: ['pool', 'pool', 'admin'],
}

View File

@ -0,0 +1,72 @@
// Lists all configured backup remotes.
export async function getAll () {
  return this.getAllRemotes()
}
getAll.permission = 'admin'
getAll.description = 'Gets all existing fs remote points'
// Fetches one remote by id.
export async function get ({id}) {
  return this.getRemote(id)
}
get.permission = 'admin'
get.description = 'Gets an existing fs remote point'
get.params = {
  id: {type: 'string'},
}
// Performs a read/write round-trip to verify the remote is usable.
export async function test ({id}) {
  return this.testRemote(id)
}
test.permission = 'admin'
test.description = 'Performs a read/write matching test on a remote point'
test.params = {
  id: {type: 'string'},
}
// Lists the backup files stored on the remote.
export async function list ({id}) {
  return this.listRemoteBackups(id)
}
list.permission = 'admin'
list.description = 'Lists the files found in a remote point'
list.params = {
  id: {type: 'string'},
}
// Creates a remote; `url` encodes the protocol and location.
export async function create ({name, url}) {
  return this.createRemote({name, url})
}
create.permission = 'admin'
create.description = 'Creates a new fs remote point'
create.params = {
  name: {type: 'string'},
  url: {type: 'string'},
}
// Updates name/url/enabled of an existing remote.
export async function set ({id, name, url, enabled}) {
  await this.updateRemote(id, {name, url, enabled})
}
set.permission = 'admin'
set.description = 'Modifies an existing fs remote point'
set.params = {
  id: {type: 'string'},
  name: {type: 'string', optional: true},
  url: {type: 'string', optional: true},
  enabled: {type: 'boolean', optional: true},
}
// Removes a remote ("delete" is reserved, hence the delete_ alias).
async function delete_ ({id}) {
  await this.removeRemote(id)
}
delete_.permission = 'admin'
delete_.description = 'Deletes an existing fs remote point'
delete_.params = {
  id: {type: 'string'},
}
export {delete_ as delete}

View File

@ -0,0 +1,240 @@
import {
unauthorized,
} from 'xo-common/api-errors'
// ===================================================================
// Creates a resource set from its name, member subjects/objects and limits.
export function create ({ name, subjects, objects, limits }) {
  return this.createResourceSet(name, subjects, objects, limits)
}

create.permission = 'admin'

create.params = {
  name: {
    type: 'string',
  },
  subjects: {
    type: 'array',
    items: {
      type: 'string',
    },
    optional: true,
  },
  objects: {
    type: 'array',
    items: {
      type: 'string',
    },
    optional: true,
  },
  limits: {
    type: 'object',
    optional: true,
  },
}

// -------------------------------------------------------------------

// Deletes a resource set ("delete" is reserved, hence the delete_ alias).
function delete_ ({ id }) {
  return this.deleteResourceSet(id)
}
export { delete_ as delete }

delete_.permission = 'admin'

delete_.params = {
  id: {
    type: 'string',
  },
}

// -------------------------------------------------------------------

// Updates a resource set; only the provided fields are changed.
export function set ({ id, name, subjects, objects, ipPools, limits }) {
  return this.updateResourceSet(id, {
    limits,
    name,
    objects,
    ipPools,
    subjects,
  })
}

set.permission = 'admin'

set.params = {
  id: {
    type: 'string',
  },
  name: {
    type: 'string',
    optional: true,
  },
  subjects: {
    type: 'array',
    items: {
      type: 'string',
    },
    optional: true,
  },
  objects: {
    type: 'array',
    items: {
      type: 'string',
    },
    optional: true,
  },
  ipPools: {
    type: 'array',
    items: {
      type: 'string',
    },
    optional: true,
  },
  limits: {
    type: 'object',
    optional: true,
  },
}
// -------------------------------------------------------------------

// Fetches a resource set by id.
export function get ({ id }) {
  return this.getResourceSet(id)
}

get.permission = 'admin'

get.params = {
  id: {
    type: 'string',
  },
}

// -------------------------------------------------------------------

// Lists resource sets visible to the current user (no admin requirement —
// each user sees the sets they belong to).
export async function getAll () {
  const { user } = this
  if (!user) {
    throw unauthorized()
  }

  return this.getAllResourceSets(user.id)
}

getAll.description = 'Get the list of all existing resource set'

// -------------------------------------------------------------------

// Adds an object (VM, SR, …) to a resource set.
export function addObject ({ id, object }) {
  return this.addObjectToResourceSet(object, id)
}

addObject.permission = 'admin'

addObject.params = {
  id: {
    type: 'string',
  },
  object: {
    type: 'string',
  },
}

// -------------------------------------------------------------------

// Removes an object from a resource set.
export function removeObject ({ id, object }) {
  return this.removeObjectFromResourceSet(object, id)
}

removeObject.permission = 'admin'

removeObject.params = {
  id: {
    type: 'string',
  },
  object: {
    type: 'string',
  },
}

// -------------------------------------------------------------------

// Grants a subject (user/group) access to a resource set.
export function addSubject ({ id, subject }) {
  return this.addSubjectToResourceSet(subject, id)
}

addSubject.permission = 'admin'

addSubject.params = {
  id: {
    type: 'string',
  },
  subject: {
    type: 'string',
  },
}

// -------------------------------------------------------------------

// Revokes a subject's access to a resource set.
export function removeSubject ({ id, subject }) {
  return this.removeSubjectFromResourceSet(subject, id)
}

removeSubject.permission = 'admin'

removeSubject.params = {
  id: {
    type: 'string',
  },
  subject: {
    type: 'string',
  },
}

// -------------------------------------------------------------------

// Sets a quantity limit (e.g. cpus, disk) on a resource set.
export function addLimit ({ id, limitId, quantity }) {
  return this.addLimitToResourceSet(limitId, quantity, id)
}

addLimit.permission = 'admin'

addLimit.params = {
  id: {
    type: 'string',
  },
  limitId: {
    type: 'string',
  },
  quantity: {
    type: 'integer',
  },
}

// -------------------------------------------------------------------

// Removes a limit from a resource set.
export function removeLimit ({ id, limitId }) {
  return this.removeLimitFromResourceSet(limitId, id)
}

removeLimit.permission = 'admin'

removeLimit.params = {
  id: {
    type: 'string',
  },
  limitId: {
    type: 'string',
  },
}

// -------------------------------------------------------------------

// Recomputes current usage against all resource-set limits.
export function recomputeAllLimits () {
  return this.recomputeResourceSetsLimits()
}

recomputeAllLimits.permission = 'admin'
recomputeAllLimits.description = 'Recompute manually the current resource set usage'

View File

@ -0,0 +1,5 @@
// Roles are read-only: only listing is exposed.
export async function getAll () {
  const roles = await this.getRoles()
  return roles
}

getAll.description = 'Returns the list of all existing roles'

View File

@ -0,0 +1,57 @@
// FIXME so far, no acls for schedules
// Lists every schedule known to the server.
export async function getAll () {
  return /* await */ this.getAllSchedules()
}

getAll.permission = 'admin'
getAll.description = 'Gets all existing schedules'

// Fetches a single schedule by id.
export async function get (id) {
  return /* await */ this.getSchedule(id)
}

get.permission = 'admin'
get.description = 'Gets an existing schedule'
get.params = {
  id: {type: 'string'},
}
// Creates a schedule owned by the current user, triggering job `jobId`
// according to the cron expression (interpreted in `timezone` if given).
export async function create ({ jobId, cron, enabled, name, timezone }) {
  return /* await */ this.createSchedule(this.session.get('user_id'), { job: jobId, cron, enabled, name, timezone })
}

create.permission = 'admin'
create.description = 'Creates a new schedule'
create.params = {
  jobId: {type: 'string'},
  cron: {type: 'string'},
  enabled: {type: 'boolean', optional: true},
  name: {type: 'string', optional: true},

  // Fix: `timezone` is accepted and forwarded by the implementation but was
  // missing from the params schema.
  timezone: {type: 'string', optional: true},
}
// Updates an existing schedule; only the provided fields are changed.
export async function set ({ id, jobId, cron, enabled, name, timezone }) {
  await this.updateSchedule(id, { job: jobId, cron, enabled, name, timezone })
}

set.permission = 'admin'
set.description = 'Modifies an existing schedule'
set.params = {
  id: {type: 'string'},
  jobId: {type: 'string', optional: true},
  cron: {type: 'string', optional: true},
  enabled: {type: 'boolean', optional: true},
  name: {type: 'string', optional: true},

  // Fix: `timezone` is accepted and forwarded by the implementation but was
  // missing from the params schema.
  timezone: {type: 'string', optional: true},
}
// Removes a schedule ("delete" is reserved, hence the delete_ alias).
async function delete_ ({id}) {
  await this.removeSchedule(id)
}

delete_.permission = 'admin'
delete_.description = 'Deletes an existing schedule'
delete_.params = {
  id: {type: 'string'},
}

export {delete_ as delete}

View File

@ -0,0 +1,30 @@
// Enables a schedule by toggling its `enabled` flag and persisting it.
export async function enable ({id}) {
  const schedule = await this.getSchedule(id)
  schedule.enabled = true
  await this.updateSchedule(id, schedule)
}

enable.permission = 'admin'
enable.description = 'Enables a schedule to run it\'s job as scheduled'
enable.params = {
  id: {type: 'string'},
}

// Disables a schedule by toggling its `enabled` flag and persisting it.
export async function disable ({id}) {
  const schedule = await this.getSchedule(id)
  schedule.enabled = false
  await this.updateSchedule(id, schedule)
}

disable.permission = 'admin'
disable.description = 'Disables a schedule'
disable.params = {
  id: {type: 'string'},
}
// Returns the schedule table (map of schedule id -> enabled/disabled state).
export function getScheduleTable () {
  return this.scheduleTable
}

// Fix: these two assignments previously targeted `disable` (copy-paste
// mistake), overwriting disable's description and leaving getScheduleTable
// without metadata.
getScheduleTable.permission = 'admin'
getScheduleTable.description = 'Get a map of existing schedules enabled/disabled state'

View File

@ -0,0 +1,137 @@
import { ignoreErrors } from 'promise-toolbox'
// Registers a Xen server and, unless disabled, starts connecting to it in
// the background (connection errors are deliberately ignored here — the
// connection state is reported elsewhere).
export async function add ({autoConnect = true, ...props}) {
  const server = await this.registerXenServer(props)
  if (autoConnect) {
    this.connectXenServer(server.id)::ignoreErrors()
  }

  return server.id
}

add.description = 'register a new Xen server'

add.permission = 'admin'

add.params = {
  label: {
    optional: true,
    type: 'string',
  },
  host: {
    type: 'string',
  },
  username: {
    type: 'string',
  },
  password: {
    type: 'string',
  },
  autoConnect: {
    optional: true,
    type: 'boolean',
  },
  allowUnauthorized: {
    optional: true,
    type: 'boolean',
  },
}

// -------------------------------------------------------------------

// Unregisters a Xen server.
export async function remove ({id}) {
  await this.unregisterXenServer(id)
}

remove.description = 'unregister a Xen server'

remove.permission = 'admin'

remove.params = {
  id: {
    type: 'string',
  },
}

// -------------------------------------------------------------------

// TODO: remove this function when users are integrated to the main
// collection.
export function getAll () {
  return this.getAllXenServers()
}

getAll.description = 'returns all the registered Xen server'

getAll.permission = 'admin'

// -------------------------------------------------------------------

// Updates a registered Xen server's connection properties.
export async function set ({id, ...props}) {
  await this.updateXenServer(id, props)
}

set.description = 'changes the properties of a Xen server'

set.permission = 'admin'

set.params = {
  id: {
    type: 'string',
  },
  label: {
    type: 'string',
    optional: true,
  },
  host: {
    type: 'string',
    optional: true,
  },
  username: {
    type: 'string',
    optional: true,
  },
  password: {
    type: 'string',
    optional: true,
  },
  allowUnauthorized: {
    optional: true,
    type: 'boolean',
  },
}

// -------------------------------------------------------------------

// Marks the server enabled (best-effort, errors ignored) then connects.
export async function connect ({id}) {
  this.updateXenServer(id, {enabled: true})::ignoreErrors()
  await this.connectXenServer(id)
}

connect.description = 'connect a Xen server'

connect.permission = 'admin'

connect.params = {
  id: {
    type: 'string',
  },
}

// -------------------------------------------------------------------

// Marks the server disabled (best-effort, errors ignored) then disconnects.
export async function disconnect ({id}) {
  this.updateXenServer(id, {enabled: false})::ignoreErrors()
  await this.disconnectXenServer(id)
}

disconnect.description = 'disconnect a Xen server'

disconnect.permission = 'admin'

disconnect.params = {
  id: {
    type: 'string',
  },
}

View File

@ -0,0 +1,58 @@
import {deprecate} from 'util'
import { getUserPublicProperties } from '../utils'
import {invalidCredentials} from 'xo-common/api-errors'
// ===================================================================
// Authenticates the given credentials and binds the user to this session.
export async function signIn (credentials) {
  const user = await this.authenticateUser(credentials)

  if (!user) {
    throw invalidCredentials()
  }

  this.session.set('user_id', user.id)

  return getUserPublicProperties(user)
}

signIn.description = 'sign in'

// -------------------------------------------------------------------

// Legacy alias kept for backward compatibility; logs a deprecation warning.
export const signInWithPassword = deprecate(signIn, 'use session.signIn() instead')

signInWithPassword.params = {
  email: { type: 'string' },
  password: { type: 'string' },
}

// -------------------------------------------------------------------

// Legacy alias kept for backward compatibility; logs a deprecation warning.
export const signInWithToken = deprecate(signIn, 'use session.signIn() instead')

signInWithToken.params = {
  token: { type: 'string' },
}

// -------------------------------------------------------------------

// Unbinds the user from the current session.
export function signOut () {
  this.session.unset('user_id')
}

signOut.description = 'sign out the user from the current session'

// This method requires the user to be signed in.
signOut.permission = ''

// -------------------------------------------------------------------

// Returns the public properties of the currently signed-in user, or null.
export async function getUser () {
  const userId = this.session.get('user_id')

  return userId === undefined
    ? null
    : getUserPublicProperties(await this.getUser(userId))
}

getUser.description = 'return the currently connected user'

View File

@ -0,0 +1,845 @@
import { some } from 'lodash'
import { asInteger } from '../xapi/utils'
import {
asyncMap,
ensureArray,
forEach,
parseXml,
} from '../utils'
// ===================================================================
// Updates an SR's label and/or description.
// The snake_case parameter names mirror the XAPI field names.
export async function set ({
  sr,

  // TODO: use camel case.
  name_description: nameDescription,
  name_label: nameLabel,
}) {
  await this.getXapi(sr).setSrProperties(sr._xapiId, {
    nameDescription,
    nameLabel,
  })
}

set.params = {
  id: { type: 'string' },

  name_label: { type: 'string', optional: true },

  name_description: { type: 'string', optional: true },
}

set.resolve = {
  sr: ['id', 'SR', 'operate'],
}

// -------------------------------------------------------------------

// Triggers a rescan of the SR's contents.
export async function scan ({ SR }) {
  await this.getXapi(SR).call('SR.scan', SR._xapiRef)
}

scan.params = {
  id: { type: 'string' },
}

scan.resolve = {
  SR: ['id', 'SR', 'operate'],
}

// -------------------------------------------------------------------

// True when HA is enabled on the SR's pool and one of its statefiles lives
// on this SR — destroying such an SR would break HA.
const srIsBackingHa = (sr) => sr.$pool.ha_enabled && some(sr.$pool.$ha_statefiles, f => f.$SR === sr)

// TODO: find a way to call this "delete" and not destroy
// Destroys an SR. XOSAN SRs get special handling: the SR is only forgotten
// (its disks live inside the node VMs, which are deleted along with the
// dedicated network), and the XOSAN license is released.
export async function destroy ({sr}) {
  const xapi = this.getXapi(sr)
  if (sr.SR_type !== 'xosan') {
    await xapi.destroySr(sr._xapiId)
    return
  }
  const xapiSr = xapi.getObject(sr)
  if (srIsBackingHa(xapiSr)) {
    throw new Error('You tried to remove a SR the High Availability is relying on. Please disable HA first.')
  }
  const config = xapi.xo.getData(sr, 'xosan_config')
  // we simply forget because the hosted disks are being destroyed with the VMs
  await xapi.forgetSr(sr._xapiId)
  await asyncMap(config.nodes, node => xapi.deleteVm(node.vm.id))
  await xapi.deleteNetwork(config.network)
  if (sr.SR_type === 'xosan') {
    await this.unbindXosanLicense({ srId: sr.id })
  }
}

destroy.params = {
  id: { type: 'string' },
}

destroy.resolve = {
  sr: ['id', 'SR', 'administrate'],
}
// -------------------------------------------------------------------
export async function forget ({ SR }) {
await this.getXapi(SR).forgetSr(SR._xapiId)
}
forget.params = {
id: { type: 'string' },
}
forget.resolve = {
SR: ['id', 'SR', 'administrate'],
}
// -------------------------------------------------------------------
export async function connectAllPbds ({ SR }) {
await this.getXapi(SR).connectAllSrPbds(SR._xapiId)
}
connectAllPbds.params = {
id: { type: 'string' },
}
connectAllPbds.resolve = {
SR: ['id', 'SR', 'administrate'],
}
// -------------------------------------------------------------------
// Unplugs every PBD of the SR (disconnects it on every host).
export async function disconnectAllPbds ({ SR }) {
  const xapi = this.getXapi(SR)
  await xapi.disconnectAllSrPbds(SR._xapiId)
}

disconnectAllPbds.params = {
  id: { type: 'string' },
}

disconnectAllPbds.resolve = {
  SR: ['id', 'SR', 'administrate'],
}
// -------------------------------------------------------------------
// Creates an ISO SR (local, NFS or SMB) on the given host and returns
// the UUID of the new SR.
export async function createIso ({
  host,
  nameLabel,
  nameDescription,
  path,
  type,
  user,
  password,
}) {
  const xapi = this.getXapi(host)

  const deviceConfig = {}
  if (type === 'local') {
    deviceConfig.legacy_mode = 'true'
  } else if (type === 'smb') {
    // the CIFS driver expects forward slashes and explicit credentials
    path = path.replace(/\\/g, '/')
    deviceConfig.type = 'cifs'
    deviceConfig.username = user
    deviceConfig.cifspassword = password
  }
  deviceConfig.location = path

  const srRef = await xapi.call(
    'SR.create',
    host._xapiRef,
    deviceConfig,
    '0', // ISO SRs have no size
    nameLabel,
    nameDescription,
    'iso', // SR type
    'iso', // SR content type
    type !== 'local', // shared unless it is a local path
    {}
  )

  const { uuid } = await xapi.call('SR.get_record', srRef)
  return uuid
}

createIso.params = {
  host: { type: 'string' },
  nameLabel: { type: 'string' },
  nameDescription: { type: 'string' },
  path: { type: 'string' },
  type: { type: 'string' },
  user: { type: 'string', optional: true },
  password: { type: 'string', optional: true },
}

createIso.resolve = {
  host: ['host', 'host', 'administrate'],
}
// -------------------------------------------------------------------
// NFS SR
// This functions creates a NFS SR
// Creates an NFS SR on the given host and returns the UUID of the new SR.
export async function createNfs ({
  host,
  nameLabel,
  nameDescription,
  server,
  serverPath,
  nfsVersion,
}) {
  const xapi = this.getXapi(host)

  const deviceConfig = {
    server,
    serverpath: serverPath,
  }
  // only pass the NFS version when explicitly requested
  if (nfsVersion) {
    deviceConfig.nfsversion = nfsVersion
  }

  const srRef = await xapi.call(
    'SR.create',
    host._xapiRef,
    deviceConfig,
    '0',
    nameLabel,
    nameDescription,
    'nfs', // SR type NFS
    'user', // content type recommended by Citrix
    true, // shared
    {}
  )

  const { uuid } = await xapi.call('SR.get_record', srRef)
  return uuid
}

createNfs.params = {
  host: { type: 'string' },
  nameLabel: { type: 'string' },
  nameDescription: { type: 'string' },
  server: { type: 'string' },
  serverPath: { type: 'string' },
  nfsVersion: { type: 'string', optional: true },
}

createNfs.resolve = {
  host: ['host', 'host', 'administrate'],
}
// -------------------------------------------------------------------
// HBA SR
// This functions creates an HBA SR
// Creates an LVM-over-HBA SR on the given host and returns the UUID of
// the new SR.
export async function createHba ({
  host,
  nameLabel,
  nameDescription,
  scsiId,
}) {
  const xapi = this.getXapi(host)

  // The HBA SM driver expects the `SCSIid` device-config key (same key
  // as createIscsi/probeIscsiExists use); the previous `scsiId` key was
  // not recognized by the driver.
  const deviceConfig = {
    SCSIid: scsiId,
  }

  const srRef = await xapi.call(
    'SR.create',
    host._xapiRef,
    deviceConfig,
    '0',
    nameLabel,
    nameDescription,
    'lvmohba', // SR LVM over HBA (XAPI driver name, was misspelled 'lvmoohba')
    'user', // recommended by Citrix
    true,
    {}
  )

  const sr = await xapi.call('SR.get_record', srRef)
  return sr.uuid
}

createHba.params = {
  host: { type: 'string' },
  nameLabel: { type: 'string' },
  nameDescription: { type: 'string' },
  scsiId: { type: 'string' },
}

createHba.resolve = {
  host: ['host', 'host', 'administrate'],
}
// -------------------------------------------------------------------
// Local LVM SR
// This functions creates a local LVM SR
// Creates a local LVM SR on the given device and returns the UUID of
// the new SR.
export async function createLvm ({
  host,
  nameLabel,
  nameDescription,
  device,
}) {
  const xapi = this.getXapi(host)

  const deviceConfig = {
    device,
  }

  const srRef = await xapi.call(
    'SR.create',
    host._xapiRef,
    deviceConfig,
    '0',
    nameLabel,
    nameDescription,
    'lvm', // SR type LVM
    'user', // content type recommended by Citrix
    false, // a local SR cannot be shared
    {}
  )

  const { uuid } = await xapi.call('SR.get_record', srRef)
  return uuid
}

createLvm.params = {
  host: { type: 'string' },
  nameLabel: { type: 'string' },
  nameDescription: { type: 'string' },
  device: { type: 'string' },
}

createLvm.resolve = {
  host: ['host', 'host', 'administrate'],
}
// -------------------------------------------------------------------
// This function helps to detect all NFS shares (exports) on a NFS server
// Return a table of exports with their paths and ACLs
// Lists the NFS exports of a remote server with their paths and ACLs.
// The XAPI reports them through a well-known probe "failure" (code
// 101) whose payload is an XML document.
export async function probeNfs ({
  host,
  server,
}) {
  const xapi = this.getXapi(host)

  let probeResult
  try {
    await xapi.call(
      'SR.probe',
      host._xapiRef,
      { server },
      'nfs',
      {}
    )

    throw new Error('the call above should have thrown an error')
  } catch (error) {
    if (error.code !== 'SR_BACKEND_FAILURE_101') {
      throw error
    }

    probeResult = parseXml(error.params[2])
  }

  const nfsExports = []
  forEach(ensureArray(probeResult['nfs-exports'].Export), nfsExport => {
    nfsExports.push({
      path: nfsExport.Path.trim(),
      acl: nfsExport.Accesslist.trim(),
    })
  })

  return nfsExports
}

probeNfs.params = {
  host: { type: 'string' },
  server: { type: 'string' },
}

probeNfs.resolve = {
  host: ['host', 'host', 'administrate'],
}
// -------------------------------------------------------------------
// This function helps to detect all HBA devices on the host
// Detects all HBA devices on the host. The XAPI reports them through a
// probe "failure" (code 107) whose payload is an XML document.
export async function probeHba ({
  host,
}) {
  const xapi = this.getXapi(host)

  let xml
  try {
    // SR.probe(host, device_config, type, sm_config): the previous
    // code passed the string 'type' as device_config and omitted the
    // SR type entirely, which made the call fail with a type error
    // instead of the expected SR_BACKEND_FAILURE_107.
    await xapi.call(
      'SR.probe',
      host._xapiRef,
      {},
      'lvmohba',
      {}
    )

    throw new Error('the call above should have thrown an error')
  } catch (error) {
    if (error.code !== 'SR_BACKEND_FAILURE_107') {
      throw error
    }

    xml = parseXml(error.params[2])
  }

  const hbaDevices = []
  forEach(ensureArray(xml.Devlist.BlockDevice), hbaDevice => {
    hbaDevices.push({
      hba: hbaDevice.hba.trim(),
      path: hbaDevice.path.trim(),
      scsciId: hbaDevice.SCSIid.trim(), // sic: misspelled key kept for client compatibility
      size: hbaDevice.size.trim(),
      vendor: hbaDevice.vendor.trim(),
    })
  })

  return hbaDevices
}

probeHba.params = {
  host: { type: 'string' },
}

probeHba.resolve = {
  host: ['host', 'host', 'administrate'],
}
// -------------------------------------------------------------------
// ISCSI SR
// This functions creates a iSCSI SR
// Creates an LVM-over-iSCSI SR and returns the UUID of the new SR.
// CHAP credentials and a non-default port are optional.
export async function createIscsi ({
  host,
  nameLabel,
  nameDescription,
  size,
  target,
  port,
  targetIqn,
  scsiId,
  chapUser,
  chapPassword,
}) {
  const xapi = this.getXapi(host)

  const deviceConfig = {
    target,
    targetIQN: targetIqn,
    SCSIid: scsiId,
  }

  // optional CHAP authentication
  if (chapUser && chapPassword) {
    deviceConfig.chapuser = chapUser
    deviceConfig.chappassword = chapPassword
  }

  // optional non-default iSCSI port
  if (port) {
    deviceConfig.port = asInteger(port)
  }

  const srRef = await xapi.call(
    'SR.create',
    host._xapiRef,
    deviceConfig,
    '0',
    nameLabel,
    nameDescription,
    'lvmoiscsi', // SR LVM over iSCSI
    'user', // content type recommended by Citrix
    true, // shared
    {}
  )

  const { uuid } = await xapi.call('SR.get_record', srRef)
  return uuid
}

createIscsi.params = {
  host: { type: 'string' },
  nameLabel: { type: 'string' },
  nameDescription: { type: 'string' },
  target: { type: 'string' },
  port: { type: 'integer', optional: true },
  targetIqn: { type: 'string' },
  scsiId: { type: 'string' },
  chapUser: { type: 'string', optional: true },
  chapPassword: { type: 'string', optional: true },
}

createIscsi.resolve = {
  host: ['host', 'host', 'administrate'],
}
// -------------------------------------------------------------------
// This function helps to detect all iSCSI IQN on a Target (iSCSI "server")
// Return a table of IQN or empty table if no iSCSI connection to the target
// Detects all iSCSI IQNs on a target (iSCSI "server").
// Returns an empty array when no iSCSI connection to the target is
// possible; otherwise the IQN list is carried by a probe "failure"
// (code 96) whose payload is an XML document.
export async function probeIscsiIqns ({
  host,
  target: targetIp,
  port,
  chapUser,
  chapPassword,
}) {
  const xapi = this.getXapi(host)

  const deviceConfig = {
    target: targetIp,
  }

  // optional CHAP authentication
  //
  // Note: the SM driver expects the lower-case `chapuser`/`chappassword`
  // keys (as used by createIscsi & probeIscsiLuns); the previous
  // camel-case keys were not recognized.
  if (chapUser && chapPassword) {
    deviceConfig.chapuser = chapUser
    deviceConfig.chappassword = chapPassword
  }

  // optional non-default iSCSI port
  if (port) {
    deviceConfig.port = asInteger(port)
  }

  let xml
  try {
    await xapi.call(
      'SR.probe',
      host._xapiRef,
      deviceConfig,
      'lvmoiscsi',
      {}
    )

    throw new Error('the call above should have thrown an error')
  } catch (error) {
    // 141: no iSCSI connection to the target
    if (error.code === 'SR_BACKEND_FAILURE_141') {
      return []
    }
    if (error.code !== 'SR_BACKEND_FAILURE_96') {
      throw error
    }

    xml = parseXml(error.params[2])
  }

  const targets = []
  forEach(ensureArray(xml['iscsi-target-iqns'].TGT), target => {
    // if the target is on another IP address, do not display it
    if (target.IPAddress.trim() === targetIp) {
      targets.push({
        iqn: target.TargetIQN.trim(),
        ip: target.IPAddress.trim(),
      })
    }
  })

  return targets
}

probeIscsiIqns.params = {
  host: { type: 'string' },
  target: { type: 'string' },
  port: { type: 'integer', optional: true },
  chapUser: { type: 'string', optional: true },
  chapPassword: { type: 'string', optional: true },
}

probeIscsiIqns.resolve = {
  host: ['host', 'host', 'administrate'],
}
// -------------------------------------------------------------------
// This function helps to detect all iSCSI ID and LUNs on a Target
// It will return a LUN table
// Detects all LUNs behind an iSCSI target/IQN pair. The LUN table is
// carried by a probe "failure" (code 107) whose payload is an XML
// document.
export async function probeIscsiLuns ({
  host,
  target: targetIp,
  port,
  targetIqn,
  chapUser,
  chapPassword,
}) {
  const xapi = this.getXapi(host)

  const deviceConfig = {
    target: targetIp,
    targetIQN: targetIqn,
  }

  // optional CHAP authentication
  if (chapUser && chapPassword) {
    deviceConfig.chapuser = chapUser
    deviceConfig.chappassword = chapPassword
  }

  // optional non-default iSCSI port
  if (port) {
    deviceConfig.port = asInteger(port)
  }

  let probeResult
  try {
    await xapi.call(
      'SR.probe',
      host._xapiRef,
      deviceConfig,
      'lvmoiscsi',
      {}
    )

    throw new Error('the call above should have thrown an error')
  } catch (error) {
    if (error.code !== 'SR_BACKEND_FAILURE_107') {
      throw error
    }

    probeResult = parseXml(error.params[2])
  }

  const luns = []
  forEach(ensureArray(probeResult['iscsi-target'].LUN), lun => {
    luns.push({
      id: lun.LUNid.trim(),
      vendor: lun.vendor.trim(),
      serial: lun.serial.trim(),
      size: lun.size.trim(),
      scsiId: lun.SCSIid.trim(),
    })
  })

  return luns
}

probeIscsiLuns.params = {
  host: { type: 'string' },
  target: { type: 'string' },
  port: { type: 'integer', optional: true },
  targetIqn: { type: 'string' },
  chapUser: { type: 'string', optional: true },
  chapPassword: { type: 'string', optional: true },
}

probeIscsiLuns.resolve = {
  host: ['host', 'host', 'administrate'],
}
// -------------------------------------------------------------------
// This function helps to detect if this target already exists in XAPI
// It returns a table of SR UUID, empty if no existing connections
// Checks whether an SR already exists in the XAPI for this iSCSI LUN.
// Returns a table of SR UUIDs, empty if there is no existing connection.
export async function probeIscsiExists ({
  host,
  target: targetIp,
  port,
  targetIqn,
  scsiId,
  chapUser,
  chapPassword,
}) {
  const xapi = this.getXapi(host)

  const deviceConfig = {
    target: targetIp,
    targetIQN: targetIqn,
    SCSIid: scsiId,
  }

  // optional CHAP authentication
  if (chapUser && chapPassword) {
    deviceConfig.chapuser = chapUser
    deviceConfig.chappassword = chapPassword
  }

  // optional non-default iSCSI port
  if (port) {
    deviceConfig.port = asInteger(port)
  }

  const probeResult = await xapi.call('SR.probe', host._xapiRef, deviceConfig, 'lvmoiscsi', {})
  const xml = parseXml(probeResult)

  const srs = []
  forEach(ensureArray(xml['SRlist'].SR), sr => {
    // collect the UUID of every SR connected to this LUN
    srs.push({ uuid: sr.UUID.trim() })
  })

  return srs
}

probeIscsiExists.params = {
  host: { type: 'string' },
  target: { type: 'string' },
  port: { type: 'integer', optional: true },
  targetIqn: { type: 'string' },
  scsiId: { type: 'string' },
  chapUser: { type: 'string', optional: true },
  chapPassword: { type: 'string', optional: true },
}

probeIscsiExists.resolve = {
  host: ['host', 'host', 'administrate'],
}
// -------------------------------------------------------------------
// This function helps to detect if this NFS SR already exists in XAPI
// It returns a table of SR UUID, empty if no existing connections
// Checks whether an SR already exists in the XAPI for this NFS export.
// Returns a table of SR UUIDs, empty if there is no existing connection.
export async function probeNfsExists ({
  host,
  server,
  serverPath,
}) {
  const xapi = this.getXapi(host)

  const deviceConfig = {
    server,
    serverpath: serverPath,
  }

  const probeResult = await xapi.call('SR.probe', host._xapiRef, deviceConfig, 'nfs', {})
  const xml = parseXml(probeResult)

  const srs = []
  forEach(ensureArray(xml['SRlist'].SR), sr => {
    // collect the UUID of every SR connected to this export
    srs.push({ uuid: sr.UUID.trim() })
  })

  return srs
}

probeNfsExists.params = {
  host: { type: 'string' },
  server: { type: 'string' },
  serverPath: { type: 'string' },
}

probeNfsExists.resolve = {
  host: ['host', 'host', 'administrate'],
}
// -------------------------------------------------------------------
// This function helps to reattach a forgotten NFS/iSCSI SR
// Reattaches a previously forgotten NFS/iSCSI SR by re-introducing it
// into the XAPI database. Returns the SR UUID.
export async function reattach ({
  host,
  uuid,
  nameLabel,
  nameDescription,
  type,
}) {
  const xapi = this.getXapi(host)

  if (type === 'iscsi') {
    type = 'lvmoiscsi' // the internal XAPI name
  }

  const srRef = await xapi.call(
    'SR.introduce',
    uuid,
    nameLabel,
    nameDescription,
    type,
    'user', // content type
    true, // shared
    {}
  )

  const record = await xapi.call('SR.get_record', srRef)
  return record.uuid
}

reattach.params = {
  host: { type: 'string' },
  uuid: { type: 'string' },
  nameLabel: { type: 'string' },
  nameDescription: { type: 'string' },
  type: { type: 'string' },
}

reattach.resolve = {
  host: ['host', 'host', 'administrate'],
}
// -------------------------------------------------------------------
// This function helps to reattach a forgotten ISO SR
// Reattaches a previously forgotten ISO SR by re-introducing it into
// the XAPI database with the 'iso' content type. Returns the SR UUID.
export async function reattachIso ({
  host,
  uuid,
  nameLabel,
  nameDescription,
  type,
}) {
  const xapi = this.getXapi(host)

  if (type === 'iscsi') {
    type = 'lvmoiscsi' // the internal XAPI name
  }

  const srRef = await xapi.call(
    'SR.introduce',
    uuid,
    nameLabel,
    nameDescription,
    type,
    'iso', // content type
    true, // shared
    {}
  )

  const record = await xapi.call('SR.get_record', srRef)
  return record.uuid
}

reattachIso.params = {
  host: { type: 'string' },
  uuid: { type: 'string' },
  nameLabel: { type: 'string' },
  nameDescription: { type: 'string' },
  type: { type: 'string' },
}

reattachIso.resolve = {
  host: ['host', 'host', 'administrate'],
}
// -------------------------------------------------------------------
// Returns the number of unhealthy VDI chains on this SR (chains which
// need coalescing), as reported by the XAPI layer.
export function getUnhealthyVdiChainsLength ({ sr }) {
  const xapi = this.getXapi(sr)
  return xapi.getUnhealthyVdiChainsLength(sr)
}

getUnhealthyVdiChainsLength.params = {
  id: { type: 'string' },
}

getUnhealthyVdiChainsLength.resolve = {
  sr: ['id', 'SR', 'operate'],
}

View File

@ -0,0 +1,67 @@
import forEach from 'lodash/forEach'
import getKeys from 'lodash/keys'
import moment from 'moment-timezone'
import { noSuchObject } from 'xo-common/api-errors'
import { version as xoServerVersion } from '../../package.json'
// ===================================================================
// Returns, keyed by method name, the description, parameter schema and
// required permission of every registered API method.
export function getMethodsInfo () {
  const methods = {}
  forEach(this.apiMethods, (method, name) => {
    const { description, permission } = method
    methods[name] = {
      description,
      params: method.params || {},
      permission,
    }
  })
  return methods
}
getMethodsInfo.description = 'returns the signatures of all available API methods'
// -------------------------------------------------------------------
// Guessed once at startup: the server's timezone cannot change while running.
const guessedServerTimezone = moment.tz.guess()
export const getServerTimezone = () => guessedServerTimezone
getServerTimezone.description = 'return the timezone server'
// -------------------------------------------------------------------
// xo-server's own version, read from its package.json at import time.
export function getServerVersion () {
  return xoServerVersion
}
getServerVersion.description = 'return the version of xo-server'
// -------------------------------------------------------------------
// Version of the (unstable) API itself, not of xo-server.
export function getVersion () {
  return '0.1'
}
getVersion.description = 'API version (unstable)'
// -------------------------------------------------------------------
// Lists the names of all registered API methods.
export function listMethods () {
  const { apiMethods } = this
  return getKeys(apiMethods)
}
listMethods.description = 'returns the name of all available API methods'
// -------------------------------------------------------------------
// Returns the signature of one API method, wrapped in an array for
// compatibility with XML-RPC (which requires the method name too).
export function methodSignature ({method: name}) {
  const method = this.apiMethods[name]
  if (!method) {
    throw noSuchObject()
  }

  const signature = {
    name,
    description: method.description,
    params: method.params || {},
    permission: method.permission,
  }
  return [signature]
}
methodSignature.description = 'returns the signature of an API method'

View File

@ -0,0 +1,31 @@
// Adds a tag to any XAPI-backed object.
export async function add ({tag, object}) {
  const xapi = this.getXapi(object)
  await xapi.addTag(object._xapiId, tag)
}

add.description = 'add a new tag to an object'
add.resolve = {
  object: ['id', null, 'administrate'],
}
add.params = {
  tag: { type: 'string' },
  id: { type: 'string' },
}
// -------------------------------------------------------------------
// Removes an existing tag from any XAPI-backed object.
export async function remove ({tag, object}) {
  const xapi = this.getXapi(object)
  await xapi.removeTag(object._xapiId, tag)
}

remove.description = 'remove an existing tag from an object'
remove.resolve = {
  object: ['id', null, 'administrate'],
}
remove.params = {
  tag: { type: 'string' },
  id: { type: 'string' },
}

View File

@ -0,0 +1,25 @@
// Cancels a running XAPI task.
export async function cancel ({task}) {
  const xapi = this.getXapi(task)
  await xapi.call('task.cancel', task._xapiRef)
}

cancel.params = {
  id: { type: 'string' },
}

cancel.resolve = {
  task: ['id', 'task', 'administrate'],
}
// -------------------------------------------------------------------
// Destroys (removes from the XAPI database) a finished task.
export async function destroy ({task}) {
  const xapi = this.getXapi(task)
  await xapi.call('task.destroy', task._xapiRef)
}

destroy.params = {
  id: { type: 'string' },
}

destroy.resolve = {
  task: ['id', 'task', 'administrate'],
}

View File

@ -0,0 +1,86 @@
// Returns the permissions of the given user (admin only).
export function getPermissionsForUser (params) {
  return this.getPermissionsForUser(params.userId)
}

getPermissionsForUser.permission = 'admin'

getPermissionsForUser.params = {
  userId: {
    type: 'string',
  },
}
// -------------------------------------------------------------------
// Checks whether `userId` holds `permission` on `objectId` (admin only).
export function hasPermission ({ userId, objectId, permission }) {
  const checks = [[objectId, permission]]
  return this.hasPermissions(userId, checks)
}

hasPermission.permission = 'admin'

hasPermission.params = {
  userId: {
    type: 'string',
  },
  objectId: {
    type: 'string',
  },
  permission: {
    type: 'string',
  },
}
// -------------------------------------------------------------------
// Resolves with `returnValue` after `duration` milliseconds.
// Mainly useful to exercise the API plumbing.
export function wait ({duration, returnValue}) {
  return new Promise(resolve => {
    setTimeout(() => resolve(returnValue), +duration)
  })
}

wait.params = {
  duration: {
    type: 'string',
  },
}
// -------------------------------------------------------------------
// Copies a VM to an SR twice — once through a full export/import and
// once through a delta export/import — deleting each copy afterwards.
// Exercises both code paths (admin test helper).
export async function copyVm ({ vm, sr }) {
  const srcXapi = this.getXapi(vm)
  const tgtXapi = this.getXapi(sr)

  // full
  {
    console.log('export full VM...')
    const stream = await srcXapi.exportVm(vm)
    console.log('import full VM...')
    const copyId = await tgtXapi.importVm(stream, { srId: sr })
    await tgtXapi.deleteVm(copyId)
  }

  // delta
  {
    console.log('export delta VM...')
    const delta = await srcXapi.exportDeltaVm(vm)
    console.log('import delta VM...')
    const copyId = await tgtXapi.importDeltaVm(delta, { srId: sr })
    await tgtXapi.deleteVm(copyId)
  }
}

copyVm.description = 'export/import full/delta VM'

copyVm.permission = 'admin'

copyVm.params = {
  vm: { type: 'string' },
  sr: { type: 'string' },
}

copyVm.resolve = {
  vm: [ 'vm', 'VM' ],
  sr: [ 'sr', 'SR' ],
}

View File

@ -0,0 +1,36 @@
// TODO: Prevent token connections from creating tokens.
// TODO: Token permission.
// Creates a new authentication token for the current user and returns
// its id.
export async function create ({ expiresIn }) {
  const token = await this.createAuthenticationToken({
    expiresIn,
    userId: this.session.get('user_id'),
  })
  return token.id
}

create.description = 'create a new authentication token'

create.params = {
  expiresIn: {
    optional: true,
    type: [ 'number', 'string' ],
  },
}

create.permission = '' // sign in
// -------------------------------------------------------------------
// TODO: an user should be able to delete its own tokens.
// Deletes an existing authentication token (admin only).
async function delete_ ({ token }) {
  await this.deleteAuthenticationToken(token)
}
export {delete_ as delete}

delete_.description = 'delete an existing authentication token'

delete_.permission = 'admin'

delete_.params = {
  token: { type: 'string' },
}

View File

@ -0,0 +1,99 @@
import {invalidParameters} from 'xo-common/api-errors'
import { getUserPublicProperties, mapToArray } from '../utils'
// ===================================================================
// Creates a new user and returns its id (admin only).
export async function create ({email, password, permission}) {
  const user = await this.createUser({email, password, permission})
  return user.id
}

create.description = 'creates a new user'

create.permission = 'admin'

create.params = {
  email: { type: 'string' },
  password: { type: 'string' },
  permission: { type: 'string', optional: true },
}
// -------------------------------------------------------------------
// Deletes an existing user.
// Deletes an existing user; a user cannot delete their own account.
async function delete_ ({id}) {
  // prevent self-deletion
  if (this.session.get('user_id') === id) {
    throw invalidParameters('a user cannot delete itself')
  }

  await this.deleteUser(id)
}

// delete is not a valid identifier.
export {delete_ as delete}

delete_.description = 'deletes an existing user'

delete_.permission = 'admin'

delete_.params = {
  id: { type: 'string' },
}
// -------------------------------------------------------------------
// TODO: remove this function when users are integrated to the main
// collection.
// Returns all existing users with their private properties stripped
// (admin only).
export async function getAll () {
  const users = await this.getAllUsers()
  // expose only public properties
  return mapToArray(users, getUserPublicProperties)
}

getAll.description = 'returns all the existing users'

getAll.permission = 'admin'
// -------------------------------------------------------------------
// Changes the properties of an existing user.
//
// Non-admins may only update their preferences: email, password and
// permission changes are restricted to administrators, and an
// administrator cannot change their own permission.
export async function set ({id, email, password, permission, preferences}) {
  const isAdmin = this.user && this.user.permission === 'admin'
  if (isAdmin) {
    if (permission && id === this.session.get('user_id')) {
      throw invalidParameters('a user cannot change its own permission')
    }
  } else if (email || password || permission) {
    // fixed broken wording of this user-facing error message
    throw invalidParameters('these properties can only be changed by an administrator')
  }

  await this.updateUser(id, {email, password, permission, preferences})
}

set.description = 'changes the properties of an existing user'

set.permission = ''

set.params = {
  id: { type: 'string' },
  email: { type: 'string', optional: true },
  password: { type: 'string', optional: true },
  permission: { type: 'string', optional: true },
  preferences: { type: 'object', optional: true },
}
// -------------------------------------------------------------------
// Lets the current user change their own password after verification
// of the old one.
export async function changePassword ({oldPassword, newPassword}) {
  const userId = this.session.get('user_id')
  await this.changeUserPassword(userId, oldPassword, newPassword)
}

changePassword.description = 'change password after checking old password (user function)'

changePassword.permission = ''

changePassword.params = {
  oldPassword: {type: 'string'},
  newPassword: {type: 'string'},
}

View File

@ -0,0 +1,82 @@
// FIXME: too low level, should be removed.
// FIXME: too low level, should be removed.
// Deletes a VBD.
async function delete_ ({vbd}) {
  const xapi = this.getXapi(vbd)
  await xapi.deleteVbd(vbd)
}

delete_.params = {
  id: { type: 'string' },
}

delete_.resolve = {
  vbd: ['id', 'VBD', 'administrate'],
}

export { delete_ as delete }
// -------------------------------------------------------------------
// Unplugs a VBD from its VM.
export async function disconnect ({vbd}) {
  await this.getXapi(vbd).disconnectVbd(vbd._xapiRef)
}

disconnect.params = {
  id: { type: 'string' },
}

disconnect.resolve = {
  vbd: ['id', 'VBD', 'administrate'],
}
// -------------------------------------------------------------------
// Plugs a VBD into its VM.
export async function connect ({vbd}) {
  await this.getXapi(vbd).connectVbd(vbd._xapiRef)
}

connect.params = {
  id: { type: 'string' },
}

connect.resolve = {
  vbd: ['id', 'VBD', 'administrate'],
}
// -------------------------------------------------------------------
// Updates a VBD; currently only its device position can be changed.
export async function set ({position, vbd}) {
  if (position === undefined) {
    return
  }

  const xapi = this.getXapi(vbd)
  await xapi.call('VBD.set_userdevice', vbd._xapiRef, String(position))
}

set.params = {
  // Identifier of the VBD to update.
  id: { type: 'string' },

  position: { type: ['string', 'number'], optional: true },
}

set.resolve = {
  vbd: ['id', 'VBD', 'administrate'],
}
// -------------------------------------------------------------------
// Sets whether a VBD is bootable.
export async function setBootable ({vbd, bootable}) {
  const xapi = this.getXapi(vbd)
  await xapi.call('VBD.set_bootable', vbd._xapiRef, bootable)
}

setBootable.params = {
  vbd: { type: 'string' },
  bootable: { type: 'boolean' },
}

setBootable.resolve = {
  vbd: ['vbd', 'VBD', 'administrate'],
}

View File

@ -0,0 +1,122 @@
// FIXME: rename to disk.*
import { invalidParameters, unauthorized } from 'xo-common/api-errors'
import { isArray, reduce } from 'lodash'
import { parseSize } from '../utils'
// ====================================================================
// Deletes a VDI (or VDI snapshot).
//
// If one of the VDI's VBDs belongs to a VM in a resource set, the
// VDI's size is given back to that resource set's disk quota before
// the deletion.
export async function delete_ ({vdi}) {
  // find the resource set (if any) of the first VM this VDI is attached to
  const resourceSet = reduce(
    vdi.$VBDs,
    (resourceSet, vbd) => resourceSet || this.getObject(this.getObject(vbd, 'VBD').VM).resourceSet,
    undefined
  )
  if (resourceSet !== undefined) {
    // release the disk quota consumed by this VDI (negative allocation)
    await this.allocateLimitsInResourceSet({ disk: -vdi.size }, resourceSet)
  }
  await this.getXapi(vdi).deleteVdi(vdi._xapiId)
}
delete_.params = {
  id: { type: 'string' },
}
delete_.resolve = {
  vdi: ['id', ['VDI', 'VDI-snapshot'], 'administrate'],
}
export { delete_ as delete }
// -------------------------------------------------------------------
// FIXME: human readable strings should be handled.
// Updates a VDI's size and/or name fields.
//
// Resizing is only allowed upward. When the VDI is attached to exactly
// one VM belonging to a resource set, the size increase is charged
// against that set's disk quota; otherwise the user must be an admin
// or have the `operate` permission on the VDI's SR.
export async function set (params) {
  const {vdi} = params
  const xapi = this.getXapi(vdi)
  const ref = vdi._xapiRef
  // Size.
  if ('size' in params) {
    let resourceSetId
    const size = parseSize(params.size)
    // shrinking a VDI is not supported
    if (size < vdi.size) {
      throw invalidParameters(
        `cannot set new size (${size}) below the current size (${vdi.size})`
      )
    }
    const vbds = vdi.$VBDs
    if (
      (vbds.length === 1) &&
      ((resourceSetId = xapi.xo.getData(this.getObject(vbds[0], 'VBD').VM, 'resourceSet')) !== undefined)
    ) {
      if (this.user.permission !== 'admin') {
        await this.checkResourceSetConstraints(resourceSetId, this.user.id)
      }
      // charge only the size difference against the resource set's disk quota
      await this.allocateLimitsInResourceSet({ disk: size - vdi.size }, resourceSetId)
    } else if (!(
      (this.user.permission === 'admin') ||
      (await this.hasPermissions(this.user.id, [ [ vdi.$SR, 'operate' ] ]))
    )) {
      throw unauthorized()
    }
    await xapi.resizeVdi(ref, size)
  }
  // Other fields.
  // maps API parameter names to XAPI field setters (VDI.set_<field>)
  const object = {
    'name_label': 'name_label',
    'name_description': 'name_description',
  }
  for (const param in object) {
    const fields = object[param]
    if (!(param in params)) { continue }
    for (const field of (isArray(fields) ? fields : [fields])) {
      await xapi.call(`VDI.set_${field}`, ref, `${params[param]}`)
    }
  }
}
set.params = {
  // Identifier of the VDI to update.
  id: { type: 'string' },
  name_label: { type: 'string', optional: true },
  name_description: { type: 'string', optional: true },
  // size of VDI
  size: { type: ['integer', 'string'], optional: true },
}
set.resolve = {
  vdi: ['id', ['VDI', 'VDI-snapshot'], 'administrate'],
}
// -------------------------------------------------------------------
// Moves a VDI (or VDI snapshot) to another SR.
export async function migrate ({vdi, sr}) {
  const xapi = this.getXapi(vdi)
  await xapi.moveVdi(vdi._xapiRef, sr._xapiRef)
  return true
}

migrate.params = {
  id: { type: 'string' },
  sr_id: { type: 'string' },
}

migrate.resolve = {
  vdi: ['id', ['VDI', 'VDI-snapshot'], 'administrate'],
  sr: ['sr_id', 'SR', 'administrate'],
}

View File

@ -0,0 +1,139 @@
import { ignoreErrors } from 'promise-toolbox'
import { diffItems } from '../utils'
// ===================================================================
// TODO: move into vm and rename to removeInterface
// Deletes a VIF, releasing its reserved IP addresses first (best
// effort: IP deallocation failures are ignored).
async function delete_ ({vif}) {
  // `promise::ignoreErrors()` is equivalent to `ignoreErrors.call(promise)`
  ignoreErrors.call(this.allocIpAddresses(
    vif.id,
    null,
    vif.allowedIpv4Addresses.concat(vif.allowedIpv6Addresses)
  ))

  await this.getXapi(vif).deleteVif(vif._xapiId)
}
export {delete_ as delete}

delete_.params = {
  id: { type: 'string' },
}

delete_.resolve = {
  vif: ['id', 'VIF', 'administrate'],
}
// -------------------------------------------------------------------
// TODO: move into vm and rename to disconnectInterface
// TODO: check if VIF is attached before
// Unplugs a VIF from its VM.
export async function disconnect ({vif}) {
  await this.getXapi(vif).disconnectVif(vif._xapiId)
}

disconnect.params = {
  id: { type: 'string' },
}

disconnect.resolve = {
  vif: ['id', 'VIF', 'operate'],
}
// -------------------------------------------------------------------
// TODO: move into vm and rename to connectInterface
// TODO: check if VIF is attached before
// Plugs a VIF into its VM.
export async function connect ({vif}) {
  await this.getXapi(vif).connectVif(vif._xapiId)
}

connect.params = {
  id: { type: 'string' },
}

connect.resolve = {
  vif: ['id', 'VIF', 'operate'],
}
// -------------------------------------------------------------------
// Updates a VIF.
//
// Changing the network or the MAC requires recreating the VIF (the
// XAPI does not allow editing them in place); otherwise only the
// allowed IP lists are edited and the IP allocations adjusted.
export async function set ({
  vif,
  network,
  mac,
  allowedIpv4Addresses,
  allowedIpv6Addresses,
  attached,
}) {
  const oldIpAddresses = vif.allowedIpv4Addresses.concat(vif.allowedIpv6Addresses)
  const newIpAddresses = []
  {
    const { push } = newIpAddresses
    push.apply(newIpAddresses, allowedIpv4Addresses || vif.allowedIpv4Addresses)
    push.apply(newIpAddresses, allowedIpv6Addresses || vif.allowedIpv6Addresses)
  }
  if (network || mac) {
    const xapi = this.getXapi(vif)
    const vm = xapi.getObject(vif.$VM)
    // keep current values for the parameters that were not provided
    mac == null && (mac = vif.MAC)
    network = xapi.getObject((network && network.id) || vif.$network)
    attached == null && (attached = vif.attached)
    // release the old IP reservations, then recreate the VIF
    await this.allocIpAddresses(vif.id, null, oldIpAddresses)
    await xapi.deleteVif(vif._xapiId)
    // create new VIF with new parameters
    // NOTE(review): newIpAddresses mixes IPv4 and IPv6 addresses but
    // only `ipv4_allowed` is set here — verify whether `ipv6_allowed`
    // should be passed separately.
    const newVif = await xapi.createVif(vm.$id, network.$id, {
      mac,
      currently_attached: attached,
      ipv4_allowed: newIpAddresses,
    })
    await this.allocIpAddresses(newVif.$id, newIpAddresses)
    return
  }
  // no recreation needed: only adjust IP allocations and allowed lists
  const [ addAddresses, removeAddresses ] = diffItems(
    newIpAddresses,
    oldIpAddresses
  )
  await this.allocIpAddresses(
    vif.id,
    addAddresses,
    removeAddresses
  )
  return this.getXapi(vif).editVif(vif._xapiId, {
    ipv4Allowed: allowedIpv4Addresses,
    ipv6Allowed: allowedIpv6Addresses,
  })
}
set.params = {
  id: { type: 'string' },
  network: { type: 'string', optional: true },
  mac: { type: 'string', optional: true },
  allowedIpv4Addresses: {
    type: 'array',
    items: {
      type: 'string',
    },
    optional: true,
  },
  allowedIpv6Addresses: {
    type: 'array',
    items: {
      type: 'string',
    },
    optional: true,
  },
  attached: { type: 'boolean', optional: true },
}
set.resolve = {
  vif: ['id', 'VIF', 'operate'],
  network: ['network', 'network', 'operate'],
}

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,55 @@
import { streamToBuffer } from '../utils'
// ===================================================================
// Delegates to the XO-level clean() implementation (admin only).
export function clean () {
  return this.clean()
}
clean.permission = 'admin'
// -------------------------------------------------------------------
// Registers a one-shot HTTP handler from which the current xo-server
// configuration can be downloaded as an attachment, and returns its
// URL in `$getFrom`.
export async function exportConfig () {
  return {
    $getFrom: await this.registerHttpRequest((req, res) => {
      res.writeHead(200, 'OK', {
        'content-disposition': 'attachment',
      })
      return this.exportConfig()
    },
    undefined,
    { suffix: '/config.json' }),
  }
}
exportConfig.permission = 'admin'
// -------------------------------------------------------------------
// Returns all XO objects, optionally filtered and limited in number.
export function getAllObjects ({ filter, limit }) {
  const options = { filter, limit }
  return this.getObjects(options)
}

getAllObjects.permission = ''

getAllObjects.description = 'Returns all XO objects'

getAllObjects.params = {
  filter: { type: 'object', optional: true },
  limit: { type: 'number', optional: true },
}
// -------------------------------------------------------------------
// Registers a one-shot HTTP handler to which a configuration export
// can be uploaded, and returns its URL in `$sendTo`. The request body
// is parsed as JSON and imported.
export async function importConfig () {
  return {
    $sendTo: await this.registerHttpRequest(async (req, res) => {
      await this.importConfig(JSON.parse(await streamToBuffer(req)))
      res.end('config successfully imported')
    }),
  }
}
importConfig.permission = 'admin'

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,172 @@
import Model from './model'
import {BaseError} from 'make-error'
import {EventEmitter} from 'events'
import {
isArray,
isObject,
map,
} from './utils'
// ===================================================================
// Raised when adding a model whose id already exists in the collection.
export class ModelAlreadyExists extends BaseError {
  constructor (id) {
    super(`this model already exists: ${id}`)
  }
}
// ===================================================================
// Abstract base class for model collections.
//
// Concrete implementations must provide _add(), _get(), _remove() and
// _update(); they may also override count(), exists() and _first().
// Emits 'add', 'update' and 'remove' events after the corresponding
// operation succeeds.
export default class Collection extends EventEmitter {
  // Default model class.
  get Model () {
    return Model
  }

  // Make this property writable: assigning replaces the accessor with
  // a plain own data property.
  set Model (Model) {
    Object.defineProperty(this, 'Model', {
      configurable: true,
      // fixed: was misspelled `enumerale`, which left the property
      // non-enumerable
      enumerable: true,
      value: Model,
      writable: true,
    })
  }

  // Validates and inserts one model or an array of models.
  // Returns the added models (array input) or a single model instance.
  async add (models, opts) {
    const array = isArray(models)
    if (!array) {
      models = [models]
    }

    const {Model} = this
    map(models, model => {
      if (!(model instanceof Model)) {
        model = new Model(model)
      }

      const error = model.validate()
      if (error) {
        // TODO: Better system inspired by Backbone.js
        throw error
      }

      return model.properties
    }, models)

    models = await this._add(models, opts)
    this.emit('add', models)

    return array
      ? models
      : new this.Model(models[0])
  }

  // Returns the first model matching `properties` (or the given id),
  // wrapped in the Model class, or a falsy value when there is none.
  async first (properties) {
    if (!isObject(properties)) {
      properties = (properties !== undefined)
        ? { id: properties }
        : {}
    }

    const model = await this._first(properties)
    return model && new this.Model(model)
  }

  // Returns the raw models matching `properties` (or the given id).
  async get (properties) {
    if (!isObject(properties)) {
      properties = (properties !== undefined)
        ? { id: properties }
        : {}
    }

    return /* await */ this._get(properties)
  }

  // Removes one id or an array of ids; always resolves to true.
  async remove (ids) {
    if (!isArray(ids)) {
      ids = [ids]
    }

    await this._remove(ids)
    this.emit('remove', ids)

    return true
  }

  // Validates and updates one model or an array of models; every model
  // must already have an id.
  async update (models) {
    const array = isArray(models)
    if (!array) {
      models = [models]
    }

    const {Model} = this
    map(models, model => {
      if (!(model instanceof Model)) {
        // TODO: Problems, we may be mixing in some default
        // properties which will overwrite existing ones.
        model = new Model(model)
      }

      const id = model.get('id')

      // Missing models should be added not updated.
      if (id === undefined) {
        // FIXME: should not throw an exception but return a rejected promise.
        throw new Error('a model without an id cannot be updated')
      }

      const error = model.validate()
      if (error !== undefined) {
        // TODO: Better system inspired by Backbone.js.
        throw error
      }

      return model.properties
    }, models)

    models = await this._update(models)
    this.emit('update', models)

    return array
      ? models
      : new this.Model(models[0])
  }

  // Methods to override in implementations.

  _add () {
    throw new Error('not implemented')
  }

  _get () {
    throw new Error('not implemented')
  }

  _remove () {
    throw new Error('not implemented')
  }

  _update () {
    throw new Error('not implemented')
  }

  // Methods which may be overridden in implementations.

  count (properties) {
    return this.get(properties).get('count')
  }

  exists (properties) {
    /* jshint eqnull: true */
    return this.first(properties).then(model => model != null)
  }

  async _first (properties) {
    const models = await this.get(properties)
    return models.length
      ? models[0]
      : null
  }
}

View File

@ -0,0 +1,237 @@
import { createClient as createRedisClient } from 'redis'
import { difference, filter, forEach, isEmpty, keys as getKeys, map } from 'lodash'
import { ignoreErrors, promisifyAll } from 'promise-toolbox'
import { v4 as generateUuid } from 'uuid'
import Collection, { ModelAlreadyExists } from '../collection'
import { asyncMap } from '../utils'
// ===================================================================
// ///////////////////////////////////////////////////////////////////
// Data model:
// - prefix +'_id': value of the last generated identifier;
// - prefix +'_ids': set containing identifier of all models;
// - prefix +'_'+ index +':' + lowerCase(value): set of identifiers
// which have value for the given index.
// - prefix +':'+ id: hash containing the properties of a model;
// ///////////////////////////////////////////////////////////////////
// TODO: then-redis sends commands in order, we should use this
// semantic to simplify the code.
// TODO: Merge the options in the object to obtain extend-time
// configuration like Backbone.
// TODO: Remote events.
const VERSION = '20170905'
export default class Redis extends Collection {
constructor ({
connection,
indexes = [],
prefix,
uri,
}) {
super()
this.indexes = indexes
this.prefix = prefix
const redis = this.redis = promisifyAll(connection || createRedisClient(uri))
const key = `${prefix}:version`
redis.get(key).then(version => {
if (version === VERSION) {
return
}
let p = redis.set(`${prefix}:version`, VERSION)
switch (version) {
case undefined:
// - clean indexes
// - indexes are now case insensitive
p = p.then(() => this.rebuildIndexes())
}
return p
})::ignoreErrors()
}
rebuildIndexes () {
const { indexes, prefix, redis } = this
if (indexes.length === 0) {
return Promise.resolve()
}
const idsIndex = `${prefix}_ids`
return asyncMap(indexes, index =>
redis.keys(`${prefix}_${index}:*`).then(keys =>
keys.length !== 0 && redis.del(keys)
)
).then(() => asyncMap(redis.smembers(idsIndex), id =>
redis.hgetall(`${prefix}:${id}`).then(values =>
values == null
? redis.srem(idsIndex, id) // entry no longer exists
: asyncMap(indexes, index => {
const value = values[index]
if (value !== undefined) {
return redis.sadd(
`${prefix}_${index}:${String(value).toLowerCase()}`,
id
)
}
})
)
))
}
_extract (ids) {
const prefix = this.prefix + ':'
const {redis} = this
const models = []
return Promise.all(map(ids, id => {
return redis.hgetall(prefix + id).then(model => {
// If empty, consider it a no match.
if (isEmpty(model)) {
return
}
// Mix the identifier in.
model.id = id
models.push(model)
})
})).then(() => models)
}
_add (models, {replace = false} = {}) {
// TODO: remove “replace” which is a temporary measure, implement
// “set()” instead.
const {indexes, prefix, redis} = this
return Promise.all(map(models, async model => {
// Generate a new identifier if necessary.
if (model.id === undefined) {
model.id = generateUuid()
}
const { id } = model
const newEntry = await redis.sadd(prefix + '_ids', id)
if (!newEntry) {
if (!replace) {
throw new ModelAlreadyExists(id)
}
// remove the previous values from indexes
if (indexes.length !== 0) {
const previous = await redis.hgetall(`${prefix}:${id}`)
await asyncMap(indexes, index => {
const value = previous[index]
if (value !== undefined) {
return redis.srem(`${prefix}_${index}:${String(value).toLowerCase()}`, id)
}
})
}
}
const params = []
forEach(model, (value, name) => {
// No need to store the identifier (already in the key).
if (name === 'id') {
return
}
params.push(name, value)
})
const key = `${prefix}:${id}`
const promises = [
redis.del(key),
redis.hmset(key, ...params),
]
// Update indexes.
forEach(indexes, (index) => {
const value = model[index]
if (value === undefined) {
return
}
const key = prefix + '_' + index + ':' + String(value).toLowerCase()
promises.push(redis.sadd(key, id))
})
await Promise.all(promises)
return model
}))
}
_get (properties) {
const {prefix, redis} = this
if (isEmpty(properties)) {
return redis.smembers(prefix + '_ids').then(ids => this._extract(ids))
}
// Special treatment for the identifier.
const id = properties.id
if (id !== undefined) {
delete properties.id
return this._extract([id]).then(models => {
return (models.length && !isEmpty(properties))
? filter(models)
: models
})
}
const {indexes} = this
// Check for non indexed fields.
const unfit = difference(getKeys(properties), indexes)
if (unfit.length) {
throw new Error('fields not indexed: ' + unfit.join())
}
const keys = map(properties, (value, index) => `${prefix}_${index}:${String(value).toLowerCase()}`)
return redis.sinter(...keys).then(ids => this._extract(ids))
}
_remove (ids) {
if (isEmpty(ids)) {
return
}
const { indexes, prefix, redis } = this
// update main index
let promise = redis.srem(prefix + '_ids', ...ids)
// update other indexes
if (indexes.length !== 0) {
promise = Promise.all([ promise, asyncMap(ids, id =>
redis.hgetall(`${prefix}:${id}`).then(values =>
values != null && asyncMap(indexes, index => {
const value = values[index]
if (value !== undefined) {
return redis.srem(`${prefix}_${index}:${String(value).toLowerCase()}`, id)
}
})
)
) ])
}
return promise.then(() =>
// remove the models
redis.del(map(ids, id => `${prefix}:${id}`))
)
}
_update (models) {
return this._add(models, { replace: true })
}
}

View File

@ -0,0 +1,50 @@
import {EventEmitter} from 'events'
import {createRawObject, noop} from './utils'
// ===================================================================
// A single client connection carrying an arbitrary key/value store.
export default class Connection extends EventEmitter {
  constructor () {
    super()

    this._data = createRawObject()
  }

  // Close the connection.
  //
  // Idempotent: the method replaces itself with a no-op so closing
  // more than once never throws nor re-emits.
  close () {
    this.close = noop
    this.emit('close')
  }

  // Returns the value stored under `key`.
  //
  // When the key is absent, returns `defaultValue` if one was passed,
  // otherwise throws.
  get (key, defaultValue) {
    const data = this._data

    if (key in data) {
      return data[key]
    }

    if (arguments.length >= 2) {
      return defaultValue
    }

    throw new Error(`no value for \`${key}\``)
  }

  // Whether a value exists for this key.
  has (key) {
    return key in this._data
  }

  // Stores `value` under `key`.
  set (key, value) {
    this._data[key] = value
  }

  // Removes the value stored under `key`.
  unset (key) {
    delete this._data[key]
  }
}

View File

@ -0,0 +1,42 @@
import from2 from 'from2'
// Creates a readable stream which emits `data` repeated `n` times.
//
// `data` may be a Buffer or anything Buffer.from accepts; it must not
// be empty.
const constantStream = (data, n = 1) => {
  if (!Buffer.isBuffer(data)) {
    data = Buffer.from(data)
  }

  const { length } = data

  if (!length) {
    throw new Error('data should not be empty')
  }

  // Total number of bytes to emit.
  n *= length

  let currentLength = length
  return from2((size, next) => {
    // All bytes emitted: end the stream.
    if (n <= 0) {
      return next(null, null)
    }

    // Never emit more than the remaining byte count.
    if (n < size) {
      size = n
    }

    // Requested less than the current buffer: emit a whole number of
    // repetitions of `data` (at least one) sliced from it.
    if (size < currentLength) {
      const m = Math.floor(size / length) * length || length
      n -= m
      return next(null, data.slice(0, m))
    }

    // if more than twice the data length is requested, repeat the data
    if (size > currentLength * 2) {
      currentLength = Math.floor(size / length) * length
      data = Buffer.alloc(currentLength, data)
    }

    n -= currentLength
    return next(null, data)
  })
}
export { constantStream as default }

View File

@ -0,0 +1,175 @@
import { getBoundPropertyDescriptor } from 'bind-property-descriptor'
import {
isArray,
isFunction,
} from './utils'
// ===================================================================
const {
defineProperties,
getOwnPropertyDescriptor,
} = Object
// ===================================================================
// Debounce decorator for methods.
//
// See: https://github.com/wycats/javascript-decorators
//
// TODO: make it work for single functions.
// Debounce decorator for methods.
//
// While `duration` (ms) has not elapsed since the last real call, the
// previous result (or thrown error) is replayed instead of invoking
// the method again. State is kept per instance.
//
// See: https://github.com/wycats/javascript-decorators
//
// TODO: make it work for single functions.
export const debounce = duration => (target, name, descriptor) => {
  const method = descriptor.value

  // Per-instance debounce state lives under this symbol, directly on
  // the object itself.
  const stateSymbol = Symbol(`debounced ${name} data`)

  function debounced () {
    let state = this[stateSymbol]
    if (state === undefined) {
      state = this[stateSymbol] = {
        lastCall: 0,
        wrapper: null,
      }
    }

    const now = Date.now()
    if (now > state.lastCall + duration) {
      state.lastCall = now
      try {
        const result = method.apply(this, arguments)
        state.wrapper = () => result
      } catch (error) {
        state.wrapper = () => { throw error }
      }
    }
    return state.wrapper()
  }

  // Clears the cached state of `obj`, forcing the next call through.
  debounced.reset = obj => { delete obj[stateSymbol] }

  descriptor.value = debounced
  return descriptor
}
// -------------------------------------------------------------------
// `Reflect.ownKeys` ponyfill: all own string and symbol keys,
// enumerable or not.
const _ownKeys = typeof Reflect !== 'undefined' && Reflect.ownKeys
  ? Reflect.ownKeys
  : (({
    getOwnPropertyNames,
    getOwnPropertySymbols,
  }) => getOwnPropertySymbols
    ? obj => getOwnPropertyNames(obj).concat(getOwnPropertySymbols(obj))
    : getOwnPropertyNames
  )(Object)

// Instance properties which must not be copied from mix-ins.
const _isIgnoredProperty = name =>
  name[0] === '_' || name === 'constructor'

const _IGNORED_STATIC_PROPERTIES = {
  __proto__: null,

  arguments: true,
  caller: true,
  length: true,
  name: true,
  prototype: true,
}

// Static properties which must not be copied from mix-ins.
const _isIgnoredStaticProperty = name => _IGNORED_STATIC_PROPERTIES[name]
// Class decorator mixing `MixIns` (one or an array) into `Class`.
//
// - Plain-object mix-ins: their properties are copied (non-enumerable)
//   onto the prototype immediately.
// - Class mix-ins: each new object gets a companion mix-in instance
//   (constructed with the object and the original arguments) whose
//   methods are bound to it and defined directly on the object.
//
// Returns a `Decorator` factory function which also carries the static
// properties of `Class` and of the mix-ins.
export const mixin = MixIns => Class => {
  if (!isArray(MixIns)) {
    MixIns = [ MixIns ]
  }

  const { name } = Class

  // Copy properties of plain object mix-ins to the prototype.
  {
    const allMixIns = MixIns
    MixIns = []
    const { prototype } = Class
    const descriptors = { __proto__: null }
    for (const MixIn of allMixIns) {
      // Class mix-ins are kept for instantiation time.
      if (isFunction(MixIn)) {
        MixIns.push(MixIn)
        continue
      }

      for (const prop of _ownKeys(MixIn)) {
        // Refuse to shadow an existing prototype member.
        if (prop in prototype) {
          throw new Error(`${name}#${prop} is already defined`)
        }

        (
          descriptors[prop] = getOwnPropertyDescriptor(MixIn, prop)
        ).enumerable = false // Object methods are enumerable but class methods are not.
      }
    }
    defineProperties(prototype, descriptors)
  }

  function Decorator (...args) {
    const instance = new Class(...args)

    for (const MixIn of MixIns) {
      const { prototype } = MixIn
      const mixinInstance = new MixIn(instance, ...args)
      const descriptors = { __proto__: null }
      for (const prop of _ownKeys(prototype)) {
        if (_isIgnoredProperty(prop)) {
          continue
        }

        if (prop in instance) {
          throw new Error(`${name}#${prop} is already defined`)
        }

        // Bind the mix-in method to the mix-in instance and expose it
        // on the decorated object.
        descriptors[prop] = getBoundPropertyDescriptor(
          prototype,
          prop,
          mixinInstance
        )
      }
      defineProperties(instance, descriptors)
    }

    return instance
  }

  // Copy original and mixed-in static properties on Decorator class.
  const descriptors = { __proto__: null }
  for (const prop of _ownKeys(Class)) {
    let descriptor
    if (!(
      // Special properties are not defined...
      _isIgnoredStaticProperty(prop) &&

      // if they already exist...
      (descriptor = getOwnPropertyDescriptor(Decorator, prop)) &&

      // and are not configurable.
      !descriptor.configurable
    )) {
      descriptors[prop] = getOwnPropertyDescriptor(Class, prop)
    }
  }
  for (const MixIn of MixIns) {
    for (const prop of _ownKeys(MixIn)) {
      if (_isIgnoredStaticProperty(prop)) {
        continue
      }

      if (prop in descriptors) {
        throw new Error(`${name}.${prop} is already defined`)
      }

      descriptors[prop] = getOwnPropertyDescriptor(MixIn, prop)
    }
  }
  defineProperties(Decorator, descriptors)

  return Decorator
}

View File

@ -0,0 +1,39 @@
/* eslint-env jest */
import {debounce} from './decorators'
// ===================================================================
describe('debounce()', () => {
  let i

  // Counter method debounced over a 10 ms window.
  class Foo {
    @debounce(1e1)
    foo () {
      ++i
    }
  }

  beforeEach(() => {
    i = 0
  })

  it('works', done => {
    const foo = new Foo()

    expect(i).toBe(0)

    foo.foo()
    expect(i).toBe(1)

    // Within the window: the cached result is replayed, no new call.
    foo.foo()
    expect(i).toBe(1)

    // After the window (20 ms > 10 ms) the method runs again.
    setTimeout(() => {
      foo.foo()
      expect(i).toBe(2)

      done()
    }, 2e1)
  })
})

View File

@ -0,0 +1,85 @@
// Buffer driver for [fatfs](https://github.com/natevw/fatfs).
//
// Usage:
//
// ```js
// import fatfs from 'fatfs'
// import fatfsBuffer, { init as fatfsBufferInit } from './fatfs-buffer'
//
// const buffer = fatfsBufferInit()
//
// const fs = fatfs.createFileSystem(fatfsBuffer(buffer))
//
// fs.writeFile('/foo', 'content of foo', function (err, content) {
// if (err) {
// console.error(err)
// }
// })
// ```
import { boot16 as fat16 } from 'fatfs/structs'
const SECTOR_SIZE = 512
const TEN_MIB = 10 * 1024 * 1024
// Creates a 10MB buffer and initializes it as a FAT 16 volume.
// Creates a 10MB buffer and initializes it as a FAT 16 volume.
//
// The boot-sector fields below mirror the layout produced by
// `mkfs.fat` for a 10 MiB image (512-byte sectors, 4 sectors per
// cluster, 2 FATs of 20 sectors each).
export function init () {
  const buf = Buffer.alloc(TEN_MIB)

  // https://github.com/natevw/fatfs/blob/master/structs.js
  fat16.pack({
    jmpBoot: Buffer.from('eb3c90', 'hex'),
    OEMName: 'mkfs.fat',
    BytsPerSec: SECTOR_SIZE,
    SecPerClus: 4,
    ResvdSecCnt: 1,
    NumFATs: 2,
    RootEntCnt: 512,
    TotSec16: 20480,
    Media: 248,
    FATSz16: 20,
    SecPerTrk: 32,
    NumHeads: 64,
    HiddSec: 0,
    TotSec32: 0,
    DrvNum: 128,
    Reserved1: 0,
    BootSig: 41,
    VolID: 895111106,
    VolLab: 'NO NAME    ',
    FilSysType: 'FAT16   ',
  }, buf)

  // End of sector.
  buf[0x1fe] = 0x55
  buf[0x1ff] = 0xaa

  // Mark sector as reserved.
  buf[0x200] = 0xf8
  buf[0x201] = 0xff
  buf[0x202] = 0xff
  buf[0x203] = 0xff

  // Mark sector as reserved.
  buf[0x2a00] = 0xf8
  buf[0x2a01] = 0xff
  buf[0x2a02] = 0xff
  buf[0x2a03] = 0xff

  return buf
}
// Creates a fatfs volume driver backed by the given buffer.
export default buffer => {
  const numSectors = Math.floor(buffer.length / SECTOR_SIZE)

  return {
    sectorSize: SECTOR_SIZE,
    numSectors,

    // Copies sectors starting at index `i` from the backing buffer
    // into `target`.
    readSectors (i, target, cb) {
      buffer.copy(target, 0, i * SECTOR_SIZE)
      cb()
    },

    // Copies `source` into the backing buffer at sector index `i`.
    writeSectors (i, source, cb) {
      source.copy(buffer, i * SECTOR_SIZE, 0)
      cb()
    },
  }
}

View File

@ -0,0 +1,54 @@
// See: https://gist.github.com/julien-f/5b9a3537eb82a34b04e2
const matcher = require('micromatch').matcher
module.exports = function globMatcher (patterns, opts) {
if (!Array.isArray(patterns)) {
if (patterns[0] === '!') {
const m = matcher(patterns.slice(1), opts)
return function (string) {
return !m(string)
}
} else {
return matcher(patterns, opts)
}
}
const noneMustMatch = []
const anyMustMatch = []
// TODO: could probably be optimized by combining all positive patterns (and all negative patterns) as a single matcher.
for (let i = 0, n = patterns.length; i < n; ++i) {
const pattern = patterns[i]
if (pattern[0] === '!') {
noneMustMatch.push(matcher(pattern.slice(1), opts))
} else {
anyMustMatch.push(matcher(pattern, opts))
}
}
const nNone = noneMustMatch.length
const nAny = anyMustMatch.length
return function (string) {
let i
for (i = 0; i < nNone; ++i) {
if (noneMustMatch[i](string)) {
return false
}
}
if (nAny === 0) {
return true
}
for (i = 0; i < nAny; ++i) {
if (anyMustMatch[i](string)) {
return true
}
}
return false
}
}

View File

@ -0,0 +1,649 @@
import appConf from 'app-conf'
import bind from 'lodash/bind'
import blocked from 'blocked'
import createExpress from 'express'
import createLogger from 'debug'
import eventToPromise from 'event-to-promise'
import has from 'lodash/has'
import helmet from 'helmet'
import includes from 'lodash/includes'
import proxyConsole from './proxy-console'
import serveStatic from 'serve-static'
import startsWith from 'lodash/startsWith'
import WebSocket from 'ws'
import { compile as compilePug } from 'pug'
import { createServer as createProxyServer } from 'http-proxy'
import { join as joinPath } from 'path'
import JsonRpcPeer from 'json-rpc-peer'
import { invalidCredentials } from 'xo-common/api-errors'
import {
ensureDir,
readdir,
readFile,
} from 'fs-extra'
import WebServer from 'http-server-plus'
import Xo from './xo'
import {
createRawObject,
forEach,
isArray,
isFunction,
mapToArray,
pFromCallback,
} from './utils'
import bodyParser from 'body-parser'
import connectFlash from 'connect-flash'
import cookieParser from 'cookie-parser'
import expressSession from 'express-session'
import passport from 'passport'
import { parse as parseCookies } from 'cookie'
import { Strategy as LocalStrategy } from 'passport-local'
// ===================================================================
const debug = createLogger('xo:main')
const warn = (...args) => {
console.warn('[Warn]', ...args)
}
// ===================================================================
// Configuration entries which are no longer honored.
const DEPRECATED_ENTRIES = [
  'users',
  'servers',
]

// Loads the `xo-server` configuration via app-conf and warns about
// deprecated entries.
async function loadConfiguration () {
  const config = await appConf.load('xo-server', {
    ignoreUnknownFormats: true,
  })

  debug('Configuration loaded.')

  // Print a message if deprecated entries are specified.
  forEach(DEPRECATED_ENTRIES, entry => {
    if (has(config, entry)) {
      warn(`${entry} configuration is deprecated.`)
    }
  })

  return config
}
// ===================================================================
// Creates the Express application with the middlewares required for
// authentication (helmet, cookies, session, flash, body parsing,
// Passport).
function createExpressApp () {
  const app = createExpress()

  app.use(helmet())

  // Registers the cookie-parser and express-session middlewares,
  // necessary for connect-flash.
  app.use(cookieParser())
  app.use(expressSession({
    resave: false,
    saveUninitialized: false,

    // TODO: should be in the config file.
    secret: 'CLWguhRZAZIXZcbrMzHCYmefxgweItKnS',
  }))

  // Registers the connect-flash middleware, necessary for Passport to
  // display error messages.
  app.use(connectFlash())

  // Registers the body-parser middleware, necessary for Passport to
  // access the username and password from the sign in form.
  app.use(bodyParser.urlencoded({ extended: false }))

  // Registers Passport's middlewares.
  app.use(passport.initialize())

  return app
}
// Sets up Passport-based authentication on the Express app:
// sign in/out pages, strategy callback routes, token cookie handling
// and the built-in local (username/password) strategy.
async function setUpPassport (express, xo) {
  const strategies = createRawObject()
  xo.registerPassportStrategy = strategy => {
    passport.use(strategy)

    const {name} = strategy
    if (name !== 'local') {
      strategies[name] = strategy.label || name
    }
  }

  // Registers the sign in form.
  const signInPage = compilePug(
    await readFile(joinPath(__dirname, '..', 'signin.pug'))
  )
  express.get('/signin', (req, res, next) => {
    res.send(signInPage({
      error: req.flash('error')[0],
      strategies,
    }))
  })

  express.get('/signout', (req, res) => {
    res.clearCookie('token')
    res.redirect('/')
  })

  const SIGNIN_STRATEGY_RE = /^\/signin\/([^/]+)(\/callback)?(:?\?.*)?$/
  express.use(async (req, res, next) => {
    const { url } = req
    const matches = url.match(SIGNIN_STRATEGY_RE)

    if (matches) {
      // Delegate the request to the matched strategy.
      return passport.authenticate(matches[1], async (err, user, info) => {
        if (err) {
          return next(err)
        }

        if (!user) {
          req.flash('error', info ? info.message : 'Invalid credentials')
          return res.redirect('/signin')
        }

        // The cookie will be set in via the next request because some
        // browsers do not save cookies on redirect.
        req.flash(
          'token',
          (await xo.createAuthenticationToken({userId: user.id})).id
        )

        // The session is only persistent for internal provider and if 'Remember me' box is checked
        req.flash(
          'session-is-persistent',
          matches[1] === 'local' && req.body['remember-me'] === 'on'
        )

        res.redirect(req.flash('return-url')[0] || '/')
      })(req, res, next)
    }

    const token = req.flash('token')[0]

    if (token) {
      const isPersistent = req.flash('session-is-persistent')[0]

      if (isPersistent) {
        // Persistent cookie ? => 1 year
        res.cookie('token', token, { maxAge: 1000 * 60 * 60 * 24 * 365 })
      } else {
        // Non-persistent : external provider as Github, Twitter...
        res.cookie('token', token)
      }

      next()
    } else if (req.cookies.token) {
      next()
    } else if (/favicon|fontawesome|images|styles|\.(?:css|jpg|png)$/.test(url)) {
      // Static assets are served without authentication.
      next()
    } else {
      // Remember the requested URL and force a sign in.
      req.flash('return-url', url)
      return res.redirect('/signin')
    }
  })

  // Install the local strategy.
  xo.registerPassportStrategy(new LocalStrategy(
    async (username, password, done) => {
      try {
        const user = await xo.authenticateUser({username, password})
        done(null, user)
      } catch (error) {
        done(null, false, { message: error.message })
      }
    }
  ))
}
// ===================================================================
// Loads the plugin module at `pluginPath` and registers it with this
// xo instance (`this` is the Xo object).
async function registerPlugin (pluginPath, pluginName) {
  const plugin = require(pluginPath)

  // Description and version come from the plugin's package.json when
  // available.
  const { description, version = 'unknown' } = (() => {
    try {
      return require(pluginPath + '/package.json')
    } catch (_) {
      return {}
    }
  })()

  // Supports both “normal” CommonJS and Babel's ES2015 modules.
  const {
    default: factory = plugin,
    configurationSchema,
    configurationPresets,
    testSchema,
  } = plugin

  // The default export can be either a factory or directly a plugin
  // instance.
  const instance = isFunction(factory)
    ? factory({
      xo: this,
      getDataDir: () => {
        const dir = `${this._config.datadir}/${pluginName}`
        return ensureDir(dir).then(() => dir)
      },
    })
    : factory

  await this.registerPlugin(
    pluginName,
    instance,
    configurationSchema,
    configurationPresets,
    description,
    testSchema,
    version
  )
}
const debugPlugin = createLogger('xo:plugin')

// Registers a plugin, logging success or failure.
//
// Registration failures are deliberately swallowed (logged only) so a
// broken plugin does not prevent the server from starting.
function registerPluginWrapper (pluginPath, pluginName) {
  debugPlugin('register %s', pluginName)

  return registerPlugin.call(this, pluginPath, pluginName).then(
    () => {
      debugPlugin(`successfully register ${pluginName}`)
    },
    error => {
      debugPlugin(`failed register ${pluginName}`)
      debugPlugin(error)
    }
  )
}
const PLUGIN_PREFIX = 'xo-server-'
const PLUGIN_PREFIX_LENGTH = PLUGIN_PREFIX.length

// Registers every `xo-server-*` module found in `path` (`this` is the
// Xo object). A missing directory is treated as empty.
async function registerPluginsInPath (path) {
  const files = await readdir(path).catch(error => {
    if (error.code === 'ENOENT') {
      return []
    }
    throw error
  })

  await Promise.all(mapToArray(files, name => {
    if (startsWith(name, PLUGIN_PREFIX)) {
      return registerPluginWrapper.call(
        this,
        `${path}/${name}`,
        // Plugin name without the `xo-server-` prefix.
        name.slice(PLUGIN_PREFIX_LENGTH)
      )
    }
  }))
}
// Registers plugins from the local node_modules and from the global
// install location.
async function registerPlugins (xo) {
  await Promise.all(mapToArray([
    `${__dirname}/../node_modules/`,
    '/usr/local/lib/node_modules/',
  ], xo::registerPluginsInPath))
}
// ===================================================================
// Makes `webServer` listen with the given options, reading the TLS
// certificate/key files when provided. Listen failures are logged
// (with hints for common error codes) but never thrown.
async function makeWebServerListen (webServer, {
  certificate,

  // The property was called `certificate` before.
  cert = certificate,

  key,
  ...opts
}) {
  if (cert && key) {
    [opts.cert, opts.key] = await Promise.all([
      readFile(cert),
      readFile(key),
    ])
  }
  try {
    const niceAddress = await webServer.listen(opts)
    debug(`Web server listening on ${niceAddress}`)
  } catch (error) {
    if (error.niceAddress) {
      warn(`Web server could not listen on ${error.niceAddress}`)

      const {code} = error
      if (code === 'EACCES') {
        warn('  Access denied.')
        warn('  Ports < 1024 are often reserved to privileges users.')
      } else if (code === 'EADDRINUSE') {
        warn('  Address already in use.')
      }
    } else {
      warn('Web server could not listen:', error.message)
    }
  }
}

// Creates the web server and makes it listen on every configured
// address (merging per-listen options over the shared ones).
async function createWebServer ({ listen, listenOptions }) {
  const webServer = new WebServer()

  await Promise.all(mapToArray(listen,
    opts => makeWebServerListen(webServer, { ...listenOptions, ...opts })
  ))

  return webServer
}
// ===================================================================
// Sets up HTTP and WebSocket reverse proxies for every configured
// path prefix (`opts` maps prefix → target base URL).
const setUpProxies = (express, opts, xo) => {
  if (!opts) {
    return
  }

  const proxy = createProxyServer({
    ignorePath: true,
  }).on('error', (error) => console.error(error))

  // TODO: sort proxies by descending prefix length.

  // HTTP request proxy.
  express.use((req, res, next) => {
    const { url } = req

    for (const prefix in opts) {
      if (startsWith(url, prefix)) {
        const target = opts[prefix]

        proxy.web(req, res, {
          target: target + url.slice(prefix.length),
        })

        return
      }
    }

    next()
  })

  // WebSocket proxy.
  const webSocketServer = new WebSocket.Server({
    noServer: true,
  })
  xo.on('stop', () => pFromCallback(cb => webSocketServer.close(cb)))

  express.on('upgrade', (req, socket, head) => {
    const { url } = req

    for (const prefix in opts) {
      if (startsWith(url, prefix)) {
        const target = opts[prefix]

        proxy.ws(req, socket, head, {
          target: target + url.slice(prefix.length),
        })

        return
      }
    }
  })
}
// ===================================================================
// Serves static files: `opts` maps a URL to one or many local paths.
const setUpStaticFiles = (express, opts) => {
  forEach(opts, (paths, url) => {
    if (!isArray(paths)) {
      paths = [paths]
    }

    forEach(paths, path => {
      debug('Setting up %s → %s', url, path)

      express.use(url, serveStatic(path))
    })
  })
}
// ===================================================================
// Exposes the JSON-RPC API over WebSocket on `/api/`.
//
// NOTE(review): `verboseLogsOnErrors` is accepted but never used here
// — confirm whether it should be forwarded somewhere.
const setUpApi = (webServer, xo, verboseLogsOnErrors) => {
  const webSocketServer = new WebSocket.Server({
    noServer: true,
  })
  xo.on('stop', () => pFromCallback(cb => webSocketServer.close(cb)))

  const onConnection = (socket, upgradeReq) => {
    const { remoteAddress } = upgradeReq.socket

    debug('+ WebSocket connection (%s)', remoteAddress)

    // Create the abstract XO object for this connection.
    const connection = xo.createUserConnection()
    connection.once('close', () => {
      socket.close()
    })

    // Create the JSON-RPC server for this connection.
    const jsonRpc = new JsonRpcPeer(message => {
      if (message.type === 'request') {
        return xo.callApiMethod(connection, message.method, message.params)
      }
    })
    connection.notify = bind(jsonRpc.notify, jsonRpc)

    // Close the XO connection with this WebSocket.
    socket.once('close', () => {
      debug('- WebSocket connection (%s)', remoteAddress)

      connection.close()
    })

    // Connect the WebSocket to the JSON-RPC server.
    socket.on('message', message => {
      jsonRpc.write(message)
    })

    const onSend = error => {
      if (error) {
        warn('WebSocket send:', error.stack)
      }
    }
    jsonRpc.on('data', data => {
      // The socket may have been closed during the API method
      // execution.
      if (socket.readyState === WebSocket.OPEN) {
        socket.send(data, onSend)
      }
    })
  }

  webServer.on('upgrade', (req, socket, head) => {
    if (req.url === '/api/') {
      webSocketServer.handleUpgrade(req, socket, head, ws => onConnection(ws, req))
    }
  })
}
// ===================================================================
const CONSOLE_PROXY_PATH_RE = /^\/api\/consoles\/(.*)$/

// Proxies VM console WebSocket connections (`/api/consoles/<id>`)
// after checking the caller's `operate` permission on the VM.
const setUpConsoleProxy = (webServer, xo) => {
  const webSocketServer = new WebSocket.Server({
    noServer: true,
  })
  xo.on('stop', () => pFromCallback(cb => webSocketServer.close(cb)))

  webServer.on('upgrade', async (req, socket, head) => {
    const matches = CONSOLE_PROXY_PATH_RE.exec(req.url)
    if (!matches) {
      return
    }

    const [, id] = matches
    try {
      // TODO: factorize permissions checking in an Express middleware.
      {
        const { token } = parseCookies(req.headers.cookie)

        const user = await xo.authenticateUser({ token })
        if (!await xo.hasPermissions(user.id, [ [ id, 'operate' ] ])) {
          throw invalidCredentials()
        }

        const { remoteAddress } = socket
        debug('+ Console proxy (%s - %s)', user.name, remoteAddress)
        socket.on('close', () => {
          debug('- Console proxy (%s - %s)', user.name, remoteAddress)
        })
      }

      const xapi = xo.getXapi(id, ['VM', 'VM-controller'])
      const vmConsole = xapi.getVmConsole(id)

      // FIXME: lost connection due to VM restart is not detected.
      webSocketServer.handleUpgrade(req, socket, head, connection => {
        proxyConsole(connection, vmConsole, xapi.sessionId)
      })
    } catch (error) {
      console.error((error && error.stack) || error)
    }
  })
}
// ===================================================================
// Usage text built from the package's name and version.
const USAGE = (({
  name,
  version,
}) => `Usage: ${name} [--safe-mode]

${name} v${version}`)(require('../package.json'))
// ===================================================================
// Entry point: loads the configuration, starts the web server, drops
// privileges, wires up the API/proxies/static files and plugins, then
// waits for a graceful shutdown.
export default async function main (args) {
  if (includes(args, '--help') || includes(args, '-h')) {
    return USAGE
  }

  // Log event-loop stalls.
  {
    const debug = createLogger('xo:perf')
    blocked(ms => {
      debug('blocked for %sms', ms | 0)
    })
  }

  const config = await loadConfiguration()

  const webServer = await createWebServer(config.http)

  // Now the web server is listening, drop privileges.
  try {
    const {user, group} = config
    if (group) {
      process.setgid(group)
      debug('Group changed to', group)
    }
    if (user) {
      process.setuid(user)
      debug('User changed to', user)
    }
  } catch (error) {
    warn('Failed to change user/group:', error)
  }

  // Creates main object.
  const xo = new Xo(config)

  // Register web server close on XO stop.
  xo.on('stop', () => pFromCallback(cb => webServer.close(cb)))

  // Connects to all registered servers.
  await xo.start()

  // Trigger a clean job.
  await xo.clean()

  // Express is used to manage non WebSocket connections.
  const express = createExpressApp()

  if (config.http.redirectToHttps) {
    // Find the first HTTPS-capable listen entry to redirect to.
    let port
    forEach(config.http.listen, listen => {
      if (
        listen.port &&
        (listen.cert || listen.certificate)
      ) {
        port = listen.port
        return false
      }
    })

    if (port === undefined) {
      warn('Could not setup HTTPs redirection: no HTTPs port found')
    } else {
      express.use((req, res, next) => {
        if (req.secure) {
          return next()
        }

        res.redirect(`https://${req.hostname}:${port}${req.originalUrl}`)
      })
    }
  }

  // Must be set up before the API.
  setUpConsoleProxy(webServer, xo)

  // Must be set up before the API.
  express.use(bind(xo._handleHttpRequest, xo))

  // Everything above is not protected by the sign in, allowing xo-cli
  // to work properly.
  await setUpPassport(express, xo)

  // Attaches express to the web server.
  webServer.on('request', express)
  webServer.on('upgrade', (req, socket, head) => {
    express.emit('upgrade', req, socket, head)
  })

  // Must be set up before the static files.
  setUpApi(webServer, xo, config.verboseApiLogsOnErrors)

  setUpProxies(express, config.http.proxies, xo)

  setUpStaticFiles(express, config.http.mounts)

  if (!includes(args, '--safe-mode')) {
    await registerPlugins(xo)
  }

  // Gracefully shutdown on signals.
  //
  // TODO: implements a timeout? (or maybe it is the services launcher
  // responsibility?)
  forEach([ 'SIGINT', 'SIGTERM' ], signal => {
    let alreadyCalled = false
    process.on(signal, () => {
      if (alreadyCalled) {
        warn('forced exit')
        process.exit(1)
      }
      alreadyCalled = true

      debug('%s caught, closing…', signal)
      xo.stop()
    })
  })

  await eventToPromise(xo, 'stopped')

  debug('bye :-)')
}

View File

@ -0,0 +1,184 @@
import Bluebird from 'bluebird'
import { BaseError } from 'make-error'
import { createPredicate } from 'value-matcher'
import { timeout } from 'promise-toolbox'
import {
assign,
filter,
find,
isEmpty,
map,
mapValues,
} from 'lodash'
import { crossProduct } from './math'
import {
serializeError,
thunkToArray,
} from './utils'
// Base error class for job execution failures.
export class JobExecutorError extends BaseError {}

// Thrown when the job's `type` is not supported by the executor.
export class UnsupportedJobType extends JobExecutorError {
  constructor (job) {
    super('Unknown job type: ' + job.type)
  }
}

// Thrown when a params vector's `type` is not supported.
export class UnsupportedVectorType extends JobExecutorError {
  constructor (vector) {
    super('Unknown vector type: ' + vector.type)
  }
}
// ===================================================================
// Dispatch table mapping a params-vector `type` to the function which
// resolves it to an array of parameter objects (`this` is the
// JobExecutor).
const paramsVectorActionsMap = {
  // Projects `value` through `mapping` (result key → source key).
  extractProperties ({ mapping, value }) {
    return mapValues(mapping, key => value[key])
  },
  // Cartesian product of the resolved sub-vectors.
  crossProduct ({ items }) {
    return thunkToArray(crossProduct(
      map(items, value => resolveParamsVector.call(this, value))
    ))
  },
  // All XO objects matching `pattern`; throws when none match.
  fetchObjects ({ pattern }) {
    const objects = filter(this.xo.getObjects(), createPredicate(pattern))
    if (isEmpty(objects)) {
      throw new Error('no objects match this pattern')
    }
    return objects
  },
  // Resolves `iteratee` once per value of the resolved `collection`,
  // injecting the value under `paramName`.
  map ({ collection, iteratee, paramName = 'value' }) {
    return map(resolveParamsVector.call(this, collection), value => {
      return resolveParamsVector.call(this, {
        ...iteratee,
        [paramName]: value,
      })
    })
  },
  // Literal list of parameter objects.
  set: ({ values }) => values,
}

// Resolves a params vector to its concrete values using the dispatch
// table above; throws on unknown vector types.
export function resolveParamsVector (paramsVector) {
  const visitor = paramsVectorActionsMap[paramsVector.type]
  if (!visitor) {
    throw new Error(`Unsupported function '${paramsVector.type}'.`)
  }

  return visitor.call(this, paramsVector)
}
// ===================================================================
// Executes jobs (currently only `call`-type jobs) and logs their
// lifecycle to the `jobs` logger.
export default class JobExecutor {
  constructor (xo) {
    this.xo = xo

    // The logger is not available until Xo has started.
    xo.on('start', () => xo.getLogger('jobs').then(logger => {
      this._logger = logger
    }))
  }

  // Runs `job`, logging start/end events; rethrows any failure after
  // logging it.
  async exec (job) {
    const runJobId = this._logger.notice(`Starting execution of ${job.id}.`, {
      event: 'job.start',
      userId: job.userId,
      jobId: job.id,
      key: job.key,
    })

    try {
      if (job.type === 'call') {
        const execStatus = await this._execCall(job, runJobId)

        this.xo.emit('job:terminated', execStatus)
      } else {
        throw new UnsupportedJobType(job)
      }

      this._logger.notice(`Execution terminated for ${job.id}.`, {
        event: 'job.end',
        runJobId,
      })
    } catch (error) {
      this._logger.error(`The execution of ${job.id} has failed.`, {
        event: 'job.end',
        runJobId,
        error: serializeError(error),
      })

      throw error
    }
  }

  // Executes a `call` job: resolves the params vector then invokes the
  // API method once per parameter set (2 at a time), impersonating the
  // job's user. Returns the per-call execution status.
  async _execCall (job, runJobId) {
    const { paramsVector } = job
    const paramsFlatVector = paramsVector
      ? resolveParamsVector.call(this, paramsVector)
      : [{}] // One call with no parameters

    const connection = this.xo.createUserConnection()

    connection.set('user_id', job.userId)

    const schedule = find(await this.xo.getAllSchedules(), { job: job.id })

    const execStatus = {
      calls: {},
      runJobId,
      start: Date.now(),
      timezone: schedule !== undefined ? schedule.timezone : undefined,
    }

    await Bluebird.map(paramsFlatVector, params => {
      const runCallId = this._logger.notice(`Starting ${job.method} call. (${job.id})`, {
        event: 'jobCall.start',
        runJobId,
        method: job.method,
        params,
      })

      const call = execStatus.calls[runCallId] = {
        method: job.method,
        params,
        start: Date.now(),
      }

      let promise = this.xo.callApiMethod(connection, job.method, assign({}, params))
      if (job.timeout) {
        promise = promise::timeout(job.timeout)
      }

      return promise.then(
        value => {
          this._logger.notice(`Call ${job.method} (${runCallId}) is a success. (${job.id})`, {
            event: 'jobCall.end',
            runJobId,
            runCallId,
            returnedValue: value,
          })

          call.returnedValue = value
          call.end = Date.now()
        },
        reason => {
          this._logger.notice(`Call ${job.method} (${runCallId}) has failed. (${job.id})`, {
            event: 'jobCall.end',
            runJobId,
            runCallId,
            error: serializeError(reason),
          })

          call.error = reason
          call.end = Date.now()
        }
      )
    }, {
      concurrency: 2,
    })

    connection.close()
    execStatus.end = Date.now()

    return execStatus
  }
}

View File

@ -0,0 +1,100 @@
/* eslint-env jest */
import { forEach } from 'lodash'
import { resolveParamsVector } from './job-executor'
// Data-driven tests: each entry is [ expectedResult, paramsVector,
// optional context (`this`) exposing a fake `xo` ].
describe('resolveParamsVector', function () {
  forEach({
    'cross product with three sets': [
      // Expected result.
      [ { id: 3, value: 'foo', remote: 'local' },
        { id: 7, value: 'foo', remote: 'local' },
        { id: 10, value: 'foo', remote: 'local' },
        { id: 3, value: 'bar', remote: 'local' },
        { id: 7, value: 'bar', remote: 'local' },
        { id: 10, value: 'bar', remote: 'local' } ],
      // Entry.
      {
        type: 'crossProduct',
        items: [{
          type: 'set',
          values: [ { id: 3 }, { id: 7 }, { id: 10 } ],
        }, {
          type: 'set',
          values: [ { value: 'foo' }, { value: 'bar' } ],
        }, {
          type: 'set',
          values: [ { remote: 'local' } ],
        }],
      },
    ],
    'cross product with `set` and `map`': [
      // Expected result.
      [
        { remote: 'local', id: 'vm:2' },
        { remote: 'smb', id: 'vm:2' },
      ],
      // Entry.
      {
        type: 'crossProduct',
        items: [{
          type: 'set',
          values: [ { remote: 'local' }, { remote: 'smb' } ],
        }, {
          type: 'map',
          collection: {
            type: 'fetchObjects',
            pattern: {
              $pool: { __or: [ 'pool:1', 'pool:8', 'pool:12' ] },
              power_state: 'Running',
              tags: [ 'foo' ],
              type: 'VM',
            },
          },
          iteratee: {
            type: 'extractProperties',
            mapping: { id: 'id' },
          },
        }],
      },
      // Context.
      {
        xo: {
          // Only 'vm:2' matches the pattern above.
          getObjects: function () {
            return [{
              id: 'vm:1',
              $pool: 'pool:1',
              tags: [],
              type: 'VM',
              power_state: 'Halted',
            }, {
              id: 'vm:2',
              $pool: 'pool:1',
              tags: [ 'foo' ],
              type: 'VM',
              power_state: 'Running',
            }, {
              id: 'host:1',
              type: 'host',
              power_state: 'Running',
            }, {
              id: 'vm:3',
              $pool: 'pool:8',
              tags: [ 'foo' ],
              type: 'VM',
              power_state: 'Halted',
            }]
          },
        },
      },
    ],
  }, ([ expectedResult, entry, context ], name) => {
    describe(`with ${name}`, () => {
      it('Resolves params vector', () => {
        expect(resolveParamsVector.call(context, entry)).toEqual(expectedResult)
      })
    })
  })
})

View File

@ -0,0 +1,202 @@
import appConf from 'app-conf'
import get from 'lodash/get'
import highland from 'highland'
import levelup from 'level-party'
import ndjson from 'ndjson'
import parseArgs from 'minimist'
import sublevel from 'level-sublevel'
import util from 'util'
import { repair as repairDb } from 'level'
import {forEach} from './utils'
import globMatcher from './glob-matcher'
// ===================================================================
// Streams the log entries from `db` (newest first), applying the
// since/until/matchers/limit filters from `args`, and prints them
// either as ndjson (`--json`) or via util.inspect.
//
// Resolves when the stream is exhausted.
async function printLogs (db, args) {
  // `reverse: true` → newest entries first.
  let stream = highland(db.createReadStream({reverse: true}))
  if (args.since) {
    stream = stream.filter(({value}) => (value.time >= args.since))
  }
  if (args.until) {
    stream = stream.filter(({value}) => (value.time <= args.until))
  }
  const fields = Object.keys(args.matchers)
  if (fields.length > 0) {
    // Keep an entry only if every requested field exists and matches.
    stream = stream.filter(({value}) => {
      for (const field of fields) {
        const fieldValue = get(value, field)
        if (fieldValue === undefined || !args.matchers[field](fieldValue)) {
          return false
        }
      }
      return true
    })
  }
  stream = stream.take(args.limit)
  if (args.json) {
    stream = highland(stream.pipe(ndjson.serialize()))
      .each(value => {
        process.stdout.write(value)
      })
  } else {
    stream = stream.each(value => {
      console.log(util.inspect(value, { depth: null }))
    })
  }
  return new Promise(resolve => {
    stream.done(resolve)
  })
}
// ===================================================================
// Prints the CLI usage/help message on stderr.
function helper () {
  console.error(`
xo-server-logs --help, -h
    Display this help message.
xo-server-logs [--json] [--limit=<limit>] [--since=<date>] [--until=<date>] [<pattern>...]
    Prints the logs.
    --json
      Display the results as new line delimited JSON for consumption
      by another program.
    --limit=<limit>, -n <limit>
      Limit the number of results to be displayed (default 100)
    --since=<date>, --until=<date>
      Start showing entries on or newer than the specified date, or on
      or older than the specified date.
      <date> should use the format \`YYYY-MM-DD\`.
    <pattern>
      Patterns can be used to filter the entries.
      Patterns have the following format \`<field>=<value>\`/\`<field>\`.
xo-server-logs --repair
    Repair/compact the database.
    This is an advanced operation and should be used only when necessary and offline (xo-server should be stopped).
`)
}
// ===================================================================
// Parses and validates the CLI arguments.
//
// Returns the minimist result augmented with `matchers`: a map from
// field name to a predicate on that field's value.
//
// Throws when a string option is repeated, a date is malformed or the
// limit is not a number.
function getArgs () {
  const stringArgs = ['since', 'until', 'limit']
  const args = parseArgs(process.argv.slice(2), {
    string: stringArgs,
    boolean: ['help', 'json', 'repair'],
    default: {
      limit: 100,
      json: false,
      help: false,
    },
    alias: {
      limit: 'n',
      help: 'h',
    },
  })
  // Positional arguments are patterns: either `<field>=<value>` or a
  // bare `<field>` (stored as null: the field only has to exist).
  const patterns = {}
  for (let value of args._) {
    value = String(value)
    const i = value.indexOf('=')
    if (i !== -1) {
      const field = value.slice(0, i)
      const pattern = value.slice(i + 1)
      // FIX: previously tested `patterns[pattern]` instead of
      // `patterns[field]`, so a second `<field>=` pattern on the same
      // field overwrote the first instead of accumulating.
      if (patterns[field]) {
        patterns[field].push(pattern)
      } else {
        patterns[field] = [ pattern ]
      }
    } else if (!patterns[value]) {
      patterns[value] = null
    }
  }
  const trueFunction = () => true
  args.matchers = {}
  for (const field in patterns) {
    const values = patterns[field]
    // null → presence check only; otherwise glob-match the values.
    args.matchers[field] = (values === null) ? trueFunction : globMatcher(values)
  }
  // Warning: minimist makes one array of values if the same option is used many times.
  // (But only for strings args, not boolean)
  forEach(stringArgs, arg => {
    if (args[arg] instanceof Array) {
      throw new Error(`error: too many values for ${arg} argument`)
    }
  })
  ;['since', 'until'].forEach(arg => {
    if (args[arg] !== undefined) {
      args[arg] = Date.parse(args[arg])
      if (isNaN(args[arg])) {
        throw new Error(`error: bad ${arg} timestamp format`)
      }
    }
  })
  if (isNaN(args.limit = +args.limit)) {
    throw new Error('error: limit is not a valid number')
  }
  return args
}
// ===================================================================
// CLI entry point: parses the arguments, then either shows the help,
// repairs the LevelDB database or prints the logs.
export default async function main () {
  const args = getArgs()
  if (args.help) {
    helper()
    return
  }
  // The datadir is taken from the xo-server configuration.
  const config = await appConf.load('xo-server', {
    ignoreUnknownFormats: true,
  })
  if (args.repair) {
    // repairDb is callback-based, promisify it inline.
    await new Promise((resolve, reject) => {
      repairDb(`${config.datadir}/leveldb`, error => {
        if (error) {
          reject(error)
        } else {
          resolve()
        }
      })
    })
    return
  }
  // Open the 'logs' sublevel of xo-server's database (level-party
  // allows concurrent access with a running xo-server).
  const db = sublevel(levelup(
    `${config.datadir}/leveldb`,
    { valueEncoding: 'json' }
  )).sublevel('logs')
  return printLogs(db, args)
}

View File

@ -0,0 +1,33 @@
import execa from 'execa'
import splitLines from 'split-lines'
import { createParser } from 'parse-pairs'
import { isArray, map } from 'lodash'
// ===================================================================
// Parses one `KEY=value` pairs line; keys are lowercased and their
// first 5 characters dropped (presumably the `LVM2_` prefix produced
// by --nameprefixes — TODO confirm).
const parse = createParser({
  keyTransform: key => key.slice(5).toLowerCase(),
})
// Builds a wrapper around an LVM reporting command (`lvs`, `pvs`).
//
// The wrapper takes the field(s) to report plus extra CLI args and
// resolves to one item per output line: the parsed object when
// `fields` is an array, otherwise just that single field's value.
const makeFunction = command => (fields, ...args) =>
  execa.stdout(command, [
    '--noheading',
    '--nosuffix',
    '--nameprefixes',
    '--unbuffered',
    '--units',
    'b',
    '-o',
    String(fields),
    ...args,
  ]).then(stdout => map(
    splitLines(stdout),
    isArray(fields)
      ? parse
      : line => {
        const data = parse(line)
        return data[fields]
      }
  ))
export const lvs = makeFunction('lvs')
export const pvs = makeFunction('pvs')

View File

@ -0,0 +1,48 @@
import assign from 'lodash/assign'
// Recursively enumerates every combination of the first `n` vectors,
// invoking `cb` once per combination with a freshly built array.
//
// The last vector varies slowest: elements are appended from the
// deepest recursion (vector 0) outwards.
const _combine = (vectors, n, cb) => {
  if (n === 0) {
    return
  }
  const current = vectors[n - 1]
  if (n === 1) {
    // Base case: one fresh array per element.
    for (const item of current) {
      cb([ item ])
    }
    return
  }
  for (const item of current) {
    _combine(vectors, n - 1, partial => {
      partial.push(item)
      cb(partial)
    })
  }
}
// Compute all combinations from vectors.
//
// Returns a thunk: calling it with a callback invokes the callback
// once per combination.
//
// Ex: combine([[2, 3], [5, 7]])
// => [ [ 2, 5 ], [ 3, 5 ], [ 2, 7 ], [ 3, 7 ] ]
export const combine = vectors => cb => {
  _combine(vectors, vectors.length, cb)
}
// Merge the properties of an objects set in one object.
//
// Uses the native Object.assign instead of the (equivalent) lodash
// helper: no dependency needed for this.
//
// Ex: mergeObjects([ { a: 1 }, { b: 2 } ]) => { a: 1, b: 2 }
export const mergeObjects = objects => Object.assign({}, ...objects)
// Compute a cross product between vectors.
//
// Returns a thunk: each combination is reduced through `mergeFn`
// (property merge by default) before being passed to the callback.
//
// Ex: crossProduct([ [ { a: 2 }, { b: 3 } ], [ { c: 5 }, { d: 7 } ] ] )
// => [ { a: 2, c: 5 }, { b: 3, c: 5 }, { a: 2, d: 7 }, { b: 3, d: 7 } ]
export const crossProduct = (vectors, mergeFn = mergeObjects) => cb =>
  combine(vectors)(parts => cb(mergeFn(parts)))

View File

@ -0,0 +1,74 @@
/* eslint-env jest */
import { forEach } from 'lodash'
import { thunkToArray } from './utils'
import {
crossProduct,
mergeObjects,
} from './math'
// Data-driven tests: each entry is [ expected, ...input sets ].
describe('mergeObjects', function () {
  forEach({
    'Two sets of one': [
      {a: 1, b: 2}, {a: 1}, {b: 2},
    ],
    'Two sets of two': [
      {a: 1, b: 2, c: 3, d: 4}, {a: 1, b: 2}, {c: 3, d: 4},
    ],
    'Three sets': [
      {a: 1, b: 2, c: 3, d: 4, e: 5, f: 6}, {a: 1}, {b: 2, c: 3}, {d: 4, e: 5, f: 6},
    ],
    'One set': [
      {a: 1, b: 2}, {a: 1, b: 2},
    ],
    'Empty set': [
      {a: 1}, {a: 1}, {},
    ],
    'All empty': [
      {}, {}, {},
    ],
    'No set': [
      {},
    ],
  }, ([ resultSet, ...sets ], name) => {
    describe(`with ${name}`, () => {
      it('Assembles all given param sets in on set', function () {
        expect(mergeObjects(sets)).toEqual(resultSet)
      })
    })
  })
})
// Data-driven tests: each entry is [ expected values, input vectors,
// merge callback ]. Results are sorted because the enumeration order
// is not part of the contract being tested here.
describe('crossProduct', function () {
  // Gives the sum of all args
  const addTest = args => args.reduce((prev, curr) => prev + curr, 0)
  // Gives the product of all args
  const multiplyTest = args => args.reduce((prev, curr) => prev * curr, 1)
  forEach({
    '2 sets of 2 items to multiply': [
      [10, 14, 15, 21], [[2, 3], [5, 7]], multiplyTest,
    ],
    '3 sets of 2 items to multiply': [
      [110, 130, 154, 182, 165, 195, 231, 273], [[2, 3], [5, 7], [11, 13]], multiplyTest,
    ],
    '2 sets of 3 items to multiply': [
      [14, 22, 26, 21, 33, 39, 35, 55, 65], [[2, 3, 5], [7, 11, 13]], multiplyTest,
    ],
    '2 sets of 2 items to add': [
      [7, 9, 8, 10], [[2, 3], [5, 7]], addTest,
    ],
    '3 sets of 2 items to add': [
      [18, 20, 20, 22, 19, 21, 21, 23], [[2, 3], [5, 7], [11, 13]], addTest,
    ],
    '2 sets of 3 items to add': [
      [9, 13, 15, 10, 14, 16, 12, 16, 18], [[2, 3, 5], [7, 11, 13]], addTest,
    ],
  }, ([ product, items, cb ], name) => {
    describe(`with ${name}`, () => {
      it('Crosses sets of values with a crossProduct callback', function () {
        expect(thunkToArray(crossProduct(items, cb)).sort()).toEqual(product.sort())
      })
    })
  })
})

View File

@ -0,0 +1,73 @@
import {EventEmitter} from 'events'
import {
forEach,
isEmpty,
isString,
} from './utils'
// ===================================================================
// Base model: a bag of properties with change events.
//
// Emits 'change' (with the map of previous values) and one
// 'change:<name>' event per modified property.
export default class Model extends EventEmitter {
  constructor (properties) {
    super()
    // `this.default` (defined by subclasses) seeds the properties.
    this.properties = { ...this.default }
    if (properties) {
      this.set(properties)
    }
  }
  // Initialize the model after construction.
  initialize () {}
  // Validate the defined properties.
  //
  // Returns the error if any.
  validate (properties) {}
  // Get a property, falling back to `def` when it is undefined.
  get (name, def) {
    const value = this.properties[name]
    return value !== undefined ? value : def
  }
  // Check whether a property exists.
  has (name) {
    return (this.properties[name] !== undefined)
  }
  // Set properties; setting a property to undefined deletes it.
  set (properties, value) {
    // This method can also be used with two arguments to set a single
    // property.
    if (isString(properties)) {
      properties = { [properties]: value }
    }
    // Collect the previous values of the properties that change.
    const previous = {}
    forEach(properties, (value, name) => {
      const prev = this.properties[name]
      if (value !== prev) {
        previous[name] = prev
        if (value === undefined) {
          delete this.properties[name]
        } else {
          this.properties[name] = value
        }
      }
    })
    // Only emit when something actually changed.
    if (!isEmpty(previous)) {
      this.emit('change', previous)
      forEach(previous, (value, name) => {
        this.emit('change:' + name, value)
      })
    }
  }
}

View File

@ -0,0 +1,79 @@
import Collection from '../collection/redis'
import Model from '../model'
import {
forEach,
mapToArray,
multiKeyHash,
} from '../utils'
// ===================================================================
// Up until now, there were no actions, therefore the default
// action is used to update existing entries.
const DEFAULT_ACTION = 'admin'
// ===================================================================
// An ACL entry linking a subject to an object with an action; its id
// is derived from the (subject, object, action) triple via
// multiKeyHash.
export default class Acl extends Model {}
// Builds an Acl whose id is the hash of its three components.
Acl.create = (subject, object, action) => {
  return Acl.hash(subject, object, action).then(hash => new Acl({
    id: hash,
    subject,
    object,
    action,
  }))
}
// Resolves to the id for a given (subject, object, action) triple.
Acl.hash = (subject, object, action) => multiKeyHash(subject, object, action)
// -------------------------------------------------------------------
// Redis-backed collection of Acl entries.
//
// get() lazily migrates legacy action-less records: they are rewritten
// with DEFAULT_ACTION and re-keyed (the id depends on the action).
export class Acls extends Collection {
  get Model () {
    return Acl
  }
  create (subject, object, action) {
    return Acl.create(subject, object, action).then(acl => this.add(acl))
  }
  delete (subject, object, action) {
    return Acl.hash(subject, object, action).then(hash => this.remove(hash))
  }
  aclExists (subject, object, action) {
    return Acl.hash(subject, object, action).then(hash => this.exists(hash))
  }
  async get (properties) {
    const acls = await super.get(properties)
    // Finds all records that are missing a action and need to be updated.
    const toUpdate = []
    forEach(acls, acl => {
      if (!acl.action) {
        acl.action = DEFAULT_ACTION
        toUpdate.push(acl)
      }
    })
    if (toUpdate.length) {
      // Removes all existing entries.
      await this.remove(mapToArray(toUpdate, 'id'))
      // Compute the new ids (new hashes).
      const {hash} = Acl
      await Promise.all(mapToArray(
        toUpdate,
        (acl) => hash(acl.subject, acl.object, acl.action).then(id => {
          acl.id = id
        })
      ))
      // Inserts the new (updated) entries.
      await this.add(toUpdate)
    }
    return acls
  }
}

View File

@ -0,0 +1,45 @@
import isEmpty from 'lodash/isEmpty'
import Collection from '../collection/redis'
import Model from '../model'
import { forEach } from '../utils'
import { parseProp } from './utils'
// ===================================================================
// A group of users; its `users` list is (de)serialized by Groups.
export default class Group extends Model {}
// ===================================================================
// Redis-backed collection of groups.
export class Groups extends Collection {
  get Model () {
    return Group
  }

  create (name) {
    return this.add(new Group({ name }))
  }

  async save (group) {
    // Serialize the users list; an empty one is simply dropped.
    const { users } = group
    group.users = isEmpty(users) ? undefined : JSON.stringify(users)
    return this.update(group)
  }

  async get (properties) {
    const groups = await super.get(properties)
    // Deserialize what save() serialized.
    forEach(groups, group => {
      group.users = parseProp('group', group, 'users', [])
    })
    return groups
  }
}

View File

@ -0,0 +1,43 @@
import Collection from '../collection/redis'
import Model from '../model'
import { forEach } from '../utils'
import { parseProp } from './utils'
// ===================================================================
// A scheduled job; its params vector is (de)serialized by Jobs.
export default class Job extends Model {}
// Redis-backed collection of jobs.
export class Jobs extends Collection {
  get Model () {
    return Job
  }

  async create (job) {
    // The params vector is persisted as JSON.
    job.paramsVector = JSON.stringify(job.paramsVector)
    return this.add(new Job(job))
  }

  async save (job) {
    // The params vector is persisted as JSON.
    job.paramsVector = JSON.stringify(job.paramsVector)
    return this.update(job)
  }

  async get (properties) {
    const jobs = await super.get(properties)
    // Deserialize what create()/save() serialized.
    forEach(jobs, job => {
      job.paramsVector = parseProp('job', job, 'paramsVector', {})
      // `timeout` is persisted as a string: convert it back to a number.
      if (job.timeout !== undefined) {
        job.timeout = +job.timeout
      }
    })
    return jobs
  }
}

View File

@ -0,0 +1,53 @@
import Collection from '../collection/redis'
import Model from '../model'
import { forEach } from '../utils'
// ===================================================================
// Per-plugin persisted state (autoload flag + configuration).
export default class PluginMetadata extends Model {}
// ===================================================================
// Redis-backed collection of plugin metadata.
export class PluginsMetadata extends Collection {
  get Model () {
    return PluginMetadata
  }

  // `autoload` and `configuration` are persisted as strings.
  async save ({ id, autoload, configuration }) {
    return this.update({
      id,
      autoload: autoload ? 'true' : 'false',
      configuration: configuration && JSON.stringify(configuration),
    })
  }

  // Shallow-merges `data` into the stored metadata of plugin `id`.
  async merge (id, data) {
    const metadata = await this.first(id)
    if (!metadata) {
      throw new Error('no such plugin metadata')
    }
    return this.save({
      ...metadata.properties,
      ...data,
    })
  }

  async get (properties) {
    const pluginsMetadata = await super.get(properties)
    // Deserialize what save() serialized.
    forEach(pluginsMetadata, metadata => {
      const { autoload, configuration } = metadata
      metadata.autoload = autoload === 'true'
      try {
        metadata.configuration = configuration && JSON.parse(configuration)
      } catch (error) {
        console.warn('cannot parse pluginMetadata.configuration:', configuration)
        metadata.configuration = []
      }
    })
    return pluginsMetadata
  }
}

View File

@ -0,0 +1,36 @@
import Collection from '../collection/redis'
import Model from '../model'
import {
forEach,
} from '../utils'
// ===================================================================
// A backup remote (its `enabled` flag is persisted as a string).
export default class Remote extends Model {}
// Redis-backed collection of remotes.
export class Remotes extends Collection {
  get Model () {
    return Remote
  }

  // New remotes start disabled and without error.
  create (name, url) {
    const remote = new Remote({
      name,
      url,
      enabled: false,
      error: '',
    })
    return this.add(remote)
  }

  async save (remote) {
    return this.update(remote)
  }

  async get (properties) {
    const remotes = await super.get(properties)
    // `enabled` is persisted as a string: convert it back to a boolean.
    forEach(remotes, remote => {
      remote.enabled = remote.enabled === 'true'
    })
    return remotes
  }
}

View File

@ -0,0 +1,36 @@
import Collection from '../collection/redis'
import Model from '../model'
import { forEach } from '../utils'
// ===================================================================
// A job schedule (its `enabled` flag is persisted as a string).
export default class Schedule extends Model {}
// Redis-backed collection of schedules.
export class Schedules extends Collection {
  get Model () {
    return Schedule
  }

  create (userId, job, cron, enabled, name = undefined, timezone = undefined) {
    const schedule = new Schedule({
      userId,
      job,
      cron,
      enabled,
      name,
      timezone,
    })
    return this.add(schedule)
  }

  async save (schedule) {
    return this.update(schedule)
  }

  async get (properties) {
    const schedules = await super.get(properties)
    // `enabled` is persisted as a string: convert it back to a boolean.
    forEach(schedules, schedule => {
      schedule.enabled = schedule.enabled === 'true'
    })
    return schedules
  }
}

View File

@ -0,0 +1,42 @@
import Collection from '../collection/redis'
import Model from '../model'
import { forEach } from '../utils'
import { parseProp } from './utils'
// ===================================================================
// A registered XenServer host.
export default class Server extends Model {}
// -------------------------------------------------------------------
// Redis-backed collection of servers.
export class Servers extends Collection {
  get Model () {
    return Server
  }

  // Refuses to register the same host twice.
  async create (params) {
    const { host } = params
    if (await this.exists({ host })) {
      throw new Error('server already exists')
    }
    return this.add(params)
  }

  async get (properties) {
    const servers = await super.get(properties)
    // Deserialize the error field, dropping it when falsy.
    forEach(servers, server => {
      if (server.error) {
        server.error = parseProp('server', server, 'error', '')
      } else {
        delete server.error
      }
    })
    return servers
  }
}

View File

@ -0,0 +1,10 @@
import Collection from '../collection/redis'
import Model from '../model'
// ===================================================================
// An authentication token; no custom behavior beyond the base Model.
export default class Token extends Model {}
// -------------------------------------------------------------------
// Redis-backed collection of tokens; the base Collection behavior is
// used as-is.
export class Tokens extends Collection {}

View File

@ -0,0 +1,63 @@
import isEmpty from 'lodash/isEmpty'
import Collection from '../collection/redis'
import Model from '../model'
import { forEach } from '../utils'
import { parseProp } from './utils'
// ===================================================================
// A user account; `groups` and `preferences` are (de)serialized by
// Users.
export default class User extends Model {}
// New users have no permission by default.
User.prototype.default = {
  permission: 'none',
}
// -------------------------------------------------------------------
// Redis-backed collection of users.
export class Users extends Collection {
  get Model () {
    return User
  }

  async create (properties) {
    const { email } = properties
    // Avoid duplicates.
    if (await this.exists({ email })) {
      throw new Error(`the user ${email} already exists`)
    }
    return this.add(new User(properties))
  }

  async save (user) {
    // Serialize the collections; empty ones are simply dropped.
    const { groups, preferences } = user
    user.groups = isEmpty(groups) ? undefined : JSON.stringify(groups)
    user.preferences = isEmpty(preferences) ? undefined : JSON.stringify(preferences)
    return this.update(user)
  }

  async get (properties) {
    const users = await super.get(properties)
    // Deserialize what save() serialized.
    forEach(users, user => {
      user.groups = parseProp('user', user, 'groups', [])
      user.preferences = parseProp('user', user, 'preferences', {})
    })
    return users
  }
}

View File

@ -0,0 +1,16 @@
// Parses the JSON stored under `obj[name]`, returning `defaultValue`
// when the property is absent, empty or invalid JSON.
//
// `type` identifies the record kind for the caller; it is not used
// here (errors are deliberately not logged to avoid flooding the
// log files).
export const parseProp = (type, obj, name, defaultValue) => {
  const raw = obj[name]
  // Missing or empty values are a trivial, expected case.
  if (raw == null || raw === '') {
    return defaultValue
  }
  try {
    return JSON.parse(raw)
  } catch (_) {
    return defaultValue
  }
}

View File

@ -0,0 +1,74 @@
import createDebug from 'debug'
import partialStream from 'partial-stream'
import {connect} from 'tls'
import {parse} from 'url'
const debug = createDebug('xo:proxy-console')
// Bridges a client WebSocket to a VM console over TLS.
//
// Connects to the console's host (certificate NOT verified), issues an
// HTTP CONNECT authenticated by `sessionId`, then forwards data in
// both directions until either side closes or errors. The `closed`
// flag prevents writes to an already-terminated peer.
export default function proxyConsole (ws, vmConsole, sessionId) {
  const url = parse(vmConsole.location)
  let closed = false
  const socket = connect({
    host: url.host,
    port: url.port || 443,
    // The console endpoint typically uses a self-signed certificate.
    rejectUnauthorized: false,
  }, () => {
    // Write headers.
    socket.write([
      `CONNECT ${url.path} HTTP/1.0`,
      `Host: ${url.hostname}`,
      `Cookie: session_id=${sessionId}`,
      '', '',
    ].join('\r\n'))
    const onSend = (error) => {
      if (error) {
        debug('error sending to the XO client: %s', error.stack || error.message || error)
      }
    }
    // Strip the HTTP response headers, then forward console → client.
    socket.pipe(partialStream('\r\n\r\n', headers => {
      // TODO: check status code 200.
      debug('connected')
    })).on('data', data => {
      if (!closed) {
        ws.send(data, onSend)
      }
    }).on('end', () => {
      if (!closed) {
        closed = true
        debug('disconnected from the console')
      }
      ws.close()
    })
    // Forward client → console and mirror lifecycle events.
    ws
      .on('error', error => {
        closed = true
        debug('error from the XO client: %s', error.stack || error.message || error)
        socket.end()
      })
      .on('message', data => {
        if (!closed) {
          socket.write(data)
        }
      })
      .on('close', () => {
        if (!closed) {
          closed = true
          debug('disconnected from the XO client')
        }
        socket.end()
      })
  }).on('error', error => {
    closed = true
    debug('error from the console: %s', error.stack || error.message || error)
    ws.close()
  })
}

View File

@ -0,0 +1,44 @@
import appConf from 'app-conf'
import pw from 'pw'
import Xo from './xo'
import { generateToken } from './utils'
// CLI tool: creates an admin user or resets an existing user's
// password and permission to admin.
//
// Receives the CLI arguments array; returns the usage text when called
// without a name or with --help/-h.
const recoverAccount = async ([ name ]) => {
  if (
    name === undefined ||
    name === '--help' ||
    name === '-h'
  ) {
    return `
xo-server-recover-account <user name or email>
    If the user does not exist, it is created, if it exists, updates
    its password and resets its permission to Admin.
`
  }
  // Prompt for a password; an empty answer generates a random one.
  let password = await new Promise(resolve => {
    process.stdout.write('Password (leave empty for random): ')
    pw(resolve)
  })
  if (password === '') {
    password = await generateToken(10)
    console.log('The generated password is', password)
  }
  const xo = new Xo(await appConf.load('xo-server', {
    ignoreUnknownFormats: true,
  }))
  const user = await xo.getUserByName(name, true)
  if (user !== null) {
    await xo.updateUser(user.id, { password, permission: 'admin' })
    console.log(`user ${name} has been successfully updated`)
  } else {
    await xo.createUser({ name, password, permission: 'admin' })
    console.log(`user ${name} has been successfully created`)
  }
}
export { recoverAccount as default }

View File

@ -0,0 +1,225 @@
import eventToPromise from 'event-to-promise'
import through2 from 'through2'
import { ignoreErrors } from 'promise-toolbox'
import { parse } from 'xo-remote-parser'
import {
addChecksumToReadStream,
getPseudoRandomBytes,
streamToBuffer,
validChecksumOfReadStream,
} from '../utils'
// Abstract base for remote storage handlers (file, NFS, SMB…).
//
// Subclasses implement the `_`-prefixed primitives; the public methods
// add common behavior (exclusive-create flags, optional checksums).
export default class RemoteHandlerAbstract {
  constructor (remote) {
    // Merge the stored remote with the properties parsed from its URL.
    this._remote = {...remote, ...parse(remote.url)}
    if (this._remote.type !== this.type) {
      throw new Error('Incorrect remote type')
    }
  }
  // Subclasses must return their remote type ('file', 'nfs', 'smb'…).
  get type () {
    throw new Error('Not implemented')
  }
  /**
   * Asks the handler to sync the state of the effective remote with its' metadata
   */
  async sync () {
    return this._sync()
  }
  async _sync () {
    throw new Error('Not implemented')
  }
  /**
   * Free the resources possibly dedicated to put the remote at work, when it is no more needed
   */
  async forget () {
    return this._forget()
  }
  async _forget () {
    throw new Error('Not implemented')
  }
  // Write-then-read round-trip of 1 MiB of random data to check the
  // remote works; never throws, reports success/failure (and the step
  // that failed) in the returned object.
  async test () {
    const testFileName = `${Date.now()}.test`
    const data = getPseudoRandomBytes(1024 * 1024)
    let step = 'write'
    try {
      await this.outputFile(testFileName, data)
      step = 'read'
      const read = await this.readFile(testFileName)
      if (data.compare(read) !== 0) {
        throw new Error('output and input did not match')
      }
      return {
        success: true,
      }
    } catch (error) {
      return {
        success: false,
        step,
        file: testFileName,
        error: error.message || String(error),
      }
    } finally {
      // Best-effort cleanup of the test file.
      this.unlink(testFileName)::ignoreErrors()
    }
  }
  // Writes `data` to `file`; fails if the file already exists ('wx')
  // unless overridden through `options`.
  async outputFile (file, data, options) {
    return this._outputFile(file, data, {
      flags: 'wx',
      ...options,
    })
  }
  async _outputFile (file, data, options) {
    const stream = await this.createOutputStream(file, options)
    const promise = eventToPromise(stream, 'finish')
    stream.end(data)
    return promise
  }
  async readFile (file, options) {
    return this._readFile(file, options)
  }
  _readFile (file, options) {
    return this.createReadStream(file, options).then(streamToBuffer)
  }
  async rename (oldPath, newPath) {
    return this._rename(oldPath, newPath)
  }
  async _rename (oldPath, newPath) {
    throw new Error('Not implemented')
  }
  async list (dir = '.') {
    return this._list(dir)
  }
  async _list (dir) {
    throw new Error('Not implemented')
  }
  // Opens a read stream on `file`; when `checksum` is true, reads the
  // companion `<file>.checksum` and wraps the stream with validation
  // (unless the checksum file is missing and `ignoreMissingChecksum`).
  createReadStream (file, {
    checksum = false,
    ignoreMissingChecksum = false,
    ...options
  } = {}) {
    const streamP = this._createReadStream(file, options).then(stream => {
      // detect early errors
      let promise = eventToPromise(stream, 'readable')
      // try to add the length prop if missing and not a range stream
      if (
        stream.length === undefined &&
        options.end === undefined &&
        options.start === undefined
      ) {
        promise = Promise.all([
          promise,
          this.getSize(file).then(size => {
            stream.length = size
          })::ignoreErrors(),
        ])
      }
      return promise.then(() => stream)
    })
    if (!checksum) {
      return streamP
    }
    // avoid a unhandled rejection warning
    streamP::ignoreErrors()
    return this.readFile(`${file}.checksum`).then(
      checksum => streamP.then(stream => {
        // validChecksumOfReadStream returns a new stream: restore the
        // length property on it.
        const { length } = stream
        stream = validChecksumOfReadStream(stream, String(checksum).trim())
        stream.length = length
        return stream
      }),
      error => {
        if (ignoreMissingChecksum && error && error.code === 'ENOENT') {
          return streamP
        }
        throw error
      }
    )
  }
  async _createReadStream (file, options) {
    throw new Error('Not implemented')
  }
  // Recomputes and rewrites the `<path>.checksum` companion file.
  async refreshChecksum (path) {
    const stream = addChecksumToReadStream(await this.createReadStream(path))
    stream.resume() // start reading the whole file
    const checksum = await stream.checksum
    await this.outputFile(`${path}.checksum`, checksum)
  }
  // Opens a write stream on `file` (exclusive create); when `checksum`
  // is true, a pass-through stream is returned which also writes the
  // `<file>.checksum` companion once the data has been fully consumed.
  async createOutputStream (file, {
    checksum = false,
    ...options
  } = {}) {
    const streamP = this._createOutputStream(file, {
      flags: 'wx',
      ...options,
    })
    if (!checksum) {
      return streamP
    }
    const connectorStream = through2()
    const forwardError = error => {
      connectorStream.emit('error', error)
    }
    const streamWithChecksum = addChecksumToReadStream(connectorStream)
    streamWithChecksum.pipe(await streamP)
    streamWithChecksum.checksum
      .then(value => this.outputFile(`${file}.checksum`, value))
      .catch(forwardError)
    return connectorStream
  }
  async _createOutputStream (file, options) {
    throw new Error('Not implemented')
  }
  // Deletes `file`; by default also removes its checksum companion
  // (best-effort).
  async unlink (file, {
    checksum = true,
  } = {}) {
    if (checksum) {
      this._unlink(`${file}.checksum`)::ignoreErrors()
    }
    return this._unlink(file)
  }
  async _unlink (file) {
    throw new Error('Not implemented')
  }
  async getSize (file) {
    return this._getSize(file)
  }
  async _getSize (file) {
    throw new Error('Not implemented')
  }
}

View File

@ -0,0 +1,88 @@
import fs from 'fs-extra'
import { dirname, resolve } from 'path'
import { noop, startsWith } from 'lodash'
import RemoteHandlerAbstract from './abstract'
// Remote handler backed by a local directory.
export default class LocalHandler extends RemoteHandlerAbstract {
  get type () {
    return 'file'
  }

  _getRealPath () {
    return this._remote.path
  }

  // Resolves `file` inside the remote's root, refusing paths that
  // escape it.
  _getFilePath (file) {
    const root = this._getRealPath()
    const path = file ? resolve(root, file) : resolve(root)
    if (!startsWith(path, root)) {
      throw new Error('Remote path is unavailable')
    }
    return path
  }

  // Checks the directory exists and is readable/writable; on failure
  // the remote is disabled and the error recorded.
  async _sync () {
    const remote = this._remote
    if (remote.enabled) {
      try {
        const path = this._getRealPath()
        await fs.ensureDir(path)
        await fs.access(path, fs.R_OK | fs.W_OK)
      } catch (error) {
        remote.enabled = false
        remote.error = error.message
      }
    }
    return remote
  }

  // Nothing to release for a local directory.
  async _forget () {
    return noop()
  }

  async _outputFile (file, data, options) {
    const path = this._getFilePath(file)
    await fs.ensureDir(dirname(path))
    await fs.writeFile(path, data, options)
  }

  async _readFile (file, options) {
    return fs.readFile(this._getFilePath(file), options)
  }

  async _rename (oldPath, newPath) {
    return fs.rename(this._getFilePath(oldPath), this._getFilePath(newPath))
  }

  async _list (dir = '.') {
    return fs.readdir(this._getFilePath(dir))
  }

  async _createReadStream (file, options) {
    return fs.createReadStream(this._getFilePath(file), options)
  }

  async _createOutputStream (file, options) {
    const path = this._getFilePath(file)
    await fs.ensureDir(dirname(path))
    return fs.createWriteStream(path, options)
  }

  // Deleting a missing file is not an error.
  async _unlink (file) {
    return fs.unlink(this._getFilePath(file)).catch(error => {
      if (error == null || error.code !== 'ENOENT') {
        throw error
      }
    })
  }

  async _getSize (file) {
    const stats = await fs.stat(this._getFilePath(file))
    return stats.size
  }
}

View File

@ -0,0 +1,82 @@
import execa from 'execa'
import fs from 'fs-extra'
import { forEach } from 'lodash'
import LocalHandler from './local'
// Remote handler backed by an NFS share, mounted under
// /run/xo-server/mounts/<remote id>.
export default class NfsHandler extends LocalHandler {
  get type () {
    return 'nfs'
  }
  _getRealPath () {
    return `/run/xo-server/mounts/${this._remote.id}`
  }
  // Lists the currently mounted NFS filesystems (via findmnt), indexed
  // by mount point, and caches them in `this._realMounts`.
  async _loadRealMounts () {
    let stdout
    const mounted = {}
    try {
      stdout = await execa.stdout('findmnt', ['-P', '-t', 'nfs,nfs4', '--output', 'SOURCE,TARGET', '--noheadings'])
      // Parses findmnt -P output: SOURCE="host:share" TARGET="mnt".
      const regex = /^SOURCE="([^:]*):(.*)" TARGET="(.*)"$/
      forEach(stdout.split('\n'), m => {
        if (m) {
          const match = regex.exec(m)
          mounted[match[3]] = {
            host: match[1],
            share: match[2],
          }
        }
      })
    } catch (exc) {
      // When no mounts are found, the call pretends to fail...
      if (exc.stderr !== '') {
        throw exc
      }
    }
    this._realMounts = mounted
    return mounted
  }
  // Whether this remote's mount point is actually mounted.
  // NOTE: requires _loadRealMounts() to have been called first.
  _matchesRealMount () {
    return this._getRealPath() in this._realMounts
  }
  async _mount () {
    await fs.ensureDir(this._getRealPath())
    return execa('mount', ['-t', 'nfs', '-o', 'vers=3', `${this._remote.host}:${this._remote.path}`, this._getRealPath()])
  }
  // Reconciles the actual mount state with the remote's enabled flag:
  // unmounts a disabled-but-mounted remote, mounts an enabled-but-
  // unmounted one; failures are recorded on the remote.
  async _sync () {
    await this._loadRealMounts()
    if (this._matchesRealMount() && !this._remote.enabled) {
      try {
        await this._umount(this._remote)
      } catch (exc) {
        this._remote.enabled = true
        this._remote.error = exc.message
      }
    } else if (!this._matchesRealMount() && this._remote.enabled) {
      try {
        await this._mount()
      } catch (exc) {
        this._remote.enabled = false
        this._remote.error = exc.message
      }
    }
    return this._remote
  }
  // Best-effort unmount when the remote is no longer needed.
  async _forget () {
    try {
      await this._umount(this._remote)
    } catch (_) {
      // We have to go on...
    }
  }
  async _umount (remote) {
    await execa('umount', ['--force', this._getRealPath()])
  }
}

View File

@ -0,0 +1,191 @@
import Smb2 from '@marsaud/smb2-promise'
import RemoteHandlerAbstract from './abstract'
import {
noop,
pFinally,
} from '../utils'
// Normalize the error code for file not found.
// Normalize the error code for file not found.
//
// SMB reports missing files/paths with STATUS_OBJECT_NAME_NOT_FOUND or
// STATUS_OBJECT_PATH_NOT_FOUND; expose them with the standard ENOENT code so
// callers can handle them uniformly. The original error is kept as the
// prototype: only `code` is overridden.
const normalizeError = error => {
  const { code } = error

  return (
    code === 'STATUS_OBJECT_NAME_NOT_FOUND' ||
    code === 'STATUS_OBJECT_PATH_NOT_FOUND'
  )
    ? Object.create(error, {
      code: {
        configurable: true,
        // FIXED: `readable` is not a valid property-descriptor attribute and
        // was silently ignored; `enumerable` was intended.
        enumerable: true,
        value: 'ENOENT',
        writable: true,
      },
    })
    : error
}
export default class SmbHandler extends RemoteHandlerAbstract {
constructor (remote) {
super(remote)
this._forget = noop
}
get type () {
return 'smb'
}
_getClient (remote) {
return new Smb2({
share: `\\\\${remote.host}`,
domain: remote.domain,
username: remote.username,
password: remote.password,
autoCloseTimeout: 0,
})
}
_getFilePath (file) {
if (file === '.') {
file = undefined
}
let path = (this._remote.path !== '')
? this._remote.path
: ''
// Ensure remote path is a directory.
if (path !== '' && path[path.length - 1] !== '\\') {
path += '\\'
}
if (file) {
path += file.replace(/\//g, '\\')
}
return path
}
_dirname (file) {
const parts = file.split('\\')
parts.pop()
return parts.join('\\')
}
async _sync () {
if (this._remote.enabled) {
try {
// Check access (smb2 does not expose connect in public so far...)
await this.list()
} catch (error) {
this._remote.enabled = false
this._remote.error = error.message
}
}
return this._remote
}
async _outputFile (file, data, options = {}) {
const client = this._getClient(this._remote)
const path = this._getFilePath(file)
const dir = this._dirname(path)
if (dir) {
await client.ensureDir(dir)
}
return client.writeFile(path, data, options)::pFinally(() => { client.close() })
}
async _readFile (file, options = {}) {
const client = this._getClient(this._remote)
let content
try {
content = await client.readFile(this._getFilePath(file), options)::pFinally(() => { client.close() })
} catch (error) {
throw normalizeError(error)
}
return content
}
async _rename (oldPath, newPath) {
const client = this._getClient(this._remote)
try {
await client.rename(this._getFilePath(oldPath), this._getFilePath(newPath))::pFinally(() => { client.close() })
} catch (error) {
throw normalizeError(error)
}
}
async _list (dir = '.') {
const client = this._getClient(this._remote)
let list
try {
list = await client.readdir(this._getFilePath(dir))::pFinally(() => { client.close() })
} catch (error) {
throw normalizeError(error)
}
return list
}
async _createReadStream (file, options = {}) {
const client = this._getClient(this._remote)
let stream
try {
// FIXME ensure that options are properly handled by @marsaud/smb2
stream = await client.createReadStream(this._getFilePath(file), options)
stream.on('end', () => client.close())
} catch (error) {
throw normalizeError(error)
}
return stream
}
async _createOutputStream (file, options = {}) {
const client = this._getClient(this._remote)
const path = this._getFilePath(file)
const dir = this._dirname(path)
let stream
try {
if (dir) {
await client.ensureDir(dir)
}
stream = await client.createWriteStream(path, options) // FIXME ensure that options are properly handled by @marsaud/smb2
} catch (err) {
client.close()
throw err
}
stream.on('finish', () => client.close())
return stream
}
async _unlink (file) {
const client = this._getClient(this._remote)
try {
await client.unlink(this._getFilePath(file))::pFinally(() => { client.close() })
} catch (error) {
throw normalizeError(error)
}
}
async _getSize (file) {
const client = await this._getClient(this._remote)
let size
try {
size = await client.getSize(this._getFilePath(file))::pFinally(() => { client.close() })
} catch (error) {
throw normalizeError(error)
}
return size
}
}

View File

@ -0,0 +1,28 @@
// JSON schema of an ACL entry: grants `action` (a permission or role) on
// `object` (an item or set) to `subject` (a user or group).
export default {
  $schema: 'http://json-schema.org/draft-04/schema#',
  type: 'object',
  properties: {
    id: {
      type: 'string',
      description: 'unique identifier for this ACL',
    },
    action: {
      type: 'string',
      description: 'permission (or role)',
    },
    object: {
      type: 'string',
      description: 'item (or set)',
    },
    subject: {
      type: 'string',
      description: 'user (or group)',
    },
  },
  required: [
    'id',
    'action',
    'object',
    'subject',
  ],
}

View File

@ -0,0 +1,43 @@
// JSON schema of a 'call' job: a method executed with the permissions of the
// user identified by `userId`.
export default {
  $schema: 'http://json-schema.org/draft-04/schema#',
  type: 'object',
  properties: {
    type: {
      enum: ['call'],
    },
    id: {
      type: 'string',
      description: 'job identifier',
    },
    name: {
      type: 'string',
      description: 'human readable name',
    },
    userId: {
      type: 'string',
      description: 'identifier of the user who have created the job (the permissions of the user are used by the job)',
    },
    key: {
      type: 'string',
      // TODO description
    },
    method: {
      type: 'string',
      description: 'called method',
    },
    paramsVector: {
      type: 'object',
    },
    timeout: {
      type: 'number',
      description: 'number of milliseconds after which the job is considered failed',
    },
  },
  required: [
    'type',
    'id',
    'userId',
    'key',
    'method',
  ],
}

View File

@ -0,0 +1,29 @@
// JSON schema of a log entry stored under a namespace.
export default {
  $schema: 'http://json-schema.org/draft-04/schema#',
  type: 'object',
  properties: {
    id: {
      type: 'string',
      description: 'unique identifier for this log',
    },
    // NOTE(review): declared as a string although described as a timestamp
    // in milliseconds — confirm what producers actually store here.
    time: {
      type: 'string',
      description: 'timestamp (in milliseconds) of this log',
    },
    message: {
      type: 'string',
      description: 'human readable (short) description of this log',
    },
    namespace: {
      type: 'string',
      description: 'space to store logs',
    },
    // Free-form payload: intentionally unconstrained.
    data: {},
  },
  required: [
    'id',
    'time',
    'message',
    'namespace',
  ],
}

View File

@ -0,0 +1,33 @@
// JSON schema of the event emitted when one call of a job ends: carries
// either `error` (failure) or `returnedValue` (success), never both.
export default {
  $schema: 'http://json-schema.org/draft-04/schema#',
  type: 'object',
  properties: {
    event: {
      enum: ['jobCall.end'],
    },
    runJobId: {
      type: 'string',
      description: 'instance id of this job',
    },
    runCallId: {
      type: 'string',
      description: 'instance id of this call',
    },
    error: {
      type: 'object',
      description: 'describe one failure, exists if the call has failed',
    },
    returnedValue: {
      description: 'call\'s result, exists if the call is a success',
    },
  },
  required: [
    'event',
    'runJobId',
    'runCallId',
  ],
  oneOf: [
    { required: ['error'] },
    { required: ['returnedValue'] },
  ],
}

View File

@ -0,0 +1,27 @@
// JSON schema of the event emitted when one call of a job starts.
export default {
  $schema: 'http://json-schema.org/draft-04/schema#',
  type: 'object',
  properties: {
    event: {
      enum: ['jobCall.start'],
    },
    runJobId: {
      type: 'string',
      description: 'instance id of this job',
    },
    method: {
      type: 'string',
      description: 'method linked to this call',
    },
    params: {
      type: 'object',
      description: 'params of the called method',
    },
  },
  required: [
    'event',
    'runJobId',
    'method',
    'params',
  ],
}

View File

@ -0,0 +1,21 @@
// JSON schema of the event emitted when a job run ends.
export default {
  $schema: 'http://json-schema.org/draft-04/schema#',
  type: 'object',
  properties: {
    event: {
      enum: ['job.end'],
    },
    runJobId: {
      type: 'string',
      description: 'instance id of this job',
    },
    error: {
      type: 'object',
      description: 'describe one failure, exists if no call has been made',
    },
  },
  required: [
    'event',
    'runJobId',
  ],
}

View File

@ -0,0 +1,26 @@
// JSON schema of the event emitted when a job run starts.
export default {
  $schema: 'http://json-schema.org/draft-04/schema#',
  type: 'object',
  properties: {
    event: {
      enum: ['job.start'],
    },
    userId: {
      type: 'string',
      description: 'user who executes this job',
    },
    jobId: {
      type: 'string',
      description: 'identifier of this job',
    },
    key: {
      type: 'string',
    },
  },
  required: [
    'event',
    'userId',
    'jobId',
    'key',
  ],
}

View File

@ -0,0 +1,49 @@
// JSON schema describing a server-side plugin and its (optional)
// configuration/test capabilities.
export default {
  $schema: 'http://json-schema.org/draft-04/schema#',
  type: 'object',
  properties: {
    id: {
      type: 'string',
      description: 'unique identifier for this plugin',
    },
    name: {
      type: 'string',
      description: 'unique human readable name for this plugin',
    },
    autoload: {
      type: 'boolean',
      description: 'whether this plugin is loaded on startup',
    },
    loaded: {
      type: 'boolean',
      description: 'whether or not this plugin is currently loaded',
    },
    unloadable: {
      type: 'boolean',
      default: true,
      description: 'whether or not this plugin can be unloaded',
    },
    configuration: {
      type: 'object',
      description: 'current configuration of this plugin (not present if none)',
    },
    // A plugin's configuration schema is itself a JSON schema.
    configurationSchema: {
      $ref: 'http://json-schema.org/draft-04/schema#',
      description: 'configuration schema for this plugin (not present if not configurable)',
    },
    testable: {
      type: 'boolean',
      description: 'whether or not this plugin can be tested',
    },
    testSchema: {
      $ref: 'http://json-schema.org/draft-04/schema#',
      description: 'test schema for this plugin',
    },
  },
  required: [
    'id',
    'name',
    'autoload',
    'loaded',
  ],
}

View File

@ -0,0 +1,50 @@
// JSON schema of a user account, including group membership, root
// permission level and per-user preferences (language, SSH keys…).
export default {
  $schema: 'http://json-schema.org/draft-04/schema#',
  type: 'object',
  properties: {
    id: {
      type: 'string',
      description: 'unique identifier for this user',
    },
    email: {
      type: 'string',
      description: 'email address of this user',
    },
    groups: {
      type: 'array',
      items: {
        type: 'string',
      },
      description: 'identifier of groups this user belong to',
    },
    permission: {
      enum: ['none', 'read', 'write', 'admin'],
      description: 'root permission for this user, none and admin are the only significant ones',
    },
    preferences: {
      type: 'object',
      properties: {
        lang: { type: 'string' },
        sshKeys: {
          type: 'array',
          items: {
            type: 'object',
            properties: {
              key: { type: 'string' },
              title: { type: 'string' },
            },
            required: [
              'key',
              'title',
            ],
          },
        },
      },
      description: 'various user preferences',
    },
  },
  required: [
    'id',
    'email',
  ],
}

View File

@ -0,0 +1,14 @@
import through2 from 'through2'
// Creates a pass-through stream which counts the bytes flowing through it.
//
// The running total is exposed as the `size` property of the returned
// stream (0 until data flows).
const createSizeStream = () => {
  const wrapper = through2(
    (chunk, enc, cb) => {
      // Accumulate the chunk length, then forward the chunk unchanged.
      wrapper.size += chunk.length
      cb(null, chunk)
    }
  )
  wrapper.size = 0
  return wrapper
}
export { createSizeStream as default }

View File

@ -0,0 +1,44 @@
import assert from 'assert'
// Drains `stream` into a pre-allocated `buffer`, writing between `offset`
// (inclusive) and `end` (exclusive).
//
// Resolves with the number of bytes written; rejects with 'too much data'
// if the stream produces more bytes than the target window can hold, or
// with the stream's own error.
const streamToExistingBuffer = (
  stream,
  buffer,
  offset = 0,
  end = buffer.length
) => new Promise((resolve, reject) => {
  assert(offset >= 0)
  assert(end > offset)
  assert(end <= buffer.length)

  let position = offset

  const removeListeners = () => {
    stream.removeListener('data', onData)
    stream.removeListener('end', onEnd)
    stream.removeListener('error', onError)
  }

  const onError = error => {
    reject(error)
    removeListeners()
  }

  const onData = chunk => {
    const start = position
    position += chunk.length
    if (position > end) {
      onError(new Error('too much data'))
      return
    }
    chunk.copy(buffer, start)
  }

  const onEnd = () => {
    resolve(position - offset)
    removeListeners()
  }

  stream
    .on('data', onData)
    .on('end', onEnd)
    .on('error', onError)
})
export { streamToExistingBuffer as default }

View File

@ -0,0 +1,20 @@
/* eslint-env jest */
// Unit test for streamToExistingBuffer(): streams this very test file into a
// buffer at offset 1 and checks the sentinel byte at offset 0 is preserved.
import { createReadStream, readFile } from 'fs'
import { fromCallback } from 'promise-toolbox'
import streamToExistingBuffer from './stream-to-existing-buffer'
describe('streamToExistingBuffer()', () => {
  it('read the content of a stream in a buffer', async () => {
    const stream = createReadStream(__filename)
    const expected = await fromCallback(cb => readFile(__filename, 'utf-8', cb))
    const buf = Buffer.allocUnsafe(expected.length + 1)
    // Sentinel byte: must not be overwritten since writing starts at 1.
    buf[0] = 'A'.charCodeAt()
    await streamToExistingBuffer(stream, buf, 1)
    expect(String(buf)).toBe(`A${expected}`)
  })
})

View File

@ -0,0 +1,27 @@
// Reads `stream` entirely and resolves with its content as a single new
// Buffer; rejects on the first stream error.
const streamToNewBuffer = stream => new Promise((resolve, reject) => {
  const chunks = []
  let total = 0

  const removeListeners = () => {
    stream.removeListener('data', onData)
    stream.removeListener('end', onEnd)
    stream.removeListener('error', onError)
  }

  const onData = chunk => {
    chunks.push(chunk)
    total += chunk.length
  }

  const onEnd = () => {
    resolve(Buffer.concat(chunks, total))
    removeListeners()
  }

  const onError = error => {
    reject(error)
    removeListeners()
  }

  stream
    .on('data', onData)
    .on('end', onEnd)
    .on('error', onError)
})
export { streamToNewBuffer as default }

View File

@ -0,0 +1,613 @@
import base64url from 'base64url'
import eventToPromise from 'event-to-promise'
import forEach from 'lodash/forEach'
import has from 'lodash/has'
import highland from 'highland'
import humanFormat from 'human-format'
import invert from 'lodash/invert'
import isArray from 'lodash/isArray'
import isString from 'lodash/isString'
import keys from 'lodash/keys'
import kindOf from 'kindof'
import mapToArray from 'lodash/map'
import multiKeyHashInt from 'multikey-hash'
import pick from 'lodash/pick'
import tmp from 'tmp'
import xml2js from 'xml2js'
import { resolve } from 'path'
// Moment timezone can be loaded only one time, it's a workaround to load
// the latest version because cron module uses an old version of moment which
// does not implement `guess` function for example.
import 'moment-timezone'
import through2 from 'through2'
import { CronJob } from 'cron'
import { utcFormat, utcParse } from 'd3-time-format'
import {
all as pAll,
defer,
fromCallback,
isPromise,
promisify,
reflect as pReflect,
} from 'promise-toolbox'
import {
createHash,
randomBytes,
} from 'crypto'
// ===================================================================
// Similar to map() + Promise.all() but wait for all promises to
// settle before rejecting (with the first error)
export const asyncMap = (collection, iteratee) => {
  // If the collection itself is a promise, wait for it first.
  if (isPromise(collection)) {
    return collection.then(collection => asyncMap(collection, iteratee))
  }
  // Only the first error is kept; the container object distinguishes
  // "no error yet" from "rejected with undefined".
  let errorContainer
  const onError = error => {
    if (errorContainer === undefined) {
      errorContainer = { error }
    }
  }
  return Promise.all(mapToArray(collection, (item, key, collection) =>
    new Promise(resolve => {
      resolve(iteratee(item, key, collection))
    }).catch(onError)
  )).then(values => {
    // Everything has settled: rethrow the first recorded error, if any.
    if (errorContainer !== undefined) {
      throw errorContainer.error
    }
    return values
  })
}
// -------------------------------------------------------------------
export streamToBuffer from './stream-to-new-buffer'
// -------------------------------------------------------------------
// Converts camelCase to snake_case, leaving existing underscores and
// standalone upper-case runs untouched.
export function camelToSnakeCase (string) {
  return string.replace(
    /([a-z0-9])([A-Z])/g,
    (_, before, upper) => [before, upper.toLowerCase()].join('_')
  )
}
// -------------------------------------------------------------------
// Returns an empty object without prototype (if possible).
export const createRawObject = Object.create
  ? () => Object.create(null)
  : () => ({})
// -------------------------------------------------------------------
// Only works with string items!
// Computes the difference between two collections of strings:
// returns [itemsOnlyInColl1, itemsOnlyInColl2].
export const diffItems = (coll1, coll2) => {
  // Start by assuming every item of coll2 was removed…
  const removed = createRawObject()
  forEach(coll2, value => {
    removed[value] = true
  })
  const added = []
  forEach(coll1, value => {
    // …and un-mark the ones also present in coll1.
    if (value in removed) {
      delete removed[value]
    } else {
      added.push(value)
    }
  })
  return [ added, keys(removed) ]
}
// -------------------------------------------------------------------
// Hash algorithms supported for stream checksums and their ids in the
// crypt(3)-like `$<id>$$<hex digest>` serialized form.
const ALGORITHM_TO_ID = {
  md5: '1',
  sha256: '5',
  sha512: '6',
}
const ID_TO_ALGORITHM = invert(ALGORITHM_TO_ID)
// Wrap a readable stream in a stream with a checksum promise
// attribute which is resolved at the end of an input stream.
// (Finally .checksum contains the checksum of the input stream)
//
// Example:
// const sourceStream = ...
// const targetStream = ...
// const checksumStream = addChecksumToReadStream(sourceStream)
// await Promise.all([
// eventToPromise(checksumStream.pipe(targetStream), 'finish'),
// checksumStream.checksum.then(console.log)
// ])
export const addChecksumToReadStream = (stream, algorithm = 'md5') => {
  const algorithmId = ALGORITHM_TO_ID[algorithm]
  if (!algorithmId) {
    throw new Error(`unknown algorithm: ${algorithm}`)
  }
  const hash = createHash(algorithm)
  const { promise, resolve } = defer()
  // Pass every chunk through while feeding the hash; the flush callback
  // resolves the checksum promise once the input is exhausted.
  const wrapper = stream.pipe(through2(
    (chunk, enc, callback) => {
      hash.update(chunk)
      callback(null, chunk)
    },
    callback => {
      resolve(hash.digest('hex'))
      callback()
    }
  ))
  // Forward errors from the source stream to the wrapper.
  stream.on('error', error => wrapper.emit('error', error))
  wrapper.checksum = promise.then(hash => `$${algorithmId}$$${hash}`)
  return wrapper
}
// Check if the checksum of a readable stream is equals to an expected checksum.
// The given stream is wrapped in a stream which emits an error event
// if the computed checksum is not equals to the expected checksum.
export const validChecksumOfReadStream = (stream, expectedChecksum) => {
  // The expected checksum is `$<algorithm id>$$<hex digest>`: extract the id.
  const algorithmId = expectedChecksum.slice(1, expectedChecksum.indexOf('$', 1))
  if (!algorithmId) {
    throw new Error(`unknown algorithm: ${algorithmId}`)
  }
  const hash = createHash(ID_TO_ALGORITHM[algorithmId])
  const wrapper = stream.pipe(through2(
    { highWaterMark: 0 },
    (chunk, enc, callback) => {
      hash.update(chunk)
      callback(null, chunk)
    },
    callback => {
      // Flush: compare the computed checksum with the expected one and fail
      // the stream on mismatch.
      const checksum = `$${algorithmId}$$${hash.digest('hex')}`
      callback(
        checksum !== expectedChecksum
          ? new Error(`Bad checksum (${checksum}), expected: ${expectedChecksum}`)
          : null
      )
    }
  ))
  stream.on('error', error => wrapper.emit('error', error))
  // Resolved when the whole stream has been verified.
  wrapper.checksumVerified = eventToPromise(wrapper, 'end')
  return wrapper
}
// -------------------------------------------------------------------
// Ensure the value is an array, wrap it if necessary.
export function ensureArray (value) {
  if (value === undefined) {
    return []
  }
  // Array.isArray is what lodash's isArray delegates to.
  return Array.isArray(value) ? value : [value]
}
// -------------------------------------------------------------------
// Returns the value of a property and removes it from the object.
export function extractProperty (obj, prop) {
  const { [prop]: value } = obj
  delete obj[prop]
  return value
}
// -------------------------------------------------------------------
// Returns the first defined (non-undefined) value.
export const firstDefined = function () {
  // `arguments` is iterable; a classic function is required to access it.
  for (const arg of arguments) {
    if (arg !== undefined) {
      return arg
    }
  }
}
// -------------------------------------------------------------------
// Extracts the properties of a user which are safe to expose to clients.
// Accepts either a plain user object or a model-like object carrying its
// data in a `properties` attribute.
export const getUserPublicProperties = user => pick(
  user.properties || user,
  'id', 'email', 'groups', 'permission', 'preferences', 'provider'
)
// -------------------------------------------------------------------
// Fills a Buffer of `n` bytes with Math.random-based pseudo-random data:
// fast, but NOT suitable for secrets.
export const getPseudoRandomBytes = n => {
  const bytes = Buffer.allocUnsafe(n)
  // Write 16 bits at a time, then one trailing byte when n is odd.
  const odd = n & 1
  const limit = n - odd
  for (let i = 0; i < limit; i += 2) {
    bytes.writeUInt16BE((Math.random() * 65536) | 0, i)
  }
  if (odd) {
    bytes.writeUInt8((Math.random() * 256) | 0, n - 1)
  }
  return bytes
}
// Unsecure variant of generateToken(), based on the PRNG above.
export const generateUnsecureToken = (n = 32) => base64url(getPseudoRandomBytes(n))
// Generate a secure random Base64 string.
export const generateToken = (randomBytes => {
  // crypto.randomBytes is promisified once here; `n` is the number of random
  // bytes generated before Base64 (URL-safe) encoding.
  return (n = 32) => randomBytes(n).then(base64url)
})(promisify(randomBytes))
// -------------------------------------------------------------------
// Serializes a JS object to an XML string (no XML declaration header).
export const formatXml = (function () {
  const builder = new xml2js.Builder({
    headless: true,
  })
  return (...args) => builder.buildObject(...args)
})()
// Parses an XML string into a JS object (attributes merged into the
// element object, single children not wrapped in arrays).
export const parseXml = (function () {
  const opts = {
    mergeAttrs: true,
    explicitArray: false,
  }
  return (xml) => {
    let result
    // xml2js.parseString() use a callback for synchronous code.
    xml2js.parseString(xml, opts, (error, result_) => {
      if (error) {
        throw error
      }
      result = result_
    })
    return result
  }
})()
// -------------------------------------------------------------------
// Very light and fast set.
//
// - works only with strings
// - methods are already bound and chainable
export const lightSet = collection => {
  let data = createRawObject()
  if (collection) {
    forEach(collection, value => {
      data[value] = true
    })
    // Allow the source collection to be garbage collected.
    collection = null
  }
  const set = {
    add: value => {
      data[value] = true
      return set
    },
    clear: () => {
      data = createRawObject()
      return set
    },
    delete: value => {
      delete data[value]
      return set
    },
    // NOTE: returns the stored truthy flag (or undefined), not a strict boolean.
    has: value => data[value],
    toArray: () => keys(data),
  }
  return set
}
// -------------------------------------------------------------------
// This function does nothing and returns undefined.
//
// It is often used to swallow promise's errors.
export const noop = () => {}
// -------------------------------------------------------------------
// Usage: pDebug(promise, name) or promise::pDebug(name)
export function pDebug (promise, name) {
  // Support the bound form: promise::pDebug(name).
  if (arguments.length === 1) {
    name = promise
    promise = this
  }
  Promise.resolve(promise).then(
    value => {
      console.log(
        '%s',
        `Promise ${name} resolved${value !== undefined ? ` with ${kindOf(value)}` : ''}`
      )
    },
    reason => {
      console.log(
        '%s',
        `Promise ${name} rejected${reason !== undefined ? ` with ${kindOf(reason)}` : ''}`
      )
    }
  )
  // The original promise is returned unchanged: logging is a side effect.
  return promise
}
// Given a collection (array or object) which contains promises,
// return a promise that is fulfilled when all the items in the
// collection are either fulfilled or rejected.
//
// This promise will be fulfilled with a collection (of the same type,
// array or object) containing promise inspections.
//
// Usage: pSettle(promises) or promises::pSettle()
export function pSettle (promises) {
  // Reflect each promise so that pAll() never rejects; `this` supports the
  // bound form promises::pSettle().
  return (this || promises)::pAll(p => p::pReflect())
}
// -------------------------------------------------------------------
export {
all as pAll,
delay as pDelay,
fromCallback as pFromCallback,
lastly as pFinally,
promisify,
promisifyAll,
reflect as pReflect,
} from 'promise-toolbox'
// -------------------------------------------------------------------
// Parses a human-readable size (e.g. '1G', '3MB') into a number of bytes.
//
// Non-string values are returned unchanged. The size is first parsed with a
// binary scale; if a non-`B` unit is found it is reparsed with the default
// scale, and rejected if the unit is still not `B`.
export function parseSize (size) {
  if (!isString(size)) {
    return size
  }
  let bytes = humanFormat.parse.raw(size, { scale: 'binary' })
  if (bytes.unit && bytes.unit !== 'B') {
    bytes = humanFormat.parse.raw(size)
    if (bytes.unit && bytes.unit !== 'B') {
      throw new Error('invalid size: ' + size)
    }
  }
  return Math.floor(bytes.value * bytes.factor)
}
// -------------------------------------------------------------------
const _has = Object.prototype.hasOwnProperty
// Removes an own property from an object and returns its value.
// (Returns undefined when the object has no own enumerable property.)
export const popProperty = obj => {
  for (const prop in obj) {
    if (!_has.call(obj, prop)) {
      continue
    }
    return extractProperty(obj, prop)
  }
}
// -------------------------------------------------------------------
// Format a date in ISO 8601 in a safe way to be used in filenames
// (even on Windows).
export const safeDateFormat = utcFormat('%Y%m%dT%H%M%SZ')
// Inverse of safeDateFormat: parses the same pattern back into a Date.
export const safeDateParse = utcParse('%Y%m%dT%H%M%SZ')
// -------------------------------------------------------------------
// This functions are often used throughout xo-server.
//
// Exports them from here to avoid direct dependencies on lodash/
export { default as forEach } from 'lodash/forEach'
export { default as isArray } from 'lodash/isArray'
export { default as isBoolean } from 'lodash/isBoolean'
export { default as isEmpty } from 'lodash/isEmpty'
export { default as isFunction } from 'lodash/isFunction'
export { default as isInteger } from 'lodash/isInteger'
export { default as isObject } from 'lodash/isObject'
export { default as isString } from 'lodash/isString'
export { default as mapToArray } from 'lodash/map'
// -------------------------------------------------------------------
// Special value which can be returned to stop an iteration in map()
// and mapInPlace().
export const DONE = {}
// Fill `target` by running each element in `collection` through
// `iteratee`.
//
// If `target` is undefined, it defaults to a new array if
// `collection` is array-like (has a `length` property), otherwise an
// object.
//
// The context of `iteratee` can be specified via `thisArg`.
//
// Note: the Mapping can be interrupted by returning the special value
// `DONE` provided as the fourth argument.
//
// Usage: map(collection, item => item + 1)
export function map (
  collection,
  iteratee,
  target = has(collection, 'length') ? [] : {}
) {
  forEach(collection, (item, i) => {
    const value = iteratee(item, i, collection, DONE)
    // Returning DONE stops the iteration (forEach stops on false).
    if (value === DONE) {
      return false
    }
    target[i] = value
  })
  return target
}
// -------------------------------------------------------------------
// Create a hash from multiple values.
export const multiKeyHash = (...args) => new Promise(resolve => {
  // multikey-hash returns a 32-bit integer: serialize it to 4 bytes, then
  // encode as URL-safe Base64.
  const hash = multiKeyHashInt(...args)
  const buf = Buffer.allocUnsafe(4)
  buf.writeUInt32LE(hash, 0)
  resolve(base64url(buf))
})
// -------------------------------------------------------------------
// Resolves `path` inside `root` without letting `..` escape it: the path is
// first normalized against '/' (stripping any leading `..`), then
// re-anchored under `root`.
export const resolveSubpath = (root, path) =>
  resolve(root, `.${resolve('/', path)}`)
// -------------------------------------------------------------------
// Drains `stream` into an array, optionally filtering and mapping each item
// on the way. Rejects on the first stream error.
export const streamToArray = (stream, {
  filter,
  mapper,
} = {}) => new Promise((resolve, reject) => {
  stream = highland(stream).stopOnError(reject)
  if (filter) {
    stream = stream.filter(filter)
  }
  if (mapper) {
    stream = stream.map(mapper)
  }
  stream.toArray(resolve)
})
// -------------------------------------------------------------------
// Contrary to most implentations this one use the range 0-11 instead
// of 1-12 for months.
export const scheduleFn = (cronTime, fn, timeZone) => {
  // Guard against overlapping executions: ticks are skipped while a previous
  // invocation of `fn` is still running.
  let running = false
  const job = new CronJob({
    cronTime,
    onTick: async () => {
      if (running) {
        return
      }
      running = true
      try {
        await fn()
      } catch (error) {
        // Errors are logged, never propagated: the schedule keeps going.
        console.error('[WARN] scheduled function:', (error && error.stack) || error)
      } finally {
        running = false
      }
    },
    start: true,
    timeZone,
  })
  // Returns a function which cancels the schedule.
  return () => {
    job.stop()
  }
}
// -------------------------------------------------------------------
// Create a serializable object from an error.
export const serializeError = error => Object.assign(
  {
    // message/stack are own but typically non-enumerable on Error
    // instances: copy them explicitly, then let assign() add the
    // enumerable own properties (which may override them).
    message: error.message,
    stack: error.stack,
  },
  error
)
// -------------------------------------------------------------------
// Create an array which contains the results of one thunk function.
// Only works with synchronous thunks.
// Create an array which contains the results of one thunk function.
// Only works with synchronous thunks.
export const thunkToArray = thunk => {
  const values = []
  // Forward all arguments to push(), like the bound values.push would.
  thunk((...args) => values.push(...args))
  return values
}
// -------------------------------------------------------------------
// Creates a new function which throws an error.
//
// ```js
// promise.catch(throwFn('an error has occured'))
//
// function foo (param = throwFn('param is required')()) {}
// ```
export const throwFn = error => () => {
  if (isString(error)) {
    throw new Error(error)
  }
  throw error
}
// -------------------------------------------------------------------
export const tmpDir = () => fromCallback(cb => tmp.dir(cb))
// -------------------------------------------------------------------
// Wrap a value in a function.
export const wrap = value => () => value
// -------------------------------------------------------------------
// Combined map + filter: collects the results of `iteratee`, skipping
// falsy values.
export const mapFilter = (collection, iteratee) => {
  const result = []
  forEach(collection, (...args) => {
    const value = iteratee(...args)
    if (value) {
      result.push(value)
    }
  })
  return result
}
// -------------------------------------------------------------------
// Splits `string` on the first occurrence of `separator`.
// Returns `[before, after]`, or `null` when the separator is absent.
export const splitFirst = (string, separator) => {
  const index = string.indexOf(separator)
  if (index === -1) {
    return null
  }
  return [
    string.slice(0, index),
    string.slice(index + separator.length),
  ]
}
// -------------------------------------------------------------------
// Returns the name of the first own enumerable property of `object`
// (undefined when there is none).
export const getFirstPropertyName = object => {
  const hasOwn = Object.prototype.hasOwnProperty
  for (const key in object) {
    if (hasOwn.call(object, key)) {
      return key
    }
  }
}

View File

@ -0,0 +1,213 @@
/* eslint-env jest */
// Unit tests for the pure helpers of ./utils.js.
import {
  camelToSnakeCase,
  createRawObject,
  diffItems,
  ensureArray,
  extractProperty,
  formatXml,
  generateToken,
  parseSize,
  pSettle,
} from './utils'
// ===================================================================
describe('camelToSnakeCase()', function () {
  it('converts a string from camelCase to snake_case', function () {
    expect(camelToSnakeCase('fooBar')).toBe('foo_bar')
    expect(camelToSnakeCase('ipv4Allowed')).toBe('ipv4_allowed')
  })
  it('does not alter snake_case strings', function () {
    expect(camelToSnakeCase('foo_bar')).toBe('foo_bar')
    expect(camelToSnakeCase('ipv4_allowed')).toBe('ipv4_allowed')
  })
  it('does not alter upper case letters expect those from the camelCase', function () {
    expect(camelToSnakeCase('fooBar_BAZ')).toBe('foo_bar_BAZ')
  })
})
// -------------------------------------------------------------------
describe('createRawObject()', () => {
  it('returns an empty object', () => {
    expect(createRawObject()).toEqual({})
  })
  it('creates a new object each time', () => {
    expect(createRawObject()).not.toBe(createRawObject())
  })
  if (Object.getPrototypeOf) {
    it('creates an object without a prototype', () => {
      expect(Object.getPrototypeOf(createRawObject())).toBe(null)
    })
  }
})
// -------------------------------------------------------------------
describe('diffItems', () => {
  it('computes the added/removed items between 2 iterables', () => {
    expect(diffItems(
      ['foo', 'bar'],
      ['baz', 'foo']
    )).toEqual([
      ['bar'],
      ['baz'],
    ])
  })
})
// -------------------------------------------------------------------
describe('ensureArray()', function () {
  it('wrap the value in an array', function () {
    const value = 'foo'
    expect(ensureArray(value)).toEqual([value])
  })
  it('returns an empty array for undefined', function () {
    expect(ensureArray(undefined)).toEqual([])
  })
  it('returns the object itself if is already an array', function () {
    const array = ['foo', 'bar', 'baz']
    expect(ensureArray(array)).toBe(array)
  })
})
// -------------------------------------------------------------------
describe('extractProperty()', function () {
  it('returns the value of the property', function () {
    const value = {}
    const obj = { prop: value }
    expect(extractProperty(obj, 'prop')).toBe(value)
  })
  it('removes the property from the object', function () {
    const value = {}
    const obj = { prop: value }
    expect(extractProperty(obj, 'prop')).toBe(value)
    expect(obj.prop).not.toBeDefined()
  })
})
// -------------------------------------------------------------------
describe('formatXml()', function () {
  it('formats a JS object to an XML string', function () {
    expect(formatXml({
      foo: {
        bar: [
          {$: {baz: 'plop'}},
          {$: {baz: 'plip'}},
        ],
      },
    })).toBe(`<foo>
  <bar baz="plop"/>
  <bar baz="plip"/>
</foo>`)
  })
})
// -------------------------------------------------------------------
describe('generateToken()', () => {
  it('generates a string', async () => {
    expect(typeof await generateToken()).toBe('string')
  })
})
// -------------------------------------------------------------------
describe('parseSize()', function () {
  it('parses a human size', function () {
    expect(parseSize('1G')).toBe(1e9)
  })
  it('returns the parameter if already a number', function () {
    expect(parseSize(1e6)).toBe(1e6)
  })
  it('throws if the string cannot be parsed', function () {
    expect(function () {
      parseSize('foo')
    }).toThrow()
  })
  it('supports the B unit as suffix', function () {
    expect(parseSize('3MB')).toBe(3e6)
  })
})
// -------------------------------------------------------------------
// pSettle() must never reject: each result is a promise inspection
// exposing isFulfilled()/isRejected()/value()/reason().
describe('pSettle()', () => {
  it('works with arrays', async () => {
    const rejection = 'fatality'
    const [
      status1,
      status2,
      status3,
    ] = await pSettle([
      Promise.resolve(42),
      Math.PI,
      Promise.reject(rejection),
    ])
    expect(status1.isRejected()).toBe(false)
    expect(status2.isRejected()).toBe(false)
    expect(status3.isRejected()).toBe(true)
    expect(status1.isFulfilled()).toBe(true)
    expect(status2.isFulfilled()).toBe(true)
    expect(status3.isFulfilled()).toBe(false)
    expect(status1.value()).toBe(42)
    expect(status2.value()).toBe(Math.PI)
    expect(::status3.value).toThrow()
    expect(::status1.reason).toThrow()
    expect(::status2.reason).toThrow()
    expect(status3.reason()).toBe(rejection)
  })
  it('works with objects', async () => {
    const rejection = 'fatality'
    const {
      a: status1,
      b: status2,
      c: status3,
    } = await pSettle({
      a: Promise.resolve(42),
      b: Math.PI,
      c: Promise.reject(rejection),
    })
    expect(status1.isRejected()).toBe(false)
    expect(status2.isRejected()).toBe(false)
    expect(status3.isRejected()).toBe(true)
    expect(status1.isFulfilled()).toBe(true)
    expect(status2.isFulfilled()).toBe(true)
    expect(status3.isFulfilled()).toBe(false)
    expect(status1.value()).toBe(42)
    expect(status2.value()).toBe(Math.PI)
    expect(::status3.value).toThrow()
    expect(::status1.reason).toThrow()
    expect(::status2.reason).toThrow()
    expect(status3.reason()).toBe(rejection)
  })
})

View File

@ -0,0 +1,683 @@
// TODO: remove once completely merged in vhd.js
import assert from 'assert'
import eventToPromise from 'event-to-promise'
import fu from '@nraynaud/struct-fu'
import isEqual from 'lodash/isEqual'
import constantStream from './constant-stream'
import {
noop,
streamToBuffer,
} from './utils'
// Debug switch: set to a truthy value to log vhd-util operations.
const VHD_UTIL_DEBUG = 0
const debug = VHD_UTIL_DEBUG
  ? str => console.log(`[vhd-util]${str}`)
  : noop
// ===================================================================
//
// Spec:
// https://www.microsoft.com/en-us/download/details.aspx?id=23850
//
// C implementation:
// https://github.com/rubiojr/vhd-util-convert
//
// ===================================================================
// Sizes in bytes.
const VHD_FOOTER_SIZE = 512
const VHD_HEADER_SIZE = 1024
const VHD_SECTOR_SIZE = 512
// Block allocation table entry size. (Block addr)
const VHD_ENTRY_SIZE = 4
const VHD_PARENT_LOCATOR_ENTRIES = 8
const VHD_PLATFORM_CODE_NONE = 0
// Types of backup treated. Others are not supported.
const HARD_DISK_TYPE_DYNAMIC = 3 // Full backup.
const HARD_DISK_TYPE_DIFFERENCING = 4 // Delta backup.
// Other.
const BLOCK_UNUSED = 0xFFFFFFFF
const BIT_MASK = 0x80
// unused block as buffer containing a uint32BE
const BUF_BLOCK_UNUSED = Buffer.allocUnsafe(VHD_ENTRY_SIZE)
BUF_BLOCK_UNUSED.writeUInt32BE(BLOCK_UNUSED, 0)
// ===================================================================

// Layout of the 512-byte VHD footer (fuFooter.size === VHD_FOOTER_SIZE).
// Numbers in the trailing comments are byte offsets within the footer.
const fuFooter = fu.struct([
  fu.char('cookie', 8), // 0
  fu.uint32('features'), // 8
  fu.uint32('fileFormatVersion'), // 12
  fu.struct('dataOffset', [
    fu.uint32('high'), // 16
    fu.uint32('low'), // 20
  ]),
  fu.uint32('timestamp'), // 24
  fu.char('creatorApplication', 4), // 28
  fu.uint32('creatorVersion'), // 32
  fu.uint32('creatorHostOs'), // 36
  fu.struct('originalSize', [ // At the creation, current size of the hard disk.
    fu.uint32('high'), // 40
    fu.uint32('low'), // 44
  ]),
  fu.struct('currentSize', [ // Current size of the virtual disk. At the creation: currentSize = originalSize.
    fu.uint32('high'), // 48
    fu.uint32('low'), // 52
  ]),
  fu.struct('diskGeometry', [
    fu.uint16('cylinders'), // 56
    fu.uint8('heads'), // 58
    fu.uint8('sectorsPerTrackCylinder'), // 59
  ]),
  fu.uint32('diskType'), // 60 Disk type, must be equal to HARD_DISK_TYPE_DYNAMIC/HARD_DISK_TYPE_DIFFERENCING.
  fu.uint32('checksum'), // 64
  fu.uint8('uuid', 16), // 68
  fu.char('saved'), // 84
  fu.char('hidden'), // 85
  fu.char('reserved', 426), // 86
])
// Layout of the 1024-byte VHD dynamic-disk header
// (fuHeader.size === VHD_HEADER_SIZE), read right after the leading
// footer copy — see readHeaderAndFooter().
const fuHeader = fu.struct([
  fu.char('cookie', 8),
  fu.struct('dataOffset', [
    fu.uint32('high'),
    fu.uint32('low'),
  ]),
  fu.struct('tableOffset', [ // Absolute byte offset of the Block Allocation Table.
    fu.uint32('high'),
    fu.uint32('low'),
  ]),
  fu.uint32('headerVersion'),
  fu.uint32('maxTableEntries'), // Max entries in the Block Allocation Table.
  fu.uint32('blockSize'), // Block size in bytes. Default (2097152 => 2MB)
  fu.uint32('checksum'),
  fu.uint8('parentUuid', 16),
  fu.uint32('parentTimestamp'),
  fu.uint32('reserved1'),
  fu.char16be('parentUnicodeName', 512),
  fu.struct('parentLocatorEntry', [
    fu.uint32('platformCode'),
    fu.uint32('platformDataSpace'),
    fu.uint32('platformDataLength'),
    fu.uint32('reserved'),
    fu.struct('platformDataOffset', [ // Absolute byte offset of the locator data.
      fu.uint32('high'),
      fu.uint32('low'),
    ]),
  ], VHD_PARENT_LOCATOR_ENTRIES),
  fu.char('reserved2', 256),
])
// ===================================================================
// Helpers
// ===================================================================
const SIZE_OF_32_BITS = Math.pow(2, 32)

// Combines the { high, low } unsigned 32-bit halves produced by the
// struct-fu layouts above into a single JavaScript number (exact up to
// Number.MAX_SAFE_INTEGER).
//
// The parameter used to be named `fu`, shadowing the struct-fu module
// imported under the same name; destructuring removes the shadowing.
const uint32ToUint64 = ({ high, low }) => high * SIZE_OF_32_BITS + low
// Returns a 32 bits integer corresponding to a Vhd version.
//
// The major number occupies the high 16 bits, the minor number the low
// 16 bits.
const getVhdVersion = (major, minor) => {
  const high = major << 16
  const low = minor & 0x0000FFFF
  return high | low
}
// Sectors conversions.

// Number of whole sectors needed to hold `nBytes` bytes (round up).
const sectorsRoundUp = nBytes => {
  const padded = nBytes + VHD_SECTOR_SIZE - 1
  return Math.floor(padded / VHD_SECTOR_SIZE)
}

// Same as sectorsRoundUp() but never returns 0: an empty payload still
// occupies one sector.
const sectorsRoundUpNoZero = nBytes => sectorsRoundUp(nBytes) || 1

// Converts a sector count to a byte count.
const sectorsToBytes = nSectors => nSectors * VHD_SECTOR_SIZE
// Check/Set a bit on a vhd map.
//
// Bit 0 is the most significant bit of the first byte.
const mapTestBit = (map, bit) => {
  const byte = map[bit >> 3]
  return ((byte << (bit & 7)) & BIT_MASK) !== 0
}
const mapSetBit = (map, bit) => {
  map[bit >> 3] |= BIT_MASK >> (bit & 7)
}
// Packs `value` into `buf` at the position described by `field`.
//
// struct-fu offsets may be either a plain byte count or an
// { bytes, bits } object; normalize before delegating to field.pack().
const packField = (field, value, buf) => {
  const { offset } = field
  const where = typeof offset === 'object'
    ? offset
    : { bytes: offset, bits: 0 }

  field.pack(value, buf, where)
}
// Reads the value described by `field` out of `buf`.
//
// Offset normalization mirrors packField().
const unpackField = (field, buf) => {
  const { offset } = field
  const from = typeof offset === 'object'
    ? offset
    : { bytes: offset, bits: 0 }

  return field.unpack(buf, from)
}
// ===================================================================

// Returns the checksum of a raw struct.
// The raw struct (footer or header) is altered with the new sum.
function checksumStruct (rawStruct, struct) {
  const checksumField = struct.fields.checksum

  // Reset current sum so it does not contribute to the new one.
  packField(checksumField, 0, rawStruct)

  // Byte-wise sum over the whole struct, one's-complemented.
  let sum = 0
  const { size } = struct
  for (let i = 0; i < size; ++i) {
    sum = (sum + rawStruct[i]) & 0xFFFFFFFF
  }
  sum = 0xFFFFFFFF - sum

  // Write new sum.
  packField(checksumField, sum, rawStruct)

  return sum
}
// ===================================================================

// In-memory handle on a VHD file stored behind a remote handler.
//
// Callers must invoke readHeaderAndFooter() and then readBlockTable()
// before using any of the block-level methods.
class Vhd {
  // handler: remote handler exposing createReadStream()/createOutputStream()
  // path: location of the VHD file on that handler
  constructor (handler, path) {
    this._handler = handler
    this._path = path
  }

  // =================================================================
  // Read functions.
  // =================================================================

  // Returns a read stream over the `n` bytes starting at byte `start`.
  _readStream (start, n) {
    return this._handler.createReadStream(this._path, {
      start,
      end: start + n - 1, // end is inclusive
    })
  }

  // Returns a promise of a buffer containing the `n` bytes starting at
  // byte `start`.
  _read (start, n) {
    return this._readStream(start, n).then(streamToBuffer)
  }

  // Returns the first address after metadata. (In bytes)
  getEndOfHeaders () {
    const { header } = this

    // Dynamic-disk header starts at footer.dataOffset and is
    // VHD_HEADER_SIZE long.
    let end = uint32ToUint64(this.footer.dataOffset) + VHD_HEADER_SIZE

    const blockAllocationTableSize = sectorsToBytes(
      sectorsRoundUpNoZero(header.maxTableEntries * VHD_ENTRY_SIZE)
    )

    // Max(end, block allocation table end)
    end = Math.max(end, uint32ToUint64(header.tableOffset) + blockAllocationTableSize)

    // Parent locator data may also live between the headers and the data.
    for (let i = 0; i < VHD_PARENT_LOCATOR_ENTRIES; i++) {
      const entry = header.parentLocatorEntry[i]

      if (entry.platformCode !== VHD_PLATFORM_CODE_NONE) {
        end = Math.max(end,
          uint32ToUint64(entry.platformDataOffset) +
          sectorsToBytes(entry.platformDataSpace)
        )
      }
    }

    debug(`End of headers: ${end}.`)

    return end
  }

  // Returns the first sector after data.
  // (Note: the scan is done in sectors but the returned value is in
  // bytes — see the final sectorsToBytes().)
  getEndOfData () {
    let end = Math.ceil(this.getEndOfHeaders() / VHD_SECTOR_SIZE)

    // Size of an allocated block (bitmap + data), in sectors.
    const fullBlockSize = this.sectorsOfBitmap + this.sectorsPerBlock
    const { maxTableEntries } = this.header
    for (let i = 0; i < maxTableEntries; i++) {
      const blockAddr = this._getBatEntry(i)

      if (blockAddr !== BLOCK_UNUSED) {
        end = Math.max(end, blockAddr + fullBlockSize)
      }
    }

    debug(`End of data: ${end}.`)

    return sectorsToBytes(end)
  }

  // Get the beginning (footer + header) of a vhd file.
  //
  // Also derives the sector/bitmap geometry used by the block-level
  // methods (sectorsPerBlock, sectorsOfBitmap, fullBlockSize,
  // bitmapSize).
  async readHeaderAndFooter () {
    const buf = await this._read(0, VHD_FOOTER_SIZE + VHD_HEADER_SIZE)

    const sum = unpackField(fuFooter.fields.checksum, buf)
    // checksumStruct() recomputes the footer checksum (it also writes it
    // back into `buf`, which is harmless here).
    const sumToTest = checksumStruct(buf, fuFooter)

    // Checksum child & parent.
    if (sumToTest !== sum) {
      throw new Error(`Bad checksum in vhd. Expected: ${sum}. Given: ${sumToTest}. (data=${buf.toString('hex')})`)
    }

    const header = this.header = fuHeader.unpack(buf.slice(VHD_FOOTER_SIZE))
    this.footer = fuFooter.unpack(buf)

    // Compute the number of sectors in one block.
    // Default: One block contains 4096 sectors of 512 bytes.
    const sectorsPerBlock = this.sectorsPerBlock = Math.floor(header.blockSize / VHD_SECTOR_SIZE)

    // Compute bitmap size in sectors.
    // Default: 1.
    const sectorsOfBitmap = this.sectorsOfBitmap = sectorsRoundUpNoZero(sectorsPerBlock >> 3)

    // Full block size => data block size + bitmap size.
    this.fullBlockSize = sectorsToBytes(sectorsPerBlock + sectorsOfBitmap)

    // In bytes.
    // Default: 512.
    this.bitmapSize = sectorsToBytes(sectorsOfBitmap)
  }

  // Check if a vhd object has a block allocation table.
  hasBlockAllocationTableMap () {
    return this.footer.fileFormatVersion > getVhdVersion(1, 0)
  }

  // Returns a buffer that contains the block allocation table of a vhd file.
  async readBlockTable () {
    const { header } = this

    const offset = uint32ToUint64(header.tableOffset)
    const size = sectorsToBytes(
      sectorsRoundUpNoZero(header.maxTableEntries * VHD_ENTRY_SIZE)
    )

    this.blockTable = await this._read(offset, size)
  }

  // return the first sector (bitmap) of a block
  _getBatEntry (block) {
    return this.blockTable.readUInt32BE(block * VHD_ENTRY_SIZE)
  }

  // Reads a block: its bitmap and, unless `onlyBitmap`, its data.
  //
  // Throws if the block is not allocated in the BAT.
  _readBlock (blockId, onlyBitmap = false) {
    const blockAddr = this._getBatEntry(blockId)
    if (blockAddr === BLOCK_UNUSED) {
      throw new Error(`no such block ${blockId}`)
    }

    return this._read(
      sectorsToBytes(blockAddr),
      onlyBitmap ? this.bitmapSize : this.fullBlockSize
    ).then(buf => onlyBitmap
      ? { bitmap: buf }
      : {
        bitmap: buf.slice(0, this.bitmapSize),
        data: buf.slice(this.bitmapSize),
      }
    )
  }

  // get the identifiers and first sectors of the first and last block
  // in the file
  //
  // return undefined if none
  _getFirstAndLastBlocks () {
    const n = this.header.maxTableEntries
    const bat = this.blockTable
    let i = 0 // BAT index
    let j = 0 // byte offset of entry i in the BAT
    let first, firstSector, last, lastSector

    // get first allocated block for initialization
    while ((firstSector = bat.readUInt32BE(j)) === BLOCK_UNUSED) {
      i += 1
      j += VHD_ENTRY_SIZE

      if (i === n) {
        // no allocated block at all
        return
      }
    }
    lastSector = firstSector
    first = last = i

    while (i < n) {
      const sector = bat.readUInt32BE(j)
      if (sector !== BLOCK_UNUSED) {
        if (sector < firstSector) {
          first = i
          firstSector = sector
        } else if (sector > lastSector) {
          last = i
          lastSector = sector
        }
      }

      i += 1
      j += VHD_ENTRY_SIZE
    }

    return { first, firstSector, last, lastSector }
  }

  // =================================================================
  // Write functions.
  // =================================================================

  // Write a buffer/stream at a given position in a vhd file.
  _write (data, offset) {
    debug(`_write offset=${offset} size=${Buffer.isBuffer(data) ? data.length : '???'}`)
    // TODO: could probably be merged in remote handlers.
    return this._handler.createOutputStream(this._path, {
      flags: 'r+', // write in place, do not truncate
      start: offset,
    }).then(
      Buffer.isBuffer(data)
        ? stream => new Promise((resolve, reject) => {
          stream.on('error', reject)
          stream.end(data, resolve)
        })
        : stream => eventToPromise(data.pipe(stream), 'finish')
    )
  }

  // Ensures the BAT can hold at least `size` entries, growing it both in
  // memory and on disk; if the extended table would overlap the first
  // data block, that block is first relocated past the last one.
  async ensureBatSize (size) {
    const { header } = this

    const prevMaxTableEntries = header.maxTableEntries
    if (prevMaxTableEntries >= size) {
      return
    }

    const tableOffset = uint32ToUint64(header.tableOffset)
    const { first, firstSector, lastSector } = this._getFirstAndLastBlocks()

    // extend BAT
    const maxTableEntries = header.maxTableEntries = size
    const batSize = maxTableEntries * VHD_ENTRY_SIZE
    const prevBat = this.blockTable
    const bat = this.blockTable = Buffer.allocUnsafe(batSize)
    prevBat.copy(bat)
    bat.fill(BUF_BLOCK_UNUSED, prevBat.length) // mark new entries unused
    debug(`ensureBatSize: extend in memory BAT ${prevMaxTableEntries} -> ${maxTableEntries}`)

    // Writes the new (unused) tail of the BAT to disk.
    const extendBat = () => {
      debug(`ensureBatSize: extend in file BAT ${prevMaxTableEntries} -> ${maxTableEntries}`)

      return this._write(
        constantStream(BUF_BLOCK_UNUSED, maxTableEntries - prevMaxTableEntries),
        tableOffset + prevBat.length
      )
    }

    // If the extended BAT still ends before the first data block, simply
    // write it in place.
    if (tableOffset + batSize < sectorsToBytes(firstSector)) {
      return Promise.all([
        extendBat(),
        this.writeHeader(),
      ])
    }

    // Otherwise move the first block after the last one before extending
    // the BAT over its former location.
    const { fullBlockSize } = this
    const newFirstSector = lastSector + fullBlockSize / VHD_SECTOR_SIZE
    debug(`ensureBatSize: move first block ${firstSector} -> ${newFirstSector}`)

    return Promise.all([
      // copy the first block at the end
      this._readStream(sectorsToBytes(firstSector), fullBlockSize).then(stream =>
        this._write(stream, sectorsToBytes(newFirstSector))
      ).then(extendBat),
      this._setBatEntry(first, newFirstSector),
      this.writeHeader(),
      this.writeFooter(),
    ])
  }

  // set the first sector (bitmap) of a block
  //
  // Updates both the in-memory BAT and the corresponding entry on disk.
  _setBatEntry (block, blockSector) {
    const i = block * VHD_ENTRY_SIZE
    const { blockTable } = this

    blockTable.writeUInt32BE(blockSector, i)

    return this._write(
      blockTable.slice(i, i + VHD_ENTRY_SIZE),
      uint32ToUint64(this.header.tableOffset) + i
    )
  }

  // Make a new empty block at vhd end.
  // Update block allocation table in context and in file.
  //
  // Returns the address (in sectors) of the new block.
  async createBlock (blockId) {
    const blockAddr = Math.ceil(this.getEndOfData() / VHD_SECTOR_SIZE)

    debug(`create block ${blockId} at ${blockAddr}`)

    await Promise.all([
      // Write an empty block and addr in vhd file.
      this._write(
        constantStream([ 0 ], this.fullBlockSize),
        sectorsToBytes(blockAddr)
      ),
      this._setBatEntry(blockId, blockAddr),
    ])

    return blockAddr
  }

  // Write a bitmap at a block address.
  //
  // `blockAddr` is in sectors; `bitmap` must be exactly bitmapSize bytes.
  async writeBlockBitmap (blockAddr, bitmap) {
    const { bitmapSize } = this

    if (bitmap.length !== bitmapSize) {
      throw new Error(`Bitmap length is not correct ! ${bitmap.length}`)
    }

    const offset = sectorsToBytes(blockAddr)

    debug(`Write bitmap at: ${offset}. (size=${bitmapSize}, data=${bitmap.toString('hex')})`)
    await this._write(bitmap, sectorsToBytes(blockAddr))
  }

  // Writes sectors [beginSectorId, endSectorId) of `block.data` into
  // this VHD — allocating the block first if needed — then marks them in
  // the block's on-disk bitmap.
  async writeBlockSectors (block, beginSectorId, endSectorId) {
    let blockAddr = this._getBatEntry(block.id)

    if (blockAddr === BLOCK_UNUSED) {
      blockAddr = await this.createBlock(block.id)
    }

    // Offset (in sectors) of the first sector to write, past the bitmap.
    const offset = blockAddr + this.sectorsOfBitmap + beginSectorId

    debug(`writeBlockSectors at ${offset} block=${block.id}, sectors=${beginSectorId}...${endSectorId}`)

    await this._write(
      block.data.slice(
        sectorsToBytes(beginSectorId),
        sectorsToBytes(endSectorId)
      ),
      sectorsToBytes(offset)
    )

    const { bitmap } = await this._readBlock(block.id, true)

    for (let i = beginSectorId; i < endSectorId; ++i) {
      mapSetBit(bitmap, i)
    }

    await this.writeBlockBitmap(blockAddr, bitmap)
  }

  // Merge block id (of vhd child) into vhd parent.
  //
  // Only sectors flagged in the child's bitmap are written; returns the
  // size of the child's block data buffer.
  async coalesceBlock (child, blockId) {
    // Get block data and bitmap of block id.
    const { bitmap, data } = await child._readBlock(blockId)

    debug(`coalesceBlock block=${blockId}`)

    // For each sector of block data...
    const { sectorsPerBlock } = child
    for (let i = 0; i < sectorsPerBlock; i++) {
      // If no changes on one sector, skip.
      if (!mapTestBit(bitmap, i)) {
        continue
      }

      let endSector = i + 1

      // Count changed sectors.
      while (endSector < sectorsPerBlock && mapTestBit(bitmap, endSector)) {
        ++endSector
      }

      // Write n sectors into parent.
      debug(`coalesceBlock: write sectors=${i}...${endSector}`)
      await this.writeBlockSectors(
        { id: blockId, data },
        i,
        endSector
      )

      i = endSector
    }

    // Return the merged data size
    return data.length
  }

  // Write a context footer. (At the end and beginning of a vhd file.)
  async writeFooter () {
    const { footer } = this

    const offset = this.getEndOfData()
    const rawFooter = fuFooter.pack(footer)

    // checksumStruct() also patches the checksum inside rawFooter.
    footer.checksum = checksumStruct(rawFooter, fuFooter)

    debug(`Write footer at: ${offset} (checksum=${footer.checksum}). (data=${rawFooter.toString('hex')})`)

    await this._write(rawFooter, 0)
    await this._write(rawFooter, offset)
  }

  // Recomputes the header checksum and writes the header right after the
  // leading footer copy.
  writeHeader () {
    const { header } = this
    const rawHeader = fuHeader.pack(header)
    header.checksum = checksumStruct(rawHeader, fuHeader)
    const offset = VHD_FOOTER_SIZE
    debug(`Write header at: ${offset} (checksum=${header.checksum}). (data=${rawHeader.toString('hex')})`)
    return this._write(rawHeader, offset)
  }
}
// Merges the VHD at childPath into the VHD at parentPath and returns the
// amount of merged block data, in bytes.
//
// The child must be a differencing (delta) backup and the parent a
// dynamic (full) backup.
//
// TODO: update the identifier of the parent VHD.
export default async function vhdMerge (
  parentHandler, parentPath,
  childHandler, childPath
) {
  const parent = new Vhd(parentHandler, parentPath)
  const child = new Vhd(childHandler, childPath)

  // Load footers and headers of both disks in parallel.
  await Promise.all([
    parent.readHeaderAndFooter(),
    child.readHeaderAndFooter(),
  ])

  assert(child.header.blockSize === parent.header.blockSize)

  // The child must be a delta…
  if (child.footer.diskType !== HARD_DISK_TYPE_DIFFERENCING) {
    throw new Error('Unable to merge, child is not a delta backup.')
  }

  // …and merging *into* a differencing disk is prohibited in our case.
  if (parent.footer.diskType !== HARD_DISK_TYPE_DYNAMIC) {
    throw new Error('Unable to merge, parent is not a full backup.')
  }

  // Allocation table map is not yet implemented.
  if (
    parent.hasBlockAllocationTableMap() ||
    child.hasBlockAllocationTableMap()
  ) {
    throw new Error('Unsupported allocation table map.')
  }

  // Load both BATs, then make sure the parent's can address at least as
  // many blocks as the child's.
  await Promise.all([
    parent.readBlockTable(),
    child.readBlockTable(),
  ])
  await parent.ensureBatSize(child.header.maxTableEntries)

  // Sequentially coalesce every allocated child block into the parent.
  const { maxTableEntries } = child.header
  let mergedDataSize = 0
  for (let blockId = 0; blockId < maxTableEntries; ++blockId) {
    if (child._getBatEntry(blockId) !== BLOCK_UNUSED) {
      mergedDataSize += await parent.coalesceBlock(child, blockId)
    }
  }

  // Propagate the child's size/geometry information to the parent…
  const cFooter = child.footer
  const pFooter = parent.footer
  pFooter.currentSize = { ...cFooter.currentSize }
  pFooter.diskGeometry = { ...cFooter.diskGeometry }
  pFooter.originalSize = { ...cFooter.originalSize }
  pFooter.timestamp = cFooter.timestamp

  // …and rewrite the parent footer: necessary to update values and to
  // recreate the footer after block creation.
  await parent.writeFooter()

  return mergedDataSize
}
// Makes the VHD at childPath reference the VHD at parentPath as its
// parent (by name and UUID).
//
// returns true if the child was actually modified
export async function chainVhd (
  parentHandler, parentPath,
  childHandler, childPath
) {
  const parent = new Vhd(parentHandler, parentPath)
  const child = new Vhd(childHandler, childPath)

  await Promise.all([
    parent.readHeaderAndFooter(),
    child.readHeaderAndFooter(),
  ])

  const { header } = child
  const expectedName = parentPath.split('/').pop()
  const expectedUuid = parent.footer.uuid

  const outOfDate =
    header.parentUnicodeName !== expectedName ||
    !isEqual(header.parentUuid, expectedUuid)
  if (outOfDate) {
    header.parentUuid = expectedUuid
    header.parentUnicodeName = expectedName
    await child.writeHeader()
    return true
  }

  // The checksum was broken between xo-server v5.2.4 and v5.2.5
  //
  // Replace by a correct checksum if necessary.
  //
  // TODO: remove when enough time as passed (6 months).
  const rawHeader = fuHeader.pack(header)
  if (checksumStruct(rawHeader, fuHeader) !== header.checksum) {
    await child._write(rawHeader, VHD_FOOTER_SIZE)
    return true
  }

  return false
}

Some files were not shown because too many files have changed in this diff Show More