diff --git a/packages/xo-server/.editorconfig b/packages/xo-server/.editorconfig new file mode 100644 index 000000000..1338fc9dc --- /dev/null +++ b/packages/xo-server/.editorconfig @@ -0,0 +1,65 @@ +# http://EditorConfig.org +# +# Julien Fontanet's configuration +# https://gist.github.com/julien-f/8096213 + +# Top-most EditorConfig file. +root = true + +# Common config. +[*] +charset = utf-8 +end_of_line = lf +insert_final_newline = true +trim_trailing_whitespace = true + +# CoffeeScript +# +# https://github.com/polarmobile/coffeescript-style-guide/blob/master/README.md +[*.{,lit}coffee] +indent_size = 2 +indent_style = space + +# Markdown +[*.{md,mdwn,mdown,markdown}] +indent_size = 4 +indent_style = space + +# Package.json +# +# This indentation style is the one used by npm. +[package.json] +indent_size = 2 +indent_style = space + +# Pug (Jade) +[*.{jade,pug}] +indent_size = 2 +indent_style = space + +# JavaScript +# +# Two spaces seems to be the standard most common style, at least in +# Node.js (http://nodeguide.com/style.html#tabs-vs-spaces). +[*.{js,jsx,ts,tsx}] +indent_size = 2 +indent_style = space + +# Less +[*.less] +indent_size = 2 +indent_style = space + +# Sass +# +# Style used for http://libsass.com +[*.s[ac]ss] +indent_size = 2 +indent_style = space + +# YAML +# +# Only spaces are allowed. 
+[*.yaml] +indent_size = 2 +indent_style = space diff --git a/packages/xo-server/.eslintrc.js b/packages/xo-server/.eslintrc.js new file mode 100644 index 000000000..4370d8d3d --- /dev/null +++ b/packages/xo-server/.eslintrc.js @@ -0,0 +1,15 @@ +module.exports = { + extends: [ + 'standard', + ], + parser: 'babel-eslint', + rules: { + 'comma-dangle': ['error', 'always-multiline'], + 'no-var': 'error', + 'node/no-extraneous-import': 'error', + 'node/no-extraneous-require': 'error', + 'node/no-missing-require': 'error', + 'node/no-missing-import': 'error', + 'prefer-const': 'error', + }, +} diff --git a/packages/xo-server/.gitignore b/packages/xo-server/.gitignore new file mode 100644 index 000000000..557253e2d --- /dev/null +++ b/packages/xo-server/.gitignore @@ -0,0 +1,14 @@ +/coverage/ +/dist/ +/node_modules/ +/src/api/index.js +/src/xapi/mixins/index.js +/src/xo-mixins/index.js + +npm-debug.log +npm-debug.log.* +pnpm-debug.log +pnpm-debug.log.* +yarn-error.log + +.xo-server.* diff --git a/packages/xo-server/.npmignore b/packages/xo-server/.npmignore new file mode 100644 index 000000000..e058b6bc1 --- /dev/null +++ b/packages/xo-server/.npmignore @@ -0,0 +1,24 @@ +/benchmark/ +/benchmarks/ +*.bench.js +*.bench.js.map + +/examples/ +example.js +example.js.map +*.example.js +*.example.js.map + +/fixture/ +/fixtures/ +*.fixture.js +*.fixture.js.map +*.fixtures.js +*.fixtures.js.map + +/test/ +/tests/ +*.spec.js +*.spec.js.map + +__snapshots__/ diff --git a/packages/xo-server/.travis.yml b/packages/xo-server/.travis.yml new file mode 100644 index 000000000..dd123f55e --- /dev/null +++ b/packages/xo-server/.travis.yml @@ -0,0 +1,9 @@ +language: node_js +node_js: + - stable + - 8 + - 6 + +# Use containers. 
+# http://docs.travis-ci.com/user/workers/container-based-infrastructure/ +sudo: false diff --git a/packages/xo-server/ISSUE_TEMPLATE.md b/packages/xo-server/ISSUE_TEMPLATE.md new file mode 100644 index 000000000..6f5af1557 --- /dev/null +++ b/packages/xo-server/ISSUE_TEMPLATE.md @@ -0,0 +1,3 @@ +# ALL ISSUES SHOULD BE CREATED IN XO-WEB'S TRACKER! + +https://github.com/vatesfr/xo-web/issues diff --git a/packages/xo-server/README.md b/packages/xo-server/README.md new file mode 100644 index 000000000..0c4099a8a --- /dev/null +++ b/packages/xo-server/README.md @@ -0,0 +1,40 @@ +# Xen Orchestra Server + +![](http://i.imgur.com/HVFMrTk.png) + +XO-Server is part of [Xen Orchestra](https://github.com/vatesfr/xo), a web interface for XenServer or XAPI enabled hosts. + +It contains all the logic of XO and handles: + +- connections to all XAPI servers/pools; +- a cache system to provide the best response time possible; +- users authentication and authorizations (work in progress); +- a JSON-RPC based interface for XO clients (i.e. [XO-Web](https://github.com/vatesfr/xo-web)). + +[![Build Status](https://travis-ci.org/vatesfr/xo-server.svg?branch=next-release)](https://travis-ci.org/vatesfr/xo-server) +[![Dependency Status](https://david-dm.org/vatesfr/xo-server.svg?theme=shields.io)](https://david-dm.org/vatesfr/xo-server) +[![devDependency Status](https://david-dm.org/vatesfr/xo-server/dev-status.svg?theme=shields.io)](https://david-dm.org/vatesfr/xo-server#info=devDependencies) + +___ + +## Installation + +Manual install procedure is [available here](https://xen-orchestra.com/docs/from_the_sources.html). + +## Compilation + +Production build: + +``` +$ yarn run build +``` + +Development build: + +``` +$ yarn run dev +``` + +## How to report a bug? + +All bug reports should go into the [bugtracker of xo-web](https://github.com/vatesfr/xo-web/issues). 
diff --git a/packages/xo-server/better-stacks.js b/packages/xo-server/better-stacks.js new file mode 100644 index 000000000..979006f62 --- /dev/null +++ b/packages/xo-server/better-stacks.js @@ -0,0 +1,32 @@ +Error.stackTraceLimit = 100 + +// Removes internal modules. +try { + const sep = require('path').sep + + require('stack-chain').filter.attach(function (_, frames) { + const filtered = frames.filter(function (frame) { + const name = frame && frame.getFileName() + + return ( + // has a filename + name && + + // contains a separator (no internal modules) + name.indexOf(sep) !== -1 && + + // does not start with `internal` + name.lastIndexOf('internal', 0) !== -1 + ) + }) + + // depd (used amongst other by express requires at least 3 frames + // in the stack. + return filtered.length > 2 + ? filtered + : frames + }) +} catch (_) {} + +// Source maps. +try { require('julien-f-source-map-support/register') } catch (_) {} diff --git a/packages/xo-server/bin/xo-server b/packages/xo-server/bin/xo-server new file mode 100755 index 000000000..6adb2366a --- /dev/null +++ b/packages/xo-server/bin/xo-server @@ -0,0 +1,31 @@ +#!/usr/bin/env node + +'use strict' + +// =================================================================== + +// Better stack traces if possible. +require('../better-stacks') + +// Use Bluebird for all promises as it provides better performance and +// less memory usage. +global.Promise = require('bluebird') + +// Make unhandled rejected promises visible. 
+process.on('unhandledRejection', function (reason) { + console.warn('[Warn] Possibly unhandled rejection:', reason && reason.stack || reason) +}) + +;(function (EE) { + var proto = EE.prototype + var emit = proto.emit + proto.emit = function patchedError (event, error) { + if (event === 'error' && !this.listenerCount(event)) { + return console.warn('[Warn] Unhandled error event:', error && error.stack || error) + } + + return emit.apply(this, arguments) + } +})(require('events').EventEmitter) + +require('exec-promise')(require('../')) diff --git a/packages/xo-server/bin/xo-server-logs b/packages/xo-server/bin/xo-server-logs new file mode 100755 index 000000000..d8b73c0db --- /dev/null +++ b/packages/xo-server/bin/xo-server-logs @@ -0,0 +1,10 @@ +#!/usr/bin/env node + +'use strict' + +// =================================================================== + +// Better stack traces if possible. +require('../better-stacks') + +require('exec-promise')(require('../dist/logs-cli').default) diff --git a/packages/xo-server/bin/xo-server-recover-account b/packages/xo-server/bin/xo-server-recover-account new file mode 100755 index 000000000..eb43519a5 --- /dev/null +++ b/packages/xo-server/bin/xo-server-recover-account @@ -0,0 +1,3 @@ +#!/usr/bin/env node + +require('exec-promise')(require('../dist/recover-account-cli').default) diff --git a/packages/xo-server/config.json b/packages/xo-server/config.json new file mode 100644 index 000000000..7eaef46a8 --- /dev/null +++ b/packages/xo-server/config.json @@ -0,0 +1,44 @@ +// Vendor config: DO NOT TOUCH! +// +// See sample.config.yaml to override. +{ + "http": { + "listen": [ + { + "port": 80 + } + ], + + // These options are applied to all listen entries. + "listenOptions": { + // Ciphers to use. + // + // These are the default ciphers in Node 4.2.6, we are setting + // them explicitly for older Node versions. 
+ "ciphers": "ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-AES256-GCM-SHA384:DHE-RSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-SHA256:DHE-RSA-AES128-SHA256:ECDHE-RSA-AES256-SHA384:DHE-RSA-AES256-SHA384:ECDHE-RSA-AES256-SHA256:DHE-RSA-AES256-SHA256:HIGH:!aNULL:!eNULL:!EXPORT:!DES:!RC4:!MD5:!PSK:!SRP:!CAMELLIA", + + // Tell Node to respect the cipher order. + "honorCipherOrder": true, + + // Specify to use at least TLSv1.1. + // See: https://github.com/certsimple/minimum-tls-version + "secureOptions": 117440512 + }, + + "mounts": {} + }, + "datadir": "/var/lib/xo-server/data", + + // Should users be created on first sign in? + // + // Necessary for external authentication providers. + "createUserOnFirstSignin": true, + + // Whether API logs should contains the full request/response on + // errors. + // + // This is disabled by default for performance (lots of data) and + // security concerns (avoiding sensitive data in the logs) but can + // be turned for investigation by the administrator. + "verboseApiLogsOnErrors": false +} diff --git a/packages/xo-server/index.js b/packages/xo-server/index.js new file mode 100644 index 000000000..b886bd1ff --- /dev/null +++ b/packages/xo-server/index.js @@ -0,0 +1,11 @@ +'use strict' + +// =================================================================== + +// Enable xo logs by default. +if (process.env.DEBUG === undefined) { + process.env.DEBUG = 'app-conf,xo:*,-xo:api' +} + +// Import the real main module. 
+module.exports = require('./dist').default // eslint-disable-line node/no-missing-require diff --git a/packages/xo-server/package.json b/packages/xo-server/package.json new file mode 100644 index 000000000..86b33b5c3 --- /dev/null +++ b/packages/xo-server/package.json @@ -0,0 +1,181 @@ +{ + "name": "xo-server", + "version": "5.16.0", + "license": "AGPL-3.0", + "description": "Server part of Xen-Orchestra", + "keywords": [ + "xen", + "orchestra", + "xen-orchestra", + "server" + ], + "homepage": "http://github.com/vatesfr/xo-server/", + "bugs": { + "url": "https://github.com/vatesfr/xo-web/issues" + }, + "repository": { + "type": "git", + "url": "git://github.com/vatesfr/xo-server.git" + }, + "author": "Julien Fontanet ", + "preferGlobal": true, + "files": [ + "better-stacks.js", + "bin/", + "dist/", + "config.json", + "index.js", + "signin.pug" + ], + "directories": { + "bin": "bin" + }, + "engines": { + "node": ">=4.5" + }, + "dependencies": { + "@marsaud/smb2-promise": "^0.2.1", + "@nraynaud/struct-fu": "^1.0.1", + "ajv": "^6.1.1", + "app-conf": "^0.5.0", + "archiver": "^2.1.0", + "babel-runtime": "^6.26.0", + "base64url": "^2.0.0", + "bind-property-descriptor": "^1.0.0", + "blocked": "^1.2.1", + "bluebird": "^3.5.1", + "body-parser": "^1.18.2", + "connect-flash": "^0.1.1", + "cookie": "^0.3.1", + "cookie-parser": "^1.4.3", + "cron": "^1.3.0", + "d3-time-format": "^2.1.1", + "debug": "^3.1.0", + "decorator-synchronized": "^0.3.0", + "deptree": "^1.0.0", + "escape-string-regexp": "^1.0.5", + "event-to-promise": "^0.8.0", + "exec-promise": "^0.7.0", + "execa": "^0.9.0", + "express": "^4.16.2", + "express-session": "^1.15.6", + "fatfs": "^0.10.4", + "from2": "^2.3.0", + "fs-extra": "^5.0.0", + "golike-defer": "^0.4.1", + "hashy": "^0.6.2", + "helmet": "^3.9.0", + "highland": "^2.11.1", + "http-proxy": "^1.16.2", + "http-request-plus": "^0.5.0", + "http-server-plus": "^0.8.0", + "human-format": "^0.10.0", + "is-redirect": "^1.0.0", + "js-yaml": "^3.10.0", + 
"json-rpc-peer": "0.14", + "json5": "^0.5.1", + "julien-f-source-map-support": "0.1.0", + "julien-f-unzip": "^0.2.1", + "kindof": "^2.0.0", + "level": "^2.1.1", + "level-party": "^3.0.4", + "level-sublevel": "^6.6.1", + "limit-concurrency-decorator": "^0.2.0", + "lodash": "^4.17.4", + "make-error": "^1", + "micromatch": "^3.1.4", + "minimist": "^1.2.0", + "moment-timezone": "^0.5.14", + "ms": "^2.1.1", + "multikey-hash": "^1.0.4", + "ndjson": "^1.5.0", + "parse-pairs": "^0.2.2", + "partial-stream": "0.0.0", + "passport": "^0.4.0", + "passport-local": "^1.0.0", + "pretty-format": "^22.0.3", + "promise-toolbox": "^0.9.5", + "proxy-agent": "^2.1.0", + "pug": "^2.0.0-rc.4", + "pw": "^0.0.4", + "redis": "^2.8.0", + "schema-inspector": "^1.6.8", + "semver": "^5.4.1", + "serve-static": "^1.13.1", + "split-lines": "^1.1.0", + "stack-chain": "^2.0.0", + "tar-stream": "^1.5.5", + "through2": "^2.0.3", + "tmp": "^0.0.33", + "uuid": "^3.0.1", + "value-matcher": "^0.0.0", + "ws": "^4.0.0", + "xen-api": "^0.16.4", + "xml2js": "^0.4.19", + "xo-acl-resolver": "^0.2.3", + "xo-collection": "^0.4.1", + "xo-common": "^0.1.1", + "xo-remote-parser": "^0.3", + "xo-vmdk-to-vhd": "0.0.12" + }, + "devDependencies": { + "babel-cli": "^6.26.0", + "babel-core": "^6.26.0", + "babel-eslint": "^8.0.3", + "babel-plugin-lodash": "^3.3.2", + "babel-plugin-transform-decorators-legacy": "^1.3.4", + "babel-plugin-transform-runtime": "^6.23.0", + "babel-preset-env": "^1.6.1", + "babel-preset-stage-0": "^6.24.1", + "cross-env": "^5.1.3", + "eslint": "^4.13.1", + "eslint-config-standard": "^11.0.0-beta.0", + "eslint-plugin-import": "^2.8.0", + "eslint-plugin-node": "^5.2.1", + "eslint-plugin-promise": "^3.6.0", + "eslint-plugin-standard": "^3.0.1", + "husky": "^0.14.3", + "index-modules": "^0.3.0", + "jest": "^22.0.3", + "rimraf": "^2.6.2" + }, + "scripts": { + "build": "cross-env NODE_ENV=production babel --source-maps --out-dir=dist/ src/", + "clean": "rimraf dist/", + "commitmsg": "yarn run test", + 
"dev": "cross-env NODE_ENV=development babel --watch --source-maps --out-dir=dist/ src/", + "dev-test": "jest --bail --watch", + "prebuild": "index-modules src/api src/xapi/mixins src/xo-mixins && yarn run clean", + "predev": "yarn run prebuild", + "prepublishOnly": "yarn run build", + "pretest": "eslint --ignore-path .gitignore --fix .", + "start": "node bin/xo-server", + "test": "jest" + }, + "babel": { + "plugins": [ + "lodash", + "transform-decorators-legacy", + "transform-runtime" + ], + "presets": [ + [ + "env", + { + "targets": { + "node": 4 + } + } + ], + "stage-0" + ] + }, + "jest": { + "collectCoverage": true, + "testEnvironment": "node", + "roots": [ + "/src" + ], + "testRegex": "\\.spec\\.js$" + } +} diff --git a/packages/xo-server/sample.config.yaml b/packages/xo-server/sample.config.yaml new file mode 100644 index 000000000..7dba11f02 --- /dev/null +++ b/packages/xo-server/sample.config.yaml @@ -0,0 +1,149 @@ +# BE *VERY* CAREFUL WHEN EDITING! +# YAML FILES ARE SUPER SUPER SENSITIVE TO MISTAKES IN WHITESPACE OR ALIGNMENT! +# visit http://www.yamllint.com/ to validate this file as needed + +#===================================================================== + +# Example XO-Server configuration. +# +# This file is automatically looking for at the following places: +# - `$HOME/.config/xo-server/config.yaml` +# - `/etc/xo-server/config.yaml` +# +# The first entries have priority. +# +# Note: paths are relative to the configuration file. + +#===================================================================== + +# It may be necessary to run XO-Server as a privileged user (e.g. +# `root`) for instance to allow the HTTP server to listen on a +# [privileged ports](http://www.w3.org/Daemon/User/Installation/PrivilegedPorts.html). +# +# To avoid security issues, XO-Server can drop its privileges by +# changing the user and the group is running with. +# +# Note: XO-Server will change them just after reading the +# configuration. 
+ +# User to run XO-Server as. +# +# Note: The user can be specified using either its name or its numeric +# identifier. +# +# Default: undefined +#user: 'nobody' + +# Group to run XO-Server as. +# +# Note: The group can be specified using either its name or its +# numeric identifier. +# +# Default: undefined +#group: 'nogroup' + +#===================================================================== + +# Configuration of the embedded HTTP server. +http: + + # Hosts & ports on which to listen. + # + # By default, the server listens on [::]:80. + listen: + # Basic HTTP. + - + # Address on which the server is listening on. + # + # Sets it to 'localhost' for IP to listen only on the local host. + # + # Default: all IPv6 addresses if available, otherwise all IPv4 + # addresses. + #hostname: 'localhost' + + # Port on which the server is listening on. + # + # Default: undefined + port: 80 + + # Instead of `host` and `port` a path to a UNIX socket may be + # specified (overrides `host` and `port`). + # + # Default: undefined + #socket: './http.sock' + + # Basic HTTPS. + # + # You can find the list of possible options there https://nodejs.org/docs/latest/api/tls.html#tls.createServer + # - + # # The only difference is the presence of the certificate and the + # # key. + # # + # #hostname: '127.0.0.1' + # port: 443 + + # # File containing the certificate (PEM format). + # + # # If a chain of certificates authorities is needed, you may bundle + # # them directly in the certificate. + # # + # # Note: the order of certificates does matter, your certificate + # # should come first followed by the certificate of the above + # # certificate authority up to the root. + # # + # # Default: undefined + # cert: './certificate.pem' + + # # File containing the private key (PEM format). + # # + # # If the key is encrypted, the passphrase will be asked at + # # server startup. 
+ # # + # # Default: undefined + # key: './key.pem' + + # If set to true, all HTTP traffic will be redirected to the first + # HTTPs configuration. + #redirectToHttps: true + + # List of files/directories which will be served. + mounts: + #'/': '/path/to/xo-web/dist/' + + # List of proxied URLs (HTTP & WebSockets). + proxies: + # '/any/url': 'http://localhost:54722' + +# HTTP proxy configuration used by xo-server to fetch resources on the +# Internet. +# +# See: https://github.com/TooTallNate/node-proxy-agent#maps-proxy-protocols-to-httpagent-implementations +#httpProxy: 'http://jsmith:qwerty@proxy.lan:3128' + +#===================================================================== + +# Connection to the Redis server. +redis: + # Unix sockets can be used + # + # Default: undefined + #socket: /var/run/redis/redis.sock + + # Syntax: redis://[db[:password]@]hostname[:port][/db-number] + # + # Default: redis://localhost:6379/0 + #uri: redis://redis.company.lan/42 + + # List of aliased commands. + # + # See http://redis.io/topics/security#disabling-of-specific-commands + #renameCommands: + # del: '3dda29ad-3015-44f9-b13b-fa570de92489' + # srem: '3fd758c9-5610-4e9d-a058-dbf4cb6d8bf0' + + +# Directory containing the database of XO. +# Currently used for logs. 
+# +# Default: '/var/lib/xo-server/data' +#datadir: '/var/lib/xo-server/data' diff --git a/packages/xo-server/signin.pug b/packages/xo-server/signin.pug new file mode 100644 index 000000000..1eed0ddd2 --- /dev/null +++ b/packages/xo-server/signin.pug @@ -0,0 +1,50 @@ +doctype html +html + head + meta(charset = 'utf-8') + meta(http-equiv = 'X-UA-Compatible' content = 'IE=edge,chrome=1') + meta(name = 'viewport' content = 'width=device-width, initial-scale=1.0') + title Xen Orchestra + meta(name = 'author' content = 'Vates SAS') + link(rel = 'stylesheet' href = 'index.css') + body(style = 'display: flex; height: 100vh;') + div(style = 'margin: auto; width: 20em;') + div.mb-2(style = 'display: flex;') + img(src = 'assets/logo.png' style = 'margin: auto;') + h2.text-xs-center.mb-2 Xen Orchestra + form(action = 'signin/local' method = 'post') + fieldset + if error + p.text-danger #{error} + .input-group.mb-1 + span.input-group-addon + i.xo-icon-user.fa-fw + input.form-control( + name = 'username' + type = 'text' + placeholder = 'Username' + required + ) + .input-group.mb-1 + span.input-group-addon + i.fa.fa-key.fa-fw + input.form-control( + name = 'password' + type = 'password' + placeholder = 'Password' + required + ) + .checkbox + label + input( + name = 'remember-me' + type = 'checkbox' + ) + |   + | Remember me + div + button.btn.btn-block.btn-info + i.fa.fa-sign-in + | Sign in + each label, id in strategies + div: a(href = 'signin/' + id) Sign in with #{label} diff --git a/packages/xo-server/src/api/.index-modules b/packages/xo-server/src/api/.index-modules new file mode 100644 index 000000000..e69de29bb diff --git a/packages/xo-server/src/api/acl.js b/packages/xo-server/src/api/acl.js new file mode 100644 index 000000000..840e9165b --- /dev/null +++ b/packages/xo-server/src/api/acl.js @@ -0,0 +1,49 @@ +export async function get () { + return /* await */ this.getAllAcls() +} + +get.permission = 'admin' + +get.description = 'get existing ACLs' + +// 
------------------------------------------------------------------- + +export async function getCurrentPermissions () { + return /* await */ this.getPermissionsForUser(this.session.get('user_id')) +} + +getCurrentPermissions.permission = '' + +getCurrentPermissions.description = 'get (explicit) permissions by object for the current user' + +// ------------------------------------------------------------------- + +export async function add ({subject, object, action}) { + await this.addAcl(subject, object, action) +} + +add.permission = 'admin' + +add.params = { + subject: { type: 'string' }, + object: { type: 'string' }, + action: { type: 'string' }, +} + +add.description = 'add a new ACL entry' + +// ------------------------------------------------------------------- + +export async function remove ({subject, object, action}) { + await this.removeAcl(subject, object, action) +} + +remove.permission = 'admin' + +remove.params = { + subject: { type: 'string' }, + object: { type: 'string' }, + action: { type: 'string' }, +} + +remove.description = 'remove an existing ACL entry' diff --git a/packages/xo-server/src/api/backup.js b/packages/xo-server/src/api/backup.js new file mode 100644 index 000000000..df6162907 --- /dev/null +++ b/packages/xo-server/src/api/backup.js @@ -0,0 +1,98 @@ +import archiver from 'archiver' +import { basename } from 'path' +import { format } from 'json-rpc-peer' +import { forEach } from 'lodash' + +// =================================================================== + +export function list ({ remote }) { + return this.listVmBackups(remote) +} + +list.permission = 'admin' +list.params = { + remote: { type: 'string' }, +} + +// ------------------------------------------------------------------- + +export function scanDisk ({ remote, disk }) { + return this.scanDiskBackup(remote, disk) +} + +scanDisk.permission = 'admin' +scanDisk.params = { + remote: { type: 'string' }, + disk: { type: 'string' }, +} + +// 
------------------------------------------------------------------- + +export function scanFiles ({ remote, disk, partition, path }) { + return this.scanFilesInDiskBackup(remote, disk, partition, path) +} + +scanFiles.permission = 'admin' +scanFiles.params = { + remote: { type: 'string' }, + disk: { type: 'string' }, + partition: { type: 'string', optional: true }, + path: { type: 'string' }, +} + +// ------------------------------------------------------------------- + +function handleFetchFiles (req, res, { remote, disk, partition, paths, format: archiveFormat }) { + this.fetchFilesInDiskBackup(remote, disk, partition, paths).then(files => { + res.setHeader('content-disposition', 'attachment') + res.setHeader('content-type', 'application/octet-stream') + + const nFiles = paths.length + + // Send lone file directly + if (nFiles === 1) { + files[0].pipe(res) + return + } + + const archive = archiver(archiveFormat) + archive.on('error', error => { + console.error(error) + res.end(format.error(0, error)) + }) + + forEach(files, file => { + archive.append(file, { name: basename(file.path) }) + }) + archive.finalize() + + archive.pipe(res) + }).catch(error => { + console.error(error) + res.writeHead(500) + res.end(format.error(0, error)) + }) +} + +export async function fetchFiles ({ format = 'zip', ...params }) { + const fileName = params.paths.length > 1 + ? 
`restore_${new Date().toJSON()}.${format}` + : basename(params.paths[0]) + + return this.registerHttpRequest(handleFetchFiles, { ...params, format }, { + suffix: encodeURI(`/${fileName}`), + }).then(url => ({ $getFrom: url })) +} + +fetchFiles.permission = 'admin' +fetchFiles.params = { + remote: { type: 'string' }, + disk: { type: 'string' }, + format: { type: 'string', optional: true }, + partition: { type: 'string', optional: true }, + paths: { + type: 'array', + items: { type: 'string' }, + minLength: 1, + }, +} diff --git a/packages/xo-server/src/api/disk.js b/packages/xo-server/src/api/disk.js new file mode 100644 index 000000000..69f0a8b5a --- /dev/null +++ b/packages/xo-server/src/api/disk.js @@ -0,0 +1,69 @@ +import { parseSize } from '../utils' +import { unauthorized } from 'xo-common/api-errors' + +// =================================================================== + +export async function create ({ name, size, sr, vm, bootable, position, mode }) { + const attach = vm !== undefined + + let resourceSet + if (attach && (resourceSet = vm.resourceSet) != null) { + await this.checkResourceSetConstraints(resourceSet, this.user.id, [ sr.id ]) + await this.allocateLimitsInResourceSet({ disk: size }, resourceSet) + } else if (!(await this.hasPermissions(this.user.id, [ [ sr.id, 'administrate' ] ]))) { + throw unauthorized() + } + + const xapi = this.getXapi(sr) + const vdi = await xapi.createVdi({ + name_label: name, + size, + sr: sr._xapiId, + }) + + if (attach) { + await xapi.createVbd({ + bootable, + mode, + userdevice: position, + vdi: vdi.$id, + vm: vm._xapiId, + }) + } + + return vdi.$id +} + +create.description = 'create a new disk on a SR' + +create.params = { + name: { type: 'string' }, + size: { type: ['integer', 'string'] }, + sr: { type: 'string' }, + vm: { type: 'string', optional: true }, + bootable: { type: 'boolean', optional: true }, + mode: { type: 'string', optional: true }, + position: { type: 'string', optional: true }, +} + 
+create.resolve = { + vm: ['vm', 'VM', 'administrate'], + sr: ['sr', 'SR', false], +} + +// ------------------------------------------------------------------- + +export async function resize ({ vdi, size }) { + await this.getXapi(vdi).resizeVdi(vdi._xapiId, parseSize(size)) +} + +resize.description = 'resize an existing VDI' + +resize.params = { + id: { type: 'string' }, + size: { type: ['integer', 'string'] }, +} + +resize.resolve = { + vdi: ['id', ['VDI', 'VDI-snapshot'], 'administrate'], +} diff --git a/packages/xo-server/src/api/docker.js b/packages/xo-server/src/api/docker.js new file mode 100644 index 000000000..0b79239f1 --- /dev/null +++ b/packages/xo-server/src/api/docker.js @@ -0,0 +1,60 @@ +export async function register ({vm}) { + await this.getXapi(vm).registerDockerContainer(vm._xapiId) +} +register.description = 'Register the VM for Docker management' + +register.params = { + vm: { type: 'string' }, +} + +register.resolve = { + vm: ['vm', 'VM', 'administrate'], +} + +// ----------------------------------------------------------------------------- + +export async function deregister ({vm}) { + await this.getXapi(vm).unregisterDockerContainer(vm._xapiId) +} +deregister.description = 'Deregister the VM for Docker management' + +deregister.params = { + vm: { type: 'string' }, +} + +deregister.resolve = { + vm: ['vm', 'VM', 'administrate'], +} + +// ----------------------------------------------------------------------------- + +export async function start ({vm, container}) { + await this.getXapi(vm).startDockerContainer(vm._xapiId, container) +} + +export async function stop ({vm, container}) { + await this.getXapi(vm).stopDockerContainer(vm._xapiId, container) +} + +export async function restart ({vm, container}) { + await this.getXapi(vm).restartDockerContainer(vm._xapiId, container) +} + +export async function pause ({vm, container}) { + await this.getXapi(vm).pauseDockerContainer(vm._xapiId, container) +} + +export async function unpause ({vm, 
container}) { + await this.getXapi(vm).unpauseDockerContainer(vm._xapiId, container) +} + +for (const fn of [start, stop, restart, pause, unpause]) { + fn.params = { + vm: { type: 'string' }, + container: { type: 'string' }, + } + + fn.resolve = { + vm: ['vm', 'VM', 'operate'], + } +} diff --git a/packages/xo-server/src/api/group.js b/packages/xo-server/src/api/group.js new file mode 100644 index 000000000..ec208d3ff --- /dev/null +++ b/packages/xo-server/src/api/group.js @@ -0,0 +1,91 @@ +export async function create ({name}) { + return (await this.createGroup({name})).id +} + +create.description = 'creates a new group' +create.permission = 'admin' +create.params = { + name: {type: 'string'}, +} + +// ------------------------------------------------------------------- + +// Deletes an existing group. +async function delete_ ({id}) { + await this.deleteGroup(id) +} + +// delete is not a valid identifier. +export {delete_ as delete} + +delete_.description = 'deletes an existing group' +delete_.permission = 'admin' +delete_.params = { + id: {type: 'string'}, +} + +// ------------------------------------------------------------------- + +export async function getAll () { + return /* await */ this.getAllGroups() +} + +getAll.description = 'returns all the existing group' +getAll.permission = 'admin' + +// ------------------------------------------------------------------- + +// sets group.users with an array of user ids +export async function setUsers ({id, userIds}) { + await this.setGroupUsers(id, userIds) +} + +setUsers.description = 'sets the users belonging to a group' +setUsers.permission = 'admin' +setUsers.params = { + id: {type: 'string'}, + userIds: {}, +} + +// ------------------------------------------------------------------- + +// adds the user id to group.users +export async function addUser ({id, userId}) { + await this.addUserToGroup(userId, id) +} + +addUser.description = 'adds a user to a group' +addUser.permission = 'admin' +addUser.params = { + id: 
{type: 'string'}, + userId: {type: 'string'}, +} + +// ------------------------------------------------------------------- + +// remove the user id from group.users +export async function removeUser ({id, userId}) { + await this.removeUserFromGroup(userId, id) +} + +// ------------------------------------------------------------------- + +removeUser.description = 'removes a user from a group' +removeUser.permission = 'admin' +removeUser.params = { + id: {type: 'string'}, + userId: {type: 'string'}, +} + +// ------------------------------------------------------------------- + +export async function set ({id, name}) { + await this.updateGroup(id, {name}) +} + +set.description = 'changes the properties of an existing group' +set.permission = 'admin' +set.params = { + id: { type: 'string' }, + name: { type: 'string', optional: true }, +} diff --git a/packages/xo-server/src/api/host.js b/packages/xo-server/src/api/host.js new file mode 100644 index 000000000..08ccef37d --- /dev/null +++ b/packages/xo-server/src/api/host.js @@ -0,0 +1,295 @@ + +import {format} from 'json-rpc-peer' + +// =================================================================== + +export function set ({ + host, + + // TODO: use camel case. + name_label: nameLabel, + name_description: nameDescription, +}) { + return this.getXapi(host).setHostProperties(host._xapiId, { + nameLabel, + nameDescription, + }) +} + +set.description = 'changes the properties of an host' + +set.params = { + id: { type: 'string' }, + name_label: { + type: 'string', + optional: true, + }, + name_description: { + type: 'string', + optional: true, + }, +} + +set.resolve = { + host: ['id', 'host', 'administrate'], +} + +// ------------------------------------------------------------------- + +// FIXME: set force to false per default when correctly implemented in +// UI. 
+export function restart ({ host, force = true }) { + return this.getXapi(host).rebootHost(host._xapiId, force) +} + +restart.description = 'restart the host' + +restart.params = { + id: { type: 'string' }, + force: { + type: 'boolean', + optional: true, + }, +} + +restart.resolve = { + host: ['id', 'host', 'operate'], +} + +// ------------------------------------------------------------------- + +export function restartAgent ({host}) { + return this.getXapi(host).restartHostAgent(host._xapiId) +} + +restartAgent.description = 'restart the Xen agent on the host' + +restartAgent.params = { + id: { type: 'string' }, +} + +restartAgent.resolve = { + host: ['id', 'host', 'administrate'], +} + +// TODO: remove deprecated alias +export { restartAgent as restart_agent } // eslint-disable-line camelcase + +// ------------------------------------------------------------------- + +export function start ({host}) { + return this.getXapi(host).powerOnHost(host._xapiId) +} + +start.description = 'start the host' + +start.params = { + id: { type: 'string' }, +} + +start.resolve = { + host: ['id', 'host', 'operate'], +} + +// ------------------------------------------------------------------- + +export function stop ({host}) { + return this.getXapi(host).shutdownHost(host._xapiId) +} + +stop.description = 'stop the host' + +stop.params = { + id: { type: 'string' }, +} + +stop.resolve = { + host: ['id', 'host', 'operate'], +} + +// ------------------------------------------------------------------- + +export function detach ({host}) { + return this.getXapi(host).ejectHostFromPool(host._xapiId) +} + +detach.description = 'eject the host of a pool' + +detach.params = { + id: { type: 'string' }, +} + +detach.resolve = { + host: ['id', 'host', 'administrate'], +} + +// ------------------------------------------------------------------- + +export function enable ({host}) { + return this.getXapi(host).enableHost(host._xapiId) +} + +enable.description = 'enable to create VM on the host' + 
+enable.params = { + id: { type: 'string' }, +} + +enable.resolve = { + host: ['id', 'host', 'administrate'], +} + +// ------------------------------------------------------------------- + +export function disable ({host}) { + return this.getXapi(host).disableHost(host._xapiId) +} + +disable.description = 'disable to create VM on the hsot' + +disable.params = { + id: { type: 'string' }, +} + +disable.resolve = { + host: ['id', 'host', 'administrate'], +} + +// ------------------------------------------------------------------- + +export function forget ({host}) { + return this.getXapi(host).forgetHost(host._xapiId) +} + +forget.description = 'remove the host record from XAPI database' + +forget.params = { + id: { type: 'string' }, +} + +forget.resolve = { + host: ['id', 'host', 'administrate'], +} + +// ------------------------------------------------------------------- + +// Returns an array of missing new patches in the host +// Returns an empty array if up-to-date +// Throws an error if the host is not running the latest XS version +export function listMissingPatches ({host}) { + return this.getXapi(host).listMissingPoolPatchesOnHost(host._xapiId) +} + +listMissingPatches.description = 'return an array of missing new patches in the host' + +listMissingPatches.params = { + host: { type: 'string' }, +} + +listMissingPatches.resolve = { + host: ['host', 'host', 'view'], +} + +// ------------------------------------------------------------------- + +export function installPatch ({host, patch: patchUuid}) { + return this.getXapi(host).installPoolPatchOnHost(patchUuid, host._xapiId) +} + +installPatch.description = 'install a patch on an host' + +installPatch.params = { + host: { type: 'string' }, + patch: { type: 'string' }, +} + +installPatch.resolve = { + host: ['host', 'host', 'administrate'], +} + +// ------------------------------------------------------------------- + +export function installAllPatches ({host}) { + return 
this.getXapi(host).installAllPoolPatchesOnHost(host._xapiId) +} + +installAllPatches.description = 'install all the missing patches on a host' + +installAllPatches.params = { + host: { type: 'string' }, +} + +installAllPatches.resolve = { + host: ['host', 'host', 'administrate'], +} + +// ------------------------------------------------------------------- + +export function emergencyShutdownHost ({host}) { + return this.getXapi(host).emergencyShutdownHost(host._xapiId) +} + +emergencyShutdownHost.description = 'suspend all VMs and shutdown host' + +emergencyShutdownHost.params = { + host: { type: 'string' }, +} + +emergencyShutdownHost.resolve = { + host: ['host', 'host', 'administrate'], +} + +// ------------------------------------------------------------------- + +export function stats ({host, granularity}) { + return this.getXapiHostStats(host, granularity) +} + +stats.description = 'returns statistic of the host' + +stats.params = { + host: { type: 'string' }, + granularity: { + type: 'string', + optional: true, + }, +} + +stats.resolve = { + host: ['host', 'host', 'view'], +} + +// ------------------------------------------------------------------- + +async function handleInstallSupplementalPack (req, res, { hostId }) { + const xapi = this.getXapi(hostId) + + // Timeout seems to be broken in Node 4. 
+ // See https://github.com/nodejs/node/issues/3319 + req.setTimeout(43200000) // 12 hours + req.length = req.headers['content-length'] + + try { + await xapi.installSupplementalPack(req, { hostId }) + res.end(format.response(0)) + } catch (e) { + res.writeHead(500) + res.end(format.error(0, new Error(e.message))) + } +} + +export async function installSupplementalPack ({host}) { + return { + $sendTo: (await this.registerHttpRequest(handleInstallSupplementalPack, { hostId: host.id })), + } +} + +installSupplementalPack.description = 'installs supplemental pack from ISO file' + +installSupplementalPack.params = { + host: { type: 'string' }, +} + +installSupplementalPack.resolve = { + host: ['host', 'host', 'admin'], +} diff --git a/packages/xo-server/src/api/ip-pool.js b/packages/xo-server/src/api/ip-pool.js new file mode 100644 index 000000000..6a84a8c23 --- /dev/null +++ b/packages/xo-server/src/api/ip-pool.js @@ -0,0 +1,44 @@ +import { unauthorized } from 'xo-common/api-errors' + +export function create (props) { + return this.createIpPool(props) +} + +create.permission = 'admin' +create.description = 'Creates a new ipPool' + +// ------------------------------------------------------------------- + +function delete_ ({ id }) { + return this.deleteIpPool(id) +} +export { delete_ as delete } + +delete_.permission = 'admin' +delete_.description = 'Delete an ipPool' + +// ------------------------------------------------------------------- + +export function getAll (params) { + const { user } = this + + if (!user) { + throw unauthorized() + } + + return this.getAllIpPools(user.permission === 'admin' + ? 
params && params.userId + : user.id + ) +} + +getAll.description = 'List all ipPools' + +// ------------------------------------------------------------------- + +export function set ({ id, ...props }) { + return this.updateIpPool(id, props) +} + +set.permission = 'admin' +set.description = 'Allow to modify an existing ipPool' diff --git a/packages/xo-server/src/api/job.js b/packages/xo-server/src/api/job.js new file mode 100644 index 000000000..1e0aefd3c --- /dev/null +++ b/packages/xo-server/src/api/job.js @@ -0,0 +1,110 @@ +// FIXME so far, no acls for jobs + +export async function getAll () { + return /* await */ this.getAllJobs() +} + +getAll.permission = 'admin' +getAll.description = 'Gets all available jobs' + +export async function get (id) { + return /* await */ this.getJob(id) +} + +get.permission = 'admin' +get.description = 'Gets an existing job' +get.params = { + id: {type: 'string'}, +} + +export async function create ({job}) { + if (!job.userId) { + job.userId = this.session.get('user_id') + } + + return (await this.createJob(job)).id +} + +create.permission = 'admin' +create.description = 'Creates a new job from description object' +create.params = { + job: { + type: 'object', + properties: { + userId: {type: 'string', optional: true}, + name: {type: 'string', optional: true}, + timeout: {type: 'number', optional: true}, + type: {type: 'string'}, + key: {type: 'string'}, + method: {type: 'string'}, + paramsVector: { + type: 'object', + properties: { + type: {type: 'string'}, + items: { + type: 'array', + items: { + type: 'object', + }, + }, + }, + optional: true, + }, + }, + }, +} + +export async function set ({job}) { + await this.updateJob(job) +} + +set.permission = 'admin' +set.description = 'Modifies an existing job from a description object' +set.params = { + job: { + type: 'object', + properties: { + id: {type: 'string'}, + name: {type: 'string', optional: true}, + timeout: {type: ['number', 'null'], optional: true}, + type: {type: 'string', 
optional: true}, + key: {type: 'string', optional: true}, + method: {type: 'string', optional: true}, + paramsVector: { + type: 'object', + properties: { + type: {type: 'string'}, + items: { + type: 'array', + items: { + type: 'object', + }, + }, + }, + optional: true, + }, + }, + }, +} + +async function delete_ ({id}) { + await this.removeJob(id) +} + +delete_.permission = 'admin' +delete_.description = 'Deletes an existing job' +delete_.params = { + id: {type: 'string'}, +} + +export {delete_ as delete} + +export async function runSequence ({idSequence}) { + await this.runJobSequence(idSequence) +} + +runSequence.permission = 'admin' +runSequence.description = 'Runs jobs sequentially, in the provided order' +runSequence.params = { + idSequence: {type: 'array', items: {type: 'string'}}, +} diff --git a/packages/xo-server/src/api/log.js b/packages/xo-server/src/api/log.js new file mode 100644 index 000000000..3d3e79a43 --- /dev/null +++ b/packages/xo-server/src/api/log.js @@ -0,0 +1,38 @@ +export async function get ({namespace}) { + const logger = await this.getLogger(namespace) + + return new Promise((resolve, reject) => { + const logs = {} + + logger.createReadStream() + .on('data', (data) => { + logs[data.key] = data.value + }) + .on('end', () => { + resolve(logs) + }) + .on('error', reject) + }) +} + +get.description = 'returns logs list for one namespace' +get.params = { + namespace: { type: 'string' }, +} +get.permission = 'admin' + +// ------------------------------------------------------------------- + +async function delete_ ({namespace, id}) { + const logger = await this.getLogger(namespace) + logger.del(id) +} + +delete_.description = 'deletes one or several logs from a namespace' +delete_.params = { + id: { type: [ 'array', 'string' ] }, + namespace: { type: 'string' }, +} +delete_.permission = 'admin' + +export {delete_ as delete} diff --git a/packages/xo-server/src/api/message.js b/packages/xo-server/src/api/message.js new file mode 100644 index 
000000000..6e881206d --- /dev/null +++ b/packages/xo-server/src/api/message.js @@ -0,0 +1,12 @@ +async function delete_ ({ message }) { + await this.getXapi(message).call('message.destroy', message._xapiRef) +} +export {delete_ as delete} + +delete_.params = { + id: { type: 'string' }, +} + +delete_.resolve = { + message: ['id', 'message', 'administrate'], +} diff --git a/packages/xo-server/src/api/network.js b/packages/xo-server/src/api/network.js new file mode 100644 index 000000000..ca90e9f56 --- /dev/null +++ b/packages/xo-server/src/api/network.js @@ -0,0 +1,120 @@ +import { mapToArray } from '../utils' + +export function getBondModes () { + return ['balance-slb', 'active-backup', 'lacp'] +} + +export async function create ({ pool, name, description, pif, mtu = 1500, vlan = 0 }) { + return this.getXapi(pool).createNetwork({ + name, + description, + pifId: pif && this.getObject(pif, 'PIF')._xapiId, + mtu: +mtu, + vlan: +vlan, + }) +} + +create.params = { + pool: { type: 'string' }, + name: { type: 'string' }, + description: { type: 'string', optional: true }, + pif: { type: 'string', optional: true }, + mtu: { type: ['integer', 'string'], optional: true }, + vlan: { type: ['integer', 'string'], optional: true }, +} + +create.resolve = { + pool: ['pool', 'pool', 'administrate'], +} +create.permission = 'admin' + +// ================================================================= + +export async function createBonded ({ pool, name, description, pifs, mtu = 1500, mac, bondMode }) { + return this.getXapi(pool).createBondedNetwork({ + name, + description, + pifIds: mapToArray(pifs, pif => + this.getObject(pif, 'PIF')._xapiId + ), + mtu: +mtu, + mac, + bondMode, + }) +} + +createBonded.params = { + pool: { type: 'string' }, + name: { type: 'string' }, + description: { type: 'string', optional: true }, + pifs: { + type: 'array', + items: { + type: 'string', + }, + }, + mtu: { type: ['integer', 'string'], optional: true }, + mac: { type: 'string', optional: true }, + 
// RegExp since schema-inspector does not provide a param check based on an enumeration + bondMode: { type: 'string', pattern: new RegExp(`^(${getBondModes().join('|')})$`) }, +} + +createBonded.resolve = { + pool: ['pool', 'pool', 'administrate'], +} +createBonded.permission = 'admin' +createBonded.description = 'Create a bonded network. bondMode can be balance-slb, active-backup or lacp' + +// =================================================================== + +export async function set ({ + network, + + name_description: nameDescription, + name_label: nameLabel, + defaultIsLocked, + id, +}) { + await this.getXapi(network).setNetworkProperties(network._xapiId, { + nameDescription, + nameLabel, + defaultIsLocked, + }) +} + +set.params = { + id: { + type: 'string', + }, + name_label: { + type: 'string', + optional: true, + }, + name_description: { + type: 'string', + optional: true, + }, + defaultIsLocked: { + type: 'boolean', + optional: true, + }, +} + +set.resolve = { + network: ['id', 'network', 'administrate'], +} + +// ================================================================= + +export async function delete_ ({ network }) { + return this.getXapi(network).deleteNetwork(network._xapiId) +} +export {delete_ as delete} + +delete_.params = { + id: { type: 'string' }, +} + +delete_.resolve = { + network: ['id', 'network', 'administrate'], +} diff --git a/packages/xo-server/src/api/pbd.js b/packages/xo-server/src/api/pbd.js new file mode 100644 index 000000000..855a87bbe --- /dev/null +++ b/packages/xo-server/src/api/pbd.js @@ -0,0 +1,49 @@ +// FIXME: too low level, should be removed. 
+ +// =================================================================== +// Delete + +async function delete_ ({PBD}) { + // TODO: check if PBD is attached before + await this.getXapi(PBD).call('PBD.destroy', PBD._xapiRef) +} +export {delete_ as delete} + +delete_.params = { + id: { type: 'string' }, +} + +delete_.resolve = { + PBD: ['id', 'PBD', 'administrate'], +} + +// =================================================================== +// Disconnect + +export async function disconnect ({ pbd }) { + return this.getXapi(pbd).unplugPbd(pbd._xapiId) +} + +disconnect.params = { + id: { type: 'string' }, +} + +disconnect.resolve = { + pbd: ['id', 'PBD', 'administrate'], +} + +// =================================================================== +// Connect + +export async function connect ({PBD}) { + // TODO: check if PBD is attached before + await this.getXapi(PBD).call('PBD.plug', PBD._xapiRef) +} + +connect.params = { + id: { type: 'string' }, +} + +connect.resolve = { + PBD: ['id', 'PBD', 'administrate'], +} diff --git a/packages/xo-server/src/api/pif.js b/packages/xo-server/src/api/pif.js new file mode 100644 index 000000000..198fd0edb --- /dev/null +++ b/packages/xo-server/src/api/pif.js @@ -0,0 +1,93 @@ +// TODO: too low level, move into host. 
+ +import { IPV4_CONFIG_MODES, IPV6_CONFIG_MODES } from '../xapi' + +export function getIpv4ConfigurationModes () { + return IPV4_CONFIG_MODES +} + +export function getIpv6ConfigurationModes () { + return IPV6_CONFIG_MODES +} + +// =================================================================== +// Delete + +async function delete_ ({pif}) { + // TODO: check if PIF is attached before + await this.getXapi(pif).call('PIF.destroy', pif._xapiRef) +} +export {delete_ as delete} + +delete_.params = { + id: { type: 'string' }, +} + +delete_.resolve = { + pif: ['id', 'PIF', 'administrate'], +} + +// =================================================================== +// Disconnect + +export async function disconnect ({pif}) { + // TODO: check if PIF is attached before + await this.getXapi(pif).call('PIF.unplug', pif._xapiRef) +} + +disconnect.params = { + id: { type: 'string' }, +} + +disconnect.resolve = { + pif: ['id', 'PIF', 'administrate'], +} +// =================================================================== +// Connect + +export async function connect ({pif}) { + // TODO: check if PIF is attached before + await this.getXapi(pif).call('PIF.plug', pif._xapiRef) +} + +connect.params = { + id: { type: 'string' }, +} + +connect.resolve = { + pif: ['id', 'PIF', 'administrate'], +} +// =================================================================== +// Reconfigure IP + +export async function reconfigureIp ({ pif, mode = 'DHCP', ip = '', netmask = '', gateway = '', dns = '' }) { + await this.getXapi(pif).call('PIF.reconfigure_ip', pif._xapiRef, mode, ip, netmask, gateway, dns) +} + +reconfigureIp.params = { + id: { type: 'string', optional: true }, + mode: { type: 'string', optional: true }, + ip: { type: 'string', optional: true }, + netmask: { type: 'string', optional: true }, + gateway: { type: 'string', optional: true }, + dns: { type: 'string', optional: true }, +} + +reconfigureIp.resolve = { + pif: ['id', 'PIF', 'administrate'], +} + +// 
=================================================================== + +export async function editPif ({ pif, vlan }) { + await this.getXapi(pif).editPif(pif._xapiId, { vlan }) +} + +editPif.params = { + id: { type: 'string' }, + vlan: { type: ['integer', 'string'] }, +} + +editPif.resolve = { + pif: ['id', 'PIF', 'administrate'], +} diff --git a/packages/xo-server/src/api/plugin.js b/packages/xo-server/src/api/plugin.js new file mode 100644 index 000000000..46c4af4f4 --- /dev/null +++ b/packages/xo-server/src/api/plugin.js @@ -0,0 +1,125 @@ +export async function get () { + return /* await */ this.getPlugins() +} + +get.description = 'returns a list of all installed plugins' + +get.permission = 'admin' + +// ------------------------------------------------------------------- + +export async function configure ({ id, configuration }) { + await this.configurePlugin(id, configuration) +} + +configure.description = 'sets the configuration of a plugin' + +configure.params = { + id: { + type: 'string', + }, + configuration: {}, +} + +configure.permission = 'admin' + +// ------------------------------------------------------------------- + +export async function disableAutoload ({ id }) { + await this.disablePluginAutoload(id) +} + +disableAutoload.description = '' + +disableAutoload.params = { + id: { + type: 'string', + }, +} + +disableAutoload.permission = 'admin' + +// ------------------------------------------------------------------- + +export async function enableAutoload ({ id }) { + await this.enablePluginAutoload(id) +} + +enableAutoload.description = 'enables a plugin, allowing it to be loaded' + +enableAutoload.params = { + id: { + type: 'string', + }, +} + +enableAutoload.permission = 'admin' + +// ------------------------------------------------------------------- + +export async function load ({ id }) { + await this.loadPlugin(id) +} + +load.description = 'loads a plugin' + +load.params = { + id: { + type: 'string', + }, +} + +load.permission = 'admin' + 
+// ------------------------------------------------------------------- + +export async function unload ({ id }) { + await this.unloadPlugin(id) +} + +unload.description = 'unloads a plugin' + +unload.params = { + id: { + type: 'string', + }, +} + +unload.permission = 'admin' + +// ------------------------------------------------------------------- + +export async function purgeConfiguration ({ id }) { + await this.purgePluginConfiguration(id) +} + +purgeConfiguration.description = 'removes a plugin configuration' + +purgeConfiguration.params = { + id: { + type: 'string', + }, +} + +purgeConfiguration.permission = 'admin' + +// --------------------------------------------------------------------- + +export async function test ({ id, data }) { + await this.testPlugin(id, data) +} + +test.description = 'Test a plugin with its current configuration' + +test.params = { + id: { + type: 'string', + }, + data: { + optional: true, + }, +} + +test.permission = 'admin' + +// --------------------------------------------------------------------- diff --git a/packages/xo-server/src/api/pool.js b/packages/xo-server/src/api/pool.js new file mode 100644 index 000000000..4802bbd44 --- /dev/null +++ b/packages/xo-server/src/api/pool.js @@ -0,0 +1,230 @@ +import { format } from 'json-rpc-peer' +import { differenceBy } from 'lodash' +import { mapToArray } from '../utils' + +// =================================================================== + +export async function set ({ + pool, + + // TODO: use camel case. 
+ name_description: nameDescription, + name_label: nameLabel, +}) { + await this.getXapi(pool).setPoolProperties({ + nameDescription, + nameLabel, + }) +} + +set.params = { + id: { + type: 'string', + }, + name_label: { + type: 'string', + optional: true, + }, + name_description: { + type: 'string', + optional: true, + }, +} + +set.resolve = { + pool: ['id', 'pool', 'administrate'], +} + +// ------------------------------------------------------------------- + +export async function setDefaultSr ({ sr }) { + await this.hasPermissions(this.user.id, [ [ sr.$pool, 'administrate' ] ]) + + await this.getXapi(sr).setDefaultSr(sr._xapiId) +} + +setDefaultSr.permission = '' // signed in + +setDefaultSr.params = { + sr: { + type: 'string', + }, +} + +setDefaultSr.resolve = { + sr: ['sr', 'SR'], +} + +// ------------------------------------------------------------------- + +export async function setPoolMaster ({ host }) { + await this.hasPermissions(this.user.id, [ [ host.$pool, 'administrate' ] ]) + + await this.getXapi(host).setPoolMaster(host._xapiId) +} + +setPoolMaster.params = { + host: { + type: 'string', + }, +} + +setPoolMaster.resolve = { + host: ['host', 'host'], +} + +// ------------------------------------------------------------------- + +export async function installPatch ({pool, patch: patchUuid}) { + await this.getXapi(pool).installPoolPatchOnAllHosts(patchUuid) +} + +installPatch.params = { + pool: { + type: 'string', + }, + patch: { + type: 'string', + }, +} + +installPatch.resolve = { + pool: ['pool', 'pool', 'administrate'], +} +// ------------------------------------------------------------------- + +export async function installAllPatches ({ pool }) { + await this.getXapi(pool).installAllPoolPatchesOnAllHosts() +} + +installAllPatches.params = { + pool: { + type: 'string', + }, +} + +installAllPatches.resolve = { + pool: ['pool', 'pool', 'administrate'], +} + +installAllPatches.description = 'Install automatically all patches for every hosts of a pool' 
+ +// ------------------------------------------------------------------- + +async function handlePatchUpload (req, res, {pool}) { + const contentLength = req.headers['content-length'] + if (!contentLength) { + res.writeHead(411) + res.end('Content length is mandatory') + return + } + + await this.getXapi(pool).uploadPoolPatch(req, contentLength) +} + +export async function uploadPatch ({pool}) { + return { + $sendTo: await this.registerHttpRequest(handlePatchUpload, {pool}), + } +} + +uploadPatch.params = { + pool: { type: 'string' }, +} + +uploadPatch.resolve = { + pool: ['pool', 'pool', 'administrate'], +} + +// Compatibility +// +// TODO: remove when no longer used in xo-web +export {uploadPatch as patch} + +// ------------------------------------------------------------------- + +export async function mergeInto ({ source, target, force }) { + const sourceHost = this.getObject(source.master) + const sourcePatches = sourceHost.patches + const targetPatches = this.getObject(target.master).patches + const counterDiff = differenceBy(sourcePatches, targetPatches, 'name') + + if (counterDiff.length > 0) { + throw new Error('host has patches that are not applied on target pool') + } + + const diff = differenceBy(targetPatches, sourcePatches, 'name') + + // TODO: compare UUIDs + await this.getXapi(source).installSpecificPatchesOnHost( + mapToArray(diff, 'name'), + sourceHost._xapiId + ) + + await this.mergeXenPools(source._xapiId, target._xapiId, force) +} + +mergeInto.params = { + force: { type: 'boolean', optional: true }, + source: { type: 'string' }, + target: { type: 'string' }, +} + +mergeInto.resolve = { + source: ['source', 'pool', 'administrate'], + target: ['target', 'pool', 'administrate'], +} + +// ------------------------------------------------------------------- + +export async function getLicenseState ({pool}) { + return this.getXapi(pool).call( + 'pool.get_license_state', + pool._xapiId.$ref + ) +} + +getLicenseState.params = { + pool: { + type: 
'string', + }, +} + +getLicenseState.resolve = { + pool: ['pool', 'pool', 'administrate'], +} + +// ------------------------------------------------------------------- + +async function handleInstallSupplementalPack (req, res, { poolId }) { + const xapi = this.getXapi(poolId) + + // Timeout seems to be broken in Node 4. + // See https://github.com/nodejs/node/issues/3319 + req.setTimeout(43200000) // 12 hours + req.length = req.headers['content-length'] + + try { + await xapi.installSupplementalPackOnAllHosts(req) + res.end(format.response(0)) + } catch (e) { + res.writeHead(500) + res.end(format.error(0, new Error(e.message))) + } +} + +export async function installSupplementalPack ({ pool }) { + return { + $sendTo: await this.registerHttpRequest(handleInstallSupplementalPack, { poolId: pool.id }), + } +} + +installSupplementalPack.description = 'installs supplemental pack from ISO file on all hosts' + +installSupplementalPack.params = { + pool: { type: 'string' }, +} + +installSupplementalPack.resolve = { + pool: ['pool', 'pool', 'admin'], +} diff --git a/packages/xo-server/src/api/remote.js b/packages/xo-server/src/api/remote.js new file mode 100644 index 000000000..54333809c --- /dev/null +++ b/packages/xo-server/src/api/remote.js @@ -0,0 +1,72 @@ +export async function getAll () { + return this.getAllRemotes() +} + +getAll.permission = 'admin' +getAll.description = 'Gets all existing fs remote points' + +export async function get ({id}) { + return this.getRemote(id) +} + +get.permission = 'admin' +get.description = 'Gets an existing fs remote point' +get.params = { + id: {type: 'string'}, +} + +export async function test ({id}) { + return this.testRemote(id) +} + +test.permission = 'admin' +test.description = 'Performs a read/write matching test on a remote point' +test.params = { + id: {type: 'string'}, +} + +export async function list ({id}) { + return this.listRemoteBackups(id) +} + +list.permission = 'admin' +list.description = 'Lists the files found in a 
remote point' +list.params = { + id: {type: 'string'}, +} + +export async function create ({name, url}) { + return this.createRemote({name, url}) +} + +create.permission = 'admin' +create.description = 'Creates a new fs remote point' +create.params = { + name: {type: 'string'}, + url: {type: 'string'}, +} + +export async function set ({id, name, url, enabled}) { + await this.updateRemote(id, {name, url, enabled}) +} + +set.permission = 'admin' +set.description = 'Modifies an existing fs remote point' +set.params = { + id: {type: 'string'}, + name: {type: 'string', optional: true}, + url: {type: 'string', optional: true}, + enabled: {type: 'boolean', optional: true}, +} + +async function delete_ ({id}) { + await this.removeRemote(id) +} + +delete_.permission = 'admin' +delete_.description = 'Deletes an existing fs remote point' +delete_.params = { + id: {type: 'string'}, +} + +export {delete_ as delete} diff --git a/packages/xo-server/src/api/resource-set.js b/packages/xo-server/src/api/resource-set.js new file mode 100644 index 000000000..79f67bb84 --- /dev/null +++ b/packages/xo-server/src/api/resource-set.js @@ -0,0 +1,240 @@ +import { + unauthorized, +} from 'xo-common/api-errors' + +// =================================================================== + +export function create ({ name, subjects, objects, limits }) { + return this.createResourceSet(name, subjects, objects, limits) +} + +create.permission = 'admin' + +create.params = { + name: { + type: 'string', + }, + subjects: { + type: 'array', + items: { + type: 'string', + }, + optional: true, + }, + objects: { + type: 'array', + items: { + type: 'string', + }, + optional: true, + }, + limits: { + type: 'object', + optional: true, + }, +} + +// ------------------------------------------------------------------- + +function delete_ ({ id }) { + return this.deleteResourceSet(id) +} +export { delete_ as delete } + +delete_.permission = 'admin' + +delete_.params = { + id: { + type: 'string', + }, +} + +// 
------------------------------------------------------------------- + +export function set ({ id, name, subjects, objects, ipPools, limits }) { + return this.updateResourceSet(id, { + limits, + name, + objects, + ipPools, + subjects, + }) +} + +set.permission = 'admin' + +set.params = { + id: { + type: 'string', + }, + name: { + type: 'string', + optional: true, + }, + subjects: { + type: 'array', + items: { + type: 'string', + }, + optional: true, + }, + objects: { + type: 'array', + items: { + type: 'string', + }, + optional: true, + }, + ipPools: { + type: 'array', + items: { + type: 'string', + }, + optional: true, + }, + limits: { + type: 'object', + optional: true, + }, +} + +// ------------------------------------------------------------------- + +export function get ({ id }) { + return this.getResourceSet(id) +} + +get.permission = 'admin' + +get.params = { + id: { + type: 'string', + }, +} + +// ------------------------------------------------------------------- + +export async function getAll () { + const { user } = this + if (!user) { + throw unauthorized() + } + + return this.getAllResourceSets(user.id) +} + +getAll.description = 'Get the list of all existing resource set' + +// ------------------------------------------------------------------- + +export function addObject ({ id, object }) { + return this.addObjectToResourceSet(object, id) +} + +addObject.permission = 'admin' + +addObject.params = { + id: { + type: 'string', + }, + object: { + type: 'string', + }, +} + +// ------------------------------------------------------------------- + +export function removeObject ({ id, object }) { + return this.removeObjectFromResourceSet(object, id) +} + +removeObject.permission = 'admin' + +removeObject.params = { + id: { + type: 'string', + }, + object: { + type: 'string', + }, +} + +// ------------------------------------------------------------------- + +export function addSubject ({ id, subject }) { + return this.addSubjectToResourceSet(subject, id) +} + 
+addSubject.permission = 'admin' + +addSubject.params = { + id: { + type: 'string', + }, + subject: { + type: 'string', + }, +} + +// ------------------------------------------------------------------- + +export function removeSubject ({ id, subject }) { + return this.removeSubjectFromResourceSet(subject, id) +} + +removeSubject.permission = 'admin' + +removeSubject.params = { + id: { + type: 'string', + }, + subject: { + type: 'string', + }, +} + +// ------------------------------------------------------------------- + +export function addLimit ({ id, limitId, quantity }) { + return this.addLimitToResourceSet(limitId, quantity, id) +} + +addLimit.permission = 'admin' + +addLimit.params = { + id: { + type: 'string', + }, + limitId: { + type: 'string', + }, + quantity: { + type: 'integer', + }, +} + +// ------------------------------------------------------------------- + +export function removeLimit ({ id, limitId }) { + return this.removeLimitFromResourceSet(limitId, id) +} + +removeLimit.permission = 'admin' + +removeLimit.params = { + id: { + type: 'string', + }, + limitId: { + type: 'string', + }, +} + +// ------------------------------------------------------------------- + +export function recomputeAllLimits () { + return this.recomputeResourceSetsLimits() +} + +recomputeAllLimits.permission = 'admin' +recomputeAllLimits.description = 'Recompute manually the current resource set usage' diff --git a/packages/xo-server/src/api/role.js b/packages/xo-server/src/api/role.js new file mode 100644 index 000000000..63f72df45 --- /dev/null +++ b/packages/xo-server/src/api/role.js @@ -0,0 +1,5 @@ +export async function getAll () { + return /* await */ this.getRoles() +} + +getAll.description = 'Returns the list of all existing roles' diff --git a/packages/xo-server/src/api/schedule.js b/packages/xo-server/src/api/schedule.js new file mode 100644 index 000000000..bb2fc1204 --- /dev/null +++ b/packages/xo-server/src/api/schedule.js @@ -0,0 +1,57 @@ +// FIXME so far, no 
 acls for schedules
+
+export async function getAll () {
+  return /* await */ this.getAllSchedules()
+}
+
+getAll.permission = 'admin'
+getAll.description = 'Gets all existing schedules'
+
+export async function get ({ id }) {
+  return /* await */ this.getSchedule(id)
+}
+
+get.permission = 'admin'
+get.description = 'Gets an existing schedule'
+get.params = {
+  id: {type: 'string'},
+}
+
+export async function create ({ jobId, cron, enabled, name, timezone }) {
+  return /* await */ this.createSchedule(this.session.get('user_id'), { job: jobId, cron, enabled, name, timezone })
+}
+
+create.permission = 'admin'
+create.description = 'Creates a new schedule'
+create.params = {
+  jobId: {type: 'string'},
+  cron: {type: 'string'},
+  enabled: {type: 'boolean', optional: true},
+  name: {type: 'string', optional: true},
+}
+
+export async function set ({ id, jobId, cron, enabled, name, timezone }) {
+  await this.updateSchedule(id, { job: jobId, cron, enabled, name, timezone })
+}
+
+set.permission = 'admin'
+set.description = 'Modifies an existing schedule'
+set.params = {
+  id: {type: 'string'},
+  jobId: {type: 'string', optional: true},
+  cron: {type: 'string', optional: true},
+  enabled: {type: 'boolean', optional: true},
+  name: {type: 'string', optional: true},
+}
+
+async function delete_ ({id}) {
+  await this.removeSchedule(id)
+}
+
+delete_.permission = 'admin'
+delete_.description = 'Deletes an existing schedule'
+delete_.params = {
+  id: {type: 'string'},
+}
+
+export {delete_ as delete}
diff --git a/packages/xo-server/src/api/scheduler.js b/packages/xo-server/src/api/scheduler.js
new file mode 100644
index 000000000..7488e5682
--- /dev/null
+++ b/packages/xo-server/src/api/scheduler.js
@@ -0,0 +1,30 @@
+export async function enable ({id}) {
+  const schedule = await this.getSchedule(id)
+  schedule.enabled = true
+  await this.updateSchedule(id, schedule)
+}
+
+enable.permission = 'admin'
+enable.description = 'Enables a schedule to run its job as scheduled'
+enable.params = {
+  id: {type: 'string'},
+}
+
+export async function disable ({id}) {
+  const schedule = await this.getSchedule(id)
+  schedule.enabled = false
+  await this.updateSchedule(id, schedule)
+}
+
+disable.permission = 'admin'
+disable.description = 'Disables a schedule'
+disable.params = {
+  id: {type: 'string'},
+}
+
+export function getScheduleTable () {
+  return this.scheduleTable
+}
+
+getScheduleTable.permission = 'admin'
+getScheduleTable.description = 'Get a map of existing schedules enabled/disabled state'
diff --git a/packages/xo-server/src/api/server.js b/packages/xo-server/src/api/server.js
new file mode 100644
index 000000000..08fedf925
--- /dev/null
+++ b/packages/xo-server/src/api/server.js
@@ -0,0 +1,137 @@
+import { ignoreErrors } from 'promise-toolbox'
+
+export async function add ({autoConnect = true, ...props}) {
+  const server = await this.registerXenServer(props)
+
+  if (autoConnect) {
+    this.connectXenServer(server.id)::ignoreErrors()
+  }
+
+  return server.id
+}
+
+add.description = 'register a new Xen server'
+
+add.permission = 'admin'
+
+add.params = {
+  label: {
+    optional: true,
+    type: 'string',
+  },
+  host: {
+    type: 'string',
+  },
+  username: {
+    type: 'string',
+  },
+  password: {
+    type: 'string',
+  },
+  autoConnect: {
+    optional: true,
+    type: 'boolean',
+  },
+  allowUnauthorized: {
+    optional: true,
+    type: 'boolean',
+  },
+}
+
+// -------------------------------------------------------------------
+
+export async function remove ({id}) {
+  await this.unregisterXenServer(id)
+}
+
+remove.description = 'unregister a Xen server'
+
+remove.permission = 'admin'
+
+remove.params = {
+  id: {
+    type: 'string',
+  },
+}
+
+// -------------------------------------------------------------------
+
+// TODO: remove this function when users are integrated to the main
+// collection.
+export function getAll () { + return this.getAllXenServers() +} + +getAll.description = 'returns all the registered Xen server' + +getAll.permission = 'admin' + +// ------------------------------------------------------------------- + +export async function set ({id, ...props}) { + await this.updateXenServer(id, props) +} + +set.description = 'changes the properties of a Xen server' + +set.permission = 'admin' + +set.params = { + id: { + type: 'string', + }, + label: { + type: 'string', + optional: true, + }, + host: { + type: 'string', + optional: true, + }, + username: { + type: 'string', + optional: true, + }, + password: { + type: 'string', + optional: true, + }, + allowUnauthorized: { + optional: true, + type: 'boolean', + }, +} + +// ------------------------------------------------------------------- + +export async function connect ({id}) { + this.updateXenServer(id, {enabled: true})::ignoreErrors() + await this.connectXenServer(id) +} + +connect.description = 'connect a Xen server' + +connect.permission = 'admin' + +connect.params = { + id: { + type: 'string', + }, +} + +// ------------------------------------------------------------------- + +export async function disconnect ({id}) { + this.updateXenServer(id, {enabled: false})::ignoreErrors() + await this.disconnectXenServer(id) +} + +disconnect.description = 'disconnect a Xen server' + +disconnect.permission = 'admin' + +disconnect.params = { + id: { + type: 'string', + }, +} diff --git a/packages/xo-server/src/api/session.js b/packages/xo-server/src/api/session.js new file mode 100644 index 000000000..ae731a25b --- /dev/null +++ b/packages/xo-server/src/api/session.js @@ -0,0 +1,58 @@ +import {deprecate} from 'util' + +import { getUserPublicProperties } from '../utils' +import {invalidCredentials} from 'xo-common/api-errors' + +// =================================================================== + +export async function signIn (credentials) { + const user = await this.authenticateUser(credentials) + 
if (!user) { + throw invalidCredentials() + } + this.session.set('user_id', user.id) + + return getUserPublicProperties(user) +} + +signIn.description = 'sign in' + +// ------------------------------------------------------------------- + +export const signInWithPassword = deprecate(signIn, 'use session.signIn() instead') + +signInWithPassword.params = { + email: { type: 'string' }, + password: { type: 'string' }, +} + +// ------------------------------------------------------------------- + +export const signInWithToken = deprecate(signIn, 'use session.signIn() instead') + +signInWithToken.params = { + token: { type: 'string' }, +} + +// ------------------------------------------------------------------- + +export function signOut () { + this.session.unset('user_id') +} + +signOut.description = 'sign out the user from the current session' + +// This method requires the user to be signed in. +signOut.permission = '' + +// ------------------------------------------------------------------- + +export async function getUser () { + const userId = this.session.get('user_id') + + return userId === undefined + ? null + : getUserPublicProperties(await this.getUser(userId)) +} + +getUser.description = 'return the currently connected user' diff --git a/packages/xo-server/src/api/sr.js b/packages/xo-server/src/api/sr.js new file mode 100644 index 000000000..dcde4d61f --- /dev/null +++ b/packages/xo-server/src/api/sr.js @@ -0,0 +1,845 @@ +import { some } from 'lodash' + +import { asInteger } from '../xapi/utils' +import { + asyncMap, + ensureArray, + forEach, + parseXml, +} from '../utils' + +// =================================================================== + +export async function set ({ + sr, + + // TODO: use camel case. 
+ name_description: nameDescription, + name_label: nameLabel, +}) { + await this.getXapi(sr).setSrProperties(sr._xapiId, { + nameDescription, + nameLabel, + }) +} + +set.params = { + id: { type: 'string' }, + + name_label: { type: 'string', optional: true }, + + name_description: { type: 'string', optional: true }, +} + +set.resolve = { + sr: ['id', 'SR', 'operate'], +} + +// ------------------------------------------------------------------- + +export async function scan ({ SR }) { + await this.getXapi(SR).call('SR.scan', SR._xapiRef) +} + +scan.params = { + id: { type: 'string' }, +} + +scan.resolve = { + SR: ['id', 'SR', 'operate'], +} + +// ------------------------------------------------------------------- +const srIsBackingHa = (sr) => sr.$pool.ha_enabled && some(sr.$pool.$ha_statefiles, f => f.$SR === sr) + +// TODO: find a way to call this "delete" and not destroy +export async function destroy ({sr}) { + const xapi = this.getXapi(sr) + if (sr.SR_type !== 'xosan') { + await xapi.destroySr(sr._xapiId) + return + } + const xapiSr = xapi.getObject(sr) + if (srIsBackingHa(xapiSr)) { + throw new Error('You tried to remove a SR the High Availability is relying on. 
Please disable HA first.') + } + const config = xapi.xo.getData(sr, 'xosan_config') + // we simply forget because the hosted disks are being destroyed with the VMs + await xapi.forgetSr(sr._xapiId) + await asyncMap(config.nodes, node => xapi.deleteVm(node.vm.id)) + await xapi.deleteNetwork(config.network) + if (sr.SR_type === 'xosan') { + await this.unbindXosanLicense({ srId: sr.id }) + } +} + +destroy.params = { + id: { type: 'string' }, +} + +destroy.resolve = { + sr: ['id', 'SR', 'administrate'], +} + +// ------------------------------------------------------------------- + +export async function forget ({ SR }) { + await this.getXapi(SR).forgetSr(SR._xapiId) +} + +forget.params = { + id: { type: 'string' }, +} + +forget.resolve = { + SR: ['id', 'SR', 'administrate'], +} + +// ------------------------------------------------------------------- + +export async function connectAllPbds ({ SR }) { + await this.getXapi(SR).connectAllSrPbds(SR._xapiId) +} + +connectAllPbds.params = { + id: { type: 'string' }, +} + +connectAllPbds.resolve = { + SR: ['id', 'SR', 'administrate'], +} + +// ------------------------------------------------------------------- + +export async function disconnectAllPbds ({ SR }) { + await this.getXapi(SR).disconnectAllSrPbds(SR._xapiId) +} + +disconnectAllPbds.params = { + id: { type: 'string' }, +} + +disconnectAllPbds.resolve = { + SR: ['id', 'SR', 'administrate'], +} + +// ------------------------------------------------------------------- + +export async function createIso ({ + host, + nameLabel, + nameDescription, + path, + type, + user, + password, +}) { + const xapi = this.getXapi(host) + + const deviceConfig = {} + if (type === 'local') { + deviceConfig.legacy_mode = 'true' + } else if (type === 'smb') { + path = path.replace(/\\/g, '/') + deviceConfig.type = 'cifs' + deviceConfig.username = user + deviceConfig.cifspassword = password + } + + deviceConfig.location = path + + const srRef = await xapi.call( + 'SR.create', + 
host._xapiRef, + deviceConfig, + '0', // SR size 0 because ISO + nameLabel, + nameDescription, + 'iso', // SR type ISO + 'iso', // SR content type ISO + type !== 'local', + {} + ) + + const sr = await xapi.call('SR.get_record', srRef) + return sr.uuid +} + +createIso.params = { + host: { type: 'string' }, + nameLabel: { type: 'string' }, + nameDescription: { type: 'string' }, + path: { type: 'string' }, + type: { type: 'string' }, + user: { type: 'string', optional: true }, + password: { type: 'string', optional: true }, +} + +createIso.resolve = { + host: ['host', 'host', 'administrate'], +} + +// ------------------------------------------------------------------- +// NFS SR + +// This functions creates a NFS SR + +export async function createNfs ({ + host, + nameLabel, + nameDescription, + server, + serverPath, + nfsVersion, +}) { + const xapi = this.getXapi(host) + + const deviceConfig = { + server, + serverpath: serverPath, + } + + // if NFS version given + if (nfsVersion) { + deviceConfig.nfsversion = nfsVersion + } + + const srRef = await xapi.call( + 'SR.create', + host._xapiRef, + deviceConfig, + '0', + nameLabel, + nameDescription, + 'nfs', // SR LVM over iSCSI + 'user', // recommended by Citrix + true, + {} + ) + + const sr = await xapi.call('SR.get_record', srRef) + return sr.uuid +} + +createNfs.params = { + host: { type: 'string' }, + nameLabel: { type: 'string' }, + nameDescription: { type: 'string' }, + server: { type: 'string' }, + serverPath: { type: 'string' }, + nfsVersion: { type: 'string', optional: true }, +} + +createNfs.resolve = { + host: ['host', 'host', 'administrate'], +} + +// ------------------------------------------------------------------- +// HBA SR + +// This functions creates an HBA SR + +export async function createHba ({ + host, + nameLabel, + nameDescription, + scsiId, +}) { + const xapi = this.getXapi(host) + + const deviceConfig = { + scsiId, + } + + const srRef = await xapi.call( + 'SR.create', + host._xapiRef, + 
deviceConfig, + '0', + nameLabel, + nameDescription, + 'lvmoohba', // SR LVM over HBA + 'user', // recommended by Citrix + true, + {} + ) + + const sr = await xapi.call('SR.get_record', srRef) + return sr.uuid +} + +createHba.params = { + host: { type: 'string' }, + nameLabel: { type: 'string' }, + nameDescription: { type: 'string' }, + scsiId: { type: 'string' }, +} + +createHba.resolve = { + host: ['host', 'host', 'administrate'], +} + +// ------------------------------------------------------------------- +// Local LVM SR + +// This functions creates a local LVM SR + +export async function createLvm ({ + host, + nameLabel, + nameDescription, + device, +}) { + const xapi = this.getXapi(host) + + const deviceConfig = { + device, + } + + const srRef = await xapi.call( + 'SR.create', + host._xapiRef, + deviceConfig, + '0', + nameLabel, + nameDescription, + 'lvm', // SR LVM + 'user', // recommended by Citrix + false, + {} + ) + + const sr = await xapi.call('SR.get_record', srRef) + return sr.uuid +} + +createLvm.params = { + host: { type: 'string' }, + nameLabel: { type: 'string' }, + nameDescription: { type: 'string' }, + device: { type: 'string' }, +} + +createLvm.resolve = { + host: ['host', 'host', 'administrate'], +} + +// ------------------------------------------------------------------- +// This function helps to detect all NFS shares (exports) on a NFS server +// Return a table of exports with their paths and ACLs + +export async function probeNfs ({ + host, + server, +}) { + const xapi = this.getXapi(host) + + const deviceConfig = { + server, + } + + let xml + + try { + await xapi.call( + 'SR.probe', + host._xapiRef, + deviceConfig, + 'nfs', + {} + ) + + throw new Error('the call above should have thrown an error') + } catch (error) { + if (error.code !== 'SR_BACKEND_FAILURE_101') { + throw error + } + + xml = parseXml(error.params[2]) + } + + const nfsExports = [] + forEach(ensureArray(xml['nfs-exports'].Export), nfsExport => { + nfsExports.push({ + path: 
nfsExport.Path.trim(), + acl: nfsExport.Accesslist.trim(), + }) + }) + + return nfsExports +} + +probeNfs.params = { + host: { type: 'string' }, + server: { type: 'string' }, +} + +probeNfs.resolve = { + host: ['host', 'host', 'administrate'], +} + +// ------------------------------------------------------------------- +// This function helps to detect all HBA devices on the host + +export async function probeHba ({ + host, +}) { + const xapi = this.getXapi(host) + + let xml + + try { + await xapi.call( + 'SR.probe', + host._xapiRef, + 'type', + {} + ) + + throw new Error('the call above should have thrown an error') + } catch (error) { + if (error.code !== 'SR_BACKEND_FAILURE_107') { + throw error + } + + xml = parseXml(error.params[2]) + } + + const hbaDevices = [] + forEach(ensureArray(xml.Devlist.BlockDevice), hbaDevice => { + hbaDevices.push({ + hba: hbaDevice.hba.trim(), + path: hbaDevice.path.trim(), + scsciId: hbaDevice.SCSIid.trim(), + size: hbaDevice.size.trim(), + vendor: hbaDevice.vendor.trim(), + }) + }) + + return hbaDevices +} + +probeHba.params = { + host: { type: 'string' }, +} + +probeHba.resolve = { + host: ['host', 'host', 'administrate'], +} + +// ------------------------------------------------------------------- +// ISCSI SR + +// This functions creates a iSCSI SR + +export async function createIscsi ({ + host, + nameLabel, + nameDescription, + size, + target, + port, + targetIqn, + scsiId, + chapUser, + chapPassword, +}) { + const xapi = this.getXapi(host) + + const deviceConfig = { + target, + targetIQN: targetIqn, + SCSIid: scsiId, + } + + // if we give user and password + if (chapUser && chapPassword) { + deviceConfig.chapuser = chapUser + deviceConfig.chappassword = chapPassword + } + + // if we give another port than default iSCSI + if (port) { + deviceConfig.port = asInteger(port) + } + + const srRef = await xapi.call( + 'SR.create', + host._xapiRef, + deviceConfig, + '0', + nameLabel, + nameDescription, + 'lvmoiscsi', // SR LVM over 
iSCSI + 'user', // recommended by Citrix + true, + {} + ) + + const sr = await xapi.call('SR.get_record', srRef) + return sr.uuid +} + +createIscsi.params = { + host: { type: 'string' }, + nameLabel: { type: 'string' }, + nameDescription: { type: 'string' }, + target: { type: 'string' }, + port: { type: 'integer', optional: true }, + targetIqn: { type: 'string' }, + scsiId: { type: 'string' }, + chapUser: { type: 'string', optional: true }, + chapPassword: { type: 'string', optional: true }, +} + +createIscsi.resolve = { + host: ['host', 'host', 'administrate'], +} + +// ------------------------------------------------------------------- +// This function helps to detect all iSCSI IQN on a Target (iSCSI "server") +// Return a table of IQN or empty table if no iSCSI connection to the target + +export async function probeIscsiIqns ({ + host, + target: targetIp, + port, + chapUser, + chapPassword, +}) { + const xapi = this.getXapi(host) + + const deviceConfig = { + target: targetIp, + } + + // if we give user and password + if (chapUser && chapPassword) { + deviceConfig.chapUser = chapUser + deviceConfig.chapPassword = chapPassword + } + + // if we give another port than default iSCSI + if (port) { + deviceConfig.port = asInteger(port) + } + + let xml + + try { + await xapi.call( + 'SR.probe', + host._xapiRef, + deviceConfig, + 'lvmoiscsi', + {} + ) + + throw new Error('the call above should have thrown an error') + } catch (error) { + if (error.code === 'SR_BACKEND_FAILURE_141') { + return [] + } + if (error.code !== 'SR_BACKEND_FAILURE_96') { + throw error + } + + xml = parseXml(error.params[2]) + } + + const targets = [] + forEach(ensureArray(xml['iscsi-target-iqns'].TGT), target => { + // if the target is on another IP adress, do not display it + if (target.IPAddress.trim() === targetIp) { + targets.push({ + iqn: target.TargetIQN.trim(), + ip: target.IPAddress.trim(), + }) + } + }) + + return targets +} + +probeIscsiIqns.params = { + host: { type: 'string' }, + 
target: { type: 'string' }, + port: { type: 'integer', optional: true }, + chapUser: { type: 'string', optional: true }, + chapPassword: { type: 'string', optional: true }, +} +probeIscsiIqns.resolve = { + host: ['host', 'host', 'administrate'], +} + +// ------------------------------------------------------------------- +// This function helps to detect all iSCSI ID and LUNs on a Target +// It will return a LUN table + +export async function probeIscsiLuns ({ + host, + target: targetIp, + port, + targetIqn, + chapUser, + chapPassword, +}) { + const xapi = this.getXapi(host) + + const deviceConfig = { + target: targetIp, + targetIQN: targetIqn, + } + + // if we give user and password + if (chapUser && chapPassword) { + deviceConfig.chapuser = chapUser + deviceConfig.chappassword = chapPassword + } + + // if we give another port than default iSCSI + if (port) { + deviceConfig.port = asInteger(port) + } + + let xml + + try { + await xapi.call( + 'SR.probe', + host._xapiRef, + deviceConfig, + 'lvmoiscsi', + {} + ) + + throw new Error('the call above should have thrown an error') + } catch (error) { + if (error.code !== 'SR_BACKEND_FAILURE_107') { + throw error + } + + xml = parseXml(error.params[2]) + } + + const luns = [] + forEach(ensureArray(xml['iscsi-target'].LUN), lun => { + luns.push({ + id: lun.LUNid.trim(), + vendor: lun.vendor.trim(), + serial: lun.serial.trim(), + size: lun.size.trim(), + scsiId: lun.SCSIid.trim(), + }) + }) + + return luns +} + +probeIscsiLuns.params = { + host: { type: 'string' }, + target: { type: 'string' }, + port: { type: 'integer', optional: true }, + targetIqn: { type: 'string' }, + chapUser: { type: 'string', optional: true }, + chapPassword: { type: 'string', optional: true }, +} + +probeIscsiLuns.resolve = { + host: ['host', 'host', 'administrate'], +} + +// ------------------------------------------------------------------- +// This function helps to detect if this target already exists in XAPI +// It returns a table of SR UUID, 
empty if no existing connections + +export async function probeIscsiExists ({ + host, + target: targetIp, + port, + targetIqn, + scsiId, + chapUser, + chapPassword, +}) { + const xapi = this.getXapi(host) + + const deviceConfig = { + target: targetIp, + targetIQN: targetIqn, + SCSIid: scsiId, + } + + // if we give user and password + if (chapUser && chapPassword) { + deviceConfig.chapuser = chapUser + deviceConfig.chappassword = chapPassword + } + + // if we give another port than default iSCSI + if (port) { + deviceConfig.port = asInteger(port) + } + + const xml = parseXml(await xapi.call('SR.probe', host._xapiRef, deviceConfig, 'lvmoiscsi', {})) + + const srs = [] + forEach(ensureArray(xml['SRlist'].SR), sr => { + // get the UUID of SR connected to this LUN + srs.push({ uuid: sr.UUID.trim() }) + }) + + return srs +} + +probeIscsiExists.params = { + host: { type: 'string' }, + target: { type: 'string' }, + port: { type: 'integer', optional: true }, + targetIqn: { type: 'string' }, + scsiId: { type: 'string' }, + chapUser: { type: 'string', optional: true }, + chapPassword: { type: 'string', optional: true }, +} + +probeIscsiExists.resolve = { + host: ['host', 'host', 'administrate'], +} + +// ------------------------------------------------------------------- +// This function helps to detect if this NFS SR already exists in XAPI +// It returns a table of SR UUID, empty if no existing connections + +export async function probeNfsExists ({ + host, + server, + serverPath, +}) { + const xapi = this.getXapi(host) + + const deviceConfig = { + server, + serverpath: serverPath, + } + + const xml = parseXml(await xapi.call('SR.probe', host._xapiRef, deviceConfig, 'nfs', {})) + + const srs = [] + + forEach(ensureArray(xml['SRlist'].SR), sr => { + // get the UUID of SR connected to this LUN + srs.push({ uuid: sr.UUID.trim() }) + }) + + return srs +} + +probeNfsExists.params = { + host: { type: 'string' }, + server: { type: 'string' }, + serverPath: { type: 'string' }, +} + 
+probeNfsExists.resolve = { + host: ['host', 'host', 'administrate'], +} + +// ------------------------------------------------------------------- +// This function helps to reattach a forgotten NFS/iSCSI SR + +export async function reattach ({ + host, + uuid, + nameLabel, + nameDescription, + type, +}) { + const xapi = this.getXapi(host) + + if (type === 'iscsi') { + type = 'lvmoiscsi' // the internal XAPI name + } + + const srRef = await xapi.call( + 'SR.introduce', + uuid, + nameLabel, + nameDescription, + type, + 'user', + true, + {} + ) + + const sr = await xapi.call('SR.get_record', srRef) + return sr.uuid +} + +reattach.params = { + host: { type: 'string' }, + uuid: { type: 'string' }, + nameLabel: { type: 'string' }, + nameDescription: { type: 'string' }, + type: { type: 'string' }, +} + +reattach.resolve = { + host: ['host', 'host', 'administrate'], +} + +// ------------------------------------------------------------------- +// This function helps to reattach a forgotten ISO SR + +export async function reattachIso ({ + host, + uuid, + nameLabel, + nameDescription, + type, +}) { + const xapi = this.getXapi(host) + + if (type === 'iscsi') { + type = 'lvmoiscsi' // the internal XAPI name + } + + const srRef = await xapi.call( + 'SR.introduce', + uuid, + nameLabel, + nameDescription, + type, + 'iso', + true, + {} + ) + + const sr = await xapi.call('SR.get_record', srRef) + return sr.uuid +} + +reattachIso.params = { + host: { type: 'string' }, + uuid: { type: 'string' }, + nameLabel: { type: 'string' }, + nameDescription: { type: 'string' }, + type: { type: 'string' }, +} + +reattachIso.resolve = { + host: ['host', 'host', 'administrate'], +} + +// ------------------------------------------------------------------- + +export function getUnhealthyVdiChainsLength ({ sr }) { + return this.getXapi(sr).getUnhealthyVdiChainsLength(sr) +} + +getUnhealthyVdiChainsLength.params = { + id: { type: 'string' }, +} + +getUnhealthyVdiChainsLength.resolve = { + sr: ['id', 
'SR', 'operate'], +} diff --git a/packages/xo-server/src/api/system.js b/packages/xo-server/src/api/system.js new file mode 100644 index 000000000..352eb82d3 --- /dev/null +++ b/packages/xo-server/src/api/system.js @@ -0,0 +1,67 @@ +import forEach from 'lodash/forEach' +import getKeys from 'lodash/keys' +import moment from 'moment-timezone' + +import { noSuchObject } from 'xo-common/api-errors' +import { version as xoServerVersion } from '../../package.json' + +// =================================================================== + +export function getMethodsInfo () { + const methods = {} + + forEach(this.apiMethods, (method, name) => { + methods[name] = { + description: method.description, + params: method.params || {}, + permission: method.permission, + } + }) + + return methods +} +getMethodsInfo.description = 'returns the signatures of all available API methods' + +// ------------------------------------------------------------------- + +export const getServerTimezone = (tz => () => tz)(moment.tz.guess()) +getServerTimezone.description = 'return the timezone server' + +// ------------------------------------------------------------------- + +export const getServerVersion = () => xoServerVersion +getServerVersion.description = 'return the version of xo-server' + +// ------------------------------------------------------------------- + +export const getVersion = () => '0.1' +getVersion.description = 'API version (unstable)' + +// ------------------------------------------------------------------- + +export function listMethods () { + return getKeys(this.apiMethods) +} +listMethods.description = 'returns the name of all available API methods' + +// ------------------------------------------------------------------- + +export function methodSignature ({method: name}) { + const method = this.apiMethods[name] + + if (!method) { + throw noSuchObject() + } + + // Return an array for compatibility with XML-RPC. + return [ + // XML-RPC require the name of the method. 
+ { + name, + description: method.description, + params: method.params || {}, + permission: method.permission, + }, + ] +} +methodSignature.description = 'returns the signature of an API method' diff --git a/packages/xo-server/src/api/tag.js b/packages/xo-server/src/api/tag.js new file mode 100644 index 000000000..52f1cb16a --- /dev/null +++ b/packages/xo-server/src/api/tag.js @@ -0,0 +1,31 @@ +export async function add ({tag, object}) { + await this.getXapi(object).addTag(object._xapiId, tag) +} + +add.description = 'add a new tag to an object' + +add.resolve = { + object: ['id', null, 'administrate'], +} + +add.params = { + tag: { type: 'string' }, + id: { type: 'string' }, +} + +// ------------------------------------------------------------------- + +export async function remove ({tag, object}) { + await this.getXapi(object).removeTag(object._xapiId, tag) +} + +remove.description = 'remove an existing tag from an object' + +remove.resolve = { + object: ['id', null, 'administrate'], +} + +remove.params = { + tag: { type: 'string' }, + id: { type: 'string' }, +} diff --git a/packages/xo-server/src/api/task.js b/packages/xo-server/src/api/task.js new file mode 100644 index 000000000..f24438778 --- /dev/null +++ b/packages/xo-server/src/api/task.js @@ -0,0 +1,25 @@ +export async function cancel ({task}) { + await this.getXapi(task).call('task.cancel', task._xapiRef) +} + +cancel.params = { + id: { type: 'string' }, +} + +cancel.resolve = { + task: ['id', 'task', 'administrate'], +} + +// ------------------------------------------------------------------- + +export async function destroy ({task}) { + await this.getXapi(task).call('task.destroy', task._xapiRef) +} + +destroy.params = { + id: { type: 'string' }, +} + +destroy.resolve = { + task: ['id', 'task', 'administrate'], +} diff --git a/packages/xo-server/src/api/test.js b/packages/xo-server/src/api/test.js new file mode 100644 index 000000000..0eb4d3882 --- /dev/null +++ b/packages/xo-server/src/api/test.js @@ 
-0,0 +1,86 @@ +export function getPermissionsForUser ({ userId }) { + return this.getPermissionsForUser(userId) +} + +getPermissionsForUser.permission = 'admin' + +getPermissionsForUser.params = { + userId: { + type: 'string', + }, +} + +// ------------------------------------------------------------------- + +export function hasPermission ({ userId, objectId, permission }) { + return this.hasPermissions(userId, [ + [ objectId, permission ], + ]) +} + +hasPermission.permission = 'admin' + +hasPermission.params = { + userId: { + type: 'string', + }, + objectId: { + type: 'string', + }, + permission: { + type: 'string', + }, +} + +// ------------------------------------------------------------------- + +export function wait ({duration, returnValue}) { + return new Promise(resolve => { + setTimeout(() => { + resolve(returnValue) + }, +duration) + }) +} + +wait.params = { + duration: { + type: 'string', + }, +} + +// ------------------------------------------------------------------- + +export async function copyVm ({ vm, sr }) { + const srcXapi = this.getXapi(vm) + const tgtXapi = this.getXapi(sr) + + // full + { + console.log('export full VM...') + const input = await srcXapi.exportVm(vm) + console.log('import full VM...') + await tgtXapi.deleteVm(await tgtXapi.importVm(input, { srId: sr })) + } + + // delta + { + console.log('export delta VM...') + const input = await srcXapi.exportDeltaVm(vm) + console.log('import delta VM...') + await tgtXapi.deleteVm(await tgtXapi.importDeltaVm(input, { srId: sr })) + } +} + +copyVm.description = 'export/import full/delta VM' + +copyVm.permission = 'admin' + +copyVm.params = { + vm: { type: 'string' }, + sr: { type: 'string' }, +} + +copyVm.resolve = { + vm: [ 'vm', 'VM' ], + sr: [ 'sr', 'SR' ], +} diff --git a/packages/xo-server/src/api/token.js b/packages/xo-server/src/api/token.js new file mode 100644 index 000000000..0497ccebc --- /dev/null +++ b/packages/xo-server/src/api/token.js @@ -0,0 +1,36 @@ +// TODO: Prevent token 
connections from creating tokens. +// TODO: Token permission. +export async function create ({ expiresIn }) { + return (await this.createAuthenticationToken({ + expiresIn, + userId: this.session.get('user_id'), + })).id +} + +create.description = 'create a new authentication token' + +create.params = { + expiresIn: { + optional: true, + type: [ 'number', 'string' ], + }, +} + +create.permission = '' // sign in + +// ------------------------------------------------------------------- + +// TODO: an user should be able to delete its own tokens. +async function delete_ ({token: id}) { + await this.deleteAuthenticationToken(id) +} + +export {delete_ as delete} + +delete_.description = 'delete an existing authentication token' + +delete_.permission = 'admin' + +delete_.params = { + token: { type: 'string' }, +} diff --git a/packages/xo-server/src/api/user.js b/packages/xo-server/src/api/user.js new file mode 100644 index 000000000..93284f96a --- /dev/null +++ b/packages/xo-server/src/api/user.js @@ -0,0 +1,99 @@ +import {invalidParameters} from 'xo-common/api-errors' +import { getUserPublicProperties, mapToArray } from '../utils' + +// =================================================================== + +export async function create ({email, password, permission}) { + return (await this.createUser({email, password, permission})).id +} + +create.description = 'creates a new user' + +create.permission = 'admin' + +create.params = { + email: { type: 'string' }, + password: { type: 'string' }, + permission: { type: 'string', optional: true }, +} + +// ------------------------------------------------------------------- + +// Deletes an existing user. +async function delete_ ({id}) { + if (id === this.session.get('user_id')) { + throw invalidParameters('a user cannot delete itself') + } + + await this.deleteUser(id) +} + +// delete is not a valid identifier. 
+export {delete_ as delete} + +delete_.description = 'deletes an existing user' + +delete_.permission = 'admin' + +delete_.params = { + id: { type: 'string' }, +} + +// ------------------------------------------------------------------- + +// TODO: remove this function when users are integrated to the main +// collection. +export async function getAll () { + // Retrieves the users. + const users = await this.getAllUsers() + + // Filters out private properties. + return mapToArray(users, getUserPublicProperties) +} + +getAll.description = 'returns all the existing users' + +getAll.permission = 'admin' + +// ------------------------------------------------------------------- + +export async function set ({id, email, password, permission, preferences}) { + const isAdmin = this.user && this.user.permission === 'admin' + if (isAdmin) { + if (permission && id === this.session.get('user_id')) { + throw invalidParameters('a user cannot change its own permission') + } + } else if (email || password || permission) { + throw invalidParameters('this properties can only changed by an administrator') + } + + await this.updateUser(id, {email, password, permission, preferences}) +} + +set.description = 'changes the properties of an existing user' + +set.permission = '' + +set.params = { + id: { type: 'string' }, + email: { type: 'string', optional: true }, + password: { type: 'string', optional: true }, + permission: { type: 'string', optional: true }, + preferences: { type: 'object', optional: true }, +} + +// ------------------------------------------------------------------- + +export async function changePassword ({oldPassword, newPassword}) { + const id = this.session.get('user_id') + await this.changeUserPassword(id, oldPassword, newPassword) +} + +changePassword.description = 'change password after checking old password (user function)' + +changePassword.permission = '' + +changePassword.params = { + oldPassword: {type: 'string'}, + newPassword: {type: 'string'}, +} diff 
--git a/packages/xo-server/src/api/vbd.js b/packages/xo-server/src/api/vbd.js new file mode 100644 index 000000000..63d7fd1e9 --- /dev/null +++ b/packages/xo-server/src/api/vbd.js @@ -0,0 +1,82 @@ +// FIXME: too low level, should be removed. + +async function delete_ ({vbd}) { + await this.getXapi(vbd).deleteVbd(vbd) +} + +delete_.params = { + id: { type: 'string' }, +} + +delete_.resolve = { + vbd: ['id', 'VBD', 'administrate'], +} + +export { delete_ as delete } + +// ------------------------------------------------------------------- + +export async function disconnect ({vbd}) { + const xapi = this.getXapi(vbd) + await xapi.disconnectVbd(vbd._xapiRef) +} + +disconnect.params = { + id: { type: 'string' }, +} + +disconnect.resolve = { + vbd: ['id', 'VBD', 'administrate'], +} + +// ------------------------------------------------------------------- + +export async function connect ({vbd}) { + const xapi = this.getXapi(vbd) + await xapi.connectVbd(vbd._xapiRef) +} + +connect.params = { + id: { type: 'string' }, +} + +connect.resolve = { + vbd: ['id', 'VBD', 'administrate'], +} + +// ------------------------------------------------------------------- + +export async function set ({position, vbd}) { + if (position !== undefined) { + const xapi = this.getXapi(vbd) + await xapi.call('VBD.set_userdevice', vbd._xapiRef, String(position)) + } +} + +set.params = { + // Identifier of the VBD to update. 
+ id: { type: 'string' }, + + position: { type: ['string', 'number'], optional: true }, +} + +set.resolve = { + vbd: ['id', 'VBD', 'administrate'], +} + +// ------------------------------------------------------------------- + +export async function setBootable ({vbd, bootable}) { + const xapi = this.getXapi(vbd) + + await xapi.call('VBD.set_bootable', vbd._xapiRef, bootable) +} + +setBootable.params = { + vbd: { type: 'string' }, + bootable: { type: 'boolean' }, +} + +setBootable.resolve = { + vbd: ['vbd', 'VBD', 'administrate'], +} diff --git a/packages/xo-server/src/api/vdi.js b/packages/xo-server/src/api/vdi.js new file mode 100644 index 000000000..d4399da30 --- /dev/null +++ b/packages/xo-server/src/api/vdi.js @@ -0,0 +1,122 @@ +// FIXME: rename to disk.* + +import { invalidParameters, unauthorized } from 'xo-common/api-errors' +import { isArray, reduce } from 'lodash' + +import { parseSize } from '../utils' + +// ==================================================================== + +export async function delete_ ({vdi}) { + const resourceSet = reduce( + vdi.$VBDs, + (resourceSet, vbd) => resourceSet || this.getObject(this.getObject(vbd, 'VBD').VM).resourceSet, + undefined + ) + + if (resourceSet !== undefined) { + await this.allocateLimitsInResourceSet({ disk: -vdi.size }, resourceSet) + } + + await this.getXapi(vdi).deleteVdi(vdi._xapiId) +} + +delete_.params = { + id: { type: 'string' }, +} + +delete_.resolve = { + vdi: ['id', ['VDI', 'VDI-snapshot'], 'administrate'], +} + +export { delete_ as delete } + +// ------------------------------------------------------------------- + +// FIXME: human readable strings should be handled. +export async function set (params) { + const {vdi} = params + const xapi = this.getXapi(vdi) + const ref = vdi._xapiRef + + // Size. 
+ if ('size' in params) { + let resourceSetId + const size = parseSize(params.size) + + if (size < vdi.size) { + throw invalidParameters( + `cannot set new size (${size}) below the current size (${vdi.size})` + ) + } + + const vbds = vdi.$VBDs + if ( + (vbds.length === 1) && + ((resourceSetId = xapi.xo.getData(this.getObject(vbds[0], 'VBD').VM, 'resourceSet')) !== undefined) + ) { + if (this.user.permission !== 'admin') { + await this.checkResourceSetConstraints(resourceSetId, this.user.id) + } + + await this.allocateLimitsInResourceSet({ disk: size - vdi.size }, resourceSetId) + } else if (!( + (this.user.permission === 'admin') || + (await this.hasPermissions(this.user.id, [ [ vdi.$SR, 'operate' ] ])) + )) { + throw unauthorized() + } + + await xapi.resizeVdi(ref, size) + } + + // Other fields. + const object = { + 'name_label': 'name_label', + 'name_description': 'name_description', + } + for (const param in object) { + const fields = object[param] + if (!(param in params)) { continue } + + for (const field of (isArray(fields) ? fields : [fields])) { + await xapi.call(`VDI.set_${field}`, ref, `${params[param]}`) + } + } +} + +set.params = { + // Identifier of the VDI to update. 
+ id: { type: 'string' }, + + name_label: { type: 'string', optional: true }, + + name_description: { type: 'string', optional: true }, + + // size of VDI + size: { type: ['integer', 'string'], optional: true }, +} + +set.resolve = { + vdi: ['id', ['VDI', 'VDI-snapshot'], 'administrate'], +} + +// ------------------------------------------------------------------- + +export async function migrate ({vdi, sr}) { + const xapi = this.getXapi(vdi) + + await xapi.moveVdi(vdi._xapiRef, sr._xapiRef) + + return true +} + +migrate.params = { + id: { type: 'string' }, + sr_id: { type: 'string' }, +} + +migrate.resolve = { + vdi: ['id', ['VDI', 'VDI-snapshot'], 'administrate'], + sr: ['sr_id', 'SR', 'administrate'], +} diff --git a/packages/xo-server/src/api/vif.js b/packages/xo-server/src/api/vif.js new file mode 100644 index 000000000..039222502 --- /dev/null +++ b/packages/xo-server/src/api/vif.js @@ -0,0 +1,139 @@ +import { ignoreErrors } from 'promise-toolbox' + +import { diffItems } from '../utils' + +// =================================================================== + +// TODO: move into vm and rename to removeInterface +async function delete_ ({vif}) { + this.allocIpAddresses( + vif.id, + null, + vif.allowedIpv4Addresses.concat(vif.allowedIpv6Addresses) + )::ignoreErrors() + + await this.getXapi(vif).deleteVif(vif._xapiId) +} +export {delete_ as delete} + +delete_.params = { + id: { type: 'string' }, +} + +delete_.resolve = { + vif: ['id', 'VIF', 'administrate'], +} + +// ------------------------------------------------------------------- + +// TODO: move into vm and rename to disconnectInterface +export async function disconnect ({vif}) { + // TODO: check if VIF is attached before + await this.getXapi(vif).disconnectVif(vif._xapiId) +} + +disconnect.params = { + id: { type: 'string' }, +} + +disconnect.resolve = { + vif: ['id', 'VIF', 'operate'], +} + +// ------------------------------------------------------------------- +// TODO: move into vm and rename to 
connectInterface +export async function connect ({vif}) { + // TODO: check if VIF is attached before + await this.getXapi(vif).connectVif(vif._xapiId) +} + +connect.params = { + id: { type: 'string' }, +} + +connect.resolve = { + vif: ['id', 'VIF', 'operate'], +} + +// ------------------------------------------------------------------- + +export async function set ({ + vif, + network, + mac, + allowedIpv4Addresses, + allowedIpv6Addresses, + attached, +}) { + const oldIpAddresses = vif.allowedIpv4Addresses.concat(vif.allowedIpv6Addresses) + const newIpAddresses = [] + { + const { push } = newIpAddresses + push.apply(newIpAddresses, allowedIpv4Addresses || vif.allowedIpv4Addresses) + push.apply(newIpAddresses, allowedIpv6Addresses || vif.allowedIpv6Addresses) + } + + if (network || mac) { + const xapi = this.getXapi(vif) + + const vm = xapi.getObject(vif.$VM) + mac == null && (mac = vif.MAC) + network = xapi.getObject((network && network.id) || vif.$network) + attached == null && (attached = vif.attached) + + await this.allocIpAddresses(vif.id, null, oldIpAddresses) + await xapi.deleteVif(vif._xapiId) + + // create new VIF with new parameters + const newVif = await xapi.createVif(vm.$id, network.$id, { + mac, + currently_attached: attached, + ipv4_allowed: newIpAddresses, + }) + + await this.allocIpAddresses(newVif.$id, newIpAddresses) + + return + } + + const [ addAddresses, removeAddresses ] = diffItems( + newIpAddresses, + oldIpAddresses + ) + await this.allocIpAddresses( + vif.id, + addAddresses, + removeAddresses + ) + + return this.getXapi(vif).editVif(vif._xapiId, { + ipv4Allowed: allowedIpv4Addresses, + ipv6Allowed: allowedIpv6Addresses, + }) +} + +set.params = { + id: { type: 'string' }, + network: { type: 'string', optional: true }, + mac: { type: 'string', optional: true }, + allowedIpv4Addresses: { + type: 'array', + items: { + type: 'string', + }, + optional: true, + }, + allowedIpv6Addresses: { + type: 'array', + items: { + type: 'string', + }, + 
optional: true, + }, + attached: { type: 'boolean', optional: true }, +} + +set.resolve = { + vif: ['id', 'VIF', 'operate'], + network: ['network', 'network', 'operate'], +} diff --git a/packages/xo-server/src/api/vm.js b/packages/xo-server/src/api/vm.js new file mode 100644 index 000000000..b09a94446 --- /dev/null +++ b/packages/xo-server/src/api/vm.js @@ -0,0 +1,1462 @@ +import concat from 'lodash/concat' +import { format } from 'json-rpc-peer' +import { ignoreErrors } from 'promise-toolbox' +import { + forbiddenOperation, + invalidParameters, + noSuchObject, + unauthorized, +} from 'xo-common/api-errors' + +import { forEach, map, mapFilter, parseSize } from '../utils' + +// =================================================================== + +function checkPermissionOnSrs (vm, permission = 'operate') { + const permissions = [] + forEach(vm.$VBDs, vbdId => { + const vbd = this.getObject(vbdId, 'VBD') + const vdiId = vbd.VDI + + if (vbd.is_cd_drive || !vdiId) { + return + } + + return permissions.push([this.getObject(vdiId, 'VDI').$SR, permission]) + }) + + return this.hasPermissions( + this.session.get('user_id'), + permissions + ).then(success => { + if (!success) { + throw unauthorized() + } + }) +} + +// =================================================================== + +const extract = (obj, prop) => { + const value = obj[prop] + delete obj[prop] + return value +} + +// TODO: Implement ACLs +export async function create (params) { + const { user } = this + const resourceSet = extract(params, 'resourceSet') + if (resourceSet === undefined && user.permission !== 'admin') { + throw unauthorized() + } + + const template = extract(params, 'template') + params.template = template._xapiId + + const xapi = this.getXapi(template) + + const objectIds = [template.id] + const limits = { + cpus: template.CPUs.number, + disk: 0, + memory: template.memory.dynamic[1], + vms: 1, + } + const vdiSizesByDevice = {} + let highestDevice = -1 + 
forEach(xapi.getObject(template._xapiId).$VBDs, vbd => { + let vdi + highestDevice = Math.max(highestDevice, vbd.userdevice) + if (vbd.type === 'Disk' && (vdi = vbd.$VDI)) { + vdiSizesByDevice[vbd.userdevice] = +vdi.virtual_size + } + }) + + const vdis = extract(params, 'VDIs') + params.vdis = + vdis && + map(vdis, vdi => { + const sr = this.getObject(vdi.SR) + const size = parseSize(vdi.size) + + objectIds.push(sr.id) + limits.disk += size + + return { + ...vdi, + device: ++highestDevice, + size, + SR: sr._xapiId, + type: vdi.type, + } + }) + + const existingVdis = extract(params, 'existingDisks') + params.existingVdis = + existingVdis && + map(existingVdis, (vdi, userdevice) => { + let size, sr + if (vdi.size != null) { + size = parseSize(vdi.size) + vdiSizesByDevice[userdevice] = size + } + + if (vdi.$SR) { + sr = this.getObject(vdi.$SR) + objectIds.push(sr.id) + } + + return { + ...vdi, + size, + $SR: sr && sr._xapiId, + } + }) + + forEach(vdiSizesByDevice, size => (limits.disk += size)) + + const vifs = extract(params, 'VIFs') + params.vifs = + vifs && + map(vifs, vif => { + const network = this.getObject(vif.network) + + objectIds.push(network.id) + + return { + mac: vif.mac, + network: network._xapiId, + ipv4_allowed: vif.allowedIpv4Addresses, + ipv6_allowed: vif.allowedIpv6Addresses, + } + }) + + const installation = extract(params, 'installation') + params.installRepository = installation && installation.repository + + let checkLimits + + if (resourceSet) { + await this.checkResourceSetConstraints(resourceSet, user.id, objectIds) + checkLimits = async limits2 => { + await this.allocateLimitsInResourceSet(limits, resourceSet) + await this.allocateLimitsInResourceSet(limits2, resourceSet) + } + } + + const xapiVm = await xapi.createVm(template._xapiId, params, checkLimits) + const vm = xapi.xo.addObject(xapiVm) + + if (resourceSet) { + await Promise.all([ + params.share + ? 
Promise.all( + map((await this.getResourceSet(resourceSet)).subjects, subjectId => + this.addAcl(subjectId, vm.id, 'admin') + ) + ) + : this.addAcl(user.id, vm.id, 'admin'), + xapi.xo.setData(xapiVm.$id, 'resourceSet', resourceSet), + ]) + } + + for (const vifId of vm.VIFs) { + const vif = this.getObject(vifId, 'VIF') + await this.allocIpAddresses( + vifId, + concat(vif.allowedIpv4Addresses, vif.allowedIpv6Addresses) + ).catch(() => xapi.deleteVif(vif._xapiId)) + } + + if (params.bootAfterCreate) { + ignoreErrors.call(xapi.startVm(vm._xapiId)) + } + + return vm.id +} + +create.params = { + affinityHost: { type: 'string', optional: true }, + + bootAfterCreate: { + type: 'boolean', + optional: true, + }, + + cloudConfig: { + type: 'string', + optional: true, + }, + + coreOs: { + type: 'boolean', + optional: true, + }, + + clone: { + type: 'boolean', + optional: true, + }, + + coresPerSocket: { + type: ['string', 'number'], + optional: true, + }, + + resourceSet: { + type: 'string', + optional: true, + }, + + installation: { + type: 'object', + optional: true, + properties: { + method: { type: 'string' }, + repository: { type: 'string' }, + }, + }, + + vgpuType: { + type: 'string', + optional: true, + }, + + gpuGroup: { + type: 'string', + optional: true, + }, + + // Name/description of the new VM. + name_label: { type: 'string' }, + name_description: { type: 'string', optional: true }, + + // PV Args + pv_args: { type: 'string', optional: true }, + + share: { + type: 'boolean', + optional: true, + }, + + // TODO: add the install repository! + // VBD.insert/eject + // Also for the console! + + // UUID of the template the VM will be created from. + template: { type: 'string' }, + + // Virtual interfaces to create for the new VM. + VIFs: { + optional: true, + type: 'array', + items: { + type: 'object', + properties: { + // UUID of the network to create the interface in. + network: { type: 'string' }, + + mac: { + optional: true, // Auto-generated per default. 
+ type: 'string', + }, + + allowedIpv4Addresses: { + optional: true, + type: 'array', + items: { type: 'string' }, + }, + + allowedIpv6Addresses: { + optional: true, + type: 'array', + items: { type: 'string' }, + }, + }, + }, + }, + + // Virtual disks to create for the new VM. + VDIs: { + optional: true, // If not defined, use the template parameters. + type: 'array', + items: { + type: 'object', + properties: { + size: { type: ['integer', 'string'] }, + SR: { type: 'string' }, + type: { type: 'string' }, + }, + }, + }, + + // TODO: rename to *existingVdis* or rename *VDIs* to *disks*. + existingDisks: { + optional: true, + type: 'object', + + // Do not for a type object. + items: { + type: 'object', + properties: { + size: { + type: ['integer', 'string'], + optional: true, + }, + $SR: { + type: 'string', + optional: true, + }, + }, + }, + }, +} + +create.resolve = { + template: ['template', 'VM-template', ''], + vgpuType: ['vgpuType', 'vgpuType', ''], + gpuGroup: ['gpuGroup', 'gpuGroup', ''], +} + +// ------------------------------------------------------------------- + +async function delete_ ({ + delete_disks, // eslint-disable-line camelcase + force, + vm, + + deleteDisks = delete_disks, +}) { + const xapi = this.getXapi(vm) + + this.getAllAcls().then(acls => { + return Promise.all( + mapFilter(acls, acl => { + if (acl.object === vm.id) { + return ignoreErrors.call( + this.removeAcl(acl.subject, acl.object, acl.action) + ) + } + }) + ) + }) + + // Update IP pools + await Promise.all( + map(vm.VIFs, vifId => { + const vif = xapi.getObject(vifId) + return ignoreErrors.call( + this.allocIpAddresses( + vifId, + null, + concat(vif.ipv4_allowed, vif.ipv6_allowed) + ) + ) + }) + ) + + // Update resource sets + const resourceSet = xapi.xo.getData(vm._xapiId, 'resourceSet') + if (resourceSet != null) { + this.setVmResourceSet(vm._xapiId, null)::ignoreErrors() + } + + return xapi.deleteVm(vm._xapiId, deleteDisks, force) +} + +delete_.params = { + id: { type: 'string' }, 
+ + deleteDisks: { + optional: true, + type: 'boolean', + }, + + force: { + optional: true, + type: 'boolean', + }, +} +delete_.resolve = { + vm: ['id', ['VM', 'VM-snapshot', 'VM-template'], 'administrate'], +} + +export { delete_ as delete } + +// ------------------------------------------------------------------- + +export async function ejectCd ({ vm }) { + await this.getXapi(vm).ejectCdFromVm(vm._xapiId) +} + +ejectCd.params = { + id: { type: 'string' }, +} + +ejectCd.resolve = { + vm: ['id', 'VM', 'operate'], +} + +// ------------------------------------------------------------------- + +export async function insertCd ({ vm, vdi, force }) { + await this.getXapi(vm).insertCdIntoVm(vdi._xapiId, vm._xapiId, { force }) +} + +insertCd.params = { + id: { type: 'string' }, + cd_id: { type: 'string' }, + force: { type: 'boolean' }, +} + +insertCd.resolve = { + vm: ['id', 'VM', 'operate'], + vdi: ['cd_id', 'VDI', 'view'], +} + +// ------------------------------------------------------------------- + +export async function migrate ({ + vm, + host, + sr, + mapVdisSrs, + mapVifsNetworks, + migrationNetwork, +}) { + let mapVdisSrsXapi, mapVifsNetworksXapi + const permissions = [] + + if (mapVdisSrs) { + mapVdisSrsXapi = {} + forEach(mapVdisSrs, (srId, vdiId) => { + const vdiXapiId = this.getObject(vdiId, 'VDI')._xapiId + mapVdisSrsXapi[vdiXapiId] = this.getObject(srId, 'SR')._xapiId + return permissions.push([srId, 'administrate']) + }) + } + + if (mapVifsNetworks) { + mapVifsNetworksXapi = {} + forEach(mapVifsNetworks, (networkId, vifId) => { + const vifXapiId = this.getObject(vifId, 'VIF')._xapiId + mapVifsNetworksXapi[vifXapiId] = this.getObject( + networkId, + 'network' + )._xapiId + return permissions.push([networkId, 'administrate']) + }) + } + + if (!await this.hasPermissions(this.session.get('user_id'), permissions)) { + throw unauthorized() + } + + await this.getXapi(vm).migrateVm( + vm._xapiId, + this.getXapi(host), + host._xapiId, + { + sr: sr && 
this.getObject(sr, 'SR')._xapiId, + migrationNetworkId: + migrationNetwork != null ? migrationNetwork._xapiId : undefined, + mapVifsNetworks: mapVifsNetworksXapi, + mapVdisSrs: mapVdisSrsXapi, + } + ) +} + +migrate.params = { + // Identifier of the VM to migrate. + vm: { type: 'string' }, + + // Identifier of the host to migrate to. + targetHost: { type: 'string' }, + + // Identifier of the default SR to migrate to. + sr: { type: 'string', optional: true }, + + // Map VDIs IDs --> SRs IDs + mapVdisSrs: { type: 'object', optional: true }, + + // Map VIFs IDs --> Networks IDs + mapVifsNetworks: { type: 'object', optional: true }, + + // Identifier of the Network use for the migration + migrationNetwork: { type: 'string', optional: true }, +} + +migrate.resolve = { + vm: ['vm', 'VM', 'administrate'], + host: ['targetHost', 'host', 'administrate'], + migrationNetwork: ['migrationNetwork', 'network', 'administrate'], +} + +// ------------------------------------------------------------------- + +export async function set (params) { + const VM = extract(params, 'VM') + const xapi = this.getXapi(VM) + const vmId = VM._xapiId + + const resourceSetId = extract(params, 'resourceSet') + + if (resourceSetId !== undefined) { + if (this.user.permission !== 'admin') { + throw unauthorized() + } + + await this.setVmResourceSet(vmId, resourceSetId) + } + + return xapi.editVm(vmId, params, async (limits, vm) => { + const resourceSet = xapi.xo.getData(vm, 'resourceSet') + + if (resourceSet) { + try { + return await this.allocateLimitsInResourceSet(limits, resourceSet) + } catch (error) { + // if the resource set no longer exist, behave as if the VM is free + if (!noSuchObject.is(error)) { + throw error + } + } + } + + if (limits.cpuWeight && this.user.permission !== 'admin') { + throw unauthorized() + } + }) +} + +set.params = { + // Identifier of the VM to update. 
+ id: { type: 'string' }, + + name_label: { type: 'string', optional: true }, + + name_description: { type: 'string', optional: true }, + + // TODO: provides better filtering of values for HA possible values: "best- + // effort" meaning "try to restart this VM if possible but don't consider the + // Pool to be overcommitted if this is not possible"; "restart" meaning "this + // VM should be restarted"; "" meaning "do not try to restart this VM" + high_availability: { type: 'boolean', optional: true }, + + // Number of virtual CPUs to allocate. + CPUs: { type: 'integer', optional: true }, + + cpusMax: { type: ['integer', 'string'], optional: true }, + + // Memory to allocate (in bytes). + // + // Note: static_min ≤ dynamic_min ≤ dynamic_max ≤ static_max + memory: { type: ['integer', 'string'], optional: true }, + + // Set dynamic_min + memoryMin: { type: ['integer', 'string'], optional: true }, + + // Set dynamic_max + memoryMax: { type: ['integer', 'string'], optional: true }, + + // Set static_max + memoryStaticMax: { type: ['integer', 'string'], optional: true }, + + // Kernel arguments for PV VM. 
+ PV_args: { type: 'string', optional: true }, + + cpuWeight: { type: ['integer', 'null'], optional: true }, + + cpuCap: { type: ['integer', 'null'], optional: true }, + + affinityHost: { type: ['string', 'null'], optional: true }, + + // Switch from Cirrus video adaptor to VGA adaptor + vga: { type: 'string', optional: true }, + + videoram: { type: ['string', 'number'], optional: true }, + + coresPerSocket: { type: ['string', 'number', 'null'], optional: true }, + + // Move the vm In to/Out of Self Service + resourceSet: { type: ['string', 'null'], optional: true }, +} + +set.resolve = { + VM: ['id', ['VM', 'VM-snapshot', 'VM-template'], 'administrate'], +} + +// ------------------------------------------------------------------- + +export async function restart ({ vm, force }) { + const xapi = this.getXapi(vm) + + if (force) { + await xapi.call('VM.hard_reboot', vm._xapiRef) + } else { + await xapi.call('VM.clean_reboot', vm._xapiRef) + } +} + +restart.params = { + id: { type: 'string' }, + force: { type: 'boolean' }, +} + +restart.resolve = { + vm: ['id', 'VM', 'operate'], +} + +// ------------------------------------------------------------------- + +// TODO: implement resource sets +export async function clone ({ vm, name, full_copy: fullCopy }) { + await checkPermissionOnSrs.call(this, vm) + + return this.getXapi(vm) + .cloneVm(vm._xapiRef, { + nameLabel: name, + fast: !fullCopy, + }) + .then(vm => vm.$id) +} + +clone.params = { + id: { type: 'string' }, + name: { type: 'string' }, + full_copy: { type: 'boolean' }, +} + +clone.resolve = { + // TODO: is it necessary for snapshots? 
+ vm: ['id', 'VM', 'administrate'], +} + +// ------------------------------------------------------------------- + +// TODO: implement resource sets +export async function copy ({ compress, name: nameLabel, sr, vm }) { + if (vm.$pool === sr.$pool) { + if (vm.power_state === 'Running') { + await checkPermissionOnSrs.call(this, vm) + } + + return this.getXapi(vm) + .copyVm(vm._xapiId, sr._xapiId, { + nameLabel, + }) + .then(vm => vm.$id) + } + + return this.getXapi(vm) + .remoteCopyVm(vm._xapiId, this.getXapi(sr), sr._xapiId, { + compress, + nameLabel, + }) + .then(({ vm }) => vm.$id) +} + +copy.params = { + compress: { + type: 'boolean', + optional: true, + }, + name: { + type: 'string', + optional: true, + }, + vm: { type: 'string' }, + sr: { type: 'string' }, +} + +copy.resolve = { + vm: ['vm', ['VM', 'VM-snapshot'], 'administrate'], + sr: ['sr', 'SR', 'operate'], +} + +// ------------------------------------------------------------------- + +export async function convertToTemplate ({ vm }) { + // Convert to a template requires pool admin permission. + if ( + !await this.hasPermissions(this.session.get('user_id'), [ + [vm.$pool, 'administrate'], + ]) + ) { + throw unauthorized() + } + + await this.getXapi(vm).call('VM.set_is_a_template', vm._xapiRef, true) +} + +convertToTemplate.params = { + id: { type: 'string' }, +} + +convertToTemplate.resolve = { + vm: ['id', ['VM', 'VM-snapshot'], 'administrate'], +} + +// TODO: remove when no longer used. 
+export { convertToTemplate as convert } + +// ------------------------------------------------------------------- + +// TODO: implement resource sets +export async function snapshot ({ + vm, + name = `${vm.name_label}_${new Date().toISOString()}`, +}) { + await checkPermissionOnSrs.call(this, vm) + + return (await this.getXapi(vm).snapshotVm(vm._xapiRef, name)).$id +} + +snapshot.params = { + id: { type: 'string' }, + name: { type: 'string', optional: true }, +} + +snapshot.resolve = { + vm: ['id', 'VM', 'administrate'], +} + +// ------------------------------------------------------------------- + +export function rollingDeltaBackup ({ + vm, + remote, + tag, + depth, + retention = depth, +}) { + return this.rollingDeltaVmBackup({ + vm, + remoteId: remote, + tag, + retention, + }) +} + +rollingDeltaBackup.params = { + id: { type: 'string' }, + remote: { type: 'string' }, + tag: { type: 'string' }, + retention: { type: ['string', 'number'], optional: true }, + // This parameter is deprecated. It used to support the old saved backups jobs. 
+ depth: { type: ['string', 'number'], optional: true }, +} + +rollingDeltaBackup.resolve = { + vm: ['id', ['VM', 'VM-snapshot'], 'administrate'], +} + +rollingDeltaBackup.permission = 'admin' + +// ------------------------------------------------------------------- + +export function importDeltaBackup ({ sr, remote, filePath, mapVdisSrs }) { + const mapVdisSrsXapi = {} + + forEach(mapVdisSrs, (srId, vdiId) => { + mapVdisSrsXapi[vdiId] = this.getObject(srId, 'SR')._xapiId + }) + + return this.importDeltaVmBackup({ + sr, + remoteId: remote, + filePath, + mapVdisSrs: mapVdisSrsXapi, + }) +} + +importDeltaBackup.params = { + sr: { type: 'string' }, + remote: { type: 'string' }, + filePath: { type: 'string' }, + // Map VDIs UUIDs --> SRs IDs + mapVdisSrs: { type: 'object', optional: true }, +} + +importDeltaBackup.resolve = { + sr: ['sr', 'SR', 'operate'], +} + +importDeltaBackup.permission = 'admin' + +// ------------------------------------------------------------------- + +export function deltaCopy ({ force, vm, retention, sr }) { + return this.deltaCopyVm(vm, sr, force, retention) +} + +deltaCopy.params = { + force: { type: 'boolean', optional: true }, + id: { type: 'string' }, + retention: { type: 'number', optional: true }, + sr: { type: 'string' }, +} + +deltaCopy.resolve = { + vm: ['id', 'VM', 'operate'], + sr: ['sr', 'SR', 'operate'], +} + +// ------------------------------------------------------------------- + +export async function rollingSnapshot ({ vm, tag, depth, retention = depth }) { + await checkPermissionOnSrs.call(this, vm) + return this.rollingSnapshotVm(vm, tag, retention) +} + +rollingSnapshot.params = { + id: { type: 'string' }, + tag: { type: 'string' }, + retention: { type: 'number', optional: true }, + // This parameter is deprecated. It used to support the old saved backups jobs. 
+ depth: { type: 'number', optional: true }, +} + +rollingSnapshot.resolve = { + vm: ['id', 'VM', 'administrate'], +} + +rollingSnapshot.description = + 'Snapshots a VM with a tagged name, and removes the oldest snapshot with the same tag according to retention' + +// ------------------------------------------------------------------- + +export function backup ({ vm, remoteId, file, compress }) { + return this.backupVm({ vm, remoteId, file, compress }) +} + +backup.permission = 'admin' + +backup.params = { + id: { type: 'string' }, + remoteId: { type: 'string' }, + file: { type: 'string' }, + compress: { type: 'boolean', optional: true }, +} + +backup.resolve = { + vm: ['id', 'VM', 'administrate'], +} + +backup.description = 'Exports a VM to the file system' + +// ------------------------------------------------------------------- + +export function importBackup ({ remote, file, sr }) { + return this.importVmBackup(remote, file, sr) +} + +importBackup.permission = 'admin' +importBackup.description = + 'Imports a VM into host, from a file found in the chosen remote' +importBackup.params = { + remote: { type: 'string' }, + file: { type: 'string' }, + sr: { type: 'string' }, +} + +importBackup.resolve = { + sr: ['sr', 'SR', 'operate'], +} + +importBackup.permission = 'admin' + +// ------------------------------------------------------------------- + +export function rollingBackup ({ + vm, + remoteId, + tag, + depth, + retention = depth, + compress, +}) { + return this.rollingBackupVm({ + vm, + remoteId, + tag, + retention, + compress, + }) +} + +rollingBackup.permission = 'admin' + +rollingBackup.params = { + id: { type: 'string' }, + remoteId: { type: 'string' }, + tag: { type: 'string' }, + retention: { type: 'number', optional: true }, + // This parameter is deprecated. It used to support the old saved backups jobs. 
+ depth: { type: 'number', optional: true }, + compress: { type: 'boolean', optional: true }, +} + +rollingBackup.resolve = { + vm: ['id', ['VM', 'VM-snapshot'], 'administrate'], +} + +rollingBackup.description = + 'Exports a VM to the file system with a tagged name, and removes the oldest backup with the same tag according to retention' + +// ------------------------------------------------------------------- + +export function rollingDrCopy ({ + vm, + pool, + sr, + tag, + depth, + retention = depth, + deleteOldBackupsFirst, +}) { + if (sr === undefined) { + if (pool === undefined) { + throw invalidParameters('either pool or sr param should be specified') + } + + if (vm.$pool === pool.id) { + throw forbiddenOperation( + 'Disaster Recovery attempts to copy on the same pool' + ) + } + + sr = this.getObject(pool.default_SR, 'SR') + } + + return this.rollingDrCopyVm({ + vm, + sr, + tag, + retention, + deleteOldBackupsFirst, + }) +} + +rollingDrCopy.params = { + retention: { type: 'number', optional: true }, + // This parameter is deprecated. It used to support the old saved backups jobs. 
+ depth: { type: 'number', optional: true }, + id: { type: 'string' }, + pool: { type: 'string', optional: true }, + sr: { type: 'string', optional: true }, + tag: { type: 'string' }, + deleteOldBackupsFirst: { type: 'boolean', optional: true }, +} + +rollingDrCopy.resolve = { + vm: ['id', ['VM', 'VM-snapshot'], 'administrate'], + pool: ['pool', 'pool', 'administrate'], + sr: ['sr', 'SR', 'administrate'], +} + +rollingDrCopy.description = + 'Copies a VM to a different pool, with a tagged name, and removes the oldest VM with the same tag from this pool, according to retention' + +// ------------------------------------------------------------------- + +export function start ({ vm, force }) { + return this.getXapi(vm).startVm(vm._xapiId, force) +} + +start.params = { + force: { type: 'boolean', optional: true }, + id: { type: 'string' }, +} + +start.resolve = { + vm: ['id', 'VM', 'operate'], +} + +// ------------------------------------------------------------------- + +// TODO: implements timeout. +// - if !force → clean shutdown +// - if force is true → hard shutdown +// - if force is integer → clean shutdown and after force seconds, hard shutdown. 
+export async function stop ({ vm, force }) { + const xapi = this.getXapi(vm) + + // Hard shutdown + if (force) { + await xapi.call('VM.hard_shutdown', vm._xapiRef) + return + } + + // Clean shutdown + try { + await xapi.call('VM.clean_shutdown', vm._xapiRef) + } catch (error) { + const { code } = error + if ( + code === 'VM_MISSING_PV_DRIVERS' || + code === 'VM_LACKS_FEATURE_SHUTDOWN' + ) { + throw invalidParameters('clean shutdown requires PV drivers') + } + + throw error + } +} + +stop.params = { + id: { type: 'string' }, + force: { type: 'boolean', optional: true }, +} + +stop.resolve = { + vm: ['id', 'VM', 'operate'], +} + +// ------------------------------------------------------------------- + +export async function suspend ({ vm }) { + await this.getXapi(vm).call('VM.suspend', vm._xapiRef) +} + +suspend.params = { + id: { type: 'string' }, +} + +suspend.resolve = { + vm: ['id', 'VM', 'operate'], +} + +// ------------------------------------------------------------------- + +export function resume ({ vm }) { + return this.getXapi(vm).resumeVm(vm._xapiId) +} + +resume.params = { + id: { type: 'string' }, +} + +resume.resolve = { + vm: ['id', 'VM', 'operate'], +} + +// ------------------------------------------------------------------- + +export function revert ({ snapshot, snapshotBefore }) { + return this.getXapi(snapshot).revertVm(snapshot._xapiId, snapshotBefore) +} + +revert.params = { + id: { type: 'string' }, + snapshotBefore: { type: 'boolean', optional: true }, +} + +revert.resolve = { + snapshot: ['id', 'VM-snapshot', 'administrate'], +} + +// ------------------------------------------------------------------- + +export async function handleExport (req, res, { xapi, id, compress }) { + const stream = await xapi.exportVm(id, { + compress: compress != null ? compress : true, + }) + res.on('close', () => stream.cancel()) + // Remove the filename as it is already part of the URL. 
+  stream.headers['content-disposition'] = 'attachment'
+
+  res.writeHead(
+    stream.statusCode,
+    stream.statusMessage != null ? stream.statusMessage : '',
+    stream.headers
+  )
+  stream.pipe(res)
+}
+
+// TODO: integrate in xapi.js
+async function export_ ({ vm, compress }) {
+  if (vm.power_state === 'Running') {
+    await checkPermissionOnSrs.call(this, vm)
+  }
+
+  const data = {
+    xapi: this.getXapi(vm),
+    id: vm._xapiId,
+    compress,
+  }
+
+  return {
+    $getFrom: await this.registerHttpRequest(handleExport, data, {
+      suffix: encodeURI(`/${vm.name_label}.xva`),
+    }),
+  }
+}
+
+export_.params = {
+  vm: { type: 'string' },
+  compress: { type: 'boolean', optional: true },
+}
+
+export_.resolve = {
+  vm: ['vm', ['VM', 'VM-snapshot'], 'administrate'],
+}
+
+export { export_ as export }
+
+// -------------------------------------------------------------------
+
+export async function handleVmImport (req, res, { data, srId, type, xapi }) {
+  // Timeout seems to be broken in Node 4.
+  // See https://github.com/nodejs/node/issues/3319
+  req.setTimeout(43200000) // 12 hours
+
+  try {
+    const vm = await xapi.importVm(req, { data, srId, type })
+    res.end(format.response(0, vm.$id))
+  } catch (e) {
+    res.writeHead(500)
+    res.end(format.error(0, new Error(e.message)))
+  }
+}
+
+// TODO: "sr_id" can be passed in URL to target a specific SR
+async function import_ ({ data, host, sr, type }) {
+  let xapi
+  if (data && type === 'xva') {
+    throw invalidParameters('unsupported field data for the file type xva')
+  }
+
+  if (!sr) {
+    if (!host) {
+      throw invalidParameters('you must provide either host or SR')
+    }
+
+    xapi = this.getXapi(host)
+    sr = xapi.pool.$default_SR
+    if (!sr) {
+      throw invalidParameters('there is no default SR in this pool')
+    }
+
+    // FIXME: must have administrate permission on default SR.
+ } else { + xapi = this.getXapi(sr) + } + + return { + $sendTo: await this.registerHttpRequest(handleVmImport, { + data, + srId: sr._xapiId, + type, + xapi, + }), + } +} + +import_.params = { + data: { + type: 'object', + optional: true, + properties: { + descriptionLabel: { type: 'string' }, + disks: { + type: 'array', + items: { + type: 'object', + properties: { + capacity: { type: 'integer' }, + descriptionLabel: { type: 'string' }, + nameLabel: { type: 'string' }, + path: { type: 'string' }, + position: { type: 'integer' }, + }, + }, + optional: true, + }, + memory: { type: 'integer' }, + nameLabel: { type: 'string' }, + nCpus: { type: 'integer' }, + networks: { + type: 'array', + items: { type: 'string' }, + optional: true, + }, + }, + }, + host: { type: 'string', optional: true }, + type: { type: 'string', optional: true }, + sr: { type: 'string', optional: true }, +} + +import_.resolve = { + host: ['host', 'host', 'administrate'], + sr: ['sr', 'SR', 'administrate'], +} + +export { import_ as import } + +// ------------------------------------------------------------------- + +// FIXME: if position is used, all other disks after this position +// should be shifted. 
+export async function attachDisk ({ vm, vdi, position, mode, bootable }) { + await this.getXapi(vm).createVbd({ + bootable, + mode, + userdevice: position, + vdi: vdi._xapiId, + vm: vm._xapiId, + }) +} + +attachDisk.params = { + bootable: { + type: 'boolean', + optional: true, + }, + mode: { type: 'string', optional: true }, + position: { type: 'string', optional: true }, + vdi: { type: 'string' }, + vm: { type: 'string' }, +} + +attachDisk.resolve = { + vm: ['vm', 'VM', 'administrate'], + vdi: ['vdi', 'VDI', 'administrate'], +} + +// ------------------------------------------------------------------- + +// TODO: implement resource sets +export async function createInterface ({ + vm, + network, + position, + mac, + allowedIpv4Addresses, + allowedIpv6Addresses, +}) { + const { resourceSet } = vm + if (resourceSet != null) { + await this.checkResourceSetConstraints(resourceSet, this.user.id, [ network.id ]) + } else if (!(await this.hasPermissions(this.user.id, [ [ network.id, 'view' ] ]))) { + throw unauthorized() + } + + let ipAddresses + const vif = await this.getXapi(vm).createVif(vm._xapiId, network._xapiId, { + mac, + position, + ipv4_allowed: allowedIpv4Addresses, + ipv6_allowed: allowedIpv6Addresses, + }) + + const { push } = (ipAddresses = []) + if (allowedIpv4Addresses) { + push.apply(ipAddresses, allowedIpv4Addresses) + } + if (allowedIpv6Addresses) { + push.apply(ipAddresses, allowedIpv6Addresses) + } + if (ipAddresses.length) { + ignoreErrors.call(this.allocIpAddresses(vif.$id, ipAddresses)) + } + + return vif.$id +} + +createInterface.params = { + vm: { type: 'string' }, + network: { type: 'string' }, + position: { type: ['integer', 'string'], optional: true }, + mac: { type: 'string', optional: true }, + allowedIpv4Addresses: { + type: 'array', + items: { + type: 'string', + }, + optional: true, + }, + allowedIpv6Addresses: { + type: 'array', + items: { + type: 'string', + }, + optional: true, + }, +} + +createInterface.resolve = { + // Not compatible 
with resource sets. + // FIXME: find a workaround. + network: ['network', 'network', ''], + vm: ['vm', 'VM', 'administrate'], +} + +// ------------------------------------------------------------------- + +export async function attachPci ({ vm, pciId }) { + const xapi = this.getXapi(vm) + + await xapi.call('VM.add_to_other_config', vm._xapiRef, 'pci', pciId) +} + +attachPci.params = { + vm: { type: 'string' }, + pciId: { type: 'string' }, +} + +attachPci.resolve = { + vm: ['vm', 'VM', 'administrate'], +} + +// ------------------------------------------------------------------- + +export async function detachPci ({ vm }) { + const xapi = this.getXapi(vm) + + await xapi.call('VM.remove_from_other_config', vm._xapiRef, 'pci') +} + +detachPci.params = { + vm: { type: 'string' }, +} + +detachPci.resolve = { + vm: ['vm', 'VM', 'administrate'], +} +// ------------------------------------------------------------------- + +export function stats ({ vm, granularity }) { + return this.getXapiVmStats(vm, granularity) +} + +stats.description = 'returns statistics about the VM' + +stats.params = { + id: { type: 'string' }, + granularity: { + type: 'string', + optional: true, + }, +} + +stats.resolve = { + vm: ['id', ['VM', 'VM-snapshot'], 'view'], +} + +// ------------------------------------------------------------------- + +export async function setBootOrder ({ vm, order }) { + const xapi = this.getXapi(vm) + + order = { order } + if (vm.virtualizationMode === 'hvm') { + await xapi.call('VM.set_HVM_boot_params', vm._xapiRef, order) + return + } + + throw invalidParameters('You can only set the boot order on a HVM guest') +} + +setBootOrder.params = { + vm: { type: 'string' }, + order: { type: 'string' }, +} + +setBootOrder.resolve = { + vm: ['vm', 'VM', 'operate'], +} + +// ------------------------------------------------------------------- + +export function recoveryStart ({ vm }) { + return this.getXapi(vm).startVmOnCd(vm._xapiId) +} + +recoveryStart.params = { + id: { type: 
'string' }, +} + +recoveryStart.resolve = { + vm: ['id', 'VM', 'operate'], +} + +// ------------------------------------------------------------------- + +export function getCloudInitConfig ({ template }) { + return this.getXapi(template).getCloudInitConfig(template._xapiId) +} + +getCloudInitConfig.params = { + template: { type: 'string' }, +} + +getCloudInitConfig.resolve = { + template: ['template', 'VM-template', 'administrate'], +} + +// ------------------------------------------------------------------- + +export async function createCloudInitConfigDrive ({ vm, sr, config, coreos }) { + const xapi = this.getXapi(vm) + if (coreos) { + // CoreOS is a special CloudConfig drive created by XS plugin + await xapi.createCoreOsCloudInitConfigDrive(vm._xapiId, sr._xapiId, config) + } else { + // use generic Cloud Init drive + await xapi.createCloudInitConfigDrive(vm._xapiId, sr._xapiId, config) + } +} + +createCloudInitConfigDrive.params = { + vm: { type: 'string' }, + sr: { type: 'string' }, + config: { type: 'string' }, +} + +createCloudInitConfigDrive.resolve = { + vm: ['vm', 'VM', 'administrate'], + + // Not compatible with resource sets. + // FIXME: find a workaround. + sr: ['sr', 'SR', ''], // 'operate' ] +} + +// ------------------------------------------------------------------- + +export async function createVgpu ({ vm, gpuGroup, vgpuType }) { + // TODO: properly handle device. Can a VM have 2 vGPUS? 
+ await this.getXapi(vm).createVgpu( + vm._xapiId, + gpuGroup._xapiId, + vgpuType._xapiId + ) +} + +createVgpu.params = { + vm: { type: 'string' }, + gpuGroup: { type: 'string' }, + vgpuType: { type: 'string' }, +} + +createVgpu.resolve = { + vm: ['vm', 'VM', 'administrate'], + gpuGroup: ['gpuGroup', 'gpuGroup', ''], + vgpuType: ['vgpuType', 'vgpuType', ''], +} + +// ------------------------------------------------------------------- + +export async function deleteVgpu ({ vgpu }) { + await this.getXapi(vgpu).deleteVgpu(vgpu._xapiId) +} + +deleteVgpu.params = { + vgpu: { type: 'string' }, +} + +deleteVgpu.resolve = { + vgpu: ['vgpu', 'vgpu', ''], +} diff --git a/packages/xo-server/src/api/xo.js b/packages/xo-server/src/api/xo.js new file mode 100644 index 000000000..557d05af6 --- /dev/null +++ b/packages/xo-server/src/api/xo.js @@ -0,0 +1,55 @@ +import { streamToBuffer } from '../utils' + +// =================================================================== + +export function clean () { + return this.clean() +} + +clean.permission = 'admin' + +// ------------------------------------------------------------------- + +export async function exportConfig () { + return { + $getFrom: await this.registerHttpRequest((req, res) => { + res.writeHead(200, 'OK', { + 'content-disposition': 'attachment', + }) + + return this.exportConfig() + }, + undefined, + { suffix: '/config.json' }), + } +} + +exportConfig.permission = 'admin' + +// ------------------------------------------------------------------- + +export function getAllObjects ({ filter, limit }) { + return this.getObjects({ filter, limit }) +} + +getAllObjects.permission = '' +getAllObjects.description = 'Returns all XO objects' + +getAllObjects.params = { + filter: { type: 'object', optional: true }, + limit: { type: 'number', optional: true }, +} + +// ------------------------------------------------------------------- + +export async function importConfig () { + return { + $sendTo: await 
this.registerHttpRequest(async (req, res) => { + await this.importConfig(JSON.parse(await streamToBuffer(req))) + + res.end('config successfully imported') + }), + } +} + +importConfig.permission = 'admin' diff --git a/packages/xo-server/src/api/xosan.js b/packages/xo-server/src/api/xosan.js new file mode 100644 index 000000000..813d0eebd --- /dev/null +++ b/packages/xo-server/src/api/xosan.js @@ -0,0 +1,1168 @@ +import createLogger from 'debug' +import defer from 'golike-defer' +import execa from 'execa' +import fs from 'fs-extra' +import map from 'lodash/map' +import { tap, delay } from 'promise-toolbox' +import { invalidParameters } from 'xo-common/api-errors' +import { v4 as generateUuid } from 'uuid' +import { + includes, + remove, + filter, + find, + range, +} from 'lodash' + +import { asInteger } from '../xapi/utils' +import { asyncMap, parseXml, ensureArray } from '../utils' + +const debug = createLogger('xo:xosan') + +const SSH_KEY_FILE = 'id_rsa_xosan' +const DEFAULT_NETWORK_PREFIX = '172.31.100.' 
+const VM_FIRST_NUMBER = 101 +const HOST_FIRST_NUMBER = 1 +const GIGABYTE = 1024 * 1024 * 1024 +const XOSAN_VM_SYSTEM_DISK_SIZE = 10 * GIGABYTE +const XOSAN_DATA_DISK_USEAGE_RATIO = 0.99 +const XOSAN_LICENSE_QUOTA = 50 * GIGABYTE + +const CURRENT_POOL_OPERATIONS = {} + +function getXosanConfig (xosansr, xapi = this.getXapi(xosansr)) { + const data = xapi.xo.getData(xosansr, 'xosan_config') + if (data && data.networkPrefix === undefined) { + // some xosan might have been created before this field was added + data.networkPrefix = DEFAULT_NETWORK_PREFIX + // fire and forget + xapi.xo.setData(xosansr, 'xosan_config', data) + } + return data +} + +function _getIPToVMDict (xapi, sr) { + const dict = {} + const data = getXosanConfig(sr, xapi) + if (data && data.nodes) { + data.nodes.forEach(conf => { + try { + dict[conf.brickName] = {vm: xapi.getObject(conf.vm.id), sr: conf.underlyingSr} + } catch (e) { + // pass + } + }) + } + return dict +} + +function _getGlusterEndpoint (sr) { + const xapi = this.getXapi(sr) + const data = getXosanConfig(sr, xapi) + if (!data || !data.nodes) { + return null + } + return { + xapi, + data: data, + hosts: map(data.nodes, node => xapi.getObject(node.host)), + addresses: map(data.nodes, node => node.vm.ip), + } +} + +async function rateLimitedRetry (action, shouldRetry, retryCount = 20) { + let retryDelay = 500 * (1 + Math.random() / 20) + let result + while (retryCount > 0 && (result = await action()) && shouldRetry(result)) { + retryDelay *= 1.1 + debug('waiting ' + retryDelay + 'ms and retrying') + await delay(retryDelay) + retryCount-- + } + return result +} + +function createVolumeInfoTypes () { + function parseHeal (parsed) { + const bricks = [] + parsed['healInfo']['bricks']['brick'].forEach(brick => { + bricks.push(brick) + if (brick.file) { + brick.file = ensureArray(brick.file) + } + }) + return {commandStatus: true, result: {bricks}} + } + + function parseStatus (parsed) { + const brickDictByUuid = {} + const volume = 
parsed['volStatus']['volumes']['volume'] + volume['node'].forEach(node => { + brickDictByUuid[node.peerid] = brickDictByUuid[node.peerid] || [] + brickDictByUuid[node.peerid].push(node) + }) + return { + commandStatus: true, + result: {nodes: brickDictByUuid, tasks: volume['tasks']}, + } + } + + async function parseInfo (parsed) { + const volume = parsed['volInfo']['volumes']['volume'] + volume['bricks'] = volume['bricks']['brick'] + volume['options'] = volume['options']['option'] + return {commandStatus: true, result: volume} + } + + const sshInfoType = (command, handler) => { + return async function (sr) { + const glusterEndpoint = this::_getGlusterEndpoint(sr) + const cmdShouldRetry = result => + !result['commandStatus'] && + ((result.parsed && result.parsed['cliOutput']['opErrno'] === '30802') || + result.stderr.match(/Another transaction is in progress/)) + const runCmd = async () => glusterCmd(glusterEndpoint, 'volume ' + command, true) + const commandResult = await rateLimitedRetry(runCmd, cmdShouldRetry, 30) + return commandResult['commandStatus'] ? 
this::handler(commandResult.parsed['cliOutput'], sr) : commandResult + } + } + + async function profileType (sr) { + async function parseProfile (parsed) { + const volume = parsed['volProfile'] + volume['bricks'] = ensureArray(volume['brick']) + delete volume['brick'] + return {commandStatus: true, result: volume} + } + + return this::(sshInfoType('profile xosan info', parseProfile))(sr) + } + + async function profileTopType (sr) { + async function parseTop (parsed) { + const volume = parsed['volTop'] + volume['bricks'] = ensureArray(volume['brick']) + delete volume['brick'] + return {commandStatus: true, result: volume} + } + + const topTypes = ['open', 'read', 'write', 'opendir', 'readdir'] + return asyncMap(topTypes, async type => ({ + type, result: await this::(sshInfoType(`top xosan ${type}`, parseTop))(sr), + })) + } + + function checkHosts (sr) { + const xapi = this.getXapi(sr) + const data = getXosanConfig(sr, xapi) + const network = xapi.getObject(data.network) + const badPifs = filter(network.$PIFs, pif => pif.ip_configuration_mode !== 'Static') + return badPifs.map(pif => ({pif, host: pif.$host.$id})) + } + + return { + heal: sshInfoType('heal xosan info', parseHeal), + status: sshInfoType('status xosan', parseStatus), + statusDetail: sshInfoType('status xosan detail', parseStatus), + statusMem: sshInfoType('status xosan mem', parseStatus), + info: sshInfoType('info xosan', parseInfo), + profile: profileType, + profileTop: profileTopType, + hosts: checkHosts, + } +} + +const VOLUME_INFO_TYPES = createVolumeInfoTypes() + +export async function getVolumeInfo ({sr, infoType}) { + await this.checkXosanLicense({ srId: sr.uuid }) + + const glusterEndpoint = this::_getGlusterEndpoint(sr) + + if (glusterEndpoint == null) { + return null + } + const foundType = VOLUME_INFO_TYPES[infoType] + if (!foundType) { + throw new Error('getVolumeInfo(): "' + infoType + '" is an invalid type') + } + return this::foundType(sr) +} + +getVolumeInfo.description = 'info on 
gluster volume'
+getVolumeInfo.permission = 'admin'
+
+getVolumeInfo.params = {
+  sr: {
+    type: 'string',
+  },
+  infoType: {
+    type: 'string',
+    eq: Object.keys(VOLUME_INFO_TYPES),
+  },
+}
+getVolumeInfo.resolve = {
+  sr: ['sr', 'SR', 'administrate'],
+}
+
+export async function profileStatus ({sr, changeStatus = null}) {
+  await this.checkXosanLicense({ srId: sr.uuid })
+
+  const glusterEndpoint = this::_getGlusterEndpoint(sr)
+  if (changeStatus === false) {
+    await glusterCmd(glusterEndpoint, 'volume profile xosan stop')
+    return null
+  }
+  if (changeStatus === true) {
+    await glusterCmd(glusterEndpoint, 'volume profile xosan start')
+  }
+  return this::getVolumeInfo({sr: sr, infoType: 'profile'})
+}
+
+profileStatus.description = 'activate, deactivate, or interrogate profile data'
+profileStatus.permission = 'admin'
+profileStatus.params = {
+  sr: {
+    type: 'string',
+  },
+  changeStatus: {
+    type: 'bool', optional: true,
+  },
+}
+profileStatus.resolve = {
+  sr: ['sr', 'SR', 'administrate'],
+}
+
+// Returns the promise of the XAPI call so that callers can await the
+// reconfiguration before plugging the PIF.
+function reconfigurePifIP (xapi, pif, newIP) {
+  return xapi.call('PIF.reconfigure_ip', pif.$ref, 'Static', newIP, '255.255.255.0', '', '')
+}
+
+// this function should probably become fixSomething(thingToFix, params)
+export async function fixHostNotInNetwork ({xosanSr, host}) {
+  await this.checkXosanLicense({ srId: xosanSr.uuid })
+
+  const xapi = this.getXapi(xosanSr)
+  const data = getXosanConfig(xosanSr, xapi)
+  const network = xapi.getObject(data.network)
+  const usedAddresses = network.$PIFs.filter(pif => pif.ip_configuration_mode === 'Static').map(pif => pif.IP)
+  const pif = network.$PIFs.find(pif => pif.ip_configuration_mode !== 'Static' && pif.$host.$id === host)
+  if (pif) {
+    const newIP = _findIPAddressOutsideList(usedAddresses, HOST_FIRST_NUMBER)
+    // Wait for the IP reconfiguration to finish before plugging the PIF.
+    await reconfigurePifIP(xapi, pif, newIP)
+    await xapi.call('PIF.plug', pif.$ref)
+    const PBD = find(xosanSr.$PBDs, pbd => pbd.$host.$id === host)
+    if (PBD) {
+      await xapi.call('PBD.plug', PBD.$ref)
+    }
+    const sshKey =
await getOrCreateSshKey(xapi) + await callPlugin(xapi, host, 'receive_ssh_keys', { + private_key: sshKey.private, + public_key: sshKey.public, + force: true, + }) + } +} + +fixHostNotInNetwork.description = 'put host in xosan network' +fixHostNotInNetwork.permission = 'admin' + +fixHostNotInNetwork.params = { + xosanSr: { + type: 'string', + }, + host: { + type: 'string', + }, +} +fixHostNotInNetwork.resolve = { + sr: ['sr', 'SR', 'administrate'], +} + +function floor2048 (value) { + return 2048 * Math.floor(value / 2048) +} + +async function copyVm (xapi, originalVm, sr) { + return {sr, vm: await xapi.copyVm(originalVm, sr)} +} + +async function callPlugin (xapi, host, command, params) { + debug('calling plugin', host.address, command) + return JSON.parse(await xapi.call('host.call_plugin', host.$ref, 'xosan.py', command, params)) +} + +async function remoteSsh (glusterEndpoint, cmd, ignoreError = false) { + let result + const formatSshError = (result) => { + const messageArray = [] + const messageKeys = Object.keys(result) + const orderedKeys = ['stderr', 'stdout', 'exit'] + for (const key of orderedKeys) { + const idx = messageKeys.indexOf(key) + if (idx !== -1) { + messageKeys.splice(idx, 1) + } + messageArray.push(`${key}: ${result[key]}`) + } + messageArray.push('command: ' + result['command'].join(' ')) + messageKeys.splice(messageKeys.indexOf('command'), 1) + for (const key of messageKeys) { + messageArray.push(`${key}: ${JSON.stringify(result[key])}`) + } + return messageArray.join('\n') + } + + for (const address of glusterEndpoint.addresses) { + for (const host of glusterEndpoint.hosts) { + try { + result = await callPlugin(glusterEndpoint.xapi, host, 'run_ssh', {destination: 'root@' + address, cmd: cmd}) + break + } catch (exception) { + if (exception['code'] !== 'HOST_OFFLINE') { + throw exception + } + } + } + debug(result.command.join(' '), '\n =>exit:', result.exit, '\n =>err :', result.stderr, + '\n =>out (1000 chars) :', result.stdout.substring(0, 
1000)) + // 255 seems to be ssh's own error codes. + if (result.exit !== 255) { + if (!ignoreError && result.exit !== 0) { + throw new Error(formatSshError(result)) + } + return result + } + } + throw new Error(result != null ? formatSshError(result) : 'no suitable SSH host: ' + + JSON.stringify(glusterEndpoint)) +} + +function findErrorMessage (commandResut) { + if (commandResut['exit'] === 0 && commandResut.parsed) { + const cliOut = commandResut.parsed['cliOutput'] + if (cliOut['opErrstr'] && cliOut['opErrstr'].length) { + return cliOut['opErrstr'] + } + // "peer probe" returns it's "already in peer" error in cliOutput/output + if (cliOut['output'] && cliOut['output'].length) { + return cliOut['output'] + } + } + return commandResut['stderr'].length ? commandResut['stderr'] : commandResut['stdout'] +} + +async function glusterCmd (glusterEndpoint, cmd, ignoreError = false) { + const result = await remoteSsh(glusterEndpoint, `gluster --mode=script --xml ${cmd}`, true) + try { + result.parsed = parseXml(result['stdout']) + } catch (e) { + // pass, we never know if a message can be parsed or not, so we just try + } + if (result['exit'] === 0) { + const cliOut = result.parsed['cliOutput'] + // we have found cases where opErrno is !=0 and opRet was 0, albeit the operation was an error. 
+ result.commandStatus = cliOut['opRet'].trim() === '0' && cliOut['opErrno'].trim() === '0' + result.error = findErrorMessage(result) + } else { + result.commandStatus = false + // "gluster volume status" timeout error message is reported on stdout instead of stderr + result.error = findErrorMessage(result) + } + if (!ignoreError && !result.commandStatus) { + const error = new Error(`error in gluster "${result.error}"`) + error.result = result + throw error + } + return result +} + +const createNetworkAndInsertHosts = defer(async function ($defer, xapi, pif, vlan, networkPrefix) { + let hostIpLastNumber = HOST_FIRST_NUMBER + const xosanNetwork = await xapi.createNetwork({ + name: 'XOSAN network', + description: 'XOSAN network', + pifId: pif._xapiId, + mtu: pif.mtu, + vlan: +vlan, + }) + $defer.onFailure(() => xapi.deleteNetwork(xosanNetwork)) + const addresses = xosanNetwork.$PIFs.map(pif => ({pif, address: networkPrefix + (hostIpLastNumber++)})) + await asyncMap(addresses, addressAndPif => reconfigurePifIP(xapi, addressAndPif.pif, addressAndPif.address)) + const master = xapi.pool.$master + const otherAddresses = addresses.filter(addr => addr.pif.$host !== master) + await asyncMap(otherAddresses, async (address) => { + const result = await callPlugin(xapi, master, 'run_ping', {address: address.address}) + if (result.exit !== 0) { + throw invalidParameters(`Could not ping ${master.name_label}->${address.pif.$host.name_label} (${address.address}) \n${result.stdout}`) + } + }) + return xosanNetwork +}) + +async function getOrCreateSshKey (xapi) { + let sshKey = xapi.xo.getData(xapi.pool, 'xosan_ssh_key') + + if (!sshKey) { + const readKeys = async () => { + sshKey = { + private: await fs.readFile(SSH_KEY_FILE, 'ascii'), + public: await fs.readFile(SSH_KEY_FILE + '.pub', 'ascii'), + } + xapi.xo.setData(xapi.pool, 'xosan_ssh_key', sshKey) + } + + try { + await readKeys() + } catch (e) { + await execa('ssh-keygen', ['-q', '-f', SSH_KEY_FILE, '-t', 'rsa', '-b', '4096', 
'-N', '']) + await readKeys() + } + } + + return sshKey +} + +const _probePoolAndWaitForPresence = defer(async function ($defer, glusterEndpoint, addresses) { + await asyncMap(addresses, async (address) => { + await glusterCmd(glusterEndpoint, 'peer probe ' + address) + $defer.onFailure(() => glusterCmd(glusterEndpoint, 'peer detach ' + address, true)) + }) + + function shouldRetry (peers) { + for (const peer of peers) { + if (peer.state === '4') { + return true + } + if (peer.state === '6') { + throw new Error(`${peer.hostname} is not in pool ("${peer.stateStr}")`) + } + } + return false + } + + const getPoolStatus = async () => (await glusterCmd(glusterEndpoint, 'pool list')).parsed.cliOutput.peerStatus.peer + return rateLimitedRetry(getPoolStatus, shouldRetry) +}) + +async function configureGluster (redundancy, ipAndHosts, glusterEndpoint, glusterType, arbiter = null) { + const configByType = { + replica_arbiter: { + creation: 'replica 3 arbiter 1', + extra: [], + }, + replica: { + creation: 'replica ' + redundancy + ' ', + extra: ['volume set xosan cluster.data-self-heal on'], + }, + disperse: { + creation: 'disperse ' + ipAndHosts.length + ' redundancy ' + redundancy + ' ', + extra: [], + }, + } + const brickVms = arbiter ? 
ipAndHosts.concat(arbiter) : ipAndHosts + await _probePoolAndWaitForPresence(glusterEndpoint, map(brickVms.slice(1), bv => bv.address)) + const creation = configByType[glusterType].creation + const volumeCreation = 'volume create xosan ' + creation + ' ' + + brickVms.map(ipAndHost => ipAndHost.brickName).join(' ') + debug('creating volume: ', volumeCreation) + await glusterCmd(glusterEndpoint, volumeCreation) + await glusterCmd(glusterEndpoint, 'volume set xosan network.remote-dio enable') + await glusterCmd(glusterEndpoint, 'volume set xosan cluster.eager-lock enable') + await glusterCmd(glusterEndpoint, 'volume set xosan cluster.locking-scheme granular') + await glusterCmd(glusterEndpoint, 'volume set xosan performance.io-cache off') + await glusterCmd(glusterEndpoint, 'volume set xosan performance.read-ahead off') + await glusterCmd(glusterEndpoint, 'volume set xosan performance.quick-read off') + await glusterCmd(glusterEndpoint, 'volume set xosan performance.strict-write-ordering off') + await glusterCmd(glusterEndpoint, 'volume set xosan client.event-threads 8') + await glusterCmd(glusterEndpoint, 'volume set xosan server.event-threads 8') + await glusterCmd(glusterEndpoint, 'volume set xosan performance.io-thread-count 64') + await glusterCmd(glusterEndpoint, 'volume set xosan performance.stat-prefetch on') + await glusterCmd(glusterEndpoint, 'volume set xosan performance.low-prio-threads 32') + await glusterCmd(glusterEndpoint, 'volume set xosan features.shard on') + await glusterCmd(glusterEndpoint, 'volume set xosan features.shard-block-size 512MB') + await glusterCmd(glusterEndpoint, 'volume set xosan user.cifs off') + for (const confChunk of configByType[glusterType].extra) { + await glusterCmd(glusterEndpoint, confChunk) + } + await glusterCmd(glusterEndpoint, 'volume start xosan') + await _setQuota(glusterEndpoint) +} + +async function _setQuota (glusterEndpoint) { + await glusterCmd(glusterEndpoint, 'volume quota xosan enable', true) + await 
glusterCmd(glusterEndpoint, 'volume set xosan quota-deem-statfs on', true) + await glusterCmd(glusterEndpoint, `volume quota xosan limit-usage / ${XOSAN_LICENSE_QUOTA}B`, true) +} + +async function _removeQuota (glusterEndpoint) { + await glusterCmd(glusterEndpoint, 'volume quota xosan disable', true) +} + +export const createSR = defer(async function ($defer, { + template, pif, vlan, srs, glusterType, + redundancy, brickSize = this::computeBrickSize(srs), memorySize = 4 * GIGABYTE, ipRange = DEFAULT_NETWORK_PREFIX + '.0', +}) { + const OPERATION_OBJECT = { + operation: 'createSr', + states: ['configuringNetwork', 'importingVm', 'copyingVms', + 'configuringVms', 'configuringGluster', 'creatingSr', 'scanningSr'], + } + if (!this.requestResource) { + throw new Error('requestResource is not a function') + } + + if (srs.length < 1) { + return // TODO: throw an error + } + + const xapi = this.getXapi(srs[0]) + const poolId = xapi.pool.$id + if (CURRENT_POOL_OPERATIONS[poolId]) { + throw new Error('createSR is already running for this pool') + } + + CURRENT_POOL_OPERATIONS[poolId] = {...OPERATION_OBJECT, state: 0} + + const tmpBoundObjectId = srs.join(',') + const license = await this.createBoundXosanTrialLicense({ boundObjectId: tmpBoundObjectId }) + $defer.onFailure(() => this.unbindXosanLicense({ srId: tmpBoundObjectId })) + + // '172.31.100.0' -> '172.31.100.' + const networkPrefix = ipRange.split('.').slice(0, 3).join('.') + '.' 
+ let vmIpLastNumber = VM_FIRST_NUMBER + + try { + const xosanNetwork = await createNetworkAndInsertHosts(xapi, pif, vlan, networkPrefix) + $defer.onFailure(() => xapi.deleteNetwork(xosanNetwork)) + const sshKey = await getOrCreateSshKey(xapi) + const srsObjects = map(srs, srId => xapi.getObject(srId)) + await Promise.all(srsObjects.map(sr => callPlugin(xapi, sr.$PBDs[0].$host, 'receive_ssh_keys', { + private_key: sshKey.private, + public_key: sshKey.public, + force: 'true', + }))) + + const firstSr = srsObjects[0] + CURRENT_POOL_OPERATIONS[poolId] = {...OPERATION_OBJECT, state: 1} + const firstVM = await this::_importGlusterVM(xapi, template, firstSr) + $defer.onFailure(() => xapi.deleteVm(firstVM, true)) + CURRENT_POOL_OPERATIONS[poolId] = {...OPERATION_OBJECT, state: 2} + const copiedVms = await asyncMap(srsObjects.slice(1), sr => + copyVm(xapi, firstVM, sr)::tap(({vm}) => + $defer.onFailure(() => xapi.deleteVm(vm)) + ) + ) + const vmsAndSrs = [{ + vm: firstVM, + sr: firstSr, + }].concat(copiedVms) + let arbiter = null + CURRENT_POOL_OPERATIONS[poolId] = {...OPERATION_OBJECT, state: 3} + if (srs.length === 2) { + const sr = firstSr + const arbiterIP = networkPrefix + (vmIpLastNumber++) + const arbiterVm = await xapi.copyVm(firstVM, sr) + $defer.onFailure(() => xapi.deleteVm(arbiterVm, true)) + arbiter = await _prepareGlusterVm(xapi, sr, arbiterVm, xosanNetwork, arbiterIP, { + labelSuffix: '_arbiter', + increaseDataDisk: false, + memorySize, + }) + arbiter.arbiter = true + } + const ipAndHosts = await asyncMap(vmsAndSrs, vmAndSr => _prepareGlusterVm(xapi, vmAndSr.sr, vmAndSr.vm, xosanNetwork, + networkPrefix + (vmIpLastNumber++), {maxDiskSize: brickSize, memorySize})) + const glusterEndpoint = {xapi, hosts: map(ipAndHosts, ih => ih.host), addresses: map(ipAndHosts, ih => ih.address)} + CURRENT_POOL_OPERATIONS[poolId] = {...OPERATION_OBJECT, state: 4} + await configureGluster(redundancy, ipAndHosts, glusterEndpoint, glusterType, arbiter) + debug('xosan gluster 
volume started') + // We use 10 IPs of the gluster VM range as backup, in the hope that even if the first VM gets destroyed we find at least + // one VM to give mount the volfile. + // It is not possible to edit the device_config after the SR is created and this data is only used at mount time when rebooting + // the hosts. + const backupservers = map(range(VM_FIRST_NUMBER, VM_FIRST_NUMBER + 10), ipLastByte => networkPrefix + ipLastByte).join(':') + const config = {server: ipAndHosts[0].address + ':/xosan', backupservers} + CURRENT_POOL_OPERATIONS[poolId] = {...OPERATION_OBJECT, state: 5} + const xosanSrRef = await xapi.call('SR.create', firstSr.$PBDs[0].$host.$ref, config, 0, 'XOSAN', 'XOSAN', + 'xosan', '', true, {}) + debug('sr created') + // we just forget because the cleanup actions are stacked in the $onFailure system + $defer.onFailure(() => xapi.forgetSr(xosanSrRef)) + if (arbiter) { + ipAndHosts.push(arbiter) + } + const nodes = ipAndHosts.map(param => ({ + brickName: param.brickName, + host: param.host.$id, + vm: {id: param.vm.$id, ip: param.address}, + underlyingSr: param.underlyingSr.$id, + arbiter: !!param['arbiter'], + })) + await xapi.xo.setData(xosanSrRef, 'xosan_config', { + version: 'beta2', + creationDate: new Date().toISOString(), + nodes: nodes, + template: template, + network: xosanNetwork.$id, + type: glusterType, + networkPrefix, + redundancy, + }) + CURRENT_POOL_OPERATIONS[poolId] = {...OPERATION_OBJECT, state: 6} + debug('scanning new SR') + await xapi.call('SR.scan', xosanSrRef) + await this.rebindLicense({ + licenseId: license.id, + oldBoundObjectId: tmpBoundObjectId, + newBoundObjectId: xapi.getObject(xosanSrRef).uuid, + }) + } finally { + delete CURRENT_POOL_OPERATIONS[poolId] + } +}) + +createSR.description = 'create gluster VM' +createSR.permission = 'admin' +createSR.params = { + srs: { + type: 'array', + items: { + type: 'string', + }, + }, + pif: { + type: 'string', + }, + vlan: { + type: 'string', + }, + glusterType: { + type: 
createSR.resolve = {
  srs: ['sr', 'SR', 'administrate'],
  pif: ['pif', 'PIF', 'administrate'],
}

// Unmounts a brick disk on the VM reached through `localEndpoint`, removes
// its /etc/fstab entry and deletes the mount point directory.
async function umountDisk (localEndpoint, diskMountPoint) {
  await remoteSsh(localEndpoint, `killall -v -w /usr/sbin/xfs_growfs; fuser -v ${diskMountPoint}; umount ${diskMountPoint} && sed -i '\\_${diskMountPoint}\\S_d' /etc/fstab && rm -rf ${diskMountPoint}`)
}

// this is mostly what the LVM SR driver does, but we are avoiding the 2TB limit it imposes.
// Creates a raw LV on the SR's volume group through the `run_lvcreate` host
// plugin, rescans the SR and returns the matching VDI.
// NOTE(review): returns undefined when the rescan does not surface the new
// VDI — callers should be prepared for that; confirm whether this silent
// case is intended.
async function createVDIOnLVMWithoutSizeLimit (xapi, lvmSr, diskSize) {
  const VG_PREFIX = 'VG_XenStorage-'
  const LV_PREFIX = 'LV-'
  const { type, uuid: srUuid, $PBDs } = xapi.getObject(lvmSr)
  if (type !== 'lvm') {
    // FIX: added the missing space in the message (was `got"lvm"`) and use
    // `new Error` for consistency with the rest of the file.
    throw new Error('expecting a lvm sr type, got "' + type + '"')
  }
  const uuid = generateUuid()
  const lvName = LV_PREFIX + uuid
  const vgName = VG_PREFIX + srUuid
  const host = $PBDs[0].$host
  const sizeMb = Math.ceil(diskSize / 1024 / 1024)
  const result = await callPlugin(xapi, host, 'run_lvcreate', { sizeMb: asInteger(sizeMb), lvName, vgName })
  if (result.exit !== 0) {
    throw new Error('Could not create volume ->' + result.stdout)
  }
  await xapi.call('SR.scan', xapi.getObject(lvmSr).$ref)
  const vdi = find(xapi.getObject(lvmSr).$VDIs, vdi => vdi.uuid === uuid)
  if (vdi != null) {
    await xapi.setSrProperties(vdi.$ref, { nameLabel: 'xosan_data', nameDescription: 'Created by XO' })
    return vdi
  }
}

// Creates a new data disk on `sr`, attaches it to `vm` and returns the
// device path (e.g. '/dev/xvdb') once the VBD reports its device name.
async function createNewDisk (xapi, sr, vm, diskSize) {
  const newDisk = await createVDIOnLVMWithoutSizeLimit(xapi, sr, diskSize)
  await xapi.createVbd({ vdi: newDisk, vm })
  // FIX: `await` applies to the whole member expression, so the original
  // `await xapi._waitObjectState(...).$VBDs[0]` read `$VBDs` off the pending
  // promise (undefined) and crashed with a TypeError. Await the refreshed
  // object first, then read its first VBD.
  const disk = await xapi._waitObjectState(newDisk.$id, disk => Boolean(disk.$VBDs.length))
  let vbd = disk.$VBDs[0]
  // wait until the VBD has been assigned a device name by the guest tools
  vbd = await xapi._waitObjectState(vbd.$id, vbd => Boolean(vbd.device.length))
  return '/dev/' + vbd.device
}
(localEndpoint, hostname, newDeviceFiledeviceFile) { + const brickRootCmd = 'bash -c \'mkdir -p /bricks; for TESTVAR in {1..9}; do TESTDIR="/bricks/xosan$TESTVAR" ;if mkdir $TESTDIR; then echo $TESTDIR; exit 0; fi ; done ; exit 1\'' + const newBrickRoot = (await remoteSsh(localEndpoint, brickRootCmd)).stdout.trim() + const brickName = `${hostname}:${newBrickRoot}/xosandir` + const mountBrickCmd = `mkfs.xfs -i size=512 ${newDeviceFiledeviceFile}; mkdir -p ${newBrickRoot}; echo "${newDeviceFiledeviceFile} ${newBrickRoot} xfs defaults 0 0" >> /etc/fstab; mount -a` + await remoteSsh(localEndpoint, mountBrickCmd) + return brickName +} + +async function replaceBrickOnSameVM (xosansr, previousBrick, newLvmSr, brickSize) { + const OPERATION_OBJECT = { + operation: 'replaceBrick', + states: ['creatingNewDisk', 'mountingDisk', 'swappingBrick', 'disconnectingOldDisk', 'scanningSr'], + } + const xapi = this.getXapi(xosansr) + const poolId = xapi.pool.$id + try { + CURRENT_POOL_OPERATIONS[poolId] = {...OPERATION_OBJECT, state: 0} + + // TODO: a bit of user input validation on 'previousBrick', it's going to ssh + const previousIp = previousBrick.split(':')[0] + brickSize = brickSize === undefined ? 
Infinity : brickSize + const data = this::getXosanConfig(xosansr) + const nodes = data.nodes + const nodeIndex = nodes.findIndex(node => node.vm.ip === previousIp) + const glusterEndpoint = this::_getGlusterEndpoint(xosansr) + const previousVM = _getIPToVMDict(xapi, xosansr)[previousBrick].vm + const newDeviceFile = await createNewDisk(xapi, newLvmSr, previousVM, brickSize) + const localEndpoint = { + xapi, + hosts: map(nodes, node => xapi.getObject(node.host)), + addresses: [previousIp], + } + const previousBrickRoot = previousBrick.split(':')[1].split('/').slice(0, 3).join('/') + const previousBrickDevice = (await remoteSsh(localEndpoint, `grep " ${previousBrickRoot} " /proc/mounts | cut -d ' ' -f 1 | sed 's_/dev/__'`)).stdout.trim() + CURRENT_POOL_OPERATIONS[poolId] = {...OPERATION_OBJECT, state: 1} + const brickName = await mountNewDisk(localEndpoint, previousIp, newDeviceFile) + CURRENT_POOL_OPERATIONS[poolId] = {...OPERATION_OBJECT, state: 2} + await glusterCmd(glusterEndpoint, `volume replace-brick xosan ${previousBrick} ${brickName} commit force`) + nodes[nodeIndex].brickName = brickName + nodes[nodeIndex].underlyingSr = newLvmSr + await xapi.xo.setData(xosansr, 'xosan_config', data) + CURRENT_POOL_OPERATIONS[poolId] = {...OPERATION_OBJECT, state: 3} + await umountDisk(localEndpoint, previousBrickRoot) + const previousVBD = previousVM.$VBDs.find(vbd => vbd.device === previousBrickDevice) + await xapi.disconnectVbd(previousVBD) + await xapi.deleteVdi(previousVBD.VDI) + CURRENT_POOL_OPERATIONS[poolId] = {...OPERATION_OBJECT, state: 4} + await xapi.call('SR.scan', xapi.getObject(xosansr).$ref) + } finally { + delete CURRENT_POOL_OPERATIONS[poolId] + } +} + +export async function replaceBrick ({xosansr, previousBrick, newLvmSr, brickSize, onSameVM = true}) { + await this.checkXosanLicense({ srId: xosansr.uuid }) + + const OPERATION_OBJECT = { + operation: 'replaceBrick', + states: ['insertingNewVm', 'swapingBrick', 'deletingVm', 'scaningSr'], + } + if 
(onSameVM) { + return this::replaceBrickOnSameVM(xosansr, previousBrick, newLvmSr, brickSize) + } + const xapi = this.getXapi(xosansr) + const poolId = xapi.pool.$id + try { + // TODO: a bit of user input validation on 'previousBrick', it's going to ssh + const previousIp = previousBrick.split(':')[0] + brickSize = brickSize === undefined ? Infinity : brickSize + const data = getXosanConfig(xosansr, xapi) + const nodes = data.nodes + const newIpAddress = _findAFreeIPAddress(nodes, data.networkPrefix) + const nodeIndex = nodes.findIndex(node => node.vm.ip === previousIp) + const stayingNodes = filter(nodes, (node, index) => index !== nodeIndex) + const glusterEndpoint = { + xapi, + hosts: map(stayingNodes, node => xapi.getObject(node.host)), + addresses: map(stayingNodes, node => node.vm.ip), + } + const previousVMEntry = _getIPToVMDict(xapi, xosansr)[previousBrick] + const arbiter = nodes[nodeIndex].arbiter + CURRENT_POOL_OPERATIONS[poolId] = {...OPERATION_OBJECT, state: 0} + const {newVM, addressAndHost} = await this::insertNewGlusterVm(xapi, xosansr, newLvmSr, + {labelSuffix: arbiter ? 
'_arbiter' : '', glusterEndpoint, newIpAddress, increaseDataDisk: !arbiter, brickSize}) + CURRENT_POOL_OPERATIONS[poolId] = {...OPERATION_OBJECT, state: 1} + await glusterCmd(glusterEndpoint, `volume replace-brick xosan ${previousBrick} ${addressAndHost.brickName} commit force`) + await glusterCmd(glusterEndpoint, 'peer detach ' + previousIp) + data.nodes.splice(nodeIndex, 1, { + brickName: addressAndHost.brickName, + host: addressAndHost.host.$id, + arbiter: arbiter, + vm: {ip: addressAndHost.address, id: newVM.$id}, + underlyingSr: newLvmSr, + }) + await xapi.xo.setData(xosansr, 'xosan_config', data) + CURRENT_POOL_OPERATIONS[poolId] = {...OPERATION_OBJECT, state: 2} + if (previousVMEntry) { + await xapi.deleteVm(previousVMEntry.vm, true) + } + CURRENT_POOL_OPERATIONS[poolId] = {...OPERATION_OBJECT, state: 3} + await xapi.call('SR.scan', xapi.getObject(xosansr).$ref) + } finally { + delete CURRENT_POOL_OPERATIONS[poolId] + } +} + +replaceBrick.description = 'replaceBrick brick in gluster volume' +replaceBrick.permission = 'admin' +replaceBrick.params = { + xosansr: {type: 'string'}, + previousBrick: {type: 'string'}, + newLvmSr: {type: 'string'}, + brickSize: {type: 'number'}, +} + +replaceBrick.resolve = { + xosansr: ['sr', 'SR', 'administrate'], +} + +async function _prepareGlusterVm (xapi, lvmSr, newVM, xosanNetwork, ipAddress, { + labelSuffix = '', increaseDataDisk = true, + maxDiskSize = Infinity, memorySize = 2 * GIGABYTE, +}) { + const host = lvmSr.$PBDs[0].$host + const xenstoreData = { + 'vm-data/hostname': 'XOSAN' + lvmSr.name_label + labelSuffix, + 'vm-data/sshkey': (await getOrCreateSshKey(xapi)).public, + 'vm-data/ip': ipAddress, + 'vm-data/mtu': String(xosanNetwork.MTU), + 'vm-data/vlan': String(xosanNetwork.$PIFs[0].vlan || 0), + } + const ip = ipAddress + const sr = xapi.getObject(lvmSr.$id) + // refresh the object so that sizes are correct + await xapi._waitObjectState(sr.$id, sr => Boolean(sr.$PBDs)) + const firstVif = newVM.$VIFs[0] + if 
(xosanNetwork.$id !== firstVif.$network.$id) { + try { + await xapi.call('VIF.move', firstVif.$ref, xosanNetwork.$ref) + } catch (error) { + if (error.code === 'MESSAGE_METHOD_UNKNOWN') { + // VIF.move has been introduced in xenserver 7.0 + await xapi.deleteVif(firstVif.$id) + await xapi.createVif(newVM.$id, xosanNetwork.$id, firstVif) + } + } + } + await xapi.addTag(newVM.$id, `XOSAN-${xapi.pool.name_label}`) + await xapi.editVm(newVM, { + name_label: `XOSAN - ${lvmSr.name_label} - ${host.name_label} ${labelSuffix}`, + name_description: 'Xosan VM storage', + memory: memorySize, + }) + await xapi.call('VM.set_xenstore_data', newVM.$ref, xenstoreData) + const rootDisk = newVM.$VBDs.map(vbd => vbd && vbd.$VDI).find(vdi => vdi && vdi.name_label === 'xosan_root') + const rootDiskSize = rootDisk.virtual_size + await xapi.startVm(newVM) + debug('waiting for boot of ', ip) + // wait until we find the assigned IP in the networks, we are just checking the boot is complete + const vmIsUp = vm => Boolean(vm.$guest_metrics && includes(vm.$guest_metrics.networks, ip)) + const vm = await xapi._waitObjectState(newVM.$id, vmIsUp) + debug('booted ', ip) + const localEndpoint = {xapi: xapi, hosts: [host], addresses: [ip]} + const srFreeSpace = sr.physical_size - sr.physical_utilisation + // we use a percentage because it looks like the VDI overhead is proportional + const newSize = floor2048(Math.min(maxDiskSize - rootDiskSize, srFreeSpace * XOSAN_DATA_DISK_USEAGE_RATIO)) + const smallDiskSize = 1073741824 + const deviceFile = await createNewDisk(xapi, lvmSr, newVM, increaseDataDisk ? 
// Imports and boots the XOSAN gluster template VM on the given SR.
// NOTE(review): relies on `this.requestResource` (xo mixin) — must be
// invoked with `this` bound, e.g. `this::_importGlusterVM(...)`.
async function _importGlusterVM (xapi, template, lvmsrId) {
  const templateStream = await this.requestResource('xosan', template.id, template.version)
  const newVM = await xapi.importVm(templateStream, { srId: lvmsrId, type: 'xva' })
  await xapi.editVm(newVM, {
    autoPoweron: true,
    name_label: 'XOSAN imported VM',
    name_description: 'freshly imported',
  })
  // barrier() returns the VM object once the XAPI cache is up to date
  return xapi.barrier(newVM.$ref)
}

// Returns the first IP of the XOSAN range not already used by a node VM.
function _findAFreeIPAddress (nodes, networkPrefix) {
  return _findIPAddressOutsideList(map(nodes, n => n.vm.ip), networkPrefix)
}

// Returns the first address `networkPrefix + i` (i in [vmIpLastNumber, 255))
// absent from `reservedList`, or null when the range is exhausted.
function _findIPAddressOutsideList (reservedList, networkPrefix, vmIpLastNumber = 101) {
  for (let i = vmIpLastNumber; i < 255; i++) {
    const candidate = networkPrefix + i
    // FIX(idiom): `includes` instead of `find(a => a === candidate)`
    if (!reservedList.includes(candidate)) {
      return candidate
    }
  }
  return null
}

// Lower median (no averaging for even-length input).
// FIX: sort a copy instead of mutating the caller's array in place.
const _median = arr => {
  const sorted = arr.slice().sort((a, b) => a - b)
  return sorted[Math.floor(sorted.length / 2)]
}
+ const newVM = await this::_importGlusterVM(xapi, data.template, lvmsrId) + $defer.onFailure(() => xapi.deleteVm(newVM, true)) + const addressAndHost = await _prepareGlusterVm(xapi, srObject, newVM, xosanNetwork, ipAddress, { + labelSuffix, + increaseDataDisk, + maxDiskSize: brickSize, + memorySize: vmsMemories.length ? _median(vmsMemories) : 2 * GIGABYTE, + }) + if (!glusterEndpoint) { + glusterEndpoint = this::_getGlusterEndpoint(xosansr) + } + await _probePoolAndWaitForPresence(glusterEndpoint, [addressAndHost.address]) + return {data, newVM, addressAndHost, glusterEndpoint} +}) + +export const addBricks = defer(async function ($defer, {xosansr, lvmsrs, brickSize}) { + await this.checkXosanLicense({ srId: xosansr.uuid }) + + const OPERATION_OBJECT = { + operation: 'addBricks', + states: ['insertingNewVms', 'addingBricks', 'scaningSr'], + } + const xapi = this.getXapi(xosansr) + const poolId = xapi.pool.$id + if (CURRENT_POOL_OPERATIONS[poolId]) { + throw new Error('createSR is already running for this pool') + } + CURRENT_POOL_OPERATIONS[poolId] = {...OPERATION_OBJECT, state: 0} + try { + const data = getXosanConfig(xosansr, xapi) + const usedAddresses = map(data.nodes, n => n.vm.ip) + const glusterEndpoint = this::_getGlusterEndpoint(xosansr) + const newAddresses = [] + const newNodes = [] + for (const newSr of lvmsrs) { + const ipAddress = _findIPAddressOutsideList(usedAddresses.concat(newAddresses), data.networkPrefix) + newAddresses.push(ipAddress) + const {newVM, addressAndHost} = await this::insertNewGlusterVm(xapi, xosansr, newSr, {ipAddress, brickSize}) + $defer.onFailure(() => glusterCmd(glusterEndpoint, 'peer detach ' + ipAddress, true)) + $defer.onFailure(() => xapi.deleteVm(newVM, true)) + const brickName = addressAndHost.brickName + newNodes.push({brickName, host: addressAndHost.host.$id, vm: {id: newVM.$id, ip: ipAddress}, underlyingSr: newSr}) + } + const arbiterNode = data.nodes.find(n => n['arbiter']) + if (arbiterNode) { + await 
glusterCmd(glusterEndpoint, + `volume remove-brick xosan replica ${data.nodes.length - 1} ${arbiterNode.brickName} force`) + data.nodes = data.nodes.filter(n => n !== arbiterNode) + data.type = 'replica' + await xapi.xo.setData(xosansr, 'xosan_config', data) + await glusterCmd(glusterEndpoint, 'peer detach ' + arbiterNode.vm.ip, true) + await xapi.deleteVm(arbiterNode.vm.id, true) + } + CURRENT_POOL_OPERATIONS[poolId] = {...OPERATION_OBJECT, state: 1} + await glusterCmd(glusterEndpoint, `volume add-brick xosan ${newNodes.map(n => n.brickName).join(' ')}`) + data.nodes = data.nodes.concat(newNodes) + await xapi.xo.setData(xosansr, 'xosan_config', data) + CURRENT_POOL_OPERATIONS[poolId] = {...OPERATION_OBJECT, state: 2} + await xapi.call('SR.scan', xapi.getObject(xosansr).$ref) + } finally { + delete CURRENT_POOL_OPERATIONS[poolId] + } +}) + +addBricks.description = 'add brick to XOSAN SR' +addBricks.permission = 'admin' +addBricks.params = { + xosansr: {type: 'string'}, + lvmsrs: { + type: 'array', + items: { + type: 'string', + }, + }, + brickSize: {type: 'number'}, +} + +addBricks.resolve = { + xosansr: ['sr', 'SR', 'administrate'], + lvmsrs: ['sr', 'SR', 'administrate'], +} + +export const removeBricks = defer(async function ($defer, {xosansr, bricks}) { + await this.checkXosanLicense({ srId: xosansr.uuid }) + + const xapi = this.getXapi(xosansr) + if (CURRENT_POOL_OPERATIONS[xapi.pool.$id]) { + throw new Error('this there is already a XOSAN operation running on this pool') + } + CURRENT_POOL_OPERATIONS[xapi.pool.$id] = true + try { + const data = getXosanConfig(xosansr.id, xapi) + // IPV6 + const ips = map(bricks, b => b.split(':')[0]) + const glusterEndpoint = this::_getGlusterEndpoint(xosansr.id) + // "peer detach" doesn't allow removal of locahost + remove(glusterEndpoint.addresses, ip => ips.includes(ip)) + const dict = _getIPToVMDict(xapi, xosansr.id) + const brickVMs = map(bricks, b => dict[b]) + await glusterCmd(glusterEndpoint, `volume remove-brick xosan 
removeBricks.description = 'remove brick from XOSAN SR'
removeBricks.permission = 'admin'
removeBricks.params = {
  xosansr: { type: 'string' },
  bricks: {
    type: 'array',
    items: { type: 'string' },
  },
}
removeBricks.resolve = { xosansr: ['sr', 'SR', 'administrate'] }

// Reports the XOSAN operation currently running on the given pool, if any.
export function checkSrCurrentState ({ poolId }) {
  return CURRENT_POOL_OPERATIONS[poolId]
}

checkSrCurrentState.description = 'checks if there is an operation currently running on the SR'
checkSrCurrentState.permission = 'admin'
checkSrCurrentState.params = { poolId: { type: 'string' } }

// Supported gluster layouts keyed by number of storage nodes; each entry
// lists the layout/redundancy trade-offs and the resulting capacity
// (in bricks).
const POSSIBLE_CONFIGURATIONS = {
  2: [{ layout: 'replica_arbiter', redundancy: 3, capacity: 1 }],
  3: [
    { layout: 'replica', redundancy: 3, capacity: 1 },
    { layout: 'disperse', redundancy: 1, capacity: 2 },
  ],
  4: [{ layout: 'replica', redundancy: 2, capacity: 2 }],
  5: [{ layout: 'disperse', redundancy: 1, capacity: 4 }],
  6: [
    { layout: 'replica', redundancy: 2, capacity: 3 },
    { layout: 'replica', redundancy: 3, capacity: 2 },
    { layout: 'disperse', redundancy: 2, capacity: 4 },
  ],
  7: [{ layout: 'disperse', redundancy: 3, capacity: 4 }],
  8: [{ layout: 'replica', redundancy: 2, capacity: 4 }],
  9: [
    { layout: 'replica', redundancy: 3, capacity: 3 },
    { layout: 'disperse', redundancy: 1, capacity: 8 },
  ],
  10: [
    { layout: 'replica', redundancy: 2, capacity: 5 },
    { layout: 'disperse', redundancy: 2, capacity: 8 },
  ],
  11: [{ layout: 'disperse', redundancy: 3, capacity: 8 }],
  12: [
    { layout: 'replica', redundancy: 2, capacity: 6 },
    { layout: 'disperse', redundancy: 4, capacity: 8 },
  ],
  13: [{ layout: 'disperse', redundancy: 5, capacity: 8 }],
  14: [
    { layout: 'replica', redundancy: 2, capacity: 7 },
    { layout: 'disperse', redundancy: 6, capacity: 8 },
  ],
  15: [
    { layout: 'replica', redundancy: 3, capacity: 5 },
    { layout: 'disperse', redundancy: 7, capacity: 8 },
  ],
  16: [{ layout: 'replica', redundancy: 2, capacity: 8 }],
}

// Computes the usable brick size: the smallest free space among `srs`
// (capped by `brickSize`), minus the XOSAN VM system disk, scaled by the
// data-disk usage ratio. Must be invoked with `this` bound to xo
// (uses this.getXapi).
function computeBrickSize (srs, brickSize = Infinity) {
  const xapi = this.getXapi(srs[0])
  const freeSpaces = srs.map(srId => {
    const sr = xapi.getObject(srId)
    return sr.physical_size - sr.physical_utilisation
  })
  const minSize = Math.min(brickSize, ...freeSpaces)
  return Math.floor((minSize - XOSAN_VM_SYSTEM_DISK_SIZE) * XOSAN_DATA_DISK_USEAGE_RATIO)
}

// Lists the XOSAN configurations possible with the given SRs, each decorated
// with the space it would make available; null when the SR count is not
// supported.
export async function computeXosanPossibleOptions ({ lvmSrs, brickSize = Infinity }) {
  const configurations = POSSIBLE_CONFIGURATIONS[lvmSrs.length]
  if (!configurations) {
    return null
  }
  if (lvmSrs.length > 0) {
    const finalBrickSize = computeBrickSize.call(this, lvmSrs, brickSize)
    return configurations.map(conf => ({
      ...conf,
      availableSpace: Math.max(0, finalBrickSize * conf.capacity),
    }))
  }
}

computeXosanPossibleOptions.params = {
  lvmSrs: {
    type: 'array',
    items: { type: 'string' },
  },
  brickSize: { type: 'number', optional: true },
}
glusterEndpoint.xapi.call('SR.scan', glusterEndpoint.xapi.getObject(sr).$ref) +} + +unlock.description = 'Unlock XOSAN SR functionalities by binding it to a paid license' + +unlock.permission = 'admin' + +unlock.params = { + licenseId: { type: 'string' }, + sr: { type: 'string' }, +} + +unlock.resolve = { + sr: ['sr', 'SR', 'administrate'], +} + +// --------------------------------------------------------------------- + +export async function downloadAndInstallXosanPack ({id, version, pool}) { + if (!this.requestResource) { + throw new Error('requestResource is not a function') + } + + const xapi = this.getXapi(pool.id) + const res = await this.requestResource('xosan', id, version) + + await xapi.installSupplementalPackOnAllHosts(res) + await xapi._updateObjectMapProperty(xapi.pool, 'other_config', { + 'xosan_pack_installation_time': String(Math.floor(Date.now() / 1e3)), + }) +} + +downloadAndInstallXosanPack.description = 'Register a resource via cloud plugin' + +downloadAndInstallXosanPack.params = { + id: {type: 'string'}, + version: {type: 'string'}, + pool: {type: 'string'}, +} + +downloadAndInstallXosanPack.resolve = { + pool: ['pool', 'pool', 'administrate'], +} + +downloadAndInstallXosanPack.permission = 'admin' diff --git a/packages/xo-server/src/collection.js b/packages/xo-server/src/collection.js new file mode 100644 index 000000000..184fc749c --- /dev/null +++ b/packages/xo-server/src/collection.js @@ -0,0 +1,172 @@ +import Model from './model' +import {BaseError} from 'make-error' +import {EventEmitter} from 'events' +import { + isArray, + isObject, + map, +} from './utils' + +// =================================================================== + +export class ModelAlreadyExists extends BaseError { + constructor (id) { + super('this model already exists: ' + id) + } +} + +// =================================================================== + +export default class Collection extends EventEmitter { + // Default value for Model. 
+ get Model () { + return Model + } + + // Make this property writable. + set Model (Model) { + Object.defineProperty(this, 'Model', { + configurable: true, + enumerale: true, + value: Model, + writable: true, + }) + } + + async add (models, opts) { + const array = isArray(models) + if (!array) { + models = [models] + } + + const {Model} = this + map(models, model => { + if (!(model instanceof Model)) { + model = new Model(model) + } + + const error = model.validate() + if (error) { + // TODO: Better system inspired by Backbone.js + throw error + } + + return model.properties + }, models) + + models = await this._add(models, opts) + this.emit('add', models) + + return array + ? models + : new this.Model(models[0]) + } + + async first (properties) { + if (!isObject(properties)) { + properties = (properties !== undefined) + ? { id: properties } + : {} + } + + const model = await this._first(properties) + return model && new this.Model(model) + } + + async get (properties) { + if (!isObject(properties)) { + properties = (properties !== undefined) + ? { id: properties } + : {} + } + + return /* await */ this._get(properties) + } + + async remove (ids) { + if (!isArray(ids)) { + ids = [ids] + } + + await this._remove(ids) + + this.emit('remove', ids) + return true + } + + async update (models) { + const array = isArray(models) + if (!isArray(models)) { + models = [models] + } + + const {Model} = this + map(models, model => { + if (!(model instanceof Model)) { + // TODO: Problems, we may be mixing in some default + // properties which will overwrite existing ones. + model = new Model(model) + } + + const id = model.get('id') + + // Missing models should be added not updated. + if (id === undefined) { + // FIXME: should not throw an exception but return a rejected promise. + throw new Error('a model without an id cannot be updated') + } + + const error = model.validate() + if (error !== undefined) { + // TODO: Better system inspired by Backbone.js. 
+ throw error + } + + return model.properties + }, models) + + models = await this._update(models) + this.emit('update', models) + + return array + ? models + : new this.Model(models[0]) + } + + // Methods to override in implementations. + + _add () { + throw new Error('not implemented') + } + + _get () { + throw new Error('not implemented') + } + + _remove () { + throw new Error('not implemented') + } + + _update () { + throw new Error('not implemented') + } + + // Methods which may be overridden in implementations. + + count (properties) { + return this.get(properties).get('count') + } + + exists (properties) { + /* jshint eqnull: true */ + return this.first(properties).then(model => model != null) + } + + async _first (properties) { + const models = await this.get(properties) + + return models.length + ? models[0] + : null + } +} diff --git a/packages/xo-server/src/collection/redis.js b/packages/xo-server/src/collection/redis.js new file mode 100644 index 000000000..d83260dfd --- /dev/null +++ b/packages/xo-server/src/collection/redis.js @@ -0,0 +1,237 @@ +import { createClient as createRedisClient } from 'redis' +import { difference, filter, forEach, isEmpty, keys as getKeys, map } from 'lodash' +import { ignoreErrors, promisifyAll } from 'promise-toolbox' +import { v4 as generateUuid } from 'uuid' + +import Collection, { ModelAlreadyExists } from '../collection' +import { asyncMap } from '../utils' + +// =================================================================== + +// /////////////////////////////////////////////////////////////////// +// Data model: +// - prefix +'_id': value of the last generated identifier; +// - prefix +'_ids': set containing identifier of all models; +// - prefix +'_'+ index +':' + lowerCase(value): set of identifiers +// which have value for the given index. 
+// - prefix +':'+ id: hash containing the properties of a model; +// /////////////////////////////////////////////////////////////////// + +// TODO: then-redis sends commands in order, we should use this +// semantic to simplify the code. + +// TODO: Merge the options in the object to obtain extend-time +// configuration like Backbone. + +// TODO: Remote events. + +const VERSION = '20170905' + +export default class Redis extends Collection { + constructor ({ + connection, + indexes = [], + prefix, + uri, + }) { + super() + + this.indexes = indexes + this.prefix = prefix + const redis = this.redis = promisifyAll(connection || createRedisClient(uri)) + + const key = `${prefix}:version` + redis.get(key).then(version => { + if (version === VERSION) { + return + } + + let p = redis.set(`${prefix}:version`, VERSION) + switch (version) { + case undefined: + // - clean indexes + // - indexes are now case insensitive + p = p.then(() => this.rebuildIndexes()) + } + return p + })::ignoreErrors() + } + + rebuildIndexes () { + const { indexes, prefix, redis } = this + + if (indexes.length === 0) { + return Promise.resolve() + } + + const idsIndex = `${prefix}_ids` + return asyncMap(indexes, index => + redis.keys(`${prefix}_${index}:*`).then(keys => + keys.length !== 0 && redis.del(keys) + ) + ).then(() => asyncMap(redis.smembers(idsIndex), id => + redis.hgetall(`${prefix}:${id}`).then(values => + values == null + ? redis.srem(idsIndex, id) // entry no longer exists + : asyncMap(indexes, index => { + const value = values[index] + if (value !== undefined) { + return redis.sadd( + `${prefix}_${index}:${String(value).toLowerCase()}`, + id + ) + } + }) + ) + )) + } + + _extract (ids) { + const prefix = this.prefix + ':' + const {redis} = this + + const models = [] + return Promise.all(map(ids, id => { + return redis.hgetall(prefix + id).then(model => { + // If empty, consider it a no match. + if (isEmpty(model)) { + return + } + + // Mix the identifier in. 
+ model.id = id + + models.push(model) + }) + })).then(() => models) + } + + _add (models, {replace = false} = {}) { + // TODO: remove “replace” which is a temporary measure, implement + // “set()” instead. + + const {indexes, prefix, redis} = this + + return Promise.all(map(models, async model => { + // Generate a new identifier if necessary. + if (model.id === undefined) { + model.id = generateUuid() + } + const { id } = model + + const newEntry = await redis.sadd(prefix + '_ids', id) + + if (!newEntry) { + if (!replace) { + throw new ModelAlreadyExists(id) + } + + // remove the previous values from indexes + if (indexes.length !== 0) { + const previous = await redis.hgetall(`${prefix}:${id}`) + await asyncMap(indexes, index => { + const value = previous[index] + if (value !== undefined) { + return redis.srem(`${prefix}_${index}:${String(value).toLowerCase()}`, id) + } + }) + } + } + + const params = [] + forEach(model, (value, name) => { + // No need to store the identifier (already in the key). + if (name === 'id') { + return + } + + params.push(name, value) + }) + + const key = `${prefix}:${id}` + const promises = [ + redis.del(key), + redis.hmset(key, ...params), + ] + + // Update indexes. + forEach(indexes, (index) => { + const value = model[index] + if (value === undefined) { + return + } + + const key = prefix + '_' + index + ':' + String(value).toLowerCase() + promises.push(redis.sadd(key, id)) + }) + + await Promise.all(promises) + + return model + })) + } + + _get (properties) { + const {prefix, redis} = this + + if (isEmpty(properties)) { + return redis.smembers(prefix + '_ids').then(ids => this._extract(ids)) + } + + // Special treatment for the identifier. + const id = properties.id + if (id !== undefined) { + delete properties.id + return this._extract([id]).then(models => { + return (models.length && !isEmpty(properties)) + ? filter(models) + : models + }) + } + + const {indexes} = this + + // Check for non indexed fields. 
+ const unfit = difference(getKeys(properties), indexes) + if (unfit.length) { + throw new Error('fields not indexed: ' + unfit.join()) + } + + const keys = map(properties, (value, index) => `${prefix}_${index}:${String(value).toLowerCase()}`) + return redis.sinter(...keys).then(ids => this._extract(ids)) + } + + _remove (ids) { + if (isEmpty(ids)) { + return + } + + const { indexes, prefix, redis } = this + + // update main index + let promise = redis.srem(prefix + '_ids', ...ids) + + // update other indexes + if (indexes.length !== 0) { + promise = Promise.all([ promise, asyncMap(ids, id => + redis.hgetall(`${prefix}:${id}`).then(values => + values != null && asyncMap(indexes, index => { + const value = values[index] + if (value !== undefined) { + return redis.srem(`${prefix}_${index}:${String(value).toLowerCase()}`, id) + } + }) + ) + ) ]) + } + + return promise.then(() => + // remove the models + redis.del(map(ids, id => `${prefix}:${id}`)) + ) + } + + _update (models) { + return this._add(models, { replace: true }) + } +} diff --git a/packages/xo-server/src/connection.js b/packages/xo-server/src/connection.js new file mode 100644 index 000000000..22d30cfac --- /dev/null +++ b/packages/xo-server/src/connection.js @@ -0,0 +1,50 @@ +import {EventEmitter} from 'events' + +import {createRawObject, noop} from './utils' + +// =================================================================== + +export default class Connection extends EventEmitter { + constructor () { + super() + + this._data = createRawObject() + } + + // Close the connection. + close () { + // Prevent errors when the connection is closed more than once. + this.close = noop + + this.emit('close') + } + + // Gets the value for this key. + get (key, defaultValue) { + const {_data: data} = this + + if (key in data) { + return data[key] + } + + if (arguments.length >= 2) { + return defaultValue + } + + throw new Error('no value for `' + key + '`') + } + + // Checks whether there is a value for this key. 
+ has (key) { + return key in this._data + } + + // Sets the value for this key. + set (key, value) { + this._data[key] = value + } + + unset (key) { + delete this._data[key] + } +} diff --git a/packages/xo-server/src/constant-stream.js b/packages/xo-server/src/constant-stream.js new file mode 100644 index 000000000..05adc3bbc --- /dev/null +++ b/packages/xo-server/src/constant-stream.js @@ -0,0 +1,42 @@ +import from2 from 'from2' + +const constantStream = (data, n = 1) => { + if (!Buffer.isBuffer(data)) { + data = Buffer.from(data) + } + + const { length } = data + + if (!length) { + throw new Error('data should not be empty') + } + + n *= length + let currentLength = length + + return from2((size, next) => { + if (n <= 0) { + return next(null, null) + } + + if (n < size) { + size = n + } + + if (size < currentLength) { + const m = Math.floor(size / length) * length || length + n -= m + return next(null, data.slice(0, m)) + } + + // if more than twice the data length is requested, repeat the data + if (size > currentLength * 2) { + currentLength = Math.floor(size / length) * length + data = Buffer.alloc(currentLength, data) + } + + n -= currentLength + return next(null, data) + }) +} +export { constantStream as default } diff --git a/packages/xo-server/src/decorators.js b/packages/xo-server/src/decorators.js new file mode 100644 index 000000000..87df27291 --- /dev/null +++ b/packages/xo-server/src/decorators.js @@ -0,0 +1,175 @@ +import { getBoundPropertyDescriptor } from 'bind-property-descriptor' + +import { + isArray, + isFunction, +} from './utils' + +// =================================================================== + +const { + defineProperties, + getOwnPropertyDescriptor, +} = Object + +// =================================================================== + +// Debounce decorator for methods. +// +// See: https://github.com/wycats/javascript-decorators +// +// TODO: make it work for single functions. 
+export const debounce = duration => (target, name, descriptor) => { + const fn = descriptor.value + + // This symbol is used to store the related data directly on the + // current object. + const s = Symbol(`debounced ${name} data`) + + function debounced () { + const data = this[s] || (this[s] = { + lastCall: 0, + wrapper: null, + }) + + const now = Date.now() + if (now > data.lastCall + duration) { + data.lastCall = now + try { + const result = fn.apply(this, arguments) + data.wrapper = () => result + } catch (error) { + data.wrapper = () => { throw error } + } + } + return data.wrapper() + } + debounced.reset = obj => { delete obj[s] } + + descriptor.value = debounced + return descriptor +} + +// ------------------------------------------------------------------- + +const _ownKeys = ( + (typeof Reflect !== 'undefined' && Reflect.ownKeys) || + (({ + getOwnPropertyNames: names, + getOwnPropertySymbols: symbols, + }) => symbols + ? obj => names(obj).concat(symbols(obj)) + : names + )(Object) +) + +const _isIgnoredProperty = name => ( + name[0] === '_' || + name === 'constructor' +) + +const _IGNORED_STATIC_PROPERTIES = { + __proto__: null, + + arguments: true, + caller: true, + length: true, + name: true, + prototype: true, +} +const _isIgnoredStaticProperty = name => _IGNORED_STATIC_PROPERTIES[name] + +export const mixin = MixIns => Class => { + if (!isArray(MixIns)) { + MixIns = [ MixIns ] + } + + const { name } = Class + + // Copy properties of plain object mix-ins to the prototype. 
+ { + const allMixIns = MixIns + MixIns = [] + const { prototype } = Class + const descriptors = { __proto__: null } + for (const MixIn of allMixIns) { + if (isFunction(MixIn)) { + MixIns.push(MixIn) + continue + } + + for (const prop of _ownKeys(MixIn)) { + if (prop in prototype) { + throw new Error(`${name}#${prop} is already defined`) + } + + ( + descriptors[prop] = getOwnPropertyDescriptor(MixIn, prop) + ).enumerable = false // Object methods are enumerable but class methods are not. + } + } + defineProperties(prototype, descriptors) + } + + function Decorator (...args) { + const instance = new Class(...args) + + for (const MixIn of MixIns) { + const { prototype } = MixIn + const mixinInstance = new MixIn(instance, ...args) + const descriptors = { __proto__: null } + for (const prop of _ownKeys(prototype)) { + if (_isIgnoredProperty(prop)) { + continue + } + + if (prop in instance) { + throw new Error(`${name}#${prop} is already defined`) + } + + descriptors[prop] = getBoundPropertyDescriptor( + prototype, + prop, + mixinInstance + ) + } + defineProperties(instance, descriptors) + } + + return instance + } + + // Copy original and mixed-in static properties on Decorator class. + const descriptors = { __proto__: null } + for (const prop of _ownKeys(Class)) { + let descriptor + if (!( + // Special properties are not defined... + _isIgnoredStaticProperty(prop) && + + // if they already exist... + (descriptor = getOwnPropertyDescriptor(Decorator, prop)) && + + // and are not configurable. 
+ !descriptor.configurable + )) { + descriptors[prop] = getOwnPropertyDescriptor(Class, prop) + } + } + for (const MixIn of MixIns) { + for (const prop of _ownKeys(MixIn)) { + if (_isIgnoredStaticProperty(prop)) { + continue + } + + if (prop in descriptors) { + throw new Error(`${name}.${prop} is already defined`) + } + + descriptors[prop] = getOwnPropertyDescriptor(MixIn, prop) + } + } + defineProperties(Decorator, descriptors) + + return Decorator +} diff --git a/packages/xo-server/src/decorators.spec.js b/packages/xo-server/src/decorators.spec.js new file mode 100644 index 000000000..2fcaa693e --- /dev/null +++ b/packages/xo-server/src/decorators.spec.js @@ -0,0 +1,39 @@ +/* eslint-env jest */ + +import {debounce} from './decorators' + +// =================================================================== + +describe('debounce()', () => { + let i + + class Foo { + @debounce(1e1) + foo () { + ++i + } + } + + beforeEach(() => { + i = 0 + }) + + it('works', done => { + const foo = new Foo() + + expect(i).toBe(0) + + foo.foo() + expect(i).toBe(1) + + foo.foo() + expect(i).toBe(1) + + setTimeout(() => { + foo.foo() + expect(i).toBe(2) + + done() + }, 2e1) + }) +}) diff --git a/packages/xo-server/src/fatfs-buffer.js b/packages/xo-server/src/fatfs-buffer.js new file mode 100644 index 000000000..2c0241fd9 --- /dev/null +++ b/packages/xo-server/src/fatfs-buffer.js @@ -0,0 +1,85 @@ +// Buffer driver for [fatfs](https://github.com/natevw/fatfs). 
+//
+// Usage:
+//
+// ```js
+// import fatfs from 'fatfs'
+// import fatfsBuffer, { init as fatfsBufferInit } from './fatfs-buffer'
+//
+// const buffer = fatfsBufferInit()
+//
+// const fs = fatfs.createFileSystem(fatfsBuffer(buffer))
+//
+// fs.writeFile('/foo', 'content of foo', function (err, content) {
+//   if (err) {
+//     console.error(err)
+//   }
+// })
+// ```
+
+import { boot16 as fat16 } from 'fatfs/structs'
+
+const SECTOR_SIZE = 512
+
+const TEN_MIB = 10 * 1024 * 1024
+
+// Creates a 10MB buffer and initializes it as a FAT 16 volume.
+export function init () {
+  const buf = Buffer.alloc(TEN_MIB)
+
+  // Pack a FAT16 boot sector at offset 0.
+  //
+  // Field layout: https://github.com/natevw/fatfs/blob/master/structs.js
+  fat16.pack({
+    jmpBoot: Buffer.from('eb3c90', 'hex'),
+    OEMName: 'mkfs.fat',
+    BytsPerSec: SECTOR_SIZE,
+    SecPerClus: 4,
+    ResvdSecCnt: 1,
+    NumFATs: 2,
+    RootEntCnt: 512,
+    TotSec16: 20480,
+    Media: 248,
+    FATSz16: 20,
+    SecPerTrk: 32,
+    NumHeads: 64,
+    HiddSec: 0,
+    TotSec32: 0,
+    DrvNum: 128,
+    Reserved1: 0,
+    BootSig: 41,
+    VolID: 895111106,
+    VolLab: 'NO NAME ',
+    FilSysType: 'FAT16 ',
+  }, buf)
+
+  // End of sector.
+  buf[0x1fe] = 0x55
+  buf[0x1ff] = 0xaa
+
+  // Mark sector as reserved.
+  buf[0x200] = 0xf8
+  buf[0x201] = 0xff
+  buf[0x202] = 0xff
+  buf[0x203] = 0xff
+
+  // Mark sector as reserved.
+ buf[0x2a00] = 0xf8 + buf[0x2a01] = 0xff + buf[0x2a02] = 0xff + buf[0x2a03] = 0xff + + return buf +} + +export default buffer => { + return { + sectorSize: SECTOR_SIZE, + numSectors: Math.floor(buffer.length / SECTOR_SIZE), + readSectors: (i, target, cb) => { + buffer.copy(target, 0, i * SECTOR_SIZE) + cb() + }, + writeSectors: (i, source, cb) => { + source.copy(buffer, i * SECTOR_SIZE, 0) + cb() + }, + } +} diff --git a/packages/xo-server/src/glob-matcher.js b/packages/xo-server/src/glob-matcher.js new file mode 100644 index 000000000..380e7a99f --- /dev/null +++ b/packages/xo-server/src/glob-matcher.js @@ -0,0 +1,54 @@ +// See: https://gist.github.com/julien-f/5b9a3537eb82a34b04e2 + +const matcher = require('micromatch').matcher + +module.exports = function globMatcher (patterns, opts) { + if (!Array.isArray(patterns)) { + if (patterns[0] === '!') { + const m = matcher(patterns.slice(1), opts) + return function (string) { + return !m(string) + } + } else { + return matcher(patterns, opts) + } + } + + const noneMustMatch = [] + const anyMustMatch = [] + + // TODO: could probably be optimized by combining all positive patterns (and all negative patterns) as a single matcher. 
+ for (let i = 0, n = patterns.length; i < n; ++i) { + const pattern = patterns[i] + if (pattern[0] === '!') { + noneMustMatch.push(matcher(pattern.slice(1), opts)) + } else { + anyMustMatch.push(matcher(pattern, opts)) + } + } + + const nNone = noneMustMatch.length + const nAny = anyMustMatch.length + + return function (string) { + let i + + for (i = 0; i < nNone; ++i) { + if (noneMustMatch[i](string)) { + return false + } + } + + if (nAny === 0) { + return true + } + + for (i = 0; i < nAny; ++i) { + if (anyMustMatch[i](string)) { + return true + } + } + + return false + } +} diff --git a/packages/xo-server/src/index.js b/packages/xo-server/src/index.js new file mode 100644 index 000000000..1d57b1848 --- /dev/null +++ b/packages/xo-server/src/index.js @@ -0,0 +1,649 @@ +import appConf from 'app-conf' +import bind from 'lodash/bind' +import blocked from 'blocked' +import createExpress from 'express' +import createLogger from 'debug' +import eventToPromise from 'event-to-promise' +import has from 'lodash/has' +import helmet from 'helmet' +import includes from 'lodash/includes' +import proxyConsole from './proxy-console' +import serveStatic from 'serve-static' +import startsWith from 'lodash/startsWith' +import WebSocket from 'ws' +import { compile as compilePug } from 'pug' +import { createServer as createProxyServer } from 'http-proxy' +import { join as joinPath } from 'path' + +import JsonRpcPeer from 'json-rpc-peer' +import { invalidCredentials } from 'xo-common/api-errors' +import { + ensureDir, + readdir, + readFile, +} from 'fs-extra' + +import WebServer from 'http-server-plus' +import Xo from './xo' +import { + createRawObject, + forEach, + isArray, + isFunction, + mapToArray, + pFromCallback, +} from './utils' + +import bodyParser from 'body-parser' +import connectFlash from 'connect-flash' +import cookieParser from 'cookie-parser' +import expressSession from 'express-session' +import passport from 'passport' +import { parse as parseCookies } from 'cookie' 
+import { Strategy as LocalStrategy } from 'passport-local' + +// =================================================================== + +const debug = createLogger('xo:main') + +const warn = (...args) => { + console.warn('[Warn]', ...args) +} + +// =================================================================== + +const DEPRECATED_ENTRIES = [ + 'users', + 'servers', +] + +async function loadConfiguration () { + const config = await appConf.load('xo-server', { + ignoreUnknownFormats: true, + }) + + debug('Configuration loaded.') + + // Print a message if deprecated entries are specified. + forEach(DEPRECATED_ENTRIES, entry => { + if (has(config, entry)) { + warn(`${entry} configuration is deprecated.`) + } + }) + + return config +} + +// =================================================================== + +function createExpressApp () { + const app = createExpress() + + app.use(helmet()) + + // Registers the cookie-parser and express-session middlewares, + // necessary for connect-flash. + app.use(cookieParser()) + app.use(expressSession({ + resave: false, + saveUninitialized: false, + + // TODO: should be in the config file. + secret: 'CLWguhRZAZIXZcbrMzHCYmefxgweItKnS', + })) + + // Registers the connect-flash middleware, necessary for Passport to + // display error messages. + app.use(connectFlash()) + + // Registers the body-parser middleware, necessary for Passport to + // access the username and password from the sign in form. + app.use(bodyParser.urlencoded({ extended: false })) + + // Registers Passport's middlewares. + app.use(passport.initialize()) + + return app +} + +async function setUpPassport (express, xo) { + const strategies = createRawObject() + xo.registerPassportStrategy = strategy => { + passport.use(strategy) + + const {name} = strategy + if (name !== 'local') { + strategies[name] = strategy.label || name + } + } + + // Registers the sign in form. 
+ const signInPage = compilePug( + await readFile(joinPath(__dirname, '..', 'signin.pug')) + ) + express.get('/signin', (req, res, next) => { + res.send(signInPage({ + error: req.flash('error')[0], + strategies, + })) + }) + + express.get('/signout', (req, res) => { + res.clearCookie('token') + res.redirect('/') + }) + + const SIGNIN_STRATEGY_RE = /^\/signin\/([^/]+)(\/callback)?(:?\?.*)?$/ + express.use(async (req, res, next) => { + const { url } = req + const matches = url.match(SIGNIN_STRATEGY_RE) + + if (matches) { + return passport.authenticate(matches[1], async (err, user, info) => { + if (err) { + return next(err) + } + + if (!user) { + req.flash('error', info ? info.message : 'Invalid credentials') + return res.redirect('/signin') + } + + // The cookie will be set in via the next request because some + // browsers do not save cookies on redirect. + req.flash( + 'token', + (await xo.createAuthenticationToken({userId: user.id})).id + ) + + // The session is only persistent for internal provider and if 'Remember me' box is checked + req.flash( + 'session-is-persistent', + matches[1] === 'local' && req.body['remember-me'] === 'on' + ) + + res.redirect(req.flash('return-url')[0] || '/') + })(req, res, next) + } + + const token = req.flash('token')[0] + + if (token) { + const isPersistent = req.flash('session-is-persistent')[0] + + if (isPersistent) { + // Persistent cookie ? => 1 year + res.cookie('token', token, { maxAge: 1000 * 60 * 60 * 24 * 365 }) + } else { + // Non-persistent : external provider as Github, Twitter... + res.cookie('token', token) + } + + next() + } else if (req.cookies.token) { + next() + } else if (/favicon|fontawesome|images|styles|\.(?:css|jpg|png)$/.test(url)) { + next() + } else { + req.flash('return-url', url) + return res.redirect('/signin') + } + }) + + // Install the local strategy. 
+ xo.registerPassportStrategy(new LocalStrategy( + async (username, password, done) => { + try { + const user = await xo.authenticateUser({username, password}) + done(null, user) + } catch (error) { + done(null, false, { message: error.message }) + } + } + )) +} + +// =================================================================== + +async function registerPlugin (pluginPath, pluginName) { + const plugin = require(pluginPath) + const { description, version = 'unknown' } = (() => { + try { + return require(pluginPath + '/package.json') + } catch (_) { + return {} + } + })() + + // Supports both “normal” CommonJS and Babel's ES2015 modules. + const { + default: factory = plugin, + configurationSchema, + configurationPresets, + testSchema, + } = plugin + + // The default export can be either a factory or directly a plugin + // instance. + const instance = isFunction(factory) + ? factory({ + xo: this, + getDataDir: () => { + const dir = `${this._config.datadir}/${pluginName}` + return ensureDir(dir).then(() => dir) + }, + }) + : factory + + await this.registerPlugin( + pluginName, + instance, + configurationSchema, + configurationPresets, + description, + testSchema, + version + ) +} + +const debugPlugin = createLogger('xo:plugin') + +function registerPluginWrapper (pluginPath, pluginName) { + debugPlugin('register %s', pluginName) + + return registerPlugin.call(this, pluginPath, pluginName).then( + () => { + debugPlugin(`successfully register ${pluginName}`) + }, + error => { + debugPlugin(`failed register ${pluginName}`) + debugPlugin(error) + } + ) +} + +const PLUGIN_PREFIX = 'xo-server-' +const PLUGIN_PREFIX_LENGTH = PLUGIN_PREFIX.length + +async function registerPluginsInPath (path) { + const files = await readdir(path).catch(error => { + if (error.code === 'ENOENT') { + return [] + } + throw error + }) + + await Promise.all(mapToArray(files, name => { + if (startsWith(name, PLUGIN_PREFIX)) { + return registerPluginWrapper.call( + this, + `${path}/${name}`, + 
name.slice(PLUGIN_PREFIX_LENGTH) + ) + } + })) +} + +async function registerPlugins (xo) { + await Promise.all(mapToArray([ + `${__dirname}/../node_modules/`, + '/usr/local/lib/node_modules/', + ], xo::registerPluginsInPath)) +} + +// =================================================================== + +async function makeWebServerListen (webServer, { + certificate, + + // The properties was called `certificate` before. + cert = certificate, + + key, + ...opts +}) { + if (cert && key) { + [opts.cert, opts.key] = await Promise.all([ + readFile(cert), + readFile(key), + ]) + } + try { + const niceAddress = await webServer.listen(opts) + debug(`Web server listening on ${niceAddress}`) + } catch (error) { + if (error.niceAddress) { + warn(`Web server could not listen on ${error.niceAddress}`) + + const {code} = error + if (code === 'EACCES') { + warn(' Access denied.') + warn(' Ports < 1024 are often reserved to privileges users.') + } else if (code === 'EADDRINUSE') { + warn(' Address already in use.') + } + } else { + warn('Web server could not listen:', error.message) + } + } +} + +async function createWebServer ({ listen, listenOptions }) { + const webServer = new WebServer() + + await Promise.all(mapToArray(listen, + opts => makeWebServerListen(webServer, { ...listenOptions, ...opts }) + )) + + return webServer +} + +// =================================================================== + +const setUpProxies = (express, opts, xo) => { + if (!opts) { + return + } + + const proxy = createProxyServer({ + ignorePath: true, + }).on('error', (error) => console.error(error)) + + // TODO: sort proxies by descending prefix length. + + // HTTP request proxy. + express.use((req, res, next) => { + const { url } = req + + for (const prefix in opts) { + if (startsWith(url, prefix)) { + const target = opts[prefix] + + proxy.web(req, res, { + target: target + url.slice(prefix.length), + }) + + return + } + } + + next() + }) + + // WebSocket proxy. 
+ const webSocketServer = new WebSocket.Server({ + noServer: true, + }) + xo.on('stop', () => pFromCallback(cb => webSocketServer.close(cb))) + + express.on('upgrade', (req, socket, head) => { + const { url } = req + + for (const prefix in opts) { + if (startsWith(url, prefix)) { + const target = opts[prefix] + + proxy.ws(req, socket, head, { + target: target + url.slice(prefix.length), + }) + + return + } + } + }) +} + +// =================================================================== + +const setUpStaticFiles = (express, opts) => { + forEach(opts, (paths, url) => { + if (!isArray(paths)) { + paths = [paths] + } + + forEach(paths, path => { + debug('Setting up %s → %s', url, path) + + express.use(url, serveStatic(path)) + }) + }) +} + +// =================================================================== + +const setUpApi = (webServer, xo, verboseLogsOnErrors) => { + const webSocketServer = new WebSocket.Server({ + noServer: true, + }) + xo.on('stop', () => pFromCallback(cb => webSocketServer.close(cb))) + + const onConnection = (socket, upgradeReq) => { + const { remoteAddress } = upgradeReq.socket + + debug('+ WebSocket connection (%s)', remoteAddress) + + // Create the abstract XO object for this connection. + const connection = xo.createUserConnection() + connection.once('close', () => { + socket.close() + }) + + // Create the JSON-RPC server for this connection. + const jsonRpc = new JsonRpcPeer(message => { + if (message.type === 'request') { + return xo.callApiMethod(connection, message.method, message.params) + } + }) + connection.notify = bind(jsonRpc.notify, jsonRpc) + + // Close the XO connection with this WebSocket. + socket.once('close', () => { + debug('- WebSocket connection (%s)', remoteAddress) + + connection.close() + }) + + // Connect the WebSocket to the JSON-RPC server. 
+ socket.on('message', message => { + jsonRpc.write(message) + }) + + const onSend = error => { + if (error) { + warn('WebSocket send:', error.stack) + } + } + jsonRpc.on('data', data => { + // The socket may have been closed during the API method + // execution. + if (socket.readyState === WebSocket.OPEN) { + socket.send(data, onSend) + } + }) + } + webServer.on('upgrade', (req, socket, head) => { + if (req.url === '/api/') { + webSocketServer.handleUpgrade(req, socket, head, ws => onConnection(ws, req)) + } + }) +} + +// =================================================================== + +const CONSOLE_PROXY_PATH_RE = /^\/api\/consoles\/(.*)$/ + +const setUpConsoleProxy = (webServer, xo) => { + const webSocketServer = new WebSocket.Server({ + noServer: true, + }) + xo.on('stop', () => pFromCallback(cb => webSocketServer.close(cb))) + + webServer.on('upgrade', async (req, socket, head) => { + const matches = CONSOLE_PROXY_PATH_RE.exec(req.url) + if (!matches) { + return + } + + const [, id] = matches + try { + // TODO: factorize permissions checking in an Express middleware. + { + const { token } = parseCookies(req.headers.cookie) + + const user = await xo.authenticateUser({ token }) + if (!await xo.hasPermissions(user.id, [ [ id, 'operate' ] ])) { + throw invalidCredentials() + } + + const { remoteAddress } = socket + debug('+ Console proxy (%s - %s)', user.name, remoteAddress) + socket.on('close', () => { + debug('- Console proxy (%s - %s)', user.name, remoteAddress) + }) + } + + const xapi = xo.getXapi(id, ['VM', 'VM-controller']) + const vmConsole = xapi.getVmConsole(id) + + // FIXME: lost connection due to VM restart is not detected. 
+ webSocketServer.handleUpgrade(req, socket, head, connection => { + proxyConsole(connection, vmConsole, xapi.sessionId) + }) + } catch (error) { + console.error((error && error.stack) || error) + } + }) +} + +// =================================================================== + +const USAGE = (({ + name, + version, +}) => `Usage: ${name} [--safe-mode] + +${name} v${version}`)(require('../package.json')) + +// =================================================================== + +export default async function main (args) { + if (includes(args, '--help') || includes(args, '-h')) { + return USAGE + } + + { + const debug = createLogger('xo:perf') + blocked(ms => { + debug('blocked for %sms', ms | 0) + }) + } + + const config = await loadConfiguration() + + const webServer = await createWebServer(config.http) + + // Now the web server is listening, drop privileges. + try { + const {user, group} = config + if (group) { + process.setgid(group) + debug('Group changed to', group) + } + if (user) { + process.setuid(user) + debug('User changed to', user) + } + } catch (error) { + warn('Failed to change user/group:', error) + } + + // Creates main object. + const xo = new Xo(config) + + // Register web server close on XO stop. + xo.on('stop', () => pFromCallback(cb => webServer.close(cb))) + + // Connects to all registered servers. + await xo.start() + + // Trigger a clean job. + await xo.clean() + + // Express is used to manage non WebSocket connections. 
+ const express = createExpressApp() + + if (config.http.redirectToHttps) { + let port + forEach(config.http.listen, listen => { + if ( + listen.port && + (listen.cert || listen.certificate) + ) { + port = listen.port + return false + } + }) + + if (port === undefined) { + warn('Could not setup HTTPs redirection: no HTTPs port found') + } else { + express.use((req, res, next) => { + if (req.secure) { + return next() + } + + res.redirect(`https://${req.hostname}:${port}${req.originalUrl}`) + }) + } + } + + // Must be set up before the API. + setUpConsoleProxy(webServer, xo) + + // Must be set up before the API. + express.use(bind(xo._handleHttpRequest, xo)) + + // Everything above is not protected by the sign in, allowing xo-cli + // to work properly. + await setUpPassport(express, xo) + + // Attaches express to the web server. + webServer.on('request', express) + webServer.on('upgrade', (req, socket, head) => { + express.emit('upgrade', req, socket, head) + }) + + // Must be set up before the static files. + setUpApi(webServer, xo, config.verboseApiLogsOnErrors) + + setUpProxies(express, config.http.proxies, xo) + + setUpStaticFiles(express, config.http.mounts) + + if (!includes(args, '--safe-mode')) { + await registerPlugins(xo) + } + + // Gracefully shutdown on signals. + // + // TODO: implements a timeout? (or maybe it is the services launcher + // responsibility?) 
+ forEach([ 'SIGINT', 'SIGTERM' ], signal => { + let alreadyCalled = false + + process.on(signal, () => { + if (alreadyCalled) { + warn('forced exit') + process.exit(1) + } + alreadyCalled = true + + debug('%s caught, closing…', signal) + xo.stop() + }) + }) + + await eventToPromise(xo, 'stopped') + + debug('bye :-)') +} diff --git a/packages/xo-server/src/job-executor.js b/packages/xo-server/src/job-executor.js new file mode 100644 index 000000000..4dc608ba7 --- /dev/null +++ b/packages/xo-server/src/job-executor.js @@ -0,0 +1,184 @@ +import Bluebird from 'bluebird' +import { BaseError } from 'make-error' +import { createPredicate } from 'value-matcher' +import { timeout } from 'promise-toolbox' +import { + assign, + filter, + find, + isEmpty, + map, + mapValues, +} from 'lodash' + +import { crossProduct } from './math' +import { + serializeError, + thunkToArray, +} from './utils' + +export class JobExecutorError extends BaseError {} +export class UnsupportedJobType extends JobExecutorError { + constructor (job) { + super('Unknown job type: ' + job.type) + } +} +export class UnsupportedVectorType extends JobExecutorError { + constructor (vector) { + super('Unknown vector type: ' + vector.type) + } +} + +// =================================================================== + +const paramsVectorActionsMap = { + extractProperties ({ mapping, value }) { + return mapValues(mapping, key => value[key]) + }, + crossProduct ({ items }) { + return thunkToArray(crossProduct( + map(items, value => resolveParamsVector.call(this, value)) + )) + }, + fetchObjects ({ pattern }) { + const objects = filter(this.xo.getObjects(), createPredicate(pattern)) + if (isEmpty(objects)) { + throw new Error('no objects match this pattern') + } + return objects + }, + map ({ collection, iteratee, paramName = 'value' }) { + return map(resolveParamsVector.call(this, collection), value => { + return resolveParamsVector.call(this, { + ...iteratee, + [paramName]: value, + }) + }) + }, + set: ({ 
values }) => values, +} + +export function resolveParamsVector (paramsVector) { + const visitor = paramsVectorActionsMap[paramsVector.type] + if (!visitor) { + throw new Error(`Unsupported function '${paramsVector.type}'.`) + } + + return visitor.call(this, paramsVector) +} + +// =================================================================== + +export default class JobExecutor { + constructor (xo) { + this.xo = xo + + // The logger is not available until Xo has started. + xo.on('start', () => xo.getLogger('jobs').then(logger => { + this._logger = logger + })) + } + + async exec (job) { + const runJobId = this._logger.notice(`Starting execution of ${job.id}.`, { + event: 'job.start', + userId: job.userId, + jobId: job.id, + key: job.key, + }) + + try { + if (job.type === 'call') { + const execStatus = await this._execCall(job, runJobId) + + this.xo.emit('job:terminated', execStatus) + } else { + throw new UnsupportedJobType(job) + } + + this._logger.notice(`Execution terminated for ${job.id}.`, { + event: 'job.end', + runJobId, + }) + } catch (error) { + this._logger.error(`The execution of ${job.id} has failed.`, { + event: 'job.end', + runJobId, + error: serializeError(error), + }) + + throw error + } + } + + async _execCall (job, runJobId) { + const { paramsVector } = job + const paramsFlatVector = paramsVector + ? resolveParamsVector.call(this, paramsVector) + : [{}] // One call with no parameters + + const connection = this.xo.createUserConnection() + + connection.set('user_id', job.userId) + + const schedule = find(await this.xo.getAllSchedules(), { job: job.id }) + + const execStatus = { + calls: {}, + runJobId, + start: Date.now(), + timezone: schedule !== undefined ? schedule.timezone : undefined, + } + + await Bluebird.map(paramsFlatVector, params => { + const runCallId = this._logger.notice(`Starting ${job.method} call. 
(${job.id})`, { + event: 'jobCall.start', + runJobId, + method: job.method, + params, + }) + + const call = execStatus.calls[runCallId] = { + method: job.method, + params, + start: Date.now(), + } + let promise = this.xo.callApiMethod(connection, job.method, assign({}, params)) + if (job.timeout) { + promise = promise::timeout(job.timeout) + } + + return promise.then( + value => { + this._logger.notice(`Call ${job.method} (${runCallId}) is a success. (${job.id})`, { + event: 'jobCall.end', + runJobId, + runCallId, + returnedValue: value, + }) + + call.returnedValue = value + call.end = Date.now() + }, + reason => { + this._logger.notice(`Call ${job.method} (${runCallId}) has failed. (${job.id})`, { + event: 'jobCall.end', + runJobId, + runCallId, + error: serializeError(reason), + }) + + call.error = reason + call.end = Date.now() + } + ) + }, { + concurrency: 2, + }) + + connection.close() + execStatus.end = Date.now() + + return execStatus + } +} diff --git a/packages/xo-server/src/job-executor.spec.js b/packages/xo-server/src/job-executor.spec.js new file mode 100644 index 000000000..b47ce0569 --- /dev/null +++ b/packages/xo-server/src/job-executor.spec.js @@ -0,0 +1,100 @@ +/* eslint-env jest */ + +import { forEach } from 'lodash' +import { resolveParamsVector } from './job-executor' + +describe('resolveParamsVector', function () { + forEach({ + 'cross product with three sets': [ + // Expected result. + [ { id: 3, value: 'foo', remote: 'local' }, + { id: 7, value: 'foo', remote: 'local' }, + { id: 10, value: 'foo', remote: 'local' }, + { id: 3, value: 'bar', remote: 'local' }, + { id: 7, value: 'bar', remote: 'local' }, + { id: 10, value: 'bar', remote: 'local' } ], + // Entry. 
+ { + type: 'crossProduct', + items: [{ + type: 'set', + values: [ { id: 3 }, { id: 7 }, { id: 10 } ], + }, { + type: 'set', + values: [ { value: 'foo' }, { value: 'bar' } ], + }, { + type: 'set', + values: [ { remote: 'local' } ], + }], + }, + ], + 'cross product with `set` and `map`': [ + // Expected result. + [ + { remote: 'local', id: 'vm:2' }, + { remote: 'smb', id: 'vm:2' }, + ], + + // Entry. + { + type: 'crossProduct', + items: [{ + type: 'set', + values: [ { remote: 'local' }, { remote: 'smb' } ], + }, { + type: 'map', + collection: { + type: 'fetchObjects', + pattern: { + $pool: { __or: [ 'pool:1', 'pool:8', 'pool:12' ] }, + power_state: 'Running', + tags: [ 'foo' ], + type: 'VM', + }, + }, + iteratee: { + type: 'extractProperties', + mapping: { id: 'id' }, + }, + }], + }, + + // Context. + { + xo: { + getObjects: function () { + return [{ + id: 'vm:1', + $pool: 'pool:1', + tags: [], + type: 'VM', + power_state: 'Halted', + }, { + id: 'vm:2', + $pool: 'pool:1', + tags: [ 'foo' ], + type: 'VM', + power_state: 'Running', + }, { + id: 'host:1', + type: 'host', + power_state: 'Running', + }, { + id: 'vm:3', + $pool: 'pool:8', + tags: [ 'foo' ], + type: 'VM', + power_state: 'Halted', + }] + }, + }, + }, + ], + }, ([ expectedResult, entry, context ], name) => { + describe(`with ${name}`, () => { + it('Resolves params vector', () => { + expect(resolveParamsVector.call(context, entry)).toEqual(expectedResult) + }) + }) + }) +}) diff --git a/packages/xo-server/src/logs-cli.js b/packages/xo-server/src/logs-cli.js new file mode 100644 index 000000000..a657f5284 --- /dev/null +++ b/packages/xo-server/src/logs-cli.js @@ -0,0 +1,202 @@ +import appConf from 'app-conf' +import get from 'lodash/get' +import highland from 'highland' +import levelup from 'level-party' +import ndjson from 'ndjson' +import parseArgs from 'minimist' +import sublevel from 'level-sublevel' +import util from 'util' +import { repair as repairDb } from 'level' + +import {forEach} from './utils' 
+import globMatcher from './glob-matcher' + +// =================================================================== + +async function printLogs (db, args) { + let stream = highland(db.createReadStream({reverse: true})) + + if (args.since) { + stream = stream.filter(({value}) => (value.time >= args.since)) + } + + if (args.until) { + stream = stream.filter(({value}) => (value.time <= args.until)) + } + + const fields = Object.keys(args.matchers) + + if (fields.length > 0) { + stream = stream.filter(({value}) => { + for (const field of fields) { + const fieldValue = get(value, field) + if (fieldValue === undefined || !args.matchers[field](fieldValue)) { + return false + } + } + + return true + }) + } + + stream = stream.take(args.limit) + + if (args.json) { + stream = highland(stream.pipe(ndjson.serialize())) + .each(value => { + process.stdout.write(value) + }) + } else { + stream = stream.each(value => { + console.log(util.inspect(value, { depth: null })) + }) + } + + return new Promise(resolve => { + stream.done(resolve) + }) +} + +// =================================================================== + +function helper () { + console.error(` +xo-server-logs --help, -h + + Display this help message. + +xo-server-logs [--json] [--limit=] [--since=] [--until=] [...] + + Prints the logs. + + --json + Display the results as new line delimited JSON for consumption + by another program. + + --limit=, -n + Limit the number of results to be displayed (default 100) + + --since=, --until= + Start showing entries on or newer than the specified date, or on + or older than the specified date. + + should use the format \`YYYY-MM-DD\`. + + + Patterns can be used to filter the entries. + + Patterns have the following format \`=\`/\`\`. + +xo-server-logs --repair + + Repair/compact the database. + + This is an advanced operation and should be used only when necessary and offline (xo-server should be stopped). 
+`) +} + +// =================================================================== + +function getArgs () { + const stringArgs = ['since', 'until', 'limit'] + const args = parseArgs(process.argv.slice(2), { + string: stringArgs, + boolean: ['help', 'json', 'repair'], + default: { + limit: 100, + json: false, + help: false, + }, + alias: { + limit: 'n', + help: 'h', + }, + }) + + const patterns = {} + + for (let value of args._) { + value = String(value) + + const i = value.indexOf('=') + + if (i !== -1) { + const field = value.slice(0, i) + const pattern = value.slice(i + 1) + + patterns[pattern] + ? patterns[field].push(pattern) + : patterns[field] = [ pattern ] + } else if (!patterns[value]) { + patterns[value] = null + } + } + + const trueFunction = () => true + args.matchers = {} + + for (const field in patterns) { + const values = patterns[field] + args.matchers[field] = (values === null) ? trueFunction : globMatcher(values) + } + + // Warning: minimist makes one array of values if the same option is used many times. 
+ // (But only for strings args, not boolean) + forEach(stringArgs, arg => { + if (args[arg] instanceof Array) { + throw new Error(`error: too many values for ${arg} argument`) + } + }) + + ;['since', 'until'].forEach(arg => { + if (args[arg] !== undefined) { + args[arg] = Date.parse(args[arg]) + + if (isNaN(args[arg])) { + throw new Error(`error: bad ${arg} timestamp format`) + } + } + }) + + if (isNaN(args.limit = +args.limit)) { + throw new Error('error: limit is not a valid number') + } + + return args +} + +// =================================================================== + +export default async function main () { + const args = getArgs() + + if (args.help) { + helper() + return + } + + const config = await appConf.load('xo-server', { + ignoreUnknownFormats: true, + }) + + if (args.repair) { + await new Promise((resolve, reject) => { + repairDb(`${config.datadir}/leveldb`, error => { + if (error) { + reject(error) + } else { + resolve() + } + }) + }) + + return + } + + const db = sublevel(levelup( + `${config.datadir}/leveldb`, + { valueEncoding: 'json' } + )).sublevel('logs') + + return printLogs(db, args) +} diff --git a/packages/xo-server/src/lvm.js b/packages/xo-server/src/lvm.js new file mode 100644 index 000000000..3e2fd0530 --- /dev/null +++ b/packages/xo-server/src/lvm.js @@ -0,0 +1,33 @@ +import execa from 'execa' +import splitLines from 'split-lines' +import { createParser } from 'parse-pairs' +import { isArray, map } from 'lodash' + +// =================================================================== + +const parse = createParser({ + keyTransform: key => key.slice(5).toLowerCase(), +}) +const makeFunction = command => (fields, ...args) => + execa.stdout(command, [ + '--noheading', + '--nosuffix', + '--nameprefixes', + '--unbuffered', + '--units', + 'b', + '-o', + String(fields), + ...args, + ]).then(stdout => map( + splitLines(stdout), + isArray(fields) + ? 
parse + : line => { + const data = parse(line) + return data[fields] + } + )) + +export const lvs = makeFunction('lvs') +export const pvs = makeFunction('pvs') diff --git a/packages/xo-server/src/math.js b/packages/xo-server/src/math.js new file mode 100644 index 000000000..81063a679 --- /dev/null +++ b/packages/xo-server/src/math.js @@ -0,0 +1,48 @@ +import assign from 'lodash/assign' + +const _combine = (vectors, n, cb) => { + if (!n) { + return + } + + const nLast = n - 1 + + const vector = vectors[nLast] + const m = vector.length + if (n === 1) { + for (let i = 0; i < m; ++i) { + cb([ vector[i] ]) // eslint-disable-line standard/no-callback-literal + } + return + } + + for (let i = 0; i < m; ++i) { + const value = vector[i] + + _combine(vectors, nLast, (vector) => { + vector.push(value) + cb(vector) + }) + } +} + +// Compute all combinations from vectors. +// +// Ex: combine([[2, 3], [5, 7]]) +// => [ [ 2, 5 ], [ 3, 5 ], [ 2, 7 ], [ 3, 7 ] ] +export const combine = vectors => cb => _combine(vectors, vectors.length, cb) + +// Merge the properties of an objects set in one object. +// +// Ex: mergeObjects([ { a: 1 }, { b: 2 } ]) => { a: 1, b: 2 } +export const mergeObjects = objects => assign({}, ...objects) + +// Compute a cross product between vectors. 
+// +// Ex: crossProduct([ [ { a: 2 }, { b: 3 } ], [ { c: 5 }, { d: 7 } ] ] ) +// => [ { a: 2, c: 5 }, { b: 3, c: 5 }, { a: 2, d: 7 }, { b: 3, d: 7 } ] +export const crossProduct = (vectors, mergeFn = mergeObjects) => cb => ( + combine(vectors)(vector => { + cb(mergeFn(vector)) + }) +) diff --git a/packages/xo-server/src/math.spec.js b/packages/xo-server/src/math.spec.js new file mode 100644 index 000000000..588d4ac9f --- /dev/null +++ b/packages/xo-server/src/math.spec.js @@ -0,0 +1,74 @@ +/* eslint-env jest */ + +import { forEach } from 'lodash' +import { thunkToArray } from './utils' +import { + crossProduct, + mergeObjects, +} from './math' + +describe('mergeObjects', function () { + forEach({ + 'Two sets of one': [ + {a: 1, b: 2}, {a: 1}, {b: 2}, + ], + 'Two sets of two': [ + {a: 1, b: 2, c: 3, d: 4}, {a: 1, b: 2}, {c: 3, d: 4}, + ], + 'Three sets': [ + {a: 1, b: 2, c: 3, d: 4, e: 5, f: 6}, {a: 1}, {b: 2, c: 3}, {d: 4, e: 5, f: 6}, + ], + 'One set': [ + {a: 1, b: 2}, {a: 1, b: 2}, + ], + 'Empty set': [ + {a: 1}, {a: 1}, {}, + ], + 'All empty': [ + {}, {}, {}, + ], + 'No set': [ + {}, + ], + }, ([ resultSet, ...sets ], name) => { + describe(`with ${name}`, () => { + it('Assembles all given param sets in on set', function () { + expect(mergeObjects(sets)).toEqual(resultSet) + }) + }) + }) +}) + +describe('crossProduct', function () { + // Gives the sum of all args + const addTest = args => args.reduce((prev, curr) => prev + curr, 0) + // Gives the product of all args + const multiplyTest = args => args.reduce((prev, curr) => prev * curr, 1) + + forEach({ + '2 sets of 2 items to multiply': [ + [10, 14, 15, 21], [[2, 3], [5, 7]], multiplyTest, + ], + '3 sets of 2 items to multiply': [ + [110, 130, 154, 182, 165, 195, 231, 273], [[2, 3], [5, 7], [11, 13]], multiplyTest, + ], + '2 sets of 3 items to multiply': [ + [14, 22, 26, 21, 33, 39, 35, 55, 65], [[2, 3, 5], [7, 11, 13]], multiplyTest, + ], + '2 sets of 2 items to add': [ + [7, 9, 8, 10], [[2, 3], [5, 7]], 
addTest, + ], + '3 sets of 2 items to add': [ + [18, 20, 20, 22, 19, 21, 21, 23], [[2, 3], [5, 7], [11, 13]], addTest, + ], + '2 sets of 3 items to add': [ + [9, 13, 15, 10, 14, 16, 12, 16, 18], [[2, 3, 5], [7, 11, 13]], addTest, + ], + }, ([ product, items, cb ], name) => { + describe(`with ${name}`, () => { + it('Crosses sets of values with a crossProduct callback', function () { + expect(thunkToArray(crossProduct(items, cb)).sort()).toEqual(product.sort()) + }) + }) + }) +}) diff --git a/packages/xo-server/src/model.js b/packages/xo-server/src/model.js new file mode 100644 index 000000000..658a7d3d0 --- /dev/null +++ b/packages/xo-server/src/model.js @@ -0,0 +1,73 @@ +import {EventEmitter} from 'events' + +import { + forEach, + isEmpty, + isString, +} from './utils' + +// =================================================================== + +export default class Model extends EventEmitter { + constructor (properties) { + super() + + this.properties = { ...this.default } + + if (properties) { + this.set(properties) + } + } + + // Initialize the model after construction. + initialize () {} + + // Validate the defined properties. + // + // Returns the error if any. + validate (properties) {} + + // Get a property. + get (name, def) { + const value = this.properties[name] + return value !== undefined ? value : def + } + + // Check whether a property exists. + has (name) { + return (this.properties[name] !== undefined) + } + + // Set properties. + set (properties, value) { + // This method can also be used with two arguments to set a single + // property. 
+ if (isString(properties)) { + properties = { [properties]: value } + } + + const previous = {} + + forEach(properties, (value, name) => { + const prev = this.properties[name] + + if (value !== prev) { + previous[name] = prev + + if (value === undefined) { + delete this.properties[name] + } else { + this.properties[name] = value + } + } + }) + + if (!isEmpty(previous)) { + this.emit('change', previous) + + forEach(previous, (value, name) => { + this.emit('change:' + name, value) + }) + } + } +} diff --git a/packages/xo-server/src/models/acl.js b/packages/xo-server/src/models/acl.js new file mode 100644 index 000000000..0bc9aa7be --- /dev/null +++ b/packages/xo-server/src/models/acl.js @@ -0,0 +1,79 @@ +import Collection from '../collection/redis' +import Model from '../model' +import { + forEach, + mapToArray, + multiKeyHash, +} from '../utils' + +// =================================================================== + +// Up until now, there were no actions, therefore the default +// action is used to update existing entries. 
+const DEFAULT_ACTION = 'admin' + +// =================================================================== + +export default class Acl extends Model {} + +Acl.create = (subject, object, action) => { + return Acl.hash(subject, object, action).then(hash => new Acl({ + id: hash, + subject, + object, + action, + })) +} + +Acl.hash = (subject, object, action) => multiKeyHash(subject, object, action) + +// ------------------------------------------------------------------- + +export class Acls extends Collection { + get Model () { + return Acl + } + + create (subject, object, action) { + return Acl.create(subject, object, action).then(acl => this.add(acl)) + } + + delete (subject, object, action) { + return Acl.hash(subject, object, action).then(hash => this.remove(hash)) + } + + aclExists (subject, object, action) { + return Acl.hash(subject, object, action).then(hash => this.exists(hash)) + } + + async get (properties) { + const acls = await super.get(properties) + + // Finds all records that are missing a action and need to be updated. + const toUpdate = [] + forEach(acls, acl => { + if (!acl.action) { + acl.action = DEFAULT_ACTION + toUpdate.push(acl) + } + }) + if (toUpdate.length) { + // Removes all existing entries. + await this.remove(mapToArray(toUpdate, 'id')) + + // Compute the new ids (new hashes). + const {hash} = Acl + await Promise.all(mapToArray( + toUpdate, + (acl) => hash(acl.subject, acl.object, acl.action).then(id => { + acl.id = id + }) + )) + + // Inserts the new (updated) entries. 
+ await this.add(toUpdate) + } + + return acls + } +} diff --git a/packages/xo-server/src/models/group.js b/packages/xo-server/src/models/group.js new file mode 100644 index 000000000..bb5914c3f --- /dev/null +++ b/packages/xo-server/src/models/group.js @@ -0,0 +1,45 @@ +import isEmpty from 'lodash/isEmpty' + +import Collection from '../collection/redis' +import Model from '../model' + +import { forEach } from '../utils' + +import { parseProp } from './utils' + +// =================================================================== + +export default class Group extends Model {} + +// =================================================================== + +export class Groups extends Collection { + get Model () { + return Group + } + + create (name) { + return this.add(new Group({ name })) + } + + async save (group) { + // Serializes. + let tmp + group.users = isEmpty(tmp = group.users) + ? undefined + : JSON.stringify(tmp) + + return /* await */ this.update(group) + } + + async get (properties) { + const groups = await super.get(properties) + + // Deserializes. + forEach(groups, group => { + group.users = parseProp('group', group, 'users', []) + }) + + return groups + } +} diff --git a/packages/xo-server/src/models/job.js b/packages/xo-server/src/models/job.js new file mode 100644 index 000000000..a3cbacf5a --- /dev/null +++ b/packages/xo-server/src/models/job.js @@ -0,0 +1,43 @@ +import Collection from '../collection/redis' +import Model from '../model' +import { forEach } from '../utils' + +import { parseProp } from './utils' + +// =================================================================== + +export default class Job extends Model {} + +export class Jobs extends Collection { + get Model () { + return Job + } + + async create (job) { + // Serializes. + job.paramsVector = JSON.stringify(job.paramsVector) + return /* await */ this.add(new Job(job)) + } + + async save (job) { + // Serializes. 
+ job.paramsVector = JSON.stringify(job.paramsVector) + return /* await */ this.update(job) + } + + async get (properties) { + const jobs = await super.get(properties) + + // Deserializes. + forEach(jobs, job => { + job.paramsVector = parseProp('job', job, 'paramsVector', {}) + + const { timeout } = job + if (timeout !== undefined) { + job.timeout = +timeout + } + }) + + return jobs + } +} diff --git a/packages/xo-server/src/models/plugin-metadata.js b/packages/xo-server/src/models/plugin-metadata.js new file mode 100644 index 000000000..f35312148 --- /dev/null +++ b/packages/xo-server/src/models/plugin-metadata.js @@ -0,0 +1,53 @@ +import Collection from '../collection/redis' +import Model from '../model' +import { forEach } from '../utils' + +// =================================================================== + +export default class PluginMetadata extends Model {} + +// =================================================================== + +export class PluginsMetadata extends Collection { + get Model () { + return PluginMetadata + } + + async save ({ id, autoload, configuration }) { + return /* await */ this.update({ + id, + autoload: autoload ? 'true' : 'false', + configuration: configuration && JSON.stringify(configuration), + }) + } + + async merge (id, data) { + const pluginMetadata = await this.first(id) + if (!pluginMetadata) { + throw new Error('no such plugin metadata') + } + + return /* await */ this.save({ + ...pluginMetadata.properties, + ...data, + }) + } + + async get (properties) { + const pluginsMetadata = await super.get(properties) + + // Deserializes. 
+ forEach(pluginsMetadata, pluginMetadata => { + const { autoload, configuration } = pluginMetadata + pluginMetadata.autoload = autoload === 'true' + try { + pluginMetadata.configuration = configuration && JSON.parse(configuration) + } catch (error) { + console.warn('cannot parse pluginMetadata.configuration:', configuration) + pluginMetadata.configuration = [] + } + }) + + return pluginsMetadata + } +} diff --git a/packages/xo-server/src/models/remote.js b/packages/xo-server/src/models/remote.js new file mode 100644 index 000000000..74c795acf --- /dev/null +++ b/packages/xo-server/src/models/remote.js @@ -0,0 +1,36 @@ +import Collection from '../collection/redis' +import Model from '../model' +import { + forEach, +} from '../utils' + +// =================================================================== + +export default class Remote extends Model {} + +export class Remotes extends Collection { + get Model () { + return Remote + } + + create (name, url) { + return this.add(new Remote({ + name, + url, + enabled: false, + error: '', + })) + } + + async save (remote) { + return /* await */ this.update(remote) + } + + async get (properties) { + const remotes = await super.get(properties) + forEach(remotes, remote => { + remote.enabled = (remote.enabled === 'true') + }) + return remotes + } +} diff --git a/packages/xo-server/src/models/schedule.js b/packages/xo-server/src/models/schedule.js new file mode 100644 index 000000000..a2b3184db --- /dev/null +++ b/packages/xo-server/src/models/schedule.js @@ -0,0 +1,36 @@ +import Collection from '../collection/redis' +import Model from '../model' +import { forEach } from '../utils' + +// =================================================================== + +export default class Schedule extends Model {} + +export class Schedules extends Collection { + get Model () { + return Schedule + } + + create (userId, job, cron, enabled, name = undefined, timezone = undefined) { + return this.add(new Schedule({ + userId, + job, + cron, 
+ enabled, + name, + timezone, + })) + } + + async save (schedule) { + return /* await */ this.update(schedule) + } + + async get (properties) { + const schedules = await super.get(properties) + forEach(schedules, schedule => { + schedule.enabled = (schedule.enabled === 'true') + }) + return schedules + } +} diff --git a/packages/xo-server/src/models/server.js b/packages/xo-server/src/models/server.js new file mode 100644 index 000000000..3fd176954 --- /dev/null +++ b/packages/xo-server/src/models/server.js @@ -0,0 +1,42 @@ +import Collection from '../collection/redis' +import Model from '../model' +import { forEach } from '../utils' + +import { parseProp } from './utils' + +// =================================================================== + +export default class Server extends Model {} + +// ------------------------------------------------------------------- + +export class Servers extends Collection { + get Model () { + return Server + } + + async create (params) { + const { host } = params + + if (await this.exists({host})) { + throw new Error('server already exists') + } + + return /* await */ this.add(params) + } + + async get (properties) { + const servers = await super.get(properties) + + // Deserializes + forEach(servers, server => { + if (server.error) { + server.error = parseProp('server', server, 'error', '') + } else { + delete server.error + } + }) + + return servers + } +} diff --git a/packages/xo-server/src/models/token.js b/packages/xo-server/src/models/token.js new file mode 100644 index 000000000..b12ab7fd1 --- /dev/null +++ b/packages/xo-server/src/models/token.js @@ -0,0 +1,10 @@ +import Collection from '../collection/redis' +import Model from '../model' + +// =================================================================== + +export default class Token extends Model {} + +// ------------------------------------------------------------------- + +export class Tokens extends Collection {} diff --git a/packages/xo-server/src/models/user.js 
b/packages/xo-server/src/models/user.js new file mode 100644 index 000000000..b4e28ab6e --- /dev/null +++ b/packages/xo-server/src/models/user.js @@ -0,0 +1,63 @@ +import isEmpty from 'lodash/isEmpty' + +import Collection from '../collection/redis' +import Model from '../model' +import { forEach } from '../utils' + +import { parseProp } from './utils' + +// =================================================================== + +export default class User extends Model {} + +User.prototype.default = { + permission: 'none', +} + +// ------------------------------------------------------------------- + +export class Users extends Collection { + get Model () { + return User + } + + async create (properties) { + const { email } = properties + + // Avoid duplicates. + if (await this.exists({email})) { + throw new Error(`the user ${email} already exists`) + } + + // Create the user object. + const user = new User(properties) + + // Adds the user to the collection. + return /* await */ this.add(user) + } + + async save (user) { + // Serializes. + let tmp + user.groups = isEmpty(tmp = user.groups) + ? undefined + : JSON.stringify(tmp) + user.preferences = isEmpty(tmp = user.preferences) + ? 
undefined + : JSON.stringify(tmp) + + return /* await */ this.update(user) + } + + async get (properties) { + const users = await super.get(properties) + + // Deserializes + forEach(users, user => { + user.groups = parseProp('user', user, 'groups', []) + user.preferences = parseProp('user', user, 'preferences', {}) + }) + + return users + } +} diff --git a/packages/xo-server/src/models/utils.js b/packages/xo-server/src/models/utils.js new file mode 100644 index 000000000..656509f00 --- /dev/null +++ b/packages/xo-server/src/models/utils.js @@ -0,0 +1,16 @@ +export const parseProp = (type, obj, name, defaultValue) => { + const value = obj[name] + if ( + value == null || + value === '' // do not warn on this trivial and minor error + ) { + return defaultValue + } + try { + return JSON.parse(value) + } catch (error) { + // do not display the error because it can occurs a lot and fill + // up log files + return defaultValue + } +} diff --git a/packages/xo-server/src/proxy-console.js b/packages/xo-server/src/proxy-console.js new file mode 100644 index 000000000..dfcaa714d --- /dev/null +++ b/packages/xo-server/src/proxy-console.js @@ -0,0 +1,74 @@ +import createDebug from 'debug' +import partialStream from 'partial-stream' +import {connect} from 'tls' +import {parse} from 'url' + +const debug = createDebug('xo:proxy-console') + +export default function proxyConsole (ws, vmConsole, sessionId) { + const url = parse(vmConsole.location) + + let closed = false + + const socket = connect({ + host: url.host, + port: url.port || 443, + rejectUnauthorized: false, + }, () => { + // Write headers. + socket.write([ + `CONNECT ${url.path} HTTP/1.0`, + `Host: ${url.hostname}`, + `Cookie: session_id=${sessionId}`, + '', '', + ].join('\r\n')) + + const onSend = (error) => { + if (error) { + debug('error sending to the XO client: %s', error.stack || error.message || error) + } + } + + socket.pipe(partialStream('\r\n\r\n', headers => { + // TODO: check status code 200. 
+ debug('connected') + })).on('data', data => { + if (!closed) { + ws.send(data, onSend) + } + }).on('end', () => { + if (!closed) { + closed = true + debug('disconnected from the console') + } + + ws.close() + }) + + ws + .on('error', error => { + closed = true + debug('error from the XO client: %s', error.stack || error.message || error) + + socket.end() + }) + .on('message', data => { + if (!closed) { + socket.write(data) + } + }) + .on('close', () => { + if (!closed) { + closed = true + debug('disconnected from the XO client') + } + + socket.end() + }) + }).on('error', error => { + closed = true + debug('error from the console: %s', error.stack || error.message || error) + + ws.close() + }) +} diff --git a/packages/xo-server/src/recover-account-cli.js b/packages/xo-server/src/recover-account-cli.js new file mode 100644 index 000000000..82c09a74c --- /dev/null +++ b/packages/xo-server/src/recover-account-cli.js @@ -0,0 +1,44 @@ +import appConf from 'app-conf' +import pw from 'pw' + +import Xo from './xo' +import { generateToken } from './utils' + +const recoverAccount = async ([ name ]) => { + if ( + name === undefined || + name === '--help' || + name === '-h' + ) { + return ` +xo-server-recover-account + + If the user does not exist, it is created, if it exists, updates + its password and resets its permission to Admin. 
+` + } + + let password = await new Promise(resolve => { + process.stdout.write('Password (leave empty for random): ') + pw(resolve) + }) + + if (password === '') { + password = await generateToken(10) + console.log('The generated password is', password) + } + + const xo = new Xo(await appConf.load('xo-server', { + ignoreUnknownFormats: true, + })) + + const user = await xo.getUserByName(name, true) + if (user !== null) { + await xo.updateUser(user.id, { password, permission: 'admin' }) + console.log(`user ${name} has been successfully updated`) + } else { + await xo.createUser({ name, password, permission: 'admin' }) + console.log(`user ${name} has been successfully created`) + } +} +export { recoverAccount as default } diff --git a/packages/xo-server/src/remote-handlers/abstract.js b/packages/xo-server/src/remote-handlers/abstract.js new file mode 100644 index 000000000..ca322d1e1 --- /dev/null +++ b/packages/xo-server/src/remote-handlers/abstract.js @@ -0,0 +1,225 @@ +import eventToPromise from 'event-to-promise' +import through2 from 'through2' +import { ignoreErrors } from 'promise-toolbox' +import { parse } from 'xo-remote-parser' + +import { + addChecksumToReadStream, + getPseudoRandomBytes, + streamToBuffer, + validChecksumOfReadStream, +} from '../utils' + +export default class RemoteHandlerAbstract { + constructor (remote) { + this._remote = {...remote, ...parse(remote.url)} + if (this._remote.type !== this.type) { + throw new Error('Incorrect remote type') + } + } + + get type () { + throw new Error('Not implemented') + } + + /** + * Asks the handler to sync the state of the effective remote with its' metadata + */ + async sync () { + return this._sync() + } + + async _sync () { + throw new Error('Not implemented') + } + + /** + * Free the resources possibly dedicated to put the remote at work, when it is no more needed + */ + async forget () { + return this._forget() + } + + async _forget () { + throw new Error('Not implemented') + } + + async test () { 
+ const testFileName = `${Date.now()}.test` + const data = getPseudoRandomBytes(1024 * 1024) + let step = 'write' + try { + await this.outputFile(testFileName, data) + step = 'read' + const read = await this.readFile(testFileName) + if (data.compare(read) !== 0) { + throw new Error('output and input did not match') + } + return { + success: true, + } + } catch (error) { + return { + success: false, + step, + file: testFileName, + error: error.message || String(error), + } + } finally { + this.unlink(testFileName)::ignoreErrors() + } + } + + async outputFile (file, data, options) { + return this._outputFile(file, data, { + flags: 'wx', + ...options, + }) + } + + async _outputFile (file, data, options) { + const stream = await this.createOutputStream(file, options) + const promise = eventToPromise(stream, 'finish') + stream.end(data) + return promise + } + + async readFile (file, options) { + return this._readFile(file, options) + } + + _readFile (file, options) { + return this.createReadStream(file, options).then(streamToBuffer) + } + + async rename (oldPath, newPath) { + return this._rename(oldPath, newPath) + } + + async _rename (oldPath, newPath) { + throw new Error('Not implemented') + } + + async list (dir = '.') { + return this._list(dir) + } + + async _list (dir) { + throw new Error('Not implemented') + } + + createReadStream (file, { + checksum = false, + ignoreMissingChecksum = false, + ...options + } = {}) { + const streamP = this._createReadStream(file, options).then(stream => { + // detect early errors + let promise = eventToPromise(stream, 'readable') + + // try to add the length prop if missing and not a range stream + if ( + stream.length === undefined && + options.end === undefined && + options.start === undefined + ) { + promise = Promise.all([ + promise, + this.getSize(file).then(size => { + stream.length = size + })::ignoreErrors(), + ]) + } + + return promise.then(() => stream) + }) + + if (!checksum) { + return streamP + } + + // avoid a 
unhandled rejection warning + streamP::ignoreErrors() + + return this.readFile(`${file}.checksum`).then( + checksum => streamP.then(stream => { + const { length } = stream + stream = validChecksumOfReadStream(stream, String(checksum).trim()) + stream.length = length + + return stream + }), + error => { + if (ignoreMissingChecksum && error && error.code === 'ENOENT') { + return streamP + } + throw error + } + ) + } + + async _createReadStream (file, options) { + throw new Error('Not implemented') + } + + async refreshChecksum (path) { + const stream = addChecksumToReadStream(await this.createReadStream(path)) + stream.resume() // start reading the whole file + const checksum = await stream.checksum + await this.outputFile(`${path}.checksum`, checksum) + } + + async createOutputStream (file, { + checksum = false, + ...options + } = {}) { + const streamP = this._createOutputStream(file, { + flags: 'wx', + ...options, + }) + + if (!checksum) { + return streamP + } + + const connectorStream = through2() + const forwardError = error => { + connectorStream.emit('error', error) + } + + const streamWithChecksum = addChecksumToReadStream(connectorStream) + streamWithChecksum.pipe(await streamP) + + streamWithChecksum.checksum + .then(value => this.outputFile(`${file}.checksum`, value)) + .catch(forwardError) + + return connectorStream + } + + async _createOutputStream (file, options) { + throw new Error('Not implemented') + } + + async unlink (file, { + checksum = true, + } = {}) { + if (checksum) { + this._unlink(`${file}.checksum`)::ignoreErrors() + } + + return this._unlink(file) + } + + async _unlink (file) { + throw new Error('Not implemented') + } + + async getSize (file) { + return this._getSize(file) + } + + async _getSize (file) { + throw new Error('Not implemented') + } +} diff --git a/packages/xo-server/src/remote-handlers/local.js b/packages/xo-server/src/remote-handlers/local.js new file mode 100644 index 000000000..5d2ebba55 --- /dev/null +++ 
import fs from 'fs-extra'
import { dirname, resolve } from 'path'
import { noop, startsWith } from 'lodash'

import RemoteHandlerAbstract from './abstract'

// Remote handler backed by a plain directory on the local file system.
export default class LocalHandler extends RemoteHandlerAbstract {
  get type () {
    return 'file'
  }

  // Root directory of the remote on the local file system.
  _getRealPath () {
    return this._remote.path
  }

  // Resolve `file` inside the remote root, refusing any path that
  // escapes it (e.g. via `..` components or an absolute path).
  _getFilePath (file) {
    const realPath = this._getRealPath()
    const parts = [realPath]
    if (file) {
      parts.push(file)
    }
    const path = resolve.apply(null, parts)

    // Guard against path traversal: the resolved path must be the root
    // itself or live below it.  A bare prefix test would wrongly accept
    // siblings such as `/mnt/foo-evil` for a root of `/mnt/foo`.
    const prefix = realPath.endsWith('/') ? realPath : `${realPath}/`
    if (path !== realPath && !startsWith(path, prefix)) {
      throw new Error('Remote path is unavailable')
    }
    return path
  }

  // Ensure the directory exists and is readable/writable; on failure the
  // remote is flagged as disabled with the error message recorded.
  async _sync () {
    if (this._remote.enabled) {
      try {
        const path = this._getRealPath()
        await fs.ensureDir(path)
        // `fs.R_OK`/`fs.W_OK` are deprecated aliases: the access-mode
        // constants live on `fs.constants` in current Node.js versions.
        await fs.access(path, fs.constants.R_OK | fs.constants.W_OK)
      } catch (exc) {
        this._remote.enabled = false
        this._remote.error = exc.message
      }
    }
    return this._remote
  }

  // Nothing to clean up for a local directory.
  async _forget () {
    return noop()
  }

  async _outputFile (file, data, options) {
    const path = this._getFilePath(file)
    await fs.ensureDir(dirname(path))
    await fs.writeFile(path, data, options)
  }

  async _readFile (file, options) {
    return fs.readFile(this._getFilePath(file), options)
  }

  async _rename (oldPath, newPath) {
    return fs.rename(this._getFilePath(oldPath), this._getFilePath(newPath))
  }

  async _list (dir = '.') {
    return fs.readdir(this._getFilePath(dir))
  }

  async _createReadStream (file, options) {
    return fs.createReadStream(this._getFilePath(file), options)
  }

  async _createOutputStream (file, options) {
    const path = this._getFilePath(file)
    await fs.ensureDir(dirname(path))
    return fs.createWriteStream(path, options)
  }

  async _unlink (file) {
    return fs.unlink(this._getFilePath(file)).catch(error => {
      // do not throw if the file did not exist
      if (error == null || error.code !== 'ENOENT') {
        throw error
      }
    })
  }

  async _getSize (file) {
    const stats = await fs.stat(this._getFilePath(file))
    return stats.size
  }
}
+ } + } + + async _umount (remote) { + await execa('umount', ['--force', this._getRealPath()]) + } +} diff --git a/packages/xo-server/src/remote-handlers/smb.js b/packages/xo-server/src/remote-handlers/smb.js new file mode 100644 index 000000000..4ab6685bd --- /dev/null +++ b/packages/xo-server/src/remote-handlers/smb.js @@ -0,0 +1,191 @@ +import Smb2 from '@marsaud/smb2-promise' + +import RemoteHandlerAbstract from './abstract' +import { + noop, + pFinally, +} from '../utils' + +// Normalize the error code for file not found. +const normalizeError = error => { + const { code } = error + + return ( + code === 'STATUS_OBJECT_NAME_NOT_FOUND' || + code === 'STATUS_OBJECT_PATH_NOT_FOUND' + ) + ? Object.create(error, { + code: { + configurable: true, + readable: true, + value: 'ENOENT', + writable: true, + }, + }) + : error +} + +export default class SmbHandler extends RemoteHandlerAbstract { + constructor (remote) { + super(remote) + this._forget = noop + } + + get type () { + return 'smb' + } + + _getClient (remote) { + return new Smb2({ + share: `\\\\${remote.host}`, + domain: remote.domain, + username: remote.username, + password: remote.password, + autoCloseTimeout: 0, + }) + } + + _getFilePath (file) { + if (file === '.') { + file = undefined + } + + let path = (this._remote.path !== '') + ? this._remote.path + : '' + + // Ensure remote path is a directory. + if (path !== '' && path[path.length - 1] !== '\\') { + path += '\\' + } + + if (file) { + path += file.replace(/\//g, '\\') + } + + return path + } + + _dirname (file) { + const parts = file.split('\\') + parts.pop() + return parts.join('\\') + } + + async _sync () { + if (this._remote.enabled) { + try { + // Check access (smb2 does not expose connect in public so far...) 
+ await this.list() + } catch (error) { + this._remote.enabled = false + this._remote.error = error.message + } + } + return this._remote + } + + async _outputFile (file, data, options = {}) { + const client = this._getClient(this._remote) + const path = this._getFilePath(file) + const dir = this._dirname(path) + + if (dir) { + await client.ensureDir(dir) + } + + return client.writeFile(path, data, options)::pFinally(() => { client.close() }) + } + + async _readFile (file, options = {}) { + const client = this._getClient(this._remote) + let content + + try { + content = await client.readFile(this._getFilePath(file), options)::pFinally(() => { client.close() }) + } catch (error) { + throw normalizeError(error) + } + + return content + } + + async _rename (oldPath, newPath) { + const client = this._getClient(this._remote) + + try { + await client.rename(this._getFilePath(oldPath), this._getFilePath(newPath))::pFinally(() => { client.close() }) + } catch (error) { + throw normalizeError(error) + } + } + + async _list (dir = '.') { + const client = this._getClient(this._remote) + let list + + try { + list = await client.readdir(this._getFilePath(dir))::pFinally(() => { client.close() }) + } catch (error) { + throw normalizeError(error) + } + + return list + } + + async _createReadStream (file, options = {}) { + const client = this._getClient(this._remote) + let stream + + try { + // FIXME ensure that options are properly handled by @marsaud/smb2 + stream = await client.createReadStream(this._getFilePath(file), options) + stream.on('end', () => client.close()) + } catch (error) { + throw normalizeError(error) + } + + return stream + } + + async _createOutputStream (file, options = {}) { + const client = this._getClient(this._remote) + const path = this._getFilePath(file) + const dir = this._dirname(path) + let stream + try { + if (dir) { + await client.ensureDir(dir) + } + stream = await client.createWriteStream(path, options) // FIXME ensure that options are properly 
handled by @marsaud/smb2 + } catch (err) { + client.close() + throw err + } + stream.on('finish', () => client.close()) + return stream + } + + async _unlink (file) { + const client = this._getClient(this._remote) + + try { + await client.unlink(this._getFilePath(file))::pFinally(() => { client.close() }) + } catch (error) { + throw normalizeError(error) + } + } + + async _getSize (file) { + const client = await this._getClient(this._remote) + let size + + try { + size = await client.getSize(this._getFilePath(file))::pFinally(() => { client.close() }) + } catch (error) { + throw normalizeError(error) + } + + return size + } +} diff --git a/packages/xo-server/src/schemas/acl.js b/packages/xo-server/src/schemas/acl.js new file mode 100644 index 000000000..84362c289 --- /dev/null +++ b/packages/xo-server/src/schemas/acl.js @@ -0,0 +1,28 @@ +export default { + $schema: 'http://json-schema.org/draft-04/schema#', + type: 'object', + properties: { + id: { + type: 'string', + description: 'unique identifier for this ACL', + }, + action: { + type: 'string', + description: 'permission (or role)', + }, + object: { + type: 'string', + description: 'item (or set)', + }, + subject: { + type: 'string', + description: 'user (or group)', + }, + }, + required: [ + 'id', + 'action', + 'object', + 'subject', + ], +} diff --git a/packages/xo-server/src/schemas/job.js b/packages/xo-server/src/schemas/job.js new file mode 100644 index 000000000..7da1fe069 --- /dev/null +++ b/packages/xo-server/src/schemas/job.js @@ -0,0 +1,43 @@ +export default { + $schema: 'http://json-schema.org/draft-04/schema#', + type: 'object', + properties: { + type: { + enum: ['call'], + }, + id: { + type: 'string', + description: 'job identifier', + }, + name: { + type: 'string', + description: 'human readable name', + }, + userId: { + type: 'string', + description: 'identifier of the user who have created the job (the permissions of the user are used by the job)', + }, + key: { + type: 'string', + // TODO 
description + }, + method: { + type: 'string', + description: 'called method', + }, + paramsVector: { + type: 'object', + }, + timeout: { + type: 'number', + description: 'number of milliseconds after which the job is considered failed', + }, + }, + required: [ + 'type', + 'id', + 'userId', + 'key', + 'method', + ], +} diff --git a/packages/xo-server/src/schemas/log.js b/packages/xo-server/src/schemas/log.js new file mode 100644 index 000000000..9778ae969 --- /dev/null +++ b/packages/xo-server/src/schemas/log.js @@ -0,0 +1,29 @@ +export default { + $schema: 'http://json-schema.org/draft-04/schema#', + type: 'object', + properties: { + id: { + type: 'string', + description: 'unique identifier for this log', + }, + time: { + type: 'string', + description: 'timestamp (in milliseconds) of this log', + }, + message: { + type: 'string', + description: 'human readable (short) description of this log', + }, + namespace: { + type: 'string', + description: 'space to store logs', + }, + data: {}, + }, + required: [ + 'id', + 'time', + 'message', + 'namespace', + ], +} diff --git a/packages/xo-server/src/schemas/log/jobCallEnd.js b/packages/xo-server/src/schemas/log/jobCallEnd.js new file mode 100644 index 000000000..a1f32afe1 --- /dev/null +++ b/packages/xo-server/src/schemas/log/jobCallEnd.js @@ -0,0 +1,33 @@ +export default { + $schema: 'http://json-schema.org/draft-04/schema#', + type: 'object', + properties: { + event: { + enum: ['jobCall.end'], + }, + runJobId: { + type: 'string', + description: 'instance id of this job', + }, + runCallId: { + type: 'string', + description: 'instance id of this call', + }, + error: { + type: 'object', + description: 'describe one failure, exists if the call has failed', + }, + returnedValue: { + description: 'call\'s result, exists if the call is a success', + }, + }, + required: [ + 'event', + 'runJobId', + 'runCallId', + ], + oneOf: [ + { required: ['error'] }, + { required: ['returnedValue'] }, + ], +} diff --git 
a/packages/xo-server/src/schemas/log/jobCallStart.js b/packages/xo-server/src/schemas/log/jobCallStart.js new file mode 100644 index 000000000..8e5e6af49 --- /dev/null +++ b/packages/xo-server/src/schemas/log/jobCallStart.js @@ -0,0 +1,27 @@ +export default { + $schema: 'http://json-schema.org/draft-04/schema#', + type: 'object', + properties: { + event: { + enum: ['jobCall.start'], + }, + runJobId: { + type: 'string', + description: 'instance id of this job', + }, + method: { + type: 'string', + description: 'method linked to this call', + }, + params: { + type: 'object', + description: 'params of the called method', + }, + }, + required: [ + 'event', + 'runJobId', + 'method', + 'params', + ], +} diff --git a/packages/xo-server/src/schemas/log/jobEnd.js b/packages/xo-server/src/schemas/log/jobEnd.js new file mode 100644 index 000000000..a74aa3089 --- /dev/null +++ b/packages/xo-server/src/schemas/log/jobEnd.js @@ -0,0 +1,21 @@ +export default { + $schema: 'http://json-schema.org/draft-04/schema#', + type: 'object', + properties: { + event: { + enum: ['job.end'], + }, + runJobId: { + type: 'string', + description: 'instance id of this job', + }, + error: { + type: 'object', + description: 'describe one failure, exists if no call has been made', + }, + }, + required: [ + 'event', + 'runJobId', + ], +} diff --git a/packages/xo-server/src/schemas/log/jobStart.js b/packages/xo-server/src/schemas/log/jobStart.js new file mode 100644 index 000000000..1c29df2aa --- /dev/null +++ b/packages/xo-server/src/schemas/log/jobStart.js @@ -0,0 +1,26 @@ +export default { + $schema: 'http://json-schema.org/draft-04/schema#', + type: 'object', + properties: { + event: { + enum: ['job.start'], + }, + userId: { + type: 'string', + description: 'user who executes this job', + }, + jobId: { + type: 'string', + description: 'identifier of this job', + }, + key: { + type: 'string', + }, + }, + required: [ + 'event', + 'userId', + 'jobId', + 'key', + ], +} diff --git 
a/packages/xo-server/src/schemas/plugin.js b/packages/xo-server/src/schemas/plugin.js new file mode 100644 index 000000000..a6a364cbf --- /dev/null +++ b/packages/xo-server/src/schemas/plugin.js @@ -0,0 +1,49 @@ +export default { + $schema: 'http://json-schema.org/draft-04/schema#', + type: 'object', + properties: { + id: { + type: 'string', + description: 'unique identifier for this plugin', + }, + name: { + type: 'string', + description: 'unique human readable name for this plugin', + }, + autoload: { + type: 'boolean', + description: 'whether this plugin is loaded on startup', + }, + loaded: { + type: 'boolean', + description: 'whether or not this plugin is currently loaded', + }, + unloadable: { + type: 'boolean', + default: true, + description: 'whether or not this plugin can be unloaded', + }, + configuration: { + type: 'object', + description: 'current configuration of this plugin (not present if none)', + }, + configurationSchema: { + $ref: 'http://json-schema.org/draft-04/schema#', + description: 'configuration schema for this plugin (not present if not configurable)', + }, + testable: { + type: 'boolean', + description: 'whether or not this plugin can be tested', + }, + testSchema: { + $ref: 'http://json-schema.org/draft-04/schema#', + description: 'test schema for this plugin', + }, + }, + required: [ + 'id', + 'name', + 'autoload', + 'loaded', + ], +} diff --git a/packages/xo-server/src/schemas/user.js b/packages/xo-server/src/schemas/user.js new file mode 100644 index 000000000..aa61c586c --- /dev/null +++ b/packages/xo-server/src/schemas/user.js @@ -0,0 +1,50 @@ +export default { + $schema: 'http://json-schema.org/draft-04/schema#', + type: 'object', + properties: { + id: { + type: 'string', + description: 'unique identifier for this user', + }, + email: { + type: 'string', + description: 'email address of this user', + }, + groups: { + type: 'array', + items: { + type: 'string', + }, + description: 'identifier of groups this user belong to', + }, + 
import through2 from 'through2'

// Build a pass-through stream which counts the bytes flowing through it.
// The running total is exposed on the stream itself as a `size` property.
const createSizeStream = () => {
  const stream = through2((chunk, encoding, next) => {
    stream.size += chunk.length
    next(null, chunk)
  })
  stream.size = 0
  return stream
}

export { createSizeStream as default }
// Read an entire stream into a single newly-allocated Buffer.
//
// Resolves with the concatenated data once the stream ends; rejects on
// the first stream error.  All listeners are removed on settlement.
const streamToNewBuffer = stream =>
  new Promise((resolve, reject) => {
    const chunks = []
    let length = 0

    const onData = chunk => {
      chunks.push(chunk)
      length += chunk.length
    }

    const removeListeners = () => {
      stream.removeListener('data', onData)
      stream.removeListener('end', onEnd)
      stream.removeListener('error', onError)
    }

    const onEnd = () => {
      removeListeners()
      resolve(Buffer.concat(chunks, length))
    }
    const onError = error => {
      removeListeners()
      reject(error)
    }

    stream.on('data', onData)
    stream.on('end', onEnd)
    stream.on('error', onError)
  })

export { streamToNewBuffer as default }
// Similar to map() + Promise.all() but waits for all promises to
// settle before rejecting (with the first error encountered).
//
// If `collection` is itself a promise, it is awaited first.
export const asyncMap = (collection, iteratee) => {
  if (isPromise(collection)) {
    return collection.then(resolved => asyncMap(resolved, iteratee))
  }

  // Remember only the first error; later ones are discarded.
  let hasError = false
  let firstError
  const recordError = error => {
    if (!hasError) {
      hasError = true
      firstError = error
    }
  }

  const promises = mapToArray(collection, (item, key, collection) =>
    new Promise(resolve => {
      resolve(iteratee(item, key, collection))
    }).catch(recordError)
  )

  return Promise.all(promises).then(values => {
    if (hasError) {
      throw firstError
    }
    return values
  })
}
// Convert a camelCase identifier to snake_case.
//
// Only lowercase-or-digit → uppercase boundaries are rewritten, so
// pre-existing underscores and runs of capitals are left untouched.
export function camelToSnakeCase (string) {
  return string.replace(/([a-z0-9])([A-Z])/g, (match, before, upper) => {
    return before + '_' + upper.toLowerCase()
  })
}
+// const checksumStream = addChecksumToReadStream(sourceStream) +// await Promise.all([ +// eventToPromise(checksumStream.pipe(targetStream), 'finish'), +// checksumStream.checksum.then(console.log) +// ]) +export const addChecksumToReadStream = (stream, algorithm = 'md5') => { + const algorithmId = ALGORITHM_TO_ID[algorithm] + + if (!algorithmId) { + throw new Error(`unknown algorithm: ${algorithm}`) + } + + const hash = createHash(algorithm) + const { promise, resolve } = defer() + + const wrapper = stream.pipe(through2( + (chunk, enc, callback) => { + hash.update(chunk) + callback(null, chunk) + }, + callback => { + resolve(hash.digest('hex')) + callback() + } + )) + + stream.on('error', error => wrapper.emit('error', error)) + wrapper.checksum = promise.then(hash => `$${algorithmId}$$${hash}`) + + return wrapper +} + +// Check if the checksum of a readable stream is equals to an expected checksum. +// The given stream is wrapped in a stream which emits an error event +// if the computed checksum is not equals to the expected checksum. +export const validChecksumOfReadStream = (stream, expectedChecksum) => { + const algorithmId = expectedChecksum.slice(1, expectedChecksum.indexOf('$', 1)) + + if (!algorithmId) { + throw new Error(`unknown algorithm: ${algorithmId}`) + } + + const hash = createHash(ID_TO_ALGORITHM[algorithmId]) + + const wrapper = stream.pipe(through2( + { highWaterMark: 0 }, + (chunk, enc, callback) => { + hash.update(chunk) + callback(null, chunk) + }, + callback => { + const checksum = `$${algorithmId}$$${hash.digest('hex')}` + + callback( + checksum !== expectedChecksum + ? new Error(`Bad checksum (${checksum}), expected: ${expectedChecksum}`) + : null + ) + } + )) + + stream.on('error', error => wrapper.emit('error', error)) + wrapper.checksumVerified = eventToPromise(wrapper, 'end') + + return wrapper +} + +// ------------------------------------------------------------------- + +// Ensure the value is an array, wrap it if necessary. 
// Return the first argument that is not `undefined`.
//
// `null` counts as defined.  Returns `undefined` when every argument is
// `undefined` or when called without arguments.
export const firstDefined = (...values) =>
  values.find(value => value !== undefined)
+export const generateToken = (randomBytes => { + return (n = 32) => randomBytes(n).then(base64url) +})(promisify(randomBytes)) + +// ------------------------------------------------------------------- + +export const formatXml = (function () { + const builder = new xml2js.Builder({ + headless: true, + }) + + return (...args) => builder.buildObject(...args) +})() + +export const parseXml = (function () { + const opts = { + mergeAttrs: true, + explicitArray: false, + } + + return (xml) => { + let result + + // xml2js.parseString() use a callback for synchronous code. + xml2js.parseString(xml, opts, (error, result_) => { + if (error) { + throw error + } + + result = result_ + }) + + return result + } +})() + +// ------------------------------------------------------------------- + +// Very light and fast set. +// +// - works only with strings +// - methods are already bound and chainable +export const lightSet = collection => { + let data = createRawObject() + if (collection) { + forEach(collection, value => { + data[value] = true + }) + collection = null + } + + const set = { + add: value => { + data[value] = true + return set + }, + clear: () => { + data = createRawObject() + return set + }, + delete: value => { + delete data[value] + return set + }, + has: value => data[value], + toArray: () => keys(data), + } + return set +} + +// ------------------------------------------------------------------- + +// This function does nothing and returns undefined. +// +// It is often used to swallow promise's errors. +export const noop = () => {} + +// ------------------------------------------------------------------- + +// Usage: pDebug(promise, name) or promise::pDebug(name) +export function pDebug (promise, name) { + if (arguments.length === 1) { + name = promise + promise = this + } + + Promise.resolve(promise).then( + value => { + console.log( + '%s', + `Promise ${name} resolved${value !== undefined ? 
` with ${kindOf(value)}` : ''}` + ) + }, + reason => { + console.log( + '%s', + `Promise ${name} rejected${reason !== undefined ? ` with ${kindOf(reason)}` : ''}` + ) + } + ) + + return promise +} + +// Given a collection (array or object) which contains promises, +// return a promise that is fulfilled when all the items in the +// collection are either fulfilled or rejected. +// +// This promise will be fulfilled with a collection (of the same type, +// array or object) containing promise inspections. +// +// Usage: pSettle(promises) or promises::pSettle() +export function pSettle (promises) { + return (this || promises)::pAll(p => p::pReflect()) +} + +// ------------------------------------------------------------------- + +export { + all as pAll, + delay as pDelay, + fromCallback as pFromCallback, + lastly as pFinally, + promisify, + promisifyAll, + reflect as pReflect, +} from 'promise-toolbox' + +// ------------------------------------------------------------------- + +export function parseSize (size) { + if (!isString(size)) { + return size + } + + let bytes = humanFormat.parse.raw(size, { scale: 'binary' }) + if (bytes.unit && bytes.unit !== 'B') { + bytes = humanFormat.parse.raw(size) + + if (bytes.unit && bytes.unit !== 'B') { + throw new Error('invalid size: ' + size) + } + } + return Math.floor(bytes.value * bytes.factor) +} + +// ------------------------------------------------------------------- + +const _has = Object.prototype.hasOwnProperty + +// Removes an own property from an object and returns its value. +export const popProperty = obj => { + for (const prop in obj) { + if (_has.call(obj, prop)) { + return extractProperty(obj, prop) + } + } +} + +// ------------------------------------------------------------------- + +// Format a date in ISO 8601 in a safe way to be used in filenames +// (even on Windows). 
+export const safeDateFormat = utcFormat('%Y%m%dT%H%M%SZ') + +export const safeDateParse = utcParse('%Y%m%dT%H%M%SZ') + +// ------------------------------------------------------------------- + +// This functions are often used throughout xo-server. +// +// Exports them from here to avoid direct dependencies on lodash/ +export { default as forEach } from 'lodash/forEach' +export { default as isArray } from 'lodash/isArray' +export { default as isBoolean } from 'lodash/isBoolean' +export { default as isEmpty } from 'lodash/isEmpty' +export { default as isFunction } from 'lodash/isFunction' +export { default as isInteger } from 'lodash/isInteger' +export { default as isObject } from 'lodash/isObject' +export { default as isString } from 'lodash/isString' +export { default as mapToArray } from 'lodash/map' + +// ------------------------------------------------------------------- + +// Special value which can be returned to stop an iteration in map() +// and mapInPlace(). +export const DONE = {} + +// Fill `target` by running each element in `collection` through +// `iteratee`. +// +// If `target` is undefined, it defaults to a new array if +// `collection` is array-like (has a `length` property), otherwise an +// object. +// +// The context of `iteratee` can be specified via `thisArg`. +// +// Note: the Mapping can be interrupted by returning the special value +// `DONE` provided as the fourth argument. +// +// Usage: map(collection, item => item + 1) +export function map ( + collection, + iteratee, + target = has(collection, 'length') ? [] : {} +) { + forEach(collection, (item, i) => { + const value = iteratee(item, i, collection, DONE) + if (value === DONE) { + return false + } + + target[i] = value + }) + + return target +} + +// ------------------------------------------------------------------- + +// Create a hash from multiple values. 
// Create a serializable plain object from an error.
//
// `message` and `stack` are copied explicitly because they are
// non-enumerable on Error instances; the error's own enumerable
// properties (e.g. `code`) are then copied over the defaults.
export function serializeError (error) {
  const serialized = {
    message: error.message,
    stack: error.stack,
  }
  Object.assign(serialized, error)
  return serialized
}
export const thunkToArray = thunk => {
  const values = []
  // `::values.push` is the Babel bind operator: equivalent to
  // values.push.bind(values).
  thunk(::values.push)
  return values
}

// -------------------------------------------------------------------

// Creates a new function which throws an error.
//
// If `error` is a string it is wrapped in an Error, otherwise it is
// thrown as is.
//
// ```js
// promise.catch(throwFn('an error has occurred'))
//
// function foo (param = throwFn('param is required')()) {}
// ```
export const throwFn = error => () => {
  throw (
    isString(error)
      ? new Error(error)
      : error
  )
}

// -------------------------------------------------------------------

// Creates a temporary directory and returns a promise of its path.
export const tmpDir = () => fromCallback(cb => tmp.dir(cb))

// -------------------------------------------------------------------

// Wrap a value in a function.
export const wrap = value => () => value

// -------------------------------------------------------------------

// Like map() but only keeps truthy results, always returning an
// array.
export const mapFilter = (collection, iteratee) => {
  const result = []
  forEach(collection, (...args) => {
    const value = iteratee(...args)
    if (value) {
      result.push(value)
    }
  })
  return result
}

// -------------------------------------------------------------------

// Splits `string` on the first occurrence of `separator`.
//
// Returns null when the separator is not found, otherwise the pair
// [before, after] (the separator itself is not included).
export const splitFirst = (string, separator) => {
  const i = string.indexOf(separator)
  return i === -1 ? null : [
    string.slice(0, i),
    string.slice(i + separator.length),
  ]
}

// -------------------------------------------------------------------

// Returns the first own enumerable property name of `object`, or
// undefined if there is none.
export const getFirstPropertyName = object => {
  for (const key in object) {
    if (Object.prototype.hasOwnProperty.call(object, key)) {
      return key
    }
  }
}
diff --git a/packages/xo-server/src/utils.spec.js b/packages/xo-server/src/utils.spec.js
new file mode 100644
index 000000000..a2bb80594
--- /dev/null
+++ b/packages/xo-server/src/utils.spec.js
@@ -0,0 +1,213 @@
/* eslint-env jest */

import {
  camelToSnakeCase,
  createRawObject,
  diffItems,
  ensureArray,
  extractProperty,
  formatXml,
  generateToken,
  parseSize,
  pSettle,
} from './utils'

// ===================================================================

describe('camelToSnakeCase()', function () {
  it('converts a string from camelCase to snake_case', function () {
    expect(camelToSnakeCase('fooBar')).toBe('foo_bar')
    expect(camelToSnakeCase('ipv4Allowed')).toBe('ipv4_allowed')
  })

  it('does not alter snake_case strings', function () {
    expect(camelToSnakeCase('foo_bar')).toBe('foo_bar')
    expect(camelToSnakeCase('ipv4_allowed')).toBe('ipv4_allowed')
  })

  // NOTE(review): "expect" in this test title is likely a typo for
  // "except" — the title is a runtime string, left unchanged here.
  it('does not alter upper case letters expect those from the camelCase', function () {
    expect(camelToSnakeCase('fooBar_BAZ')).toBe('foo_bar_BAZ')
  })
})

// -------------------------------------------------------------------

describe('createRawObject()', () => {
  it('returns an empty object', () => {
    expect(createRawObject()).toEqual({})
  })

  it('creates a new object each time', () => {
    expect(createRawObject()).not.toBe(createRawObject())
  })

  if (Object.getPrototypeOf) {
    it('creates an object without a prototype', () => {
      expect(Object.getPrototypeOf(createRawObject())).toBe(null)
    })
  }
})

// -------------------------------------------------------------------

describe('diffItems', () => {
  it('computes the added/removed items between 2
iterables', () => {
    expect(diffItems(
      ['foo', 'bar'],
      ['baz', 'foo']
    )).toEqual([
      ['bar'],
      ['baz'],
    ])
  })
})

// -------------------------------------------------------------------

describe('ensureArray()', function () {
  it('wrap the value in an array', function () {
    const value = 'foo'

    expect(ensureArray(value)).toEqual([value])
  })

  it('returns an empty array for undefined', function () {
    expect(ensureArray(undefined)).toEqual([])
  })

  it('returns the object itself if is already an array', function () {
    const array = ['foo', 'bar', 'baz']

    expect(ensureArray(array)).toBe(array)
  })
})

// -------------------------------------------------------------------

describe('extractProperty()', function () {
  it('returns the value of the property', function () {
    const value = {}
    const obj = { prop: value }

    expect(extractProperty(obj, 'prop')).toBe(value)
  })

  it('removes the property from the object', function () {
    const value = {}
    const obj = { prop: value }

    expect(extractProperty(obj, 'prop')).toBe(value)
    expect(obj.prop).not.toBeDefined()
  })
})

// -------------------------------------------------------------------

describe('formatXml()', function () {
  it('formats a JS object to an XML string', function () {
    // NOTE(review): the expected value below only contains blank
    // lines — the XML markup appears to have been lost (probably
    // stripped during an extraction). Restore the expected
    // `<foo>`/`<bar baz="…"/>` markup from version control before
    // trusting this test.
    expect(formatXml({
      foo: {
        bar: [
          {$: {baz: 'plop'}},
          {$: {baz: 'plip'}},
        ],
      },
    })).toBe(`


`)
  })
})

// -------------------------------------------------------------------

describe('generateToken()', () => {
  it('generates a string', async () => {
    expect(typeof await generateToken()).toBe('string')
  })
})

// -------------------------------------------------------------------

describe('parseSize()', function () {
  it('parses a human size', function () {
    expect(parseSize('1G')).toBe(1e9)
  })

  it('returns the parameter if already a number', function () {
    expect(parseSize(1e6)).toBe(1e6)
  })

  it('throws if the string cannot be parsed', function () {
    expect(function () {
      parseSize('foo')
    }).toThrow()
  })

  it('supports the B unit as suffix', function () {
    expect(parseSize('3MB')).toBe(3e6)
  })
})

// -------------------------------------------------------------------

describe('pSettle()', () => {
  it('works with arrays', async () => {
    const rejection = 'fatality'
    const [
      status1,
      status2,
      status3,
    ] = await pSettle([
      Promise.resolve(42),
      Math.PI,
      Promise.reject(rejection),
    ])

    expect(status1.isRejected()).toBe(false)
    expect(status2.isRejected()).toBe(false)
    expect(status3.isRejected()).toBe(true)

    expect(status1.isFulfilled()).toBe(true)
    expect(status2.isFulfilled()).toBe(true)
    expect(status3.isFulfilled()).toBe(false)

    expect(status1.value()).toBe(42)
    expect(status2.value()).toBe(Math.PI)
    // `::status3.value` (Babel bind operator) binds the method so it
    // can be passed unbound to toThrow().
    expect(::status3.value).toThrow()

    expect(::status1.reason).toThrow()
    expect(::status2.reason).toThrow()
    expect(status3.reason()).toBe(rejection)
  })

  it('works with objects', async () => {
    const rejection = 'fatality'

    const {
      a: status1,
      b: status2,
      c: status3,
    } = await pSettle({
      a: Promise.resolve(42),
      b: Math.PI,
      c: Promise.reject(rejection),
    })

    expect(status1.isRejected()).toBe(false)
    expect(status2.isRejected()).toBe(false)
    expect(status3.isRejected()).toBe(true)

    expect(status1.isFulfilled()).toBe(true)
    expect(status2.isFulfilled()).toBe(true)
    expect(status3.isFulfilled()).toBe(false)

    expect(status1.value()).toBe(42)
    expect(status2.value()).toBe(Math.PI)
    expect(::status3.value).toThrow()

    expect(::status1.reason).toThrow()
    expect(::status2.reason).toThrow()
    expect(status3.reason()).toBe(rejection)
  })
})
diff --git a/packages/xo-server/src/vhd-merge.js b/packages/xo-server/src/vhd-merge.js
new file mode 100644
index 000000000..425874645
--- /dev/null
+++ b/packages/xo-server/src/vhd-merge.js
@@ -0,0 +1,683 @@
// TODO: remove once completely merged in vhd.js

import assert from
'assert'
import eventToPromise from 'event-to-promise'
import fu from '@nraynaud/struct-fu'
import isEqual from 'lodash/isEqual'

import constantStream from './constant-stream'
import {
  noop,
  streamToBuffer,
} from './utils'

// Set to a truthy value to enable verbose logging of VHD operations.
const VHD_UTIL_DEBUG = 0
const debug = VHD_UTIL_DEBUG
  ? str => console.log(`[vhd-util]${str}`)
  : noop

// ===================================================================
//
// Spec:
// https://www.microsoft.com/en-us/download/details.aspx?id=23850
//
// C implementation:
// https://github.com/rubiojr/vhd-util-convert
//
// ===================================================================

// Sizes in bytes.
const VHD_FOOTER_SIZE = 512
const VHD_HEADER_SIZE = 1024
const VHD_SECTOR_SIZE = 512

// Block allocation table entry size. (Block addr)
const VHD_ENTRY_SIZE = 4

const VHD_PARENT_LOCATOR_ENTRIES = 8
const VHD_PLATFORM_CODE_NONE = 0

// Types of backup treated. Others are not supported.
const HARD_DISK_TYPE_DYNAMIC = 3 // Full backup.
const HARD_DISK_TYPE_DIFFERENCING = 4 // Delta backup.

// Other.
const BLOCK_UNUSED = 0xFFFFFFFF
const BIT_MASK = 0x80

// unused block as buffer containing a uint32BE
const BUF_BLOCK_UNUSED = Buffer.allocUnsafe(VHD_ENTRY_SIZE)
BUF_BLOCK_UNUSED.writeUInt32BE(BLOCK_UNUSED, 0)

// ===================================================================

// Layout of the 512-byte VHD footer; the trailing comments are byte
// offsets within the structure.
const fuFooter = fu.struct([
  fu.char('cookie', 8), // 0
  fu.uint32('features'), // 8
  fu.uint32('fileFormatVersion'), // 12
  fu.struct('dataOffset', [
    fu.uint32('high'), // 16
    fu.uint32('low'), // 20
  ]),
  fu.uint32('timestamp'), // 24
  fu.char('creatorApplication', 4), // 28
  fu.uint32('creatorVersion'), // 32
  fu.uint32('creatorHostOs'), // 36
  fu.struct('originalSize', [ // At the creation, current size of the hard disk.
    fu.uint32('high'), // 40
    fu.uint32('low'), // 44
  ]),
  fu.struct('currentSize', [ // Current size of the virtual disk. At the creation: currentSize = originalSize.
    fu.uint32('high'), // 48
    fu.uint32('low'), // 52
  ]),
  fu.struct('diskGeometry', [
    fu.uint16('cylinders'), // 56
    fu.uint8('heads'), // 58
    fu.uint8('sectorsPerTrackCylinder'), // 59
  ]),
  fu.uint32('diskType'), // 60 Disk type, must be equal to HARD_DISK_TYPE_DYNAMIC/HARD_DISK_TYPE_DIFFERENCING.
  fu.uint32('checksum'), // 64
  fu.uint8('uuid', 16), // 68
  fu.char('saved'), // 84
  fu.char('hidden'), // 85
  fu.char('reserved', 426), // 86
])

// Layout of the 1024-byte dynamic disk header which follows the
// footer copy at the beginning of the file.
const fuHeader = fu.struct([
  fu.char('cookie', 8),
  fu.struct('dataOffset', [
    fu.uint32('high'),
    fu.uint32('low'),
  ]),
  fu.struct('tableOffset', [ // Absolute byte offset of the Block Allocation Table.
    fu.uint32('high'),
    fu.uint32('low'),
  ]),
  fu.uint32('headerVersion'),
  fu.uint32('maxTableEntries'), // Max entries in the Block Allocation Table.
  fu.uint32('blockSize'), // Block size in bytes. Default (2097152 => 2MB)
  fu.uint32('checksum'),
  fu.uint8('parentUuid', 16),
  fu.uint32('parentTimestamp'),
  fu.uint32('reserved1'),
  fu.char16be('parentUnicodeName', 512),
  fu.struct('parentLocatorEntry', [
    fu.uint32('platformCode'),
    fu.uint32('platformDataSpace'),
    fu.uint32('platformDataLength'),
    fu.uint32('reserved'),
    fu.struct('platformDataOffset', [ // Absolute byte offset of the locator data.
      fu.uint32('high'),
      fu.uint32('low'),
    ]),
  ], VHD_PARENT_LOCATOR_ENTRIES),
  fu.char('reserved2', 256),
])

// ===================================================================
// Helpers
// ===================================================================

const SIZE_OF_32_BITS = Math.pow(2, 32)

// Combines a {high, low} pair of 32 bit unsigned integers into one
// JS number (safe for values below 2^53).
const uint32ToUint64 = (fu) => fu.high * SIZE_OF_32_BITS + fu.low

// Returns a 32 bits integer corresponding to a Vhd version.
const getVhdVersion = (major, minor) => (major << 16) | (minor & 0x0000FFFF)

// Sectors conversions.
// Number of sectors needed to hold `bytes` bytes (i.e. ceil division
// by the sector size).
const sectorsRoundUp = bytes => Math.floor((bytes + VHD_SECTOR_SIZE - 1) / VHD_SECTOR_SIZE)
// Same, but never returns 0 (at least one sector).
const sectorsRoundUpNoZero = bytes => sectorsRoundUp(bytes) || 1
const sectorsToBytes = sectors => sectors * VHD_SECTOR_SIZE

// Check/Set a bit on a vhd map.
// Bits are numbered from the most significant bit of each byte
// (BIT_MASK is 0x80).
const mapTestBit = (map, bit) => ((map[bit >> 3] << (bit & 7)) & BIT_MASK) !== 0
const mapSetBit = (map, bit) => { map[bit >> 3] |= (BIT_MASK >> (bit & 7)) }

// Writes `value` into `buf` at the position described by a struct-fu
// field (whose offset may be a plain byte count or a
// {bytes, bits} object).
const packField = (field, value, buf) => {
  const { offset } = field

  field.pack(
    value,
    buf,
    (typeof offset !== 'object') ? { bytes: offset, bits: 0 } : offset
  )
}

// Reads the value of a struct-fu field from `buf`.
const unpackField = (field, buf) => {
  const { offset } = field

  return field.unpack(
    buf,
    (typeof offset !== 'object') ? { bytes: offset, bits: 0 } : offset
  )
}
// ===================================================================

// Returns the checksum of a raw struct.
// The raw struct (footer or header) is altered with the new sum.
//
// The checksum is the one's complement of the byte-wise sum of the
// structure, computed with its checksum field zeroed; the computed
// value is written back into `rawStruct` before returning.
function checksumStruct (rawStruct, struct) {
  const checksumField = struct.fields.checksum

  let sum = 0

  // Reset current sum.
  packField(checksumField, 0, rawStruct)

  for (let i = 0, n = struct.size; i < n; i++) {
    sum = (sum + rawStruct[i]) & 0xFFFFFFFF
  }

  sum = 0xFFFFFFFF - sum

  // Write new sum.
  packField(checksumField, sum, rawStruct)

  return sum
}

// ===================================================================

// Minimal VHD reader/writer used to merge delta backups into full
// backups. All I/O goes through a remote `handler` (createReadStream
// / createOutputStream) targeting `path`.
class Vhd {
  constructor (handler, path) {
    this._handler = handler
    this._path = path
  }

  // =================================================================
  // Read functions.
  // =================================================================

  // Returns a read stream over `n` bytes starting at byte `start`.
  _readStream (start, n) {
    return this._handler.createReadStream(this._path, {
      start,
      end: start + n - 1, // end is inclusive
    })
  }

  // Reads `n` bytes starting at byte `start` into a Buffer.
  _read (start, n) {
    return this._readStream(start, n).then(streamToBuffer)
  }

  // Returns the first address after metadata. (In bytes)
  getEndOfHeaders () {
    const { header } = this

    let end = uint32ToUint64(this.footer.dataOffset) + VHD_HEADER_SIZE

    const blockAllocationTableSize = sectorsToBytes(
      sectorsRoundUpNoZero(header.maxTableEntries * VHD_ENTRY_SIZE)
    )

    // Max(end, block allocation table end)
    end = Math.max(end, uint32ToUint64(header.tableOffset) + blockAllocationTableSize)

    // Parent locator data may live after the BAT: take it into
    // account too.
    for (let i = 0; i < VHD_PARENT_LOCATOR_ENTRIES; i++) {
      const entry = header.parentLocatorEntry[i]

      if (entry.platformCode !== VHD_PLATFORM_CODE_NONE) {
        end = Math.max(end,
          uint32ToUint64(entry.platformDataOffset) +
          sectorsToBytes(entry.platformDataSpace)
        )
      }
    }

    debug(`End of headers: ${end}.`)

    return end
  }

  // Returns the first sector after data.
  // (Despite the name of the intermediate variable, the return value
  // is in bytes.)
  getEndOfData () {
    let end = Math.ceil(this.getEndOfHeaders() / VHD_SECTOR_SIZE)

    const fullBlockSize = this.sectorsOfBitmap + this.sectorsPerBlock
    const { maxTableEntries } = this.header
    for (let i = 0; i < maxTableEntries; i++) {
      const blockAddr = this._getBatEntry(i)

      if (blockAddr !== BLOCK_UNUSED) {
        end = Math.max(end, blockAddr + fullBlockSize)
      }
    }

    debug(`End of data: ${end}.`)

    return sectorsToBytes(end)
  }

  // Get the beginning (footer + header) of a vhd file.
  //
  // Populates this.footer, this.header and the derived sizes
  // (sectorsPerBlock, sectorsOfBitmap, fullBlockSize, bitmapSize).
  //
  // NOTE(review): only the footer checksum is verified here — the
  // header checksum is not; and checksumStruct() rewrites the
  // recomputed checksum into `buf` before it is unpacked.
  async readHeaderAndFooter () {
    const buf = await this._read(0, VHD_FOOTER_SIZE + VHD_HEADER_SIZE)

    const sum = unpackField(fuFooter.fields.checksum, buf)
    const sumToTest = checksumStruct(buf, fuFooter)

    // Checksum child & parent.
    if (sumToTest !== sum) {
      throw new Error(`Bad checksum in vhd. Expected: ${sum}. Given: ${sumToTest}. (data=${buf.toString('hex')})`)
    }

    const header = this.header = fuHeader.unpack(buf.slice(VHD_FOOTER_SIZE))
    this.footer = fuFooter.unpack(buf)

    // Compute the number of sectors in one block.
    // Default: One block contains 4096 sectors of 512 bytes.
    const sectorsPerBlock = this.sectorsPerBlock = Math.floor(header.blockSize / VHD_SECTOR_SIZE)

    // Compute bitmap size in sectors.
    // Default: 1.
    const sectorsOfBitmap = this.sectorsOfBitmap = sectorsRoundUpNoZero(sectorsPerBlock >> 3)

    // Full block size => data block size + bitmap size.
    this.fullBlockSize = sectorsToBytes(sectorsPerBlock + sectorsOfBitmap)

    // In bytes.
    // Default: 512.
    this.bitmapSize = sectorsToBytes(sectorsOfBitmap)
  }

  // Check if a vhd object has a block allocation table map.
  // (True for file format versions newer than 1.0.)
  hasBlockAllocationTableMap () {
    return this.footer.fileFormatVersion > getVhdVersion(1, 0)
  }

  // Returns a buffer that contains the block allocation table of a vhd file.
  async readBlockTable () {
    const { header } = this

    const offset = uint32ToUint64(header.tableOffset)
    const size = sectorsToBytes(
      sectorsRoundUpNoZero(header.maxTableEntries * VHD_ENTRY_SIZE)
    )

    this.blockTable = await this._read(offset, size)
  }

  // return the first sector (bitmap) of a block
  _getBatEntry (block) {
    return this.blockTable.readUInt32BE(block * VHD_ENTRY_SIZE)
  }

  // Reads a block (bitmap + data, or bitmap only) given its id.
  //
  // Throws if the block is not allocated in the BAT.
  _readBlock (blockId, onlyBitmap = false) {
    const blockAddr = this._getBatEntry(blockId)
    if (blockAddr === BLOCK_UNUSED) {
      throw new Error(`no such block ${blockId}`)
    }

    return this._read(
      sectorsToBytes(blockAddr),
      onlyBitmap ? this.bitmapSize : this.fullBlockSize
    ).then(buf => onlyBitmap
      ? { bitmap: buf }
      : {
        bitmap: buf.slice(0, this.bitmapSize),
        data: buf.slice(this.bitmapSize),
      }
    )
  }

  // get the identifiers and first sectors of the first and last block
  // in the file
  //
  // return undefined if none
  _getFirstAndLastBlocks () {
    const n = this.header.maxTableEntries
    const bat = this.blockTable
    let i = 0
    let j = 0 // byte offset of entry i in the BAT
    let first, firstSector, last, lastSector

    // get first allocated block for initialization
    while ((firstSector = bat.readUInt32BE(j)) === BLOCK_UNUSED) {
      i += 1
      j += VHD_ENTRY_SIZE

      if (i === n) {
        return
      }
    }
    lastSector = firstSector
    first = last = i

    while (i < n) {
      const sector = bat.readUInt32BE(j)
      if (sector !== BLOCK_UNUSED) {
        if (sector < firstSector) {
          first = i
          firstSector = sector
        } else if (sector > lastSector) {
          last = i
          lastSector = sector
        }
      }

      i += 1
      j += VHD_ENTRY_SIZE
    }

    return { first, firstSector, last, lastSector }
  }

  // =================================================================
  // Write functions.
  // =================================================================

  // Write a buffer/stream at a given position in a vhd file.
  _write (data, offset) {
    debug(`_write offset=${offset} size=${Buffer.isBuffer(data) ? data.length : '???'}`)
    // TODO: could probably be merged in remote handlers.
    return this._handler.createOutputStream(this._path, {
      flags: 'r+',
      start: offset,
    }).then(
      Buffer.isBuffer(data)
        ? stream => new Promise((resolve, reject) => {
          stream.on('error', reject)
          stream.end(data, resolve)
        })
        : stream => eventToPromise(data.pipe(stream), 'finish')
    )
  }

  // Grows the BAT (in memory and on disk) so it can hold at least
  // `size` entries; relocates the first data block when the enlarged
  // table would overlap it.
  async ensureBatSize (size) {
    const { header } = this

    const prevMaxTableEntries = header.maxTableEntries
    if (prevMaxTableEntries >= size) {
      return
    }

    const tableOffset = uint32ToUint64(header.tableOffset)
    const { first, firstSector, lastSector } = this._getFirstAndLastBlocks()

    // extend BAT
    const maxTableEntries = header.maxTableEntries = size
    const batSize = maxTableEntries * VHD_ENTRY_SIZE
    const prevBat = this.blockTable
    const bat = this.blockTable = Buffer.allocUnsafe(batSize)
    prevBat.copy(bat)
    bat.fill(BUF_BLOCK_UNUSED, prevBat.length)
    debug(`ensureBatSize: extend in memory BAT ${prevMaxTableEntries} -> ${maxTableEntries}`)

    const extendBat = () => {
      debug(`ensureBatSize: extend in file BAT ${prevMaxTableEntries} -> ${maxTableEntries}`)

      return this._write(
        constantStream(BUF_BLOCK_UNUSED, maxTableEntries - prevMaxTableEntries),
        tableOffset + prevBat.length
      )
    }

    // Fast path: the enlarged BAT still fits before the first block.
    if (tableOffset + batSize < sectorsToBytes(firstSector)) {
      return Promise.all([
        extendBat(),
        this.writeHeader(),
      ])
    }

    // Otherwise move the first block after the last one to make room.
    const { fullBlockSize } = this
    const newFirstSector = lastSector + fullBlockSize / VHD_SECTOR_SIZE
    debug(`ensureBatSize: move first block ${firstSector} -> ${newFirstSector}`)

    return Promise.all([
      // copy the first block at the end
      this._readStream(sectorsToBytes(firstSector), fullBlockSize).then(stream =>
        this._write(stream, sectorsToBytes(newFirstSector))
      ).then(extendBat),

      this._setBatEntry(first, newFirstSector),
      this.writeHeader(),
      this.writeFooter(),
    ])
  }

  // set the first sector (bitmap) of a block
  // (updates both the in-memory BAT and the file)
  _setBatEntry (block, blockSector) {
    const i = block * VHD_ENTRY_SIZE
    const { blockTable } = this

    blockTable.writeUInt32BE(blockSector, i)

    return this._write(
      blockTable.slice(i, i + VHD_ENTRY_SIZE),
      uint32ToUint64(this.header.tableOffset) + i
    )
  }

  // Make a new empty block at vhd end.
  // Update block allocation table in context and in file.
  //
  // Returns the new block's address (in sectors).
  async createBlock (blockId) {
    const blockAddr = Math.ceil(this.getEndOfData() / VHD_SECTOR_SIZE)

    debug(`create block ${blockId} at ${blockAddr}`)

    await Promise.all([
      // Write an empty block and addr in vhd file.
      this._write(
        constantStream([ 0 ], this.fullBlockSize),
        sectorsToBytes(blockAddr)
      ),

      this._setBatEntry(blockId, blockAddr),
    ])

    return blockAddr
  }

  // Write a bitmap at a block address.
  async writeBlockBitmap (blockAddr, bitmap) {
    const { bitmapSize } = this

    if (bitmap.length !== bitmapSize) {
      throw new Error(`Bitmap length is not correct ! ${bitmap.length}`)
    }

    // NOTE(review): `offset` and the second argument of _write() below
    // are the same value computed twice.
    const offset = sectorsToBytes(blockAddr)

    debug(`Write bitmap at: ${offset}. (size=${bitmapSize}, data=${bitmap.toString('hex')})`)
    await this._write(bitmap, sectorsToBytes(blockAddr))
  }

  // Writes sectors [beginSectorId, endSectorId) of `block.data` into
  // this VHD (allocating the block if needed) and marks them in the
  // block's bitmap.
  async writeBlockSectors (block, beginSectorId, endSectorId) {
    let blockAddr = this._getBatEntry(block.id)

    if (blockAddr === BLOCK_UNUSED) {
      blockAddr = await this.createBlock(block.id)
    }

    const offset = blockAddr + this.sectorsOfBitmap + beginSectorId

    debug(`writeBlockSectors at ${offset} block=${block.id}, sectors=${beginSectorId}...${endSectorId}`)

    await this._write(
      block.data.slice(
        sectorsToBytes(beginSectorId),
        sectorsToBytes(endSectorId)
      ),
      sectorsToBytes(offset)
    )

    const { bitmap } = await this._readBlock(block.id, true)

    for (let i = beginSectorId; i < endSectorId; ++i) {
      mapSetBit(bitmap, i)
    }

    await this.writeBlockBitmap(blockAddr, bitmap)
  }

  // Merge block id (of vhd child) into vhd parent.
  //
  // Only the sectors flagged in the child's bitmap are copied, in
  // contiguous runs. Returns the size of the child block data.
  async coalesceBlock (child, blockId) {
    // Get block data and bitmap of block id.
    const { bitmap, data } = await child._readBlock(blockId)

    debug(`coalesceBlock block=${blockId}`)

    // For each sector of block data...
    const { sectorsPerBlock } = child
    for (let i = 0; i < sectorsPerBlock; i++) {
      // If no changes on one sector, skip.
      if (!mapTestBit(bitmap, i)) {
        continue
      }

      let endSector = i + 1

      // Count changed sectors.
      while (endSector < sectorsPerBlock && mapTestBit(bitmap, endSector)) {
        ++endSector
      }

      // Write n sectors into parent.
      debug(`coalesceBlock: write sectors=${i}...${endSector}`)
      await this.writeBlockSectors(
        { id: blockId, data },
        i,
        endSector
      )

      i = endSector
    }

    // Return the merged data size
    return data.length
  }

  // Write a context footer. (At the end and beginning of a vhd file.)
  async writeFooter () {
    const { footer } = this

    const offset = this.getEndOfData()
    const rawFooter = fuFooter.pack(footer)

    footer.checksum = checksumStruct(rawFooter, fuFooter)
    debug(`Write footer at: ${offset} (checksum=${footer.checksum}). (data=${rawFooter.toString('hex')})`)

    // The footer exists as a copy at offset 0 and as the real footer
    // at the end of the data.
    await this._write(rawFooter, 0)
    await this._write(rawFooter, offset)
  }

  // Recomputes the header checksum and writes the header right after
  // the footer copy.
  writeHeader () {
    const { header } = this
    const rawHeader = fuHeader.pack(header)
    header.checksum = checksumStruct(rawHeader, fuHeader)
    const offset = VHD_FOOTER_SIZE
    debug(`Write header at: ${offset} (checksum=${header.checksum}). (data=${rawHeader.toString('hex')})`)
    return this._write(rawHeader, offset)
  }
}

// Merge vhd child into vhd parent.
//
// Child must be a delta backup !
// Parent must be a full backup !
//
// Returns the total amount of block data merged (in bytes).
//
// TODO: update the identifier of the parent VHD.
export default async function vhdMerge (
  parentHandler, parentPath,
  childHandler, childPath
) {
  const parentVhd = new Vhd(parentHandler, parentPath)
  const childVhd = new Vhd(childHandler, childPath)

  // Reading footer and header.
  await Promise.all([
    parentVhd.readHeaderAndFooter(),
    childVhd.readHeaderAndFooter(),
  ])

  assert(childVhd.header.blockSize === parentVhd.header.blockSize)

  // Child must be a delta.
  if (childVhd.footer.diskType !== HARD_DISK_TYPE_DIFFERENCING) {
    throw new Error('Unable to merge, child is not a delta backup.')
  }

  // Merging in differencing disk is prohibited in our case.
  if (parentVhd.footer.diskType !== HARD_DISK_TYPE_DYNAMIC) {
    throw new Error('Unable to merge, parent is not a full backup.')
  }

  // Allocation table map is not yet implemented.
  if (
    parentVhd.hasBlockAllocationTableMap() ||
    childVhd.hasBlockAllocationTableMap()
  ) {
    throw new Error('Unsupported allocation table map.')
  }

  // Read allocation table of child/parent.
  await Promise.all([
    parentVhd.readBlockTable(),
    childVhd.readBlockTable(),
  ])

  await parentVhd.ensureBatSize(childVhd.header.maxTableEntries)

  let mergedDataSize = 0

  // Coalesce each allocated child block into the parent.
  for (let blockId = 0; blockId < childVhd.header.maxTableEntries; blockId++) {
    if (childVhd._getBatEntry(blockId) !== BLOCK_UNUSED) {
      mergedDataSize += await parentVhd.coalesceBlock(childVhd, blockId)
    }
  }

  const cFooter = childVhd.footer
  const pFooter = parentVhd.footer

  // The merged disk takes the size/geometry of the child.
  pFooter.currentSize = { ...cFooter.currentSize }
  pFooter.diskGeometry = { ...cFooter.diskGeometry }
  pFooter.originalSize = { ...cFooter.originalSize }
  pFooter.timestamp = cFooter.timestamp

  // necessary to update values and to recreate the footer after block
  // creation
  await parentVhd.writeFooter()

  return mergedDataSize
}

// Chains the child VHD to the parent: sets the parent name/uuid in
// the child's header when they differ.
//
// returns true if the child was actually modified
export async function chainVhd (
  parentHandler, parentPath,
  childHandler, childPath
) {
  const parentVhd = new Vhd(parentHandler, parentPath)
  const childVhd = new Vhd(childHandler, childPath)
  await Promise.all([
    parentVhd.readHeaderAndFooter(),
    childVhd.readHeaderAndFooter(),
  ])

  const { header } = childVhd

  const parentName = parentPath.split('/').pop()
  const parentUuid = parentVhd.footer.uuid
  if (
    header.parentUnicodeName !== parentName ||
    !isEqual(header.parentUuid, parentUuid)
  ) {
    header.parentUuid = parentUuid
    header.parentUnicodeName = parentName
    await childVhd.writeHeader()
    return true
  }

  // The checksum was broken between xo-server v5.2.4 and v5.2.5
  //
  // Replace by a correct checksum if necessary.
  //
  // TODO: remove when enough time has passed (6 months).
  {
    const rawHeader = fuHeader.pack(header)
    const checksum = checksumStruct(rawHeader, fuHeader)
    if (checksum !== header.checksum) {
      await childVhd._write(rawHeader, VHD_FOOTER_SIZE)
      return true
    }
  }

  return false
}
diff --git a/packages/xo-server/src/xapi-object-to-xo.js b/packages/xo-server/src/xapi-object-to-xo.js
new file mode 100644
index 000000000..a2ebbedb8
--- /dev/null
+++ b/packages/xo-server/src/xapi-object-to-xo.js
@@ -0,0 +1,786 @@
import {
  startsWith,
} from 'lodash'

import {
  ensureArray,
  extractProperty,
  forEach,
  isArray,
  isEmpty,
  mapFilter,
  mapToArray,
  parseXml,
} from './utils'
import {
  isHostRunning,
  isVmHvm,
  isVmRunning,
  parseDateTime,
} from './xapi'
import {
  useUpdateSystem,
} from './xapi/utils'

// ===================================================================

const {
  defineProperties,
  freeze,
} = Object

// Resolves the XAPI dynamic property `$<prop>` of `obj` to the id(s)
// of the referenced object(s): an array of ids when the property is a
// collection, a single id otherwise, or null/undefined when unset.
function link (obj, prop, idField = '$id') {
  const dynamicValue = obj[`$${prop}`]
  if (dynamicValue == null) {
    return dynamicValue // Properly handles null and undefined.
  }

  if (isArray(dynamicValue)) {
    return mapToArray(dynamicValue, idField)
  }

  return dynamicValue[idField]
}

// Parse a string date time to a Unix timestamp (in seconds).
//
// If the value is a number or can be converted as one, it is assumed
// to already be a timestamp and returned.
//
// If there are no data or if the timestamp is 0, returns null.
function toTimestamp (date) {
  if (!date) {
    return null
  }

  const timestamp = +date

  // Not NaN.
+ if (timestamp === timestamp) { // eslint-disable-line no-self-compare + return timestamp + } + + const ms = parseDateTime(date) + if (!ms) { + return null + } + + return Math.round(ms.getTime() / 1000) +} + +// =================================================================== + +const TRANSFORMS = { + pool (obj) { + const cpuInfo = obj.cpu_info + return { + default_SR: link(obj, 'default_SR'), + HA_enabled: Boolean(obj.ha_enabled), + master: link(obj, 'master'), + tags: obj.tags, + name_description: obj.name_description, + name_label: obj.name_label || obj.$master.name_label, + xosanPackInstallationTime: toTimestamp(obj.other_config.xosan_pack_installation_time), + cpus: { + cores: cpuInfo && +cpuInfo.cpu_count, + sockets: cpuInfo && +cpuInfo.socket_count, + }, + + // TODO + // - ? networks = networksByPool.items[pool.id] (network.$pool.id) + // - hosts = hostsByPool.items[pool.id] (host.$pool.$id) + // - patches = poolPatchesByPool.items[pool.id] (poolPatch.$pool.id) + // - SRs = srsByContainer.items[pool.id] (sr.$container.id) + // - templates = vmTemplatesByContainer.items[pool.id] (vmTemplate.$container.$id) + // - VMs = vmsByContainer.items[pool.id] (vm.$container.id) + // - $running_hosts = runningHostsByPool.items[pool.id] (runningHost.$pool.id) + // - $running_VMs = runningVmsByPool.items[pool.id] (runningHost.$pool.id) + // - $VMs = vmsByPool.items[pool.id] (vm.$pool.id) + } + }, + + // ----------------------------------------------------------------- + + host (obj) { + const { + $metrics: metrics, + other_config: otherConfig, + software_version: softwareVersion, + } = obj + + const isRunning = isHostRunning(obj) + let supplementalPacks, patches + + if (useUpdateSystem(obj)) { + supplementalPacks = [] + patches = [] + + forEach(obj.$updates, update => { + const formattedUpdate = { + name: update.name_label, + description: update.name_description, + author: update.key.split('-')[3], + version: update.version, + guidance: update.after_apply_guidance, + 
hosts: link(update, 'hosts'), + vdi: link(update, 'vdi'), + size: update.installation_size, + } + + if (startsWith(update.name_label, 'XS')) { + patches.push(formattedUpdate) + } else { + supplementalPacks.push(formattedUpdate) + } + }) + } + + const cpuInfo = obj.cpu_info + + return { + // Deprecated + CPUs: cpuInfo, + + address: obj.address, + bios_strings: obj.bios_strings, + build: softwareVersion.build_number, + enabled: Boolean(obj.enabled), + cpus: { + cores: cpuInfo && +cpuInfo.cpu_count, + sockets: cpuInfo && +cpuInfo.socket_count, + }, + current_operations: obj.current_operations, + hostname: obj.hostname, + iSCSI_name: otherConfig.iscsi_iqn || null, + license_params: obj.license_params, + license_server: obj.license_server, + license_expiry: toTimestamp(obj.license_params.expiry), + name_description: obj.name_description, + name_label: obj.name_label, + memory: (function () { + if (metrics) { + const free = +metrics.memory_free + const total = +metrics.memory_total + + return { + usage: total - free, + size: total, + } + } + + return { + usage: 0, + size: 0, + + // Deprecated + total: 0, + } + })(), + patches: patches || link(obj, 'patches'), + powerOnMode: obj.power_on_mode, + power_state: metrics + ? (isRunning ? 'Running' : 'Halted') + : 'Unknown', + startTime: toTimestamp(otherConfig.boot_time), + supplementalPacks: supplementalPacks || + mapFilter(softwareVersion, (value, key) => { + let author, name + if (([ author, name ] = key.split(':')).length === 2) { + const [ description, version ] = value.split(', ') + return { + name, + description, + author, + version: version.split(' ')[1], + } + } + }), + agentStartTime: toTimestamp(otherConfig.agent_start_time), + rebootRequired: !isEmpty(obj.updates_requiring_reboot), + tags: obj.tags, + version: softwareVersion.product_version, + + // TODO: dedupe. 
+ PIFs: link(obj, 'PIFs'), + $PIFs: link(obj, 'PIFs'), + PCIs: link(obj, 'PCIs'), + $PCIs: link(obj, 'PCIs'), + PGPUs: link(obj, 'PGPUs'), + $PGPUs: link(obj, 'PGPUs'), + + $PBDs: link(obj, 'PBDs'), + + // TODO: + // - controller = vmControllersByContainer.items[host.id] + // - SRs = srsByContainer.items[host.id] + // - tasks = tasksByHost.items[host.id] + // - templates = vmTemplatesByContainer.items[host.id] + // - VMs = vmsByContainer.items[host.id] + // - $vCPUs = sum(host.VMs, vm => host.CPUs.number) + } + }, + + // ----------------------------------------------------------------- + + vm (obj) { + const { + $guest_metrics: guestMetrics, + $metrics: metrics, + other_config: otherConfig, + } = obj + + const isHvm = isVmHvm(obj) + const isRunning = isVmRunning(obj) + const xenTools = (() => { + if (!isRunning || !metrics) { + // Unknown status, returns nothing. + return + } + + if (!guestMetrics) { + return false + } + + const { major, minor } = guestMetrics.PV_drivers_version + const [ hostMajor, hostMinor ] = (obj.$resident_on || obj.$pool.$master) + .software_version + .product_version + .split('.') + + return major >= hostMajor && minor >= hostMinor + ? 'up to date' + : 'out of date' + })() + + let resourceSet = otherConfig['xo:resource_set'] + if (resourceSet) { + try { + resourceSet = JSON.parse(resourceSet) + } catch (_) { + resourceSet = undefined + } + } + + const vm = { + // type is redefined after for controllers/, templates & + // snapshots. + type: 'VM', + + addresses: (guestMetrics && guestMetrics.networks) || null, + affinityHost: link(obj, 'affinity'), + auto_poweron: otherConfig.auto_poweron === 'true', + boot: obj.HVM_boot_params, + CPUs: { + max: +obj.VCPUs_max, + number: ( + isRunning && metrics && xenTools + ? 
+metrics.VCPUs_number + : +obj.VCPUs_at_startup + ), + }, + current_operations: obj.current_operations, + docker: (function () { + const monitor = otherConfig['xscontainer-monitor'] + if (!monitor) { + return + } + + if (monitor === 'False') { + return { + enabled: false, + } + } + + const { + docker_ps: process, + docker_info: info, + docker_version: version, + } = otherConfig + + return { + enabled: true, + info: info && parseXml(info).docker_info, + containers: ensureArray(process && parseXml(process).docker_ps.item), + process: process && parseXml(process).docker_ps, // deprecated (only used in v4) + version: version && parseXml(version).docker_version, + } + })(), + + // TODO: there is two possible value: "best-effort" and "restart" + high_availability: Boolean(obj.ha_restart_priority), + + memory: (function () { + const dynamicMin = +obj.memory_dynamic_min + const dynamicMax = +obj.memory_dynamic_max + const staticMin = +obj.memory_static_min + const staticMax = +obj.memory_static_max + + const memory = { + dynamic: [ dynamicMin, dynamicMax ], + static: [ staticMin, staticMax ], + } + + const gmMemory = guestMetrics && guestMetrics.memory + + if (!isRunning) { + memory.size = dynamicMax + } else if (gmMemory && gmMemory.used) { + memory.usage = +gmMemory.used + memory.size = +gmMemory.total + } else if (metrics) { + memory.size = +metrics.memory_actual + } else { + memory.size = dynamicMax + } + + return memory + })(), + installTime: metrics && toTimestamp(metrics.install_time), + name_description: obj.name_description, + name_label: obj.name_label, + other: otherConfig, + os_version: (guestMetrics && guestMetrics.os_version) || null, + power_state: obj.power_state, + resourceSet, + snapshots: link(obj, 'snapshots'), + startTime: metrics && toTimestamp(metrics.start_time), + tags: obj.tags, + VIFs: link(obj, 'VIFs'), + virtualizationMode: isHvm ? 'hvm' : 'pv', + + // <=> Are the Xen Server tools installed? 
+ // + // - undefined: unknown status + // - false: not optimized + // - 'out of date': optimized but drivers should be updated + // - 'up to date': optimized + xenTools, + + $container: ( + isRunning + ? link(obj, 'resident_on') + : link(obj, 'pool') // TODO: handle local VMs (`VM.get_possible_hosts()`). + ), + $VBDs: link(obj, 'VBDs'), + + // TODO: dedupe + VGPUs: link(obj, 'VGPUs'), + $VGPUs: link(obj, 'VGPUs'), + } + + if (isHvm) { + ({ + vga: vm.vga = 'cirrus', + videoram: vm.videoram = 4, + } = obj.platform) + } + + const coresPerSocket = obj.platform['cores-per-socket'] + if (coresPerSocket !== undefined) { + vm.coresPerSocket = +coresPerSocket + } + + if (obj.is_control_domain) { + vm.type += '-controller' + } else if (obj.is_a_snapshot) { + vm.type += '-snapshot' + + vm.snapshot_time = toTimestamp(obj.snapshot_time) + vm.$snapshot_of = link(obj, 'snapshot_of') + } else if (obj.is_a_template) { + vm.type += '-template' + + if (obj.other_config.default_template === 'true') { + vm.id = obj.$ref // use refs for templates as they + } + + vm.CPUs.number = +obj.VCPUs_at_startup + vm.template_info = { + arch: otherConfig['install-arch'], + disks: (function () { + const {disks: xml} = otherConfig + let data + if (!xml || !(data = parseXml(xml)).provision) { + return [] + } + + const disks = ensureArray(data.provision.disk) + forEach(disks, function normalize (disk) { + disk.bootable = disk.bootable === 'true' + disk.size = +disk.size + disk.SR = extractProperty(disk, 'sr') + }) + + return disks + })(), + install_methods: (function () { + const methods = otherConfig['install-methods'] + + return methods ? 
        methods.split(',') : []
        })(),
        install_repository: otherConfig['install-repository'],
      }
    }

    // Optional CPU scheduling params: cap/weight are only exposed when
    // present and truthy (i.e. non-empty, non-zero) in VCPUs_params.
    let tmp
    if ((tmp = obj.VCPUs_params)) {
      tmp.cap && (vm.cpuCap = +tmp.cap)
      tmp.weight && (vm.cpuWeight = +tmp.weight)
    }

    if (!isHvm) {
      vm.PV_args = obj.PV_args
    }

    return vm
  },

  // -----------------------------------------------------------------

  // Transforms a XAPI SR (storage repository) record into its XO
  // representation.
  sr (obj) {
    return {
      type: 'SR',

      content_type: obj.content_type,

      // TODO: Should it replace usage?
      physical_usage: +obj.physical_utilisation,

      name_description: obj.name_description,
      name_label: obj.name_label,
      size: +obj.physical_size,
      shared: Boolean(obj.shared),
      SR_type: obj.type,
      tags: obj.tags,
      usage: +obj.virtual_allocation,
      VDIs: link(obj, 'VDIs'),
      other_config: obj.other_config,
      sm_config: obj.sm_config,

      // A shared SR (or one with no PBD at all) is attached to the
      // pool, otherwise to the host of its first PBD.
      $container: (
        obj.shared || !obj.$PBDs[0]
          ? link(obj, 'pool')
          : link(obj.$PBDs[0], 'host')
      ),
      $PBDs: link(obj, 'PBDs'),
    }
  },

  // -----------------------------------------------------------------

  // Transforms a XAPI PBD (physical block device: the host↔SR
  // attachment) record.
  pbd (obj) {
    return {
      type: 'PBD',

      attached: Boolean(obj.currently_attached),
      host: link(obj, 'host'),
      SR: link(obj, 'SR'),
      device_config: obj.device_config,
    }
  },

  // -----------------------------------------------------------------

  // Transforms a XAPI PIF (physical network interface) record.
  pif (obj) {
    // $metrics may be absent; metrics-derived fields degrade gracefully.
    const metrics = obj.$metrics

    return {
      type: 'PIF',

      attached: Boolean(obj.currently_attached),
      isBondMaster: !isEmpty(obj.bond_master_of),
      device: obj.device,
      deviceName: metrics && metrics.device_name,
      dns: obj.DNS,
      disallowUnplug: Boolean(obj.disallow_unplug),
      gateway: obj.gateway,
      ip: obj.IP,
      mac: obj.MAC,
      management: Boolean(obj.management), // TODO: find a better name.
      carrier: Boolean(metrics && metrics.carrier),
      mode: obj.ip_configuration_mode,
      mtu: +obj.MTU,
      netmask: obj.netmask,
      // A non physical PIF is a "copy" of an existing physical PIF (same device)
      // A physical PIF cannot be unplugged
      physical: Boolean(obj.physical),
      vlan: +obj.VLAN,
      $host: link(obj, 'host'),
      $network: link(obj, 'network'),
    }
  },

  // -----------------------------------------------------------------

  // Transforms a XAPI VDI (virtual disk image) record.
  //
  // The `type` is suffixed with '-snapshot' and/or '-unmanaged'
  // depending on the VDI's state.
  vdi (obj) {
    const vdi = {
      type: 'VDI',

      name_description: obj.name_description,
      name_label: obj.name_label,
      size: +obj.virtual_size,
      snapshots: link(obj, 'snapshots'),
      tags: obj.tags,
      usage: +obj.physical_utilisation,

      $SR: link(obj, 'SR'),
      $VBDs: link(obj, 'VBDs'),
    }

    if (obj.is_a_snapshot) {
      vdi.type += '-snapshot'
      vdi.snapshot_time = toTimestamp(obj.snapshot_time)
      vdi.$snapshot_of = link(obj, 'snapshot_of')
    }

    if (!obj.managed) {
      vdi.type += '-unmanaged'
    }

    return vdi
  },

  // -----------------------------------------------------------------

  // Transforms a XAPI VBD (virtual block device: the VM↔VDI
  // attachment) record.
  vbd (obj) {
    return {
      type: 'VBD',

      attached: Boolean(obj.currently_attached),
      bootable: Boolean(obj.bootable),
      device: obj.device || null,
      is_cd_drive: obj.type === 'CD',
      position: obj.userdevice,
      read_only: obj.mode === 'RO',
      VDI: link(obj, 'VDI'),
      VM: link(obj, 'VM'),
    }
  },

  // -----------------------------------------------------------------

  // Transforms a XAPI VIF (virtual network interface) record.
  vif (obj) {
    return {
      type: 'VIF',

      allowedIpv4Addresses: obj.ipv4_allowed,
      allowedIpv6Addresses: obj.ipv6_allowed,
      attached: Boolean(obj.currently_attached),
      device: obj.device, // TODO: should it be cast to a number?
      MAC: obj.MAC,
      MTU: +obj.MTU,

      $network: link(obj, 'network'),
      $VM: link(obj, 'VM'),
    }
  },

  // -----------------------------------------------------------------

  // Transforms a XAPI network record.
  network (obj) {
    return {
      bridge: obj.bridge,
      defaultIsLocked: obj.default_locking_mode === 'disabled',
      MTU: +obj.MTU,
      name_description: obj.name_description,
      name_label: obj.name_label,
      other_config: obj.other_config,
      tags: obj.tags,
      PIFs: link(obj, 'PIFs'),
      VIFs: link(obj, 'VIFs'),
    }
  },

  // -----------------------------------------------------------------

  // Transforms a XAPI message (event/notification) record.
  message (obj) {
    return {
      body: obj.body,
      name: obj.name,
      time: toTimestamp(obj.timestamp),

      $object: obj.obj_uuid, // Special link as it is already an UUID.
    }
  },

  // -----------------------------------------------------------------

  // Transforms a XAPI task record.
  task (obj) {
    return {
      created: toTimestamp(obj.created),
      current_operations: obj.current_operations,
      finished: toTimestamp(obj.finished),
      name_description: obj.name_description,
      name_label: obj.name_label,
      progress: +obj.progress,
      result: obj.result,
      status: obj.status,

      $host: link(obj, 'resident_on'),
    }
  },

  // -----------------------------------------------------------------

  // Transforms a XAPI host_patch record (the application of a pool
  // patch on one host).
  host_patch (obj) {
    return {
      applied: Boolean(obj.applied),
      time: toTimestamp(obj.timestamp_applied),
      pool_patch: link(obj, 'pool_patch', '$ref'),

      $host: link(obj, 'host'),
    }
  },

  // -----------------------------------------------------------------

  // Transforms a XAPI pool_patch record.
  //
  // Note: the XO id is the opaque ref, not the usual $id.
  pool_patch (obj) {
    return {
      id: obj.$ref,

      applied: Boolean(obj.pool_applied),
      description: obj.name_description,
      guidance: obj.after_apply_guidance,
      name: obj.name_label,
      size: +obj.size,
      uuid: obj.uuid,

      // TODO: what does it mean, should we handle it?
      // version: obj.version,

      // TODO: host.[$]pool_patches ←→ pool.[$]host_patches
      $host_patches: link(obj, 'host_patches'),
    }
  },

  // -----------------------------------------------------------------

  // Transforms a XAPI PCI device record.
  pci (obj) {
    return {
      type: 'PCI',

      class_name: obj.class_name,
      device_name: obj.device_name,
      pci_id: obj.pci_id,

      $host: link(obj, 'host'),
    }
  },

  // -----------------------------------------------------------------

  // Transforms a XAPI PGPU (physical GPU) record.
  pgpu (obj) {
    return {
      type: 'PGPU',

      dom0Access: obj.dom0_access,
      enabledVgpuTypes: link(obj, 'enabled_VGPU_types'),
      gpuGroup: link(obj, 'GPU_group'),
      isSystemDisplayDevice: Boolean(obj.is_system_display_device),
      pci: link(obj, 'PCI'),
      // NOTE(review): "Capcities" is a typo for "Capacities", but the
      // key is part of the public object shape — renaming it would
      // break consumers; fix in coordination with xo-web.
      supportedVgpuMaxCapcities: link(obj, 'supported_VGPU_max_capacities'),
      supportedVgpuTypes: link(obj, 'supported_VGPU_types'),

      // TODO: dedupe.
      host: link(obj, 'host'),
      $host: link(obj, 'host'),
      vgpus: link(obj, 'resident_VGPUs'),
      $vgpus: link(obj, 'resident_VGPUs'),
    }
  },

  // -----------------------------------------------------------------

  // Transforms a XAPI VGPU (virtual GPU) record.
  vgpu (obj) {
    return {
      type: 'vgpu',

      currentlyAttached: Boolean(obj.currently_attached),
      device: obj.device,
      gpuGroup: link(obj, 'GPU_group'),
      otherConfig: obj.other_config,
      resident_on: link(obj, 'resident_on'),
      vgpuType: link(obj, '$type'),
      vm: link(obj, 'VM'),
    }
  },

  // -----------------------------------------------------------------

  // Transforms a XAPI GPU group record.
  gpu_group (obj) {
    return {
      type: 'gpuGroup',

      allocation: obj.allocation_algorithm,
      enabledVgpuTypes: link(obj, 'enabled_VGPU_types'),
      gpuTypes: obj.GPU_types,
      name_description: obj.name_description,
      name_label: obj.name_label,
      otherConfig: obj.other_config,
      pgpus: link(obj, 'PGPUs'),
      supportedVgpuTypes: link(obj, 'supported_VGPU_types'),
      vgpus: link(obj, 'VGPUs'),
    }
  },

  // -----------------------------------------------------------------

  // Transforms a XAPI VGPU type record.
  vgpu_type (obj) {
    return {
      type: 'vgpuType',

      experimental:
Boolean(obj.experimental), + framebufferSize: obj.framebuffer_size, + gpuGroups: link(obj, 'enabled_on_GPU_groups'), + maxHeads: obj.max_heads, + maxResolutionX: obj.max_resolution_x, + maxResolutionY: obj.max_resolution_y, + modelName: obj.model_name, + pgpus: link(obj, 'enabled_on_PGPUs'), + vendorName: obj.vendor_name, + vgpus: link(obj, 'VGPUs'), + } + }, +} + +// =================================================================== + +export default xapiObj => { + const transform = TRANSFORMS[xapiObj.$type.toLowerCase()] + if (!transform) { + return + } + + const xoObj = transform(xapiObj) + if (!xoObj) { + return + } + + if (!('id' in xoObj)) { + xoObj.id = xapiObj.$id + } + if (!('type' in xoObj)) { + xoObj.type = xapiObj.$type + } + if ( + 'uuid' in xapiObj && + !('uuid' in xoObj) + ) { + xoObj.uuid = xapiObj.uuid + } + xoObj.$pool = xapiObj.$pool.$id + xoObj.$poolId = xoObj.$pool // TODO: deprecated, remove when no longer used in xo-web + + // Internal properties. + defineProperties(xoObj, { + _xapiId: { + value: xapiObj.$id, + }, + _xapiRef: { + value: xapiObj.$ref, + }, + }) + + // Freezes and returns the new object. 
+ return freeze(xoObj) +} diff --git a/packages/xo-server/src/xapi-stats.js b/packages/xo-server/src/xapi-stats.js new file mode 100644 index 000000000..7812e4111 --- /dev/null +++ b/packages/xo-server/src/xapi-stats.js @@ -0,0 +1,544 @@ +import endsWith from 'lodash/endsWith' +import JSON5 from 'json5' +import limitConcurrency from 'limit-concurrency-decorator' +import { BaseError } from 'make-error' + +import { parseDateTime } from './xapi' + +const RRD_STEP_SECONDS = 5 +const RRD_STEP_MINUTES = 60 +const RRD_STEP_HOURS = 3600 +const RRD_STEP_DAYS = 86400 + +const RRD_STEP_FROM_STRING = { + 'seconds': RRD_STEP_SECONDS, + 'minutes': RRD_STEP_MINUTES, + 'hours': RRD_STEP_HOURS, + 'days': RRD_STEP_DAYS, +} + +const RRD_POINTS_PER_STEP = { + [RRD_STEP_SECONDS]: 120, + [RRD_STEP_MINUTES]: 120, + [RRD_STEP_HOURS]: 168, + [RRD_STEP_DAYS]: 366, +} + +export class XapiStatsError extends BaseError {} + +export class UnknownLegendFormat extends XapiStatsError { + constructor (line) { + super('Unknown legend line: ' + line) + } +} + +export class FaultyGranularity extends XapiStatsError {} + +// ------------------------------------------------------------------- +// Utils +// ------------------------------------------------------------------- + +// Return current local timestamp in seconds +function getCurrentTimestamp () { + return Date.now() / 1000 +} + +function convertNanToNull (value) { + return isNaN(value) ? 
    null : value
}

// Returns the host's local time as a Unix timestamp (seconds),
// fetched via the XAPI host.get_servertime call.
async function getServerTimestamp (xapi, host) {
  const serverLocalTime = await xapi.call('host.get_servertime', host.$ref)
  return Math.floor(parseDateTime(serverLocalTime).getTime() / 1000)
}

// -------------------------------------------------------------------
// Stats
// -------------------------------------------------------------------

// Empty stats accumulator for one host.
function getNewHostStats () {
  return {
    cpus: [],
    pifs: {
      rx: [],
      tx: [],
    },
    load: [],
    memory: [],
    memoryFree: [],
    memoryUsed: [],
  }
}

// Empty stats accumulator for one VM.
// Note: xvds are keyed by device letter, hence objects instead of arrays.
function getNewVmStats () {
  return {
    cpus: [],
    vifs: {
      rx: [],
      tx: [],
    },
    xvds: {
      r: {},
      w: {},
    },
    memory: [],
    memoryFree: [],
    memoryUsed: [],
  }
}

// -------------------------------------------------------------------
// Stats legends
// -------------------------------------------------------------------

// Empty legend (metric name → column index in the RRD rows) for one host.
function getNewHostLegends () {
  return {
    cpus: [],
    pifs: {
      rx: [],
      tx: [],
    },
    load: null,
    memoryFree: null,
    memory: null,
  }
}

// Empty legend (metric name → column index in the RRD rows) for one VM.
function getNewVmLegends () {
  return {
    cpus: [],
    vifs: {
      rx: [],
      tx: [],
    },
    xvds: {
      r: [],
      w: [],
    },
    memoryFree: null,
    memory: null,
  }
}

// Compute one legend line for one host
//
// `type` is the metric name (e.g. cpu0, pif_eth0_rx, loadavg) and
// `index` its column in the RRD data rows; unrecognized types are
// silently ignored.
function parseOneHostLegend (hostLegend, type, index) {
  let resReg

  if ((resReg = /^cpu([0-9]+)$/.exec(type)) !== null) {
    hostLegend.cpus[resReg[1]] = index
  } else if ((resReg = /^pif_eth([0-9]+)_(rx|tx)$/.exec(type)) !== null) {
    if (resReg[2] === 'rx') {
      hostLegend.pifs.rx[resReg[1]] = index
    } else {
      hostLegend.pifs.tx[resReg[1]] = index
    }
  } else if (type === 'loadavg') {
    hostLegend.load = index
  } else if (type === 'memory_free_kib') {
    hostLegend.memoryFree = index
  } else if (type === 'memory_total_kib') {
    hostLegend.memory = index
  }
}

// Compute one legend line for one vm
// (Same contract as parseOneHostLegend, for VM metric names.)
function parseOneVmLegend (vmLegend, type, index) {
  let resReg

  if ((resReg = /^cpu([0-9]+)$/.exec(type)) !== null) {
    vmLegend.cpus[resReg[1]] = index
  } else if ((resReg = /^vif_([0-9]+)_(rx|tx)$/.exec(type)) !== null) {
    if (resReg[2] === 'rx') {
      vmLegend.vifs.rx[resReg[1]] = index
    } else {
      vmLegend.vifs.tx[resReg[1]] = index
    }
  } else if ((resReg = /^vbd_xvd(.)_(read|write)$/.exec(type))) {
    if (resReg[2] === 'read') {
      vmLegend.xvds.r[resReg[1]] = index
    } else {
      vmLegend.xvds.w[resReg[1]] = index
    }
  } else if (type === 'memory_internal_free') {
    vmLegend.memoryFree = index
  } else if (endsWith(type, 'memory')) {
    vmLegend.memory = index
  }
}

// Compute Stats Legends for host and vms from RRD update
//
// Each legend entry looks like `AVERAGE:<host|vm>:<uuid>:<type>`;
// throws UnknownLegendFormat on any entry that does not match.
// Returns [hostLegends, vmsLegends] where vmsLegends is keyed by VM UUID.
function parseLegends (json) {
  const hostLegends = getNewHostLegends()
  const vmsLegends = {}

  json.meta.legend.forEach((value, index) => {
    // NOTE(review): the `(.+)` groups are greedy — assumes uuid/type
    // never contain ':'; appears to hold for RRD legends, but confirm.
    const parsedLine = /^AVERAGE:(host|vm):(.+):(.+)$/.exec(value)

    if (parsedLine === null) {
      throw new UnknownLegendFormat(value)
    }

    const [ , name, uuid, type, , ] = parsedLine

    if (name !== 'vm') {
      parseOneHostLegend(hostLegends, type, index)
    } else {
      if (vmsLegends[uuid] === undefined) {
        vmsLegends[uuid] = getNewVmLegends()
      }

      parseOneVmLegend(vmsLegends[uuid], type, index)
    }
  })

  return [hostLegends, vmsLegends]
}

// Collects and caches RRD statistics fetched from XenServer hosts.
//
// Host stats are cached per host address and granularity step
// (this._hosts), VM stats per host address, step and VM UUID
// (this._vms).
export default class XapiStats {
  constructor () {
    this._vms = {}
    this._hosts = {}
  }

  // -------------------------------------------------------------------
  // Remove stats (Helper)
  // -------------------------------------------------------------------

  // Trims `dest` (previously accumulated stats) in place so that at
  // most `pointsPerStep` points are kept for every metric listed in
  // `source` (the legends of the latest RRD update).
  _removeOlderStats (source, dest, pointsPerStep) {
    for (const key in source) {
      if (key === 'cpus') {
        for (const cpuIndex in source.cpus) {
          dest.cpus[cpuIndex].splice(0, dest.cpus[cpuIndex].length - pointsPerStep)
        }

        // If the number of cpus has been decreased, remove !
+ let offset + + if ((offset = dest.cpus.length - source.cpus.length) > 0) { + dest.cpus.splice(-offset) + } + } else if (endsWith(key, 'ifs')) { + // For each pif or vif + for (const ifType in source[key]) { + for (const pifIndex in source[key][ifType]) { + dest[key][ifType][pifIndex].splice(0, dest[key][ifType][pifIndex].length - pointsPerStep) + } + + // If the number of pifs has been decreased, remove ! + let offset + + if ((offset = dest[key][ifType].length - source[key][ifType].length) > 0) { + dest[key][ifType].splice(-offset) + } + } + } else if (key === 'xvds') { + for (const xvdType in source.xvds) { + for (const xvdLetter in source.xvds[xvdType]) { + dest.xvds[xvdType][xvdLetter].splice(0, dest.xvds[xvdType][xvdLetter].length - pointsPerStep) + } + + // If the number of xvds has been decreased, remove ! + // FIXME + } + } else if (key === 'load') { + dest.load.splice(0, dest[key].length - pointsPerStep) + } else if (key === 'memory') { + // Load, memory, memoryFree, memoryUsed + const length = dest.memory.length - pointsPerStep + dest.memory.splice(0, length) + dest.memoryFree.splice(0, length) + dest.memoryUsed.splice(0, length) + } + } + } + + // ------------------------------------------------------------------- + // HOST: Computation and stats update + // ------------------------------------------------------------------- + + // Compute one stats row for one host + _parseRowHostStats (hostLegends, hostStats, values) { + // Cpus + hostLegends.cpus.forEach((cpuIndex, index) => { + if (hostStats.cpus[index] === undefined) { + hostStats.cpus[index] = [] + } + + hostStats.cpus[index].push(values[cpuIndex] * 100) + }) + + // Pifs + for (const pifType in hostLegends.pifs) { + hostLegends.pifs[pifType].forEach((pifIndex, index) => { + if (hostStats.pifs[pifType][index] === undefined) { + hostStats.pifs[pifType][index] = [] + } + + hostStats.pifs[pifType][index].push(convertNanToNull(values[pifIndex])) + }) + } + + // Load + 
hostStats.load.push(convertNanToNull(values[hostLegends.load])) + + // Memory. + // WARNING! memory/memoryFree are in kB. + const memory = values[hostLegends.memory] * 1024 + const memoryFree = values[hostLegends.memoryFree] * 1024 + + hostStats.memory.push(memory) + + if (hostLegends.memoryFree !== undefined) { + hostStats.memoryFree.push(memoryFree) + hostStats.memoryUsed.push(memory - memoryFree) + } + } + + // Compute stats for host from RRD update + _parseHostStats (json, hostname, hostLegends, step) { + const host = this._hosts[hostname][step] + + if (host.stats === undefined) { + host.stats = getNewHostStats() + } + + for (const row of json.data) { + this._parseRowHostStats(hostLegends, host.stats, row.values) + } + } + + // ------------------------------------------------------------------- + // VM: Computation and stats update + // ------------------------------------------------------------------- + + // Compute stats for vms from RRD update + _parseRowVmStats (vmLegends, vmStats, values) { + // Cpus + vmLegends.cpus.forEach((cpuIndex, index) => { + if (vmStats.cpus[index] === undefined) { + vmStats.cpus[index] = [] + } + + vmStats.cpus[index].push(values[cpuIndex] * 100) + }) + + // Vifs + for (const vifType in vmLegends.vifs) { + vmLegends.vifs[vifType].forEach((vifIndex, index) => { + if (vmStats.vifs[vifType][index] === undefined) { + vmStats.vifs[vifType][index] = [] + } + + vmStats.vifs[vifType][index].push(convertNanToNull(values[vifIndex])) + }) + } + + // Xvds + for (const xvdType in vmLegends.xvds) { + for (const index in vmLegends.xvds[xvdType]) { + if (vmStats.xvds[xvdType][index] === undefined) { + vmStats.xvds[xvdType][index] = [] + } + + vmStats.xvds[xvdType][index].push(convertNanToNull(values[vmLegends.xvds[xvdType][index]])) + } + } + + // Memory + // WARNING! 
memoryFree is in Kb not in b, memory is in b + const memory = values[vmLegends.memory] + const memoryFree = values[vmLegends.memoryFree] * 1024 + + vmStats.memory.push(memory) + + if (vmLegends.memoryFree !== undefined) { + vmStats.memoryFree.push(memoryFree) + vmStats.memoryUsed.push(memory - memoryFree) + } + } + + // Compute stats for vms + _parseVmsStats (json, hostname, vmsLegends, step) { + if (this._vms[hostname][step] === undefined) { + this._vms[hostname][step] = {} + } + + const vms = this._vms[hostname][step] + + for (const uuid in vmsLegends) { + if (vms[uuid] === undefined) { + vms[uuid] = getNewVmStats() + } + } + + for (const row of json.data) { + for (const uuid in vmsLegends) { + this._parseRowVmStats(vmsLegends[uuid], vms[uuid], row.values) + } + } + } + + // ------------------------------------------------------------------- + // ------------------------------------------------------------------- + + // Execute one http request on a XenServer for get stats + // Return stats (Json format) or throws got exception + @limitConcurrency(3) + _getJson (xapi, host, timestamp) { + return xapi.getResource('/rrd_updates', { + host, + query: { + cf: 'AVERAGE', + host: 'true', + json: 'true', + start: timestamp, + }, + }).then(response => response.readAll().then(JSON5.parse)) + } + + async _getLastTimestamp (xapi, host, step) { + if (this._hosts[host.address][step] === undefined) { + const serverTimeStamp = await getServerTimestamp(xapi, host) + return serverTimeStamp - step * RRD_POINTS_PER_STEP[step] + step + } + + return this._hosts[host.address][step].endTimestamp + } + + _getPoints (hostname, step, vmId) { + const hostStats = this._hosts[hostname][step] + + // Return host points + if (vmId === undefined) { + return { + interval: step, + ...hostStats, + } + } + + const vmsStats = this._vms[hostname][step] + + // Return vm points + return { + interval: step, + endTimestamp: hostStats.endTimestamp, + stats: (vmsStats && vmsStats[vmId]) || getNewVmStats(), + 
} + } + + async _getAndUpdatePoints (xapi, host, vmId, granularity) { + // Get granularity to use + const step = (granularity === undefined || granularity === 0) + ? RRD_STEP_SECONDS : RRD_STEP_FROM_STRING[granularity] + + if (step === undefined) { + throw new FaultyGranularity(`Unknown granularity: '${granularity}'. Use 'seconds', 'minutes', 'hours', or 'days'.`) + } + + // Limit the number of http requests + const hostname = host.address + + if (this._hosts[hostname] === undefined) { + this._hosts[hostname] = {} + this._vms[hostname] = {} + } + + if (this._hosts[hostname][step] !== undefined && + this._hosts[hostname][step].localTimestamp + step > getCurrentTimestamp()) { + return this._getPoints(hostname, step, vmId) + } + + // Check if we are in the good interval, use this._hosts[hostname][step].localTimestamp + // for avoid bad requests + // TODO + + // Get json + const timestamp = await this._getLastTimestamp(xapi, host, step) + let json = await this._getJson(xapi, host, timestamp) + + // Check if the granularity is linked to 'step' + // If it's not the case, we retry other url with the json timestamp + if (json.meta.step !== step) { + console.log(`RRD call: Expected step: ${step}, received step: ${json.meta.step}. 
Retry with other timestamp`) + const serverTimestamp = await getServerTimestamp(xapi, host) + + // Approximately: half points are asked + // FIXME: Not the best solution + json = await this._getJson(xapi, host, serverTimestamp - step * (RRD_POINTS_PER_STEP[step] / 2) + step) + + if (json.meta.step !== step) { + throw new FaultyGranularity(`Unable to get the true granularity: ${json.meta.step}`) + } + } + + // Make new backup slot if necessary + if (this._hosts[hostname][step] === undefined) { + this._hosts[hostname][step] = { + endTimestamp: 0, + localTimestamp: 0, + } + } + + // It exists data + if (json.data.length !== 0) { + // Warning: Sometimes, the json.xport.meta.start value does not match with the + // timestamp of the oldest data value + // So, we use the timestamp of the oldest data value ! + const startTimestamp = json.data[json.meta.rows - 1].t + + // Remove useless data and reorder + // Note: Older values are at end of json.data.row + const parseOffset = (this._hosts[hostname][step].endTimestamp - startTimestamp + step) / step + + json.data.splice(json.data.length - parseOffset) + json.data.reverse() + + // It exists useful data + if (json.data.length > 0) { + const [hostLegends, vmsLegends] = parseLegends(json) + + // Compute and update host/vms stats + this._parseVmsStats(json, hostname, vmsLegends, step) + this._parseHostStats(json, hostname, hostLegends, step) + + // Remove older stats + this._removeOlderStats(hostLegends, this._hosts[hostname][step].stats, RRD_POINTS_PER_STEP[step]) + + for (const uuid in vmsLegends) { + this._removeOlderStats(vmsLegends[uuid], this._vms[hostname][step][uuid], RRD_POINTS_PER_STEP[step]) + } + } + } + + // Update timestamp + this._hosts[hostname][step].endTimestamp = json.meta.end + this._hosts[hostname][step].localTimestamp = getCurrentTimestamp() + + return this._getPoints(hostname, step, vmId) + } + + // ------------------------------------------------------------------- + // 
------------------------------------------------------------------- + + // Warning: This functions returns one reference on internal data + // So, data can be changed by a parallel call on this functions + // It is forbidden to modify the returned data + + // Return host stats + async getHostPoints (xapi, hostId, granularity) { + const host = xapi.getObject(hostId) + return this._getAndUpdatePoints(xapi, host, undefined, granularity) + } + + // Return vms stats + async getVmPoints (xapi, vmId, granularity) { + const vm = xapi.getObject(vmId) + const host = vm.$resident_on + + if (!host) { + throw new Error(`VM ${vmId} is halted or host could not be found.`) + } + + return this._getAndUpdatePoints(xapi, host, vm.uuid, granularity) + } +} diff --git a/packages/xo-server/src/xapi/index.js b/packages/xo-server/src/xapi/index.js new file mode 100644 index 000000000..3c5e2c1fc --- /dev/null +++ b/packages/xo-server/src/xapi/index.js @@ -0,0 +1,2168 @@ +/* eslint-disable camelcase */ +import deferrable from 'golike-defer' +import fatfs from 'fatfs' +import synchronized from 'decorator-synchronized' +import tarStream from 'tar-stream' +import vmdkToVhd from 'xo-vmdk-to-vhd' +import { cancellable, catchPlus as pCatch, defer, ignoreErrors } from 'promise-toolbox' +import { PassThrough } from 'stream' +import { forbiddenOperation } from 'xo-common/api-errors' +import { + every, + find, + filter, + flatten, + groupBy, + includes, + isEmpty, + omit, + startsWith, + uniq, +} from 'lodash' +import { + Xapi as XapiBase, +} from 'xen-api' +import { + satisfies as versionSatisfies, +} from 'semver' + +import createSizeStream from '../size-stream' +import fatfsBuffer, { init as fatfsBufferInit } from '../fatfs-buffer' +import { mixin } from '../decorators' +import { + asyncMap, + camelToSnakeCase, + createRawObject, + ensureArray, + forEach, + isFunction, + map, + mapToArray, + pAll, + parseSize, + pDelay, + pFinally, + promisifyAll, + pSettle, +} from '../utils' + +import mixins 
from './mixins' // eslint-disable-line node/no-missing-import +import OTHER_CONFIG_TEMPLATE from './other-config-template' +import { + asBoolean, + asInteger, + debug, + extractOpaqueRef, + filterUndefineds, + getNamespaceForType, + getVmDisks, + canSrHaveNewVdiOfSize, + isVmHvm, + isVmRunning, + NULL_REF, + optional, + prepareXapiParam, +} from './utils' + +// =================================================================== + +const TAG_BASE_DELTA = 'xo:base_delta' +const TAG_COPY_SRC = 'xo:copy_of' + +// =================================================================== + +// FIXME: remove this work around when fixed, https://phabricator.babeljs.io/T2877 +// export * from './utils' +require('lodash/assign')(module.exports, require('./utils')) + +// VDI formats. (Raw is not available for delta vdi.) +export const VDI_FORMAT_VHD = 'vhd' +export const VDI_FORMAT_RAW = 'raw' + +export const IPV4_CONFIG_MODES = ['None', 'DHCP', 'Static'] +export const IPV6_CONFIG_MODES = ['None', 'DHCP', 'Static', 'Autoconf'] + +// =================================================================== + +@mixin(mapToArray(mixins)) +export default class Xapi extends XapiBase { + constructor (...args) { + super(...args) + + // Patch getObject to resolve _xapiId property. + this.getObject = (getObject => (...args) => { + let tmp + if ((tmp = args[0]) != null && (tmp = tmp._xapiId) != null) { + args[0] = tmp + } + return getObject.apply(this, args) + })(this.getObject) + + const genericWatchers = this._genericWatchers = createRawObject() + const objectsWatchers = this._objectWatchers = createRawObject() + + const onAddOrUpdate = objects => { + forEach(objects, object => { + const { + $id: id, + $ref: ref, + } = object + + // Run generic watchers. + for (const watcherId in genericWatchers) { + genericWatchers[watcherId](object) + } + + // Watched object. 
+ if (id in objectsWatchers) { + objectsWatchers[id].resolve(object) + delete objectsWatchers[id] + } + if (ref in objectsWatchers) { + objectsWatchers[ref].resolve(object) + delete objectsWatchers[ref] + } + }) + } + this.objects.on('add', onAddOrUpdate) + this.objects.on('update', onAddOrUpdate) + } + + call (...args) { + const fn = super.call + + const loop = () => fn.apply(this, args)::pCatch({ + code: 'TOO_MANY_PENDING_TASKS', + }, () => pDelay(5e3).then(loop)) + + return loop() + } + + createTask (name = 'untitled task', description) { + return super.createTask(`[XO] ${name}`, description) + } + + // ================================================================= + + _registerGenericWatcher (fn) { + const watchers = this._genericWatchers + const id = String(Math.random()) + + watchers[id] = fn + + return () => { + delete watchers[id] + } + } + + // Wait for an object to appear or to be updated. + // + // Predicate can be either an id, a UUID, an opaque reference or a + // function. + // + // TODO: implements a timeout. + _waitObject (predicate) { + if (isFunction(predicate)) { + const { promise, resolve } = defer() + + const unregister = this._registerGenericWatcher(obj => { + if (predicate(obj)) { + unregister() + + resolve(obj) + } + }) + + return promise + } + + let watcher = this._objectWatchers[predicate] + if (!watcher) { + const { promise, resolve } = defer() + + // Register the watcher. + watcher = this._objectWatchers[predicate] = { + promise, + resolve, + } + } + + return watcher.promise + } + + // Wait for an object to be in a given state. + // + // Faster than _waitObject() with a function. + _waitObjectState (idOrUuidOrRef, predicate) { + const object = this.getObject(idOrUuidOrRef, null) + if (object && predicate(object)) { + return object + } + + const loop = () => this._waitObject(idOrUuidOrRef).then( + (object) => predicate(object) ? object : loop() + ) + + return loop() + } + + // Returns the objects if already presents or waits for it. 
+ async _getOrWaitObject (idOrUuidOrRef) { + return ( + this.getObject(idOrUuidOrRef, null) || + this._waitObject(idOrUuidOrRef) + ) + } + + // ================================================================= + + _setObjectProperty (object, name, value) { + return this.call( + `${getNamespaceForType(object.$type)}.set_${camelToSnakeCase(name)}`, + object.$ref, + prepareXapiParam(value) + ) + } + + _setObjectProperties (object, props) { + const { + $ref: ref, + $type: type, + } = object + + const namespace = getNamespaceForType(type) + + // TODO: the thrown error should contain the name of the + // properties that failed to be set. + return Promise.all(mapToArray(props, (value, name) => { + if (value != null) { + return this.call(`${namespace}.set_${camelToSnakeCase(name)}`, ref, prepareXapiParam(value)) + } + }))::ignoreErrors() + } + + async _updateObjectMapProperty (object, prop, values) { + const { + $ref: ref, + $type: type, + } = object + + prop = camelToSnakeCase(prop) + + const namespace = getNamespaceForType(type) + const add = `${namespace}.add_to_${prop}` + const remove = `${namespace}.remove_from_${prop}` + + await Promise.all(mapToArray(values, (value, name) => { + if (value !== undefined) { + name = camelToSnakeCase(name) + const removal = this.call(remove, ref, name) + + return value === null + ? removal + : removal::ignoreErrors().then(() => this.call(add, ref, name, prepareXapiParam(value))) + } + })) + } + + async setHostProperties (id, { + nameLabel, + nameDescription, + }) { + await this._setObjectProperties(this.getObject(id), { + nameLabel, + nameDescription, + }) + } + + async setPoolProperties ({ + autoPoweron, + nameLabel, + nameDescription, + }) { + const { pool } = this + + await Promise.all([ + this._setObjectProperties(pool, { + nameLabel, + nameDescription, + }), + autoPoweron != null && this._updateObjectMapProperty(pool, 'other_config', { + autoPoweron: autoPoweron ? 
'true' : null, + }), + ]) + } + + async setSrProperties (id, { + nameLabel, + nameDescription, + }) { + await this._setObjectProperties(this.getObject(id), { + nameLabel, + nameDescription, + }) + } + + async setNetworkProperties (id, { + nameLabel, + nameDescription, + defaultIsLocked, + }) { + let defaultLockingMode + if (defaultIsLocked != null) { + defaultLockingMode = defaultIsLocked ? 'disabled' : 'unlocked' + } + await this._setObjectProperties(this.getObject(id), { + nameLabel, + nameDescription, + defaultLockingMode, + }) + } + + // ================================================================= + + async addTag (id, tag) { + const { + $ref: ref, + $type: type, + } = this.getObject(id) + + const namespace = getNamespaceForType(type) + await this.call(`${namespace}.add_tags`, ref, tag) + } + + async removeTag (id, tag) { + const { + $ref: ref, + $type: type, + } = this.getObject(id) + + const namespace = getNamespaceForType(type) + await this.call(`${namespace}.remove_tags`, ref, tag) + } + + // ================================================================= + + async setDefaultSr (srId) { + this._setObjectProperties(this.pool, { + default_SR: this.getObject(srId).$ref, + }) + } + + // ================================================================= + + async setPoolMaster (hostId) { + await this.call('pool.designate_new_master', this.getObject(hostId).$ref) + } + + // ================================================================= + + async joinPool (masterAddress, masterUsername, masterPassword, force = false) { + await this.call( + force ? 
'pool.join_force' : 'pool.join', + masterAddress, + masterUsername, + masterPassword + ) + } + + // ================================================================= + + async emergencyShutdownHost (hostId) { + const host = this.getObject(hostId) + const vms = host.$resident_VMs + debug(`Emergency shutdown: ${host.name_label}`) + await pSettle( + mapToArray(vms, vm => { + if (!vm.is_control_domain) { + return this.call('VM.suspend', vm.$ref) + } + }) + ) + await this.call('host.disable', host.$ref) + await this.call('host.shutdown', host.$ref) + } + + // ================================================================= + + // Disable the host and evacuate all its VMs. + // + // If `force` is false and the evacuation failed, the host is re- + // enabled and the error is thrown. + async _clearHost ({ $ref: ref }, force) { + await this.call('host.disable', ref) + + try { + await this.call('host.evacuate', ref) + } catch (error) { + if (!force) { + await this.call('host.enable', ref) + + throw error + } + } + } + + async disableHost (hostId) { + await this.call('host.disable', this.getObject(hostId).$ref) + } + + async forgetHost (hostId) { + await this.call('host.destroy', this.getObject(hostId).$ref) + } + + async ejectHostFromPool (hostId) { + await this.call('pool.eject', this.getObject(hostId).$ref) + } + + async enableHost (hostId) { + await this.call('host.enable', this.getObject(hostId).$ref) + } + + async powerOnHost (hostId) { + await this.call('host.power_on', this.getObject(hostId).$ref) + } + + async rebootHost (hostId, force = false) { + const host = this.getObject(hostId) + + await this._clearHost(host, force) + await this.call('host.reboot', host.$ref) + } + + async restartHostAgent (hostId) { + await this.call('host.restart_agent', this.getObject(hostId).$ref) + } + + async shutdownHost (hostId, force = false) { + const host = this.getObject(hostId) + + await this._clearHost(host, force) + await this.call('host.shutdown', host.$ref) + } + + // 
================================================================= + + // Clone a VM: make a fast copy by fast copying each of its VDIs + // (using snapshots where possible) on the same SRs. + _cloneVm (vm, nameLabel = vm.name_label) { + debug(`Cloning VM ${vm.name_label}${ + nameLabel !== vm.name_label + ? ` as ${nameLabel}` + : '' + }`) + + return this.call('VM.clone', vm.$ref, nameLabel) + } + + // Copy a VM: make a normal copy of a VM and all its VDIs. + // + // If a SR is specified, it will contains the copies of the VDIs, + // otherwise they will use the SRs they are on. + async _copyVm (vm, nameLabel = vm.name_label, sr = undefined) { + let snapshot + if (isVmRunning(vm)) { + snapshot = await this._snapshotVm(vm) + } + + debug(`Copying VM ${vm.name_label}${ + nameLabel !== vm.name_label + ? ` as ${nameLabel}` + : '' + }${ + sr + ? ` on ${sr.name_label}` + : '' + }`) + + try { + return await this.call( + 'VM.copy', + snapshot ? snapshot.$ref : vm.$ref, + nameLabel, + sr ? sr.$ref : '' + ) + } finally { + if (snapshot) { + await this._deleteVm(snapshot) + } + } + } + + async cloneVm (vmId, { + nameLabel = undefined, + fast = true, + } = {}) { + const vm = this.getObject(vmId) + + const cloneRef = await ( + fast + ? this._cloneVm(vm, nameLabel) + : this._copyVm(vm, nameLabel) + ) + + return /* await */ this._getOrWaitObject(cloneRef) + } + + async copyVm (vmId, srId, { + nameLabel = undefined, + } = {}) { + return /* await */ this._getOrWaitObject( + await this._copyVm( + this.getObject(vmId), + nameLabel, + this.getObject(srId) + ) + ) + } + + async remoteCopyVm (vmId, targetXapi, targetSrId, { + compress = true, + nameLabel = undefined, + } = {}) { + // Fall back on local copy if possible. 
+ if (targetXapi === this) { + return { + vm: await this.copyVm(vmId, targetSrId, { nameLabel }), + } + } + + const sr = targetXapi.getObject(targetSrId) + let stream = await this.exportVm(vmId, { + compress, + }) + + const sizeStream = createSizeStream() + stream = stream.pipe(sizeStream) + + const onVmCreation = nameLabel !== undefined + ? vm => targetXapi._setObjectProperties(vm, { + nameLabel, + }) + : null + + const vm = await targetXapi._getOrWaitObject( + await targetXapi._importVm( + stream, + sr, + onVmCreation + ) + ) + + return { + size: sizeStream.size, + vm, + } + } + + // Low level create VM. + _createVmRecord ({ + actions_after_crash, + actions_after_reboot, + actions_after_shutdown, + affinity, + // appliance, + blocked_operations, + generation_id, + ha_always_run, + ha_restart_priority, + has_vendor_device = false, // Avoid issue with some Dundee builds. + hardware_platform_version, + HVM_boot_params, + HVM_boot_policy, + HVM_shadow_multiplier, + is_a_template, + memory_dynamic_max, + memory_dynamic_min, + memory_static_max, + memory_static_min, + name_description, + name_label, + order, + other_config, + PCI_bus, + platform, + protection_policy, + PV_args, + PV_bootloader, + PV_bootloader_args, + PV_kernel, + PV_legacy_args, + PV_ramdisk, + recommendations, + shutdown_delay, + start_delay, + // suspend_SR, + tags, + user_version, + VCPUs_at_startup, + VCPUs_max, + VCPUs_params, + version, + xenstore_data, + }) { + debug(`Creating VM ${name_label}`) + + return this.call('VM.create', filterUndefineds({ + actions_after_crash, + actions_after_reboot, + actions_after_shutdown, + affinity: affinity == null ? 
NULL_REF : affinity, + HVM_boot_params, + HVM_boot_policy, + is_a_template: asBoolean(is_a_template), + memory_dynamic_max: asInteger(memory_dynamic_max), + memory_dynamic_min: asInteger(memory_dynamic_min), + memory_static_max: asInteger(memory_static_max), + memory_static_min: asInteger(memory_static_min), + other_config, + PCI_bus, + platform, + PV_args, + PV_bootloader, + PV_bootloader_args, + PV_kernel, + PV_legacy_args, + PV_ramdisk, + recommendations, + user_version: asInteger(user_version), + VCPUs_at_startup: asInteger(VCPUs_at_startup), + VCPUs_max: asInteger(VCPUs_max), + VCPUs_params, + + // Optional fields. + blocked_operations, + generation_id, + ha_always_run: asBoolean(ha_always_run), + ha_restart_priority, + has_vendor_device, + hardware_platform_version: optional(hardware_platform_version, asInteger), + // HVM_shadow_multiplier: asFloat(HVM_shadow_multiplier), // FIXME: does not work FIELD_TYPE_ERROR(hVM_shadow_multiplier) + name_description, + name_label, + order: optional(order, asInteger), + protection_policy, + shutdown_delay: asInteger(shutdown_delay), + start_delay: asInteger(start_delay), + tags, + version: asInteger(version), + xenstore_data, + })) + } + + async _deleteVm (vm, deleteDisks = true, force = false) { + debug(`Deleting VM ${vm.name_label}`) + + const { $ref } = vm + + // It is necessary for suspended VMs to be shut down + // to be able to delete their VDIs. 
+ if (vm.power_state !== 'Halted') { + await this.call('VM.hard_shutdown', $ref) + } + + if (force) { + await this._updateObjectMapProperty(vm, 'blocked_operations', { + destroy: null, + }) + } + + // ensure the vm record is up-to-date + vm = await this.barrier('VM', $ref) + + return Promise.all([ + this.call('VM.destroy', $ref), + + asyncMap(vm.$snapshots, snapshot => + this._deleteVm(snapshot) + )::ignoreErrors(), + + deleteDisks && asyncMap(getVmDisks(vm), ({ $ref: vdiRef }) => { + let onFailure = () => { + onFailure = vdi => { + console.error(`cannot delete VDI ${vdi.name_label} (from VM ${vm.name_label})`) + forEach(vdi.$VBDs, vbd => { + if (vbd.VM !== $ref) { + const vm = vbd.$VM + console.error('- %s (%s)', vm.name_label, vm.uuid) + } + }) + } + + // maybe the control domain has not yet unmounted the VDI, + // check and retry after 5 seconds + return pDelay(5e3).then(test) + } + const test = () => { + const vdi = this.getObjectByRef(vdiRef) + return ( + // Only remove VBDs not attached to other VMs. + vdi.VBDs.length < 2 || + every(vdi.$VBDs, vbd => vbd.VM === $ref) + ) + ? this._deleteVdi(vdi) + : onFailure(vdi) + } + return test() + })::ignoreErrors(), + ]) + } + + async deleteVm (vmId, deleteDisks, force) { + return /* await */ this._deleteVm( + this.getObject(vmId), + deleteDisks, + force + ) + } + + getVmConsole (vmId) { + const vm = this.getObject(vmId) + + const console = find(vm.$consoles, { protocol: 'rfb' }) + if (!console) { + throw new Error('no RFB console found') + } + + return console + } + + // Returns a stream to the exported VM. + async exportVm (vmId, { + compress = true, + } = {}) { + const vm = this.getObject(vmId) + + let host + let snapshotRef + if (isVmRunning(vm)) { + host = vm.$resident_on + snapshotRef = (await this._snapshotVm(vm)).$ref + } + + const promise = this.getResource('/export/', { + host, + query: { + ref: snapshotRef || vm.$ref, + use_compression: compress ? 
'true' : 'false', + }, + task: this.createTask('VM export', vm.name_label), + }) + + if (snapshotRef !== undefined) { + promise.then(_ => _.task::pFinally(() => + this.deleteVm(snapshotRef)::ignoreErrors() + )) + } + + return promise + } + + _assertHealthyVdiChain (vdi, cache) { + if (vdi == null) { + return + } + + if (!vdi.managed) { + const { SR } = vdi + let childrenMap = cache[SR] + if (childrenMap === undefined) { + childrenMap = cache[SR] = groupBy(vdi.$SR.$VDIs, _ => _.sm_config['vhd-parent']) + } + + // an unmanaged VDI should not have exactly one child: they + // should coalesce + const children = childrenMap[vdi.uuid] + if ( + children.length === 1 && + !children[0].managed // some SRs do not coalesce the leaf + ) { + throw new Error('unhealthy VDI chain') + } + } + + this._assertHealthyVdiChain( + this.getObjectByUuid(vdi.sm_config['vhd-parent'], null), + cache + ) + } + + _assertHealthyVdiChains (vm) { + const cache = createRawObject() + forEach(vm.$VBDs, ({ $VDI }) => { + this._assertHealthyVdiChain($VDI, cache) + }) + } + + // Create a snapshot of the VM and returns a delta export object. + @cancellable + @deferrable + async exportDeltaVm ($defer, $cancelToken, vmId, baseVmId = undefined, { + bypassVdiChainsCheck = false, + + // Contains a vdi.$id set of vmId. + fullVdisRequired = [], + + disableBaseTags = false, + snapshotNameLabel = undefined, + } = {}) { + if (!bypassVdiChainsCheck) { + this._assertHealthyVdiChains(this.getObject(vmId)) + } + + const vm = await this.snapshotVm(vmId) + $defer.onFailure(() => this._deleteVm(vm)) + if (snapshotNameLabel) { + this._setObjectProperties(vm, { + nameLabel: snapshotNameLabel, + })::ignoreErrors() + } + + const baseVm = baseVmId && this.getObject(baseVmId) + + // refs of VM's VDIs → base's VDIs. 
+ const baseVdis = {} + baseVm && forEach(baseVm.$VBDs, vbd => { + let vdi, snapshotOf + if ( + (vdi = vbd.$VDI) && + (snapshotOf = vdi.$snapshot_of) && + !find(fullVdisRequired, id => snapshotOf.$id === id) + ) { + baseVdis[vdi.snapshot_of] = vdi + } + }) + + const streams = {} + const vdis = {} + const vbds = {} + forEach(vm.$VBDs, vbd => { + let vdi + if ( + vbd.type !== 'Disk' || + !(vdi = vbd.$VDI) + ) { + // Ignore this VBD. + return + } + + // If the VDI name start with `[NOBAK]`, do not export it. + if (startsWith(vdi.name_label, '[NOBAK]')) { + // FIXME: find a way to not create the VDI snapshot in the + // first time. + // + // The snapshot must not exist otherwise it could break the + // next export. + this._deleteVdi(vdi)::ignoreErrors() + return + } + + vbds[vbd.$ref] = vbd + + const vdiRef = vdi.$ref + if (vdiRef in vdis) { + // This VDI has already been managed. + return + } + + // Look for a snapshot of this vdi in the base VM. + const baseVdi = baseVdis[vdi.snapshot_of] + + vdis[vdiRef] = baseVdi && !disableBaseTags + ? { + ...vdi, + other_config: { + ...vdi.other_config, + [TAG_BASE_DELTA]: baseVdi.uuid, + }, + $SR$uuid: vdi.$SR.uuid, + } + : { + ...vdi, + $SR$uuid: vdi.$SR.uuid, + } + const stream = streams[`${vdiRef}.vhd`] = this._exportVdi($cancelToken, vdi, baseVdi, VDI_FORMAT_VHD) + $defer.onFailure(stream.cancel) + }) + + const vifs = {} + forEach(vm.$VIFs, vif => { + vifs[vif.$ref] = { + ...vif, + $network$uuid: vif.$network.uuid, + } + }) + + return Object.defineProperty({ + version: '1.1.0', + vbds, + vdis, + vifs, + vm: { + ...vm, + other_config: baseVm && !disableBaseTags + ? 
{ + ...vm.other_config, + [TAG_BASE_DELTA]: baseVm.uuid, + } + : omit(vm.other_config, TAG_BASE_DELTA), + }, + }, 'streams', { + value: await streams::pAll(), + }) + } + + @deferrable + async importDeltaVm ($defer, delta, { + deleteBase = false, + disableStartAfterImport = true, + mapVdisSrs = {}, + name_label = delta.vm.name_label, + srId = this.pool.default_SR, + } = {}) { + const { version } = delta + + if (!versionSatisfies(version, '^1')) { + throw new Error(`Unsupported delta backup version: ${version}`) + } + + const remoteBaseVmUuid = delta.vm.other_config[TAG_BASE_DELTA] + let baseVm + if (remoteBaseVmUuid) { + baseVm = find(this.objects.all, obj => ( + (obj = obj.other_config) && + obj[TAG_COPY_SRC] === remoteBaseVmUuid + )) + + if (!baseVm) { + throw new Error('could not find the base VM') + } + } + + const baseVdis = {} + baseVm && forEach(baseVm.$VBDs, vbd => { + baseVdis[vbd.VDI] = vbd.$VDI + }) + + const { streams } = delta + + // 1. Create the VMs. + const vm = await this._getOrWaitObject( + await this._createVmRecord({ + ...delta.vm, + affinity: null, + is_a_template: false, + }) + ) + $defer.onFailure(() => this._deleteVm(vm)) + + await Promise.all([ + this._setObjectProperties(vm, { + name_label: `[Importing…] ${name_label}`, + }), + this._updateObjectMapProperty(vm, 'blocked_operations', { + start: 'Importing…', + }), + this._updateObjectMapProperty(vm, 'other_config', { + [TAG_COPY_SRC]: delta.vm.uuid, + }), + ]) + + // 2. Delete all VBDs which may have been created by the import. + await asyncMap( + vm.$VBDs, + vbd => this._deleteVbd(vbd) + )::ignoreErrors() + + // 3. Create VDIs. 
+ const newVdis = await map(delta.vdis, async vdi => { + const remoteBaseVdiUuid = vdi.other_config[TAG_BASE_DELTA] + if (!remoteBaseVdiUuid) { + const newVdi = await this.createVdi({ + ...vdi, + other_config: { + ...vdi.other_config, + [TAG_BASE_DELTA]: undefined, + [TAG_COPY_SRC]: vdi.uuid, + }, + sr: mapVdisSrs[vdi.uuid] || srId, + }) + $defer.onFailure(() => this._deleteVdi(newVdi)) + + return newVdi + } + + const baseVdi = find( + baseVdis, + vdi => vdi.other_config[TAG_COPY_SRC] === remoteBaseVdiUuid + ) + if (!baseVdi) { + throw new Error(`missing base VDI (copy of ${remoteBaseVdiUuid})`) + } + + const newVdi = await this._getOrWaitObject( + await this._cloneVdi(baseVdi) + ) + $defer.onFailure(() => this._deleteVdi(newVdi)) + + await this._updateObjectMapProperty(newVdi, 'other_config', { + [TAG_COPY_SRC]: vdi.uuid, + }) + + return newVdi + })::pAll() + + const networksOnPoolMasterByDevice = {} + let defaultNetwork + forEach(this.pool.$master.$PIFs, pif => { + defaultNetwork = networksOnPoolMasterByDevice[pif.device] = pif.$network + }) + + await Promise.all([ + // Create VBDs. + asyncMap( + delta.vbds, + vbd => this.createVbd({ + ...vbd, + vdi: newVdis[vbd.VDI], + vm, + }) + ), + + // Import VDI contents. + asyncMap( + newVdis, + async (vdi, id) => { + for (const stream of ensureArray(streams[`${id}.vhd`])) { + await this._importVdiContent(vdi, stream, VDI_FORMAT_VHD) + } + } + ), + + // Wait for VDI export tasks (if any) termination. + asyncMap( + streams, + stream => stream.task + ), + + // Create VIFs. 
+ asyncMap(delta.vifs, vif => { + const network = + (vif.$network$uuid && this.getObject(vif.$network$uuid, null)) || + networksOnPoolMasterByDevice[vif.device] || + defaultNetwork + + if (network) { + return this._createVif( + vm, + network, + vif + ) + } + }), + ]) + + if (deleteBase && baseVm) { + this._deleteVm(baseVm)::ignoreErrors() + } + + await Promise.all([ + this._setObjectProperties(vm, { + name_label, + }), + // FIXME: move + this._updateObjectMapProperty(vm, 'blocked_operations', { + start: disableStartAfterImport + ? 'Do not start this VM, clone it if you want to use it.' + : null, + }), + ]) + + return vm + } + + async _migrateVmWithStorageMotion (vm, hostXapi, host, { + migrationNetwork = find(host.$PIFs, pif => pif.management).$network, // TODO: handle not found + sr, + mapVdisSrs, + mapVifsNetworks, + }) { + // VDIs/SRs mapping + const vdis = {} + const defaultSr = host.$pool.$default_SR + for (const vbd of vm.$VBDs) { + const vdi = vbd.$VDI + if (vbd.type === 'Disk') { + vdis[vdi.$ref] = mapVdisSrs && mapVdisSrs[vdi.$id] + ? hostXapi.getObject(mapVdisSrs[vdi.$id]).$ref + : sr !== undefined + ? hostXapi.getObject(sr).$ref + : defaultSr.$ref // Will error if there are no default SR. + } + } + + // VIFs/Networks mapping + const vifsMap = {} + if (vm.$pool !== host.$pool) { + const defaultNetworkRef = find(host.$PIFs, pif => pif.management).$network.$ref + for (const vif of vm.$VIFs) { + vifsMap[vif.$ref] = mapVifsNetworks && mapVifsNetworks[vif.$id] + ? hostXapi.getObject(mapVifsNetworks[vif.$id]).$ref + : defaultNetworkRef + } + } + + const token = await hostXapi.call( + 'host.migrate_receive', + host.$ref, + migrationNetwork.$ref, + {} + ) + + const loop = () => this.call( + 'VM.migrate_send', + vm.$ref, + token, + true, // Live migration. 
+ vdis, + vifsMap, + { + force: 'true', + } + )::pCatch( + { code: 'TOO_MANY_STORAGE_MIGRATES' }, + () => pDelay(1e4).then(loop) + ) + + return loop() + } + + @synchronized + _callInstallationPlugin (hostRef, vdi) { + return this.call('host.call_plugin', hostRef, 'install-supp-pack', 'install', { vdi }).catch(error => { + if (error.code !== 'XENAPI_PLUGIN_FAILURE') { + console.warn('_callInstallationPlugin', error) + throw error + } + }) + } + + @deferrable + async installSupplementalPack ($defer, stream, { hostId }) { + if (!stream.length) { + throw new Error('stream must have a length') + } + + const vdi = await this.createTemporaryVdiOnHost(stream, hostId, '[XO] Supplemental pack ISO', 'small temporary VDI to store a supplemental pack ISO') + $defer(() => this._deleteVdi(vdi)) + + await this._callInstallationPlugin(this.getObject(hostId).$ref, vdi.uuid) + } + + @deferrable + async installSupplementalPackOnAllHosts ($defer, stream) { + if (!stream.length) { + throw new Error('stream must have a length') + } + + const isSrAvailable = sr => + sr && sr.content_type === 'user' && sr.physical_size - sr.physical_utilisation >= stream.length + + const hosts = filter(this.objects.all, { $type: 'host' }) + + const sr = this.findAvailableSharedSr(stream.length) + + // Shared SR available: create only 1 VDI for all the installations + if (sr) { + const vdi = await this.createTemporaryVdiOnSr(stream, sr, '[XO] Supplemental pack ISO', 'small temporary VDI to store a supplemental pack ISO') + $defer(() => this._deleteVdi(vdi)) + + // Install pack sequentially to prevent concurrent access to the unique VDI + for (const host of hosts) { + await this._callInstallationPlugin(host.$ref, vdi.uuid) + } + + return + } + + // No shared SR available: find an available local SR on each host + return Promise.all(mapToArray(hosts, deferrable(async ($defer, host) => { + // pipe stream synchronously to several PassThroughs to be able to pipe them asynchronously later + const pt = 
stream.pipe(new PassThrough()) + pt.length = stream.length + + const sr = find( + mapToArray(host.$PBDs, '$SR'), + isSrAvailable + ) + + if (!sr) { + throw new Error('no SR available to store installation file') + } + + const vdi = await this.createTemporaryVdiOnSr(pt, sr, '[XO] Supplemental pack ISO', 'small temporary VDI to store a supplemental pack ISO') + $defer(() => this._deleteVdi(vdi)) + + await this._callInstallationPlugin(host.$ref, vdi.uuid) + }))) + } + + async _importVm (stream, sr, onVmCreation = undefined) { + const taskRef = await this.createTask('VM import') + const query = {} + + let host + if (sr != null) { + host = sr.$PBDs[0].$host + query.sr_id = sr.$ref + } + + if (onVmCreation) { + this._waitObject( + obj => obj && obj.current_operations && taskRef in obj.current_operations + ).then(onVmCreation)::ignoreErrors() + } + + const vmRef = await this.putResource( + stream, + '/import/', + { + host, + query, + task: taskRef, + } + ).then(extractOpaqueRef) + + return vmRef + } + + @deferrable + async _importOvaVm ($defer, stream, { + descriptionLabel, + disks, + memory, + nameLabel, + networks, + nCpus, + }, sr) { + // 1. Create VM. + const vm = await this._getOrWaitObject( + await this._createVmRecord({ + ...OTHER_CONFIG_TEMPLATE, + memory_dynamic_max: memory, + memory_dynamic_min: memory, + memory_static_max: memory, + name_description: descriptionLabel, + name_label: nameLabel, + VCPUs_at_startup: nCpus, + VCPUs_max: nCpus, + }) + ) + $defer.onFailure(() => this._deleteVm(vm)) + // Disable start and change the VM name label during import. + await Promise.all([ + this.addForbiddenOperationToVm(vm.$id, 'start', 'OVA import in progress...'), + this._setObjectProperties(vm, { name_label: `[Importing...] ${nameLabel}` }), + ]) + + // 2. Create VDIs & Vifs. 
+ const vdis = {} + const vifDevices = await this.call('VM.get_allowed_VIF_devices', vm.$ref) + await Promise.all( + map(disks, async disk => { + const vdi = vdis[disk.path] = await this.createVdi({ + name_description: disk.descriptionLabel, + name_label: disk.nameLabel, + size: disk.capacity, + sr: sr.$ref, + }) + $defer.onFailure(() => this._deleteVdi(vdi)) + + return this.createVbd({ + userdevice: disk.position, + vdi, + vm, + }) + }).concat(map(networks, (networkId, i) => ( + this._createVif(vm, this.getObject(networkId), { + device: vifDevices[i], + }) + ))) + ) + + // 3. Import VDIs contents. + await new Promise((resolve, reject) => { + const extract = tarStream.extract() + + stream.on('error', reject) + + extract.on('finish', resolve) + extract.on('error', reject) + extract.on('entry', async (entry, stream, cb) => { + // Not a disk to import. + const vdi = vdis[entry.name] + if (!vdi) { + stream.on('end', cb) + stream.resume() + return + } + + const vhdStream = await vmdkToVhd(stream) + await this._importVdiContent(vdi, vhdStream, VDI_FORMAT_RAW) + + // See: https://github.com/mafintosh/tar-stream#extracting + // No import parallelization. + cb() + }) + stream.pipe(extract) + }) + + // Enable start and restore the VM name label after import. 
+ await Promise.all([ + this.removeForbiddenOperationFromVm(vm.$id, 'start'), + this._setObjectProperties(vm, { name_label: nameLabel }), + ]) + return vm + } + + // TODO: an XVA can contain multiple VMs + async importVm (stream, { + data, + srId, + type = 'xva', + } = {}) { + const sr = srId && this.getObject(srId) + + if (type === 'xva') { + return /* await */ this._getOrWaitObject(await this._importVm( + stream, + sr, + )) + } + + if (type === 'ova') { + return this._getOrWaitObject(await this._importOvaVm(stream, data, sr)) + } + + throw new Error(`unsupported type: '${type}'`) + } + + async migrateVm (vmId, hostXapi, hostId, { + sr, + migrationNetworkId, + mapVifsNetworks, + mapVdisSrs, + } = {}) { + const vm = this.getObject(vmId) + const host = hostXapi.getObject(hostId) + + const accrossPools = vm.$pool !== host.$pool + const useStorageMotion = ( + accrossPools || + sr !== undefined || + migrationNetworkId !== undefined || + !isEmpty(mapVifsNetworks) || + !isEmpty(mapVdisSrs) + ) + + if (useStorageMotion) { + await this._migrateVmWithStorageMotion(vm, hostXapi, host, { + migrationNetwork: migrationNetworkId && hostXapi.getObject(migrationNetworkId), + sr, + mapVdisSrs, + mapVifsNetworks, + }) + } else { + try { + await this.call('VM.pool_migrate', vm.$ref, host.$ref, { force: 'true' }) + } catch (error) { + if (error.code !== 'VM_REQUIRES_SR') { + throw error + } + + // Retry using motion storage. + await this._migrateVmWithStorageMotion(vm, hostXapi, host, {}) + } + } + } + + async _snapshotVm (vm, nameLabel = vm.name_label) { + debug(`Snapshotting VM ${vm.name_label}${ + nameLabel !== vm.name_label + ? 
` as ${nameLabel}` + : '' + }`) + + let ref + try { + ref = await this.call('VM.snapshot_with_quiesce', vm.$ref, nameLabel) + this.addTag(ref, 'quiesce')::ignoreErrors() + + await this._waitObjectState(ref, vm => includes(vm.tags, 'quiesce')) + } catch (error) { + const { code } = error + if ( + code !== 'VM_SNAPSHOT_WITH_QUIESCE_NOT_SUPPORTED' && + + // quiesce only work on a running VM + code !== 'VM_BAD_POWER_STATE' && + + // quiesce failed, fallback on standard snapshot + // TODO: emit warning + code !== 'VM_SNAPSHOT_WITH_QUIESCE_FAILED' + ) { + throw error + } + ref = await this.call('VM.snapshot', vm.$ref, nameLabel) + } + // Convert the template to a VM and wait to have receive the up- + // to-date object. + const [ , snapshot ] = await Promise.all([ + this.call('VM.set_is_a_template', ref, false), + this._waitObjectState(ref, snapshot => !snapshot.is_a_template), + ]) + + return snapshot + } + + async snapshotVm (vmId, nameLabel = undefined) { + return /* await */ this._snapshotVm( + this.getObject(vmId), + nameLabel + ) + } + + async setVcpuWeight (vmId, weight) { + weight = weight || null // Take all falsy values as a removal (0 included) + const vm = this.getObject(vmId) + await this._updateObjectMapProperty(vm, 'VCPUs_params', {weight}) + } + + async _startVm (vm, force) { + debug(`Starting VM ${vm.name_label}`) + + if (force) { + await this._updateObjectMapProperty(vm, 'blocked_operations', { + start: null, + }) + } + + return this.call( + 'VM.start', + vm.$ref, + false, // Start paused? + false // Skip pre-boot checks? 
+ ) + } + + async startVm (vmId, force) { + try { + await this._startVm(this.getObject(vmId), force) + } catch (e) { + if (e.code === 'OPERATION_BLOCKED') { + throw forbiddenOperation('Start', e.params[1]) + } + if (e.code === 'VM_BAD_POWER_STATE') { + return this.resumeVm(vmId) + } + throw e + } + } + + async startVmOnCd (vmId) { + const vm = this.getObject(vmId) + + if (isVmHvm(vm)) { + const { order } = vm.HVM_boot_params + + await this._updateObjectMapProperty(vm, 'HVM_boot_params', { + order: 'd', + }) + + try { + await this._startVm(vm) + } finally { + await this._updateObjectMapProperty(vm, 'HVM_boot_params', { + order, + }) + } + } else { + // Find the original template by name (*sigh*). + const templateNameLabel = vm.other_config['base_template_name'] + const template = templateNameLabel && + find(this.objects.all, obj => ( + obj.$type === 'vm' && + obj.is_a_template && + obj.name_label === templateNameLabel + )) + + const bootloader = vm.PV_bootloader + const bootables = [] + try { + const promises = [] + + const cdDrive = this._getVmCdDrive(vm) + forEach(vm.$VBDs, vbd => { + promises.push( + this._setObjectProperties(vbd, { + bootable: vbd === cdDrive, + }) + ) + + bootables.push([ vbd, Boolean(vbd.bootable) ]) + }) + + promises.push( + this._setObjectProperties(vm, { + PV_bootloader: 'eliloader', + }), + this._updateObjectMapProperty(vm, 'other_config', { + 'install-distro': template && template.other_config['install-distro'], + 'install-repository': 'cdrom', + }) + ) + + await Promise.all(promises) + + await this._startVm(vm) + } finally { + this._setObjectProperties(vm, { + PV_bootloader: bootloader, + })::ignoreErrors() + + forEach(bootables, ([ vbd, bootable ]) => { + this._setObjectProperties(vbd, { bootable })::ignoreErrors() + }) + } + } + } + + // vm_operations: http://xapi-project.github.io/xen-api/classes/vm.html + async addForbiddenOperationToVm (vmId, operation, reason) { + await this.call('VM.add_to_blocked_operations', 
this.getObject(vmId).$ref, operation, `[XO] ${reason}`) + } + + async removeForbiddenOperationFromVm (vmId, operation) { + await this.call('VM.remove_from_blocked_operations', this.getObject(vmId).$ref, operation) + } + + // ================================================================= + + async createVbd ({ + bootable = false, + other_config = {}, + qos_algorithm_params = {}, + qos_algorithm_type = '', + type = 'Disk', + unpluggable = false, + userdevice, + VDI, + VM, + + vdi = VDI, + + empty = vdi === undefined, + mode = (type === 'Disk') ? 'RW' : 'RO', + vm = VM, + }) { + vdi = this.getObject(vdi) + vm = this.getObject(vm) + + debug(`Creating VBD for VDI ${vdi.name_label} on VM ${vm.name_label}`) + + if (userdevice == null) { + const allowed = await this.call('VM.get_allowed_VBD_devices', vm.$ref) + const {length} = allowed + if (length === 0) { + throw new Error('no allowed VBD devices') + } + + if (type === 'CD') { + // Choose position 3 if allowed. + userdevice = includes(allowed, '3') + ? '3' + : allowed[0] + } else { + userdevice = allowed[0] + + // Avoid userdevice 3 if possible. + if (userdevice === '3' && length > 1) { + userdevice = allowed[1] + } + } + } + + // By default a VBD is unpluggable. + const vbdRef = await this.call('VBD.create', { + bootable: Boolean(bootable), + empty: Boolean(empty), + mode, + other_config, + qos_algorithm_params, + qos_algorithm_type, + type, + unpluggable: Boolean(unpluggable), + userdevice, + VDI: vdi && vdi.$ref, + VM: vm.$ref, + }) + + if (isVmRunning(vm)) { + await this.call('VBD.plug', vbdRef) + } + } + + _cloneVdi (vdi) { + debug(`Cloning VDI ${vdi.name_label}`) + + return this.call('VDI.clone', vdi.$ref) + } + + async createVdi ({ + name_description, + name_label, + other_config = {}, + read_only = false, + sharable = false, + sm_config, + SR, + tags, + type = 'user', + virtual_size, + xenstore_data, + + size, + sr = SR !== undefined && SR !== NULL_REF ? 
SR : this.pool.default_SR, + }) { + sr = this.getObject(sr) + debug(`Creating VDI ${name_label} on ${sr.name_label}`) + + return this._getOrWaitObject(await this.call('VDI.create', { + name_description, + name_label, + other_config, + read_only: Boolean(read_only), + sharable: Boolean(sharable), + sm_config, + SR: sr.$ref, + tags, + type, + virtual_size: size !== undefined ? parseSize(size) : virtual_size, + xenstore_data, + })) + } + + async moveVdi (vdiId, srId) { + const vdi = this.getObject(vdiId) + const sr = this.getObject(srId) + + if (vdi.SR === sr.$ref) { + return // nothing to do + } + + debug(`Moving VDI ${vdi.name_label} from ${vdi.$SR.name_label} to ${sr.name_label}`) + try { + await this.call('VDI.pool_migrate', vdi.$ref, sr.$ref, {}) + } catch (error) { + const { code } = error + if (code !== 'LICENCE_RESTRICTION' && code !== 'VDI_NEEDS_VM_FOR_MIGRATE') { + throw error + } + const newVdi = await this.barrier( + await this.call('VDI.copy', vdi.$ref, sr.$ref) + ) + await asyncMap(vdi.$VBDs, vbd => Promise.all([ + this.call('VBD.destroy', vbd.$ref), + this.createVbd({ + ...vbd, + vdi: newVdi, + }), + ])) + await this._deleteVdi(vdi) + } + } + + // TODO: check whether the VDI is attached. 
+ async _deleteVdi (vdi) { + debug(`Deleting VDI ${vdi.name_label}`) + + await this.call('VDI.destroy', vdi.$ref) + } + + _resizeVdi (vdi, size) { + debug(`Resizing VDI ${vdi.name_label} from ${vdi.virtual_size} to ${size}`) + + return this.call('VDI.resize', vdi.$ref, size) + } + + _getVmCdDrive (vm) { + for (const vbd of vm.$VBDs) { + if (vbd.type === 'CD') { + return vbd + } + } + } + + async _ejectCdFromVm (vm) { + const cdDrive = this._getVmCdDrive(vm) + if (cdDrive) { + await this.call('VBD.eject', cdDrive.$ref) + } + } + + async _insertCdIntoVm (cd, vm, { + bootable = false, + force = false, + } = {}) { + const cdDrive = await this._getVmCdDrive(vm) + if (cdDrive) { + try { + await this.call('VBD.insert', cdDrive.$ref, cd.$ref) + } catch (error) { + if (!force || error.code !== 'VBD_NOT_EMPTY') { + throw error + } + + await this.call('VBD.eject', cdDrive.$ref)::ignoreErrors() + + // Retry. + await this.call('VBD.insert', cdDrive.$ref, cd.$ref) + } + + if (bootable !== Boolean(cdDrive.bootable)) { + await this._setObjectProperties(cdDrive, {bootable}) + } + } else { + await this.createVbd({ + bootable, + type: 'CD', + vdi: cd, + vm, + }) + } + } + + async connectVbd (vbdId) { + await this.call('VBD.plug', vbdId) + } + + async _disconnectVbd (vbd) { + // TODO: check if VBD is attached before + try { + await this.call('VBD.unplug_force', vbd.$ref) + } catch (error) { + if (error.code === 'VBD_NOT_UNPLUGGABLE') { + await this.call('VBD.set_unpluggable', vbd.$ref, true) + return this.call('VBD.unplug_force', vbd.$ref) + } + } + } + + async disconnectVbd (vbdId) { + await this._disconnectVbd(this.getObject(vbdId)) + } + + async _deleteVbd (vbd) { + await this._disconnectVbd(vbd)::ignoreErrors() + await this.call('VBD.destroy', vbd.$ref) + } + + deleteVbd (vbdId) { + return this._deleteVbd(this.getObject(vbdId)) + } + + // TODO: remove when no longer used. 
+ async destroyVbdsFromVm (vmId) { + await Promise.all( + mapToArray(this.getObject(vmId).$VBDs, async vbd => { + await this.disconnectVbd(vbd.$ref)::ignoreErrors() + return this.call('VBD.destroy', vbd.$ref) + }) + ) + } + + async deleteVdi (vdiId) { + await this._deleteVdi(this.getObject(vdiId)) + } + + async resizeVdi (vdiId, size) { + await this._resizeVdi(this.getObject(vdiId), size) + } + + async ejectCdFromVm (vmId) { + await this._ejectCdFromVm(this.getObject(vmId)) + } + + async insertCdIntoVm (cdId, vmId, opts = undefined) { + await this._insertCdIntoVm( + this.getObject(cdId), + this.getObject(vmId), + opts + ) + } + + // ----------------------------------------------------------------- + + async snapshotVdi (vdiId, nameLabel) { + const vdi = this.getObject(vdiId) + + const snap = await this._getOrWaitObject( + await this.call('VDI.snapshot', vdi.$ref) + ) + + if (nameLabel) { + await this.call('VDI.set_name_label', snap.$ref, nameLabel) + } + + return snap + } + + @cancellable + _exportVdi ($cancelToken, vdi, base, format = VDI_FORMAT_VHD) { + const host = vdi.$SR.$PBDs[0].$host + + const query = { + format, + vdi: vdi.$ref, + } + if (base) { + query.base = base.$ref + } + + debug(`exporting VDI ${vdi.name_label}${base + ? ` (from base ${vdi.name_label})` + : '' + }`) + + return this.getResource($cancelToken, '/export_raw_vdi/', { + host, + query, + task: this.createTask('VDI Export', vdi.name_label), + }) + } + + // Returns a stream to the exported VDI. 
// Public wrapper: exports a VDI identified by id; `baseId` selects a
// base VDI for a delta export, `format` the export format.
exportVdi (vdiId, {
  baseId,
  format,
} = {}) {
  return this._exportVdi(
    this.getObject(vdiId),
    baseId && this.getObject(baseId),
    format
  )
}

// -----------------------------------------------------------------

// Uploads `body` (a stream) as the new content of an existing VDI.
//
// Requires at least one currently attached PBD on the VDI's SR,
// otherwise throws.
//
// NOTE(review): also awaits `body.task` and `body.checksumVerified`
// — presumably set by the producer of the stream; confirm against
// callers.
async _importVdiContent (vdi, body, format = VDI_FORMAT_VHD) {
  const pbd = find(vdi.$SR.$PBDs, 'currently_attached')
  if (pbd === undefined) {
    throw new Error('no valid PBDs found')
  }

  await Promise.all([
    body.task,
    body.checksumVerified,
    this.putResource(body, '/import_raw_vdi/', {
      host: pbd.host,
      query: {
        format,
        vdi: vdi.$ref,
      },
      task: this.createTask('VDI Content Import', vdi.name_label),
    }),
  ])
}

// Public wrapper: imports `body` into the VDI identified by id.
importVdiContent (vdiId, body, {
  format,
} = {}) {
  return this._importVdiContent(
    this.getObject(vdiId),
    body,
    format
  )
}

// =================================================================

// Creates a VIF for `vm` on `network` (definition continues past
// this chunk).
async _createVif (vm, network, {
  mac = '',
  position = undefined,

  currently_attached = true,
  device = position != null ?
String(position) : undefined, + ipv4_allowed = undefined, + ipv6_allowed = undefined, + locking_mode = undefined, + MAC = mac, + other_config = {}, + qos_algorithm_params = {}, + qos_algorithm_type = '', + } = {}) { + debug(`Creating VIF for VM ${vm.name_label} on network ${network.name_label}`) + + if (device == null) { + device = (await this.call('VM.get_allowed_VIF_devices', vm.$ref))[0] + } + + const vifRef = await this.call('VIF.create', filterUndefineds({ + device, + ipv4_allowed, + ipv6_allowed, + locking_mode, + MAC, + MTU: asInteger(network.MTU), + network: network.$ref, + other_config, + qos_algorithm_params, + qos_algorithm_type, + VM: vm.$ref, + })) + + if (currently_attached && isVmRunning(vm)) { + await this.call('VIF.plug', vifRef) + } + + return vifRef + } + + async createVif (vmId, networkId, opts = undefined) { + return /* await */ this._getOrWaitObject( + await this._createVif( + this.getObject(vmId), + this.getObject(networkId), + opts + ) + ) + } + @deferrable + async createNetwork ($defer, { + name, + description = 'Created with Xen Orchestra', + pifId, + mtu, + vlan, + }) { + const networkRef = await this.call('network.create', { + name_label: name, + name_description: description, + MTU: asInteger(mtu), + other_config: {}, + }) + $defer.onFailure(() => this.call('network.destroy', networkRef)) + if (pifId) { + await this.call('pool.create_VLAN_from_PIF', this.getObject(pifId).$ref, networkRef, asInteger(vlan)) + } + + return this._getOrWaitObject(networkRef) + } + + async editPif ( + pifId, + { vlan } + ) { + const pif = this.getObject(pifId) + const physPif = find(this.objects.all, obj => ( + obj.$type === 'pif' && + (obj.physical || !isEmpty(obj.bond_master_of)) && + obj.$pool === pif.$pool && + obj.device === pif.device + )) + + if (!physPif) { + throw new Error('PIF not found') + } + + const pifs = this.getObject(pif.network).$PIFs + + const wasAttached = {} + forEach(pifs, pif => { + wasAttached[pif.host] = pif.currently_attached + }) + 
+ const vlans = uniq(mapToArray(pifs, pif => pif.VLAN_master_of)) + await Promise.all( + mapToArray(vlans, vlan => vlan !== NULL_REF && this.call('VLAN.destroy', vlan)) + ) + + const newPifs = await this.call('pool.create_VLAN_from_PIF', physPif.$ref, pif.network, asInteger(vlan)) + await Promise.all( + mapToArray(newPifs, pifRef => + !wasAttached[this.getObject(pifRef).host] && this.call('PIF.unplug', pifRef)::ignoreErrors() + ) + ) + } + + @deferrable + async createBondedNetwork ($defer, { + bondMode, + mac = '', + pifIds, + ...params + }) { + const network = await this.createNetwork(params) + $defer.onFailure(() => this.deleteNetwork(network)) + // TODO: test and confirm: + // Bond.create is called here with PIFs from one host but XAPI should then replicate the + // bond on each host in the same pool with the corresponding PIFs (ie same interface names?). + await this.call('Bond.create', network.$ref, map(pifIds, pifId => this.getObject(pifId).$ref), mac, bondMode) + + return network + } + + async deleteNetwork (networkId) { + const network = this.getObject(networkId) + const pifs = network.$PIFs + + const vlans = uniq(mapToArray(pifs, pif => pif.VLAN_master_of)) + await Promise.all( + mapToArray(vlans, vlan => vlan !== NULL_REF && this.call('VLAN.destroy', vlan)) + ) + + const bonds = uniq(flatten(mapToArray(pifs, pif => pif.bond_master_of))) + await Promise.all( + mapToArray(bonds, bond => this.call('Bond.destroy', bond)) + ) + + await this.call('network.destroy', network.$ref) + } + + // ================================================================= + + async _doDockerAction (vmId, action, containerId) { + const vm = this.getObject(vmId) + const host = vm.$resident_on || this.pool.$master + + return /* await */ this.call('host.call_plugin', host.$ref, 'xscontainer', action, { + vmuuid: vm.uuid, + container: containerId, + }) + } + + async registerDockerContainer (vmId) { + await this._doDockerAction(vmId, 'register') + } + + async deregisterDockerContainer 
(vmId) { + await this._doDockerAction(vmId, 'deregister') + } + + async startDockerContainer (vmId, containerId) { + await this._doDockerAction(vmId, 'start', containerId) + } + + async stopDockerContainer (vmId, containerId) { + await this._doDockerAction(vmId, 'stop', containerId) + } + + async restartDockerContainer (vmId, containerId) { + await this._doDockerAction(vmId, 'restart', containerId) + } + + async pauseDockerContainer (vmId, containerId) { + await this._doDockerAction(vmId, 'pause', containerId) + } + + async unpauseDockerContainer (vmId, containerId) { + await this._doDockerAction(vmId, 'unpause', containerId) + } + + async getCloudInitConfig (templateId) { + const template = this.getObject(templateId) + const host = this.pool.$master + + const config = await this.call('host.call_plugin', host.$ref, 'xscontainer', 'get_config_drive_default', { + templateuuid: template.uuid, + }) + return config.slice(4) // FIXME remove the "True" string on the begining + } + + // Specific CoreOS Config Drive + async createCoreOsCloudInitConfigDrive (vmId, srId, config) { + const vm = this.getObject(vmId) + const host = this.pool.$master + const sr = this.getObject(srId) + + await this.call('host.call_plugin', host.$ref, 'xscontainer', 'create_config_drive', { + vmuuid: vm.uuid, + sruuid: sr.uuid, + configuration: config, + }) + await this.registerDockerContainer(vmId) + } + + // Generic Config Drive + @deferrable + async createCloudInitConfigDrive ($defer, vmId, srId, config) { + const vm = this.getObject(vmId) + const sr = this.getObject(srId) + + // First, create a small VDI (10MB) which will become the ConfigDrive + const buffer = fatfsBufferInit() + const vdi = await this.createVdi({ + name_label: 'XO CloudConfigDrive', + size: buffer.length, + sr: sr.$ref, + }) + $defer.onFailure(() => this._deleteVdi(vdi)) + + // Then, generate a FAT fs + const fs = promisifyAll(fatfs.createFileSystem(fatfsBuffer(buffer))) + + await fs.mkdir('openstack') + await 
fs.mkdir('openstack/latest') + await Promise.all([ + fs.writeFile( + 'openstack/latest/meta_data.json', + '{\n "uuid": "' + vm.uuid + '"\n}\n' + ), + fs.writeFile('openstack/latest/user_data', config), + ]) + + // ignore errors, I (JFT) don't understand why they are emitted + // because it works + await this._importVdiContent(vdi, buffer, VDI_FORMAT_RAW).catch(console.warn) + + await this.createVbd({ vdi, vm }) + } + + @deferrable + async createTemporaryVdiOnSr ($defer, stream, sr, name_label, name_description) { + const vdi = await this.createVdi({ + name_description, + name_label, + size: stream.length, + sr: sr.$ref, + }) + $defer.onFailure(() => this._deleteVdi(vdi)) + + await this.importVdiContent(vdi.$id, stream, { format: VDI_FORMAT_RAW }) + + return vdi + } + + // Create VDI on an adequate local SR + async createTemporaryVdiOnHost (stream, hostId, name_label, name_description) { + const pbd = find( + this.getObject(hostId).$PBDs, + pbd => canSrHaveNewVdiOfSize(pbd.$SR, stream.length) + ) + + if (pbd == null) { + throw new Error('no SR available') + } + + return this.createTemporaryVdiOnSr(stream, pbd.SR, name_label, name_description) + } + + findAvailableSharedSr (minSize) { + return find( + this.objects.all, + obj => obj.$type === 'sr' && obj.shared && canSrHaveNewVdiOfSize(obj, minSize) + ) + } + + // ================================================================= +} diff --git a/packages/xo-server/src/xapi/mixins/.index-modules b/packages/xo-server/src/xapi/mixins/.index-modules new file mode 100644 index 000000000..e69de29bb diff --git a/packages/xo-server/src/xapi/mixins/gpu.js b/packages/xo-server/src/xapi/mixins/gpu.js new file mode 100644 index 000000000..7df8b9bef --- /dev/null +++ b/packages/xo-server/src/xapi/mixins/gpu.js @@ -0,0 +1,9 @@ +export default { + createVgpu (vm, gpuGroup, vgpuType) { + // TODO: properly handle device. Can a VM have 2 vGPUS? 
+ return this.call('VGPU.create', this.getObject(vm).$ref, this.getObject(gpuGroup).$ref, '0', {}, this.getObject(vgpuType).$ref) + }, + deleteVgpu (vgpu) { + return this.call('VGPU.destroy', this.getObject(vgpu).$ref) + }, +} diff --git a/packages/xo-server/src/xapi/mixins/networking.js b/packages/xo-server/src/xapi/mixins/networking.js new file mode 100644 index 000000000..2e9952124 --- /dev/null +++ b/packages/xo-server/src/xapi/mixins/networking.js @@ -0,0 +1,60 @@ +import { isEmpty } from '../../utils' + +import { makeEditObject } from '../utils' + +export default { + async _connectVif (vif) { + await this.call('VIF.plug', vif.$ref) + }, + async connectVif (vifId) { + await this._connectVif(this.getObject(vifId)) + }, + async _deleteVif (vif) { + await this.call('VIF.destroy', vif.$ref) + }, + async deleteVif (vifId) { + const vif = this.getObject(vifId) + if (vif.currently_attached) { + await this._disconnectVif(vif) + } + await this._deleteVif(vif) + }, + async _disconnectVif (vif) { + await this.call('VIF.unplug_force', vif.$ref) + }, + async disconnectVif (vifId) { + await this._disconnectVif(this.getObject(vifId)) + }, + editVif: makeEditObject({ + ipv4Allowed: { + get: true, + set: [ + 'ipv4Allowed', + function (value, vif) { + const lockingMode = isEmpty(value) && isEmpty(vif.ipv6_allowed) + ? 'network_default' + : 'locked' + + if (lockingMode !== vif.locking_mode) { + return this._set('locking_mode', lockingMode) + } + }, + ], + }, + ipv6Allowed: { + get: true, + set: [ + 'ipv6Allowed', + function (value, vif) { + const lockingMode = isEmpty(value) && isEmpty(vif.ipv4_allowed) + ? 
'network_default' + : 'locked' + + if (lockingMode !== vif.locking_mode) { + return this._set('locking_mode', lockingMode) + } + }, + ], + }, + }), +} diff --git a/packages/xo-server/src/xapi/mixins/patching.js b/packages/xo-server/src/xapi/mixins/patching.js new file mode 100644 index 000000000..a5f954e8c --- /dev/null +++ b/packages/xo-server/src/xapi/mixins/patching.js @@ -0,0 +1,478 @@ +import deferrable from 'golike-defer' +import every from 'lodash/every' +import find from 'lodash/find' +import filter from 'lodash/filter' +import includes from 'lodash/includes' +import isObject from 'lodash/isObject' +import some from 'lodash/some' +import sortBy from 'lodash/sortBy' +import assign from 'lodash/assign' +import unzip from 'julien-f-unzip' + +import { debounce } from '../../decorators' +import { + createRawObject, + ensureArray, + forEach, + mapFilter, + mapToArray, + parseXml, +} from '../../utils' + +import { + debug, + extractOpaqueRef, + useUpdateSystem, +} from '../utils' + +export default { + // FIXME: should be static + @debounce(24 * 60 * 60 * 1000) + async _getXenUpdates () { + const { readAll, statusCode } = await this.xo.httpRequest( + 'http://updates.xensource.com/XenServer/updates.xml' + ) + + if (statusCode !== 200) { + throw new Error('cannot fetch patches list from Citrix') + } + + const data = parseXml(await readAll()).patchdata + + const patches = createRawObject() + forEach(data.patches.patch, patch => { + patches[patch.uuid] = { + date: patch.timestamp, + description: patch['name-description'], + documentationUrl: patch.url, + guidance: patch['after-apply-guidance'], + name: patch['name-label'], + url: patch['patch-url'], + uuid: patch.uuid, + conflicts: mapToArray(ensureArray(patch.conflictingpatches), patch => { + return patch.conflictingpatch.uuid + }), + requirements: mapToArray(ensureArray(patch.requiredpatches), patch => { + return patch.requiredpatch.uuid + }), + paid: patch['update-stream'] === 'premium', + upgrade: 
/^XS\d{2,}$/.test(patch['name-label']), + // TODO: what does it mean, should we handle it? + // version: patch.version, + } + if (patches[patch.uuid].conflicts[0] === undefined) { + patches[patch.uuid].conflicts.length = 0 + } + if (patches[patch.uuid].requirements[0] === undefined) { + patches[patch.uuid].requirements.length = 0 + } + }) + + const resolveVersionPatches = function (uuids) { + const versionPatches = createRawObject() + + forEach(ensureArray(uuids), ({uuid}) => { + versionPatches[uuid] = patches[uuid] + }) + + return versionPatches + } + + const versions = createRawObject() + let latestVersion + forEach(data.serverversions.version, version => { + versions[version.value] = { + date: version.timestamp, + name: version.name, + id: version.value, + documentationUrl: version.url, + patches: resolveVersionPatches(version.patch), + } + + if (version.latest) { + latestVersion = versions[version.value] + } + }) + + return { + patches, + latestVersion, + versions, + } + }, + + // ================================================================= + + // Returns installed and not installed patches for a given host. + async _getPoolPatchesForHost (host) { + const versions = (await this._getXenUpdates()).versions + + const hostVersions = host.software_version + const version = + versions[hostVersions.product_version] || + versions[hostVersions.product_version_text] + + return version + ? 
version.patches + : [] + }, + + _getInstalledPoolPatchesOnHost (host) { + const installed = createRawObject() + + // platform_version < 2.1.1 + forEach(host.$patches, hostPatch => { + installed[hostPatch.$pool_patch.uuid] = true + }) + + // platform_version >= 2.1.1 + forEach(host.$updates, update => { + installed[update.uuid] = true // TODO: ignore packs + }) + + return installed + }, + + async _listMissingPoolPatchesOnHost (host) { + const all = await this._getPoolPatchesForHost(host) + const installed = this._getInstalledPoolPatchesOnHost(host) + + const installable = createRawObject() + forEach(all, (patch, uuid) => { + if (installed[uuid]) { + return + } + + for (const uuid of patch.conflicts) { + if (uuid in installed) { + return + } + } + + installable[uuid] = patch + }) + + return installable + }, + + async listMissingPoolPatchesOnHost (hostId) { + // Returns an array to not break compatibility. + return mapToArray( + await this._listMissingPoolPatchesOnHost(this.getObject(hostId)) + ) + }, + + async _ejectToolsIsos (hostRef) { + return Promise.all(mapFilter( + this.objects.all, + vm => { + if (vm.$type !== 'vm' || (hostRef && vm.resident_on !== hostRef)) { + return + } + + const shouldEjectCd = some(vm.$VBDs, vbd => { + const vdi = vbd.$VDI + + return vdi && vdi.is_tools_iso + }) + + if (shouldEjectCd) { + return this.ejectCdFromVm(vm.$id) + } + } + )) + }, + + // ----------------------------------------------------------------- + + _isPoolPatchInstallableOnHost (patchUuid, host) { + const installed = this._getInstalledPoolPatchesOnHost(host) + + if (installed[patchUuid]) { + return false + } + + let installable = true + + forEach(installed, patch => { + if (includes(patch.conflicts, patchUuid)) { + installable = false + + return false + } + }) + + return installable + }, + + _isPoolPatchInstallableOnPool (patchUuid) { + return every( + this.objects.all, + obj => obj.$type !== 'host' || this._isPoolPatchInstallableOnHost(patchUuid, obj) + ) + }, + + // 
----------------------------------------------------------------- + + // platform_version < 2.1.1 ---------------------------------------- + async uploadPoolPatch (stream, patchName) { + const patchRef = await this.putResource( + stream, + '/pool_patch_upload', + { + task: this.createTask('Patch upload', patchName), + } + ).then(extractOpaqueRef) + + return this._getOrWaitObject(patchRef) + }, + + async _getOrUploadPoolPatch (uuid) { + try { + return this.getObjectByUuid(uuid) + } catch (error) {} + + debug('downloading patch %s', uuid) + + const patchInfo = (await this._getXenUpdates()).patches[uuid] + if (!patchInfo) { + throw new Error('no such patch ' + uuid) + } + + let stream = await this.xo.httpRequest(patchInfo.url) + stream = await new Promise((resolve, reject) => { + const PATCH_RE = /\.xsupdate$/ + stream.pipe(unzip.Parse()).on('entry', entry => { + if (PATCH_RE.test(entry.path)) { + entry.length = entry.size + resolve(entry) + } else { + entry.autodrain() + } + }).on('error', reject) + }) + + return this.uploadPoolPatch(stream, patchInfo.name) + }, + + // patform_version >= 2.1.1 ---------------------------------------- + async _getUpdateVdi ($defer, patchUuid, hostId) { + debug('downloading patch %s', patchUuid) + + const patchInfo = (await this._getXenUpdates()).patches[patchUuid] + if (!patchInfo) { + throw new Error('no such patch ' + patchUuid) + } + + let stream = await this.xo.httpRequest(patchInfo.url) + stream = await new Promise((resolve, reject) => { + stream.pipe(unzip.Parse()).on('entry', entry => { + entry.length = entry.size + resolve(entry) + }).on('error', reject) + }) + + let vdi + + // If no hostId provided, try and find a shared SR + if (!hostId) { + const sr = this.findAvailableSharedSr(stream.length) + + if (!sr) { + return + } + + vdi = await this.createTemporaryVdiOnSr(stream, sr, '[XO] Patch ISO', 'small temporary VDI to store a patch ISO') + } else { + vdi = await this.createTemporaryVdiOnHost(stream, hostId, '[XO] Patch ISO', 
'small temporary VDI to store a patch ISO') + } + $defer(() => this._deleteVdi(vdi)) + + return vdi + }, + + // ----------------------------------------------------------------- + + // patform_version < 2.1.1 ----------------------------------------- + async _installPoolPatchOnHost (patchUuid, host) { + const [ patch ] = await Promise.all([ this._getOrUploadPoolPatch(patchUuid), this._ejectToolsIsos(host.$ref) ]) + + await this.call('pool_patch.apply', patch.$ref, host.$ref) + }, + + // patform_version >= 2.1.1 + _installPatchUpdateOnHost: deferrable(async function ($defer, patchUuid, host) { + const [ vdi ] = await Promise.all([ + this._getUpdateVdi($defer, patchUuid, host.$id), + this._ejectToolsIsos(host.$ref), + ]) + + const updateRef = await this.call('pool_update.introduce', vdi.$ref) + // TODO: check update status + // const precheck = await this.call('pool_update.precheck', updateRef, host.$ref) + // - ok_livepatch_complete An applicable live patch exists for every required component + // - ok_livepatch_incomplete An applicable live patch exists but it is not sufficient + // - ok There is no applicable live patch + return this.call('pool_update.apply', updateRef, host.$ref) + }), + + // ----------------------------------------------------------------- + + async installPoolPatchOnHost (patchUuid, host) { + debug('installing patch %s', patchUuid) + if (!isObject(host)) { + host = this.getObject(host) + } + + return useUpdateSystem(host) + ? 
this._installPatchUpdateOnHost(patchUuid, host) + : this._installPoolPatchOnHost(patchUuid, host) + }, + + // ----------------------------------------------------------------- + + // platform_version < 2.1.1 + async _installPoolPatchOnAllHosts (patchUuid) { + const [ patch ] = await Promise.all([ + this._getOrUploadPoolPatch(patchUuid), + this._ejectToolsIsos(), + ]) + + await this.call('pool_patch.pool_apply', patch.$ref) + }, + + // platform_version >= 2.1.1 + _installPatchUpdateOnAllHosts: deferrable(async function ($defer, patchUuid) { + let [ vdi ] = await Promise.all([ + this._getUpdateVdi($defer, patchUuid), + this._ejectToolsIsos(), + ]) + if (vdi == null) { + vdi = await this._getUpdateVdi($defer, patchUuid, this.pool.master) + } + + return this.call( + 'pool_update.pool_apply', + await this.call('pool_update.introduce', vdi.$ref) + ) + }), + + async installPoolPatchOnAllHosts (patchUuid) { + debug('installing patch %s on all hosts', patchUuid) + + return useUpdateSystem(this.pool.$master) + ? this._installPatchUpdateOnAllHosts(patchUuid) + : this._installPoolPatchOnAllHosts(patchUuid) + }, + + // ----------------------------------------------------------------- + + // If no host is provided, install on pool + async _installPoolPatchAndRequirements (patch, patchesByUuid, host) { + if (host == null + ? !this._isPoolPatchInstallableOnPool(patch.uuid) + : !this._isPoolPatchInstallableOnHost(patch.uuid, host) + ) { + return + } + + const { requirements } = patch + + if (requirements.length) { + for (const requirementUuid of requirements) { + const requirement = patchesByUuid[requirementUuid] + + if (requirement != null) { + await this._installPoolPatchAndRequirements(requirement, patchesByUuid, host) + host = host && this.getObject(host.$id) + } + } + } + + return host == null + ? 
this.installPoolPatchOnAllHosts(patch.uuid) + : this.installPoolPatchOnHost(patch.uuid, host) + }, + + async installSpecificPatchesOnHost (patchNames, hostId) { + const host = this.getObject(hostId) + const missingPatches = await this._listMissingPoolPatchesOnHost(host) + + const patchesToInstall = [] + const addPatchesToList = patches => { + forEach(patches, patch => { + addPatchesToList(mapToArray(patch.requirements, { uuid: patch.uuid })) + + if (!find(patchesToInstall, { name: patch.name })) { + patchesToInstall.push(patch) + } + }) + } + addPatchesToList(mapToArray(patchNames, name => + find(missingPatches, { name }) + )) + + for (let i = 0, n = patchesToInstall.length; i < n; i++) { + await this._installPoolPatchAndRequirements(patchesToInstall[i], missingPatches, host) + } + }, + + async installAllPoolPatchesOnHost (hostId) { + let host = this.getObject(hostId) + + const installableByUuid = host.license_params.sku_type !== 'free' + ? await this._listMissingPoolPatchesOnHost(host) + : filter(await this._listMissingPoolPatchesOnHost(host), { paid: false, upgrade: false }) + + // List of all installable patches sorted from the newest to the + // oldest. + const installable = sortBy( + installableByUuid, + patch => -Date.parse(patch.date) + ) + + for (let i = 0, n = installable.length; i < n; ++i) { + const patch = installable[i] + + if (this._isPoolPatchInstallableOnHost(patch.uuid, host)) { + await this._installPoolPatchAndRequirements(patch, installableByUuid, host).catch(error => { + if (error.code !== 'PATCH_ALREADY_APPLIED' && error.code !== 'UPDATE_ALREADY_APPLIED') { + throw error + } + }) + host = this.getObject(host.$id) + } + } + }, + + async installAllPoolPatchesOnAllHosts () { + const installableByUuid = assign( + {}, + ...await Promise.all(mapFilter(this.objects.all, host => { + if (host.$type === 'host') { + return this._listMissingPoolPatchesOnHost(host).then(patches => host.license_params.sku_type !== 'free' + ? 
patches + : filter(patches, { paid: false, upgrade: false }) + ) + } + })) + ) + + // List of all installable patches sorted from the newest to the + // oldest. + const installable = sortBy( + installableByUuid, + patch => -Date.parse(patch.date) + ) + + for (let i = 0, n = installable.length; i < n; ++i) { + const patch = installable[i] + + await this._installPoolPatchAndRequirements(patch, installableByUuid).catch(error => { + if (error.code !== 'PATCH_ALREADY_APPLIED' && error.code !== 'UPDATE_ALREADY_APPLIED_IN_POOL') { + throw error + } + }) + } + }, +} diff --git a/packages/xo-server/src/xapi/mixins/storage.js b/packages/xo-server/src/xapi/mixins/storage.js new file mode 100644 index 000000000..bfd003b33 --- /dev/null +++ b/packages/xo-server/src/xapi/mixins/storage.js @@ -0,0 +1,96 @@ +import { + forEach, + groupBy, +} from 'lodash' + +import { + createRawObject, + mapToArray, +} from '../../utils' + +export default { + _connectAllSrPbds (sr) { + return Promise.all( + mapToArray(sr.$PBDs, pbd => this._plugPbd(pbd)) + ) + }, + + async connectAllSrPbds (id) { + await this._connectAllSrPbds(this.getObject(id)) + }, + + _disconnectAllSrPbds (sr) { + return Promise.all( + mapToArray(sr.$PBDs, pbd => this._unplugPbd(pbd)) + ) + }, + + async disconnectAllSrPbds (id) { + await this._disconnectAllSrPbds(this.getObject(id)) + }, + + async destroySr (id) { + const sr = this.getObject(id) + await this._disconnectAllSrPbds(sr) + await this.call('SR.destroy', sr.$ref) + }, + + async forgetSr (id) { + const sr = this.getObject(id) + await this._disconnectAllSrPbds(sr) + await this.call('SR.forget', sr.$ref) + }, + + _plugPbd (pbd) { + return this.call('PBD.plug', pbd.$ref) + }, + + async plugPbd (id) { + await this._plugPbd(this.getObject(id)) + }, + + _unplugPbd (pbd) { + return this.call('PBD.unplug', pbd.$ref) + }, + + async unplugPbd (id) { + await this._unplugPbd(this.getObject(id)) + }, + + _getUnhealthyVdiChainLength (uuid, childrenMap, cache) { + let length = 
cache[uuid] + if (length === undefined) { + const children = childrenMap[uuid] + length = children !== undefined && children.length === 1 + ? 1 + : 0 + try { + const parent = this.getObjectByUuid(uuid).sm_config['vhd-parent'] + if (parent !== undefined) { + length += this._getUnhealthyVdiChainLength(parent, childrenMap, cache) + } + } catch (error) { + console.warn('Xapi#_getUnhealthyVdiChainLength(%s)', uuid, error) + } + cache[uuid] = length + } + return length + }, + + getUnhealthyVdiChainsLength (sr) { + const vdis = this.getObject(sr).$VDIs + const unhealthyVdis = createRawObject() + const children = groupBy(vdis, 'sm_config.vhd-parent') + const cache = createRawObject() + forEach(vdis, vdi => { + if (vdi.managed && !vdi.is_a_snapshot) { + const { uuid } = vdi + const length = this._getUnhealthyVdiChainLength(uuid, children, cache) + if (length !== 0) { + unhealthyVdis[uuid] = length + } + } + }) + return unhealthyVdis + }, +} diff --git a/packages/xo-server/src/xapi/mixins/vm.js b/packages/xo-server/src/xapi/mixins/vm.js new file mode 100644 index 000000000..bd628e4aa --- /dev/null +++ b/packages/xo-server/src/xapi/mixins/vm.js @@ -0,0 +1,418 @@ +import deferrable from 'golike-defer' +import { catchPlus as pCatch, ignoreErrors } from 'promise-toolbox' +import { + find, + gte, + includes, + isEmpty, + lte, +} from 'lodash' + +import { + forEach, + mapToArray, + parseSize, +} from '../../utils' + +import { + isVmHvm, + isVmRunning, + makeEditObject, + NULL_REF, +} from '../utils' + +// According to: https://xenserver.org/blog/entry/vga-over-cirrus-in-xenserver-6-2.html. +const XEN_VGA_VALUES = ['std', 'cirrus'] +const XEN_VIDEORAM_VALUES = [1, 2, 4, 8, 16] + +export default { + // TODO: clean up on error. 
  // Creates a VM from a template, provisions its disks, VIFs, optional
  // vGPU and optional cloud-init config drive, then returns the fully
  // up-to-date VM record.
  //
  // On any failure, the partially created VM is deleted ($defer.onFailure).
  //
  // Params:
  // - templateId: id of the template to clone/copy
  // - opts.clone: clone (fast) vs full copy of the template
  // - opts.installRepository: id of an ISO VDI (→ CD install) or a URL
  //   (→ network install); null/undefined means no install
  // - opts.vdis / opts.vifs / opts.existingVdis: disks and interfaces to
  //   create or adjust
  // - opts.cloudConfig / opts.coreOs: cloud-init drive generation
  // - checkLimits: forwarded to _editVm for resource-quota enforcement
  @deferrable
  async createVm ($defer, templateId, {
    name_label, // eslint-disable-line camelcase
    nameLabel = name_label, // eslint-disable-line camelcase

    clone = true,
    installRepository = undefined,
    vdis = undefined,
    vifs = undefined,
    existingVdis = undefined,

    coreOs = false,
    cloudConfig = undefined,

    vgpuType = undefined,
    gpuGroup = undefined,

    ...props
  } = {}, checkLimits) {
    // 'cd' when installRepository resolves to a local object (an ISO),
    // 'network' when it does not (assumed to be a URL), 'none' otherwise.
    const installMethod = (() => {
      if (installRepository == null) {
        return 'none'
      }

      try {
        installRepository = this.getObject(installRepository)
        return 'cd'
      } catch (_) {
        return 'network'
      }
    })()
    const template = this.getObject(templateId)

    // Clones the template.
    const vmRef = await this[clone ? '_cloneVm' : '_copyVm'](template, nameLabel)
    $defer.onFailure(() => this.deleteVm(vmRef))

    // TODO: copy BIOS strings?

    // Removes disks from the provision XML, we will create them by
    // ourselves.
    await this.call('VM.remove_from_other_config', vmRef, 'disks')::ignoreErrors()

    // Creates the VDIs and executes the initial steps of the
    // installation.
    await this.call('VM.provision', vmRef)

    let vm = await this._getOrWaitObject(vmRef)

    // Set VMs params.
    await this._editVm(vm, props, checkLimits)

    // Sets boot parameters.
    {
      const isHvm = isVmHvm(vm)

      if (isHvm) {
        if (!isEmpty(vdis) || installMethod === 'network') {
          const { HVM_boot_params: bootParams } = vm
          let order = bootParams.order
          if (order) {
            // Make network ('n') the first boot entry.
            order = 'n' + order.replace('n', '')
          } else {
            order = 'ncd'
          }

          this._setObjectProperties(vm, {
            HVM_boot_params: { ...bootParams, order },
          })
        }
      } else { // PV
        if (vm.PV_bootloader === 'eliloader') {
          if (installMethod === 'network') {
            // TODO: normalize RHEL URL?

            await this._updateObjectMapProperty(vm, 'other_config', {
              'install-repository': installRepository,
            })
          } else if (installMethod === 'cd') {
            await this._updateObjectMapProperty(vm, 'other_config', {
              'install-repository': 'cdrom',
            })
          }
        }
      }
    }

    let hasBootableDisk = !!find(vm.$VBDs, 'bootable')

    // Inserts the CD if necessary.
    if (installMethod === 'cd') {
      // When the VM is started, if PV, the CD drive will become not
      // bootable and the first disk bootable.
      await this._insertCdIntoVm(installRepository, vm, {
        bootable: true,
      })
      hasBootableDisk = true
    }

    // Modify existing (previous template) disks if necessary
    existingVdis && await Promise.all(mapToArray(existingVdis, async ({ size, $SR: srId, ...properties }, userdevice) => {
      const vbd = find(vm.$VBDs, { userdevice })
      if (!vbd) {
        // No matching VBD on the VM for this template disk: skip it.
        return
      }
      const vdi = vbd.$VDI
      await this._setObjectProperties(vdi, properties)

      // if the disk is bigger
      if (
        size != null &&
        size > vdi.virtual_size
      ) {
        await this.resizeVdi(vdi.$id, size)
      }
      // if another SR is set, move it there
      if (srId) {
        await this.moveVdi(vdi.$id, srId)
      }
    }))

    // Creates the user defined VDIs.
    //
    // TODO: set vm.suspend_SR
    if (!isEmpty(vdis)) {
      const devices = await this.call('VM.get_allowed_VBD_devices', vm.$ref)
      await Promise.all(mapToArray(vdis, (vdiDescription, i) => this.createVdi({
        name_description: vdiDescription.name_description,
        name_label: vdiDescription.name_label,
        size: vdiDescription.size,
        sr: vdiDescription.sr || vdiDescription.SR,
      })
        .then(vdi => this.createVbd({
          // Either the CD or the 1st disk is bootable (only useful for PV VMs)
          bootable: !(hasBootableDisk || i),

          userdevice: devices[i],
          vdi,
          vm,
        }))
      ))
    }

    // Destroys the VIFs cloned from the template.
    await Promise.all(mapToArray(vm.$VIFs, vif => this._deleteVif(vif)))

    // Creates the VIFs specified by the user.
    if (vifs) {
      const devices = await this.call('VM.get_allowed_VIF_devices', vm.$ref)
      await Promise.all(mapToArray(vifs, (vif, index) => this._createVif(
        vm,
        this.getObject(vif.network),
        {
          ipv4_allowed: vif.ipv4_allowed,
          ipv6_allowed: vif.ipv6_allowed,
          device: devices[index],
          // Lock the interface as soon as any allowed-IP list is given.
          locking_mode: isEmpty(vif.ipv4_allowed) && isEmpty(vif.ipv6_allowed) ? 'network_default' : 'locked',
          mac: vif.mac,
          mtu: vif.mtu,
        }
      )))
    }

    if (vgpuType !== undefined && gpuGroup !== undefined) {
      await this.createVgpu(vm, gpuGroup, vgpuType)
    }

    if (cloudConfig != null) {
      // Refresh the record.
      await this.barrier('VM', vm.$ref)
      vm = this.getObjectByRef(vm.$ref)

      // Find the SR of the first VDI.
      // NOTE(review): the config drive is created on the SR of the first
      // disk VBD found — presumably the system disk; confirm ordering.
      let srRef
      forEach(vm.$VBDs, vbd => {
        let vdi
        if (
          vbd.type === 'Disk' &&
          (vdi = vbd.$VDI)
        ) {
          srRef = vdi.SR
          return false
        }
      })

      const method = coreOs
        ? 'createCoreOsCloudInitConfigDrive'
        : 'createCloudInitConfigDrive'
      await this[method](vm.$id, srRef, cloudConfig)
    }

    // wait for the record with all the VBDs and VIFs
    return this.barrier('VM', vm.$ref)
  },
+ // + // If the other value is not set and the constraint is not + // respected, the other value is changed first. + constraints: { + cpusStaticMax: gte, + }, + + get: vm => +vm.VCPUs_at_startup, + set: [ + 'VCPUs_at_startup', + function (value, vm) { + return isVmRunning(vm) && this._set('VCPUs_number_live', value) + }, + ], + }, + + cpuCap: { + get: vm => vm.VCPUs_params.cap && +vm.VCPUs_params.cap, + set (cap, vm) { + return this._updateObjectMapProperty(vm, 'VCPUs_params', { cap }) + }, + }, + + cpusMax: 'cpusStaticMax', + cpusStaticMax: { + constraints: { + cpus: lte, + }, + get: vm => +vm.VCPUs_max, + set: 'VCPUs_max', + }, + + cpuWeight: { + get: vm => vm.VCPUs_params.weight && +vm.VCPUs_params.weight, + set (weight, vm) { + return this._updateObjectMapProperty(vm, 'VCPUs_params', { weight }) + }, + }, + + highAvailability: { + set (ha, vm) { + return this.call('VM.set_ha_restart_priority', vm.$ref, ha ? 'restart' : '') + }, + }, + + memoryMin: { + constraints: { + memoryMax: gte, + }, + get: vm => +vm.memory_dynamic_min, + preprocess: parseSize, + set: 'memory_dynamic_min', + }, + + memory: 'memoryMax', + memoryMax: { + addToLimits: true, + limitName: 'memory', + get: vm => +vm.memory_dynamic_max, + preprocess: parseSize, + set (dynamicMax, vm) { + const { $ref } = vm + const dynamicMin = Math.min(vm.memory_dynamic_min, dynamicMax) + + if (isVmRunning(vm)) { + return this.call( + 'VM.set_memory_dynamic_range', + $ref, + dynamicMin, + dynamicMax + ) + } + + const staticMin = Math.min(vm.memory_static_min, dynamicMax) + return this.call( + 'VM.set_memory_limits', + $ref, + staticMin, + Math.max(dynamicMax, vm.memory_static_max), + dynamicMin, + dynamicMax + )::pCatch({ code: 'MEMORY_CONSTRAINT_VIOLATION' }, () => + this.call( + 'VM.set_memory_limits', + $ref, + staticMin, + dynamicMax, + dynamicMax, + dynamicMax + ) + ) + }, + }, + + memoryStaticMax: { + constraints: { + memoryMax: lte, + }, + get: vm => +vm.memory_static_max, + preprocess: parseSize, + set: 
'memory_static_max', + }, + + nameDescription: true, + + nameLabel: true, + + PV_args: true, + + tags: true, + + vga: { + set (vga, vm) { + if (!includes(XEN_VGA_VALUES, vga)) { + throw new Error(`The different values that the VGA can take are: ${XEN_VGA_VALUES}`) + } + return this._updateObjectMapProperty(vm, 'platform', { vga }) + }, + }, + + videoram: { + set (videoram, vm) { + if (!includes(XEN_VIDEORAM_VALUES, videoram)) { + throw new Error(`The different values that the video RAM can take are: ${XEN_VIDEORAM_VALUES}`) + } + return this._updateObjectMapProperty(vm, 'platform', { videoram }) + }, + }, + }), + + async editVm (id, props, checkLimits) { + return /* await */ this._editVm(this.getObject(id), props, checkLimits) + }, + + async revertVm (snapshotId, snapshotBefore = true) { + const snapshot = this.getObject(snapshotId) + if (snapshotBefore) { + await this._snapshotVm(snapshot.$snapshot_of) + } + await this.call('VM.revert', snapshot.$ref) + if (snapshot.snapshot_info['power-state-at-snapshot'] === 'Running') { + const vm = snapshot.$snapshot_of + if (vm.power_state === 'Halted') { + this.startVm(vm.$id)::ignoreErrors() + } else if (vm.power_state === 'Suspended') { + this.resumeVm(vm.$id)::ignoreErrors() + } + } + }, + + async resumeVm (vmId) { + // the force parameter is always true + return this.call('VM.resume', this.getObject(vmId).$ref, false, true) + }, +} diff --git a/packages/xo-server/src/xapi/other-config-template.js b/packages/xo-server/src/xapi/other-config-template.js new file mode 100644 index 000000000..4aa74fa74 --- /dev/null +++ b/packages/xo-server/src/xapi/other-config-template.js @@ -0,0 +1,53 @@ +import { NULL_REF } from './utils' + +const OTHER_CONFIG_TEMPLATE = { + actions_after_crash: 'restart', + actions_after_reboot: 'restart', + actions_after_shutdown: 'destroy', + affinity: null, + blocked_operations: {}, + ha_always_run: false, + HVM_boot_params: { + order: 'cdn', + }, + HVM_boot_policy: 'BIOS order', + 
HVM_shadow_multiplier: 1, + is_a_template: false, + memory_dynamic_max: 4294967296, + memory_dynamic_min: 4294967296, + memory_static_max: 4294967296, + memory_static_min: 134217728, + order: 0, + other_config: { + vgpu_pci: '', + base_template_name: 'Other install media', + mac_seed: '5e88eb6a-d680-c47f-a94a-028886971ba4', + 'install-methods': 'cdrom', + }, + PCI_bus: '', + platform: { + timeoffset: '0', + nx: 'true', + acpi: '1', + apic: 'true', + pae: 'true', + hpet: 'true', + viridian: 'true', + }, + protection_policy: NULL_REF, + PV_args: '', + PV_bootloader: '', + PV_bootloader_args: '', + PV_kernel: '', + PV_legacy_args: '', + PV_ramdisk: '', + recommendations: '', + shutdown_delay: 0, + start_delay: 0, + user_version: 1, + VCPUs_at_startup: 1, + VCPUs_max: 1, + VCPUs_params: {}, + version: 0, +} +export { OTHER_CONFIG_TEMPLATE as default } diff --git a/packages/xo-server/src/xapi/utils.js b/packages/xo-server/src/xapi/utils.js new file mode 100644 index 000000000..435343081 --- /dev/null +++ b/packages/xo-server/src/xapi/utils.js @@ -0,0 +1,379 @@ +// import isFinite from 'lodash/isFinite' +import camelCase from 'lodash/camelCase' +import createDebug from 'debug' +import isEqual from 'lodash/isEqual' +import isPlainObject from 'lodash/isPlainObject' +import pickBy from 'lodash/pickBy' +import { utcFormat, utcParse } from 'd3-time-format' +import { satisfies as versionSatisfies } from 'semver' + +import { + camelToSnakeCase, + createRawObject, + forEach, + isArray, + isBoolean, + isFunction, + isInteger, + isString, + map, + mapFilter, + mapToArray, + noop, +} from '../utils' + +// =================================================================== + +export const asBoolean = value => Boolean(value) + +// const asFloat = value => { +// value = String(value) +// return value.indexOf('.') === -1 +// ? 
`${value}.0` +// : value +// } + +export const asInteger = value => String(value) + +export const filterUndefineds = obj => pickBy(obj, value => value !== undefined) + +export const optional = (value, fn) => value == null + ? undefined + : fn ? fn(value) : value + +export const prepareXapiParam = param => { + // if (isFinite(param) && !isInteger(param)) { + // return asFloat(param) + // } + if (isInteger(param)) { + return asInteger(param) + } + if (isBoolean(param)) { + return asBoolean(param) + } + if (isArray(param)) { + return map(param, prepareXapiParam) + } + if (isPlainObject(param)) { + return map(filterUndefineds(param), prepareXapiParam) + } + + return param +} + +// ------------------------------------------------------------------- + +export const debug = createDebug('xo:xapi') + +// ------------------------------------------------------------------- + +const OPAQUE_REF_RE = /OpaqueRef:[0-9a-z-]+/ +export const extractOpaqueRef = str => { + const matches = OPAQUE_REF_RE.exec(str) + if (!matches) { + throw new Error('no opaque ref found') + } + return matches[0] +} + +// ------------------------------------------------------------------- + +const TYPE_TO_NAMESPACE = createRawObject() +forEach([ + 'Bond', + 'DR_task', + 'GPU_group', + 'PBD', + 'PCI', + 'PGPU', + 'PIF', + 'PIF_metrics', + 'SM', + 'SR', + 'VBD', + 'VBD_metrics', + 'VDI', + 'VGPU', + 'VGPU_type', + 'VIF', + 'VLAN', + 'VM', + 'VM_appliance', + 'VM_guest_metrics', + 'VM_metrics', + 'VMPP', + 'VTPM', +], namespace => { + TYPE_TO_NAMESPACE[namespace.toLowerCase()] = namespace +}) + +// Object types given by `xen-api` are always lowercase but the +// namespaces in the Xen API can have a different casing. 
+export const getNamespaceForType = type => TYPE_TO_NAMESPACE[type] || type + +// ------------------------------------------------------------------- + +export const getVmDisks = vm => { + const disks = createRawObject(null) + forEach(vm.$VBDs, vbd => { + let vdi + if ( + // Do not remove CDs and Floppies. + vbd.type === 'Disk' && + + // Ignore VBD without VDI. + (vdi = vbd.$VDI) + ) { + disks[vdi.$id] = vdi + } + }) + return disks +} + +// ------------------------------------------------------------------- + +// Format a date (pseudo ISO 8601) from one XenServer get by +// xapi.call('host.get_servertime', host.$ref) for example +export const formatDateTime = utcFormat('%Y%m%dT%H:%M:%SZ') + +export const parseDateTime = utcParse('%Y%m%dT%H:%M:%SZ') + +// ------------------------------------------------------------------- + +export const isHostRunning = host => { + const { $metrics } = host + + return $metrics && $metrics.live +} + +// ------------------------------------------------------------------- + +export const isVmHvm = vm => Boolean(vm.HVM_boot_policy) + +const VM_RUNNING_POWER_STATES = { + Running: true, + Paused: true, +} +export const isVmRunning = vm => VM_RUNNING_POWER_STATES[vm.power_state] + +// ------------------------------------------------------------------- + +const _DEFAULT_ADD_TO_LIMITS = (next, current) => next - current + +export const makeEditObject = specs => { + const normalizeGet = (get, name) => { + if (get === true) { + const prop = camelToSnakeCase(name) + return object => object[prop] + } + + if (isString(get)) { + return object => object[get] + } + + return get + } + const normalizeSet = (set, name) => { + if (isFunction(set)) { + return set + } + + if (set === true) { + const prop = camelToSnakeCase(name) + return function (value) { + return this._set(prop, value) + } + } + + if (isString(set)) { + const index = set.indexOf('.') + if (index === -1) { + const prop = camelToSnakeCase(set) + return function (value) { + return 
this._set(prop, value) + } + } + + const map = set.slice(0, index) + const prop = set.slice(index + 1) + + return function (value, object) { + return this._updateObjectMapProperty(object, map, { [prop]: value }) + } + } + + if (!isArray(set)) { + throw new Error('must be an array, a function or a string') + } + + set = mapToArray(set, normalizeSet) + + const { length } = set + if (!length) { + throw new Error('invalid setter') + } + + if (length === 1) { + return set[0] + } + + return function (value, object) { + return Promise.all(mapToArray(set, set => set.call(this, value, object))) + } + } + + const normalizeSpec = (spec, name) => { + if (spec === true) { + spec = { + get: true, + set: true, + } + } + + if (spec.addToLimits === true) { + spec.addToLimits = _DEFAULT_ADD_TO_LIMITS + } + if (!spec.limitName) { + spec.limitName = name + } + + forEach(spec.constraints, (constraint, constraintName) => { + if (!isFunction(constraint)) { + throw new Error('constraint must be a function') + } + + const constraintSpec = specs[constraintName] + if (!constraintSpec.get) { + throw new Error('constraint values must have a get') + } + }) + + const { get } = spec + if (get) { + spec.get = normalizeGet(get, name) + } else if (spec.addToLimits) { + throw new Error('addToLimits cannot be defined without get') + } + + spec.set = normalizeSet(spec.set, name) + + return spec + } + forEach(specs, (spec, name) => { + isString(spec) || (specs[name] = normalizeSpec(spec, name)) + }) + + // Resolves aliases and add camelCase and snake_case aliases. 
+ forEach(specs, (spec, name) => { + if (isString(spec)) { + do { + spec = specs[spec] + } while (isString(spec)) + specs[name] = spec + } + + let tmp + specs[tmp = camelCase(name)] || (specs[tmp] = spec) + specs[tmp = camelToSnakeCase(name)] || (specs[tmp] = spec) + }) + + return async function _editObject_ (id, values, checkLimits) { + const limits = checkLimits && {} + const object = this.getObject(id) + + const _objectRef = object.$ref + const _setMethodPrefix = `${getNamespaceForType(object.$type)}.set_` + + // Context used to execute functions. + const context = { + __proto__: this, + _set: (prop, value) => this.call(_setMethodPrefix + prop, _objectRef, prepareXapiParam(value)), + } + + const set = (value, name) => { + if (value === undefined) { + return + } + + const spec = specs[name] + if (!spec) { + return + } + + const { preprocess } = spec + if (preprocess) { + value = preprocess(value) + } + + const { get } = spec + if (get) { + const current = get(object) + if (isEqual(value, current)) { + return + } + + let addToLimits + if (limits && (addToLimits = spec.addToLimits)) { + limits[spec.limitName] = addToLimits(value, current) + } + } + + const cb = () => spec.set.call(context, value, object) + + const { constraints } = spec + if (constraints) { + const cbs = [] + + forEach(constraints, (constraint, constraintName) => { + // Before setting a property to a new value, if the constraint check fails (e.g. memoryMin > memoryMax): + // - if the user wants to set the constraint (ie constraintNewValue is defined): + // constraint <-- constraintNewValue THEN property <-- value (e.g. memoryMax <-- 2048 THEN memoryMin <-- 1024) + // - if the user DOES NOT want to set the constraint (ie constraintNewValue is NOT defined): + // constraint <-- value THEN property <-- value (e.g. memoryMax <-- 1024 THEN memoryMin <-- 1024) + // FIXME: Some values combinations will lead to setting the same property twice, which is not perfect but works for now. 
+ const constraintCurrentValue = specs[constraintName].get(object) + const constraintNewValue = values[constraintName] + + if (!constraint(constraintCurrentValue, value)) { + const cb = set(constraintNewValue == null ? value : constraintNewValue, constraintName) + if (cb) { + cbs.push(cb) + } + } + }) + + if (cbs.length) { + return () => Promise.all(mapToArray(cbs, cb => cb())).then(cb) + } + } + + return cb + } + + const cbs = mapFilter(values, set) + + if (checkLimits) { + await checkLimits(limits, object) + } + + return Promise.all(mapToArray(cbs, cb => cb())).then(noop) + } +} + +// =================================================================== + +export const NULL_REF = 'OpaqueRef:NULL' + +// =================================================================== + +export const useUpdateSystem = host => { + // Match Xen Center's condition: https://github.com/xenserver/xenadmin/blob/f3a64fc54bbff239ca6f285406d9034f57537d64/XenModel/Utils/Helpers.cs#L420 + return versionSatisfies(host.software_version.platform_version, '^2.1.1') +} + +export const canSrHaveNewVdiOfSize = (sr, minSize) => + sr != null && + // content_type values are not documented: this may be incorrect + sr.content_type !== 'disk' && // removable + sr.content_type !== 'iso' && // read only + sr.physical_size - sr.physical_utilisation >= minSize diff --git a/packages/xo-server/src/xo-mixins/.index-modules b/packages/xo-server/src/xo-mixins/.index-modules new file mode 100644 index 000000000..e69de29bb diff --git a/packages/xo-server/src/xo-mixins/acls.js b/packages/xo-server/src/xo-mixins/acls.js new file mode 100644 index 000000000..502261380 --- /dev/null +++ b/packages/xo-server/src/xo-mixins/acls.js @@ -0,0 +1,193 @@ +import checkAuthorization from 'xo-acl-resolver' +import { forEach, includes, map } from 'lodash' + +import { + ModelAlreadyExists, +} from '../collection' +import { + Acls, +} from '../models/acl' +import { + createRawObject, +} from '../utils' + +// 
=================================================================== + +export default class { + constructor (xo) { + this._xo = xo + + const aclsDb = this._acls = new Acls({ + connection: xo._redis, + prefix: 'xo:acl', + indexes: ['subject', 'object'], + }) + + xo.on('start', () => { + xo.addConfigManager('acls', + () => aclsDb.get(), + acls => aclsDb.update(acls), + [ 'groups', 'users' ] + ) + }) + + xo.on('clean', async () => { + const acls = await aclsDb.get() + const toRemove = [] + forEach(acls, ({ subject, object, action, id }) => { + if (!subject || !object || !action) { + toRemove.push(id) + } + }) + await aclsDb.remove(toRemove) + return aclsDb.rebuildIndexes() + }) + } + + async _getAclsForUser (userId) { + const user = await this._xo.getUser(userId) + const { groups } = user + + const subjects = groups + ? groups.concat(userId) + : [ userId ] + + const acls = [] + const pushAcls = (push => entries => { + push.apply(acls, entries) + })(acls.push) + + await Promise.all(map( + subjects, + subject => this.getAclsForSubject(subject).then(pushAcls) + )) + + return acls + } + + async addAcl (subjectId, objectId, action) { + try { + await this._acls.create(subjectId, objectId, action) + } catch (error) { + if (!(error instanceof ModelAlreadyExists)) { + throw error + } + } + } + + async removeAcl (subjectId, objectId, action) { + await this._acls.delete(subjectId, objectId, action) + } + + // TODO: remove when new collection. 
+ async getAllAcls () { + return this._acls.get() + } + + async getAclsForSubject (subjectId) { + return this._acls.get({ subject: subjectId }) + } + + async getPermissionsForUser (userId) { + const [ + acls, + permissionsByRole, + ] = await Promise.all([ + this._getAclsForUser(userId), + this._getPermissionsByRole(), + ]) + + const permissions = createRawObject() + for (const { action, object: objectId } of acls) { + const current = ( + permissions[objectId] || + (permissions[objectId] = createRawObject()) + ) + + const permissionsForRole = permissionsByRole[action] + if (permissionsForRole) { + for (const permission of permissionsForRole) { + current[permission] = 1 + } + } else { + current[action] = 1 + } + } + return permissions + } + + async hasPermissions (userId, permissions) { + const user = await this._xo.getUser(userId) + + // Special case for super XO administrators. + if (user.permission === 'admin') { + return true + } + + return checkAuthorization( + await this.getPermissionsForUser(userId), + id => this._xo.getObject(id), + permissions + ) + } + + async removeAclsForObject (objectId) { + const acls = this._acls + await acls.remove(map(await acls.get({ object: objectId }), 'id')) + } + + // ----------------------------------------------------------------- + + async _getPermissionsByRole () { + const roles = await this.getRoles() + + const permissions = createRawObject() + for (const role of roles) { + permissions[role.id] = role.permissions + } + return permissions + } + + // TODO: delete when merged with the new collection. + async getRoles () { + return [ + { + id: 'viewer', + name: 'Viewer', + permissions: [ + 'view', + ], + }, + { + id: 'operator', + name: 'Operator', + permissions: [ + 'view', + 'operate', + ], + }, + { + id: 'admin', + name: 'Admin', + permissions: [ + 'view', + 'operate', + 'administrate', + ], + }, + ] + } + + // Returns an array of roles which have a given permission. 
+ async getRolesForPermission (permission) { + const roles = [] + + forEach(await this.getRoles(), role => { + if (includes(role.permissions, permission)) { + roles.push(role.id) + } + }) + + return roles + } +} diff --git a/packages/xo-server/src/xo-mixins/api.js b/packages/xo-server/src/xo-mixins/api.js new file mode 100644 index 000000000..329b0859f --- /dev/null +++ b/packages/xo-server/src/xo-mixins/api.js @@ -0,0 +1,343 @@ +import createDebug from 'debug' +import kindOf from 'kindof' +import ms from 'ms' +import schemaInspector from 'schema-inspector' +import { + forEach, + isArray, + isFunction, + map, + mapValues, +} from 'lodash' + +import * as methods from '../api' // eslint-disable-line node/no-missing-import +import { + MethodNotFound, +} from 'json-rpc-peer' +import { + createRawObject, + noop, + serializeError, +} from '../utils' + +import * as errors from 'xo-common/api-errors' + +// =================================================================== + +const debug = createDebug('xo:api') + +const PERMISSIONS = { + none: 0, + read: 1, + write: 2, + admin: 3, +} + +// TODO: +// - error when adding a server to a pool with incompatible version +// - error when halted VM migration failure is due to XS < 7 +const XAPI_ERROR_TO_XO_ERROR = { + EHOSTUNREACH: errors.serverUnreachable, + HOST_OFFLINE: ([ host ], getId) => errors.hostOffline({ host: getId(host) }), + NO_HOSTS_AVAILABLE: errors.noHostsAvailable, + NOT_SUPPORTED_DURING_UPGRADE: errors.notSupportedDuringUpgrade, + OPERATION_BLOCKED: ([ ref, code ], getId) => errors.operationBlocked({ objectId: getId(ref), code }), + PATCH_PRECHECK_FAILED_ISO_MOUNTED: ([ patch ]) => errors.patchPrecheck({ errorType: 'isoMounted', patch }), + PIF_VLAN_EXISTS: ([ pif ], getId) => errors.objectAlreadyExists({ objectId: getId(pif), objectType: 'PIF' }), + SESSION_AUTHENTICATION_FAILED: errors.authenticationFailed, + VDI_IN_USE: ([ vdi, operation ], getId) => errors.vdiInUse({ vdi: getId(vdi), operation }), + 
VM_BAD_POWER_STATE: ([ vm, expected, actual ], getId) => errors.vmBadPowerState({ vm: getId(vm), expected, actual }), + VM_IS_TEMPLATE: errors.vmIsTemplate, + VM_LACKS_FEATURE: ([ vm ], getId) => errors.vmLacksFeature({ vm: getId(vm) }), + VM_LACKS_FEATURE_SHUTDOWN: ([ vm ], getId) => errors.vmLacksFeature({ vm: getId(vm), feature: 'shutdown' }), + VM_MISSING_PV_DRIVERS: ([ vm ], getId) => errors.vmMissingPvDrivers({ vm: getId(vm) }), +} + +const hasPermission = (user, permission) => ( + PERMISSIONS[user.permission] >= PERMISSIONS[permission] +) + +function checkParams (method, params) { + const schema = method.params + if (!schema) { + return + } + + const result = schemaInspector.validate({ + type: 'object', + properties: schema, + }, params) + + if (!result.valid) { + throw errors.invalidParameters(result.error) + } +} + +function checkPermission (method) { + /* jshint validthis: true */ + + const {permission} = method + + // No requirement. + if (permission === undefined) { + return + } + + const {user} = this + if (!user) { + throw errors.unauthorized() + } + + // The only requirement is login. + if (!permission) { + return + } + + if (!hasPermission(user, permission)) { + throw errors.unauthorized() + } +} + +function resolveParams (method, params) { + const resolve = method.resolve + if (!resolve) { + return params + } + + const {user} = this + if (!user) { + throw errors.unauthorized() + } + + const userId = user.id + + // Do not alter the original object. + params = { ...params } + + const permissions = [] + forEach(resolve, ([param, types, permission = 'administrate'], key) => { + const id = params[param] + if (id === undefined) { + return + } + + const object = this.getObject(id, types) + + // This parameter has been handled, remove it. + delete params[param] + + // Register this new value. 
+ params[key] = object + + // Permission default to 'administrate' but can be set to a falsy + // value (except null or undefined which trigger the default + // value) to simply do a resolve without checking any permissions. + if (permission) { + permissions.push([ object.id, permission ]) + } + }) + + return this.hasPermissions(userId, permissions).then(success => { + if (success) { + return params + } + + throw errors.unauthorized() + }) +} + +// ------------------------------------------------------------------- + +const removeSensitiveParams = (value, name) => { + if (name === 'password' && typeof value === 'string') { + return '* obfuscated *' + } + + if (typeof value !== 'object' || value === null) { + return value + } + + return isArray(value) + ? map(value, removeSensitiveParams) + : mapValues(value, removeSensitiveParams) +} + +// =================================================================== + +export default class Api { + constructor (xo) { + this._logger = null + this._methods = createRawObject() + this._xo = xo + + this.addApiMethods(methods) + xo.on('start', async () => { + this._logger = await xo.getLogger('api') + }) + } + + get apiMethods () { + return this._methods + } + + addApiMethod (name, method) { + const methods = this._methods + + if (name in methods) { + throw new Error(`API method ${name} already exists`) + } + + methods[name] = method + + let remove = () => { + delete methods[name] + remove = noop + } + return () => remove() + } + + addApiMethods (methods) { + let base = '' + const removes = [] + + const addMethod = (method, name) => { + name = base + name + + if (isFunction(method)) { + removes.push(this.addApiMethod(name, method)) + return + } + + const oldBase = base + base = name + '.' + forEach(method, addMethod) + base = oldBase + } + + try { + forEach(methods, addMethod) + } catch (error) { + // Remove all added methods. 
+ forEach(removes, remove => remove()) + + // Forward the error + throw error + } + + let remove = () => { + forEach(removes, remove => remove()) + remove = noop + } + return remove + } + + async callApiMethod (session, name, params = {}) { + const startTime = Date.now() + + const method = this._methods[name] + if (!method) { + throw new MethodNotFound(name) + } + + // FIXME: it can cause issues if there any property assignments in + // XO methods called from the API. + const context = Object.create(this._xo, { + api: { // Used by system.*(). + value: this, + }, + session: { + value: session, + }, + }) + + // Fetch and inject the current user. + const userId = session.get('user_id', undefined) + context.user = userId && await this._xo.getUser(userId) + const userName = context.user + ? context.user.email + : '(unknown user)' + + try { + await checkPermission.call(context, method) + + // API methods are in a namespace. + // Some methods use the namespace or an id parameter like: + // + // vm.detachPci vm= + // vm.ejectCd id= + // + // The goal here is to standardize the calls by always providing + // an id parameter when possible to simplify calls to the API. + if (params != null && params.id === undefined) { + const namespace = name.slice(0, name.indexOf('.')) + const id = params[namespace] + if (typeof id === 'string') { + params.id = id + } + } + + checkParams.call(context, method, params) + + const resolvedParams = await resolveParams.call(context, method, params) + + let result = await method.call(context, resolvedParams) + + // If nothing was returned, consider this operation a success + // and return true. + if (result === undefined) { + result = true + } + + debug( + '%s | %s(...) 
[%s] ==> %s', + userName, + name, + ms(Date.now() - startTime), + kindOf(result) + ) + + return result + } catch (error) { + const data = { + userId, + method: name, + params: removeSensitiveParams(params), + duration: Date.now() - startTime, + error: serializeError(error), + } + const message = `${userName} | ${name}(${JSON.stringify(params)}) [${ms(Date.now() - startTime)}] =!> ${error}` + + this._logger.error(message, data) + + if (this._xo._config.verboseLogsOnErrors) { + debug(message) + + const stack = error && error.stack + if (stack) { + console.error(stack) + } + } else { + debug( + '%s | %s(...) [%s] =!> %s', + userName, + name, + ms(Date.now() - startTime), + error + ) + } + + const xoError = XAPI_ERROR_TO_XO_ERROR[error.code] + if (xoError) { + throw xoError(error.params, ref => { + try { + return this._xo.getObject(ref).id + } catch (e) { + return ref + } + }) + } + + throw error + } + } +} diff --git a/packages/xo-server/src/xo-mixins/authentication.js b/packages/xo-server/src/xo-mixins/authentication.js new file mode 100644 index 000000000..ea276fe0f --- /dev/null +++ b/packages/xo-server/src/xo-mixins/authentication.js @@ -0,0 +1,203 @@ +import ms from 'ms' +import { noSuchObject } from 'xo-common/api-errors' +import { ignoreErrors } from 'promise-toolbox' + +import Token, { Tokens } from '../models/token' +import { + createRawObject, + forEach, + generateToken, +} from '../utils' + +// =================================================================== + +const noSuchAuthenticationToken = id => + noSuchObject(id, 'authenticationToken') + +const ONE_MONTH = 1e3 * 60 * 60 * 24 * 30 + +export default class { + constructor (xo) { + this._xo = xo + + // Store last failures by user to throttle tries (slow bruteforce + // attacks). + this._failures = createRawObject() + + this._providers = new Set() + + // Creates persistent collections. 
+ const tokensDb = this._tokens = new Tokens({ + connection: xo._redis, + prefix: 'xo:token', + indexes: ['user_id'], + }) + + // Password authentication provider. + this.registerAuthenticationProvider(async ({ + username, + password, + }) => { + if (username === undefined || password === undefined) { + return + } + + const user = await xo.getUserByName(username, true) + if (user && await xo.checkUserPassword(user.id, password)) { + return user.id + } + }) + + // Token authentication provider. + this.registerAuthenticationProvider(async ({ + token: tokenId, + }) => { + if (!tokenId) { + return + } + + try { + return (await xo.getAuthenticationToken(tokenId)).user_id + } catch (error) {} + }) + + xo.on('clean', async () => { + const tokens = await tokensDb.get() + const toRemove = [] + const now = Date.now() + forEach(tokens, ({ expiration, id }) => { + if (!expiration || expiration < now) { + toRemove.push(id) + } + }) + await tokensDb.remove(toRemove) + return tokensDb.rebuildIndexes() + }) + + xo.on('start', () => { + xo.addConfigManager('authTokens', + () => tokensDb.get(), + tokens => tokensDb.update(tokens) + ) + }) + } + + registerAuthenticationProvider (provider) { + return this._providers.add(provider) + } + + unregisterAuthenticationProvider (provider) { + return this._providers.delete(provider) + } + + async _authenticateUser (credentials) { + for (const provider of this._providers) { + try { + // A provider can return: + // - `null` if the user could not be authenticated + // - the identifier of the authenticated user + // - an object with a property `username` containing the name + // of the authenticated user + const result = await provider(credentials) + + // No match. + if (!result) { + continue + } + + return result.username + ? 
await this._xo.registerUser(undefined, result.username) + : await this._xo.getUser(result) + } catch (error) { + // DEPRECATED: Authentication providers may just throw `null` + // to indicate they could not authenticate the user without + // any special errors. + if (error) console.error(error.stack || error) + } + } + + return false + } + + async authenticateUser (credentials) { + // TODO: remove when email has been replaced by username. + if (credentials.email) { + credentials.username = credentials.email + } else if (credentials.username) { + credentials.email = credentials.username + } + + const failures = this._failures + + const { username } = credentials + const now = Date.now() + let lastFailure + if ( + username && + (lastFailure = failures[username]) && + (lastFailure + 2e3) > now + ) { + throw new Error('too fast authentication tries') + } + + const user = await this._authenticateUser(credentials) + if (user) { + delete failures[username] + } else { + failures[username] = now + } + + return user + } + + // ----------------------------------------------------------------- + + async createAuthenticationToken ({ + expiresIn = ONE_MONTH, + userId, + }) { + const token = new Token({ + id: await generateToken(), + user_id: userId, + expiration: Date.now() + ( + typeof expiresIn === 'string' + ? ms(expiresIn) + : expiresIn + ), + }) + + await this._tokens.add(token) + + // TODO: use plain properties directly. 
+ return token.properties + } + + async deleteAuthenticationToken (id) { + if (!await this._tokens.remove(id)) { + throw noSuchAuthenticationToken(id) + } + } + + async getAuthenticationToken (id) { + let token = await this._tokens.first(id) + if (!token) { + throw noSuchAuthenticationToken(id) + } + + token = token.properties + + if (!( + token.expiration > Date.now() + )) { + this._tokens.remove(id)::ignoreErrors() + + throw noSuchAuthenticationToken(id) + } + + return token + } + + async getAuthenticationTokensForUser (userId) { + return this._tokens.get({ user_id: userId }) + } +} diff --git a/packages/xo-server/src/xo-mixins/backups.js b/packages/xo-server/src/xo-mixins/backups.js new file mode 100644 index 000000000..695233c40 --- /dev/null +++ b/packages/xo-server/src/xo-mixins/backups.js @@ -0,0 +1,1114 @@ +import deferrable from 'golike-defer' +import escapeStringRegexp from 'escape-string-regexp' +import eventToPromise from 'event-to-promise' +import execa from 'execa' +import splitLines from 'split-lines' +import { CancelToken, ignoreErrors } from 'promise-toolbox' +import { createParser as createPairsParser } from 'parse-pairs' +import { createReadStream, readdir, stat } from 'fs' +import { satisfies as versionSatisfies } from 'semver' +import { utcFormat } from 'd3-time-format' +import { + basename, + dirname, +} from 'path' +import { + endsWith, + filter, + find, + findIndex, + includes, + once, + range, + sortBy, + startsWith, + trim, +} from 'lodash' + +import createSizeStream from '../size-stream' +import vhdMerge, { chainVhd } from '../vhd-merge' +import xapiObjectToXo from '../xapi-object-to-xo' +import { lvs, pvs } from '../lvm' +import { + asyncMap, + forEach, + getFirstPropertyName, + mapFilter, + mapToArray, + pFinally, + pFromCallback, + pSettle, + resolveSubpath, + safeDateFormat, + safeDateParse, + tmpDir, +} from '../utils' + +// =================================================================== + +const DELTA_BACKUP_EXT = '.json' +const 
DELTA_BACKUP_EXT_LENGTH = DELTA_BACKUP_EXT.length +const TAG_SOURCE_VM = 'xo:source_vm' +const TAG_EXPORT_TIME = 'xo:export_time' + +const shortDate = utcFormat('%Y-%m-%d') + +// Test if a file is a vdi backup. (full or delta) +const isVdiBackup = name => /^\d+T\d+Z_(?:full|delta)\.vhd$/.test(name) + +// Test if a file is a delta/full vdi backup. +const isDeltaVdiBackup = name => /^\d+T\d+Z_delta\.vhd$/.test(name) +const isFullVdiBackup = name => /^\d+T\d+Z_full\.vhd$/.test(name) + +const toTimestamp = date => date && Math.round(date.getTime() / 1000) + +const parseVmBackupPath = name => { + const base = basename(name) + let baseMatches + + baseMatches = /^([^_]+)_([^_]+)_(.+)\.xva$/.exec(base) + if (baseMatches) { + return { + datetime: toTimestamp(safeDateParse(baseMatches[1])), + id: name, + name: baseMatches[3], + tag: baseMatches[2], + type: 'xva', + } + } + + let dirMatches + if ( + (baseMatches = /^([^_]+)_(.+)\.json$/.exec(base)) && + (dirMatches = /^vm_delta_([^_]+)_(.+)$/.exec(basename(dirname(name)))) + ) { + return { + datetime: toTimestamp(safeDateParse(baseMatches[1])), + id: name, + name: baseMatches[2], + tag: dirMatches[1], + type: 'delta', + uuid: dirMatches[2], + } + } + + throw new Error('invalid VM backup filename') +} + +// Get the timestamp of a vdi backup. (full or delta) +const getVdiTimestamp = name => { + const arr = /^(\d+T\d+Z)_(?:full|delta)\.vhd$/.exec(name) + return arr[1] +} + +const getDeltaBackupNameWithoutExt = name => name.slice(0, -DELTA_BACKUP_EXT_LENGTH) +const isDeltaBackup = name => endsWith(name, DELTA_BACKUP_EXT) + +// Checksums have been corrupted between 5.2.6 and 5.2.7. +// +// For a short period of time, bad checksums will be regenerated +// instead of rejected. +// +// TODO: restore when enough time has passed (a week/a month). 
+async function checkFileIntegrity (handler, name) { + await handler.refreshChecksum(name) + // let stream + // + // try { + // stream = await handler.createReadStream(name, { checksum: true }) + // } catch (error) { + // if (error.code === 'ENOENT') { + // return + // } + // + // throw error + // } + // + // stream.resume() + // await eventToPromise(stream, 'finish') +} + +// ------------------------------------------------------------------- + +const listPartitions = (() => { + const IGNORED = {} + forEach([ + // https://github.com/jhermsmeier/node-mbr/blob/master/lib/partition.js#L38 + 0x05, 0x0F, 0x85, 0x15, 0x91, 0x9B, 0x5E, 0x5F, 0xCF, 0xD5, 0xC5, + + 0x82, // swap + ], type => { + IGNORED[type] = true + }) + + const TYPES = { + 0x7: 'NTFS', + 0x83: 'linux', + 0xc: 'FAT', + } + + const parseLine = createPairsParser({ + keyTransform: key => key === 'UUID' + ? 'id' + : key.toLowerCase(), + valueTransform: (value, key) => key === 'start' || key === 'size' + ? +value + : key === 'type' + ? 
TYPES[+value] || value + : value, + }) + + return device => execa.stdout('partx', [ + '--bytes', + '--output=NR,START,SIZE,NAME,UUID,TYPE', + '--pairs', + device.path, + ]).then(stdout => mapFilter(splitLines(stdout), line => { + const partition = parseLine(line) + const { type } = partition + if (type != null && !IGNORED[+type]) { + return partition + } + })) +})() + +// handle LVM logical volumes automatically +const listPartitions2 = device => listPartitions(device).then(partitions => { + const partitions2 = [] + const promises = [] + forEach(partitions, partition => { + if (+partition.type === 0x8e) { + promises.push(mountLvmPv(device, partition).then(device => { + const promise = listLvmLvs(device).then(lvs => { + forEach(lvs, lv => { + partitions2.push({ + name: lv.lv_name, + size: +lv.lv_size, + id: `${partition.id}/${lv.vg_name}/${lv.lv_name}`, + }) + }) + }) + promise::pFinally(device.unmount) + return promise + })) + } else { + partitions2.push(partition) + } + }) + return Promise.all(promises).then(() => partitions2) +}) + +const mountPartition = (device, partitionId) => Promise.all([ + partitionId != null && listPartitions(device), + tmpDir(), +]).then(([ partitions, path ]) => { + const options = [ + 'loop', + 'ro', + ] + + if (partitions) { + const partition = find(partitions, { id: partitionId }) + + const { start } = partition + if (start != null) { + options.push(`offset=${start * 512}`) + } + } + + const mount = options => execa('mount', [ + `--options=${options.join(',')}`, + `--source=${device.path}`, + `--target=${path}`, + ]) + + // `norecovery` option is used for ext3/ext4/xfs, if it fails it + // might be another fs, try without + return mount([ ...options, 'norecovery' ]).catch(() => + mount(options) + ).then(() => ({ + path, + unmount: once(() => execa('umount', [ '--lazy', path ])), + }), error => { + console.log(error) + + throw error + }) +}) + +// handle LVM logical volumes automatically +const mountPartition2 = (device, partitionId) 
=> { + if ( + partitionId == null || + !includes(partitionId, '/') + ) { + return mountPartition(device, partitionId) + } + + const [ pvId, vgName, lvName ] = partitionId.split('/') + + return listPartitions(device).then(partitions => + find(partitions, { id: pvId }) + ).then(pvId => mountLvmPv(device, pvId)).then(device1 => + execa('vgchange', [ '-ay', vgName ]).then(() => + lvs([ 'lv_name', 'lv_path' ], vgName).then(lvs => + find(lvs, { lv_name: lvName }).lv_path + ) + ).then(path => + mountPartition({ path }).then(device2 => ({ + ...device2, + unmount: () => device2.unmount().then(device1.unmount), + })) + ).catch(error => device1.unmount().then(() => { + throw error + })) + ) +} + +// ------------------------------------------------------------------- + +const listLvmLvs = device => pvs([ + 'lv_name', + 'lv_path', + 'lv_size', + 'vg_name', +], device.path).then(pvs => filter(pvs, 'lv_name')) + +const mountLvmPv = (device, partition) => { + const args = [] + if (partition) { + args.push('-o', partition.start * 512) + } + args.push( + '--show', + '-f', + device.path + ) + + return execa.stdout('losetup', args).then(stdout => { + const path = trim(stdout) + return { + path, + unmount: once(() => Promise.all([ + execa('losetup', [ '-d', path ]), + pvs('vg_name', path).then(vgNames => execa('vgchange', [ + '-an', + ...vgNames, + ])), + ])), + } + }) +} + +// =================================================================== + +export default class { + constructor (xo) { + this._xo = xo + + // clean any LVM volumes that might have not been properly + // unmounted + xo.on('start', () => Promise.all([ + execa('losetup', [ '-D' ]), + execa('vgchange', [ '-an' ]), + ]).then(() => + execa('pvscan', [ '--cache' ]) + )) + } + + async listRemoteBackups (remoteId) { + const handler = await this._xo.getRemoteHandler(remoteId) + + // List backups. 
(No delta) + const backupFilter = file => endsWith(file, '.xva') + + const files = await handler.list() + const backups = filter(files, backupFilter) + + // List delta backups. + const deltaDirs = filter(files, file => startsWith(file, 'vm_delta_')) + + for (const deltaDir of deltaDirs) { + const files = await handler.list(deltaDir) + const deltaBackups = filter(files, isDeltaBackup) + + backups.push(...mapToArray( + deltaBackups, + deltaBackup => { + return `${deltaDir}/${getDeltaBackupNameWithoutExt(deltaBackup)}` + } + )) + } + + return backups + } + + async listVmBackups (remoteId) { + const handler = await this._xo.getRemoteHandler(remoteId) + + const backups = [] + + await asyncMap(handler.list(), entry => { + if (endsWith(entry, '.xva')) { + backups.push(parseVmBackupPath(entry)) + } else if (startsWith(entry, 'vm_delta_')) { + return handler.list(entry).then(children => + asyncMap(children, child => { + if (endsWith(child, '.json')) { + const path = `${entry}/${child}` + + const record = parseVmBackupPath(path) + backups.push(record) + + return handler.readFile(path).then(data => { + record.disks = mapToArray(JSON.parse(data).vdis, vdi => ({ + id: `${entry}/${vdi.xoPath}`, + name: vdi.name_label, + uuid: vdi.uuid, + })) + }) + } + }) + ) + } + })::ignoreErrors() + + return backups + } + + async importVmBackup (remoteId, file, sr) { + const handler = await this._xo.getRemoteHandler(remoteId) + const stream = await handler.createReadStream(file) + const xapi = this._xo.getXapi(sr) + + const vm = await xapi.importVm(stream, { srId: sr._xapiId }) + + const { datetime } = parseVmBackupPath(file) + await Promise.all([ + xapi.addTag(vm.$id, 'restored from backup'), + xapi.editVm(vm.$id, { + name_label: `${vm.name_label} (${shortDate(datetime * 1e3)})`, + }), + ]) + + return xapiObjectToXo(vm).id + } + + // ----------------------------------------------------------------- + + @deferrable + async deltaCopyVm ($defer, srcVm, targetSr, force = false, retention = 1) { 
+ const transferStart = Date.now() + const srcXapi = this._xo.getXapi(srcVm) + const targetXapi = this._xo.getXapi(targetSr) + + // Get Xen objects from XO objects. + const { uuid } = srcVm = srcXapi.getObject(srcVm._xapiId) + targetSr = targetXapi.getObject(targetSr._xapiId) + + // 1. Find the local base for this SR (if any). + const TAG_LAST_BASE_DELTA = `xo:base_delta:${targetSr.uuid}` + const localBaseUuid = (id => { + if (id != null) { + const base = srcXapi.getObject(id, null) + return base && base.uuid + } + })(srcVm.other_config[TAG_LAST_BASE_DELTA]) + + // 2. Copy. + let size = 0 + const dstVm = await (async () => { + const { cancel, token } = CancelToken.source() + const delta = await srcXapi.exportDeltaVm(token, srcVm.$id, localBaseUuid, { + bypassVdiChainsCheck: force, + snapshotNameLabel: `XO_DELTA_EXPORT: ${targetSr.name_label} (${targetSr.uuid})`, + }) + $defer.onFailure(() => srcXapi.deleteVm(delta.vm.uuid)) + $defer.onFailure(cancel) + + const date = safeDateFormat(Date.now()) + delta.vm.name_label += ` (${date})` + delta.vm.other_config[TAG_SOURCE_VM] = uuid + delta.vm.other_config[TAG_EXPORT_TIME] = date + delta.vm.tags = [ ...delta.vm.tags, 'Continuous Replication' ] + + const { streams } = delta + forEach(delta.vdis, (vdi, key) => { + const id = `${key}.vhd` + const stream = streams[id] + const sizeStream = createSizeStream().once('finish', () => { + size += sizeStream.size + }) + sizeStream.task = stream.task + streams[id] = stream.pipe(sizeStream) + }) + + let toRemove = filter(targetXapi.objects.all, obj => + obj.$type === 'vm' && + obj.other_config[TAG_SOURCE_VM] === uuid + ) + const { length } = toRemove + const deleteBase = length === 0 // old replications are not captured in toRemove + const n = length - retention + 1 // take into account the future copy + toRemove = n > 0 + ? 
sortBy(toRemove, _ => _.other_config[TAG_EXPORT_TIME]).slice(0, n) + : undefined + + const promise = targetXapi.importDeltaVm( + delta, + { + deleteBase, + srId: targetSr.$id, + } + ) + + // Once done, (asynchronously) remove the (now obsolete) local + // base. + if (localBaseUuid) { + promise.then(() => srcXapi.deleteVm(localBaseUuid))::ignoreErrors() + } + + if (toRemove !== undefined) { + promise.then(() => asyncMap(toRemove, _ => + targetXapi.deleteVm(_.$id)) + )::ignoreErrors() + } + + // (Asynchronously) Identify snapshot as future base. + promise.then(() => { + return srcXapi._updateObjectMapProperty(srcVm, 'other_config', { + [TAG_LAST_BASE_DELTA]: delta.vm.uuid, + }) + })::ignoreErrors() + + return promise + })() + + return { + // 5. Return the identifier of the new XO VM object. + id: xapiObjectToXo(dstVm).id, + transferDuration: Date.now() - transferStart, + transferSize: size, + } + } + + // ----------------------------------------------------------------- + + // TODO: The other backup methods must use this function ! + // Prerequisite: The backups array must be ordered. (old to new backups) + async _removeOldBackups (backups, handler, dir, n) { + if (n <= 0) { + return + } + + const getPath = (file, dir) => dir ? `${dir}/${file}` : file + + await asyncMap(backups.slice(0, n), backup => + handler.unlink(getPath(backup, dir)) + ) + } + + // ----------------------------------------------------------------- + + async _listVdiBackups (handler, dir) { + let files + + try { + files = await handler.list(dir) + } catch (error) { + if (error.code === 'ENOENT') { + files = [] + } else { + throw error + } + } + + const backups = sortBy(filter(files, fileName => isVdiBackup(fileName))) + let i + + // Avoid unstable state: No full vdi found to the beginning of array. 
(base) + for (i = 0; i < backups.length && isDeltaVdiBackup(backups[i]); i++); + await this._removeOldBackups(backups, handler, dir, i) + + return backups.slice(i) + } + + // fix the parent UUID and filename in delta files after download from xapi or backup compression + async _chainDeltaVdiBackups ({handler, dir}) { + const backups = await this._listVdiBackups(handler, dir) + for (let i = 1; i < backups.length; i++) { + const childPath = dir + '/' + backups[i] + const modified = await chainVhd(handler, dir + '/' + backups[i - 1], handler, childPath) + if (modified) { + await handler.refreshChecksum(childPath) + } + } + } + + async _mergeDeltaVdiBackups ({handler, dir, retention}) { + const backups = await this._listVdiBackups(handler, dir) + const i = backups.length - retention + + // No merge. + if (i <= 0) { + return + } + + const timestamp = getVdiTimestamp(backups[i]) + const newFullBackup = `${dir}/${timestamp}_full.vhd` + + await checkFileIntegrity(handler, `${dir}/${backups[i]}`) + + let j = i + for (; j > 0 && isDeltaVdiBackup(backups[j]); j--); + const fullBackupId = j + + // Remove old backups before the most recent full. + await asyncMap(range(0, j), i => + handler.unlink(`${dir}/${backups[i]}`) + ) + + const parent = `${dir}/${backups[fullBackupId]}` + + let mergedDataSize = 0 + for (j = fullBackupId + 1; j <= i; j++) { + const backup = `${dir}/${backups[j]}` + + try { + await checkFileIntegrity(handler, backup) + mergedDataSize += await vhdMerge(handler, parent, handler, backup) + } catch (e) { + console.error('Unable to use vhd-util.', e) + throw e + } + + await handler.unlink(backup) + } + + // Rename the first old full backup to the new full backup. + await handler.rename(parent, newFullBackup) + + return mergedDataSize + } + + async _listDeltaVdiDependencies (handler, filePath) { + const dir = dirname(filePath) + const filename = basename(filePath) + const backups = await this._listVdiBackups(handler, dir) + + // Search file. 
(delta or full backup) + const i = findIndex(backups, backup => + getVdiTimestamp(backup) === getVdiTimestamp(filename) + ) + + if (i === -1) { + throw new Error('VDI to import not found in this remote.') + } + + // Search full backup. + let j + + for (j = i; j >= 0 && isDeltaVdiBackup(backups[j]); j--); + + if (j === -1) { + throw new Error(`Unable to found full vdi backup of: ${filePath}`) + } + + return backups.slice(j, i + 1) + } + + // ----------------------------------------------------------------- + + async _listDeltaVmBackups (handler, dir) { + const files = await handler.list(dir) + return sortBy(filter(files, isDeltaBackup)) + } + + async _saveDeltaVdiBackup (xapi, { vdiParent, isFull, handler, stream, dir, retention }) { + const backupDirectory = `vdi_${vdiParent.uuid}` + dir = `${dir}/${backupDirectory}` + + const date = safeDateFormat(new Date()) + + // For old versions: remove old bases if exists. + const bases = sortBy( + filter(vdiParent.$snapshots, { name_label: 'XO_DELTA_BASE_VDI_SNAPSHOT' }), + base => base.snapshot_time + ) + forEach(bases, base => { xapi.deleteVdi(base.$id)::ignoreErrors() }) + + // Export full or delta backup. + const vdiFilename = `${date}_${isFull ? 'full' : 'delta'}.vhd` + const backupFullPath = `${dir}/${vdiFilename}` + + const sizeStream = createSizeStream() + + try { + const targetStream = await handler.createOutputStream(backupFullPath, { + // FIXME: Checksum is not computed for full vdi backups. + // The problem is in the merge case, a delta merged in a full vdi + // backup forces us to browse the resulting file => + // Significant transfer time on the network ! + checksum: !isFull, + }) + + stream.on('error', error => targetStream.emit('error', error)) + + await Promise.all([ + eventToPromise( + stream + .pipe(sizeStream) + .pipe(targetStream), + 'finish' + ), + stream.task, + ]) + } catch (error) { + // Remove new backup. (corrupt). 
+ await handler.unlink(backupFullPath)::ignoreErrors() + + throw error + } + + return { + // Returns relative path. + path: `${backupDirectory}/${vdiFilename}`, + size: sizeStream.size, + } + } + + async _removeOldDeltaVmBackups (xapi, { handler, dir, retention }) { + const backups = await this._listDeltaVmBackups(handler, dir) + const nOldBackups = backups.length - retention + + if (nOldBackups > 0) { + await asyncMap(backups.slice(0, nOldBackups), backup => + // Remove json file. + handler.unlink(`${dir}/${backup}`) + ) + } + } + + @deferrable + async rollingDeltaVmBackup ($defer, {vm, remoteId, tag, retention}) { + const transferStart = Date.now() + const handler = await this._xo.getRemoteHandler(remoteId) + const xapi = this._xo.getXapi(vm) + + vm = xapi.getObject(vm._xapiId) + + // Get most recent base. + const bases = sortBy( + filter(vm.$snapshots, { name_label: `XO_DELTA_BASE_VM_SNAPSHOT_${tag}` }), + base => base.snapshot_time + ) + const baseVm = bases.pop() + forEach(bases, base => { xapi.deleteVm(base.$id)::ignoreErrors() }) + + // Check backup dirs. + const dir = `vm_delta_${tag}_${vm.uuid}` + const fullVdisRequired = [] + + await Promise.all( + mapToArray(vm.$VBDs, async vbd => { + if (!vbd.VDI || vbd.type !== 'Disk') { + return + } + + const vdi = vbd.$VDI + const backups = await this._listVdiBackups(handler, `${dir}/vdi_${vdi.uuid}`) + + // Force full if missing full. + if (!find(backups, isFullVdiBackup)) { + fullVdisRequired.push(vdi.$id) + } + }) + ) + + // Export... + const { cancel, token } = CancelToken.source() + const delta = await xapi.exportDeltaVm(token, vm.$id, baseVm && baseVm.$id, { + snapshotNameLabel: `XO_DELTA_BASE_VM_SNAPSHOT_${tag}`, + fullVdisRequired, + disableBaseTags: true, + }) + $defer.onFailure(() => xapi.deleteVm(delta.vm.uuid)) + $defer.onFailure(cancel) + + // Save vdis. 
+ const vdiBackups = await pSettle( + mapToArray(delta.vdis, async (vdi, key) => { + const vdiParent = xapi.getObject(vdi.snapshot_of) + + return this._saveDeltaVdiBackup(xapi, { + vdiParent, + isFull: !baseVm || find(fullVdisRequired, id => vdiParent.$id === id), + handler, + stream: delta.streams[`${key}.vhd`], + dir, + retention, + }) + .then(data => { + delta.vdis[key] = { + ...delta.vdis[key], + xoPath: data.path, + } + + return data + }) + }) + ) + + const fulFilledVdiBackups = [] + let error + + // One or many vdi backups have failed. + for (const vdiBackup of vdiBackups) { + if (vdiBackup.isFulfilled()) { + fulFilledVdiBackups.push(vdiBackup) + } else { + error = vdiBackup.reason() + console.error('Rejected backup:', error) + } + } + + $defer.onFailure(() => asyncMap(fulFilledVdiBackups, vdiBackup => + handler.unlink(`${dir}/${vdiBackup.value()}`)::ignoreErrors() + )) + + if (error) { + throw error + } + + const date = safeDateFormat(new Date()) + const backupFormat = `${date}_${vm.name_label}` + const infoPath = `${dir}/${backupFormat}${DELTA_BACKUP_EXT}` + + $defer.onFailure(() => handler.unlink(infoPath)) + + // Write Metadata. + await handler.outputFile(infoPath, JSON.stringify(delta, null, 2)) + + let dataSize = 0 + let mergedDataSize = 0 + const mergeStart = Date.now() + + // Here we have a completed backup. We can merge old vdis. + await Promise.all( + mapToArray(vdiBackups, vdiBackup => { + const backupName = vdiBackup.value().path + const backupDirectory = backupName.slice(0, backupName.lastIndexOf('/')) + const backupDir = `${dir}/${backupDirectory}` + dataSize += vdiBackup.value().size + + return this._mergeDeltaVdiBackups({ handler, dir: backupDir, retention }) + .then(size => { + this._chainDeltaVdiBackups({ handler, dir: backupDir }) + + if (size !== undefined) { + mergedDataSize += size + } + }) + }) + ) + + const mergeDuration = Date.now() - mergeStart + + // Delete old backups. 
+ await this._removeOldDeltaVmBackups(xapi, { vm, handler, dir, retention }) + + if (baseVm) { + xapi.deleteVm(baseVm.$id)::ignoreErrors() + } + + return { + // Returns relative path. + path: `${dir}/${backupFormat}`, + mergeDuration: mergedDataSize !== 0 ? mergeDuration : undefined, + mergeSize: mergedDataSize !== 0 ? mergedDataSize : undefined, + transferDuration: Date.now() - transferStart - mergeDuration, + transferSize: dataSize, + } + } + + async importDeltaVmBackup ({sr, remoteId, filePath, mapVdisSrs = {}}) { + filePath = `${filePath}${DELTA_BACKUP_EXT}` + const { datetime } = parseVmBackupPath(filePath) + + const handler = await this._xo.getRemoteHandler(remoteId) + const xapi = this._xo.getXapi(sr || mapVdisSrs[getFirstPropertyName(mapVdisSrs)]) + + const delta = JSON.parse(await handler.readFile(filePath)) + let vm + const { version = '0.0.0' } = delta + + if (versionSatisfies(version, '^1')) { + const basePath = dirname(filePath) + const streams = delta.streams = {} + + await Promise.all( + mapToArray( + delta.vdis, + async (vdi, id) => { + const vdisFolder = `${basePath}/${dirname(vdi.xoPath)}` + const backups = await this._listDeltaVdiDependencies(handler, `${basePath}/${vdi.xoPath}`) + + streams[`${id}.vhd`] = await Promise.all(mapToArray(backups, async backup => + handler.createReadStream(`${vdisFolder}/${backup}`, { checksum: true, ignoreMissingChecksum: true }) + )) + } + ) + ) + + delta.vm.name_label += ` (${shortDate(datetime * 1e3)})` + delta.vm.tags.push('restored from backup') + + vm = await xapi.importDeltaVm(delta, { + disableStartAfterImport: false, + srId: sr !== undefined && sr._xapiId, + mapVdisSrs, + }) + } else { + throw new Error(`Unsupported delta backup version: ${version}`) + } + + return xapiObjectToXo(vm).id + } + + // ----------------------------------------------------------------- + + async backupVm ({vm, remoteId, file, compress}) { + const handler = await this._xo.getRemoteHandler(remoteId) + return this._backupVm(vm, 
handler, file, {compress}) + } + + @deferrable + async _backupVm ($defer, vm, handler, file, {compress}) { + const targetStream = await handler.createOutputStream(file) + $defer.onFailure.call(handler, 'unlink', file) + $defer.onFailure.call(targetStream, 'close') + + const promise = eventToPromise(targetStream, 'finish') + + const sourceStream = await this._xo.getXapi(vm).exportVm(vm._xapiId, { + compress, + }) + + const sizeStream = createSizeStream() + + sourceStream + .pipe(sizeStream) + .pipe(targetStream) + + await promise + + return { + transferSize: sizeStream.size, + } + } + + async rollingBackupVm ({vm, remoteId, tag, retention, compress}) { + const transferStart = Date.now() + const handler = await this._xo.getRemoteHandler(remoteId) + + const files = await handler.list() + + const reg = new RegExp('^[^_]+_' + escapeStringRegexp(`${tag}_${vm.name_label}.xva`)) + const backups = sortBy(filter(files, (fileName) => reg.test(fileName))) + + const date = safeDateFormat(new Date()) + const file = `${date}_${tag}_${vm.name_label}.xva` + + const data = await this._backupVm(vm, handler, file, {compress}) + await this._removeOldBackups(backups, handler, undefined, backups.length - (retention - 1)) + data.transferDuration = Date.now() - transferStart + + return data + } + + async rollingSnapshotVm (vm, tag, retention) { + const xapi = this._xo.getXapi(vm) + vm = xapi.getObject(vm._xapiId) + + const reg = new RegExp('^rollingSnapshot_[^_]+_' + escapeStringRegexp(tag) + '_') + const snapshots = sortBy(filter(vm.$snapshots, snapshot => reg.test(snapshot.name_label)), 'name_label') + const date = safeDateFormat(new Date()) + + await xapi.snapshotVm(vm.$id, `rollingSnapshot_${date}_${tag}_${vm.name_label}`) + + const promises = [] + for (let surplus = snapshots.length - (retention - 1); surplus > 0; surplus--) { + const oldSnap = snapshots.shift() + promises.push(xapi.deleteVm(oldSnap.uuid)) + } + await Promise.all(promises) + } + + _removeVms (xapi, vms) { + return 
Promise.all(mapToArray(vms, vm => + // Do not consider a failure to delete an old copy as a fatal error. + xapi.deleteVm(vm.$id)::ignoreErrors() + )) + } + + async rollingDrCopyVm ({vm, sr, tag, retention, deleteOldBackupsFirst}) { + const transferStart = Date.now() + tag = 'DR_' + tag + const reg = new RegExp('^' + escapeStringRegexp(`${vm.name_label}_${tag}_`) + '[0-9]{8}T[0-9]{6}Z$') + + const targetXapi = this._xo.getXapi(sr) + sr = targetXapi.getObject(sr._xapiId) + const sourceXapi = this._xo.getXapi(vm) + vm = sourceXapi.getObject(vm._xapiId) + + const vms = {} + forEach(sr.$VDIs, vdi => { + const vbds = vdi.$VBDs + const vm = vbds && vbds[0] && vbds[0].$VM + if (vm && reg.test(vm.name_label)) { + vms[vm.$id] = vm + } + }) + + let vmsToRemove = sortBy(vms, 'name_label') + + if (retention > 1) { + vmsToRemove = vmsToRemove.slice(0, 1 - retention) + } + + if (deleteOldBackupsFirst) { + await this._removeVms(targetXapi, vmsToRemove) + } + + const copyName = `${vm.name_label}_${tag}_${safeDateFormat(new Date())}` + const data = await sourceXapi.remoteCopyVm(vm.$id, targetXapi, sr.$id, { + nameLabel: copyName, + }) + + targetXapi._updateObjectMapProperty(data.vm, 'blocked_operations', { + start: 'Start operation for this vm is blocked, clone it if you want to use it.', + }) + + await targetXapi.addTag(data.vm.$id, 'Disaster Recovery') + + if (!deleteOldBackupsFirst) { + await this._removeVms(targetXapi, vmsToRemove) + } + + return { + transferDuration: Date.now() - transferStart, + transferSize: data.size, + } + } + + // ----------------------------------------------------------------- + + _mountVhd (remoteId, vhdPath) { + return Promise.all([ + this._xo.getRemoteHandler(remoteId), + tmpDir(), + ]).then(([ handler, mountDir ]) => { + if (!handler._getRealPath) { + throw new Error(`this remote is not supported`) + } + + const remotePath = handler._getRealPath() + vhdPath = resolveSubpath(remotePath, vhdPath) + + return Promise.resolve().then(() => { + // TODO: 
  // Mount the given partition of a VHD backup (mounting the VHD first).
  //
  // Returns the partition descriptor from `mountPartition2` with its
  // `unmount` wrapped so that unmounting the partition also unmounts
  // the underlying VHD device.
  //
  // If mounting the partition fails, the VHD device is unmounted before
  // the error is re-thrown, so no mount is leaked.
  _mountPartition (remoteId, vhdPath, partitionId) {
    return this._mountVhd(remoteId, vhdPath).then(device =>
      mountPartition2(device, partitionId).then(partition => ({
        ...partition,
        unmount: () => partition.unmount().then(device.unmount),
      })).catch(error => device.unmount().then(() => {
        throw error
      }))
    )
  }
// Registry of named configuration exporters/importers with explicit
// dependencies between them (imports run in dependency order).
export default class ConfigManagement {
  constructor (app) {
    this._app = app
    this._depTree = new DepTree()
    // Null-prototype dictionary: manager ids cannot collide with
    // `Object.prototype` properties.
    this._managers = { __proto__: null }
  }

  // Register a manager under a unique id.
  //
  // `dependencies` lists ids of managers whose data must be imported
  // before this one.
  addConfigManager (id, exporter, importer, dependencies) {
    if (id in this._managers) {
      throw new Error(`${id} is already taken`)
    }

    this._depTree.add(id, dependencies)
    this._managers[id] = { exporter, importer }
  }

  // Run every exporter (in parallel) and resolve to a map of
  // id → exported data.
  exportConfig () {
    const pending = mapValues(this._managers, ({ exporter }) => exporter())
    // `obj::pAll()` is equivalent to `pAll.call(obj)`.
    return pAll.call(pending)
  }

  // Import a previously exported configuration, honoring manager
  // dependency order, then let the application clean itself up.
  async importConfig (config) {
    for (const id of this._depTree.resolve()) {
      const data = config[id]
      if (data === undefined) {
        continue
      }

      debug('importing', id)
      await this._managers[id].importer(data)
    }

    await this._app.clean()
  }
}
// Emit `event` on `this` (an EventEmitter-like object) and wait for all
// listeners' possibly-async results.
//
// Call forms:
//   emitAsync.call(emitter, event, ...args)
//   emitAsync.call(emitter, { onError }, event, ...args)
//
// Resolves to the array of listener results, in listener order. When
// `onError` is given, a listener failure is passed to it and its return
// value replaces that listener's result; otherwise the returned promise
// rejects on the first failure.
function emitAsync (event) {
  let opts
  let start = 1

  // An options object may be passed before the event name.
  if (typeof event !== 'string') {
    opts = event
    event = arguments[start++]
  }

  const args = Array.prototype.slice.call(arguments, start)
  const onError = opts != null && opts.onError

  return Promise.all(
    this.listeners(event).map(listener =>
      new Promise(resolve => {
        // Wrap in a promise so synchronous throws become rejections.
        resolve(listener.apply(this, args))
      }).catch(onError)
    )
  )
}
// Mixin providing proxy-aware HTTP requests to the application.
export default class Http {
  /**
   * @param {*} _ - unused (application instance, per mixin convention)
   * @param {object} [config] - application configuration
   * @param {string} [config.httpProxy] - proxy URL; defaults to the
   *   `http_proxy` / `HTTP_PROXY` environment variables
   */
  constructor (_, {
    httpProxy = firstDefined(
      process.env.http_proxy,
      process.env.HTTP_PROXY
    ),
  } = {}) {
    // `= {}` default: previously, omitting the config argument threw a
    // TypeError while destructuring `undefined`.
    //
    // A falsy/absent proxy means direct connections (agent undefined).
    this._proxy = httpProxy && new ProxyAgent(httpProxy)
  }

  // Perform an HTTP request through the configured proxy, if any.
  // Extra arguments are forwarded to `http-request-plus`.
  httpRequest (...args) {
    return hrp({
      agent: this._proxy,
    }, ...args)
  }
}
  // Delete the IP pool `id`.
  //
  // Before deleting, clean up every reference to it:
  // - remove the `ipPool:<id>` limit and the pool itself from all
  //   resource sets;
  // - deallocate all of its addresses from the VIFs using them.
  //
  // Throws `noSuchObject` when the pool does not exist.
  async deleteIpPool (id) {
    const store = this._store

    if (await store.has(id)) {
      await Promise.all(mapToArray(await this._xo.getAllResourceSets(), async set => {
        await this._xo.removeLimitFromResourceSet(`ipPool:${id}`, set.id)
        return this._xo.removeIpPoolFromResourceSet(id, set.id)
      }))
      // `addresses` maps address → { vifs: [...] }: keep only the vifs.
      await this._removeIpAddressesFromVifs(
        mapValues((await this.getIpPool(id)).addresses, 'vifs')
      )

      return store.del(id)
    }

    throw noSuchObject(id, 'ipPool')
  }
noSuchObject(id, 'ipPool') : error + }) + } + + async _getAddressIpPool (address, network) { + const ipPools = await this._getAllIpPools(ipPool => _isAddressInIpPool(address, network, ipPool)) + + return ipPools && ipPools[0] + } + + // Returns a map that indicates how many IPs from each IP pool the VM uses + // e.g.: { 'ipPool:abc': 3, 'ipPool:xyz': 7 } + async computeVmIpPoolsUsage (vm) { + const vifs = vm.VIFs + const ipPools = [] + for (const vifId of vifs) { + const { allowedIpv4Addresses, allowedIpv6Addresses, $network } = this._xo.getObject(vifId) + + for (const address of concat(allowedIpv4Addresses, allowedIpv6Addresses)) { + const ipPool = await this._getAddressIpPool(address, $network) + ipPool && ipPools.push(ipPool.id) + } + } + + return countBy(ipPools, ({ id }) => `ipPool:${id}`) + } + + @synchronized + allocIpAddresses (vifId, addAddresses, removeAddresses) { + const updatedIpPools = {} + const limits = {} + + const xoVif = this._xo.getObject(vifId) + const xapi = this._xo.getXapi(xoVif) + const vif = xapi.getObject(xoVif._xapiId) + + const allocAndSave = (() => { + const resourseSetId = xapi.xo.getData(vif.VM, 'resourceSet') + + return () => { + const saveIpPools = () => Promise.all(mapToArray(updatedIpPools, ipPool => this._save(ipPool))) + return resourseSetId + ? 
  // Given a map `address → vifs`, strip those addresses from the
  // `allowedIpv(4|6)Addresses` lists of each concerned VIF, update the
  // pool allocations and push the new allowed lists to XAPI.
  async _removeIpAddressesFromVifs (mapAddressVifs) {
    // Invert the mapping: vifId → [addresses to remove].
    const mapVifAddresses = {}
    forEach(mapAddressVifs, (vifs, address) => {
      forEach(vifs, vifId => {
        if (mapVifAddresses[vifId]) {
          mapVifAddresses[vifId].push(address)
        } else {
          mapVifAddresses[vifId] = [ address ]
        }
      })
    })

    // NOTE(review): `getXapi` is detached from `this._xo` and invoked
    // below without a receiver — this only works if `getXapi` is bound
    // (or does not use `this`); confirm against the Xo class.
    const { getXapi } = this._xo
    return Promise.all(mapToArray(mapVifAddresses, (addresses, vifId) => {
      let vif
      try {
        // The IP may not have been correctly deallocated from the IP pool when the VIF was deleted
        vif = this._xo.getObject(vifId)
      } catch (error) {
        return
      }
      const { allowedIpv4Addresses, allowedIpv6Addresses } = vif
      // `remove` mutates in place: afterwards the arrays hold only the
      // addresses to KEEP on the VIF.
      remove(allowedIpv4Addresses, address => includes(addresses, address))
      remove(allowedIpv6Addresses, address => includes(addresses, address))
      // NOTE(review): fire-and-forget — this promise is not awaited and
      // the *remaining* addresses are passed as the `removeAddresses`
      // argument; presumably `editVif` below re-allocates them — confirm
      // this ordering is intentional.
      this.allocIpAddresses(vifId, undefined, concat(allowedIpv4Addresses, allowedIpv6Addresses))

      return getXapi(vif).editVif(vif._xapiId, {
        ipv4Allowed: allowedIpv4Addresses,
        ipv6Allowed: allowedIpv6Addresses,
      })
    }))
  }
+ if (resourceSets) { + ipPool.resourceSets = resourceSets + } + + await this._save(ipPool) + } + + async _generateId () { + let id + do { + id = generateUnsecureToken(8) + } while (await this._store.has(id)) + return id + } + + _save (ipPool) { + ipPool = normalize(ipPool) + return this._store.put(ipPool.id, ipPool) + } +} diff --git a/packages/xo-server/src/xo-mixins/jobs.js b/packages/xo-server/src/xo-mixins/jobs.js new file mode 100644 index 000000000..4f2bea755 --- /dev/null +++ b/packages/xo-server/src/xo-mixins/jobs.js @@ -0,0 +1,86 @@ +import { assign } from 'lodash' +import { lastly } from 'promise-toolbox' +import { noSuchObject } from 'xo-common/api-errors' + +import JobExecutor from '../job-executor' +import { Jobs as JobsDb } from '../models/job' +import { mapToArray } from '../utils' + +// =================================================================== + +export default class Jobs { + constructor (xo) { + this._executor = new JobExecutor(xo) + const jobsDb = this._jobs = new JobsDb({ + connection: xo._redis, + prefix: 'xo:job', + indexes: ['user_id', 'key'], + }) + this._runningJobs = Object.create(null) + + xo.on('clean', () => jobsDb.rebuildIndexes()) + xo.on('start', () => { + xo.addConfigManager('jobs', + () => jobsDb.get(), + jobs => Promise.all(mapToArray(jobs, job => + jobsDb.save(job) + )), + [ 'users' ] + ) + }) + } + + async getAllJobs () { + return /* await */ this._jobs.get() + } + + async getJob (id) { + const job = await this._jobs.first(id) + if (!job) { + throw noSuchObject(id, 'job') + } + + return job.properties + } + + async createJob (job) { + // TODO: use plain objects + const job_ = await this._jobs.create(job) + return job_.properties + } + + async updateJob ({id, ...props}) { + const job = await this.getJob(id) + + assign(job, props) + if (job.timeout === null) { + delete job.timeout + } + + return /* await */ this._jobs.save(job) + } + + async removeJob (id) { + return /* await */ this._jobs.remove(id) + } + + _runJob 
(job) { + const { id } = job + const runningJobs = this._runningJobs + if (runningJobs[id]) { + throw new Error(`job ${id} is already running`) + } + runningJobs[id] = true + return this._executor.exec(job)::lastly(() => { + delete runningJobs[id] + }) + } + + async runJobSequence (idSequence) { + const jobs = await Promise.all(mapToArray(idSequence, id => this.getJob(id))) + + for (const job of jobs) { + await this._runJob(job) + } + } +} diff --git a/packages/xo-server/src/xo-mixins/logs/index.js b/packages/xo-server/src/xo-mixins/logs/index.js new file mode 100644 index 000000000..f2c669806 --- /dev/null +++ b/packages/xo-server/src/xo-mixins/logs/index.js @@ -0,0 +1,54 @@ +import { defer, fromEvent } from 'promise-toolbox' + +import LevelDbLogger from './loggers/leveldb' + +export default class Logs { + constructor (app) { + this._app = app + + app.on('clean', () => this._gc()) + } + + async _gc (keep = 1e4) { + const db = await this._app.getStore('logs') + + let count = 1 + const { promise, resolve } = defer() + + const cb = () => { + if (--count === 0) { + resolve() + } + } + const stream = db.createKeyStream({ + reverse: true, + }) + + const deleteEntry = key => { + ++count + db.del(key, cb) + } + + const onData = keep !== 0 + ? 
// Module-level state: last timestamp handed out and the per-millisecond
// counter used to disambiguate keys sharing that timestamp.
let lastDate = 0
let increment = 0

// Build a log key unique across calls even when several logs share the
// same millisecond: the first key for a timestamp is the bare timestamp,
// subsequent ones get an increasing `:<n>` suffix.
function generateUniqueKey (date) {
  if (date !== lastDate) {
    lastDate = date
    increment = 0
    return String(date)
  }

  return `${date}:${increment++}`
}
  // Delete one entry (or an array of entries) by id, but only when the
  // stored entry belongs to this logger's namespace.
  //
  // NOTE(review): fire-and-forget — the get/delete promises are neither
  // awaited nor returned, and rejections (e.g. a missing key) are not
  // handled; verify callers do not rely on completion or ordering.
  del (id) {
    forEach(Array.isArray(id) ? id : [id], id => {
      this._db.get(id).then(value => {
        if (value.namespace === this._namespace) {
          this._db.delSync(id, noop)
        }
      })
    })
  }
  // Register a plugin under its name (which doubles as its id).
  //
  // Looks up persisted metadata (autoload flag + saved configuration);
  // on first registration, persists `{ id, autoload: true }`.
  //
  // A plugin without a configuration schema is considered configured
  // out of the box; one with a schema is configured from the saved
  // configuration, and registration stops early when no configuration
  // has been saved yet (so it cannot be auto-loaded in that state).
  async registerPlugin (
    name,
    instance,
    configurationSchema,
    configurationPresets,
    description,
    testSchema,
    version
  ) {
    const id = name
    const plugin = this._plugins[id] = {
      configurationPresets,
      configurationSchema,
      // No schema means nothing to configure.
      configured: !configurationSchema,
      description,
      id,
      instance,
      name,
      // Optional capabilities, detected from the instance's methods.
      testable: isFunction(instance.test),
      testSchema,
      unloadable: isFunction(instance.unload),
      version,
    }

    const metadata = await this._getPluginMetadata(id)
    let autoload = true
    let configuration
    if (metadata) {
      ({
        autoload,
        configuration,
      } = metadata)
    } else {
      console.log(`[NOTICE] register plugin ${name} for the first time`)
      await this._pluginsMetadata.save({
        id,
        autoload,
      })
    }

    if (configurationSchema !== undefined) {
      if (configuration === undefined) {
        // Configurable but never configured: cannot be loaded yet.
        return
      }

      await this._configurePlugin(plugin, configuration)
    }

    if (autoload) {
      await this.loadPlugin(id)
    }
  }
+ async _configurePlugin (plugin, configuration) { + const { configurationSchema } = plugin + + if (!configurationSchema) { + throw invalidParameters('plugin not configurable') + } + + const validate = this._ajv.compile(configurationSchema) + if (!validate(configuration)) { + throw invalidParameters(validate.errors) + } + + // Sets the plugin configuration. + await plugin.instance.configure({ + // Shallow copy of the configuration object to avoid most of the + // errors when the plugin is altering the configuration object + // which is handed over to it. + ...configuration, + }) + plugin.configured = true + } + + // Validate the configuration, configure the plugin instance and + // save the new configuration. + async configurePlugin (id, configuration) { + const plugin = this._getRawPlugin(id) + + await this._configurePlugin(plugin, configuration) + + // Saves the configuration. + await this._pluginsMetadata.merge(id, { configuration }) + } + + async disablePluginAutoload (id) { + // TODO: handle case where autoload is already disabled. + + await this._pluginsMetadata.merge(id, { autoload: false }) + } + + async enablePluginAutoload (id) { + // TODO: handle case where autoload is already enabled. 
  // Run the plugin's self-test with the given data.
  //
  // Preconditions, checked in order (each violation throws
  // `invalidParameters`):
  // - the plugin exposes a `test()` method;
  // - it is currently loaded;
  // - when it declares a test schema, `data` must be non-null and
  //   validate against it.
  async testPlugin (id, data) {
    const plugin = this._getRawPlugin(id)
    if (!plugin.testable) {
      throw invalidParameters('plugin not testable')
    }
    if (!plugin.loaded) {
      throw invalidParameters('plugin not loaded')
    }

    const { testSchema } = plugin
    if (testSchema) {
      if (data == null) {
        throw invalidParameters([{
          field: 'data',
          message: 'is the wrong type',
        }])
      }

      const validate = this._ajv.compile(testSchema)
      if (!validate(data)) {
        throw invalidParameters(validate.errors)
      }
    }

    await plugin.instance.test(data)
  }
  // Build a remote handler (local FS, NFS or SMB) for `remote`, which
  // may be either a remote object or a remote id.
  //
  // A disabled remote is rejected unless `ignoreDisabled` is true (used
  // e.g. to test or clean up a remote without enabling it).
  async getRemoteHandler (remote, ignoreDisabled) {
    if (typeof remote === 'string') {
      remote = await this.getRemote(remote)
    }

    if (!(ignoreDisabled || remote.enabled)) {
      throw new Error('remote is disabled')
    }

    // Handler class, keyed by the URL scheme.
    const HANDLERS = {
      file: RemoteHandlerLocal,
      smb: RemoteHandlerSmb,
      nfs: RemoteHandlerNfs,
    }

    // FIXME: should be done in xo-remote-parser.
    const type = remote.url.split('://')[0]

    const Handler = HANDLERS[type]
    if (!Handler) {
      throw new Error('Unhandled remote type')
    }
    return new Handler(remote)
  }
+ + _updateRemote (remote, {name, url, enabled, error}) { + if (name) remote.set('name', name) + if (url) remote.set('url', url) + if (enabled !== undefined) remote.set('enabled', enabled) + if (error) { + remote.set('error', error) + } else { + remote.set('error', '') + } + } + + async removeRemote (id) { + const handler = await this.getRemoteHandler(id, true) + await handler.forget() + await this._remotes.remove(id) + } + + // TODO: Should it be private? + async syncAllRemotes () { + const remotes = await this.getAllRemotes() + forEach(remotes, remote => { + this.updateRemote(remote.id, {}) + }) + } + + // TODO: Should it be private? + async forgetAllRemotes () { + const remotes = await this.getAllRemotes() + for (const remote of remotes) { + try { + (await this.getRemoteHandler(remote, true)).forget() + } catch (_) {} + } + } + + // TODO: Should it be private? + async initRemotes () { + const remotes = await this.getAllRemotes() + if (!remotes || !remotes.length) { + await this.createRemote({name: 'default', url: 'file://var/lib/xoa-backups'}) + } + } +} diff --git a/packages/xo-server/src/xo-mixins/resource-sets.js b/packages/xo-server/src/xo-mixins/resource-sets.js new file mode 100644 index 000000000..75e5af8d2 --- /dev/null +++ b/packages/xo-server/src/xo-mixins/resource-sets.js @@ -0,0 +1,384 @@ +import synchronized from 'decorator-synchronized' +import { + assign, + every, + forEach, + isObject, + keyBy, + map as mapToArray, + remove, + some, +} from 'lodash' +import { + noSuchObject, + unauthorized, +} from 'xo-common/api-errors' + +import { + asyncMap, + generateUnsecureToken, + lightSet, + map, + streamToArray, +} from '../utils' + +// =================================================================== + +const VM_RESOURCES = { + cpus: true, + disk: true, + disks: true, + memory: true, + vms: true, +} + +const computeVmResourcesUsage = vm => { + const processed = {} + let disks = 0 + let disk = 0 + + forEach(vm.$VBDs, vbd => { + let vdi, vdiId + if ( + 
// Fill in defaults and normalize the stored shape of a resource set.
//
// A limit may be stored either as a bare quantity (number) or as an
// object `{ available, total }`: bare quantities are expanded to the
// object form with `available === total`.
const normalize = set => ({
  id: set.id,
  ipPools: set.ipPools || [],
  limits: set.limits
    ? map(set.limits, limit => isObject(limit)
      ? limit
      : {
        available: limit,
        total: limit,
      }
    )
    : {},
  name: set.name || '',
  objects: set.objects || [],
  subjects: set.subjects || [],
})
  // Patch the resource set `id`: only the provided fields are updated.
  //
  // When `limits` is provided it fully replaces the previous limits;
  // for each limit the current consumption is carried over: the new
  // `available` is the new total minus what was already consumed
  // (`total - available` of the previous limit).
  async updateResourceSet (id, {
    name = undefined,
    subjects = undefined,
    objects = undefined,
    limits = undefined,
    ipPools = undefined,
  }) {
    const set = await this.getResourceSet(id)
    if (name) {
      set.name = name
    }
    if (subjects) {
      set.subjects = subjects
    }
    if (objects) {
      set.objects = objects
    }
    if (limits) {
      const previousLimits = set.limits
      set.limits = map(limits, (quantity, id) => {
        const previous = previousLimits[id]
        if (!previous) {
          // New limit: fully available.
          return {
            available: quantity,
            total: quantity,
          }
        }

        const { available, total } = previous

        // Keep the consumed amount (total - available) deducted from
        // the new quantity.
        return {
          available: available - total + quantity,
          total: quantity,
        }
      })
    }
    if (ipPools) {
      set.ipPools = ipPools
    }

    await this._save(set)
  }
+  // List all resource sets; when userId is given (and the user is not
+  // an admin), only sets whose subjects include the user or one of its
+  // groups are returned.
+  async getAllResourceSets (userId = undefined) {
+    let filter
+    if (userId != null) {
+      const user = await this._xo.getUser(userId)
+      if (user.permission !== 'admin') {
+        const userHasSubject = lightSet(user.groups).add(user.id).has
+        filter = set => some(set.subjects, userHasSubject)
+      }
+    }
+
+    return streamToArray(this._store.createValueStream(), {
+      filter,
+      mapper: normalize,
+    })
+  }
+
+  // Fetch a single set from the store; translates a level `notFound`
+  // error into a noSuchObject API error.
+  getResourceSet (id) {
+    return this._store.get(id).then(normalize, error => {
+      if (error.notFound) {
+        throw noSuchObject(id, 'resourceSet')
+      }
+
+      throw error
+    })
+  }
+
+  async addObjectToResourceSet (objectId, setId) {
+    const set = await this.getResourceSet(setId)
+    set.objects.push(objectId)
+    await this._save(set)
+  }
+
+  async removeObjectFromResourceSet (objectId, setId) {
+    const set = await this.getResourceSet(setId)
+    remove(set.objects, id => id === objectId)
+    await this._save(set)
+  }
+
+  async addIpPoolToResourceSet (ipPoolId, setId) {
+    const set = await this.getResourceSet(setId)
+    set.ipPools.push(ipPoolId)
+    await this._save(set)
+  }
+
+  async removeIpPoolFromResourceSet (ipPoolId, setId) {
+    const set = await this.getResourceSet(setId)
+    remove(set.ipPools, id => id === ipPoolId)
+    await this._save(set)
+  }
+
+  async addSubjectToResourceSet (subjectId, setId) {
+    const set = await this.getResourceSet(setId)
+    set.subjects.push(subjectId)
+    await this._save(set)
+  }
+
+  // NOTE(review): the name says "To" but the method removes — probably
+  // meant removeSubjectFromResourceSet; renaming would break callers.
+  async removeSubjectToResourceSet (subjectId, setId) {
+    const set = await this.getResourceSet(setId)
+    remove(set.subjects, id => id === subjectId)
+    await this._save(set)
+  }
+
+  async addLimitToResourceSet (limitId, quantity, setId) {
+    const set = await this.getResourceSet(setId)
+    set.limits[limitId] = quantity
+    await this._save(set)
+  }
+
+  async removeLimitFromResourceSet (limitId, setId) {
+    const set = await this.getResourceSet(setId)
+    delete set.limits[limitId]
+    await this._save(set)
+  }
+
+  // Serialized to avoid concurrent read-modify-write of the same set.
+  @synchronized
+  async allocateLimitsInResourceSet (limits, setId) {
+    const set = await this.getResourceSet(setId)
+    forEach(limits, (quantity, id) => {
+      const limit = set.limits[id]
+      if (!limit) {
+        return
+      }
+
+      // On failure the in-memory object has been decremented but is
+      // never saved (the throw skips _save below).
+      if ((limit.available -= quantity) < 0) {
+        throw new Error(`not enough ${id} available in the set ${setId}`)
+      }
+    })
+    await this._save(set)
+  }
+
+  @synchronized
+  async releaseLimitsInResourceSet (limits, setId) {
+    const set = await this.getResourceSet(setId)
+    forEach(limits, (quantity, id) => {
+      const limit = set.limits[id]
+      if (!limit) {
+        return
+      }
+
+      // Clamp to the total: never release more than was allocated.
+      if ((limit.available += quantity) > limit.total) {
+        limit.available = limit.total
+      }
+    })
+    await this._save(set)
+  }
+
+  // Recompute `available` for all VM-related limits from the current
+  // VM usage across every connected XAPI pool.
+  async recomputeResourceSetsLimits () {
+    const sets = keyBy(await this.getAllResourceSets(), 'id')
+    forEach(sets, ({ limits }) => {
+      forEach(limits, (limit, id) => {
+        if (VM_RESOURCES[id]) { // only reset VMs related limits
+          limit.available = limit.total
+        }
+      })
+    })
+
+    forEach(this._xo.getAllXapis(), xapi => {
+      forEach(xapi.objects.all, object => {
+        let id
+        let set
+        if (
+          object.$type !== 'vm' ||
+
+          // No set for this VM.
+          !(id = xapi.xo.getData(object, 'resourceSet')) ||
+
+          // Not our set.
+          !(set = sets[id])
+        ) {
+          return
+        }
+
+        const { limits } = set
+        forEach(computeVmResourcesUsage(object), (usage, resource) => {
+          const limit = limits[resource]
+          if (limit) {
+            limit.available -= usage
+          }
+        })
+      })
+    })
+
+    await Promise.all(mapToArray(sets, set => this._save(set)))
+  }
+
+  // Move a VM to another resource set (or to none when null/undefined),
+  // transferring its resource usage and updating the related ACLs.
+  async setVmResourceSet (vmId, resourceSetId) {
+    const xapi = this._xo.getXapi(vmId)
+    const previousResourceSetId = xapi.xo.getData(vmId, 'resourceSet')
+
+    if (resourceSetId === previousResourceSetId || (previousResourceSetId === undefined && resourceSetId === null)) {
+      return
+    }
+
+    const resourcesUsage = await this.computeVmResourcesUsage(this._xo.getObject(vmId))
+
+    if (resourceSetId != null) {
+      await this.allocateLimitsInResourceSet(resourcesUsage, resourceSetId)
+    }
+    if (previousResourceSetId !== undefined) {
+      await this.releaseLimitsInResourceSet(resourcesUsage, previousResourceSetId)
+    }
+
+    await xapi.xo.setData(vmId, 'resourceSet', resourceSetId === undefined ? null : resourceSetId)
+
+    if (previousResourceSetId !== undefined) {
+      await this._xo.removeAclsForObject(vmId)
+    }
+    if (resourceSetId != null) {
+      const { subjects } = await this.getResourceSet(resourceSetId)
+      await asyncMap(subjects, subject =>
+        this._xo.addAcl(subject, vmId, 'admin')
+      )
+    }
+  }
+}
diff --git a/packages/xo-server/src/xo-mixins/scheduling.js b/packages/xo-server/src/xo-mixins/scheduling.js
new file mode 100644
index 000000000..3236010f4
--- /dev/null
+++ b/packages/xo-server/src/xo-mixins/scheduling.js
@@ -0,0 +1,204 @@
+import { BaseError } from 'make-error'
+import { noSuchObject } from 'xo-common/api-errors.js'
+
+import { Schedules } from '../models/schedule'
+import {
+  forEach,
+  mapToArray,
+  scheduleFn,
+} from '../utils'
+
+// ===================================================================
+
+// Accept either a schedule object or a plain id.
+const _resolveId = scheduleOrId => scheduleOrId.id || scheduleOrId
+
+export class SchedulerError extends BaseError {}
+
+export class ScheduleOverride extends SchedulerError {
+  constructor (scheduleOrId) {
+    super('Schedule ID ' + _resolveId(scheduleOrId) + ' is already added')
+  }
+}
+
+export class ScheduleNotEnabled extends SchedulerError {
+  constructor (scheduleOrId) {
+    super('Schedule ' + _resolveId(scheduleOrId) + ' is not enabled')
+  }
+}
+
+export class ScheduleAlreadyEnabled extends SchedulerError {
+  constructor (scheduleOrId) {
+    super('Schedule ' + _resolveId(scheduleOrId) + ' is already enabled')
+  }
+}
+
+// ===================================================================
+
+export default class {
+  constructor (xo) {
+    this.xo = xo
+    const schedules = this._redisSchedules = new Schedules({
+      connection: xo._redis,
+      prefix: 'xo:schedule',
+      indexes: ['user_id', 'job'],
+    })
+    this._scheduleTable = undefined
+
+    xo.on('clean', () => schedules.rebuildIndexes())
+    xo.on('start', () => {
+      xo.addConfigManager('schedules',
+        () => schedules.get(),
+        schedules_ => Promise.all(mapToArray(schedules_, schedule =>
+          schedules.save(schedule)
+        )),
+        [ 'jobs' ]
+      )
+
+      return this._loadSchedules()
+    })
+    xo.on('stop', () => this._disableAll())
+  }
+
+  // Register a schedule in memory and start its cron job if enabled.
+  _add (schedule) {
+    const { id } = schedule
+    this._schedules[id] = schedule
+    this._scheduleTable[id] = false
+    try {
+      if (schedule.enabled) {
+        this._enable(schedule)
+      }
+    } catch (error) {
+      console.warn('Scheduling#_add(%s)', id, error)
+    }
+  }
+
+  _exists (scheduleOrId) {
+    const id_ = _resolveId(scheduleOrId)
+    return id_ in this._schedules
+  }
+
+  _isEnabled (scheduleOrId) {
+    return this._scheduleTable[_resolveId(scheduleOrId)]
+  }
+
+  // Start the cron job of a schedule; keeps the returned stop function
+  // so the job can be cancelled later.
+  _enable (schedule) {
+    const { id } = schedule
+
+    const stopSchedule = scheduleFn(
+      schedule.cron,
+      () => this.xo.runJobSequence([ schedule.job ]),
+      schedule.timezone
+    )
+
+    this._cronJobs[id] = stopSchedule
+    this._scheduleTable[id] = true
+  }
+
+  _disable (scheduleOrId) {
+    if (!this._exists(scheduleOrId)) {
+      throw noSuchObject(scheduleOrId, 'schedule')
+    }
+    if (!this._isEnabled(scheduleOrId)) {
+      throw new ScheduleNotEnabled(scheduleOrId)
+    }
+    const id = _resolveId(scheduleOrId)
+    this._cronJobs[id]() // Stop cron job.
+    delete this._cronJobs[id]
+    this._scheduleTable[id] = false
+  }
+
+  _disableAll () {
+    forEach(this._scheduleTable, (enabled, id) => {
+      if (enabled) {
+        this._disable(id)
+      }
+    })
+  }
+
+  get scheduleTable () {
+    return this._scheduleTable
+  }
+
+  async _loadSchedules () {
+    this._schedules = {}
+    this._scheduleTable = {}
+    this._cronJobs = {}
+
+    const schedules = await this.xo.getAllSchedules()
+
+    forEach(schedules, schedule => {
+      this._add(schedule)
+    })
+  }
+
+  async _getSchedule (id) {
+    const schedule = await this._redisSchedules.first(id)
+
+    if (!schedule) {
+      throw noSuchObject(id, 'schedule')
+    }
+
+    return schedule
+  }
+
+  async getSchedule (id) {
+    return (await this._getSchedule(id)).properties
+  }
+
+  async getAllSchedules () {
+    return /* await */ this._redisSchedules.get()
+  }
+
+  async createSchedule (userId, { job, cron, enabled, name, timezone }) {
+    const schedule_ = await this._redisSchedules.create(userId, job, cron, enabled, name, timezone)
+    const schedule = schedule_.properties
+
+    this._add(schedule)
+
+    return schedule
+  }
+
+  async updateSchedule (id, { job, cron, enabled, name, timezone }) {
+    const schedule = await this._getSchedule(id)
+
+    if (job !== undefined) schedule.set('job', job)
+    if (cron !== undefined) schedule.set('cron', cron)
+    if (enabled !== undefined) schedule.set('enabled', enabled)
+    if (name !== undefined) schedule.set('name', name)
+    if (timezone === null) {
+      schedule.set('timezone', undefined) // Remove current timezone
+    } else if (timezone !== undefined) {
+      schedule.set('timezone', timezone)
+    }
+
+    await this._redisSchedules.save(schedule)
+
+    const { properties } = schedule
+
+    if (!this._exists(id)) {
+      throw noSuchObject(id, 'schedule')
+    }
+
+    // disable the schedule, _add() will enable it if necessary
+    if (this._isEnabled(id)) {
+      this._disable(id)
+    }
+
+    this._add(properties)
+  }
+
+  // Remove a schedule from the store and stop its cron job if running;
+  // scheduler errors (e.g. not enabled) are deliberately swallowed.
+  async removeSchedule (id) {
+    await this._redisSchedules.remove(id)
+
+    try {
+      this._disable(id)
+    } catch (exc) {
+      if (!(exc instanceof SchedulerError)) {
+        throw exc
+      }
+    } finally {
+      delete this._schedules[id]
+      delete this._scheduleTable[id]
+    }
+  }
+}
diff --git a/packages/xo-server/src/xo-mixins/store.js b/packages/xo-server/src/xo-mixins/store.js
new file mode 100644
index 000000000..98a9e7d74
--- /dev/null
+++ b/packages/xo-server/src/xo-mixins/store.js
@@ -0,0 +1,81 @@
+import endsWith from 'lodash/endsWith'
+import levelup from 'level-party'
+import startsWith from 'lodash/startsWith'
+import sublevel from 'level-sublevel'
+import { ensureDir } from 'fs-extra'
+
+import {
+  forEach,
+  isFunction,
+  promisify,
+} from '../utils'
+
+// ===================================================================
+
+// `has` helper grafted onto level dbs: callback style when a callback
+// is given, otherwise a synchronous get (notFound means "absent").
+const _levelHas = function has (key, cb) {
+  if (cb) {
+    return this.get(key, (error, value) => error
+      ? (
+        error.notFound
+          ? cb(null, false)
+          : cb(error)
+      )
+      : cb(null, true)
+    )
+  }
+
+  try {
+    this.get(key)
+    return true
+  } catch (error) {
+    if (!error.notFound) {
+      throw error
+    }
+  }
+  return false
+}
+const levelHas = db => {
+  db.has = _levelHas
+
+  return db
+}
+
+// Wrap a level db: promisify its methods, keeping the original
+// callback versions under a `Sync` suffix; `*Stream` and `is*` methods
+// are only bound, not promisified.
+const levelPromise = db => {
+  const dbP = {}
+  forEach(db, (value, name) => {
+    if (!isFunction(value)) {
+      return
+    }
+
+    if (
+      endsWith(name, 'Stream') ||
+      startsWith(name, 'is')
+    ) {
+      dbP[name] = db::value
+    } else {
+      dbP[`${name}Sync`] = db::value
+      dbP[name] = promisify(value, db)
+    }
+  })
+
+  return dbP
+}
+
+// ===================================================================
+
+export default class {
+  constructor (xo) {
+    const dir = `${xo._config.datadir}/leveldb`
+    this._db = ensureDir(dir).then(() => {
+      return sublevel(levelup(dir, {
+        valueEncoding: 'json',
+      }))
+    })
+  }
+
+  // Returns a promise of a promisified sublevel namespace with `has`.
+  getStore (namespace) {
+    return this._db.then(db => levelPromise(
+      levelHas(db.sublevel(namespace))
+    ))
+  }
+}
diff --git a/packages/xo-server/src/xo-mixins/subjects.js b/packages/xo-server/src/xo-mixins/subjects.js
new file mode 100644
index 000000000..c5bf20435
--- /dev/null
+++ b/packages/xo-server/src/xo-mixins/subjects.js
@@ -0,0 +1,379 @@
+import { filter, includes } from 'lodash'
+import { ignoreErrors } from 'promise-toolbox'
+import {
+  hash,
+  needsRehash,
+  verify,
+} from 'hashy'
+import {
+  invalidCredentials,
+  noSuchObject,
+} from 'xo-common/api-errors'
+
+import {
+  Groups,
+} from '../models/group'
+import {
+  Users,
+} from '../models/user'
+import {
+  forEach,
+  isEmpty,
+  lightSet,
+  mapToArray,
+} from '../utils'
+
+// ===================================================================
+
+// Add a value to an array used as a set; never mutates.
+// NOTE(review): when `set` already contains `value` this returns
+// `[ value ]`, discarding the other entries — looks unintended, verify.
+const addToArraySet = (set, value) => set && !includes(set, value)
+  ? set.concat(value)
+  : [ value ]
+const removeFromArraySet = (set, value) => set && filter(set, current => current !== value)
+
+// ===================================================================
+
+export default class {
+  constructor (xo) {
+    this._xo = xo
+
+    const redis = xo._redis
+
+    const groupsDb = this._groups = new Groups({
+      connection: redis,
+      prefix: 'xo:group',
+    })
+    const usersDb = this._users = new Users({
+      connection: redis,
+      prefix: 'xo:user',
+      indexes: ['email'],
+    })
+
+    xo.on('clean', () => Promise.all([
+      groupsDb.rebuildIndexes(),
+      usersDb.rebuildIndexes(),
+    ]))
+    xo.on('start', async () => {
+      xo.addConfigManager('groups',
+        () => groupsDb.get(),
+        groups => Promise.all(mapToArray(groups, group => groupsDb.save(group))),
+        [ 'users' ]
+      )
+      xo.addConfigManager('users',
+        () => usersDb.get(),
+        users => Promise.all(mapToArray(users, async user => {
+          const userId = user.id
+          // Users restored from a config backup may collide with
+          // existing users on the (indexed) email: delete the others.
+          const conflictUsers = await usersDb.get({ email: user.email })
+          if (!isEmpty(conflictUsers)) {
+            await Promise.all(mapToArray(conflictUsers, ({ id }) =>
+              (id !== userId) && this.deleteUser(id)
+            ))
+          }
+          return usersDb.save(user)
+        }))
+      )
+
+      // Bootstrap a default admin account on an empty database.
+      if (!await usersDb.exists()) {
+        const email = 'admin@admin.net'
+        const password = 'admin'
+
+        await this.createUser({email, password, permission: 'admin'})
+        console.log('[INFO] Default user created:', email, ' with password', password)
+      }
+    })
+  }
+
+  // -----------------------------------------------------------------
+
+  async createUser ({ name, password, ...properties }) {
+    if (name) {
+      properties.email = name
+    }
+
+    if (password) {
+      properties.pw_hash = await hash(password)
+    }
+
+    // TODO: use plain objects
+    const user = await this._users.create(properties)
+
+    return user.properties
+  }
+
+  // Delete a user and asynchronously clean up its tokens, ACLs and
+  // group memberships (best effort, errors ignored).
+  async deleteUser (id) {
+    const user = await this.getUser(id)
+
+    await this._users.remove(id)
+
+    // Remove tokens of user.
+    this._xo.getAuthenticationTokensForUser(id)
+      .then(tokens => {
+        forEach(tokens, token => {
+          // NOTE(review): `token` is unused — should this pass token.id
+          // instead of the user id? verify.
+          this._xo.deleteAuthenticationToken(id)::ignoreErrors()
+        })
+      })
+      ::ignoreErrors()
+
+    // Remove ACLs for this user.
+    this._xo.getAclsForSubject(id).then(acls => {
+      forEach(acls, acl => {
+        this._xo.removeAcl(id, acl.object, acl.action)::ignoreErrors()
+      })
+    })
+
+    // Remove the user from all its groups.
+    forEach(user.groups, groupId => {
+      this.getGroup(groupId)
+        .then(group => this._removeUserFromGroup(id, group))
+        ::ignoreErrors()
+    })
+  }
+
+  async updateUser (id, {
+    // TODO: remove
+    email,
+
+    name = email,
+    password,
+    permission,
+    preferences,
+  }) {
+    const user = await this.getUser(id)
+
+    if (name) {
+      user.name = name
+    }
+    if (permission) {
+      user.permission = permission
+    }
+    if (password) {
+      user.pw_hash = await hash(password)
+    }
+
+    // Merge the preferences patch: null/undefined values delete keys.
+    const newPreferences = { ...user.preferences }
+    forEach(preferences, (value, name) => {
+      if (value == null) {
+        delete newPreferences[name]
+      } else {
+        newPreferences[name] = value
+      }
+    })
+    user.preferences = isEmpty(newPreferences)
+      ? undefined
+      : newPreferences
+
+    // TODO: remove
+    user.email = user.name
+    delete user.name
+
+    await this._users.save(user)
+  }
+
+  // Merge this method in getUser() when plain objects.
+  async _getUser (id) {
+    const user = await this._users.first(id)
+    if (!user) {
+      throw noSuchObject(id, 'user')
+    }
+
+    return user
+  }
+
+  // TODO: this method will no longer be async when users are
+  // integrated to the main collection.
+  async getUser (id) {
+    const user = (await this._getUser(id)).properties
+
+    // TODO: remove when no longer the email property has been
+    // completely eradicated.
+    user.name = user.email
+
+    return user
+  }
+
+  async getAllUsers () {
+    return this._users.get()
+  }
+
+  async getUserByName (username, returnNullIfMissing) {
+    // TODO: change `email` by `username`.
+    const user = await this._users.first({ email: username })
+    if (user) {
+      return user.properties
+    }
+
+    if (returnNullIfMissing) {
+      return null
+    }
+
+    throw noSuchObject(username, 'user')
+  }
+
+  // Get or create a user associated with an auth provider.
+  async registerUser (provider, name) {
+    const user = await this.getUserByName(name, true)
+    if (user) {
+      if (user._provider !== provider) {
+        throw new Error(`the name ${name} is already taken`)
+      }
+
+      return user
+    }
+
+    if (!this._xo._config.createUserOnFirstSignin) {
+      throw new Error(`registering ${name} user is forbidden`)
+    }
+
+    return /* await */ this.createUser({
+      name,
+      _provider: provider,
+    })
+  }
+
+  async changeUserPassword (userId, oldPassword, newPassword) {
+    if (!(await this.checkUserPassword(userId, oldPassword, false))) {
+      throw invalidCredentials()
+    }
+
+    await this.updateUser(userId, { password: newPassword })
+  }
+
+  // Verify a password against the stored hash; transparently rehashes
+  // (via updateUser) when the stored hash is outdated.
+  async checkUserPassword (userId, password, updateIfNecessary = true) {
+    const { pw_hash: hash } = await this.getUser(userId)
+    if (!(
+      hash &&
+      await verify(password, hash)
+    )) {
+      return false
+    }
+
+    if (updateIfNecessary && needsRehash(hash)) {
+      await this.updateUser(userId, { password })
+    }
+
+    return true
+  }
+
+  // -----------------------------------------------------------------
+
+  async createGroup ({name}) {
+    // TODO: use plain objects.
+    const group = (await this._groups.create(name)).properties
+
+    return group
+  }
+
+  // Delete a group and asynchronously clean up its ACLs and the
+  // membership entries of its users (best effort, errors ignored).
+  async deleteGroup (id) {
+    const group = await this.getGroup(id)
+
+    await this._groups.remove(id)
+
+    // Remove ACLs for this group.
+    this._xo.getAclsForSubject(id).then(acls => {
+      forEach(acls, acl => {
+        this._xo.removeAcl(id, acl.object, acl.action)::ignoreErrors()
+      })
+    })
+
+    // Remove the group from all its users.
+    forEach(group.users, userId => {
+      this.getUser(userId)
+        .then(user => this._removeGroupFromUser(id, user))
+        ::ignoreErrors()
+    })
+  }
+
+  async updateGroup (id, {name}) {
+    const group = await this.getGroup(id)
+
+    if (name) group.name = name
+
+    await this._groups.save(group)
+  }
+
+  async getGroup (id) {
+    const group = await this._groups.first(id)
+    if (!group) {
+      throw noSuchObject(id, 'group')
+    }
+
+    return group.properties
+  }
+
+  async getAllGroups () {
+    return this._groups.get()
+  }
+
+  async addUserToGroup (userId, groupId) {
+    const [user, group] = await Promise.all([
+      this.getUser(userId),
+      this.getGroup(groupId),
+    ])
+
+    user.groups = addToArraySet(user.groups, groupId)
+    group.users = addToArraySet(group.users, userId)
+
+    await Promise.all([
+      this._users.save(user),
+      this._groups.save(group),
+    ])
+  }
+
+  async _removeUserFromGroup (userId, group) {
+    group.users = removeFromArraySet(group.users, userId)
+    return this._groups.save(group)
+  }
+
+  async _removeGroupFromUser (groupId, user) {
+    user.groups = removeFromArraySet(user.groups, groupId)
+    return this._users.save(user)
+  }
+
+  async removeUserFromGroup (userId, groupId) {
+    const [user, group] = await Promise.all([
+      this.getUser(userId),
+      this.getGroup(groupId),
+    ])
+
+    await Promise.all([
+      this._removeUserFromGroup(userId, group),
+      this._removeGroupFromUser(groupId, user),
+    ])
+  }
+
+  // Replace the whole user list of a group, updating the membership
+  // arrays of both the added and the removed users.
+  async setGroupUsers (groupId, userIds) {
+    const group = await this.getGroup(groupId)
+
+    // Split into users to add (in userIds but not in the group) and
+    // users to remove (in the group but not in userIds).
+    let newUsersIds = lightSet(userIds)
+    const oldUsersIds = []
+    forEach(group.users, id => {
+      if (newUsersIds.has(id)) {
+        newUsersIds.delete(id)
+      } else {
+        oldUsersIds.push(id)
+      }
+    })
+    newUsersIds = newUsersIds.toArray()
+
+    const getUser = ::this.getUser
+    const [newUsers, oldUsers] = await Promise.all([
+      Promise.all(newUsersIds.map(getUser)),
+      Promise.all(oldUsersIds.map(getUser)),
+    ])
+
+    forEach(newUsers, user => {
+      user.groups = addToArraySet(user.groups, groupId)
+    })
+    forEach(oldUsers, user => {
+      user.groups = removeFromArraySet(user.groups, groupId)
+    })
+
+    group.users = userIds
+
+    const saveUser = ::this._users.save
+    await Promise.all([
+      Promise.all(mapToArray(newUsers, saveUser)),
+      Promise.all(mapToArray(oldUsers, saveUser)),
+      this._groups.save(group),
+    ])
+  }
+}
diff --git a/packages/xo-server/src/xo-mixins/xen-servers.js b/packages/xo-server/src/xo-mixins/xen-servers.js
new file mode 100644
index 000000000..9bd94d0ab
--- /dev/null
+++ b/packages/xo-server/src/xo-mixins/xen-servers.js
@@ -0,0 +1,430 @@
+import { ignoreErrors } from 'promise-toolbox'
+import { noSuchObject } from 'xo-common/api-errors'
+
+import Xapi from '../xapi'
+import xapiObjectToXo from '../xapi-object-to-xo'
+import XapiStats from '../xapi-stats'
+import {
+  camelToSnakeCase,
+  createRawObject,
+  forEach,
+  isEmpty,
+  isString,
+  popProperty,
+  serializeError,
+} from '../utils'
+import {
+  Servers,
+} from '../models/server'
+
+// ===================================================================
+
+export default class {
+  constructor (xo) {
+    this._objectConflicts = createRawObject() // TODO: clean when a server is disconnected.
+    const serversDb = this._servers = new Servers({
+      connection: xo._redis,
+      prefix: 'xo:server',
+      indexes: ['host'],
+    })
+    this._stats = new XapiStats()
+    this._xapis = createRawObject()
+    this._xapisByPool = createRawObject()
+    this._xo = xo
+
+    xo.on('clean', () => serversDb.rebuildIndexes())
+    xo.on('start', async () => {
+      xo.addConfigManager('xenServers',
+        () => serversDb.get(),
+        servers => serversDb.update(servers)
+      )
+
+      // Connects to existing servers.
+      const servers = await serversDb.get()
+      for (const server of servers) {
+        if (server.enabled) {
+          this.connectXenServer(server.id).catch(error => {
+            console.error(
+              `[WARN] ${server.host}:`,
+              error[0] || error.stack || error.code || error
+            )
+          })
+        }
+      }
+    })
+
+    // TODO: disconnect servers on stop.
+  }
+
+  // Persist a new server entry; boolean flags are stored as the string
+  // 'true' or removed (undefined) because the backing store is Redis.
+  async registerXenServer ({
+    allowUnauthorized,
+    host,
+    label,
+    password,
+    readOnly,
+    username,
+  }) {
+    // FIXME: We are storing passwords which is bad!
+    // Could we use tokens instead?
+    // TODO: use plain objects
+    const server = await this._servers.create({
+      allowUnauthorized: allowUnauthorized ? 'true' : undefined,
+      enabled: 'true',
+      host,
+      label: label || undefined,
+      password,
+      readOnly: readOnly ? 'true' : undefined,
+      username,
+    })
+
+    return server.properties
+  }
+
+  async unregisterXenServer (id) {
+    this.disconnectXenServer(id)::ignoreErrors()
+
+    if (!await this._servers.remove(id)) {
+      throw noSuchObject(id, 'xenServer')
+    }
+  }
+
+  async updateXenServer (id, {
+    allowUnauthorized,
+    enabled,
+    error,
+    host,
+    label,
+    password,
+    readOnly,
+    username,
+  }) {
+    const server = await this._getXenServer(id)
+    const xapi = this._xapis[id]
+    // Changing connection parameters requires the server to be
+    // disconnected first.
+    const requireDisconnected =
+      allowUnauthorized !== undefined ||
+      host !== undefined ||
+      password !== undefined ||
+      username !== undefined
+
+    if (
+      requireDisconnected &&
+      xapi !== undefined &&
+      xapi.status !== 'disconnected'
+    ) {
+      throw new Error('this entry require disconnecting the server to update it')
+    }
+
+    if (label !== undefined) server.set('label', label || undefined)
+    if (host) server.set('host', host)
+    if (username) server.set('username', username)
+    if (password) server.set('password', password)
+
+    if (error !== undefined) {
+      server.set('error', error ? JSON.stringify(error) : '')
+    }
+
+    if (enabled !== undefined) {
+      server.set('enabled', enabled ? 'true' : undefined)
+    }
+
+    if (readOnly !== undefined) {
+      server.set('readOnly', readOnly ? 'true' : undefined)
+      // Propagate immediately to a live connection.
+      if (xapi !== undefined) {
+        xapi.readOnly = readOnly
+      }
+    }
+
+    if (allowUnauthorized !== undefined) {
+      server.set('allowUnauthorized', allowUnauthorized ? 'true' : undefined)
+    }
+
+    await this._servers.update(server)
+  }
+
+  // TODO: this method will no longer be async when servers are
+  // integrated to the main collection.
+  async _getXenServer (id) {
+    const server = await this._servers.first(id)
+    if (!server) {
+      throw noSuchObject(id, 'xenServer')
+    }
+
+    return server
+  }
+
+  // Transform added/updated XAPI objects into XO objects and publish
+  // them, recording id conflicts between connections and keeping
+  // failed transforms in `toRetry` for a later attempt.
+  _onXenAdd (xapiObjects, xapiIdsToXo, toRetry, conId) {
+    const conflicts = this._objectConflicts
+    const objects = this._xo._objects
+
+    forEach(xapiObjects, (xapiObject, xapiId) => {
+      try {
+        const xoObject = xapiObjectToXo(xapiObject)
+        if (!xoObject) {
+          return
+        }
+
+        const xoId = xoObject.id
+        xapiIdsToXo[xapiId] = xoId
+
+        const previous = objects.get(xoId, undefined)
+        if (
+          previous &&
+          previous._xapiRef !== xapiObject.$ref
+        ) {
+          // Same XO id already published from another ref: record the
+          // conflicting version per connection instead of overwriting.
+          (
+            conflicts[xoId] ||
+            (conflicts[xoId] = createRawObject())
+          )[conId] = xoObject
+        } else {
+          objects.set(xoId, xoObject)
+        }
+      } catch (error) {
+        console.error('ERROR: xapiObjectToXo', error)
+
+        toRetry[xapiId] = xapiObject
+      }
+    })
+  }
+
+  _onXenRemove (xapiObjects, xapiIdsToXo, toRetry, conId) {
+    const conflicts = this._objectConflicts
+    const objects = this._xo._objects
+
+    forEach(xapiObjects, (_, xapiId) => {
+      toRetry && delete toRetry[xapiId]
+
+      const xoId = xapiIdsToXo[xapiId]
+      if (!xoId) {
+        // This object was not known previously.
+        return
+      }
+
+      delete xapiIdsToXo[xapiId]
+
+      const objConflicts = conflicts[xoId]
+      if (objConflicts) {
+        if (objConflicts[conId]) {
+          delete objConflicts[conId]
+        } else {
+          // Promote one of the recorded conflicting versions.
+          objects.set(xoId, popProperty(objConflicts))
+        }
+
+        if (isEmpty(objConflicts)) {
+          delete conflicts[xoId]
+        }
+      } else {
+        objects.unset(xoId)
+      }
+    })
+  }
+
+  async connectXenServer (id) {
+    const server = (await this._getXenServer(id)).properties
+
+    const xapi = this._xapis[server.id] = new Xapi({
+      allowUnauthorized: Boolean(server.allowUnauthorized),
+      auth: {
+        user: server.username,
+        password: server.password,
+      },
+      readOnly: Boolean(server.readOnly),
+      url: server.host,
+    })
+
+    xapi.xo = (() => {
+      const conId = server.id
+
+      // Maps ids of XAPI objects to ids of XO objects.
+      const xapiIdsToXo = createRawObject()
+
+      // Map of XAPI objects which failed to be transformed to XO
+      // objects.
+      //
+      // At each `finish` there will be another attempt to transform
+      // until they succeed.
+      let toRetry
+      let toRetryNext = createRawObject()
+
+      const onAddOrUpdate = objects => {
+        this._onXenAdd(objects, xapiIdsToXo, toRetryNext, conId)
+      }
+      const onRemove = objects => {
+        this._onXenRemove(objects, xapiIdsToXo, toRetry, conId)
+      }
+
+      const xapisByPool = this._xapisByPool
+      const onFinish = () => {
+        const { pool } = xapi
+        if (pool) {
+          xapisByPool[pool.$id] = xapi
+        }
+
+        if (!isEmpty(toRetry)) {
+          onAddOrUpdate(toRetry)
+          toRetry = null
+        }
+
+        if (!isEmpty(toRetryNext)) {
+          toRetry = toRetryNext
+          toRetryNext = createRawObject()
+        }
+      }
+
+      const { objects } = xapi
+
+      const addObject = object => {
+        // TODO: optimize.
+        onAddOrUpdate({ [object.$id]: object })
+        return xapiObjectToXo(object)
+      }
+
+      return {
+        // NOTE(review): binds to the mixin instance, not to _xo —
+        // verify the intended receiver.
+        httpRequest: this._xo.httpRequest.bind(this),
+
+        install () {
+          objects.on('add', onAddOrUpdate)
+          objects.on('update', onAddOrUpdate)
+          objects.on('remove', onRemove)
+          objects.on('finish', onFinish)
+
+          onAddOrUpdate(objects.all)
+        },
+        uninstall () {
+          objects.removeListener('add', onAddOrUpdate)
+          objects.removeListener('update', onAddOrUpdate)
+          objects.removeListener('remove', onRemove)
+          objects.removeListener('finish', onFinish)
+
+          onRemove(objects.all)
+        },
+
+        addObject,
+        // Read a JSON value stored under `xo:<key>` in the object's
+        // other_config map; `id` may be an object or an id string.
+        getData: (id, key) => {
+          const value = (
+            typeof id === 'string'
+              ? xapi.getObject(id)
+              : id
+          ).other_config[`xo:${camelToSnakeCase(key)}`]
+          return value && JSON.parse(value)
+        },
+        setData: async (id, key, value) => {
+          await xapi._updateObjectMapProperty(
+            xapi.getObject(id),
+            'other_config',
+            { [`xo:${camelToSnakeCase(key)}`]: value !== null ? JSON.stringify(value) : value }
+          )
+
+          // Register the updated object.
+          addObject(await xapi._waitObject(id))
+        },
+      }
+    })()
+
+    xapi.xo.install()
+
+    // Record the connection outcome (or clear a previous error) on the
+    // server entry before propagating any failure.
+    await xapi.connect().then(
+      () => this.updateXenServer(id, { error: null }),
+      error => {
+        this.updateXenServer(id, { error: serializeError(error) })
+
+        throw error
+      }
+    )
+  }
+
+  async disconnectXenServer (id) {
+    const xapi = this._xapis[id]
+    if (!xapi) {
+      throw noSuchObject(id, 'xenServer')
+    }
+
+    delete this._xapis[id]
+
+    const { pool } = xapi
+    if (pool) {
+      // NOTE(review): registration (onFinish above) keys on pool.$id
+      // but removal uses pool.id — verify these are the same value.
+      delete this._xapisByPool[pool.id]
+    }
+
+    xapi.xo.uninstall()
+    return xapi.disconnect()
+  }
+
+  getAllXapis () {
+    return this._xapis
+  }
+
+  // Returns the XAPI connection associated to an object.
+  getXapi (object, type) {
+    if (isString(object)) {
+      object = this._xo.getObject(object, type)
+    }
+
+    const { $pool: poolId } = object
+    if (!poolId) {
+      throw new Error(`object ${object.id} does not belong to a pool`)
+    }
+
+    const xapi = this._xapisByPool[poolId]
+    if (!xapi) {
+      throw new Error(`no connection found for object ${object.id}`)
+    }
+
+    return xapi
+  }
+
+  // List all registered servers, enriched with the live connection
+  // status and a default label, passwords stripped.
+  async getAllXenServers () {
+    const servers = await this._servers.get()
+    const xapis = this._xapis
+    forEach(servers, server => {
+      const xapi = xapis[server.id]
+      if (xapi !== undefined) {
+        server.status = xapi.status
+
+        let pool
+        if (
+          server.label === undefined &&
+          (pool = xapi.pool) != null
+        ) {
+          server.label = pool.name_label
+        }
+      }
+
+      // Do not expose password.
+      delete server.password
+    })
+
+    return servers
+  }
+
+  getXapiVmStats (vm, granularity) {
+    const xapi = this.getXapi(vm)
+    return this._stats.getVmPoints(xapi, vm._xapiId, granularity)
+  }
+
+  getXapiHostStats (host, granularity) {
+    const xapi = this.getXapi(host)
+    return this._stats.getHostPoints(xapi, host._xapiId, granularity)
+  }
+
+  async mergeXenPools (sourceId, targetId, force = false) {
+    const sourceXapi = this.getXapi(sourceId)
+    const {
+      _auth: { user, password },
+      _url: { hostname },
+    } = this.getXapi(targetId)
+
+    // We don't want the events of the source XAPI to interfere with
+    // the events of the new XAPI.
+    sourceXapi.xo.uninstall()
+
+    try {
+      await sourceXapi.joinPool(hostname, user, password, force)
+    } catch (e) {
+      // Restore event handling on failure before propagating.
+      sourceXapi.xo.install()
+
+      throw e
+    }
+
+    await this.unregisterXenServer(sourceId)
+  }
+}
diff --git a/packages/xo-server/src/xo.js b/packages/xo-server/src/xo.js
new file mode 100644
index 000000000..a0df8a746
--- /dev/null
+++ b/packages/xo-server/src/xo.js
@@ -0,0 +1,318 @@
+import XoCollection from 'xo-collection'
+import XoUniqueIndex from 'xo-collection/unique-index'
+import {createClient as createRedisClient} from 'redis'
+import {EventEmitter} from 'events'
+import { noSuchObject } from 'xo-common/api-errors'
+import {
+  forEach,
+  includes,
+  isEmpty,
+  isFunction,
+  isString,
+  iteratee,
+  map as mapToArray,
+  stubTrue,
+} from 'lodash'
+
+import mixins from './xo-mixins' // eslint-disable-line node/no-missing-import
+import Connection from './connection'
+import {
+  mixin,
+} from './decorators'
+import {
+  createRawObject,
+  generateToken,
+  noop,
+} from './utils'
+
+// ===================================================================
+
+@mixin(mapToArray(mixins))
+export default class Xo extends EventEmitter {
+  constructor (config) {
+    super()
+
+    // a lot of mixins adds listener for start/stop/… events
+    this.setMaxListeners(0)
+
+    this._config = config
+
+    this._objects = new XoCollection()
+    this._objects.createIndex('byRef', new XoUniqueIndex('_xapiRef'))
+
+    // Connections to users.
+    this._nextConId = 0
+    this._connections = createRawObject()
+
+    this._httpRequestWatchers = createRawObject()
+
+    // Connects to Redis.
+    {
+      const {
+        renameCommands,
+        socket: path,
+        uri: url,
+      } = config.redis || {}
+
+      this._redis = createRedisClient({ path, rename_commands: renameCommands, url })
+    }
+
+    this.on('start', () => this._watchObjects())
+  }
+
+  // -----------------------------------------------------------------
+
+  // Returns an object from its key or UUID.
+  getObject (key, type) {
+    const {
+      all,
+      indexes: {
+        byRef,
+      },
+    } = this._objects
+
+    const obj = all[key] || byRef[key]
+    if (!obj) {
+      throw noSuchObject(key, type)
+    }
+
+    // `type` may be a single type string or an array of accepted types.
+    if (type != null && (
+      (isString(type) && type !== obj.type) ||
+      !includes(type, obj.type) // Array
+    )) {
+      throw noSuchObject(key, type)
+    }
+
+    return obj
+  }
+
+  // Return all objects matching `filter`, at most `limit` of them.
+  getObjects ({ filter, limit } = {}) {
+    const { all } = this._objects
+
+    if (filter === undefined) {
+      if (limit === undefined || limit === Infinity) {
+        return all
+      }
+      filter = stubTrue
+    } else {
+      filter = iteratee(filter)
+      if (limit === undefined) {
+        limit = Infinity
+      }
+    }
+
+    const results = createRawObject(null)
+    for (const id in all) {
+      const object = all[id]
+      if (filter(object, id, all)) {
+        if (limit-- <= 0) {
+          break
+        }
+        results[id] = object
+      }
+    }
+    return results
+  }
+
+  // -----------------------------------------------------------------
+
+  createUserConnection () {
+    const {_connections: connections} = this
+
+    const connection = new Connection()
+    const id = connection.id = this._nextConId++
+
+    connections[id] = connection
+    connection.on('close', () => {
+      delete connections[id]
+    })
+
+    return connection
+  }
+
+  // -----------------------------------------------------------------
+
+  // Dispatch an HTTP request to the watcher registered for its URL;
+  // one-shot watchers are removed before their handler runs.
+  _handleHttpRequest (req, res, next) {
+    const {url} = req
+
+    const {_httpRequestWatchers: watchers} = this
+    const watcher = watchers[url]
+    if (!watcher) {
+      next()
+      return
+    }
+    if (!watcher.persistent) {
+      delete watchers[url]
+    }
+
+    const {fn, data} = watcher
+    new Promise(resolve => {
+      resolve(fn.call(this, req, res, data, next))
+    }).then(
+      result => {
+        if (result != null) {
+          res.end(JSON.stringify(result))
+        }
+      },
+      error => {
+        console.error('HTTP request error', error.stack || error)
+
+        if (!res.headersSent) {
+          res.writeHead(500)
+        }
+        res.end('unknown error')
+      }
+    )
+  }
+
+  // Register a one-shot handler under a random, collision-free URL.
+  async registerHttpRequest (fn, data, { suffix = '' } = {}) {
+    const {_httpRequestWatchers: watchers} = this
+
+    const url = await (function generateUniqueUrl () {
+      return generateToken().then(token => {
+        const url = `/api/${token}${suffix}`
+
+        return url in watchers
+          ? generateUniqueUrl()
+          : url
+      })
+    })()
+
+    watchers[url] = {
+      data,
+      fn,
+    }
+
+    return url
+  }
+
+  async registerHttpRequestHandler (url, fn, {
+    data = undefined,
+    persistent = true,
+  } = {}) {
+    const {_httpRequestWatchers: watchers} = this
+
+    if (url in watchers) {
+      throw new Error(`a handler is already registered for ${url}`)
+    }
+
+    watchers[url] = {
+      data,
+      fn,
+      persistent,
+    }
+  }
+
+  async unregisterHttpRequestHandler (url) {
+    delete this._httpRequestWatchers[url]
+  }
+
+  // -----------------------------------------------------------------
+
+  // Plugins can use this method to expose methods directly on XO.
+  // Returns an idempotent function which removes the property.
+  defineProperty (name, value, thisArg = null) {
+    if (name in this) {
+      throw new Error(`Xo#${name} is already defined`)
+    }
+
+    // For security, prevent from accessing `this`.
+    if (isFunction(value)) {
+      value = (value => function () {
+        return value.apply(thisArg, arguments)
+      })(value)
+    }
+
+    Object.defineProperty(this, name, {
+      configurable: true,
+      value,
+    })
+
+    let unset = () => {
+      delete this[name]
+      unset = noop
+    }
+    return () => unset()
+  }
+
+  // Convenience method to define multiple properties at once.
+ defineProperties (props, thisArg) { + const unsets = [] + const unset = () => forEach(unsets, unset => unset()) + + try { + forEach(props, (value, name) => { + unsets.push(this.defineProperty(name, value, thisArg)) + }) + } catch (error) { + unset() + throw error + } + + return unset + } + + // ----------------------------------------------------------------- + + // Watches objects changes. + // + // Some should be forwarded to connected clients. + // Some should be persistently saved. + _watchObjects () { + const { + _connections: connections, + _objects: objects, + } = this + + let entered, exited + function reset () { + entered = createRawObject() + exited = createRawObject() + } + reset() + + function onAdd (items) { + forEach(items, (item, id) => { + entered[id] = item + }) + } + objects.on('add', onAdd) + objects.on('update', onAdd) + + objects.on('remove', (items) => { + forEach(items, (_, id) => { + // We don't care about the value here, so we choose `0` + // because it is small in JSON. + exited[id] = 0 + }) + }) + + objects.on('finish', () => { + const enteredMessage = !isEmpty(entered) && { + type: 'enter', + items: entered, + } + const exitedMessage = !isEmpty(exited) && { + type: 'exit', + items: exited, + } + + if (!enteredMessage && !exitedMessage) { + return + } + + forEach(connections, connection => { + // Notifies only authenticated clients. + if (connection.has('user_id') && connection.notify) { + if (enteredMessage) { + connection.notify('all', enteredMessage) + } + if (exitedMessage) { + connection.notify('all', exitedMessage) + } + } + }) + + reset() + }) + } +} diff --git a/packages/xo-server/xo-server.service b/packages/xo-server/xo-server.service new file mode 100644 index 000000000..73ee79196 --- /dev/null +++ b/packages/xo-server/xo-server.service @@ -0,0 +1,14 @@ +# systemd service for XO-Server. 
+ +[Unit] +Description= XO Server +After=network-online.target + +[Service] +Environment="DEBUG=xo:main" +ExecStart=/usr/local/bin/xo-server +Restart=always +SyslogIdentifier=xo-server + +[Install] +WantedBy=multi-user.target diff --git a/packages/xo-server/yarn.lock b/packages/xo-server/yarn.lock new file mode 100644 index 000000000..17c16029c --- /dev/null +++ b/packages/xo-server/yarn.lock @@ -0,0 +1,6568 @@ +# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY. +# yarn lockfile v1 + + +"@babel/code-frame@7.0.0-beta.36": + version "7.0.0-beta.36" + resolved "https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.0.0-beta.36.tgz#2349d7ec04b3a06945ae173280ef8579b63728e4" + dependencies: + chalk "^2.0.0" + esutils "^2.0.2" + js-tokens "^3.0.0" + +"@babel/code-frame@^7.0.0-beta.35": + version "7.0.0-beta.39" + resolved "https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.0.0-beta.39.tgz#91c90bb65207fc5a55128cb54956ded39e850457" + dependencies: + chalk "^2.0.0" + esutils "^2.0.2" + js-tokens "^3.0.0" + +"@babel/helper-function-name@7.0.0-beta.36": + version "7.0.0-beta.36" + resolved "https://registry.yarnpkg.com/@babel/helper-function-name/-/helper-function-name-7.0.0-beta.36.tgz#366e3bc35147721b69009f803907c4d53212e88d" + dependencies: + "@babel/helper-get-function-arity" "7.0.0-beta.36" + "@babel/template" "7.0.0-beta.36" + "@babel/types" "7.0.0-beta.36" + +"@babel/helper-get-function-arity@7.0.0-beta.36": + version "7.0.0-beta.36" + resolved "https://registry.yarnpkg.com/@babel/helper-get-function-arity/-/helper-get-function-arity-7.0.0-beta.36.tgz#f5383bac9a96b274828b10d98900e84ee43e32b8" + dependencies: + "@babel/types" "7.0.0-beta.36" + +"@babel/polyfill@^7.0.0-beta.36": + version "7.0.0-beta.39" + resolved "https://registry.yarnpkg.com/@babel/polyfill/-/polyfill-7.0.0-beta.39.tgz#3711a88b11f5982b936d5a60f239410ce75c6529" + dependencies: + core-js "^2.5.3" + regenerator-runtime "^0.11.1" + +"@babel/template@7.0.0-beta.36": + version 
"7.0.0-beta.36" + resolved "https://registry.yarnpkg.com/@babel/template/-/template-7.0.0-beta.36.tgz#02e903de5d68bd7899bce3c5b5447e59529abb00" + dependencies: + "@babel/code-frame" "7.0.0-beta.36" + "@babel/types" "7.0.0-beta.36" + babylon "7.0.0-beta.36" + lodash "^4.2.0" + +"@babel/traverse@7.0.0-beta.36": + version "7.0.0-beta.36" + resolved "https://registry.yarnpkg.com/@babel/traverse/-/traverse-7.0.0-beta.36.tgz#1dc6f8750e89b6b979de5fe44aa993b1a2192261" + dependencies: + "@babel/code-frame" "7.0.0-beta.36" + "@babel/helper-function-name" "7.0.0-beta.36" + "@babel/types" "7.0.0-beta.36" + babylon "7.0.0-beta.36" + debug "^3.0.1" + globals "^11.1.0" + invariant "^2.2.0" + lodash "^4.2.0" + +"@babel/types@7.0.0-beta.36": + version "7.0.0-beta.36" + resolved "https://registry.yarnpkg.com/@babel/types/-/types-7.0.0-beta.36.tgz#64f2004353de42adb72f9ebb4665fc35b5499d23" + dependencies: + esutils "^2.0.2" + lodash "^4.2.0" + to-fast-properties "^2.0.0" + +"@marsaud/smb2-promise@^0.2.1": + version "0.2.1" + resolved "https://registry.yarnpkg.com/@marsaud/smb2-promise/-/smb2-promise-0.2.1.tgz#fee95f4baba6e4d930e8460d3377aa12560e0f0e" + dependencies: + "@marsaud/smb2" "^0.7.2" + bluebird "^3.1.1" + lodash.foreach "^3.0.3" + lodash.isfunction "^3.0.6" + +"@marsaud/smb2@^0.7.2": + version "0.7.2" + resolved "https://registry.yarnpkg.com/@marsaud/smb2/-/smb2-0.7.2.tgz#79b1761234aa3ec35297ff473611f6713501330f" + dependencies: + babel-runtime "^5.8.34" + bluebird "^2.10.2" + ntlm "~0.1.1" + +"@nraynaud/struct-fu@^1.0.1": + version "1.0.1" + resolved "https://registry.yarnpkg.com/@nraynaud/struct-fu/-/struct-fu-1.0.1.tgz#059a0588dea50647c3677783692dafdadfcadf97" + +abab@^1.0.4: + version "1.0.4" + resolved "https://registry.yarnpkg.com/abab/-/abab-1.0.4.tgz#5faad9c2c07f60dd76770f71cf025b62a63cfd4e" + +abbrev@1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/abbrev/-/abbrev-1.1.1.tgz#f8f2c887ad10bf67f634f005b6987fed3179aac8" + +abstract-leveldown@^2.4.1: + version 
"2.7.2" + resolved "https://registry.yarnpkg.com/abstract-leveldown/-/abstract-leveldown-2.7.2.tgz#87a44d7ebebc341d59665204834c8b7e0932cc93" + dependencies: + xtend "~4.0.0" + +abstract-leveldown@^3.0.0, abstract-leveldown@~3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/abstract-leveldown/-/abstract-leveldown-3.0.0.tgz#5cb89f958a44f526779d740d1440e743e0c30a57" + dependencies: + xtend "~4.0.0" + +abstract-leveldown@~0.12.1: + version "0.12.4" + resolved "https://registry.yarnpkg.com/abstract-leveldown/-/abstract-leveldown-0.12.4.tgz#29e18e632e60e4e221d5810247852a63d7b2e410" + dependencies: + xtend "~3.0.0" + +abstract-leveldown@~2.6.0, abstract-leveldown@~2.6.1: + version "2.6.3" + resolved "https://registry.yarnpkg.com/abstract-leveldown/-/abstract-leveldown-2.6.3.tgz#1c5e8c6a5ef965ae8c35dfb3a8770c476b82c4b8" + dependencies: + xtend "~4.0.0" + +accepts@~1.3.4: + version "1.3.4" + resolved "https://registry.yarnpkg.com/accepts/-/accepts-1.3.4.tgz#86246758c7dd6d21a6474ff084a4740ec05eb21f" + dependencies: + mime-types "~2.1.16" + negotiator "0.6.1" + +acorn-globals@^3.0.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/acorn-globals/-/acorn-globals-3.1.0.tgz#fd8270f71fbb4996b004fa880ee5d46573a731bf" + dependencies: + acorn "^4.0.4" + +acorn-globals@^4.1.0: + version "4.1.0" + resolved "https://registry.yarnpkg.com/acorn-globals/-/acorn-globals-4.1.0.tgz#ab716025dbe17c54d3ef81d32ece2b2d99fe2538" + dependencies: + acorn "^5.0.0" + +acorn-jsx@^3.0.0: + version "3.0.1" + resolved "https://registry.yarnpkg.com/acorn-jsx/-/acorn-jsx-3.0.1.tgz#afdf9488fb1ecefc8348f6fb22f464e32a58b36b" + dependencies: + acorn "^3.0.4" + +acorn@^3.0.4, acorn@^3.1.0, acorn@~3.3.0: + version "3.3.0" + resolved "https://registry.yarnpkg.com/acorn/-/acorn-3.3.0.tgz#45e37fb39e8da3f25baee3ff5369e2bb5f22017a" + +acorn@^4.0.4, acorn@~4.0.2: + version "4.0.13" + resolved "https://registry.yarnpkg.com/acorn/-/acorn-4.0.13.tgz#105495ae5361d697bd195c825192e1ad7f253787" + 
+acorn@^5.0.0, acorn@^5.2.1, acorn@^5.3.0: + version "5.3.0" + resolved "https://registry.yarnpkg.com/acorn/-/acorn-5.3.0.tgz#7446d39459c54fb49a80e6ee6478149b940ec822" + +agent-base@2, agent-base@^2.1.1: + version "2.1.1" + resolved "https://registry.yarnpkg.com/agent-base/-/agent-base-2.1.1.tgz#d6de10d5af6132d5bd692427d46fc538539094c7" + dependencies: + extend "~3.0.0" + semver "~5.0.1" + +agent-base@^4.1.0, agent-base@^4.2.0: + version "4.2.0" + resolved "https://registry.yarnpkg.com/agent-base/-/agent-base-4.2.0.tgz#9838b5c3392b962bad031e6a4c5e1024abec45ce" + dependencies: + es6-promisify "^5.0.0" + +ajv-keywords@^2.1.0: + version "2.1.1" + resolved "https://registry.yarnpkg.com/ajv-keywords/-/ajv-keywords-2.1.1.tgz#617997fc5f60576894c435f940d819e135b80762" + +ajv@^4.9.1: + version "4.11.8" + resolved "https://registry.yarnpkg.com/ajv/-/ajv-4.11.8.tgz#82ffb02b29e662ae53bdc20af15947706739c536" + dependencies: + co "^4.6.0" + json-stable-stringify "^1.0.1" + +ajv@^5.1.0, ajv@^5.2.3, ajv@^5.3.0: + version "5.5.2" + resolved "https://registry.yarnpkg.com/ajv/-/ajv-5.5.2.tgz#73b5eeca3fab653e3d3f9422b341ad42205dc965" + dependencies: + co "^4.6.0" + fast-deep-equal "^1.0.0" + fast-json-stable-stringify "^2.0.0" + json-schema-traverse "^0.3.0" + +ajv@^6.1.1: + version "6.1.1" + resolved "https://registry.yarnpkg.com/ajv/-/ajv-6.1.1.tgz#978d597fbc2b7d0e5a5c3ddeb149a682f2abfa0e" + dependencies: + fast-deep-equal "^1.0.0" + fast-json-stable-stringify "^2.0.0" + json-schema-traverse "^0.3.0" + +align-text@^0.1.1, align-text@^0.1.3: + version "0.1.4" + resolved "https://registry.yarnpkg.com/align-text/-/align-text-0.1.4.tgz#0cd90a561093f35d0a99256c22b7069433fad117" + dependencies: + kind-of "^3.0.2" + longest "^1.0.1" + repeat-string "^1.5.2" + +amdefine@>=0.0.4: + version "1.0.1" + resolved "https://registry.yarnpkg.com/amdefine/-/amdefine-1.0.1.tgz#4a5282ac164729e93619bcfd3ad151f817ce91f5" + +ansi-escapes@^1.1.0: + version "1.4.0" + resolved 
"https://registry.yarnpkg.com/ansi-escapes/-/ansi-escapes-1.4.0.tgz#d3a8a83b319aa67793662b13e761c7911422306e" + +ansi-escapes@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/ansi-escapes/-/ansi-escapes-3.0.0.tgz#ec3e8b4e9f8064fc02c3ac9b65f1c275bda8ef92" + +ansi-regex@^2.0.0: + version "2.1.1" + resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-2.1.1.tgz#c3b33ab5ee360d86e0e628f0468ae7ef27d654df" + +ansi-regex@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-3.0.0.tgz#ed0317c322064f79466c02966bddb605ab37d998" + +ansi-styles@^2.2.1: + version "2.2.1" + resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-2.2.1.tgz#b432dd3358b634cf75e1e4664368240533c1ddbe" + +ansi-styles@^3.1.0, ansi-styles@^3.2.0: + version "3.2.0" + resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-3.2.0.tgz#c159b8d5be0f9e5a6f346dab94f16ce022161b88" + dependencies: + color-convert "^1.9.0" + +any-promise@^1.0.0, any-promise@^1.3.0: + version "1.3.0" + resolved "https://registry.yarnpkg.com/any-promise/-/any-promise-1.3.0.tgz#abc6afeedcea52e809cdc0376aed3ce39635d17f" + +anymatch@^1.3.0: + version "1.3.2" + resolved "https://registry.yarnpkg.com/anymatch/-/anymatch-1.3.2.tgz#553dcb8f91e3c889845dfdba34c77721b90b9d7a" + dependencies: + micromatch "^2.1.5" + normalize-path "^2.0.0" + +app-conf@^0.5.0: + version "0.5.0" + resolved "https://registry.yarnpkg.com/app-conf/-/app-conf-0.5.0.tgz#2f8373b95972fda537857ef100dcd10268be9032" + dependencies: + bluebird "^3.4.0" + debug "^2.6.8" + glob "^7.1.2" + lodash "^4.13.1" + make-error "^1.3.0" + xdg-basedir "^3.0.0" + +append-transform@^0.4.0: + version "0.4.0" + resolved "https://registry.yarnpkg.com/append-transform/-/append-transform-0.4.0.tgz#d76ebf8ca94d276e247a36bad44a4b74ab611991" + dependencies: + default-require-extensions "^1.0.0" + +aproba@^1.0.3: + version "1.2.0" + resolved 
"https://registry.yarnpkg.com/aproba/-/aproba-1.2.0.tgz#6802e6264efd18c790a1b0d517f0f2627bf2c94a" + +archiver-utils@^1.3.0: + version "1.3.0" + resolved "https://registry.yarnpkg.com/archiver-utils/-/archiver-utils-1.3.0.tgz#e50b4c09c70bf3d680e32ff1b7994e9f9d895174" + dependencies: + glob "^7.0.0" + graceful-fs "^4.1.0" + lazystream "^1.0.0" + lodash "^4.8.0" + normalize-path "^2.0.0" + readable-stream "^2.0.0" + +archiver@^2.1.0: + version "2.1.1" + resolved "https://registry.yarnpkg.com/archiver/-/archiver-2.1.1.tgz#ff662b4a78201494a3ee544d3a33fe7496509ebc" + dependencies: + archiver-utils "^1.3.0" + async "^2.0.0" + buffer-crc32 "^0.2.1" + glob "^7.0.0" + lodash "^4.8.0" + readable-stream "^2.0.0" + tar-stream "^1.5.0" + zip-stream "^1.2.0" + +are-we-there-yet@~1.1.2: + version "1.1.4" + resolved "https://registry.yarnpkg.com/are-we-there-yet/-/are-we-there-yet-1.1.4.tgz#bb5dca382bb94f05e15194373d16fd3ba1ca110d" + dependencies: + delegates "^1.0.0" + readable-stream "^2.0.6" + +argon2@^0.16.1: + version "0.16.2" + resolved "https://registry.yarnpkg.com/argon2/-/argon2-0.16.2.tgz#e9aac43ea67b7288dc0128325c55c6eb5cec0e7e" + dependencies: + any-promise "^1.3.0" + bindings "^1.2.1" + nan "^2.4.0" + +argparse@^1.0.7: + version "1.0.9" + resolved "https://registry.yarnpkg.com/argparse/-/argparse-1.0.9.tgz#73d83bc263f86e97f8cc4f6bae1b0e90a7d22c86" + dependencies: + sprintf-js "~1.0.2" + +arr-diff@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/arr-diff/-/arr-diff-2.0.0.tgz#8f3b827f955a8bd669697e4a4256ac3ceae356cf" + dependencies: + arr-flatten "^1.0.1" + +arr-diff@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/arr-diff/-/arr-diff-4.0.0.tgz#d6461074febfec71e7e15235761a329a5dc7c520" + +arr-flatten@^1.0.1, arr-flatten@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/arr-flatten/-/arr-flatten-1.1.0.tgz#36048bbff4e7b47e136644316c99669ea5ae91f1" + +arr-union@^3.1.0: + version "3.1.0" + resolved 
"https://registry.yarnpkg.com/arr-union/-/arr-union-3.1.0.tgz#e39b09aea9def866a8f206e288af63919bae39c4" + +array-equal@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/array-equal/-/array-equal-1.0.0.tgz#8c2a5ef2472fd9ea742b04c77a75093ba2757c93" + +array-flatten@1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/array-flatten/-/array-flatten-1.1.1.tgz#9a5f699051b1e7073328f2a008968b64ea2955d2" + +array-union@^1.0.1: + version "1.0.2" + resolved "https://registry.yarnpkg.com/array-union/-/array-union-1.0.2.tgz#9a34410e4f4e3da23dea375be5be70f24778ec39" + dependencies: + array-uniq "^1.0.1" + +array-uniq@^1.0.1: + version "1.0.3" + resolved "https://registry.yarnpkg.com/array-uniq/-/array-uniq-1.0.3.tgz#af6ac877a25cc7f74e058894753858dfdb24fdb6" + +array-unique@^0.2.1: + version "0.2.1" + resolved "https://registry.yarnpkg.com/array-unique/-/array-unique-0.2.1.tgz#a1d97ccafcbc2625cc70fadceb36a50c58b01a53" + +array-unique@^0.3.2: + version "0.3.2" + resolved "https://registry.yarnpkg.com/array-unique/-/array-unique-0.3.2.tgz#a894b75d4bc4f6cd679ef3244a9fd8f46ae2d428" + +arrify@^1.0.0, arrify@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/arrify/-/arrify-1.0.1.tgz#898508da2226f380df904728456849c1501a4b0d" + +asap@~2.0.3: + version "2.0.6" + resolved "https://registry.yarnpkg.com/asap/-/asap-2.0.6.tgz#e50347611d7e690943208bbdafebcbc2fb866d46" + +asn1@~0.2.3: + version "0.2.3" + resolved "https://registry.yarnpkg.com/asn1/-/asn1-0.2.3.tgz#dac8787713c9966849fc8180777ebe9c1ddf3b86" + +assert-plus@1.0.0, assert-plus@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/assert-plus/-/assert-plus-1.0.0.tgz#f12e0f3c5d77b0b1cdd9146942e4e96c1e4dd525" + +assert-plus@^0.2.0: + version "0.2.0" + resolved "https://registry.yarnpkg.com/assert-plus/-/assert-plus-0.2.0.tgz#d74e1b87e7affc0db8aadb7021f3fe48101ab234" + +assign-symbols@^1.0.0: + version "1.0.0" + resolved 
"https://registry.yarnpkg.com/assign-symbols/-/assign-symbols-1.0.0.tgz#59667f41fadd4f20ccbc2bb96b8d4f7f78ec0367" + +ast-types@0.x.x: + version "0.10.1" + resolved "https://registry.yarnpkg.com/ast-types/-/ast-types-0.10.1.tgz#f52fca9715579a14f841d67d7f8d25432ab6a3dd" + +astral-regex@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/astral-regex/-/astral-regex-1.0.0.tgz#6c8c3fb827dd43ee3918f27b82782ab7658a6fd9" + +async-each@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/async-each/-/async-each-1.0.1.tgz#19d386a1d9edc6e7c1c85d388aedbcc56d33602d" + +async-limiter@~1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/async-limiter/-/async-limiter-1.0.0.tgz#78faed8c3d074ab81f22b4e985d79e8738f720f8" + +async@^1.4.0, async@^1.5.0: + version "1.5.2" + resolved "https://registry.yarnpkg.com/async/-/async-1.5.2.tgz#ec6a61ae56480c0c3cb241c95618e20892f9672a" + +async@^2.0.0, async@^2.1.4: + version "2.6.0" + resolved "https://registry.yarnpkg.com/async/-/async-2.6.0.tgz#61a29abb6fcc026fea77e56d1c6ec53a795951f4" + dependencies: + lodash "^4.14.0" + +asynckit@^0.4.0: + version "0.4.0" + resolved "https://registry.yarnpkg.com/asynckit/-/asynckit-0.4.0.tgz#c79ed97f7f34cb8f2ba1bc9790bcc366474b4b79" + +atob@^2.0.0: + version "2.0.3" + resolved "https://registry.yarnpkg.com/atob/-/atob-2.0.3.tgz#19c7a760473774468f20b2d2d03372ad7d4cbf5d" + +aws-sign2@~0.6.0: + version "0.6.0" + resolved "https://registry.yarnpkg.com/aws-sign2/-/aws-sign2-0.6.0.tgz#14342dd38dbcc94d0e5b87d763cd63612c0e794f" + +aws-sign2@~0.7.0: + version "0.7.0" + resolved "https://registry.yarnpkg.com/aws-sign2/-/aws-sign2-0.7.0.tgz#b46e890934a9591f2d2f6f86d7e6a9f1b3fe76a8" + +aws4@^1.2.1, aws4@^1.6.0: + version "1.6.0" + resolved "https://registry.yarnpkg.com/aws4/-/aws4-1.6.0.tgz#83ef5ca860b2b32e4a0deedee8c771b9db57471e" + +babel-cli@^6.26.0: + version "6.26.0" + resolved 
"https://registry.yarnpkg.com/babel-cli/-/babel-cli-6.26.0.tgz#502ab54874d7db88ad00b887a06383ce03d002f1" + dependencies: + babel-core "^6.26.0" + babel-polyfill "^6.26.0" + babel-register "^6.26.0" + babel-runtime "^6.26.0" + commander "^2.11.0" + convert-source-map "^1.5.0" + fs-readdir-recursive "^1.0.0" + glob "^7.1.2" + lodash "^4.17.4" + output-file-sync "^1.1.2" + path-is-absolute "^1.0.1" + slash "^1.0.0" + source-map "^0.5.6" + v8flags "^2.1.1" + optionalDependencies: + chokidar "^1.6.1" + +babel-code-frame@^6.22.0, babel-code-frame@^6.26.0: + version "6.26.0" + resolved "https://registry.yarnpkg.com/babel-code-frame/-/babel-code-frame-6.26.0.tgz#63fd43f7dc1e3bb7ce35947db8fe369a3f58c74b" + dependencies: + chalk "^1.1.3" + esutils "^2.0.2" + js-tokens "^3.0.2" + +babel-core@^6.0.0, babel-core@^6.26.0: + version "6.26.0" + resolved "https://registry.yarnpkg.com/babel-core/-/babel-core-6.26.0.tgz#af32f78b31a6fcef119c87b0fd8d9753f03a0bb8" + dependencies: + babel-code-frame "^6.26.0" + babel-generator "^6.26.0" + babel-helpers "^6.24.1" + babel-messages "^6.23.0" + babel-register "^6.26.0" + babel-runtime "^6.26.0" + babel-template "^6.26.0" + babel-traverse "^6.26.0" + babel-types "^6.26.0" + babylon "^6.18.0" + convert-source-map "^1.5.0" + debug "^2.6.8" + json5 "^0.5.1" + lodash "^4.17.4" + minimatch "^3.0.4" + path-is-absolute "^1.0.1" + private "^0.1.7" + slash "^1.0.0" + source-map "^0.5.6" + +babel-eslint@^8.0.3: + version "8.2.1" + resolved "https://registry.yarnpkg.com/babel-eslint/-/babel-eslint-8.2.1.tgz#136888f3c109edc65376c23ebf494f36a3e03951" + dependencies: + "@babel/code-frame" "7.0.0-beta.36" + "@babel/traverse" "7.0.0-beta.36" + "@babel/types" "7.0.0-beta.36" + babylon "7.0.0-beta.36" + eslint-scope "~3.7.1" + eslint-visitor-keys "^1.0.0" + +babel-generator@^6.18.0, babel-generator@^6.26.0: + version "6.26.0" + resolved "https://registry.yarnpkg.com/babel-generator/-/babel-generator-6.26.0.tgz#ac1ae20070b79f6e3ca1d3269613053774f20dc5" + 
dependencies: + babel-messages "^6.23.0" + babel-runtime "^6.26.0" + babel-types "^6.26.0" + detect-indent "^4.0.0" + jsesc "^1.3.0" + lodash "^4.17.4" + source-map "^0.5.6" + trim-right "^1.0.1" + +babel-helper-bindify-decorators@^6.24.1: + version "6.24.1" + resolved "https://registry.yarnpkg.com/babel-helper-bindify-decorators/-/babel-helper-bindify-decorators-6.24.1.tgz#14c19e5f142d7b47f19a52431e52b1ccbc40a330" + dependencies: + babel-runtime "^6.22.0" + babel-traverse "^6.24.1" + babel-types "^6.24.1" + +babel-helper-builder-binary-assignment-operator-visitor@^6.24.1: + version "6.24.1" + resolved "https://registry.yarnpkg.com/babel-helper-builder-binary-assignment-operator-visitor/-/babel-helper-builder-binary-assignment-operator-visitor-6.24.1.tgz#cce4517ada356f4220bcae8a02c2b346f9a56664" + dependencies: + babel-helper-explode-assignable-expression "^6.24.1" + babel-runtime "^6.22.0" + babel-types "^6.24.1" + +babel-helper-call-delegate@^6.24.1: + version "6.24.1" + resolved "https://registry.yarnpkg.com/babel-helper-call-delegate/-/babel-helper-call-delegate-6.24.1.tgz#ece6aacddc76e41c3461f88bfc575bd0daa2df8d" + dependencies: + babel-helper-hoist-variables "^6.24.1" + babel-runtime "^6.22.0" + babel-traverse "^6.24.1" + babel-types "^6.24.1" + +babel-helper-define-map@^6.24.1: + version "6.26.0" + resolved "https://registry.yarnpkg.com/babel-helper-define-map/-/babel-helper-define-map-6.26.0.tgz#a5f56dab41a25f97ecb498c7ebaca9819f95be5f" + dependencies: + babel-helper-function-name "^6.24.1" + babel-runtime "^6.26.0" + babel-types "^6.26.0" + lodash "^4.17.4" + +babel-helper-explode-assignable-expression@^6.24.1: + version "6.24.1" + resolved "https://registry.yarnpkg.com/babel-helper-explode-assignable-expression/-/babel-helper-explode-assignable-expression-6.24.1.tgz#f25b82cf7dc10433c55f70592d5746400ac22caa" + dependencies: + babel-runtime "^6.22.0" + babel-traverse "^6.24.1" + babel-types "^6.24.1" + +babel-helper-explode-class@^6.24.1: + version "6.24.1" 
+ resolved "https://registry.yarnpkg.com/babel-helper-explode-class/-/babel-helper-explode-class-6.24.1.tgz#7dc2a3910dee007056e1e31d640ced3d54eaa9eb" + dependencies: + babel-helper-bindify-decorators "^6.24.1" + babel-runtime "^6.22.0" + babel-traverse "^6.24.1" + babel-types "^6.24.1" + +babel-helper-function-name@^6.24.1: + version "6.24.1" + resolved "https://registry.yarnpkg.com/babel-helper-function-name/-/babel-helper-function-name-6.24.1.tgz#d3475b8c03ed98242a25b48351ab18399d3580a9" + dependencies: + babel-helper-get-function-arity "^6.24.1" + babel-runtime "^6.22.0" + babel-template "^6.24.1" + babel-traverse "^6.24.1" + babel-types "^6.24.1" + +babel-helper-get-function-arity@^6.24.1: + version "6.24.1" + resolved "https://registry.yarnpkg.com/babel-helper-get-function-arity/-/babel-helper-get-function-arity-6.24.1.tgz#8f7782aa93407c41d3aa50908f89b031b1b6853d" + dependencies: + babel-runtime "^6.22.0" + babel-types "^6.24.1" + +babel-helper-hoist-variables@^6.24.1: + version "6.24.1" + resolved "https://registry.yarnpkg.com/babel-helper-hoist-variables/-/babel-helper-hoist-variables-6.24.1.tgz#1ecb27689c9d25513eadbc9914a73f5408be7a76" + dependencies: + babel-runtime "^6.22.0" + babel-types "^6.24.1" + +babel-helper-module-imports@^7.0.0-beta.3: + version "7.0.0-beta.3" + resolved "https://registry.yarnpkg.com/babel-helper-module-imports/-/babel-helper-module-imports-7.0.0-beta.3.tgz#e15764e3af9c8e11810c09f78f498a2bdc71585a" + dependencies: + babel-types "7.0.0-beta.3" + lodash "^4.2.0" + +babel-helper-optimise-call-expression@^6.24.1: + version "6.24.1" + resolved "https://registry.yarnpkg.com/babel-helper-optimise-call-expression/-/babel-helper-optimise-call-expression-6.24.1.tgz#f7a13427ba9f73f8f4fa993c54a97882d1244257" + dependencies: + babel-runtime "^6.22.0" + babel-types "^6.24.1" + +babel-helper-regex@^6.24.1: + version "6.26.0" + resolved 
"https://registry.yarnpkg.com/babel-helper-regex/-/babel-helper-regex-6.26.0.tgz#325c59f902f82f24b74faceed0363954f6495e72" + dependencies: + babel-runtime "^6.26.0" + babel-types "^6.26.0" + lodash "^4.17.4" + +babel-helper-remap-async-to-generator@^6.24.1: + version "6.24.1" + resolved "https://registry.yarnpkg.com/babel-helper-remap-async-to-generator/-/babel-helper-remap-async-to-generator-6.24.1.tgz#5ec581827ad723fecdd381f1c928390676e4551b" + dependencies: + babel-helper-function-name "^6.24.1" + babel-runtime "^6.22.0" + babel-template "^6.24.1" + babel-traverse "^6.24.1" + babel-types "^6.24.1" + +babel-helper-replace-supers@^6.24.1: + version "6.24.1" + resolved "https://registry.yarnpkg.com/babel-helper-replace-supers/-/babel-helper-replace-supers-6.24.1.tgz#bf6dbfe43938d17369a213ca8a8bf74b6a90ab1a" + dependencies: + babel-helper-optimise-call-expression "^6.24.1" + babel-messages "^6.23.0" + babel-runtime "^6.22.0" + babel-template "^6.24.1" + babel-traverse "^6.24.1" + babel-types "^6.24.1" + +babel-helpers@^6.24.1: + version "6.24.1" + resolved "https://registry.yarnpkg.com/babel-helpers/-/babel-helpers-6.24.1.tgz#3471de9caec388e5c850e597e58a26ddf37602b2" + dependencies: + babel-runtime "^6.22.0" + babel-template "^6.24.1" + +babel-jest@^22.1.0: + version "22.1.0" + resolved "https://registry.yarnpkg.com/babel-jest/-/babel-jest-22.1.0.tgz#7fae6f655fffe77e818a8c2868c754a42463fdfd" + dependencies: + babel-plugin-istanbul "^4.1.5" + babel-preset-jest "^22.1.0" + +babel-messages@^6.23.0: + version "6.23.0" + resolved "https://registry.yarnpkg.com/babel-messages/-/babel-messages-6.23.0.tgz#f3cdf4703858035b2a2951c6ec5edf6c62f2630e" + dependencies: + babel-runtime "^6.22.0" + +babel-plugin-check-es2015-constants@^6.22.0: + version "6.22.0" + resolved "https://registry.yarnpkg.com/babel-plugin-check-es2015-constants/-/babel-plugin-check-es2015-constants-6.22.0.tgz#35157b101426fd2ffd3da3f75c7d1e91835bbf8a" + dependencies: + babel-runtime "^6.22.0" + 
+babel-plugin-istanbul@^4.1.5: + version "4.1.5" + resolved "https://registry.yarnpkg.com/babel-plugin-istanbul/-/babel-plugin-istanbul-4.1.5.tgz#6760cdd977f411d3e175bb064f2bc327d99b2b6e" + dependencies: + find-up "^2.1.0" + istanbul-lib-instrument "^1.7.5" + test-exclude "^4.1.1" + +babel-plugin-jest-hoist@^22.1.0: + version "22.1.0" + resolved "https://registry.yarnpkg.com/babel-plugin-jest-hoist/-/babel-plugin-jest-hoist-22.1.0.tgz#c1281dd7887d77a1711dc760468c3b8285dde9ee" + +babel-plugin-lodash@^3.3.2: + version "3.3.2" + resolved "https://registry.yarnpkg.com/babel-plugin-lodash/-/babel-plugin-lodash-3.3.2.tgz#da3a5b49ba27447f54463f6c4fa81396ccdd463f" + dependencies: + babel-helper-module-imports "^7.0.0-beta.3" + babel-types "^6.26.0" + glob "^7.1.1" + lodash "^4.17.4" + require-package-name "^2.0.1" + +babel-plugin-syntax-async-functions@^6.8.0: + version "6.13.0" + resolved "https://registry.yarnpkg.com/babel-plugin-syntax-async-functions/-/babel-plugin-syntax-async-functions-6.13.0.tgz#cad9cad1191b5ad634bf30ae0872391e0647be95" + +babel-plugin-syntax-async-generators@^6.5.0: + version "6.13.0" + resolved "https://registry.yarnpkg.com/babel-plugin-syntax-async-generators/-/babel-plugin-syntax-async-generators-6.13.0.tgz#6bc963ebb16eccbae6b92b596eb7f35c342a8b9a" + +babel-plugin-syntax-class-constructor-call@^6.18.0: + version "6.18.0" + resolved "https://registry.yarnpkg.com/babel-plugin-syntax-class-constructor-call/-/babel-plugin-syntax-class-constructor-call-6.18.0.tgz#9cb9d39fe43c8600bec8146456ddcbd4e1a76416" + +babel-plugin-syntax-class-properties@^6.8.0: + version "6.13.0" + resolved "https://registry.yarnpkg.com/babel-plugin-syntax-class-properties/-/babel-plugin-syntax-class-properties-6.13.0.tgz#d7eb23b79a317f8543962c505b827c7d6cac27de" + +babel-plugin-syntax-decorators@^6.1.18, babel-plugin-syntax-decorators@^6.13.0: + version "6.13.0" + resolved 
"https://registry.yarnpkg.com/babel-plugin-syntax-decorators/-/babel-plugin-syntax-decorators-6.13.0.tgz#312563b4dbde3cc806cee3e416cceeaddd11ac0b" + +babel-plugin-syntax-do-expressions@^6.8.0: + version "6.13.0" + resolved "https://registry.yarnpkg.com/babel-plugin-syntax-do-expressions/-/babel-plugin-syntax-do-expressions-6.13.0.tgz#5747756139aa26d390d09410b03744ba07e4796d" + +babel-plugin-syntax-dynamic-import@^6.18.0: + version "6.18.0" + resolved "https://registry.yarnpkg.com/babel-plugin-syntax-dynamic-import/-/babel-plugin-syntax-dynamic-import-6.18.0.tgz#8d6a26229c83745a9982a441051572caa179b1da" + +babel-plugin-syntax-exponentiation-operator@^6.8.0: + version "6.13.0" + resolved "https://registry.yarnpkg.com/babel-plugin-syntax-exponentiation-operator/-/babel-plugin-syntax-exponentiation-operator-6.13.0.tgz#9ee7e8337290da95288201a6a57f4170317830de" + +babel-plugin-syntax-export-extensions@^6.8.0: + version "6.13.0" + resolved "https://registry.yarnpkg.com/babel-plugin-syntax-export-extensions/-/babel-plugin-syntax-export-extensions-6.13.0.tgz#70a1484f0f9089a4e84ad44bac353c95b9b12721" + +babel-plugin-syntax-function-bind@^6.8.0: + version "6.13.0" + resolved "https://registry.yarnpkg.com/babel-plugin-syntax-function-bind/-/babel-plugin-syntax-function-bind-6.13.0.tgz#48c495f177bdf31a981e732f55adc0bdd2601f46" + +babel-plugin-syntax-object-rest-spread@^6.13.0, babel-plugin-syntax-object-rest-spread@^6.8.0: + version "6.13.0" + resolved "https://registry.yarnpkg.com/babel-plugin-syntax-object-rest-spread/-/babel-plugin-syntax-object-rest-spread-6.13.0.tgz#fd6536f2bce13836ffa3a5458c4903a597bb3bf5" + +babel-plugin-syntax-trailing-function-commas@^6.22.0: + version "6.22.0" + resolved "https://registry.yarnpkg.com/babel-plugin-syntax-trailing-function-commas/-/babel-plugin-syntax-trailing-function-commas-6.22.0.tgz#ba0360937f8d06e40180a43fe0d5616fff532cf3" + +babel-plugin-transform-async-generator-functions@^6.24.1: + version "6.24.1" + resolved 
"https://registry.yarnpkg.com/babel-plugin-transform-async-generator-functions/-/babel-plugin-transform-async-generator-functions-6.24.1.tgz#f058900145fd3e9907a6ddf28da59f215258a5db" + dependencies: + babel-helper-remap-async-to-generator "^6.24.1" + babel-plugin-syntax-async-generators "^6.5.0" + babel-runtime "^6.22.0" + +babel-plugin-transform-async-to-generator@^6.22.0, babel-plugin-transform-async-to-generator@^6.24.1: + version "6.24.1" + resolved "https://registry.yarnpkg.com/babel-plugin-transform-async-to-generator/-/babel-plugin-transform-async-to-generator-6.24.1.tgz#6536e378aff6cb1d5517ac0e40eb3e9fc8d08761" + dependencies: + babel-helper-remap-async-to-generator "^6.24.1" + babel-plugin-syntax-async-functions "^6.8.0" + babel-runtime "^6.22.0" + +babel-plugin-transform-class-constructor-call@^6.24.1: + version "6.24.1" + resolved "https://registry.yarnpkg.com/babel-plugin-transform-class-constructor-call/-/babel-plugin-transform-class-constructor-call-6.24.1.tgz#80dc285505ac067dcb8d6c65e2f6f11ab7765ef9" + dependencies: + babel-plugin-syntax-class-constructor-call "^6.18.0" + babel-runtime "^6.22.0" + babel-template "^6.24.1" + +babel-plugin-transform-class-properties@^6.24.1: + version "6.24.1" + resolved "https://registry.yarnpkg.com/babel-plugin-transform-class-properties/-/babel-plugin-transform-class-properties-6.24.1.tgz#6a79763ea61d33d36f37b611aa9def81a81b46ac" + dependencies: + babel-helper-function-name "^6.24.1" + babel-plugin-syntax-class-properties "^6.8.0" + babel-runtime "^6.22.0" + babel-template "^6.24.1" + +babel-plugin-transform-decorators-legacy@^1.3.4: + version "1.3.4" + resolved "https://registry.yarnpkg.com/babel-plugin-transform-decorators-legacy/-/babel-plugin-transform-decorators-legacy-1.3.4.tgz#741b58f6c5bce9e6027e0882d9c994f04f366925" + dependencies: + babel-plugin-syntax-decorators "^6.1.18" + babel-runtime "^6.2.0" + babel-template "^6.3.0" + +babel-plugin-transform-decorators@^6.24.1: + version "6.24.1" + resolved 
"https://registry.yarnpkg.com/babel-plugin-transform-decorators/-/babel-plugin-transform-decorators-6.24.1.tgz#788013d8f8c6b5222bdf7b344390dfd77569e24d" + dependencies: + babel-helper-explode-class "^6.24.1" + babel-plugin-syntax-decorators "^6.13.0" + babel-runtime "^6.22.0" + babel-template "^6.24.1" + babel-types "^6.24.1" + +babel-plugin-transform-do-expressions@^6.22.0: + version "6.22.0" + resolved "https://registry.yarnpkg.com/babel-plugin-transform-do-expressions/-/babel-plugin-transform-do-expressions-6.22.0.tgz#28ccaf92812d949c2cd1281f690c8fdc468ae9bb" + dependencies: + babel-plugin-syntax-do-expressions "^6.8.0" + babel-runtime "^6.22.0" + +babel-plugin-transform-es2015-arrow-functions@^6.22.0: + version "6.22.0" + resolved "https://registry.yarnpkg.com/babel-plugin-transform-es2015-arrow-functions/-/babel-plugin-transform-es2015-arrow-functions-6.22.0.tgz#452692cb711d5f79dc7f85e440ce41b9f244d221" + dependencies: + babel-runtime "^6.22.0" + +babel-plugin-transform-es2015-block-scoped-functions@^6.22.0: + version "6.22.0" + resolved "https://registry.yarnpkg.com/babel-plugin-transform-es2015-block-scoped-functions/-/babel-plugin-transform-es2015-block-scoped-functions-6.22.0.tgz#bbc51b49f964d70cb8d8e0b94e820246ce3a6141" + dependencies: + babel-runtime "^6.22.0" + +babel-plugin-transform-es2015-block-scoping@^6.23.0: + version "6.26.0" + resolved "https://registry.yarnpkg.com/babel-plugin-transform-es2015-block-scoping/-/babel-plugin-transform-es2015-block-scoping-6.26.0.tgz#d70f5299c1308d05c12f463813b0a09e73b1895f" + dependencies: + babel-runtime "^6.26.0" + babel-template "^6.26.0" + babel-traverse "^6.26.0" + babel-types "^6.26.0" + lodash "^4.17.4" + +babel-plugin-transform-es2015-classes@^6.23.0: + version "6.24.1" + resolved "https://registry.yarnpkg.com/babel-plugin-transform-es2015-classes/-/babel-plugin-transform-es2015-classes-6.24.1.tgz#5a4c58a50c9c9461e564b4b2a3bfabc97a2584db" + dependencies: + babel-helper-define-map "^6.24.1" + 
babel-helper-function-name "^6.24.1" + babel-helper-optimise-call-expression "^6.24.1" + babel-helper-replace-supers "^6.24.1" + babel-messages "^6.23.0" + babel-runtime "^6.22.0" + babel-template "^6.24.1" + babel-traverse "^6.24.1" + babel-types "^6.24.1" + +babel-plugin-transform-es2015-computed-properties@^6.22.0: + version "6.24.1" + resolved "https://registry.yarnpkg.com/babel-plugin-transform-es2015-computed-properties/-/babel-plugin-transform-es2015-computed-properties-6.24.1.tgz#6fe2a8d16895d5634f4cd999b6d3480a308159b3" + dependencies: + babel-runtime "^6.22.0" + babel-template "^6.24.1" + +babel-plugin-transform-es2015-destructuring@^6.23.0: + version "6.23.0" + resolved "https://registry.yarnpkg.com/babel-plugin-transform-es2015-destructuring/-/babel-plugin-transform-es2015-destructuring-6.23.0.tgz#997bb1f1ab967f682d2b0876fe358d60e765c56d" + dependencies: + babel-runtime "^6.22.0" + +babel-plugin-transform-es2015-duplicate-keys@^6.22.0: + version "6.24.1" + resolved "https://registry.yarnpkg.com/babel-plugin-transform-es2015-duplicate-keys/-/babel-plugin-transform-es2015-duplicate-keys-6.24.1.tgz#73eb3d310ca969e3ef9ec91c53741a6f1576423e" + dependencies: + babel-runtime "^6.22.0" + babel-types "^6.24.1" + +babel-plugin-transform-es2015-for-of@^6.23.0: + version "6.23.0" + resolved "https://registry.yarnpkg.com/babel-plugin-transform-es2015-for-of/-/babel-plugin-transform-es2015-for-of-6.23.0.tgz#f47c95b2b613df1d3ecc2fdb7573623c75248691" + dependencies: + babel-runtime "^6.22.0" + +babel-plugin-transform-es2015-function-name@^6.22.0: + version "6.24.1" + resolved "https://registry.yarnpkg.com/babel-plugin-transform-es2015-function-name/-/babel-plugin-transform-es2015-function-name-6.24.1.tgz#834c89853bc36b1af0f3a4c5dbaa94fd8eacaa8b" + dependencies: + babel-helper-function-name "^6.24.1" + babel-runtime "^6.22.0" + babel-types "^6.24.1" + +babel-plugin-transform-es2015-literals@^6.22.0: + version "6.22.0" + resolved 
"https://registry.yarnpkg.com/babel-plugin-transform-es2015-literals/-/babel-plugin-transform-es2015-literals-6.22.0.tgz#4f54a02d6cd66cf915280019a31d31925377ca2e" + dependencies: + babel-runtime "^6.22.0" + +babel-plugin-transform-es2015-modules-amd@^6.22.0, babel-plugin-transform-es2015-modules-amd@^6.24.1: + version "6.24.1" + resolved "https://registry.yarnpkg.com/babel-plugin-transform-es2015-modules-amd/-/babel-plugin-transform-es2015-modules-amd-6.24.1.tgz#3b3e54017239842d6d19c3011c4bd2f00a00d154" + dependencies: + babel-plugin-transform-es2015-modules-commonjs "^6.24.1" + babel-runtime "^6.22.0" + babel-template "^6.24.1" + +babel-plugin-transform-es2015-modules-commonjs@^6.23.0, babel-plugin-transform-es2015-modules-commonjs@^6.24.1: + version "6.26.0" + resolved "https://registry.yarnpkg.com/babel-plugin-transform-es2015-modules-commonjs/-/babel-plugin-transform-es2015-modules-commonjs-6.26.0.tgz#0d8394029b7dc6abe1a97ef181e00758dd2e5d8a" + dependencies: + babel-plugin-transform-strict-mode "^6.24.1" + babel-runtime "^6.26.0" + babel-template "^6.26.0" + babel-types "^6.26.0" + +babel-plugin-transform-es2015-modules-systemjs@^6.23.0: + version "6.24.1" + resolved "https://registry.yarnpkg.com/babel-plugin-transform-es2015-modules-systemjs/-/babel-plugin-transform-es2015-modules-systemjs-6.24.1.tgz#ff89a142b9119a906195f5f106ecf305d9407d23" + dependencies: + babel-helper-hoist-variables "^6.24.1" + babel-runtime "^6.22.0" + babel-template "^6.24.1" + +babel-plugin-transform-es2015-modules-umd@^6.23.0: + version "6.24.1" + resolved "https://registry.yarnpkg.com/babel-plugin-transform-es2015-modules-umd/-/babel-plugin-transform-es2015-modules-umd-6.24.1.tgz#ac997e6285cd18ed6176adb607d602344ad38468" + dependencies: + babel-plugin-transform-es2015-modules-amd "^6.24.1" + babel-runtime "^6.22.0" + babel-template "^6.24.1" + +babel-plugin-transform-es2015-object-super@^6.22.0: + version "6.24.1" + resolved 
"https://registry.yarnpkg.com/babel-plugin-transform-es2015-object-super/-/babel-plugin-transform-es2015-object-super-6.24.1.tgz#24cef69ae21cb83a7f8603dad021f572eb278f8d" + dependencies: + babel-helper-replace-supers "^6.24.1" + babel-runtime "^6.22.0" + +babel-plugin-transform-es2015-parameters@^6.23.0: + version "6.24.1" + resolved "https://registry.yarnpkg.com/babel-plugin-transform-es2015-parameters/-/babel-plugin-transform-es2015-parameters-6.24.1.tgz#57ac351ab49caf14a97cd13b09f66fdf0a625f2b" + dependencies: + babel-helper-call-delegate "^6.24.1" + babel-helper-get-function-arity "^6.24.1" + babel-runtime "^6.22.0" + babel-template "^6.24.1" + babel-traverse "^6.24.1" + babel-types "^6.24.1" + +babel-plugin-transform-es2015-shorthand-properties@^6.22.0: + version "6.24.1" + resolved "https://registry.yarnpkg.com/babel-plugin-transform-es2015-shorthand-properties/-/babel-plugin-transform-es2015-shorthand-properties-6.24.1.tgz#24f875d6721c87661bbd99a4622e51f14de38aa0" + dependencies: + babel-runtime "^6.22.0" + babel-types "^6.24.1" + +babel-plugin-transform-es2015-spread@^6.22.0: + version "6.22.0" + resolved "https://registry.yarnpkg.com/babel-plugin-transform-es2015-spread/-/babel-plugin-transform-es2015-spread-6.22.0.tgz#d6d68a99f89aedc4536c81a542e8dd9f1746f8d1" + dependencies: + babel-runtime "^6.22.0" + +babel-plugin-transform-es2015-sticky-regex@^6.22.0: + version "6.24.1" + resolved "https://registry.yarnpkg.com/babel-plugin-transform-es2015-sticky-regex/-/babel-plugin-transform-es2015-sticky-regex-6.24.1.tgz#00c1cdb1aca71112cdf0cf6126c2ed6b457ccdbc" + dependencies: + babel-helper-regex "^6.24.1" + babel-runtime "^6.22.0" + babel-types "^6.24.1" + +babel-plugin-transform-es2015-template-literals@^6.22.0: + version "6.22.0" + resolved "https://registry.yarnpkg.com/babel-plugin-transform-es2015-template-literals/-/babel-plugin-transform-es2015-template-literals-6.22.0.tgz#a84b3450f7e9f8f1f6839d6d687da84bb1236d8d" + dependencies: + babel-runtime "^6.22.0" + 
+babel-plugin-transform-es2015-typeof-symbol@^6.23.0: + version "6.23.0" + resolved "https://registry.yarnpkg.com/babel-plugin-transform-es2015-typeof-symbol/-/babel-plugin-transform-es2015-typeof-symbol-6.23.0.tgz#dec09f1cddff94b52ac73d505c84df59dcceb372" + dependencies: + babel-runtime "^6.22.0" + +babel-plugin-transform-es2015-unicode-regex@^6.22.0: + version "6.24.1" + resolved "https://registry.yarnpkg.com/babel-plugin-transform-es2015-unicode-regex/-/babel-plugin-transform-es2015-unicode-regex-6.24.1.tgz#d38b12f42ea7323f729387f18a7c5ae1faeb35e9" + dependencies: + babel-helper-regex "^6.24.1" + babel-runtime "^6.22.0" + regexpu-core "^2.0.0" + +babel-plugin-transform-exponentiation-operator@^6.22.0, babel-plugin-transform-exponentiation-operator@^6.24.1: + version "6.24.1" + resolved "https://registry.yarnpkg.com/babel-plugin-transform-exponentiation-operator/-/babel-plugin-transform-exponentiation-operator-6.24.1.tgz#2ab0c9c7f3098fa48907772bb813fe41e8de3a0e" + dependencies: + babel-helper-builder-binary-assignment-operator-visitor "^6.24.1" + babel-plugin-syntax-exponentiation-operator "^6.8.0" + babel-runtime "^6.22.0" + +babel-plugin-transform-export-extensions@^6.22.0: + version "6.22.0" + resolved "https://registry.yarnpkg.com/babel-plugin-transform-export-extensions/-/babel-plugin-transform-export-extensions-6.22.0.tgz#53738b47e75e8218589eea946cbbd39109bbe653" + dependencies: + babel-plugin-syntax-export-extensions "^6.8.0" + babel-runtime "^6.22.0" + +babel-plugin-transform-function-bind@^6.22.0: + version "6.22.0" + resolved "https://registry.yarnpkg.com/babel-plugin-transform-function-bind/-/babel-plugin-transform-function-bind-6.22.0.tgz#c6fb8e96ac296a310b8cf8ea401462407ddf6a97" + dependencies: + babel-plugin-syntax-function-bind "^6.8.0" + babel-runtime "^6.22.0" + +babel-plugin-transform-object-rest-spread@^6.22.0: + version "6.26.0" + resolved 
"https://registry.yarnpkg.com/babel-plugin-transform-object-rest-spread/-/babel-plugin-transform-object-rest-spread-6.26.0.tgz#0f36692d50fef6b7e2d4b3ac1478137a963b7b06" + dependencies: + babel-plugin-syntax-object-rest-spread "^6.8.0" + babel-runtime "^6.26.0" + +babel-plugin-transform-regenerator@^6.22.0: + version "6.26.0" + resolved "https://registry.yarnpkg.com/babel-plugin-transform-regenerator/-/babel-plugin-transform-regenerator-6.26.0.tgz#e0703696fbde27f0a3efcacf8b4dca2f7b3a8f2f" + dependencies: + regenerator-transform "^0.10.0" + +babel-plugin-transform-runtime@^6.23.0: + version "6.23.0" + resolved "https://registry.yarnpkg.com/babel-plugin-transform-runtime/-/babel-plugin-transform-runtime-6.23.0.tgz#88490d446502ea9b8e7efb0fe09ec4d99479b1ee" + dependencies: + babel-runtime "^6.22.0" + +babel-plugin-transform-strict-mode@^6.24.1: + version "6.24.1" + resolved "https://registry.yarnpkg.com/babel-plugin-transform-strict-mode/-/babel-plugin-transform-strict-mode-6.24.1.tgz#d5faf7aa578a65bbe591cf5edae04a0c67020758" + dependencies: + babel-runtime "^6.22.0" + babel-types "^6.24.1" + +babel-polyfill@6.23.0: + version "6.23.0" + resolved "https://registry.yarnpkg.com/babel-polyfill/-/babel-polyfill-6.23.0.tgz#8364ca62df8eafb830499f699177466c3b03499d" + dependencies: + babel-runtime "^6.22.0" + core-js "^2.4.0" + regenerator-runtime "^0.10.0" + +babel-polyfill@^6.23.0, babel-polyfill@^6.26.0: + version "6.26.0" + resolved "https://registry.yarnpkg.com/babel-polyfill/-/babel-polyfill-6.26.0.tgz#379937abc67d7895970adc621f284cd966cf2153" + dependencies: + babel-runtime "^6.26.0" + core-js "^2.5.0" + regenerator-runtime "^0.10.5" + +babel-preset-env@^1.6.1: + version "1.6.1" + resolved "https://registry.yarnpkg.com/babel-preset-env/-/babel-preset-env-1.6.1.tgz#a18b564cc9b9afdf4aae57ae3c1b0d99188e6f48" + dependencies: + babel-plugin-check-es2015-constants "^6.22.0" + babel-plugin-syntax-trailing-function-commas "^6.22.0" + babel-plugin-transform-async-to-generator 
"^6.22.0" + babel-plugin-transform-es2015-arrow-functions "^6.22.0" + babel-plugin-transform-es2015-block-scoped-functions "^6.22.0" + babel-plugin-transform-es2015-block-scoping "^6.23.0" + babel-plugin-transform-es2015-classes "^6.23.0" + babel-plugin-transform-es2015-computed-properties "^6.22.0" + babel-plugin-transform-es2015-destructuring "^6.23.0" + babel-plugin-transform-es2015-duplicate-keys "^6.22.0" + babel-plugin-transform-es2015-for-of "^6.23.0" + babel-plugin-transform-es2015-function-name "^6.22.0" + babel-plugin-transform-es2015-literals "^6.22.0" + babel-plugin-transform-es2015-modules-amd "^6.22.0" + babel-plugin-transform-es2015-modules-commonjs "^6.23.0" + babel-plugin-transform-es2015-modules-systemjs "^6.23.0" + babel-plugin-transform-es2015-modules-umd "^6.23.0" + babel-plugin-transform-es2015-object-super "^6.22.0" + babel-plugin-transform-es2015-parameters "^6.23.0" + babel-plugin-transform-es2015-shorthand-properties "^6.22.0" + babel-plugin-transform-es2015-spread "^6.22.0" + babel-plugin-transform-es2015-sticky-regex "^6.22.0" + babel-plugin-transform-es2015-template-literals "^6.22.0" + babel-plugin-transform-es2015-typeof-symbol "^6.23.0" + babel-plugin-transform-es2015-unicode-regex "^6.22.0" + babel-plugin-transform-exponentiation-operator "^6.22.0" + babel-plugin-transform-regenerator "^6.22.0" + browserslist "^2.1.2" + invariant "^2.2.2" + semver "^5.3.0" + +babel-preset-jest@^22.1.0: + version "22.1.0" + resolved "https://registry.yarnpkg.com/babel-preset-jest/-/babel-preset-jest-22.1.0.tgz#ff4e704102f9642765e2254226050561d8942ec9" + dependencies: + babel-plugin-jest-hoist "^22.1.0" + babel-plugin-syntax-object-rest-spread "^6.13.0" + +babel-preset-stage-0@^6.24.1: + version "6.24.1" + resolved "https://registry.yarnpkg.com/babel-preset-stage-0/-/babel-preset-stage-0-6.24.1.tgz#5642d15042f91384d7e5af8bc88b1db95b039e6a" + dependencies: + babel-plugin-transform-do-expressions "^6.22.0" + babel-plugin-transform-function-bind 
"^6.22.0" + babel-preset-stage-1 "^6.24.1" + +babel-preset-stage-1@^6.24.1: + version "6.24.1" + resolved "https://registry.yarnpkg.com/babel-preset-stage-1/-/babel-preset-stage-1-6.24.1.tgz#7692cd7dcd6849907e6ae4a0a85589cfb9e2bfb0" + dependencies: + babel-plugin-transform-class-constructor-call "^6.24.1" + babel-plugin-transform-export-extensions "^6.22.0" + babel-preset-stage-2 "^6.24.1" + +babel-preset-stage-2@^6.24.1: + version "6.24.1" + resolved "https://registry.yarnpkg.com/babel-preset-stage-2/-/babel-preset-stage-2-6.24.1.tgz#d9e2960fb3d71187f0e64eec62bc07767219bdc1" + dependencies: + babel-plugin-syntax-dynamic-import "^6.18.0" + babel-plugin-transform-class-properties "^6.24.1" + babel-plugin-transform-decorators "^6.24.1" + babel-preset-stage-3 "^6.24.1" + +babel-preset-stage-3@^6.24.1: + version "6.24.1" + resolved "https://registry.yarnpkg.com/babel-preset-stage-3/-/babel-preset-stage-3-6.24.1.tgz#836ada0a9e7a7fa37cb138fb9326f87934a48395" + dependencies: + babel-plugin-syntax-trailing-function-commas "^6.22.0" + babel-plugin-transform-async-generator-functions "^6.24.1" + babel-plugin-transform-async-to-generator "^6.24.1" + babel-plugin-transform-exponentiation-operator "^6.24.1" + babel-plugin-transform-object-rest-spread "^6.22.0" + +babel-register@^6.26.0: + version "6.26.0" + resolved "https://registry.yarnpkg.com/babel-register/-/babel-register-6.26.0.tgz#6ed021173e2fcb486d7acb45c6009a856f647071" + dependencies: + babel-core "^6.26.0" + babel-runtime "^6.26.0" + core-js "^2.5.0" + home-or-tmp "^2.0.0" + lodash "^4.17.4" + mkdirp "^0.5.1" + source-map-support "^0.4.15" + +babel-runtime@^5.8.3, babel-runtime@^5.8.34: + version "5.8.38" + resolved "https://registry.yarnpkg.com/babel-runtime/-/babel-runtime-5.8.38.tgz#1c0b02eb63312f5f087ff20450827b425c9d4c19" + dependencies: + core-js "^1.0.0" + +babel-runtime@^6.11.6, babel-runtime@^6.18.0, babel-runtime@^6.2.0, babel-runtime@^6.22.0, babel-runtime@^6.23.0, babel-runtime@^6.26.0: + version "6.26.0" 
+ resolved "https://registry.yarnpkg.com/babel-runtime/-/babel-runtime-6.26.0.tgz#965c7058668e82b55d7bfe04ff2337bc8b5647fe" + dependencies: + core-js "^2.4.0" + regenerator-runtime "^0.11.0" + +babel-template@^6.16.0, babel-template@^6.24.1, babel-template@^6.26.0, babel-template@^6.3.0: + version "6.26.0" + resolved "https://registry.yarnpkg.com/babel-template/-/babel-template-6.26.0.tgz#de03e2d16396b069f46dd9fff8521fb1a0e35e02" + dependencies: + babel-runtime "^6.26.0" + babel-traverse "^6.26.0" + babel-types "^6.26.0" + babylon "^6.18.0" + lodash "^4.17.4" + +babel-traverse@^6.18.0, babel-traverse@^6.24.1, babel-traverse@^6.26.0: + version "6.26.0" + resolved "https://registry.yarnpkg.com/babel-traverse/-/babel-traverse-6.26.0.tgz#46a9cbd7edcc62c8e5c064e2d2d8d0f4035766ee" + dependencies: + babel-code-frame "^6.26.0" + babel-messages "^6.23.0" + babel-runtime "^6.26.0" + babel-types "^6.26.0" + babylon "^6.18.0" + debug "^2.6.8" + globals "^9.18.0" + invariant "^2.2.2" + lodash "^4.17.4" + +babel-types@7.0.0-beta.3: + version "7.0.0-beta.3" + resolved "https://registry.yarnpkg.com/babel-types/-/babel-types-7.0.0-beta.3.tgz#cd927ca70e0ae8ab05f4aab83778cfb3e6eb20b4" + dependencies: + esutils "^2.0.2" + lodash "^4.2.0" + to-fast-properties "^2.0.0" + +babel-types@^6.18.0, babel-types@^6.19.0, babel-types@^6.24.1, babel-types@^6.26.0: + version "6.26.0" + resolved "https://registry.yarnpkg.com/babel-types/-/babel-types-6.26.0.tgz#a3b073f94ab49eb6fa55cd65227a334380632497" + dependencies: + babel-runtime "^6.26.0" + esutils "^2.0.2" + lodash "^4.17.4" + to-fast-properties "^1.0.3" + +babylon@7.0.0-beta.36: + version "7.0.0-beta.36" + resolved "https://registry.yarnpkg.com/babylon/-/babylon-7.0.0-beta.36.tgz#3a3683ba6a9a1e02b0aa507c8e63435e39305b9e" + +babylon@^6.18.0: + version "6.18.0" + resolved "https://registry.yarnpkg.com/babylon/-/babylon-6.18.0.tgz#af2f3b88fa6f5c1e4c634d1a0f8eac4f55b395e3" + +balanced-match@^1.0.0: + version "1.0.0" + resolved 
"https://registry.yarnpkg.com/balanced-match/-/balanced-match-1.0.0.tgz#89b4d199ab2bee49de164ea02b89ce462d71b767" + +base64url@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/base64url/-/base64url-2.0.0.tgz#eac16e03ea1438eff9423d69baa36262ed1f70bb" + +base@^0.11.1: + version "0.11.2" + resolved "https://registry.yarnpkg.com/base/-/base-0.11.2.tgz#7bde5ced145b6d551a90db87f83c558b4eb48a8f" + dependencies: + cache-base "^1.0.1" + class-utils "^0.3.5" + component-emitter "^1.2.1" + define-property "^1.0.0" + isobject "^3.0.1" + mixin-deep "^1.2.0" + pascalcase "^0.1.1" + +bcrypt-pbkdf@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.1.tgz#63bc5dcb61331b92bc05fd528953c33462a06f8d" + dependencies: + tweetnacl "^0.14.3" + +bcrypt@^1.0.2: + version "1.0.3" + resolved "https://registry.yarnpkg.com/bcrypt/-/bcrypt-1.0.3.tgz#b02ddc6c0b52ea16b8d3cf375d5a32e780dab548" + dependencies: + nan "2.6.2" + node-pre-gyp "0.6.36" + +bcryptjs@^2.4.3: + version "2.4.3" + resolved "https://registry.yarnpkg.com/bcryptjs/-/bcryptjs-2.4.3.tgz#9ab5627b93e60621ff7cdac5da9733027df1d0cb" + +binary-extensions@^1.0.0: + version "1.11.0" + resolved "https://registry.yarnpkg.com/binary-extensions/-/binary-extensions-1.11.0.tgz#46aa1751fb6a2f93ee5e689bb1087d4b14c6c205" + +binary@~0.3.0: + version "0.3.0" + resolved "https://registry.yarnpkg.com/binary/-/binary-0.3.0.tgz#9f60553bc5ce8c3386f3b553cff47462adecaa79" + dependencies: + buffers "~0.1.1" + chainsaw "~0.1.0" + +bind-property-descriptor@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/bind-property-descriptor/-/bind-property-descriptor-1.0.0.tgz#ded43e6e394c52e28ee6e4d45a7e76d9213764d0" + dependencies: + lodash "^4.17.4" + +bindings@^1.2.1, bindings@~1.3.0: + version "1.3.0" + resolved "https://registry.yarnpkg.com/bindings/-/bindings-1.3.0.tgz#b346f6ecf6a95f5a815c5839fc7cdb22502f1ed7" + +bindings@~1.2.1: + version "1.2.1" + resolved 
"https://registry.yarnpkg.com/bindings/-/bindings-1.2.1.tgz#14ad6113812d2d37d72e67b4cacb4bb726505f11" + +bl@^1.0.0: + version "1.2.1" + resolved "https://registry.yarnpkg.com/bl/-/bl-1.2.1.tgz#cac328f7bee45730d404b692203fcb590e172d5e" + dependencies: + readable-stream "^2.0.5" + +bl@~0.8.1: + version "0.8.2" + resolved "https://registry.yarnpkg.com/bl/-/bl-0.8.2.tgz#c9b6bca08d1bc2ea00fc8afb4f1a5fd1e1c66e4e" + dependencies: + readable-stream "~1.0.26" + +block-stream@*: + version "0.0.9" + resolved "https://registry.yarnpkg.com/block-stream/-/block-stream-0.0.9.tgz#13ebfe778a03205cfe03751481ebb4b3300c126a" + dependencies: + inherits "~2.0.0" + +blocked@^1.2.1: + version "1.2.1" + resolved "https://registry.yarnpkg.com/blocked/-/blocked-1.2.1.tgz#e22efe767863c65ab8197f6252929104e1ec9ce2" + +bluebird@^2.10.2: + version "2.11.0" + resolved "https://registry.yarnpkg.com/bluebird/-/bluebird-2.11.0.tgz#534b9033c022c9579c56ba3b3e5a5caafbb650e1" + +bluebird@^3.1.1, bluebird@^3.4.0, bluebird@^3.5.1: + version "3.5.1" + resolved "https://registry.yarnpkg.com/bluebird/-/bluebird-3.5.1.tgz#d9551f9de98f1fcda1e683d17ee91a0602ee2eb9" + +body-parser@1.18.2, body-parser@^1.18.2: + version "1.18.2" + resolved "https://registry.yarnpkg.com/body-parser/-/body-parser-1.18.2.tgz#87678a19d84b47d859b83199bd59bce222b10454" + dependencies: + bytes "3.0.0" + content-type "~1.0.4" + debug "2.6.9" + depd "~1.1.1" + http-errors "~1.6.2" + iconv-lite "0.4.19" + on-finished "~2.3.0" + qs "6.5.1" + raw-body "2.3.2" + type-is "~1.6.15" + +boom@2.x.x: + version "2.10.1" + resolved "https://registry.yarnpkg.com/boom/-/boom-2.10.1.tgz#39c8918ceff5799f83f9492a848f625add0c766f" + dependencies: + hoek "2.x.x" + +boom@4.x.x: + version "4.3.1" + resolved "https://registry.yarnpkg.com/boom/-/boom-4.3.1.tgz#4f8a3005cb4a7e3889f749030fd25b96e01d2e31" + dependencies: + hoek "4.x.x" + +boom@5.x.x: + version "5.2.0" + resolved 
"https://registry.yarnpkg.com/boom/-/boom-5.2.0.tgz#5dd9da6ee3a5f302077436290cb717d3f4a54e02" + dependencies: + hoek "4.x.x" + +brace-expansion@^1.1.7: + version "1.1.8" + resolved "https://registry.yarnpkg.com/brace-expansion/-/brace-expansion-1.1.8.tgz#c07b211c7c952ec1f8efd51a77ef0d1d3990a292" + dependencies: + balanced-match "^1.0.0" + concat-map "0.0.1" + +braces@^1.8.2: + version "1.8.5" + resolved "https://registry.yarnpkg.com/braces/-/braces-1.8.5.tgz#ba77962e12dff969d6b76711e914b737857bf6a7" + dependencies: + expand-range "^1.8.1" + preserve "^0.2.0" + repeat-element "^1.1.2" + +braces@^2.3.0: + version "2.3.0" + resolved "https://registry.yarnpkg.com/braces/-/braces-2.3.0.tgz#a46941cb5fb492156b3d6a656e06c35364e3e66e" + dependencies: + arr-flatten "^1.1.0" + array-unique "^0.3.2" + define-property "^1.0.0" + extend-shallow "^2.0.1" + fill-range "^4.0.0" + isobject "^3.0.1" + repeat-element "^1.1.2" + snapdragon "^0.8.1" + snapdragon-node "^2.0.1" + split-string "^3.0.2" + to-regex "^3.0.1" + +browser-process-hrtime@^0.1.2: + version "0.1.2" + resolved "https://registry.yarnpkg.com/browser-process-hrtime/-/browser-process-hrtime-0.1.2.tgz#425d68a58d3447f02a04aa894187fce8af8b7b8e" + +browser-resolve@^1.11.2: + version "1.11.2" + resolved "https://registry.yarnpkg.com/browser-resolve/-/browser-resolve-1.11.2.tgz#8ff09b0a2c421718a1051c260b32e48f442938ce" + dependencies: + resolve "1.1.7" + +browserslist@^2.1.2: + version "2.11.3" + resolved "https://registry.yarnpkg.com/browserslist/-/browserslist-2.11.3.tgz#fe36167aed1bbcde4827ebfe71347a2cc70b99b2" + dependencies: + caniuse-lite "^1.0.30000792" + electron-to-chromium "^1.3.30" + +bser@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/bser/-/bser-2.0.0.tgz#9ac78d3ed5d915804fd87acb158bc797147a1719" + dependencies: + node-int64 "^0.4.0" + +buffer-crc32@^0.2.1: + version "0.2.13" + resolved 
"https://registry.yarnpkg.com/buffer-crc32/-/buffer-crc32-0.2.13.tgz#0d333e3f00eac50aa1454abd30ef8c2a5d9a7242" + +buffers@~0.1.1: + version "0.1.1" + resolved "https://registry.yarnpkg.com/buffers/-/buffers-0.1.1.tgz#b24579c3bed4d6d396aeee6d9a8ae7f5482ab7bb" + +builtin-modules@^1.0.0, builtin-modules@^1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/builtin-modules/-/builtin-modules-1.1.1.tgz#270f076c5a72c02f5b65a47df94c5fe3a278892f" + +bytes@3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/bytes/-/bytes-3.0.0.tgz#d32815404d689699f85a4ea4fa8755dd13a96048" + +bytewise-core@^1.2.2: + version "1.2.3" + resolved "https://registry.yarnpkg.com/bytewise-core/-/bytewise-core-1.2.3.tgz#3fb410c7e91558eb1ab22a82834577aa6bd61d42" + dependencies: + typewise-core "^1.2" + +bytewise@~1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/bytewise/-/bytewise-1.1.0.tgz#1d13cbff717ae7158094aa881b35d081b387253e" + dependencies: + bytewise-core "^1.2.2" + typewise "^1.0.3" + +cache-base@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/cache-base/-/cache-base-1.0.1.tgz#0a7f46416831c8b662ee36fe4e7c59d76f666ab2" + dependencies: + collection-visit "^1.0.0" + component-emitter "^1.2.1" + get-value "^2.0.6" + has-value "^1.0.0" + isobject "^3.0.1" + set-value "^2.0.0" + to-object-path "^0.3.0" + union-value "^1.0.0" + unset-value "^1.0.0" + +caller-path@^0.1.0: + version "0.1.0" + resolved "https://registry.yarnpkg.com/caller-path/-/caller-path-0.1.0.tgz#94085ef63581ecd3daa92444a8fe94e82577751f" + dependencies: + callsites "^0.2.0" + +callsites@^0.2.0: + version "0.2.0" + resolved "https://registry.yarnpkg.com/callsites/-/callsites-0.2.0.tgz#afab96262910a7f33c19a5775825c69f34e350ca" + +callsites@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/callsites/-/callsites-2.0.0.tgz#06eb84f00eea413da86affefacbffb36093b3c50" + +camelcase@^1.0.2: + version "1.2.1" + resolved 
"https://registry.yarnpkg.com/camelcase/-/camelcase-1.2.1.tgz#9bb5304d2e0b56698b2c758b08a3eaa9daa58a39" + +camelcase@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-3.0.0.tgz#32fc4b9fcdaf845fcdf7e73bb97cac2261f0ab0a" + +camelcase@^4.1.0: + version "4.1.0" + resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-4.1.0.tgz#d545635be1e33c542649c69173e5de6acfae34dd" + +camelize@1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/camelize/-/camelize-1.0.0.tgz#164a5483e630fa4321e5af07020e531831b2609b" + +caniuse-lite@^1.0.30000792: + version "1.0.30000792" + resolved "https://registry.yarnpkg.com/caniuse-lite/-/caniuse-lite-1.0.30000792.tgz#d0cea981f8118f3961471afbb43c9a1e5bbf0332" + +caseless@~0.12.0: + version "0.12.0" + resolved "https://registry.yarnpkg.com/caseless/-/caseless-0.12.0.tgz#1b681c21ff84033c826543090689420d187151dc" + +center-align@^0.1.1: + version "0.1.3" + resolved "https://registry.yarnpkg.com/center-align/-/center-align-0.1.3.tgz#aa0d32629b6ee972200411cbd4461c907bc2b7ad" + dependencies: + align-text "^0.1.3" + lazy-cache "^1.0.3" + +chainsaw@~0.1.0: + version "0.1.0" + resolved "https://registry.yarnpkg.com/chainsaw/-/chainsaw-0.1.0.tgz#5eab50b28afe58074d0d58291388828b5e5fbc98" + dependencies: + traverse ">=0.3.0 <0.4" + +chalk@1.1.3, chalk@^1.0.0, chalk@^1.1.3: + version "1.1.3" + resolved "https://registry.yarnpkg.com/chalk/-/chalk-1.1.3.tgz#a8115c55e4a702fe4d150abd3872822a7e09fc98" + dependencies: + ansi-styles "^2.2.1" + escape-string-regexp "^1.0.2" + has-ansi "^2.0.0" + strip-ansi "^3.0.0" + supports-color "^2.0.0" + +chalk@^2.0.0, chalk@^2.0.1, chalk@^2.1.0: + version "2.3.0" + resolved "https://registry.yarnpkg.com/chalk/-/chalk-2.3.0.tgz#b5ea48efc9c1793dccc9b4767c93914d3f2d52ba" + dependencies: + ansi-styles "^3.1.0" + escape-string-regexp "^1.0.5" + supports-color "^4.0.0" + +character-parser@^2.1.1: + version "2.2.0" + resolved 
"https://registry.yarnpkg.com/character-parser/-/character-parser-2.2.0.tgz#c7ce28f36d4bcd9744e5ffc2c5fcde1c73261fc0" + dependencies: + is-regex "^1.0.3" + +chardet@^0.4.0: + version "0.4.2" + resolved "https://registry.yarnpkg.com/chardet/-/chardet-0.4.2.tgz#b5473b33dc97c424e5d98dc87d55d4d8a29c8bf2" + +child-process-promise@^2.0.3: + version "2.2.1" + resolved "https://registry.yarnpkg.com/child-process-promise/-/child-process-promise-2.2.1.tgz#4730a11ef610fad450b8f223c79d31d7bdad8074" + dependencies: + cross-spawn "^4.0.2" + node-version "^1.0.0" + promise-polyfill "^6.0.1" + +chokidar@^1.6.1: + version "1.7.0" + resolved "https://registry.yarnpkg.com/chokidar/-/chokidar-1.7.0.tgz#798e689778151c8076b4b360e5edd28cda2bb468" + dependencies: + anymatch "^1.3.0" + async-each "^1.0.0" + glob-parent "^2.0.0" + inherits "^2.0.1" + is-binary-path "^1.0.0" + is-glob "^2.0.0" + path-is-absolute "^1.0.0" + readdirp "^2.0.0" + optionalDependencies: + fsevents "^1.0.0" + +chownr@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/chownr/-/chownr-1.0.1.tgz#e2a75042a9551908bebd25b8523d5f9769d79181" + +ci-info@^1.0.0: + version "1.1.2" + resolved "https://registry.yarnpkg.com/ci-info/-/ci-info-1.1.2.tgz#03561259db48d0474c8bdc90f5b47b068b6bbfb4" + +circular-json@^0.3.1: + version "0.3.3" + resolved "https://registry.yarnpkg.com/circular-json/-/circular-json-0.3.3.tgz#815c99ea84f6809529d2f45791bdf82711352d66" + +class-utils@^0.3.5: + version "0.3.6" + resolved "https://registry.yarnpkg.com/class-utils/-/class-utils-0.3.6.tgz#f93369ae8b9a7ce02fd41faad0ca83033190c463" + dependencies: + arr-union "^3.1.0" + define-property "^0.2.5" + isobject "^3.0.0" + static-extend "^0.1.1" + +clean-css@^3.3.0: + version "3.4.28" + resolved "https://registry.yarnpkg.com/clean-css/-/clean-css-3.4.28.tgz#bf1945e82fc808f55695e6ddeaec01400efd03ff" + dependencies: + commander "2.8.x" + source-map "0.4.x" + +cli-cursor@^2.1.0: + version "2.1.0" + resolved 
"https://registry.yarnpkg.com/cli-cursor/-/cli-cursor-2.1.0.tgz#b35dac376479facc3e94747d41d0d0f5238ffcb5" + dependencies: + restore-cursor "^2.0.0" + +cli-width@^2.0.0: + version "2.2.0" + resolved "https://registry.yarnpkg.com/cli-width/-/cli-width-2.2.0.tgz#ff19ede8a9a5e579324147b0c11f0fbcbabed639" + +cliui@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/cliui/-/cliui-2.1.0.tgz#4b475760ff80264c762c3a1719032e91c7fea0d1" + dependencies: + center-align "^0.1.1" + right-align "^0.1.1" + wordwrap "0.0.2" + +cliui@^3.2.0: + version "3.2.0" + resolved "https://registry.yarnpkg.com/cliui/-/cliui-3.2.0.tgz#120601537a916d29940f934da3b48d585a39213d" + dependencies: + string-width "^1.0.1" + strip-ansi "^3.0.1" + wrap-ansi "^2.0.0" + +cliui@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/cliui/-/cliui-4.0.0.tgz#743d4650e05f36d1ed2575b59638d87322bfbbcc" + dependencies: + string-width "^2.1.1" + strip-ansi "^4.0.0" + wrap-ansi "^2.0.0" + +co@^4.6.0: + version "4.6.0" + resolved "https://registry.yarnpkg.com/co/-/co-4.6.0.tgz#6ea6bdf3d853ae54ccb8e47bfa0bf3f9031fb184" + +code-point-at@^1.0.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/code-point-at/-/code-point-at-1.1.0.tgz#0d070b4d043a5bea33a2f1a40e2edb3d9a4ccf77" + +collection-visit@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/collection-visit/-/collection-visit-1.0.0.tgz#4bc0373c164bc3291b4d368c829cf1a80a59dca0" + dependencies: + map-visit "^1.0.0" + object-visit "^1.0.0" + +color-convert@^1.9.0: + version "1.9.1" + resolved "https://registry.yarnpkg.com/color-convert/-/color-convert-1.9.1.tgz#c1261107aeb2f294ebffec9ed9ecad529a6097ed" + dependencies: + color-name "^1.1.1" + +color-name@^1.1.1: + version "1.1.3" + resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.3.tgz#a7d0558bd89c42f795dd42328f740831ca53bc25" + +combined-stream@^1.0.5, combined-stream@~1.0.5: + version "1.0.5" + resolved 
"https://registry.yarnpkg.com/combined-stream/-/combined-stream-1.0.5.tgz#938370a57b4a51dea2c77c15d5c5fdf895164009" + dependencies: + delayed-stream "~1.0.0" + +commander@2.8.x: + version "2.8.1" + resolved "https://registry.yarnpkg.com/commander/-/commander-2.8.1.tgz#06be367febfda0c330aa1e2a072d3dc9762425d4" + dependencies: + graceful-readlink ">= 1.0.0" + +commander@^2.11.0: + version "2.13.0" + resolved "https://registry.yarnpkg.com/commander/-/commander-2.13.0.tgz#6964bca67685df7c1f1430c584f07d7597885b9c" + +component-emitter@^1.2.1: + version "1.2.1" + resolved "https://registry.yarnpkg.com/component-emitter/-/component-emitter-1.2.1.tgz#137918d6d78283f7df7a6b7c5a63e140e69425e6" + +compress-commons@^1.2.0: + version "1.2.2" + resolved "https://registry.yarnpkg.com/compress-commons/-/compress-commons-1.2.2.tgz#524a9f10903f3a813389b0225d27c48bb751890f" + dependencies: + buffer-crc32 "^0.2.1" + crc32-stream "^2.0.0" + normalize-path "^2.0.0" + readable-stream "^2.0.0" + +concat-map@0.0.1: + version "0.0.1" + resolved "https://registry.yarnpkg.com/concat-map/-/concat-map-0.0.1.tgz#d8a96bd77fd68df7793a73036a3ba0d5405d477b" + +concat-stream@^1.6.0: + version "1.6.0" + resolved "https://registry.yarnpkg.com/concat-stream/-/concat-stream-1.6.0.tgz#0aac662fd52be78964d5532f694784e70110acf7" + dependencies: + inherits "^2.0.3" + readable-stream "^2.2.2" + typedarray "^0.0.6" + +connect-flash@^0.1.1: + version "0.1.1" + resolved "https://registry.yarnpkg.com/connect-flash/-/connect-flash-0.1.1.tgz#d8630f26d95a7f851f9956b1e8cc6732f3b6aa30" + +console-control-strings@^1.0.0, console-control-strings@~1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/console-control-strings/-/console-control-strings-1.1.0.tgz#3d7cf4464db6446ea644bf4b39507f9851008e8e" + +constantinople@^3.0.1: + version "3.1.0" + resolved "https://registry.yarnpkg.com/constantinople/-/constantinople-3.1.0.tgz#7569caa8aa3f8d5935d62e1fa96f9f702cd81c79" + dependencies: + acorn "^3.1.0" + 
is-expression "^2.0.1" + +contains-path@^0.1.0: + version "0.1.0" + resolved "https://registry.yarnpkg.com/contains-path/-/contains-path-0.1.0.tgz#fe8cf184ff6670b6baef01a9d4861a5cbec4120a" + +content-disposition@0.5.2: + version "0.5.2" + resolved "https://registry.yarnpkg.com/content-disposition/-/content-disposition-0.5.2.tgz#0cf68bb9ddf5f2be7961c3a85178cb85dba78cb4" + +content-security-policy-builder@2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/content-security-policy-builder/-/content-security-policy-builder-2.0.0.tgz#8749a1d542fcbe82237281ea9f716ce68b394dd2" + +content-type-parser@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/content-type-parser/-/content-type-parser-1.0.2.tgz#caabe80623e63638b2502fd4c7f12ff4ce2352e7" + +content-type@~1.0.4: + version "1.0.4" + resolved "https://registry.yarnpkg.com/content-type/-/content-type-1.0.4.tgz#e138cc75e040c727b1966fe5e5f8c9aee256fe3b" + +convert-source-map@^1.4.0, convert-source-map@^1.5.0: + version "1.5.1" + resolved "https://registry.yarnpkg.com/convert-source-map/-/convert-source-map-1.5.1.tgz#b8278097b9bc229365de5c62cf5fcaed8b5599e5" + +cookie-parser@^1.4.3: + version "1.4.3" + resolved "https://registry.yarnpkg.com/cookie-parser/-/cookie-parser-1.4.3.tgz#0fe31fa19d000b95f4aadf1f53fdc2b8a203baa5" + dependencies: + cookie "0.3.1" + cookie-signature "1.0.6" + +cookie-signature@1.0.6: + version "1.0.6" + resolved "https://registry.yarnpkg.com/cookie-signature/-/cookie-signature-1.0.6.tgz#e303a882b342cc3ee8ca513a79999734dab3ae2c" + +cookie@0.3.1, cookie@^0.3.1: + version "0.3.1" + resolved "https://registry.yarnpkg.com/cookie/-/cookie-0.3.1.tgz#e7e0a1f9ef43b4c8ba925c5c5a96e806d16873bb" + +copy-descriptor@^0.1.0: + version "0.1.1" + resolved "https://registry.yarnpkg.com/copy-descriptor/-/copy-descriptor-0.1.1.tgz#676f6eb3c39997c2ee1ac3a924fd6124748f578d" + +core-js@^1.0.0: + version "1.2.7" + resolved 
"https://registry.yarnpkg.com/core-js/-/core-js-1.2.7.tgz#652294c14651db28fa93bd2d5ff2983a4f08c636" + +core-js@^2.4.0, core-js@^2.5.0, core-js@^2.5.3: + version "2.5.3" + resolved "https://registry.yarnpkg.com/core-js/-/core-js-2.5.3.tgz#8acc38345824f16d8365b7c9b4259168e8ed603e" + +core-util-is@1.0.2, core-util-is@~1.0.0: + version "1.0.2" + resolved "https://registry.yarnpkg.com/core-util-is/-/core-util-is-1.0.2.tgz#b5fd54220aa2bc5ab57aab7140c940754503c1a7" + +crc32-stream@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/crc32-stream/-/crc32-stream-2.0.0.tgz#e3cdd3b4df3168dd74e3de3fbbcb7b297fe908f4" + dependencies: + crc "^3.4.4" + readable-stream "^2.0.0" + +crc@3.4.4: + version "3.4.4" + resolved "https://registry.yarnpkg.com/crc/-/crc-3.4.4.tgz#9da1e980e3bd44fc5c93bf5ab3da3378d85e466b" + +crc@^3.4.4: + version "3.5.0" + resolved "https://registry.yarnpkg.com/crc/-/crc-3.5.0.tgz#98b8ba7d489665ba3979f59b21381374101a1964" + +cron@^1.3.0: + version "1.3.0" + resolved "https://registry.yarnpkg.com/cron/-/cron-1.3.0.tgz#7e459968eaf94e1a445be796ce402166c234659d" + dependencies: + moment-timezone "^0.5.x" + +cross-env@^5.1.3: + version "5.1.3" + resolved "https://registry.yarnpkg.com/cross-env/-/cross-env-5.1.3.tgz#f8ae18faac87692b0a8b4d2f7000d4ec3a85dfd7" + dependencies: + cross-spawn "^5.1.0" + is-windows "^1.0.0" + +cross-spawn@^4.0.2: + version "4.0.2" + resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-4.0.2.tgz#7b9247621c23adfdd3856004a823cbe397424d41" + dependencies: + lru-cache "^4.0.1" + which "^1.2.9" + +cross-spawn@^5.0.1, cross-spawn@^5.1.0: + version "5.1.0" + resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-5.1.0.tgz#e8bd0efee58fcff6f8f94510a0a554bbfa235449" + dependencies: + lru-cache "^4.0.1" + shebang-command "^1.2.0" + which "^1.2.9" + +cryptiles@2.x.x: + version "2.0.5" + resolved "https://registry.yarnpkg.com/cryptiles/-/cryptiles-2.0.5.tgz#3bdfecdc608147c1c67202fa291e7dca59eaa3b8" + dependencies: + 
boom "2.x.x" + +cryptiles@3.x.x: + version "3.1.2" + resolved "https://registry.yarnpkg.com/cryptiles/-/cryptiles-3.1.2.tgz#a89fbb220f5ce25ec56e8c4aa8a4fd7b5b0d29fe" + dependencies: + boom "5.x.x" + +cssom@0.3.x, "cssom@>= 0.3.2 < 0.4.0": + version "0.3.2" + resolved "https://registry.yarnpkg.com/cssom/-/cssom-0.3.2.tgz#b8036170c79f07a90ff2f16e22284027a243848b" + +"cssstyle@>= 0.2.37 < 0.3.0": + version "0.2.37" + resolved "https://registry.yarnpkg.com/cssstyle/-/cssstyle-0.2.37.tgz#541097234cb2513c83ceed3acddc27ff27987d54" + dependencies: + cssom "0.3.x" + +d3-time-format@^2.1.1: + version "2.1.1" + resolved "https://registry.yarnpkg.com/d3-time-format/-/d3-time-format-2.1.1.tgz#85b7cdfbc9ffca187f14d3c456ffda268081bb31" + dependencies: + d3-time "1" + +d3-time@1: + version "1.0.8" + resolved "https://registry.yarnpkg.com/d3-time/-/d3-time-1.0.8.tgz#dbd2d6007bf416fe67a76d17947b784bffea1e84" + +dashdash@^1.12.0: + version "1.14.1" + resolved "https://registry.yarnpkg.com/dashdash/-/dashdash-1.14.1.tgz#853cfa0f7cbe2fed5de20326b8dd581035f6e2f0" + dependencies: + assert-plus "^1.0.0" + +dasherize@2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/dasherize/-/dasherize-2.0.0.tgz#6d809c9cd0cf7bb8952d80fc84fa13d47ddb1308" + +data-uri-to-buffer@1: + version "1.2.0" + resolved "https://registry.yarnpkg.com/data-uri-to-buffer/-/data-uri-to-buffer-1.2.0.tgz#77163ea9c20d8641b4707e8f18abdf9a78f34835" + +debug@2, debug@2.6.9, debug@^2.2.0, debug@^2.3.3, debug@^2.6.8, debug@^2.6.9: + version "2.6.9" + resolved "https://registry.yarnpkg.com/debug/-/debug-2.6.9.tgz#5d128515df134ff327e90a4c93f4e077a536341f" + dependencies: + ms "2.0.0" + +debug@^3.0.1, debug@^3.1.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/debug/-/debug-3.1.0.tgz#5bb5a0672628b64149566ba16819e61518c67261" + dependencies: + ms "2.0.0" + +decamelize@^1.0.0, decamelize@^1.1.1: + version "1.2.0" + resolved 
"https://registry.yarnpkg.com/decamelize/-/decamelize-1.2.0.tgz#f6534d15148269b20352e7bee26f501f9a191290" + +decode-uri-component@^0.2.0: + version "0.2.0" + resolved "https://registry.yarnpkg.com/decode-uri-component/-/decode-uri-component-0.2.0.tgz#eb3913333458775cb84cd1a1fae062106bb87545" + +decorator-synchronized@^0.3.0: + version "0.3.0" + resolved "https://registry.yarnpkg.com/decorator-synchronized/-/decorator-synchronized-0.3.0.tgz#72a21e7f0208abba2cd1ecd622aafb4a65b1096c" + +deep-extend@~0.4.0: + version "0.4.2" + resolved "https://registry.yarnpkg.com/deep-extend/-/deep-extend-0.4.2.tgz#48b699c27e334bf89f10892be432f6e4c7d34a7f" + +deep-is@~0.1.3: + version "0.1.3" + resolved "https://registry.yarnpkg.com/deep-is/-/deep-is-0.1.3.tgz#b369d6fb5dbc13eecf524f91b070feedc357cf34" + +default-require-extensions@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/default-require-extensions/-/default-require-extensions-1.0.0.tgz#f37ea15d3e13ffd9b437d33e1a75b5fb97874cb8" + dependencies: + strip-bom "^2.0.0" + +deferred-leveldown@~0.2.0: + version "0.2.0" + resolved "https://registry.yarnpkg.com/deferred-leveldown/-/deferred-leveldown-0.2.0.tgz#2cef1f111e1c57870d8bbb8af2650e587cd2f5b4" + dependencies: + abstract-leveldown "~0.12.1" + +deferred-leveldown@~1.2.1: + version "1.2.2" + resolved "https://registry.yarnpkg.com/deferred-leveldown/-/deferred-leveldown-1.2.2.tgz#3acd2e0b75d1669924bc0a4b642851131173e1eb" + dependencies: + abstract-leveldown "~2.6.0" + +deferred-leveldown@~2.0.2: + version "2.0.3" + resolved "https://registry.yarnpkg.com/deferred-leveldown/-/deferred-leveldown-2.0.3.tgz#91fbc7699ac85f3920df035792d96d97cbf50c0f" + dependencies: + abstract-leveldown "~3.0.0" + +define-properties@^1.1.2: + version "1.1.2" + resolved "https://registry.yarnpkg.com/define-properties/-/define-properties-1.1.2.tgz#83a73f2fea569898fb737193c8f873caf6d45c94" + dependencies: + foreach "^2.0.5" + object-keys "^1.0.8" + +define-property@^0.2.5: + version "0.2.5" 
+ resolved "https://registry.yarnpkg.com/define-property/-/define-property-0.2.5.tgz#c35b1ef918ec3c990f9a5bc57be04aacec5c8116" + dependencies: + is-descriptor "^0.1.0" + +define-property@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/define-property/-/define-property-1.0.0.tgz#769ebaaf3f4a63aad3af9e8d304c9bbe79bfb0e6" + dependencies: + is-descriptor "^1.0.0" + +deflate-js@^0.2.3: + version "0.2.3" + resolved "https://registry.yarnpkg.com/deflate-js/-/deflate-js-0.2.3.tgz#f85abb58ebc5151a306147473d57c3e4f7e4426b" + +degenerator@^1.0.4: + version "1.0.4" + resolved "https://registry.yarnpkg.com/degenerator/-/degenerator-1.0.4.tgz#fcf490a37ece266464d9cc431ab98c5819ced095" + dependencies: + ast-types "0.x.x" + escodegen "1.x.x" + esprima "3.x.x" + +del@^2.0.2: + version "2.2.2" + resolved "https://registry.yarnpkg.com/del/-/del-2.2.2.tgz#c12c981d067846c84bcaf862cff930d907ffd1a8" + dependencies: + globby "^5.0.0" + is-path-cwd "^1.0.0" + is-path-in-cwd "^1.0.0" + object-assign "^4.0.1" + pify "^2.0.0" + pinkie-promise "^2.0.0" + rimraf "^2.2.8" + +delayed-stream@~1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/delayed-stream/-/delayed-stream-1.0.0.tgz#df3ae199acadfb7d440aaae0b29e2272b24ec619" + +delegates@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/delegates/-/delegates-1.0.0.tgz#84c6e159b81904fdca59a0ef44cd870d31250f9a" + +depd@1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/depd/-/depd-1.1.1.tgz#5783b4e1c459f06fa5ca27f991f3d06e7a310359" + +depd@~1.1.1: + version "1.1.2" + resolved "https://registry.yarnpkg.com/depd/-/depd-1.1.2.tgz#9bcd52e14c097763e749b274c4346ed2e560b5a9" + +deptree@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/deptree/-/deptree-1.0.0.tgz#d4f8fa7c21444b1977e719ce3d29fc3650336b7b" + +destroy@~1.0.4: + version "1.0.4" + resolved "https://registry.yarnpkg.com/destroy/-/destroy-1.0.4.tgz#978857442c44749e4206613e37946205826abd80" + +detect-indent@^4.0.0: + 
version "4.0.0" + resolved "https://registry.yarnpkg.com/detect-indent/-/detect-indent-4.0.0.tgz#f76d064352cdf43a1cb6ce619c4ee3a9475de208" + dependencies: + repeating "^2.0.0" + +detect-libc@^1.0.2, detect-libc@^1.0.3: + version "1.0.3" + resolved "https://registry.yarnpkg.com/detect-libc/-/detect-libc-1.0.3.tgz#fa137c4bd698edf55cd5cd02ac559f91a4c4ba9b" + +detect-newline@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/detect-newline/-/detect-newline-2.1.0.tgz#f41f1c10be4b00e87b5f13da680759f2c5bfd3e2" + +diff@^3.2.0: + version "3.4.0" + resolved "https://registry.yarnpkg.com/diff/-/diff-3.4.0.tgz#b1d85507daf3964828de54b37d0d73ba67dda56c" + +dns-prefetch-control@0.1.0: + version "0.1.0" + resolved "https://registry.yarnpkg.com/dns-prefetch-control/-/dns-prefetch-control-0.1.0.tgz#60ddb457774e178f1f9415f0cabb0e85b0b300b2" + +doctrine@1.5.0: + version "1.5.0" + resolved "https://registry.yarnpkg.com/doctrine/-/doctrine-1.5.0.tgz#379dce730f6166f76cefa4e6707a159b02c5a6fa" + dependencies: + esutils "^2.0.2" + isarray "^1.0.0" + +doctrine@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/doctrine/-/doctrine-2.1.0.tgz#5cd01fc101621b42c4cd7f5d1a66243716d3f39d" + dependencies: + esutils "^2.0.2" + +doctypes@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/doctypes/-/doctypes-1.1.0.tgz#ea80b106a87538774e8a3a4a5afe293de489e0a9" + +domexception@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/domexception/-/domexception-1.0.1.tgz#937442644ca6a31261ef36e3ec677fe805582c90" + dependencies: + webidl-conversions "^4.0.2" + +dont-sniff-mimetype@1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/dont-sniff-mimetype/-/dont-sniff-mimetype-1.0.0.tgz#5932890dc9f4e2f19e5eb02a20026e5e5efc8f58" + +double-ended-queue@^2.1.0-0: + version "2.1.0-0" + resolved "https://registry.yarnpkg.com/double-ended-queue/-/double-ended-queue-2.1.0-0.tgz#103d3527fd31528f40188130c841efdd78264e5c" + +duplexify@^3.4.2: + 
version "3.5.3" + resolved "https://registry.yarnpkg.com/duplexify/-/duplexify-3.5.3.tgz#8b5818800df92fd0125b27ab896491912858243e" + dependencies: + end-of-stream "^1.0.0" + inherits "^2.0.1" + readable-stream "^2.0.0" + stream-shift "^1.0.0" + +ecc-jsbn@~0.1.1: + version "0.1.1" + resolved "https://registry.yarnpkg.com/ecc-jsbn/-/ecc-jsbn-0.1.1.tgz#0fc73a9ed5f0d53c38193398523ef7e543777505" + dependencies: + jsbn "~0.1.0" + +ee-first@1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/ee-first/-/ee-first-1.1.1.tgz#590c61156b0ae2f4f0255732a158b266bc56b21d" + +electron-to-chromium@^1.3.30: + version "1.3.32" + resolved "https://registry.yarnpkg.com/electron-to-chromium/-/electron-to-chromium-1.3.32.tgz#11d0684c0840e003c4be8928f8ac5f35dbc2b4e6" + +encodeurl@~1.0.1: + version "1.0.2" + resolved "https://registry.yarnpkg.com/encodeurl/-/encodeurl-1.0.2.tgz#ad3ff4c86ec2d029322f5a02c3a9a606c95b3f59" + +encoding-down@~3.0.0: + version "3.0.1" + resolved "https://registry.yarnpkg.com/encoding-down/-/encoding-down-3.0.1.tgz#80a4415ec4d2805a02a67e5eac880d9754b796e0" + dependencies: + abstract-leveldown "^3.0.0" + level-codec "^8.0.0" + level-errors "^1.0.4" + +encoding@^0.1.11: + version "0.1.12" + resolved "https://registry.yarnpkg.com/encoding/-/encoding-0.1.12.tgz#538b66f3ee62cd1ab51ec323829d1f9480c74beb" + dependencies: + iconv-lite "~0.4.13" + +end-of-stream@^1.0.0, end-of-stream@^1.1.0: + version "1.4.1" + resolved "https://registry.yarnpkg.com/end-of-stream/-/end-of-stream-1.4.1.tgz#ed29634d19baba463b6ce6b80a37213eab71ec43" + dependencies: + once "^1.4.0" + +errno@~0.1.1: + version "0.1.6" + resolved "https://registry.yarnpkg.com/errno/-/errno-0.1.6.tgz#c386ce8a6283f14fc09563b71560908c9bf53026" + dependencies: + prr "~1.0.1" + +error-ex@^1.2.0: + version "1.3.1" + resolved "https://registry.yarnpkg.com/error-ex/-/error-ex-1.3.1.tgz#f855a86ce61adc4e8621c3cda21e7a7612c3a8dc" + dependencies: + is-arrayish "^0.2.1" + +es-abstract@^1.5.1: + version "1.10.0" + 
resolved "https://registry.yarnpkg.com/es-abstract/-/es-abstract-1.10.0.tgz#1ecb36c197842a00d8ee4c2dfd8646bb97d60864" + dependencies: + es-to-primitive "^1.1.1" + function-bind "^1.1.1" + has "^1.0.1" + is-callable "^1.1.3" + is-regex "^1.0.4" + +es-to-primitive@^1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/es-to-primitive/-/es-to-primitive-1.1.1.tgz#45355248a88979034b6792e19bb81f2b7975dd0d" + dependencies: + is-callable "^1.1.1" + is-date-object "^1.0.1" + is-symbol "^1.0.1" + +es6-promise@^4.0.3: + version "4.2.4" + resolved "https://registry.yarnpkg.com/es6-promise/-/es6-promise-4.2.4.tgz#dc4221c2b16518760bd8c39a52d8f356fc00ed29" + +es6-promisify@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/es6-promisify/-/es6-promisify-5.0.0.tgz#5109d62f3e56ea967c4b63505aef08291c8a5203" + dependencies: + es6-promise "^4.0.3" + +escape-html@~1.0.3: + version "1.0.3" + resolved "https://registry.yarnpkg.com/escape-html/-/escape-html-1.0.3.tgz#0258eae4d3d0c0974de1c169188ef0051d1d1988" + +escape-string-regexp@^1.0.2, escape-string-regexp@^1.0.5: + version "1.0.5" + resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz#1b61c0562190a8dff6ae3bb2cf0200ca130b86d4" + +escodegen@1.x.x, escodegen@^1.9.0: + version "1.9.0" + resolved "https://registry.yarnpkg.com/escodegen/-/escodegen-1.9.0.tgz#9811a2f265dc1cd3894420ee3717064b632b8852" + dependencies: + esprima "^3.1.3" + estraverse "^4.2.0" + esutils "^2.0.2" + optionator "^0.8.1" + optionalDependencies: + source-map "~0.5.6" + +eslint-config-standard@^11.0.0-beta.0: + version "11.0.0-beta.0" + resolved "https://registry.yarnpkg.com/eslint-config-standard/-/eslint-config-standard-11.0.0-beta.0.tgz#f8afe69803d95c685a4b8392b8793188eb03cbb3" + +eslint-import-resolver-node@^0.3.1: + version "0.3.2" + resolved "https://registry.yarnpkg.com/eslint-import-resolver-node/-/eslint-import-resolver-node-0.3.2.tgz#58f15fb839b8d0576ca980413476aab2472db66a" + dependencies: 
+ debug "^2.6.9" + resolve "^1.5.0" + +eslint-module-utils@^2.1.1: + version "2.1.1" + resolved "https://registry.yarnpkg.com/eslint-module-utils/-/eslint-module-utils-2.1.1.tgz#abaec824177613b8a95b299639e1b6facf473449" + dependencies: + debug "^2.6.8" + pkg-dir "^1.0.0" + +eslint-plugin-import@^2.8.0: + version "2.8.0" + resolved "https://registry.yarnpkg.com/eslint-plugin-import/-/eslint-plugin-import-2.8.0.tgz#fa1b6ef31fcb3c501c09859c1b86f1fc5b986894" + dependencies: + builtin-modules "^1.1.1" + contains-path "^0.1.0" + debug "^2.6.8" + doctrine "1.5.0" + eslint-import-resolver-node "^0.3.1" + eslint-module-utils "^2.1.1" + has "^1.0.1" + lodash.cond "^4.3.0" + minimatch "^3.0.3" + read-pkg-up "^2.0.0" + +eslint-plugin-node@^5.2.1: + version "5.2.1" + resolved "https://registry.yarnpkg.com/eslint-plugin-node/-/eslint-plugin-node-5.2.1.tgz#80df3253c4d7901045ec87fa660a284e32bdca29" + dependencies: + ignore "^3.3.6" + minimatch "^3.0.4" + resolve "^1.3.3" + semver "5.3.0" + +eslint-plugin-promise@^3.6.0: + version "3.6.0" + resolved "https://registry.yarnpkg.com/eslint-plugin-promise/-/eslint-plugin-promise-3.6.0.tgz#54b7658c8f454813dc2a870aff8152ec4969ba75" + +eslint-plugin-standard@^3.0.1: + version "3.0.1" + resolved "https://registry.yarnpkg.com/eslint-plugin-standard/-/eslint-plugin-standard-3.0.1.tgz#34d0c915b45edc6f010393c7eef3823b08565cf2" + +eslint-scope@^3.7.1, eslint-scope@~3.7.1: + version "3.7.1" + resolved "https://registry.yarnpkg.com/eslint-scope/-/eslint-scope-3.7.1.tgz#3d63c3edfda02e06e01a452ad88caacc7cdcb6e8" + dependencies: + esrecurse "^4.1.0" + estraverse "^4.1.1" + +eslint-visitor-keys@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/eslint-visitor-keys/-/eslint-visitor-keys-1.0.0.tgz#3f3180fb2e291017716acb4c9d6d5b5c34a6a81d" + +eslint@^4.13.1: + version "4.16.0" + resolved "https://registry.yarnpkg.com/eslint/-/eslint-4.16.0.tgz#934ada9e98715e1d7bbfd6f6f0519ed2fab35cc1" + dependencies: + ajv "^5.3.0" + babel-code-frame 
"^6.22.0" + chalk "^2.1.0" + concat-stream "^1.6.0" + cross-spawn "^5.1.0" + debug "^3.1.0" + doctrine "^2.1.0" + eslint-scope "^3.7.1" + eslint-visitor-keys "^1.0.0" + espree "^3.5.2" + esquery "^1.0.0" + esutils "^2.0.2" + file-entry-cache "^2.0.0" + functional-red-black-tree "^1.0.1" + glob "^7.1.2" + globals "^11.0.1" + ignore "^3.3.3" + imurmurhash "^0.1.4" + inquirer "^3.0.6" + is-resolvable "^1.0.0" + js-yaml "^3.9.1" + json-stable-stringify-without-jsonify "^1.0.1" + levn "^0.3.0" + lodash "^4.17.4" + minimatch "^3.0.2" + mkdirp "^0.5.1" + natural-compare "^1.4.0" + optionator "^0.8.2" + path-is-inside "^1.0.2" + pluralize "^7.0.0" + progress "^2.0.0" + require-uncached "^1.0.3" + semver "^5.3.0" + strip-ansi "^4.0.0" + strip-json-comments "~2.0.1" + table "^4.0.1" + text-table "~0.2.0" + +espree@^3.5.2: + version "3.5.2" + resolved "https://registry.yarnpkg.com/espree/-/espree-3.5.2.tgz#756ada8b979e9dcfcdb30aad8d1a9304a905e1ca" + dependencies: + acorn "^5.2.1" + acorn-jsx "^3.0.0" + +esprima@3.x.x, esprima@^3.1.3: + version "3.1.3" + resolved "https://registry.yarnpkg.com/esprima/-/esprima-3.1.3.tgz#fdca51cee6133895e3c88d535ce49dbff62a4633" + +esprima@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/esprima/-/esprima-4.0.0.tgz#4499eddcd1110e0b218bacf2fa7f7f59f55ca804" + +esquery@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/esquery/-/esquery-1.0.0.tgz#cfba8b57d7fba93f17298a8a006a04cda13d80fa" + dependencies: + estraverse "^4.0.0" + +esrecurse@^4.1.0: + version "4.2.0" + resolved "https://registry.yarnpkg.com/esrecurse/-/esrecurse-4.2.0.tgz#fa9568d98d3823f9a41d91e902dcab9ea6e5b163" + dependencies: + estraverse "^4.1.0" + object-assign "^4.0.1" + +estraverse@^4.0.0, estraverse@^4.1.0, estraverse@^4.1.1, estraverse@^4.2.0: + version "4.2.0" + resolved "https://registry.yarnpkg.com/estraverse/-/estraverse-4.2.0.tgz#0dee3fed31fcd469618ce7342099fc1afa0bdb13" + +esutils@^2.0.2: + version "2.0.2" + resolved 
"https://registry.yarnpkg.com/esutils/-/esutils-2.0.2.tgz#0abf4f1caa5bcb1f7a9d8acc6dea4faaa04bac9b" + +etag@~1.8.1: + version "1.8.1" + resolved "https://registry.yarnpkg.com/etag/-/etag-1.8.1.tgz#41ae2eeb65efa62268aebfea83ac7d79299b0887" + +event-to-promise@^0.7.0: + version "0.7.0" + resolved "https://registry.yarnpkg.com/event-to-promise/-/event-to-promise-0.7.0.tgz#cb07dfcd418da2221d90f77eab713bc235e2090f" + +event-to-promise@^0.8.0: + version "0.8.0" + resolved "https://registry.yarnpkg.com/event-to-promise/-/event-to-promise-0.8.0.tgz#4b84f11772b6f25f7752fc74d971531ac6f5b626" + +eventemitter3@1.x.x: + version "1.2.0" + resolved "https://registry.yarnpkg.com/eventemitter3/-/eventemitter3-1.2.0.tgz#1c86991d816ad1e504750e73874224ecf3bec508" + +exec-promise@^0.6.1: + version "0.6.1" + resolved "https://registry.yarnpkg.com/exec-promise/-/exec-promise-0.6.1.tgz#dea16a4af0150e34eb0b1660e55dd9b2837ada6f" + dependencies: + log-symbols "^1.0.0" + +exec-promise@^0.7.0: + version "0.7.0" + resolved "https://registry.yarnpkg.com/exec-promise/-/exec-promise-0.7.0.tgz#74d55e60c858a94b325e8e20b66a1bd2ae9c184e" + dependencies: + log-symbols "^1.0.2" + +exec-sh@^0.2.0: + version "0.2.1" + resolved "https://registry.yarnpkg.com/exec-sh/-/exec-sh-0.2.1.tgz#163b98a6e89e6b65b47c2a28d215bc1f63989c38" + dependencies: + merge "^1.1.3" + +execa@^0.7.0: + version "0.7.0" + resolved "https://registry.yarnpkg.com/execa/-/execa-0.7.0.tgz#944becd34cc41ee32a63a9faf27ad5a65fc59777" + dependencies: + cross-spawn "^5.0.1" + get-stream "^3.0.0" + is-stream "^1.1.0" + npm-run-path "^2.0.0" + p-finally "^1.0.0" + signal-exit "^3.0.0" + strip-eof "^1.0.0" + +execa@^0.9.0: + version "0.9.0" + resolved "https://registry.yarnpkg.com/execa/-/execa-0.9.0.tgz#adb7ce62cf985071f60580deb4a88b9e34712d01" + dependencies: + cross-spawn "^5.0.1" + get-stream "^3.0.0" + is-stream "^1.1.0" + npm-run-path "^2.0.0" + p-finally "^1.0.0" + signal-exit "^3.0.0" + strip-eof "^1.0.0" + +exit@^0.1.2: + version "0.1.2" 
+ resolved "https://registry.yarnpkg.com/exit/-/exit-0.1.2.tgz#0632638f8d877cc82107d30a0fff1a17cba1cd0c" + +expand-brackets@^0.1.4: + version "0.1.5" + resolved "https://registry.yarnpkg.com/expand-brackets/-/expand-brackets-0.1.5.tgz#df07284e342a807cd733ac5af72411e581d1177b" + dependencies: + is-posix-bracket "^0.1.0" + +expand-brackets@^2.1.4: + version "2.1.4" + resolved "https://registry.yarnpkg.com/expand-brackets/-/expand-brackets-2.1.4.tgz#b77735e315ce30f6b6eff0f83b04151a22449622" + dependencies: + debug "^2.3.3" + define-property "^0.2.5" + extend-shallow "^2.0.1" + posix-character-classes "^0.1.0" + regex-not "^1.0.0" + snapdragon "^0.8.1" + to-regex "^3.0.1" + +expand-range@^1.8.1: + version "1.8.2" + resolved "https://registry.yarnpkg.com/expand-range/-/expand-range-1.8.2.tgz#a299effd335fe2721ebae8e257ec79644fc85337" + dependencies: + fill-range "^2.1.0" + +expand-template@^1.0.2: + version "1.1.0" + resolved "https://registry.yarnpkg.com/expand-template/-/expand-template-1.1.0.tgz#e09efba977bf98f9ee0ed25abd0c692e02aec3fc" + +expect-ct@0.1.0: + version "0.1.0" + resolved "https://registry.yarnpkg.com/expect-ct/-/expect-ct-0.1.0.tgz#52735678de18530890d8d7b95f0ac63640958094" + +expect@^22.1.0: + version "22.1.0" + resolved "https://registry.yarnpkg.com/expect/-/expect-22.1.0.tgz#f8f9b019ab275d859cbefed531fbaefe8972431d" + dependencies: + ansi-styles "^3.2.0" + jest-diff "^22.1.0" + jest-get-type "^22.1.0" + jest-matcher-utils "^22.1.0" + jest-message-util "^22.1.0" + jest-regex-util "^22.1.0" + +express-session@^1.15.6: + version "1.15.6" + resolved "https://registry.yarnpkg.com/express-session/-/express-session-1.15.6.tgz#47b4160c88f42ab70fe8a508e31cbff76757ab0a" + dependencies: + cookie "0.3.1" + cookie-signature "1.0.6" + crc "3.4.4" + debug "2.6.9" + depd "~1.1.1" + on-headers "~1.0.1" + parseurl "~1.3.2" + uid-safe "~2.1.5" + utils-merge "1.0.1" + +express@^4.16.2: + version "4.16.2" + resolved 
"https://registry.yarnpkg.com/express/-/express-4.16.2.tgz#e35c6dfe2d64b7dca0a5cd4f21781be3299e076c" + dependencies: + accepts "~1.3.4" + array-flatten "1.1.1" + body-parser "1.18.2" + content-disposition "0.5.2" + content-type "~1.0.4" + cookie "0.3.1" + cookie-signature "1.0.6" + debug "2.6.9" + depd "~1.1.1" + encodeurl "~1.0.1" + escape-html "~1.0.3" + etag "~1.8.1" + finalhandler "1.1.0" + fresh "0.5.2" + merge-descriptors "1.0.1" + methods "~1.1.2" + on-finished "~2.3.0" + parseurl "~1.3.2" + path-to-regexp "0.1.7" + proxy-addr "~2.0.2" + qs "6.5.1" + range-parser "~1.2.0" + safe-buffer "5.1.1" + send "0.16.1" + serve-static "1.13.1" + setprototypeof "1.1.0" + statuses "~1.3.1" + type-is "~1.6.15" + utils-merge "1.0.1" + vary "~1.1.2" + +extend-shallow@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/extend-shallow/-/extend-shallow-2.0.1.tgz#51af7d614ad9a9f610ea1bafbb989d6b1c56890f" + dependencies: + is-extendable "^0.1.0" + +extend-shallow@^3.0.0: + version "3.0.2" + resolved "https://registry.yarnpkg.com/extend-shallow/-/extend-shallow-3.0.2.tgz#26a71aaf073b39fb2127172746131c2704028db8" + dependencies: + assign-symbols "^1.0.0" + is-extendable "^1.0.1" + +extend@3, extend@~3.0.0, extend@~3.0.1: + version "3.0.1" + resolved "https://registry.yarnpkg.com/extend/-/extend-3.0.1.tgz#a755ea7bc1adfcc5a31ce7e762dbaadc5e636444" + +external-editor@^2.0.1, external-editor@^2.0.4: + version "2.1.0" + resolved "https://registry.yarnpkg.com/external-editor/-/external-editor-2.1.0.tgz#3d026a21b7f95b5726387d4200ac160d372c3b48" + dependencies: + chardet "^0.4.0" + iconv-lite "^0.4.17" + tmp "^0.0.33" + +extglob@^0.3.1: + version "0.3.2" + resolved "https://registry.yarnpkg.com/extglob/-/extglob-0.3.2.tgz#2e18ff3d2f49ab2765cec9023f011daa8d8349a1" + dependencies: + is-extglob "^1.0.0" + +extglob@^2.0.2: + version "2.0.4" + resolved "https://registry.yarnpkg.com/extglob/-/extglob-2.0.4.tgz#ad00fe4dc612a9232e8718711dc5cb5ab0285543" + dependencies: + 
array-unique "^0.3.2" + define-property "^1.0.0" + expand-brackets "^2.1.4" + extend-shallow "^2.0.1" + fragment-cache "^0.2.1" + regex-not "^1.0.0" + snapdragon "^0.8.1" + to-regex "^3.0.1" + +extsprintf@1.3.0: + version "1.3.0" + resolved "https://registry.yarnpkg.com/extsprintf/-/extsprintf-1.3.0.tgz#96918440e3041a7a414f8c52e3c574eb3c3e1e05" + +extsprintf@^1.2.0: + version "1.4.0" + resolved "https://registry.yarnpkg.com/extsprintf/-/extsprintf-1.4.0.tgz#e2689f8f356fad62cca65a3a91c5df5f9551692f" + +fast-deep-equal@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/fast-deep-equal/-/fast-deep-equal-1.0.0.tgz#96256a3bc975595eb36d82e9929d060d893439ff" + +fast-future@~1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/fast-future/-/fast-future-1.0.2.tgz#8435a9aaa02d79248d17d704e76259301d99280a" + +fast-json-stable-stringify@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/fast-json-stable-stringify/-/fast-json-stable-stringify-2.0.0.tgz#d5142c0caee6b1189f87d3a76111064f86c8bbf2" + +fast-levenshtein@~2.0.4: + version "2.0.6" + resolved "https://registry.yarnpkg.com/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz#3d8a5c66883a16a30ca8643e851f19baa7797917" + +fatfs@^0.10.4: + version "0.10.6" + resolved "https://registry.yarnpkg.com/fatfs/-/fatfs-0.10.6.tgz#e401029858a84638677a77fbc6ed03dae9ebddba" + dependencies: + fifolock "^1.0.0" + struct-fu "^1.0.0" + xok "^1.0.0" + +fb-watchman@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/fb-watchman/-/fb-watchman-2.0.0.tgz#54e9abf7dfa2f26cd9b1636c588c1afc05de5d58" + dependencies: + bser "^2.0.0" + +fifolock@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/fifolock/-/fifolock-1.0.0.tgz#a37e54f3ebe69d13480d95a82abc42b7a5c1792d" + +figures@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/figures/-/figures-2.0.0.tgz#3ab1a2d2a62c8bfb431a0c94cb797a2fce27c962" + dependencies: + escape-string-regexp "^1.0.5" + +file-entry-cache@^2.0.0: 
+ version "2.0.0" + resolved "https://registry.yarnpkg.com/file-entry-cache/-/file-entry-cache-2.0.0.tgz#c392990c3e684783d838b8c84a45d8a048458361" + dependencies: + flat-cache "^1.2.1" + object-assign "^4.0.1" + +file-uri-to-path@1: + version "1.0.0" + resolved "https://registry.yarnpkg.com/file-uri-to-path/-/file-uri-to-path-1.0.0.tgz#553a7b8446ff6f684359c445f1e37a05dacc33dd" + +filename-regex@^2.0.0: + version "2.0.1" + resolved "https://registry.yarnpkg.com/filename-regex/-/filename-regex-2.0.1.tgz#c1c4b9bee3e09725ddb106b75c1e301fe2f18b26" + +fileset@^2.0.2: + version "2.0.3" + resolved "https://registry.yarnpkg.com/fileset/-/fileset-2.0.3.tgz#8e7548a96d3cc2327ee5e674168723a333bba2a0" + dependencies: + glob "^7.0.3" + minimatch "^3.0.3" + +fill-range@^2.1.0: + version "2.2.3" + resolved "https://registry.yarnpkg.com/fill-range/-/fill-range-2.2.3.tgz#50b77dfd7e469bc7492470963699fe7a8485a723" + dependencies: + is-number "^2.1.0" + isobject "^2.0.0" + randomatic "^1.1.3" + repeat-element "^1.1.2" + repeat-string "^1.5.2" + +fill-range@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/fill-range/-/fill-range-4.0.0.tgz#d544811d428f98eb06a63dc402d2403c328c38f7" + dependencies: + extend-shallow "^2.0.1" + is-number "^3.0.0" + repeat-string "^1.6.1" + to-regex-range "^2.1.0" + +finalhandler@1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/finalhandler/-/finalhandler-1.1.0.tgz#ce0b6855b45853e791b2fcc680046d88253dd7f5" + dependencies: + debug "2.6.9" + encodeurl "~1.0.1" + escape-html "~1.0.3" + on-finished "~2.3.0" + parseurl "~1.3.2" + statuses "~1.3.1" + unpipe "~1.0.0" + +find-up@^1.0.0: + version "1.1.2" + resolved "https://registry.yarnpkg.com/find-up/-/find-up-1.1.2.tgz#6b2e9822b1a2ce0a60ab64d610eccad53cb24d0f" + dependencies: + path-exists "^2.0.0" + pinkie-promise "^2.0.0" + +find-up@^2.0.0, find-up@^2.1.0: + version "2.1.0" + resolved 
"https://registry.yarnpkg.com/find-up/-/find-up-2.1.0.tgz#45d1b7e506c717ddd482775a2b77920a3c0c57a7" + dependencies: + locate-path "^2.0.0" + +flat-cache@^1.2.1: + version "1.3.0" + resolved "https://registry.yarnpkg.com/flat-cache/-/flat-cache-1.3.0.tgz#d3030b32b38154f4e3b7e9c709f490f7ef97c481" + dependencies: + circular-json "^0.3.1" + del "^2.0.2" + graceful-fs "^4.1.2" + write "^0.2.1" + +for-in@^1.0.1, for-in@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/for-in/-/for-in-1.0.2.tgz#81068d295a8142ec0ac726c6e2200c30fb6d5e80" + +for-own@^0.1.4: + version "0.1.5" + resolved "https://registry.yarnpkg.com/for-own/-/for-own-0.1.5.tgz#5265c681a4f294dabbf17c9509b6763aa84510ce" + dependencies: + for-in "^1.0.1" + +foreach@^2.0.5: + version "2.0.5" + resolved "https://registry.yarnpkg.com/foreach/-/foreach-2.0.5.tgz#0bee005018aeb260d0a3af3ae658dd0136ec1b99" + +forever-agent@~0.6.1: + version "0.6.1" + resolved "https://registry.yarnpkg.com/forever-agent/-/forever-agent-0.6.1.tgz#fbc71f0c41adeb37f96c577ad1ed42d8fdacca91" + +form-data@~2.1.1: + version "2.1.4" + resolved "https://registry.yarnpkg.com/form-data/-/form-data-2.1.4.tgz#33c183acf193276ecaa98143a69e94bfee1750d1" + dependencies: + asynckit "^0.4.0" + combined-stream "^1.0.5" + mime-types "^2.1.12" + +form-data@~2.3.1: + version "2.3.1" + resolved "https://registry.yarnpkg.com/form-data/-/form-data-2.3.1.tgz#6fb94fbd71885306d73d15cc497fe4cc4ecd44bf" + dependencies: + asynckit "^0.4.0" + combined-stream "^1.0.5" + mime-types "^2.1.12" + +forwarded@~0.1.2: + version "0.1.2" + resolved "https://registry.yarnpkg.com/forwarded/-/forwarded-0.1.2.tgz#98c23dab1175657b8c0573e8ceccd91b0ff18c84" + +fragment-cache@^0.2.1: + version "0.2.1" + resolved "https://registry.yarnpkg.com/fragment-cache/-/fragment-cache-0.2.1.tgz#4290fad27f13e89be7f33799c6bc5a0abfff0d19" + dependencies: + map-cache "^0.2.2" + +frameguard@3.0.0: + version "3.0.0" + resolved 
"https://registry.yarnpkg.com/frameguard/-/frameguard-3.0.0.tgz#7bcad469ee7b96e91d12ceb3959c78235a9272e9" + +fresh@0.5.2: + version "0.5.2" + resolved "https://registry.yarnpkg.com/fresh/-/fresh-0.5.2.tgz#3d8cadd90d976569fa835ab1f8e4b23a105605a7" + +from2@^2.3.0: + version "2.3.0" + resolved "https://registry.yarnpkg.com/from2/-/from2-2.3.0.tgz#8bfb5502bde4a4d36cfdeea007fcca21d7e382af" + dependencies: + inherits "^2.0.1" + readable-stream "^2.0.0" + +fs-extra@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/fs-extra/-/fs-extra-5.0.0.tgz#414d0110cdd06705734d055652c5411260c31abd" + dependencies: + graceful-fs "^4.1.2" + jsonfile "^4.0.0" + universalify "^0.1.0" + +fs-promise@^0.4.1: + version "0.4.1" + resolved "https://registry.yarnpkg.com/fs-promise/-/fs-promise-0.4.1.tgz#9d57aed89dbcea0fdb6d4cb9c2044aedd9722efd" + dependencies: + any-promise "^1.0.0" + +fs-readdir-recursive@^1.0.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/fs-readdir-recursive/-/fs-readdir-recursive-1.1.0.tgz#e32fc030a2ccee44a6b5371308da54be0b397d27" + +fs.realpath@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/fs.realpath/-/fs.realpath-1.0.0.tgz#1504ad2523158caa40db4a2787cb01411994ea4f" + +fsevents@^1.0.0, fsevents@^1.1.1: + version "1.1.3" + resolved "https://registry.yarnpkg.com/fsevents/-/fsevents-1.1.3.tgz#11f82318f5fe7bb2cd22965a108e9306208216d8" + dependencies: + nan "^2.3.0" + node-pre-gyp "^0.6.39" + +fstream-ignore@^1.0.5: + version "1.0.5" + resolved "https://registry.yarnpkg.com/fstream-ignore/-/fstream-ignore-1.0.5.tgz#9c31dae34767018fe1d249b24dada67d092da105" + dependencies: + fstream "^1.0.0" + inherits "2" + minimatch "^3.0.0" + +fstream@^1.0.0, fstream@^1.0.10, fstream@^1.0.2: + version "1.0.11" + resolved "https://registry.yarnpkg.com/fstream/-/fstream-1.0.11.tgz#5c1fb1f117477114f0632a0eb4b71b3cb0fd3171" + dependencies: + graceful-fs "^4.1.2" + inherits "~2.0.0" + mkdirp ">=0.5 0" + rimraf "2" + +fstream@~0.1.21: + version 
"0.1.31" + resolved "https://registry.yarnpkg.com/fstream/-/fstream-0.1.31.tgz#7337f058fbbbbefa8c9f561a28cab0849202c988" + dependencies: + graceful-fs "~3.0.2" + inherits "~2.0.0" + mkdirp "0.5" + rimraf "2" + +ftp@~0.3.10: + version "0.3.10" + resolved "https://registry.yarnpkg.com/ftp/-/ftp-0.3.10.tgz#9197d861ad8142f3e63d5a83bfe4c59f7330885d" + dependencies: + readable-stream "1.1.x" + xregexp "2.0.0" + +function-bind@^1.0.2, function-bind@^1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/function-bind/-/function-bind-1.1.1.tgz#a56899d3ea3c9bab874bb9773b7c5ede92f4895d" + +functional-red-black-tree@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/functional-red-black-tree/-/functional-red-black-tree-1.0.1.tgz#1b0ab3bd553b2a0d6399d29c0e3ea0b252078327" + +gauge@~2.7.3: + version "2.7.4" + resolved "https://registry.yarnpkg.com/gauge/-/gauge-2.7.4.tgz#2c03405c7538c39d7eb37b317022e325fb018bf7" + dependencies: + aproba "^1.0.3" + console-control-strings "^1.0.0" + has-unicode "^2.0.0" + object-assign "^4.1.0" + signal-exit "^3.0.0" + string-width "^1.0.1" + strip-ansi "^3.0.1" + wide-align "^1.1.0" + +get-caller-file@^1.0.1: + version "1.0.2" + resolved "https://registry.yarnpkg.com/get-caller-file/-/get-caller-file-1.0.2.tgz#f702e63127e7e231c160a80c1554acb70d5047e5" + +get-stream@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/get-stream/-/get-stream-3.0.0.tgz#8e943d1358dc37555054ecbe2edb05aa174ede14" + +get-uri@^2.0.0: + version "2.0.1" + resolved "https://registry.yarnpkg.com/get-uri/-/get-uri-2.0.1.tgz#dbdcacacd8c608a38316869368117697a1631c59" + dependencies: + data-uri-to-buffer "1" + debug "2" + extend "3" + file-uri-to-path "1" + ftp "~0.3.10" + readable-stream "2" + +get-value@^2.0.3, get-value@^2.0.6: + version "2.0.6" + resolved "https://registry.yarnpkg.com/get-value/-/get-value-2.0.6.tgz#dc15ca1c672387ca76bd37ac0a395ba2042a2c28" + +getpass@^0.1.1: + version "0.1.7" + resolved 
"https://registry.yarnpkg.com/getpass/-/getpass-0.1.7.tgz#5eff8e3e684d569ae4cb2b1282604e8ba62149fa" + dependencies: + assert-plus "^1.0.0" + +github-from-package@0.0.0: + version "0.0.0" + resolved "https://registry.yarnpkg.com/github-from-package/-/github-from-package-0.0.0.tgz#97fb5d96bfde8973313f20e8288ef9a167fa64ce" + +glob-base@^0.3.0: + version "0.3.0" + resolved "https://registry.yarnpkg.com/glob-base/-/glob-base-0.3.0.tgz#dbb164f6221b1c0b1ccf82aea328b497df0ea3c4" + dependencies: + glob-parent "^2.0.0" + is-glob "^2.0.0" + +glob-parent@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/glob-parent/-/glob-parent-2.0.0.tgz#81383d72db054fcccf5336daa902f182f6edbb28" + dependencies: + is-glob "^2.0.0" + +glob@^7.0.0, glob@^7.0.3, glob@^7.0.5, glob@^7.1.1, glob@^7.1.2: + version "7.1.2" + resolved "https://registry.yarnpkg.com/glob/-/glob-7.1.2.tgz#c19c9df9a028702d678612384a6552404c636d15" + dependencies: + fs.realpath "^1.0.0" + inflight "^1.0.4" + inherits "2" + minimatch "^3.0.4" + once "^1.3.0" + path-is-absolute "^1.0.0" + +globals@^11.0.1, globals@^11.1.0: + version "11.3.0" + resolved "https://registry.yarnpkg.com/globals/-/globals-11.3.0.tgz#e04fdb7b9796d8adac9c8f64c14837b2313378b0" + +globals@^9.18.0: + version "9.18.0" + resolved "https://registry.yarnpkg.com/globals/-/globals-9.18.0.tgz#aa3896b3e69b487f17e31ed2143d69a8e30c2d8a" + +globby@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/globby/-/globby-5.0.0.tgz#ebd84667ca0dbb330b99bcfc68eac2bc54370e0d" + dependencies: + array-union "^1.0.1" + arrify "^1.0.0" + glob "^7.0.3" + object-assign "^4.0.1" + pify "^2.0.0" + pinkie-promise "^2.0.0" + +golike-defer@^0.4.1: + version "0.4.1" + resolved "https://registry.yarnpkg.com/golike-defer/-/golike-defer-0.4.1.tgz#7a1cd435d61e461305805d980b133a0f3db4e1cc" + +graceful-fs@^4.1.0, graceful-fs@^4.1.11, graceful-fs@^4.1.2, graceful-fs@^4.1.4, graceful-fs@^4.1.6: + version "4.1.11" + resolved 
"https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.1.11.tgz#0e8bdfe4d1ddb8854d64e04ea7c00e2a026e5658" + +graceful-fs@~3.0.2: + version "3.0.11" + resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-3.0.11.tgz#7613c778a1afea62f25c630a086d7f3acbbdd818" + dependencies: + natives "^1.1.0" + +"graceful-readlink@>= 1.0.0": + version "1.0.1" + resolved "https://registry.yarnpkg.com/graceful-readlink/-/graceful-readlink-1.0.1.tgz#4cafad76bc62f02fa039b2f94e9a3dd3a391a725" + +growly@^1.3.0: + version "1.3.0" + resolved "https://registry.yarnpkg.com/growly/-/growly-1.3.0.tgz#f10748cbe76af964b7c96c93c6bcc28af120c081" + +handlebars@^4.0.3: + version "4.0.11" + resolved "https://registry.yarnpkg.com/handlebars/-/handlebars-4.0.11.tgz#630a35dfe0294bc281edae6ffc5d329fc7982dcc" + dependencies: + async "^1.4.0" + optimist "^0.6.1" + source-map "^0.4.4" + optionalDependencies: + uglify-js "^2.6" + +har-schema@^1.0.5: + version "1.0.5" + resolved "https://registry.yarnpkg.com/har-schema/-/har-schema-1.0.5.tgz#d263135f43307c02c602afc8fe95970c0151369e" + +har-schema@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/har-schema/-/har-schema-2.0.0.tgz#a94c2224ebcac04782a0d9035521f24735b7ec92" + +har-validator@~4.2.1: + version "4.2.1" + resolved "https://registry.yarnpkg.com/har-validator/-/har-validator-4.2.1.tgz#33481d0f1bbff600dd203d75812a6a5fba002e2a" + dependencies: + ajv "^4.9.1" + har-schema "^1.0.5" + +har-validator@~5.0.3: + version "5.0.3" + resolved "https://registry.yarnpkg.com/har-validator/-/har-validator-5.0.3.tgz#ba402c266194f15956ef15e0fcf242993f6a7dfd" + dependencies: + ajv "^5.1.0" + har-schema "^2.0.0" + +has-ansi@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/has-ansi/-/has-ansi-2.0.0.tgz#34f5049ce1ecdf2b0649af3ef24e45ed35416d91" + dependencies: + ansi-regex "^2.0.0" + +has-flag@^1.0.0: + version "1.0.0" + resolved 
"https://registry.yarnpkg.com/has-flag/-/has-flag-1.0.0.tgz#9d9e793165ce017a00f00418c43f942a7b1d11fa" + +has-flag@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-2.0.0.tgz#e8207af1cc7b30d446cc70b734b5e8be18f88d51" + +has-unicode@^2.0.0: + version "2.0.1" + resolved "https://registry.yarnpkg.com/has-unicode/-/has-unicode-2.0.1.tgz#e0e6fe6a28cf51138855e086d1691e771de2a8b9" + +has-value@^0.3.1: + version "0.3.1" + resolved "https://registry.yarnpkg.com/has-value/-/has-value-0.3.1.tgz#7b1f58bada62ca827ec0a2078025654845995e1f" + dependencies: + get-value "^2.0.3" + has-values "^0.1.4" + isobject "^2.0.0" + +has-value@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/has-value/-/has-value-1.0.0.tgz#18b281da585b1c5c51def24c930ed29a0be6b177" + dependencies: + get-value "^2.0.6" + has-values "^1.0.0" + isobject "^3.0.0" + +has-values@^0.1.4: + version "0.1.4" + resolved "https://registry.yarnpkg.com/has-values/-/has-values-0.1.4.tgz#6d61de95d91dfca9b9a02089ad384bff8f62b771" + +has-values@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/has-values/-/has-values-1.0.0.tgz#95b0b63fec2146619a6fe57fe75628d5a39efe4f" + dependencies: + is-number "^3.0.0" + kind-of "^4.0.0" + +has@^1.0.0, has@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/has/-/has-1.0.1.tgz#8461733f538b0837c9361e39a9ab9e9704dc2f28" + dependencies: + function-bind "^1.0.2" + +hashy@^0.6.2: + version "0.6.2" + resolved "https://registry.yarnpkg.com/hashy/-/hashy-0.6.2.tgz#d20fd533826bff9590a6578b39cb3374fded993c" + dependencies: + bcryptjs "^2.4.3" + exec-promise "^0.6.1" + promise-toolbox "^0.8.0" + yargs "^6.6.0" + optionalDependencies: + argon2 "^0.16.1" + bcrypt "^1.0.2" + +hawk@3.1.3, hawk@~3.1.3: + version "3.1.3" + resolved "https://registry.yarnpkg.com/hawk/-/hawk-3.1.3.tgz#078444bd7c1640b0fe540d2c9b73d59678e8e1c4" + dependencies: + boom "2.x.x" + cryptiles "2.x.x" + hoek "2.x.x" + sntp "1.x.x" + +hawk@~6.0.2: + 
version "6.0.2" + resolved "https://registry.yarnpkg.com/hawk/-/hawk-6.0.2.tgz#af4d914eb065f9b5ce4d9d11c1cb2126eecc3038" + dependencies: + boom "4.x.x" + cryptiles "3.x.x" + hoek "4.x.x" + sntp "2.x.x" + +helmet-csp@2.7.0: + version "2.7.0" + resolved "https://registry.yarnpkg.com/helmet-csp/-/helmet-csp-2.7.0.tgz#7934094617d1feb7bb2dc43bb7d9e8830f774716" + dependencies: + camelize "1.0.0" + content-security-policy-builder "2.0.0" + dasherize "2.0.0" + lodash.reduce "4.6.0" + platform "1.3.5" + +helmet@^3.9.0: + version "3.10.0" + resolved "https://registry.yarnpkg.com/helmet/-/helmet-3.10.0.tgz#96a2a9fec53c26009d3d6265c6cfdada38ddfa7f" + dependencies: + dns-prefetch-control "0.1.0" + dont-sniff-mimetype "1.0.0" + expect-ct "0.1.0" + frameguard "3.0.0" + helmet-csp "2.7.0" + hide-powered-by "1.0.0" + hpkp "2.0.0" + hsts "2.1.0" + ienoopen "1.0.0" + nocache "2.0.0" + referrer-policy "1.1.0" + x-xss-protection "1.0.0" + +hide-powered-by@1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/hide-powered-by/-/hide-powered-by-1.0.0.tgz#4a85ad65881f62857fc70af7174a1184dccce32b" + +highland@^2.11.1: + version "2.11.1" + resolved "https://registry.yarnpkg.com/highland/-/highland-2.11.1.tgz#39b4d9299b6e07da3d15e7af7b2a6f127522acaf" + dependencies: + util-deprecate "^1.0.2" + +hoek@2.x.x: + version "2.16.3" + resolved "https://registry.yarnpkg.com/hoek/-/hoek-2.16.3.tgz#20bb7403d3cea398e91dc4710a8ff1b8274a25ed" + +hoek@4.x.x: + version "4.2.0" + resolved "https://registry.yarnpkg.com/hoek/-/hoek-4.2.0.tgz#72d9d0754f7fe25ca2d01ad8f8f9a9449a89526d" + +home-or-tmp@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/home-or-tmp/-/home-or-tmp-2.0.0.tgz#e36c3f2d2cae7d746a857e38d18d5f32a7882db8" + dependencies: + os-homedir "^1.0.0" + os-tmpdir "^1.0.1" + +hosted-git-info@^2.1.4: + version "2.5.0" + resolved "https://registry.yarnpkg.com/hosted-git-info/-/hosted-git-info-2.5.0.tgz#6d60e34b3abbc8313062c3b798ef8d901a07af3c" + +hpkp@2.0.0: + version "2.0.0" 
+ resolved "https://registry.yarnpkg.com/hpkp/-/hpkp-2.0.0.tgz#10e142264e76215a5d30c44ec43de64dee6d1672" + +hsts@2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/hsts/-/hsts-2.1.0.tgz#cbd6c918a2385fee1dd5680bfb2b3a194c0121cc" + +html-encoding-sniffer@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/html-encoding-sniffer/-/html-encoding-sniffer-1.0.2.tgz#e70d84b94da53aa375e11fe3a351be6642ca46f8" + dependencies: + whatwg-encoding "^1.0.1" + +http-errors@1.6.2, http-errors@~1.6.2: + version "1.6.2" + resolved "https://registry.yarnpkg.com/http-errors/-/http-errors-1.6.2.tgz#0a002cc85707192a7e7946ceedc11155f60ec736" + dependencies: + depd "1.1.1" + inherits "2.0.3" + setprototypeof "1.0.3" + statuses ">= 1.3.1 < 2" + +http-proxy-agent@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/http-proxy-agent/-/http-proxy-agent-1.0.0.tgz#cc1ce38e453bf984a0f7702d2dd59c73d081284a" + dependencies: + agent-base "2" + debug "2" + extend "3" + +http-proxy@^1.16.2: + version "1.16.2" + resolved "https://registry.yarnpkg.com/http-proxy/-/http-proxy-1.16.2.tgz#06dff292952bf64dbe8471fa9df73066d4f37742" + dependencies: + eventemitter3 "1.x.x" + requires-port "1.x.x" + +http-request-plus@^0.5.0: + version "0.5.0" + resolved "https://registry.yarnpkg.com/http-request-plus/-/http-request-plus-0.5.0.tgz#70f93ad514b77c4d31bbfa877f802ad267c15d34" + dependencies: + is-redirect "^1.0.0" + lodash "^4.17.4" + promise-toolbox "^0.9.5" + +http-server-plus@^0.8.0: + version "0.8.0" + resolved "https://registry.yarnpkg.com/http-server-plus/-/http-server-plus-0.8.0.tgz#36d8e6a1897dd2ad71f790ac02f02f2db608c155" + dependencies: + event-to-promise "^0.7.0" + lodash "^4.16.0" + +http-signature@~1.1.0: + version "1.1.1" + resolved "https://registry.yarnpkg.com/http-signature/-/http-signature-1.1.1.tgz#df72e267066cd0ac67fb76adf8e134a8fbcf91bf" + dependencies: + assert-plus "^0.2.0" + jsprim "^1.2.2" + sshpk "^1.7.0" + +http-signature@~1.2.0: + version 
"1.2.0" + resolved "https://registry.yarnpkg.com/http-signature/-/http-signature-1.2.0.tgz#9aecd925114772f3d95b65a60abb8f7c18fbace1" + dependencies: + assert-plus "^1.0.0" + jsprim "^1.2.2" + sshpk "^1.7.0" + +https-proxy-agent@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/https-proxy-agent/-/https-proxy-agent-1.0.0.tgz#35f7da6c48ce4ddbfa264891ac593ee5ff8671e6" + dependencies: + agent-base "2" + debug "2" + extend "3" + +human-format@^0.10.0: + version "0.10.0" + resolved "https://registry.yarnpkg.com/human-format/-/human-format-0.10.0.tgz#0583c91bfcef0e465a31097f1c627db32c7b502f" + +husky@^0.14.3: + version "0.14.3" + resolved "https://registry.yarnpkg.com/husky/-/husky-0.14.3.tgz#c69ed74e2d2779769a17ba8399b54ce0b63c12c3" + dependencies: + is-ci "^1.0.10" + normalize-path "^1.0.0" + strip-indent "^2.0.0" + +iconv-lite@0.4.19, iconv-lite@^0.4.17, iconv-lite@~0.4.13: + version "0.4.19" + resolved "https://registry.yarnpkg.com/iconv-lite/-/iconv-lite-0.4.19.tgz#f7468f60135f5e5dad3399c0a81be9a1603a082b" + +ienoopen@1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/ienoopen/-/ienoopen-1.0.0.tgz#346a428f474aac8f50cf3784ea2d0f16f62bda6b" + +ignore@^3.3.3, ignore@^3.3.6: + version "3.3.7" + resolved "https://registry.yarnpkg.com/ignore/-/ignore-3.3.7.tgz#612289bfb3c220e186a58118618d5be8c1bab021" + +import-local@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/import-local/-/import-local-1.0.0.tgz#5e4ffdc03f4fe6c009c6729beb29631c2f8227bc" + dependencies: + pkg-dir "^2.0.0" + resolve-cwd "^2.0.0" + +imurmurhash@^0.1.4: + version "0.1.4" + resolved "https://registry.yarnpkg.com/imurmurhash/-/imurmurhash-0.1.4.tgz#9218b9b2b928a238b13dc4fb6b6d576f231453ea" + +index-modules@^0.3.0: + version "0.3.0" + resolved "https://registry.yarnpkg.com/index-modules/-/index-modules-0.3.0.tgz#70a0d80be07ad3e1a6e4849c9aaf08348050a9ae" + dependencies: + lodash "^4.17.4" + promise-toolbox "^0.8.0" + +inflight@^1.0.4: + version "1.0.6" + 
resolved "https://registry.yarnpkg.com/inflight/-/inflight-1.0.6.tgz#49bd6331d7d02d0c09bc910a1075ba8165b56df9" + dependencies: + once "^1.3.0" + wrappy "1" + +inherits@2, inherits@2.0.3, inherits@^2.0.1, inherits@^2.0.3, inherits@~2.0.0, inherits@~2.0.1, inherits@~2.0.3: + version "2.0.3" + resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.3.tgz#633c2c83e3da42a502f52466022480f4208261de" + +ini@~1.3.0: + version "1.3.5" + resolved "https://registry.yarnpkg.com/ini/-/ini-1.3.5.tgz#eee25f56db1c9ec6085e0c22778083f596abf927" + +inquirer@3.0.6: + version "3.0.6" + resolved "https://registry.yarnpkg.com/inquirer/-/inquirer-3.0.6.tgz#e04aaa9d05b7a3cb9b0f407d04375f0447190347" + dependencies: + ansi-escapes "^1.1.0" + chalk "^1.0.0" + cli-cursor "^2.1.0" + cli-width "^2.0.0" + external-editor "^2.0.1" + figures "^2.0.0" + lodash "^4.3.0" + mute-stream "0.0.7" + run-async "^2.2.0" + rx "^4.1.0" + string-width "^2.0.0" + strip-ansi "^3.0.0" + through "^2.3.6" + +inquirer@^3.0.6: + version "3.3.0" + resolved "https://registry.yarnpkg.com/inquirer/-/inquirer-3.3.0.tgz#9dd2f2ad765dcab1ff0443b491442a20ba227dc9" + dependencies: + ansi-escapes "^3.0.0" + chalk "^2.0.0" + cli-cursor "^2.1.0" + cli-width "^2.0.0" + external-editor "^2.0.4" + figures "^2.0.0" + lodash "^4.3.0" + mute-stream "0.0.7" + run-async "^2.2.0" + rx-lite "^4.0.8" + rx-lite-aggregates "^4.0.8" + string-width "^2.1.0" + strip-ansi "^4.0.0" + through "^2.3.6" + +invariant@^2.2.0, invariant@^2.2.2: + version "2.2.2" + resolved "https://registry.yarnpkg.com/invariant/-/invariant-2.2.2.tgz#9e1f56ac0acdb6bf303306f338be3b204ae60360" + dependencies: + loose-envify "^1.0.0" + +invert-kv@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/invert-kv/-/invert-kv-1.0.0.tgz#104a8e4aaca6d3d8cd157a8ef8bfab2d7a3ffdb6" + +ip@^1.1.4, ip@^1.1.5: + version "1.1.5" + resolved "https://registry.yarnpkg.com/ip/-/ip-1.1.5.tgz#bdded70114290828c0a039e72ef25f5aaec4354a" + +ipaddr.js@1.5.2: + version "1.5.2" + 
resolved "https://registry.yarnpkg.com/ipaddr.js/-/ipaddr.js-1.5.2.tgz#d4b505bde9946987ccf0fc58d9010ff9607e3fa0" + +is-accessor-descriptor@^0.1.6: + version "0.1.6" + resolved "https://registry.yarnpkg.com/is-accessor-descriptor/-/is-accessor-descriptor-0.1.6.tgz#a9e12cb3ae8d876727eeef3843f8a0897b5c98d6" + dependencies: + kind-of "^3.0.2" + +is-accessor-descriptor@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/is-accessor-descriptor/-/is-accessor-descriptor-1.0.0.tgz#169c2f6d3df1f992618072365c9b0ea1f6878656" + dependencies: + kind-of "^6.0.0" + +is-arrayish@^0.2.1: + version "0.2.1" + resolved "https://registry.yarnpkg.com/is-arrayish/-/is-arrayish-0.2.1.tgz#77c99840527aa8ecb1a8ba697b80645a7a926a9d" + +is-binary-path@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/is-binary-path/-/is-binary-path-1.0.1.tgz#75f16642b480f187a711c814161fd3a4a7655898" + dependencies: + binary-extensions "^1.0.0" + +is-buffer@^1.1.5: + version "1.1.6" + resolved "https://registry.yarnpkg.com/is-buffer/-/is-buffer-1.1.6.tgz#efaa2ea9daa0d7ab2ea13a97b2b8ad51fefbe8be" + +is-builtin-module@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/is-builtin-module/-/is-builtin-module-1.0.0.tgz#540572d34f7ac3119f8f76c30cbc1b1e037affbe" + dependencies: + builtin-modules "^1.0.0" + +is-callable@^1.1.1, is-callable@^1.1.3: + version "1.1.3" + resolved "https://registry.yarnpkg.com/is-callable/-/is-callable-1.1.3.tgz#86eb75392805ddc33af71c92a0eedf74ee7604b2" + +is-ci@^1.0.10: + version "1.1.0" + resolved "https://registry.yarnpkg.com/is-ci/-/is-ci-1.1.0.tgz#247e4162e7860cebbdaf30b774d6b0ac7dcfe7a5" + dependencies: + ci-info "^1.0.0" + +is-data-descriptor@^0.1.4: + version "0.1.4" + resolved "https://registry.yarnpkg.com/is-data-descriptor/-/is-data-descriptor-0.1.4.tgz#0b5ee648388e2c860282e793f1856fec3f301b56" + dependencies: + kind-of "^3.0.2" + +is-data-descriptor@^1.0.0: + version "1.0.0" + resolved 
"https://registry.yarnpkg.com/is-data-descriptor/-/is-data-descriptor-1.0.0.tgz#d84876321d0e7add03990406abbbbd36ba9268c7" + dependencies: + kind-of "^6.0.0" + +is-date-object@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/is-date-object/-/is-date-object-1.0.1.tgz#9aa20eb6aeebbff77fbd33e74ca01b33581d3a16" + +is-descriptor@^0.1.0: + version "0.1.6" + resolved "https://registry.yarnpkg.com/is-descriptor/-/is-descriptor-0.1.6.tgz#366d8240dde487ca51823b1ab9f07a10a78251ca" + dependencies: + is-accessor-descriptor "^0.1.6" + is-data-descriptor "^0.1.4" + kind-of "^5.0.0" + +is-descriptor@^1.0.0: + version "1.0.2" + resolved "https://registry.yarnpkg.com/is-descriptor/-/is-descriptor-1.0.2.tgz#3b159746a66604b04f8c81524ba365c5f14d86ec" + dependencies: + is-accessor-descriptor "^1.0.0" + is-data-descriptor "^1.0.0" + kind-of "^6.0.2" + +is-dotfile@^1.0.0: + version "1.0.3" + resolved "https://registry.yarnpkg.com/is-dotfile/-/is-dotfile-1.0.3.tgz#a6a2f32ffd2dfb04f5ca25ecd0f6b83cf798a1e1" + +is-equal-shallow@^0.1.3: + version "0.1.3" + resolved "https://registry.yarnpkg.com/is-equal-shallow/-/is-equal-shallow-0.1.3.tgz#2238098fc221de0bcfa5d9eac4c45d638aa1c534" + dependencies: + is-primitive "^2.0.0" + +is-expression@^2.0.1: + version "2.1.0" + resolved "https://registry.yarnpkg.com/is-expression/-/is-expression-2.1.0.tgz#91be9d47debcfef077977e9722be6dcfb4465ef0" + dependencies: + acorn "~3.3.0" + object-assign "^4.0.1" + +is-expression@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/is-expression/-/is-expression-3.0.0.tgz#39acaa6be7fd1f3471dc42c7416e61c24317ac9f" + dependencies: + acorn "~4.0.2" + object-assign "^4.0.1" + +is-extendable@^0.1.0, is-extendable@^0.1.1: + version "0.1.1" + resolved "https://registry.yarnpkg.com/is-extendable/-/is-extendable-0.1.1.tgz#62b110e289a471418e3ec36a617d472e301dfc89" + +is-extendable@^1.0.1: + version "1.0.1" + resolved 
"https://registry.yarnpkg.com/is-extendable/-/is-extendable-1.0.1.tgz#a7470f9e426733d81bd81e1155264e3a3507cab4" + dependencies: + is-plain-object "^2.0.4" + +is-extglob@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/is-extglob/-/is-extglob-1.0.0.tgz#ac468177c4943405a092fc8f29760c6ffc6206c0" + +is-finite@^1.0.0: + version "1.0.2" + resolved "https://registry.yarnpkg.com/is-finite/-/is-finite-1.0.2.tgz#cc6677695602be550ef11e8b4aa6305342b6d0aa" + dependencies: + number-is-nan "^1.0.0" + +is-fullwidth-code-point@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-1.0.0.tgz#ef9e31386f031a7f0d643af82fde50c457ef00cb" + dependencies: + number-is-nan "^1.0.0" + +is-fullwidth-code-point@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz#a3b30a5c4f199183167aaab93beefae3ddfb654f" + +is-generator-fn@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/is-generator-fn/-/is-generator-fn-1.0.0.tgz#969d49e1bb3329f6bb7f09089be26578b2ddd46a" + +is-glob@^2.0.0, is-glob@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/is-glob/-/is-glob-2.0.1.tgz#d096f926a3ded5600f3fdfd91198cb0888c2d863" + dependencies: + is-extglob "^1.0.0" + +is-number@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/is-number/-/is-number-2.1.0.tgz#01fcbbb393463a548f2f466cce16dece49db908f" + dependencies: + kind-of "^3.0.2" + +is-number@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/is-number/-/is-number-3.0.0.tgz#24fd6201a4782cf50561c810276afc7d12d71195" + dependencies: + kind-of "^3.0.2" + +is-odd@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/is-odd/-/is-odd-1.0.0.tgz#3b8a932eb028b3775c39bb09e91767accdb69088" + dependencies: + is-number "^3.0.0" + +is-path-cwd@^1.0.0: + version "1.0.0" + resolved 
"https://registry.yarnpkg.com/is-path-cwd/-/is-path-cwd-1.0.0.tgz#d225ec23132e89edd38fda767472e62e65f1106d" + +is-path-in-cwd@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/is-path-in-cwd/-/is-path-in-cwd-1.0.0.tgz#6477582b8214d602346094567003be8a9eac04dc" + dependencies: + is-path-inside "^1.0.0" + +is-path-inside@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/is-path-inside/-/is-path-inside-1.0.1.tgz#8ef5b7de50437a3fdca6b4e865ef7aa55cb48036" + dependencies: + path-is-inside "^1.0.1" + +is-plain-object@^2.0.1, is-plain-object@^2.0.3, is-plain-object@^2.0.4: + version "2.0.4" + resolved "https://registry.yarnpkg.com/is-plain-object/-/is-plain-object-2.0.4.tgz#2c163b3fafb1b606d9d17928f05c2a1c38e07677" + dependencies: + isobject "^3.0.1" + +is-posix-bracket@^0.1.0: + version "0.1.1" + resolved "https://registry.yarnpkg.com/is-posix-bracket/-/is-posix-bracket-0.1.1.tgz#3334dc79774368e92f016e6fbc0a88f5cd6e6bc4" + +is-primitive@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/is-primitive/-/is-primitive-2.0.0.tgz#207bab91638499c07b2adf240a41a87210034575" + +is-promise@^2.0.0, is-promise@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/is-promise/-/is-promise-2.1.0.tgz#79a2a9ece7f096e80f36d2b2f3bc16c1ff4bf3fa" + +is-redirect@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/is-redirect/-/is-redirect-1.0.0.tgz#1d03dded53bd8db0f30c26e4f95d36fc7c87dc24" + +is-regex@^1.0.3, is-regex@^1.0.4: + version "1.0.4" + resolved "https://registry.yarnpkg.com/is-regex/-/is-regex-1.0.4.tgz#5517489b547091b0930e095654ced25ee97e9491" + dependencies: + has "^1.0.1" + +is-resolvable@^1.0.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/is-resolvable/-/is-resolvable-1.1.0.tgz#fb18f87ce1feb925169c9a407c19318a3206ed88" + +is-stream@^1.0.1, is-stream@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/is-stream/-/is-stream-1.1.0.tgz#12d4a3dd4e68e0b79ceb8dbc84173ae80d91ca44" + 
+is-symbol@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/is-symbol/-/is-symbol-1.0.1.tgz#3cc59f00025194b6ab2e38dbae6689256b660572" + +is-typedarray@~1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/is-typedarray/-/is-typedarray-1.0.0.tgz#e479c80858df0c1b11ddda6940f96011fcda4a9a" + +is-utf8@^0.2.0: + version "0.2.1" + resolved "https://registry.yarnpkg.com/is-utf8/-/is-utf8-0.2.1.tgz#4b0da1442104d1b336340e80797e865cf39f7d72" + +is-windows@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/is-windows/-/is-windows-1.0.1.tgz#310db70f742d259a16a369202b51af84233310d9" + +isarray@0.0.1: + version "0.0.1" + resolved "https://registry.yarnpkg.com/isarray/-/isarray-0.0.1.tgz#8a18acfca9a8f4177e09abfc6038939b05d1eedf" + +isarray@1.0.0, isarray@^1.0.0, isarray@~1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/isarray/-/isarray-1.0.0.tgz#bb935d48582cba168c06834957a54a3e07124f11" + +isexe@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/isexe/-/isexe-2.0.0.tgz#e8fbf374dc556ff8947a10dcb0572d633f2cfa10" + +isobject@^2.0.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/isobject/-/isobject-2.1.0.tgz#f065561096a3f1da2ef46272f815c840d87e0c89" + dependencies: + isarray "1.0.0" + +isobject@^3.0.0, isobject@^3.0.1: + version "3.0.1" + resolved "https://registry.yarnpkg.com/isobject/-/isobject-3.0.1.tgz#4e431e92b11a9731636aa1f9c8d1ccbcfdab78df" + +isstream@~0.1.2: + version "0.1.2" + resolved "https://registry.yarnpkg.com/isstream/-/isstream-0.1.2.tgz#47e63f7af55afa6f92e1500e690eb8b8529c099a" + +istanbul-api@^1.1.14: + version "1.2.1" + resolved "https://registry.yarnpkg.com/istanbul-api/-/istanbul-api-1.2.1.tgz#0c60a0515eb11c7d65c6b50bba2c6e999acd8620" + dependencies: + async "^2.1.4" + fileset "^2.0.2" + istanbul-lib-coverage "^1.1.1" + istanbul-lib-hook "^1.1.0" + istanbul-lib-instrument "^1.9.1" + istanbul-lib-report "^1.1.2" + istanbul-lib-source-maps "^1.2.2" + istanbul-reports 
"^1.1.3" + js-yaml "^3.7.0" + mkdirp "^0.5.1" + once "^1.4.0" + +istanbul-lib-coverage@^1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/istanbul-lib-coverage/-/istanbul-lib-coverage-1.1.1.tgz#73bfb998885299415c93d38a3e9adf784a77a9da" + +istanbul-lib-hook@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/istanbul-lib-hook/-/istanbul-lib-hook-1.1.0.tgz#8538d970372cb3716d53e55523dd54b557a8d89b" + dependencies: + append-transform "^0.4.0" + +istanbul-lib-instrument@^1.7.5, istanbul-lib-instrument@^1.8.0, istanbul-lib-instrument@^1.9.1: + version "1.9.1" + resolved "https://registry.yarnpkg.com/istanbul-lib-instrument/-/istanbul-lib-instrument-1.9.1.tgz#250b30b3531e5d3251299fdd64b0b2c9db6b558e" + dependencies: + babel-generator "^6.18.0" + babel-template "^6.16.0" + babel-traverse "^6.18.0" + babel-types "^6.18.0" + babylon "^6.18.0" + istanbul-lib-coverage "^1.1.1" + semver "^5.3.0" + +istanbul-lib-report@^1.1.2: + version "1.1.2" + resolved "https://registry.yarnpkg.com/istanbul-lib-report/-/istanbul-lib-report-1.1.2.tgz#922be27c13b9511b979bd1587359f69798c1d425" + dependencies: + istanbul-lib-coverage "^1.1.1" + mkdirp "^0.5.1" + path-parse "^1.0.5" + supports-color "^3.1.2" + +istanbul-lib-source-maps@^1.2.1, istanbul-lib-source-maps@^1.2.2: + version "1.2.2" + resolved "https://registry.yarnpkg.com/istanbul-lib-source-maps/-/istanbul-lib-source-maps-1.2.2.tgz#750578602435f28a0c04ee6d7d9e0f2960e62c1c" + dependencies: + debug "^3.1.0" + istanbul-lib-coverage "^1.1.1" + mkdirp "^0.5.1" + rimraf "^2.6.1" + source-map "^0.5.3" + +istanbul-reports@^1.1.3: + version "1.1.3" + resolved "https://registry.yarnpkg.com/istanbul-reports/-/istanbul-reports-1.1.3.tgz#3b9e1e8defb6d18b1d425da8e8b32c5a163f2d10" + dependencies: + handlebars "^4.0.3" + +iterable-backoff@^0.0.0: + version "0.0.0" + resolved "https://registry.yarnpkg.com/iterable-backoff/-/iterable-backoff-0.0.0.tgz#e49660be7e1727fc52ee5c86c67cc3a427b4e109" + 
+jest-changed-files@^22.1.4: + version "22.1.4" + resolved "https://registry.yarnpkg.com/jest-changed-files/-/jest-changed-files-22.1.4.tgz#1f7844bcb739dec07e5899a633c0cb6d5069834e" + dependencies: + throat "^4.0.0" + +jest-cli@^22.1.4: + version "22.1.4" + resolved "https://registry.yarnpkg.com/jest-cli/-/jest-cli-22.1.4.tgz#0fe9f3ac881b0cdc00227114c58583a2ebefcc04" + dependencies: + ansi-escapes "^3.0.0" + chalk "^2.0.1" + exit "^0.1.2" + glob "^7.1.2" + graceful-fs "^4.1.11" + import-local "^1.0.0" + is-ci "^1.0.10" + istanbul-api "^1.1.14" + istanbul-lib-coverage "^1.1.1" + istanbul-lib-instrument "^1.8.0" + istanbul-lib-source-maps "^1.2.1" + jest-changed-files "^22.1.4" + jest-config "^22.1.4" + jest-environment-jsdom "^22.1.4" + jest-get-type "^22.1.0" + jest-haste-map "^22.1.0" + jest-message-util "^22.1.0" + jest-regex-util "^22.1.0" + jest-resolve-dependencies "^22.1.0" + jest-runner "^22.1.4" + jest-runtime "^22.1.4" + jest-snapshot "^22.1.2" + jest-util "^22.1.4" + jest-worker "^22.1.0" + micromatch "^2.3.11" + node-notifier "^5.1.2" + realpath-native "^1.0.0" + rimraf "^2.5.4" + slash "^1.0.0" + string-length "^2.0.0" + strip-ansi "^4.0.0" + which "^1.2.12" + yargs "^10.0.3" + +jest-config@^22.1.4: + version "22.1.4" + resolved "https://registry.yarnpkg.com/jest-config/-/jest-config-22.1.4.tgz#075ffacce83c3e38cf85b1b9ba0d21bd3ee27ad0" + dependencies: + chalk "^2.0.1" + glob "^7.1.1" + jest-environment-jsdom "^22.1.4" + jest-environment-node "^22.1.4" + jest-get-type "^22.1.0" + jest-jasmine2 "^22.1.4" + jest-regex-util "^22.1.0" + jest-resolve "^22.1.4" + jest-util "^22.1.4" + jest-validate "^22.1.2" + pretty-format "^22.1.0" + +jest-diff@^22.1.0: + version "22.1.0" + resolved "https://registry.yarnpkg.com/jest-diff/-/jest-diff-22.1.0.tgz#0fad9d96c87b453896bf939df3dc8aac6919ac38" + dependencies: + chalk "^2.0.1" + diff "^3.2.0" + jest-get-type "^22.1.0" + pretty-format "^22.1.0" + +jest-docblock@^22.1.0: + version "22.1.0" + resolved 
"https://registry.yarnpkg.com/jest-docblock/-/jest-docblock-22.1.0.tgz#3fe5986d5444cbcb149746eb4b07c57c5a464dfd" + dependencies: + detect-newline "^2.1.0" + +jest-environment-jsdom@^22.1.4: + version "22.1.4" + resolved "https://registry.yarnpkg.com/jest-environment-jsdom/-/jest-environment-jsdom-22.1.4.tgz#704518ce8375f7ec5de048d1e9c4268b08a03e00" + dependencies: + jest-mock "^22.1.0" + jest-util "^22.1.4" + jsdom "^11.5.1" + +jest-environment-node@^22.1.4: + version "22.1.4" + resolved "https://registry.yarnpkg.com/jest-environment-node/-/jest-environment-node-22.1.4.tgz#0f2946e8f8686ce6c5d8fa280ce1cd8d58e869eb" + dependencies: + jest-mock "^22.1.0" + jest-util "^22.1.4" + +jest-get-type@^22.1.0: + version "22.1.0" + resolved "https://registry.yarnpkg.com/jest-get-type/-/jest-get-type-22.1.0.tgz#4e90af298ed6181edc85d2da500dbd2753e0d5a9" + +jest-haste-map@^22.1.0: + version "22.1.0" + resolved "https://registry.yarnpkg.com/jest-haste-map/-/jest-haste-map-22.1.0.tgz#1174c6ff393f9818ebf1163710d8868b5370da2a" + dependencies: + fb-watchman "^2.0.0" + graceful-fs "^4.1.11" + jest-docblock "^22.1.0" + jest-worker "^22.1.0" + micromatch "^2.3.11" + sane "^2.0.0" + +jest-jasmine2@^22.1.4: + version "22.1.4" + resolved "https://registry.yarnpkg.com/jest-jasmine2/-/jest-jasmine2-22.1.4.tgz#cada0baf50a220c616a9575728b80d4ddedebe8b" + dependencies: + callsites "^2.0.0" + chalk "^2.0.1" + co "^4.6.0" + expect "^22.1.0" + graceful-fs "^4.1.11" + is-generator-fn "^1.0.0" + jest-diff "^22.1.0" + jest-matcher-utils "^22.1.0" + jest-message-util "^22.1.0" + jest-snapshot "^22.1.2" + source-map-support "^0.5.0" + +jest-leak-detector@^22.1.0: + version "22.1.0" + resolved "https://registry.yarnpkg.com/jest-leak-detector/-/jest-leak-detector-22.1.0.tgz#08376644cee07103da069baac19adb0299b772c2" + dependencies: + pretty-format "^22.1.0" + +jest-matcher-utils@^22.1.0: + version "22.1.0" + resolved 
"https://registry.yarnpkg.com/jest-matcher-utils/-/jest-matcher-utils-22.1.0.tgz#e164665b5d313636ac29f7f6fe9ef0a6ce04febc" + dependencies: + chalk "^2.0.1" + jest-get-type "^22.1.0" + pretty-format "^22.1.0" + +jest-message-util@^22.1.0: + version "22.1.0" + resolved "https://registry.yarnpkg.com/jest-message-util/-/jest-message-util-22.1.0.tgz#51ba0794cb6e579bfc4e9adfac452f9f1a0293fc" + dependencies: + "@babel/code-frame" "^7.0.0-beta.35" + chalk "^2.0.1" + micromatch "^2.3.11" + slash "^1.0.0" + stack-utils "^1.0.1" + +jest-mock@^22.1.0: + version "22.1.0" + resolved "https://registry.yarnpkg.com/jest-mock/-/jest-mock-22.1.0.tgz#87ec21c0599325671c9a23ad0e05c86fb5879b61" + +jest-regex-util@^22.1.0: + version "22.1.0" + resolved "https://registry.yarnpkg.com/jest-regex-util/-/jest-regex-util-22.1.0.tgz#5daf2fe270074b6da63e5d85f1c9acc866768f53" + +jest-resolve-dependencies@^22.1.0: + version "22.1.0" + resolved "https://registry.yarnpkg.com/jest-resolve-dependencies/-/jest-resolve-dependencies-22.1.0.tgz#340e4139fb13315cd43abc054e6c06136be51e31" + dependencies: + jest-regex-util "^22.1.0" + +jest-resolve@^22.1.4: + version "22.1.4" + resolved "https://registry.yarnpkg.com/jest-resolve/-/jest-resolve-22.1.4.tgz#72b9b371eaac48f84aad4ad732222ffe37692602" + dependencies: + browser-resolve "^1.11.2" + chalk "^2.0.1" + +jest-runner@^22.1.4: + version "22.1.4" + resolved "https://registry.yarnpkg.com/jest-runner/-/jest-runner-22.1.4.tgz#e039039110cb1b31febc0f99e349bf7c94304a2f" + dependencies: + exit "^0.1.2" + jest-config "^22.1.4" + jest-docblock "^22.1.0" + jest-haste-map "^22.1.0" + jest-jasmine2 "^22.1.4" + jest-leak-detector "^22.1.0" + jest-message-util "^22.1.0" + jest-runtime "^22.1.4" + jest-util "^22.1.4" + jest-worker "^22.1.0" + throat "^4.0.0" + +jest-runtime@^22.1.4: + version "22.1.4" + resolved "https://registry.yarnpkg.com/jest-runtime/-/jest-runtime-22.1.4.tgz#1474d9f5cda518b702e0b25a17d4ef3fc563a20c" + dependencies: + babel-core "^6.0.0" + babel-jest 
"^22.1.0" + babel-plugin-istanbul "^4.1.5" + chalk "^2.0.1" + convert-source-map "^1.4.0" + exit "^0.1.2" + graceful-fs "^4.1.11" + jest-config "^22.1.4" + jest-haste-map "^22.1.0" + jest-regex-util "^22.1.0" + jest-resolve "^22.1.4" + jest-util "^22.1.4" + json-stable-stringify "^1.0.1" + micromatch "^2.3.11" + realpath-native "^1.0.0" + slash "^1.0.0" + strip-bom "3.0.0" + write-file-atomic "^2.1.0" + yargs "^10.0.3" + +jest-snapshot@^22.1.2: + version "22.1.2" + resolved "https://registry.yarnpkg.com/jest-snapshot/-/jest-snapshot-22.1.2.tgz#b270cf6e3098f33aceeafda02b13eb0933dc6139" + dependencies: + chalk "^2.0.1" + jest-diff "^22.1.0" + jest-matcher-utils "^22.1.0" + mkdirp "^0.5.1" + natural-compare "^1.4.0" + pretty-format "^22.1.0" + +jest-util@^22.1.4: + version "22.1.4" + resolved "https://registry.yarnpkg.com/jest-util/-/jest-util-22.1.4.tgz#ac8cbd43ee654102f1941f3f0e9d1d789a8b6a9b" + dependencies: + callsites "^2.0.0" + chalk "^2.0.1" + graceful-fs "^4.1.11" + is-ci "^1.0.10" + jest-message-util "^22.1.0" + jest-validate "^22.1.2" + mkdirp "^0.5.1" + +jest-validate@^22.1.2: + version "22.1.2" + resolved "https://registry.yarnpkg.com/jest-validate/-/jest-validate-22.1.2.tgz#c3b06bcba7bd9a850919fe336b5f2a8c3a239404" + dependencies: + chalk "^2.0.1" + jest-get-type "^22.1.0" + leven "^2.1.0" + pretty-format "^22.1.0" + +jest-worker@^22.1.0: + version "22.1.0" + resolved "https://registry.yarnpkg.com/jest-worker/-/jest-worker-22.1.0.tgz#0987832fe58fbdc205357f4c19b992446368cafb" + dependencies: + merge-stream "^1.0.1" + +jest@^22.0.3: + version "22.1.4" + resolved "https://registry.yarnpkg.com/jest/-/jest-22.1.4.tgz#9ec71373a38f40ff92a3e5e96ae85687c181bb72" + dependencies: + jest-cli "^22.1.4" + +js-stringify@^1.0.1: + version "1.0.2" + resolved "https://registry.yarnpkg.com/js-stringify/-/js-stringify-1.0.2.tgz#1736fddfd9724f28a3682adc6230ae7e4e9679db" + +js-tokens@^3.0.0, js-tokens@^3.0.2: + version "3.0.2" + resolved 
"https://registry.yarnpkg.com/js-tokens/-/js-tokens-3.0.2.tgz#9866df395102130e38f7f996bceb65443209c25b" + +js-yaml@^3.10.0, js-yaml@^3.7.0, js-yaml@^3.9.1: + version "3.10.0" + resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-3.10.0.tgz#2e78441646bd4682e963f22b6e92823c309c62dc" + dependencies: + argparse "^1.0.7" + esprima "^4.0.0" + +jsbn@~0.1.0: + version "0.1.1" + resolved "https://registry.yarnpkg.com/jsbn/-/jsbn-0.1.1.tgz#a5e654c2e5a2deb5f201d96cefbca80c0ef2f513" + +jsdom@^11.5.1: + version "11.6.2" + resolved "https://registry.yarnpkg.com/jsdom/-/jsdom-11.6.2.tgz#25d1ef332d48adf77fc5221fe2619967923f16bb" + dependencies: + abab "^1.0.4" + acorn "^5.3.0" + acorn-globals "^4.1.0" + array-equal "^1.0.0" + browser-process-hrtime "^0.1.2" + content-type-parser "^1.0.2" + cssom ">= 0.3.2 < 0.4.0" + cssstyle ">= 0.2.37 < 0.3.0" + domexception "^1.0.0" + escodegen "^1.9.0" + html-encoding-sniffer "^1.0.2" + left-pad "^1.2.0" + nwmatcher "^1.4.3" + parse5 "4.0.0" + pn "^1.1.0" + request "^2.83.0" + request-promise-native "^1.0.5" + sax "^1.2.4" + symbol-tree "^3.2.2" + tough-cookie "^2.3.3" + w3c-hr-time "^1.0.1" + webidl-conversions "^4.0.2" + whatwg-encoding "^1.0.3" + whatwg-url "^6.4.0" + ws "^4.0.0" + xml-name-validator "^3.0.0" + +jsesc@^1.3.0: + version "1.3.0" + resolved "https://registry.yarnpkg.com/jsesc/-/jsesc-1.3.0.tgz#46c3fec8c1892b12b0833db9bc7622176dbab34b" + +jsesc@~0.5.0: + version "0.5.0" + resolved "https://registry.yarnpkg.com/jsesc/-/jsesc-0.5.0.tgz#e7dee66e35d6fc16f710fe91d5cf69f70f08911d" + +json-rpc-peer@0.14: + version "0.14.0" + resolved "https://registry.yarnpkg.com/json-rpc-peer/-/json-rpc-peer-0.14.0.tgz#7bcccde3b22a781973930049e0c3de2e6ef2b798" + dependencies: + babel-runtime "^6.23.0" + json-rpc-protocol "^0.11.3" + lodash "^4.17.4" + +json-rpc-protocol@^0.11.2, json-rpc-protocol@^0.11.3: + version "0.11.3" + resolved 
"https://registry.yarnpkg.com/json-rpc-protocol/-/json-rpc-protocol-0.11.3.tgz#9290f49efa7e57951aa56cc2dc4d3a00204d6ebb" + dependencies: + lodash "^4.17.4" + make-error "^1.3.0" + +json-schema-traverse@^0.3.0: + version "0.3.1" + resolved "https://registry.yarnpkg.com/json-schema-traverse/-/json-schema-traverse-0.3.1.tgz#349a6d44c53a51de89b40805c5d5e59b417d3340" + +json-schema@0.2.3: + version "0.2.3" + resolved "https://registry.yarnpkg.com/json-schema/-/json-schema-0.2.3.tgz#b480c892e59a2f05954ce727bd3f2a4e882f9e13" + +json-stable-stringify-without-jsonify@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz#9db7b59496ad3f3cfef30a75142d2d930ad72651" + +json-stable-stringify@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/json-stable-stringify/-/json-stable-stringify-1.0.1.tgz#9a759d39c5f2ff503fd5300646ed445f88c4f9af" + dependencies: + jsonify "~0.0.0" + +json-stringify-safe@^5.0.1, json-stringify-safe@~5.0.1: + version "5.0.1" + resolved "https://registry.yarnpkg.com/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz#1296a2d58fd45f19a0f6ce01d65701e2c735b6eb" + +json5@^0.5.1: + version "0.5.1" + resolved "https://registry.yarnpkg.com/json5/-/json5-0.5.1.tgz#1eade7acc012034ad84e2396767ead9fa5495821" + +jsonfile@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/jsonfile/-/jsonfile-4.0.0.tgz#8771aae0799b64076b76640fca058f9c10e33ecb" + optionalDependencies: + graceful-fs "^4.1.6" + +jsonify@~0.0.0: + version "0.0.0" + resolved "https://registry.yarnpkg.com/jsonify/-/jsonify-0.0.0.tgz#2c74b6ee41d93ca51b7b5aaee8f503631d252a73" + +jsprim@^1.2.2: + version "1.4.1" + resolved "https://registry.yarnpkg.com/jsprim/-/jsprim-1.4.1.tgz#313e66bc1e5cc06e438bc1b7499c2e5c56acb6a2" + dependencies: + assert-plus "1.0.0" + extsprintf "1.3.0" + json-schema "0.2.3" + verror "1.10.0" + +jstransformer@1.0.0: + version "1.0.0" + resolved 
"https://registry.yarnpkg.com/jstransformer/-/jstransformer-1.0.0.tgz#ed8bf0921e2f3f1ed4d5c1a44f68709ed24722c3" + dependencies: + is-promise "^2.0.0" + promise "^7.0.1" + +julien-f-source-map-support@0.1.0: + version "0.1.0" + resolved "https://registry.yarnpkg.com/julien-f-source-map-support/-/julien-f-source-map-support-0.1.0.tgz#174152c8509538cf666d813eff77cb261711f0af" + dependencies: + source-map "^0.6.1" + stack-chain "^2.0.0" + +julien-f-unzip@^0.2.1: + version "0.2.1" + resolved "https://registry.yarnpkg.com/julien-f-unzip/-/julien-f-unzip-0.2.1.tgz#62e6a85fa7fa4b875156442079adb4a8617c066a" + dependencies: + binary "~0.3.0" + fstream "~0.1.21" + match-stream "~0.0.2" + pullstream "~0.4.0" + readable-stream "~1.0.0" + setimmediate "~1.0.1" + +kind-of@^3.0.2, kind-of@^3.0.3, kind-of@^3.2.0: + version "3.2.2" + resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-3.2.2.tgz#31ea21a734bab9bbb0f32466d893aea51e4a3c64" + dependencies: + is-buffer "^1.1.5" + +kind-of@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-4.0.0.tgz#20813df3d712928b207378691a45066fae72dd57" + dependencies: + is-buffer "^1.1.5" + +kind-of@^5.0.0, kind-of@^5.0.2: + version "5.1.0" + resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-5.1.0.tgz#729c91e2d857b7a419a1f9aa65685c4c33f5845d" + +kind-of@^6.0.0, kind-of@^6.0.2: + version "6.0.2" + resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-6.0.2.tgz#01146b36a6218e64e58f3a8d66de5d7fc6f6d051" + +kindof@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/kindof/-/kindof-2.0.0.tgz#c335baf603a77cc37f8b406b73b6463fdbdf1abe" + +lazy-cache@^1.0.3: + version "1.0.4" + resolved "https://registry.yarnpkg.com/lazy-cache/-/lazy-cache-1.0.4.tgz#a1d78fc3a50474cb80845d3b3b6e1da49a446e8e" + +lazy-cache@^2.0.2: + version "2.0.2" + resolved "https://registry.yarnpkg.com/lazy-cache/-/lazy-cache-2.0.2.tgz#b9190a4f913354694840859f8a8f7084d8822264" + dependencies: + set-getter "^0.1.0" + 
+lazystream@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/lazystream/-/lazystream-1.0.0.tgz#f6995fe0f820392f61396be89462407bb77168e4" + dependencies: + readable-stream "^2.0.5" + +lcid@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/lcid/-/lcid-1.0.0.tgz#308accafa0bc483a3867b4b6f2b9506251d1b835" + dependencies: + invert-kv "^1.0.0" + +left-pad@^1.2.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/left-pad/-/left-pad-1.2.0.tgz#d30a73c6b8201d8f7d8e7956ba9616087a68e0ee" + +length-prefixed-stream@^1.4.0: + version "1.5.1" + resolved "https://registry.yarnpkg.com/length-prefixed-stream/-/length-prefixed-stream-1.5.1.tgz#99eaf51672dddefbfdd8881ee7b7b7df35d1ed73" + dependencies: + readable-stream "^2.0.0" + varint "^5.0.0" + +level-codec@^8.0.0: + version "8.0.0" + resolved "https://registry.yarnpkg.com/level-codec/-/level-codec-8.0.0.tgz#3a4a0de06dae20c2f5a57b3372c7651e67083e03" + +level-codec@~7.0.0: + version "7.0.1" + resolved "https://registry.yarnpkg.com/level-codec/-/level-codec-7.0.1.tgz#341f22f907ce0f16763f24bddd681e395a0fb8a7" + +level-errors@^1.0.3, level-errors@^1.0.4, level-errors@~1.1.0: + version "1.1.2" + resolved "https://registry.yarnpkg.com/level-errors/-/level-errors-1.1.2.tgz#4399c2f3d3ab87d0625f7e3676e2d807deff404d" + dependencies: + errno "~0.1.1" + +level-errors@~1.0.3: + version "1.0.5" + resolved "https://registry.yarnpkg.com/level-errors/-/level-errors-1.0.5.tgz#83dbfb12f0b8a2516bdc9a31c4876038e227b859" + dependencies: + errno "~0.1.1" + +level-iterator-stream@~1.3.0: + version "1.3.1" + resolved "https://registry.yarnpkg.com/level-iterator-stream/-/level-iterator-stream-1.3.1.tgz#e43b78b1a8143e6fa97a4f485eb8ea530352f2ed" + dependencies: + inherits "^2.0.1" + level-errors "^1.0.3" + readable-stream "^1.0.33" + xtend "^4.0.0" + +level-iterator-stream@~2.0.0: + version "2.0.0" + resolved 
"https://registry.yarnpkg.com/level-iterator-stream/-/level-iterator-stream-2.0.0.tgz#e0fe4273a0322177c81bb87684016bb5b90a98b4" + dependencies: + inherits "^2.0.1" + readable-stream "^2.0.5" + xtend "^4.0.0" + +level-packager@^2.0.2: + version "2.1.0" + resolved "https://registry.yarnpkg.com/level-packager/-/level-packager-2.1.0.tgz#d81d81c5cb6d44e87ce2d71baca35eb945cf4d45" + dependencies: + encoding-down "~3.0.0" + levelup "^2.0.0" + +level-packager@~1.2.0: + version "1.2.1" + resolved "https://registry.yarnpkg.com/level-packager/-/level-packager-1.2.1.tgz#067fedfd072b7fe3c6bec6080c0cbd4a6b2e11f4" + dependencies: + levelup "~1.3.0" + +level-party@^3.0.4: + version "3.0.4" + resolved "https://registry.yarnpkg.com/level-party/-/level-party-3.0.4.tgz#80207acbd9c18b9cda4982538d588684041b4fcc" + dependencies: + has "^1.0.0" + level "^1.4.0" + multileveldown "^2.1.1" + pump "^1.0.0" + +level-post@~1.0.3: + version "1.0.5" + resolved "https://registry.yarnpkg.com/level-post/-/level-post-1.0.5.tgz#2a66390409bf6a1621a444bab6f016444cc9802c" + dependencies: + ltgt "^2.1.2" + +level-sublevel@^6.6.1: + version "6.6.1" + resolved "https://registry.yarnpkg.com/level-sublevel/-/level-sublevel-6.6.1.tgz#f9a77f7521ab70a8f8e92ed56f21a3c7886a4485" + dependencies: + bytewise "~1.1.0" + levelup "~0.19.0" + ltgt "~2.1.1" + pull-level "^2.0.3" + pull-stream "^3.4.5" + typewiselite "~1.0.0" + xtend "~4.0.0" + +level@^1.4.0: + version "1.7.0" + resolved "https://registry.yarnpkg.com/level/-/level-1.7.0.tgz#43464a3a8ba73b2f3de56a24292805146da213a1" + dependencies: + level-packager "~1.2.0" + leveldown "~1.7.0" + +level@^2.1.1: + version "2.1.2" + resolved "https://registry.yarnpkg.com/level/-/level-2.1.2.tgz#3aaca16af50f4953c4433815fe0cccf999d98d39" + dependencies: + level-packager "^2.0.2" + leveldown "^2.1.1" + opencollective "^1.0.3" + +leveldown@^2.1.1: + version "2.1.1" + resolved "https://registry.yarnpkg.com/leveldown/-/leveldown-2.1.1.tgz#2f4d786dfe6ba3ecc63409784ce29eca110413bf" + 
dependencies: + abstract-leveldown "~3.0.0" + bindings "~1.3.0" + fast-future "~1.0.2" + nan "~2.8.0" + prebuild-install "^2.1.0" + +leveldown@~1.7.0: + version "1.7.2" + resolved "https://registry.yarnpkg.com/leveldown/-/leveldown-1.7.2.tgz#5e3467bb27ee246a4a7b8dbd8fb2b16206a6eb8b" + dependencies: + abstract-leveldown "~2.6.1" + bindings "~1.2.1" + fast-future "~1.0.2" + nan "~2.6.1" + prebuild-install "^2.1.0" + +levelup@^1.3.1, levelup@~1.3.0: + version "1.3.9" + resolved "https://registry.yarnpkg.com/levelup/-/levelup-1.3.9.tgz#2dbcae845b2bb2b6bea84df334c475533bbd82ab" + dependencies: + deferred-leveldown "~1.2.1" + level-codec "~7.0.0" + level-errors "~1.0.3" + level-iterator-stream "~1.3.0" + prr "~1.0.1" + semver "~5.4.1" + xtend "~4.0.0" + +levelup@^2.0.0: + version "2.0.1" + resolved "https://registry.yarnpkg.com/levelup/-/levelup-2.0.1.tgz#3dc91b3e632d37c9e546239c864118b004c9f860" + dependencies: + deferred-leveldown "~2.0.2" + level-errors "~1.1.0" + level-iterator-stream "~2.0.0" + xtend "~4.0.0" + +levelup@~0.19.0: + version "0.19.1" + resolved "https://registry.yarnpkg.com/levelup/-/levelup-0.19.1.tgz#f3a6a7205272c4b5f35e412ff004a03a0aedf50b" + dependencies: + bl "~0.8.1" + deferred-leveldown "~0.2.0" + errno "~0.1.1" + prr "~0.0.0" + readable-stream "~1.0.26" + semver "~5.1.0" + xtend "~3.0.0" + +leven@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/leven/-/leven-2.1.0.tgz#c2e7a9f772094dee9d34202ae8acce4687875580" + +levn@^0.3.0, levn@~0.3.0: + version "0.3.0" + resolved "https://registry.yarnpkg.com/levn/-/levn-0.3.0.tgz#3b09924edf9f083c0490fdd4c0bc4421e04764ee" + dependencies: + prelude-ls "~1.1.2" + type-check "~0.3.2" + +limit-concurrency-decorator@^0.2.0: + version "0.2.0" + resolved "https://registry.yarnpkg.com/limit-concurrency-decorator/-/limit-concurrency-decorator-0.2.0.tgz#5ed8b8f830d36e22cbe0bcaf281b62498aec3e73" + +load-json-file@^1.0.0: + version "1.1.0" + resolved 
"https://registry.yarnpkg.com/load-json-file/-/load-json-file-1.1.0.tgz#956905708d58b4bab4c2261b04f59f31c99374c0" + dependencies: + graceful-fs "^4.1.2" + parse-json "^2.2.0" + pify "^2.0.0" + pinkie-promise "^2.0.0" + strip-bom "^2.0.0" + +load-json-file@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/load-json-file/-/load-json-file-2.0.0.tgz#7947e42149af80d696cbf797bcaabcfe1fe29ca8" + dependencies: + graceful-fs "^4.1.2" + parse-json "^2.2.0" + pify "^2.0.0" + strip-bom "^3.0.0" + +locate-path@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/locate-path/-/locate-path-2.0.0.tgz#2b568b265eec944c6d9c0de9c3dbbbca0354cd8e" + dependencies: + p-locate "^2.0.0" + path-exists "^3.0.0" + +lodash._arrayeach@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/lodash._arrayeach/-/lodash._arrayeach-3.0.0.tgz#bab156b2a90d3f1bbd5c653403349e5e5933ef9e" + +lodash._baseeach@^3.0.0: + version "3.0.4" + resolved "https://registry.yarnpkg.com/lodash._baseeach/-/lodash._baseeach-3.0.4.tgz#cf8706572ca144e8d9d75227c990da982f932af3" + dependencies: + lodash.keys "^3.0.0" + +lodash._bindcallback@^3.0.0: + version "3.0.1" + resolved "https://registry.yarnpkg.com/lodash._bindcallback/-/lodash._bindcallback-3.0.1.tgz#e531c27644cf8b57a99e17ed95b35c748789392e" + +lodash._getnative@^3.0.0: + version "3.9.1" + resolved "https://registry.yarnpkg.com/lodash._getnative/-/lodash._getnative-3.9.1.tgz#570bc7dede46d61cdcde687d65d3eecbaa3aaff5" + +lodash.cond@^4.3.0: + version "4.5.2" + resolved "https://registry.yarnpkg.com/lodash.cond/-/lodash.cond-4.5.2.tgz#f471a1da486be60f6ab955d17115523dd1d255d5" + +lodash.foreach@^3.0.3: + version "3.0.3" + resolved "https://registry.yarnpkg.com/lodash.foreach/-/lodash.foreach-3.0.3.tgz#6fd7efb79691aecd67fdeac2761c98e701d6c39a" + dependencies: + lodash._arrayeach "^3.0.0" + lodash._baseeach "^3.0.0" + lodash._bindcallback "^3.0.0" + lodash.isarray "^3.0.0" + +lodash.isarguments@^3.0.0: + version "3.1.0" + resolved 
"https://registry.yarnpkg.com/lodash.isarguments/-/lodash.isarguments-3.1.0.tgz#2f573d85c6a24289ff00663b491c1d338ff3458a" + +lodash.isarray@^3.0.0: + version "3.0.4" + resolved "https://registry.yarnpkg.com/lodash.isarray/-/lodash.isarray-3.0.4.tgz#79e4eb88c36a8122af86f844aa9bcd851b5fbb55" + +lodash.isfunction@^3.0.6: + version "3.0.8" + resolved "https://registry.yarnpkg.com/lodash.isfunction/-/lodash.isfunction-3.0.8.tgz#4db709fc81bc4a8fd7127a458a5346c5cdce2c6b" + +lodash.keys@^3.0.0: + version "3.1.2" + resolved "https://registry.yarnpkg.com/lodash.keys/-/lodash.keys-3.1.2.tgz#4dbc0472b156be50a0b286855d1bd0b0c656098a" + dependencies: + lodash._getnative "^3.0.0" + lodash.isarguments "^3.0.0" + lodash.isarray "^3.0.0" + +lodash.reduce@4.6.0: + version "4.6.0" + resolved "https://registry.yarnpkg.com/lodash.reduce/-/lodash.reduce-4.6.0.tgz#f1ab6b839299ad48f784abbf476596f03b914d3b" + +lodash.sortby@^4.7.0: + version "4.7.0" + resolved "https://registry.yarnpkg.com/lodash.sortby/-/lodash.sortby-4.7.0.tgz#edd14c824e2cc9c1e0b0a1b42bb5210516a42438" + +lodash@^3.10.1: + version "3.10.1" + resolved "https://registry.yarnpkg.com/lodash/-/lodash-3.10.1.tgz#5bf45e8e49ba4189e17d482789dfd15bd140b7b6" + +lodash@^4.13.1, lodash@^4.14.0, lodash@^4.16.0, lodash@^4.16.6, lodash@^4.17.4, lodash@^4.2.0, lodash@^4.3.0, lodash@^4.8.0: + version "4.17.4" + resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.4.tgz#78203a4d1c328ae1d86dca6460e369b57f4055ae" + +log-symbols@^1.0.0, log-symbols@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/log-symbols/-/log-symbols-1.0.2.tgz#376ff7b58ea3086a0f09facc74617eca501e1a18" + dependencies: + chalk "^1.0.0" + +longest@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/longest/-/longest-1.0.1.tgz#30a0b2da38f73770e8294a0d22e6625ed77d0097" + +looper@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/looper/-/looper-2.0.0.tgz#66cd0c774af3d4fedac53794f742db56da8f09ec" + +looper@^3.0.0: + version 
"3.0.0" + resolved "https://registry.yarnpkg.com/looper/-/looper-3.0.0.tgz#2efa54c3b1cbaba9b94aee2e5914b0be57fbb749" + +loose-envify@^1.0.0: + version "1.3.1" + resolved "https://registry.yarnpkg.com/loose-envify/-/loose-envify-1.3.1.tgz#d1a8ad33fa9ce0e713d65fdd0ac8b748d478c848" + dependencies: + js-tokens "^3.0.0" + +lru-cache@^2.6.5: + version "2.7.3" + resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-2.7.3.tgz#6d4524e8b955f95d4f5b58851ce21dd72fb4e952" + +lru-cache@^4.0.1: + version "4.1.1" + resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-4.1.1.tgz#622e32e82488b49279114a4f9ecf45e7cd6bba55" + dependencies: + pseudomap "^1.0.2" + yallist "^2.1.2" + +ltgt@^2.1.2: + version "2.2.0" + resolved "https://registry.yarnpkg.com/ltgt/-/ltgt-2.2.0.tgz#b65ba5fcb349a29924c8e333f7c6a5562f2e4842" + +ltgt@~2.1.1: + version "2.1.3" + resolved "https://registry.yarnpkg.com/ltgt/-/ltgt-2.1.3.tgz#10851a06d9964b971178441c23c9e52698eece34" + +make-error@^1, make-error@^1.0.2, make-error@^1.2.1, make-error@^1.2.3, make-error@^1.3.0: + version "1.3.2" + resolved "https://registry.yarnpkg.com/make-error/-/make-error-1.3.2.tgz#8762ffad2444dd8ff1f7c819629fa28e24fea1c4" + +makeerror@1.0.x: + version "1.0.11" + resolved "https://registry.yarnpkg.com/makeerror/-/makeerror-1.0.11.tgz#e01a5c9109f2af79660e4e8b9587790184f5a96c" + dependencies: + tmpl "1.0.x" + +map-cache@^0.2.2: + version "0.2.2" + resolved "https://registry.yarnpkg.com/map-cache/-/map-cache-0.2.2.tgz#c32abd0bd6525d9b051645bb4f26ac5dc98a0dbf" + +map-visit@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/map-visit/-/map-visit-1.0.0.tgz#ecdca8f13144e660f1b5bd41f12f3479d98dfb8f" + dependencies: + object-visit "^1.0.0" + +match-stream@~0.0.2: + version "0.0.2" + resolved "https://registry.yarnpkg.com/match-stream/-/match-stream-0.0.2.tgz#99eb050093b34dffade421b9ac0b410a9cfa17cf" + dependencies: + buffers "~0.1.1" + readable-stream "~1.0.0" + +media-typer@0.3.0: + version "0.3.0" + resolved 
"https://registry.yarnpkg.com/media-typer/-/media-typer-0.3.0.tgz#8710d7af0aa626f8fffa1ce00168545263255748" + +mem@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/mem/-/mem-1.1.0.tgz#5edd52b485ca1d900fe64895505399a0dfa45f76" + dependencies: + mimic-fn "^1.0.0" + +merge-descriptors@1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/merge-descriptors/-/merge-descriptors-1.0.1.tgz#b00aaa556dd8b44568150ec9d1b953f3f90cbb61" + +merge-stream@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/merge-stream/-/merge-stream-1.0.1.tgz#4041202d508a342ba00174008df0c251b8c135e1" + dependencies: + readable-stream "^2.0.1" + +merge@^1.1.3: + version "1.2.0" + resolved "https://registry.yarnpkg.com/merge/-/merge-1.2.0.tgz#7531e39d4949c281a66b8c5a6e0265e8b05894da" + +methods@~1.1.2: + version "1.1.2" + resolved "https://registry.yarnpkg.com/methods/-/methods-1.1.2.tgz#5529a4d67654134edcc5266656835b0f851afcee" + +micromatch@^2.1.5, micromatch@^2.3.11: + version "2.3.11" + resolved "https://registry.yarnpkg.com/micromatch/-/micromatch-2.3.11.tgz#86677c97d1720b363431d04d0d15293bd38c1565" + dependencies: + arr-diff "^2.0.0" + array-unique "^0.2.1" + braces "^1.8.2" + expand-brackets "^0.1.4" + extglob "^0.3.1" + filename-regex "^2.0.0" + is-extglob "^1.0.0" + is-glob "^2.0.1" + kind-of "^3.0.2" + normalize-path "^2.0.1" + object.omit "^2.0.0" + parse-glob "^3.0.4" + regex-cache "^0.4.2" + +micromatch@^3.1.4: + version "3.1.5" + resolved "https://registry.yarnpkg.com/micromatch/-/micromatch-3.1.5.tgz#d05e168c206472dfbca985bfef4f57797b4cd4ba" + dependencies: + arr-diff "^4.0.0" + array-unique "^0.3.2" + braces "^2.3.0" + define-property "^1.0.0" + extend-shallow "^2.0.1" + extglob "^2.0.2" + fragment-cache "^0.2.1" + kind-of "^6.0.0" + nanomatch "^1.2.5" + object.pick "^1.3.0" + regex-not "^1.0.0" + snapdragon "^0.8.1" + to-regex "^3.0.1" + +mime-db@~1.30.0: + version "1.30.0" + resolved 
"https://registry.yarnpkg.com/mime-db/-/mime-db-1.30.0.tgz#74c643da2dd9d6a45399963465b26d5ca7d71f01" + +mime-types@^2.1.12, mime-types@~2.1.15, mime-types@~2.1.16, mime-types@~2.1.17, mime-types@~2.1.7: + version "2.1.17" + resolved "https://registry.yarnpkg.com/mime-types/-/mime-types-2.1.17.tgz#09d7a393f03e995a79f8af857b70a9e0ab16557a" + dependencies: + mime-db "~1.30.0" + +mime@1.4.1: + version "1.4.1" + resolved "https://registry.yarnpkg.com/mime/-/mime-1.4.1.tgz#121f9ebc49e3766f311a76e1fa1c8003c4b03aa6" + +mimic-fn@^1.0.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/mimic-fn/-/mimic-fn-1.1.0.tgz#e667783d92e89dbd342818b5230b9d62a672ad18" + +minimatch@^3.0.0, minimatch@^3.0.2, minimatch@^3.0.3, minimatch@^3.0.4: + version "3.0.4" + resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-3.0.4.tgz#5166e286457f03306064be5497e8dbb0c3d32083" + dependencies: + brace-expansion "^1.1.7" + +minimist@0.0.8: + version "0.0.8" + resolved "https://registry.yarnpkg.com/minimist/-/minimist-0.0.8.tgz#857fcabfc3397d2625b8228262e86aa7a011b05d" + +minimist@1.2.0, minimist@^1.1.1, minimist@^1.2.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.0.tgz#a35008b20f41383eec1fb914f4cd5df79a264284" + +minimist@~0.0.1: + version "0.0.10" + resolved "https://registry.yarnpkg.com/minimist/-/minimist-0.0.10.tgz#de3f98543dbf96082be48ad1a0c7cda836301dcf" + +mixin-deep@^1.2.0: + version "1.3.0" + resolved "https://registry.yarnpkg.com/mixin-deep/-/mixin-deep-1.3.0.tgz#47a8732ba97799457c8c1eca28f95132d7e8150a" + dependencies: + for-in "^1.0.2" + is-extendable "^1.0.1" + +mkdirp@0.5, "mkdirp@>=0.5 0", mkdirp@^0.5.1: + version "0.5.1" + resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-0.5.1.tgz#30057438eac6cf7f8c4767f38648d6697d75c903" + dependencies: + minimist "0.0.8" + +moment-timezone@^0.5.14, moment-timezone@^0.5.x: + version "0.5.14" + resolved 
"https://registry.yarnpkg.com/moment-timezone/-/moment-timezone-0.5.14.tgz#4eb38ff9538b80108ba467a458f3ed4268ccfcb1" + dependencies: + moment ">= 2.9.0" + +"moment@>= 2.9.0": + version "2.20.1" + resolved "https://registry.yarnpkg.com/moment/-/moment-2.20.1.tgz#d6eb1a46cbcc14a2b2f9434112c1ff8907f313fd" + +ms@2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/ms/-/ms-2.0.0.tgz#5608aeadfc00be6c2901df5f9861788de0d597c8" + +ms@^2.1.1: + version "2.1.1" + resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.1.tgz#30a5864eb3ebb0a66f2ebe6d727af06a09d86e0a" + +multikey-hash@^1.0.4: + version "1.0.4" + resolved "https://registry.yarnpkg.com/multikey-hash/-/multikey-hash-1.0.4.tgz#f062fd512deb57d1a6f376cb74dbb50671d4fcc3" + dependencies: + imurmurhash "^0.1.4" + simple-token "^0.1.0" + +multileveldown@^2.1.1: + version "2.3.1" + resolved "https://registry.yarnpkg.com/multileveldown/-/multileveldown-2.3.1.tgz#4d2dab6d249abadee02541caec9d28bc3fc2040b" + dependencies: + abstract-leveldown "^2.4.1" + duplexify "^3.4.2" + end-of-stream "^1.1.0" + length-prefixed-stream "^1.4.0" + levelup "^1.3.1" + numeric-id-map "^1.1.0" + protocol-buffers-encodings "^1.1.0" + +mute-stream@0.0.7: + version "0.0.7" + resolved "https://registry.yarnpkg.com/mute-stream/-/mute-stream-0.0.7.tgz#3075ce93bc21b8fab43e1bc4da7e8115ed1e7bab" + +nan@2.6.2, nan@~2.6.1: + version "2.6.2" + resolved "https://registry.yarnpkg.com/nan/-/nan-2.6.2.tgz#e4ff34e6c95fdfb5aecc08de6596f43605a7db45" + +nan@^2.3.0, nan@^2.4.0, nan@~2.8.0: + version "2.8.0" + resolved "https://registry.yarnpkg.com/nan/-/nan-2.8.0.tgz#ed715f3fe9de02b57a5e6252d90a96675e1f085a" + +nanomatch@^1.2.5: + version "1.2.7" + resolved "https://registry.yarnpkg.com/nanomatch/-/nanomatch-1.2.7.tgz#53cd4aa109ff68b7f869591fdc9d10daeeea3e79" + dependencies: + arr-diff "^4.0.0" + array-unique "^0.3.2" + define-property "^1.0.0" + extend-shallow "^2.0.1" + fragment-cache "^0.2.1" + is-odd "^1.0.0" + kind-of "^5.0.2" + object.pick "^1.3.0" + 
regex-not "^1.0.0" + snapdragon "^0.8.1" + to-regex "^3.0.1" + +natives@^1.1.0: + version "1.1.1" + resolved "https://registry.yarnpkg.com/natives/-/natives-1.1.1.tgz#011acce1f7cbd87f7ba6b3093d6cd9392be1c574" + +natural-compare@^1.4.0: + version "1.4.0" + resolved "https://registry.yarnpkg.com/natural-compare/-/natural-compare-1.4.0.tgz#4abebfeed7541f2c27acfb29bdbbd15c8d5ba4f7" + +ndjson@^1.5.0: + version "1.5.0" + resolved "https://registry.yarnpkg.com/ndjson/-/ndjson-1.5.0.tgz#ae603b36b134bcec347b452422b0bf98d5832ec8" + dependencies: + json-stringify-safe "^5.0.1" + minimist "^1.2.0" + split2 "^2.1.0" + through2 "^2.0.3" + +negotiator@0.6.1: + version "0.6.1" + resolved "https://registry.yarnpkg.com/negotiator/-/negotiator-0.6.1.tgz#2b327184e8992101177b28563fb5e7102acd0ca9" + +netmask@^1.0.6: + version "1.0.6" + resolved "https://registry.yarnpkg.com/netmask/-/netmask-1.0.6.tgz#20297e89d86f6f6400f250d9f4f6b4c1945fcd35" + +nocache@2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/nocache/-/nocache-2.0.0.tgz#202b48021a0c4cbde2df80de15a17443c8b43980" + +node-abi@^2.1.1: + version "2.2.0" + resolved "https://registry.yarnpkg.com/node-abi/-/node-abi-2.2.0.tgz#e802ac7a2408e2c0593fb3176ffdf8a99a9b4dec" + dependencies: + semver "^5.4.1" + +node-fetch@1.6.3: + version "1.6.3" + resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-1.6.3.tgz#dc234edd6489982d58e8f0db4f695029abcd8c04" + dependencies: + encoding "^0.1.11" + is-stream "^1.0.1" + +node-int64@^0.4.0: + version "0.4.0" + resolved "https://registry.yarnpkg.com/node-int64/-/node-int64-0.4.0.tgz#87a9065cdb355d3182d8f94ce11188b825c68a3b" + +node-notifier@^5.1.2: + version "5.2.1" + resolved "https://registry.yarnpkg.com/node-notifier/-/node-notifier-5.2.1.tgz#fa313dd08f5517db0e2502e5758d664ac69f9dea" + dependencies: + growly "^1.3.0" + semver "^5.4.1" + shellwords "^0.1.1" + which "^1.3.0" + +node-pre-gyp@0.6.36: + version "0.6.36" + resolved 
"https://registry.yarnpkg.com/node-pre-gyp/-/node-pre-gyp-0.6.36.tgz#db604112cb74e0d477554e9b505b17abddfab786" + dependencies: + mkdirp "^0.5.1" + nopt "^4.0.1" + npmlog "^4.0.2" + rc "^1.1.7" + request "^2.81.0" + rimraf "^2.6.1" + semver "^5.3.0" + tar "^2.2.1" + tar-pack "^3.4.0" + +node-pre-gyp@^0.6.39: + version "0.6.39" + resolved "https://registry.yarnpkg.com/node-pre-gyp/-/node-pre-gyp-0.6.39.tgz#c00e96860b23c0e1420ac7befc5044e1d78d8649" + dependencies: + detect-libc "^1.0.2" + hawk "3.1.3" + mkdirp "^0.5.1" + nopt "^4.0.1" + npmlog "^4.0.2" + rc "^1.1.7" + request "2.81.0" + rimraf "^2.6.1" + semver "^5.3.0" + tar "^2.2.1" + tar-pack "^3.4.0" + +node-version@^1.0.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/node-version/-/node-version-1.1.0.tgz#f437d7ba407e65e2c4eaef8887b1718ba523d4f0" + +noop-logger@^0.1.1: + version "0.1.1" + resolved "https://registry.yarnpkg.com/noop-logger/-/noop-logger-0.1.1.tgz#94a2b1633c4f1317553007d8966fd0e841b6a4c2" + +nopt@^4.0.1: + version "4.0.1" + resolved "https://registry.yarnpkg.com/nopt/-/nopt-4.0.1.tgz#d0d4685afd5415193c8c7505602d0d17cd64474d" + dependencies: + abbrev "1" + osenv "^0.1.4" + +normalize-package-data@^2.3.2: + version "2.4.0" + resolved "https://registry.yarnpkg.com/normalize-package-data/-/normalize-package-data-2.4.0.tgz#12f95a307d58352075a04907b84ac8be98ac012f" + dependencies: + hosted-git-info "^2.1.4" + is-builtin-module "^1.0.0" + semver "2 || 3 || 4 || 5" + validate-npm-package-license "^3.0.1" + +normalize-path@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/normalize-path/-/normalize-path-1.0.0.tgz#32d0e472f91ff345701c15a8311018d3b0a90379" + +normalize-path@^2.0.0, normalize-path@^2.0.1: + version "2.1.1" + resolved "https://registry.yarnpkg.com/normalize-path/-/normalize-path-2.1.1.tgz#1ab28b556e198363a8c1a6f7e6fa20137fe6aed9" + dependencies: + remove-trailing-separator "^1.0.1" + +npm-run-path@^2.0.0: + version "2.0.2" + resolved 
"https://registry.yarnpkg.com/npm-run-path/-/npm-run-path-2.0.2.tgz#35a9232dfa35d7067b4cb2ddf2357b1871536c5f" + dependencies: + path-key "^2.0.0" + +npmlog@^4.0.1, npmlog@^4.0.2: + version "4.1.2" + resolved "https://registry.yarnpkg.com/npmlog/-/npmlog-4.1.2.tgz#08a7f2a8bf734604779a9efa4ad5cc717abb954b" + dependencies: + are-we-there-yet "~1.1.2" + console-control-strings "~1.1.0" + gauge "~2.7.3" + set-blocking "~2.0.0" + +ntlm@~0.1.1: + version "0.1.3" + resolved "https://registry.yarnpkg.com/ntlm/-/ntlm-0.1.3.tgz#3b814ebc530a1e6cd712dcf0cf590155931195c1" + +number-is-nan@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/number-is-nan/-/number-is-nan-1.0.1.tgz#097b602b53422a522c1afb8790318336941a011d" + +numeric-id-map@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/numeric-id-map/-/numeric-id-map-1.1.0.tgz#1110e4445ae683e83d479eadd0f6d9249fda3bd6" + +nwmatcher@^1.4.3: + version "1.4.3" + resolved "https://registry.yarnpkg.com/nwmatcher/-/nwmatcher-1.4.3.tgz#64348e3b3d80f035b40ac11563d278f8b72db89c" + +oauth-sign@~0.8.1, oauth-sign@~0.8.2: + version "0.8.2" + resolved "https://registry.yarnpkg.com/oauth-sign/-/oauth-sign-0.8.2.tgz#46a6ab7f0aead8deae9ec0565780b7d4efeb9d43" + +object-assign@^4.0.1, object-assign@^4.1.0: + version "4.1.1" + resolved "https://registry.yarnpkg.com/object-assign/-/object-assign-4.1.1.tgz#2109adc7965887cfc05cbbd442cac8bfbb360863" + +object-copy@^0.1.0: + version "0.1.0" + resolved "https://registry.yarnpkg.com/object-copy/-/object-copy-0.1.0.tgz#7e7d858b781bd7c991a41ba975ed3812754e998c" + dependencies: + copy-descriptor "^0.1.0" + define-property "^0.2.5" + kind-of "^3.0.3" + +object-keys@^1.0.8: + version "1.0.11" + resolved "https://registry.yarnpkg.com/object-keys/-/object-keys-1.0.11.tgz#c54601778ad560f1142ce0e01bcca8b56d13426d" + +object-visit@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/object-visit/-/object-visit-1.0.1.tgz#f79c4493af0c5377b59fe39d395e41042dd045bb" + 
dependencies: + isobject "^3.0.0" + +object.getownpropertydescriptors@^2.0.3: + version "2.0.3" + resolved "https://registry.yarnpkg.com/object.getownpropertydescriptors/-/object.getownpropertydescriptors-2.0.3.tgz#8758c846f5b407adab0f236e0986f14b051caa16" + dependencies: + define-properties "^1.1.2" + es-abstract "^1.5.1" + +object.omit@^2.0.0: + version "2.0.1" + resolved "https://registry.yarnpkg.com/object.omit/-/object.omit-2.0.1.tgz#1a9c744829f39dbb858c76ca3579ae2a54ebd1fa" + dependencies: + for-own "^0.1.4" + is-extendable "^0.1.1" + +object.pick@^1.3.0: + version "1.3.0" + resolved "https://registry.yarnpkg.com/object.pick/-/object.pick-1.3.0.tgz#87a10ac4c1694bd2e1cbf53591a66141fb5dd747" + dependencies: + isobject "^3.0.1" + +on-finished@~2.3.0: + version "2.3.0" + resolved "https://registry.yarnpkg.com/on-finished/-/on-finished-2.3.0.tgz#20f1336481b083cd75337992a16971aa2d906947" + dependencies: + ee-first "1.1.1" + +on-headers@~1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/on-headers/-/on-headers-1.0.1.tgz#928f5d0f470d49342651ea6794b0857c100693f7" + +once@^1.3.0, once@^1.3.1, once@^1.3.3, once@^1.4.0: + version "1.4.0" + resolved "https://registry.yarnpkg.com/once/-/once-1.4.0.tgz#583b1aa775961d4b113ac17d9c50baef9dd76bd1" + dependencies: + wrappy "1" + +onetime@^2.0.0: + version "2.0.1" + resolved "https://registry.yarnpkg.com/onetime/-/onetime-2.0.1.tgz#067428230fd67443b2794b22bba528b6867962d4" + dependencies: + mimic-fn "^1.0.0" + +opencollective@^1.0.3: + version "1.0.3" + resolved "https://registry.yarnpkg.com/opencollective/-/opencollective-1.0.3.tgz#aee6372bc28144583690c3ca8daecfc120dd0ef1" + dependencies: + babel-polyfill "6.23.0" + chalk "1.1.3" + inquirer "3.0.6" + minimist "1.2.0" + node-fetch "1.6.3" + opn "4.0.2" + +opn@4.0.2: + version "4.0.2" + resolved "https://registry.yarnpkg.com/opn/-/opn-4.0.2.tgz#7abc22e644dff63b0a96d5ab7f2790c0f01abc95" + dependencies: + object-assign "^4.0.1" + pinkie-promise "^2.0.0" + 
+optimist@^0.6.1: + version "0.6.1" + resolved "https://registry.yarnpkg.com/optimist/-/optimist-0.6.1.tgz#da3ea74686fa21a19a111c326e90eb15a0196686" + dependencies: + minimist "~0.0.1" + wordwrap "~0.0.2" + +optionator@^0.8.1, optionator@^0.8.2: + version "0.8.2" + resolved "https://registry.yarnpkg.com/optionator/-/optionator-0.8.2.tgz#364c5e409d3f4d6301d6c0b4c05bba50180aeb64" + dependencies: + deep-is "~0.1.3" + fast-levenshtein "~2.0.4" + levn "~0.3.0" + prelude-ls "~1.1.2" + type-check "~0.3.2" + wordwrap "~1.0.0" + +os-homedir@^1.0.0, os-homedir@^1.0.1: + version "1.0.2" + resolved "https://registry.yarnpkg.com/os-homedir/-/os-homedir-1.0.2.tgz#ffbc4988336e0e833de0c168c7ef152121aa7fb3" + +os-locale@^1.4.0: + version "1.4.0" + resolved "https://registry.yarnpkg.com/os-locale/-/os-locale-1.4.0.tgz#20f9f17ae29ed345e8bde583b13d2009803c14d9" + dependencies: + lcid "^1.0.0" + +os-locale@^2.0.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/os-locale/-/os-locale-2.1.0.tgz#42bc2900a6b5b8bd17376c8e882b65afccf24bf2" + dependencies: + execa "^0.7.0" + lcid "^1.0.0" + mem "^1.1.0" + +os-tmpdir@^1.0.0, os-tmpdir@^1.0.1, os-tmpdir@~1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/os-tmpdir/-/os-tmpdir-1.0.2.tgz#bbe67406c79aa85c5cfec766fe5734555dfa1274" + +osenv@^0.1.4: + version "0.1.4" + resolved "https://registry.yarnpkg.com/osenv/-/osenv-0.1.4.tgz#42fe6d5953df06c8064be6f176c3d05aaaa34644" + dependencies: + os-homedir "^1.0.0" + os-tmpdir "^1.0.0" + +output-file-sync@^1.1.2: + version "1.1.2" + resolved "https://registry.yarnpkg.com/output-file-sync/-/output-file-sync-1.1.2.tgz#d0a33eefe61a205facb90092e826598d5245ce76" + dependencies: + graceful-fs "^4.1.4" + mkdirp "^0.5.1" + object-assign "^4.1.0" + +"over@>= 0.0.5 < 1": + version "0.0.5" + resolved "https://registry.yarnpkg.com/over/-/over-0.0.5.tgz#f29852e70fd7e25f360e013a8ec44c82aedb5708" + +p-finally@^1.0.0: + version "1.0.0" + resolved 
"https://registry.yarnpkg.com/p-finally/-/p-finally-1.0.0.tgz#3fbcfb15b899a44123b34b6dcc18b724336a2cae" + +p-limit@^1.1.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/p-limit/-/p-limit-1.2.0.tgz#0e92b6bedcb59f022c13d0f1949dc82d15909f1c" + dependencies: + p-try "^1.0.0" + +p-locate@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/p-locate/-/p-locate-2.0.0.tgz#20a0103b222a70c8fd39cc2e580680f3dde5ec43" + dependencies: + p-limit "^1.1.0" + +p-try@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/p-try/-/p-try-1.0.0.tgz#cbc79cdbaf8fd4228e13f621f2b1a237c1b207b3" + +pac-proxy-agent@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/pac-proxy-agent/-/pac-proxy-agent-2.0.0.tgz#beb17cd2b06a20b379d57e1b2e2c29be0dfe5f9a" + dependencies: + agent-base "^2.1.1" + debug "^2.6.8" + get-uri "^2.0.0" + http-proxy-agent "^1.0.0" + https-proxy-agent "^1.0.0" + pac-resolver "^3.0.0" + raw-body "^2.2.0" + socks-proxy-agent "^3.0.0" + +pac-resolver@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/pac-resolver/-/pac-resolver-3.0.0.tgz#6aea30787db0a891704deb7800a722a7615a6f26" + dependencies: + co "^4.6.0" + degenerator "^1.0.4" + ip "^1.1.5" + netmask "^1.0.6" + thunkify "^2.1.2" + +parse-glob@^3.0.4: + version "3.0.4" + resolved "https://registry.yarnpkg.com/parse-glob/-/parse-glob-3.0.4.tgz#b2c376cfb11f35513badd173ef0bb6e3a388391c" + dependencies: + glob-base "^0.3.0" + is-dotfile "^1.0.0" + is-extglob "^1.0.0" + is-glob "^2.0.0" + +parse-json@^2.2.0: + version "2.2.0" + resolved "https://registry.yarnpkg.com/parse-json/-/parse-json-2.2.0.tgz#f480f40434ef80741f8469099f8dea18f55a4dc9" + dependencies: + error-ex "^1.2.0" + +parse-pairs@^0.2.2: + version "0.2.2" + resolved "https://registry.yarnpkg.com/parse-pairs/-/parse-pairs-0.2.2.tgz#86d3cc90fa4d3acd403b5556f68d9fcd208c3abe" + +parse5@4.0.0: + version "4.0.0" + resolved 
"https://registry.yarnpkg.com/parse5/-/parse5-4.0.0.tgz#6d78656e3da8d78b4ec0b906f7c08ef1dfe3f608" + +parseurl@~1.3.2: + version "1.3.2" + resolved "https://registry.yarnpkg.com/parseurl/-/parseurl-1.3.2.tgz#fc289d4ed8993119460c156253262cdc8de65bf3" + +partial-stream@0.0.0: + version "0.0.0" + resolved "https://registry.yarnpkg.com/partial-stream/-/partial-stream-0.0.0.tgz#3731755355f6fab47a1cf28ed8edfd07221f12af" + dependencies: + babel-runtime "^5.8.3" + through2 "^2.0.0" + +pascalcase@^0.1.1: + version "0.1.1" + resolved "https://registry.yarnpkg.com/pascalcase/-/pascalcase-0.1.1.tgz#b363e55e8006ca6fe21784d2db22bd15d7917f14" + +passport-local@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/passport-local/-/passport-local-1.0.0.tgz#1fe63268c92e75606626437e3b906662c15ba6ee" + dependencies: + passport-strategy "1.x.x" + +passport-strategy@1.x.x: + version "1.0.0" + resolved "https://registry.yarnpkg.com/passport-strategy/-/passport-strategy-1.0.0.tgz#b5539aa8fc225a3d1ad179476ddf236b440f52e4" + +passport@^0.4.0: + version "0.4.0" + resolved "https://registry.yarnpkg.com/passport/-/passport-0.4.0.tgz#c5095691347bd5ad3b5e180238c3914d16f05811" + dependencies: + passport-strategy "1.x.x" + pause "0.0.1" + +path-exists@^2.0.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/path-exists/-/path-exists-2.1.0.tgz#0feb6c64f0fc518d9a754dd5efb62c7022761f4b" + dependencies: + pinkie-promise "^2.0.0" + +path-exists@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/path-exists/-/path-exists-3.0.0.tgz#ce0ebeaa5f78cb18925ea7d810d7b59b010fd515" + +path-is-absolute@^1.0.0, path-is-absolute@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/path-is-absolute/-/path-is-absolute-1.0.1.tgz#174b9268735534ffbc7ace6bf53a5a9e1b5c5f5f" + +path-is-inside@^1.0.1, path-is-inside@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/path-is-inside/-/path-is-inside-1.0.2.tgz#365417dede44430d1c11af61027facf074bdfc53" + 
+path-key@^2.0.0: + version "2.0.1" + resolved "https://registry.yarnpkg.com/path-key/-/path-key-2.0.1.tgz#411cadb574c5a140d3a4b1910d40d80cc9f40b40" + +path-parse@^1.0.5: + version "1.0.5" + resolved "https://registry.yarnpkg.com/path-parse/-/path-parse-1.0.5.tgz#3c1adf871ea9cd6c9431b6ea2bd74a0ff055c4c1" + +path-to-regexp@0.1.7: + version "0.1.7" + resolved "https://registry.yarnpkg.com/path-to-regexp/-/path-to-regexp-0.1.7.tgz#df604178005f522f15eb4490e7247a1bfaa67f8c" + +path-type@^1.0.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/path-type/-/path-type-1.1.0.tgz#59c44f7ee491da704da415da5a4070ba4f8fe441" + dependencies: + graceful-fs "^4.1.2" + pify "^2.0.0" + pinkie-promise "^2.0.0" + +path-type@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/path-type/-/path-type-2.0.0.tgz#f012ccb8415b7096fc2daa1054c3d72389594c73" + dependencies: + pify "^2.0.0" + +pause@0.0.1: + version "0.0.1" + resolved "https://registry.yarnpkg.com/pause/-/pause-0.0.1.tgz#1d408b3fdb76923b9543d96fb4c9dfd535d9cb5d" + +performance-now@^0.2.0: + version "0.2.0" + resolved "https://registry.yarnpkg.com/performance-now/-/performance-now-0.2.0.tgz#33ef30c5c77d4ea21c5a53869d91b56d8f2555e5" + +performance-now@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/performance-now/-/performance-now-2.1.0.tgz#6309f4e0e5fa913ec1c69307ae364b4b377c9e7b" + +pify@^2.0.0: + version "2.3.0" + resolved "https://registry.yarnpkg.com/pify/-/pify-2.3.0.tgz#ed141a6ac043a849ea588498e7dca8b15330e90c" + +pinkie-promise@^2.0.0: + version "2.0.1" + resolved "https://registry.yarnpkg.com/pinkie-promise/-/pinkie-promise-2.0.1.tgz#2135d6dfa7a358c069ac9b178776288228450ffa" + dependencies: + pinkie "^2.0.0" + +pinkie@^2.0.0: + version "2.0.4" + resolved "https://registry.yarnpkg.com/pinkie/-/pinkie-2.0.4.tgz#72556b80cfa0d48a974e80e77248e80ed4f7f870" + +pipette@^0.9.3: + version "0.9.3" + resolved 
"https://registry.yarnpkg.com/pipette/-/pipette-0.9.3.tgz#80c048e08629dd13b1bb0f01ef4532ee85b6a2a7" + dependencies: + typ "~0.6.1" + +pkg-dir@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/pkg-dir/-/pkg-dir-1.0.0.tgz#7a4b508a8d5bb2d629d447056ff4e9c9314cf3d4" + dependencies: + find-up "^1.0.0" + +pkg-dir@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/pkg-dir/-/pkg-dir-2.0.0.tgz#f6d5d1109e19d63edf428e0bd57e12777615334b" + dependencies: + find-up "^2.1.0" + +platform@1.3.5: + version "1.3.5" + resolved "https://registry.yarnpkg.com/platform/-/platform-1.3.5.tgz#fb6958c696e07e2918d2eeda0f0bc9448d733444" + +pluralize@^7.0.0: + version "7.0.0" + resolved "https://registry.yarnpkg.com/pluralize/-/pluralize-7.0.0.tgz#298b89df8b93b0221dbf421ad2b1b1ea23fc6777" + +pn@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/pn/-/pn-1.1.0.tgz#e2f4cef0e219f463c179ab37463e4e1ecdccbafb" + +posix-character-classes@^0.1.0: + version "0.1.1" + resolved "https://registry.yarnpkg.com/posix-character-classes/-/posix-character-classes-0.1.1.tgz#01eac0fe3b5af71a2a6c02feabb8c1fef7e00eab" + +prebuild-install@^2.1.0: + version "2.5.0" + resolved "https://registry.yarnpkg.com/prebuild-install/-/prebuild-install-2.5.0.tgz#6fdd8436069971c76688071f4847d4c891a119f4" + dependencies: + detect-libc "^1.0.3" + expand-template "^1.0.2" + github-from-package "0.0.0" + minimist "^1.2.0" + mkdirp "^0.5.1" + node-abi "^2.1.1" + noop-logger "^0.1.1" + npmlog "^4.0.1" + os-homedir "^1.0.1" + pump "^1.0.1" + rc "^1.1.6" + simple-get "^1.4.2" + tar-fs "^1.13.0" + tunnel-agent "^0.6.0" + xtend "4.0.1" + +prelude-ls@~1.1.2: + version "1.1.2" + resolved "https://registry.yarnpkg.com/prelude-ls/-/prelude-ls-1.1.2.tgz#21932a549f5e52ffd9a827f570e04be62a97da54" + +preserve@^0.2.0: + version "0.2.0" + resolved "https://registry.yarnpkg.com/preserve/-/preserve-0.2.0.tgz#815ed1f6ebc65926f865b310c0713bcb3315ce4b" + +pretty-format@^22.0.3, pretty-format@^22.1.0: + version 
"22.1.0" + resolved "https://registry.yarnpkg.com/pretty-format/-/pretty-format-22.1.0.tgz#2277605b40ed4529ae4db51ff62f4be817647914" + dependencies: + ansi-regex "^3.0.0" + ansi-styles "^3.2.0" + +private@^0.1.6, private@^0.1.7: + version "0.1.8" + resolved "https://registry.yarnpkg.com/private/-/private-0.1.8.tgz#2381edb3689f7a53d653190060fcf822d2f368ff" + +process-nextick-args@~1.0.6: + version "1.0.7" + resolved "https://registry.yarnpkg.com/process-nextick-args/-/process-nextick-args-1.0.7.tgz#150e20b756590ad3f91093f25a4f2ad8bff30ba3" + +progress@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/progress/-/progress-2.0.0.tgz#8a1be366bf8fc23db2bd23f10c6fe920b4389d1f" + +promise-polyfill@^6.0.1: + version "6.1.0" + resolved "https://registry.yarnpkg.com/promise-polyfill/-/promise-polyfill-6.1.0.tgz#dfa96943ea9c121fca4de9b5868cb39d3472e057" + +promise-toolbox@^0.8.0: + version "0.8.3" + resolved "https://registry.yarnpkg.com/promise-toolbox/-/promise-toolbox-0.8.3.tgz#b757232a21d246d8702df50da6784932dd0f5348" + dependencies: + make-error "^1.2.3" + +promise-toolbox@^0.9.5: + version "0.9.5" + resolved "https://registry.yarnpkg.com/promise-toolbox/-/promise-toolbox-0.9.5.tgz#ca33e53714cfde924a9bd3d2d23c53b21cb75acc" + dependencies: + make-error "^1.2.3" + +promise@^7.0.1: + version "7.3.1" + resolved "https://registry.yarnpkg.com/promise/-/promise-7.3.1.tgz#064b72602b18f90f29192b8b1bc418ffd1ebd3bf" + dependencies: + asap "~2.0.3" + +protocol-buffers-encodings@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/protocol-buffers-encodings/-/protocol-buffers-encodings-1.1.0.tgz#f3905631106669b85381bad47a336add7d206873" + dependencies: + signed-varint "^2.0.1" + varint "^5.0.0" + +proxy-addr@~2.0.2: + version "2.0.2" + resolved "https://registry.yarnpkg.com/proxy-addr/-/proxy-addr-2.0.2.tgz#6571504f47bb988ec8180253f85dd7e14952bdec" + dependencies: + forwarded "~0.1.2" + ipaddr.js "1.5.2" + +proxy-agent@^2.1.0: + version "2.2.0" + 
resolved "https://registry.yarnpkg.com/proxy-agent/-/proxy-agent-2.2.0.tgz#e853cd8400013562d23c8dc9e1deaf9b0b0a153a" + dependencies: + agent-base "^4.2.0" + debug "^2.6.8" + http-proxy-agent "^1.0.0" + https-proxy-agent "^1.0.0" + lru-cache "^2.6.5" + pac-proxy-agent "^2.0.0" + socks-proxy-agent "^3.0.0" + +prr@~0.0.0: + version "0.0.0" + resolved "https://registry.yarnpkg.com/prr/-/prr-0.0.0.tgz#1a84b85908325501411853d0081ee3fa86e2926a" + +prr@~1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/prr/-/prr-1.0.1.tgz#d3fc114ba06995a45ec6893f484ceb1d78f5f476" + +pseudomap@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/pseudomap/-/pseudomap-1.0.2.tgz#f052a28da70e618917ef0a8ac34c1ae5a68286b3" + +pug-attrs@^2.0.2: + version "2.0.2" + resolved "https://registry.yarnpkg.com/pug-attrs/-/pug-attrs-2.0.2.tgz#8be2b2225568ffa75d1b866982bff9f4111affcb" + dependencies: + constantinople "^3.0.1" + js-stringify "^1.0.1" + pug-runtime "^2.0.3" + +pug-code-gen@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/pug-code-gen/-/pug-code-gen-2.0.0.tgz#96aea39a9e62f1ec5d2b6a5b42a29d528c70b43d" + dependencies: + constantinople "^3.0.1" + doctypes "^1.1.0" + js-stringify "^1.0.1" + pug-attrs "^2.0.2" + pug-error "^1.3.2" + pug-runtime "^2.0.3" + void-elements "^2.0.1" + with "^5.0.0" + +pug-error@^1.3.2: + version "1.3.2" + resolved "https://registry.yarnpkg.com/pug-error/-/pug-error-1.3.2.tgz#53ae7d9d29bb03cf564493a026109f54c47f5f26" + +pug-filters@^2.1.5: + version "2.1.5" + resolved "https://registry.yarnpkg.com/pug-filters/-/pug-filters-2.1.5.tgz#66bf6e80d97fbef829bab0aa35eddff33fc964f3" + dependencies: + clean-css "^3.3.0" + constantinople "^3.0.1" + jstransformer "1.0.0" + pug-error "^1.3.2" + pug-walk "^1.1.5" + resolve "^1.1.6" + uglify-js "^2.6.1" + +pug-lexer@^3.1.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/pug-lexer/-/pug-lexer-3.1.0.tgz#fd087376d4a675b4f59f8fef422883434e9581a2" + dependencies: + 
character-parser "^2.1.1" + is-expression "^3.0.0" + pug-error "^1.3.2" + +pug-linker@^3.0.3: + version "3.0.3" + resolved "https://registry.yarnpkg.com/pug-linker/-/pug-linker-3.0.3.tgz#25f59eb750237f0368e59c3379764229c0189c41" + dependencies: + pug-error "^1.3.2" + pug-walk "^1.1.5" + +pug-load@^2.0.9: + version "2.0.9" + resolved "https://registry.yarnpkg.com/pug-load/-/pug-load-2.0.9.tgz#ee217c914cc1d9324d44b86c32d1df241d36de7a" + dependencies: + object-assign "^4.1.0" + pug-walk "^1.1.5" + +pug-parser@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/pug-parser/-/pug-parser-4.0.0.tgz#c9f52322e4eabe4bf5beeba64ed18373bb627801" + dependencies: + pug-error "^1.3.2" + token-stream "0.0.1" + +pug-runtime@^2.0.3: + version "2.0.3" + resolved "https://registry.yarnpkg.com/pug-runtime/-/pug-runtime-2.0.3.tgz#98162607b0fce9e254d427f33987a5aee7168bda" + +pug-strip-comments@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/pug-strip-comments/-/pug-strip-comments-1.0.2.tgz#d313afa01bcc374980e1399e23ebf2eb9bdc8513" + dependencies: + pug-error "^1.3.2" + +pug-walk@^1.1.5: + version "1.1.5" + resolved "https://registry.yarnpkg.com/pug-walk/-/pug-walk-1.1.5.tgz#90e943acbcf7021e6454cf1b32245891cba6f851" + +pug@^2.0.0-rc.4: + version "2.0.0-rc.4" + resolved "https://registry.yarnpkg.com/pug/-/pug-2.0.0-rc.4.tgz#b7b08f6599bd5302568042b7436984fb28c80a13" + dependencies: + pug-code-gen "^2.0.0" + pug-filters "^2.1.5" + pug-lexer "^3.1.0" + pug-linker "^3.0.3" + pug-load "^2.0.9" + pug-parser "^4.0.0" + pug-runtime "^2.0.3" + pug-strip-comments "^1.0.2" + +pull-cat@^1.1.9: + version "1.1.11" + resolved "https://registry.yarnpkg.com/pull-cat/-/pull-cat-1.1.11.tgz#b642dd1255da376a706b6db4fa962f5fdb74c31b" + +pull-level@^2.0.3: + version "2.0.3" + resolved "https://registry.yarnpkg.com/pull-level/-/pull-level-2.0.3.tgz#9500635e257945d6feede185f5d7a24773455b17" + dependencies: + level-post "~1.0.3" + pull-cat "^1.1.9" + pull-live "^1.0.1" + 
pull-pushable "^2.0.0" + pull-stream "^3.4.0" + pull-window "^2.1.4" + stream-to-pull-stream "^1.7.1" + +pull-live@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/pull-live/-/pull-live-1.0.1.tgz#a4ecee01e330155e9124bbbcf4761f21b38f51f5" + dependencies: + pull-cat "^1.1.9" + pull-stream "^3.4.0" + +pull-pushable@^2.0.0: + version "2.1.2" + resolved "https://registry.yarnpkg.com/pull-pushable/-/pull-pushable-2.1.2.tgz#3fe15b8f7eec89f3972d238bc04890c9405a6dbb" + +pull-stream@^3.2.3, pull-stream@^3.4.0, pull-stream@^3.4.5: + version "3.6.1" + resolved "https://registry.yarnpkg.com/pull-stream/-/pull-stream-3.6.1.tgz#c5c2ae4a51246efeebcc65c0412a3d725a92ce00" + +pull-window@^2.1.4: + version "2.1.4" + resolved "https://registry.yarnpkg.com/pull-window/-/pull-window-2.1.4.tgz#fc3b86feebd1920c7ae297691e23f705f88552f0" + dependencies: + looper "^2.0.0" + +pullstream@~0.4.0: + version "0.4.1" + resolved "https://registry.yarnpkg.com/pullstream/-/pullstream-0.4.1.tgz#d6fb3bf5aed697e831150eb1002c25a3f8ae1314" + dependencies: + over ">= 0.0.5 < 1" + readable-stream "~1.0.31" + setimmediate ">= 1.0.2 < 2" + slice-stream ">= 1.0.0 < 2" + +pump@^1.0.0, pump@^1.0.1: + version "1.0.3" + resolved "https://registry.yarnpkg.com/pump/-/pump-1.0.3.tgz#5dfe8311c33bbf6fc18261f9f34702c47c08a954" + dependencies: + end-of-stream "^1.1.0" + once "^1.3.1" + +punycode@^1.4.1: + version "1.4.1" + resolved "https://registry.yarnpkg.com/punycode/-/punycode-1.4.1.tgz#c0d5a63b2718800ad8e1eb0fa5269c84dd41845e" + +punycode@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/punycode/-/punycode-2.1.0.tgz#5f863edc89b96db09074bad7947bf09056ca4e7d" + +pw@0.0.4, pw@^0.0.4: + version "0.0.4" + resolved "https://registry.yarnpkg.com/pw/-/pw-0.0.4.tgz#8015982ef8bebfd9d8eb8c795e751774871fde46" + +qs@6.5.1, qs@~6.5.1: + version "6.5.1" + resolved "https://registry.yarnpkg.com/qs/-/qs-6.5.1.tgz#349cdf6eef89ec45c12d7d5eb3fc0c870343a6d8" + +qs@~6.4.0: + version "6.4.0" + resolved 
"https://registry.yarnpkg.com/qs/-/qs-6.4.0.tgz#13e26d28ad6b0ffaa91312cd3bf708ed351e7233" + +random-bytes@~1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/random-bytes/-/random-bytes-1.0.0.tgz#4f68a1dc0ae58bd3fb95848c30324db75d64360b" + +randomatic@^1.1.3: + version "1.1.7" + resolved "https://registry.yarnpkg.com/randomatic/-/randomatic-1.1.7.tgz#c7abe9cc8b87c0baa876b19fde83fd464797e38c" + dependencies: + is-number "^3.0.0" + kind-of "^4.0.0" + +range-parser@~1.2.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/range-parser/-/range-parser-1.2.0.tgz#f49be6b487894ddc40dcc94a322f611092e00d5e" + +raw-body@2.3.2, raw-body@^2.2.0: + version "2.3.2" + resolved "https://registry.yarnpkg.com/raw-body/-/raw-body-2.3.2.tgz#bcd60c77d3eb93cde0050295c3f379389bc88f89" + dependencies: + bytes "3.0.0" + http-errors "1.6.2" + iconv-lite "0.4.19" + unpipe "1.0.0" + +rc@^1.1.6, rc@^1.1.7: + version "1.2.5" + resolved "https://registry.yarnpkg.com/rc/-/rc-1.2.5.tgz#275cd687f6e3b36cc756baa26dfee80a790301fd" + dependencies: + deep-extend "~0.4.0" + ini "~1.3.0" + minimist "^1.2.0" + strip-json-comments "~2.0.1" + +read-pkg-up@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/read-pkg-up/-/read-pkg-up-1.0.1.tgz#9d63c13276c065918d57f002a57f40a1b643fb02" + dependencies: + find-up "^1.0.0" + read-pkg "^1.0.0" + +read-pkg-up@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/read-pkg-up/-/read-pkg-up-2.0.0.tgz#6b72a8048984e0c41e79510fd5e9fa99b3b549be" + dependencies: + find-up "^2.0.0" + read-pkg "^2.0.0" + +read-pkg@^1.0.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/read-pkg/-/read-pkg-1.1.0.tgz#f5ffaa5ecd29cb31c0474bca7d756b6bb29e3f28" + dependencies: + load-json-file "^1.0.0" + normalize-package-data "^2.3.2" + path-type "^1.0.0" + +read-pkg@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/read-pkg/-/read-pkg-2.0.0.tgz#8ef1c0623c6a6db0dc6713c4bfac46332b2368f8" + dependencies: + load-json-file 
"^2.0.0" + normalize-package-data "^2.3.2" + path-type "^2.0.0" + +readable-stream@1.1.x, readable-stream@^1.0.33: + version "1.1.14" + resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-1.1.14.tgz#7cf4c54ef648e3813084c636dd2079e166c081d9" + dependencies: + core-util-is "~1.0.0" + inherits "~2.0.1" + isarray "0.0.1" + string_decoder "~0.10.x" + +readable-stream@2, readable-stream@^2.0.0, readable-stream@^2.0.1, readable-stream@^2.0.2, readable-stream@^2.0.5, readable-stream@^2.0.6, readable-stream@^2.1.4, readable-stream@^2.1.5, readable-stream@^2.2.2: + version "2.3.3" + resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-2.3.3.tgz#368f2512d79f9d46fdfc71349ae7878bbc1eb95c" + dependencies: + core-util-is "~1.0.0" + inherits "~2.0.3" + isarray "~1.0.0" + process-nextick-args "~1.0.6" + safe-buffer "~5.1.1" + string_decoder "~1.0.3" + util-deprecate "~1.0.1" + +readable-stream@~1.0.0, readable-stream@~1.0.26, readable-stream@~1.0.31: + version "1.0.34" + resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-1.0.34.tgz#125820e34bc842d2f2aaafafe4c2916ee32c157c" + dependencies: + core-util-is "~1.0.0" + inherits "~2.0.1" + isarray "0.0.1" + string_decoder "~0.10.x" + +readdirp@^2.0.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/readdirp/-/readdirp-2.1.0.tgz#4ed0ad060df3073300c48440373f72d1cc642d78" + dependencies: + graceful-fs "^4.1.2" + minimatch "^3.0.2" + readable-stream "^2.0.2" + set-immediate-shim "^1.0.1" + +realpath-native@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/realpath-native/-/realpath-native-1.0.0.tgz#7885721a83b43bd5327609f0ddecb2482305fdf0" + dependencies: + util.promisify "^1.0.0" + +redis-commands@^1.2.0: + version "1.3.1" + resolved "https://registry.yarnpkg.com/redis-commands/-/redis-commands-1.3.1.tgz#81d826f45fa9c8b2011f4cd7a0fe597d241d442b" + +redis-parser@^2.6.0: + version "2.6.0" + resolved 
"https://registry.yarnpkg.com/redis-parser/-/redis-parser-2.6.0.tgz#52ed09dacac108f1a631c07e9b69941e7a19504b" + +redis@^2.8.0: + version "2.8.0" + resolved "https://registry.yarnpkg.com/redis/-/redis-2.8.0.tgz#202288e3f58c49f6079d97af7a10e1303ae14b02" + dependencies: + double-ended-queue "^2.1.0-0" + redis-commands "^1.2.0" + redis-parser "^2.6.0" + +referrer-policy@1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/referrer-policy/-/referrer-policy-1.1.0.tgz#35774eb735bf50fb6c078e83334b472350207d79" + +regenerate@^1.2.1: + version "1.3.3" + resolved "https://registry.yarnpkg.com/regenerate/-/regenerate-1.3.3.tgz#0c336d3980553d755c39b586ae3b20aa49c82b7f" + +regenerator-runtime@^0.10.0, regenerator-runtime@^0.10.5: + version "0.10.5" + resolved "https://registry.yarnpkg.com/regenerator-runtime/-/regenerator-runtime-0.10.5.tgz#336c3efc1220adcedda2c9fab67b5a7955a33658" + +regenerator-runtime@^0.11.0, regenerator-runtime@^0.11.1: + version "0.11.1" + resolved "https://registry.yarnpkg.com/regenerator-runtime/-/regenerator-runtime-0.11.1.tgz#be05ad7f9bf7d22e056f9726cee5017fbf19e2e9" + +regenerator-transform@^0.10.0: + version "0.10.1" + resolved "https://registry.yarnpkg.com/regenerator-transform/-/regenerator-transform-0.10.1.tgz#1e4996837231da8b7f3cf4114d71b5691a0680dd" + dependencies: + babel-runtime "^6.18.0" + babel-types "^6.19.0" + private "^0.1.6" + +regex-cache@^0.4.2: + version "0.4.4" + resolved "https://registry.yarnpkg.com/regex-cache/-/regex-cache-0.4.4.tgz#75bdc58a2a1496cec48a12835bc54c8d562336dd" + dependencies: + is-equal-shallow "^0.1.3" + +regex-not@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/regex-not/-/regex-not-1.0.0.tgz#42f83e39771622df826b02af176525d6a5f157f9" + dependencies: + extend-shallow "^2.0.1" + +regexpu-core@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/regexpu-core/-/regexpu-core-2.0.0.tgz#49d038837b8dcf8bfa5b9a42139938e6ea2ae240" + dependencies: + regenerate "^1.2.1" + regjsgen 
"^0.2.0" + regjsparser "^0.1.4" + +regjsgen@^0.2.0: + version "0.2.0" + resolved "https://registry.yarnpkg.com/regjsgen/-/regjsgen-0.2.0.tgz#6c016adeac554f75823fe37ac05b92d5a4edb1f7" + +regjsparser@^0.1.4: + version "0.1.5" + resolved "https://registry.yarnpkg.com/regjsparser/-/regjsparser-0.1.5.tgz#7ee8f84dc6fa792d3fd0ae228d24bd949ead205c" + dependencies: + jsesc "~0.5.0" + +remove-trailing-separator@^1.0.1: + version "1.1.0" + resolved "https://registry.yarnpkg.com/remove-trailing-separator/-/remove-trailing-separator-1.1.0.tgz#c24bce2a283adad5bc3f58e0d48249b92379d8ef" + +repeat-element@^1.1.2: + version "1.1.2" + resolved "https://registry.yarnpkg.com/repeat-element/-/repeat-element-1.1.2.tgz#ef089a178d1483baae4d93eb98b4f9e4e11d990a" + +repeat-string@^1.5.2, repeat-string@^1.6.1: + version "1.6.1" + resolved "https://registry.yarnpkg.com/repeat-string/-/repeat-string-1.6.1.tgz#8dcae470e1c88abc2d600fff4a776286da75e637" + +repeating@^2.0.0: + version "2.0.1" + resolved "https://registry.yarnpkg.com/repeating/-/repeating-2.0.1.tgz#5214c53a926d3552707527fbab415dbc08d06dda" + dependencies: + is-finite "^1.0.0" + +request-promise-core@1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/request-promise-core/-/request-promise-core-1.1.1.tgz#3eee00b2c5aa83239cfb04c5700da36f81cd08b6" + dependencies: + lodash "^4.13.1" + +request-promise-native@^1.0.5: + version "1.0.5" + resolved "https://registry.yarnpkg.com/request-promise-native/-/request-promise-native-1.0.5.tgz#5281770f68e0c9719e5163fd3fab482215f4fda5" + dependencies: + request-promise-core "1.1.1" + stealthy-require "^1.1.0" + tough-cookie ">=2.3.3" + +request@2.81.0: + version "2.81.0" + resolved "https://registry.yarnpkg.com/request/-/request-2.81.0.tgz#c6928946a0e06c5f8d6f8a9333469ffda46298a0" + dependencies: + aws-sign2 "~0.6.0" + aws4 "^1.2.1" + caseless "~0.12.0" + combined-stream "~1.0.5" + extend "~3.0.0" + forever-agent "~0.6.1" + form-data "~2.1.1" + har-validator "~4.2.1" + hawk "~3.1.3" + 
http-signature "~1.1.0" + is-typedarray "~1.0.0" + isstream "~0.1.2" + json-stringify-safe "~5.0.1" + mime-types "~2.1.7" + oauth-sign "~0.8.1" + performance-now "^0.2.0" + qs "~6.4.0" + safe-buffer "^5.0.1" + stringstream "~0.0.4" + tough-cookie "~2.3.0" + tunnel-agent "^0.6.0" + uuid "^3.0.0" + +request@^2.81.0, request@^2.83.0: + version "2.83.0" + resolved "https://registry.yarnpkg.com/request/-/request-2.83.0.tgz#ca0b65da02ed62935887808e6f510381034e3356" + dependencies: + aws-sign2 "~0.7.0" + aws4 "^1.6.0" + caseless "~0.12.0" + combined-stream "~1.0.5" + extend "~3.0.1" + forever-agent "~0.6.1" + form-data "~2.3.1" + har-validator "~5.0.3" + hawk "~6.0.2" + http-signature "~1.2.0" + is-typedarray "~1.0.0" + isstream "~0.1.2" + json-stringify-safe "~5.0.1" + mime-types "~2.1.17" + oauth-sign "~0.8.2" + performance-now "^2.1.0" + qs "~6.5.1" + safe-buffer "^5.1.1" + stringstream "~0.0.5" + tough-cookie "~2.3.3" + tunnel-agent "^0.6.0" + uuid "^3.1.0" + +require-directory@^2.1.1: + version "2.1.1" + resolved "https://registry.yarnpkg.com/require-directory/-/require-directory-2.1.1.tgz#8c64ad5fd30dab1c976e2344ffe7f792a6a6df42" + +require-main-filename@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/require-main-filename/-/require-main-filename-1.0.1.tgz#97f717b69d48784f5f526a6c5aa8ffdda055a4d1" + +require-package-name@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/require-package-name/-/require-package-name-2.0.1.tgz#c11e97276b65b8e2923f75dabf5fb2ef0c3841b9" + +require-uncached@^1.0.3: + version "1.0.3" + resolved "https://registry.yarnpkg.com/require-uncached/-/require-uncached-1.0.3.tgz#4e0d56d6c9662fd31e43011c4b95aa49955421d3" + dependencies: + caller-path "^0.1.0" + resolve-from "^1.0.0" + +requires-port@1.x.x: + version "1.0.0" + resolved "https://registry.yarnpkg.com/requires-port/-/requires-port-1.0.0.tgz#925d2601d39ac485e091cf0da5c6e694dc3dcaff" + +resolve-cwd@^2.0.0: + version "2.0.0" + resolved 
"https://registry.yarnpkg.com/resolve-cwd/-/resolve-cwd-2.0.0.tgz#00a9f7387556e27038eae232caa372a6a59b665a" + dependencies: + resolve-from "^3.0.0" + +resolve-from@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/resolve-from/-/resolve-from-1.0.1.tgz#26cbfe935d1aeeeabb29bc3fe5aeb01e93d44226" + +resolve-from@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/resolve-from/-/resolve-from-3.0.0.tgz#b22c7af7d9d6881bc8b6e653335eebcb0a188748" + +resolve-url@^0.2.1: + version "0.2.1" + resolved "https://registry.yarnpkg.com/resolve-url/-/resolve-url-0.2.1.tgz#2c637fe77c893afd2a663fe21aa9080068e2052a" + +resolve@1.1.7: + version "1.1.7" + resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.1.7.tgz#203114d82ad2c5ed9e8e0411b3932875e889e97b" + +resolve@^1.1.6, resolve@^1.3.3, resolve@^1.5.0: + version "1.5.0" + resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.5.0.tgz#1f09acce796c9a762579f31b2c1cc4c3cddf9f36" + dependencies: + path-parse "^1.0.5" + +restore-cursor@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/restore-cursor/-/restore-cursor-2.0.0.tgz#9f7ee287f82fd326d4fd162923d62129eee0dfaf" + dependencies: + onetime "^2.0.0" + signal-exit "^3.0.2" + +right-align@^0.1.1: + version "0.1.3" + resolved "https://registry.yarnpkg.com/right-align/-/right-align-0.1.3.tgz#61339b722fe6a3515689210d24e14c96148613ef" + dependencies: + align-text "^0.1.1" + +rimraf@2, rimraf@^2.2.8, rimraf@^2.5.1, rimraf@^2.5.4, rimraf@^2.6.1, rimraf@^2.6.2: + version "2.6.2" + resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-2.6.2.tgz#2ed8150d24a16ea8651e6d6ef0f47c4158ce7a36" + dependencies: + glob "^7.0.5" + +run-async@^2.2.0: + version "2.3.0" + resolved "https://registry.yarnpkg.com/run-async/-/run-async-2.3.0.tgz#0371ab4ae0bdd720d4166d7dfda64ff7a445a6c0" + dependencies: + is-promise "^2.1.0" + +rx-lite-aggregates@^4.0.8: + version "4.0.8" + resolved 
"https://registry.yarnpkg.com/rx-lite-aggregates/-/rx-lite-aggregates-4.0.8.tgz#753b87a89a11c95467c4ac1626c4efc4e05c67be" + dependencies: + rx-lite "*" + +rx-lite@*, rx-lite@^4.0.8: + version "4.0.8" + resolved "https://registry.yarnpkg.com/rx-lite/-/rx-lite-4.0.8.tgz#0b1e11af8bc44836f04a6407e92da42467b79444" + +rx@^4.1.0: + version "4.1.0" + resolved "https://registry.yarnpkg.com/rx/-/rx-4.1.0.tgz#a5f13ff79ef3b740fe30aa803fb09f98805d4782" + +safe-buffer@5.1.1, safe-buffer@^5.0.1, safe-buffer@^5.1.1, safe-buffer@~5.1.0, safe-buffer@~5.1.1: + version "5.1.1" + resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.1.1.tgz#893312af69b2123def71f57889001671eeb2c853" + +sane@^2.0.0: + version "2.3.0" + resolved "https://registry.yarnpkg.com/sane/-/sane-2.3.0.tgz#3f3df584abf69e63d4bb74f0f8c42468e4d7d46b" + dependencies: + anymatch "^1.3.0" + exec-sh "^0.2.0" + fb-watchman "^2.0.0" + minimatch "^3.0.2" + minimist "^1.1.1" + walker "~1.0.5" + watch "~0.18.0" + optionalDependencies: + fsevents "^1.1.1" + +sax@1.2.x, sax@>=0.6.0, sax@^1.2.4: + version "1.2.4" + resolved "https://registry.yarnpkg.com/sax/-/sax-1.2.4.tgz#2816234e2378bddc4e5354fab5caa895df7100d9" + +schema-inspector@^1.6.8: + version "1.6.8" + resolved "https://registry.yarnpkg.com/schema-inspector/-/schema-inspector-1.6.8.tgz#b9e53983cc55ff2dbd7b65e3dbe085d9d1285f2a" + dependencies: + async "^1.5.0" + +"semver@2 || 3 || 4 || 5", semver@^5.3.0, semver@^5.4.1: + version "5.5.0" + resolved "https://registry.yarnpkg.com/semver/-/semver-5.5.0.tgz#dc4bbc7a6ca9d916dee5d43516f0092b58f7b8ab" + +semver@5.3.0: + version "5.3.0" + resolved "https://registry.yarnpkg.com/semver/-/semver-5.3.0.tgz#9b2ce5d3de02d17c6012ad326aa6b4d0cf54f94f" + +semver@~5.0.1: + version "5.0.3" + resolved "https://registry.yarnpkg.com/semver/-/semver-5.0.3.tgz#77466de589cd5d3c95f138aa78bc569a3cb5d27a" + +semver@~5.1.0: + version "5.1.1" + resolved 
"https://registry.yarnpkg.com/semver/-/semver-5.1.1.tgz#a3292a373e6f3e0798da0b20641b9a9c5bc47e19" + +semver@~5.4.1: + version "5.4.1" + resolved "https://registry.yarnpkg.com/semver/-/semver-5.4.1.tgz#e059c09d8571f0540823733433505d3a2f00b18e" + +send@0.16.1: + version "0.16.1" + resolved "https://registry.yarnpkg.com/send/-/send-0.16.1.tgz#a70e1ca21d1382c11d0d9f6231deb281080d7ab3" + dependencies: + debug "2.6.9" + depd "~1.1.1" + destroy "~1.0.4" + encodeurl "~1.0.1" + escape-html "~1.0.3" + etag "~1.8.1" + fresh "0.5.2" + http-errors "~1.6.2" + mime "1.4.1" + ms "2.0.0" + on-finished "~2.3.0" + range-parser "~1.2.0" + statuses "~1.3.1" + +serve-static@1.13.1, serve-static@^1.13.1: + version "1.13.1" + resolved "https://registry.yarnpkg.com/serve-static/-/serve-static-1.13.1.tgz#4c57d53404a761d8f2e7c1e8a18a47dbf278a719" + dependencies: + encodeurl "~1.0.1" + escape-html "~1.0.3" + parseurl "~1.3.2" + send "0.16.1" + +set-blocking@^2.0.0, set-blocking@~2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/set-blocking/-/set-blocking-2.0.0.tgz#045f9782d011ae9a6803ddd382b24392b3d890f7" + +set-getter@^0.1.0: + version "0.1.0" + resolved "https://registry.yarnpkg.com/set-getter/-/set-getter-0.1.0.tgz#d769c182c9d5a51f409145f2fba82e5e86e80376" + dependencies: + to-object-path "^0.3.0" + +set-immediate-shim@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/set-immediate-shim/-/set-immediate-shim-1.0.1.tgz#4b2b1b27eb808a9f8dcc481a58e5e56f599f3f61" + +set-value@^0.4.3: + version "0.4.3" + resolved "https://registry.yarnpkg.com/set-value/-/set-value-0.4.3.tgz#7db08f9d3d22dc7f78e53af3c3bf4666ecdfccf1" + dependencies: + extend-shallow "^2.0.1" + is-extendable "^0.1.1" + is-plain-object "^2.0.1" + to-object-path "^0.3.0" + +set-value@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/set-value/-/set-value-2.0.0.tgz#71ae4a88f0feefbbf52d1ea604f3fb315ebb6274" + dependencies: + extend-shallow "^2.0.1" + is-extendable "^0.1.1" + 
is-plain-object "^2.0.3" + split-string "^3.0.1" + +"setimmediate@>= 1.0.2 < 2", setimmediate@~1.0.1: + version "1.0.5" + resolved "https://registry.yarnpkg.com/setimmediate/-/setimmediate-1.0.5.tgz#290cbb232e306942d7d7ea9b83732ab7856f8285" + +setprototypeof@1.0.3: + version "1.0.3" + resolved "https://registry.yarnpkg.com/setprototypeof/-/setprototypeof-1.0.3.tgz#66567e37043eeb4f04d91bd658c0cbefb55b8e04" + +setprototypeof@1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/setprototypeof/-/setprototypeof-1.1.0.tgz#d0bd85536887b6fe7c0d818cb962d9d91c54e656" + +shebang-command@^1.2.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/shebang-command/-/shebang-command-1.2.0.tgz#44aac65b695b03398968c39f363fee5deafdf1ea" + dependencies: + shebang-regex "^1.0.0" + +shebang-regex@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/shebang-regex/-/shebang-regex-1.0.0.tgz#da42f49740c0b42db2ca9728571cb190c98efea3" + +shellwords@^0.1.1: + version "0.1.1" + resolved "https://registry.yarnpkg.com/shellwords/-/shellwords-0.1.1.tgz#d6b9181c1a48d397324c84871efbcfc73fc0654b" + +signal-exit@^3.0.0, signal-exit@^3.0.2: + version "3.0.2" + resolved "https://registry.yarnpkg.com/signal-exit/-/signal-exit-3.0.2.tgz#b5fdc08f1287ea1178628e415e25132b73646c6d" + +signed-varint@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/signed-varint/-/signed-varint-2.0.1.tgz#50a9989da7c98c2c61dad119bc97470ef8528129" + dependencies: + varint "~5.0.0" + +simple-get@^1.4.2: + version "1.4.3" + resolved "https://registry.yarnpkg.com/simple-get/-/simple-get-1.4.3.tgz#e9755eda407e96da40c5e5158c9ea37b33becbeb" + dependencies: + once "^1.3.1" + unzip-response "^1.0.0" + xtend "^4.0.0" + +simple-token@^0.1.0: + version "0.1.0" + resolved "https://registry.yarnpkg.com/simple-token/-/simple-token-0.1.0.tgz#5882259356222a316a27bdc1f11b8e9f035fa0f8" + +slash@^1.0.0: + version "1.0.0" + resolved 
"https://registry.yarnpkg.com/slash/-/slash-1.0.0.tgz#c41f2f6c39fc16d1cd17ad4b5d896114ae470d55" + +slice-ansi@1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/slice-ansi/-/slice-ansi-1.0.0.tgz#044f1a49d8842ff307aad6b505ed178bd950134d" + dependencies: + is-fullwidth-code-point "^2.0.0" + +"slice-stream@>= 1.0.0 < 2": + version "1.0.0" + resolved "https://registry.yarnpkg.com/slice-stream/-/slice-stream-1.0.0.tgz#5b33bd66f013b1a7f86460b03d463dec39ad3ea0" + dependencies: + readable-stream "~1.0.31" + +smart-buffer@^1.0.13: + version "1.1.15" + resolved "https://registry.yarnpkg.com/smart-buffer/-/smart-buffer-1.1.15.tgz#7f114b5b65fab3e2a35aa775bb12f0d1c649bf16" + +snapdragon-node@^2.0.1: + version "2.1.1" + resolved "https://registry.yarnpkg.com/snapdragon-node/-/snapdragon-node-2.1.1.tgz#6c175f86ff14bdb0724563e8f3c1b021a286853b" + dependencies: + define-property "^1.0.0" + isobject "^3.0.0" + snapdragon-util "^3.0.1" + +snapdragon-util@^3.0.1: + version "3.0.1" + resolved "https://registry.yarnpkg.com/snapdragon-util/-/snapdragon-util-3.0.1.tgz#f956479486f2acd79700693f6f7b805e45ab56e2" + dependencies: + kind-of "^3.2.0" + +snapdragon@^0.8.1: + version "0.8.1" + resolved "https://registry.yarnpkg.com/snapdragon/-/snapdragon-0.8.1.tgz#e12b5487faded3e3dea0ac91e9400bf75b401370" + dependencies: + base "^0.11.1" + debug "^2.2.0" + define-property "^0.2.5" + extend-shallow "^2.0.1" + map-cache "^0.2.2" + source-map "^0.5.6" + source-map-resolve "^0.5.0" + use "^2.0.0" + +sntp@1.x.x: + version "1.0.9" + resolved "https://registry.yarnpkg.com/sntp/-/sntp-1.0.9.tgz#6541184cc90aeea6c6e7b35e2659082443c66198" + dependencies: + hoek "2.x.x" + +sntp@2.x.x: + version "2.1.0" + resolved "https://registry.yarnpkg.com/sntp/-/sntp-2.1.0.tgz#2c6cec14fedc2222739caf9b5c3d85d1cc5a2cc8" + dependencies: + hoek "4.x.x" + +socks-proxy-agent@^3.0.0: + version "3.0.1" + resolved 
"https://registry.yarnpkg.com/socks-proxy-agent/-/socks-proxy-agent-3.0.1.tgz#2eae7cf8e2a82d34565761539a7f9718c5617659" + dependencies: + agent-base "^4.1.0" + socks "^1.1.10" + +socks@^1.1.10: + version "1.1.10" + resolved "https://registry.yarnpkg.com/socks/-/socks-1.1.10.tgz#5b8b7fc7c8f341c53ed056e929b7bf4de8ba7b5a" + dependencies: + ip "^1.1.4" + smart-buffer "^1.0.13" + +source-map-resolve@^0.5.0: + version "0.5.1" + resolved "https://registry.yarnpkg.com/source-map-resolve/-/source-map-resolve-0.5.1.tgz#7ad0f593f2281598e854df80f19aae4b92d7a11a" + dependencies: + atob "^2.0.0" + decode-uri-component "^0.2.0" + resolve-url "^0.2.1" + source-map-url "^0.4.0" + urix "^0.1.0" + +source-map-support@^0.4.15: + version "0.4.18" + resolved "https://registry.yarnpkg.com/source-map-support/-/source-map-support-0.4.18.tgz#0286a6de8be42641338594e97ccea75f0a2c585f" + dependencies: + source-map "^0.5.6" + +source-map-support@^0.5.0: + version "0.5.3" + resolved "https://registry.yarnpkg.com/source-map-support/-/source-map-support-0.5.3.tgz#2b3d5fff298cfa4d1afd7d4352d569e9a0158e76" + dependencies: + source-map "^0.6.0" + +source-map-url@^0.4.0: + version "0.4.0" + resolved "https://registry.yarnpkg.com/source-map-url/-/source-map-url-0.4.0.tgz#3e935d7ddd73631b97659956d55128e87b5084a3" + +source-map@0.4.x, source-map@^0.4.4: + version "0.4.4" + resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.4.4.tgz#eba4f5da9c0dc999de68032d8b4f76173652036b" + dependencies: + amdefine ">=0.0.4" + +source-map@^0.5.3, source-map@^0.5.6, source-map@~0.5.1, source-map@~0.5.6: + version "0.5.7" + resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.5.7.tgz#8a039d2d1021d22d1ea14c80d8ea468ba2ef3fcc" + +source-map@^0.6.0, source-map@^0.6.1: + version "0.6.1" + resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.6.1.tgz#74722af32e9614e9c287a8d0bbde48b5e2f1a263" + +spdx-correct@~1.0.0: + version "1.0.2" + resolved 
"https://registry.yarnpkg.com/spdx-correct/-/spdx-correct-1.0.2.tgz#4b3073d933ff51f3912f03ac5519498a4150db40" + dependencies: + spdx-license-ids "^1.0.2" + +spdx-expression-parse@~1.0.0: + version "1.0.4" + resolved "https://registry.yarnpkg.com/spdx-expression-parse/-/spdx-expression-parse-1.0.4.tgz#9bdf2f20e1f40ed447fbe273266191fced51626c" + +spdx-license-ids@^1.0.2: + version "1.2.2" + resolved "https://registry.yarnpkg.com/spdx-license-ids/-/spdx-license-ids-1.2.2.tgz#c9df7a3424594ade6bd11900d596696dc06bac57" + +split-lines@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/split-lines/-/split-lines-1.1.0.tgz#3abba8f598614142f9db8d27ab6ab875662a1e09" + +split-string@^3.0.1, split-string@^3.0.2: + version "3.1.0" + resolved "https://registry.yarnpkg.com/split-string/-/split-string-3.1.0.tgz#7cb09dda3a86585705c64b39a6466038682e8fe2" + dependencies: + extend-shallow "^3.0.0" + +split2@^2.1.0: + version "2.2.0" + resolved "https://registry.yarnpkg.com/split2/-/split2-2.2.0.tgz#186b2575bcf83e85b7d18465756238ee4ee42493" + dependencies: + through2 "^2.0.2" + +sprintf-js@~1.0.2: + version "1.0.3" + resolved "https://registry.yarnpkg.com/sprintf-js/-/sprintf-js-1.0.3.tgz#04e6926f662895354f3dd015203633b857297e2c" + +sshpk@^1.7.0: + version "1.13.1" + resolved "https://registry.yarnpkg.com/sshpk/-/sshpk-1.13.1.tgz#512df6da6287144316dc4c18fe1cf1d940739be3" + dependencies: + asn1 "~0.2.3" + assert-plus "^1.0.0" + dashdash "^1.12.0" + getpass "^0.1.1" + optionalDependencies: + bcrypt-pbkdf "^1.0.0" + ecc-jsbn "~0.1.1" + jsbn "~0.1.0" + tweetnacl "~0.14.0" + +stack-chain@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/stack-chain/-/stack-chain-2.0.0.tgz#d73d1172af89565f07438b5bcc086831b6689b2d" + +stack-utils@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/stack-utils/-/stack-utils-1.0.1.tgz#d4f33ab54e8e38778b0ca5cfd3b3afb12db68620" + +static-extend@^0.1.1: + version "0.1.2" + resolved 
"https://registry.yarnpkg.com/static-extend/-/static-extend-0.1.2.tgz#60809c39cbff55337226fd5e0b520f341f1fb5c6" + dependencies: + define-property "^0.2.5" + object-copy "^0.1.0" + +"statuses@>= 1.3.1 < 2": + version "1.4.0" + resolved "https://registry.yarnpkg.com/statuses/-/statuses-1.4.0.tgz#bb73d446da2796106efcc1b601a253d6c46bd087" + +statuses@~1.3.1: + version "1.3.1" + resolved "https://registry.yarnpkg.com/statuses/-/statuses-1.3.1.tgz#faf51b9eb74aaef3b3acf4ad5f61abf24cb7b93e" + +stealthy-require@^1.1.0: + version "1.1.1" + resolved "https://registry.yarnpkg.com/stealthy-require/-/stealthy-require-1.1.1.tgz#35b09875b4ff49f26a777e509b3090a3226bf24b" + +stream-shift@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/stream-shift/-/stream-shift-1.0.0.tgz#d5c752825e5367e786f78e18e445ea223a155952" + +stream-to-pull-stream@^1.7.1: + version "1.7.2" + resolved "https://registry.yarnpkg.com/stream-to-pull-stream/-/stream-to-pull-stream-1.7.2.tgz#757609ae1cebd33c7432d4afbe31ff78650b9dde" + dependencies: + looper "^3.0.0" + pull-stream "^3.2.3" + +string-length@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/string-length/-/string-length-2.0.0.tgz#d40dbb686a3ace960c1cffca562bf2c45f8363ed" + dependencies: + astral-regex "^1.0.0" + strip-ansi "^4.0.0" + +string-width@^1.0.1, string-width@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/string-width/-/string-width-1.0.2.tgz#118bdf5b8cdc51a2a7e70d211e07e2b0b9b107d3" + dependencies: + code-point-at "^1.0.0" + is-fullwidth-code-point "^1.0.0" + strip-ansi "^3.0.0" + +string-width@^2.0.0, string-width@^2.1.0, string-width@^2.1.1: + version "2.1.1" + resolved "https://registry.yarnpkg.com/string-width/-/string-width-2.1.1.tgz#ab93f27a8dc13d28cac815c462143a6d9012ae9e" + dependencies: + is-fullwidth-code-point "^2.0.0" + strip-ansi "^4.0.0" + +string_decoder@~0.10.x: + version "0.10.31" + resolved 
"https://registry.yarnpkg.com/string_decoder/-/string_decoder-0.10.31.tgz#62e203bc41766c6c28c9fc84301dab1c5310fa94" + +string_decoder@~1.0.3: + version "1.0.3" + resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-1.0.3.tgz#0fc67d7c141825de94282dd536bec6b9bce860ab" + dependencies: + safe-buffer "~5.1.0" + +stringstream@~0.0.4, stringstream@~0.0.5: + version "0.0.5" + resolved "https://registry.yarnpkg.com/stringstream/-/stringstream-0.0.5.tgz#4e484cd4de5a0bbbee18e46307710a8a81621878" + +strip-ansi@^3.0.0, strip-ansi@^3.0.1: + version "3.0.1" + resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-3.0.1.tgz#6a385fb8853d952d5ff05d0e8aaf94278dc63dcf" + dependencies: + ansi-regex "^2.0.0" + +strip-ansi@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-4.0.0.tgz#a8479022eb1ac368a871389b635262c505ee368f" + dependencies: + ansi-regex "^3.0.0" + +strip-bom@3.0.0, strip-bom@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/strip-bom/-/strip-bom-3.0.0.tgz#2334c18e9c759f7bdd56fdef7e9ae3d588e68ed3" + +strip-bom@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/strip-bom/-/strip-bom-2.0.0.tgz#6219a85616520491f35788bdbf1447a99c7e6b0e" + dependencies: + is-utf8 "^0.2.0" + +strip-eof@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/strip-eof/-/strip-eof-1.0.0.tgz#bb43ff5598a6eb05d89b59fcd129c983313606bf" + +strip-indent@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/strip-indent/-/strip-indent-2.0.0.tgz#5ef8db295d01e6ed6cbf7aab96998d7822527b68" + +strip-json-comments@~2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/strip-json-comments/-/strip-json-comments-2.0.1.tgz#3c531942e908c2697c0ec344858c286c7ca0a60a" + +struct-fu@^1.0.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/struct-fu/-/struct-fu-1.2.0.tgz#a40b9eb60a41bb341228cff125fde4887daa85ac" + +supports-color@^2.0.0: + version "2.0.0" + resolved 
"https://registry.yarnpkg.com/supports-color/-/supports-color-2.0.0.tgz#535d045ce6b6363fa40117084629995e9df324c7" + +supports-color@^3.1.2: + version "3.2.3" + resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-3.2.3.tgz#65ac0504b3954171d8a64946b2ae3cbb8a5f54f6" + dependencies: + has-flag "^1.0.0" + +supports-color@^4.0.0: + version "4.5.0" + resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-4.5.0.tgz#be7a0de484dec5c5cddf8b3d59125044912f635b" + dependencies: + has-flag "^2.0.0" + +symbol-tree@^3.2.2: + version "3.2.2" + resolved "https://registry.yarnpkg.com/symbol-tree/-/symbol-tree-3.2.2.tgz#ae27db38f660a7ae2e1c3b7d1bc290819b8519e6" + +table@^4.0.1: + version "4.0.2" + resolved "https://registry.yarnpkg.com/table/-/table-4.0.2.tgz#a33447375391e766ad34d3486e6e2aedc84d2e36" + dependencies: + ajv "^5.2.3" + ajv-keywords "^2.1.0" + chalk "^2.1.0" + lodash "^4.17.4" + slice-ansi "1.0.0" + string-width "^2.1.1" + +tar-fs@^1.13.0: + version "1.16.0" + resolved "https://registry.yarnpkg.com/tar-fs/-/tar-fs-1.16.0.tgz#e877a25acbcc51d8c790da1c57c9cf439817b896" + dependencies: + chownr "^1.0.1" + mkdirp "^0.5.1" + pump "^1.0.0" + tar-stream "^1.1.2" + +tar-pack@^3.4.0: + version "3.4.1" + resolved "https://registry.yarnpkg.com/tar-pack/-/tar-pack-3.4.1.tgz#e1dbc03a9b9d3ba07e896ad027317eb679a10a1f" + dependencies: + debug "^2.2.0" + fstream "^1.0.10" + fstream-ignore "^1.0.5" + once "^1.3.3" + readable-stream "^2.1.4" + rimraf "^2.5.1" + tar "^2.2.1" + uid-number "^0.0.6" + +tar-stream@^1.1.2, tar-stream@^1.5.0, tar-stream@^1.5.5: + version "1.5.5" + resolved "https://registry.yarnpkg.com/tar-stream/-/tar-stream-1.5.5.tgz#5cad84779f45c83b1f2508d96b09d88c7218af55" + dependencies: + bl "^1.0.0" + end-of-stream "^1.0.0" + readable-stream "^2.0.0" + xtend "^4.0.0" + +tar@^2.2.1: + version "2.2.1" + resolved "https://registry.yarnpkg.com/tar/-/tar-2.2.1.tgz#8e4d2a256c0e2185c6b18ad694aec968b83cb1d1" + dependencies: + block-stream "*" + 
fstream "^1.0.2" + inherits "2" + +test-exclude@^4.1.1: + version "4.1.1" + resolved "https://registry.yarnpkg.com/test-exclude/-/test-exclude-4.1.1.tgz#4d84964b0966b0087ecc334a2ce002d3d9341e26" + dependencies: + arrify "^1.0.1" + micromatch "^2.3.11" + object-assign "^4.1.0" + read-pkg-up "^1.0.1" + require-main-filename "^1.0.1" + +text-table@~0.2.0: + version "0.2.0" + resolved "https://registry.yarnpkg.com/text-table/-/text-table-0.2.0.tgz#7f5ee823ae805207c00af2df4a84ec3fcfa570b4" + +throat@^4.0.0: + version "4.1.0" + resolved "https://registry.yarnpkg.com/throat/-/throat-4.1.0.tgz#89037cbc92c56ab18926e6ba4cbb200e15672a6a" + +through2@^2.0.0, through2@^2.0.2, through2@^2.0.3: + version "2.0.3" + resolved "https://registry.yarnpkg.com/through2/-/through2-2.0.3.tgz#0004569b37c7c74ba39c43f3ced78d1ad94140be" + dependencies: + readable-stream "^2.1.5" + xtend "~4.0.1" + +through@^2.3.6: + version "2.3.8" + resolved "https://registry.yarnpkg.com/through/-/through-2.3.8.tgz#0dd4c9ffaabc357960b1b724115d7e0e86a2e1f5" + +thunkify@^2.1.2: + version "2.1.2" + resolved "https://registry.yarnpkg.com/thunkify/-/thunkify-2.1.2.tgz#faa0e9d230c51acc95ca13a361ac05ca7e04553d" + +tmp@^0.0.33: + version "0.0.33" + resolved "https://registry.yarnpkg.com/tmp/-/tmp-0.0.33.tgz#6d34335889768d21b2bcda0aa277ced3b1bfadf9" + dependencies: + os-tmpdir "~1.0.2" + +tmpl@1.0.x: + version "1.0.4" + resolved "https://registry.yarnpkg.com/tmpl/-/tmpl-1.0.4.tgz#23640dd7b42d00433911140820e5cf440e521dd1" + +to-fast-properties@^1.0.3: + version "1.0.3" + resolved "https://registry.yarnpkg.com/to-fast-properties/-/to-fast-properties-1.0.3.tgz#b83571fa4d8c25b82e231b06e3a3055de4ca1a47" + +to-fast-properties@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/to-fast-properties/-/to-fast-properties-2.0.0.tgz#dc5e698cbd079265bc73e0377681a4e4e83f616e" + +to-object-path@^0.3.0: + version "0.3.0" + resolved 
"https://registry.yarnpkg.com/to-object-path/-/to-object-path-0.3.0.tgz#297588b7b0e7e0ac08e04e672f85c1f4999e17af" + dependencies: + kind-of "^3.0.2" + +to-regex-range@^2.1.0: + version "2.1.1" + resolved "https://registry.yarnpkg.com/to-regex-range/-/to-regex-range-2.1.1.tgz#7c80c17b9dfebe599e27367e0d4dd5590141db38" + dependencies: + is-number "^3.0.0" + repeat-string "^1.6.1" + +to-regex@^3.0.1: + version "3.0.1" + resolved "https://registry.yarnpkg.com/to-regex/-/to-regex-3.0.1.tgz#15358bee4a2c83bd76377ba1dc049d0f18837aae" + dependencies: + define-property "^0.2.5" + extend-shallow "^2.0.1" + regex-not "^1.0.0" + +token-stream@0.0.1: + version "0.0.1" + resolved "https://registry.yarnpkg.com/token-stream/-/token-stream-0.0.1.tgz#ceeefc717a76c4316f126d0b9dbaa55d7e7df01a" + +tough-cookie@>=2.3.3, tough-cookie@^2.3.3, tough-cookie@~2.3.0, tough-cookie@~2.3.3: + version "2.3.3" + resolved "https://registry.yarnpkg.com/tough-cookie/-/tough-cookie-2.3.3.tgz#0b618a5565b6dea90bf3425d04d55edc475a7561" + dependencies: + punycode "^1.4.1" + +tr46@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/tr46/-/tr46-1.0.1.tgz#a8b13fd6bfd2489519674ccde55ba3693b706d09" + dependencies: + punycode "^2.1.0" + +"traverse@>=0.3.0 <0.4": + version "0.3.9" + resolved "https://registry.yarnpkg.com/traverse/-/traverse-0.3.9.tgz#717b8f220cc0bb7b44e40514c22b2e8bbc70d8b9" + +trim-right@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/trim-right/-/trim-right-1.0.1.tgz#cb2e1203067e0c8de1f614094b9fe45704ea6003" + +tunnel-agent@^0.6.0: + version "0.6.0" + resolved "https://registry.yarnpkg.com/tunnel-agent/-/tunnel-agent-0.6.0.tgz#27a5dea06b36b04a0a9966774b290868f0fc40fd" + dependencies: + safe-buffer "^5.0.1" + +tweetnacl@^0.14.3, tweetnacl@~0.14.0: + version "0.14.5" + resolved "https://registry.yarnpkg.com/tweetnacl/-/tweetnacl-0.14.5.tgz#5ae68177f192d4456269d108afa93ff8743f4f64" + +typ@~0.6.1: + version "0.6.3" + resolved 
"https://registry.yarnpkg.com/typ/-/typ-0.6.3.tgz#9fbe66d8e4131e127c0d782daa456fd5c6a5ded3" + +type-check@~0.3.2: + version "0.3.2" + resolved "https://registry.yarnpkg.com/type-check/-/type-check-0.3.2.tgz#5884cab512cf1d355e3fb784f30804b2b520db72" + dependencies: + prelude-ls "~1.1.2" + +type-is@~1.6.15: + version "1.6.15" + resolved "https://registry.yarnpkg.com/type-is/-/type-is-1.6.15.tgz#cab10fb4909e441c82842eafe1ad646c81804410" + dependencies: + media-typer "0.3.0" + mime-types "~2.1.15" + +typedarray@^0.0.6: + version "0.0.6" + resolved "https://registry.yarnpkg.com/typedarray/-/typedarray-0.0.6.tgz#867ac74e3864187b1d3d47d996a78ec5c8830777" + +typewise-core@^1.2, typewise-core@^1.2.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/typewise-core/-/typewise-core-1.2.0.tgz#97eb91805c7f55d2f941748fa50d315d991ef195" + +typewise@^1.0.3: + version "1.0.3" + resolved "https://registry.yarnpkg.com/typewise/-/typewise-1.0.3.tgz#1067936540af97937cc5dcf9922486e9fa284651" + dependencies: + typewise-core "^1.2.0" + +typewiselite@~1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/typewiselite/-/typewiselite-1.0.0.tgz#c8882fa1bb1092c06005a97f34ef5c8508e3664e" + +uglify-js@^2.6, uglify-js@^2.6.1: + version "2.8.29" + resolved "https://registry.yarnpkg.com/uglify-js/-/uglify-js-2.8.29.tgz#29c5733148057bb4e1f75df35b7a9cb72e6a59dd" + dependencies: + source-map "~0.5.1" + yargs "~3.10.0" + optionalDependencies: + uglify-to-browserify "~1.0.0" + +uglify-to-browserify@~1.0.0: + version "1.0.2" + resolved "https://registry.yarnpkg.com/uglify-to-browserify/-/uglify-to-browserify-1.0.2.tgz#6e0924d6bda6b5afe349e39a6d632850a0f882b7" + +uid-number@^0.0.6: + version "0.0.6" + resolved "https://registry.yarnpkg.com/uid-number/-/uid-number-0.0.6.tgz#0ea10e8035e8eb5b8e4449f06da1c730663baa81" + +uid-safe@~2.1.5: + version "2.1.5" + resolved "https://registry.yarnpkg.com/uid-safe/-/uid-safe-2.1.5.tgz#2b3d5c7240e8fc2e58f8aa269e5ee49c0857bd3a" + dependencies: + 
random-bytes "~1.0.0" + +ultron@~1.1.0: + version "1.1.1" + resolved "https://registry.yarnpkg.com/ultron/-/ultron-1.1.1.tgz#9fe1536a10a664a65266a1e3ccf85fd36302bc9c" + +union-value@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/union-value/-/union-value-1.0.0.tgz#5c71c34cb5bad5dcebe3ea0cd08207ba5aa1aea4" + dependencies: + arr-union "^3.1.0" + get-value "^2.0.6" + is-extendable "^0.1.1" + set-value "^0.4.3" + +universalify@^0.1.0: + version "0.1.1" + resolved "https://registry.yarnpkg.com/universalify/-/universalify-0.1.1.tgz#fa71badd4437af4c148841e3b3b165f9e9e590b7" + +unpipe@1.0.0, unpipe@~1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/unpipe/-/unpipe-1.0.0.tgz#b2bf4ee8514aae6165b4817829d21b2ef49904ec" + +unset-value@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/unset-value/-/unset-value-1.0.0.tgz#8376873f7d2335179ffb1e6fc3a8ed0dfc8ab559" + dependencies: + has-value "^0.3.1" + isobject "^3.0.0" + +unzip-response@^1.0.0: + version "1.0.2" + resolved "https://registry.yarnpkg.com/unzip-response/-/unzip-response-1.0.2.tgz#b984f0877fc0a89c2c773cc1ef7b5b232b5b06fe" + +urix@^0.1.0: + version "0.1.0" + resolved "https://registry.yarnpkg.com/urix/-/urix-0.1.0.tgz#da937f7a62e21fec1fd18d49b35c2935067a6c72" + +use@^2.0.0: + version "2.0.2" + resolved "https://registry.yarnpkg.com/use/-/use-2.0.2.tgz#ae28a0d72f93bf22422a18a2e379993112dec8e8" + dependencies: + define-property "^0.2.5" + isobject "^3.0.0" + lazy-cache "^2.0.2" + +user-home@^1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/user-home/-/user-home-1.1.1.tgz#2b5be23a32b63a7c9deb8d0f28d485724a3df190" + +util-deprecate@^1.0.2, util-deprecate@~1.0.1: + version "1.0.2" + resolved "https://registry.yarnpkg.com/util-deprecate/-/util-deprecate-1.0.2.tgz#450d4dc9fa70de732762fbd2d4a28981419a0ccf" + +util.promisify@^1.0.0: + version "1.0.0" + resolved 
"https://registry.yarnpkg.com/util.promisify/-/util.promisify-1.0.0.tgz#440f7165a459c9a16dc145eb8e72f35687097030" + dependencies: + define-properties "^1.1.2" + object.getownpropertydescriptors "^2.0.3" + +utils-merge@1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/utils-merge/-/utils-merge-1.0.1.tgz#9f95710f50a267947b2ccc124741c1028427e713" + +uuid@^3.0.0, uuid@^3.0.1, uuid@^3.1.0: + version "3.2.1" + resolved "https://registry.yarnpkg.com/uuid/-/uuid-3.2.1.tgz#12c528bb9d58d0b9265d9a2f6f0fe8be17ff1f14" + +v8flags@^2.1.1: + version "2.1.1" + resolved "https://registry.yarnpkg.com/v8flags/-/v8flags-2.1.1.tgz#aab1a1fa30d45f88dd321148875ac02c0b55e5b4" + dependencies: + user-home "^1.1.1" + +validate-npm-package-license@^3.0.1: + version "3.0.1" + resolved "https://registry.yarnpkg.com/validate-npm-package-license/-/validate-npm-package-license-3.0.1.tgz#2804babe712ad3379459acfbe24746ab2c303fbc" + dependencies: + spdx-correct "~1.0.0" + spdx-expression-parse "~1.0.0" + +value-matcher@^0.0.0: + version "0.0.0" + resolved "https://registry.yarnpkg.com/value-matcher/-/value-matcher-0.0.0.tgz#c0caf87dc3998a68ea56b31fd1916adefe39f7be" + dependencies: + "@babel/polyfill" "^7.0.0-beta.36" + +varint@^5.0.0, varint@~5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/varint/-/varint-5.0.0.tgz#d826b89f7490732fabc0c0ed693ed475dcb29ebf" + +vary@~1.1.2: + version "1.1.2" + resolved "https://registry.yarnpkg.com/vary/-/vary-1.1.2.tgz#2299f02c6ded30d4a5961b0b9f74524a18f634fc" + +verror@1.10.0: + version "1.10.0" + resolved "https://registry.yarnpkg.com/verror/-/verror-1.10.0.tgz#3a105ca17053af55d6e270c1f8288682e18da400" + dependencies: + assert-plus "^1.0.0" + core-util-is "1.0.2" + extsprintf "^1.2.0" + +void-elements@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/void-elements/-/void-elements-2.0.1.tgz#c066afb582bb1cb4128d60ea92392e94d5e9dbec" + +w3c-hr-time@^1.0.1: + version "1.0.1" + resolved 
"https://registry.yarnpkg.com/w3c-hr-time/-/w3c-hr-time-1.0.1.tgz#82ac2bff63d950ea9e3189a58a65625fedf19045" + dependencies: + browser-process-hrtime "^0.1.2" + +walker@~1.0.5: + version "1.0.7" + resolved "https://registry.yarnpkg.com/walker/-/walker-1.0.7.tgz#2f7f9b8fd10d677262b18a884e28d19618e028fb" + dependencies: + makeerror "1.0.x" + +watch@~0.18.0: + version "0.18.0" + resolved "https://registry.yarnpkg.com/watch/-/watch-0.18.0.tgz#28095476c6df7c90c963138990c0a5423eb4b986" + dependencies: + exec-sh "^0.2.0" + minimist "^1.2.0" + +webidl-conversions@^4.0.1, webidl-conversions@^4.0.2: + version "4.0.2" + resolved "https://registry.yarnpkg.com/webidl-conversions/-/webidl-conversions-4.0.2.tgz#a855980b1f0b6b359ba1d5d9fb39ae941faa63ad" + +whatwg-encoding@^1.0.1, whatwg-encoding@^1.0.3: + version "1.0.3" + resolved "https://registry.yarnpkg.com/whatwg-encoding/-/whatwg-encoding-1.0.3.tgz#57c235bc8657e914d24e1a397d3c82daee0a6ba3" + dependencies: + iconv-lite "0.4.19" + +whatwg-url@^6.4.0: + version "6.4.0" + resolved "https://registry.yarnpkg.com/whatwg-url/-/whatwg-url-6.4.0.tgz#08fdf2b9e872783a7a1f6216260a1d66cc722e08" + dependencies: + lodash.sortby "^4.7.0" + tr46 "^1.0.0" + webidl-conversions "^4.0.1" + +which-module@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/which-module/-/which-module-1.0.0.tgz#bba63ca861948994ff307736089e3b96026c2a4f" + +which-module@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/which-module/-/which-module-2.0.0.tgz#d9ef07dce77b9902b8a3a8fa4b31c3e3f7e6e87a" + +which@^1.2.12, which@^1.2.9, which@^1.3.0: + version "1.3.0" + resolved "https://registry.yarnpkg.com/which/-/which-1.3.0.tgz#ff04bdfc010ee547d780bec38e1ac1c2777d253a" + dependencies: + isexe "^2.0.0" + +wide-align@^1.1.0: + version "1.1.2" + resolved "https://registry.yarnpkg.com/wide-align/-/wide-align-1.1.2.tgz#571e0f1b0604636ebc0dfc21b0339bbe31341710" + dependencies: + string-width "^1.0.2" + +window-size@0.1.0: + version "0.1.0" + 
resolved "https://registry.yarnpkg.com/window-size/-/window-size-0.1.0.tgz#5438cd2ea93b202efa3a19fe8887aee7c94f9c9d" + +with@^5.0.0: + version "5.1.1" + resolved "https://registry.yarnpkg.com/with/-/with-5.1.1.tgz#fa4daa92daf32c4ea94ed453c81f04686b575dfe" + dependencies: + acorn "^3.1.0" + acorn-globals "^3.0.0" + +wordwrap@0.0.2: + version "0.0.2" + resolved "https://registry.yarnpkg.com/wordwrap/-/wordwrap-0.0.2.tgz#b79669bb42ecb409f83d583cad52ca17eaa1643f" + +wordwrap@~0.0.2: + version "0.0.3" + resolved "https://registry.yarnpkg.com/wordwrap/-/wordwrap-0.0.3.tgz#a3d5da6cd5c0bc0008d37234bbaf1bed63059107" + +wordwrap@~1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/wordwrap/-/wordwrap-1.0.0.tgz#27584810891456a4171c8d0226441ade90cbcaeb" + +wrap-ansi@^2.0.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-2.1.0.tgz#d8fc3d284dd05794fe84973caecdd1cf824fdd85" + dependencies: + string-width "^1.0.1" + strip-ansi "^3.0.1" + +wrappy@1: + version "1.0.2" + resolved "https://registry.yarnpkg.com/wrappy/-/wrappy-1.0.2.tgz#b5243d8f3ec1aa35f1364605bc0d1036e30ab69f" + +write-file-atomic@^2.1.0: + version "2.3.0" + resolved "https://registry.yarnpkg.com/write-file-atomic/-/write-file-atomic-2.3.0.tgz#1ff61575c2e2a4e8e510d6fa4e243cce183999ab" + dependencies: + graceful-fs "^4.1.11" + imurmurhash "^0.1.4" + signal-exit "^3.0.2" + +write@^0.2.1: + version "0.2.1" + resolved "https://registry.yarnpkg.com/write/-/write-0.2.1.tgz#5fc03828e264cea3fe91455476f7a3c566cb0757" + dependencies: + mkdirp "^0.5.1" + +ws@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/ws/-/ws-4.0.0.tgz#bfe1da4c08eeb9780b986e0e4d10eccd7345999f" + dependencies: + async-limiter "~1.0.0" + safe-buffer "~5.1.0" + ultron "~1.1.0" + +x-xss-protection@1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/x-xss-protection/-/x-xss-protection-1.0.0.tgz#898afb93869b24661cf9c52f9ee8db8ed0764dd9" + +xdg-basedir@^3.0.0: + version "3.0.0" + 
resolved "https://registry.yarnpkg.com/xdg-basedir/-/xdg-basedir-3.0.0.tgz#496b2cc109eca8dbacfe2dc72b603c17c5870ad4" + +xen-api@^0.16.4: + version "0.16.4" + resolved "https://registry.yarnpkg.com/xen-api/-/xen-api-0.16.4.tgz#a01111688fadb70464d20aa5d9c4baac6d1f65e5" + dependencies: + babel-polyfill "^6.23.0" + blocked "^1.2.1" + debug "^3.1.0" + event-to-promise "^0.8.0" + exec-promise "^0.7.0" + http-request-plus "^0.5.0" + iterable-backoff "^0.0.0" + json-rpc-protocol "^0.11.2" + kindof "^2.0.0" + lodash "^4.17.4" + make-error "^1.3.0" + minimist "^1.2.0" + ms "^2.1.1" + promise-toolbox "^0.9.5" + pw "0.0.4" + xmlrpc "^1.3.2" + xo-collection "^0.4.1" + +xml-name-validator@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/xml-name-validator/-/xml-name-validator-3.0.0.tgz#6ae73e06de4d8c6e47f9fb181f78d648ad457c6a" + +xml2js@^0.4.19: + version "0.4.19" + resolved "https://registry.yarnpkg.com/xml2js/-/xml2js-0.4.19.tgz#686c20f213209e94abf0d1bcf1efaa291c7827a7" + dependencies: + sax ">=0.6.0" + xmlbuilder "~9.0.1" + +xmlbuilder@8.2.x: + version "8.2.2" + resolved "https://registry.yarnpkg.com/xmlbuilder/-/xmlbuilder-8.2.2.tgz#69248673410b4ba42e1a6136551d2922335aa773" + +xmlbuilder@~9.0.1: + version "9.0.4" + resolved "https://registry.yarnpkg.com/xmlbuilder/-/xmlbuilder-9.0.4.tgz#519cb4ca686d005a8420d3496f3f0caeecca580f" + +xmlrpc@^1.3.2: + version "1.3.2" + resolved "https://registry.yarnpkg.com/xmlrpc/-/xmlrpc-1.3.2.tgz#26b2ea347848d028aac7e7514b5351976de3e83d" + dependencies: + sax "1.2.x" + xmlbuilder "8.2.x" + +xo-acl-resolver@^0.2.3: + version "0.2.3" + resolved "https://registry.yarnpkg.com/xo-acl-resolver/-/xo-acl-resolver-0.2.3.tgz#693f4181727379be0d969f7c22d660f3bddf935a" + +xo-collection@^0.4.1: + version "0.4.1" + resolved "https://registry.yarnpkg.com/xo-collection/-/xo-collection-0.4.1.tgz#e3fb7c42dd79950a71db6669db2e53b73f80e351" + dependencies: + babel-runtime "^6.18.0" + kindof "^2.0.0" + lodash "^3.10.1" + make-error "^1.0.2" + 
+xo-common@^0.1.1: + version "0.1.1" + resolved "https://registry.yarnpkg.com/xo-common/-/xo-common-0.1.1.tgz#bdad9ea7926c1f27d8fdaecc92d672854c911815" + dependencies: + babel-runtime "^6.18.0" + lodash "^4.16.6" + make-error "^1.2.1" + +xo-remote-parser@^0.3: + version "0.3.0" + resolved "https://registry.yarnpkg.com/xo-remote-parser/-/xo-remote-parser-0.3.0.tgz#4cbf8391151eb9820a7df8ece64ba79ea44fd070" + dependencies: + lodash "^4.13.1" + +xo-vmdk-to-vhd@0.0.12: + version "0.0.12" + resolved "https://registry.yarnpkg.com/xo-vmdk-to-vhd/-/xo-vmdk-to-vhd-0.0.12.tgz#56d12394c94cbfae08e816f7513dd90e3a928f8f" + dependencies: + babel-runtime "^6.11.6" + child-process-promise "^2.0.3" + deflate-js "^0.2.3" + fs-promise "^0.4.1" + pipette "^0.9.3" + +xok@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/xok/-/xok-1.0.0.tgz#1b4e1a2dc8e593bd8907dc4cfd6a1fe6e4254899" + +xregexp@2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/xregexp/-/xregexp-2.0.0.tgz#52a63e56ca0b84a7f3a5f3d61872f126ad7a5943" + +xtend@4.0.1, xtend@^4.0.0, xtend@~4.0.0, xtend@~4.0.1: + version "4.0.1" + resolved "https://registry.yarnpkg.com/xtend/-/xtend-4.0.1.tgz#a5c6d532be656e23db820efb943a1f04998d63af" + +xtend@~3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/xtend/-/xtend-3.0.0.tgz#5cce7407baf642cba7becda568111c493f59665a" + +y18n@^3.2.1: + version "3.2.1" + resolved "https://registry.yarnpkg.com/y18n/-/y18n-3.2.1.tgz#6d15fba884c08679c0d77e88e7759e811e07fa41" + +yallist@^2.1.2: + version "2.1.2" + resolved "https://registry.yarnpkg.com/yallist/-/yallist-2.1.2.tgz#1c11f9218f076089a47dd512f93c6699a6a81d52" + +yargs-parser@^4.2.0: + version "4.2.1" + resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-4.2.1.tgz#29cceac0dc4f03c6c87b4a9f217dd18c9f74871c" + dependencies: + camelcase "^3.0.0" + +yargs-parser@^8.1.0: + version "8.1.0" + resolved 
"https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-8.1.0.tgz#f1376a33b6629a5d063782944da732631e966950" + dependencies: + camelcase "^4.1.0" + +yargs@^10.0.3: + version "10.1.2" + resolved "https://registry.yarnpkg.com/yargs/-/yargs-10.1.2.tgz#454d074c2b16a51a43e2fb7807e4f9de69ccb5c5" + dependencies: + cliui "^4.0.0" + decamelize "^1.1.1" + find-up "^2.1.0" + get-caller-file "^1.0.1" + os-locale "^2.0.0" + require-directory "^2.1.1" + require-main-filename "^1.0.1" + set-blocking "^2.0.0" + string-width "^2.0.0" + which-module "^2.0.0" + y18n "^3.2.1" + yargs-parser "^8.1.0" + +yargs@^6.6.0: + version "6.6.0" + resolved "https://registry.yarnpkg.com/yargs/-/yargs-6.6.0.tgz#782ec21ef403345f830a808ca3d513af56065208" + dependencies: + camelcase "^3.0.0" + cliui "^3.2.0" + decamelize "^1.1.1" + get-caller-file "^1.0.1" + os-locale "^1.4.0" + read-pkg-up "^1.0.1" + require-directory "^2.1.1" + require-main-filename "^1.0.1" + set-blocking "^2.0.0" + string-width "^1.0.2" + which-module "^1.0.0" + y18n "^3.2.1" + yargs-parser "^4.2.0" + +yargs@~3.10.0: + version "3.10.0" + resolved "https://registry.yarnpkg.com/yargs/-/yargs-3.10.0.tgz#f7ee7bd857dd7c1d2d38c0e74efbd681d1431fd1" + dependencies: + camelcase "^1.0.2" + cliui "^2.1.0" + decamelize "^1.0.0" + window-size "0.1.0" + +zip-stream@^1.2.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/zip-stream/-/zip-stream-1.2.0.tgz#a8bc45f4c1b49699c6b90198baacaacdbcd4ba04" + dependencies: + archiver-utils "^1.3.0" + compress-commons "^1.2.0" + lodash "^4.8.0" + readable-stream "^2.0.0"