Merge branch 'next-release'
commit 032c3fb856
@@ -1,5 +1,7 @@
# Xen Orchestra Server

XO-Server is part of [Xen Orchestra](https://github.com/vatesfr/xo), a web interface for XenServer or XAPI enabled hosts.

It contains all the logic of XO and handles:
coffeelint.json | 111
@@ -1,111 +0,0 @@
{
  "arrow_spacing": {
    "level": "warn"
  },
  "camel_case_classes": {
    "level": "error"
  },
  "coffeescript_error": {
    "level": "error"
  },
  "colon_assignment_spacing": {
    "level": "warn",
    "spacing": {
      "left": 0,
      "right": 1
    }
  },
  "cyclomatic_complexity": {
    "value": 10,
    "level": "warn"
  },
  "duplicate_key": {
    "level": "error"
  },
  "empty_constructor_needs_parens": {
    "level": "error"
  },
  "indentation": {
    "value": 2,
    "level": "error"
  },
  "line_endings": {
    "level": "error",
    "value": "unix"
  },
  "max_line_length": {
    "value": 80,
    "level": "warn",
    "limitComments": false
  },
  "missing_fat_arrows": {
    "level": "ignore"
  },
  "newlines_after_classes": {
    "value": 1,
    "level": "warn"
  },
  "no_backticks": {
    "level": "error"
  },
  "no_debugger": {
    "level": "warn"
  },
  "no_empty_functions": {
    "level": "ignore"
  },
  "no_empty_param_list": {
    "level": "warn"
  },
  "no_implicit_braces": {
    "level": "ignore",
    "strict": true
  },
  "no_implicit_parens": {
    "strict": true,
    "level": "ignore"
  },
  "no_implicit_returns": {
    "level": "warn",
    "module": "coffeelint-no-implicit-returns"
  },
  "no_interpolation_in_single_quotes": {
    "level": "ignore"
  },
  "no_plusplus": {
    "level": "ignore"
  },
  "no_stand_alone_at": {
    "level": "error"
  },
  "no_tabs": {
    "level": "error"
  },
  "no_throwing_strings": {
    "level": "error"
  },
  "no_trailing_semicolons": {
    "level": "error"
  },
  "no_trailing_whitespace": {
    "level": "error",
    "allowed_in_comments": false,
    "allowed_in_empty_lines": false
  },
  "no_unnecessary_double_quotes": {
    "level": "warn"
  },
  "no_unnecessary_fat_arrows": {
    "level": "warn"
  },
  "non_empty_constructor_needs_parens": {
    "level": "ignore"
  },
  "prefer_english_operator": {
    "level": "warn",
    "doubleNotLevel": "ignore"
  },
  "space_operators": {
    "level": "warn"
  }
}
@@ -53,6 +53,8 @@ gulp.task(function buildEs6 () {
      compact: PRODUCTION,
      comments: false,
      optional: [
        'es7.asyncFunctions',
        'es7.decorators',
        'runtime'
      ]
    }))
package.json | 33
@@ -1,7 +1,7 @@
{
  "name": "xo-server",
  "version": "3.9.2",
  "license": "AGPL3",
  "version": "3.9.1",
  "license": "AGPL-3.0",
  "description": "Server part of Xen-Orchestra",
  "keywords": [
    "xen",
@@ -28,28 +28,30 @@
    "url": "git://github.com/vatesfr/xo-server.git"
  },
  "dependencies": {
    "@julien-f/json-rpc": "^0.4.4",
    "@julien-f/unzip": "^0.2.1",
    "app-conf": "^0.3.4",
    "babel-runtime": "^5",
    "base64url": "1.0.4",
    "blocked": "^1.1.0",
    "bluebird": "^2.9.14",
    "compiled-accessors": "^0.2.0",
    "connect": "^3.3.5",
    "debug": "^2.1.3",
    "event-to-promise": "^0.3.2",
    "exec-promise": "^0.5.1",
    "fibers": "~1.0.5",
    "fs-promise": "^0.3.1",
    "got": "^3.2.0",
    "graceful-fs": "^3.0.6",
    "gulp-sourcemaps": "^1.5.1",
    "hashy": "~0.4.2",
    "http-server-plus": "^0.5.1",
    "human-format": "^0.3.0",
    "js-yaml": "^3.2.7",
    "json-rpc": "julien-f/js-json-rpc#v0.3.4",
    "lodash.assign": "^3.0.0",
    "lodash.bind": "^3.0.0",
    "lodash.clone": "^3.0.1",
    "lodash.difference": "^3.0.1",
    "lodash.difference": "^3.2.0",
    "lodash.endswith": "^3.0.2",
    "lodash.filter": "^3.1.0",
    "lodash.find": "^3.0.0",
    "lodash.findindex": "^3.0.0",
@@ -63,12 +65,14 @@
    "lodash.isstring": "^3.0.0",
    "lodash.keys": "^3.0.4",
    "lodash.map": "^3.0.0",
    "lodash.omit": "^3.1.0",
    "lodash.pick": "^3.0.0",
    "lodash.pluck": "^3.0.2",
    "lodash.result": "^3.0.0",
    "lodash.startswith": "^3.0.1",
    "make-error": "^1",
    "multikey-hash": "^1.0.1",
    "proxy-http-request": "0.0.2",
    "proxy-http-request": "0.1.0",
    "request": "^2.53.0",
    "require-tree": "~1.0.1",
    "schema-inspector": "^1.5.1",
@@ -76,30 +80,31 @@
    "source-map-support": "^0.2.10",
    "then-redis": "~1.3.0",
    "ws": "~0.7.1",
    "xen-api": "^0.3.0",
    "xen-api": "^0.5.4",
    "xml2js": "~0.4.6",
    "xmlrpc": "~1.3.0"
    "xo-collection": "^0.3.2"
  },
  "devDependencies": {
    "babel-standard": "*",
    "chai": "~2.1.2",
    "coffeelint-no-implicit-returns": "0.0.4",
    "gulp": "git://github.com/gulpjs/gulp#4.0",
    "gulp-babel": "^5",
    "gulp-coffee": "^2.3.1",
    "gulp-plumber": "^1.0.0",
    "gulp-sourcemaps": "^1.5.1",
    "gulp-watch": "^4.2.2",
    "in-publish": "^1.1.1",
    "mocha": "^2.2.1",
    "node-inspector": "^0.9.2",
    "sinon": "^1.14.1",
    "standard": "*"
    "node-inspector": "^0.10.1",
    "sinon": "^1.14.1"
  },
  "scripts": {
    "build": "gulp build --production",
    "dev": "gulp build",
    "lint": "babel-standard",
    "prepublish": "in-publish && npm run build || in-install",
    "start": "node bin/xo-server",
    "test": "standard && mocha 'dist/**/*.spec.js'"
    "test": "mocha 'dist/**/*.spec.js'"
  },
  "standard": {
    "ignore": [
@@ -1,7 +1,6 @@
# Example XO-Server configuration.

# This file is automatically looked for in the following places:
# - `./.xo-server.yaml` up to `/.xo-server.yaml`
# - `$HOME/.config/xo-server/config.yaml`
# - `/etc/xo-server/config.yaml`
#
@@ -91,6 +90,10 @@ http:
  mounts:
    #'/': '/path/to/xo-web/dist/'

  # List of proxied URLs (HTTP & WebSockets).
  proxies:
    # '/any/url': 'http://localhost:54722'

#=====================================================================

# Connection to the Redis server.
@ -1,457 +0,0 @@
|
||||
{EventEmitter: $EventEmitter} = require 'events'
|
||||
|
||||
$assign = require 'lodash.assign'
|
||||
$filter = require 'lodash.filter'
|
||||
$forEach = require 'lodash.foreach'
|
||||
$getKeys = require 'lodash.keys'
|
||||
$isArray = require 'lodash.isarray'
|
||||
$isEmpty = require 'lodash.isempty'
|
||||
$isFunction = require 'lodash.isfunction'
|
||||
$isObject = require 'lodash.isobject'
|
||||
$isString = require 'lodash.isstring'
|
||||
$map = require 'lodash.map'
|
||||
|
||||
{mapInPlace: $mapInPlace, wrap: $wrap} = require './utils'
|
||||
|
||||
#=====================================================================
|
||||
|
||||
class $MappedCollection extends $EventEmitter
|
||||
|
||||
# The dispatch function is called whenever a new item has to be
|
||||
# processed and returns the name of the rule to use.
|
||||
#
|
||||
# To change the way it is dispatched, just override this it.
|
||||
dispatch: ->
|
||||
(@genval and (@genval.rule ? @genval.type)) ? 'unknown'
|
||||
|
||||
# This function is called when an item has been dispatched to a
|
||||
# missing rule.
|
||||
#
|
||||
# The default behavior is to throw an error but you may instead
|
||||
# choose to create a rule:
|
||||
#
|
||||
# collection.missingRule = collection.rule
|
||||
missingRule: (name) ->
|
||||
throw new Error "undefined rule “#{name}”"
|
||||
|
||||
# This function is called when the new generator of an existing item has been
|
||||
# matched to a different rule.
|
||||
#
|
||||
# The default behavior is to throw an error as it usually indicates a bug but
|
||||
# you can ignore it.
|
||||
ruleConflict: (rule, item) ->
|
||||
throw new Error "the item “#{item.key}” was of rule “#{item.rule}” "+
|
||||
"but matches to “#{rule}”"
|
||||
|
||||
constructor: ->
|
||||
# Items are stored here indexed by key.
|
||||
#
|
||||
# The prototype of this object is set to `null` to avoid pollution
|
||||
# from enumerable properties of `Object.prototype` and the
|
||||
# performance hit of `hasOwnProperty o`.
|
||||
@_byKey = Object.create null
|
||||
|
||||
# Hooks are stored here indexed by moment.
|
||||
@_hooks = {
|
||||
beforeDispatch: []
|
||||
beforeUpdate: []
|
||||
beforeSave: []
|
||||
afterRule: []
|
||||
}
|
||||
|
||||
# Rules are stored here indexed by name.
|
||||
#
|
||||
# The prototype of this object is set to `null` to avoid pollution
|
||||
# from enumerable properties of `Object.prototype` and to be able
|
||||
# to use the `name of @_rules` syntax.
|
||||
@_rules = Object.create null
|
||||
|
||||
# Register a hook to run at a given point.
|
||||
#
|
||||
# A hook receives as parameter an event object with the following
|
||||
# properties:
|
||||
# - `preventDefault()`: prevents the next default action from
|
||||
# happening;
|
||||
# - `stopPropagation()`: prevents other hooks from being run.
|
||||
#
|
||||
# Note: if a hook throws an exception, `event.stopPropagation()`
|
||||
# then `event.preventDefault()` will be called and the exception
|
||||
# will be forwarded.
|
||||
#
|
||||
# # Item hook
|
||||
#
|
||||
# Valid items related moments are:
|
||||
# - beforeDispatch: even before the item has been dispatched;
|
||||
# - beforeUpdate: after the item has been dispatched but before
|
||||
# updating its value.
|
||||
# - beforeSave: after the item has been updated.
|
||||
#
|
||||
# An item hook is run in the context of the current item.
|
||||
#
|
||||
# # Rule hook
|
||||
#
|
||||
# Valid rules related moments are:
|
||||
# - afterRule: just after a new rule has been defined (even
|
||||
# singleton).
|
||||
#
|
||||
# An item hook is run in the context of the current rule.
|
||||
hook: (name, hook) ->
|
||||
# Allows a nicer syntax for CoffeeScript.
|
||||
if $isObject name
|
||||
# Extracts the name and the value from the first property of the
|
||||
# object.
|
||||
do ->
|
||||
object = name
|
||||
return for own name, hook of object
|
||||
|
||||
hooks = @_hooks[name]
|
||||
|
||||
@_assert(
|
||||
hooks?
|
||||
"invalid hook moment “#{name}”"
|
||||
)
|
||||
|
||||
hooks.push hook
|
||||
|
||||
# Register a new singleton rule.
|
||||
#
|
||||
# See the `rule()` method for more information.
|
||||
item: (name, definition) ->
|
||||
# Creates the corresponding rule.
|
||||
rule = @rule name, definition, true
|
||||
|
||||
# Creates the singleton.
|
||||
item = {
|
||||
rule: rule.name
|
||||
key: rule.key() # No context because there is not generator.
|
||||
val: undefined
|
||||
}
|
||||
@_updateItems [item], true
|
||||
|
||||
# Register a new rule.
|
||||
#
|
||||
# If the definition is a function, it will be run in the context of
|
||||
# an item-like object with the following properties:
|
||||
# - `key`: the definition for the key of this item;
|
||||
# - `val`: the definition for the value of this item.
|
||||
#
|
||||
# Warning: The definition function is run only once!
|
||||
rule: (name, definition, singleton = false) ->
|
||||
# Allows a nicer syntax for CoffeeScript.
|
||||
if $isObject name
|
||||
# Extracts the name and the definition from the first property
|
||||
# of the object.
|
||||
do ->
|
||||
object = name
|
||||
return for own name, definition of object
|
||||
|
||||
@_assert(
|
||||
name not of @_rules
|
||||
"the rule “#{name}” is already defined"
|
||||
)
|
||||
|
||||
# Extracts the rule definition.
|
||||
if $isFunction definition
|
||||
ctx = {
|
||||
name
|
||||
key: undefined
|
||||
data: undefined
|
||||
val: undefined
|
||||
singleton
|
||||
}
|
||||
definition.call ctx
|
||||
else
|
||||
ctx = {
|
||||
name
|
||||
key: definition?.key
|
||||
data: definition?.data
|
||||
val: definition?.val
|
||||
singleton
|
||||
}
|
||||
|
||||
# Runs the `afterRule` hook and returns if the registration has
|
||||
# been prevented.
|
||||
return unless @_runHook 'afterRule', ctx
|
||||
|
||||
{key, data, val} = ctx
|
||||
|
||||
# The default key.
|
||||
key ?= if singleton then -> name else -> @genkey
|
||||
|
||||
# The default value.
|
||||
val ?= -> @genval
|
||||
|
||||
# Makes sure `key` is a function for uniformity.
|
||||
key = $wrap key unless $isFunction key
|
||||
|
||||
# Register the new rule.
|
||||
@_rules[name] = {
|
||||
name
|
||||
key
|
||||
data
|
||||
val
|
||||
singleton
|
||||
}
|
||||
|
||||
#--------------------------------
|
||||
|
||||
get: (keys, ignoreMissingItems = false) ->
|
||||
if keys is undefined
|
||||
items = $map @_byKey, (item) -> item.val
|
||||
else
|
||||
items = @_fetchItems keys, ignoreMissingItems
|
||||
$mapInPlace items, (item) -> item.val
|
||||
|
||||
if $isString keys then items[0] else items
|
||||
|
||||
getRaw: (keys, ignoreMissingItems = false) ->
|
||||
if keys is undefined
|
||||
item for _, item of @_byKey
|
||||
else
|
||||
items = @_fetchItems keys, ignoreMissingItems
|
||||
|
||||
if $isString keys then items[0] else items
|
||||
|
||||
remove: (keys, ignoreMissingItems = false) ->
|
||||
@_removeItems (@_fetchItems keys, ignoreMissingItems)
|
||||
|
||||
removeWithPredicate: (predicate, thisArg) ->
|
||||
items = ($filter @_byKey, predicate, thisArg)
|
||||
@_removeItems items
|
||||
|
||||
set: (items, {add, update, remove} = {}) ->
|
||||
add = true unless add?
|
||||
update = true unless update?
|
||||
remove = false unless remove?
|
||||
|
||||
itemsToAdd = {}
|
||||
itemsToUpdate = {}
|
||||
|
||||
itemsToRemove = {}
|
||||
$assign itemsToRemove, @_byKey if remove
|
||||
|
||||
$forEach items, (genval, genkey) =>
|
||||
item = {
|
||||
rule: undefined
|
||||
key: undefined
|
||||
data: undefined
|
||||
val: undefined
|
||||
genkey
|
||||
genval
|
||||
}
|
||||
|
||||
return unless @_runHook 'beforeDispatch', item
|
||||
|
||||
# Searches for a rule to handle it.
|
||||
ruleName = @dispatch.call item
|
||||
rule = @_rules[ruleName]
|
||||
|
||||
unless rule?
|
||||
@missingRule ruleName
|
||||
|
||||
# If `missingRule()` has not created the rule, just skip this
|
||||
# item.
|
||||
rule = @_rules[ruleName]
|
||||
return unless rule?
|
||||
|
||||
# Checks if this is a singleton.
|
||||
@_assert(
|
||||
not rule.singleton
|
||||
"cannot add items to singleton rule “#{rule.name}”"
|
||||
)
|
||||
|
||||
# Computes its key.
|
||||
key = rule.key.call item
|
||||
|
||||
@_assert(
|
||||
$isString key
|
||||
"the key “#{key}” is not a string"
|
||||
)
|
||||
|
||||
# Updates known values.
|
||||
item.rule = rule.name
|
||||
item.key = key
|
||||
|
||||
if key of @_byKey
|
||||
# Marks this item as not to be removed.
|
||||
delete itemsToRemove[key]
|
||||
|
||||
if update
|
||||
# Fetches the existing entry.
|
||||
prev = @_byKey[key]
|
||||
|
||||
# Checks if there is a conflict in rules.
|
||||
unless item.rule is prev.rule
|
||||
@ruleConflict item.rule, prev
|
||||
item.prevRule = prev.rule
|
||||
else
|
||||
delete item.prevRule
|
||||
|
||||
# Gets its previous data/value.
|
||||
item.data = prev.data
|
||||
item.val = prev.val
|
||||
|
||||
# Registers the item to be updated.
|
||||
itemsToUpdate[key] = item
|
||||
|
||||
# Note: an item will be updated only once per `set()` and
|
||||
# only the last generator will be used.
|
||||
else
|
||||
if add
|
||||
|
||||
# Registers the item to be added.
|
||||
itemsToAdd[key] = item
|
||||
return
|
||||
|
||||
# Adds items.
|
||||
@_updateItems itemsToAdd, true
|
||||
|
||||
# Updates items.
|
||||
@_updateItems itemsToUpdate
|
||||
|
||||
# Removes any items not seen (iff `remove` is true).
|
||||
@_removeItems itemsToRemove
|
||||
|
||||
# Forces items to update their value.
|
||||
touch: (keys) ->
|
||||
@_updateItems (@_fetchItems keys, true)
|
||||
|
||||
#--------------------------------
|
||||
|
||||
_assert: (cond, message) ->
|
||||
throw new Error message unless cond
|
||||
|
||||
# Emits item related event.
|
||||
_emitEvent: (event, items) ->
|
||||
getRule = if event is 'exit'
|
||||
(item) -> item.prevRule or item.rule
|
||||
else
|
||||
(item) -> item.rule
|
||||
|
||||
byRule = Object.create null
|
||||
|
||||
# One per item.
|
||||
$forEach items, (item) =>
|
||||
@emit "key=#{item.key}", event, item
|
||||
|
||||
(byRule[getRule item] ?= []).push item
|
||||
|
||||
return
|
||||
|
||||
# One per rule.
|
||||
@emit "rule=#{rule}", event, byRule[rule] for rule of byRule
|
||||
|
||||
# One for everything.
|
||||
@emit 'any', event, items
|
||||
|
||||
_fetchItems: (keys, ignoreMissingItems = false) ->
|
||||
unless $isArray keys
|
||||
keys = if $isObject keys then $getKeys keys else [keys]
|
||||
|
||||
items = []
|
||||
for key in keys
|
||||
item = @_byKey[key]
|
||||
if item?
|
||||
items.push item
|
||||
else
|
||||
@_assert(
|
||||
ignoreMissingItems
|
||||
"no item with key “#{key}”"
|
||||
)
|
||||
items
|
||||
|
||||
_removeItems: (items) ->
|
||||
return if $isEmpty items
|
||||
|
||||
$forEach items, (item) =>
|
||||
delete @_byKey[item.key]
|
||||
return
|
||||
|
||||
@_emitEvent 'exit', items
|
||||
|
||||
|
||||
# Runs hooks for the moment `name` with the given context and
|
||||
# returns false if the default action has been prevented.
|
||||
_runHook: (name, ctx) ->
|
||||
hooks = @_hooks[name]
|
||||
|
||||
# If no hooks, nothing to do.
|
||||
return true unless hooks? and (n = hooks.length) isnt 0
|
||||
|
||||
# Flags controlling the run.
|
||||
notStopped = true
|
||||
actionNotPrevented = true
|
||||
|
||||
# Creates the event object.
|
||||
event = {
|
||||
stopPropagation: -> notStopped = false
|
||||
|
||||
# TODO: Should `preventDefault()` imply `stopPropagation()`?
|
||||
preventDefault: -> actionNotPrevented = false
|
||||
}
|
||||
|
||||
i = 0
|
||||
while notStopped and i < n
|
||||
hooks[i++].call ctx, event
|
||||
|
||||
# TODO: Is exception handling necessary to have the wanted
|
||||
# behavior?
|
||||
|
||||
return actionNotPrevented
|
||||
|
||||
_updateItems: (items, areNew) ->
|
||||
return if $isEmpty items
|
||||
|
||||
# An update is similar to an exit followed by an enter.
|
||||
@_removeItems items unless areNew
|
||||
|
||||
$forEach items, (item) =>
|
||||
return unless @_runHook 'beforeUpdate', item
|
||||
|
||||
{rule: ruleName} = item
|
||||
|
||||
# Computes its value.
|
||||
do =>
|
||||
# Item is not passed directly to function to avoid direct
|
||||
# modification.
|
||||
#
|
||||
# This is not a true security but better than nothing.
|
||||
proxy = Object.create item
|
||||
|
||||
updateValue = (parent, prop, def) ->
|
||||
if not $isObject def
|
||||
parent[prop] = def
|
||||
else if $isFunction def
|
||||
parent[prop] = def.call proxy, parent[prop]
|
||||
else if $isArray def
|
||||
i = 0
|
||||
n = def.length
|
||||
|
||||
current = parent[prop] ?= new Array n
|
||||
while i < n
|
||||
updateValue current, i, def[i]
|
||||
++i
|
||||
else
|
||||
# It's a plain object.
|
||||
current = parent[prop] ?= {}
|
||||
for i of def
|
||||
updateValue current, i, def[i]
|
||||
|
||||
updateValue item, 'data', @_rules[ruleName].data
|
||||
updateValue item, 'val', @_rules[ruleName].val
|
||||
|
||||
unless @_runHook 'beforeSave', item
|
||||
# FIXME: should not be removed, only not saved.
|
||||
delete @_byKey[item.key]
|
||||
|
||||
return
|
||||
|
||||
# Really inserts the items and trigger events.
|
||||
$forEach items, (item) =>
|
||||
@_byKey[item.key] = item
|
||||
return
|
||||
@_emitEvent 'enter', items
|
||||
|
||||
#=====================================================================
|
||||
|
||||
module.exports = {$MappedCollection}
|
@ -1,121 +0,0 @@
|
||||
{expect: $expect} = require 'chai'
|
||||
|
||||
$sinon = require 'sinon'
|
||||
|
||||
#---------------------------------------------------------------------
|
||||
|
||||
{$MappedCollection} = require './MappedCollection'
|
||||
|
||||
#=====================================================================
|
||||
|
||||
describe '$MappedCollection', ->
|
||||
|
||||
# Shared variables.
|
||||
collection = null
|
||||
|
||||
beforeEach ->
|
||||
collection = new $MappedCollection()
|
||||
|
||||
#-------------------------------------------------------------------
|
||||
|
||||
describe '#dispatch()', ->
|
||||
|
||||
# Test data.
|
||||
beforeEach ->
|
||||
collection.rule test: {}
|
||||
|
||||
#------------------------------
|
||||
|
||||
it 'should have genkey and genval', ->
|
||||
collection.dispatch = ->
|
||||
$expect(@genkey).to.equal 'a key'
|
||||
$expect(@genval).to.equal 'a value'
|
||||
|
||||
'test'
|
||||
|
||||
collection.set {
|
||||
'a key': 'a value'
|
||||
}
|
||||
|
||||
#------------------------------
|
||||
|
||||
it 'should be used to dispatch an item', ->
|
||||
collection.dispatch = -> 'test'
|
||||
|
||||
collection.set [
|
||||
'any value'
|
||||
]
|
||||
|
||||
$expect(collection.getRaw('0').rule).to.equal 'test'
|
||||
|
||||
#-------------------------------------------------------------------
|
||||
|
||||
describe 'item hooks', ->
|
||||
|
||||
# Test data.
|
||||
beforeEach ->
|
||||
collection.rule test: {}
|
||||
|
||||
#------------------------------
|
||||
|
||||
it 'should be called in the correct order', ->
|
||||
|
||||
beforeDispatch = $sinon.spy()
|
||||
collection.hook {beforeDispatch}
|
||||
|
||||
dispatcher = $sinon.spy ->
|
||||
$expect(beforeDispatch.called).to.true
|
||||
|
||||
# It still is a dispatcher.
|
||||
'test'
|
||||
collection.dispatch = dispatcher
|
||||
|
||||
beforeUpdate = $sinon.spy ->
|
||||
$expect(dispatcher.called).to.true
|
||||
collection.hook {beforeUpdate}
|
||||
|
||||
beforeSave = $sinon.spy ->
|
||||
$expect(beforeUpdate.called).to.true
|
||||
collection.hook {beforeSave}
|
||||
|
||||
collection.set [
|
||||
'any value'
|
||||
]
|
||||
|
||||
$expect(beforeSave.called).to.be.true
|
||||
|
||||
#-------------------------------------------------------------------
|
||||
|
||||
describe 'adding new items', ->
|
||||
|
||||
beforeEach ->
|
||||
collection.rule test: {}
|
||||
collection.dispatch = -> 'test'
|
||||
|
||||
#------------------------------
|
||||
|
||||
it 'should trigger three `enter` events', ->
|
||||
keySpy = $sinon.spy()
|
||||
ruleSpy = $sinon.spy()
|
||||
anySpy = $sinon.spy()
|
||||
|
||||
collection.on 'key=a key', keySpy
|
||||
collection.on 'rule=test', ruleSpy
|
||||
collection.on 'any', anySpy
|
||||
|
||||
collection.set {
|
||||
'a key': 'a value'
|
||||
}
|
||||
|
||||
item = collection.getRaw 'a key'
|
||||
|
||||
# TODO: items can be an array or a object (it is not defined).
|
||||
$expect(keySpy.args).to.deep.equal [
|
||||
['enter', item]
|
||||
]
|
||||
$expect(ruleSpy.args).to.deep.equal [
|
||||
['enter', [item]]
|
||||
]
|
||||
$expect(anySpy.args).to.deep.equal [
|
||||
['enter', {'a key': item}]
|
||||
]
|
@@ -1,10 +1,9 @@
import assign from 'lodash.assign'
import {JsonRpcError} from 'json-rpc/errors'
import {JsonRpcError} from '@julien-f/json-rpc/errors'

// ===================================================================

// Export standard JSON-RPC errors.
export * from 'json-rpc/errors'
export * from '@julien-f/json-rpc/errors'

// -------------------------------------------------------------------

@@ -17,8 +16,8 @@ export class NotImplemented extends JsonRpcError {
// -------------------------------------------------------------------

export class NoSuchObject extends JsonRpcError {
  constructor () {
    super('no such object', 1)
  constructor (id, type) {
    super('no such object', 1, {id, type})
  }
}
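For context, the updated NoSuchObject signature lets callers attach the missing object's id and type to the JSON-RPC error data. A minimal sketch of how an API method could use it, assuming a getObject() helper that is not part of this hunk:

import {NoSuchObject} from '../api-errors'

export async function start ({id}) {
  // getObject() is an assumed helper here, used only for illustration.
  const vm = this.getObject(id, 'VM')
  if (!vm) {
    // The error now carries {id, type} so clients can report what was missing.
    throw new NoSuchObject(id, 'VM')
  }
  // ... start the VM ...
}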
src/api.js | 161
@@ -49,7 +49,7 @@ function checkPermission (method) {
// -------------------------------------------------------------------

function checkParams (method, params) {
  var schema = method.params
  const schema = method.params
  if (!schema) {
    return
  }
@@ -71,12 +71,14 @@ let checkAuthorization

function authorized () {}
// function forbiddden () {
//   throw new Unauthorized()
//   // We don't care about an error object.
//   /* eslint no-throw-literal: 0 */
//   throw null
// }
function checkMemberAuthorization (member) {
  return function (userId, object) {
  return function (userId, object, permission) {
    const memberObject = this.getObject(object[member])
    return checkAuthorization.call(this, userId, memberObject)
    return checkAuthorization.call(this, userId, memberObject, permission)
  }
}

@@ -93,54 +95,53 @@ const checkAuthorizationByTypes = {

  // Access to a VDI is granted if the user has access to the
  // containing SR or to a linked VM.
  VDI (userId, vdi) {
  VDI (userId, vdi, permission) {
    // Check authorization for each of the connected VMs.
    const promises = map(this.getObjects(vdi.$VBDs, 'VBD'), vbd => {
      const vm = this.getObject(vbd.VM, 'VM')
      return checkAuthorization.call(this, userId, vm)
      return checkAuthorization.call(this, userId, vm, permission)
    })

    // Check authorization for the containing SR.
    const sr = this.getObject(vdi.$SR, 'SR')
    promises.push(checkAuthorization.call(this, userId, sr))
    promises.push(checkAuthorization.call(this, userId, sr, permission))

    // We need at least one success
    return Bluebird.any(promises).catch(function (aggregateError) {
      throw aggregateError[0]
    })
    return Bluebird.any(promises)
  },

  VIF (userId, vif) {
  VIF (userId, vif, permission) {
    const network = this.getObject(vif.$network)
    const vm = this.getObject(vif.$VM)

    return Bluebird.any([
      checkAuthorization.call(this, userId, network),
      checkAuthorization.call(this, userId, vm)
      checkAuthorization.call(this, userId, network, permission),
      checkAuthorization.call(this, userId, vm, permission)
    ])
  },

  'VM-snapshot': checkMemberAuthorization('$snapshot_of')
}

function defaultCheckAuthorization (userId, object) {
  return this.acls.exists({
    subject: userId,
    object: object.id
  }).then(success => {
    if (!success) {
      throw new Unauthorized()
    }
  })
function throwIfFail (success) {
  if (!success) {
    // We don't care about an error object.
    /* eslint no-throw-literal: 0 */
    throw null
  }
}

checkAuthorization = Bluebird.method(function (userId, object) {
function defaultCheckAuthorization (userId, object, permission) {
  return this.hasPermission(userId, object.id, permission).then(throwIfFail)
}

checkAuthorization = Bluebird.method(function (userId, object, permission) {
  const fn = checkAuthorizationByTypes[object.type] || defaultCheckAuthorization
  return fn.call(this, userId, object)
  return fn.call(this, userId, object, permission)
})
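The pattern above threads a permission level through every authorization check: type-specific checkers (VDI, VIF, VM-snapshot) delegate to checkAuthorization on related objects, and everything else falls back to a direct hasPermission lookup. A standalone sketch of that dispatch, with ctx.getObject and ctx.hasPermission stubbed in as assumptions rather than the real xo-server internals:

const checkersByType = {
  // A snapshot is authorized through the VM it was taken from.
  'VM-snapshot': (userId, object, permission, ctx) =>
    checkAuthorization(userId, ctx.getObject(object.$snapshot_of), permission, ctx)
}

async function checkAuthorization (userId, object, permission, ctx) {
  const fn = checkersByType[object.type]
  if (fn) {
    return fn(userId, object, permission, ctx)
  }
  // Default: a direct permission lookup; reject when the permission is missing.
  const ok = await ctx.hasPermission(userId, object.id, permission)
  if (!ok) {
    throw new Error('unauthorized') // the real code throws null / Unauthorized
  }
}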
function resolveParams (method, params) {
  var resolve = method.resolve
  const resolve = method.resolve
  if (!resolve) {
    return params
  }
@@ -154,30 +155,28 @@ function resolveParams (method, params) {
  const isAdmin = this.user.hasPermission('admin')

  const promises = []
  try {
    forEach(resolve, ([param, types], key) => {
      const id = params[param]
      if (id === undefined) {
        return
      }
  forEach(resolve, ([param, types, permission = 'administrate'], key) => {
    const id = params[param]
    if (id === undefined) {
      return
    }

      const object = this.getObject(params[param], types)
    const object = this.getObject(params[param], types)

      // This parameter has been handled, remove it.
      delete params[param]
    // This parameter has been handled, remove it.
    delete params[param]

      // Register this new value.
      params[key] = object
    // Register this new value.
    params[key] = object

      if (!isAdmin) {
        promises.push(checkAuthorization.call(this, userId, object))
      }
    })
  } catch (error) {
    throw new NoSuchObject()
  }
    if (!isAdmin) {
      promises.push(checkAuthorization.call(this, userId, object, permission))
    }
  })

  return Bluebird.all(promises).return(params)
  return Bluebird.all(promises).catch(() => {
    throw new Unauthorized()
  }).return(params)
}

// ===================================================================
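In other words, each entry of a method's resolve map is now a [paramName, types, permission] triple, with 'administrate' as the default when the third element is omitted. A hypothetical method declaration using the new form (the method and its snapshotVm helper are made up for illustration; only the resolve shape comes from this diff):

export async function snapshot ({vm, name}) {
  // Assumed XAPI helper; the point is how `vm` arrives already resolved.
  return this.getXAPI(vm).snapshotVm(vm.id, name)
}

snapshot.params = {
  id: { type: 'string' },
  name: { type: 'string' }
}

snapshot.resolve = {
  // params.id is replaced by the resolved VM object, and the caller
  // must hold at least the 'operate' permission on it.
  vm: ['id', 'VM', 'operate']
}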
@@ -271,54 +270,46 @@ export default class Api {
    }, this)
  }

  call (session, name, params) {
  async call (session, name, params) {
    debug('%s(...)', name)

    let method
    let context
    const method = this.getMethod(name)
    if (!method) {
      throw new MethodNotFound(name)
    }

    return Bluebird.try(() => {
      method = this.getMethod(name)
      if (!method) {
        throw new MethodNotFound(name)
    const context = Object.create(this.context)
    context.api = this // Used by system.*().
    context.session = session

    // FIXME: too coupled with XO.
    // Fetch and inject the current user.
    const userId = session.get('user_id', undefined)
    if (userId) {
      context.user = await context._getUser(userId)
    }

    await checkPermission.call(context, method)
    checkParams(method, params)

    await resolveParams.call(context, method, params)
    try {
      let result = await method.call(context, params)

      // If nothing was returned, consider this operation a success
      // and return true.
      if (result === undefined) {
        result = true
      }

      context = Object.create(this.context)
      context.api = this // Used by system.*().
      context.session = session
      debug('%s(...) → %s', name, typeof result)

      // FIXME: too coupled with XO.
      // Fetch and inject the current user.
      const userId = session.get('user_id', undefined)
      return userId === undefined ? null : context.users.first(userId)
    }).then(function (user) {
      context.user = user
      return result
    } catch (error) {
      debug('Error: %s(...) → %s', name, error)

      return checkPermission.call(context, method)
    }).then(() => {
      checkParams(method, params)

      return resolveParams.call(context, method, params)
    }).then(params => {
      return method.call(context, params)
    }).then(
      result => {
        // If nothing was returned, consider this operation a success
        // and return true.
        if (result === undefined) {
          result = true
        }

        debug('%s(...) → %s', name, typeof result)

        return result
      },
      error => {
        debug('Error: %s(...) → %s', name, error)

        throw error
      }
    )
      throw error
    }
  }

  getMethod (name) {
@ -1,38 +1,16 @@
|
||||
import {coroutine} from 'bluebird'
|
||||
import {ModelAlreadyExists} from '../collection'
|
||||
|
||||
// ===================================================================
|
||||
|
||||
export const get = coroutine(function * ({subject, object}) {
|
||||
const sieve = {}
|
||||
try {
|
||||
if (subject !== undefined) {
|
||||
sieve.subject = (yield this.users.first(subject)).get('id')
|
||||
}
|
||||
if (object !== undefined) {
|
||||
sieve.object = this.getObject(object).id
|
||||
}
|
||||
} catch (error) {
|
||||
this.throw('NO_SUCH_OBJECT')
|
||||
}
|
||||
|
||||
return this.acls.get(sieve)
|
||||
})
|
||||
export async function get () {
|
||||
return await this.getAllAcls()
|
||||
}
|
||||
|
||||
get.permission = 'admin'
|
||||
|
||||
get.params = {
|
||||
subject: { type: 'string', optional: true },
|
||||
object: { type: 'string', optional: true }
|
||||
}
|
||||
|
||||
get.description = 'get existing ACLs'
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
export const getCurrent = coroutine(function * () {
|
||||
return this.acls.get({ subject: this.session.get('user_id') })
|
||||
})
|
||||
export async function getCurrent () {
|
||||
return await this.getAclsForUser(this.session.get('user_id'))
|
||||
}
|
||||
|
||||
getCurrent.permission = ''
|
||||
|
||||
@ -40,43 +18,32 @@ getCurrent.description = 'get existing ACLs concerning current user'
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
export const add = coroutine(function * ({subject, object}) {
|
||||
try {
|
||||
subject = (yield this.users.first(subject)).get('id')
|
||||
object = this.getObject(object).id
|
||||
} catch (error) {
|
||||
this.throw('NO_SUCH_OBJECT')
|
||||
}
|
||||
|
||||
try {
|
||||
yield this.acls.create(subject, object)
|
||||
} catch (error) {
|
||||
if (!(error instanceof ModelAlreadyExists)) {
|
||||
throw error
|
||||
}
|
||||
}
|
||||
})
|
||||
export async function add ({subject, object, action}) {
|
||||
await this.addAcl(subject, object, action)
|
||||
}
|
||||
|
||||
add.permission = 'admin'
|
||||
|
||||
add.params = {
|
||||
subject: { type: 'string' },
|
||||
object: { type: 'string' }
|
||||
object: { type: 'string' },
|
||||
action: { type: 'string' }
|
||||
}
|
||||
|
||||
add.description = 'add a new ACL entry'
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
export const remove = coroutine(function * ({subject, object}) {
|
||||
yield this.acls.delete(subject, object)
|
||||
})
|
||||
export async function remove ({subject, object, action}) {
|
||||
await this.removeAcl(subject, object, action)
|
||||
}
|
||||
|
||||
remove.permission = 'admin'
|
||||
|
||||
remove.params = {
|
||||
subject: { type: 'string' },
|
||||
object: { type: 'string' }
|
||||
object: { type: 'string' },
|
||||
action: { type: 'string' }
|
||||
}
|
||||
|
||||
remove.description = 'remove an existing ACL entry'
|
||||
|
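Since acl.add and acl.remove now require an action alongside subject and object, a client request looks roughly like the following sketch (the ids and the 'operate' value are placeholders, not values mandated by this diff):

// JSON-RPC payloads a client such as xo-web might send:
const addAcl = {
  method: 'acl.add',
  params: {
    subject: 'user-or-group-id',
    object: 'vm-or-sr-id',
    action: 'operate' // the new required parameter
  }
}

const removeAcl = {
  method: 'acl.remove',
  params: { subject: 'user-or-group-id', object: 'vm-or-sr-id', action: 'operate' }
}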
@ -28,5 +28,5 @@ create.params = {
|
||||
}
|
||||
|
||||
create.resolve = {
|
||||
sr: ['sr', 'SR']
|
||||
sr: ['sr', 'SR', 'administrate']
|
||||
}
|
||||
|
src/api/docker.js | 66 (new file)
@@ -0,0 +1,66 @@
export async function register ({vm}) {
  await this.getXAPI(vm).registerDockerContainer(vm.id)
}
register.permission = 'admin'

register.description = 'Register the VM for Docker management'

register.params = {
  vm: { type: 'string' }
}

register.resolve = {
  vm: ['vm', 'VM', 'administrate']
}

// -----------------------------------------------------------------------------

export async function deregister ({vm}) {
  await this.getXAPI(vm).unregisterDockerContainer(vm.id)
}
deregister.permission = 'admin'

deregister.description = 'Deregister the VM for Docker management'

deregister.params = {
  vm: { type: 'string' }
}

deregister.resolve = {
  vm: ['vm', 'VM', 'administrate']
}

// -----------------------------------------------------------------------------

export async function start ({vm, container}) {
  await this.getXAPI(vm).startDockerContainer(vm.id, container)
}

export async function stop ({vm, container}) {
  await this.getXAPI(vm).stopDockerContainer(vm.id, container)
}

export async function restart ({vm, container}) {
  await this.getXAPI(vm).restartDockerContainer(vm.id, container)
}

export async function pause ({vm, container}) {
  await this.getXAPI(vm).pauseDockerContainer(vm.id, container)
}

export async function unpause ({vm, container}) {
  await this.getXAPI(vm).unpauseDockerContainer(vm.id, container)
}

for (let fn of [start, stop, restart, pause, unpause]) {
  fn.permission = 'admin'

  fn.params = {
    vm: { type: 'string' },
    container: { type: 'string' }
  }

  fn.resolve = {
    vm: ['vm', 'VM', 'operate']
  }
}
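The new docker namespace simply proxies each operation to XAPI helpers on the VM's pool. A hedged sketch of how a client could drive the container lifecycle through these methods; `xo` stands for an assumed connected client with a call() method, and the ids are placeholders:

async function manageContainer (xo) {
  // Register a VM for Docker management, then start a container in it.
  await xo.call('docker.register', { vm: 'my-vm-id' })
  await xo.call('docker.start', { vm: 'my-vm-id', container: 'container-uuid' })
  // Later:
  await xo.call('docker.stop', { vm: 'my-vm-id', container: 'container-uuid' })
}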
94
src/api/group.js
Normal file
94
src/api/group.js
Normal file
@ -0,0 +1,94 @@
|
||||
export async function create ({name}) {
|
||||
return (await this.createGroup({name})).id
|
||||
}
|
||||
|
||||
create.description = 'creates a new group'
|
||||
create.permission = 'admin'
|
||||
create.params = {
|
||||
name: {type: 'string'}
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
// Deletes an existing group.
|
||||
async function delete_ ({id}) {
|
||||
await this.deleteGroup(id)
|
||||
}
|
||||
|
||||
// delete is not a valid identifier.
|
||||
export {delete_ as delete}
|
||||
|
||||
delete_.description = 'deletes an existing group'
|
||||
delete_.permission = 'admin'
|
||||
delete_.params = {
|
||||
id: {type: 'string'}
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
export async function getAll () {
|
||||
return await this._groups.get()
|
||||
}
|
||||
|
||||
getAll.description = 'returns all the existing groups'
getAll.permission = 'admin'
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
// sets group.users with an array of user ids
|
||||
export async function setUsers ({id, userIds}) {
|
||||
await this.setGroupUsers(id, userIds)
|
||||
}
|
||||
|
||||
setUsers.description = 'sets the users belonging to a group'
|
||||
setUsers.permission = 'admin'
|
||||
setUsers.params = {
|
||||
id: {type: 'string'},
|
||||
userIds: {}
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
// adds the user id to group.users
|
||||
export async function addUser ({id, userId}) {
|
||||
await this.addUserToGroup(userId, id)
|
||||
}
|
||||
|
||||
addUser.description = 'adds a user to a group'
|
||||
addUser.permission = 'admin'
|
||||
addUser.params = {
|
||||
id: {type: 'string'},
|
||||
userId: {type: 'string'}
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
// remove the user id from group.users
|
||||
export async function removeUser ({id, userId}) {
|
||||
await this.removeUserFromGroup(userId, id)
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
removeUser.description = 'removes a user from a group'
|
||||
removeUser.permission = 'admin'
|
||||
removeUser.params = {
|
||||
id: {type: 'string'},
|
||||
userId: {type: 'string'}
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
export async function set ({id, name}) {
|
||||
await this.updateGroup(id, {name})
|
||||
}
|
||||
|
||||
set.description = 'changes the properties of an existing group'
|
||||
set.permission = 'admin'
|
||||
set.params = {
|
||||
id: { type: 'string' },
|
||||
name: { type: 'string', optional: true }
|
||||
}
|
@ -1,4 +1,12 @@
|
||||
{$coroutine, $wait} = require '../fibers-utils'
|
||||
$debug = (require 'debug') 'xo:api:vm'
|
||||
$request = require('bluebird').promisify(require('request'))
|
||||
{parseXml} = require '../utils'
|
||||
$forEach = require 'lodash.foreach'
|
||||
$find = require 'lodash.find'
|
||||
$findIndex = require 'lodash.findindex'
|
||||
startsWith = require 'lodash.startswith'
|
||||
endsWith = require 'lodash.endswith'
|
||||
|
||||
#=====================================================================
|
||||
|
||||
@ -30,7 +38,7 @@ set.params =
|
||||
optional: true
|
||||
|
||||
set.resolve = {
|
||||
host: ['id', 'host'],
|
||||
host: ['id', 'host', 'administrate'],
|
||||
}
|
||||
|
||||
exports.set = set
|
||||
@ -50,7 +58,7 @@ restart.params = {
|
||||
}
|
||||
|
||||
restart.resolve = {
|
||||
host: ['id', 'host'],
|
||||
host: ['id', 'host', 'operate'],
|
||||
}
|
||||
|
||||
exports.restart = restart
|
||||
@ -69,7 +77,7 @@ restartAgent.params = {
|
||||
}
|
||||
|
||||
restartAgent.resolve = {
|
||||
host: ['id', 'host'],
|
||||
host: ['id', 'host', 'operate'],
|
||||
}
|
||||
|
||||
# TODO camel case
|
||||
@ -89,7 +97,7 @@ start.params = {
|
||||
}
|
||||
|
||||
start.resolve = {
|
||||
host: ['id', 'host'],
|
||||
host: ['id', 'host', 'operate'],
|
||||
}
|
||||
|
||||
exports.start = start
|
||||
@ -109,7 +117,7 @@ stop.params = {
|
||||
}
|
||||
|
||||
stop.resolve = {
|
||||
host: ['id', 'host'],
|
||||
host: ['id', 'host', 'operate'],
|
||||
}
|
||||
|
||||
exports.stop = stop
|
||||
@ -128,7 +136,7 @@ detach.params = {
|
||||
}
|
||||
|
||||
detach.resolve = {
|
||||
host: ['id', 'host'],
|
||||
host: ['id', 'host', 'administrate'],
|
||||
}
|
||||
|
||||
exports.detach = detach
|
||||
@ -147,7 +155,7 @@ enable.params = {
|
||||
}
|
||||
|
||||
enable.resolve = {
|
||||
host: ['id', 'host'],
|
||||
host: ['id', 'host', 'administrate'],
|
||||
}
|
||||
|
||||
exports.enable = enable
|
||||
@ -166,7 +174,7 @@ disable.params = {
|
||||
}
|
||||
|
||||
disable.resolve = {
|
||||
host: ['id', 'host'],
|
||||
host: ['id', 'host', 'administrate'],
|
||||
}
|
||||
|
||||
exports.disable = disable
|
||||
@ -202,7 +210,134 @@ createNetwork.params = {
|
||||
}
|
||||
|
||||
createNetwork.resolve = {
|
||||
host: ['host', 'host'],
|
||||
host: ['host', 'host', 'administrate'],
|
||||
}
|
||||
createNetwork.permission = 'admin'
|
||||
exports.createNetwork = createNetwork
|
||||
|
||||
#---------------------------------------------------------------------
|
||||
# Returns an array of missing new patches in the host
|
||||
# Returns an empty array if up-to-date
|
||||
# Throws an error if the host is not running the latest XS version
|
||||
|
||||
listMissingPatches = ({host}) ->
|
||||
return @getXAPI(host).listMissingPoolPatchesOnHost(host.id)
|
||||
|
||||
listMissingPatches.params = {
|
||||
host: { type: 'string' }
|
||||
}
|
||||
|
||||
listMissingPatches.resolve = {
|
||||
host: ['host', 'host', 'view'],
|
||||
}
|
||||
|
||||
exports.listMissingPatches = listMissingPatches
|
||||
|
||||
#---------------------------------------------------------------------
|
||||
|
||||
installPatch = ({host, patch: patchUuid}) ->
|
||||
return @getXAPI(host).installPoolPatchOnHost(patchUuid, host.id)
|
||||
|
||||
installPatch.params = {
|
||||
host: { type: 'string' }
|
||||
patch: { type: 'string' }
|
||||
}
|
||||
|
||||
installPatch.resolve = {
|
||||
host: ['host', 'host', 'administrate']
|
||||
}
|
||||
|
||||
exports.installPatch = installPatch
|
||||
|
||||
#---------------------------------------------------------------------
|
||||
|
||||
|
||||
stats = $coroutine ({host}) ->
|
||||
|
||||
xapi = @getXAPI host
|
||||
|
||||
[response, body] = $wait $request {
|
||||
method: 'get'
|
||||
rejectUnauthorized: false
|
||||
url: 'https://'+host.address+'/host_rrd?session_id='+xapi.sessionId
|
||||
}
|
||||
|
||||
if response.statusCode isnt 200
|
||||
throw new Error('Cannot fetch the RRDs')
|
||||
|
||||
json = parseXml(body)
|
||||
|
||||
# Find index of needed objects for getting their values after
|
||||
cpusIndexes = []
|
||||
pifsIndexes = []
|
||||
memoryFreeIndex = []
|
||||
memoryIndex = []
|
||||
loadIndex = []
|
||||
index = 0
|
||||
|
||||
$forEach(json.rrd.ds, (value, i) ->
|
||||
if /^cpu[0-9]+$/.test(value.name)
|
||||
cpusIndexes.push(i)
|
||||
else if startsWith(value.name, 'pif_eth') && endsWith(value.name, '_tx')
|
||||
pifsIndexes.push(i)
|
||||
else if startsWith(value.name, 'pif_eth') && endsWith(value.name, '_rx')
|
||||
pifsIndexes.push(i)
|
||||
else if startsWith(value.name, 'loadavg')
|
||||
loadIndex.push(i)
|
||||
else if startsWith(value.name, 'memory_free_kib')
|
||||
memoryFreeIndex.push(i)
|
||||
else if startsWith(value.name, 'memory_total_kib')
|
||||
memoryIndex.push(i)
|
||||
|
||||
return
|
||||
)
|
||||
|
||||
memoryFree = []
|
||||
memoryUsed = []
|
||||
memory = []
|
||||
load = []
|
||||
cpus = []
|
||||
pifs = []
|
||||
date = [] #TODO
|
||||
baseDate = json.rrd.lastupdate
|
||||
dateStep = json.rrd.step
|
||||
numStep = json.rrd.rra[0].database.row.length - 1
|
||||
|
||||
$forEach json.rrd.rra[0].database.row, (n, key) ->
|
||||
memoryFree.push(Math.round(parseInt(n.v[memoryFreeIndex])))
|
||||
memoryUsed.push(Math.round(parseInt(n.v[memoryIndex])-(n.v[memoryFreeIndex])))
|
||||
memory.push(parseInt(n.v[memoryIndex]))
|
||||
load.push(n.v[loadIndex])
|
||||
date.push(baseDate - (dateStep * (numStep - key)))
|
||||
# build the multi dimensional arrays
|
||||
$forEach cpusIndexes, (value, key) ->
|
||||
cpus[key] ?= []
|
||||
cpus[key].push(n.v[value]*100)
|
||||
return
|
||||
$forEach pifsIndexes, (value, key) ->
|
||||
pifs[key] ?= []
|
||||
pifs[key].push(if n.v[value] == 'NaN' then null else n.v[value]) # * (if key % 2 then -1 else 1))
|
||||
return
|
||||
return
|
||||
|
||||
|
||||
# the final object
|
||||
return {
|
||||
memoryFree: memoryFree
|
||||
memoryUsed: memoryUsed
|
||||
memory: memory
|
||||
date: date
|
||||
cpus: cpus
|
||||
pifs: pifs
|
||||
load: load
|
||||
}
|
||||
|
||||
stats.params = {
|
||||
host: { type: 'string' }
|
||||
}
|
||||
|
||||
stats.resolve = {
|
||||
host: ['host', 'host', 'view']
|
||||
}
|
||||
|
||||
exports.stats = stats;
|
||||
|
@ -14,7 +14,7 @@ delete_.params = {
|
||||
}
|
||||
|
||||
delete_.resolve = {
|
||||
message: ['id', 'message']
|
||||
message: ['id', 'message', 'administrate']
|
||||
}
|
||||
|
||||
exports.delete = delete_
|
||||
|
@ -16,7 +16,7 @@ exports.delete.params = {
|
||||
id: { type: 'string' }
|
||||
}
|
||||
exports.delete.resolve = {
|
||||
PBD: ['id', 'PBD']
|
||||
PBD: ['id', 'PBD', 'administrate']
|
||||
}
|
||||
|
||||
#=====================================================================
|
||||
@ -33,7 +33,7 @@ exports.disconnect.params = {
|
||||
id: { type: 'string' }
|
||||
}
|
||||
exports.disconnect.resolve = {
|
||||
PBD: ['id', 'PBD']
|
||||
PBD: ['id', 'PBD', 'administrate']
|
||||
}
|
||||
|
||||
#=====================================================================
|
||||
@ -50,5 +50,5 @@ exports.connect.params = {
|
||||
id: { type: 'string' }
|
||||
}
|
||||
exports.connect.resolve = {
|
||||
PBD: ['id', 'PBD']
|
||||
PBD: ['id', 'PBD', 'administrate']
|
||||
}
|
||||
|
@ -14,7 +14,7 @@ exports.delete.params = {
|
||||
id: { type: 'string' }
|
||||
}
|
||||
exports.delete.resolve = {
|
||||
PIF: ['id', 'PIF']
|
||||
PIF: ['id', 'PIF', 'administrate']
|
||||
}
|
||||
|
||||
#=====================================================================
|
||||
@ -31,7 +31,7 @@ exports.disconnect.params = {
|
||||
id: { type: 'string' }
|
||||
}
|
||||
exports.disconnect.resolve = {
|
||||
PIF: ['id', 'PIF']
|
||||
PIF: ['id', 'PIF', 'administrate']
|
||||
}
|
||||
#=====================================================================
|
||||
# Connect
|
||||
@ -47,5 +47,5 @@ exports.connect.params = {
|
||||
id: { type: 'string' }
|
||||
}
|
||||
exports.connect.resolve = {
|
||||
PIF: ['id', 'PIF']
|
||||
PIF: ['id', 'PIF', 'administrate']
|
||||
}
|
||||
|
@ -1,84 +0,0 @@
|
||||
$debug = (require 'debug') 'xo:api:vm'
|
||||
{$coroutine, $wait} = require '../fibers-utils'
|
||||
|
||||
#=====================================================================
|
||||
|
||||
set = $coroutine (params) ->
|
||||
{pool} = params
|
||||
xapi = @getXAPI pool
|
||||
|
||||
for param, field of {
|
||||
'name_label'
|
||||
'name_description'
|
||||
}
|
||||
continue unless param of params
|
||||
|
||||
$wait xapi.call "pool.set_#{field}", pool.ref, params[param]
|
||||
|
||||
return true
|
||||
|
||||
set.params = {
|
||||
id: {
|
||||
type: 'string',
|
||||
},
|
||||
name_label: {
|
||||
type: 'string',
|
||||
optional: true,
|
||||
},
|
||||
name_description: {
|
||||
type: 'string',
|
||||
optional: true,
|
||||
},
|
||||
}
|
||||
|
||||
set.resolve = {
|
||||
pool: ['id', 'pool'],
|
||||
}
|
||||
|
||||
exports.set = set
|
||||
|
||||
#---------------------------------------------------------------------
|
||||
|
||||
# FIXME
|
||||
patch = $coroutine ({pool}) ->
|
||||
xapi = @getXAPI pool
|
||||
host = @getObject pool.master, 'host'
|
||||
|
||||
taskRef = $wait xapi.call 'task.create', 'Patch upload from XO', ''
|
||||
@watchTask taskRef
|
||||
.then $coroutine (patchRef) ->
|
||||
$debug 'Patch upload succeeded'
|
||||
xapi.call 'pool_patch.pool_apply', patchRef
|
||||
return
|
||||
.catch (error) ->
|
||||
$debug 'Patch upload failed: %j', error
|
||||
return
|
||||
.finally $coroutine ->
|
||||
xapi.call 'task.destroy', taskRef
|
||||
return
|
||||
|
||||
url = $wait @registerProxyRequest {
|
||||
# Receive a POST but send a PUT.
|
||||
method: 'put'
|
||||
proxyMethod: 'post'
|
||||
hostname: host.address
|
||||
pathname: '/pool_patch_upload'
|
||||
query: {
|
||||
session_id: xapi.sessionId
|
||||
task_id: taskRef
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
$sendTo: url
|
||||
}
|
||||
|
||||
patch.params = {
|
||||
pool: { type: 'string' },
|
||||
}
|
||||
|
||||
patch.resolve = {
|
||||
pool: ['pool', 'pool'],
|
||||
}
|
||||
|
||||
exports.patch = patch
|
src/api/pool.js | 77 (new file)
@@ -0,0 +1,77 @@
// ===================================================================

export async function set (params) {
  const {pool} = params
  delete params.pool

  await this.getXAPI(pool).setPoolProperties(params)
}

set.params = {
  id: {
    type: 'string'
  },
  name_label: {
    type: 'string',
    optional: true
  },
  name_description: {
    type: 'string',
    optional: true
  }
}

set.resolve = {
  pool: ['id', 'pool', 'administrate']
}

// -------------------------------------------------------------------

export async function installPatch ({pool, patch: patchUuid}) {
  await this.getXAPI(pool).installPoolPatchOnAllHosts(patchUuid)
}

installPatch.params = {
  pool: {
    type: 'string'
  },
  patch: {
    type: 'string'
  }
}

installPatch.resolve = {
  pool: ['pool', 'pool', 'administrate']
}

// -------------------------------------------------------------------

async function handlePatchUpload (req, res, {pool}) {
  const {headers: {['content-length']: contentLength}} = req
  if (!contentLength) {
    res.writeHead(411)
    res.end('Content length is mandatory')
    return
  }

  await this.getXAPI(pool).uploadPoolPatch(req, contentLength)
}

export async function uploadPatch ({pool}) {
  return {
    $sendTo: await this.registerHttpRequest(handlePatchUpload, {pool})
  }
}

uploadPatch.params = {
  pool: { type: 'string' }
}

uploadPatch.resolve = {
  pool: ['pool', 'pool', 'administrate']
}

// Compatibility
//
// TODO: remove when no longer used in xo-web
export {uploadPatch as patch}
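Note that uploadPatch only returns an URL in $sendTo; the patch bytes travel over a separate HTTP request that ends up in handlePatchUpload. A hedged sketch of the client side of that two-step flow; the fetch call, HTTP method and ids are illustrative assumptions, not part of this diff:

async function sendPoolPatch (xo, patchBuffer) {
  // 1. Ask the API where to send the patch.
  const { $sendTo } = await xo.call('pool.uploadPatch', { pool: 'pool-id' })

  // 2. Upload the raw patch file to that URL; Content-Length is mandatory
  //    because the server streams the body straight to XAPI.
  await fetch($sendTo, {
    method: 'POST', // assumed; the exact method depends on how the request was registered
    headers: { 'Content-Length': String(patchBuffer.length) },
    body: patchBuffer
  })
}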
src/api/role.js | 3 (new file)
@@ -0,0 +1,3 @@
export async function getAll () {
  return await this.getRoles()
}
@ -1,32 +1,24 @@
|
||||
import {coroutine} from 'bluebird'
|
||||
import {JsonRpcError, NoSuchObject} from '../api-errors'
|
||||
|
||||
// ===================================================================
|
||||
|
||||
// FIXME: We are storing passwords which is bad!
|
||||
// Could we use tokens instead?
|
||||
|
||||
export const add = coroutine(function * ({
|
||||
export async function add ({
|
||||
host,
|
||||
username,
|
||||
password,
|
||||
autoConnect = true
|
||||
}) {
|
||||
const server = yield this.servers.add({
|
||||
host: host,
|
||||
username: username,
|
||||
password: password
|
||||
})
|
||||
const server = await this.registerXenServer({host, username, password})
|
||||
|
||||
if (autoConnect) {
|
||||
// Connect asynchronously, ignore any error.
|
||||
this.connectServer(server).catch(() => {})
|
||||
this.connectXenServer(server.id).catch(() => {})
|
||||
}
|
||||
|
||||
return server.get('id')
|
||||
})
|
||||
return server.id
|
||||
}
|
||||
|
||||
add.description = 'Add a new Xen server to XO'
|
||||
add.description = 'register a new Xen server'
|
||||
|
||||
add.permission = 'admin'
|
||||
|
||||
@ -48,15 +40,11 @@ add.params = {
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
export const remove = coroutine(function * ({id}) {
|
||||
try {
|
||||
yield this.disconnectServer(id)
|
||||
} catch (error) {}
|
||||
export async function remove ({id}) {
|
||||
this.unregisterXenServer(id)
|
||||
}
|
||||
|
||||
if (!(yield this.servers.remove(id))) {
|
||||
throw new NoSuchObject()
|
||||
}
|
||||
})
|
||||
remove.description = 'unregister a Xen server'
|
||||
|
||||
remove.permission = 'admin'
|
||||
|
||||
@ -68,8 +56,10 @@ remove.params = {
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
// TODO: remove this function when users are integrated to the main
|
||||
// collection.
|
||||
export const getAll = coroutine(function * () {
|
||||
const servers = yield this.servers.get()
|
||||
const servers = yield this._servers.get()
|
||||
|
||||
for (let i = 0, n = servers.length; i < n; ++i) {
|
||||
servers[i] = this.getServerPublicProperties(servers[i])
|
||||
@ -78,32 +68,17 @@ export const getAll = coroutine(function * () {
|
||||
return servers
|
||||
})
|
||||
|
||||
getAll.description = 'returns all the registered Xen servers'
|
||||
|
||||
getAll.permission = 'admin'
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
export const set = coroutine(function * ({id, host, username, password}) {
|
||||
const server = yield this.servers.first(id)
|
||||
if (!server) {
|
||||
throw new NoSuchObject()
|
||||
}
|
||||
if (host != null) {
|
||||
server.set({
|
||||
host: host
|
||||
})
|
||||
}
|
||||
if (username != null) {
|
||||
server.set({
|
||||
username: username
|
||||
})
|
||||
}
|
||||
if (password != null) {
|
||||
server.set({
|
||||
password: password
|
||||
})
|
||||
}
|
||||
yield this.servers.update(server)
|
||||
})
|
||||
export async function set ({id, host, username, password}) {
|
||||
await this.updateXenServer(id, {host, username, password})
|
||||
}
|
||||
|
||||
set.description = 'changes the properties of a Xen server'
|
||||
|
||||
set.permission = 'admin'
|
||||
|
||||
@ -127,24 +102,11 @@ set.params = {
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
export const connect = coroutine(function * ({id}) {
|
||||
const server = yield this.servers.first(id)
|
||||
if (!server) {
|
||||
throw new NoSuchObject()
|
||||
}
|
||||
export async function connect ({id}) {
|
||||
await this.connectXenServer(id)
|
||||
}
|
||||
|
||||
try {
|
||||
yield this.connectServer(server)
|
||||
} catch (error) {
|
||||
if (error.code === 'SESSION_AUTHENTICATION_FAILED') {
|
||||
throw new JsonRpcError('authentication failed')
|
||||
}
|
||||
if (error.code === 'EHOSTUNREACH') {
|
||||
throw new JsonRpcError('host unreachable')
|
||||
}
|
||||
throw error
|
||||
}
|
||||
})
|
||||
connect.description = 'connect a Xen server'
|
||||
|
||||
connect.permission = 'admin'
|
||||
|
||||
@ -156,14 +118,11 @@ connect.params = {
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
export const disconnect = coroutine(function * ({id}) {
|
||||
const server = yield this.servers.first(id)
|
||||
if (!server) {
|
||||
throw new NoSuchObject()
|
||||
}
|
||||
export async function disconnect ({id}) {
|
||||
await this.disconnectXenServer(id)
|
||||
}
|
||||
|
||||
return this.disconnectServer(server)
|
||||
})
|
||||
disconnect.description = 'disconnect a Xen server'
|
||||
|
||||
disconnect.permission = 'admin'
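All the rewritten server methods above share the same module shape: an exported async function receiving the already-parsed parameters, plus description/permission/params/resolve metadata consumed by the API layer. A minimal sketch of a hypothetical method in that style (the method name and XAPI helper below are illustrative only, not part of this commit):

// Sketch only: illustrates the convention used by the new ES6 API modules.
export async function doSomething ({vm}) {
  // `vm` was resolved from the `id` parameter by the `resolve` rule below,
  // after checking the 'operate' permission on that object.
  await this.getXAPI(vm).doSomething(vm.id) // hypothetical XAPI helper
}

doSomething.description = 'example of the new method convention'

doSomething.permission = 'admin'

doSomething.params = {
  id: { type: 'string' }
}

doSomething.resolve = {
  // [parameter holding the id, accepted object type(s), required permission]
  vm: ['id', 'VM', 'operate']
}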
|
||||
|
||||
|
@ -57,7 +57,7 @@ export const getUser = coroutine(function () {
|
||||
|
||||
return userId === undefined ?
|
||||
null :
|
||||
this.getUserPublicProperties(wait(this.users.first(userId)))
|
||||
this.getUserPublicProperties(wait(this.getUser(userId)))
|
||||
})
|
||||
|
||||
getUser.description = 'return the currently connected user'
|
||||
|
@ -4,21 +4,12 @@ import {ensureArray, parseXml} from '../utils'
|
||||
|
||||
// ===================================================================
|
||||
|
||||
export const set = coroutine(function (params) {
|
||||
const {SR} = params
|
||||
const xapi = this.getXAPI()
|
||||
export async function set (params) {
|
||||
const {sr} = params
|
||||
delete params.sr
|
||||
|
||||
forEach(['name_label', 'name_description'], param => {
|
||||
const value = params[param]
|
||||
if (value === undefined) {
|
||||
return
|
||||
}
|
||||
|
||||
wait(xapi.call(`SR.set_${value}`, SR.ref, params[param]))
|
||||
})
|
||||
|
||||
return true
|
||||
})
|
||||
await this.getXAPI(sr).setSrProperties(sr.id, params)
|
||||
}
|
||||
|
||||
set.params = {
|
||||
id: { type: 'string' },
|
||||
@ -29,7 +20,7 @@ set.params = {
|
||||
}
|
||||
|
||||
set.resolve = {
|
||||
SR: ['id', 'SR']
|
||||
sr: ['id', 'SR', 'operate']
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
@ -47,7 +38,7 @@ scan.params = {
|
||||
}
|
||||
|
||||
scan.resolve = {
|
||||
SR: ['id', 'SR']
|
||||
SR: ['id', 'SR', 'operate']
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
@ -66,7 +57,7 @@ destroy.params = {
|
||||
}
|
||||
|
||||
destroy.resolve = {
|
||||
SR: ['id', 'SR']
|
||||
SR: ['id', 'SR', 'administrate']
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
@ -84,7 +75,7 @@ forget.params = {
|
||||
}
|
||||
|
||||
forget.resolve = {
|
||||
SR: ['id', 'SR']
|
||||
SR: ['id', 'SR', 'administrate']
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
@ -129,7 +120,7 @@ createIso.params = {
|
||||
}
|
||||
|
||||
createIso.resolve = {
|
||||
host: ['host', 'host']
|
||||
host: ['host', 'host', 'administrate']
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
@ -184,7 +175,7 @@ createNfs.params = {
|
||||
}
|
||||
|
||||
createNfs.resolve = {
|
||||
host: ['host', 'host']
|
||||
host: ['host', 'host', 'administrate']
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
@ -229,7 +220,7 @@ createLvm.params = {
|
||||
}
|
||||
|
||||
createLvm.resolve = {
|
||||
host: ['host', 'host']
|
||||
host: ['host', 'host', 'administrate']
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
@ -259,11 +250,11 @@ export const probeNfs = coroutine(function ({
|
||||
|
||||
throw new Error('the call above should have thrown an error')
|
||||
} catch (error) {
|
||||
if (error[0] !== 'SR_BACKEND_FAILURE_101') {
|
||||
if (error.code !== 'SR_BACKEND_FAILURE_101') {
|
||||
throw error
|
||||
}
|
||||
|
||||
xml = parseXml(error[3])
|
||||
xml = parseXml(error.params[2])
|
||||
}
|
||||
|
||||
const nfsExports = []
|
||||
@ -283,7 +274,7 @@ probeNfs.params = {
|
||||
}
|
||||
|
||||
probeNfs.resolve = {
|
||||
host: ['host', 'host']
|
||||
host: ['host', 'host', 'administrate']
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
@ -352,7 +343,7 @@ createIscsi.params = {
|
||||
}
|
||||
|
||||
createIscsi.resolve = {
|
||||
host: ['host', 'host']
|
||||
host: ['host', 'host', 'administrate']
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
@ -396,14 +387,14 @@ export const probeIscsiIqns = coroutine(function ({
|
||||
|
||||
throw new Error('the call above should have thrown an error')
|
||||
} catch (error) {
|
||||
if (error[0] === 'SR_BACKEND_FAILURE_141') {
|
||||
if (error.code === 'SR_BACKEND_FAILURE_141') {
|
||||
return []
|
||||
}
|
||||
if (error[0] !== 'SR_BACKEND_FAILURE_96') {
|
||||
if (error.code !== 'SR_BACKEND_FAILURE_96') {
|
||||
throw error
|
||||
}
|
||||
|
||||
xml = parseXml(error[3])
|
||||
xml = parseXml(error.params[2])
|
||||
}
|
||||
|
||||
const targets = []
|
||||
@ -428,7 +419,7 @@ probeIscsiIqns.params = {
|
||||
chapPassword: { type: 'string', optional: true }
|
||||
}
|
||||
probeIscsiIqns.resolve = {
|
||||
host: ['host', 'host']
|
||||
host: ['host', 'host', 'administrate']
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
@ -474,11 +465,11 @@ export const probeIscsiLuns = coroutine(function ({
|
||||
|
||||
throw new Error('the call above should have thrown an error')
|
||||
} catch (error) {
|
||||
if (error[0] !== 'SR_BACKEND_FAILURE_107') {
|
||||
if (error.code !== 'SR_BACKEND_FAILURE_107') {
|
||||
throw error
|
||||
}
|
||||
|
||||
xml = parseXml(error[3])
|
||||
xml = parseXml(error.params[2])
|
||||
}
|
||||
|
||||
const luns = []
|
||||
@ -505,7 +496,7 @@ probeIscsiLuns.params = {
|
||||
}
|
||||
|
||||
probeIscsiLuns.resolve = {
|
||||
host: ['host', 'host']
|
||||
host: ['host', 'host', 'administrate']
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
@ -562,7 +553,7 @@ probeIscsiExists.params = {
|
||||
}
|
||||
|
||||
probeIscsiExists.resolve = {
|
||||
host: ['host', 'host']
|
||||
host: ['host', 'host', 'administrate']
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
@ -600,7 +591,7 @@ probeNfsExists.params = {
|
||||
}
|
||||
|
||||
probeNfsExists.resolve = {
|
||||
host: ['host', 'host']
|
||||
host: ['host', 'host', 'administrate']
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
@ -643,7 +634,7 @@ reattach.params = {
|
||||
}
|
||||
|
||||
reattach.resolve = {
|
||||
host: ['host', 'host']
|
||||
host: ['host', 'host', 'administrate']
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
@ -686,5 +677,5 @@ reattachIso.params = {
|
||||
}
|
||||
|
||||
reattachIso.resolve = {
|
||||
host: ['host', 'host']
|
||||
host: ['host', 'host', 'administrate']
|
||||
}
|
||||
|
@ -14,7 +14,7 @@ cancel.params = {
|
||||
}
|
||||
|
||||
cancel.resolve = {
|
||||
task: ['id', 'task'],
|
||||
task: ['id', 'task', 'administrate'],
|
||||
}
|
||||
|
||||
exports.cancel = cancel
|
||||
@ -32,7 +32,7 @@ destroy.params = {
|
||||
}
|
||||
|
||||
destroy.resolve = {
|
||||
task: ['id', 'task'],
|
||||
task: ['id', 'task', 'administrate'],
|
||||
}
|
||||
|
||||
exports.destroy = destroy
|
||||
|
33
src/api/test.js
Normal file
@ -0,0 +1,33 @@
import {delay} from 'bluebird'

// ===================================================================

export function hasPermission ({userId, objectId, permission}) {
  return this.hasPermission(userId, objectId, permission)
}

hasPermission.permission = 'admin'

hasPermission.params = {
  userId: {
    type: 'string'
  },
  objectId: {
    type: 'string'
  },
  permission: {
    type: 'string'
  }
}

// -------------------------------------------------------------------

export function wait ({duration, returnValue}) {
  return delay(returnValue, +duration)
}

wait.params = {
  duration: {
    type: 'string'
  }
}
|
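A quick sketch of what the wait helper above does when invoked with a bound context (illustrative only; it simply resolves returnValue after duration milliseconds via Bluebird's delay):

// Sketch only, not part of this commit.
wait.call({}, {duration: '50', returnValue: 'pong'})
  .then(value => console.log(value)) // logs 'pong' after ~50 ms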
@ -1,38 +0,0 @@
|
||||
{$coroutine, $wait} = require '../fibers-utils'
|
||||
|
||||
#=====================================================================
|
||||
|
||||
# Creates a new token.
|
||||
#
|
||||
# TODO: Token permission.
|
||||
exports.create = $coroutine ->
|
||||
userId = @session.get 'user_id'
|
||||
|
||||
# The user MUST be signed in and not with a token
|
||||
@throw 'UNAUTHORIZED' if not userId? or @session.has 'token_id'
|
||||
|
||||
# Creates the token.
|
||||
token = $wait @tokens.generate userId
|
||||
|
||||
return token.get('id')
|
||||
|
||||
#---------------------------------------------------------------------
|
||||
|
||||
# Deletes a token.
|
||||
delete_ = $coroutine ({token: tokenId}) ->
|
||||
# Gets the token.
|
||||
token = $wait @tokens.first tokenId
|
||||
@throw 'NO_SUCH_OBJECT' unless token?
|
||||
|
||||
# Deletes the token.
|
||||
$wait @tokens.remove tokenId
|
||||
|
||||
return true
|
||||
|
||||
delete_.permission = 'admin'
|
||||
|
||||
delete_.params = {
|
||||
token: { type: 'string' }
|
||||
}
|
||||
|
||||
exports.delete = delete_
|
34
src/api/token.js
Normal file
@ -0,0 +1,34 @@
import {Unauthorized} from '../api-errors'

// ===================================================================

// TODO: Token permission.
export async function create () {
  // The user MUST be signed in with credentials, not with a token.
  if (this.session.has('token_id')) {
    throw new Unauthorized()
  }

  const userId = this.session.get('user_id')
  return (await this.createAuthenticationToken({userId})).id
}

create.description = 'create a new authentication token'

create.permission = '' // any signed-in user

// -------------------------------------------------------------------

async function delete_ ({token: id}) {
  await this.deleteAuthenticationToken(id)
}

export {delete_ as delete}

delete_.description = 'delete an existing authentication token'

delete_.permission = 'admin'

delete_.params = {
  token: { type: 'string' }
}
|
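The intended token workflow, as far as this module shows it: a client signed in with credentials calls token.create to obtain a persistent token it can present on later connections, and token.delete revokes it. A hypothetical client-side exchange (the JSON-RPC client object `rpc` and its call method are assumptions, not part of this commit):

// Sketch only: assumes a generic JSON-RPC client.
const tokenId = await rpc.call('token.create', {})
// ...store tokenId, then later revoke it:
await rpc.call('token.delete', {token: tokenId})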
@ -1,106 +0,0 @@
|
||||
{$coroutine, $wait} = require '../fibers-utils'
|
||||
|
||||
#=====================================================================
|
||||
|
||||
# Creates a new user.
|
||||
exports.create = $coroutine ({email, password, permission}) ->
|
||||
# Creates the user.
|
||||
user = $wait @users.create email, password, permission
|
||||
|
||||
return user.get('id')
|
||||
exports.create.permission = 'admin'
|
||||
exports.create.params = {
|
||||
email: { type: 'string' }
|
||||
password: { type: 'string' }
|
||||
permission: { type: 'string', optional: true}
|
||||
}
|
||||
|
||||
# Deletes an existing user.
|
||||
#
|
||||
# FIXME: a user should not be able to delete itself.
|
||||
exports.delete = $coroutine ({id}) ->
|
||||
# The user cannot delete himself.
|
||||
@throw 'INVALID_PARAMS' if id is @session.get 'user_id'
|
||||
|
||||
# Throws an error if the user did not exist.
|
||||
@throw 'NO_SUCH_OBJECT' unless $wait @users.remove id
|
||||
|
||||
return true
|
||||
exports.delete.permission = 'admin'
|
||||
exports.delete.params = {
|
||||
id: { type: 'string' }
|
||||
}
|
||||
|
||||
# Changes the password of the current user.
|
||||
exports.changePassword = $coroutine ({old, new: newP}) ->
|
||||
# Gets the current user (which MUST exist).
|
||||
user = $wait @users.first @session.get 'user_id'
|
||||
|
||||
# Checks its old password.
|
||||
@throw 'INVALID_CREDENTIAL' unless $wait user.checkPassword old
|
||||
|
||||
# Sets the new password.
|
||||
$wait user.setPassword newP
|
||||
|
||||
# Updates the user.
|
||||
$wait @users.update user
|
||||
|
||||
return true
|
||||
exports.changePassword.permission = '' # Signed in.
|
||||
exports.changePassword.params = {
|
||||
old: { type: 'string' }
|
||||
new: { type: 'string' }
|
||||
}
|
||||
|
||||
# Returns the user with a given identifier.
|
||||
exports.get = $coroutine ({id}) ->
|
||||
# Only an administrator can see another user.
|
||||
@checkPermission 'admin' unless @session.get 'user_id' is id
|
||||
|
||||
# Retrieves the user.
|
||||
user = $wait @users.first id
|
||||
|
||||
# Throws an error if it did not exist.
|
||||
@throw 'NO_SUCH_OBJECT' unless user
|
||||
|
||||
return @getUserPublicProperties user
|
||||
exports.get.params = {
|
||||
id: { type: 'string' }
|
||||
}
|
||||
|
||||
# Returns all users.
|
||||
exports.getAll = $coroutine ->
|
||||
# Retrieves the users.
|
||||
users = $wait @users.get()
|
||||
|
||||
# Filters out private properties.
|
||||
for user, i in users
|
||||
users[i] = @getUserPublicProperties user
|
||||
|
||||
return users
|
||||
exports.getAll.permission = 'admin'
|
||||
|
||||
# Changes the properties of an existing user.
|
||||
exports.set = $coroutine ({id, email, password, permission}) ->
|
||||
# Retrieves the user.
|
||||
user = $wait @users.first id
|
||||
|
||||
# Throws an error if it did not exist.
|
||||
@throw 'NO_SUCH_OBJECT' unless user
|
||||
|
||||
# Updates the provided properties.
|
||||
user.set {email} if email?
|
||||
user.set {permission} if permission?
|
||||
$wait user.setPassword password if password?
|
||||
|
||||
# Updates the user.
|
||||
$wait @users.update user
|
||||
|
||||
return true
|
||||
exports.set.permission = 'admin'
|
||||
exports.set.params = {
|
||||
id: { type: 'string' }
|
||||
email: { type: 'string', optional: true }
|
||||
password: { type: 'string', optional: true }
|
||||
permission: { type: 'string', optional: true }
|
||||
}
|
74
src/api/user.js
Normal file
@ -0,0 +1,74 @@
|
||||
import map from 'lodash.map'
|
||||
|
||||
import {InvalidParameters} from '../api-errors'
|
||||
|
||||
// ===================================================================
|
||||
|
||||
export async function create ({email, password, permission}) {
|
||||
return (await this.createUser({email, password, permission})).id
|
||||
}
|
||||
|
||||
create.description = 'creates a new user'
|
||||
|
||||
create.permission = 'admin'
|
||||
|
||||
create.params = {
|
||||
email: { type: 'string' },
|
||||
password: { type: 'string' },
|
||||
permission: { type: 'string', optional: true}
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
// Deletes an existing user.
|
||||
async function delete_ ({id}) {
|
||||
if (id === this.session.get('user_id')) {
|
||||
throw new InvalidParameters('a user cannot delete itself')
|
||||
}
|
||||
|
||||
await this.deleteUser(id)
|
||||
}
|
||||
|
||||
// delete is not a valid identifier.
|
||||
export {delete_ as delete}
|
||||
|
||||
delete_.description = 'deletes an existing user'
|
||||
|
||||
delete_.permission = 'admin'
|
||||
|
||||
delete_.params = {
|
||||
id: { type: 'string' }
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
// TODO: remove this function when users are integrated to the main
|
||||
// collection.
|
||||
export async function getAll () {
|
||||
// Retrieves the users.
|
||||
const users = await this._users.get()
|
||||
|
||||
// Filters out private properties.
|
||||
return map(users, this.getUserPublicProperties)
|
||||
}
|
||||
|
||||
getAll.description = 'returns all the existing users'
|
||||
|
||||
getAll.permission = 'admin'
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
export async function set ({id, email, password, permission}) {
|
||||
await this.updateUser(id, {email, password, permission})
|
||||
}
|
||||
|
||||
set.description = 'changes the properties of an existing user'
|
||||
|
||||
set.permission = 'admin'
|
||||
|
||||
set.params = {
|
||||
id: { type: 'string' },
|
||||
email: { type: 'string', optional: true },
|
||||
password: { type: 'string', optional: true },
|
||||
permission: { type: 'string', optional: true }
|
||||
}
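The delete_/export {delete_ as delete} pattern used here and in token.js exists because `delete` is a reserved word in JavaScript: it cannot name a function declaration, but it is a valid export name. A stripped-down illustration (sketch only, not part of this commit):

// `function delete (...) {}` would be a SyntaxError, so declare the
// function under another name and rename it on export.
async function delete_ ({id}) {
  // ...
}
export {delete_ as delete}
// importers can rename it back: import {delete as deleteUser} from './user'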
|
@ -17,7 +17,7 @@ delete_.params = {
|
||||
}
|
||||
|
||||
delete_.resolve = {
|
||||
vbd: ['id', 'VBD'],
|
||||
vbd: ['id', 'VBD', 'administrate'],
|
||||
}
|
||||
|
||||
exports.delete = delete_
|
||||
@ -37,7 +37,7 @@ disconnect.params = {
|
||||
}
|
||||
|
||||
disconnect.resolve = {
|
||||
vbd: ['id', 'VBD'],
|
||||
vbd: ['id', 'VBD', 'administrate'],
|
||||
}
|
||||
|
||||
exports.disconnect = disconnect
|
||||
@ -57,7 +57,7 @@ connect.params = {
|
||||
}
|
||||
|
||||
connect.resolve = {
|
||||
vbd: ['id', 'VBD'],
|
||||
vbd: ['id', 'VBD', 'administrate'],
|
||||
}
|
||||
|
||||
exports.connect = connect
|
||||
@ -83,7 +83,7 @@ set.params = {
|
||||
}
|
||||
|
||||
set.resolve = {
|
||||
vbd: ['id', 'VBD'],
|
||||
vbd: ['id', 'VBD', 'administrate'],
|
||||
}
|
||||
|
||||
exports.set = set
|
||||
|
@ -21,7 +21,7 @@ delete_.params = {
|
||||
}
|
||||
|
||||
delete_.resolve = {
|
||||
vdi: ['id', 'VDI'],
|
||||
vdi: ['id', 'VDI', 'administrate'],
|
||||
}
|
||||
|
||||
exports.delete = delete_
|
||||
@ -72,7 +72,7 @@ set.params = {
|
||||
}
|
||||
|
||||
set.resolve = {
|
||||
vdi: ['id', 'VDI'],
|
||||
vdi: ['id', 'VDI', 'administrate'],
|
||||
}
|
||||
|
||||
exports.set = set
|
||||
@ -93,8 +93,8 @@ migrate.params = {
|
||||
}
|
||||
|
||||
migrate.resolve = {
|
||||
vdi: ['id', 'VDI'],
|
||||
sr: ['sr_id', 'SR'],
|
||||
vdi: ['id', 'VDI', 'administrate'],
|
||||
sr: ['sr_id', 'SR', 'administrate'],
|
||||
}
|
||||
|
||||
exports.migrate = migrate
|
||||
|
@ -15,7 +15,7 @@ delete_.params = {
|
||||
}
|
||||
|
||||
delete_.resolve = {
|
||||
vif: ['id', 'VIF']
|
||||
vif: ['id', 'VIF', 'administrate']
|
||||
}
|
||||
|
||||
exports.delete = delete_
|
||||
@ -35,7 +35,7 @@ disconnect.params = {
|
||||
}
|
||||
|
||||
disconnect.resolve = {
|
||||
vif: ['id', 'VIF']
|
||||
vif: ['id', 'VIF', 'operate']
|
||||
}
|
||||
|
||||
exports.disconnect = disconnect
|
||||
@ -55,7 +55,7 @@ connect.params = {
|
||||
}
|
||||
|
||||
connect.resolve = {
|
||||
vif: ['id', 'VIF']
|
||||
vif: ['id', 'VIF', 'operate']
|
||||
}
|
||||
|
||||
exports.connect = connect
|
||||
|
@ -4,11 +4,16 @@ $result = require 'lodash.result'
|
||||
$forEach = require 'lodash.foreach'
|
||||
$isArray = require 'lodash.isarray'
|
||||
$findIndex = require 'lodash.findindex'
|
||||
startsWith = require 'lodash.startswith'
|
||||
endsWith = require 'lodash.endswith'
|
||||
$request = require('bluebird').promisify(require('request'))
|
||||
|
||||
{$coroutine, $wait} = require '../fibers-utils'
|
||||
{formatXml: $js2xml} = require '../utils'
|
||||
{parseXml} = require '../utils'
|
||||
{
|
||||
formatXml: $js2xml,
|
||||
parseXml,
|
||||
pFinally
|
||||
} = require '../utils'
|
||||
|
||||
$isVMRunning = do ->
|
||||
runningStates = {
|
||||
@ -229,35 +234,15 @@ create.params = {
|
||||
}
|
||||
|
||||
create.resolve = {
|
||||
template: ['template', 'VM-template'],
|
||||
template: ['template', 'VM-template', 'administrate'],
|
||||
}
|
||||
|
||||
exports.create = create
|
||||
|
||||
#---------------------------------------------------------------------
|
||||
|
||||
delete_ = $coroutine ({vm, delete_disks: deleteDisks}) ->
|
||||
if $isVMRunning vm
|
||||
@throw 'INVALID_PARAMS', 'The VM can only be deleted when halted'
|
||||
|
||||
xapi = @getXAPI vm
|
||||
|
||||
if deleteDisks
|
||||
$forEach vm.$VBDs, (ref) =>
|
||||
try
|
||||
VBD = @getObject ref, 'VBD'
|
||||
catch e
|
||||
return
|
||||
|
||||
return if VBD.read_only or not VBD.VDI?
|
||||
|
||||
$wait xapi.call 'VDI.destroy', VBD.VDI
|
||||
|
||||
return
|
||||
|
||||
$wait xapi.call 'VM.destroy', vm.ref
|
||||
|
||||
return true
|
||||
delete_ = ({vm, delete_disks: deleteDisks}) ->
|
||||
return @getXAPI(vm).deleteVm(vm.id, deleteDisks)
|
||||
|
||||
delete_.params = {
|
||||
id: { type: 'string' }
|
||||
@ -269,7 +254,7 @@ delete_.params = {
|
||||
}
|
||||
delete_.permission = 'admin'
|
||||
delete_.resolve = {
|
||||
vm: ['id', ['VM', 'VM-snapshot']]
|
||||
vm: ['id', ['VM', 'VM-snapshot'], 'administrate']
|
||||
}
|
||||
|
||||
exports.delete = delete_
|
||||
@ -289,7 +274,9 @@ ejectCd = $coroutine ({vm}) ->
|
||||
|
||||
if cdDriveRef
|
||||
$wait xapi.call 'VBD.eject', cdDriveRef
|
||||
$wait xapi.call 'VBD.destroy', cdDriveRef
|
||||
|
||||
# Silently attempts to destroy the VBD.
|
||||
xapi.call('VBD.destroy', cdDriveRef).catch(->)
|
||||
|
||||
return true
|
||||
|
||||
@ -298,7 +285,7 @@ ejectCd.params = {
|
||||
}
|
||||
|
||||
ejectCd.resolve = {
|
||||
vm: ['id', 'VM']
|
||||
vm: ['id', 'VM', 'operate']
|
||||
}
|
||||
ejectCd.permission = 'admin'
|
||||
exports.ejectCd = ejectCd
|
||||
@ -349,8 +336,8 @@ insertCd.params = {
|
||||
}
|
||||
|
||||
insertCd.resolve = {
|
||||
vm: ['id', 'VM'],
|
||||
vdi: ['cd_id', 'VDI'],
|
||||
vm: ['id', 'VM', 'operate'],
|
||||
vdi: ['cd_id', 'VDI', 'operate'],
|
||||
}
|
||||
insertCd.permission = 'admin'
|
||||
exports.insertCd = insertCd
|
||||
@ -377,7 +364,7 @@ migrate.params = {
|
||||
|
||||
migrate.resolve = {
|
||||
vm: ['id', 'VM']
|
||||
host: ['host_id', 'host']
|
||||
host: ['host_id', 'host', 'administrate']
|
||||
}
|
||||
|
||||
exports.migrate = migrate
|
||||
@ -465,11 +452,11 @@ migratePool.params = {
|
||||
}
|
||||
|
||||
migratePool.resolve = {
|
||||
vm: ['id', 'VM'],
|
||||
host: ['target_host_id', 'host'],
|
||||
sr: ['target_sr_id', 'SR'],
|
||||
network: ['target_network_id', 'network'],
|
||||
migrationNetwork: ['migration_network_id', 'network'],
|
||||
vm: ['id', 'VM', 'administrate'],
|
||||
host: ['target_host_id', 'host', 'administrate'],
|
||||
sr: ['target_sr_id', 'SR', 'administrate'],
|
||||
network: ['target_network_id', 'network', 'administrate'],
|
||||
migrationNetwork: ['migration_network_id', 'network', 'administrate'],
|
||||
}
|
||||
|
||||
# TODO: camel case.
|
||||
@ -578,7 +565,7 @@ set.params = {
|
||||
}
|
||||
set.permission = 'admin'
|
||||
set.resolve = {
|
||||
VM: ['id', ['VM', 'VM-snapshot']]
|
||||
VM: ['id', ['VM', 'VM-snapshot'], 'administrate']
|
||||
}
|
||||
|
||||
exports.set = set
|
||||
@ -601,7 +588,7 @@ restart.params = {
|
||||
}
|
||||
|
||||
restart.resolve = {
|
||||
vm: ['id', 'VM']
|
||||
vm: ['id', 'VM', 'operate']
|
||||
}
|
||||
|
||||
exports.restart = restart
|
||||
@ -628,7 +615,7 @@ clone.params = {
|
||||
|
||||
clone.resolve = {
|
||||
# TODO: is it necessary for snapshots?
|
||||
vm: ['id', 'VM']
|
||||
vm: ['id', 'VM', 'administrate']
|
||||
}
|
||||
|
||||
exports.clone = clone
|
||||
@ -646,7 +633,7 @@ convert.params = {
|
||||
}
|
||||
|
||||
convert.resolve = {
|
||||
vm: ['id', ['VM', 'VM-snapshot']]
|
||||
vm: ['id', ['VM', 'VM-snapshot'], 'administrate']
|
||||
}
|
||||
convert.permission = 'admin'
|
||||
exports.convert = convert
|
||||
@ -654,7 +641,8 @@ exports.convert = convert
|
||||
#---------------------------------------------------------------------
|
||||
|
||||
snapshot = $coroutine ({vm, name}) ->
|
||||
return $wait @getXAPI(vm).call 'VM.snapshot', vm.ref, name
|
||||
snapshot = $wait @getXAPI(vm).snapshotVm(vm.ref, name)
|
||||
return snapshot.$id
|
||||
|
||||
snapshot.params = {
|
||||
id: { type: 'string' }
|
||||
@ -662,7 +650,7 @@ snapshot.params = {
|
||||
}
|
||||
|
||||
snapshot.resolve = {
|
||||
vm: ['id', 'VM']
|
||||
vm: ['id', 'VM', 'administrate']
|
||||
}
|
||||
snapshot.permission = 'admin'
|
||||
exports.snapshot = snapshot
|
||||
@ -683,7 +671,7 @@ start.params = {
|
||||
}
|
||||
|
||||
start.resolve = {
|
||||
vm: ['id', 'VM']
|
||||
vm: ['id', 'VM', 'operate']
|
||||
}
|
||||
|
||||
exports.start = start
|
||||
@ -706,7 +694,7 @@ stop = $coroutine ({vm, force}) ->
|
||||
try
|
||||
$wait xapi.call 'VM.clean_shutdown', vm.ref
|
||||
catch error
|
||||
if error[0] is 'VM_MISSING_PV_DRIVERS'
|
||||
if error.code is 'VM_MISSING_PV_DRIVERS'
|
||||
# TODO: Improve reporting: this message is unclear.
|
||||
@throw 'INVALID_PARAMS'
|
||||
else
|
||||
@ -720,7 +708,7 @@ stop.params = {
|
||||
}
|
||||
|
||||
stop.resolve = {
|
||||
vm: ['id', 'VM']
|
||||
vm: ['id', 'VM', 'operate']
|
||||
}
|
||||
|
||||
exports.stop = stop
|
||||
@ -737,7 +725,7 @@ suspend.params = {
|
||||
}
|
||||
|
||||
suspend.resolve = {
|
||||
vm: ['id', 'VM']
|
||||
vm: ['id', 'VM', 'operate']
|
||||
}
|
||||
suspend.permission = 'admin'
|
||||
exports.suspend = suspend
|
||||
@ -759,7 +747,7 @@ resume.params = {
|
||||
}
|
||||
|
||||
resume.resolve = {
|
||||
vm: ['id', 'VM']
|
||||
vm: ['id', 'VM', 'operate']
|
||||
}
|
||||
resume.permission = 'admin'
|
||||
exports.resume = resume
|
||||
@ -778,68 +766,31 @@ revert.params = {
|
||||
}
|
||||
|
||||
revert.resolve = {
|
||||
snapshot: ['id', 'VM-snapshot']
|
||||
snapshot: ['id', 'VM-snapshot', 'administrate']
|
||||
}
|
||||
revert.permission = 'admin'
|
||||
exports.revert = revert
|
||||
|
||||
#---------------------------------------------------------------------
|
||||
|
||||
handleExport = (req, res, {stream, response: upstream}) ->
|
||||
res.writeHead(
|
||||
upstream.statusCode,
|
||||
upstream.statusMessage ? '',
|
||||
upstream.headers
|
||||
)
|
||||
stream.pipe(res)
|
||||
return
|
||||
|
||||
# TODO: integrate in xapi.js
|
||||
export_ = $coroutine ({vm, compress}) ->
|
||||
compress ?= true
|
||||
|
||||
xapi = @getXAPI vm
|
||||
|
||||
# if the VM is running, we can't export it directly
|
||||
# that's why we export the snapshot
|
||||
exportRef = if vm.power_state is 'Running'
|
||||
$debug 'VM is running, creating temp snapshot...'
|
||||
snapshotRef = $wait xapi.call 'VM.snapshot', vm.ref, vm.name_label
|
||||
# convert the template to a VM
|
||||
$wait xapi.call 'VM.set_is_a_template', snapshotRef, false
|
||||
|
||||
snapshotRef
|
||||
else
|
||||
vm.ref
|
||||
|
||||
host = @getObject vm.$container
|
||||
do (type = host.type) =>
|
||||
if type is 'pool'
|
||||
host = @getObject host.master, 'host'
|
||||
else unless type is 'host'
|
||||
throw new Error "unexpected type: got #{type} instead of host"
|
||||
|
||||
taskRef = $wait xapi.call 'task.create', 'VM export via Xen Orchestra', 'Export VM '+vm.name_label
|
||||
@watchTask taskRef
|
||||
.then (result) ->
|
||||
$debug 'export succeeded'
|
||||
return
|
||||
.catch (error) ->
|
||||
$debug 'export failed: %j', error
|
||||
return
|
||||
.finally $coroutine =>
|
||||
xapi.call 'task.destroy', taskRef
|
||||
|
||||
if snapshotRef?
|
||||
$debug 'deleting temp snapshot...'
|
||||
$wait exports.delete.call this, id: snapshotRef, delete_disks: true
|
||||
|
||||
return
|
||||
|
||||
url = $wait @registerProxyRequest {
|
||||
method: 'get'
|
||||
hostname: host.address
|
||||
pathname: '/export/'
|
||||
query: {
|
||||
session_id: xapi.sessionId
|
||||
ref: exportRef
|
||||
task_id: taskRef
|
||||
use_compression: if compress then 'true' else false
|
||||
}
|
||||
}
|
||||
stream = $wait @getXAPI(vm).exportVm(vm.id, compress ? true)
|
||||
|
||||
return {
|
||||
$getFrom: url
|
||||
$getFrom: $wait @registerHttpRequest(handleExport, {
|
||||
stream,
|
||||
response: $wait stream.response
|
||||
})
|
||||
}
|
||||
|
||||
export_.params = {
|
||||
@ -848,7 +799,7 @@ export_.params = {
|
||||
}
|
||||
|
||||
export_.resolve = {
|
||||
vm: ['vm', ['VM', 'VM-snapshot']],
|
||||
vm: ['vm', ['VM', 'VM-snapshot'], 'administrate'],
|
||||
}
|
||||
export_.permission = 'admin'
|
||||
exports.export = export_;
|
||||
@ -881,51 +832,33 @@ import_.params = {
|
||||
}
|
||||
|
||||
import_.resolve = {
|
||||
host: ['host', 'host']
|
||||
host: ['host', 'host', 'administrate']
|
||||
}
|
||||
import_.permission = 'admin'
|
||||
exports.import = import_
|
||||
|
||||
#---------------------------------------------------------------------
|
||||
|
||||
# FIXME: position should be optional and default to last.
|
||||
#
|
||||
# FIXME: if position is used, all other disks after this position
|
||||
# should be shifted.
|
||||
attachDisk = $coroutine ({vm, vdi, position, mode, bootable}) ->
|
||||
xapi = @getXAPI vm
|
||||
|
||||
VBD_ref = $wait xapi.call 'VBD.create', {
|
||||
VM: vm.ref
|
||||
VDI: vdi.ref
|
||||
mode: mode
|
||||
type: 'Disk'
|
||||
userdevice: position
|
||||
bootable: bootable ? false
|
||||
empty: false
|
||||
other_config: {}
|
||||
qos_algorithm_type: ''
|
||||
qos_algorithm_params: {}
|
||||
}
|
||||
|
||||
$wait xapi.call 'VBD.plug', VBD_ref
|
||||
|
||||
return true
|
||||
$wait @getXAPI(vm).attachVdiToVm(vdi.id, vm.id, {bootable, mode, position})
|
||||
return
|
||||
|
||||
attachDisk.params = {
|
||||
bootable: {
|
||||
type: 'boolean'
|
||||
optional: true
|
||||
}
|
||||
mode: { type: 'string' }
|
||||
position: { type: 'string' }
|
||||
mode: { type: 'string', optional: true }
|
||||
position: { type: 'string', optional: true }
|
||||
vdi: { type: 'string' }
|
||||
vm: { type: 'string' }
|
||||
}
|
||||
|
||||
attachDisk.resolve = {
|
||||
vm: ['vm', 'VM'],
|
||||
vdi: ['vdi', 'VDI'],
|
||||
vm: ['vm', 'VM', 'administrate'],
|
||||
vdi: ['vdi', 'VDI', 'administrate'],
|
||||
}
|
||||
attachDisk.permission = 'admin'
|
||||
exports.attachDisk = attachDisk
|
||||
@ -935,21 +868,13 @@ exports.attachDisk = attachDisk
|
||||
# FIXME: position should be optional and default to last.
|
||||
|
||||
createInterface = $coroutine ({vm, network, position, mtu, mac}) ->
|
||||
xapi = @getXAPI vm
|
||||
|
||||
VIF_ref = $wait xapi.call 'VIF.create', {
|
||||
VM: vm.ref
|
||||
network: network.ref
|
||||
device: position
|
||||
MTU: mtu ? '1500'
|
||||
MAC: mac ? ''
|
||||
other_config: {}
|
||||
qos_algorithm_type: ''
|
||||
qos_algorithm_params: {}
|
||||
}
|
||||
|
||||
return $wait(xapi.call( 'VIF.get_record', VIF_ref)).uuid
|
||||
vif = $wait @getXAPI(vm).createVirtualInterface(vm.id, network.id, {
|
||||
mac,
|
||||
mtu,
|
||||
position
|
||||
})
|
||||
|
||||
return vif.$id
|
||||
|
||||
createInterface.params = {
|
||||
vm: { type: 'string' }
|
||||
@ -960,8 +885,8 @@ createInterface.params = {
|
||||
}
|
||||
|
||||
createInterface.resolve = {
|
||||
vm: ['vm', 'VM'],
|
||||
network: ['network', 'network'],
|
||||
vm: ['vm', 'VM', 'administrate'],
|
||||
network: ['network', 'network', 'administrate'],
|
||||
}
|
||||
createInterface.permission = 'admin'
|
||||
exports.createInterface = createInterface
|
||||
@ -982,7 +907,7 @@ attachPci.params = {
|
||||
}
|
||||
|
||||
attachPci.resolve = {
|
||||
vm: ['vm', 'VM'],
|
||||
vm: ['vm', 'VM', 'administrate'],
|
||||
}
|
||||
attachPci.permission = 'admin'
|
||||
exports.attachPci = attachPci
|
||||
@ -1002,11 +927,10 @@ detachPci.params = {
|
||||
}
|
||||
|
||||
detachPci.resolve = {
|
||||
vm: ['vm', 'VM'],
|
||||
vm: ['vm', 'VM', 'administrate'],
|
||||
}
|
||||
detachPci.permission = 'admin'
|
||||
exports.detachPci = detachPci
|
||||
|
||||
#---------------------------------------------------------------------
|
||||
|
||||
|
||||
@ -1024,7 +948,7 @@ stats = $coroutine ({vm}) ->
|
||||
[response, body] = $wait $request {
|
||||
method: 'get'
|
||||
rejectUnauthorized: false
|
||||
url: 'https://'+host.address+'/vm_rrd?session_id='+xapi.sessionId+'&uuid='+vm.UUID
|
||||
url: 'https://'+host.address+'/vm_rrd?session_id='+xapi.sessionId+'&uuid='+vm.id
|
||||
}
|
||||
|
||||
if response.statusCode isnt 200
|
||||
@ -1033,22 +957,30 @@ stats = $coroutine ({vm}) ->
|
||||
json = parseXml(body)
|
||||
# Find index of needed objects for getting their values after
|
||||
cpusIndexes = []
|
||||
index = 0
|
||||
while (pos = $findIndex(json.rrd.ds, 'name', 'cpu' + index++)) isnt -1
|
||||
cpusIndexes.push(pos)
|
||||
vifsIndexes = []
|
||||
index = 0
|
||||
while (pos = $findIndex(json.rrd.ds, 'name', 'vif_' + index + '_rx')) isnt -1
|
||||
vifsIndexes.push(pos)
|
||||
vifsIndexes.push($findIndex(json.rrd.ds, 'name', 'vif_' + (index++) + '_tx'))
|
||||
xvdsIndexes = []
|
||||
index = 97 # Starting to browse ascii table from 'a' to 'z' (122)
|
||||
while index <= 122 and (pos = $findIndex(json.rrd.ds, 'name', 'vbd_xvd' + String.fromCharCode(index) + '_read')) isnt -1
|
||||
xvdsIndexes.push(pos)
|
||||
xvdsIndexes.push($findIndex(json.rrd.ds, 'name', 'vbd_xvd' + String.fromCharCode(index++) + '_write'))
|
||||
memoryFreeIndex = []
|
||||
memoryIndex = []
|
||||
index = 0
|
||||
|
||||
memoryFreeIndex = $findIndex(json.rrd.ds, 'name': 'memory_internal_free')
|
||||
memoryIndex = $findIndex(json.rrd.ds, 'name': 'memory')
|
||||
$forEach(json.rrd.ds, (value, i) ->
|
||||
if /^cpu[0-9]+$/.test(value.name)
|
||||
cpusIndexes.push(i)
|
||||
else if startsWith(value.name, 'vif_') && endsWith(value.name, '_tx')
|
||||
vifsIndexes.push(i)
|
||||
else if startsWith(value.name, 'vif_') && endsWith(value.name, '_rx')
|
||||
vifsIndexes.push(i)
|
||||
else if startsWith(value.name, 'vbd_xvd') && endsWith(value.name, '_write', 14)
|
||||
xvdsIndexes.push(i)
|
||||
else if startsWith(value.name, 'vbd_xvd') && endsWith(value.name, '_read', 13)
|
||||
xvdsIndexes.push(i)
|
||||
else if startsWith(value.name, 'memory_internal_free')
|
||||
memoryFreeIndex.push(i)
|
||||
else if endsWith(value.name, 'memory')
|
||||
memoryIndex.push(i)
|
||||
|
||||
return
|
||||
)
|
||||
|
||||
memoryFree = []
|
||||
memoryUsed = []
|
||||
@ -1099,7 +1031,31 @@ stats.params = {
|
||||
}
|
||||
|
||||
stats.resolve = {
|
||||
vm: ['id', ['VM', 'VM-snapshot']],
|
||||
vm: ['id', ['VM', 'VM-snapshot'], 'view'],
|
||||
}
|
||||
|
||||
exports.stats = stats;
|
||||
|
||||
#---------------------------------------------------------------------
|
||||
|
||||
bootOrder = $coroutine ({vm, order}) ->
|
||||
xapi = @getXAPI vm
|
||||
|
||||
order = {order: order}
|
||||
|
||||
$wait xapi.call 'VM.set_HVM_boot_params', vm.ref, order
|
||||
|
||||
return true
|
||||
|
||||
|
||||
bootOrder.params = {
|
||||
vm: { type: 'string' },
|
||||
order: { type: 'string' }
|
||||
}
|
||||
|
||||
bootOrder.resolve = {
|
||||
vm: ['vm', 'VM', 'operate'],
|
||||
}
|
||||
bootOrder.permission = 'admin'
|
||||
exports.bootOrder = bootOrder
|
||||
#---------------------------------------------------------------------
|
||||
|
@ -99,7 +99,7 @@ export default class Collection extends EventEmitter {
|
||||
}
|
||||
|
||||
update (models) {
|
||||
var array = isArray(models)
|
||||
const array = isArray(models)
|
||||
if (!isArray(models)) {
|
||||
models = [models]
|
||||
}
|
||||
@ -120,7 +120,7 @@ export default class Collection extends EventEmitter {
|
||||
throw new Error('a model without an id cannot be updated')
|
||||
}
|
||||
|
||||
var error = model.validate()
|
||||
const error = model.validate()
|
||||
if (error !== undefined) {
|
||||
// TODO: Better system inspired by Backbone.js.
|
||||
throw error
|
||||
|
@ -63,12 +63,12 @@ export default class Redis extends Collection {
|
||||
// TODO: remove “replace” which is a temporary measure, implement
|
||||
// “set()” instead.
|
||||
|
||||
const {indexes, prefix, redis} = this
|
||||
const {indexes, prefix, redis, idPrefix = ''} = this
|
||||
|
||||
return Bluebird.map(models, coroutine(function * (model) {
|
||||
// Generate a new identifier if necessary.
|
||||
if (model.id === undefined) {
|
||||
model.id = String(yield redis.incr(prefix + '_id'))
|
||||
model.id = idPrefix + String(yield redis.incr(prefix + '_id'))
|
||||
}
|
||||
|
||||
const success = yield redis.sadd(prefix + '_ids', model.id)
|
||||
|
@ -1,52 +1,29 @@
|
||||
'use strict'
|
||||
import {EventEmitter} from 'events'
|
||||
|
||||
// ===================================================================
|
||||
|
||||
var EventEmitter = require('events').EventEmitter
|
||||
var inherits = require('util').inherits
|
||||
|
||||
var assign = require('lodash.assign')
|
||||
// const noop = () => {}
|
||||
|
||||
// ===================================================================
|
||||
|
||||
var has = Object.prototype.hasOwnProperty
|
||||
has = has.call.bind(has)
|
||||
export default class Connection extends EventEmitter {
|
||||
constructor () {
|
||||
super()
|
||||
|
||||
function noop () {}
|
||||
this._data = Object.create(null)
|
||||
}
|
||||
|
||||
// ===================================================================
|
||||
|
||||
function Connection (opts) {
|
||||
EventEmitter.call(this)
|
||||
|
||||
this.data = Object.create(null)
|
||||
|
||||
this._close = opts.close
|
||||
this.notify = opts.notify
|
||||
}
|
||||
inherits(Connection, EventEmitter)
|
||||
|
||||
assign(Connection.prototype, {
|
||||
// Close the connection.
|
||||
close: function () {
|
||||
close () {
|
||||
// Prevent errors when the connection is closed more than once.
|
||||
this.close = noop
|
||||
|
||||
this._close()
|
||||
// this.close = noop
|
||||
|
||||
this.emit('close')
|
||||
|
||||
// Releases values AMAP to ease the garbage collecting.
|
||||
for (var key in this) {
|
||||
if (key !== 'close' && has(this, key)) {
|
||||
delete this[key]
|
||||
}
|
||||
}
|
||||
},
|
||||
}
|
||||
|
||||
// Gets the value for this key.
|
||||
get: function (key, defaultValue) {
|
||||
var data = this.data
|
||||
get (key, defaultValue) {
|
||||
const {_data: data} = this
|
||||
|
||||
if (key in data) {
|
||||
return data[key]
|
||||
@ -57,23 +34,19 @@ assign(Connection.prototype, {
|
||||
}
|
||||
|
||||
throw new Error('no value for `' + key + '`')
|
||||
},
|
||||
}
|
||||
|
||||
// Checks whether there is a value for this key.
|
||||
has: function (key) {
|
||||
return key in this.data
|
||||
},
|
||||
has (key) {
|
||||
return key in this._data
|
||||
}
|
||||
|
||||
// Sets the value for this key.
|
||||
set: function (key, value) {
|
||||
this.data[key] = value
|
||||
},
|
||||
|
||||
unset: function (key) {
|
||||
delete this.data[key]
|
||||
set (key, value) {
|
||||
this._data[key] = value
|
||||
}
|
||||
})
|
||||
|
||||
// ===================================================================
|
||||
|
||||
module.exports = Connection
|
||||
unset (key) {
|
||||
delete this._data[key]
|
||||
}
|
||||
}
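After the rewrite, Connection is a plain EventEmitter subclass with a small per-connection key/value store. A usage sketch of the data API defined above (illustrative only; the default-value branch of get() lives in the unchanged lines of this hunk):

// Sketch only, not part of this commit.
const connection = new Connection()
connection.on('close', () => console.log('closed'))

connection.set('user_id', 'u123')
connection.has('user_id')   // → true
connection.get('user_id')   // → 'u123'
connection.unset('user_id')
connection.get('user_id')   // → throws: no value for `user_id`

connection.close()          // emits 'close'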
|
||||
|
87
src/decorators.js
Normal file
@ -0,0 +1,87 @@
import bind from 'lodash.bind'

// ===================================================================

const {defineProperty} = Object

// ===================================================================

// See: https://github.com/jayphelps/core-decorators.js#autobind
export function autobind (target, key, {
  configurable,
  enumerable,
  value: fn,
  writable
}) {
  return {
    configurable,
    enumerable,

    get () {
      const bounded = bind(fn, this)

      defineProperty(this, key, {
        configurable: true,
        enumerable: false,
        value: bounded,
        writable: true
      })

      return bounded
    },
    set (newValue) {
      if (this === target) {
        // New value directly set on the prototype.
        delete this[key]
        this[key] = newValue
      } else {
        // New value set on a child object.

        // Cannot use assignment because it will call the setter on
        // the prototype.
        defineProperty(this, key, {
          configurable: true,
          enumerable: true,
          value: newValue,
          writable: true
        })
      }
    }
  }
}

// -------------------------------------------------------------------

// Debounce decorator for methods.
//
// See: https://github.com/wycats/javascript-decorators
export const debounce = (duration) => (target, name, descriptor) => {
  const {value: fn} = descriptor

  // This symbol is used to store the related data directly on the
  // current object.
  const s = Symbol()

  function debounced () {
    let data = this[s] || (this[s] = {
      lastCall: 0,
      wrapper: null
    })

    const now = Date.now()
    if (now > data.lastCall + duration) {
      data.lastCall = now
      try {
        const result = fn.apply(this, arguments)
        data.wrapper = () => result
      } catch (error) {
        data.wrapper = () => { throw error }
      }
    }
    return data.wrapper()
  }
  debounced.reset = (obj) => { delete obj[s] }

  descriptor.value = debounced
  return descriptor
}
|
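A hedged usage sketch of the two decorators added above (decorator syntax assumes a transpiler with decorator support, as in the spec file below; class and method names here are illustrative only):

import {autobind, debounce} from './decorators'

// Sketch only, not part of this commit.
class Counter {
  constructor () {
    this.n = 0
  }

  @autobind
  increment () {
    return ++this.n       // `this` stays bound even when the method is detached
  }

  @debounce(5e3)
  expensive () {
    return Date.now()     // re-evaluated at most once per 5 s window
  }
}

const counter = new Counter()
const increment = counter.increment // detached reference, still bound
increment()                         // → 1
counter.expensive() === counter.expensive() // → true inside the same window
// counter.expensive.reset(counter) clears the cached value for this instance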
78
src/decorators.spec.js
Normal file
@ -0,0 +1,78 @@
|
||||
/* eslint-env mocha */
|
||||
|
||||
import {expect} from 'chai'
|
||||
|
||||
// ===================================================================
|
||||
|
||||
import {autobind, debounce} from './decorators'
|
||||
|
||||
// ===================================================================
|
||||
|
||||
describe('autobind', function () {
|
||||
class Foo {
|
||||
@autobind
|
||||
getFoo () {
|
||||
return this
|
||||
}
|
||||
}
|
||||
|
||||
it('returns a bound instance for a method', function () {
|
||||
const foo = new Foo()
|
||||
const {getFoo} = foo
|
||||
|
||||
expect(getFoo()).to.equal(foo)
|
||||
})
|
||||
|
||||
it('returns the same bound instance each time', function () {
|
||||
const foo = new Foo()
|
||||
|
||||
expect(foo.getFoo).to.equal(foo.getFoo)
|
||||
})
|
||||
|
||||
it('works with multiple instances of the same class', function () {
|
||||
const foo1 = new Foo()
|
||||
const foo2 = new Foo()
|
||||
|
||||
const {getFoo: getFoo1} = foo1
|
||||
const {getFoo: getFoo2} = foo2
|
||||
|
||||
expect(getFoo1()).to.equal(foo1)
|
||||
expect(getFoo2()).to.equal(foo2)
|
||||
})
|
||||
})
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
describe('debounce', function () {
|
||||
let i
|
||||
|
||||
class Foo {
|
||||
@debounce(1e1)
|
||||
foo () {
|
||||
++i
|
||||
}
|
||||
}
|
||||
|
||||
beforeEach(function () {
|
||||
i = 0
|
||||
})
|
||||
|
||||
it('works', function (done) {
|
||||
const foo = new Foo()
|
||||
|
||||
expect(i).to.equal(0)
|
||||
|
||||
foo.foo()
|
||||
expect(i).to.equal(1)
|
||||
|
||||
foo.foo()
|
||||
expect(i).to.equal(1)
|
||||
|
||||
setTimeout(function () {
|
||||
foo.foo()
|
||||
expect(i).to.equal(2)
|
||||
|
||||
done()
|
||||
}, 2e1)
|
||||
})
|
||||
})
|
@ -1,104 +0,0 @@
|
||||
Bluebird = require 'bluebird'
|
||||
Fiber = require 'fibers'
|
||||
forEach = require 'lodash.foreach'
|
||||
isArray = require 'lodash.isarray'
|
||||
isFunction = require 'lodash.isfunction'
|
||||
isObject = require 'lodash.isobject'
|
||||
|
||||
#=====================================================================
|
||||
|
||||
isPromise = (obj) -> obj? and isFunction obj.then
|
||||
|
||||
# The value is guarantee to resolve asynchronously.
|
||||
runAsync = (value, resolve, reject) ->
|
||||
if isPromise value
|
||||
return value.then resolve, reject
|
||||
|
||||
if isFunction value # Continuable
|
||||
handler = (error, result) ->
|
||||
if error?
|
||||
reject error
|
||||
else
|
||||
resolve result
|
||||
return
|
||||
|
||||
sync = true
|
||||
value (error, result) ->
|
||||
if sync
|
||||
process.nextTick -> handler error, result
|
||||
else
|
||||
handler error, result
|
||||
return
|
||||
sync = false
|
||||
return
|
||||
|
||||
unless isObject value
|
||||
return process.nextTick -> resolve value
|
||||
|
||||
left = 0
|
||||
results = if isArray value
|
||||
new Array value.length
|
||||
else
|
||||
Object.create null
|
||||
|
||||
forEach value, (value, index) ->
|
||||
++left
|
||||
runAsync(
|
||||
value
|
||||
(result) ->
|
||||
# Returns if already rejected.
|
||||
return unless results
|
||||
|
||||
results[index] = result
|
||||
resolve results unless --left
|
||||
(error) ->
|
||||
# Returns if already rejected.
|
||||
return unless results
|
||||
|
||||
# Frees the reference ASAP.
|
||||
results = null
|
||||
|
||||
reject error
|
||||
)
|
||||
return
|
||||
|
||||
if left is 0
|
||||
process.nextTick -> resolve value
|
||||
|
||||
#=====================================================================
|
||||
|
||||
# Makes a function run in its own fiber and returns a promise.
|
||||
coroutine = (fn) ->
|
||||
return (args...) ->
|
||||
return new Bluebird (resolve, reject) =>
|
||||
new Fiber(=>
|
||||
try
|
||||
resolve fn.apply this, args
|
||||
catch error
|
||||
reject error
|
||||
).run()
|
||||
return
|
||||
exports.coroutine = coroutine
|
||||
|
||||
# Waits for a promise or a continuable to end.
|
||||
#
|
||||
# If value is composed (array or map), every asynchronous value is
|
||||
# resolved before returning (parallelization).
|
||||
wait = (value) ->
|
||||
fiber = Fiber.current
|
||||
throw new Error 'not running in a fiber' unless fiber?
|
||||
|
||||
runAsync(
|
||||
value
|
||||
(value) -> fiber.run value
|
||||
(error) -> fiber.throwInto error
|
||||
)
|
||||
|
||||
return Fiber.yield()
|
||||
exports.wait = wait
|
||||
|
||||
#=====================================================================
|
||||
|
||||
# Compatibility.
|
||||
exports.$coroutine = coroutine
|
||||
exports.$wait = wait
|
115
src/fibers-utils.js
Normal file
@ -0,0 +1,115 @@
|
||||
import Bluebird from 'bluebird'
|
||||
import Fiber from 'fibers'
|
||||
import forEach from 'lodash.foreach'
|
||||
import isArray from 'lodash.isarray'
|
||||
import isFunction from 'lodash.isfunction'
|
||||
import isObject from 'lodash.isobject'
|
||||
|
||||
// ===================================================================
|
||||
|
||||
export const isPromise = (obj) => obj && isFunction(obj.then)
|
||||
|
||||
// The value is guaranteed to resolve asynchronously.
|
||||
const runAsync = (value, resolve, reject) => {
|
||||
if (isPromise(value)) {
|
||||
return value.then(resolve, reject)
|
||||
}
|
||||
|
||||
if (isFunction(value)) { // Continuable
|
||||
throw new Error('continuable are no longer supported')
|
||||
}
|
||||
|
||||
if (!isObject(value)) {
|
||||
return process.nextTick(() => {
|
||||
resolve(value)
|
||||
})
|
||||
}
|
||||
|
||||
let left = 0
|
||||
let results = isArray(value) ?
|
||||
new Array(value.length) :
|
||||
Object.create(null)
|
||||
|
||||
forEach(value, (value, index) => {
|
||||
++left
|
||||
runAsync(
|
||||
value,
|
||||
(result) => {
|
||||
// Returns if already rejected.
|
||||
if (!results) {
|
||||
return
|
||||
}
|
||||
|
||||
results[index] = result
|
||||
if (!--left) {
|
||||
resolve(results)
|
||||
}
|
||||
},
|
||||
(error) => {
|
||||
// Returns if already rejected.
|
||||
if (!results) {
|
||||
return
|
||||
}
|
||||
|
||||
// Frees the reference ASAP.
|
||||
results = null
|
||||
|
||||
reject(error)
|
||||
}
|
||||
)
|
||||
})
|
||||
|
||||
if (!left) {
|
||||
process.nextTick(() => {
|
||||
resolve(value)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
// ===================================================================
|
||||
|
||||
// Makes a function run in its own fiber and returns a promise.
|
||||
export function coroutine (fn) {
|
||||
return function (...args) {
|
||||
return new Bluebird((resolve, reject) => {
|
||||
new Fiber(() => {
|
||||
try {
|
||||
resolve(fn.apply(this, args))
|
||||
} catch (error) {
|
||||
reject(error)
|
||||
}
|
||||
}).run()
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
// Waits for a promise or a continuable to end.
|
||||
//
|
||||
// If value is composed (array or map), every asynchronous value is
|
||||
// resolved before returning (parallelization).
|
||||
export const wait = (value) => {
|
||||
const fiber = Fiber.current
|
||||
if (!fiber) {
|
||||
throw new Error('not running in a fiber')
|
||||
}
|
||||
|
||||
runAsync(
|
||||
value,
|
||||
(value) => {
|
||||
fiber.run(value)
|
||||
},
|
||||
(error) => {
|
||||
fiber.throwInto(error)
|
||||
}
|
||||
)
|
||||
|
||||
return Fiber.yield()
|
||||
}
|
||||
|
||||
// ===================================================================
|
||||
|
||||
// Compatibility.
|
||||
export {
|
||||
coroutine as $coroutine,
|
||||
wait as $wait
|
||||
}
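A short sketch of the rewritten fiber helpers (note that wait no longer accepts continuables, only promises, scalars, and arrays/maps of them):

import Bluebird from 'bluebird'
import {coroutine, wait} from './fibers-utils'

// Sketch only, not part of this commit.
const addAll = coroutine(function () {
  // Arrays (and maps) of promises are resolved in parallel before returning.
  const [a, b] = wait([Bluebird.resolve(1), Bluebird.resolve(2)])
  return a + b + wait(3) // scalars are forwarded as-is
})

addAll().then(sum => console.log(sum)) // 6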
|
@ -1,47 +1,46 @@
|
||||
'use strict'
|
||||
|
||||
// ===================================================================
|
||||
|
||||
/* eslint-env mocha */
|
||||
|
||||
var expect = require('chai').expect
|
||||
import {expect} from 'chai'
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
var Promise = require('bluebird')
|
||||
import Bluebird from 'bluebird'
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
var utils = require('./fibers-utils')
|
||||
var $coroutine = utils.$coroutine
|
||||
import {$coroutine, $wait} from './fibers-utils'
|
||||
|
||||
// Enable source maps support for traces.
|
||||
import sourceMapSupport from 'source-map-support'
|
||||
sourceMapSupport.install()
|
||||
|
||||
// ===================================================================
|
||||
|
||||
describe('$coroutine', function () {
|
||||
it('creates a on which returns promises', function () {
|
||||
var fn = $coroutine(function () {})
|
||||
it('creates a function which returns promises', function () {
|
||||
const fn = $coroutine(function () {})
|
||||
expect(fn().then).to.be.a('function')
|
||||
})
|
||||
|
||||
it('creates a function which runs in a new fiber', function () {
|
||||
var previous = require('fibers').current
|
||||
const previous = require('fibers').current
|
||||
|
||||
var fn = $coroutine(function () {
|
||||
var current = require('fibers').current
|
||||
const fn = $coroutine(function () {
|
||||
const current = require('fibers').current
|
||||
|
||||
expect(current).to.exist
|
||||
expect(current).to.not.equal(previous)
|
||||
})
|
||||
|
||||
fn()
|
||||
return fn()
|
||||
})
|
||||
|
||||
it('forwards all arguments (even this)', function () {
|
||||
var self = {}
|
||||
var arg1 = {}
|
||||
var arg2 = {}
|
||||
const self = {}
|
||||
const arg1 = {}
|
||||
const arg2 = {}
|
||||
|
||||
$coroutine(function (arg1_, arg2_) {
|
||||
return $coroutine(function (arg1_, arg2_) {
|
||||
expect(this).to.equal(self)
|
||||
expect(arg1_).to.equal(arg1)
|
||||
expect(arg2_).to.equal(arg2)
|
||||
@ -52,61 +51,28 @@ describe('$coroutine', function () {
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
describe('$wait', function () {
|
||||
var $wait = utils.$wait
|
||||
|
||||
it('waits for a promise', function (done) {
|
||||
$coroutine(function () {
|
||||
var value = {}
|
||||
var promise = Promise.cast(value)
|
||||
it('waits for a promise', function () {
|
||||
return $coroutine(function () {
|
||||
const value = {}
|
||||
const promise = Bluebird.resolve(value)
|
||||
|
||||
expect($wait(promise)).to.equal(value)
|
||||
|
||||
done()
|
||||
})()
|
||||
})
|
||||
|
||||
it('handles promise rejection', function (done) {
|
||||
$coroutine(function () {
|
||||
var promise = Promise.reject('an exception')
|
||||
it('handles promise rejection', function () {
|
||||
return $coroutine(function () {
|
||||
const promise = Bluebird.reject('an exception')
|
||||
|
||||
expect(function () {
|
||||
$wait(promise)
|
||||
}).to.throw('an exception')
|
||||
|
||||
done()
|
||||
})()
|
||||
})
|
||||
|
||||
it('waits for a continuable', function (done) {
|
||||
$coroutine(function () {
|
||||
var value = {}
|
||||
var continuable = function (callback) {
|
||||
callback(null, value)
|
||||
}
|
||||
|
||||
expect($wait(continuable)).to.equal(value)
|
||||
|
||||
done()
|
||||
})()
|
||||
})
|
||||
|
||||
it('handles continuable error', function (done) {
|
||||
$coroutine(function () {
|
||||
var continuable = function (callback) {
|
||||
callback('an exception')
|
||||
}
|
||||
|
||||
expect(function () {
|
||||
$wait(continuable)
|
||||
}).to.throw('an exception')
|
||||
|
||||
done()
|
||||
})()
|
||||
})
|
||||
|
||||
it('forwards scalar values', function (done) {
|
||||
$coroutine(function () {
|
||||
var value = 'a scalar value'
|
||||
it('forwards scalar values', function () {
|
||||
return $coroutine(function () {
|
||||
let value = 'a scalar value'
|
||||
expect($wait(value)).to.equal(value)
|
||||
|
||||
value = [
|
||||
@ -128,72 +94,58 @@ describe('$wait', function () {
|
||||
|
||||
value = {}
|
||||
expect($wait(value)).to.deep.equal(value)
|
||||
|
||||
done()
|
||||
})()
|
||||
})
|
||||
|
||||
it('handles arrays of promises/continuables', function (done) {
|
||||
$coroutine(function () {
|
||||
var value1 = {}
|
||||
var value2 = {}
|
||||
it('handles arrays of promises', function () {
|
||||
return $coroutine(function () {
|
||||
const value1 = {}
|
||||
const value2 = {}
|
||||
|
||||
var promise = Promise.cast(value1)
|
||||
var continuable = function (callback) {
|
||||
callback(null, value2)
|
||||
}
|
||||
const promise1 = Bluebird.resolve(value1)
|
||||
const promise2 = Bluebird.resolve(value2)
|
||||
|
||||
var results = $wait([promise, continuable])
|
||||
const results = $wait([promise1, promise2])
|
||||
expect(results[0]).to.equal(value1)
|
||||
expect(results[1]).to.equal(value2)
|
||||
|
||||
done()
|
||||
})()
|
||||
})
|
||||
|
||||
it('handles maps of promises/continuable', function (done) {
|
||||
$coroutine(function () {
|
||||
var value1 = {}
|
||||
var value2 = {}
|
||||
it('handles maps of promises', function () {
|
||||
return $coroutine(function () {
|
||||
const value1 = {}
|
||||
const value2 = {}
|
||||
|
||||
var promise = Promise.cast(value1)
|
||||
var continuable = function (callback) {
|
||||
callback(null, value2)
|
||||
}
|
||||
const promise1 = Bluebird.resolve(value1)
|
||||
const promise2 = Bluebird.resolve(value2)
|
||||
|
||||
var results = $wait({
|
||||
foo: promise,
|
||||
bar: continuable
|
||||
const results = $wait({
|
||||
foo: promise1,
|
||||
bar: promise2
|
||||
})
|
||||
expect(results.foo).to.equal(value1)
|
||||
expect(results.bar).to.equal(value2)
|
||||
|
||||
done()
|
||||
})()
|
||||
})
|
||||
|
||||
it('handles nested arrays/maps', function (done) {
|
||||
var promise = Promise.cast('a promise')
|
||||
var continuable = function (callback) {
|
||||
callback(null, 'a continuable')
|
||||
}
|
||||
it('handles nested arrays/maps', function () {
|
||||
const promise1 = Bluebird.resolve('promise 1')
|
||||
const promise2 = Bluebird.resolve('promise 2')
|
||||
|
||||
$coroutine(function () {
|
||||
return $coroutine(function () {
|
||||
expect($wait({
|
||||
foo: promise,
|
||||
foo: promise1,
|
||||
bar: [
|
||||
continuable,
|
||||
promise2,
|
||||
'a scalar'
|
||||
]
|
||||
})).to.deep.equal({
|
||||
foo: 'a promise',
|
||||
foo: 'promise 1',
|
||||
bar: [
|
||||
'a continuable',
|
||||
'promise 2',
|
||||
'a scalar'
|
||||
]
|
||||
})
|
||||
|
||||
done()
|
||||
})()
|
||||
})
|
||||
})
|
||||
|
@ -1,338 +0,0 @@
|
||||
# FIXME: This file name should reflect what's inside!
|
||||
|
||||
#=====================================================================
|
||||
|
||||
$clone = require 'lodash.clone'
|
||||
$forEach = require 'lodash.foreach'
|
||||
$isArray = require 'lodash.isarray'
|
||||
$isEmpty = require 'lodash.isempty'
|
||||
$isFunction = require 'lodash.isfunction'
|
||||
|
||||
#=====================================================================
|
||||
|
||||
$asArray = (val) -> if $isArray val then val else [val]
|
||||
$asFunction = (val) -> if $isFunction val then val else -> val
|
||||
|
||||
$first = (collection, def) ->
|
||||
if (n = collection.length)?
|
||||
return collection[0] unless n is 0
|
||||
else
|
||||
return value for own _, value of collection
|
||||
|
||||
# Nothing was found, returns the `def` value.
|
||||
def
|
||||
|
||||
$removeValue = (array, value) ->
|
||||
index = array.indexOf value
|
||||
return false if index is -1
|
||||
array.splice index, 1
|
||||
true
|
||||
|
||||
#---------------------------------------------------------------------
|
||||
|
||||
# TODO: currently the watch can be updated multiple times per
|
||||
# “$MappedCollection.set()” which is inefficient: it should be
|
||||
# possible to address that.
|
||||
|
||||
$watch = (collection, {
|
||||
# Key(s) of the “remote” objects watched.
|
||||
#
|
||||
# If it is a function, it is evaluated in the scope of the “current”
|
||||
# object. (TODO)
|
||||
#
|
||||
# Default: undefined
|
||||
keys
|
||||
|
||||
# Alias for `keys`.
|
||||
key
|
||||
|
||||
# Rule(s) of the “remote” objects watched.
|
||||
#
|
||||
# If it is a function, it is evaluated in the scope of the “current”
|
||||
# object. (TODO)
|
||||
#
|
||||
# Note: `key`/`keys` and `rule`/`rules` cannot be used both.
|
||||
#
|
||||
# Default: undefined
|
||||
rules
|
||||
|
||||
# Alias for `rules`.
|
||||
rule
|
||||
|
||||
# Value to add to the set.
|
||||
#
|
||||
# If it is a function, it is evaluated in the scope of the “remote”
|
||||
# object.
|
||||
#
|
||||
# Default: -> @val
|
||||
val
|
||||
|
||||
# Predicates the “remote” object must fulfill to be used.
|
||||
#
|
||||
# Default: -> true
|
||||
if: cond
|
||||
|
||||
# Function evaluated in the scope of the “remote” object which
|
||||
# returns the key of the object to update (usually the current one).
|
||||
#
|
||||
# TODO: Does it make sense to return an array?
|
||||
#
|
||||
# Default: undefined
|
||||
bind
|
||||
|
||||
# Initial value.
|
||||
init
|
||||
|
||||
# Function called when a loop is detected.
|
||||
#
|
||||
# Usually it is used to either throw an exception or do nothing to
|
||||
# stop the loop.
|
||||
#
|
||||
# Note: The function may also return `true` to force the processing
|
||||
# to continue.
|
||||
#
|
||||
# Default: (number_of_loops) -> throw new Error 'loop detected'
|
||||
loopDetected
|
||||
}, fn) ->
|
||||
val = if val is undefined
|
||||
# The default value is simply the value of the item.
|
||||
-> @val
|
||||
else
|
||||
$asFunction val
|
||||
|
||||
loopDetected ?= -> throw new Error 'loop detected'
|
||||
|
||||
# Method allowing the cleanup when the helper is no longer used.
|
||||
#cleanUp = -> # TODO: noop for now.
|
||||
|
||||
# Keys of items using the current helper.
|
||||
consumers = Object.create null
|
||||
|
||||
# Current values.
|
||||
values = Object.create null
|
||||
values.common = init
|
||||
|
||||
# The number of nested processing passes for this watcher is counted to
|
||||
# avoid an infinite loop.
|
||||
loops = 0
|
||||
|
||||
updating = false
|
||||
|
||||
process = (event, items) ->
|
||||
return if updating
|
||||
|
||||
# Values are grouped by namespace.
|
||||
valuesByNamespace = Object.create null
|
||||
|
||||
$forEach items, (item, key) -> # `key` is a local variable.
|
||||
return unless not cond? or cond.call item
|
||||
|
||||
if bind?
|
||||
key = bind.call item
|
||||
|
||||
# If bind did not return a key, ignores this value.
|
||||
return unless key?
|
||||
|
||||
namespace = "$#{key}"
|
||||
else
|
||||
namespace = 'common'
|
||||
|
||||
# Computes the current value.
|
||||
value = val.call item
|
||||
|
||||
(valuesByNamespace[namespace] ?= []).push value
|
||||
|
||||
return
|
||||
|
||||
# Stops here if no values were computed.
|
||||
return if $isEmpty valuesByNamespace
|
||||
|
||||
if loops
|
||||
return unless (loopDetected loops) is true
|
||||
previousLoops = loops++
|
||||
|
||||
# For each namespace.
|
||||
for namespace, values_ of valuesByNamespace
|
||||
|
||||
# Updates the value.
|
||||
value = values[namespace]
|
||||
ctx = {
|
||||
# TODO: test the $clone
|
||||
value: if value is undefined then $clone init else value
|
||||
}
|
||||
changed = if event is 'enter'
|
||||
fn.call ctx, values_, {}
|
||||
else
|
||||
fn.call ctx, {}, values_
|
||||
|
||||
# Notifies watchers unless it is known the value has not
|
||||
# changed.
|
||||
unless changed is false
|
||||
values[namespace] = ctx.value
|
||||
updating = true
|
||||
if namespace is 'common'
|
||||
collection.touch consumers
|
||||
else
|
||||
collection.touch (namespace.substr 1)
|
||||
updating = false
|
||||
|
||||
loops = previousLoops
|
||||
|
||||
processOne = (event, item) ->
|
||||
process event, [item]
|
||||
|
||||
# Sets up the watch based on the provided criteria.
|
||||
#
|
||||
# TODO: provide a way to clean this up when no longer used.
|
||||
keys = $asArray (keys ? key ? [])
|
||||
rules = $asArray (rules ? rule ? [])
|
||||
if not $isEmpty keys
|
||||
# Matching is done on the keys.
|
||||
|
||||
throw new Error 'cannot use keys and rules' unless $isEmpty rules
|
||||
|
||||
$forEach keys, (key) -> collection.on "key=#{key}", processOne
|
||||
|
||||
# Handles existing items.
|
||||
process 'enter', (collection.getRaw keys, true)
|
||||
else if not $isEmpty rules
|
||||
# Matching is done on the rules.
|
||||
|
||||
$forEach rules, (rule) -> collection.on "rule=#{rule}", process
|
||||
|
||||
# TODO: Inefficient, is there another way?
|
||||
rules = do -> # Minor optimization.
|
||||
tmp = Object.create null
|
||||
tmp[rule] = true for rule in rules
|
||||
tmp
|
||||
$forEach collection.getRaw(), (item) ->
|
||||
processOne 'enter', item if item.rule of rules
|
||||
else
|
||||
# No matching done.
|
||||
|
||||
collection.on 'any', process
|
||||
|
||||
# Handles existing items.
|
||||
process 'enter', collection.getRaw()
|
||||
|
||||
# Creates the generator: the function which items will use to
|
||||
# register to this watcher and to get the current value.
|
||||
generator = do (key) -> # Declare a local variable.
|
||||
->
|
||||
{key} = this
|
||||
|
||||
# Registers this item as a consumer.
|
||||
consumers[key] = true
|
||||
|
||||
# Returns the value for this item if any or the common value.
|
||||
namespace = "$#{key}"
|
||||
if namespace of values
|
||||
values[namespace]
|
||||
else
|
||||
values.common
|
||||
|
||||
# Creates a helper to unregister an item from this watcher.
|
||||
generator.unregister = do (key) -> # Declare a local variable.
|
||||
->
|
||||
{key} = this
|
||||
delete consumers[key]
|
||||
delete values["$#{key}"]
|
||||
|
||||
# Creates a helper to get the value without using an item.
|
||||
generator.raw = (key) ->
|
||||
values[if key? then "$#{key}" else 'common']
|
||||
|
||||
# Returns the generator.
|
||||
generator
|
||||
|
||||
#=====================================================================
|
||||
|
||||
$map = (options) ->
|
||||
options.init = Object.create null
|
||||
|
||||
$watch this, options, (entered, exited) ->
|
||||
changed = false
|
||||
|
||||
$forEach entered, ([key, value]) =>
|
||||
unless @value[key] is value
|
||||
@value[key] = value
|
||||
changed = true
|
||||
return
|
||||
$forEach exited, ([key, value]) =>
|
||||
if key of @value
|
||||
delete @value[key]
|
||||
changed = true
|
||||
return
|
||||
|
||||
changed
|
||||
|
||||
#---------------------------------------------------------------------
|
||||
|
||||
# Creates a set of values from various items.
|
||||
$set = (options) ->
|
||||
# Contrary to other helpers, the default value is the key.
|
||||
options.val ?= -> @key
|
||||
|
||||
options.init = []
|
||||
|
||||
$watch this, options, (entered, exited) ->
|
||||
changed = false
|
||||
|
||||
$forEach entered, (value) =>
|
||||
if (@value.indexOf value) is -1
|
||||
@value.push value
|
||||
changed = true
|
||||
return
|
||||
|
||||
$forEach exited, (value) =>
|
||||
changed = true if $removeValue @value, value
|
||||
return
|
||||
|
||||
changed
|
||||
|
||||
#---------------------------------------------------------------------
|
||||
|
||||
$sum = (options) ->
|
||||
options.init ?= 0
|
||||
|
||||
$watch this, options, (entered, exited) ->
|
||||
prev = @value
|
||||
|
||||
$forEach entered, (value) => @value += value
|
||||
$forEach exited, (value) => @value -= value
|
||||
|
||||
@value isnt prev
|
||||
|
||||
#---------------------------------------------------------------------
|
||||
|
||||
# Uses a value from another item.
|
||||
#
|
||||
# Important note: Behavior is not specified when binding to multiple
|
||||
# items.
|
||||
$val = (options) ->
|
||||
# The default value.
|
||||
def = options.default
|
||||
delete options.default
|
||||
|
||||
options.init ?= def
|
||||
|
||||
# Should the last value be kept instead of returning to the default
|
||||
# value when no items are available?
|
||||
keepLast = !!options.keepLast
|
||||
delete options.keepLast
|
||||
|
||||
$watch this, options, (entered, exited) ->
|
||||
prev = @value
|
||||
|
||||
@value = $first entered, (if keepLast then @value else def)
|
||||
|
||||
@value isnt prev
|
||||
|
||||
#=====================================================================
|
||||
|
||||
module.exports = {
|
||||
$map
|
||||
$set
|
||||
$sum
|
||||
$val
|
||||
}
|
@ -1,244 +0,0 @@
|
||||
{expect: $expect} = require 'chai'
|
||||
|
||||
$sinon = require 'sinon'
|
||||
|
||||
#---------------------------------------------------------------------
|
||||
|
||||
{$MappedCollection} = require './MappedCollection'
|
||||
|
||||
$nonBindedHelpers = require './helpers'
|
||||
|
||||
#=====================================================================
|
||||
|
||||
describe 'Helper', ->
|
||||
|
||||
# Shared variables.
|
||||
collection = $set = $sum = $val = null
|
||||
beforeEach ->
|
||||
# Creates the collection.
|
||||
collection = new $MappedCollection()
|
||||
|
||||
# Dispatcher used for tests.
|
||||
collection.dispatch = -> (@genkey.split '.')[0]
|
||||
|
||||
# Missing rules should be automatically created.
|
||||
collection.missingRule = collection.rule
|
||||
|
||||
# # Monkey patch the collection to see all emitted events.
|
||||
# emit = collection.emit
|
||||
# collection.emit = (args...) ->
|
||||
# console.log args...
|
||||
# emit.call collection, args...
|
||||
|
||||
# Binds helpers to this collection.
|
||||
{$set, $sum, $val} = do ->
|
||||
helpers = {}
|
||||
helpers[name] = fn.bind collection for name, fn of $nonBindedHelpers
|
||||
helpers
|
||||
|
||||
#-------------------------------------------------------------------
|
||||
|
||||
# All helpers share the same underlying logic, so we only need to test one
|
||||
# extensively and the others only superficially.
|
||||
#
|
||||
# $sum was chosen because it is the simplest helper to test.
|
||||
describe '$sum', ->
|
||||
|
||||
it 'with single key', ->
|
||||
collection.set foo: 1
|
||||
|
||||
collection.item sum: ->
|
||||
@val = $sum {
|
||||
key: 'foo'
|
||||
}
|
||||
|
||||
$expect(collection.get 'sum').to.equal 1
|
||||
|
||||
collection.set foo:2
|
||||
|
||||
$expect(collection.get 'sum').to.equal 2
|
||||
|
||||
collection.remove 'foo'
|
||||
|
||||
$expect(collection.get 'sum').to.equal 0
|
||||
|
||||
it 'with multiple keys', ->
|
||||
collection.set {
|
||||
foo: 1
|
||||
bar: 2
|
||||
}
|
||||
|
||||
collection.item sum: ->
|
||||
@val = $sum {
|
||||
keys: ['foo', 'bar']
|
||||
}
|
||||
|
||||
$expect(collection.get 'sum').to.equal 3
|
||||
|
||||
collection.set bar:3
|
||||
|
||||
$expect(collection.get 'sum').to.equal 4
|
||||
|
||||
collection.remove 'foo'
|
||||
|
||||
$expect(collection.get 'sum').to.equal 3
|
||||
|
||||
# FIXME: This test fails but this feature is not used.
|
||||
it.skip 'with dynamic keys', ->
|
||||
collection.set {
|
||||
foo: 1
|
||||
bar: 2
|
||||
}
|
||||
|
||||
collection.rule sum: ->
|
||||
@val = $sum {
|
||||
key: -> (@key.split '.')[1]
|
||||
}
|
||||
collection.set {
|
||||
'sum.foo': null
|
||||
'sum.bar': null
|
||||
}
|
||||
|
||||
$expect(collection.get 'sum.foo').to.equal 1
|
||||
$expect(collection.get 'sum.bar').to.equal 2
|
||||
|
||||
collection.remove 'bar'
|
||||
|
||||
$expect(collection.get 'sum.foo').to.equal 1
|
||||
$expect(collection.get 'sum.bar').to.equal 0
|
||||
|
||||
it 'with single rule', ->
|
||||
collection.set {
|
||||
'foo.1': 1
|
||||
'foo.2': 2
|
||||
}
|
||||
|
||||
collection.item sum: ->
|
||||
@val = $sum {
|
||||
rule: 'foo'
|
||||
}
|
||||
|
||||
$expect(collection.get 'sum').to.equal 3
|
||||
|
||||
collection.set 'foo.2':3
|
||||
|
||||
$expect(collection.get 'sum').to.equal 4
|
||||
|
||||
collection.remove 'foo.1'
|
||||
|
||||
$expect(collection.get 'sum').to.equal 3
|
||||
|
||||
it 'with multiple rules', ->
|
||||
collection.set {
|
||||
'foo': 1
|
||||
'bar.1': 2
|
||||
'bar.2': 3
|
||||
}
|
||||
|
||||
collection.item sum: ->
|
||||
@val = $sum {
|
||||
rules: ['foo', 'bar']
|
||||
}
|
||||
|
||||
$expect(collection.get 'sum').to.equal 6
|
||||
|
||||
collection.set 'bar.1':3
|
||||
|
||||
$expect(collection.get 'sum').to.equal 7
|
||||
|
||||
collection.remove 'bar.2'
|
||||
|
||||
$expect(collection.get 'sum').to.equal 4
|
||||
|
||||
it 'with bind', ->
|
||||
collection.set {
|
||||
'foo': {
|
||||
sum: 2 # This item will participate in `sum.2`.
|
||||
val: 1
|
||||
}
|
||||
'bar': {
|
||||
sum: 1 # This item will participate in `sum.1`.
|
||||
val: 2
|
||||
}
|
||||
}
|
||||
|
||||
collection.rule sum: ->
|
||||
@val = $sum {
|
||||
bind: ->
|
||||
id = @val.sum
|
||||
return unless id?
|
||||
"sum.#{id}"
|
||||
val: -> @val.val
|
||||
}
|
||||
collection.set {
|
||||
'sum.1': null
|
||||
'sum.2': null
|
||||
}
|
||||
|
||||
$expect(collection.get 'sum.1').equal 2
|
||||
$expect(collection.get 'sum.2').equal 1
|
||||
|
||||
collection.set {
|
||||
'foo': {
|
||||
sum: 1
|
||||
val: 3
|
||||
}
|
||||
}
|
||||
|
||||
$expect(collection.get 'sum.1').equal 5
|
||||
$expect(collection.get 'sum.2').equal 0
|
||||
|
||||
collection.remove 'bar'
|
||||
|
||||
$expect(collection.get 'sum.1').equal 3
|
||||
$expect(collection.get 'sum.2').equal 0
|
||||
|
||||
|
||||
it 'with predicate', ->
|
||||
collection.set {
|
||||
foo: 1
|
||||
bar: 2
|
||||
baz: 3
|
||||
}
|
||||
|
||||
collection.item sum: ->
|
||||
@val = $sum {
|
||||
if: -> /^b/.test @rule
|
||||
}
|
||||
|
||||
$expect(collection.get 'sum').equal 5
|
||||
|
||||
collection.set foo:4
|
||||
|
||||
$expect(collection.get 'sum').equal 5
|
||||
|
||||
collection.set bar:5
|
||||
|
||||
$expect(collection.get 'sum').equal 8
|
||||
|
||||
collection.remove 'baz'
|
||||
|
||||
$expect(collection.get 'sum').equal 5
|
||||
|
||||
it 'with initial value', ->
|
||||
collection.set foo: 1
|
||||
|
||||
collection.item sum: ->
|
||||
@val = $sum {
|
||||
key: 'foo'
|
||||
init: 2
|
||||
}
|
||||
|
||||
$expect(collection.get 'sum').to.equal 3
|
||||
|
||||
collection.set foo:2
|
||||
|
||||
$expect(collection.get 'sum').to.equal 4
|
||||
|
||||
collection.remove 'foo'
|
||||
|
||||
$expect(collection.get 'sum').to.equal 2
|
||||
|
||||
# TODO:
|
||||
# - dynamic keys
|
||||
# - dynamic rules
|
215
src/index.js
215
src/index.js
@ -1,6 +1,5 @@
|
||||
import createLogger from 'debug'
|
||||
const debug = createLogger('xo:main')
|
||||
const debugPlugin = createLogger('xo:plugin')
|
||||
|
||||
import Bluebird from 'bluebird'
|
||||
Bluebird.longStackTraces()
|
||||
@ -8,6 +7,7 @@ Bluebird.longStackTraces()
|
||||
import appConf from 'app-conf'
|
||||
import assign from 'lodash.assign'
|
||||
import bind from 'lodash.bind'
|
||||
import blocked from 'blocked'
|
||||
import createConnectApp from 'connect'
|
||||
import eventToPromise from 'event-to-promise'
|
||||
import forEach from 'lodash.foreach'
|
||||
@ -16,6 +16,7 @@ import isArray from 'lodash.isarray'
|
||||
import isFunction from 'lodash.isfunction'
|
||||
import map from 'lodash.map'
|
||||
import pick from 'lodash.pick'
|
||||
import proxyRequest from 'proxy-http-request'
|
||||
import serveStatic from 'serve-static'
|
||||
import WebSocket from 'ws'
|
||||
import {
|
||||
@ -25,14 +26,13 @@ import {
|
||||
NoSuchObject,
|
||||
NotImplemented
|
||||
} from './api-errors'
|
||||
import {coroutine} from 'bluebird'
|
||||
import {createServer as createJsonRpcServer} from 'json-rpc'
|
||||
import {createPeer as createJsonRpcPeer} from '@julien-f/json-rpc'
|
||||
import {readFile} from 'fs-promise'
|
||||
|
||||
import Api from './api'
|
||||
import WebServer from 'http-server-plus'
|
||||
import wsProxy from './ws-proxy'
|
||||
import XO from './xo'
|
||||
import Xo from './xo'
|
||||
|
||||
// ===================================================================
|
||||
|
||||
@ -60,8 +60,8 @@ const DEPRECATED_ENTRIES = [
|
||||
'servers'
|
||||
]
|
||||
|
||||
const loadConfiguration = coroutine(function * () {
|
||||
const config = yield appConf.load('xo-server', {
|
||||
async function loadConfiguration () {
|
||||
const config = await appConf.load('xo-server', {
|
||||
defaults: DEFAULTS,
|
||||
ignoreUnknownFormats: true
|
||||
})
|
||||
@ -76,21 +76,24 @@ const loadConfiguration = coroutine(function * () {
|
||||
})
|
||||
|
||||
return config
|
||||
})
|
||||
}
|
||||
|
||||
// ===================================================================
|
||||
|
||||
const debugPlugin = createLogger('xo:plugin')
|
||||
|
||||
const loadPlugin = Bluebird.method(function (pluginConf, pluginName) {
|
||||
debugPlugin('loading %s', pluginName)
|
||||
|
||||
var pluginPath
|
||||
try {
|
||||
pluginPath = require.resolve('xo-server-' + pluginName)
|
||||
} catch (e) {
|
||||
pluginPath = require.resolve(pluginName)
|
||||
}
|
||||
const pluginPath = (function (name) {
|
||||
try {
|
||||
return require.resolve('xo-server-' + name)
|
||||
} catch (e) {
|
||||
return require.resolve(name)
|
||||
}
|
||||
})(pluginName)
|
||||
|
||||
var plugin = require(pluginPath)
|
||||
let plugin = require(pluginPath)
|
||||
|
||||
if (isFunction(plugin)) {
|
||||
plugin = plugin(pluginConf)
|
||||
@ -107,18 +110,18 @@ const loadPlugins = function (plugins, xo) {
|
||||
|
||||
// ===================================================================
|
||||
|
||||
const makeWebServerListen = coroutine(function * (opts) {
|
||||
async function makeWebServerListen (opts) {
|
||||
// Read certificate and key if necessary.
|
||||
const {certificate, key} = opts
|
||||
if (certificate && key) {
|
||||
[opts.certificate, opts.key] = yield Bluebird.all([
|
||||
[opts.certificate, opts.key] = await Bluebird.all([
|
||||
readFile(certificate),
|
||||
readFile(key)
|
||||
])
|
||||
}
|
||||
|
||||
try {
|
||||
const niceAddress = yield this.listen(opts)
|
||||
const niceAddress = await this.listen(opts)
|
||||
debug(`Web server listening on ${niceAddress}`)
|
||||
} catch (error) {
|
||||
warn(`Web server could not listen on ${error.niceAddress}`)
|
||||
@ -131,7 +134,7 @@ const makeWebServerListen = coroutine(function * (opts) {
|
||||
warn(' Address already in use.')
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
const createWebServer = opts => {
|
||||
const webServer = new WebServer()
|
||||
@ -139,7 +142,41 @@ const createWebServer = opts => {
|
||||
return Bluebird
|
||||
.bind(webServer).return(opts).map(makeWebServerListen)
|
||||
.return(webServer)
|
||||
}
|
||||
|
||||
// ===================================================================
|
||||
|
||||
const setUpProxies = (connect, opts) => {
|
||||
if (!opts) {
|
||||
return
|
||||
}
|
||||
|
||||
// TODO: sort proxies by descending prefix length.
|
||||
|
||||
// HTTP request proxy.
|
||||
forEach(opts, (target, url) => {
|
||||
connect.use(url, (req, res) => {
|
||||
proxyRequest(target + req.url, req, res)
|
||||
})
|
||||
})
|
||||
|
||||
// WebSocket proxy.
|
||||
const webSocketServer = new WebSocket.Server({
|
||||
noServer: true
|
||||
})
|
||||
connect.on('upgrade', (req, socket, head) => {
|
||||
const {url} = req
|
||||
|
||||
for (let prefix in opts) {
|
||||
if (url.lastIndexOf(prefix, 0) !== -1) {
|
||||
const target = opts[prefix] + url.slice(prefix.length)
|
||||
webSocketServer.handleUpgrade(req, socket, head, socket => {
|
||||
wsProxy(socket, target)
|
||||
})
|
||||
return
|
||||
}
|
||||
}
|
||||
})
|
||||
}
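A minimal sketch of the shape `setUpProxies` expects for `config.http.proxies` (the prefix and target below are purely illustrative): each key is a URL prefix mounted on the connect app, each value the base URL that plain HTTP requests and WebSocket upgrades under that prefix are forwarded to.

// Hypothetical proxies entry, illustrative values only; `connect` is the
// connect app created in main() below.
setUpProxies(connect, {
  '/v1': 'http://localhost:9001'
})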
|
||||
|
||||
// ===================================================================
|
||||
@ -173,7 +210,7 @@ const apiHelpers = {
|
||||
// Handles both properties and wrapped models.
|
||||
const properties = user.properties || user
|
||||
|
||||
return pick(properties, 'id', 'email', 'permission')
|
||||
return pick(properties, 'id', 'email', 'groups', 'permission')
|
||||
},
|
||||
|
||||
getServerPublicProperties (server) {
|
||||
@ -207,33 +244,32 @@ const setUpApi = (webServer, xo) => {
|
||||
path: '/api/'
|
||||
})
|
||||
|
||||
webSocketServer.on('connection', connection => {
|
||||
webSocketServer.on('connection', socket => {
|
||||
debug('+ WebSocket connection')
|
||||
|
||||
let xoConnection
|
||||
// Create the abstract XO object for this connection.
|
||||
const connection = xo.createUserConnection()
|
||||
connection.once('close', () => {
|
||||
socket.close()
|
||||
})
|
||||
|
||||
// Create the JSON-RPC server for this connection.
|
||||
const jsonRpc = createJsonRpcServer(message => {
|
||||
const jsonRpc = createJsonRpcPeer(message => {
|
||||
if (message.type === 'request') {
|
||||
return api.call(xoConnection, message.method, message.params)
|
||||
return api.call(connection, message.method, message.params)
|
||||
}
|
||||
})
|
||||
|
||||
// Create the abstract XO object for this connection.
|
||||
xoConnection = xo.createUserConnection({
|
||||
close: bind(connection.close, connection),
|
||||
notify: bind(jsonRpc.notify, jsonRpc)
|
||||
})
|
||||
connection.notify = bind(jsonRpc.notify, jsonRpc)
|
||||
|
||||
// Close the XO connection with this WebSocket.
|
||||
connection.once('close', () => {
|
||||
socket.once('close', () => {
|
||||
debug('- WebSocket connection')
|
||||
|
||||
xoConnection.close()
|
||||
connection.close()
|
||||
})
|
||||
|
||||
// Connect the WebSocket to the JSON-RPC server.
|
||||
connection.on('message', message => {
|
||||
socket.on('message', message => {
|
||||
jsonRpc.write(message)
|
||||
})
|
||||
|
||||
@ -243,61 +279,48 @@ const setUpApi = (webServer, xo) => {
|
||||
}
|
||||
}
|
||||
jsonRpc.on('data', data => {
|
||||
connection.send(JSON.stringify(data), onSend)
|
||||
// The socket may have been closed during the API method
|
||||
// execution.
|
||||
if (socket.readyState === WebSocket.OPEN) {
|
||||
socket.send(data, onSend)
|
||||
}
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
// ===================================================================
|
||||
|
||||
const getVmConsoleUrl = (xo, id) => {
|
||||
const vm = xo.getObject(id, ['VM', 'VM-controller'])
|
||||
if (!vm || vm.power_state !== 'Running') {
|
||||
return
|
||||
}
|
||||
|
||||
const {sessionId} = xo.getXAPI(vm)
|
||||
|
||||
let url
|
||||
forEach(vm.consoles, console => {
|
||||
if (console.protocol === 'rfb') {
|
||||
url = `${console.location}&session_id=${sessionId}`
|
||||
return false
|
||||
}
|
||||
})
|
||||
|
||||
return url
|
||||
}
|
||||
|
||||
const CONSOLE_PROXY_PATH_RE = /^\/consoles\/(.*)$/
|
||||
const CONSOLE_PROXY_PATH_RE = /^\/api\/consoles\/(.*)$/
|
||||
|
||||
const setUpConsoleProxy = (webServer, xo) => {
|
||||
const webSocketServer = new WebSocket.Server({
|
||||
noServer: true
|
||||
})
|
||||
|
||||
webServer.on('upgrade', (req, res, head) => {
|
||||
webServer.on('upgrade', (req, socket, head) => {
|
||||
const matches = CONSOLE_PROXY_PATH_RE.exec(req.url)
|
||||
if (!matches) {
|
||||
return
|
||||
}
|
||||
|
||||
const url = getVmConsoleUrl(xo, matches[1])
|
||||
if (!url) {
|
||||
return
|
||||
}
|
||||
const [, id] = matches
|
||||
try {
|
||||
const url = xo.getXAPI(id, ['VM', 'VM-controller']).getVmConsoleUrl(id)
|
||||
|
||||
// FIXME: lost connection due to VM restart is not detected.
|
||||
webSocketServer.handleUpgrade(req, res, head, connection => {
|
||||
wsProxy(connection, url)
|
||||
})
|
||||
// FIXME: lost connection due to VM restart is not detected.
|
||||
webSocketServer.handleUpgrade(req, socket, head, connection => {
|
||||
wsProxy(connection, url, {
|
||||
rejectUnauthorized: false
|
||||
})
|
||||
})
|
||||
} catch (_) {}
|
||||
})
|
||||
}
|
||||
|
||||
// ===================================================================
|
||||
|
||||
const registerPasswordAuthenticationProvider = (xo) => {
|
||||
const passwordAuthenticationProvider = coroutine(function * ({
|
||||
async function passwordAuthenticationProvider ({
|
||||
email,
|
||||
password,
|
||||
}) {
|
||||
@ -307,18 +330,19 @@ const registerPasswordAuthenticationProvider = (xo) => {
|
||||
throw null
|
||||
}
|
||||
|
||||
const user = yield xo.users.first({email})
|
||||
if (!user || !(yield user.checkPassword(password))) {
|
||||
// TODO: this is deprecated and should be removed.
|
||||
const user = await xo._users.first({email})
|
||||
if (!user || !(await user.checkPassword(password))) {
|
||||
throw null
|
||||
}
|
||||
return user
|
||||
})
|
||||
}
|
||||
|
||||
xo.registerAuthenticationProvider(passwordAuthenticationProvider)
|
||||
}
|
||||
|
||||
const registerTokenAuthenticationProvider = (xo) => {
|
||||
const tokenAuthenticationProvider = coroutine(function * ({
|
||||
async function tokenAuthenticationProvider ({
|
||||
token: tokenId,
|
||||
}) {
|
||||
/* eslint no-throw-literal: 0 */
|
||||
@ -327,37 +351,40 @@ const registerTokenAuthenticationProvider = (xo) => {
|
||||
throw null
|
||||
}
|
||||
|
||||
const token = yield xo.tokens.first(tokenId)
|
||||
if (!token) {
|
||||
try {
|
||||
return (await xo.getAuthenticationToken(tokenId)).user_id
|
||||
} catch (e) {
|
||||
// It is not an error if the token does not exist.
|
||||
throw null
|
||||
}
|
||||
|
||||
return token.get('user_id')
|
||||
})
|
||||
}
|
||||
|
||||
xo.registerAuthenticationProvider(tokenAuthenticationProvider)
|
||||
}
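Both providers follow the same contract: a provider receives the credentials object, returns the authenticated user (or its id) on success, and throws to signal that it does not handle these credentials so the next provider can be tried. A hedged sketch of an additional provider in the same style (the provider name and the hard-coded email are made up; `xo._users` is the internal collection already used above):

const registerDemoAuthenticationProvider = (xo) => {
  async function demoAuthenticationProvider ({email, password}) {
    /* eslint no-throw-literal: 0 */

    // Hypothetical restriction, for illustration only.
    if (email !== 'demo@example.org') {
      throw null
    }

    const user = await xo._users.first({email})
    if (!user || !(await user.checkPassword(password))) {
      throw null
    }

    return user
  }

  xo.registerAuthenticationProvider(demoAuthenticationProvider)
}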
|
||||
|
||||
// ===================================================================
|
||||
|
||||
let help
|
||||
{
|
||||
/* eslint no-lone-blocks: 0 */
|
||||
|
||||
const {name, version} = require('../package')
|
||||
help = () => `${name} v${version}`
|
||||
}
|
||||
const help = (function ({name, version}) {
|
||||
return () => `${name} v${version}`
|
||||
})(require('../package'))
|
||||
|
||||
// ===================================================================
|
||||
|
||||
const main = coroutine(function * (args) {
|
||||
export default async function main (args) {
|
||||
if (args.indexOf('--help') !== -1 || args.indexOf('-h') !== -1) {
|
||||
return help()
|
||||
}
|
||||
|
||||
const config = yield loadConfiguration()
|
||||
{
|
||||
const debug = createLogger('xo:perf')
|
||||
blocked(ms => {
|
||||
debug('blocked for %sms', ms | 0)
|
||||
})
|
||||
}
|
||||
|
||||
const webServer = yield createWebServer(config.http.listen)
|
||||
const config = await loadConfiguration()
|
||||
|
||||
const webServer = await createWebServer(config.http.listen)
|
||||
|
||||
// Now the web server is listening, drop privileges.
|
||||
try {
|
||||
@ -376,8 +403,8 @@ const main = coroutine(function * (args) {
|
||||
|
||||
// Create the main object which will connect to Xen servers and
|
||||
// manage all the models.
|
||||
const xo = new XO()
|
||||
xo.start({
|
||||
const xo = new Xo()
|
||||
await xo.start({
|
||||
redis: {
|
||||
uri: config.redis && config.redis.uri
|
||||
}
|
||||
@ -388,29 +415,37 @@ const main = coroutine(function * (args) {
|
||||
registerTokenAuthenticationProvider(xo)
|
||||
|
||||
if (config.plugins) {
|
||||
yield loadPlugins(config.plugins, xo)
|
||||
await loadPlugins(config.plugins, xo)
|
||||
}
|
||||
|
||||
// Connect is used to manage non-WebSocket connections.
|
||||
const connect = createConnectApp()
|
||||
webServer.on('request', connect)
|
||||
webServer.on('upgrade', (req, socket, head) => {
|
||||
connect.emit('upgrade', req, socket, head)
|
||||
})
|
||||
|
||||
// Must be set up before the API.
|
||||
setUpConsoleProxy(webServer, xo)
|
||||
|
||||
// Must be set up before the API.
|
||||
connect.use(bind(xo.handleProxyRequest, xo))
|
||||
connect.use(bind(xo._handleHttpRequest, xo))
|
||||
|
||||
// TODO: remove when no longer necessary.
|
||||
connect.use(bind(xo._handleProxyRequest, xo))
|
||||
|
||||
// Must be set up before the static files.
|
||||
setUpApi(webServer, xo)
|
||||
|
||||
setUpProxies(connect, config.http.proxies)
|
||||
|
||||
setUpStaticFiles(connect, config.http.mounts)
|
||||
|
||||
if (!(yield xo.users.exists())) {
|
||||
if (!(await xo._users.exists())) {
|
||||
const email = 'admin@admin.net'
|
||||
const password = 'admin'
|
||||
|
||||
xo.users.create(email, password, 'admin')
|
||||
await xo.createUser({email, password, permission: 'admin'})
|
||||
info('Default user created:', email, ' with password', password)
|
||||
}
|
||||
|
||||
@ -420,6 +455,4 @@ const main = coroutine(function * (args) {
|
||||
process.on('SIGTERM', closeWebServer)
|
||||
|
||||
return eventToPromise(webServer, 'close')
|
||||
})
|
||||
|
||||
exports = module.exports = main
|
||||
}
|
||||
|
78
src/models/acl.js
Normal file
78
src/models/acl.js
Normal file
@ -0,0 +1,78 @@
|
||||
import forEach from 'lodash.foreach'
|
||||
import map from 'lodash.map'
|
||||
|
||||
import Collection from '../collection/redis'
|
||||
import Model from '../model'
|
||||
import {multiKeyHash} from '../utils'
|
||||
|
||||
// ===================================================================
|
||||
|
||||
// Up until now, there were no actions, therefore the default
|
||||
// action is used to update existing entries.
|
||||
const DEFAULT_ACTION = 'admin'
|
||||
|
||||
// ===================================================================
|
||||
|
||||
export default class Acl extends Model {}
|
||||
|
||||
Acl.create = (subject, object, action) => {
|
||||
return Acl.hash(subject, object, action).then(hash => new Acl({
|
||||
id: hash,
|
||||
subject,
|
||||
object,
|
||||
action
|
||||
}))
|
||||
}
|
||||
|
||||
Acl.hash = (subject, object, action) => multiKeyHash(subject, object, action)
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
export class Acls extends Collection {
|
||||
get Model () {
|
||||
return Acl
|
||||
}
|
||||
|
||||
create (subject, object, action) {
|
||||
return Acl.create(subject, object, action).then(acl => this.add(acl))
|
||||
}
|
||||
|
||||
delete (subject, object, action) {
|
||||
return Acl.hash(subject, object, action).then(hash => this.remove(hash))
|
||||
}
|
||||
|
||||
aclExists (subject, object, action) {
|
||||
return Acl.hash(subject, object, action).then(hash => this.exists(hash))
|
||||
}
|
||||
|
||||
async get (properties) {
|
||||
const acls = await super.get(properties)
|
||||
|
||||
// Finds all records that are missing an action and need to be updated.
|
||||
const toUpdate = []
|
||||
forEach(acls, acl => {
|
||||
if (!acl.action) {
|
||||
acl.action = DEFAULT_ACTION
|
||||
toUpdate.push(acl)
|
||||
}
|
||||
})
|
||||
if (toUpdate.length) {
|
||||
// Removes all existing entries.
|
||||
await this.remove(map(toUpdate, 'id'))
|
||||
|
||||
// Compute the new ids (new hashes).
|
||||
const {hash} = Acl
|
||||
await Promise.all(map(
|
||||
toUpdate,
|
||||
(acl) => hash(acl.subject, acl.object, acl.action).then(id => {
|
||||
acl.id = id
|
||||
})
|
||||
))
|
||||
|
||||
// Inserts the new (updated) entries.
|
||||
await this.add(toUpdate)
|
||||
}
|
||||
|
||||
return acls
|
||||
}
|
||||
}
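Because the id of an entry is the multi-key hash of its three components, a given (subject, object, action) triple always resolves to the same record, which is what lets `create`, `aclExists` and `delete` each derive the key independently. A small sketch with made-up identifiers, assuming the usual `Model#get` accessor used by the other models:

import Acl from './models/acl'   // path relative to src/, as in this commit

async function aclHashSketch () {
  // Hypothetical subject/object/action values, for illustration only.
  const acl = await Acl.create('user:b9f3', 'vm:42', 'operate')

  // The generated id is simply the hash of the same triple.
  const sameId = acl.get('id') === (await Acl.hash('user:b9f3', 'vm:42', 'operate'))
  console.log(sameId) // true
}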
|
51
src/models/group.js
Normal file
51
src/models/group.js
Normal file
@ -0,0 +1,51 @@
|
||||
import forEach from 'lodash.foreach'
|
||||
|
||||
import Collection from '../collection/redis'
|
||||
import Model from '../model'
|
||||
|
||||
// ===================================================================
|
||||
|
||||
export default class Group extends Model {}
|
||||
|
||||
// ===================================================================
|
||||
|
||||
export class Groups extends Collection {
|
||||
get Model () {
|
||||
return Group
|
||||
}
|
||||
|
||||
get idPrefix () {
|
||||
return 'group:'
|
||||
}
|
||||
|
||||
create (name) {
|
||||
return this.add(new Group({
|
||||
name,
|
||||
users: '[]'
|
||||
}))
|
||||
}
|
||||
|
||||
async save (group) {
|
||||
// Serializes.
|
||||
group.users = JSON.stringify(group.users)
|
||||
|
||||
return await this.update(group)
|
||||
}
|
||||
|
||||
async get (properties) {
|
||||
const groups = await super.get(properties)
|
||||
|
||||
// Deserializes.
|
||||
forEach(groups, group => {
|
||||
const {users} = group
|
||||
try {
|
||||
group.users = JSON.parse(users)
|
||||
} catch (error) {
|
||||
console.warn('cannot parse group.users:', users)
|
||||
group.users = []
|
||||
}
|
||||
})
|
||||
|
||||
return groups
|
||||
}
|
||||
}
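`users` is stored as a JSON string but always exposed as an array: `save` serializes it before calling `update`, and `get` parses it back, falling back to an empty array when the stored value cannot be parsed. A hedged round-trip sketch; the `groups` instance, the record shape and the ids are assumptions:

async function groupSerializationSketch (groups) {
  // Hypothetical record mirroring what create() stores.
  const group = {id: 'group:1', name: 'ops', users: ['user:1', 'user:2']}

  await groups.save(group)   // group.users is stringified before update()

  const [fetched] = await groups.get({id: 'group:1'})
  console.log(fetched.users) // parsed back to ['user:1', 'user:2']
}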
|
14
src/models/server.js
Normal file
14
src/models/server.js
Normal file
@ -0,0 +1,14 @@
|
||||
import Collection from '../collection/redis'
|
||||
import Model from '../model'
|
||||
|
||||
// ===================================================================
|
||||
|
||||
export default class Server extends Model {}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
export class Servers extends Collection {
|
||||
get Model () {
|
||||
return Server
|
||||
}
|
||||
}
|
26
src/models/token.js
Normal file
26
src/models/token.js
Normal file
@ -0,0 +1,26 @@
|
||||
import Collection from '../collection/redis'
|
||||
import Model from '../model'
|
||||
import {generateToken} from '../utils'
|
||||
|
||||
// ===================================================================
|
||||
|
||||
export default class Token extends Model {}
|
||||
|
||||
Token.generate = (userId) => {
|
||||
return generateToken().then(token => new Token({
|
||||
id: token,
|
||||
user_id: userId
|
||||
}))
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
export class Tokens extends Collection {
|
||||
get Model () {
|
||||
return Token
|
||||
}
|
||||
|
||||
generate (userId) {
|
||||
return Token.generate(userId).then(token => this.add(token))
|
||||
}
|
||||
}
|
93
src/models/user.js
Normal file
93
src/models/user.js
Normal file
@ -0,0 +1,93 @@
|
||||
import forEach from 'lodash.foreach'
|
||||
import {hash, needsRehash, verify} from 'hashy'
|
||||
|
||||
import Collection from '../collection/redis'
|
||||
import Model from '../model'
|
||||
|
||||
// ===================================================================
|
||||
|
||||
const PERMISSIONS = {
|
||||
none: 0,
|
||||
read: 1,
|
||||
write: 2,
|
||||
admin: 3
|
||||
}
|
||||
|
||||
// ===================================================================
|
||||
|
||||
export default class User extends Model {
|
||||
async checkPassword (password) {
|
||||
const hash = this.get('pw_hash')
|
||||
|
||||
if (!(hash && await verify(password, hash))) {
|
||||
return false
|
||||
}
|
||||
|
||||
// There might be no hash if the user authenticates with another
|
||||
// method (e.g. LDAP).
|
||||
if (needsRehash(hash)) {
|
||||
await this.setPassword(password)
|
||||
}
|
||||
|
||||
return true
|
||||
}
|
||||
|
||||
hasPermission (permission) {
|
||||
return PERMISSIONS[this.get('permission')] >= PERMISSIONS[permission]
|
||||
}
|
||||
|
||||
setPassword (password) {
|
||||
return hash(password).then(hash => {
|
||||
return this.set('pw_hash', hash)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
User.prototype.default = {
|
||||
permission: 'none'
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
export class Users extends Collection {
|
||||
get Model () {
|
||||
return User
|
||||
}
|
||||
|
||||
async create (email, password, permission = 'none') {
|
||||
const user = new User({
|
||||
email,
|
||||
permission
|
||||
})
|
||||
|
||||
if (password != null) {
|
||||
await user.setPassword(password)
|
||||
}
|
||||
|
||||
return this.add(user)
|
||||
}
|
||||
|
||||
async save (user) {
|
||||
// Serializes.
|
||||
user.groups = JSON.stringify(user.groups)
|
||||
|
||||
return await this.update(user)
|
||||
}
|
||||
|
||||
async get (properties) {
|
||||
const users = await super.get(properties)
|
||||
|
||||
// Deserializes
|
||||
forEach(users, user => {
|
||||
const {groups} = user
|
||||
try {
|
||||
user.groups = groups ? JSON.parse(groups) : []
|
||||
} catch (_) {
|
||||
console.warn('cannot parse user.groups:', groups)
|
||||
user.groups = []
|
||||
}
|
||||
})
|
||||
|
||||
return users
|
||||
}
|
||||
}
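`PERMISSIONS` turns the permission names into a total order, so `hasPermission` reduces to a numeric comparison: a user satisfies every check at or below their own level. A quick sketch (the email is invented, and `Model#get` is assumed to expose the constructor properties as in the other models):

import User from './models/user'   // path relative to src/, as in this commit

const user = new User({email: 'ops@example.org', permission: 'write'})

user.hasPermission('read')   // true  (write: 2 >= read: 1)
user.hasPermission('write')  // true  (2 >= 2)
user.hasPermission('admin')  // false (2 < 3)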
|
899
src/spec.coffee
899
src/spec.coffee
@ -1,899 +0,0 @@
|
||||
$forEach = require 'lodash.foreach'
|
||||
$isArray = require 'lodash.isarray'
|
||||
$isObject = require 'lodash.isobject'
|
||||
$xml2js = require 'xml2js'
|
||||
|
||||
$helpers = require './helpers'
|
||||
{parseXml: $parseXML} = require './utils'
|
||||
|
||||
#=====================================================================
|
||||
|
||||
$isVMRunning = ->
|
||||
return switch @val.power_state
|
||||
when 'Paused', 'Running'
|
||||
true
|
||||
else
|
||||
false
|
||||
|
||||
$isHostRunning = -> @val.power_state is 'Running'
|
||||
|
||||
$isTaskLive = -> @val.status is 'pending' or @val.status is 'cancelling'
|
||||
|
||||
$retrieveTags = -> [] # TODO
|
||||
|
||||
$toTimestamp = (date) ->
|
||||
# Weird behavior from the XAPI.
|
||||
return null if date is '1969-12-31T23:00:00.000Z'
|
||||
|
||||
return if date?
|
||||
Math.round (Date.parse date) / 1000
|
||||
else
|
||||
null
|
||||
|
||||
#=====================================================================
|
||||
|
||||
module.exports = ->
|
||||
|
||||
# Binds the helpers to the collection.
|
||||
{
|
||||
$set
|
||||
$sum
|
||||
$val
|
||||
} = do =>
|
||||
helpers = {}
|
||||
helpers[name] = fn.bind this for name, fn of $helpers
|
||||
helpers
|
||||
|
||||
collection = this
|
||||
# do (emit = collection.emit) ->
|
||||
# collection.emit = (event, items) ->
|
||||
# console.log event
|
||||
# emit.call collection, event, items
|
||||
|
||||
$link = (keyFn, valFn = (-> @val)) ->
|
||||
keyPerItem = Object.create null
|
||||
listenerPerItem = Object.create null
|
||||
valuePerItem = Object.create null
|
||||
updating = false
|
||||
|
||||
->
|
||||
{key} = this
|
||||
|
||||
# Gets the key of the remote object.
|
||||
remoteKey = keyFn.call this
|
||||
|
||||
keyHasChanged = remoteKey isnt keyPerItem[key]
|
||||
|
||||
if keyHasChanged
|
||||
keyPerItem[key] = remoteKey
|
||||
else
|
||||
# Returns the value if already defined.
|
||||
return valuePerItem[key] if key of valuePerItem
|
||||
|
||||
eventName = "key=#{remoteKey}"
|
||||
listener = listenerPerItem[key]
|
||||
|
||||
if listener and keyHasChanged
|
||||
collection.remove eventName, listener
|
||||
listener = null
|
||||
|
||||
# Special case for `OpaqueRef:NULL`.
|
||||
if remoteKey is 'OpaqueRef:NULL'
|
||||
return valuePerItem[key] = null
|
||||
|
||||
unless listener
|
||||
listener = (event, item) ->
|
||||
# If the events are due to an update of this link or if the item is
|
||||
# exiting, just returns.
|
||||
return if updating or event isnt 'enter'
|
||||
|
||||
# Register its value.
|
||||
valuePerItem[key] = valFn.call item
|
||||
|
||||
# Force the object to update.
|
||||
try
|
||||
updating = true
|
||||
collection.touch key
|
||||
finally
|
||||
updating = false
|
||||
collection.on eventName, listener
|
||||
|
||||
# Tries to find the remote object in the collection.
|
||||
try
|
||||
return valuePerItem[key] = valFn.call (collection.getRaw remoteKey)
|
||||
|
||||
# Returns `null` for now.
|
||||
valuePerItem[key] = null
|
||||
|
||||
$map = (valFn) ->
|
||||
map = Object.create null
|
||||
subscribers = Object.create null
|
||||
updating = false
|
||||
|
||||
# First, initializes the map with existing items.
|
||||
$forEach collection.getRaw(), (item) ->
|
||||
val = valFn.call item
|
||||
map[val[0]] = val[1] if val
|
||||
return
|
||||
|
||||
# Listens to any new item.
|
||||
collection.on 'any', (event, items) ->
|
||||
# If the events are due to an update of this map or if items are exiting,
|
||||
# just returns.
|
||||
return if updating or event isnt 'enter'
|
||||
|
||||
# No need to trigger an update if nothing has changed.
|
||||
changed = false
|
||||
|
||||
$forEach items, (item) ->
|
||||
val = valFn.call item
|
||||
if val and map[val[0]] isnt val[1]
|
||||
changed = true
|
||||
map[val[0]] = val[1]
|
||||
return
|
||||
|
||||
if changed
|
||||
try
|
||||
updating = true
|
||||
collection.touch subscribers
|
||||
finally
|
||||
updating = false
|
||||
|
||||
generator = ->
|
||||
subscribers[@key] = true
|
||||
map
|
||||
generator.unsubscribe = ->
|
||||
delete subscribers[@key]
|
||||
|
||||
generator
|
||||
|
||||
# Shared watchers.
|
||||
UUIDsToKeys = $map ->
|
||||
{UUID} = @val
|
||||
return false unless UUID
|
||||
[UUID, "#{@key}"]
|
||||
messages = $set {
|
||||
rule: 'message'
|
||||
bind: -> @val.$object or @val.poolRef
|
||||
}
|
||||
|
||||
# Classes in XAPI are not always delivered with the same case,
|
||||
# therefore a map is needed to make sure they always map to the same
|
||||
# rule.
|
||||
rulesMap = {}
|
||||
|
||||
# Defines which rule should be used for this item.
|
||||
#
|
||||
# Note: If the rule does not exist, a temporary item is created. FIXME
|
||||
@dispatch = ->
|
||||
{$type: type} = @genval
|
||||
|
||||
# Normalizes the type.
|
||||
type = rulesMap[type.toLowerCase()] ? type
|
||||
|
||||
# Subtypes handling for VMs.
|
||||
if type is 'VM'
|
||||
return 'VM-controller' if @genval.is_control_domain
|
||||
return 'VM-snapshot' if @genval.is_a_snapshot
|
||||
return 'VM-template' if @genval.is_a_template
|
||||
|
||||
type
|
||||
|
||||
# Missing rules should be created.
|
||||
@missingRule = (name) ->
|
||||
@rule(name, ->
|
||||
@key = -> @genval.id
|
||||
@val = -> @genval
|
||||
)
|
||||
|
||||
# Rule conflicts are possible (e.g. VM-template to VM).
|
||||
@ruleConflict = ( -> )
|
||||
|
||||
# Used to apply common definitions to rules.
|
||||
@hook afterRule: ->
|
||||
# Registers this rule in the map.
|
||||
rulesMap[@name.toLowerCase()] = @name
|
||||
|
||||
# TODO: explain.
|
||||
return unless @val?
|
||||
|
||||
unless $isObject @val
|
||||
throw new Error 'the value should be an object'
|
||||
|
||||
# Injects various common definitions.
|
||||
@val.type = @name
|
||||
if @singleton
|
||||
@val.id = @val.ref = -> @key
|
||||
else
|
||||
# These definitions are for non-singleton items only.
|
||||
@key = -> @genval.$ref
|
||||
@val.id = -> @genval.$id
|
||||
@val.UUID = -> @genval.uuid
|
||||
@val.ref = -> @genval.$ref
|
||||
@val.poolRef = -> @genval.$pool.$ref
|
||||
@val.$poolId = -> @genval.$pool.$id
|
||||
|
||||
# All main objects can have associated messages and tags.
|
||||
if @name in ['host', 'pool', 'SR', 'VM', 'VM-controller']
|
||||
@val.messages = messages
|
||||
|
||||
@val.tags = $retrieveTags
|
||||
|
||||
# Helper to create multiple rules with the same definition.
|
||||
rules = (rules, definition) =>
|
||||
@rule rule, definition for rule in rules
|
||||
|
||||
#===================================================================
|
||||
|
||||
# An item is equivalent to a rule but one and only one instance of
|
||||
# this rule is created without any generator.
|
||||
@item xo: ->
|
||||
@val = {
|
||||
|
||||
# TODO: Maybe there should be high-level hosts: those who do not
|
||||
# belong to a pool.
|
||||
|
||||
pools: $set {
|
||||
rule: 'pool'
|
||||
}
|
||||
|
||||
$CPUs: $sum {
|
||||
rule: 'host'
|
||||
val: -> +(@val.CPUs.cpu_count)
|
||||
}
|
||||
|
||||
$running_VMs: $set {
|
||||
rule: 'VM'
|
||||
if: $isVMRunning
|
||||
}
|
||||
|
||||
$vCPUs: $sum {
|
||||
rule: 'VM'
|
||||
val: -> @val.CPUs.number
|
||||
if: $isVMRunning
|
||||
}
|
||||
|
||||
# Does not work due to a problem in the host rule.
|
||||
$memory: {
|
||||
usage: $sum {
|
||||
rule: 'host'
|
||||
if: $isHostRunning
|
||||
val: -> @val.memory.usage
|
||||
}
|
||||
size: $sum {
|
||||
rule: 'host'
|
||||
if: $isHostRunning
|
||||
val: -> @val.memory.size
|
||||
}
|
||||
}
|
||||
|
||||
# Maps the UUIDs to keys (i.e. opaque references).
|
||||
$UUIDsToKeys: UUIDsToKeys
|
||||
}
|
||||
|
||||
#-------------------------------------------------------------------
|
||||
|
||||
@rule pool: ->
|
||||
@val = {
|
||||
name_label: -> @genval.name_label
|
||||
|
||||
name_description: -> @genval.name_description
|
||||
|
||||
SRs: $set {
|
||||
rule: 'SR'
|
||||
bind: -> @val.$container
|
||||
}
|
||||
|
||||
default_SR: ->
|
||||
SR = @genval.default_SR
|
||||
if SR is 'OpaqueRef:NULL'
|
||||
null
|
||||
else
|
||||
SR
|
||||
|
||||
HA_enabled: -> @genval.ha_enabled
|
||||
|
||||
hosts: $set {
|
||||
rule: 'host'
|
||||
bind: -> @genval.$pool.$ref
|
||||
}
|
||||
|
||||
master: -> @genval.master
|
||||
|
||||
networks: $set {
|
||||
rule: 'network'
|
||||
bind: -> @genval.$pool.$ref
|
||||
}
|
||||
|
||||
templates: $set {
|
||||
rule: 'VM-template'
|
||||
bind: -> @val.$container
|
||||
}
|
||||
|
||||
VMs: $set {
|
||||
rule: 'VM'
|
||||
bind: -> @val.$container
|
||||
}
|
||||
|
||||
$running_hosts: $set {
|
||||
rule: 'host'
|
||||
bind: -> @genval.$pool.$ref
|
||||
if: $isHostRunning
|
||||
}
|
||||
|
||||
$running_VMs: $set {
|
||||
rule: 'VM'
|
||||
bind: -> @genval.$pool.$ref
|
||||
if: $isVMRunning
|
||||
}
|
||||
|
||||
$VMs: $set {
|
||||
rule: 'VM'
|
||||
bind: -> @genval.$pool.$ref
|
||||
}
|
||||
}
|
||||
|
||||
#-------------------------------------------------------------------
|
||||
|
||||
@rule host: ->
|
||||
# Private properties used to help construction.
|
||||
@data = {
|
||||
metrics: $link -> @genval.metrics
|
||||
}
|
||||
|
||||
@val = {
|
||||
name_label: -> @genval.name_label
|
||||
|
||||
name_description: -> @genval.name_description
|
||||
|
||||
address: -> @genval.address
|
||||
|
||||
controller: $val {
|
||||
rule: 'VM-controller'
|
||||
bind: -> @val.$container
|
||||
val: -> @key
|
||||
}
|
||||
|
||||
bios_strings: -> @genval.bios_strings
|
||||
|
||||
CPUs: -> @genval.cpu_info
|
||||
|
||||
enabled: -> @genval.enabled
|
||||
|
||||
current_operations: -> @genval.current_operations
|
||||
|
||||
hostname: -> @genval.hostname
|
||||
|
||||
iSCSI_name: -> @genval.other_config?.iscsi_iqn ? null
|
||||
|
||||
memory: ->
|
||||
{metrics} = @data
|
||||
if metrics
|
||||
{
|
||||
usage: +metrics.memory_total - metrics.memory_free
|
||||
size: +metrics.memory_total
|
||||
}
|
||||
else
|
||||
{
|
||||
usage: 0
|
||||
size: 0
|
||||
}
|
||||
|
||||
patches: -> @genval.patches
|
||||
|
||||
power_state: ->
|
||||
if @data.metrics?.live
|
||||
'Running'
|
||||
else
|
||||
'Halted'
|
||||
|
||||
# Local SRs are handled directly in `SR.$container`.
|
||||
SRs: $set {
|
||||
rule: 'SR'
|
||||
bind: -> @val.$container
|
||||
}
|
||||
|
||||
# What are local templates?
|
||||
templates: $set {
|
||||
rule: 'VM-template'
|
||||
bind: -> @val.$container
|
||||
}
|
||||
|
||||
# Local VMs are handled directly in `VM.$container`.
|
||||
VMs: $set {
|
||||
rule: 'VM'
|
||||
bind: -> @val.$container
|
||||
}
|
||||
|
||||
$PBDs: -> @genval.PBDs
|
||||
|
||||
PIFs: -> @genval.PIFs
|
||||
$PIFs: -> @val.PIFs
|
||||
|
||||
PCIs: -> @genval.PCIs
|
||||
$PCIs: -> @val.PCIs
|
||||
|
||||
PGPUs: -> @genval.PGPUs
|
||||
$PGPUs: -> @val.PGPUs
|
||||
|
||||
tasks: $set {
|
||||
rule: 'task'
|
||||
bind: -> @genval.resident_on
|
||||
if: $isTaskLive
|
||||
}
|
||||
|
||||
$running_VMs: $set {
|
||||
rule: 'VM'
|
||||
bind: -> @val.$container
|
||||
if: $isVMRunning
|
||||
}
|
||||
|
||||
$vCPUs: $sum {
|
||||
rule: 'VM'
|
||||
bind: -> @val.$container
|
||||
if: $isVMRunning
|
||||
val: -> @val.CPUs.number
|
||||
}
|
||||
}
|
||||
|
||||
#-------------------------------------------------------------------
|
||||
|
||||
# This definition is shared.
|
||||
VMdef = ->
|
||||
@data = {
|
||||
metrics: $link -> @genval.metrics
|
||||
guest_metrics: $link -> @genval.guest_metrics
|
||||
}
|
||||
|
||||
@val = {
|
||||
name_label: -> @genval.name_label
|
||||
|
||||
name_description: -> @genval.name_description
|
||||
|
||||
addresses: ->
|
||||
{guest_metrics} = @data
|
||||
if guest_metrics
|
||||
guest_metrics.networks
|
||||
else
|
||||
null
|
||||
|
||||
consoles: $set {
|
||||
rule: 'console'
|
||||
bind: -> @genval.VM
|
||||
val: -> @val
|
||||
}
|
||||
|
||||
current_operations: -> @genval.current_operations
|
||||
|
||||
# TODO: there are two possible values: "best-effort" and "restart"
|
||||
high_availability: ->
|
||||
if @genval.ha_restart_priority
|
||||
true
|
||||
else
|
||||
false
|
||||
|
||||
auto_poweron: ->
|
||||
if @genval.other_config.auto_poweron
|
||||
true
|
||||
else
|
||||
false
|
||||
|
||||
os_version: ->
|
||||
{guest_metrics} = @data
|
||||
if guest_metrics
|
||||
guest_metrics.os_version
|
||||
else
|
||||
null
|
||||
|
||||
VGPUs: -> @genval.VGPUs
|
||||
$VGPUs: -> @val.VGPUs
|
||||
|
||||
power_state: -> @genval.power_state
|
||||
|
||||
other: -> @genval.other_config
|
||||
|
||||
memory: ->
|
||||
{metrics, guest_metrics} = @data
|
||||
|
||||
memory = {
|
||||
dynamic: [
|
||||
+@genval.memory_dynamic_min
|
||||
+@genval.memory_dynamic_max
|
||||
]
|
||||
static: [
|
||||
+@genval.memory_static_min
|
||||
+@genval.memory_static_max
|
||||
]
|
||||
}
|
||||
|
||||
memory.size = if not $isVMRunning.call this
|
||||
+@genval.memory_dynamic_max
|
||||
else if (gmmemory = guest_metrics?.memory)?.used
|
||||
memory.usage = +gmmemory.used
|
||||
+gmmemory.total
|
||||
else if metrics
|
||||
+metrics.memory_actual
|
||||
else
|
||||
+@genval.memory_dynamic_max
|
||||
|
||||
memory
|
||||
|
||||
PV_drivers: ->
|
||||
{guest_metrics} = @data
|
||||
if guest_metrics
|
||||
guest_metrics.PV_drivers_up_to_date
|
||||
else
|
||||
false
|
||||
|
||||
CPUs: ->
|
||||
{metrics} = @data
|
||||
|
||||
CPUs = {
|
||||
max: +@genval.VCPUs_max
|
||||
number: if ($isVMRunning.call this) and metrics
|
||||
+metrics.VCPUs_number
|
||||
else
|
||||
+@genval.VCPUs_at_startup
|
||||
}
|
||||
|
||||
$CPU_usage: null #TODO
|
||||
|
||||
# FIXME: $container should contain the pool UUID when the VM is
|
||||
# not on a host.
|
||||
$container: ->
|
||||
if $isVMRunning.call this
|
||||
@genval.resident_on
|
||||
else
|
||||
# TODO: Handle local VMs. (`get_possible_hosts()`).
|
||||
@genval.$pool.$ref
|
||||
|
||||
snapshots: -> @genval.snapshots
|
||||
|
||||
snapshot_time: -> $toTimestamp @genval.snapshot_time
|
||||
|
||||
$VBDs: -> @genval.VBDs
|
||||
|
||||
VIFs: -> @genval.VIFs
|
||||
|
||||
}
|
||||
@rule VM: VMdef
|
||||
@rule 'VM-controller': VMdef
|
||||
|
||||
|
||||
#-------------------------------------------------------------------
|
||||
|
||||
# VM-snapshot starts with the same definition but extends it.
|
||||
@rule 'VM-snapshot': ->
|
||||
VMdef.call(this)
|
||||
|
||||
@val.$snapshot_of = -> @genval.snapshot_of
|
||||
|
||||
#-------------------------------------------------------------------
|
||||
|
||||
# VM-template starts with the same definition but extends it.
|
||||
@rule 'VM-template': ->
|
||||
VMdef.call this
|
||||
|
||||
@val.CPUs.number = -> +@genval.VCPUs_at_startup
|
||||
|
||||
@val.template_info = {
|
||||
arch: -> @genval.other_config?['install-arch']
|
||||
disks: ->
|
||||
#console.log @genval.other_config
|
||||
disks = @genval.other_config?.disks
|
||||
return [] unless disks?
|
||||
|
||||
disks = ($parseXML disks)?.provision?.disk
|
||||
return [] unless disks?
|
||||
|
||||
disks = [disks] unless $isArray disks
|
||||
# Normalize entries.
|
||||
for disk in disks
|
||||
disk.bootable = disk.bootable is 'true'
|
||||
disk.size = +disk.size
|
||||
disk.SR = disk.sr
|
||||
delete disk.sr
|
||||
disks
|
||||
install_methods: ->
|
||||
methods = @genval.other_config?['install-methods']
|
||||
return [] unless methods?
|
||||
methods.split ','
|
||||
}
|
||||
|
||||
#-------------------------------------------------------------------
|
||||
|
||||
@rule SR: ->
|
||||
@data = {
|
||||
# Note: not dynamic.
|
||||
host: $link(
|
||||
-> @genval.PBDs[0] ? 'OpaqueRef:NULL'
|
||||
-> @val.host
|
||||
)
|
||||
}
|
||||
|
||||
@val = {
|
||||
name_label: -> @genval.name_label
|
||||
|
||||
name_description: -> @genval.name_description
|
||||
|
||||
SR_type: -> @genval.type
|
||||
|
||||
content_type: -> @genval.content_type
|
||||
|
||||
physical_usage: -> +@genval.physical_utilisation
|
||||
|
||||
usage: -> +@genval.virtual_allocation
|
||||
|
||||
size: -> +@genval.physical_size
|
||||
|
||||
$container: ->
|
||||
if @genval.shared
|
||||
@genval.$pool.$ref
|
||||
else
|
||||
@data.host
|
||||
|
||||
$PBDs: -> @genval.PBDs
|
||||
|
||||
VDIs: -> @genval.VDIs
|
||||
}
|
||||
|
||||
#-------------------------------------------------------------------
|
||||
|
||||
@rule PBD: ->
|
||||
@val = {
|
||||
attached: -> @genval.currently_attached
|
||||
|
||||
host: -> @genval.host
|
||||
|
||||
SR: -> @genval.SR
|
||||
}
|
||||
|
||||
#-------------------------------------------------------------------
|
||||
|
||||
@rule PIF: ->
|
||||
@val = {
|
||||
attached: -> @genval.currently_attached
|
||||
|
||||
device: -> @genval.device
|
||||
|
||||
IP: -> @genval.IP
|
||||
|
||||
$host: -> @genval.host
|
||||
|
||||
MAC: -> @genval.MAC
|
||||
|
||||
# TODO: Find a more meaningful name.
|
||||
management: -> @genval.management
|
||||
|
||||
mode: -> @genval.ip_configuration_mode
|
||||
|
||||
MTU: -> +@genval.MTU
|
||||
|
||||
netmask: -> @genval.netmask
|
||||
|
||||
$network: -> @genval.network
|
||||
|
||||
vlan: -> @genval.VLAN
|
||||
|
||||
# TODO: What is it?
|
||||
#
|
||||
# Could it mean “is this a physical interface?”.
|
||||
# How could a PIF not be physical?
|
||||
#physical: -> @genval.physical
|
||||
}
|
||||
|
||||
#-------------------------------------------------------------------
|
||||
|
||||
@rule VDI: ->
|
||||
@val = {
|
||||
name_label: -> @genval.name_label
|
||||
|
||||
name_description: -> @genval.name_description
|
||||
|
||||
# TODO: determine whether or not tags are required for a VDI.
|
||||
#tags: $retrieveTags
|
||||
|
||||
usage: -> +@genval.physical_utilisation
|
||||
|
||||
size: -> +@genval.virtual_size
|
||||
|
||||
$snapshot_of: ->
|
||||
original = @genval.snapshot_of
|
||||
if original is 'OpaqueRef:NULL'
|
||||
null
|
||||
else
|
||||
original
|
||||
|
||||
snapshots: -> @genval.snapshots
|
||||
|
||||
# TODO: Does the name fit?
|
||||
#snapshot_time: -> @genval.snapshot_time
|
||||
|
||||
$SR: -> @genval.SR
|
||||
|
||||
$VBDs: -> @genval.VBDs
|
||||
|
||||
$VBD: -> # Deprecated
|
||||
{VBDs} = @genval
|
||||
|
||||
if VBDs.length is 0 then null else VBDs[0]
|
||||
}
|
||||
|
||||
#-------------------------------------------------------------------
|
||||
|
||||
@rule VBD: ->
|
||||
@val = {
|
||||
attached: -> @genval.currently_attached
|
||||
|
||||
bootable: -> @genval.bootable
|
||||
|
||||
read_only: -> @genval.mode is 'RO'
|
||||
|
||||
is_cd_drive: -> @genval.type is 'CD'
|
||||
|
||||
position: -> @genval.userdevice
|
||||
|
||||
# null if empty.
|
||||
#
|
||||
# TODO: Is it really equivalent?
|
||||
VDI: ->
|
||||
VDI = @genval.VDI
|
||||
if VDI is 'OpaqueRef:NULL'
|
||||
null
|
||||
else
|
||||
VDI
|
||||
|
||||
VM: -> @genval.VM
|
||||
}
|
||||
|
||||
#-------------------------------------------------------------------
|
||||
|
||||
@rule VIF: ->
|
||||
@val = {
|
||||
attached: -> @genval.currently_attached
|
||||
|
||||
# TODO: Should it be cast to a number?
|
||||
device: -> @genval.device
|
||||
|
||||
MAC: -> @genval.MAC
|
||||
|
||||
MTU: -> +@genval.MTU
|
||||
|
||||
$network: -> @genval.network
|
||||
|
||||
$VM: -> @genval.VM
|
||||
}
|
||||
|
||||
#-------------------------------------------------------------------
|
||||
|
||||
@rule network: ->
|
||||
@val = {
|
||||
name_label: -> @genval.name_label
|
||||
|
||||
name_description: -> @genval.name_description
|
||||
|
||||
# TODO: determine whether or not tags are required for a VDI.
|
||||
#tags: $retrieveTags
|
||||
|
||||
bridge: -> @genval.bridge
|
||||
|
||||
MTU: -> +@genval.MTU
|
||||
|
||||
PIFs: -> @genval.PIFs
|
||||
|
||||
VIFs: -> @genval.VIFs
|
||||
}
|
||||
|
||||
#-------------------------------------------------------------------
|
||||
|
||||
@rule message: ->
|
||||
@val = {
|
||||
time: -> $toTimestamp @genval.timestamp
|
||||
|
||||
$object: ->
|
||||
# If the key of the concerned object has already been resolved,
|
||||
# returns the known value.
|
||||
return @val.$object if @val.$object?
|
||||
|
||||
# Tries to resolve the key of the concerned object.
|
||||
object = (UUIDsToKeys.call this)[@genval.obj_uuid]
|
||||
|
||||
# If resolved, unregister from the watcher.
|
||||
UUIDsToKeys.unsubscribe.call this if object?
|
||||
|
||||
object
|
||||
|
||||
# TODO: Are these names meaningful?
|
||||
name: -> @genval.name
|
||||
body: -> @genval.body
|
||||
}
|
||||
|
||||
#-------------------------------------------------------------------
|
||||
|
||||
@rule task: ->
|
||||
@val = {
|
||||
name_label: -> @genval.name_label
|
||||
|
||||
name_description: -> @genval.name_description
|
||||
|
||||
progress: -> +@genval.progress
|
||||
|
||||
result: -> @genval.result
|
||||
|
||||
$host: -> @genval.resident_on
|
||||
|
||||
created: -> @genval.created
|
||||
|
||||
finished: -> @genval.finished
|
||||
|
||||
current_operations: -> @genval.current_operations
|
||||
|
||||
status: -> @genval.status
|
||||
}
|
||||
|
||||
@rule host_patch: ->
|
||||
@val = {
|
||||
applied: -> @genval.applied
|
||||
|
||||
$host: -> @genval.host
|
||||
|
||||
time: -> $toTimestamp @genval.timestamp_applied
|
||||
|
||||
pool_patch: -> @genval.pool_patch
|
||||
|
||||
}
|
||||
|
||||
@rule pool_patch: ->
|
||||
@val = {
|
||||
name_label: -> @genval.name_label
|
||||
|
||||
name_description: -> @genval.name_description
|
||||
|
||||
applied: -> @genval.pool_applied
|
||||
|
||||
version: -> @genval.version
|
||||
|
||||
$host_patches: -> @genval.host_patches
|
||||
|
||||
size: -> @genval.size
|
||||
|
||||
}
|
||||
|
||||
@rule pci: ->
|
||||
@val = {
|
||||
pci_id: -> @genval.pci_id
|
||||
|
||||
class_name: -> @genval.class_name
|
||||
|
||||
device_name: -> @genval.device_name
|
||||
|
||||
$host: -> @genval.host
|
||||
|
||||
}
|
||||
|
||||
@rule pgpu: ->
|
||||
@val = {
|
||||
pci: -> @genval.PCI
|
||||
|
||||
host: -> @genval.host
|
||||
|
||||
vgpus: -> @genval.resident_VGPUs
|
||||
$vgpus: -> @val.vgpus
|
||||
|
||||
$host: -> @genval.host
|
||||
|
||||
}
|
||||
|
||||
@rule vgpu: ->
|
||||
@val = {
|
||||
currentAttached: -> @genval.currently_attached
|
||||
|
||||
vm: -> @genval.VM
|
||||
|
||||
device: -> @genval.device
|
||||
|
||||
resident_on: -> @genval.resident_on
|
||||
|
||||
}
|
||||
|
||||
return
|
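These rules map raw XAPI records (`@genval`) onto computed view properties (`@val`). A minimal JavaScript sketch of what, for instance, the VBD rule above derives from a hypothetical record (field names taken from the rule, values invented):

// Hypothetical raw XAPI VBD record (what the rule receives as @genval).
const genval = {
  currently_attached: true,
  bootable: false,
  mode: 'RO',
  type: 'CD',
  userdevice: '3',
  VDI: 'OpaqueRef:NULL',
  VM: 'OpaqueRef:some-vm'
}

// Derived view, mirroring the VBD rule: mode 'RO' means read-only,
// type 'CD' means CD drive, and an 'OpaqueRef:NULL' VDI is exposed as
// null (empty drive).
const val = {
  attached: genval.currently_attached,                      // true
  bootable: genval.bootable,                                 // false
  read_only: genval.mode === 'RO',                           // true
  is_cd_drive: genval.type === 'CD',                         // true
  position: genval.userdevice,                               // '3'
  VDI: genval.VDI === 'OpaqueRef:NULL' ? null : genval.VDI,  // null
  VM: genval.VM
}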
File diff suppressed because one or more lines are too long
@ -1,763 +0,0 @@
|
||||
{expect: $expect} = require 'chai'
|
||||
|
||||
$sinon = require 'sinon'
|
||||
|
||||
#---------------------------------------------------------------------
|
||||
|
||||
{$MappedCollection} = require './MappedCollection'
|
||||
|
||||
# Helpers for dealing with fibers.
|
||||
{$coroutine} = require './fibers-utils'
|
||||
|
||||
#=====================================================================
|
||||
|
||||
describe 'spec', ->
|
||||
|
||||
collection = null
|
||||
before $coroutine ->
|
||||
# Creates the collection.
|
||||
collection = new $MappedCollection()
|
||||
|
||||
# Loads the spec.
|
||||
(require './spec').call collection
|
||||
|
||||
# Loads the mockup data.
|
||||
collection.set (require './spec.spec-data')
|
||||
|
||||
#console.log collection.get()
|
||||
|
||||
it 'xo', ->
|
||||
xo = collection.get 'xo'
|
||||
|
||||
#console.log xo
|
||||
|
||||
$expect(xo).to.be.an 'object'
|
||||
|
||||
$expect(xo.type).to.equal 'xo'
|
||||
|
||||
$expect(xo.pools).to.have.members [
|
||||
'OpaqueRef:6462d0b3-8f20-ef76-fddf-002f7af3452e'
|
||||
]
|
||||
|
||||
$expect(xo.$CPUs).to.equal 8
|
||||
|
||||
$expect(xo.$running_VMs).to.have.members [
|
||||
'OpaqueRef:fdaba312-c3a5-0190-b1a1-bf389567e620'
|
||||
'OpaqueRef:46fa4c52-5e93-6cf7-32e3-c51fb4ed106d'
|
||||
'OpaqueRef:c0fa9288-2a6b-cd8e-b9a8-cc5afc63b386'
|
||||
'OpaqueRef:be2390b2-cd08-53f5-3fae-b76f6f3725bf'
|
||||
'OpaqueRef:8f9966ea-38ef-ac4c-b634-81e31ef1e7c1'
|
||||
'OpaqueRef:646297e5-4fd6-c70d-6365-ef19b9807f64'
|
||||
'OpaqueRef:1ef43ee8-bc18-6c4f-4919-0e42a3ac6e4b'
|
||||
]
|
||||
|
||||
$expect(xo.$vCPUs).to.equal 10
|
||||
|
||||
$expect(xo.$memory).to.be.an 'object'
|
||||
$expect(xo.$memory.usage).to.equal 15185723392
|
||||
$expect(xo.$memory.size).to.equal 33532379136
|
||||
|
||||
UUIDsToKeys = {}
|
||||
UUIDsToKeys[obj.UUID] = "#{obj.ref}" for obj in collection.get() when obj.UUID?
|
||||
$expect(xo.$UUIDsToKeys).to.deep.equal UUIDsToKeys
|
||||
|
||||
it 'pool', ->
|
||||
pool = collection.get 'OpaqueRef:6462d0b3-8f20-ef76-fddf-002f7af3452e'
|
||||
|
||||
#console.log pool
|
||||
|
||||
$expect(pool).to.be.an 'object'
|
||||
|
||||
$expect(pool.type).to.equal 'pool'
|
||||
|
||||
$expect(pool.name_label).to.equal 'Lab Pool'
|
||||
|
||||
$expect(pool.name_description).to.equal 'Vates dev pool at our HQ'
|
||||
|
||||
$expect(pool.tags).to.have.members []
|
||||
|
||||
$expect(pool.SRs).to.have.members [
|
||||
'OpaqueRef:d6fe49bf-dd48-c929-5aab-b2786a2e7aee'
|
||||
'OpaqueRef:6637b7d7-9e5c-f331-c7e4-a7f68f77a047'
|
||||
'OpaqueRef:557155b2-f092-3417-f509-7ee35b1d42da'
|
||||
]
|
||||
|
||||
$expect(pool.default_SR).to.equal 'OpaqueRef:d6fe49bf-dd48-c929-5aab-b2786a2e7aee'
|
||||
|
||||
$expect(pool.HA_enabled).to.be.false
|
||||
|
||||
$expect(pool.hosts).to.have.members [
|
||||
'OpaqueRef:cd0f68c5-5245-5ae8-f0e1-324e2201c692'
|
||||
'OpaqueRef:bbc98f5e-1a17-2030-28af-0df2393f3145'
|
||||
]
|
||||
|
||||
$expect(pool.master).to.equal 'OpaqueRef:bbc98f5e-1a17-2030-28af-0df2393f3145'
|
||||
|
||||
$expect(pool.networks).to.have.members [
|
||||
'OpaqueRef:dbc93777-f2c0-e888-967d-dd9beeffb3c0'
|
||||
'OpaqueRef:4e265829-7517-3520-6a97-56b6ac0730c9'
|
||||
'OpaqueRef:16013d48-b9eb-84c0-0e62-d809211b0632'
|
||||
]
|
||||
|
||||
$expect(pool.templates).to.have.members [
|
||||
'OpaqueRef:f81c6db6-4227-55a5-0c2f-b670ca5d8d3f'
|
||||
'OpaqueRef:f449b8ec-ac86-1b6d-2347-37ec36c41bc5'
|
||||
'OpaqueRef:f02a3c19-447b-c618-fb51-a9cde79be17c'
|
||||
'OpaqueRef:ee2e2c00-8011-4847-ba7e-c288d5fb01f5'
|
||||
'OpaqueRef:ebc96e49-11d4-471d-c21f-625a95c34ff9'
|
||||
'OpaqueRef:e9fb38c8-acc3-dbb8-cc6f-f1f89b03c1ae'
|
||||
'OpaqueRef:e803bc1b-d3be-b95f-f3cc-a26a174ec93c'
|
||||
'OpaqueRef:e373c644-3576-985e-9c8f-67062c81d0d2'
|
||||
'OpaqueRef:e3035b8b-cd27-3e7c-ecbf-54a18a2da59e'
|
||||
'OpaqueRef:d99a46bf-1b68-072c-00db-444d099466cd'
|
||||
'OpaqueRef:d45b3989-7350-5166-eeaa-7b789a32addd'
|
||||
'OpaqueRef:d18c965e-0cef-48b0-2f8d-d48ef6663c32'
|
||||
'OpaqueRef:d15de0db-1dc5-2a00-331a-c0f7d3c2e123'
|
||||
'OpaqueRef:cfe620f9-5c68-0f35-ce9f-8f5227fda1c8'
|
||||
'OpaqueRef:cb865487-9139-3fbc-4aac-68abdb663925'
|
||||
'OpaqueRef:c8bf31d6-9888-4256-1547-c722016a0079'
|
||||
'OpaqueRef:c651901b-0944-be6b-aabf-a87d9a037edd'
|
||||
'OpaqueRef:c5a9e2de-1916-7f4c-aa2a-ce95d138032b'
|
||||
'OpaqueRef:c22bce1f-16a0-7745-179d-dcbd5c5deab3'
|
||||
'OpaqueRef:be6abc7d-dd7a-5ee6-9c95-8e562a69d992'
|
||||
'OpaqueRef:b9587bb6-6efe-0c71-e01c-2c750c9ab774'
|
||||
'OpaqueRef:b6f58482-8b60-b3b4-2a01-0d6113411bf2'
|
||||
'OpaqueRef:ad21fbbb-6cf9-e6ca-c415-1f428f20da1f'
|
||||
'OpaqueRef:aa2d04ec-0512-c128-8820-c8ecde93baa4'
|
||||
'OpaqueRef:a247a02f-8909-5044-64a0-82460b25e740'
|
||||
'OpaqueRef:9d28dba9-aee6-cafd-06af-54ebdfb1c271'
|
||||
'OpaqueRef:9796cc01-6640-211f-09f9-fee94f9cd720'
|
||||
'OpaqueRef:922b3a98-f238-4cea-8b75-c38e90ac11ee'
|
||||
'OpaqueRef:8e720505-e75b-eda3-3b14-fd1471890cc1'
|
||||
'OpaqueRef:8e3211dc-fdaf-22c7-41b2-c3a892529679'
|
||||
'OpaqueRef:89919714-1184-ce4b-3cb5-67059640b3a7'
|
||||
'OpaqueRef:892768c0-4d15-769f-e760-b781a0291ddb'
|
||||
'OpaqueRef:838ff163-ae6e-d98e-9cef-4d783f81dcb0'
|
||||
'OpaqueRef:8079d64b-fe87-0ecf-e558-7b607b0e1524'
|
||||
'OpaqueRef:773d92c9-898b-bc25-a50d-d868bbf933a4'
|
||||
'OpaqueRef:770d2193-ab69-4fc3-c462-7f75a79d497c'
|
||||
'OpaqueRef:75441e00-55df-85f5-1780-731110df91de'
|
||||
'OpaqueRef:6ee1cc24-ebbb-b02a-88b0-a921c7a5f217'
|
||||
'OpaqueRef:6b5be573-b116-6238-9cff-bde0658d6f18'
|
||||
'OpaqueRef:6a09a6de-e778-a474-4ebd-f617db5b5d5e'
|
||||
'OpaqueRef:616942c0-1e1b-e733-3c4c-7236fd3de158'
|
||||
'OpaqueRef:5e93cf73-a212-a83f-d3f9-a539be98d320'
|
||||
'OpaqueRef:56af2e14-d4bb-20e9-421b-00d75dfb89f2'
|
||||
'OpaqueRef:5059cc2d-b414-97eb-6aac-ce816b72b2bd'
|
||||
'OpaqueRef:4a43ad28-b809-2c8f-aa24-70d8bd4954f2'
|
||||
'OpaqueRef:466d7dc3-f2df-8c8d-685d-eef256fe2b43'
|
||||
'OpaqueRef:4347e9d6-7faf-90e4-4f5f-d513cf44b3cc'
|
||||
'OpaqueRef:3c4558e8-ed88-ce88-81a9-111ac2cc56d6'
|
||||
'OpaqueRef:3b97e45b-aa4e-d175-95e5-e95ceefa0b6b'
|
||||
'OpaqueRef:2e3b5ada-5083-87b1-d6fb-aaa0e5bd862d'
|
||||
'OpaqueRef:2b6e3248-52b0-85d1-7415-4f91a0a90a3a'
|
||||
'OpaqueRef:2a838052-3aa3-d09d-1eae-8293a565fef5'
|
||||
'OpaqueRef:2a092eee-7c6a-058b-0368-b37362328678'
|
||||
'OpaqueRef:2968283f-8656-6e31-816c-e96325e66ebf'
|
||||
'OpaqueRef:27ad4e06-a7b2-20a2-4fd9-7f1b54fdc5a2'
|
||||
'OpaqueRef:217d930f-8e65-14e6-eb20-63d55158093f'
|
||||
'OpaqueRef:20377446-2388-5c8f-d3f2-6e9c883c61d9'
|
||||
'OpaqueRef:201cf416-bfd0-00d3-a4d2-b19226c43c82'
|
||||
'OpaqueRef:1ed4ee31-56e0-98da-65d4-00c776716b9c'
|
||||
'OpaqueRef:1c0b590d-563b-5061-a253-f98535ab8389'
|
||||
'OpaqueRef:1be0fe3b-1944-06db-3734-b6bb888cfe78'
|
||||
'OpaqueRef:12d0dfc0-ce63-a072-3cd0-ccba7bd3c200'
|
||||
'OpaqueRef:039273c3-b4b2-5c68-63e4-c5610a738fe3'
|
||||
'OpaqueRef:030314a2-0909-9e7a-418a-9f38746aaf0c',
|
||||
]
|
||||
|
||||
$expect(pool.VMs).to.have.members [
|
||||
'OpaqueRef:d4fa8fba-ec86-5928-a1bb-dd78b6fb5944'
|
||||
'OpaqueRef:8491f148-3e78-9c74-ab98-84445c5f2861'
|
||||
'OpaqueRef:13b9ec24-04ea-ae04-78e6-6ec4b81a8deb'
|
||||
]
|
||||
|
||||
$expect(pool.$running_hosts).to.have.members [
|
||||
'OpaqueRef:cd0f68c5-5245-5ae8-f0e1-324e2201c692'
|
||||
'OpaqueRef:bbc98f5e-1a17-2030-28af-0df2393f3145'
|
||||
]
|
||||
|
||||
$expect(pool.$running_VMs).to.have.members [
|
||||
'OpaqueRef:fdaba312-c3a5-0190-b1a1-bf389567e620'
|
||||
'OpaqueRef:46fa4c52-5e93-6cf7-32e3-c51fb4ed106d'
|
||||
'OpaqueRef:c0fa9288-2a6b-cd8e-b9a8-cc5afc63b386'
|
||||
'OpaqueRef:be2390b2-cd08-53f5-3fae-b76f6f3725bf'
|
||||
'OpaqueRef:8f9966ea-38ef-ac4c-b634-81e31ef1e7c1'
|
||||
'OpaqueRef:646297e5-4fd6-c70d-6365-ef19b9807f64'
|
||||
'OpaqueRef:1ef43ee8-bc18-6c4f-4919-0e42a3ac6e4b'
|
||||
]
|
||||
|
||||
$expect(pool.$VMs).to.have.members [
|
||||
'OpaqueRef:fdaba312-c3a5-0190-b1a1-bf389567e620'
|
||||
'OpaqueRef:46fa4c52-5e93-6cf7-32e3-c51fb4ed106d'
|
||||
'OpaqueRef:d4fa8fba-ec86-5928-a1bb-dd78b6fb5944'
|
||||
'OpaqueRef:8491f148-3e78-9c74-ab98-84445c5f2861'
|
||||
'OpaqueRef:13b9ec24-04ea-ae04-78e6-6ec4b81a8deb'
|
||||
'OpaqueRef:c0fa9288-2a6b-cd8e-b9a8-cc5afc63b386'
|
||||
'OpaqueRef:be2390b2-cd08-53f5-3fae-b76f6f3725bf'
|
||||
'OpaqueRef:8f9966ea-38ef-ac4c-b634-81e31ef1e7c1'
|
||||
'OpaqueRef:646297e5-4fd6-c70d-6365-ef19b9807f64'
|
||||
'OpaqueRef:1ef43ee8-bc18-6c4f-4919-0e42a3ac6e4b'
|
||||
]
|
||||
|
||||
$expect(pool.messages).to.have.members [
|
||||
'OpaqueRef:0241d2be-fcda-64b7-b95d-550399f22000'
|
||||
'OpaqueRef:08093780-5d87-46f4-400d-fc8406bdd6c2'
|
||||
'OpaqueRef:0c565205-db69-eb0f-b80b-a8e356ae43ae'
|
||||
'OpaqueRef:0f955091-d6e6-ed3e-2bbe-94d914e6efbe'
|
||||
'OpaqueRef:15f61c91-5ac8-6234-78bb-2edbdcf9164f'
|
||||
'OpaqueRef:1b04b4db-3622-4d54-e8fa-a2f6661c6e43'
|
||||
'OpaqueRef:20aadafb-47c8-0796-e3c2-4e497bcb0205'
|
||||
'OpaqueRef:2243e321-e4bd-50dd-1451-f329df240517'
|
||||
'OpaqueRef:226e9274-77d6-9805-a0f3-396d1e54fe72'
|
||||
'OpaqueRef:230d01c6-3e25-b877-9e35-13a707335e23'
|
||||
'OpaqueRef:279e9aed-7d9e-13bc-e4d2-d477abbf9f6a'
|
||||
'OpaqueRef:2c460c86-2e1c-cd0d-cbaf-95bf771af2bc'
|
||||
'OpaqueRef:300a2868-2b8a-4f0c-788d-4e2ba4a160da'
|
||||
'OpaqueRef:323297f9-4a0b-c517-1ff7-eacad80fc796'
|
||||
'OpaqueRef:33d58ecd-d2a4-f63a-46bb-307a7c7762a6'
|
||||
'OpaqueRef:3962ad4b-18e9-53ce-ff72-b2ef3d6692ec'
|
||||
'OpaqueRef:3a8a42d6-f5b3-1479-3ad6-2c7caed94459'
|
||||
'OpaqueRef:3f77ad7a-de22-0b05-4005-7cfdc5d8bc86'
|
||||
'OpaqueRef:4147a60c-2b41-4dc7-491d-3470466abbc7'
|
||||
'OpaqueRef:443c4e46-d98a-87d6-92f5-c35bb5b65a5c'
|
||||
'OpaqueRef:4a3aebd9-e670-c796-4938-e29e178f1959'
|
||||
'OpaqueRef:50f02c5f-b2d0-a42a-a156-7905b78a918a'
|
||||
'OpaqueRef:5f34bfc5-f92f-9830-b3e9-06367ef56a77'
|
||||
'OpaqueRef:69d3511e-ec73-69c9-819e-14b85236059d'
|
||||
'OpaqueRef:6b04d423-8991-c838-d981-aca1b9c7be7d'
|
||||
'OpaqueRef:6e161f6f-df2b-195f-be46-530884a2c24a'
|
||||
'OpaqueRef:6f9b4c87-c7ba-1a87-073d-569051f307a8'
|
||||
'OpaqueRef:72360954-3629-1e09-b1bf-b819732bddfd'
|
||||
'OpaqueRef:79f9e82b-1a0e-75b7-efc5-8689a4cd4aed'
|
||||
'OpaqueRef:844844c6-5e82-4d9c-7ed9-01c46d46e67c'
|
||||
'OpaqueRef:84a7efe6-2a37-d4be-5f9a-aa66adfe3104'
|
||||
'OpaqueRef:9a645810-7308-c296-d9df-cc5d91f8f2a4'
|
||||
'OpaqueRef:a073f53c-557a-fd67-878d-b3a881ebd935'
|
||||
'OpaqueRef:a08f1c9a-34de-5441-b847-18533244910d'
|
||||
'OpaqueRef:a4fd777c-f417-23e9-8338-30d8097a8430'
|
||||
'OpaqueRef:a5296901-25c3-b600-7be7-16a20ba86600'
|
||||
'OpaqueRef:a99badbe-75fa-8bc8-22b3-78c616873b62'
|
||||
'OpaqueRef:ab16dfa7-3c86-56c3-038c-c6bcfe0b64c1'
|
||||
'OpaqueRef:af840b26-91b6-56aa-e2a0-266ce7dd411b'
|
||||
'OpaqueRef:b857ac11-36a0-38e4-4d9c-13586e381f7a'
|
||||
'OpaqueRef:c0b26952-1a46-9dfb-a826-78cbfeaa1b00'
|
||||
'OpaqueRef:cdeda917-3496-c407-95fd-2ef63bf5e79e'
|
||||
'OpaqueRef:d5ab7d13-0ebb-5805-b767-608cb7737690'
|
||||
'OpaqueRef:dae9fbe3-a709-3433-e8e3-491b3a79df84'
|
||||
'OpaqueRef:dd735a0f-d2fd-9475-7dd3-b387251f4426'
|
||||
'OpaqueRef:df07d60e-8a03-6979-3e61-4460bc8197b3'
|
||||
'OpaqueRef:e6a0aa45-f8e0-ae7d-7b3a-d76b95a03c95'
|
||||
'OpaqueRef:eaad760a-0e23-4e2b-3f96-2f65170a1dd7'
|
||||
'OpaqueRef:ebead5cf-4a48-ad28-4241-ad5869fa9752'
|
||||
'OpaqueRef:ecc7b91d-6f50-94c6-6f51-2d609dc3ebe7'
|
||||
'OpaqueRef:f3492f88-e0b0-405a-5723-f83429e016c5'
|
||||
]
|
||||
|
||||
it 'host', ->
|
||||
host = collection.get 'OpaqueRef:bbc98f5e-1a17-2030-28af-0df2393f3145'
|
||||
|
||||
#console.log host
|
||||
|
||||
$expect(host).to.be.an 'object'
|
||||
|
||||
$expect(host.type).to.equal 'host'
|
||||
|
||||
$expect(host.name_label).to.equal 'lab1'
|
||||
|
||||
$expect(host.name_description).to.equal 'Default install of XenServer'
|
||||
|
||||
$expect(host.tags).to.have.members []
|
||||
|
||||
$expect(host.address).to.equal '192.168.1.1'
|
||||
|
||||
$expect(host.controller).to.equal 'OpaqueRef:719e4877-c7ad-68be-6b04-5750c8dcfeed'
|
||||
|
||||
# Yuck.
|
||||
$expect(host.CPUs).to.deep.equal {
|
||||
cpu_count: '4'
|
||||
socket_count: '1'
|
||||
vendor: 'GenuineIntel'
|
||||
speed: '3192.858'
|
||||
modelname: 'Intel(R) Core(TM) i5-3470 CPU @ 3.20GHz'
|
||||
family: '6'
|
||||
model: '58'
|
||||
stepping: '9'
|
||||
flags: 'fpu de tsc msr pae mce cx8 apic sep mtrr mca cmov pat clflush acpi mmx fxsr sse sse2 ss ht nx constant_tsc nonstop_tsc aperfmperf pni pclmulqdq vmx est ssse3 sse4_1 sse4_2 x2apic popcnt aes hypervisor ida arat tpr_shadow vnmi flexpriority ept vpid'
|
||||
features: '77bae3ff-bfebfbff-00000001-28100800'
|
||||
features_after_reboot: '77bae3ff-bfebfbff-00000001-28100800'
|
||||
physical_features: '77bae3ff-bfebfbff-00000001-28100800'
|
||||
maskable: 'full'
|
||||
}
|
||||
|
||||
$expect(host.enabled).to.be.true
|
||||
|
||||
$expect(host.hostname).to.equal 'lab1'
|
||||
|
||||
$expect(host.iSCSI_name).to.equal 'iqn.2013-07.com.example:83ba9261'
|
||||
|
||||
$expect(host.memory).to.be.an 'object'
|
||||
$expect(host.memory.usage).to.equal 2564788224
|
||||
$expect(host.memory.size).to.equal 8502759424
|
||||
|
||||
$expect(host.power_state).to.equal 'Running'
|
||||
|
||||
$expect(host.SRs).to.have.members [
|
||||
'OpaqueRef:31be9b5e-882a-a8ae-0edf-bf8942b49b5a'
|
||||
'OpaqueRef:7c88a8c6-fc48-8836-28fa-212f67c42d2f'
|
||||
'OpaqueRef:ec76bd6a-f2c0-636d-ca72-de8fb42d6eea'
|
||||
]
|
||||
|
||||
$expect(host.templates).to.have.members [
|
||||
# TODO
|
||||
]
|
||||
|
||||
$expect(host.VMs).to.have.members [
|
||||
'OpaqueRef:fdaba312-c3a5-0190-b1a1-bf389567e620'
|
||||
'OpaqueRef:46fa4c52-5e93-6cf7-32e3-c51fb4ed106d'
|
||||
]
|
||||
|
||||
$expect(host.$PBDs).to.have.members [
|
||||
'OpaqueRef:ff32de74-138c-9d80-ab58-c631d2aa0e71'
|
||||
'OpaqueRef:f0f98779-5cf8-cabc-edc3-631a2d63d89c'
|
||||
'OpaqueRef:b70f8e06-07a8-a5e7-2856-f221c822e9b2'
|
||||
'OpaqueRef:b641552a-8c92-71b3-c0a2-e4dd3d04c215'
|
||||
'OpaqueRef:93320534-824f-850a-64a2-bcbfdc2e0927'
|
||||
'OpaqueRef:0c1d3862-5a38-e4cc-4a46-d8358a622461'
|
||||
]
|
||||
|
||||
$expect(host.$PIFs).to.have.members [
|
||||
'OpaqueRef:aef57ed4-e4d9-7f72-0376-b781a19bb9d2'
|
||||
'OpaqueRef:06f53e3d-d8de-d4ed-6359-9e20b4fb0d21'
|
||||
]
|
||||
|
||||
$expect(host.messages).to.have.members [
|
||||
'OpaqueRef:cb515b9a-ef8c-13d4-88ea-e0d3ee88d22a'
|
||||
'OpaqueRef:6ba7c244-3b44-2ed2-ec81-4fa13ea82465'
|
||||
'OpaqueRef:0e3fc97f-45ce-26c3-9435-899be96b35c4'
|
||||
'OpaqueRef:6ca16f45-6266-6cff-55cd-19a8ef0acf1a'
|
||||
'OpaqueRef:11452a2a-1ccd-e4df-25d8-ba99bba710db'
|
||||
'OpaqueRef:9ddc8eb2-969f-ba56-757a-efd482da5ce9'
|
||||
'OpaqueRef:68c8d0c6-e5a2-8ade-569a-dfc732e7994d'
|
||||
'OpaqueRef:ddb628ca-24f1-04d2-0b2c-9996aaab59f2'
|
||||
'OpaqueRef:0e7044a7-542b-4dd9-65bc-cded0e41853a'
|
||||
'OpaqueRef:ee26daf0-2ff7-734e-438d-9a521aaaa0c5'
|
||||
'OpaqueRef:40f8459f-1b6b-1625-1284-0f2878c3203d'
|
||||
'OpaqueRef:739ca434-6dca-b633-0097-b3f3183150a7'
|
||||
'OpaqueRef:cf655e45-c8c7-bdb9-e56c-5b67d6952f15'
|
||||
'OpaqueRef:3e33b140-f7e8-7dcc-3475-97dcc2fbfb5b'
|
||||
'OpaqueRef:8f3e2923-e690-e859-4f9e-a3e711a1e230'
|
||||
'OpaqueRef:ed7b1960-1ab7-4f47-8ef1-7a7769e09207'
|
||||
'OpaqueRef:6a0c4183-2f95-661f-9b19-0df0015867ca'
|
||||
'OpaqueRef:8d04b3fa-e81d-c6ae-d072-bd3a1ea22189'
|
||||
'OpaqueRef:dada1bd4-d7ed-429f-0a1a-585a3bfbf7e6'
|
||||
'OpaqueRef:a5648ca1-b37a-0765-9192-ebfb9ff376e8'
|
||||
'OpaqueRef:78c09b42-ad6f-0e66-0349-80b45264120d'
|
||||
'OpaqueRef:9c657a2b-560c-2050-014a-20e8cf5bd235'
|
||||
'OpaqueRef:1d50d25b-41f6-ffd3-5410-0de4fbed8543'
|
||||
'OpaqueRef:cb515b9a-ef8c-13d4-88ea-e0d3ee88d22a'
|
||||
'OpaqueRef:6ba7c244-3b44-2ed2-ec81-4fa13ea82465'
|
||||
'OpaqueRef:0e3fc97f-45ce-26c3-9435-899be96b35c4'
|
||||
'OpaqueRef:6ca16f45-6266-6cff-55cd-19a8ef0acf1a'
|
||||
'OpaqueRef:11452a2a-1ccd-e4df-25d8-ba99bba710db'
|
||||
'OpaqueRef:9ddc8eb2-969f-ba56-757a-efd482da5ce9'
|
||||
'OpaqueRef:68c8d0c6-e5a2-8ade-569a-dfc732e7994d'
|
||||
'OpaqueRef:ddb628ca-24f1-04d2-0b2c-9996aaab59f2'
|
||||
'OpaqueRef:0e7044a7-542b-4dd9-65bc-cded0e41853a'
|
||||
'OpaqueRef:ee26daf0-2ff7-734e-438d-9a521aaaa0c5'
|
||||
'OpaqueRef:40f8459f-1b6b-1625-1284-0f2878c3203d'
|
||||
'OpaqueRef:739ca434-6dca-b633-0097-b3f3183150a7'
|
||||
'OpaqueRef:cf655e45-c8c7-bdb9-e56c-5b67d6952f15'
|
||||
'OpaqueRef:3e33b140-f7e8-7dcc-3475-97dcc2fbfb5b'
|
||||
'OpaqueRef:8f3e2923-e690-e859-4f9e-a3e711a1e230'
|
||||
'OpaqueRef:ed7b1960-1ab7-4f47-8ef1-7a7769e09207'
|
||||
'OpaqueRef:6a0c4183-2f95-661f-9b19-0df0015867ca'
|
||||
'OpaqueRef:8d04b3fa-e81d-c6ae-d072-bd3a1ea22189'
|
||||
'OpaqueRef:dada1bd4-d7ed-429f-0a1a-585a3bfbf7e6'
|
||||
'OpaqueRef:a5648ca1-b37a-0765-9192-ebfb9ff376e8'
|
||||
'OpaqueRef:78c09b42-ad6f-0e66-0349-80b45264120d'
|
||||
'OpaqueRef:9c657a2b-560c-2050-014a-20e8cf5bd235'
|
||||
'OpaqueRef:1d50d25b-41f6-ffd3-5410-0de4fbed8543'
|
||||
]
|
||||
|
||||
$expect(host.tasks).to.have.members [
|
||||
# TODO
|
||||
]
|
||||
|
||||
$expect(host.$running_VMs).to.have.members [
|
||||
'OpaqueRef:fdaba312-c3a5-0190-b1a1-bf389567e620'
|
||||
'OpaqueRef:46fa4c52-5e93-6cf7-32e3-c51fb4ed106d'
|
||||
]
|
||||
|
||||
$expect(host.$vCPUs).to.equal 2
|
||||
|
||||
it 'VM', ->
|
||||
vm = collection.get 'OpaqueRef:fdaba312-c3a5-0190-b1a1-bf389567e620'
|
||||
|
||||
#console.log vm
|
||||
|
||||
$expect(vm).to.be.an 'object'
|
||||
|
||||
$expect(vm.type).to.equal 'VM'
|
||||
|
||||
$expect(vm.name_label).to.equal 'ceph3'
|
||||
|
||||
$expect(vm.name_description).to.equal ''
|
||||
|
||||
$expect(vm.tags).to.have.members []
|
||||
|
||||
$expect(vm.addresses).to.deep.equal {
|
||||
'0/ip': '192.168.1.116'
|
||||
'0/ipv6/0': 'fe80::cc20:2bff:fe38:7ffd'
|
||||
}
|
||||
|
||||
$expect(vm.consoles).to.deep.equal [
|
||||
{
|
||||
uuid: 'b7f85b67-4b8a-0586-b279-6146da76642f'
|
||||
protocol: 'rfb'
|
||||
location: 'https://192.168.1.1/console?uuid=b7f85b67-4b8a-0586-b279-6146da76642f'
|
||||
VM: 'OpaqueRef:fdaba312-c3a5-0190-b1a1-bf389567e620'
|
||||
other_config: {}
|
||||
'$pool': '313624ab-0958-bb1e-45b5-7556a463a10b'
|
||||
'$poolRef': 'OpaqueRef:6462d0b3-8f20-ef76-fddf-002f7af3452e'
|
||||
'$ref': 'OpaqueRef:69b8dbde-161c-b3fa-bd1a-3567e7efdbda'
|
||||
'$type': 'console'
|
||||
}
|
||||
]
|
||||
|
||||
$expect(vm.current_operations).to.deep.equal {
|
||||
# No data for this test.
|
||||
}
|
||||
|
||||
$expect(vm.memory).to.deep.equal {
|
||||
dynamic: [
|
||||
536870912
|
||||
536870912
|
||||
]
|
||||
static: [
|
||||
134217728
|
||||
536870912
|
||||
]
|
||||
size: 536838144
|
||||
}
|
||||
|
||||
$expect(vm.messages).to.have.members [
|
||||
'OpaqueRef:a242799a-03bf-b55e-ecde-ddfe902fa69e'
|
||||
'OpaqueRef:5cec485b-e276-c45b-09cb-dd02bb1d00f3'
|
||||
'OpaqueRef:ff3b6df1-b761-0d75-e80e-4ef137eec9e6'
|
||||
'OpaqueRef:a8d94d7e-7a6e-0cc1-b7a0-8f18940410fd'
|
||||
'OpaqueRef:35585a79-caf7-6522-18ee-8d3e8459441d'
|
||||
'OpaqueRef:68d1102f-eadc-e1f3-7949-3f62248c165c'
|
||||
'OpaqueRef:974bef10-184a-c063-aa32-c318fd39e400'
|
||||
'OpaqueRef:e092c4e1-a211-204a-f773-49cc3a4611be'
|
||||
'OpaqueRef:013a4a12-1981-fbc8-92ac-1fa45d2e9c9c'
|
||||
'OpaqueRef:a77fc714-b5b1-0c37-d006-0935506bb8cd'
|
||||
'OpaqueRef:554ec983-e67a-fc8b-7d2a-00c55be5f266'
|
||||
'OpaqueRef:38404a18-4c1b-0bf5-1d45-c47243bbc69d'
|
||||
'OpaqueRef:0f98e883-a4d5-0fd8-3aa3-92be69adc4e3'
|
||||
'OpaqueRef:b3e9ac53-f6b8-4c49-f096-57f680136477'
|
||||
'OpaqueRef:1aa65d64-a00b-4c0b-be07-95f6eec7fd87'
|
||||
'OpaqueRef:be431f8c-f39b-4a64-5fc2-de9744ced26a'
|
||||
'OpaqueRef:0e571611-6194-6ce6-bae0-94bbe57576c6'
|
||||
'OpaqueRef:114fdd8a-844c-6bb5-0855-e3427bc8f073'
|
||||
'OpaqueRef:a486606c-1c75-e1c3-56de-c6e1bc3df980'
|
||||
'OpaqueRef:b6975094-843e-a19a-6101-ee7953e40580'
|
||||
'OpaqueRef:f15d7d4c-32d1-45e1-5f6f-ddc68733bab6'
|
||||
'OpaqueRef:1b04b1a2-e8b2-df82-6618-0d0a741d8bbb'
|
||||
'OpaqueRef:dcd41e75-47fc-5ae5-1d59-5176a7b76eaa'
|
||||
'OpaqueRef:71ed5eba-33c9-6deb-6dc2-ab670a6c968b'
|
||||
'OpaqueRef:59ee665c-9270-64a4-3829-aef3e045a705'
|
||||
'OpaqueRef:88979f4b-16ef-3b99-a616-aa1e2787bebe'
|
||||
'OpaqueRef:80a3e419-5a81-a7df-103d-5cf60bbde793'
|
||||
'OpaqueRef:38737284-e4e1-5172-2bf3-f9d70dcaadfa'
|
||||
'OpaqueRef:456d4d7f-77f8-ef40-aadd-f56601bc7c2b'
|
||||
'OpaqueRef:4a949518-cc01-a003-f386-b3319db6d7a6'
|
||||
'OpaqueRef:c8834c52-f15b-437d-1e09-958fedbf3c5b'
|
||||
'OpaqueRef:07d40d2c-4f6e-4f5f-0c3e-c2ea028d4fc4'
|
||||
'OpaqueRef:6df45555-1b11-2873-8947-2b6e7c9445be'
|
||||
'OpaqueRef:d3c60e69-2cf8-191f-9679-d6ae0ecdf5f9'
|
||||
'OpaqueRef:ed499671-2c01-3dc9-f6cd-553fef4b6716'
|
||||
]
|
||||
|
||||
$expect(vm.power_state).to.equal 'Running'
|
||||
|
||||
$expect(vm.CPUs).to.deep.equal {
|
||||
max: 1
|
||||
number: 1
|
||||
}
|
||||
|
||||
$expect(vm.$CPU_usage).to.be.null
|
||||
|
||||
$expect(vm.$container).to.equal 'OpaqueRef:bbc98f5e-1a17-2030-28af-0df2393f3145'
|
||||
|
||||
$expect(vm.snapshots).to.have.members []
|
||||
|
||||
$expect(vm.snapshot_time).to.equal null
|
||||
|
||||
$expect(vm.$VBDs).to.have.members [
|
||||
'OpaqueRef:dbb53525-e1a3-741b-4924-9944b845bc0c'
|
||||
'OpaqueRef:1bd20244-01a0-fec3-eb00-79a453a56446'
|
||||
]
|
||||
|
||||
$expect(vm.VIFs).to.have.members [
|
||||
'OpaqueRef:20349ad5-0a0d-4b80-dcc0-0037fa647182'
|
||||
]
|
||||
|
||||
it 'VM-template', ->
|
||||
vm = collection.get 'OpaqueRef:f02a3c19-447b-c618-fb51-a9cde79be17c'
|
||||
|
||||
#console.log vm
|
||||
|
||||
# Only VM-template-specific fields will be tested.
|
||||
|
||||
$expect(vm.type).to.equal 'VM-template'
|
||||
|
||||
$expect(vm.template_info).to.be.an 'object'
|
||||
|
||||
$expect(vm.template_info.arch).to.equal 'amd64'
|
||||
|
||||
$expect(vm.template_info.disks).to.deep.equal [
|
||||
{
|
||||
bootable: true
|
||||
device: '0'
|
||||
size: 8589934592
|
||||
SR: ''
|
||||
type: 'system'
|
||||
}
|
||||
]
|
||||
|
||||
$expect(vm.template_info.install_methods).to.have.members [
|
||||
'cdrom'
|
||||
'http'
|
||||
'ftp'
|
||||
]
|
||||
|
||||
it 'SR', ->
|
||||
sr = collection.get 'OpaqueRef:d6fe49bf-dd48-c929-5aab-b2786a2e7aee'
|
||||
|
||||
#console.log sr
|
||||
|
||||
$expect(sr).to.be.an 'object'
|
||||
|
||||
$expect(sr.type).to.equal 'SR'
|
||||
|
||||
$expect(sr.name_label).to.equal 'Zfs'
|
||||
|
||||
$expect(sr.name_description).to.equal 'iSCSI SR [192.168.0.100 (iqn.1986-03.com.sun:02:ba2ab54c-2d14-eb74-d6f9-ef7c4f28ff1e; LUN 0: A83BCKLAF: 2048 GB (NEXENTA))]'
|
||||
|
||||
$expect(sr.SR_type).to.equal 'lvmoiscsi'
|
||||
|
||||
$expect(sr.content_type).to.equal ''
|
||||
|
||||
$expect(sr.physical_usage).to.equal 205831274496
|
||||
|
||||
$expect(sr.usage).to.equal 202358390784
|
||||
|
||||
$expect(sr.size).to.equal 2199010672640
|
||||
|
||||
$expect(sr.$container).to.equal 'OpaqueRef:6462d0b3-8f20-ef76-fddf-002f7af3452e'
|
||||
|
||||
$expect(sr.$PBDs).to.have.members [
|
||||
'OpaqueRef:ff32de74-138c-9d80-ab58-c631d2aa0e71'
|
||||
'OpaqueRef:200674ae-d9ab-2caa-a283-4fa3d14592fd'
|
||||
]
|
||||
|
||||
$expect(sr.VDIs).to.have.members [
|
||||
'OpaqueRef:b4a1573f-c235-8acd-4625-dfbcb2beb523'
|
||||
'OpaqueRef:098a2155-605b-241e-f775-a05c2133874e'
|
||||
'OpaqueRef:f7d900f9-a4fe-9a3e-ead8-28db301d26e8'
|
||||
'OpaqueRef:f26d2af5-b529-4d16-21d1-a56965e7bfb1'
|
||||
'OpaqueRef:ec5ce10e-023e-9a9f-eef7-a64e4c6d7b28'
|
||||
'OpaqueRef:e0eb5eb1-a485-fcfc-071e-fafa17f9ac48'
|
||||
'OpaqueRef:c4aa5d87-4115-c359-9cdf-c16fbf56cf2c'
|
||||
'OpaqueRef:b06a9d3f-5132-e58f-25c4-ef94d5b38986'
|
||||
'OpaqueRef:a4dd8a73-5393-81ce-abce-fc1502490a6d'
|
||||
'OpaqueRef:83331526-8bd8-9644-0a7d-9f645f5fcd70'
|
||||
'OpaqueRef:693bef17-aa19-63f8-3775-7d3b2dbce9d6'
|
||||
'OpaqueRef:67618138-57df-e90a-74c6-402ad62d657b'
|
||||
'OpaqueRef:5f1d5117-1033-b12a-92a8-99f206c9dbba'
|
||||
'OpaqueRef:287084c1-241a-58df-929a-cbe2e7454a56'
|
||||
'OpaqueRef:1f7f9828-f4e7-41dd-20e6-3bf57c559a78'
|
||||
]
|
||||
|
||||
$expect(sr.messages).to.have.members [
|
||||
# No data for this test.
|
||||
]
|
||||
|
||||
it 'PBD', ->
|
||||
pbd = collection.get 'OpaqueRef:ff32de74-138c-9d80-ab58-c631d2aa0e71'
|
||||
|
||||
#console.log pbd
|
||||
|
||||
$expect(pbd).to.an 'object'
|
||||
|
||||
$expect(pbd.type).to.equal 'PBD'
|
||||
|
||||
$expect(pbd.attached).to.be.true
|
||||
|
||||
$expect(pbd.host).to.equal 'OpaqueRef:bbc98f5e-1a17-2030-28af-0df2393f3145'
|
||||
|
||||
$expect(pbd.SR).to.equal 'OpaqueRef:d6fe49bf-dd48-c929-5aab-b2786a2e7aee'
|
||||
|
||||
it 'PIF', ->
|
||||
pif = collection.get 'OpaqueRef:aef57ed4-e4d9-7f72-0376-b781a19bb9d2'
|
||||
|
||||
#console.log pif
|
||||
|
||||
$expect(pif).to.an 'object'
|
||||
|
||||
$expect(pif.type).to.equal 'PIF'
|
||||
|
||||
$expect(pif.attached).to.be.true
|
||||
|
||||
$expect(pif.device).to.equal 'eth0'
|
||||
|
||||
$expect(pif.IP).to.equal '192.168.1.1'
|
||||
|
||||
$expect(pif.$host).to.equal 'OpaqueRef:bbc98f5e-1a17-2030-28af-0df2393f3145'
|
||||
|
||||
$expect(pif.MAC).to.equal '90:2b:34:d3:ce:75'
|
||||
|
||||
$expect(pif.management).to.be.true
|
||||
|
||||
$expect(pif.mode).to.equal 'Static'
|
||||
|
||||
$expect(pif.MTU).to.equal 1500
|
||||
|
||||
$expect(pif.netmask).to.equal '255.255.255.0'
|
||||
|
||||
$expect(pif.$network).to.equal 'OpaqueRef:dbc93777-f2c0-e888-967d-dd9beeffb3c0'
|
||||
|
||||
it 'VDI', ->
|
||||
vdi = collection.get 'OpaqueRef:1f7f9828-f4e7-41dd-20e6-3bf57c559a78'
|
||||
|
||||
#console.log vdi
|
||||
|
||||
$expect(vdi).to.an 'object'
|
||||
|
||||
$expect(vdi.type).to.equal 'VDI'
|
||||
|
||||
$expect(vdi.name_label).to.equal 'ceph'
|
||||
|
||||
$expect(vdi.name_description).to.equal ''
|
||||
|
||||
$expect(vdi.usage).to.equal 21525168128
|
||||
|
||||
$expect(vdi.size).to.equal 21474836480
|
||||
|
||||
$expect(vdi.$snapshot_of).to.equal null
|
||||
|
||||
$expect(vdi.snapshots).to.have.members [
|
||||
'OpaqueRef:b4a1573f-c235-8acd-4625-dfbcb2beb523'
|
||||
]
|
||||
|
||||
$expect(vdi.$SR).to.equal 'OpaqueRef:d6fe49bf-dd48-c929-5aab-b2786a2e7aee'
|
||||
|
||||
$expect(vdi.$VBDs).to.have.members [
|
||||
'OpaqueRef:9f15200b-3cac-7a61-b3e8-dd2fc0a5572d'
|
||||
]
|
||||
|
||||
it 'VBD', ->
|
||||
vbd = collection.get 'OpaqueRef:9f15200b-3cac-7a61-b3e8-dd2fc0a5572d'
|
||||
|
||||
#console.log vbd
|
||||
|
||||
$expect(vbd).to.an 'object'
|
||||
|
||||
$expect(vbd.type).to.equal 'VBD'
|
||||
|
||||
$expect(vbd.attached).to.be.true
|
||||
|
||||
$expect(vbd.bootable).to.be.false
|
||||
|
||||
$expect(vbd.is_cd_drive).to.be.false
|
||||
|
||||
$expect(vbd.read_only).to.be.false
|
||||
|
||||
$expect(vbd.VDI).to.equal 'OpaqueRef:1f7f9828-f4e7-41dd-20e6-3bf57c559a78'
|
||||
|
||||
$expect(vbd.VM).to.equal 'OpaqueRef:be2390b2-cd08-53f5-3fae-b76f6f3725bf'
|
||||
|
||||
it 'VIF', ->
|
||||
vif = collection.get 'OpaqueRef:20349ad5-0a0d-4b80-dcc0-0037fa647182'
|
||||
|
||||
#console.log vif
|
||||
|
||||
$expect(vif).to.an 'object'
|
||||
|
||||
$expect(vif.type).to.equal 'VIF'
|
||||
|
||||
$expect(vif.attached).to.be.true
|
||||
|
||||
$expect(vif.device).to.equal '0'
|
||||
|
||||
$expect(vif.MAC).to.equal 'ce:20:2b:38:7f:fd'
|
||||
|
||||
$expect(vif.MTU).to.equal 1500
|
||||
|
||||
$expect(vif.$network).to.equal 'OpaqueRef:dbc93777-f2c0-e888-967d-dd9beeffb3c0'
|
||||
|
||||
$expect(vif.$VM).to.equal 'OpaqueRef:fdaba312-c3a5-0190-b1a1-bf389567e620'
|
||||
|
||||
it 'network', ->
|
||||
network = collection.get 'OpaqueRef:dbc93777-f2c0-e888-967d-dd9beeffb3c0'
|
||||
|
||||
#console.log network
|
||||
|
||||
$expect(network).to.be.an 'object'
|
||||
|
||||
$expect(network.type).to.equal 'network'
|
||||
|
||||
$expect(network.name_label).to.equal 'Pool-wide network associated with eth0'
|
||||
|
||||
$expect(network.name_description).to.equal ''
|
||||
|
||||
$expect(network.bridge).to.equal 'xenbr0'
|
||||
|
||||
$expect(network.MTU).to.equal 1500
|
||||
|
||||
$expect(network.PIFs).to.have.members [
|
||||
'OpaqueRef:aef57ed4-e4d9-7f72-0376-b781a19bb9d2'
|
||||
'OpaqueRef:971d6bc5-60f4-a331-bdee-444ee7cbf678'
|
||||
]
|
||||
|
||||
$expect(network.VIFs).to.have.members [
|
||||
'OpaqueRef:fc86d17e-d9d1-5534-69d6-d15edbe36d22'
|
||||
'OpaqueRef:ed2d89ca-1f4e-09ff-f80e-991d6b01de45'
|
||||
'OpaqueRef:c6651d03-cefe-accf-920b-636e32fee23c'
|
||||
'OpaqueRef:c5977d9b-cb50-a615-8488-1dd105d69802'
|
||||
'OpaqueRef:c391575b-168f-e52b-59f7-9f852a2c6854'
|
||||
'OpaqueRef:bf4da755-480b-e3fd-2bfe-f53e7204c8ae'
|
||||
'OpaqueRef:ba41d1a6-724e-aae8-3447-20f74014eb75'
|
||||
'OpaqueRef:b8df4453-542e-6c14-0eb1-174d48373bca'
|
||||
'OpaqueRef:b5980de3-1a74-9f57-1e98-2a74184211dc'
|
||||
'OpaqueRef:aaae3669-faee-4338-3156-0ce8c06c75cf'
|
||||
'OpaqueRef:aa874254-b67c-e9e3-6a08-1c770c2dd8ac'
|
||||
'OpaqueRef:7b8ecb18-5bc5-7650-3ac4-6bc22322e8ba'
|
||||
'OpaqueRef:59b884b0-521f-7b3e-6a91-319ded893e68'
|
||||
'OpaqueRef:20349ad5-0a0d-4b80-dcc0-0037fa647182'
|
||||
]
|
||||
|
||||
it 'message', ->
|
||||
message = collection.get 'OpaqueRef:cb515b9a-ef8c-13d4-88ea-e0d3ee88d22a'
|
||||
|
||||
#console.log message
|
||||
|
||||
$expect(message.type).to.equal 'message'
|
||||
|
||||
$expect(message.time).to.equal 1389449056
|
||||
|
||||
$expect(message.$object).to.equal 'OpaqueRef:bbc98f5e-1a17-2030-28af-0df2393f3145'
|
||||
|
||||
$expect(message.name).to.equal 'PBD_PLUG_FAILED_ON_SERVER_START'
|
||||
|
||||
$expect(message.body).to.equal ''
|
||||
|
||||
it 'task', ->
|
||||
all = collection.get()
|
||||
|
||||
for object in all
|
||||
if object.type is 'task'
|
||||
console.log object
|
||||
|
||||
# FIXME: we need to update the test data to complete this test.
|
30
src/utils.js
@ -8,8 +8,6 @@ import xml2js from 'xml2js'
|
||||
import {promisify, method} from 'bluebird'
|
||||
import {randomBytes} from 'crypto'
|
||||
|
||||
/* eslint no-lone-blocks: 0 */
|
||||
|
||||
// ===================================================================
|
||||
|
||||
// Ensure the value is an array, wrap it if necessary.
|
||||
@ -23,6 +21,15 @@ export const ensureArray = (value) => {
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
// Returns the value of a property and removes it from the object.
|
||||
export function extractProperty (obj, prop) {
|
||||
const value = obj[prop]
|
||||
delete obj[prop]
|
||||
return value
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
// Generate a secure random Base64 string.
|
||||
export const generateToken = (function (randomBytes) {
|
||||
return (n = 32) => randomBytes(n).then(base64url)
|
||||
@ -70,6 +77,25 @@ export const parseXml = (function () {
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
// This function does nothing and returns undefined.
|
||||
//
|
||||
// It is often used to swallow promise's errors.
|
||||
export function noop () {}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
// Ponyfill for Promise.finally(cb)
|
||||
export const pFinally = (promise, cb) => {
|
||||
return promise.then(
|
||||
(value) => Promise.resolve(cb()).then(() => value),
|
||||
(reason) => Promise.resolve(cb()).then(() => {
|
||||
throw reason
|
||||
})
|
||||
)
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
export function parseSize (size) {
|
||||
let bytes = humanFormat.parse.raw(size, { scale: 'binary' })
|
||||
if (bytes.unit && bytes.unit !== 'B') {
|
||||
|
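A short usage sketch of the helpers added above, assuming they are imported from this module; `doWork` and `cleanup` are hypothetical placeholders:

import {extractProperty, pFinally} from './utils'

// extractProperty reads a property and deletes it in one step.
const obj = {sr: 'OpaqueRef:some-sr', size: 42}
const sr = extractProperty(obj, 'sr')
// sr === 'OpaqueRef:some-sr' and obj is now {size: 42}

// pFinally runs the callback whether the promise fulfills or rejects,
// then re-exposes the original value or error.
const doWork = () => Promise.resolve('done')
const cleanup = () => console.log('cleaning up')
pFinally(doWork(), cleanup).then(value => console.log(value)) // 'cleaning up' then 'done'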
49
src/utils.spec.js
Normal file
@ -0,0 +1,49 @@
|
||||
/* eslint-env mocha */
|
||||
|
||||
import {expect} from 'chai'
|
||||
|
||||
// ===================================================================
|
||||
|
||||
import {
|
||||
ensureArray,
|
||||
extractProperty
|
||||
} from './utils'
|
||||
|
||||
// ===================================================================
|
||||
|
||||
describe('ensureArray', function () {
|
||||
it('returns an empty array for undefined', function () {
|
||||
expect(ensureArray(undefined)).to.eql([])
|
||||
})
|
||||
|
||||
it('returns the object itself if it is already an array', function () {
|
||||
const array = ['foo', 'bar', 'baz']
|
||||
|
||||
expect(ensureArray(array)).to.equal(array)
|
||||
})
|
||||
|
||||
it('wraps the value in an array', function () {
|
||||
const value = {}
|
||||
|
||||
expect(ensureArray(value)).to.includes(value)
|
||||
})
|
||||
})
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
describe('extractProperty', function () {
|
||||
it('returns the value of the property', function () {
|
||||
const value = {}
|
||||
const obj = { prop: value }
|
||||
|
||||
expect(extractProperty(obj, 'prop')).to.equal(value)
|
||||
})
|
||||
|
||||
it('removes the property from the object', function () {
|
||||
const value = {}
|
||||
const obj = { prop: value }
|
||||
|
||||
expect(extractProperty(obj, 'prop')).to.equal(value)
|
||||
expect(obj).to.not.have.property('prop')
|
||||
})
|
||||
})
|
@ -6,46 +6,47 @@ const debug = createDebug('xo:wsProxy')
|
||||
|
||||
const defaults = {
|
||||
// Automatically close the client connection when the remote closes.
|
||||
autoClose: true,
|
||||
|
||||
// Reject secure connections to unauthorized remotes (bad CA).
|
||||
rejectUnauthorized: false
|
||||
autoClose: true
|
||||
}
|
||||
|
||||
// Proxy a WebSocket `client` to a remote server which has `url` as
|
||||
// address.
|
||||
export default function wsProxy (client, url, opts) {
|
||||
opts = assign({}, defaults, opts)
|
||||
opts = assign({}, defaults, {
|
||||
protocol: client.protocol
|
||||
}, opts)
|
||||
const autoClose = !!opts.autoClose
|
||||
delete opts.autoClose
|
||||
|
||||
const remote = new WebSocket(url, {
|
||||
protocol: opts.protocol || client.protocol,
|
||||
rejectUnauthorized: opts.rejectUnauthorized
|
||||
}).once('open', function () {
|
||||
debug('connected to', url)
|
||||
function onClientSend (error) {
|
||||
if (error) {
|
||||
debug('client send error', error)
|
||||
}
|
||||
}
|
||||
function onRemoteSend (error) {
|
||||
if (error) {
|
||||
debug('remote send error', error)
|
||||
}
|
||||
}
|
||||
|
||||
const remote = new WebSocket(url, opts).once('open', function () {
|
||||
debug('connected to %s', url)
|
||||
}).once('close', function () {
|
||||
debug('remote closed')
|
||||
|
||||
if (opts.autoClose) {
|
||||
if (autoClose) {
|
||||
client.close()
|
||||
}
|
||||
}).once('error', function (error) {
|
||||
debug('remote error', error)
|
||||
debug('remote error: %s', error)
|
||||
}).on('message', function (message) {
|
||||
client.send(message, function (error) {
|
||||
if (error) {
|
||||
debug('client send error', error)
|
||||
}
|
||||
})
|
||||
client.send(message, onClientSend)
|
||||
})
|
||||
|
||||
client.once('close', function () {
|
||||
debug('client closed')
|
||||
remote.close()
|
||||
}).on('message', function (message) {
|
||||
remote.send(message, function (error) {
|
||||
if (error) {
|
||||
debug('remote send error', error)
|
||||
}
|
||||
})
|
||||
remote.send(message, onRemoteSend)
|
||||
})
|
||||
}
|
||||
|
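A usage sketch of the proxy above, assuming a `ws` server handing over browser connections; the module path `./ws-proxy` and the remote console URL are assumptions made for the example:

import {Server as WebSocketServer} from 'ws'
import wsProxy from './ws-proxy'

const server = new WebSocketServer({port: 8080})
server.on('connection', client => {
  // Relay every message between the browser client and the remote
  // WebSocket; the client's sub-protocol is forwarded by default and
  // the connection is closed automatically when the remote closes.
  wsProxy(client, 'wss://192.0.2.1/console?uuid=some-console-uuid', {
    rejectUnauthorized: false
  })
})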
491
src/xapi-objects-to-xo.js
Normal file
@ -0,0 +1,491 @@
|
||||
import forEach from 'lodash.foreach'
|
||||
import isArray from 'lodash.isarray'
|
||||
import map from 'lodash.map'
|
||||
|
||||
import {
|
||||
ensureArray,
|
||||
extractProperty,
|
||||
parseXml
|
||||
} from './utils'
|
||||
import {
|
||||
isHostRunning,
|
||||
isVmRunning
|
||||
} from './xapi'
|
||||
|
||||
// ===================================================================
|
||||
|
||||
function link (obj, prop) {
|
||||
const dynamicValue = obj[`$${prop}`]
|
||||
if (dynamicValue == null) {
|
||||
return dynamicValue // Properly handles null and undefined.
|
||||
}
|
||||
|
||||
if (isArray(dynamicValue)) {
|
||||
return map(dynamicValue, '$id')
|
||||
}
|
||||
|
||||
return dynamicValue.$id
|
||||
}
|
||||
|
||||
function toTimestamp (date) {
|
||||
// Weird behavior from the XAPI.
|
||||
if (!date || date === '1969-12-31T23:00:00.000Z') {
|
||||
return null
|
||||
}
|
||||
|
||||
return Math.round(Date.parse(date) / 1000)
|
||||
}
|
||||
|
||||
// ===================================================================
|
||||
|
||||
export function pool (obj) {
|
||||
return {
|
||||
default_SR: link(obj, 'default_SR'),
|
||||
HA_enabled: obj.ha_enabled,
|
||||
master: link(obj, 'master'),
|
||||
name_description: obj.name_description,
|
||||
name_label: obj.name_label
|
||||
|
||||
// TODO
|
||||
// - ? networks = networksByPool.items[pool.id] (network.$pool.id)
|
||||
// - hosts = hostsByPool.items[pool.id] (host.$pool.$id)
|
||||
// - patches = poolPatchesByPool.items[pool.id] (poolPatch.$pool.id)
|
||||
// - SRs = srsByContainer.items[pool.id] (sr.$container.id)
|
||||
// - templates = vmTemplatesByContainer.items[pool.id] (vmTemplate.$container.$id)
|
||||
// - VMs = vmsByContainer.items[pool.id] (vm.$container.id)
|
||||
// - $running_hosts = runningHostsByPool.items[pool.id] (runningHost.$pool.id)
|
||||
// - $running_VMs = runningVmsByPool.items[pool.id] (runningHost.$pool.id)
|
||||
// - $VMs = vmsByPool.items[pool.id] (vm.$pool.id)
|
||||
}
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
export function host (obj) {
|
||||
const {
|
||||
$metrics: metrics,
|
||||
other_config: otherConfig
|
||||
} = obj
|
||||
|
||||
const isRunning = isHostRunning(obj)
|
||||
|
||||
return {
|
||||
address: obj.address,
|
||||
bios_strings: obj.bios_strings,
|
||||
build: obj.software_version.build_number,
|
||||
CPUs: obj.cpu_info,
|
||||
enabled: obj.enabled,
|
||||
current_operations: obj.current_operations,
|
||||
hostname: obj.hostname,
|
||||
iSCSI_name: otherConfig.iscsi_iqn || null,
|
||||
name_description: obj.name_description,
|
||||
name_label: obj.name_label,
|
||||
memory: (function () {
|
||||
if (metrics) {
|
||||
const free = +metrics.memory_free
|
||||
const total = +metrics.memory_total
|
||||
|
||||
return {
|
||||
usage: total - free,
|
||||
size: total
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
usage: 0,
|
||||
size: 0
|
||||
}
|
||||
})(),
|
||||
patches: link(obj, 'patches'),
|
||||
power_state: isRunning ? 'Running' : 'Halted',
|
||||
version: obj.software_version.product_version,
|
||||
|
||||
// TODO: dedupe.
|
||||
PIFs: link(obj, 'PIFs'),
|
||||
$PIFs: link(obj, 'PIFs'),
|
||||
PCIs: link(obj, 'PCIs'),
|
||||
$PCIs: link(obj, 'PCIs'),
|
||||
PGPUs: link(obj, 'PGPUs'),
|
||||
$PGPUs: link(obj, 'PGPUs'),
|
||||
|
||||
$PBDs: link(obj, 'PBDs')
|
||||
|
||||
// TODO:
|
||||
// - controller = vmControllersByContainer.items[host.id]
|
||||
// - SRs = srsByContainer.items[host.id]
|
||||
// - tasks = tasksByHost.items[host.id]
|
||||
// - templates = vmTemplatesByContainer.items[host.id]
|
||||
// - VMs = vmsByContainer.items[host.id]
|
||||
// - $vCPUs = sum(host.VMs, vm => host.CPUs.number)
|
||||
}
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
export function vm (obj) {
|
||||
const {
|
||||
$guest_metrics: guestMetrics,
|
||||
$metrics: metrics,
|
||||
other_config: otherConfig
|
||||
} = obj
|
||||
|
||||
const isRunning = isVmRunning(obj)
|
||||
|
||||
const vm = {
|
||||
// type is redefined later for controllers, templates &
|
||||
// snapshots.
|
||||
type: 'VM',
|
||||
|
||||
addresses: guestMetrics && guestMetrics.networks || null,
|
||||
auto_poweron: Boolean(otherConfig.auto_poweron),
|
||||
boot: obj.HVM_boot_params,
|
||||
CPUs: {
|
||||
max: +obj.VCPUs_max,
|
||||
number: (
|
||||
isRunning && metrics ?
|
||||
+metrics.VCPUs_number :
|
||||
+obj.VCPUs_at_startup
|
||||
)
|
||||
},
|
||||
current_operations: obj.current_operations,
|
||||
docker: (function () {
|
||||
const monitor = otherConfig['xscontainer-monitor']
|
||||
if (!monitor) {
|
||||
return
|
||||
}
|
||||
|
||||
if (monitor === 'False') {
|
||||
return {
|
||||
enabled: false
|
||||
}
|
||||
}
|
||||
|
||||
const {
|
||||
docker_ps: process,
|
||||
docker_info: info,
|
||||
docker_version: version
|
||||
} = otherConfig
|
||||
|
||||
return {
|
||||
enabled: true,
|
||||
info: info && parseXml(info).docker_info,
|
||||
process: process && parseXml(process).docker_ps,
|
||||
version: version && parseXml(version).docker_version
|
||||
}
|
||||
})(),
|
||||
|
||||
// TODO: there are two possible values: "best-effort" and "restart"
|
||||
high_availability: Boolean(obj.ha_restart_priority),
|
||||
|
||||
memory: (function () {
|
||||
const dynamicMin = +obj.memory_dynamic_min
|
||||
const dynamicMax = +obj.memory_dynamic_max
|
||||
const staticMin = +obj.memory_static_min
|
||||
const staticMax = +obj.memory_static_max
|
||||
|
||||
const memory = {
|
||||
dynamic: [ dynamicMin, dynamicMax ],
|
||||
static: [ staticMin, staticMax ]
|
||||
}
|
||||
|
||||
const gmMemory = guestMetrics && guestMetrics.memory
|
||||
|
||||
if (!isRunning) {
|
||||
memory.size = dynamicMax
|
||||
} else if (gmMemory && gmMemory.used) {
|
||||
memory.usage = +gmMemory.used
|
||||
memory.size = +gmMemory.total
|
||||
} else if (metrics) {
|
||||
memory.size = +metrics.memory_actual
|
||||
} else {
|
||||
memory.size = dynamicMax
|
||||
}
|
||||
|
||||
return memory
|
||||
})(),
|
||||
name_description: obj.name_description,
|
||||
name_label: obj.name_label,
|
||||
other: otherConfig,
|
||||
os_version: guestMetrics && guestMetrics.os_version || null,
|
||||
power_state: obj.power_state,
|
||||
PV_drivers: Boolean(guestMetrics && guestMetrics.PV_drivers_up_to_date),
|
||||
snapshot_time: toTimestamp(obj.snapshot_time),
|
||||
snapshots: link(obj, 'snapshots'),
|
||||
VIFs: link(obj, 'VIFs'),
|
||||
|
||||
$container: (
|
||||
isRunning ?
|
||||
link(obj, 'resident_on') :
|
||||
link(obj, 'pool') // TODO: handle local VMs (`VM.get_possible_hosts()`).
|
||||
),
|
||||
$VBDs: link(obj, 'VBDs'),
|
||||
|
||||
// TODO: dedupe
|
||||
VGPUs: link(obj, 'VGPUs'),
|
||||
$VGPUs: link(obj, 'VGPUs')
|
||||
}
|
||||
|
||||
if (obj.is_control_domain) {
|
||||
vm.type += '-controller'
|
||||
} else if (obj.is_a_snapshot) {
|
||||
vm.type += '-snapshot'
|
||||
|
||||
vm.$snapshot_of = link(obj, 'snapshot_of')
|
||||
} else if (obj.is_a_template) {
|
||||
vm.type += '-template'
|
||||
|
||||
vm.CPUs.number = +obj.VCPUs_at_startup
|
||||
vm.template_info = {
|
||||
arch: otherConfig['install-arch'],
|
||||
disks: (function () {
|
||||
const {disks: xml} = otherConfig
|
||||
if (!xml) {
|
||||
return []
|
||||
}
|
||||
|
||||
const disks = ensureArray(parseXml(xml).provision.disk)
|
||||
forEach(disks, function normalize (disk) {
|
||||
disk.bootable = disk.bootable === 'true'
|
||||
disk.size = +disk.size
|
||||
disk.SR = extractProperty(disk, 'sr')
|
||||
})
|
||||
|
||||
return disks
|
||||
})(),
|
||||
install_methods: (function () {
|
||||
const {['install-methods']: methods} = otherConfig
|
||||
|
||||
return methods ? methods.split(',') : []
|
||||
})()
|
||||
}
|
||||
}
|
||||
|
||||
return vm
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
export function sr (obj) {
|
||||
return {
|
||||
type: 'SR',
|
||||
|
||||
content_type: obj.content_type,
|
||||
name_description: obj.name_description,
|
||||
name_label: obj.name_label,
|
||||
physical_usage: +obj.physical_utilisation,
|
||||
size: +obj.physical_size,
|
||||
SR_type: obj.type,
|
||||
usage: +obj.virtual_allocation,
|
||||
VDIs: link(obj, 'VDIs'),
|
||||
|
||||
$container: (
|
||||
obj.shared ?
|
||||
link(obj, 'pool') :
|
||||
obj.$PBDs[0] && link(obj.$PBDs[0], 'host')
|
||||
),
|
||||
$PBDs: link(obj, 'PBDs')
|
||||
}
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
export function pbd (obj) {
|
||||
return {
|
||||
type: 'PBD',
|
||||
|
||||
attached: obj.currently_attached,
|
||||
host: link(obj, 'host'),
|
||||
SR: link(obj, 'SR')
|
||||
}
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
export function pif (obj) {
|
||||
return {
|
||||
type: 'PIF',
|
||||
|
||||
attached: obj.currently_attached,
|
||||
device: obj.device,
|
||||
IP: obj.IP,
|
||||
MAC: obj.MAC,
|
||||
management: obj.management, // TODO: find a better name.
|
||||
mode: obj.ip_configuration_mode,
|
||||
MTU: +obj.MTU,
|
||||
netmask: obj.netmask,
|
||||
vlan: +obj.VLAN,
|
||||
|
||||
// TODO: What is it?
|
||||
//
|
||||
// Could it mean “is this a physical interface?”.
|
||||
// How could a PIF not be physical?
|
||||
// physical: obj.physical,
|
||||
|
||||
$host: link(obj, 'host'),
|
||||
$network: link(obj, 'network')
|
||||
}
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
// TODO: should we have a VDI-snapshot type like we have with VMs?
|
||||
export function vdi (obj) {
|
||||
return {
|
||||
type: 'VDI',
|
||||
|
||||
name_description: obj.name_description,
|
||||
name_label: obj.name_label,
|
||||
size: +obj.virtual_size,
|
||||
snapshots: link(obj, 'snapshots'),
|
||||
snapshot_time: toTimestamp(obj.snapshot_time),
|
||||
usage: +obj.physical_utilisation,
|
||||
|
||||
$snapshot_of: link(obj, 'snapshot_of'),
|
||||
$SR: link(obj, 'SR'),
|
||||
$VBDs: link(obj, 'VBDs')
|
||||
}
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
export function vbd (obj) {
|
||||
return {
|
||||
type: 'VBD',
|
||||
|
||||
attached: obj.currently_attached,
|
||||
bootable: obj.bootable,
|
||||
is_cd_drive: obj.type === 'CD',
|
||||
position: obj.userdevice,
|
||||
read_only: obj.mode === 'RO',
|
||||
VDI: link(obj, 'VDI'),
|
||||
VM: link(obj, 'VM')
|
||||
}
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
export function vif (obj) {
|
||||
return {
|
||||
type: 'VIF',
|
||||
|
||||
attached: obj.currently_attached,
|
||||
device: obj.device, // TODO: should it be cast to a number?
|
||||
MAC: obj.MAC,
|
||||
MTU: +obj.MTU,
|
||||
|
||||
$network: link(obj, 'network'),
|
||||
$VM: link(obj, 'VM')
|
||||
}
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
export function network (obj) {
|
||||
return {
|
||||
bridge: obj.bridge,
|
||||
MTU: +obj.MTU,
|
||||
name_description: obj.name_description,
|
||||
name_label: obj.name_label,
|
||||
PIFs: link(obj, 'PIFs'),
|
||||
VIFs: link(obj, 'VIFs')
|
||||
}
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
export function message (obj) {
|
||||
return {
|
||||
body: obj.body,
|
||||
name: obj.name,
|
||||
time: toTimestamp(obj.timestamp),
|
||||
|
||||
$object: obj.obj_uuid // Special link as it is already a UUID.
|
||||
}
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
export function task (obj) {
|
||||
return {
|
||||
created: toTimestamp(obj.created),
|
||||
current_operations: obj.current_operations,
|
||||
finished: toTimestamp(obj.finished),
|
||||
name_description: obj.name_description,
|
||||
name_label: obj.name_label,
|
||||
progress: +obj.progress,
|
||||
result: obj.result,
|
||||
status: obj.status,
|
||||
|
||||
$host: link(obj, 'resident_on')
|
||||
}
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
export function host_patch (obj) {
|
||||
return {
|
||||
applied: obj.applied,
|
||||
time: toTimestamp(obj.timestamp_applied),
|
||||
pool_patch: link(obj, 'pool_patch'),
|
||||
|
||||
$host: link(obj, 'host')
|
||||
}
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
export function pool_patch (obj) {
|
||||
return {
|
||||
applied: obj.pool_applied,
|
||||
name_description: obj.name_description,
|
||||
name_label: obj.name_label,
|
||||
size: +obj.size,
|
||||
version: obj.version,
|
||||
|
||||
// TODO: host.[$]pool_patches ←→ pool.[$]host_patches
|
||||
$host_patches: link(obj, 'host_patches')
|
||||
}
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
export function pci (obj) {
|
||||
return {
|
||||
type: 'PCI',
|
||||
|
||||
class_name: obj.class_name,
|
||||
device_name: obj.device_name,
|
||||
pci_id: obj.pci_id,
|
||||
|
||||
$host: link(obj, 'host')
|
||||
}
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
export function pgpu (obj) {
|
||||
return {
|
||||
type: 'PGPU',
|
||||
|
||||
pci: link(obj, 'PCI'),
|
||||
|
||||
// TODO: dedupe.
|
||||
host: link(obj, 'host'),
|
||||
$host: link(obj, 'host'),
|
||||
vgpus: link(obj, 'resident_VGPUs'),
|
||||
$vgpus: link(obj, 'resident_VGPUs')
|
||||
}
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
export function vgpu (obj) {
|
||||
return {
|
||||
type: 'VGPU',
|
||||
|
||||
currentlyAttached: obj.currently_attached,
|
||||
device: obj.device,
|
||||
resident_on: link(obj, 'resident_on'),
|
||||
vm: link(obj, 'VM')
|
||||
}
|
||||
}
|
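A rough sketch of how these transforms are meant to be consumed: pass a XAPI record as exposed by `xen-api` (with `$`-prefixed links already resolved to objects carrying an `$id`) to the matching function and get back the flat XO representation. The record below is hypothetical and heavily trimmed:

import {vbd} from './xapi-objects-to-xo'

// Hypothetical, trimmed VBD record; $VDI and $VM stand for the linked
// objects already resolved by xen-api ($id being their XO identifier).
const record = {
  currently_attached: true,
  bootable: false,
  mode: 'RW',
  type: 'Disk',
  userdevice: '0',
  $VDI: {$id: 'some-vdi-id'},
  $VM: {$id: 'some-vm-id'}
}

vbd(record)
// => { type: 'VBD', attached: true, bootable: false, is_cd_drive: false,
//      position: '0', read_only: false, VDI: 'some-vdi-id', VM: 'some-vm-id' }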
147
src/xapi.coffee
@ -1,147 +0,0 @@
|
||||
# URL parsing.
|
||||
{parse: $parseUrl} = require 'url'
|
||||
|
||||
#---------------------------------------------------------------------
|
||||
|
||||
$debug = (require 'debug') 'xo:xapi'
|
||||
$xmlrpc = require 'xmlrpc'
|
||||
|
||||
#---------------------------------------------------------------------
|
||||
|
||||
# Helpers for dealing with fibers.
|
||||
{$coroutine, $wait} = require './fibers-utils'
|
||||
|
||||
#=====================================================================
|
||||
|
||||
$sleep = (delay) ->
|
||||
(cb) -> setTimeout cb, delay
|
||||
|
||||
#=====================================================================
|
||||
|
||||
# Note: All methods are synchronous (using fibers).
|
||||
class $XAPI
|
||||
# Number of tries when the connection fails (TCP or XAPI).
|
||||
tries: 10
|
||||
|
||||
constructor: ({@host, @username, @password}) ->
|
||||
@connect()
|
||||
|
||||
connect: (force = false) ->
|
||||
{hostname, port} = $parseUrl "http://#{@host}"
|
||||
|
||||
# Returns nothing if already connected to this host and not forced.
|
||||
if !force and (hostname is @xmlrpc?.options.host)
|
||||
return
|
||||
|
||||
port ?= 443
|
||||
|
||||
@_readableHost = "#{@username}@#{hostname}:#{port}"
|
||||
|
||||
@xmlrpc = $xmlrpc.createSecureClient {
|
||||
host: hostname
|
||||
port
|
||||
rejectUnauthorized: false
|
||||
timeout: 10
|
||||
}
|
||||
|
||||
# Logs in.
|
||||
@logIn()
|
||||
|
||||
call: (method, args...) ->
|
||||
@connect() unless @xmlrpc
|
||||
|
||||
tries = @tries
|
||||
do helper = =>
|
||||
try
|
||||
result = $wait (callback) =>
|
||||
actualArgs = if @sessionId
|
||||
[@sessionId, args...]
|
||||
else
|
||||
args
|
||||
|
||||
$debug '%s: %s(...)', @_readableHost, method
|
||||
@xmlrpc.methodCall method, actualArgs, callback
|
||||
|
||||
# Returns the plain result if it does not have a valid XAPI format.
|
||||
return result unless result.Status?
|
||||
|
||||
# Returns the result's value if all went well.
|
||||
return result.Value if result.Status is 'Success'
|
||||
|
||||
# Something went wrong.
|
||||
error = result.ErrorDescription or value
|
||||
catch error # Captures the error if it was thrown.
|
||||
|
||||
# If it failed too many times, just stop.
|
||||
throw error unless --tries
|
||||
|
||||
# Gets the error code for transport errors and XAPI errors.
|
||||
code = error.code or error[0]
|
||||
|
||||
$debug 'Error from %s: %s', @_readableHost, code
|
||||
|
||||
switch code
|
||||
|
||||
# XAPI sometimes closes the connection when the server is no
|
||||
# longer pool master (`event.next`), so we have to retry at
|
||||
# least once to know who is the new pool master.
|
||||
when 'ECONNRESET', \
|
||||
'ECONNREFUSED', \
|
||||
'EHOSTUNREACH', \
|
||||
'ETIMEDOUT', \
|
||||
'HOST_STILL_BOOTING', \
|
||||
'HOST_HAS_NO_MANAGEMENT_IP'
|
||||
# Node.js seems to reuse the broken socket, so we add a small
|
||||
# delay.
|
||||
#
|
||||
# FIXME Magic number!!!
|
||||
#
|
||||
# I would like to be able to use a shorter delay but for
|
||||
# some reason, when we connect to XAPI at a given moment,
|
||||
# the connection hangs.
|
||||
$wait $sleep 5e3
|
||||
helper()
|
||||
|
||||
# XAPI is sometimes reinitialized and sessions are lost.
|
||||
# We try to log in again if necessary.
|
||||
when 'SESSION_INVALID'
|
||||
@logIn()
|
||||
helper()
|
||||
|
||||
# Except during the login process, catch this error and try to
|
||||
# log in again.
|
||||
when 'SESSION_AUTHENTICATION_FAILED'
|
||||
throw error unless @sessionId
|
||||
|
||||
@logIn()
|
||||
helper()
|
||||
|
||||
# If the current host is a slave, change the current host,
|
||||
# reconnect and retry.
|
||||
when 'HOST_IS_SLAVE'
|
||||
@host = error[1]
|
||||
@connect()
|
||||
helper()
|
||||
|
||||
# This error has not been handled, just forward it.
|
||||
else
|
||||
throw error
|
||||
|
||||
logIn: ->
|
||||
# FIXME: Ugly hack.
|
||||
return if @_logging
|
||||
@_logging = true
|
||||
|
||||
# Makes sure there is no session id left.
|
||||
delete @sessionId
|
||||
|
||||
@sessionId = @call 'session.login_with_password', @username, @password
|
||||
|
||||
$debug 'Logged in %s (session = %s)', @_readableHost, @sessionId
|
||||
|
||||
# FIXME: Ugly hack.
|
||||
delete @_logging
|
||||
|
||||
#=====================================================================
|
||||
|
||||
module.exports = $XAPI
|
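The heart of the removed `call` method is a bounded retry loop around the XML-RPC call. A simplified sketch of that control flow in plain async JavaScript (the 10 tries, the 5 s pause and the handled error codes come from the code above; `xapi.call` and `xapi.logIn` are stand-ins for the removed methods):

const sleep = ms => new Promise(resolve => setTimeout(resolve, ms))

// Retry a XAPI call on transient errors, mirroring the removed logic.
async function callWithRetry (xapi, method, ...args) {
  let tries = 10
  while (true) {
    try {
      return await xapi.call(method, ...args)
    } catch (error) {
      // If it failed too many times, just stop.
      if (!--tries) throw error

      const code = error.code || error[0]
      if (code === 'SESSION_INVALID') {
        await xapi.logIn()   // session was lost: log in again and retry
      } else if (code === 'ECONNRESET' || code === 'ETIMEDOUT') {
        await sleep(5e3)     // transient transport error: wait, then retry
      } else {
        throw error          // unhandled error: forward it
      }
    }
  }
}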
600
src/xapi.js
Normal file
@ -0,0 +1,600 @@
|
||||
import createDebug from 'debug'
|
||||
import eventToPromise from 'event-to-promise'
|
||||
import find from 'lodash.find'
|
||||
import forEach from 'lodash.foreach'
|
||||
import got from 'got'
|
||||
import map from 'lodash.map'
|
||||
import unzip from '@julien-f/unzip'
|
||||
import {PassThrough} from 'stream'
|
||||
import {promisify} from 'bluebird'
|
||||
import {Xapi as XapiBase} from 'xen-api'
|
||||
|
||||
import {debounce} from './decorators'
|
||||
import {ensureArray, noop, parseXml, pFinally} from './utils'
|
||||
import {JsonRpcError} from './api-errors'
|
||||
|
||||
const debug = createDebug('xo:xapi')
|
||||
|
||||
// ===================================================================
|
||||
|
||||
const gotPromise = promisify(got)
|
||||
|
||||
const wrapError = error => {
|
||||
const e = new Error(error[0])
|
||||
e.code = error[0]
|
||||
e.params = error.slice(1)
|
||||
return e
|
||||
}
|
||||
|
||||
// ===================================================================
|
||||
|
||||
const typeToNamespace = Object.create(null)
|
||||
forEach([
|
||||
'Bond',
|
||||
'DR_task',
|
||||
'GPU_group',
|
||||
'PBD',
|
||||
'PCI',
|
||||
'PGPU',
|
||||
'PIF',
|
||||
'PIF_metrics',
|
||||
'SM',
|
||||
'SR',
|
||||
'VBD',
|
||||
'VBD_metrics',
|
||||
'VDI',
|
||||
'VGPU',
|
||||
'VGPU_type',
|
||||
'VLAN',
|
||||
'VM',
|
||||
'VM_appliance',
|
||||
'VM_guest_metrics',
|
||||
'VM_metrics',
|
||||
'VMPP',
|
||||
'VTPM'
|
||||
], namespace => {
|
||||
typeToNamespace[namespace.toLowerCase()] = namespace
|
||||
})
|
||||
|
||||
// Object types given by `xen-api` are always lowercase but the
|
||||
// namespaces in the Xen API can have a different casing.
|
||||
const getNamespaceForType = (type) => typeToNamespace[type] || type
|
||||
|
||||
// ===================================================================
|
||||
|
||||
export const isHostRunning = (host) => {
|
||||
const {$metrics: metrics} = host
|
||||
|
||||
return metrics && metrics.live
|
||||
}
|
||||
|
||||
const VM_RUNNING_POWER_STATES = {
|
||||
Running: true,
|
||||
Paused: true
|
||||
}
|
||||
export const isVmRunning = (vm) => VM_RUNNING_POWER_STATES[vm.power_state]
|
||||
|
||||
// ===================================================================
|
||||
|
||||
export default class Xapi extends XapiBase {
|
||||
constructor (...args) {
|
||||
super(...args)
|
||||
|
||||
const objectsWatchers = this._objectWatchers = Object.create(null)
|
||||
const taskWatchers = this._taskWatchers = Object.create(null)
|
||||
|
||||
const onAddOrUpdate = objects => {
|
||||
forEach(objects, object => {
|
||||
const {
|
||||
$id: id,
|
||||
$ref: ref
|
||||
} = object
|
||||
|
||||
// Watched object.
|
||||
if (id in objectsWatchers) {
|
||||
objectsWatchers[id].resolve(object)
|
||||
delete objectsWatchers[id]
|
||||
}
|
||||
if (ref in objectsWatchers) {
|
||||
objectsWatchers[ref].resolve(object)
|
||||
delete objectsWatchers[ref]
|
||||
}
|
||||
|
||||
// Watched task.
|
||||
if (ref in taskWatchers) {
|
||||
const {status} = object
|
||||
|
||||
if (status === 'success') {
|
||||
taskWatchers[ref].resolve(object.result)
|
||||
} else if (status === 'failure') {
|
||||
taskWatchers[ref].reject(wrapError(object.error_info))
|
||||
} else {
|
||||
return
|
||||
}
|
||||
|
||||
delete taskWatchers[ref]
|
||||
}
|
||||
})
|
||||
}
|
||||
this.objects.on('add', onAddOrUpdate)
|
||||
this.objects.on('update', onAddOrUpdate)
|
||||
}
|
||||
|
||||
// =================================================================
|
||||
|
||||
// Wait for an object to appear or to be updated.
|
||||
//
|
||||
// TODO: implement a timeout.
|
||||
_waitObject (idOrUuidOrRef) {
|
||||
let watcher = this._objectWatchers[idOrUuidOrRef]
|
||||
if (!watcher) {
|
||||
let resolve, reject
|
||||
const promise = new Promise((resolve_, reject_) => {
|
||||
resolve = resolve_
|
||||
reject = reject_
|
||||
})
|
||||
|
||||
// Register the watcher.
|
||||
watcher = this._objectWatchers[idOrUuidOrRef] = {
|
||||
promise,
|
||||
resolve,
|
||||
reject
|
||||
}
|
||||
}
|
||||
|
||||
return watcher.promise
|
||||
}
|
||||
|
||||
// Returns the object if it is already present, or waits for it.
|
||||
async _getOrWaitObject (idOrUuidOrRef) {
|
||||
return (
|
||||
this.getObject(idOrUuidOrRef, undefined) ||
|
||||
this._waitObject(idOrUuidOrRef)
|
||||
)
|
||||
}
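`_getOrWaitObject` first tries the synchronous object cache and otherwise parks a promise in `_objectWatchers`, which the `add`/`update` handler in the constructor resolves once the object shows up. A hedged usage sketch (the ref value is invented):

// Somewhere inside another Xapi method, after an RPC that returned a ref:
const vmRef = 'OpaqueRef:0000-example' // invented value
const vm = await this._getOrWaitObject(vmRef)
// resolves immediately if the object is cached, otherwise on the next
// 'add'/'update' event carrying this ref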
|
||||
|
||||
// =================================================================
|
||||
|
||||
// Create a task.
|
||||
//
|
||||
// Returns the task object from the Xapi.
|
||||
async _createTask (name = 'untitled task', description = '') {
|
||||
const ref = await this.call('task.create', `[XO] ${name}`, description)
|
||||
debug('task created: %s', name)
|
||||
|
||||
pFinally(this._watchTask(ref), () => {
|
||||
this.call('task.destroy', ref).then(() => {
|
||||
debug('task destroyed: %s', name)
|
||||
})
|
||||
})
|
||||
|
||||
return this._getOrWaitObject(ref)
|
||||
}
|
||||
|
||||
// Waits for a task to be resolved.
|
||||
_watchTask (ref) {
|
||||
// If a task object is passed, unpack the ref.
|
||||
if (typeof ref === 'object' && ref.$ref) ref = ref.$ref
|
||||
|
||||
let watcher = this._taskWatchers[ref]
|
||||
if (!watcher) {
|
||||
let resolve, reject
|
||||
const promise = new Promise((resolve_, reject_) => {
|
||||
resolve = resolve_
|
||||
reject = reject_
|
||||
})
|
||||
|
||||
// Register the watcher.
|
||||
watcher = this._taskWatchers[ref] = {
|
||||
promise,
|
||||
resolve,
|
||||
reject
|
||||
}
|
||||
}
|
||||
|
||||
return watcher.promise
|
||||
}
|
||||
|
||||
// =================================================================
|
||||
|
||||
async _setObjectProperties (id, props) {
|
||||
const {
|
||||
$ref: ref,
|
||||
$type: type
|
||||
} = this.getObject(id)
|
||||
|
||||
const namespace = getNamespaceForType(type)
|
||||
|
||||
// TODO: the thrown error should contain the name of the
|
||||
// properties that failed to be set.
|
||||
await Promise.all(map(props, (value, name) => {
|
||||
if (value != null) {
|
||||
return this.call(`${namespace}.set_${name}`, ref, value)
|
||||
}
|
||||
}))
|
||||
}
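`_setObjectProperties` fans each non-null property out into the matching `<Namespace>.set_<name>` RPC. A hypothetical call from another method of this class:

// Hypothetical call for an SR (whose $type is 'sr'):
await this._setObjectProperties(srId, {
  name_label: 'backups',      // issues SR.set_name_label(ref, 'backups')
  name_description: undefined // skipped: the value is == null
})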
|
||||
|
||||
async setPoolProperties ({
|
||||
name_label,
|
||||
name_description
|
||||
}) {
|
||||
await this._setObjectProperties(this.pool.$id, {
|
||||
name_label,
|
||||
name_description
|
||||
})
|
||||
}
|
||||
|
||||
async setSrProperties (id, {
|
||||
name_label,
|
||||
name_description
|
||||
}) {
|
||||
await this._setObjectProperties(id, {
|
||||
name_label,
|
||||
name_description
|
||||
})
|
||||
}
|
||||
|
||||
// =================================================================
|
||||
|
||||
// FIXME: should be static
|
||||
@debounce(24 * 60 * 60 * 1000)
|
||||
async _getXenUpdates () {
|
||||
const [body, {statusCode}] = await gotPromise(
|
||||
'http://updates.xensource.com/XenServer/updates.xml'
|
||||
)
|
||||
|
||||
if (statusCode !== 200) {
|
||||
throw new JsonRpcError('cannot fetch patches list from Citrix')
|
||||
}
|
||||
|
||||
const {patchdata: data} = parseXml(body)
|
||||
|
||||
const patches = Object.create(null)
|
||||
forEach(data.patches.patch, patch => {
|
||||
patches[patch.uuid] = {
|
||||
date: patch.timestamp,
|
||||
description: patch['name-description'],
|
||||
documentationUrl: patch.url,
|
||||
guidance: patch['after-apply-guidance'],
|
||||
name: patch['name-label'],
|
||||
url: patch['patch-url'],
|
||||
uuid: patch.uuid,
|
||||
conflicts: map(ensureArray(patch.conflictingpatches), patch => {
|
||||
return patch.conflictingpatch.uuid
|
||||
}),
|
||||
requirements: map(ensureArray(patch.requiredpatches), patch => {
|
||||
return patch.requiredpatch.uuid
|
||||
})
|
||||
|
||||
// TODO: what does it mean, should we handle it?
|
||||
// version: patch.version,
|
||||
}
|
||||
})
|
||||
|
||||
const resolveVersionPatches = function (uuids) {
|
||||
const versionPatches = Object.create(null)
|
||||
|
||||
forEach(uuids, ({uuid}) => {
|
||||
versionPatches[uuid] = patches[uuid]
|
||||
})
|
||||
|
||||
return versionPatches
|
||||
}
|
||||
|
||||
const versions = Object.create(null)
|
||||
let latestVersion
|
||||
forEach(data.serverversions.version, version => {
|
||||
versions[version.value] = {
|
||||
date: version.timestamp,
|
||||
name: version.name,
|
||||
id: version.value,
|
||||
documentationUrl: version.url,
|
||||
patches: resolveVersionPatches(version.patch)
|
||||
}
|
||||
|
||||
if (version.latest) {
|
||||
latestVersion = versions[version.value]
|
||||
}
|
||||
})
|
||||
|
||||
return {
|
||||
patches,
|
||||
latestVersion,
|
||||
versions
|
||||
}
|
||||
}
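The debounced fetch above is normalised into three plain maps. Roughly this shape, with every value invented for illustration:

// Illustrative shape only, every value is invented:
const example = {
  patches: {
    'patch-uuid': {name: 'XS65E001', date: '...', guidance: '...', conflicts: [], requirements: []}
  },
  versions: {
    '6.5.0': {name: 'XenServer 6.5', id: '6.5.0', patches: {/* subset of `patches` */}}
  },
  latestVersion: null // in the real result, the `versions` entry flagged `latest`
}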
|
||||
|
||||
// =================================================================
|
||||
|
||||
async listMissingPoolPatchesOnHost (hostId) {
|
||||
const host = this.getObject(hostId)
|
||||
const {product_version: version} = host.software_version
|
||||
|
||||
const all = (await this._getXenUpdates()).versions[version].patches
|
||||
|
||||
const installed = Object.create(null)
|
||||
forEach(host.$patches, hostPatch => {
|
||||
installed[hostPatch.$pool_patch.uuid] = true
|
||||
})
|
||||
|
||||
const installable = []
|
||||
forEach(all, (patch, uuid) => {
|
||||
if (installed[uuid]) {
|
||||
return
|
||||
}
|
||||
|
||||
for (let uuid of patch.conflicts) {
|
||||
if (uuid in installed) {
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
installable.push(patch)
|
||||
})
|
||||
|
||||
return installable
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------
|
||||
|
||||
async uploadPoolPatch (stream, length) {
|
||||
const task = await this._createTask('Patch upload')
|
||||
|
||||
const [, patchRef] = await Promise.all([
|
||||
gotPromise('http://' + this.pool.$master.address + '/pool_patch_upload', {
|
||||
method: 'put',
|
||||
body: stream,
|
||||
query: {
|
||||
session_id: this.sessionId,
|
||||
task_id: task.$ref
|
||||
},
|
||||
headers: {
|
||||
'content-length': length
|
||||
}
|
||||
}),
|
||||
this._watchTask(task)
|
||||
])
|
||||
|
||||
return this._getOrWaitObject(patchRef)
|
||||
}
|
||||
|
||||
async _getOrUploadPoolPatch (uuid) {
|
||||
try {
|
||||
return this.getObjectByUuid(uuid)
|
||||
} catch (error) {}
|
||||
|
||||
debug('downloading patch %s', uuid)
|
||||
|
||||
const patchInfo = (await this._getXenUpdates()).patches[uuid]
|
||||
if (!patchInfo) {
|
||||
throw new Error('no such patch ' + uuid)
|
||||
}
|
||||
|
||||
const PATCH_RE = /\.xsupdate$/
|
||||
const proxy = new PassThrough()
|
||||
got(patchInfo.url).on('error', error => {
|
||||
// TODO: better error handling
|
||||
console.error(error)
|
||||
}).pipe(unzip.Parse()).on('entry', entry => {
|
||||
if (PATCH_RE.test(entry.path)) {
|
||||
proxy.emit('length', entry.size)
|
||||
entry.pipe(proxy)
|
||||
} else {
|
||||
entry.autodrain()
|
||||
}
|
||||
}).on('error', error => {
|
||||
// TODO: better error handling
|
||||
console.error(error)
|
||||
})
|
||||
|
||||
const length = await eventToPromise(proxy, 'length')
|
||||
return this.uploadPoolPatch(proxy, length)
|
||||
}
|
||||
|
||||
async installPoolPatchOnHost (patchUuid, hostId) {
|
||||
const patch = await this._getOrUploadPoolPatch(patchUuid)
|
||||
const host = this.getObject(hostId)
|
||||
|
||||
debug('installing patch %s', patchUuid)
|
||||
|
||||
await this.call('pool_patch.apply', patch.$ref, host.$ref)
|
||||
}
|
||||
|
||||
async installPoolPatchOnAllHosts (patchUuid) {
|
||||
const patch = await this._getOrUploadPoolPatch(patchUuid)
|
||||
|
||||
await this.call('pool_patch.pool_apply', patch.$ref)
|
||||
}
|
||||
|
||||
// =================================================================
|
||||
|
||||
async _deleteVdi (vdiId) {
|
||||
const vdi = this.getObject(vdiId)
|
||||
|
||||
await this.call('VDI.destroy', vdi.$ref)
|
||||
}
|
||||
|
||||
async _snapshotVm (vm, nameLabel = vm.name_label) {
|
||||
const ref = await this.call('VM.snapshot', vm.$ref, nameLabel)
|
||||
|
||||
// Convert the template to a VM.
|
||||
await this.call('VM.set_is_a_template', ref, false)
|
||||
|
||||
return ref
|
||||
}
|
||||
|
||||
async deleteVm (vmId, deleteDisks = false) {
|
||||
const vm = this.getObject(vmId)
|
||||
|
||||
if (isVmRunning(vm)) {
|
||||
throw new Error('running VMs cannot be deleted')
|
||||
}
|
||||
|
||||
if (deleteDisks) {
|
||||
await Promise.all(map(vm.$VBDs, vbd => {
|
||||
try {
|
||||
return this._deleteVdi(vbd.$VDI).catch(noop)
|
||||
} catch (_) {}
|
||||
}))
|
||||
}
|
||||
|
||||
await this.call('VM.destroy', vm.$ref)
|
||||
}
|
||||
|
||||
getVmConsoleUrl (vmId) {
|
||||
const vm = this.getObject(vmId)
|
||||
|
||||
const console = find(vm.$consoles, { protocol: 'rfb' })
|
||||
if (!console) {
|
||||
throw new Error('no RFB console found')
|
||||
}
|
||||
|
||||
return `${console.location}&session_id=${this.sessionId}`
|
||||
}
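The returned string is simply the console's `location` with the current session id appended, which is what a console client needs to authenticate. Illustrative call (the id is a placeholder):

const url = xapi.getVmConsoleUrl(vmId) // vmId: any id/uuid/ref known to getObject
// `url` is the console's `location` with '&session_id=<current session>' appended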
|
||||
|
||||
// Returns a stream to the exported VM.
|
||||
async exportVm (vmId, {compress = true} = {}) {
|
||||
const vm = this.getObject(vmId)
|
||||
|
||||
let host
|
||||
let snapshotRef
|
||||
if (isVmRunning(vm)) {
|
||||
host = vm.$resident_on
|
||||
snapshotRef = await this._snapshotVm(vm)
|
||||
} else {
|
||||
host = this.pool.$master
|
||||
}
|
||||
|
||||
const task = await this._createTask('VM Snapshot', vm.name_label)
|
||||
pFinally(this._watchTask(task), () => {
|
||||
if (snapshotRef) {
|
||||
this.deleteVm(snapshotRef, true)
|
||||
}
|
||||
})
|
||||
|
||||
const stream = got({
|
||||
hostname: host.address,
|
||||
path: '/export/'
|
||||
}, {
|
||||
query: {
|
||||
ref: snapshotRef || vm.$ref,
|
||||
session_id: this.sessionId,
|
||||
task_id: task.$ref,
|
||||
use_compression: compress ? 'true' : 'false'
|
||||
}
|
||||
})
|
||||
stream.response = eventToPromise(stream, 'response')
|
||||
|
||||
return stream
|
||||
}
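`exportVm` hands back the raw `got` request stream plus a `response` promise, so a caller can pipe the XVA straight to disk. A hedged usage sketch assuming an existing `xapi` instance and an arbitrary file name:

import {createWriteStream} from 'fs'

const stream = await xapi.exportVm(vmId, {compress: true})
await stream.response // headers received: the export has started
stream.pipe(createWriteStream('/tmp/vm-backup.xva')) // arbitrary destination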
|
||||
|
||||
async snapshotVm (vmId) {
|
||||
return await this._getOrWaitObject(
|
||||
await this._snapshotVm(
|
||||
this.getObject(vmId)
|
||||
)
|
||||
)
|
||||
}
|
||||
|
||||
async attachVdiToVm (vdiId, vmId, {
|
||||
bootable = false,
|
||||
mode = 'RW',
|
||||
position
|
||||
} = {}) {
|
||||
const vdi = this.getObject(vdiId)
|
||||
const vm = this.getObject(vmId)
|
||||
|
||||
if (position == null) {
|
||||
forEach(vm.$VBDs, vbd => {
|
||||
const curPos = +vbd.userdevice
|
||||
if (!(position > curPos)) {
|
||||
position = curPos
|
||||
}
|
||||
})
|
||||
|
||||
position = position == null ? 0 : position + 1
|
||||
}
|
||||
|
||||
const vbdRef = await this.call('VBD.create', {
|
||||
bootable,
|
||||
empty: false,
|
||||
mode,
|
||||
other_config: {},
|
||||
qos_algorithm_params: {},
|
||||
qos_algorithm_type: '',
|
||||
type: 'Disk',
|
||||
userdevice: String(position),
|
||||
VDI: vdi.$ref,
|
||||
VM: vm.$ref
|
||||
})
|
||||
|
||||
await this.call('VBD.plug', vbdRef)
|
||||
}
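When no `position` is given, the loop above picks the highest existing `userdevice` plus one, or 0 for a VM without any VBDs. Illustrative call assuming an existing `xapi` instance:

// Existing VBD userdevices '0' and '2' would yield position 3;
// a VM without VBDs gets position 0.
await xapi.attachVdiToVm(vdiId, vmId, {bootable: true}) // mode defaults to 'RW'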
|
||||
|
||||
// =================================================================
|
||||
|
||||
async createVirtualInterface (vmId, networkId, {
|
||||
mac = '',
|
||||
mtu = 1500,
|
||||
position = 0
|
||||
} = {}) {
|
||||
const vm = this.getObject(vmId)
|
||||
const network = this.getObject(networkId)
|
||||
|
||||
const ref = await this.call('VIF.create', {
|
||||
device: String(position),
|
||||
MAC: String(mac),
|
||||
MTU: String(mtu),
|
||||
network: network.$ref,
|
||||
other_config: {},
|
||||
qos_algorithm_params: {},
|
||||
qos_algorithm_type: '',
|
||||
VM: vm.$ref
|
||||
})
|
||||
|
||||
return await this._getOrWaitObject(ref)
|
||||
}
|
||||
|
||||
// =================================================================
|
||||
|
||||
async _doDockerAction (vmId, action, containerId) {
|
||||
const vm = this.getObject(vmId)
|
||||
const host = vm.$resident_on
|
||||
|
||||
return await this.call('host.call_plugin', host.$ref, 'xscontainer', action, {
|
||||
vmuuid: vm.uuid,
|
||||
container: containerId
|
||||
})
|
||||
}
|
||||
|
||||
async registerDockerContainer (vmId) {
|
||||
await this._doDockerAction(vmId, 'register')
|
||||
}
|
||||
|
||||
async deregisterDockerContainer (vmId) {
|
||||
await this._doDockerAction(vmId, 'deregister')
|
||||
}
|
||||
|
||||
async startDockerContainer (vmId, containerId) {
|
||||
await this._doDockerAction(vmId, 'start', containerId)
|
||||
}
|
||||
|
||||
async stopDockerContainer (vmId, containerId) {
|
||||
await this._doDockerAction(vmId, 'stop', containerId)
|
||||
}
|
||||
|
||||
async restartDockerContainer (vmId, containerId) {
|
||||
await this._doDockerAction(vmId, 'restart', containerId)
|
||||
}
|
||||
|
||||
async pauseDockerContainer (vmId, containerId) {
|
||||
await this._doDockerAction(vmId, 'pause', containerId)
|
||||
}
|
||||
|
||||
async unpauseDockerContainer (vmId, containerId) {
|
||||
await this._doDockerAction(vmId, 'unpause', containerId)
|
||||
}
|
||||
|
||||
// =================================================================
|
||||
|
||||
}
|
515
src/xo.coffee
515
src/xo.coffee
@ -1,515 +0,0 @@
|
||||
{EventEmitter: $EventEmitter} = require 'events'
|
||||
{format: $formatUrl, parse: $parseUrl} = require 'url'
|
||||
|
||||
$Bluebird = require 'bluebird'
|
||||
$debug = (require 'debug') 'xo:xo'
|
||||
$forEach = require 'lodash.foreach'
|
||||
$includes = require 'lodash.includes'
|
||||
$isEmpty = require 'lodash.isempty'
|
||||
$isString = require 'lodash.isstring'
|
||||
$pluck = require 'lodash.pluck'
|
||||
$Promise = require 'bluebird'
|
||||
$proxyRequest = require 'proxy-http-request'
|
||||
$httpRequest = require 'request'
|
||||
{createClient: $createRedisClient} = require 'then-redis'
|
||||
{createClient: $createXapiClient} = require('xen-api')
|
||||
{
|
||||
hash: $hash
|
||||
needsRehash: $needsRehash
|
||||
verify: $verifyHash
|
||||
} = require 'hashy'
|
||||
|
||||
$Connection = require './connection'
|
||||
$Model = require './model'
|
||||
$RedisCollection = require './collection/redis'
|
||||
$spec = require './spec'
|
||||
{$coroutine, $wait} = require './fibers-utils'
|
||||
{
|
||||
generateToken: $generateToken
|
||||
multiKeyHash: $multiKeyHash
|
||||
} = require './utils'
|
||||
{$MappedCollection} = require './MappedCollection'
|
||||
|
||||
{Set, $for: {getIterator}} = (require 'babel-runtime/core-js').default
|
||||
|
||||
#=====================================================================
|
||||
# Models and collections.
|
||||
|
||||
class $Acl extends $Model
|
||||
@create: (subject, object) ->
|
||||
return $Acl.hash(subject, object).then((hash) ->
|
||||
return new $Acl {
|
||||
id: hash
|
||||
subject
|
||||
object
|
||||
}
|
||||
)
|
||||
@hash: (subject, object) -> $multiKeyHash(subject, object)
|
||||
|
||||
class $Acls extends $RedisCollection
|
||||
Model: $Acl
|
||||
create: (subject, object) ->
|
||||
return $Acl.create(subject, object).then((acl) => @add acl)
|
||||
delete: (subject, object) ->
|
||||
return $Acl.hash(subject, object).then((hash) => @remove hash)
|
||||
|
||||
#---------------------------------------------------------------------
|
||||
|
||||
class $Server extends $Model
|
||||
validate: -> # TODO
|
||||
|
||||
class $Servers extends $RedisCollection
|
||||
Model: $Server
|
||||
|
||||
#---------------------------------------------------------------------
|
||||
|
||||
class $Token extends $Model
|
||||
@generate: (userId) ->
|
||||
return $generateToken().then (token) ->
|
||||
return new $Token {
|
||||
id: token
|
||||
user_id: userId
|
||||
}
|
||||
|
||||
validate: -> # TODO
|
||||
|
||||
class $Tokens extends $RedisCollection
|
||||
Model: $Token
|
||||
|
||||
generate: (userId) ->
|
||||
return ($Token.generate userId).then (token) =>
|
||||
return @add token
|
||||
|
||||
#---------------------------------------------------------------------
|
||||
|
||||
class $User extends $Model
|
||||
default: {
|
||||
permission: 'none'
|
||||
}
|
||||
|
||||
validate: -> # TODO
|
||||
|
||||
# FIXME: Async function should be explicit and return promise.
|
||||
setPassword: $coroutine (password) ->
|
||||
@set 'pw_hash', $wait $hash password
|
||||
return
|
||||
|
||||
# Checks the password and updates the hash if necessary.
|
||||
#
|
||||
# FIXME: Async function should be explicit and return promise.
|
||||
checkPassword: $coroutine (password) ->
|
||||
hash = @get 'pw_hash'
|
||||
|
||||
# There might be no hash if the user authenticate with another
|
||||
# method (e.g. LDAP).
|
||||
unless hash and $wait $verifyHash password, hash
|
||||
return false
|
||||
|
||||
if $needsRehash hash
|
||||
$wait @setPassword password
|
||||
|
||||
return true
|
||||
|
||||
hasPermission: (permission) ->
|
||||
perms = {
|
||||
none: 0
|
||||
read: 1
|
||||
write: 2
|
||||
admin: 3
|
||||
}
|
||||
|
||||
perms[@get 'permission'] >= perms[permission]
|
||||
|
||||
class $Users extends $RedisCollection
|
||||
Model: $User
|
||||
|
||||
create: $coroutine (email, password, permission) ->
|
||||
user = new $User {
|
||||
email: email
|
||||
permission: permission ? 'none'
|
||||
}
|
||||
|
||||
$wait(user.setPassword password) if password?
|
||||
|
||||
@add user
|
||||
|
||||
#=====================================================================
|
||||
|
||||
class $XO extends $EventEmitter
|
||||
|
||||
constructor: ->
|
||||
# These will be initialized in start().
|
||||
@servers = @tokens = @users = @_UUIDsToKeys = null
|
||||
|
||||
# Connections to Xen servers/pools.
|
||||
@_xapis = Object.create null
|
||||
|
||||
# Connections to users.
|
||||
@connections = Object.create null
|
||||
@_nextConId = 0
|
||||
|
||||
# Collections of XAPI objects mapped to XO API.
|
||||
@_xobjs = new $MappedCollection()
|
||||
$spec.call @_xobjs
|
||||
|
||||
@_proxyRequests = Object.create null
|
||||
|
||||
@_authenticationProviders = new Set()
|
||||
|
||||
taskWatchers = @_taskWatchers = Object.create null
|
||||
@_xobjs.on 'rule=task', (event, tasks) ->
|
||||
return unless event is 'enter'
|
||||
|
||||
$forEach tasks, ({val: task}) ->
|
||||
{ref} = task
|
||||
|
||||
watcher = taskWatchers[ref]
|
||||
return unless watcher?
|
||||
|
||||
{status} = task
|
||||
if status is 'success'
|
||||
watcher.resolve task.result
|
||||
else if status is 'failure'
|
||||
watcher.reject task.error_info
|
||||
else
|
||||
return
|
||||
|
||||
delete taskWatchers[ref]
|
||||
|
||||
return
|
||||
|
||||
return
|
||||
|
||||
start: $coroutine (config) ->
|
||||
# Connects to Redis.
|
||||
redis = $createRedisClient config.redis?.uri
|
||||
|
||||
# Creates persistent collections.
|
||||
@acls = new $Acls {
|
||||
connection: redis
|
||||
prefix: 'xo:acl'
|
||||
indexes: ['subject', 'object']
|
||||
}
|
||||
@servers = new $Servers {
|
||||
connection: redis
|
||||
prefix: 'xo:server'
|
||||
indexes: ['host']
|
||||
}
|
||||
@tokens = new $Tokens {
|
||||
connection: redis
|
||||
prefix: 'xo:token'
|
||||
indexes: ['user_id']
|
||||
}
|
||||
@users = new $Users {
|
||||
connection: redis
|
||||
prefix: 'xo:user'
|
||||
indexes: ['email']
|
||||
}
|
||||
|
||||
# Proxies tokens/users related events to XO and removes tokens
|
||||
# when their related user is removed.
|
||||
@tokens.on 'remove', (ids) =>
|
||||
@emit "token.revoked:#{id}" for id in ids
|
||||
@users.on 'remove', $coroutine (ids) =>
|
||||
@emit "user.revoked:#{id}" for id in ids
|
||||
tokens = $wait @tokens.get {user_id: id}
|
||||
if tokens.length
|
||||
@tokens.remove (token.id for token in tokens)
|
||||
|
||||
# When objects enter or exit, sends a notification to all
|
||||
# connected clients.
|
||||
do =>
|
||||
entered = {}
|
||||
exited = {}
|
||||
|
||||
dispatcherRegistered = false
|
||||
dispatcher = =>
|
||||
unless $isEmpty entered
|
||||
enterParams =
|
||||
type: 'enter'
|
||||
items: $pluck entered, 'val'
|
||||
for id, connection of @connections
|
||||
if connection.has('user_id')
|
||||
connection.notify 'all', enterParams
|
||||
|
||||
unless $isEmpty exited
|
||||
exitParams =
|
||||
type: 'exit'
|
||||
items: $pluck exited, 'val'
|
||||
for id, connection of @connections
|
||||
# Notify only authenticated clients.
|
||||
if connection.has('user_id')
|
||||
connection.notify 'all', exitParams
|
||||
dispatcherRegistered = false
|
||||
entered = {}
|
||||
exited = {}
|
||||
|
||||
@_xobjs.on 'any', (event, items) ->
|
||||
unless dispatcherRegistered
|
||||
dispatcherRegistered = true
|
||||
process.nextTick dispatcher
|
||||
|
||||
if event is 'exit'
|
||||
$forEach items, (item) ->
|
||||
{key} = item
|
||||
delete entered[key]
|
||||
exited[key] = item
|
||||
return
|
||||
else
|
||||
$forEach items, (item) ->
|
||||
{key} = item
|
||||
delete exited[key]
|
||||
entered[key] = item
|
||||
return
|
||||
|
||||
# Exports the map from UUIDs to keys.
|
||||
{$UUIDsToKeys: @_UUIDsToKeys} = (@_xobjs.get 'xo')
|
||||
|
||||
# Prevents errors from stopping the server.
|
||||
connect = $coroutine (server) =>
|
||||
try
|
||||
$wait @connectServer server
|
||||
catch error
|
||||
console.error(
|
||||
"[WARN] #{server.host}:"
|
||||
error[0] ? error.stack ? error.code ? error
|
||||
)
|
||||
|
||||
# Connects to existing servers.
|
||||
connect server for server in $wait @servers.get()
|
||||
|
||||
#-------------------------------------------------------------------
|
||||
|
||||
connectServer: (server) ->
|
||||
if server.properties
|
||||
server = server.properties
|
||||
|
||||
xapi = @_xapis[server.id] = $createXapiClient({
|
||||
url: server.host,
|
||||
auth: {
|
||||
user: server.username,
|
||||
password: server.password
|
||||
}
|
||||
})
|
||||
|
||||
xapi.objects.on('add', (objects) =>
|
||||
@_xapis[xapi.pool.$id] = xapi
|
||||
|
||||
@_xobjs.set(objects, {
|
||||
add: true,
|
||||
update: false,
|
||||
remove: false
|
||||
})
|
||||
)
|
||||
xapi.objects.on('update', (objects) =>
|
||||
@_xapis[xapi.pool.$id] = xapi
|
||||
|
||||
@_xobjs.set(objects, {
|
||||
add: true,
|
||||
update: true,
|
||||
remove: false
|
||||
})
|
||||
)
|
||||
xapi.objects.on('remove', (objects) =>
|
||||
@_xobjs.removeWithPredicate (object) =>
|
||||
return object.genval?.$id of objects
|
||||
)
|
||||
|
||||
return xapi.connect()
|
||||
|
||||
disconnectServer: (server) ->
|
||||
id = server and (server.properties?.id ? server.id) ? server
|
||||
|
||||
xapi = @_xapis[id]
|
||||
return $Bluebird.reject(new Error('no such server')) if not xapi
|
||||
|
||||
delete @_xapis[id]
|
||||
delete @_xapis[xapi.pool.id] if xapi.pool
|
||||
|
||||
return xapi.disconnect()
|
||||
|
||||
# Returns the XAPI connection associated to an object.
|
||||
getXAPI: (object, type) ->
|
||||
if $isString object
|
||||
object = @getObject object, type
|
||||
|
||||
{$poolId: poolId} = object
|
||||
unless poolId
|
||||
throw new Error "object #{object.id} does not belong to a pool"
|
||||
|
||||
xapi = @_xapis[poolId]
|
||||
unless xapi
|
||||
throw new Error "no connection found for object #{object.id}"
|
||||
|
||||
return xapi
|
||||
|
||||
#-------------------------------------------------------------------
|
||||
|
||||
# Returns an object from its key or UUID.
|
||||
getObject: (key, type) ->
|
||||
# Gracefully handles UUIDs.
|
||||
if key of @_UUIDsToKeys
|
||||
key = @_UUIDsToKeys[key]
|
||||
|
||||
obj = @_xobjs.get key
|
||||
|
||||
if type? and (
|
||||
($isString type and type isnt obj.type) or
|
||||
not $includes type, obj.type # Array
|
||||
)
|
||||
throw new Error "unexpected type: got #{obj.type} instead of #{type}"
|
||||
|
||||
return obj
|
||||
|
||||
# Returns objects.
|
||||
getObjects: (keys) ->
|
||||
# Returns all objects if no keys are passed.
|
||||
return @_xobjs.get() unless keys
|
||||
|
||||
# Resolves all UUIDs.
|
||||
{_UUIDsToKeys: UUIDsToKeys} = this
|
||||
for key, index in keys
|
||||
keys[index] = UUIDsToKeys[key] if key of UUIDsToKeys
|
||||
|
||||
# Fetches all objects, ignoring those missing.
|
||||
return @_xobjs.get keys, true
|
||||
|
||||
#-------------------------------------------------------------------
|
||||
|
||||
createUserConnection: (opts) ->
|
||||
connections = @connections
|
||||
|
||||
connection = new $Connection opts
|
||||
connection.id = @_nextConId++
|
||||
connection.on 'close', -> delete connections[@id]
|
||||
|
||||
connections[connection.id] = connection
|
||||
|
||||
return connection
|
||||
|
||||
registerProxyRequest: $coroutine (opts) ->
|
||||
url = "/#{$wait $generateToken()}"
|
||||
|
||||
if $isString opts
|
||||
opts = $parseUrl opts
|
||||
|
||||
opts.method = if opts.method?
|
||||
opts.method.toUpperCase()
|
||||
else
|
||||
'GET'
|
||||
|
||||
if opts.proxyMethod?
|
||||
opts.proxyMethod = opts.proxyMethod.toUpperCase()
|
||||
|
||||
opts.createdAt = Date.now()
|
||||
|
||||
@_proxyRequests[url] = opts
|
||||
|
||||
return url
|
||||
|
||||
#-------------------------------------------------------------------
|
||||
|
||||
handleProxyRequest: (req, res, next) ->
|
||||
unless (
|
||||
(request = @_proxyRequests[req.url]) and
|
||||
req.method is (request.proxyMethod ? request.method)
|
||||
)
|
||||
return next()
|
||||
|
||||
# A proxy request can only be used once.
|
||||
delete @_proxyRequests[req.url]
|
||||
|
||||
$proxyRequest request, req, res
|
||||
|
||||
res.on 'finish', request.onSuccess if request.onSuccess?
|
||||
|
||||
onFailure = request.onFailure ? ( -> )
|
||||
req.on 'close', onFailure
|
||||
|
||||
closeConnection = ->
|
||||
unless res.headersSent
|
||||
res.writeHead 500
|
||||
res.end()
|
||||
|
||||
onFailure()
|
||||
|
||||
return
|
||||
|
||||
req.on 'error', (error) ->
|
||||
console.warn 'request error', error.stack ? error
|
||||
closeConnection()
|
||||
return
|
||||
res.on 'error', (error) ->
|
||||
console.warn 'response error', error.stack ? error
|
||||
closeConnection()
|
||||
return
|
||||
|
||||
return
|
||||
|
||||
#-------------------------------------------------------------------
|
||||
|
||||
watchTask: (ref) ->
|
||||
watcher = @_taskWatchers[ref]
|
||||
unless watcher?
|
||||
resolve = reject = null
|
||||
promise = new $Bluebird (resolve_, reject_) ->
|
||||
resolve = resolve_
|
||||
reject = reject_
|
||||
return
|
||||
|
||||
# Register the watcher
|
||||
watcher = @_taskWatchers[ref] = {
|
||||
promise
|
||||
reject
|
||||
resolve
|
||||
}
|
||||
|
||||
# Unregister the watcher once the promise is resolved.
|
||||
promise.finally(() =>
|
||||
delete @_taskWatchers[ref]
|
||||
return
|
||||
)
|
||||
|
||||
return watcher.promise
|
||||
|
||||
#-------------------------------------------------------------------
|
||||
|
||||
registerAuthenticationProvider: (provider) ->
|
||||
@_authenticationProviders.add(provider)
|
||||
|
||||
unregisterAuthenticationProvider: (provider) ->
|
||||
@_authenticationProviders.remove(provider)
|
||||
|
||||
authenticateUser: $coroutine (credentials) ->
|
||||
# TODO: remove when email has been replaced by username
|
||||
if credentials.email?
|
||||
credentials.username = credentials.email
|
||||
else if credentials.username?
|
||||
credentials.email = credentials.username
|
||||
|
||||
iterator = getIterator(@_authenticationProviders)
|
||||
|
||||
while not (current = iterator.next()).done
|
||||
try
|
||||
result = $wait(current.value(credentials))
|
||||
return result if result instanceof $User
|
||||
|
||||
# TODO: replace email by username
|
||||
if result.username?
|
||||
result.email = result.username
|
||||
delete result.username
|
||||
|
||||
user = $wait @users.first(result)
|
||||
return user if user
|
||||
|
||||
return @users.create(result.email)
|
||||
catch e
|
||||
# Authentication providers may just throw `null` to indicate
|
||||
# they could not authenticate the user without any special
|
||||
# errors.
|
||||
console.error(e) if e?
|
||||
return false
|
||||
|
||||
#=====================================================================
|
||||
|
||||
module.exports = $XO
|
896
src/xo.js
Normal file
896
src/xo.js
Normal file
@ -0,0 +1,896 @@
|
||||
import Bluebird from 'bluebird'
|
||||
import filter from 'lodash.filter'
|
||||
import forEach from 'lodash.foreach'
|
||||
import includes from 'lodash.includes'
|
||||
import isEmpty from 'lodash.isempty'
|
||||
import isString from 'lodash.isstring'
|
||||
import map from 'lodash.map'
|
||||
import proxyRequest from 'proxy-http-request'
|
||||
import XoCollection from 'xo-collection'
|
||||
import XoUniqueIndex from 'xo-collection/unique-index'
|
||||
// import XoView from 'xo-collection/view'
|
||||
import {createClient as createRedisClient} from 'then-redis'
|
||||
import {EventEmitter} from 'events'
|
||||
import {parse as parseUrl} from 'url'
|
||||
|
||||
import * as xapiObjectsToXo from './xapi-objects-to-xo'
|
||||
import Connection from './connection'
|
||||
import User, {Users} from './models/user'
|
||||
import Xapi from './xapi'
|
||||
import {Acls} from './models/acl'
|
||||
import {autobind} from './decorators'
|
||||
import {generateToken} from './utils'
|
||||
import {Groups} from './models/group'
|
||||
import {JsonRpcError, NoSuchObject} from './api-errors'
|
||||
import {ModelAlreadyExists} from './collection'
|
||||
import {Servers} from './models/server'
|
||||
import {Tokens} from './models/token'
|
||||
|
||||
// ===================================================================
|
||||
|
||||
class NoSuchAuthenticationToken extends NoSuchObject {
|
||||
constructor (id) {
|
||||
super(id, 'authentication token')
|
||||
}
|
||||
}
|
||||
|
||||
class NoSuchGroup extends NoSuchObject {
|
||||
constructor (id) {
|
||||
super(id, 'group')
|
||||
}
|
||||
}
|
||||
|
||||
class NoSuchUser extends NoSuchObject {
|
||||
constructor (id) {
|
||||
super(id, 'user')
|
||||
}
|
||||
}
|
||||
|
||||
class NoSuchXenServer extends NoSuchObject {
|
||||
constructor (id) {
|
||||
super(id, 'xen server')
|
||||
}
|
||||
}
|
||||
|
||||
// ===================================================================
|
||||
|
||||
export default class Xo extends EventEmitter {
|
||||
constructor () {
|
||||
super()
|
||||
|
||||
this._objects = new XoCollection()
|
||||
this._objects.createIndex('byRef', new XoUniqueIndex('ref'))
|
||||
|
||||
// These will be initialized in start()
|
||||
//
|
||||
// TODO: remove and put everything in the `_objects` collection.
|
||||
this._acls = null
|
||||
this._groups = null
|
||||
this._servers = null
|
||||
this._tokens = null
|
||||
this._users = null
|
||||
this._UUIDsToKeys = null
|
||||
|
||||
// Connections to Xen servers.
|
||||
this._xapis = Object.create(null)
|
||||
|
||||
// Connections to users.
|
||||
this._nextConId = 0
|
||||
this._connections = Object.create(null)
|
||||
|
||||
this._httpRequestWatchers = Object.create(null)
|
||||
|
||||
// TODO: remove when no longer necessary.
|
||||
this._proxyRequests = Object.create(null)
|
||||
|
||||
this._authenticationProviders = new Set()
|
||||
|
||||
this._watchObjects()
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------
|
||||
|
||||
async start (config) {
|
||||
// Connects to Redis.
|
||||
const redis = createRedisClient(config.redis && config.redis.uri)
|
||||
|
||||
// Creates persistent collections.
|
||||
this._acls = new Acls({
|
||||
connection: redis,
|
||||
prefix: 'xo:acl',
|
||||
indexes: ['subject', 'object']
|
||||
})
|
||||
this._groups = new Groups({
|
||||
connection: redis,
|
||||
prefix: 'xo:group'
|
||||
})
|
||||
this._servers = new Servers({
|
||||
connection: redis,
|
||||
prefix: 'xo:server',
|
||||
indexes: ['host']
|
||||
})
|
||||
this._tokens = new Tokens({
|
||||
connection: redis,
|
||||
prefix: 'xo:token',
|
||||
indexes: ['user_id']
|
||||
})
|
||||
this._users = new Users({
|
||||
connection: redis,
|
||||
prefix: 'xo:user',
|
||||
indexes: ['email']
|
||||
})
|
||||
|
||||
// Proxies tokens/users related events to XO and removes tokens
|
||||
// when their related user is removed.
|
||||
this._tokens.on('remove', ids => {
|
||||
for (let id of ids) {
|
||||
this.emit(`token.revoked:${id}`)
|
||||
}
|
||||
})
|
||||
this._users.on('remove', async function (ids) {
|
||||
for (let id of ids) {
|
||||
this.emit(`user.revoked:${id}`)
|
||||
const tokens = await this._tokens.get({ user_id: id })
|
||||
for (let token of tokens) {
|
||||
this._tokens.remove(token.id)
|
||||
}
|
||||
}
|
||||
}.bind(this))
|
||||
|
||||
// Connects to existing servers.
|
||||
for (let server of await this._servers.get()) {
|
||||
this.connectXenServer(server.id).catch(error => {
|
||||
console.error(
|
||||
`[WARN] ${server.host}:`,
|
||||
error[0] || error.stack || error.code || error
|
||||
)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------
|
||||
|
||||
async addAcl (subjectId, objectId, action) {
|
||||
try {
|
||||
await this._acls.create(subjectId, objectId, action)
|
||||
} catch (error) {
|
||||
if (!(error instanceof ModelAlreadyExists)) {
|
||||
throw error
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async removeAcl (subjectId, objectId, action) {
|
||||
await this._acls.delete(subjectId, objectId, action)
|
||||
}
|
||||
|
||||
async getAclsForUser (userId) {
|
||||
const subjects = (await this.getUser(userId)).groups.concat(userId)
|
||||
|
||||
const acls = []
|
||||
const pushAcls = (function (push) {
|
||||
return function (entries) {
|
||||
push.apply(acls, entries)
|
||||
}
|
||||
})(acls.push)
|
||||
|
||||
const {_acls: collection} = this
|
||||
await Promise.all(map(
|
||||
subjects,
|
||||
subject => collection.get({subject}).then(pushAcls)
|
||||
))
|
||||
|
||||
return acls
|
||||
}
|
||||
|
||||
// TODO: remove when new collection.
|
||||
async getAllAcls () {
|
||||
return this._acls.get()
|
||||
}
|
||||
|
||||
async hasPermission (userId, objectId, permission) {
|
||||
const user = await this.getUser(userId)
|
||||
|
||||
// Special case for super XO administrators.
|
||||
//
|
||||
// TODO: restore when necessary, for now it is already implemented
|
||||
// in resolveParams().
|
||||
// if (user.permission === 'admin') {
|
||||
// return true
|
||||
// }
|
||||
|
||||
const subjects = user.groups.concat(userId)
|
||||
let actions = (await this.getRolesForPermission(permission)).concat(permission)
|
||||
|
||||
const promises = []
|
||||
{
|
||||
const {_acls: acls} = this
|
||||
const throwIfFail = function (success) {
|
||||
if (!success) {
|
||||
// We don't care about an error object.
|
||||
/* eslint no-throw-literal: 0 */
|
||||
throw null
|
||||
}
|
||||
}
|
||||
forEach(subjects, subject => {
|
||||
forEach(actions, action => {
|
||||
promises.push(
|
||||
acls.aclExists(subject, objectId, action).then(throwIfFail)
|
||||
)
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
try {
|
||||
await Bluebird.any(promises)
|
||||
return true
|
||||
} catch (_) {
|
||||
return false
|
||||
}
|
||||
}
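The method builds one `aclExists` check per (subject, action) pair and lets `Bluebird.any` resolve on the first hit; if every pair fails, the aggregate rejection is swallowed and the result is `false`. A sketch with placeholder ids:

// Placeholder ids; a single matching ACL entry is enough for `true`.
const canOperate = await xo.hasPermission(userId, objectId, 'operate')
// pairs checked: every subject in user.groups plus userId,
// against every action in ['operator', 'admin', 'operate']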
|
||||
|
||||
// -----------------------------------------------------------------
|
||||
|
||||
async createUser ({email, password, permission}) {
|
||||
// TODO: use plain objects
|
||||
const user = await this._users.create(email, password, permission)
|
||||
|
||||
return user.properties
|
||||
}
|
||||
|
||||
async deleteUser (id) {
|
||||
if (!await this._users.remove(id)) {
|
||||
throw new NoSuchUser(id)
|
||||
}
|
||||
}
|
||||
|
||||
async updateUser (id, {email, password, permission}) {
|
||||
const user = await this._getUser(id)
|
||||
|
||||
if (email) user.set('email', email)
|
||||
if (password) user.setPassword(password)
|
||||
if (permission) user.set('permission', permission)
|
||||
|
||||
await this._users.save(user.properties)
|
||||
}
|
||||
|
||||
// Merge this method in getUser() when plain objects.
|
||||
async _getUser (id) {
|
||||
const user = await this._users.first(id)
|
||||
if (!user) {
|
||||
throw new NoSuchUser(id)
|
||||
}
|
||||
|
||||
return user
|
||||
}
|
||||
|
||||
// TODO: this method will no longer be async when users are
|
||||
// integrated into the main collection.
|
||||
async getUser (id) {
|
||||
return (await this._getUser(id)).properties
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------
|
||||
|
||||
async createGroup ({name}) {
|
||||
// TODO: use plain objects.
|
||||
const group = (await this._groups.create(name)).properties
|
||||
|
||||
group.users = JSON.parse(group.users)
|
||||
return group
|
||||
}
|
||||
|
||||
async deleteGroup (id) {
|
||||
if (!await this._groups.remove(id)) {
|
||||
throw new NoSuchGroup(id)
|
||||
}
|
||||
}
|
||||
|
||||
async updateGroup (id, {name}) {
|
||||
const group = await this.getGroup(id)
|
||||
|
||||
if (name) group.name = name
|
||||
|
||||
await this._groups.save(group)
|
||||
}
|
||||
|
||||
async getGroup (id) {
|
||||
const group = (await this._groups.first(id))
|
||||
if (!group) {
|
||||
throw new NoSuchGroup(id)
|
||||
}
|
||||
|
||||
return group.properties
|
||||
}
|
||||
|
||||
async addUserToGroup (userId, groupId) {
|
||||
const [user, group] = await Promise.all([
|
||||
this.getUser(userId),
|
||||
this.getGroup(groupId)
|
||||
])
|
||||
|
||||
const {groups} = user
|
||||
if (!includes(groups, groupId)) {
|
||||
user.groups.push(groupId)
|
||||
}
|
||||
|
||||
const {users} = group
|
||||
if (!includes(users, userId)) {
|
||||
group.users.push(userId)
|
||||
}
|
||||
|
||||
await Promise.all([
|
||||
this._users.save(user),
|
||||
this._groups.save(group)
|
||||
])
|
||||
}
|
||||
|
||||
async removeUserFromGroup (userId, groupId) {
|
||||
const [user, group] = await Promise.all([
|
||||
this.getUser(userId),
|
||||
this.getGroup(groupId)
|
||||
])
|
||||
|
||||
// TODO: maybe not iterating through the whole arrays?
|
||||
user.groups = filter(user.groups, id => id !== groupId)
|
||||
group.users = filter(group.users, id => id !== userId)
|
||||
|
||||
await Promise.all([
|
||||
this._users.save(user),
|
||||
this._groups.save(group)
|
||||
])
|
||||
}
|
||||
|
||||
async setGroupUsers (groupId, userIds) {
|
||||
const group = await this.getGroup(groupId)
|
||||
|
||||
const newUsersIds = Object.create(null)
|
||||
const oldUsersIds = Object.create(null)
|
||||
forEach(userIds, id => {
|
||||
newUsersIds[id] = null
|
||||
})
|
||||
forEach(group.users, id => {
|
||||
if (id in newUsersIds) {
|
||||
delete newUsersIds[id]
|
||||
} else {
|
||||
oldUsersIds[id] = null
|
||||
}
|
||||
})
|
||||
|
||||
const [newUsers, oldUsers] = await Promise.all([
|
||||
Promise.all(map(newUsersIds, (_, id) => this.getUser(id))),
|
||||
Promise.all(map(oldUsersIds, (_, id) => this.getUser(id)))
|
||||
])
|
||||
|
||||
forEach(newUsers, user => {
|
||||
const {groups} = user
|
||||
if (!includes(groups, groupId)) {
|
||||
user.groups.push(groupId)
|
||||
}
|
||||
})
|
||||
forEach(oldUsers, user => {
|
||||
user.groups = filter(user.groups, id => id !== groupId)
|
||||
})
|
||||
|
||||
group.users = userIds
|
||||
|
||||
await Promise.all([
|
||||
Promise.all(map(newUsers, this._users.save, this._users)),
|
||||
Promise.all(map(oldUsers, this._users.save, this._users)),
|
||||
this._groups.save(group)
|
||||
])
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------
|
||||
|
||||
// TODO: delete when merged with the new collection.
|
||||
async getRoles () {
|
||||
return [
|
||||
{
|
||||
id: 'viewer',
|
||||
name: 'Viewer',
|
||||
permissions: [
|
||||
'view'
|
||||
]
|
||||
},
|
||||
{
|
||||
id: 'operator',
|
||||
name: 'Operator',
|
||||
permissions: [
|
||||
'view',
|
||||
'operate'
|
||||
]
|
||||
},
|
||||
{
|
||||
id: 'admin',
|
||||
name: 'Admin',
|
||||
permissions: [
|
||||
'view',
|
||||
'operate',
|
||||
'administrate'
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
// Returns an array of roles which have a given permission.
|
||||
async getRolesForPermission (permission) {
|
||||
const roles = []
|
||||
|
||||
forEach(await this.getRoles(), role => {
|
||||
if (includes(role.permissions, permission)) {
|
||||
roles.push(role.id)
|
||||
}
|
||||
})
|
||||
|
||||
return roles
|
||||
}
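Given the static role table above, the permission-to-roles resolution works out as follows (illustrative call on an `xo` instance):

const roles = await xo.getRolesForPermission('operate')
// ['operator', 'admin']: both roles list 'operate' among their permissions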
|
||||
|
||||
// -----------------------------------------------------------------
|
||||
|
||||
async createAuthenticationToken ({userId}) {
|
||||
// TODO: use plain objects
|
||||
const token = await this._tokens.generate(userId)
|
||||
|
||||
return token.properties
|
||||
}
|
||||
|
||||
async deleteAuthenticationToken (id) {
|
||||
if (!await this._tokens.remove(id)) {
|
||||
throw new NoSuchAuthenticationToken(id)
|
||||
}
|
||||
}
|
||||
|
||||
async getAuthenticationToken (id) {
|
||||
const token = await this._tokens.first(id)
|
||||
if (!token) {
|
||||
throw new NoSuchAuthenticationToken(id)
|
||||
}
|
||||
|
||||
return token.properties
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------
|
||||
|
||||
async registerXenServer ({host, username, password}) {
|
||||
// FIXME: We are storing passwords which is bad!
|
||||
// Could we use tokens instead?
|
||||
// TODO: use plain objects
|
||||
const server = await this._servers.add({host, username, password})
|
||||
|
||||
return server.properties
|
||||
}
|
||||
|
||||
async unregisterXenServer (id) {
|
||||
this.disconnectXenServer(id).catch(() => {})
|
||||
|
||||
if (!await this._servers.remove(id)) {
|
||||
throw new NoSuchXenServer(id)
|
||||
}
|
||||
}
|
||||
|
||||
async updateXenServer (id, {host, username, password}) {
|
||||
const server = await this._getXenServer(id)
|
||||
|
||||
if (host) server.set('host', host)
|
||||
if (username) server.set('username', username)
|
||||
if (password) server.set('password', password)
|
||||
|
||||
await this._servers.update(server)
|
||||
}
|
||||
|
||||
// TODO: this method will no longer be async when servers are
|
||||
// integrated into the main collection.
|
||||
async _getXenServer (id) {
|
||||
const server = await this._servers.first(id)
|
||||
if (!server) {
|
||||
throw new NoSuchXenServer(id)
|
||||
}
|
||||
|
||||
return server
|
||||
}
|
||||
|
||||
@autobind
|
||||
_onXenAdd (xapiObjects) {
|
||||
const {_objects: objects} = this
|
||||
forEach(xapiObjects, (xapiObject, id) => {
|
||||
const transform = xapiObjectsToXo[xapiObject.$type]
|
||||
if (!transform) {
|
||||
return
|
||||
}
|
||||
|
||||
const xoObject = transform(xapiObject)
|
||||
xoObject.id = id
|
||||
xoObject.ref = xapiObject.$ref
|
||||
if (!xoObject.type) {
|
||||
xoObject.type = xapiObject.$type
|
||||
}
|
||||
|
||||
const {$pool: pool} = xapiObject
|
||||
Object.defineProperties(xoObject, {
|
||||
poolRef: { value: pool.$ref },
|
||||
$poolId: {
|
||||
enumerable: true,
|
||||
value: pool.$id
|
||||
},
|
||||
ref: { value: xapiObject.$ref }
|
||||
})
|
||||
|
||||
objects.set(id, xoObject)
|
||||
})
|
||||
}
|
||||
|
||||
@autobind
|
||||
_onXenRemove (xapiObjects) {
|
||||
const {_objects: objects} = this
|
||||
forEach(xapiObjects, (_, id) => {
|
||||
if (objects.has(id)) {
|
||||
objects.remove(id)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
// TODO the previous state should be marked as connected.
|
||||
async connectXenServer (id) {
|
||||
const server = (await this._getXenServer(id)).properties
|
||||
|
||||
const xapi = this._xapis[server.id] = new Xapi({
|
||||
url: server.host,
|
||||
auth: {
|
||||
user: server.username,
|
||||
password: server.password
|
||||
}
|
||||
})
|
||||
|
||||
const {objects} = xapi
|
||||
objects.on('add', this._onXenAdd)
|
||||
objects.on('update', this._onXenAdd)
|
||||
objects.on('remove', this._onXenRemove)
|
||||
|
||||
// Each time objects are refreshed, registers the connection with
|
||||
// the pool identifier.
|
||||
objects.on('finish', () => {
|
||||
this._xapis[xapi.pool.$id] = xapi
|
||||
})
|
||||
|
||||
try {
|
||||
await xapi.connect()
|
||||
} catch (error) {
|
||||
if (error.code === 'SESSION_AUTHENTICATION_FAILED') {
|
||||
throw new JsonRpcError('authentication failed')
|
||||
}
|
||||
if (error.code === 'EHOSTUNREACH') {
|
||||
throw new JsonRpcError('host unreachable')
|
||||
}
|
||||
throw error
|
||||
}
|
||||
}
|
||||
|
||||
// TODO the previous state should be marked as disconnected.
|
||||
async disconnectXenServer (id) {
|
||||
const xapi = this._xapis[id]
|
||||
if (!xapi) {
|
||||
throw new NoSuchXenServer(id)
|
||||
}
|
||||
|
||||
delete this._xapis[id]
|
||||
if (xapi.pool) {
|
||||
delete this._xapis[xapi.pool.id]
|
||||
}
|
||||
|
||||
return xapi.disconnect()
|
||||
}
|
||||
|
||||
// Returns the XAPI connection associated to an object.
|
||||
getXAPI (object, type) {
|
||||
if (isString(object)) {
|
||||
object = this.getObject(object, type)
|
||||
}
|
||||
|
||||
const {$poolId: poolId} = object
|
||||
if (!poolId) {
|
||||
throw new Error(`object ${object.id} does not belong to a pool`)
|
||||
}
|
||||
|
||||
const xapi = this._xapis[poolId]
|
||||
if (!xapi) {
|
||||
throw new Error(`no connection found for object ${object.id}`)
|
||||
}
|
||||
|
||||
return xapi
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------
|
||||
|
||||
// Returns an object from its key or UUID.
|
||||
//
|
||||
// TODO: should throw a NoSuchObject error on failure.
|
||||
getObject (key, type) {
|
||||
const {
|
||||
all,
|
||||
indexes: {
|
||||
byRef
|
||||
}
|
||||
} = this._objects
|
||||
|
||||
const obj = all[key] || byRef[key]
|
||||
if (!obj) {
|
||||
throw new NoSuchObject(key, type)
|
||||
}
|
||||
|
||||
if (type != null && (
|
||||
isString(type) && type !== obj.type ||
|
||||
!includes(type, obj.type) // Array
|
||||
)) {
|
||||
throw new NoSuchObject(key, type)
|
||||
}
|
||||
|
||||
return obj
|
||||
}
|
||||
|
||||
getObjects (keys) {
|
||||
const {
|
||||
all,
|
||||
indexes: {
|
||||
byRef
|
||||
}
|
||||
} = this._objects
|
||||
|
||||
// Returns all objects if no keys have been passed.
|
||||
if (!keys) {
|
||||
return all
|
||||
}
|
||||
|
||||
// Fetches all objects and ignores those missing.
|
||||
const result = []
|
||||
forEach(keys, key => {
|
||||
const object = all[key] || byRef[key]
|
||||
if (object) {
|
||||
result.push(object)
|
||||
}
|
||||
})
|
||||
return result
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------
|
||||
|
||||
createUserConnection () {
|
||||
const {_connections: connections} = this
|
||||
|
||||
const connection = new Connection()
|
||||
const id = connection.id = this._nextConId++
|
||||
|
||||
connections[id] = connection
|
||||
connection.on('close', () => {
|
||||
delete connections[id]
|
||||
})
|
||||
|
||||
return connection
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------
|
||||
|
||||
_handleHttpRequest (req, res, next) {
|
||||
const {url} = req
|
||||
|
||||
const {_httpRequestWatchers: watchers} = this
|
||||
const watcher = watchers[url]
|
||||
if (!watcher) {
|
||||
next()
|
||||
return
|
||||
}
|
||||
delete watchers[url]
|
||||
|
||||
const {fn, data} = watcher
|
||||
Bluebird.try(fn, [req, res, data]).then(
|
||||
result => {
|
||||
if (result != null) {
|
||||
res.end(JSON.stringify(result))
|
||||
}
|
||||
},
|
||||
error => {
|
||||
console.error('HTTP request error', error.stack || error)
|
||||
|
||||
if (!res.headersSent) {
|
||||
res.writeHead(500)
|
||||
}
|
||||
res.end('unknown error')
|
||||
}
|
||||
)
|
||||
}
|
||||
|
||||
async registerHttpRequest (fn, data) {
|
||||
const {_httpRequestWatchers: watchers} = this
|
||||
|
||||
const url = await (function generateUniqueUrl () {
|
||||
return generateToken().then(token => {
|
||||
const url = `/api/${token}`
|
||||
|
||||
return url in watchers ?
|
||||
generateUniqueUrl() :
|
||||
url
|
||||
})
|
||||
})()
|
||||
|
||||
watchers[url] = {
|
||||
fn,
|
||||
data
|
||||
}
|
||||
|
||||
return url
|
||||
}
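`registerHttpRequest` reserves a one-shot `/api/<token>` URL; `_handleHttpRequest` later calls the handler with `(req, res, data)` and serialises a non-null return value as JSON. A hedged sketch (the streaming handler and its `someReadableStream` are assumptions, not part of this codebase):

const url = await xo.registerHttpRequest((req, res, data) => {
  // hypothetical one-shot download handler
  res.writeHead(200, {'content-type': 'application/octet-stream'})
  someReadableStream.pipe(res) // `someReadableStream` is an assumption
}, {description: 'vm export'})
// `url` (e.g. '/api/<token>') can now be handed to the client; the entry is
// deleted on first use by _handleHttpRequest.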
|
||||
|
||||
// -----------------------------------------------------------------
|
||||
|
||||
// TODO: remove when no longer necessary.
|
||||
_handleProxyRequest (req, res, next) {
|
||||
const {url} = req
|
||||
const request = this._proxyRequests[url]
|
||||
if (!request || req.method !== request.proxyMethod) {
|
||||
next()
|
||||
return
|
||||
}
|
||||
|
||||
// A proxy request can only be used once.
|
||||
delete this._proxyRequests[url]
|
||||
|
||||
proxyRequest(request, req, res)
|
||||
|
||||
if (request.onSuccess) {
|
||||
res.on('finish', request.onSuccess)
|
||||
}
|
||||
|
||||
const onFailure = request.onFailure || (() => {})
|
||||
|
||||
req.on('close', onFailure)
|
||||
|
||||
const closeConnection = () => {
|
||||
if (!res.headersSent) {
|
||||
res.writeHead(500)
|
||||
}
|
||||
res.end()
|
||||
|
||||
onFailure()
|
||||
}
|
||||
req.on('error', error => {
|
||||
console.warn('request error', error.stack || error)
|
||||
closeConnection()
|
||||
})
|
||||
res.on('error', error => {
|
||||
console.warn('response error', error.stack || error)
|
||||
closeConnection()
|
||||
})
|
||||
}
|
||||
async registerProxyRequest (opts) {
|
||||
if (isString(opts)) {
|
||||
opts = parseUrl(opts)
|
||||
} else {
|
||||
opts.method = opts.method != null ?
|
||||
opts.method.toUpperCase() :
|
||||
'GET'
|
||||
|
||||
opts.proxyMethod = opts.proxyMethod != null ?
|
||||
opts.proxyMethod.toUpperCase() :
|
||||
opts.method
|
||||
}
|
||||
|
||||
opts.createdAt = Date.now()
|
||||
|
||||
const url = `/${await generateToken()}`
|
||||
this._proxyRequests[url] = opts
|
||||
|
||||
return url
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------
|
||||
|
||||
registerAuthenticationProvider (provider) {
|
||||
return this._authenticationProviders.add(provider)
|
||||
}
|
||||
|
||||
unregisterAuthenticationProvider (provider) {
|
||||
return this._authenticationProviders.remove(provider)
|
||||
}
|
||||
|
||||
async authenticateUser (credentials) {
|
||||
// TODO: remove when email has been replaced by username.
|
||||
if (credentials.email) {
|
||||
credentials.username = credentials.email
|
||||
} else if (credentials.username) {
|
||||
credentials.email = credentials.username
|
||||
}
|
||||
|
||||
for (let provider of this._authenticationProviders) {
|
||||
try {
|
||||
const result = await provider(credentials)
|
||||
|
||||
if (result instanceof User) {
|
||||
return result
|
||||
}
|
||||
|
||||
// TODO: replace email by username.
|
||||
if (result.username) {
|
||||
result.email = result.username
|
||||
delete result.username
|
||||
}
|
||||
|
||||
const user = await this._users.first(result)
|
||||
if (user) return user
|
||||
|
||||
return this._users.create(result.email)
|
||||
} catch (error) {
|
||||
// Authentication providers may just throw `null` to indicate
|
||||
// they could not authenticate the user without any special
|
||||
// errors.
|
||||
if (error) console.error(error.stack || error)
|
||||
}
|
||||
}
|
||||
|
||||
return false
|
||||
}
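Authentication providers are plain async functions receiving the credentials; they may return a `User`, return a descriptor object (with `username`/`email`), or throw `null` to decline. A purely illustrative provider, not a shipped plugin:

xo.registerAuthenticationProvider(async ({username, password}) => {
  // Illustrative check only; a real provider would query LDAP, a database, etc.
  if (username === 'admin' && password === 'secret') {
    return {username} // authenticateUser() maps this back to `email`
  }
  throw null // not our user: let the next provider try
})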
|
||||
|
||||
// -----------------------------------------------------------------
|
||||
|
||||
// Watches object changes.
|
||||
//
|
||||
// Some should be forwarded to connected clients.
|
||||
// Some should be persistently saved.
|
||||
_watchObjects () {
|
||||
const {
|
||||
_connections: connections,
|
||||
_objects: objects
|
||||
} = this
|
||||
|
||||
let entered, exited
|
||||
function reset () {
|
||||
entered = Object.create(null)
|
||||
exited = Object.create(null)
|
||||
}
|
||||
reset()
|
||||
|
||||
function onAdd (items) {
|
||||
forEach(items, (item, id) => {
|
||||
entered[id] = item
|
||||
})
|
||||
}
|
||||
objects.on('add', onAdd)
|
||||
objects.on('update', onAdd)
|
||||
|
||||
objects.on('remove', (items) => {
|
||||
forEach(items, (_, id) => {
|
||||
// We don't care about the value here, so we choose `0`
|
||||
// because it is small in JSON.
|
||||
exited[id] = 0
|
||||
})
|
||||
})
|
||||
|
||||
objects.on('finish', () => {
|
||||
const enteredMessage = !isEmpty(entered) && {
|
||||
type: 'enter',
|
||||
items: entered
|
||||
}
|
||||
const exitedMessage = !isEmpty(exited) && {
|
||||
type: 'exit',
|
||||
items: exited
|
||||
}
|
||||
|
||||
if (!enteredMessage && !exitedMessage) {
|
||||
return
|
||||
}
|
||||
|
||||
forEach(connections, connection => {
|
||||
// Notifies only authenticated clients.
|
||||
if (connection.has('user_id')) {
|
||||
if (enteredMessage) {
|
||||
connection.notify('all', enteredMessage)
|
||||
}
|
||||
if (exitedMessage) {
|
||||
connection.notify('all', exitedMessage)
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
reset()
|
||||
})
|
||||
}
|
||||
}
|
@ -5,6 +5,7 @@ Description= XO Server
|
||||
After=network-online.target
|
||||
|
||||
[Service]
|
||||
Environment="DEBUG=xo:main"
|
||||
ExecStart=/usr/local/bin/xo-server
|
||||
Restart=always
|
||||
SyslogIdentifier=xo-server