Compare commits
415 Commits
xo-server/
...
xo-server/
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
876c63fe80 | ||
|
|
ba66af922f | ||
|
|
28b9bbe54f | ||
|
|
bf6bd7cbdc | ||
|
|
ddcb2468a6 | ||
|
|
f048b58935 | ||
|
|
09f6200c2e | ||
|
|
354692fb06 | ||
|
|
2c5858c2e0 | ||
|
|
1f41fd0436 | ||
|
|
e0bbefdfae | ||
|
|
bc6fbb2797 | ||
|
|
b579cf8128 | ||
|
|
a94ed014b7 | ||
|
|
0db991b668 | ||
|
|
347ced6942 | ||
|
|
5d7a775b2b | ||
|
|
df732ab4bf | ||
|
|
31cd3953d6 | ||
|
|
4666b13892 | ||
|
|
37d7ddb4b0 | ||
|
|
3abbaeb44b | ||
|
|
847ea49042 | ||
|
|
779068c2ee | ||
|
|
140cd6882d | ||
|
|
2e295c2391 | ||
|
|
596b0995f4 | ||
|
|
b61fe97893 | ||
|
|
209aa2ebe6 | ||
|
|
c03a0e857e | ||
|
|
2854d698e6 | ||
|
|
944163be0e | ||
|
|
269a9eaff0 | ||
|
|
7f9c49cbc4 | ||
|
|
2b6bfeeb15 | ||
|
|
fa9742bc92 | ||
|
|
472e419abc | ||
|
|
169d11387b | ||
|
|
e59ac6d947 | ||
|
|
e193b45562 | ||
|
|
1ac34f810e | ||
|
|
e65e5c6e5f | ||
|
|
af6365c76a | ||
|
|
8c672b23b5 | ||
|
|
3b53f5ac11 | ||
|
|
ccdc744748 | ||
|
|
261f0b4bf0 | ||
|
|
495b59c2e5 | ||
|
|
d6e1c13c39 | ||
|
|
f7f13b9e07 | ||
|
|
62564d747f | ||
|
|
1d5d59c4c0 | ||
|
|
e8380b8a12 | ||
|
|
c304d9cc62 | ||
|
|
aad4ebf287 | ||
|
|
6c2f48181c | ||
|
|
480b6ff7d6 | ||
|
|
4bdd6f972c | ||
|
|
6674d8456a | ||
|
|
d1478ff694 | ||
|
|
cb20d46b74 | ||
|
|
9dd2538043 | ||
|
|
f25136a512 | ||
|
|
03eb56ad2a | ||
|
|
2508840701 | ||
|
|
6e098f5a4f | ||
|
|
31b33406fd | ||
|
|
7ab7c763ed | ||
|
|
06258e757a | ||
|
|
5919b43a21 | ||
|
|
7d4b9521e7 | ||
|
|
f9d2fd7997 | ||
|
|
bdbc20c3c6 | ||
|
|
69d6d03714 | ||
|
|
f40e1e55b0 | ||
|
|
b9082ed838 | ||
|
|
4edfefa9a2 | ||
|
|
0f98ee5407 | ||
|
|
7fdf119873 | ||
|
|
3c054e6ea1 | ||
|
|
98899ece72 | ||
|
|
2061a006d0 | ||
|
|
5496c2d7fd | ||
|
|
d6b862a4a9 | ||
|
|
d581f8a852 | ||
|
|
3a593ee35a | ||
|
|
415d34fdaa | ||
|
|
7d28191bb5 | ||
|
|
e2c7693370 | ||
|
|
f17ff02f4d | ||
|
|
225043e01d | ||
|
|
56f78349f8 | ||
|
|
8839d4f55a | ||
|
|
2562aec1d2 | ||
|
|
db2361be84 | ||
|
|
d08fcbfef3 | ||
|
|
7601b93e65 | ||
|
|
1103ec40e0 | ||
|
|
af32c7e3db | ||
|
|
170918eb3b | ||
|
|
a91e615a8d | ||
|
|
cc92c26fe3 | ||
|
|
937135db32 | ||
|
|
01366558b4 | ||
|
|
b0dbd54ea4 | ||
|
|
f113915307 | ||
|
|
0a3c3d9bb1 | ||
|
|
ba2e005c3e | ||
|
|
b9ea52d65f | ||
|
|
f1e328d333 | ||
|
|
23f1965398 | ||
|
|
fc82f185cb | ||
|
|
56b25f373f | ||
|
|
1ac6add122 | ||
|
|
91b1a903f9 | ||
|
|
a8d6654ef5 | ||
|
|
63093b1be6 | ||
|
|
60abe8f37e | ||
|
|
7ba3909aa1 | ||
|
|
eecdba2d05 | ||
|
|
7bdc005aa7 | ||
|
|
d46703fdc4 | ||
|
|
e4aa85f603 | ||
|
|
233124ef50 | ||
|
|
36a3012de2 | ||
|
|
2b4ee96ed7 | ||
|
|
85a2afd55c | ||
|
|
6cd0d8456a | ||
|
|
7750a0a773 | ||
|
|
a5364b9257 | ||
|
|
e0e7b1406d | ||
|
|
38b67a0002 | ||
|
|
18dd4f8a52 | ||
|
|
879f9b4ea9 | ||
|
|
3db0dda67a | ||
|
|
ed9ee15b90 | ||
|
|
44ff85e8e9 | ||
|
|
cb07e9ba11 | ||
|
|
bfe05ce5fc | ||
|
|
64ee23cec0 | ||
|
|
c022d3c4a4 | ||
|
|
69c764301f | ||
|
|
2f777daef6 | ||
|
|
a10bf7330e | ||
|
|
782bb5967d | ||
|
|
aeb2f55f0d | ||
|
|
ae68749b1b | ||
|
|
a3c25d56a0 | ||
|
|
d2b9cc8df9 | ||
|
|
2027daa75c | ||
|
|
f3493a08bd | ||
|
|
f3963269ae | ||
|
|
ae2212c245 | ||
|
|
3a19ac4c93 | ||
|
|
666f546cf0 | ||
|
|
464f57d7da | ||
|
|
2a192f33a1 | ||
|
|
9ca2674261 | ||
|
|
24bc91dc0c | ||
|
|
cf2d5b502f | ||
|
|
61450ef602 | ||
|
|
78f1d1738e | ||
|
|
9f595cf5f7 | ||
|
|
25b8e49975 | ||
|
|
d40086cd13 | ||
|
|
8f9d8d93b9 | ||
|
|
1080c10004 | ||
|
|
866aeca220 | ||
|
|
121b3afc61 | ||
|
|
e8406b04b4 | ||
|
|
8e7fe81806 | ||
|
|
852807b5d7 | ||
|
|
9928d47fa2 | ||
|
|
412a1bd62a | ||
|
|
b290520951 | ||
|
|
dde677b6d3 | ||
|
|
75030847bd | ||
|
|
e7b9cb76bc | ||
|
|
e96c4c0dd3 | ||
|
|
b553b3fa50 | ||
|
|
c6fb924b8f | ||
|
|
b13844c4a6 | ||
|
|
ab6c83a3fc | ||
|
|
7e0a97973f | ||
|
|
6a8a79bba5 | ||
|
|
4a0c58c50a | ||
|
|
eb0c963332 | ||
|
|
023fe82932 | ||
|
|
2e1a06c7bf | ||
|
|
8b6961d40c | ||
|
|
53351877da | ||
|
|
522445894e | ||
|
|
550351bb16 | ||
|
|
328adbb56f | ||
|
|
44a36bbba3 | ||
|
|
4cc4adeda6 | ||
|
|
c14e6f2a63 | ||
|
|
cfcb2d54d8 | ||
|
|
010d60e504 | ||
|
|
eabde07ff6 | ||
|
|
be19ad5f2a | ||
|
|
d1d0816961 | ||
|
|
7be7170504 | ||
|
|
478272f515 | ||
|
|
09af6958c8 | ||
|
|
adb3a2b64e | ||
|
|
1ee7e842dc | ||
|
|
b080a57406 | ||
|
|
7c017e345a | ||
|
|
4b91343155 | ||
|
|
02a3df8ad0 | ||
|
|
6a7080f4ee | ||
|
|
4547042577 | ||
|
|
0e39eea7f8 | ||
|
|
1e5aefea63 | ||
|
|
02c4f333b0 | ||
|
|
1e8fc4020b | ||
|
|
f969701ac1 | ||
|
|
b236243857 | ||
|
|
39edc64922 | ||
|
|
f22ece403f | ||
|
|
f5423bb314 | ||
|
|
b1e5945ebe | ||
|
|
76b5be8171 | ||
|
|
804bca2041 | ||
|
|
10602b47b4 | ||
|
|
8d7c522596 | ||
|
|
3ac455c5a7 | ||
|
|
2b19a459df | ||
|
|
41ba2d9bf6 | ||
|
|
a7b5eb69d3 | ||
|
|
67c209bb5e | ||
|
|
a6d436d9ea | ||
|
|
652c784e13 | ||
|
|
a0a3b7a158 | ||
|
|
789f51bd2a | ||
|
|
c2f1a74f96 | ||
|
|
a9ed7a3f3b | ||
|
|
b348e88a5f | ||
|
|
1615395866 | ||
|
|
e483abcad0 | ||
|
|
12b6760f6e | ||
|
|
6fde6d7eac | ||
|
|
a7ef891217 | ||
|
|
8f22dfe87b | ||
|
|
2dc7fab39a | ||
|
|
74cb2e3c63 | ||
|
|
6e763a58f1 | ||
|
|
a8e72ed410 | ||
|
|
fcdfd5f936 | ||
|
|
f1faa463c1 | ||
|
|
a0f4952b54 | ||
|
|
bd82ded07d | ||
|
|
016e17dedb | ||
|
|
5cd3e1b368 | ||
|
|
b2b39458da | ||
|
|
07288b3f26 | ||
|
|
90f79b7708 | ||
|
|
e220786a20 | ||
|
|
f16b993294 | ||
|
|
c241bea3bf | ||
|
|
084654cd3c | ||
|
|
d21742afb6 | ||
|
|
b5259384e8 | ||
|
|
bf78ad9fbe | ||
|
|
ab3577c369 | ||
|
|
6efb90c94e | ||
|
|
cbcc400eb4 | ||
|
|
15aec7da7e | ||
|
|
46535e4f56 | ||
|
|
e3f945c079 | ||
|
|
04239c57fe | ||
|
|
ad4439ed55 | ||
|
|
9fe3ef430f | ||
|
|
ff30773097 | ||
|
|
f7531d1e18 | ||
|
|
658008ab64 | ||
|
|
b089d63112 | ||
|
|
ee9b1b7f57 | ||
|
|
cd0fc8176f | ||
|
|
8e291e3e46 | ||
|
|
e3024076cd | ||
|
|
6105874abc | ||
|
|
1855f7829d | ||
|
|
456e8bd9c0 | ||
|
|
d5f2efac26 | ||
|
|
21e692623c | ||
|
|
80e9589af5 | ||
|
|
b2b9ae0677 | ||
|
|
63122905e6 | ||
|
|
f99b6f4646 | ||
|
|
39090c2a22 | ||
|
|
76baa8c791 | ||
|
|
74e4b9d6d2 | ||
|
|
bbfc5039f7 | ||
|
|
b2fd694483 | ||
|
|
b03f38ff22 | ||
|
|
fe48811047 | ||
|
|
bd9396b031 | ||
|
|
f0497ec16d | ||
|
|
7e9e179fa7 | ||
|
|
de62464ad8 | ||
|
|
f6911ca195 | ||
|
|
aec09ed8d2 | ||
|
|
51a983e460 | ||
|
|
0eb46e29c7 | ||
|
|
5ee11c7b6b | ||
|
|
b55accd76f | ||
|
|
fef2be1bc7 | ||
|
|
0b3858f91d | ||
|
|
d07ea1b337 | ||
|
|
7e2dbc7358 | ||
|
|
c676f08a7c | ||
|
|
92f24b5728 | ||
|
|
0254e71435 | ||
|
|
2972fc5814 | ||
|
|
975c96217c | ||
|
|
c30c1848bc | ||
|
|
94615d3b36 | ||
|
|
37a00f0d16 | ||
|
|
0dbe70f5af | ||
|
|
7584374b0b | ||
|
|
71ca51dc1a | ||
|
|
aa81e72e45 | ||
|
|
9954bb9c15 | ||
|
|
e5c0250423 | ||
|
|
135799ed5e | ||
|
|
22c3b57960 | ||
|
|
7054dd74a4 | ||
|
|
d4f1e52ef6 | ||
|
|
76a44459cf | ||
|
|
a5590b090c | ||
|
|
74c9a57070 | ||
|
|
06e283e070 | ||
|
|
8ab2ca3f24 | ||
|
|
0eb949ba39 | ||
|
|
be35693814 | ||
|
|
1e5f13795c | ||
|
|
cca2265633 | ||
|
|
0f0d1ac370 | ||
|
|
d52d4ac183 | ||
|
|
841220fd01 | ||
|
|
ca5e10784b | ||
|
|
712319974b | ||
|
|
067a6d01bc | ||
|
|
27825f9e2e | ||
|
|
425eb115dc | ||
|
|
0a5ce55e2b | ||
|
|
dbc8ed9d4c | ||
|
|
e31e990684 | ||
|
|
8618f56481 | ||
|
|
a39fc4667e | ||
|
|
4c369e240b | ||
|
|
4e291d01d4 | ||
|
|
b32fce46cb | ||
|
|
d1fcd45aac | ||
|
|
ebdb92c708 | ||
|
|
112909d35b | ||
|
|
c6e2c559f1 | ||
|
|
cf8613886a | ||
|
|
39c25c2001 | ||
|
|
9c9721ade5 | ||
|
|
4b8abe4ce8 | ||
|
|
33dfaba276 | ||
|
|
dd8bbcf358 | ||
|
|
cc3e4369ed | ||
|
|
548355fce6 | ||
|
|
a4e0e6544b | ||
|
|
62067e0801 | ||
|
|
5eb40d2299 | ||
|
|
53990a531b | ||
|
|
d799aea3c4 | ||
|
|
9af86cbba2 | ||
|
|
6fbfece4ff | ||
|
|
d56cca7873 | ||
|
|
fa1096a6ba | ||
|
|
16ff721331 | ||
|
|
798ee9dc46 | ||
|
|
bc17c60305 | ||
|
|
432688d577 | ||
|
|
da8d4b47f1 | ||
|
|
ed1e2e2449 | ||
|
|
b9744b4688 | ||
|
|
2f328f8f37 | ||
|
|
ffefd7e50b | ||
|
|
c2445f8a7c | ||
|
|
54d44079cc | ||
|
|
87f089a12c | ||
|
|
aa2172e4db | ||
|
|
0a33e94e79 | ||
|
|
fada54abae | ||
|
|
802641b719 | ||
|
|
1da93829d4 | ||
|
|
9e7acbc49a | ||
|
|
318765d40b | ||
|
|
94ba20dfa1 | ||
|
|
2ad3dc4a32 | ||
|
|
eef940dd7c | ||
|
|
1b5fc12ac1 | ||
|
|
c1c7b8dfcd | ||
|
|
d4510c2afe | ||
|
|
f241f073a3 | ||
|
|
26a6c72611 | ||
|
|
51cee7804b | ||
|
|
52228430f1 | ||
|
|
4fcd45d8a4 | ||
|
|
ea5736947d | ||
|
|
6b5f36fb7e | ||
|
|
b328d6d95f | ||
|
|
2e06921bf8 | ||
|
|
fd95e2d711 | ||
|
|
490e253b79 | ||
|
|
8a10f5cd52 | ||
|
|
5cebcc2424 | ||
|
|
7663b89289 |
2
.gitignore
vendored
2
.gitignore
vendored
@@ -1,5 +1,7 @@
|
||||
/dist/
|
||||
/node_modules/
|
||||
/src/api/index.js
|
||||
/src/xo-mixins/index.js
|
||||
|
||||
npm-debug.log
|
||||
npm-debug.log.*
|
||||
|
||||
@@ -1,8 +1,8 @@
|
||||
language: node_js
|
||||
node_js:
|
||||
- 'stable'
|
||||
# - 'stable'
|
||||
- '4'
|
||||
- '0.12'
|
||||
- '0.10'
|
||||
|
||||
# Use containers.
|
||||
# http://docs.travis-ci.com/user/workers/container-based-infrastructure/
|
||||
|
||||
@@ -1,12 +1,16 @@
|
||||
try { require('source-map-support/register') } catch (_) {}
|
||||
Error.stackTraceLimit = 100
|
||||
|
||||
// Async stacks.
|
||||
try { require('trace') } catch (_) {}
|
||||
//
|
||||
// Disabled for now as it cause a huge memory usage with
|
||||
// fs.createReadStream().
|
||||
// TODO: find a way to reenable.
|
||||
//
|
||||
// try { require('trace') } catch (_) {}
|
||||
|
||||
// Removes node_modules and internal modules.
|
||||
// Removes internal modules.
|
||||
try {
|
||||
var sep = require('path').sep
|
||||
var path = __dirname + sep + 'node_modules' + sep
|
||||
|
||||
require('stack-chain').filter.attach(function (_, frames) {
|
||||
var filtered = frames.filter(function (frame) {
|
||||
@@ -17,10 +21,7 @@ try {
|
||||
name &&
|
||||
|
||||
// contains a separator (no internal modules)
|
||||
name.indexOf(sep) !== -1 &&
|
||||
|
||||
// does not start with the current path followed by node_modules.
|
||||
name.lastIndexOf(path, 0) !== 0
|
||||
name.indexOf(sep) !== -1
|
||||
)
|
||||
})
|
||||
|
||||
@@ -31,3 +32,6 @@ try {
|
||||
: frames
|
||||
})
|
||||
} catch (_) {}
|
||||
|
||||
// Source maps.
|
||||
try { require('julien-f-source-map-support/register') } catch (_) {}
|
||||
|
||||
@@ -7,4 +7,7 @@
|
||||
// Better stack traces if possible.
|
||||
require('../better-stacks')
|
||||
|
||||
// Make unhandled rejected promises visible on exit.
|
||||
require('loud-rejection/register')
|
||||
|
||||
require('exec-promise')(require('../'))
|
||||
|
||||
10
bin/xo-server-logs
Executable file
10
bin/xo-server-logs
Executable file
@@ -0,0 +1,10 @@
|
||||
#!/usr/bin/env node
|
||||
|
||||
'use strict'
|
||||
|
||||
// ===================================================================
|
||||
|
||||
// Better stack traces if possible.
|
||||
require('../better-stacks')
|
||||
|
||||
require('exec-promise')(require('../dist/logs-cli'))
|
||||
27
config.json
Normal file
27
config.json
Normal file
@@ -0,0 +1,27 @@
|
||||
// Vendor config: DO NOT TOUCH!
|
||||
//
|
||||
// See sample.config.yaml to override.
|
||||
{
|
||||
"http": {
|
||||
"listen": [
|
||||
{
|
||||
"port": 80
|
||||
}
|
||||
],
|
||||
"mounts": {}
|
||||
},
|
||||
"datadir": "/var/lib/xo-server/data",
|
||||
|
||||
// Should users be created on first sign in?
|
||||
//
|
||||
// Necessary for external authentication providers.
|
||||
"createUserOnFirstSignin": true,
|
||||
|
||||
// Whether API logs should contains the full request/response on
|
||||
// errors.
|
||||
//
|
||||
// This is disabled by default for performance (lots of data) and
|
||||
// security concerns (avoiding sensitive data in the logs) but can
|
||||
// be turned for investigation by the administrator.
|
||||
"verboseApiLogsOnErrors": false
|
||||
}
|
||||
67
package.json
67
package.json
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "xo-server",
|
||||
"version": "4.7.0",
|
||||
"version": "4.13.1",
|
||||
"license": "AGPL-3.0",
|
||||
"description": "Server part of Xen-Orchestra",
|
||||
"keywords": [
|
||||
@@ -19,6 +19,7 @@
|
||||
"better-stacks.js",
|
||||
"bin/",
|
||||
"dist/",
|
||||
"config.json",
|
||||
"index.js",
|
||||
"signin.jade"
|
||||
],
|
||||
@@ -29,73 +30,102 @@
|
||||
"type": "git",
|
||||
"url": "git://github.com/vatesfr/xo-server.git"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=0.12 <5"
|
||||
},
|
||||
"dependencies": {
|
||||
"@marsaud/smb2-promise": "^0.1.0",
|
||||
"app-conf": "^0.4.0",
|
||||
"babel-runtime": "^5",
|
||||
"base64url": "1.0.4",
|
||||
"base64url": "^1.0.5",
|
||||
"blocked": "^1.1.0",
|
||||
"bluebird": "^2.9.14",
|
||||
"bluebird": "^3.1.1",
|
||||
"body-parser": "^1.13.3",
|
||||
"connect-flash": "^0.1.1",
|
||||
"cookie": "^0.2.3",
|
||||
"cookie-parser": "^1.3.5",
|
||||
"cron": "^1.0.9",
|
||||
"d3-time-format": "^0.3.0",
|
||||
"debug": "^2.1.3",
|
||||
"escape-string-regexp": "^1.0.3",
|
||||
"event-to-promise": "^0.4.0",
|
||||
"event-to-promise": "^0.6.0",
|
||||
"exec-promise": "^0.5.1",
|
||||
"execa": "^0.2.2",
|
||||
"express": "^4.13.3",
|
||||
"express-session": "^1.11.3",
|
||||
"fs-extra": "^0.24.0",
|
||||
"fatfs": "^0.10.3",
|
||||
"fs-extra": "^0.26.2",
|
||||
"fs-promise": "^0.3.1",
|
||||
"got": "^4.2.0",
|
||||
"get-stream": "^1.1.0",
|
||||
"graceful-fs": "^4.1.2",
|
||||
"hashy": "~0.4.2",
|
||||
"http-server-plus": "^0.5.1",
|
||||
"human-format": "^0.5.0",
|
||||
"helmet": "^1.1.0",
|
||||
"highland": "^2.5.1",
|
||||
"http-server-plus": "^0.6.4",
|
||||
"human-format": "^0.6.0",
|
||||
"is-my-json-valid": "^2.12.2",
|
||||
"jade": "^1.11.0",
|
||||
"js-yaml": "^3.2.7",
|
||||
"json-rpc-peer": "^0.10.0",
|
||||
"json-rpc-peer": "^0.11.0",
|
||||
"json5": "^0.4.0",
|
||||
"julien-f-source-map-support": "0.0.0",
|
||||
"julien-f-unzip": "^0.2.1",
|
||||
"kindof": "^2.0.0",
|
||||
"level": "^1.3.0",
|
||||
"level-party": "^3.0.4",
|
||||
"level-sublevel": "^6.5.2",
|
||||
"leveldown": "^1.4.2",
|
||||
"lodash.assign": "^3.0.0",
|
||||
"lodash.bind": "^3.0.0",
|
||||
"lodash.difference": "^3.2.0",
|
||||
"lodash.endswith": "^3.0.2",
|
||||
"lodash.every": "^4.0.0",
|
||||
"lodash.filter": "^3.1.0",
|
||||
"lodash.find": "^3.0.0",
|
||||
"lodash.findindex": "^3.0.0",
|
||||
"lodash.foreach": "^3.0.1",
|
||||
"lodash.get": "^3.7.0",
|
||||
"lodash.has": "^3.0.0",
|
||||
"lodash.includes": "^3.1.1",
|
||||
"lodash.invert": "^4.0.1",
|
||||
"lodash.isarray": "^3.0.0",
|
||||
"lodash.isboolean": "^3.0.2",
|
||||
"lodash.isempty": "^3.0.0",
|
||||
"lodash.isfunction": "^3.0.1",
|
||||
"lodash.isinteger": "^4.0.0",
|
||||
"lodash.isobject": "^3.0.0",
|
||||
"lodash.isstring": "^3.0.0",
|
||||
"lodash.keys": "^3.0.4",
|
||||
"lodash.map": "^3.0.0",
|
||||
"lodash.pick": "^3.0.0",
|
||||
"lodash.result": "^3.0.0",
|
||||
"lodash.sortby": "^3.1.4",
|
||||
"lodash.startswith": "^3.0.1",
|
||||
"lodash.trim": "^3.0.1",
|
||||
"loud-rejection": "^1.2.0",
|
||||
"make-error": "^1",
|
||||
"micromatch": "^2.3.2",
|
||||
"minimist": "^1.2.0",
|
||||
"ms": "^0.7.1",
|
||||
"multikey-hash": "^1.0.1",
|
||||
"ndjson": "^1.4.3",
|
||||
"partial-stream": "0.0.0",
|
||||
"passport": "^0.3.0",
|
||||
"passport-local": "^1.0.0",
|
||||
"promise-toolbox": "^0.1.0",
|
||||
"proxy-http-request": "0.1.0",
|
||||
"redis": "^2.0.1",
|
||||
"schema-inspector": "^1.5.1",
|
||||
"semver": "^5.1.0",
|
||||
"serve-static": "^1.9.2",
|
||||
"source-map-support": "^0.3.2",
|
||||
"stack-chain": "^1.3.3",
|
||||
"trace": "^1.2.0",
|
||||
"through2": "^2.0.0",
|
||||
"trace": "^2.0.1",
|
||||
"ws": "~0.8.0",
|
||||
"xen-api": "^0.6.4",
|
||||
"xen-api": "^0.7.2",
|
||||
"xml2js": "~0.4.6",
|
||||
"xo-collection": "^0.4.0"
|
||||
"xo-acl-resolver": "0.0.0-0",
|
||||
"xo-collection": "^0.4.0",
|
||||
"xo-remote-parser": "^0.1.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"babel-eslint": "^4.0.10",
|
||||
@@ -110,17 +140,18 @@
|
||||
"leche": "^2.1.1",
|
||||
"mocha": "^2.2.1",
|
||||
"must": "^0.13.1",
|
||||
"node-inspector": "^0.12.2",
|
||||
"sinon": "^1.14.1",
|
||||
"standard": "^5.2.1"
|
||||
},
|
||||
"scripts": {
|
||||
"build": "gulp build --production",
|
||||
"dev": "gulp build",
|
||||
"build": "npm run build-indexes && gulp build --production",
|
||||
"build-indexes": "./tools/generate-index src/api & ./tools/generate-index src/xo-mixins",
|
||||
"dev": "npm run build-indexes && gulp build",
|
||||
"lint": "standard",
|
||||
"prepublish": "npm run build",
|
||||
"start": "node bin/xo-server",
|
||||
"test": "npm run lint && dependency-check ./package.json && mocha --opts .mocha.opts \"dist/**/*.spec.js\""
|
||||
"test": "mocha --opts .mocha.opts \"dist/**/*.spec.js\"",
|
||||
"posttest": "npm run lint && dependency-check ./package.json"
|
||||
},
|
||||
"standard": {
|
||||
"ignore": [
|
||||
|
||||
@@ -66,6 +66,8 @@ http:
|
||||
#socket: './http.sock'
|
||||
|
||||
# Basic HTTPS.
|
||||
#
|
||||
# You can find the list of possible options there https://nodejs.org/docs/latest/api/tls.html#tls.createServer
|
||||
# -
|
||||
# # The only difference is the presence of the certificate and the
|
||||
# # key.
|
||||
@@ -83,7 +85,7 @@ http:
|
||||
# # certificate authority up to the root.
|
||||
# #
|
||||
# # Default: undefined
|
||||
# certificate: './certificate.pem'
|
||||
# cert: './certificate.pem'
|
||||
|
||||
# # File containing the private key (PEM format).
|
||||
# #
|
||||
@@ -93,6 +95,10 @@ http:
|
||||
# # Default: undefined
|
||||
# key: './key.pem'
|
||||
|
||||
# If set to true, all HTTP traffic will be redirected to the first
|
||||
# HTTPs configuration.
|
||||
#redirectToHttps: true
|
||||
|
||||
# List of files/directories which will be served.
|
||||
mounts:
|
||||
#'/': '/path/to/xo-web/dist/'
|
||||
@@ -109,3 +115,9 @@ redis:
|
||||
#
|
||||
# Default: tcp://localhost:6379
|
||||
#uri: ''
|
||||
|
||||
# Directory containing the database of XO.
|
||||
# Currently used for logs.
|
||||
#
|
||||
# Default: '/var/lib/xo-server/data'
|
||||
#datadir: '/var/lib/xo-server/data'
|
||||
|
||||
11
signin.jade
11
signin.jade
@@ -44,11 +44,12 @@ html
|
||||
.form-group
|
||||
.col-sm-5
|
||||
.checkbox
|
||||
input(
|
||||
name = 'remember-me'
|
||||
type = 'checkbox'
|
||||
)
|
||||
= 'Remember me'
|
||||
label
|
||||
input(
|
||||
name = 'remember-me'
|
||||
type = 'checkbox'
|
||||
)
|
||||
| Remember me
|
||||
.form-group
|
||||
.col-sm-12
|
||||
button.btn.btn-login.btn-block.btn-success
|
||||
|
||||
@@ -50,3 +50,11 @@ export class AlreadyAuthenticated extends JsonRpcError {
|
||||
super('already authenticated', 4)
|
||||
}
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
export class ForbiddenOperation extends JsonRpcError {
|
||||
constructor (operation, reason) {
|
||||
super(`forbidden operation: ${operation}`, 5, reason)
|
||||
}
|
||||
}
|
||||
|
||||
212
src/api.js
212
src/api.js
@@ -1,13 +1,9 @@
|
||||
import createDebug from 'debug'
|
||||
const debug = createDebug('xo:api')
|
||||
|
||||
import assign from 'lodash.assign'
|
||||
import Bluebird from 'bluebird'
|
||||
import forEach from 'lodash.foreach'
|
||||
import getKeys from 'lodash.keys'
|
||||
import isFunction from 'lodash.isfunction'
|
||||
import kindOf from 'kindof'
|
||||
import map from 'lodash.map'
|
||||
import ms from 'ms'
|
||||
import schemaInspector from 'schema-inspector'
|
||||
|
||||
@@ -17,6 +13,11 @@ import {
|
||||
NoSuchObject,
|
||||
Unauthorized
|
||||
} from './api-errors'
|
||||
import {
|
||||
createRawObject,
|
||||
forEach,
|
||||
noop
|
||||
} from './utils'
|
||||
|
||||
// ===================================================================
|
||||
|
||||
@@ -67,100 +68,6 @@ function checkParams (method, params) {
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
// Forward declaration.
|
||||
let checkAuthorization
|
||||
|
||||
function authorized () {}
|
||||
function forbiddden () { // eslint-disable-line no-unused-vars
|
||||
throw null // eslint-disable-line no-throw-literal
|
||||
}
|
||||
function checkMemberAuthorization (member) {
|
||||
return function (userId, object, permission) {
|
||||
const memberObject = this.getObject(object[member])
|
||||
return checkAuthorization.call(this, userId, memberObject, permission)
|
||||
}
|
||||
}
|
||||
|
||||
const checkAuthorizationByTypes = {
|
||||
host (userId, host, permission) {
|
||||
return defaultCheckAuthorization.call(this, userId, host, permission).catch(() => {
|
||||
return checkAuthorization.call(this, userId, host.$pool, permission)
|
||||
})
|
||||
},
|
||||
|
||||
message: checkMemberAuthorization('$object'),
|
||||
|
||||
network (userId, network, permission) {
|
||||
return defaultCheckAuthorization.call(this, userId, network, permission).catch(() => {
|
||||
return checkAuthorization.call(this, userId, network.$pool, permission)
|
||||
})
|
||||
},
|
||||
|
||||
SR (userId, sr, permission) {
|
||||
return defaultCheckAuthorization.call(this, userId, sr, permission).catch(() => {
|
||||
return checkAuthorization.call(this, userId, sr.$pool, permission)
|
||||
})
|
||||
},
|
||||
|
||||
task: checkMemberAuthorization('$host'),
|
||||
|
||||
VBD: checkMemberAuthorization('VDI'),
|
||||
|
||||
// Access to a VDI is granted if the user has access to the
|
||||
// containing SR or to a linked VM.
|
||||
VDI (userId, vdi, permission) {
|
||||
// Check authorization for each of the connected VMs.
|
||||
const promises = map(this.getObjects(vdi.$VBDs, 'VBD'), vbd => {
|
||||
const vm = this.getObject(vbd.VM, 'VM')
|
||||
return checkAuthorization.call(this, userId, vm, permission)
|
||||
})
|
||||
|
||||
// Check authorization for the containing SR.
|
||||
const sr = this.getObject(vdi.$SR, 'SR')
|
||||
promises.push(checkAuthorization.call(this, userId, sr, permission))
|
||||
|
||||
// We need at least one success
|
||||
return Bluebird.any(promises)
|
||||
},
|
||||
|
||||
VIF (userId, vif, permission) {
|
||||
const network = this.getObject(vif.$network)
|
||||
const vm = this.getObject(vif.$VM)
|
||||
|
||||
return Bluebird.any([
|
||||
checkAuthorization.call(this, userId, network, permission),
|
||||
checkAuthorization.call(this, userId, vm, permission)
|
||||
])
|
||||
},
|
||||
|
||||
VM (userId, vm, permission) {
|
||||
return defaultCheckAuthorization.call(this, userId, vm, permission).catch(() => {
|
||||
return checkAuthorization.call(this, userId, vm.$host, permission)
|
||||
})
|
||||
},
|
||||
|
||||
'VM-snapshot': checkMemberAuthorization('$snapshot_of'),
|
||||
|
||||
'VM-template': authorized
|
||||
}
|
||||
|
||||
function throwIfFail (success) {
|
||||
if (!success) {
|
||||
// We don't care about an error object.
|
||||
/* eslint no-throw-literal: 0 */
|
||||
throw null
|
||||
}
|
||||
}
|
||||
|
||||
function defaultCheckAuthorization (userId, object, permission) {
|
||||
return this.hasPermission(userId, object.id, permission).then(throwIfFail)
|
||||
}
|
||||
|
||||
checkAuthorization = async function (userId, object, permission) {
|
||||
const fn = checkAuthorizationByTypes[object.type] || defaultCheckAuthorization
|
||||
return fn.call(this, userId, object, permission)
|
||||
}
|
||||
|
||||
function resolveParams (method, params) {
|
||||
const resolve = method.resolve
|
||||
if (!resolve) {
|
||||
@@ -173,9 +80,11 @@ function resolveParams (method, params) {
|
||||
}
|
||||
|
||||
const userId = user.get('id')
|
||||
const isAdmin = this.user.hasPermission('admin')
|
||||
|
||||
const promises = []
|
||||
// Do not alter the original object.
|
||||
params = { ...params }
|
||||
|
||||
const permissions = []
|
||||
forEach(resolve, ([param, types, permission = 'administrate'], key) => {
|
||||
const id = params[param]
|
||||
if (id === undefined) {
|
||||
@@ -190,15 +99,16 @@ function resolveParams (method, params) {
|
||||
// Register this new value.
|
||||
params[key] = object
|
||||
|
||||
if (!isAdmin) {
|
||||
promises.push(checkAuthorization.call(this, userId, object, permission))
|
||||
}
|
||||
permissions.push([ object.id, permission ])
|
||||
})
|
||||
|
||||
return Promise.all(promises).then(
|
||||
() => params,
|
||||
() => { throw new Unauthorized() }
|
||||
)
|
||||
return this.hasPermissions(userId, permissions).then(success => {
|
||||
if (success) {
|
||||
return params
|
||||
}
|
||||
|
||||
throw new Unauthorized()
|
||||
})
|
||||
}
|
||||
|
||||
// ===================================================================
|
||||
@@ -207,11 +117,11 @@ function getMethodsInfo () {
|
||||
const methods = {}
|
||||
|
||||
forEach(this.api._methods, function (method, name) {
|
||||
this[name] = assign({}, {
|
||||
this[name] = {
|
||||
description: method.description,
|
||||
params: method.params || {},
|
||||
permission: method.permission
|
||||
})
|
||||
}
|
||||
}, methods)
|
||||
|
||||
return methods
|
||||
@@ -242,11 +152,12 @@ function methodSignature ({method: name}) {
|
||||
// Return an array for compatibility with XML-RPC.
|
||||
return [
|
||||
// XML-RPC require the name of the method.
|
||||
assign({ name }, {
|
||||
{
|
||||
name,
|
||||
description: method.description,
|
||||
params: method.params || {},
|
||||
permission: method.permission
|
||||
})
|
||||
}
|
||||
]
|
||||
}
|
||||
methodSignature.description = 'returns the signature of an API method'
|
||||
@@ -254,8 +165,12 @@ methodSignature.description = 'returns the signature of an API method'
|
||||
// ===================================================================
|
||||
|
||||
export default class Api {
|
||||
constructor ({context} = {}) {
|
||||
this._methods = Object.create(null)
|
||||
constructor ({
|
||||
context,
|
||||
verboseLogsOnErrors
|
||||
} = {}) {
|
||||
this._methods = createRawObject()
|
||||
this._verboseLogsOnErrors = verboseLogsOnErrors
|
||||
this.context = context
|
||||
|
||||
this.addMethods({
|
||||
@@ -269,7 +184,19 @@ export default class Api {
|
||||
}
|
||||
|
||||
addMethod (name, method) {
|
||||
this._methods[name] = method
|
||||
const methods = this._methods
|
||||
|
||||
if (name in methods) {
|
||||
throw new Error(`API method ${name} already exists`)
|
||||
}
|
||||
|
||||
methods[name] = method
|
||||
|
||||
let unset = () => {
|
||||
delete methods[name]
|
||||
unset = noop
|
||||
}
|
||||
return () => unset()
|
||||
}
|
||||
|
||||
addMethods (methods) {
|
||||
@@ -297,24 +224,32 @@ export default class Api {
|
||||
throw new MethodNotFound(name)
|
||||
}
|
||||
|
||||
const context = Object.create(this.context)
|
||||
context.api = this // Used by system.*().
|
||||
context.session = session
|
||||
// FIXME: it can cause issues if there any property assignments in
|
||||
// XO methods called from the API.
|
||||
const context = Object.create(this.context, {
|
||||
api: { // Used by system.*().
|
||||
value: this
|
||||
},
|
||||
session: {
|
||||
value: session
|
||||
}
|
||||
})
|
||||
|
||||
// FIXME: too coupled with XO.
|
||||
// Fetch and inject the current user.
|
||||
const userId = session.get('user_id', undefined)
|
||||
if (userId) {
|
||||
context.user = await context._getUser(userId)
|
||||
}
|
||||
context.user = userId && await context._getUser(userId)
|
||||
const userName = context.user
|
||||
? context.user.get('email')
|
||||
: '(unknown user)'
|
||||
|
||||
try {
|
||||
await checkPermission.call(context, method)
|
||||
checkParams(method, params)
|
||||
|
||||
await resolveParams.call(context, method, params)
|
||||
const resolvedParams = await resolveParams.call(context, method, params)
|
||||
|
||||
let result = await method.call(context, params)
|
||||
let result = await method.call(context, resolvedParams)
|
||||
|
||||
// If nothing was returned, consider this operation a success
|
||||
// and return true.
|
||||
@@ -323,7 +258,8 @@ export default class Api {
|
||||
}
|
||||
|
||||
debug(
|
||||
'%s(...) [%s] ==> %s',
|
||||
'%s | %s(...) [%s] ==> %s',
|
||||
userName,
|
||||
name,
|
||||
ms(Date.now() - startTime),
|
||||
kindOf(result)
|
||||
@@ -331,16 +267,28 @@ export default class Api {
|
||||
|
||||
return result
|
||||
} catch (error) {
|
||||
debug(
|
||||
'%s(...) [%s] =!> %s',
|
||||
name,
|
||||
ms(Date.now() - startTime),
|
||||
error
|
||||
)
|
||||
if (this._verboseLogsOnErrors) {
|
||||
debug(
|
||||
'%s | %s(%j) [%s] =!> %s',
|
||||
userName,
|
||||
name,
|
||||
params,
|
||||
ms(Date.now() - startTime),
|
||||
error
|
||||
)
|
||||
|
||||
const stack = error && error.stack
|
||||
if (stack) {
|
||||
console.error(stack)
|
||||
const stack = error && error.stack
|
||||
if (stack) {
|
||||
console.error(stack)
|
||||
}
|
||||
} else {
|
||||
debug(
|
||||
'%s | %s(...) [%s] =!> %s',
|
||||
userName,
|
||||
name,
|
||||
ms(Date.now() - startTime),
|
||||
error
|
||||
)
|
||||
}
|
||||
|
||||
throw error
|
||||
|
||||
@@ -8,13 +8,13 @@ get.description = 'get existing ACLs'
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
export async function getCurrent () {
|
||||
return await this.getAclsForUser(this.session.get('user_id'))
|
||||
export async function getCurrentPermissions () {
|
||||
return await this.getPermissionsForUser(this.session.get('user_id'))
|
||||
}
|
||||
|
||||
getCurrent.permission = ''
|
||||
getCurrentPermissions.permission = ''
|
||||
|
||||
getCurrent.description = 'get existing ACLs concerning current user'
|
||||
getCurrentPermissions.description = 'get (explicit) permissions by object for the current user'
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
|
||||
@@ -3,9 +3,9 @@ import {parseSize} from '../utils'
|
||||
// ===================================================================
|
||||
|
||||
export async function create ({name, size, sr}) {
|
||||
const vdi = await this.getXAPI(sr).createVdi(parseSize(size), {
|
||||
const vdi = await this.getXapi(sr).createVdi(parseSize(size), {
|
||||
name_label: name,
|
||||
sr: sr.id
|
||||
sr: sr._xapiId
|
||||
})
|
||||
return vdi.$id
|
||||
}
|
||||
@@ -14,10 +14,27 @@ create.description = 'create a new disk on a SR'
|
||||
|
||||
create.params = {
|
||||
name: { type: 'string' },
|
||||
size: { type: 'string' },
|
||||
size: { type: ['integer', 'string'] },
|
||||
sr: { type: 'string' }
|
||||
}
|
||||
|
||||
create.resolve = {
|
||||
sr: ['sr', 'SR', 'administrate']
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
export async function resize ({ vdi, size }) {
|
||||
await this.getXapi(vdi).resizeVdi(vdi._xapiId, parseSize(size))
|
||||
}
|
||||
|
||||
resize.description = 'resize an existing VDI'
|
||||
|
||||
resize.params = {
|
||||
id: { type: 'string' },
|
||||
size: { type: ['integer', 'string'] }
|
||||
}
|
||||
|
||||
resize.resolve = {
|
||||
vdi: ['id', ['VDI', 'VDI-snapshot'], 'administrate']
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
export async function register ({vm}) {
|
||||
await this.getXAPI(vm).registerDockerContainer(vm.id)
|
||||
await this.getXapi(vm).registerDockerContainer(vm._xapiId)
|
||||
}
|
||||
register.permission = 'admin'
|
||||
|
||||
@@ -16,7 +16,7 @@ register.resolve = {
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
export async function deregister ({vm}) {
|
||||
await this.getXAPI(vm).unregisterDockerContainer(vm.id)
|
||||
await this.getXapi(vm).unregisterDockerContainer(vm._xapiId)
|
||||
}
|
||||
deregister.permission = 'admin'
|
||||
|
||||
@@ -33,23 +33,23 @@ deregister.resolve = {
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
export async function start ({vm, container}) {
|
||||
await this.getXAPI(vm).startDockerContainer(vm.id, container)
|
||||
await this.getXapi(vm).startDockerContainer(vm._xapiId, container)
|
||||
}
|
||||
|
||||
export async function stop ({vm, container}) {
|
||||
await this.getXAPI(vm).stopDockerContainer(vm.id, container)
|
||||
await this.getXapi(vm).stopDockerContainer(vm._xapiId, container)
|
||||
}
|
||||
|
||||
export async function restart ({vm, container}) {
|
||||
await this.getXAPI(vm).restartDockerContainer(vm.id, container)
|
||||
await this.getXapi(vm).restartDockerContainer(vm._xapiId, container)
|
||||
}
|
||||
|
||||
export async function pause ({vm, container}) {
|
||||
await this.getXAPI(vm).pauseDockerContainer(vm.id, container)
|
||||
await this.getXapi(vm).pauseDockerContainer(vm._xapiId, container)
|
||||
}
|
||||
|
||||
export async function unpause ({vm, container}) {
|
||||
await this.getXAPI(vm).unpauseDockerContainer(vm.id, container)
|
||||
await this.getXapi(vm).unpauseDockerContainer(vm._xapiId, container)
|
||||
}
|
||||
|
||||
for (let fn of [start, stop, restart, pause, unpause]) {
|
||||
|
||||
@@ -30,11 +30,8 @@ export async function getAll () {
|
||||
return await this._groups.get()
|
||||
}
|
||||
|
||||
delete_.description = 'returns all the existing group'
|
||||
delete_.permission = 'admin'
|
||||
delete_.params = {
|
||||
id: {type: 'string'}
|
||||
}
|
||||
getAll.description = 'returns all the existing group'
|
||||
getAll.permission = 'admin'
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
|
||||
@@ -3,26 +3,27 @@ $find = require 'lodash.find'
|
||||
$findIndex = require 'lodash.findindex'
|
||||
$forEach = require 'lodash.foreach'
|
||||
endsWith = require 'lodash.endswith'
|
||||
got = require('got')
|
||||
startsWith = require 'lodash.startswith'
|
||||
{coroutine: $coroutine} = require 'bluebird'
|
||||
{parseXml, promisify} = require '../utils'
|
||||
{
|
||||
extractProperty,
|
||||
parseXml,
|
||||
promisify
|
||||
} = require '../utils'
|
||||
|
||||
#=====================================================================
|
||||
|
||||
set = $coroutine (params) ->
|
||||
{host} = params
|
||||
xapi = @getXAPI host
|
||||
set = ({
|
||||
host,
|
||||
|
||||
for param, field of {
|
||||
'name_label'
|
||||
'name_description'
|
||||
}
|
||||
continue unless param of params
|
||||
|
||||
yield xapi.call "host.set_#{field}", host.ref, params[param]
|
||||
|
||||
return true
|
||||
# TODO: use camel case.
|
||||
name_label: nameLabel,
|
||||
name_description: nameDescription
|
||||
}) ->
|
||||
return @getXapi(host).setHostProperties(host._xapiId, {
|
||||
nameLabel,
|
||||
nameDescription
|
||||
})
|
||||
|
||||
set.description = 'changes the properties of an host'
|
||||
|
||||
@@ -43,18 +44,19 @@ exports.set = set
|
||||
|
||||
#---------------------------------------------------------------------
|
||||
|
||||
restart = $coroutine ({host}) ->
|
||||
xapi = @getXAPI host
|
||||
|
||||
yield xapi.call 'host.disable', host.ref
|
||||
yield xapi.call 'host.reboot', host.ref
|
||||
|
||||
return true
|
||||
# FIXME: set force to false per default when correctly implemented in
|
||||
# UI.
|
||||
restart = ({host, force = true}) ->
|
||||
return @getXapi(host).rebootHost(host._xapiId, force)
|
||||
|
||||
restart.description = 'restart the host'
|
||||
|
||||
restart.params = {
|
||||
id: { type: 'string' }
|
||||
id: { type: 'string' },
|
||||
force: {
|
||||
type: 'boolean',
|
||||
optional: true
|
||||
}
|
||||
}
|
||||
|
||||
restart.resolve = {
|
||||
@@ -65,12 +67,8 @@ exports.restart = restart
|
||||
|
||||
#---------------------------------------------------------------------
|
||||
|
||||
restartAgent = $coroutine ({host}) ->
|
||||
xapi = @getXAPI host
|
||||
|
||||
yield xapi.call 'host.restart_agent', host.ref
|
||||
|
||||
return true
|
||||
restartAgent = ({host}) ->
|
||||
return @getXapi(host).restartHostAgent(host._xapiId)
|
||||
|
||||
restartAgent.description = 'restart the Xen agent on the host'
|
||||
|
||||
@@ -79,7 +77,7 @@ restartAgent.params = {
|
||||
}
|
||||
|
||||
restartAgent.resolve = {
|
||||
host: ['id', 'host', 'operate'],
|
||||
host: ['id', 'host', 'administrate'],
|
||||
}
|
||||
|
||||
# TODO camel case
|
||||
@@ -87,12 +85,8 @@ exports.restart_agent = restartAgent
|
||||
|
||||
#---------------------------------------------------------------------
|
||||
|
||||
start = $coroutine ({host}) ->
|
||||
xapi = @getXAPI host
|
||||
|
||||
yield xapi.call 'host.power_on', host.ref
|
||||
|
||||
return true
|
||||
start = ({host}) ->
|
||||
return @getXapi(host).powerOnHost(host._xapiId)
|
||||
|
||||
start.description = 'start the host'
|
||||
|
||||
@@ -108,13 +102,8 @@ exports.start = start
|
||||
|
||||
#---------------------------------------------------------------------
|
||||
|
||||
stop = $coroutine ({host}) ->
|
||||
xapi = @getXAPI host
|
||||
|
||||
yield xapi.call 'host.disable', host.ref
|
||||
yield xapi.call 'host.shutdown', host.ref
|
||||
|
||||
return true
|
||||
stop = ({host}) ->
|
||||
return @getXapi(host).shutdownHost(host._xapiId)
|
||||
|
||||
stop.description = 'stop the host'
|
||||
|
||||
@@ -130,12 +119,8 @@ exports.stop = stop
|
||||
|
||||
#---------------------------------------------------------------------
|
||||
|
||||
detach = $coroutine ({host}) ->
|
||||
xapi = @getXAPI host
|
||||
|
||||
yield xapi.call 'pool.eject', host.ref
|
||||
|
||||
return true
|
||||
detach = ({host}) ->
|
||||
return @getXapi(host).ejectHostFromPool(host._xapiId)
|
||||
|
||||
detach.description = 'eject the host of a pool'
|
||||
|
||||
@@ -151,12 +136,8 @@ exports.detach = detach
|
||||
|
||||
#---------------------------------------------------------------------
|
||||
|
||||
enable = $coroutine ({host}) ->
|
||||
xapi = @getXAPI host
|
||||
|
||||
yield xapi.call 'host.enable', host.ref
|
||||
|
||||
return true
|
||||
enable = ({host}) ->
|
||||
return @getXapi(host).enableHost(host._xapiId)
|
||||
|
||||
enable.description = 'enable to create VM on the host'
|
||||
|
||||
@@ -172,12 +153,8 @@ exports.enable = enable
|
||||
|
||||
#---------------------------------------------------------------------
|
||||
|
||||
disable = $coroutine ({host}) ->
|
||||
xapi = @getXAPI host
|
||||
|
||||
yield xapi.call 'host.disable', host.ref
|
||||
|
||||
return true
|
||||
disable = ({host}) ->
|
||||
return @getXapi(host).disableHost(host._xapiId)
|
||||
|
||||
disable.description = 'disable to create VM on the hsot'
|
||||
|
||||
@@ -193,8 +170,9 @@ exports.disable = disable
|
||||
|
||||
#---------------------------------------------------------------------
|
||||
|
||||
# TODO: to test and to fix.
|
||||
createNetwork = $coroutine ({host, name, description, pif, mtu, vlan}) ->
|
||||
xapi = @getXAPI host
|
||||
xapi = @getXapi host
|
||||
|
||||
description = description ? 'Created with Xen Orchestra'
|
||||
|
||||
@@ -208,7 +186,7 @@ createNetwork = $coroutine ({host, name, description, pif, mtu, vlan}) ->
|
||||
if pif?
|
||||
vlan = vlan ? '0'
|
||||
pif = @getObject pif, 'PIF'
|
||||
yield xapi.call 'pool.create_VLAN_from_PIF', pif.ref, network_ref, vlan
|
||||
yield xapi.call 'pool.create_VLAN_from_PIF', pif._xapiRef, network_ref, vlan
|
||||
|
||||
return true
|
||||
|
||||
@@ -233,7 +211,7 @@ exports.createNetwork = createNetwork
|
||||
# Throws an error if the host is not running the latest XS version
|
||||
|
||||
listMissingPatches = ({host}) ->
|
||||
return @getXAPI(host).listMissingPoolPatchesOnHost(host.id)
|
||||
return @getXapi(host).listMissingPoolPatchesOnHost(host._xapiId)
|
||||
|
||||
listMissingPatches.params = {
|
||||
host: { type: 'string' }
|
||||
@@ -250,7 +228,7 @@ listMissingPatches.description = 'return an array of missing new patches in the
|
||||
#---------------------------------------------------------------------
|
||||
|
||||
installPatch = ({host, patch: patchUuid}) ->
|
||||
return @getXAPI(host).installPoolPatchOnHost(patchUuid, host.id)
|
||||
return @getXapi(host).installPoolPatchOnHost(patchUuid, host._xapiId)
|
||||
|
||||
installPatch.description = 'install a patch on an host'
|
||||
|
||||
@@ -267,96 +245,51 @@ exports.installPatch = installPatch
|
||||
|
||||
#---------------------------------------------------------------------
|
||||
|
||||
installAllPatches = ({host}) ->
|
||||
return @getXapi(host).installAllPoolPatchesOnHost(host._xapiId)
|
||||
|
||||
stats = $coroutine ({host, granularity}) ->
|
||||
granularity = if granularity then granularity else 0
|
||||
# granularity: 0: every 5 sec along last 10 minutes, 1: every minute along last 2 hours, 2: every hour along past week, 3: everyday along past year
|
||||
# see http://xenserver.org/partners/developing-products-for-xenserver/18-sdk-development/96-xs-dev-rrds.html
|
||||
installAllPatches.description = 'install all the missing patches on a host'
|
||||
|
||||
# select the AVERAGE values
|
||||
granularity = {0:0, 1:1, 2:4, 3:7}[granularity]
|
||||
xapi = @getXAPI host
|
||||
installAllPatches.params = {
|
||||
host: { type: 'string' }
|
||||
}
|
||||
|
||||
{body} = response = yield got(
|
||||
"https://#{host.address}/host_rrd?session_id=#{xapi.sessionId}",
|
||||
{ rejectUnauthorized: false }
|
||||
)
|
||||
installAllPatches.resolve = {
|
||||
host: ['host', 'host', 'administrate']
|
||||
}
|
||||
|
||||
if response.statusCode isnt 200
|
||||
throw new Error('Cannot fetch the RRDs')
|
||||
exports.installAllPatches = installAllPatches
|
||||
|
||||
json = parseXml(body)
|
||||
#---------------------------------------------------------------------
|
||||
|
||||
# Find index of needed objects for getting their values after
|
||||
cpusIndexes = []
|
||||
pifsIndexes = []
|
||||
memoryFreeIndex = []
|
||||
memoryIndex = []
|
||||
loadIndex = []
|
||||
index = 0
|
||||
emergencyShutdownHost = ({host}) ->
|
||||
return @getXapi(host).emergencyShutdownHost(host._xapiId)
|
||||
|
||||
$forEach(json.rrd.ds, (value, i) ->
|
||||
if /^cpu[0-9]+$/.test(value.name)
|
||||
cpusIndexes.push(i)
|
||||
else if startsWith(value.name, 'pif_eth') && endsWith(value.name, '_tx')
|
||||
pifsIndexes.push(i)
|
||||
else if startsWith(value.name, 'pif_eth') && endsWith(value.name, '_rx')
|
||||
pifsIndexes.push(i)
|
||||
else if startsWith(value.name, 'loadavg')
|
||||
loadIndex.push(i)
|
||||
else if startsWith(value.name, 'memory_free_kib')
|
||||
memoryFreeIndex.push(i)
|
||||
else if startsWith(value.name, 'memory_total_kib')
|
||||
memoryIndex.push(i)
|
||||
emergencyShutdownHost.description = 'suspend all VMs and shutdown host'
|
||||
|
||||
return
|
||||
)
|
||||
emergencyShutdownHost.params = {
|
||||
host: { type: 'string' }
|
||||
}
|
||||
|
||||
memoryFree = []
|
||||
memoryUsed = []
|
||||
memory = []
|
||||
load = []
|
||||
cpus = []
|
||||
pifs = []
|
||||
date = []
|
||||
archive = json.rrd.rra[granularity]
|
||||
dateStep = json.rrd.step * archive.pdp_per_row
|
||||
baseDate = json.rrd.lastupdate - (json.rrd.lastupdate % dateStep)
|
||||
numStep = archive.database.row.length - 1
|
||||
emergencyShutdownHost.resolve = {
|
||||
host: ['host', 'host', 'administrate']
|
||||
}
|
||||
|
||||
$forEach archive.database.row, (n, key) ->
|
||||
memoryFree.push(Math.round(parseInt(n.v[memoryFreeIndex])))
|
||||
memoryUsed.push(Math.round(parseInt(n.v[memoryIndex])-(n.v[memoryFreeIndex])))
|
||||
memory.push(parseInt(n.v[memoryIndex]))
|
||||
load.push(if n.v[loadIndex] == 'NaN' then null else n.v[loadIndex])
|
||||
date.push(baseDate - (dateStep * (numStep - key)))
|
||||
# build the multi dimensional arrays
|
||||
$forEach cpusIndexes, (value, key) ->
|
||||
cpus[key] ?= []
|
||||
cpus[key].push(n.v[value]*100)
|
||||
return
|
||||
$forEach pifsIndexes, (value, key) ->
|
||||
pifs[key] ?= []
|
||||
pifs[key].push(if n.v[value] == 'NaN' then null else n.v[value]) # * (if key % 2 then -1 else 1))
|
||||
return
|
||||
return
|
||||
exports.emergencyShutdownHost = emergencyShutdownHost
|
||||
|
||||
#---------------------------------------------------------------------
|
||||
|
||||
# the final object
|
||||
return {
|
||||
memoryFree: memoryFree
|
||||
memoryUsed: memoryUsed
|
||||
memory: memory
|
||||
date: date
|
||||
cpus: cpus
|
||||
pifs: pifs
|
||||
load: load
|
||||
}
|
||||
stats = ({host, granularity}) ->
|
||||
return @getXapiHostStats(host, granularity)
|
||||
|
||||
stats.description = 'returns statistic of the host'
|
||||
|
||||
stats.params = {
|
||||
host: { type: 'string' }
|
||||
host: { type: 'string' },
|
||||
granularity: {
|
||||
type: 'string',
|
||||
optional: true
|
||||
}
|
||||
}
|
||||
|
||||
stats.resolve = {
|
||||
@@ -364,3 +297,9 @@ stats.resolve = {
|
||||
}
|
||||
|
||||
exports.stats = stats;
|
||||
|
||||
#=====================================================================
|
||||
|
||||
Object.defineProperty(exports, '__esModule', {
|
||||
value: true
|
||||
})
|
||||
|
||||
@@ -1,34 +0,0 @@
|
||||
//
|
||||
// This file has been generated by ./.generate-index.sh
|
||||
//
|
||||
// It MUST be re-generated each time an API namespace (read file) is
|
||||
// added or removed.
|
||||
//
|
||||
export * as acl from './acl'
|
||||
export * as disk from './disk'
|
||||
export * as docker from './docker'
|
||||
export * as group from './group'
|
||||
export * as host from './host'
|
||||
export * as job from './job'
|
||||
export * as message from './message'
|
||||
export * as pbd from './pbd'
|
||||
export * as pif from './pif'
|
||||
export * as plugin from './plugin'
|
||||
export * as pool from './pool'
|
||||
export * as remote from './remote'
|
||||
export * as role from './role'
|
||||
export * as schedule from './schedule'
|
||||
export * as scheduler from './scheduler'
|
||||
export * as server from './server'
|
||||
export * as session from './session'
|
||||
export * as sr from './sr'
|
||||
export * as tag from './tag'
|
||||
export * as task from './task'
|
||||
export * as test from './test'
|
||||
export * as token from './token'
|
||||
export * as user from './user'
|
||||
export * as vbd from './vbd'
|
||||
export * as vdi from './vdi'
|
||||
export * as vif from './vif'
|
||||
export * as vm from './vm'
|
||||
export * as xo from './xo'
|
||||
@@ -27,6 +27,7 @@ create.params = {
|
||||
job: {
|
||||
type: 'object',
|
||||
properties: {
|
||||
name: {type: 'string', optional: true},
|
||||
type: {type: 'string'},
|
||||
key: {type: 'string'},
|
||||
method: {type: 'string'},
|
||||
@@ -47,7 +48,8 @@ create.params = {
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
optional: true
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -64,6 +66,7 @@ set.params = {
|
||||
type: 'object',
|
||||
properties: {
|
||||
id: {type: 'string'},
|
||||
name: {type: 'string', optional: true},
|
||||
type: {type: 'string'},
|
||||
key: {type: 'string'},
|
||||
method: {type: 'string'},
|
||||
@@ -84,7 +87,8 @@ set.params = {
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
optional: true
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -101,3 +105,13 @@ delete_.params = {
|
||||
}
|
||||
|
||||
export {delete_ as delete}
|
||||
|
||||
export async function runSequence ({idSequence}) {
|
||||
await this.runJobSequence(idSequence)
|
||||
}
|
||||
|
||||
runSequence.permission = 'admin'
|
||||
runSequence.description = 'Runs jobs sequentially, in the provided order'
|
||||
runSequence.params = {
|
||||
idSequence: {type: 'array', items: {type: 'string'}}
|
||||
}
|
||||
|
||||
28
src/api/log.js
Normal file
28
src/api/log.js
Normal file
@@ -0,0 +1,28 @@
|
||||
export async function get ({namespace}) {
|
||||
const logger = this.getLogger(namespace)
|
||||
|
||||
return new Promise((resolve, reject) => {
|
||||
const logs = {}
|
||||
|
||||
logger.createReadStream()
|
||||
.on('data', (data) => {
|
||||
logs[data.key] = data.value
|
||||
})
|
||||
.on('end', () => {
|
||||
resolve(logs)
|
||||
})
|
||||
.on('error', reject)
|
||||
})
|
||||
}
|
||||
|
||||
get.description = 'returns logs list for one namespace'
|
||||
|
||||
function delete_ ({namespace, id}) {
|
||||
const logger = this.getLogger(namespace)
|
||||
logger.del(id)
|
||||
}
|
||||
|
||||
delete_.description = 'deletes on or several logs from a namespace'
|
||||
delete_.permission = 'admin'
|
||||
|
||||
export {delete_ as delete}
|
||||
@@ -1,5 +1,5 @@
|
||||
async function delete_ ({message}) {
|
||||
await this.getXAPI(message).call('message.destroy', message.ref)
|
||||
async function delete_ ({ message }) {
|
||||
await this.getXapi(message).call('message.destroy', message._xapiRef)
|
||||
}
|
||||
export {delete_ as delete}
|
||||
|
||||
|
||||
@@ -5,7 +5,7 @@
|
||||
|
||||
async function delete_ ({PBD}) {
|
||||
// TODO: check if PBD is attached before
|
||||
await this.getXAPI(PBD).call('PBD.destroy', PBD.ref)
|
||||
await this.getXapi(PBD).call('PBD.destroy', PBD._xapiRef)
|
||||
}
|
||||
export {delete_ as delete}
|
||||
|
||||
@@ -22,7 +22,7 @@ delete_.resolve = {
|
||||
|
||||
export async function disconnect ({PBD}) {
|
||||
// TODO: check if PBD is attached before
|
||||
await this.getXAPI(PBD).call('PBD.unplug', PBD.ref)
|
||||
await this.getXapi(PBD).call('PBD.unplug', PBD._xapiRef)
|
||||
}
|
||||
|
||||
disconnect.params = {
|
||||
@@ -38,7 +38,7 @@ disconnect.resolve = {
|
||||
|
||||
export async function connect ({PBD}) {
|
||||
// TODO: check if PBD is attached before
|
||||
await this.getXAPI(PBD).call('PBD.plug', PBD.ref)
|
||||
await this.getXapi(PBD).call('PBD.plug', PBD._xapiRef)
|
||||
}
|
||||
|
||||
connect.params = {
|
||||
|
||||
@@ -1,9 +1,11 @@
|
||||
// TODO: too low level, move into host.
|
||||
|
||||
// ===================================================================
|
||||
// Delete
|
||||
|
||||
async function delete_ ({PIF}) {
|
||||
// TODO: check if PIF is attached before
|
||||
await this.getXAPI(PIF).call('PIF.destroy', PIF.ref)
|
||||
await this.getXapi(PIF).call('PIF.destroy', PIF._xapiRef)
|
||||
}
|
||||
export {delete_ as delete}
|
||||
|
||||
@@ -20,7 +22,7 @@ delete_.resolve = {
|
||||
|
||||
export async function disconnect ({PIF}) {
|
||||
// TODO: check if PIF is attached before
|
||||
await this.getXAPI(PIF).call('PIF.unplug', PIF.ref)
|
||||
await this.getXapi(PIF).call('PIF.unplug', PIF._xapiRef)
|
||||
}
|
||||
|
||||
disconnect.params = {
|
||||
@@ -35,7 +37,7 @@ disconnect.resolve = {
|
||||
|
||||
export async function connect ({PIF}) {
|
||||
// TODO: check if PIF is attached before
|
||||
await this.getXAPI(PIF).call('PIF.plug', PIF.ref)
|
||||
await this.getXapi(PIF).call('PIF.plug', PIF._xapiRef)
|
||||
}
|
||||
|
||||
connect.params = {
|
||||
|
||||
@@ -1,48 +1,3 @@
|
||||
// Plugin object structure (using [JSON Schema](http://json-schema.org)):
|
||||
({
|
||||
$schema: 'http://json-schema.org/draft-04/schema#',
|
||||
type: 'object',
|
||||
properties: {
|
||||
id: {
|
||||
type: 'string',
|
||||
description: 'unique identifier for this plugin'
|
||||
},
|
||||
name: {
|
||||
type: 'string',
|
||||
description: 'unique human readable name for this plugin'
|
||||
},
|
||||
autoload: {
|
||||
type: 'boolean',
|
||||
description: 'whether this plugin is loaded on startup'
|
||||
},
|
||||
loaded: {
|
||||
type: 'boolean',
|
||||
description: 'whether or not this plugin is currently loaded'
|
||||
},
|
||||
unloadable: {
|
||||
type: 'boolean',
|
||||
default: 'true',
|
||||
description: 'whether or not this plugin can be unloaded'
|
||||
},
|
||||
configuration: {
|
||||
type: 'object',
|
||||
description: 'current configuration of this plugin (not present if none)'
|
||||
},
|
||||
configurationSchema: {
|
||||
$ref: 'http://json-schema.org/draft-04/schema#',
|
||||
description: 'configuration schema for this plugin (not present if not configurable)'
|
||||
}
|
||||
},
|
||||
required: [
|
||||
'id',
|
||||
'name',
|
||||
'autoload',
|
||||
'loaded'
|
||||
]
|
||||
})
|
||||
|
||||
// ===================================================================
|
||||
|
||||
export async function get () {
|
||||
return await this.getPlugins()
|
||||
}
|
||||
@@ -131,3 +86,19 @@ unload.params = {
|
||||
}
|
||||
|
||||
unload.permission = 'admin'
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
export async function purgeConfiguration ({ id }) {
|
||||
await this.purgePluginConfiguration(id)
|
||||
}
|
||||
|
||||
purgeConfiguration.description = 'removes a plugin configuration'
|
||||
|
||||
purgeConfiguration.params = {
|
||||
id: {
|
||||
type: 'string'
|
||||
}
|
||||
}
|
||||
|
||||
purgeConfiguration.permission = 'admin'
|
||||
|
||||
@@ -1,12 +1,18 @@
|
||||
import {JsonRpcError} from '../api-errors'
|
||||
import {extractProperty} from '../utils'
|
||||
|
||||
// ===================================================================
|
||||
|
||||
export async function set (params) {
|
||||
const pool = extractProperty(params, 'pool')
|
||||
export async function set ({
|
||||
pool,
|
||||
|
||||
await this.getXAPI(pool).setPoolProperties(params)
|
||||
// TODO: use camel case.
|
||||
name_description: nameDescription,
|
||||
name_label: nameLabel
|
||||
}) {
|
||||
await this.getXapi(pool).setPoolProperties({
|
||||
nameDescription,
|
||||
nameLabel
|
||||
})
|
||||
}
|
||||
|
||||
set.params = {
|
||||
@@ -29,8 +35,27 @@ set.resolve = {
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
export async function setDefaultSr ({pool, sr}) {
|
||||
await this.getXapi(pool).setDefaultSr(sr._xapiId)
|
||||
}
|
||||
|
||||
setDefaultSr.params = {
|
||||
pool: {
|
||||
type: 'string'
|
||||
},
|
||||
sr: {
|
||||
type: 'string'
|
||||
}
|
||||
}
|
||||
|
||||
setDefaultSr.resolve = {
|
||||
pool: ['pool', 'pool', 'administrate'],
|
||||
sr: ['sr', 'SR']
|
||||
}
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
export async function installPatch ({pool, patch: patchUuid}) {
|
||||
await this.getXAPI(pool).installPoolPatchOnAllHosts(patchUuid)
|
||||
await this.getXapi(pool).installPoolPatchOnAllHosts(patchUuid)
|
||||
}
|
||||
|
||||
installPatch.params = {
|
||||
@@ -56,7 +81,7 @@ async function handlePatchUpload (req, res, {pool}) {
|
||||
return
|
||||
}
|
||||
|
||||
await this.getXAPI(pool).uploadPoolPatch(req, contentLength)
|
||||
await this.getXapi(pool).uploadPoolPatch(req, contentLength)
|
||||
}
|
||||
|
||||
export async function uploadPatch ({pool}) {
|
||||
@@ -82,7 +107,7 @@ export {uploadPatch as patch}
|
||||
|
||||
export async function mergeInto ({ source, target, force }) {
|
||||
try {
|
||||
await this.mergeXenPools(source.id, target.id, force)
|
||||
await this.mergeXenPools(source._xapiId, target._xapiId, force)
|
||||
} catch (e) {
|
||||
// FIXME: should we expose plain XAPI error messages?
|
||||
throw new JsonRpcError(e.message)
|
||||
|
||||
@@ -5,7 +5,7 @@ export async function getAll () {
|
||||
getAll.permission = 'admin'
|
||||
getAll.description = 'Gets all existing fs remote points'
|
||||
|
||||
export async function get (id) {
|
||||
export async function get ({id}) {
|
||||
return await this.getRemote(id)
|
||||
}
|
||||
|
||||
@@ -15,6 +15,16 @@ get.params = {
|
||||
id: {type: 'string'}
|
||||
}
|
||||
|
||||
export async function list ({id}) {
|
||||
return await this.listRemoteBackups(id)
|
||||
}
|
||||
|
||||
list.permission = 'admin'
|
||||
list.description = 'Lists the files found in a remote point'
|
||||
list.params = {
|
||||
id: {type: 'string'}
|
||||
}
|
||||
|
||||
export async function create ({name, url}) {
|
||||
return await this.createRemote({name, url})
|
||||
}
|
||||
|
||||
@@ -17,8 +17,8 @@ get.params = {
|
||||
id: {type: 'string'}
|
||||
}
|
||||
|
||||
export async function create ({jobId, cron, enabled}) {
|
||||
return await this.createSchedule(this.session.get('user_id'), {job: jobId, cron, enabled})
|
||||
export async function create ({jobId, cron, enabled, name}) {
|
||||
return await this.createSchedule(this.session.get('user_id'), {job: jobId, cron, enabled, name})
|
||||
}
|
||||
|
||||
create.permission = 'admin'
|
||||
@@ -26,11 +26,12 @@ create.description = 'Creates a new schedule'
|
||||
create.params = {
|
||||
jobId: {type: 'string'},
|
||||
cron: {type: 'string'},
|
||||
enabled: {type: 'boolean', optional: true}
|
||||
enabled: {type: 'boolean', optional: true},
|
||||
name: {type: 'string', optional: true}
|
||||
}
|
||||
|
||||
export async function set ({id, jobId, cron, enabled}) {
|
||||
await this.updateSchedule(id, {job: jobId, cron, enabled})
|
||||
export async function set ({id, jobId, cron, enabled, name}) {
|
||||
await this.updateSchedule(id, {job: jobId, cron, enabled, name})
|
||||
}
|
||||
|
||||
set.permission = 'admin'
|
||||
@@ -39,7 +40,8 @@ set.params = {
|
||||
id: {type: 'string'},
|
||||
jobId: {type: 'string', optional: true},
|
||||
cron: {type: 'string', optional: true},
|
||||
enabled: {type: 'boolean', optional: true}
|
||||
enabled: {type: 'boolean', optional: true},
|
||||
name: {type: 'string', optional: true}
|
||||
}
|
||||
|
||||
async function delete_ ({id}) {
|
||||
|
||||
@@ -1,14 +1,17 @@
|
||||
import { noop } from '../utils'
|
||||
|
||||
export async function add ({
|
||||
host,
|
||||
username,
|
||||
password,
|
||||
readOnly,
|
||||
autoConnect = true
|
||||
}) {
|
||||
const server = await this.registerXenServer({host, username, password})
|
||||
const server = await this.registerXenServer({host, username, password, readOnly})
|
||||
|
||||
if (autoConnect) {
|
||||
// Connect asynchronously, ignore any error.
|
||||
this.connectXenServer(server.id).catch(() => {})
|
||||
// Connect asynchronously, ignore any errors.
|
||||
this.connectXenServer(server.id).catch(noop)
|
||||
}
|
||||
|
||||
return server.id
|
||||
@@ -70,11 +73,11 @@ getAll.permission = 'admin'
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
export async function set ({id, host, username, password}) {
|
||||
await this.updateXenServer(id, {host, username, password})
|
||||
export async function set ({id, host, username, password, readOnly}) {
|
||||
await this.updateXenServer(id, {host, username, password, readOnly})
|
||||
}
|
||||
|
||||
set.description = 'changes the propeorties of a Xen server'
|
||||
set.description = 'changes the properties of a Xen server'
|
||||
|
||||
set.permission = 'admin'
|
||||
|
||||
@@ -99,6 +102,7 @@ set.params = {
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
export async function connect ({id}) {
|
||||
this.updateXenServer(id, {enabled: true}).catch(noop)
|
||||
await this.connectXenServer(id)
|
||||
}
|
||||
|
||||
@@ -115,6 +119,7 @@ connect.params = {
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
export async function disconnect ({id}) {
|
||||
this.updateXenServer(id, {enabled: false}).catch(noop)
|
||||
await this.disconnectXenServer(id)
|
||||
}
|
||||
|
||||
|
||||
@@ -1,13 +1,22 @@
|
||||
import forEach from 'lodash.foreach'
|
||||
import {ensureArray, parseXml} from '../utils'
|
||||
import {
|
||||
ensureArray,
|
||||
forEach,
|
||||
parseXml
|
||||
} from '../utils'
|
||||
|
||||
// ===================================================================
|
||||
|
||||
export async function set (params) {
|
||||
const {sr} = params
|
||||
delete params.sr
|
||||
export async function set ({
|
||||
sr,
|
||||
|
||||
await this.getXAPI(sr).setSrProperties(sr.id, params)
|
||||
// TODO: use camel case.
|
||||
name_description: nameDescription,
|
||||
name_label: nameLabel
|
||||
}) {
|
||||
await this.getXapi(sr).setSrProperties(sr._xapiId, {
|
||||
nameDescription,
|
||||
nameLabel
|
||||
})
|
||||
}
|
||||
|
||||
set.params = {
|
||||
@@ -25,7 +34,7 @@ set.resolve = {
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
export async function scan ({SR}) {
|
||||
await this.getXAPI(SR).call('SR.scan', SR.ref)
|
||||
await this.getXapi(SR).call('SR.scan', SR._xapiRef)
|
||||
}
|
||||
|
||||
scan.params = {
|
||||
@@ -40,7 +49,7 @@ scan.resolve = {
|
||||
|
||||
// TODO: find a way to call this "delete" and not destroy
|
||||
export async function destroy ({SR}) {
|
||||
await this.getXAPI(SR).call('SR.destroy', SR.ref)
|
||||
await this.getXapi(SR).call('SR.destroy', SR._xapiRef)
|
||||
}
|
||||
|
||||
destroy.params = {
|
||||
@@ -54,7 +63,7 @@ destroy.resolve = {
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
export async function forget ({SR}) {
|
||||
await this.getXAPI(SR).call('SR.forget', SR.ref)
|
||||
await this.getXapi(SR).call('SR.forget', SR._xapiRef)
|
||||
}
|
||||
|
||||
forget.params = {
|
||||
@@ -73,7 +82,7 @@ export async function createIso ({
|
||||
nameDescription,
|
||||
path
|
||||
}) {
|
||||
const xapi = this.getXAPI(host)
|
||||
const xapi = this.getXapi(host)
|
||||
|
||||
// FIXME: won't work for IPv6
|
||||
// Detect if NFS or local path for ISO files
|
||||
@@ -84,7 +93,7 @@ export async function createIso ({
|
||||
}
|
||||
const srRef = await xapi.call(
|
||||
'SR.create',
|
||||
host.ref,
|
||||
host._xapiRef,
|
||||
deviceConfig,
|
||||
'0', // SR size 0 because ISO
|
||||
nameLabel,
|
||||
@@ -123,7 +132,7 @@ export async function createNfs ({
|
||||
serverPath,
|
||||
nfsVersion
|
||||
}) {
|
||||
const xapi = this.getXAPI(host)
|
||||
const xapi = this.getXapi(host)
|
||||
|
||||
const deviceConfig = {
|
||||
server,
|
||||
@@ -137,7 +146,7 @@ export async function createNfs ({
|
||||
|
||||
const srRef = await xapi.call(
|
||||
'SR.create',
|
||||
host.ref,
|
||||
host._xapiRef,
|
||||
deviceConfig,
|
||||
'0',
|
||||
nameLabel,
|
||||
@@ -176,7 +185,7 @@ export async function createLvm ({
|
||||
nameDescription,
|
||||
device
|
||||
}) {
|
||||
const xapi = this.getXAPI(host)
|
||||
const xapi = this.getXapi(host)
|
||||
|
||||
const deviceConfig = {
|
||||
device
|
||||
@@ -184,7 +193,7 @@ export async function createLvm ({
|
||||
|
||||
const srRef = await xapi.call(
|
||||
'SR.create',
|
||||
host.ref,
|
||||
host._xapiRef,
|
||||
deviceConfig,
|
||||
'0',
|
||||
nameLabel,
|
||||
@@ -218,7 +227,7 @@ export async function probeNfs ({
|
||||
host,
|
||||
server
|
||||
}) {
|
||||
const xapi = this.getXAPI(host)
|
||||
const xapi = this.getXapi(host)
|
||||
|
||||
const deviceConfig = {
|
||||
server
|
||||
@@ -229,7 +238,7 @@ export async function probeNfs ({
|
||||
try {
|
||||
await xapi.call(
|
||||
'SR.probe',
|
||||
host.ref,
|
||||
host._xapiRef,
|
||||
deviceConfig,
|
||||
'nfs',
|
||||
{}
|
||||
@@ -281,7 +290,7 @@ export async function createIscsi ({
|
||||
chapUser,
|
||||
chapPassword
|
||||
}) {
|
||||
const xapi = this.getXAPI(host)
|
||||
const xapi = this.getXapi(host)
|
||||
|
||||
const deviceConfig = {
|
||||
target,
|
||||
@@ -302,7 +311,7 @@ export async function createIscsi ({
|
||||
|
||||
const srRef = await xapi.call(
|
||||
'SR.create',
|
||||
host.ref,
|
||||
host._xapiRef,
|
||||
deviceConfig,
|
||||
'0',
|
||||
nameLabel,
|
||||
@@ -344,7 +353,7 @@ export async function probeIscsiIqns ({
|
||||
chapUser,
|
||||
chapPassword
|
||||
}) {
|
||||
const xapi = this.getXAPI(host)
|
||||
const xapi = this.getXapi(host)
|
||||
|
||||
const deviceConfig = {
|
||||
target: targetIp
|
||||
@@ -366,7 +375,7 @@ export async function probeIscsiIqns ({
|
||||
try {
|
||||
await xapi.call(
|
||||
'SR.probe',
|
||||
host.ref,
|
||||
host._xapiRef,
|
||||
deviceConfig,
|
||||
'lvmoiscsi',
|
||||
{}
|
||||
@@ -421,7 +430,7 @@ export async function probeIscsiLuns ({
|
||||
chapUser,
|
||||
chapPassword
|
||||
}) {
|
||||
const xapi = this.getXAPI(host)
|
||||
const xapi = this.getXapi(host)
|
||||
|
||||
const deviceConfig = {
|
||||
target: targetIp,
|
||||
@@ -444,7 +453,7 @@ export async function probeIscsiLuns ({
|
||||
try {
|
||||
await xapi.call(
|
||||
'SR.probe',
|
||||
host.ref,
|
||||
host._xapiRef,
|
||||
deviceConfig,
|
||||
'lvmoiscsi',
|
||||
{}
|
||||
@@ -499,7 +508,7 @@ export async function probeIscsiExists ({
|
||||
chapUser,
|
||||
chapPassword
|
||||
}) {
|
||||
const xapi = this.getXAPI(host)
|
||||
const xapi = this.getXapi(host)
|
||||
|
||||
const deviceConfig = {
|
||||
target: targetIp,
|
||||
@@ -518,7 +527,7 @@ export async function probeIscsiExists ({
|
||||
deviceConfig.port = port
|
||||
}
|
||||
|
||||
const xml = parseXml(await xapi.call('SR.probe', host.ref, deviceConfig, 'lvmoiscsi', {}))
|
||||
const xml = parseXml(await xapi.call('SR.probe', host._xapiRef, deviceConfig, 'lvmoiscsi', {}))
|
||||
|
||||
const srs = []
|
||||
forEach(ensureArray(xml['SRlist'].SR), sr => {
|
||||
@@ -552,14 +561,14 @@ export async function probeNfsExists ({
|
||||
server,
|
||||
serverPath
|
||||
}) {
|
||||
const xapi = this.getXAPI(host)
|
||||
const xapi = this.getXapi(host)
|
||||
|
||||
const deviceConfig = {
|
||||
server,
|
||||
serverpath: serverPath
|
||||
}
|
||||
|
||||
const xml = parseXml(await xapi.call('SR.probe', host.ref, deviceConfig, 'nfs', {}))
|
||||
const xml = parseXml(await xapi.call('SR.probe', host._xapiRef, deviceConfig, 'nfs', {}))
|
||||
|
||||
const srs = []
|
||||
|
||||
@@ -591,7 +600,7 @@ export async function reattach ({
|
||||
nameDescription,
|
||||
type
|
||||
}) {
|
||||
const xapi = this.getXAPI(host)
|
||||
const xapi = this.getXapi(host)
|
||||
|
||||
if (type === 'iscsi') {
|
||||
type = 'lvmoiscsi' // the internal XAPI name
|
||||
@@ -634,7 +643,7 @@ export async function reattachIso ({
|
||||
nameDescription,
|
||||
type
|
||||
}) {
|
||||
const xapi = this.getXAPI(host)
|
||||
const xapi = this.getXapi(host)
|
||||
|
||||
if (type === 'iscsi') {
|
||||
type = 'lvmoiscsi' // the internal XAPI name
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
export async function add ({tag, object}) {
|
||||
await this.getXAPI(object).addTag(object.id, tag)
|
||||
await this.getXapi(object).addTag(object._xapiId, tag)
|
||||
}
|
||||
|
||||
add.description = 'add a new tag to an object'
|
||||
@@ -16,7 +16,7 @@ add.params = {
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
export async function remove ({tag, object}) {
|
||||
await this.getXAPI(object).removeTag(object.id, tag)
|
||||
await this.getXapi(object).removeTag(object._xapiId, tag)
|
||||
}
|
||||
|
||||
remove.description = 'remove an existing tag from an object'
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
export async function cancel ({task}) {
|
||||
await this.getXAPI(task).call('task.cancel', task.ref)
|
||||
await this.getXapi(task).call('task.cancel', task._xapiRef)
|
||||
}
|
||||
|
||||
cancel.params = {
|
||||
@@ -13,7 +13,7 @@ cancel.resolve = {
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
export async function destroy ({task}) {
|
||||
await this.getXAPI(task).call('task.destroy', task.ref)
|
||||
await this.getXapi(task).call('task.destroy', task._xapiRef)
|
||||
}
|
||||
|
||||
destroy.params = {
|
||||
|
||||
@@ -1,5 +1,21 @@
|
||||
export function hasPermission ({userId, objectId, permission}) {
|
||||
return this.hasPermission(userId, objectId, permission)
|
||||
export function getPermissionsForUser ({ userId }) {
|
||||
return this.getPermissionsForUser(userId)
|
||||
}
|
||||
|
||||
getPermissionsForUser.permission = 'admin'
|
||||
|
||||
getPermissionsForUser.params = {
|
||||
userId: {
|
||||
type: 'string'
|
||||
}
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
export function hasPermission ({ userId, objectId, permission }) {
|
||||
return this.hasPermissions(userId, [
|
||||
[ objectId, permission ]
|
||||
])
|
||||
}
|
||||
|
||||
hasPermission.permission = 'admin'
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
import map from 'lodash.map'
|
||||
|
||||
import {InvalidParameters} from '../api-errors'
|
||||
import { mapToArray } from '../utils'
|
||||
|
||||
// ===================================================================
|
||||
|
||||
@@ -49,7 +48,7 @@ export async function getAll () {
|
||||
const users = await this._users.get()
|
||||
|
||||
// Filters out private properties.
|
||||
return map(users, this.getUserPublicProperties)
|
||||
return mapToArray(users, this.getUserPublicProperties)
|
||||
}
|
||||
|
||||
getAll.description = 'returns all the existing users'
|
||||
@@ -73,9 +72,11 @@ set.params = {
|
||||
permission: { type: 'string', optional: true }
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
export async function changePassword ({oldPassword, newPassword}) {
|
||||
const id = this.session.get('user_id')
|
||||
await this.changePassword(id, oldPassword, newPassword)
|
||||
await this.changeUserPassword(id, oldPassword, newPassword)
|
||||
}
|
||||
|
||||
changePassword.description = 'change password after checking old password (user function)'
|
||||
|
||||
@@ -5,10 +5,10 @@
|
||||
#=====================================================================
|
||||
|
||||
delete_ = $coroutine ({vbd}) ->
|
||||
xapi = @getXAPI vbd
|
||||
xapi = @getXapi vbd
|
||||
|
||||
# TODO: check if VBD is attached before
|
||||
yield xapi.call 'VBD.destroy', vbd.ref
|
||||
yield xapi.call 'VBD.destroy', vbd._xapiRef
|
||||
|
||||
return true
|
||||
|
||||
@@ -25,12 +25,9 @@ exports.delete = delete_
|
||||
#---------------------------------------------------------------------
|
||||
|
||||
disconnect = $coroutine ({vbd}) ->
|
||||
xapi = @getXAPI vbd
|
||||
|
||||
# TODO: check if VBD is attached before
|
||||
yield xapi.call 'VBD.unplug_force', vbd.ref
|
||||
|
||||
return true
|
||||
xapi = @getXapi vbd
|
||||
yield xapi.disconnectVbd(vbd._xapiRef)
|
||||
return
|
||||
|
||||
disconnect.params = {
|
||||
id: { type: 'string' }
|
||||
@@ -45,12 +42,9 @@ exports.disconnect = disconnect
|
||||
#---------------------------------------------------------------------
|
||||
|
||||
connect = $coroutine ({vbd}) ->
|
||||
xapi = @getXAPI vbd
|
||||
|
||||
# TODO: check if VBD is attached before
|
||||
yield xapi.call 'VBD.plug', vbd.ref
|
||||
|
||||
return true
|
||||
xapi = @getXapi vbd
|
||||
yield xapi.connectVbd(vbd._xapiRef)
|
||||
return
|
||||
|
||||
connect.params = {
|
||||
id: { type: 'string' }
|
||||
@@ -66,9 +60,9 @@ exports.connect = connect
|
||||
|
||||
set = $coroutine (params) ->
|
||||
{vbd} = params
|
||||
xapi = @getXAPI vbd
|
||||
xapi = @getXapi vbd
|
||||
|
||||
{ref} = vbd
|
||||
{ _xapiRef: ref } = vbd
|
||||
|
||||
# VBD position
|
||||
if 'position' of params
|
||||
@@ -87,3 +81,29 @@ set.resolve = {
|
||||
}
|
||||
|
||||
exports.set = set
|
||||
|
||||
#---------------------------------------------------------------------
|
||||
|
||||
setBootable = $coroutine ({vbd, bootable}) ->
|
||||
xapi = @getXapi vbd
|
||||
{ _xapiRef: ref } = vbd
|
||||
|
||||
yield xapi.call 'VBD.set_bootable', ref, bootable
|
||||
return
|
||||
|
||||
setBootable.params = {
|
||||
vbd: { type: 'string' }
|
||||
bootable: { type: 'boolean' }
|
||||
}
|
||||
|
||||
setBootable.resolve = {
|
||||
vbd: ['vbd', 'VBD', 'administrate'],
|
||||
}
|
||||
|
||||
exports.setBootable = setBootable
|
||||
|
||||
#=====================================================================
|
||||
|
||||
Object.defineProperty(exports, '__esModule', {
|
||||
value: true
|
||||
})
|
||||
|
||||
@@ -3,13 +3,15 @@
|
||||
$isArray = require 'lodash.isarray'
|
||||
{coroutine: $coroutine} = require 'bluebird'
|
||||
|
||||
{format} = require 'json-rpc-peer'
|
||||
{InvalidParameters} = require '../api-errors'
|
||||
{parseSize} = require '../utils'
|
||||
{JsonRpcError} = require '../api-errors'
|
||||
|
||||
#=====================================================================
|
||||
|
||||
delete_ = $coroutine ({vdi}) ->
|
||||
yield @getXAPI(vdi).deleteVdi(vdi.id)
|
||||
yield @getXapi(vdi).deleteVdi(vdi._xapiId)
|
||||
|
||||
return
|
||||
|
||||
@@ -18,7 +20,7 @@ delete_.params = {
|
||||
}
|
||||
|
||||
delete_.resolve = {
|
||||
vdi: ['id', 'VDI', 'administrate'],
|
||||
vdi: ['id', ['VDI', 'VDI-snapshot'], 'administrate'],
|
||||
}
|
||||
|
||||
exports.delete = delete_
|
||||
@@ -28,9 +30,9 @@ exports.delete = delete_
|
||||
# FIXME: human readable strings should be handled.
|
||||
set = $coroutine (params) ->
|
||||
{vdi} = params
|
||||
xapi = @getXAPI vdi
|
||||
xapi = @getXapi vdi
|
||||
|
||||
{ref} = vdi
|
||||
{_xapiRef: ref} = vdi
|
||||
|
||||
# Size.
|
||||
if 'size' of params
|
||||
@@ -40,8 +42,7 @@ set = $coroutine (params) ->
|
||||
throw new InvalidParameters(
|
||||
"cannot set new size (#{size}) below the current size (#{vdi.size})"
|
||||
)
|
||||
|
||||
yield xapi.call 'VDI.resize_online', ref, "#{size}"
|
||||
yield xapi.resizeVdi(ref, size)
|
||||
|
||||
# Other fields.
|
||||
for param, fields of {
|
||||
@@ -68,7 +69,7 @@ set.params = {
|
||||
}
|
||||
|
||||
set.resolve = {
|
||||
vdi: ['id', 'VDI', 'administrate'],
|
||||
vdi: ['id', ['VDI', 'VDI-snapshot'], 'administrate'],
|
||||
}
|
||||
|
||||
exports.set = set
|
||||
@@ -76,10 +77,9 @@ exports.set = set
|
||||
#---------------------------------------------------------------------
|
||||
|
||||
migrate = $coroutine ({vdi, sr}) ->
|
||||
xapi = @getXAPI vdi
|
||||
xapi = @getXapi vdi
|
||||
|
||||
# TODO: check if VDI is attached before
|
||||
yield xapi.call 'VDI.pool_migrate', vdi.ref, sr.ref, {}
|
||||
yield xapi.moveVdi(vdi._xapiRef, sr._xapiRef)
|
||||
|
||||
return true
|
||||
|
||||
@@ -89,8 +89,14 @@ migrate.params = {
|
||||
}
|
||||
|
||||
migrate.resolve = {
|
||||
vdi: ['id', 'VDI', 'administrate'],
|
||||
vdi: ['id', ['VDI', 'VDI-snapshot'], 'administrate'],
|
||||
sr: ['sr_id', 'SR', 'administrate'],
|
||||
}
|
||||
|
||||
exports.migrate = migrate
|
||||
|
||||
#=====================================================================
|
||||
|
||||
Object.defineProperty(exports, '__esModule', {
|
||||
value: true
|
||||
})
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
// TODO: move into vm and rename to removeInterface
|
||||
async function delete_ ({vif}) {
|
||||
await this.getXAPI(vif).deleteVif(vif.id)
|
||||
await this.getXapi(vif).deleteVif(vif._xapiId)
|
||||
}
|
||||
export {delete_ as delete}
|
||||
|
||||
@@ -16,7 +16,7 @@ delete_.resolve = {
|
||||
// TODO: move into vm and rename to disconnectInterface
|
||||
export async function disconnect ({vif}) {
|
||||
// TODO: check if VIF is attached before
|
||||
await this.getXAPI(vif).call('VIF.unplug_force', vif.ref)
|
||||
await this.getXapi(vif).call('VIF.unplug_force', vif._xapiRef)
|
||||
}
|
||||
|
||||
disconnect.params = {
|
||||
@@ -31,7 +31,7 @@ disconnect.resolve = {
|
||||
// TODO: move into vm and rename to connectInterface
|
||||
export async function connect ({vif}) {
|
||||
// TODO: check if VIF is attached before
|
||||
await this.getXAPI(vif).call('VIF.plug', vif.ref)
|
||||
await this.getXapi(vif).call('VIF.plug', vif._xapiRef)
|
||||
}
|
||||
|
||||
connect.params = {
|
||||
|
||||
@@ -2,20 +2,24 @@ $debug = (require 'debug') 'xo:api:vm'
|
||||
$filter = require 'lodash.filter'
|
||||
$findIndex = require 'lodash.findindex'
|
||||
$findWhere = require 'lodash.find'
|
||||
$forEach = require 'lodash.foreach'
|
||||
$isArray = require 'lodash.isarray'
|
||||
$result = require 'lodash.result'
|
||||
endsWith = require 'lodash.endswith'
|
||||
escapeStringRegexp = require 'escape-string-regexp'
|
||||
eventToPromise = require 'event-to-promise'
|
||||
got = require('got')
|
||||
map = require 'lodash.map'
|
||||
sortBy = require 'lodash.sortby'
|
||||
startsWith = require 'lodash.startswith'
|
||||
{coroutine: $coroutine} = require 'bluebird'
|
||||
{format} = require 'json-rpc-peer'
|
||||
|
||||
{
|
||||
JsonRpcError,
|
||||
Unauthorized
|
||||
} = require('../api-errors')
|
||||
{
|
||||
forEach,
|
||||
formatXml: $js2xml,
|
||||
mapToArray,
|
||||
parseSize,
|
||||
parseXml,
|
||||
pFinally
|
||||
} = require '../utils'
|
||||
@@ -23,6 +27,28 @@ startsWith = require 'lodash.startswith'
|
||||
|
||||
#=====================================================================
|
||||
|
||||
checkPermissionOnSrs = (vm, permission = 'operate') -> (
|
||||
permissions = []
|
||||
forEach(vm.$VBDs, (vbdId) =>
|
||||
vbd = @getObject(vbdId, 'VBD')
|
||||
vdiId = vbd.VDI
|
||||
|
||||
if vbd.is_cd_drive or not vdiId
|
||||
return
|
||||
|
||||
permissions.push([
|
||||
@getObject(vdiId, 'VDI').$SR,
|
||||
permission
|
||||
])
|
||||
)
|
||||
|
||||
return @hasPermissions(@session.get('user_id'), permissions).then((success) => (
|
||||
throw new Unauthorized() unless success
|
||||
))
|
||||
)
|
||||
|
||||
#=====================================================================
|
||||
|
||||
# TODO: Implement ACLs
|
||||
create = $coroutine ({
|
||||
installation
|
||||
@@ -32,14 +58,16 @@ create = $coroutine ({
|
||||
pv_args
|
||||
VDIs
|
||||
VIFs
|
||||
existingDisks
|
||||
}) ->
|
||||
vm = yield @getXAPI(template).createVm(template.id, {
|
||||
vm = yield @getXapi(template).createVm(template._xapiId, {
|
||||
installRepository: installation && installation.repository,
|
||||
nameDescription: name_description,
|
||||
nameLabel: name_label,
|
||||
pvArgs: pv_args,
|
||||
vdis: VDIs,
|
||||
vifs: VIFs
|
||||
vifs: VIFs,
|
||||
existingDisks
|
||||
})
|
||||
|
||||
return vm.$id
|
||||
@@ -79,7 +107,7 @@ create.params = {
|
||||
# UUID of the network to create the interface in.
|
||||
network: { type: 'string' }
|
||||
|
||||
MAC: {
|
||||
mac: {
|
||||
optional: true # Auto-generated per default.
|
||||
type: 'string'
|
||||
}
|
||||
@@ -95,7 +123,7 @@ create.params = {
|
||||
type: 'object'
|
||||
properties: {
|
||||
device: { type: 'string' }
|
||||
size: { type: 'integer' }
|
||||
size: { type: ['integer', 'string'] }
|
||||
SR: { type: 'string' }
|
||||
type: { type: 'string' }
|
||||
}
|
||||
@@ -112,7 +140,7 @@ exports.create = create
|
||||
#---------------------------------------------------------------------
|
||||
|
||||
delete_ = ({vm, delete_disks: deleteDisks}) ->
|
||||
return @getXAPI(vm).deleteVm(vm.id, deleteDisks)
|
||||
return @getXapi(vm).deleteVm(vm._xapiId, deleteDisks)
|
||||
|
||||
delete_.params = {
|
||||
id: { type: 'string' }
|
||||
@@ -131,7 +159,7 @@ exports.delete = delete_
|
||||
#---------------------------------------------------------------------
|
||||
|
||||
ejectCd = $coroutine ({vm}) ->
|
||||
yield @getXAPI(vm).ejectCdFromVm(vm.id)
|
||||
yield @getXapi(vm).ejectCdFromVm(vm._xapiId)
|
||||
return
|
||||
|
||||
ejectCd.params = {
|
||||
@@ -146,7 +174,7 @@ exports.ejectCd = ejectCd
|
||||
#---------------------------------------------------------------------
|
||||
|
||||
insertCd = $coroutine ({vm, vdi, force}) ->
|
||||
yield @getXAPI(vm).insertCdIntoVm(vdi.id, vm.id, {force})
|
||||
yield @getXapi(vm).insertCdIntoVm(vdi._xapiId, vm._xapiId, {force})
|
||||
return
|
||||
|
||||
insertCd.params = {
|
||||
@@ -163,82 +191,83 @@ exports.insertCd = insertCd
|
||||
|
||||
#---------------------------------------------------------------------
|
||||
|
||||
migrate = $coroutine ({vm, host}) ->
|
||||
yield @getXAPI(vm).migrateVm(vm.id, @getXAPI(host), host.id)
|
||||
migrate = $coroutine ({
|
||||
vm,
|
||||
host,
|
||||
mapVdisSrs,
|
||||
mapVifsNetworks,
|
||||
migrationNetwork
|
||||
}) ->
|
||||
permissions = []
|
||||
|
||||
if mapVdisSrs
|
||||
mapVdisSrsXapi = {}
|
||||
forEach mapVdisSrs, (srId, vdiId) =>
|
||||
vdiXapiId = @getObject(vdiId, 'VDI')._xapiId
|
||||
mapVdisSrsXapi[vdiXapiId] = @getObject(srId, 'SR')._xapiId
|
||||
permissions.push([
|
||||
srId,
|
||||
'administrate'
|
||||
])
|
||||
|
||||
if mapVifsNetworks
|
||||
mapVifsNetworksXapi = {}
|
||||
forEach mapVifsNetworks, (networkId, vifId) =>
|
||||
vifXapiId = @getObject(vifId, 'VIF')._xapiId
|
||||
mapVifsNetworksXapi[vifXapiId] = @getObject(networkId, 'network')._xapiId
|
||||
permissions.push([
|
||||
networkId,
|
||||
'administrate'
|
||||
])
|
||||
|
||||
unless yield @hasPermissions(@session.get('user_id'), permissions)
|
||||
throw new Unauthorized()
|
||||
|
||||
yield @getXapi(vm).migrateVm(vm._xapiId, @getXapi(host), host._xapiId, {
|
||||
migrationNetworkId: migrationNetwork?._xapiId
|
||||
mapVifsNetworks: mapVifsNetworksXapi,
|
||||
mapVdisSrs: mapVdisSrsXapi,
|
||||
})
|
||||
return
|
||||
|
||||
migrate.params = {
|
||||
|
||||
# Identifier of the VM to migrate.
|
||||
id: { type: 'string' }
|
||||
vm: { type: 'string' }
|
||||
|
||||
# Identifier of the host to migrate to.
|
||||
host_id: { type: 'string' }
|
||||
targetHost: { type: 'string' }
|
||||
|
||||
# Map VDIs IDs --> SRs IDs
|
||||
mapVdisSrs: { type: 'object', optional: true }
|
||||
|
||||
# Map VIFs IDs --> Networks IDs
|
||||
mapVifsNetworks: { type: 'object', optional: true }
|
||||
|
||||
# Identifier of the Network use for the migration
|
||||
migrationNetwork: { type: 'string', optional: true }
|
||||
}
|
||||
|
||||
migrate.resolve = {
|
||||
vm: ['id', 'VM']
|
||||
host: ['host_id', 'host', 'administrate']
|
||||
vm: ['vm', 'VM', 'administrate'],
|
||||
host: ['targetHost', 'host', 'administrate'],
|
||||
migrationNetwork: ['migrationNetwork', 'network', 'administrate'],
|
||||
}
|
||||
|
||||
exports.migrate = migrate
|
||||
|
||||
#---------------------------------------------------------------------
|
||||
|
||||
migratePool = $coroutine ({
|
||||
vm,
|
||||
host
|
||||
sr
|
||||
network
|
||||
migrationNetwork
|
||||
}) ->
|
||||
yield @getXAPI(vm).migrateVm(vm.id, @getXAPI(host), host.id, {
|
||||
migrationNetworkId: migrationNetwork?.id
|
||||
networkId: network?.id,
|
||||
srId: sr?.id,
|
||||
})
|
||||
return
|
||||
|
||||
migratePool.params = {
|
||||
|
||||
# Identifier of the VM to migrate.
|
||||
id: { type: 'string' }
|
||||
|
||||
# Identifier of the host to migrate to.
|
||||
target_host_id: { type: 'string' }
|
||||
|
||||
# Identifier of the target SR
|
||||
target_sr_id: { type: 'string', optional: true }
|
||||
|
||||
# Identifier of the target Network
|
||||
target_network_id: { type: 'string', optional: true }
|
||||
|
||||
# Identifier of the Network use for the migration
|
||||
migration_network_id: { type: 'string', optional: true }
|
||||
}
|
||||
|
||||
migratePool.resolve = {
|
||||
vm: ['id', 'VM', 'administrate'],
|
||||
host: ['target_host_id', 'host', 'administrate'],
|
||||
sr: ['target_sr_id', 'SR', 'administrate'],
|
||||
network: ['target_network_id', 'network', 'administrate'],
|
||||
migrationNetwork: ['migration_network_id', 'network', 'administrate'],
|
||||
}
|
||||
|
||||
# TODO: camel case.
|
||||
exports.migrate_pool = migratePool
|
||||
|
||||
#---------------------------------------------------------------------
|
||||
|
||||
# FIXME: human readable strings should be handled.
|
||||
set = $coroutine (params) ->
|
||||
{VM} = params
|
||||
xapi = @getXAPI VM
|
||||
xapi = @getXapi VM
|
||||
|
||||
{ref} = VM
|
||||
{_xapiRef: ref} = VM
|
||||
|
||||
# Memory.
|
||||
if 'memory' of params
|
||||
{memory} = params
|
||||
memory = parseSize(params.memory)
|
||||
|
||||
if memory < VM.memory.static[0]
|
||||
@throw(
|
||||
@@ -291,9 +320,13 @@ set = $coroutine (params) ->
|
||||
|
||||
if auto_poweron
|
||||
yield xapi.call 'VM.add_to_other_config', ref, 'auto_poweron', 'true'
|
||||
yield xapi.setPoolProperties({autoPowerOn: true})
|
||||
else
|
||||
yield xapi.call 'VM.remove_from_other_config', ref, 'auto_poweron'
|
||||
|
||||
if 'cpuWeight' of params
|
||||
yield xapi.setVcpuWeight(VM._xapiId, params.cpuWeight)
|
||||
|
||||
# Other fields.
|
||||
for param, fields of {
|
||||
'name_label'
|
||||
@@ -327,10 +360,12 @@ set.params = {
|
||||
# Memory to allocate (in bytes).
|
||||
#
|
||||
# Note: static_min ≤ dynamic_min ≤ dynamic_max ≤ static_max
|
||||
memory: { type: 'integer', optional: true }
|
||||
memory: { type: ['integer', 'string'], optional: true }
|
||||
|
||||
# Kernel arguments for PV VM.
|
||||
PV_args: { type: 'string', optional: true }
|
||||
|
||||
cpuWeight: { type: 'integer', optional: true}
|
||||
}
|
||||
|
||||
set.resolve = {
|
||||
@@ -342,12 +377,12 @@ exports.set = set
|
||||
#---------------------------------------------------------------------
|
||||
|
||||
restart = $coroutine ({vm, force}) ->
|
||||
xapi = @getXAPI(vm)
|
||||
xapi = @getXapi(vm)
|
||||
|
||||
if force
|
||||
yield xapi.call 'VM.hard_reboot', vm.ref
|
||||
yield xapi.call 'VM.hard_reboot', vm._xapiRef
|
||||
else
|
||||
yield xapi.call 'VM.clean_reboot', vm.ref
|
||||
yield xapi.call 'VM.clean_reboot', vm._xapiRef
|
||||
|
||||
return true
|
||||
|
||||
@@ -365,14 +400,12 @@ exports.restart = restart
|
||||
#---------------------------------------------------------------------
|
||||
|
||||
clone = $coroutine ({vm, name, full_copy}) ->
|
||||
xapi = @getXAPI(vm)
|
||||
yield checkPermissionOnSrs.call(this, vm)
|
||||
|
||||
newVm = yield if full_copy
|
||||
xapi.copyVm(vm.ref, null, name)
|
||||
else
|
||||
xapi.cloneVm(vm.ref, name)
|
||||
|
||||
return newVm.$id
|
||||
return @getXapi(vm).cloneVm(vm._xapiRef, {
|
||||
nameLabel: name,
|
||||
fast: not full_copy
|
||||
}).then((vm) -> vm.$id)
|
||||
|
||||
clone.params = {
|
||||
id: { type: 'string' }
|
||||
@@ -389,9 +422,50 @@ exports.clone = clone
|
||||
|
||||
#---------------------------------------------------------------------
|
||||
|
||||
copy = $coroutine ({
|
||||
compress,
|
||||
name: nameLabel,
|
||||
sr,
|
||||
vm
|
||||
}) ->
|
||||
if vm.$pool == sr.$pool
|
||||
if vm.power_state is 'Running'
|
||||
yield checkPermissionOnSrs.call(this, vm)
|
||||
|
||||
return @getXapi(vm).copyVm(vm._xapiId, sr._xapiId, {
|
||||
nameLabel
|
||||
}).then((vm) -> vm.$id)
|
||||
|
||||
return @getXapi(vm).remoteCopyVm(vm._xapiId, @getXapi(sr), sr._xapiId, {
|
||||
compress,
|
||||
nameLabel
|
||||
}).then((vm) -> vm.$id)
|
||||
|
||||
copy.params = {
|
||||
compress: {
|
||||
type: 'boolean',
|
||||
optional: true
|
||||
},
|
||||
name: {
|
||||
type: 'string',
|
||||
optional: true
|
||||
},
|
||||
vm: { type: 'string' },
|
||||
sr: { type: 'string' }
|
||||
}
|
||||
|
||||
copy.resolve = {
|
||||
vm: [ 'vm', 'VM', 'administrate' ]
|
||||
sr: [ 'sr', 'SR', 'operate' ]
|
||||
}
|
||||
|
||||
exports.copy = copy
|
||||
|
||||
#---------------------------------------------------------------------
|
||||
|
||||
# TODO: rename convertToTemplate()
|
||||
convert = $coroutine ({vm}) ->
|
||||
yield @getXAPI(vm).call 'VM.set_is_a_template', vm.ref, true
|
||||
yield @getXapi(vm).call 'VM.set_is_a_template', vm._xapiRef, true
|
||||
|
||||
return true
|
||||
|
||||
@@ -407,7 +481,9 @@ exports.convert = convert
|
||||
#---------------------------------------------------------------------
|
||||
|
||||
snapshot = $coroutine ({vm, name}) ->
|
||||
snapshot = yield @getXAPI(vm).snapshotVm(vm.ref, name)
|
||||
yield checkPermissionOnSrs.call(this, vm)
|
||||
|
||||
snapshot = yield @getXapi(vm).snapshotVm(vm._xapiRef, name)
|
||||
return snapshot.$id
|
||||
|
||||
snapshot.params = {
|
||||
@@ -422,9 +498,69 @@ exports.snapshot = snapshot
|
||||
|
||||
#---------------------------------------------------------------------
|
||||
|
||||
rollingDeltaBackup = $coroutine ({vm, remote, tag, depth}) ->
|
||||
return yield @rollingDeltaVmBackup({
|
||||
vm,
|
||||
remoteId: remote,
|
||||
tag,
|
||||
depth
|
||||
})
|
||||
|
||||
rollingDeltaBackup.params = {
|
||||
vm: { type: 'string' }
|
||||
remote: { type: 'string' }
|
||||
tag: { type: 'string'}
|
||||
depth: { type: ['string', 'number'] }
|
||||
}
|
||||
|
||||
rollingDeltaBackup.resolve = {
|
||||
vm: ['vm', ['VM', 'VM-snapshot'], 'administrate']
|
||||
}
|
||||
|
||||
rollingDeltaBackup.permission = 'admin'
|
||||
|
||||
exports.rollingDeltaBackup = rollingDeltaBackup
|
||||
|
||||
#---------------------------------------------------------------------
|
||||
|
||||
importDeltaBackup = ({sr, remote, filePath}) ->
|
||||
return @importDeltaVmBackup({sr, remoteId: remote, filePath})
|
||||
|
||||
importDeltaBackup.params = {
|
||||
sr: { type: 'string' }
|
||||
remote: { type: 'string' }
|
||||
filePath: { type: 'string' }
|
||||
}
|
||||
|
||||
importDeltaBackup.resolve = {
|
||||
sr: [ 'sr', 'SR', 'operate' ]
|
||||
}
|
||||
|
||||
importDeltaBackup.permission = 'admin'
|
||||
|
||||
exports.importDeltaBackup = importDeltaBackup
|
||||
|
||||
#---------------------------------------------------------------------
|
||||
|
||||
deltaCopy = ({ vm, sr }) -> @deltaCopyVm(vm, sr)
|
||||
|
||||
deltaCopy.params = {
|
||||
vm: { type: 'string' },
|
||||
sr: { type: 'string' }
|
||||
}
|
||||
|
||||
deltaCopy.resolve = {
|
||||
vm: [ 'vm', 'VM', 'operate'],
|
||||
sr: [ 'sr', 'SR', 'operate']
|
||||
}
|
||||
|
||||
exports.deltaCopy = deltaCopy
|
||||
|
||||
#---------------------------------------------------------------------
|
||||
|
||||
rollingSnapshot = $coroutine ({vm, tag, depth}) ->
|
||||
snapshot = yield @getXAPI(vm).rollingSnapshotVm(vm.ref, tag, depth)
|
||||
return snapshot.$id
|
||||
yield checkPermissionOnSrs.call(this, vm)
|
||||
yield @rollingSnapshotVm(vm, tag, depth)
|
||||
|
||||
rollingSnapshot.params = {
|
||||
id: { type: 'string' }
|
||||
@@ -436,18 +572,19 @@ rollingSnapshot.resolve = {
|
||||
vm: ['id', 'VM', 'administrate']
|
||||
}
|
||||
|
||||
rollingSnapshot.description = 'Snaphots a VM with a tagged name, and removes the oldest snapshot with the same tag according to depth'
|
||||
rollingSnapshot.description = 'Snapshots a VM with a tagged name, and removes the oldest snapshot with the same tag according to depth'
|
||||
|
||||
exports.rollingSnapshot = rollingSnapshot
|
||||
|
||||
#---------------------------------------------------------------------
|
||||
|
||||
backup = $coroutine ({vm, pathToFile, compress, onlyMetadata}) ->
|
||||
yield @backupVm({vm, pathToFile, compress, onlyMetadata})
|
||||
backup = $coroutine ({vm, remoteId, file, compress, onlyMetadata}) ->
|
||||
yield @backupVm({vm, remoteId, file, compress, onlyMetadata})
|
||||
|
||||
backup.params = {
|
||||
id: { type: 'string' }
|
||||
pathToFile: { type: 'string' }
|
||||
id: {type: 'string'}
|
||||
remoteId: { type: 'string' }
|
||||
file: { type: 'string' }
|
||||
compress: { type: 'boolean', optional: true }
|
||||
onlyMetadata: { type: 'boolean', optional: true }
|
||||
}
|
||||
@@ -462,13 +599,32 @@ exports.backup = backup
|
||||
|
||||
#---------------------------------------------------------------------
|
||||
|
||||
importBackup = $coroutine ({remote, file, sr}) ->
|
||||
yield @importVmBackup(remote, file, sr)
|
||||
return
|
||||
|
||||
importBackup.permission = 'admin'
|
||||
importBackup.description = 'Imports a VM into host, from a file found in the chosen remote'
|
||||
importBackup.params = {
|
||||
remote: {type: 'string'},
|
||||
file: {type: 'string'},
|
||||
sr: {type: 'string'}
|
||||
}
|
||||
|
||||
importBackup.resolve = {
|
||||
sr: [ 'sr', 'SR', 'operate' ]
|
||||
}
|
||||
|
||||
importBackup.permission = 'admin'
|
||||
|
||||
exports.importBackup = importBackup
|
||||
|
||||
#---------------------------------------------------------------------
|
||||
|
||||
rollingBackup = $coroutine ({vm, remoteId, tag, depth, compress, onlyMetadata}) ->
|
||||
remote = yield @getRemote remoteId
|
||||
if not remote?.path?
|
||||
throw new Error "No such Remote #{remoteId}"
|
||||
return yield @rollingBackupVm({
|
||||
vm,
|
||||
path: remote.path,
|
||||
remoteId,
|
||||
tag,
|
||||
depth,
|
||||
compress,
|
||||
@@ -493,14 +649,31 @@ exports.rollingBackup = rollingBackup
|
||||
|
||||
#---------------------------------------------------------------------
|
||||
|
||||
start = $coroutine ({vm}) ->
|
||||
yield @getXAPI(vm).call(
|
||||
'VM.start', vm.ref
|
||||
false # Start paused?
|
||||
false # Skips the pre-boot checks?
|
||||
)
|
||||
rollingDrCopy = ({vm, pool, tag, depth}) ->
|
||||
if vm.$pool is pool.id
|
||||
throw new JsonRpcError('Disaster Recovery attempts to copy on the same pool')
|
||||
return @rollingDrCopyVm({vm, sr: @getObject(pool.default_SR, 'SR'), tag, depth})
|
||||
|
||||
return true
|
||||
rollingDrCopy.params = {
|
||||
id: { type: 'string' }
|
||||
pool: { type: 'string' }
|
||||
tag: { type: 'string'}
|
||||
depth: { type: 'number' }
|
||||
}
|
||||
|
||||
rollingDrCopy.resolve = {
|
||||
vm: ['id', ['VM', 'VM-snapshot'], 'administrate'],
|
||||
pool: ['pool', 'pool', 'administrate']
|
||||
}
|
||||
|
||||
rollingDrCopy.description = 'Copies a VM to a different pool, with a tagged name, and removes the oldest VM with the same tag from this pool, according to depth'
|
||||
|
||||
exports.rollingDrCopy = rollingDrCopy
|
||||
|
||||
#---------------------------------------------------------------------
|
||||
|
||||
start = ({vm}) ->
|
||||
return @getXapi(vm).startVm(vm._xapiId)
|
||||
|
||||
start.params = {
|
||||
id: { type: 'string' }
|
||||
@@ -519,16 +692,16 @@ exports.start = start
|
||||
# - if force is true → hard shutdown
|
||||
# - if force is integer → clean shutdown and after force seconds, hard shutdown.
|
||||
stop = $coroutine ({vm, force}) ->
|
||||
xapi = @getXAPI vm
|
||||
xapi = @getXapi vm
|
||||
|
||||
# Hard shutdown
|
||||
if force
|
||||
yield xapi.call 'VM.hard_shutdown', vm.ref
|
||||
yield xapi.call 'VM.hard_shutdown', vm._xapiRef
|
||||
return true
|
||||
|
||||
# Clean shutdown
|
||||
try
|
||||
yield xapi.call 'VM.clean_shutdown', vm.ref
|
||||
yield xapi.call 'VM.clean_shutdown', vm._xapiRef
|
||||
catch error
|
||||
if error.code is 'VM_MISSING_PV_DRIVERS' or error.code is 'VM_LACKS_FEATURE_SHUTDOWN'
|
||||
# TODO: Improve reporting: this message is unclear.
|
||||
@@ -552,7 +725,7 @@ exports.stop = stop
|
||||
#---------------------------------------------------------------------
|
||||
|
||||
suspend = $coroutine ({vm}) ->
|
||||
yield @getXAPI(vm).call 'VM.suspend', vm.ref
|
||||
yield @getXapi(vm).call 'VM.suspend', vm._xapiRef
|
||||
|
||||
return true
|
||||
|
||||
@@ -572,7 +745,7 @@ resume = $coroutine ({vm, force}) ->
|
||||
if not force
|
||||
force = true
|
||||
|
||||
yield @getXAPI(vm).call 'VM.resume', vm.ref, false, force
|
||||
yield @getXapi(vm).call 'VM.resume', vm._xapiRef, false, force
|
||||
|
||||
return true
|
||||
|
||||
@@ -591,7 +764,7 @@ exports.resume = resume
|
||||
# revert a snapshot to its parent VM
|
||||
revert = $coroutine ({snapshot}) ->
|
||||
# Attempts a revert from this snapshot to its parent VM
|
||||
yield @getXAPI(snapshot).call 'VM.revert', snapshot.ref
|
||||
yield @getXapi(snapshot).call 'VM.revert', snapshot._xapiRef
|
||||
|
||||
return true
|
||||
|
||||
@@ -606,29 +779,39 @@ exports.revert = revert
|
||||
|
||||
#---------------------------------------------------------------------
|
||||
|
||||
handleExport = (req, res, { stream }) ->
|
||||
upstream = stream.response
|
||||
|
||||
handleExport = $coroutine (req, res, {xapi, id, compress, onlyMetadata}) ->
|
||||
stream = yield xapi.exportVm(id, {
|
||||
compress: compress ? true,
|
||||
onlyMetadata: onlyMetadata ? false
|
||||
})
|
||||
res.on('close', () ->
|
||||
stream.cancel()
|
||||
)
|
||||
# Remove the filename as it is already part of the URL.
|
||||
upstream.headers['content-disposition'] = 'attachment'
|
||||
stream.headers['content-disposition'] = 'attachment'
|
||||
|
||||
res.writeHead(
|
||||
upstream.statusCode,
|
||||
upstream.statusMessage ? '',
|
||||
upstream.headers
|
||||
stream.statusCode,
|
||||
stream.statusMessage ? '',
|
||||
stream.headers
|
||||
)
|
||||
stream.pipe(res)
|
||||
return
|
||||
|
||||
# TODO: integrate in xapi.js
|
||||
export_ = $coroutine ({vm, compress, onlyMetadata}) ->
|
||||
stream = yield @getXAPI(vm).exportVm(vm.id, {
|
||||
compress: compress ? true,
|
||||
onlyMetadata: onlyMetadata ? false
|
||||
})
|
||||
if vm.power_state is 'Running'
|
||||
yield checkPermissionOnSrs.call(this, vm)
|
||||
|
||||
data = {
|
||||
xapi: @getXapi(vm),
|
||||
id: vm._xapiId,
|
||||
compress,
|
||||
onlyMetadata
|
||||
}
|
||||
|
||||
return {
|
||||
$getFrom: yield @registerHttpRequest(handleExport, { stream }, {
|
||||
$getFrom: yield @registerHttpRequest(handleExport, data, {
|
||||
suffix: encodeURI("/#{vm.name_label}.xva")
|
||||
})
|
||||
}
|
||||
@@ -646,33 +829,48 @@ exports.export = export_;
|
||||
|
||||
#---------------------------------------------------------------------
|
||||
|
||||
# FIXME
|
||||
handleVmImport = $coroutine (req, res, { xapi, srId }) ->
|
||||
# Timeout seems to be broken in Node 4.
|
||||
# See https://github.com/nodejs/node/issues/3319
|
||||
req.setTimeout(43200000) # 12 hours
|
||||
|
||||
try
|
||||
vm = yield xapi.importVm(req, { srId })
|
||||
res.end(format.response(0, vm.$id))
|
||||
catch e
|
||||
res.writeHead(500)
|
||||
res.end(format.error(new JsonRpcError(e.message)))
|
||||
|
||||
return
|
||||
|
||||
# TODO: "sr_id" can be passed in URL to target a specific SR
|
||||
import_ = $coroutine ({host}) ->
|
||||
import_ = $coroutine ({host, sr}) ->
|
||||
if not sr
|
||||
if not host
|
||||
throw new InvalidParameters('you must provide either host or SR')
|
||||
|
||||
{sessionId} = @getXAPI(host)
|
||||
xapi = @getXapi(host)
|
||||
sr = xapi.pool.$default_SR
|
||||
if not sr
|
||||
throw new InvalidParameters('there is not default SR in this pool')
|
||||
else
|
||||
xapi = @getXapi(sr)
|
||||
|
||||
url = yield @registerProxyRequest {
|
||||
# Receive a POST but send a PUT.
|
||||
method: 'put'
|
||||
proxyMethod: 'post'
|
||||
|
||||
hostname: host.address
|
||||
pathname: '/import/'
|
||||
query: {
|
||||
session_id: sessionId
|
||||
}
|
||||
}
|
||||
return {
|
||||
$sendTo: url
|
||||
$sendTo: yield @registerHttpRequest(handleVmImport, {
|
||||
srId: sr._xapiId,
|
||||
xapi
|
||||
})
|
||||
}
|
||||
|
||||
import_.params = {
|
||||
host: { type: 'string' }
|
||||
host: { type: 'string', optional: true },
|
||||
sr: { type: 'string', optional: true }
|
||||
}
|
||||
|
||||
import_.resolve = {
|
||||
host: ['host', 'host', 'administrate']
|
||||
host: ['host', 'host', 'administrate'],
|
||||
sr: ['sr', 'SR', 'administrate']
|
||||
}
|
||||
exports.import = import_
|
||||
|
||||
@@ -681,7 +879,7 @@ exports.import = import_
|
||||
# FIXME: if position is used, all other disks after this position
|
||||
# should be shifted.
|
||||
attachDisk = $coroutine ({vm, vdi, position, mode, bootable}) ->
|
||||
yield @getXAPI(vm).attachVdiToVm(vdi.id, vm.id, {
|
||||
yield @getXapi(vm).attachVdiToVm(vdi._xapiId, vm._xapiId, {
|
||||
bootable,
|
||||
position,
|
||||
readOnly: mode is 'RO'
|
||||
@@ -710,7 +908,7 @@ exports.attachDisk = attachDisk
|
||||
# FIXME: position should be optional and default to last.
|
||||
|
||||
createInterface = $coroutine ({vm, network, position, mtu, mac}) ->
|
||||
vif = yield @getXAPI(vm).createVif(vm.id, network.id, {
|
||||
vif = yield @getXapi(vm).createVif(vm._xapiId, network._xapiId, {
|
||||
mac,
|
||||
mtu,
|
||||
position
|
||||
@@ -735,9 +933,9 @@ exports.createInterface = createInterface
|
||||
#---------------------------------------------------------------------
|
||||
|
||||
attachPci = $coroutine ({vm, pciId}) ->
|
||||
xapi = @getXAPI vm
|
||||
xapi = @getXapi vm
|
||||
|
||||
yield xapi.call 'VM.add_to_other_config', vm.ref, 'pci', pciId
|
||||
yield xapi.call 'VM.add_to_other_config', vm._xapiRef, 'pci', pciId
|
||||
|
||||
return true
|
||||
|
||||
@@ -755,9 +953,9 @@ exports.attachPci = attachPci
|
||||
#---------------------------------------------------------------------
|
||||
|
||||
detachPci = $coroutine ({vm}) ->
|
||||
xapi = @getXAPI vm
|
||||
xapi = @getXapi vm
|
||||
|
||||
yield xapi.call 'VM.remove_from_other_config', vm.ref, 'pci'
|
||||
yield xapi.call 'VM.remove_from_other_config', vm._xapiRef, 'pci'
|
||||
|
||||
return true
|
||||
|
||||
@@ -772,103 +970,18 @@ detachPci.resolve = {
|
||||
exports.detachPci = detachPci
|
||||
#---------------------------------------------------------------------
|
||||
|
||||
|
||||
stats = $coroutine ({vm, granularity}) ->
|
||||
granularity = if granularity then granularity else 0
|
||||
# granularity: 0: every 5 sec along last 10 minutes, 1: every minute along last 2 hours, 2: every hour along past week, 3: everyday along past year
|
||||
# see http://xenserver.org/partners/developing-products-for-xenserver/18-sdk-development/96-xs-dev-rrds.html
|
||||
xapi = @getXAPI vm
|
||||
stats = yield @getXapiVmStats(vm, granularity)
|
||||
return stats
|
||||
|
||||
host = @getObject vm.$container
|
||||
do (type = host.type) =>
|
||||
if type is 'pool'
|
||||
host = @getObject host.master, 'host'
|
||||
else unless type is 'host'
|
||||
throw new Error "unexpected type: got #{type} instead of host"
|
||||
|
||||
{body} = response = yield got(
|
||||
"https://#{host.address}/vm_rrd?session_id=#{xapi.sessionId}&uuid=#{vm.id}",
|
||||
{ rejectUnauthorized: false }
|
||||
)
|
||||
|
||||
if response.statusCode isnt 200
|
||||
throw new Error('Cannot fetch the RRDs')
|
||||
|
||||
json = parseXml(body)
|
||||
# Find index of needed objects for getting their values after
|
||||
cpusIndexes = []
|
||||
vifsIndexes = []
|
||||
xvdsIndexes = []
|
||||
memoryFreeIndex = []
|
||||
memoryIndex = []
|
||||
index = 0
|
||||
|
||||
$forEach(json.rrd.ds, (value, i) ->
|
||||
if /^cpu[0-9]+$/.test(value.name)
|
||||
cpusIndexes.push(i)
|
||||
else if startsWith(value.name, 'vif_') && endsWith(value.name, '_tx')
|
||||
vifsIndexes.push(i)
|
||||
else if startsWith(value.name, 'vif_') && endsWith(value.name, '_rx')
|
||||
vifsIndexes.push(i)
|
||||
else if startsWith(value.name, 'vbd_xvd') && endsWith(value.name, '_write', 14)
|
||||
xvdsIndexes.push(i)
|
||||
else if startsWith(value.name, 'vbd_xvd') && endsWith(value.name, '_read', 13)
|
||||
xvdsIndexes.push(i)
|
||||
else if startsWith(value.name, 'memory_internal_free')
|
||||
memoryFreeIndex.push(i)
|
||||
else if endsWith(value.name, 'memory')
|
||||
memoryIndex.push(i)
|
||||
|
||||
return
|
||||
)
|
||||
|
||||
memoryFree = []
|
||||
memoryUsed = []
|
||||
memory = []
|
||||
cpus = []
|
||||
vifs = []
|
||||
xvds = []
|
||||
date = []
|
||||
archive = json.rrd.rra[granularity]
|
||||
dateStep = json.rrd.step * archive.pdp_per_row
|
||||
baseDate = json.rrd.lastupdate - (json.rrd.lastupdate % dateStep)
|
||||
numStep = archive.database.row.length - 1
|
||||
|
||||
$forEach archive.database.row, (n, key) ->
|
||||
# WARNING! memoryFree is in Kb not in b, memory is in b
|
||||
memoryFree.push(n.v[memoryFreeIndex]*1024)
|
||||
memoryUsed.push(Math.round(parseInt(n.v[memoryIndex])-(n.v[memoryFreeIndex]*1024)))
|
||||
memory.push(parseInt(n.v[memoryIndex]))
|
||||
date.push(baseDate - (dateStep * (numStep - key)))
|
||||
# build the multi dimensional arrays
|
||||
$forEach cpusIndexes, (value, key) ->
|
||||
cpus[key] ?= []
|
||||
cpus[key].push(n.v[value]*100)
|
||||
return
|
||||
$forEach vifsIndexes, (value, key) ->
|
||||
vifs[key] ?= []
|
||||
vifs[key].push(if n.v[value] == 'NaN' then null else n.v[value]) # * (if key % 2 then -1 else 1))
|
||||
return
|
||||
$forEach xvdsIndexes, (value, key) ->
|
||||
xvds[key] ?= []
|
||||
xvds[key].push(if n.v[value] == 'NaN' then null else n.v[value]) # * (if key % 2 then -1 else 1))
|
||||
return
|
||||
return
|
||||
|
||||
|
||||
# the final object
|
||||
return {
|
||||
memoryFree: memoryFree
|
||||
memoryUsed: memoryUsed
|
||||
memory: memory
|
||||
date: date
|
||||
cpus: cpus
|
||||
vifs: vifs
|
||||
xvds: xvds
|
||||
}
|
||||
stats.description = 'returns statistics about the VM'
|
||||
|
||||
stats.params = {
|
||||
id: { type: 'string' }
|
||||
id: { type: 'string' },
|
||||
granularity: {
|
||||
type: 'string',
|
||||
optional: true
|
||||
}
|
||||
}
|
||||
|
||||
stats.resolve = {
|
||||
@@ -879,25 +992,83 @@ exports.stats = stats;
|
||||
|
||||
#---------------------------------------------------------------------
|
||||
|
||||
# TODO: rename to setBootOrder
|
||||
# TODO: check current VM is HVM
|
||||
bootOrder = $coroutine ({vm, order}) ->
|
||||
xapi = @getXAPI vm
|
||||
setBootOrder = $coroutine ({vm, order}) ->
|
||||
xapi = @getXapi vm
|
||||
|
||||
order = {order: order}
|
||||
if vm.virtualizationMode == 'hvm'
|
||||
yield xapi.call 'VM.set_HVM_boot_params', vm._xapiRef, order
|
||||
return true
|
||||
|
||||
yield xapi.call 'VM.set_HVM_boot_params', vm.ref, order
|
||||
@throw(
|
||||
'INVALID_PARAMS'
|
||||
'You can only set the boot order on a HVM guest'
|
||||
)
|
||||
|
||||
return true
|
||||
|
||||
|
||||
bootOrder.params = {
|
||||
setBootOrder.params = {
|
||||
vm: { type: 'string' },
|
||||
order: { type: 'string' }
|
||||
}
|
||||
|
||||
bootOrder.resolve = {
|
||||
setBootOrder.resolve = {
|
||||
vm: ['vm', 'VM', 'operate'],
|
||||
}
|
||||
exports.bootOrder = bootOrder
|
||||
exports.setBootOrder = setBootOrder
|
||||
|
||||
#---------------------------------------------------------------------
|
||||
|
||||
recoveryStart = ({vm}) ->
|
||||
return @getXapi(vm).startVmOnCd(vm._xapiId)
|
||||
|
||||
recoveryStart.params = {
|
||||
id: { type: 'string' }
|
||||
}
|
||||
|
||||
recoveryStart.resolve = {
|
||||
vm: ['id', 'VM', 'operate'],
|
||||
}
|
||||
exports.recoveryStart = recoveryStart
|
||||
|
||||
#---------------------------------------------------------------------
|
||||
|
||||
getCloudInitConfig = $coroutine ({template}) ->
|
||||
return yield @getXapi(template).getCloudInitConfig(template._xapiId)
|
||||
|
||||
getCloudInitConfig.params = {
|
||||
template: { type: 'string' }
|
||||
}
|
||||
|
||||
getCloudInitConfig.resolve = {
|
||||
template: ['template', 'VM-template', 'administrate'],
|
||||
}
|
||||
exports.getCloudInitConfig = getCloudInitConfig
|
||||
|
||||
#---------------------------------------------------------------------
|
||||
|
||||
createCloudInitConfigDrive = $coroutine ({vm, sr, config, coreos}) ->
|
||||
xapi = @getXapi vm
|
||||
# CoreOS is a special CloudConfig drive created by XS plugin
|
||||
if coreos
|
||||
yield xapi.createCoreOsCloudInitConfigDrive(vm._xapiId, sr._xapiId, config)
|
||||
# use generic Cloud Init drive
|
||||
else
|
||||
yield xapi.createCloudInitConfigDrive(vm._xapiId, sr._xapiId, config)
|
||||
return true
|
||||
|
||||
createCloudInitConfigDrive.params = {
|
||||
vm: { type: 'string' },
|
||||
sr: { type: 'string' },
|
||||
config: { type: 'string' }
|
||||
}
|
||||
|
||||
createCloudInitConfigDrive.resolve = {
|
||||
vm: ['vm', 'VM', 'administrate'],
|
||||
sr: [ 'sr', 'SR', 'operate' ]
|
||||
}
|
||||
exports.createCloudInitConfigDrive = createCloudInitConfigDrive
|
||||
|
||||
#=====================================================================
|
||||
|
||||
Object.defineProperty(exports, '__esModule', {
|
||||
value: true
|
||||
})
|
||||
|
||||
@@ -1,14 +1,15 @@
|
||||
import Bluebird from 'bluebird'
|
||||
import Collection, {ModelAlreadyExists} from '../collection'
|
||||
import difference from 'lodash.difference'
|
||||
import filter from 'lodash.filter'
|
||||
import forEach from 'lodash.foreach'
|
||||
import getKey from 'lodash.keys'
|
||||
import isEmpty from 'lodash.isempty'
|
||||
import map from 'lodash.map'
|
||||
import {createClient as createRedisClient, RedisClient, Multi} from 'redis'
|
||||
|
||||
import {promisifyAll} from '../utils'
|
||||
import {
|
||||
forEach,
|
||||
isEmpty,
|
||||
mapToArray,
|
||||
promisifyAll
|
||||
} from '../utils'
|
||||
|
||||
// ===================================================================
|
||||
|
||||
@@ -53,7 +54,7 @@ export default class Redis extends Collection {
|
||||
const {redis} = this
|
||||
|
||||
const models = []
|
||||
return Bluebird.map(ids, id => {
|
||||
return Promise.all(mapToArray(ids, id => {
|
||||
return redis.hgetallAsync(prefix + id).then(model => {
|
||||
// If empty, consider it a no match.
|
||||
if (isEmpty(model)) {
|
||||
@@ -65,7 +66,7 @@ export default class Redis extends Collection {
|
||||
|
||||
models.push(model)
|
||||
})
|
||||
}).return(models)
|
||||
})).then(() => models)
|
||||
}
|
||||
|
||||
_add (models, {replace = false} = {}) {
|
||||
@@ -74,7 +75,7 @@ export default class Redis extends Collection {
|
||||
|
||||
const {indexes, prefix, redis, idPrefix = ''} = this
|
||||
|
||||
return Bluebird.map(models, async function (model) {
|
||||
return Promise.all(mapToArray(models, async model => {
|
||||
// Generate a new identifier if necessary.
|
||||
if (model.id === undefined) {
|
||||
model.id = idPrefix + String(await redis.incrAsync(prefix + '_id'))
|
||||
@@ -99,8 +100,10 @@ export default class Redis extends Collection {
|
||||
params.push(name, value)
|
||||
})
|
||||
|
||||
const key = `${prefix}:${model.id}`
|
||||
const promises = [
|
||||
redis.hmsetAsync(prefix + ':' + model.id, ...params)
|
||||
redis.delAsync(key),
|
||||
redis.hmsetAsync(key, ...params)
|
||||
]
|
||||
|
||||
// Update indexes.
|
||||
@@ -117,7 +120,7 @@ export default class Redis extends Collection {
|
||||
await Promise.all(promises)
|
||||
|
||||
return model
|
||||
})
|
||||
}))
|
||||
}
|
||||
|
||||
_get (properties) {
|
||||
@@ -146,7 +149,7 @@ export default class Redis extends Collection {
|
||||
throw new Error('fields not indexed: ' + unfit.join())
|
||||
}
|
||||
|
||||
const keys = map(properties, (value, index) => prefix + '_' + index + ':' + value)
|
||||
const keys = mapToArray(properties, (value, index) => `${prefix}_${index}:${value}`)
|
||||
return redis.sinterAsync(...keys).then(ids => this._extract(ids))
|
||||
}
|
||||
|
||||
@@ -160,7 +163,7 @@ export default class Redis extends Collection {
|
||||
redis.sremAsync(prefix + '_ids', ...ids),
|
||||
|
||||
// Remove the models.
|
||||
redis.delAsync(map(ids, id => prefix + ':' + id))
|
||||
redis.delAsync(mapToArray(ids, id => `${prefix}:${id}`))
|
||||
])
|
||||
}
|
||||
|
||||
|
||||
@@ -1,8 +1,6 @@
|
||||
import {EventEmitter} from 'events'
|
||||
|
||||
// ===================================================================
|
||||
|
||||
// const noop = () => {}
|
||||
import {createRawObject, noop} from './utils'
|
||||
|
||||
// ===================================================================
|
||||
|
||||
@@ -10,13 +8,13 @@ export default class Connection extends EventEmitter {
|
||||
constructor () {
|
||||
super()
|
||||
|
||||
this._data = Object.create(null)
|
||||
this._data = createRawObject()
|
||||
}
|
||||
|
||||
// Close the connection.
|
||||
close () {
|
||||
// Prevent errors when the connection is closed more than once.
|
||||
// this.close = noop
|
||||
this.close = noop
|
||||
|
||||
this.emit('close')
|
||||
}
|
||||
|
||||
@@ -1,69 +1,83 @@
|
||||
import bind from 'lodash.bind'
|
||||
import isArray from 'lodash.isarray'
|
||||
import isFunction from 'lodash.isfunction'
|
||||
|
||||
import {
|
||||
isPromise,
|
||||
noop,
|
||||
pFinally
|
||||
} from './utils'
|
||||
|
||||
// ===================================================================
|
||||
|
||||
const {defineProperty} = Object
|
||||
const {
|
||||
defineProperties,
|
||||
defineProperty,
|
||||
getOwnPropertyDescriptor
|
||||
} = Object
|
||||
|
||||
// ===================================================================
|
||||
|
||||
// See: https://github.com/jayphelps/core-decorators.js#autobind
|
||||
export function autobind (target, key, {
|
||||
//
|
||||
// TODO: make it work for all class methods.
|
||||
export const autobind = (target, key, {
|
||||
configurable,
|
||||
enumerable,
|
||||
value: fn,
|
||||
writable
|
||||
}) {
|
||||
return {
|
||||
configurable,
|
||||
enumerable,
|
||||
}) => ({
|
||||
configurable,
|
||||
enumerable,
|
||||
|
||||
get () {
|
||||
const bounded = bind(fn, this)
|
||||
get () {
|
||||
const bounded = bind(fn, this)
|
||||
|
||||
defineProperty(this, key, {
|
||||
configurable: true,
|
||||
enumerable: false,
|
||||
value: bounded,
|
||||
writable: true
|
||||
})
|
||||
|
||||
return bounded
|
||||
},
|
||||
set (newValue) {
|
||||
if (this === target) {
|
||||
// New value directly set on the prototype.
|
||||
delete this[key]
|
||||
this[key] = newValue
|
||||
} else {
|
||||
// New value set on a child object.
|
||||
|
||||
// Cannot use assignment because it will call the setter on
|
||||
// the prototype.
|
||||
defineProperty(this, key, {
|
||||
configurable: true,
|
||||
enumerable: false,
|
||||
value: bounded,
|
||||
enumerable: true,
|
||||
value: newValue,
|
||||
writable: true
|
||||
})
|
||||
|
||||
return bounded
|
||||
},
|
||||
set (newValue) {
|
||||
if (this === target) {
|
||||
// New value directly set on the prototype.
|
||||
delete this[key]
|
||||
this[key] = newValue
|
||||
} else {
|
||||
// New value set on a child object.
|
||||
|
||||
// Cannot use assignment because it will call the setter on
|
||||
// the prototype.
|
||||
defineProperty(this, key, {
|
||||
configurable: true,
|
||||
enumerable: true,
|
||||
value: newValue,
|
||||
writable: true
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
// Debounce decorator for methods.
|
||||
//
|
||||
// See: https://github.com/wycats/javascript-decorators
|
||||
export const debounce = (duration) => (target, name, descriptor) => {
|
||||
const {value: fn} = descriptor
|
||||
//
|
||||
// TODO: make it work for single functions.
|
||||
export const debounce = duration => (target, name, descriptor) => {
|
||||
const fn = descriptor.value
|
||||
|
||||
// This symbol is used to store the related data directly on the
|
||||
// current object.
|
||||
const s = Symbol()
|
||||
|
||||
function debounced () {
|
||||
let data = this[s] || (this[s] = {
|
||||
const data = this[s] || (this[s] = {
|
||||
lastCall: 0,
|
||||
wrapper: null
|
||||
})
|
||||
@@ -80,8 +94,233 @@ export const debounce = (duration) => (target, name, descriptor) => {
|
||||
}
|
||||
return data.wrapper()
|
||||
}
|
||||
debounced.reset = (obj) => { delete obj[s] }
|
||||
debounced.reset = obj => { delete obj[s] }
|
||||
|
||||
descriptor.value = debounced
|
||||
return descriptor
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
const _push = Array.prototype.push
|
||||
|
||||
export const deferrable = (target, name, descriptor) => {
|
||||
let fn
|
||||
function newFn () {
|
||||
const deferreds = []
|
||||
const defer = fn => {
|
||||
deferreds.push(fn)
|
||||
}
|
||||
defer.clear = () => {
|
||||
deferreds.length = 0
|
||||
}
|
||||
|
||||
const args = [ defer ]
|
||||
_push.apply(args, arguments)
|
||||
|
||||
let executeDeferreds = () => {
|
||||
let i = deferreds.length
|
||||
while (i) {
|
||||
deferreds[--i]()
|
||||
}
|
||||
}
|
||||
|
||||
try {
|
||||
const result = fn.apply(this, args)
|
||||
|
||||
if (isPromise(result)) {
|
||||
result::pFinally(executeDeferreds)
|
||||
|
||||
// Do not execute the deferreds in the finally block.
|
||||
executeDeferreds = noop
|
||||
}
|
||||
|
||||
return result
|
||||
} finally {
|
||||
executeDeferreds()
|
||||
}
|
||||
}
|
||||
|
||||
if (descriptor) {
|
||||
fn = descriptor.value
|
||||
descriptor.value = newFn
|
||||
|
||||
return descriptor
|
||||
}
|
||||
|
||||
fn = target
|
||||
return newFn
|
||||
}
|
||||
|
||||
// Deferred functions are only executed on failures.
|
||||
//
|
||||
// i.e.: defer.clear() is automatically called in case of success.
|
||||
deferrable.onFailure = (target, name, descriptor) => {
|
||||
let fn
|
||||
function newFn (defer) {
|
||||
const result = fn.apply(this, arguments)
|
||||
|
||||
return isPromise(result)
|
||||
? result.then(result => {
|
||||
defer.clear()
|
||||
return result
|
||||
})
|
||||
: (defer.clear(), result)
|
||||
}
|
||||
|
||||
if (descriptor) {
|
||||
fn = descriptor.value
|
||||
descriptor.value = newFn
|
||||
} else {
|
||||
fn = target
|
||||
target = newFn
|
||||
}
|
||||
|
||||
return deferrable(target, name, descriptor)
|
||||
}
|
||||
|
||||
// Deferred functions are only executed on success.
|
||||
//
|
||||
// i.e.: defer.clear() is automatically called in case of failure.
|
||||
deferrable.onSuccess = (target, name, descriptor) => {
|
||||
let fn
|
||||
function newFn (defer) {
|
||||
try {
|
||||
const result = fn.apply(this, arguments)
|
||||
|
||||
return isPromise(result)
|
||||
? result.then(null, error => {
|
||||
defer.clear()
|
||||
throw error
|
||||
})
|
||||
: result
|
||||
} catch (error) {
|
||||
defer.clear()
|
||||
throw error
|
||||
}
|
||||
}
|
||||
|
||||
if (descriptor) {
|
||||
fn = descriptor.value
|
||||
descriptor.value = newFn
|
||||
} else {
|
||||
fn = target
|
||||
target = newFn
|
||||
}
|
||||
|
||||
return deferrable(target, name, descriptor)
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
const _ownKeys = (
|
||||
typeof Reflect !== 'undefined' && Reflect.ownKeys ||
|
||||
(({
|
||||
getOwnPropertyNames: names,
|
||||
getOwnPropertySymbols: symbols
|
||||
}) => symbols
|
||||
? obj => names(obj).concat(symbols(obj))
|
||||
: names
|
||||
)(Object)
|
||||
)
|
||||
|
||||
const _bindPropertyDescriptor = (descriptor, thisArg) => {
|
||||
const { get, set, value } = descriptor
|
||||
if (get) {
|
||||
descriptor.get = bind(get, thisArg)
|
||||
}
|
||||
if (set) {
|
||||
descriptor.set = bind(set, thisArg)
|
||||
}
|
||||
|
||||
if (isFunction(value)) {
|
||||
descriptor.value = bind(value, thisArg)
|
||||
}
|
||||
|
||||
return descriptor
|
||||
}
|
||||
|
||||
const _isIgnoredProperty = name => (
|
||||
name[0] === '_' ||
|
||||
name === 'constructor'
|
||||
)
|
||||
|
||||
const _IGNORED_STATIC_PROPERTIES = {
|
||||
__proto__: null,
|
||||
|
||||
arguments: true,
|
||||
caller: true,
|
||||
length: true,
|
||||
name: true,
|
||||
prototype: true
|
||||
}
|
||||
const _isIgnoredStaticProperty = name => _IGNORED_STATIC_PROPERTIES[name]
|
||||
|
||||
export const mixin = MixIns => Class => {
|
||||
if (!isArray(MixIns)) {
|
||||
MixIns = [ MixIns ]
|
||||
}
|
||||
|
||||
const { name } = Class
|
||||
|
||||
const Decorator = (...args) => {
|
||||
const instance = new Class(...args)
|
||||
|
||||
for (const MixIn of MixIns) {
|
||||
const { prototype } = MixIn
|
||||
const mixinInstance = new MixIn(instance)
|
||||
const descriptors = { __proto__: null }
|
||||
for (const prop of _ownKeys(prototype)) {
|
||||
if (_isIgnoredProperty(prop)) {
|
||||
continue
|
||||
}
|
||||
|
||||
if (prop in instance) {
|
||||
throw new Error(`${name}#${prop} is already defined`)
|
||||
}
|
||||
|
||||
descriptors[prop] = _bindPropertyDescriptor(
|
||||
getOwnPropertyDescriptor(prototype, prop),
|
||||
mixinInstance
|
||||
)
|
||||
}
|
||||
defineProperties(instance, descriptors)
|
||||
}
|
||||
|
||||
return instance
|
||||
}
|
||||
|
||||
// Copy original and mixed-in static properties on Decorator class.
|
||||
const descriptors = { __proto__: null }
|
||||
for (const prop of _ownKeys(Class)) {
|
||||
let descriptor
|
||||
if (!(
|
||||
// Special properties are not defined...
|
||||
_isIgnoredStaticProperty(prop) &&
|
||||
|
||||
// if they already exist...
|
||||
(descriptor = getOwnPropertyDescriptor(Decorator, prop)) &&
|
||||
|
||||
// and are not configurable.
|
||||
!descriptor.configurable
|
||||
)) {
|
||||
descriptors[prop] = getOwnPropertyDescriptor(Class, prop)
|
||||
}
|
||||
}
|
||||
for (const MixIn of MixIns) {
|
||||
for (const prop of _ownKeys(MixIn)) {
|
||||
if (_isIgnoredStaticProperty(prop)) {
|
||||
continue
|
||||
}
|
||||
|
||||
if (prop in descriptors) {
|
||||
throw new Error(`${name}.${prop} is already defined`)
|
||||
}
|
||||
|
||||
descriptors[prop] = getOwnPropertyDescriptor(MixIn, prop)
|
||||
}
|
||||
}
|
||||
defineProperties(Decorator, descriptors)
|
||||
|
||||
return Decorator
|
||||
}
|
||||
|
||||
@@ -4,11 +4,11 @@ import expect from 'must'
|
||||
|
||||
// ===================================================================
|
||||
|
||||
import {autobind, debounce} from './decorators'
|
||||
import {autobind, debounce, deferrable} from './decorators'
|
||||
|
||||
// ===================================================================
|
||||
|
||||
describe('autobind', function () {
|
||||
describe('autobind()', () => {
|
||||
class Foo {
|
||||
@autobind
|
||||
getFoo () {
|
||||
@@ -16,25 +16,25 @@ describe('autobind', function () {
|
||||
}
|
||||
}
|
||||
|
||||
it('returns a bound instance for a method', function () {
|
||||
it('returns a bound instance for a method', () => {
|
||||
const foo = new Foo()
|
||||
const {getFoo} = foo
|
||||
const { getFoo } = foo
|
||||
|
||||
expect(getFoo()).to.equal(foo)
|
||||
})
|
||||
|
||||
it('returns the same bound instance each time', function () {
|
||||
it('returns the same bound instance each time', () => {
|
||||
const foo = new Foo()
|
||||
|
||||
expect(foo.getFoo).to.equal(foo.getFoo)
|
||||
})
|
||||
|
||||
it('works with multiple instances of the same class', function () {
|
||||
it('works with multiple instances of the same class', () => {
|
||||
const foo1 = new Foo()
|
||||
const foo2 = new Foo()
|
||||
|
||||
const {getFoo: getFoo1} = foo1
|
||||
const {getFoo: getFoo2} = foo2
|
||||
const getFoo1 = foo1.getFoo
|
||||
const getFoo2 = foo2.getFoo
|
||||
|
||||
expect(getFoo1()).to.equal(foo1)
|
||||
expect(getFoo2()).to.equal(foo2)
|
||||
@@ -43,7 +43,7 @@ describe('autobind', function () {
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
describe('debounce', function () {
|
||||
describe('debounce()', () => {
|
||||
let i
|
||||
|
||||
class Foo {
|
||||
@@ -53,11 +53,11 @@ describe('debounce', function () {
|
||||
}
|
||||
}
|
||||
|
||||
beforeEach(function () {
|
||||
beforeEach(() => {
|
||||
i = 0
|
||||
})
|
||||
|
||||
it('works', function (done) {
|
||||
it('works', done => {
|
||||
const foo = new Foo()
|
||||
|
||||
expect(i).to.equal(0)
|
||||
@@ -68,7 +68,7 @@ describe('debounce', function () {
|
||||
foo.foo()
|
||||
expect(i).to.equal(1)
|
||||
|
||||
setTimeout(function () {
|
||||
setTimeout(() => {
|
||||
foo.foo()
|
||||
expect(i).to.equal(2)
|
||||
|
||||
@@ -76,3 +76,98 @@ describe('debounce', function () {
|
||||
}, 2e1)
|
||||
})
|
||||
})
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
describe('deferrable()', () => {
|
||||
it('works with normal termination', () => {
|
||||
let i = 0
|
||||
const fn = deferrable(defer => {
|
||||
i += 2
|
||||
defer(() => { i -= 2 })
|
||||
|
||||
i *= 2
|
||||
defer(() => { i /= 2 })
|
||||
|
||||
return i
|
||||
})
|
||||
|
||||
expect(fn()).to.equal(4)
|
||||
expect(i).to.equal(0)
|
||||
})
|
||||
|
||||
it('defer.clear() removes previous deferreds', () => {
|
||||
let i = 0
|
||||
const fn = deferrable(defer => {
|
||||
i += 2
|
||||
defer(() => { i -= 2 })
|
||||
|
||||
defer.clear()
|
||||
|
||||
i *= 2
|
||||
defer(() => { i /= 2 })
|
||||
|
||||
return i
|
||||
})
|
||||
|
||||
expect(fn()).to.equal(4)
|
||||
expect(i).to.equal(2)
|
||||
})
|
||||
|
||||
it('works with exception', () => {
|
||||
let i = 0
|
||||
const fn = deferrable(defer => {
|
||||
i += 2
|
||||
defer(() => { i -= 2 })
|
||||
|
||||
i *= 2
|
||||
defer(() => { i /= 2 })
|
||||
|
||||
throw i
|
||||
})
|
||||
|
||||
expect(() => fn()).to.throw(4)
|
||||
expect(i).to.equal(0)
|
||||
})
|
||||
|
||||
it('works with promise resolution', async () => {
|
||||
let i = 0
|
||||
const fn = deferrable(async defer => {
|
||||
i += 2
|
||||
defer(() => { i -= 2 })
|
||||
|
||||
i *= 2
|
||||
defer(() => { i /= 2 })
|
||||
|
||||
// Wait a turn of the events loop.
|
||||
await Promise.resolve()
|
||||
|
||||
return i
|
||||
})
|
||||
|
||||
await expect(fn()).to.eventually.equal(4)
|
||||
expect(i).to.equal(0)
|
||||
})
|
||||
|
||||
it('works with promise rejection', async () => {
|
||||
let i = 0
|
||||
const fn = deferrable(async defer => {
|
||||
// Wait a turn of the events loop.
|
||||
await Promise.resolve()
|
||||
|
||||
i += 2
|
||||
defer(() => { i -= 2 })
|
||||
|
||||
i *= 2
|
||||
defer(() => { i /= 2 })
|
||||
|
||||
// Wait a turn of the events loop.
|
||||
await Promise.resolve()
|
||||
|
||||
throw i
|
||||
})
|
||||
|
||||
await expect(fn()).to.reject.to.equal(4)
|
||||
expect(i).to.equal(0)
|
||||
})
|
||||
})
|
||||
|
||||
84
src/fatfs-buffer.js
Normal file
84
src/fatfs-buffer.js
Normal file
@@ -0,0 +1,84 @@
|
||||
// Buffer driver for [fatfs](https://github.com/natevw/fatfs).
|
||||
//
|
||||
// Usage:
|
||||
//
|
||||
// ```js
|
||||
// import fatfs from 'fatfs'
|
||||
// import fatfsBuffer, { init as fatfsBufferInit } from './fatfs-buffer'
|
||||
//
|
||||
// const buffer = fatfsBufferinit()
|
||||
//
|
||||
// const fs = fatfs.createFileSystem(fatfsBuffer(buffer))
|
||||
//
|
||||
// fs.writeFile('/foo', 'content of foo', function (err, content) {
|
||||
// if (err) {
|
||||
// console.error(err)
|
||||
// }
|
||||
// })
|
||||
|
||||
import { boot16 as fat16 } from 'fatfs/structs'
|
||||
|
||||
const SECTOR_SIZE = 512
|
||||
|
||||
// Creates a 10MB buffer and initializes it as a FAT 16 volume.
|
||||
export function init () {
|
||||
const buf = new Buffer(10 * 1024 * 1024) // 10MB
|
||||
buf.fill(0)
|
||||
|
||||
// https://github.com/natevw/fatfs/blob/master/structs.js
|
||||
fat16.pack({
|
||||
jmpBoot: new Buffer('eb3c90', 'hex'),
|
||||
OEMName: 'mkfs.fat',
|
||||
BytsPerSec: SECTOR_SIZE,
|
||||
SecPerClus: 4,
|
||||
ResvdSecCnt: 1,
|
||||
NumFATs: 2,
|
||||
RootEntCnt: 512,
|
||||
TotSec16: 20480,
|
||||
Media: 248,
|
||||
FATSz16: 20,
|
||||
SecPerTrk: 32,
|
||||
NumHeads: 64,
|
||||
HiddSec: 0,
|
||||
TotSec32: 0,
|
||||
DrvNum: 128,
|
||||
Reserved1: 0,
|
||||
BootSig: 41,
|
||||
VolID: 895111106,
|
||||
VolLab: 'NO NAME ',
|
||||
FilSysType: 'FAT16 '
|
||||
}, buf)
|
||||
|
||||
// End of sector.
|
||||
buf[0x1fe] = 0x55
|
||||
buf[0x1ff] = 0xaa
|
||||
|
||||
// Mark sector as reserved.
|
||||
buf[0x200] = 0xf8
|
||||
buf[0x201] = 0xff
|
||||
buf[0x202] = 0xff
|
||||
buf[0x203] = 0xff
|
||||
|
||||
// Mark sector as reserved.
|
||||
buf[0x2a00] = 0xf8
|
||||
buf[0x2a01] = 0xff
|
||||
buf[0x2a02] = 0xff
|
||||
buf[0x2a03] = 0xff
|
||||
|
||||
return buf
|
||||
}
|
||||
|
||||
export default buffer => {
|
||||
return {
|
||||
sectorSize: SECTOR_SIZE,
|
||||
numSectors: Math.floor(buffer.length / SECTOR_SIZE),
|
||||
readSectors: (i, target, cb) => {
|
||||
buffer.copy(target, 0, i * SECTOR_SIZE)
|
||||
cb()
|
||||
},
|
||||
writeSectors: (i, source, cb) => {
|
||||
source.copy(buffer, i * SECTOR_SIZE, 0)
|
||||
cb()
|
||||
}
|
||||
}
|
||||
}
|
||||
54
src/glob-matcher.js
Normal file
54
src/glob-matcher.js
Normal file
@@ -0,0 +1,54 @@
|
||||
// See: https://gist.github.com/julien-f/5b9a3537eb82a34b04e2
|
||||
|
||||
var matcher = require('micromatch').matcher
|
||||
|
||||
module.exports = function globMatcher (patterns, opts) {
|
||||
if (!Array.isArray(patterns)) {
|
||||
if (patterns[0] === '!') {
|
||||
var m = matcher(patterns.slice(1), opts)
|
||||
return function (string) {
|
||||
return !m(string)
|
||||
}
|
||||
} else {
|
||||
return matcher(patterns, opts)
|
||||
}
|
||||
}
|
||||
|
||||
var noneMustMatch = []
|
||||
var anyMustMatch = []
|
||||
|
||||
// TODO: could probably be optimized by combining all positive patterns (and all negative patterns) as a single matcher.
|
||||
for (var i = 0, n = patterns.length; i < n; ++i) {
|
||||
var pattern = patterns[i]
|
||||
if (pattern[0] === '!') {
|
||||
noneMustMatch.push(matcher(pattern.slice(1), opts))
|
||||
} else {
|
||||
anyMustMatch.push(matcher(pattern, opts))
|
||||
}
|
||||
}
|
||||
|
||||
var nNone = noneMustMatch.length
|
||||
var nAny = anyMustMatch.length
|
||||
|
||||
return function (string) {
|
||||
var i
|
||||
|
||||
for (i = 0; i < nNone; ++i) {
|
||||
if (noneMustMatch[i](string)) {
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
if (nAny === 0) {
|
||||
return true
|
||||
}
|
||||
|
||||
for (i = 0; i < nAny; ++i) {
|
||||
if (anyMustMatch[i](string)) {
|
||||
return true
|
||||
}
|
||||
}
|
||||
|
||||
return false
|
||||
}
|
||||
}
|
||||
121
src/http-request.js
Normal file
121
src/http-request.js
Normal file
@@ -0,0 +1,121 @@
|
||||
import assign from 'lodash.assign'
|
||||
import getStream from 'get-stream'
|
||||
import isString from 'lodash.isstring'
|
||||
import startsWith from 'lodash.startswith'
|
||||
import { parse as parseUrl } from 'url'
|
||||
import { request as httpRequest } from 'http'
|
||||
import { request as httpsRequest } from 'https'
|
||||
import { stringify as formatQueryString } from 'querystring'
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
// Performs an HTTP(S) request and returns a promise for the response.
//
// Each argument may be a URL string (parsed with `url.parse`) or an options
// object; they are merged left to right into a single options object
// understood by `http(s).request`, plus the extra properties `body`
// (string or stream), `query` (string or object) and `headers`.
//
// The returned promise resolves with the `IncomingMessage` (augmented with
// `cancel()`, `readAll()` and, when known, `length`) for 2xx responses and
// rejects otherwise; it also exposes `cancel()`, `readAll()` and `request`.
export default (...args) => {
  // Shared with the `.then()` handler and the `cancel()` helpers below,
  // hence declared outside the executor.
  let req

  const pResponse = new Promise((resolve, reject) => {
    // Merge all arguments into a single options object; strings are
    // treated as URLs and parsed first.
    const opts = {}
    for (let i = 0, { length } = args; i < length; ++i) {
      const arg = args[i]
      assign(opts, isString(arg) ? parseUrl(arg) : arg)
    }

    // `headers` is shallow-copied so that the header tweaks below do not
    // mutate the caller's object.
    const {
      body,
      headers: { ...headers } = {},
      protocol,
      query,
      ...rest
    } = opts

    // Compute `content-length` when not provided: from the string body,
    // from the body's own `content-length` header (e.g. a piped response),
    // or from its `length` property.
    if (headers['content-length'] == null && body != null) {
      let tmp
      if (isString(body)) {
        headers['content-length'] = Buffer.byteLength(body)
      } else if (
        (
          (tmp = body.headers) &&
          (tmp = tmp['content-length']) != null
        ) ||
        (tmp = body.length) != null
      ) {
        headers['content-length'] = tmp
      }
    }

    // Append the query string to the path (object queries are formatted
    // with `querystring.stringify`).
    if (query) {
      rest.path = `${rest.pathname || rest.path || '/'}?${
        isString(query)
          ? query
          : formatQueryString(query)
      }`
    }

    // Some headers can be explicitly removed by setting them to null.
    // They are deleted from `headers` here and removed from the request
    // (including Node's defaults) via `removeHeader()` after creation.
    const headersToRemove = []
    for (const header in headers) {
      if (headers[header] === null) {
        delete headers[header]
        headersToRemove.push(header)
      }
    }

    // Pick the transport from the protocol (defaults to plain HTTP).
    req = (
      protocol && startsWith(protocol.toLowerCase(), 'https')
        ? httpsRequest
        : httpRequest
    )({
      ...rest,
      headers
    })

    for (let i = 0, { length } = headersToRemove; i < length; ++i) {
      req.removeHeader(headersToRemove[i])
    }

    // Send the body: streams are piped, anything else is written directly;
    // without a body the request is simply finalized.
    if (body) {
      if (typeof body.pipe === 'function') {
        body.pipe(req)
      } else {
        req.end(body)
      }
    } else {
      req.end()
    }
    req.on('error', reject)
    req.once('response', resolve)
  }).then(response => {
    // Augment the response with helpers mirroring the promise's API.
    response.cancel = () => {
      req.abort()
    }
    response.readAll = () => getStream(response)

    // Expose the advertised body length when the server sent one.
    // NOTE(review): this is the raw header value (a string) — confirm
    // callers do not expect a number.
    const length = response.headers['content-length']
    if (length) {
      response.length = length
    }

    // Non-2xx responses are turned into rejections; the response stays
    // reachable via the (non-enumerable-safe) `response` property.
    const code = response.statusCode
    if (code < 200 || code >= 300) {
      const error = new Error(response.statusMessage)
      error.code = code
      Object.defineProperty(error, 'response', {
        configurable: true,
        value: response,
        writable: true
      })

      throw error
    }

    return response
  })

  // Cancel before the response arrived: emit an error so the pending
  // promise rejects, then abort the underlying request.
  pResponse.cancel = () => {
    req.emit('error', new Error('HTTP request canceled!'))
    req.abort()
  }
  pResponse.readAll = () => pResponse.then(response => response.readAll())
  pResponse.request = req

  return pResponse
}
|
||||
358
src/index.js
358
src/index.js
@@ -2,23 +2,22 @@ import createLogger from 'debug'
|
||||
const debug = createLogger('xo:main')
|
||||
|
||||
import appConf from 'app-conf'
|
||||
import assign from 'lodash.assign'
|
||||
import bind from 'lodash.bind'
|
||||
import blocked from 'blocked'
|
||||
import createExpress from 'express'
|
||||
import eventToPromise from 'event-to-promise'
|
||||
import forEach from 'lodash.foreach'
|
||||
import has from 'lodash.has'
|
||||
import helmet from 'helmet'
|
||||
import includes from 'lodash.includes'
|
||||
import isArray from 'lodash.isarray'
|
||||
import isFunction from 'lodash.isfunction'
|
||||
import map from 'lodash.map'
|
||||
import pick from 'lodash.pick'
|
||||
import proxyConsole from './proxy-console'
|
||||
import proxyRequest from 'proxy-http-request'
|
||||
import serveStatic from 'serve-static'
|
||||
import startsWith from 'lodash.startswith'
|
||||
import WebSocket from 'ws'
|
||||
import {compile as compileJade} from 'jade'
|
||||
import {posix as posixPath} from 'path'
|
||||
|
||||
import {
|
||||
AlreadyAuthenticated,
|
||||
@@ -28,23 +27,30 @@ import {
|
||||
NotImplemented
|
||||
} from './api-errors'
|
||||
import JsonRpcPeer from 'json-rpc-peer'
|
||||
import {readFile} from 'fs-promise'
|
||||
import {
|
||||
readFile,
|
||||
readdir
|
||||
} from 'fs-promise'
|
||||
|
||||
import * as apiMethods from './api/index'
|
||||
import Api from './api'
|
||||
import JobExecutor from './job-executor'
|
||||
import RemoteHandler from './remote-handler'
|
||||
import Scheduler from './scheduler'
|
||||
import WebServer from 'http-server-plus'
|
||||
import wsProxy from './ws-proxy'
|
||||
import Xo from './xo'
|
||||
import {
|
||||
createRawObject,
|
||||
forEach,
|
||||
mapToArray,
|
||||
pFromCallback
|
||||
} from './utils'
|
||||
|
||||
import bodyParser from 'body-parser'
|
||||
import connectFlash from 'connect-flash'
|
||||
import cookieParser from 'cookie-parser'
|
||||
import expressSession from 'express-session'
|
||||
import passport from 'passport'
|
||||
import {Strategy as LocalStrategy} from 'passport-local'
|
||||
import { parse as parseCookies } from 'cookie'
|
||||
import { Strategy as LocalStrategy } from 'passport-local'
|
||||
|
||||
// ===================================================================
|
||||
|
||||
@@ -58,15 +64,6 @@ const warn = (...args) => {
|
||||
|
||||
// ===================================================================
|
||||
|
||||
const DEFAULTS = {
|
||||
http: {
|
||||
listen: [
|
||||
{ port: 80 }
|
||||
],
|
||||
mounts: {}
|
||||
}
|
||||
}
|
||||
|
||||
const DEPRECATED_ENTRIES = [
|
||||
'users',
|
||||
'servers'
|
||||
@@ -74,7 +71,6 @@ const DEPRECATED_ENTRIES = [
|
||||
|
||||
async function loadConfiguration () {
|
||||
const config = await appConf.load('xo-server', {
|
||||
defaults: DEFAULTS,
|
||||
ignoreUnknownFormats: true
|
||||
})
|
||||
|
||||
@@ -95,6 +91,8 @@ async function loadConfiguration () {
|
||||
function createExpressApp () {
|
||||
const app = createExpress()
|
||||
|
||||
app.use(helmet())
|
||||
|
||||
// Registers the cookie-parser and express-session middlewares,
|
||||
// necessary for connect-flash.
|
||||
app.use(cookieParser())
|
||||
@@ -121,7 +119,7 @@ function createExpressApp () {
|
||||
}
|
||||
|
||||
async function setUpPassport (express, xo) {
|
||||
const strategies = Object.create(null)
|
||||
const strategies = createRawObject()
|
||||
xo.registerPassportStrategy = strategy => {
|
||||
passport.use(strategy)
|
||||
|
||||
@@ -144,9 +142,6 @@ async function setUpPassport (express, xo) {
|
||||
|
||||
const SIGNIN_STRATEGY_RE = /^\/signin\/([^/]+)(\/callback)?(:?\?.*)?$/
|
||||
express.use(async (req, res, next) => {
|
||||
// A relative path is needed to avoid breaking reverse proxies.
|
||||
const basePath = posixPath.relative(req.path, '/')
|
||||
|
||||
const matches = req.url.match(SIGNIN_STRATEGY_RE)
|
||||
|
||||
if (matches) {
|
||||
@@ -157,7 +152,7 @@ async function setUpPassport (express, xo) {
|
||||
|
||||
if (!user) {
|
||||
req.flash('error', info ? info.message : 'Invalid credentials')
|
||||
return res.redirect(`${basePath}/signin`)
|
||||
return res.redirect('/signin')
|
||||
}
|
||||
|
||||
// The cookie will be set in via the next request because some
|
||||
@@ -173,7 +168,7 @@ async function setUpPassport (express, xo) {
|
||||
matches[1] === 'local' && req.body['remember-me'] === 'on'
|
||||
)
|
||||
|
||||
res.redirect(basePath)
|
||||
res.redirect('/')
|
||||
})(req, res, next)
|
||||
}
|
||||
|
||||
@@ -193,10 +188,10 @@ async function setUpPassport (express, xo) {
|
||||
next()
|
||||
} else if (req.cookies.token) {
|
||||
next()
|
||||
} else if (/fontawesome|images|styles/.test(req.url)) {
|
||||
} else if (/favicon|fontawesome|images|styles/.test(req.url)) {
|
||||
next()
|
||||
} else {
|
||||
return res.redirect(`${basePath}/signin`)
|
||||
return res.redirect('/signin')
|
||||
}
|
||||
})
|
||||
|
||||
@@ -207,7 +202,7 @@ async function setUpPassport (express, xo) {
|
||||
const user = await xo.authenticateUser({username, password})
|
||||
done(null, user)
|
||||
} catch (error) {
|
||||
done(error.message)
|
||||
done(null, false, { message: error.message })
|
||||
}
|
||||
}
|
||||
))
|
||||
@@ -215,19 +210,7 @@ async function setUpPassport (express, xo) {
|
||||
|
||||
// ===================================================================
|
||||
|
||||
const debugPlugin = createLogger('xo:plugin')
|
||||
|
||||
async function registerPlugin (pluginConf, pluginName) {
|
||||
debugPlugin('register %s', pluginName)
|
||||
|
||||
const pluginPath = (function (name) {
|
||||
try {
|
||||
return require.resolve('xo-server-' + name)
|
||||
} catch (e) {
|
||||
return require.resolve(name)
|
||||
}
|
||||
})(pluginName)
|
||||
|
||||
async function registerPlugin (pluginPath, pluginName) {
|
||||
const plugin = require(pluginPath)
|
||||
|
||||
// Supports both “normal” CommonJS and Babel's ES2015 modules.
|
||||
@@ -242,36 +225,72 @@ async function registerPlugin (pluginConf, pluginName) {
|
||||
? factory({ xo: this })
|
||||
: factory
|
||||
|
||||
await this._registerPlugin(
|
||||
await this.registerPlugin(
|
||||
pluginName,
|
||||
instance,
|
||||
configurationSchema,
|
||||
pluginConf
|
||||
configurationSchema
|
||||
)
|
||||
}
|
||||
|
||||
function registerPlugins (plugins, xo) {
|
||||
return Promise.all(map(plugins, (conf, name) => {
|
||||
return registerPlugin.call(xo, conf, name).then(
|
||||
() => {
|
||||
debugPlugin(`successfully register ${name}`)
|
||||
},
|
||||
error => {
|
||||
debugPlugin(`failed register ${name}`)
|
||||
debugPlugin(error)
|
||||
}
|
||||
)
|
||||
const debugPlugin = createLogger('xo:plugin')
|
||||
|
||||
function registerPluginWrapper (pluginPath, pluginName) {
|
||||
debugPlugin('register %s', pluginName)
|
||||
|
||||
return registerPlugin.call(this, pluginPath, pluginName).then(
|
||||
() => {
|
||||
debugPlugin(`successfully register ${pluginName}`)
|
||||
},
|
||||
error => {
|
||||
debugPlugin(`failed register ${pluginName}`)
|
||||
debugPlugin(error)
|
||||
}
|
||||
)
|
||||
}
|
||||
|
||||
const PLUGIN_PREFIX = 'xo-server-'
|
||||
const PLUGIN_PREFIX_LENGTH = PLUGIN_PREFIX.length
|
||||
|
||||
async function registerPluginsInPath (path) {
|
||||
const files = await readdir(path).catch(error => {
|
||||
if (error.code === 'ENOENT') {
|
||||
return []
|
||||
}
|
||||
throw error
|
||||
})
|
||||
|
||||
await Promise.all(mapToArray(files, name => {
|
||||
if (startsWith(name, PLUGIN_PREFIX)) {
|
||||
return registerPluginWrapper.call(
|
||||
this,
|
||||
`${path}/${name}`,
|
||||
name.slice(PLUGIN_PREFIX_LENGTH)
|
||||
)
|
||||
}
|
||||
}))
|
||||
}
|
||||
|
||||
async function registerPlugins (xo) {
|
||||
await Promise.all(mapToArray([
|
||||
`${__dirname}/../node_modules/`,
|
||||
'/usr/local/lib/node_modules/'
|
||||
], registerPluginsInPath, xo))
|
||||
}
|
||||
|
||||
// ===================================================================
|
||||
|
||||
async function makeWebServerListen (opts) {
|
||||
// Read certificate and key if necessary.
|
||||
const {certificate, key} = opts
|
||||
if (certificate && key) {
|
||||
[opts.certificate, opts.key] = await Promise.all([
|
||||
readFile(certificate),
|
||||
async function makeWebServerListen ({
|
||||
certificate,
|
||||
|
||||
// The properties was called `certificate` before.
|
||||
cert = certificate,
|
||||
|
||||
key,
|
||||
...opts
|
||||
}) {
|
||||
if (cert && key) {
|
||||
[opts.cert, opts.key] = await Promise.all([
|
||||
readFile(cert),
|
||||
readFile(key)
|
||||
])
|
||||
}
|
||||
@@ -295,14 +314,14 @@ async function makeWebServerListen (opts) {
|
||||
async function createWebServer (opts) {
|
||||
const webServer = new WebServer()
|
||||
|
||||
await Promise.all(map(opts, makeWebServerListen, webServer))
|
||||
await Promise.all(mapToArray(opts, makeWebServerListen, webServer))
|
||||
|
||||
return webServer
|
||||
}
|
||||
|
||||
// ===================================================================
|
||||
|
||||
const setUpProxies = (express, opts) => {
|
||||
const setUpProxies = (express, opts, xo) => {
|
||||
if (!opts) {
|
||||
return
|
||||
}
|
||||
@@ -320,6 +339,8 @@ const setUpProxies = (express, opts) => {
|
||||
const webSocketServer = new WebSocket.Server({
|
||||
noServer: true
|
||||
})
|
||||
xo.on('stop', () => pFromCallback(cb => webSocketServer.close(cb)))
|
||||
|
||||
express.on('upgrade', (req, socket, head) => {
|
||||
const {url} = req
|
||||
|
||||
@@ -353,13 +374,6 @@ const setUpStaticFiles = (express, opts) => {
|
||||
|
||||
// ===================================================================
|
||||
|
||||
function setUpWebSocketServer (webServer) {
|
||||
return new WebSocket.Server({
|
||||
server: webServer,
|
||||
path: '/api/'
|
||||
})
|
||||
}
|
||||
|
||||
const errorClasses = {
|
||||
ALREADY_AUTHENTICATED: AlreadyAuthenticated,
|
||||
INVALID_CREDENTIAL: InvalidCredential,
|
||||
@@ -380,7 +394,7 @@ const apiHelpers = {
|
||||
// Handles both properties and wrapped models.
|
||||
const properties = server.properties || server
|
||||
|
||||
server = pick(properties, 'id', 'host', 'username')
|
||||
server = pick(properties, 'id', 'host', 'username', 'readOnly')
|
||||
|
||||
// Injects connection status.
|
||||
const xapi = this._xapis[server.id]
|
||||
@@ -394,17 +408,29 @@ const apiHelpers = {
|
||||
}
|
||||
}
|
||||
|
||||
const setUpApi = (webSocketServer, xo) => {
|
||||
const context = Object.create(xo)
|
||||
assign(xo, apiHelpers)
|
||||
const setUpApi = (webServer, xo, verboseLogsOnErrors) => {
|
||||
const webSocketServer = new WebSocket.Server({
|
||||
server: webServer,
|
||||
path: '/api/'
|
||||
})
|
||||
xo.on('stop', () => pFromCallback(cb => webSocketServer.close(cb)))
|
||||
|
||||
// FIXME: it can cause issues if there any property assignments in
|
||||
// XO methods called from the API.
|
||||
const context = { __proto__: xo, ...apiHelpers }
|
||||
|
||||
const api = new Api({
|
||||
context
|
||||
context,
|
||||
verboseLogsOnErrors
|
||||
})
|
||||
xo.defineProperty('api', api)
|
||||
|
||||
api.addMethods(apiMethods)
|
||||
|
||||
webSocketServer.on('connection', socket => {
|
||||
debug('+ WebSocket connection')
|
||||
const { remoteAddress } = socket.upgradeReq.socket
|
||||
|
||||
debug('+ WebSocket connection (%s)', remoteAddress)
|
||||
|
||||
// Create the abstract XO object for this connection.
|
||||
const connection = xo.createUserConnection()
|
||||
@@ -422,7 +448,7 @@ const setUpApi = (webSocketServer, xo) => {
|
||||
|
||||
// Close the XO connection with this WebSocket.
|
||||
socket.once('close', () => {
|
||||
debug('- WebSocket connection')
|
||||
debug('- WebSocket connection (%s)', remoteAddress)
|
||||
|
||||
connection.close()
|
||||
})
|
||||
@@ -445,25 +471,6 @@ const setUpApi = (webSocketServer, xo) => {
|
||||
}
|
||||
})
|
||||
})
|
||||
|
||||
return api
|
||||
}
|
||||
|
||||
const setUpScheduler = (api, xo) => {
|
||||
const jobExecutor = new JobExecutor(xo, api)
|
||||
const scheduler = new Scheduler(xo, {executor: jobExecutor})
|
||||
xo.scheduler = scheduler
|
||||
|
||||
return scheduler
|
||||
}
|
||||
|
||||
const setUpRemoteHandler = async xo => {
|
||||
const remoteHandler = new RemoteHandler()
|
||||
xo.remoteHandler = remoteHandler
|
||||
xo.initRemotes()
|
||||
xo.syncAllRemotes()
|
||||
|
||||
return remoteHandler
|
||||
}
|
||||
|
||||
// ===================================================================
|
||||
@@ -474,8 +481,9 @@ const setUpConsoleProxy = (webServer, xo) => {
|
||||
const webSocketServer = new WebSocket.Server({
|
||||
noServer: true
|
||||
})
|
||||
xo.on('stop', () => pFromCallback(cb => webSocketServer.close(cb)))
|
||||
|
||||
webServer.on('upgrade', (req, socket, head) => {
|
||||
webServer.on('upgrade', async (req, socket, head) => {
|
||||
const matches = CONSOLE_PROXY_PATH_RE.exec(req.url)
|
||||
if (!matches) {
|
||||
return
|
||||
@@ -483,7 +491,23 @@ const setUpConsoleProxy = (webServer, xo) => {
|
||||
|
||||
const [, id] = matches
|
||||
try {
|
||||
const xapi = xo.getXAPI(id, ['VM', 'VM-controller'])
|
||||
// TODO: factorize permissions checking in an Express middleware.
|
||||
{
|
||||
const { token } = parseCookies(req.headers.cookie)
|
||||
|
||||
const user = await xo.authenticateUser({ token })
|
||||
if (!await xo.hasPermissions(user.id, [ [ id, 'operate' ] ])) { // eslint-disable-line space-before-keywords
|
||||
throw new InvalidCredential()
|
||||
}
|
||||
|
||||
const { remoteAddress } = socket
|
||||
debug('+ Console proxy (%s - %s)', user.name, remoteAddress)
|
||||
socket.on('close', () => {
|
||||
debug('- Console proxy (%s - %s)', user.name, remoteAddress)
|
||||
})
|
||||
}
|
||||
|
||||
const xapi = xo.getXapi(id, ['VM', 'VM-controller'])
|
||||
const vmConsole = xapi.getVmConsole(id)
|
||||
|
||||
// FIXME: lost connection due to VM restart is not detected.
|
||||
@@ -498,43 +522,36 @@ const setUpConsoleProxy = (webServer, xo) => {
|
||||
|
||||
// ===================================================================
|
||||
|
||||
const registerPasswordAuthenticationProvider = (xo) => {
|
||||
const registerPasswordAuthenticationProvider = xo => {
|
||||
async function passwordAuthenticationProvider ({
|
||||
email,
|
||||
username,
|
||||
password
|
||||
}) {
|
||||
/* eslint no-throw-literal: 0 */
|
||||
|
||||
if (email === undefined || password === undefined) {
|
||||
throw null
|
||||
if (username === undefined || password === undefined) {
|
||||
return
|
||||
}
|
||||
|
||||
// TODO: this is deprecated and should be removed.
|
||||
const user = await xo._users.first({email})
|
||||
if (!user || !(await user.checkPassword(password))) {
|
||||
throw null
|
||||
const user = await xo.getUserByName(username, true)
|
||||
if (user && await xo.checkUserPassword(user.id, password)) {
|
||||
return user.id
|
||||
}
|
||||
return user
|
||||
}
|
||||
|
||||
xo.registerAuthenticationProvider(passwordAuthenticationProvider)
|
||||
}
|
||||
|
||||
const registerTokenAuthenticationProvider = (xo) => {
|
||||
const registerTokenAuthenticationProvider = xo => {
|
||||
async function tokenAuthenticationProvider ({
|
||||
token: tokenId
|
||||
}) {
|
||||
/* eslint no-throw-literal: 0 */
|
||||
|
||||
if (!tokenId) {
|
||||
throw null
|
||||
return
|
||||
}
|
||||
|
||||
try {
|
||||
return (await xo.getAuthenticationToken(tokenId)).user_id
|
||||
} catch (e) {
|
||||
// It is not an error if the token does not exists.
|
||||
throw null
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
@@ -543,15 +560,18 @@ const registerTokenAuthenticationProvider = (xo) => {
|
||||
|
||||
// ===================================================================
|
||||
|
||||
const help = (function ({name, version}) {
|
||||
return () => `${name} v${version}`
|
||||
})(require('../package.json'))
|
||||
const USAGE = (({
|
||||
name,
|
||||
version
|
||||
}) => `Usage: ${name} [--safe-mode]
|
||||
|
||||
${name} v${version}`)(require('../package.json'))
|
||||
|
||||
// ===================================================================
|
||||
|
||||
export default async function main (args) {
|
||||
if (args.indexOf('--help') !== -1 || args.indexOf('-h') !== -1) {
|
||||
return help()
|
||||
if (includes(args, '--help') || includes(args, '-h')) {
|
||||
return USAGE
|
||||
}
|
||||
|
||||
{
|
||||
@@ -580,14 +600,14 @@ export default async function main (args) {
|
||||
warn('Failed to change user/group:', error)
|
||||
}
|
||||
|
||||
// Create the main object which will connects to Xen servers and
|
||||
// manages all the models.
|
||||
const xo = new Xo()
|
||||
await xo.start({
|
||||
redis: {
|
||||
uri: config.redis && config.redis.uri
|
||||
}
|
||||
})
|
||||
// Creates main object.
|
||||
const xo = new Xo(config)
|
||||
|
||||
// Register web server close on XO stop.
|
||||
xo.on('stop', () => pFromCallback(cb => webServer.close(cb)))
|
||||
|
||||
// Connects to all registered servers.
|
||||
await xo.start()
|
||||
|
||||
// Loads default authentication providers.
|
||||
registerPasswordAuthenticationProvider(xo)
|
||||
@@ -596,6 +616,39 @@ export default async function main (args) {
|
||||
// Express is used to manage non WebSocket connections.
|
||||
const express = createExpressApp()
|
||||
|
||||
if (config.http.redirectToHttps) {
|
||||
let port
|
||||
forEach(config.http.listen, listen => {
|
||||
if (
|
||||
listen.port &&
|
||||
(listen.cert || listen.certificate)
|
||||
) {
|
||||
port = listen.port
|
||||
return false
|
||||
}
|
||||
})
|
||||
|
||||
if (port === undefined) {
|
||||
warn('Could not setup HTTPs redirection: no HTTPs port found')
|
||||
} else {
|
||||
express.use((req, res, next) => {
|
||||
if (req.secure) {
|
||||
return next()
|
||||
}
|
||||
|
||||
res.redirect(`https://${req.hostname}:${port}${req.originalUrl}`)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
// Must be set up before the API.
|
||||
setUpConsoleProxy(webServer, xo)
|
||||
|
||||
// Must be set up before the API.
|
||||
express.use(bind(xo._handleHttpRequest, xo))
|
||||
|
||||
// Everything above is not protected by the sign in, allowing xo-cli
|
||||
// to work properly.
|
||||
await setUpPassport(express, xo)
|
||||
|
||||
// Attaches express to the web server.
|
||||
@@ -604,28 +657,15 @@ export default async function main (args) {
|
||||
express.emit('upgrade', req, socket, head)
|
||||
})
|
||||
|
||||
// Must be set up before the API.
|
||||
setUpConsoleProxy(webServer, xo)
|
||||
|
||||
// Must be set up before the API.
|
||||
express.use(bind(xo._handleHttpRequest, xo))
|
||||
|
||||
// TODO: remove when no longer necessary.
|
||||
express.use(bind(xo._handleProxyRequest, xo))
|
||||
|
||||
// Must be set up before the static files.
|
||||
const webSocketServer = setUpWebSocketServer(webServer)
|
||||
const api = setUpApi(webSocketServer, xo)
|
||||
setUpApi(webServer, xo, config.verboseApiLogsOnErrors)
|
||||
|
||||
const scheduler = setUpScheduler(api, xo)
|
||||
setUpRemoteHandler(xo)
|
||||
|
||||
setUpProxies(express, config.http.proxies)
|
||||
setUpProxies(express, config.http.proxies, xo)
|
||||
|
||||
setUpStaticFiles(express, config.http.mounts)
|
||||
|
||||
if (config.plugins) {
|
||||
await registerPlugins(config.plugins, xo)
|
||||
if (!includes(args, '--safe-mode')) {
|
||||
await registerPlugins(xo)
|
||||
}
|
||||
|
||||
if (!(await xo._users.exists())) {
|
||||
@@ -636,28 +676,18 @@ export default async function main (args) {
|
||||
info('Default user created:', email, ' with password', password)
|
||||
}
|
||||
|
||||
// Gracefully shutdown on signals.
|
||||
//
|
||||
// TODO: implements a timeout? (or maybe it is the services launcher
|
||||
// responsibility?)
|
||||
process.on('SIGINT', async () => {
|
||||
debug('SIGINT caught, closing web server…')
|
||||
const shutdown = signal => {
|
||||
debug('%s caught, closing…', signal)
|
||||
xo.stop()
|
||||
}
|
||||
|
||||
webServer.close()
|
||||
// Gracefully shutdown on signals.
|
||||
process.on('SIGINT', () => shutdown('SIGINT'))
|
||||
process.on('SIGTERM', () => shutdown('SIGTERM'))
|
||||
|
||||
webSocketServer.close()
|
||||
scheduler.disableAll()
|
||||
await xo.disableAllRemotes()
|
||||
})
|
||||
process.on('SIGTERM', async () => {
|
||||
debug('SIGTERM caught, closing web server…')
|
||||
await eventToPromise(xo, 'stopped')
|
||||
|
||||
webServer.close()
|
||||
|
||||
webSocketServer.close()
|
||||
scheduler.disableAll()
|
||||
await xo.disableAllRemotes()
|
||||
})
|
||||
|
||||
return eventToPromise(webServer, 'close')
|
||||
debug('bye :-)')
|
||||
}
|
||||
|
||||
@@ -1,7 +1,11 @@
|
||||
import assign from 'lodash.assign'
|
||||
import forEach from 'lodash.foreach'
|
||||
import {BaseError} from 'make-error'
|
||||
|
||||
import {
|
||||
createRawObject,
|
||||
forEach
|
||||
} from './utils'
|
||||
|
||||
export class JobExecutorError extends BaseError {}
|
||||
export class UnsupportedJobType extends JobExecutorError {
|
||||
constructor (job) {
|
||||
@@ -15,58 +19,142 @@ export class UnsupportedVectorType extends JobExecutorError {
|
||||
}
|
||||
|
||||
export const productParams = (...args) => {
|
||||
let product = Object.create(null)
|
||||
let product = createRawObject()
|
||||
assign(product, ...args)
|
||||
return product
|
||||
}
|
||||
|
||||
export function _computeCrossProduct (items, productCb, extractValueMap = {}) {
|
||||
const upstreamValues = []
|
||||
const itemsCopy = items.slice()
|
||||
const item = itemsCopy.pop()
|
||||
const values = extractValueMap[item.type] && extractValueMap[item.type](item) || item
|
||||
forEach(values, value => {
|
||||
if (itemsCopy.length) {
|
||||
let downstreamValues = _computeCrossProduct(itemsCopy, productCb, extractValueMap)
|
||||
forEach(downstreamValues, downstreamValue => {
|
||||
upstreamValues.push(productCb(value, downstreamValue))
|
||||
})
|
||||
} else {
|
||||
upstreamValues.push(value)
|
||||
}
|
||||
})
|
||||
return upstreamValues
|
||||
}
|
||||
|
||||
export default class JobExecutor {
|
||||
constructor (xo, api) {
|
||||
constructor (xo) {
|
||||
this.xo = xo
|
||||
this.api = api
|
||||
this._extractValueCb = {
|
||||
'set': items => items.values
|
||||
}
|
||||
}
|
||||
|
||||
exec (job) {
|
||||
if (job.type === 'call') {
|
||||
this._execCall(job.userId, job.method, job.paramsVector)
|
||||
} else {
|
||||
throw new UnsupportedJobType(job)
|
||||
}
|
||||
}
|
||||
|
||||
_execCall (userId, method, paramsVector) {
|
||||
let paramsFlatVector
|
||||
if (paramsVector.type === 'crossProduct') {
|
||||
paramsFlatVector = this._computeCrossProduct(paramsVector.items, productParams, this._extractValueCb)
|
||||
} else {
|
||||
throw new UnsupportedVectorType(paramsVector)
|
||||
}
|
||||
const connection = this.xo.createUserConnection()
|
||||
connection.set('user_id', userId)
|
||||
forEach(paramsFlatVector, params => {
|
||||
this.api.call(connection, method, assign({}, params))
|
||||
// The logger is not available until Xo has started.
|
||||
xo.on('started', () => {
|
||||
this._logger = this.xo.getLogger('jobs')
|
||||
})
|
||||
connection.close()
|
||||
}
|
||||
|
||||
_computeCrossProduct (items, productCb, extractValueMap = {}) {
|
||||
const upstreamValues = []
|
||||
const itemsCopy = items.slice()
|
||||
const item = itemsCopy.pop()
|
||||
const values = extractValueMap[item.type] && extractValueMap[item.type](item) || item
|
||||
forEach(values, value => {
|
||||
if (itemsCopy.length) {
|
||||
let downstreamValues = this._computeCrossProduct(itemsCopy, productCb, extractValueMap)
|
||||
forEach(downstreamValues, downstreamValue => {
|
||||
upstreamValues.push(productCb(value, downstreamValue))
|
||||
})
|
||||
async exec (job) {
|
||||
const runJobId = this._logger.notice(`Starting execution of ${job.id}.`, {
|
||||
event: 'job.start',
|
||||
userId: job.userId,
|
||||
jobId: job.id,
|
||||
key: job.key
|
||||
})
|
||||
|
||||
try {
|
||||
if (job.type === 'call') {
|
||||
const execStatus = await this._execCall(job, runJobId)
|
||||
|
||||
this.xo.emit('job:terminated', execStatus)
|
||||
} else {
|
||||
upstreamValues.push(value)
|
||||
throw new UnsupportedJobType(job)
|
||||
}
|
||||
|
||||
this._logger.notice(`Execution terminated for ${job.id}.`, {
|
||||
event: 'job.end',
|
||||
runJobId
|
||||
})
|
||||
} catch (e) {
|
||||
this._logger.error(`The execution of ${job.id} has failed.`, {
|
||||
event: 'job.end',
|
||||
runJobId,
|
||||
error: e
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
async _execCall (job, runJobId) {
|
||||
let paramsFlatVector
|
||||
|
||||
if (job.paramsVector) {
|
||||
if (job.paramsVector.type === 'crossProduct') {
|
||||
paramsFlatVector = _computeCrossProduct(job.paramsVector.items, productParams, this._extractValueCb)
|
||||
} else {
|
||||
throw new UnsupportedVectorType(job.paramsVector)
|
||||
}
|
||||
} else {
|
||||
paramsFlatVector = [{}] // One call with no parameters
|
||||
}
|
||||
|
||||
const connection = this.xo.createUserConnection()
|
||||
const promises = []
|
||||
|
||||
connection.set('user_id', job.userId)
|
||||
|
||||
const execStatus = {
|
||||
runJobId,
|
||||
start: Date.now(),
|
||||
calls: {}
|
||||
}
|
||||
|
||||
forEach(paramsFlatVector, params => {
|
||||
const runCallId = this._logger.notice(`Starting ${job.method} call. (${job.id})`, {
|
||||
event: 'jobCall.start',
|
||||
runJobId,
|
||||
method: job.method,
|
||||
params
|
||||
})
|
||||
|
||||
const call = execStatus.calls[runCallId] = {
|
||||
method: job.method,
|
||||
params,
|
||||
start: Date.now()
|
||||
}
|
||||
|
||||
promises.push(
|
||||
this.xo.api.call(connection, job.method, assign({}, params)).then(
|
||||
value => {
|
||||
this._logger.notice(`Call ${job.method} (${runCallId}) is a success. (${job.id})`, {
|
||||
event: 'jobCall.end',
|
||||
runJobId,
|
||||
runCallId,
|
||||
returnedValue: value
|
||||
})
|
||||
|
||||
call.returnedValue = value
|
||||
call.end = Date.now()
|
||||
},
|
||||
reason => {
|
||||
this._logger.notice(`Call ${job.method} (${runCallId}) has failed. (${job.id})`, {
|
||||
event: 'jobCall.end',
|
||||
runJobId,
|
||||
runCallId,
|
||||
error: reason
|
||||
})
|
||||
|
||||
call.error = reason
|
||||
call.end = Date.now()
|
||||
}
|
||||
)
|
||||
)
|
||||
})
|
||||
return upstreamValues
|
||||
|
||||
connection.close()
|
||||
await Promise.all(promises)
|
||||
execStatus.end = Date.now()
|
||||
|
||||
return execStatus
|
||||
}
|
||||
}
|
||||
|
||||
@@ -4,7 +4,7 @@ import {expect} from 'chai'
|
||||
import leche from 'leche'
|
||||
|
||||
import {productParams} from './job-executor'
|
||||
import JobExecutor from './job-executor'
|
||||
import {_computeCrossProduct} from './job-executor'
|
||||
|
||||
describe('productParams', function () {
|
||||
leche.withData({
|
||||
@@ -36,8 +36,7 @@ describe('productParams', function () {
|
||||
})
|
||||
})
|
||||
|
||||
describe('JobExecutor._computeCrossProduct', function () {
|
||||
const jobExecutor = new JobExecutor({})
|
||||
describe('_computeCrossProduct', function () {
|
||||
// Gives the sum of all args
|
||||
const addTest = (...args) => args.reduce((prev, curr) => prev + curr, 0)
|
||||
// Gives the product of all args
|
||||
@@ -64,7 +63,7 @@ describe('JobExecutor._computeCrossProduct', function () {
|
||||
]
|
||||
}, function (product, items, cb) {
|
||||
it('Crosses sets of values with a crossProduct callback', function () {
|
||||
expect(jobExecutor._computeCrossProduct(items, cb)).to.have.members(product)
|
||||
expect(_computeCrossProduct(items, cb)).to.have.members(product)
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
72
src/loggers/leveldb.js
Normal file
72
src/loggers/leveldb.js
Normal file
@@ -0,0 +1,72 @@
|
||||
import highland from 'highland'
|
||||
import { forEach, noop } from '../utils'
|
||||
|
||||
// See: https://en.wikipedia.org/wiki/Syslog#Severity_level
//
// Position in this array matches the syslog numerical severity code
// (0 = emergency … 7 = debug); each name also becomes a method on
// `LevelDbLogger.prototype` (see the loop at the bottom of this file).
const LEVELS = [
  'emergency',
  'alert',
  'critical',
  'error',
  'warning',
  'notice',
  'informational',
  'debug'
]
||||
|
||||
// State for key generation: the timestamp of the previously generated key
// and the sequence number used within that same timestamp.
let lastDate = 0
let lastId = 0

// Returns a key of the form `<date>:<sequence>` which is unique across
// calls of this module: keys sharing a timestamp get increasing sequence
// numbers, and the sequence restarts at 0 whenever the timestamp changes.
function generateUniqueKey (date) {
  if (date === lastDate) {
    ++lastId
  } else {
    lastDate = date
    lastId = 0
  }

  return `${lastDate}:${lastId}`
}
|
||||
|
||||
// Logger persisting entries in a LevelDB database.
//
// Entries are tagged with a namespace so multiple loggers can share a
// single database; reads and deletes only ever see entries belonging to
// this logger's own namespace.
export default class LevelDbLogger {
  constructor (db, namespace) {
    this._db = db
    this._namespace = namespace
  }

  // Persists one entry and returns its unique key.
  _add (level, message, data) {
    const record = {
      level,
      message,
      data,
      namespace: this._namespace,
      time: Date.now()
    }

    const key = generateUniqueKey(record.time)
    this._db.put(key, record)

    return key
  }

  // Streams every stored entry belonging to this logger's namespace.
  createReadStream () {
    const entries = highland(this._db.createReadStream())

    return entries.filter(entry => entry.value.namespace === this._namespace)
  }

  // Deletes one entry (or an array of entries) by key; entries from other
  // namespaces are left untouched, and missing keys are ignored.
  del (id) {
    const ids = Array.isArray(id) ? id : [ id ]

    forEach(ids, key => {
      this._db.get(key, (err, value) => {
        if (!err && value.namespace === this._namespace) {
          // Best effort: deletion errors are deliberately ignored.
          this._db.del(key, noop)
        }
      })
    })
  }
}
|
||||
|
||||
// Install one convenience method per severity level on the logger
// prototype (e.g. `logger.error(message, data)`), each delegating to
// `_add` with the corresponding level.
// `Object.defineProperty` keeps the methods non-enumerable.
LEVELS.forEach(level => {
  Object.defineProperty(LevelDbLogger.prototype, level, {
    value (message, data) {
      return this._add(level, message, data)
    }
  })
})
|
||||
202
src/logs-cli.js
Normal file
202
src/logs-cli.js
Normal file
@@ -0,0 +1,202 @@
|
||||
import appConf from 'app-conf'
|
||||
import get from 'lodash.get'
|
||||
import highland from 'highland'
|
||||
import levelup from 'level-party'
|
||||
import ndjson from 'ndjson'
|
||||
import parseArgs from 'minimist'
|
||||
import sublevel from 'level-sublevel'
|
||||
import util from 'util'
|
||||
import { repair as repairDb } from 'leveldown'
|
||||
|
||||
import {forEach} from './utils'
|
||||
import globMatcher from './glob-matcher'
|
||||
|
||||
// ===================================================================
|
||||
|
||||
/**
 * Reads the log database and prints the matching entries.
 *
 * `args` comes from `getArgs()`: `since`/`until` are timestamps (ms),
 * `matchers` maps field paths to predicate functions, `limit` bounds
 * the number of printed entries and `json` selects ndjson output.
 *
 * Resolves once the stream has been fully consumed.
 */
async function printLogs (db, args) {
  const { since, until, matchers, limit, json } = args

  // Newest entries first.
  let stream = highland(db.createReadStream({ reverse: true }))

  if (since) {
    stream = stream.filter(({ value }) => value.time >= since)
  }

  if (until) {
    stream = stream.filter(({ value }) => value.time <= until)
  }

  const fields = Object.keys(matchers)

  if (fields.length > 0) {
    // Keep an entry only if every requested field exists and matches.
    stream = stream.filter(({ value }) =>
      fields.every(field => {
        const fieldValue = get(value, field)
        return fieldValue !== undefined && matchers[field](fieldValue)
      })
    )
  }

  stream = stream.take(limit)

  if (json) {
    stream = highland(stream.pipe(ndjson.serialize())).each(value => {
      process.stdout.write(value)
    })
  } else {
    stream = stream.each(value => {
      console.log(util.inspect(value, { depth: null }))
    })
  }

  return new Promise(resolve => {
    stream.done(resolve)
  })
}
|
||||
|
||||
// ===================================================================
|
||||
|
||||
/**
 * Prints the CLI usage message on stderr.
 *
 * NOTE(review): the original indentation inside the usage string could
 * not be recovered from this rendering — layout below is reconstructed
 * and should be confirmed against the repository.
 */
function helper () {
  const usage = `
xo-server-logs --help, -h

    Display this help message.

xo-server-logs [--json] [--limit=<limit>] [--since=<date>] [--until=<date>] [<pattern>...]

    Prints the logs.

    --json
      Display the results as new line delimited JSON for consumption
      by another program.

    --limit=<limit>, -n <limit>
      Limit the number of results to be displayed (default 100)

    --since=<date>, --until=<date>
      Start showing entries on or newer than the specified date, or on
      or older than the specified date.

      <date> should use the format \`YYYY-MM-DD\`.

    <pattern>
      Patterns can be used to filter the entries.

      Patterns have the following format \`<field>=<value>\`/\`<field>\`.

xo-server-logs --repair

    Repair/compact the database.

    This is an advanced operation and should be used only when necessary and offline (xo-server should be stopped).
`
  console.error(usage)
}
|
||||
|
||||
// ===================================================================
|
||||
|
||||
/**
 * Parses the process arguments.
 *
 * Returns the minimist result augmented with a `matchers` map
 * (`<field path> → predicate`) built from the positional
 * `<field>=<pattern>` / `<field>` patterns.
 *
 * Throws on duplicated string options, invalid dates and an invalid
 * limit.
 */
function getArgs () {
  const stringArgs = ['since', 'until', 'limit']
  const args = parseArgs(process.argv.slice(2), {
    string: stringArgs,
    boolean: ['help', 'json', 'repair'],
    default: {
      limit: 100,
      json: false,
      help: false
    },
    alias: {
      limit: 'n',
      help: 'h'
    }
  })

  // Collect positional patterns: `<field>=<value>` adds a glob pattern
  // for that field, a bare `<field>` means “field must exist”.
  const patterns = {}

  for (let value of args._) {
    value = String(value)

    const i = value.indexOf('=')

    if (i !== -1) {
      const field = value.slice(0, i)
      const pattern = value.slice(i + 1)

      // FIXED: the original tested `patterns[pattern]` (the value)
      // instead of `patterns[field]`, which discarded previously
      // collected patterns when the same field was given twice.
      if (Array.isArray(patterns[field])) {
        patterns[field].push(pattern)
      } else {
        patterns[field] = [ pattern ]
      }
    } else if (!patterns[value]) {
      patterns[value] = null
    }
  }

  const trueFunction = () => true
  args.matchers = {}

  for (const field in patterns) {
    const values = patterns[field]
    // `null` means “any value”, otherwise build a glob matcher.
    args.matchers[field] = (values === null) ? trueFunction : globMatcher(values)
  }

  // Warning: minimist makes one array of values if the same option is
  // used many times. (But only for string args, not boolean.)
  forEach(stringArgs, arg => {
    if (Array.isArray(args[arg])) {
      throw new Error(`error: too many values for ${arg} argument`)
    }
  })

  ;['since', 'until'].forEach(arg => {
    if (args[arg] !== undefined) {
      args[arg] = Date.parse(args[arg])

      if (isNaN(args[arg])) {
        throw new Error(`error: bad ${arg} timestamp format`)
      }
    }
  })

  if (isNaN(args.limit = +args.limit)) {
    throw new Error('error: limit is not a valid number')
  }

  return args
}
|
||||
|
||||
// ===================================================================
|
||||
|
||||
/**
 * CLI entry point: parses the arguments, then either prints the help,
 * repairs the database or prints the matching logs.
 */
export default async function main () {
  const args = getArgs()

  if (args.help) {
    helper()
    return
  }

  const config = await appConf.load('xo-server', {
    ignoreUnknownFormats: true
  })

  const dbPath = `${config.datadir}/leveldb`

  if (args.repair) {
    // leveldown only exposes a callback API for `repair`.
    await new Promise((resolve, reject) => {
      repairDb(dbPath, error => {
        if (error) {
          reject(error)
        } else {
          resolve()
        }
      })
    })
    return
  }

  const db = sublevel(levelup(dbPath, { valueEncoding: 'json' })).sublevel('logs')

  return printLogs(db, args)
}
|
||||
13
src/model.js
13
src/model.js
@@ -1,15 +1,18 @@
|
||||
import assign from 'lodash.assign'
|
||||
import forEach from 'lodash.foreach'
|
||||
import isEmpty from 'lodash.isempty'
|
||||
import isString from 'lodash.isstring'
|
||||
import {EventEmitter} from 'events'
|
||||
|
||||
import {
|
||||
forEach,
|
||||
isEmpty
|
||||
} from './utils'
|
||||
|
||||
// ===================================================================
|
||||
|
||||
export default class Model extends EventEmitter {
|
||||
constructor (properties) {
|
||||
super()
|
||||
|
||||
this.properties = assign({}, this.default)
|
||||
this.properties = { ...this.default }
|
||||
|
||||
if (properties) {
|
||||
this.set(properties)
|
||||
@@ -39,7 +42,7 @@ export default class Model extends EventEmitter {
|
||||
set (properties, value) {
|
||||
// This method can also be used with two arguments to set a single
|
||||
// property.
|
||||
if (value !== undefined) {
|
||||
if (isString(properties)) {
|
||||
properties = { [properties]: value }
|
||||
}
|
||||
|
||||
|
||||
@@ -1,9 +1,10 @@
|
||||
import forEach from 'lodash.foreach'
|
||||
import map from 'lodash.map'
|
||||
|
||||
import Collection from '../collection/redis'
|
||||
import Model from '../model'
|
||||
import {multiKeyHash} from '../utils'
|
||||
import {
|
||||
forEach,
|
||||
mapToArray,
|
||||
multiKeyHash
|
||||
} from '../utils'
|
||||
|
||||
// ===================================================================
|
||||
|
||||
@@ -58,11 +59,11 @@ export class Acls extends Collection {
|
||||
})
|
||||
if (toUpdate.length) {
|
||||
// Removes all existing entries.
|
||||
await this.remove(map(toUpdate, 'id'))
|
||||
await this.remove(mapToArray(toUpdate, 'id'))
|
||||
|
||||
// Compute the new ids (new hashes).
|
||||
const {hash} = Acl
|
||||
await Promise.all(map(
|
||||
await Promise.all(mapToArray(
|
||||
toUpdate,
|
||||
(acl) => hash(acl.subject, acl.object, acl.action).then(id => {
|
||||
acl.id = id
|
||||
|
||||
@@ -1,8 +1,8 @@
|
||||
import forEach from 'lodash.foreach'
|
||||
|
||||
import Collection from '../collection/redis'
|
||||
import Model from '../model'
|
||||
|
||||
import { forEach } from '../utils'
|
||||
|
||||
// ===================================================================
|
||||
|
||||
export default class Group extends Model {}
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
import forEach from 'lodash.foreach'
|
||||
|
||||
import Collection from '../collection/redis'
|
||||
import Model from '../model'
|
||||
import { forEach } from '../utils'
|
||||
|
||||
// ===================================================================
|
||||
|
||||
|
||||
@@ -1,8 +1,6 @@
|
||||
import assign from 'lodash.assign'
|
||||
import forEach from 'lodash.foreach'
|
||||
|
||||
import Collection from '../collection/redis'
|
||||
import Model from '../model'
|
||||
import { forEach } from '../utils'
|
||||
|
||||
// ===================================================================
|
||||
|
||||
@@ -33,9 +31,10 @@ export class PluginsMetadata extends Collection {
|
||||
throw new Error('no such plugin metadata')
|
||||
}
|
||||
|
||||
const {properties} = pluginMetadata
|
||||
assign(properties, data)
|
||||
return await this.save(properties)
|
||||
return await this.save({
|
||||
...pluginMetadata.properties,
|
||||
...data
|
||||
})
|
||||
}
|
||||
|
||||
async get (properties) {
|
||||
|
||||
@@ -1,6 +1,8 @@
|
||||
import Collection from '../collection/redis'
|
||||
import forEach from 'lodash.foreach'
|
||||
import Model from '../model'
|
||||
import {
|
||||
forEach
|
||||
} from '../utils'
|
||||
|
||||
// ===================================================================
|
||||
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import Collection from '../collection/redis'
|
||||
import forEach from 'lodash.foreach'
|
||||
import Model from '../model'
|
||||
import { forEach } from '../utils'
|
||||
|
||||
// ===================================================================
|
||||
|
||||
@@ -15,12 +15,13 @@ export class Schedules extends Collection {
|
||||
return 'schedule:'
|
||||
}
|
||||
|
||||
create (userId, job, cron, enabled) {
|
||||
create (userId, job, cron, enabled, name = undefined) {
|
||||
return this.add(new Schedule({
|
||||
userId,
|
||||
job,
|
||||
cron,
|
||||
enabled
|
||||
enabled,
|
||||
name
|
||||
}))
|
||||
}
|
||||
|
||||
|
||||
@@ -12,11 +12,11 @@ export class Servers extends Collection {
|
||||
return Server
|
||||
}
|
||||
|
||||
async create ({host, username, password}) {
|
||||
async create ({host, username, password, readOnly}) {
|
||||
if (await this.exists({host})) {
|
||||
throw new Error('server already exists')
|
||||
}
|
||||
|
||||
return await this.add({host, username, password})
|
||||
return await this.add({host, username, password, readOnly})
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,26 +1,10 @@
|
||||
import Collection from '../collection/redis'
|
||||
import Model from '../model'
|
||||
import {generateToken} from '../utils'
|
||||
|
||||
// ===================================================================
|
||||
|
||||
export default class Token extends Model {
|
||||
static generate (userId) {
|
||||
return generateToken().then(token => new Token({
|
||||
id: token,
|
||||
user_id: userId
|
||||
}))
|
||||
}
|
||||
}
|
||||
export default class Token extends Model {}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
export class Tokens extends Collection {
|
||||
get Model () {
|
||||
return Token
|
||||
}
|
||||
|
||||
generate (userId) {
|
||||
return Token.generate(userId).then(token => this.add(token))
|
||||
}
|
||||
}
|
||||
export class Tokens extends Collection {}
|
||||
|
||||
@@ -1,8 +1,8 @@
|
||||
import forEach from 'lodash.foreach'
|
||||
import {hash, needsRehash, verify} from 'hashy'
|
||||
import { hash } from 'hashy'
|
||||
|
||||
import Collection from '../collection/redis'
|
||||
import Model from '../model'
|
||||
import { forEach } from '../utils'
|
||||
|
||||
// ===================================================================
|
||||
|
||||
@@ -16,22 +16,6 @@ const PERMISSIONS = {
|
||||
// ===================================================================
|
||||
|
||||
export default class User extends Model {
|
||||
async checkPassword (password) {
|
||||
const hash = this.get('pw_hash')
|
||||
|
||||
if (!(hash && await verify(password, hash))) {
|
||||
return false
|
||||
}
|
||||
|
||||
// There might be no hash if the user authenticate with another
|
||||
// method (e.g. LDAP).
|
||||
if (needsRehash(hash)) {
|
||||
await this.setPassword(password)
|
||||
}
|
||||
|
||||
return true
|
||||
}
|
||||
|
||||
hasPermission (permission) {
|
||||
return PERMISSIONS[this.get('permission')] >= PERMISSIONS[permission]
|
||||
}
|
||||
|
||||
@@ -27,8 +27,10 @@ export default function proxyConsole (ws, vmConsole, sessionId) {
|
||||
// TODO: check status code 200.
|
||||
debug('connected')
|
||||
})).on('data', data => {
|
||||
// Encode to base 64.
|
||||
ws.send(data.toString('base64'))
|
||||
if (!closed) {
|
||||
// Encode to base 64.
|
||||
ws.send(data.toString('base64'))
|
||||
}
|
||||
}).on('end', () => {
|
||||
if (!closed) {
|
||||
closed = true
|
||||
@@ -46,8 +48,10 @@ export default function proxyConsole (ws, vmConsole, sessionId) {
|
||||
socket.close()
|
||||
})
|
||||
.on('message', data => {
|
||||
// Decode from base 64.
|
||||
socket.write(new Buffer(data, 'base64'))
|
||||
if (!closed) {
|
||||
// Decode from base 64.
|
||||
socket.write(new Buffer(data, 'base64'))
|
||||
}
|
||||
})
|
||||
.on('close', () => {
|
||||
if (!closed) {
|
||||
|
||||
@@ -1,139 +0,0 @@
|
||||
import filter from 'lodash.filter'
|
||||
import forEach from 'lodash.foreach'
|
||||
import fs from 'fs-promise'
|
||||
import {exec} from 'child_process'
|
||||
|
||||
import {promisify} from './utils'
|
||||
|
||||
const execAsync = promisify(exec)

const noop = () => {}

/**
 * Legacy NFS remote handler: keeps the actual system mounts in sync
 * with the remote metadata by shelling out to findmnt/mount/umount.
 */
class NfsMounter {
  // Reads the NFS mounts currently known to the system into
  // `this._realMounts`, keyed by mount target.
  async _loadRealMounts () {
    let stdout
    try {
      [stdout] = await execAsync('findmnt -P -t nfs,nfs4 --output SOURCE,TARGET --noheadings')
    } catch (exc) {
      // When no mounts are found, the call pretends to fail...
    }

    const mounted = {}
    if (stdout) {
      const regex = /^SOURCE="([^:]*):(.*)" TARGET="(.*)"$/
      forEach(stdout.split('\n'), line => {
        if (line) {
          const match = regex.exec(line)
          mounted[match[3]] = {
            host: match[1],
            share: match[2]
          }
        }
      })
    }

    this._realMounts = mounted
    return mounted
  }

  _fullPath (path) {
    return path
  }

  // Whether this mount is currently effective on the system.
  _matchesRealMount (mount) {
    return this._fullPath(mount.path) in this._realMounts
  }

  async _mount (mount) {
    const path = this._fullPath(mount.path)
    await fs.ensureDir(path)
    return await execAsync(`mount -t nfs ${mount.host}:${mount.share} ${path}`)
  }

  // Unmounts, ignoring failures (best effort).
  async forget (mount) {
    try {
      await this._umount(mount)
    } catch (_) {
      // We have to go on...
    }
  }

  async _umount (mount) {
    const path = this._fullPath(mount.path)
    await execAsync(`umount ${path}`)
  }

  // Mounts/unmounts so reality matches `mount.enabled`; on failure the
  // flag is reverted and the error message recorded on the mount.
  async sync (mount) {
    await this._loadRealMounts()

    if (this._matchesRealMount(mount) && !mount.enabled) {
      try {
        await this._umount(mount)
      } catch (exc) {
        mount.enabled = true
        mount.error = exc.message
      }
    } else if (!this._matchesRealMount(mount) && mount.enabled) {
      try {
        await this._mount(mount)
      } catch (exc) {
        mount.enabled = false
        mount.error = exc.message
      }
    }

    return mount
  }

  // Unmounts every effective mount, best effort.
  // NOTE(review): the async callbacks are not awaited — this is fire
  // and forget, kept as-is.
  async disableAll (mounts) {
    await this._loadRealMounts()
    forEach(mounts, async mount => {
      if (this._matchesRealMount(mount)) {
        try {
          await this._umount(mount)
        } catch (_) {
          // We have to go on...
        }
      }
    })
  }
}
|
||||
|
||||
/**
 * Legacy local remote handler: only checks that the directory exists
 * and is readable/writable.
 */
class LocalHandler {
  constructor () {
    // Nothing to tear down for local remotes.
    this.forget = noop
    this.disableAll = noop
  }

  // Validates the directory; on failure the remote is disabled and the
  // error message recorded on it.
  async sync (local) {
    if (local.enabled) {
      try {
        await fs.ensureDir(local.path)
        await fs.access(local.path, fs.R_OK | fs.W_OK)
      } catch (exc) {
        local.enabled = false
        local.error = exc.message
      }
    }
    return local
  }
}
|
||||
|
||||
/**
 * Legacy façade dispatching remote operations to the handler matching
 * the remote type (`nfs` or `local`).
 */
export default class RemoteHandler {
  constructor () {
    this.handlers = {
      nfs: new NfsMounter(),
      local: new LocalHandler()
    }
  }

  async sync (remote) {
    return await this.handlers[remote.type].sync(remote)
  }

  async forget (remote) {
    return await this.handlers[remote.type].forget(remote)
  }

  // Disables every remote, grouped by type so each handler receives
  // only the remotes it manages.
  async disableAll (remotes) {
    await Promise.all(
      ['local', 'nfs'].map(type =>
        this.handlers[type].disableAll(
          filter(remotes, remote => remote.type === type)
        )
      )
    )
  }
}
|
||||
177
src/remote-handlers/abstract.js
Normal file
177
src/remote-handlers/abstract.js
Normal file
@@ -0,0 +1,177 @@
|
||||
import eventToPromise from 'event-to-promise'
|
||||
import getStream from 'get-stream'
|
||||
import through2 from 'through2'
|
||||
|
||||
import {
|
||||
parse
|
||||
} from 'xo-remote-parser'
|
||||
|
||||
import {
|
||||
addChecksumToReadStream,
|
||||
noop,
|
||||
validChecksumOfReadStream
|
||||
} from '../utils'
|
||||
|
||||
/**
 * Base class for remote handlers.
 *
 * Public methods delegate to `_`-prefixed hooks that concrete handlers
 * override; the default hooks throw `Not implemented`.
 */
export default class RemoteHandlerAbstract {
  constructor (remote) {
    this._remote = parse({ ...remote })
    if (this._remote.type !== this.type) {
      throw new Error('Incorrect remote type')
    }
  }

  get type () {
    throw new Error('Not implemented')
  }

  /**
   * Asks the handler to sync the state of the effective remote with its' metadata
   */
  async sync () {
    return this._sync()
  }

  async _sync () {
    throw new Error('Not implemented')
  }

  /**
   * Free the resources possibly dedicated to put the remote at work, when it is no more needed
   */
  async forget () {
    return this._forget()
  }

  async _forget () {
    throw new Error('Not implemented')
  }

  async outputFile (file, data, options) {
    return this._outputFile(file, data, options)
  }

  // Default implementation on top of `createOutputStream`.
  async _outputFile (file, data, options) {
    const stream = await this.createOutputStream(file)
    const promise = eventToPromise(stream, 'finish')
    stream.end(data)
    return promise
  }

  async readFile (file, options) {
    return this._readFile(file, options)
  }

  // Default implementation on top of `createReadStream`.
  async _readFile (file, options) {
    return getStream(await this.createReadStream(file, options))
  }

  async rename (oldPath, newPath) {
    return this._rename(oldPath, newPath)
  }

  async _rename (oldPath, newPath) {
    throw new Error('Not implemented')
  }

  async list (dir = '.') {
    return this._list(dir)
  }

  async _list (dir) {
    throw new Error('Not implemented')
  }

  // Opens a read stream; with `checksum: true` the returned stream
  // validates its content against the `<file>.checksum` companion file.
  async createReadStream (file, {
    checksum = false,
    ignoreMissingChecksum = false,
    ...options
  } = {}) {
    const streamP = this._createReadStream(file, options).then(async stream => {
      await eventToPromise(stream, 'readable')

      // Advertise the stream length when the backend knows it.
      if (stream.length === undefined) {
        stream.length = await this.getSize(file).catch(noop)
      }

      return stream
    })

    if (!checksum) {
      return streamP
    }

    let checksumValue
    try {
      checksumValue = await this.readFile(`${file}.checksum`)
    } catch (error) {
      if (error.code === 'ENOENT' && ignoreMissingChecksum) {
        return streamP
      }

      throw error
    }

    const rawStream = await streamP
    const { length } = rawStream
    const stream = validChecksumOfReadStream(rawStream, checksumValue.toString())
    stream.length = length

    return stream
  }

  async _createReadStream (file, options) {
    throw new Error('Not implemented')
  }

  // Opens a write stream; with `checksum: true` the data is also fed
  // through a checksum computation whose result is stored next to the
  // file in `<file>.checksum`.
  async createOutputStream (file, {
    checksum = false,
    ...options
  } = {}) {
    const streamP = this._createOutputStream(file, options)

    if (!checksum) {
      return streamP
    }

    // The caller writes into `connectorStream`; errors from the
    // checksum pipeline are forwarded to it.
    const connectorStream = through2()
    const forwardError = error => {
      connectorStream.emit('error', error)
    }

    const streamWithChecksum = addChecksumToReadStream(connectorStream)
    streamWithChecksum.pipe(await streamP)
    streamWithChecksum.on('error', forwardError)

    streamWithChecksum.checksum
      .then(value => this.outputFile(`${file}.checksum`, value))
      .catch(forwardError)

    return connectorStream
  }

  async _createOutputStream (file, options) {
    throw new Error('Not implemented')
  }

  // Deletes a file; with `checksum: true` the companion checksum file
  // is also removed, best effort.
  async unlink (file, {
    checksum = false
  } = {}) {
    if (checksum) {
      this._unlink(`${file}.checksum`).catch(noop)
    }

    return this._unlink(file)
  }

  async _unlink (file) {
    throw new Error('Not implemented')
  }

  async getSize (file) {
    return this._getSize(file)
  }

  async _getSize (file) {
    throw new Error('Not implemented')
  }
}
|
||||
84
src/remote-handlers/local.js
Normal file
84
src/remote-handlers/local.js
Normal file
@@ -0,0 +1,84 @@
|
||||
import fs from 'fs-promise'
|
||||
import startsWith from 'lodash.startswith'
|
||||
import {
|
||||
dirname,
|
||||
resolve
|
||||
} from 'path'
|
||||
|
||||
import RemoteHandlerAbstract from './abstract'
|
||||
import {
|
||||
noop
|
||||
} from '../utils'
|
||||
|
||||
/**
 * Remote handler backed by a local directory.
 */
export default class LocalHandler extends RemoteHandlerAbstract {
  get type () {
    return 'local'
  }

  // Resolves `file` against the remote root, refusing paths that would
  // escape it.
  _getFilePath (file) {
    const root = this._remote.path
    const parts = [root]
    if (file) {
      parts.push(file)
    }
    const path = resolve.apply(null, parts)

    // FIXED: the original bare prefix check (`startsWith(path, root)`)
    // also accepted sibling directories, e.g. `/mnt/data-evil` for the
    // root `/mnt/data`: require the resolved path to be the root itself
    // or to live strictly under it.
    // NOTE(review): assumes POSIX-style paths, as elsewhere in these
    // handlers — confirm if Windows support is expected.
    if (path !== root && !startsWith(path, `${root}/`)) {
      throw new Error('Remote path is unavailable')
    }
    return path
  }

  // Validates the directory; on failure the remote is disabled and the
  // error message recorded on it.
  async _sync () {
    if (this._remote.enabled) {
      try {
        await fs.ensureDir(this._remote.path)
        await fs.access(this._remote.path, fs.R_OK | fs.W_OK)
      } catch (exc) {
        this._remote.enabled = false
        this._remote.error = exc.message
      }
    }
    return this._remote
  }

  // Nothing to release for a local directory.
  async _forget () {
    return noop()
  }

  async _outputFile (file, data, options) {
    const path = this._getFilePath(file)
    await fs.ensureDir(dirname(path))
    await fs.writeFile(path, data, options)
  }

  async _readFile (file, options) {
    return fs.readFile(this._getFilePath(file), options)
  }

  async _rename (oldPath, newPath) {
    return fs.rename(this._getFilePath(oldPath), this._getFilePath(newPath))
  }

  async _list (dir = '.') {
    return fs.readdir(this._getFilePath(dir))
  }

  async _createReadStream (file, options) {
    return fs.createReadStream(this._getFilePath(file), options)
  }

  async _createOutputStream (file, options) {
    const path = this._getFilePath(file)
    await fs.ensureDir(dirname(path))
    return fs.createWriteStream(path, options)
  }

  async _unlink (file) {
    return fs.unlink(this._getFilePath(file))
  }

  async _getSize (file) {
    const stats = await fs.stat(this._getFilePath(file))
    return stats.size
  }
}
|
||||
77
src/remote-handlers/nfs.js
Normal file
77
src/remote-handlers/nfs.js
Normal file
@@ -0,0 +1,77 @@
|
||||
import execa from 'execa'
|
||||
import fs from 'fs-promise'
|
||||
|
||||
import LocalHandler from './local'
|
||||
import {
|
||||
forEach
|
||||
} from '../utils'
|
||||
|
||||
/**
 * Remote handler for NFS remotes: behaves like a local directory once
 * mounted, and manages the mount/umount lifecycle via execa.
 */
export default class NfsHandler extends LocalHandler {
  get type () {
    return 'nfs'
  }

  // Reads the NFS mounts currently known to the system into
  // `this._realMounts`, keyed by mount target.
  async _loadRealMounts () {
    const mounted = {}
    try {
      const [stdout] = await execa('findmnt', ['-P', '-t', 'nfs,nfs4', '--output', 'SOURCE,TARGET', '--noheadings'])
      const regex = /^SOURCE="([^:]*):(.*)" TARGET="(.*)"$/
      forEach(stdout.split('\n'), line => {
        if (line) {
          const match = regex.exec(line)
          mounted[match[3]] = {
            host: match[1],
            share: match[2]
          }
        }
      })
    } catch (exc) {
      // When no mounts are found, the call pretends to fail...
    }

    this._realMounts = mounted
    return mounted
  }

  // Whether this remote is currently mounted on the system.
  _matchesRealMount (remote) {
    return remote.path in this._realMounts
  }

  async _mount (remote) {
    await fs.ensureDir(remote.path)
    return execa('mount', ['-t', 'nfs', `${remote.host}:${remote.share}`, remote.path])
  }

  // Mounts/unmounts so reality matches `enabled`; on failure the flag
  // is reverted and the error message recorded on the remote.
  async _sync () {
    await this._loadRealMounts()

    if (this._matchesRealMount(this._remote) && !this._remote.enabled) {
      try {
        await this._umount(this._remote)
      } catch (exc) {
        this._remote.enabled = true
        this._remote.error = exc.message
      }
    } else if (!this._matchesRealMount(this._remote) && this._remote.enabled) {
      try {
        await this._mount(this._remote)
      } catch (exc) {
        this._remote.enabled = false
        this._remote.error = exc.message
      }
    }

    return this._remote
  }

  // Unmounts, ignoring failures (best effort).
  async _forget () {
    try {
      await this._umount(this._remote)
    } catch (_) {
      // We have to go on...
    }
  }

  async _umount (remote) {
    await execa('umount', [remote.path])
  }
}
|
||||
144
src/remote-handlers/smb.js
Normal file
144
src/remote-handlers/smb.js
Normal file
@@ -0,0 +1,144 @@
|
||||
import Smb2 from '@marsaud/smb2-promise'
|
||||
|
||||
import RemoteHandlerAbstract from './abstract'
|
||||
import {
|
||||
noop
|
||||
} from '../utils'
|
||||
|
||||
/**
 * Remote handler for SMB/CIFS shares.
 *
 * Each operation opens a fresh SMB2 client and closes it when done.
 */
export default class SmbHandler extends RemoteHandlerAbstract {
  constructor (remote) {
    super(remote)
    // Nothing to release when the remote is forgotten.
    this._forget = noop
  }

  get type () {
    return 'smb'
  }

  // Builds a fresh SMB2 client; callers are responsible for closing it.
  _getClient (remote) {
    return new Smb2({
      share: `\\\\${remote.host}`,
      domain: remote.domain,
      username: remote.username,
      password: remote.password,
      autoCloseTimeout: 0
    })
  }

  // Converts a `/`-separated relative path into a `\`-separated path
  // under the remote root.
  _getFilePath (file) {
    if (file === '.') {
      file = undefined
    }
    const parts = []
    if (this._remote.path !== '') {
      parts.push(this._remote.path)
    }
    if (file) {
      // FIXED: the segments must be spread into `parts`; the original
      // pushed the array itself, so `join('\\')` rendered it
      // comma-separated (e.g. `root\a,b` instead of `root\a\b`).
      parts.push(...file.split('/'))
    }
    return parts.join('\\')
  }

  // Backslash-path equivalent of `path.dirname`.
  _dirname (file) {
    const parts = file.split('\\')
    parts.pop()
    return parts.join('\\')
  }

  // Validates the share is reachable; on failure the remote is
  // disabled and the error message recorded on it.
  async _sync () {
    if (this._remote.enabled) {
      try {
        // Check access (smb2 does not expose connect in public so far...)
        await this.list()
      } catch (error) {
        this._remote.enabled = false
        this._remote.error = error.message
      }
    }
    return this._remote
  }

  async _outputFile (file, data, options) {
    const client = this._getClient(this._remote)
    const path = this._getFilePath(file)
    const dir = this._dirname(path)
    try {
      if (dir) {
        await client.ensureDir(dir)
      }
      return client.writeFile(path, data, options)
    } finally {
      client.close()
    }
  }

  async _readFile (file, options) {
    const client = this._getClient(this._remote)
    try {
      return client.readFile(this._getFilePath(file), options)
    } finally {
      client.close()
    }
  }

  async _rename (oldPath, newPath) {
    const client = this._getClient(this._remote)
    try {
      return client.rename(this._getFilePath(oldPath), this._getFilePath(newPath))
    } finally {
      client.close()
    }
  }

  async _list (dir = '.') {
    const client = this._getClient(this._remote)
    try {
      return client.readdir(this._getFilePath(dir))
    } finally {
      client.close()
    }
  }

  // The client is closed once the stream has been fully read.
  async _createReadStream (file, options) {
    const client = this._getClient(this._remote)
    const stream = await client.createReadStream(this._getFilePath(file), options) // FIXME ensure that options are properly handled by @marsaud/smb2
    stream.on('end', () => client.close())
    return stream
  }

  // The client is closed once the stream has been fully written.
  async _createOutputStream (file, options) {
    const client = this._getClient(this._remote)
    const path = this._getFilePath(file)
    const dir = this._dirname(path)
    let stream
    try {
      if (dir) {
        await client.ensureDir(dir)
      }
      stream = await client.createWriteStream(path, options) // FIXME ensure that options are properly handled by @marsaud/smb2
    } catch (err) {
      client.close()
      throw err
    }
    stream.on('finish', () => client.close())
    return stream
  }

  async _unlink (file) {
    const client = this._getClient(this._remote)
    try {
      return client.unlink(this._getFilePath(file))
    } finally {
      client.close()
    }
  }

  async _getSize (file) {
    // `_getClient` is synchronous: the stray `await` has been removed.
    const client = this._getClient(this._remote)
    try {
      return client.getSize(this._getFilePath(file))
    } finally {
      client.close()
    }
  }
}
|
||||
@@ -1,7 +1,8 @@
|
||||
import forEach from 'lodash.foreach'
|
||||
import {BaseError} from 'make-error'
|
||||
import {CronJob} from 'cron'
|
||||
|
||||
import { forEach } from './utils'
|
||||
|
||||
const _resolveId = scheduleOrId => scheduleOrId.id || scheduleOrId
|
||||
|
||||
export class SchedulerError extends BaseError {}
|
||||
@@ -32,11 +33,11 @@ export class ScheduleJobNotFound extends SchedulerError {
|
||||
}
|
||||
|
||||
export default class Scheduler {
|
||||
constructor (xo, {executor}) {
|
||||
this.executor = executor
|
||||
constructor (xo) {
|
||||
this.xo = xo
|
||||
this._scheduleTable = undefined
|
||||
this._loadSchedules()
|
||||
|
||||
this._runningSchedules = {}
|
||||
}
|
||||
|
||||
async _loadSchedules () {
|
||||
@@ -114,26 +115,29 @@ export default class Scheduler {
|
||||
}
|
||||
|
||||
_enable (schedule) {
|
||||
const running = this._runningSchedules
|
||||
|
||||
const { id } = schedule
|
||||
const jobId = schedule.job
|
||||
|
||||
const cronJob = new CronJob(schedule.cron, async () => {
|
||||
if (running[id]) {
|
||||
return // Simply ignore.
|
||||
}
|
||||
|
||||
try {
|
||||
const job = await this._getJob(jobId, schedule.id)
|
||||
this.executor.exec(job)
|
||||
} catch (_) {
|
||||
// FIXME What do we do ?
|
||||
running[id] = true
|
||||
await this.xo.runJobSequence([ jobId ])
|
||||
} catch (error) {
|
||||
// FIXME: better error handling
|
||||
console.error(error && error.stack || error)
|
||||
} finally {
|
||||
delete running[id]
|
||||
}
|
||||
})
|
||||
this._cronJobs[schedule.id] = cronJob
|
||||
this._cronJobs[id] = cronJob
|
||||
cronJob.start()
|
||||
this._scheduleTable[schedule.id] = true
|
||||
}
|
||||
|
||||
async _getJob (id, scheduleId) {
|
||||
const job = await this.xo.getJob(id)
|
||||
if (!job) {
|
||||
throw new ScheduleJobNotFound(id, scheduleId)
|
||||
}
|
||||
return job
|
||||
this._scheduleTable[id] = true
|
||||
}
|
||||
|
||||
_disable (scheduleOrId) {
|
||||
|
||||
28
src/schemas/acl.js
Normal file
28
src/schemas/acl.js
Normal file
@@ -0,0 +1,28 @@
|
||||
export default {
|
||||
$schema: 'http://json-schema.org/draft-04/schema#',
|
||||
type: 'object',
|
||||
properties: {
|
||||
id: {
|
||||
type: 'string',
|
||||
description: 'unique identifier for this ACL'
|
||||
},
|
||||
action: {
|
||||
type: 'string',
|
||||
description: 'permission (or role)'
|
||||
},
|
||||
object: {
|
||||
type: 'string',
|
||||
description: 'item (or set)'
|
||||
},
|
||||
subject: {
|
||||
type: 'string',
|
||||
description: 'user (or group)'
|
||||
}
|
||||
},
|
||||
required: [
|
||||
'id',
|
||||
'action',
|
||||
'object',
|
||||
'subject'
|
||||
]
|
||||
}
|
||||
29
src/schemas/log.js
Normal file
29
src/schemas/log.js
Normal file
@@ -0,0 +1,29 @@
|
||||
export default {
|
||||
$schema: 'http://json-schema.org/draft-04/schema#',
|
||||
type: 'object',
|
||||
properties: {
|
||||
id: {
|
||||
type: 'string',
|
||||
description: 'unique identifier for this log'
|
||||
},
|
||||
time: {
|
||||
type: 'string',
|
||||
description: 'timestamp (in milliseconds) of this log'
|
||||
},
|
||||
message: {
|
||||
type: 'string',
|
||||
description: 'human readable (short) description of this log'
|
||||
},
|
||||
namespace: {
|
||||
type: 'string',
|
||||
description: 'space to store logs'
|
||||
},
|
||||
data: {}
|
||||
},
|
||||
required: [
|
||||
'id',
|
||||
'time',
|
||||
'message',
|
||||
'namespace'
|
||||
]
|
||||
}
|
||||
33
src/schemas/log/jobCallEnd.js
Normal file
33
src/schemas/log/jobCallEnd.js
Normal file
@@ -0,0 +1,33 @@
|
||||
export default {
|
||||
$schema: 'http://json-schema.org/draft-04/schema#',
|
||||
type: 'object',
|
||||
properties: {
|
||||
event: {
|
||||
enum: ['jobCall.end']
|
||||
},
|
||||
runJobId: {
|
||||
type: 'string',
|
||||
description: 'instance id of this job'
|
||||
},
|
||||
runCallId: {
|
||||
type: 'string',
|
||||
description: 'instance id of this call'
|
||||
},
|
||||
error: {
|
||||
type: 'object',
|
||||
description: 'describe one failure, exists if the call has failed'
|
||||
},
|
||||
returnedValue: {
|
||||
description: 'call\'s result, exists if the call is a success'
|
||||
}
|
||||
},
|
||||
required: [
|
||||
'event',
|
||||
'runJobId',
|
||||
'runCallId'
|
||||
],
|
||||
oneOf: [
|
||||
{ required: ['error'] },
|
||||
{ required: ['returnedValue'] }
|
||||
]
|
||||
}
|
||||
27
src/schemas/log/jobCallStart.js
Normal file
27
src/schemas/log/jobCallStart.js
Normal file
@@ -0,0 +1,27 @@
|
||||
export default {
|
||||
$schema: 'http://json-schema.org/draft-04/schema#',
|
||||
type: 'object',
|
||||
properties: {
|
||||
event: {
|
||||
enum: ['jobCall.start']
|
||||
},
|
||||
runJobId: {
|
||||
type: 'string',
|
||||
description: 'instance id of this job'
|
||||
},
|
||||
method: {
|
||||
type: 'string',
|
||||
description: 'method linked to this call'
|
||||
},
|
||||
params: {
|
||||
type: 'object',
|
||||
description: 'params of the called method'
|
||||
}
|
||||
},
|
||||
required: [
|
||||
'event',
|
||||
'runJobId',
|
||||
'method',
|
||||
'params'
|
||||
]
|
||||
}
|
||||
21
src/schemas/log/jobEnd.js
Normal file
21
src/schemas/log/jobEnd.js
Normal file
@@ -0,0 +1,21 @@
|
||||
export default {
|
||||
$schema: 'http://json-schema.org/draft-04/schema#',
|
||||
type: 'object',
|
||||
properties: {
|
||||
event: {
|
||||
enum: ['job.end']
|
||||
},
|
||||
runJobId: {
|
||||
type: 'string',
|
||||
description: 'instance id of this job'
|
||||
},
|
||||
error: {
|
||||
type: 'object',
|
||||
description: 'describe one failure, exists if no call has been made'
|
||||
}
|
||||
},
|
||||
required: [
|
||||
'event',
|
||||
'runJobId'
|
||||
]
|
||||
}
|
||||
26
src/schemas/log/jobStart.js
Normal file
26
src/schemas/log/jobStart.js
Normal file
@@ -0,0 +1,26 @@
|
||||
export default {
|
||||
$schema: 'http://json-schema.org/draft-04/schema#',
|
||||
type: 'object',
|
||||
properties: {
|
||||
event: {
|
||||
enum: ['job.start']
|
||||
},
|
||||
userId: {
|
||||
type: 'string',
|
||||
description: 'user who executes this job'
|
||||
},
|
||||
jobId: {
|
||||
type: 'string',
|
||||
description: 'identifier of this job'
|
||||
},
|
||||
key: {
|
||||
type: 'string'
|
||||
}
|
||||
},
|
||||
required: [
|
||||
'event',
|
||||
'userId',
|
||||
'jobId',
|
||||
'key'
|
||||
]
|
||||
}
|
||||
41
src/schemas/plugin.js
Normal file
41
src/schemas/plugin.js
Normal file
@@ -0,0 +1,41 @@
|
||||
export default {
|
||||
$schema: 'http://json-schema.org/draft-04/schema#',
|
||||
type: 'object',
|
||||
properties: {
|
||||
id: {
|
||||
type: 'string',
|
||||
description: 'unique identifier for this plugin'
|
||||
},
|
||||
name: {
|
||||
type: 'string',
|
||||
description: 'unique human readable name for this plugin'
|
||||
},
|
||||
autoload: {
|
||||
type: 'boolean',
|
||||
description: 'whether this plugin is loaded on startup'
|
||||
},
|
||||
loaded: {
|
||||
type: 'boolean',
|
||||
description: 'whether or not this plugin is currently loaded'
|
||||
},
|
||||
unloadable: {
|
||||
type: 'boolean',
|
||||
default: 'true',
|
||||
description: 'whether or not this plugin can be unloaded'
|
||||
},
|
||||
configuration: {
|
||||
type: 'object',
|
||||
description: 'current configuration of this plugin (not present if none)'
|
||||
},
|
||||
configurationSchema: {
|
||||
$ref: 'http://json-schema.org/draft-04/schema#',
|
||||
description: 'configuration schema for this plugin (not present if not configurable)'
|
||||
}
|
||||
},
|
||||
required: [
|
||||
'id',
|
||||
'name',
|
||||
'autoload',
|
||||
'loaded'
|
||||
]
|
||||
}
|
||||
368
src/utils.js
368
src/utils.js
@@ -1,16 +1,46 @@
|
||||
import base64url from 'base64url'
|
||||
import eventToPromise from 'event-to-promise'
|
||||
import forEach from 'lodash.foreach'
|
||||
import has from 'lodash.has'
|
||||
import humanFormat from 'human-format'
|
||||
import invert from 'lodash.invert'
|
||||
import isArray from 'lodash.isarray'
|
||||
import isString from 'lodash.isstring'
|
||||
import kindOf from 'kindof'
|
||||
import multiKeyHashInt from 'multikey-hash'
|
||||
import xml2js from 'xml2js'
|
||||
import { defer } from 'promise-toolbox'
|
||||
import {promisify} from 'bluebird'
|
||||
import {randomBytes} from 'crypto'
|
||||
import {
|
||||
createHash,
|
||||
randomBytes
|
||||
} from 'crypto'
|
||||
import { Readable } from 'stream'
|
||||
import through2 from 'through2'
|
||||
import {utcFormat as d3TimeFormat} from 'd3-time-format'
|
||||
|
||||
// ===================================================================
|
||||
|
||||
export function bufferToStream (buf) {
|
||||
const stream = new Readable()
|
||||
|
||||
let i = 0
|
||||
const { length } = buf
|
||||
stream._read = function (size) {
|
||||
if (i === length) {
|
||||
return this.push(null)
|
||||
}
|
||||
|
||||
const newI = Math.min(i + size, length)
|
||||
this.push(buf.slice(i, newI))
|
||||
i = newI
|
||||
}
|
||||
|
||||
return stream
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
export function camelToSnakeCase (string) {
|
||||
return string.replace(
|
||||
/([a-z])([A-Z])/g,
|
||||
@@ -20,6 +50,97 @@ export function camelToSnakeCase (string) {
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
// Returns an empty object without prototype (if possible).
|
||||
export const createRawObject = Object.create
|
||||
? (createObject => () => createObject(null))(Object.create)
|
||||
: () => ({})
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
const ALGORITHM_TO_ID = {
|
||||
md5: '1',
|
||||
sha256: '5',
|
||||
sha512: '6'
|
||||
}
|
||||
|
||||
const ID_TO_ALGORITHM = invert(ALGORITHM_TO_ID)
|
||||
|
||||
// Wrap a readable stream in a stream with a checksum promise
|
||||
// attribute which is resolved at the end of an input stream.
|
||||
// (Finally .checksum contains the checksum of the input stream)
|
||||
//
|
||||
// Example:
|
||||
// const sourceStream = ...
|
||||
// const targetStream = ...
|
||||
// const checksumStream = addChecksumToReadStream(sourceStream)
|
||||
// await Promise.all([
|
||||
// eventToPromise(checksumStream.pipe(targetStream), 'finish'),
|
||||
// checksumStream.checksum.then(console.log)
|
||||
// ])
|
||||
export const addChecksumToReadStream = (stream, algorithm = 'md5') => {
|
||||
const algorithmId = ALGORITHM_TO_ID[algorithm]
|
||||
|
||||
if (!algorithmId) {
|
||||
throw new Error(`unknown algorithm: ${algorithm}`)
|
||||
}
|
||||
|
||||
const hash = createHash(algorithm)
|
||||
const { promise, resolve } = defer()
|
||||
|
||||
const wrapper = stream.pipe(through2(
|
||||
(chunk, enc, callback) => {
|
||||
hash.update(chunk)
|
||||
callback(null, chunk)
|
||||
},
|
||||
callback => {
|
||||
resolve(hash.digest('hex'))
|
||||
callback()
|
||||
}
|
||||
))
|
||||
|
||||
stream.on('error', error => wrapper.emit('error', error))
|
||||
wrapper.checksum = promise.then(hash => `$${algorithmId}$$${hash}`)
|
||||
|
||||
return wrapper
|
||||
}
|
||||
|
||||
// Check if the checksum of a readable stream is equals to an expected checksum.
|
||||
// The given stream is wrapped in a stream which emits an error event
|
||||
// if the computed checksum is not equals to the expected checksum.
|
||||
export const validChecksumOfReadStream = (stream, expectedChecksum) => {
|
||||
const algorithmId = expectedChecksum.slice(1, expectedChecksum.indexOf('$', 1))
|
||||
|
||||
if (!algorithmId) {
|
||||
throw new Error(`unknown algorithm: ${algorithmId}`)
|
||||
}
|
||||
|
||||
const hash = createHash(ID_TO_ALGORITHM[algorithmId])
|
||||
|
||||
const wrapper = stream.pipe(through2(
|
||||
{ highWaterMark: 0 },
|
||||
(chunk, enc, callback) => {
|
||||
hash.update(chunk)
|
||||
callback(null, chunk)
|
||||
},
|
||||
callback => {
|
||||
const checksum = `$${algorithmId}$$${hash.digest('hex')}`
|
||||
|
||||
callback(
|
||||
checksum !== expectedChecksum
|
||||
? new Error(`Bad checksum (${checksum}), expected: ${expectedChecksum}`)
|
||||
: null
|
||||
)
|
||||
}
|
||||
))
|
||||
|
||||
stream.on('error', error => wrapper.emit('error', error))
|
||||
wrapper.checksumVerified = eventToPromise(wrapper, 'end')
|
||||
|
||||
return wrapper
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
// Ensure the value is an array, wrap it if necessary.
|
||||
export function ensureArray (value) {
|
||||
if (value === undefined) {
|
||||
@@ -82,24 +203,162 @@ export const parseXml = (function () {
|
||||
// This function does nothing and returns undefined.
|
||||
//
|
||||
// It is often used to swallow promise's errors.
|
||||
export function noop () {}
|
||||
export const noop = () => {}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
export const isPromise = value => (
|
||||
value != null &&
|
||||
typeof value.then === 'function'
|
||||
)
|
||||
|
||||
const _pAll = (promises, mapFn) => {
|
||||
let mainPromise = Promise.resolve()
|
||||
|
||||
const results = mapFn
|
||||
? (promises = map(promises, mapFn))
|
||||
: 'length' in promises
|
||||
? new Array(promises.length)
|
||||
: {}
|
||||
|
||||
forEach(promises, (promise, key) => {
|
||||
mainPromise = mainPromise
|
||||
.then(() => promise)
|
||||
.then(value => {
|
||||
results[key] = value
|
||||
})
|
||||
})
|
||||
|
||||
return mainPromise.then(() => results)
|
||||
}
|
||||
|
||||
// Returns a promise which resolves when all the promises in a
|
||||
// collection have resolved or rejects with the reason of the first
|
||||
// promise that rejects.
|
||||
//
|
||||
// Optionally a function can be provided to map all items in the
|
||||
// collection before waiting for completion.
|
||||
//
|
||||
// Usage: pAll(promises, [ mapFn ]) or promises::pAll([ mapFn ])
|
||||
export function pAll (promises, mapFn) {
|
||||
if (this) {
|
||||
mapFn = promises
|
||||
promises = this
|
||||
}
|
||||
|
||||
return Promise.resolve(promises)
|
||||
.then(promises => _pAll(promises, mapFn))
|
||||
}
|
||||
|
||||
// Usage: pDebug(promise, name) or promise::pDebug(name)
|
||||
export function pDebug (promise, name) {
|
||||
if (arguments.length === 1) {
|
||||
name = promise
|
||||
promise = this
|
||||
}
|
||||
|
||||
Promise.resolve(promise).then(
|
||||
value => {
|
||||
console.log(
|
||||
'%s',
|
||||
`Promise ${name} resolved${value !== undefined ? ` with ${kindOf(value)}` : ''}`
|
||||
)
|
||||
},
|
||||
reason => {
|
||||
console.log(
|
||||
'%s',
|
||||
`Promise ${name} rejected${reason !== undefined ? ` with ${kindOf(reason)}` : ''}`
|
||||
)
|
||||
}
|
||||
)
|
||||
|
||||
return promise
|
||||
}
|
||||
|
||||
// Ponyfill for Promise.finally(cb)
|
||||
export const pFinally = (promise, cb) => {
|
||||
return promise.then(
|
||||
(value) => constructor.resolve(cb()).then(() => value),
|
||||
(reason) => constructor.resolve(cb()).then(() => {
|
||||
//
|
||||
// Usage: promise::pFinally(cb)
|
||||
export function pFinally (cb) {
|
||||
return this.then(
|
||||
value => this.constructor.resolve(cb()).then(() => value),
|
||||
reason => this.constructor.resolve(cb()).then(() => {
|
||||
throw reason
|
||||
})
|
||||
)
|
||||
}
|
||||
|
||||
// Usage:
|
||||
//
|
||||
// pFromCallback(cb => fs.readFile('foo.txt', cb))
|
||||
// .then(content => {
|
||||
// console.log(content)
|
||||
// })
|
||||
export const pFromCallback = fn => new Promise((resolve, reject) => {
|
||||
fn((error, result) => error
|
||||
? reject(error)
|
||||
: resolve(result)
|
||||
)
|
||||
})
|
||||
|
||||
const _pReflectResolution = (__proto__ => value => ({
|
||||
__proto__,
|
||||
value: () => value
|
||||
}))({
|
||||
isFulfilled: () => true,
|
||||
isRejected: () => false,
|
||||
reason: () => {
|
||||
throw new Error('no reason, the promise has resolved')
|
||||
}
|
||||
})
|
||||
|
||||
const _pReflectRejection = (__proto__ => reason => ({
|
||||
__proto__,
|
||||
reason: () => reason
|
||||
}))({
|
||||
isFulfilled: () => false,
|
||||
isRejected: () => true,
|
||||
value: () => {
|
||||
throw new Error('no value, the promise has rejected')
|
||||
}
|
||||
})
|
||||
|
||||
// Returns a promise that is always successful when this promise is
|
||||
// settled. Its fulfillment value is an object that implements the
|
||||
// PromiseInspection interface and reflects the resolution this
|
||||
// promise.
|
||||
//
|
||||
// Usage: pReflect(promise) or promise::pReflect()
|
||||
export function pReflect (promise) {
|
||||
return Promise.resolve(this || promise).then(
|
||||
_pReflectResolution,
|
||||
_pReflectRejection
|
||||
)
|
||||
}
|
||||
|
||||
// Given a collection (array or object) which contains promises,
|
||||
// return a promise that is fulfilled when all the items in the
|
||||
// collection are either fulfilled or rejected.
|
||||
//
|
||||
// This promise will be fulfilled with a collection (of the same type,
|
||||
// array or object) containing promise inspections.
|
||||
//
|
||||
// Usage: pSettle(promises) or promises::pSettle()
|
||||
export function pSettle (promises) {
|
||||
return pAll(this || promises, pReflect)
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
export {promisify}
|
||||
export {promisifyAll} from 'bluebird'
|
||||
export {
|
||||
// Create a function which returns promises instead of taking a
|
||||
// callback.
|
||||
promisify,
|
||||
|
||||
// For all enumerable methods of an object, create a new method
|
||||
// which name is suffixed with `Async` which return promises instead
|
||||
// of taking a callback.
|
||||
promisifyAll
|
||||
} from 'bluebird'
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
@@ -121,30 +380,78 @@ export function parseSize (size) {
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
const _has = Object.prototype.hasOwnProperty
|
||||
|
||||
// Removes an own property from an object and returns its value.
|
||||
export const popProperty = obj => {
|
||||
for (const prop in obj) {
|
||||
if (_has.call(obj, prop)) {
|
||||
return extractProperty(obj, prop)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
// Format a date in ISO 8601 in a safe way to be used in filenames
|
||||
// (even on Windows).
|
||||
export const safeDateFormat = d3TimeFormat('%Y%m%dT%H%M%SZ')
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
// This functions are often used throughout xo-server.
|
||||
//
|
||||
// Exports them from here to avoid direct dependencies on lodash.
|
||||
export { default as forEach } from 'lodash.foreach'
|
||||
export { default as isEmpty } from 'lodash.isempty'
|
||||
export { default as mapToArray } from 'lodash.map'
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
// Special value which can be returned to stop an iteration in map()
|
||||
// and mapInPlace().
|
||||
export const done = {}
|
||||
export const DONE = {}
|
||||
|
||||
// Similar to `lodash.map()` for array and `lodash.mapValues()` for
|
||||
// objects.
|
||||
// Fill `target` by running each element in `collection` through
|
||||
// `iteratee`.
|
||||
//
|
||||
// Note: can be interrupted by returning the special value `done`
|
||||
// provided as the forth argument.
|
||||
export function map (col, iterator, thisArg = this) {
|
||||
const result = has(col, 'length') ? [] : {}
|
||||
forEach(col, (item, i) => {
|
||||
const value = iterator.call(thisArg, item, i, done)
|
||||
if (value === done) {
|
||||
// If `target` is undefined, it defaults to a new array if
|
||||
// `collection` is array-like (has a `length` property), otherwise an
|
||||
// object.
|
||||
//
|
||||
// The context of `iteratee` can be specified via `thisArg`.
|
||||
//
|
||||
// Note: the Mapping can be interrupted by returning the special value
|
||||
// `DONE` provided as the fourth argument.
|
||||
//
|
||||
// Usage: map(collection, item => item + 1)
|
||||
export function map (
|
||||
collection,
|
||||
iteratee,
|
||||
thisArg,
|
||||
target = has(collection, 'length') ? [] : {}
|
||||
) {
|
||||
forEach(collection, (item, i) => {
|
||||
const value = iteratee.call(thisArg, item, i, collection, DONE)
|
||||
if (value === DONE) {
|
||||
return false
|
||||
}
|
||||
|
||||
result[i] = value
|
||||
target[i] = value
|
||||
})
|
||||
return result
|
||||
|
||||
return target
|
||||
}
|
||||
|
||||
// Helper to `map()` to update the current collection.
|
||||
export function mapInPlace (collection, iteratee, thisArg) {
|
||||
return map(collection, iteratee, thisArg, collection)
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
// Create a hash from multiple values.
|
||||
export const multiKeyHash = (...args) => new Promise((resolve, reject) => {
|
||||
export const multiKeyHash = (...args) => new Promise(resolve => {
|
||||
const hash = multiKeyHashInt(...args)
|
||||
|
||||
const buf = new Buffer(4)
|
||||
@@ -153,22 +460,7 @@ export const multiKeyHash = (...args) => new Promise((resolve, reject) => {
|
||||
resolve(base64url(buf))
|
||||
})
|
||||
|
||||
// Similar to `map()` but change the current collection.
|
||||
//
|
||||
// Note: can be interrupted by returning the special value `done`
|
||||
// provided as the forth argument.
|
||||
export function mapInPlace (col, iterator, thisArg = this) {
|
||||
forEach(col, (item, i) => {
|
||||
const value = iterator.call(thisArg, item, i, done)
|
||||
if (value === done) {
|
||||
return false
|
||||
}
|
||||
|
||||
col[i] = value
|
||||
})
|
||||
|
||||
return col
|
||||
}
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
// Wrap a value in a function.
|
||||
export const wrap = (value) => () => value
|
||||
export const wrap = value => () => value
|
||||
|
||||
@@ -1,16 +1,20 @@
|
||||
/* eslint-env mocha */
|
||||
|
||||
import expect from 'must'
|
||||
import sinon from 'sinon'
|
||||
|
||||
// ===================================================================
|
||||
|
||||
import {
|
||||
camelToSnakeCase,
|
||||
createRawObject,
|
||||
ensureArray,
|
||||
extractProperty,
|
||||
formatXml,
|
||||
generateToken,
|
||||
parseSize
|
||||
parseSize,
|
||||
pFinally,
|
||||
pSettle
|
||||
} from './utils'
|
||||
|
||||
// ===================================================================
|
||||
@@ -31,6 +35,28 @@ describe('camelToSnakeCase()', function () {
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
describe('createRawObject()', () => {
|
||||
it('returns an object', () => {
|
||||
expect(createRawObject()).to.be.an.object()
|
||||
})
|
||||
|
||||
it('returns an empty object', () => {
|
||||
expect(createRawObject()).to.be.empty()
|
||||
})
|
||||
|
||||
it('creates a new object each time', () => {
|
||||
expect(createRawObject()).to.not.equal(createRawObject())
|
||||
})
|
||||
|
||||
if (Object.getPrototypeOf) {
|
||||
it('creates an object without a prototype', () => {
|
||||
expect(Object.getPrototypeOf(createRawObject())).to.be.null()
|
||||
})
|
||||
}
|
||||
})
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
describe('ensureArray()', function () {
|
||||
it('wrap the value in an array', function () {
|
||||
const value = 'foo'
|
||||
@@ -115,3 +141,93 @@ describe('parseSize()', function () {
|
||||
expect(parseSize('3MB')).to.equal(3e6)
|
||||
})
|
||||
})
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
describe('pFinally()', () => {
|
||||
it('calls a callback on resolution', async () => {
|
||||
const value = {}
|
||||
const spy = sinon.spy()
|
||||
|
||||
await expect(
|
||||
Promise.resolve(value)::pFinally(spy)
|
||||
).to.resolve.to.equal(
|
||||
value
|
||||
)
|
||||
|
||||
expect(spy.callCount).to.equal(1)
|
||||
})
|
||||
|
||||
it('calls a callback on rejection', async () => {
|
||||
const reason = {}
|
||||
const spy = sinon.spy()
|
||||
|
||||
await expect(
|
||||
Promise.reject(reason)::pFinally(spy)
|
||||
).to.reject.to.equal(
|
||||
reason
|
||||
)
|
||||
|
||||
expect(spy.callCount).to.equal(1)
|
||||
})
|
||||
})
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
describe('pSettle()', () => {
|
||||
it('works with arrays', async () => {
|
||||
const [
|
||||
status1,
|
||||
status2,
|
||||
status3
|
||||
] = await pSettle([
|
||||
Promise.resolve(42),
|
||||
Math.PI,
|
||||
Promise.reject('fatality')
|
||||
])
|
||||
|
||||
expect(status1.isRejected()).to.equal(false)
|
||||
expect(status2.isRejected()).to.equal(false)
|
||||
expect(status3.isRejected()).to.equal(true)
|
||||
|
||||
expect(status1.isFulfilled()).to.equal(true)
|
||||
expect(status2.isFulfilled()).to.equal(true)
|
||||
expect(status3.isFulfilled()).to.equal(false)
|
||||
|
||||
expect(status1.value()).to.equal(42)
|
||||
expect(status2.value()).to.equal(Math.PI)
|
||||
expect(::status3.value).to.throw()
|
||||
|
||||
expect(::status1.reason).to.throw()
|
||||
expect(::status2.reason).to.throw()
|
||||
expect(status3.reason()).to.equal('fatality')
|
||||
})
|
||||
|
||||
it('works with objects', async () => {
|
||||
const {
|
||||
a: status1,
|
||||
b: status2,
|
||||
c: status3
|
||||
} = await pSettle({
|
||||
a: Promise.resolve(42),
|
||||
b: Math.PI,
|
||||
c: Promise.reject('fatality')
|
||||
})
|
||||
|
||||
expect(status1.isRejected()).to.equal(false)
|
||||
expect(status2.isRejected()).to.equal(false)
|
||||
expect(status3.isRejected()).to.equal(true)
|
||||
|
||||
expect(status1.isFulfilled()).to.equal(true)
|
||||
expect(status2.isFulfilled()).to.equal(true)
|
||||
expect(status3.isFulfilled()).to.equal(false)
|
||||
|
||||
expect(status1.value()).to.equal(42)
|
||||
expect(status2.value()).to.equal(Math.PI)
|
||||
expect(::status3.value).to.throw()
|
||||
|
||||
expect(::status1.reason).to.throw()
|
||||
expect(::status2.reason).to.throw()
|
||||
expect(status3.reason()).to.equal('fatality')
|
||||
})
|
||||
})
|
||||
|
||||
@@ -1,4 +1,3 @@
|
||||
import assign from 'lodash.assign'
|
||||
import createDebug from 'debug'
|
||||
import WebSocket from 'ws'
|
||||
|
||||
@@ -12,9 +11,11 @@ const defaults = {
|
||||
// Proxy a WebSocket `client` to a remote server which has `url` as
|
||||
// address.
|
||||
export default function wsProxy (client, url, opts) {
|
||||
opts = assign({}, defaults, {
|
||||
protocol: client.protocol
|
||||
}, opts)
|
||||
opts = {
|
||||
...defaults,
|
||||
protocol: client.protocol,
|
||||
...opts
|
||||
}
|
||||
const autoClose = !!opts.autoClose
|
||||
delete opts.autoClose
|
||||
|
||||
|
||||
611
src/xapi-object-to-xo.js
Normal file
611
src/xapi-object-to-xo.js
Normal file
@@ -0,0 +1,611 @@
|
||||
import isArray from 'lodash.isarray'
|
||||
|
||||
import {
|
||||
ensureArray,
|
||||
extractProperty,
|
||||
forEach,
|
||||
mapToArray,
|
||||
parseXml
|
||||
} from './utils'
|
||||
import {
|
||||
isHostRunning,
|
||||
isVmHvm,
|
||||
isVmRunning,
|
||||
parseDateTime
|
||||
} from './xapi'
|
||||
|
||||
// ===================================================================
|
||||
|
||||
const {
|
||||
defineProperties,
|
||||
freeze
|
||||
} = Object
|
||||
|
||||
function link (obj, prop, idField = '$id') {
|
||||
const dynamicValue = obj[`$${prop}`]
|
||||
if (dynamicValue == null) {
|
||||
return dynamicValue // Properly handles null and undefined.
|
||||
}
|
||||
|
||||
if (isArray(dynamicValue)) {
|
||||
return mapToArray(dynamicValue, idField)
|
||||
}
|
||||
|
||||
return dynamicValue[idField]
|
||||
}
|
||||
|
||||
// Parse a string date time to a Unix timestamp (in seconds).
|
||||
//
|
||||
// If there are no data or if the timestamp is 0, returns null.
|
||||
function toTimestamp (date) {
|
||||
if (!date) {
|
||||
return null
|
||||
}
|
||||
|
||||
const ms = parseDateTime(date).getTime()
|
||||
if (!ms) {
|
||||
return null
|
||||
}
|
||||
|
||||
return Math.round(ms / 1000)
|
||||
}
|
||||
|
||||
// ===================================================================
|
||||
|
||||
const TRANSFORMS = {
|
||||
pool (obj) {
|
||||
return {
|
||||
default_SR: link(obj, 'default_SR'),
|
||||
HA_enabled: Boolean(obj.ha_enabled),
|
||||
master: link(obj, 'master'),
|
||||
tags: obj.tags,
|
||||
name_description: obj.name_description,
|
||||
name_label: obj.name_label || obj.$master.name_label
|
||||
|
||||
// TODO
|
||||
// - ? networks = networksByPool.items[pool.id] (network.$pool.id)
|
||||
// - hosts = hostsByPool.items[pool.id] (host.$pool.$id)
|
||||
// - patches = poolPatchesByPool.items[pool.id] (poolPatch.$pool.id)
|
||||
// - SRs = srsByContainer.items[pool.id] (sr.$container.id)
|
||||
// - templates = vmTemplatesByContainer.items[pool.id] (vmTemplate.$container.$id)
|
||||
// - VMs = vmsByContainer.items[pool.id] (vm.$container.id)
|
||||
// - $running_hosts = runningHostsByPool.items[pool.id] (runningHost.$pool.id)
|
||||
// - $running_VMs = runningVmsByPool.items[pool.id] (runningHost.$pool.id)
|
||||
// - $VMs = vmsByPool.items[pool.id] (vm.$pool.id)
|
||||
}
|
||||
},
|
||||
|
||||
// -----------------------------------------------------------------
|
||||
|
||||
host (obj) {
|
||||
const {
|
||||
$metrics: metrics,
|
||||
other_config: otherConfig
|
||||
} = obj
|
||||
|
||||
const isRunning = isHostRunning(obj)
|
||||
|
||||
return {
|
||||
address: obj.address,
|
||||
bios_strings: obj.bios_strings,
|
||||
build: obj.software_version.build_number,
|
||||
CPUs: obj.cpu_info,
|
||||
enabled: Boolean(obj.enabled),
|
||||
current_operations: obj.current_operations,
|
||||
hostname: obj.hostname,
|
||||
iSCSI_name: otherConfig.iscsi_iqn || null,
|
||||
name_description: obj.name_description,
|
||||
name_label: obj.name_label,
|
||||
memory: (function () {
|
||||
if (metrics) {
|
||||
const free = +metrics.memory_free
|
||||
const total = +metrics.memory_total
|
||||
|
||||
return {
|
||||
usage: total - free,
|
||||
size: total
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
usage: 0,
|
||||
total: 0
|
||||
}
|
||||
})(),
|
||||
patches: link(obj, 'patches'),
|
||||
powerOnMode: obj.power_on_mode,
|
||||
power_state: isRunning ? 'Running' : 'Halted',
|
||||
tags: obj.tags,
|
||||
version: obj.software_version.product_version,
|
||||
|
||||
// TODO: dedupe.
|
||||
PIFs: link(obj, 'PIFs'),
|
||||
$PIFs: link(obj, 'PIFs'),
|
||||
PCIs: link(obj, 'PCIs'),
|
||||
$PCIs: link(obj, 'PCIs'),
|
||||
PGPUs: link(obj, 'PGPUs'),
|
||||
$PGPUs: link(obj, 'PGPUs'),
|
||||
|
||||
$PBDs: link(obj, 'PBDs')
|
||||
|
||||
// TODO:
|
||||
// - controller = vmControllersByContainer.items[host.id]
|
||||
// - SRs = srsByContainer.items[host.id]
|
||||
// - tasks = tasksByHost.items[host.id]
|
||||
// - templates = vmTemplatesByContainer.items[host.id]
|
||||
// - VMs = vmsByContainer.items[host.id]
|
||||
// - $vCPUs = sum(host.VMs, vm => host.CPUs.number)
|
||||
}
|
||||
},
|
||||
|
||||
// -----------------------------------------------------------------
|
||||
|
||||
vm (obj) {
|
||||
const {
|
||||
$guest_metrics: guestMetrics,
|
||||
$metrics: metrics,
|
||||
other_config: otherConfig
|
||||
} = obj
|
||||
|
||||
const isHvm = isVmHvm(obj)
|
||||
const isRunning = isVmRunning(obj)
|
||||
|
||||
const vm = {
|
||||
// type is redefined after for controllers/, templates &
|
||||
// snapshots.
|
||||
type: 'VM',
|
||||
|
||||
addresses: guestMetrics && guestMetrics.networks || null,
|
||||
auto_poweron: Boolean(otherConfig.auto_poweron),
|
||||
boot: obj.HVM_boot_params,
|
||||
CPUs: {
|
||||
max: +obj.VCPUs_max,
|
||||
number: (
|
||||
isRunning && metrics
|
||||
? +metrics.VCPUs_number
|
||||
: +obj.VCPUs_at_startup
|
||||
)
|
||||
},
|
||||
current_operations: obj.current_operations,
|
||||
docker: (function () {
|
||||
const monitor = otherConfig['xscontainer-monitor']
|
||||
if (!monitor) {
|
||||
return
|
||||
}
|
||||
|
||||
if (monitor === 'False') {
|
||||
return {
|
||||
enabled: false
|
||||
}
|
||||
}
|
||||
|
||||
const {
|
||||
docker_ps: process,
|
||||
docker_info: info,
|
||||
docker_version: version
|
||||
} = otherConfig
|
||||
|
||||
return {
|
||||
enabled: true,
|
||||
info: info && parseXml(info).docker_info,
|
||||
process: process && parseXml(process).docker_ps,
|
||||
version: version && parseXml(version).docker_version
|
||||
}
|
||||
})(),
|
||||
|
||||
// TODO: there is two possible value: "best-effort" and "restart"
|
||||
high_availability: Boolean(obj.ha_restart_priority),
|
||||
|
||||
memory: (function () {
|
||||
const dynamicMin = +obj.memory_dynamic_min
|
||||
const dynamicMax = +obj.memory_dynamic_max
|
||||
const staticMin = +obj.memory_static_min
|
||||
const staticMax = +obj.memory_static_max
|
||||
|
||||
const memory = {
|
||||
dynamic: [ dynamicMin, dynamicMax ],
|
||||
static: [ staticMin, staticMax ]
|
||||
}
|
||||
|
||||
const gmMemory = guestMetrics && guestMetrics.memory
|
||||
|
||||
if (!isRunning) {
|
||||
memory.size = dynamicMax
|
||||
} else if (gmMemory && gmMemory.used) {
|
||||
memory.usage = +gmMemory.used
|
||||
memory.size = +gmMemory.total
|
||||
} else if (metrics) {
|
||||
memory.size = +metrics.memory_actual
|
||||
} else {
|
||||
memory.size = dynamicMax
|
||||
}
|
||||
|
||||
return memory
|
||||
})(),
|
||||
name_description: obj.name_description,
|
||||
name_label: obj.name_label,
|
||||
other: otherConfig,
|
||||
os_version: guestMetrics && guestMetrics.os_version || null,
|
||||
power_state: obj.power_state,
|
||||
snapshots: link(obj, 'snapshots'),
|
||||
tags: obj.tags,
|
||||
VIFs: link(obj, 'VIFs'),
|
||||
virtualizationMode: isHvm ? 'hvm' : 'pv',
|
||||
|
||||
// <=> Are the Xen Server tools installed?
|
||||
//
|
||||
// - undefined: unknown status
|
||||
// - false: not optimized
|
||||
// - 'out of date': optimized but drivers should be updated
|
||||
// - 'up to date': optimized
|
||||
xenTools: (() => {
|
||||
if (!isRunning || !metrics) {
|
||||
// Unknown status, returns nothing.
|
||||
return
|
||||
}
|
||||
|
||||
if (!guestMetrics) {
|
||||
return false
|
||||
}
|
||||
|
||||
const { PV_drivers_version: { major, minor } } = guestMetrics
|
||||
if (major === undefined || minor === undefined) {
|
||||
return false
|
||||
}
|
||||
|
||||
return guestMetrics.PV_drivers_up_to_date
|
||||
? 'up to date'
|
||||
: 'out of date'
|
||||
})(),
|
||||
|
||||
$container: (
|
||||
isRunning
|
||||
? link(obj, 'resident_on')
|
||||
: link(obj, 'pool') // TODO: handle local VMs (`VM.get_possible_hosts()`).
|
||||
),
|
||||
$VBDs: link(obj, 'VBDs'),
|
||||
|
||||
// TODO: dedupe
|
||||
VGPUs: link(obj, 'VGPUs'),
|
||||
$VGPUs: link(obj, 'VGPUs')
|
||||
}
|
||||
|
||||
if (obj.is_control_domain) {
|
||||
vm.type += '-controller'
|
||||
} else if (obj.is_a_snapshot) {
|
||||
vm.type += '-snapshot'
|
||||
|
||||
vm.snapshot_time = toTimestamp(obj.snapshot_time)
|
||||
vm.$snapshot_of = link(obj, 'snapshot_of')
|
||||
} else if (obj.is_a_template) {
|
||||
vm.type += '-template'
|
||||
|
||||
vm.CPUs.number = +obj.VCPUs_at_startup
|
||||
vm.template_info = {
|
||||
arch: otherConfig['install-arch'],
|
||||
disks: (function () {
|
||||
const {disks: xml} = otherConfig
|
||||
let data
|
||||
if (!xml || !(data = parseXml(xml)).provision) {
|
||||
return []
|
||||
}
|
||||
|
||||
const disks = ensureArray(data.provision.disk)
|
||||
forEach(disks, function normalize (disk) {
|
||||
disk.bootable = disk.bootable === 'true'
|
||||
disk.size = +disk.size
|
||||
disk.SR = extractProperty(disk, 'sr')
|
||||
})
|
||||
|
||||
return disks
|
||||
})(),
|
||||
install_methods: (function () {
|
||||
const {['install-methods']: methods} = otherConfig
|
||||
|
||||
return methods ? methods.split(',') : []
|
||||
})(),
|
||||
install_repository: otherConfig['install-repository']
|
||||
}
|
||||
}
|
||||
|
||||
if (obj.VCPUs_params && obj.VCPUs_params.weight) {
|
||||
vm.cpuWeight = obj.VCPUs_params.weight
|
||||
}
|
||||
|
||||
if (!isHvm) {
|
||||
vm.PV_args = obj.PV_args
|
||||
}
|
||||
|
||||
return vm
|
||||
},
|
||||
|
||||
// -----------------------------------------------------------------
|
||||
|
||||
sr (obj) {
|
||||
return {
|
||||
type: 'SR',
|
||||
|
||||
content_type: obj.content_type,
|
||||
|
||||
// TODO: Should it replace usage?
|
||||
physical_usage: +obj.physical_utilisation,
|
||||
|
||||
name_description: obj.name_description,
|
||||
name_label: obj.name_label,
|
||||
size: +obj.physical_size,
|
||||
SR_type: obj.type,
|
||||
tags: obj.tags,
|
||||
usage: +obj.virtual_allocation,
|
||||
VDIs: link(obj, 'VDIs'),
|
||||
|
||||
$container: (
|
||||
obj.shared
|
||||
? link(obj, 'pool')
|
||||
: obj.$PBDs[0] && link(obj.$PBDs[0], 'host')
|
||||
),
|
||||
$PBDs: link(obj, 'PBDs')
|
||||
}
|
||||
},
|
||||
|
||||
// -----------------------------------------------------------------
|
||||
|
||||
pbd (obj) {
|
||||
return {
|
||||
type: 'PBD',
|
||||
|
||||
attached: obj.currently_attached,
|
||||
host: link(obj, 'host'),
|
||||
SR: link(obj, 'SR')
|
||||
}
|
||||
},
|
||||
|
||||
// -----------------------------------------------------------------
|
||||
|
||||
pif (obj) {
|
||||
return {
|
||||
type: 'PIF',
|
||||
|
||||
attached: Boolean(obj.currently_attached),
|
||||
device: obj.device,
|
||||
IP: obj.IP,
|
||||
MAC: obj.MAC,
|
||||
management: Boolean(obj.management), // TODO: find a better name.
|
||||
mode: obj.ip_configuration_mode,
|
||||
MTU: +obj.MTU,
|
||||
netmask: obj.netmask,
|
||||
vlan: +obj.VLAN,
|
||||
|
||||
// TODO: What is it?
|
||||
//
|
||||
// Could it mean “is this a physical interface?”.
|
||||
// How could a PIF not be physical?
|
||||
// physical: obj.physical,
|
||||
|
||||
$host: link(obj, 'host'),
|
||||
$network: link(obj, 'network')
|
||||
}
|
||||
},
|
||||
|
||||
// -----------------------------------------------------------------
|
||||
|
||||
vdi (obj) {
|
||||
if (!obj.managed) {
|
||||
return
|
||||
}
|
||||
|
||||
const vdi = {
|
||||
type: 'VDI',
|
||||
|
||||
name_description: obj.name_description,
|
||||
name_label: obj.name_label,
|
||||
size: +obj.virtual_size,
|
||||
snapshots: link(obj, 'snapshots'),
|
||||
tags: obj.tags,
|
||||
usage: +obj.physical_utilisation,
|
||||
|
||||
$SR: link(obj, 'SR'),
|
||||
$VBDs: link(obj, 'VBDs')
|
||||
}
|
||||
|
||||
if (obj.is_a_snapshot) {
|
||||
vdi.type += '-snapshot'
|
||||
vdi.snapshot_time = toTimestamp(obj.snapshot_time)
|
||||
vdi.$snapshot_of = link(obj, 'snapshot_of')
|
||||
}
|
||||
|
||||
return vdi
|
||||
},
|
||||
|
||||
// -----------------------------------------------------------------
|
||||
|
||||
vbd (obj) {
|
||||
return {
|
||||
type: 'VBD',
|
||||
|
||||
attached: Boolean(obj.currently_attached),
|
||||
bootable: Boolean(obj.bootable),
|
||||
is_cd_drive: obj.type === 'CD',
|
||||
position: obj.userdevice,
|
||||
read_only: obj.mode === 'RO',
|
||||
VDI: link(obj, 'VDI'),
|
||||
VM: link(obj, 'VM')
|
||||
}
|
||||
},
|
||||
|
||||
// -----------------------------------------------------------------
|
||||
|
||||
vif (obj) {
|
||||
return {
|
||||
type: 'VIF',
|
||||
|
||||
attached: Boolean(obj.currently_attached),
|
||||
device: obj.device, // TODO: should it be cast to a number?
|
||||
MAC: obj.MAC,
|
||||
MTU: +obj.MTU,
|
||||
|
||||
$network: link(obj, 'network'),
|
||||
$VM: link(obj, 'VM')
|
||||
}
|
||||
},
|
||||
|
||||
// -----------------------------------------------------------------
|
||||
|
||||
network (obj) {
|
||||
return {
|
||||
bridge: obj.bridge,
|
||||
MTU: +obj.MTU,
|
||||
name_description: obj.name_description,
|
||||
name_label: obj.name_label,
|
||||
tags: obj.tags,
|
||||
PIFs: link(obj, 'PIFs'),
|
||||
VIFs: link(obj, 'VIFs')
|
||||
}
|
||||
},
|
||||
|
||||
// -----------------------------------------------------------------
|
||||
|
||||
message (obj) {
|
||||
return {
|
||||
body: obj.body,
|
||||
name: obj.name,
|
||||
time: toTimestamp(obj.timestamp),
|
||||
|
||||
$object: obj.obj_uuid // Special link as it is already an UUID.
|
||||
}
|
||||
},
|
||||
|
||||
// -----------------------------------------------------------------
|
||||
|
||||
task (obj) {
|
||||
return {
|
||||
created: toTimestamp(obj.created),
|
||||
current_operations: obj.current_operations,
|
||||
finished: toTimestamp(obj.finished),
|
||||
name_description: obj.name_description,
|
||||
name_label: obj.name_label,
|
||||
progress: +obj.progress,
|
||||
result: obj.result,
|
||||
status: obj.status,
|
||||
|
||||
$host: link(obj, 'resident_on')
|
||||
}
|
||||
},
|
||||
|
||||
// -----------------------------------------------------------------
|
||||
|
||||
host_patch (obj) {
|
||||
return {
|
||||
applied: Boolean(obj.applied),
|
||||
time: toTimestamp(obj.timestamp_applied),
|
||||
pool_patch: link(obj, 'pool_patch', '$ref'),
|
||||
|
||||
$host: link(obj, 'host')
|
||||
}
|
||||
},
|
||||
|
||||
// -----------------------------------------------------------------
|
||||
|
||||
pool_patch (obj) {
|
||||
return {
|
||||
id: obj.$ref,
|
||||
|
||||
applied: Boolean(obj.pool_applied),
|
||||
description: obj.name_description,
|
||||
guidance: obj.after_apply_guidance,
|
||||
name: obj.name_label,
|
||||
size: +obj.size,
|
||||
uuid: obj.uuid,
|
||||
|
||||
// TODO: what does it mean, should we handle it?
|
||||
// version: obj.version,
|
||||
|
||||
// TODO: host.[$]pool_patches ←→ pool.[$]host_patches
|
||||
$host_patches: link(obj, 'host_patches')
|
||||
}
|
||||
},
|
||||
|
||||
// -----------------------------------------------------------------
|
||||
|
||||
pci (obj) {
|
||||
return {
|
||||
type: 'PCI',
|
||||
|
||||
class_name: obj.class_name,
|
||||
device_name: obj.device_name,
|
||||
pci_id: obj.pci_id,
|
||||
|
||||
$host: link(obj, 'host')
|
||||
}
|
||||
},
|
||||
|
||||
// -----------------------------------------------------------------
|
||||
|
||||
pgpu (obj) {
|
||||
return {
|
||||
type: 'PGPU',
|
||||
|
||||
pci: link(obj, 'PCI'),
|
||||
|
||||
// TODO: dedupe.
|
||||
host: link(obj, 'host'),
|
||||
$host: link(obj, 'host'),
|
||||
vgpus: link(obj, 'resident_VGPUs'),
|
||||
$vgpus: link(obj, 'resident_VGPUs')
|
||||
}
|
||||
},
|
||||
|
||||
// -----------------------------------------------------------------
|
||||
|
||||
vgpu (obj) {
|
||||
return {
|
||||
type: 'VGPU',
|
||||
|
||||
currentlyAttached: Boolean(obj.currently_attached),
|
||||
device: obj.device,
|
||||
resident_on: link(obj, 'resident_on'),
|
||||
vm: link(obj, 'VM')
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// ===================================================================
|
||||
|
||||
export default xapiObj => {
|
||||
const transform = TRANSFORMS[xapiObj.$type.toLowerCase()]
|
||||
if (!transform) {
|
||||
return
|
||||
}
|
||||
|
||||
const xoObj = transform(xapiObj)
|
||||
if (!xoObj) {
|
||||
return
|
||||
}
|
||||
|
||||
if (!('id' in xoObj)) {
|
||||
xoObj.id = xapiObj.$id
|
||||
}
|
||||
if (!('type' in xoObj)) {
|
||||
xoObj.type = xapiObj.$type
|
||||
}
|
||||
if (
|
||||
'uuid' in xapiObj &&
|
||||
!('uuid' in xoObj)
|
||||
) {
|
||||
xoObj.uuid = xapiObj.uuid
|
||||
}
|
||||
xoObj.$pool = xapiObj.$pool.$id
|
||||
xoObj.$poolId = xoObj.$pool // TODO: deprecated, remove when no longer used in xo-web
|
||||
|
||||
// Internal properties.
|
||||
defineProperties(xoObj, {
|
||||
_xapiId: {
|
||||
value: xapiObj.$id
|
||||
},
|
||||
_xapiRef: {
|
||||
value: xapiObj.$ref
|
||||
}
|
||||
})
|
||||
|
||||
// Freezes and returns the new object.
|
||||
return freeze(xoObj)
|
||||
}
|
||||
@@ -1,517 +0,0 @@
|
||||
import forEach from 'lodash.foreach'
|
||||
import isArray from 'lodash.isarray'
|
||||
import map from 'lodash.map'
|
||||
|
||||
import {
|
||||
ensureArray,
|
||||
extractProperty,
|
||||
parseXml
|
||||
} from './utils'
|
||||
import {
|
||||
isHostRunning,
|
||||
isVmRunning
|
||||
} from './xapi'
|
||||
|
||||
// ===================================================================
|
||||
|
||||
function link (obj, prop) {
|
||||
const dynamicValue = obj[`$${prop}`]
|
||||
if (dynamicValue == null) {
|
||||
return dynamicValue // Properly handles null and undefined.
|
||||
}
|
||||
|
||||
if (isArray(dynamicValue)) {
|
||||
return map(dynamicValue, '$id')
|
||||
}
|
||||
|
||||
return dynamicValue.$id
|
||||
}
|
||||
|
||||
// The JSON interface of XAPI format dates incorrectly.
|
||||
const JSON_DATE_RE = /^(\d{4})(\d{2})(\d{2})T(.+)$/
|
||||
function fixJsonDate (date) {
|
||||
const matches = JSON_DATE_RE.exec(date)
|
||||
|
||||
if (!matches) {
|
||||
return date
|
||||
}
|
||||
|
||||
const [, year, month, day, time] = matches
|
||||
return `${year}-${month}-${day}T${time}`
|
||||
}
|
||||
|
||||
function toTimestamp (date) {
|
||||
if (!date) {
|
||||
return null
|
||||
}
|
||||
|
||||
return Math.round(Date.parse(fixJsonDate(date)) / 1000)
|
||||
}
|
||||
|
||||
// ===================================================================
|
||||
|
||||
export function pool (obj) {
|
||||
return {
|
||||
default_SR: link(obj, 'default_SR'),
|
||||
HA_enabled: Boolean(obj.ha_enabled),
|
||||
master: link(obj, 'master'),
|
||||
tags: obj.tags,
|
||||
name_description: obj.name_description,
|
||||
name_label: obj.name_label || obj.$master.name_label
|
||||
|
||||
// TODO
|
||||
// - ? networks = networksByPool.items[pool.id] (network.$pool.id)
|
||||
// - hosts = hostsByPool.items[pool.id] (host.$pool.$id)
|
||||
// - patches = poolPatchesByPool.items[pool.id] (poolPatch.$pool.id)
|
||||
// - SRs = srsByContainer.items[pool.id] (sr.$container.id)
|
||||
// - templates = vmTemplatesByContainer.items[pool.id] (vmTemplate.$container.$id)
|
||||
// - VMs = vmsByContainer.items[pool.id] (vm.$container.id)
|
||||
// - $running_hosts = runningHostsByPool.items[pool.id] (runningHost.$pool.id)
|
||||
// - $running_VMs = runningVmsByPool.items[pool.id] (runningHost.$pool.id)
|
||||
// - $VMs = vmsByPool.items[pool.id] (vm.$pool.id)
|
||||
}
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
export function host (obj) {
|
||||
const {
|
||||
$metrics: metrics,
|
||||
other_config: otherConfig
|
||||
} = obj
|
||||
|
||||
const isRunning = isHostRunning(obj)
|
||||
|
||||
return {
|
||||
address: obj.address,
|
||||
bios_strings: obj.bios_strings,
|
||||
build: obj.software_version.build_number,
|
||||
CPUs: obj.cpu_info,
|
||||
enabled: Boolean(obj.enabled),
|
||||
current_operations: obj.current_operations,
|
||||
hostname: obj.hostname,
|
||||
iSCSI_name: otherConfig.iscsi_iqn || null,
|
||||
name_description: obj.name_description,
|
||||
name_label: obj.name_label,
|
||||
memory: (function () {
|
||||
if (metrics) {
|
||||
const free = +metrics.memory_free
|
||||
const total = +metrics.memory_total
|
||||
|
||||
return {
|
||||
usage: total - free,
|
||||
size: total
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
usage: 0,
|
||||
total: 0
|
||||
}
|
||||
})(),
|
||||
patches: link(obj, 'patches'),
|
||||
powerOnMode: obj.power_on_mode,
|
||||
power_state: isRunning ? 'Running' : 'Halted',
|
||||
tags: obj.tags,
|
||||
version: obj.software_version.product_version,
|
||||
|
||||
// TODO: dedupe.
|
||||
PIFs: link(obj, 'PIFs'),
|
||||
$PIFs: link(obj, 'PIFs'),
|
||||
PCIs: link(obj, 'PCIs'),
|
||||
$PCIs: link(obj, 'PCIs'),
|
||||
PGPUs: link(obj, 'PGPUs'),
|
||||
$PGPUs: link(obj, 'PGPUs'),
|
||||
|
||||
$PBDs: link(obj, 'PBDs')
|
||||
|
||||
// TODO:
|
||||
// - controller = vmControllersByContainer.items[host.id]
|
||||
// - SRs = srsByContainer.items[host.id]
|
||||
// - tasks = tasksByHost.items[host.id]
|
||||
// - templates = vmTemplatesByContainer.items[host.id]
|
||||
// - VMs = vmsByContainer.items[host.id]
|
||||
// - $vCPUs = sum(host.VMs, vm => host.CPUs.number)
|
||||
}
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
export function vm (obj) {
|
||||
const {
|
||||
$guest_metrics: guestMetrics,
|
||||
$metrics: metrics,
|
||||
other_config: otherConfig
|
||||
} = obj
|
||||
|
||||
const isRunning = isVmRunning(obj)
|
||||
|
||||
const vm = {
|
||||
// type is redefined after for controllers/, templates &
|
||||
// snapshots.
|
||||
type: 'VM',
|
||||
|
||||
addresses: guestMetrics && guestMetrics.networks || null,
|
||||
auto_poweron: Boolean(otherConfig.auto_poweron),
|
||||
boot: obj.HVM_boot_params,
|
||||
CPUs: {
|
||||
max: +obj.VCPUs_max,
|
||||
number: (
|
||||
isRunning && metrics
|
||||
? +metrics.VCPUs_number
|
||||
: +obj.VCPUs_at_startup
|
||||
)
|
||||
},
|
||||
current_operations: obj.current_operations,
|
||||
docker: (function () {
|
||||
const monitor = otherConfig['xscontainer-monitor']
|
||||
if (!monitor) {
|
||||
return
|
||||
}
|
||||
|
||||
if (monitor === 'False') {
|
||||
return {
|
||||
enabled: false
|
||||
}
|
||||
}
|
||||
|
||||
const {
|
||||
docker_ps: process,
|
||||
docker_info: info,
|
||||
docker_version: version
|
||||
} = otherConfig
|
||||
|
||||
return {
|
||||
enabled: true,
|
||||
info: info && parseXml(info).docker_info,
|
||||
process: process && parseXml(process).docker_ps,
|
||||
version: version && parseXml(version).docker_version
|
||||
}
|
||||
})(),
|
||||
|
||||
// TODO: there is two possible value: "best-effort" and "restart"
|
||||
high_availability: Boolean(obj.ha_restart_priority),
|
||||
|
||||
memory: (function () {
|
||||
const dynamicMin = +obj.memory_dynamic_min
|
||||
const dynamicMax = +obj.memory_dynamic_max
|
||||
const staticMin = +obj.memory_static_min
|
||||
const staticMax = +obj.memory_static_max
|
||||
|
||||
const memory = {
|
||||
dynamic: [ dynamicMin, dynamicMax ],
|
||||
static: [ staticMin, staticMax ]
|
||||
}
|
||||
|
||||
const gmMemory = guestMetrics && guestMetrics.memory
|
||||
|
||||
if (!isRunning) {
|
||||
memory.size = dynamicMax
|
||||
} else if (gmMemory && gmMemory.used) {
|
||||
memory.usage = +gmMemory.used
|
||||
memory.size = +gmMemory.total
|
||||
} else if (metrics) {
|
||||
memory.size = +metrics.memory_actual
|
||||
} else {
|
||||
memory.size = dynamicMax
|
||||
}
|
||||
|
||||
return memory
|
||||
})(),
|
||||
name_description: obj.name_description,
|
||||
name_label: obj.name_label,
|
||||
other: otherConfig,
|
||||
os_version: guestMetrics && guestMetrics.os_version || null,
|
||||
power_state: obj.power_state,
|
||||
PV_args: obj.PV_args,
|
||||
PV_drivers: Boolean(guestMetrics),
|
||||
PV_drivers_up_to_date: Boolean(guestMetrics && guestMetrics.PV_drivers_up_to_date),
|
||||
snapshot_time: toTimestamp(obj.snapshot_time),
|
||||
snapshots: link(obj, 'snapshots'),
|
||||
tags: obj.tags,
|
||||
VIFs: link(obj, 'VIFs'),
|
||||
|
||||
$container: (
|
||||
isRunning
|
||||
? link(obj, 'resident_on')
|
||||
: link(obj, 'pool') // TODO: handle local VMs (`VM.get_possible_hosts()`).
|
||||
),
|
||||
$VBDs: link(obj, 'VBDs'),
|
||||
|
||||
// TODO: dedupe
|
||||
VGPUs: link(obj, 'VGPUs'),
|
||||
$VGPUs: link(obj, 'VGPUs')
|
||||
}
|
||||
|
||||
if (obj.is_control_domain) {
|
||||
vm.type += '-controller'
|
||||
} else if (obj.is_a_snapshot) {
|
||||
vm.type += '-snapshot'
|
||||
|
||||
vm.$snapshot_of = link(obj, 'snapshot_of')
|
||||
} else if (obj.is_a_template) {
|
||||
vm.type += '-template'
|
||||
|
||||
vm.CPUs.number = +obj.VCPUs_at_startup
|
||||
vm.template_info = {
|
||||
arch: otherConfig['install-arch'],
|
||||
disks: (function () {
|
||||
const {disks: xml} = otherConfig
|
||||
let data
|
||||
if (!xml || !(data = parseXml(xml)).provision) {
|
||||
return []
|
||||
}
|
||||
|
||||
const disks = ensureArray(data.provision.disk)
|
||||
forEach(disks, function normalize (disk) {
|
||||
disk.bootable = disk.bootable === 'true'
|
||||
disk.size = +disk.size
|
||||
disk.SR = extractProperty(disk, 'sr')
|
||||
})
|
||||
|
||||
return disks
|
||||
})(),
|
||||
install_methods: (function () {
|
||||
const {['install-methods']: methods} = otherConfig
|
||||
|
||||
return methods ? methods.split(',') : []
|
||||
})()
|
||||
}
|
||||
}
|
||||
|
||||
return vm
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
export function sr (obj) {
|
||||
return {
|
||||
type: 'SR',
|
||||
|
||||
content_type: obj.content_type,
|
||||
name_description: obj.name_description,
|
||||
name_label: obj.name_label,
|
||||
physical_usage: +obj.physical_utilisation,
|
||||
size: +obj.physical_size,
|
||||
SR_type: obj.type,
|
||||
tags: obj.tags,
|
||||
usage: +obj.virtual_allocation,
|
||||
VDIs: link(obj, 'VDIs'),
|
||||
|
||||
$container: (
|
||||
obj.shared
|
||||
? link(obj, 'pool')
|
||||
: obj.$PBDs[0] && link(obj.$PBDs[0], 'host')
|
||||
),
|
||||
$PBDs: link(obj, 'PBDs')
|
||||
}
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
export function pbd (obj) {
|
||||
return {
|
||||
type: 'PBD',
|
||||
|
||||
attached: obj.currently_attached,
|
||||
host: link(obj, 'host'),
|
||||
SR: link(obj, 'SR')
|
||||
}
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
export function pif (obj) {
|
||||
return {
|
||||
type: 'PIF',
|
||||
|
||||
attached: Boolean(obj.currently_attached),
|
||||
device: obj.device,
|
||||
IP: obj.IP,
|
||||
MAC: obj.MAC,
|
||||
management: Boolean(obj.management), // TODO: find a better name.
|
||||
mode: obj.ip_configuration_mode,
|
||||
MTU: +obj.MTU,
|
||||
netmask: obj.netmask,
|
||||
vlan: +obj.VLAN,
|
||||
|
||||
// TODO: What is it?
|
||||
//
|
||||
// Could it mean “is this a physical interface?”.
|
||||
// How could a PIF not be physical?
|
||||
// physical: obj.physical,
|
||||
|
||||
$host: link(obj, 'host'),
|
||||
$network: link(obj, 'network')
|
||||
}
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
// TODO: should we have a VDI-snapshot type like we have with VMs?
|
||||
export function vdi (obj) {
|
||||
if (!obj.managed) {
|
||||
return
|
||||
}
|
||||
|
||||
return {
|
||||
type: 'VDI',
|
||||
|
||||
name_description: obj.name_description,
|
||||
name_label: obj.name_label,
|
||||
size: +obj.virtual_size,
|
||||
snapshots: link(obj, 'snapshots'),
|
||||
snapshot_time: toTimestamp(obj.snapshot_time),
|
||||
tags: obj.tags,
|
||||
usage: +obj.physical_utilisation,
|
||||
|
||||
$snapshot_of: link(obj, 'snapshot_of'),
|
||||
$SR: link(obj, 'SR'),
|
||||
$VBDs: link(obj, 'VBDs')
|
||||
}
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
export function vbd (obj) {
|
||||
return {
|
||||
type: 'VBD',
|
||||
|
||||
attached: Boolean(obj.currently_attached),
|
||||
bootable: Boolean(obj.bootable),
|
||||
is_cd_drive: obj.type === 'CD',
|
||||
position: obj.userdevice,
|
||||
read_only: obj.mode === 'RO',
|
||||
VDI: link(obj, 'VDI'),
|
||||
VM: link(obj, 'VM')
|
||||
}
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
export function vif (obj) {
|
||||
return {
|
||||
type: 'VIF',
|
||||
|
||||
attached: Boolean(obj.currently_attached),
|
||||
device: obj.device, // TODO: should it be cast to a number?
|
||||
MAC: obj.MAC,
|
||||
MTU: +obj.MTU,
|
||||
|
||||
$network: link(obj, 'network'),
|
||||
$VM: link(obj, 'VM')
|
||||
}
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
export function network (obj) {
|
||||
return {
|
||||
bridge: obj.bridge,
|
||||
MTU: +obj.MTU,
|
||||
name_description: obj.name_description,
|
||||
name_label: obj.name_label,
|
||||
tags: obj.tags,
|
||||
PIFs: link(obj, 'PIFs'),
|
||||
VIFs: link(obj, 'VIFs')
|
||||
}
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
export function message (obj) {
|
||||
return {
|
||||
body: obj.body,
|
||||
name: obj.name,
|
||||
time: toTimestamp(obj.timestamp),
|
||||
|
||||
$object: obj.obj_uuid // Special link as it is already an UUID.
|
||||
}
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
export function task (obj) {
|
||||
return {
|
||||
created: toTimestamp(obj.created),
|
||||
current_operations: obj.current_operations,
|
||||
finished: toTimestamp(obj.finished),
|
||||
name_description: obj.name_description,
|
||||
name_label: obj.name_label,
|
||||
progress: +obj.progress,
|
||||
result: obj.result,
|
||||
status: obj.status,
|
||||
|
||||
$host: link(obj, 'resident_on')
|
||||
}
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
export function host_patch (obj) {
|
||||
return {
|
||||
applied: Boolean(obj.applied),
|
||||
time: toTimestamp(obj.timestamp_applied),
|
||||
pool_patch: link(obj, 'pool_patch'),
|
||||
|
||||
$host: link(obj, 'host')
|
||||
}
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
export function pool_patch (obj) {
|
||||
return {
|
||||
applied: Boolean(obj.pool_applied),
|
||||
name_description: obj.name_description,
|
||||
name_label: obj.name_label,
|
||||
size: +obj.size,
|
||||
version: obj.version,
|
||||
|
||||
// TODO: host.[$]pool_patches ←→ pool.[$]host_patches
|
||||
$host_patches: link(obj, 'host_patches')
|
||||
}
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
export function pci (obj) {
|
||||
return {
|
||||
type: 'PCI',
|
||||
|
||||
class_name: obj.class_name,
|
||||
device_name: obj.device_name,
|
||||
pci_id: obj.pci_id,
|
||||
|
||||
$host: link(obj, 'host')
|
||||
}
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
export function pgpu (obj) {
|
||||
return {
|
||||
type: 'PGPU',
|
||||
|
||||
pci: link(obj, 'PCI'),
|
||||
|
||||
// TODO: dedupe.
|
||||
host: link(obj, 'host'),
|
||||
$host: link(obj, 'host'),
|
||||
vgpus: link(obj, 'resident_VGPUs'),
|
||||
$vgpus: link(obj, 'resident_VGPUs')
|
||||
}
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
export function vgpu (obj) {
|
||||
return {
|
||||
type: 'VGPU',
|
||||
|
||||
currentlyAttached: Boolean(obj.currently_attached),
|
||||
device: obj.device,
|
||||
resident_on: link(obj, 'resident_on'),
|
||||
vm: link(obj, 'VM')
|
||||
}
|
||||
}
|
||||
533
src/xapi-stats.js
Normal file
533
src/xapi-stats.js
Normal file
@@ -0,0 +1,533 @@
|
||||
import endsWith from 'lodash.endswith'
|
||||
import JSON5 from 'json5'
|
||||
import { BaseError } from 'make-error'
|
||||
|
||||
import httpRequest from './http-request'
|
||||
import { parseDateTime } from './xapi'
|
||||
|
||||
const RRD_STEP_SECONDS = 5
|
||||
const RRD_STEP_MINUTES = 60
|
||||
const RRD_STEP_HOURS = 3600
|
||||
const RRD_STEP_DAYS = 86400
|
||||
|
||||
const RRD_STEP_FROM_STRING = {
|
||||
'seconds': RRD_STEP_SECONDS,
|
||||
'minutes': RRD_STEP_MINUTES,
|
||||
'hours': RRD_STEP_HOURS,
|
||||
'days': RRD_STEP_DAYS
|
||||
}
|
||||
|
||||
const RRD_POINTS_PER_STEP = {
|
||||
[RRD_STEP_SECONDS]: 120,
|
||||
[RRD_STEP_MINUTES]: 120,
|
||||
[RRD_STEP_HOURS]: 168,
|
||||
[RRD_STEP_DAYS]: 366
|
||||
}
|
||||
|
||||
export class XapiStatsError extends BaseError {}
|
||||
|
||||
export class UnknownLegendFormat extends XapiStatsError {
|
||||
constructor (line) {
|
||||
super('Unknown legend line: ' + line)
|
||||
}
|
||||
}
|
||||
|
||||
export class FaultyGranularity extends XapiStatsError {
|
||||
constructor (msg) {
|
||||
super(msg)
|
||||
}
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
// Utils
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
function makeUrl (hostname, sessionId, timestamp) {
|
||||
return `https://${hostname}/rrd_updates?session_id=${sessionId}&start=${timestamp}&cf=AVERAGE&host=true&json=true`
|
||||
}
|
||||
|
||||
// Return current local timestamp in seconds
|
||||
function getCurrentTimestamp () {
|
||||
return Date.now() / 1000
|
||||
}
|
||||
|
||||
function convertNanToNull (value) {
|
||||
return isNaN(value) ? null : value
|
||||
}
|
||||
|
||||
async function getServerTimestamp (xapi, host) {
|
||||
const serverLocalTime = await xapi.call('host.get_servertime', host.$ref)
|
||||
return Math.floor(parseDateTime(serverLocalTime).getTime() / 1000)
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
// Stats
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
function getNewHostStats () {
|
||||
return {
|
||||
cpus: [],
|
||||
pifs: {
|
||||
rx: [],
|
||||
tx: []
|
||||
},
|
||||
load: [],
|
||||
memory: [],
|
||||
memoryFree: [],
|
||||
memoryUsed: []
|
||||
}
|
||||
}
|
||||
|
||||
function getNewVmStats () {
|
||||
return {
|
||||
cpus: [],
|
||||
vifs: {
|
||||
rx: [],
|
||||
tx: []
|
||||
},
|
||||
xvds: {
|
||||
r: {},
|
||||
w: {}
|
||||
},
|
||||
memory: [],
|
||||
memoryFree: [],
|
||||
memoryUsed: []
|
||||
}
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
// Stats legends
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
function getNewHostLegends () {
|
||||
return {
|
||||
cpus: [],
|
||||
pifs: {
|
||||
rx: [],
|
||||
tx: []
|
||||
},
|
||||
load: null,
|
||||
memoryFree: null,
|
||||
memory: null
|
||||
}
|
||||
}
|
||||
|
||||
function getNewVmLegends () {
|
||||
return {
|
||||
cpus: [],
|
||||
vifs: {
|
||||
rx: [],
|
||||
tx: []
|
||||
},
|
||||
xvds: {
|
||||
r: [],
|
||||
w: []
|
||||
},
|
||||
memoryFree: null,
|
||||
memory: null
|
||||
}
|
||||
}
|
||||
|
||||
// Compute one legend line for one host
|
||||
function parseOneHostLegend (hostLegend, type, index) {
|
||||
let resReg
|
||||
|
||||
if ((resReg = /^cpu([0-9]+)$/.exec(type)) !== null) {
|
||||
hostLegend.cpus[resReg[1]] = index
|
||||
} else if ((resReg = /^pif_eth([0-9]+)_(rx|tx)$/.exec(type)) !== null) {
|
||||
if (resReg[2] === 'rx') {
|
||||
hostLegend.pifs.rx[resReg[1]] = index
|
||||
} else {
|
||||
hostLegend.pifs.tx[resReg[1]] = index
|
||||
}
|
||||
} else if (type === 'loadavg') {
|
||||
hostLegend.load = index
|
||||
} else if (type === 'memory_free_kib') {
|
||||
hostLegend.memoryFree = index
|
||||
} else if (type === 'memory_total_kib') {
|
||||
hostLegend.memory = index
|
||||
}
|
||||
}
|
||||
|
||||
// Compute one legend line for one vm
|
||||
function parseOneVmLegend (vmLegend, type, index) {
|
||||
let resReg
|
||||
|
||||
if ((resReg = /^cpu([0-9]+)$/.exec(type)) !== null) {
|
||||
vmLegend.cpus[resReg[1]] = index
|
||||
} else if ((resReg = /^vif_([0-9]+)_(rx|tx)$/.exec(type)) !== null) {
|
||||
if (resReg[2] === 'rx') {
|
||||
vmLegend.vifs.rx[resReg[1]] = index
|
||||
} else {
|
||||
vmLegend.vifs.tx[resReg[1]] = index
|
||||
}
|
||||
} else if ((resReg = /^vbd_xvd(.)_(read|write)$/.exec(type))) {
|
||||
if (resReg[2] === 'read') {
|
||||
vmLegend.xvds.r[resReg[1]] = index
|
||||
} else {
|
||||
vmLegend.xvds.w[resReg[1]] = index
|
||||
}
|
||||
} else if (type === 'memory_internal_free') {
|
||||
vmLegend.memoryFree = index
|
||||
} else if (endsWith(type, 'memory')) {
|
||||
vmLegend.memory = index
|
||||
}
|
||||
}
|
||||
|
||||
// Compute Stats Legends for host and vms from RRD update
|
||||
function parseLegends (json) {
|
||||
const hostLegends = getNewHostLegends()
|
||||
const vmsLegends = {}
|
||||
|
||||
json.meta.legend.forEach((value, index) => {
|
||||
const parsedLine = /^AVERAGE:(host|vm):(.+):(.+)$/.exec(value)
|
||||
|
||||
if (parsedLine === null) {
|
||||
throw new UnknownLegendFormat(value)
|
||||
}
|
||||
|
||||
const [ , name, uuid, type, , ] = parsedLine
|
||||
|
||||
if (name !== 'vm') {
|
||||
parseOneHostLegend(hostLegends, type, index)
|
||||
} else {
|
||||
if (vmsLegends[uuid] === undefined) {
|
||||
vmsLegends[uuid] = getNewVmLegends()
|
||||
}
|
||||
|
||||
parseOneVmLegend(vmsLegends[uuid], type, index)
|
||||
}
|
||||
})
|
||||
|
||||
return [hostLegends, vmsLegends]
|
||||
}
|
||||
|
||||
// Fetches and caches RRD statistics from XenServer hosts.
//
// Stats are cached per host address and per RRD step (granularity);
// only one HTTP request per host/step is issued per `step` seconds.
//
// Warning: the getters return references to internal data — callers
// must not modify the returned structures.
export default class XapiStats {
  constructor () {
    // Caches indexed by host address, then by RRD step:
    //   this._hosts[address][step] = { endTimestamp, localTimestamp, stats }
    //   this._vms[address][step][vmUuid] = vm stats
    this._vms = {}
    this._hosts = {}
  }

  // -------------------------------------------------------------------
  // Remove stats (Helper)
  // -------------------------------------------------------------------

  // Trim `dest` (accumulated stats) so that each metric present in
  // `source` (the legends) keeps at most `pointsPerStep` points, and
  // drop per-device series that no longer exist in the legends.
  _removeOlderStats (source, dest, pointsPerStep) {
    for (const key in source) {
      if (key === 'cpus') {
        for (const cpuIndex in source.cpus) {
          dest.cpus[cpuIndex].splice(0, dest.cpus[cpuIndex].length - pointsPerStep)
        }

        // If the number of cpus has been decreased, remove !
        let offset

        if ((offset = dest.cpus.length - source.cpus.length) > 0) {
          dest.cpus.splice(-offset)
        }
      } else if (endsWith(key, 'ifs')) {
        // For each pif or vif
        for (const ifType in source[key]) {
          for (const pifIndex in source[key][ifType]) {
            dest[key][ifType][pifIndex].splice(0, dest[key][ifType][pifIndex].length - pointsPerStep)
          }

          // If the number of pifs has been decreased, remove !
          let offset

          if ((offset = dest[key][ifType].length - source[key][ifType].length) > 0) {
            dest[key][ifType].splice(-offset)
          }
        }
      } else if (key === 'xvds') {
        for (const xvdType in source.xvds) {
          for (const xvdLetter in source.xvds[xvdType]) {
            dest.xvds[xvdType][xvdLetter].splice(0, dest.xvds[xvdType][xvdLetter].length - pointsPerStep)
          }

          // If the number of xvds has been decreased, remove !
          // FIXME
        }
      } else if (key === 'load') {
        dest.load.splice(0, dest[key].length - pointsPerStep)
      } else if (key === 'memory') {
        // Load, memory, memoryFree, memoryUsed
        // NOTE: the three memory series always have the same length, so
        // one computed offset is reused for all of them.
        const length = dest.memory.length - pointsPerStep
        dest.memory.splice(0, length)
        dest.memoryFree.splice(0, length)
        dest.memoryUsed.splice(0, length)
      }
    }
  }

  // -------------------------------------------------------------------
  // HOST: Computation and stats update
  // -------------------------------------------------------------------

  // Compute one stats row for one host.
  // `values` is one RRD data row; legends map metric names to column indices.
  _parseRowHostStats (hostLegends, hostStats, values) {
    // Cpus — raw values are fractions, stored as percentages.
    hostLegends.cpus.forEach((cpuIndex, index) => {
      if (hostStats.cpus[index] === undefined) {
        hostStats.cpus[index] = []
      }

      hostStats.cpus[index].push(values[cpuIndex] * 100)
    })

    // Pifs — NaN samples are stored as null.
    for (const pifType in hostLegends.pifs) {
      hostLegends.pifs[pifType].forEach((pifIndex, index) => {
        if (hostStats.pifs[pifType][index] === undefined) {
          hostStats.pifs[pifType][index] = []
        }

        hostStats.pifs[pifType][index].push(convertNanToNull(values[pifIndex]))
      })
    }

    // Load
    hostStats.load.push(convertNanToNull(values[hostLegends.load]))

    // Memory
    const memory = values[hostLegends.memory]
    const memoryFree = values[hostLegends.memoryFree]

    hostStats.memory.push(memory)

    // memoryFree may be absent from the legends (older XenServer?),
    // in which case memoryUsed cannot be derived either.
    if (hostLegends.memoryFree !== undefined) {
      hostStats.memoryFree.push(memoryFree)
      hostStats.memoryUsed.push(memory - memoryFree)
    }
  }

  // Compute stats for host from RRD update.
  _parseHostStats (json, hostname, hostLegends, step) {
    const host = this._hosts[hostname][step]

    if (host.stats === undefined) {
      host.stats = getNewHostStats()
    }

    for (const row of json.data) {
      this._parseRowHostStats(hostLegends, host.stats, row.values)
    }
  }

  // -------------------------------------------------------------------
  // VM: Computation and stats update
  // -------------------------------------------------------------------

  // Compute one stats row for one vm from an RRD update.
  _parseRowVmStats (vmLegends, vmStats, values) {
    // Cpus — raw values are fractions, stored as percentages.
    vmLegends.cpus.forEach((cpuIndex, index) => {
      if (vmStats.cpus[index] === undefined) {
        vmStats.cpus[index] = []
      }

      vmStats.cpus[index].push(values[cpuIndex] * 100)
    })

    // Vifs — NaN samples are stored as null.
    for (const vifType in vmLegends.vifs) {
      vmLegends.vifs[vifType].forEach((vifIndex, index) => {
        if (vmStats.vifs[vifType][index] === undefined) {
          vmStats.vifs[vifType][index] = []
        }

        vmStats.vifs[vifType][index].push(convertNanToNull(values[vifIndex]))
      })
    }

    // Xvds — indexed by drive letter; the legend value is the column index.
    for (const xvdType in vmLegends.xvds) {
      for (const index in vmLegends.xvds[xvdType]) {
        if (vmStats.xvds[xvdType][index] === undefined) {
          vmStats.xvds[xvdType][index] = []
        }

        vmStats.xvds[xvdType][index].push(convertNanToNull(values[vmLegends.xvds[xvdType][index]]))
      }
    }

    // Memory
    // WARNING! memoryFree is in Kb not in b, memory is in b
    const memory = values[vmLegends.memory]
    const memoryFree = values[vmLegends.memoryFree] * 1024

    vmStats.memory.push(memory)

    if (vmLegends.memoryFree !== undefined) {
      vmStats.memoryFree.push(memoryFree)
      vmStats.memoryUsed.push(memory - memoryFree)
    }
  }

  // Compute stats for all vms of a host from an RRD update.
  _parseVmsStats (json, hostname, vmsLegends, step) {
    if (this._vms[hostname][step] === undefined) {
      this._vms[hostname][step] = {}
    }

    const vms = this._vms[hostname][step]

    // Create missing per-vm accumulators before parsing the rows.
    for (const uuid in vmsLegends) {
      if (vms[uuid] === undefined) {
        vms[uuid] = getNewVmStats()
      }
    }

    for (const row of json.data) {
      for (const uuid in vmsLegends) {
        this._parseRowVmStats(vmsLegends[uuid], vms[uuid], row.values)
      }
    }
  }

  // -------------------------------------------------------------------
  // -------------------------------------------------------------------

  // Execute one http request on a XenServer for get stats
  // Return stats (Json format) or throws got exception
  // NOTE: rejectUnauthorized is disabled because XenServer hosts
  // commonly use self-signed certificates.
  async _getJson (url) {
    const body = await httpRequest(url, { rejectUnauthorized: false }).readAll()
    return JSON5.parse(body)
  }

  // Return the timestamp from which new points must be requested:
  // either the end of the cached window, or — on a cold cache — far
  // enough back to fill a whole window of RRD_POINTS_PER_STEP points.
  async _getLastTimestamp (xapi, host, step) {
    if (this._hosts[host.address][step] === undefined) {
      const serverTimeStamp = await getServerTimestamp(xapi, host)
      return serverTimeStamp - step * RRD_POINTS_PER_STEP[step] + step
    }

    return this._hosts[host.address][step].endTimestamp
  }

  // Return the cached points for a host (vmId undefined) or one vm.
  // The returned objects reference internal state — do not mutate.
  _getPoints (hostname, step, vmId) {
    // Return host points
    if (vmId === undefined) {
      return this._hosts[hostname][step]
    }

    // Return vm points
    const points = { endTimestamp: this._hosts[hostname][step].endTimestamp }

    // points.stats stays undefined when no vm stats were collected yet.
    if (this._vms[hostname][step] !== undefined) {
      points.stats = this._vms[hostname][step][vmId]
    }

    return points
  }

  // Fetch new RRD points from the host if the cache is stale, merge
  // them into the cache, then return the requested points.
  // `granularity` is one of 'seconds'/'minutes'/'hours'/'days'
  // (undefined/0 falls back to the default seconds step).
  // Throws FaultyGranularity on an unknown or unobtainable granularity.
  async _getAndUpdatePoints (xapi, host, vmId, granularity) {
    // Get granularity to use
    const step = (granularity === undefined || granularity === 0)
      ? RRD_STEP_SECONDS : RRD_STEP_FROM_STRING[granularity]

    if (step === undefined) {
      throw new FaultyGranularity(`Unknown granularity: '${granularity}'. Use 'seconds', 'minutes', 'hours', or 'days'.`)
    }

    // Limit the number of http requests
    const hostname = host.address

    if (this._hosts[hostname] === undefined) {
      this._hosts[hostname] = {}
      this._vms[hostname] = {}
    }

    // Cache hit: last fetch is more recent than one step — serve cached data.
    if (this._hosts[hostname][step] !== undefined &&
        this._hosts[hostname][step].localTimestamp + step > getCurrentTimestamp()) {
      return this._getPoints(hostname, step, vmId)
    }

    // Check if we are in the good interval, use this._hosts[hostname][step].localTimestamp
    // for avoid bad requests
    // TODO

    // Get json
    const timestamp = await this._getLastTimestamp(xapi, host, step)
    let json = await this._getJson(makeUrl(hostname, xapi.sessionId, timestamp))

    // Check if the granularity is linked to 'step'
    // If it's not the case, we retry other url with the json timestamp
    if (json.meta.step !== step) {
      console.log(`RRD call: Expected step: ${step}, received step: ${json.meta.step}. Retry with other timestamp`)
      const serverTimestamp = await getServerTimestamp(xapi, host)

      // Approximately: half points are asked
      // FIXME: Not the best solution
      json = await this._getJson(makeUrl(hostname, xapi.sessionId, serverTimestamp - step * (RRD_POINTS_PER_STEP[step] / 2) + step))

      if (json.meta.step !== step) {
        throw new FaultyGranularity(`Unable to get the true granularity: ${json.meta.step}`)
      }
    }

    // Make new backup slot if necessary
    if (this._hosts[hostname][step] === undefined) {
      this._hosts[hostname][step] = {
        endTimestamp: 0,
        localTimestamp: 0
      }
    }

    // It exists data
    if (json.data.length !== 0) {
      // Warning: Sometimes, the json.xport.meta.start value does not match with the
      // timestamp of the oldest data value
      // So, we use the timestamp of the oldest data value !
      const startTimestamp = json.data[json.meta.rows - 1].t

      // Remove useless data and reorder
      // Note: Older values are at end of json.data.row
      const parseOffset = (this._hosts[hostname][step].endTimestamp - startTimestamp + step) / step

      json.data.splice(json.data.length - parseOffset)
      json.data.reverse()

      // It exists useful data
      if (json.data.length > 0) {
        const [hostLegends, vmsLegends] = parseLegends(json)

        // Compute and update host/vms stats
        this._parseVmsStats(json, hostname, vmsLegends, step)
        this._parseHostStats(json, hostname, hostLegends, step)

        // Remove older stats
        this._removeOlderStats(hostLegends, this._hosts[hostname][step].stats, RRD_POINTS_PER_STEP[step])

        for (const uuid in vmsLegends) {
          this._removeOlderStats(vmsLegends[uuid], this._vms[hostname][step][uuid], RRD_POINTS_PER_STEP[step])
        }
      }
    }

    // Update timestamp
    this._hosts[hostname][step].endTimestamp = json.meta.end
    this._hosts[hostname][step].localTimestamp = getCurrentTimestamp()

    return this._getPoints(hostname, step, vmId)
  }

  // -------------------------------------------------------------------
  // -------------------------------------------------------------------

  // Warning: This functions returns one reference on internal data
  // So, data can be changed by a parallel call on this functions
  // It is forbidden to modify the returned data

  // Return host stats
  async getHostPoints (xapi, hostId, granularity) {
    const host = xapi.getObject(hostId)
    return this._getAndUpdatePoints(xapi, host, undefined, granularity)
  }

  // Return vms stats
  // NOTE: the stats are fetched from the host the vm currently
  // resides on ($resident_on).
  async getVmPoints (xapi, vmId, granularity) {
    const vm = xapi.getObject(vmId)
    const host = vm.$resident_on
    return this._getAndUpdatePoints(xapi, host, vm.uuid, granularity)
  }
}
|
||||
1697
src/xapi.js
1697
src/xapi.js
File diff suppressed because it is too large
Load Diff
739
src/xo-mixins/backups.js
Normal file
739
src/xo-mixins/backups.js
Normal file
@@ -0,0 +1,739 @@
|
||||
import endsWith from 'lodash.endswith'
|
||||
import escapeStringRegexp from 'escape-string-regexp'
|
||||
import eventToPromise from 'event-to-promise'
|
||||
import execa from 'execa'
|
||||
import filter from 'lodash.filter'
|
||||
import findIndex from 'lodash.findindex'
|
||||
import sortBy from 'lodash.sortby'
|
||||
import startsWith from 'lodash.startswith'
|
||||
import {
|
||||
basename,
|
||||
dirname
|
||||
} from 'path'
|
||||
import { satisfies as versionSatisfies } from 'semver'
|
||||
|
||||
import xapiObjectToXo from '../xapi-object-to-xo'
|
||||
import {
|
||||
deferrable
|
||||
} from '../decorators'
|
||||
import {
|
||||
forEach,
|
||||
mapToArray,
|
||||
noop,
|
||||
pSettle,
|
||||
safeDateFormat
|
||||
} from '../utils'
|
||||
import {
|
||||
VDI_FORMAT_VHD
|
||||
} from '../xapi'
|
||||
|
||||
// ===================================================================
|
||||
|
||||
const DELTA_BACKUP_EXT = '.json'
const DELTA_BACKUP_EXT_LENGTH = DELTA_BACKUP_EXT.length

// Test if a file is a vdi backup. (full or delta)
// Names look like `20160512T093045Z_full.vhd` / `..._delta.vhd`.
const isVdiBackup = name => /^\d+T\d+Z_(?:full|delta)\.vhd$/.test(name)

// Test if a file is a delta vdi backup.
const isDeltaVdiBackup = name => /^\d+T\d+Z_delta\.vhd$/.test(name)

// Get the timestamp of a vdi backup. (full or delta)
// Warning: throws a TypeError if `name` is not a valid vdi backup name
// (callers only pass names that already matched isVdiBackup).
const getVdiTimestamp = name => {
  const arr = /^(\d+T\d+Z)_(?:full|delta)\.vhd$/.exec(name)
  return arr[1]
}

// Strip the `.json` extension from a delta vm backup name.
const getDeltaBackupNameWithoutExt = name => name.slice(0, -DELTA_BACKUP_EXT_LENGTH)

// Test if a file is a delta vm backup metadata file.
// Uses native String#endsWith (ES2015) instead of lodash.endswith.
const isDeltaBackup = name => name.endsWith(DELTA_BACKUP_EXT)
|
||||
|
||||
// Verify the integrity of a remote file by reading it entirely with
// checksum verification enabled. A missing file (ENOENT) is silently
// accepted; any other open error — or a checksum mismatch while
// draining — propagates to the caller.
async function checkFileIntegrity (handler, name) {
  let stream
  try {
    stream = await handler.createReadStream(name, { checksum: true })
  } catch (error) {
    if (error.code !== 'ENOENT') {
      throw error
    }

    return
  }

  // Drain the stream: the checksum is checked while the data flows.
  stream.resume()
  await eventToPromise(stream, 'finish')
}
|
||||
|
||||
// ===================================================================
|
||||
|
||||
export default class {
|
||||
  constructor (xo) {
    // Keep a reference to the main xo object: used to reach the
    // remotes (getRemote/getRemoteHandler) and xapi connections.
    this._xo = xo
  }
|
||||
|
||||
async listRemoteBackups (remoteId) {
|
||||
const handler = await this._xo.getRemoteHandler(remoteId)
|
||||
|
||||
// List backups. (No delta)
|
||||
const backupFilter = file => endsWith(file, '.xva')
|
||||
|
||||
const files = await handler.list()
|
||||
const backups = filter(files, backupFilter)
|
||||
|
||||
// List delta backups.
|
||||
const deltaDirs = filter(files, file => startsWith(file, 'vm_delta_'))
|
||||
|
||||
for (const deltaDir of deltaDirs) {
|
||||
const files = await handler.list(deltaDir)
|
||||
const deltaBackups = filter(files, isDeltaBackup)
|
||||
|
||||
backups.push(...mapToArray(
|
||||
deltaBackups,
|
||||
deltaBackup => {
|
||||
return `${deltaDir}/${getDeltaBackupNameWithoutExt(deltaBackup)}`
|
||||
}
|
||||
))
|
||||
}
|
||||
|
||||
return backups
|
||||
}
|
||||
|
||||
async importVmBackup (remoteId, file, sr) {
|
||||
const handler = await this._xo.getRemoteHandler(remoteId)
|
||||
const stream = await handler.createReadStream(file)
|
||||
const xapi = this._xo.getXapi(sr)
|
||||
|
||||
await xapi.importVm(stream, { srId: sr._xapiId })
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------
|
||||
|
||||
  // Copy a vm to another SR using a delta export: only the changes
  // since the previous copy (tracked via an `xo:base_delta:<sr uuid>`
  // entry in the vm's other_config) are transferred.
  // On failure, $onFailure hooks cancel the export streams and delete
  // the export snapshot.
  // Returns the id of the new XO VM object.
  @deferrable.onFailure
  async deltaCopyVm ($onFailure, srcVm, targetSr) {
    const srcXapi = this._xo.getXapi(srcVm)
    const targetXapi = this._xo.getXapi(targetSr)

    // Get Xen objects from XO objects.
    srcVm = srcXapi.getObject(srcVm._xapiId)
    targetSr = targetXapi.getObject(targetSr._xapiId)

    // 1. Find the local base for this SR (if any).
    const TAG_LAST_BASE_DELTA = `xo:base_delta:${targetSr.uuid}`
    const localBaseUuid = (id => {
      if (id != null) {
        // Second argument null: do not throw if the base has been deleted.
        const base = srcXapi.getObject(id, null)
        return base && base.uuid
      }
    })(srcVm.other_config[TAG_LAST_BASE_DELTA])

    // 2. Copy.
    const dstVm = await (async () => {
      const delta = await srcXapi.exportDeltaVm(srcVm.$id, localBaseUuid, {
        snapshotNameLabel: `XO_DELTA_EXPORT: ${targetSr.name_label} (${targetSr.uuid})`
      })
      // Register cleanup: cancel streams then delete the export snapshot.
      $onFailure(async () => {
        await Promise.all(mapToArray(
          delta.streams,
          stream => stream.cancel()
        ))

        return srcXapi.deleteVm(delta.vm.$id, true)
      })

      const promise = targetXapi.importDeltaVm(
        delta,
        {
          deleteBase: true, // Remove the remote base.
          srId: targetSr.$id
        }
      )

      // Once done, (asynchronously) remove the (now obsolete) local
      // base.
      if (localBaseUuid) {
        promise.then(() => srcXapi.deleteVm(localBaseUuid, true)).catch(noop)
      }

      // (Asynchronously) Identify snapshot as future base.
      promise.then(() => {
        return srcXapi._updateObjectMapProperty(srcVm, 'other_config', {
          [TAG_LAST_BASE_DELTA]: delta.vm.uuid
        })
      }).catch(noop)

      return promise
    })()

    // 5. Return the identifier of the new XO VM object.
    return xapiObjectToXo(dstVm).id
  }
|
||||
|
||||
// -----------------------------------------------------------------
|
||||
|
||||
// TODO: The other backup methods must use this function !
|
||||
// Prerequisite: The backups array must be ordered. (old to new backups)
|
||||
async _removeOldBackups (backups, handler, dir, n) {
|
||||
if (n <= 0) {
|
||||
return
|
||||
}
|
||||
|
||||
const getPath = (file, dir) => dir ? `${dir}/${file}` : file
|
||||
|
||||
await Promise.all(
|
||||
mapToArray(backups.slice(0, n), async backup => await handler.unlink(getPath(backup, dir)))
|
||||
)
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------
|
||||
|
||||
  // Legacy (version 0.0.0) delta vdi restore: create a fresh vdi, then
  // replay the full backup and each subsequent delta into it, in order.
  // Returns the id of the created vdi.
  async _legacyImportDeltaVdiBackup (xapi, { vmId, handler, dir, vdiInfo }) {
    const vdi = await xapi.createVdi(vdiInfo.virtual_size, vdiInfo)
    const vdiId = vdi.$id

    // dir = vm_delta_xxx
    // xoPath = vdi_xxx/timestamp_(full|delta).vhd
    // vdiDir = vdi_xxx
    const { xoPath } = vdiInfo
    const filePath = `${dir}/${xoPath}`
    const vdiDir = dirname(xoPath)

    // Full backup + deltas, oldest first — import order matters.
    const backups = await this._listDeltaVdiDependencies(handler, filePath)

    for (const backup of backups) {
      const stream = await handler.createReadStream(`${dir}/${vdiDir}/${backup}`)

      await xapi.importVdiContent(vdiId, stream, {
        format: VDI_FORMAT_VHD
      })
    }

    return vdiId
  }
|
||||
|
||||
  // Legacy (version 0.0.0) delta vm restore: import the vm metadata
  // from the `.xva`, then recreate its vdis from the delta chains and
  // reattach them via new vbds.
  // The vm is renamed and forbidden to start during the import.
  // Returns the xapi vm object.
  async _legacyImportDeltaVmBackup (xapi, { remoteId, handler, filePath, info, sr }) {
    // Import vm metadata.
    const vm = await (async () => {
      const stream = await handler.createReadStream(`${filePath}.xva`)
      return await xapi.importVm(stream, { onlyMetadata: true })
    })()

    const vmName = vm.name_label
    const dir = dirname(filePath)

    // Disable start and change the VM name label during import.
    await Promise.all([
      xapi.addForbiddenOperationToVm(vm.$id, 'start', 'Delta backup import...'),
      xapi._setObjectProperties(vm, { name_label: `[Importing...] ${vmName}` })
    ])

    // Destroy vbds if necessary. Why ?
    // Because XenServer creates Vbds linked to the vdis of the backup vm if it exists.
    await xapi.destroyVbdsFromVm(vm.uuid)

    // Import VDIs.
    // Maps the backed-up vdi uuids to the ids of the newly created vdis.
    const vdiIds = {}
    await Promise.all(
      mapToArray(
        info.vdis,
        async vdiInfo => {
          vdiInfo.sr = sr._xapiId

          const vdiId = await this._legacyImportDeltaVdiBackup(xapi, { vmId: vm.$id, handler, dir, vdiInfo })
          vdiIds[vdiInfo.uuid] = vdiId
        }
      )
    )

    await Promise.all(
      mapToArray(
        info.vbds,
        vbdInfo => {
          xapi.attachVdiToVm(vdiIds[vbdInfo.xoVdi], vm.$id, vbdInfo)
        }
      )
    )

    // Import done, reenable start and set real vm name.
    await Promise.all([
      xapi.removeForbiddenOperationFromVm(vm.$id, 'start'),
      xapi._setObjectProperties(vm, { name_label: vmName })
    ])

    return vm
  }
|
||||
|
||||
// -----------------------------------------------------------------
|
||||
|
||||
async _listVdiBackups (handler, dir) {
|
||||
let files
|
||||
|
||||
try {
|
||||
files = await handler.list(dir)
|
||||
} catch (error) {
|
||||
if (error.code === 'ENOENT') {
|
||||
files = []
|
||||
} else {
|
||||
throw error
|
||||
}
|
||||
}
|
||||
|
||||
const backups = sortBy(filter(files, fileName => isVdiBackup(fileName)))
|
||||
let i
|
||||
|
||||
// Avoid unstable state: No full vdi found to the beginning of array. (base)
|
||||
for (i = 0; i < backups.length && isDeltaVdiBackup(backups[i]); i++);
|
||||
await this._removeOldBackups(backups, handler, dir, i)
|
||||
|
||||
return backups.slice(i)
|
||||
}
|
||||
|
||||
  // Export one vdi to the remote as a full or delta vhd backup.
  // A snapshot tagged XO_DELTA_BASE_VDI_SNAPSHOT serves as the delta
  // base for the next run; a full export is made when there is no
  // usable base or no previous backups.
  // Returns { backupDirectory, vdiFilename, oldBaseId, newBaseId }.
  async _deltaVdiBackup (xapi, {vdi, handler, dir, depth}) {
    const backupDirectory = `vdi_${vdi.uuid}`

    dir = `${dir}/${backupDirectory}`

    const backups = await this._listVdiBackups(handler, dir)

    // Make snapshot.
    const date = safeDateFormat(new Date())
    const currentSnapshot = await xapi.snapshotVdi(vdi.$id, 'XO_DELTA_BASE_VDI_SNAPSHOT')

    // Existing base snapshots, oldest first; the most recent one is the base.
    const bases = sortBy(
      filter(vdi.$snapshots, { name_label: 'XO_DELTA_BASE_VDI_SNAPSHOT' }),
      base => base.snapshot_time
    )

    const base = bases.pop()

    // Remove old bases if exists.
    // Deliberately fire-and-forget: failures here are non-fatal.
    Promise.all(mapToArray(bases, base => xapi.deleteVdi(base.$id))).catch(noop)

    // It is strange to have no base but a full backup !
    // A full is necessary if it not exists backups or
    // the base is missing.
    const isFull = (!backups.length || !base)

    // Export full or delta backup.
    const vdiFilename = `${date}_${isFull ? 'full' : 'delta'}.vhd`
    const backupFullPath = `${dir}/${vdiFilename}`

    try {
      const sourceStream = await xapi.exportVdi(currentSnapshot.$id, {
        baseId: isFull ? undefined : base.$id,
        format: VDI_FORMAT_VHD
      })

      const targetStream = await handler.createOutputStream(backupFullPath, {
        // FIXME: Checksum is not computed for full vdi backups.
        // The problem is in the merge case, a delta merged in a full vdi
        // backup forces us to browse the resulting file =>
        // Significant transfer time on the network !
        checksum: !isFull,
        flags: 'wx'
      })

      // Forward source errors so the pipe/eventToPromise below rejects.
      sourceStream.on('error', error => targetStream.emit('error', error))

      await Promise.all([
        eventToPromise(sourceStream.pipe(targetStream), 'finish'),
        sourceStream.task
      ])
    } catch (error) {
      // Remove new backup. (corrupt) and delete new vdi base.
      xapi.deleteVdi(currentSnapshot.$id).catch(noop)
      await handler.unlink(backupFullPath, { checksum: true }).catch(noop)

      throw error
    }

    // Returns relative path. (subdir and vdi filename), old/new base.
    return {
      backupDirectory,
      vdiFilename,
      oldBaseId: base && base.$id, // Base can be undefined. (full backup)
      newBaseId: currentSnapshot.$id
    }
  }
|
||||
|
||||
  // Merge old delta vdi backups into a new full backup so that at most
  // `depth` backups remain, using the bundled vhd-util binary.
  // Not available on SMB remotes (vhd-util needs a local filesystem path).
  async _mergeDeltaVdiBackups ({handler, dir, depth}) {
    if (handler.type === 'smb') {
      throw new Error('VDI merging is not available through SMB')
    }

    const backups = await this._listVdiBackups(handler, dir)
    // Index of the oldest backup that must be kept.
    let i = backups.length - depth

    // No merge.
    if (i <= 0) {
      return
    }

    const vhdUtil = `${__dirname}/../../bin/vhd-util`

    // The merged result will become the new full backup, named after
    // the timestamp of the oldest kept backup.
    const timestamp = getVdiTimestamp(backups[i])
    const newFullBackup = `${dir}/${timestamp}_full.vhd`

    await checkFileIntegrity(handler, `${dir}/${backups[i]}`)

    // Coalesce each delta into its parent, walking from backups[i]
    // back towards the full base.
    for (; i > 0 && isDeltaVdiBackup(backups[i]); i--) {
      const backup = `${dir}/${backups[i]}`
      const parent = `${dir}/${backups[i - 1]}`

      const path = handler._remote.path // FIXME, private attribute !

      try {
        await checkFileIntegrity(handler, `${dir}/${backups[i - 1]}`)
        await execa(vhdUtil, ['modify', '-n', `${path}/${backup}`, '-p', `${path}/${parent}`]) // FIXME not ok at least with smb remotes
        await execa(vhdUtil, ['coalesce', '-n', `${path}/${backup}`]) // FIXME not ok at least with smb remotes
      } catch (e) {
        console.error('Unable to use vhd-util.', e)
        throw e
      }

      await handler.unlink(backup, { checksum: true })
    }

    // The base was removed, it exists two full backups or more ?
    // => Remove old backups before the most recent full.
    if (i > 0) {
      for (i--; i >= 0; i--) {
        await handler.unlink(`${dir}/${backups[i]}`, { checksum: true })
      }

      return
    }

    // Rename the first old full backup to the new full backup.
    await handler.rename(`${dir}/${backups[0]}`, newFullBackup)
  }
|
||||
|
||||
async _listDeltaVdiDependencies (handler, filePath) {
|
||||
const dir = dirname(filePath)
|
||||
const filename = basename(filePath)
|
||||
const backups = await this._listVdiBackups(handler, dir)
|
||||
|
||||
// Search file. (delta or full backup)
|
||||
const i = findIndex(backups, backup =>
|
||||
getVdiTimestamp(backup) === getVdiTimestamp(filename)
|
||||
)
|
||||
|
||||
if (i === -1) {
|
||||
throw new Error('VDI to import not found in this remote.')
|
||||
}
|
||||
|
||||
// Search full backup.
|
||||
let j
|
||||
|
||||
for (j = i; j >= 0 && isDeltaVdiBackup(backups[j]); j--);
|
||||
|
||||
if (j === -1) {
|
||||
throw new Error(`Unable to found full vdi backup of: ${filePath}`)
|
||||
}
|
||||
|
||||
return backups.slice(j, i + 1)
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------
|
||||
|
||||
async _listDeltaVmBackups (handler, dir) {
|
||||
const files = await handler.list(dir)
|
||||
return await sortBy(filter(files, isDeltaBackup))
|
||||
}
|
||||
|
||||
async _failedRollingDeltaVmBackup (xapi, handler, dir, fulFilledVdiBackups) {
|
||||
await Promise.all(
|
||||
mapToArray(fulFilledVdiBackups, async vdiBackup => {
|
||||
const { newBaseId, backupDirectory, vdiFilename } = vdiBackup.value()
|
||||
|
||||
await xapi.deleteVdi(newBaseId)
|
||||
await handler.unlink(`${dir}/${backupDirectory}/${vdiFilename}`, { checksum: true }).catch(noop)
|
||||
})
|
||||
)
|
||||
}
|
||||
|
||||
  // Perform a rolling delta backup of a vm to a remote: export each
  // disk as a full/delta vhd, write a JSON metadata file describing
  // the vm/vbds/vdis/vifs, merge old deltas and prune backups beyond
  // `depth`. On any vdi failure, already-written files are rolled back.
  // Returns the relative path of the new backup (without extension).
  async rollingDeltaVmBackup ({vm, remoteId, tag, depth}) {
    const remote = await this._xo.getRemote(remoteId)

    if (!remote) {
      throw new Error(`No such Remote ${remoteId}`)
    }
    if (!remote.enabled) {
      throw new Error(`Remote ${remoteId} is disabled`)
    }

    const handler = await this._xo.getRemoteHandler(remote)
    if (handler.type === 'smb') {
      throw new Error('Delta Backup is not supported for smb remotes')
    }

    const dir = `vm_delta_${tag}_${vm.uuid}`

    // Backup metadata, serialized to `<date>_<name>.json` below.
    const info = {
      version: '1.0.0',
      vbds: {},
      vdis: {},
      vifs: {}
    }

    const promises = []
    const xapi = this._xo.getXapi(vm)

    vm = xapi.getObject(vm._xapiId)

    // Start one vdi backup per disk; all run concurrently.
    for (const vbd of vm.$VBDs) {
      const vdiId = vbd.VDI

      if (!vdiId || vbd.type !== 'Disk') {
        continue
      }

      info.vbds[vbd.$ref] = vbd

      // Warning: There may be the same VDI id for a VBD set.
      if (info.vdis[vdiId]) {
        continue
      }

      const vdi = vbd.$VDI

      info.vdis[vdiId] = { ...vdi }
      promises.push(
        this._deltaVdiBackup(xapi, {vdi, handler, dir, depth}).then(
          vdiBackup => {
            const { backupDirectory, vdiFilename } = vdiBackup
            info.vdis[vdiId].xoPath = `${backupDirectory}/${vdiFilename}`

            return vdiBackup
          }
        )
      )
    }

    // Wait for all of them to settle so a partial failure can be rolled back.
    const vdiBackups = await pSettle(promises)
    const fulFilledVdiBackups = []
    let fail = false

    // One or many vdi backups have failed.
    for (const vdiBackup of vdiBackups) {
      if (vdiBackup.isFulfilled()) {
        fulFilledVdiBackups.push(vdiBackup)
      } else {
        console.error(`Rejected backup: ${vdiBackup.reason()}`)
        fail = true
      }
    }

    if (fail) {
      console.error(`Remove successful backups in ${dir}`, fulFilledVdiBackups)
      await this._failedRollingDeltaVmBackup(xapi, handler, dir, fulFilledVdiBackups)

      throw new Error('Rolling delta vm backup failed.')
    }

    const date = safeDateFormat(new Date())
    const backupFormat = `${date}_${vm.name_label}`
    const infoPath = `${dir}/${backupFormat}${DELTA_BACKUP_EXT}`

    try {
      for (const vif of vm.$VIFs) {
        info.vifs[vif.$ref] = vif
      }

      info.vm = vm

      // 'wx' flag: fail instead of overwriting an existing file.
      await handler.outputFile(infoPath, JSON.stringify(info, null, 2), {flag: 'wx'})
    } catch (e) {
      await Promise.all([
        handler.unlink(infoPath).catch(noop),
        this._failedRollingDeltaVmBackup(xapi, handler, dir, fulFilledVdiBackups)
      ])

      throw e
    }

    // Here we have a completed backup. We can merge old vdis.
    await Promise.all(
      mapToArray(vdiBackups, vdiBackup => {
        const { backupDirectory } = vdiBackup.value()
        return this._mergeDeltaVdiBackups({handler, dir: `${dir}/${backupDirectory}`, depth})
      })
    )

    // Remove old vm backups.
    const backups = await this._listDeltaVmBackups(handler, dir)
    const nOldBackups = backups.length - depth

    if (nOldBackups > 0) {
      await Promise.all(
        mapToArray(backups.slice(0, nOldBackups), async backup => {
          // Remove json file.
          await handler.unlink(`${dir}/${backup}`)

          // Remove xva file.
          // Version 0.0.0 (Legacy) Delta Backup.
          // Best effort: the xva only exists for legacy backups.
          handler.unlink(`${dir}/${getDeltaBackupNameWithoutExt(backup)}.xva`).catch(noop)
        })
      )
    }

    // Remove old vdi bases.
    // Deliberately fire-and-forget: failures here are non-fatal.
    Promise.all(
      mapToArray(vdiBackups, async vdiBackup => {
        const { oldBaseId } = vdiBackup.value()

        if (oldBaseId) {
          await xapi.deleteVdi(oldBaseId)
        }
      })
    ).catch(noop)

    // Returns relative path.
    return `${dir}/${backupFormat}`
  }
|
||||
|
||||
  // Restore a delta vm backup from a remote into the given SR.
  // Dispatches on the metadata `version` field: no version means a
  // legacy (0.0.0) backup, `^1` is the current format where each vdi
  // is rebuilt from its chain of vhd streams.
  // Returns the id of the restored XO VM object.
  async importDeltaVmBackup ({sr, remoteId, filePath}) {
    const handler = await this._xo.getRemoteHandler(remoteId)
    const xapi = this._xo.getXapi(sr)

    const delta = JSON.parse(await handler.readFile(`${filePath}${DELTA_BACKUP_EXT}`))
    let vm
    const { version } = delta

    if (!version) {
      // Legacy import. (Version 0.0.0)
      vm = await this._legacyImportDeltaVmBackup(xapi, {
        remoteId, handler, filePath, info: delta, sr
      })
    } else if (versionSatisfies(delta.version, '^1')) {
      const basePath = dirname(filePath)
      // One array of read streams (full + deltas, oldest first) per vdi.
      const streams = delta.streams = {}

      await Promise.all(
        mapToArray(
          delta.vdis,
          async (vdi, id) => {
            const vdisFolder = `${basePath}/${dirname(vdi.xoPath)}`
            const backups = await this._listDeltaVdiDependencies(handler, `${basePath}/${vdi.xoPath}`)

            streams[`${id}.vhd`] = await Promise.all(mapToArray(backups, async backup =>
              handler.createReadStream(`${vdisFolder}/${backup}`, { checksum: true, ignoreMissingChecksum: true })
            ))
          }
        )
      )

      vm = await xapi.importDeltaVm(delta, {
        srId: sr._xapiId,
        disableStartAfterImport: false
      })
    } else {
      throw new Error(`Unsupported delta backup version: ${version}`)
    }

    return xapiObjectToXo(vm).id
  }
|
||||
|
||||
// -----------------------------------------------------------------
|
||||
|
||||
async backupVm ({vm, remoteId, file, compress, onlyMetadata}) {
|
||||
const remote = await this._xo.getRemote(remoteId)
|
||||
|
||||
if (!remote) {
|
||||
throw new Error(`No such Remote ${remoteId}`)
|
||||
}
|
||||
if (!remote.enabled) {
|
||||
throw new Error(`Backup remote ${remoteId} is disabled`)
|
||||
}
|
||||
|
||||
const handler = await this._xo.getRemoteHandler(remote)
|
||||
return this._backupVm(vm, handler, file, {compress, onlyMetadata})
|
||||
}
|
||||
|
||||
  // Stream a vm export from xapi into `file` on the remote.
  // 'wx' flag: fail instead of overwriting an existing file.
  // NOTE(review): only the target stream's 'finish' is awaited — an
  // error on the source stream does not appear to be forwarded, which
  // could leave this promise pending; confirm against the handler/xapi
  // stream implementations.
  async _backupVm (vm, handler, file, {compress, onlyMetadata}) {
    const targetStream = await handler.createOutputStream(file, { flags: 'wx' })
    const promise = eventToPromise(targetStream, 'finish')

    const sourceStream = await this._xo.getXapi(vm).exportVm(vm._xapiId, {
      compress,
      onlyMetadata: onlyMetadata || false
    })
    sourceStream.pipe(targetStream)

    await promise
  }
|
||||
|
||||
async rollingBackupVm ({vm, remoteId, tag, depth, compress, onlyMetadata}) {
|
||||
const remote = await this._xo.getRemote(remoteId)
|
||||
|
||||
if (!remote) {
|
||||
throw new Error(`No such Remote ${remoteId}`)
|
||||
}
|
||||
if (!remote.enabled) {
|
||||
throw new Error(`Backup remote ${remoteId} is disabled`)
|
||||
}
|
||||
|
||||
const handler = await this._xo.getRemoteHandler(remote)
|
||||
|
||||
const files = await handler.list()
|
||||
|
||||
const reg = new RegExp('^[^_]+_' + escapeStringRegexp(`${tag}_${vm.name_label}.xva`))
|
||||
const backups = sortBy(filter(files, (fileName) => reg.test(fileName)))
|
||||
|
||||
const date = safeDateFormat(new Date())
|
||||
const file = `${date}_${tag}_${vm.name_label}.xva`
|
||||
|
||||
await this._backupVm(vm, handler, file, {compress, onlyMetadata})
|
||||
await this._removeOldBackups(backups, handler, undefined, backups.length - (depth - 1))
|
||||
}
|
||||
|
||||
async rollingSnapshotVm (vm, tag, depth) {
|
||||
const xapi = this._xo.getXapi(vm)
|
||||
vm = xapi.getObject(vm._xapiId)
|
||||
|
||||
const reg = new RegExp('^rollingSnapshot_[^_]+_' + escapeStringRegexp(tag) + '_')
|
||||
const snapshots = sortBy(filter(vm.$snapshots, snapshot => reg.test(snapshot.name_label)), 'name_label')
|
||||
const date = safeDateFormat(new Date())
|
||||
|
||||
await xapi.snapshotVm(vm.$id, `rollingSnapshot_${date}_${tag}_${vm.name_label}`)
|
||||
|
||||
const promises = []
|
||||
for (let surplus = snapshots.length - (depth - 1); surplus > 0; surplus--) {
|
||||
const oldSnap = snapshots.shift()
|
||||
promises.push(xapi.deleteVm(oldSnap.uuid, true))
|
||||
}
|
||||
await Promise.all(promises)
|
||||
}
|
||||
|
||||
async rollingDrCopyVm ({vm, sr, tag, depth}) {
|
||||
tag = 'DR_' + tag
|
||||
const reg = new RegExp('^' + escapeStringRegexp(`${vm.name_label}_${tag}_`) + '[0-9]{8}T[0-9]{6}Z$')
|
||||
|
||||
const targetXapi = this._xo.getXapi(sr)
|
||||
sr = targetXapi.getObject(sr._xapiId)
|
||||
const sourceXapi = this._xo.getXapi(vm)
|
||||
vm = sourceXapi.getObject(vm._xapiId)
|
||||
|
||||
const vms = []
|
||||
forEach(sr.$VDIs, vdi => {
|
||||
const vbds = vdi.$VBDs
|
||||
const vm = vbds && vbds[0] && vbds[0].$VM
|
||||
if (vm && reg.test(vm.name_label)) {
|
||||
vms.push(vm)
|
||||
}
|
||||
})
|
||||
const olderCopies = sortBy(vms, 'name_label')
|
||||
|
||||
const copyName = `${vm.name_label}_${tag}_${safeDateFormat(new Date())}`
|
||||
const drCopy = await sourceXapi.remoteCopyVm(vm.$id, targetXapi, sr.$id, {
|
||||
nameLabel: copyName
|
||||
})
|
||||
await targetXapi.addTag(drCopy.$id, 'Disaster Recovery')
|
||||
|
||||
const promises = []
|
||||
for (let surplus = olderCopies.length - (depth - 1); surplus > 0; surplus--) {
|
||||
const oldDRVm = olderCopies.shift()
|
||||
promises.push(targetXapi.deleteVm(oldDRVm.$id, true))
|
||||
}
|
||||
await Promise.all(promises)
|
||||
}
|
||||
}
|
||||
76
src/xo-mixins/jobs.js
Normal file
76
src/xo-mixins/jobs.js
Normal file
@@ -0,0 +1,76 @@
|
||||
import JobExecutor from '../job-executor'
|
||||
import { Jobs } from '../models/job'
|
||||
import {
|
||||
JsonRpcError,
|
||||
NoSuchObject
|
||||
} from '../api-errors'
|
||||
|
||||
// ===================================================================
|
||||
|
||||
// Error thrown when no job exists for a given identifier.
class NoSuchJob extends NoSuchObject {
  constructor (id) {
    super(id, 'job')
  }
}
|
||||
|
||||
// ===================================================================
|
||||
|
||||
// Mixin managing the persistence (Redis-backed) and the execution of jobs.
export default class {
  constructor (xo) {
    this._executor = new JobExecutor(xo)
    this._jobs = new Jobs({
      connection: xo._redis,
      prefix: 'xo:job',
      indexes: ['user_id', 'key']
    })
  }

  // Returns all stored jobs.
  async getAllJobs () {
    return this._jobs.get()
  }

  // Returns the properties of the job `id`.
  //
  // Throws NoSuchJob if it does not exist.
  async getJob (id) {
    const job = await this._jobs.first(id)
    if (!job) {
      throw new NoSuchJob(id)
    }

    return job.properties
  }

  // Creates a job owned by `userId` and returns its properties.
  async createJob (userId, job) {
    // TODO: use plain objects
    const job_ = await this._jobs.create(userId, job)
    return job_.properties
  }

  // Persists modifications made to an existing job.
  async updateJob (job) {
    return this._jobs.save(job)
  }

  // Deletes the job `id`.
  async removeJob (id) {
    return this._jobs.remove(id)
  }

  // Runs the given jobs sequentially (in order).
  //
  // Unknown ids do not abort the sequence: they are collected and
  // reported in a single JsonRpcError once all existing jobs have run.
  async runJobSequence (idSequence) {
    const notFound = []
    for (const id of idSequence) {
      let job
      try {
        job = await this.getJob(id)
      } catch (error) {
        if (error instanceof NoSuchJob) {
          notFound.push(id)
        } else {
          throw error
        }
      }
      if (job) {
        await this._executor.exec(job)
      }
    }
    if (notFound.length > 0) {
      throw new JsonRpcError(`The following jobs were not found: ${notFound.join()}`)
    }
  }
}
|
||||
205
src/xo-mixins/plugins.js
Normal file
205
src/xo-mixins/plugins.js
Normal file
@@ -0,0 +1,205 @@
|
||||
import createJsonSchemaValidator from 'is-my-json-valid'
|
||||
import isFunction from 'lodash.isfunction'
|
||||
|
||||
import { PluginsMetadata } from '../models/plugin-metadata'
|
||||
import {
|
||||
InvalidParameters,
|
||||
NoSuchObject
|
||||
} from '../api-errors'
|
||||
import {
|
||||
createRawObject,
|
||||
mapToArray
|
||||
} from '../utils'
|
||||
|
||||
// ===================================================================
|
||||
|
||||
// Error thrown when no plugin exists for a given identifier.
class NoSuchPlugin extends NoSuchObject {
  constructor (id) {
    super(id, 'plugin')
  }
}
|
||||
|
||||
// ===================================================================
|
||||
|
||||
// Mixin managing XO plugins: in-memory registry of loaded plugin
// instances plus Redis-backed per-plugin metadata (autoload flag and
// saved configuration).
export default class {
  constructor (xo) {
    // In-memory registry: plugin id -> plugin descriptor.
    this._plugins = createRawObject()

    // Persistent metadata ({ autoload, configuration }) per plugin.
    this._pluginsMetadata = new PluginsMetadata({
      connection: xo._redis,
      prefix: 'xo:plugin-metadata'
    })
  }

  // Returns the internal descriptor of plugin `id`.
  //
  // Throws NoSuchPlugin if the plugin is not registered.
  _getRawPlugin (id) {
    const plugin = this._plugins[id]
    if (!plugin) {
      throw new NoSuchPlugin(id)
    }
    return plugin
  }

  // Returns the persisted metadata of plugin `id`, or null if none has
  // been saved yet.
  async _getPluginMetadata (id) {
    const metadata = await this._pluginsMetadata.first(id)
    return metadata
      ? metadata.properties
      : null
  }

  // Registers a plugin instance under `name`, then asynchronously
  // configures and loads it according to its persisted metadata.
  async registerPlugin (
    name,
    instance,
    configurationSchema
  ) {
    const id = name

    const plugin = this._plugins[id] = {
      // A plugin without a configuration schema needs no configuration.
      configured: !configurationSchema,
      configurationSchema,
      id,
      instance,
      name,
      unloadable: isFunction(instance.unload)
    }

    const metadata = await this._getPluginMetadata(id)
    let autoload = true
    let configuration

    if (metadata) {
      ({
        autoload,
        configuration
      } = metadata)
    } else {
      // First registration: persist the default metadata.
      console.log(`[NOTICE] register plugin ${name} for the first time`)
      await this._pluginsMetadata.save({
        id,
        autoload
      })
    }

    // Configure plugin if necessary. (i.e. configurationSchema)
    // Load plugin.
    // Ignore configuration and loading errors.
    //
    // NOTE: this chain is intentionally not awaited so that a slow or
    // failing plugin does not block registration; its `.catch` prevents
    // unhandled rejections.
    Promise.resolve()
      .then(() => {
        if (!plugin.configured) {
          return this._configurePlugin(plugin, configuration)
        }
      })
      .then(() => {
        if (autoload) {
          return this.loadPlugin(id)
        }
      })
      .catch(error => {
        console.error('register plugin %s: %s', name, error && error.stack || error)
      })
  }

  // Returns a public view of plugin `id`, merging the in-memory state
  // with the persisted metadata (which may be absent).
  async _getPlugin (id) {
    const {
      configurationSchema,
      loaded,
      name,
      unloadable
    } = this._getRawPlugin(id)
    const {
      autoload,
      configuration
    } = (await this._getPluginMetadata(id)) || {}

    return {
      id,
      name,
      autoload,
      loaded,
      unloadable,
      configuration,
      configurationSchema
    }
  }

  // Returns the public views of all registered plugins.
  async getPlugins () {
    return await Promise.all(
      mapToArray(this._plugins, ({ id }) => this._getPlugin(id))
    )
  }

  // Validate the configuration and configure the plugin instance.
  //
  // Throws InvalidParameters if the plugin has no configuration schema
  // or if the configuration does not validate against it.
  async _configurePlugin (plugin, configuration) {
    if (!plugin.configurationSchema) {
      throw new InvalidParameters('plugin not configurable')
    }

    const validate = createJsonSchemaValidator(plugin.configurationSchema)
    if (!validate(configuration)) {
      throw new InvalidParameters(validate.errors)
    }

    // Sets the plugin configuration.
    await plugin.instance.configure({
      // Shallow copy of the configuration object to avoid most of the
      // errors when the plugin is altering the configuration object
      // which is handed over to it.
      ...configuration
    })
    plugin.configured = true
  }

  // Validate the configuration, configure the plugin instance and
  // save the new configuration.
  async configurePlugin (id, configuration) {
    const plugin = this._getRawPlugin(id)

    await this._configurePlugin(plugin, configuration)

    // Saves the configuration.
    await this._pluginsMetadata.merge(id, { configuration })
  }

  // Disables automatic loading of plugin `id` at startup.
  async disablePluginAutoload (id) {
    // TODO: handle case where autoload is already disabled.

    await this._pluginsMetadata.merge(id, { autoload: false })
  }

  // Enables automatic loading of plugin `id` at startup.
  async enablePluginAutoload (id) {
    // TODO: handle case where autoload is already enabled.

    await this._pluginsMetadata.merge(id, { autoload: true })
  }

  // Loads (starts) plugin `id`.
  //
  // Throws InvalidParameters if it is already loaded or not yet
  // configured.
  async loadPlugin (id) {
    const plugin = this._getRawPlugin(id)
    if (plugin.loaded) {
      throw new InvalidParameters('plugin already loaded')
    }

    if (!plugin.configured) {
      throw new InvalidParameters('plugin not configured')
    }

    await plugin.instance.load()
    plugin.loaded = true
  }

  // Unloads (stops) plugin `id`.
  //
  // Throws InvalidParameters if it is not loaded or does not support
  // unloading (no `unload` method on the instance).
  async unloadPlugin (id) {
    const plugin = this._getRawPlugin(id)
    if (!plugin.loaded) {
      throw new InvalidParameters('plugin already unloaded')
    }

    if (plugin.unloadable === false) {
      throw new InvalidParameters('plugin cannot be unloaded')
    }

    await plugin.instance.unload()
    plugin.loaded = false
  }

  // Removes the saved configuration of plugin `id`.
  async purgePluginConfiguration (id) {
    await this._pluginsMetadata.merge(id, { configuration: undefined })
  }
}
|
||||
128
src/xo-mixins/remotes.js
Normal file
128
src/xo-mixins/remotes.js
Normal file
@@ -0,0 +1,128 @@
|
||||
import RemoteHandlerLocal from '../remote-handlers/local'
|
||||
import RemoteHandlerNfs from '../remote-handlers/nfs'
|
||||
import RemoteHandlerSmb from '../remote-handlers/smb'
|
||||
import {
|
||||
forEach
|
||||
} from '../utils'
|
||||
import {
|
||||
NoSuchObject
|
||||
} from '../api-errors'
|
||||
import {
|
||||
Remotes
|
||||
} from '../models/remote'
|
||||
|
||||
// ===================================================================
|
||||
|
||||
// Error thrown when no remote exists for a given identifier.
class NoSuchRemote extends NoSuchObject {
  constructor (id) {
    super(id, 'remote')
  }
}
|
||||
|
||||
// ===================================================================
|
||||
|
||||
// Mixin managing backup remotes (local/NFS/SMB storage locations):
// Redis-backed persistence plus handler instantiation and life cycle
// (sync on start, forget on stop).
export default class {
  constructor (xo) {
    this._remotes = new Remotes({
      connection: xo._redis,
      prefix: 'xo:remote',
      indexes: ['enabled']
    })

    xo.on('start', async () => {
      await this.initRemotes()
      await this.syncAllRemotes()
    })
    xo.on('stop', () => this.forgetAllRemotes())
  }

  // Returns a handler for the given remote (properties object or id),
  // picked from the URL scheme (`file://`, `nfs://` or `smb://`).
  //
  // Throws if the scheme is not supported.
  async getRemoteHandler (remote) {
    if (typeof remote === 'string') {
      remote = await this.getRemote(remote)
    }
    const Handler = {
      file: RemoteHandlerLocal,
      smb: RemoteHandlerSmb,
      nfs: RemoteHandlerNfs
    }
    const type = remote.url.split('://')[0]
    if (!Handler[type]) {
      throw new Error('Unhandled remote type')
    }
    return new Handler[type](remote)
  }

  // Returns all stored remotes.
  async getAllRemotes () {
    return this._remotes.get()
  }

  // Returns the model of remote `id`.
  //
  // Throws NoSuchRemote if it does not exist.
  async _getRemote (id) {
    const remote = await this._remotes.first(id)
    if (!remote) {
      throw new NoSuchRemote(id)
    }

    return remote
  }

  // Returns the properties of remote `id`.
  async getRemote (id) {
    return (await this._getRemote(id)).properties
  }

  // Creates a remote and enables it (which also syncs it), returning
  // its properties.
  async createRemote ({name, url}) {
    const remote = await this._remotes.create(name, url)
    return this.updateRemote(remote.get('id'), {enabled: true})
  }

  // Updates remote `id` with the given fields, re-syncs its handler and
  // persists the result, returning the new properties.
  async updateRemote (id, {name, url, enabled, error}) {
    const remote = await this._getRemote(id)
    this._updateRemote(remote, {name, url, enabled, error})
    const handler = await this.getRemoteHandler(remote.properties)
    const props = await handler.sync()
    this._updateRemote(remote, props)
    return (await this._remotes.save(remote)).properties
  }

  // Applies the given fields to a remote model (in memory only).
  //
  // An absent/falsy `error` clears any previously recorded error.
  _updateRemote (remote, {name, url, enabled, error}) {
    if (name) remote.set('name', name)
    if (url) remote.set('url', url)
    if (enabled !== undefined) remote.set('enabled', enabled)
    if (error) {
      remote.set('error', error)
    } else {
      remote.set('error', '')
    }
  }

  // Forgets (unmounts) remote `id` then deletes it.
  async removeRemote (id) {
    const handler = await this.getRemoteHandler(id)
    await handler.forget()
    await this._remotes.remove(id)
  }

  // TODO: Should it be private?
  // Re-syncs every remote (fire-and-forget per remote).
  async syncAllRemotes () {
    const remotes = await this.getAllRemotes()
    forEach(remotes, remote => {
      this.updateRemote(remote.id, {})
    })
  }

  // TODO: Should it be private?
  // Best-effort forget of every remote; individual failures are ignored.
  async forgetAllRemotes () {
    const remotes = await this.getAllRemotes()
    for (const remote of remotes) {
      try {
        // The inner promise must be awaited, otherwise a rejection from
        // `forget()` would escape this try/catch as an unhandled
        // rejection instead of being ignored.
        await (await this.getRemoteHandler(remote)).forget()
      } catch (_) {}
    }
  }

  // TODO: Should it be private?
  // Creates a default local remote when none exists yet.
  async initRemotes () {
    const remotes = await this.getAllRemotes()
    if (!remotes || !remotes.length) {
      await this.createRemote({name: 'default', url: 'file://var/lib/xoa-backups'})
    }
  }
}
|
||||
80
src/xo-mixins/scheduling.js
Normal file
80
src/xo-mixins/scheduling.js
Normal file
@@ -0,0 +1,80 @@
|
||||
import Scheduler from '../scheduler'
|
||||
import { Schedules } from '../models/schedule'
|
||||
import {
|
||||
NoSuchObject
|
||||
} from '../api-errors'
|
||||
|
||||
// ===================================================================
|
||||
|
||||
// Error thrown when no schedule exists for a given identifier.
class NoSuchSchedule extends NoSuchObject {
  constructor (id) {
    super(id, 'schedule')
  }
}
|
||||
|
||||
// ===================================================================
|
||||
|
||||
// Mixin managing job schedules: Redis-backed persistence kept in sync
// with a cron-like Scheduler instance.
export default class {
  constructor (xo) {
    this._schedules = new Schedules({
      connection: xo._redis,
      prefix: 'xo:schedule',
      indexes: ['user_id', 'job']
    })
    const scheduler = this._scheduler = new Scheduler(xo)

    xo.on('start', () => scheduler._loadSchedules())
    xo.on('stop', () => scheduler.disableAll())
  }

  // The underlying scheduler instance.
  get scheduler () {
    return this._scheduler
  }

  // Returns the model of schedule `id`.
  //
  // Throws NoSuchSchedule if it does not exist.
  async _getSchedule (id) {
    const schedule = await this._schedules.first(id)
    if (!schedule) {
      throw new NoSuchSchedule(id)
    }

    return schedule
  }

  // Returns the properties of schedule `id`.
  async getSchedule (id) {
    return (await this._getSchedule(id)).properties
  }

  // Returns all stored schedules.
  async getAllSchedules () {
    return this._schedules.get()
  }

  // Creates a schedule owned by `userId`, registers it with the
  // scheduler and returns its properties.
  async createSchedule (userId, {job, cron, enabled, name}) {
    const schedule_ = await this._schedules.create(userId, job, cron, enabled, name)
    const schedule = schedule_.properties
    if (this.scheduler) {
      this.scheduler.add(schedule)
    }
    return schedule
  }

  // Updates the given fields of schedule `id`, persists it and notifies
  // the scheduler.
  async updateSchedule (id, {job, cron, enabled, name}) {
    const schedule = await this._getSchedule(id)

    if (job) schedule.set('job', job)
    if (cron) schedule.set('cron', cron)
    if (enabled !== undefined) schedule.set('enabled', enabled)
    if (name !== undefined) schedule.set('name', name)

    await this._schedules.save(schedule)
    if (this.scheduler) {
      this.scheduler.update(schedule.properties)
    }
  }

  // Deletes schedule `id` and removes it from the scheduler.
  async removeSchedule (id) {
    await this._schedules.remove(id)
    if (this.scheduler) {
      this.scheduler.remove(id)
    }
  }
}
|
||||
@@ -1,11 +1,8 @@
|
||||
#!/usr/bin/env sh
|
||||
|
||||
# TODO: this generation should probably be automated and integrated
|
||||
# into the build system.
|
||||
|
||||
set -e -u
|
||||
|
||||
cd "$(dirname "$(which "$0")")"
|
||||
cd "${1:-$(dirname "$(which "$0")")}"
|
||||
|
||||
{
|
||||
printf %s '//
|
||||
@@ -14,13 +11,22 @@ cd "$(dirname "$(which "$0")")"
|
||||
// It MUST be re-generated each time an API namespace (read file) is
|
||||
// added or removed.
|
||||
//
|
||||
|
||||
const defaults = {}
|
||||
export default defaults
|
||||
|
||||
'
|
||||
|
||||
for f in *.js *.coffee
|
||||
for f in *.js *.coffee */
|
||||
do
|
||||
[ -f "$f" ] || continue
|
||||
|
||||
base=${f%.*}
|
||||
[ "$base" != index ] || continue
|
||||
|
||||
printf '%s\n' "export * as $base from './$base'"
|
||||
printf '%s; %s\n%s\n' \
|
||||
"import $base from './$base'" \
|
||||
"defaults['$base'] = $base" \
|
||||
"export * as $base from './$base'"
|
||||
done | sort
|
||||
} > index.js
|
||||
Reference in New Issue
Block a user