Compare commits
517 Commits
xo-server/
...
xo-server/
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
3989bfa832 | ||
|
|
740e1ccd5b | ||
|
|
bc03f2ed17 | ||
|
|
4b5cb86d87 | ||
|
|
b877d6088b | ||
|
|
26cfd439e1 | ||
|
|
eafcf66f4d | ||
|
|
a8fad8193b | ||
|
|
ca695c38cd | ||
|
|
6e0f98a809 | ||
|
|
8901d3ce64 | ||
|
|
2641966285 | ||
|
|
09dc2265fe | ||
|
|
a1fa139ef1 | ||
|
|
df26a500c4 | ||
|
|
a465218ba3 | ||
|
|
1ead6eb916 | ||
|
|
44fe1b31ba | ||
|
|
0856d3d5c9 | ||
|
|
f6b74ea836 | ||
|
|
e0cef71700 | ||
|
|
19d1f70458 | ||
|
|
43c103e436 | ||
|
|
6ff4f096a3 | ||
|
|
d8d82441c3 | ||
|
|
4f489e1854 | ||
|
|
9ab275df5d | ||
|
|
66c1754eb8 | ||
|
|
e67bab1f5c | ||
|
|
ceb6667112 | ||
|
|
cafba0b361 | ||
|
|
3e4efcf297 | ||
|
|
4ccadac148 | ||
|
|
6e148c18b0 | ||
|
|
75f849982e | ||
|
|
8722ef45ac | ||
|
|
b347c78a8c | ||
|
|
88ae24855a | ||
|
|
356884ea53 | ||
|
|
51fba21dd6 | ||
|
|
6aa5d608bf | ||
|
|
21ad2c5744 | ||
|
|
dea1163159 | ||
|
|
c4c2e8cf74 | ||
|
|
5883c35cf3 | ||
|
|
4d2719a424 | ||
|
|
7cf2d0d01c | ||
|
|
5a08b512a7 | ||
|
|
aa6ff6cd64 | ||
|
|
89421d292c | ||
|
|
55c6515bac | ||
|
|
5db6f6a58c | ||
|
|
eeedf6ab28 | ||
|
|
3758cd207b | ||
|
|
c15ede6239 | ||
|
|
e54e31e059 | ||
|
|
8c573aa8e4 | ||
|
|
654559086c | ||
|
|
b0d9679568 | ||
|
|
87fdaf7fa7 | ||
|
|
bb3a365166 | ||
|
|
2be183d980 | ||
|
|
c6dc846838 | ||
|
|
1142f1d59a | ||
|
|
126c470979 | ||
|
|
d679dc3e8b | ||
|
|
d5422dfe89 | ||
|
|
d64237b4f2 | ||
|
|
7b7e4942f2 | ||
|
|
e4c343a587 | ||
|
|
1a8ae21478 | ||
|
|
dd37a5b584 | ||
|
|
eec340e6c0 | ||
|
|
c2fb5ba1f0 | ||
|
|
36d7e17b86 | ||
|
|
5a1dc49428 | ||
|
|
47caf54772 | ||
|
|
6af50a8c44 | ||
|
|
1b27407970 | ||
|
|
4da6306c67 | ||
|
|
f950b7a725 | ||
|
|
930cf9ed04 | ||
|
|
744016e752 | ||
|
|
2fb4e907df | ||
|
|
ef2a815e52 | ||
|
|
33a81d4f3c | ||
|
|
4911df0bf9 | ||
|
|
2b612b5db7 | ||
|
|
bfe81b52ef | ||
|
|
26f6a4beb9 | ||
|
|
52e9c3053a | ||
|
|
908d1f1ec8 | ||
|
|
6a1120f95b | ||
|
|
2a2780c25c | ||
|
|
7d4152197b | ||
|
|
9c742600ff | ||
|
|
a035bf132a | ||
|
|
b989d157a0 | ||
|
|
261587511b | ||
|
|
ff798801fb | ||
|
|
9b4aab0d19 | ||
|
|
6e42cf9952 | ||
|
|
4c3a8ca312 | ||
|
|
a63eb48f03 | ||
|
|
d0214f805e | ||
|
|
d736bd6501 | ||
|
|
2ce4a11e0a | ||
|
|
e5ab8fe3e4 | ||
|
|
657b74a084 | ||
|
|
dfee98b66b | ||
|
|
f65b9f695e | ||
|
|
4056385cd3 | ||
|
|
96d56d43bc | ||
|
|
eba8f95e58 | ||
|
|
7e2da1ff93 | ||
|
|
b7b7e81468 | ||
|
|
0c7768f5d2 | ||
|
|
8fe6a56dfc | ||
|
|
7b9dae980d | ||
|
|
b59ba6b7bb | ||
|
|
8cdee4d173 | ||
|
|
c9ed5fbe00 | ||
|
|
e698e89968 | ||
|
|
02f198d42c | ||
|
|
61d2d0263b | ||
|
|
ed477e99a8 | ||
|
|
1449be8d66 | ||
|
|
28902d8747 | ||
|
|
d534592479 | ||
|
|
b2f6ea9116 | ||
|
|
8bf38bb29b | ||
|
|
9c6a78b678 | ||
|
|
850199d7fc | ||
|
|
4282928960 | ||
|
|
356dd89d9f | ||
|
|
7dd2391e5a | ||
|
|
e0093f236a | ||
|
|
8c5c32268a | ||
|
|
b61ccc1af1 | ||
|
|
7caf0e40f4 | ||
|
|
a16508db10 | ||
|
|
81bff342b9 | ||
|
|
49d41a76a0 | ||
|
|
b1732b3298 | ||
|
|
9372cdb6c7 | ||
|
|
1d8e54b83e | ||
|
|
30c5600271 | ||
|
|
9f7e5c3a9a | ||
|
|
37c9342717 | ||
|
|
8827f8e940 | ||
|
|
58334bf4a1 | ||
|
|
b898a6702c | ||
|
|
6d78a810b9 | ||
|
|
8fc4eb8cdf | ||
|
|
b3fac0c56f | ||
|
|
0b063b1f5e | ||
|
|
480f05e676 | ||
|
|
1ac8af34ec | ||
|
|
34ff8b0f02 | ||
|
|
77c3684e28 | ||
|
|
93038ea838 | ||
|
|
46348f7cba | ||
|
|
ccc0e45daf | ||
|
|
46ca03b017 | ||
|
|
1bfe3197a5 | ||
|
|
4d2617fe68 | ||
|
|
92e289f9da | ||
|
|
a8c7558a77 | ||
|
|
c756e7ecbe | ||
|
|
1998c56e84 | ||
|
|
2ed55b1616 | ||
|
|
0c8d456fd3 | ||
|
|
9e4924caf6 | ||
|
|
7f391a5860 | ||
|
|
5c7249c8fc | ||
|
|
932d00133d | ||
|
|
32a371bf13 | ||
|
|
5d0622d2cf | ||
|
|
9ab9155bf0 | ||
|
|
86a1ed6d46 | ||
|
|
b3c9936d74 | ||
|
|
21b4d7cf11 | ||
|
|
4ec07f9ff8 | ||
|
|
b7c89d6f64 | ||
|
|
0eb168ec70 | ||
|
|
8ac1a66e93 | ||
|
|
301da3662a | ||
|
|
e474946cb7 | ||
|
|
9a0ca1ebb2 | ||
|
|
520f7b2a77 | ||
|
|
c0b3b3aab8 | ||
|
|
d499332ce3 | ||
|
|
19ce06e0bb | ||
|
|
ea6ff4224e | ||
|
|
871d1f8632 | ||
|
|
77ce2ff6d1 | ||
|
|
6383104796 | ||
|
|
b99b4159c8 | ||
|
|
8bedb1f3b9 | ||
|
|
dc85804a27 | ||
|
|
42a31e512a | ||
|
|
2be7388696 | ||
|
|
bc5b00781b | ||
|
|
313e2b3de6 | ||
|
|
0bbd002060 | ||
|
|
5e785266a5 | ||
|
|
5870769e7d | ||
|
|
79b80dcd07 | ||
|
|
6f6e547e6c | ||
|
|
352c9357df | ||
|
|
1ba4641641 | ||
|
|
60e0047285 | ||
|
|
235e7c143c | ||
|
|
522d6eed92 | ||
|
|
9d1d6ea4c5 | ||
|
|
0afd506a41 | ||
|
|
9dfb837e3f | ||
|
|
4ab63b569f | ||
|
|
8d390d256d | ||
|
|
4eec5e06fc | ||
|
|
e4063b1ba8 | ||
|
|
0c3227cf8e | ||
|
|
7bed200bf5 | ||
|
|
4f763e2109 | ||
|
|
75167fb65b | ||
|
|
675588f780 | ||
|
|
2d6f94edd8 | ||
|
|
247c66ef4b | ||
|
|
1076fac40f | ||
|
|
14a4a415a2 | ||
|
|
524355b59c | ||
|
|
36fe49f3f5 | ||
|
|
c0c0af9b14 | ||
|
|
d1e472d482 | ||
|
|
c80e43ad0d | ||
|
|
fdd395e2b6 | ||
|
|
e094437168 | ||
|
|
2ee0be7466 | ||
|
|
2784a7cc92 | ||
|
|
b09f998d6c | ||
|
|
bdeb5895f6 | ||
|
|
3944b8aaee | ||
|
|
6e66cffb92 | ||
|
|
57092ee788 | ||
|
|
70e9e1c706 | ||
|
|
9662b8fbee | ||
|
|
9f66421ae7 | ||
|
|
50584c2e50 | ||
|
|
7be4e1901a | ||
|
|
b47146de45 | ||
|
|
97b229b2c7 | ||
|
|
6bb5bb9403 | ||
|
|
8c4b8271d8 | ||
|
|
69291c0574 | ||
|
|
2dc073dcd6 | ||
|
|
1894cb35d2 | ||
|
|
cd37420b07 | ||
|
|
55cb6b39db | ||
|
|
89d13b2285 | ||
|
|
1b64b0468a | ||
|
|
085fb83294 | ||
|
|
edd606563f | ||
|
|
fb804e99f0 | ||
|
|
1707cbcb54 | ||
|
|
6d6a630c31 | ||
|
|
ff2990e8e5 | ||
|
|
d679aff0fb | ||
|
|
603a444905 | ||
|
|
a002958448 | ||
|
|
cb4bc37424 | ||
|
|
0fc6f917e6 | ||
|
|
ec0d012b24 | ||
|
|
2cd4b171a1 | ||
|
|
0cb6906c4d | ||
|
|
4c19b93c30 | ||
|
|
6165f1b405 | ||
|
|
37a4221e43 | ||
|
|
9831b222b5 | ||
|
|
7b6f44fb74 | ||
|
|
399f4d0ea3 | ||
|
|
26a668a875 | ||
|
|
bf96262b6e | ||
|
|
1155fa1fe9 | ||
|
|
1875d31731 | ||
|
|
6f855fd14e | ||
|
|
08e392bb46 | ||
|
|
66d63e0546 | ||
|
|
7ee56fe8bc | ||
|
|
669d04ee48 | ||
|
|
cb1b37326e | ||
|
|
7bb73bee67 | ||
|
|
7286ddc338 | ||
|
|
7d1f9e33fe | ||
|
|
63c676ebfe | ||
|
|
fcaf6b7923 | ||
|
|
9f347a170a | ||
|
|
2f7cd4426d | ||
|
|
854f256470 | ||
|
|
5d0b40f752 | ||
|
|
27a2853ee8 | ||
|
|
67f6b80312 | ||
|
|
016037adc1 | ||
|
|
70d5c1034d | ||
|
|
ed6fb8754f | ||
|
|
6d08a9b11c | ||
|
|
cf6aa7cf79 | ||
|
|
6c4e57aae0 | ||
|
|
d08a04959c | ||
|
|
2762f74ce5 | ||
|
|
6ebcf6eec5 | ||
|
|
25b78fb7e1 | ||
|
|
670dd2dd96 | ||
|
|
1baf04f786 | ||
|
|
ce05b7a041 | ||
|
|
290cc146c8 | ||
|
|
db4d46a584 | ||
|
|
8ed2e51dde | ||
|
|
33702c09a6 | ||
|
|
45aeca3753 | ||
|
|
deae7dfb4d | ||
|
|
2af043ebdd | ||
|
|
e121295735 | ||
|
|
7c1c405a64 | ||
|
|
5d7c95a34d | ||
|
|
504c934fc9 | ||
|
|
81b0223f73 | ||
|
|
6d1e410bfd | ||
|
|
26c5c6152d | ||
|
|
d83bf0ebaf | ||
|
|
5adfe9a552 | ||
|
|
883f461dc7 | ||
|
|
8595ebc258 | ||
|
|
2bd31f4560 | ||
|
|
6df85ecadd | ||
|
|
07829918e4 | ||
|
|
b0d400b6eb | ||
|
|
706cb895ad | ||
|
|
45bf539b3c | ||
|
|
0923981f8d | ||
|
|
b0ac14363d | ||
|
|
5d346aba37 | ||
|
|
124cb15ebe | ||
|
|
a244ab898d | ||
|
|
3c551590eb | ||
|
|
10e30cccbc | ||
|
|
806a6b86a2 | ||
|
|
9719fdf5cc | ||
|
|
6d8764f8cb | ||
|
|
d9fd9cb408 | ||
|
|
7710ec0aba | ||
|
|
c97bd78cd0 | ||
|
|
728c5aa86e | ||
|
|
83d68ca293 | ||
|
|
47d7561db4 | ||
|
|
7d993e8319 | ||
|
|
1d1a597b22 | ||
|
|
23082f9300 | ||
|
|
ea1a7f9376 | ||
|
|
1796c7bab8 | ||
|
|
65ad76479a | ||
|
|
422db04ec8 | ||
|
|
d12f60fe37 | ||
|
|
194c1c991c | ||
|
|
3e8e2222c1 | ||
|
|
1620327a33 | ||
|
|
b1131e3667 | ||
|
|
db0250ac08 | ||
|
|
0a6b605760 | ||
|
|
81ac2375e5 | ||
|
|
6bcaca6cd7 | ||
|
|
ec8375252e | ||
|
|
766aa1762f | ||
|
|
5165e0a54c | ||
|
|
a2f7ad627e | ||
|
|
1176c162d4 | ||
|
|
a4880cd017 | ||
|
|
383bdce416 | ||
|
|
7cc300dd83 | ||
|
|
687809db9d | ||
|
|
1127ec3a90 | ||
|
|
a797edfae9 | ||
|
|
938e106252 | ||
|
|
a0eb9caaa2 | ||
|
|
442f53d45e | ||
|
|
68de1ca248 | ||
|
|
e16061141e | ||
|
|
64cbe3d209 | ||
|
|
ebdc6376d8 | ||
|
|
68335123a1 | ||
|
|
25b18f4ef8 | ||
|
|
9ad615b0ff | ||
|
|
12eaceb032 | ||
|
|
3263511b72 | ||
|
|
75cae8c647 | ||
|
|
9991ef624c | ||
|
|
489e9fce27 | ||
|
|
0655628073 | ||
|
|
9460822529 | ||
|
|
d02358ac0d | ||
|
|
366237a625 | ||
|
|
2f2da18994 | ||
|
|
ecd30db215 | ||
|
|
1980854f6f | ||
|
|
7d4f006c25 | ||
|
|
b697be2383 | ||
|
|
143e53c43f | ||
|
|
6dde1ade01 | ||
|
|
d4de391ac5 | ||
|
|
af15f4bc6a | ||
|
|
d4ace24caa | ||
|
|
c5ab47fa66 | ||
|
|
d60051b629 | ||
|
|
22ff330ee7 | ||
|
|
dd62bef66d | ||
|
|
e7feb99f8d | ||
|
|
6358accece | ||
|
|
9ce8a24eea | ||
|
|
4d0673f489 | ||
|
|
fbe1e6a7d5 | ||
|
|
4ed02ca501 | ||
|
|
af245ed9fe | ||
|
|
fc86a3e882 | ||
|
|
f9109edcf1 | ||
|
|
ec100e1a91 | ||
|
|
746c5f4a79 | ||
|
|
b2611728a1 | ||
|
|
fc6cc4234d | ||
|
|
7706c1cb63 | ||
|
|
4d7a07220c | ||
|
|
436875f7dc | ||
|
|
21c6f53ecc | ||
|
|
5472be8b72 | ||
|
|
d22542fcf3 | ||
|
|
1d8341eb27 | ||
|
|
1897a7ada3 | ||
|
|
a048698c66 | ||
|
|
f891e57f4a | ||
|
|
fcc590e48a | ||
|
|
9a02a2a65b | ||
|
|
536a6c5c60 | ||
|
|
86a6871ee8 | ||
|
|
6046045151 | ||
|
|
9c3ddd4ba4 | ||
|
|
6c9f55c1d7 | ||
|
|
5bec3d7dcd | ||
|
|
a4c309efe8 | ||
|
|
4e22a208dd | ||
|
|
ff9e77118e | ||
|
|
6c6dfa9ac4 | ||
|
|
d60d5207d8 | ||
|
|
8c0ae892f5 | ||
|
|
f570492a11 | ||
|
|
cc447304f5 | ||
|
|
8f8c6366e3 | ||
|
|
3b13bcb098 | ||
|
|
df60784b51 | ||
|
|
bae3122bb5 | ||
|
|
0770aef4bf | ||
|
|
c198350bfa | ||
|
|
a2ed388777 | ||
|
|
f6670c699a | ||
|
|
5fa4c95480 | ||
|
|
5b8608c186 | ||
|
|
bb75d42ede | ||
|
|
b4b6def07a | ||
|
|
b305700987 | ||
|
|
40232b7eb1 | ||
|
|
67ff666db4 | ||
|
|
5960fd4fe0 | ||
|
|
f8b28c519c | ||
|
|
ee1105b6dd | ||
|
|
4778274c97 | ||
|
|
d7ecb32238 | ||
|
|
744306fc50 | ||
|
|
11bbb8ed4d | ||
|
|
b5092a4444 | ||
|
|
e2442c07a9 | ||
|
|
6f924d4e83 | ||
|
|
faf1508914 | ||
|
|
7eb8152835 | ||
|
|
8f45905831 | ||
|
|
4ba2ffce5b | ||
|
|
ffb3659ef5 | ||
|
|
6dec07d562 | ||
|
|
afb22f3279 | ||
|
|
f2f369db64 | ||
|
|
635c76db93 | ||
|
|
5f50f1928d | ||
|
|
32c9ed1dc2 | ||
|
|
71741e144e | ||
|
|
f2e64cdd5e | ||
|
|
afaa5d5e9e | ||
|
|
d82861727d | ||
|
|
90f0795416 | ||
|
|
9efbe7771c | ||
|
|
a75caac13d | ||
|
|
279d0d20ea | ||
|
|
332ba96d34 | ||
|
|
3f6e5b7606 | ||
|
|
94703492fd | ||
|
|
df78117617 | ||
|
|
909b9480e4 | ||
|
|
21762ac1aa | ||
|
|
412bc175b4 | ||
|
|
dc0eb76e88 | ||
|
|
2695941a3c | ||
|
|
3506be1a70 | ||
|
|
cbf4786b39 | ||
|
|
8dbf334208 | ||
|
|
60ba5fbc72 | ||
|
|
c3ace0c44f | ||
|
|
8eceb90e63 | ||
|
|
4754e19e83 | ||
|
|
a0559d0dc9 | ||
|
|
8d03ce19b0 | ||
|
|
2470d851e9 | ||
|
|
df99f5c0a5 |
15
.babelrc
15
.babelrc
@@ -1,15 +0,0 @@
|
||||
{
|
||||
"comments": false,
|
||||
"compact": true,
|
||||
"optional": [
|
||||
// Experimental features.
|
||||
// "minification.constantFolding",
|
||||
// "minification.deadCodeElimination",
|
||||
|
||||
"es7.asyncFunctions",
|
||||
"es7.decorators",
|
||||
"es7.exportExtensions",
|
||||
"es7.functionBind",
|
||||
"runtime"
|
||||
]
|
||||
}
|
||||
2
.gitignore
vendored
2
.gitignore
vendored
@@ -1,6 +1,8 @@
|
||||
/.nyc_output/
|
||||
/dist/
|
||||
/node_modules/
|
||||
/src/api/index.js
|
||||
/src/xapi/mixins/index.js
|
||||
/src/xo-mixins/index.js
|
||||
|
||||
npm-debug.log
|
||||
|
||||
93
.jshintrc
93
.jshintrc
@@ -1,93 +0,0 @@
|
||||
{
|
||||
// Julien Fontanet JSHint configuration
|
||||
// https://gist.github.com/julien-f/8095615
|
||||
//
|
||||
// Changes from defaults:
|
||||
// - all enforcing options (except `++` & `--`) enabled
|
||||
// - single quotes
|
||||
// - indentation set to 2 instead of 4
|
||||
// - almost all relaxing options disabled
|
||||
// - environments are set to Node.js
|
||||
//
|
||||
// See http://jshint.com/docs/ for more details
|
||||
|
||||
"maxerr" : 50, // {int} Maximum error before stopping
|
||||
|
||||
// Enforcing
|
||||
"bitwise" : true, // true: Prohibit bitwise operators (&, |, ^, etc.)
|
||||
"camelcase" : true, // true: Identifiers must be in camelCase
|
||||
"curly" : true, // true: Require {} for every new block or scope
|
||||
"eqeqeq" : true, // true: Require triple equals (===) for comparison
|
||||
"forin" : true, // true: Require filtering for..in loops with obj.hasOwnProperty()
|
||||
"freeze" : true, // true: Prohibit overwriting prototypes of native objects (Array, Date, ...)
|
||||
"immed" : true, // true: Require immediate invocations to be wrapped in parens e.g. `(function () { } ());`
|
||||
"indent" : 2, // {int} Number of spaces to use for indentation
|
||||
"latedef" : true, // true: Require variables/functions to be defined before being used
|
||||
"newcap" : true, // true: Require capitalization of all constructor functions e.g. `new F()`
|
||||
"noarg" : true, // true: Prohibit use of `arguments.caller` and `arguments.callee`
|
||||
"noempty" : true, // true: Prohibit use of empty blocks
|
||||
"nonbsp" : true, // true: Prohibit use of non breakable spaces
|
||||
"nonew" : true, // true: Prohibit use of constructors for side-effects (without assignment)
|
||||
"plusplus" : false, // true: Prohibit use of `++` & `--`
|
||||
"quotmark" : "single", // Quotation mark consistency:
|
||||
// false : do nothing (default)
|
||||
// true : ensure whatever is used is consistent
|
||||
// "single" : require single quotes
|
||||
// "double" : require double quotes
|
||||
"undef" : true, // true: Require all non-global variables to be declared (prevents global leaks)
|
||||
"unused" : true, // true: Require all defined variables be used
|
||||
"strict" : false, // true: Requires all functions run in ES5 Strict Mode
|
||||
"maxcomplexity" : 7, // {int} Max cyclomatic complexity per function
|
||||
"maxdepth" : 3, // {int} Max depth of nested blocks (within functions)
|
||||
"maxlen" : 80, // {int} Max number of characters per line
|
||||
"maxparams" : 4, // {int} Max number of formal params allowed per function
|
||||
"maxstatements" : 20, // {int} Max number statements per function
|
||||
|
||||
// Relaxing
|
||||
"asi" : false, // true: Tolerate Automatic Semicolon Insertion (no semicolons)
|
||||
"boss" : false, // true: Tolerate assignments where comparisons would be expected
|
||||
"debug" : false, // true: Allow debugger statements e.g. browser breakpoints.
|
||||
"eqnull" : false, // true: Tolerate use of `== null`
|
||||
"esnext" : true, // true: Allow ES.next (ES6) syntax (ex: `const`)
|
||||
"evil" : false, // true: Tolerate use of `eval` and `new Function()`
|
||||
"expr" : false, // true: Tolerate `ExpressionStatement` as Programs
|
||||
"funcscope" : false, // true: Tolerate defining variables inside control statements
|
||||
"globalstrict" : false, // true: Allow global "use strict" (also enables 'strict')
|
||||
"iterator" : false, // true: Tolerate using the `__iterator__` property
|
||||
"lastsemic" : false, // true: Tolerate omitting a semicolon for the last statement of a 1-line block
|
||||
"laxbreak" : false, // true: Tolerate possibly unsafe line breakings
|
||||
"laxcomma" : false, // true: Tolerate comma-first style coding
|
||||
"loopfunc" : false, // true: Tolerate functions being defined in loops
|
||||
"moz" : false, // true: Allow Mozilla specific syntax (extends and overrides esnext features)
|
||||
// (ex: `for each`, multiple try/catch, function expression…)
|
||||
"multistr" : false, // true: Tolerate multi-line strings
|
||||
"notypeof" : false, // true: Tolerate typeof comparison with unknown values.
|
||||
"proto" : false, // true: Tolerate using the `__proto__` property
|
||||
"scripturl" : false, // true: Tolerate script-targeted URLs
|
||||
"shadow" : false, // true: Allows re-define variables later in code e.g. `var x=1; x=2;`
|
||||
"sub" : false, // true: Tolerate using `[]` notation when it can still be expressed in dot notation
|
||||
"supernew" : false, // true: Tolerate `new function () { ... };` and `new Object;`
|
||||
"validthis" : false, // true: Tolerate using this in a non-constructor function
|
||||
"noyield" : false, // true: Tolerate generators without yields
|
||||
|
||||
// Environments
|
||||
"browser" : false, // Web Browser (window, document, etc)
|
||||
"browserify" : false, // Browserify (node.js code in the browser)
|
||||
"couch" : false, // CouchDB
|
||||
"devel" : false, // Development/debugging (alert, confirm, etc)
|
||||
"dojo" : false, // Dojo Toolkit
|
||||
"jquery" : false, // jQuery
|
||||
"mocha" : false, // mocha
|
||||
"mootools" : false, // MooTools
|
||||
"node" : true, // Node.js
|
||||
"nonstandard" : false, // Widely adopted globals (escape, unescape, etc)
|
||||
"phantom" : false, // PhantomJS
|
||||
"prototypejs" : false, // Prototype and Scriptaculous
|
||||
"rhino" : false, // Rhino
|
||||
"worker" : false, // Web Workers
|
||||
"wsh" : false, // Windows Scripting Host
|
||||
"yui" : false, // Yahoo User Interface
|
||||
|
||||
// Custom Globals
|
||||
"globals" : {} // additional predefined global variables
|
||||
}
|
||||
3
.mention-bot
Normal file
3
.mention-bot
Normal file
@@ -0,0 +1,3 @@
|
||||
{
|
||||
"userBlacklist": [ "greenkeeper", "Wescoeur" ]
|
||||
}
|
||||
@@ -1 +0,0 @@
|
||||
--require ./better-stacks.js
|
||||
@@ -1,5 +1,6 @@
|
||||
/examples/
|
||||
example.js
|
||||
example.js.map
|
||||
*.example.js
|
||||
*.example.js.map
|
||||
|
||||
|
||||
@@ -1,8 +1,8 @@
|
||||
language: node_js
|
||||
node_js:
|
||||
# - 'stable'
|
||||
- '4'
|
||||
- '0.12'
|
||||
- stable
|
||||
- 6
|
||||
- 4
|
||||
|
||||
# Use containers.
|
||||
# http://docs.travis-ci.com/user/workers/container-based-infrastructure/
|
||||
|
||||
3
ISSUE_TEMPLATE.md
Normal file
3
ISSUE_TEMPLATE.md
Normal file
@@ -0,0 +1,3 @@
|
||||
# ALL ISSUES SHOULD BE CREATED IN XO-WEB'S TRACKER!
|
||||
|
||||
https://github.com/vatesfr/xo-web/issues
|
||||
@@ -19,7 +19,7 @@ ___
|
||||
|
||||
## Installation
|
||||
|
||||
Manual install procedure is [available here](https://github.com/vatesfr/xo/blob/master/doc/installation/README.md#installation).
|
||||
Manual install procedure is [available here](https://xen-orchestra.com/docs/from_the_sources.html).
|
||||
|
||||
## Compilation
|
||||
|
||||
|
||||
@@ -1,13 +1,5 @@
|
||||
Error.stackTraceLimit = 100
|
||||
|
||||
// Async stacks.
|
||||
//
|
||||
// Disabled for now as it cause a huge memory usage with
|
||||
// fs.createReadStream().
|
||||
// TODO: find a way to reenable.
|
||||
//
|
||||
// try { require('trace') } catch (_) {}
|
||||
|
||||
// Removes internal modules.
|
||||
try {
|
||||
var sep = require('path').sep
|
||||
|
||||
@@ -7,9 +7,25 @@
|
||||
// Better stack traces if possible.
|
||||
require('../better-stacks')
|
||||
|
||||
// Use Bluebird for all promises as it provides better performance and
|
||||
// less memory usage.
|
||||
global.Promise = require('bluebird')
|
||||
|
||||
// Make unhandled rejected promises visible.
|
||||
process.on('unhandledRejection', (reason) => {
|
||||
console.log('[Warn] Possibly unhandled rejection:', reason && reason.stack || reason)
|
||||
process.on('unhandledRejection', function (reason) {
|
||||
console.warn('[Warn] Possibly unhandled rejection:', reason && reason.stack || reason)
|
||||
})
|
||||
|
||||
;(function (EE) {
|
||||
var proto = EE.prototype
|
||||
var emit = proto.emit
|
||||
proto.emit = function patchedError (event, error) {
|
||||
if (event === 'error' && !this.listenerCount(event)) {
|
||||
return console.warn('[Warn] Unhandled error event:', error && error.stack || error)
|
||||
}
|
||||
|
||||
return emit.apply(this, arguments)
|
||||
}
|
||||
})(require('events').EventEmitter)
|
||||
|
||||
require('exec-promise')(require('../'))
|
||||
|
||||
@@ -7,4 +7,4 @@
|
||||
// Better stack traces if possible.
|
||||
require('../better-stacks')
|
||||
|
||||
require('exec-promise')(require('../dist/logs-cli'))
|
||||
require('exec-promise')(require('../dist/logs-cli').default)
|
||||
|
||||
15
gulpfile.js
15
gulpfile.js
@@ -7,13 +7,16 @@ var gulp = require('gulp')
|
||||
var babel = require('gulp-babel')
|
||||
var coffee = require('gulp-coffee')
|
||||
var plumber = require('gulp-plumber')
|
||||
var rimraf = require('rimraf')
|
||||
var sourceMaps = require('gulp-sourcemaps')
|
||||
var watch = require('gulp-watch')
|
||||
|
||||
var join = require('path').join
|
||||
|
||||
// ===================================================================
|
||||
|
||||
var SRC_DIR = __dirname + '/src'
|
||||
var DIST_DIR = __dirname + '/dist'
|
||||
var SRC_DIR = join(__dirname, 'src')
|
||||
var DIST_DIR = join(__dirname, 'dist')
|
||||
|
||||
var PRODUCTION = process.argv.indexOf('--production') !== -1
|
||||
|
||||
@@ -36,6 +39,10 @@ function src (patterns) {
|
||||
|
||||
// ===================================================================
|
||||
|
||||
gulp.task(function clean (cb) {
|
||||
rimraf(DIST_DIR, cb)
|
||||
})
|
||||
|
||||
gulp.task(function buildCoffee () {
|
||||
return src('**/*.coffee')
|
||||
.pipe(sourceMaps.init())
|
||||
@@ -51,7 +58,7 @@ gulp.task(function buildCoffee () {
|
||||
})
|
||||
|
||||
gulp.task(function buildEs6 () {
|
||||
return src('**/*.js')
|
||||
return src([ '**/*.js', '!*.spec.js' ])
|
||||
.pipe(sourceMaps.init())
|
||||
.pipe(babel())
|
||||
.pipe(sourceMaps.write('.'))
|
||||
@@ -60,4 +67,4 @@ gulp.task(function buildEs6 () {
|
||||
|
||||
// ===================================================================
|
||||
|
||||
gulp.task('build', gulp.parallel('buildCoffee', 'buildEs6'))
|
||||
gulp.task('build', gulp.series('clean', gulp.parallel('buildCoffee', 'buildEs6')))
|
||||
|
||||
4
index.js
4
index.js
@@ -4,8 +4,8 @@
|
||||
|
||||
// Enable xo logs by default.
|
||||
if (process.env.DEBUG === undefined) {
|
||||
process.env.DEBUG = 'app-conf,xen-api,xo:*'
|
||||
process.env.DEBUG = 'app-conf,xo:*,-xo:api'
|
||||
}
|
||||
|
||||
// Import the real main module.
|
||||
module.exports = require('./dist')
|
||||
module.exports = require('./dist').default
|
||||
|
||||
245
package.json
245
package.json
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "xo-server",
|
||||
"version": "4.15.2",
|
||||
"version": "5.7.2",
|
||||
"license": "AGPL-3.0",
|
||||
"description": "Server part of Xen-Orchestra",
|
||||
"keywords": [
|
||||
@@ -13,6 +13,10 @@
|
||||
"bugs": {
|
||||
"url": "https://github.com/vatesfr/xo-web/issues"
|
||||
},
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "git://github.com/vatesfr/xo-server.git"
|
||||
},
|
||||
"author": "Julien Fontanet <julien.fontanet@vates.fr>",
|
||||
"preferGlobal": true,
|
||||
"files": [
|
||||
@@ -21,153 +25,156 @@
|
||||
"dist/",
|
||||
"config.json",
|
||||
"index.js",
|
||||
"signin.jade"
|
||||
"signin.pug"
|
||||
],
|
||||
"directories": {
|
||||
"bin": "bin"
|
||||
},
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "git://github.com/vatesfr/xo-server.git"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=0.12 <5"
|
||||
"node": ">=4.5"
|
||||
},
|
||||
"dependencies": {
|
||||
"@marsaud/smb2-promise": "^0.2.0",
|
||||
"app-conf": "^0.4.0",
|
||||
"babel-runtime": "^5",
|
||||
"base64url": "^1.0.5",
|
||||
"blocked": "^1.1.0",
|
||||
"bluebird": "^3.1.1",
|
||||
"body-parser": "^1.13.3",
|
||||
"@marsaud/smb2-promise": "^0.2.1",
|
||||
"@nraynaud/struct-fu": "^1.0.1",
|
||||
"app-conf": "^0.4.1",
|
||||
"archiver": "^1.3.0",
|
||||
"arp-a": "^0.5.1",
|
||||
"babel-runtime": "^6.23.0",
|
||||
"base64url": "^2.0.0",
|
||||
"bind-property-descriptor": "^0.0.0",
|
||||
"blocked": "^1.2.1",
|
||||
"bluebird": "^3.5.0",
|
||||
"body-parser": "^1.17.1",
|
||||
"connect-flash": "^0.1.1",
|
||||
"cookie": "^0.2.3",
|
||||
"cookie-parser": "^1.3.5",
|
||||
"cron": "^1.0.9",
|
||||
"d3-time-format": "^0.3.0",
|
||||
"debug": "^2.1.3",
|
||||
"escape-string-regexp": "^1.0.3",
|
||||
"event-to-promise": "^0.7.0",
|
||||
"cookie": "^0.3.1",
|
||||
"cookie-parser": "^1.4.3",
|
||||
"cron": "^1.2.1",
|
||||
"d3-time-format": "^2.0.5",
|
||||
"debug": "^2.6.3",
|
||||
"decorator-synchronized": "^0.2.3",
|
||||
"escape-string-regexp": "^1.0.5",
|
||||
"event-to-promise": "^0.8.0",
|
||||
"exec-promise": "^0.6.1",
|
||||
"execa": "^0.2.2",
|
||||
"express": "^4.13.3",
|
||||
"express-session": "^1.11.3",
|
||||
"fatfs": "^0.10.3",
|
||||
"fs-extra": "^0.26.2",
|
||||
"fs-promise": "^0.4.1",
|
||||
"get-stream": "^1.1.0",
|
||||
"hashy": "~0.4.2",
|
||||
"helmet": "^1.1.0",
|
||||
"highland": "^2.5.1",
|
||||
"http-server-plus": "^0.6.4",
|
||||
"human-format": "^0.6.0",
|
||||
"is-my-json-valid": "^2.12.2",
|
||||
"jade": "^1.11.0",
|
||||
"js-yaml": "^3.2.7",
|
||||
"json-rpc-peer": "^0.11.0",
|
||||
"json5": "^0.4.0",
|
||||
"execa": "^0.6.3",
|
||||
"express": "^4.15.2",
|
||||
"express-session": "^1.15.1",
|
||||
"fatfs": "^0.10.4",
|
||||
"from2": "^2.3.0",
|
||||
"fs-extra": "^2.1.2",
|
||||
"fs-promise": "^2.0.1",
|
||||
"golike-defer": "^0.0.0",
|
||||
"hashy": "~0.6.1",
|
||||
"helmet": "^3.5.0",
|
||||
"highland": "^2.10.5",
|
||||
"http-proxy": "^1.16.2",
|
||||
"http-server-plus": "^0.8.0",
|
||||
"human-format": "^0.8.0",
|
||||
"is-my-json-valid": "^2.16.0",
|
||||
"is-redirect": "^1.0.0",
|
||||
"js-yaml": "^3.8.2",
|
||||
"json-rpc-peer": "^0.13.1",
|
||||
"json5": "^0.5.1",
|
||||
"julien-f-source-map-support": "0.0.0",
|
||||
"julien-f-unzip": "^0.2.1",
|
||||
"kindof": "^2.0.0",
|
||||
"level": "^1.3.0",
|
||||
"level": "^1.6.0",
|
||||
"level-party": "^3.0.4",
|
||||
"level-sublevel": "^6.5.2",
|
||||
"leveldown": "^1.4.2",
|
||||
"lodash.assign": "^4.0.3",
|
||||
"lodash.bind": "^4.1.0",
|
||||
"lodash.difference": "^4.1.0",
|
||||
"lodash.endswith": "^4.0.0",
|
||||
"lodash.every": "^4.0.0",
|
||||
"lodash.filter": "^4.2.0",
|
||||
"lodash.find": "^4.2.0",
|
||||
"lodash.findindex": "^4.2.0",
|
||||
"lodash.foreach": "^4.1.0",
|
||||
"lodash.get": "^4.1.2",
|
||||
"lodash.has": "^4.2.0",
|
||||
"lodash.includes": "^4.1.0",
|
||||
"lodash.invert": "^4.0.1",
|
||||
"lodash.isarray": "^4.0.0",
|
||||
"lodash.isboolean": "^3.0.2",
|
||||
"lodash.isempty": "^4.1.2",
|
||||
"lodash.isfunction": "^3.0.1",
|
||||
"lodash.isinteger": "^4.0.0",
|
||||
"lodash.isobject": "^3.0.0",
|
||||
"lodash.isstring": "^4.0.1",
|
||||
"lodash.keys": "^4.0.3",
|
||||
"lodash.map": "^4.2.0",
|
||||
"lodash.pick": "^4.1.0",
|
||||
"lodash.pickby": "^4.2.0",
|
||||
"lodash.remove": "^4.0.1",
|
||||
"lodash.some": "^4.2.0",
|
||||
"lodash.sortby": "^4.2.0",
|
||||
"lodash.startswith": "^4.0.0",
|
||||
"lodash.trim": "^4.2.0",
|
||||
"level-sublevel": "^6.6.1",
|
||||
"leveldown": "^1.6.0",
|
||||
"lodash": "^4.17.4",
|
||||
"make-error": "^1",
|
||||
"micromatch": "^2.3.2",
|
||||
"micromatch": "^2.3.11",
|
||||
"minimist": "^1.2.0",
|
||||
"ms": "^0.7.1",
|
||||
"multikey-hash": "^1.0.1",
|
||||
"ndjson": "^1.4.3",
|
||||
"moment-timezone": "^0.5.11",
|
||||
"ms": "^1.0.0",
|
||||
"multikey-hash": "^1.0.4",
|
||||
"ndjson": "^1.5.0",
|
||||
"parse-pairs": "^0.2.2",
|
||||
"partial-stream": "0.0.0",
|
||||
"passport": "^0.3.0",
|
||||
"passport": "^0.3.2",
|
||||
"passport-local": "^1.0.0",
|
||||
"promise-toolbox": "^0.2.0",
|
||||
"pretty-format": "^19.0.0",
|
||||
"promise-toolbox": "^0.8.2",
|
||||
"proxy-agent": "^2.0.0",
|
||||
"proxy-http-request": "0.1.0",
|
||||
"redis": "^2.0.1",
|
||||
"schema-inspector": "^1.5.1",
|
||||
"semver": "^5.1.0",
|
||||
"serve-static": "^1.9.2",
|
||||
"stack-chain": "^1.3.3",
|
||||
"through2": "^2.0.0",
|
||||
"struct-fu": "^1.0.0",
|
||||
"trace": "^2.0.1",
|
||||
"ws": "~1.0.1",
|
||||
"xen-api": "^0.7.4",
|
||||
"xml2js": "~0.4.6",
|
||||
"xo-acl-resolver": "0.0.0",
|
||||
"xo-collection": "^0.4.0",
|
||||
"xo-remote-parser": "^0.1.0"
|
||||
"pug": "^2.0.0-beta11",
|
||||
"redis": "^2.7.1",
|
||||
"schema-inspector": "^1.6.8",
|
||||
"semver": "^5.3.0",
|
||||
"serve-static": "^1.12.1",
|
||||
"split-lines": "^1.1.0",
|
||||
"stack-chain": "^1.3.7",
|
||||
"tar-stream": "^1.5.2",
|
||||
"through2": "^2.0.3",
|
||||
"tmp": "^0.0.31",
|
||||
"uuid": "^3.0.1",
|
||||
"ws": "^2.2.2",
|
||||
"xen-api": "^0.10.0-2",
|
||||
"xml2js": "~0.4.17",
|
||||
"xo-acl-resolver": "^0.2.3",
|
||||
"xo-collection": "^0.4.1",
|
||||
"xo-common": "^0.1.1",
|
||||
"xo-remote-parser": "^0.3",
|
||||
"xo-vmdk-to-vhd": "0.0.12"
|
||||
},
|
||||
"devDependencies": {
|
||||
"babel-eslint": "^4.0.10",
|
||||
"chai": "^3.0.0",
|
||||
"dependency-check": "^2.4.0",
|
||||
"ghooks": "^1.0.3",
|
||||
"babel-eslint": "^7.2.1",
|
||||
"babel-plugin-lodash": "^3.2.11",
|
||||
"babel-plugin-transform-decorators-legacy": "^1.3.4",
|
||||
"babel-plugin-transform-runtime": "^6.23.0",
|
||||
"babel-preset-env": "^1.2.2",
|
||||
"babel-preset-stage-0": "^6.22.0",
|
||||
"dependency-check": "^2.8.0",
|
||||
"gulp": "git://github.com/gulpjs/gulp#4.0",
|
||||
"gulp-babel": "^5",
|
||||
"gulp-coffee": "^2.3.1",
|
||||
"gulp-plumber": "^1.0.0",
|
||||
"gulp-sourcemaps": "^1.5.1",
|
||||
"gulp-watch": "^4.2.2",
|
||||
"leche": "^2.1.1",
|
||||
"mocha": "^2.2.1",
|
||||
"must": "^0.13.1",
|
||||
"sinon": "^1.14.1",
|
||||
"standard": "^5.2.1"
|
||||
"gulp-babel": "^6",
|
||||
"gulp-coffee": "^2.3.4",
|
||||
"gulp-plumber": "^1.1.0",
|
||||
"gulp-sourcemaps": "^2.4.1",
|
||||
"gulp-watch": "^4.3.11",
|
||||
"husky": "^0.13.2",
|
||||
"index-modules": "^0.3.0",
|
||||
"jest": "^19.0.2",
|
||||
"rimraf": "^2.6.1",
|
||||
"standard": "^9.0.2"
|
||||
},
|
||||
"scripts": {
|
||||
"build": "npm run build-indexes && gulp build --production",
|
||||
"build-indexes": "./tools/generate-index src/api src/xo-mixins",
|
||||
"dev": "npm run build-indexes && gulp build",
|
||||
"lint": "standard",
|
||||
"build": "gulp build --production",
|
||||
"commitmsg": "npm test",
|
||||
"dev": "gulp build",
|
||||
"dev-test": "jest --bail --watch",
|
||||
"posttest": "standard && dependency-check -i constant-stream ./package.json",
|
||||
"prebuild": "index-modules src/api src/xapi/mixins src/xo-mixins",
|
||||
"predev": "npm run prebuild",
|
||||
"prepublish": "npm run build",
|
||||
"start": "node bin/xo-server",
|
||||
"test": "mocha --opts .mocha.opts \"dist/**/*.spec.js\"",
|
||||
"posttest": "npm run lint && dependency-check ./package.json",
|
||||
"prerelease": "git checkout next-release && git pull --ff-only && git checkout stable && git pull --ff-only && git merge next-release",
|
||||
"release": "npm version",
|
||||
"postrelease": "git checkout master && git merge --ff-only stable && git checkout next-release && git merge --ff-only stable"
|
||||
"test": "jest"
|
||||
},
|
||||
"config": {
|
||||
"ghooks": {
|
||||
"pre-commit": "npm it"
|
||||
}
|
||||
"babel": {
|
||||
"plugins": [
|
||||
"lodash",
|
||||
"transform-decorators-legacy",
|
||||
"transform-runtime"
|
||||
],
|
||||
"presets": [
|
||||
[
|
||||
"env",
|
||||
{
|
||||
"targets": {
|
||||
"node": 4
|
||||
}
|
||||
}
|
||||
],
|
||||
"stage-0"
|
||||
]
|
||||
},
|
||||
"jest": {
|
||||
"roots": [
|
||||
"<rootDir>/src"
|
||||
],
|
||||
"testRegex": "\\.spec\\.js$"
|
||||
},
|
||||
"standard": {
|
||||
"ignore": [
|
||||
"dist/**"
|
||||
"dist"
|
||||
],
|
||||
"parser": "babel-eslint"
|
||||
}
|
||||
|
||||
@@ -1,11 +1,17 @@
|
||||
# Example XO-Server configuration.
|
||||
# BE *VERY* CAREFUL WHEN EDITING!
|
||||
# YAML FILES ARE SUPER SUPER SENSITIVE TO MISTAKES IN WHITESPACE OR ALIGNMENT!
|
||||
# visit http://www.yamllint.com/ to validate this file as needed
|
||||
|
||||
#=====================================================================
|
||||
|
||||
# Example XO-Server configuration.
|
||||
#
|
||||
# This file is automatically looking for at the following places:
|
||||
# - `$HOME/.config/xo-server/config.yaml`
|
||||
# - `/etc/xo-server/config.yaml`
|
||||
#
|
||||
# The first entries have priority.
|
||||
|
||||
#
|
||||
# Note: paths are relative to the configuration file.
|
||||
|
||||
#=====================================================================
|
||||
@@ -43,16 +49,17 @@ http:
|
||||
|
||||
# Hosts & ports on which to listen.
|
||||
#
|
||||
# By default, the server listens on 0.0.0.0:80.
|
||||
# By default, the server listens on [::]:80.
|
||||
listen:
|
||||
# Basic HTTP.
|
||||
-
|
||||
# Address on which the server is listening on.
|
||||
#
|
||||
# Sets it to '127.0.0.1' to listen only on the local host.
|
||||
# Sets it to 'localhost' for IP to listen only on the local host.
|
||||
#
|
||||
# Default: '0.0.0.0' (all addresses)
|
||||
#hostname: '127.0.0.1'
|
||||
# Default: all IPv6 addresses if available, otherwise all IPv4
|
||||
# addresses.
|
||||
#hostname: 'localhost'
|
||||
|
||||
# Port on which the server is listening on.
|
||||
#
|
||||
@@ -117,10 +124,23 @@ http:
|
||||
|
||||
# Connection to the Redis server.
|
||||
redis:
|
||||
# Syntax: redis://[db[:password]@]hostname[:port]
|
||||
# Unix sockets can be used
|
||||
#
|
||||
# Default: redis://localhost:6379
|
||||
#uri: ''
|
||||
# Default: undefined
|
||||
#socket: /var/run/redis/redis.sock
|
||||
|
||||
# Syntax: redis://[db[:password]@]hostname[:port][/db-number]
|
||||
#
|
||||
# Default: redis://localhost:6379/0
|
||||
#uri: redis://redis.company.lan/42
|
||||
|
||||
# List of aliased commands.
|
||||
#
|
||||
# See http://redis.io/topics/security#disabling-of-specific-commands
|
||||
#renameCommands:
|
||||
# del: '3dda29ad-3015-44f9-b13b-fa570de92489'
|
||||
# srem: '3fd758c9-5610-4e9d-a058-dbf4cb6d8bf0'
|
||||
|
||||
|
||||
# Directory containing the database of XO.
|
||||
# Currently used for logs.
|
||||
|
||||
59
signin.jade
59
signin.jade
@@ -1,59 +0,0 @@
|
||||
doctype html
|
||||
html
|
||||
head
|
||||
meta(charset = 'utf-8')
|
||||
meta(http-equiv = 'X-UA-Compatible' content = 'IE=edge,chrome=1')
|
||||
meta(name = 'viewport' content = 'width=device-width, initial-scale=1.0')
|
||||
title Xen Orchestra
|
||||
meta(name = 'author' content = 'Vates SAS')
|
||||
link(rel = 'stylesheet' href = 'styles/main.css')
|
||||
body
|
||||
.container
|
||||
.row-login
|
||||
.page-header
|
||||
img(src = 'images/logo_small.png')
|
||||
h2 Xen Orchestra
|
||||
form.form-horizontal(action = 'signin/local' method = 'post')
|
||||
fieldset
|
||||
legend.login
|
||||
h3 Sign in
|
||||
if error
|
||||
p.text-danger #{error}
|
||||
.form-group
|
||||
.col-sm-12
|
||||
.input-group
|
||||
span.input-group-addon
|
||||
i.xo-icon-user.fa-fw
|
||||
input.form-control.input-sm(
|
||||
name = 'username'
|
||||
type = 'text'
|
||||
placeholder = 'Username'
|
||||
required
|
||||
)
|
||||
.form-group
|
||||
.col-sm-12
|
||||
.input-group
|
||||
span.input-group-addon
|
||||
i.fa.fa-key.fa-fw
|
||||
input.form-control.input-sm(
|
||||
name = 'password'
|
||||
type = 'password'
|
||||
placeholder = 'Password'
|
||||
required
|
||||
)
|
||||
.form-group
|
||||
.col-sm-5
|
||||
.checkbox
|
||||
label
|
||||
input(
|
||||
name = 'remember-me'
|
||||
type = 'checkbox'
|
||||
)
|
||||
| Remember me
|
||||
.form-group
|
||||
.col-sm-12
|
||||
button.btn.btn-login.btn-block.btn-success
|
||||
i.fa.fa-sign-in
|
||||
| Sign in
|
||||
each label, id in strategies
|
||||
div: a(href = 'signin/' + id) Sign in with #{label}
|
||||
50
signin.pug
Normal file
50
signin.pug
Normal file
@@ -0,0 +1,50 @@
|
||||
doctype html
|
||||
html
|
||||
head
|
||||
meta(charset = 'utf-8')
|
||||
meta(http-equiv = 'X-UA-Compatible' content = 'IE=edge,chrome=1')
|
||||
meta(name = 'viewport' content = 'width=device-width, initial-scale=1.0')
|
||||
title Xen Orchestra
|
||||
meta(name = 'author' content = 'Vates SAS')
|
||||
link(rel = 'stylesheet' href = 'index.css')
|
||||
body(style = 'display: flex; height: 100vh;')
|
||||
div(style = 'margin: auto; width: 20em;')
|
||||
div.mb-2(style = 'display: flex;')
|
||||
img(src = 'assets/logo.png' style = 'margin: auto;')
|
||||
h2.text-xs-center.mb-2 Xen Orchestra
|
||||
form(action = 'signin/local' method = 'post')
|
||||
fieldset
|
||||
if error
|
||||
p.text-danger #{error}
|
||||
.input-group.mb-1
|
||||
span.input-group-addon
|
||||
i.xo-icon-user.fa-fw
|
||||
input.form-control(
|
||||
name = 'username'
|
||||
type = 'text'
|
||||
placeholder = 'Username'
|
||||
required
|
||||
)
|
||||
.input-group.mb-1
|
||||
span.input-group-addon
|
||||
i.fa.fa-key.fa-fw
|
||||
input.form-control(
|
||||
name = 'password'
|
||||
type = 'password'
|
||||
placeholder = 'Password'
|
||||
required
|
||||
)
|
||||
.checkbox
|
||||
label
|
||||
input(
|
||||
name = 'remember-me'
|
||||
type = 'checkbox'
|
||||
)
|
||||
|
|
||||
| Remember me
|
||||
div
|
||||
button.btn.btn-block.btn-info
|
||||
i.fa.fa-sign-in
|
||||
| Sign in
|
||||
each label, id in strategies
|
||||
div: a(href = 'signin/' + id) Sign in with #{label}
|
||||
@@ -1,70 +0,0 @@
|
||||
import {JsonRpcError} from 'json-rpc-peer'
|
||||
|
||||
// ===================================================================
|
||||
|
||||
// Export standard JSON-RPC errors.
|
||||
export {
|
||||
InvalidJson,
|
||||
InvalidParameters,
|
||||
InvalidRequest,
|
||||
JsonRpcError,
|
||||
MethodNotFound
|
||||
} from 'json-rpc-peer'
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
export class NotImplemented extends JsonRpcError {
|
||||
constructor () {
|
||||
super('not implemented', 0)
|
||||
}
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
export class NoSuchObject extends JsonRpcError {
|
||||
constructor (id, type) {
|
||||
super('no such object', 1, {id, type})
|
||||
}
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
export class Unauthorized extends JsonRpcError {
|
||||
constructor () {
|
||||
super('not authenticated or not enough permissions', 2)
|
||||
}
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
export class InvalidCredential extends JsonRpcError {
|
||||
constructor () {
|
||||
super('invalid credential', 3)
|
||||
}
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
export class AlreadyAuthenticated extends JsonRpcError {
|
||||
constructor () {
|
||||
super('already authenticated', 4)
|
||||
}
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
export class ForbiddenOperation extends JsonRpcError {
|
||||
constructor (operation, reason) {
|
||||
super(`forbidden operation: ${operation}`, 5, reason)
|
||||
}
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
// To be used with a user-readable message.
|
||||
// The message can be destined to be displayed to the front-end user.
|
||||
export class GenericError extends JsonRpcError {
|
||||
constructor (message) {
|
||||
super(message, 6)
|
||||
}
|
||||
}
|
||||
0
src/api/.index-modules
Normal file
0
src/api/.index-modules
Normal file
98
src/api/backup.js
Normal file
98
src/api/backup.js
Normal file
@@ -0,0 +1,98 @@
|
||||
import archiver from 'archiver'
|
||||
import { basename } from 'path'
|
||||
import { format } from 'json-rpc-peer'
|
||||
import { forEach } from 'lodash'
|
||||
|
||||
// ===================================================================
|
||||
|
||||
export function list ({ remote }) {
|
||||
return this.listVmBackups(remote)
|
||||
}
|
||||
|
||||
list.permission = 'admin'
|
||||
list.params = {
|
||||
remote: { type: 'string' }
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
export function scanDisk ({ remote, disk }) {
|
||||
return this.scanDiskBackup(remote, disk)
|
||||
}
|
||||
|
||||
scanDisk.permission = 'admin'
|
||||
scanDisk.params = {
|
||||
remote: { type: 'string' },
|
||||
disk: { type: 'string' }
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
export function scanFiles ({ remote, disk, partition, path }) {
|
||||
return this.scanFilesInDiskBackup(remote, disk, partition, path)
|
||||
}
|
||||
|
||||
scanFiles.permission = 'admin'
|
||||
scanFiles.params = {
|
||||
remote: { type: 'string' },
|
||||
disk: { type: 'string' },
|
||||
partition: { type: 'string', optional: true },
|
||||
path: { type: 'string' }
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
function handleFetchFiles (req, res, { remote, disk, partition, paths, format: archiveFormat }) {
|
||||
this.fetchFilesInDiskBackup(remote, disk, partition, paths).then(files => {
|
||||
res.setHeader('content-disposition', 'attachment')
|
||||
res.setHeader('content-type', 'application/octet-stream')
|
||||
|
||||
const nFiles = paths.length
|
||||
|
||||
// Send lone file directly
|
||||
if (nFiles === 1) {
|
||||
files[0].pipe(res)
|
||||
return
|
||||
}
|
||||
|
||||
const archive = archiver(archiveFormat)
|
||||
archive.on('error', error => {
|
||||
console.error(error)
|
||||
res.end(format.error(0, error))
|
||||
})
|
||||
|
||||
forEach(files, file => {
|
||||
archive.append(file, { name: basename(file.path) })
|
||||
})
|
||||
archive.finalize()
|
||||
|
||||
archive.pipe(res)
|
||||
}).catch(error => {
|
||||
console.error(error)
|
||||
res.writeHead(500)
|
||||
res.end(format.error(0, error))
|
||||
})
|
||||
}
|
||||
|
||||
export async function fetchFiles ({ format = 'zip', ...params }) {
|
||||
const fileName = params.paths.length > 1
|
||||
? `restore_${new Date().toJSON()}.${format}`
|
||||
: basename(params.paths[0])
|
||||
|
||||
return this.registerHttpRequest(handleFetchFiles, { ...params, format }, {
|
||||
suffix: encodeURI(`/${fileName}`)
|
||||
}).then(url => ({ $getFrom: url }))
|
||||
}
|
||||
|
||||
fetchFiles.permission = 'admin'
|
||||
fetchFiles.params = {
|
||||
remote: { type: 'string' },
|
||||
disk: { type: 'string' },
|
||||
format: { type: 'string', optional: true },
|
||||
partition: { type: 'string', optional: true },
|
||||
paths: {
|
||||
type: 'array',
|
||||
items: { type: 'string' },
|
||||
minLength: 1
|
||||
}
|
||||
}
|
||||
@@ -1,14 +1,15 @@
|
||||
$debug = (require 'debug') 'xo:api:vm'
|
||||
$find = require 'lodash.find'
|
||||
$findIndex = require 'lodash.findindex'
|
||||
$forEach = require 'lodash.foreach'
|
||||
endsWith = require 'lodash.endswith'
|
||||
startsWith = require 'lodash.startswith'
|
||||
$find = require 'lodash/find'
|
||||
$findIndex = require 'lodash/findIndex'
|
||||
$forEach = require 'lodash/forEach'
|
||||
endsWith = require 'lodash/endsWith'
|
||||
startsWith = require 'lodash/startsWith'
|
||||
{coroutine: $coroutine} = require 'bluebird'
|
||||
{format} = require 'json-rpc-peer'
|
||||
{
|
||||
extractProperty,
|
||||
parseXml,
|
||||
promisify
|
||||
mapToArray,
|
||||
parseXml
|
||||
} = require '../utils'
|
||||
|
||||
#=====================================================================
|
||||
@@ -261,6 +262,42 @@ stats.resolve = {
|
||||
|
||||
exports.stats = stats;
|
||||
|
||||
#---------------------------------------------------------------------
|
||||
|
||||
handleInstallSupplementalPack = $coroutine (req, res, { hostId }) ->
|
||||
xapi = @getXapi(hostId)
|
||||
|
||||
# Timeout seems to be broken in Node 4.
|
||||
# See https://github.com/nodejs/node/issues/3319
|
||||
req.setTimeout(43200000) # 12 hours
|
||||
req.length = req.headers['content-length']
|
||||
|
||||
try
|
||||
yield xapi.installSupplementalPack(req, { hostId })
|
||||
res.end(format.response(0))
|
||||
catch e
|
||||
res.writeHead(500)
|
||||
res.end(format.error(0, new Error(e.message)))
|
||||
|
||||
return
|
||||
|
||||
installSupplementalPack = $coroutine ({host}) ->
|
||||
return {
|
||||
$sendTo: yield @registerHttpRequest(handleInstallSupplementalPack, { hostId: host.id })
|
||||
}
|
||||
|
||||
installSupplementalPack.description = 'installs supplemental pack from ISO file'
|
||||
|
||||
installSupplementalPack.params = {
|
||||
host: { type: 'string' }
|
||||
}
|
||||
|
||||
installSupplementalPack.resolve = {
|
||||
host: ['host', 'host', 'admin']
|
||||
}
|
||||
|
||||
exports.installSupplementalPack = installSupplementalPack;
|
||||
|
||||
#=====================================================================
|
||||
|
||||
Object.defineProperty(exports, '__esModule', {
|
||||
|
||||
44
src/api/ip-pool.js
Normal file
44
src/api/ip-pool.js
Normal file
@@ -0,0 +1,44 @@
|
||||
import { unauthorized } from 'xo-common/api-errors'
|
||||
|
||||
export function create (props) {
|
||||
return this.createIpPool(props)
|
||||
}
|
||||
|
||||
create.permission = 'admin'
|
||||
create.description = 'Creates a new ipPool'
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
function delete_ ({ id }) {
|
||||
return this.deleteIpPool(id)
|
||||
}
|
||||
export { delete_ as delete }
|
||||
|
||||
delete_.permission = 'admin'
|
||||
delete_.description = 'Delete an ipPool'
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
export function getAll (params) {
|
||||
const { user } = this
|
||||
|
||||
if (!user) {
|
||||
throw unauthorized()
|
||||
}
|
||||
|
||||
return this.getAllIpPools(user.permission === 'admin'
|
||||
? params && params.userId
|
||||
: user.id
|
||||
)
|
||||
}
|
||||
|
||||
getAll.description = 'List all ipPools'
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
export function set ({ id, ...props }) {
|
||||
return this.updateIpPool(id, props)
|
||||
}
|
||||
|
||||
set.permission = 'admin'
|
||||
set.description = 'Allow to modify an existing ipPool'
|
||||
@@ -18,7 +18,11 @@ get.params = {
|
||||
}
|
||||
|
||||
export async function create ({job}) {
|
||||
return (await this.createJob(this.session.get('user_id'), job)).id
|
||||
if (!job.userId) {
|
||||
job.userId = this.session.get('user_id')
|
||||
}
|
||||
|
||||
return (await this.createJob(job)).id
|
||||
}
|
||||
|
||||
create.permission = 'admin'
|
||||
@@ -27,7 +31,9 @@ create.params = {
|
||||
job: {
|
||||
type: 'object',
|
||||
properties: {
|
||||
userId: {type: 'string', optional: true},
|
||||
name: {type: 'string', optional: true},
|
||||
timeout: {type: 'number', optional: true},
|
||||
type: {type: 'string'},
|
||||
key: {type: 'string'},
|
||||
method: {type: 'string'},
|
||||
@@ -38,14 +44,7 @@ create.params = {
|
||||
items: {
|
||||
type: 'array',
|
||||
items: {
|
||||
type: 'object',
|
||||
properties: {
|
||||
type: {type: 'string'},
|
||||
values: {
|
||||
type: 'array',
|
||||
items: {type: 'object'}
|
||||
}
|
||||
}
|
||||
type: 'object'
|
||||
}
|
||||
}
|
||||
},
|
||||
@@ -67,9 +66,10 @@ set.params = {
|
||||
properties: {
|
||||
id: {type: 'string'},
|
||||
name: {type: 'string', optional: true},
|
||||
type: {type: 'string'},
|
||||
key: {type: 'string'},
|
||||
method: {type: 'string'},
|
||||
timeout: {type: ['number', 'null'], optional: true},
|
||||
type: {type: 'string', optional: true},
|
||||
key: {type: 'string', optional: true},
|
||||
method: {type: 'string', optional: true},
|
||||
paramsVector: {
|
||||
type: 'object',
|
||||
properties: {
|
||||
@@ -77,14 +77,7 @@ set.params = {
|
||||
items: {
|
||||
type: 'array',
|
||||
items: {
|
||||
type: 'object',
|
||||
properties: {
|
||||
type: {type: 'string'},
|
||||
values: {
|
||||
type: 'array',
|
||||
items: {type: 'object'}
|
||||
}
|
||||
}
|
||||
type: 'object'
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
@@ -16,13 +16,23 @@ export async function get ({namespace}) {
|
||||
}
|
||||
|
||||
get.description = 'returns logs list for one namespace'
|
||||
get.params = {
|
||||
namespace: { type: 'string' }
|
||||
}
|
||||
get.permission = 'admin'
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
async function delete_ ({namespace, id}) {
|
||||
const logger = await this.getLogger(namespace)
|
||||
logger.del(id)
|
||||
}
|
||||
|
||||
delete_.description = 'deletes on or several logs from a namespace'
|
||||
delete_.description = 'deletes one or several logs from a namespace'
|
||||
delete_.params = {
|
||||
id: { type: [ 'array', 'string' ] },
|
||||
namespace: { type: 'string' }
|
||||
}
|
||||
delete_.permission = 'admin'
|
||||
|
||||
export {delete_ as delete}
|
||||
|
||||
@@ -1,3 +1,9 @@
|
||||
import { mapToArray } from '../utils'
|
||||
|
||||
export function getBondModes () {
|
||||
return ['balance-slb', 'active-backup', 'lacp']
|
||||
}
|
||||
|
||||
export async function create ({ pool, name, description, pif, mtu = 1500, vlan = 0 }) {
|
||||
return this.getXapi(pool).createNetwork({
|
||||
name,
|
||||
@@ -24,6 +30,81 @@ create.permission = 'admin'
|
||||
|
||||
// =================================================================
|
||||
|
||||
export async function createBonded ({ pool, name, description, pifs, mtu = 1500, mac, bondMode }) {
|
||||
return this.getXapi(pool).createBondedNetwork({
|
||||
name,
|
||||
description,
|
||||
pifIds: mapToArray(pifs, pif =>
|
||||
this.getObject(pif, 'PIF')._xapiId
|
||||
),
|
||||
mtu: +mtu,
|
||||
mac,
|
||||
bondMode
|
||||
})
|
||||
}
|
||||
|
||||
createBonded.params = {
|
||||
pool: { type: 'string' },
|
||||
name: { type: 'string' },
|
||||
description: { type: 'string', optional: true },
|
||||
pifs: {
|
||||
type: 'array',
|
||||
items: {
|
||||
type: 'string'
|
||||
}
|
||||
},
|
||||
mtu: { type: ['integer', 'string'], optional: true },
|
||||
// RegExp since schema-inspector does not provide a param check based on an enumeration
|
||||
bondMode: { type: 'string', pattern: new RegExp(`^(${getBondModes().join('|')})$`) }
|
||||
}
|
||||
|
||||
createBonded.resolve = {
|
||||
pool: ['pool', 'pool', 'administrate']
|
||||
}
|
||||
createBonded.permission = 'admin'
|
||||
createBonded.description = 'Create a bonded network. bondMode can be balance-slb, active-backup or lacp'
|
||||
|
||||
// ===================================================================
|
||||
|
||||
export async function set ({
|
||||
network,
|
||||
|
||||
name_description: nameDescription,
|
||||
name_label: nameLabel,
|
||||
defaultIsLocked,
|
||||
id
|
||||
}) {
|
||||
await this.getXapi(network).setNetworkProperties(network._xapiId, {
|
||||
nameDescription,
|
||||
nameLabel,
|
||||
defaultIsLocked
|
||||
})
|
||||
}
|
||||
|
||||
set.params = {
|
||||
id: {
|
||||
type: 'string'
|
||||
},
|
||||
name_label: {
|
||||
type: 'string',
|
||||
optional: true
|
||||
},
|
||||
name_description: {
|
||||
type: 'string',
|
||||
optional: true
|
||||
},
|
||||
defaultIsLocked: {
|
||||
type: 'boolean',
|
||||
optional: true
|
||||
}
|
||||
}
|
||||
|
||||
set.resolve = {
|
||||
network: ['id', 'network', 'administrate']
|
||||
}
|
||||
|
||||
// =================================================================
|
||||
|
||||
export async function delete_ ({ network }) {
|
||||
return this.getXapi(network).deleteNetwork(network._xapiId)
|
||||
}
|
||||
|
||||
@@ -1,7 +1,3 @@
|
||||
import {
|
||||
GenericError
|
||||
} from '../api-errors'
|
||||
|
||||
// FIXME: too low level, should be removed.
|
||||
|
||||
// ===================================================================
|
||||
@@ -24,17 +20,8 @@ delete_.resolve = {
|
||||
// ===================================================================
|
||||
// Disconnect
|
||||
|
||||
export async function disconnect ({PBD}) {
|
||||
// TODO: check if PBD is attached before
|
||||
try {
|
||||
await this.getXapi(PBD).call('PBD.unplug', PBD._xapiRef)
|
||||
} catch (error) {
|
||||
if (error.code === 'VDI_IN_USE') {
|
||||
throw new GenericError('VDI in use')
|
||||
} else {
|
||||
throw error
|
||||
}
|
||||
}
|
||||
export async function disconnect ({ pbd }) {
|
||||
return this.getXapi(pbd).unplugPbd(pbd._xapiId)
|
||||
}
|
||||
|
||||
disconnect.params = {
|
||||
@@ -42,7 +29,7 @@ disconnect.params = {
|
||||
}
|
||||
|
||||
disconnect.resolve = {
|
||||
PBD: ['id', 'PBD', 'administrate']
|
||||
pbd: ['id', 'PBD', 'administrate']
|
||||
}
|
||||
|
||||
// ===================================================================
|
||||
|
||||
@@ -1,5 +1,15 @@
|
||||
// TODO: too low level, move into host.
|
||||
|
||||
import { IPV4_CONFIG_MODES, IPV6_CONFIG_MODES } from '../xapi'
|
||||
|
||||
export function getIpv4ConfigurationModes () {
|
||||
return IPV4_CONFIG_MODES
|
||||
}
|
||||
|
||||
export function getIpv6ConfigurationModes () {
|
||||
return IPV6_CONFIG_MODES
|
||||
}
|
||||
|
||||
// ===================================================================
|
||||
// Delete
|
||||
|
||||
@@ -66,3 +76,18 @@ reconfigureIp.params = {
|
||||
reconfigureIp.resolve = {
|
||||
pif: ['id', 'PIF', 'administrate']
|
||||
}
|
||||
|
||||
// ===================================================================
|
||||
|
||||
export async function editPif ({ pif, vlan }) {
|
||||
await this.getXapi(pif).editPif(pif._xapiId, { vlan })
|
||||
}
|
||||
|
||||
editPif.params = {
|
||||
id: { type: 'string' },
|
||||
vlan: { type: ['integer', 'string'] }
|
||||
}
|
||||
|
||||
editPif.resolve = {
|
||||
pif: ['id', 'PIF', 'administrate']
|
||||
}
|
||||
|
||||
@@ -102,3 +102,24 @@ purgeConfiguration.params = {
|
||||
}
|
||||
|
||||
purgeConfiguration.permission = 'admin'
|
||||
|
||||
// ---------------------------------------------------------------------
|
||||
|
||||
export async function test ({ id, data }) {
|
||||
await this.testPlugin(id, data)
|
||||
}
|
||||
|
||||
test.description = 'Test a plugin with its current configuration'
|
||||
|
||||
test.params = {
|
||||
id: {
|
||||
type: 'string'
|
||||
},
|
||||
data: {
|
||||
optional: true
|
||||
}
|
||||
}
|
||||
|
||||
test.permission = 'admin'
|
||||
|
||||
// ---------------------------------------------------------------------
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import {GenericError} from '../api-errors'
|
||||
import { format } from 'json-rpc-peer'
|
||||
|
||||
// ===================================================================
|
||||
|
||||
@@ -35,21 +35,21 @@ set.resolve = {
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
export async function setDefaultSr ({pool, sr}) {
|
||||
await this.getXapi(pool).setDefaultSr(sr._xapiId)
|
||||
export async function setDefaultSr ({ sr }) {
|
||||
await this.hasPermissions(this.user.id, [ [ sr.$pool, 'administrate' ] ])
|
||||
|
||||
await this.getXapi(sr).setDefaultSr(sr._xapiId)
|
||||
}
|
||||
|
||||
setDefaultSr.permission = '' // signed in
|
||||
|
||||
setDefaultSr.params = {
|
||||
pool: {
|
||||
type: 'string'
|
||||
},
|
||||
sr: {
|
||||
type: 'string'
|
||||
}
|
||||
}
|
||||
|
||||
setDefaultSr.resolve = {
|
||||
pool: ['pool', 'pool', 'administrate'],
|
||||
sr: ['sr', 'SR']
|
||||
}
|
||||
// -------------------------------------------------------------------
|
||||
@@ -70,11 +70,28 @@ installPatch.params = {
|
||||
installPatch.resolve = {
|
||||
pool: ['pool', 'pool', 'administrate']
|
||||
}
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
export async function installAllPatches ({ pool }) {
|
||||
await this.getXapi(pool).installAllPoolPatchesOnAllHosts()
|
||||
}
|
||||
|
||||
installAllPatches.params = {
|
||||
pool: {
|
||||
type: 'string'
|
||||
}
|
||||
}
|
||||
|
||||
installAllPatches.resolve = {
|
||||
pool: ['pool', 'pool', 'administrate']
|
||||
}
|
||||
|
||||
installAllPatches.description = 'Install automatically all patches for every hosts of a pool'
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
async function handlePatchUpload (req, res, {pool}) {
|
||||
const {headers: {['content-length']: contentLength}} = req
|
||||
const contentLength = req.headers['content-length']
|
||||
if (!contentLength) {
|
||||
res.writeHead(411)
|
||||
res.end('Content length is mandatory')
|
||||
@@ -106,12 +123,7 @@ export {uploadPatch as patch}
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
export async function mergeInto ({ source, target, force }) {
|
||||
try {
|
||||
await this.mergeXenPools(source._xapiId, target._xapiId, force)
|
||||
} catch (e) {
|
||||
// FIXME: should we expose plain XAPI error messages?
|
||||
throw new GenericError(e.message)
|
||||
}
|
||||
await this.mergeXenPools(source._xapiId, target._xapiId, force)
|
||||
}
|
||||
|
||||
mergeInto.params = {
|
||||
@@ -130,7 +142,7 @@ mergeInto.resolve = {
|
||||
export async function getLicenseState ({pool}) {
|
||||
return this.getXapi(pool).call(
|
||||
'pool.get_license_state',
|
||||
pool._xapiId.$ref,
|
||||
pool._xapiId.$ref
|
||||
)
|
||||
}
|
||||
|
||||
@@ -143,3 +155,38 @@ getLicenseState.params = {
|
||||
getLicenseState.resolve = {
|
||||
pool: ['pool', 'pool', 'administrate']
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
async function handleInstallSupplementalPack (req, res, { poolId }) {
|
||||
const xapi = this.getXapi(poolId)
|
||||
|
||||
// Timeout seems to be broken in Node 4.
|
||||
// See https://github.com/nodejs/node/issues/3319
|
||||
req.setTimeout(43200000) // 12 hours
|
||||
req.length = req.headers['content-length']
|
||||
|
||||
try {
|
||||
await xapi.installSupplementalPackOnAllHosts(req)
|
||||
res.end(format.response(0))
|
||||
} catch (e) {
|
||||
res.writeHead(500)
|
||||
res.end(format.error(0, new Error(e.message)))
|
||||
}
|
||||
}
|
||||
|
||||
export async function installSupplementalPack ({ pool }) {
|
||||
return {
|
||||
$sendTo: await this.registerHttpRequest(handleInstallSupplementalPack, { poolId: pool.id })
|
||||
}
|
||||
}
|
||||
|
||||
installSupplementalPack.description = 'installs supplemental pack from ISO file on all hosts'
|
||||
|
||||
installSupplementalPack.params = {
|
||||
pool: { type: 'string' }
|
||||
}
|
||||
|
||||
installSupplementalPack.resolve = {
|
||||
pool: ['pool', 'pool', 'admin']
|
||||
}
|
||||
|
||||
@@ -1,12 +1,12 @@
|
||||
export async function getAll () {
|
||||
return /* await */ this.getAllRemotes()
|
||||
return this.getAllRemotes()
|
||||
}
|
||||
|
||||
getAll.permission = 'admin'
|
||||
getAll.description = 'Gets all existing fs remote points'
|
||||
|
||||
export async function get ({id}) {
|
||||
return /* await */ this.getRemote(id)
|
||||
return this.getRemote(id)
|
||||
}
|
||||
|
||||
get.permission = 'admin'
|
||||
@@ -15,8 +15,18 @@ get.params = {
|
||||
id: {type: 'string'}
|
||||
}
|
||||
|
||||
export async function test ({id}) {
|
||||
return this.testRemote(id)
|
||||
}
|
||||
|
||||
test.permission = 'admin'
|
||||
test.description = 'Performs a read/write matching test on a remote point'
|
||||
test.params = {
|
||||
id: {type: 'string'}
|
||||
}
|
||||
|
||||
export async function list ({id}) {
|
||||
return /* await */ this.listRemoteBackups(id)
|
||||
return this.listRemoteBackups(id)
|
||||
}
|
||||
|
||||
list.permission = 'admin'
|
||||
@@ -26,7 +36,7 @@ list.params = {
|
||||
}
|
||||
|
||||
export async function create ({name, url}) {
|
||||
return /* await */ this.createRemote({name, url})
|
||||
return this.createRemote({name, url})
|
||||
}
|
||||
|
||||
create.permission = 'admin'
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import {
|
||||
Unauthorized
|
||||
} from '../api-errors'
|
||||
unauthorized
|
||||
} from 'xo-common/api-errors'
|
||||
|
||||
// ===================================================================
|
||||
|
||||
@@ -51,11 +51,12 @@ delete_.params = {
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
export function set ({ id, name, subjects, objects, limits }) {
|
||||
export function set ({ id, name, subjects, objects, ipPools, limits }) {
|
||||
return this.updateResourceSet(id, {
|
||||
limits,
|
||||
name,
|
||||
objects,
|
||||
ipPools,
|
||||
subjects
|
||||
})
|
||||
}
|
||||
@@ -84,6 +85,13 @@ set.params = {
|
||||
},
|
||||
optional: true
|
||||
},
|
||||
ipPools: {
|
||||
type: 'array',
|
||||
items: {
|
||||
type: 'string'
|
||||
},
|
||||
optional: true
|
||||
},
|
||||
limits: {
|
||||
type: 'object',
|
||||
optional: true
|
||||
@@ -109,12 +117,14 @@ get.params = {
|
||||
export async function getAll () {
|
||||
const { user } = this
|
||||
if (!user) {
|
||||
throw new Unauthorized()
|
||||
throw unauthorized()
|
||||
}
|
||||
|
||||
return this.getAllResourceSets(user.id)
|
||||
}
|
||||
|
||||
getAll.description = 'Get the list of all existing resource set'
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
export function addObject ({ id, object }) {
|
||||
@@ -227,3 +237,4 @@ export function recomputeAllLimits () {
|
||||
}
|
||||
|
||||
recomputeAllLimits.permission = 'admin'
|
||||
recomputeAllLimits.description = 'Recompute manually the current resource set usage'
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
export async function getAll () {
|
||||
return /* await */ this.getRoles()
|
||||
}
|
||||
|
||||
getAll.description = 'Returns the list of all existing roles'
|
||||
|
||||
@@ -17,8 +17,8 @@ get.params = {
|
||||
id: {type: 'string'}
|
||||
}
|
||||
|
||||
export async function create ({jobId, cron, enabled, name}) {
|
||||
return /* await */ this.createSchedule(this.session.get('user_id'), {job: jobId, cron, enabled, name})
|
||||
export async function create ({ jobId, cron, enabled, name, timezone }) {
|
||||
return /* await */ this.createSchedule(this.session.get('user_id'), { job: jobId, cron, enabled, name, timezone })
|
||||
}
|
||||
|
||||
create.permission = 'admin'
|
||||
@@ -30,8 +30,8 @@ create.params = {
|
||||
name: {type: 'string', optional: true}
|
||||
}
|
||||
|
||||
export async function set ({id, jobId, cron, enabled, name}) {
|
||||
await this.updateSchedule(id, {job: jobId, cron, enabled, name})
|
||||
export async function set ({ id, jobId, cron, enabled, name, timezone }) {
|
||||
await this.updateSchedule(id, { job: jobId, cron, enabled, name, timezone })
|
||||
}
|
||||
|
||||
set.permission = 'admin'
|
||||
|
||||
@@ -4,13 +4,14 @@ import {
|
||||
} from '../utils'
|
||||
|
||||
export async function add ({
|
||||
label,
|
||||
host,
|
||||
username,
|
||||
password,
|
||||
readOnly,
|
||||
autoConnect = true
|
||||
}) {
|
||||
const server = await this.registerXenServer({host, username, password, readOnly})
|
||||
const server = await this.registerXenServer({label, host, username, password, readOnly})
|
||||
|
||||
if (autoConnect) {
|
||||
// Connect asynchronously, ignore any errors.
|
||||
@@ -25,6 +26,10 @@ add.description = 'register a new Xen server'
|
||||
add.permission = 'admin'
|
||||
|
||||
add.params = {
|
||||
label: {
|
||||
optional: true,
|
||||
type: 'string'
|
||||
},
|
||||
host: {
|
||||
type: 'string'
|
||||
},
|
||||
@@ -70,8 +75,8 @@ getAll.permission = 'admin'
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
export async function set ({id, host, username, password, readOnly}) {
|
||||
await this.updateXenServer(id, {host, username, password, readOnly})
|
||||
export async function set ({id, label, host, username, password, readOnly}) {
|
||||
await this.updateXenServer(id, {label, host, username, password, readOnly})
|
||||
}
|
||||
|
||||
set.description = 'changes the properties of a Xen server'
|
||||
@@ -82,6 +87,10 @@ set.params = {
|
||||
id: {
|
||||
type: 'string'
|
||||
},
|
||||
label: {
|
||||
type: 'string',
|
||||
optional: true
|
||||
},
|
||||
host: {
|
||||
type: 'string',
|
||||
optional: true
|
||||
|
||||
@@ -1,21 +1,18 @@
|
||||
import {deprecate} from 'util'
|
||||
|
||||
import {InvalidCredential, AlreadyAuthenticated} from '../api-errors'
|
||||
import { getUserPublicProperties } from '../utils'
|
||||
import {invalidCredentials} from 'xo-common/api-errors'
|
||||
|
||||
// ===================================================================
|
||||
|
||||
export async function signIn (credentials) {
|
||||
if (this.session.has('user_id')) {
|
||||
throw new AlreadyAuthenticated()
|
||||
}
|
||||
|
||||
const user = await this.authenticateUser(credentials)
|
||||
if (!user) {
|
||||
throw new InvalidCredential()
|
||||
throw invalidCredentials()
|
||||
}
|
||||
this.session.set('user_id', user.id)
|
||||
|
||||
return this.getUserPublicProperties(user)
|
||||
return getUserPublicProperties(user)
|
||||
}
|
||||
|
||||
signIn.description = 'sign in'
|
||||
@@ -55,7 +52,7 @@ export async function getUser () {
|
||||
|
||||
return userId === undefined
|
||||
? null
|
||||
: this.getUserPublicProperties(await this.getUser(userId))
|
||||
: getUserPublicProperties(await this.getUser(userId))
|
||||
}
|
||||
|
||||
getUser.description = 'return the currently connected user'
|
||||
|
||||
158
src/api/sr.js
158
src/api/sr.js
@@ -1,3 +1,4 @@
|
||||
import { asInteger } from '../xapi/utils'
|
||||
import {
|
||||
ensureArray,
|
||||
forEach,
|
||||
@@ -33,7 +34,7 @@ set.resolve = {
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
export async function scan ({SR}) {
|
||||
export async function scan ({ SR }) {
|
||||
await this.getXapi(SR).call('SR.scan', SR._xapiRef)
|
||||
}
|
||||
|
||||
@@ -48,8 +49,16 @@ scan.resolve = {
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
// TODO: find a way to call this "delete" and not destroy
|
||||
export async function destroy ({SR}) {
|
||||
await this.getXapi(SR).call('SR.destroy', SR._xapiRef)
|
||||
export async function destroy ({ sr }) {
|
||||
const xapi = this.getXapi(sr)
|
||||
if (sr.SR_type === 'xosan') {
|
||||
const config = xapi.xo.getData(sr, 'xosan_config')
|
||||
// we simply forget because the hosted disks are been destroyed with the VMs
|
||||
await xapi.forgetSr(sr._xapiId)
|
||||
await Promise.all(config.nodes.map(node => xapi.deleteVm(node.vm.id, true)))
|
||||
return xapi.deleteNetwork(config.network)
|
||||
}
|
||||
await xapi.destroySr(sr._xapiId)
|
||||
}
|
||||
|
||||
destroy.params = {
|
||||
@@ -57,13 +66,13 @@ destroy.params = {
|
||||
}
|
||||
|
||||
destroy.resolve = {
|
||||
SR: ['id', 'SR', 'administrate']
|
||||
sr: ['id', 'SR', 'administrate']
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
export async function forget ({SR}) {
|
||||
await this.getXapi(SR).call('SR.forget', SR._xapiRef)
|
||||
export async function forget ({ SR }) {
|
||||
await this.getXapi(SR).forgetSr(SR._xapiId)
|
||||
}
|
||||
|
||||
forget.params = {
|
||||
@@ -76,6 +85,34 @@ forget.resolve = {
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
export async function connectAllPbds ({ SR }) {
|
||||
await this.getXapi(SR).connectAllSrPbds(SR._xapiId)
|
||||
}
|
||||
|
||||
connectAllPbds.params = {
|
||||
id: { type: 'string' }
|
||||
}
|
||||
|
||||
connectAllPbds.resolve = {
|
||||
SR: ['id', 'SR', 'administrate']
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
export async function disconnectAllPbds ({ SR }) {
|
||||
await this.getXapi(SR).disconnectAllSrPbds(SR._xapiId)
|
||||
}
|
||||
|
||||
disconnectAllPbds.params = {
|
||||
id: { type: 'string' }
|
||||
}
|
||||
|
||||
disconnectAllPbds.resolve = {
|
||||
SR: ['id', 'SR', 'administrate']
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
export async function createIso ({
|
||||
host,
|
||||
nameLabel,
|
||||
@@ -92,6 +129,7 @@ export async function createIso ({
|
||||
deviceConfig.legacy_mode = 'true'
|
||||
} else if (type === 'smb') {
|
||||
path = path.replace(/\\/g, '/')
|
||||
deviceConfig.type = 'cifs'
|
||||
deviceConfig.username = user
|
||||
deviceConfig.cifspassword = password
|
||||
}
|
||||
@@ -107,7 +145,7 @@ export async function createIso ({
|
||||
nameDescription,
|
||||
'iso', // SR type ISO
|
||||
'iso', // SR content type ISO
|
||||
true,
|
||||
type !== 'local',
|
||||
{}
|
||||
)
|
||||
|
||||
@@ -184,6 +222,51 @@ createNfs.resolve = {
|
||||
host: ['host', 'host', 'administrate']
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
// HBA SR
|
||||
|
||||
// This functions creates an HBA SR
|
||||
|
||||
export async function createHba ({
|
||||
host,
|
||||
nameLabel,
|
||||
nameDescription,
|
||||
scsiId
|
||||
}) {
|
||||
const xapi = this.getXapi(host)
|
||||
|
||||
const deviceConfig = {
|
||||
scsiId
|
||||
}
|
||||
|
||||
const srRef = await xapi.call(
|
||||
'SR.create',
|
||||
host._xapiRef,
|
||||
deviceConfig,
|
||||
'0',
|
||||
nameLabel,
|
||||
nameDescription,
|
||||
'lvmoohba', // SR LVM over HBA
|
||||
'user', // recommended by Citrix
|
||||
true,
|
||||
{}
|
||||
)
|
||||
|
||||
const sr = await xapi.call('SR.get_record', srRef)
|
||||
return sr.uuid
|
||||
}
|
||||
|
||||
createHba.params = {
|
||||
host: { type: 'string' },
|
||||
nameLabel: { type: 'string' },
|
||||
nameDescription: { type: 'string' },
|
||||
scsiId: { type: 'string' }
|
||||
}
|
||||
|
||||
createHba.resolve = {
|
||||
host: ['host', 'host', 'administrate']
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
// Local LVM SR
|
||||
|
||||
@@ -283,6 +366,55 @@ probeNfs.resolve = {
|
||||
host: ['host', 'host', 'administrate']
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
// This function helps to detect all HBA devices on the host
|
||||
|
||||
export async function probeHba ({
|
||||
host
|
||||
}) {
|
||||
const xapi = this.getXapi(host)
|
||||
|
||||
let xml
|
||||
|
||||
try {
|
||||
await xapi.call(
|
||||
'SR.probe',
|
||||
host._xapiRef,
|
||||
'type',
|
||||
{}
|
||||
)
|
||||
|
||||
throw new Error('the call above should have thrown an error')
|
||||
} catch (error) {
|
||||
if (error.code !== 'SR_BACKEND_FAILURE_107') {
|
||||
throw error
|
||||
}
|
||||
|
||||
xml = parseXml(error.params[2])
|
||||
}
|
||||
|
||||
const hbaDevices = []
|
||||
forEach(ensureArray(xml.Devlist.BlockDevice), hbaDevice => {
|
||||
hbaDevices.push({
|
||||
hba: hbaDevice.hba.trim(),
|
||||
path: hbaDevice.path.trim(),
|
||||
scsciId: hbaDevice.SCSIid.trim(),
|
||||
size: hbaDevice.size.trim(),
|
||||
vendor: hbaDevice.vendor.trim()
|
||||
})
|
||||
})
|
||||
|
||||
return hbaDevices
|
||||
}
|
||||
|
||||
probeHba.params = {
|
||||
host: { type: 'string' }
|
||||
}
|
||||
|
||||
probeHba.resolve = {
|
||||
host: ['host', 'host', 'administrate']
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
// ISCSI SR
|
||||
|
||||
@@ -316,7 +448,7 @@ export async function createIscsi ({
|
||||
|
||||
// if we give another port than default iSCSI
|
||||
if (port) {
|
||||
deviceConfig.port = port
|
||||
deviceConfig.port = asInteger(port)
|
||||
}
|
||||
|
||||
const srRef = await xapi.call(
|
||||
@@ -377,7 +509,7 @@ export async function probeIscsiIqns ({
|
||||
|
||||
// if we give another port than default iSCSI
|
||||
if (port) {
|
||||
deviceConfig.port = port
|
||||
deviceConfig.port = asInteger(port)
|
||||
}
|
||||
|
||||
let xml
|
||||
@@ -455,7 +587,7 @@ export async function probeIscsiLuns ({
|
||||
|
||||
// if we give another port than default iSCSI
|
||||
if (port) {
|
||||
deviceConfig.port = port
|
||||
deviceConfig.port = asInteger(port)
|
||||
}
|
||||
|
||||
let xml
|
||||
@@ -534,7 +666,7 @@ export async function probeIscsiExists ({
|
||||
|
||||
// if we give another port than default iSCSI
|
||||
if (port) {
|
||||
deviceConfig.port = port
|
||||
deviceConfig.port = asInteger(port)
|
||||
}
|
||||
|
||||
const xml = parseXml(await xapi.call('SR.probe', host._xapiRef, deviceConfig, 'lvmoiscsi', {}))
|
||||
@@ -542,7 +674,7 @@ export async function probeIscsiExists ({
|
||||
const srs = []
|
||||
forEach(ensureArray(xml['SRlist'].SR), sr => {
|
||||
// get the UUID of SR connected to this LUN
|
||||
srs.push({uuid: sr.UUID.trim()})
|
||||
srs.push({ uuid: sr.UUID.trim() })
|
||||
})
|
||||
|
||||
return srs
|
||||
@@ -584,7 +716,7 @@ export async function probeNfsExists ({
|
||||
|
||||
forEach(ensureArray(xml['SRlist'].SR), sr => {
|
||||
// get the UUID of SR connected to this LUN
|
||||
srs.push({uuid: sr.UUID.trim()})
|
||||
srs.push({ uuid: sr.UUID.trim() })
|
||||
})
|
||||
|
||||
return srs
|
||||
|
||||
67
src/api/system.js
Normal file
67
src/api/system.js
Normal file
@@ -0,0 +1,67 @@
|
||||
import forEach from 'lodash/forEach'
|
||||
import getKeys from 'lodash/keys'
|
||||
import moment from 'moment-timezone'
|
||||
|
||||
import { noSuchObject } from 'xo-common/api-errors'
|
||||
import { version as xoServerVersion } from '../../package.json'
|
||||
|
||||
// ===================================================================
|
||||
|
||||
export function getMethodsInfo () {
|
||||
const methods = {}
|
||||
|
||||
forEach(this.apiMethods, (method, name) => {
|
||||
methods[name] = {
|
||||
description: method.description,
|
||||
params: method.params || {},
|
||||
permission: method.permission
|
||||
}
|
||||
})
|
||||
|
||||
return methods
|
||||
}
|
||||
getMethodsInfo.description = 'returns the signatures of all available API methods'
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
export const getServerTimezone = (tz => () => tz)(moment.tz.guess())
|
||||
getServerTimezone.description = 'return the timezone server'
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
export const getServerVersion = () => xoServerVersion
|
||||
getServerVersion.description = 'return the version of xo-server'
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
export const getVersion = () => '0.1'
|
||||
getVersion.description = 'API version (unstable)'
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
export function listMethods () {
|
||||
return getKeys(this.apiMethods)
|
||||
}
|
||||
listMethods.description = 'returns the name of all available API methods'
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
export function methodSignature ({method: name}) {
|
||||
const method = this.apiMethods[name]
|
||||
|
||||
if (!method) {
|
||||
throw noSuchObject()
|
||||
}
|
||||
|
||||
// Return an array for compatibility with XML-RPC.
|
||||
return [
|
||||
// XML-RPC require the name of the method.
|
||||
{
|
||||
name,
|
||||
description: method.description,
|
||||
params: method.params || {},
|
||||
permission: method.permission
|
||||
}
|
||||
]
|
||||
}
|
||||
methodSignature.description = 'returns the signature of an API method'
|
||||
@@ -36,9 +36,9 @@ hasPermission.params = {
|
||||
|
||||
export function wait ({duration, returnValue}) {
|
||||
return new Promise(resolve => {
|
||||
setTimeout(+duration, () => {
|
||||
setTimeout(() => {
|
||||
resolve(returnValue)
|
||||
})
|
||||
}, +duration)
|
||||
})
|
||||
}
|
||||
|
||||
|
||||
@@ -1,10 +1,10 @@
|
||||
import {InvalidParameters} from '../api-errors'
|
||||
import { mapToArray } from '../utils'
|
||||
import {invalidParameters} from 'xo-common/api-errors'
|
||||
import { getUserPublicProperties, mapToArray } from '../utils'
|
||||
|
||||
// ===================================================================
|
||||
|
||||
export async function create ({email, password, permission}) {
|
||||
return (await this.createUser(email, {password, permission})).id
|
||||
return (await this.createUser({email, password, permission})).id
|
||||
}
|
||||
|
||||
create.description = 'creates a new user'
|
||||
@@ -22,7 +22,7 @@ create.params = {
|
||||
// Deletes an existing user.
|
||||
async function delete_ ({id}) {
|
||||
if (id === this.session.get('user_id')) {
|
||||
throw new InvalidParameters('an user cannot delete itself')
|
||||
throw invalidParameters('a user cannot delete itself')
|
||||
}
|
||||
|
||||
await this.deleteUser(id)
|
||||
@@ -48,7 +48,7 @@ export async function getAll () {
|
||||
const users = await this.getAllUsers()
|
||||
|
||||
// Filters out private properties.
|
||||
return mapToArray(users, this.getUserPublicProperties)
|
||||
return mapToArray(users, getUserPublicProperties)
|
||||
}
|
||||
|
||||
getAll.description = 'returns all the existing users'
|
||||
@@ -57,19 +57,29 @@ getAll.permission = 'admin'
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
export async function set ({id, email, password, permission}) {
|
||||
await this.updateUser(id, {email, password, permission})
|
||||
export async function set ({id, email, password, permission, preferences}) {
|
||||
const isAdmin = this.user && this.user.permission === 'admin'
|
||||
if (isAdmin) {
|
||||
if (permission && id === this.session.get('user_id')) {
|
||||
throw invalidParameters('a user cannot change its own permission')
|
||||
}
|
||||
} else if (email || password || permission) {
|
||||
throw invalidParameters('this properties can only changed by an administrator')
|
||||
}
|
||||
|
||||
await this.updateUser(id, {email, password, permission, preferences})
|
||||
}
|
||||
|
||||
set.description = 'changes the properties of an existing user'
|
||||
|
||||
set.permission = 'admin'
|
||||
set.permission = ''
|
||||
|
||||
set.params = {
|
||||
id: { type: 'string' },
|
||||
email: { type: 'string', optional: true },
|
||||
password: { type: 'string', optional: true },
|
||||
permission: { type: 'string', optional: true }
|
||||
permission: { type: 'string', optional: true },
|
||||
preferences: { type: 'object', optional: true }
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
@@ -1,12 +1,11 @@
|
||||
# FIXME: rename to disk.*
|
||||
|
||||
$isArray = require 'lodash.isarray'
|
||||
{coroutine: $coroutine} = require 'bluebird'
|
||||
|
||||
{format} = require 'json-rpc-peer'
|
||||
{InvalidParameters} = require '../api-errors'
|
||||
{parseSize} = require '../utils'
|
||||
{JsonRpcError} = require '../api-errors'
|
||||
{invalidParameters} = require 'xo-common/api-errors'
|
||||
{isArray: $isArray, parseSize} = require '../utils'
|
||||
{JsonRpcError} = require 'json-rpc-peer'
|
||||
|
||||
#=====================================================================
|
||||
|
||||
@@ -39,7 +38,7 @@ set = $coroutine (params) ->
|
||||
size = parseSize(params.size)
|
||||
|
||||
if size < vdi.size
|
||||
throw new InvalidParameters(
|
||||
throw invalidParameters(
|
||||
"cannot set new size (#{size}) below the current size (#{vdi.size})"
|
||||
)
|
||||
yield xapi.resizeVdi(ref, size)
|
||||
|
||||
102
src/api/vif.js
102
src/api/vif.js
@@ -1,5 +1,19 @@
|
||||
import {
|
||||
diffItems,
|
||||
noop,
|
||||
pCatch
|
||||
} from '../utils'
|
||||
|
||||
// ===================================================================
|
||||
|
||||
// TODO: move into vm and rename to removeInterface
|
||||
async function delete_ ({vif}) {
|
||||
this.allocIpAddresses(
|
||||
vif.id,
|
||||
null,
|
||||
vif.allowedIpv4Addresses.concat(vif.allowedIpv6Addresses)
|
||||
)::pCatch(noop)
|
||||
|
||||
await this.getXapi(vif).deleteVif(vif._xapiId)
|
||||
}
|
||||
export {delete_ as delete}
|
||||
@@ -13,10 +27,11 @@ delete_.resolve = {
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
// TODO: move into vm and rename to disconnectInterface
|
||||
export async function disconnect ({vif}) {
|
||||
// TODO: check if VIF is attached before
|
||||
await this.getXapi(vif).call('VIF.unplug_force', vif._xapiRef)
|
||||
await this.getXapi(vif).disconnectVif(vif._xapiId)
|
||||
}
|
||||
|
||||
disconnect.params = {
|
||||
@@ -31,7 +46,7 @@ disconnect.resolve = {
|
||||
// TODO: move into vm and rename to connectInterface
|
||||
export async function connect ({vif}) {
|
||||
// TODO: check if VIF is attached before
|
||||
await this.getXapi(vif).call('VIF.plug', vif._xapiRef)
|
||||
await this.getXapi(vif).connectVif(vif._xapiId)
|
||||
}
|
||||
|
||||
connect.params = {
|
||||
@@ -41,3 +56,86 @@ connect.params = {
|
||||
connect.resolve = {
|
||||
vif: ['id', 'VIF', 'operate']
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
export async function set ({
|
||||
vif,
|
||||
network,
|
||||
mac,
|
||||
allowedIpv4Addresses,
|
||||
allowedIpv6Addresses,
|
||||
attached
|
||||
}) {
|
||||
const oldIpAddresses = vif.allowedIpv4Addresses.concat(vif.allowedIpv6Addresses)
|
||||
const newIpAddresses = []
|
||||
{
|
||||
const { push } = newIpAddresses
|
||||
push.apply(newIpAddresses, allowedIpv4Addresses || vif.allowedIpv4Addresses)
|
||||
push.apply(newIpAddresses, allowedIpv6Addresses || vif.allowedIpv6Addresses)
|
||||
}
|
||||
|
||||
if (network || mac) {
|
||||
const xapi = this.getXapi(vif)
|
||||
|
||||
const vm = xapi.getObject(vif.$VM)
|
||||
mac == null && (mac = vif.MAC)
|
||||
network = xapi.getObject((network && network.id) || vif.$network)
|
||||
attached == null && (attached = vif.attached)
|
||||
|
||||
await this.allocIpAddresses(vif.id, null, oldIpAddresses)
|
||||
await xapi.deleteVif(vif._xapiId)
|
||||
|
||||
// create new VIF with new parameters
|
||||
const newVif = await xapi.createVif(vm.$id, network.$id, {
|
||||
mac,
|
||||
currently_attached: attached,
|
||||
ipv4_allowed: newIpAddresses
|
||||
})
|
||||
|
||||
await this.allocIpAddresses(newVif.$id, newIpAddresses)
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
const [ addAddresses, removeAddresses ] = diffItems(
|
||||
newIpAddresses,
|
||||
oldIpAddresses
|
||||
)
|
||||
await this.allocIpAddresses(
|
||||
vif.id,
|
||||
addAddresses,
|
||||
removeAddresses
|
||||
)
|
||||
|
||||
return this.getXapi(vif).editVif(vif._xapiId, {
|
||||
ipv4Allowed: allowedIpv4Addresses,
|
||||
ipv6Allowed: allowedIpv6Addresses
|
||||
})
|
||||
}
|
||||
|
||||
set.params = {
|
||||
id: { type: 'string' },
|
||||
network: { type: 'string', optional: true },
|
||||
mac: { type: 'string', optional: true },
|
||||
allowedIpv4Addresses: {
|
||||
type: 'array',
|
||||
items: {
|
||||
type: 'string'
|
||||
},
|
||||
optional: true
|
||||
},
|
||||
allowedIpv6Addresses: {
|
||||
type: 'array',
|
||||
items: {
|
||||
type: 'string'
|
||||
},
|
||||
optional: true
|
||||
},
|
||||
attached: { type: 'boolean', optional: true }
|
||||
}
|
||||
|
||||
set.resolve = {
|
||||
vif: ['id', 'VIF', 'operate'],
|
||||
network: ['network', 'network', 'operate']
|
||||
}
|
||||
|
||||
@@ -1,25 +1,29 @@
|
||||
$assign = require 'lodash.assign'
|
||||
$assign = require 'lodash/assign'
|
||||
$debug = (require 'debug') 'xo:api:vm'
|
||||
$filter = require 'lodash.filter'
|
||||
$findIndex = require 'lodash.findindex'
|
||||
$findWhere = require 'lodash.find'
|
||||
$isArray = require 'lodash.isarray'
|
||||
endsWith = require 'lodash.endswith'
|
||||
$filter = require 'lodash/filter'
|
||||
$findIndex = require 'lodash/findIndex'
|
||||
$findWhere = require 'lodash/find'
|
||||
concat = require 'lodash/concat'
|
||||
endsWith = require 'lodash/endsWith'
|
||||
escapeStringRegexp = require 'escape-string-regexp'
|
||||
eventToPromise = require 'event-to-promise'
|
||||
sortBy = require 'lodash.sortby'
|
||||
startsWith = require 'lodash.startswith'
|
||||
merge = require 'lodash/merge'
|
||||
sortBy = require 'lodash/sortBy'
|
||||
startsWith = require 'lodash/startsWith'
|
||||
{coroutine: $coroutine} = require 'bluebird'
|
||||
{format} = require 'json-rpc-peer'
|
||||
|
||||
{
|
||||
GenericError,
|
||||
Unauthorized
|
||||
} = require('../api-errors')
|
||||
forbiddenOperation,
|
||||
invalidParameters,
|
||||
unauthorized
|
||||
} = require('xo-common/api-errors')
|
||||
{
|
||||
forEach,
|
||||
formatXml: $js2xml,
|
||||
isArray: $isArray,
|
||||
map,
|
||||
mapFilter,
|
||||
mapToArray,
|
||||
noop,
|
||||
parseSize,
|
||||
@@ -27,7 +31,7 @@ startsWith = require 'lodash.startswith'
|
||||
pCatch,
|
||||
pFinally
|
||||
} = require '../utils'
|
||||
{isVmRunning: $isVMRunning} = require('../xapi')
|
||||
{isVmRunning: $isVmRunning} = require('../xapi')
|
||||
|
||||
#=====================================================================
|
||||
|
||||
@@ -47,39 +51,51 @@ checkPermissionOnSrs = (vm, permission = 'operate') -> (
|
||||
)
|
||||
|
||||
return @hasPermissions(@session.get('user_id'), permissions).then((success) => (
|
||||
throw new Unauthorized() unless success
|
||||
throw unauthorized() unless success
|
||||
))
|
||||
)
|
||||
|
||||
#=====================================================================
|
||||
|
||||
# TODO: Implement ACLs
|
||||
create = $coroutine ({
|
||||
resourceSet
|
||||
installation
|
||||
name_description
|
||||
name_label
|
||||
template
|
||||
pv_args
|
||||
VDIs
|
||||
VIFs
|
||||
existingDisks
|
||||
}) ->
|
||||
{ user } = this
|
||||
unless user
|
||||
throw new Unauthorized()
|
||||
extract = (obj, prop) ->
|
||||
value = obj[prop]
|
||||
delete obj[prop]
|
||||
return value
|
||||
|
||||
# TODO: Implement ACLs
|
||||
create = $coroutine (params) ->
|
||||
{ user } = this
|
||||
resourceSet = extract(params, 'resourceSet')
|
||||
if not resourceSet and user.permission isnt 'admin'
|
||||
throw unauthorized()
|
||||
|
||||
template = extract(params, 'template')
|
||||
params.template = template._xapiId
|
||||
|
||||
xapi = this.getXapi(template)
|
||||
|
||||
limits = {
|
||||
cpus: template.CPUs.number,
|
||||
disk: 0,
|
||||
memory: template.memory.size,
|
||||
vms: 1
|
||||
}
|
||||
objectIds = [
|
||||
template.id
|
||||
]
|
||||
limits = {
|
||||
cpus: template.CPUs.number,
|
||||
disk: 0,
|
||||
memory: template.memory.dynamic[1],
|
||||
vms: 1
|
||||
}
|
||||
vdiSizesByDevice = {}
|
||||
forEach(xapi.getObject(template._xapiId).$VBDs, (vbd) =>
|
||||
if (
|
||||
vbd.type is 'Disk' and
|
||||
(vdi = vbd.$VDI)
|
||||
)
|
||||
vdiSizesByDevice[vbd.userdevice] = +vdi.virtual_size
|
||||
|
||||
xapiVdis = VDIs and map(VDIs, (vdi) =>
|
||||
return
|
||||
)
|
||||
|
||||
vdis = extract(params, 'VDIs')
|
||||
params.vdis = vdis and map(vdis, (vdi) =>
|
||||
sr = @getObject(vdi.SR)
|
||||
size = parseSize(vdi.size)
|
||||
|
||||
@@ -87,31 +103,18 @@ create = $coroutine ({
|
||||
limits.disk += size
|
||||
|
||||
return $assign({}, vdi, {
|
||||
device: vdi.device ? vdi.position,
|
||||
device: vdi.userdevice ? vdi.device ? vdi.position,
|
||||
size,
|
||||
SR: sr._xapiId,
|
||||
type: vdi.type
|
||||
})
|
||||
)
|
||||
|
||||
xapi = @getXapi(template)
|
||||
|
||||
diskSizesByDevice = {}
|
||||
|
||||
forEach(xapi.getObject(template._xapiId).$VBDs, (vbd) =>
|
||||
if (
|
||||
vbd.type is 'Disk' and
|
||||
(vdi = vbd.$VDI)
|
||||
)
|
||||
diskSizesByDevice[vbd.device] = +vdi.virtual_size
|
||||
|
||||
return
|
||||
)
|
||||
|
||||
xapiExistingVdis = existingDisks and map(existingDisks, (vdi, device) =>
|
||||
existingVdis = extract(params, 'existingDisks')
|
||||
params.existingVdis = existingVdis and map(existingVdis, (vdi, userdevice) =>
|
||||
if vdi.size?
|
||||
size = parseSize(vdi.size)
|
||||
diskSizesByDevice[device] = size
|
||||
vdiSizesByDevice[userdevice] = size
|
||||
|
||||
if vdi.$SR
|
||||
sr = @getObject(vdi.$SR)
|
||||
@@ -123,9 +126,10 @@ create = $coroutine ({
|
||||
})
|
||||
)
|
||||
|
||||
forEach(diskSizesByDevice, (size) => limits.disk += size)
|
||||
forEach(vdiSizesByDevice, (size) => limits.disk += size)
|
||||
|
||||
xapiVifs = VIFs and map(VIFs, (vif) =>
|
||||
vifs = extract(params, 'VIFs')
|
||||
params.vifs = vifs and map(vifs, (vif) =>
|
||||
network = @getObject(vif.network)
|
||||
|
||||
objectIds.push(network.id)
|
||||
@@ -133,36 +137,70 @@ create = $coroutine ({
|
||||
return {
|
||||
mac: vif.mac
|
||||
network: network._xapiId
|
||||
ipv4_allowed: vif.allowedIpv4Addresses
|
||||
ipv6_allowed: vif.allowedIpv6Addresses
|
||||
}
|
||||
)
|
||||
|
||||
installation = extract(params, 'installation')
|
||||
params.installRepository = installation && installation.repository
|
||||
|
||||
checkLimits = null
|
||||
|
||||
if resourceSet
|
||||
yield this.checkResourceSetConstraints(resourceSet, user.id, objectIds)
|
||||
yield this.allocateLimitsInResourceSet(limits, resourceSet)
|
||||
else unless user.permission is 'admin'
|
||||
throw new Unauthorized()
|
||||
|
||||
xapiVm = yield xapi.createVm(template._xapiId, {
|
||||
installRepository: installation && installation.repository,
|
||||
nameDescription: name_description,
|
||||
nameLabel: name_label,
|
||||
pvArgs: pv_args,
|
||||
vdis: xapiVdis,
|
||||
vifs: xapiVifs,
|
||||
existingVdis: xapiExistingVdis
|
||||
})
|
||||
checkLimits = $coroutine (limits2) =>
|
||||
yield this.allocateLimitsInResourceSet(limits, resourceSet)
|
||||
yield this.allocateLimitsInResourceSet(limits2, resourceSet)
|
||||
|
||||
xapiVm = yield xapi.createVm(template._xapiId, params, checkLimits)
|
||||
vm = xapi.xo.addObject(xapiVm)
|
||||
|
||||
if resourceSet
|
||||
yield Promise.all([
|
||||
@addAcl(user.id, vm.id, 'admin'),
|
||||
if params.share
|
||||
$resourceSet = yield @getResourceSet(resourceSet)
|
||||
Promise.all(map($resourceSet.subjects, (subjectId) => @addAcl(subjectId, vm.id, 'admin')))
|
||||
else
|
||||
@addAcl(user.id, vm.id, 'admin')
|
||||
|
||||
xapi.xo.setData(xapiVm.$id, 'resourceSet', resourceSet)
|
||||
])
|
||||
|
||||
for vifId in vm.VIFs
|
||||
vif = @getObject(vifId, 'VIF')
|
||||
yield this.allocIpAddresses(vifId, concat(vif.allowedIpv4Addresses, vif.allowedIpv6Addresses)).catch(() =>
|
||||
xapi.deleteVif(vif._xapiId)
|
||||
)
|
||||
|
||||
if params.bootAfterCreate
|
||||
pCatch.call(xapi.startVm(vm._xapiId), noop)
|
||||
|
||||
return vm.id
|
||||
|
||||
create.params = {
|
||||
affinityHost: { type: 'string', optional: true }
|
||||
|
||||
bootAfterCreate: {
|
||||
type: 'boolean'
|
||||
optional: true
|
||||
}
|
||||
|
||||
cloudConfig: {
|
||||
type: 'string'
|
||||
optional: true
|
||||
}
|
||||
|
||||
coreOs: {
|
||||
type: 'boolean'
|
||||
optional: true
|
||||
}
|
||||
|
||||
clone: {
|
||||
type: 'boolean'
|
||||
optional: true
|
||||
}
|
||||
|
||||
resourceSet: {
|
||||
type: 'string',
|
||||
optional: true
|
||||
@@ -184,6 +222,12 @@ create.params = {
|
||||
# PV Args
|
||||
pv_args: { type: 'string', optional: true }
|
||||
|
||||
|
||||
share: {
|
||||
type: 'boolean',
|
||||
optional: true
|
||||
}
|
||||
|
||||
# TODO: add the install repository!
|
||||
# VBD.insert/eject
|
||||
# Also for the console!
|
||||
@@ -193,6 +237,7 @@ create.params = {
|
||||
|
||||
# Virtual interfaces to create for the new VM.
|
||||
VIFs: {
|
||||
optional: true
|
||||
type: 'array'
|
||||
items: {
|
||||
type: 'object'
|
||||
@@ -204,6 +249,18 @@ create.params = {
|
||||
optional: true # Auto-generated per default.
|
||||
type: 'string'
|
||||
}
|
||||
|
||||
allowedIpv4Addresses: {
|
||||
optional: true
|
||||
type: 'array'
|
||||
items: { type: 'string' }
|
||||
}
|
||||
|
||||
allowedIpv6Addresses: {
|
||||
optional: true
|
||||
type: 'array'
|
||||
items: { type: 'string' }
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -246,19 +303,43 @@ create.params = {
|
||||
}
|
||||
|
||||
create.resolve = {
|
||||
template: ['template', 'VM-template', 'administrate'],
|
||||
template: ['template', 'VM-template', ''],
|
||||
}
|
||||
|
||||
exports.create = create
|
||||
|
||||
#---------------------------------------------------------------------
|
||||
|
||||
delete_ = ({vm, delete_disks: deleteDisks}) ->
|
||||
delete_ = $coroutine ({vm, delete_disks: deleteDisks = false }) ->
|
||||
cpus = vm.CPUs.number
|
||||
memory = vm.memory.size
|
||||
|
||||
xapi = @getXapi(vm)
|
||||
|
||||
@getAllAcls().then((acls) =>
|
||||
Promise.all(mapFilter(acls, (acl) =>
|
||||
if (acl.object == vm.id)
|
||||
return pCatch.call(
|
||||
@removeAcl(acl.subject, acl.object, acl.action),
|
||||
noop
|
||||
)
|
||||
))
|
||||
)
|
||||
|
||||
# Update IP pools
|
||||
yield Promise.all(map(vm.VIFs, (vifId) =>
|
||||
vif = xapi.getObject(vifId)
|
||||
return pCatch.call(
|
||||
this.allocIpAddresses(
|
||||
vifId,
|
||||
null,
|
||||
concat(vif.ipv4_allowed, vif.ipv6_allowed)
|
||||
),
|
||||
noop
|
||||
)
|
||||
))
|
||||
|
||||
# Update resource sets
|
||||
resourceSet = xapi.xo.getData(vm._xapiId, 'resourceSet')
|
||||
if resourceSet?
|
||||
disk = 0
|
||||
@@ -275,10 +356,16 @@ delete_ = ({vm, delete_disks: deleteDisks}) ->
|
||||
return
|
||||
)
|
||||
|
||||
pCatch.call(@releaseLimitsInResourceSet(
|
||||
@computeVmResourcesUsage(vm),
|
||||
resourceSet
|
||||
), noop)
|
||||
resourceSetUsage = @computeVmResourcesUsage(vm)
|
||||
ipPoolsUsage = yield @computeVmIpPoolsUsage(vm)
|
||||
|
||||
pCatch.call(
|
||||
@releaseLimitsInResourceSet(
|
||||
merge(resourceSetUsage, ipPoolsUsage),
|
||||
resourceSet
|
||||
),
|
||||
noop
|
||||
)
|
||||
|
||||
return xapi.deleteVm(vm._xapiId, deleteDisks)
|
||||
|
||||
@@ -361,7 +448,7 @@ migrate = $coroutine ({
|
||||
])
|
||||
|
||||
unless yield @hasPermissions(@session.get('user_id'), permissions)
|
||||
throw new Unauthorized()
|
||||
throw unauthorized()
|
||||
|
||||
yield @getXapi(vm).migrateVm(vm._xapiId, @getXapi(host), host._xapiId, {
|
||||
migrationNetworkId: migrationNetwork?._xapiId
|
||||
@@ -398,99 +485,23 @@ exports.migrate = migrate
|
||||
|
||||
#---------------------------------------------------------------------
|
||||
|
||||
# FIXME: human readable strings should be handled.
|
||||
set = $coroutine (params) ->
|
||||
{VM} = params
|
||||
xapi = @getXapi VM
|
||||
set = (params) ->
|
||||
VM = extract(params, 'VM')
|
||||
xapi = @getXapi(VM)
|
||||
|
||||
{_xapiRef: ref} = VM
|
||||
return xapi.editVm(VM._xapiId, params, $coroutine (limits, vm) =>
|
||||
resourceSet = xapi.xo.getData(vm, 'resourceSet')
|
||||
|
||||
resourceSet = xapi.xo.getData(ref, 'resourceSet')
|
||||
if (resourceSet)
|
||||
try
|
||||
return yield @allocateLimitsInResourceSet(limits, resourceSet)
|
||||
catch error
|
||||
# if the resource set no longer exist, behave as if the VM is free
|
||||
throw error unless noSuchObject.is(error)
|
||||
|
||||
# Memory.
|
||||
if 'memory' of params
|
||||
memory = parseSize(params.memory)
|
||||
|
||||
if memory < VM.memory.static[0]
|
||||
@throw(
|
||||
'INVALID_PARAMS'
|
||||
"cannot set memory below the static minimum (#{VM.memory.static[0]})"
|
||||
)
|
||||
|
||||
if ($isVMRunning VM) and memory > VM.memory.static[1]
|
||||
@throw(
|
||||
'INVALID_PARAMS'
|
||||
"cannot set memory above the static maximum (#{VM.memory.static[1]}) "+
|
||||
"for a running VM"
|
||||
)
|
||||
|
||||
if memory < VM.memory.dynamic[0]
|
||||
yield xapi.call 'VM.set_memory_dynamic_min', ref, "#{memory}"
|
||||
else if memory > VM.memory.static[1]
|
||||
yield xapi.call 'VM.set_memory_static_max', ref, "#{memory}"
|
||||
if resourceSet?
|
||||
yield @allocateLimitsInResourceSet({
|
||||
memory: memory - VM.memory.size
|
||||
}, resourceSet)
|
||||
yield xapi.call 'VM.set_memory_dynamic_max', ref, "#{memory}"
|
||||
|
||||
# Number of CPUs.
|
||||
if 'CPUs' of params
|
||||
{CPUs} = params
|
||||
|
||||
if resourceSet?
|
||||
yield @allocateLimitsInResourceSet({
|
||||
cpus: CPUs - VM.CPUs.number
|
||||
}, resourceSet)
|
||||
if $isVMRunning VM
|
||||
if CPUs > VM.CPUs.max
|
||||
@throw(
|
||||
'INVALID_PARAMS'
|
||||
"cannot set CPUs above the static maximum (#{VM.CPUs.max}) "+
|
||||
"for a running VM"
|
||||
)
|
||||
yield xapi.call 'VM.set_VCPUs_number_live', ref, "#{CPUs}"
|
||||
else
|
||||
if CPUs > VM.CPUs.max
|
||||
yield xapi.call 'VM.set_VCPUs_max', ref, "#{CPUs}"
|
||||
yield xapi.call 'VM.set_VCPUs_at_startup', ref, "#{CPUs}"
|
||||
|
||||
# HA policy
|
||||
# TODO: also handle "best-effort" case
|
||||
if 'high_availability' of params
|
||||
{high_availability} = params
|
||||
|
||||
if high_availability
|
||||
yield xapi.call 'VM.set_ha_restart_priority', ref, "restart"
|
||||
else
|
||||
yield xapi.call 'VM.set_ha_restart_priority', ref, ""
|
||||
|
||||
if 'auto_poweron' of params
|
||||
{auto_poweron} = params
|
||||
|
||||
if auto_poweron
|
||||
yield xapi.call 'VM.add_to_other_config', ref, 'auto_poweron', 'true'
|
||||
yield xapi.setPoolProperties({autoPowerOn: true})
|
||||
else
|
||||
yield xapi.call 'VM.remove_from_other_config', ref, 'auto_poweron'
|
||||
|
||||
if 'cpuWeight' of params
|
||||
if resourceSet? and this.user.permission isnt 'admin'
|
||||
throw new Unauthorized()
|
||||
yield xapi.setVcpuWeight(VM._xapiId, params.cpuWeight)
|
||||
|
||||
# Other fields.
|
||||
for param, fields of {
|
||||
'name_label'
|
||||
'name_description'
|
||||
'PV_args'
|
||||
}
|
||||
continue unless param of params
|
||||
|
||||
for field in (if $isArray fields then fields else [fields])
|
||||
yield xapi.call "VM.set_#{field}", ref, "#{params[param]}"
|
||||
|
||||
return true
|
||||
if (limits.cpuWeight && this.user.permission != 'admin')
|
||||
throw unauthorized()
|
||||
)
|
||||
|
||||
set.params = {
|
||||
# Identifier of the VM to update.
|
||||
@@ -509,19 +520,34 @@ set.params = {
|
||||
# Number of virtual CPUs to allocate.
|
||||
CPUs: { type: 'integer', optional: true }
|
||||
|
||||
cpusMax: { type: ['integer', 'string'], optional: true }
|
||||
|
||||
# Memory to allocate (in bytes).
|
||||
#
|
||||
# Note: static_min ≤ dynamic_min ≤ dynamic_max ≤ static_max
|
||||
memory: { type: ['integer', 'string'], optional: true }
|
||||
|
||||
# Set dynamic_min
|
||||
memoryMin: { type: ['integer', 'string'], optional: true }
|
||||
|
||||
# Set dynamic_max
|
||||
memoryMax: { type: ['integer', 'string'], optional: true }
|
||||
|
||||
# Set static_max
|
||||
memoryStaticMax: { type: ['integer', 'string'], optional: true }
|
||||
|
||||
# Kernel arguments for PV VM.
|
||||
PV_args: { type: 'string', optional: true }
|
||||
|
||||
cpuWeight: { type: 'integer', optional: true}
|
||||
cpuWeight: { type: ['integer', 'null'], optional: true }
|
||||
|
||||
cpuCap: { type: ['integer', 'null'], optional: true }
|
||||
|
||||
affinityHost: { type: ['string', 'null'], optional: true }
|
||||
}
|
||||
|
||||
set.resolve = {
|
||||
VM: ['id', ['VM', 'VM-snapshot'], 'administrate']
|
||||
VM: ['id', ['VM', 'VM-snapshot', 'VM-template'], 'administrate']
|
||||
}
|
||||
|
||||
exports.set = set
|
||||
@@ -609,7 +635,7 @@ copy.params = {
|
||||
}
|
||||
|
||||
copy.resolve = {
|
||||
vm: [ 'vm', 'VM', 'administrate' ]
|
||||
vm: [ 'vm', ['VM', 'VM-snapshot'], 'administrate' ]
|
||||
sr: [ 'sr', 'SR', 'operate' ]
|
||||
}
|
||||
|
||||
@@ -622,7 +648,7 @@ convertToTemplate = $coroutine ({vm}) ->
|
||||
unless yield @hasPermissions(@session.get('user_id'), [
|
||||
[ vm.$pool, 'administrate' ]
|
||||
])
|
||||
throw new Unauthorized()
|
||||
throw unauthorized()
|
||||
|
||||
yield @getXapi(vm).call 'VM.set_is_a_template', vm._xapiRef, true
|
||||
|
||||
@@ -646,12 +672,12 @@ exports.convert = convertToTemplate
|
||||
snapshot = $coroutine ({vm, name}) ->
|
||||
yield checkPermissionOnSrs.call(this, vm)
|
||||
|
||||
snapshot = yield @getXapi(vm).snapshotVm(vm._xapiRef, name)
|
||||
snapshot = yield @getXapi(vm).snapshotVm(vm._xapiRef, name ? "#{vm.name_label}_#{new Date().toISOString()}")
|
||||
return snapshot.$id
|
||||
|
||||
snapshot.params = {
|
||||
id: { type: 'string' }
|
||||
name: { type: 'string' }
|
||||
name: { type: 'string', optional: true }
|
||||
}
|
||||
|
||||
snapshot.resolve = {
|
||||
@@ -670,14 +696,14 @@ rollingDeltaBackup = $coroutine ({vm, remote, tag, depth}) ->
|
||||
})
|
||||
|
||||
rollingDeltaBackup.params = {
|
||||
vm: { type: 'string' }
|
||||
id: { type: 'string' }
|
||||
remote: { type: 'string' }
|
||||
tag: { type: 'string'}
|
||||
depth: { type: ['string', 'number'] }
|
||||
}
|
||||
|
||||
rollingDeltaBackup.resolve = {
|
||||
vm: ['vm', ['VM', 'VM-snapshot'], 'administrate']
|
||||
vm: ['id', ['VM', 'VM-snapshot'], 'administrate']
|
||||
}
|
||||
|
||||
rollingDeltaBackup.permission = 'admin'
|
||||
@@ -708,12 +734,12 @@ exports.importDeltaBackup = importDeltaBackup
|
||||
deltaCopy = ({ vm, sr }) -> @deltaCopyVm(vm, sr)
|
||||
|
||||
deltaCopy.params = {
|
||||
vm: { type: 'string' },
|
||||
id: { type: 'string' },
|
||||
sr: { type: 'string' }
|
||||
}
|
||||
|
||||
deltaCopy.resolve = {
|
||||
vm: [ 'vm', 'VM', 'operate'],
|
||||
vm: [ 'id', 'VM', 'operate'],
|
||||
sr: [ 'sr', 'SR', 'operate']
|
||||
}
|
||||
|
||||
@@ -764,9 +790,7 @@ exports.backup = backup
|
||||
|
||||
#---------------------------------------------------------------------
|
||||
|
||||
importBackup = $coroutine ({remote, file, sr}) ->
|
||||
yield @importVmBackup(remote, file, sr)
|
||||
return
|
||||
importBackup = ({remote, file, sr}) -> @importVmBackup(remote, file, sr)
|
||||
|
||||
importBackup.permission = 'admin'
|
||||
importBackup.description = 'Imports a VM into host, from a file found in the chosen remote'
|
||||
@@ -816,21 +840,30 @@ exports.rollingBackup = rollingBackup
|
||||
|
||||
#---------------------------------------------------------------------
|
||||
|
||||
rollingDrCopy = ({vm, pool, tag, depth}) ->
|
||||
if vm.$pool is pool.id
|
||||
throw new GenericError('Disaster Recovery attempts to copy on the same pool')
|
||||
return @rollingDrCopyVm({vm, sr: @getObject(pool.default_SR, 'SR'), tag, depth})
|
||||
rollingDrCopy = ({vm, pool, sr, tag, depth}) ->
|
||||
unless sr
|
||||
unless pool
|
||||
throw invalidParameters('either pool or sr param should be specified')
|
||||
|
||||
if vm.$pool is pool.id
|
||||
throw forbiddenOperation('Disaster Recovery attempts to copy on the same pool')
|
||||
|
||||
sr = @getObject(pool.default_SR, 'SR')
|
||||
|
||||
return @rollingDrCopyVm({vm, sr, tag, depth})
|
||||
|
||||
rollingDrCopy.params = {
|
||||
id: { type: 'string' }
|
||||
pool: { type: 'string' }
|
||||
tag: { type: 'string'}
|
||||
depth: { type: 'number' }
|
||||
id: { type: 'string' }
|
||||
pool: { type: 'string', optional: true }
|
||||
sr: { type: 'string', optional: true }
|
||||
tag: { type: 'string'}
|
||||
}
|
||||
|
||||
rollingDrCopy.resolve = {
|
||||
vm: ['id', ['VM', 'VM-snapshot'], 'administrate'],
|
||||
pool: ['pool', 'pool', 'administrate']
|
||||
sr: ['sr', 'SR', 'administrate']
|
||||
}
|
||||
|
||||
rollingDrCopy.description = 'Copies a VM to a different pool, with a tagged name, and removes the oldest VM with the same tag from this pool, according to depth'
|
||||
@@ -871,8 +904,7 @@ stop = $coroutine ({vm, force}) ->
|
||||
yield xapi.call 'VM.clean_shutdown', vm._xapiRef
|
||||
catch error
|
||||
if error.code is 'VM_MISSING_PV_DRIVERS' or error.code is 'VM_LACKS_FEATURE_SHUTDOWN'
|
||||
# TODO: Improve reporting: this message is unclear.
|
||||
@throw 'INVALID_PARAMS'
|
||||
throw invalidParameters('clean shutdown requires PV drivers')
|
||||
else
|
||||
throw error
|
||||
|
||||
@@ -907,18 +939,11 @@ exports.suspend = suspend
|
||||
|
||||
#---------------------------------------------------------------------
|
||||
|
||||
resume = $coroutine ({vm, force}) ->
|
||||
# FIXME: WTF this is?
|
||||
if not force
|
||||
force = true
|
||||
|
||||
yield @getXapi(vm).call 'VM.resume', vm._xapiRef, false, force
|
||||
|
||||
return true
|
||||
resume = ({vm}) ->
|
||||
return @getXapi(vm).resumeVm(vm._xapiId)
|
||||
|
||||
resume.params = {
|
||||
id: { type: 'string' }
|
||||
force: { type: 'boolean', optional: true }
|
||||
}
|
||||
|
||||
resume.resolve = {
|
||||
@@ -928,15 +953,12 @@ exports.resume = resume
|
||||
|
||||
#---------------------------------------------------------------------
|
||||
|
||||
# revert a snapshot to its parent VM
|
||||
revert = $coroutine ({snapshot}) ->
|
||||
# Attempts a revert from this snapshot to its parent VM
|
||||
yield @getXapi(snapshot).call 'VM.revert', snapshot._xapiRef
|
||||
|
||||
return true
|
||||
revert = ({snapshot, snapshotBefore}) ->
|
||||
return @getXapi(snapshot).revertVm(snapshot._xapiId, snapshotBefore)
|
||||
|
||||
revert.params = {
|
||||
id: { type: 'string' }
|
||||
id: { type: 'string' },
|
||||
snapshotBefore: { type: 'boolean', optional: true }
|
||||
}
|
||||
|
||||
revert.resolve = {
|
||||
@@ -996,30 +1018,33 @@ exports.export = export_;
|
||||
|
||||
#---------------------------------------------------------------------
|
||||
|
||||
handleVmImport = $coroutine (req, res, { xapi, srId }) ->
|
||||
handleVmImport = $coroutine (req, res, { data, srId, type, xapi }) ->
|
||||
# Timeout seems to be broken in Node 4.
|
||||
# See https://github.com/nodejs/node/issues/3319
|
||||
req.setTimeout(43200000) # 12 hours
|
||||
|
||||
try
|
||||
vm = yield xapi.importVm(req, { srId })
|
||||
vm = yield xapi.importVm(req, { data, srId, type })
|
||||
res.end(format.response(0, vm.$id))
|
||||
catch e
|
||||
res.writeHead(500)
|
||||
res.end(format.error(0, new GenericError(e.message)))
|
||||
res.end(format.error(0, new Error(e.message)))
|
||||
|
||||
return
|
||||
|
||||
# TODO: "sr_id" can be passed in URL to target a specific SR
|
||||
import_ = $coroutine ({host, sr}) ->
|
||||
import_ = $coroutine ({ data, host, sr, type }) ->
|
||||
if data and type is 'xva'
|
||||
throw invalidParameters('unsupported field data for the file type xva')
|
||||
|
||||
if not sr
|
||||
if not host
|
||||
throw new InvalidParameters('you must provide either host or SR')
|
||||
throw invalidParameters('you must provide either host or SR')
|
||||
|
||||
xapi = @getXapi(host)
|
||||
sr = xapi.pool.$default_SR
|
||||
if not sr
|
||||
throw new InvalidParameters('there is not default SR in this pool')
|
||||
throw invalidParameters('there is not default SR in this pool')
|
||||
|
||||
# FIXME: must have administrate permission on default SR.
|
||||
else
|
||||
@@ -1027,13 +1052,45 @@ import_ = $coroutine ({host, sr}) ->
|
||||
|
||||
return {
|
||||
$sendTo: yield @registerHttpRequest(handleVmImport, {
|
||||
data,
|
||||
srId: sr._xapiId,
|
||||
type,
|
||||
xapi
|
||||
})
|
||||
}
|
||||
|
||||
import_.params = {
|
||||
data: {
|
||||
type: 'object',
|
||||
optional: true,
|
||||
properties: {
|
||||
descriptionLabel: { type: 'string' },
|
||||
disks: {
|
||||
type: 'array',
|
||||
items: {
|
||||
type: 'object',
|
||||
properties: {
|
||||
capacity: { type: 'integer' },
|
||||
descriptionLabel: { type: 'string' },
|
||||
nameLabel: { type: 'string' },
|
||||
path: { type: 'string' },
|
||||
position: { type: 'integer' }
|
||||
}
|
||||
},
|
||||
optional: true
|
||||
},
|
||||
memory: { type: 'integer' },
|
||||
nameLabel: { type: 'string' },
|
||||
nCpus: { type: 'integer' },
|
||||
networks: {
|
||||
type: 'array',
|
||||
items: { type: 'string' },
|
||||
optional: true
|
||||
},
|
||||
}
|
||||
},
|
||||
host: { type: 'string', optional: true },
|
||||
type: { type: 'string', optional: true },
|
||||
sr: { type: 'string', optional: true }
|
||||
}
|
||||
|
||||
@@ -1074,24 +1131,48 @@ exports.attachDisk = attachDisk
|
||||
|
||||
#---------------------------------------------------------------------
|
||||
|
||||
# FIXME: position should be optional and default to last.
|
||||
|
||||
# TODO: implement resource sets
|
||||
createInterface = $coroutine ({vm, network, position, mtu, mac}) ->
|
||||
createInterface = $coroutine ({
|
||||
vm,
|
||||
network,
|
||||
position,
|
||||
mac,
|
||||
allowedIpv4Addresses,
|
||||
allowedIpv6Addresses
|
||||
}) ->
|
||||
vif = yield @getXapi(vm).createVif(vm._xapiId, network._xapiId, {
|
||||
mac,
|
||||
mtu,
|
||||
position
|
||||
position,
|
||||
ipv4_allowed: allowedIpv4Addresses,
|
||||
ipv6_allowed: allowedIpv6Addresses
|
||||
})
|
||||
|
||||
{ push } = ipAddresses = []
|
||||
push.apply(ipAddresses, allowedIpv4Addresses) if allowedIpv4Addresses
|
||||
push.apply(ipAddresses, allowedIpv6Addresses) if allowedIpv6Addresses
|
||||
pCatch.call(@allocIpAddresses(vif.$id, allo), noop) if ipAddresses.length
|
||||
|
||||
return vif.$id
|
||||
|
||||
createInterface.params = {
|
||||
vm: { type: 'string' }
|
||||
network: { type: 'string' }
|
||||
position: { type: 'string' }
|
||||
mtu: { type: 'string', optional: true }
|
||||
position: { type: ['integer', 'string'], optional: true }
|
||||
mac: { type: 'string', optional: true }
|
||||
allowedIpv4Addresses: {
|
||||
type: 'array',
|
||||
items: {
|
||||
type: 'string'
|
||||
},
|
||||
optional: true
|
||||
},
|
||||
allowedIpv6Addresses: {
|
||||
type: 'array',
|
||||
items: {
|
||||
type: 'string'
|
||||
},
|
||||
optional: true
|
||||
}
|
||||
}
|
||||
|
||||
createInterface.resolve = {
|
||||
@@ -1170,10 +1251,7 @@ setBootOrder = $coroutine ({vm, order}) ->
|
||||
yield xapi.call 'VM.set_HVM_boot_params', vm._xapiRef, order
|
||||
return true
|
||||
|
||||
@throw(
|
||||
'INVALID_PARAMS'
|
||||
'You can only set the boot order on a HVM guest'
|
||||
)
|
||||
throw invalidParameters('You can only set the boot order on a HVM guest')
|
||||
|
||||
setBootOrder.params = {
|
||||
vm: { type: 'string' },
|
||||
|
||||
@@ -1,5 +1,50 @@
|
||||
import { streamToBuffer } from '../utils'
|
||||
|
||||
// ===================================================================
|
||||
|
||||
export function clean () {
|
||||
return this.clean()
|
||||
}
|
||||
|
||||
clean.permission = 'admin'
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
export async function exportConfig () {
|
||||
return {
|
||||
$getFrom: await this.registerHttpRequest((req, res) => {
|
||||
res.writeHead(200, 'OK', {
|
||||
'content-disposition': 'attachment'
|
||||
})
|
||||
|
||||
return this.exportConfig()
|
||||
},
|
||||
undefined,
|
||||
{ suffix: '/config.json' })
|
||||
}
|
||||
}
|
||||
|
||||
exportConfig.permission = 'admin'
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
export function getAllObjects () {
|
||||
return this.getObjects()
|
||||
}
|
||||
|
||||
getAllObjects.permission = ''
|
||||
getAllObjects.description = 'Returns all XO objects'
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
export async function importConfig () {
|
||||
return {
|
||||
$sendTo: await this.registerHttpRequest(async (req, res) => {
|
||||
await this.importConfig(JSON.parse(await streamToBuffer(req)))
|
||||
|
||||
res.end('config successfully imported')
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
importConfig.permission = 'admin'
|
||||
|
||||
478
src/api/xosan.js
Normal file
478
src/api/xosan.js
Normal file
@@ -0,0 +1,478 @@
|
||||
import arp from 'arp-a'
|
||||
import createLogger from 'debug'
|
||||
import defer from 'golike-defer'
|
||||
import execa from 'execa'
|
||||
import fromPairs from 'lodash/fromPairs'
|
||||
import fs from 'fs-promise'
|
||||
import map from 'lodash/map'
|
||||
import splitLines from 'split-lines'
|
||||
import {
|
||||
filter,
|
||||
includes
|
||||
} from 'lodash'
|
||||
|
||||
import {
|
||||
noop,
|
||||
pCatch,
|
||||
pFromCallback,
|
||||
splitFirst
|
||||
} from '../utils'
|
||||
|
||||
const debug = createLogger('xo:xosan')
|
||||
|
||||
const SSH_KEY_FILE = 'id_rsa_xosan'
|
||||
const NETWORK_PREFIX = '172.31.100.'
|
||||
|
||||
const XOSAN_VM_SYSTEM_DISK_SIZE = 10 * 1024 * 1024 * 1024
|
||||
const XOSAN_DATA_DISK_USEAGE_RATIO = 0.99
|
||||
const XOSAN_MAX_DISK_SIZE = 2093050 * 1024 * 1024 // a bit under 2To
|
||||
|
||||
const CURRENTLY_CREATING_SRS = {}
|
||||
|
||||
export async function getVolumeInfo ({ sr }) {
|
||||
const xapi = this.getXapi(sr)
|
||||
const giantIPtoVMDict = {}
|
||||
const data = xapi.xo.getData(sr, 'xosan_config')
|
||||
if (!data || !data.nodes) {
|
||||
return null
|
||||
}
|
||||
const nodes = data.nodes
|
||||
nodes.forEach(conf => {
|
||||
giantIPtoVMDict[conf.vm.ip] = xapi.getObject(conf.vm.id)
|
||||
})
|
||||
const oneHostAndVm = nodes[0]
|
||||
const resultCmd = await remoteSsh(xapi, {
|
||||
host: xapi.getObject(oneHostAndVm.host),
|
||||
address: oneHostAndVm.vm.ip
|
||||
}, 'gluster volume info xosan')
|
||||
const result = resultCmd['stdout']
|
||||
|
||||
/*
|
||||
Volume Name: xosan
|
||||
Type: Disperse
|
||||
Volume ID: 1d4d0e57-8b6b-43f9-9d40-c48be1df7548
|
||||
Status: Started
|
||||
Snapshot Count: 0
|
||||
Number of Bricks: 1 x (2 + 1) = 3
|
||||
Transport-type: tcp
|
||||
Bricks:
|
||||
Brick1: 192.168.0.201:/bricks/brick1/xosan1
|
||||
Brick2: 192.168.0.202:/bricks/brick1/xosan1
|
||||
Brick3: 192.168.0.203:/bricks/brick1/xosan1
|
||||
Options Reconfigured:
|
||||
client.event-threads: 16
|
||||
server.event-threads: 16
|
||||
performance.client-io-threads: on
|
||||
nfs.disable: on
|
||||
performance.readdir-ahead: on
|
||||
transport.address-family: inet
|
||||
features.shard: on
|
||||
features.shard-block-size: 64MB
|
||||
network.remote-dio: enable
|
||||
cluster.eager-lock: enable
|
||||
performance.io-cache: off
|
||||
performance.read-ahead: off
|
||||
performance.quick-read: off
|
||||
performance.stat-prefetch: on
|
||||
performance.strict-write-ordering: off
|
||||
cluster.server-quorum-type: server
|
||||
cluster.quorum-type: auto
|
||||
*/
|
||||
const info = fromPairs(
|
||||
splitLines(result.trim()).map(line =>
|
||||
splitFirst(line, ':').map(val => val.trim())
|
||||
)
|
||||
)
|
||||
|
||||
const getNumber = item => +item.substr(5)
|
||||
const brickKeys = filter(Object.keys(info), key => key.match(/^Brick[1-9]/)).sort((i1, i2) => getNumber(i1) - getNumber(i2))
|
||||
|
||||
// expected brickKeys : [ 'Brick1', 'Brick2', 'Brick3' ]
|
||||
info['Bricks'] = brickKeys.map(key => {
|
||||
const ip = info[key].split(':')[0]
|
||||
return { config: info[key], ip: ip, vm: giantIPtoVMDict[ip] }
|
||||
})
|
||||
const entry = await pFromCallback(cb => arp.table(cb))
|
||||
if (entry) {
|
||||
const brick = info['Bricks'].find(element => element.config.split(':')[0] === entry.ip)
|
||||
if (brick) {
|
||||
brick.mac = entry.mac
|
||||
}
|
||||
}
|
||||
|
||||
return info
|
||||
}
|
||||
|
||||
getVolumeInfo.description = 'info on gluster volume'
|
||||
getVolumeInfo.permission = 'admin'
|
||||
|
||||
getVolumeInfo.params = {
|
||||
sr: {
|
||||
type: 'string'
|
||||
}
|
||||
}
|
||||
getVolumeInfo.resolve = {
|
||||
sr: ['sr', 'SR', 'administrate']
|
||||
}
|
||||
function floor2048 (value) {
|
||||
return 2048 * Math.floor(value / 2048)
|
||||
}
|
||||
|
||||
async function copyVm (xapi, originalVm, params) {
|
||||
return { vm: await xapi.copyVm(originalVm, params.sr), params }
|
||||
}
|
||||
|
||||
async function prepareGlusterVm (xapi, vmAndParam, xosanNetwork, increaseDataDisk = true) {
|
||||
let vm = vmAndParam.vm
|
||||
// refresh the object so that sizes are correct
|
||||
const params = vmAndParam.params
|
||||
const ip = params.xenstore_data['vm-data/ip']
|
||||
const sr = xapi.getObject(params.sr.$id)
|
||||
await xapi._waitObjectState(sr.$id, sr => Boolean(sr.$PBDs))
|
||||
const host = sr.$PBDs[0].$host
|
||||
const firstVif = vm.$VIFs[0]
|
||||
if (xosanNetwork.$id !== firstVif.$network.$id) {
|
||||
await xapi.call('VIF.move', firstVif.$ref, xosanNetwork.$ref)
|
||||
}
|
||||
await xapi.editVm(vm, {
|
||||
name_label: params.name_label,
|
||||
name_description: params.name_description
|
||||
})
|
||||
await xapi.call('VM.set_xenstore_data', vm.$ref, params.xenstore_data)
|
||||
if (increaseDataDisk) {
|
||||
const dataDisk = vm.$VBDs.map(vbd => vbd.$VDI).find(vdi => vdi && vdi.name_label === 'xosan_data')
|
||||
const srFreeSpace = sr.physical_size - sr.physical_utilisation
|
||||
// we use a percentage because it looks like the VDI overhead is proportional
|
||||
const newSize = floor2048((srFreeSpace + dataDisk.virtual_size) * XOSAN_DATA_DISK_USEAGE_RATIO)
|
||||
await xapi._resizeVdi(dataDisk, Math.min(newSize, XOSAN_MAX_DISK_SIZE))
|
||||
}
|
||||
await xapi.startVm(vm)
|
||||
debug('waiting for boot of ', ip)
|
||||
// wait until we find the assigned IP in the networks, we are just checking the boot is complete
|
||||
const vmIsUp = vm => Boolean(vm.$guest_metrics && includes(vm.$guest_metrics.networks, ip))
|
||||
vm = await xapi._waitObjectState(vm.$id, vmIsUp)
|
||||
debug('booted ', ip)
|
||||
return { address: ip, host, vm }
|
||||
}
|
||||
|
||||
async function callPlugin (xapi, host, command, params) {
|
||||
debug('calling plugin', host.address, command)
|
||||
return JSON.parse(await xapi.call('host.call_plugin', host.$ref, 'xosan.py', command, params))
|
||||
}
|
||||
|
||||
async function remoteSsh (xapi, hostAndAddress, cmd) {
|
||||
const result = await callPlugin(xapi, hostAndAddress.host, 'run_ssh', {
|
||||
destination: 'root@' + hostAndAddress.address,
|
||||
cmd: cmd
|
||||
})
|
||||
if (result.exit !== 0) {
|
||||
throw new Error('ssh error: ' + JSON.stringify(result))
|
||||
}
|
||||
debug(result)
|
||||
return result
|
||||
}
|
||||
|
||||
async function setPifIp (xapi, pif, address) {
|
||||
await xapi.call('PIF.reconfigure_ip', pif.$ref, 'Static', address, '255.255.255.0', NETWORK_PREFIX + '1', '')
|
||||
}
|
||||
|
||||
const createNetworkAndInsertHosts = defer.onFailure(async function ($onFailure, xapi, pif, vlan) {
|
||||
let hostIpLastNumber = 1
|
||||
const xosanNetwork = await xapi.createNetwork({
|
||||
name: 'XOSAN network',
|
||||
description: 'XOSAN network',
|
||||
pifId: pif._xapiId,
|
||||
mtu: 9000,
|
||||
vlan: +vlan
|
||||
})
|
||||
$onFailure(() => xapi.deleteNetwork(xosanNetwork)::pCatch(noop))
|
||||
await Promise.all(xosanNetwork.$PIFs.map(pif => setPifIp(xapi, pif, NETWORK_PREFIX + (hostIpLastNumber++))))
|
||||
|
||||
return xosanNetwork
|
||||
})
|
||||
async function getOrCreateSshKey (xapi) {
|
||||
let sshKey = xapi.xo.getData(xapi.pool, 'xosan_ssh_key')
|
||||
|
||||
if (!sshKey) {
|
||||
const readKeys = async () => {
|
||||
sshKey = {
|
||||
private: await fs.readFile(SSH_KEY_FILE, 'ascii'),
|
||||
public: await fs.readFile(SSH_KEY_FILE + '.pub', 'ascii')
|
||||
}
|
||||
xapi.xo.setData(xapi.pool, 'xosan_ssh_key', sshKey)
|
||||
}
|
||||
|
||||
try {
|
||||
await readKeys()
|
||||
} catch (e) {
|
||||
await execa('ssh-keygen', ['-q', '-f', SSH_KEY_FILE, '-t', 'rsa', '-b', '4096', '-N', ''])
|
||||
await readKeys()
|
||||
}
|
||||
}
|
||||
|
||||
return sshKey
|
||||
}
|
||||
async function configureGluster (redundancy, ipAndHosts, xapi, firstIpAndHost, glusterType, arbiter = null) {
|
||||
const configByType = {
|
||||
replica_arbiter: {
|
||||
creation: 'replica 3 arbiter 1',
|
||||
extra: []
|
||||
},
|
||||
replica: {
|
||||
creation: 'replica ' + redundancy + ' ',
|
||||
extra: ['gluster volume set xosan cluster.data-self-heal on']
|
||||
},
|
||||
disperse: {
|
||||
creation: 'disperse ' + ipAndHosts.length + ' redundancy ' + redundancy + ' ',
|
||||
extra: []
|
||||
}
|
||||
}
|
||||
let brickVms = arbiter ? ipAndHosts.concat(arbiter) : ipAndHosts
|
||||
for (let i = 1; i < brickVms.length; i++) {
|
||||
await remoteSsh(xapi, firstIpAndHost, 'gluster peer probe ' + brickVms[i].address)
|
||||
}
|
||||
const creation = configByType[glusterType].creation
|
||||
const volumeCreation = 'gluster volume create xosan ' + creation +
|
||||
' ' + brickVms.map(ipAndHost => (ipAndHost.address + ':/bricks/xosan/xosandir')).join(' ')
|
||||
debug('creating volume: ', volumeCreation)
|
||||
await remoteSsh(xapi, firstIpAndHost, volumeCreation)
|
||||
await remoteSsh(xapi, firstIpAndHost, 'gluster volume set xosan network.remote-dio enable')
|
||||
await remoteSsh(xapi, firstIpAndHost, 'gluster volume set xosan cluster.eager-lock enable')
|
||||
await remoteSsh(xapi, firstIpAndHost, 'gluster volume set xosan performance.io-cache off')
|
||||
await remoteSsh(xapi, firstIpAndHost, 'gluster volume set xosan performance.read-ahead off')
|
||||
await remoteSsh(xapi, firstIpAndHost, 'gluster volume set xosan performance.quick-read off')
|
||||
await remoteSsh(xapi, firstIpAndHost, 'gluster volume set xosan performance.strict-write-ordering off')
|
||||
await remoteSsh(xapi, firstIpAndHost, 'gluster volume set xosan client.event-threads 8')
|
||||
await remoteSsh(xapi, firstIpAndHost, 'gluster volume set xosan server.event-threads 8')
|
||||
await remoteSsh(xapi, firstIpAndHost, 'gluster volume set xosan performance.io-thread-count 64')
|
||||
await remoteSsh(xapi, firstIpAndHost, 'gluster volume set xosan performance.stat-prefetch on')
|
||||
await remoteSsh(xapi, firstIpAndHost, 'gluster volume set xosan features.shard on')
|
||||
await remoteSsh(xapi, firstIpAndHost, 'gluster volume set xosan features.shard-block-size 512MB')
|
||||
for (const confChunk of configByType[glusterType].extra) {
|
||||
await remoteSsh(xapi, firstIpAndHost, confChunk)
|
||||
}
|
||||
await remoteSsh(xapi, firstIpAndHost, 'gluster volume start xosan')
|
||||
}
|
||||
|
||||
export const createSR = defer.onFailure(async function ($onFailure, { template, pif, vlan, srs, glusterType, redundancy }) {
|
||||
if (!this.requestResource) {
|
||||
throw new Error('requestResource is not a function')
|
||||
}
|
||||
|
||||
if (srs.length < 1) {
|
||||
return // TODO: throw an error
|
||||
}
|
||||
|
||||
let vmIpLastNumber = 101
|
||||
const xapi = this.getXapi(srs[0])
|
||||
if (CURRENTLY_CREATING_SRS[xapi.pool.$id]) {
|
||||
throw new Error('createSR is already running for this pool')
|
||||
}
|
||||
|
||||
CURRENTLY_CREATING_SRS[xapi.pool.$id] = true
|
||||
try {
|
||||
const xosanNetwork = await createNetworkAndInsertHosts(xapi, pif, vlan)
|
||||
$onFailure(() => xapi.deleteNetwork(xosanNetwork)::pCatch(noop))
|
||||
const sshKey = await getOrCreateSshKey(xapi)
|
||||
const srsObjects = map(srs, srId => xapi.getObject(srId))
|
||||
|
||||
const vmParameters = map(srs, srId => {
|
||||
const sr = xapi.getObject(srId)
|
||||
const host = sr.$PBDs[0].$host
|
||||
return {
|
||||
sr,
|
||||
host,
|
||||
name_label: `XOSAN - ${sr.name_label} - ${host.name_label}`,
|
||||
name_description: 'Xosan VM storing data on volume ' + sr.name_label,
|
||||
// the values of the xenstore_data object *have* to be string, don't forget.
|
||||
xenstore_data: {
|
||||
'vm-data/hostname': 'XOSAN' + sr.name_label,
|
||||
'vm-data/sshkey': sshKey.public,
|
||||
'vm-data/ip': NETWORK_PREFIX + (vmIpLastNumber++),
|
||||
'vm-data/mtu': String(xosanNetwork.MTU),
|
||||
'vm-data/vlan': String(vlan)
|
||||
}
|
||||
}
|
||||
})
|
||||
await Promise.all(vmParameters.map(vmParam => callPlugin(xapi, vmParam.host, 'receive_ssh_keys', {
|
||||
private_key: sshKey.private,
|
||||
public_key: sshKey.public,
|
||||
force: 'true'
|
||||
})))
|
||||
|
||||
const firstVM = await xapi.importVm(
|
||||
await this.requestResource('xosan', template.id, template.version),
|
||||
{ srId: vmParameters[0].sr.$ref, type: 'xva' }
|
||||
)
|
||||
$onFailure(() => xapi.deleteVm(firstVM, true)::pCatch(noop))
|
||||
await xapi.editVm(firstVM, {
|
||||
autoPoweron: true
|
||||
})
|
||||
const copiedVms = await Promise.all(vmParameters.slice(1).map(param => copyVm(xapi, firstVM, param)))
|
||||
// TODO: Promise.all() is certainly not the right operation to execute all the given promises whether they fulfill or reject.
|
||||
$onFailure(() => Promise.all(copiedVms.map(vm => xapi.deleteVm(vm.vm, true)))::pCatch(noop))
|
||||
const vmsAndParams = [{
|
||||
vm: firstVM,
|
||||
params: vmParameters[0]
|
||||
}].concat(copiedVms)
|
||||
let arbiter = null
|
||||
if (srs.length === 2) {
|
||||
const sr = vmParameters[0].sr
|
||||
const arbiterConfig = {
|
||||
sr: sr,
|
||||
host: vmParameters[0].host,
|
||||
name_label: vmParameters[0].name_label + ' arbiter',
|
||||
name_description: 'Xosan VM storing data on volume ' + sr.name_label,
|
||||
xenstore_data: {
|
||||
'vm-data/hostname': 'XOSAN' + sr.name_label + '_arb',
|
||||
'vm-data/sshkey': sshKey.public,
|
||||
'vm-data/ip': NETWORK_PREFIX + (vmIpLastNumber++),
|
||||
'vm-data/mtu': String(xosanNetwork.MTU),
|
||||
'vm-data/vlan': String(vlan)
|
||||
}
|
||||
}
|
||||
const arbiterVm = await copyVm(xapi, firstVM, arbiterConfig)
|
||||
$onFailure(() => xapi.deleteVm(arbiterVm.vm, true)::pCatch(noop))
|
||||
arbiter = await prepareGlusterVm(xapi, arbiterVm, xosanNetwork, false)
|
||||
}
|
||||
const ipAndHosts = await Promise.all(map(vmsAndParams, vmAndParam => prepareGlusterVm(xapi, vmAndParam, xosanNetwork)))
|
||||
const firstIpAndHost = ipAndHosts[0]
|
||||
await configureGluster(redundancy, ipAndHosts, xapi, firstIpAndHost, glusterType, arbiter)
|
||||
debug('xosan gluster volume started')
|
||||
const config = { server: firstIpAndHost.address + ':/xosan', backupserver: ipAndHosts[1].address }
|
||||
const xosanSr = await xapi.call('SR.create', srsObjects[0].$PBDs[0].$host.$ref, config, 0, 'XOSAN', 'XOSAN', 'xosan', '', true, {})
|
||||
if (arbiter) {
|
||||
ipAndHosts.push(arbiter)
|
||||
}
|
||||
// we just forget because the cleanup actions will be executed before.
|
||||
$onFailure(() => xapi.forgetSr(xosanSr)::pCatch(noop))
|
||||
await xapi.xo.setData(xosanSr, 'xosan_config', {
|
||||
nodes: ipAndHosts.map(param => ({
|
||||
host: param.host.$id,
|
||||
vm: { id: param.vm.$id, ip: param.address }
|
||||
})),
|
||||
network: xosanNetwork.$id
|
||||
})
|
||||
} finally {
|
||||
delete CURRENTLY_CREATING_SRS[xapi.pool.$id]
|
||||
}
|
||||
})
|
||||
|
||||
createSR.description = 'create gluster VM'
|
||||
createSR.permission = 'admin'
|
||||
createSR.params = {
|
||||
srs: {
|
||||
type: 'array',
|
||||
items: {
|
||||
type: 'string'
|
||||
}
|
||||
},
|
||||
pif: {
|
||||
type: 'string'
|
||||
},
|
||||
vlan: {
|
||||
type: 'string'
|
||||
},
|
||||
glusterType: {
|
||||
type: 'string'
|
||||
},
|
||||
redundancy: {
|
||||
type: 'number'
|
||||
}
|
||||
}
|
||||
|
||||
createSR.resolve = {
|
||||
srs: ['sr', 'SR', 'administrate'],
|
||||
pif: ['pif', 'PIF', 'administrate']
|
||||
}
|
||||
|
||||
export function checkSrIsBusy ({ poolId }) {
|
||||
return !!CURRENTLY_CREATING_SRS[poolId]
|
||||
}
|
||||
checkSrIsBusy.description = 'checks if there is a xosan SR curently being created on the given pool id'
|
||||
checkSrIsBusy.permission = 'admin'
|
||||
checkSrIsBusy.params = { poolId: { type: 'string' } }
|
||||
|
||||
const POSSIBLE_CONFIGURATIONS = {}
|
||||
POSSIBLE_CONFIGURATIONS[2] = [{ layout: 'replica_arbiter', redundancy: 3, capacity: 1 }]
|
||||
POSSIBLE_CONFIGURATIONS[3] = [
|
||||
{ layout: 'disperse', redundancy: 1, capacity: 2 },
|
||||
{ layout: 'replica', redundancy: 3, capacity: 1 }]
|
||||
POSSIBLE_CONFIGURATIONS[4] = [{ layout: 'replica', redundancy: 2, capacity: 1 }]
|
||||
POSSIBLE_CONFIGURATIONS[5] = [{ layout: 'disperse', redundancy: 1, capacity: 4 }]
|
||||
POSSIBLE_CONFIGURATIONS[6] = [
|
||||
{ layout: 'disperse', redundancy: 2, capacity: 4 },
|
||||
{ layout: 'replica', redundancy: 2, capacity: 3 },
|
||||
{ layout: 'replica', redundancy: 3, capacity: 2 }]
|
||||
POSSIBLE_CONFIGURATIONS[7] = [{ layout: 'disperse', redundancy: 3, capacity: 4 }]
|
||||
POSSIBLE_CONFIGURATIONS[8] = [{ layout: 'replica', redundancy: 2, capacity: 4 }]
|
||||
POSSIBLE_CONFIGURATIONS[9] = [
|
||||
{ layout: 'disperse', redundancy: 1, capacity: 8 },
|
||||
{ layout: 'replica', redundancy: 3, capacity: 3 }]
|
||||
POSSIBLE_CONFIGURATIONS[10] = [
|
||||
{ layout: 'disperse', redundancy: 2, capacity: 8 },
|
||||
{ layout: 'replica', redundancy: 2, capacity: 5 }]
|
||||
POSSIBLE_CONFIGURATIONS[11] = [{ layout: 'disperse', redundancy: 3, capacity: 8 }]
|
||||
POSSIBLE_CONFIGURATIONS[12] = [
|
||||
{ layout: 'disperse', redundancy: 4, capacity: 8 },
|
||||
{ layout: 'replica', redundancy: 2, capacity: 6 }]
|
||||
POSSIBLE_CONFIGURATIONS[13] = [{ layout: 'disperse', redundancy: 5, capacity: 8 }]
|
||||
POSSIBLE_CONFIGURATIONS[14] = [
|
||||
{ layout: 'disperse', redundancy: 6, capacity: 8 },
|
||||
{ layout: 'replica', redundancy: 2, capacity: 7 }]
|
||||
POSSIBLE_CONFIGURATIONS[15] = [
|
||||
{ layout: 'disperse', redundancy: 7, capacity: 8 },
|
||||
{ layout: 'replica', redundancy: 3, capacity: 5 }]
|
||||
POSSIBLE_CONFIGURATIONS[16] = [{ layout: 'replica', redundancy: 2, capacity: 8 }]
|
||||
|
||||
export async function computeXosanPossibleOptions ({ lvmSrs }) {
|
||||
const count = lvmSrs.length
|
||||
const configurations = POSSIBLE_CONFIGURATIONS[count]
|
||||
if (!configurations) {
|
||||
return null
|
||||
}
|
||||
if (count > 0) {
|
||||
const xapi = this.getXapi(lvmSrs[0])
|
||||
const srs = map(lvmSrs, srId => xapi.getObject(srId))
|
||||
const srSizes = map(srs, sr => sr.physical_size - sr.physical_utilisation)
|
||||
const minSize = Math.min.apply(null, srSizes)
|
||||
const brickSize = (minSize - XOSAN_VM_SYSTEM_DISK_SIZE) * XOSAN_DATA_DISK_USEAGE_RATIO
|
||||
return configurations.map(conf => ({ ...conf, availableSpace: brickSize * conf.capacity }))
|
||||
}
|
||||
}
|
||||
|
||||
computeXosanPossibleOptions.params = {
|
||||
lvmSrs: {
|
||||
type: 'array',
|
||||
items: {
|
||||
type: 'string'
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------
|
||||
|
||||
export async function downloadAndInstallXosanPack ({ id, version, pool }) {
|
||||
if (!this.requestResource) {
|
||||
throw new Error('requestResource is not a function')
|
||||
}
|
||||
|
||||
const xapi = this.getXapi(pool.id)
|
||||
const res = await this.requestResource('xosan', id, version)
|
||||
|
||||
return xapi.installSupplementalPackOnAllHosts(res)
|
||||
}
|
||||
|
||||
downloadAndInstallXosanPack.description = 'Register a resource via cloud plugin'
|
||||
|
||||
downloadAndInstallXosanPack.params = {
|
||||
id: { type: 'string' },
|
||||
version: { type: 'string' },
|
||||
pool: { type: 'string' }
|
||||
}
|
||||
|
||||
downloadAndInstallXosanPack.resolve = {
|
||||
pool: ['pool', 'pool', 'administrate']
|
||||
}
|
||||
|
||||
downloadAndInstallXosanPack.permission = 'admin'
|
||||
@@ -33,10 +33,6 @@ export default class Collection extends EventEmitter {
|
||||
})
|
||||
}
|
||||
|
||||
constructor () {
|
||||
super()
|
||||
}
|
||||
|
||||
async add (models, opts) {
|
||||
const array = isArray(models)
|
||||
if (!array) {
|
||||
|
||||
@@ -1,8 +1,9 @@
|
||||
import Collection, {ModelAlreadyExists} from '../collection'
|
||||
import difference from 'lodash.difference'
|
||||
import filter from 'lodash.filter'
|
||||
import getKey from 'lodash.keys'
|
||||
import difference from 'lodash/difference'
|
||||
import filter from 'lodash/filter'
|
||||
import getKey from 'lodash/keys'
|
||||
import {createClient as createRedisClient} from 'redis'
|
||||
import {v4 as generateUuid} from 'uuid'
|
||||
|
||||
import {
|
||||
forEach,
|
||||
@@ -35,13 +36,13 @@ export default class Redis extends Collection {
|
||||
connection,
|
||||
indexes = [],
|
||||
prefix,
|
||||
uri = 'tcp://localhost:6379'
|
||||
uri
|
||||
}) {
|
||||
super()
|
||||
|
||||
this.indexes = indexes
|
||||
this.prefix = prefix
|
||||
this.redis = promisifyAll.call(connection || createRedisClient(uri))
|
||||
this.redis = promisifyAll(connection || createRedisClient(uri))
|
||||
}
|
||||
|
||||
_extract (ids) {
|
||||
@@ -68,12 +69,12 @@ export default class Redis extends Collection {
|
||||
// TODO: remove “replace” which is a temporary measure, implement
|
||||
// “set()” instead.
|
||||
|
||||
const {indexes, prefix, redis, idPrefix = ''} = this
|
||||
const {indexes, prefix, redis} = this
|
||||
|
||||
return Promise.all(mapToArray(models, async model => {
|
||||
// Generate a new identifier if necessary.
|
||||
if (model.id === undefined) {
|
||||
model.id = idPrefix + String(await redis.incr(prefix + '_id'))
|
||||
model.id = generateUuid()
|
||||
}
|
||||
|
||||
const success = await redis.sadd(prefix + '_ids', model.id)
|
||||
@@ -149,6 +150,10 @@ export default class Redis extends Collection {
|
||||
}
|
||||
|
||||
_remove (ids) {
|
||||
if (isEmpty(ids)) {
|
||||
return
|
||||
}
|
||||
|
||||
const {prefix, redis} = this
|
||||
|
||||
// TODO: handle indexes.
|
||||
|
||||
@@ -1,69 +1,19 @@
|
||||
import bind from 'lodash.bind'
|
||||
import { getBoundPropertyDescriptor } from 'bind-property-descriptor'
|
||||
|
||||
import {
|
||||
isArray,
|
||||
isPromise,
|
||||
isFunction,
|
||||
noop,
|
||||
pFinally
|
||||
isFunction
|
||||
} from './utils'
|
||||
|
||||
// ===================================================================
|
||||
|
||||
const {
|
||||
defineProperties,
|
||||
defineProperty,
|
||||
getOwnPropertyDescriptor
|
||||
} = Object
|
||||
|
||||
// ===================================================================
|
||||
|
||||
// See: https://github.com/jayphelps/core-decorators.js#autobind
|
||||
//
|
||||
// TODO: make it work for all class methods.
|
||||
export const autobind = (target, key, {
|
||||
configurable,
|
||||
enumerable,
|
||||
value: fn,
|
||||
writable
|
||||
}) => ({
|
||||
configurable,
|
||||
enumerable,
|
||||
|
||||
get () {
|
||||
const bounded = bind(fn, this)
|
||||
|
||||
defineProperty(this, key, {
|
||||
configurable: true,
|
||||
enumerable: false,
|
||||
value: bounded,
|
||||
writable: true
|
||||
})
|
||||
|
||||
return bounded
|
||||
},
|
||||
set (newValue) {
|
||||
if (this === target) {
|
||||
// New value directly set on the prototype.
|
||||
delete this[key]
|
||||
this[key] = newValue
|
||||
} else {
|
||||
// New value set on a child object.
|
||||
|
||||
// Cannot use assignment because it will call the setter on
|
||||
// the prototype.
|
||||
defineProperty(this, key, {
|
||||
configurable: true,
|
||||
enumerable: true,
|
||||
value: newValue,
|
||||
writable: true
|
||||
})
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
// Debounce decorator for methods.
|
||||
//
|
||||
// See: https://github.com/wycats/javascript-decorators
|
||||
@@ -74,7 +24,7 @@ export const debounce = duration => (target, name, descriptor) => {
|
||||
|
||||
// This symbol is used to store the related data directly on the
|
||||
// current object.
|
||||
const s = Symbol()
|
||||
const s = Symbol(`debounced ${name} data`)
|
||||
|
||||
function debounced () {
|
||||
const data = this[s] || (this[s] = {
|
||||
@@ -102,119 +52,8 @@ export const debounce = duration => (target, name, descriptor) => {
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
const _push = Array.prototype.push
|
||||
|
||||
export const deferrable = (target, name, descriptor) => {
|
||||
let fn
|
||||
function newFn () {
|
||||
const deferreds = []
|
||||
const defer = fn => {
|
||||
deferreds.push(fn)
|
||||
}
|
||||
defer.clear = () => {
|
||||
deferreds.length = 0
|
||||
}
|
||||
|
||||
const args = [ defer ]
|
||||
_push.apply(args, arguments)
|
||||
|
||||
let executeDeferreds = () => {
|
||||
let i = deferreds.length
|
||||
while (i) {
|
||||
deferreds[--i]()
|
||||
}
|
||||
}
|
||||
|
||||
try {
|
||||
const result = fn.apply(this, args)
|
||||
|
||||
if (isPromise(result)) {
|
||||
result::pFinally(executeDeferreds)
|
||||
|
||||
// Do not execute the deferreds in the finally block.
|
||||
executeDeferreds = noop
|
||||
}
|
||||
|
||||
return result
|
||||
} finally {
|
||||
executeDeferreds()
|
||||
}
|
||||
}
|
||||
|
||||
if (descriptor) {
|
||||
fn = descriptor.value
|
||||
descriptor.value = newFn
|
||||
|
||||
return descriptor
|
||||
}
|
||||
|
||||
fn = target
|
||||
return newFn
|
||||
}
|
||||
|
||||
// Deferred functions are only executed on failures.
|
||||
//
|
||||
// i.e.: defer.clear() is automatically called in case of success.
|
||||
deferrable.onFailure = (target, name, descriptor) => {
|
||||
let fn
|
||||
function newFn (defer) {
|
||||
const result = fn.apply(this, arguments)
|
||||
|
||||
return isPromise(result)
|
||||
? result.then(result => {
|
||||
defer.clear()
|
||||
return result
|
||||
})
|
||||
: (defer.clear(), result)
|
||||
}
|
||||
|
||||
if (descriptor) {
|
||||
fn = descriptor.value
|
||||
descriptor.value = newFn
|
||||
} else {
|
||||
fn = target
|
||||
target = newFn
|
||||
}
|
||||
|
||||
return deferrable(target, name, descriptor)
|
||||
}
|
||||
|
||||
// Deferred functions are only executed on success.
|
||||
//
|
||||
// i.e.: defer.clear() is automatically called in case of failure.
|
||||
deferrable.onSuccess = (target, name, descriptor) => {
|
||||
let fn
|
||||
function newFn (defer) {
|
||||
try {
|
||||
const result = fn.apply(this, arguments)
|
||||
|
||||
return isPromise(result)
|
||||
? result.then(null, error => {
|
||||
defer.clear()
|
||||
throw error
|
||||
})
|
||||
: result
|
||||
} catch (error) {
|
||||
defer.clear()
|
||||
throw error
|
||||
}
|
||||
}
|
||||
|
||||
if (descriptor) {
|
||||
fn = descriptor.value
|
||||
descriptor.value = newFn
|
||||
} else {
|
||||
fn = target
|
||||
target = newFn
|
||||
}
|
||||
|
||||
return deferrable(target, name, descriptor)
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
const _ownKeys = (
|
||||
typeof Reflect !== 'undefined' && Reflect.ownKeys ||
|
||||
(typeof Reflect !== 'undefined' && Reflect.ownKeys) ||
|
||||
(({
|
||||
getOwnPropertyNames: names,
|
||||
getOwnPropertySymbols: symbols
|
||||
@@ -224,22 +63,6 @@ const _ownKeys = (
|
||||
)(Object)
|
||||
)
|
||||
|
||||
const _bindPropertyDescriptor = (descriptor, thisArg) => {
|
||||
const { get, set, value } = descriptor
|
||||
if (get) {
|
||||
descriptor.get = bind(get, thisArg)
|
||||
}
|
||||
if (set) {
|
||||
descriptor.set = bind(set, thisArg)
|
||||
}
|
||||
|
||||
if (isFunction(value)) {
|
||||
descriptor.value = bind(value, thisArg)
|
||||
}
|
||||
|
||||
return descriptor
|
||||
}
|
||||
|
||||
const _isIgnoredProperty = name => (
|
||||
name[0] === '_' ||
|
||||
name === 'constructor'
|
||||
@@ -263,7 +86,32 @@ export const mixin = MixIns => Class => {
|
||||
|
||||
const { name } = Class
|
||||
|
||||
const Decorator = (...args) => {
|
||||
// Copy properties of plain object mix-ins to the prototype.
|
||||
{
|
||||
const allMixIns = MixIns
|
||||
MixIns = []
|
||||
const { prototype } = Class
|
||||
const descriptors = { __proto__: null }
|
||||
for (const MixIn of allMixIns) {
|
||||
if (isFunction(MixIn)) {
|
||||
MixIns.push(MixIn)
|
||||
continue
|
||||
}
|
||||
|
||||
for (const prop of _ownKeys(MixIn)) {
|
||||
if (prop in prototype) {
|
||||
throw new Error(`${name}#${prop} is already defined`)
|
||||
}
|
||||
|
||||
(
|
||||
descriptors[prop] = getOwnPropertyDescriptor(MixIn, prop)
|
||||
).enumerable = false // Object methods are enumerable but class methods are not.
|
||||
}
|
||||
}
|
||||
defineProperties(prototype, descriptors)
|
||||
}
|
||||
|
||||
function Decorator (...args) {
|
||||
const instance = new Class(...args)
|
||||
|
||||
for (const MixIn of MixIns) {
|
||||
@@ -279,8 +127,9 @@ export const mixin = MixIns => Class => {
|
||||
throw new Error(`${name}#${prop} is already defined`)
|
||||
}
|
||||
|
||||
descriptors[prop] = _bindPropertyDescriptor(
|
||||
getOwnPropertyDescriptor(prototype, prop),
|
||||
descriptors[prop] = getBoundPropertyDescriptor(
|
||||
prototype,
|
||||
prop,
|
||||
mixinInstance
|
||||
)
|
||||
}
|
||||
|
||||
@@ -1,48 +1,9 @@
|
||||
/* eslint-env mocha */
|
||||
/* eslint-env jest */
|
||||
|
||||
import expect from 'must'
|
||||
import {debounce} from './decorators'
|
||||
|
||||
// ===================================================================
|
||||
|
||||
import {autobind, debounce, deferrable} from './decorators'
|
||||
|
||||
// ===================================================================
|
||||
|
||||
describe('autobind()', () => {
|
||||
class Foo {
|
||||
@autobind
|
||||
getFoo () {
|
||||
return this
|
||||
}
|
||||
}
|
||||
|
||||
it('returns a bound instance for a method', () => {
|
||||
const foo = new Foo()
|
||||
const { getFoo } = foo
|
||||
|
||||
expect(getFoo()).to.equal(foo)
|
||||
})
|
||||
|
||||
it('returns the same bound instance each time', () => {
|
||||
const foo = new Foo()
|
||||
|
||||
expect(foo.getFoo).to.equal(foo.getFoo)
|
||||
})
|
||||
|
||||
it('works with multiple instances of the same class', () => {
|
||||
const foo1 = new Foo()
|
||||
const foo2 = new Foo()
|
||||
|
||||
const getFoo1 = foo1.getFoo
|
||||
const getFoo2 = foo2.getFoo
|
||||
|
||||
expect(getFoo1()).to.equal(foo1)
|
||||
expect(getFoo2()).to.equal(foo2)
|
||||
})
|
||||
})
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
describe('debounce()', () => {
|
||||
let i
|
||||
|
||||
@@ -60,114 +21,19 @@ describe('debounce()', () => {
|
||||
it('works', done => {
|
||||
const foo = new Foo()
|
||||
|
||||
expect(i).to.equal(0)
|
||||
expect(i).toBe(0)
|
||||
|
||||
foo.foo()
|
||||
expect(i).to.equal(1)
|
||||
expect(i).toBe(1)
|
||||
|
||||
foo.foo()
|
||||
expect(i).to.equal(1)
|
||||
expect(i).toBe(1)
|
||||
|
||||
setTimeout(() => {
|
||||
foo.foo()
|
||||
expect(i).to.equal(2)
|
||||
expect(i).toBe(2)
|
||||
|
||||
done()
|
||||
}, 2e1)
|
||||
})
|
||||
})
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
describe('deferrable()', () => {
|
||||
it('works with normal termination', () => {
|
||||
let i = 0
|
||||
const fn = deferrable(defer => {
|
||||
i += 2
|
||||
defer(() => { i -= 2 })
|
||||
|
||||
i *= 2
|
||||
defer(() => { i /= 2 })
|
||||
|
||||
return i
|
||||
})
|
||||
|
||||
expect(fn()).to.equal(4)
|
||||
expect(i).to.equal(0)
|
||||
})
|
||||
|
||||
it('defer.clear() removes previous deferreds', () => {
|
||||
let i = 0
|
||||
const fn = deferrable(defer => {
|
||||
i += 2
|
||||
defer(() => { i -= 2 })
|
||||
|
||||
defer.clear()
|
||||
|
||||
i *= 2
|
||||
defer(() => { i /= 2 })
|
||||
|
||||
return i
|
||||
})
|
||||
|
||||
expect(fn()).to.equal(4)
|
||||
expect(i).to.equal(2)
|
||||
})
|
||||
|
||||
it('works with exception', () => {
|
||||
let i = 0
|
||||
const fn = deferrable(defer => {
|
||||
i += 2
|
||||
defer(() => { i -= 2 })
|
||||
|
||||
i *= 2
|
||||
defer(() => { i /= 2 })
|
||||
|
||||
throw i
|
||||
})
|
||||
|
||||
expect(() => fn()).to.throw(4)
|
||||
expect(i).to.equal(0)
|
||||
})
|
||||
|
||||
it('works with promise resolution', async () => {
|
||||
let i = 0
|
||||
const fn = deferrable(async defer => {
|
||||
i += 2
|
||||
defer(() => { i -= 2 })
|
||||
|
||||
i *= 2
|
||||
defer(() => { i /= 2 })
|
||||
|
||||
// Wait a turn of the events loop.
|
||||
await Promise.resolve()
|
||||
|
||||
return i
|
||||
})
|
||||
|
||||
await expect(fn()).to.eventually.equal(4)
|
||||
expect(i).to.equal(0)
|
||||
})
|
||||
|
||||
it('works with promise rejection', async () => {
|
||||
let i = 0
|
||||
const fn = deferrable(async defer => {
|
||||
// Wait a turn of the events loop.
|
||||
await Promise.resolve()
|
||||
|
||||
i += 2
|
||||
defer(() => { i -= 2 })
|
||||
|
||||
i *= 2
|
||||
defer(() => { i /= 2 })
|
||||
|
||||
// Wait a turn of the events loop.
|
||||
await Promise.resolve()
|
||||
|
||||
throw i
|
||||
})
|
||||
|
||||
await expect(fn()).to.reject.to.equal(4)
|
||||
expect(i).to.equal(0)
|
||||
})
|
||||
})
|
||||
|
||||
@@ -20,14 +20,15 @@ import { boot16 as fat16 } from 'fatfs/structs'
|
||||
|
||||
const SECTOR_SIZE = 512
|
||||
|
||||
const TEN_MIB = 10 * 1024 * 1024
|
||||
|
||||
// Creates a 10MB buffer and initializes it as a FAT 16 volume.
|
||||
export function init () {
|
||||
const buf = new Buffer(10 * 1024 * 1024) // 10MB
|
||||
buf.fill(0)
|
||||
const buf = Buffer.alloc(TEN_MIB)
|
||||
|
||||
// https://github.com/natevw/fatfs/blob/master/structs.js
|
||||
fat16.pack({
|
||||
jmpBoot: new Buffer('eb3c90', 'hex'),
|
||||
jmpBoot: Buffer.from('eb3c90', 'hex'),
|
||||
OEMName: 'mkfs.fat',
|
||||
BytsPerSec: SECTOR_SIZE,
|
||||
SecPerClus: 4,
|
||||
|
||||
@@ -1,27 +1,23 @@
|
||||
import assign from 'lodash.assign'
|
||||
import getStream from 'get-stream'
|
||||
import startsWith from 'lodash.startswith'
|
||||
import { parse as parseUrl } from 'url'
|
||||
import isRedirect from 'is-redirect'
|
||||
import { assign, isString, startsWith } from 'lodash'
|
||||
import { cancellable } from 'promise-toolbox'
|
||||
import { request as httpRequest } from 'http'
|
||||
import { request as httpsRequest } from 'https'
|
||||
import { stringify as formatQueryString } from 'querystring'
|
||||
|
||||
import {
|
||||
isString
|
||||
} from './utils'
|
||||
format as formatUrl,
|
||||
parse as parseUrl,
|
||||
resolve as resolveUrl
|
||||
} from 'url'
|
||||
|
||||
import { streamToBuffer } from './utils'
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
export default (...args) => {
|
||||
const raw = opts => {
|
||||
let req
|
||||
|
||||
const pResponse = new Promise((resolve, reject) => {
|
||||
const opts = {}
|
||||
for (let i = 0, length = args.length; i < length; ++i) {
|
||||
const arg = args[i]
|
||||
assign(opts, isString(arg) ? parseUrl(arg) : arg)
|
||||
}
|
||||
|
||||
const {
|
||||
body,
|
||||
headers: { ...headers } = {},
|
||||
@@ -62,11 +58,16 @@ export default (...args) => {
|
||||
}
|
||||
}
|
||||
|
||||
req = (
|
||||
protocol && startsWith(protocol.toLowerCase(), 'https')
|
||||
? httpsRequest
|
||||
: httpRequest
|
||||
)({
|
||||
const secure = protocol && startsWith(protocol.toLowerCase(), 'https')
|
||||
let requestFn
|
||||
if (secure) {
|
||||
requestFn = httpsRequest
|
||||
} else {
|
||||
requestFn = httpRequest
|
||||
delete rest.rejectUnauthorized
|
||||
}
|
||||
|
||||
req = requestFn({
|
||||
...rest,
|
||||
headers
|
||||
})
|
||||
@@ -90,7 +91,7 @@ export default (...args) => {
|
||||
response.cancel = () => {
|
||||
req.abort()
|
||||
}
|
||||
response.readAll = () => getStream(response)
|
||||
response.readAll = () => streamToBuffer(response)
|
||||
|
||||
const length = response.headers['content-length']
|
||||
if (length) {
|
||||
@@ -98,6 +99,11 @@ export default (...args) => {
|
||||
}
|
||||
|
||||
const code = response.statusCode
|
||||
const { location } = response.headers
|
||||
if (isRedirect(code) && location) {
|
||||
assign(opts, parseUrl(resolveUrl(formatUrl(opts), location)))
|
||||
return raw(opts)
|
||||
}
|
||||
if (code < 200 || code >= 300) {
|
||||
const error = new Error(response.statusMessage)
|
||||
error.code = code
|
||||
@@ -112,13 +118,27 @@ export default (...args) => {
|
||||
|
||||
return response
|
||||
})
|
||||
|
||||
pResponse.cancel = () => {
|
||||
req.emit('error', new Error('HTTP request canceled!'))
|
||||
req.abort()
|
||||
}
|
||||
pResponse.readAll = () => pResponse.then(response => response.readAll())
|
||||
pResponse.request = req
|
||||
|
||||
return pResponse
|
||||
}
|
||||
|
||||
const httpRequestPlus = ($cancelToken, ...args) => {
|
||||
const opts = {}
|
||||
for (let i = 0, length = args.length; i < length; ++i) {
|
||||
const arg = args[i]
|
||||
assign(opts, isString(arg) ? parseUrl(arg) : arg)
|
||||
}
|
||||
|
||||
const pResponse = raw(opts)
|
||||
|
||||
$cancelToken.promise.then(() => {
|
||||
const { request } = pResponse
|
||||
request.emit('error', new Error('HTTP request canceled!'))
|
||||
request.abort()
|
||||
})
|
||||
pResponse.readAll = () => pResponse.then(response => response.readAll())
|
||||
|
||||
return pResponse
|
||||
}
|
||||
export default cancellable(httpRequestPlus)
|
||||
|
||||
179
src/index.js
179
src/index.js
@@ -1,39 +1,29 @@
|
||||
import createLogger from 'debug'
|
||||
const debug = createLogger('xo:main')
|
||||
|
||||
import appConf from 'app-conf'
|
||||
import bind from 'lodash.bind'
|
||||
import bind from 'lodash/bind'
|
||||
import blocked from 'blocked'
|
||||
import createExpress from 'express'
|
||||
import createLogger from 'debug'
|
||||
import eventToPromise from 'event-to-promise'
|
||||
import has from 'lodash.has'
|
||||
import has from 'lodash/has'
|
||||
import helmet from 'helmet'
|
||||
import includes from 'lodash.includes'
|
||||
import pick from 'lodash.pick'
|
||||
import includes from 'lodash/includes'
|
||||
import proxyConsole from './proxy-console'
|
||||
import proxyRequest from 'proxy-http-request'
|
||||
import serveStatic from 'serve-static'
|
||||
import startsWith from 'lodash.startswith'
|
||||
import startsWith from 'lodash/startsWith'
|
||||
import WebSocket from 'ws'
|
||||
import {compile as compileJade} from 'jade'
|
||||
import { compile as compilePug } from 'pug'
|
||||
import { createServer as createProxyServer } from 'http-proxy'
|
||||
import { join as joinPath } from 'path'
|
||||
|
||||
import {
|
||||
AlreadyAuthenticated,
|
||||
InvalidCredential,
|
||||
InvalidParameters,
|
||||
NoSuchObject,
|
||||
NotImplemented
|
||||
} from './api-errors'
|
||||
import JsonRpcPeer from 'json-rpc-peer'
|
||||
import { invalidCredentials } from 'xo-common/api-errors'
|
||||
import {
|
||||
readFile,
|
||||
readdir
|
||||
ensureDir,
|
||||
readdir,
|
||||
readFile
|
||||
} from 'fs-promise'
|
||||
|
||||
import * as apiMethods from './api/index'
|
||||
import Api from './api'
|
||||
import WebServer from 'http-server-plus'
|
||||
import wsProxy from './ws-proxy'
|
||||
import Xo from './xo'
|
||||
import {
|
||||
setup as setupHttpProxy
|
||||
@@ -57,6 +47,8 @@ import { Strategy as LocalStrategy } from 'passport-local'
|
||||
|
||||
// ===================================================================
|
||||
|
||||
const debug = createLogger('xo:main')
|
||||
|
||||
const warn = (...args) => {
|
||||
console.warn('[Warn]', ...args)
|
||||
}
|
||||
@@ -129,8 +121,8 @@ async function setUpPassport (express, xo) {
|
||||
}
|
||||
|
||||
// Registers the sign in form.
|
||||
const signInPage = compileJade(
|
||||
await readFile(__dirname + '/../signin.jade')
|
||||
const signInPage = compilePug(
|
||||
await readFile(joinPath(__dirname, '..', 'signin.pug'))
|
||||
)
|
||||
express.get('/signin', (req, res, next) => {
|
||||
res.send(signInPage({
|
||||
@@ -139,9 +131,15 @@ async function setUpPassport (express, xo) {
|
||||
}))
|
||||
})
|
||||
|
||||
express.get('/signout', (req, res) => {
|
||||
res.clearCookie('token')
|
||||
res.redirect('/')
|
||||
})
|
||||
|
||||
const SIGNIN_STRATEGY_RE = /^\/signin\/([^/]+)(\/callback)?(:?\?.*)?$/
|
||||
express.use(async (req, res, next) => {
|
||||
const matches = req.url.match(SIGNIN_STRATEGY_RE)
|
||||
const { url } = req
|
||||
const matches = url.match(SIGNIN_STRATEGY_RE)
|
||||
|
||||
if (matches) {
|
||||
return passport.authenticate(matches[1], async (err, user, info) => {
|
||||
@@ -167,7 +165,7 @@ async function setUpPassport (express, xo) {
|
||||
matches[1] === 'local' && req.body['remember-me'] === 'on'
|
||||
)
|
||||
|
||||
res.redirect('/')
|
||||
res.redirect(req.flash('return-url')[0] || '/')
|
||||
})(req, res, next)
|
||||
}
|
||||
|
||||
@@ -187,9 +185,10 @@ async function setUpPassport (express, xo) {
|
||||
next()
|
||||
} else if (req.cookies.token) {
|
||||
next()
|
||||
} else if (/favicon|fontawesome|images|styles/.test(req.url)) {
|
||||
} else if (/favicon|fontawesome|images|styles|\.(?:css|jpg|png)$/.test(url)) {
|
||||
next()
|
||||
} else {
|
||||
req.flash('return-url', url)
|
||||
return res.redirect('/signin')
|
||||
}
|
||||
})
|
||||
@@ -222,19 +221,28 @@ async function registerPlugin (pluginPath, pluginName) {
|
||||
// Supports both “normal” CommonJS and Babel's ES2015 modules.
|
||||
const {
|
||||
default: factory = plugin,
|
||||
configurationSchema
|
||||
configurationSchema,
|
||||
configurationPresets,
|
||||
testSchema
|
||||
} = plugin
|
||||
|
||||
// The default export can be either a factory or directly a plugin
|
||||
// instance.
|
||||
const instance = isFunction(factory)
|
||||
? factory({ xo: this })
|
||||
? factory({
|
||||
xo: this,
|
||||
getDataDir: () => {
|
||||
const dir = `${this._config.datadir}/${pluginName}`
|
||||
return ensureDir(dir).then(() => dir)
|
||||
}})
|
||||
: factory
|
||||
|
||||
await this.registerPlugin(
|
||||
pluginName,
|
||||
instance,
|
||||
configurationSchema,
|
||||
configurationPresets,
|
||||
testSchema,
|
||||
version
|
||||
)
|
||||
}
|
||||
@@ -337,13 +345,29 @@ const setUpProxies = (express, opts, xo) => {
|
||||
return
|
||||
}
|
||||
|
||||
const proxy = createProxyServer({
|
||||
ignorePath: true
|
||||
}).on('error', (error) => console.error(error))
|
||||
|
||||
// TODO: sort proxies by descending prefix length.
|
||||
|
||||
// HTTP request proxy.
|
||||
forEach(opts, (target, url) => {
|
||||
express.use(url, (req, res) => {
|
||||
proxyRequest(target + req.url, req, res)
|
||||
})
|
||||
express.use((req, res, next) => {
|
||||
const { url } = req
|
||||
|
||||
for (const prefix in opts) {
|
||||
if (startsWith(url, prefix)) {
|
||||
const target = opts[prefix]
|
||||
|
||||
proxy.web(req, res, {
|
||||
target: target + url.slice(prefix.length)
|
||||
})
|
||||
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
next()
|
||||
})
|
||||
|
||||
// WebSocket proxy.
|
||||
@@ -353,14 +377,16 @@ const setUpProxies = (express, opts, xo) => {
|
||||
xo.on('stop', () => pFromCallback(cb => webSocketServer.close(cb)))
|
||||
|
||||
express.on('upgrade', (req, socket, head) => {
|
||||
const {url} = req
|
||||
const { url } = req
|
||||
|
||||
for (let prefix in opts) {
|
||||
if (url.lastIndexOf(prefix, 0) !== -1) {
|
||||
const target = opts[prefix] + url.slice(prefix.length)
|
||||
webSocketServer.handleUpgrade(req, socket, head, socket => {
|
||||
wsProxy(socket, target)
|
||||
for (const prefix in opts) {
|
||||
if (startsWith(url, prefix)) {
|
||||
const target = opts[prefix]
|
||||
|
||||
proxy.ws(req, socket, head, {
|
||||
target: target + url.slice(prefix.length)
|
||||
})
|
||||
|
||||
return
|
||||
}
|
||||
}
|
||||
@@ -385,47 +411,13 @@ const setUpStaticFiles = (express, opts) => {
|
||||
|
||||
// ===================================================================
|
||||
|
||||
const errorClasses = {
|
||||
ALREADY_AUTHENTICATED: AlreadyAuthenticated,
|
||||
INVALID_CREDENTIAL: InvalidCredential,
|
||||
INVALID_PARAMS: InvalidParameters,
|
||||
NO_SUCH_OBJECT: NoSuchObject,
|
||||
NOT_IMPLEMENTED: NotImplemented
|
||||
}
|
||||
|
||||
const apiHelpers = {
|
||||
getUserPublicProperties (user) {
|
||||
// Handles both properties and wrapped models.
|
||||
const properties = user.properties || user
|
||||
|
||||
return pick(properties, 'id', 'email', 'groups', 'permission', 'provider')
|
||||
},
|
||||
|
||||
throw (errorId, data) {
|
||||
throw new (errorClasses[errorId])(data)
|
||||
}
|
||||
}
|
||||
|
||||
const setUpApi = (webServer, xo, verboseLogsOnErrors) => {
|
||||
const webSocketServer = new WebSocket.Server({
|
||||
server: webServer,
|
||||
path: '/api/'
|
||||
noServer: true
|
||||
})
|
||||
xo.on('stop', () => pFromCallback(cb => webSocketServer.close(cb)))
|
||||
|
||||
// FIXME: it can cause issues if there any property assignments in
|
||||
// XO methods called from the API.
|
||||
const context = { __proto__: xo, ...apiHelpers }
|
||||
|
||||
const api = new Api({
|
||||
context,
|
||||
verboseLogsOnErrors
|
||||
})
|
||||
xo.defineProperty('api', api)
|
||||
|
||||
api.addMethods(apiMethods)
|
||||
|
||||
webSocketServer.on('connection', socket => {
|
||||
const onConnection = socket => {
|
||||
const { remoteAddress } = socket.upgradeReq.socket
|
||||
|
||||
debug('+ WebSocket connection (%s)', remoteAddress)
|
||||
@@ -439,7 +431,7 @@ const setUpApi = (webServer, xo, verboseLogsOnErrors) => {
|
||||
// Create the JSON-RPC server for this connection.
|
||||
const jsonRpc = new JsonRpcPeer(message => {
|
||||
if (message.type === 'request') {
|
||||
return api.call(connection, message.method, message.params)
|
||||
return xo.callApiMethod(connection, message.method, message.params)
|
||||
}
|
||||
})
|
||||
connection.notify = bind(jsonRpc.notify, jsonRpc)
|
||||
@@ -468,6 +460,11 @@ const setUpApi = (webServer, xo, verboseLogsOnErrors) => {
|
||||
socket.send(data, onSend)
|
||||
}
|
||||
})
|
||||
}
|
||||
webServer.on('upgrade', (req, socket, head) => {
|
||||
if (req.url === '/api/') {
|
||||
webSocketServer.handleUpgrade(req, socket, head, onConnection)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
@@ -494,8 +491,8 @@ const setUpConsoleProxy = (webServer, xo) => {
|
||||
const { token } = parseCookies(req.headers.cookie)
|
||||
|
||||
const user = await xo.authenticateUser({ token })
|
||||
if (!await xo.hasPermissions(user.id, [ [ id, 'operate' ] ])) { // eslint-disable-line space-before-keywords
|
||||
throw new InvalidCredential()
|
||||
if (!await xo.hasPermissions(user.id, [ [ id, 'operate' ] ])) {
|
||||
throw invalidCredentials()
|
||||
}
|
||||
|
||||
const { remoteAddress } = socket
|
||||
@@ -512,8 +509,8 @@ const setUpConsoleProxy = (webServer, xo) => {
|
||||
webSocketServer.handleUpgrade(req, socket, head, connection => {
|
||||
proxyConsole(connection, vmConsole, xapi.sessionId)
|
||||
})
|
||||
} catch (_) {
|
||||
console.error(_)
|
||||
} catch (error) {
|
||||
console.error((error && error.stack) || error)
|
||||
}
|
||||
})
|
||||
}
|
||||
@@ -628,16 +625,24 @@ export default async function main (args) {
|
||||
await registerPlugins(xo)
|
||||
}
|
||||
|
||||
// Gracefully shutdown on signals.
|
||||
//
|
||||
// TODO: implements a timeout? (or maybe it is the services launcher
|
||||
// responsibility?)
|
||||
const shutdown = signal => {
|
||||
debug('%s caught, closing…', signal)
|
||||
xo.stop()
|
||||
}
|
||||
forEach([ 'SIGINT', 'SIGTERM' ], signal => {
|
||||
let alreadyCalled = false
|
||||
|
||||
// Gracefully shutdown on signals.
|
||||
process.on('SIGINT', () => shutdown('SIGINT'))
|
||||
process.on('SIGTERM', () => shutdown('SIGTERM'))
|
||||
process.on(signal, () => {
|
||||
if (alreadyCalled) {
|
||||
warn('forced exit')
|
||||
process.exit(1)
|
||||
}
|
||||
alreadyCalled = true
|
||||
|
||||
debug('%s caught, closing…', signal)
|
||||
xo.stop()
|
||||
})
|
||||
})
|
||||
|
||||
await eventToPromise(xo, 'stopped')
|
||||
|
||||
|
||||
@@ -1,9 +1,20 @@
|
||||
import assign from 'lodash.assign'
|
||||
import {BaseError} from 'make-error'
|
||||
import assign from 'lodash/assign'
|
||||
import Bluebird from 'bluebird'
|
||||
import every from 'lodash/every'
|
||||
import filter from 'lodash/filter'
|
||||
import isArray from 'lodash/isArray'
|
||||
import isPlainObject from 'lodash/isPlainObject'
|
||||
import map from 'lodash/map'
|
||||
import mapValues from 'lodash/mapValues'
|
||||
import size from 'lodash/size'
|
||||
import some from 'lodash/some'
|
||||
import { BaseError } from 'make-error'
|
||||
import { timeout } from 'promise-toolbox'
|
||||
|
||||
import { crossProduct } from './math'
|
||||
import {
|
||||
createRawObject,
|
||||
forEach
|
||||
serializeError,
|
||||
thunkToArray
|
||||
} from './utils'
|
||||
|
||||
export class JobExecutorError extends BaseError {}
|
||||
@@ -18,30 +29,67 @@ export class UnsupportedVectorType extends JobExecutorError {
|
||||
}
|
||||
}
|
||||
|
||||
export const productParams = (...args) => {
|
||||
let product = createRawObject()
|
||||
assign(product, ...args)
|
||||
return product
|
||||
// ===================================================================
|
||||
|
||||
const match = (pattern, value) => {
|
||||
if (isPlainObject(pattern)) {
|
||||
if (size(pattern) === 1) {
|
||||
if (pattern.__or) {
|
||||
return some(pattern.__or, subpattern => match(subpattern, value))
|
||||
}
|
||||
if (pattern.__not) {
|
||||
return !match(pattern.__not, value)
|
||||
}
|
||||
}
|
||||
|
||||
return isPlainObject(value) && every(pattern, (subpattern, key) => (
|
||||
value[key] !== undefined && match(subpattern, value[key])
|
||||
))
|
||||
}
|
||||
|
||||
if (isArray(pattern)) {
|
||||
return isArray(value) && every(pattern, subpattern =>
|
||||
some(value, subvalue => match(subpattern, subvalue))
|
||||
)
|
||||
}
|
||||
|
||||
return pattern === value
|
||||
}
|
||||
|
||||
export function _computeCrossProduct (items, productCb, extractValueMap = {}) {
|
||||
const upstreamValues = []
|
||||
const itemsCopy = items.slice()
|
||||
const item = itemsCopy.pop()
|
||||
const values = extractValueMap[item.type] && extractValueMap[item.type](item) || item
|
||||
forEach(values, value => {
|
||||
if (itemsCopy.length) {
|
||||
let downstreamValues = _computeCrossProduct(itemsCopy, productCb, extractValueMap)
|
||||
forEach(downstreamValues, downstreamValue => {
|
||||
upstreamValues.push(productCb(value, downstreamValue))
|
||||
const paramsVectorActionsMap = {
|
||||
extractProperties ({ mapping, value }) {
|
||||
return mapValues(mapping, key => value[key])
|
||||
},
|
||||
crossProduct ({ items }) {
|
||||
return thunkToArray(crossProduct(
|
||||
map(items, value => resolveParamsVector.call(this, value))
|
||||
))
|
||||
},
|
||||
fetchObjects ({ pattern }) {
|
||||
return filter(this.xo.getObjects(), object => match(pattern, object))
|
||||
},
|
||||
map ({ collection, iteratee, paramName = 'value' }) {
|
||||
return map(resolveParamsVector.call(this, collection), value => {
|
||||
return resolveParamsVector.call(this, {
|
||||
...iteratee,
|
||||
[paramName]: value
|
||||
})
|
||||
} else {
|
||||
upstreamValues.push(value)
|
||||
}
|
||||
})
|
||||
return upstreamValues
|
||||
})
|
||||
},
|
||||
set: ({ values }) => values
|
||||
}
|
||||
|
||||
export function resolveParamsVector (paramsVector) {
|
||||
const visitor = paramsVectorActionsMap[paramsVector.type]
|
||||
if (!visitor) {
|
||||
throw new Error(`Unsupported function '${paramsVector.type}'.`)
|
||||
}
|
||||
|
||||
return visitor.call(this, paramsVector)
|
||||
}
|
||||
|
||||
// ===================================================================
|
||||
|
||||
export default class JobExecutor {
|
||||
constructor (xo) {
|
||||
this.xo = xo
|
||||
@@ -76,30 +124,24 @@ export default class JobExecutor {
|
||||
event: 'job.end',
|
||||
runJobId
|
||||
})
|
||||
} catch (e) {
|
||||
} catch (error) {
|
||||
this._logger.error(`The execution of ${job.id} has failed.`, {
|
||||
event: 'job.end',
|
||||
runJobId,
|
||||
error: e
|
||||
error: serializeError(error)
|
||||
})
|
||||
|
||||
throw error
|
||||
}
|
||||
}
|
||||
|
||||
async _execCall (job, runJobId) {
|
||||
let paramsFlatVector
|
||||
|
||||
if (job.paramsVector) {
|
||||
if (job.paramsVector.type === 'crossProduct') {
|
||||
paramsFlatVector = _computeCrossProduct(job.paramsVector.items, productParams, this._extractValueCb)
|
||||
} else {
|
||||
throw new UnsupportedVectorType(job.paramsVector)
|
||||
}
|
||||
} else {
|
||||
paramsFlatVector = [{}] // One call with no parameters
|
||||
}
|
||||
const { paramsVector } = job
|
||||
const paramsFlatVector = paramsVector
|
||||
? resolveParamsVector.call(this, paramsVector)
|
||||
: [{}] // One call with no parameters
|
||||
|
||||
const connection = this.xo.createUserConnection()
|
||||
const promises = []
|
||||
|
||||
connection.set('user_id', job.userId)
|
||||
|
||||
@@ -109,7 +151,7 @@ export default class JobExecutor {
|
||||
calls: {}
|
||||
}
|
||||
|
||||
forEach(paramsFlatVector, params => {
|
||||
await Bluebird.map(paramsFlatVector, params => {
|
||||
const runCallId = this._logger.notice(`Starting ${job.method} call. (${job.id})`, {
|
||||
event: 'jobCall.start',
|
||||
runJobId,
|
||||
@@ -122,37 +164,40 @@ export default class JobExecutor {
|
||||
params,
|
||||
start: Date.now()
|
||||
}
|
||||
let promise = this.xo.callApiMethod(connection, job.method, assign({}, params))
|
||||
if (job.timeout) {
|
||||
promise = promise::timeout(job.timeout)
|
||||
}
|
||||
|
||||
promises.push(
|
||||
this.xo.api.call(connection, job.method, assign({}, params)).then(
|
||||
value => {
|
||||
this._logger.notice(`Call ${job.method} (${runCallId}) is a success. (${job.id})`, {
|
||||
event: 'jobCall.end',
|
||||
runJobId,
|
||||
runCallId,
|
||||
returnedValue: value
|
||||
})
|
||||
return promise.then(
|
||||
value => {
|
||||
this._logger.notice(`Call ${job.method} (${runCallId}) is a success. (${job.id})`, {
|
||||
event: 'jobCall.end',
|
||||
runJobId,
|
||||
runCallId,
|
||||
returnedValue: value
|
||||
})
|
||||
|
||||
call.returnedValue = value
|
||||
call.end = Date.now()
|
||||
},
|
||||
reason => {
|
||||
this._logger.notice(`Call ${job.method} (${runCallId}) has failed. (${job.id})`, {
|
||||
event: 'jobCall.end',
|
||||
runJobId,
|
||||
runCallId,
|
||||
error: reason
|
||||
})
|
||||
call.returnedValue = value
|
||||
call.end = Date.now()
|
||||
},
|
||||
reason => {
|
||||
this._logger.notice(`Call ${job.method} (${runCallId}) has failed. (${job.id})`, {
|
||||
event: 'jobCall.end',
|
||||
runJobId,
|
||||
runCallId,
|
||||
error: serializeError(reason)
|
||||
})
|
||||
|
||||
call.error = reason
|
||||
call.end = Date.now()
|
||||
}
|
||||
)
|
||||
call.error = reason
|
||||
call.end = Date.now()
|
||||
}
|
||||
)
|
||||
}, {
|
||||
concurrency: 2
|
||||
})
|
||||
|
||||
connection.close()
|
||||
await Promise.all(promises)
|
||||
execStatus.end = Date.now()
|
||||
|
||||
return execStatus
|
||||
|
||||
@@ -1,69 +1,100 @@
|
||||
/* eslint-env mocha */
|
||||
/* eslint-env jest */
|
||||
|
||||
import {expect} from 'chai'
|
||||
import leche from 'leche'
|
||||
import { forEach } from 'lodash'
|
||||
import { resolveParamsVector } from './job-executor'
|
||||
|
||||
import {productParams} from './job-executor'
|
||||
import {_computeCrossProduct} from './job-executor'
|
||||
describe('resolveParamsVector', function () {
|
||||
forEach({
|
||||
'cross product with three sets': [
|
||||
// Expected result.
|
||||
[ { id: 3, value: 'foo', remote: 'local' },
|
||||
{ id: 7, value: 'foo', remote: 'local' },
|
||||
{ id: 10, value: 'foo', remote: 'local' },
|
||||
{ id: 3, value: 'bar', remote: 'local' },
|
||||
{ id: 7, value: 'bar', remote: 'local' },
|
||||
{ id: 10, value: 'bar', remote: 'local' } ],
|
||||
// Entry.
|
||||
{
|
||||
type: 'crossProduct',
|
||||
items: [{
|
||||
type: 'set',
|
||||
values: [ { id: 3 }, { id: 7 }, { id: 10 } ]
|
||||
}, {
|
||||
type: 'set',
|
||||
values: [ { value: 'foo' }, { value: 'bar' } ]
|
||||
}, {
|
||||
type: 'set',
|
||||
values: [ { remote: 'local' } ]
|
||||
}]
|
||||
}
|
||||
],
|
||||
'cross product with `set` and `map`': [
|
||||
// Expected result.
|
||||
[
|
||||
{ remote: 'local', id: 'vm:2' },
|
||||
{ remote: 'smb', id: 'vm:2' }
|
||||
],
|
||||
|
||||
describe('productParams', function () {
|
||||
leche.withData({
|
||||
'Two sets of one': [
|
||||
{a: 1, b: 2}, {a: 1}, {b: 2}
|
||||
],
|
||||
'Two sets of two': [
|
||||
{a: 1, b: 2, c: 3, d: 4}, {a: 1, b: 2}, {c: 3, d: 4}
|
||||
],
|
||||
'Three sets': [
|
||||
{a: 1, b: 2, c: 3, d: 4, e: 5, f: 6}, {a: 1}, {b: 2, c: 3}, {d: 4, e: 5, f: 6}
|
||||
],
|
||||
'One set': [
|
||||
{a: 1, b: 2}, {a: 1, b: 2}
|
||||
],
|
||||
'Empty set': [
|
||||
{a: 1}, {a: 1}, {}
|
||||
],
|
||||
'All empty': [
|
||||
{}, {}, {}
|
||||
],
|
||||
'No set': [
|
||||
{}
|
||||
// Entry.
|
||||
{
|
||||
type: 'crossProduct',
|
||||
items: [{
|
||||
type: 'set',
|
||||
values: [ { remote: 'local' }, { remote: 'smb' } ]
|
||||
}, {
|
||||
type: 'map',
|
||||
collection: {
|
||||
type: 'fetchObjects',
|
||||
pattern: {
|
||||
$pool: { __or: [ 'pool:1', 'pool:8', 'pool:12' ] },
|
||||
power_state: 'Running',
|
||||
tags: [ 'foo' ],
|
||||
type: 'VM'
|
||||
}
|
||||
},
|
||||
iteratee: {
|
||||
type: 'extractProperties',
|
||||
mapping: { id: 'id' }
|
||||
}
|
||||
}]
|
||||
},
|
||||
|
||||
// Context.
|
||||
{
|
||||
xo: {
|
||||
getObjects: function () {
|
||||
return [{
|
||||
id: 'vm:1',
|
||||
$pool: 'pool:1',
|
||||
tags: [],
|
||||
type: 'VM',
|
||||
power_state: 'Halted'
|
||||
}, {
|
||||
id: 'vm:2',
|
||||
$pool: 'pool:1',
|
||||
tags: [ 'foo' ],
|
||||
type: 'VM',
|
||||
power_state: 'Running'
|
||||
}, {
|
||||
id: 'host:1',
|
||||
type: 'host',
|
||||
power_state: 'Running'
|
||||
}, {
|
||||
id: 'vm:3',
|
||||
$pool: 'pool:8',
|
||||
tags: [ 'foo' ],
|
||||
type: 'VM',
|
||||
power_state: 'Halted'
|
||||
}]
|
||||
}
|
||||
}
|
||||
}
|
||||
]
|
||||
}, function (resultSet, ...sets) {
|
||||
it('Assembles all given param sets in on set', function () {
|
||||
expect(productParams(...sets)).to.eql(resultSet)
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe('_computeCrossProduct', function () {
|
||||
// Gives the sum of all args
|
||||
const addTest = (...args) => args.reduce((prev, curr) => prev + curr, 0)
|
||||
// Gives the product of all args
|
||||
const multiplyTest = (...args) => args.reduce((prev, curr) => prev * curr, 1)
|
||||
|
||||
leche.withData({
|
||||
'2 sets of 2 items to multiply': [
|
||||
[10, 14, 15, 21], [[2, 3], [5, 7]], multiplyTest
|
||||
],
|
||||
'3 sets of 2 items to multiply': [
|
||||
[110, 130, 154, 182, 165, 195, 231, 273], [[2, 3], [5, 7], [11, 13]], multiplyTest
|
||||
],
|
||||
'2 sets of 3 items to multiply': [
|
||||
[14, 22, 26, 21, 33, 39, 35, 55, 65], [[2, 3, 5], [7, 11, 13]], multiplyTest
|
||||
],
|
||||
'2 sets of 2 items to add': [
|
||||
[7, 9, 8, 10], [[2, 3], [5, 7]], addTest
|
||||
],
|
||||
'3 sets of 2 items to add': [
|
||||
[18, 20, 20, 22, 19, 21, 21, 23], [[2, 3], [5, 7], [11, 13]], addTest
|
||||
],
|
||||
'2 sets of 3 items to add': [
|
||||
[9, 13, 15, 10, 14, 16, 12, 16, 18], [[2, 3, 5], [7, 11, 13]], addTest
|
||||
]
|
||||
}, function (product, items, cb) {
|
||||
it('Crosses sets of values with a crossProduct callback', function () {
|
||||
expect(_computeCrossProduct(items, cb)).to.have.members(product)
|
||||
}, ([ expectedResult, entry, context ], name) => {
|
||||
describe(`with ${name}`, () => {
|
||||
it('Resolves params vector', () => {
|
||||
expect(resolveParamsVector.call(context, entry)).toEqual(expectedResult)
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import appConf from 'app-conf'
|
||||
import get from 'lodash.get'
|
||||
import get from 'lodash/get'
|
||||
import highland from 'highland'
|
||||
import levelup from 'level-party'
|
||||
import ndjson from 'ndjson'
|
||||
|
||||
33
src/lvm.js
Normal file
33
src/lvm.js
Normal file
@@ -0,0 +1,33 @@
|
||||
import execa from 'execa'
|
||||
import splitLines from 'split-lines'
|
||||
import { createParser } from 'parse-pairs'
|
||||
import { isArray, map } from 'lodash'
|
||||
|
||||
// ===================================================================
|
||||
|
||||
const parse = createParser({
|
||||
keyTransform: key => key.slice(5).toLowerCase()
|
||||
})
|
||||
const makeFunction = command => (fields, ...args) =>
|
||||
execa.stdout(command, [
|
||||
'--noheading',
|
||||
'--nosuffix',
|
||||
'--nameprefixes',
|
||||
'--unbuffered',
|
||||
'--units',
|
||||
'b',
|
||||
'-o',
|
||||
String(fields),
|
||||
...args
|
||||
]).then(stdout => map(
|
||||
splitLines(stdout),
|
||||
isArray(fields)
|
||||
? parse
|
||||
: line => {
|
||||
const data = parse(line)
|
||||
return data[fields]
|
||||
}
|
||||
))
|
||||
|
||||
export const lvs = makeFunction('lvs')
|
||||
export const pvs = makeFunction('pvs')
|
||||
48
src/math.js
Normal file
48
src/math.js
Normal file
@@ -0,0 +1,48 @@
|
||||
import assign from 'lodash/assign'
|
||||
|
||||
const _combine = (vectors, n, cb) => {
|
||||
if (!n) {
|
||||
return
|
||||
}
|
||||
|
||||
const nLast = n - 1
|
||||
|
||||
const vector = vectors[nLast]
|
||||
const m = vector.length
|
||||
if (n === 1) {
|
||||
for (let i = 0; i < m; ++i) {
|
||||
cb([ vector[i] ]) // eslint-disable-line standard/no-callback-literal
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
for (let i = 0; i < m; ++i) {
|
||||
const value = vector[i]
|
||||
|
||||
_combine(vectors, nLast, (vector) => {
|
||||
vector.push(value)
|
||||
cb(vector)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
// Compute all combinations from vectors.
|
||||
//
|
||||
// Ex: combine([[2, 3], [5, 7]])
|
||||
// => [ [ 2, 5 ], [ 3, 5 ], [ 2, 7 ], [ 3, 7 ] ]
|
||||
export const combine = vectors => cb => _combine(vectors, vectors.length, cb)
|
||||
|
||||
// Merge the properties of an objects set in one object.
|
||||
//
|
||||
// Ex: mergeObjects([ { a: 1 }, { b: 2 } ]) => { a: 1, b: 2 }
|
||||
export const mergeObjects = objects => assign({}, ...objects)
|
||||
|
||||
// Compute a cross product between vectors.
|
||||
//
|
||||
// Ex: crossProduct([ [ { a: 2 }, { b: 3 } ], [ { c: 5 }, { d: 7 } ] ] )
|
||||
// => [ { a: 2, c: 5 }, { b: 3, c: 5 }, { a: 2, d: 7 }, { b: 3, d: 7 } ]
|
||||
export const crossProduct = (vectors, mergeFn = mergeObjects) => cb => (
|
||||
combine(vectors)(vector => {
|
||||
cb(mergeFn(vector))
|
||||
})
|
||||
)
|
||||
74
src/math.spec.js
Normal file
74
src/math.spec.js
Normal file
@@ -0,0 +1,74 @@
|
||||
/* eslint-env jest */
|
||||
|
||||
import { forEach } from 'lodash'
|
||||
import { thunkToArray } from './utils'
|
||||
import {
|
||||
crossProduct,
|
||||
mergeObjects
|
||||
} from './math'
|
||||
|
||||
describe('mergeObjects', function () {
|
||||
forEach({
|
||||
'Two sets of one': [
|
||||
{a: 1, b: 2}, {a: 1}, {b: 2}
|
||||
],
|
||||
'Two sets of two': [
|
||||
{a: 1, b: 2, c: 3, d: 4}, {a: 1, b: 2}, {c: 3, d: 4}
|
||||
],
|
||||
'Three sets': [
|
||||
{a: 1, b: 2, c: 3, d: 4, e: 5, f: 6}, {a: 1}, {b: 2, c: 3}, {d: 4, e: 5, f: 6}
|
||||
],
|
||||
'One set': [
|
||||
{a: 1, b: 2}, {a: 1, b: 2}
|
||||
],
|
||||
'Empty set': [
|
||||
{a: 1}, {a: 1}, {}
|
||||
],
|
||||
'All empty': [
|
||||
{}, {}, {}
|
||||
],
|
||||
'No set': [
|
||||
{}
|
||||
]
|
||||
}, ([ resultSet, ...sets ], name) => {
|
||||
describe(`with ${name}`, () => {
|
||||
it('Assembles all given param sets in on set', function () {
|
||||
expect(mergeObjects(sets)).toEqual(resultSet)
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe('crossProduct', function () {
|
||||
// Gives the sum of all args
|
||||
const addTest = args => args.reduce((prev, curr) => prev + curr, 0)
|
||||
// Gives the product of all args
|
||||
const multiplyTest = args => args.reduce((prev, curr) => prev * curr, 1)
|
||||
|
||||
forEach({
|
||||
'2 sets of 2 items to multiply': [
|
||||
[10, 14, 15, 21], [[2, 3], [5, 7]], multiplyTest
|
||||
],
|
||||
'3 sets of 2 items to multiply': [
|
||||
[110, 130, 154, 182, 165, 195, 231, 273], [[2, 3], [5, 7], [11, 13]], multiplyTest
|
||||
],
|
||||
'2 sets of 3 items to multiply': [
|
||||
[14, 22, 26, 21, 33, 39, 35, 55, 65], [[2, 3, 5], [7, 11, 13]], multiplyTest
|
||||
],
|
||||
'2 sets of 2 items to add': [
|
||||
[7, 9, 8, 10], [[2, 3], [5, 7]], addTest
|
||||
],
|
||||
'3 sets of 2 items to add': [
|
||||
[18, 20, 20, 22, 19, 21, 21, 23], [[2, 3], [5, 7], [11, 13]], addTest
|
||||
],
|
||||
'2 sets of 3 items to add': [
|
||||
[9, 13, 15, 10, 14, 16, 12, 16, 18], [[2, 3, 5], [7, 11, 13]], addTest
|
||||
]
|
||||
}, ([ product, items, cb ], name) => {
|
||||
describe(`with ${name}`, () => {
|
||||
it('Crosses sets of values with a crossProduct callback', function () {
|
||||
expect(thunkToArray(crossProduct(items, cb)).sort()).toEqual(product.sort())
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
@@ -1,8 +1,12 @@
|
||||
import isEmpty from 'lodash/isEmpty'
|
||||
|
||||
import Collection from '../collection/redis'
|
||||
import Model from '../model'
|
||||
|
||||
import { forEach } from '../utils'
|
||||
|
||||
import { parseProp } from './utils'
|
||||
|
||||
// ===================================================================
|
||||
|
||||
export default class Group extends Model {}
|
||||
@@ -14,20 +18,16 @@ export class Groups extends Collection {
|
||||
return Group
|
||||
}
|
||||
|
||||
get idPrefix () {
|
||||
return 'group:'
|
||||
}
|
||||
|
||||
create (name) {
|
||||
return this.add(new Group({
|
||||
name,
|
||||
users: '[]'
|
||||
}))
|
||||
return this.add(new Group({ name }))
|
||||
}
|
||||
|
||||
async save (group) {
|
||||
// Serializes.
|
||||
group.users = JSON.stringify(group.users)
|
||||
let tmp
|
||||
group.users = isEmpty(tmp = group.users)
|
||||
? undefined
|
||||
: JSON.stringify(tmp)
|
||||
|
||||
return /* await */ this.update(group)
|
||||
}
|
||||
@@ -37,13 +37,7 @@ export class Groups extends Collection {
|
||||
|
||||
// Deserializes.
|
||||
forEach(groups, group => {
|
||||
const {users} = group
|
||||
try {
|
||||
group.users = JSON.parse(users)
|
||||
} catch (error) {
|
||||
console.warn('cannot parse group.users:', users)
|
||||
group.users = []
|
||||
}
|
||||
group.users = parseProp('group', group, 'users', [])
|
||||
})
|
||||
|
||||
return groups
|
||||
|
||||
@@ -11,12 +11,7 @@ export class Jobs extends Collection {
|
||||
return Job
|
||||
}
|
||||
|
||||
get idPrefix () {
|
||||
return 'job:'
|
||||
}
|
||||
|
||||
async create (userId, job) {
|
||||
job.userId = userId
|
||||
async create (job) {
|
||||
// Serializes.
|
||||
job.paramsVector = JSON.stringify(job.paramsVector)
|
||||
return /* await */ this.add(new Job(job))
|
||||
|
||||
@@ -13,10 +13,6 @@ export class PluginsMetadata extends Collection {
|
||||
return PluginMetadata
|
||||
}
|
||||
|
||||
get idPrefix () {
|
||||
return 'plugin-metadata:'
|
||||
}
|
||||
|
||||
async save ({ id, autoload, configuration }) {
|
||||
return /* await */ this.update({
|
||||
id,
|
||||
|
||||
@@ -13,10 +13,6 @@ export class Remotes extends Collection {
|
||||
return Remote
|
||||
}
|
||||
|
||||
get idPrefix () {
|
||||
return 'remote-'
|
||||
}
|
||||
|
||||
create (name, url) {
|
||||
return this.add(new Remote({
|
||||
name,
|
||||
|
||||
@@ -11,17 +11,14 @@ export class Schedules extends Collection {
|
||||
return Schedule
|
||||
}
|
||||
|
||||
get idPrefix () {
|
||||
return 'schedule:'
|
||||
}
|
||||
|
||||
create (userId, job, cron, enabled, name = undefined) {
|
||||
create (userId, job, cron, enabled, name = undefined, timezone = undefined) {
|
||||
return this.add(new Schedule({
|
||||
userId,
|
||||
job,
|
||||
cron,
|
||||
enabled,
|
||||
name
|
||||
name,
|
||||
timezone
|
||||
}))
|
||||
}
|
||||
|
||||
|
||||
@@ -1,5 +1,8 @@
|
||||
import Collection from '../collection/redis'
|
||||
import Model from '../model'
|
||||
import { forEach } from '../utils'
|
||||
|
||||
import { parseProp } from './utils'
|
||||
|
||||
// ===================================================================
|
||||
|
||||
@@ -12,11 +15,26 @@ export class Servers extends Collection {
|
||||
return Server
|
||||
}
|
||||
|
||||
async create ({host, username, password, readOnly}) {
|
||||
async create ({label, host, username, password, readOnly}) {
|
||||
if (await this.exists({host})) {
|
||||
throw new Error('server already exists')
|
||||
}
|
||||
|
||||
return /* await */ this.add({host, username, password, readOnly})
|
||||
return /* await */ this.add({label, host, username, password, readOnly})
|
||||
}
|
||||
|
||||
async get (properties) {
|
||||
const servers = await super.get(properties)
|
||||
|
||||
// Deserializes
|
||||
forEach(servers, server => {
|
||||
if (server.error) {
|
||||
server.error = parseProp('server', server, 'error', '')
|
||||
} else {
|
||||
delete server.error
|
||||
}
|
||||
})
|
||||
|
||||
return servers
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,7 +1,11 @@
|
||||
import isEmpty from 'lodash/isEmpty'
|
||||
|
||||
import Collection from '../collection/redis'
|
||||
import Model from '../model'
|
||||
import { forEach } from '../utils'
|
||||
|
||||
import { parseProp } from './utils'
|
||||
|
||||
// ===================================================================
|
||||
|
||||
export default class User extends Model {}
|
||||
@@ -17,15 +21,14 @@ export class Users extends Collection {
|
||||
return User
|
||||
}
|
||||
|
||||
async create (email, properties = {}) {
|
||||
async create (properties) {
|
||||
const { email } = properties
|
||||
|
||||
// Avoid duplicates.
|
||||
if (await this.exists({email})) {
|
||||
throw new Error(`the user ${email} already exists`)
|
||||
}
|
||||
|
||||
// Adds the email to the user's properties.
|
||||
properties.email = email
|
||||
|
||||
// Create the user object.
|
||||
const user = new User(properties)
|
||||
|
||||
@@ -35,7 +38,13 @@ export class Users extends Collection {
|
||||
|
||||
async save (user) {
|
||||
// Serializes.
|
||||
user.groups = JSON.stringify(user.groups)
|
||||
let tmp
|
||||
user.groups = isEmpty(tmp = user.groups)
|
||||
? undefined
|
||||
: JSON.stringify(tmp)
|
||||
user.preferences = isEmpty(tmp = user.preferences)
|
||||
? undefined
|
||||
: JSON.stringify(tmp)
|
||||
|
||||
return /* await */ this.update(user)
|
||||
}
|
||||
@@ -45,13 +54,8 @@ export class Users extends Collection {
|
||||
|
||||
// Deserializes
|
||||
forEach(users, user => {
|
||||
const {groups} = user
|
||||
try {
|
||||
user.groups = groups ? JSON.parse(groups) : []
|
||||
} catch (_) {
|
||||
console.warn('cannot parse user.groups:', groups)
|
||||
user.groups = []
|
||||
}
|
||||
user.groups = parseProp('user', user, 'groups', [])
|
||||
user.preferences = parseProp('user', user, 'preferences', {})
|
||||
})
|
||||
|
||||
return users
|
||||
|
||||
16
src/models/utils.js
Normal file
16
src/models/utils.js
Normal file
@@ -0,0 +1,16 @@
|
||||
export const parseProp = (type, obj, name, defaultValue) => {
|
||||
const value = obj[name]
|
||||
if (
|
||||
value == null ||
|
||||
value === '' // do not warn on this trivial and minor error
|
||||
) {
|
||||
return defaultValue
|
||||
}
|
||||
try {
|
||||
return JSON.parse(value)
|
||||
} catch (error) {
|
||||
// do not display the error because it can occurs a lot and fill
|
||||
// up log files
|
||||
return defaultValue
|
||||
}
|
||||
}
|
||||
42
src/node_modules/constant-stream.js
generated
vendored
Normal file
42
src/node_modules/constant-stream.js
generated
vendored
Normal file
@@ -0,0 +1,42 @@
|
||||
import from2 from 'from2'
|
||||
|
||||
const constantStream = (data, n = 1) => {
|
||||
if (!Buffer.isBuffer(data)) {
|
||||
data = Buffer.from(data)
|
||||
}
|
||||
|
||||
const { length } = data
|
||||
|
||||
if (!length) {
|
||||
throw new Error('data should not be empty')
|
||||
}
|
||||
|
||||
n *= length
|
||||
let currentLength = length
|
||||
|
||||
return from2((size, next) => {
|
||||
if (n <= 0) {
|
||||
return next(null, null)
|
||||
}
|
||||
|
||||
if (n < size) {
|
||||
size = n
|
||||
}
|
||||
|
||||
if (size < currentLength) {
|
||||
const m = Math.floor(size / length) * length || length
|
||||
n -= m
|
||||
return next(null, data.slice(0, m))
|
||||
}
|
||||
|
||||
// if more than twice the data length is requested, repeat the data
|
||||
if (size > currentLength * 2) {
|
||||
currentLength = Math.floor(size / length) * length
|
||||
data = Buffer.alloc(currentLength, data)
|
||||
}
|
||||
|
||||
n -= currentLength
|
||||
return next(null, data)
|
||||
})
|
||||
}
|
||||
export { constantStream as default }
|
||||
@@ -23,13 +23,19 @@ export default function proxyConsole (ws, vmConsole, sessionId) {
|
||||
'', ''
|
||||
].join('\r\n'))
|
||||
|
||||
const onSend = (error) => {
|
||||
if (error) {
|
||||
debug('error sending to the XO client: %s', error.stack || error.message || error)
|
||||
}
|
||||
}
|
||||
|
||||
socket.pipe(partialStream('\r\n\r\n', headers => {
|
||||
// TODO: check status code 200.
|
||||
debug('connected')
|
||||
})).on('data', data => {
|
||||
if (!closed) {
|
||||
// Encode to base 64.
|
||||
ws.send(data.toString('base64'))
|
||||
ws.send(data.toString('base64'), onSend)
|
||||
}
|
||||
}).on('end', () => {
|
||||
if (!closed) {
|
||||
@@ -50,7 +56,7 @@ export default function proxyConsole (ws, vmConsole, sessionId) {
|
||||
.on('message', data => {
|
||||
if (!closed) {
|
||||
// Decode from base 64.
|
||||
socket.write(new Buffer(data, 'base64'))
|
||||
socket.write(Buffer.from(data, 'base64'))
|
||||
}
|
||||
})
|
||||
.on('close', () => {
|
||||
|
||||
@@ -1,5 +1,4 @@
|
||||
import eventToPromise from 'event-to-promise'
|
||||
import getStream from 'get-stream'
|
||||
import through2 from 'through2'
|
||||
|
||||
import {
|
||||
@@ -8,14 +7,16 @@ import {
|
||||
|
||||
import {
|
||||
addChecksumToReadStream,
|
||||
getPseudoRandomBytes,
|
||||
noop,
|
||||
pCatch,
|
||||
streamToBuffer,
|
||||
validChecksumOfReadStream
|
||||
} from '../utils'
|
||||
|
||||
export default class RemoteHandlerAbstract {
|
||||
constructor (remote) {
|
||||
this._remote = parse({...remote})
|
||||
this._remote = {...remote, ...parse(remote.url)}
|
||||
if (this._remote.type !== this.type) {
|
||||
throw new Error('Incorrect remote type')
|
||||
}
|
||||
@@ -47,12 +48,41 @@ export default class RemoteHandlerAbstract {
|
||||
throw new Error('Not implemented')
|
||||
}
|
||||
|
||||
async test () {
|
||||
const testFileName = `${Date.now()}.test`
|
||||
const data = getPseudoRandomBytes(1024 * 1024)
|
||||
let step = 'write'
|
||||
try {
|
||||
await this.outputFile(testFileName, data)
|
||||
step = 'read'
|
||||
const read = await this.readFile(testFileName)
|
||||
if (data.compare(read) !== 0) {
|
||||
throw new Error('output and input did not match')
|
||||
}
|
||||
return {
|
||||
success: true
|
||||
}
|
||||
} catch (error) {
|
||||
return {
|
||||
success: false,
|
||||
step,
|
||||
file: testFileName,
|
||||
error: error.message || String(error)
|
||||
}
|
||||
} finally {
|
||||
this.unlink(testFileName).catch(noop)
|
||||
}
|
||||
}
|
||||
|
||||
async outputFile (file, data, options) {
|
||||
return this._outputFile(file, data, options)
|
||||
return this._outputFile(file, data, {
|
||||
flags: 'wx',
|
||||
...options
|
||||
})
|
||||
}
|
||||
|
||||
async _outputFile (file, data, options) {
|
||||
const stream = await this.createOutputStream(file)
|
||||
const stream = await this.createOutputStream(file, options)
|
||||
const promise = eventToPromise(stream, 'finish')
|
||||
stream.end(data)
|
||||
return promise
|
||||
@@ -62,8 +92,8 @@ export default class RemoteHandlerAbstract {
|
||||
return this._readFile(file, options)
|
||||
}
|
||||
|
||||
async _readFile (file, options) {
|
||||
return getStream(await this.createReadStream(file, options))
|
||||
_readFile (file, options) {
|
||||
return this.createReadStream(file, options).then(streamToBuffer)
|
||||
}
|
||||
|
||||
async rename (oldPath, newPath) {
|
||||
@@ -82,53 +112,72 @@ export default class RemoteHandlerAbstract {
|
||||
throw new Error('Not implemented')
|
||||
}
|
||||
|
||||
async createReadStream (file, {
|
||||
createReadStream (file, {
|
||||
checksum = false,
|
||||
ignoreMissingChecksum = false,
|
||||
...options
|
||||
} = {}) {
|
||||
const streamP = this._createReadStream(file, options).then(async stream => {
|
||||
await eventToPromise(stream, 'readable')
|
||||
const streamP = this._createReadStream(file, options).then(stream => {
|
||||
// detect early errors
|
||||
let promise = eventToPromise(stream, 'readable')
|
||||
|
||||
if (stream.length === undefined) {
|
||||
stream.length = await this.getSize(file)::pCatch(noop)
|
||||
// try to add the length prop if missing and not a range stream
|
||||
if (
|
||||
stream.length === undefined &&
|
||||
options.end === undefined &&
|
||||
options.start === undefined
|
||||
) {
|
||||
promise = Promise.all([ promise, this.getSize(file).then(size => {
|
||||
stream.length = size
|
||||
}, noop) ])
|
||||
}
|
||||
|
||||
return stream
|
||||
return promise.then(() => stream)
|
||||
})
|
||||
|
||||
if (!checksum) {
|
||||
return streamP
|
||||
}
|
||||
|
||||
try {
|
||||
checksum = await this.readFile(`${file}.checksum`)
|
||||
} catch (error) {
|
||||
if (error.code === 'ENOENT' && ignoreMissingChecksum) {
|
||||
return streamP
|
||||
// avoid a unhandled rejection warning
|
||||
streamP.catch(noop)
|
||||
|
||||
return this.readFile(`${file}.checksum`).then(
|
||||
checksum => streamP.then(stream => {
|
||||
const { length } = stream
|
||||
stream = validChecksumOfReadStream(stream, String(checksum).trim())
|
||||
stream.length = length
|
||||
|
||||
return stream
|
||||
}),
|
||||
error => {
|
||||
if (ignoreMissingChecksum && error && error.code === 'ENOENT') {
|
||||
return streamP
|
||||
}
|
||||
throw error
|
||||
}
|
||||
|
||||
throw error
|
||||
}
|
||||
|
||||
let stream = await streamP
|
||||
|
||||
const { length } = stream
|
||||
stream = validChecksumOfReadStream(stream, checksum.toString())
|
||||
stream.length = length
|
||||
|
||||
return stream
|
||||
)
|
||||
}
|
||||
|
||||
async _createReadStream (file, options) {
|
||||
throw new Error('Not implemented')
|
||||
}
|
||||
|
||||
async refreshChecksum (path) {
|
||||
const stream = addChecksumToReadStream(await this.createReadStream(path))
|
||||
stream.resume() // start reading the whole file
|
||||
const checksum = await stream.checksum
|
||||
await this.outputFile(`${path}.checksum`, checksum)
|
||||
}
|
||||
|
||||
async createOutputStream (file, {
|
||||
checksum = false,
|
||||
...options
|
||||
} = {}) {
|
||||
const streamP = this._createOutputStream(file, options)
|
||||
const streamP = this._createOutputStream(file, {
|
||||
flags: 'wx',
|
||||
...options
|
||||
})
|
||||
|
||||
if (!checksum) {
|
||||
return streamP
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import fs from 'fs-promise'
|
||||
import startsWith from 'lodash.startswith'
|
||||
import startsWith from 'lodash/startsWith'
|
||||
import {
|
||||
dirname,
|
||||
resolve
|
||||
@@ -12,16 +12,21 @@ import {
|
||||
|
||||
export default class LocalHandler extends RemoteHandlerAbstract {
|
||||
get type () {
|
||||
return 'local'
|
||||
return 'file'
|
||||
}
|
||||
|
||||
_getRealPath () {
|
||||
return this._remote.path
|
||||
}
|
||||
|
||||
_getFilePath (file) {
|
||||
const parts = [this._remote.path]
|
||||
const realPath = this._getRealPath()
|
||||
const parts = [realPath]
|
||||
if (file) {
|
||||
parts.push(file)
|
||||
}
|
||||
const path = resolve.apply(null, parts)
|
||||
if (!startsWith(path, this._remote.path)) {
|
||||
if (!startsWith(path, realPath)) {
|
||||
throw new Error('Remote path is unavailable')
|
||||
}
|
||||
return path
|
||||
@@ -30,8 +35,9 @@ export default class LocalHandler extends RemoteHandlerAbstract {
|
||||
async _sync () {
|
||||
if (this._remote.enabled) {
|
||||
try {
|
||||
await fs.ensureDir(this._remote.path)
|
||||
await fs.access(this._remote.path, fs.R_OK | fs.W_OK)
|
||||
const path = this._getRealPath()
|
||||
await fs.ensureDir(path)
|
||||
await fs.access(path, fs.R_OK | fs.W_OK)
|
||||
} catch (exc) {
|
||||
this._remote.enabled = false
|
||||
this._remote.error = exc.message
|
||||
@@ -47,7 +53,7 @@ export default class LocalHandler extends RemoteHandlerAbstract {
|
||||
async _outputFile (file, data, options) {
|
||||
const path = this._getFilePath(file)
|
||||
await fs.ensureDir(dirname(path))
|
||||
await fs.writeFile(this._getFilePath(file), data, options)
|
||||
await fs.writeFile(path, data, options)
|
||||
}
|
||||
|
||||
async _readFile (file, options) {
|
||||
@@ -80,5 +86,4 @@ export default class LocalHandler extends RemoteHandlerAbstract {
|
||||
const stats = await fs.stat(this._getFilePath(file))
|
||||
return stats.size
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -11,11 +11,15 @@ export default class NfsHandler extends LocalHandler {
|
||||
return 'nfs'
|
||||
}
|
||||
|
||||
_getRealPath () {
|
||||
return `/run/xo-server/mounts/${this._remote.id}`
|
||||
}
|
||||
|
||||
async _loadRealMounts () {
|
||||
let stdout
|
||||
const mounted = {}
|
||||
try {
|
||||
({stdout} = await execa('findmnt', ['-P', '-t', 'nfs,nfs4', '--output', 'SOURCE,TARGET', '--noheadings']))
|
||||
stdout = await execa.stdout('findmnt', ['-P', '-t', 'nfs,nfs4', '--output', 'SOURCE,TARGET', '--noheadings'])
|
||||
const regex = /^SOURCE="([^:]*):(.*)" TARGET="(.*)"$/
|
||||
forEach(stdout.split('\n'), m => {
|
||||
if (m) {
|
||||
@@ -37,27 +41,27 @@ export default class NfsHandler extends LocalHandler {
|
||||
return mounted
|
||||
}
|
||||
|
||||
_matchesRealMount (remote) {
|
||||
return remote.path in this._realMounts
|
||||
_matchesRealMount () {
|
||||
return this._getRealPath() in this._realMounts
|
||||
}
|
||||
|
||||
async _mount (remote) {
|
||||
await fs.ensureDir(remote.path)
|
||||
return execa('mount', ['-t', 'nfs', '-o', 'vers=3', `${remote.host}:/${remote.share}`, remote.path])
|
||||
async _mount () {
|
||||
await fs.ensureDir(this._getRealPath())
|
||||
return execa('mount', ['-t', 'nfs', '-o', 'vers=3', `${this._remote.host}:${this._remote.path}`, this._getRealPath()])
|
||||
}
|
||||
|
||||
async _sync () {
|
||||
await this._loadRealMounts()
|
||||
if (this._matchesRealMount(this._remote) && !this._remote.enabled) {
|
||||
if (this._matchesRealMount() && !this._remote.enabled) {
|
||||
try {
|
||||
await this._umount(this._remote)
|
||||
} catch (exc) {
|
||||
this._remote.enabled = true
|
||||
this._remote.error = exc.message
|
||||
}
|
||||
} else if (!this._matchesRealMount(this._remote) && this._remote.enabled) {
|
||||
} else if (!this._matchesRealMount() && this._remote.enabled) {
|
||||
try {
|
||||
await this._mount(this._remote)
|
||||
await this._mount()
|
||||
} catch (exc) {
|
||||
this._remote.enabled = false
|
||||
this._remote.error = exc.message
|
||||
@@ -75,6 +79,6 @@ export default class NfsHandler extends LocalHandler {
|
||||
}
|
||||
|
||||
async _umount (remote) {
|
||||
await execa('umount', [remote.path])
|
||||
await execa('umount', [this._getRealPath()])
|
||||
}
|
||||
}
|
||||
|
||||
@@ -54,6 +54,11 @@ export default class SmbHandler extends RemoteHandlerAbstract {
|
||||
? this._remote.path
|
||||
: ''
|
||||
|
||||
// Ensure remote path is a directory.
|
||||
if (path !== '' && path[path.length - 1] !== '\\') {
|
||||
path += '\\'
|
||||
}
|
||||
|
||||
if (file) {
|
||||
path += file.replace(/\//g, '\\')
|
||||
}
|
||||
|
||||
@@ -1,5 +1,3 @@
|
||||
import paramsVector from 'job/params-vector'
|
||||
|
||||
export default {
|
||||
$schema: 'http://json-schema.org/draft-04/schema#',
|
||||
type: 'object',
|
||||
@@ -27,7 +25,13 @@ export default {
|
||||
type: 'string',
|
||||
description: 'called method'
|
||||
},
|
||||
paramsVector
|
||||
paramsVector: {
|
||||
type: 'object'
|
||||
},
|
||||
timeout: {
|
||||
type: 'number',
|
||||
description: 'number of milliseconds after which the job is considered failed'
|
||||
}
|
||||
},
|
||||
required: [
|
||||
'type',
|
||||
|
||||
@@ -1,59 +0,0 @@
|
||||
export default {
|
||||
$schema: 'http://json-schema.org/draft-04/schema#',
|
||||
type: 'object',
|
||||
properties: {
|
||||
type: {
|
||||
enum: ['crossProduct']
|
||||
},
|
||||
items: {
|
||||
type: 'array',
|
||||
description: 'vector of values to multiply with others vectors',
|
||||
items: {
|
||||
type: 'object',
|
||||
properties: {
|
||||
type: {
|
||||
enum: ['set']
|
||||
},
|
||||
values: {
|
||||
type: 'array',
|
||||
items: {
|
||||
type: 'object'
|
||||
},
|
||||
minItems: 1
|
||||
}
|
||||
},
|
||||
required: [
|
||||
'type',
|
||||
'values'
|
||||
]
|
||||
},
|
||||
minItems: 1
|
||||
}
|
||||
},
|
||||
required: [
|
||||
'type',
|
||||
'items'
|
||||
]
|
||||
}
|
||||
|
||||
/* Example:
|
||||
{
|
||||
"type": "cross product",
|
||||
"items": [
|
||||
{
|
||||
"type": "set",
|
||||
"values": [
|
||||
{"id": 0, "name": "snapshost de 0"},
|
||||
{"id": 1, "name": "snapshost de 1"}
|
||||
],
|
||||
},
|
||||
{
|
||||
"type": "set",
|
||||
"values": [
|
||||
{"force": true}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
*/
|
||||
@@ -20,7 +20,7 @@ export default {
|
||||
},
|
||||
unloadable: {
|
||||
type: 'boolean',
|
||||
default: 'true',
|
||||
default: true,
|
||||
description: 'whether or not this plugin can be unloaded'
|
||||
},
|
||||
configuration: {
|
||||
@@ -30,6 +30,14 @@ export default {
|
||||
configurationSchema: {
|
||||
$ref: 'http://json-schema.org/draft-04/schema#',
|
||||
description: 'configuration schema for this plugin (not present if not configurable)'
|
||||
},
|
||||
testable: {
|
||||
type: 'boolean',
|
||||
description: 'whether or not this plugin can be tested'
|
||||
},
|
||||
testSchema: {
|
||||
$ref: 'http://json-schema.org/draft-04/schema#',
|
||||
description: 'test schema for this plugin'
|
||||
}
|
||||
},
|
||||
required: [
|
||||
|
||||
50
src/schemas/user.js
Normal file
50
src/schemas/user.js
Normal file
@@ -0,0 +1,50 @@
|
||||
export default {
|
||||
$schema: 'http://json-schema.org/draft-04/schema#',
|
||||
type: 'object',
|
||||
properties: {
|
||||
id: {
|
||||
type: 'string',
|
||||
description: 'unique identifier for this user'
|
||||
},
|
||||
email: {
|
||||
type: 'string',
|
||||
description: 'email address of this user'
|
||||
},
|
||||
groups: {
|
||||
type: 'array',
|
||||
items: {
|
||||
type: 'string'
|
||||
},
|
||||
description: 'identifier of groups this user belong to'
|
||||
},
|
||||
permission: {
|
||||
enum: ['none', 'read', 'write', 'admin'],
|
||||
description: 'root permission for this user, none and admin are the only significant ones'
|
||||
},
|
||||
preferences: {
|
||||
type: 'object',
|
||||
properties: {
|
||||
lang: { type: 'string' },
|
||||
sshKeys: {
|
||||
type: 'array',
|
||||
items: {
|
||||
type: 'object',
|
||||
properties: {
|
||||
key: { type: 'string' },
|
||||
title: { type: 'string' }
|
||||
},
|
||||
required: [
|
||||
'key',
|
||||
'title'
|
||||
]
|
||||
}
|
||||
}
|
||||
},
|
||||
description: 'various user preferences'
|
||||
}
|
||||
},
|
||||
required: [
|
||||
'id',
|
||||
'email'
|
||||
]
|
||||
}
|
||||
44
src/stream-to-existing-buffer.js
Normal file
44
src/stream-to-existing-buffer.js
Normal file
@@ -0,0 +1,44 @@
|
||||
import assert from 'assert'
|
||||
|
||||
const streamToExistingBuffer = (
|
||||
stream,
|
||||
buffer,
|
||||
offset = 0,
|
||||
end = buffer.length
|
||||
) => new Promise((resolve, reject) => {
|
||||
assert(offset >= 0)
|
||||
assert(end > offset)
|
||||
assert(end <= buffer.length)
|
||||
|
||||
let i = offset
|
||||
|
||||
const onData = chunk => {
|
||||
const prev = i
|
||||
i += chunk.length
|
||||
|
||||
if (i > end) {
|
||||
return onError(new Error('too much data'))
|
||||
}
|
||||
|
||||
chunk.copy(buffer, prev)
|
||||
}
|
||||
stream.on('data', onData)
|
||||
|
||||
const clean = () => {
|
||||
stream.removeListener('data', onData)
|
||||
stream.removeListener('end', onEnd)
|
||||
stream.removeListener('error', onError)
|
||||
}
|
||||
const onEnd = () => {
|
||||
resolve(i - offset)
|
||||
clean()
|
||||
}
|
||||
stream.on('end', onEnd)
|
||||
const onError = error => {
|
||||
reject(error)
|
||||
clean()
|
||||
}
|
||||
stream.on('error', onError)
|
||||
})
|
||||
|
||||
export { streamToExistingBuffer as default }
|
||||
20
src/stream-to-existing-buffer.spec.js
Normal file
20
src/stream-to-existing-buffer.spec.js
Normal file
@@ -0,0 +1,20 @@
|
||||
/* eslint-env jest */
|
||||
|
||||
import { createReadStream, readFile } from 'fs'
|
||||
import { fromCallback } from 'promise-toolbox'
|
||||
|
||||
import streamToExistingBuffer from './stream-to-existing-buffer'
|
||||
|
||||
describe('streamToExistingBuffer()', () => {
|
||||
it('read the content of a stream in a buffer', async () => {
|
||||
const stream = createReadStream(__filename)
|
||||
|
||||
const expected = await fromCallback(cb => readFile(__filename, 'utf-8', cb))
|
||||
|
||||
const buf = Buffer.allocUnsafe(expected.length + 1)
|
||||
buf[0] = 'A'.charCodeAt()
|
||||
await streamToExistingBuffer(stream, buf, 1)
|
||||
|
||||
expect(String(buf)).toBe(`A${expected}`)
|
||||
})
|
||||
})
|
||||
27
src/stream-to-new-buffer.js
Normal file
27
src/stream-to-new-buffer.js
Normal file
@@ -0,0 +1,27 @@
|
||||
const streamToNewBuffer = stream => new Promise((resolve, reject) => {
|
||||
const chunks = []
|
||||
let length = 0
|
||||
|
||||
const onData = chunk => {
|
||||
chunks.push(chunk)
|
||||
length += chunk.length
|
||||
}
|
||||
stream.on('data', onData)
|
||||
|
||||
const clean = () => {
|
||||
stream.removeListener('data', onData)
|
||||
stream.removeListener('end', onEnd)
|
||||
stream.removeListener('error', onError)
|
||||
}
|
||||
const onEnd = () => {
|
||||
resolve(Buffer.concat(chunks, length))
|
||||
clean()
|
||||
}
|
||||
stream.on('end', onEnd)
|
||||
const onError = error => {
|
||||
reject(error)
|
||||
clean()
|
||||
}
|
||||
stream.on('error', onError)
|
||||
})
|
||||
export { streamToNewBuffer as default }
|
||||
231
src/utils.js
231
src/utils.js
@@ -1,19 +1,33 @@
|
||||
import base64url from 'base64url'
|
||||
import eventToPromise from 'event-to-promise'
|
||||
import forEach from 'lodash.foreach'
|
||||
import has from 'lodash.has'
|
||||
import forEach from 'lodash/forEach'
|
||||
import has from 'lodash/has'
|
||||
import highland from 'highland'
|
||||
import humanFormat from 'human-format'
|
||||
import invert from 'lodash.invert'
|
||||
import isArray from 'lodash.isarray'
|
||||
import isString from 'lodash.isstring'
|
||||
import invert from 'lodash/invert'
|
||||
import isArray from 'lodash/isArray'
|
||||
import isString from 'lodash/isString'
|
||||
import keys from 'lodash/keys'
|
||||
import kindOf from 'kindof'
|
||||
import multiKeyHashInt from 'multikey-hash'
|
||||
import pick from 'lodash/pick'
|
||||
import tmp from 'tmp'
|
||||
import xml2js from 'xml2js'
|
||||
import { resolve } from 'path'
|
||||
|
||||
// Moment timezone can be loaded only one time, it's a workaround to load
|
||||
// the latest version because cron module uses an old version of moment which
|
||||
// does not implement `guess` function for example.
|
||||
import 'moment-timezone'
|
||||
|
||||
import through2 from 'through2'
|
||||
import { CronJob } from 'cron'
|
||||
import { Readable } from 'stream'
|
||||
import { utcFormat, utcParse } from 'd3-time-format'
|
||||
import {
|
||||
all as pAll,
|
||||
defer,
|
||||
fromCallback,
|
||||
promisify,
|
||||
reflect as pReflect
|
||||
} from 'promise-toolbox'
|
||||
@@ -21,9 +35,6 @@ import {
|
||||
createHash,
|
||||
randomBytes
|
||||
} from 'crypto'
|
||||
import { Readable } from 'stream'
|
||||
import through2 from 'through2'
|
||||
import {utcFormat as d3TimeFormat} from 'd3-time-format'
|
||||
|
||||
// ===================================================================
|
||||
|
||||
@@ -45,23 +56,13 @@ export function bufferToStream (buf) {
|
||||
return stream
|
||||
}
|
||||
|
||||
export async function streamToBuffer (stream) {
|
||||
return new Promise((resolve, reject) => {
|
||||
const bufs = []
|
||||
|
||||
stream.on('error', reject)
|
||||
stream.on('data', data => {
|
||||
bufs.push(data)
|
||||
})
|
||||
stream.on('end', () => resolve(Buffer.concat(bufs)))
|
||||
})
|
||||
}
|
||||
export streamToBuffer from './stream-to-new-buffer'
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
export function camelToSnakeCase (string) {
|
||||
return string.replace(
|
||||
/([a-z])([A-Z])/g,
|
||||
/([a-z0-9])([A-Z])/g,
|
||||
(_, prevChar, currChar) => `${prevChar}_${currChar.toLowerCase()}`
|
||||
)
|
||||
}
|
||||
@@ -75,6 +76,27 @@ export const createRawObject = Object.create
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
// Only works with string items!
|
||||
export const diffItems = (coll1, coll2) => {
|
||||
const removed = createRawObject()
|
||||
forEach(coll2, value => {
|
||||
removed[value] = true
|
||||
})
|
||||
|
||||
const added = []
|
||||
forEach(coll1, value => {
|
||||
if (value in removed) {
|
||||
delete removed[value]
|
||||
} else {
|
||||
added.push(value)
|
||||
}
|
||||
})
|
||||
|
||||
return [ added, keys(removed) ]
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
const ALGORITHM_TO_ID = {
|
||||
md5: '1',
|
||||
sha256: '5',
|
||||
@@ -179,8 +201,15 @@ export function extractProperty (obj, prop) {
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
export const generateUnsecureToken = (n = 32) => {
|
||||
const bytes = new Buffer(n)
|
||||
export const getUserPublicProperties = user => pick(
|
||||
user.properties || user,
|
||||
'id', 'email', 'groups', 'permission', 'preferences', 'provider'
|
||||
)
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
export const getPseudoRandomBytes = n => {
|
||||
const bytes = Buffer.allocUnsafe(n)
|
||||
|
||||
const odd = n & 1
|
||||
for (let i = 0, m = n - odd; i < m; i += 2) {
|
||||
@@ -191,13 +220,15 @@ export const generateUnsecureToken = (n = 32) => {
|
||||
bytes.writeUInt8(Math.random() * 256 | 0, n - 1)
|
||||
}
|
||||
|
||||
return base64url(bytes)
|
||||
return bytes
|
||||
}
|
||||
|
||||
export const generateUnsecureToken = (n = 32) => base64url(getPseudoRandomBytes(n))
|
||||
|
||||
// Generate a secure random Base64 string.
|
||||
export const generateToken = (randomBytes => {
|
||||
return (n = 32) => randomBytes(n).then(base64url)
|
||||
})(randomBytes::promisify())
|
||||
})(promisify(randomBytes))
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
@@ -239,21 +270,30 @@ export const parseXml = (function () {
|
||||
// - methods are already bound and chainable
|
||||
export const lightSet = collection => {
|
||||
const data = createRawObject()
|
||||
collection && forEach(collection, value => {
|
||||
data[value] = true
|
||||
})
|
||||
collection = null
|
||||
if (collection) {
|
||||
forEach(collection, value => {
|
||||
data[value] = true
|
||||
})
|
||||
collection = null
|
||||
}
|
||||
|
||||
const set = {
|
||||
add: value => (data[value] = true, set),
|
||||
add: value => {
|
||||
data[value] = true
|
||||
return set
|
||||
},
|
||||
clear: () => {
|
||||
for (const value in data) {
|
||||
delete data[value]
|
||||
}
|
||||
return set
|
||||
},
|
||||
delete: value => (delete data[value], set),
|
||||
has: value => data[value]
|
||||
delete: value => {
|
||||
delete data[value]
|
||||
return set
|
||||
},
|
||||
has: value => data[value],
|
||||
toArray: () => keys(data)
|
||||
}
|
||||
return set
|
||||
}
|
||||
@@ -306,7 +346,7 @@ export function pSettle (promises) {
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
export {
|
||||
export { // eslint-disable-line no-duplicate-imports
|
||||
all as pAll,
|
||||
catchPlus as pCatch,
|
||||
delay as pDelay,
|
||||
@@ -353,22 +393,24 @@ export const popProperty = obj => {
|
||||
|
||||
// Format a date in ISO 8601 in a safe way to be used in filenames
|
||||
// (even on Windows).
|
||||
export const safeDateFormat = d3TimeFormat('%Y%m%dT%H%M%SZ')
|
||||
export const safeDateFormat = utcFormat('%Y%m%dT%H%M%SZ')
|
||||
|
||||
export const safeDateParse = utcParse('%Y%m%dT%H%M%SZ')
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
// This functions are often used throughout xo-server.
|
||||
//
|
||||
// Exports them from here to avoid direct dependencies on lodash.
|
||||
export { default as forEach } from 'lodash.foreach'
|
||||
export { default as isArray } from 'lodash.isarray'
|
||||
export { default as isBoolean } from 'lodash.isboolean'
|
||||
export { default as isEmpty } from 'lodash.isempty'
|
||||
export { default as isFunction } from 'lodash.isfunction'
|
||||
export { default as isInteger } from 'lodash.isinteger'
|
||||
export { default as isObject } from 'lodash.isobject'
|
||||
export { default as isString } from 'lodash.isstring'
|
||||
export { default as mapToArray } from 'lodash.map'
|
||||
// Exports them from here to avoid direct dependencies on lodash/
|
||||
export { default as forEach } from 'lodash/forEach' // eslint-disable-line no-duplicate-imports
|
||||
export { default as isArray } from 'lodash/isArray' // eslint-disable-line no-duplicate-imports
|
||||
export { default as isBoolean } from 'lodash/isBoolean'
|
||||
export { default as isEmpty } from 'lodash/isEmpty'
|
||||
export { default as isFunction } from 'lodash/isFunction'
|
||||
export { default as isInteger } from 'lodash/isInteger'
|
||||
export { default as isObject } from 'lodash/isObject'
|
||||
export { default as isString } from 'lodash/isString' // eslint-disable-line no-duplicate-imports
|
||||
export { default as mapToArray } from 'lodash/map'
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
@@ -412,7 +454,7 @@ export function map (
|
||||
export const multiKeyHash = (...args) => new Promise(resolve => {
|
||||
const hash = multiKeyHashInt(...args)
|
||||
|
||||
const buf = new Buffer(4)
|
||||
const buf = Buffer.allocUnsafe(4)
|
||||
buf.writeUInt32LE(hash, 0)
|
||||
|
||||
resolve(base64url(buf))
|
||||
@@ -420,6 +462,11 @@ export const multiKeyHash = (...args) => new Promise(resolve => {
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
export const resolveSubpath = (root, path) =>
|
||||
resolve(root, `./${resolve('/', path)}`)
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
export const streamToArray = (stream, {
|
||||
filter,
|
||||
mapper
|
||||
@@ -436,27 +483,30 @@ export const streamToArray = (stream, {
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
export const scheduleFn = (cronPattern, fn) => {
|
||||
export const scheduleFn = (cronTime, fn, timeZone) => {
|
||||
let running = false
|
||||
|
||||
const job = new CronJob(cronPattern, async () => {
|
||||
if (running) {
|
||||
return
|
||||
}
|
||||
const job = new CronJob({
|
||||
cronTime,
|
||||
onTick: async () => {
|
||||
if (running) {
|
||||
return
|
||||
}
|
||||
|
||||
running = true
|
||||
running = true
|
||||
|
||||
try {
|
||||
await fn()
|
||||
} catch (error) {
|
||||
console.error('[WARN] scheduled function:', error && error.stack || error)
|
||||
} finally {
|
||||
running = false
|
||||
}
|
||||
try {
|
||||
await fn()
|
||||
} catch (error) {
|
||||
console.error('[WARN] scheduled function:', (error && error.stack) || error)
|
||||
} finally {
|
||||
running = false
|
||||
}
|
||||
},
|
||||
start: true,
|
||||
timeZone
|
||||
})
|
||||
|
||||
job.start()
|
||||
|
||||
return () => {
|
||||
job.stop()
|
||||
}
|
||||
@@ -464,5 +514,68 @@ export const scheduleFn = (cronPattern, fn) => {
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
// Create a serializable object from an error.
|
||||
export const serializeError = error => ({
|
||||
message: error.message,
|
||||
stack: error.stack,
|
||||
...error // Copy enumerable properties.
|
||||
})
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
// Create an array which contains the results of one thunk function.
|
||||
// Only works with synchronous thunks.
|
||||
export const thunkToArray = thunk => {
|
||||
const values = []
|
||||
thunk(::values.push)
|
||||
return values
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
// Creates a new function which throws an error.
|
||||
//
|
||||
// ```js
|
||||
// promise.catch(throwFn('an error has occured'))
|
||||
//
|
||||
// function foo (param = throwFn('param is required')()) {}
|
||||
// ```
|
||||
export const throwFn = error => () => {
|
||||
throw (
|
||||
isString(error)
|
||||
? new Error(error)
|
||||
: error
|
||||
)
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
export const tmpDir = () => fromCallback(cb => tmp.dir(cb))
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
// Wrap a value in a function.
|
||||
export const wrap = value => () => value
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
export const mapFilter = (collection, iteratee) => {
|
||||
const result = []
|
||||
forEach(collection, (...args) => {
|
||||
const value = iteratee(...args)
|
||||
if (value) {
|
||||
result.push(value)
|
||||
}
|
||||
})
|
||||
return result
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
export const splitFirst = (string, separator) => {
|
||||
const i = string.indexOf(separator)
|
||||
return i === -1 ? null : [
|
||||
string.slice(0, i),
|
||||
string.slice(i + separator.length)
|
||||
]
|
||||
}
|
||||
|
||||
@@ -1,19 +1,14 @@
|
||||
/* eslint-env mocha */
|
||||
|
||||
import expect from 'must'
|
||||
import sinon from 'sinon'
|
||||
|
||||
// ===================================================================
|
||||
/* eslint-env jest */
|
||||
|
||||
import {
|
||||
camelToSnakeCase,
|
||||
createRawObject,
|
||||
diffItems,
|
||||
ensureArray,
|
||||
extractProperty,
|
||||
formatXml,
|
||||
generateToken,
|
||||
parseSize,
|
||||
pFinally,
|
||||
pSettle
|
||||
} from './utils'
|
||||
|
||||
@@ -21,57 +16,69 @@ import {
|
||||
|
||||
describe('camelToSnakeCase()', function () {
|
||||
it('converts a string from camelCase to snake_case', function () {
|
||||
expect(camelToSnakeCase('fooBar')).to.equal('foo_bar')
|
||||
expect(camelToSnakeCase('fooBar')).toBe('foo_bar')
|
||||
expect(camelToSnakeCase('ipv4Allowed')).toBe('ipv4_allowed')
|
||||
})
|
||||
|
||||
it('does not alter snake_case strings', function () {
|
||||
expect(camelToSnakeCase('foo_bar')).to.equal('foo_bar')
|
||||
expect(camelToSnakeCase('foo_bar')).toBe('foo_bar')
|
||||
expect(camelToSnakeCase('ipv4_allowed')).toBe('ipv4_allowed')
|
||||
})
|
||||
|
||||
it('does not alter upper case letters expect those from the camelCase', function () {
|
||||
expect(camelToSnakeCase('fooBar_BAZ')).to.equal('foo_bar_BAZ')
|
||||
expect(camelToSnakeCase('fooBar_BAZ')).toBe('foo_bar_BAZ')
|
||||
})
|
||||
})
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
describe('createRawObject()', () => {
|
||||
it('returns an object', () => {
|
||||
expect(createRawObject()).to.be.an.object()
|
||||
})
|
||||
|
||||
it('returns an empty object', () => {
|
||||
expect(createRawObject()).to.be.empty()
|
||||
expect(createRawObject()).toEqual({})
|
||||
})
|
||||
|
||||
it('creates a new object each time', () => {
|
||||
expect(createRawObject()).to.not.equal(createRawObject())
|
||||
expect(createRawObject()).not.toBe(createRawObject())
|
||||
})
|
||||
|
||||
if (Object.getPrototypeOf) {
|
||||
it('creates an object without a prototype', () => {
|
||||
expect(Object.getPrototypeOf(createRawObject())).to.be.null()
|
||||
expect(Object.getPrototypeOf(createRawObject())).toBe(null)
|
||||
})
|
||||
}
|
||||
})
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
describe('diffItems', () => {
|
||||
it('computes the added/removed items between 2 iterables', () => {
|
||||
expect(diffItems(
|
||||
['foo', 'bar'],
|
||||
['baz', 'foo']
|
||||
)).toEqual([
|
||||
['bar'],
|
||||
['baz']
|
||||
])
|
||||
})
|
||||
})
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
describe('ensureArray()', function () {
|
||||
it('wrap the value in an array', function () {
|
||||
const value = 'foo'
|
||||
|
||||
expect(ensureArray(value)).to.eql([value])
|
||||
expect(ensureArray(value)).toEqual([value])
|
||||
})
|
||||
|
||||
it('returns an empty array for undefined', function () {
|
||||
expect(ensureArray(undefined)).to.eql([])
|
||||
expect(ensureArray(undefined)).toEqual([])
|
||||
})
|
||||
|
||||
it('returns the object itself if is already an array', function () {
|
||||
const array = ['foo', 'bar', 'baz']
|
||||
|
||||
expect(ensureArray(array)).to.equal(array)
|
||||
expect(ensureArray(array)).toBe(array)
|
||||
})
|
||||
})
|
||||
|
||||
@@ -82,15 +89,15 @@ describe('extractProperty()', function () {
|
||||
const value = {}
|
||||
const obj = { prop: value }
|
||||
|
||||
expect(extractProperty(obj, 'prop')).to.equal(value)
|
||||
expect(extractProperty(obj, 'prop')).toBe(value)
|
||||
})
|
||||
|
||||
it('removes the property from the object', function () {
|
||||
const value = {}
|
||||
const obj = { prop: value }
|
||||
|
||||
expect(extractProperty(obj, 'prop')).to.equal(value)
|
||||
expect(obj).to.not.have.property('prop')
|
||||
expect(extractProperty(obj, 'prop')).toBe(value)
|
||||
expect(obj.prop).not.toBeDefined()
|
||||
})
|
||||
})
|
||||
|
||||
@@ -105,7 +112,7 @@ describe('formatXml()', function () {
|
||||
{$: {baz: 'plip'}}
|
||||
]
|
||||
}
|
||||
})).to.equal(`<foo>
|
||||
})).toBe(`<foo>
|
||||
<bar baz="plop"/>
|
||||
<bar baz="plip"/>
|
||||
</foo>`)
|
||||
@@ -116,7 +123,7 @@ describe('formatXml()', function () {
|
||||
|
||||
describe('generateToken()', () => {
|
||||
it('generates a string', async () => {
|
||||
expect(await generateToken()).to.be.a.string()
|
||||
expect(typeof await generateToken()).toBe('string')
|
||||
})
|
||||
})
|
||||
|
||||
@@ -124,51 +131,21 @@ describe('generateToken()', () => {
|
||||
|
||||
describe('parseSize()', function () {
|
||||
it('parses a human size', function () {
|
||||
expect(parseSize('1G')).to.equal(1e9)
|
||||
expect(parseSize('1G')).toBe(1e9)
|
||||
})
|
||||
|
||||
it('returns the parameter if already a number', function () {
|
||||
expect(parseSize(1e6)).to.equal(1e6)
|
||||
expect(parseSize(1e6)).toBe(1e6)
|
||||
})
|
||||
|
||||
it('throws if the string cannot be parsed', function () {
|
||||
expect(function () {
|
||||
parseSize('foo')
|
||||
}).to.throw()
|
||||
}).toThrow()
|
||||
})
|
||||
|
||||
it('supports the B unit as suffix', function () {
|
||||
expect(parseSize('3MB')).to.equal(3e6)
|
||||
})
|
||||
})
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
describe('pFinally()', () => {
|
||||
it('calls a callback on resolution', async () => {
|
||||
const value = {}
|
||||
const spy = sinon.spy()
|
||||
|
||||
await expect(
|
||||
Promise.resolve(value)::pFinally(spy)
|
||||
).to.resolve.to.equal(
|
||||
value
|
||||
)
|
||||
|
||||
expect(spy.callCount).to.equal(1)
|
||||
})
|
||||
|
||||
it('calls a callback on rejection', async () => {
|
||||
const reason = {}
|
||||
const spy = sinon.spy()
|
||||
|
||||
await expect(
|
||||
Promise.reject(reason)::pFinally(spy)
|
||||
).to.reject.to.equal(
|
||||
reason
|
||||
)
|
||||
|
||||
expect(spy.callCount).to.equal(1)
|
||||
expect(parseSize('3MB')).toBe(3e6)
|
||||
})
|
||||
})
|
||||
|
||||
@@ -176,6 +153,7 @@ describe('pFinally()', () => {
|
||||
|
||||
describe('pSettle()', () => {
|
||||
it('works with arrays', async () => {
|
||||
const rejection = 'fatality'
|
||||
const [
|
||||
status1,
|
||||
status2,
|
||||
@@ -183,27 +161,29 @@ describe('pSettle()', () => {
|
||||
] = await pSettle([
|
||||
Promise.resolve(42),
|
||||
Math.PI,
|
||||
Promise.reject('fatality')
|
||||
Promise.reject(rejection)
|
||||
])
|
||||
|
||||
expect(status1.isRejected()).to.equal(false)
|
||||
expect(status2.isRejected()).to.equal(false)
|
||||
expect(status3.isRejected()).to.equal(true)
|
||||
expect(status1.isRejected()).toBe(false)
|
||||
expect(status2.isRejected()).toBe(false)
|
||||
expect(status3.isRejected()).toBe(true)
|
||||
|
||||
expect(status1.isFulfilled()).to.equal(true)
|
||||
expect(status2.isFulfilled()).to.equal(true)
|
||||
expect(status3.isFulfilled()).to.equal(false)
|
||||
expect(status1.isFulfilled()).toBe(true)
|
||||
expect(status2.isFulfilled()).toBe(true)
|
||||
expect(status3.isFulfilled()).toBe(false)
|
||||
|
||||
expect(status1.value()).to.equal(42)
|
||||
expect(status2.value()).to.equal(Math.PI)
|
||||
expect(::status3.value).to.throw()
|
||||
expect(status1.value()).toBe(42)
|
||||
expect(status2.value()).toBe(Math.PI)
|
||||
expect(::status3.value).toThrow()
|
||||
|
||||
expect(::status1.reason).to.throw()
|
||||
expect(::status2.reason).to.throw()
|
||||
expect(status3.reason()).to.equal('fatality')
|
||||
expect(::status1.reason).toThrow()
|
||||
expect(::status2.reason).toThrow()
|
||||
expect(status3.reason()).toBe(rejection)
|
||||
})
|
||||
|
||||
it('works with objects', async () => {
|
||||
const rejection = 'fatality'
|
||||
|
||||
const {
|
||||
a: status1,
|
||||
b: status2,
|
||||
@@ -211,23 +191,23 @@ describe('pSettle()', () => {
|
||||
} = await pSettle({
|
||||
a: Promise.resolve(42),
|
||||
b: Math.PI,
|
||||
c: Promise.reject('fatality')
|
||||
c: Promise.reject(rejection)
|
||||
})
|
||||
|
||||
expect(status1.isRejected()).to.equal(false)
|
||||
expect(status2.isRejected()).to.equal(false)
|
||||
expect(status3.isRejected()).to.equal(true)
|
||||
expect(status1.isRejected()).toBe(false)
|
||||
expect(status2.isRejected()).toBe(false)
|
||||
expect(status3.isRejected()).toBe(true)
|
||||
|
||||
expect(status1.isFulfilled()).to.equal(true)
|
||||
expect(status2.isFulfilled()).to.equal(true)
|
||||
expect(status3.isFulfilled()).to.equal(false)
|
||||
expect(status1.isFulfilled()).toBe(true)
|
||||
expect(status2.isFulfilled()).toBe(true)
|
||||
expect(status3.isFulfilled()).toBe(false)
|
||||
|
||||
expect(status1.value()).to.equal(42)
|
||||
expect(status2.value()).to.equal(Math.PI)
|
||||
expect(::status3.value).to.throw()
|
||||
expect(status1.value()).toBe(42)
|
||||
expect(status2.value()).toBe(Math.PI)
|
||||
expect(::status3.value).toThrow()
|
||||
|
||||
expect(::status1.reason).to.throw()
|
||||
expect(::status2.reason).to.throw()
|
||||
expect(status3.reason()).to.equal('fatality')
|
||||
expect(::status1.reason).toThrow()
|
||||
expect(::status2.reason).toThrow()
|
||||
expect(status3.reason()).toBe(rejection)
|
||||
})
|
||||
})
|
||||
|
||||
434
src/vhd-merge.js
434
src/vhd-merge.js
@@ -1,4 +1,10 @@
|
||||
import fu from 'struct-fu'
|
||||
// TODO: remove once completely merged in vhd.js
|
||||
|
||||
import assert from 'assert'
|
||||
import constantStream from 'constant-stream'
|
||||
import eventToPromise from 'event-to-promise'
|
||||
import fu from '@nraynaud/struct-fu'
|
||||
import isEqual from 'lodash/isEqual'
|
||||
|
||||
import {
|
||||
noop,
|
||||
@@ -39,6 +45,10 @@ const HARD_DISK_TYPE_DIFFERENCING = 4 // Delta backup.
|
||||
const BLOCK_UNUSED = 0xFFFFFFFF
|
||||
const BIT_MASK = 0x80
|
||||
|
||||
// unused block as buffer containing a uint32BE
|
||||
const BUF_BLOCK_UNUSED = Buffer.allocUnsafe(VHD_ENTRY_SIZE)
|
||||
BUF_BLOCK_UNUSED.writeUInt32BE(BLOCK_UNUSED, 0)
|
||||
|
||||
// ===================================================================
|
||||
|
||||
const fuFooter = fu.struct([
|
||||
@@ -91,7 +101,7 @@ const fuHeader = fu.struct([
|
||||
fu.uint8('parentUuid', 16),
|
||||
fu.uint32('parentTimestamp'),
|
||||
fu.uint32('reserved1'),
|
||||
fu.char('parentUnicodeName', 512),
|
||||
fu.char16be('parentUnicodeName', 512),
|
||||
fu.struct('parentLocatorEntry', [
|
||||
fu.uint32('platformCode'),
|
||||
fu.uint32('platformDataSpace'),
|
||||
@@ -144,40 +154,28 @@ const unpackField = (field, buf) => {
|
||||
}
|
||||
// ===================================================================
|
||||
|
||||
// Returns the checksum of a raw footer.
|
||||
// The raw footer is altered with the new sum.
|
||||
function checksumFooter (rawFooter) {
|
||||
const checksumField = fuFooter.fields.checksum
|
||||
// Returns the checksum of a raw struct.
|
||||
// The raw struct (footer or header) is altered with the new sum.
|
||||
function checksumStruct (rawStruct, struct) {
|
||||
const checksumField = struct.fields.checksum
|
||||
|
||||
let sum = 0
|
||||
|
||||
// Reset current sum.
|
||||
packField(checksumField, 0, rawFooter)
|
||||
packField(checksumField, 0, rawStruct)
|
||||
|
||||
for (let i = 0; i < VHD_FOOTER_SIZE; i++) {
|
||||
sum = (sum + rawFooter[i]) & 0xFFFFFFFF
|
||||
for (let i = 0, n = struct.size; i < n; i++) {
|
||||
sum = (sum + rawStruct[i]) & 0xFFFFFFFF
|
||||
}
|
||||
|
||||
sum = 0xFFFFFFFF - sum
|
||||
|
||||
// Write new sum.
|
||||
packField(checksumField, sum, rawFooter)
|
||||
packField(checksumField, sum, rawStruct)
|
||||
|
||||
return sum
|
||||
}
|
||||
|
||||
function getParentLocatorSize (parentLocatorEntry) {
|
||||
const { platformDataSpace } = parentLocatorEntry
|
||||
|
||||
if (platformDataSpace < VHD_SECTOR_SIZE) {
|
||||
return sectorsToBytes(platformDataSpace)
|
||||
}
|
||||
|
||||
return (platformDataSpace % VHD_SECTOR_SIZE === 0)
|
||||
? platformDataSpace
|
||||
: 0
|
||||
}
|
||||
|
||||
// ===================================================================
|
||||
|
||||
class Vhd {
|
||||
@@ -190,6 +188,17 @@ class Vhd {
|
||||
// Read functions.
|
||||
// =================================================================
|
||||
|
||||
_readStream (start, n) {
|
||||
return this._handler.createReadStream(this._path, {
|
||||
start,
|
||||
end: start + n - 1 // end is inclusive
|
||||
})
|
||||
}
|
||||
|
||||
_read (start, n) {
|
||||
return this._readStream(start, n).then(streamToBuffer)
|
||||
}
|
||||
|
||||
// Returns the first address after metadata. (In bytes)
|
||||
getEndOfHeaders () {
|
||||
const { header } = this
|
||||
@@ -207,10 +216,10 @@ class Vhd {
|
||||
const entry = header.parentLocatorEntry[i]
|
||||
|
||||
if (entry.platformCode !== VHD_PLATFORM_CODE_NONE) {
|
||||
const dataOffset = uint32ToUint64(entry.platformDataOffset)
|
||||
|
||||
// Max(end, locator end)
|
||||
end = Math.max(end, dataOffset + getParentLocatorSize(entry))
|
||||
end = Math.max(end,
|
||||
uint32ToUint64(entry.platformDataOffset) +
|
||||
sectorsToBytes(entry.platformDataSpace)
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -221,17 +230,15 @@ class Vhd {
|
||||
|
||||
// Returns the first sector after data.
|
||||
getEndOfData () {
|
||||
let end = Math.floor(this.getEndOfHeaders() / VHD_SECTOR_SIZE)
|
||||
let end = Math.ceil(this.getEndOfHeaders() / VHD_SECTOR_SIZE)
|
||||
|
||||
const fullBlockSize = this.sectorsOfBitmap + this.sectorsPerBlock
|
||||
const { maxTableEntries } = this.header
|
||||
for (let i = 0; i < maxTableEntries; i++) {
|
||||
let blockAddr = this.readAllocationTableEntry(i)
|
||||
const blockAddr = this._getBatEntry(i)
|
||||
|
||||
if (blockAddr !== BLOCK_UNUSED) {
|
||||
// Compute next block address.
|
||||
blockAddr += this.sectorsPerBlock + this.sectorsOfBitmap
|
||||
|
||||
end = Math.max(end, blockAddr)
|
||||
end = Math.max(end, blockAddr + fullBlockSize)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -240,24 +247,12 @@ class Vhd {
|
||||
return sectorsToBytes(end)
|
||||
}
|
||||
|
||||
// Returns the start position of the vhd footer.
|
||||
// The real footer, not the copy at the beginning of the vhd file.
|
||||
async getFooterStart () {
|
||||
const stats = await this._handler.getSize(this._path)
|
||||
return stats.size - VHD_FOOTER_SIZE
|
||||
}
|
||||
|
||||
// Get the beginning (footer + header) of a vhd file.
|
||||
async readHeaderAndFooter () {
|
||||
const buf = await streamToBuffer(
|
||||
await this._handler.createReadStream(this._path, {
|
||||
start: 0,
|
||||
end: VHD_FOOTER_SIZE + VHD_HEADER_SIZE - 1
|
||||
})
|
||||
)
|
||||
const buf = await this._read(0, VHD_FOOTER_SIZE + VHD_HEADER_SIZE)
|
||||
|
||||
const sum = unpackField(fuFooter.fields.checksum, buf)
|
||||
const sumToTest = checksumFooter(buf)
|
||||
const sumToTest = checksumStruct(buf, fuFooter)
|
||||
|
||||
// Checksum child & parent.
|
||||
if (sumToTest !== sum) {
|
||||
@@ -297,119 +292,176 @@ class Vhd {
|
||||
sectorsRoundUpNoZero(header.maxTableEntries * VHD_ENTRY_SIZE)
|
||||
)
|
||||
|
||||
this.blockTable = await streamToBuffer(
|
||||
await this._handler.createReadStream(this._path, {
|
||||
start: offset,
|
||||
end: offset + size - 1
|
||||
})
|
||||
)
|
||||
this.blockTable = await this._read(offset, size)
|
||||
}
|
||||
|
||||
// Returns the address block at the entry location of one table.
|
||||
readAllocationTableEntry (entry) {
|
||||
return this.blockTable.readUInt32BE(entry * VHD_ENTRY_SIZE)
|
||||
// return the first sector (bitmap) of a block
|
||||
_getBatEntry (block) {
|
||||
return this.blockTable.readUInt32BE(block * VHD_ENTRY_SIZE)
|
||||
}
|
||||
|
||||
// Returns the data content of a block. (Not the bitmap !)
|
||||
async readBlockData (blockAddr) {
|
||||
const { blockSize } = this.header
|
||||
|
||||
const handler = this._handler
|
||||
const path = this._path
|
||||
|
||||
const blockDataAddr = sectorsToBytes(blockAddr + this.sectorsOfBitmap)
|
||||
const footerStart = await this.getFooterStart()
|
||||
const isPadded = footerStart < (blockDataAddr + blockSize)
|
||||
|
||||
// Size ot the current block in the vhd file.
|
||||
const size = isPadded ? (footerStart - blockDataAddr) : sectorsToBytes(this.sectorsPerBlock)
|
||||
|
||||
debug(`Read block data at: ${blockDataAddr}. (size=${size})`)
|
||||
|
||||
const buf = await streamToBuffer(
|
||||
await handler.createReadStream(path, {
|
||||
start: blockDataAddr,
|
||||
end: blockDataAddr + size - 1
|
||||
})
|
||||
)
|
||||
|
||||
// Padded by zero !
|
||||
if (isPadded) {
|
||||
return Buffer.concat([buf, new Buffer(blockSize - size).fill(0)])
|
||||
_readBlock (blockId, onlyBitmap = false) {
|
||||
const blockAddr = this._getBatEntry(blockId)
|
||||
if (blockAddr === BLOCK_UNUSED) {
|
||||
throw new Error(`no such block ${blockId}`)
|
||||
}
|
||||
|
||||
return buf
|
||||
return this._read(
|
||||
sectorsToBytes(blockAddr),
|
||||
onlyBitmap ? this.bitmapSize : this.fullBlockSize
|
||||
).then(buf => onlyBitmap
|
||||
? { bitmap: buf }
|
||||
: {
|
||||
bitmap: buf.slice(0, this.bitmapSize),
|
||||
data: buf.slice(this.bitmapSize)
|
||||
}
|
||||
)
|
||||
}
|
||||
|
||||
// Returns a buffer that contains the bitmap of a block.
|
||||
// get the identifiers and first sectors of the first and last block
|
||||
// in the file
|
||||
//
|
||||
// TODO: merge with readBlockData().
|
||||
async readBlockBitmap (blockAddr) {
|
||||
const { bitmapSize } = this
|
||||
const offset = sectorsToBytes(blockAddr)
|
||||
// return undefined if none
|
||||
_getFirstAndLastBlocks () {
|
||||
const n = this.header.maxTableEntries
|
||||
const bat = this.blockTable
|
||||
let i = 0
|
||||
let j = 0
|
||||
let first, firstSector, last, lastSector
|
||||
|
||||
debug(`Read bitmap at: ${offset}. (size=${bitmapSize})`)
|
||||
// get first allocated block for initialization
|
||||
while ((firstSector = bat.readUInt32BE(j)) === BLOCK_UNUSED) {
|
||||
i += 1
|
||||
j += VHD_ENTRY_SIZE
|
||||
|
||||
return streamToBuffer(
|
||||
await this._handler.createReadStream(this._path, {
|
||||
start: offset,
|
||||
end: offset + bitmapSize - 1
|
||||
})
|
||||
)
|
||||
if (i === n) {
|
||||
return
|
||||
}
|
||||
}
|
||||
lastSector = firstSector
|
||||
first = last = i
|
||||
|
||||
while (i < n) {
|
||||
const sector = bat.readUInt32BE(j)
|
||||
if (sector !== BLOCK_UNUSED) {
|
||||
if (sector < firstSector) {
|
||||
first = i
|
||||
firstSector = sector
|
||||
} else if (sector > lastSector) {
|
||||
last = i
|
||||
lastSector = sector
|
||||
}
|
||||
}
|
||||
|
||||
i += 1
|
||||
j += VHD_ENTRY_SIZE
|
||||
}
|
||||
|
||||
return { first, firstSector, last, lastSector }
|
||||
}
|
||||
|
||||
// =================================================================
|
||||
// Write functions.
|
||||
// =================================================================
|
||||
|
||||
// Write a buffer at a given position in a vhd file.
|
||||
async _write (buffer, offset) {
|
||||
// Write a buffer/stream at a given position in a vhd file.
|
||||
_write (data, offset) {
|
||||
debug(`_write offset=${offset} size=${Buffer.isBuffer(data) ? data.length : '???'}`)
|
||||
// TODO: could probably be merged in remote handlers.
|
||||
return this._handler.createOutputStream(this._path, {
|
||||
start: offset,
|
||||
flags: 'r+'
|
||||
}).then(stream => new Promise((resolve, reject) => {
|
||||
stream.on('error', reject)
|
||||
stream.write(buffer, () => {
|
||||
stream.end()
|
||||
resolve()
|
||||
})
|
||||
}))
|
||||
flags: 'r+',
|
||||
start: offset
|
||||
}).then(
|
||||
Buffer.isBuffer(data)
|
||||
? stream => new Promise((resolve, reject) => {
|
||||
stream.on('error', reject)
|
||||
stream.end(data, resolve)
|
||||
})
|
||||
: stream => eventToPromise(data.pipe(stream), 'finish')
|
||||
)
|
||||
}
|
||||
|
||||
// Write an entry in the allocation table.
|
||||
writeAllocationTableEntry (entry, value) {
|
||||
this.blockTable.writeUInt32BE(value, entry * VHD_ENTRY_SIZE)
|
||||
async ensureBatSize (size) {
|
||||
const { header } = this
|
||||
|
||||
const prevMaxTableEntries = header.maxTableEntries
|
||||
if (prevMaxTableEntries >= size) {
|
||||
return
|
||||
}
|
||||
|
||||
const tableOffset = uint32ToUint64(header.tableOffset)
|
||||
const { first, firstSector, lastSector } = this._getFirstAndLastBlocks()
|
||||
|
||||
// extend BAT
|
||||
const maxTableEntries = header.maxTableEntries = size
|
||||
const batSize = maxTableEntries * VHD_ENTRY_SIZE
|
||||
const prevBat = this.blockTable
|
||||
const bat = this.blockTable = Buffer.allocUnsafe(batSize)
|
||||
prevBat.copy(bat)
|
||||
bat.fill(BUF_BLOCK_UNUSED, prevBat.length)
|
||||
debug(`ensureBatSize: extend in memory BAT ${prevMaxTableEntries} -> ${maxTableEntries}`)
|
||||
|
||||
const extendBat = () => {
|
||||
debug(`ensureBatSize: extend in file BAT ${prevMaxTableEntries} -> ${maxTableEntries}`)
|
||||
|
||||
return this._write(
|
||||
constantStream(BUF_BLOCK_UNUSED, maxTableEntries - prevMaxTableEntries),
|
||||
tableOffset + prevBat.length
|
||||
)
|
||||
}
|
||||
|
||||
if (tableOffset + batSize < sectorsToBytes(firstSector)) {
|
||||
return Promise.all([
|
||||
extendBat(),
|
||||
this.writeHeader()
|
||||
])
|
||||
}
|
||||
|
||||
const { fullBlockSize } = this
|
||||
const newFirstSector = lastSector + fullBlockSize / VHD_SECTOR_SIZE
|
||||
debug(`ensureBatSize: move first block ${firstSector} -> ${newFirstSector}`)
|
||||
|
||||
return Promise.all([
|
||||
// copy the first block at the end
|
||||
this._readStream(sectorsToBytes(firstSector), fullBlockSize).then(stream =>
|
||||
this._write(stream, sectorsToBytes(newFirstSector))
|
||||
).then(extendBat),
|
||||
|
||||
this._setBatEntry(first, newFirstSector),
|
||||
this.writeHeader(),
|
||||
this.writeFooter()
|
||||
])
|
||||
}
|
||||
|
||||
// set the first sector (bitmap) of a block
|
||||
_setBatEntry (block, blockSector) {
|
||||
const i = block * VHD_ENTRY_SIZE
|
||||
const { blockTable } = this
|
||||
|
||||
blockTable.writeUInt32BE(blockSector, i)
|
||||
|
||||
return this._write(
|
||||
blockTable.slice(i, i + VHD_ENTRY_SIZE),
|
||||
uint32ToUint64(this.header.tableOffset) + i
|
||||
)
|
||||
}
|
||||
|
||||
// Make a new empty block at vhd end.
|
||||
// Update block allocation table in context and in file.
|
||||
async createBlock (blockId) {
|
||||
// End of file !
|
||||
let offset = this.getEndOfData()
|
||||
const blockAddr = Math.ceil(this.getEndOfData() / VHD_SECTOR_SIZE)
|
||||
|
||||
// Padded on bound sector.
|
||||
if (offset % VHD_SECTOR_SIZE) {
|
||||
offset += (VHD_SECTOR_SIZE - (offset % VHD_SECTOR_SIZE))
|
||||
}
|
||||
debug(`create block ${blockId} at ${blockAddr}`)
|
||||
|
||||
const blockAddr = Math.floor(offset / VHD_SECTOR_SIZE)
|
||||
await Promise.all([
|
||||
// Write an empty block and addr in vhd file.
|
||||
this._write(
|
||||
constantStream([ 0 ], this.fullBlockSize),
|
||||
sectorsToBytes(blockAddr)
|
||||
),
|
||||
|
||||
const {
|
||||
blockTable,
|
||||
fullBlockSize
|
||||
} = this
|
||||
debug(`Create block at ${blockAddr}. (size=${fullBlockSize}, offset=${offset})`)
|
||||
|
||||
// New entry in block allocation table.
|
||||
this.writeAllocationTableEntry(blockId, blockAddr)
|
||||
|
||||
const tableOffset = uint32ToUint64(this.header.tableOffset)
|
||||
const entry = blockId * VHD_ENTRY_SIZE
|
||||
|
||||
// Write an empty block and addr in vhd file.
|
||||
await this._write(new Buffer(fullBlockSize).fill(0), offset)
|
||||
await this._write(blockTable.slice(entry, entry + VHD_ENTRY_SIZE), tableOffset + entry)
|
||||
this._setBatEntry(blockId, blockAddr)
|
||||
])
|
||||
|
||||
return blockAddr
|
||||
}
|
||||
@@ -428,17 +480,16 @@ class Vhd {
|
||||
await this._write(bitmap, sectorsToBytes(blockAddr))
|
||||
}
|
||||
|
||||
async writeBlockSectors (block, beginSectorId, n) {
|
||||
let blockAddr = this.readAllocationTableEntry(block.id)
|
||||
async writeBlockSectors (block, beginSectorId, endSectorId) {
|
||||
let blockAddr = this._getBatEntry(block.id)
|
||||
|
||||
if (blockAddr === BLOCK_UNUSED) {
|
||||
blockAddr = await this.createBlock(block.id)
|
||||
}
|
||||
|
||||
const endSectorId = beginSectorId + n
|
||||
const offset = blockAddr + this.sectorsOfBitmap + beginSectorId
|
||||
|
||||
debug(`Write block data at: ${offset}. (counter=${n}, blockId=${block.id}, blockSector=${beginSectorId})`)
|
||||
debug(`writeBlockSectors at ${offset} block=${block.id}, sectors=${beginSectorId}...${endSectorId}`)
|
||||
|
||||
await this._write(
|
||||
block.data.slice(
|
||||
@@ -448,7 +499,7 @@ class Vhd {
|
||||
sectorsToBytes(offset)
|
||||
)
|
||||
|
||||
const bitmap = await this.readBlockBitmap(this.bitmapSize, blockAddr)
|
||||
const { bitmap } = await this._readBlock(block.id, true)
|
||||
|
||||
for (let i = beginSectorId; i < endSectorId; ++i) {
|
||||
mapSetBit(bitmap, i)
|
||||
@@ -458,61 +509,69 @@ class Vhd {
|
||||
}
|
||||
|
||||
// Merge block id (of vhd child) into vhd parent.
|
||||
async coalesceBlock (child, blockAddr, blockId) {
|
||||
async coalesceBlock (child, blockId) {
|
||||
// Get block data and bitmap of block id.
|
||||
const blockData = await child.readBlockData(blockAddr)
|
||||
const blockBitmap = await child.readBlockBitmap(blockAddr)
|
||||
const { bitmap, data } = await child._readBlock(blockId)
|
||||
|
||||
debug(`Coalesce block ${blockId} at ${blockAddr}.`)
|
||||
debug(`coalesceBlock block=${blockId}`)
|
||||
|
||||
// For each sector of block data...
|
||||
const { sectorsPerBlock } = child
|
||||
for (let i = 0; i < sectorsPerBlock; i++) {
|
||||
// If no changes on one sector, skip.
|
||||
if (!mapTestBit(blockBitmap, i)) {
|
||||
if (!mapTestBit(bitmap, i)) {
|
||||
continue
|
||||
}
|
||||
|
||||
let sectors = 0
|
||||
let endSector = i + 1
|
||||
|
||||
// Count changed sectors.
|
||||
for (; sectors + i < sectorsPerBlock; sectors++) {
|
||||
if (!mapTestBit(blockBitmap, sectors + i)) {
|
||||
break
|
||||
}
|
||||
while (endSector < sectorsPerBlock && mapTestBit(bitmap, endSector)) {
|
||||
++endSector
|
||||
}
|
||||
|
||||
// Write n sectors into parent.
|
||||
debug(`Coalesce block: write. (offset=${i}, sectors=${sectors})`)
|
||||
debug(`coalesceBlock: write sectors=${i}...${endSector}`)
|
||||
await this.writeBlockSectors(
|
||||
{ id: blockId, data: blockData },
|
||||
{ id: blockId, data },
|
||||
i,
|
||||
sectors
|
||||
endSector
|
||||
)
|
||||
|
||||
i += sectors
|
||||
i = endSector
|
||||
}
|
||||
}
|
||||
|
||||
// Write a context footer. (At the end and beggining of a vhd file.)
|
||||
// Write a context footer. (At the end and beginning of a vhd file.)
|
||||
async writeFooter () {
|
||||
const { footer } = this
|
||||
|
||||
const offset = this.getEndOfData()
|
||||
const rawFooter = fuFooter.pack(footer)
|
||||
|
||||
footer.checksum = checksumFooter(rawFooter)
|
||||
footer.checksum = checksumStruct(rawFooter, fuFooter)
|
||||
debug(`Write footer at: ${offset} (checksum=${footer.checksum}). (data=${rawFooter.toString('hex')})`)
|
||||
|
||||
await this._write(rawFooter, 0)
|
||||
await this._write(rawFooter, offset)
|
||||
}
|
||||
|
||||
writeHeader () {
|
||||
const { header } = this
|
||||
const rawHeader = fuHeader.pack(header)
|
||||
header.checksum = checksumStruct(rawHeader, fuHeader)
|
||||
const offset = VHD_FOOTER_SIZE
|
||||
debug(`Write header at: ${offset} (checksum=${header.checksum}). (data=${rawHeader.toString('hex')})`)
|
||||
return this._write(rawHeader, offset)
|
||||
}
|
||||
}
|
||||
|
||||
// Merge vhd child into vhd parent.
|
||||
//
|
||||
// Child must be a delta backup !
|
||||
// Parent must be a full backup !
|
||||
//
|
||||
// TODO: update the identifier of the parent VHD.
|
||||
export default async function vhdMerge (
|
||||
parentHandler, parentPath,
|
||||
childHandler, childPath
|
||||
@@ -526,14 +585,16 @@ export default async function vhdMerge (
|
||||
childVhd.readHeaderAndFooter()
|
||||
])
|
||||
|
||||
assert(childVhd.header.blockSize === parentVhd.header.blockSize)
|
||||
|
||||
// Child must be a delta.
|
||||
if (childVhd.footer.diskType !== HARD_DISK_TYPE_DIFFERENCING) {
|
||||
throw new Error(`Unable to merge, child is not a delta backup.`)
|
||||
throw new Error('Unable to merge, child is not a delta backup.')
|
||||
}
|
||||
|
||||
// Merging in differencing disk is prohibited in our case.
|
||||
if (parentVhd.footer.diskType !== HARD_DISK_TYPE_DYNAMIC) {
|
||||
throw new Error(`Unable to merge, parent is not a full backup.`)
|
||||
throw new Error('Unable to merge, parent is not a full backup.')
|
||||
}
|
||||
|
||||
// Allocation table map is not yet implemented.
|
||||
@@ -541,7 +602,7 @@ export default async function vhdMerge (
|
||||
parentVhd.hasBlockAllocationTableMap() ||
|
||||
childVhd.hasBlockAllocationTableMap()
|
||||
) {
|
||||
throw new Error(`Unsupported allocation table map.`)
|
||||
throw new Error('Unsupported allocation table map.')
|
||||
}
|
||||
|
||||
// Read allocation table of child/parent.
|
||||
@@ -550,17 +611,66 @@ export default async function vhdMerge (
|
||||
childVhd.readBlockTable()
|
||||
])
|
||||
|
||||
for (let blockId = 0; blockId < childVhd.header.maxTableEntries; blockId++) {
|
||||
const blockAddr = childVhd.readAllocationTableEntry(blockId)
|
||||
await parentVhd.ensureBatSize(childVhd.header.maxTableEntries)
|
||||
|
||||
if (blockAddr !== BLOCK_UNUSED) {
|
||||
await parentVhd.coalesceBlock(
|
||||
childVhd,
|
||||
blockAddr,
|
||||
blockId
|
||||
)
|
||||
for (let blockId = 0; blockId < childVhd.header.maxTableEntries; blockId++) {
|
||||
if (childVhd._getBatEntry(blockId) !== BLOCK_UNUSED) {
|
||||
await parentVhd.coalesceBlock(childVhd, blockId)
|
||||
}
|
||||
}
|
||||
|
||||
const cFooter = childVhd.footer
|
||||
const pFooter = parentVhd.footer
|
||||
|
||||
pFooter.currentSize = { ...cFooter.currentSize }
|
||||
pFooter.diskGeometry = { ...cFooter.diskGeometry }
|
||||
pFooter.originalSize = { ...cFooter.originalSize }
|
||||
pFooter.timestamp = cFooter.timestamp
|
||||
|
||||
// necessary to update values and to recreate the footer after block
|
||||
// creation
|
||||
await parentVhd.writeFooter()
|
||||
}
|
||||
|
||||
// returns true if the child was actually modified
|
||||
export async function chainVhd (
|
||||
parentHandler, parentPath,
|
||||
childHandler, childPath
|
||||
) {
|
||||
const parentVhd = new Vhd(parentHandler, parentPath)
|
||||
const childVhd = new Vhd(childHandler, childPath)
|
||||
await Promise.all([
|
||||
parentVhd.readHeaderAndFooter(),
|
||||
childVhd.readHeaderAndFooter()
|
||||
])
|
||||
|
||||
const { header } = childVhd
|
||||
|
||||
const parentName = parentPath.split('/').pop()
|
||||
const parentUuid = parentVhd.footer.uuid
|
||||
if (
|
||||
header.parentUnicodeName !== parentName ||
|
||||
!isEqual(header.parentUuid, parentUuid)
|
||||
) {
|
||||
header.parentUuid = parentUuid
|
||||
header.parentUnicodeName = parentName
|
||||
await childVhd.writeHeader()
|
||||
return true
|
||||
}
|
||||
|
||||
// The checksum was broken between xo-server v5.2.4 and v5.2.5
|
||||
//
|
||||
// Replace by a correct checksum if necessary.
|
||||
//
|
||||
// TODO: remove when enough time as passed (6 months).
|
||||
{
|
||||
const rawHeader = fuHeader.pack(header)
|
||||
const checksum = checksumStruct(rawHeader, fuHeader)
|
||||
if (checksum !== header.checksum) {
|
||||
await childVhd._write(rawHeader, VHD_FOOTER_SIZE)
|
||||
return true
|
||||
}
|
||||
}
|
||||
|
||||
return false
|
||||
}
|
||||
|
||||
@@ -1,53 +0,0 @@
|
||||
import createDebug from 'debug'
|
||||
import WebSocket from 'ws'
|
||||
|
||||
const debug = createDebug('xo:wsProxy')
|
||||
|
||||
const defaults = {
|
||||
// Automatically close the client connection when the remote close.
|
||||
autoClose: true
|
||||
}
|
||||
|
||||
// Proxy a WebSocket `client` to a remote server which has `url` as
|
||||
// address.
|
||||
export default function wsProxy (client, url, opts) {
|
||||
opts = {
|
||||
...defaults,
|
||||
protocol: client.protocol,
|
||||
...opts
|
||||
}
|
||||
const autoClose = !!opts.autoClose
|
||||
delete opts.autoClose
|
||||
|
||||
function onClientSend (error) {
|
||||
if (error) {
|
||||
debug('client send error', error)
|
||||
}
|
||||
}
|
||||
function onRemoteSend (error) {
|
||||
if (error) {
|
||||
debug('remote send error', error)
|
||||
}
|
||||
}
|
||||
|
||||
const remote = new WebSocket(url, opts).once('open', function () {
|
||||
debug('connected to %s', url)
|
||||
}).once('close', function () {
|
||||
debug('remote closed')
|
||||
|
||||
if (autoClose) {
|
||||
client.close()
|
||||
}
|
||||
}).once('error', function (error) {
|
||||
debug('remote error: %s', error)
|
||||
}).on('message', function (message) {
|
||||
client.send(message, onClientSend)
|
||||
})
|
||||
|
||||
client.once('close', function () {
|
||||
debug('client closed')
|
||||
remote.close()
|
||||
}).on('message', function (message) {
|
||||
remote.send(message, onRemoteSend)
|
||||
})
|
||||
}
|
||||
@@ -1,8 +1,14 @@
|
||||
import {
|
||||
startsWith
|
||||
} from 'lodash'
|
||||
|
||||
import {
|
||||
ensureArray,
|
||||
extractProperty,
|
||||
forEach,
|
||||
isArray,
|
||||
isEmpty,
|
||||
mapFilter,
|
||||
mapToArray,
|
||||
parseXml
|
||||
} from './utils'
|
||||
@@ -12,6 +18,9 @@ import {
|
||||
isVmRunning,
|
||||
parseDateTime
|
||||
} from './xapi'
|
||||
import {
|
||||
useUpdateSystem
|
||||
} from './xapi/utils'
|
||||
|
||||
// ===================================================================
|
||||
|
||||
@@ -35,18 +44,28 @@ function link (obj, prop, idField = '$id') {
|
||||
|
||||
// Parse a string date time to a Unix timestamp (in seconds).
|
||||
//
|
||||
// If the value is a number or can be converted as one, it is assumed
|
||||
// to already be a timestamp and returned.
|
||||
//
|
||||
// If there are no data or if the timestamp is 0, returns null.
|
||||
function toTimestamp (date) {
|
||||
if (!date) {
|
||||
return null
|
||||
}
|
||||
|
||||
const ms = parseDateTime(date).getTime()
|
||||
const timestamp = +date
|
||||
|
||||
// Not NaN.
|
||||
if (timestamp === timestamp) { // eslint-disable-line no-self-compare
|
||||
return timestamp
|
||||
}
|
||||
|
||||
const ms = parseDateTime(date)
|
||||
if (!ms) {
|
||||
return null
|
||||
}
|
||||
|
||||
return Math.round(ms / 1000)
|
||||
return Math.round(ms.getTime() / 1000)
|
||||
}
|
||||
|
||||
// ===================================================================
|
||||
@@ -83,18 +102,51 @@ const TRANSFORMS = {
|
||||
} = obj
|
||||
|
||||
const isRunning = isHostRunning(obj)
|
||||
const { software_version } = obj
|
||||
let supplementalPacks, patches
|
||||
|
||||
if (useUpdateSystem(obj)) {
|
||||
supplementalPacks = []
|
||||
patches = []
|
||||
|
||||
forEach(obj.$updates, update => {
|
||||
const formattedUpdate = {
|
||||
name: update.name_label,
|
||||
description: update.name_description,
|
||||
author: update.key.split('-')[3],
|
||||
version: update.version,
|
||||
guidance: update.after_apply_guidance,
|
||||
hosts: link(update, 'hosts'),
|
||||
vdi: link(update, 'vdi'),
|
||||
size: update.installation_size
|
||||
}
|
||||
|
||||
if (startsWith(update.name_label, 'XS')) {
|
||||
patches.push(formattedUpdate)
|
||||
} else {
|
||||
supplementalPacks.push(formattedUpdate)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
return {
|
||||
// Deprecated
|
||||
CPUs: obj.cpu_info,
|
||||
|
||||
address: obj.address,
|
||||
bios_strings: obj.bios_strings,
|
||||
build: obj.software_version.build_number,
|
||||
CPUs: obj.cpu_info,
|
||||
enabled: Boolean(obj.enabled),
|
||||
cpus: {
|
||||
cores: +obj.cpu_info.cpu_count,
|
||||
sockets: +obj.cpu_info.socket_count
|
||||
},
|
||||
current_operations: obj.current_operations,
|
||||
hostname: obj.hostname,
|
||||
iSCSI_name: otherConfig.iscsi_iqn || null,
|
||||
license_params: obj.license_params,
|
||||
license_server: obj.license_server,
|
||||
license_expiry: toTimestamp(obj.license_params.expiry),
|
||||
name_description: obj.name_description,
|
||||
name_label: obj.name_label,
|
||||
memory: (function () {
|
||||
@@ -110,14 +162,32 @@ const TRANSFORMS = {
|
||||
|
||||
return {
|
||||
usage: 0,
|
||||
size: 0,
|
||||
|
||||
// Deprecated
|
||||
total: 0
|
||||
}
|
||||
})(),
|
||||
patches: link(obj, 'patches'),
|
||||
patches: patches || link(obj, 'patches'),
|
||||
powerOnMode: obj.power_on_mode,
|
||||
power_state: metrics
|
||||
? (isRunning ? 'Running' : 'Halted')
|
||||
: 'Unknown',
|
||||
startTime: toTimestamp(otherConfig.boot_time),
|
||||
supplementalPacks: supplementalPacks ||
|
||||
mapFilter(software_version, (value, key) => {
|
||||
let author, name
|
||||
if (([ author, name ] = key.split(':')).length === 2) {
|
||||
const [ description, version ] = value.split(', ')
|
||||
return {
|
||||
name,
|
||||
description,
|
||||
author,
|
||||
version: version.split(' ')[1]
|
||||
}
|
||||
}
|
||||
}),
|
||||
agentStartTime: toTimestamp(otherConfig.agent_start_time),
|
||||
tags: obj.tags,
|
||||
version: obj.software_version.product_version,
|
||||
|
||||
@@ -152,19 +222,49 @@ const TRANSFORMS = {
|
||||
|
||||
const isHvm = isVmHvm(obj)
|
||||
const isRunning = isVmRunning(obj)
|
||||
const xenTools = (() => {
|
||||
if (!isRunning || !metrics) {
|
||||
// Unknown status, returns nothing.
|
||||
return
|
||||
}
|
||||
|
||||
if (!guestMetrics) {
|
||||
return false
|
||||
}
|
||||
|
||||
const { major, minor } = guestMetrics.PV_drivers_version
|
||||
const [ hostMajor, hostMinor ] = (obj.$resident_on || obj.$pool.$master)
|
||||
.software_version
|
||||
.product_version
|
||||
.split('.')
|
||||
|
||||
return major >= hostMajor && minor >= hostMinor
|
||||
? 'up to date'
|
||||
: 'out of date'
|
||||
})()
|
||||
|
||||
let resourceSet = otherConfig['xo:resource_set']
|
||||
if (resourceSet) {
|
||||
try {
|
||||
resourceSet = JSON.parse(resourceSet)
|
||||
} catch (_) {
|
||||
resourceSet = undefined
|
||||
}
|
||||
}
|
||||
|
||||
const vm = {
|
||||
// type is redefined after for controllers/, templates &
|
||||
// snapshots.
|
||||
type: 'VM',
|
||||
|
||||
addresses: guestMetrics && guestMetrics.networks || null,
|
||||
addresses: (guestMetrics && guestMetrics.networks) || null,
|
||||
affinityHost: link(obj, 'affinity'),
|
||||
auto_poweron: Boolean(otherConfig.auto_poweron),
|
||||
boot: obj.HVM_boot_params,
|
||||
CPUs: {
|
||||
max: +obj.VCPUs_max,
|
||||
number: (
|
||||
isRunning && metrics
|
||||
isRunning && metrics && xenTools
|
||||
? +metrics.VCPUs_number
|
||||
: +obj.VCPUs_at_startup
|
||||
)
|
||||
@@ -191,7 +291,8 @@ const TRANSFORMS = {
|
||||
return {
|
||||
enabled: true,
|
||||
info: info && parseXml(info).docker_info,
|
||||
process: process && parseXml(process).docker_ps,
|
||||
containers: ensureArray(process && parseXml(process).docker_ps.item),
|
||||
process: process && parseXml(process).docker_ps, // deprecated (only used in v4)
|
||||
version: version && parseXml(version).docker_version
|
||||
}
|
||||
})(),
|
||||
@@ -225,12 +326,15 @@ const TRANSFORMS = {
|
||||
|
||||
return memory
|
||||
})(),
|
||||
installTime: metrics && toTimestamp(metrics.install_time),
|
||||
name_description: obj.name_description,
|
||||
name_label: obj.name_label,
|
||||
other: otherConfig,
|
||||
os_version: guestMetrics && guestMetrics.os_version || null,
|
||||
os_version: (guestMetrics && guestMetrics.os_version) || null,
|
||||
power_state: obj.power_state,
|
||||
resourceSet,
|
||||
snapshots: link(obj, 'snapshots'),
|
||||
startTime: metrics && toTimestamp(metrics.start_time),
|
||||
tags: obj.tags,
|
||||
VIFs: link(obj, 'VIFs'),
|
||||
virtualizationMode: isHvm ? 'hvm' : 'pv',
|
||||
@@ -241,25 +345,7 @@ const TRANSFORMS = {
|
||||
// - false: not optimized
|
||||
// - 'out of date': optimized but drivers should be updated
|
||||
// - 'up to date': optimized
|
||||
xenTools: (() => {
|
||||
if (!isRunning || !metrics) {
|
||||
// Unknown status, returns nothing.
|
||||
return
|
||||
}
|
||||
|
||||
if (!guestMetrics) {
|
||||
return false
|
||||
}
|
||||
|
||||
const { PV_drivers_version: { major, minor } } = guestMetrics
|
||||
if (major === undefined || minor === undefined) {
|
||||
return false
|
||||
}
|
||||
|
||||
return guestMetrics.PV_drivers_up_to_date
|
||||
? 'up to date'
|
||||
: 'out of date'
|
||||
})(),
|
||||
xenTools,
|
||||
|
||||
$container: (
|
||||
isRunning
|
||||
@@ -281,6 +367,7 @@ const TRANSFORMS = {
|
||||
vm.snapshot_time = toTimestamp(obj.snapshot_time)
|
||||
vm.$snapshot_of = link(obj, 'snapshot_of')
|
||||
} else if (obj.is_a_template) {
|
||||
vm.id = obj.$ref // use refs for templates as they
|
||||
vm.type += '-template'
|
||||
|
||||
vm.CPUs.number = +obj.VCPUs_at_startup
|
||||
@@ -303,7 +390,7 @@ const TRANSFORMS = {
|
||||
return disks
|
||||
})(),
|
||||
install_methods: (function () {
|
||||
const {['install-methods']: methods} = otherConfig
|
||||
const methods = otherConfig['install-methods']
|
||||
|
||||
return methods ? methods.split(',') : []
|
||||
})(),
|
||||
@@ -311,8 +398,10 @@ const TRANSFORMS = {
|
||||
}
|
||||
}
|
||||
|
||||
if (obj.VCPUs_params && obj.VCPUs_params.weight) {
|
||||
vm.cpuWeight = obj.VCPUs_params.weight
|
||||
let tmp
|
||||
if ((tmp = obj.VCPUs_params)) {
|
||||
tmp.cap && (vm.cpuCap = +tmp.cap)
|
||||
tmp.weight && (vm.cpuWeight = +tmp.weight)
|
||||
}
|
||||
|
||||
if (!isHvm) {
|
||||
@@ -336,15 +425,18 @@ const TRANSFORMS = {
|
||||
name_description: obj.name_description,
|
||||
name_label: obj.name_label,
|
||||
size: +obj.physical_size,
|
||||
shared: Boolean(obj.shared),
|
||||
SR_type: obj.type,
|
||||
tags: obj.tags,
|
||||
usage: +obj.virtual_allocation,
|
||||
VDIs: link(obj, 'VDIs'),
|
||||
other_config: obj.other_config,
|
||||
sm_config: obj.sm_config,
|
||||
|
||||
$container: (
|
||||
obj.shared
|
||||
obj.shared || !obj.$PBDs[0]
|
||||
? link(obj, 'pool')
|
||||
: obj.$PBDs[0] && link(obj.$PBDs[0], 'host')
|
||||
: link(obj.$PBDs[0], 'host')
|
||||
),
|
||||
$PBDs: link(obj, 'PBDs')
|
||||
}
|
||||
@@ -356,26 +448,32 @@ const TRANSFORMS = {
|
||||
return {
|
||||
type: 'PBD',
|
||||
|
||||
attached: obj.currently_attached,
|
||||
attached: Boolean(obj.currently_attached),
|
||||
host: link(obj, 'host'),
|
||||
SR: link(obj, 'SR')
|
||||
SR: link(obj, 'SR'),
|
||||
device_config: obj.device_config
|
||||
}
|
||||
},
|
||||
|
||||
// -----------------------------------------------------------------
|
||||
|
||||
pif (obj) {
|
||||
const metrics = obj.$metrics
|
||||
|
||||
return {
|
||||
type: 'PIF',
|
||||
|
||||
attached: Boolean(obj.currently_attached),
|
||||
isBondMaster: !isEmpty(obj.bond_master_of),
|
||||
device: obj.device,
|
||||
deviceName: metrics && metrics.device_name,
|
||||
dns: obj.DNS,
|
||||
disallowUnplug: Boolean(obj.disallow_unplug),
|
||||
gateway: obj.gateway,
|
||||
ip: obj.IP,
|
||||
mac: obj.MAC,
|
||||
management: Boolean(obj.management), // TODO: find a better name.
|
||||
carrier: Boolean(metrics && metrics.carrier),
|
||||
mode: obj.ip_configuration_mode,
|
||||
mtu: +obj.MTU,
|
||||
netmask: obj.netmask,
|
||||
@@ -426,6 +524,7 @@ const TRANSFORMS = {
|
||||
|
||||
attached: Boolean(obj.currently_attached),
|
||||
bootable: Boolean(obj.bootable),
|
||||
device: obj.device || null,
|
||||
is_cd_drive: obj.type === 'CD',
|
||||
position: obj.userdevice,
|
||||
read_only: obj.mode === 'RO',
|
||||
@@ -440,6 +539,8 @@ const TRANSFORMS = {
|
||||
return {
|
||||
type: 'VIF',
|
||||
|
||||
allowedIpv4Addresses: obj.ipv4_allowed,
|
||||
allowedIpv6Addresses: obj.ipv6_allowed,
|
||||
attached: Boolean(obj.currently_attached),
|
||||
device: obj.device, // TODO: should it be cast to a number?
|
||||
MAC: obj.MAC,
|
||||
@@ -455,9 +556,11 @@ const TRANSFORMS = {
|
||||
network (obj) {
|
||||
return {
|
||||
bridge: obj.bridge,
|
||||
defaultIsLocked: obj.default_locking_mode === 'disabled',
|
||||
MTU: +obj.MTU,
|
||||
name_description: obj.name_description,
|
||||
name_label: obj.name_label,
|
||||
other_config: obj.other_config,
|
||||
tags: obj.tags,
|
||||
PIFs: link(obj, 'PIFs'),
|
||||
VIFs: link(obj, 'VIFs')
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import endsWith from 'lodash.endswith'
|
||||
import endsWith from 'lodash/endsWith'
|
||||
import JSON5 from 'json5'
|
||||
import { BaseError } from 'make-error'
|
||||
|
||||
@@ -32,11 +32,7 @@ export class UnknownLegendFormat extends XapiStatsError {
|
||||
}
|
||||
}
|
||||
|
||||
export class FaultyGranularity extends XapiStatsError {
|
||||
constructor (msg) {
|
||||
super(msg)
|
||||
}
|
||||
}
|
||||
export class FaultyGranularity extends XapiStatsError {}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
// Utils
|
||||
@@ -289,9 +285,10 @@ export default class XapiStats {
|
||||
// Load
|
||||
hostStats.load.push(convertNanToNull(values[hostLegends.load]))
|
||||
|
||||
// Memory
|
||||
const memory = values[hostLegends.memory]
|
||||
const memoryFree = values[hostLegends.memoryFree]
|
||||
// Memory.
|
||||
// WARNING! memory/memoryFree are in kB.
|
||||
const memory = values[hostLegends.memory] * 1024
|
||||
const memoryFree = values[hostLegends.memoryFree] * 1024
|
||||
|
||||
hostStats.memory.push(memory)
|
||||
|
||||
@@ -405,19 +402,24 @@ export default class XapiStats {
|
||||
}
|
||||
|
||||
_getPoints (hostname, step, vmId) {
|
||||
const hostStats = this._hosts[hostname][step]
|
||||
|
||||
// Return host points
|
||||
if (vmId === undefined) {
|
||||
return this._hosts[hostname][step]
|
||||
return {
|
||||
interval: step,
|
||||
...hostStats
|
||||
}
|
||||
}
|
||||
|
||||
const vmsStats = this._vms[hostname][step]
|
||||
|
||||
// Return vm points
|
||||
const points = { endTimestamp: this._hosts[hostname][step].endTimestamp }
|
||||
|
||||
if (this._vms[hostname][step] !== undefined) {
|
||||
points.stats = this._vms[hostname][step][vmId]
|
||||
return {
|
||||
interval: step,
|
||||
endTimestamp: hostStats.endTimestamp,
|
||||
stats: (vmsStats && vmsStats[vmId]) || getNewVmStats()
|
||||
}
|
||||
|
||||
return points
|
||||
}
|
||||
|
||||
async _getAndUpdatePoints (xapi, host, vmId, granularity) {
|
||||
@@ -528,6 +530,11 @@ export default class XapiStats {
|
||||
async getVmPoints (xapi, vmId, granularity) {
|
||||
const vm = xapi.getObject(vmId)
|
||||
const host = vm.$resident_on
|
||||
|
||||
if (!host) {
|
||||
throw new Error(`VM ${vmId} is halted or host could not be found.`)
|
||||
}
|
||||
|
||||
return this._getAndUpdatePoints(xapi, host, vm.uuid, granularity)
|
||||
}
|
||||
}
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
0
src/xapi/mixins/.index-modules
Normal file
0
src/xapi/mixins/.index-modules
Normal file
60
src/xapi/mixins/networking.js
Normal file
60
src/xapi/mixins/networking.js
Normal file
@@ -0,0 +1,60 @@
|
||||
import { isEmpty } from '../../utils'
|
||||
|
||||
import { makeEditObject } from '../utils'
|
||||
|
||||
export default {
|
||||
async _connectVif (vif) {
|
||||
await this.call('VIF.plug', vif.$ref)
|
||||
},
|
||||
async connectVif (vifId) {
|
||||
await this._connectVif(this.getObject(vifId))
|
||||
},
|
||||
async _deleteVif (vif) {
|
||||
await this.call('VIF.destroy', vif.$ref)
|
||||
},
|
||||
async deleteVif (vifId) {
|
||||
const vif = this.getObject(vifId)
|
||||
if (vif.currently_attached) {
|
||||
await this._disconnectVif(vif)
|
||||
}
|
||||
await this._deleteVif(vif)
|
||||
},
|
||||
async _disconnectVif (vif) {
|
||||
await this.call('VIF.unplug_force', vif.$ref)
|
||||
},
|
||||
async disconnectVif (vifId) {
|
||||
await this._disconnectVif(this.getObject(vifId))
|
||||
},
|
||||
editVif: makeEditObject({
|
||||
ipv4Allowed: {
|
||||
get: true,
|
||||
set: [
|
||||
'ipv4Allowed',
|
||||
function (value, vif) {
|
||||
const lockingMode = isEmpty(value) && isEmpty(vif.ipv6_allowed)
|
||||
? 'network_default'
|
||||
: 'locked'
|
||||
|
||||
if (lockingMode !== vif.locking_mode) {
|
||||
return this._set('locking_mode', lockingMode)
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
ipv6Allowed: {
|
||||
get: true,
|
||||
set: [
|
||||
'ipv6Allowed',
|
||||
function (value, vif) {
|
||||
const lockingMode = isEmpty(value) && isEmpty(vif.ipv4_allowed)
|
||||
? 'network_default'
|
||||
: 'locked'
|
||||
|
||||
if (lockingMode !== vif.locking_mode) {
|
||||
return this._set('locking_mode', lockingMode)
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
})
|
||||
}
|
||||
380
src/xapi/mixins/patching.js
Normal file
380
src/xapi/mixins/patching.js
Normal file
@@ -0,0 +1,380 @@
|
||||
import deferrable from 'golike-defer'
|
||||
import filter from 'lodash/filter'
|
||||
import includes from 'lodash/includes'
|
||||
import some from 'lodash/some'
|
||||
import sortBy from 'lodash/sortBy'
|
||||
import unzip from 'julien-f-unzip'
|
||||
|
||||
import httpProxy from '../../http-proxy'
|
||||
import httpRequest from '../../http-request'
|
||||
import { debounce } from '../../decorators'
|
||||
import {
|
||||
createRawObject,
|
||||
ensureArray,
|
||||
forEach,
|
||||
mapFilter,
|
||||
mapToArray,
|
||||
parseXml
|
||||
} from '../../utils'
|
||||
|
||||
import {
|
||||
debug,
|
||||
put,
|
||||
useUpdateSystem
|
||||
} from '../utils'
|
||||
|
||||
export default {
|
||||
// FIXME: should be static
|
||||
@debounce(24 * 60 * 60 * 1000)
|
||||
async _getXenUpdates () {
|
||||
const { readAll, statusCode } = await httpRequest(
|
||||
'http://updates.xensource.com/XenServer/updates.xml',
|
||||
{ agent: httpProxy }
|
||||
)
|
||||
|
||||
if (statusCode !== 200) {
|
||||
throw new Error('cannot fetch patches list from Citrix')
|
||||
}
|
||||
|
||||
const data = parseXml(await readAll()).patchdata
|
||||
|
||||
const patches = createRawObject()
|
||||
forEach(data.patches.patch, patch => {
|
||||
patches[patch.uuid] = {
|
||||
date: patch.timestamp,
|
||||
description: patch['name-description'],
|
||||
documentationUrl: patch.url,
|
||||
guidance: patch['after-apply-guidance'],
|
||||
name: patch['name-label'],
|
||||
url: patch['patch-url'],
|
||||
uuid: patch.uuid,
|
||||
conflicts: mapToArray(ensureArray(patch.conflictingpatches), patch => {
|
||||
return patch.conflictingpatch.uuid
|
||||
}),
|
||||
requirements: mapToArray(ensureArray(patch.requiredpatches), patch => {
|
||||
return patch.requiredpatch.uuid
|
||||
})
|
||||
// TODO: what does it mean, should we handle it?
|
||||
// version: patch.version,
|
||||
}
|
||||
if (patches[patch.uuid].conflicts[0] === undefined) {
|
||||
patches[patch.uuid].conflicts.length = 0
|
||||
}
|
||||
if (patches[patch.uuid].requirements[0] === undefined) {
|
||||
patches[patch.uuid].requirements.length = 0
|
||||
}
|
||||
})
|
||||
|
||||
const resolveVersionPatches = function (uuids) {
|
||||
const versionPatches = createRawObject()
|
||||
|
||||
forEach(ensureArray(uuids), ({uuid}) => {
|
||||
versionPatches[uuid] = patches[uuid]
|
||||
})
|
||||
|
||||
return versionPatches
|
||||
}
|
||||
|
||||
const versions = createRawObject()
|
||||
let latestVersion
|
||||
forEach(data.serverversions.version, version => {
|
||||
versions[version.value] = {
|
||||
date: version.timestamp,
|
||||
name: version.name,
|
||||
id: version.value,
|
||||
documentationUrl: version.url,
|
||||
patches: resolveVersionPatches(version.patch)
|
||||
}
|
||||
|
||||
if (version.latest) {
|
||||
latestVersion = versions[version.value]
|
||||
}
|
||||
})
|
||||
|
||||
return {
|
||||
patches,
|
||||
latestVersion,
|
||||
versions
|
||||
}
|
||||
},
|
||||
|
||||
// =================================================================
|
||||
|
||||
// Returns installed and not installed patches for a given host.
|
||||
async _getPoolPatchesForHost (host) {
|
||||
const versions = (await this._getXenUpdates()).versions
|
||||
|
||||
const hostVersions = host.software_version
|
||||
const version =
|
||||
versions[hostVersions.product_version] ||
|
||||
versions[hostVersions.product_version_text]
|
||||
|
||||
return version
|
||||
? version.patches
|
||||
: []
|
||||
},
|
||||
|
||||
_getInstalledPoolPatchesOnHost (host) {
|
||||
const installed = createRawObject()
|
||||
|
||||
// platform_version < 2.1.1
|
||||
forEach(host.$patches, hostPatch => {
|
||||
installed[hostPatch.$pool_patch.uuid] = true
|
||||
})
|
||||
|
||||
// platform_version >= 2.1.1
|
||||
forEach(host.$updates, update => {
|
||||
installed[update.uuid] = true // TODO: ignore packs
|
||||
})
|
||||
|
||||
return installed
|
||||
},
|
||||
|
||||
async _listMissingPoolPatchesOnHost (host) {
|
||||
const all = await this._getPoolPatchesForHost(host)
|
||||
const installed = this._getInstalledPoolPatchesOnHost(host)
|
||||
|
||||
const installable = createRawObject()
|
||||
forEach(all, (patch, uuid) => {
|
||||
if (installed[uuid]) {
|
||||
return
|
||||
}
|
||||
|
||||
for (const uuid of patch.conflicts) {
|
||||
if (uuid in installed) {
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
installable[uuid] = patch
|
||||
})
|
||||
|
||||
return installable
|
||||
},
|
||||
|
||||
async listMissingPoolPatchesOnHost (hostId) {
|
||||
// Returns an array to not break compatibility.
|
||||
return mapToArray(
|
||||
await this._listMissingPoolPatchesOnHost(this.getObject(hostId))
|
||||
)
|
||||
},
|
||||
|
||||
async _ejectToolsIsos (hostRef) {
|
||||
return Promise.all(mapFilter(
|
||||
this.objects.all,
|
||||
vm => {
|
||||
if (vm.$type !== 'vm' || (hostRef && vm.resident_on !== hostRef)) {
|
||||
return
|
||||
}
|
||||
|
||||
const shouldEjectCd = some(vm.$VBDs, vbd => {
|
||||
const vdi = vbd.$VDI
|
||||
|
||||
return vdi && vdi.is_tools_iso
|
||||
})
|
||||
|
||||
if (shouldEjectCd) {
|
||||
return this.ejectCdFromVm(vm.$id)
|
||||
}
|
||||
}
|
||||
))
|
||||
},
|
||||
|
||||
// -----------------------------------------------------------------
|
||||
|
||||
_isPoolPatchInstallableOnHost (patchUuid, host) {
|
||||
const installed = this._getInstalledPoolPatchesOnHost(host)
|
||||
|
||||
if (installed[patchUuid]) {
|
||||
return false
|
||||
}
|
||||
|
||||
let installable = true
|
||||
|
||||
forEach(installed, patch => {
|
||||
if (includes(patch.conflicts, patchUuid)) {
|
||||
installable = false
|
||||
|
||||
return false
|
||||
}
|
||||
})
|
||||
|
||||
return installable
|
||||
},
|
||||
|
||||
// -----------------------------------------------------------------
|
||||
|
||||
// platform_version < 2.1.1 ----------------------------------------
|
||||
async uploadPoolPatch (stream, patchName = 'unknown') {
|
||||
const taskRef = await this._createTask('Patch upload', patchName)
|
||||
|
||||
const task = this._watchTask(taskRef)
|
||||
const [ patchRef ] = await Promise.all([
|
||||
task,
|
||||
put(stream, {
|
||||
hostname: this.pool.$master.address,
|
||||
path: '/pool_patch_upload',
|
||||
protocol: 'https',
|
||||
query: {
|
||||
session_id: this.sessionId,
|
||||
task_id: taskRef
|
||||
},
|
||||
rejectUnauthorized: false
|
||||
}, task)
|
||||
])
|
||||
|
||||
return this._getOrWaitObject(patchRef)
|
||||
},
|
||||
|
||||
async _getOrUploadPoolPatch (uuid) {
|
||||
try {
|
||||
return this.getObjectByUuid(uuid)
|
||||
} catch (error) {}
|
||||
|
||||
debug('downloading patch %s', uuid)
|
||||
|
||||
const patchInfo = (await this._getXenUpdates()).patches[uuid]
|
||||
if (!patchInfo) {
|
||||
throw new Error('no such patch ' + uuid)
|
||||
}
|
||||
|
||||
let stream = await httpRequest(patchInfo.url, { agent: httpProxy })
|
||||
stream = await new Promise((resolve, reject) => {
|
||||
const PATCH_RE = /\.xsupdate$/
|
||||
stream.pipe(unzip.Parse()).on('entry', entry => {
|
||||
if (PATCH_RE.test(entry.path)) {
|
||||
entry.length = entry.size
|
||||
resolve(entry)
|
||||
} else {
|
||||
entry.autodrain()
|
||||
}
|
||||
}).on('error', reject)
|
||||
})
|
||||
|
||||
return this.uploadPoolPatch(stream, patchInfo.name)
|
||||
},
|
||||
|
||||
// patform_version >= 2.1.1 ----------------------------------------
|
||||
_installPatch: deferrable(async function ($defer, stream, { hostId }) {
|
||||
if (!stream.length) {
|
||||
throw new Error('stream must have a length')
|
||||
}
|
||||
|
||||
const vdi = await this.createTemporaryVdiOnHost(stream, hostId, '[XO] Patch ISO', 'small temporary VDI to store a patch ISO')
|
||||
$defer(() => this._deleteVdi(vdi))
|
||||
|
||||
const updateRef = await this.call('pool_update.introduce', vdi.$ref)
|
||||
// TODO: check update status
|
||||
// await this.call('pool_update.precheck', updateRef, host.$ref)
|
||||
// - ok_livepatch_complete An applicable live patch exists for every required component
|
||||
// - ok_livepatch_incomplete An applicable live patch exists but it is not sufficient
|
||||
// - ok There is no applicable live patch
|
||||
await this.call('pool_update.apply', updateRef, this.getObject(hostId).$ref)
|
||||
}),
|
||||
|
||||
async _downloadPatchAndInstall (uuid, hostId) {
|
||||
debug('downloading patch %s', uuid)
|
||||
|
||||
const patchInfo = (await this._getXenUpdates()).patches[uuid]
|
||||
if (!patchInfo) {
|
||||
throw new Error('no such patch ' + uuid)
|
||||
}
|
||||
|
||||
let stream = await httpRequest(patchInfo.url, { agent: httpProxy })
|
||||
stream = await new Promise((resolve, reject) => {
|
||||
stream.pipe(unzip.Parse()).on('entry', entry => {
|
||||
entry.length = entry.size
|
||||
resolve(entry)
|
||||
}).on('error', reject)
|
||||
})
|
||||
|
||||
return this._installPatch(stream, { hostId })
|
||||
},
|
||||
|
||||
// -----------------------------------------------------------------
|
||||
|
||||
async _installPoolPatchOnHost (patchUuid, host) {
|
||||
const [ patch ] = await Promise.all([ this._getOrUploadPoolPatch(patchUuid), this._ejectToolsIsos(host.$ref) ])
|
||||
|
||||
await this.call('pool_patch.apply', patch.$ref, host.$ref)
|
||||
},
|
||||
|
||||
_installPatchUpdateOnHost (patchUuid, host) {
|
||||
return Promise.all([ this._downloadPatchAndInstall(patchUuid, host.$id), this._ejectToolsIsos(host.$ref) ])
|
||||
},
|
||||
|
||||
async _checkSoftwareVersionAndInstallPatch (patchUuid, hostId) {
|
||||
const host = this.getObject(hostId)
|
||||
|
||||
return useUpdateSystem(host)
|
||||
? this._installPatchUpdateOnHost(patchUuid, host)
|
||||
: this._installPoolPatchOnHost(patchUuid, host)
|
||||
},
|
||||
|
||||
async installPoolPatchOnHost (patchUuid, hostId) {
|
||||
debug('installing patch %s', patchUuid)
|
||||
|
||||
return this._checkSoftwareVersionAndInstallPatch(patchUuid, hostId)
|
||||
},
|
||||
|
||||
// -----------------------------------------------------------------
|
||||
|
||||
async installPoolPatchOnAllHosts (patchUuid) {
|
||||
const [ patch ] = await Promise.all([ this._getOrUploadPoolPatch(patchUuid), this._ejectToolsIsos() ])
|
||||
|
||||
await this.call('pool_patch.pool_apply', patch.$ref)
|
||||
},
|
||||
|
||||
// -----------------------------------------------------------------
|
||||
|
||||
async _installPoolPatchOnHostAndRequirements (patch, host, patchesByUuid) {
|
||||
const { requirements } = patch
|
||||
if (requirements.length) {
|
||||
for (const requirementUuid of requirements) {
|
||||
if (this._isPoolPatchInstallableOnHost(requirementUuid, host)) {
|
||||
const requirement = patchesByUuid[requirementUuid]
|
||||
await this._installPoolPatchOnHostAndRequirements(requirement, host, patchesByUuid)
|
||||
|
||||
host = this.getObject(host.$id)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
await this._checkSoftwareVersionAndInstallPatch(patch.uuid, host)
|
||||
},
|
||||
|
||||
async installAllPoolPatchesOnHost (hostId) {
|
||||
let host = this.getObject(hostId)
|
||||
|
||||
const installableByUuid = await this._listMissingPoolPatchesOnHost(host)
|
||||
|
||||
// List of all installable patches sorted from the newest to the
|
||||
// oldest.
|
||||
const installable = sortBy(
|
||||
installableByUuid,
|
||||
patch => -Date.parse(patch.date)
|
||||
)
|
||||
|
||||
for (let i = 0, n = installable.length; i < n; ++i) {
|
||||
const patch = installable[i]
|
||||
|
||||
if (this._isPoolPatchInstallableOnHost(patch.uuid, host)) {
|
||||
await this._installPoolPatchOnHostAndRequirements(patch, host, installableByUuid).catch(error => {
|
||||
if (error.code !== 'PATCH_ALREADY_APPLIED') {
|
||||
throw error
|
||||
}
|
||||
})
|
||||
host = this.getObject(host.$id)
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
async installAllPoolPatchesOnAllHosts () {
|
||||
await this.installAllPoolPatchesOnHost(this.pool.master)
|
||||
// TODO: use pool_update.pool_apply for platform_version ^2.1.1
|
||||
await Promise.all(mapToArray(
|
||||
filter(this.objects.all, { $type: 'host' }),
|
||||
host => this.installAllPoolPatchesOnHost(host.$id)
|
||||
))
|
||||
}
|
||||
}
|
||||
53
src/xapi/mixins/storage.js
Normal file
53
src/xapi/mixins/storage.js
Normal file
@@ -0,0 +1,53 @@
|
||||
import {
|
||||
mapToArray
|
||||
} from '../../utils'
|
||||
|
||||
export default {
|
||||
_connectAllSrPbds (sr) {
|
||||
return Promise.all(
|
||||
mapToArray(sr.$PBDs, pbd => this._plugPbd(pbd))
|
||||
)
|
||||
},
|
||||
|
||||
async connectAllSrPbds (id) {
|
||||
await this._connectAllSrPbds(this.getObject(id))
|
||||
},
|
||||
|
||||
_disconnectAllSrPbds (sr) {
|
||||
return Promise.all(
|
||||
mapToArray(sr.$PBDs, pbd => this._unplugPbd(pbd))
|
||||
)
|
||||
},
|
||||
|
||||
async disconnectAllSrPbds (id) {
|
||||
await this._disconnectAllSrPbds(this.getObject(id))
|
||||
},
|
||||
|
||||
async destroySr (id) {
|
||||
const sr = this.getObject(id)
|
||||
await this._disconnectAllSrPbds(sr)
|
||||
await this.call('SR.destroy', sr.$ref)
|
||||
},
|
||||
|
||||
async forgetSr (id) {
|
||||
const sr = this.getObject(id)
|
||||
await this._disconnectAllSrPbds(sr)
|
||||
await this.call('SR.forget', sr.$ref)
|
||||
},
|
||||
|
||||
_plugPbd (pbd) {
|
||||
return this.call('PBD.plug', pbd.$ref)
|
||||
},
|
||||
|
||||
async plugPbd (id) {
|
||||
await this._plugPbd(this.getObject(id))
|
||||
},
|
||||
|
||||
_unplugPbd (pbd) {
|
||||
return this.call('PBD.unplug', pbd.$ref)
|
||||
},
|
||||
|
||||
async unplugPbd (id) {
|
||||
await this._unplugPbd(this.getObject(id))
|
||||
}
|
||||
}
|
||||
371
src/xapi/mixins/vm.js
Normal file
371
src/xapi/mixins/vm.js
Normal file
@@ -0,0 +1,371 @@
|
||||
import deferrable from 'golike-defer'
|
||||
import find from 'lodash/find'
|
||||
import gte from 'lodash/gte'
|
||||
import isEmpty from 'lodash/isEmpty'
|
||||
import lte from 'lodash/lte'
|
||||
|
||||
import {
|
||||
forEach,
|
||||
mapToArray,
|
||||
noop,
|
||||
parseSize,
|
||||
pCatch
|
||||
} from '../../utils'
|
||||
|
||||
import {
|
||||
isVmHvm,
|
||||
isVmRunning,
|
||||
makeEditObject,
|
||||
NULL_REF
|
||||
} from '../utils'
|
||||
|
||||
export default {
|
||||
// TODO: clean up on error.
|
||||
@deferrable.onFailure
|
||||
async createVm ($onFailure, templateId, {
|
||||
name_label, // deprecated
|
||||
nameLabel = name_label, // eslint-disable-line camelcase
|
||||
|
||||
clone = true,
|
||||
installRepository = undefined,
|
||||
vdis = undefined,
|
||||
vifs = undefined,
|
||||
existingVdis = undefined,
|
||||
|
||||
coreOs = false,
|
||||
cloudConfig = undefined,
|
||||
|
||||
...props
|
||||
} = {}, checkLimits) {
|
||||
const installMethod = (() => {
|
||||
if (installRepository == null) {
|
||||
return 'none'
|
||||
}
|
||||
|
||||
try {
|
||||
installRepository = this.getObject(installRepository)
|
||||
return 'cd'
|
||||
} catch (_) {
|
||||
return 'network'
|
||||
}
|
||||
})()
|
||||
const template = this.getObject(templateId)
|
||||
|
||||
// Clones the template.
|
||||
const vmRef = await this[clone ? '_cloneVm' : '_copyVm'](template, nameLabel)
|
||||
$onFailure(() => this.deleteVm(vmRef)::pCatch(noop))
|
||||
|
||||
// TODO: copy BIOS strings?
|
||||
|
||||
// Removes disks from the provision XML, we will create them by
|
||||
// ourselves.
|
||||
await this.call('VM.remove_from_other_config', vmRef, 'disks')::pCatch(noop)
|
||||
|
||||
// Creates the VDIs and executes the initial steps of the
|
||||
// installation.
|
||||
await this.call('VM.provision', vmRef)
|
||||
|
||||
let vm = await this._getOrWaitObject(vmRef)
|
||||
|
||||
// Set VMs params.
|
||||
await this._editVm(vm, props, checkLimits)
|
||||
|
||||
// Sets boot parameters.
|
||||
{
|
||||
const isHvm = isVmHvm(vm)
|
||||
|
||||
if (isHvm) {
|
||||
if (!isEmpty(vdis) || installMethod === 'network') {
|
||||
const { HVM_boot_params: bootParams } = vm
|
||||
let order = bootParams.order
|
||||
if (order) {
|
||||
order = 'n' + order.replace('n', '')
|
||||
} else {
|
||||
order = 'ncd'
|
||||
}
|
||||
|
||||
this._setObjectProperties(vm, {
|
||||
HVM_boot_params: { ...bootParams, order }
|
||||
})
|
||||
}
|
||||
} else { // PV
|
||||
if (vm.PV_bootloader === 'eliloader') {
|
||||
if (installMethod === 'network') {
|
||||
// TODO: normalize RHEL URL?
|
||||
|
||||
await this._updateObjectMapProperty(vm, 'other_config', {
|
||||
'install-repository': installRepository
|
||||
})
|
||||
} else if (installMethod === 'cd') {
|
||||
await this._updateObjectMapProperty(vm, 'other_config', {
|
||||
'install-repository': 'cdrom'
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let nVbds = vm.VBDs.length
|
||||
let hasBootableDisk = !!find(vm.$VBDs, 'bootable')
|
||||
|
||||
// Inserts the CD if necessary.
|
||||
if (installMethod === 'cd') {
|
||||
// When the VM is started, if PV, the CD drive will become not
|
||||
// bootable and the first disk bootable.
|
||||
await this._insertCdIntoVm(installRepository, vm, {
|
||||
bootable: true
|
||||
})
|
||||
hasBootableDisk = true
|
||||
|
||||
++nVbds
|
||||
}
|
||||
|
||||
// Modify existing (previous template) disks if necessary
|
||||
existingVdis && await Promise.all(mapToArray(existingVdis, async ({ size, $SR: srId, ...properties }, userdevice) => {
|
||||
const vbd = find(vm.$VBDs, { userdevice })
|
||||
if (!vbd) {
|
||||
return
|
||||
}
|
||||
const vdi = vbd.$VDI
|
||||
await this._setObjectProperties(vdi, properties)
|
||||
|
||||
// if the disk is bigger
|
||||
if (
|
||||
size != null &&
|
||||
size > vdi.virtual_size
|
||||
) {
|
||||
await this.resizeVdi(vdi.$id, size)
|
||||
}
|
||||
// if another SR is set, move it there
|
||||
if (srId) {
|
||||
await this.moveVdi(vdi.$id, srId)
|
||||
}
|
||||
}))
|
||||
|
||||
// Creates the user defined VDIs.
|
||||
//
|
||||
// TODO: set vm.suspend_SR
|
||||
if (!isEmpty(vdis)) {
|
||||
const devices = await this.call('VM.get_allowed_VBD_devices', vm.$ref)
|
||||
await Promise.all(mapToArray(vdis, (vdiDescription, i) => {
|
||||
++nVbds
|
||||
|
||||
return this._createVdi(
|
||||
vdiDescription.size, // FIXME: Should not be done in Xapi.
|
||||
{
|
||||
name_label: vdiDescription.name_label,
|
||||
name_description: vdiDescription.name_description,
|
||||
sr: vdiDescription.sr || vdiDescription.SR
|
||||
}
|
||||
)
|
||||
.then(ref => this._getOrWaitObject(ref))
|
||||
.then(vdi => this._createVbd(vm, vdi, {
|
||||
// Either the CD or the 1st disk is bootable (only useful for PV VMs)
|
||||
bootable: !(hasBootableDisk || i),
|
||||
userdevice: devices[i]
|
||||
}))
|
||||
}))
|
||||
}
|
||||
|
||||
// Destroys the VIFs cloned from the template.
|
||||
await Promise.all(mapToArray(vm.$VIFs, vif => this._deleteVif(vif)))
|
||||
|
||||
// Creates the VIFs specified by the user.
|
||||
let nVifs = 0
|
||||
if (vifs) {
|
||||
const devices = await this.call('VM.get_allowed_VIF_devices', vm.$ref)
|
||||
await Promise.all(mapToArray(vifs, (vif, index) => {
|
||||
++nVifs
|
||||
|
||||
return this._createVif(
|
||||
vm,
|
||||
this.getObject(vif.network),
|
||||
{
|
||||
ipv4_allowed: vif.ipv4_allowed,
|
||||
ipv6_allowed: vif.ipv6_allowed,
|
||||
device: devices[index],
|
||||
locking_mode: isEmpty(vif.ipv4_allowed) && isEmpty(vif.ipv6_allowed) ? 'network_default' : 'locked',
|
||||
mac: vif.mac,
|
||||
mtu: vif.mtu
|
||||
}
|
||||
)
|
||||
}))
|
||||
}
|
||||
|
||||
// TODO: Assign VGPUs.
|
||||
|
||||
if (cloudConfig != null) {
|
||||
// Refresh the record.
|
||||
vm = await this._waitObjectState(vm.$id, vm => vm.VBDs.length === nVbds)
|
||||
|
||||
// Find the SR of the first VDI.
|
||||
let srRef
|
||||
forEach(vm.$VBDs, vbd => {
|
||||
let vdi
|
||||
if (
|
||||
vbd.type === 'Disk' &&
|
||||
(vdi = vbd.$VDI)
|
||||
) {
|
||||
srRef = vdi.SR
|
||||
return false
|
||||
}
|
||||
})
|
||||
|
||||
const method = coreOs
|
||||
? 'createCoreOsCloudInitConfigDrive'
|
||||
: 'createCloudInitConfigDrive'
|
||||
await this[method](vm.$id, srRef, cloudConfig)
|
||||
|
||||
++nVbds
|
||||
}
|
||||
|
||||
// wait for the record with all the VBDs and VIFs
|
||||
return this._waitObjectState(vm.$id, vm =>
|
||||
vm.VBDs.length === nVbds &&
|
||||
vm.VIFs.length === nVifs
|
||||
)
|
||||
},
|
||||
|
||||
// High level method to edit a VM.
|
||||
//
|
||||
// Params do not correspond directly to XAPI props.
|
||||
_editVm: makeEditObject({
|
||||
affinityHost: {
|
||||
get: 'affinity',
|
||||
set (value, vm) {
|
||||
return this._setObjectProperty(
|
||||
vm,
|
||||
'affinity',
|
||||
value ? this.getObject(value).$ref : NULL_REF
|
||||
)
|
||||
}
|
||||
},
|
||||
|
||||
autoPoweron: {
|
||||
set (value, vm) {
|
||||
return Promise.all([
|
||||
this._updateObjectMapProperty(vm, 'other_config', {
|
||||
autoPoweron: value ? 'true' : null
|
||||
}),
|
||||
value && this.setPoolProperties({
|
||||
autoPoweron: true
|
||||
})
|
||||
])
|
||||
}
|
||||
},
|
||||
|
||||
CPUs: 'cpus',
|
||||
cpus: {
|
||||
addToLimits: true,
|
||||
|
||||
// Current value may have constraints with other values.
|
||||
//
|
||||
// If the other value is not set and the constraint is not
|
||||
// respected, the other value is changed first.
|
||||
constraints: {
|
||||
cpusStaticMax: gte
|
||||
},
|
||||
|
||||
get: vm => +vm.VCPUs_at_startup,
|
||||
set: [
|
||||
'VCPUs_at_startup',
|
||||
function (value, vm) {
|
||||
return isVmRunning(vm) && this._set('VCPUs_number_live', value)
|
||||
}
|
||||
]
|
||||
},
|
||||
|
||||
cpuCap: {
|
||||
get: vm => vm.VCPUs_params.cap && +vm.VCPUs_params.cap,
|
||||
set (cap, vm) {
|
||||
return this._updateObjectMapProperty(vm, 'VCPUs_params', { cap })
|
||||
}
|
||||
},
|
||||
|
||||
cpusMax: 'cpusStaticMax',
|
||||
cpusStaticMax: {
|
||||
constraints: {
|
||||
cpus: lte
|
||||
},
|
||||
get: vm => +vm.VCPUs_max,
|
||||
set: 'VCPUs_max'
|
||||
},
|
||||
|
||||
cpuWeight: {
|
||||
get: vm => vm.VCPUs_params.weight && +vm.VCPUs_params.weight,
|
||||
set (weight, vm) {
|
||||
return this._updateObjectMapProperty(vm, 'VCPUs_params', { weight })
|
||||
}
|
||||
},
|
||||
|
||||
highAvailability: {
|
||||
set (ha, vm) {
|
||||
return this.call('VM.set_ha_restart_priority', vm.$ref, ha ? 'restart' : '')
|
||||
}
|
||||
},
|
||||
|
||||
memoryMin: {
|
||||
constraints: {
|
||||
memoryMax: gte
|
||||
},
|
||||
get: vm => +vm.memory_dynamic_min,
|
||||
preprocess: parseSize,
|
||||
set: 'memory_dynamic_min'
|
||||
},
|
||||
|
||||
memory: 'memoryMax',
|
||||
memoryMax: {
|
||||
addToLimits: true,
|
||||
limitName: 'memory',
|
||||
constraints: {
|
||||
memoryMin: lte,
|
||||
memoryStaticMax: gte
|
||||
},
|
||||
get: vm => +vm.memory_dynamic_max,
|
||||
preprocess: parseSize,
|
||||
set: 'memory_dynamic_max'
|
||||
},
|
||||
|
||||
memoryStaticMax: {
|
||||
constraints: {
|
||||
memoryMax: lte
|
||||
},
|
||||
get: vm => +vm.memory_static_max,
|
||||
preprocess: parseSize,
|
||||
set: 'memory_static_max'
|
||||
},
|
||||
|
||||
nameDescription: true,
|
||||
|
||||
nameLabel: true,
|
||||
|
||||
PV_args: true,
|
||||
|
||||
tags: true
|
||||
}),
|
||||
|
||||
async editVm (id, props, checkLimits) {
|
||||
return /* await */ this._editVm(this.getObject(id), props, checkLimits)
|
||||
},
|
||||
|
||||
async revertVm (snapshotId, snapshotBefore = true) {
|
||||
const snapshot = this.getObject(snapshotId)
|
||||
if (snapshotBefore) {
|
||||
await this._snapshotVm(snapshot.$snapshot_of)
|
||||
}
|
||||
await this.call('VM.revert', snapshot.$ref)
|
||||
if (snapshot.snapshot_info['power-state-at-snapshot'] === 'Running') {
|
||||
const vm = snapshot.$snapshot_of
|
||||
if (vm.power_state === 'Halted') {
|
||||
this.startVm(vm.$id)::pCatch(noop)
|
||||
} else if (vm.power_state === 'Suspended') {
|
||||
this.resumeVm(vm.$id)::pCatch(noop)
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
async resumeVm (vmId) {
|
||||
// the force parameter is always true
|
||||
return this.call('VM.resume', this.getObject(vmId).$ref, false, true)
|
||||
}
|
||||
}
|
||||
53
src/xapi/other-config-template.js
Normal file
53
src/xapi/other-config-template.js
Normal file
@@ -0,0 +1,53 @@
|
||||
import { NULL_REF } from './utils'
|
||||
|
||||
const OTHER_CONFIG_TEMPLATE = {
|
||||
actions_after_crash: 'restart',
|
||||
actions_after_reboot: 'restart',
|
||||
actions_after_shutdown: 'destroy',
|
||||
affinity: null,
|
||||
blocked_operations: {},
|
||||
ha_always_run: false,
|
||||
HVM_boot_params: {
|
||||
order: 'cdn'
|
||||
},
|
||||
HVM_boot_policy: 'BIOS order',
|
||||
HVM_shadow_multiplier: 1,
|
||||
is_a_template: false,
|
||||
memory_dynamic_max: 4294967296,
|
||||
memory_dynamic_min: 4294967296,
|
||||
memory_static_max: 4294967296,
|
||||
memory_static_min: 134217728,
|
||||
order: 0,
|
||||
other_config: {
|
||||
vgpu_pci: '',
|
||||
base_template_name: 'Other install media',
|
||||
mac_seed: '5e88eb6a-d680-c47f-a94a-028886971ba4',
|
||||
'install-methods': 'cdrom'
|
||||
},
|
||||
PCI_bus: '',
|
||||
platform: {
|
||||
timeoffset: '0',
|
||||
nx: 'true',
|
||||
acpi: '1',
|
||||
apic: 'true',
|
||||
pae: 'true',
|
||||
hpet: 'true',
|
||||
viridian: 'true'
|
||||
},
|
||||
protection_policy: NULL_REF,
|
||||
PV_args: '',
|
||||
PV_bootloader: '',
|
||||
PV_bootloader_args: '',
|
||||
PV_kernel: '',
|
||||
PV_legacy_args: '',
|
||||
PV_ramdisk: '',
|
||||
recommendations: '<restrictions><restriction field="memory-static-max" max="137438953472" /><restriction field="vcpus-max" max="32" /><restriction property="number-of-vbds" max="255" /><restriction property="number-of-vifs" max="7" /><restriction field="has-vendor-device" value="false" /></restrictions>',
|
||||
shutdown_delay: 0,
|
||||
start_delay: 0,
|
||||
user_version: 1,
|
||||
VCPUs_at_startup: 1,
|
||||
VCPUs_max: 1,
|
||||
VCPUs_params: {},
|
||||
version: 0
|
||||
}
|
||||
export { OTHER_CONFIG_TEMPLATE as default }
|
||||
392
src/xapi/utils.js
Normal file
392
src/xapi/utils.js
Normal file
@@ -0,0 +1,392 @@
|
||||
// import isFinite from 'lodash/isFinite'
|
||||
import camelCase from 'lodash/camelCase'
|
||||
import createDebug from 'debug'
|
||||
import isEqual from 'lodash/isEqual'
|
||||
import isPlainObject from 'lodash/isPlainObject'
|
||||
import pickBy from 'lodash/pickBy'
|
||||
import { utcFormat, utcParse } from 'd3-time-format'
|
||||
import { satisfies as versionSatisfies } from 'semver'
|
||||
|
||||
import httpRequest from '../http-request'
|
||||
import {
|
||||
camelToSnakeCase,
|
||||
createRawObject,
|
||||
forEach,
|
||||
isArray,
|
||||
isBoolean,
|
||||
isFunction,
|
||||
isInteger,
|
||||
isString,
|
||||
map,
|
||||
mapFilter,
|
||||
mapToArray,
|
||||
noop,
|
||||
pFinally
|
||||
} from '../utils'
|
||||
|
||||
// ===================================================================
|
||||
|
||||
export const asBoolean = value => Boolean(value)
|
||||
|
||||
// const asFloat = value => {
|
||||
// value = String(value)
|
||||
// return value.indexOf('.') === -1
|
||||
// ? `${value}.0`
|
||||
// : value
|
||||
// }
|
||||
|
||||
export const asInteger = value => String(value)
|
||||
|
||||
export const filterUndefineds = obj => pickBy(obj, value => value !== undefined)
|
||||
|
||||
export const optional = (value, fn) => value == null
|
||||
? undefined
|
||||
: fn ? fn(value) : value
|
||||
|
||||
export const prepareXapiParam = param => {
|
||||
// if (isFinite(param) && !isInteger(param)) {
|
||||
// return asFloat(param)
|
||||
// }
|
||||
if (isInteger(param)) {
|
||||
return asInteger(param)
|
||||
}
|
||||
if (isBoolean(param)) {
|
||||
return asBoolean(param)
|
||||
}
|
||||
if (isArray(param)) {
|
||||
return map(param, prepareXapiParam)
|
||||
}
|
||||
if (isPlainObject(param)) {
|
||||
return map(filterUndefineds(param), prepareXapiParam)
|
||||
}
|
||||
|
||||
return param
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
export const debug = createDebug('xo:xapi')
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
const OPAQUE_REF_RE = /OpaqueRef:[0-9a-z-]+/
|
||||
export const extractOpaqueRef = str => {
|
||||
const matches = OPAQUE_REF_RE.exec(str)
|
||||
if (!matches) {
|
||||
throw new Error('no opaque ref found')
|
||||
}
|
||||
return matches[0]
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
// Object types given by `xen-api` are always lowercase but the
// namespaces in the Xen API can have a different casing: this table
// maps each lowercase type to its canonical namespace.
const TYPE_TO_NAMESPACE = createRawObject()
for (const namespace of [
  'Bond',
  'DR_task',
  'GPU_group',
  'PBD',
  'PCI',
  'PGPU',
  'PIF',
  'PIF_metrics',
  'SM',
  'SR',
  'VBD',
  'VBD_metrics',
  'VDI',
  'VGPU',
  'VGPU_type',
  'VIF',
  'VLAN',
  'VM',
  'VM_appliance',
  'VM_guest_metrics',
  'VM_metrics',
  'VMPP',
  'VTPM'
]) {
  TYPE_TO_NAMESPACE[namespace.toLowerCase()] = namespace
}

// Unknown types fall back to themselves.
export const getNamespaceForType = type => TYPE_TO_NAMESPACE[type] || type
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
// Format a date (pseudo ISO 8601, e.g. `20170102T03:04:05Z`) as
// returned by XenServer, e.g. by
// xapi.call('host.get_servertime', host.$ref)
export const formatDateTime = utcFormat('%Y%m%dT%H:%M:%SZ')

// Inverse operation: parse a XAPI datetime string into a `Date`
// (d3's `utcParse` returns `null` when the string does not match).
export const parseDateTime = utcParse('%Y%m%dT%H:%M:%SZ')
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
// A host is considered running when its metrics object is available
// and reports it as live.
//
// Returns a truthy/falsy value (not necessarily a boolean).
export const isHostRunning = host => {
  const metrics = host.$metrics
  return metrics && metrics.live
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
// A VM is HVM when its HVM boot policy is a non-empty string.
export const isVmHvm = vm => Boolean(vm.HVM_boot_policy)

// Power states in which a VM is considered running.
//
// Prototype-less literal so that an unexpected `power_state` value
// (e.g. `'toString'`) can never match an inherited `Object.prototype`
// property — same rationale as `createRawObject()` used for
// `TYPE_TO_NAMESPACE` above.
const VM_RUNNING_POWER_STATES = {
  __proto__: null,
  Running: true,
  Paused: true
}

// Returns `true` when the VM is running or paused, `undefined`
// otherwise (callers rely on truthiness, not on a strict boolean).
export const isVmRunning = vm => VM_RUNNING_POWER_STATES[vm.power_state]
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
// Default `addToLimits` implementation: the resource-limit delta is
// simply the difference between the new and the current value.
const _DEFAULT_ADD_TO_LIMITS = function (next, current) {
  return next - current
}
|
||||
|
||||
// Build a generic `editObject (id, values, checkLimits)` async method
// from a declarative description (`specs`) of the editable properties.
//
// Each entry of `specs` maps a property name to either:
// - `true`: shorthand for `{ get: true, set: true }`, getter/setter
//   are derived from the snake_cased name
// - a string: alias to another spec entry
// - an object with optional keys: `get`, `set`, `preprocess`,
//   `constraints`, `addToLimits`, `limitName`
//
// `specs` is normalized IN PLACE: `true`/string shorthands are
// expanded, aliases resolved, and camelCase/snake_case aliases added.
export const makeEditObject = specs => {
  // Turn a `get` description into a getter function:
  // - `true`: read the snake_cased property
  // - string: read that property
  // - function: used as-is
  const normalizeGet = (get, name) => {
    if (get === true) {
      const prop = camelToSnakeCase(name)
      return object => object[prop]
    }

    if (isString(get)) {
      return object => object[get]
    }

    return get
  }
  // Turn a `set` description into a setter function (called with the
  // context object as `this`, see `_editObject_` below):
  // - function: used as-is
  // - `true`: `<Type>.set_<snake_cased name>` XAPI call
  // - string without a dot: `<Type>.set_<snake_cased string>` call
  // - string `map.prop`: update entry `prop` of the map property `map`
  // - array: all the (normalized) setters are run in parallel
  const normalizeSet = (set, name) => {
    if (isFunction(set)) {
      return set
    }

    if (set === true) {
      const prop = camelToSnakeCase(name)
      return function (value) {
        return this._set(prop, value)
      }
    }

    if (isString(set)) {
      const index = set.indexOf('.')
      if (index === -1) {
        const prop = camelToSnakeCase(set)
        return function (value) {
          return this._set(prop, value)
        }
      }

      const map = set.slice(0, index)
      const prop = set.slice(index + 1)

      return function (value, object) {
        return this._updateObjectMapProperty(object, map, { [prop]: value })
      }
    }

    if (!isArray(set)) {
      throw new Error('must be an array, a function or a string')
    }

    // Normalize each setter of the array recursively.
    set = mapToArray(set, normalizeSet)

    const { length } = set
    if (!length) {
      throw new Error('invalid setter')
    }

    if (length === 1) {
      return set[0]
    }

    return function (value, object) {
      return Promise.all(mapToArray(set, set => set.call(this, value, object)))
    }
  }

  // Expand shorthands and validate a single spec entry (mutates and
  // returns `spec`).
  const normalizeSpec = (spec, name) => {
    if (spec === true) {
      spec = {
        get: true,
        set: true
      }
    }

    if (spec.addToLimits === true) {
      spec.addToLimits = _DEFAULT_ADD_TO_LIMITS
    }
    if (!spec.limitName) {
      spec.limitName = name
    }

    // Constraints must be functions and must reference spec entries
    // which have a getter (their current value is needed to evaluate
    // the constraint).
    forEach(spec.constraints, (constraint, constraintName) => {
      if (!isFunction(constraint)) {
        throw new Error('constraint must be a function')
      }

      const constraintSpec = specs[constraintName]
      if (!constraintSpec.get) {
        throw new Error('constraint values must have a get')
      }
    })

    const { get } = spec
    if (get) {
      spec.get = normalizeGet(get, name)
    } else if (spec.addToLimits) {
      throw new Error('addToLimits cannot be defined without get')
    }

    spec.set = normalizeSet(spec.set, name)

    return spec
  }
  // Normalize all non-alias entries (string entries are aliases,
  // resolved below).
  forEach(specs, (spec, name) => {
    isString(spec) || (specs[name] = normalizeSpec(spec, name))
  })

  // Resolves aliases and add camelCase and snake_case aliases.
  forEach(specs, (spec, name) => {
    if (isString(spec)) {
      // Follow alias chains until a real (object) spec is found.
      do {
        spec = specs[spec]
      } while (isString(spec))
      specs[name] = spec
    }

    let tmp
    specs[tmp = camelCase(name)] || (specs[tmp] = spec)
    specs[tmp = camelToSnakeCase(name)] || (specs[tmp] = spec)
  })

  // The returned method: applies `values` to the object identified by
  // `id`, optionally checking resource limits via `checkLimits`.
  return async function _editObject_ (id, values, checkLimits) {
    // Limits accumulator, only when limit checking was requested.
    const limits = checkLimits && {}
    const object = this.getObject(id)

    const _objectRef = object.$ref
    const _setMethodPrefix = `${getNamespaceForType(object.$type)}.set_`

    // Context used to execute functions.
    const context = {
      __proto__: this,
      _set: (prop, value) => this.call(_setMethodPrefix + prop, _objectRef, prepareXapiParam(value))
    }

    // Returns a callback performing the actual write for `name`, or
    // `undefined` when nothing needs to be done (unknown property,
    // unchanged value, …).
    const set = (value, name) => {
      if (value === undefined) {
        return
      }

      const spec = specs[name]
      if (!spec) {
        return
      }

      const { preprocess } = spec
      if (preprocess) {
        value = preprocess(value)
      }

      const { get } = spec
      if (get) {
        const current = get(object)
        // Skip no-op writes.
        if (isEqual(value, current)) {
          return
        }

        let addToLimits
        if (limits && (addToLimits = spec.addToLimits)) {
          limits[spec.limitName] = addToLimits(value, current)
        }
      }

      const cb = () => spec.set.call(context, value, object)

      const { constraints } = spec
      if (constraints) {
        const cbs = []

        forEach(constraints, (constraint, constraintName) => {
          // Before setting a property to a new value, if the constraint check fails (e.g. memoryMin > memoryMax):
          // - if the user wants to set the constraint (ie constraintNewValue is defined):
          //   constraint <-- constraintNewValue THEN property <-- value (e.g. memoryMax <-- 2048 THEN memoryMin <-- 1024)
          // - if the user DOES NOT want to set the constraint (ie constraintNewValue is NOT defined):
          //   constraint <-- value THEN property <-- value (e.g. memoryMax <-- 1024 THEN memoryMin <-- 1024)
          // FIXME: Some values combinations will lead to setting the same property twice, which is not perfect but works for now.
          const constraintCurrentValue = specs[constraintName].get(object)
          const constraintNewValue = values[constraintName]

          if (!constraint(constraintCurrentValue, value)) {
            // Recursively set the constrained property first.
            const cb = set(constraintNewValue == null ? value : constraintNewValue, constraintName)
            if (cb) {
              cbs.push(cb)
            }
          }
        })

        if (cbs.length) {
          // Constrained properties are written first, then this one.
          return () => Promise.all(mapToArray(cbs, cb => cb())).then(cb)
        }
      }

      return cb
    }

    // Collect all the (non-undefined) write callbacks…
    const cbs = mapFilter(values, set)

    // …check the accumulated limit deltas before writing anything…
    if (checkLimits) {
      await checkLimits(limits, object)
    }

    // …then perform all writes in parallel.
    return Promise.all(mapToArray(cbs, cb => cb())).then(noop)
  }
}
|
||||
|
||||
// ===================================================================
|
||||
|
||||
// Reference used by the XAPI to mean "no object".
export const NULL_REF = 'OpaqueRef:NULL'
|
||||
|
||||
// ===================================================================
|
||||
|
||||
// HTTP put, use an ugly hack if the length is not known because XAPI
// does not support chunk encoding.
//
// `stream` is the request body, `task` is an optional promise (e.g. a
// XAPI task) whose settlement triggers cancellation of the request.
// Resolves with the full response body.
export const put = (stream, {
  headers: { ...headers } = {},
  ...opts
}, task) => {
  const makeRequest = () => httpRequest({
    ...opts,
    body: stream,
    headers,
    method: 'put'
  })

  // Xen API does not support chunk encoding.
  if (stream.length == null) {
    // Disable the transfer-encoding header that would otherwise be
    // added for a body of unknown length.
    headers['transfer-encoding'] = null

    const promise = makeRequest()

    if (task) {
      // Some connections need the task to resolve (VDI import).
      task::pFinally(() => {
        promise.cancel()
      })
    } else {
      // Some tasks need the connection to close (VM import).
      promise.request.once('finish', () => {
        promise.cancel()
      })
    }

    return promise.readAll()
  }

  return makeRequest().readAll()
}
|
||||
|
||||
// Whether this host's platform version is recent enough to use the
// new update system.
//
// Match Xen Center's condition:
// https://github.com/xenserver/xenadmin/blob/f3a64fc54bbff239ca6f285406d9034f57537d64/XenModel/Utils/Helpers.cs#L420
export const useUpdateSystem = host =>
  versionSatisfies(host.software_version.platform_version, '^2.1.1')
|
||||
0
src/xo-mixins/.index-modules
Normal file
0
src/xo-mixins/.index-modules
Normal file
@@ -19,27 +19,47 @@ export default class {
|
||||
constructor (xo) {
|
||||
this._xo = xo
|
||||
|
||||
this._acls = new Acls({
|
||||
const aclsDb = this._acls = new Acls({
|
||||
connection: xo._redis,
|
||||
prefix: 'xo:acl',
|
||||
indexes: ['subject', 'object']
|
||||
})
|
||||
|
||||
xo.on('start', () => {
|
||||
xo.addConfigManager('acls',
|
||||
() => aclsDb.get(),
|
||||
acls => aclsDb.update(acls)
|
||||
)
|
||||
})
|
||||
|
||||
xo.on('clean', async () => {
|
||||
const acls = await aclsDb.get()
|
||||
const toRemove = []
|
||||
forEach(acls, ({ subject, object, action, id }) => {
|
||||
if (!subject || !object || !action) {
|
||||
toRemove.push(id)
|
||||
}
|
||||
})
|
||||
await aclsDb.remove(toRemove)
|
||||
})
|
||||
}
|
||||
|
||||
async _getAclsForUser (userId) {
|
||||
const subjects = (await this._xo.getUser(userId)).groups.concat(userId)
|
||||
const user = await this._xo.getUser(userId)
|
||||
const { groups } = user
|
||||
|
||||
const subjects = groups
|
||||
? groups.concat(userId)
|
||||
: [ userId ]
|
||||
|
||||
const acls = []
|
||||
const pushAcls = (function (push) {
|
||||
return function (entries) {
|
||||
push.apply(acls, entries)
|
||||
}
|
||||
const pushAcls = (push => entries => {
|
||||
push.apply(acls, entries)
|
||||
})(acls.push)
|
||||
|
||||
const {_acls: collection} = this
|
||||
await Promise.all(mapToArray(
|
||||
subjects,
|
||||
subject => collection.get({subject}).then(pushAcls)
|
||||
subject => this.getAclsForSubject(subject).then(pushAcls)
|
||||
))
|
||||
|
||||
return acls
|
||||
@@ -64,6 +84,10 @@ export default class {
|
||||
return this._acls.get()
|
||||
}
|
||||
|
||||
async getAclsForSubject (subjectId) {
|
||||
return this._acls.get({ subject: subjectId })
|
||||
}
|
||||
|
||||
async getPermissionsForUser (userId) {
|
||||
const [
|
||||
acls,
|
||||
|
||||
@@ -1,29 +1,26 @@
|
||||
import createDebug from 'debug'
|
||||
const debug = createDebug('xo:api')
|
||||
|
||||
import getKeys from 'lodash.keys'
|
||||
import kindOf from 'kindof'
|
||||
import ms from 'ms'
|
||||
import schemaInspector from 'schema-inspector'
|
||||
|
||||
import * as methods from '../api'
|
||||
import {
|
||||
InvalidParameters,
|
||||
MethodNotFound,
|
||||
NoSuchObject,
|
||||
Unauthorized
|
||||
} from './api-errors'
|
||||
import {
|
||||
version as xoServerVersion
|
||||
} from '../package.json'
|
||||
MethodNotFound
|
||||
} from 'json-rpc-peer'
|
||||
import {
|
||||
createRawObject,
|
||||
forEach,
|
||||
isFunction,
|
||||
noop
|
||||
} from './utils'
|
||||
noop,
|
||||
serializeError
|
||||
} from '../utils'
|
||||
|
||||
import * as errors from 'xo-common/api-errors'
|
||||
|
||||
// ===================================================================
|
||||
|
||||
const debug = createDebug('xo:api')
|
||||
|
||||
const PERMISSIONS = {
|
||||
none: 0,
|
||||
read: 1,
|
||||
@@ -31,39 +28,30 @@ const PERMISSIONS = {
|
||||
admin: 3
|
||||
}
|
||||
|
||||
// TODO:
|
||||
// - error when adding a server to a pool with incompatible version
|
||||
// - error when halted VM migration failure is due to XS < 7
|
||||
const XAPI_ERROR_TO_XO_ERROR = {
|
||||
EHOSTUNREACH: errors.serverUnreachable,
|
||||
HOST_OFFLINE: ([ host ], getId) => errors.hostOffline({ host: getId(host) }),
|
||||
NO_HOSTS_AVAILABLE: errors.noHostsAvailable,
|
||||
NOT_SUPPORTED_DURING_UPGRADE: errors.notSupportedDuringUpgrade,
|
||||
OPERATION_BLOCKED: ([ ref, code ], getId) => errors.operationBlocked({ objectId: getId(ref), code }),
|
||||
PATCH_PRECHECK_FAILED_ISO_MOUNTED: ([ patch ]) => errors.patchPrecheck({ errorType: 'isoMounted', patch }),
|
||||
PIF_VLAN_EXISTS: ([ pif ], getId) => errors.objectAlreadyExists({ objectId: getId(pif), objectType: 'PIF' }),
|
||||
SESSION_AUTHENTICATION_FAILED: errors.authenticationFailed,
|
||||
VDI_IN_USE: ([ vdi, operation ], getId) => errors.vdiInUse({ vdi: getId(vdi), operation }),
|
||||
VM_BAD_POWER_STATE: ([ vm, expected, actual ], getId) => errors.vmBadPowerState({ vm: getId(vm), expected, actual }),
|
||||
VM_IS_TEMPLATE: errors.vmIsTemplate,
|
||||
VM_LACKS_FEATURE: ([ vm ], getId) => errors.vmLacksFeature({ vm: getId(vm) }),
|
||||
VM_LACKS_FEATURE_SHUTDOWN: ([ vm ], getId) => errors.vmLacksFeature({ vm: getId(vm), feature: 'shutdown' }),
|
||||
VM_MISSING_PV_DRIVERS: ([ vm ], getId) => errors.vmMissingPvDrivers({ vm: getId(vm) })
|
||||
}
|
||||
|
||||
const hasPermission = (user, permission) => (
|
||||
PERMISSIONS[user.permission] >= PERMISSIONS[permission]
|
||||
)
|
||||
|
||||
// FIXME: this function is specific to XO and should not be defined in
|
||||
// this file.
|
||||
function checkPermission (method) {
|
||||
/* jshint validthis: true */
|
||||
|
||||
const {permission} = method
|
||||
|
||||
// No requirement.
|
||||
if (permission === undefined) {
|
||||
return
|
||||
}
|
||||
|
||||
const {user} = this
|
||||
if (!user) {
|
||||
throw new Unauthorized()
|
||||
}
|
||||
|
||||
// The only requirement is login.
|
||||
if (!permission) {
|
||||
return
|
||||
}
|
||||
|
||||
if (!hasPermission(user, permission)) {
|
||||
throw new Unauthorized()
|
||||
}
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
function checkParams (method, params) {
|
||||
const schema = method.params
|
||||
if (!schema) {
|
||||
@@ -76,11 +64,34 @@ function checkParams (method, params) {
|
||||
}, params)
|
||||
|
||||
if (!result.valid) {
|
||||
throw new InvalidParameters(result.error)
|
||||
throw errors.invalidParameters(result.error)
|
||||
}
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
function checkPermission (method) {
|
||||
/* jshint validthis: true */
|
||||
|
||||
const {permission} = method
|
||||
|
||||
// No requirement.
|
||||
if (permission === undefined) {
|
||||
return
|
||||
}
|
||||
|
||||
const {user} = this
|
||||
if (!user) {
|
||||
throw errors.unauthorized()
|
||||
}
|
||||
|
||||
// The only requirement is login.
|
||||
if (!permission) {
|
||||
return
|
||||
}
|
||||
|
||||
if (!hasPermission(user, permission)) {
|
||||
throw errors.unauthorized()
|
||||
}
|
||||
}
|
||||
|
||||
function resolveParams (method, params) {
|
||||
const resolve = method.resolve
|
||||
@@ -90,7 +101,7 @@ function resolveParams (method, params) {
|
||||
|
||||
const {user} = this
|
||||
if (!user) {
|
||||
throw new Unauthorized()
|
||||
throw errors.unauthorized()
|
||||
}
|
||||
|
||||
const userId = user.id
|
||||
@@ -126,89 +137,29 @@ function resolveParams (method, params) {
|
||||
return params
|
||||
}
|
||||
|
||||
throw new Unauthorized()
|
||||
throw errors.unauthorized()
|
||||
})
|
||||
}
|
||||
|
||||
// ===================================================================
|
||||
|
||||
function getMethodsInfo () {
|
||||
const methods = {}
|
||||
|
||||
forEach(this.api._methods, (method, name) => {
|
||||
methods[name] = {
|
||||
description: method.description,
|
||||
params: method.params || {},
|
||||
permission: method.permission
|
||||
}
|
||||
})
|
||||
|
||||
return methods
|
||||
}
|
||||
getMethodsInfo.description = 'returns the signatures of all available API methods'
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
const getServerVersion = () => xoServerVersion
|
||||
getServerVersion.description = 'return the version of xo-server'
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
const getVersion = () => '0.1'
|
||||
getVersion.description = 'API version (unstable)'
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
function listMethods () {
|
||||
return getKeys(this.api._methods)
|
||||
}
|
||||
listMethods.description = 'returns the name of all available API methods'
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
function methodSignature ({method: name}) {
|
||||
const method = this.api.getMethod(name)
|
||||
|
||||
if (!method) {
|
||||
throw new NoSuchObject()
|
||||
}
|
||||
|
||||
// Return an array for compatibility with XML-RPC.
|
||||
return [
|
||||
// XML-RPC require the name of the method.
|
||||
{
|
||||
name,
|
||||
description: method.description,
|
||||
params: method.params || {},
|
||||
permission: method.permission
|
||||
}
|
||||
]
|
||||
}
|
||||
methodSignature.description = 'returns the signature of an API method'
|
||||
|
||||
// ===================================================================
|
||||
|
||||
export default class Api {
|
||||
constructor ({
|
||||
context,
|
||||
verboseLogsOnErrors
|
||||
} = {}) {
|
||||
constructor (xo) {
|
||||
this._logger = null
|
||||
this._methods = createRawObject()
|
||||
this._verboseLogsOnErrors = verboseLogsOnErrors
|
||||
this.context = context
|
||||
this._xo = xo
|
||||
|
||||
this.addMethods({
|
||||
system: {
|
||||
getMethodsInfo,
|
||||
getServerVersion,
|
||||
getVersion,
|
||||
listMethods,
|
||||
methodSignature
|
||||
}
|
||||
this.addApiMethods(methods)
|
||||
xo.on('start', async () => {
|
||||
this._logger = await xo.getLogger('api')
|
||||
})
|
||||
}
|
||||
|
||||
addMethod (name, method) {
|
||||
get apiMethods () {
|
||||
return this._methods
|
||||
}
|
||||
|
||||
addApiMethod (name, method) {
|
||||
const methods = this._methods
|
||||
|
||||
if (name in methods) {
|
||||
@@ -217,21 +168,22 @@ export default class Api {
|
||||
|
||||
methods[name] = method
|
||||
|
||||
let unset = () => {
|
||||
let remove = () => {
|
||||
delete methods[name]
|
||||
unset = noop
|
||||
remove = noop
|
||||
}
|
||||
return () => unset()
|
||||
return () => remove()
|
||||
}
|
||||
|
||||
addMethods (methods) {
|
||||
addApiMethods (methods) {
|
||||
let base = ''
|
||||
const removes = []
|
||||
|
||||
const addMethod = (method, name) => {
|
||||
name = base + name
|
||||
|
||||
if (isFunction(method)) {
|
||||
this.addMethod(name, method)
|
||||
removes.push(this.addApiMethod(name, method))
|
||||
return
|
||||
}
|
||||
|
||||
@@ -240,20 +192,35 @@ export default class Api {
|
||||
forEach(method, addMethod)
|
||||
base = oldBase
|
||||
}
|
||||
forEach(methods, addMethod)
|
||||
|
||||
try {
|
||||
forEach(methods, addMethod)
|
||||
} catch (error) {
|
||||
// Remove all added methods.
|
||||
forEach(removes, remove => remove())
|
||||
|
||||
// Forward the error
|
||||
throw error
|
||||
}
|
||||
|
||||
let remove = () => {
|
||||
forEach(removes, remove => remove())
|
||||
remove = noop
|
||||
}
|
||||
return remove
|
||||
}
|
||||
|
||||
async call (session, name, params) {
|
||||
async callApiMethod (session, name, params) {
|
||||
const startTime = Date.now()
|
||||
|
||||
const method = this.getMethod(name)
|
||||
const method = this._methods[name]
|
||||
if (!method) {
|
||||
throw new MethodNotFound(name)
|
||||
}
|
||||
|
||||
// FIXME: it can cause issues if there any property assignments in
|
||||
// XO methods called from the API.
|
||||
const context = Object.create(this.context, {
|
||||
const context = Object.create(this._xo, {
|
||||
api: { // Used by system.*().
|
||||
value: this
|
||||
},
|
||||
@@ -262,17 +229,30 @@ export default class Api {
|
||||
}
|
||||
})
|
||||
|
||||
// FIXME: too coupled with XO.
|
||||
// Fetch and inject the current user.
|
||||
const userId = session.get('user_id', undefined)
|
||||
context.user = userId && await context.getUser(userId)
|
||||
context.user = userId && await this._xo.getUser(userId)
|
||||
const userName = context.user
|
||||
? context.user.email
|
||||
: '(unknown user)'
|
||||
|
||||
try {
|
||||
await checkPermission.call(context, method)
|
||||
checkParams(method, params)
|
||||
|
||||
// API methods are in a namespace.
|
||||
// Some methods use the namespace or an id parameter like:
|
||||
//
|
||||
// vm.detachPci vm=<string>
|
||||
// vm.ejectCd id=<string>
|
||||
//
|
||||
// The goal here is to standardize the calls by always providing
|
||||
// an id parameter when possible to simplify calls to the API.
|
||||
if (params && params.id === undefined) {
|
||||
const namespace = name.slice(0, name.indexOf('.'))
|
||||
params.id = params[namespace]
|
||||
}
|
||||
|
||||
checkParams.call(context, method, params)
|
||||
|
||||
const resolvedParams = await resolveParams.call(context, method, params)
|
||||
|
||||
@@ -294,15 +274,19 @@ export default class Api {
|
||||
|
||||
return result
|
||||
} catch (error) {
|
||||
if (this._verboseLogsOnErrors) {
|
||||
debug(
|
||||
'%s | %s(%j) [%s] =!> %s',
|
||||
userName,
|
||||
name,
|
||||
params,
|
||||
ms(Date.now() - startTime),
|
||||
error
|
||||
)
|
||||
const data = {
|
||||
userId,
|
||||
method: name,
|
||||
params,
|
||||
duration: Date.now() - startTime,
|
||||
error: serializeError(error)
|
||||
}
|
||||
const message = `${userName} | ${name}(${JSON.stringify(params)}) [${ms(Date.now() - startTime)}] =!> ${error}`
|
||||
|
||||
this._logger.error(message, data)
|
||||
|
||||
if (this._xo._config.verboseLogsOnErrors) {
|
||||
debug(message)
|
||||
|
||||
const stack = error && error.stack
|
||||
if (stack) {
|
||||
@@ -318,11 +302,18 @@ export default class Api {
|
||||
)
|
||||
}
|
||||
|
||||
const xoError = XAPI_ERROR_TO_XO_ERROR[error.code]
|
||||
if (xoError) {
|
||||
throw xoError(error.params, ref => {
|
||||
try {
|
||||
return this._xo.getObject(ref).id
|
||||
} catch (e) {
|
||||
return ref
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
throw error
|
||||
}
|
||||
}
|
||||
|
||||
getMethod (name) {
|
||||
return this._methods[name]
|
||||
}
|
||||
}
|
||||
@@ -1,9 +1,8 @@
|
||||
import Token, { Tokens } from '../models/token'
|
||||
import {
|
||||
NoSuchObject
|
||||
} from '../api-errors'
|
||||
import { noSuchObject } from 'xo-common/api-errors'
|
||||
import {
|
||||
createRawObject,
|
||||
forEach,
|
||||
generateToken,
|
||||
pCatch,
|
||||
noop
|
||||
@@ -11,13 +10,10 @@ import {
|
||||
|
||||
// ===================================================================
|
||||
|
||||
class NoSuchAuthenticationToken extends NoSuchObject {
|
||||
constructor (id) {
|
||||
super(id, 'authentication token')
|
||||
}
|
||||
}
|
||||
const noSuchAuthenticationToken = id =>
|
||||
noSuchObject(id, 'authenticationToken')
|
||||
|
||||
// ===================================================================
|
||||
const ONE_MONTH = 1e3 * 60 * 60 * 24 * 30
|
||||
|
||||
export default class {
|
||||
constructor (xo) {
|
||||
@@ -30,7 +26,7 @@ export default class {
|
||||
this._providers = new Set()
|
||||
|
||||
// Creates persistent collections.
|
||||
this._tokens = new Tokens({
|
||||
const tokensDb = this._tokens = new Tokens({
|
||||
connection: xo._redis,
|
||||
prefix: 'xo:token',
|
||||
indexes: ['user_id']
|
||||
@@ -61,9 +57,26 @@ export default class {
|
||||
|
||||
try {
|
||||
return (await xo.getAuthenticationToken(tokenId)).user_id
|
||||
} catch (e) {
|
||||
return
|
||||
}
|
||||
} catch (error) {}
|
||||
})
|
||||
|
||||
xo.on('clean', async () => {
|
||||
const tokens = await tokensDb.get()
|
||||
const toRemove = []
|
||||
const now = Date.now()
|
||||
forEach(tokens, ({ expiration, id }) => {
|
||||
if (!expiration || expiration < now) {
|
||||
toRemove.push(id)
|
||||
}
|
||||
})
|
||||
await tokensDb.remove(toRemove)
|
||||
})
|
||||
|
||||
xo.on('start', () => {
|
||||
xo.addConfigManager('authTokens',
|
||||
() => tokensDb.get(),
|
||||
tokens => tokensDb.update(tokens)
|
||||
)
|
||||
})
|
||||
}
|
||||
|
||||
@@ -141,7 +154,7 @@ export default class {
|
||||
const token = new Token({
|
||||
id: await generateToken(),
|
||||
user_id: userId,
|
||||
expiration: Date.now() + 1e3 * 60 * 60 * 24 * 30 // 1 month validity.
|
||||
expiration: Date.now() + ONE_MONTH
|
||||
})
|
||||
|
||||
await this._tokens.add(token)
|
||||
@@ -151,15 +164,15 @@ export default class {
|
||||
}
|
||||
|
||||
async deleteAuthenticationToken (id) {
|
||||
if (!await this._tokens.remove(id)) { // eslint-disable-line space-before-keywords
|
||||
throw new NoSuchAuthenticationToken(id)
|
||||
if (!await this._tokens.remove(id)) {
|
||||
throw noSuchAuthenticationToken(id)
|
||||
}
|
||||
}
|
||||
|
||||
async getAuthenticationToken (id) {
|
||||
let token = await this._tokens.first(id)
|
||||
if (!token) {
|
||||
throw new NoSuchAuthenticationToken(id)
|
||||
throw noSuchAuthenticationToken(id)
|
||||
}
|
||||
|
||||
token = token.properties
|
||||
@@ -169,13 +182,13 @@ export default class {
|
||||
)) {
|
||||
this._tokens.remove(id)::pCatch(noop)
|
||||
|
||||
throw new NoSuchAuthenticationToken(id)
|
||||
throw noSuchAuthenticationToken(id)
|
||||
}
|
||||
|
||||
return token
|
||||
}
|
||||
|
||||
async _getAuthenticationTokensForUser (userId) {
|
||||
async getAuthenticationTokensForUser (userId) {
|
||||
return this._tokens.get({ user_id: userId })
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,29 +1,44 @@
|
||||
import endsWith from 'lodash.endswith'
|
||||
import deferrable from 'golike-defer'
|
||||
import escapeStringRegexp from 'escape-string-regexp'
|
||||
import eventToPromise from 'event-to-promise'
|
||||
import filter from 'lodash.filter'
|
||||
import find from 'lodash.find'
|
||||
import findIndex from 'lodash.findindex'
|
||||
import sortBy from 'lodash.sortby'
|
||||
import startsWith from 'lodash.startswith'
|
||||
import execa from 'execa'
|
||||
import splitLines from 'split-lines'
|
||||
import { createParser as createPairsParser } from 'parse-pairs'
|
||||
import { createReadStream, readdir, stat } from 'fs'
|
||||
import { satisfies as versionSatisfies } from 'semver'
|
||||
import { utcFormat } from 'd3-time-format'
|
||||
import {
|
||||
basename,
|
||||
dirname
|
||||
} from 'path'
|
||||
import { satisfies as versionSatisfies } from 'semver'
|
||||
|
||||
import vhdMerge from '../vhd-merge'
|
||||
import xapiObjectToXo from '../xapi-object-to-xo'
|
||||
import {
|
||||
deferrable
|
||||
} from '../decorators'
|
||||
endsWith,
|
||||
filter,
|
||||
find,
|
||||
findIndex,
|
||||
includes,
|
||||
once,
|
||||
sortBy,
|
||||
startsWith,
|
||||
trim
|
||||
} from 'lodash'
|
||||
|
||||
import vhdMerge, { chainVhd } from '../vhd-merge'
|
||||
import xapiObjectToXo from '../xapi-object-to-xo'
|
||||
import { lvs, pvs } from '../lvm'
|
||||
import {
|
||||
forEach,
|
||||
mapFilter,
|
||||
mapToArray,
|
||||
noop,
|
||||
pCatch,
|
||||
pFinally,
|
||||
pFromCallback,
|
||||
pSettle,
|
||||
safeDateFormat
|
||||
resolveSubpath,
|
||||
safeDateFormat,
|
||||
safeDateParse,
|
||||
tmpDir
|
||||
} from '../utils'
|
||||
import {
|
||||
VDI_FORMAT_VHD
|
||||
@@ -34,6 +49,8 @@ import {
|
||||
const DELTA_BACKUP_EXT = '.json'
|
||||
const DELTA_BACKUP_EXT_LENGTH = DELTA_BACKUP_EXT.length
|
||||
|
||||
const shortDate = utcFormat('%Y-%m-%d')
|
||||
|
||||
// Test if a file is a vdi backup. (full or delta)
|
||||
const isVdiBackup = name => /^\d+T\d+Z_(?:full|delta)\.vhd$/.test(name)
|
||||
|
||||
@@ -41,6 +58,41 @@ const isVdiBackup = name => /^\d+T\d+Z_(?:full|delta)\.vhd$/.test(name)
|
||||
const isDeltaVdiBackup = name => /^\d+T\d+Z_delta\.vhd$/.test(name)
|
||||
const isFullVdiBackup = name => /^\d+T\d+Z_full\.vhd$/.test(name)
|
||||
|
||||
const toTimestamp = date => date && Math.round(date.getTime() / 1000)
|
||||
|
||||
const parseVmBackupPath = name => {
|
||||
const base = basename(name)
|
||||
let baseMatches
|
||||
|
||||
baseMatches = /^([^_]+)_([^_]+)_(.+)\.xva$/.exec(base)
|
||||
if (baseMatches) {
|
||||
return {
|
||||
datetime: toTimestamp(safeDateParse(baseMatches[1])),
|
||||
id: name,
|
||||
name: baseMatches[3],
|
||||
tag: baseMatches[2],
|
||||
type: 'xva'
|
||||
}
|
||||
}
|
||||
|
||||
let dirMatches
|
||||
if (
|
||||
(baseMatches = /^([^_]+)_(.+)\.json$/.exec(base)) &&
|
||||
(dirMatches = /^vm_delta_([^_]+)_(.+)$/.exec(basename(dirname(name))))
|
||||
) {
|
||||
return {
|
||||
datetime: toTimestamp(safeDateParse(baseMatches[1])),
|
||||
id: name,
|
||||
name: baseMatches[2],
|
||||
tag: dirMatches[1],
|
||||
type: 'delta',
|
||||
uuid: dirMatches[2]
|
||||
}
|
||||
}
|
||||
|
||||
throw new Error('invalid VM backup filename')
|
||||
}
|
||||
|
||||
// Get the timestamp of a vdi backup. (full or delta)
|
||||
const getVdiTimestamp = name => {
|
||||
const arr = /^(\d+T\d+Z)_(?:full|delta)\.vhd$/.exec(name)
|
||||
@@ -50,21 +102,200 @@ const getVdiTimestamp = name => {
|
||||
const getDeltaBackupNameWithoutExt = name => name.slice(0, -DELTA_BACKUP_EXT_LENGTH)
|
||||
const isDeltaBackup = name => endsWith(name, DELTA_BACKUP_EXT)
|
||||
|
||||
// Checksums have been corrupted between 5.2.6 and 5.2.7.
|
||||
//
|
||||
// For a short period of time, bad checksums will be regenerated
|
||||
// instead of rejected.
|
||||
//
|
||||
// TODO: restore when enough time has passed (a week/a month).
|
||||
async function checkFileIntegrity (handler, name) {
|
||||
let stream
|
||||
await handler.refreshChecksum(name)
|
||||
// let stream
|
||||
//
|
||||
// try {
|
||||
// stream = await handler.createReadStream(name, { checksum: true })
|
||||
// } catch (error) {
|
||||
// if (error.code === 'ENOENT') {
|
||||
// return
|
||||
// }
|
||||
//
|
||||
// throw error
|
||||
// }
|
||||
//
|
||||
// stream.resume()
|
||||
// await eventToPromise(stream, 'finish')
|
||||
}
|
||||
|
||||
try {
|
||||
stream = await handler.createReadStream(name, { checksum: true })
|
||||
} catch (error) {
|
||||
if (error.code === 'ENOENT') {
|
||||
return
|
||||
}
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
throw error
|
||||
const listPartitions = (() => {
|
||||
const IGNORED = {}
|
||||
forEach([
|
||||
// https://github.com/jhermsmeier/node-mbr/blob/master/lib/partition.js#L38
|
||||
0x05, 0x0F, 0x85, 0x15, 0x91, 0x9B, 0x5E, 0x5F, 0xCF, 0xD5, 0xC5,
|
||||
|
||||
0x82 // swap
|
||||
], type => {
|
||||
IGNORED[type] = true
|
||||
})
|
||||
|
||||
const TYPES = {
|
||||
0x7: 'NTFS',
|
||||
0x83: 'linux',
|
||||
0xc: 'FAT'
|
||||
}
|
||||
|
||||
stream.resume()
|
||||
await eventToPromise(stream, 'finish')
|
||||
const parseLine = createPairsParser({
|
||||
keyTransform: key => key === 'UUID'
|
||||
? 'id'
|
||||
: key.toLowerCase(),
|
||||
valueTransform: (value, key) => key === 'start' || key === 'size'
|
||||
? +value
|
||||
: key === 'type'
|
||||
? TYPES[+value] || value
|
||||
: value
|
||||
})
|
||||
|
||||
return device => execa.stdout('partx', [
|
||||
'--bytes',
|
||||
'--output=NR,START,SIZE,NAME,UUID,TYPE',
|
||||
'--pairs',
|
||||
device.path
|
||||
]).then(stdout => mapFilter(splitLines(stdout), line => {
|
||||
const partition = parseLine(line)
|
||||
const { type } = partition
|
||||
if (type != null && !IGNORED[+type]) {
|
||||
return partition
|
||||
}
|
||||
}))
|
||||
})()
|
||||
|
||||
// handle LVM logical volumes automatically
|
||||
const listPartitions2 = device => listPartitions(device).then(partitions => {
|
||||
const partitions2 = []
|
||||
const promises = []
|
||||
forEach(partitions, partition => {
|
||||
if (+partition.type === 0x8e) {
|
||||
promises.push(mountLvmPv(device, partition).then(device => {
|
||||
const promise = listLvmLvs(device).then(lvs => {
|
||||
forEach(lvs, lv => {
|
||||
partitions2.push({
|
||||
name: lv.lv_name,
|
||||
size: +lv.lv_size,
|
||||
id: `${partition.id}/${lv.vg_name}/${lv.lv_name}`
|
||||
})
|
||||
})
|
||||
})
|
||||
promise::pFinally(device.unmount)
|
||||
return promise
|
||||
}))
|
||||
} else {
|
||||
partitions2.push(partition)
|
||||
}
|
||||
})
|
||||
return Promise.all(promises).then(() => partitions2)
|
||||
})
|
||||
|
||||
const mountPartition = (device, partitionId) => Promise.all([
|
||||
partitionId != null && listPartitions(device),
|
||||
tmpDir()
|
||||
]).then(([ partitions, path ]) => {
|
||||
const options = [
|
||||
'loop',
|
||||
'ro'
|
||||
]
|
||||
|
||||
if (partitions) {
|
||||
const partition = find(partitions, { id: partitionId })
|
||||
|
||||
const { start } = partition
|
||||
if (start != null) {
|
||||
options.push(`offset=${start * 512}`)
|
||||
}
|
||||
}
|
||||
|
||||
const mount = options => execa('mount', [
|
||||
`--options=${options.join(',')}`,
|
||||
`--source=${device.path}`,
|
||||
`--target=${path}`
|
||||
])
|
||||
|
||||
// `noload` option is used for ext3/ext4, if it fails it might
|
||||
// `be another fs, try without
|
||||
return mount([ ...options, 'noload' ]).catch(() =>
|
||||
mount(options)
|
||||
).then(() => ({
|
||||
path,
|
||||
unmount: once(() => execa('umount', [ '--lazy', path ]))
|
||||
}), error => {
|
||||
console.log(error)
|
||||
|
||||
throw error
|
||||
})
|
||||
})
|
||||
|
||||
// handle LVM logical volumes automatically
|
||||
const mountPartition2 = (device, partitionId) => {
|
||||
if (
|
||||
partitionId == null ||
|
||||
!includes(partitionId, '/')
|
||||
) {
|
||||
return mountPartition(device, partitionId)
|
||||
}
|
||||
|
||||
const [ pvId, vgName, lvName ] = partitionId.split('/')
|
||||
|
||||
return listPartitions(device).then(partitions =>
|
||||
find(partitions, { id: pvId })
|
||||
).then(pvId => mountLvmPv(device, pvId)).then(device1 =>
|
||||
execa('vgchange', [ '-ay', vgName ]).then(() =>
|
||||
lvs([ 'lv_name', 'lv_path' ], vgName).then(lvs =>
|
||||
find(lvs, { lv_name: lvName }).lv_path
|
||||
)
|
||||
).then(path =>
|
||||
mountPartition({ path }).then(device2 => ({
|
||||
...device2,
|
||||
unmount: () => device2.unmount().then(device1.unmount)
|
||||
}))
|
||||
).catch(error => device1.unmount().then(() => {
|
||||
throw error
|
||||
}))
|
||||
)
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
const listLvmLvs = device => pvs([
|
||||
'lv_name',
|
||||
'lv_path',
|
||||
'lv_size',
|
||||
'vg_name'
|
||||
], device.path).then(pvs => filter(pvs, 'lv_name'))
|
||||
|
||||
const mountLvmPv = (device, partition) => {
|
||||
const args = []
|
||||
if (partition) {
|
||||
args.push('-o', partition.start * 512)
|
||||
}
|
||||
args.push(
|
||||
'--show',
|
||||
'-f',
|
||||
device.path
|
||||
)
|
||||
|
||||
return execa.stdout('losetup', args).then(stdout => {
|
||||
const path = trim(stdout)
|
||||
return {
|
||||
path,
|
||||
unmount: once(() => Promise.all([
|
||||
execa('losetup', [ '-d', path ]),
|
||||
pvs('vg_name', path).then(vgNames => execa('vgchange', [
|
||||
'-an',
|
||||
...vgNames
|
||||
]))
|
||||
]))
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
// ===================================================================
|
||||
@@ -72,6 +303,15 @@ async function checkFileIntegrity (handler, name) {
|
||||
export default class {
|
||||
constructor (xo) {
|
||||
this._xo = xo
|
||||
|
||||
// clean any LVM volumes that might have not been properly
|
||||
// unmounted
|
||||
xo.on('start', () => Promise.all([
|
||||
execa('losetup', [ '-D' ]),
|
||||
execa('vgchange', [ '-an' ])
|
||||
]).then(() =>
|
||||
execa('pvscan', [ '--cache' ])
|
||||
))
|
||||
}
|
||||
|
||||
async listRemoteBackups (remoteId) {
|
||||
@@ -101,12 +341,53 @@ export default class {
|
||||
return backups
|
||||
}
|
||||
|
||||
async listVmBackups (remoteId) {
|
||||
const handler = await this._xo.getRemoteHandler(remoteId)
|
||||
|
||||
const backups = []
|
||||
|
||||
await Promise.all(mapToArray(await handler.list(), entry => {
|
||||
if (endsWith(entry, '.xva')) {
|
||||
backups.push(parseVmBackupPath(entry))
|
||||
} else if (startsWith(entry, 'vm_delta_')) {
|
||||
return handler.list(entry).then(children => Promise.all(mapToArray(children, child => {
|
||||
if (endsWith(child, '.json')) {
|
||||
const path = `${entry}/${child}`
|
||||
|
||||
const record = parseVmBackupPath(path)
|
||||
backups.push(record)
|
||||
|
||||
return handler.readFile(path).then(data => {
|
||||
record.disks = mapToArray(JSON.parse(data).vdis, vdi => ({
|
||||
id: `${entry}/${vdi.xoPath}`,
|
||||
name: vdi.name_label,
|
||||
uuid: vdi.uuid
|
||||
}))
|
||||
}).catch(noop)
|
||||
}
|
||||
})))
|
||||
}
|
||||
}))
|
||||
|
||||
return backups
|
||||
}
|
||||
|
||||
async importVmBackup (remoteId, file, sr) {
|
||||
const handler = await this._xo.getRemoteHandler(remoteId)
|
||||
const stream = await handler.createReadStream(file)
|
||||
const xapi = this._xo.getXapi(sr)
|
||||
|
||||
await xapi.importVm(stream, { srId: sr._xapiId })
|
||||
const vm = await xapi.importVm(stream, { srId: sr._xapiId })
|
||||
|
||||
const { datetime } = parseVmBackupPath(file)
|
||||
await Promise.all([
|
||||
xapi.addTag(vm.$id, 'restored from backup'),
|
||||
xapi.editVm(vm.$id, {
|
||||
name_label: `${vm.name_label} (${shortDate(datetime * 1e3)})`
|
||||
})
|
||||
])
|
||||
|
||||
return xapiObjectToXo(vm).id
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------
|
||||
@@ -140,7 +421,7 @@ export default class {
|
||||
stream => stream.cancel()
|
||||
))
|
||||
|
||||
return srcXapi.deleteVm(delta.vm.$id, true)
|
||||
return srcXapi.deleteVm(delta.vm.uuid)
|
||||
})
|
||||
|
||||
const promise = targetXapi.importDeltaVm(
|
||||
@@ -154,7 +435,7 @@ export default class {
|
||||
// Once done, (asynchronously) remove the (now obsolete) local
|
||||
// base.
|
||||
if (localBaseUuid) {
|
||||
promise.then(() => srcXapi.deleteVm(localBaseUuid, true))::pCatch(noop)
|
||||
promise.then(() => srcXapi.deleteVm(localBaseUuid))::pCatch(noop)
|
||||
}
|
||||
|
||||
// (Asynchronously) Identify snapshot as future base.
|
||||
@@ -290,6 +571,18 @@ export default class {
|
||||
return backups.slice(i)
|
||||
}
|
||||
|
||||
// fix the parent UUID and filename in delta files after download from xapi or backup compression
|
||||
async _chainDeltaVdiBackups ({handler, dir}) {
|
||||
const backups = await this._listVdiBackups(handler, dir)
|
||||
for (let i = 1; i < backups.length; i++) {
|
||||
const childPath = dir + '/' + backups[i]
|
||||
const modified = await chainVhd(handler, dir + '/' + backups[i - 1], handler, childPath)
|
||||
if (modified) {
|
||||
await handler.refreshChecksum(childPath)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async _mergeDeltaVdiBackups ({handler, dir, depth}) {
|
||||
const backups = await this._listVdiBackups(handler, dir)
|
||||
let i = backups.length - depth
|
||||
@@ -391,8 +684,7 @@ export default class {
|
||||
// The problem is in the merge case, a delta merged in a full vdi
|
||||
// backup forces us to browse the resulting file =>
|
||||
// Significant transfer time on the network !
|
||||
checksum: !isFull,
|
||||
flags: 'wx'
|
||||
checksum: !isFull
|
||||
})
|
||||
|
||||
stream.on('error', error => targetStream.emit('error', error))
|
||||
@@ -432,16 +724,7 @@ export default class {
|
||||
|
||||
@deferrable.onFailure
|
||||
async rollingDeltaVmBackup ($onFailure, {vm, remoteId, tag, depth}) {
|
||||
const remote = await this._xo.getRemote(remoteId)
|
||||
|
||||
if (!remote) {
|
||||
throw new Error(`No such Remote ${remoteId}`)
|
||||
}
|
||||
if (!remote.enabled) {
|
||||
throw new Error(`Remote ${remoteId} is disabled`)
|
||||
}
|
||||
|
||||
const handler = await this._xo.getRemoteHandler(remote)
|
||||
const handler = await this._xo.getRemoteHandler(remoteId)
|
||||
const xapi = this._xo.getXapi(vm)
|
||||
|
||||
vm = xapi.getObject(vm._xapiId)
|
||||
@@ -452,7 +735,7 @@ export default class {
|
||||
base => base.snapshot_time
|
||||
)
|
||||
const baseVm = bases.pop()
|
||||
forEach(bases, base => { xapi.deleteVm(base.$id, true)::pCatch(noop) })
|
||||
forEach(bases, base => { xapi.deleteVm(base.$id)::pCatch(noop) })
|
||||
|
||||
// Check backup dirs.
|
||||
const dir = `vm_delta_${tag}_${vm.uuid}`
|
||||
@@ -487,7 +770,7 @@ export default class {
|
||||
stream => stream.cancel()
|
||||
))
|
||||
|
||||
await xapi.deleteVm(delta.vm.$id, true)
|
||||
await xapi.deleteVm(delta.vm.uuid)
|
||||
})
|
||||
|
||||
// Save vdis.
|
||||
@@ -515,15 +798,15 @@ export default class {
|
||||
)
|
||||
|
||||
const fulFilledVdiBackups = []
|
||||
let success = true
|
||||
let error
|
||||
|
||||
// One or many vdi backups have failed.
|
||||
for (const vdiBackup of vdiBackups) {
|
||||
if (vdiBackup.isFulfilled()) {
|
||||
fulFilledVdiBackups.push(vdiBackup)
|
||||
} else {
|
||||
console.error(`Rejected backup: ${vdiBackup.reason()}`)
|
||||
success = false
|
||||
error = vdiBackup.reason()
|
||||
console.error('Rejected backup:', error)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -535,8 +818,8 @@ export default class {
|
||||
)
|
||||
})
|
||||
|
||||
if (!success) {
|
||||
throw new Error('Rolling delta vm backup failed.')
|
||||
if (error) {
|
||||
throw error
|
||||
}
|
||||
|
||||
const date = safeDateFormat(new Date())
|
||||
@@ -545,19 +828,17 @@ export default class {
|
||||
|
||||
$onFailure(() => handler.unlink(infoPath)::pCatch(noop))
|
||||
|
||||
const { streams,
|
||||
...infos
|
||||
} = delta
|
||||
|
||||
// Write Metadata.
|
||||
await handler.outputFile(infoPath, JSON.stringify(infos, null, 2), {flag: 'wx'})
|
||||
await handler.outputFile(infoPath, JSON.stringify(delta, null, 2))
|
||||
|
||||
// Here we have a completed backup. We can merge old vdis.
|
||||
await Promise.all(
|
||||
mapToArray(vdiBackups, vdiBackup => {
|
||||
const backupName = vdiBackup.value()
|
||||
const backupDirectory = backupName.slice(0, backupName.lastIndexOf('/'))
|
||||
return this._mergeDeltaVdiBackups({ handler, dir: `${dir}/${backupDirectory}`, depth })
|
||||
const backupDir = `${dir}/${backupDirectory}`
|
||||
return this._mergeDeltaVdiBackups({ handler, dir: backupDir, depth })
|
||||
.then(() => { this._chainDeltaVdiBackups({ handler, dir: backupDir }) })
|
||||
})
|
||||
)
|
||||
|
||||
@@ -565,7 +846,7 @@ export default class {
|
||||
await this._removeOldDeltaVmBackups(xapi, { vm, handler, dir, depth })
|
||||
|
||||
if (baseVm) {
|
||||
xapi.deleteVm(baseVm.$id, true)::pCatch(noop)
|
||||
xapi.deleteVm(baseVm.$id)::pCatch(noop)
|
||||
}
|
||||
|
||||
// Returns relative path.
|
||||
@@ -573,10 +854,13 @@ export default class {
|
||||
}
|
||||
|
||||
async importDeltaVmBackup ({sr, remoteId, filePath}) {
|
||||
filePath = `${filePath}${DELTA_BACKUP_EXT}`
|
||||
const { datetime } = parseVmBackupPath(filePath)
|
||||
|
||||
const handler = await this._xo.getRemoteHandler(remoteId)
|
||||
const xapi = this._xo.getXapi(sr)
|
||||
|
||||
const delta = JSON.parse(await handler.readFile(`${filePath}${DELTA_BACKUP_EXT}`))
|
||||
const delta = JSON.parse(await handler.readFile(filePath))
|
||||
let vm
|
||||
const { version } = delta
|
||||
|
||||
@@ -603,9 +887,12 @@ export default class {
|
||||
)
|
||||
)
|
||||
|
||||
delta.vm.name_label += ` (${shortDate(datetime * 1e3)})`
|
||||
delta.vm.tags.push('restored from backup')
|
||||
|
||||
vm = await xapi.importDeltaVm(delta, {
|
||||
srId: sr._xapiId,
|
||||
disableStartAfterImport: false
|
||||
disableStartAfterImport: false,
|
||||
srId: sr._xapiId
|
||||
})
|
||||
} else {
|
||||
throw new Error(`Unsupported delta backup version: ${version}`)
|
||||
@@ -617,21 +904,12 @@ export default class {
|
||||
// -----------------------------------------------------------------
|
||||
|
||||
async backupVm ({vm, remoteId, file, compress, onlyMetadata}) {
|
||||
const remote = await this._xo.getRemote(remoteId)
|
||||
|
||||
if (!remote) {
|
||||
throw new Error(`No such Remote ${remoteId}`)
|
||||
}
|
||||
if (!remote.enabled) {
|
||||
throw new Error(`Backup remote ${remoteId} is disabled`)
|
||||
}
|
||||
|
||||
const handler = await this._xo.getRemoteHandler(remote)
|
||||
const handler = await this._xo.getRemoteHandler(remoteId)
|
||||
return this._backupVm(vm, handler, file, {compress, onlyMetadata})
|
||||
}
|
||||
|
||||
async _backupVm (vm, handler, file, {compress, onlyMetadata}) {
|
||||
const targetStream = await handler.createOutputStream(file, { flags: 'wx' })
|
||||
const targetStream = await handler.createOutputStream(file)
|
||||
const promise = eventToPromise(targetStream, 'finish')
|
||||
|
||||
const sourceStream = await this._xo.getXapi(vm).exportVm(vm._xapiId, {
|
||||
@@ -644,16 +922,7 @@ export default class {
|
||||
}
|
||||
|
||||
async rollingBackupVm ({vm, remoteId, tag, depth, compress, onlyMetadata}) {
|
||||
const remote = await this._xo.getRemote(remoteId)
|
||||
|
||||
if (!remote) {
|
||||
throw new Error(`No such Remote ${remoteId}`)
|
||||
}
|
||||
if (!remote.enabled) {
|
||||
throw new Error(`Backup remote ${remoteId} is disabled`)
|
||||
}
|
||||
|
||||
const handler = await this._xo.getRemoteHandler(remote)
|
||||
const handler = await this._xo.getRemoteHandler(remoteId)
|
||||
|
||||
const files = await handler.list()
|
||||
|
||||
@@ -680,7 +949,7 @@ export default class {
|
||||
const promises = []
|
||||
for (let surplus = snapshots.length - (depth - 1); surplus > 0; surplus--) {
|
||||
const oldSnap = snapshots.shift()
|
||||
promises.push(xapi.deleteVm(oldSnap.uuid, true))
|
||||
promises.push(xapi.deleteVm(oldSnap.uuid))
|
||||
}
|
||||
await Promise.all(promises)
|
||||
}
|
||||
@@ -694,12 +963,12 @@ export default class {
|
||||
const sourceXapi = this._xo.getXapi(vm)
|
||||
vm = sourceXapi.getObject(vm._xapiId)
|
||||
|
||||
const vms = []
|
||||
const vms = {}
|
||||
forEach(sr.$VDIs, vdi => {
|
||||
const vbds = vdi.$VBDs
|
||||
const vm = vbds && vbds[0] && vbds[0].$VM
|
||||
if (vm && reg.test(vm.name_label)) {
|
||||
vms.push(vm)
|
||||
vms[vm.$id] = vm
|
||||
}
|
||||
})
|
||||
const olderCopies = sortBy(vms, 'name_label')
|
||||
@@ -710,11 +979,121 @@ export default class {
|
||||
})
|
||||
await targetXapi.addTag(drCopy.$id, 'Disaster Recovery')
|
||||
|
||||
const promises = []
|
||||
for (let surplus = olderCopies.length - (depth - 1); surplus > 0; surplus--) {
|
||||
const oldDRVm = olderCopies.shift()
|
||||
promises.push(targetXapi.deleteVm(oldDRVm.$id, true))
|
||||
const n = 1 - depth
|
||||
await Promise.all(mapToArray(n ? olderCopies.slice(0, n) : olderCopies, vm =>
|
||||
// Do not consider a failure to delete an old copy as a fatal error.
|
||||
targetXapi.deleteVm(vm.$id)::pCatch(noop)
|
||||
))
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------
|
||||
|
||||
_mountVhd (remoteId, vhdPath) {
|
||||
return Promise.all([
|
||||
this._xo.getRemoteHandler(remoteId),
|
||||
tmpDir()
|
||||
]).then(([ handler, mountDir ]) => {
|
||||
if (!handler._getRealPath) {
|
||||
throw new Error(`this remote is not supported`)
|
||||
}
|
||||
|
||||
const remotePath = handler._getRealPath()
|
||||
vhdPath = resolveSubpath(remotePath, vhdPath)
|
||||
|
||||
return Promise.resolve().then(() => {
|
||||
// TODO: remove when no longer necessary.
|
||||
//
|
||||
// Currently, the filenames of the VHD changes over time
|
||||
// (delta → full), but the JSON is not updated, therefore the
|
||||
// VHD path may need to be fixed.
|
||||
return endsWith(vhdPath, '_delta.vhd')
|
||||
? pFromCallback(cb => stat(vhdPath, cb)).then(
|
||||
() => vhdPath,
|
||||
error => {
|
||||
if (error && error.code === 'ENOENT') {
|
||||
return `${vhdPath.slice(0, -10)}_full.vhd`
|
||||
}
|
||||
}
|
||||
)
|
||||
: vhdPath
|
||||
}).then(vhdPath => execa('vhdimount', [ vhdPath, mountDir ])).then(() =>
|
||||
pFromCallback(cb => readdir(mountDir, cb)).then(entries => {
|
||||
let max = 0
|
||||
forEach(entries, entry => {
|
||||
const matches = /^vhdi(\d+)/.exec(entry)
|
||||
if (matches) {
|
||||
const value = +matches[1]
|
||||
if (value > max) {
|
||||
max = value
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
if (!max) {
|
||||
throw new Error('no disks found')
|
||||
}
|
||||
|
||||
return {
|
||||
path: `${mountDir}/vhdi${max}`,
|
||||
unmount: once(() => execa('fusermount', [ '-uz', mountDir ]))
|
||||
}
|
||||
})
|
||||
)
|
||||
})
|
||||
}
|
||||
|
||||
_mountPartition (remoteId, vhdPath, partitionId) {
|
||||
return this._mountVhd(remoteId, vhdPath).then(device =>
|
||||
mountPartition2(device, partitionId).then(partition => ({
|
||||
...partition,
|
||||
unmount: () => partition.unmount().then(device.unmount)
|
||||
})).catch(error => device.unmount().then(() => {
|
||||
throw error
|
||||
}))
|
||||
)
|
||||
}
|
||||
|
||||
@deferrable
|
||||
async scanDiskBackup ($defer, remoteId, vhdPath) {
|
||||
const device = await this._mountVhd(remoteId, vhdPath)
|
||||
$defer(device.unmount)
|
||||
|
||||
return {
|
||||
partitions: await listPartitions2(device)
|
||||
}
|
||||
await Promise.all(promises)
|
||||
}
|
||||
|
||||
@deferrable
|
||||
async scanFilesInDiskBackup ($defer, remoteId, vhdPath, partitionId, path) {
|
||||
const partition = await this._mountPartition(remoteId, vhdPath, partitionId)
|
||||
$defer(partition.unmount)
|
||||
|
||||
path = resolveSubpath(partition.path, path)
|
||||
|
||||
const entries = await pFromCallback(cb => readdir(path, cb))
|
||||
|
||||
const entriesMap = {}
|
||||
await Promise.all(mapToArray(entries, async name => {
|
||||
const stats = await pFromCallback(cb => stat(`${path}/${name}`, cb))::pCatch(noop)
|
||||
if (stats) {
|
||||
entriesMap[stats.isDirectory() ? `${name}/` : name] = {}
|
||||
}
|
||||
}))
|
||||
return entriesMap
|
||||
}
|
||||
|
||||
async fetchFilesInDiskBackup (remoteId, vhdPath, partitionId, paths) {
|
||||
const partition = await this._mountPartition(remoteId, vhdPath, partitionId)
|
||||
|
||||
let i = 0
|
||||
const onEnd = () => {
|
||||
if (!--i) {
|
||||
partition.unmount()
|
||||
}
|
||||
}
|
||||
return mapToArray(paths, path => {
|
||||
++i
|
||||
return createReadStream(resolveSubpath(partition.path, path)).once('end', onEnd)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
33
src/xo-mixins/config-management.js
Normal file
33
src/xo-mixins/config-management.js
Normal file
@@ -0,0 +1,33 @@
|
||||
import { map, noop } from '../utils'
|
||||
|
||||
import { all as pAll } from 'promise-toolbox'
|
||||
|
||||
export default class ConfigManagement {
|
||||
constructor () {
|
||||
this._managers = { __proto__: null }
|
||||
}
|
||||
|
||||
addConfigManager (id, exporter, importer) {
|
||||
const managers = this._managers
|
||||
if (id in managers) {
|
||||
throw new Error(`${id} is already taken`)
|
||||
}
|
||||
|
||||
this._managers[id] = { exporter, importer }
|
||||
}
|
||||
|
||||
exportConfig () {
|
||||
return map(this._managers, ({ exporter }, key) => exporter())::pAll()
|
||||
}
|
||||
|
||||
importConfig (config) {
|
||||
const managers = this._managers
|
||||
|
||||
return map(config, (entry, key) => {
|
||||
const manager = managers[key]
|
||||
if (manager) {
|
||||
return manager.importer(entry)
|
||||
}
|
||||
})::pAll().then(noop)
|
||||
}
|
||||
}
|
||||
307
src/xo-mixins/ip-pools.js
Normal file
307
src/xo-mixins/ip-pools.js
Normal file
@@ -0,0 +1,307 @@
|
||||
import concat from 'lodash/concat'
|
||||
import countBy from 'lodash/countBy'
|
||||
import diff from 'lodash/difference'
|
||||
import findIndex from 'lodash/findIndex'
|
||||
import flatten from 'lodash/flatten'
|
||||
import highland from 'highland'
|
||||
import includes from 'lodash/includes'
|
||||
import isObject from 'lodash/isObject'
|
||||
import keys from 'lodash/keys'
|
||||
import mapValues from 'lodash/mapValues'
|
||||
import pick from 'lodash/pick'
|
||||
import remove from 'lodash/remove'
|
||||
import synchronized from 'decorator-synchronized'
|
||||
import { noSuchObject } from 'xo-common/api-errors'
|
||||
import { fromCallback } from 'promise-toolbox'
|
||||
|
||||
import {
|
||||
forEach,
|
||||
generateUnsecureToken,
|
||||
isEmpty,
|
||||
lightSet,
|
||||
mapToArray,
|
||||
streamToArray,
|
||||
throwFn
|
||||
} from '../utils'
|
||||
|
||||
// ===================================================================
|
||||
|
||||
const normalize = ({
|
||||
addresses,
|
||||
id = throwFn('id is a required field'),
|
||||
name = '',
|
||||
networks,
|
||||
resourceSets
|
||||
}) => ({
|
||||
addresses,
|
||||
id,
|
||||
name,
|
||||
networks,
|
||||
resourceSets
|
||||
})
|
||||
|
||||
const _isAddressInIpPool = (address, network, ipPool) => (
|
||||
ipPool.addresses && (address in ipPool.addresses) &&
|
||||
includes(ipPool.networks, isObject(network) ? network.id : network)
|
||||
)
|
||||
|
||||
// ===================================================================
|
||||
|
||||
// Note: an address cannot be in two different pools sharing a
|
||||
// network.
|
||||
export default class IpPools {
|
||||
constructor (xo) {
|
||||
this._store = null
|
||||
this._xo = xo
|
||||
|
||||
xo.on('start', async () => {
|
||||
this._store = await xo.getStore('ipPools')
|
||||
|
||||
xo.addConfigManager('ipPools',
|
||||
() => this.getAllIpPools(),
|
||||
ipPools => Promise.all(mapToArray(ipPools, ipPool => this._save(ipPool)))
|
||||
)
|
||||
})
|
||||
}
|
||||
|
||||
async createIpPool ({ addresses, name, networks }) {
|
||||
const id = await this._generateId()
|
||||
|
||||
await this._save({
|
||||
addresses,
|
||||
id,
|
||||
name,
|
||||
networks
|
||||
})
|
||||
|
||||
return id
|
||||
}
|
||||
|
||||
async deleteIpPool (id) {
|
||||
const store = this._store
|
||||
|
||||
if (await store.has(id)) {
|
||||
await Promise.all(mapToArray(await this._xo.getAllResourceSets(), async set => {
|
||||
await this._xo.removeLimitFromResourceSet(`ipPool:${id}`, set.id)
|
||||
return this._xo.removeIpPoolFromResourceSet(id, set.id)
|
||||
}))
|
||||
await this._removeIpAddressesFromVifs(
|
||||
mapValues((await this.getIpPool(id)).addresses, 'vifs')
|
||||
)
|
||||
|
||||
return store.del(id)
|
||||
}
|
||||
|
||||
throw noSuchObject(id, 'ipPool')
|
||||
}
|
||||
|
||||
_getAllIpPools (filter) {
|
||||
return streamToArray(this._store.createValueStream(), {
|
||||
filter,
|
||||
mapper: normalize
|
||||
})
|
||||
}
|
||||
|
||||
async getAllIpPools (userId) {
|
||||
let filter
|
||||
if (userId != null) {
|
||||
const user = await this._xo.getUser(userId)
|
||||
if (user.permission !== 'admin') {
|
||||
const resourceSets = await this._xo.getAllResourceSets(userId)
|
||||
const ipPools = lightSet(flatten(mapToArray(resourceSets, 'ipPools')))
|
||||
filter = ({ id }) => ipPools.has(id)
|
||||
}
|
||||
}
|
||||
|
||||
return this._getAllIpPools(filter)
|
||||
}
|
||||
|
||||
getIpPool (id) {
|
||||
return this._store.get(id).then(normalize, error => {
|
||||
throw error.notFound ? noSuchObject(id, 'ipPool') : error
|
||||
})
|
||||
}
|
||||
|
||||
async _getAddressIpPool (address, network) {
|
||||
const ipPools = await this._getAllIpPools(ipPool => _isAddressInIpPool(address, network, ipPool))
|
||||
|
||||
return ipPools && ipPools[0]
|
||||
}
|
||||
|
||||
// Returns a map that indicates how many IPs from each IP pool the VM uses
|
||||
// e.g.: { 'ipPool:abc': 3, 'ipPool:xyz': 7 }
|
||||
async computeVmIpPoolsUsage (vm) {
|
||||
const vifs = vm.VIFs
|
||||
const ipPools = []
|
||||
for (const vifId of vifs) {
|
||||
const { allowedIpv4Addresses, allowedIpv6Addresses, $network } = this._xo.getObject(vifId)
|
||||
|
||||
for (const address of concat(allowedIpv4Addresses, allowedIpv6Addresses)) {
|
||||
const ipPool = await this._getAddressIpPool(address, $network)
|
||||
ipPool && ipPools.push(ipPool.id)
|
||||
}
|
||||
}
|
||||
|
||||
return countBy(ipPools, ({ id }) => `ipPool:${id}`)
|
||||
}
|
||||
|
||||
@synchronized
|
||||
allocIpAddresses (vifId, addAddresses, removeAddresses) {
|
||||
const updatedIpPools = {}
|
||||
const limits = {}
|
||||
|
||||
const xoVif = this._xo.getObject(vifId)
|
||||
const xapi = this._xo.getXapi(xoVif)
|
||||
const vif = xapi.getObject(xoVif._xapiId)
|
||||
|
||||
const allocAndSave = (() => {
|
||||
const resourseSetId = xapi.xo.getData(vif.VM, 'resourceSet')
|
||||
|
||||
return () => {
|
||||
const saveIpPools = () => Promise.all(mapToArray(updatedIpPools, ipPool => this._save(ipPool)))
|
||||
return resourseSetId
|
||||
? this._xo.allocateLimitsInResourceSet(limits, resourseSetId).then(
|
||||
saveIpPools
|
||||
)
|
||||
: saveIpPools()
|
||||
}
|
||||
})()
|
||||
|
||||
return fromCallback(cb => {
|
||||
const network = vif.$network
|
||||
const networkId = network.$id
|
||||
|
||||
const isVif = id => id === vifId
|
||||
|
||||
highland(this._store.createValueStream()).each(ipPool => {
|
||||
const { addresses, networks } = updatedIpPools[ipPool.id] || ipPool
|
||||
if (!(addresses && networks && includes(networks, networkId))) {
|
||||
return false
|
||||
}
|
||||
|
||||
let allocations = 0
|
||||
let changed = false
|
||||
forEach(removeAddresses, address => {
|
||||
let vifs, i
|
||||
if (
|
||||
(vifs = addresses[address]) &&
|
||||
(vifs = vifs.vifs) &&
|
||||
(i = findIndex(vifs, isVif)) !== -1
|
||||
) {
|
||||
vifs.splice(i, 1)
|
||||
--allocations
|
||||
changed = true
|
||||
}
|
||||
})
|
||||
forEach(addAddresses, address => {
|
||||
const data = addresses[address]
|
||||
if (!data) {
|
||||
return
|
||||
}
|
||||
const vifs = data.vifs || (data.vifs = [])
|
||||
if (!includes(vifs, vifId)) {
|
||||
vifs.push(vifId)
|
||||
++allocations
|
||||
changed = true
|
||||
}
|
||||
})
|
||||
|
||||
if (changed) {
|
||||
const { id } = ipPool
|
||||
updatedIpPools[id] = ipPool
|
||||
limits[`ipPool:${id}`] = (limits[`ipPool:${id}`] || 0) + allocations
|
||||
}
|
||||
}).toCallback(cb)
|
||||
}).then(allocAndSave)
|
||||
}
|
||||
|
||||
async _removeIpAddressesFromVifs (mapAddressVifs) {
|
||||
const mapVifAddresses = {}
|
||||
forEach(mapAddressVifs, (vifs, address) => {
|
||||
forEach(vifs, vifId => {
|
||||
if (mapVifAddresses[vifId]) {
|
||||
mapVifAddresses[vifId].push(address)
|
||||
} else {
|
||||
mapVifAddresses[vifId] = [ address ]
|
||||
}
|
||||
})
|
||||
})
|
||||
|
||||
const { getXapi } = this._xo
|
||||
return Promise.all(mapToArray(mapVifAddresses, (addresses, vifId) => {
|
||||
let vif
|
||||
try {
|
||||
// The IP may not have been correctly deallocated from the IP pool when the VIF was deleted
|
||||
vif = this._xo.getObject(vifId)
|
||||
} catch (error) {
|
||||
return
|
||||
}
|
||||
const { allowedIpv4Addresses, allowedIpv6Addresses } = vif
|
||||
remove(allowedIpv4Addresses, address => includes(addresses, address))
|
||||
remove(allowedIpv6Addresses, address => includes(addresses, address))
|
||||
this.allocIpAddresses(vifId, undefined, concat(allowedIpv4Addresses, allowedIpv6Addresses))
|
||||
|
||||
return getXapi(vif).editVif(vif._xapiId, {
|
||||
ipv4Allowed: allowedIpv4Addresses,
|
||||
ipv6Allowed: allowedIpv6Addresses
|
||||
})
|
||||
}))
|
||||
}
|
||||
|
||||
async updateIpPool (id, {
|
||||
addresses,
|
||||
name,
|
||||
networks,
|
||||
resourceSets
|
||||
}) {
|
||||
const ipPool = await this.getIpPool(id)
|
||||
const previousAddresses = { ...ipPool.addresses }
|
||||
|
||||
name != null && (ipPool.name = name)
|
||||
if (addresses) {
|
||||
const addresses_ = ipPool.addresses || {}
|
||||
forEach(addresses, (props, address) => {
|
||||
if (props === null) {
|
||||
delete addresses_[address]
|
||||
} else {
|
||||
addresses_[address] = props
|
||||
}
|
||||
})
|
||||
|
||||
// Remove the addresses that are no longer in the IP pool from the concerned VIFs
|
||||
const deletedAddresses = diff(keys(previousAddresses), keys(addresses_))
|
||||
await this._removeIpAddressesFromVifs(pick(previousAddresses, deletedAddresses))
|
||||
|
||||
if (isEmpty(addresses_)) {
|
||||
delete ipPool.addresses
|
||||
} else {
|
||||
ipPool.addresses = addresses_
|
||||
}
|
||||
}
|
||||
|
||||
// TODO: Implement patching like for addresses.
|
||||
if (networks) {
|
||||
ipPool.networks = networks
|
||||
}
|
||||
|
||||
// TODO: Implement patching like for addresses.
|
||||
if (resourceSets) {
|
||||
ipPool.resourceSets = resourceSets
|
||||
}
|
||||
|
||||
await this._save(ipPool)
|
||||
}
|
||||
|
||||
async _generateId () {
|
||||
let id
|
||||
do {
|
||||
id = generateUnsecureToken(8)
|
||||
} while (await this._store.has(id))
|
||||
return id
|
||||
}
|
||||
|
||||
_save (ipPool) {
|
||||
ipPool = normalize(ipPool)
|
||||
return this._store.put(ipPool.id, ipPool)
|
||||
}
|
||||
}
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user