Compare commits
664 Commits
xo-server/
...
xo-server/
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
07829918e4 | ||
|
|
b0d400b6eb | ||
|
|
706cb895ad | ||
|
|
45bf539b3c | ||
|
|
0923981f8d | ||
|
|
b0ac14363d | ||
|
|
5d346aba37 | ||
|
|
124cb15ebe | ||
|
|
a244ab898d | ||
|
|
3c551590eb | ||
|
|
10e30cccbc | ||
|
|
806a6b86a2 | ||
|
|
9719fdf5cc | ||
|
|
6d8764f8cb | ||
|
|
d9fd9cb408 | ||
|
|
7710ec0aba | ||
|
|
c97bd78cd0 | ||
|
|
728c5aa86e | ||
|
|
83d68ca293 | ||
|
|
47d7561db4 | ||
|
|
7d993e8319 | ||
|
|
1d1a597b22 | ||
|
|
23082f9300 | ||
|
|
ea1a7f9376 | ||
|
|
1796c7bab8 | ||
|
|
65ad76479a | ||
|
|
422db04ec8 | ||
|
|
d12f60fe37 | ||
|
|
194c1c991c | ||
|
|
3e8e2222c1 | ||
|
|
1620327a33 | ||
|
|
b1131e3667 | ||
|
|
db0250ac08 | ||
|
|
0a6b605760 | ||
|
|
81ac2375e5 | ||
|
|
6bcaca6cd7 | ||
|
|
ec8375252e | ||
|
|
766aa1762f | ||
|
|
5165e0a54c | ||
|
|
a2f7ad627e | ||
|
|
1176c162d4 | ||
|
|
a4880cd017 | ||
|
|
383bdce416 | ||
|
|
7cc300dd83 | ||
|
|
687809db9d | ||
|
|
1127ec3a90 | ||
|
|
a797edfae9 | ||
|
|
938e106252 | ||
|
|
a0eb9caaa2 | ||
|
|
442f53d45e | ||
|
|
68de1ca248 | ||
|
|
e16061141e | ||
|
|
64cbe3d209 | ||
|
|
ebdc6376d8 | ||
|
|
68335123a1 | ||
|
|
25b18f4ef8 | ||
|
|
9ad615b0ff | ||
|
|
12eaceb032 | ||
|
|
3263511b72 | ||
|
|
75cae8c647 | ||
|
|
9991ef624c | ||
|
|
489e9fce27 | ||
|
|
0655628073 | ||
|
|
9460822529 | ||
|
|
d02358ac0d | ||
|
|
366237a625 | ||
|
|
2f2da18994 | ||
|
|
ecd30db215 | ||
|
|
1980854f6f | ||
|
|
7d4f006c25 | ||
|
|
b697be2383 | ||
|
|
143e53c43f | ||
|
|
6dde1ade01 | ||
|
|
d4de391ac5 | ||
|
|
af15f4bc6a | ||
|
|
d4ace24caa | ||
|
|
c5ab47fa66 | ||
|
|
d60051b629 | ||
|
|
22ff330ee7 | ||
|
|
dd62bef66d | ||
|
|
e7feb99f8d | ||
|
|
6358accece | ||
|
|
9ce8a24eea | ||
|
|
4d0673f489 | ||
|
|
fbe1e6a7d5 | ||
|
|
4ed02ca501 | ||
|
|
af245ed9fe | ||
|
|
fc86a3e882 | ||
|
|
f9109edcf1 | ||
|
|
ec100e1a91 | ||
|
|
746c5f4a79 | ||
|
|
b2611728a1 | ||
|
|
fc6cc4234d | ||
|
|
7706c1cb63 | ||
|
|
4d7a07220c | ||
|
|
436875f7dc | ||
|
|
21c6f53ecc | ||
|
|
5472be8b72 | ||
|
|
d22542fcf3 | ||
|
|
1d8341eb27 | ||
|
|
1897a7ada3 | ||
|
|
a048698c66 | ||
|
|
f891e57f4a | ||
|
|
fcc590e48a | ||
|
|
9a02a2a65b | ||
|
|
536a6c5c60 | ||
|
|
86a6871ee8 | ||
|
|
6046045151 | ||
|
|
9c3ddd4ba4 | ||
|
|
6c9f55c1d7 | ||
|
|
5bec3d7dcd | ||
|
|
a4c309efe8 | ||
|
|
4e22a208dd | ||
|
|
ff9e77118e | ||
|
|
6c6dfa9ac4 | ||
|
|
d60d5207d8 | ||
|
|
8c0ae892f5 | ||
|
|
f570492a11 | ||
|
|
cc447304f5 | ||
|
|
8f8c6366e3 | ||
|
|
3b13bcb098 | ||
|
|
df60784b51 | ||
|
|
bae3122bb5 | ||
|
|
0770aef4bf | ||
|
|
c198350bfa | ||
|
|
a2ed388777 | ||
|
|
f6670c699a | ||
|
|
5fa4c95480 | ||
|
|
5b8608c186 | ||
|
|
bb75d42ede | ||
|
|
b4b6def07a | ||
|
|
b305700987 | ||
|
|
40232b7eb1 | ||
|
|
67ff666db4 | ||
|
|
5960fd4fe0 | ||
|
|
f8b28c519c | ||
|
|
ee1105b6dd | ||
|
|
4778274c97 | ||
|
|
d7ecb32238 | ||
|
|
744306fc50 | ||
|
|
11bbb8ed4d | ||
|
|
b5092a4444 | ||
|
|
e2442c07a9 | ||
|
|
6f924d4e83 | ||
|
|
faf1508914 | ||
|
|
7eb8152835 | ||
|
|
8f45905831 | ||
|
|
4ba2ffce5b | ||
|
|
ffb3659ef5 | ||
|
|
6dec07d562 | ||
|
|
afb22f3279 | ||
|
|
f2f369db64 | ||
|
|
635c76db93 | ||
|
|
5f50f1928d | ||
|
|
32c9ed1dc2 | ||
|
|
0536926a1f | ||
|
|
3959c98479 | ||
|
|
2ce5735676 | ||
|
|
71741e144e | ||
|
|
f2e64cdd5e | ||
|
|
afaa5d5e9e | ||
|
|
d82861727d | ||
|
|
90f0795416 | ||
|
|
9efbe7771c | ||
|
|
a75caac13d | ||
|
|
279d0d20ea | ||
|
|
332ba96d34 | ||
|
|
3f6e5b7606 | ||
|
|
94703492fd | ||
|
|
df78117617 | ||
|
|
909b9480e4 | ||
|
|
21762ac1aa | ||
|
|
412bc175b4 | ||
|
|
dc0eb76e88 | ||
|
|
2695941a3c | ||
|
|
3506be1a70 | ||
|
|
cbf4786b39 | ||
|
|
8dbf334208 | ||
|
|
60ba5fbc72 | ||
|
|
c3ace0c44f | ||
|
|
8eceb90e63 | ||
|
|
4754e19e83 | ||
|
|
a0559d0dc9 | ||
|
|
8d03ce19b0 | ||
|
|
2470d851e9 | ||
|
|
df99f5c0a5 | ||
|
|
36f5084c52 | ||
|
|
b77d3f123d | ||
|
|
3c14405155 | ||
|
|
c10b0afaa8 | ||
|
|
3f7a2d6bfb | ||
|
|
f2a0d56e01 | ||
|
|
0736cc8414 | ||
|
|
53240d40a0 | ||
|
|
4137dd7cc8 | ||
|
|
8907290d27 | ||
|
|
401dc1cb10 | ||
|
|
a6b5d26f56 | ||
|
|
eb55cba34a | ||
|
|
b0b41d984e | ||
|
|
947f64e32d | ||
|
|
24ccbfa9b6 | ||
|
|
8110acb795 | ||
|
|
7473aede60 | ||
|
|
6f204f721b | ||
|
|
7b0e08094a | ||
|
|
322e1a75b9 | ||
|
|
a0806d98a1 | ||
|
|
182897d971 | ||
|
|
f90a639fcc | ||
|
|
d95d7208a2 | ||
|
|
bbac8ffe64 | ||
|
|
801a649fb1 | ||
|
|
7c09ceecfd | ||
|
|
8c4954fb9b | ||
|
|
fbe892105b | ||
|
|
584e1bb847 | ||
|
|
c437ab282e | ||
|
|
42a100d138 | ||
|
|
65807bf35d | ||
|
|
2995f48ede | ||
|
|
d452702aef | ||
|
|
f8ed9c7357 | ||
|
|
9143120177 | ||
|
|
fd3b1bee92 | ||
|
|
bff42954d1 | ||
|
|
6b74fd6a02 | ||
|
|
0547cebfe2 | ||
|
|
caefdf4300 | ||
|
|
a59df15994 | ||
|
|
33304eb8d9 | ||
|
|
eb21a1bfb3 | ||
|
|
ce0333b0a7 | ||
|
|
25a1b53a91 | ||
|
|
6aba73f970 | ||
|
|
6406bb7fb6 | ||
|
|
2458107903 | ||
|
|
628f9bd9b5 | ||
|
|
2d791571d5 | ||
|
|
ed57127a79 | ||
|
|
6d9bcff8e1 | ||
|
|
8126cd1879 | ||
|
|
ab34c2261c | ||
|
|
6953f65970 | ||
|
|
52073e79fa | ||
|
|
8e3484bb17 | ||
|
|
7110da8a36 | ||
|
|
7ffd6ded51 | ||
|
|
5e04547ecf | ||
|
|
7cbe5f64ce | ||
|
|
47ed78031a | ||
|
|
fd3d24b834 | ||
|
|
c2f607b452 | ||
|
|
b1328bb6e2 | ||
|
|
2a02583e27 | ||
|
|
cfb49f9136 | ||
|
|
5f20091f24 | ||
|
|
a37b8e35a1 | ||
|
|
84c980c3ea | ||
|
|
5823057b41 | ||
|
|
024a9b1763 | ||
|
|
0425780cd3 | ||
|
|
20734dc7f3 | ||
|
|
0574c58f16 | ||
|
|
31e3117190 | ||
|
|
f780ba2c5a | ||
|
|
f125b593bf | ||
|
|
baee4e185d | ||
|
|
ca8476d466 | ||
|
|
757bf82a78 | ||
|
|
644887f727 | ||
|
|
563b643461 | ||
|
|
0e4a6fd2e1 | ||
|
|
d452bf1f1c | ||
|
|
126828a813 | ||
|
|
03dc6fb73a | ||
|
|
3653e89714 | ||
|
|
318dd14e42 | ||
|
|
2d13844b5d | ||
|
|
b777b7432a | ||
|
|
6f91c225c2 | ||
|
|
c355e9ca4a | ||
|
|
4514ea8123 | ||
|
|
a9a1472cb7 | ||
|
|
250b0eee28 | ||
|
|
5cd7527937 | ||
|
|
57ebd5bb7a | ||
|
|
c18a697d6b | ||
|
|
ad40b72508 | ||
|
|
3a72e5910d | ||
|
|
8f3eb65a05 | ||
|
|
700cd83ff5 | ||
|
|
0c27881eaf | ||
|
|
f7fdc6acd2 | ||
|
|
2c5f844edc | ||
|
|
a253de43c5 | ||
|
|
dbaf67a986 | ||
|
|
5175d06e37 | ||
|
|
651a27b558 | ||
|
|
fd41f8def6 | ||
|
|
208ea04fd5 | ||
|
|
5ee83a1af9 | ||
|
|
901c7704f4 | ||
|
|
c6f7290f92 | ||
|
|
5368eda98b | ||
|
|
7b9be209c8 | ||
|
|
cee05fea7c | ||
|
|
b87acb47e2 | ||
|
|
cb192bf9ea | ||
|
|
16351ba7f3 | ||
|
|
96ba128942 | ||
|
|
76c8d4af25 | ||
|
|
3ea2b3cc00 | ||
|
|
0df0936022 | ||
|
|
4fc11a7fd3 | ||
|
|
8c509271a6 | ||
|
|
67d5b63ef9 | ||
|
|
4f999511a6 | ||
|
|
cfbf239175 | ||
|
|
1aedf9bb07 | ||
|
|
c2d4423720 | ||
|
|
c2f7a2620c | ||
|
|
6f0cda34b4 | ||
|
|
1a472fdf1f | ||
|
|
0551f61228 | ||
|
|
b900adfddd | ||
|
|
0e339daef5 | ||
|
|
5f5733e8b9 | ||
|
|
1372050a7b | ||
|
|
1960951c5e | ||
|
|
bc070407c7 | ||
|
|
0172ee0b6b | ||
|
|
2953bc6bb8 | ||
|
|
c0ed3a9e3c | ||
|
|
5456e4fe75 | ||
|
|
867a1e960e | ||
|
|
48dc68c3fe | ||
|
|
2c719f326b | ||
|
|
201f92eb93 | ||
|
|
46f055b216 | ||
|
|
08305e679b | ||
|
|
e9e0b70199 | ||
|
|
441d784027 | ||
|
|
558956bf55 | ||
|
|
0d8250a3ac | ||
|
|
dc1f5826f8 | ||
|
|
06fb06829b | ||
|
|
bbf52d2611 | ||
|
|
f55a6617e9 | ||
|
|
3bd273fbdd | ||
|
|
1b64a543f1 | ||
|
|
97b07f7d42 | ||
|
|
ebb472b8f6 | ||
|
|
1a2ef6479e | ||
|
|
876c63fe80 | ||
|
|
32236962f5 | ||
|
|
ba66af922f | ||
|
|
28b9bbe54f | ||
|
|
bf6bd7cbdc | ||
|
|
ddcb2468a6 | ||
|
|
f048b58935 | ||
|
|
09f6200c2e | ||
|
|
354692fb06 | ||
|
|
2c5858c2e0 | ||
|
|
1f41fd0436 | ||
|
|
e0bbefdfae | ||
|
|
bc6fbb2797 | ||
|
|
b579cf8128 | ||
|
|
a94ed014b7 | ||
|
|
0db991b668 | ||
|
|
347ced6942 | ||
|
|
5d7a775b2b | ||
|
|
df732ab4bf | ||
|
|
31cd3953d6 | ||
|
|
4666b13892 | ||
|
|
37d7ddb4b0 | ||
|
|
3abbaeb44b | ||
|
|
847ea49042 | ||
|
|
779068c2ee | ||
|
|
140cd6882d | ||
|
|
2e295c2391 | ||
|
|
596b0995f4 | ||
|
|
b61fe97893 | ||
|
|
209aa2ebe6 | ||
|
|
c03a0e857e | ||
|
|
2854d698e6 | ||
|
|
944163be0e | ||
|
|
269a9eaff0 | ||
|
|
7f9c49cbc4 | ||
|
|
2b6bfeeb15 | ||
|
|
fa9742bc92 | ||
|
|
472e419abc | ||
|
|
169d11387b | ||
|
|
e59ac6d947 | ||
|
|
e193b45562 | ||
|
|
1ac34f810e | ||
|
|
e65e5c6e5f | ||
|
|
af6365c76a | ||
|
|
8c672b23b5 | ||
|
|
3b53f5ac11 | ||
|
|
ccdc744748 | ||
|
|
261f0b4bf0 | ||
|
|
495b59c2e5 | ||
|
|
d6e1c13c39 | ||
|
|
f7f13b9e07 | ||
|
|
62564d747f | ||
|
|
1d5d59c4c0 | ||
|
|
e8380b8a12 | ||
|
|
c304d9cc62 | ||
|
|
aad4ebf287 | ||
|
|
6c2f48181c | ||
|
|
480b6ff7d6 | ||
|
|
4bdd6f972c | ||
|
|
6674d8456a | ||
|
|
d1478ff694 | ||
|
|
cb20d46b74 | ||
|
|
9dd2538043 | ||
|
|
f25136a512 | ||
|
|
03eb56ad2a | ||
|
|
2508840701 | ||
|
|
6e098f5a4f | ||
|
|
31b33406fd | ||
|
|
7ab7c763ed | ||
|
|
06258e757a | ||
|
|
5919b43a21 | ||
|
|
7d4b9521e7 | ||
|
|
f9d2fd7997 | ||
|
|
bdbc20c3c6 | ||
|
|
69d6d03714 | ||
|
|
f40e1e55b0 | ||
|
|
b9082ed838 | ||
|
|
4edfefa9a2 | ||
|
|
0f98ee5407 | ||
|
|
7fdf119873 | ||
|
|
3c054e6ea1 | ||
|
|
98899ece72 | ||
|
|
2061a006d0 | ||
|
|
5496c2d7fd | ||
|
|
d6b862a4a9 | ||
|
|
d581f8a852 | ||
|
|
3a593ee35a | ||
|
|
415d34fdaa | ||
|
|
7d28191bb5 | ||
|
|
e2c7693370 | ||
|
|
f17ff02f4d | ||
|
|
225043e01d | ||
|
|
56f78349f8 | ||
|
|
8839d4f55a | ||
|
|
2562aec1d2 | ||
|
|
db2361be84 | ||
|
|
d08fcbfef3 | ||
|
|
7601b93e65 | ||
|
|
1103ec40e0 | ||
|
|
af32c7e3db | ||
|
|
170918eb3b | ||
|
|
a91e615a8d | ||
|
|
cc92c26fe3 | ||
|
|
937135db32 | ||
|
|
01366558b4 | ||
|
|
b0dbd54ea4 | ||
|
|
f113915307 | ||
|
|
0a3c3d9bb1 | ||
|
|
ba2e005c3e | ||
|
|
b9ea52d65f | ||
|
|
f1e328d333 | ||
|
|
23f1965398 | ||
|
|
fc82f185cb | ||
|
|
56b25f373f | ||
|
|
1ac6add122 | ||
|
|
91b1a903f9 | ||
|
|
a8d6654ef5 | ||
|
|
63093b1be6 | ||
|
|
60abe8f37e | ||
|
|
7ba3909aa1 | ||
|
|
eecdba2d05 | ||
|
|
7bdc005aa7 | ||
|
|
d46703fdc4 | ||
|
|
e4aa85f603 | ||
|
|
233124ef50 | ||
|
|
36a3012de2 | ||
|
|
2b4ee96ed7 | ||
|
|
85a2afd55c | ||
|
|
6cd0d8456a | ||
|
|
7750a0a773 | ||
|
|
a5364b9257 | ||
|
|
e0e7b1406d | ||
|
|
38b67a0002 | ||
|
|
18dd4f8a52 | ||
|
|
879f9b4ea9 | ||
|
|
3db0dda67a | ||
|
|
ed9ee15b90 | ||
|
|
44ff85e8e9 | ||
|
|
cb07e9ba11 | ||
|
|
bfe05ce5fc | ||
|
|
64ee23cec0 | ||
|
|
c022d3c4a4 | ||
|
|
69c764301f | ||
|
|
2f777daef6 | ||
|
|
a10bf7330e | ||
|
|
782bb5967d | ||
|
|
aeb2f55f0d | ||
|
|
ae68749b1b | ||
|
|
a3c25d56a0 | ||
|
|
d2b9cc8df9 | ||
|
|
2027daa75c | ||
|
|
f3493a08bd | ||
|
|
f3963269ae | ||
|
|
ae2212c245 | ||
|
|
3a19ac4c93 | ||
|
|
666f546cf0 | ||
|
|
464f57d7da | ||
|
|
2a192f33a1 | ||
|
|
9ca2674261 | ||
|
|
24bc91dc0c | ||
|
|
cf2d5b502f | ||
|
|
61450ef602 | ||
|
|
78f1d1738e | ||
|
|
9f595cf5f7 | ||
|
|
25b8e49975 | ||
|
|
d40086cd13 | ||
|
|
8f9d8d93b9 | ||
|
|
1080c10004 | ||
|
|
866aeca220 | ||
|
|
121b3afc61 | ||
|
|
e8406b04b4 | ||
|
|
8e7fe81806 | ||
|
|
852807b5d7 | ||
|
|
9928d47fa2 | ||
|
|
412a1bd62a | ||
|
|
b290520951 | ||
|
|
dde677b6d3 | ||
|
|
75030847bd | ||
|
|
e7b9cb76bc | ||
|
|
e96c4c0dd3 | ||
|
|
b553b3fa50 | ||
|
|
c6fb924b8f | ||
|
|
b13844c4a6 | ||
|
|
ab6c83a3fc | ||
|
|
7e0a97973f | ||
|
|
6a8a79bba5 | ||
|
|
4a0c58c50a | ||
|
|
eb0c963332 | ||
|
|
023fe82932 | ||
|
|
2e1a06c7bf | ||
|
|
8b6961d40c | ||
|
|
53351877da | ||
|
|
522445894e | ||
|
|
550351bb16 | ||
|
|
328adbb56f | ||
|
|
44a36bbba3 | ||
|
|
4cc4adeda6 | ||
|
|
c14e6f2a63 | ||
|
|
cfcb2d54d8 | ||
|
|
010d60e504 | ||
|
|
eabde07ff6 | ||
|
|
be19ad5f2a | ||
|
|
d1d0816961 | ||
|
|
7be7170504 | ||
|
|
478272f515 | ||
|
|
09af6958c8 | ||
|
|
adb3a2b64e | ||
|
|
1ee7e842dc | ||
|
|
b080a57406 | ||
|
|
7c017e345a | ||
|
|
4b91343155 | ||
|
|
02a3df8ad0 | ||
|
|
6a7080f4ee | ||
|
|
4547042577 | ||
|
|
0e39eea7f8 | ||
|
|
1e5aefea63 | ||
|
|
02c4f333b0 | ||
|
|
1e8fc4020b | ||
|
|
f969701ac1 | ||
|
|
b236243857 | ||
|
|
39edc64922 | ||
|
|
f22ece403f | ||
|
|
f5423bb314 | ||
|
|
b1e5945ebe | ||
|
|
76b5be8171 | ||
|
|
804bca2041 | ||
|
|
10602b47b4 | ||
|
|
8d7c522596 | ||
|
|
3ac455c5a7 | ||
|
|
2b19a459df | ||
|
|
41ba2d9bf6 | ||
|
|
a7b5eb69d3 | ||
|
|
67c209bb5e | ||
|
|
a6d436d9ea | ||
|
|
652c784e13 | ||
|
|
a0a3b7a158 | ||
|
|
789f51bd2a | ||
|
|
c2f1a74f96 | ||
|
|
a9ed7a3f3b | ||
|
|
b348e88a5f | ||
|
|
1615395866 | ||
|
|
e483abcad0 | ||
|
|
12b6760f6e | ||
|
|
6fde6d7eac | ||
|
|
a7ef891217 | ||
|
|
8f22dfe87b | ||
|
|
2dc7fab39a | ||
|
|
74cb2e3c63 | ||
|
|
6e763a58f1 | ||
|
|
a8e72ed410 | ||
|
|
fcdfd5f936 | ||
|
|
f1faa463c1 | ||
|
|
a0f4952b54 | ||
|
|
bd82ded07d | ||
|
|
016e17dedb | ||
|
|
5cd3e1b368 | ||
|
|
b2b39458da | ||
|
|
556bbe394d | ||
|
|
07288b3f26 | ||
|
|
90f79b7708 | ||
|
|
e220786a20 | ||
|
|
f16b993294 | ||
|
|
c241bea3bf | ||
|
|
084654cd3c | ||
|
|
ab3577c369 | ||
|
|
6efb90c94e | ||
|
|
cbcc400eb4 | ||
|
|
15aec7da7e | ||
|
|
46535e4f56 | ||
|
|
e3f945c079 | ||
|
|
04239c57fe | ||
|
|
ad4439ed55 | ||
|
|
9fe3ef430f | ||
|
|
ff30773097 | ||
|
|
f7531d1e18 | ||
|
|
658008ab64 | ||
|
|
b089d63112 | ||
|
|
ee9b1b7f57 | ||
|
|
cd0fc8176f | ||
|
|
8e291e3e46 | ||
|
|
e3024076cd | ||
|
|
6105874abc | ||
|
|
1855f7829d | ||
|
|
456e8bd9c0 | ||
|
|
d5f2efac26 | ||
|
|
21e692623c | ||
|
|
80e9589af5 | ||
|
|
b2b9ae0677 | ||
|
|
63122905e6 | ||
|
|
f99b6f4646 | ||
|
|
39090c2a22 | ||
|
|
76baa8c791 | ||
|
|
74e4b9d6d2 | ||
|
|
bbfc5039f7 | ||
|
|
b2fd694483 | ||
|
|
b03f38ff22 | ||
|
|
fe48811047 | ||
|
|
bd9396b031 | ||
|
|
f0497ec16d | ||
|
|
7e9e179fa7 | ||
|
|
de62464ad8 | ||
|
|
f6911ca195 | ||
|
|
aec09ed8d2 | ||
|
|
51a983e460 | ||
|
|
0eb46e29c7 | ||
|
|
5ee11c7b6b | ||
|
|
b55accd76f | ||
|
|
fef2be1bc7 | ||
|
|
0b3858f91d | ||
|
|
d07ea1b337 |
15
.babelrc
15
.babelrc
@@ -1,15 +0,0 @@
|
||||
{
|
||||
"comments": false,
|
||||
"compact": true,
|
||||
"optional": [
|
||||
// Experimental features.
|
||||
// "minification.constantFolding",
|
||||
// "minification.deadCodeElimination",
|
||||
|
||||
"es7.asyncFunctions",
|
||||
"es7.decorators",
|
||||
"es7.exportExtensions",
|
||||
"es7.functionBind",
|
||||
"runtime"
|
||||
]
|
||||
}
|
||||
4
.gitignore
vendored
4
.gitignore
vendored
@@ -1,5 +1,9 @@
|
||||
/.nyc_output/
|
||||
/dist/
|
||||
/node_modules/
|
||||
/src/api/index.js
|
||||
/src/xapi/mixins/index.js
|
||||
/src/xo-mixins/index.js
|
||||
|
||||
npm-debug.log
|
||||
npm-debug.log.*
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
/examples/
|
||||
example.js
|
||||
example.js.map
|
||||
*.example.js
|
||||
*.example.js.map
|
||||
|
||||
|
||||
@@ -3,7 +3,6 @@ node_js:
|
||||
# - 'stable'
|
||||
- '4'
|
||||
- '0.12'
|
||||
- '0.10'
|
||||
|
||||
# Use containers.
|
||||
# http://docs.travis-ci.com/user/workers/container-based-infrastructure/
|
||||
|
||||
@@ -37,6 +37,4 @@ $ npm run dev
|
||||
|
||||
## How to report a bug?
|
||||
|
||||
If you are certain the bug is exclusively related to XO-Server, you may use the [bugtracker of this repository](https://github.com/vatesfr/xo-server/issues).
|
||||
|
||||
Otherwise, please consider using the [bugtracker of the general repository](https://github.com/vatesfr/xo/issues).
|
||||
All bug reports should go into the [bugtracker of xo-web](https://github.com/vatesfr/xo-web/issues).
|
||||
|
||||
@@ -1,7 +1,12 @@
|
||||
Error.stackTraceLimit = 100
|
||||
|
||||
// Async stacks.
|
||||
try { require('trace') } catch (_) {}
|
||||
//
|
||||
// Disabled for now as it cause a huge memory usage with
|
||||
// fs.createReadStream().
|
||||
// TODO: find a way to reenable.
|
||||
//
|
||||
// try { require('trace') } catch (_) {}
|
||||
|
||||
// Removes internal modules.
|
||||
try {
|
||||
|
||||
@@ -7,4 +7,25 @@
|
||||
// Better stack traces if possible.
|
||||
require('../better-stacks')
|
||||
|
||||
// Use Bluebird for all promises as it provides better performance and
|
||||
// less memory usage.
|
||||
global.Promise = require('bluebird')
|
||||
|
||||
// Make unhandled rejected promises visible.
|
||||
process.on('unhandledRejection', function (reason) {
|
||||
console.warn('[Warn] Possibly unhandled rejection:', reason && reason.stack || reason)
|
||||
})
|
||||
|
||||
;(function (EE) {
|
||||
var proto = EE.prototype
|
||||
var emit = proto.emit
|
||||
proto.emit = function patchedError (event, error) {
|
||||
if (event === 'error' && !this.listenerCount(event)) {
|
||||
return console.warn('[Warn] Unhandled error event:', error && error.stack || error)
|
||||
}
|
||||
|
||||
return emit.apply(this, arguments)
|
||||
}
|
||||
})(require('events').EventEmitter)
|
||||
|
||||
require('exec-promise')(require('../'))
|
||||
|
||||
10
bin/xo-server-logs
Executable file
10
bin/xo-server-logs
Executable file
@@ -0,0 +1,10 @@
|
||||
#!/usr/bin/env node
|
||||
|
||||
'use strict'
|
||||
|
||||
// ===================================================================
|
||||
|
||||
// Better stack traces if possible.
|
||||
require('../better-stacks')
|
||||
|
||||
require('exec-promise')(require('../dist/logs-cli').default)
|
||||
40
config.json
Normal file
40
config.json
Normal file
@@ -0,0 +1,40 @@
|
||||
// Vendor config: DO NOT TOUCH!
|
||||
//
|
||||
// See sample.config.yaml to override.
|
||||
{
|
||||
"http": {
|
||||
"listen": [
|
||||
{
|
||||
"port": 80
|
||||
}
|
||||
],
|
||||
"mounts": {},
|
||||
|
||||
// Ciphers to use.
|
||||
//
|
||||
// These are the default ciphers in Node 4.2.6, we are setting
|
||||
// them explicitly for older Node versions.
|
||||
"ciphers": "ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-AES256-GCM-SHA384:DHE-RSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-SHA256:DHE-RSA-AES128-SHA256:ECDHE-RSA-AES256-SHA384:DHE-RSA-AES256-SHA384:ECDHE-RSA-AES256-SHA256:DHE-RSA-AES256-SHA256:HIGH:!aNULL:!eNULL:!EXPORT:!DES:!RC4:!MD5:!PSK:!SRP:!CAMELLIA",
|
||||
|
||||
// Tell Node to respect the cipher order.
|
||||
"honorCipherOrder": true,
|
||||
|
||||
// Specify to use at least TLSv1.1.
|
||||
// See: https://github.com/certsimple/minimum-tls-version
|
||||
"secureOptions": 117440512
|
||||
},
|
||||
"datadir": "/var/lib/xo-server/data",
|
||||
|
||||
// Should users be created on first sign in?
|
||||
//
|
||||
// Necessary for external authentication providers.
|
||||
"createUserOnFirstSignin": true,
|
||||
|
||||
// Whether API logs should contains the full request/response on
|
||||
// errors.
|
||||
//
|
||||
// This is disabled by default for performance (lots of data) and
|
||||
// security concerns (avoiding sensitive data in the logs) but can
|
||||
// be turned for investigation by the administrator.
|
||||
"verboseApiLogsOnErrors": false
|
||||
}
|
||||
13
gulpfile.js
13
gulpfile.js
@@ -7,13 +7,16 @@ var gulp = require('gulp')
|
||||
var babel = require('gulp-babel')
|
||||
var coffee = require('gulp-coffee')
|
||||
var plumber = require('gulp-plumber')
|
||||
var rimraf = require('rimraf')
|
||||
var sourceMaps = require('gulp-sourcemaps')
|
||||
var watch = require('gulp-watch')
|
||||
|
||||
var join = require('path').join
|
||||
|
||||
// ===================================================================
|
||||
|
||||
var SRC_DIR = __dirname + '/src'
|
||||
var DIST_DIR = __dirname + '/dist'
|
||||
var SRC_DIR = join(__dirname, 'src')
|
||||
var DIST_DIR = join(__dirname, 'dist')
|
||||
|
||||
var PRODUCTION = process.argv.indexOf('--production') !== -1
|
||||
|
||||
@@ -36,6 +39,10 @@ function src (patterns) {
|
||||
|
||||
// ===================================================================
|
||||
|
||||
gulp.task(function clean (cb) {
|
||||
rimraf(DIST_DIR, cb)
|
||||
})
|
||||
|
||||
gulp.task(function buildCoffee () {
|
||||
return src('**/*.coffee')
|
||||
.pipe(sourceMaps.init())
|
||||
@@ -60,4 +67,4 @@ gulp.task(function buildEs6 () {
|
||||
|
||||
// ===================================================================
|
||||
|
||||
gulp.task('build', gulp.parallel('buildCoffee', 'buildEs6'))
|
||||
gulp.task('build', gulp.series('clean', gulp.parallel('buildCoffee', 'buildEs6')))
|
||||
|
||||
2
index.js
2
index.js
@@ -8,4 +8,4 @@ if (process.env.DEBUG === undefined) {
|
||||
}
|
||||
|
||||
// Import the real main module.
|
||||
module.exports = require('./dist')
|
||||
module.exports = require('./dist').default
|
||||
|
||||
136
package.json
136
package.json
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "xo-server",
|
||||
"version": "4.9.2",
|
||||
"version": "5.1.2",
|
||||
"license": "AGPL-3.0",
|
||||
"description": "Server part of Xen-Orchestra",
|
||||
"keywords": [
|
||||
@@ -11,7 +11,11 @@
|
||||
],
|
||||
"homepage": "http://github.com/vatesfr/xo-server/",
|
||||
"bugs": {
|
||||
"url": "https://github.com/vatesfr/xo-server/issues"
|
||||
"url": "https://github.com/vatesfr/xo-web/issues"
|
||||
},
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "git://github.com/vatesfr/xo-server.git"
|
||||
},
|
||||
"author": "Julien Fontanet <julien.fontanet@vates.fr>",
|
||||
"preferGlobal": true,
|
||||
@@ -19,94 +23,98 @@
|
||||
"better-stacks.js",
|
||||
"bin/",
|
||||
"dist/",
|
||||
"config.json",
|
||||
"index.js",
|
||||
"signin.jade"
|
||||
"signin.pug"
|
||||
],
|
||||
"directories": {
|
||||
"bin": "bin"
|
||||
},
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "git://github.com/vatesfr/xo-server.git"
|
||||
"engines": {
|
||||
"node": ">=0.12"
|
||||
},
|
||||
"dependencies": {
|
||||
"@marsaud/smb2": "^0.7.1",
|
||||
"@marsaud/smb2-promise": "^0.2.0",
|
||||
"app-conf": "^0.4.0",
|
||||
"babel-runtime": "^5",
|
||||
"base64url": "1.0.4",
|
||||
"babel-runtime": "^6.5.0",
|
||||
"base64url": "^2.0.0",
|
||||
"blocked": "^1.1.0",
|
||||
"bluebird": "^2.9.14",
|
||||
"bluebird": "^3.1.1",
|
||||
"body-parser": "^1.13.3",
|
||||
"connect-flash": "^0.1.1",
|
||||
"cookie": "^0.3.0",
|
||||
"cookie-parser": "^1.3.5",
|
||||
"cron": "^1.0.9",
|
||||
"d3-time-format": "^0.1.3",
|
||||
"d3-time-format": "^2.0.0",
|
||||
"debug": "^2.1.3",
|
||||
"escape-string-regexp": "^1.0.3",
|
||||
"event-to-promise": "^0.4.0",
|
||||
"exec-promise": "^0.5.1",
|
||||
"event-to-promise": "^0.7.0",
|
||||
"exec-promise": "^0.6.1",
|
||||
"execa": "^0.4.0",
|
||||
"express": "^4.13.3",
|
||||
"express-session": "^1.11.3",
|
||||
"fs-extra": "^0.26.2",
|
||||
"fs-promise": "^0.3.1",
|
||||
"got": "^5.0.0",
|
||||
"graceful-fs": "^4.1.2",
|
||||
"fatfs": "^0.10.3",
|
||||
"fs-extra": "^0.30.0",
|
||||
"fs-promise": "^0.4.1",
|
||||
"get-stream": "^2.1.0",
|
||||
"hashy": "~0.4.2",
|
||||
"helmet": "^2.0.0",
|
||||
"highland": "^2.5.1",
|
||||
"http-server-plus": "^0.5.1",
|
||||
"human-format": "^0.5.0",
|
||||
"http-proxy": "^1.13.2",
|
||||
"http-server-plus": "^0.6.4",
|
||||
"human-format": "^0.6.0",
|
||||
"is-my-json-valid": "^2.12.2",
|
||||
"jade": "^1.11.0",
|
||||
"js-yaml": "^3.2.7",
|
||||
"json5": "^0.4.0",
|
||||
"json-rpc-peer": "^0.11.0",
|
||||
"json5": "^0.4.0",
|
||||
"julien-f-source-map-support": "0.0.0",
|
||||
"julien-f-unzip": "^0.2.1",
|
||||
"kindof": "^2.0.0",
|
||||
"level": "^1.3.0",
|
||||
"level-party": "^3.0.4",
|
||||
"level-sublevel": "^6.5.2",
|
||||
"lodash.assign": "^3.0.0",
|
||||
"lodash.bind": "^3.0.0",
|
||||
"lodash.difference": "^3.2.0",
|
||||
"lodash.endswith": "^3.0.2",
|
||||
"lodash.filter": "^3.1.0",
|
||||
"lodash.find": "^3.0.0",
|
||||
"lodash.findindex": "^3.0.0",
|
||||
"lodash.foreach": "^3.0.1",
|
||||
"lodash.has": "^3.0.0",
|
||||
"lodash.includes": "^3.1.1",
|
||||
"lodash.isarray": "^3.0.0",
|
||||
"lodash.isempty": "^3.0.0",
|
||||
"lodash.isfunction": "^3.0.1",
|
||||
"lodash.isobject": "^3.0.0",
|
||||
"lodash.isstring": "^3.0.0",
|
||||
"lodash.keys": "^3.0.4",
|
||||
"lodash.map": "^3.0.0",
|
||||
"lodash.pick": "^3.0.0",
|
||||
"lodash.sortby": "^3.1.4",
|
||||
"lodash.startswith": "^3.0.1",
|
||||
"leveldown": "^1.4.2",
|
||||
"lodash": "^4.13.1",
|
||||
"make-error": "^1",
|
||||
"micromatch": "^2.3.2",
|
||||
"minimist": "^1.2.0",
|
||||
"moment-timezone": "^0.5.4",
|
||||
"ms": "^0.7.1",
|
||||
"multikey-hash": "^1.0.1",
|
||||
"ndjson": "^1.4.3",
|
||||
"partial-stream": "0.0.0",
|
||||
"passport": "^0.3.0",
|
||||
"passport-local": "^1.0.0",
|
||||
"proxy-http-request": "0.1.0",
|
||||
"promise-toolbox": "^0.3.2",
|
||||
"proxy-agent": "^2.0.0",
|
||||
"pug": "^2.0.0-alpha6",
|
||||
"redis": "^2.0.1",
|
||||
"schema-inspector": "^1.5.1",
|
||||
"semver": "^5.1.0",
|
||||
"serve-static": "^1.9.2",
|
||||
"stack-chain": "^1.3.3",
|
||||
"struct-fu": "^1.0.0",
|
||||
"through2": "^2.0.0",
|
||||
"trace": "^2.0.1",
|
||||
"ws": "~0.8.0",
|
||||
"xen-api": "^0.6.4",
|
||||
"ws": "^1.1.1",
|
||||
"xen-api": "^0.9.0",
|
||||
"xml2js": "~0.4.6",
|
||||
"xo-collection": "^0.4.0"
|
||||
"xo-acl-resolver": "^0.2.1",
|
||||
"xo-collection": "^0.4.0",
|
||||
"xo-remote-parser": "^0.3"
|
||||
},
|
||||
"devDependencies": {
|
||||
"babel-eslint": "^4.0.10",
|
||||
"babel-eslint": "^6.0.4",
|
||||
"babel-plugin-transform-decorators-legacy": "^1.3.4",
|
||||
"babel-plugin-transform-runtime": "^6.5.2",
|
||||
"babel-preset-es2015": "^6.5.0",
|
||||
"babel-preset-stage-0": "^6.5.0",
|
||||
"chai": "^3.0.0",
|
||||
"dependency-check": "^2.4.0",
|
||||
"ghooks": "^1.0.3",
|
||||
"gulp": "git://github.com/gulpjs/gulp#4.0",
|
||||
"gulp-babel": "^5",
|
||||
"gulp-babel": "^6",
|
||||
"gulp-coffee": "^2.3.1",
|
||||
"gulp-plumber": "^1.0.0",
|
||||
"gulp-sourcemaps": "^1.5.1",
|
||||
@@ -114,22 +122,44 @@
|
||||
"leche": "^2.1.1",
|
||||
"mocha": "^2.2.1",
|
||||
"must": "^0.13.1",
|
||||
"node-inspector": "^0.12.2",
|
||||
"nyc": "^7.0.0",
|
||||
"rimraf": "^2.5.2",
|
||||
"sinon": "^1.14.1",
|
||||
"standard": "^5.2.1"
|
||||
"standard": "^7.0.0"
|
||||
},
|
||||
"scripts": {
|
||||
"build": "gulp build --production",
|
||||
"dev": "gulp build",
|
||||
"build": "npm run build-indexes && gulp build --production",
|
||||
"depcheck": "dependency-check ./package.json",
|
||||
"build-indexes": "./tools/generate-index src/api src/xapi/mixins src/xo-mixins",
|
||||
"dev": "npm run build-indexes && gulp build",
|
||||
"dev-test": "mocha --opts .mocha.opts --watch --reporter=min \"dist/**/*.spec.js\"",
|
||||
"lint": "standard",
|
||||
"postrelease": "git checkout master && git merge --ff-only stable && git checkout next-release && git merge --ff-only stable",
|
||||
"posttest": "npm run lint && npm run depcheck",
|
||||
"prepublish": "npm run build",
|
||||
"prerelease": "git checkout next-release && git pull --ff-only && git checkout stable && git pull --ff-only && git merge next-release",
|
||||
"release": "npm version",
|
||||
"start": "node bin/xo-server",
|
||||
"test": "mocha --opts .mocha.opts \"dist/**/*.spec.js\"",
|
||||
"posttest": "npm run lint && dependency-check ./package.json"
|
||||
"test": "nyc mocha --opts .mocha.opts \"dist/**/*.spec.js\""
|
||||
},
|
||||
"babel": {
|
||||
"plugins": [
|
||||
"transform-decorators-legacy",
|
||||
"transform-runtime"
|
||||
],
|
||||
"presets": [
|
||||
"stage-0",
|
||||
"es2015"
|
||||
]
|
||||
},
|
||||
"config": {
|
||||
"ghooks": {
|
||||
"commit-msg": "npm test"
|
||||
}
|
||||
},
|
||||
"standard": {
|
||||
"ignore": [
|
||||
"dist/**"
|
||||
"dist"
|
||||
],
|
||||
"parser": "babel-eslint"
|
||||
}
|
||||
|
||||
@@ -66,6 +66,8 @@ http:
|
||||
#socket: './http.sock'
|
||||
|
||||
# Basic HTTPS.
|
||||
#
|
||||
# You can find the list of possible options there https://nodejs.org/docs/latest/api/tls.html#tls.createServer
|
||||
# -
|
||||
# # The only difference is the presence of the certificate and the
|
||||
# # key.
|
||||
@@ -83,7 +85,7 @@ http:
|
||||
# # certificate authority up to the root.
|
||||
# #
|
||||
# # Default: undefined
|
||||
# certificate: './certificate.pem'
|
||||
# cert: './certificate.pem'
|
||||
|
||||
# # File containing the private key (PEM format).
|
||||
# #
|
||||
@@ -93,6 +95,10 @@ http:
|
||||
# # Default: undefined
|
||||
# key: './key.pem'
|
||||
|
||||
# If set to true, all HTTP traffic will be redirected to the first
|
||||
# HTTPs configuration.
|
||||
#redirectToHttps: true
|
||||
|
||||
# List of files/directories which will be served.
|
||||
mounts:
|
||||
#'/': '/path/to/xo-web/dist/'
|
||||
@@ -101,13 +107,19 @@ http:
|
||||
proxies:
|
||||
# '/any/url': 'http://localhost:54722'
|
||||
|
||||
# HTTP proxy configuration used by xo-server to fetch resources on the
|
||||
# Internet.
|
||||
#
|
||||
# See: https://github.com/TooTallNate/node-proxy-agent#maps-proxy-protocols-to-httpagent-implementations
|
||||
#httpProxy: 'http://jsmith:qwerty@proxy.lan:3128'
|
||||
|
||||
#=====================================================================
|
||||
|
||||
# Connection to the Redis server.
|
||||
redis:
|
||||
# Syntax: tcp://[db[:password]@]hostname[:port]
|
||||
# Syntax: redis://[db[:password]@]hostname[:port]
|
||||
#
|
||||
# Default: tcp://localhost:6379
|
||||
# Default: redis://localhost:6379
|
||||
#uri: ''
|
||||
|
||||
# Directory containing the database of XO.
|
||||
|
||||
@@ -7,6 +7,7 @@ html
|
||||
title Xen Orchestra
|
||||
meta(name = 'author' content = 'Vates SAS')
|
||||
link(rel = 'stylesheet' href = 'styles/main.css')
|
||||
link(rel = 'stylesheet' href = 'v4/styles/main.css')
|
||||
body
|
||||
.container
|
||||
.row-login
|
||||
123
src/acl.js
123
src/acl.js
@@ -1,123 +0,0 @@
|
||||
// These global variables are not a problem because the algorithm is
|
||||
// synchronous.
|
||||
let permissionsByObject
|
||||
let getObject
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
const authorized = () => true // eslint-disable-line no-unused-vars
|
||||
const forbiddden = () => false // eslint-disable-line no-unused-vars
|
||||
|
||||
function and (...checkers) { // eslint-disable-line no-unused-vars
|
||||
return function (object, permission) {
|
||||
for (const checker of checkers) {
|
||||
if (!checker(object, permission)) {
|
||||
return false
|
||||
}
|
||||
}
|
||||
return true
|
||||
}
|
||||
}
|
||||
|
||||
function or (...checkers) { // eslint-disable-line no-unused-vars
|
||||
return function (object, permission) {
|
||||
for (const checker of checkers) {
|
||||
if (checker(object, permission)) {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
function checkMember (memberName) {
|
||||
return function (object, permission) {
|
||||
const member = object[memberName]
|
||||
return checkAuthorization(member, permission)
|
||||
}
|
||||
}
|
||||
|
||||
function checkSelf ({ id }, permission) {
|
||||
const permissionsForObject = permissionsByObject[id]
|
||||
|
||||
return (
|
||||
permissionsForObject &&
|
||||
permissionsForObject[permission]
|
||||
)
|
||||
}
|
||||
|
||||
// ===================================================================
|
||||
|
||||
const checkAuthorizationByTypes = {
|
||||
host: or(checkSelf, checkMember('$poolId')),
|
||||
|
||||
message: checkMember('$object'),
|
||||
|
||||
network: or(checkSelf, checkMember('$poolId')),
|
||||
|
||||
SR: or(checkSelf, checkMember('$poolId')),
|
||||
|
||||
task: checkMember('$host'),
|
||||
|
||||
VBD: checkMember('VDI'),
|
||||
|
||||
// Access to a VDI is granted if the user has access to the
|
||||
// containing SR or to a linked VM.
|
||||
VDI (vdi, permission) {
|
||||
// Check authorization for the containing SR.
|
||||
if (checkAuthorization(vdi.$SR, permission)) {
|
||||
return true
|
||||
}
|
||||
|
||||
// Check authorization for each of the connected VMs.
|
||||
for (const { VM: vm } of vdi.$VBDs) {
|
||||
if (checkAuthorization(vm, permission)) {
|
||||
return true
|
||||
}
|
||||
}
|
||||
|
||||
return false
|
||||
},
|
||||
|
||||
VIF: or(checkMember('$network'), checkMember('$VM')),
|
||||
|
||||
VM: or(checkSelf, checkMember('$container')),
|
||||
|
||||
'VM-snapshot': checkMember('$snapshot_of'),
|
||||
|
||||
'VM-template': authorized
|
||||
}
|
||||
|
||||
function checkAuthorization (objectId, permission) {
|
||||
const object = getObject(objectId)
|
||||
const checker = checkAuthorizationByTypes[object.type] || checkSelf
|
||||
|
||||
return checker(object, permission)
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
export default function (
|
||||
permissionsByObject_,
|
||||
getObject_,
|
||||
permissions
|
||||
) {
|
||||
// Assign global variables.
|
||||
permissionsByObject = permissionsByObject_
|
||||
getObject = getObject_
|
||||
|
||||
try {
|
||||
for (const [objectId, permission] of permissions) {
|
||||
if (!checkAuthorization(objectId, permission)) {
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
return true
|
||||
} finally {
|
||||
// Free the global variables.
|
||||
permissionsByObject = getObject = null
|
||||
}
|
||||
}
|
||||
@@ -3,7 +3,7 @@ import {JsonRpcError} from 'json-rpc-peer'
|
||||
// ===================================================================
|
||||
|
||||
// Export standard JSON-RPC errors.
|
||||
export {
|
||||
export { // eslint-disable-line no-duplicate-imports
|
||||
InvalidJson,
|
||||
InvalidParameters,
|
||||
InvalidRequest,
|
||||
@@ -50,3 +50,21 @@ export class AlreadyAuthenticated extends JsonRpcError {
|
||||
super('already authenticated', 4)
|
||||
}
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
export class ForbiddenOperation extends JsonRpcError {
|
||||
constructor (operation, reason) {
|
||||
super(`forbidden operation: ${operation}`, 5, reason)
|
||||
}
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
// To be used with a user-readable message.
|
||||
// The message can be destined to be displayed to the front-end user.
|
||||
export class GenericError extends JsonRpcError {
|
||||
constructor (message) {
|
||||
super(message, 6)
|
||||
}
|
||||
}
|
||||
|
||||
153
src/api.js
153
src/api.js
@@ -1,9 +1,9 @@
|
||||
import createDebug from 'debug'
|
||||
const debug = createDebug('xo:api')
|
||||
|
||||
import getKeys from 'lodash.keys'
|
||||
import isFunction from 'lodash.isfunction'
|
||||
import getKeys from 'lodash/keys'
|
||||
import kindOf from 'kindof'
|
||||
import moment from 'moment-timezone'
|
||||
import ms from 'ms'
|
||||
import schemaInspector from 'schema-inspector'
|
||||
|
||||
@@ -13,13 +13,29 @@ import {
|
||||
NoSuchObject,
|
||||
Unauthorized
|
||||
} from './api-errors'
|
||||
import {
|
||||
version as xoServerVersion
|
||||
} from '../package.json'
|
||||
import {
|
||||
createRawObject,
|
||||
forEach
|
||||
forEach,
|
||||
isFunction,
|
||||
noop
|
||||
} from './utils'
|
||||
|
||||
// ===================================================================
|
||||
|
||||
const PERMISSIONS = {
|
||||
none: 0,
|
||||
read: 1,
|
||||
write: 2,
|
||||
admin: 3
|
||||
}
|
||||
|
||||
const hasPermission = (user, permission) => (
|
||||
PERMISSIONS[user.permission] >= PERMISSIONS[permission]
|
||||
)
|
||||
|
||||
// FIXME: this function is specific to XO and should not be defined in
|
||||
// this file.
|
||||
function checkPermission (method) {
|
||||
@@ -42,7 +58,7 @@ function checkPermission (method) {
|
||||
return
|
||||
}
|
||||
|
||||
if (!user.hasPermission(permission)) {
|
||||
if (!hasPermission(user, permission)) {
|
||||
throw new Unauthorized()
|
||||
}
|
||||
}
|
||||
@@ -78,7 +94,10 @@ function resolveParams (method, params) {
|
||||
throw new Unauthorized()
|
||||
}
|
||||
|
||||
const userId = user.get('id')
|
||||
const userId = user.id
|
||||
|
||||
// Do not alter the original object.
|
||||
params = { ...params }
|
||||
|
||||
const permissions = []
|
||||
forEach(resolve, ([param, types, permission = 'administrate'], key) => {
|
||||
@@ -95,7 +114,12 @@ function resolveParams (method, params) {
|
||||
// Register this new value.
|
||||
params[key] = object
|
||||
|
||||
permissions.push([ object.id, permission ])
|
||||
// Permission default to 'administrate' but can be set to a falsy
|
||||
// value (except null or undefined which trigger the default
|
||||
// value) to simply do a resolve without checking any permissions.
|
||||
if (permission) {
|
||||
permissions.push([ object.id, permission ])
|
||||
}
|
||||
})
|
||||
|
||||
return this.hasPermissions(userId, permissions).then(success => {
|
||||
@@ -112,13 +136,13 @@ function resolveParams (method, params) {
|
||||
function getMethodsInfo () {
|
||||
const methods = {}
|
||||
|
||||
forEach(this.api._methods, function (method, name) {
|
||||
this[name] = {
|
||||
forEach(this.api._methods, (method, name) => {
|
||||
methods[name] = {
|
||||
description: method.description,
|
||||
params: method.params || {},
|
||||
permission: method.permission
|
||||
}
|
||||
}, methods)
|
||||
})
|
||||
|
||||
return methods
|
||||
}
|
||||
@@ -126,6 +150,11 @@ getMethodsInfo.description = 'returns the signatures of all available API method
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
const getServerVersion = () => xoServerVersion
|
||||
getServerVersion.description = 'return the version of xo-server'
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
const getVersion = () => '0.1'
|
||||
getVersion.description = 'API version (unstable)'
|
||||
|
||||
@@ -160,14 +189,25 @@ methodSignature.description = 'returns the signature of an API method'
|
||||
|
||||
// ===================================================================
|
||||
|
||||
const getServerTimezone = (tz => () => tz)(moment.tz.guess())
|
||||
getServerTimezone.description = 'return the timezone server'
|
||||
|
||||
// ===================================================================
|
||||
|
||||
export default class Api {
|
||||
constructor ({context} = {}) {
|
||||
constructor ({
|
||||
context,
|
||||
verboseLogsOnErrors
|
||||
} = {}) {
|
||||
this._methods = createRawObject()
|
||||
this._verboseLogsOnErrors = verboseLogsOnErrors
|
||||
this.context = context
|
||||
|
||||
this.addMethods({
|
||||
system: {
|
||||
getMethodsInfo,
|
||||
getServerVersion,
|
||||
getServerTimezone,
|
||||
getVersion,
|
||||
listMethods,
|
||||
methodSignature
|
||||
@@ -176,12 +216,25 @@ export default class Api {
|
||||
}
|
||||
|
||||
addMethod (name, method) {
|
||||
this._methods[name] = method
|
||||
const methods = this._methods
|
||||
|
||||
if (name in methods) {
|
||||
throw new Error(`API method ${name} already exists`)
|
||||
}
|
||||
|
||||
methods[name] = method
|
||||
|
||||
let unset = () => {
|
||||
delete methods[name]
|
||||
unset = noop
|
||||
}
|
||||
return () => unset()
|
||||
}
|
||||
|
||||
addMethods (methods) {
|
||||
let base = ''
|
||||
forEach(methods, function addMethod (method, name) {
|
||||
|
||||
const addMethod = (method, name) => {
|
||||
name = base + name
|
||||
|
||||
if (isFunction(method)) {
|
||||
@@ -191,9 +244,10 @@ export default class Api {
|
||||
|
||||
const oldBase = base
|
||||
base = name + '.'
|
||||
forEach(method, addMethod, this)
|
||||
forEach(method, addMethod)
|
||||
base = oldBase
|
||||
}, this)
|
||||
}
|
||||
forEach(methods, addMethod)
|
||||
}
|
||||
|
||||
async call (session, name, params) {
|
||||
@@ -204,24 +258,46 @@ export default class Api {
|
||||
throw new MethodNotFound(name)
|
||||
}
|
||||
|
||||
const context = Object.create(this.context)
|
||||
context.api = this // Used by system.*().
|
||||
context.session = session
|
||||
// FIXME: it can cause issues if there any property assignments in
|
||||
// XO methods called from the API.
|
||||
const context = Object.create(this.context, {
|
||||
api: { // Used by system.*().
|
||||
value: this
|
||||
},
|
||||
session: {
|
||||
value: session
|
||||
}
|
||||
})
|
||||
|
||||
// FIXME: too coupled with XO.
|
||||
// Fetch and inject the current user.
|
||||
const userId = session.get('user_id', undefined)
|
||||
if (userId) {
|
||||
context.user = await context._getUser(userId)
|
||||
}
|
||||
context.user = userId && await context.getUser(userId)
|
||||
const userName = context.user
|
||||
? context.user.email
|
||||
: '(unknown user)'
|
||||
|
||||
try {
|
||||
await checkPermission.call(context, method)
|
||||
|
||||
// API methods are in a namespace.
|
||||
// Some methods use the namespace or an id parameter like:
|
||||
//
|
||||
// vm.detachPci vm=<string>
|
||||
// vm.ejectCd id=<string>
|
||||
//
|
||||
// The goal here is to standardize the calls by always providing
|
||||
// an id parameter when possible to simplify calls to the API.
|
||||
if (params && params.id === undefined) {
|
||||
const namespace = name.slice(0, name.indexOf('.'))
|
||||
params.id = params[namespace]
|
||||
}
|
||||
|
||||
checkParams(method, params)
|
||||
|
||||
await resolveParams.call(context, method, params)
|
||||
const resolvedParams = await resolveParams.call(context, method, params)
|
||||
|
||||
let result = await method.call(context, params)
|
||||
let result = await method.call(context, resolvedParams)
|
||||
|
||||
// If nothing was returned, consider this operation a success
|
||||
// and return true.
|
||||
@@ -230,7 +306,8 @@ export default class Api {
|
||||
}
|
||||
|
||||
debug(
|
||||
'%s(...) [%s] ==> %s',
|
||||
'%s | %s(...) [%s] ==> %s',
|
||||
userName,
|
||||
name,
|
||||
ms(Date.now() - startTime),
|
||||
kindOf(result)
|
||||
@@ -238,16 +315,28 @@ export default class Api {
|
||||
|
||||
return result
|
||||
} catch (error) {
|
||||
debug(
|
||||
'%s(...) [%s] =!> %s',
|
||||
name,
|
||||
ms(Date.now() - startTime),
|
||||
error
|
||||
)
|
||||
if (this._verboseLogsOnErrors) {
|
||||
debug(
|
||||
'%s | %s(%j) [%s] =!> %s',
|
||||
userName,
|
||||
name,
|
||||
params,
|
||||
ms(Date.now() - startTime),
|
||||
error
|
||||
)
|
||||
|
||||
const stack = error && error.stack
|
||||
if (stack) {
|
||||
console.error(stack)
|
||||
const stack = error && error.stack
|
||||
if (stack) {
|
||||
console.error(stack)
|
||||
}
|
||||
} else {
|
||||
debug(
|
||||
'%s | %s(...) [%s] =!> %s',
|
||||
userName,
|
||||
name,
|
||||
ms(Date.now() - startTime),
|
||||
error
|
||||
)
|
||||
}
|
||||
|
||||
throw error
|
||||
|
||||
@@ -1,26 +0,0 @@
|
||||
#!/usr/bin/env sh
|
||||
|
||||
# TODO: this generation should probably be automated and integrated
|
||||
# into the build system.
|
||||
|
||||
set -e -u
|
||||
|
||||
cd "$(dirname "$(which "$0")")"
|
||||
|
||||
{
|
||||
printf %s '//
|
||||
// This file has been generated by ./.generate-index.sh
|
||||
//
|
||||
// It MUST be re-generated each time an API namespace (read file) is
|
||||
// added or removed.
|
||||
//
|
||||
'
|
||||
|
||||
for f in *.js *.coffee
|
||||
do
|
||||
base=${f%.*}
|
||||
[ "$base" != index ] || continue
|
||||
|
||||
printf '%s\n' "export * as $base from './$base'"
|
||||
done | sort
|
||||
} > index.js
|
||||
@@ -1,5 +1,5 @@
|
||||
export async function get () {
|
||||
return await this.getAllAcls()
|
||||
return /* await */ this.getAllAcls()
|
||||
}
|
||||
|
||||
get.permission = 'admin'
|
||||
@@ -9,7 +9,7 @@ get.description = 'get existing ACLs'
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
export async function getCurrentPermissions () {
|
||||
return await this.getPermissionsForUser(this.session.get('user_id'))
|
||||
return /* await */ this.getPermissionsForUser(this.session.get('user_id'))
|
||||
}
|
||||
|
||||
getCurrentPermissions.permission = ''
|
||||
|
||||
@@ -3,9 +3,9 @@ import {parseSize} from '../utils'
|
||||
// ===================================================================
|
||||
|
||||
export async function create ({name, size, sr}) {
|
||||
const vdi = await this.getXAPI(sr).createVdi(parseSize(size), {
|
||||
const vdi = await this.getXapi(sr).createVdi(parseSize(size), {
|
||||
name_label: name,
|
||||
sr: sr.id
|
||||
sr: sr._xapiId
|
||||
})
|
||||
return vdi.$id
|
||||
}
|
||||
@@ -14,10 +14,27 @@ create.description = 'create a new disk on a SR'
|
||||
|
||||
create.params = {
|
||||
name: { type: 'string' },
|
||||
size: { type: 'string' },
|
||||
size: { type: ['integer', 'string'] },
|
||||
sr: { type: 'string' }
|
||||
}
|
||||
|
||||
create.resolve = {
|
||||
sr: ['sr', 'SR', 'administrate']
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
export async function resize ({ vdi, size }) {
|
||||
await this.getXapi(vdi).resizeVdi(vdi._xapiId, parseSize(size))
|
||||
}
|
||||
|
||||
resize.description = 'resize an existing VDI'
|
||||
|
||||
resize.params = {
|
||||
id: { type: 'string' },
|
||||
size: { type: ['integer', 'string'] }
|
||||
}
|
||||
|
||||
resize.resolve = {
|
||||
vdi: ['id', ['VDI', 'VDI-snapshot'], 'administrate']
|
||||
}
|
||||
|
||||
@@ -1,8 +1,6 @@
|
||||
export async function register ({vm}) {
|
||||
await this.getXAPI(vm).registerDockerContainer(vm.id)
|
||||
await this.getXapi(vm).registerDockerContainer(vm._xapiId)
|
||||
}
|
||||
register.permission = 'admin'
|
||||
|
||||
register.description = 'Register the VM for Docker management'
|
||||
|
||||
register.params = {
|
||||
@@ -16,10 +14,8 @@ register.resolve = {
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
export async function deregister ({vm}) {
|
||||
await this.getXAPI(vm).unregisterDockerContainer(vm.id)
|
||||
await this.getXapi(vm).unregisterDockerContainer(vm._xapiId)
|
||||
}
|
||||
deregister.permission = 'admin'
|
||||
|
||||
deregister.description = 'Deregister the VM for Docker management'
|
||||
|
||||
deregister.params = {
|
||||
@@ -33,28 +29,26 @@ deregister.resolve = {
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
export async function start ({vm, container}) {
|
||||
await this.getXAPI(vm).startDockerContainer(vm.id, container)
|
||||
await this.getXapi(vm).startDockerContainer(vm._xapiId, container)
|
||||
}
|
||||
|
||||
export async function stop ({vm, container}) {
|
||||
await this.getXAPI(vm).stopDockerContainer(vm.id, container)
|
||||
await this.getXapi(vm).stopDockerContainer(vm._xapiId, container)
|
||||
}
|
||||
|
||||
export async function restart ({vm, container}) {
|
||||
await this.getXAPI(vm).restartDockerContainer(vm.id, container)
|
||||
await this.getXapi(vm).restartDockerContainer(vm._xapiId, container)
|
||||
}
|
||||
|
||||
export async function pause ({vm, container}) {
|
||||
await this.getXAPI(vm).pauseDockerContainer(vm.id, container)
|
||||
await this.getXapi(vm).pauseDockerContainer(vm._xapiId, container)
|
||||
}
|
||||
|
||||
export async function unpause ({vm, container}) {
|
||||
await this.getXAPI(vm).unpauseDockerContainer(vm.id, container)
|
||||
await this.getXapi(vm).unpauseDockerContainer(vm._xapiId, container)
|
||||
}
|
||||
|
||||
for (let fn of [start, stop, restart, pause, unpause]) {
|
||||
fn.permission = 'admin'
|
||||
|
||||
fn.params = {
|
||||
vm: { type: 'string' },
|
||||
container: { type: 'string' }
|
||||
|
||||
@@ -27,14 +27,11 @@ delete_.params = {
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
export async function getAll () {
|
||||
return await this._groups.get()
|
||||
return /* await */ this.getAllGroups()
|
||||
}
|
||||
|
||||
delete_.description = 'returns all the existing group'
|
||||
delete_.permission = 'admin'
|
||||
delete_.params = {
|
||||
id: {type: 'string'}
|
||||
}
|
||||
getAll.description = 'returns all the existing group'
|
||||
getAll.permission = 'admin'
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
|
||||
@@ -1,28 +1,29 @@
|
||||
$debug = (require 'debug') 'xo:api:vm'
|
||||
$find = require 'lodash.find'
|
||||
$findIndex = require 'lodash.findindex'
|
||||
$forEach = require 'lodash.foreach'
|
||||
endsWith = require 'lodash.endswith'
|
||||
got = require('got')
|
||||
startsWith = require 'lodash.startswith'
|
||||
$find = require 'lodash/find'
|
||||
$findIndex = require 'lodash/findIndex'
|
||||
$forEach = require 'lodash/forEach'
|
||||
endsWith = require 'lodash/endsWith'
|
||||
startsWith = require 'lodash/startsWith'
|
||||
{coroutine: $coroutine} = require 'bluebird'
|
||||
{parseXml, promisify} = require '../utils'
|
||||
{
|
||||
extractProperty,
|
||||
parseXml,
|
||||
promisify
|
||||
} = require '../utils'
|
||||
|
||||
#=====================================================================
|
||||
|
||||
set = $coroutine (params) ->
|
||||
{host} = params
|
||||
xapi = @getXAPI host
|
||||
set = ({
|
||||
host,
|
||||
|
||||
for param, field of {
|
||||
'name_label'
|
||||
'name_description'
|
||||
}
|
||||
continue unless param of params
|
||||
|
||||
yield xapi.call "host.set_#{field}", host.ref, params[param]
|
||||
|
||||
return true
|
||||
# TODO: use camel case.
|
||||
name_label: nameLabel,
|
||||
name_description: nameDescription
|
||||
}) ->
|
||||
return @getXapi(host).setHostProperties(host._xapiId, {
|
||||
nameLabel,
|
||||
nameDescription
|
||||
})
|
||||
|
||||
set.description = 'changes the properties of an host'
|
||||
|
||||
@@ -43,18 +44,19 @@ exports.set = set
|
||||
|
||||
#---------------------------------------------------------------------
|
||||
|
||||
restart = $coroutine ({host}) ->
|
||||
xapi = @getXAPI host
|
||||
|
||||
yield xapi.call 'host.disable', host.ref
|
||||
yield xapi.call 'host.reboot', host.ref
|
||||
|
||||
return true
|
||||
# FIXME: set force to false per default when correctly implemented in
|
||||
# UI.
|
||||
restart = ({host, force = true}) ->
|
||||
return @getXapi(host).rebootHost(host._xapiId, force)
|
||||
|
||||
restart.description = 'restart the host'
|
||||
|
||||
restart.params = {
|
||||
id: { type: 'string' }
|
||||
id: { type: 'string' },
|
||||
force: {
|
||||
type: 'boolean',
|
||||
optional: true
|
||||
}
|
||||
}
|
||||
|
||||
restart.resolve = {
|
||||
@@ -65,12 +67,8 @@ exports.restart = restart
|
||||
|
||||
#---------------------------------------------------------------------
|
||||
|
||||
restartAgent = $coroutine ({host}) ->
|
||||
xapi = @getXAPI host
|
||||
|
||||
yield xapi.call 'host.restart_agent', host.ref
|
||||
|
||||
return true
|
||||
restartAgent = ({host}) ->
|
||||
return @getXapi(host).restartHostAgent(host._xapiId)
|
||||
|
||||
restartAgent.description = 'restart the Xen agent on the host'
|
||||
|
||||
@@ -79,7 +77,7 @@ restartAgent.params = {
|
||||
}
|
||||
|
||||
restartAgent.resolve = {
|
||||
host: ['id', 'host', 'operate'],
|
||||
host: ['id', 'host', 'administrate'],
|
||||
}
|
||||
|
||||
# TODO camel case
|
||||
@@ -87,12 +85,8 @@ exports.restart_agent = restartAgent
|
||||
|
||||
#---------------------------------------------------------------------
|
||||
|
||||
start = $coroutine ({host}) ->
|
||||
xapi = @getXAPI host
|
||||
|
||||
yield xapi.call 'host.power_on', host.ref
|
||||
|
||||
return true
|
||||
start = ({host}) ->
|
||||
return @getXapi(host).powerOnHost(host._xapiId)
|
||||
|
||||
start.description = 'start the host'
|
||||
|
||||
@@ -108,13 +102,8 @@ exports.start = start
|
||||
|
||||
#---------------------------------------------------------------------
|
||||
|
||||
stop = $coroutine ({host}) ->
|
||||
xapi = @getXAPI host
|
||||
|
||||
yield xapi.call 'host.disable', host.ref
|
||||
yield xapi.call 'host.shutdown', host.ref
|
||||
|
||||
return true
|
||||
stop = ({host}) ->
|
||||
return @getXapi(host).shutdownHost(host._xapiId)
|
||||
|
||||
stop.description = 'stop the host'
|
||||
|
||||
@@ -130,12 +119,8 @@ exports.stop = stop
|
||||
|
||||
#---------------------------------------------------------------------
|
||||
|
||||
detach = $coroutine ({host}) ->
|
||||
xapi = @getXAPI host
|
||||
|
||||
yield xapi.call 'pool.eject', host.ref
|
||||
|
||||
return true
|
||||
detach = ({host}) ->
|
||||
return @getXapi(host).ejectHostFromPool(host._xapiId)
|
||||
|
||||
detach.description = 'eject the host of a pool'
|
||||
|
||||
@@ -151,12 +136,8 @@ exports.detach = detach
|
||||
|
||||
#---------------------------------------------------------------------
|
||||
|
||||
enable = $coroutine ({host}) ->
|
||||
xapi = @getXAPI host
|
||||
|
||||
yield xapi.call 'host.enable', host.ref
|
||||
|
||||
return true
|
||||
enable = ({host}) ->
|
||||
return @getXapi(host).enableHost(host._xapiId)
|
||||
|
||||
enable.description = 'enable to create VM on the host'
|
||||
|
||||
@@ -172,12 +153,8 @@ exports.enable = enable
|
||||
|
||||
#---------------------------------------------------------------------
|
||||
|
||||
disable = $coroutine ({host}) ->
|
||||
xapi = @getXAPI host
|
||||
|
||||
yield xapi.call 'host.disable', host.ref
|
||||
|
||||
return true
|
||||
disable = ({host}) ->
|
||||
return @getXapi(host).disableHost(host._xapiId)
|
||||
|
||||
disable.description = 'disable to create VM on the hsot'
|
||||
|
||||
@@ -191,49 +168,13 @@ disable.resolve = {
|
||||
|
||||
exports.disable = disable
|
||||
|
||||
#---------------------------------------------------------------------
|
||||
|
||||
createNetwork = $coroutine ({host, name, description, pif, mtu, vlan}) ->
|
||||
xapi = @getXAPI host
|
||||
|
||||
description = description ? 'Created with Xen Orchestra'
|
||||
|
||||
network_ref = yield xapi.call 'network.create', {
|
||||
name_label: name,
|
||||
name_description: description,
|
||||
MTU: mtu ? '1500'
|
||||
other_config: {}
|
||||
}
|
||||
|
||||
if pif?
|
||||
vlan = vlan ? '0'
|
||||
pif = @getObject pif, 'PIF'
|
||||
yield xapi.call 'pool.create_VLAN_from_PIF', pif.ref, network_ref, vlan
|
||||
|
||||
return true
|
||||
|
||||
createNetwork.params = {
|
||||
host: { type: 'string' }
|
||||
name: { type: 'string' }
|
||||
description: { type: 'string', optional: true }
|
||||
pif: { type: 'string', optional: true }
|
||||
mtu: { type: 'string', optional: true }
|
||||
vlan: { type: 'string', optional: true }
|
||||
}
|
||||
|
||||
createNetwork.resolve = {
|
||||
host: ['host', 'host', 'administrate'],
|
||||
}
|
||||
createNetwork.permission = 'admin'
|
||||
exports.createNetwork = createNetwork
|
||||
|
||||
#---------------------------------------------------------------------
|
||||
# Returns an array of missing new patches in the host
|
||||
# Returns an empty array if up-to-date
|
||||
# Throws an error if the host is not running the latest XS version
|
||||
|
||||
listMissingPatches = ({host}) ->
|
||||
return @getXAPI(host).listMissingPoolPatchesOnHost(host.id)
|
||||
return @getXapi(host).listMissingPoolPatchesOnHost(host._xapiId)
|
||||
|
||||
listMissingPatches.params = {
|
||||
host: { type: 'string' }
|
||||
@@ -250,7 +191,7 @@ listMissingPatches.description = 'return an array of missing new patches in the
|
||||
#---------------------------------------------------------------------
|
||||
|
||||
installPatch = ({host, patch: patchUuid}) ->
|
||||
return @getXAPI(host).installPoolPatchOnHost(patchUuid, host.id)
|
||||
return @getXapi(host).installPoolPatchOnHost(patchUuid, host._xapiId)
|
||||
|
||||
installPatch.description = 'install a patch on an host'
|
||||
|
||||
@@ -268,7 +209,7 @@ exports.installPatch = installPatch
|
||||
#---------------------------------------------------------------------
|
||||
|
||||
installAllPatches = ({host}) ->
|
||||
return @getXAPI(host).installAllPoolPatchesOnHost(host.id)
|
||||
return @getXapi(host).installAllPoolPatchesOnHost(host._xapiId)
|
||||
|
||||
installAllPatches.description = 'install all the missing patches on a host'
|
||||
|
||||
@@ -284,9 +225,25 @@ exports.installAllPatches = installAllPatches
|
||||
|
||||
#---------------------------------------------------------------------
|
||||
|
||||
stats = $coroutine ({host, granularity}) ->
|
||||
stats = yield @getXapiHostStats(host, granularity)
|
||||
return stats
|
||||
emergencyShutdownHost = ({host}) ->
|
||||
return @getXapi(host).emergencyShutdownHost(host._xapiId)
|
||||
|
||||
emergencyShutdownHost.description = 'suspend all VMs and shutdown host'
|
||||
|
||||
emergencyShutdownHost.params = {
|
||||
host: { type: 'string' }
|
||||
}
|
||||
|
||||
emergencyShutdownHost.resolve = {
|
||||
host: ['host', 'host', 'administrate']
|
||||
}
|
||||
|
||||
exports.emergencyShutdownHost = emergencyShutdownHost
|
||||
|
||||
#---------------------------------------------------------------------
|
||||
|
||||
stats = ({host, granularity}) ->
|
||||
return @getXapiHostStats(host, granularity)
|
||||
|
||||
stats.description = 'returns statistic of the host'
|
||||
|
||||
@@ -303,3 +260,9 @@ stats.resolve = {
|
||||
}
|
||||
|
||||
exports.stats = stats;
|
||||
|
||||
#=====================================================================
|
||||
|
||||
Object.defineProperty(exports, '__esModule', {
|
||||
value: true
|
||||
})
|
||||
|
||||
@@ -1,35 +0,0 @@
|
||||
//
|
||||
// This file has been generated by ./.generate-index.sh
|
||||
//
|
||||
// It MUST be re-generated each time an API namespace (read file) is
|
||||
// added or removed.
|
||||
//
|
||||
export * as acl from './acl'
|
||||
export * as disk from './disk'
|
||||
export * as docker from './docker'
|
||||
export * as group from './group'
|
||||
export * as host from './host'
|
||||
export * as job from './job'
|
||||
export * as log from './log'
|
||||
export * as message from './message'
|
||||
export * as pbd from './pbd'
|
||||
export * as pif from './pif'
|
||||
export * as plugin from './plugin'
|
||||
export * as pool from './pool'
|
||||
export * as remote from './remote'
|
||||
export * as role from './role'
|
||||
export * as schedule from './schedule'
|
||||
export * as scheduler from './scheduler'
|
||||
export * as server from './server'
|
||||
export * as session from './session'
|
||||
export * as sr from './sr'
|
||||
export * as tag from './tag'
|
||||
export * as task from './task'
|
||||
export * as test from './test'
|
||||
export * as token from './token'
|
||||
export * as user from './user'
|
||||
export * as vbd from './vbd'
|
||||
export * as vdi from './vdi'
|
||||
export * as vif from './vif'
|
||||
export * as vm from './vm'
|
||||
export * as xo from './xo'
|
||||
@@ -1,14 +1,14 @@
|
||||
// FIXME so far, no acls for jobs
|
||||
|
||||
export async function getAll () {
|
||||
return await this.getAllJobs()
|
||||
return /* await */ this.getAllJobs()
|
||||
}
|
||||
|
||||
getAll.permission = 'admin'
|
||||
getAll.description = 'Gets all available jobs'
|
||||
|
||||
export async function get (id) {
|
||||
return await this.getJob(id)
|
||||
return /* await */ this.getJob(id)
|
||||
}
|
||||
|
||||
get.permission = 'admin'
|
||||
@@ -27,6 +27,7 @@ create.params = {
|
||||
job: {
|
||||
type: 'object',
|
||||
properties: {
|
||||
name: {type: 'string', optional: true},
|
||||
type: {type: 'string'},
|
||||
key: {type: 'string'},
|
||||
method: {type: 'string'},
|
||||
@@ -47,7 +48,8 @@ create.params = {
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
optional: true
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -64,6 +66,7 @@ set.params = {
|
||||
type: 'object',
|
||||
properties: {
|
||||
id: {type: 'string'},
|
||||
name: {type: 'string', optional: true},
|
||||
type: {type: 'string'},
|
||||
key: {type: 'string'},
|
||||
method: {type: 'string'},
|
||||
@@ -84,7 +87,8 @@ set.params = {
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
optional: true
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -101,3 +105,13 @@ delete_.params = {
|
||||
}
|
||||
|
||||
export {delete_ as delete}
|
||||
|
||||
export async function runSequence ({idSequence}) {
|
||||
await this.runJobSequence(idSequence)
|
||||
}
|
||||
|
||||
runSequence.permission = 'admin'
|
||||
runSequence.description = 'Runs jobs sequentially, in the provided order'
|
||||
runSequence.params = {
|
||||
idSequence: {type: 'array', items: {type: 'string'}}
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
export async function get ({namespace}) {
|
||||
const logger = this.getLogger(namespace)
|
||||
const logger = await this.getLogger(namespace)
|
||||
|
||||
return new Promise((resolve, reject) => {
|
||||
const logs = {}
|
||||
@@ -16,3 +16,23 @@ export async function get ({namespace}) {
|
||||
}
|
||||
|
||||
get.description = 'returns logs list for one namespace'
|
||||
get.params = {
|
||||
namespace: { type: 'string' }
|
||||
}
|
||||
get.permission = 'admin'
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
async function delete_ ({namespace, id}) {
|
||||
const logger = await this.getLogger(namespace)
|
||||
logger.del(id)
|
||||
}
|
||||
|
||||
delete_.description = 'deletes one or several logs from a namespace'
|
||||
delete_.params = {
|
||||
id: { type: [ 'array', 'string' ] },
|
||||
namespace: { type: 'string' }
|
||||
}
|
||||
delete_.permission = 'admin'
|
||||
|
||||
export {delete_ as delete}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
async function delete_ ({message}) {
|
||||
await this.getXAPI(message).call('message.destroy', message.ref)
|
||||
async function delete_ ({ message }) {
|
||||
await this.getXapi(message).call('message.destroy', message._xapiRef)
|
||||
}
|
||||
export {delete_ as delete}
|
||||
|
||||
|
||||
38
src/api/network.js
Normal file
38
src/api/network.js
Normal file
@@ -0,0 +1,38 @@
|
||||
export async function create ({ pool, name, description, pif, mtu = 1500, vlan = 0 }) {
|
||||
return this.getXapi(pool).createNetwork({
|
||||
name,
|
||||
description,
|
||||
pifId: pif && this.getObject(pif, 'PIF')._xapiId,
|
||||
mtu: +mtu,
|
||||
vlan: +vlan
|
||||
})
|
||||
}
|
||||
|
||||
create.params = {
|
||||
pool: { type: 'string' },
|
||||
name: { type: 'string' },
|
||||
description: { type: 'string', optional: true },
|
||||
pif: { type: 'string', optional: true },
|
||||
mtu: { type: ['integer', 'string'], optional: true },
|
||||
vlan: { type: ['integer', 'string'], optional: true }
|
||||
}
|
||||
|
||||
create.resolve = {
|
||||
pool: ['pool', 'pool', 'administrate']
|
||||
}
|
||||
create.permission = 'admin'
|
||||
|
||||
// =================================================================
|
||||
|
||||
export async function delete_ ({ network }) {
|
||||
return this.getXapi(network).deleteNetwork(network._xapiId)
|
||||
}
|
||||
export {delete_ as delete}
|
||||
|
||||
delete_.params = {
|
||||
id: { type: 'string' }
|
||||
}
|
||||
|
||||
delete_.resolve = {
|
||||
network: ['id', 'network', 'administrate']
|
||||
}
|
||||
@@ -5,7 +5,7 @@
|
||||
|
||||
async function delete_ ({PBD}) {
|
||||
// TODO: check if PBD is attached before
|
||||
await this.getXAPI(PBD).call('PBD.destroy', PBD.ref)
|
||||
await this.getXapi(PBD).call('PBD.destroy', PBD._xapiRef)
|
||||
}
|
||||
export {delete_ as delete}
|
||||
|
||||
@@ -20,9 +20,8 @@ delete_.resolve = {
|
||||
// ===================================================================
|
||||
// Disconnect
|
||||
|
||||
export async function disconnect ({PBD}) {
|
||||
// TODO: check if PBD is attached before
|
||||
await this.getXAPI(PBD).call('PBD.unplug', PBD.ref)
|
||||
export async function disconnect ({ pbd }) {
|
||||
return this.getXapi(pbd).unplugPbd(pbd._xapiId)
|
||||
}
|
||||
|
||||
disconnect.params = {
|
||||
@@ -30,7 +29,7 @@ disconnect.params = {
|
||||
}
|
||||
|
||||
disconnect.resolve = {
|
||||
PBD: ['id', 'PBD', 'administrate']
|
||||
pbd: ['id', 'PBD', 'administrate']
|
||||
}
|
||||
|
||||
// ===================================================================
|
||||
@@ -38,7 +37,7 @@ disconnect.resolve = {
|
||||
|
||||
export async function connect ({PBD}) {
|
||||
// TODO: check if PBD is attached before
|
||||
await this.getXAPI(PBD).call('PBD.plug', PBD.ref)
|
||||
await this.getXapi(PBD).call('PBD.plug', PBD._xapiRef)
|
||||
}
|
||||
|
||||
connect.params = {
|
||||
|
||||
@@ -1,9 +1,11 @@
|
||||
// TODO: too low level, move into host.
|
||||
|
||||
// ===================================================================
|
||||
// Delete
|
||||
|
||||
async function delete_ ({PIF}) {
|
||||
async function delete_ ({pif}) {
|
||||
// TODO: check if PIF is attached before
|
||||
await this.getXAPI(PIF).call('PIF.destroy', PIF.ref)
|
||||
await this.getXapi(pif).call('PIF.destroy', pif._xapiRef)
|
||||
}
|
||||
export {delete_ as delete}
|
||||
|
||||
@@ -12,15 +14,15 @@ delete_.params = {
|
||||
}
|
||||
|
||||
delete_.resolve = {
|
||||
PIF: ['id', 'PIF', 'administrate']
|
||||
pif: ['id', 'PIF', 'administrate']
|
||||
}
|
||||
|
||||
// ===================================================================
|
||||
// Disconnect
|
||||
|
||||
export async function disconnect ({PIF}) {
|
||||
export async function disconnect ({pif}) {
|
||||
// TODO: check if PIF is attached before
|
||||
await this.getXAPI(PIF).call('PIF.unplug', PIF.ref)
|
||||
await this.getXapi(pif).call('PIF.unplug', pif._xapiRef)
|
||||
}
|
||||
|
||||
disconnect.params = {
|
||||
@@ -28,14 +30,14 @@ disconnect.params = {
|
||||
}
|
||||
|
||||
disconnect.resolve = {
|
||||
PIF: ['id', 'PIF', 'administrate']
|
||||
pif: ['id', 'PIF', 'administrate']
|
||||
}
|
||||
// ===================================================================
|
||||
// Connect
|
||||
|
||||
export async function connect ({PIF}) {
|
||||
export async function connect ({pif}) {
|
||||
// TODO: check if PIF is attached before
|
||||
await this.getXAPI(PIF).call('PIF.plug', PIF.ref)
|
||||
await this.getXapi(pif).call('PIF.plug', pif._xapiRef)
|
||||
}
|
||||
|
||||
connect.params = {
|
||||
@@ -43,5 +45,24 @@ connect.params = {
|
||||
}
|
||||
|
||||
connect.resolve = {
|
||||
PIF: ['id', 'PIF', 'administrate']
|
||||
pif: ['id', 'PIF', 'administrate']
|
||||
}
|
||||
// ===================================================================
|
||||
// Reconfigure IP
|
||||
|
||||
export async function reconfigureIp ({ pif, mode = 'DHCP', ip, netmask, gateway, dns }) {
|
||||
await this.getXapi(pif).call('PIF.reconfigure_ip', pif._xapiRef, mode, ip, netmask, gateway, dns)
|
||||
}
|
||||
|
||||
reconfigureIp.params = {
|
||||
id: { type: 'string', optional: true },
|
||||
mode: { type: 'string', optional: true },
|
||||
ip: { type: 'string', optional: true },
|
||||
netmask: { type: 'string', optional: true },
|
||||
gateway: { type: 'string', optional: true },
|
||||
dns: { type: 'string', optional: true }
|
||||
}
|
||||
|
||||
reconfigureIp.resolve = {
|
||||
pif: ['id', 'PIF', 'administrate']
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
export async function get () {
|
||||
return await this.getPlugins()
|
||||
return /* await */ this.getPlugins()
|
||||
}
|
||||
|
||||
get.description = 'returns a list of all installed plugins'
|
||||
@@ -86,3 +86,19 @@ unload.params = {
|
||||
}
|
||||
|
||||
unload.permission = 'admin'
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
export async function purgeConfiguration ({ id }) {
|
||||
await this.purgePluginConfiguration(id)
|
||||
}
|
||||
|
||||
purgeConfiguration.description = 'removes a plugin configuration'
|
||||
|
||||
purgeConfiguration.params = {
|
||||
id: {
|
||||
type: 'string'
|
||||
}
|
||||
}
|
||||
|
||||
purgeConfiguration.permission = 'admin'
|
||||
|
||||
@@ -1,12 +1,18 @@
|
||||
import {JsonRpcError} from '../api-errors'
|
||||
import {extractProperty} from '../utils'
|
||||
import {GenericError} from '../api-errors'
|
||||
|
||||
// ===================================================================
|
||||
|
||||
export async function set (params) {
|
||||
const pool = extractProperty(params, 'pool')
|
||||
export async function set ({
|
||||
pool,
|
||||
|
||||
await this.getXAPI(pool).setPoolProperties(params)
|
||||
// TODO: use camel case.
|
||||
name_description: nameDescription,
|
||||
name_label: nameLabel
|
||||
}) {
|
||||
await this.getXapi(pool).setPoolProperties({
|
||||
nameDescription,
|
||||
nameLabel
|
||||
})
|
||||
}
|
||||
|
||||
set.params = {
|
||||
@@ -29,8 +35,27 @@ set.resolve = {
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
export async function setDefaultSr ({pool, sr}) {
|
||||
await this.getXapi(pool).setDefaultSr(sr._xapiId)
|
||||
}
|
||||
|
||||
setDefaultSr.params = {
|
||||
pool: {
|
||||
type: 'string'
|
||||
},
|
||||
sr: {
|
||||
type: 'string'
|
||||
}
|
||||
}
|
||||
|
||||
setDefaultSr.resolve = {
|
||||
pool: ['pool', 'pool', 'administrate'],
|
||||
sr: ['sr', 'SR']
|
||||
}
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
export async function installPatch ({pool, patch: patchUuid}) {
|
||||
await this.getXAPI(pool).installPoolPatchOnAllHosts(patchUuid)
|
||||
await this.getXapi(pool).installPoolPatchOnAllHosts(patchUuid)
|
||||
}
|
||||
|
||||
installPatch.params = {
|
||||
@@ -49,14 +74,14 @@ installPatch.resolve = {
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
async function handlePatchUpload (req, res, {pool}) {
|
||||
const {headers: {['content-length']: contentLength}} = req
|
||||
const contentLength = req.headers['content-length']
|
||||
if (!contentLength) {
|
||||
res.writeHead(411)
|
||||
res.end('Content length is mandatory')
|
||||
return
|
||||
}
|
||||
|
||||
await this.getXAPI(pool).uploadPoolPatch(req, contentLength)
|
||||
await this.getXapi(pool).uploadPoolPatch(req, contentLength)
|
||||
}
|
||||
|
||||
export async function uploadPatch ({pool}) {
|
||||
@@ -82,10 +107,10 @@ export {uploadPatch as patch}
|
||||
|
||||
export async function mergeInto ({ source, target, force }) {
|
||||
try {
|
||||
await this.mergeXenPools(source.id, target.id, force)
|
||||
await this.mergeXenPools(source._xapiId, target._xapiId, force)
|
||||
} catch (e) {
|
||||
// FIXME: should we expose plain XAPI error messages?
|
||||
throw new JsonRpcError(e.message)
|
||||
throw new GenericError(e.message)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -99,3 +124,22 @@ mergeInto.resolve = {
|
||||
source: ['source', 'pool', 'administrate'],
|
||||
target: ['target', 'pool', 'administrate']
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
export async function getLicenseState ({pool}) {
|
||||
return this.getXapi(pool).call(
|
||||
'pool.get_license_state',
|
||||
pool._xapiId.$ref,
|
||||
)
|
||||
}
|
||||
|
||||
getLicenseState.params = {
|
||||
pool: {
|
||||
type: 'string'
|
||||
}
|
||||
}
|
||||
|
||||
getLicenseState.resolve = {
|
||||
pool: ['pool', 'pool', 'administrate']
|
||||
}
|
||||
|
||||
@@ -1,12 +1,12 @@
|
||||
export async function getAll () {
|
||||
return await this.getAllRemotes()
|
||||
return this.getAllRemotes()
|
||||
}
|
||||
|
||||
getAll.permission = 'admin'
|
||||
getAll.description = 'Gets all existing fs remote points'
|
||||
|
||||
export async function get (id) {
|
||||
return await this.getRemote(id)
|
||||
export async function get ({id}) {
|
||||
return this.getRemote(id)
|
||||
}
|
||||
|
||||
get.permission = 'admin'
|
||||
@@ -15,8 +15,18 @@ get.params = {
|
||||
id: {type: 'string'}
|
||||
}
|
||||
|
||||
export async function list (id) {
|
||||
return await this.listRemote(id)
|
||||
export async function test ({id}) {
|
||||
return this.testRemote(id)
|
||||
}
|
||||
|
||||
test.permission = 'admin'
|
||||
test.description = 'Performs a read/write matching test on a remote point'
|
||||
test.params = {
|
||||
id: {type: 'string'}
|
||||
}
|
||||
|
||||
export async function list ({id}) {
|
||||
return this.listRemoteBackups(id)
|
||||
}
|
||||
|
||||
list.permission = 'admin'
|
||||
@@ -26,7 +36,7 @@ list.params = {
|
||||
}
|
||||
|
||||
export async function create ({name, url}) {
|
||||
return await this.createRemote({name, url})
|
||||
return this.createRemote({name, url})
|
||||
}
|
||||
|
||||
create.permission = 'admin'
|
||||
@@ -49,22 +59,6 @@ set.params = {
|
||||
enabled: {type: 'boolean', optional: true}
|
||||
}
|
||||
|
||||
export async function importVm ({id, file, host}) {
|
||||
await this.importVmFromRemote(id, file, host)
|
||||
}
|
||||
|
||||
importVm.permission = 'admin'
|
||||
importVm.description = 'Imports a VM into host, from a file found in the chosen remote'
|
||||
importVm.params = {
|
||||
id: {type: 'string'},
|
||||
file: {type: 'string'},
|
||||
host: {type: 'string'}
|
||||
}
|
||||
|
||||
importVm.resolve = {
|
||||
host: ['host', 'host', 'administrate']
|
||||
}
|
||||
|
||||
async function delete_ ({id}) {
|
||||
await this.removeRemote(id)
|
||||
}
|
||||
|
||||
229
src/api/resource-set.js
Normal file
229
src/api/resource-set.js
Normal file
@@ -0,0 +1,229 @@
|
||||
import {
|
||||
Unauthorized
|
||||
} from '../api-errors'
|
||||
|
||||
// ===================================================================
|
||||
|
||||
export function create ({ name, subjects, objects, limits }) {
|
||||
return this.createResourceSet(name, subjects, objects, limits)
|
||||
}
|
||||
|
||||
create.permission = 'admin'
|
||||
|
||||
create.params = {
|
||||
name: {
|
||||
type: 'string'
|
||||
},
|
||||
subjects: {
|
||||
type: 'array',
|
||||
items: {
|
||||
type: 'string'
|
||||
},
|
||||
optional: true
|
||||
},
|
||||
objects: {
|
||||
type: 'array',
|
||||
items: {
|
||||
type: 'string'
|
||||
},
|
||||
optional: true
|
||||
},
|
||||
limits: {
|
||||
type: 'object',
|
||||
optional: true
|
||||
}
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
function delete_ ({ id }) {
|
||||
return this.deleteResourceSet(id)
|
||||
}
|
||||
export { delete_ as delete }
|
||||
|
||||
delete_.permission = 'admin'
|
||||
|
||||
delete_.params = {
|
||||
id: {
|
||||
type: 'string'
|
||||
}
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
export function set ({ id, name, subjects, objects, limits }) {
|
||||
return this.updateResourceSet(id, {
|
||||
limits,
|
||||
name,
|
||||
objects,
|
||||
subjects
|
||||
})
|
||||
}
|
||||
|
||||
set.permission = 'admin'
|
||||
|
||||
set.params = {
|
||||
id: {
|
||||
type: 'string'
|
||||
},
|
||||
name: {
|
||||
type: 'string',
|
||||
optional: true
|
||||
},
|
||||
subjects: {
|
||||
type: 'array',
|
||||
items: {
|
||||
type: 'string'
|
||||
},
|
||||
optional: true
|
||||
},
|
||||
objects: {
|
||||
type: 'array',
|
||||
items: {
|
||||
type: 'string'
|
||||
},
|
||||
optional: true
|
||||
},
|
||||
limits: {
|
||||
type: 'object',
|
||||
optional: true
|
||||
}
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
export function get ({ id }) {
|
||||
return this.getResourceSet(id)
|
||||
}
|
||||
|
||||
get.permission = 'admin'
|
||||
|
||||
get.params = {
|
||||
id: {
|
||||
type: 'string'
|
||||
}
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
export async function getAll () {
|
||||
const { user } = this
|
||||
if (!user) {
|
||||
throw new Unauthorized()
|
||||
}
|
||||
|
||||
return this.getAllResourceSets(user.id)
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
export function addObject ({ id, object }) {
|
||||
return this.addObjectToResourceSet(object, id)
|
||||
}
|
||||
|
||||
addObject.permission = 'admin'
|
||||
|
||||
addObject.params = {
|
||||
id: {
|
||||
type: 'string'
|
||||
},
|
||||
object: {
|
||||
type: 'string'
|
||||
}
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
export function removeObject ({ id, object }) {
|
||||
return this.removeObjectFromResourceSet(object, id)
|
||||
}
|
||||
|
||||
removeObject.permission = 'admin'
|
||||
|
||||
removeObject.params = {
|
||||
id: {
|
||||
type: 'string'
|
||||
},
|
||||
object: {
|
||||
type: 'string'
|
||||
}
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
export function addSubject ({ id, subject }) {
|
||||
return this.addSubjectToResourceSet(subject, id)
|
||||
}
|
||||
|
||||
addSubject.permission = 'admin'
|
||||
|
||||
addSubject.params = {
|
||||
id: {
|
||||
type: 'string'
|
||||
},
|
||||
subject: {
|
||||
type: 'string'
|
||||
}
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
export function removeSubject ({ id, subject }) {
|
||||
return this.removeSubjectFromResourceSet(subject, id)
|
||||
}
|
||||
|
||||
removeSubject.permission = 'admin'
|
||||
|
||||
removeSubject.params = {
|
||||
id: {
|
||||
type: 'string'
|
||||
},
|
||||
subject: {
|
||||
type: 'string'
|
||||
}
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
export function addLimit ({ id, limitId, quantity }) {
|
||||
return this.addLimitToResourceSet(limitId, quantity, id)
|
||||
}
|
||||
|
||||
addLimit.permission = 'admin'
|
||||
|
||||
addLimit.params = {
|
||||
id: {
|
||||
type: 'string'
|
||||
},
|
||||
limitId: {
|
||||
type: 'string'
|
||||
},
|
||||
quantity: {
|
||||
type: 'integer'
|
||||
}
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
export function removeLimit ({ id, limitId }) {
|
||||
return this.removeLimitFromResourceSet(limitId, id)
|
||||
}
|
||||
|
||||
removeLimit.permission = 'admin'
|
||||
|
||||
removeLimit.params = {
|
||||
id: {
|
||||
type: 'string'
|
||||
},
|
||||
limitId: {
|
||||
type: 'string'
|
||||
}
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
export function recomputeAllLimits () {
|
||||
return this.recomputeResourceSetsLimits()
|
||||
}
|
||||
|
||||
recomputeAllLimits.permission = 'admin'
|
||||
@@ -1,3 +1,3 @@
|
||||
export async function getAll () {
|
||||
return await this.getRoles()
|
||||
return /* await */ this.getRoles()
|
||||
}
|
||||
|
||||
@@ -1,14 +1,14 @@
|
||||
// FIXME so far, no acls for schedules
|
||||
|
||||
export async function getAll () {
|
||||
return await this.getAllSchedules()
|
||||
return /* await */ this.getAllSchedules()
|
||||
}
|
||||
|
||||
getAll.permission = 'admin'
|
||||
getAll.description = 'Gets all existing schedules'
|
||||
|
||||
export async function get (id) {
|
||||
return await this.getSchedule(id)
|
||||
return /* await */ this.getSchedule(id)
|
||||
}
|
||||
|
||||
get.permission = 'admin'
|
||||
@@ -17,8 +17,8 @@ get.params = {
|
||||
id: {type: 'string'}
|
||||
}
|
||||
|
||||
export async function create ({jobId, cron, enabled}) {
|
||||
return await this.createSchedule(this.session.get('user_id'), {job: jobId, cron, enabled})
|
||||
export async function create ({ jobId, cron, enabled, name, timezone }) {
|
||||
return /* await */ this.createSchedule(this.session.get('user_id'), { job: jobId, cron, enabled, name, timezone })
|
||||
}
|
||||
|
||||
create.permission = 'admin'
|
||||
@@ -26,11 +26,12 @@ create.description = 'Creates a new schedule'
|
||||
create.params = {
|
||||
jobId: {type: 'string'},
|
||||
cron: {type: 'string'},
|
||||
enabled: {type: 'boolean', optional: true}
|
||||
enabled: {type: 'boolean', optional: true},
|
||||
name: {type: 'string', optional: true}
|
||||
}
|
||||
|
||||
export async function set ({id, jobId, cron, enabled}) {
|
||||
await this.updateSchedule(id, {job: jobId, cron, enabled})
|
||||
export async function set ({ id, jobId, cron, enabled, name, timezone }) {
|
||||
await this.updateSchedule(id, { job: jobId, cron, enabled, name, timezone })
|
||||
}
|
||||
|
||||
set.permission = 'admin'
|
||||
@@ -39,7 +40,8 @@ set.params = {
|
||||
id: {type: 'string'},
|
||||
jobId: {type: 'string', optional: true},
|
||||
cron: {type: 'string', optional: true},
|
||||
enabled: {type: 'boolean', optional: true}
|
||||
enabled: {type: 'boolean', optional: true},
|
||||
name: {type: 'string', optional: true}
|
||||
}
|
||||
|
||||
async function delete_ ({id}) {
|
||||
|
||||
@@ -23,7 +23,7 @@ disable.params = {
|
||||
}
|
||||
|
||||
export function getScheduleTable () {
|
||||
return this.scheduler.scheduleTable
|
||||
return this.scheduleTable
|
||||
}
|
||||
|
||||
disable.permission = 'admin'
|
||||
|
||||
@@ -1,14 +1,20 @@
|
||||
import {
|
||||
noop,
|
||||
pCatch
|
||||
} from '../utils'
|
||||
|
||||
export async function add ({
|
||||
host,
|
||||
username,
|
||||
password,
|
||||
readOnly,
|
||||
autoConnect = true
|
||||
}) {
|
||||
const server = await this.registerXenServer({host, username, password})
|
||||
const server = await this.registerXenServer({host, username, password, readOnly})
|
||||
|
||||
if (autoConnect) {
|
||||
// Connect asynchronously, ignore any error.
|
||||
this.connectXenServer(server.id).catch(() => {})
|
||||
// Connect asynchronously, ignore any errors.
|
||||
this.connectXenServer(server.id)::pCatch(noop)
|
||||
}
|
||||
|
||||
return server.id
|
||||
@@ -54,14 +60,8 @@ remove.params = {
|
||||
|
||||
// TODO: remove this function when users are integrated to the main
|
||||
// collection.
|
||||
export async function getAll () {
|
||||
const servers = await this._servers.get()
|
||||
|
||||
for (let i = 0, n = servers.length; i < n; ++i) {
|
||||
servers[i] = this.getServerPublicProperties(servers[i])
|
||||
}
|
||||
|
||||
return servers
|
||||
export function getAll () {
|
||||
return this.getAllXenServers()
|
||||
}
|
||||
|
||||
getAll.description = 'returns all the registered Xen server'
|
||||
@@ -70,11 +70,11 @@ getAll.permission = 'admin'
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
export async function set ({id, host, username, password}) {
|
||||
await this.updateXenServer(id, {host, username, password})
|
||||
export async function set ({id, host, username, password, readOnly}) {
|
||||
await this.updateXenServer(id, {host, username, password, readOnly})
|
||||
}
|
||||
|
||||
set.description = 'changes the propeorties of a Xen server'
|
||||
set.description = 'changes the properties of a Xen server'
|
||||
|
||||
set.permission = 'admin'
|
||||
|
||||
@@ -99,6 +99,7 @@ set.params = {
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
export async function connect ({id}) {
|
||||
this.updateXenServer(id, {enabled: true})::pCatch(noop)
|
||||
await this.connectXenServer(id)
|
||||
}
|
||||
|
||||
@@ -115,6 +116,7 @@ connect.params = {
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
export async function disconnect ({id}) {
|
||||
this.updateXenServer(id, {enabled: false})::pCatch(noop)
|
||||
await this.disconnectXenServer(id)
|
||||
}
|
||||
|
||||
|
||||
125
src/api/sr.js
125
src/api/sr.js
@@ -1,3 +1,4 @@
|
||||
import { asInteger } from '../xapi/utils'
|
||||
import {
|
||||
ensureArray,
|
||||
forEach,
|
||||
@@ -6,11 +7,17 @@ import {
|
||||
|
||||
// ===================================================================
|
||||
|
||||
export async function set (params) {
|
||||
const {sr} = params
|
||||
delete params.sr
|
||||
export async function set ({
|
||||
sr,
|
||||
|
||||
await this.getXAPI(sr).setSrProperties(sr.id, params)
|
||||
// TODO: use camel case.
|
||||
name_description: nameDescription,
|
||||
name_label: nameLabel
|
||||
}) {
|
||||
await this.getXapi(sr).setSrProperties(sr._xapiId, {
|
||||
nameDescription,
|
||||
nameLabel
|
||||
})
|
||||
}
|
||||
|
||||
set.params = {
|
||||
@@ -28,7 +35,7 @@ set.resolve = {
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
export async function scan ({SR}) {
|
||||
await this.getXAPI(SR).call('SR.scan', SR.ref)
|
||||
await this.getXapi(SR).call('SR.scan', SR._xapiRef)
|
||||
}
|
||||
|
||||
scan.params = {
|
||||
@@ -42,8 +49,8 @@ scan.resolve = {
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
// TODO: find a way to call this "delete" and not destroy
|
||||
export async function destroy ({SR}) {
|
||||
await this.getXAPI(SR).call('SR.destroy', SR.ref)
|
||||
export async function destroy ({ sr }) {
|
||||
await this.getXapi(sr).destroySr(sr._xapiId)
|
||||
}
|
||||
|
||||
destroy.params = {
|
||||
@@ -51,13 +58,13 @@ destroy.params = {
|
||||
}
|
||||
|
||||
destroy.resolve = {
|
||||
SR: ['id', 'SR', 'administrate']
|
||||
sr: ['id', 'SR', 'administrate']
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
export async function forget ({SR}) {
|
||||
await this.getXAPI(SR).call('SR.forget', SR.ref)
|
||||
await this.getXapi(SR).forgetSr(SR._xapiId)
|
||||
}
|
||||
|
||||
forget.params = {
|
||||
@@ -70,24 +77,59 @@ forget.resolve = {
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
export async function connectAllPbds ({SR}) {
|
||||
await this.getXapi(SR).connectAllSrPbds(SR._xapiId)
|
||||
}
|
||||
|
||||
connectAllPbds.params = {
|
||||
id: { type: 'string' }
|
||||
}
|
||||
|
||||
connectAllPbds.resolve = {
|
||||
SR: ['id', 'SR', 'administrate']
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
export async function disconnectAllPbds ({SR}) {
|
||||
await this.getXapi(SR).disconnectAllSrPbds(SR._xapiId)
|
||||
}
|
||||
|
||||
disconnectAllPbds.params = {
|
||||
id: { type: 'string' }
|
||||
}
|
||||
|
||||
disconnectAllPbds.resolve = {
|
||||
SR: ['id', 'SR', 'administrate']
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
export async function createIso ({
|
||||
host,
|
||||
nameLabel,
|
||||
nameDescription,
|
||||
path
|
||||
path,
|
||||
type,
|
||||
user,
|
||||
password
|
||||
}) {
|
||||
const xapi = this.getXAPI(host)
|
||||
const xapi = this.getXapi(host)
|
||||
|
||||
// FIXME: won't work for IPv6
|
||||
// Detect if NFS or local path for ISO files
|
||||
const deviceConfig = {location: path}
|
||||
if (path.indexOf(':') === -1) { // not NFS share
|
||||
// TODO: legacy will be removed in XAPI soon by FileSR
|
||||
const deviceConfig = {}
|
||||
if (type === 'local') {
|
||||
deviceConfig.legacy_mode = 'true'
|
||||
} else if (type === 'smb') {
|
||||
path = path.replace(/\\/g, '/')
|
||||
deviceConfig.username = user
|
||||
deviceConfig.cifspassword = password
|
||||
}
|
||||
|
||||
deviceConfig.location = path
|
||||
|
||||
const srRef = await xapi.call(
|
||||
'SR.create',
|
||||
host.ref,
|
||||
host._xapiRef,
|
||||
deviceConfig,
|
||||
'0', // SR size 0 because ISO
|
||||
nameLabel,
|
||||
@@ -106,7 +148,10 @@ createIso.params = {
|
||||
host: { type: 'string' },
|
||||
nameLabel: { type: 'string' },
|
||||
nameDescription: { type: 'string' },
|
||||
path: { type: 'string' }
|
||||
path: { type: 'string' },
|
||||
type: { type: 'string' },
|
||||
user: { type: 'string', optional: true },
|
||||
password: { type: 'string', optional: true }
|
||||
}
|
||||
|
||||
createIso.resolve = {
|
||||
@@ -126,7 +171,7 @@ export async function createNfs ({
|
||||
serverPath,
|
||||
nfsVersion
|
||||
}) {
|
||||
const xapi = this.getXAPI(host)
|
||||
const xapi = this.getXapi(host)
|
||||
|
||||
const deviceConfig = {
|
||||
server,
|
||||
@@ -140,7 +185,7 @@ export async function createNfs ({
|
||||
|
||||
const srRef = await xapi.call(
|
||||
'SR.create',
|
||||
host.ref,
|
||||
host._xapiRef,
|
||||
deviceConfig,
|
||||
'0',
|
||||
nameLabel,
|
||||
@@ -179,7 +224,7 @@ export async function createLvm ({
|
||||
nameDescription,
|
||||
device
|
||||
}) {
|
||||
const xapi = this.getXAPI(host)
|
||||
const xapi = this.getXapi(host)
|
||||
|
||||
const deviceConfig = {
|
||||
device
|
||||
@@ -187,7 +232,7 @@ export async function createLvm ({
|
||||
|
||||
const srRef = await xapi.call(
|
||||
'SR.create',
|
||||
host.ref,
|
||||
host._xapiRef,
|
||||
deviceConfig,
|
||||
'0',
|
||||
nameLabel,
|
||||
@@ -221,7 +266,7 @@ export async function probeNfs ({
|
||||
host,
|
||||
server
|
||||
}) {
|
||||
const xapi = this.getXAPI(host)
|
||||
const xapi = this.getXapi(host)
|
||||
|
||||
const deviceConfig = {
|
||||
server
|
||||
@@ -232,7 +277,7 @@ export async function probeNfs ({
|
||||
try {
|
||||
await xapi.call(
|
||||
'SR.probe',
|
||||
host.ref,
|
||||
host._xapiRef,
|
||||
deviceConfig,
|
||||
'nfs',
|
||||
{}
|
||||
@@ -284,7 +329,7 @@ export async function createIscsi ({
|
||||
chapUser,
|
||||
chapPassword
|
||||
}) {
|
||||
const xapi = this.getXAPI(host)
|
||||
const xapi = this.getXapi(host)
|
||||
|
||||
const deviceConfig = {
|
||||
target,
|
||||
@@ -300,12 +345,12 @@ export async function createIscsi ({
|
||||
|
||||
// if we give another port than default iSCSI
|
||||
if (port) {
|
||||
deviceConfig.port = port
|
||||
deviceConfig.port = asInteger(port)
|
||||
}
|
||||
|
||||
const srRef = await xapi.call(
|
||||
'SR.create',
|
||||
host.ref,
|
||||
host._xapiRef,
|
||||
deviceConfig,
|
||||
'0',
|
||||
nameLabel,
|
||||
@@ -347,7 +392,7 @@ export async function probeIscsiIqns ({
|
||||
chapUser,
|
||||
chapPassword
|
||||
}) {
|
||||
const xapi = this.getXAPI(host)
|
||||
const xapi = this.getXapi(host)
|
||||
|
||||
const deviceConfig = {
|
||||
target: targetIp
|
||||
@@ -361,7 +406,7 @@ export async function probeIscsiIqns ({
|
||||
|
||||
// if we give another port than default iSCSI
|
||||
if (port) {
|
||||
deviceConfig.port = port
|
||||
deviceConfig.port = asInteger(port)
|
||||
}
|
||||
|
||||
let xml
|
||||
@@ -369,7 +414,7 @@ export async function probeIscsiIqns ({
|
||||
try {
|
||||
await xapi.call(
|
||||
'SR.probe',
|
||||
host.ref,
|
||||
host._xapiRef,
|
||||
deviceConfig,
|
||||
'lvmoiscsi',
|
||||
{}
|
||||
@@ -424,7 +469,7 @@ export async function probeIscsiLuns ({
|
||||
chapUser,
|
||||
chapPassword
|
||||
}) {
|
||||
const xapi = this.getXAPI(host)
|
||||
const xapi = this.getXapi(host)
|
||||
|
||||
const deviceConfig = {
|
||||
target: targetIp,
|
||||
@@ -439,7 +484,7 @@ export async function probeIscsiLuns ({
|
||||
|
||||
// if we give another port than default iSCSI
|
||||
if (port) {
|
||||
deviceConfig.port = port
|
||||
deviceConfig.port = asInteger(port)
|
||||
}
|
||||
|
||||
let xml
|
||||
@@ -447,7 +492,7 @@ export async function probeIscsiLuns ({
|
||||
try {
|
||||
await xapi.call(
|
||||
'SR.probe',
|
||||
host.ref,
|
||||
host._xapiRef,
|
||||
deviceConfig,
|
||||
'lvmoiscsi',
|
||||
{}
|
||||
@@ -502,7 +547,7 @@ export async function probeIscsiExists ({
|
||||
chapUser,
|
||||
chapPassword
|
||||
}) {
|
||||
const xapi = this.getXAPI(host)
|
||||
const xapi = this.getXapi(host)
|
||||
|
||||
const deviceConfig = {
|
||||
target: targetIp,
|
||||
@@ -518,10 +563,10 @@ export async function probeIscsiExists ({
|
||||
|
||||
// if we give another port than default iSCSI
|
||||
if (port) {
|
||||
deviceConfig.port = port
|
||||
deviceConfig.port = asInteger(port)
|
||||
}
|
||||
|
||||
const xml = parseXml(await xapi.call('SR.probe', host.ref, deviceConfig, 'lvmoiscsi', {}))
|
||||
const xml = parseXml(await xapi.call('SR.probe', host._xapiRef, deviceConfig, 'lvmoiscsi', {}))
|
||||
|
||||
const srs = []
|
||||
forEach(ensureArray(xml['SRlist'].SR), sr => {
|
||||
@@ -555,14 +600,14 @@ export async function probeNfsExists ({
|
||||
server,
|
||||
serverPath
|
||||
}) {
|
||||
const xapi = this.getXAPI(host)
|
||||
const xapi = this.getXapi(host)
|
||||
|
||||
const deviceConfig = {
|
||||
server,
|
||||
serverpath: serverPath
|
||||
}
|
||||
|
||||
const xml = parseXml(await xapi.call('SR.probe', host.ref, deviceConfig, 'nfs', {}))
|
||||
const xml = parseXml(await xapi.call('SR.probe', host._xapiRef, deviceConfig, 'nfs', {}))
|
||||
|
||||
const srs = []
|
||||
|
||||
@@ -594,7 +639,7 @@ export async function reattach ({
|
||||
nameDescription,
|
||||
type
|
||||
}) {
|
||||
const xapi = this.getXAPI(host)
|
||||
const xapi = this.getXapi(host)
|
||||
|
||||
if (type === 'iscsi') {
|
||||
type = 'lvmoiscsi' // the internal XAPI name
|
||||
@@ -637,7 +682,7 @@ export async function reattachIso ({
|
||||
nameDescription,
|
||||
type
|
||||
}) {
|
||||
const xapi = this.getXAPI(host)
|
||||
const xapi = this.getXapi(host)
|
||||
|
||||
if (type === 'iscsi') {
|
||||
type = 'lvmoiscsi' // the internal XAPI name
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
export async function add ({tag, object}) {
|
||||
await this.getXAPI(object).addTag(object.id, tag)
|
||||
await this.getXapi(object).addTag(object._xapiId, tag)
|
||||
}
|
||||
|
||||
add.description = 'add a new tag to an object'
|
||||
@@ -16,7 +16,7 @@ add.params = {
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
export async function remove ({tag, object}) {
|
||||
await this.getXAPI(object).removeTag(object.id, tag)
|
||||
await this.getXapi(object).removeTag(object._xapiId, tag)
|
||||
}
|
||||
|
||||
remove.description = 'remove an existing tag from an object'
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
export async function cancel ({task}) {
|
||||
await this.getXAPI(task).call('task.cancel', task.ref)
|
||||
await this.getXapi(task).call('task.cancel', task._xapiRef)
|
||||
}
|
||||
|
||||
cancel.params = {
|
||||
@@ -13,7 +13,7 @@ cancel.resolve = {
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
export async function destroy ({task}) {
|
||||
await this.getXAPI(task).call('task.destroy', task.ref)
|
||||
await this.getXapi(task).call('task.destroy', task._xapiRef)
|
||||
}
|
||||
|
||||
destroy.params = {
|
||||
|
||||
@@ -22,7 +22,7 @@ create.params = {
|
||||
// Deletes an existing user.
|
||||
async function delete_ ({id}) {
|
||||
if (id === this.session.get('user_id')) {
|
||||
throw new InvalidParameters('an user cannot delete itself')
|
||||
throw new InvalidParameters('a user cannot delete itself')
|
||||
}
|
||||
|
||||
await this.deleteUser(id)
|
||||
@@ -45,7 +45,7 @@ delete_.params = {
|
||||
// collection.
|
||||
export async function getAll () {
|
||||
// Retrieves the users.
|
||||
const users = await this._users.get()
|
||||
const users = await this.getAllUsers()
|
||||
|
||||
// Filters out private properties.
|
||||
return mapToArray(users, this.getUserPublicProperties)
|
||||
@@ -57,8 +57,11 @@ getAll.permission = 'admin'
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
export async function set ({id, email, password, permission}) {
|
||||
await this.updateUser(id, {email, password, permission})
|
||||
export async function set ({id, email, password, permission, preferences}) {
|
||||
if (permission && id === this.session.get('user_id')) {
|
||||
throw new InvalidParameters('a user cannot change its own permission')
|
||||
}
|
||||
await this.updateUser(id, {email, password, permission, preferences})
|
||||
}
|
||||
|
||||
set.description = 'changes the properties of an existing user'
|
||||
@@ -69,7 +72,8 @@ set.params = {
|
||||
id: { type: 'string' },
|
||||
email: { type: 'string', optional: true },
|
||||
password: { type: 'string', optional: true },
|
||||
permission: { type: 'string', optional: true }
|
||||
permission: { type: 'string', optional: true },
|
||||
preferences: { type: 'object', optional: true }
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
@@ -5,10 +5,10 @@
|
||||
#=====================================================================
|
||||
|
||||
delete_ = $coroutine ({vbd}) ->
|
||||
xapi = @getXAPI vbd
|
||||
xapi = @getXapi vbd
|
||||
|
||||
# TODO: check if VBD is attached before
|
||||
yield xapi.call 'VBD.destroy', vbd.ref
|
||||
yield xapi.call 'VBD.destroy', vbd._xapiRef
|
||||
|
||||
return true
|
||||
|
||||
@@ -25,12 +25,9 @@ exports.delete = delete_
|
||||
#---------------------------------------------------------------------
|
||||
|
||||
disconnect = $coroutine ({vbd}) ->
|
||||
xapi = @getXAPI vbd
|
||||
|
||||
# TODO: check if VBD is attached before
|
||||
yield xapi.call 'VBD.unplug_force', vbd.ref
|
||||
|
||||
return true
|
||||
xapi = @getXapi vbd
|
||||
yield xapi.disconnectVbd(vbd._xapiRef)
|
||||
return
|
||||
|
||||
disconnect.params = {
|
||||
id: { type: 'string' }
|
||||
@@ -45,12 +42,9 @@ exports.disconnect = disconnect
|
||||
#---------------------------------------------------------------------
|
||||
|
||||
connect = $coroutine ({vbd}) ->
|
||||
xapi = @getXAPI vbd
|
||||
|
||||
# TODO: check if VBD is attached before
|
||||
yield xapi.call 'VBD.plug', vbd.ref
|
||||
|
||||
return true
|
||||
xapi = @getXapi vbd
|
||||
yield xapi.connectVbd(vbd._xapiRef)
|
||||
return
|
||||
|
||||
connect.params = {
|
||||
id: { type: 'string' }
|
||||
@@ -66,9 +60,9 @@ exports.connect = connect
|
||||
|
||||
set = $coroutine (params) ->
|
||||
{vbd} = params
|
||||
xapi = @getXAPI vbd
|
||||
xapi = @getXapi vbd
|
||||
|
||||
{ref} = vbd
|
||||
{ _xapiRef: ref } = vbd
|
||||
|
||||
# VBD position
|
||||
if 'position' of params
|
||||
@@ -87,3 +81,29 @@ set.resolve = {
|
||||
}
|
||||
|
||||
exports.set = set
|
||||
|
||||
#---------------------------------------------------------------------
|
||||
|
||||
setBootable = $coroutine ({vbd, bootable}) ->
|
||||
xapi = @getXapi vbd
|
||||
{ _xapiRef: ref } = vbd
|
||||
|
||||
yield xapi.call 'VBD.set_bootable', ref, bootable
|
||||
return
|
||||
|
||||
setBootable.params = {
|
||||
vbd: { type: 'string' }
|
||||
bootable: { type: 'boolean' }
|
||||
}
|
||||
|
||||
setBootable.resolve = {
|
||||
vbd: ['vbd', 'VBD', 'administrate'],
|
||||
}
|
||||
|
||||
exports.setBootable = setBootable
|
||||
|
||||
#=====================================================================
|
||||
|
||||
Object.defineProperty(exports, '__esModule', {
|
||||
value: true
|
||||
})
|
||||
|
||||
@@ -1,15 +1,16 @@
|
||||
# FIXME: rename to disk.*
|
||||
|
||||
$isArray = require 'lodash.isarray'
|
||||
{coroutine: $coroutine} = require 'bluebird'
|
||||
|
||||
{format} = require 'json-rpc-peer'
|
||||
{InvalidParameters} = require '../api-errors'
|
||||
{parseSize} = require '../utils'
|
||||
{isArray: $isArray, parseSize} = require '../utils'
|
||||
{JsonRpcError} = require '../api-errors'
|
||||
|
||||
#=====================================================================
|
||||
|
||||
delete_ = $coroutine ({vdi}) ->
|
||||
yield @getXAPI(vdi).deleteVdi(vdi.id)
|
||||
yield @getXapi(vdi).deleteVdi(vdi._xapiId)
|
||||
|
||||
return
|
||||
|
||||
@@ -18,7 +19,7 @@ delete_.params = {
|
||||
}
|
||||
|
||||
delete_.resolve = {
|
||||
vdi: ['id', 'VDI', 'administrate'],
|
||||
vdi: ['id', ['VDI', 'VDI-snapshot'], 'administrate'],
|
||||
}
|
||||
|
||||
exports.delete = delete_
|
||||
@@ -28,9 +29,9 @@ exports.delete = delete_
|
||||
# FIXME: human readable strings should be handled.
|
||||
set = $coroutine (params) ->
|
||||
{vdi} = params
|
||||
xapi = @getXAPI vdi
|
||||
xapi = @getXapi vdi
|
||||
|
||||
{ref} = vdi
|
||||
{_xapiRef: ref} = vdi
|
||||
|
||||
# Size.
|
||||
if 'size' of params
|
||||
@@ -40,8 +41,7 @@ set = $coroutine (params) ->
|
||||
throw new InvalidParameters(
|
||||
"cannot set new size (#{size}) below the current size (#{vdi.size})"
|
||||
)
|
||||
|
||||
yield xapi.call 'VDI.resize_online', ref, "#{size}"
|
||||
yield xapi.resizeVdi(ref, size)
|
||||
|
||||
# Other fields.
|
||||
for param, fields of {
|
||||
@@ -68,7 +68,7 @@ set.params = {
|
||||
}
|
||||
|
||||
set.resolve = {
|
||||
vdi: ['id', 'VDI', 'administrate'],
|
||||
vdi: ['id', ['VDI', 'VDI-snapshot'], 'administrate'],
|
||||
}
|
||||
|
||||
exports.set = set
|
||||
@@ -76,10 +76,9 @@ exports.set = set
|
||||
#---------------------------------------------------------------------
|
||||
|
||||
migrate = $coroutine ({vdi, sr}) ->
|
||||
xapi = @getXAPI vdi
|
||||
xapi = @getXapi vdi
|
||||
|
||||
# TODO: check if VDI is attached before
|
||||
yield xapi.call 'VDI.pool_migrate', vdi.ref, sr.ref, {}
|
||||
yield xapi.moveVdi(vdi._xapiRef, sr._xapiRef)
|
||||
|
||||
return true
|
||||
|
||||
@@ -89,8 +88,14 @@ migrate.params = {
|
||||
}
|
||||
|
||||
migrate.resolve = {
|
||||
vdi: ['id', 'VDI', 'administrate'],
|
||||
vdi: ['id', ['VDI', 'VDI-snapshot'], 'administrate'],
|
||||
sr: ['sr_id', 'SR', 'administrate'],
|
||||
}
|
||||
|
||||
exports.migrate = migrate
|
||||
|
||||
#=====================================================================
|
||||
|
||||
Object.defineProperty(exports, '__esModule', {
|
||||
value: true
|
||||
})
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
// TODO: move into vm and rename to removeInterface
|
||||
async function delete_ ({vif}) {
|
||||
await this.getXAPI(vif).deleteVif(vif.id)
|
||||
await this.getXapi(vif).deleteVif(vif._xapiId)
|
||||
}
|
||||
export {delete_ as delete}
|
||||
|
||||
@@ -16,7 +16,7 @@ delete_.resolve = {
|
||||
// TODO: move into vm and rename to disconnectInterface
|
||||
export async function disconnect ({vif}) {
|
||||
// TODO: check if VIF is attached before
|
||||
await this.getXAPI(vif).call('VIF.unplug_force', vif.ref)
|
||||
await this.getXapi(vif).call('VIF.unplug_force', vif._xapiRef)
|
||||
}
|
||||
|
||||
disconnect.params = {
|
||||
@@ -31,7 +31,7 @@ disconnect.resolve = {
|
||||
// TODO: move into vm and rename to connectInterface
|
||||
export async function connect ({vif}) {
|
||||
// TODO: check if VIF is attached before
|
||||
await this.getXAPI(vif).call('VIF.plug', vif.ref)
|
||||
await this.getXapi(vif).call('VIF.plug', vif._xapiRef)
|
||||
}
|
||||
|
||||
connect.params = {
|
||||
@@ -41,3 +41,31 @@ connect.params = {
|
||||
connect.resolve = {
|
||||
vif: ['id', 'VIF', 'operate']
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
export const set = ({ vif, allowedIpv4Addresses, allowedIpv6Addresses }) => (
|
||||
this.getXapi(vif._xapiId).editVif({
|
||||
ipv4Allowed: allowedIpv4Addresses,
|
||||
ipv6Allowed: allowedIpv6Addresses
|
||||
})
|
||||
)
|
||||
|
||||
set.params = {
|
||||
allowedIpv4Addresses: {
|
||||
type: 'array',
|
||||
items: {
|
||||
type: 'string'
|
||||
}
|
||||
},
|
||||
allowedIpv6Addresses: {
|
||||
type: 'array',
|
||||
items: {
|
||||
type: 'string'
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
set.resolve = {
|
||||
vif: ['id', 'VIF', 'operate']
|
||||
}
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
@@ -1,9 +1,11 @@
|
||||
import isArray from 'lodash.isarray'
|
||||
import isObject from 'lodash.isobject'
|
||||
import Model from './model'
|
||||
import {BaseError} from 'make-error'
|
||||
import {EventEmitter} from 'events'
|
||||
import {mapInPlace} from './utils'
|
||||
import {
|
||||
isArray,
|
||||
isObject,
|
||||
map
|
||||
} from './utils'
|
||||
|
||||
// ===================================================================
|
||||
|
||||
@@ -31,10 +33,6 @@ export default class Collection extends EventEmitter {
|
||||
})
|
||||
}
|
||||
|
||||
constructor () {
|
||||
super()
|
||||
}
|
||||
|
||||
async add (models, opts) {
|
||||
const array = isArray(models)
|
||||
if (!array) {
|
||||
@@ -42,7 +40,7 @@ export default class Collection extends EventEmitter {
|
||||
}
|
||||
|
||||
const {Model} = this
|
||||
mapInPlace(models, model => {
|
||||
map(models, model => {
|
||||
if (!(model instanceof Model)) {
|
||||
model = new Model(model)
|
||||
}
|
||||
@@ -54,7 +52,7 @@ export default class Collection extends EventEmitter {
|
||||
}
|
||||
|
||||
return model.properties
|
||||
})
|
||||
}, models)
|
||||
|
||||
models = await this._add(models, opts)
|
||||
this.emit('add', models)
|
||||
@@ -82,7 +80,7 @@ export default class Collection extends EventEmitter {
|
||||
: {}
|
||||
}
|
||||
|
||||
return await this._get(properties)
|
||||
return /* await */ this._get(properties)
|
||||
}
|
||||
|
||||
async remove (ids) {
|
||||
@@ -103,7 +101,7 @@ export default class Collection extends EventEmitter {
|
||||
}
|
||||
|
||||
const {Model} = this
|
||||
mapInPlace(models, model => {
|
||||
map(models, model => {
|
||||
if (!(model instanceof Model)) {
|
||||
// TODO: Problems, we may be mixing in some default
|
||||
// properties which will overwrite existing ones.
|
||||
@@ -125,7 +123,7 @@ export default class Collection extends EventEmitter {
|
||||
}
|
||||
|
||||
return model.properties
|
||||
})
|
||||
}, models)
|
||||
|
||||
models = await this._update(models)
|
||||
this.emit('update', models)
|
||||
|
||||
@@ -1,8 +1,8 @@
|
||||
import Collection, {ModelAlreadyExists} from '../collection'
|
||||
import difference from 'lodash.difference'
|
||||
import filter from 'lodash.filter'
|
||||
import getKey from 'lodash.keys'
|
||||
import {createClient as createRedisClient, RedisClient, Multi} from 'redis'
|
||||
import difference from 'lodash/difference'
|
||||
import filter from 'lodash/filter'
|
||||
import getKey from 'lodash/keys'
|
||||
import {createClient as createRedisClient} from 'redis'
|
||||
|
||||
import {
|
||||
forEach,
|
||||
@@ -13,11 +13,6 @@ import {
|
||||
|
||||
// ===================================================================
|
||||
|
||||
promisifyAll(RedisClient.prototype)
|
||||
promisifyAll(Multi.prototype)
|
||||
|
||||
// ===================================================================
|
||||
|
||||
// ///////////////////////////////////////////////////////////////////
|
||||
// Data model:
|
||||
// - prefix +'_id': value of the last generated identifier;
|
||||
@@ -46,7 +41,7 @@ export default class Redis extends Collection {
|
||||
|
||||
this.indexes = indexes
|
||||
this.prefix = prefix
|
||||
this.redis = connection || createRedisClient(uri)
|
||||
this.redis = promisifyAll.call(connection || createRedisClient(uri))
|
||||
}
|
||||
|
||||
_extract (ids) {
|
||||
@@ -55,7 +50,7 @@ export default class Redis extends Collection {
|
||||
|
||||
const models = []
|
||||
return Promise.all(mapToArray(ids, id => {
|
||||
return redis.hgetallAsync(prefix + id).then(model => {
|
||||
return redis.hgetall(prefix + id).then(model => {
|
||||
// If empty, consider it a no match.
|
||||
if (isEmpty(model)) {
|
||||
return
|
||||
@@ -78,10 +73,10 @@ export default class Redis extends Collection {
|
||||
return Promise.all(mapToArray(models, async model => {
|
||||
// Generate a new identifier if necessary.
|
||||
if (model.id === undefined) {
|
||||
model.id = idPrefix + String(await redis.incrAsync(prefix + '_id'))
|
||||
model.id = idPrefix + String(await redis.incr(prefix + '_id'))
|
||||
}
|
||||
|
||||
const success = await redis.saddAsync(prefix + '_ids', model.id)
|
||||
const success = await redis.sadd(prefix + '_ids', model.id)
|
||||
|
||||
// The entry already exists an we are not in replace mode.
|
||||
if (!success && !replace) {
|
||||
@@ -100,8 +95,10 @@ export default class Redis extends Collection {
|
||||
params.push(name, value)
|
||||
})
|
||||
|
||||
const key = `${prefix}:${model.id}`
|
||||
const promises = [
|
||||
redis.hmsetAsync(prefix + ':' + model.id, ...params)
|
||||
redis.del(key),
|
||||
redis.hmset(key, ...params)
|
||||
]
|
||||
|
||||
// Update indexes.
|
||||
@@ -112,7 +109,7 @@ export default class Redis extends Collection {
|
||||
}
|
||||
|
||||
const key = prefix + '_' + index + ':' + value
|
||||
promises.push(redis.saddAsync(key, model.id))
|
||||
promises.push(redis.sadd(key, model.id))
|
||||
})
|
||||
|
||||
await Promise.all(promises)
|
||||
@@ -125,7 +122,7 @@ export default class Redis extends Collection {
|
||||
const {prefix, redis} = this
|
||||
|
||||
if (isEmpty(properties)) {
|
||||
return redis.smembersAsync(prefix + '_ids').then(ids => this._extract(ids))
|
||||
return redis.smembers(prefix + '_ids').then(ids => this._extract(ids))
|
||||
}
|
||||
|
||||
// Special treatment for the identifier.
|
||||
@@ -148,7 +145,7 @@ export default class Redis extends Collection {
|
||||
}
|
||||
|
||||
const keys = mapToArray(properties, (value, index) => `${prefix}_${index}:${value}`)
|
||||
return redis.sinterAsync(...keys).then(ids => this._extract(ids))
|
||||
return redis.sinter(...keys).then(ids => this._extract(ids))
|
||||
}
|
||||
|
||||
_remove (ids) {
|
||||
@@ -158,10 +155,10 @@ export default class Redis extends Collection {
|
||||
|
||||
return Promise.all([
|
||||
// Remove the identifiers from the main index.
|
||||
redis.sremAsync(prefix + '_ids', ...ids),
|
||||
redis.srem(prefix + '_ids', ...ids),
|
||||
|
||||
// Remove the models.
|
||||
redis.delAsync(mapToArray(ids, id => `${prefix}:${id}`))
|
||||
redis.del(mapToArray(ids, id => `${prefix}:${id}`))
|
||||
])
|
||||
}
|
||||
|
||||
|
||||
@@ -1,10 +1,6 @@
|
||||
import {EventEmitter} from 'events'
|
||||
|
||||
import {createRawObject} from './utils'
|
||||
|
||||
// ===================================================================
|
||||
|
||||
// const noop = () => {}
|
||||
import {createRawObject, noop} from './utils'
|
||||
|
||||
// ===================================================================
|
||||
|
||||
@@ -18,7 +14,7 @@ export default class Connection extends EventEmitter {
|
||||
// Close the connection.
|
||||
close () {
|
||||
// Prevent errors when the connection is closed more than once.
|
||||
// this.close = noop
|
||||
this.close = noop
|
||||
|
||||
this.emit('close')
|
||||
}
|
||||
|
||||
@@ -1,69 +1,79 @@
|
||||
import bind from 'lodash.bind'
|
||||
import bind from 'lodash/bind'
|
||||
|
||||
import {
|
||||
isArray,
|
||||
isPromise,
|
||||
isFunction,
|
||||
noop,
|
||||
pFinally
|
||||
} from './utils'
|
||||
|
||||
// ===================================================================
|
||||
|
||||
const {defineProperty} = Object
|
||||
const {
|
||||
defineProperties,
|
||||
defineProperty,
|
||||
getOwnPropertyDescriptor
|
||||
} = Object
|
||||
|
||||
// ===================================================================
|
||||
|
||||
// See: https://github.com/jayphelps/core-decorators.js#autobind
|
||||
export function autobind (target, key, {
|
||||
//
|
||||
// TODO: make it work for all class methods.
|
||||
export const autobind = (target, key, {
|
||||
configurable,
|
||||
enumerable,
|
||||
value: fn,
|
||||
writable
|
||||
}) {
|
||||
return {
|
||||
configurable,
|
||||
enumerable,
|
||||
}) => ({
|
||||
configurable,
|
||||
enumerable,
|
||||
|
||||
get () {
|
||||
const bounded = bind(fn, this)
|
||||
|
||||
defineProperty(this, key, {
|
||||
configurable: true,
|
||||
enumerable: false,
|
||||
value: bounded,
|
||||
writable: true
|
||||
})
|
||||
|
||||
return bounded
|
||||
},
|
||||
set (newValue) {
|
||||
if (this === target) {
|
||||
// New value directly set on the prototype.
|
||||
delete this[key]
|
||||
this[key] = newValue
|
||||
} else {
|
||||
// New value set on a child object.
|
||||
|
||||
// Cannot use assignment because it will call the setter on
|
||||
// the prototype.
|
||||
defineProperty(this, key, {
|
||||
configurable: true,
|
||||
enumerable: true,
|
||||
value: newValue,
|
||||
writable: true
|
||||
})
|
||||
}
|
||||
get () {
|
||||
if (this === target) {
|
||||
return fn
|
||||
}
|
||||
|
||||
const bound = bind(fn, this)
|
||||
|
||||
defineProperty(this, key, {
|
||||
configurable: true,
|
||||
enumerable: false,
|
||||
value: bound,
|
||||
writable: true
|
||||
})
|
||||
|
||||
return bound
|
||||
},
|
||||
set (newValue) {
|
||||
// Cannot use assignment because it will call the setter on
|
||||
// the prototype.
|
||||
defineProperty(this, key, {
|
||||
configurable: true,
|
||||
enumerable: true,
|
||||
value: newValue,
|
||||
writable: true
|
||||
})
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
// Debounce decorator for methods.
|
||||
//
|
||||
// See: https://github.com/wycats/javascript-decorators
|
||||
export const debounce = (duration) => (target, name, descriptor) => {
|
||||
const {value: fn} = descriptor
|
||||
//
|
||||
// TODO: make it work for single functions.
|
||||
export const debounce = duration => (target, name, descriptor) => {
|
||||
const fn = descriptor.value
|
||||
|
||||
// This symbol is used to store the related data directly on the
|
||||
// current object.
|
||||
const s = Symbol()
|
||||
|
||||
function debounced () {
|
||||
let data = this[s] || (this[s] = {
|
||||
const data = this[s] || (this[s] = {
|
||||
lastCall: 0,
|
||||
wrapper: null
|
||||
})
|
||||
@@ -80,8 +90,258 @@ export const debounce = (duration) => (target, name, descriptor) => {
|
||||
}
|
||||
return data.wrapper()
|
||||
}
|
||||
debounced.reset = (obj) => { delete obj[s] }
|
||||
debounced.reset = obj => { delete obj[s] }
|
||||
|
||||
descriptor.value = debounced
|
||||
return descriptor
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
const _push = Array.prototype.push
|
||||
|
||||
export const deferrable = (target, name, descriptor) => {
|
||||
let fn
|
||||
function newFn () {
|
||||
const deferreds = []
|
||||
const defer = fn => {
|
||||
deferreds.push(fn)
|
||||
}
|
||||
defer.clear = () => {
|
||||
deferreds.length = 0
|
||||
}
|
||||
|
||||
const args = [ defer ]
|
||||
_push.apply(args, arguments)
|
||||
|
||||
let executeDeferreds = () => {
|
||||
let i = deferreds.length
|
||||
while (i) {
|
||||
deferreds[--i]()
|
||||
}
|
||||
}
|
||||
|
||||
try {
|
||||
const result = fn.apply(this, args)
|
||||
|
||||
if (isPromise(result)) {
|
||||
result::pFinally(executeDeferreds)
|
||||
|
||||
// Do not execute the deferreds in the finally block.
|
||||
executeDeferreds = noop
|
||||
}
|
||||
|
||||
return result
|
||||
} finally {
|
||||
executeDeferreds()
|
||||
}
|
||||
}
|
||||
|
||||
if (descriptor) {
|
||||
fn = descriptor.value
|
||||
descriptor.value = newFn
|
||||
|
||||
return descriptor
|
||||
}
|
||||
|
||||
fn = target
|
||||
return newFn
|
||||
}
|
||||
|
||||
// Deferred functions are only executed on failures.
|
||||
//
|
||||
// i.e.: defer.clear() is automatically called in case of success.
|
||||
deferrable.onFailure = (target, name, descriptor) => {
|
||||
let fn
|
||||
function newFn (defer) {
|
||||
const result = fn.apply(this, arguments)
|
||||
|
||||
return isPromise(result)
|
||||
? result.then(result => {
|
||||
defer.clear()
|
||||
return result
|
||||
})
|
||||
: (defer.clear(), result)
|
||||
}
|
||||
|
||||
if (descriptor) {
|
||||
fn = descriptor.value
|
||||
descriptor.value = newFn
|
||||
} else {
|
||||
fn = target
|
||||
target = newFn
|
||||
}
|
||||
|
||||
return deferrable(target, name, descriptor)
|
||||
}
|
||||
|
||||
// Deferred functions are only executed on success.
|
||||
//
|
||||
// i.e.: defer.clear() is automatically called in case of failure.
|
||||
deferrable.onSuccess = (target, name, descriptor) => {
|
||||
let fn
|
||||
function newFn (defer) {
|
||||
try {
|
||||
const result = fn.apply(this, arguments)
|
||||
|
||||
return isPromise(result)
|
||||
? result.then(null, error => {
|
||||
defer.clear()
|
||||
throw error
|
||||
})
|
||||
: result
|
||||
} catch (error) {
|
||||
defer.clear()
|
||||
throw error
|
||||
}
|
||||
}
|
||||
|
||||
if (descriptor) {
|
||||
fn = descriptor.value
|
||||
descriptor.value = newFn
|
||||
} else {
|
||||
fn = target
|
||||
target = newFn
|
||||
}
|
||||
|
||||
return deferrable(target, name, descriptor)
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
const _ownKeys = (
|
||||
typeof Reflect !== 'undefined' && Reflect.ownKeys ||
|
||||
(({
|
||||
getOwnPropertyNames: names,
|
||||
getOwnPropertySymbols: symbols
|
||||
}) => symbols
|
||||
? obj => names(obj).concat(symbols(obj))
|
||||
: names
|
||||
)(Object)
|
||||
)
|
||||
|
||||
const _bindPropertyDescriptor = (descriptor, thisArg) => {
|
||||
const { get, set, value } = descriptor
|
||||
if (get) {
|
||||
descriptor.get = bind(get, thisArg)
|
||||
}
|
||||
if (set) {
|
||||
descriptor.set = bind(set, thisArg)
|
||||
}
|
||||
|
||||
if (isFunction(value)) {
|
||||
descriptor.value = bind(value, thisArg)
|
||||
}
|
||||
|
||||
return descriptor
|
||||
}
|
||||
|
||||
const _isIgnoredProperty = name => (
|
||||
name[0] === '_' ||
|
||||
name === 'constructor'
|
||||
)
|
||||
|
||||
const _IGNORED_STATIC_PROPERTIES = {
|
||||
__proto__: null,
|
||||
|
||||
arguments: true,
|
||||
caller: true,
|
||||
length: true,
|
||||
name: true,
|
||||
prototype: true
|
||||
}
|
||||
const _isIgnoredStaticProperty = name => _IGNORED_STATIC_PROPERTIES[name]
|
||||
|
||||
export const mixin = MixIns => Class => {
|
||||
if (!isArray(MixIns)) {
|
||||
MixIns = [ MixIns ]
|
||||
}
|
||||
|
||||
const { name } = Class
|
||||
|
||||
// Copy properties of plain object mix-ins to the prototype.
|
||||
{
|
||||
const allMixIns = MixIns
|
||||
MixIns = []
|
||||
const { prototype } = Class
|
||||
const descriptors = { __proto__: null }
|
||||
for (const MixIn of allMixIns) {
|
||||
if (isFunction(MixIn)) {
|
||||
MixIns.push(MixIn)
|
||||
continue
|
||||
}
|
||||
|
||||
for (const prop of _ownKeys(MixIn)) {
|
||||
if (prop in prototype) {
|
||||
throw new Error(`${name}#${prop} is already defined`)
|
||||
}
|
||||
|
||||
(
|
||||
descriptors[prop] = getOwnPropertyDescriptor(MixIn, prop)
|
||||
).enumerable = false // Object methods are enumerable but class methods are not.
|
||||
}
|
||||
}
|
||||
defineProperties(prototype, descriptors)
|
||||
}
|
||||
|
||||
const Decorator = (...args) => {
|
||||
const instance = new Class(...args)
|
||||
|
||||
for (const MixIn of MixIns) {
|
||||
const { prototype } = MixIn
|
||||
const mixinInstance = new MixIn(instance)
|
||||
const descriptors = { __proto__: null }
|
||||
for (const prop of _ownKeys(prototype)) {
|
||||
if (_isIgnoredProperty(prop)) {
|
||||
continue
|
||||
}
|
||||
|
||||
if (prop in instance) {
|
||||
throw new Error(`${name}#${prop} is already defined`)
|
||||
}
|
||||
|
||||
descriptors[prop] = _bindPropertyDescriptor(
|
||||
getOwnPropertyDescriptor(prototype, prop),
|
||||
mixinInstance
|
||||
)
|
||||
}
|
||||
defineProperties(instance, descriptors)
|
||||
}
|
||||
|
||||
return instance
|
||||
}
|
||||
|
||||
// Copy original and mixed-in static properties on Decorator class.
|
||||
const descriptors = { __proto__: null }
|
||||
for (const prop of _ownKeys(Class)) {
|
||||
let descriptor
|
||||
if (!(
|
||||
// Special properties are not defined...
|
||||
_isIgnoredStaticProperty(prop) &&
|
||||
|
||||
// if they already exist...
|
||||
(descriptor = getOwnPropertyDescriptor(Decorator, prop)) &&
|
||||
|
||||
// and are not configurable.
|
||||
!descriptor.configurable
|
||||
)) {
|
||||
descriptors[prop] = getOwnPropertyDescriptor(Class, prop)
|
||||
}
|
||||
}
|
||||
for (const MixIn of MixIns) {
|
||||
for (const prop of _ownKeys(MixIn)) {
|
||||
if (_isIgnoredStaticProperty(prop)) {
|
||||
continue
|
||||
}
|
||||
|
||||
if (prop in descriptors) {
|
||||
throw new Error(`${name}.${prop} is already defined`)
|
||||
}
|
||||
|
||||
descriptors[prop] = getOwnPropertyDescriptor(MixIn, prop)
|
||||
}
|
||||
}
|
||||
defineProperties(Decorator, descriptors)
|
||||
|
||||
return Decorator
|
||||
}
|
||||
|
||||
@@ -4,11 +4,11 @@ import expect from 'must'
|
||||
|
||||
// ===================================================================
|
||||
|
||||
import {autobind, debounce} from './decorators'
|
||||
import {autobind, debounce, deferrable} from './decorators'
|
||||
|
||||
// ===================================================================
|
||||
|
||||
describe('autobind', function () {
|
||||
describe('autobind()', () => {
|
||||
class Foo {
|
||||
@autobind
|
||||
getFoo () {
|
||||
@@ -16,25 +16,25 @@ describe('autobind', function () {
|
||||
}
|
||||
}
|
||||
|
||||
it('returns a bound instance for a method', function () {
|
||||
it('returns a bound instance for a method', () => {
|
||||
const foo = new Foo()
|
||||
const {getFoo} = foo
|
||||
const { getFoo } = foo
|
||||
|
||||
expect(getFoo()).to.equal(foo)
|
||||
})
|
||||
|
||||
it('returns the same bound instance each time', function () {
|
||||
it('returns the same bound instance each time', () => {
|
||||
const foo = new Foo()
|
||||
|
||||
expect(foo.getFoo).to.equal(foo.getFoo)
|
||||
})
|
||||
|
||||
it('works with multiple instances of the same class', function () {
|
||||
it('works with multiple instances of the same class', () => {
|
||||
const foo1 = new Foo()
|
||||
const foo2 = new Foo()
|
||||
|
||||
const {getFoo: getFoo1} = foo1
|
||||
const {getFoo: getFoo2} = foo2
|
||||
const getFoo1 = foo1.getFoo
|
||||
const getFoo2 = foo2.getFoo
|
||||
|
||||
expect(getFoo1()).to.equal(foo1)
|
||||
expect(getFoo2()).to.equal(foo2)
|
||||
@@ -43,7 +43,7 @@ describe('autobind', function () {
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
describe('debounce', function () {
|
||||
describe('debounce()', () => {
|
||||
let i
|
||||
|
||||
class Foo {
|
||||
@@ -53,11 +53,11 @@ describe('debounce', function () {
|
||||
}
|
||||
}
|
||||
|
||||
beforeEach(function () {
|
||||
beforeEach(() => {
|
||||
i = 0
|
||||
})
|
||||
|
||||
it('works', function (done) {
|
||||
it('works', done => {
|
||||
const foo = new Foo()
|
||||
|
||||
expect(i).to.equal(0)
|
||||
@@ -68,7 +68,7 @@ describe('debounce', function () {
|
||||
foo.foo()
|
||||
expect(i).to.equal(1)
|
||||
|
||||
setTimeout(function () {
|
||||
setTimeout(() => {
|
||||
foo.foo()
|
||||
expect(i).to.equal(2)
|
||||
|
||||
@@ -76,3 +76,98 @@ describe('debounce', function () {
|
||||
}, 2e1)
|
||||
})
|
||||
})
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
describe('deferrable()', () => {
|
||||
it('works with normal termination', () => {
|
||||
let i = 0
|
||||
const fn = deferrable(defer => {
|
||||
i += 2
|
||||
defer(() => { i -= 2 })
|
||||
|
||||
i *= 2
|
||||
defer(() => { i /= 2 })
|
||||
|
||||
return i
|
||||
})
|
||||
|
||||
expect(fn()).to.equal(4)
|
||||
expect(i).to.equal(0)
|
||||
})
|
||||
|
||||
it('defer.clear() removes previous deferreds', () => {
|
||||
let i = 0
|
||||
const fn = deferrable(defer => {
|
||||
i += 2
|
||||
defer(() => { i -= 2 })
|
||||
|
||||
defer.clear()
|
||||
|
||||
i *= 2
|
||||
defer(() => { i /= 2 })
|
||||
|
||||
return i
|
||||
})
|
||||
|
||||
expect(fn()).to.equal(4)
|
||||
expect(i).to.equal(2)
|
||||
})
|
||||
|
||||
it('works with exception', () => {
|
||||
let i = 0
|
||||
const fn = deferrable(defer => {
|
||||
i += 2
|
||||
defer(() => { i -= 2 })
|
||||
|
||||
i *= 2
|
||||
defer(() => { i /= 2 })
|
||||
|
||||
throw i
|
||||
})
|
||||
|
||||
expect(() => fn()).to.throw(4)
|
||||
expect(i).to.equal(0)
|
||||
})
|
||||
|
||||
it('works with promise resolution', async () => {
|
||||
let i = 0
|
||||
const fn = deferrable(async defer => {
|
||||
i += 2
|
||||
defer(() => { i -= 2 })
|
||||
|
||||
i *= 2
|
||||
defer(() => { i /= 2 })
|
||||
|
||||
// Wait a turn of the events loop.
|
||||
await Promise.resolve()
|
||||
|
||||
return i
|
||||
})
|
||||
|
||||
await expect(fn()).to.eventually.equal(4)
|
||||
expect(i).to.equal(0)
|
||||
})
|
||||
|
||||
it('works with promise rejection', async () => {
|
||||
let i = 0
|
||||
const fn = deferrable(async defer => {
|
||||
// Wait a turn of the events loop.
|
||||
await Promise.resolve()
|
||||
|
||||
i += 2
|
||||
defer(() => { i -= 2 })
|
||||
|
||||
i *= 2
|
||||
defer(() => { i /= 2 })
|
||||
|
||||
// Wait a turn of the events loop.
|
||||
await Promise.resolve()
|
||||
|
||||
throw i
|
||||
})
|
||||
|
||||
await expect(fn()).to.reject.to.equal(4)
|
||||
expect(i).to.equal(0)
|
||||
})
|
||||
})
|
||||
|
||||
84
src/fatfs-buffer.js
Normal file
84
src/fatfs-buffer.js
Normal file
@@ -0,0 +1,84 @@
|
||||
// Buffer driver for [fatfs](https://github.com/natevw/fatfs).
|
||||
//
|
||||
// Usage:
|
||||
//
|
||||
// ```js
|
||||
// import fatfs from 'fatfs'
|
||||
// import fatfsBuffer, { init as fatfsBufferInit } from './fatfs-buffer'
|
||||
//
|
||||
// const buffer = fatfsBufferinit()
|
||||
//
|
||||
// const fs = fatfs.createFileSystem(fatfsBuffer(buffer))
|
||||
//
|
||||
// fs.writeFile('/foo', 'content of foo', function (err, content) {
|
||||
// if (err) {
|
||||
// console.error(err)
|
||||
// }
|
||||
// })
|
||||
|
||||
import { boot16 as fat16 } from 'fatfs/structs'
|
||||
|
||||
const SECTOR_SIZE = 512
|
||||
|
||||
// Creates a 10MB buffer and initializes it as a FAT 16 volume.
|
||||
export function init () {
|
||||
const buf = new Buffer(10 * 1024 * 1024) // 10MB
|
||||
buf.fill(0)
|
||||
|
||||
// https://github.com/natevw/fatfs/blob/master/structs.js
|
||||
fat16.pack({
|
||||
jmpBoot: new Buffer('eb3c90', 'hex'),
|
||||
OEMName: 'mkfs.fat',
|
||||
BytsPerSec: SECTOR_SIZE,
|
||||
SecPerClus: 4,
|
||||
ResvdSecCnt: 1,
|
||||
NumFATs: 2,
|
||||
RootEntCnt: 512,
|
||||
TotSec16: 20480,
|
||||
Media: 248,
|
||||
FATSz16: 20,
|
||||
SecPerTrk: 32,
|
||||
NumHeads: 64,
|
||||
HiddSec: 0,
|
||||
TotSec32: 0,
|
||||
DrvNum: 128,
|
||||
Reserved1: 0,
|
||||
BootSig: 41,
|
||||
VolID: 895111106,
|
||||
VolLab: 'NO NAME ',
|
||||
FilSysType: 'FAT16 '
|
||||
}, buf)
|
||||
|
||||
// End of sector.
|
||||
buf[0x1fe] = 0x55
|
||||
buf[0x1ff] = 0xaa
|
||||
|
||||
// Mark sector as reserved.
|
||||
buf[0x200] = 0xf8
|
||||
buf[0x201] = 0xff
|
||||
buf[0x202] = 0xff
|
||||
buf[0x203] = 0xff
|
||||
|
||||
// Mark sector as reserved.
|
||||
buf[0x2a00] = 0xf8
|
||||
buf[0x2a01] = 0xff
|
||||
buf[0x2a02] = 0xff
|
||||
buf[0x2a03] = 0xff
|
||||
|
||||
return buf
|
||||
}
|
||||
|
||||
export default buffer => {
|
||||
return {
|
||||
sectorSize: SECTOR_SIZE,
|
||||
numSectors: Math.floor(buffer.length / SECTOR_SIZE),
|
||||
readSectors: (i, target, cb) => {
|
||||
buffer.copy(target, 0, i * SECTOR_SIZE)
|
||||
cb()
|
||||
},
|
||||
writeSectors: (i, source, cb) => {
|
||||
source.copy(buffer, i * SECTOR_SIZE, 0)
|
||||
cb()
|
||||
}
|
||||
}
|
||||
}
|
||||
54
src/glob-matcher.js
Normal file
54
src/glob-matcher.js
Normal file
@@ -0,0 +1,54 @@
|
||||
// See: https://gist.github.com/julien-f/5b9a3537eb82a34b04e2
|
||||
|
||||
var matcher = require('micromatch').matcher
|
||||
|
||||
module.exports = function globMatcher (patterns, opts) {
|
||||
if (!Array.isArray(patterns)) {
|
||||
if (patterns[0] === '!') {
|
||||
var m = matcher(patterns.slice(1), opts)
|
||||
return function (string) {
|
||||
return !m(string)
|
||||
}
|
||||
} else {
|
||||
return matcher(patterns, opts)
|
||||
}
|
||||
}
|
||||
|
||||
var noneMustMatch = []
|
||||
var anyMustMatch = []
|
||||
|
||||
// TODO: could probably be optimized by combining all positive patterns (and all negative patterns) as a single matcher.
|
||||
for (var i = 0, n = patterns.length; i < n; ++i) {
|
||||
var pattern = patterns[i]
|
||||
if (pattern[0] === '!') {
|
||||
noneMustMatch.push(matcher(pattern.slice(1), opts))
|
||||
} else {
|
||||
anyMustMatch.push(matcher(pattern, opts))
|
||||
}
|
||||
}
|
||||
|
||||
var nNone = noneMustMatch.length
|
||||
var nAny = anyMustMatch.length
|
||||
|
||||
return function (string) {
|
||||
var i
|
||||
|
||||
for (i = 0; i < nNone; ++i) {
|
||||
if (noneMustMatch[i](string)) {
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
if (nAny === 0) {
|
||||
return true
|
||||
}
|
||||
|
||||
for (i = 0; i < nAny; ++i) {
|
||||
if (anyMustMatch[i](string)) {
|
||||
return true
|
||||
}
|
||||
}
|
||||
|
||||
return false
|
||||
}
|
||||
}
|
||||
13
src/http-proxy.js
Normal file
13
src/http-proxy.js
Normal file
@@ -0,0 +1,13 @@
|
||||
import ProxyAgent from 'proxy-agent'
|
||||
|
||||
let agent
|
||||
export { agent as default }
|
||||
|
||||
export function setup (uri) {
|
||||
agent = uri != null
|
||||
? new ProxyAgent(uri)
|
||||
: undefined
|
||||
}
|
||||
|
||||
const { env } = process
|
||||
setup(env.http_proxy || env.HTTP_PROXY)
|
||||
124
src/http-request.js
Normal file
124
src/http-request.js
Normal file
@@ -0,0 +1,124 @@
|
||||
import assign from 'lodash/assign'
|
||||
import startsWith from 'lodash/startsWith'
|
||||
import { parse as parseUrl } from 'url'
|
||||
import { request as httpRequest } from 'http'
|
||||
import { request as httpsRequest } from 'https'
|
||||
import { stringify as formatQueryString } from 'querystring'
|
||||
|
||||
import {
|
||||
isString,
|
||||
streamToBuffer
|
||||
} from './utils'
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
export default (...args) => {
|
||||
let req
|
||||
|
||||
const pResponse = new Promise((resolve, reject) => {
|
||||
const opts = {}
|
||||
for (let i = 0, length = args.length; i < length; ++i) {
|
||||
const arg = args[i]
|
||||
assign(opts, isString(arg) ? parseUrl(arg) : arg)
|
||||
}
|
||||
|
||||
const {
|
||||
body,
|
||||
headers: { ...headers } = {},
|
||||
protocol,
|
||||
query,
|
||||
...rest
|
||||
} = opts
|
||||
|
||||
if (headers['content-length'] == null && body != null) {
|
||||
let tmp
|
||||
if (isString(body)) {
|
||||
headers['content-length'] = Buffer.byteLength(body)
|
||||
} else if (
|
||||
(
|
||||
(tmp = body.headers) &&
|
||||
(tmp = tmp['content-length']) != null
|
||||
) ||
|
||||
(tmp = body.length) != null
|
||||
) {
|
||||
headers['content-length'] = tmp
|
||||
}
|
||||
}
|
||||
|
||||
if (query) {
|
||||
rest.path = `${rest.pathname || rest.path || '/'}?${
|
||||
isString(query)
|
||||
? query
|
||||
: formatQueryString(query)
|
||||
}`
|
||||
}
|
||||
|
||||
// Some headers can be explicitly removed by setting them to null.
|
||||
const headersToRemove = []
|
||||
for (const header in headers) {
|
||||
if (headers[header] === null) {
|
||||
delete headers[header]
|
||||
headersToRemove.push(header)
|
||||
}
|
||||
}
|
||||
|
||||
req = (
|
||||
protocol && startsWith(protocol.toLowerCase(), 'https')
|
||||
? httpsRequest
|
||||
: httpRequest
|
||||
)({
|
||||
...rest,
|
||||
headers
|
||||
})
|
||||
|
||||
for (let i = 0, length = headersToRemove.length; i < length; ++i) {
|
||||
req.removeHeader(headersToRemove[i])
|
||||
}
|
||||
|
||||
if (body) {
|
||||
if (typeof body.pipe === 'function') {
|
||||
body.pipe(req)
|
||||
} else {
|
||||
req.end(body)
|
||||
}
|
||||
} else {
|
||||
req.end()
|
||||
}
|
||||
req.on('error', reject)
|
||||
req.once('response', resolve)
|
||||
}).then(response => {
|
||||
response.cancel = () => {
|
||||
req.abort()
|
||||
}
|
||||
response.readAll = () => streamToBuffer(response)
|
||||
|
||||
const length = response.headers['content-length']
|
||||
if (length) {
|
||||
response.length = length
|
||||
}
|
||||
|
||||
const code = response.statusCode
|
||||
if (code < 200 || code >= 300) {
|
||||
const error = new Error(response.statusMessage)
|
||||
error.code = code
|
||||
Object.defineProperty(error, 'response', {
|
||||
configurable: true,
|
||||
value: response,
|
||||
writable: true
|
||||
})
|
||||
|
||||
throw error
|
||||
}
|
||||
|
||||
return response
|
||||
})
|
||||
|
||||
pResponse.cancel = () => {
|
||||
req.emit('error', new Error('HTTP request canceled!'))
|
||||
req.abort()
|
||||
}
|
||||
pResponse.readAll = () => pResponse.then(response => response.readAll())
|
||||
pResponse.request = req
|
||||
|
||||
return pResponse
|
||||
}
|
||||
456
src/index.js
456
src/index.js
@@ -2,19 +2,21 @@ import createLogger from 'debug'
|
||||
const debug = createLogger('xo:main')
|
||||
|
||||
import appConf from 'app-conf'
|
||||
import bind from 'lodash.bind'
|
||||
import bind from 'lodash/bind'
|
||||
import blocked from 'blocked'
|
||||
import createExpress from 'express'
|
||||
import eventToPromise from 'event-to-promise'
|
||||
import has from 'lodash.has'
|
||||
import isArray from 'lodash.isarray'
|
||||
import isFunction from 'lodash.isfunction'
|
||||
import pick from 'lodash.pick'
|
||||
import has from 'lodash/has'
|
||||
import helmet from 'helmet'
|
||||
import includes from 'lodash/includes'
|
||||
import pick from 'lodash/pick'
|
||||
import proxyConsole from './proxy-console'
|
||||
import proxyRequest from 'proxy-http-request'
|
||||
import serveStatic from 'serve-static'
|
||||
import startsWith from 'lodash/startsWith'
|
||||
import WebSocket from 'ws'
|
||||
import {compile as compileJade} from 'jade'
|
||||
import { compile as compilePug } from 'pug'
|
||||
import { createServer as createProxyServer } from 'http-proxy'
|
||||
import { join as joinPath } from 'path'
|
||||
|
||||
import {
|
||||
AlreadyAuthenticated,
|
||||
@@ -24,20 +26,25 @@ import {
|
||||
NotImplemented
|
||||
} from './api-errors'
|
||||
import JsonRpcPeer from 'json-rpc-peer'
|
||||
import {readFile} from 'fs-promise'
|
||||
import {
|
||||
readFile,
|
||||
readdir
|
||||
} from 'fs-promise'
|
||||
|
||||
import * as apiMethods from './api/index'
|
||||
import Api from './api'
|
||||
import JobExecutor from './job-executor'
|
||||
import RemoteHandler from './remote-handler'
|
||||
import Scheduler from './scheduler'
|
||||
import WebServer from 'http-server-plus'
|
||||
import wsProxy from './ws-proxy'
|
||||
import Xo from './xo'
|
||||
import {
|
||||
setup as setupHttpProxy
|
||||
} from './http-proxy'
|
||||
import {
|
||||
createRawObject,
|
||||
forEach,
|
||||
mapToArray
|
||||
isArray,
|
||||
isFunction,
|
||||
mapToArray,
|
||||
pFromCallback
|
||||
} from './utils'
|
||||
|
||||
import bodyParser from 'body-parser'
|
||||
@@ -45,30 +52,17 @@ import connectFlash from 'connect-flash'
|
||||
import cookieParser from 'cookie-parser'
|
||||
import expressSession from 'express-session'
|
||||
import passport from 'passport'
|
||||
import {Strategy as LocalStrategy} from 'passport-local'
|
||||
import { parse as parseCookies } from 'cookie'
|
||||
import { Strategy as LocalStrategy } from 'passport-local'
|
||||
|
||||
// ===================================================================
|
||||
|
||||
const info = (...args) => {
|
||||
console.info('[Info]', ...args)
|
||||
}
|
||||
|
||||
const warn = (...args) => {
|
||||
console.warn('[Warn]', ...args)
|
||||
}
|
||||
|
||||
// ===================================================================
|
||||
|
||||
const DEFAULTS = {
|
||||
http: {
|
||||
listen: [
|
||||
{ port: 80 }
|
||||
],
|
||||
mounts: {}
|
||||
},
|
||||
datadir: '/var/lib/xo-server/data'
|
||||
}
|
||||
|
||||
const DEPRECATED_ENTRIES = [
|
||||
'users',
|
||||
'servers'
|
||||
@@ -76,7 +70,6 @@ const DEPRECATED_ENTRIES = [
|
||||
|
||||
async function loadConfiguration () {
|
||||
const config = await appConf.load('xo-server', {
|
||||
defaults: DEFAULTS,
|
||||
ignoreUnknownFormats: true
|
||||
})
|
||||
|
||||
@@ -97,6 +90,8 @@ async function loadConfiguration () {
|
||||
function createExpressApp () {
|
||||
const app = createExpress()
|
||||
|
||||
app.use(helmet())
|
||||
|
||||
// Registers the cookie-parser and express-session middlewares,
|
||||
// necessary for connect-flash.
|
||||
app.use(cookieParser())
|
||||
@@ -134,8 +129,8 @@ async function setUpPassport (express, xo) {
|
||||
}
|
||||
|
||||
// Registers the sign in form.
|
||||
const signInPage = compileJade(
|
||||
await readFile(__dirname + '/../signin.jade')
|
||||
const signInPage = compilePug(
|
||||
await readFile(joinPath(__dirname, '..', 'signin.pug'))
|
||||
)
|
||||
express.get('/signin', (req, res, next) => {
|
||||
res.send(signInPage({
|
||||
@@ -146,7 +141,8 @@ async function setUpPassport (express, xo) {
|
||||
|
||||
const SIGNIN_STRATEGY_RE = /^\/signin\/([^/]+)(\/callback)?(:?\?.*)?$/
|
||||
express.use(async (req, res, next) => {
|
||||
const matches = req.url.match(SIGNIN_STRATEGY_RE)
|
||||
const { url } = req
|
||||
const matches = url.match(SIGNIN_STRATEGY_RE)
|
||||
|
||||
if (matches) {
|
||||
return passport.authenticate(matches[1], async (err, user, info) => {
|
||||
@@ -172,7 +168,7 @@ async function setUpPassport (express, xo) {
|
||||
matches[1] === 'local' && req.body['remember-me'] === 'on'
|
||||
)
|
||||
|
||||
res.redirect('/')
|
||||
res.redirect(req.flash('return-url')[0] || '/')
|
||||
})(req, res, next)
|
||||
}
|
||||
|
||||
@@ -192,9 +188,10 @@ async function setUpPassport (express, xo) {
|
||||
next()
|
||||
} else if (req.cookies.token) {
|
||||
next()
|
||||
} else if (/favicon|fontawesome|images|styles/.test(req.url)) {
|
||||
} else if (/favicon|fontawesome|images|styles/.test(url)) {
|
||||
next()
|
||||
} else {
|
||||
req.flash('return-url', url)
|
||||
return res.redirect('/signin')
|
||||
}
|
||||
})
|
||||
@@ -214,25 +211,21 @@ async function setUpPassport (express, xo) {
|
||||
|
||||
// ===================================================================
|
||||
|
||||
const debugPlugin = createLogger('xo:plugin')
|
||||
|
||||
async function registerPlugin (pluginConf, pluginName) {
|
||||
debugPlugin('register %s', pluginName)
|
||||
|
||||
const pluginPath = (function (name) {
|
||||
try {
|
||||
return require.resolve('xo-server-' + name)
|
||||
} catch (e) {
|
||||
return require.resolve(name)
|
||||
}
|
||||
})(pluginName)
|
||||
|
||||
async function registerPlugin (pluginPath, pluginName) {
|
||||
const plugin = require(pluginPath)
|
||||
const { version = 'unknown' } = (() => {
|
||||
try {
|
||||
return require(pluginPath + '/package.json')
|
||||
} catch (_) {
|
||||
return {}
|
||||
}
|
||||
})()
|
||||
|
||||
// Supports both “normal” CommonJS and Babel's ES2015 modules.
|
||||
const {
|
||||
default: factory = plugin,
|
||||
configurationSchema
|
||||
configurationSchema,
|
||||
configurationPresets
|
||||
} = plugin
|
||||
|
||||
// The default export can be either a factory or directly a plugin
|
||||
@@ -241,36 +234,74 @@ async function registerPlugin (pluginConf, pluginName) {
|
||||
? factory({ xo: this })
|
||||
: factory
|
||||
|
||||
await this._registerPlugin(
|
||||
await this.registerPlugin(
|
||||
pluginName,
|
||||
instance,
|
||||
configurationSchema,
|
||||
pluginConf
|
||||
configurationPresets,
|
||||
version
|
||||
)
|
||||
}
|
||||
|
||||
function registerPlugins (plugins, xo) {
|
||||
return Promise.all(mapToArray(plugins, (conf, name) => {
|
||||
return registerPlugin.call(xo, conf, name).then(
|
||||
() => {
|
||||
debugPlugin(`successfully register ${name}`)
|
||||
},
|
||||
error => {
|
||||
debugPlugin(`failed register ${name}`)
|
||||
debugPlugin(error)
|
||||
}
|
||||
)
|
||||
const debugPlugin = createLogger('xo:plugin')
|
||||
|
||||
function registerPluginWrapper (pluginPath, pluginName) {
|
||||
debugPlugin('register %s', pluginName)
|
||||
|
||||
return registerPlugin.call(this, pluginPath, pluginName).then(
|
||||
() => {
|
||||
debugPlugin(`successfully register ${pluginName}`)
|
||||
},
|
||||
error => {
|
||||
debugPlugin(`failed register ${pluginName}`)
|
||||
debugPlugin(error)
|
||||
}
|
||||
)
|
||||
}
|
||||
|
||||
const PLUGIN_PREFIX = 'xo-server-'
|
||||
const PLUGIN_PREFIX_LENGTH = PLUGIN_PREFIX.length
|
||||
|
||||
async function registerPluginsInPath (path) {
|
||||
const files = await readdir(path).catch(error => {
|
||||
if (error.code === 'ENOENT') {
|
||||
return []
|
||||
}
|
||||
throw error
|
||||
})
|
||||
|
||||
await Promise.all(mapToArray(files, name => {
|
||||
if (startsWith(name, PLUGIN_PREFIX)) {
|
||||
return registerPluginWrapper.call(
|
||||
this,
|
||||
`${path}/${name}`,
|
||||
name.slice(PLUGIN_PREFIX_LENGTH)
|
||||
)
|
||||
}
|
||||
}))
|
||||
}
|
||||
|
||||
async function registerPlugins (xo) {
|
||||
await Promise.all(mapToArray([
|
||||
`${__dirname}/../node_modules/`,
|
||||
'/usr/local/lib/node_modules/'
|
||||
], xo::registerPluginsInPath))
|
||||
}
|
||||
|
||||
// ===================================================================
|
||||
|
||||
async function makeWebServerListen (opts) {
|
||||
// Read certificate and key if necessary.
|
||||
const {certificate, key} = opts
|
||||
if (certificate && key) {
|
||||
[opts.certificate, opts.key] = await Promise.all([
|
||||
readFile(certificate),
|
||||
async function makeWebServerListen ({
|
||||
certificate,
|
||||
|
||||
// The properties was called `certificate` before.
|
||||
cert = certificate,
|
||||
|
||||
key,
|
||||
...opts
|
||||
}) {
|
||||
if (cert && key) {
|
||||
[opts.cert, opts.key] = await Promise.all([
|
||||
readFile(cert),
|
||||
readFile(key)
|
||||
])
|
||||
}
|
||||
@@ -279,14 +310,18 @@ async function makeWebServerListen (opts) {
|
||||
const niceAddress = await this.listen(opts)
|
||||
debug(`Web server listening on ${niceAddress}`)
|
||||
} catch (error) {
|
||||
warn(`Web server could not listen on ${error.niceAddress}`)
|
||||
if (error.niceAddress) {
|
||||
warn(`Web server could not listen on ${error.niceAddress}`)
|
||||
|
||||
const {code} = error
|
||||
if (code === 'EACCES') {
|
||||
warn(' Access denied.')
|
||||
warn(' Ports < 1024 are often reserved to privileges users.')
|
||||
} else if (code === 'EADDRINUSE') {
|
||||
warn(' Address already in use.')
|
||||
const {code} = error
|
||||
if (code === 'EACCES') {
|
||||
warn(' Access denied.')
|
||||
warn(' Ports < 1024 are often reserved to privileges users.')
|
||||
} else if (code === 'EADDRINUSE') {
|
||||
warn(' Address already in use.')
|
||||
}
|
||||
} else {
|
||||
warn('Web server could not listen:', error.message)
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -294,40 +329,60 @@ async function makeWebServerListen (opts) {
|
||||
async function createWebServer (opts) {
|
||||
const webServer = new WebServer()
|
||||
|
||||
await Promise.all(mapToArray(opts, makeWebServerListen, webServer))
|
||||
await Promise.all(mapToArray(opts, webServer::makeWebServerListen))
|
||||
|
||||
return webServer
|
||||
}
|
||||
|
||||
// ===================================================================
|
||||
|
||||
const setUpProxies = (express, opts) => {
|
||||
const setUpProxies = (express, opts, xo) => {
|
||||
if (!opts) {
|
||||
return
|
||||
}
|
||||
|
||||
const proxy = createProxyServer({
|
||||
ignorePath: true
|
||||
}).on('error', (error) => console.error(error))
|
||||
|
||||
// TODO: sort proxies by descending prefix length.
|
||||
|
||||
// HTTP request proxy.
|
||||
forEach(opts, (target, url) => {
|
||||
express.use(url, (req, res) => {
|
||||
proxyRequest(target + req.url, req, res)
|
||||
})
|
||||
express.use((req, res, next) => {
|
||||
const { url } = req
|
||||
|
||||
for (const prefix in opts) {
|
||||
if (startsWith(url, prefix)) {
|
||||
const target = opts[prefix]
|
||||
|
||||
proxy.web(req, res, {
|
||||
target: target + url.slice(prefix.length)
|
||||
})
|
||||
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
next()
|
||||
})
|
||||
|
||||
// WebSocket proxy.
|
||||
const webSocketServer = new WebSocket.Server({
|
||||
noServer: true
|
||||
})
|
||||
express.on('upgrade', (req, socket, head) => {
|
||||
const {url} = req
|
||||
xo.on('stop', () => pFromCallback(cb => webSocketServer.close(cb)))
|
||||
|
||||
for (let prefix in opts) {
|
||||
if (url.lastIndexOf(prefix, 0) !== -1) {
|
||||
const target = opts[prefix] + url.slice(prefix.length)
|
||||
webSocketServer.handleUpgrade(req, socket, head, socket => {
|
||||
wsProxy(socket, target)
|
||||
express.on('upgrade', (req, socket, head) => {
|
||||
const { url } = req
|
||||
|
||||
for (const prefix in opts) {
|
||||
if (startsWith(url, prefix)) {
|
||||
const target = opts[prefix]
|
||||
|
||||
proxy.ws(req, socket, head, {
|
||||
target: target + url.slice(prefix.length)
|
||||
})
|
||||
|
||||
return
|
||||
}
|
||||
}
|
||||
@@ -352,13 +407,6 @@ const setUpStaticFiles = (express, opts) => {
|
||||
|
||||
// ===================================================================
|
||||
|
||||
function setUpWebSocketServer (webServer) {
|
||||
return new WebSocket.Server({
|
||||
server: webServer,
|
||||
path: '/api/'
|
||||
})
|
||||
}
|
||||
|
||||
const errorClasses = {
|
||||
ALREADY_AUTHENTICATED: AlreadyAuthenticated,
|
||||
INVALID_CREDENTIAL: InvalidCredential,
|
||||
@@ -372,20 +420,7 @@ const apiHelpers = {
|
||||
// Handles both properties and wrapped models.
|
||||
const properties = user.properties || user
|
||||
|
||||
return pick(properties, 'id', 'email', 'groups', 'permission', 'provider')
|
||||
},
|
||||
|
||||
getServerPublicProperties (server) {
|
||||
// Handles both properties and wrapped models.
|
||||
const properties = server.properties || server
|
||||
|
||||
server = pick(properties, 'id', 'host', 'username')
|
||||
|
||||
// Injects connection status.
|
||||
const xapi = this._xapis[server.id]
|
||||
server.status = xapi ? xapi.status : 'disconnected'
|
||||
|
||||
return server
|
||||
return pick(properties, 'id', 'email', 'groups', 'permission', 'preferences', 'provider')
|
||||
},
|
||||
|
||||
throw (errorId, data) {
|
||||
@@ -393,16 +428,29 @@ const apiHelpers = {
|
||||
}
|
||||
}
|
||||
|
||||
const setUpApi = (webSocketServer, xo) => {
|
||||
const setUpApi = (webServer, xo, verboseLogsOnErrors) => {
|
||||
const webSocketServer = new WebSocket.Server({
|
||||
server: webServer,
|
||||
path: '/api/'
|
||||
})
|
||||
xo.on('stop', () => pFromCallback(cb => webSocketServer.close(cb)))
|
||||
|
||||
// FIXME: it can cause issues if there any property assignments in
|
||||
// XO methods called from the API.
|
||||
const context = { __proto__: xo, ...apiHelpers }
|
||||
|
||||
const api = new Api({
|
||||
context
|
||||
context,
|
||||
verboseLogsOnErrors
|
||||
})
|
||||
xo.defineProperty('api', api)
|
||||
|
||||
api.addMethods(apiMethods)
|
||||
|
||||
webSocketServer.on('connection', socket => {
|
||||
debug('+ WebSocket connection')
|
||||
const { remoteAddress } = socket.upgradeReq.socket
|
||||
|
||||
debug('+ WebSocket connection (%s)', remoteAddress)
|
||||
|
||||
// Create the abstract XO object for this connection.
|
||||
const connection = xo.createUserConnection()
|
||||
@@ -420,7 +468,7 @@ const setUpApi = (webSocketServer, xo) => {
|
||||
|
||||
// Close the XO connection with this WebSocket.
|
||||
socket.once('close', () => {
|
||||
debug('- WebSocket connection')
|
||||
debug('- WebSocket connection (%s)', remoteAddress)
|
||||
|
||||
connection.close()
|
||||
})
|
||||
@@ -443,25 +491,6 @@ const setUpApi = (webSocketServer, xo) => {
|
||||
}
|
||||
})
|
||||
})
|
||||
|
||||
return api
|
||||
}
|
||||
|
||||
const setUpScheduler = (api, xo) => {
|
||||
const jobExecutor = new JobExecutor(xo, api)
|
||||
const scheduler = new Scheduler(xo, {executor: jobExecutor})
|
||||
xo.scheduler = scheduler
|
||||
|
||||
return scheduler
|
||||
}
|
||||
|
||||
const setUpRemoteHandler = async xo => {
|
||||
const remoteHandler = new RemoteHandler()
|
||||
xo.remoteHandler = remoteHandler
|
||||
xo.initRemotes()
|
||||
xo.syncAllRemotes()
|
||||
|
||||
return remoteHandler
|
||||
}
|
||||
|
||||
// ===================================================================
|
||||
@@ -472,8 +501,9 @@ const setUpConsoleProxy = (webServer, xo) => {
|
||||
const webSocketServer = new WebSocket.Server({
|
||||
noServer: true
|
||||
})
|
||||
xo.on('stop', () => pFromCallback(cb => webSocketServer.close(cb)))
|
||||
|
||||
webServer.on('upgrade', (req, socket, head) => {
|
||||
webServer.on('upgrade', async (req, socket, head) => {
|
||||
const matches = CONSOLE_PROXY_PATH_RE.exec(req.url)
|
||||
if (!matches) {
|
||||
return
|
||||
@@ -481,68 +511,49 @@ const setUpConsoleProxy = (webServer, xo) => {
|
||||
|
||||
const [, id] = matches
|
||||
try {
|
||||
const xapi = xo.getXAPI(id, ['VM', 'VM-controller'])
|
||||
// TODO: factorize permissions checking in an Express middleware.
|
||||
{
|
||||
const { token } = parseCookies(req.headers.cookie)
|
||||
|
||||
const user = await xo.authenticateUser({ token })
|
||||
if (!await xo.hasPermissions(user.id, [ [ id, 'operate' ] ])) {
|
||||
throw new InvalidCredential()
|
||||
}
|
||||
|
||||
const { remoteAddress } = socket
|
||||
debug('+ Console proxy (%s - %s)', user.name, remoteAddress)
|
||||
socket.on('close', () => {
|
||||
debug('- Console proxy (%s - %s)', user.name, remoteAddress)
|
||||
})
|
||||
}
|
||||
|
||||
const xapi = xo.getXapi(id, ['VM', 'VM-controller'])
|
||||
const vmConsole = xapi.getVmConsole(id)
|
||||
|
||||
// FIXME: lost connection due to VM restart is not detected.
|
||||
webSocketServer.handleUpgrade(req, socket, head, connection => {
|
||||
proxyConsole(connection, vmConsole, xapi.sessionId)
|
||||
})
|
||||
} catch (_) {
|
||||
console.error(_)
|
||||
} catch (error) {
|
||||
console.error(error && error.stack || error)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
// ===================================================================
|
||||
|
||||
const registerPasswordAuthenticationProvider = xo => {
|
||||
async function passwordAuthenticationProvider ({
|
||||
username,
|
||||
password
|
||||
}) {
|
||||
if (username === undefined || password === undefined) {
|
||||
return
|
||||
}
|
||||
const USAGE = (({
|
||||
name,
|
||||
version
|
||||
}) => `Usage: ${name} [--safe-mode]
|
||||
|
||||
const user = await xo.getUserByName(username, true)
|
||||
if (user && await xo.checkUserPassword(user.id, password)) {
|
||||
return user.id
|
||||
}
|
||||
}
|
||||
|
||||
xo.registerAuthenticationProvider(passwordAuthenticationProvider)
|
||||
}
|
||||
|
||||
const registerTokenAuthenticationProvider = xo => {
|
||||
async function tokenAuthenticationProvider ({
|
||||
token: tokenId
|
||||
}) {
|
||||
if (!tokenId) {
|
||||
return
|
||||
}
|
||||
|
||||
try {
|
||||
return (await xo.getAuthenticationToken(tokenId)).user_id
|
||||
} catch (e) {
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
xo.registerAuthenticationProvider(tokenAuthenticationProvider)
|
||||
}
|
||||
|
||||
// ===================================================================
|
||||
|
||||
const help = (function ({name, version}) {
|
||||
return () => `${name} v${version}`
|
||||
})(require('../package.json'))
|
||||
${name} v${version}`)(require('../package.json'))
|
||||
|
||||
// ===================================================================
|
||||
|
||||
export default async function main (args) {
|
||||
if (args.indexOf('--help') !== -1 || args.indexOf('-h') !== -1) {
|
||||
return help()
|
||||
if (includes(args, '--help') || includes(args, '-h')) {
|
||||
return USAGE
|
||||
}
|
||||
|
||||
{
|
||||
@@ -571,18 +582,47 @@ export default async function main (args) {
|
||||
warn('Failed to change user/group:', error)
|
||||
}
|
||||
|
||||
// Create the main object which will connects to Xen servers and
|
||||
// manages all the models.
|
||||
const xo = new Xo()
|
||||
await xo.start(config)
|
||||
if (config.httpProxy) {
|
||||
setupHttpProxy(config.httpProxy)
|
||||
}
|
||||
|
||||
// Loads default authentication providers.
|
||||
registerPasswordAuthenticationProvider(xo)
|
||||
registerTokenAuthenticationProvider(xo)
|
||||
// Creates main object.
|
||||
const xo = new Xo(config)
|
||||
|
||||
// Register web server close on XO stop.
|
||||
xo.on('stop', () => pFromCallback(cb => webServer.close(cb)))
|
||||
|
||||
// Connects to all registered servers.
|
||||
await xo.start()
|
||||
|
||||
// Express is used to manage non WebSocket connections.
|
||||
const express = createExpressApp()
|
||||
|
||||
if (config.http.redirectToHttps) {
|
||||
let port
|
||||
forEach(config.http.listen, listen => {
|
||||
if (
|
||||
listen.port &&
|
||||
(listen.cert || listen.certificate)
|
||||
) {
|
||||
port = listen.port
|
||||
return false
|
||||
}
|
||||
})
|
||||
|
||||
if (port === undefined) {
|
||||
warn('Could not setup HTTPs redirection: no HTTPs port found')
|
||||
} else {
|
||||
express.use((req, res, next) => {
|
||||
if (req.secure) {
|
||||
return next()
|
||||
}
|
||||
|
||||
res.redirect(`https://${req.hostname}:${port}${req.originalUrl}`)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
// Must be set up before the API.
|
||||
setUpConsoleProxy(webServer, xo)
|
||||
|
||||
@@ -600,50 +640,28 @@ export default async function main (args) {
|
||||
})
|
||||
|
||||
// Must be set up before the static files.
|
||||
const webSocketServer = setUpWebSocketServer(webServer)
|
||||
const api = setUpApi(webSocketServer, xo)
|
||||
setUpApi(webServer, xo, config.verboseApiLogsOnErrors)
|
||||
|
||||
const scheduler = setUpScheduler(api, xo)
|
||||
setUpRemoteHandler(xo)
|
||||
|
||||
setUpProxies(express, config.http.proxies)
|
||||
setUpProxies(express, config.http.proxies, xo)
|
||||
|
||||
setUpStaticFiles(express, config.http.mounts)
|
||||
|
||||
if (config.plugins) {
|
||||
await registerPlugins(config.plugins, xo)
|
||||
if (!includes(args, '--safe-mode')) {
|
||||
await registerPlugins(xo)
|
||||
}
|
||||
|
||||
if (!(await xo._users.exists())) {
|
||||
const email = 'admin@admin.net'
|
||||
const password = 'admin'
|
||||
|
||||
await xo.createUser(email, {password, permission: 'admin'})
|
||||
info('Default user created:', email, ' with password', password)
|
||||
// TODO: implements a timeout? (or maybe it is the services launcher
|
||||
// responsibility?)
|
||||
const shutdown = signal => {
|
||||
debug('%s caught, closing…', signal)
|
||||
xo.stop()
|
||||
}
|
||||
|
||||
// Gracefully shutdown on signals.
|
||||
//
|
||||
// TODO: implements a timeout? (or maybe it is the services launcher
|
||||
// responsibility?)
|
||||
process.on('SIGINT', async () => {
|
||||
debug('SIGINT caught, closing web server…')
|
||||
process.on('SIGINT', () => shutdown('SIGINT'))
|
||||
process.on('SIGTERM', () => shutdown('SIGTERM'))
|
||||
|
||||
webServer.close()
|
||||
await eventToPromise(xo, 'stopped')
|
||||
|
||||
webSocketServer.close()
|
||||
scheduler.disableAll()
|
||||
await xo.disableAllRemotes()
|
||||
})
|
||||
process.on('SIGTERM', async () => {
|
||||
debug('SIGTERM caught, closing web server…')
|
||||
|
||||
webServer.close()
|
||||
|
||||
webSocketServer.close()
|
||||
scheduler.disableAll()
|
||||
await xo.disableAllRemotes()
|
||||
})
|
||||
|
||||
return eventToPromise(webServer, 'close')
|
||||
debug('bye :-)')
|
||||
}
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import assign from 'lodash.assign'
|
||||
import assign from 'lodash/assign'
|
||||
import {BaseError} from 'make-error'
|
||||
|
||||
import {
|
||||
@@ -43,13 +43,16 @@ export function _computeCrossProduct (items, productCb, extractValueMap = {}) {
|
||||
}
|
||||
|
||||
export default class JobExecutor {
|
||||
constructor (xo, api) {
|
||||
constructor (xo) {
|
||||
this.xo = xo
|
||||
this.api = api
|
||||
this._extractValueCb = {
|
||||
'set': items => items.values
|
||||
}
|
||||
this._logger = this.xo.getLogger('jobs')
|
||||
|
||||
// The logger is not available until Xo has started.
|
||||
xo.on('start', () => xo.getLogger('jobs').then(logger => {
|
||||
this._logger = logger
|
||||
}))
|
||||
}
|
||||
|
||||
async exec (job) {
|
||||
@@ -62,7 +65,9 @@ export default class JobExecutor {
|
||||
|
||||
try {
|
||||
if (job.type === 'call') {
|
||||
await this._execCall(job, runJobId)
|
||||
const execStatus = await this._execCall(job, runJobId)
|
||||
|
||||
this.xo.emit('job:terminated', execStatus)
|
||||
} else {
|
||||
throw new UnsupportedJobType(job)
|
||||
}
|
||||
@@ -83,10 +88,14 @@ export default class JobExecutor {
|
||||
async _execCall (job, runJobId) {
|
||||
let paramsFlatVector
|
||||
|
||||
if (job.paramsVector.type === 'crossProduct') {
|
||||
paramsFlatVector = _computeCrossProduct(job.paramsVector.items, productParams, this._extractValueCb)
|
||||
if (job.paramsVector) {
|
||||
if (job.paramsVector.type === 'crossProduct') {
|
||||
paramsFlatVector = _computeCrossProduct(job.paramsVector.items, productParams, this._extractValueCb)
|
||||
} else {
|
||||
throw new UnsupportedVectorType(job.paramsVector)
|
||||
}
|
||||
} else {
|
||||
throw new UnsupportedVectorType(job.paramsVector)
|
||||
paramsFlatVector = [{}] // One call with no parameters
|
||||
}
|
||||
|
||||
const connection = this.xo.createUserConnection()
|
||||
@@ -94,6 +103,12 @@ export default class JobExecutor {
|
||||
|
||||
connection.set('user_id', job.userId)
|
||||
|
||||
const execStatus = {
|
||||
runJobId,
|
||||
start: Date.now(),
|
||||
calls: {}
|
||||
}
|
||||
|
||||
forEach(paramsFlatVector, params => {
|
||||
const runCallId = this._logger.notice(`Starting ${job.method} call. (${job.id})`, {
|
||||
event: 'jobCall.start',
|
||||
@@ -102,8 +117,14 @@ export default class JobExecutor {
|
||||
params
|
||||
})
|
||||
|
||||
const call = execStatus.calls[runCallId] = {
|
||||
method: job.method,
|
||||
params,
|
||||
start: Date.now()
|
||||
}
|
||||
|
||||
promises.push(
|
||||
this.api.call(connection, job.method, assign({}, params)).then(
|
||||
this.xo.api.call(connection, job.method, assign({}, params)).then(
|
||||
value => {
|
||||
this._logger.notice(`Call ${job.method} (${runCallId}) is a success. (${job.id})`, {
|
||||
event: 'jobCall.end',
|
||||
@@ -111,21 +132,29 @@ export default class JobExecutor {
|
||||
runCallId,
|
||||
returnedValue: value
|
||||
})
|
||||
|
||||
call.returnedValue = value
|
||||
call.end = Date.now()
|
||||
},
|
||||
reason => {
|
||||
this._logger.notice(`Call ${job.method} (${runCallId}) has failed. (${job.id})`, {
|
||||
event: 'jobCall.end',
|
||||
runJobId,
|
||||
runCallId,
|
||||
error: reason
|
||||
error: {...reason, message: reason.message}
|
||||
})
|
||||
|
||||
call.error = reason
|
||||
call.end = Date.now()
|
||||
}
|
||||
)
|
||||
)
|
||||
})
|
||||
|
||||
connection.close()
|
||||
|
||||
await Promise.all(promises)
|
||||
execStatus.end = Date.now()
|
||||
|
||||
return execStatus
|
||||
}
|
||||
}
|
||||
|
||||
@@ -3,8 +3,10 @@
|
||||
import {expect} from 'chai'
|
||||
import leche from 'leche'
|
||||
|
||||
import {productParams} from './job-executor'
|
||||
import {_computeCrossProduct} from './job-executor'
|
||||
import {
|
||||
_computeCrossProduct,
|
||||
productParams
|
||||
} from './job-executor'
|
||||
|
||||
describe('productParams', function () {
|
||||
leche.withData({
|
||||
|
||||
22
src/loggers/abstract.js
Normal file
22
src/loggers/abstract.js
Normal file
@@ -0,0 +1,22 @@
|
||||
export default class AbstractLogger {}
|
||||
|
||||
// See: https://en.wikipedia.org/wiki/Syslog#Severity_level
|
||||
const LEVELS = [
|
||||
'emergency',
|
||||
'alert',
|
||||
'critical',
|
||||
'error',
|
||||
'warning',
|
||||
'notice',
|
||||
'informational',
|
||||
'debug'
|
||||
]
|
||||
|
||||
// Create high level log methods.
|
||||
for (const level of LEVELS) {
|
||||
Object.defineProperty(AbstractLogger.prototype, level, {
|
||||
value (message, data) {
|
||||
return this._add(level, message, data)
|
||||
}
|
||||
})
|
||||
}
|
||||
@@ -1,44 +1,41 @@
|
||||
import highland from 'highland'
|
||||
|
||||
// See: https://en.wikipedia.org/wiki/Syslog#Severity_level
|
||||
const LEVELS = [
|
||||
'emergency',
|
||||
'alert',
|
||||
'critical',
|
||||
'error',
|
||||
'warning',
|
||||
'notice',
|
||||
'informational',
|
||||
'debug'
|
||||
]
|
||||
import AbstractLogger from './abstract'
|
||||
import { forEach, noop } from '../utils'
|
||||
|
||||
let lastDate = 0
|
||||
let lastId = 0
|
||||
let increment = 0
|
||||
|
||||
function generateUniqueKey (date) {
|
||||
lastId = (date === lastDate) ? (lastId + 1) : 0
|
||||
lastDate = date
|
||||
if (date === lastDate) {
|
||||
return `${date}:${increment++}`
|
||||
}
|
||||
|
||||
return `${lastDate}:${lastId}`
|
||||
increment = 0
|
||||
return String(lastDate = date)
|
||||
}
|
||||
|
||||
export default class LevelDbLogger {
|
||||
export default class LevelDbLogger extends AbstractLogger {
|
||||
constructor (db, namespace) {
|
||||
super()
|
||||
|
||||
this._db = db
|
||||
this._namespace = namespace
|
||||
}
|
||||
|
||||
_add (level, message, data) {
|
||||
const time = Date.now()
|
||||
|
||||
const log = {
|
||||
level,
|
||||
message,
|
||||
data,
|
||||
namespace: this._namespace,
|
||||
time: Date.now()
|
||||
time
|
||||
}
|
||||
|
||||
const key = generateUniqueKey(log.time)
|
||||
this._db.put(key, log)
|
||||
const key = generateUniqueKey(time)
|
||||
this._db.putSync(key, log)
|
||||
return key
|
||||
}
|
||||
|
||||
@@ -46,13 +43,17 @@ export default class LevelDbLogger {
|
||||
return highland(this._db.createReadStream())
|
||||
.filter(({value}) => value.namespace === this._namespace)
|
||||
}
|
||||
}
|
||||
|
||||
// Create high level log methods.
|
||||
for (const level of LEVELS) {
|
||||
Object.defineProperty(LevelDbLogger.prototype, level, {
|
||||
value (message, data) {
|
||||
return this._add(level, message, data)
|
||||
del (id) {
|
||||
if (!Array.isArray(id)) {
|
||||
id = [id]
|
||||
}
|
||||
})
|
||||
forEach(id, id => {
|
||||
this._db.get(id).then(value => {
|
||||
if (value.namespace === this._namespace) {
|
||||
this._db.delSync(id, noop)
|
||||
}
|
||||
})
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
202
src/logs-cli.js
Normal file
202
src/logs-cli.js
Normal file
@@ -0,0 +1,202 @@
|
||||
import appConf from 'app-conf'
|
||||
import get from 'lodash/get'
|
||||
import highland from 'highland'
|
||||
import levelup from 'level-party'
|
||||
import ndjson from 'ndjson'
|
||||
import parseArgs from 'minimist'
|
||||
import sublevel from 'level-sublevel'
|
||||
import util from 'util'
|
||||
import { repair as repairDb } from 'leveldown'
|
||||
|
||||
import {forEach} from './utils'
|
||||
import globMatcher from './glob-matcher'
|
||||
|
||||
// ===================================================================
|
||||
|
||||
async function printLogs (db, args) {
|
||||
let stream = highland(db.createReadStream({reverse: true}))
|
||||
|
||||
if (args.since) {
|
||||
stream = stream.filter(({value}) => (value.time >= args.since))
|
||||
}
|
||||
|
||||
if (args.until) {
|
||||
stream = stream.filter(({value}) => (value.time <= args.until))
|
||||
}
|
||||
|
||||
const fields = Object.keys(args.matchers)
|
||||
|
||||
if (fields.length > 0) {
|
||||
stream = stream.filter(({value}) => {
|
||||
for (const field of fields) {
|
||||
const fieldValue = get(value, field)
|
||||
if (fieldValue === undefined || !args.matchers[field](fieldValue)) {
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
return true
|
||||
})
|
||||
}
|
||||
|
||||
stream = stream.take(args.limit)
|
||||
|
||||
if (args.json) {
|
||||
stream = highland(stream.pipe(ndjson.serialize()))
|
||||
.each(value => {
|
||||
process.stdout.write(value)
|
||||
})
|
||||
} else {
|
||||
stream = stream.each(value => {
|
||||
console.log(util.inspect(value, { depth: null }))
|
||||
})
|
||||
}
|
||||
|
||||
return new Promise(resolve => {
|
||||
stream.done(resolve)
|
||||
})
|
||||
}
|
||||
|
||||
// ===================================================================
|
||||
|
||||
function helper () {
|
||||
console.error(`
|
||||
xo-server-logs --help, -h
|
||||
|
||||
Display this help message.
|
||||
|
||||
xo-server-logs [--json] [--limit=<limit>] [--since=<date>] [--until=<date>] [<pattern>...]
|
||||
|
||||
Prints the logs.
|
||||
|
||||
--json
|
||||
Display the results as new line delimited JSON for consumption
|
||||
by another program.
|
||||
|
||||
--limit=<limit>, -n <limit>
|
||||
Limit the number of results to be displayed (default 100)
|
||||
|
||||
--since=<date>, --until=<date>
|
||||
Start showing entries on or newer than the specified date, or on
|
||||
or older than the specified date.
|
||||
|
||||
<date> should use the format \`YYYY-MM-DD\`.
|
||||
|
||||
<pattern>
|
||||
Patterns can be used to filter the entries.
|
||||
|
||||
Patterns have the following format \`<field>=<value>\`/\`<field>\`.
|
||||
|
||||
xo-server-logs --repair
|
||||
|
||||
Repair/compact the database.
|
||||
|
||||
This is an advanced operation and should be used only when necessary and offline (xo-server should be stopped).
|
||||
`)
|
||||
}
|
||||
|
||||
// ===================================================================
|
||||
|
||||
function getArgs () {
|
||||
const stringArgs = ['since', 'until', 'limit']
|
||||
const args = parseArgs(process.argv.slice(2), {
|
||||
string: stringArgs,
|
||||
boolean: ['help', 'json', 'repair'],
|
||||
default: {
|
||||
limit: 100,
|
||||
json: false,
|
||||
help: false
|
||||
},
|
||||
alias: {
|
||||
limit: 'n',
|
||||
help: 'h'
|
||||
}
|
||||
})
|
||||
|
||||
const patterns = {}
|
||||
|
||||
for (let value of args._) {
|
||||
value = String(value)
|
||||
|
||||
const i = value.indexOf('=')
|
||||
|
||||
if (i !== -1) {
|
||||
const field = value.slice(0, i)
|
||||
const pattern = value.slice(i + 1)
|
||||
|
||||
patterns[pattern]
|
||||
? patterns[field].push(pattern)
|
||||
: patterns[field] = [ pattern ]
|
||||
} else if (!patterns[value]) {
|
||||
patterns[value] = null
|
||||
}
|
||||
}
|
||||
|
||||
const trueFunction = () => true
|
||||
args.matchers = {}
|
||||
|
||||
for (const field in patterns) {
|
||||
const values = patterns[field]
|
||||
args.matchers[field] = (values === null) ? trueFunction : globMatcher(values)
|
||||
}
|
||||
|
||||
// Warning: minimist makes one array of values if the same option is used many times.
|
||||
// (But only for strings args, not boolean)
|
||||
forEach(stringArgs, arg => {
|
||||
if (args[arg] instanceof Array) {
|
||||
throw new Error(`error: too many values for ${arg} argument`)
|
||||
}
|
||||
})
|
||||
|
||||
;['since', 'until'].forEach(arg => {
|
||||
if (args[arg] !== undefined) {
|
||||
args[arg] = Date.parse(args[arg])
|
||||
|
||||
if (isNaN(args[arg])) {
|
||||
throw new Error(`error: bad ${arg} timestamp format`)
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
if (isNaN(args.limit = +args.limit)) {
|
||||
throw new Error('error: limit is not a valid number')
|
||||
}
|
||||
|
||||
return args
|
||||
}
|
||||
|
||||
// ===================================================================
|
||||
|
||||
export default async function main () {
|
||||
const args = getArgs()
|
||||
|
||||
if (args.help) {
|
||||
helper()
|
||||
return
|
||||
}
|
||||
|
||||
const config = await appConf.load('xo-server', {
|
||||
ignoreUnknownFormats: true
|
||||
})
|
||||
|
||||
if (args.repair) {
|
||||
await new Promise((resolve, reject) => {
|
||||
repairDb(`${config.datadir}/leveldb`, error => {
|
||||
if (error) {
|
||||
reject(error)
|
||||
} else {
|
||||
resolve()
|
||||
}
|
||||
})
|
||||
})
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
const db = sublevel(levelup(
|
||||
`${config.datadir}/leveldb`,
|
||||
{ valueEncoding: 'json' }
|
||||
)).sublevel('logs')
|
||||
|
||||
return printLogs(db, args)
|
||||
}
|
||||
@@ -2,7 +2,8 @@ import {EventEmitter} from 'events'
|
||||
|
||||
import {
|
||||
forEach,
|
||||
isEmpty
|
||||
isEmpty,
|
||||
isString
|
||||
} from './utils'
|
||||
|
||||
// ===================================================================
|
||||
@@ -41,7 +42,7 @@ export default class Model extends EventEmitter {
|
||||
set (properties, value) {
|
||||
// This method can also be used with two arguments to set a single
|
||||
// property.
|
||||
if (value !== undefined) {
|
||||
if (isString(properties)) {
|
||||
properties = { [properties]: value }
|
||||
}
|
||||
|
||||
|
||||
@@ -29,7 +29,7 @@ export class Groups extends Collection {
|
||||
// Serializes.
|
||||
group.users = JSON.stringify(group.users)
|
||||
|
||||
return await this.update(group)
|
||||
return /* await */ this.update(group)
|
||||
}
|
||||
|
||||
async get (properties) {
|
||||
|
||||
@@ -19,13 +19,13 @@ export class Jobs extends Collection {
|
||||
job.userId = userId
|
||||
// Serializes.
|
||||
job.paramsVector = JSON.stringify(job.paramsVector)
|
||||
return await this.add(new Job(job))
|
||||
return /* await */ this.add(new Job(job))
|
||||
}
|
||||
|
||||
async save (job) {
|
||||
// Serializes.
|
||||
job.paramsVector = JSON.stringify(job.paramsVector)
|
||||
return await this.update(job)
|
||||
return /* await */ this.update(job)
|
||||
}
|
||||
|
||||
async get (properties) {
|
||||
|
||||
@@ -18,7 +18,7 @@ export class PluginsMetadata extends Collection {
|
||||
}
|
||||
|
||||
async save ({ id, autoload, configuration }) {
|
||||
return await this.update({
|
||||
return /* await */ this.update({
|
||||
id,
|
||||
autoload: autoload ? 'true' : 'false',
|
||||
configuration: configuration && JSON.stringify(configuration)
|
||||
@@ -31,7 +31,7 @@ export class PluginsMetadata extends Collection {
|
||||
throw new Error('no such plugin metadata')
|
||||
}
|
||||
|
||||
return await this.save({
|
||||
return /* await */ this.save({
|
||||
...pluginMetadata.properties,
|
||||
...data
|
||||
})
|
||||
|
||||
@@ -1,6 +1,8 @@
|
||||
import Collection from '../collection/redis'
|
||||
import Model from '../model'
|
||||
import { forEach } from '../utils'
|
||||
import {
|
||||
forEach
|
||||
} from '../utils'
|
||||
|
||||
// ===================================================================
|
||||
|
||||
@@ -25,7 +27,7 @@ export class Remotes extends Collection {
|
||||
}
|
||||
|
||||
async save (remote) {
|
||||
return await this.update(remote)
|
||||
return /* await */ this.update(remote)
|
||||
}
|
||||
|
||||
async get (properties) {
|
||||
|
||||
@@ -15,17 +15,19 @@ export class Schedules extends Collection {
|
||||
return 'schedule:'
|
||||
}
|
||||
|
||||
create (userId, job, cron, enabled) {
|
||||
create (userId, job, cron, enabled, name = undefined, timezone = undefined) {
|
||||
return this.add(new Schedule({
|
||||
userId,
|
||||
job,
|
||||
cron,
|
||||
enabled
|
||||
enabled,
|
||||
name,
|
||||
timezone
|
||||
}))
|
||||
}
|
||||
|
||||
async save (schedule) {
|
||||
return await this.update(schedule)
|
||||
return /* await */ this.update(schedule)
|
||||
}
|
||||
|
||||
async get (properties) {
|
||||
|
||||
@@ -12,11 +12,11 @@ export class Servers extends Collection {
|
||||
return Server
|
||||
}
|
||||
|
||||
async create ({host, username, password}) {
|
||||
async create ({host, username, password, readOnly}) {
|
||||
if (await this.exists({host})) {
|
||||
throw new Error('server already exists')
|
||||
}
|
||||
|
||||
return await this.add({host, username, password})
|
||||
return /* await */ this.add({host, username, password, readOnly})
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { hash } from 'hashy'
|
||||
import isEmpty from 'lodash/isEmpty'
|
||||
|
||||
import Collection from '../collection/redis'
|
||||
import Model from '../model'
|
||||
@@ -6,26 +6,7 @@ import { forEach } from '../utils'
|
||||
|
||||
// ===================================================================
|
||||
|
||||
const PERMISSIONS = {
|
||||
none: 0,
|
||||
read: 1,
|
||||
write: 2,
|
||||
admin: 3
|
||||
}
|
||||
|
||||
// ===================================================================
|
||||
|
||||
export default class User extends Model {
|
||||
hasPermission (permission) {
|
||||
return PERMISSIONS[this.get('permission')] >= PERMISSIONS[permission]
|
||||
}
|
||||
|
||||
setPassword (password) {
|
||||
return hash(password).then(hash => {
|
||||
return this.set('pw_hash', hash)
|
||||
})
|
||||
}
|
||||
}
|
||||
export default class User extends Model {}
|
||||
|
||||
User.prototype.default = {
|
||||
permission: 'none'
|
||||
@@ -33,6 +14,18 @@ User.prototype.default = {
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
const parseProp = (obj, name) => {
|
||||
const value = obj[name]
|
||||
if (value == null) {
|
||||
return
|
||||
}
|
||||
try {
|
||||
return JSON.parse(value)
|
||||
} catch (error) {
|
||||
console.warn('cannot parse user[%s] (%s):', name, value, error)
|
||||
}
|
||||
}
|
||||
|
||||
export class Users extends Collection {
|
||||
get Model () {
|
||||
return User
|
||||
@@ -44,30 +37,27 @@ export class Users extends Collection {
|
||||
throw new Error(`the user ${email} already exists`)
|
||||
}
|
||||
|
||||
// Password is a special case.
|
||||
const password = properties.password
|
||||
delete properties.password
|
||||
|
||||
// Adds the email to the user's properties.
|
||||
properties.email = email
|
||||
|
||||
// Create the user object.
|
||||
const user = new User(properties)
|
||||
|
||||
// Sets the password if any.
|
||||
if (password != null) {
|
||||
await user.setPassword(password)
|
||||
}
|
||||
|
||||
// Adds the user to the collection.
|
||||
return await this.add(user)
|
||||
return /* await */ this.add(user)
|
||||
}
|
||||
|
||||
async save (user) {
|
||||
// Serializes.
|
||||
user.groups = JSON.stringify(user.groups)
|
||||
let tmp
|
||||
if (!isEmpty(tmp = user.groups)) {
|
||||
user.groups = JSON.stringify(tmp)
|
||||
}
|
||||
if (!isEmpty(tmp = user.preferences)) {
|
||||
user.preferences = JSON.stringify(tmp)
|
||||
}
|
||||
|
||||
return await this.update(user)
|
||||
return /* await */ this.update(user)
|
||||
}
|
||||
|
||||
async get (properties) {
|
||||
@@ -75,13 +65,11 @@ export class Users extends Collection {
|
||||
|
||||
// Deserializes
|
||||
forEach(users, user => {
|
||||
const {groups} = user
|
||||
try {
|
||||
user.groups = groups ? JSON.parse(groups) : []
|
||||
} catch (_) {
|
||||
console.warn('cannot parse user.groups:', groups)
|
||||
user.groups = []
|
||||
}
|
||||
let tmp
|
||||
user.groups = ((tmp = parseProp(user, 'groups')) && tmp.length)
|
||||
? tmp
|
||||
: undefined
|
||||
user.preferences = parseProp(user, 'preferences')
|
||||
})
|
||||
|
||||
return users
|
||||
|
||||
@@ -23,13 +23,19 @@ export default function proxyConsole (ws, vmConsole, sessionId) {
|
||||
'', ''
|
||||
].join('\r\n'))
|
||||
|
||||
const onSend = (error) => {
|
||||
if (error) {
|
||||
debug('error sending to the XO client: %s', error.stack || error.message || error)
|
||||
}
|
||||
}
|
||||
|
||||
socket.pipe(partialStream('\r\n\r\n', headers => {
|
||||
// TODO: check status code 200.
|
||||
debug('connected')
|
||||
})).on('data', data => {
|
||||
if (!closed) {
|
||||
// Encode to base 64.
|
||||
ws.send(data.toString('base64'))
|
||||
ws.send(data.toString('base64'), onSend)
|
||||
}
|
||||
}).on('end', () => {
|
||||
if (!closed) {
|
||||
|
||||
@@ -1,141 +0,0 @@
|
||||
import filter from 'lodash.filter'
|
||||
import fs from 'fs-promise'
|
||||
import {exec} from 'child_process'
|
||||
|
||||
import {
|
||||
forEach,
|
||||
promisify
|
||||
} from './utils'
|
||||
|
||||
const execAsync = promisify(exec)
|
||||
|
||||
const noop = () => {}
|
||||
|
||||
class NfsMounter {
|
||||
async _loadRealMounts () {
|
||||
let stdout
|
||||
try {
|
||||
[stdout] = await execAsync('findmnt -P -t nfs,nfs4 --output SOURCE,TARGET --noheadings')
|
||||
} catch (exc) {
|
||||
// When no mounts are found, the call pretends to fail...
|
||||
}
|
||||
const mounted = {}
|
||||
if (stdout) {
|
||||
const regex = /^SOURCE="([^:]*):(.*)" TARGET="(.*)"$/
|
||||
forEach(stdout.split('\n'), m => {
|
||||
if (m) {
|
||||
const match = regex.exec(m)
|
||||
mounted[match[3]] = {
|
||||
host: match[1],
|
||||
share: match[2]
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
this._realMounts = mounted
|
||||
return mounted
|
||||
}
|
||||
|
||||
_fullPath (path) {
|
||||
return path
|
||||
}
|
||||
|
||||
_matchesRealMount (mount) {
|
||||
return this._fullPath(mount.path) in this._realMounts
|
||||
}
|
||||
|
||||
async _mount (mount) {
|
||||
const path = this._fullPath(mount.path)
|
||||
await fs.ensureDir(path)
|
||||
return await execAsync(`mount -t nfs ${mount.host}:${mount.share} ${path}`)
|
||||
}
|
||||
|
||||
async forget (mount) {
|
||||
try {
|
||||
await this._umount(mount)
|
||||
} catch (_) {
|
||||
// We have to go on...
|
||||
}
|
||||
}
|
||||
|
||||
async _umount (mount) {
|
||||
const path = this._fullPath(mount.path)
|
||||
await execAsync(`umount ${path}`)
|
||||
}
|
||||
|
||||
async sync (mount) {
|
||||
await this._loadRealMounts()
|
||||
if (this._matchesRealMount(mount) && !mount.enabled) {
|
||||
try {
|
||||
await this._umount(mount)
|
||||
} catch (exc) {
|
||||
mount.enabled = true
|
||||
mount.error = exc.message
|
||||
}
|
||||
} else if (!this._matchesRealMount(mount) && mount.enabled) {
|
||||
try {
|
||||
await this._mount(mount)
|
||||
} catch (exc) {
|
||||
mount.enabled = false
|
||||
mount.error = exc.message
|
||||
}
|
||||
}
|
||||
return mount
|
||||
}
|
||||
|
||||
async disableAll (mounts) {
|
||||
await this._loadRealMounts()
|
||||
forEach(mounts, async mount => {
|
||||
if (this._matchesRealMount(mount)) {
|
||||
try {
|
||||
await this._umount(mount)
|
||||
} catch (_) {
|
||||
// We have to go on...
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
class LocalHandler {
|
||||
constructor () {
|
||||
this.forget = noop
|
||||
this.disableAll = noop
|
||||
}
|
||||
|
||||
async sync (local) {
|
||||
if (local.enabled) {
|
||||
try {
|
||||
await fs.ensureDir(local.path)
|
||||
await fs.access(local.path, fs.R_OK | fs.W_OK)
|
||||
} catch (exc) {
|
||||
local.enabled = false
|
||||
local.error = exc.message
|
||||
}
|
||||
}
|
||||
return local
|
||||
}
|
||||
}
|
||||
|
||||
export default class RemoteHandler {
|
||||
constructor () {
|
||||
this.handlers = {
|
||||
nfs: new NfsMounter(),
|
||||
local: new LocalHandler()
|
||||
}
|
||||
}
|
||||
|
||||
async sync (remote) {
|
||||
return await this.handlers[remote.type].sync(remote)
|
||||
}
|
||||
|
||||
async forget (remote) {
|
||||
return await this.handlers[remote.type].forget(remote)
|
||||
}
|
||||
|
||||
async disableAll (remotes) {
|
||||
const promises = []
|
||||
forEach(['local', 'nfs'], type => promises.push(this.handlers[type].disableAll(filter(remotes, remote => remote.type === type))))
|
||||
await Promise.all(promises)
|
||||
}
|
||||
}
|
||||
210
src/remote-handlers/abstract.js
Normal file
210
src/remote-handlers/abstract.js
Normal file
@@ -0,0 +1,210 @@
|
||||
import eventToPromise from 'event-to-promise'
|
||||
import through2 from 'through2'
|
||||
|
||||
import {
|
||||
parse
|
||||
} from 'xo-remote-parser'
|
||||
|
||||
import {
|
||||
addChecksumToReadStream,
|
||||
getPseudoRandomBytes,
|
||||
noop,
|
||||
pCatch,
|
||||
streamToBuffer,
|
||||
validChecksumOfReadStream
|
||||
} from '../utils'
|
||||
|
||||
export default class RemoteHandlerAbstract {
|
||||
constructor (remote) {
|
||||
this._remote = {...remote, ...parse(remote.url)}
|
||||
if (this._remote.type !== this.type) {
|
||||
throw new Error('Incorrect remote type')
|
||||
}
|
||||
}
|
||||
|
||||
get type () {
|
||||
throw new Error('Not implemented')
|
||||
}
|
||||
|
||||
/**
|
||||
* Asks the handler to sync the state of the effective remote with its' metadata
|
||||
*/
|
||||
async sync () {
|
||||
return this._sync()
|
||||
}
|
||||
|
||||
async _sync () {
|
||||
throw new Error('Not implemented')
|
||||
}
|
||||
|
||||
/**
|
||||
* Free the resources possibly dedicated to put the remote at work, when it is no more needed
|
||||
*/
|
||||
async forget () {
|
||||
return this._forget()
|
||||
}
|
||||
|
||||
async _forget () {
|
||||
throw new Error('Not implemented')
|
||||
}
|
||||
|
||||
async test () {
|
||||
const testFileName = `${Date.now()}.test`
|
||||
const data = getPseudoRandomBytes(1024 * 1024)
|
||||
let step = 'write'
|
||||
try {
|
||||
await this.outputFile(testFileName, data)
|
||||
step = 'read'
|
||||
const read = await this.readFile(testFileName)
|
||||
if (data.compare(read) !== 0) {
|
||||
throw new Error('output and input did not match')
|
||||
}
|
||||
return {
|
||||
success: true
|
||||
}
|
||||
} catch (error) {
|
||||
return {
|
||||
success: false,
|
||||
step,
|
||||
file: testFileName,
|
||||
error: error.message || String(error)
|
||||
}
|
||||
} finally {
|
||||
this.unlink(testFileName).catch(noop)
|
||||
}
|
||||
}
|
||||
|
||||
async outputFile (file, data, options) {
|
||||
return this._outputFile(file, data, {
|
||||
flags: 'wx',
|
||||
...options
|
||||
})
|
||||
}
|
||||
|
||||
async _outputFile (file, data, options) {
|
||||
const stream = await this.createOutputStream(file, options)
|
||||
const promise = eventToPromise(stream, 'finish')
|
||||
stream.end(data)
|
||||
return promise
|
||||
}
|
||||
|
||||
async readFile (file, options) {
|
||||
return this._readFile(file, options)
|
||||
}
|
||||
|
||||
_readFile (file, options) {
|
||||
return this.createReadStream(file, options).then(streamToBuffer)
|
||||
}
|
||||
|
||||
async rename (oldPath, newPath) {
|
||||
return this._rename(oldPath, newPath)
|
||||
}
|
||||
|
||||
async _rename (oldPath, newPath) {
|
||||
throw new Error('Not implemented')
|
||||
}
|
||||
|
||||
async list (dir = '.') {
|
||||
return this._list(dir)
|
||||
}
|
||||
|
||||
async _list (dir) {
|
||||
throw new Error('Not implemented')
|
||||
}
|
||||
|
||||
async createReadStream (file, {
|
||||
checksum = false,
|
||||
ignoreMissingChecksum = false,
|
||||
...options
|
||||
} = {}) {
|
||||
const streamP = this._createReadStream(file, options).then(async stream => {
|
||||
await eventToPromise(stream, 'readable')
|
||||
|
||||
if (stream.length === undefined) {
|
||||
stream.length = await this.getSize(file)::pCatch(noop)
|
||||
}
|
||||
|
||||
return stream
|
||||
})
|
||||
|
||||
if (!checksum) {
|
||||
return streamP
|
||||
}
|
||||
|
||||
try {
|
||||
checksum = await this.readFile(`${file}.checksum`)
|
||||
} catch (error) {
|
||||
if (error.code === 'ENOENT' && ignoreMissingChecksum) {
|
||||
return streamP
|
||||
}
|
||||
|
||||
throw error
|
||||
}
|
||||
|
||||
let stream = await streamP
|
||||
|
||||
const { length } = stream
|
||||
stream = validChecksumOfReadStream(stream, checksum.toString())
|
||||
stream.length = length
|
||||
|
||||
return stream
|
||||
}
|
||||
|
||||
async _createReadStream (file, options) {
|
||||
throw new Error('Not implemented')
|
||||
}
|
||||
|
||||
async createOutputStream (file, {
|
||||
checksum = false,
|
||||
...options
|
||||
} = {}) {
|
||||
const streamP = this._createOutputStream(file, {
|
||||
flags: 'wx',
|
||||
...options
|
||||
})
|
||||
|
||||
if (!checksum) {
|
||||
return streamP
|
||||
}
|
||||
|
||||
const connectorStream = through2()
|
||||
const forwardError = error => {
|
||||
connectorStream.emit('error', error)
|
||||
}
|
||||
|
||||
const streamWithChecksum = addChecksumToReadStream(connectorStream)
|
||||
streamWithChecksum.pipe(await streamP)
|
||||
|
||||
streamWithChecksum.checksum
|
||||
.then(value => this.outputFile(`${file}.checksum`, value))
|
||||
.catch(forwardError)
|
||||
|
||||
return connectorStream
|
||||
}
|
||||
|
||||
async _createOutputStream (file, options) {
|
||||
throw new Error('Not implemented')
|
||||
}
|
||||
|
||||
async unlink (file, {
|
||||
checksum = false
|
||||
} = {}) {
|
||||
if (checksum) {
|
||||
this._unlink(`${file}.checksum`)::pCatch(noop)
|
||||
}
|
||||
|
||||
return this._unlink(file)
|
||||
}
|
||||
|
||||
async _unlink (file) {
|
||||
throw new Error('Not implemented')
|
||||
}
|
||||
|
||||
async getSize (file) {
|
||||
return this._getSize(file)
|
||||
}
|
||||
|
||||
async _getSize (file) {
|
||||
throw new Error('Not implemented')
|
||||
}
|
||||
}
|
||||
90
src/remote-handlers/local.js
Normal file
90
src/remote-handlers/local.js
Normal file
@@ -0,0 +1,90 @@
|
||||
import fs from 'fs-promise'
|
||||
import startsWith from 'lodash/startsWith'
|
||||
import {
|
||||
dirname,
|
||||
resolve
|
||||
} from 'path'
|
||||
|
||||
import RemoteHandlerAbstract from './abstract'
|
||||
import {
|
||||
noop
|
||||
} from '../utils'
|
||||
|
||||
export default class LocalHandler extends RemoteHandlerAbstract {
|
||||
get type () {
|
||||
return 'file'
|
||||
}
|
||||
|
||||
_getRealPath () {
|
||||
return this._remote.path
|
||||
}
|
||||
|
||||
_getFilePath (file) {
|
||||
const realPath = this._getRealPath()
|
||||
const parts = [realPath]
|
||||
if (file) {
|
||||
parts.push(file)
|
||||
}
|
||||
const path = resolve.apply(null, parts)
|
||||
if (!startsWith(path, realPath)) {
|
||||
throw new Error('Remote path is unavailable')
|
||||
}
|
||||
return path
|
||||
}
|
||||
|
||||
async _sync () {
|
||||
if (this._remote.enabled) {
|
||||
try {
|
||||
const path = this._getRealPath()
|
||||
await fs.ensureDir(path)
|
||||
await fs.access(path, fs.R_OK | fs.W_OK)
|
||||
} catch (exc) {
|
||||
this._remote.enabled = false
|
||||
this._remote.error = exc.message
|
||||
}
|
||||
}
|
||||
return this._remote
|
||||
}
|
||||
|
||||
async _forget () {
|
||||
return noop()
|
||||
}
|
||||
|
||||
async _outputFile (file, data, options) {
|
||||
const path = this._getFilePath(file)
|
||||
await fs.ensureDir(dirname(path))
|
||||
await fs.writeFile(path, data, options)
|
||||
}
|
||||
|
||||
async _readFile (file, options) {
|
||||
return fs.readFile(this._getFilePath(file), options)
|
||||
}
|
||||
|
||||
async _rename (oldPath, newPath) {
|
||||
return fs.rename(this._getFilePath(oldPath), this._getFilePath(newPath))
|
||||
}
|
||||
|
||||
async _list (dir = '.') {
|
||||
return fs.readdir(this._getFilePath(dir))
|
||||
}
|
||||
|
||||
async _createReadStream (file, options) {
|
||||
return fs.createReadStream(this._getFilePath(file), options)
|
||||
}
|
||||
|
||||
async _createOutputStream (file, options) {
|
||||
const path = this._getFilePath(file)
|
||||
await fs.ensureDir(dirname(path))
|
||||
return fs.createWriteStream(path, options)
|
||||
}
|
||||
|
||||
async _unlink (file) {
|
||||
return fs.unlink(this._getFilePath(file))
|
||||
}
|
||||
|
||||
async _getSize (file) {
|
||||
const stats = await fs.stat(this._getFilePath(file))
|
||||
return stats.size
|
||||
}
|
||||
|
||||
}
|
||||
84
src/remote-handlers/nfs.js
Normal file
84
src/remote-handlers/nfs.js
Normal file
@@ -0,0 +1,84 @@
|
||||
import execa from 'execa'
|
||||
import fs from 'fs-promise'
|
||||
|
||||
import LocalHandler from './local'
|
||||
import {
|
||||
forEach
|
||||
} from '../utils'
|
||||
|
||||
export default class NfsHandler extends LocalHandler {
|
||||
get type () {
|
||||
return 'nfs'
|
||||
}
|
||||
|
||||
_getRealPath () {
|
||||
return `/tmp/xo-server/mounts/${this._remote.id}`
|
||||
}
|
||||
|
||||
async _loadRealMounts () {
|
||||
let stdout
|
||||
const mounted = {}
|
||||
try {
|
||||
stdout = await execa.stdout('findmnt', ['-P', '-t', 'nfs,nfs4', '--output', 'SOURCE,TARGET', '--noheadings'])
|
||||
const regex = /^SOURCE="([^:]*):(.*)" TARGET="(.*)"$/
|
||||
forEach(stdout.split('\n'), m => {
|
||||
if (m) {
|
||||
const match = regex.exec(m)
|
||||
mounted[match[3]] = {
|
||||
host: match[1],
|
||||
share: match[2]
|
||||
}
|
||||
}
|
||||
})
|
||||
} catch (exc) {
|
||||
// When no mounts are found, the call pretends to fail...
|
||||
if (exc.stderr !== '') {
|
||||
throw exc
|
||||
}
|
||||
}
|
||||
|
||||
this._realMounts = mounted
|
||||
return mounted
|
||||
}
|
||||
|
||||
_matchesRealMount () {
|
||||
return this._getRealPath() in this._realMounts
|
||||
}
|
||||
|
||||
async _mount () {
|
||||
await fs.ensureDir(this._getRealPath())
|
||||
return execa('mount', ['-t', 'nfs', '-o', 'vers=3', `${this._remote.host}:${this._remote.path}`, this._getRealPath()])
|
||||
}
|
||||
|
||||
async _sync () {
|
||||
await this._loadRealMounts()
|
||||
if (this._matchesRealMount() && !this._remote.enabled) {
|
||||
try {
|
||||
await this._umount(this._remote)
|
||||
} catch (exc) {
|
||||
this._remote.enabled = true
|
||||
this._remote.error = exc.message
|
||||
}
|
||||
} else if (!this._matchesRealMount() && this._remote.enabled) {
|
||||
try {
|
||||
await this._mount()
|
||||
} catch (exc) {
|
||||
this._remote.enabled = false
|
||||
this._remote.error = exc.message
|
||||
}
|
||||
}
|
||||
return this._remote
|
||||
}
|
||||
|
||||
async _forget () {
|
||||
try {
|
||||
await this._umount(this._remote)
|
||||
} catch (_) {
|
||||
// We have to go on...
|
||||
}
|
||||
}
|
||||
|
||||
async _umount (remote) {
|
||||
await execa('umount', [remote.path])
|
||||
}
|
||||
}
|
||||
191
src/remote-handlers/smb.js
Normal file
191
src/remote-handlers/smb.js
Normal file
@@ -0,0 +1,191 @@
|
||||
import Smb2 from '@marsaud/smb2-promise'
|
||||
|
||||
import RemoteHandlerAbstract from './abstract'
|
||||
import {
|
||||
noop,
|
||||
pFinally
|
||||
} from '../utils'
|
||||
|
||||
// Normalize the error code for file not found.
|
||||
const normalizeError = error => {
|
||||
const { code } = error
|
||||
|
||||
return (
|
||||
code === 'STATUS_OBJECT_NAME_NOT_FOUND' ||
|
||||
code === 'STATUS_OBJECT_PATH_NOT_FOUND'
|
||||
)
|
||||
? Object.create(error, {
|
||||
code: {
|
||||
configurable: true,
|
||||
readable: true,
|
||||
value: 'ENOENT',
|
||||
writable: true
|
||||
}
|
||||
})
|
||||
: error
|
||||
}
|
||||
|
||||
export default class SmbHandler extends RemoteHandlerAbstract {
|
||||
constructor (remote) {
|
||||
super(remote)
|
||||
this._forget = noop
|
||||
}
|
||||
|
||||
get type () {
|
||||
return 'smb'
|
||||
}
|
||||
|
||||
_getClient (remote) {
|
||||
return new Smb2({
|
||||
share: `\\\\${remote.host}`,
|
||||
domain: remote.domain,
|
||||
username: remote.username,
|
||||
password: remote.password,
|
||||
autoCloseTimeout: 0
|
||||
})
|
||||
}
|
||||
|
||||
_getFilePath (file) {
|
||||
if (file === '.') {
|
||||
file = undefined
|
||||
}
|
||||
|
||||
let path = (this._remote.path !== '')
|
||||
? this._remote.path
|
||||
: ''
|
||||
|
||||
// Ensure remote path is a directory.
|
||||
if (path !== '' && path[path.length - 1] !== '\\') {
|
||||
path += '\\'
|
||||
}
|
||||
|
||||
if (file) {
|
||||
path += file.replace(/\//g, '\\')
|
||||
}
|
||||
|
||||
return path
|
||||
}
|
||||
|
||||
_dirname (file) {
|
||||
const parts = file.split('\\')
|
||||
parts.pop()
|
||||
return parts.join('\\')
|
||||
}
|
||||
|
||||
async _sync () {
|
||||
if (this._remote.enabled) {
|
||||
try {
|
||||
// Check access (smb2 does not expose connect in public so far...)
|
||||
await this.list()
|
||||
} catch (error) {
|
||||
this._remote.enabled = false
|
||||
this._remote.error = error.message
|
||||
}
|
||||
}
|
||||
return this._remote
|
||||
}
|
||||
|
||||
async _outputFile (file, data, options = {}) {
|
||||
const client = this._getClient(this._remote)
|
||||
const path = this._getFilePath(file)
|
||||
const dir = this._dirname(path)
|
||||
|
||||
if (dir) {
|
||||
await client.ensureDir(dir)
|
||||
}
|
||||
|
||||
return client.writeFile(path, data, options)::pFinally(() => { client.close() })
|
||||
}
|
||||
|
||||
async _readFile (file, options = {}) {
|
||||
const client = this._getClient(this._remote)
|
||||
let content
|
||||
|
||||
try {
|
||||
content = await client.readFile(this._getFilePath(file), options)::pFinally(() => { client.close() })
|
||||
} catch (error) {
|
||||
throw normalizeError(error)
|
||||
}
|
||||
|
||||
return content
|
||||
}
|
||||
|
||||
async _rename (oldPath, newPath) {
|
||||
const client = this._getClient(this._remote)
|
||||
|
||||
try {
|
||||
await client.rename(this._getFilePath(oldPath), this._getFilePath(newPath))::pFinally(() => { client.close() })
|
||||
} catch (error) {
|
||||
throw normalizeError(error)
|
||||
}
|
||||
}
|
||||
|
||||
async _list (dir = '.') {
|
||||
const client = this._getClient(this._remote)
|
||||
let list
|
||||
|
||||
try {
|
||||
list = await client.readdir(this._getFilePath(dir))::pFinally(() => { client.close() })
|
||||
} catch (error) {
|
||||
throw normalizeError(error)
|
||||
}
|
||||
|
||||
return list
|
||||
}
|
||||
|
||||
async _createReadStream (file, options = {}) {
|
||||
const client = this._getClient(this._remote)
|
||||
let stream
|
||||
|
||||
try {
|
||||
// FIXME ensure that options are properly handled by @marsaud/smb2
|
||||
stream = await client.createReadStream(this._getFilePath(file), options)
|
||||
stream.on('end', () => client.close())
|
||||
} catch (error) {
|
||||
throw normalizeError(error)
|
||||
}
|
||||
|
||||
return stream
|
||||
}
|
||||
|
||||
async _createOutputStream (file, options = {}) {
|
||||
const client = this._getClient(this._remote)
|
||||
const path = this._getFilePath(file)
|
||||
const dir = this._dirname(path)
|
||||
let stream
|
||||
try {
|
||||
if (dir) {
|
||||
await client.ensureDir(dir)
|
||||
}
|
||||
stream = await client.createWriteStream(path, options) // FIXME ensure that options are properly handled by @marsaud/smb2
|
||||
} catch (err) {
|
||||
client.close()
|
||||
throw err
|
||||
}
|
||||
stream.on('finish', () => client.close())
|
||||
return stream
|
||||
}
|
||||
|
||||
  // Deletes `file` from the SMB share.
  async _unlink (file) {
    const client = this._getClient(this._remote)

    try {
      // The connection is closed once the unlink promise settles.
      await client.unlink(this._getFilePath(file))::pFinally(() => { client.close() })
    } catch (error) {
      throw normalizeError(error)
    }
  }
|
||||
|
||||
async _getSize (file) {
|
||||
const client = await this._getClient(this._remote)
|
||||
let size
|
||||
|
||||
try {
|
||||
size = await client.getSize(this._getFilePath(file))::pFinally(() => { client.close() })
|
||||
} catch (error) {
|
||||
throw normalizeError(error)
|
||||
}
|
||||
|
||||
return size
|
||||
}
|
||||
}
|
||||
164
src/scheduler.js
164
src/scheduler.js
@@ -1,164 +0,0 @@
|
||||
import {BaseError} from 'make-error'
|
||||
import {CronJob} from 'cron'
|
||||
|
||||
import { forEach } from './utils'
|
||||
|
||||
// Accepts either a schedule object or a bare id and returns the id.
const _resolveId = scheduleOrId => {
  const { id } = scheduleOrId
  return id || scheduleOrId
}
|
||||
|
||||
// Base class for all scheduler-related errors.
export class SchedulerError extends BaseError {}
|
||||
// Thrown when adding a schedule whose id is already registered.
export class ScheduleOverride extends SchedulerError {
  constructor (scheduleOrId) {
    super(`Schedule ID ${_resolveId(scheduleOrId)} is already added`)
  }
}
|
||||
// Thrown when looking up a schedule id that is not registered.
export class NoSuchSchedule extends SchedulerError {
  constructor (scheduleOrId) {
    super(`No schedule found for ID ${_resolveId(scheduleOrId)}`)
  }
}
|
||||
// Thrown when disabling a schedule that is not currently enabled.
export class ScheduleNotEnabled extends SchedulerError {
  constructor (scheduleOrId) {
    // BUGFIX: the original closed the `super(...)` call too early —
    // `+ ' is not enabled'` was concatenated OUTSIDE the call, so the
    // message was silently truncated to 'Schedule <id>'.
    super('Schedule ' + _resolveId(scheduleOrId) + ' is not enabled')
  }
}
|
||||
// Thrown when enabling a schedule that is already enabled.
export class ScheduleAlreadyEnabled extends SchedulerError {
  constructor (scheduleOrId) {
    super(`Schedule ${_resolveId(scheduleOrId)} is already enabled`)
  }
}
|
||||
// Thrown when a schedule references a job id that cannot be found.
export class ScheduleJobNotFound extends SchedulerError {
  constructor (jobId, scheduleId) {
    super(`Job ${jobId} not found for Schedule ${scheduleId}`)
  }
}
|
||||
|
||||
// Schedules job executions from cron expressions.
//
// State is kept in three parallel maps keyed by schedule id:
// - _schedules: id → schedule object
// - _scheduleTable: id → enabled flag
// - _cronJobs: id → running CronJob (only for enabled schedules)
export default class Scheduler {
  constructor (xo, {executor}) {
    this.executor = executor
    this.xo = xo
    this._scheduleTable = undefined
    // NOTE: fire-and-forget async call — the maps are not populated
    // until the returned promise settles.
    this._loadSchedules()
  }

  // Fetches every persisted schedule and registers it (starting cron
  // jobs for the ones flagged `enabled`).
  async _loadSchedules () {
    this._schedules = {}
    const schedules = await this.xo.getAllSchedules()
    this._scheduleTable = {}
    this._cronJobs = {}
    forEach(schedules, schedule => {
      this._add(schedule)
    })
  }

  // Registers a new schedule; throws ScheduleOverride if the id is
  // already known.
  add (schedule) {
    if (this.exists(schedule)) {
      throw new ScheduleOverride(schedule)
    }
    this._add(schedule)
  }

  _add (schedule) {
    const id = _resolveId(schedule)
    this._schedules[id] = schedule
    this._scheduleTable[id] = false
    if (schedule.enabled) {
      this._enable(schedule)
    }
  }

  // Unregisters a schedule, disabling it first when needed.
  remove (id) {
    try {
      this._disable(id)
    } catch (exc) {
      // BUGFIX: was `!exc instanceof SchedulerError` — `!` binds
      // tighter than `instanceof`, so `exc` was negated first and
      // non-scheduler errors were never rethrown.
      if (!(exc instanceof SchedulerError)) {
        throw exc
      }
    } finally {
      delete this._schedules[id]
      delete this._scheduleTable[id]
    }
  }

  exists (scheduleOrId) {
    const id_ = _resolveId(scheduleOrId)
    return id_ in this._schedules
  }

  // Returns the cached schedule; throws NoSuchSchedule when absent.
  async get (id) {
    if (!this.exists(id)) {
      throw new NoSuchSchedule(id)
    }
    return this._schedules[id]
  }

  // Fetches the schedule from the persistence layer (not the cache).
  async _get (id) {
    const schedule = await this.xo.getSchedule(id)
    if (!schedule) {
      throw new NoSuchSchedule(id)
    }
    return schedule
  }

  // Replaces an existing schedule, stopping its cron job first if it
  // was enabled (it is re-enabled by `_add` when `schedule.enabled`).
  async update (schedule) {
    if (!this.exists(schedule)) {
      throw new NoSuchSchedule(schedule)
    }
    const enabled = this.isEnabled(schedule)
    if (enabled) {
      await this._disable(schedule)
    }
    this._add(schedule)
  }

  isEnabled (scheduleOrId) {
    return this._scheduleTable[_resolveId(scheduleOrId)]
  }

  // Starts a cron job for the schedule and marks it enabled.
  _enable (schedule) {
    const jobId = schedule.job
    const cronJob = new CronJob(schedule.cron, async () => {
      try {
        const job = await this._getJob(jobId, schedule.id)
        this.executor.exec(job)
      } catch (_) {
        // FIXME What do we do ?
      }
    })
    this._cronJobs[schedule.id] = cronJob
    cronJob.start()
    this._scheduleTable[schedule.id] = true
  }

  // Resolves the job referenced by a schedule; throws when missing.
  async _getJob (id, scheduleId) {
    const job = await this.xo.getJob(id)
    if (!job) {
      throw new ScheduleJobNotFound(id, scheduleId)
    }
    return job
  }

  // Stops the cron job of an enabled schedule and marks it disabled.
  _disable (scheduleOrId) {
    if (!this.exists(scheduleOrId)) {
      throw new NoSuchSchedule(scheduleOrId)
    }
    if (!this.isEnabled(scheduleOrId)) {
      throw new ScheduleNotEnabled(scheduleOrId)
    }
    const id = _resolveId(scheduleOrId)
    this._cronJobs[id].stop()
    delete this._cronJobs[id]
    this._scheduleTable[id] = false
  }

  disableAll () {
    forEach(this.scheduleTable, (enabled, id) => {
      if (enabled) {
        this._disable(id)
      }
    })
  }

  get scheduleTable () {
    return this._scheduleTable
  }
}
|
||||
39
src/schemas/job.js
Normal file
39
src/schemas/job.js
Normal file
@@ -0,0 +1,39 @@
|
||||
import paramsVector from 'job/params-vector'
|
||||
|
||||
// JSON schema (draft-04) describing a job: a stored method call whose
// parameters are produced by a params vector.
export default {
  $schema: 'http://json-schema.org/draft-04/schema#',
  type: 'object',
  properties: {
    type: {
      enum: ['call']
    },
    id: {
      type: 'string',
      description: 'job identifier'
    },
    name: {
      type: 'string',
      description: 'human readable name'
    },
    userId: {
      type: 'string',
      description: 'identifier of the user who have created the job (the permissions of the user are used by the job)'
    },
    key: {
      type: 'string'
      // TODO description
    },
    method: {
      type: 'string',
      description: 'called method'
    },
    // Sub-schema imported from job/params-vector (shorthand property).
    paramsVector
  },
  required: [
    'type',
    'id',
    'userId',
    'key',
    'method'
  ]
}
|
||||
59
src/schemas/job/params-vector.js
Normal file
59
src/schemas/job/params-vector.js
Normal file
@@ -0,0 +1,59 @@
|
||||
// JSON schema (draft-04) for a job's parameters vector: the cross
// product of one or more sets of parameter objects.
export default {
  $schema: 'http://json-schema.org/draft-04/schema#',
  type: 'object',
  properties: {
    type: {
      enum: ['crossProduct']
    },
    items: {
      type: 'array',
      description: 'vector of values to multiply with others vectors',
      items: {
        type: 'object',
        properties: {
          type: {
            enum: ['set']
          },
          values: {
            type: 'array',
            items: {
              type: 'object'
            },
            minItems: 1
          }
        },
        required: [
          'type',
          'values'
        ]
      },
      minItems: 1
    }
  },
  required: [
    'type',
    'items'
  ]
}
|
||||
|
||||
/* Example:
|
||||
{
|
||||
"type": "cross product",
|
||||
"items": [
|
||||
{
|
||||
"type": "set",
|
||||
"values": [
|
||||
{"id": 0, "name": "snapshost de 0"},
|
||||
{"id": 1, "name": "snapshost de 1"}
|
||||
],
|
||||
},
|
||||
{
|
||||
"type": "set",
|
||||
"values": [
|
||||
{"force": true}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
*/
|
||||
@@ -8,7 +8,7 @@ export default {
|
||||
},
|
||||
time: {
|
||||
type: 'string',
|
||||
description: 'timestamp (in miliseconds) of this log'
|
||||
description: 'timestamp (in milliseconds) of this log'
|
||||
},
|
||||
message: {
|
||||
type: 'string',
|
||||
@@ -18,14 +18,7 @@ export default {
|
||||
type: 'string',
|
||||
description: 'space to store logs'
|
||||
},
|
||||
data: {
|
||||
oneOf: [
|
||||
{ '$ref': 'log/jobStart.js' },
|
||||
{ '$ref': 'log/jobEnd.js' },
|
||||
{ '$ref': 'log/jobCallStart.js' },
|
||||
{ '$ref': 'log/jobCallEnd.js' }
|
||||
]
|
||||
}
|
||||
data: {}
|
||||
},
|
||||
required: [
|
||||
'id',
|
||||
|
||||
50
src/schemas/user.js
Normal file
50
src/schemas/user.js
Normal file
@@ -0,0 +1,50 @@
|
||||
export default {
|
||||
$schema: 'http://json-schema.org/draft-04/schema#',
|
||||
type: 'object',
|
||||
properties: {
|
||||
id: {
|
||||
type: 'string',
|
||||
description: 'unique identifier for this user'
|
||||
},
|
||||
email: {
|
||||
type: 'string',
|
||||
description: 'email address of this user'
|
||||
},
|
||||
groups: {
|
||||
type: 'array',
|
||||
items: {
|
||||
type: 'string'
|
||||
},
|
||||
description: 'identifier of groups this user belong to'
|
||||
},
|
||||
permission: {
|
||||
enum: ['none', 'read', 'write', 'admin'],
|
||||
description: 'root permission for this user, none and admin are the only significant ones'
|
||||
},
|
||||
preferences: {
|
||||
type: 'object',
|
||||
properties: {
|
||||
lang: { type: 'string' },
|
||||
sshKeys: {
|
||||
type: 'array',
|
||||
items: {
|
||||
type: 'object',
|
||||
properties: {
|
||||
key: { type: 'string' },
|
||||
title: { type: 'string' }
|
||||
},
|
||||
required: [
|
||||
'key',
|
||||
'title'
|
||||
]
|
||||
}
|
||||
}
|
||||
},
|
||||
description: 'various user preferences'
|
||||
}
|
||||
},
|
||||
required: [
|
||||
'id',
|
||||
'email'
|
||||
]
|
||||
}
|
||||
366
src/utils.js
366
src/utils.js
@@ -1,17 +1,62 @@
|
||||
import base64url from 'base64url'
|
||||
import forEach from 'lodash.foreach'
|
||||
import has from 'lodash.has'
|
||||
import eventToPromise from 'event-to-promise'
|
||||
import forEach from 'lodash/forEach'
|
||||
import getStream from 'get-stream'
|
||||
import has from 'lodash/has'
|
||||
import highland from 'highland'
|
||||
import humanFormat from 'human-format'
|
||||
import isArray from 'lodash.isarray'
|
||||
import isString from 'lodash.isstring'
|
||||
import invert from 'lodash/invert'
|
||||
import isArray from 'lodash/isArray'
|
||||
import isString from 'lodash/isString'
|
||||
import keys from 'lodash/keys'
|
||||
import kindOf from 'kindof'
|
||||
import multiKeyHashInt from 'multikey-hash'
|
||||
import xml2js from 'xml2js'
|
||||
import {promisify} from 'bluebird'
|
||||
import {randomBytes} from 'crypto'
|
||||
|
||||
// Moment timezone can be loaded only one time, it's a workaround to load
|
||||
// the latest version because cron module uses an old version of moment which
|
||||
// does not implement `guess` function for example.
|
||||
import 'moment-timezone'
|
||||
|
||||
import { CronJob } from 'cron'
|
||||
import {
|
||||
all as pAll,
|
||||
defer,
|
||||
promisify,
|
||||
reflect as pReflect
|
||||
} from 'promise-toolbox'
|
||||
import {
|
||||
createHash,
|
||||
randomBytes
|
||||
} from 'crypto'
|
||||
import { Readable } from 'stream'
|
||||
import through2 from 'through2'
|
||||
import {utcFormat as d3TimeFormat} from 'd3-time-format'
|
||||
|
||||
// ===================================================================
|
||||
|
||||
// Exposes a Buffer as a readable stream, emitting slices of at most
// `size` bytes per `_read` call.
export function bufferToStream (buf) {
  const { length } = buf
  let offset = 0

  const stream = new Readable()
  stream._read = size => {
    if (offset === length) {
      // End of buffer: signal end-of-stream.
      stream.push(null)
      return
    }

    const end = Math.min(offset + size, length)
    stream.push(buf.slice(offset, end))
    offset = end
  }

  return stream
}
|
||||
|
||||
export const streamToBuffer = getStream.buffer
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
export function camelToSnakeCase (string) {
|
||||
return string.replace(
|
||||
/([a-z])([A-Z])/g,
|
||||
@@ -24,7 +69,91 @@ export function camelToSnakeCase (string) {
|
||||
// Returns an empty object without prototype (if possible).
|
||||
export const createRawObject = Object.create
|
||||
? (createObject => () => createObject(null))(Object.create)
|
||||
: () => {}
|
||||
: () => ({})
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
const ALGORITHM_TO_ID = {
|
||||
md5: '1',
|
||||
sha256: '5',
|
||||
sha512: '6'
|
||||
}
|
||||
|
||||
const ID_TO_ALGORITHM = invert(ALGORITHM_TO_ID)
|
||||
|
||||
// Wrap a readable stream in a stream with a checksum promise
|
||||
// attribute which is resolved at the end of an input stream.
|
||||
// (Finally .checksum contains the checksum of the input stream)
|
||||
//
|
||||
// Example:
|
||||
// const sourceStream = ...
|
||||
// const targetStream = ...
|
||||
// const checksumStream = addChecksumToReadStream(sourceStream)
|
||||
// await Promise.all([
|
||||
// eventToPromise(checksumStream.pipe(targetStream), 'finish'),
|
||||
// checksumStream.checksum.then(console.log)
|
||||
// ])
|
||||
export const addChecksumToReadStream = (stream, algorithm = 'md5') => {
  // Map the algorithm name to its numeric id (see ALGORITHM_TO_ID).
  const algorithmId = ALGORITHM_TO_ID[algorithm]

  if (!algorithmId) {
    throw new Error(`unknown algorithm: ${algorithm}`)
  }

  const hash = createHash(algorithm)
  const { promise, resolve } = defer()

  // Pass chunks through unchanged while feeding them to the hash; the
  // flush callback (end of input) resolves the checksum promise.
  const wrapper = stream.pipe(through2(
    (chunk, enc, callback) => {
      hash.update(chunk)
      callback(null, chunk)
    },
    callback => {
      resolve(hash.digest('hex'))
      callback()
    }
  ))

  // `pipe` does not forward errors: re-emit source errors on the wrapper.
  stream.on('error', error => wrapper.emit('error', error))
  // Checksum string format: `$<algorithm id>$$<hex digest>`.
  wrapper.checksum = promise.then(hash => `$${algorithmId}$$${hash}`)

  return wrapper
}
|
||||
|
||||
// Check if the checksum of a readable stream is equals to an expected checksum.
|
||||
// The given stream is wrapped in a stream which emits an error event
|
||||
// if the computed checksum is not equals to the expected checksum.
|
||||
export const validChecksumOfReadStream = (stream, expectedChecksum) => {
  // Expected format: `$<algorithm id>$$<hex digest>` — extract the id
  // between the first '$' and the next one.
  const algorithmId = expectedChecksum.slice(1, expectedChecksum.indexOf('$', 1))

  if (!algorithmId) {
    throw new Error(`unknown algorithm: ${algorithmId}`)
  }

  const hash = createHash(ID_TO_ALGORITHM[algorithmId])

  // highWaterMark: 0 keeps the wrapper from buffering ahead of the consumer.
  const wrapper = stream.pipe(through2(
    { highWaterMark: 0 },
    (chunk, enc, callback) => {
      hash.update(chunk)
      callback(null, chunk)
    },
    callback => {
      const checksum = `$${algorithmId}$$${hash.digest('hex')}`

      // Passing an error to the flush callback makes the wrapper emit
      // 'error' when the computed checksum does not match.
      callback(
        checksum !== expectedChecksum
          ? new Error(`Bad checksum (${checksum}), expected: ${expectedChecksum}`)
          : null
      )
    }
  ))

  // `pipe` does not forward errors: re-emit source errors on the wrapper.
  stream.on('error', error => wrapper.emit('error', error))
  // Resolves when the stream ended with a valid checksum.
  wrapper.checksumVerified = eventToPromise(wrapper, 'end')

  return wrapper
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
@@ -48,10 +177,27 @@ export function extractProperty (obj, prop) {
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
// Fills a buffer with fast, NON-cryptographic pseudo-random bytes
// (Math.random based). Use `generateToken` when security matters.
export const getPseudoRandomBytes = n => {
  // BUGFIX/modernization: `new Buffer(n)` is deprecated (and allocates
  // uninitialized memory through a deprecated API). `allocUnsafe` is
  // the supported equivalent; every byte is overwritten below, so the
  // unsafe (non-zeroed) allocation is fine.
  const bytes = Buffer.allocUnsafe(n)

  // Write two bytes at a time, handling a trailing odd byte separately.
  const odd = n & 1
  for (let i = 0, m = n - odd; i < m; i += 2) {
    bytes.writeUInt16BE(Math.random() * 65536 | 0, i)
  }

  if (odd) {
    bytes.writeUInt8(Math.random() * 256 | 0, n - 1)
  }

  return bytes
}
|
||||
|
||||
// Fast but NOT cryptographically secure token (Math.random based);
// use `generateToken` for security-sensitive needs.
export const generateUnsecureToken = (n = 32) => base64url(getPseudoRandomBytes(n))
|
||||
|
||||
// Generate a secure random Base64 string.
|
||||
export const generateToken = (function (randomBytes) {
|
||||
export const generateToken = (randomBytes => {
|
||||
return (n = 32) => randomBytes(n).then(base64url)
|
||||
})(promisify(randomBytes))
|
||||
})(randomBytes::promisify())
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
@@ -87,63 +233,99 @@ export const parseXml = (function () {
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
// Very light and fast set.
|
||||
//
|
||||
// - works only with strings
|
||||
// - methods are already bound and chainable
|
||||
// Very light and fast set.
//
// - works only with strings
// - methods are already bound and chainable
export const lightSet = collection => {
  const data = createRawObject()

  if (collection) {
    // Seed the set with the initial collection, then drop the
    // reference so it can be garbage collected.
    forEach(collection, item => {
      data[item] = true
    })
    collection = null
  }

  const set = {
    add (value) {
      data[value] = true
      return set
    },
    clear () {
      for (const key in data) {
        delete data[key]
      }
      return set
    },
    delete (value) {
      delete data[value]
      return set
    },
    has (value) {
      return data[value]
    },
    toArray () {
      return keys(data)
    }
  }
  return set
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
// This function does nothing and returns undefined.
|
||||
//
|
||||
// It is often used to swallow promise's errors.
|
||||
export function noop () {}
|
||||
export const noop = () => {}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
// Ponyfill for Promise.finally(cb)
|
||||
//
|
||||
// Usage: promise::pFinally(cb)
|
||||
export function pFinally (cb) {
|
||||
return this.then(
|
||||
value => this.constructor.resolve(cb()).then(() => value),
|
||||
reason => this.constructor.resolve(cb()).then(() => {
|
||||
throw reason
|
||||
})
|
||||
// Logs a promise's outcome for debugging.
//
// Usage: pDebug(promise, name) or promise::pDebug(name)
export function pDebug (promise, name) {
  // Bind-operator form: only the name was given, `this` is the promise.
  if (arguments.length === 1) {
    name = promise
    promise = this
  }

  const report = (verb, detail) => {
    console.log(
      '%s',
      `Promise ${name} ${verb}${detail !== undefined ? ` with ${kindOf(detail)}` : ''}`
    )
  }

  Promise.resolve(promise).then(
    value => report('resolved', value),
    reason => report('rejected', reason)
  )

  // Pass the original promise through so it can still be chained.
  return promise
}
|
||||
|
||||
// Given an array which contains promises return a promise that is
|
||||
// fulfilled when all the items in the array are either fulfilled or
|
||||
// rejected.
|
||||
// Given a collection (array or object) which contains promises,
|
||||
// return a promise that is fulfilled when all the items in the
|
||||
// collection are either fulfilled or rejected.
|
||||
//
|
||||
// This promise will be fulfilled with a collection (of the same type,
|
||||
// array or object) containing promise inspections.
|
||||
//
|
||||
// Usage: pSettle(promises) or promises::pSettle()
|
||||
export function pSettle (promises) {
|
||||
const statuses = promises.map(promise => promise.then(
|
||||
value => ({
|
||||
isFulfilled: () => true,
|
||||
isRejected: () => false,
|
||||
value: () => value,
|
||||
reason: () => {
|
||||
throw new Error('no reason, the promise has been fulfilled')
|
||||
}
|
||||
}),
|
||||
reason => ({
|
||||
isFulfilled: () => false,
|
||||
isRejected: () => true,
|
||||
value: () => {
|
||||
throw new Error('no value, the promise has been rejected')
|
||||
},
|
||||
reason: () => reason
|
||||
})
|
||||
))
|
||||
|
||||
return Promise.all(statuses)
|
||||
return (this || promises)::pAll(p => p::pReflect())
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
export {
|
||||
// Create a function which returns promises instead of taking a
|
||||
// callback.
|
||||
export { // eslint-disable-line no-duplicate-imports
|
||||
all as pAll,
|
||||
catchPlus as pCatch,
|
||||
delay as pDelay,
|
||||
fromCallback as pFromCallback,
|
||||
isPromise,
|
||||
lastly as pFinally,
|
||||
promisify,
|
||||
|
||||
// For all enumerable methods of an object, create a new method
|
||||
// which name is suffixed with `Async` which return promises instead
|
||||
// of taking a callback.
|
||||
promisifyAll
|
||||
} from 'bluebird'
|
||||
promisifyAll,
|
||||
reflect as pReflect
|
||||
} from 'promise-toolbox'
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
@@ -165,6 +347,19 @@ export function parseSize (size) {
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
const _has = Object.prototype.hasOwnProperty
|
||||
|
||||
// Removes an own property from an object and returns its value.
|
||||
export const popProperty = obj => {
  for (const prop in obj) {
    // Only consider own properties (for-in also walks the prototype chain).
    if (_has.call(obj, prop)) {
      // Per the comment above, `extractProperty` removes the property
      // and returns its value.
      return extractProperty(obj, prop)
    }
  }
  // Implicitly returns undefined when there is no own enumerable property.
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
// Format a date in ISO 8601 in a safe way to be used in filenames
|
||||
// (even on Windows).
|
||||
export const safeDateFormat = d3TimeFormat('%Y%m%dT%H%M%SZ')
|
||||
@@ -173,10 +368,16 @@ export const safeDateFormat = d3TimeFormat('%Y%m%dT%H%M%SZ')
|
||||
|
||||
// This functions are often used throughout xo-server.
|
||||
//
|
||||
// Exports them from here to avoid direct dependencies on lodash.
|
||||
export { default as forEach } from 'lodash.foreach'
|
||||
export { default as isEmpty } from 'lodash.isempty'
|
||||
export { default as mapToArray } from 'lodash.map'
|
||||
// Exports them from here to avoid direct dependencies on lodash/
|
||||
export { default as forEach } from 'lodash/forEach' // eslint-disable-line no-duplicate-imports
|
||||
export { default as isArray } from 'lodash/isArray' // eslint-disable-line no-duplicate-imports
|
||||
export { default as isBoolean } from 'lodash/isBoolean'
|
||||
export { default as isEmpty } from 'lodash/isEmpty'
|
||||
export { default as isFunction } from 'lodash/isFunction'
|
||||
export { default as isInteger } from 'lodash/isInteger'
|
||||
export { default as isObject } from 'lodash/isObject'
|
||||
export { default as isString } from 'lodash/isString' // eslint-disable-line no-duplicate-imports
|
||||
export { default as mapToArray } from 'lodash/map'
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
@@ -200,11 +401,10 @@ export const DONE = {}
|
||||
export function map (
|
||||
collection,
|
||||
iteratee,
|
||||
thisArg,
|
||||
target = has(collection, 'length') ? [] : {}
|
||||
) {
|
||||
forEach(collection, (item, i) => {
|
||||
const value = iteratee.call(thisArg, item, i, collection, DONE)
|
||||
const value = iteratee(item, i, collection, DONE)
|
||||
if (value === DONE) {
|
||||
return false
|
||||
}
|
||||
@@ -215,11 +415,6 @@ export function map (
|
||||
return target
|
||||
}
|
||||
|
||||
// Helper to `map()` to update the current collection.
export function mapInPlace (collection, iteratee, thisArg) {
  // Passing the collection itself as the target makes `map()` write
  // the results back over the input.
  return map(collection, iteratee, thisArg, collection)
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
// Create a hash from multiple values.
|
||||
@@ -234,5 +429,52 @@ export const multiKeyHash = (...args) => new Promise(resolve => {
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
// Collects a stream's values into an array, with optional `filter`
// and `mapper` steps (implemented with highland).
export const streamToArray = (stream, {
  filter,
  mapper
} = {}) => new Promise((resolve, reject) => {
  // `stopOnError` rejects the promise on the first stream error.
  stream = highland(stream).stopOnError(reject)
  if (filter) {
    stream = stream.filter(filter)
  }
  if (mapper) {
    stream = stream.map(mapper)
  }
  stream.toArray(resolve)
})
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
// Runs `fn` on the given cron schedule (in `timeZone` when provided).
// Overlapping runs are skipped: a tick is ignored while a previous
// invocation is still pending. Returns a function that stops the job.
export const scheduleFn = (cronTime, fn, timeZone) => {
  let running = false

  const job = new CronJob({
    cronTime,
    onTick: async () => {
      // Skip this tick if the previous invocation has not finished yet.
      if (running) {
        return
      }

      running = true

      try {
        await fn()
      } catch (error) {
        // Errors are logged, never propagated — the schedule keeps running.
        console.error('[WARN] scheduled function:', error && error.stack || error)
      } finally {
        running = false
      }
    },
    start: true,
    timeZone
  })

  return () => {
    job.stop()
  }
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
// Wrap a value in a function (constant-function factory).
export const wrap = value => {
  return () => value
}
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
/* eslint-env mocha */
|
||||
|
||||
import expect from 'must'
|
||||
import sinon from 'sinon'
|
||||
|
||||
// ===================================================================
|
||||
|
||||
@@ -13,7 +12,6 @@ import {
|
||||
formatXml,
|
||||
generateToken,
|
||||
parseSize,
|
||||
pFinally,
|
||||
pSettle
|
||||
} from './utils'
|
||||
|
||||
@@ -122,32 +120,6 @@ describe('generateToken()', () => {
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
describe('pSettle()', () => {
|
||||
it('makes an array of PromiseInspection', async () => {
|
||||
const [
|
||||
status1,
|
||||
status2
|
||||
] = await pSettle([
|
||||
Promise.resolve(42),
|
||||
Promise.reject('fatality')
|
||||
])
|
||||
|
||||
expect(status1.isRejected()).to.equal(false)
|
||||
expect(status2.isRejected()).to.equal(true)
|
||||
|
||||
expect(status1.isFulfilled()).to.equal(true)
|
||||
expect(status2.isFulfilled()).to.equal(false)
|
||||
|
||||
expect(status1.value()).to.equal(42)
|
||||
expect(::status2.value).to.throw()
|
||||
|
||||
expect(::status1.reason).to.throw()
|
||||
expect(status2.reason()).to.equal('fatality')
|
||||
})
|
||||
})
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
describe('parseSize()', function () {
|
||||
it('parses a human size', function () {
|
||||
expect(parseSize('1G')).to.equal(1e9)
|
||||
@@ -170,56 +142,60 @@ describe('parseSize()', function () {
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
describe('pFinally()', () => {
|
||||
it('calls a callback on resolution', async () => {
|
||||
const value = {}
|
||||
const spy = sinon.spy()
|
||||
|
||||
await expect(
|
||||
Promise.resolve(value)::pFinally(spy)
|
||||
).to.resolve.to.equal(
|
||||
value
|
||||
)
|
||||
|
||||
expect(spy.callCount).to.equal(1)
|
||||
})
|
||||
|
||||
it('calls a callback on rejection', async () => {
|
||||
const reason = {}
|
||||
const spy = sinon.spy()
|
||||
|
||||
await expect(
|
||||
Promise.reject(reason)::pFinally(spy)
|
||||
).to.reject.to.equal(
|
||||
reason
|
||||
)
|
||||
|
||||
expect(spy.callCount).to.equal(1)
|
||||
})
|
||||
})
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
describe('pSettle()', () => {
|
||||
it('makes an array of PromiseInspection', async () => {
|
||||
it('works with arrays', async () => {
|
||||
const [
|
||||
status1,
|
||||
status2
|
||||
status2,
|
||||
status3
|
||||
] = await pSettle([
|
||||
Promise.resolve(42),
|
||||
Math.PI,
|
||||
Promise.reject('fatality')
|
||||
])
|
||||
|
||||
expect(status1.isRejected()).to.equal(false)
|
||||
expect(status2.isRejected()).to.equal(true)
|
||||
expect(status2.isRejected()).to.equal(false)
|
||||
expect(status3.isRejected()).to.equal(true)
|
||||
|
||||
expect(status1.isFulfilled()).to.equal(true)
|
||||
expect(status2.isFulfilled()).to.equal(false)
|
||||
expect(status2.isFulfilled()).to.equal(true)
|
||||
expect(status3.isFulfilled()).to.equal(false)
|
||||
|
||||
expect(status1.value()).to.equal(42)
|
||||
expect(::status2.value).to.throw()
|
||||
expect(status2.value()).to.equal(Math.PI)
|
||||
expect(::status3.value).to.throw()
|
||||
|
||||
expect(::status1.reason).to.throw()
|
||||
expect(status2.reason()).to.equal('fatality')
|
||||
expect(::status2.reason).to.throw()
|
||||
expect(status3.reason()).to.equal('fatality')
|
||||
})
|
||||
|
||||
it('works with objects', async () => {
|
||||
const {
|
||||
a: status1,
|
||||
b: status2,
|
||||
c: status3
|
||||
} = await pSettle({
|
||||
a: Promise.resolve(42),
|
||||
b: Math.PI,
|
||||
c: Promise.reject('fatality')
|
||||
})
|
||||
|
||||
expect(status1.isRejected()).to.equal(false)
|
||||
expect(status2.isRejected()).to.equal(false)
|
||||
expect(status3.isRejected()).to.equal(true)
|
||||
|
||||
expect(status1.isFulfilled()).to.equal(true)
|
||||
expect(status2.isFulfilled()).to.equal(true)
|
||||
expect(status3.isFulfilled()).to.equal(false)
|
||||
|
||||
expect(status1.value()).to.equal(42)
|
||||
expect(status2.value()).to.equal(Math.PI)
|
||||
expect(::status3.value).to.throw()
|
||||
|
||||
expect(::status1.reason).to.throw()
|
||||
expect(::status2.reason).to.throw()
|
||||
expect(status3.reason()).to.equal('fatality')
|
||||
})
|
||||
})
|
||||
|
||||
566
src/vhd-merge.js
Normal file
566
src/vhd-merge.js
Normal file
@@ -0,0 +1,566 @@
|
||||
import fu from 'struct-fu'
|
||||
|
||||
import {
|
||||
noop,
|
||||
streamToBuffer
|
||||
} from './utils'
|
||||
|
||||
// Set to a truthy value to enable verbose vhd-util logging; when
// disabled, `debug` is a no-op.
const VHD_UTIL_DEBUG = 0
const debug = VHD_UTIL_DEBUG
  ? str => console.log(`[vhd-util]${str}`)
  : noop
|
||||
|
||||
// ===================================================================
|
||||
//
|
||||
// Spec:
|
||||
// https://www.microsoft.com/en-us/download/details.aspx?id=23850
|
||||
//
|
||||
// C implementation:
|
||||
// https://github.com/rubiojr/vhd-util-convert
|
||||
//
|
||||
// ===================================================================
|
||||
|
||||
// Sizes in bytes.
|
||||
const VHD_FOOTER_SIZE = 512
|
||||
const VHD_HEADER_SIZE = 1024
|
||||
const VHD_SECTOR_SIZE = 512
|
||||
|
||||
// Block allocation table entry size. (Block addr)
|
||||
const VHD_ENTRY_SIZE = 4
|
||||
|
||||
const VHD_PARENT_LOCATOR_ENTRIES = 8
|
||||
const VHD_PLATFORM_CODE_NONE = 0
|
||||
|
||||
// Types of backup treated. Others are not supported.
|
||||
const HARD_DISK_TYPE_DYNAMIC = 3 // Full backup.
|
||||
const HARD_DISK_TYPE_DIFFERENCING = 4 // Delta backup.
|
||||
|
||||
// Other.
|
||||
const BLOCK_UNUSED = 0xFFFFFFFF
|
||||
const BIT_MASK = 0x80
|
||||
|
||||
// ===================================================================
|
||||
|
||||
// struct-fu layout of the VHD footer (VHD_FOOTER_SIZE = 512 bytes).
// Trailing numbers in the comments are byte offsets within the footer.
const fuFooter = fu.struct([
  fu.char('cookie', 8), // 0
  fu.uint32('features'), // 8
  fu.uint32('fileFormatVersion'), // 12
  fu.struct('dataOffset', [
    fu.uint32('high'), // 16
    fu.uint32('low') // 20
  ]),
  fu.uint32('timestamp'), // 24
  fu.char('creatorApplication', 4), // 28
  fu.uint32('creatorVersion'), // 32
  fu.uint32('creatorHostOs'), // 36
  fu.struct('originalSize', [ // At the creation, current size of the hard disk.
    fu.uint32('high'), // 40
    fu.uint32('low') // 44
  ]),
  fu.struct('currentSize', [ // Current size of the virtual disk. At the creation: currentSize = originalSize.
    fu.uint32('high'), // 48
    fu.uint32('low') // 52
  ]),
  fu.struct('diskGeometry', [
    fu.uint16('cylinders'), // 56
    fu.uint8('heads'), // 58
    fu.uint8('sectorsPerTrackCylinder') // 59
  ]),
  fu.uint32('diskType'), // 60 Disk type, must be equal to HARD_DISK_TYPE_DYNAMIC/HARD_DISK_TYPE_DIFFERENCING.
  fu.uint32('checksum'), // 64
  fu.uint8('uuid', 16), // 68
  fu.char('saved'), // 84
  fu.char('hidden'), // 85
  fu.char('reserved', 426) // 86
])
|
||||
|
||||
// struct-fu layout of the VHD dynamic-disk header
// (VHD_HEADER_SIZE = 1024 bytes).
const fuHeader = fu.struct([
  fu.char('cookie', 8),
  fu.struct('dataOffset', [
    fu.uint32('high'),
    fu.uint32('low')
  ]),
  fu.struct('tableOffset', [ // Absolute byte offset of the Block Allocation Table.
    fu.uint32('high'),
    fu.uint32('low')
  ]),
  fu.uint32('headerVersion'),
  fu.uint32('maxTableEntries'), // Max entries in the Block Allocation Table.
  fu.uint32('blockSize'), // Block size in bytes. Default (2097152 => 2MB)
  fu.uint32('checksum'),
  fu.uint8('parentUuid', 16),
  fu.uint32('parentTimestamp'),
  fu.uint32('reserved1'),
  fu.char('parentUnicodeName', 512),
  fu.struct('parentLocatorEntry', [
    fu.uint32('platformCode'),
    fu.uint32('platformDataSpace'),
    fu.uint32('platformDataLength'),
    fu.uint32('reserved'),
    fu.struct('platformDataOffset', [ // Absolute byte offset of the locator data.
      fu.uint32('high'),
      fu.uint32('low')
    ])
  ], VHD_PARENT_LOCATOR_ENTRIES),
  fu.char('reserved2', 256)
])
|
||||
|
||||
// ===================================================================
|
||||
// Helpers
|
||||
// ===================================================================
|
||||
|
||||
// Weight of the "high" word of a 64-bit quantity (2^32).
const SIZE_OF_32_BITS = Math.pow(2, 32)

// Combine the { high, low } pair of 32-bit words used by the VHD
// format into a single JavaScript number.
const uint32ToUint64 = ({ high, low }) => high * SIZE_OF_32_BITS + low
|
||||
|
||||
// Encode a VHD version as a single 32-bit integer: major number in the
// upper 16 bits, minor number in the lower 16 bits.
const getVhdVersion = (major, minor) => {
  const majorPart = major << 16
  const minorPart = minor & 0x0000FFFF
  return majorPart | minorPart
}
|
||||
|
||||
// Sectors conversions.

// Number of sectors needed to hold `bytes` bytes, rounded up.
const sectorsRoundUp = bytes => {
  const padded = bytes + VHD_SECTOR_SIZE - 1
  return Math.floor(padded / VHD_SECTOR_SIZE)
}

// Same, but never returns 0 (a zero-byte payload still occupies one sector).
const sectorsRoundUpNoZero = bytes => sectorsRoundUp(bytes) || 1

// Convert a sector count to a byte count.
const sectorsToBytes = sectors => VHD_SECTOR_SIZE * sectors
|
||||
|
||||
// Check/Set a bit on a vhd map (bitmaps are MSB-first within each byte,
// presumably — BIT_MASK is defined elsewhere in this file).

// Return true when bit number `bit` is set in `map`.
const mapTestBit = (map, bit) => {
  const byte = map[bit >> 3]
  return ((byte << (bit & 7)) & BIT_MASK) !== 0
}

// Set bit number `bit` in `map` (mutates `map` in place).
const mapSetBit = (map, bit) => {
  map[bit >> 3] |= BIT_MASK >> (bit & 7)
}
|
||||
|
||||
// Serialize `value` into `buf` at the position described by `field`.
// `field.offset` may be either a plain byte count or an already-built
// { bytes, bits } position object.
const packField = (field, value, buf) => {
  const { offset } = field
  const position = (typeof offset === 'object')
    ? offset
    : { bytes: offset, bits: 0 }

  field.pack(value, buf, position)
}
|
||||
|
||||
// Deserialize a value from `buf` at the position described by `field`.
// Mirror of packField(): normalizes a numeric offset into { bytes, bits }.
const unpackField = (field, buf) => {
  const { offset } = field
  const position = (typeof offset === 'object')
    ? offset
    : { bytes: offset, bits: 0 }

  return field.unpack(buf, position)
}
|
||||
// ===================================================================
|
||||
|
||||
// Returns the checksum of a raw footer.
// The raw footer is altered with the new sum.
function checksumFooter (rawFooter) {
  const checksumField = fuFooter.fields.checksum

  // The checksum is computed over the footer with its own checksum
  // field zeroed out.
  packField(checksumField, 0, rawFooter)

  let sum = 0
  for (let i = 0; i < VHD_FOOTER_SIZE; i++) {
    sum = (sum + rawFooter[i]) & 0xFFFFFFFF
  }

  // One's complement of the byte sum.
  sum = 0xFFFFFFFF - sum

  // Store the new sum back into the raw footer.
  packField(checksumField, sum, rawFooter)

  return sum
}
|
||||
|
||||
// Size in bytes of a parent locator's data.
//
// Values smaller than one sector are interpreted as a sector count;
// larger values are interpreted as a byte count, which must then be a
// multiple of the sector size (otherwise 0 is returned).
function getParentLocatorSize (parentLocatorEntry) {
  const { platformDataSpace } = parentLocatorEntry

  if (platformDataSpace < VHD_SECTOR_SIZE) {
    return sectorsToBytes(platformDataSpace)
  }

  if (platformDataSpace % VHD_SECTOR_SIZE === 0) {
    return platformDataSpace
  }

  return 0
}
|
||||
|
||||
// ===================================================================
|
||||
|
||||
// Wrapper around a VHD file accessed through a remote handler.
//
// Reads/parses the footer, header, block allocation table and block
// data/bitmaps, and supports writing blocks — enough to merge a
// differencing (delta) VHD into its dynamic (full) parent.
class Vhd {
  constructor (handler, path) {
    this._handler = handler
    this._path = path
  }

  // =================================================================
  // Read functions.
  // =================================================================

  // Returns the first address after metadata. (In bytes)
  getEndOfHeaders () {
    const { header } = this

    let end = uint32ToUint64(this.footer.dataOffset) + VHD_HEADER_SIZE

    const blockAllocationTableSize = sectorsToBytes(
      sectorsRoundUpNoZero(header.maxTableEntries * VHD_ENTRY_SIZE)
    )

    // Max(end, block allocation table end)
    end = Math.max(end, uint32ToUint64(header.tableOffset) + blockAllocationTableSize)

    // Parent locator data may live after the BAT: take it into account.
    for (let i = 0; i < VHD_PARENT_LOCATOR_ENTRIES; i++) {
      const entry = header.parentLocatorEntry[i]

      if (entry.platformCode !== VHD_PLATFORM_CODE_NONE) {
        const dataOffset = uint32ToUint64(entry.platformDataOffset)

        // Max(end, locator end)
        end = Math.max(end, dataOffset + getParentLocatorSize(entry))
      }
    }

    debug(`End of headers: ${end}.`)

    return end
  }

  // Returns the first byte after the data (sector aligned).
  getEndOfData () {
    let end = Math.floor(this.getEndOfHeaders() / VHD_SECTOR_SIZE)

    const { maxTableEntries } = this.header
    for (let i = 0; i < maxTableEntries; i++) {
      let blockAddr = this.readAllocationTableEntry(i)

      if (blockAddr !== BLOCK_UNUSED) {
        // Compute next block address (bitmap + data).
        blockAddr += this.sectorsPerBlock + this.sectorsOfBitmap

        end = Math.max(end, blockAddr)
      }
    }

    debug(`End of data: ${end}.`)

    return sectorsToBytes(end)
  }

  // Returns the start position of the vhd footer.
  // The real footer, not the copy at the beginning of the vhd file.
  async getFooterStart () {
    // NOTE(review): assumes getSize() resolves to an object with a
    // `size` property — confirm against the remote handler API.
    const stats = await this._handler.getSize(this._path)
    return stats.size - VHD_FOOTER_SIZE
  }

  // Get the beginning (footer + header) of a vhd file, parse both and
  // derive the block geometry (sectors per block, bitmap size, …).
  //
  // Throws if the footer checksum does not match.
  async readHeaderAndFooter () {
    const buf = await streamToBuffer(
      await this._handler.createReadStream(this._path, {
        start: 0,
        end: VHD_FOOTER_SIZE + VHD_HEADER_SIZE - 1
      })
    )

    const sum = unpackField(fuFooter.fields.checksum, buf)
    const sumToTest = checksumFooter(buf)

    // Checksum child & parent.
    if (sumToTest !== sum) {
      throw new Error(`Bad checksum in vhd. Expected: ${sum}. Given: ${sumToTest}. (data=${buf.toString('hex')})`)
    }

    const header = this.header = fuHeader.unpack(buf.slice(VHD_FOOTER_SIZE))
    this.footer = fuFooter.unpack(buf)

    // Compute the number of sectors in one block.
    // Default: One block contains 4096 sectors of 512 bytes.
    const sectorsPerBlock = this.sectorsPerBlock = Math.floor(header.blockSize / VHD_SECTOR_SIZE)

    // Compute bitmap size in sectors (one bit per sector of the block).
    // Default: 1.
    const sectorsOfBitmap = this.sectorsOfBitmap = sectorsRoundUpNoZero(sectorsPerBlock >> 3)

    // Full block size => data block size + bitmap size.
    this.fullBlockSize = sectorsToBytes(sectorsPerBlock + sectorsOfBitmap)

    // In bytes.
    // Default: 512.
    this.bitmapSize = sectorsToBytes(sectorsOfBitmap)
  }

  // Check if a vhd object has a block allocation table map
  // (introduced after format version 1.0).
  hasBlockAllocationTableMap () {
    return this.footer.fileFormatVersion > getVhdVersion(1, 0)
  }

  // Reads the block allocation table of the vhd file and caches it in
  // `this.blockTable`.
  async readBlockTable () {
    const { header } = this

    const offset = uint32ToUint64(header.tableOffset)
    const size = sectorsToBytes(
      sectorsRoundUpNoZero(header.maxTableEntries * VHD_ENTRY_SIZE)
    )

    this.blockTable = await streamToBuffer(
      await this._handler.createReadStream(this._path, {
        start: offset,
        end: offset + size - 1
      })
    )
  }

  // Returns the block address (in sectors) stored at `entry` in the
  // cached allocation table, or BLOCK_UNUSED.
  readAllocationTableEntry (entry) {
    return this.blockTable.readUInt32BE(entry * VHD_ENTRY_SIZE)
  }

  // Returns the data content of a block. (Not the bitmap !)
  //
  // The last block of a file may be truncated (the footer overlaps the
  // nominal block end); in that case the result is zero-padded to the
  // full block size.
  async readBlockData (blockAddr) {
    const { blockSize } = this.header

    const handler = this._handler
    const path = this._path

    const blockDataAddr = sectorsToBytes(blockAddr + this.sectorsOfBitmap)
    const footerStart = await this.getFooterStart()
    const isPadded = footerStart < (blockDataAddr + blockSize)

    // Size of the current block in the vhd file.
    const size = isPadded ? (footerStart - blockDataAddr) : sectorsToBytes(this.sectorsPerBlock)

    debug(`Read block data at: ${blockDataAddr}. (size=${size})`)

    const buf = await streamToBuffer(
      await handler.createReadStream(path, {
        start: blockDataAddr,
        end: blockDataAddr + size - 1
      })
    )

    // Padded by zero !
    if (isPadded) {
      // Buffer.alloc() is zero-filled; replaces the deprecated
      // `new Buffer(n).fill(0)`.
      return Buffer.concat([buf, Buffer.alloc(blockSize - size)])
    }

    return buf
  }

  // Returns a buffer that contains the bitmap of a block.
  //
  // TODO: merge with readBlockData().
  async readBlockBitmap (blockAddr) {
    const { bitmapSize } = this
    const offset = sectorsToBytes(blockAddr)

    debug(`Read bitmap at: ${offset}. (size=${bitmapSize})`)

    return streamToBuffer(
      await this._handler.createReadStream(this._path, {
        start: offset,
        end: offset + bitmapSize - 1
      })
    )
  }

  // =================================================================
  // Write functions.
  // =================================================================

  // Write a buffer at a given position in a vhd file.
  async _write (buffer, offset) {
    // TODO: could probably be merged in remote handlers.
    return this._handler.createOutputStream(this._path, {
      start: offset,
      flags: 'r+'
    }).then(stream => new Promise((resolve, reject) => {
      stream.on('error', reject)
      stream.write(buffer, () => {
        stream.end()
        resolve()
      })
    }))
  }

  // Write an entry in the cached allocation table (in memory only —
  // createBlock() persists it to the file).
  writeAllocationTableEntry (entry, value) {
    this.blockTable.writeUInt32BE(value, entry * VHD_ENTRY_SIZE)
  }

  // Make a new empty block at vhd end.
  // Update block allocation table in context and in file.
  //
  // Returns the new block address (in sectors).
  async createBlock (blockId) {
    // End of file !
    let offset = this.getEndOfData()

    // Padded on bound sector.
    if (offset % VHD_SECTOR_SIZE) {
      offset += (VHD_SECTOR_SIZE - (offset % VHD_SECTOR_SIZE))
    }

    const blockAddr = Math.floor(offset / VHD_SECTOR_SIZE)

    const {
      blockTable,
      fullBlockSize
    } = this
    debug(`Create block at ${blockAddr}. (size=${fullBlockSize}, offset=${offset})`)

    // New entry in block allocation table.
    this.writeAllocationTableEntry(blockId, blockAddr)

    const tableOffset = uint32ToUint64(this.header.tableOffset)
    const entry = blockId * VHD_ENTRY_SIZE

    // Write an empty block and addr in vhd file.
    // Buffer.alloc() is zero-filled; replaces the deprecated
    // `new Buffer(n).fill(0)`.
    await this._write(Buffer.alloc(fullBlockSize), offset)
    await this._write(blockTable.slice(entry, entry + VHD_ENTRY_SIZE), tableOffset + entry)

    return blockAddr
  }

  // Write a bitmap at a block address.
  //
  // Throws when `bitmap` is not exactly `this.bitmapSize` bytes.
  async writeBlockBitmap (blockAddr, bitmap) {
    const { bitmapSize } = this

    if (bitmap.length !== bitmapSize) {
      throw new Error(`Bitmap length is not correct ! ${bitmap.length}`)
    }

    const offset = sectorsToBytes(blockAddr)

    debug(`Write bitmap at: ${offset}. (size=${bitmapSize}, data=${bitmap.toString('hex')})`)
    await this._write(bitmap, offset)
  }

  // Write `n` sectors of `block.data`, starting at `beginSectorId`,
  // into this vhd, allocating the block if needed, and mark the
  // written sectors in the block bitmap.
  async writeBlockSectors (block, beginSectorId, n) {
    let blockAddr = this.readAllocationTableEntry(block.id)

    if (blockAddr === BLOCK_UNUSED) {
      blockAddr = await this.createBlock(block.id)
    }

    const endSectorId = beginSectorId + n
    const offset = blockAddr + this.sectorsOfBitmap + beginSectorId

    debug(`Write block data at: ${offset}. (counter=${n}, blockId=${block.id}, blockSector=${beginSectorId})`)

    await this._write(
      block.data.slice(
        sectorsToBytes(beginSectorId),
        sectorsToBytes(endSectorId)
      ),
      sectorsToBytes(offset)
    )

    // FIX: readBlockBitmap() takes the block address as its sole
    // argument; `this.bitmapSize` was previously passed as the address,
    // making the bitmap be read from a bogus offset.
    const bitmap = await this.readBlockBitmap(blockAddr)

    // Mark the freshly written sectors as used.
    for (let i = beginSectorId; i < endSectorId; ++i) {
      mapSetBit(bitmap, i)
    }

    await this.writeBlockBitmap(blockAddr, bitmap)
  }

  // Merge block id (of vhd child) into vhd parent.
  //
  // Only runs of sectors flagged in the child's bitmap are copied.
  async coalesceBlock (child, blockAddr, blockId) {
    // Get block data and bitmap of block id.
    const blockData = await child.readBlockData(blockAddr)
    const blockBitmap = await child.readBlockBitmap(blockAddr)

    debug(`Coalesce block ${blockId} at ${blockAddr}.`)

    // For each sector of block data...
    const { sectorsPerBlock } = child
    for (let i = 0; i < sectorsPerBlock; i++) {
      // If no changes on one sector, skip.
      if (!mapTestBit(blockBitmap, i)) {
        continue
      }

      let sectors = 0

      // Count consecutive changed sectors starting at i.
      for (; sectors + i < sectorsPerBlock; sectors++) {
        if (!mapTestBit(blockBitmap, sectors + i)) {
          break
        }
      }

      // Write n sectors into parent.
      debug(`Coalesce block: write. (offset=${i}, sectors=${sectors})`)
      await this.writeBlockSectors(
        { id: blockId, data: blockData },
        i,
        sectors
      )

      // Skip past the run just written (the loop's i++ then skips the
      // sector known to be unset).
      i += sectors
    }
  }

  // Write a context footer. (At the end and beggining of a vhd file.)
  async writeFooter () {
    const { footer } = this

    const offset = this.getEndOfData()
    const rawFooter = fuFooter.pack(footer)

    // Recompute and store the checksum before writing.
    footer.checksum = checksumFooter(rawFooter)
    debug(`Write footer at: ${offset} (checksum=${footer.checksum}). (data=${rawFooter.toString('hex')})`)

    await this._write(rawFooter, 0)
    await this._write(rawFooter, offset)
  }
}
|
||||
|
||||
// Merge vhd child into vhd parent.
//
// Child must be a delta backup !
// Parent must be a full backup !
export default async function vhdMerge (
  parentHandler, parentPath,
  childHandler, childPath
) {
  const parentVhd = new Vhd(parentHandler, parentPath)
  const childVhd = new Vhd(childHandler, childPath)

  // Reading footer and header.
  await Promise.all([
    parentVhd.readHeaderAndFooter(),
    childVhd.readHeaderAndFooter()
  ])

  // Child must be a delta.
  if (childVhd.footer.diskType !== HARD_DISK_TYPE_DIFFERENCING) {
    throw new Error('Unable to merge, child is not a delta backup.')
  }

  // Merging in differencing disk is prohibited in our case.
  if (parentVhd.footer.diskType !== HARD_DISK_TYPE_DYNAMIC) {
    throw new Error('Unable to merge, parent is not a full backup.')
  }

  // Allocation table map is not yet implemented.
  if (
    parentVhd.hasBlockAllocationTableMap() ||
    childVhd.hasBlockAllocationTableMap()
  ) {
    throw new Error('Unsupported allocation table map.')
  }

  // Read allocation table of child/parent.
  await Promise.all([
    parentVhd.readBlockTable(),
    childVhd.readBlockTable()
  ])

  // Coalesce each used child block into the parent, one at a time
  // (sequential on purpose: each write may grow the parent file).
  const { maxTableEntries } = childVhd.header
  for (let blockId = 0; blockId < maxTableEntries; blockId++) {
    const blockAddr = childVhd.readAllocationTableEntry(blockId)
    if (blockAddr === BLOCK_UNUSED) {
      continue
    }

    await parentVhd.coalesceBlock(childVhd, blockAddr, blockId)
  }

  await parentVhd.writeFooter()
}
|
||||
@@ -1,53 +0,0 @@
|
||||
import createDebug from 'debug'
|
||||
import WebSocket from 'ws'
|
||||
|
||||
const debug = createDebug('xo:wsProxy')
|
||||
|
||||
// Default options for wsProxy(); may be overridden per call via `opts`.
const defaults = {
  // Automatically close the client connection when the remote close.
  autoClose: true
}
|
||||
|
||||
// Proxy a WebSocket `client` to a remote server which has `url` as
// address.
//
// Messages are forwarded in both directions; closing either side
// closes the other (client → remote always, remote → client only when
// `autoClose` is enabled).
export default function wsProxy (client, url, opts) {
  opts = {
    ...defaults,
    protocol: client.protocol,
    ...opts
  }
  const autoClose = !!opts.autoClose
  delete opts.autoClose

  // Send failures are logged but otherwise ignored.
  const onClientSend = error => {
    if (error) {
      debug('client send error', error)
    }
  }
  const onRemoteSend = error => {
    if (error) {
      debug('remote send error', error)
    }
  }

  const remote = new WebSocket(url, opts)
  remote.once('open', () => {
    debug('connected to %s', url)
  })
  remote.once('close', () => {
    debug('remote closed')

    if (autoClose) {
      client.close()
    }
  })
  remote.once('error', error => {
    debug('remote error: %s', error)
  })
  remote.on('message', message => {
    client.send(message, onClientSend)
  })

  client.once('close', () => {
    debug('client closed')
    remote.close()
  })
  client.on('message', message => {
    remote.send(message, onRemoteSend)
  })
}
|
||||
641
src/xapi-object-to-xo.js
Normal file
641
src/xapi-object-to-xo.js
Normal file
@@ -0,0 +1,641 @@
|
||||
import {
|
||||
ensureArray,
|
||||
extractProperty,
|
||||
forEach,
|
||||
isArray,
|
||||
mapToArray,
|
||||
parseXml
|
||||
} from './utils'
|
||||
import {
|
||||
isHostRunning,
|
||||
isVmHvm,
|
||||
isVmRunning,
|
||||
parseDateTime
|
||||
} from './xapi'
|
||||
|
||||
// ===================================================================
|
||||
|
||||
// Local aliases for the Object helpers used in this module.
const defineProperties = Object.defineProperties
const freeze = Object.freeze
|
||||
|
||||
// Resolve the dynamic reference(s) stored under `$<prop>` on a XAPI
// object into plain id(s): the `idField` property of the target
// object(s). Returns null/undefined unchanged.
function link (obj, prop, idField = '$id') {
  const dynamicValue = obj[`$${prop}`]

  // Properly handles null and undefined.
  if (dynamicValue == null) {
    return dynamicValue
  }

  return isArray(dynamicValue)
    ? mapToArray(dynamicValue, idField)
    : dynamicValue[idField]
}
|
||||
|
||||
// Parse a string date time to a Unix timestamp (in seconds).
//
// If the value is a number or can be converted as one, it is assumed
// to already be a timestamp and returned.
//
// If there are no data or if the timestamp is 0, returns null.
function toTimestamp (date) {
  if (!date) {
    return null
  }

  const timestamp = +date

  // Already numeric (or a numeric string): assume it is a timestamp.
  // (Number.isNaN replaces the previous self-comparison NaN idiom.)
  if (!Number.isNaN(timestamp)) {
    return timestamp
  }

  // Fall back to XAPI date-time parsing (milliseconds → seconds).
  const ms = parseDateTime(date)
  if (!ms) {
    return null
  }

  return Math.round(ms.getTime() / 1000)
}
|
||||
|
||||
// ===================================================================
|
||||
|
||||
const TRANSFORMS = {
|
||||
pool (obj) {
|
||||
return {
|
||||
default_SR: link(obj, 'default_SR'),
|
||||
HA_enabled: Boolean(obj.ha_enabled),
|
||||
master: link(obj, 'master'),
|
||||
tags: obj.tags,
|
||||
name_description: obj.name_description,
|
||||
name_label: obj.name_label || obj.$master.name_label
|
||||
|
||||
// TODO
|
||||
// - ? networks = networksByPool.items[pool.id] (network.$pool.id)
|
||||
// - hosts = hostsByPool.items[pool.id] (host.$pool.$id)
|
||||
// - patches = poolPatchesByPool.items[pool.id] (poolPatch.$pool.id)
|
||||
// - SRs = srsByContainer.items[pool.id] (sr.$container.id)
|
||||
// - templates = vmTemplatesByContainer.items[pool.id] (vmTemplate.$container.$id)
|
||||
// - VMs = vmsByContainer.items[pool.id] (vm.$container.id)
|
||||
// - $running_hosts = runningHostsByPool.items[pool.id] (runningHost.$pool.id)
|
||||
// - $running_VMs = runningVmsByPool.items[pool.id] (runningHost.$pool.id)
|
||||
// - $VMs = vmsByPool.items[pool.id] (vm.$pool.id)
|
||||
}
|
||||
},
|
||||
|
||||
// -----------------------------------------------------------------
|
||||
|
||||
host (obj) {
|
||||
const {
|
||||
$metrics: metrics,
|
||||
other_config: otherConfig
|
||||
} = obj
|
||||
|
||||
const isRunning = isHostRunning(obj)
|
||||
|
||||
return {
|
||||
// Deprecated
|
||||
CPUs: obj.cpu_info,
|
||||
|
||||
address: obj.address,
|
||||
bios_strings: obj.bios_strings,
|
||||
build: obj.software_version.build_number,
|
||||
enabled: Boolean(obj.enabled),
|
||||
cpus: {
|
||||
cores: +obj.cpu_info.cpu_count,
|
||||
sockets: +obj.cpu_info.socket_count
|
||||
},
|
||||
current_operations: obj.current_operations,
|
||||
hostname: obj.hostname,
|
||||
iSCSI_name: otherConfig.iscsi_iqn || null,
|
||||
license_params: obj.license_params,
|
||||
license_server: obj.license_server,
|
||||
license_expiry: toTimestamp(obj.license_params.expiry),
|
||||
name_description: obj.name_description,
|
||||
name_label: obj.name_label,
|
||||
memory: (function () {
|
||||
if (metrics) {
|
||||
const free = +metrics.memory_free
|
||||
const total = +metrics.memory_total
|
||||
|
||||
return {
|
||||
usage: total - free,
|
||||
size: total
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
usage: 0,
|
||||
size: 0,
|
||||
|
||||
// Deprecated
|
||||
total: 0
|
||||
}
|
||||
})(),
|
||||
patches: link(obj, 'patches'),
|
||||
powerOnMode: obj.power_on_mode,
|
||||
power_state: metrics
|
||||
? (isRunning ? 'Running' : 'Halted')
|
||||
: 'Unknown',
|
||||
startTime: toTimestamp(otherConfig.boot_time),
|
||||
agentStartTime: toTimestamp(otherConfig.agent_start_time),
|
||||
tags: obj.tags,
|
||||
version: obj.software_version.product_version,
|
||||
|
||||
// TODO: dedupe.
|
||||
PIFs: link(obj, 'PIFs'),
|
||||
$PIFs: link(obj, 'PIFs'),
|
||||
PCIs: link(obj, 'PCIs'),
|
||||
$PCIs: link(obj, 'PCIs'),
|
||||
PGPUs: link(obj, 'PGPUs'),
|
||||
$PGPUs: link(obj, 'PGPUs'),
|
||||
|
||||
$PBDs: link(obj, 'PBDs')
|
||||
|
||||
// TODO:
|
||||
// - controller = vmControllersByContainer.items[host.id]
|
||||
// - SRs = srsByContainer.items[host.id]
|
||||
// - tasks = tasksByHost.items[host.id]
|
||||
// - templates = vmTemplatesByContainer.items[host.id]
|
||||
// - VMs = vmsByContainer.items[host.id]
|
||||
// - $vCPUs = sum(host.VMs, vm => host.CPUs.number)
|
||||
}
|
||||
},
|
||||
|
||||
// -----------------------------------------------------------------
|
||||
|
||||
// Transform a XAPI VM record into its XO representation.
//
// The base object is typed 'VM'; control domains, snapshots and
// templates get a suffixed type and extra fields (see below).
vm (obj) {
  const {
    $guest_metrics: guestMetrics,
    $metrics: metrics,
    other_config: otherConfig
  } = obj

  const isHvm = isVmHvm(obj)
  const isRunning = isVmRunning(obj)
  // Xen tools status (see legend on the `xenTools` property below).
  const xenTools = (() => {
    if (!isRunning || !metrics) {
      // Unknown status, returns nothing.
      return
    }

    if (!guestMetrics) {
      return false
    }

    const { PV_drivers_version: { major, minor } } = guestMetrics
    if (major === undefined || minor === undefined) {
      return false
    }

    return guestMetrics.PV_drivers_up_to_date
      ? 'up to date'
      : 'out of date'
  })()

  const vm = {
    // type is redefined after for controllers/, templates &
    // snapshots.
    type: 'VM',

    addresses: guestMetrics && guestMetrics.networks || null,
    auto_poweron: Boolean(otherConfig.auto_poweron),
    boot: obj.HVM_boot_params,
    CPUs: {
      max: +obj.VCPUs_max,
      // Live count when the tools report it, configured count otherwise.
      number: (
        isRunning && metrics && xenTools
          ? +metrics.VCPUs_number
          : +obj.VCPUs_at_startup
      )
    },
    current_operations: obj.current_operations,
    // Docker/xscontainer plugin state, parsed from other_config.
    docker: (function () {
      const monitor = otherConfig['xscontainer-monitor']
      if (!monitor) {
        return
      }

      if (monitor === 'False') {
        return {
          enabled: false
        }
      }

      const {
        docker_ps: process,
        docker_info: info,
        docker_version: version
      } = otherConfig

      return {
        enabled: true,
        info: info && parseXml(info).docker_info,
        process: process && parseXml(process).docker_ps,
        version: version && parseXml(version).docker_version
      }
    })(),

    // TODO: there is two possible value: "best-effort" and "restart"
    high_availability: Boolean(obj.ha_restart_priority),

    // Memory limits plus current size/usage, chosen from the best
    // available source (guest metrics > metrics > dynamic max).
    memory: (function () {
      const dynamicMin = +obj.memory_dynamic_min
      const dynamicMax = +obj.memory_dynamic_max
      const staticMin = +obj.memory_static_min
      const staticMax = +obj.memory_static_max

      const memory = {
        dynamic: [ dynamicMin, dynamicMax ],
        static: [ staticMin, staticMax ]
      }

      const gmMemory = guestMetrics && guestMetrics.memory

      if (!isRunning) {
        memory.size = dynamicMax
      } else if (gmMemory && gmMemory.used) {
        memory.usage = +gmMemory.used
        memory.size = +gmMemory.total
      } else if (metrics) {
        memory.size = +metrics.memory_actual
      } else {
        memory.size = dynamicMax
      }

      return memory
    })(),
    name_description: obj.name_description,
    name_label: obj.name_label,
    other: otherConfig,
    os_version: guestMetrics && guestMetrics.os_version || null,
    power_state: obj.power_state,
    snapshots: link(obj, 'snapshots'),
    startTime: metrics && toTimestamp(metrics.start_time),
    tags: obj.tags,
    VIFs: link(obj, 'VIFs'),
    virtualizationMode: isHvm ? 'hvm' : 'pv',

    // <=> Are the Xen Server tools installed?
    //
    // - undefined: unknown status
    // - false: not optimized
    // - 'out of date': optimized but drivers should be updated
    // - 'up to date': optimized
    xenTools,

    $container: (
      isRunning
        ? link(obj, 'resident_on')
        : link(obj, 'pool') // TODO: handle local VMs (`VM.get_possible_hosts()`).
    ),
    $VBDs: link(obj, 'VBDs'),

    // TODO: dedupe
    VGPUs: link(obj, 'VGPUs'),
    $VGPUs: link(obj, 'VGPUs')
  }

  if (obj.is_control_domain) {
    vm.type += '-controller'
  } else if (obj.is_a_snapshot) {
    vm.type += '-snapshot'

    vm.snapshot_time = toTimestamp(obj.snapshot_time)
    vm.$snapshot_of = link(obj, 'snapshot_of')
  } else if (obj.is_a_template) {
    vm.type += '-template'

    vm.CPUs.number = +obj.VCPUs_at_startup
    // Provisioning info embedded as XML in other_config.
    vm.template_info = {
      arch: otherConfig['install-arch'],
      disks: (function () {
        const {disks: xml} = otherConfig
        let data
        if (!xml || !(data = parseXml(xml)).provision) {
          return []
        }

        const disks = ensureArray(data.provision.disk)
        forEach(disks, function normalize (disk) {
          disk.bootable = disk.bootable === 'true'
          disk.size = +disk.size
          disk.SR = extractProperty(disk, 'sr')
        })

        return disks
      })(),
      install_methods: (function () {
        const methods = otherConfig['install-methods']

        return methods ? methods.split(',') : []
      })(),
      install_repository: otherConfig['install-repository']
    }
  }

  let tmp
  if ((tmp = obj.VCPUs_params)) {
    // Only exposed when set (0 is treated as unset here).
    tmp.cap && (vm.cpuCap = +tmp.cap)
    tmp.weight && (vm.cpuWeight = +tmp.weight)
  }

  if (!isHvm) {
    vm.PV_args = obj.PV_args
  }

  return vm
},
|
||||
|
||||
// -----------------------------------------------------------------
|
||||
|
||||
sr (obj) {
|
||||
return {
|
||||
type: 'SR',
|
||||
|
||||
content_type: obj.content_type,
|
||||
|
||||
// TODO: Should it replace usage?
|
||||
physical_usage: +obj.physical_utilisation,
|
||||
|
||||
name_description: obj.name_description,
|
||||
name_label: obj.name_label,
|
||||
size: +obj.physical_size,
|
||||
SR_type: obj.type,
|
||||
tags: obj.tags,
|
||||
usage: +obj.virtual_allocation,
|
||||
VDIs: link(obj, 'VDIs'),
|
||||
|
||||
$container: (
|
||||
obj.shared || !obj.$PBDs[0]
|
||||
? link(obj, 'pool')
|
||||
: link(obj.$PBDs[0], 'host')
|
||||
),
|
||||
$PBDs: link(obj, 'PBDs')
|
||||
}
|
||||
},
|
||||
|
||||
// -----------------------------------------------------------------
|
||||
|
||||
// Transform a XAPI PBD (physical block device) record into its XO
// representation: the host↔SR attachment.
pbd (obj) {
  return {
    type: 'PBD',

    attached: obj.currently_attached,
    host: link(obj, 'host'),
    SR: link(obj, 'SR')
  }
},
|
||||
|
||||
// -----------------------------------------------------------------
|
||||
|
||||
pif (obj) {
|
||||
return {
|
||||
type: 'PIF',
|
||||
|
||||
attached: Boolean(obj.currently_attached),
|
||||
device: obj.device,
|
||||
dns: obj.DNS,
|
||||
disallowUnplug: Boolean(obj.disallow_unplug),
|
||||
gateway: obj.gateway,
|
||||
ip: obj.IP,
|
||||
mac: obj.MAC,
|
||||
management: Boolean(obj.management), // TODO: find a better name.
|
||||
mode: obj.ip_configuration_mode,
|
||||
mtu: +obj.MTU,
|
||||
netmask: obj.netmask,
|
||||
// A non physical PIF is a "copy" of an existing physical PIF (same device)
|
||||
// A physical PIF cannot be unplugged
|
||||
physical: Boolean(obj.physical),
|
||||
vlan: +obj.VLAN,
|
||||
$host: link(obj, 'host'),
|
||||
$network: link(obj, 'network')
|
||||
}
|
||||
},
|
||||
|
||||
// -----------------------------------------------------------------
|
||||
|
||||
vdi (obj) {
|
||||
if (!obj.managed) {
|
||||
return
|
||||
}
|
||||
|
||||
const vdi = {
|
||||
type: 'VDI',
|
||||
|
||||
name_description: obj.name_description,
|
||||
name_label: obj.name_label,
|
||||
size: +obj.virtual_size,
|
||||
snapshots: link(obj, 'snapshots'),
|
||||
tags: obj.tags,
|
||||
usage: +obj.physical_utilisation,
|
||||
|
||||
$SR: link(obj, 'SR'),
|
||||
$VBDs: link(obj, 'VBDs')
|
||||
}
|
||||
|
||||
if (obj.is_a_snapshot) {
|
||||
vdi.type += '-snapshot'
|
||||
vdi.snapshot_time = toTimestamp(obj.snapshot_time)
|
||||
vdi.$snapshot_of = link(obj, 'snapshot_of')
|
||||
}
|
||||
|
||||
return vdi
|
||||
},
|
||||
|
||||
// -----------------------------------------------------------------
|
||||
|
||||
vbd (obj) {
|
||||
return {
|
||||
type: 'VBD',
|
||||
|
||||
attached: Boolean(obj.currently_attached),
|
||||
bootable: Boolean(obj.bootable),
|
||||
is_cd_drive: obj.type === 'CD',
|
||||
position: obj.userdevice,
|
||||
read_only: obj.mode === 'RO',
|
||||
VDI: link(obj, 'VDI'),
|
||||
VM: link(obj, 'VM')
|
||||
}
|
||||
},
|
||||
|
||||
// -----------------------------------------------------------------
|
||||
|
||||
// Transform a XAPI VIF (virtual interface) record into its XO
// representation.
vif (obj) {
  return {
    type: 'VIF',

    allowedIpv4Addresses: obj.ipv4_allowed,
    allowedIpv6Addresses: obj.ipv6_allowed,
    attached: Boolean(obj.currently_attached),
    device: obj.device, // TODO: should it be cast to a number?
    MAC: obj.MAC,
    MTU: +obj.MTU,

    $network: link(obj, 'network'),
    $VM: link(obj, 'VM')
  }
},
|
||||
|
||||
// -----------------------------------------------------------------
|
||||
|
||||
// Transform a XAPI network record into its XO representation.
// NOTE(review): unlike the other transforms, no `type` property is
// set here — confirm whether this is intentional.
network (obj) {
  return {
    bridge: obj.bridge,
    MTU: +obj.MTU,
    name_description: obj.name_description,
    name_label: obj.name_label,
    tags: obj.tags,
    PIFs: link(obj, 'PIFs'),
    VIFs: link(obj, 'VIFs')
  }
},
|
||||
|
||||
// -----------------------------------------------------------------
|
||||
|
||||
// Transform a XAPI message record into its XO representation.
message (obj) {
  return {
    body: obj.body,
    name: obj.name,
    time: toTimestamp(obj.timestamp),

    $object: obj.obj_uuid // Special link as it is already an UUID.
  }
},
|
||||
|
||||
// -----------------------------------------------------------------
|
||||
|
||||
// Transform a XAPI task record into its XO representation.
task (obj) {
  return {
    created: toTimestamp(obj.created),
    current_operations: obj.current_operations,
    finished: toTimestamp(obj.finished),
    name_description: obj.name_description,
    name_label: obj.name_label,
    progress: +obj.progress,
    result: obj.result,
    status: obj.status,

    $host: link(obj, 'resident_on')
  }
},
|
||||
|
||||
// -----------------------------------------------------------------
|
||||
|
||||
// Transform a XAPI host_patch record into its XO representation:
// the application of a pool patch to one host.
host_patch (obj) {
  return {
    applied: Boolean(obj.applied),
    time: toTimestamp(obj.timestamp_applied),
    pool_patch: link(obj, 'pool_patch', '$ref'),

    $host: link(obj, 'host')
  }
},
|
||||
|
||||
// -----------------------------------------------------------------
|
||||
|
||||
pool_patch (obj) {
|
||||
return {
|
||||
id: obj.$ref,
|
||||
|
||||
applied: Boolean(obj.pool_applied),
|
||||
description: obj.name_description,
|
||||
guidance: obj.after_apply_guidance,
|
||||
name: obj.name_label,
|
||||
size: +obj.size,
|
||||
uuid: obj.uuid,
|
||||
|
||||
// TODO: what does it mean, should we handle it?
|
||||
// version: obj.version,
|
||||
|
||||
// TODO: host.[$]pool_patches ←→ pool.[$]host_patches
|
||||
$host_patches: link(obj, 'host_patches')
|
||||
}
|
||||
},
|
||||
|
||||
// -----------------------------------------------------------------
|
||||
|
||||
pci (obj) {
|
||||
return {
|
||||
type: 'PCI',
|
||||
|
||||
class_name: obj.class_name,
|
||||
device_name: obj.device_name,
|
||||
pci_id: obj.pci_id,
|
||||
|
||||
$host: link(obj, 'host')
|
||||
}
|
||||
},
|
||||
|
||||
// -----------------------------------------------------------------
|
||||
|
||||
pgpu (obj) {
|
||||
return {
|
||||
type: 'PGPU',
|
||||
|
||||
pci: link(obj, 'PCI'),
|
||||
|
||||
// TODO: dedupe.
|
||||
host: link(obj, 'host'),
|
||||
$host: link(obj, 'host'),
|
||||
vgpus: link(obj, 'resident_VGPUs'),
|
||||
$vgpus: link(obj, 'resident_VGPUs')
|
||||
}
|
||||
},
|
||||
|
||||
// -----------------------------------------------------------------
|
||||
|
||||
vgpu (obj) {
|
||||
return {
|
||||
type: 'VGPU',
|
||||
|
||||
currentlyAttached: Boolean(obj.currently_attached),
|
||||
device: obj.device,
|
||||
resident_on: link(obj, 'resident_on'),
|
||||
vm: link(obj, 'VM')
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// ===================================================================
|
||||
|
||||
// Converts a XAPI record into its XO representation.
//
// Returns undefined when the type is not handled or when the
// transform chooses to ignore this particular object.
export default xapiObj => {
  const transform = TRANSFORMS[xapiObj.$type.toLowerCase()]
  if (!transform) {
    return
  }

  const xoObj = transform(xapiObj)
  if (!xoObj) {
    return
  }

  // Defines a property only if the transform did not already set it.
  const setDefault = (prop, value) => {
    if (!(prop in xoObj)) {
      xoObj[prop] = value
    }
  }

  setDefault('id', xapiObj.$id)
  setDefault('type', xapiObj.$type)
  if ('uuid' in xapiObj) {
    setDefault('uuid', xapiObj.uuid)
  }

  xoObj.$pool = xapiObj.$pool.$id
  xoObj.$poolId = xoObj.$pool // TODO: deprecated, remove when no longer used in xo-web

  // Internal properties (non-enumerable).
  defineProperties(xoObj, {
    _xapiId: {
      value: xapiObj.$id
    },
    _xapiRef: {
      value: xapiObj.$ref
    }
  })

  // Freezes and returns the new object.
  return freeze(xoObj)
}
|
||||
@@ -1,549 +0,0 @@
|
||||
import isArray from 'lodash.isarray'
|
||||
|
||||
import {
|
||||
ensureArray,
|
||||
extractProperty,
|
||||
forEach,
|
||||
mapToArray,
|
||||
parseXml
|
||||
} from './utils'
|
||||
import {
|
||||
isHostRunning,
|
||||
isVmHvm,
|
||||
isVmRunning,
|
||||
parseDateTime
|
||||
} from './xapi'
|
||||
|
||||
// ===================================================================
|
||||
|
||||
// Resolves a XAPI link property (`$<prop>`) to the id(s) of the
// object(s) it references.
//
// Returns null/undefined when the dynamic value is missing, a single
// id when it references one object, and an array of ids otherwise.
function link (obj, prop, idField = '$id') {
  const value = obj[`$${prop}`]
  if (value == null) {
    return value // Properly handles null and undefined.
  }

  return isArray(value)
    ? mapToArray(value, idField)
    : value[idField]
}
|
||||
|
||||
// Parse a string date time to a Unix timestamp (in seconds).
|
||||
//
|
||||
// If there are no data or if the timestamp is 0, returns null.
|
||||
// Parses a XAPI date time string into a Unix timestamp (in seconds).
//
// Returns null when there is no data or when the timestamp is 0.
function toTimestamp (date) {
  if (!date) {
    return null
  }

  const ms = parseDateTime(date).getTime()

  return ms
    ? Math.round(ms / 1000)
    : null
}
|
||||
|
||||
// ===================================================================
|
||||
|
||||
// Transforms a XAPI pool record into its XO representation.
export function pool (obj) {
  const {
    ha_enabled: haEnabled,
    name_description: nameDescription,
    name_label: nameLabel,
    tags
  } = obj

  return {
    default_SR: link(obj, 'default_SR'),
    HA_enabled: Boolean(haEnabled),
    master: link(obj, 'master'),
    tags,
    name_description: nameDescription,

    // An unnamed pool falls back to the name of its master host.
    name_label: nameLabel || obj.$master.name_label

    // TODO
    // - ? networks = networksByPool.items[pool.id] (network.$pool.id)
    // - hosts = hostsByPool.items[pool.id] (host.$pool.$id)
    // - patches = poolPatchesByPool.items[pool.id] (poolPatch.$pool.id)
    // - SRs = srsByContainer.items[pool.id] (sr.$container.id)
    // - templates = vmTemplatesByContainer.items[pool.id] (vmTemplate.$container.$id)
    // - VMs = vmsByContainer.items[pool.id] (vm.$container.id)
    // - $running_hosts = runningHostsByPool.items[pool.id] (runningHost.$pool.id)
    // - $running_VMs = runningVmsByPool.items[pool.id] (runningHost.$pool.id)
    // - $VMs = vmsByPool.items[pool.id] (vm.$pool.id)
  }
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
// Transforms a XAPI host record into its XO representation.
export function host (obj) {
  const {
    $metrics: metrics,
    other_config: otherConfig
  } = obj

  const isRunning = isHostRunning(obj)

  return {
    address: obj.address,
    bios_strings: obj.bios_strings,
    build: obj.software_version.build_number,
    CPUs: obj.cpu_info,
    enabled: Boolean(obj.enabled),
    current_operations: obj.current_operations,
    hostname: obj.hostname,
    iSCSI_name: otherConfig.iscsi_iqn || null,
    name_description: obj.name_description,
    name_label: obj.name_label,
    memory: (function () {
      if (metrics) {
        const free = +metrics.memory_free
        const total = +metrics.memory_total

        return {
          usage: total - free,
          size: total
        }
      }

      // No metrics available (e.g. halted host): default to zeros.
      //
      // BUG FIX: this fallback used to expose the property `total`
      // instead of `size`, inconsistent with the branch above —
      // consumers reading `memory.size` got `undefined`.
      return {
        usage: 0,
        size: 0
      }
    })(),
    patches: link(obj, 'patches'),
    powerOnMode: obj.power_on_mode,
    power_state: isRunning ? 'Running' : 'Halted',
    tags: obj.tags,
    version: obj.software_version.product_version,

    // TODO: dedupe.
    PIFs: link(obj, 'PIFs'),
    $PIFs: link(obj, 'PIFs'),
    PCIs: link(obj, 'PCIs'),
    $PCIs: link(obj, 'PCIs'),
    PGPUs: link(obj, 'PGPUs'),
    $PGPUs: link(obj, 'PGPUs'),

    $PBDs: link(obj, 'PBDs')

    // TODO:
    // - controller = vmControllersByContainer.items[host.id]
    // - SRs = srsByContainer.items[host.id]
    // - tasks = tasksByHost.items[host.id]
    // - templates = vmTemplatesByContainer.items[host.id]
    // - VMs = vmsByContainer.items[host.id]
    // - $vCPUs = sum(host.VMs, vm => host.CPUs.number)
  }
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
// Transforms a XAPI VM record into its XO representation.
//
// The `type` of the resulting object is refined below for
// controllers, snapshots and templates.
export function vm (obj) {
  const {
    $guest_metrics: guestMetrics,
    $metrics: metrics,
    other_config: otherConfig
  } = obj

  const isHvm = isVmHvm(obj)
  const isRunning = isVmRunning(obj)

  // Docker status, read from the xscontainer plugin markers.
  const getDocker = () => {
    const monitor = otherConfig['xscontainer-monitor']
    if (!monitor) {
      return
    }

    if (monitor === 'False') {
      return {
        enabled: false
      }
    }

    const {
      docker_ps: process,
      docker_info: info,
      docker_version: version
    } = otherConfig

    return {
      enabled: true,
      info: info && parseXml(info).docker_info,
      process: process && parseXml(process).docker_ps,
      version: version && parseXml(version).docker_version
    }
  }

  // Memory limits and, when available, current size/usage.
  const getMemory = () => {
    const dynamicMin = +obj.memory_dynamic_min
    const dynamicMax = +obj.memory_dynamic_max
    const staticMin = +obj.memory_static_min
    const staticMax = +obj.memory_static_max

    const memory = {
      dynamic: [ dynamicMin, dynamicMax ],
      static: [ staticMin, staticMax ]
    }

    const gmMemory = guestMetrics && guestMetrics.memory

    if (!isRunning) {
      memory.size = dynamicMax
    } else if (gmMemory && gmMemory.used) {
      memory.usage = +gmMemory.used
      memory.size = +gmMemory.total
    } else if (metrics) {
      memory.size = +metrics.memory_actual
    } else {
      memory.size = dynamicMax
    }

    return memory
  }

  // <=> Are the Xen Server tools installed?
  //
  // - undefined: unknown status
  // - false: not optimized
  // - 'out of date': optimized but drivers should be updated
  // - 'up to date': optimized
  const getXenTools = () => {
    if (!isRunning || !metrics) {
      // Unknown status, returns nothing.
      return
    }

    if (!guestMetrics) {
      return false
    }

    const { PV_drivers_version: { major, minor } } = guestMetrics
    if (major === undefined || minor === undefined) {
      return false
    }

    return guestMetrics.PV_drivers_up_to_date
      ? 'up to date'
      : 'out of date'
  }

  const vm = {
    // type is redefined after for controllers, templates & snapshots.
    type: 'VM',

    addresses: (guestMetrics && guestMetrics.networks) || null,
    auto_poweron: Boolean(otherConfig.auto_poweron),
    boot: obj.HVM_boot_params,
    CPUs: {
      max: +obj.VCPUs_max,
      number: isRunning && metrics
        ? +metrics.VCPUs_number
        : +obj.VCPUs_at_startup
    },
    current_operations: obj.current_operations,
    docker: getDocker(),

    // TODO: there is two possible value: "best-effort" and "restart"
    high_availability: Boolean(obj.ha_restart_priority),

    memory: getMemory(),
    name_description: obj.name_description,
    name_label: obj.name_label,
    other: otherConfig,
    os_version: (guestMetrics && guestMetrics.os_version) || null,
    power_state: obj.power_state,
    snapshot_time: toTimestamp(obj.snapshot_time),
    snapshots: link(obj, 'snapshots'),
    tags: obj.tags,
    VIFs: link(obj, 'VIFs'),
    virtualizationMode: isHvm ? 'hvm' : 'pv',
    xenTools: getXenTools(),

    $container: isRunning
      ? link(obj, 'resident_on')
      : link(obj, 'pool'), // TODO: handle local VMs (`VM.get_possible_hosts()`).
    $VBDs: link(obj, 'VBDs'),

    // TODO: dedupe
    VGPUs: link(obj, 'VGPUs'),
    $VGPUs: link(obj, 'VGPUs')
  }

  if (obj.is_control_domain) {
    vm.type += '-controller'
  } else if (obj.is_a_snapshot) {
    vm.type += '-snapshot'

    vm.$snapshot_of = link(obj, 'snapshot_of')
  } else if (obj.is_a_template) {
    vm.type += '-template'

    vm.CPUs.number = +obj.VCPUs_at_startup

    // Provisioning disks, from the XML description in other_config.
    const getTemplateDisks = () => {
      const xml = otherConfig.disks
      const data = xml && parseXml(xml)
      if (!data || !data.provision) {
        return []
      }

      const disks = ensureArray(data.provision.disk)
      forEach(disks, disk => {
        disk.bootable = disk.bootable === 'true'
        disk.size = +disk.size
        disk.SR = extractProperty(disk, 'sr')
      })

      return disks
    }

    const installMethods = otherConfig['install-methods']

    vm.template_info = {
      arch: otherConfig['install-arch'],
      disks: getTemplateDisks(),
      install_methods: installMethods
        ? installMethods.split(',')
        : []
    }
  }

  if (!isHvm) {
    vm.PV_args = obj.PV_args
  }

  return vm
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
// Transforms a XAPI SR record into its XO representation.
export function sr (obj) {
  const {
    content_type: contentType,
    name_description: nameDescription,
    name_label: nameLabel,
    physical_size: physicalSize,
    physical_utilisation: physicalUsage,
    shared,
    tags,
    type: srType,
    virtual_allocation: virtualAllocation
  } = obj

  // A shared SR belongs to the pool, a local one to the host of its
  // first PBD (if any).
  const firstPbd = obj.$PBDs[0]

  return {
    type: 'SR',

    content_type: contentType,
    name_description: nameDescription,
    name_label: nameLabel,
    physical_usage: +physicalUsage,
    size: +physicalSize,
    SR_type: srType,
    tags,
    usage: +virtualAllocation,
    VDIs: link(obj, 'VDIs'),

    $container: shared
      ? link(obj, 'pool')
      : firstPbd && link(firstPbd, 'host'),
    $PBDs: link(obj, 'PBDs')
  }
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
// Transforms a XAPI PBD record into its XO representation.
export function pbd (obj) {
  return {
    type: 'PBD',

    // Cast to a true boolean for consistency with every other
    // transform in this file (pif/vbd/vif all use Boolean()).
    attached: Boolean(obj.currently_attached),
    host: link(obj, 'host'),
    SR: link(obj, 'SR')
  }
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
// Transforms a XAPI PIF record into its XO representation.
export function pif (obj) {
  const {
    device,
    IP,
    ip_configuration_mode: mode,
    MAC,
    MTU,
    netmask,
    VLAN
  } = obj

  return {
    type: 'PIF',

    attached: Boolean(obj.currently_attached),
    device,
    IP,
    MAC,
    management: Boolean(obj.management), // TODO: find a better name.
    mode,
    MTU: +MTU,
    netmask,
    vlan: +VLAN,

    // TODO: What is it?
    //
    // Could it mean “is this a physical interface?”.
    // How could a PIF not be physical?
    // physical: obj.physical,

    $host: link(obj, 'host'),
    $network: link(obj, 'network')
  }
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
// TODO: should we have a VDI-snapshot type like we have with VMs?

// Transforms a XAPI VDI record into its XO representation.
export function vdi (obj) {
  // Unmanaged VDIs are not exposed.
  if (!obj.managed) {
    return
  }

  const {
    name_description: nameDescription,
    name_label: nameLabel,
    physical_utilisation: usage,
    snapshot_time: snapshotTime,
    tags,
    virtual_size: size
  } = obj

  return {
    type: 'VDI',

    name_description: nameDescription,
    name_label: nameLabel,
    size: +size,
    snapshots: link(obj, 'snapshots'),
    snapshot_time: toTimestamp(snapshotTime),
    tags,
    usage: +usage,

    $snapshot_of: link(obj, 'snapshot_of'),
    $SR: link(obj, 'SR'),
    $VBDs: link(obj, 'VBDs')
  }
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
// Transforms a XAPI VBD record into its XO representation.
export function vbd (obj) {
  const {
    bootable,
    currently_attached: attached,
    mode,
    type: vbdType,
    userdevice
  } = obj

  return {
    type: 'VBD',

    attached: Boolean(attached),
    bootable: Boolean(bootable),
    is_cd_drive: vbdType === 'CD',
    position: userdevice,
    read_only: mode === 'RO',
    VDI: link(obj, 'VDI'),
    VM: link(obj, 'VM')
  }
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
// Transforms a XAPI VIF record into its XO representation.
export function vif (obj) {
  const { currently_attached: attached, device, MAC, MTU } = obj

  return {
    type: 'VIF',

    attached: Boolean(attached),
    device, // TODO: should it be cast to a number?
    MAC,
    MTU: +MTU,

    $network: link(obj, 'network'),
    $VM: link(obj, 'VM')
  }
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
// Transforms a XAPI network record into its XO representation.
export function network (obj) {
  const {
    bridge,
    MTU,
    name_description: nameDescription,
    name_label: nameLabel,
    tags
  } = obj

  return {
    bridge,
    MTU: +MTU,
    name_description: nameDescription,
    name_label: nameLabel,
    tags,
    PIFs: link(obj, 'PIFs'),
    VIFs: link(obj, 'VIFs')
  }
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
// Transforms a XAPI message record into its XO representation.
export function message (obj) {
  const { body, name, obj_uuid: objectUuid, timestamp } = obj

  return {
    body,
    name,
    time: toTimestamp(timestamp),

    // Special link as it is already an UUID.
    $object: objectUuid
  }
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
// Transforms a XAPI task record into its XO representation.
export function task (obj) {
  const {
    current_operations: currentOperations,
    name_description: nameDescription,
    name_label: nameLabel,
    progress,
    result,
    status
  } = obj

  return {
    created: toTimestamp(obj.created),
    current_operations: currentOperations,
    finished: toTimestamp(obj.finished),
    name_description: nameDescription,
    name_label: nameLabel,
    progress: +progress,
    result,
    status,

    $host: link(obj, 'resident_on')
  }
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
// Transforms a XAPI host_patch record into its XO representation.
export function host_patch (obj) {
  return {
    applied: Boolean(obj.applied),
    time: toTimestamp(obj.timestamp_applied),

    // Links to the pool patch through its opaque reference.
    pool_patch: link(obj, 'pool_patch', '$ref'),

    $host: link(obj, 'host')
  }
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
// Transforms a XAPI pool_patch record into its XO representation.
export function pool_patch (obj) {
  const {
    after_apply_guidance: guidance,
    name_description: description,
    name_label: name,
    pool_applied: applied,
    size,
    uuid
  } = obj

  return {
    // Identified by its opaque reference instead of the default $id.
    id: obj.$ref,

    applied: Boolean(applied),
    description,
    guidance,
    name,
    size: +size,
    uuid,

    // TODO: what does it mean, should we handle it?
    // version: obj.version,

    // TODO: host.[$]pool_patches ←→ pool.[$]host_patches
    $host_patches: link(obj, 'host_patches')
  }
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
// Transforms a XAPI PCI record into its XO representation.
export function pci (obj) {
  const {
    class_name: className,
    device_name: deviceName,
    pci_id: pciId
  } = obj

  return {
    type: 'PCI',

    class_name: className,
    device_name: deviceName,
    pci_id: pciId,

    $host: link(obj, 'host')
  }
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
// Transforms a XAPI PGPU record into its XO representation.
export function pgpu (obj) {
  // Single host id, safe to compute once and reuse.
  const hostId = link(obj, 'host')

  return {
    type: 'PGPU',

    pci: link(obj, 'PCI'),

    // TODO: dedupe.
    host: hostId,
    $host: hostId,
    vgpus: link(obj, 'resident_VGPUs'),
    $vgpus: link(obj, 'resident_VGPUs')
  }
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
// Transforms a XAPI VGPU record into its XO representation.
export function vgpu (obj) {
  const { currently_attached: currentlyAttached, device } = obj

  return {
    type: 'VGPU',

    currentlyAttached: Boolean(currentlyAttached),
    device,
    resident_on: link(obj, 'resident_on'),
    vm: link(obj, 'VM')
  }
}
|
||||
@@ -1,8 +1,8 @@
|
||||
import endsWith from 'lodash.endswith'
|
||||
import got from 'got'
|
||||
import endsWith from 'lodash/endsWith'
|
||||
import JSON5 from 'json5'
|
||||
import { BaseError } from 'make-error'
|
||||
|
||||
import httpRequest from './http-request'
|
||||
import { parseDateTime } from './xapi'
|
||||
|
||||
const RRD_STEP_SECONDS = 5
|
||||
@@ -32,11 +32,7 @@ export class UnknownLegendFormat extends XapiStatsError {
|
||||
}
|
||||
}
|
||||
|
||||
export class FaultyGranularity extends XapiStatsError {
|
||||
constructor (msg) {
|
||||
super(msg)
|
||||
}
|
||||
}
|
||||
export class FaultyGranularity extends XapiStatsError {}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
// Utils
|
||||
@@ -289,9 +285,10 @@ export default class XapiStats {
|
||||
// Load
|
||||
hostStats.load.push(convertNanToNull(values[hostLegends.load]))
|
||||
|
||||
// Memory
|
||||
const memory = values[hostLegends.memory]
|
||||
const memoryFree = values[hostLegends.memoryFree]
|
||||
// Memory.
|
||||
// WARNING! memory/memoryFree are in kB.
|
||||
const memory = values[hostLegends.memory] * 1024
|
||||
const memoryFree = values[hostLegends.memoryFree] * 1024
|
||||
|
||||
hostStats.memory.push(memory)
|
||||
|
||||
@@ -391,8 +388,8 @@ export default class XapiStats {
|
||||
// Execute one http request on a XenServer for get stats
|
||||
// Return stats (Json format) or throws got exception
|
||||
async _getJson (url) {
|
||||
const response = await got(url, { rejectUnauthorized: false })
|
||||
return JSON5.parse(response.body)
|
||||
const body = await httpRequest(url, { rejectUnauthorized: false }).readAll()
|
||||
return JSON5.parse(body)
|
||||
}
|
||||
|
||||
async _getLastTimestamp (xapi, host, step) {
|
||||
@@ -405,19 +402,24 @@ export default class XapiStats {
|
||||
}
|
||||
|
||||
_getPoints (hostname, step, vmId) {
|
||||
const hostStats = this._hosts[hostname][step]
|
||||
|
||||
// Return host points
|
||||
if (vmId === undefined) {
|
||||
return this._hosts[hostname][step]
|
||||
return {
|
||||
interval: step,
|
||||
...hostStats
|
||||
}
|
||||
}
|
||||
|
||||
const vmsStats = this._vms[hostname][step]
|
||||
|
||||
// Return vm points
|
||||
const points = { endTimestamp: this._hosts[hostname][step].endTimestamp }
|
||||
|
||||
if (this._vms[hostname][step] !== undefined) {
|
||||
points.stats = this._vms[hostname][step][vmId]
|
||||
return {
|
||||
interval: step,
|
||||
endTimestamp: hostStats.endTimestamp,
|
||||
stats: (vmsStats && vmsStats[vmId]) || getNewVmStats()
|
||||
}
|
||||
|
||||
return points
|
||||
}
|
||||
|
||||
async _getAndUpdatePoints (xapi, host, vmId, granularity) {
|
||||
@@ -528,6 +530,11 @@ export default class XapiStats {
|
||||
async getVmPoints (xapi, vmId, granularity) {
|
||||
const vm = xapi.getObject(vmId)
|
||||
const host = vm.$resident_on
|
||||
|
||||
if (!host) {
|
||||
throw new Error(`VM ${vmId} is halted or host could not be found.`)
|
||||
}
|
||||
|
||||
return this._getAndUpdatePoints(xapi, host, vm.uuid, granularity)
|
||||
}
|
||||
}
|
||||
|
||||
1275
src/xapi.js
1275
src/xapi.js
File diff suppressed because it is too large
Load Diff
2169
src/xapi/index.js
Normal file
2169
src/xapi/index.js
Normal file
File diff suppressed because it is too large
Load Diff
10
src/xapi/mixins/networking.js
Normal file
10
src/xapi/mixins/networking.js
Normal file
@@ -0,0 +1,10 @@
|
||||
import {
|
||||
makeEditObject
|
||||
} from '../utils'
|
||||
|
||||
// High-level VIF edition: only the allowed IP address lists may be
// changed through this mixin.
const EDIT_VIF_SPEC = {
  ipv4Allowed: true,
  ipv6Allowed: true
}

export default {
  editVif: makeEditObject(EDIT_VIF_SPEC)
}
|
||||
53
src/xapi/mixins/storage.js
Normal file
53
src/xapi/mixins/storage.js
Normal file
@@ -0,0 +1,53 @@
|
||||
import {
|
||||
mapToArray
|
||||
} from '../../utils'
|
||||
|
||||
// SR/PBD management helpers.
//
// Public methods take an object id; their `_`-prefixed counterparts
// take the XAPI record directly.
export default {
  _plugPbd (pbd) {
    return this.call('PBD.plug', pbd.$ref)
  },

  async plugPbd (id) {
    await this._plugPbd(this.getObject(id))
  },

  _unplugPbd (pbd) {
    return this.call('PBD.unplug', pbd.$ref)
  },

  async unplugPbd (id) {
    await this._unplugPbd(this.getObject(id))
  },

  // Plugs every PBD of the SR, i.e. connects it to all its hosts.
  _connectAllSrPbds (sr) {
    return Promise.all(
      mapToArray(sr.$PBDs, pbd => this._plugPbd(pbd))
    )
  },

  async connectAllSrPbds (id) {
    await this._connectAllSrPbds(this.getObject(id))
  },

  // Unplugs every PBD of the SR, i.e. disconnects it from all its
  // hosts.
  _disconnectAllSrPbds (sr) {
    return Promise.all(
      mapToArray(sr.$PBDs, pbd => this._unplugPbd(pbd))
    )
  },

  async disconnectAllSrPbds (id) {
    await this._disconnectAllSrPbds(this.getObject(id))
  },

  // Permanently destroys an SR (its PBDs are unplugged first).
  async destroySr (id) {
    const sr = this.getObject(id)
    await this._disconnectAllSrPbds(sr)
    await this.call('SR.destroy', sr.$ref)
  },

  // Forgets an SR without destroying its content (its PBDs are
  // unplugged first).
  async forgetSr (id) {
    const sr = this.getObject(id)
    await this._disconnectAllSrPbds(sr)
    await this.call('SR.forget', sr.$ref)
  }
}
|
||||
316
src/xapi/mixins/vm.js
Normal file
316
src/xapi/mixins/vm.js
Normal file
@@ -0,0 +1,316 @@
|
||||
import find from 'lodash/find'
|
||||
import gte from 'lodash/gte'
|
||||
import lte from 'lodash/lte'
|
||||
|
||||
import {
|
||||
forEach,
|
||||
mapToArray,
|
||||
noop,
|
||||
parseSize,
|
||||
pCatch
|
||||
} from '../../utils'
|
||||
|
||||
import {
|
||||
isVmHvm,
|
||||
isVmRunning,
|
||||
makeEditObject
|
||||
} from '../utils'
|
||||
|
||||
export default {
|
||||
// TODO: clean up on error.
|
||||
async createVm (templateId, {
|
||||
name_label, // deprecated
|
||||
nameLabel = name_label, // eslint-disable-line camelcase
|
||||
|
||||
bootAfterCreate = false,
|
||||
|
||||
clone = true,
|
||||
installRepository = undefined,
|
||||
vdis = undefined,
|
||||
vifs = undefined,
|
||||
existingVdis = undefined,
|
||||
|
||||
coreOs = false,
|
||||
cloudConfig = undefined,
|
||||
|
||||
...props
|
||||
} = {}) {
|
||||
const installMethod = (() => {
|
||||
if (installRepository == null) {
|
||||
return 'none'
|
||||
}
|
||||
|
||||
try {
|
||||
installRepository = this.getObject(installRepository)
|
||||
return 'cd'
|
||||
} catch (_) {
|
||||
return 'network'
|
||||
}
|
||||
})()
|
||||
const template = this.getObject(templateId)
|
||||
|
||||
// Clones the template.
|
||||
let vm = await this._getOrWaitObject(
|
||||
await this[clone ? '_cloneVm' : '_copyVm'](template, nameLabel)
|
||||
)
|
||||
|
||||
// TODO: copy BIOS strings?
|
||||
|
||||
// Removes disks from the provision XML, we will create them by
|
||||
// ourselves.
|
||||
await this.call('VM.remove_from_other_config', vm.$ref, 'disks')::pCatch(noop)
|
||||
|
||||
// Creates the VDIs and executes the initial steps of the
|
||||
// installation.
|
||||
await this.call('VM.provision', vm.$ref)
|
||||
|
||||
// Set VMs params.
|
||||
// TODO: checkLimits
|
||||
this._editVm(vm, props)
|
||||
|
||||
// Sets boot parameters.
|
||||
{
|
||||
const isHvm = isVmHvm(vm)
|
||||
|
||||
if (isHvm) {
|
||||
if (!vdis.length || installMethod === 'network') {
|
||||
const { HVM_boot_params: bootParams } = vm
|
||||
let order = bootParams.order
|
||||
if (order) {
|
||||
order = 'n' + order.replace('n', '')
|
||||
} else {
|
||||
order = 'ncd'
|
||||
}
|
||||
|
||||
this._setObjectProperties(vm, {
|
||||
HVM_boot_params: { ...bootParams, order }
|
||||
})
|
||||
}
|
||||
} else { // PV
|
||||
if (vm.PV_bootloader === 'eliloader') {
|
||||
if (installMethod === 'network') {
|
||||
// TODO: normalize RHEL URL?
|
||||
|
||||
await this._updateObjectMapProperty(vm, 'other_config', {
|
||||
'install-repository': installRepository
|
||||
})
|
||||
} else if (installMethod === 'cd') {
|
||||
await this._updateObjectMapProperty(vm, 'other_config', {
|
||||
'install-repository': 'cdrom'
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Inserts the CD if necessary.
|
||||
if (installMethod === 'cd') {
|
||||
// When the VM is started, if PV, the CD drive will become not
|
||||
// bootable and the first disk bootable.
|
||||
await this._insertCdIntoVm(installRepository, vm, {
|
||||
bootable: true
|
||||
})
|
||||
}
|
||||
|
||||
// Modify existing (previous template) disks if necessary
|
||||
existingVdis && await Promise.all(mapToArray(existingVdis, async ({ size, $SR: srId, ...properties }, userdevice) => {
|
||||
const vbd = find(vm.$VBDs, { userdevice })
|
||||
if (!vbd) {
|
||||
return
|
||||
}
|
||||
const vdi = vbd.$VDI
|
||||
await this._setObjectProperties(vdi, properties)
|
||||
|
||||
// if the disk is bigger
|
||||
if (
|
||||
size != null &&
|
||||
size > vdi.virtual_size
|
||||
) {
|
||||
await this.resizeVdi(vdi.$id, size)
|
||||
}
|
||||
// if another SR is set, move it there
|
||||
if (srId) {
|
||||
await this.moveVdi(vdi.$id, srId)
|
||||
}
|
||||
}))
|
||||
|
||||
// Creates the user defined VDIs.
|
||||
//
|
||||
// TODO: set vm.suspend_SR
|
||||
if (vdis) {
|
||||
const devices = await this.call('VM.get_allowed_VBD_devices', vm.$ref)
|
||||
await Promise.all(mapToArray(vdis, (vdiDescription, i) => {
|
||||
return this._createVdi(
|
||||
vdiDescription.size, // FIXME: Should not be done in Xapi.
|
||||
{
|
||||
name_label: vdiDescription.name_label,
|
||||
name_description: vdiDescription.name_description,
|
||||
sr: vdiDescription.sr || vdiDescription.SR
|
||||
}
|
||||
)
|
||||
.then(ref => this._getOrWaitObject(ref))
|
||||
.then(vdi => this._createVbd(vm, vdi, {
|
||||
// Only the first VBD if installMethod is not cd is bootable.
|
||||
bootable: installMethod !== 'cd' && !i,
|
||||
|
||||
userdevice: devices[i]
|
||||
}))
|
||||
}))
|
||||
}
|
||||
|
||||
// Destroys the VIFs cloned from the template.
|
||||
await Promise.all(mapToArray(vm.$VIFs, vif => this._deleteVif(vif)))
|
||||
|
||||
// Creates the VIFs specified by the user.
|
||||
if (vifs) {
|
||||
const devices = await this.call('VM.get_allowed_VIF_devices', vm.$ref)
|
||||
await Promise.all(mapToArray(vifs, (vif, index) => this._createVif(
|
||||
vm,
|
||||
this.getObject(vif.network),
|
||||
{
|
||||
device: devices[index],
|
||||
mac: vif.mac,
|
||||
mtu: vif.mtu
|
||||
}
|
||||
)))
|
||||
}
|
||||
|
||||
// TODO: Assign VGPUs.
|
||||
|
||||
if (cloudConfig != null) {
|
||||
// Refresh the record.
|
||||
vm = this.getObject(vm.$id)
|
||||
|
||||
// Find the SR of the first VDI.
|
||||
let srRef
|
||||
forEach(vm.$VBDs, vbd => {
|
||||
const vdi = vbd.$VDI
|
||||
if (vdi) {
|
||||
srRef = vdi.SR
|
||||
return false
|
||||
}
|
||||
})
|
||||
|
||||
const method = coreOs
|
||||
? 'createCoreOsCloudInitConfigDrive'
|
||||
: 'createCloudInitConfigDrive'
|
||||
await this[method](vm.$id, srRef, cloudConfig)
|
||||
}
|
||||
|
||||
if (bootAfterCreate) {
|
||||
this._startVm(vm)::pCatch(noop)
|
||||
}
|
||||
|
||||
return this._waitObject(vm.$id)
|
||||
},
|
||||
|
||||
// High level method to edit a VM.
|
||||
//
|
||||
// Params do not correspond directly to XAPI props.
|
||||
_editVm: makeEditObject({
|
||||
autoPoweron: {
|
||||
set (value, vm) {
|
||||
return Promise.all([
|
||||
this._updateObjectMapProperty(vm, 'other_config', {
|
||||
autoPoweron: value ? 'true' : null
|
||||
}),
|
||||
value && this.setPoolProperties({
|
||||
autoPoweron: true
|
||||
})
|
||||
])
|
||||
}
|
||||
},
|
||||
|
||||
CPUs: 'cpus',
|
||||
cpus: {
|
||||
addToLimits: true,
|
||||
|
||||
// Current value may have constraints with other values.
|
||||
//
|
||||
// If the other value is not set and the constraint is not
|
||||
// respected, the other value is changed first.
|
||||
constraints: {
|
||||
cpusStaticMax: gte
|
||||
},
|
||||
|
||||
get: vm => +vm.VCPUs_at_startup,
|
||||
set: [
|
||||
'VCPUs_at_startup',
|
||||
function (value, vm) {
|
||||
return isVmRunning(vm) && this._set('VCPUs_number_live', value)
|
||||
}
|
||||
]
|
||||
},
|
||||
|
||||
cpuCap: {
|
||||
addToLimits: true,
|
||||
get: vm => vm.VCPUs_params.cap && +vm.VCPUs_params.cap,
|
||||
set (cap, vm) {
|
||||
return this._updateObjectMapProperty(vm, 'VCPUs_params', { cap })
|
||||
}
|
||||
},
|
||||
|
||||
cpusMax: 'cpusStaticMax',
|
||||
cpusStaticMax: {
|
||||
constraints: {
|
||||
cpus: lte
|
||||
},
|
||||
get: vm => +vm.VCPUs_max,
|
||||
set: 'VCPUs_max'
|
||||
},
|
||||
|
||||
cpuWeight: {
|
||||
addToLimits: true,
|
||||
get: vm => vm.VCPUs_params.weight && +vm.VCPUs_params.weight,
|
||||
set (weight, vm) {
|
||||
return this._updateObjectMapProperty(vm, 'VCPUs_params', { weight })
|
||||
}
|
||||
},
|
||||
|
||||
highAvailability: {
|
||||
set (ha, vm) {
|
||||
return this.call('VM.set_ha_restart_priority', vm.$ref, ha ? 'restart' : '')
|
||||
}
|
||||
},
|
||||
|
||||
memoryMin: {
|
||||
constraints: {
|
||||
memoryMax: gte
|
||||
},
|
||||
get: vm => +vm.memory_dynamic_min,
|
||||
preprocess: parseSize,
|
||||
set: 'memory_dynamic_min'
|
||||
},
|
||||
|
||||
memory: 'memoryMax',
|
||||
memoryMax: {
|
||||
addToLimits: true,
|
||||
constraints: {
|
||||
memoryMin: lte,
|
||||
memoryStaticMax: gte
|
||||
},
|
||||
get: vm => +vm.memory_dynamic_max,
|
||||
preprocess: parseSize,
|
||||
set: 'memory_dynamic_max'
|
||||
},
|
||||
|
||||
memoryStaticMax: {
|
||||
constraints: {
|
||||
memoryMax: lte
|
||||
},
|
||||
get: vm => +vm.memory_static_max,
|
||||
preprocess: parseSize,
|
||||
set: 'memory_static_max'
|
||||
},
|
||||
|
||||
nameDescription: true,
|
||||
|
||||
nameLabel: true,
|
||||
|
||||
PV_args: true
|
||||
}),
|
||||
|
||||
async editVm (id, props) {
|
||||
return /* await */ this._editVm(this.getObject(id), props)
|
||||
}
|
||||
}
|
||||
332
src/xapi/utils.js
Normal file
332
src/xapi/utils.js
Normal file
@@ -0,0 +1,332 @@
|
||||
// import isFinite from 'lodash/isFinite'
|
||||
import camelCase from 'lodash/camelCase'
|
||||
import isEqual from 'lodash/isEqual'
|
||||
import pickBy from 'lodash/pickBy'
|
||||
import { utcFormat, utcParse } from 'd3-time-format'
|
||||
|
||||
import {
|
||||
camelToSnakeCase,
|
||||
createRawObject,
|
||||
forEach,
|
||||
isArray,
|
||||
isBoolean,
|
||||
isFunction,
|
||||
isInteger,
|
||||
isObject,
|
||||
isString,
|
||||
map,
|
||||
mapToArray,
|
||||
noop
|
||||
} from '../utils'
|
||||
|
||||
// ===================================================================
|
||||
|
||||
// Coerce any value to a primitive boolean for the XAPI transport layer.
export const asBoolean = value => !!value

// (kept for reference — floats would need an explicit decimal point)
// const asFloat = value => {
//   value = String(value)
//   return value.indexOf('.') === -1
//     ? `${value}.0`
//     : value
// }

// XAPI expects integer parameters to be transmitted as decimal strings.
export const asInteger = value => `${value}`
|
||||
|
||||
// Return a copy of `obj` without the entries whose value is `undefined`
// (lodash `pickBy` keeps the entries for which the predicate is truthy).
export const filterUndefineds = obj => pickBy(obj, value => value !== undefined)
|
||||
|
||||
// Helper for optional values: `null`/`undefined` become `undefined`,
// any other value is passed through `fn` when one is provided,
// otherwise returned as is.
export const optional = (value, fn) => {
  if (value == null) {
    return undefined
  }
  return fn ? fn(value) : value
}
|
||||
|
||||
// Convert a JS value into the representation expected by the XAPI
// transport: integers become decimal strings, booleans become primitive
// booleans, and objects are stripped of `undefined` entries then
// converted recursively.  Anything else is passed through untouched.
export const prepareXapiParam = param => {
  // if (isFinite(param) && !isInteger(param)) {
  //   return asFloat(param)
  // }
  if (isInteger(param)) {
    return asInteger(param)
  }
  if (isBoolean(param)) {
    return asBoolean(param)
  }
  if (isObject(param)) {
    // NOTE(review): assumes `map` handles plain objects as collections
    // (lodash-style) and preserves their shape — confirm in ../utils.
    return map(filterUndefineds(param), prepareXapiParam)
  }

  return param
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
// Matches a XAPI opaque reference (e.g. `OpaqueRef:abc-123`) anywhere
// in a string.
const OPAQUE_REF_RE = /OpaqueRef:[0-9a-z-]+/

// Extract the first opaque reference found in `str`; throws when the
// string does not contain one.
export const extractOpaqueRef = str => {
  const matches = OPAQUE_REF_RE.exec(str)
  if (matches) {
    return matches[0]
  }
  throw new Error('no opaque ref found')
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
// Map from lowercased object type to the exact namespace casing used by
// the Xen API (e.g. `vm_metrics` → `VM_metrics`).
const TYPE_TO_NAMESPACE = createRawObject()
forEach([
  'Bond',
  'DR_task',
  'GPU_group',
  'PBD',
  'PCI',
  'PGPU',
  'PIF',
  'PIF_metrics',
  'SM',
  'SR',
  'VBD',
  'VBD_metrics',
  'VDI',
  'VGPU',
  'VGPU_type',
  'VLAN',
  'VM',
  'VM_appliance',
  'VM_guest_metrics',
  'VM_metrics',
  'VMPP',
  'VTPM'
], namespace => {
  TYPE_TO_NAMESPACE[namespace.toLowerCase()] = namespace
})

// Object types given by `xen-api` are always lowercase but the
// namespaces in the Xen API can have a different casing.
// Unknown types are returned unchanged (already correctly cased or
// simple lowercase namespaces such as `host`, `pool`, `network`).
export const getNamespaceForType = type => TYPE_TO_NAMESPACE[type] || type
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
// Format a date (pseudo ISO 8601) from one XenServer get by
// xapi.call('host.get_servertime', host.$ref) for example
export const formatDateTime = utcFormat('%Y%m%dT%H:%M:%SZ')

// Inverse operation: parse a XAPI timestamp into a Date (UTC).
// NOTE: d3's `utcParse` returns `null` when the string does not match
// the layout exactly (documented d3-time-format behavior).
export const parseDateTime = utcParse('%Y%m%dT%H:%M:%SZ')
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
// A host is considered running when its metrics object exists and
// reports itself live.  Note: when metrics are missing the return
// value is the falsy `$metrics` value itself (e.g. `undefined`), not
// `false` — callers rely on truthiness only.
export const isHostRunning = host => host.$metrics && host.$metrics.live
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
// HVM guests are recognized by a non-empty boot policy.
export const isVmHvm = vm => Boolean(vm.HVM_boot_policy)

// Power states in which a VM is considered running.
const RUNNING_POWER_STATES = {
  Paused: true,
  Running: true
}

// Truthy (`true`) when the VM is running or paused, `undefined`
// otherwise — callers test truthiness only.
export const isVmRunning = ({ power_state }) => RUNNING_POWER_STATES[power_state]
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
// Default `addToLimits` implementation: the resource-limit delta is
// simply the difference between the requested and the current value.
const _DEFAULT_ADD_TO_LIMITS = (next, current) => next - current

// Like map() but drops falsy results — used below to collect only the
// setter callbacks which actually have work to do.
const _mapFilter = (collection, iteratee) => {
  const result = []
  forEach(collection, (...args) => {
    const value = iteratee(...args)
    if (value) {
      result.push(value)
    }
  })
  return result
}
|
||||
|
||||
// Build a generic async `editObject(id, values, checkLimits)` method
// from a declarative `specs` object.
//
// Each spec may define:
// - `get`: `true` (read the snake_cased property), a property-name
//   string, or a function `object => currentValue`
// - `set`: `true`, a property name (or `map.prop` to update one entry
//   of an object-map property), a function `(value, object)`, or an
//   array of any of those (applied in parallel)
// - `preprocess`: applied to the incoming value before comparison
// - `constraints`: map of sibling spec names to predicates
//   `(constraintCurrentValue, newValue) => boolean`; when violated, the
//   sibling is updated to the new value first
// - `addToLimits`: `true` (delta = next - current) or a custom function,
//   used to report resource-limit deltas to `checkLimits`
//
// A spec may also be a string: an alias to another spec.  camelCase and
// snake_case aliases are added automatically.
export const makeEditObject = specs => {
  // Normalize the `get` part of a spec into `object => value`.
  const normalizeGet = (get, name) => {
    if (get === true) {
      const prop = camelToSnakeCase(name)
      return object => object[prop]
    }

    if (isString(get)) {
      return object => object[get]
    }

    return get
  }

  // Normalize the `set` part of a spec into a function
  // `(value, object) => Promise` invoked with the edit context as `this`.
  const normalizeSet = (set, name) => {
    if (isFunction(set)) {
      return set
    }

    if (set === true) {
      const prop = camelToSnakeCase(name)
      return function (value) {
        return this._set(prop, value)
      }
    }

    if (isString(set)) {
      const index = set.indexOf('.')
      if (index === -1) {
        return function (value) {
          return this._set(set, value)
        }
      }

      // `map.prop` syntax: update one entry of an object-map property
      // (e.g. `VCPUs_params.cap`).
      const map = set.slice(0, index)
      const prop = set.slice(index + 1)

      return function (value, object) {
        return this._updateObjectMapProperty(object, map, { [prop]: value })
      }
    }

    if (!isArray(set)) {
      throw new Error('must be an array, a function or a string')
    }

    set = mapToArray(set, normalizeSet)

    const { length } = set
    if (!length) {
      throw new Error('invalid setter')
    }

    if (length === 1) {
      return set[0]
    }

    // Multiple setters: run them all in parallel.
    return function (value, object) {
      return Promise.all(mapToArray(set, set => set.call(this, value, object)))
    }
  }

  // Validate and normalize a whole spec (mutated in place).
  const normalizeSpec = (spec, name) => {
    if (spec === true) {
      spec = {
        get: true,
        set: true
      }
    }

    if (spec.addToLimits === true) {
      spec.addToLimits = _DEFAULT_ADD_TO_LIMITS
    }

    forEach(spec.constraints, (constraint, constraintName) => {
      if (!isFunction(constraint)) {
        throw new Error('constraint must be a function')
      }

      const constraintSpec = specs[constraintName]
      // FIX: fail with a clear message when the constraint references a
      // non-existent spec, instead of a TypeError on the next line.
      if (constraintSpec === undefined) {
        throw new Error(`unknown constraint spec: ${constraintName}`)
      }
      if (!constraintSpec.get) {
        throw new Error('constraint values must have a get')
      }
    })

    const { get } = spec
    if (get) {
      spec.get = normalizeGet(get, name)
    } else if (spec.addToLimits) {
      throw new Error('addToLimits cannot be defined without get')
    }

    spec.set = normalizeSet(spec.set, name)

    return spec
  }
  forEach(specs, (spec, name) => {
    isString(spec) || (specs[name] = normalizeSpec(spec, name))
  })

  // Resolves aliases and add camelCase and snake_case aliases.
  forEach(specs, (spec, name) => {
    if (isString(spec)) {
      do {
        spec = specs[spec]
      } while (isString(spec))
      specs[name] = spec
    }

    let tmp
    specs[tmp = camelCase(name)] || (specs[tmp] = spec)
    specs[tmp = camelToSnakeCase(name)] || (specs[tmp] = spec)
  })

  return async function _editObject_ (id, values, checkLimits) {
    // When limits are checked, collect the deltas to submit to
    // `checkLimits(limits, object)` before applying anything.
    const limits = checkLimits && {}
    const object = this.getObject(id)

    const _objectRef = object.$ref
    const _setMethodPrefix = `${getNamespaceForType(object.$type)}.set_`

    // Context used to execute functions.
    const context = {
      __proto__: this,
      _set: (prop, value) => this.call(_setMethodPrefix + prop, _objectRef, prepareXapiParam(value))
    }

    // Returns a callback performing the update for one named value, or
    // a falsy value when there is nothing to do.
    const set = (value, name) => {
      if (value === undefined) {
        return
      }

      const spec = specs[name]
      if (!spec) {
        return
      }

      const { preprocess } = spec
      if (preprocess) {
        value = preprocess(value)
      }

      const { get } = spec
      if (get) {
        const current = get(object)
        // Value already up to date: nothing to do.
        if (isEqual(value, current)) {
          return
        }

        let addToLimits
        if (limits && (addToLimits = spec.addToLimits)) {
          limits[name] = addToLimits(value, current)
        }
      }

      const cb = () => spec.set.call(context, value, object)

      const { constraints } = spec
      if (constraints) {
        const cbs = []

        forEach(constraints, (constraint, constraintName) => {
          // This constraint value is already defined: bypass the constraint.
          if (values[constraintName] != null) {
            return
          }

          if (!constraint(specs[constraintName].get(object), value)) {
            const cb = set(value, constraintName)
            // FIX: `set()` may return `undefined` (e.g. the constraint
            // value is already equal); pushing it would make the
            // combined callback below throw when invoking `cb()`.
            if (cb) {
              cbs.push(cb)
            }
          }
        })

        if (cbs.length) {
          // Apply the constraint updates first, then the main one.
          return () => Promise.all(mapToArray(cbs, cb => cb())).then(cb)
        }
      }

      return cb
    }

    const cbs = _mapFilter(values, set)

    if (checkLimits) {
      await checkLimits(limits, object)
    }

    return Promise.all(mapToArray(cbs, cb => cb())).then(noop)
  }
}
|
||||
167
src/xo-mixins/acls.js
Normal file
167
src/xo-mixins/acls.js
Normal file
@@ -0,0 +1,167 @@
|
||||
import checkAuthorization from 'xo-acl-resolver'
|
||||
|
||||
import {
|
||||
ModelAlreadyExists
|
||||
} from '../collection'
|
||||
import {
|
||||
Acls
|
||||
} from '../models/acl'
|
||||
import {
|
||||
createRawObject,
|
||||
forEach,
|
||||
includes,
|
||||
mapToArray
|
||||
} from '../utils'
|
||||
|
||||
// ===================================================================
|
||||
|
||||
// Mixin handling ACLs — (subject, object, action) triples persisted in
// Redis — and the mapping from role ids to low-level permissions.
export default class {
  constructor (xo) {
    this._xo = xo

    // Redis-backed collection of ACL entries.
    this._acls = new Acls({
      connection: xo._redis,
      prefix: 'xo:acl',
      indexes: ['subject', 'object']
    })
  }

  // Fetch the ACLs applying to a user: those attached to the user
  // itself or to any of its groups.
  async _getAclsForUser (userId) {
    const user = await this._xo.getUser(userId)
    const { groups } = user

    const subjects = groups
      ? groups.concat(userId)
      : [ userId ]

    const acls = []
    // Bound push: appends a whole array of entries in one call.
    const pushAcls = (push => entries => {
      push.apply(acls, entries)
    })(acls.push)

    const collection = this._acls
    await Promise.all(mapToArray(
      subjects,
      subject => collection.get({subject}).then(pushAcls)
    ))

    return acls
  }

  // Create an ACL entry; duplicates are silently ignored.
  async addAcl (subjectId, objectId, action) {
    try {
      await this._acls.create(subjectId, objectId, action)
    } catch (error) {
      if (!(error instanceof ModelAlreadyExists)) {
        throw error
      }
    }
  }

  async removeAcl (subjectId, objectId, action) {
    await this._acls.delete(subjectId, objectId, action)
  }

  // TODO: remove when new collection.
  async getAllAcls () {
    return this._acls.get()
  }

  // Compute the user's permission map `{ objectId: { permission: 1 } }`,
  // expanding role actions into their permission lists.
  async getPermissionsForUser (userId) {
    const [
      acls,
      permissionsByRole
    ] = await Promise.all([
      this._getAclsForUser(userId),
      this._getPermissionsByRole()
    ])

    const permissions = createRawObject()
    for (const { action, object: objectId } of acls) {
      const current = (
        permissions[objectId] ||
        (permissions[objectId] = createRawObject())
      )

      const permissionsForRole = permissionsByRole[action]
      if (permissionsForRole) {
        for (const permission of permissionsForRole) {
          current[permission] = 1
        }
      } else {
        // Not a known role: the action is itself a permission.
        current[action] = 1
      }
    }
    return permissions
  }

  // Whether the user has all the requested permissions (delegated to
  // xo-acl-resolver); XO administrators bypass the check entirely.
  async hasPermissions (userId, permissions) {
    const user = await this._xo.getUser(userId)

    // Special case for super XO administrators.
    if (user.permission === 'admin') {
      return true
    }

    return checkAuthorization(
      await this.getPermissionsForUser(userId),
      id => this._xo.getObject(id),
      permissions
    )
  }

  // -----------------------------------------------------------------

  // Build `{ roleId: [ permissions ] }` from the declared roles.
  async _getPermissionsByRole () {
    const roles = await this.getRoles()

    const permissions = createRawObject()
    for (const role of roles) {
      permissions[role.id] = role.permissions
    }
    return permissions
  }

  // TODO: delete when merged with the new collection.
  // Roles are currently hardcoded; each grants a list of permissions.
  async getRoles () {
    return [
      {
        id: 'viewer',
        name: 'Viewer',
        permissions: [
          'view'
        ]
      },
      {
        id: 'operator',
        name: 'Operator',
        permissions: [
          'view',
          'operate'
        ]
      },
      {
        id: 'admin',
        name: 'Admin',
        permissions: [
          'view',
          'operate',
          'administrate'
        ]
      }
    ]
  }

  // Returns an array of roles which have a given permission.
  async getRolesForPermission (permission) {
    const roles = []

    forEach(await this.getRoles(), role => {
      if (includes(role.permissions, permission)) {
        roles.push(role.id)
      }
    })

    return roles
  }
}
|
||||
181
src/xo-mixins/authentication.js
Normal file
181
src/xo-mixins/authentication.js
Normal file
@@ -0,0 +1,181 @@
|
||||
import Token, { Tokens } from '../models/token'
|
||||
import {
|
||||
NoSuchObject
|
||||
} from '../api-errors'
|
||||
import {
|
||||
createRawObject,
|
||||
generateToken,
|
||||
pCatch,
|
||||
noop
|
||||
} from '../utils'
|
||||
|
||||
// ===================================================================
|
||||
|
||||
// Thrown when an authentication token does not exist (or has expired —
// see `getAuthenticationToken` below).
class NoSuchAuthenticationToken extends NoSuchObject {
  constructor (id) {
    super(id, 'authentication token')
  }
}
|
||||
|
||||
// ===================================================================
|
||||
|
||||
// Mixin handling user authentication: pluggable providers, basic
// brute-force throttling and persistent authentication tokens.
export default class {
  constructor (xo) {
    this._xo = xo

    // Store last failures by user to throttle tries (slow bruteforce
    // attacks).
    this._failures = createRawObject()

    // Registered authentication providers, tried in insertion order.
    this._providers = new Set()

    // Creates persistent collections.
    this._tokens = new Tokens({
      connection: xo._redis,
      prefix: 'xo:token',
      indexes: ['user_id']
    })

    // Password authentication provider.
    this.registerAuthenticationProvider(async ({
      username,
      password
    }) => {
      if (username === undefined || password === undefined) {
        return
      }

      const user = await xo.getUserByName(username, true)
      if (user && await xo.checkUserPassword(user.id, password)) {
        return user.id
      }
    })

    // Token authentication provider.
    this.registerAuthenticationProvider(async ({
      token: tokenId
    }) => {
      if (!tokenId) {
        return
      }

      try {
        return (await xo.getAuthenticationToken(tokenId)).user_id
      } catch (e) {
        // Invalid/expired token: simply no match for this provider.
        return
      }
    })
  }

  registerAuthenticationProvider (provider) {
    return this._providers.add(provider)
  }

  unregisterAuthenticationProvider (provider) {
    return this._providers.delete(provider)
  }

  // Try each registered provider in turn; returns the authenticated
  // user object or `false` when no provider matched.
  async _authenticateUser (credentials) {
    for (const provider of this._providers) {
      try {
        // A provider can return:
        // - `null` if the user could not be authenticated
        // - the identifier of the authenticated user
        // - an object with a property `username` containing the name
        //   of the authenticated user
        const result = await provider(credentials)

        // No match.
        if (!result) {
          continue
        }

        return result.username
          ? await this._xo.registerUser(undefined, result.username)
          : await this._xo.getUser(result)
      } catch (error) {
        // DEPRECATED: Authentication providers may just throw `null`
        // to indicate they could not authenticate the user without
        // any special errors.
        if (error) console.error(error.stack || error)
      }
    }

    return false
  }

  // Public entry point: throttles repeated failures (one try per 2s
  // per username) before delegating to the providers.
  async authenticateUser (credentials) {
    // TODO: remove when email has been replaced by username.
    if (credentials.email) {
      credentials.username = credentials.email
    } else if (credentials.username) {
      credentials.email = credentials.username
    }

    const failures = this._failures

    const { username } = credentials
    const now = Date.now()
    let lastFailure
    if (
      username &&
      (lastFailure = failures[username]) &&
      (lastFailure + 2e3) > now
    ) {
      throw new Error('too fast authentication tries')
    }

    const user = await this._authenticateUser(credentials)
    if (user) {
      delete failures[username]
    } else {
      failures[username] = now
    }

    return user
  }

  // -----------------------------------------------------------------

  async createAuthenticationToken ({userId}) {
    const token = new Token({
      id: await generateToken(),
      user_id: userId,
      expiration: Date.now() + 1e3 * 60 * 60 * 24 * 30 // 1 month validity.
    })

    await this._tokens.add(token)

    // TODO: use plain properties directly.
    return token.properties
  }

  async deleteAuthenticationToken (id) {
    if (!await this._tokens.remove(id)) {
      throw new NoSuchAuthenticationToken(id)
    }
  }

  // Fetch a token by id; expired tokens are (asynchronously) removed
  // and reported as missing.
  async getAuthenticationToken (id) {
    let token = await this._tokens.first(id)
    if (!token) {
      throw new NoSuchAuthenticationToken(id)
    }

    token = token.properties

    if (!(
      token.expiration > Date.now()
    )) {
      // Best-effort cleanup of the expired token.
      this._tokens.remove(id)::pCatch(noop)

      throw new NoSuchAuthenticationToken(id)
    }

    return token
  }

  async getAuthenticationTokensForUser (userId) {
    return this._tokens.get({ user_id: userId })
  }
}
|
||||
716
src/xo-mixins/backups.js
Normal file
716
src/xo-mixins/backups.js
Normal file
@@ -0,0 +1,716 @@
|
||||
import endsWith from 'lodash/endsWith'
|
||||
import escapeStringRegexp from 'escape-string-regexp'
|
||||
import eventToPromise from 'event-to-promise'
|
||||
import filter from 'lodash/filter'
|
||||
import find from 'lodash/find'
|
||||
import findIndex from 'lodash/findIndex'
|
||||
import sortBy from 'lodash/sortBy'
|
||||
import startsWith from 'lodash/startsWith'
|
||||
import {
|
||||
basename,
|
||||
dirname
|
||||
} from 'path'
|
||||
import { satisfies as versionSatisfies } from 'semver'
|
||||
|
||||
import vhdMerge from '../vhd-merge'
|
||||
import xapiObjectToXo from '../xapi-object-to-xo'
|
||||
import {
|
||||
deferrable
|
||||
} from '../decorators'
|
||||
import {
|
||||
forEach,
|
||||
mapToArray,
|
||||
noop,
|
||||
pCatch,
|
||||
pSettle,
|
||||
safeDateFormat
|
||||
} from '../utils'
|
||||
import {
|
||||
VDI_FORMAT_VHD
|
||||
} from '../xapi'
|
||||
|
||||
// ===================================================================
|
||||
|
||||
// Delta VM backups are stored as JSON metadata files.
const DELTA_BACKUP_EXT = '.json'
const DELTA_BACKUP_EXT_LENGTH = DELTA_BACKUP_EXT.length

// VDI backup file names: `<timestamp>_full.vhd` / `<timestamp>_delta.vhd`.
const VDI_BACKUP_RE = /^\d+T\d+Z_(?:full|delta)\.vhd$/
const DELTA_VDI_BACKUP_RE = /^\d+T\d+Z_delta\.vhd$/
const FULL_VDI_BACKUP_RE = /^\d+T\d+Z_full\.vhd$/
const VDI_TIMESTAMP_RE = /^(\d+T\d+Z)_(?:full|delta)\.vhd$/

// Test if a file is a VDI backup (full or delta).
const isVdiBackup = name => VDI_BACKUP_RE.test(name)

// Test if a file is specifically a delta (resp. full) VDI backup.
const isDeltaVdiBackup = name => DELTA_VDI_BACKUP_RE.test(name)
const isFullVdiBackup = name => FULL_VDI_BACKUP_RE.test(name)

// Extract the timestamp of a VDI backup file name.  Throws (TypeError)
// on a non-matching name, like the original — callers pass names
// pre-filtered by `isVdiBackup`.
const getVdiTimestamp = name => VDI_TIMESTAMP_RE.exec(name)[1]

// Strip the `.json` extension from a delta VM backup file name.
const getDeltaBackupNameWithoutExt = name => name.slice(0, -DELTA_BACKUP_EXT_LENGTH)

// A delta VM backup is identified by its `.json` extension.
const isDeltaBackup = name => endsWith(name, DELTA_BACKUP_EXT)
|
||||
|
||||
// Verify a remote file against its stored checksum by reading it fully
// through a checksum-validating stream.  A missing file (ENOENT) is
// silently accepted; any other error (including a checksum mismatch
// surfaced by the stream) propagates.
async function checkFileIntegrity (handler, name) {
  let stream

  try {
    stream = await handler.createReadStream(name, { checksum: true })
  } catch (error) {
    if (error.code === 'ENOENT') {
      return
    }

    throw error
  }

  // Drain the stream; 'finish' fires once the whole file was read and
  // the checksum validated.
  stream.resume()
  await eventToPromise(stream, 'finish')
}
|
||||
|
||||
// ===================================================================
|
||||
|
||||
export default class {
|
||||
  // Backups mixin: keeps a reference to the main XO instance, through
  // which remotes, XAPI connections and objects are resolved.
  constructor (xo) {
    this._xo = xo
  }
|
||||
|
||||
  // List the backups available on a remote: plain `.xva` full backups,
  // plus delta backups (their `.json` metadata files, listed without
  // extension, inside the `vm_delta_*` directories).
  async listRemoteBackups (remoteId) {
    const handler = await this._xo.getRemoteHandler(remoteId)

    // List backups. (No delta)
    const backupFilter = file => endsWith(file, '.xva')

    const files = await handler.list()
    const backups = filter(files, backupFilter)

    // List delta backups.
    const deltaDirs = filter(files, file => startsWith(file, 'vm_delta_'))

    for (const deltaDir of deltaDirs) {
      const files = await handler.list(deltaDir)
      const deltaBackups = filter(files, isDeltaBackup)

      backups.push(...mapToArray(
        deltaBackups,
        deltaBackup => {
          return `${deltaDir}/${getDeltaBackupNameWithoutExt(deltaBackup)}`
        }
      ))
    }

    return backups
  }
|
||||
|
||||
  // Import a full (`.xva`) VM backup from a remote into the given SR.
  // Returns the XO id of the newly created VM.
  async importVmBackup (remoteId, file, sr) {
    const handler = await this._xo.getRemoteHandler(remoteId)
    const stream = await handler.createReadStream(file)
    const xapi = this._xo.getXapi(sr)

    const vm = await xapi.importVm(stream, { srId: sr._xapiId })
    return xapiObjectToXo(vm).id
  }
|
||||
|
||||
// -----------------------------------------------------------------
|
||||
|
||||
  // Continuous replication: copy a VM to a (possibly remote-pool) SR as
  // a delta against the previous copy when one exists.
  // `$onFailure` (injected by the decorator) registers cleanup run when
  // the operation fails.  Returns the XO id of the copied VM.
  @deferrable.onFailure
  async deltaCopyVm ($onFailure, srcVm, targetSr) {
    const srcXapi = this._xo.getXapi(srcVm)
    const targetXapi = this._xo.getXapi(targetSr)

    // Get Xen objects from XO objects.
    srcVm = srcXapi.getObject(srcVm._xapiId)
    targetSr = targetXapi.getObject(targetSr._xapiId)

    // 1. Find the local base for this SR (if any).
    // The uuid of the last exported snapshot is stored in the source
    // VM's other_config under a per-target-SR key.
    const TAG_LAST_BASE_DELTA = `xo:base_delta:${targetSr.uuid}`
    const localBaseUuid = (id => {
      if (id != null) {
        const base = srcXapi.getObject(id, null)
        return base && base.uuid
      }
    })(srcVm.other_config[TAG_LAST_BASE_DELTA])

    // 2. Copy.
    const dstVm = await (async () => {
      const delta = await srcXapi.exportDeltaVm(srcVm.$id, localBaseUuid, {
        snapshotNameLabel: `XO_DELTA_EXPORT: ${targetSr.name_label} (${targetSr.uuid})`
      })
      // On failure: cancel the export streams then delete the snapshot
      // created by the export.
      $onFailure(async () => {
        await Promise.all(mapToArray(
          delta.streams,
          stream => stream.cancel()
        ))

        return srcXapi.deleteVm(delta.vm.uuid, true)
      })

      const promise = targetXapi.importDeltaVm(
        delta,
        {
          deleteBase: true, // Remove the remote base.
          srId: targetSr.$id
        }
      )

      // Once done, (asynchronously) remove the (now obsolete) local
      // base.
      if (localBaseUuid) {
        promise.then(() => srcXapi.deleteVm(localBaseUuid, true))::pCatch(noop)
      }

      // (Asynchronously) Identify snapshot as future base.
      promise.then(() => {
        return srcXapi._updateObjectMapProperty(srcVm, 'other_config', {
          [TAG_LAST_BASE_DELTA]: delta.vm.uuid
        })
      })::pCatch(noop)

      return promise
    })()

    // 5. Return the identifier of the new XO VM object.
    return xapiObjectToXo(dstVm).id
  }
|
||||
|
||||
// -----------------------------------------------------------------
|
||||
|
||||
  // TODO: The other backup methods must use this function !
  // Prerequisite: The backups array must be ordered. (old to new backups)
  // Delete the `n` oldest entries of `backups` from the remote (in
  // parallel); `dir` is optional and prefixed to each file name.
  async _removeOldBackups (backups, handler, dir, n) {
    if (n <= 0) {
      return
    }

    const getPath = (file, dir) => dir ? `${dir}/${file}` : file

    await Promise.all(
      mapToArray(backups.slice(0, n), async backup => /* await */ handler.unlink(getPath(backup, dir)))
    )
  }
|
||||
|
||||
// -----------------------------------------------------------------
|
||||
|
||||
  // Legacy format: restore one VDI by creating it empty then importing,
  // in order, the full backup followed by each dependent delta.
  // Returns the id of the created VDI.
  async _legacyImportDeltaVdiBackup (xapi, { vmId, handler, dir, vdiInfo }) {
    const vdi = await xapi.createVdi(vdiInfo.virtual_size, vdiInfo)
    const vdiId = vdi.$id

    // dir = vm_delta_xxx
    // xoPath = vdi_xxx/timestamp_(full|delta).vhd
    // vdiDir = vdi_xxx
    const { xoPath } = vdiInfo
    const filePath = `${dir}/${xoPath}`
    const vdiDir = dirname(xoPath)

    const backups = await this._listDeltaVdiDependencies(handler, filePath)

    // Sequential import: each VHD must be applied on top of the
    // previous one.
    for (const backup of backups) {
      const stream = await handler.createReadStream(`${dir}/${vdiDir}/${backup}`)

      await xapi.importVdiContent(vdiId, stream, {
        format: VDI_FORMAT_VHD
      })
    }

    return vdiId
  }
|
||||
|
||||
  // Legacy format: restore a whole delta-backed VM — import the VM
  // metadata from the `.xva`, recreate and fill its VDIs, reattach the
  // VBDs, then restore the original name and start permission.
  async _legacyImportDeltaVmBackup (xapi, { remoteId, handler, filePath, info, sr }) {
    // Import vm metadata.
    const vm = await (async () => {
      const stream = await handler.createReadStream(`${filePath}.xva`)
      return /* await */ xapi.importVm(stream, { onlyMetadata: true })
    })()

    const vmName = vm.name_label
    const dir = dirname(filePath)

    // Disable start and change the VM name label during import.
    await Promise.all([
      xapi.addForbiddenOperationToVm(vm.$id, 'start', 'Delta backup import...'),
      xapi._setObjectProperties(vm, { name_label: `[Importing...] ${vmName}` })
    ])

    // Destroy vbds if necessary. Why ?
    // Because XenServer creates Vbds linked to the vdis of the backup vm if it exists.
    await xapi.destroyVbdsFromVm(vm.uuid)

    // Import VDIs.
    // Maps the backup's VDI uuids to the ids of the recreated VDIs.
    const vdiIds = {}
    await Promise.all(
      mapToArray(
        info.vdis,
        async vdiInfo => {
          vdiInfo.sr = sr._xapiId

          const vdiId = await this._legacyImportDeltaVdiBackup(xapi, { vmId: vm.$id, handler, dir, vdiInfo })
          vdiIds[vdiInfo.uuid] = vdiId
        }
      )
    )

    await Promise.all(
      mapToArray(
        info.vbds,
        vbdInfo => {
          xapi.attachVdiToVm(vdiIds[vbdInfo.xoVdi], vm.$id, vbdInfo)
        }
      )
    )

    // Import done, reenable start and set real vm name.
    await Promise.all([
      xapi.removeForbiddenOperationFromVm(vm.$id, 'start'),
      xapi._setObjectProperties(vm, { name_label: vmName })
    ])

    return vm
  }
|
||||
|
||||
// -----------------------------------------------------------------
|
||||
|
||||
  // List the VDI backups of a directory, sorted old → new (file names
  // start with the timestamp so a lexicographic sort is chronological).
  // Leading deltas with no preceding full backup are unusable and are
  // deleted before returning.
  async _listVdiBackups (handler, dir) {
    let files

    try {
      files = await handler.list(dir)
    } catch (error) {
      // A missing directory simply means no backups yet.
      if (error.code === 'ENOENT') {
        files = []
      } else {
        throw error
      }
    }

    const backups = sortBy(filter(files, fileName => isVdiBackup(fileName)))
    let i

    // Avoid unstable state: No full vdi found to the beginning of array. (base)
    for (i = 0; i < backups.length && isDeltaVdiBackup(backups[i]); i++);
    await this._removeOldBackups(backups, handler, dir, i)

    return backups.slice(i)
  }
|
||||
|
||||
  // Enforce the retention depth for one VDI's backup chain: merge the
  // backups older than `depth` into a single new full backup, deleting
  // everything they superseded.
  async _mergeDeltaVdiBackups ({handler, dir, depth}) {
    const backups = await this._listVdiBackups(handler, dir)
    // Index of the newest backup that must be folded into the new full.
    let i = backups.length - depth

    // No merge.
    if (i <= 0) {
      return
    }

    const timestamp = getVdiTimestamp(backups[i])
    const newFullBackup = `${dir}/${timestamp}_full.vhd`

    await checkFileIntegrity(handler, `${dir}/${backups[i]}`)

    // Walk back from i to the most recent full backup (chain base).
    let j = i
    for (; j > 0 && isDeltaVdiBackup(backups[j]); j--);
    const fullBackupId = j

    // Remove old backups before the most recent full.
    if (j > 0) {
      for (j--; j >= 0; j--) {
        await handler.unlink(`${dir}/${backups[j]}`, { checksum: true })
      }
    }

    const parent = `${dir}/${backups[fullBackupId]}`

    // Merge each delta (base..i) into the full backup, one at a time,
    // removing it once merged.
    for (j = fullBackupId + 1; j <= i; j++) {
      const backup = `${dir}/${backups[j]}`

      try {
        await checkFileIntegrity(handler, backup)
        await vhdMerge(handler, parent, handler, backup)
      } catch (e) {
        console.error('Unable to use vhd-util.', e)
        throw e
      }

      await handler.unlink(backup, { checksum: true })
    }

    // Rename the first old full backup to the new full backup.
    await handler.rename(parent, newFullBackup)
  }
|
||||
|
||||
async _listDeltaVdiDependencies (handler, filePath) {
|
||||
const dir = dirname(filePath)
|
||||
const filename = basename(filePath)
|
||||
const backups = await this._listVdiBackups(handler, dir)
|
||||
|
||||
// Search file. (delta or full backup)
|
||||
const i = findIndex(backups, backup =>
|
||||
getVdiTimestamp(backup) === getVdiTimestamp(filename)
|
||||
)
|
||||
|
||||
if (i === -1) {
|
||||
throw new Error('VDI to import not found in this remote.')
|
||||
}
|
||||
|
||||
// Search full backup.
|
||||
let j
|
||||
|
||||
for (j = i; j >= 0 && isDeltaVdiBackup(backups[j]); j--);
|
||||
|
||||
if (j === -1) {
|
||||
throw new Error(`Unable to found full vdi backup of: ${filePath}`)
|
||||
}
|
||||
|
||||
return backups.slice(j, i + 1)
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------
|
||||
|
||||
  // List the delta VM backups (`.json` metadata files) of a directory,
  // sorted old -> new (names are timestamp-prefixed).
  async _listDeltaVmBackups (handler, dir) {
    const files = await handler.list(dir)
    return sortBy(filter(files, isDeltaBackup))
  }
|
||||
|
||||
  // Write one VDI export stream to the remote as
  // `vdi_<uuid>/<date>_(full|delta).vhd`; on any error the (corrupt)
  // file is removed before rethrowing.  Returns the path relative to
  // the VM backup directory.  (`depth` currently unused here.)
  async _saveDeltaVdiBackup (xapi, { vdiParent, isFull, handler, stream, dir, depth }) {
    const backupDirectory = `vdi_${vdiParent.uuid}`
    dir = `${dir}/${backupDirectory}`

    const date = safeDateFormat(new Date())

    // For old versions: remove old bases if exists.
    const bases = sortBy(
      filter(vdiParent.$snapshots, { name_label: 'XO_DELTA_BASE_VDI_SNAPSHOT' }),
      base => base.snapshot_time
    )
    forEach(bases, base => { xapi.deleteVdi(base.$id)::pCatch(noop) })

    // Export full or delta backup.
    const vdiFilename = `${date}_${isFull ? 'full' : 'delta'}.vhd`
    const backupFullPath = `${dir}/${vdiFilename}`

    try {
      const targetStream = await handler.createOutputStream(backupFullPath, {
        // FIXME: Checksum is not computed for full vdi backups.
        // The problem is in the merge case, a delta merged in a full vdi
        // backup forces us to browse the resulting file =>
        // Significant transfer time on the network !
        checksum: !isFull
      })

      // Propagate read errors into the write side so the pipe fails.
      stream.on('error', error => targetStream.emit('error', error))

      await Promise.all([
        eventToPromise(stream.pipe(targetStream), 'finish'),
        stream.task
      ])
    } catch (error) {
      // Remove new backup. (corrupt).
      await handler.unlink(backupFullPath, { checksum: true })::pCatch(noop)

      throw error
    }

    // Returns relative path.
    return `${backupDirectory}/${vdiFilename}`
  }
|
||||
|
||||
  // Enforce the retention depth for delta VM backups: delete the
  // metadata (and, best-effort, the legacy `.xva`) of backups beyond
  // `depth`.
  async _removeOldDeltaVmBackups (xapi, { handler, dir, depth }) {
    const backups = await this._listDeltaVmBackups(handler, dir)
    const nOldBackups = backups.length - depth

    if (nOldBackups > 0) {
      await Promise.all(
        mapToArray(backups.slice(0, nOldBackups), async backup => {
          // Remove json file.
          await handler.unlink(`${dir}/${backup}`)

          // Remove xva file.
          // Version 0.0.0 (Legacy) Delta Backup.
          handler.unlink(`${dir}/${getDeltaBackupNameWithoutExt(backup)}.xva`)::pCatch(noop)
        })
      )
    }
  }
|
||||
|
||||
// Performs a rolling delta backup of a VM to a remote.
//
// `$onFailure` (injected by the `deferrable.onFailure` decorator) registers
// cleanup callbacks that run only if this method throws; they are used to
// cancel streams, delete the new snapshot and unlink partial files.
//
// Returns the metadata file path (without extension) relative to the
// remote root. Throws if the remote is missing/disabled or if any VDI
// backup fails.
@deferrable.onFailure
async rollingDeltaVmBackup ($onFailure, {vm, remoteId, tag, depth}) {
  const remote = await this._xo.getRemote(remoteId)

  if (!remote) {
    throw new Error(`No such Remote ${remoteId}`)
  }
  if (!remote.enabled) {
    throw new Error(`Remote ${remoteId} is disabled`)
  }

  const handler = await this._xo.getRemoteHandler(remote)
  const xapi = this._xo.getXapi(vm)

  // Switch from the XO object to the live XAPI object.
  vm = xapi.getObject(vm._xapiId)

  // Get most recent base.
  // Older base snapshots are deleted best-effort (errors ignored).
  const bases = sortBy(
    filter(vm.$snapshots, { name_label: `XO_DELTA_BASE_VM_SNAPSHOT_${tag}` }),
    base => base.snapshot_time
  )
  const baseVm = bases.pop()
  forEach(bases, base => { xapi.deleteVm(base.$id, true)::pCatch(noop) })

  // Check backup dirs.
  const dir = `vm_delta_${tag}_${vm.uuid}`
  const fullVdisRequired = []

  // A VDI with no full backup on the remote cannot take a delta:
  // force a full export for it.
  await Promise.all(
    mapToArray(vm.$VBDs, async vbd => {
      if (!vbd.VDI || vbd.type !== 'Disk') {
        return
      }

      const vdi = vbd.$VDI
      const backups = await this._listVdiBackups(handler, `${dir}/vdi_${vdi.uuid}`)

      // Force full if missing full.
      if (!find(backups, isFullVdiBackup)) {
        fullVdisRequired.push(vdi.$id)
      }
    })
  )

  // Export...
  const delta = await xapi.exportDeltaVm(vm.$id, baseVm && baseVm.$id, {
    snapshotNameLabel: `XO_DELTA_BASE_VM_SNAPSHOT_${tag}`,
    fullVdisRequired,
    disableBaseTags: true
  })

  // On failure: cancel all export streams and delete the snapshot
  // created by exportDeltaVm.
  $onFailure(async () => {
    await Promise.all(mapToArray(
      delta.streams,
      stream => stream.cancel()
    ))

    await xapi.deleteVm(delta.vm.$id, true)
  })

  // Save vdis.
  // pSettle: run every VDI backup to completion even if some fail,
  // so failures can be reported together below.
  const vdiBackups = await pSettle(
    mapToArray(delta.vdis, async (vdi, key) => {
      const vdiParent = xapi.getObject(vdi.snapshot_of)

      return this._saveDeltaVdiBackup(xapi, {
        vdiParent,
        isFull: !baseVm || find(fullVdisRequired, id => vdiParent.$id === id),
        handler,
        stream: delta.streams[`${key}.vhd`],
        dir,
        depth
      })
      .then(path => {
        // Record the remote-relative path in the metadata that will be
        // serialized below, so imports can locate the VHD chain.
        delta.vdis[key] = {
          ...delta.vdis[key],
          xoPath: path
        }

        return path
      })
    })
  )

  const fulFilledVdiBackups = []
  let success = true

  // One or many vdi backups have failed.
  for (const vdiBackup of vdiBackups) {
    if (vdiBackup.isFulfilled()) {
      fulFilledVdiBackups.push(vdiBackup)
    } else {
      console.error(`Rejected backup: ${vdiBackup.reason()}`)
      success = false
    }
  }

  // On failure: unlink the VDI files that did get written.
  $onFailure(async () => {
    await Promise.all(
      mapToArray(fulFilledVdiBackups, vdiBackup => {
        return handler.unlink(`${dir}/${vdiBackup.value()}`, { checksum: true })::pCatch(noop)
      })
    )
  })

  if (!success) {
    throw new Error('Rolling delta vm backup failed.')
  }

  const date = safeDateFormat(new Date())
  const backupFormat = `${date}_${vm.name_label}`
  const infoPath = `${dir}/${backupFormat}${DELTA_BACKUP_EXT}`

  $onFailure(() => handler.unlink(infoPath)::pCatch(noop))

  // Write Metadata.
  await handler.outputFile(infoPath, JSON.stringify(delta, null, 2))

  // Here we have a completed backup. We can merge old vdis.
  // (All entries are fulfilled at this point — a rejection would have
  // thrown above.)
  await Promise.all(
    mapToArray(vdiBackups, vdiBackup => {
      const backupName = vdiBackup.value()
      const backupDirectory = backupName.slice(0, backupName.lastIndexOf('/'))
      return this._mergeDeltaVdiBackups({ handler, dir: `${dir}/${backupDirectory}`, depth })
    })
  )

  // Delete old backups.
  await this._removeOldDeltaVmBackups(xapi, { vm, handler, dir, depth })

  // The previous base snapshot is no longer needed; deletion is
  // fire-and-forget on purpose.
  if (baseVm) {
    xapi.deleteVm(baseVm.$id, true)::pCatch(noop)
  }

  // Returns relative path.
  return `${dir}/${backupFormat}`
}
|
||||
|
||||
// Imports a delta VM backup from a remote into the given SR.
//
// `filePath` is the metadata path without its extension; the matching
// `<filePath><DELTA_BACKUP_EXT>` JSON file is read and dispatched on its
// `version` field:
// - no version: legacy (0.0.0) format, delegated to the legacy importer;
// - ^1: VHD-chain format — a read stream is opened for every backup in
//   each VDI's dependency chain, then fed to xapi.importDeltaVm;
// - anything else: rejected.
//
// Returns the XO id of the imported VM.
async importDeltaVmBackup ({sr, remoteId, filePath}) {
  const handler = await this._xo.getRemoteHandler(remoteId)
  const xapi = this._xo.getXapi(sr)

  const delta = JSON.parse(await handler.readFile(`${filePath}${DELTA_BACKUP_EXT}`))
  let vm
  const { version } = delta

  if (!version) {
    // Legacy import. (Version 0.0.0)
    vm = await this._legacyImportDeltaVmBackup(xapi, {
      remoteId, handler, filePath, info: delta, sr
    })
  } else if (versionSatisfies(delta.version, '^1')) {
    const basePath = dirname(filePath)
    const streams = delta.streams = {}

    // Open, in parallel, one stream per file of each VDI's VHD chain
    // (full + subsequent deltas, as resolved by
    // _listDeltaVdiDependencies).
    await Promise.all(
      mapToArray(
        delta.vdis,
        async (vdi, id) => {
          const vdisFolder = `${basePath}/${dirname(vdi.xoPath)}`
          const backups = await this._listDeltaVdiDependencies(handler, `${basePath}/${vdi.xoPath}`)

          // ignoreMissingChecksum: full VDI backups are written without
          // a checksum (see _saveDeltaVdiBackup).
          streams[`${id}.vhd`] = await Promise.all(mapToArray(backups, async backup =>
            handler.createReadStream(`${vdisFolder}/${backup}`, { checksum: true, ignoreMissingChecksum: true })
          ))
        }
      )
    )

    vm = await xapi.importDeltaVm(delta, {
      srId: sr._xapiId,
      disableStartAfterImport: false
    })
  } else {
    throw new Error(`Unsupported delta backup version: ${version}`)
  }

  return xapiObjectToXo(vm).id
}
|
||||
|
||||
// -----------------------------------------------------------------
|
||||
|
||||
async backupVm ({vm, remoteId, file, compress, onlyMetadata}) {
|
||||
const remote = await this._xo.getRemote(remoteId)
|
||||
|
||||
if (!remote) {
|
||||
throw new Error(`No such Remote ${remoteId}`)
|
||||
}
|
||||
if (!remote.enabled) {
|
||||
throw new Error(`Backup remote ${remoteId} is disabled`)
|
||||
}
|
||||
|
||||
const handler = await this._xo.getRemoteHandler(remote)
|
||||
return this._backupVm(vm, handler, file, {compress, onlyMetadata})
|
||||
}
|
||||
|
||||
async _backupVm (vm, handler, file, {compress, onlyMetadata}) {
|
||||
const targetStream = await handler.createOutputStream(file)
|
||||
const promise = eventToPromise(targetStream, 'finish')
|
||||
|
||||
const sourceStream = await this._xo.getXapi(vm).exportVm(vm._xapiId, {
|
||||
compress,
|
||||
onlyMetadata: onlyMetadata || false
|
||||
})
|
||||
sourceStream.pipe(targetStream)
|
||||
|
||||
await promise
|
||||
}
|
||||
|
||||
// Performs a rolling (full XVA) backup of a VM to a remote: writes a new
// dated backup, then deletes the oldest ones so that at most `depth`
// backups remain.
//
// Throws if the remote is missing or disabled.
async rollingBackupVm ({vm, remoteId, tag, depth, compress, onlyMetadata}) {
  const remote = await this._xo.getRemote(remoteId)

  if (!remote) {
    throw new Error(`No such Remote ${remoteId}`)
  }
  if (!remote.enabled) {
    throw new Error(`Backup remote ${remoteId} is disabled`)
  }

  const handler = await this._xo.getRemoteHandler(remote)

  const files = await handler.list()

  // Existing backups for this tag/VM: `<date>_<tag>_<name>.xva`.
  // The list is sorted lexicographically, which orders by date thanks to
  // the safeDateFormat prefix.
  const reg = new RegExp('^[^_]+_' + escapeStringRegexp(`${tag}_${vm.name_label}.xva`))
  const backups = sortBy(filter(files, (fileName) => reg.test(fileName)))

  const date = safeDateFormat(new Date())
  const file = `${date}_${tag}_${vm.name_label}.xva`

  await this._backupVm(vm, handler, file, {compress, onlyMetadata})
  // `backups` was listed before the new file was written, hence
  // depth - 1: the new backup takes one retention slot.
  await this._removeOldBackups(backups, handler, undefined, backups.length - (depth - 1))
}
|
||||
|
||||
async rollingSnapshotVm (vm, tag, depth) {
|
||||
const xapi = this._xo.getXapi(vm)
|
||||
vm = xapi.getObject(vm._xapiId)
|
||||
|
||||
const reg = new RegExp('^rollingSnapshot_[^_]+_' + escapeStringRegexp(tag) + '_')
|
||||
const snapshots = sortBy(filter(vm.$snapshots, snapshot => reg.test(snapshot.name_label)), 'name_label')
|
||||
const date = safeDateFormat(new Date())
|
||||
|
||||
await xapi.snapshotVm(vm.$id, `rollingSnapshot_${date}_${tag}_${vm.name_label}`)
|
||||
|
||||
const promises = []
|
||||
for (let surplus = snapshots.length - (depth - 1); surplus > 0; surplus--) {
|
||||
const oldSnap = snapshots.shift()
|
||||
promises.push(xapi.deleteVm(oldSnap.uuid, true))
|
||||
}
|
||||
await Promise.all(promises)
|
||||
}
|
||||
|
||||
// Performs a rolling disaster-recovery copy of a VM onto a (possibly
// remote-pool) SR: copies the VM under a dated `<name>_DR_<tag>_<date>`
// label, tags it, then deletes the oldest copies beyond `depth`.
async rollingDrCopyVm ({vm, sr, tag, depth}) {
  tag = 'DR_' + tag
  // Matches previous copies of this VM/tag; the trailing pattern is the
  // safeDateFormat timestamp (e.g. 20160101T120000Z).
  const reg = new RegExp('^' + escapeStringRegexp(`${vm.name_label}_${tag}_`) + '[0-9]{8}T[0-9]{6}Z$')

  const targetXapi = this._xo.getXapi(sr)
  sr = targetXapi.getObject(sr._xapiId)
  const sourceXapi = this._xo.getXapi(vm)
  vm = sourceXapi.getObject(vm._xapiId)

  // Collect existing DR copies by walking the SR's VDIs back to their
  // first VBD's VM.
  // NOTE(review): a copy with several VDIs on this SR could appear more
  // than once in `vms` — confirm whether deleteVm tolerates duplicates.
  const vms = []
  forEach(sr.$VDIs, vdi => {
    const vbds = vdi.$VBDs
    const vm = vbds && vbds[0] && vbds[0].$VM
    if (vm && reg.test(vm.name_label)) {
      vms.push(vm)
    }
  })
  // Oldest first: the timestamp suffix makes the name sort chronological.
  const olderCopies = sortBy(vms, 'name_label')

  const copyName = `${vm.name_label}_${tag}_${safeDateFormat(new Date())}`
  const drCopy = await sourceXapi.remoteCopyVm(vm.$id, targetXapi, sr.$id, {
    nameLabel: copyName
  })
  await targetXapi.addTag(drCopy.$id, 'Disaster Recovery')

  // The new copy takes one retention slot, hence depth - 1.
  const promises = []
  for (let surplus = olderCopies.length - (depth - 1); surplus > 0; surplus--) {
    const oldDRVm = olderCopies.shift()
    promises.push(targetXapi.deleteVm(oldDRVm.$id, true))
  }
  await Promise.all(promises)
}
|
||||
}
|
||||
79
src/xo-mixins/jobs.js
Normal file
79
src/xo-mixins/jobs.js
Normal file
@@ -0,0 +1,79 @@
|
||||
import assign from 'lodash/assign'
|
||||
import JobExecutor from '../job-executor'
|
||||
import { Jobs } from '../models/job'
|
||||
import {
|
||||
GenericError,
|
||||
NoSuchObject
|
||||
} from '../api-errors'
|
||||
|
||||
// ===================================================================
|
||||
|
||||
// Error thrown when no job exists for the requested id.
class NoSuchJob extends NoSuchObject {
  constructor (id) {
    super(id, 'job')
  }
}
|
||||
|
||||
// ===================================================================
|
||||
|
||||
// Jobs mixin: CRUD on the Redis-backed job collection and sequential
// execution of job sequences through a JobExecutor.
export default class {
  constructor (xo) {
    this._executor = new JobExecutor(xo)
    this._jobs = new Jobs({
      connection: xo._redis,
      prefix: 'xo:job',
      indexes: ['user_id', 'key']
    })
  }

  // Returns all stored jobs (model instances, not plain objects).
  async getAllJobs () {
    return /* await */ this._jobs.get()
  }

  // Returns the properties of the job `id`.
  //
  // Throws NoSuchJob if it does not exist.
  async getJob (id) {
    const job = await this._jobs.first(id)
    if (!job) {
      throw new NoSuchJob(id)
    }

    return job.properties
  }

  // Creates a job owned by `userId` and returns its properties.
  async createJob (userId, job) {
    // TODO: use plain objects
    const job_ = await this._jobs.create(userId, job)
    return job_.properties
  }

  // Updates an existing job in place and saves it.
  //
  // NOTE(review): lodash `assign` also copies `undefined` values, so a
  // field omitted by the caller overwrites the stored one with
  // `undefined` — confirm whether callers always send every field.
  async updateJob ({id, type, name, key, method, paramsVector}) {
    const oldJob = await this.getJob(id)
    assign(oldJob, {type, name, key, method, paramsVector})
    return /* await */ this._jobs.save(oldJob)
  }

  // Removes the job `id` from the collection.
  async removeJob (id) {
    return /* await */ this._jobs.remove(id)
  }

  // Runs the given job ids one after the other (deliberately
  // sequential). Unknown ids are collected and reported in a single
  // GenericError after the known jobs have run; any other error aborts
  // the sequence immediately.
  async runJobSequence (idSequence) {
    const notFound = []
    for (const id of idSequence) {
      let job
      try {
        job = await this.getJob(id)
      } catch (error) {
        if (error instanceof NoSuchJob) {
          notFound.push(id)
        } else {
          throw error
        }
      }
      if (job) {
        await this._executor.exec(job)
      }
    }
    if (notFound.length > 0) {
      throw new GenericError(`The following jobs were not found: ${notFound.join()}`)
    }
  }
}
|
||||
222
src/xo-mixins/plugins.js
Normal file
222
src/xo-mixins/plugins.js
Normal file
@@ -0,0 +1,222 @@
|
||||
import createJsonSchemaValidator from 'is-my-json-valid'
|
||||
|
||||
import { PluginsMetadata } from '../models/plugin-metadata'
|
||||
import {
|
||||
InvalidParameters,
|
||||
NoSuchObject
|
||||
} from '../api-errors'
|
||||
import {
|
||||
createRawObject,
|
||||
isFunction,
|
||||
mapToArray
|
||||
} from '../utils'
|
||||
|
||||
// ===================================================================
|
||||
|
||||
// Error thrown when no plugin is registered under the requested id.
class NoSuchPlugin extends NoSuchObject {
  constructor (id) {
    super(id, 'plugin')
  }
}
|
||||
|
||||
// ===================================================================
|
||||
|
||||
// Plugins mixin: keeps the registry of loaded plugin instances in memory
// and their metadata (autoload flag, configuration) in Redis.
export default class {
  constructor (xo) {
    // In-memory registry, keyed by plugin id (= name).
    this._plugins = createRawObject()

    this._pluginsMetadata = new PluginsMetadata({
      connection: xo._redis,
      prefix: 'xo:plugin-metadata'
    })
  }

  // Returns the in-memory registry entry for `id`.
  //
  // Throws NoSuchPlugin if it is not registered.
  _getRawPlugin (id) {
    const plugin = this._plugins[id]
    if (!plugin) {
      throw new NoSuchPlugin(id)
    }
    return plugin
  }

  // Returns the persisted metadata properties for `id`, or null if the
  // plugin has never been registered before.
  async _getPluginMetadata (id) {
    const metadata = await this._pluginsMetadata.first(id)
    return metadata
      ? metadata.properties
      : null
  }

  // Registers a plugin instance under its name, then asynchronously
  // configures and loads it according to its persisted metadata.
  async registerPlugin (
    name,
    instance,
    configurationSchema,
    configurationPresets,
    version
  ) {
    const id = name
    const plugin = this._plugins[id] = {
      // A plugin without a configuration schema needs no configuration.
      configured: !configurationSchema,
      configurationPresets,
      configurationSchema,
      id,
      instance,
      name,
      unloadable: isFunction(instance.unload),
      version
    }

    const metadata = await this._getPluginMetadata(id)
    let autoload = true
    let configuration

    if (metadata) {
      // Parentheses required: destructuring assignment to existing
      // bindings.
      ({
        autoload,
        configuration
      } = metadata)
    } else {
      console.log(`[NOTICE] register plugin ${name} for the first time`)
      await this._pluginsMetadata.save({
        id,
        autoload
      })
    }

    // Configure plugin if necessary. (i.e. configurationSchema)
    // Load plugin.
    // Ignore configuration and loading errors.
    // Deliberately not awaited: registration must not block on the
    // plugin's own configure/load; failures are only logged.
    Promise.resolve()
      .then(() => {
        if (!plugin.configured) {
          return this._configurePlugin(plugin, configuration)
        }
      })
      .then(() => {
        if (autoload) {
          return this.loadPlugin(id)
        }
      })
      .catch(error => {
        console.error('register plugin %s: %s', name, error && error.stack || error)
      })
  }

  // Returns the public view of a plugin: registry state merged with its
  // persisted metadata (instance itself is not exposed).
  async _getPlugin (id) {
    const {
      configurationPresets,
      configurationSchema,
      loaded,
      name,
      unloadable,
      version
    } = this._getRawPlugin(id)
    const {
      autoload,
      configuration
    } = (await this._getPluginMetadata(id)) || {}

    return {
      id,
      name,
      autoload,
      loaded,
      unloadable,
      version,
      configuration,
      configurationPresets,
      configurationSchema
    }
  }

  // Returns the public view of every registered plugin.
  async getPlugins () {
    return /* await */ Promise.all(
      mapToArray(this._plugins, ({ id }) => this._getPlugin(id))
    )
  }

  // Validate the configuration and configure the plugin instance.
  async _configurePlugin (plugin, configuration) {
    const { configurationSchema } = plugin

    if (!configurationSchema) {
      throw new InvalidParameters('plugin not configurable')
    }

    // See: https://github.com/mafintosh/is-my-json-valid/issues/116
    if (configuration == null) {
      throw new InvalidParameters([{
        field: 'data',
        message: 'is the wrong type'
      }])
    }

    const validate = createJsonSchemaValidator(configurationSchema)
    if (!validate(configuration)) {
      throw new InvalidParameters(validate.errors)
    }

    // Sets the plugin configuration.
    await plugin.instance.configure({
      // Shallow copy of the configuration object to avoid most of the
      // errors when the plugin is altering the configuration object
      // which is handed over to it.
      ...configuration
    })
    plugin.configured = true
  }

  // Validate the configuration, configure the plugin instance and
  // save the new configuration.
  async configurePlugin (id, configuration) {
    const plugin = this._getRawPlugin(id)

    await this._configurePlugin(plugin, configuration)

    // Saves the configuration.
    await this._pluginsMetadata.merge(id, { configuration })
  }

  // Persists autoload = false for this plugin.
  async disablePluginAutoload (id) {
    // TODO: handle case where autoload is already disabled.

    await this._pluginsMetadata.merge(id, { autoload: false })
  }

  // Persists autoload = true for this plugin.
  async enablePluginAutoload (id) {
    // TODO: handle case where autoload is already enabled.

    await this._pluginsMetadata.merge(id, { autoload: true })
  }

  // Loads a registered, configured, not-yet-loaded plugin.
  //
  // Throws InvalidParameters otherwise.
  async loadPlugin (id) {
    const plugin = this._getRawPlugin(id)
    if (plugin.loaded) {
      throw new InvalidParameters('plugin already loaded')
    }

    if (!plugin.configured) {
      throw new InvalidParameters('plugin not configured')
    }

    await plugin.instance.load()
    plugin.loaded = true
  }

  // Unloads a loaded plugin, if its instance supports unloading.
  //
  // Throws InvalidParameters otherwise.
  async unloadPlugin (id) {
    const plugin = this._getRawPlugin(id)
    if (!plugin.loaded) {
      throw new InvalidParameters('plugin already unloaded')
    }

    if (plugin.unloadable === false) {
      throw new InvalidParameters('plugin cannot be unloaded')
    }

    await plugin.instance.unload()
    plugin.loaded = false
  }

  // Removes the persisted configuration of this plugin.
  async purgePluginConfiguration (id) {
    await this._pluginsMetadata.merge(id, { configuration: undefined })
  }
}
|
||||
135
src/xo-mixins/remotes.js
Normal file
135
src/xo-mixins/remotes.js
Normal file
@@ -0,0 +1,135 @@
|
||||
import RemoteHandlerLocal from '../remote-handlers/local'
|
||||
import RemoteHandlerNfs from '../remote-handlers/nfs'
|
||||
import RemoteHandlerSmb from '../remote-handlers/smb'
|
||||
import {
|
||||
forEach
|
||||
} from '../utils'
|
||||
import {
|
||||
NoSuchObject
|
||||
} from '../api-errors'
|
||||
import {
|
||||
Remotes
|
||||
} from '../models/remote'
|
||||
|
||||
// ===================================================================
|
||||
|
||||
// Error thrown when no remote exists for the requested id.
class NoSuchRemote extends NoSuchObject {
  constructor (id) {
    super(id, 'remote')
  }
}
|
||||
|
||||
// ===================================================================
|
||||
|
||||
// Remotes mixin: CRUD on the Redis-backed collection of backup remotes
// (local / NFS / SMB storage targets) and creation of their handlers.
export default class {
  constructor (xo) {
    this._remotes = new Remotes({
      connection: xo._redis,
      prefix: 'xo:remote',
      indexes: ['enabled']
    })

    // On start: make sure at least one remote exists, then (re)mount
    // them all; on stop: unmount them.
    xo.on('start', async () => {
      await this.initRemotes()
      await this.syncAllRemotes()
    })
    xo.on('stop', () => this.forgetAllRemotes())
  }

  // Returns a handler for `remote`, which may be an id or a remote
  // object; the handler type is picked from the URL protocol.
  //
  // Throws if the protocol is not supported.
  async getRemoteHandler (remote) {
    if (typeof remote === 'string') {
      remote = await this.getRemote(remote)
    }
    const Handler = {
      file: RemoteHandlerLocal,
      smb: RemoteHandlerSmb,
      nfs: RemoteHandlerNfs
    }

    // FIXME: should be done in xo-remote-parser.
    const type = remote.url.split('://')[0]
    if (!Handler[type]) {
      throw new Error('Unhandled remote type')
    }
    return new Handler[type](remote)
  }

  // Tests that the remote is usable (delegated to its handler).
  async testRemote (remote) {
    const handler = await this.getRemoteHandler(remote)
    return handler.test()
  }

  // Returns all stored remotes (model instances).
  async getAllRemotes () {
    return this._remotes.get()
  }

  // Returns the model instance for `id`; throws NoSuchRemote otherwise.
  async _getRemote (id) {
    const remote = await this._remotes.first(id)
    if (!remote) {
      throw new NoSuchRemote(id)
    }

    return remote
  }

  // Returns the plain properties of the remote `id`.
  async getRemote (id) {
    return (await this._getRemote(id)).properties
  }

  // Creates a remote and immediately enables (and therefore syncs) it.
  async createRemote ({name, url}) {
    const remote = await this._remotes.create(name, url)
    return /* await */ this.updateRemote(remote.get('id'), {enabled: true})
  }

  // Updates a remote, re-syncs its handler (mount state, error field)
  // and persists the result; returns the updated properties.
  async updateRemote (id, {name, url, enabled, error}) {
    const remote = await this._getRemote(id)
    this._updateRemote(remote, {name, url, enabled, error})
    const handler = await this.getRemoteHandler(remote.properties)
    const props = await handler.sync()
    this._updateRemote(remote, props)
    return (await this._remotes.save(remote)).properties
  }

  // Applies the given fields to the model; `error` is always written so
  // a successful sync clears any previous error.
  _updateRemote (remote, {name, url, enabled, error}) {
    if (name) remote.set('name', name)
    if (url) remote.set('url', url)
    if (enabled !== undefined) remote.set('enabled', enabled)
    if (error) {
      remote.set('error', error)
    } else {
      remote.set('error', '')
    }
  }

  // Unmounts then deletes the remote `id`.
  async removeRemote (id) {
    const handler = await this.getRemoteHandler(id)
    await handler.forget()
    await this._remotes.remove(id)
  }

  // TODO: Should it be private?
  async syncAllRemotes () {
    const remotes = await this.getAllRemotes()
    forEach(remotes, remote => {
      // Fire-and-forget on purpose: one failing remote must not block
      // the others.
      this.updateRemote(remote.id, {})
    })
  }

  // TODO: Should it be private?
  async forgetAllRemotes () {
    const remotes = await this.getAllRemotes()
    for (const remote of remotes) {
      try {
        // BUGFIX: forget() was previously not awaited, so its rejection
        // escaped this try/catch as an unhandled rejection.
        await (await this.getRemoteHandler(remote)).forget()
      } catch (_) {}
    }
  }

  // TODO: Should it be private?
  async initRemotes () {
    const remotes = await this.getAllRemotes()
    if (!remotes || !remotes.length) {
      await this.createRemote({name: 'default', url: 'file://var/lib/xoa-backups'})
    }
  }
}
|
||||
315
src/xo-mixins/resource-sets.js
Normal file
315
src/xo-mixins/resource-sets.js
Normal file
@@ -0,0 +1,315 @@
|
||||
import every from 'lodash/every'
|
||||
import keyBy from 'lodash/keyBy'
|
||||
import remove from 'lodash/remove'
|
||||
import some from 'lodash/some'
|
||||
|
||||
import {
|
||||
NoSuchObject,
|
||||
Unauthorized
|
||||
} from '../api-errors'
|
||||
import {
|
||||
forEach,
|
||||
generateUnsecureToken,
|
||||
isObject,
|
||||
lightSet,
|
||||
map,
|
||||
mapToArray,
|
||||
streamToArray
|
||||
} from '../utils'
|
||||
|
||||
// ===================================================================
|
||||
|
||||
// Error thrown when no resource set exists for the requested id.
class NoSuchResourceSet extends NoSuchObject {
  constructor (id) {
    super(id, 'resource set')
  }
}
|
||||
|
||||
// Computes the resource-set usage of a single XAPI VM object: number of
// vCPUs, number of distinct disk VDIs and their total virtual size,
// dynamic max memory, and a VM count of 1.
const computeVmResourcesUsage = vm => {
  const seenVdis = {}
  let diskCount = 0
  let diskSize = 0

  forEach(vm.$VBDs, vbd => {
    // Only plug-in disks count; CD drives and the like are ignored.
    if (vbd.type !== 'Disk') {
      return
    }

    // A VDI attached through several VBDs is counted once.
    const vdiId = vbd.VDI
    if (seenVdis[vdiId]) {
      return
    }

    const vdi = vbd.$VDI
    if (!vdi) {
      return
    }

    seenVdis[vdiId] = true
    diskCount += 1
    diskSize += +vdi.virtual_size
  })

  return {
    cpus: vm.VCPUs_at_startup,
    disk: diskSize,
    disks: diskCount,
    memory: vm.memory_dynamic_max,
    vms: 1
  }
}
|
||||
|
||||
// Normalizes a stored resource set: fills missing fields with empty
// defaults and upgrades scalar limits (legacy format) to
// { available, total } objects.
const normalize = set => {
  const limits = set.limits
    ? map(set.limits, limit => {
      if (isObject(limit)) {
        return limit
      }

      // Legacy scalar limit: nothing consumed yet.
      return {
        available: limit,
        total: limit
      }
    })
    : {}

  return {
    id: set.id,
    limits,
    name: set.name || '',
    objects: set.objects || [],
    subjects: set.subjects || []
  }
}
|
||||
|
||||
// ===================================================================
|
||||
|
||||
// Resource-sets mixin: stores resource sets (name, subjects, objects,
// limits) in a LevelDB-like store and enforces/maintains their limits.
export default class {
  constructor (xo) {
    this._xo = xo

    this._store = null
    xo.on('start', async () => {
      this._store = await xo.getStore('resourceSets')
    })
  }

  // Generates a short random id not already used in the store.
  async _generateId () {
    let id
    do {
      id = generateUnsecureToken(8)
    } while (await this._store.has(id))
    return id
  }

  // Persists a set under its own id.
  _save (set) {
    return this._store.put(set.id, set)
  }

  // Throws Unauthorized unless `userId` may use the set `id` (admin or
  // listed subject) and, when given, all `objectIds` belong to the set.
  async checkResourceSetConstraints (id, userId, objectIds) {
    const set = await this.getResourceSet(id)

    const user = await this._xo.getUser(userId)
    if ((
      user.permission !== 'admin' &&

      // The set does not contain ANY subjects related to this user
      // (itself or its groups).
      !some(set.subjects, lightSet(user.groups).add(user.id).has)
    ) || (
      objectIds &&

      // The set does not contain ALL objects.
      !every(objectIds, lightSet(set.objects).has)
    )) {
      throw new Unauthorized()
    }
  }

  // Usage of an XO VM, computed from its live XAPI object.
  computeVmResourcesUsage (vm) {
    return computeVmResourcesUsage(
      this._xo.getXapi(vm).getObject(vm._xapiId)
    )
  }

  // Creates, persists and returns a new normalized resource set.
  async createResourceSet (name, subjects = undefined, objects = undefined, limits = undefined) {
    const id = await this._generateId()
    const set = normalize({
      id,
      name,
      objects,
      subjects,
      limits
    })

    await this._store.put(id, set)

    return set
  }

  // Deletes the set `id`; throws NoSuchResourceSet if absent.
  async deleteResourceSet (id) {
    const store = this._store

    if (await store.has(id)) {
      return store.del(id)
    }

    throw new NoSuchResourceSet(id)
  }

  // Updates the given fields of a set. New limit quantities keep the
  // already-consumed amount: available = old available - old total + new
  // total.
  async updateResourceSet (id, {
    name = undefined,
    subjects = undefined,
    objects = undefined,
    limits = undefined
  }) {
    const set = await this.getResourceSet(id)
    if (name) {
      set.name = name
    }
    if (subjects) {
      set.subjects = subjects
    }
    if (objects) {
      set.objects = objects
    }
    if (limits) {
      const previousLimits = set.limits
      set.limits = map(limits, (quantity, limitId) => {
        const previous = previousLimits[limitId]
        if (!previous) {
          return {
            available: quantity,
            total: quantity
          }
        }

        const { available, total } = previous

        return {
          available: available - total + quantity,
          total: quantity
        }
      })
    }

    await this._save(set)
  }

  // If userId is provided, only resource sets available to that user
  // will be returned.
  async getAllResourceSets (userId = undefined) {
    let filter
    if (userId != null) {
      const user = await this._xo.getUser(userId)
      if (user.permission !== 'admin') {
        const userHasSubject = lightSet(user.groups).add(user.id).has
        filter = set => some(set.subjects, userHasSubject)
      }
    }

    return streamToArray(this._store.createValueStream(), {
      filter,
      mapper: normalize
    })
  }

  // Returns the normalized set `id`; throws NoSuchResourceSet if absent.
  getResourceSet (id) {
    return this._store.get(id).then(normalize, error => {
      if (error.notFound) {
        throw new NoSuchResourceSet(id)
      }

      throw error
    })
  }

  async addObjectToResourceSet (objectId, setId) {
    const set = await this.getResourceSet(setId)
    set.objects.push(objectId)
    await this._save(set)
  }

  async removeObjectFromResourceSet (objectId, setId) {
    const set = await this.getResourceSet(setId)
    // BUGFIX: was `remove(set.objects)`, which ignored `objectId` and
    // removed every (truthy) object from the set. An explicit predicate
    // is required: lodash treats a bare string as a property-path
    // iteratee, not as a value to compare.
    remove(set.objects, id => id === objectId)
    await this._save(set)
  }

  async addSubjectToResourceSet (subjectId, setId) {
    const set = await this.getResourceSet(setId)
    set.subjects.push(subjectId)
    await this._save(set)
  }

  async removeSubjectToResourceSet (subjectId, setId) {
    const set = await this.getResourceSet(setId)
    // BUGFIX: `remove(set.subjects, subjectId)` used the string as a
    // property-path iteratee and therefore never matched plain string
    // ids; compare explicitly instead.
    remove(set.subjects, id => id === subjectId)
    await this._save(set)
  }

  // Sets (or replaces) the limit `limitId` with a raw quantity; it will
  // be normalized to { available, total } on the next read.
  async addLimitToResourceSet (limitId, quantity, setId) {
    const set = await this.getResourceSet(setId)
    set.limits[limitId] = quantity
    await this._save(set)
  }

  async removeLimitFromResourceSet (limitId, setId) {
    const set = await this.getResourceSet(setId)
    delete set.limits[limitId]
    await this._save(set)
  }

  // Consumes the given quantities from the set's limits; throws (without
  // saving) if any limit would go negative. Unknown limit ids are
  // ignored (unlimited resources).
  async allocateLimitsInResourceSet (limits, setId) {
    const set = await this.getResourceSet(setId)
    forEach(limits, (quantity, id) => {
      const limit = set.limits[id]
      if (!limit) {
        return
      }

      if ((limit.available -= quantity) < 0) {
        throw new Error(`not enough ${id} available in the set ${setId}`)
      }
    })
    await this._save(set)
  }

  // Gives the quantities back, clamped so availability never exceeds the
  // configured total.
  async releaseLimitsInResourceSet (limits, setId) {
    const set = await this.getResourceSet(setId)
    forEach(limits, (quantity, id) => {
      const limit = set.limits[id]
      if (!limit) {
        return
      }

      if ((limit.available += quantity) > limit.total) {
        limit.available = limit.total
      }
    })
    await this._save(set)
  }

  // Rebuilds every set's availability from scratch by scanning all VMs
  // of all connected pools and subtracting their usage.
  async recomputeResourceSetsLimits () {
    const sets = keyBy(await this.getAllResourceSets(), 'id')
    forEach(sets, ({ limits }) => {
      forEach(limits, (limit, id) => {
        limit.available = limit.total
      })
    })

    forEach(this._xo.getAllXapis(), xapi => {
      forEach(xapi.objects.all, object => {
        let id
        let set
        if (
          object.$type !== 'vm' ||

          // No set for this VM.
          !(id = xapi.xo.getData(object, 'resourceSet')) ||

          // Not our set.
          !(set = sets[id])
        ) {
          return
        }

        const { limits } = set
        forEach(computeVmResourcesUsage(object), (usage, resource) => {
          const limit = limits[resource]
          if (limit) {
            limit.available -= usage
          }
        })
      })
    })

    await Promise.all(mapToArray(sets, set => this._save(set)))
  }
}
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user