Compare commits
588 Commits
defined-v0 ... xo-server-
.env.example (new file, 2 lines)
@@ -0,0 +1,2 @@
# xo_fs_nfs=nfs://ip:/folder
# xo_fs_smb=smb://login:pass@domain\\ip\folder
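These commented-out variables are the remote URLs read by the new filesystem tests (fs.spec.js at the end of this diff loads them through dotenv and only exercises the NFS/SMB remotes when they are set). A minimal sketch of consuming them outside the test suite, assuming the getHandler factory exported by @xen-orchestra/fs:

// sketch: probe any remotes declared in .env (variable names as in the file above)
import 'dotenv/config'
import { getHandler } from '@xen-orchestra/fs'

async function main() {
  const urls = ['file:///tmp']
  if (process.env.xo_fs_nfs) urls.push(process.env.xo_fs_nfs) // nfs://ip:/folder
  if (process.env.xo_fs_smb) urls.push(process.env.xo_fs_smb) // smb://login:pass@domain\\ip\folder

  for (const url of urls) {
    const handler = getHandler({ url })
    await handler.sync() // must be called before using a handler
    console.log(url, await handler.test()) // { success: true } when the remote is usable
    await handler.forget()
  }
}

main().catch(console.error)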
@@ -1,9 +1,10 @@
module.exports = {
  extends: ['standard', 'standard-jsx'],
  extends: ['standard', 'standard-jsx', 'prettier'],
  globals: {
    __DEV__: true,
    $Dict: true,
    $Diff: true,
    $ElementType: true,
    $Exact: true,
    $Keys: true,
    $PropertyType: true,
@@ -16,12 +17,12 @@ module.exports = {
    },
  },
  rules: {
    'comma-dangle': ['error', 'always-multiline'],
    indent: 'off',
    'no-var': 'error',
    'node/no-extraneous-import': 'error',
    'node/no-extraneous-require': 'error',
    'prefer-const': 'error',

    // See https://github.com/prettier/eslint-config-prettier/issues/65
    'react/jsx-indent': 'off',
  },
}
.gitignore (vendored, 1 change)
@@ -30,3 +30,4 @@ pnpm-debug.log
pnpm-debug.log.*
yarn-error.log
yarn-error.log.*
.env
@@ -1,4 +1,5 @@
module.exports = {
  jsxSingleQuote: true,
  semi: false,
  singleQuote: true,
  trailingComma: 'es5',
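Adding Prettier with these options (no semicolons, single quotes, ES5 trailing commas) is what produces the purely mechanical reformatting visible throughout the rest of this diff, for example the removal of the space between the function keyword and its parameter list. An illustrative before/after, not taken from the repository:

// before (eslint-config-standard style)
const greet = function (name) {
  return "hello " + name;
};

// after running Prettier with the options above
const greet = function(name) {
  return 'hello ' + name
}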
.travis.yml (10 changes)
@@ -2,7 +2,6 @@ language: node_js
node_js:
  #- stable # disable for now due to an issue of indirect dep upath with Node 9
  - 8
  - 6

# Use containers.
# http://docs.travis-ci.com/user/workers/container-based-infrastructure/
@@ -10,9 +9,9 @@ sudo: false
addons:
  apt:
    packages:
      - qemu-utils
      - blktap-utils
      - vmdk-stream-converter
      - qemu-utils
      - blktap-utils
      - vmdk-stream-converter

before_install:
  - curl -o- -L https://yarnpkg.com/install.sh | bash
@@ -22,5 +21,4 @@ cache:
  yarn: true

script:
  - yarn run test
  - yarn run test-integration
  - yarn run travis-tests
@@ -30,10 +30,10 @@
    "lodash": "^4.17.4"
  },
  "devDependencies": {
    "@babel/cli": "7.0.0",
    "@babel/core": "7.0.0",
    "@babel/preset-env": "7.0.0",
    "@babel/preset-flow": "7.0.0",
    "@babel/cli": "^7.0.0",
    "@babel/core": "^7.0.0",
    "@babel/preset-env": "^7.0.0",
    "@babel/preset-flow": "^7.0.0",
    "babel-plugin-lodash": "^3.3.2",
    "cross-env": "^5.1.3",
    "rimraf": "^2.6.2"
@@ -44,6 +44,7 @@
    "dev": "cross-env NODE_ENV=development babel --watch --source-maps --out-dir=dist/ src/",
    "prebuild": "yarn run clean",
    "predev": "yarn run prebuild",
    "prepare": "yarn run build",
    "prepublishOnly": "yarn run build"
  }
}
@@ -14,7 +14,7 @@ const configs = {
  '@babel/plugin-proposal-pipeline-operator': {
    proposal: 'minimal',
  },
  '@babel/preset-env' (pkg) {
  '@babel/preset-env'(pkg) {
    return {
      debug: !__TEST__,

@@ -42,11 +42,11 @@ const getConfig = (key, ...args) => {
  return config === undefined
    ? {}
    : typeof config === 'function'
      ? config(...args)
      : config
    ? config(...args)
    : config
}

module.exports = function (pkg, plugins, presets) {
module.exports = function(pkg, plugins, presets) {
  plugins === undefined && (plugins = {})
  presets === undefined && (presets = {})
@@ -5,7 +5,7 @@ const { NULL_REF, Xapi } = require('xen-api')

const pkg = require('./package.json')

Xapi.prototype.getVmDisks = async function (vm) {
Xapi.prototype.getVmDisks = async function(vm) {
  const disks = { __proto__: null }
  await Promise.all([
    ...vm.VBDs.map(async vbdRef => {
@@ -19,7 +19,7 @@ Xapi.prototype.getVmDisks = async function (vm) {
  return disks
}

defer(async function main ($defer, args) {
defer(async function main($defer, args) {
  if (args.length === 0 || args.includes('-h') || args.includes('--help')) {
    const cliName = Object.keys(pkg.bin)[0]
    return console.error(
@@ -83,6 +83,9 @@ ${cliName} v${pkg.version}

  await Promise.all([
    srcXapi.setFieldEntries(srcSnapshot, 'other_config', metadata),
    srcXapi.setFieldEntries(srcSnapshot, 'other_config', {
      'xo:backup:exported': 'true',
    }),
    tgtXapi.setField(
      tgtVm,
      'name_label',
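getVmDisks is patched onto the Xapi prototype so that any connected xen-api client can collect the disks behind a VM's VBDs. A minimal, hypothetical usage sketch (xapi is assumed to be an already-connected client and vm a resolved VM record):

// sketch only, inside an async function; `xapi` and `vm` are assumed to exist
const disks = await xapi.getVmDisks(vm)
console.log(Object.keys(disks).length, 'disk(s) attached to', vm.name_label)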
@@ -1,6 +1,6 @@
{
  "name": "@xen-orchestra/cr-seed-cli",
  "version": "0.1.0",
  "version": "0.2.0",
  "homepage": "https://github.com/vatesfr/xen-orchestra/tree/master/@xen-orchestra/cr-seed-cli",
  "bugs": "https://github.com/vatesfr/xen-orchestra/issues",
  "repository": {
@@ -15,6 +15,6 @@
  },
  "dependencies": {
    "golike-defer": "^0.4.1",
    "xen-api": "^0.18.0"
    "xen-api": "^0.24.1"
  }
}
@@ -41,10 +41,10 @@
    "moment-timezone": "^0.5.14"
  },
  "devDependencies": {
    "@babel/cli": "7.0.0",
    "@babel/core": "7.0.0",
    "@babel/preset-env": "7.0.0",
    "@babel/preset-flow": "7.0.0",
    "@babel/cli": "^7.0.0",
    "@babel/core": "^7.0.0",
    "@babel/preset-env": "^7.0.0",
    "@babel/preset-flow": "^7.0.0",
    "cross-env": "^5.1.3",
    "rimraf": "^2.6.2"
  },
@@ -6,9 +6,14 @@ import parse from './parse'
const MAX_DELAY = 2 ** 31 - 1

class Job {
  constructor (schedule, fn) {
  constructor(schedule, fn) {
    const wrapper = () => {
      const result = fn()
      let result
      try {
        result = fn()
      } catch (_) {
        // catch any thrown value to ensure it does not break the job
      }
      let then
      if (result != null && typeof (then = result.then) === 'function') {
        then.call(result, scheduleNext, scheduleNext)
@@ -28,32 +33,32 @@ class Job {
    this._timeout = undefined
  }

  start () {
  start() {
    this.stop()
    this._scheduleNext()
  }

  stop () {
  stop() {
    clearTimeout(this._timeout)
  }
}

class Schedule {
  constructor (pattern, zone = 'utc') {
  constructor(pattern, zone = 'utc') {
    this._schedule = parse(pattern)
    this._createDate =
      zone.toLowerCase() === 'utc'
        ? moment.utc
        : zone === 'local'
          ? moment
          : () => moment.tz(zone)
        ? moment
        : () => moment.tz(zone)
  }

  createJob (fn) {
  createJob(fn) {
    return new Job(this, fn)
  }

  next (n) {
  next(n) {
    const dates = new Array(n)
    const schedule = this._schedule
    let date = this._createDate()
@@ -63,12 +68,12 @@ class Schedule {
    return dates
  }

  _nextDelay () {
  _nextDelay() {
    const now = this._createDate()
    return next(this._schedule, now) - now
  }

  startJob (fn) {
  startJob(fn) {
    const job = this.createJob(fn)
    job.start()
    return job.stop.bind(job)
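The functional change in this hunk is that a synchronous throw from the scheduled function no longer breaks the job: the call is now wrapped in try/catch, and the next run is scheduled whether the result resolves, rejects, or throws. A usage sketch of the Schedule/Job API shown above (the package's actual export is not visible in this hunk; doPeriodicWork is a placeholder):

// sketch: assumes the Schedule class above is what the package ultimately exposes
const schedule = new Schedule('* * * * *', 'utc') // every minute, UTC

const stop = schedule.startJob(() => {
  doPeriodicWork() // a throw here is swallowed by the wrapper instead of killing the job
})

console.log(schedule.next(3)) // the next three scheduled dates

// later
stop()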
@@ -28,10 +28,10 @@
  },
  "dependencies": {},
  "devDependencies": {
    "@babel/cli": "7.0.0",
    "@babel/core": "7.0.0",
    "@babel/preset-env": "7.0.0",
    "@babel/preset-flow": "7.0.0",
    "@babel/cli": "^7.0.0",
    "@babel/core": "^7.0.0",
    "@babel/preset-env": "^7.0.0",
    "@babel/preset-flow": "^7.0.0",
    "babel-plugin-lodash": "^3.3.2",
    "cross-env": "^5.1.3",
    "rimraf": "^2.6.2"
@@ -13,7 +13,7 @@
//   process.env.http_proxy
// ])
// ```
export default function defined () {
export default function defined() {
  let args = arguments
  let n = args.length
  if (n === 1) {
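defined() returns the first of its arguments that is not undefined, which is useful for falling back across several optional sources, as the truncated comment block above hints with process.env.http_proxy. A short sketch, with the import path and the fallback value marked as assumptions:

// sketch: pick the first defined value among several candidates
import defined from '@xen-orchestra/defined' // assumed package name

const proxy = defined(
  process.env.http_proxy,
  process.env.HTTP_PROXY,
  'http://localhost:3128' // hypothetical fallback
)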
@@ -28,9 +28,9 @@
  },
  "dependencies": {},
  "devDependencies": {
    "@babel/cli": "7.0.0",
    "@babel/core": "7.0.0",
    "@babel/preset-env": "7.0.0",
    "@babel/cli": "^7.0.0",
    "@babel/core": "^7.0.0",
    "@babel/preset-env": "^7.0.0",
    "babel-plugin-lodash": "^3.3.2",
    "cross-env": "^5.1.3",
    "rimraf": "^2.6.2"
@@ -1,4 +1,4 @@
export default function emitAsync (event) {
export default function emitAsync(event) {
  let opts
  let i = 1
@@ -1,6 +1,6 @@
{
  "name": "@xen-orchestra/fs",
  "version": "0.3.0",
  "version": "0.6.0",
  "license": "AGPL-3.0",
  "description": "The File System for Xen Orchestra backups.",
  "keywords": [],
@@ -20,24 +20,30 @@
    "node": ">=6"
  },
  "dependencies": {
    "@marsaud/smb2": "^0.9.0",
    "@marsaud/smb2": "^0.13.0",
    "@sindresorhus/df": "^2.1.0",
    "@xen-orchestra/async-map": "^0.0.0",
    "execa": "^1.0.0",
    "fs-extra": "^7.0.0",
    "get-stream": "^4.0.0",
    "lodash": "^4.17.4",
    "promise-toolbox": "^0.10.1",
    "through2": "^2.0.3",
    "promise-toolbox": "^0.11.0",
    "readable-stream": "^3.0.6",
    "through2": "^3.0.0",
    "tmp": "^0.0.33",
    "xo-remote-parser": "^0.5.0"
  },
  "devDependencies": {
    "@babel/cli": "7.0.0",
    "@babel/core": "7.0.0",
    "@babel/plugin-proposal-function-bind": "7.0.0",
    "@babel/preset-env": "7.0.0",
    "@babel/preset-flow": "7.0.0",
    "@babel/cli": "^7.0.0",
    "@babel/core": "^7.0.0",
    "@babel/plugin-proposal-decorators": "^7.1.6",
    "@babel/plugin-proposal-function-bind": "^7.0.0",
    "@babel/preset-env": "^7.0.0",
    "@babel/preset-flow": "^7.0.0",
    "async-iterator-to-stream": "^1.1.0",
    "babel-plugin-lodash": "^3.3.2",
    "cross-env": "^5.1.3",
    "dotenv": "^6.1.0",
    "index-modules": "^0.3.0",
    "rimraf": "^2.6.2"
  },
@xen-orchestra/fs/src/_mount.js (new file, 77 lines)
@@ -0,0 +1,77 @@
import execa from 'execa'
import fs from 'fs-extra'
import { join } from 'path'
import { tmpdir } from 'os'

import LocalHandler from './local'

const sudoExeca = (command, args, opts) =>
  execa('sudo', [command, ...args], opts)

export default class MountHandler extends LocalHandler {
  constructor(
    remote,
    {
      mountsDir = join(tmpdir(), 'xo-fs-mounts'),
      useSudo = false,
      ...opts
    } = {},
    params
  ) {
    super(remote, opts)

    this._execa = useSudo ? sudoExeca : execa
    this._params = params
    this._realPath = join(
      mountsDir,
      remote.id ||
        Math.random()
          .toString(36)
          .slice(2)
    )
  }

  async _forget() {
    await this._execa('umount', ['--force', this._getRealPath()], {
      env: {
        LANG: 'C',
      },
    }).catch(error => {
      if (
        error == null ||
        typeof error.stderr !== 'string' ||
        !error.stderr.includes('not mounted')
      ) {
        throw error
      }
    })
  }

  _getRealPath() {
    return this._realPath
  }

  async _sync() {
    await fs.ensureDir(this._getRealPath())
    const { type, device, options, env } = this._params
    return this._execa(
      'mount',
      ['-t', type, device, this._getRealPath(), '-o', options],
      {
        env: {
          LANG: 'C',
          ...env,
        },
      }
    ).catch(error => {
      let stderr
      if (
        error == null ||
        typeof (stderr = error.stderr) !== 'string' ||
        !(stderr.includes('already mounted') || stderr.includes('busy'))
      ) {
        throw error
      }
    })
  }
}
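MountHandler extends the local handler: _sync() creates a per-remote directory under the system temp dir and mounts the device there, _forget() unmounts it, and both tolerate "already mounted" / "not mounted" errors so repeated syncs are harmless. A sketch of a concrete subclass built on it, assuming an NFS remote whose host and export path come from the parsed remote URL (the package's real NFS handler may differ):

// sketch: the field names on `remote` (host, path) and the mount options are assumptions
import MountHandler from './_mount'

export default class NfsHandler extends MountHandler {
  constructor(remote, opts) {
    super(remote, opts, {
      type: 'nfs',
      device: `${remote.host}:${remote.path}`,
      options: 'vers=3',
      env: {},
    })
  }

  get type() {
    return 'nfs'
  }
}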
@xen-orchestra/fs/src/_normalizePath.js (new file, 9 lines)
@@ -0,0 +1,9 @@
import path from 'path'

const { resolve } = path.posix

// normalize the path:
// - does not contain `.` or `..` (cannot escape root dir)
// - always starts with `/`
const normalizePath = path => resolve('/', path)
export { normalizePath as default }
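resolve('/', path) from path.posix anchors every path at the root and collapses '.' and '..' segments, so a caller can never escape the handler's root directory. A few inputs and outputs (standard Node path.posix.resolve behaviour):

normalizePath('foo/bar')       // '/foo/bar'
normalizePath('/foo//bar/')    // '/foo/bar'
normalizePath('foo/../../etc') // '/etc' (the extra '..' cannot climb above '/')
normalizePath('..')            // '/'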
@@ -1,192 +1,171 @@
|
||||
// @flow
|
||||
|
||||
// $FlowFixMe
|
||||
import getStream from 'get-stream'
|
||||
import { randomBytes } from 'crypto'
|
||||
import { fromCallback, fromEvent, ignoreErrors } from 'promise-toolbox'
|
||||
import { type Readable, type Writable } from 'stream'
|
||||
import { parse } from 'xo-remote-parser'
|
||||
|
||||
import asyncMap from '@xen-orchestra/async-map'
|
||||
import path from 'path'
|
||||
import { fromCallback, fromEvent, ignoreErrors, timeout } from 'promise-toolbox'
|
||||
import { parse } from 'xo-remote-parser'
|
||||
import { randomBytes } from 'crypto'
|
||||
import { type Readable, type Writable } from 'stream'
|
||||
|
||||
import normalizePath from './_normalizePath'
|
||||
import { createChecksumStream, validChecksumOfReadStream } from './checksum'
|
||||
|
||||
const { dirname } = path.posix
|
||||
|
||||
type Data = Buffer | Readable | string
|
||||
type FileDescriptor = {| fd: mixed, path: string |}
|
||||
type LaxReadable = Readable & Object
|
||||
type LaxWritable = Writable & Object
|
||||
type RemoteInfo = { used?: number, size?: number }
|
||||
|
||||
type File = FileDescriptor | string
|
||||
|
||||
const checksumFile = file => file + '.checksum'
|
||||
|
||||
export default class RemoteHandlerAbstract {
|
||||
_remote: Object
|
||||
constructor (remote: any) {
|
||||
this._remote = { ...remote, ...parse(remote.url) }
|
||||
if (this._remote.type !== this.type) {
|
||||
throw new Error('Incorrect remote type')
|
||||
}
|
||||
const DEFAULT_TIMEOUT = 6e5 // 10 min
|
||||
|
||||
const ignoreEnoent = error => {
|
||||
if (error == null || error.code !== 'ENOENT') {
|
||||
throw error
|
||||
}
|
||||
}
|
||||
|
||||
class PrefixWrapper {
|
||||
constructor(remote, prefix) {
|
||||
this._prefix = prefix
|
||||
this._remote = remote
|
||||
}
|
||||
|
||||
get type (): string {
|
||||
throw new Error('Not implemented')
|
||||
get type() {
|
||||
return this._remote.type
|
||||
}
|
||||
|
||||
/**
|
||||
* Asks the handler to sync the state of the effective remote with its' metadata
|
||||
*/
|
||||
async sync (): Promise<mixed> {
|
||||
return this._sync()
|
||||
}
|
||||
|
||||
async _sync (): Promise<mixed> {
|
||||
throw new Error('Not implemented')
|
||||
}
|
||||
|
||||
/**
|
||||
* Free the resources possibly dedicated to put the remote at work, when it is no more needed
|
||||
*/
|
||||
async forget (): Promise<void> {
|
||||
await this._forget()
|
||||
}
|
||||
|
||||
async _forget (): Promise<void> {
|
||||
throw new Error('Not implemented')
|
||||
}
|
||||
|
||||
async test (): Promise<Object> {
|
||||
const testFileName = `${Date.now()}.test`
|
||||
const data = await fromCallback(cb => randomBytes(1024 * 1024, cb))
|
||||
let step = 'write'
|
||||
try {
|
||||
await this.outputFile(testFileName, data)
|
||||
step = 'read'
|
||||
const read = await this.readFile(testFileName)
|
||||
if (data.compare(read) !== 0) {
|
||||
throw new Error('output and input did not match')
|
||||
}
|
||||
return {
|
||||
success: true,
|
||||
}
|
||||
} catch (error) {
|
||||
return {
|
||||
success: false,
|
||||
step,
|
||||
file: testFileName,
|
||||
error: error.message || String(error),
|
||||
}
|
||||
} finally {
|
||||
ignoreErrors.call(this.unlink(testFileName))
|
||||
}
|
||||
}
|
||||
|
||||
async outputFile (file: string, data: Data, options?: Object): Promise<void> {
|
||||
return this._outputFile(file, data, {
|
||||
flags: 'wx',
|
||||
...options,
|
||||
})
|
||||
}
|
||||
|
||||
async _outputFile (file: string, data: Data, options?: Object): Promise<void> {
|
||||
const stream = await this.createOutputStream(file, options)
|
||||
const promise = fromEvent(stream, 'finish')
|
||||
stream.end(data)
|
||||
await promise
|
||||
}
|
||||
|
||||
async read (
|
||||
file: File,
|
||||
buffer: Buffer,
|
||||
position?: number
|
||||
): Promise<{| bytesRead: number, buffer: Buffer |}> {
|
||||
return this._read(file, buffer, position)
|
||||
}
|
||||
|
||||
_read (
|
||||
file: File,
|
||||
buffer: Buffer,
|
||||
position?: number
|
||||
): Promise<{| bytesRead: number, buffer: Buffer |}> {
|
||||
throw new Error('Not implemented')
|
||||
}
|
||||
|
||||
async readFile (file: string, options?: Object): Promise<Buffer> {
|
||||
return this._readFile(file, options)
|
||||
}
|
||||
|
||||
_readFile (file: string, options?: Object): Promise<Buffer> {
|
||||
return this.createReadStream(file, options).then(getStream.buffer)
|
||||
}
|
||||
|
||||
async rename (
|
||||
oldPath: string,
|
||||
newPath: string,
|
||||
{ checksum = false }: Object = {}
|
||||
) {
|
||||
let p = this._rename(oldPath, newPath)
|
||||
if (checksum) {
|
||||
p = Promise.all([
|
||||
p,
|
||||
this._rename(checksumFile(oldPath), checksumFile(newPath)),
|
||||
])
|
||||
}
|
||||
return p
|
||||
}
|
||||
|
||||
async _rename (oldPath: string, newPath: string) {
|
||||
throw new Error('Not implemented')
|
||||
}
|
||||
|
||||
async list (
|
||||
dir: string = '.',
|
||||
{
|
||||
filter,
|
||||
prependDir = false,
|
||||
}: { filter?: (name: string) => boolean, prependDir?: boolean } = {}
|
||||
): Promise<string[]> {
|
||||
let entries = await this._list(dir)
|
||||
if (filter !== undefined) {
|
||||
entries = entries.filter(filter)
|
||||
}
|
||||
|
||||
if (prependDir) {
|
||||
entries.forEach((entry, i) => {
|
||||
entries[i] = dir + '/' + entry
|
||||
// necessary to remove the prefix from the path with `prependDir` option
|
||||
async list(dir, opts) {
|
||||
const entries = await this._remote.list(this._resolve(dir), opts)
|
||||
if (opts != null && opts.prependDir) {
|
||||
const n = this._prefix.length
|
||||
entries.forEach((entry, i, entries) => {
|
||||
entries[i] = entry.slice(n)
|
||||
})
|
||||
}
|
||||
|
||||
return entries
|
||||
}
|
||||
|
||||
async _list (dir: string): Promise<string[]> {
|
||||
rename(oldPath, newPath) {
|
||||
return this._remote.rename(this._resolve(oldPath), this._resolve(newPath))
|
||||
}
|
||||
|
||||
_resolve(path) {
|
||||
return this._prefix + normalizePath(path)
|
||||
}
|
||||
}
|
||||
|
||||
export default class RemoteHandlerAbstract {
|
||||
_remote: Object
|
||||
_timeout: number
|
||||
|
||||
constructor(remote: any, options: Object = {}) {
|
||||
if (remote.url === 'test://') {
|
||||
this._remote = remote
|
||||
} else {
|
||||
this._remote = { ...remote, ...parse(remote.url) }
|
||||
if (this._remote.type !== this.type) {
|
||||
throw new Error('Incorrect remote type')
|
||||
}
|
||||
}
|
||||
;({ timeout: this._timeout = DEFAULT_TIMEOUT } = options)
|
||||
}
|
||||
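The public methods of RemoteHandlerAbstract now wrap their _-prefixed implementations in timeout.call(...) from promise-toolbox, with the 10-minute DEFAULT_TIMEOUT above, so a hung remote surfaces as a rejected promise instead of blocking forever. A standalone sketch of that pattern:

// sketch of the promise-toolbox timeout pattern used by this class
import { timeout, TimeoutError } from 'promise-toolbox'

async function demo() {
  const hung = new Promise(() => {}) // never settles
  try {
    await timeout.call(hung, 1e3) // rejects after one second
  } catch (error) {
    console.log(error instanceof TimeoutError) // true
  }
}

demo()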
|
||||
// Public members
|
||||
|
||||
get type(): string {
|
||||
throw new Error('Not implemented')
|
||||
}
|
||||
|
||||
createReadStream (
|
||||
file: string,
|
||||
addPrefix(prefix: string) {
|
||||
prefix = normalizePath(prefix)
|
||||
return prefix === '/' ? this : new PrefixWrapper(this, prefix)
|
||||
}
|
||||
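addPrefix() returns a PrefixWrapper that resolves every path argument under the given prefix before delegating to the wrapped handler, and strips the prefix again from list() results when prependDir is used (see the PrefixWrapper methods above); this is how the new test suites isolate each run in its own directory. A usage sketch mirroring fs.spec.js further down (inside an async function):

// sketch: confine a handler to a per-run sub-directory
const handler = getHandler({ url: 'file:///tmp' }).addPrefix(`xo-fs-tests-${Date.now()}`)
await handler.sync()

await handler.outputFile('dir/file', 'hello')
console.log(await handler.list('dir')) // [ 'file' ], paths stay relative to the prefix

await handler.rmtree('.')
await handler.forget()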
|
||||
async closeFile(fd: FileDescriptor): Promise<void> {
|
||||
await timeout.call(this._closeFile(fd.fd), this._timeout)
|
||||
}
|
||||
|
||||
async createOutputStream(
|
||||
file: File,
|
||||
{ checksum = false, ...options }: Object = {}
|
||||
): Promise<LaxWritable> {
|
||||
if (typeof file === 'string') {
|
||||
file = normalizePath(file)
|
||||
}
|
||||
const path = typeof file === 'string' ? file : file.path
|
||||
const streamP = timeout.call(
|
||||
this._createOutputStream(file, {
|
||||
flags: 'wx',
|
||||
...options,
|
||||
}),
|
||||
this._timeout
|
||||
)
|
||||
|
||||
if (!checksum) {
|
||||
return streamP
|
||||
}
|
||||
|
||||
const checksumStream = createChecksumStream()
|
||||
const forwardError = error => {
|
||||
checksumStream.emit('error', error)
|
||||
}
|
||||
|
||||
const stream = await streamP
|
||||
stream.on('error', forwardError)
|
||||
checksumStream.pipe(stream)
|
||||
|
||||
// $FlowFixMe
|
||||
checksumStream.checksumWritten = checksumStream.checksum
|
||||
.then(value =>
|
||||
this._outputFile(checksumFile(path), value, { flags: 'wx' })
|
||||
)
|
||||
.catch(forwardError)
|
||||
|
||||
return checksumStream
|
||||
}
|
||||
|
||||
createReadStream(
|
||||
file: File,
|
||||
{ checksum = false, ignoreMissingChecksum = false, ...options }: Object = {}
|
||||
): Promise<LaxReadable> {
|
||||
if (typeof file === 'string') {
|
||||
file = normalizePath(file)
|
||||
}
|
||||
const path = typeof file === 'string' ? file : file.path
|
||||
const streamP = this._createReadStream(file, options).then(stream => {
|
||||
// detect early errors
|
||||
let promise = fromEvent(stream, 'readable')
|
||||
const streamP = timeout
|
||||
.call(this._createReadStream(file, options), this._timeout)
|
||||
.then(stream => {
|
||||
// detect early errors
|
||||
let promise = fromEvent(stream, 'readable')
|
||||
|
||||
// try to add the length prop if missing and not a range stream
|
||||
if (
|
||||
stream.length === undefined &&
|
||||
options.end === undefined &&
|
||||
options.start === undefined
|
||||
) {
|
||||
promise = Promise.all([
|
||||
promise,
|
||||
ignoreErrors.call(
|
||||
this.getSize(file).then(size => {
|
||||
stream.length = size
|
||||
})
|
||||
),
|
||||
])
|
||||
}
|
||||
// try to add the length prop if missing and not a range stream
|
||||
if (
|
||||
stream.length === undefined &&
|
||||
options.end === undefined &&
|
||||
options.start === undefined
|
||||
) {
|
||||
promise = Promise.all([
|
||||
promise,
|
||||
ignoreErrors.call(
|
||||
this._getSize(file).then(size => {
|
||||
stream.length = size
|
||||
})
|
||||
),
|
||||
])
|
||||
}
|
||||
|
||||
return promise.then(() => stream)
|
||||
})
|
||||
return promise.then(() => stream)
|
||||
})
|
||||
|
||||
if (!checksum) {
|
||||
return streamP
|
||||
@@ -195,7 +174,7 @@ export default class RemoteHandlerAbstract {
|
||||
// avoid a unhandled rejection warning
|
||||
ignoreErrors.call(streamP)
|
||||
|
||||
return this.readFile(checksumFile(path)).then(
|
||||
return this._readFile(checksumFile(path), { flags: 'r' }).then(
|
||||
checksum =>
|
||||
streamP.then(stream => {
|
||||
const { length } = stream
|
||||
@@ -216,92 +195,380 @@ export default class RemoteHandlerAbstract {
|
||||
)
|
||||
}
|
||||
|
||||
async _createReadStream (
|
||||
createWriteStream(
|
||||
file: File,
|
||||
options: { end?: number, flags?: string, start?: number } = {}
|
||||
): Promise<LaxWritable> {
|
||||
return timeout.call(
|
||||
this._createWriteStream(
|
||||
typeof file === 'string' ? normalizePath(file) : file,
|
||||
{
|
||||
flags: 'wx',
|
||||
...options,
|
||||
}
|
||||
)
|
||||
)
|
||||
}
|
||||
|
||||
// Free the resources possibly dedicated to put the remote at work, when it
|
||||
// is no more needed
|
||||
//
|
||||
// FIXME: Some handlers are implemented based on system-wide mecanisms (such
|
||||
// as mount), forgetting them might breaking other processes using the same
|
||||
// remote.
|
||||
async forget(): Promise<void> {
|
||||
await this._forget()
|
||||
}
|
||||
|
||||
async getInfo(): Promise<RemoteInfo> {
|
||||
return timeout.call(this._getInfo(), this._timeout)
|
||||
}
|
||||
|
||||
async getSize(file: File): Promise<number> {
|
||||
return timeout.call(
|
||||
this._getSize(typeof file === 'string' ? normalizePath(file) : file),
|
||||
this._timeout
|
||||
)
|
||||
}
|
||||
|
||||
async list(
|
||||
dir: string,
|
||||
{
|
||||
filter,
|
||||
prependDir = false,
|
||||
}: { filter?: (name: string) => boolean, prependDir?: boolean } = {}
|
||||
): Promise<string[]> {
|
||||
const virtualDir = normalizePath(dir)
|
||||
dir = normalizePath(dir)
|
||||
|
||||
let entries = await timeout.call(this._list(dir), this._timeout)
|
||||
if (filter !== undefined) {
|
||||
entries = entries.filter(filter)
|
||||
}
|
||||
|
||||
if (prependDir) {
|
||||
entries.forEach((entry, i) => {
|
||||
entries[i] = virtualDir + '/' + entry
|
||||
})
|
||||
}
|
||||
|
||||
return entries
|
||||
}
|
||||
|
||||
async mkdir(dir: string): Promise<void> {
|
||||
dir = normalizePath(dir)
|
||||
try {
|
||||
await this._mkdir(dir)
|
||||
} catch (error) {
|
||||
if (error == null || error.code !== 'EEXIST') {
|
||||
throw error
|
||||
}
|
||||
|
||||
// this operation will throw if it's not already a directory
|
||||
await this._list(dir)
|
||||
}
|
||||
}
|
||||
|
||||
async mktree(dir: string): Promise<void> {
|
||||
await this._mktree(normalizePath(dir))
|
||||
}
|
||||
|
||||
async openFile(path: string, flags: string): Promise<FileDescriptor> {
|
||||
path = normalizePath(path)
|
||||
|
||||
return {
|
||||
fd: await timeout.call(this._openFile(path, flags), this._timeout),
|
||||
path,
|
||||
}
|
||||
}
|
||||
|
||||
async outputFile(
|
||||
file: string,
|
||||
options?: Object
|
||||
): Promise<LaxReadable> {
|
||||
throw new Error('Not implemented')
|
||||
data: Data,
|
||||
{ flags = 'wx' }: { flags?: string } = {}
|
||||
): Promise<void> {
|
||||
await this._outputFile(normalizePath(file), data, { flags })
|
||||
}
|
||||
|
||||
async openFile (path: string, flags?: string): Promise<FileDescriptor> {
|
||||
return { fd: await this._openFile(path, flags), path }
|
||||
async read(
|
||||
file: File,
|
||||
buffer: Buffer,
|
||||
position?: number
|
||||
): Promise<{| bytesRead: number, buffer: Buffer |}> {
|
||||
return this._read(
|
||||
typeof file === 'string' ? normalizePath(file) : file,
|
||||
buffer,
|
||||
position
|
||||
)
|
||||
}
|
||||
|
||||
async _openFile (path: string, flags?: string): Promise<mixed> {
|
||||
throw new Error('Not implemented')
|
||||
async readFile(
|
||||
file: string,
|
||||
{ flags = 'r' }: { flags?: string } = {}
|
||||
): Promise<Buffer> {
|
||||
return this._readFile(normalizePath(file), { flags })
|
||||
}
|
||||
|
||||
async closeFile (fd: FileDescriptor): Promise<void> {
|
||||
await this._closeFile(fd.fd)
|
||||
}
|
||||
async refreshChecksum(path: string): Promise<void> {
|
||||
path = normalizePath(path)
|
||||
|
||||
async _closeFile (fd: mixed): Promise<void> {
|
||||
throw new Error('Not implemented')
|
||||
}
|
||||
|
||||
async refreshChecksum (path: string): Promise<void> {
|
||||
const stream = (await this.createReadStream(path)).pipe(
|
||||
const stream = (await this._createReadStream(path, { flags: 'r' })).pipe(
|
||||
createChecksumStream()
|
||||
)
|
||||
stream.resume() // start reading the whole file
|
||||
await this.outputFile(checksumFile(path), await stream.checksum)
|
||||
}
|
||||
|
||||
async createOutputStream (
|
||||
file: File,
|
||||
{ checksum = false, ...options }: Object = {}
|
||||
): Promise<LaxWritable> {
|
||||
const path = typeof file === 'string' ? file : file.path
|
||||
const streamP = this._createOutputStream(file, {
|
||||
await this._outputFile(checksumFile(path), await stream.checksum, {
|
||||
flags: 'wx',
|
||||
...options,
|
||||
})
|
||||
|
||||
if (!checksum) {
|
||||
return streamP
|
||||
}
|
||||
|
||||
const checksumStream = createChecksumStream()
|
||||
const forwardError = error => {
|
||||
checksumStream.emit('error', error)
|
||||
}
|
||||
|
||||
const stream = await streamP
|
||||
stream.on('error', forwardError)
|
||||
checksumStream.pipe(stream)
|
||||
|
||||
// $FlowFixMe
|
||||
checksumStream.checksumWritten = checksumStream.checksum
|
||||
.then(value => this.outputFile(checksumFile(path), value))
|
||||
.catch(forwardError)
|
||||
|
||||
return checksumStream
|
||||
}
|
||||
|
||||
async _createOutputStream (
|
||||
file: mixed,
|
||||
options?: Object
|
||||
): Promise<LaxWritable> {
|
||||
throw new Error('Not implemented')
|
||||
async rename(
|
||||
oldPath: string,
|
||||
newPath: string,
|
||||
{ checksum = false }: Object = {}
|
||||
) {
|
||||
oldPath = normalizePath(oldPath)
|
||||
newPath = normalizePath(newPath)
|
||||
|
||||
let p = timeout.call(this._rename(oldPath, newPath), this._timeout)
|
||||
if (checksum) {
|
||||
p = Promise.all([
|
||||
p,
|
||||
this._rename(checksumFile(oldPath), checksumFile(newPath)),
|
||||
])
|
||||
}
|
||||
return p
|
||||
}
|
||||
|
||||
async unlink (file: string, { checksum = true }: Object = {}): Promise<void> {
|
||||
async rmdir(dir: string): Promise<void> {
|
||||
await timeout.call(
|
||||
this._rmdir(normalizePath(dir)).catch(ignoreEnoent),
|
||||
this._timeout
|
||||
)
|
||||
}
|
||||
|
||||
async rmtree(dir: string): Promise<void> {
|
||||
await this._rmtree(normalizePath(dir))
|
||||
}
|
||||
|
||||
// Asks the handler to sync the state of the effective remote with its'
|
||||
// metadata
|
||||
//
|
||||
// This method MUST ALWAYS be called before using the handler.
|
||||
async sync(): Promise<void> {
|
||||
await this._sync()
|
||||
}
|
||||
|
||||
async test(): Promise<Object> {
|
||||
const testFileName = normalizePath(`${Date.now()}.test`)
|
||||
const data = await fromCallback(cb => randomBytes(1024 * 1024, cb))
|
||||
let step = 'write'
|
||||
try {
|
||||
await this._outputFile(testFileName, data, { flags: 'wx' })
|
||||
step = 'read'
|
||||
const read = await this._readFile(testFileName, { flags: 'r' })
|
||||
if (!data.equals(read)) {
|
||||
throw new Error('output and input did not match')
|
||||
}
|
||||
return {
|
||||
success: true,
|
||||
}
|
||||
} catch (error) {
|
||||
return {
|
||||
success: false,
|
||||
step,
|
||||
file: testFileName,
|
||||
error: error.message || String(error),
|
||||
}
|
||||
} finally {
|
||||
ignoreErrors.call(this._unlink(testFileName))
|
||||
}
|
||||
}
|
||||
|
||||
async unlink(file: string, { checksum = true }: Object = {}): Promise<void> {
|
||||
file = normalizePath(file)
|
||||
|
||||
if (checksum) {
|
||||
ignoreErrors.call(this._unlink(checksumFile(file)))
|
||||
}
|
||||
|
||||
await this._unlink(file)
|
||||
await this._unlink(file).catch(ignoreEnoent)
|
||||
}
|
||||
|
||||
async _unlink (file: mixed): Promise<void> {
|
||||
async writeFile(
|
||||
file: string,
|
||||
data: Data,
|
||||
{ flags = 'wx' }: { flags?: string } = {}
|
||||
): Promise<void> {
|
||||
await this._writeFile(normalizePath(file), data, { flags })
|
||||
}
|
||||
|
||||
// Methods that can be implemented by inheriting classes
|
||||
|
||||
async _closeFile(fd: mixed): Promise<void> {
|
||||
throw new Error('Not implemented')
|
||||
}
|
||||
|
||||
async getSize (file: mixed): Promise<number> {
|
||||
return this._getSize(file)
|
||||
async _createOutputStream(file: File, options: Object): Promise<LaxWritable> {
|
||||
try {
|
||||
return await this._createWriteStream(file, options)
|
||||
} catch (error) {
|
||||
if (typeof file !== 'string' || error.code !== 'ENOENT') {
|
||||
throw error
|
||||
}
|
||||
}
|
||||
|
||||
await this._mktree(dirname(file))
|
||||
return this._createOutputStream(file, options)
|
||||
}
|
||||
|
||||
async _getSize (file: mixed): Promise<number> {
|
||||
async _createReadStream(file: File, options?: Object): Promise<LaxReadable> {
|
||||
throw new Error('Not implemented')
|
||||
}
|
||||
|
||||
async _createWriteStream(file: File, options: Object): Promise<LaxWritable> {
|
||||
throw new Error('Not implemented')
|
||||
}
|
||||
|
||||
// called to finalize the remote
|
||||
async _forget(): Promise<void> {}
|
||||
|
||||
async _getInfo(): Promise<Object> {
|
||||
return {}
|
||||
}
|
||||
|
||||
async _getSize(file: File): Promise<number> {
|
||||
throw new Error('Not implemented')
|
||||
}
|
||||
|
||||
async _list(dir: string): Promise<string[]> {
|
||||
throw new Error('Not implemented')
|
||||
}
|
||||
|
||||
async _mkdir(dir: string): Promise<void> {
|
||||
throw new Error('Not implemented')
|
||||
}
|
||||
|
||||
async _mktree(dir: string): Promise<void> {
|
||||
try {
|
||||
return await this.mkdir(dir)
|
||||
} catch (error) {
|
||||
if (error.code !== 'ENOENT') {
|
||||
throw error
|
||||
}
|
||||
}
|
||||
|
||||
await this._mktree(dirname(dir))
|
||||
return this._mktree(dir)
|
||||
}
|
||||
|
||||
async _openFile(path: string, flags: string): Promise<mixed> {
|
||||
throw new Error('Not implemented')
|
||||
}
|
||||
|
||||
async _outputFile(
|
||||
file: string,
|
||||
data: Data,
|
||||
options: { flags?: string }
|
||||
): Promise<void> {
|
||||
try {
|
||||
return await this._writeFile(file, data, options)
|
||||
} catch (error) {
|
||||
if (error.code !== 'ENOENT') {
|
||||
throw error
|
||||
}
|
||||
}
|
||||
|
||||
await this._mktree(dirname(file))
|
||||
return this._outputFile(file, data, options)
|
||||
}
|
||||
|
||||
_read(
|
||||
file: File,
|
||||
buffer: Buffer,
|
||||
position?: number
|
||||
): Promise<{| bytesRead: number, buffer: Buffer |}> {
|
||||
throw new Error('Not implemented')
|
||||
}
|
||||
|
||||
_readFile(file: string, options?: Object): Promise<Buffer> {
|
||||
return this._createReadStream(file, options).then(getStream.buffer)
|
||||
}
|
||||
|
||||
async _rename(oldPath: string, newPath: string) {
|
||||
throw new Error('Not implemented')
|
||||
}
|
||||
|
||||
async _rmdir(dir: string) {
|
||||
throw new Error('Not implemented')
|
||||
}
|
||||
|
||||
async _rmtree(dir: string) {
|
||||
try {
|
||||
return await this._rmdir(dir)
|
||||
} catch (error) {
|
||||
if (error.code !== 'ENOTEMPTY') {
|
||||
throw error
|
||||
}
|
||||
}
|
||||
|
||||
const files = await this._list(dir)
|
||||
await asyncMap(files, file =>
|
||||
this._unlink(`${dir}/${file}`).catch(error => {
|
||||
if (error.code === 'EISDIR') {
|
||||
return this._rmtree(`${dir}/${file}`)
|
||||
}
|
||||
throw error
|
||||
})
|
||||
)
|
||||
return this._rmtree(dir)
|
||||
}
|
||||
|
||||
// called to initialize the remote
|
||||
async _sync(): Promise<void> {}
|
||||
|
||||
async _unlink(file: string): Promise<void> {
|
||||
throw new Error('Not implemented')
|
||||
}
|
||||
|
||||
async _writeFile(
|
||||
file: string,
|
||||
data: Data,
|
||||
options: { flags?: string }
|
||||
): Promise<void> {
|
||||
throw new Error('Not implemented')
|
||||
}
|
||||
}
|
||||
|
||||
function createPrefixWrapperMethods() {
|
||||
const pPw = PrefixWrapper.prototype
|
||||
const pRha = RemoteHandlerAbstract.prototype
|
||||
|
||||
const {
|
||||
defineProperty,
|
||||
getOwnPropertyDescriptor,
|
||||
prototype: { hasOwnProperty },
|
||||
} = Object
|
||||
|
||||
Object.getOwnPropertyNames(pRha).forEach(name => {
|
||||
let descriptor, value
|
||||
if (
|
||||
hasOwnProperty.call(pPw, name) ||
|
||||
name[0] === '_' ||
|
||||
typeof (value = (descriptor = getOwnPropertyDescriptor(pRha, name))
|
||||
.value) !== 'function'
|
||||
) {
|
||||
return
|
||||
}
|
||||
|
||||
descriptor.value = function() {
|
||||
let path
|
||||
if (arguments.length !== 0 && typeof (path = arguments[0]) === 'string') {
|
||||
arguments[0] = this._resolve(path)
|
||||
}
|
||||
return value.apply(this._remote, arguments)
|
||||
}
|
||||
|
||||
defineProperty(pPw, name, descriptor)
|
||||
})
|
||||
}
|
||||
createPrefixWrapperMethods()
|
||||
|
||||
125
@xen-orchestra/fs/src/abstract.spec.js
Normal file
@@ -0,0 +1,125 @@
|
||||
/* eslint-env jest */
|
||||
|
||||
import { TimeoutError } from 'promise-toolbox'
|
||||
|
||||
import AbstractHandler from './abstract'
|
||||
|
||||
const TIMEOUT = 10e3
|
||||
|
||||
class TestHandler extends AbstractHandler {
|
||||
constructor(impl) {
|
||||
super({ url: 'test://' }, { timeout: TIMEOUT })
|
||||
|
||||
Object.keys(impl).forEach(method => {
|
||||
this[`_${method}`] = impl[method]
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
describe('closeFile()', () => {
|
||||
it(`throws in case of timeout`, async () => {
|
||||
const testHandler = new TestHandler({
|
||||
closeFile: () => new Promise(() => {}),
|
||||
})
|
||||
|
||||
const promise = testHandler.closeFile({ fd: undefined, path: '' })
|
||||
jest.advanceTimersByTime(TIMEOUT)
|
||||
await expect(promise).rejects.toThrowError(TimeoutError)
|
||||
})
|
||||
})
|
||||
|
||||
describe('createOutputStream()', () => {
|
||||
it(`throws in case of timeout`, async () => {
|
||||
const testHandler = new TestHandler({
|
||||
createOutputStream: () => new Promise(() => {}),
|
||||
})
|
||||
|
||||
const promise = testHandler.createOutputStream('File')
|
||||
jest.advanceTimersByTime(TIMEOUT)
|
||||
await expect(promise).rejects.toThrowError(TimeoutError)
|
||||
})
|
||||
})
|
||||
|
||||
describe('createReadStream()', () => {
|
||||
it(`throws in case of timeout`, async () => {
|
||||
const testHandler = new TestHandler({
|
||||
createReadStream: () => new Promise(() => {}),
|
||||
})
|
||||
|
||||
const promise = testHandler.createReadStream('file')
|
||||
jest.advanceTimersByTime(TIMEOUT)
|
||||
await expect(promise).rejects.toThrowError(TimeoutError)
|
||||
})
|
||||
})
|
||||
|
||||
describe('getInfo()', () => {
|
||||
it('throws in case of timeout', async () => {
|
||||
const testHandler = new TestHandler({
|
||||
getInfo: () => new Promise(() => {}),
|
||||
})
|
||||
|
||||
const promise = testHandler.getInfo()
|
||||
jest.advanceTimersByTime(TIMEOUT)
|
||||
await expect(promise).rejects.toThrowError(TimeoutError)
|
||||
})
|
||||
})
|
||||
|
||||
describe('getSize()', () => {
|
||||
it(`throws in case of timeout`, async () => {
|
||||
const testHandler = new TestHandler({
|
||||
getSize: () => new Promise(() => {}),
|
||||
})
|
||||
|
||||
const promise = testHandler.getSize('')
|
||||
jest.advanceTimersByTime(TIMEOUT)
|
||||
await expect(promise).rejects.toThrowError(TimeoutError)
|
||||
})
|
||||
})
|
||||
|
||||
describe('list()', () => {
|
||||
it(`throws in case of timeout`, async () => {
|
||||
const testHandler = new TestHandler({
|
||||
list: () => new Promise(() => {}),
|
||||
})
|
||||
|
||||
const promise = testHandler.list('.')
|
||||
jest.advanceTimersByTime(TIMEOUT)
|
||||
await expect(promise).rejects.toThrowError(TimeoutError)
|
||||
})
|
||||
})
|
||||
|
||||
describe('openFile()', () => {
|
||||
it(`throws in case of timeout`, async () => {
|
||||
const testHandler = new TestHandler({
|
||||
openFile: () => new Promise(() => {}),
|
||||
})
|
||||
|
||||
const promise = testHandler.openFile('path')
|
||||
jest.advanceTimersByTime(TIMEOUT)
|
||||
await expect(promise).rejects.toThrowError(TimeoutError)
|
||||
})
|
||||
})
|
||||
|
||||
describe('rename()', () => {
|
||||
it(`throws in case of timeout`, async () => {
|
||||
const testHandler = new TestHandler({
|
||||
rename: () => new Promise(() => {}),
|
||||
})
|
||||
|
||||
const promise = testHandler.rename('oldPath', 'newPath')
|
||||
jest.advanceTimersByTime(TIMEOUT)
|
||||
await expect(promise).rejects.toThrowError(TimeoutError)
|
||||
})
|
||||
})
|
||||
|
||||
describe('rmdir()', () => {
|
||||
it(`throws in case of timeout`, async () => {
|
||||
const testHandler = new TestHandler({
|
||||
rmdir: () => new Promise(() => {}),
|
||||
})
|
||||
|
||||
const promise = testHandler.rmdir('dir')
|
||||
jest.advanceTimersByTime(TIMEOUT)
|
||||
await expect(promise).rejects.toThrowError(TimeoutError)
|
||||
})
|
||||
})
|
||||
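These unit tests advance time with jest.advanceTimersByTime, which only has an effect while Jest's fake timers are active; nothing in the file as shown enables them, so they are presumably switched on elsewhere, for instance in the project's Jest configuration. One way to enable them for this file, offered as an assumption rather than what the repository actually does:

// assumption: enable fake timers at the top of the spec file
// (or set `timers: 'fake'` in the Jest configuration)
jest.useFakeTimers()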
@@ -1,6 +1,5 @@
// @flow

// $FlowFixMe
import through2 from 'through2'
import { createHash } from 'crypto'
import { defer, fromEvent } from 'promise-toolbox'
@@ -1,26 +0,0 @@
/* eslint-env jest */

import rimraf from 'rimraf'
import tmp from 'tmp'
import { pFromCallback } from 'promise-toolbox'

import { getHandler } from '.'

const initialDir = process.cwd()

beforeEach(async () => {
  const dir = await pFromCallback(cb => tmp.dir(cb))
  process.chdir(dir)
})

afterEach(async () => {
  const tmpDir = process.cwd()
  process.chdir(initialDir)
  await pFromCallback(cb => rimraf(tmpDir, cb))
})

test("fs test doesn't crash", async () => {
  const handler = getHandler({ url: 'file://' + process.cwd() })
  const result = await handler.test()
  expect(result.success).toBeTruthy()
})
312
@xen-orchestra/fs/src/fs.spec.js
Normal file
@@ -0,0 +1,312 @@
|
||||
/* eslint-env jest */
|
||||
|
||||
import 'dotenv/config'
|
||||
import asyncIteratorToStream from 'async-iterator-to-stream'
|
||||
import getStream from 'get-stream'
|
||||
import { fromCallback } from 'promise-toolbox'
|
||||
import { pipeline } from 'readable-stream'
|
||||
import { random } from 'lodash'
|
||||
import { tmpdir } from 'os'
|
||||
|
||||
import { getHandler } from '.'
|
||||
|
||||
// https://gist.github.com/julien-f/3228c3f34fdac01ade09
|
||||
const unsecureRandomBytes = n => {
|
||||
const bytes = Buffer.alloc(n)
|
||||
|
||||
const odd = n & 1
|
||||
for (let i = 0, m = n - odd; i < m; i += 2) {
|
||||
bytes.writeUInt16BE((Math.random() * 65536) | 0, i)
|
||||
}
|
||||
|
||||
if (odd) {
|
||||
bytes.writeUInt8((Math.random() * 256) | 0, n - 1)
|
||||
}
|
||||
|
||||
return bytes
|
||||
}
|
||||
|
||||
const TEST_DATA_LEN = 1024
|
||||
const TEST_DATA = unsecureRandomBytes(TEST_DATA_LEN)
|
||||
const createTestDataStream = asyncIteratorToStream(function*() {
|
||||
yield TEST_DATA
|
||||
})
|
||||
|
||||
const rejectionOf = p =>
|
||||
p.then(
|
||||
value => {
|
||||
throw value
|
||||
},
|
||||
reason => reason
|
||||
)
|
||||
|
||||
const handlers = [`file://${tmpdir()}`]
|
||||
if (process.env.xo_fs_nfs) handlers.push(process.env.xo_fs_nfs)
|
||||
if (process.env.xo_fs_smb) handlers.push(process.env.xo_fs_smb)
|
||||
|
||||
handlers.forEach(url => {
|
||||
describe(url, () => {
|
||||
let handler
|
||||
|
||||
const testWithFileDescriptor = (path, flags, fn) => {
|
||||
it('with path', () => fn({ file: path, flags }))
|
||||
it('with file descriptor', async () => {
|
||||
const file = await handler.openFile(path, flags)
|
||||
try {
|
||||
await fn({ file })
|
||||
} finally {
|
||||
await handler.closeFile(file)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
beforeAll(async () => {
|
||||
handler = getHandler({ url }).addPrefix(`xo-fs-tests-${Date.now()}`)
|
||||
await handler.sync()
|
||||
})
|
||||
afterAll(async () => {
|
||||
await handler.forget()
|
||||
handler = undefined
|
||||
})
|
||||
|
||||
beforeEach(async () => {
|
||||
// ensure test dir exists
|
||||
await handler.mkdir('.')
|
||||
})
|
||||
afterEach(async () => {
|
||||
await handler.rmtree('.')
|
||||
})
|
||||
|
||||
describe('#type', () => {
|
||||
it('returns the type of the remote', () => {
|
||||
expect(typeof handler.type).toBe('string')
|
||||
})
|
||||
})
|
||||
|
||||
describe('#createOutputStream()', () => {
|
||||
it('creates parent dir if missing', async () => {
|
||||
const stream = await handler.createOutputStream('dir/file')
|
||||
await fromCallback(cb => pipeline(createTestDataStream(), stream, cb))
|
||||
await expect(await handler.readFile('dir/file')).toEqual(TEST_DATA)
|
||||
})
|
||||
})
|
||||
|
||||
describe('#createReadStream()', () => {
|
||||
beforeEach(() => handler.outputFile('file', TEST_DATA))
|
||||
|
||||
testWithFileDescriptor('file', 'r', async ({ file, flags }) => {
|
||||
await expect(
|
||||
await getStream.buffer(
|
||||
await handler.createReadStream(file, { flags })
|
||||
)
|
||||
).toEqual(TEST_DATA)
|
||||
})
|
||||
})
|
||||
|
||||
describe('#createWriteStream()', () => {
|
||||
testWithFileDescriptor('file', 'wx', async ({ file, flags }) => {
|
||||
const stream = await handler.createWriteStream(file, { flags })
|
||||
await fromCallback(cb => pipeline(createTestDataStream(), stream, cb))
|
||||
await expect(await handler.readFile('file')).toEqual(TEST_DATA)
|
||||
})
|
||||
|
||||
it('fails if parent dir is missing', async () => {
|
||||
const error = await rejectionOf(handler.createWriteStream('dir/file'))
|
||||
expect(error.code).toBe('ENOENT')
|
||||
})
|
||||
})
|
||||
|
||||
describe('#getInfo()', () => {
|
||||
let info
|
||||
beforeAll(async () => {
|
||||
info = await handler.getInfo()
|
||||
})
|
||||
|
||||
it('should return an object with info', async () => {
|
||||
expect(typeof info).toBe('object')
|
||||
})
|
||||
|
||||
it('should return correct type of attribute', async () => {
|
||||
if (info.size !== undefined) {
|
||||
expect(typeof info.size).toBe('number')
|
||||
}
|
||||
if (info.used !== undefined) {
|
||||
expect(typeof info.used).toBe('number')
|
||||
}
|
||||
})
|
||||
})
|
||||
|
||||
describe('#getSize()', () => {
|
||||
beforeEach(() => handler.outputFile('file', TEST_DATA))
|
||||
|
||||
testWithFileDescriptor('file', 'r', async () => {
|
||||
expect(await handler.getSize('file')).toEqual(TEST_DATA_LEN)
|
||||
})
|
||||
})
|
||||
|
||||
describe('#list()', () => {
|
||||
it(`should list the content of folder`, async () => {
|
||||
await handler.outputFile('file', TEST_DATA)
|
||||
await expect(await handler.list('.')).toEqual(['file'])
|
||||
})
|
||||
|
||||
it('can prepend the directory to entries', async () => {
|
||||
await handler.outputFile('dir/file', '')
|
||||
expect(await handler.list('dir', { prependDir: true })).toEqual([
|
||||
'/dir/file',
|
||||
])
|
||||
})
})
|
||||
|
||||
describe('#mkdir()', () => {
|
||||
it('creates a directory', async () => {
|
||||
await handler.mkdir('dir')
|
||||
await expect(await handler.list('.')).toEqual(['dir'])
|
||||
})
|
||||
|
||||
it('does not throw on existing directory', async () => {
|
||||
await handler.mkdir('dir')
|
||||
await handler.mkdir('dir')
|
||||
})
|
||||
|
||||
it('throws ENOTDIR on existing file', async () => {
|
||||
await handler.outputFile('file', '')
|
||||
const error = await rejectionOf(handler.mkdir('file'))
|
||||
expect(error.code).toBe('ENOTDIR')
|
||||
})
|
||||
})
|
||||
|
||||
describe('#mktree()', () => {
|
||||
it('creates a tree of directories', async () => {
|
||||
await handler.mktree('dir/dir')
|
||||
await expect(await handler.list('.')).toEqual(['dir'])
|
||||
await expect(await handler.list('dir')).toEqual(['dir'])
|
||||
})
|
||||
|
||||
it('does not throw on existing directory', async () => {
|
||||
await handler.mktree('dir/dir')
|
||||
await handler.mktree('dir/dir')
|
||||
})
|
||||
|
||||
it('throws ENOTDIR on existing file', async () => {
|
||||
await handler.outputFile('dir/file', '')
|
||||
const error = await rejectionOf(handler.mktree('dir/file'))
|
||||
expect(error.code).toBe('ENOTDIR')
|
||||
})
|
||||
|
||||
it('throws ENOTDIR on existing file in path', async () => {
|
||||
await handler.outputFile('file', '')
|
||||
const error = await rejectionOf(handler.mktree('file/dir'))
|
||||
expect(error.code).toBe('ENOTDIR')
|
||||
})
|
||||
})
|
||||
|
||||
describe('#outputFile()', () => {
|
||||
it('writes data to a file', async () => {
|
||||
await handler.outputFile('file', TEST_DATA)
|
||||
expect(await handler.readFile('file')).toEqual(TEST_DATA)
|
||||
})
|
||||
|
||||
it('throws on existing files', async () => {
|
||||
await handler.outputFile('file', '')
|
||||
const error = await rejectionOf(handler.outputFile('file', ''))
|
||||
expect(error.code).toBe('EEXIST')
|
||||
})
|
||||
})
|
||||
|
||||
describe('#read()', () => {
|
||||
beforeEach(() => handler.outputFile('file', TEST_DATA))
|
||||
|
||||
const start = random(TEST_DATA_LEN)
|
||||
const size = random(TEST_DATA_LEN)
|
||||
|
||||
testWithFileDescriptor('file', 'r', async ({ file }) => {
|
||||
const buffer = Buffer.alloc(size)
|
||||
const result = await handler.read(file, buffer, start)
|
||||
expect(result.buffer).toBe(buffer)
|
||||
expect(result).toEqual({
|
||||
buffer,
|
||||
bytesRead: Math.min(size, TEST_DATA_LEN - start),
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe('#readFile', () => {
|
||||
it('returns a buffer containing the contents of the file', async () => {
|
||||
await handler.outputFile('file', TEST_DATA)
|
||||
expect(await handler.readFile('file')).toEqual(TEST_DATA)
|
||||
})
|
||||
|
||||
it('throws on missing file', async () => {
|
||||
const error = await rejectionOf(handler.readFile('file'))
|
||||
expect(error.code).toBe('ENOENT')
|
||||
})
|
||||
})
|
||||
|
||||
describe('#rename()', () => {
|
||||
it(`should rename the file`, async () => {
|
||||
await handler.outputFile('file', TEST_DATA)
|
||||
await handler.rename('file', `file2`)
|
||||
|
||||
expect(await handler.list('.')).toEqual(['file2'])
|
||||
expect(await handler.readFile(`file2`)).toEqual(TEST_DATA)
|
||||
})
|
||||
})
|
||||
|
||||
describe('#rmdir()', () => {
|
||||
it('should remove an empty directory', async () => {
|
||||
await handler.mkdir('dir')
|
||||
await handler.rmdir('dir')
|
||||
expect(await handler.list('.')).toEqual([])
|
||||
})
|
||||
|
||||
it(`should throw on non-empty directory`, async () => {
|
||||
await handler.outputFile('dir/file', '')
|
||||
|
||||
const error = await rejectionOf(handler.rmdir('.'))
|
||||
await expect(error.code).toEqual('ENOTEMPTY')
|
||||
})
|
||||
|
||||
it('does not throw on missing directory', async () => {
|
||||
await handler.rmdir('dir')
|
||||
})
|
||||
})
|
||||
|
||||
describe('#rmtree', () => {
|
||||
it(`should remove a directory recursively`, async () => {
|
||||
await handler.outputFile('dir/file', '')
|
||||
await handler.rmtree('dir')
|
||||
|
||||
expect(await handler.list('.')).toEqual([])
|
||||
})
|
||||
})
|
||||
|
||||
describe('#test()', () => {
|
||||
it('tests the remote appears to be working', async () => {
|
||||
expect(await handler.test()).toEqual({
|
||||
success: true,
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe('#unlink()', () => {
|
||||
it(`should remove the file`, async () => {
|
||||
await handler.outputFile('file', TEST_DATA)
|
||||
await handler.unlink('file')
|
||||
|
||||
await expect(await handler.list('.')).toEqual([])
|
||||
})
|
||||
|
||||
it('does not throw on missing file', async () => {
|
||||
await handler.unlink('file')
|
||||
})
|
||||
})
|
||||
})
|
||||
})
@@ -1,20 +1,28 @@
// @flow
import execa from 'execa'

import type RemoteHandler from './abstract'
import RemoteHandlerLocal from './local'
import RemoteHandlerNfs from './nfs'
import RemoteHandlerSmb from './smb'
import RemoteHandlerSmbMount from './smb-mount'

export type { default as RemoteHandler } from './abstract'
export type Remote = { url: string }

const HANDLERS = {
  file: RemoteHandlerLocal,
  smb: RemoteHandlerSmb,
  nfs: RemoteHandlerNfs,
}

export const getHandler = (remote: Remote): RemoteHandler => {
try {
  execa.sync('mount.cifs', ['-V'])
  HANDLERS.smb = RemoteHandlerSmbMount
} catch (_) {
  HANDLERS.smb = RemoteHandlerSmb
}

export const getHandler = (remote: Remote, ...rest: any): RemoteHandler => {
  // FIXME: should be done in xo-remote-parser.
  const type = remote.url.split('://')[0]

@@ -22,5 +30,5 @@ export const getHandler = (remote: Remote): RemoteHandler => {
  if (!Handler) {
    throw new Error('Unhandled remote type')
  }
  return new Handler(remote)
  return new Handler(remote, ...rest)
}
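The exported `getHandler` factory above picks a handler class from the URL scheme of the remote, preferring the `mount.cifs`-based SMB handler when the tool is available. A minimal usage sketch, based on the handler API exercised by the tests earlier in this diff (the NFS URL is illustrative only):

```js
import { getHandler } from '@xen-orchestra/fs'

async function listRemoteRoot() {
  // the scheme (file://, nfs://, smb://) selects the handler class
  const handler = getHandler({ url: 'nfs://192.0.2.10:/exports/backups' })
  await handler.sync() // mount/connect the remote
  try {
    return await handler.list('.')
  } finally {
    await handler.forget() // unmount/disconnect
  }
}
```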
@@ -1,51 +1,76 @@
|
||||
import df from '@sindresorhus/df'
|
||||
import fs from 'fs-extra'
|
||||
import { dirname, resolve } from 'path'
|
||||
import { noop, startsWith } from 'lodash'
|
||||
import { fromEvent } from 'promise-toolbox'
|
||||
|
||||
import RemoteHandlerAbstract from './abstract'
|
||||
|
||||
export default class LocalHandler extends RemoteHandlerAbstract {
|
||||
get type () {
|
||||
get type() {
|
||||
return 'file'
|
||||
}
|
||||
|
||||
_getRealPath () {
|
||||
_getRealPath() {
|
||||
return this._remote.path
|
||||
}
|
||||
|
||||
_getFilePath (file) {
|
||||
const realPath = this._getRealPath()
|
||||
const parts = [realPath]
|
||||
if (file) {
|
||||
parts.push(file)
|
||||
_getFilePath(file) {
|
||||
return this._getRealPath() + file
|
||||
}
|
||||
|
||||
async _closeFile(fd) {
|
||||
return fs.close(fd)
|
||||
}
|
||||
|
||||
async _createReadStream(file, options) {
|
||||
if (typeof file === 'string') {
|
||||
const stream = fs.createReadStream(this._getFilePath(file), options)
|
||||
await fromEvent(stream, 'open')
|
||||
return stream
|
||||
}
|
||||
const path = resolve.apply(null, parts)
|
||||
if (!startsWith(path, realPath)) {
|
||||
throw new Error('Remote path is unavailable')
|
||||
return fs.createReadStream('', {
|
||||
autoClose: false,
|
||||
...options,
|
||||
fd: file.fd,
|
||||
})
|
||||
}
|
||||
|
||||
async _createWriteStream(file, options) {
|
||||
if (typeof file === 'string') {
|
||||
const stream = fs.createWriteStream(this._getFilePath(file), options)
|
||||
await fromEvent(stream, 'open')
|
||||
return stream
|
||||
}
|
||||
return path
|
||||
return fs.createWriteStream('', {
|
||||
autoClose: false,
|
||||
...options,
|
||||
fd: file.fd,
|
||||
})
|
||||
}
|
||||
|
||||
async _sync () {
|
||||
if (this._remote.enabled) {
|
||||
const path = this._getRealPath()
|
||||
await fs.ensureDir(path)
|
||||
await fs.access(path, fs.R_OK | fs.W_OK)
|
||||
}
|
||||
return this._remote
|
||||
_getInfo() {
|
||||
return df.file(this._getFilePath('/'))
|
||||
}
|
||||
|
||||
async _forget () {
|
||||
return noop()
|
||||
async _getSize(file) {
|
||||
const stats = await fs.stat(
|
||||
this._getFilePath(typeof file === 'string' ? file : file.path)
|
||||
)
|
||||
return stats.size
|
||||
}
|
||||
|
||||
async _outputFile (file, data, options) {
|
||||
const path = this._getFilePath(file)
|
||||
await fs.ensureDir(dirname(path))
|
||||
await fs.writeFile(path, data, options)
|
||||
async _list(dir) {
|
||||
return fs.readdir(this._getFilePath(dir))
|
||||
}
|
||||
|
||||
async _read (file, buffer, position) {
|
||||
_mkdir(dir) {
|
||||
return fs.mkdir(this._getFilePath(dir))
|
||||
}
|
||||
|
||||
async _openFile(path, flags) {
|
||||
return fs.open(this._getFilePath(path), flags)
|
||||
}
|
||||
|
||||
async _read(file, buffer, position) {
|
||||
const needsClose = typeof file === 'string'
|
||||
file = needsClose ? await fs.open(this._getFilePath(file), 'r') : file.fd
|
||||
try {
|
||||
@@ -63,62 +88,29 @@ export default class LocalHandler extends RemoteHandlerAbstract {
|
||||
}
|
||||
}
|
||||
|
||||
async _readFile (file, options) {
|
||||
async _readFile(file, options) {
|
||||
return fs.readFile(this._getFilePath(file), options)
|
||||
}
|
||||
|
||||
async _rename (oldPath, newPath) {
|
||||
async _rename(oldPath, newPath) {
|
||||
return fs.rename(this._getFilePath(oldPath), this._getFilePath(newPath))
|
||||
}
|
||||
|
||||
async _list (dir = '.') {
|
||||
return fs.readdir(this._getFilePath(dir))
|
||||
async _rmdir(dir) {
|
||||
return fs.rmdir(this._getFilePath(dir))
|
||||
}
|
||||
|
||||
async _createReadStream (file, options) {
|
||||
return typeof file === 'string'
|
||||
? fs.createReadStream(this._getFilePath(file), options)
|
||||
: fs.createReadStream('', {
|
||||
autoClose: false,
|
||||
...options,
|
||||
fd: file.fd,
|
||||
})
|
||||
async _sync() {
|
||||
const path = this._getRealPath('/')
|
||||
await fs.ensureDir(path)
|
||||
await fs.access(path, fs.R_OK | fs.W_OK)
|
||||
}
|
||||
|
||||
async _createOutputStream (file, options) {
|
||||
if (typeof file === 'string') {
|
||||
const path = this._getFilePath(file)
|
||||
await fs.ensureDir(dirname(path))
|
||||
return fs.createWriteStream(path, options)
|
||||
}
|
||||
return fs.createWriteStream('', {
|
||||
autoClose: false,
|
||||
...options,
|
||||
fd: file.fd,
|
||||
})
|
||||
async _unlink(file) {
|
||||
return fs.unlink(this._getFilePath(file))
|
||||
}
|
||||
|
||||
async _unlink (file) {
|
||||
return fs.unlink(this._getFilePath(file)).catch(error => {
|
||||
// do not throw if the file did not exist
|
||||
if (error == null || error.code !== 'ENOENT') {
|
||||
throw error
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
async _getSize (file) {
|
||||
const stats = await fs.stat(
|
||||
this._getFilePath(typeof file === 'string' ? file : file.path)
|
||||
)
|
||||
return stats.size
|
||||
}
|
||||
|
||||
async _openFile (path, flags) {
|
||||
return fs.open(this._getFilePath(path), flags)
|
||||
}
|
||||
|
||||
async _closeFile (fd) {
|
||||
return fs.close(fd)
|
||||
_writeFile(file, data, { flags }) {
|
||||
return fs.writeFile(this._getFilePath(file), data, { flag: flags })
|
||||
}
|
||||
}
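Every low-level method of the reworked local handler now accepts either a path string or the file-descriptor object returned by `openFile`, mirroring the `testWithFileDescriptor` helper in the test suite above. A small sketch of that calling convention (the file name and buffer size are made up for illustration):

```js
// `handler` is assumed to be a handler whose remote already contains 'backup.vhd'.
async function readHeader(handler) {
  const file = await handler.openFile('backup.vhd', 'r')
  try {
    const buffer = Buffer.alloc(512)
    // `read` behaves the same whether it is given 'backup.vhd' or `file`;
    // it resolves to { buffer, bytesRead } as asserted in the #read() tests.
    const { bytesRead } = await handler.read(file, buffer, 0)
    return buffer.slice(0, bytesRead)
  } finally {
    await handler.closeFile(file)
  }
}
```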
@@ -1,89 +1,21 @@
import execa from 'execa'
import fs from 'fs-extra'
import { forEach } from 'lodash'
import { parse } from 'xo-remote-parser'

import LocalHandler from './local'
import MountHandler from './_mount'

const DEFAULT_NFS_OPTIONS = 'vers=3'

export default class NfsHandler extends LocalHandler {
  get type () {
export default class NfsHandler extends MountHandler {
  constructor(remote, opts) {
    const { host, port, path, options } = parse(remote.url)
    super(remote, opts, {
      type: 'nfs',
      device: `${host}${port !== undefined ? ':' + port : ''}:${path}`,
      options:
        DEFAULT_NFS_OPTIONS + (options !== undefined ? `,${options}` : ''),
    })
  }

  get type() {
    return 'nfs'
  }

  _getRealPath () {
    return `/run/xo-server/mounts/${this._remote.id}`
  }

  async _loadRealMounts () {
    let stdout
    const mounted = {}
    try {
      stdout = await execa.stdout('findmnt', [
        '-P',
        '-t',
        'nfs,nfs4',
        '--output',
        'SOURCE,TARGET',
        '--noheadings',
      ])
      const regex = /^SOURCE="([^:]*):(.*)" TARGET="(.*)"$/
      forEach(stdout.split('\n'), m => {
        if (m) {
          const match = regex.exec(m)
          mounted[match[3]] = {
            host: match[1],
            share: match[2],
          }
        }
      })
    } catch (exc) {
      // When no mounts are found, the call pretends to fail...
      if (exc.stderr !== '') {
        throw exc
      }
    }

    this._realMounts = mounted
    return mounted
  }

  _matchesRealMount () {
    return this._getRealPath() in this._realMounts
  }

  async _mount () {
    await fs.ensureDir(this._getRealPath())
    const { host, path, port, options } = this._remote
    return execa('mount', [
      '-t',
      'nfs',
      '-o',
      DEFAULT_NFS_OPTIONS + (options !== undefined ? `,${options}` : ''),
      `${host}${port !== undefined ? ':' + port : ''}:${path}`,
      this._getRealPath(),
    ])
  }

  async _sync () {
    await this._loadRealMounts()
    if (this._matchesRealMount() && !this._remote.enabled) {
      await this._umount(this._remote)
    } else if (!this._matchesRealMount() && this._remote.enabled) {
      await this._mount()
    }
    return this._remote
  }

  async _forget () {
    try {
      await this._umount(this._remote)
    } catch (_) {
      // We have to go on...
    }
  }

  async _umount (remote) {
    await execa('umount', ['--force', this._getRealPath()])
  }
}
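For reference, this is roughly what the new constructor computes for an NFS remote (a sketch, assuming `xo-remote-parser` yields the `host`, `port`, `path` and `options` fields destructured above):

```js
// assumed parse() result for a remote exporting /exports/backups on 192.0.2.10
const host = '192.0.2.10'
const port = undefined
const path = '/exports/backups'
const options = undefined

// values handed to MountHandler by the constructor
const device = `${host}${port !== undefined ? ':' + port : ''}:${path}`
// -> '192.0.2.10:/exports/backups'
const mountOptions = 'vers=3' + (options !== undefined ? `,${options}` : '')
// -> 'vers=3' (DEFAULT_NFS_OPTIONS)
```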
@xen-orchestra/fs/src/smb-mount.js
@@ -0,0 +1,31 @@
import { parse } from 'xo-remote-parser'

import MountHandler from './_mount'
import normalizePath from './_normalizePath'

export default class SmbMountHandler extends MountHandler {
  constructor(remote, opts) {
    const {
      domain = 'WORKGROUP',
      host,
      options,
      password,
      path,
      username,
    } = parse(remote.url)
    super(remote, opts, {
      type: 'cifs',
      device: '//' + host + normalizePath(path),
      options:
        `domain=${domain}` + (options !== undefined ? `,${options}` : ''),
      env: {
        USER: username,
        PASSWD: password,
      },
    })
  }

  get type() {
    return 'smb'
  }
}
@@ -1,244 +1,167 @@
|
||||
import Smb2 from '@marsaud/smb2'
|
||||
import { pFinally } from 'promise-toolbox'
|
||||
|
||||
import RemoteHandlerAbstract from './abstract'
|
||||
|
||||
const noop = () => {}
|
||||
|
||||
// Normalize the error code for file not found.
|
||||
const normalizeError = error => {
|
||||
const wrapError = (error, code) => ({
|
||||
__proto__: error,
|
||||
cause: error,
|
||||
code,
|
||||
})
|
||||
const normalizeError = (error, shouldBeDirectory) => {
|
||||
const { code } = error
|
||||
|
||||
return code === 'STATUS_OBJECT_NAME_NOT_FOUND' ||
|
||||
code === 'STATUS_OBJECT_PATH_NOT_FOUND'
|
||||
? Object.create(error, {
|
||||
code: {
|
||||
configurable: true,
|
||||
readable: true,
|
||||
value: 'ENOENT',
|
||||
writable: true,
|
||||
},
|
||||
})
|
||||
throw code === 'STATUS_DIRECTORY_NOT_EMPTY'
|
||||
? wrapError(error, 'ENOTEMPTY')
|
||||
: code === 'STATUS_FILE_IS_A_DIRECTORY'
|
||||
? wrapError(error, 'EISDIR')
|
||||
: code === 'STATUS_NOT_A_DIRECTORY'
|
||||
? wrapError(error, 'ENOTDIR')
|
||||
: code === 'STATUS_OBJECT_NAME_NOT_FOUND' ||
|
||||
code === 'STATUS_OBJECT_PATH_NOT_FOUND'
|
||||
? wrapError(error, 'ENOENT')
|
||||
: code === 'STATUS_OBJECT_NAME_COLLISION'
|
||||
? wrapError(error, 'EEXIST')
|
||||
: code === 'STATUS_NOT_SUPPORTED' || code === 'STATUS_INVALID_PARAMETER'
|
||||
? wrapError(error, shouldBeDirectory ? 'ENOTDIR' : 'EISDIR')
|
||||
: error
|
||||
}
|
||||
const normalizeDirError = error => normalizeError(error, true)
|
||||
|
||||
export default class SmbHandler extends RemoteHandlerAbstract {
|
||||
constructor (remote) {
|
||||
super(remote)
|
||||
this._forget = noop
|
||||
constructor(remote, opts) {
|
||||
super(remote, opts)
|
||||
|
||||
// defined in _sync()
|
||||
this._client = undefined
|
||||
|
||||
const prefix = this._remote.path
|
||||
this._prefix = prefix !== '' ? prefix + '\\' : prefix
|
||||
}
|
||||
|
||||
get type () {
|
||||
get type() {
|
||||
return 'smb'
|
||||
}
|
||||
|
||||
_getClient () {
|
||||
_getFilePath(file) {
|
||||
return (
|
||||
this._prefix +
|
||||
(typeof file === 'string' ? file : file.path)
|
||||
.slice(1)
|
||||
.replace(/\//g, '\\')
|
||||
)
|
||||
}
|
||||
|
||||
_dirname(file) {
|
||||
const parts = file.split('\\')
|
||||
parts.pop()
|
||||
return parts.join('\\')
|
||||
}
|
||||
|
||||
_closeFile(file) {
|
||||
return this._client.close(file).catch(normalizeError)
|
||||
}
|
||||
|
||||
_createReadStream(file, options) {
|
||||
if (typeof file === 'string') {
|
||||
file = this._getFilePath(file)
|
||||
} else {
|
||||
options = { autoClose: false, ...options, fd: file.fd }
|
||||
file = ''
|
||||
}
|
||||
return this._client.createReadStream(file, options).catch(normalizeError)
|
||||
}
|
||||
|
||||
_createWriteStream(file, options) {
|
||||
if (typeof file === 'string') {
|
||||
file = this._getFilePath(file)
|
||||
} else {
|
||||
options = { autoClose: false, ...options, fd: file.fd }
|
||||
file = ''
|
||||
}
|
||||
return this._client.createWriteStream(file, options).catch(normalizeError)
|
||||
}
|
||||
|
||||
_forget() {
|
||||
const client = this._client
|
||||
this._client = undefined
|
||||
return client.disconnect()
|
||||
}
|
||||
|
||||
_getSize(file) {
|
||||
return this._client.getSize(this._getFilePath(file)).catch(normalizeError)
|
||||
}
|
||||
|
||||
_list(dir) {
|
||||
return this._client.readdir(this._getFilePath(dir)).catch(normalizeDirError)
|
||||
}
|
||||
|
||||
_mkdir(dir) {
|
||||
return this._client.mkdir(this._getFilePath(dir)).catch(normalizeDirError)
|
||||
}
|
||||
|
||||
// TODO: add flags
|
||||
_openFile(path, flags) {
|
||||
return this._client
|
||||
.open(this._getFilePath(path), flags)
|
||||
.catch(normalizeError)
|
||||
}
|
||||
|
||||
async _read(file, buffer, position) {
|
||||
const client = this._client
|
||||
const needsClose = typeof file === 'string'
|
||||
file = needsClose ? await client.open(this._getFilePath(file)) : file.fd
|
||||
try {
|
||||
return await client.read(file, buffer, 0, buffer.length, position)
|
||||
} catch (error) {
|
||||
normalizeError(error)
|
||||
} finally {
|
||||
if (needsClose) {
|
||||
await client.close(file)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
_readFile(file, options) {
|
||||
return this._client
|
||||
.readFile(this._getFilePath(file), options)
|
||||
.catch(normalizeError)
|
||||
}
|
||||
|
||||
_rename(oldPath, newPath) {
|
||||
return this._client
|
||||
.rename(this._getFilePath(oldPath), this._getFilePath(newPath), {
|
||||
replace: true,
|
||||
})
|
||||
.catch(normalizeError)
|
||||
}
|
||||
|
||||
_rmdir(dir) {
|
||||
return this._client.rmdir(this._getFilePath(dir)).catch(normalizeDirError)
|
||||
}
|
||||
|
||||
_sync() {
|
||||
const remote = this._remote
|
||||
|
||||
return new Smb2({
|
||||
this._client = new Smb2({
|
||||
share: `\\\\${remote.host}`,
|
||||
domain: remote.domain,
|
||||
username: remote.username,
|
||||
password: remote.password,
|
||||
autoCloseTimeout: 0,
|
||||
})
|
||||
|
||||
// Check access (smb2 does not expose connect in public so far...)
|
||||
return this.list('.')
|
||||
}
|
||||
|
||||
_getFilePath (file) {
|
||||
if (file === '.') {
|
||||
file = undefined
|
||||
}
|
||||
|
||||
let path = this._remote.path !== '' ? this._remote.path : ''
|
||||
|
||||
// Ensure remote path is a directory.
|
||||
if (path !== '' && path[path.length - 1] !== '\\') {
|
||||
path += '\\'
|
||||
}
|
||||
|
||||
if (file) {
|
||||
path += file.replace(/\//g, '\\')
|
||||
}
|
||||
|
||||
return path
|
||||
_unlink(file) {
|
||||
return this._client.unlink(this._getFilePath(file)).catch(normalizeError)
|
||||
}
|
||||
|
||||
_dirname (file) {
|
||||
const parts = file.split('\\')
|
||||
parts.pop()
|
||||
return parts.join('\\')
|
||||
}
|
||||
|
||||
async _sync () {
|
||||
if (this._remote.enabled) {
|
||||
// Check access (smb2 does not expose connect in public so far...)
|
||||
await this.list()
|
||||
}
|
||||
return this._remote
|
||||
}
|
||||
|
||||
async _outputFile (file, data, options = {}) {
|
||||
const client = this._getClient()
|
||||
const path = this._getFilePath(file)
|
||||
const dir = this._dirname(path)
|
||||
|
||||
if (dir) {
|
||||
await client.ensureDir(dir)
|
||||
}
|
||||
|
||||
return client.writeFile(path, data, options)::pFinally(() => {
|
||||
client.disconnect()
|
||||
})
|
||||
}
|
||||
|
||||
async _read (file, buffer, position) {
|
||||
const needsClose = typeof file === 'string'
|
||||
|
||||
let client
|
||||
if (needsClose) {
|
||||
client = this._getClient()
|
||||
file = await client.open(this._getFilePath(file))
|
||||
} else {
|
||||
;({ client, file } = file.fd)
|
||||
}
|
||||
|
||||
try {
|
||||
return await client.read(file, buffer, 0, buffer.length, position)
|
||||
} finally {
|
||||
if (needsClose) {
|
||||
await client.close(file)
|
||||
client.disconnect()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async _readFile (file, options = {}) {
|
||||
const client = this._getClient()
|
||||
let content
|
||||
|
||||
try {
|
||||
content = await client
|
||||
.readFile(this._getFilePath(file), options)
|
||||
::pFinally(() => {
|
||||
client.disconnect()
|
||||
})
|
||||
} catch (error) {
|
||||
throw normalizeError(error)
|
||||
}
|
||||
|
||||
return content
|
||||
}
|
||||
|
||||
async _rename (oldPath, newPath) {
|
||||
const client = this._getClient()
|
||||
|
||||
try {
|
||||
await client
|
||||
.rename(this._getFilePath(oldPath), this._getFilePath(newPath), {
|
||||
replace: true,
|
||||
})
|
||||
::pFinally(() => {
|
||||
client.disconnect()
|
||||
})
|
||||
} catch (error) {
|
||||
throw normalizeError(error)
|
||||
}
|
||||
}
|
||||
|
||||
async _list (dir = '.') {
|
||||
const client = this._getClient()
|
||||
let list
|
||||
|
||||
try {
|
||||
list = await client.readdir(this._getFilePath(dir))::pFinally(() => {
|
||||
client.disconnect()
|
||||
})
|
||||
} catch (error) {
|
||||
throw normalizeError(error)
|
||||
}
|
||||
|
||||
return list
|
||||
}
|
||||
|
||||
async _createReadStream (file, options = {}) {
|
||||
if (typeof file !== 'string') {
|
||||
file = file.path
|
||||
}
|
||||
const client = this._getClient()
|
||||
let stream
|
||||
|
||||
try {
|
||||
// FIXME ensure that options are properly handled by @marsaud/smb2
|
||||
stream = await client.createReadStream(this._getFilePath(file), options)
|
||||
stream.on('end', () => client.disconnect())
|
||||
} catch (error) {
|
||||
throw normalizeError(error)
|
||||
}
|
||||
|
||||
return stream
|
||||
}
|
||||
|
||||
async _createOutputStream (file, options = {}) {
|
||||
if (typeof file !== 'string') {
|
||||
file = file.path
|
||||
}
|
||||
const client = this._getClient()
|
||||
const path = this._getFilePath(file)
|
||||
const dir = this._dirname(path)
|
||||
let stream
|
||||
try {
|
||||
if (dir) {
|
||||
await client.ensureDir(dir)
|
||||
}
|
||||
stream = await client.createWriteStream(path, options) // FIXME ensure that options are properly handled by @marsaud/smb2
|
||||
} catch (err) {
|
||||
client.disconnect()
|
||||
throw err
|
||||
}
|
||||
stream.on('finish', () => client.disconnect())
|
||||
return stream
|
||||
}
|
||||
|
||||
async _unlink (file) {
|
||||
const client = this._getClient()
|
||||
|
||||
try {
|
||||
await client.unlink(this._getFilePath(file))::pFinally(() => {
|
||||
client.disconnect()
|
||||
})
|
||||
} catch (error) {
|
||||
throw normalizeError(error)
|
||||
}
|
||||
}
|
||||
|
||||
async _getSize (file) {
|
||||
const client = await this._getClient()
|
||||
let size
|
||||
|
||||
try {
|
||||
size = await client
|
||||
.getSize(this._getFilePath(typeof file === 'string' ? file : file.path))
|
||||
::pFinally(() => {
|
||||
client.disconnect()
|
||||
})
|
||||
} catch (error) {
|
||||
throw normalizeError(error)
|
||||
}
|
||||
|
||||
return size
|
||||
}
|
||||
|
||||
// TODO: add flags
|
||||
async _openFile (path) {
|
||||
const client = this._getClient()
|
||||
return {
|
||||
client,
|
||||
file: await client.open(this._getFilePath(path)),
|
||||
}
|
||||
}
|
||||
|
||||
async _closeFile ({ client, file }) {
|
||||
await client.close(file)
|
||||
client.disconnect()
|
||||
_writeFile(file, data, options) {
|
||||
return this._client
|
||||
.writeFile(this._getFilePath(file), data, options)
|
||||
.catch(normalizeError)
|
||||
}
|
||||
}
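A quick sketch of what the new `normalizeError` wrapper means for callers of the SMB handler: the raw SMB status is re-exposed as a Node-style `code`, while the original error stays reachable through `cause` and the prototype chain (the `ENOTDIR` case shown is one of the mappings above):

```js
// `handler` is assumed to be an SmbHandler instance.
async function mkdirOverFile(handler) {
  try {
    await handler.mkdir('already-a-file')
  } catch (error) {
    console.log(error.code) // e.g. 'ENOTDIR', normalized from the raw SMB status
    console.log(error.cause.code) // the original STATUS_* code from @marsaud/smb2
  }
}
```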
@@ -75,7 +75,7 @@ catchGlobalErrors(transport)

```js
import transportConsole from '@xen-orchestra/log/transports/console'

configure(transports.console())
configure(transportConsole())
```

#### Email
@@ -1,7 +1,6 @@
{
  "private": true,
  "name": "@xen-orchestra/log",
  "version": "0.0.0",
  "version": "0.1.4",
  "license": "ISC",
  "description": "",
  "keywords": [],
@@ -19,7 +18,9 @@
  "main": "dist/",
  "bin": {},
  "files": [
    "dist/"
    "configure.js",
    "dist/",
    "transports/"
  ],
  "browserslist": [
    ">2%"
@@ -28,14 +29,13 @@
    "node": ">=4"
  },
  "dependencies": {
    "@babel/polyfill": "7.0.0",
    "lodash": "^4.17.4",
    "promise-toolbox": "^0.10.1"
    "promise-toolbox": "^0.11.0"
  },
  "devDependencies": {
    "@babel/cli": "7.0.0",
    "@babel/core": "7.0.0",
    "@babel/preset-env": "7.0.0",
    "@babel/cli": "^7.0.0",
    "@babel/core": "^7.0.0",
    "@babel/preset-env": "^7.0.0",
    "babel-plugin-lodash": "^3.3.2",
    "cross-env": "^5.1.3",
    "index-modules": "^0.3.0",
|
||||
if (Array.isArray(config)) {
|
||||
const transports = config.map(createTransport)
|
||||
const { length } = transports
|
||||
return function () {
|
||||
return function() {
|
||||
for (let i = 0; i < length; ++i) {
|
||||
transports[i].apply(this, arguments)
|
||||
}
|
||||
@@ -29,14 +29,14 @@ const createTransport = config => {
|
||||
}
|
||||
|
||||
const orig = transport
|
||||
transport = function (log) {
|
||||
transport = function(log) {
|
||||
if ((level !== undefined && log.level >= level) || filter(log)) {
|
||||
return orig.apply(this, arguments)
|
||||
}
|
||||
}
|
||||
} else if (level !== undefined) {
|
||||
const orig = transport
|
||||
transport = function (log) {
|
||||
transport = function(log) {
|
||||
if (log.level >= level) {
|
||||
return orig.apply(this, arguments)
|
||||
}
|
||||
@@ -85,8 +85,8 @@ export const catchGlobalErrors = logger => {
|
||||
const EventEmitter = require('events')
|
||||
const { prototype } = EventEmitter
|
||||
const { emit } = prototype
|
||||
function patchedEmit (event, error) {
|
||||
if (event === 'error' && !this.listenerCount(event)) {
|
||||
function patchedEmit(event, error) {
|
||||
if (event === 'error' && this.listenerCount(event) === 0) {
|
||||
logger.error('unhandled error event', { error })
|
||||
return false
|
||||
}
|
||||
|
||||
@@ -14,7 +14,7 @@ if (!(symbol in global)) {
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
function Log (data, level, namespace, message, time) {
|
||||
function Log(data, level, namespace, message, time) {
|
||||
this.data = data
|
||||
this.level = level
|
||||
this.namespace = namespace
|
||||
@@ -22,7 +22,7 @@ function Log (data, level, namespace, message, time) {
|
||||
this.time = time
|
||||
}
|
||||
|
||||
function Logger (namespace) {
|
||||
function Logger(namespace) {
|
||||
this._namespace = namespace
|
||||
|
||||
// bind all logging methods
|
||||
@@ -37,18 +37,29 @@ const { prototype } = Logger
|
||||
for (const name in LEVELS) {
|
||||
const level = LEVELS[name]
|
||||
|
||||
prototype[name.toLowerCase()] = function (message, data) {
|
||||
prototype[name.toLowerCase()] = function(message, data) {
|
||||
if (typeof message !== 'string') {
|
||||
if (message instanceof Error) {
|
||||
data = { error: message }
|
||||
;({ message = 'an error has occurred' } = message)
|
||||
} else {
|
||||
return this.warn('incorrect value passed to logger', {
|
||||
level,
|
||||
value: message,
|
||||
})
|
||||
}
|
||||
}
|
||||
global[symbol](new Log(data, level, this._namespace, message, new Date()))
|
||||
}
|
||||
}
|
||||
|
||||
prototype.wrap = function (message, fn) {
|
||||
prototype.wrap = function(message, fn) {
|
||||
const logger = this
|
||||
const warnAndRethrow = error => {
|
||||
logger.warn(message, { error })
|
||||
throw error
|
||||
}
|
||||
return function () {
|
||||
return function() {
|
||||
try {
|
||||
const result = fn.apply(this, arguments)
|
||||
const then = result != null && result.then
|
||||
|
||||
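The logger level methods in the hunk above accept either a message string plus a data object, or an `Error` instance directly; anything else is turned into an "incorrect value passed to logger" warning. A small usage sketch (`log` is assumed to be a Logger created for some namespace, e.g. through the package's `createLogger` export):

```js
function reportBackup(log, error) {
  log.info('backup started', { jobId: '42' })
  if (error !== undefined) {
    // an Error is accepted as-is: it is stored under data.error and its
    // message (or 'an error has occurred') becomes the log message
    log.error(error)
  }
}
```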
@@ -13,10 +13,10 @@ const consoleTransport = ({ data, level, namespace, message, time }) => {
    level < INFO
      ? debugConsole
      : level < WARN
        ? infoConsole
        : level < ERROR
          ? warnConsole
          : errorConsole
      ? infoConsole
      : level < ERROR
        ? warnConsole
        : errorConsole

  fn('%s - %s - [%s] %s', time.toISOString(), namespace, NAMES[level], message)
  data != null && fn(data)
@@ -53,14 +53,12 @@ export default ({
|
||||
fromCallback(cb =>
|
||||
transporter.sendMail(
|
||||
{
|
||||
subject: evalTemplate(
|
||||
subject,
|
||||
key =>
|
||||
key === 'level'
|
||||
? NAMES[log.level]
|
||||
: key === 'time'
|
||||
? log.time.toISOString()
|
||||
: log[key]
|
||||
subject: evalTemplate(subject, key =>
|
||||
key === 'level'
|
||||
? NAMES[log.level]
|
||||
: key === 'time'
|
||||
? log.time.toISOString()
|
||||
: log[key]
|
||||
),
|
||||
text: prettyFormat(log.data),
|
||||
},
|
||||
|
||||
@@ -55,7 +55,8 @@ export const required = name => {
// -------------------------------------------------------------------

export const serializeError = error => ({
  ...error,
  ...error, // Copy enumerable properties.
  code: error.code,
  message: error.message,
  name: error.name,
  stack: error.stack,
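The explicit `code`/`message`/`name`/`stack` entries are needed because `message` and `stack` are non-enumerable on `Error` instances and `name` lives on the prototype, so the spread alone would drop them; a quick illustration using the helper above:

```js
const error = new TypeError('boom')

console.log({ ...error }) // {} – only enumerable own properties are copied
console.log(serializeError(error))
// -> { code: undefined, message: 'boom', name: 'TypeError', stack: '…' }
```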
@@ -1 +0,0 @@
dist/transports

@xen-orchestra/log/transports/console.js
@@ -0,0 +1 @@
module.exports = require('../dist/transports/console.js')

@xen-orchestra/log/transports/email.js
@@ -0,0 +1 @@
module.exports = require('../dist/transports/email.js')

@xen-orchestra/log/transports/memory.js
@@ -0,0 +1 @@
module.exports = require('../dist/transports/memory.js')

@xen-orchestra/log/transports/syslog.js
@@ -0,0 +1 @@
module.exports = require('../dist/transports/syslog.js')
@@ -30,9 +30,9 @@
    "bind-property-descriptor": "^1.0.0"
  },
  "devDependencies": {
    "@babel/cli": "7.0.0",
    "@babel/core": "7.0.0",
    "@babel/preset-env": "7.0.0",
    "@babel/cli": "^7.0.0",
    "@babel/core": "^7.0.0",
    "@babel/preset-env": "^7.0.0",
    "babel-plugin-dev": "^1.0.0",
    "babel-plugin-lodash": "^3.3.2",
    "cross-env": "^5.1.3",
@@ -61,7 +61,7 @@ const mixin = Mixins => Class => {

  const n = Mixins.length

  function DecoratedClass (...args) {
  function DecoratedClass(...args) {
    const instance = new Class(...args)

    for (let i = 0; i < n; ++i) {

CHANGELOG.md
@@ -4,6 +4,292 @@
|
||||
|
||||
### Enhancements
|
||||
|
||||
- [VM migration] Display hosts' free memory [#3264](https://github.com/vatesfr/xen-orchestra/issues/3264) (PR [#3832](https://github.com/vatesfr/xen-orchestra/pull/3832))
|
||||
- [Plugins] New field to filter displayed plugins (PR [#3871](https://github.com/vatesfr/xen-orchestra/pull/3871))
|
||||
- Ability to copy ID of "unknown item"s [#3833](https://github.com/vatesfr/xen-orchestra/issues/3833) (PR [#3856](https://github.com/vatesfr/xen-orchestra/pull/3856))
|
||||
- [Cloud-Init] Switch config drive type to `nocloud` in preparation for passing network config (PR [#3877](https://github.com/vatesfr/xen-orchestra/pull/3877))
|
||||
- [UI] Show pool name next to templates' names [#3894](https://github.com/vatesfr/xen-orchestra/issues/3894) (PR [#3896](https://github.com/vatesfr/xen-orchestra/pull/3896))
|
||||
- [Backup NG] Support zstd compression for full backups [#3773](https://github.com/vatesfr/xen-orchestra/issues/3773) (PR [#3883](https://github.com/vatesfr/xen-orchestra/pull/3883))
|
||||
- [VM] Ability to copy a VM with zstd compression [#3773](https://github.com/vatesfr/xen-orchestra/issues/3773) (PR [#3889](https://github.com/vatesfr/xen-orchestra/pull/3889))
|
||||
- [VM & Host] "Pool > Host" breadcrumb at the top of the page (PR [#3898](https://github.com/vatesfr/xen-orchestra/pull/3898))
|
||||
- [Hosts] Ability to enable/disable host multipathing [#3659](https://github.com/vatesfr/xen-orchestra/issues/3659) (PR [#3865](https://github.com/vatesfr/xen-orchestra/pull/3865))
|
||||
- [Login] Add OTP authentication [#2044](https://github.com/vatesfr/xen-orchestra/issues/2044) (PR [#3879](https://github.com/vatesfr/xen-orchestra/pull/3879))
|
||||
- [Notifications] New notification page to provide important information about XOA (PR [#3904](https://github.com/vatesfr/xen-orchestra/pull/3904))
|
||||
- [VM] Ability to export a VM with zstd compression [#3773](https://github.com/vatesfr/xen-orchestra/issues/3773) (PR [#3891](https://github.com/vatesfr/xen-orchestra/pull/3891))
|
||||
- [Host/network] Display PIF speed [#3887](https://github.com/vatesfr/xen-orchestra/issues/3887) (PR [#3901](https://github.com/vatesfr/xen-orchestra/pull/3901))
|
||||
- [SR] Display iSCSI paths and mark the SR with a yellow dot if one path is unavailable [#3659](https://github.com/vatesfr/xen-orchestra/issues/3659) (PR [#3829](https://github.com/vatesfr/xen-orchestra/pull/3829))
|
||||
- [UI] Unify the sign-in buttons (PR [#3913](https://github.com/vatesfr/xen-orchestra/pull/3913))
|
||||
- [Settings/remotes] NFS: display default option on placeholder [#3631](https://github.com/vatesfr/xen-orchestra/issues/3631) (PR [#3921](https://github.com/vatesfr/xen-orchestra/pull/3921))
|
||||
- [VM/advanced] Ability to pin vCPU to physical cores [#3241](https://github.com/vatesfr/xen-orchestra/issues/3241) (PR [#3254](https://github.com/vatesfr/xen-orchestra/pull/3254))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- [New SR] No redirection if the SR creation failed or canceled [#3843](https://github.com/vatesfr/xen-orchestra/issues/3843) (PR [#3853](https://github.com/vatesfr/xen-orchestra/pull/3853))
|
||||
- [Home] Fix two tabs opened by middle click in Firefox [#3450](https://github.com/vatesfr/xen-orchestra/issues/3450) (PR [#3825](https://github.com/vatesfr/xen-orchestra/pull/3825))
|
||||
- [XOA] Enable downgrade for ending trial (PR [#3867](https://github.com/vatesfr/xen-orchestra/pull/3867))
|
||||
- [OVA import] allow import of big files [#3468](https://github.com/vatesfr/xen-orchestra/issues/3468) (PR [#3504](https://github.com/vatesfr/xen-orchestra/pull/3504))
|
||||
- [Backup NG] Smart settings not saved when editing a backup job [#3885](https://github.com/vatesfr/xen-orchestra/issues/3885) (PR [#3886](https://github.com/vatesfr/xen-orchestra/pull/3886))
|
||||
- [VM/snapshot] New snapshot with memory: fix "invalid parameters" error (PR [#3903](https://github.com/vatesfr/xen-orchestra/pull/3903))
|
||||
- [VM creation] Broken CloudInit config drive when VM created on local SR
|
||||
- [Legacy Backup] Fix error when restoring a backup
|
||||
- [Home] Fix `user.getAll` error when user is not admin [#3573](https://github.com/vatesfr/xen-orchestra/issues/3573) (PR [#3918](https://github.com/vatesfr/xen-orchestra/pull/3918))
|
||||
- [Backup NG] Fix restore issue when a disk has grown [#3910](https://github.com/vatesfr/xen-orchestra/issues/3910) (PR [#3920](https://github.com/vatesfr/xen-orchestra/pull/3920))
|
||||
- [Backup NG] Delete _importing_ VMs due to interrupted CR/DR (PR [#3923](https://github.com/vatesfr/xen-orchestra/pull/3923))
|
||||
|
||||
### Released packages
|
||||
|
||||
- vhd-lib v0.5.1
|
||||
- xoa-updater v0.15.0
|
||||
- xen-api v0.24.1
|
||||
- xo-vmdk-to-vhd v0.1.6
|
||||
- xo-server v5.34.0
|
||||
- xo-web v5.34.0
|
||||
|
||||
## *staging*
|
||||
|
||||
### Enhancements
|
||||
|
||||
- [Backup NG] Restore logs moved to restore tab [#3772](https://github.com/vatesfr/xen-orchestra/issues/3772) (PR [#3802](https://github.com/vatesfr/xen-orchestra/pull/3802))
|
||||
- [Remotes] New SMB implementation that provides better stability and performance [#2257](https://github.com/vatesfr/xen-orchestra/issues/2257) (PR [#3708](https://github.com/vatesfr/xen-orchestra/pull/3708))
|
||||
- [VM/advanced] ACL management from VM view [#3040](https://github.com/vatesfr/xen-orchestra/issues/3040) (PR [#3774](https://github.com/vatesfr/xen-orchestra/pull/3774))
|
||||
- [VM / snapshots] Ability to save the VM memory [#3795](https://github.com/vatesfr/xen-orchestra/issues/3795) (PR [#3812](https://github.com/vatesfr/xen-orchestra/pull/3812))
|
||||
- [Backup NG / Health] Show number of lone snapshots in tab label [#3500](https://github.com/vatesfr/xen-orchestra/issues/3500) (PR [#3824](https://github.com/vatesfr/xen-orchestra/pull/3824))
|
||||
- [Login] Add autofocus on username input on login page [#3835](https://github.com/vatesfr/xen-orchestra/issues/3835) (PR [#3836](https://github.com/vatesfr/xen-orchestra/pull/3836))
|
||||
- [Home/VM] Bulk snapshot: specify snapshots' names [#3778](https://github.com/vatesfr/xen-orchestra/issues/3778) (PR [#3787](https://github.com/vatesfr/xen-orchestra/pull/3787))
|
||||
- [Remotes] Show free space and disk usage on remote [#3055](https://github.com/vatesfr/xen-orchestra/issues/3055) (PR [#3767](https://github.com/vatesfr/xen-orchestra/pull/3767))
|
||||
- [New SR] Add tooltip for reattach action button [#3845](https://github.com/vatesfr/xen-orchestra/issues/3845) (PR [#3852](https://github.com/vatesfr/xen-orchestra/pull/3852))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- [Self] Display sorted Resource Sets [#3818](https://github.com/vatesfr/xen-orchestra/issues/3818) (PR [#3823](https://github.com/vatesfr/xen-orchestra/pull/3823))
|
||||
- [Servers] Correctly report connecting status (PR [#3838](https://github.com/vatesfr/xen-orchestra/pull/3838))
|
||||
- [Servers] Fix cannot reconnect to a server after connection has been lost [#3839](https://github.com/vatesfr/xen-orchestra/issues/3839) (PR [#3841](https://github.com/vatesfr/xen-orchestra/pull/3841))
|
||||
- [New VM] Fix `NO_HOSTS_AVAILABLE()` error when creating a VM on a local SR from template on another local SR [#3084](https://github.com/vatesfr/xen-orchestra/issues/3084) (PR [#3827](https://github.com/vatesfr/xen-orchestra/pull/3827))
|
||||
- [Backup NG] Fix typo in the form [#3854](https://github.com/vatesfr/xen-orchestra/issues/3854) (PR [#3855](https://github.com/vatesfr/xen-orchestra/pull/3855))
|
||||
- [New SR] No warning when creating a NFS SR on a path that is already used as NFS SR [#3844](https://github.com/vatesfr/xen-orchestra/issues/3844) (PR [#3851](https://github.com/vatesfr/xen-orchestra/pull/3851))
|
||||
|
||||
### Released packages
|
||||
|
||||
- vhd-lib v0.5.0
|
||||
- vhd-cli v0.2.0
|
||||
- xen-api v0.24.0
|
||||
- @xen-orchestra/fs v0.6.0
|
||||
- xo-server v5.33.0
|
||||
- xo-web v5.33.0
|
||||
|
||||
## **5.30.0** (2018-12-20)
|
||||
|
||||
### Enhancements
|
||||
|
||||
- [Users] Display user groups [#3719](https://github.com/vatesfr/xen-orchestra/issues/3719) (PR [#3740](https://github.com/vatesfr/xen-orchestra/pull/3740))
|
||||
- [VDI] Display VDI's SR [#3021](https://github.com/vatesfr/xen-orchestra/issues/3021) (PR [#3285](https://github.com/vatesfr/xen-orchestra/pull/3285))
|
||||
- [Health, VM/disks] Display SR's container [#3021](https://github.com/vatesfr/xen-orchestra/issues/3021) (PRs [#3747](https://github.com/vatesfr/xen-orchestra/pull/3747), [#3751](https://github.com/vatesfr/xen-orchestra/pull/3751))
|
||||
- [Servers] Auto-connect to ejected host [#2238](https://github.com/vatesfr/xen-orchestra/issues/2238) (PR [#3738](https://github.com/vatesfr/xen-orchestra/pull/3738))
|
||||
- [Backup NG] Add "XOSAN" in excluded tags by default [#2128](https://github.com/vatesfr/xen-orchestra/issues/3563) (PR [#3559](https://github.com/vatesfr/xen-orchestra/pull/3563))
|
||||
- [VM] Add tooltip for VM status icon [#3749](https://github.com/vatesfr/xen-orchestra/issues/3749) (PR [#3765](https://github.com/vatesfr/xen-orchestra/pull/3765))
|
||||
- [New XOSAN] Improve view and possibility to sort SRs by name/size/free space [#2416](https://github.com/vatesfr/xen-orchestra/issues/2416) (PR [#3691](https://github.com/vatesfr/xen-orchestra/pull/3691))
|
||||
- [Backup NG] Disable HA on replicated VM (CR, DR) [#2359](https://github.com/vatesfr/xen-orchestra/issues/2359) (PR [#3755](https://github.com/vatesfr/xen-orchestra/pull/3755))
|
||||
- [Backup NG] Display the last run status for each schedule with the possibility to show the associated log [#3769](https://github.com/vatesfr/xen-orchestra/issues/3769) (PR [#3779](https://github.com/vatesfr/xen-orchestra/pull/3779))
|
||||
- [Backup NG] Add a link to the documentation [#3789](https://github.com/vatesfr/xen-orchestra/issues/3789) (PR [#3790](https://github.com/vatesfr/xen-orchestra/pull/3790))
|
||||
- [Backup NG] Ability to copy schedule/job id to the clipboard [#3753](https://github.com/vatesfr/xen-orchestra/issues/3753) (PR [#3791](https://github.com/vatesfr/xen-orchestra/pull/3791))
|
||||
- [Backup NG / logs] Merge the job log status with the display details button [#3797](https://github.com/vatesfr/xen-orchestra/issues/3797) (PR [#3800](https://github.com/vatesfr/xen-orchestra/pull/3800))
|
||||
- [XOA] Notification banner when XOA is not registered [#3803](https://github.com/vatesfr/xen-orchestra/issues/3803) (PR [#3808](https://github.com/vatesfr/xen-orchestra/pull/3808))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- [Home/SRs] Fixed SR status for non admin users [#2204](https://github.com/vatesfr/xen-orchestra/issues/2204) (PR [#3742](https://github.com/vatesfr/xen-orchestra/pull/3742))
|
||||
- [Servers] Fix occasional "server's pool already connected" errors when pool is not connected (PR [#3782](https://github.com/vatesfr/xen-orchestra/pull/3782))
|
||||
- [Self] Fix missing objects when the self service view is the first one to be loaded when opening XO [#2689](https://github.com/vatesfr/xen-orchestra/issues/2689) (PR [#3096](https://github.com/vatesfr/xen-orchestra/pull/3096))
|
||||
|
||||
### Released packages
|
||||
|
||||
- @xen-orchestra/fs v0.5.0
|
||||
- xen-api v0.23.0
|
||||
- xo-acl-resolver v0.4.1
|
||||
- xo-server v5.32.0
|
||||
- xo-web v5.32.0
|
||||
|
||||
## **5.29.0** (2018-11-29)
|
||||
|
||||
### Enhancements
|
||||
|
||||
- [Perf alert] Ability to trigger an alarm if a host/VM/SR usage value is below the threshold [#3612](https://github.com/vatesfr/xen-orchestra/issues/3612) (PR [#3675](https://github.com/vatesfr/xen-orchestra/pull/3675))
|
||||
- [Home/VMs] Display pool's name [#2226](https://github.com/vatesfr/xen-orchestra/issues/2226) (PR [#3709](https://github.com/vatesfr/xen-orchestra/pull/3709))
|
||||
- [Servers] Prevent new connection if pool is already connected [#2238](https://github.com/vatesfr/xen-orchestra/issues/2238) (PR [#3724](https://github.com/vatesfr/xen-orchestra/pull/3724))
|
||||
- [VM] Pause (like Suspend but doesn't copy RAM on disk) [#3727](https://github.com/vatesfr/xen-orchestra/issues/3727) (PR [#3731](https://github.com/vatesfr/xen-orchestra/pull/3731))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- [Servers] Fix deleting server on joining a pool [#2238](https://github.com/vatesfr/xen-orchestra/issues/2238) (PR [#3728](https://github.com/vatesfr/xen-orchestra/pull/3728))
|
||||
|
||||
### Released packages
|
||||
|
||||
- xen-api v0.22.0
|
||||
- xo-server-perf-alert v0.2.0
|
||||
- xo-server-usage-report v0.7.1
|
||||
- xo-server v5.31.0
|
||||
- xo-web v5.31.0
|
||||
|
||||
## **5.28.2** (2018-11-16)
|
||||
|
||||
### Enhancements
|
||||
|
||||
- [VM] Ability to set nested virtualization in settings [#3619](https://github.com/vatesfr/xen-orchestra/issues/3619) (PR [#3625](https://github.com/vatesfr/xen-orchestra/pull/3625))
|
||||
- [Legacy Backup] Restore and File restore functionalities moved to the Backup NG view [#3499](https://github.com/vatesfr/xen-orchestra/issues/3499) (PR [#3610](https://github.com/vatesfr/xen-orchestra/pull/3610))
|
||||
- [Backup NG logs] Display a warning in case of missing VMs instead of ghost VM tasks (PR [#3647](https://github.com/vatesfr/xen-orchestra/pull/3647))
|
||||
- [VM] On migration, automatically selects the host and SR when only one is available [#3502](https://github.com/vatesfr/xen-orchestra/issues/3502) (PR [#3654](https://github.com/vatesfr/xen-orchestra/pull/3654))
|
||||
- [VM] Display VGA and video RAM for PVHVM guests [#3576](https://github.com/vatesfr/xen-orchestra/issues/3576) (PR [#3664](https://github.com/vatesfr/xen-orchestra/pull/3664))
|
||||
- [Backup NG form] Display a warning to let the user know that the Delta Backup and the Continuous Replication are not supported on XenServer < 6.5 [#3540](https://github.com/vatesfr/xen-orchestra/issues/3540) (PR [#3668](https://github.com/vatesfr/xen-orchestra/pull/3668))
|
||||
- [Backup NG form] Omit VMs(Simple Backup)/pools(Smart Backup/Resident on) with XenServer < 6.5 from the selection when the Delta Backup mode or the Continuous Replication mode are selected [#3540](https://github.com/vatesfr/xen-orchestra/issues/3540) (PR [#3668](https://github.com/vatesfr/xen-orchestra/pull/3668))
|
||||
- [VM] Allow to switch the Virtualization mode [#2372](https://github.com/vatesfr/xen-orchestra/issues/2372) (PR [#3669](https://github.com/vatesfr/xen-orchestra/pull/3669))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- [Backup ng logs] Fix restarting VMs with concurrency issue [#3603](https://github.com/vatesfr/xen-orchestra/issues/3603) (PR [#3634](https://github.com/vatesfr/xen-orchestra/pull/3634))
|
||||
- Validate modal containing a confirm text input by pressing the Enter key [#2735](https://github.com/vatesfr/xen-orchestra/issues/2735) (PR [#2890](https://github.com/vatesfr/xen-orchestra/pull/2890))
|
||||
- [Patches] Bulk install correctly ignores upgrade patches on licensed hosts (PR [#3651](https://github.com/vatesfr/xen-orchestra/pull/3651))
|
||||
- [Backup NG logs] Handle failed restores (PR [#3648](https://github.com/vatesfr/xen-orchestra/pull/3648))
|
||||
- [Self/New VM] Incorrect limit computation [#3658](https://github.com/vatesfr/xen-orchestra/issues/3658) (PR [#3666](https://github.com/vatesfr/xen-orchestra/pull/3666))
|
||||
- [Plugins] Don't expose credentials in config to users (PR [#3671](https://github.com/vatesfr/xen-orchestra/pull/3671))
|
||||
- [Self/New VM] `not enough … available in the set …` error in some cases (PR [#3667](https://github.com/vatesfr/xen-orchestra/pull/3667))
|
||||
- [XOSAN] Creation stuck at "Configuring VMs" [#3688](https://github.com/vatesfr/xen-orchestra/issues/3688) (PR [#3689](https://github.com/vatesfr/xen-orchestra/pull/3689))
|
||||
- [Backup NG] Errors listing backups on SMB remotes with extraneous files (PR [#3685](https://github.com/vatesfr/xen-orchestra/pull/3685))
|
||||
- [Remotes] Don't expose credentials to users [#3682](https://github.com/vatesfr/xen-orchestra/issues/3682) (PR [#3687](https://github.com/vatesfr/xen-orchestra/pull/3687))
|
||||
- [VM] Correctly display guest metrics updates (tools, network, etc.) [#3533](https://github.com/vatesfr/xen-orchestra/issues/3533) (PR [#3694](https://github.com/vatesfr/xen-orchestra/pull/3694))
|
||||
- [VM Templates] Fix deletion [#3498](https://github.com/vatesfr/xen-orchestra/issues/3498) (PR [#3695](https://github.com/vatesfr/xen-orchestra/pull/3695))
|
||||
|
||||
### Released packages
|
||||
|
||||
- xen-api v0.21.0
|
||||
- xo-common v0.2.0
|
||||
- xo-acl-resolver v0.4.0
|
||||
- xo-server v5.30.1
|
||||
- xo-web v5.30.0
|
||||
|
||||
## **5.28.1** (2018-11-05)
|
||||
|
||||
### Enhancements
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- [Backup NG] Increase timeout in stale remotes detection to limit false positives (PR [#3632](https://github.com/vatesfr/xen-orchestra/pull/3632))
|
||||
- Fix re-registration issue ([4e35b19ac](https://github.com/vatesfr/xen-orchestra/commit/4e35b19ac56c60f61c0e771cde70a50402797b8a))
|
||||
- [Backup NG logs] Fix started jobs filter [#3636](https://github.com/vatesfr/xen-orchestra/issues/3636) (PR [#3641](https://github.com/vatesfr/xen-orchestra/pull/3641))
|
||||
- [New VM] CPU and memory user inputs were ignored since previous release [#3644](https://github.com/vatesfr/xen-orchestra/issues/3644) (PR [#3646](https://github.com/vatesfr/xen-orchestra/pull/3646))
|
||||
|
||||
### Released packages
|
||||
|
||||
- @xen-orchestra/fs v0.4.1
|
||||
- xo-server v5.29.4
|
||||
- xo-web v5.29.3
|
||||
|
||||
## **5.28.0** (2018-10-31)
|
||||
|
||||
### Enhancements
|
||||
|
||||
- [Usage Report] Add IOPS read/write/total per VM [#3309](https://github.com/vatesfr/xen-orchestra/issues/3309) (PR [#3455](https://github.com/vatesfr/xen-orchestra/pull/3455))
|
||||
- [Self service] Sort resource sets by name (PR [#3507](https://github.com/vatesfr/xen-orchestra/pull/3507))
|
||||
- [Usage Report] Add top 3 SRs which use the most IOPS read/write/total [#3306](https://github.com/vatesfr/xen-orchestra/issues/3306) (PR [#3508](https://github.com/vatesfr/xen-orchestra/pull/3508))
|
||||
- [New VM] Display a warning when the memory is below the template memory static min [#3496](https://github.com/vatesfr/xen-orchestra/issues/3496) (PR [#3513](https://github.com/vatesfr/xen-orchestra/pull/3513))
|
||||
- [Backup NG form] Add link to plugins setting [#3457](https://github.com/vatesfr/xen-orchestra/issues/3457) (PR [#3514](https://github.com/vatesfr/xen-orchestra/pull/3514))
|
||||
- [Backup reports] Add job and run ID [#3488](https://github.com/vatesfr/xen-orchestra/issues/3488) (PR [#3516](https://github.com/vatesfr/xen-orchestra/pull/3516))
|
||||
- [Usage Report] Add top 3 VMs which use the most IOPS read/write/total [#3308](https://github.com/vatesfr/xen-orchestra/issues/3308) (PR [#3463](https://github.com/vatesfr/xen-orchestra/pull/3463))
|
||||
- [Settings/logs] Homogenize action buttons in table and enable bulk deletion [#3179](https://github.com/vatesfr/xen-orchestra/issues/3179) (PR [#3528](https://github.com/vatesfr/xen-orchestra/pull/3528))
|
||||
- [Settings/acls] Add bulk deletion [#3179](https://github.com/vatesfr/xen-orchestra/issues/3179) (PR [#3536](https://github.com/vatesfr/xen-orchestra/pull/3536))
|
||||
- [Home] Improve search usage: raw numbers also match in names [#2906](https://github.com/vatesfr/xen-orchestra/issues/2906) (PR [#3552](https://github.com/vatesfr/xen-orchestra/pull/3552))
|
||||
- [Backup NG] Timeout of a job is now in hours [#3550](https://github.com/vatesfr/xen-orchestra/issues/3550) (PR [#3553](https://github.com/vatesfr/xen-orchestra/pull/3553))
|
||||
- [Backup NG] Explicit error if a VM is missing [#3434](https://github.com/vatesfr/xen-orchestra/issues/3434) (PR [#3522](https://github.com/vatesfr/xen-orchestra/pull/3522))
|
||||
- [Backup NG] Show all advanced settings with non-default values in overview [#3549](https://github.com/vatesfr/xen-orchestra/issues/3549) (PR [#3554](https://github.com/vatesfr/xen-orchestra/pull/3554))
|
||||
- [Backup NG] Collapse advanced settings by default [#3551](https://github.com/vatesfr/xen-orchestra/issues/3551) (PR [#3559](https://github.com/vatesfr/xen-orchestra/pull/3559))
|
||||
- [Scheduling] Merge selection and interval tabs [#1902](https://github.com/vatesfr/xen-orchestra/issues/1902) (PR [#3519](https://github.com/vatesfr/xen-orchestra/pull/3519))
|
||||
- [Backup NG/Restore] The backup selector now also shows the job name [#3366](https://github.com/vatesfr/xen-orchestra/issues/3366) (PR [#3564](https://github.com/vatesfr/xen-orchestra/pull/3564))
|
||||
- Sort buttons by criticality in tables [#3168](https://github.com/vatesfr/xen-orchestra/issues/3168) (PR [#3545](https://github.com/vatesfr/xen-orchestra/pull/3545))
|
||||
- [Usage Report] Ability to send a daily report [#3544](https://github.com/vatesfr/xen-orchestra/issues/3544) (PR [#3582](https://github.com/vatesfr/xen-orchestra/pull/3582))
|
||||
- [Backup NG logs] Disable state filters with no entries [#3438](https://github.com/vatesfr/xen-orchestra/issues/3438) (PR [#3442](https://github.com/vatesfr/xen-orchestra/pull/3442))
|
||||
- [ACLs] Global performance improvement on UI for non-admin users [#3578](https://github.com/vatesfr/xen-orchestra/issues/3578) (PR [#3584](https://github.com/vatesfr/xen-orchestra/pull/3584))
|
||||
- [Backup NG] Improve the Schedule's view (Replace table by list) [#3491](https://github.com/vatesfr/xen-orchestra/issues/3491) (PR [#3586](https://github.com/vatesfr/xen-orchestra/pull/3586))
|
||||
- ([Host/Storage], [Sr/hosts]) add bulk deletion [#3179](https://github.com/vatesfr/xen-orchestra/issues/3179) (PR [#3539](https://github.com/vatesfr/xen-orchestra/pull/3539))
|
||||
- [xo-server] Use @xen-orchestra/log for basic logging [#3555](https://github.com/vatesfr/xen-orchestra/issues/3555) (PR [#3579](https://github.com/vatesfr/xen-orchestra/pull/3579))
|
||||
- [Backup Report] Log error when job failed [#3458](https://github.com/vatesfr/xen-orchestra/issues/3458) (PR [#3593](https://github.com/vatesfr/xen-orchestra/pull/3593))
|
||||
- [Backup NG] Display logs for backup restoration [#2511](https://github.com/vatesfr/xen-orchestra/issues/2511) (PR [#3609](https://github.com/vatesfr/xen-orchestra/pull/3609))
|
||||
- [XOA] Display product version and list of all installed packages [#3560](https://github.com/vatesfr/xen-orchestra/issues/3560) (PR [#3621](https://github.com/vatesfr/xen-orchestra/pull/3621))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- [Remotes] Fix removal of broken remotes [#3327](https://github.com/vatesfr/xen-orchestra/issues/3327) (PR [#3521](https://github.com/vatesfr/xen-orchestra/pull/3521))
|
||||
- [Backups] Fix stuck backups due to broken NFS remotes [#3467](https://github.com/vatesfr/xen-orchestra/issues/3467) (PR [#3534](https://github.com/vatesfr/xen-orchestra/pull/3534))
|
||||
- [New VM] Fix missing cloud config when creating multiple VMs at once in some cases [#3532](https://github.com/vatesfr/xen-orchestra/issues/3532) (PR [#3535](https://github.com/vatesfr/xen-orchestra/pull/3535))
|
||||
- [VM] Fix an error when an admin tried to add a disk on a Self VM whose resource set had been deleted [#2814](https://github.com/vatesfr/xen-orchestra/issues/2814) (PR [#3530](https://github.com/vatesfr/xen-orchestra/pull/3530))
|
||||
- [Self/Create VM] Fix some quotas based on the template instead of the user inputs [#2683](https://github.com/vatesfr/xen-orchestra/issues/2683) (PR [#3546](https://github.com/vatesfr/xen-orchestra/pull/3546))
|
||||
- [Self] Ignore DR and CR VMs when computing quotas [#3064](https://github.com/vatesfr/xen-orchestra/issues/3064) (PR [#3561](https://github.com/vatesfr/xen-orchestra/pull/3561))
|
||||
- [Patches] Wrongly requiring to eject CDs from halted VMs and snapshots before installing patches (PR [#3611](https://github.com/vatesfr/xen-orchestra/pull/3611))
|
||||
- [Jobs] Ensure the scheduling is not interrupted in rare cases (PR [#3617](https://github.com/vatesfr/xen-orchestra/pull/3617))
|
||||
- [Home] Fix `server.getAll` error at login when user is not admin [#2335](https://github.com/vatesfr/xen-orchestra/issues/2335) (PR [#3613](https://github.com/vatesfr/xen-orchestra/pull/3613))
|
||||
|
||||
### Released packages
|
||||
|
||||
- xo-server-backup-reports v0.15.0
|
||||
- xo-common v0.1.2
|
||||
- @xen-orchestra/log v0.1.0
|
||||
- @xen-orchestra/fs v0.4.0
|
||||
- complex-matcher v0.5.0
|
||||
- vhd-lib v0.4.0
|
||||
- xen-api v0.20.0
|
||||
- xo-server-usage-report v0.7.0
|
||||
- xo-server v5.29.0
|
||||
- xo-web v5.29.0
|
||||
|
||||
## **5.27.2** (2018-10-05)
|
||||
|
||||
### Enhancements
|
||||
|
||||
- [Host/Networks] Remove "Add network" button [#3386](https://github.com/vatesfr/xen-orchestra/issues/3386) (PR [#3478](https://github.com/vatesfr/xen-orchestra/pull/3478))
|
||||
- [Host/networks] Private networks table [#3387](https://github.com/vatesfr/xen-orchestra/issues/3387) (PR [#3481](https://github.com/vatesfr/xen-orchestra/pull/3481))
|
||||
- [Home/pool] Patch count pill now shows the number of unique patches in the pool [#3321](https://github.com/vatesfr/xen-orchestra/issues/3321) (PR [#3483](https://github.com/vatesfr/xen-orchestra/pull/3483))
|
||||
- [Patches] Pre-install checks to avoid errors [#3252](https://github.com/vatesfr/xen-orchestra/issues/3252) (PR [#3484](https://github.com/vatesfr/xen-orchestra/pull/3484))
|
||||
- [Vm/Snapshots] Allow VM operators to create snapshots and delete those they created [#3443](https://github.com/vatesfr/xen-orchestra/issues/3443) (PR [#3482](https://github.com/vatesfr/xen-orchestra/pull/3482))
|
||||
- [VM/clone] Handle ACLs and Self Service [#3139](https://github.com/vatesfr/xen-orchestra/issues/3139) (PR [#3493](https://github.com/vatesfr/xen-orchestra/pull/3493))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- [Backup NG] Fix `Cannot read property 'uuid' of undefined` when a disk is removed from a VM to backup (PR [#3479](https://github.com/vatesfr/xen-orchestra/pull/3479))
|
||||
- [Backup NG] Fix unexpected full after failure, interruption or basic rolling snapshot (PR [#3485](https://github.com/vatesfr/xen-orchestra/pull/3485))
|
||||
- [Usage report] Display top 3 used SRs instead of top 3 biggest SRs [#3307](https://github.com/vatesfr/xen-orchestra/issues/3307) (PR [#3475](https://github.com/vatesfr/xen-orchestra/pull/3475))
|
||||
|
||||
### Released packages
|
||||
|
||||
- vhd-lib v0.3.2
|
||||
- xo-vmdk-to-vhd v0.1.5
|
||||
- xo-server-usage-report v0.6.0
|
||||
- xo-acl-resolver v0.3.0
|
||||
- xo-server v5.28.0
|
||||
- xo-web v5.28.0
|
||||
|
||||
## **5.27.1** (2018-09-28)
|
||||
|
||||
### Enhancements
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- [OVA Import] Allow import of files bigger than 127GB (PR [#3451](https://github.com/vatesfr/xen-orchestra/pull/3451))
|
||||
- [File restore] Fix a path issue when going back to the parent folder (PR [#3446](https://github.com/vatesfr/xen-orchestra/pull/3446))
|
||||
- [File restore] Fix a minor issue when showing which selected files are redundant (PR [#3447](https://github.com/vatesfr/xen-orchestra/pull/3447))
|
||||
- [Memory] Fix a major leak [#2580](https://github.com/vatesfr/xen-orchestra/issues/2580) [#2820](https://github.com/vatesfr/xen-orchestra/issues/2820) (PR [#3453](https://github.com/vatesfr/xen-orchestra/pull/3453))
|
||||
- [NFS Remotes] Fix `already mounted` race condition [#3380](https://github.com/vatesfr/xen-orchestra/issues/3380) (PR [#3460](https://github.com/vatesfr/xen-orchestra/pull/3460))
|
||||
- Fix `Cannot read property 'type' of undefined` when deleting a VM (PR [#3465](https://github.com/vatesfr/xen-orchestra/pull/3465))
|
||||
|
||||
### Released packages
|
||||
|
||||
- @xen-orchestra/fs v0.3.1
|
||||
- vhd-lib v0.3.1
|
||||
- xo-vmdk-to-vhd v0.1.4
|
||||
- xo-server v5.27.2
|
||||
- xo-web v5.27.1
|
||||
|
||||
## **5.27.0** (2018-09-24)
|
||||
|
||||
### Enhancements
|
||||
|
||||
- [Remotes] Test the remote automatically on changes [#3323](https://github.com/vatesfr/xen-orchestra/issues/3323) (PR [#3397](https://github.com/vatesfr/xen-orchestra/pull/3397))
|
||||
- [Remotes] Use *WORKGROUP* as default domain for new SMB remote (PR [#3398](https://github.com/vatesfr/xen-orchestra/pull/3398))
|
||||
- [Backup NG form] Display a tip to encourage users to create VMs on thin-provisioned storage [#3334](https://github.com/vatesfr/xen-orchestra/issues/3334) (PR [#3402](https://github.com/vatesfr/xen-orchestra/pull/3402))
|
||||
@@ -13,6 +299,7 @@
|
||||
- [Backup reports] Ability to test the plugin (PR [#3421](https://github.com/vatesfr/xen-orchestra/pull/3421))
|
||||
- [Backup NG] Ability to restart failed VMs' backup [#3339](https://github.com/vatesfr/xen-orchestra/issues/3339) (PR [#3420](https://github.com/vatesfr/xen-orchestra/pull/3420))
|
||||
- [VM] Ability to change the NIC type [#3423](https://github.com/vatesfr/xen-orchestra/issues/3423) (PR [#3440](https://github.com/vatesfr/xen-orchestra/pull/3440))
|
||||
- [Backup NG Overview] Display the schedule's name [#3444](https://github.com/vatesfr/xen-orchestra/issues/3444) (PR [#3445](https://github.com/vatesfr/xen-orchestra/pull/3445))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
@@ -24,6 +311,7 @@
|
||||
- [Backup NG] Fix issue when *Delete first* was enabled for some of the remotes [#3424](https://github.com/vatesfr/xen-orchestra/issues/3424) (PR [#3427](https://github.com/vatesfr/xen-orchestra/pull/3427))
|
||||
- [VM/host consoles] Work around a XenServer/XCP-ng issue which led to some consoles not working [#3432](https://github.com/vatesfr/xen-orchestra/issues/3432) (PR [#3435](https://github.com/vatesfr/xen-orchestra/pull/3435))
|
||||
- [Backup NG] Remove extraneous snapshots in case of multiple schedules [#3132](https://github.com/vatesfr/xen-orchestra/issues/3132) (PR [#3439](https://github.com/vatesfr/xen-orchestra/pull/3439))
|
||||
- [Backup NG] Fix page reloaded on creating a schedule [#3461](https://github.com/vatesfr/xen-orchestra/issues/3461) (PR [#3462](https://github.com/vatesfr/xen-orchestra/pull/3462))
|
||||
|
||||
### Released packages
|
||||
|
||||
|
||||
@@ -8,6 +8,7 @@
|
||||
- enhancement/bug fix entry added
|
||||
- list of packages to release updated (`${name} v${new version}`)
|
||||
- [ ] documentation updated
|
||||
- [ ] **I have tested added/updated features** (and impacted code)
|
||||
|
||||
### Process
|
||||
|
||||
|
||||
BIN
docs/assets/billing_info.png
Normal file
|
After Width: | Height: | Size: 96 KiB |
BIN
docs/assets/disabled-cr-ha-tag.png
Normal file
|
After Width: | Height: | Size: 32 KiB |
BIN
docs/assets/disabled-dr-ha-tag.png
Normal file
|
After Width: | Height: | Size: 32 KiB |
|
Before Width: | Height: | Size: 24 KiB After Width: | Height: | Size: 36 KiB |
|
Before Width: | Height: | Size: 55 KiB After Width: | Height: | Size: 109 KiB |
BIN
docs/assets/payment_mode.png
Normal file
|
After Width: | Height: | Size: 98 KiB |
@@ -39,6 +39,10 @@ You can check if a coalesce job is currently active by running `ps axf | grep vh
|
||||
|
||||
If you don't see any running coalesce jobs, and can't find any other reason that XenServer has not started one, you can attempt to make it start a coalesce job by rescanning the SR. This is harmless to try, but will not always result in a coalesce. Visit the problematic SR in the XOA UI, then click the "Rescan All Disks" button towards the top right: it looks like a refresh circle icon. This should begin the coalesce process - if you click the Advanced tab in the SR view, the "disks needing to be coalesced" list should become smaller and smaller.
|
||||
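If you prefer the command line, the same rescan can also be triggered from the host with `xe`. A minimal sketch (the SR name and UUID below are placeholders for your problematic SR):

```
# Find the UUID of the SR, then ask XenServer/XCP-ng to rescan it
xe sr-list name-label="Local storage" params=uuid
xe sr-scan uuid=<SR_UUID>
```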
|
||||
### Parse Error
|
||||
|
||||
This is most likely due to running a backup job that uses delta functionality (e.g. delta backups or continuous replication) on a version of XenServer older than 6.5. To use delta functionality you must run [XenServer 6.5 or later](https://xen-orchestra.com/docs/supported-version.html).
|
||||
|
||||
### SR_BACKEND_FAILURE_44 (insufficient space)
|
||||
|
||||
> This message can be triggered by any backup method.
|
||||
@@ -72,4 +76,4 @@ To check your free space, enter your XOA and run `xoa check` to check free syste
|
||||
|
||||
This happens when you have a *smart backup job* that doesn't match any VMs. For example, you created a job to back up all running VMs; if no VMs are running when the schedule triggers, you'll get this message. It can also happen if you lost the connection to your pool master (the VMs aren't visible anymore from Xen Orchestra).
|
||||
|
||||
Edit your job and check whether it matches any VMs, or check that your pool is still connected to XOA.
|
||||
|
||||
@@ -141,3 +141,12 @@ To make the mount point persistent in XOA, edit the `/etc/fstab` file, and add:
|
||||
```
|
||||
|
||||
This way, without modifying your previously scheduled snapshots, they will be written to this new local mount point!
|
||||
|
||||
## High availability (HA) disabled on replicated VMs
|
||||
|
||||
HA on replicated VMs is taken into account by XS/XCP-ng. To avoid the resulting problems, HA will be disabled on the replicated VMs and a tag indicating this change will be added.
|
||||
|
||||

|
||||

|
||||
|
||||
> The tag won't be automatically removed by XO on the replicated VMs, even if HA is re-enabled.
|
||||
|
||||
@@ -4,42 +4,37 @@ Once Xen Orchestra is installed, you can configure some parameters in the config
|
||||
|
||||
## Configuration
|
||||
|
||||
The configuration file is located at `/etc/xo-server/config.yaml`.
|
||||
|
||||
**WARNING: YAML is very strict with indentation: use spaces, not tabs.**
|
||||
The configuration file is located at `/etc/xo-server/config.toml`.
|
||||
|
||||
### User to run XO-server as
|
||||
|
||||
By default, XO-server runs as 'root'. You can change that by uncommenting these lines and choosing whatever user/group you want:
|
||||
|
||||
```yaml
|
||||
user: 'nobody'
|
||||
group: 'nogroup'
|
||||
```toml
|
||||
user = 'nobody'
|
||||
group = 'nogroup'
|
||||
```
|
||||
|
||||
**Warning!** A non-privileged user:
|
||||
|
||||
* can't bind to a port < 1024
|
||||
* can't mount NFS shares
|
||||
**Warning!** A non-privileged user requires the use of `sudo` to mount NFS shares. See [installation from the sources](from_the_sources.md).
|
||||
|
||||
### HTTP listen address and port
|
||||
|
||||
By default, XO-server listens on all addresses (0.0.0.0) and runs on port 80. If you need to, you can change this in the `# Basic HTTP` section:
|
||||
|
||||
```yaml
|
||||
host: '0.0.0.0'
|
||||
port: 80
|
||||
```toml
|
||||
host = '0.0.0.0'
|
||||
port = 80
|
||||
```
|
||||
|
||||
### HTTPS
|
||||
|
||||
XO-server can also run over HTTPS (you can run HTTP and HTTPS at the same time): just modify what's needed in the `# Basic HTTPS` section, this time with the certificates/keys you need and their paths:
|
||||
|
||||
```yaml
|
||||
host: '0.0.0.0'
|
||||
port: 443
|
||||
certificate: './certificate.pem'
|
||||
key: './key.pem'
|
||||
```toml
|
||||
host = '0.0.0.0'
|
||||
port = 443
|
||||
certificate = './certificate.pem'
|
||||
key = './key.pem'
|
||||
```
|
||||
|
||||
> If a chain of certificate authorities is needed, you may bundle them directly in the certificate file. Note: the order of certificates matters: your certificate should come first, followed by the certificate of each intermediate authority, up to the root.
|
||||
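For example, assuming hypothetical file names, such a bundle can be built by simply concatenating the PEM files in that order:

```
# server certificate first, then intermediate CA(s), then (optionally) the root
cat server.pem intermediate-ca.pem root-ca.pem > certificate.pem
```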
@@ -60,10 +55,9 @@ This should be written just before the `mount` option, inside the `http:` block.
|
||||
|
||||
You shouldn't have to change this. It's the path where `xo-web` files are served by `xo-server`.
|
||||
|
||||
```yaml
|
||||
mounts:
|
||||
'/':
|
||||
- '../xo-web/dist/'
|
||||
```toml
|
||||
[http.mounts]
|
||||
'/' = '../xo-web/dist/'
|
||||
```
|
||||
|
||||
### Custom certificate authority
|
||||
@@ -87,8 +81,8 @@ Don't forget to reload `systemd` conf and restart `xo-server`:
|
||||
|
||||
By default, XO-server will try to contact the Redis server on `localhost`, port `6379`. But you can define whatever you want:
|
||||
|
||||
```yaml
|
||||
uri: 'tcp://db:password@hostname:port'
|
||||
```toml
|
||||
uri = 'tcp://db:password@hostname:port'
|
||||
```
|
||||
|
||||
### Proxy for XenServer updates and patches
|
||||
@@ -101,12 +95,12 @@ To do that behind a corporate proxy, just add the `httpProxy` variable to match
|
||||
|
||||
You can add this at the end of your config file:
|
||||
|
||||
```yaml
|
||||
```toml
|
||||
# HTTP proxy configuration used by xo-server to fetch resources on the Internet.
|
||||
#
|
||||
# See: https://github.com/TooTallNate/node-proxy-agent#maps-proxy-protocols-to-httpagent-implementations
|
||||
|
||||
httpProxy: 'http://username:password@proxyAddress:port'
|
||||
httpProxy = 'http://username:password@proxyAddress:port'
|
||||
```
|
||||
|
||||
### Log file
|
||||
|
||||
@@ -36,38 +36,52 @@ To protect the replication, we removed the possibility to boot your copied VM di
|
||||
|
||||
## Manual initial seed
|
||||
|
||||
> This is **only** if you need to make the initial copy without making the whole transfer through your network. Otherwise, **you don't need this**.
|
||||
**If you can't transfer the first backup through your network because it's too large**, you can make a seed locally. In order to do this, follow this procedure (until we make it accessible directly in XO).
|
||||
|
||||
**If you can't transfer the first backup through your network**, you can make a seed locally. In order to do this, follow this procedure (until we make it accessible directly in XO):
|
||||
> This is **only** if you need to make the initial copy without making the whole transfer through your network. Otherwise, **you don't need this**. These instructions are for Backup-NG jobs, and will not work to seed a legacy backup job. Please migrate any legacy jobs to Backup-NG!
|
||||
|
||||
### Preparation
|
||||
|
||||
1. create a cont. rep job to a non-distant SR (even the SR where the VM currently is). Do NOT enable the job during creation.
|
||||
1. manually start the first replication (only the first)
|
||||
1. when finished, export the replicated VM (via XOA or any other means, doesn't matter how you get your XVA file)
|
||||
1. import the replicated VM on your distant destination
|
||||
1. you can now remove your local replicated copy
|
||||
### Job creation
|
||||
|
||||
### Modifications
|
||||
Create the Continuous Replication backup job, and leave it disabled for now. On the main Backup-NG page, note its identifiers: the main `backupJobId` and the ID of one of the job's schedules, `backupScheduleId`.
|
||||
|
||||
In your source host:
|
||||
### Seed creation
|
||||
|
||||
1. Get the UUID of the remote destination SR where your VM was imported
|
||||
1. On the source host: `xe vm-param-list uuid=<SourceVM_UUID> | grep other-config`.
|
||||
* You should see somewhere in other-config: `xo:base_delta:<SR_UUID>: <VM_snapshot_UUID>;`
|
||||
* Remove this entry with `xe vm-param-remove uuid=<OriginalVM_UUID> param-name=other-config param-key=xo:base_delta:<SR_UUID>`
|
||||
* Recreate the correct param: `xe vm-param-set uuid=<OriginalVM_UUID> other-config:xo:base_delta:<destination_SR_UUID>=<VM_snapshot_UUID>`
|
||||
Manually create a snapshot of the VM to back up, and note its UUID (referred to as `snapshotUuid` below) from the VM's snapshot panel.
|
||||
|
||||
In XO:
|
||||
> Do NOT ever delete or alter this snapshot; feel free to rename it to make that clear.
|
||||
|
||||
1. Edit the replication job and select the new destination SR
|
||||
### Seed copy
|
||||
|
||||
On the destination host, to avoid data corruption, you need to prevent the VM from starting:
|
||||
Export this snapshot to a file, then import it on the target SR.
|
||||
|
||||
Note the UUID of this newly created VM as `targetVmUuid`.
|
||||
|
||||
> Do NOT start this VM or it will break the Continuous Replication job! You can rename this VM to make this easier to remember.
|
||||
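As a sketch of this export/import step from the CLI (the UUIDs and file name are placeholders, and you can just as well do it from XOA or XenCenter; check `xe help` on your version for the exact parameters):

```
# On the source pool: export the seed snapshot to an XVA file
xe snapshot-export-to-template snapshot-uuid=<snapshotUuid> filename=seed.xva

# On the destination pool: import it onto the target SR
xe vm-import filename=seed.xva sr-uuid=<targetSrUuid>
```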
|
||||
### Set up metadata
|
||||
|
||||
The XOA backup system requires metadata to correctly associate the source snapshot and the target VM to the backup job. We're going to use the `xo-cr-seed` utility to help us set them up.
|
||||
|
||||
First install the tool (all the following is done from the XOA VM CLI):
|
||||
|
||||
```
|
||||
xe vm-param-set blocked-operations:start uuid=<DestinationVM_UUID>
|
||||
npm i -g xo-cr-seed
|
||||
```
|
||||
|
||||
### Enable
|
||||
Here is an example of how the utility expects the UUIDs and other information to be passed to it:
|
||||
|
||||
Manually run the job the first time to check if everything is OK. Then, enable the job. **Now, only the deltas are sent, your initial seed saved you a LOT of time if you have a slow network.**
|
||||
```
|
||||
xo-cr-seed
|
||||
Usage: xo-cr-seed <source XAPI URL> <source snapshot UUID> <target XAPI URL> <target VM UUID> <backup job id> <backup schedule id>
|
||||
|
||||
xo-cr-seed v0.2.0
|
||||
```
|
||||
Putting it all together with our values and UUIDs, the command will look like this (it is a long command):
|
||||
```
|
||||
xo-cr-seed https://root:password@xen1.company.tld 4a21c1cd-e8bd-4466-910a-f7524ecc07b1 https://root:password@xen2.company.tld 5aaf86ca-ae06-4a4e-b6e1-d04f0609e64d 90d11a94-a88f-4a84-b7c1-ed207d3de2f9 369a26f0-da77-41ab-a998-fa6b02c69b9a
|
||||
```
|
||||
|
||||
### Finished
|
||||
|
||||
Your backup job should now be working correctly! Manually run the job the first time to check if everything is OK. Then, enable the job. **Now, only the deltas are sent, your initial seed saved you a LOT of time if you have a slow network.**
|
||||
|
||||
@@ -24,10 +24,11 @@ to create a [GitHub pull request](https://help.github.com/articles/using-pull-re
|
||||
|
||||
|
||||
1. Create a branch for your work
|
||||
2. Create a pull request for this branch against the `master` branch
|
||||
3. Push into the branch until the pull request is ready to merge
|
||||
4. Avoid unnecessary merges: keep your branch up to date by regularly rebasing: `git rebase origin/master`
|
||||
5. When ready to merge, clean up the history (reorder commits, squash some of them together, rephrase messages): `git rebase -i origin/master`
|
||||
2. Add a summary of your changes to `CHANGELOG.md` under the `next` section, if your changes do not relate to an existing changelog item
|
||||
3. Create a pull request for this branch against the `master` branch
|
||||
4. Push into the branch until the pull request is ready to merge
|
||||
5. Avoid unnecessary merges: keep your branch up to date by regularly rebasing: `git rebase origin/master`
|
||||
6. When ready to merge, clean up the history (reorder commits, squash some of them together, rephrase messages): `git rebase -i origin/master`
|
||||
|
||||
### Issue triage
|
||||
|
||||
|
||||
@@ -2,26 +2,50 @@
|
||||
|
||||
This is the easiest purchase option: you can buy XOA with your registered email account on `xen-orchestra.com`.
|
||||
|
||||
## Choose your edition
|
||||
|
||||
You can choose the edition you want in two places:
|
||||
|
||||
* [the pricing page](https://xen-orchestra.com/#!/pricing)
|
||||
* [your account/member page](https://xen-orchestra.com/#!/member)
|
||||
* [your account/purchases page](https://xen-orchestra.com/#!/purchases)
|
||||
|
||||
|
||||
> You need to be logged to make a purchase. If you don't have an account, please [register here](https://xen-orchestra.com/#!/signup).
|
||||
> You need to be logged in to make a purchase. If you don't have an account, please [register here](https://xen-orchestra.com/#!/signup).
|
||||
|
||||
From your account page, click on the purchase menu, then select the edition you need:
|
||||
|
||||

|
||||
|
||||
Then you need to fill in your information and select **"buy it for my own use"**:
|
||||
## Purchase options
|
||||
|
||||
The second step is to select your purchase option:
|
||||
|
||||
- Subscription: only available with a credit card payment. Choose this option for a monthly payment or a yearly payment **renewed automatically** each year.
|
||||
|
||||
- Paid period: **check or wire transfer only**. This purchase option allows you to subscribe for a one, two or three year period.
|
||||
|
||||
> A 2-year subscription period grants you a 1-month discount.
|
||||
> A 3-year subscription period grants you a 2-month discount.
|
||||
|
||||
Then you need to fill in your information and select **"Buy for my own use"** (direct purchase)
|
||||
|
||||

|
||||
|
||||
The default payment method is by **credit card**. But you can also choose the "wire transfer" tab (with the "bank" icon):
|
||||
## Billing information
|
||||
You need to complete all the required information on this page in order to move forward.
|
||||
|
||||

|
||||
> Note: If you are part of the Eurozone, you will need to provide a valid EU VAT number in order to proceed to payment. Transactions between companies inside the Eurozone are VAT free.
|
||||
Transactions outside the Eurozone are VAT free.
|
||||
|
||||
## Wire transfer process
|
||||

|
||||
|
||||
If you select wire transfer, you need to upload a proof of transfer before we can unlock your XOA. If you don't, you'll have to wait for funds to actually be transferred into our account.
|
||||
## Select your payment mode
|
||||
|
||||
Credit card, wire transfer and bank check are the three payment methods available on our store. Some methods may be unavailable depending on the purchase option you selected during step one.
|
||||
|
||||
> Wire transfer is not available for monthly and yearly subscriptions; credit card is not available for paid periods.
|
||||
|
||||

|
||||
|
||||
> All required information for wire transfer and check payment will be available in the last step of the payment process AND on your proforma invoice.
|
||||
> ⚠ Please, use an explicit reference for your wire transfer in order for us to easily identify your payment.
|
||||
|
||||
@@ -14,13 +14,13 @@ As you may have seen in other parts of the documentation, XO is composed of two
|
||||
|
||||
### NodeJS
|
||||
|
||||
XO needs Node.js. **Please always use the LTS version of Node**.
|
||||
XO needs Node.js. **Please use Node 8**.
|
||||
|
||||
We'll assume at this point that you've got a working Node on your box, e.g.:
|
||||
|
||||
```
|
||||
$ node -v
|
||||
v8.9.1
|
||||
v8.12.0
|
||||
```
|
||||
|
||||
If not, see [this page](https://nodejs.org/en/download/package-manager/) for instructions on how to install Node.
|
||||
@@ -43,7 +43,7 @@ apt-get install build-essential redis-server libpng-dev git python-minimal libvh
|
||||
|
||||
## Fetching the Code
|
||||
|
||||
You need to use the `git` source code manager to fetch the code. Ideally you should run XO as a non-root user, however if you don't run as root you will not be able to mount NFS remotes. As your chosen non-root (or root) user, run the following:
|
||||
You need to use the `git` source code manager to fetch the code. Ideally, you should run XO as a non-root user, and if you choose to, you need to set up `sudo` to be able to mount NFS remotes. As your chosen non-root (or root) user, run the following:
|
||||
|
||||
```
|
||||
git clone -b master http://github.com/vatesfr/xen-orchestra
|
||||
@@ -64,17 +64,15 @@ Now you have to create a config file for `xo-server`:
|
||||
|
||||
```
|
||||
$ cd packages/xo-server
|
||||
$ cp sample.config.yaml .xo-server.yaml
|
||||
$ cp sample.config.toml .xo-server.toml
|
||||
```
|
||||
|
||||
Edit and uncomment it to have the right path to serve `xo-web`, because `xo-server` embeds an HTTP server (we assume that `xen-orchestra` and `xo-web` are in the same directory). It's near the end of the file:
|
||||
Edit and uncomment it to have the right path to serve `xo-web`, because `xo-server` embeds an HTTP server (we assume that `xen-orchestra` and `xo-web` are in the same directory):
|
||||
|
||||
```yaml
|
||||
mounts: '/': '../xo-web/dist/'
|
||||
```toml
|
||||
[http.mounts]
|
||||
'/' = '../xo-web/dist/'
|
||||
```
|
||||
> Note this `dist` folder will be created in the next step.
|
||||
|
||||
**WARNING: YAML is very strict with indentation: use spaces for it, not tabs**.
|
||||
|
||||
In this config file, you can also change default ports (80 and 443) for xo-server. If you are running the server as a non-root user, you will need to set the port to 1024 or higher.
|
||||
|
||||
@@ -100,6 +98,9 @@ That's it! Use your browser to visit the xo-server IP address, and it works! :)
|
||||
If you would like to update your current version, enter your `xen-orchestra` directory and run the following:
|
||||
|
||||
```
|
||||
# This will clear any changes you made in the repository!!
|
||||
$ git checkout .
|
||||
|
||||
$ git pull --ff-only
|
||||
$ yarn
|
||||
$ yarn build
|
||||
@@ -140,9 +141,6 @@ If you need to delete the service:
|
||||
forever-service delete orchestra
|
||||
```
|
||||
|
||||
|
||||
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
If you have problems during the building phase, follow these steps in your `xen-orchestra` directory:
|
||||
@@ -184,3 +182,17 @@ Don't forget to start redis if you don't reboot now:
|
||||
```
|
||||
service redis start
|
||||
```
|
||||
|
||||
## SUDO
|
||||
|
||||
If you are running `xo-server` as a non-root user, you need to use `sudo` to be able to mount NFS remotes. You can do this by editing `xo-server/.xo-server.toml` and setting `useSudo = true`. It's near the end of the file:
|
||||
|
||||
```
|
||||
useSudo = true
|
||||
```
|
||||
|
||||
You need to configure `sudo` to allow the user of your choice to run mount/umount commands without asking for a password. Depending on your operating system / sudo version, the location of this configuration may change. Regardless, you can use:
|
||||
|
||||
```
|
||||
username ALL=(root)NOPASSWD: /bin/mount, /bin/umount
|
||||
```
|
||||
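If your system supports sudoers drop-in files, a minimal sketch of where to put that line (the file name is an assumption; adapt the path and user name to your distribution):

```
# Open a new sudoers drop-in with syntax checking, then add the line above
visudo -f /etc/sudoers.d/xo-server
```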
|
||||
@@ -6,10 +6,11 @@ Xen Orchestra is designed to work exclusively on [XCP-ng](https://xcp-ng.org/) a
|
||||
|
||||
Backup restore for large VM disks (>1TiB usage) is [broken on all XenServer versions](https://bugs.xenserver.org/browse/XSO-868) until Citrix releases a fix.
|
||||
|
||||
* XenServer 7.6
|
||||
* XenServer 7.5
|
||||
* [VDI I/O error](https://bugs.xenserver.org/browse/XSO-873), waiting for Citrix to release our fix
|
||||
* XenServer 7.4
|
||||
* XenServer 7.3
|
||||
* XenServer 7.2
|
||||
* XenServer 7.1
|
||||
* XenServer 7.0
|
||||
@@ -26,7 +27,8 @@ Backup restore for large VM disks (>1TiB usage) is [broken on all XenServer vers
|
||||
|
||||
All the pending fixes are already integrated in the latest XCP-ng version. We strongly suggest keeping up to date with the latest XCP-ng version whenever possible.
|
||||
|
||||
* XCP-ng 7.6
|
||||
* XCP-ng 7.5
|
||||
* XCP-ng 7.4.1
|
||||
|
||||

|
||||

|
||||
|
||||
@@ -4,7 +4,7 @@ If you can't purchase using your own account, usually because you need to go thr
|
||||
|
||||
Typically, you will provide two contacts:
|
||||
|
||||
* The "billing contact" (in general, the purchaser email). This account will have access to invoices. This is the account which makes the purchase and then binds the XO plan to the second contact account, the technical contact.
|
||||
* The "billing contact" (in general, the purchaser email). This account will have access to invoices. This is the account doing the purchase. Once purchased, the license needs to be bound to the second contact account, the technical contact.
|
||||
* The "technical contact", the email of the system administrator using the solution and making support requests.
|
||||
|
||||
## As "billing contact"
|
||||
@@ -17,10 +17,10 @@ Typically, you will provide two contacts:
|
||||
|
||||
Now, you just have to pick the edition of Xen Orchestra you want to purchase for your IT team.
|
||||
|
||||
2. You will then see the payment screen. If your are not purchasing the edition for yourself, you have to pick the **buy for another account** option.
|
||||
2. On the first payment screen, after you choose the plan and the subscription method, you can select the option "Buy for another account".
|
||||
|
||||
|
||||

|
||||

|
||||
|
||||
3. Once the payment is completed, you will have to bind the plan with the end-user account (technical contact). If the end-user doesn't have an account yet, the system will create one and send an e-mail to your end user.
|
||||
|
||||
@@ -29,4 +29,4 @@ Now, you just have to pick the edition of Xen Orchestra you want to purchase for
|
||||
|
||||
That's it, you have now completed the purchase.
|
||||
|
||||
**Once you have bound the plan to your end user account, you cannot change it. Double check the spelling of the e-mail before binding the account.**
|
||||
**⚠ Once you have bound the plan to your end user account, you cannot change it. Double check the spelling of the e-mail before binding the account.**
|
||||
|
||||
@@ -1,4 +1,3 @@
|
||||
|
||||
# xo-cli
|
||||
|
||||
This is another client of `xo-server` - this time in command line form.
|
||||
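Before running any commands, `xo-cli` needs to be registered against your XO instance. A minimal sketch (the URL and credentials below are placeholders):

```
> xo-cli --register http://xo.company.tld admin@admin.net <password>
> xo-cli --list-commands
```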
@@ -104,5 +103,6 @@ encoding by prefixing with `json:`:
|
||||
##### VM import
|
||||
|
||||
```
|
||||
> xo-cli vm.import host=60a6939e-8b0a-4352-9954-5bde44bcdf7d @=vm.xva
|
||||
> xo-cli vm.import sr=60a6939e-8b0a-4352-9954-5bde44bcdf7d @=vm.xva
|
||||
```
|
||||
> Note: `xo-cli` only supports the import of XVA files. It will not import OVA files. To import OVA images, you must use the XOA web UI.
|
||||
|
||||
27
package.json
@@ -1,33 +1,40 @@
|
||||
{
|
||||
"devDependencies": {
|
||||
"@babel/core": "7.0.0",
|
||||
"@babel/register": "7.0.0",
|
||||
"@babel/core": "^7.0.0",
|
||||
"@babel/register": "^7.0.0",
|
||||
"babel-core": "^7.0.0-0",
|
||||
"babel-eslint": "^9.0.0",
|
||||
"babel-eslint": "^10.0.1",
|
||||
"babel-jest": "^23.0.1",
|
||||
"benchmark": "^2.1.4",
|
||||
"eslint": "^5.1.0",
|
||||
"eslint-config-prettier": "^3.3.0",
|
||||
"eslint-config-standard": "12.0.0",
|
||||
"eslint-config-standard-jsx": "^6.0.2",
|
||||
"eslint-plugin-import": "^2.8.0",
|
||||
"eslint-plugin-node": "^7.0.1",
|
||||
"eslint-plugin-node": "^8.0.0",
|
||||
"eslint-plugin-promise": "^4.0.0",
|
||||
"eslint-plugin-react": "^7.6.1",
|
||||
"eslint-plugin-standard": "^4.0.0",
|
||||
"exec-promise": "^0.7.0",
|
||||
"flow-bin": "^0.80.0",
|
||||
"globby": "^8.0.0",
|
||||
"husky": "^0.14.3",
|
||||
"flow-bin": "^0.90.0",
|
||||
"globby": "^9.0.0",
|
||||
"husky": "^1.2.1",
|
||||
"jest": "^23.0.1",
|
||||
"lodash": "^4.17.4",
|
||||
"prettier": "^1.10.2",
|
||||
"promise-toolbox": "^0.10.1",
|
||||
"promise-toolbox": "^0.11.0",
|
||||
"sorted-object": "^2.0.1"
|
||||
},
|
||||
"engines": {
|
||||
"yarn": "^1.7.0"
|
||||
},
|
||||
"husky": {
|
||||
"hooks": {
|
||||
"pre-commit": "scripts/lint-staged"
|
||||
}
|
||||
},
|
||||
"jest": {
|
||||
"timers": "fake",
|
||||
"collectCoverage": true,
|
||||
"projects": [
|
||||
"<rootDir>"
|
||||
@@ -49,11 +56,11 @@
|
||||
"dev": "scripts/run-script --parallel dev",
|
||||
"dev-test": "jest --bail --watch \"^(?!.*\\.integ\\.spec\\.js$)\"",
|
||||
"posttest": "scripts/run-script test",
|
||||
"precommit": "scripts/lint-staged",
|
||||
"prepare": "scripts/run-script prepare",
|
||||
"pretest": "eslint --ignore-path .gitignore .",
|
||||
"test": "jest \"^(?!.*\\.integ\\.spec\\.js$)\"",
|
||||
"test-integration": "jest \".integ\\.spec\\.js$\""
|
||||
"test-integration": "jest \".integ\\.spec\\.js$\"",
|
||||
"travis-tests": "scripts/travis-tests"
|
||||
},
|
||||
"workspaces": [
|
||||
"@xen-orchestra/*",
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "complex-matcher",
|
||||
"version": "0.4.0",
|
||||
"version": "0.5.0",
|
||||
"license": "ISC",
|
||||
"description": "",
|
||||
"keywords": [],
|
||||
@@ -30,9 +30,9 @@
|
||||
"lodash": "^4.17.4"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@babel/cli": "7.0.0",
|
||||
"@babel/core": "7.0.0",
|
||||
"@babel/preset-env": "7.0.0",
|
||||
"@babel/cli": "^7.0.0",
|
||||
"@babel/core": "^7.0.0",
|
||||
"@babel/preset-env": "^7.0.0",
|
||||
"babel-plugin-lodash": "^3.3.2",
|
||||
"cross-env": "^5.1.1",
|
||||
"rimraf": "^2.6.2"
|
||||
|
||||
@@ -11,7 +11,7 @@ export const ast = new CM.And([
|
||||
new CM.Or([new CM.String('wonderwoman'), new CM.String('batman')])
|
||||
),
|
||||
new CM.TruthyProperty('hasCape'),
|
||||
new CM.Property('age', new CM.Number(32)),
|
||||
new CM.Property('age', new CM.NumberOrStringNode('32')),
|
||||
new CM.GlobPattern('chi*go'),
|
||||
new CM.RegExp('^foo/bar\\.', 'i'),
|
||||
])
|
||||
|
||||
@@ -33,17 +33,17 @@ const isRawString = string => {
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
class Node {
|
||||
createPredicate () {
|
||||
createPredicate() {
|
||||
return value => this.match(value)
|
||||
}
|
||||
}
|
||||
|
||||
export class Null extends Node {
|
||||
match () {
|
||||
match() {
|
||||
return true
|
||||
}
|
||||
|
||||
toString () {
|
||||
toString() {
|
||||
return ''
|
||||
}
|
||||
}
|
||||
@@ -51,7 +51,7 @@ export class Null extends Node {
|
||||
const formatTerms = terms => terms.map(term => term.toString(true)).join(' ')
|
||||
|
||||
export class And extends Node {
|
||||
constructor (children) {
|
||||
constructor(children) {
|
||||
super()
|
||||
|
||||
if (children.length === 1) {
|
||||
@@ -60,29 +60,29 @@ export class And extends Node {
|
||||
this.children = children
|
||||
}
|
||||
|
||||
match (value) {
|
||||
match(value) {
|
||||
return this.children.every(child => child.match(value))
|
||||
}
|
||||
|
||||
toString (isNested) {
|
||||
toString(isNested) {
|
||||
const terms = formatTerms(this.children)
|
||||
return isNested ? `(${terms})` : terms
|
||||
}
|
||||
}
|
||||
|
||||
export class Comparison extends Node {
|
||||
constructor (operator, value) {
|
||||
constructor(operator, value) {
|
||||
super()
|
||||
this._comparator = Comparison.comparators[operator]
|
||||
this._operator = operator
|
||||
this._value = value
|
||||
}
|
||||
|
||||
match (value) {
|
||||
match(value) {
|
||||
return typeof value === 'number' && this._comparator(value, this._value)
|
||||
}
|
||||
|
||||
toString () {
|
||||
toString() {
|
||||
return this._operator + String(this._value)
|
||||
}
|
||||
}
|
||||
@@ -94,7 +94,7 @@ Comparison.comparators = {
|
||||
}
|
||||
|
||||
export class Or extends Node {
|
||||
constructor (children) {
|
||||
constructor(children) {
|
||||
super()
|
||||
|
||||
if (children.length === 1) {
|
||||
@@ -103,33 +103,33 @@ export class Or extends Node {
|
||||
this.children = children
|
||||
}
|
||||
|
||||
match (value) {
|
||||
match(value) {
|
||||
return this.children.some(child => child.match(value))
|
||||
}
|
||||
|
||||
toString () {
|
||||
toString() {
|
||||
return `|(${formatTerms(this.children)})`
|
||||
}
|
||||
}
|
||||
|
||||
export class Not extends Node {
|
||||
constructor (child) {
|
||||
constructor(child) {
|
||||
super()
|
||||
|
||||
this.child = child
|
||||
}
|
||||
|
||||
match (value) {
|
||||
match(value) {
|
||||
return !this.child.match(value)
|
||||
}
|
||||
|
||||
toString () {
|
||||
toString() {
|
||||
return '!' + this.child.toString(true)
|
||||
}
|
||||
}
|
||||
|
||||
export class NumberNode extends Node {
|
||||
constructor (value) {
|
||||
constructor(value) {
|
||||
super()
|
||||
|
||||
this.value = value
|
||||
@@ -140,32 +140,60 @@ export class NumberNode extends Node {
|
||||
})
|
||||
}
|
||||
|
||||
match (value) {
|
||||
match(value) {
|
||||
return (
|
||||
value === this.value ||
|
||||
(value !== null && typeof value === 'object' && some(value, this.match))
|
||||
)
|
||||
}
|
||||
|
||||
toString () {
|
||||
toString() {
|
||||
return String(this.value)
|
||||
}
|
||||
}
|
||||
export { NumberNode as Number }
|
||||
|
||||
export class NumberOrStringNode extends Node {
|
||||
constructor(value) {
|
||||
super()
|
||||
|
||||
this.value = value
|
||||
|
||||
// should not be enumerable for the tests
|
||||
Object.defineProperty(this, 'match', {
|
||||
value: this.match.bind(this, value.toLowerCase(), +value),
|
||||
})
|
||||
}
|
||||
|
||||
match(lcValue, numValue, value) {
|
||||
return (
|
||||
value === numValue ||
|
||||
(typeof value === 'string'
|
||||
? value.toLowerCase().indexOf(lcValue) !== -1
|
||||
: (Array.isArray(value) || isPlainObject(value)) &&
|
||||
some(value, this.match))
|
||||
)
|
||||
}
|
||||
|
||||
toString() {
|
||||
return this.value
|
||||
}
|
||||
}
|
||||
export { NumberOrStringNode as NumberOrString }
|
||||
|
||||
export class Property extends Node {
|
||||
constructor (name, child) {
|
||||
constructor(name, child) {
|
||||
super()
|
||||
|
||||
this.name = name
|
||||
this.child = child
|
||||
}
|
||||
|
||||
match (value) {
|
||||
match(value) {
|
||||
return value != null && this.child.match(value[this.name])
|
||||
}
|
||||
|
||||
toString () {
|
||||
toString() {
|
||||
return `${formatString(this.name)}:${this.child.toString(true)}`
|
||||
}
|
||||
}
|
||||
@@ -179,7 +207,7 @@ const formatString = value =>
|
||||
: `"${value}"`
|
||||
|
||||
export class GlobPattern extends Node {
|
||||
constructor (value) {
|
||||
constructor(value) {
|
||||
// fallback to string node if no wildcard
|
||||
if (value.indexOf('*') === -1) {
|
||||
return new StringNode(value)
|
||||
@@ -204,7 +232,7 @@ export class GlobPattern extends Node {
|
||||
})
|
||||
}
|
||||
|
||||
match (re, value) {
|
||||
match(re, value) {
|
||||
if (typeof value === 'string') {
|
||||
return re.test(value)
|
||||
}
|
||||
@@ -216,13 +244,13 @@ export class GlobPattern extends Node {
|
||||
return false
|
||||
}
|
||||
|
||||
toString () {
|
||||
toString() {
|
||||
return this.value
|
||||
}
|
||||
}
|
||||
|
||||
export class RegExpNode extends Node {
|
||||
constructor (pattern, flags) {
|
||||
constructor(pattern, flags) {
|
||||
super()
|
||||
|
||||
this.re = new RegExp(pattern, flags)
|
||||
@@ -233,7 +261,7 @@ export class RegExpNode extends Node {
|
||||
})
|
||||
}
|
||||
|
||||
match (value) {
|
||||
match(value) {
|
||||
if (typeof value === 'string') {
|
||||
return this.re.test(value)
|
||||
}
|
||||
@@ -245,14 +273,14 @@ export class RegExpNode extends Node {
|
||||
return false
|
||||
}
|
||||
|
||||
toString () {
|
||||
toString() {
|
||||
return this.re.toString()
|
||||
}
|
||||
}
|
||||
export { RegExpNode as RegExp }
|
||||
|
||||
export class StringNode extends Node {
|
||||
constructor (value) {
|
||||
constructor(value) {
|
||||
super()
|
||||
|
||||
this.value = value
|
||||
@@ -263,7 +291,7 @@ export class StringNode extends Node {
|
||||
})
|
||||
}
|
||||
|
||||
match (lcValue, value) {
|
||||
match(lcValue, value) {
|
||||
if (typeof value === 'string') {
|
||||
return value.toLowerCase().indexOf(lcValue) !== -1
|
||||
}
|
||||
@@ -275,24 +303,24 @@ export class StringNode extends Node {
|
||||
return false
|
||||
}
|
||||
|
||||
toString () {
|
||||
toString() {
|
||||
return formatString(this.value)
|
||||
}
|
||||
}
|
||||
export { StringNode as String }
|
||||
|
||||
export class TruthyProperty extends Node {
|
||||
constructor (name) {
|
||||
constructor(name) {
|
||||
super()
|
||||
|
||||
this.name = name
|
||||
}
|
||||
|
||||
match (value) {
|
||||
match(value) {
|
||||
return value != null && !!value[this.name]
|
||||
}
|
||||
|
||||
toString () {
|
||||
toString() {
|
||||
return formatString(this.name) + '?'
|
||||
}
|
||||
}
|
||||
@@ -302,12 +330,12 @@ export class TruthyProperty extends Node {
|
||||
// https://gist.github.com/yelouafi/556e5159e869952335e01f6b473c4ec1
|
||||
|
||||
class Failure {
|
||||
constructor (pos, expected) {
|
||||
constructor(pos, expected) {
|
||||
this.expected = expected
|
||||
this.pos = pos
|
||||
}
|
||||
|
||||
get value () {
|
||||
get value() {
|
||||
throw new Error(
|
||||
`parse error: expected ${this.expected} at position ${this.pos}`
|
||||
)
|
||||
@@ -315,7 +343,7 @@ class Failure {
|
||||
}
|
||||
|
||||
class Success {
|
||||
constructor (pos, value) {
|
||||
constructor(pos, value) {
|
||||
this.pos = pos
|
||||
this.value = value
|
||||
}
|
||||
@@ -324,7 +352,7 @@ class Success {
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
class P {
|
||||
static alt (...parsers) {
|
||||
static alt(...parsers) {
|
||||
const { length } = parsers
|
||||
return new P((input, pos, end) => {
|
||||
for (let i = 0; i < length; ++i) {
|
||||
@@ -337,7 +365,7 @@ class P {
|
||||
})
|
||||
}
|
||||
|
||||
static grammar (rules) {
|
||||
static grammar(rules) {
|
||||
const grammar = {}
|
||||
Object.keys(rules).forEach(k => {
|
||||
const rule = rules[k]
|
||||
@@ -346,14 +374,14 @@ class P {
|
||||
return grammar
|
||||
}
|
||||
|
||||
static lazy (parserCreator, arg) {
|
||||
static lazy(parserCreator, arg) {
|
||||
const parser = new P((input, pos, end) =>
|
||||
(parser._parse = parserCreator(arg)._parse)(input, pos, end)
|
||||
)
|
||||
return parser
|
||||
}
|
||||
|
||||
static regex (regex) {
|
||||
static regex(regex) {
|
||||
regex = new RegExp(regex.source, 'y')
|
||||
return new P((input, pos) => {
|
||||
regex.lastIndex = pos
|
||||
@@ -364,7 +392,7 @@ class P {
|
||||
})
|
||||
}
|
||||
|
||||
static seq (...parsers) {
|
||||
static seq(...parsers) {
|
||||
const { length } = parsers
|
||||
return new P((input, pos, end) => {
|
||||
const values = new Array(length)
|
||||
@@ -380,21 +408,20 @@ class P {
|
||||
})
|
||||
}
|
||||
|
||||
static text (text) {
|
||||
static text(text) {
|
||||
const { length } = text
|
||||
return new P(
|
||||
(input, pos) =>
|
||||
input.startsWith(text, pos)
|
||||
? new Success(pos + length, text)
|
||||
: new Failure(pos, `'${text}'`)
|
||||
return new P((input, pos) =>
|
||||
input.startsWith(text, pos)
|
||||
? new Success(pos + length, text)
|
||||
: new Failure(pos, `'${text}'`)
|
||||
)
|
||||
}
|
||||
|
||||
constructor (parse) {
|
||||
constructor(parse) {
|
||||
this._parse = parse
|
||||
}
|
||||
|
||||
map (fn) {
|
||||
map(fn) {
|
||||
return new P((input, pos, end) => {
|
||||
const result = this._parse(input, pos, end)
|
||||
if (result instanceof Success) {
|
||||
@@ -404,11 +431,11 @@ class P {
|
||||
})
|
||||
}
|
||||
|
||||
parse (input, pos = 0, end = input.length) {
|
||||
parse(input, pos = 0, end = input.length) {
|
||||
return this._parse(input, pos, end).value
|
||||
}
|
||||
|
||||
repeat (min = 0, max = Infinity) {
|
||||
repeat(min = 0, max = Infinity) {
|
||||
return new P((input, pos, end) => {
|
||||
const value = []
|
||||
let result
|
||||
@@ -434,7 +461,7 @@ class P {
|
||||
})
|
||||
}
|
||||
|
||||
skip (otherParser) {
|
||||
skip(otherParser) {
|
||||
return new P((input, pos, end) => {
|
||||
const result = this._parse(input, pos, end)
|
||||
if (result instanceof Failure) {
|
||||
@@ -450,17 +477,16 @@ class P {
|
||||
}
|
||||
}
|
||||
|
||||
P.eof = new P(
|
||||
(input, pos, end) =>
|
||||
pos < end ? new Failure(pos, 'end of input') : new Success(pos)
|
||||
P.eof = new P((input, pos, end) =>
|
||||
pos < end ? new Failure(pos, 'end of input') : new Success(pos)
|
||||
)
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
const parser = P.grammar({
|
||||
default: r =>
|
||||
P.seq(r.ws, r.term.repeat(), P.eof).map(
|
||||
([, terms]) => (terms.length === 0 ? new Null() : new And(terms))
|
||||
P.seq(r.ws, r.term.repeat(), P.eof).map(([, terms]) =>
|
||||
terms.length === 0 ? new Null() : new And(terms)
|
||||
),
|
||||
globPattern: new P((input, pos, end) => {
|
||||
let value = ''
|
||||
@@ -564,7 +590,7 @@ const parser = P.grammar({
|
||||
const asNum = +str
|
||||
return Number.isNaN(asNum)
|
||||
? new GlobPattern(str)
|
||||
: new NumberNode(asNum)
|
||||
: new NumberOrStringNode(str)
|
||||
})
|
||||
),
|
||||
ws: P.regex(/\s*/),
|
||||
|
||||
@@ -6,6 +6,7 @@ import {
|
||||
GlobPattern,
|
||||
Null,
|
||||
NumberNode,
|
||||
NumberOrStringNode,
|
||||
parse,
|
||||
setPropertyClause,
|
||||
} from './'
|
||||
@@ -32,7 +33,7 @@ describe('parse', () => {
|
||||
|
||||
node = parse('32')
|
||||
expect(node.match(32)).toBe(true)
|
||||
expect(node.match('32')).toBe(false)
|
||||
expect(node.match('32')).toBe(true)
|
||||
expect(node.toString()).toBe('32')
|
||||
|
||||
node = parse('"32"')
|
||||
@@ -54,6 +55,12 @@ describe('Number', () => {
|
||||
})
|
||||
})
|
||||
|
||||
describe('NumberOrStringNode', () => {
|
||||
it('match a string', () => {
|
||||
expect(new NumberOrStringNode('123').match([{ foo: '123' }])).toBe(true)
|
||||
})
|
||||
})
|
||||
|
||||
describe('setPropertyClause', () => {
|
||||
it('creates a node if none passed', () => {
|
||||
expect(setPropertyClause(undefined, 'foo', 'bar').toString()).toBe(
|
||||
|
||||
@@ -28,10 +28,10 @@
|
||||
},
|
||||
"dependencies": {},
|
||||
"devDependencies": {
|
||||
"@babel/cli": "7.0.0",
|
||||
"@babel/core": "7.0.0",
|
||||
"@babel/preset-env": "7.0.0",
|
||||
"@babel/preset-flow": "7.0.0",
|
||||
"@babel/cli": "^7.0.0",
|
||||
"@babel/core": "^7.0.0",
|
||||
"@babel/preset-env": "^7.0.0",
|
||||
"@babel/preset-flow": "^7.0.0",
|
||||
"cross-env": "^5.1.3",
|
||||
"rimraf": "^2.6.2"
|
||||
},
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "vhd-cli",
|
||||
"version": "0.1.0",
|
||||
"version": "0.2.0",
|
||||
"license": "ISC",
|
||||
"description": "",
|
||||
"keywords": [],
|
||||
@@ -26,21 +26,22 @@
|
||||
"node": ">=6"
|
||||
},
|
||||
"dependencies": {
|
||||
"@xen-orchestra/fs": "^0.3.0",
|
||||
"@xen-orchestra/fs": "^0.6.0",
|
||||
"cli-progress": "^2.0.0",
|
||||
"exec-promise": "^0.7.0",
|
||||
"getopts": "^2.2.3",
|
||||
"struct-fu": "^1.2.0",
|
||||
"vhd-lib": "^0.3.0"
|
||||
"vhd-lib": "^0.5.1"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@babel/cli": "7.0.0",
|
||||
"@babel/core": "7.0.0",
|
||||
"@babel/preset-env": "7.0.0",
|
||||
"@babel/cli": "^7.0.0",
|
||||
"@babel/core": "^7.0.0",
|
||||
"@babel/preset-env": "^7.0.0",
|
||||
"babel-plugin-lodash": "^3.3.2",
|
||||
"cross-env": "^5.1.3",
|
||||
"execa": "^1.0.0",
|
||||
"index-modules": "^0.3.0",
|
||||
"promise-toolbox": "^0.10.1",
|
||||
"promise-toolbox": "^0.11.0",
|
||||
"rimraf": "^2.6.1",
|
||||
"tmp": "^0.0.33"
|
||||
},
|
||||
|
||||
@@ -1,12 +1,24 @@
|
||||
import Vhd from 'vhd-lib'
|
||||
import Vhd, { checkVhdChain } from 'vhd-lib'
|
||||
import getopts from 'getopts'
|
||||
import { getHandler } from '@xen-orchestra/fs'
|
||||
import { resolve } from 'path'
|
||||
|
||||
export default async args => {
|
||||
const checkVhd = (handler, path) => new Vhd(handler, path).readHeaderAndFooter()
|
||||
|
||||
export default async rawArgs => {
|
||||
const { chain, _: args } = getopts(rawArgs, {
|
||||
boolean: ['chain'],
|
||||
default: {
|
||||
chain: false,
|
||||
},
|
||||
})
|
||||
|
||||
const check = chain ? checkVhdChain : checkVhd
|
||||
|
||||
const handler = getHandler({ url: 'file:///' })
|
||||
for (const vhd of args) {
|
||||
try {
|
||||
await new Vhd(handler, resolve(vhd)).readHeaderAndFooter()
|
||||
await check(handler, resolve(vhd))
|
||||
console.log('ok:', vhd)
|
||||
} catch (error) {
|
||||
console.error('nok:', vhd, error)
|
||||
|
||||
@@ -3,7 +3,7 @@ import { mergeVhd } from 'vhd-lib'
|
||||
import { getHandler } from '@xen-orchestra/fs'
|
||||
import { resolve } from 'path'
|
||||
|
||||
export default async function main (args) {
|
||||
export default async function main(args) {
|
||||
if (args.length < 2 || args.some(_ => _ === '-h' || _ === '--help')) {
|
||||
return `Usage: ${this.command} <child VHD> <parent VHD>`
|
||||
}
|
||||
@@ -11,7 +11,7 @@ export default async function main (args) {
|
||||
const handler = getHandler({ url: 'file:///' })
|
||||
let bar
|
||||
await mergeVhd(handler, resolve(args[1]), handler, resolve(args[0]), {
|
||||
onProgress ({ done, total }) {
|
||||
onProgress({ done, total }) {
|
||||
if (bar === undefined) {
|
||||
bar = new Bar({
|
||||
format:
|
||||
|
||||
@@ -3,19 +3,18 @@ import { createSyntheticStream } from 'vhd-lib'
|
||||
import { createWriteStream } from 'fs'
|
||||
import { getHandler } from '@xen-orchestra/fs'
|
||||
|
||||
export default async function main (args) {
|
||||
export default async function main(args) {
|
||||
if (args.length < 2 || args.some(_ => _ === '-h' || _ === '--help')) {
|
||||
return `Usage: ${this.command} <input VHD> <output VHD>`
|
||||
}
|
||||
|
||||
const handler = getHandler({ url: 'file:///' })
|
||||
const stream = await createSyntheticStream(handler, path.resolve(args[0]))
|
||||
return new Promise((resolve, reject) => {
|
||||
createSyntheticStream(handler, path.resolve(args[0]))
|
||||
.on('error', reject)
|
||||
.pipe(
|
||||
createWriteStream(args[1])
|
||||
.on('error', reject)
|
||||
.on('finish', resolve)
|
||||
)
|
||||
stream.on('error', reject).pipe(
|
||||
createWriteStream(args[1])
|
||||
.on('error', reject)
|
||||
.on('finish', resolve)
|
||||
)
|
||||
})
|
||||
}
|
||||
|
||||
@@ -4,7 +4,7 @@ import execPromise from 'exec-promise'
|
||||
|
||||
import commands from './commands'
|
||||
|
||||
function runCommand (commands, [command, ...args]) {
|
||||
function runCommand(commands, [command, ...args]) {
|
||||
if (command === undefined || command === '-h' || command === '--help') {
|
||||
command = 'help'
|
||||
}
|
||||
@@ -16,9 +16,9 @@ function runCommand (commands, [command, ...args]) {
|
||||
return `Usage:
|
||||
|
||||
${Object.keys(commands)
|
||||
.filter(command => command !== 'help')
|
||||
.map(command => ` ${this.command} ${command}`)
|
||||
.join('\n\n')}`
|
||||
.filter(command => command !== 'help')
|
||||
.map(command => ` ${this.command} ${command}`)
|
||||
.join('\n\n')}`
|
||||
}
|
||||
|
||||
throw `invalid command ${command}` // eslint-disable-line no-throw-literal
|
||||
|
||||
@@ -9,11 +9,9 @@ import { fromEvent, pFromCallback } from 'promise-toolbox'
|
||||
import { getHandler } from '@xen-orchestra/fs'
|
||||
import { randomBytes } from 'crypto'
|
||||
|
||||
import chainVhd from './chain'
|
||||
import createReadStream from './createSyntheticStream'
|
||||
import Vhd from './vhd'
|
||||
import vhdMerge from './merge'
|
||||
import { SECTOR_SIZE } from './_constants'
|
||||
import Vhd, { chainVhd, createSyntheticStream, mergeVhd as vhdMerge } from './'
|
||||
|
||||
import { SECTOR_SIZE } from './src/_constants'
|
||||
|
||||
const initialDir = process.cwd()
|
||||
|
||||
@@ -30,18 +28,18 @@ afterEach(async () => {
|
||||
await pFromCallback(cb => rimraf(tmpDir, cb))
|
||||
})
|
||||
|
||||
async function createRandomFile (name, sizeMb) {
|
||||
async function createRandomFile(name, sizeMb) {
|
||||
await execa('bash', [
|
||||
'-c',
|
||||
`< /dev/urandom tr -dc "\\t\\n [:alnum:]" | head -c ${sizeMb}M >${name}`,
|
||||
])
|
||||
}
|
||||
|
||||
async function checkFile (vhdName) {
|
||||
async function checkFile(vhdName) {
|
||||
await execa('vhd-util', ['check', '-p', '-b', '-t', '-n', vhdName])
|
||||
}
|
||||
|
||||
async function recoverRawContent (vhdName, rawName, originalSize) {
|
||||
async function recoverRawContent(vhdName, rawName, originalSize) {
|
||||
await checkFile(vhdName)
|
||||
await execa('qemu-img', ['convert', '-fvpc', '-Oraw', vhdName, rawName])
|
||||
if (originalSize !== undefined) {
|
||||
@@ -49,7 +47,7 @@ async function recoverRawContent (vhdName, rawName, originalSize) {
|
||||
}
|
||||
}
|
||||
|
||||
async function convertFromRawToVhd (rawName, vhdName) {
|
||||
async function convertFromRawToVhd(rawName, vhdName) {
|
||||
await execa('qemu-img', ['convert', '-f', 'raw', '-Ovpc', rawName, vhdName])
|
||||
}
|
||||
|
||||
@@ -270,14 +268,18 @@ test('coalesce works in normal cases', async () => {
|
||||
|
||||
test('createSyntheticStream passes vhd-util check', async () => {
|
||||
const initalSize = 4
|
||||
const expectedVhdSize = 4197888
|
||||
await createRandomFile('randomfile', initalSize)
|
||||
await convertFromRawToVhd('randomfile', 'randomfile.vhd')
|
||||
const handler = getHandler({ url: 'file://' + process.cwd() })
|
||||
const stream = createReadStream(handler, 'randomfile.vhd')
|
||||
const stream = await createSyntheticStream(handler, 'randomfile.vhd')
|
||||
expect(stream.length).toEqual(expectedVhdSize)
|
||||
await fromEvent(
|
||||
stream.pipe(await fs.createWriteStream('recovered.vhd')),
|
||||
'finish'
|
||||
)
|
||||
await checkFile('recovered.vhd')
|
||||
const stats = await fs.stat('recovered.vhd')
|
||||
expect(stats.size).toEqual(expectedVhdSize)
|
||||
await execa('qemu-img', ['compare', 'recovered.vhd', 'randomfile'])
|
||||
})
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "vhd-lib",
|
||||
"version": "0.3.0",
|
||||
"version": "0.5.1",
|
||||
"license": "AGPL-3.0",
|
||||
"description": "Primitives for VHD file handling",
|
||||
"keywords": [],
|
||||
@@ -21,19 +21,20 @@
|
||||
},
|
||||
"dependencies": {
|
||||
"async-iterator-to-stream": "^1.0.2",
|
||||
"core-js": "3.0.0-beta.3",
|
||||
"from2": "^2.3.0",
|
||||
"fs-extra": "^7.0.0",
|
||||
"limit-concurrency-decorator": "^0.4.0",
|
||||
"promise-toolbox": "^0.10.1",
|
||||
"promise-toolbox": "^0.11.0",
|
||||
"struct-fu": "^1.2.0",
|
||||
"uuid": "^3.0.1"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@babel/cli": "7.0.0",
|
||||
"@babel/core": "7.0.0",
|
||||
"@babel/preset-env": "7.0.0",
|
||||
"@babel/preset-flow": "7.0.0",
|
||||
"@xen-orchestra/fs": "^0.3.0",
|
||||
"@babel/cli": "^7.0.0",
|
||||
"@babel/core": "^7.0.0",
|
||||
"@babel/preset-env": "^7.0.0",
|
||||
"@babel/preset-flow": "^7.0.0",
|
||||
"@xen-orchestra/fs": "^0.6.0",
|
||||
"babel-plugin-lodash": "^3.3.2",
|
||||
"cross-env": "^5.1.3",
|
||||
"execa": "^1.0.0",
|
||||
|
||||
@@ -1,13 +1,10 @@
|
||||
import { SECTOR_SIZE } from './_constants'
|
||||
|
||||
export default function computeGeometryForSize (size) {
|
||||
const totalSectors = Math.ceil(size / 512)
|
||||
export default function computeGeometryForSize(size) {
|
||||
const totalSectors = Math.min(Math.ceil(size / 512), 65535 * 16 * 255)
|
||||
let sectorsPerTrackCylinder
|
||||
let heads
|
||||
let cylinderTimesHeads
|
||||
if (totalSectors > 65535 * 16 * 255) {
|
||||
throw Error('disk is too big')
|
||||
}
|
||||
// straight copypasta from the file spec appendix on CHS Calculation
|
||||
if (totalSectors >= 65535 * 16 * 63) {
|
||||
sectorsPerTrackCylinder = 255
|
||||
|
||||
@@ -14,7 +14,7 @@ import {
|
||||
PLATFORM_WI2K,
|
||||
} from './_constants'
|
||||
|
||||
export function createFooter (
|
||||
export function createFooter(
|
||||
size,
|
||||
timestamp,
|
||||
geometry,
|
||||
@@ -39,7 +39,7 @@ export function createFooter (
|
||||
return footer
|
||||
}
|
||||
|
||||
export function createHeader (
|
||||
export function createHeader(
|
||||
maxTableEntries,
|
||||
tableOffset = HEADER_SIZE + FOOTER_SIZE,
|
||||
blockSize = VHD_BLOCK_SIZE_BYTES
|
||||
|
||||
6
packages/vhd-lib/src/_resolveRelativeFromFile.js
Normal file
@@ -0,0 +1,6 @@
|
||||
import { dirname, resolve } from 'path'
|
||||
|
||||
const resolveRelativeFromFile = (file, path) =>
|
||||
resolve('/', dirname(file), path).slice(1)
|
||||
|
||||
export { resolveRelativeFromFile as default }
|
||||
@@ -95,7 +95,7 @@ export const unpackField = (field, buf) => {
|
||||
|
||||
// Returns the checksum of a raw struct.
|
||||
// The raw struct (footer or header) is altered with the new sum.
|
||||
export function checksumStruct (buf, struct) {
|
||||
export function checksumStruct(buf, struct) {
|
||||
const checksumField = struct.fields.checksum
|
||||
let sum = 0
|
||||
|
||||
|
||||
@@ -3,7 +3,7 @@ import { dirname, relative } from 'path'
|
||||
import Vhd from './vhd'
|
||||
import { DISK_TYPE_DIFFERENCING } from './_constants'
|
||||
|
||||
export default async function chain (
|
||||
export default async function chain(
|
||||
parentHandler,
|
||||
parentPath,
|
||||
childHandler,
|
||||
|
||||
16
packages/vhd-lib/src/checkChain.js
Normal file
@@ -0,0 +1,16 @@
|
||||
import Vhd from './vhd'
|
||||
import resolveRelativeFromFile from './_resolveRelativeFromFile'
|
||||
import { DISK_TYPE_DYNAMIC } from './_constants'
|
||||
|
||||
export default async function checkChain(handler, path) {
|
||||
while (true) {
|
||||
const vhd = new Vhd(handler, path)
|
||||
await vhd.readHeaderAndFooter()
|
||||
|
||||
if (vhd.footer.diskType === DISK_TYPE_DYNAMIC) {
|
||||
break
|
||||
}
|
||||
|
||||
path = resolveRelativeFromFile(path, vhd.header.parentUnicodeName)
|
||||
}
|
||||
}
|
||||
@@ -2,7 +2,7 @@ import asyncIteratorToStream from 'async-iterator-to-stream'
|
||||
|
||||
import Vhd from './vhd'
|
||||
|
||||
export default asyncIteratorToStream(async function * (handler, path) {
|
||||
export default asyncIteratorToStream(async function*(handler, path) {
|
||||
const fd = await handler.openFile(path, 'r')
|
||||
try {
|
||||
const vhd = new Vhd(handler, fd)
|
||||
|
||||
@@ -3,7 +3,7 @@ import asyncIteratorToStream from 'async-iterator-to-stream'
import computeGeometryForSize from './_computeGeometryForSize'
import { createFooter } from './_createFooterHeader'

export default asyncIteratorToStream(async function * (size, blockParser) {
export default asyncIteratorToStream(async function*(size, blockParser) {
const geometry = computeGeometryForSize(size)
const actualSize = geometry.actualSize
const footer = createFooter(
@@ -13,7 +13,7 @@ export default asyncIteratorToStream(async function * (size, blockParser) {
)
let position = 0

function * filePadding (paddingLength) {
function* filePadding(paddingLength) {
if (paddingLength > 0) {
const chunkSize = 1024 * 1024 // 1 MiB
for (
@@ -33,10 +33,10 @@ export default asyncIteratorToStream(async function * (size, blockParser) {
if (paddingLength < 0) {
throw new Error('Received out of order blocks')
}
yield * filePadding(paddingLength)
yield* filePadding(paddingLength)
yield next.data
position = next.offsetBytes + next.data.length
}
yield * filePadding(actualSize - position)
yield* filePadding(actualSize - position)
yield footer
})
@@ -17,9 +17,9 @@ import { set as setBitmap } from './_bitmap'
|
||||
const VHD_BLOCK_SIZE_SECTORS = VHD_BLOCK_SIZE_BYTES / SECTOR_SIZE
|
||||
|
||||
/**
|
||||
* @returns {Array} an array of occupation bitmap, each bit mapping an input block size of bytes
|
||||
* @returns currentVhdPositionSector the first free sector after the data
|
||||
*/
|
||||
function createBAT (
|
||||
function createBAT(
|
||||
firstBlockPosition,
|
||||
blockAddressList,
|
||||
ratio,
|
||||
@@ -36,9 +36,10 @@ function createBAT (
|
||||
(bitmapSize + VHD_BLOCK_SIZE_BYTES) / SECTOR_SIZE
|
||||
}
|
||||
})
|
||||
return currentVhdPositionSector
|
||||
}
|
||||
|
||||
export default asyncIteratorToStream(async function * (
|
||||
export default async function createReadableStream(
|
||||
diskSize,
|
||||
incomingBlockSize,
|
||||
blockAddressList,
|
||||
@@ -79,9 +80,16 @@ export default asyncIteratorToStream(async function * (
|
||||
const bitmapSize =
|
||||
Math.ceil(VHD_BLOCK_SIZE_SECTORS / 8 / SECTOR_SIZE) * SECTOR_SIZE
|
||||
const bat = Buffer.alloc(tablePhysicalSizeBytes, 0xff)
|
||||
createBAT(firstBlockPosition, blockAddressList, ratio, bat, bitmapSize)
|
||||
const endOfData = createBAT(
|
||||
firstBlockPosition,
|
||||
blockAddressList,
|
||||
ratio,
|
||||
bat,
|
||||
bitmapSize
|
||||
)
|
||||
const fileSize = endOfData * SECTOR_SIZE + FOOTER_SIZE
|
||||
let position = 0
|
||||
function * yieldAndTrack (buffer, expectedPosition) {
|
||||
function* yieldAndTrack(buffer, expectedPosition) {
|
||||
if (expectedPosition !== undefined) {
|
||||
assert.strictEqual(position, expectedPosition)
|
||||
}
|
||||
@@ -90,7 +98,7 @@ export default asyncIteratorToStream(async function * (
|
||||
position += buffer.length
|
||||
}
|
||||
}
|
||||
async function * generateFileContent (blockIterator, bitmapSize, ratio) {
|
||||
async function* generateFileContent(blockIterator, bitmapSize, ratio) {
|
||||
let currentBlock = -1
|
||||
let currentVhdBlockIndex = -1
|
||||
let currentBlockWithBitmap = Buffer.alloc(0)
|
||||
@@ -100,7 +108,7 @@ export default asyncIteratorToStream(async function * (
|
||||
const batIndex = Math.floor(next.offsetBytes / VHD_BLOCK_SIZE_BYTES)
|
||||
if (batIndex !== currentVhdBlockIndex) {
|
||||
if (currentVhdBlockIndex >= 0) {
|
||||
yield * yieldAndTrack(
|
||||
yield* yieldAndTrack(
|
||||
currentBlockWithBitmap,
|
||||
bat.readUInt32BE(currentVhdBlockIndex * 4) * SECTOR_SIZE
|
||||
)
|
||||
@@ -118,11 +126,18 @@ export default asyncIteratorToStream(async function * (
|
||||
bitmapSize + (next.offsetBytes % VHD_BLOCK_SIZE_BYTES)
|
||||
)
|
||||
}
|
||||
yield * yieldAndTrack(currentBlockWithBitmap)
|
||||
yield* yieldAndTrack(currentBlockWithBitmap)
|
||||
}
|
||||
yield * yieldAndTrack(footer, 0)
|
||||
yield * yieldAndTrack(header, FOOTER_SIZE)
|
||||
yield * yieldAndTrack(bat, FOOTER_SIZE + HEADER_SIZE)
|
||||
yield * generateFileContent(blockIterator, bitmapSize, ratio)
|
||||
yield * yieldAndTrack(footer)
|
||||
})
|
||||
|
||||
async function* iterator() {
|
||||
yield* yieldAndTrack(footer, 0)
|
||||
yield* yieldAndTrack(header, FOOTER_SIZE)
|
||||
yield* yieldAndTrack(bat, FOOTER_SIZE + HEADER_SIZE)
|
||||
yield* generateFileContent(blockIterator, bitmapSize, ratio)
|
||||
yield* yieldAndTrack(footer)
|
||||
}
|
||||
|
||||
const stream = asyncIteratorToStream(iterator())
|
||||
stream.length = fileSize
|
||||
return stream
|
||||
}
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
import asyncIteratorToStream from 'async-iterator-to-stream'
|
||||
import { dirname, resolve } from 'path'
|
||||
|
||||
import resolveRelativeFromFile from './_resolveRelativeFromFile'
|
||||
|
||||
import Vhd from './vhd'
|
||||
import {
|
||||
@@ -12,21 +13,24 @@ import {
|
||||
import { fuFooter, fuHeader, checksumStruct } from './_structs'
|
||||
import { test as mapTestBit } from './_bitmap'
|
||||
|
||||
const resolveRelativeFromFile = (file, path) =>
|
||||
resolve('/', dirname(file), path).slice(1)
|
||||
|
||||
export default asyncIteratorToStream(function * (handler, path) {
|
||||
export default async function createSyntheticStream(handler, path) {
|
||||
const fds = []
|
||||
|
||||
const cleanup = () => {
|
||||
for (let i = 0, n = fds.length; i < n; ++i) {
|
||||
handler.closeFile(fds[i]).catch(error => {
|
||||
console.warn('createReadStream, closeFd', i, error)
|
||||
})
|
||||
}
|
||||
}
|
||||
try {
|
||||
const vhds = []
|
||||
while (true) {
|
||||
const fd = yield handler.openFile(path, 'r')
|
||||
const fd = await handler.openFile(path, 'r')
|
||||
fds.push(fd)
|
||||
const vhd = new Vhd(handler, fd)
|
||||
vhds.push(vhd)
|
||||
yield vhd.readHeaderAndFooter()
|
||||
yield vhd.readBlockAllocationTable()
|
||||
await vhd.readHeaderAndFooter()
|
||||
await vhd.readBlockAllocationTable()
|
||||
|
||||
if (vhd.footer.diskType === DISK_TYPE_DYNAMIC) {
|
||||
break
|
||||
@@ -64,14 +68,8 @@ export default asyncIteratorToStream(function * (handler, path) {
|
||||
const nBlocks = Math.ceil(footer.currentSize / header.blockSize)
|
||||
|
||||
const blocksOwner = new Array(nBlocks)
|
||||
for (
|
||||
let iBlock = 0,
|
||||
blockOffset = Math.ceil(
|
||||
(header.tableOffset + bat.length) / SECTOR_SIZE
|
||||
);
|
||||
iBlock < nBlocks;
|
||||
++iBlock
|
||||
) {
|
||||
let blockOffset = Math.ceil((header.tableOffset + bat.length) / SECTOR_SIZE)
|
||||
for (let iBlock = 0; iBlock < nBlocks; ++iBlock) {
|
||||
let blockSector = BLOCK_UNUSED
|
||||
for (let i = 0; i < nVhds; ++i) {
|
||||
if (vhds[i].containsBlock(iBlock)) {
|
||||
@@ -83,71 +81,78 @@ export default asyncIteratorToStream(function * (handler, path) {
|
||||
}
|
||||
bat.writeUInt32BE(blockSector, iBlock * 4)
|
||||
}
|
||||
const fileSize = blockOffset * SECTOR_SIZE + FOOTER_SIZE
|
||||
|
||||
footer = fuFooter.pack(footer)
|
||||
checksumStruct(footer, fuFooter)
|
||||
yield footer
|
||||
const iterator = function*() {
|
||||
try {
|
||||
footer = fuFooter.pack(footer)
|
||||
checksumStruct(footer, fuFooter)
|
||||
yield footer
|
||||
|
||||
header = fuHeader.pack(header)
|
||||
checksumStruct(header, fuHeader)
|
||||
yield header
|
||||
header = fuHeader.pack(header)
|
||||
checksumStruct(header, fuHeader)
|
||||
yield header
|
||||
|
||||
yield bat
|
||||
yield bat
|
||||
|
||||
// TODO: for generic usage the bitmap needs to be properly computed for each block
|
||||
const bitmap = Buffer.alloc(vhd.bitmapSize, 0xff)
|
||||
for (let iBlock = 0; iBlock < nBlocks; ++iBlock) {
|
||||
const owner = blocksOwner[iBlock]
|
||||
if (owner === undefined) {
|
||||
continue
|
||||
}
|
||||
|
||||
yield bitmap
|
||||
|
||||
const blocksByVhd = new Map()
|
||||
const emitBlockSectors = function * (iVhd, i, n) {
|
||||
const vhd = vhds[iVhd]
|
||||
const isRootVhd = vhd === rootVhd
|
||||
if (!vhd.containsBlock(iBlock)) {
|
||||
if (isRootVhd) {
|
||||
yield Buffer.alloc((n - i) * SECTOR_SIZE)
|
||||
} else {
|
||||
yield * emitBlockSectors(iVhd + 1, i, n)
|
||||
// TODO: for generic usage the bitmap needs to be properly computed for each block
|
||||
const bitmap = Buffer.alloc(vhd.bitmapSize, 0xff)
|
||||
for (let iBlock = 0; iBlock < nBlocks; ++iBlock) {
|
||||
const owner = blocksOwner[iBlock]
|
||||
if (owner === undefined) {
|
||||
continue
|
||||
}
|
||||
return
|
||||
}
|
||||
let block = blocksByVhd.get(vhd)
|
||||
if (block === undefined) {
|
||||
block = yield vhd._readBlock(iBlock)
|
||||
blocksByVhd.set(vhd, block)
|
||||
}
|
||||
const { bitmap, data } = block
|
||||
if (isRootVhd) {
|
||||
yield data.slice(i * SECTOR_SIZE, n * SECTOR_SIZE)
|
||||
return
|
||||
}
|
||||
while (i < n) {
|
||||
const hasData = mapTestBit(bitmap, i)
|
||||
const start = i
|
||||
do {
|
||||
++i
|
||||
} while (i < n && mapTestBit(bitmap, i) === hasData)
|
||||
if (hasData) {
|
||||
yield data.slice(start * SECTOR_SIZE, i * SECTOR_SIZE)
|
||||
} else {
|
||||
yield * emitBlockSectors(iVhd + 1, start, i)
|
||||
|
||||
yield bitmap
|
||||
|
||||
const blocksByVhd = new Map()
|
||||
const emitBlockSectors = function*(iVhd, i, n) {
|
||||
const vhd = vhds[iVhd]
|
||||
const isRootVhd = vhd === rootVhd
|
||||
if (!vhd.containsBlock(iBlock)) {
|
||||
if (isRootVhd) {
|
||||
yield Buffer.alloc((n - i) * SECTOR_SIZE)
|
||||
} else {
|
||||
yield* emitBlockSectors(iVhd + 1, i, n)
|
||||
}
|
||||
return
|
||||
}
|
||||
let block = blocksByVhd.get(vhd)
|
||||
if (block === undefined) {
|
||||
block = yield vhd._readBlock(iBlock)
|
||||
blocksByVhd.set(vhd, block)
|
||||
}
|
||||
const { bitmap, data } = block
|
||||
if (isRootVhd) {
|
||||
yield data.slice(i * SECTOR_SIZE, n * SECTOR_SIZE)
|
||||
return
|
||||
}
|
||||
while (i < n) {
|
||||
const hasData = mapTestBit(bitmap, i)
|
||||
const start = i
|
||||
do {
|
||||
++i
|
||||
} while (i < n && mapTestBit(bitmap, i) === hasData)
|
||||
if (hasData) {
|
||||
yield data.slice(start * SECTOR_SIZE, i * SECTOR_SIZE)
|
||||
} else {
|
||||
yield* emitBlockSectors(iVhd + 1, start, i)
|
||||
}
|
||||
}
|
||||
}
|
||||
yield* emitBlockSectors(owner, 0, sectorsPerBlockData)
|
||||
}
|
||||
yield footer
|
||||
} finally {
|
||||
cleanup()
|
||||
}
|
||||
yield * emitBlockSectors(owner, 0, sectorsPerBlockData)
|
||||
}
|
||||
|
||||
yield footer
|
||||
} finally {
|
||||
for (let i = 0, n = fds.length; i < n; ++i) {
|
||||
handler.closeFile(fds[i]).catch(error => {
|
||||
console.warn('createReadStream, closeFd', i, error)
|
||||
})
|
||||
}
|
||||
const stream = asyncIteratorToStream(iterator())
|
||||
stream.length = fileSize
|
||||
return stream
|
||||
} catch (e) {
|
||||
cleanup()
|
||||
throw e
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
@@ -1,5 +1,9 @@
// see https://github.com/babel/babel/issues/8450
import 'core-js/features/symbol/async-iterator'

export { default } from './vhd'
export { default as chainVhd } from './chain'
export { default as checkVhdChain } from './checkChain'
export { default as createContentStream } from './createContentStream'
export { default as createReadableRawStream } from './createReadableRawStream'
export {
@@ -10,7 +10,7 @@ import { DISK_TYPE_DIFFERENCING, DISK_TYPE_DYNAMIC } from './_constants'
// Merge vhd child into vhd parent.
//
// TODO: rename the VHD file during the merge
export default concurrency(2)(async function merge (
export default concurrency(2)(async function merge(
parentHandler,
parentPath,
childHandler,
@@ -79,11 +79,11 @@ BUF_BLOCK_UNUSED.writeUInt32BE(BLOCK_UNUSED, 0)
|
||||
// - sectorSize = 512
|
||||
|
||||
export default class Vhd {
|
||||
get batSize () {
|
||||
get batSize() {
|
||||
return computeBatSize(this.header.maxTableEntries)
|
||||
}
|
||||
|
||||
constructor (handler, path) {
|
||||
constructor(handler, path) {
|
||||
this._handler = handler
|
||||
this._path = path
|
||||
}
|
||||
@@ -92,7 +92,7 @@ export default class Vhd {
|
||||
// Read functions.
|
||||
// =================================================================
|
||||
|
||||
async _read (start, n) {
|
||||
async _read(start, n) {
|
||||
const { bytesRead, buffer } = await this._handler.read(
|
||||
this._path,
|
||||
Buffer.alloc(n),
|
||||
@@ -102,12 +102,12 @@ export default class Vhd {
|
||||
return buffer
|
||||
}
|
||||
|
||||
containsBlock (id) {
|
||||
containsBlock(id) {
|
||||
return this._getBatEntry(id) !== BLOCK_UNUSED
|
||||
}
|
||||
|
||||
// Returns the first address after metadata. (In bytes)
|
||||
getEndOfHeaders () {
|
||||
getEndOfHeaders() {
|
||||
const { header } = this
|
||||
|
||||
let end = FOOTER_SIZE + HEADER_SIZE
|
||||
@@ -132,7 +132,7 @@ export default class Vhd {
|
||||
}
|
||||
|
||||
// Returns the first sector after data.
|
||||
getEndOfData () {
|
||||
getEndOfData() {
|
||||
let end = Math.ceil(this.getEndOfHeaders() / SECTOR_SIZE)
|
||||
|
||||
const fullBlockSize = this.sectorsOfBitmap + this.sectorsPerBlock
|
||||
@@ -153,7 +153,7 @@ export default class Vhd {
|
||||
// TODO: extract the checks into reusable functions:
|
||||
// - better human reporting
|
||||
// - auto repair if possible
|
||||
async readHeaderAndFooter (checkSecondFooter = true) {
|
||||
async readHeaderAndFooter(checkSecondFooter = true) {
|
||||
const buf = await this._read(0, FOOTER_SIZE + HEADER_SIZE)
|
||||
const bufFooter = buf.slice(0, FOOTER_SIZE)
|
||||
const bufHeader = buf.slice(FOOTER_SIZE)
|
||||
@@ -206,7 +206,7 @@ export default class Vhd {
|
||||
}
|
||||
|
||||
// Returns a buffer that contains the block allocation table of a vhd file.
|
||||
async readBlockAllocationTable () {
|
||||
async readBlockAllocationTable() {
|
||||
const { header } = this
|
||||
this.blockTable = await this._read(
|
||||
header.tableOffset,
|
||||
@@ -215,11 +215,13 @@ export default class Vhd {
|
||||
}
|
||||
|
||||
// return the first sector (bitmap) of a block
|
||||
_getBatEntry (block) {
|
||||
return this.blockTable.readUInt32BE(block * 4)
|
||||
_getBatEntry(block) {
|
||||
const i = block * 4
|
||||
const { blockTable } = this
|
||||
return i < blockTable.length ? blockTable.readUInt32BE(i) : BLOCK_UNUSED
|
||||
}
|
||||
|
||||
_readBlock (blockId, onlyBitmap = false) {
|
||||
_readBlock(blockId, onlyBitmap = false) {
|
||||
const blockAddr = this._getBatEntry(blockId)
|
||||
if (blockAddr === BLOCK_UNUSED) {
|
||||
throw new Error(`no such block ${blockId}`)
|
||||
@@ -228,23 +230,22 @@ export default class Vhd {
|
||||
return this._read(
|
||||
sectorsToBytes(blockAddr),
|
||||
onlyBitmap ? this.bitmapSize : this.fullBlockSize
|
||||
).then(
|
||||
buf =>
|
||||
onlyBitmap
|
||||
? { id: blockId, bitmap: buf }
|
||||
: {
|
||||
id: blockId,
|
||||
bitmap: buf.slice(0, this.bitmapSize),
|
||||
data: buf.slice(this.bitmapSize),
|
||||
buffer: buf,
|
||||
}
|
||||
).then(buf =>
|
||||
onlyBitmap
|
||||
? { id: blockId, bitmap: buf }
|
||||
: {
|
||||
id: blockId,
|
||||
bitmap: buf.slice(0, this.bitmapSize),
|
||||
data: buf.slice(this.bitmapSize),
|
||||
buffer: buf,
|
||||
}
|
||||
)
|
||||
}
|
||||
|
||||
// get the identifiers and first sectors of the first and last block
|
||||
// in the file
|
||||
//
|
||||
_getFirstAndLastBlocks () {
|
||||
_getFirstAndLastBlocks() {
|
||||
const n = this.header.maxTableEntries
|
||||
const bat = this.blockTable
|
||||
let i = 0
|
||||
@@ -289,7 +290,7 @@ export default class Vhd {
|
||||
// =================================================================
|
||||
|
||||
// Write a buffer/stream at a given position in a vhd file.
|
||||
async _write (data, offset) {
|
||||
async _write(data, offset) {
|
||||
debug(
|
||||
`_write offset=${offset} size=${
|
||||
Buffer.isBuffer(data) ? data.length : '???'
|
||||
@@ -308,7 +309,7 @@ export default class Vhd {
|
||||
: fromEvent(data.pipe(stream), 'finish')
|
||||
}
|
||||
|
||||
async _freeFirstBlockSpace (spaceNeededBytes) {
|
||||
async _freeFirstBlockSpace(spaceNeededBytes) {
|
||||
try {
|
||||
const { first, firstSector, lastSector } = this._getFirstAndLastBlocks()
|
||||
const tableOffset = this.header.tableOffset
|
||||
@@ -348,7 +349,7 @@ export default class Vhd {
|
||||
}
|
||||
}
|
||||
|
||||
async ensureBatSize (entries) {
|
||||
async ensureBatSize(entries) {
|
||||
const { header } = this
|
||||
const prevMaxTableEntries = header.maxTableEntries
|
||||
if (prevMaxTableEntries >= entries) {
|
||||
@@ -373,7 +374,7 @@ export default class Vhd {
|
||||
}
|
||||
|
||||
// set the first sector (bitmap) of a block
|
||||
_setBatEntry (block, blockSector) {
|
||||
_setBatEntry(block, blockSector) {
|
||||
const i = block * 4
|
||||
const { blockTable } = this
|
||||
|
||||
@@ -384,7 +385,7 @@ export default class Vhd {
|
||||
|
||||
// Make a new empty block at vhd end.
|
||||
// Update block allocation table in context and in file.
|
||||
async createBlock (blockId) {
|
||||
async createBlock(blockId) {
|
||||
const blockAddr = Math.ceil(this.getEndOfData() / SECTOR_SIZE)
|
||||
|
||||
debug(`create block ${blockId} at ${blockAddr}`)
|
||||
@@ -403,7 +404,7 @@ export default class Vhd {
|
||||
}
|
||||
|
||||
// Write a bitmap at a block address.
|
||||
async writeBlockBitmap (blockAddr, bitmap) {
|
||||
async writeBlockBitmap(blockAddr, bitmap) {
|
||||
const { bitmapSize } = this
|
||||
|
||||
if (bitmap.length !== bitmapSize) {
|
||||
@@ -420,7 +421,7 @@ export default class Vhd {
|
||||
await this._write(bitmap, sectorsToBytes(blockAddr))
|
||||
}
|
||||
|
||||
async writeEntireBlock (block) {
|
||||
async writeEntireBlock(block) {
|
||||
let blockAddr = this._getBatEntry(block.id)
|
||||
|
||||
if (blockAddr === BLOCK_UNUSED) {
|
||||
@@ -429,7 +430,7 @@ export default class Vhd {
|
||||
await this._write(block.buffer, sectorsToBytes(blockAddr))
|
||||
}
|
||||
|
||||
async writeBlockSectors (block, beginSectorId, endSectorId, parentBitmap) {
|
||||
async writeBlockSectors(block, beginSectorId, endSectorId, parentBitmap) {
|
||||
let blockAddr = this._getBatEntry(block.id)
|
||||
|
||||
if (blockAddr === BLOCK_UNUSED) {
|
||||
@@ -461,7 +462,7 @@ export default class Vhd {
|
||||
)
|
||||
}
|
||||
|
||||
async coalesceBlock (child, blockId) {
|
||||
async coalesceBlock(child, blockId) {
|
||||
const block = await child._readBlock(blockId)
|
||||
const { bitmap, data } = block
|
||||
|
||||
@@ -503,7 +504,7 @@ export default class Vhd {
|
||||
}
|
||||
|
||||
// Write a context footer. (At the end and beginning of a vhd file.)
|
||||
async writeFooter (onlyEndFooter = false) {
|
||||
async writeFooter(onlyEndFooter = false) {
|
||||
const { footer } = this
|
||||
|
||||
const rawFooter = fuFooter.pack(footer)
|
||||
@@ -523,7 +524,7 @@ export default class Vhd {
|
||||
await this._write(rawFooter, offset)
|
||||
}
|
||||
|
||||
writeHeader () {
|
||||
writeHeader() {
|
||||
const { header } = this
|
||||
const rawHeader = fuHeader.pack(header)
|
||||
header.checksum = checksumStruct(rawHeader, fuHeader)
|
||||
@@ -536,7 +537,7 @@ export default class Vhd {
|
||||
return this._write(rawHeader, offset)
|
||||
}
|
||||
|
||||
async writeData (offsetSectors, buffer) {
|
||||
async writeData(offsetSectors, buffer) {
|
||||
const bufferSizeSectors = Math.ceil(buffer.length / SECTOR_SIZE)
|
||||
const startBlock = Math.floor(offsetSectors / this.sectorsPerBlock)
|
||||
const endBufferSectors = offsetSectors + bufferSizeSectors
|
||||
@@ -589,7 +590,7 @@ export default class Vhd {
|
||||
await this.writeFooter()
|
||||
}
|
||||
|
||||
async ensureSpaceForParentLocators (neededSectors) {
|
||||
async ensureSpaceForParentLocators(neededSectors) {
|
||||
const firstLocatorOffset = FOOTER_SIZE + HEADER_SIZE
|
||||
const currentSpace =
|
||||
Math.floor(this.header.tableOffset / SECTOR_SIZE) -
|
||||
@@ -603,7 +604,7 @@ export default class Vhd {
|
||||
return firstLocatorOffset
|
||||
}
|
||||
|
||||
async setUniqueParentLocator (fileNameString) {
|
||||
async setUniqueParentLocator(fileNameString) {
|
||||
const { header } = this
|
||||
header.parentLocatorEntry[0].platformCode = PLATFORM_W2KU
|
||||
const encodedFilename = Buffer.from(fileNameString, 'utf16le')
|
||||
|
||||
@@ -5,9 +5,9 @@ import tmp from 'tmp'
import { createWriteStream, readFile } from 'fs-promise'
import { fromEvent, pFromCallback } from 'promise-toolbox'

import { createFooter } from './_createFooterHeader'
import createReadableRawVHDStream from './createReadableRawStream'
import createReadableSparseVHDStream from './createReadableSparseStream'
import { createReadableRawStream, createReadableSparseStream } from './'

import { createFooter } from './src/_createFooterHeader'

const initialDir = process.cwd()

@@ -54,7 +54,7 @@ test('ReadableRawVHDStream does not crash', async () => {
},
}
const fileSize = 1000
const stream = createReadableRawVHDStream(fileSize, mockParser)
const stream = createReadableRawStream(fileSize, mockParser)
const pipe = stream.pipe(createWriteStream('output.vhd'))
await fromEvent(pipe, 'finish')
await execa('vhd-util', ['check', '-t', '-i', '-n', 'output.vhd'])
@@ -85,7 +85,7 @@ test('ReadableRawVHDStream detects when blocks are out of order', async () => {
}
return expect(
new Promise((resolve, reject) => {
const stream = createReadableRawVHDStream(100000, mockParser)
const stream = createReadableRawStream(100000, mockParser)
stream.on('error', reject)
const pipe = stream.pipe(createWriteStream('outputStream'))
pipe.on('finish', resolve)
@@ -107,12 +107,13 @@ test('ReadableSparseVHDStream can handle a sparse file', async () => {
},
]
const fileSize = blockSize * 110
const stream = createReadableSparseVHDStream(
const stream = await createReadableSparseStream(
fileSize,
blockSize,
blocks.map(b => b.offsetBytes),
blocks
)
expect(stream.length).toEqual(4197888)
const pipe = stream.pipe(createWriteStream('output.vhd'))
await fromEvent(pipe, 'finish')
await execa('vhd-util', ['check', '-t', '-i', '-n', 'output.vhd'])
3
packages/xapi-explore-sr/.babelrc.js
Normal file
@@ -0,0 +1,3 @@
module.exports = require('../../@xen-orchestra/babel-config')(
require('./package.json')
)
24
packages/xapi-explore-sr/.npmignore
Normal file
@@ -0,0 +1,24 @@
/benchmark/
/benchmarks/
*.bench.js
*.bench.js.map

/examples/
example.js
example.js.map
*.example.js
*.example.js.map

/fixture/
/fixtures/
*.fixture.js
*.fixture.js.map
*.fixtures.js
*.fixtures.js.map

/test/
/tests/
*.spec.js
*.spec.js.map

__snapshots__/
52
packages/xapi-explore-sr/README.md
Normal file
@@ -0,0 +1,52 @@
# xapi-explore-sr [](https://travis-ci.org/vatesfr/xen-orchestra)

> Display the list of VDIs (unmanaged and snapshots included) of a SR

## Install

Installation of the [npm package](https://npmjs.org/package/xapi-explore-sr):

```
> npm install --global xapi-explore-sr
```

## Usage

```
> xapi-explore-sr
Usage: xapi-explore-sr [--full] <SR UUID> <XenServer URL> <XenServer user> [<XenServer password>]
```

## Development

```
# Install dependencies
> npm install

# Run the tests
> npm test

# Continuously compile
> npm run dev

# Continuously run the tests
> npm run dev-test

# Build for production (automatically called by npm install)
> npm run build
```

## Contributions

Contributions are *very* welcome, either on the documentation or on
the code.

You may:

- report any [issue](https://github.com/vatesfr/xen-orchestra/issues)
  you've encountered;
- fork and create a pull request.

## License

ISC © [Vates SAS](https://vates.fr)
60
packages/xapi-explore-sr/package.json
Normal file
@@ -0,0 +1,60 @@
|
||||
{
|
||||
"name": "xapi-explore-sr",
|
||||
"version": "0.2.1",
|
||||
"license": "ISC",
|
||||
"description": "Display the list of VDIs (unmanaged and snapshots included) of a SR",
|
||||
"keywords": [
|
||||
"api",
|
||||
"sr",
|
||||
"vdi",
|
||||
"vdis",
|
||||
"xen",
|
||||
"xen-api",
|
||||
"xenapi"
|
||||
],
|
||||
"homepage": "https://github.com/vatesfr/xen-orchestra/tree/master/packages/xapi-explore-sr",
|
||||
"bugs": "https://github.com/vatesfr/xen-orchestra/issues",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://github.com/vatesfr/xen-orchestra.git"
|
||||
},
|
||||
"author": {
|
||||
"name": "Julien Fontanet",
|
||||
"email": "julien.fontanet@isonoe.net"
|
||||
},
|
||||
"preferGlobal": true,
|
||||
"main": "dist/",
|
||||
"bin": {
|
||||
"xapi-explore-sr": "dist/index.js"
|
||||
},
|
||||
"files": [
|
||||
"dist/"
|
||||
],
|
||||
"engines": {
|
||||
"node": ">=8"
|
||||
},
|
||||
"dependencies": {
|
||||
"archy": "^1.0.0",
|
||||
"chalk": "^2.3.2",
|
||||
"exec-promise": "^0.7.0",
|
||||
"human-format": "^0.10.0",
|
||||
"lodash": "^4.17.4",
|
||||
"pw": "^0.0.4",
|
||||
"xen-api": "^0.24.1"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@babel/cli": "^7.1.5",
|
||||
"@babel/core": "^7.1.5",
|
||||
"@babel/preset-env": "^7.1.5",
|
||||
"babel-plugin-lodash": "^3.2.11",
|
||||
"cross-env": "^5.1.4",
|
||||
"rimraf": "^2.6.1"
|
||||
},
|
||||
"scripts": {
|
||||
"build": "cross-env NODE_ENV=production babel --source-maps --out-dir=dist/ src/",
|
||||
"dev": "cross-env NODE_ENV=development babel --watch --source-maps --out-dir=dist/ src/",
|
||||
"prebuild": "rimraf dist/",
|
||||
"predev": "yarn run prebuild",
|
||||
"prepublishOnly": "yarn run build"
|
||||
}
|
||||
}
|
||||
161
packages/xapi-explore-sr/src/index.js
Executable file
@@ -0,0 +1,161 @@
|
||||
#!/usr/bin/env node
|
||||
|
||||
import archy from 'archy'
|
||||
import chalk from 'chalk'
|
||||
import execPromise from 'exec-promise'
|
||||
import humanFormat from 'human-format'
|
||||
import pw from 'pw'
|
||||
import { createClient } from 'xen-api'
|
||||
import { forEach, map, orderBy } from 'lodash'
|
||||
|
||||
// ===================================================================
|
||||
|
||||
const askPassword = prompt =>
|
||||
new Promise(resolve => {
|
||||
prompt && process.stderr.write(`${prompt}: `)
|
||||
pw(resolve)
|
||||
})
|
||||
|
||||
const formatSize = bytes =>
|
||||
humanFormat(bytes, {
|
||||
prefix: 'Gi',
|
||||
scale: 'binary',
|
||||
})
|
||||
|
||||
const required = name => {
|
||||
const e = `missing required argument <${name}>`
|
||||
throw e
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
const STYLES = [
|
||||
[
|
||||
vdi => !vdi.managed,
|
||||
chalk.enabled ? chalk.red : label => `[unmanaged] ${label}`,
|
||||
],
|
||||
[
|
||||
vdi => vdi.is_a_snapshot,
|
||||
chalk.enabled ? chalk.yellow : label => `[snapshot] ${label}`,
|
||||
],
|
||||
]
|
||||
const getStyle = vdi => {
|
||||
for (let i = 0, n = STYLES.length; i < n; ++i) {
|
||||
const entry = STYLES[i]
|
||||
if (entry[0](vdi)) {
|
||||
return entry[1]
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const mapFilter = (collection, iteratee, results = []) => {
|
||||
forEach(collection, function() {
|
||||
const result = iteratee.apply(this, arguments)
|
||||
if (result !== undefined) {
|
||||
results.push(result)
|
||||
}
|
||||
})
|
||||
return results
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
execPromise(async args => {
|
||||
if (args.length === 0 || args[0] === '-h' || args[0] === '--help') {
|
||||
return `Usage: xapi-explore-sr [--full] <SR UUID> <XenServer URL> <XenServer user> [<XenServer password>]`
|
||||
}
|
||||
|
||||
const full = args[0] === '--full'
|
||||
if (full) {
|
||||
args.shift()
|
||||
}
|
||||
|
||||
const [
|
||||
srUuid = required('SR UUID'),
|
||||
url = required('XenServer URL'),
|
||||
user = required('XenServer user'),
|
||||
password = await askPassword('XenServer password'),
|
||||
] = args
|
||||
|
||||
const xapi = createClient({
|
||||
allowUnauthorized: true,
|
||||
auth: { user, password },
|
||||
readOnly: true,
|
||||
url,
|
||||
watchEvents: false,
|
||||
})
|
||||
await xapi.connect()
|
||||
|
||||
const srRef = await xapi.call('SR.get_by_uuid', srUuid)
|
||||
const sr = await xapi.call('SR.get_record', srRef)
|
||||
|
||||
const vdisByRef = {}
|
||||
await Promise.all(
|
||||
map(sr.VDIs, async ref => {
|
||||
const vdi = await xapi.call('VDI.get_record', ref)
|
||||
vdisByRef[ref] = vdi
|
||||
})
|
||||
)
|
||||
|
||||
const hasParents = {}
|
||||
const vhdChildrenByUuid = {}
|
||||
forEach(vdisByRef, vdi => {
|
||||
const vhdParent = vdi.sm_config['vhd-parent']
|
||||
if (vhdParent) {
|
||||
;(
|
||||
vhdChildrenByUuid[vhdParent] || (vhdChildrenByUuid[vhdParent] = [])
|
||||
).push(vdi)
|
||||
} else if (!(vdi.snapshot_of in vdisByRef)) {
|
||||
return
|
||||
}
|
||||
|
||||
hasParents[vdi.uuid] = true
|
||||
})
|
||||
|
||||
const makeVdiNode = vdi => {
|
||||
const { uuid } = vdi
|
||||
|
||||
let label = `${vdi.name_label} - ${uuid} - ${formatSize(
|
||||
+vdi.physical_utilisation
|
||||
)}`
|
||||
const nodes = []
|
||||
|
||||
const vhdChildren = vhdChildrenByUuid[uuid]
|
||||
if (vhdChildren) {
|
||||
mapFilter(
|
||||
orderBy(vhdChildren, 'is_a_snapshot', 'desc'),
|
||||
makeVdiNode,
|
||||
nodes
|
||||
)
|
||||
}
|
||||
|
||||
mapFilter(
|
||||
vdi.snapshots,
|
||||
ref => {
|
||||
const vdi = vdisByRef[ref]
|
||||
if (full || !vdi.sm_config['vhd-parent']) {
|
||||
return makeVdiNode(vdi)
|
||||
}
|
||||
},
|
||||
nodes
|
||||
)
|
||||
|
||||
const style = getStyle(vdi)
|
||||
if (style) {
|
||||
label = style(label)
|
||||
}
|
||||
|
||||
return { label, nodes }
|
||||
}
|
||||
|
||||
const nodes = mapFilter(orderBy(vdisByRef, ['name_label', 'uuid']), vdi => {
|
||||
if (!hasParents[vdi.uuid]) {
|
||||
return makeVdiNode(vdi)
|
||||
}
|
||||
})
|
||||
|
||||
return archy({
|
||||
label: `${sr.name_label} (${sr.VDIs.length} VDIs)`,
|
||||
nodes,
|
||||
})
|
||||
})
|
||||
@@ -4,6 +4,9 @@

Tested with:

- XenServer 7.6
- XenServer 7.5
- XenServer 7.4
- XenServer 7.3
- XenServer 7.2
- XenServer 7.1
@@ -44,6 +47,7 @@ Options:
- `allowUnauthorized`: whether to accept self-signed certificates
- `auth`: credentials used to sign in (can also be specified in the URL)
- `readOnly = false`: if true, no methods with side-effects can be called
- `callTimeout`: number of milliseconds after which a call is considered failed (can also be a map of timeouts by methods)

```js
// Force connection.
@@ -1,30 +1,42 @@
|
||||
#!/usr/bin/env node
|
||||
|
||||
process.env.DEBUG = '*'
|
||||
process.env.DEBUG = 'xen-api'
|
||||
|
||||
const createProgress = require('progress-stream')
|
||||
const createTop = require('process-top')
|
||||
const defer = require('golike-defer').default
|
||||
const pump = require('pump')
|
||||
const { CancelToken, fromCallback } = require('promise-toolbox')
|
||||
const getopts = require('getopts')
|
||||
const { CancelToken } = require('promise-toolbox')
|
||||
|
||||
const { createClient } = require('../')
|
||||
|
||||
const { createOutputStream, resolveRef } = require('./utils')
|
||||
const {
|
||||
createOutputStream,
|
||||
formatProgress,
|
||||
pipeline,
|
||||
resolveRecord,
|
||||
throttle,
|
||||
} = require('./utils')
|
||||
|
||||
defer(async ($defer, args) => {
|
||||
let raw = false
|
||||
if (args[0] === '--raw') {
|
||||
raw = true
|
||||
args.shift()
|
||||
}
|
||||
defer(async ($defer, rawArgs) => {
|
||||
const { raw, throttle: bps, _: args } = getopts(rawArgs, {
|
||||
boolean: 'raw',
|
||||
alias: {
|
||||
raw: 'r',
|
||||
throttle: 't',
|
||||
},
|
||||
})
|
||||
|
||||
if (args.length < 2) {
|
||||
return console.log('Usage: export-vdi [--raw] <XS URL> <VDI identifier> [<VHD file>]')
|
||||
return console.log(
|
||||
'Usage: export-vdi [--raw] <XS URL> <VDI identifier> [<VHD file>]'
|
||||
)
|
||||
}
|
||||
|
||||
const xapi = createClient({
|
||||
allowUnauthorized: true,
|
||||
url: args[0],
|
||||
watchEvents: false
|
||||
watchEvents: false,
|
||||
})
|
||||
|
||||
await xapi.connect()
|
||||
@@ -33,21 +45,32 @@ defer(async ($defer, args) => {
|
||||
const { cancel, token } = CancelToken.source()
|
||||
process.on('SIGINT', cancel)
|
||||
|
||||
const vdi = await resolveRecord(xapi, 'VDI', args[1])
|
||||
|
||||
// https://xapi-project.github.io/xen-api/snapshots.html#downloading-a-disk-or-snapshot
|
||||
const exportStream = await xapi.getResource(token, '/export_raw_vdi/', {
|
||||
query: {
|
||||
format: raw ? 'raw' : 'vhd',
|
||||
vdi: await resolveRef(xapi, 'VDI', args[1])
|
||||
}
|
||||
vdi: vdi.$ref,
|
||||
},
|
||||
})
|
||||
|
||||
console.warn('Export task:', exportStream.headers['task-id'])
|
||||
|
||||
await fromCallback(cb => pump(
|
||||
const top = createTop()
|
||||
const progressStream = createProgress()
|
||||
|
||||
$defer(
|
||||
clearInterval,
|
||||
setInterval(() => {
|
||||
console.warn('\r %s | %s', top.toString(), formatProgress(progressStream.progress()))
|
||||
}, 1e3)
|
||||
)
|
||||
|
||||
await pipeline(
|
||||
exportStream,
|
||||
createOutputStream(args[2]),
|
||||
cb
|
||||
))
|
||||
})(process.argv.slice(2)).catch(
|
||||
console.error.bind(console, 'error')
|
||||
)
|
||||
progressStream,
|
||||
throttle(bps),
|
||||
createOutputStream(args[2])
|
||||
)
|
||||
})(process.argv.slice(2)).catch(console.error.bind(console, 'error'))
|
||||
|
||||
@@ -2,15 +2,25 @@
|
||||
|
||||
process.env.DEBUG = '*'
|
||||
|
||||
const createProgress = require('progress-stream')
|
||||
const defer = require('golike-defer').default
|
||||
const pump = require('pump')
|
||||
const { CancelToken, fromCallback } = require('promise-toolbox')
|
||||
const getopts = require('getopts')
|
||||
const { CancelToken } = require('promise-toolbox')
|
||||
|
||||
const { createClient } = require('../')
|
||||
|
||||
const { createOutputStream, resolveRef } = require('./utils')
|
||||
const {
|
||||
createOutputStream,
|
||||
formatProgress,
|
||||
pipeline,
|
||||
resolveRecord,
|
||||
} = require('./utils')
|
||||
|
||||
defer(async ($defer, rawArgs) => {
|
||||
const { gzip, zstd, _: args } = getopts(rawArgs, {
|
||||
boolean: ['gzip', 'zstd'],
|
||||
})
|
||||
|
||||
defer(async ($defer, args) => {
|
||||
if (args.length < 2) {
|
||||
return console.log('Usage: export-vm <XS URL> <VM identifier> [<XVA file>]')
|
||||
}
|
||||
@@ -18,7 +28,7 @@ defer(async ($defer, args) => {
|
||||
const xapi = createClient({
|
||||
allowUnauthorized: true,
|
||||
url: args[0],
|
||||
watchEvents: false
|
||||
watchEvents: false,
|
||||
})
|
||||
|
||||
await xapi.connect()
|
||||
@@ -30,18 +40,16 @@ defer(async ($defer, args) => {
|
||||
// https://xapi-project.github.io/xen-api/importexport.html
|
||||
const exportStream = await xapi.getResource(token, '/export/', {
|
||||
query: {
|
||||
ref: await resolveRef(xapi, 'VM', args[1]),
|
||||
use_compression: 'true'
|
||||
}
|
||||
ref: (await resolveRecord(xapi, 'VM', args[1])).$ref,
|
||||
use_compression: zstd ? 'zstd' : gzip ? 'true' : 'false',
|
||||
},
|
||||
})
|
||||
|
||||
console.warn('Export task:', exportStream.headers['task-id'])
|
||||
|
||||
await fromCallback(cb => pump(
|
||||
await pipeline(
|
||||
exportStream,
|
||||
createOutputStream(args[2]),
|
||||
cb
|
||||
))
|
||||
})(process.argv.slice(2)).catch(
|
||||
console.error.bind(console, 'error')
|
||||
)
|
||||
createProgress({ time: 1e3 }, p => console.warn(formatProgress(p))),
|
||||
createOutputStream(args[2])
|
||||
)
|
||||
})(process.argv.slice(2)).catch(console.error.bind(console, 'error'))
|
||||
|
||||
@@ -1,6 +1,12 @@
{
"dependencies": {
"getopts": "^2.2.3",
"golike-defer": "^0.4.1",
"pump": "^3.0.0"
"human-format": "^0.10.1",
"process-top": "^1.0.0",
"progress-stream": "^2.0.0",
"promise-toolbox": "^0.11.0",
"readable-stream": "^3.1.1",
"throttle": "^1.0.3"
}
}
@@ -1,5 +1,8 @@
const { createReadStream, createWriteStream, statSync } = require('fs')
const { PassThrough } = require('stream')
const { fromCallback } = require('promise-toolbox')
const { PassThrough, pipeline } = require('readable-stream')
const humanFormat = require('human-format')
const Throttle = require('throttle')

const { isOpaqueRef } = require('../')

@@ -26,7 +29,32 @@ exports.createOutputStream = path => {
return stream
}

exports.resolveRef = (xapi, type, refOrUuidOrNameLabel) =>
const formatSizeOpts = { scale: 'binary', unit: 'B' }
const formatSize = bytes => humanFormat(bytes, formatSizeOpts)

exports.formatProgress = p =>
[
formatSize(p.transferred),
' / ',
formatSize(p.length),
' | ',
p.runtime,
's / ',
p.eta,
's | ',
formatSize(p.speed),
'/s',
].join('')

exports.pipeline = (...streams) => {
return fromCallback(cb => {
streams = streams.filter(_ => _ != null)
streams.push(cb)
pipeline.apply(undefined, streams)
})
}

const resolveRef = (xapi, type, refOrUuidOrNameLabel) =>
isOpaqueRef(refOrUuidOrNameLabel)
? refOrUuidOrNameLabel
: xapi.call(`${type}.get_by_uuid`, refOrUuidOrNameLabel).catch(() =>
@@ -41,3 +69,10 @@ exports.resolveRef = (xapi, type, refOrUuidOrNameLabel) =>
)
})
)

exports.resolveRecord = async (xapi, type, refOrUuidOrNameLabel) =>
xapi.getRecord(type, await resolveRef(xapi, type, refOrUuidOrNameLabel))

exports.resolveRef = resolveRef

exports.throttle = opts => (opts != null ? new Throttle(opts) : undefined)
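Taken together, these helpers let the example scripts build a promisified stream pipeline with optional throttling and periodic progress reporting. A minimal sketch of how they compose; the function and argument names here are hypothetical, not part of the diff:

```js
const createProgress = require('progress-stream')

const {
  createOutputStream,
  formatProgress,
  pipeline,
  throttle,
} = require('./utils')

// Copy `source` to `path`, logging progress every second and optionally
// capping throughput at `bps` bytes per second.
async function copyWithProgress(source, path, bps) {
  await pipeline(
    source,
    createProgress({ time: 1e3 }, p => console.warn(formatProgress(p))),
    // throttle() returns undefined when no rate is given; pipeline() filters
    // out null/undefined stages, so the throttle step is simply skipped.
    throttle(bps),
    createOutputStream(path)
  )
}
```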
@@ -2,29 +2,178 @@
|
||||
# yarn lockfile v1
|
||||
|
||||
|
||||
end-of-stream@^1.1.0:
|
||||
version "1.4.1"
|
||||
resolved "https://registry.yarnpkg.com/end-of-stream/-/end-of-stream-1.4.1.tgz#ed29634d19baba463b6ce6b80a37213eab71ec43"
|
||||
core-util-is@~1.0.0:
|
||||
version "1.0.2"
|
||||
resolved "https://registry.yarnpkg.com/core-util-is/-/core-util-is-1.0.2.tgz#b5fd54220aa2bc5ab57aab7140c940754503c1a7"
|
||||
integrity sha1-tf1UIgqivFq1eqtxQMlAdUUDwac=
|
||||
|
||||
debug@2:
|
||||
version "2.6.9"
|
||||
resolved "https://registry.yarnpkg.com/debug/-/debug-2.6.9.tgz#5d128515df134ff327e90a4c93f4e077a536341f"
|
||||
integrity sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==
|
||||
dependencies:
|
||||
once "^1.4.0"
|
||||
ms "2.0.0"
|
||||
|
||||
event-loop-delay@^1.0.0:
|
||||
version "1.0.0"
|
||||
resolved "https://registry.yarnpkg.com/event-loop-delay/-/event-loop-delay-1.0.0.tgz#5af6282549494fd0d868c499cbdd33e027978b8c"
|
||||
integrity sha512-8YtyeIWHXrvTqlAhv+fmtaGGARmgStbvocERYzrZ3pwhnQULe5PuvMUTjIWw/emxssoaftfHZsJtkeY8xjiXCg==
|
||||
dependencies:
|
||||
napi-macros "^1.8.2"
|
||||
node-gyp-build "^3.7.0"
|
||||
|
||||
getopts@^2.2.3:
|
||||
version "2.2.3"
|
||||
resolved "https://registry.yarnpkg.com/getopts/-/getopts-2.2.3.tgz#11d229775e2ec2067ed8be6fcc39d9b4bf39cf7d"
|
||||
integrity sha512-viEcb8TpgeG05+Nqo5EzZ8QR0hxdyrYDp6ZSTZqe2M/h53Bk036NmqG38Vhf5RGirC/Of9Xql+v66B2gp256SQ==
|
||||
|
||||
golike-defer@^0.4.1:
|
||||
version "0.4.1"
|
||||
resolved "https://registry.yarnpkg.com/golike-defer/-/golike-defer-0.4.1.tgz#7a1cd435d61e461305805d980b133a0f3db4e1cc"
|
||||
|
||||
once@^1.3.1, once@^1.4.0:
|
||||
version "1.4.0"
|
||||
resolved "https://registry.yarnpkg.com/once/-/once-1.4.0.tgz#583b1aa775961d4b113ac17d9c50baef9dd76bd1"
|
||||
dependencies:
|
||||
wrappy "1"
|
||||
human-format@^0.10.1:
|
||||
version "0.10.1"
|
||||
resolved "https://registry.yarnpkg.com/human-format/-/human-format-0.10.1.tgz#107793f355912e256148d5b5dcf66a0230187ee9"
|
||||
integrity sha512-UzCHToSw3HI9MxH9tYzMr1JbHJbgzr6o0hZCun7sruv59S1leps21bmgpBkkwEvQon5n/2OWKH1iU7BEko02cg==
|
||||
|
||||
pump@^3.0.0:
|
||||
version "3.0.0"
|
||||
resolved "https://registry.yarnpkg.com/pump/-/pump-3.0.0.tgz#b4a2116815bde2f4e1ea602354e8c75565107a64"
|
||||
dependencies:
|
||||
end-of-stream "^1.1.0"
|
||||
once "^1.3.1"
|
||||
inherits@^2.0.3, inherits@~2.0.3:
|
||||
version "2.0.3"
|
||||
resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.3.tgz#633c2c83e3da42a502f52466022480f4208261de"
|
||||
integrity sha1-Yzwsg+PaQqUC9SRmAiSA9CCCYd4=
|
||||
|
||||
wrappy@1:
|
||||
isarray@~1.0.0:
|
||||
version "1.0.0"
|
||||
resolved "https://registry.yarnpkg.com/isarray/-/isarray-1.0.0.tgz#bb935d48582cba168c06834957a54a3e07124f11"
|
||||
integrity sha1-u5NdSFgsuhaMBoNJV6VKPgcSTxE=
|
||||
|
||||
make-error@^1.3.2:
|
||||
version "1.3.5"
|
||||
resolved "https://registry.yarnpkg.com/make-error/-/make-error-1.3.5.tgz#efe4e81f6db28cadd605c70f29c831b58ef776c8"
|
||||
integrity sha512-c3sIjNUow0+8swNwVpqoH4YCShKNFkMaw6oH1mNS2haDZQqkeZFlHS3dhoeEbKKmJB4vXpJucU6oH75aDYeE9g==
|
||||
|
||||
ms@2.0.0:
|
||||
version "2.0.0"
|
||||
resolved "https://registry.yarnpkg.com/ms/-/ms-2.0.0.tgz#5608aeadfc00be6c2901df5f9861788de0d597c8"
|
||||
integrity sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=
|
||||
|
||||
napi-macros@^1.8.2:
|
||||
version "1.8.2"
|
||||
resolved "https://registry.yarnpkg.com/napi-macros/-/napi-macros-1.8.2.tgz#299265c1d8aa401351ad0675107d751228c03eda"
|
||||
integrity sha512-Tr0DNY4RzTaBG2W2m3l7ZtFuJChTH6VZhXVhkGGjF/4cZTt+i8GcM9ozD+30Lmr4mDoZ5Xx34t2o4GJqYWDGcg==
|
||||
|
||||
node-gyp-build@^3.7.0:
|
||||
version "3.7.0"
|
||||
resolved "https://registry.yarnpkg.com/node-gyp-build/-/node-gyp-build-3.7.0.tgz#daa77a4f547b9aed3e2aac779eaf151afd60ec8d"
|
||||
integrity sha512-L/Eg02Epx6Si2NXmedx+Okg+4UHqmaf3TNcxd50SF9NQGcJaON3AtU++kax69XV7YWz4tUspqZSAsVofhFKG2w==
|
||||
|
||||
prettier-bytes@^1.0.4:
|
||||
version "1.0.4"
|
||||
resolved "https://registry.yarnpkg.com/prettier-bytes/-/prettier-bytes-1.0.4.tgz#994b02aa46f699c50b6257b5faaa7fe2557e62d6"
|
||||
integrity sha1-mUsCqkb2mcULYle1+qp/4lV+YtY=
|
||||
|
||||
process-nextick-args@~2.0.0:
|
||||
version "2.0.0"
|
||||
resolved "https://registry.yarnpkg.com/process-nextick-args/-/process-nextick-args-2.0.0.tgz#a37d732f4271b4ab1ad070d35508e8290788ffaa"
|
||||
integrity sha512-MtEC1TqN0EU5nephaJ4rAtThHtC86dNN9qCuEhtshvpVBkAW5ZO7BASN9REnF9eoXGcRub+pFuKEpOHE+HbEMw==
|
||||
|
||||
process-top@^1.0.0:
|
||||
version "1.0.0"
|
||||
resolved "https://registry.yarnpkg.com/process-top/-/process-top-1.0.0.tgz#52892bedb581c5abf0df2d0aa5c429e34275cc7e"
|
||||
integrity sha512-er8iSmBMslOt5cgIHg9m6zilTPsuUqpEb1yfQ4bDmO80zr/e/5hNn+Tay3CJM/FOBnJo8Bt3fFiDDH6GvIgeAg==
|
||||
dependencies:
|
||||
event-loop-delay "^1.0.0"
|
||||
prettier-bytes "^1.0.4"
|
||||
|
||||
progress-stream@^2.0.0:
|
||||
version "2.0.0"
|
||||
resolved "https://registry.yarnpkg.com/progress-stream/-/progress-stream-2.0.0.tgz#fac63a0b3d11deacbb0969abcc93b214bce19ed5"
|
||||
integrity sha1-+sY6Cz0R3qy7CWmrzJOyFLzhntU=
|
||||
dependencies:
|
||||
speedometer "~1.0.0"
|
||||
through2 "~2.0.3"
|
||||
|
||||
promise-toolbox@^0.11.0:
|
||||
version "0.11.0"
|
||||
resolved "https://registry.yarnpkg.com/promise-toolbox/-/promise-toolbox-0.11.0.tgz#9ed928355355395072dace3f879879504e07d1bc"
|
||||
integrity sha512-bjHk0kq+Ke3J3zbkbbJH6kXCyQZbFHwOTrE/Et7vS0uS0tluoV+PLqU/kEyxl8aARM7v04y2wFoDo/wWAEPvjA==
|
||||
dependencies:
|
||||
make-error "^1.3.2"
|
||||
|
||||
"readable-stream@>= 0.3.0", readable-stream@^3.1.1:
|
||||
version "3.1.1"
|
||||
resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-3.1.1.tgz#ed6bbc6c5ba58b090039ff18ce670515795aeb06"
|
||||
integrity sha512-DkN66hPyqDhnIQ6Jcsvx9bFjhw214O4poMBcIMgPVpQvNy9a0e0Uhg5SqySyDKAmUlwt8LonTBz1ezOnM8pUdA==
|
||||
dependencies:
|
||||
inherits "^2.0.3"
|
||||
string_decoder "^1.1.1"
|
||||
util-deprecate "^1.0.1"
|
||||
|
||||
readable-stream@~2.3.6:
|
||||
version "2.3.6"
|
||||
resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-2.3.6.tgz#b11c27d88b8ff1fbe070643cf94b0c79ae1b0aaf"
|
||||
integrity sha512-tQtKA9WIAhBF3+VLAseyMqZeBjW0AHJoxOtYqSUZNJxauErmLbVm2FW1y+J/YA9dUrAC39ITejlZWhVIwawkKw==
|
||||
dependencies:
|
||||
core-util-is "~1.0.0"
|
||||
inherits "~2.0.3"
|
||||
isarray "~1.0.0"
|
||||
process-nextick-args "~2.0.0"
|
||||
safe-buffer "~5.1.1"
|
||||
string_decoder "~1.1.1"
|
||||
util-deprecate "~1.0.1"
|
||||
|
||||
safe-buffer@~5.1.0, safe-buffer@~5.1.1:
|
||||
version "5.1.2"
|
||||
resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.1.2.tgz#991ec69d296e0313747d59bdfd2b745c35f8828d"
|
||||
integrity sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==
|
||||
|
||||
speedometer@~1.0.0:
|
||||
version "1.0.0"
|
||||
resolved "https://registry.yarnpkg.com/speedometer/-/speedometer-1.0.0.tgz#cd671cb06752c22bca3370e2f334440be4fc62e2"
|
||||
integrity sha1-zWccsGdSwivKM3Di8zREC+T8YuI=
|
||||
|
||||
"stream-parser@>= 0.0.2":
|
||||
version "0.3.1"
|
||||
resolved "https://registry.yarnpkg.com/stream-parser/-/stream-parser-0.3.1.tgz#1618548694420021a1182ff0af1911c129761773"
|
||||
integrity sha1-FhhUhpRCACGhGC/wrxkRwSl2F3M=
|
||||
dependencies:
|
||||
debug "2"
|
||||
|
||||
string_decoder@^1.1.1:
|
||||
version "1.2.0"
|
||||
resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-1.2.0.tgz#fe86e738b19544afe70469243b2a1ee9240eae8d"
|
||||
integrity sha512-6YqyX6ZWEYguAxgZzHGL7SsCeGx3V2TtOTqZz1xSTSWnqsbWwbptafNyvf/ACquZUXV3DANr5BDIwNYe1mN42w==
|
||||
dependencies:
|
||||
safe-buffer "~5.1.0"
|
||||
|
||||
string_decoder@~1.1.1:
|
||||
version "1.1.1"
|
||||
resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-1.1.1.tgz#9cf1611ba62685d7030ae9e4ba34149c3af03fc8"
|
||||
integrity sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==
|
||||
dependencies:
|
||||
safe-buffer "~5.1.0"
|
||||
|
||||
throttle@^1.0.3:
|
||||
version "1.0.3"
|
||||
resolved "https://registry.yarnpkg.com/throttle/-/throttle-1.0.3.tgz#8a32e4a15f1763d997948317c5ebe3ad8a41e4b7"
|
||||
integrity sha1-ijLkoV8XY9mXlIMXxevjrYpB5Lc=
|
||||
dependencies:
|
||||
readable-stream ">= 0.3.0"
|
||||
stream-parser ">= 0.0.2"
|
||||
|
||||
through2@~2.0.3:
|
||||
version "2.0.5"
|
||||
resolved "https://registry.yarnpkg.com/through2/-/through2-2.0.5.tgz#01c1e39eb31d07cb7d03a96a70823260b23132cd"
|
||||
integrity sha512-/mrRod8xqpA+IHSLyGCQ2s8SPHiCDEeQJSep1jqLYeEUClOFG2Qsh+4FU6G9VeqpZnGW/Su8LQGc4YKni5rYSQ==
|
||||
dependencies:
|
||||
readable-stream "~2.3.6"
|
||||
xtend "~4.0.1"
|
||||
|
||||
util-deprecate@^1.0.1, util-deprecate@~1.0.1:
|
||||
version "1.0.2"
|
||||
resolved "https://registry.yarnpkg.com/wrappy/-/wrappy-1.0.2.tgz#b5243d8f3ec1aa35f1364605bc0d1036e30ab69f"
|
||||
resolved "https://registry.yarnpkg.com/util-deprecate/-/util-deprecate-1.0.2.tgz#450d4dc9fa70de732762fbd2d4a28981419a0ccf"
|
||||
integrity sha1-RQ1Nyfpw3nMnYvvS1KKJgUGaDM8=
|
||||
|
||||
xtend@~4.0.1:
|
||||
version "4.0.1"
|
||||
resolved "https://registry.yarnpkg.com/xtend/-/xtend-4.0.1.tgz#a5c6d532be656e23db820efb943a1f04998d63af"
|
||||
integrity sha1-pcbVMr5lbiPbgg77lDofBJmNY68=
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "xen-api",
|
||||
"version": "0.18.0",
|
||||
"version": "0.24.1",
|
||||
"license": "ISC",
|
||||
"description": "Connector to the Xen API",
|
||||
"keywords": [
|
||||
@@ -33,28 +33,28 @@
|
||||
},
|
||||
"dependencies": {
|
||||
"blocked": "^1.2.1",
|
||||
"debug": "^3.1.0",
|
||||
"debug": "^4.0.1",
|
||||
"event-to-promise": "^0.8.0",
|
||||
"exec-promise": "^0.7.0",
|
||||
"http-request-plus": "^0.5.0",
|
||||
"http-request-plus": "^0.7.1",
|
||||
"iterable-backoff": "^0.0.0",
|
||||
"jest-diff": "^23.5.0",
|
||||
"json-rpc-protocol": "^0.12.0",
|
||||
"json-rpc-protocol": "^0.13.1",
|
||||
"kindof": "^2.0.0",
|
||||
"lodash": "^4.17.4",
|
||||
"make-error": "^1.3.0",
|
||||
"minimist": "^1.2.0",
|
||||
"ms": "^2.1.1",
|
||||
"promise-toolbox": "^0.10.1",
|
||||
"promise-toolbox": "^0.11.0",
|
||||
"pw": "0.0.4",
|
||||
"xmlrpc": "^1.3.2",
|
||||
"xo-collection": "^0.4.1"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@babel/cli": "7.0.0",
|
||||
"@babel/core": "7.0.0",
|
||||
"@babel/plugin-proposal-decorators": "7.0.0",
|
||||
"@babel/preset-env": "7.0.0",
|
||||
"@babel/cli": "^7.0.0",
|
||||
"@babel/core": "^7.0.0",
|
||||
"@babel/plugin-proposal-decorators": "^7.0.0",
|
||||
"@babel/preset-env": "^7.0.0",
|
||||
"babel-plugin-lodash": "^3.3.2",
|
||||
"cross-env": "^5.1.3",
|
||||
"rimraf": "^2.6.1"
|
||||
|
||||
17
packages/xen-api/src/_replaceSensitiveValues.js
Normal file
@@ -0,0 +1,17 @@
import mapValues from 'lodash/mapValues'

export default function replaceSensitiveValues(value, replacement) {
  function helper(value, name) {
    if (name === 'password' && typeof value === 'string') {
      return replacement
    }

    if (typeof value !== 'object' || value === null) {
      return value
    }

    return Array.isArray(value) ? value.map(helper) : mapValues(value, helper)
  }

  return helper(value)
}
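The new helper recursively walks arrays and plain objects and swaps any string-valued `password` property for the given replacement, presumably so credentials can be scrubbed before logging. A small example of the expected behaviour, with a hypothetical payload:

```js
import replaceSensitiveValues from './_replaceSensitiveValues'

// Every string `password` property is replaced, at any depth,
// without mutating the original object.
replaceSensitiveValues(
  { user: 'root', password: 'hunter2', opts: { password: 'hunter2' } },
  '* obfuscated *'
)
// → { user: 'root', password: '* obfuscated *', opts: { password: '* obfuscated *' } }
```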