Compare commits
728 Commits
xo-server- ... fs-v0.5.0
.env.example (new file, 2 lines)
@@ -0,0 +1,2 @@
# xo_fs_nfs=nfs://ip:/folder
# xo_fs_smb=smb://login:pass@domain\\ip\folder
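The two commented variables above are the only contents of the new `.env.example`: they point integration tests at real NFS/SMB remotes (the same series of changes adds `dotenv` to the dev dependencies and ignores `.env`). A minimal sketch of how such a file could be consumed — everything beyond the variable names is an assumption for illustration, not code from the repository:

```js
// Illustration only: the variable names come from .env.example, the rest is assumed.
require('dotenv').config() // loads the git-ignored .env file into process.env

const nfsUrl = process.env.xo_fs_nfs // e.g. nfs://ip:/folder
const smbUrl = process.env.xo_fs_smb // e.g. smb://login:pass@domain\\ip\folder

if (nfsUrl === undefined && smbUrl === undefined) {
  console.log('no remote configured, nothing to test against')
}
```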
.eslintrc.js (12 changed lines)
@@ -1,22 +1,28 @@
module.exports = {
  extends: ['standard', 'standard-jsx'],
  extends: ['standard', 'standard-jsx', 'prettier'],
  globals: {
    __DEV__: true,
    $Dict: true,
    $Diff: true,
    $ElementType: true,
    $Exact: true,
    $Keys: true,
    $PropertyType: true,
    $Shape: true,
  },
  parser: 'babel-eslint',
  parserOptions: {
    ecmaFeatures: {
      legacyDecorators: true,
    },
  },
  rules: {
    'comma-dangle': ['error', 'always-multiline'],
    indent: 'off',
    'no-var': 'error',
    'node/no-extraneous-import': 'error',
    'node/no-extraneous-require': 'error',
    'prefer-const': 'error',

    // See https://github.com/prettier/eslint-config-prettier/issues/65
    'react/jsx-indent': 'off',
  },
}
.gitignore (vendored, 1 changed line)
@@ -30,3 +30,4 @@ pnpm-debug.log
pnpm-debug.log.*
yarn-error.log
yarn-error.log.*
.env
@@ -1,4 +1,5 @@
module.exports = {
  jsxSingleQuote: true,
  semi: false,
  singleQuote: true,
  trailingComma: 'es5',
.travis.yml (10 changed lines)
@@ -2,7 +2,6 @@ language: node_js
node_js:
  #- stable # disable for now due to an issue of indirect dep upath with Node 9
  - 8
  - 6

# Use containers.
# http://docs.travis-ci.com/user/workers/container-based-infrastructure/
@@ -10,9 +9,9 @@ sudo: false
addons:
  apt:
    packages:
      - qemu-utils
      - blktap-utils
      - vmdk-stream-converter
      - qemu-utils
      - blktap-utils
      - vmdk-stream-converter

before_install:
  - curl -o- -L https://yarnpkg.com/install.sh | bash
@@ -22,5 +21,4 @@ cache:
  yarn: true

script:
  - yarn run test
  - yarn run test-integration
  - yarn run travis-tests
@xen-orchestra/async-map/.babelrc.js (new file, 3 lines)
@@ -0,0 +1,3 @@
module.exports = require('../../@xen-orchestra/babel-config')(
  require('./package.json')
)
@xen-orchestra/async-map/.npmignore (new file, 24 lines)
@@ -0,0 +1,24 @@
/benchmark/
/benchmarks/
*.bench.js
*.bench.js.map

/examples/
example.js
example.js.map
*.example.js
*.example.js.map

/fixture/
/fixtures/
*.fixture.js
*.fixture.js.map
*.fixtures.js
*.fixtures.js.map

/test/
/tests/
*.spec.js
*.spec.js.map

__snapshots__/
@xen-orchestra/async-map/README.md (new file, 49 lines)
@@ -0,0 +1,49 @@
# @xen-orchestra/async-map [](https://travis-ci.org/vatesfr/xen-orchestra)

> ${pkg.description}

## Install

Installation of the [npm package](https://npmjs.org/package/@xen-orchestra/async-map):

```
> npm install --save @xen-orchestra/async-map
```

## Usage

**TODO**

## Development

```
# Install dependencies
> yarn

# Run the tests
> yarn test

# Continuously compile
> yarn dev

# Continuously run the tests
> yarn dev-test

# Build for production (automatically called by npm install)
> yarn build
```

## Contributions

Contributions are *very* welcomed, either on the documentation or on
the code.

You may:

- report any [issue](https://github.com/vatesfr/xen-orchestra/issues)
  you've encountered;
- fork and create a pull request.

## License

ISC © [Vates SAS](https://vates.fr)
@xen-orchestra/async-map/package.json (new file, 50 lines)
@@ -0,0 +1,50 @@
{
  "name": "@xen-orchestra/async-map",
  "version": "0.0.0",
  "license": "ISC",
  "description": "",
  "keywords": [],
  "homepage": "https://github.com/vatesfr/xen-orchestra/tree/master/@xen-orchestra/async-map",
  "bugs": "https://github.com/vatesfr/xen-orchestra/issues",
  "repository": {
    "type": "git",
    "url": "https://github.com/vatesfr/xen-orchestra.git"
  },
  "author": {
    "name": "Julien Fontanet",
    "email": "julien.fontanet@isonoe.net"
  },
  "preferGlobal": false,
  "main": "dist/",
  "bin": {},
  "files": [
    "dist/"
  ],
  "browserslist": [
    ">2%"
  ],
  "engines": {
    "node": ">=6"
  },
  "dependencies": {
    "lodash": "^4.17.4"
  },
  "devDependencies": {
    "@babel/cli": "^7.0.0",
    "@babel/core": "^7.0.0",
    "@babel/preset-env": "^7.0.0",
    "@babel/preset-flow": "^7.0.0",
    "babel-plugin-lodash": "^3.3.2",
    "cross-env": "^5.1.3",
    "rimraf": "^2.6.2"
  },
  "scripts": {
    "build": "cross-env NODE_ENV=production babel --source-maps --out-dir=dist/ src/",
    "clean": "rimraf dist/",
    "dev": "cross-env NODE_ENV=development babel --watch --source-maps --out-dir=dist/ src/",
    "prebuild": "yarn run clean",
    "predev": "yarn run prebuild",
    "prepare": "yarn run build",
    "prepublishOnly": "yarn run build"
  }
}
@xen-orchestra/async-map/src/index.js (new file, 43 lines)
@@ -0,0 +1,43 @@
// type MaybePromise<T> = Promise<T> | T
//
// declare export function asyncMap<T1, T2>(
//   collection: MaybePromise<T1[]>,
//   (T1, number) => MaybePromise<T2>
// ): Promise<T2[]>
// declare export function asyncMap<K, V1, V2>(
//   collection: MaybePromise<{ [K]: V1 }>,
//   (V1, K) => MaybePromise<V2>
// ): Promise<V2[]>

import map from 'lodash/map'

// Similar to map() + Promise.all() but wait for all promises to
// settle before rejecting (with the first error)
const asyncMap = (collection, iteratee) => {
  let then
  if (collection != null && typeof (then = collection.then) === 'function') {
    return then.call(collection, collection => asyncMap(collection, iteratee))
  }

  let errorContainer
  const onError = error => {
    if (errorContainer === undefined) {
      errorContainer = { error }
    }
  }

  return Promise.all(
    map(collection, (item, key, collection) =>
      new Promise(resolve => {
        resolve(iteratee(item, key, collection))
      }).catch(onError)
    )
  ).then(values => {
    if (errorContainer !== undefined) {
      throw errorContainer.error
    }
    return values
  })
}

export { asyncMap as default }
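The helper above behaves like `map()` + `Promise.all()`, except that a rejection is only propagated once every iteratee has settled, and it also accepts a promise of a collection or a plain object (via lodash's `map`). A short usage sketch — the collection and the failing id below are made up purely for illustration:

```js
import asyncMap from '@xen-orchestra/async-map'

const ids = ['vm-1', 'vm-2', 'vm-3'] // made-up ids

asyncMap(ids, async id => {
  // stand-in for an API call that may fail for some items
  if (id === 'vm-2') {
    throw new Error(`cannot fetch ${id}`)
  }
  return { id }
}).then(
  vms => console.log('all fetched', vms),
  error => console.error('first error, reported only after every call settled:', error)
)
```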
@@ -11,23 +11,27 @@ const configs = {
  '@babel/plugin-proposal-decorators': {
    legacy: true,
  },
  '@babel/preset-env' (pkg) {
  '@babel/plugin-proposal-pipeline-operator': {
    proposal: 'minimal',
  },
  '@babel/preset-env'(pkg) {
    return {
      debug: !__TEST__,
      loose: true,

      // disabled until https://github.com/babel/babel/issues/8323 is resolved
      // loose: true,

      shippedProposals: true,
      targets: __PROD__
        ? (() => {
            let node = (pkg.engines || {}).node
            if (node !== undefined) {
              const trimChars = '^=>~'
              while (trimChars.includes(node[0])) {
                node = node.slice(1)
              }
              return { node: node }
            }
          })()
        : { browsers: '', node: 'current' },
      targets: (() => {
        let node = (pkg.engines || {}).node
        if (node !== undefined) {
          const trimChars = '^=>~'
          while (trimChars.includes(node[0])) {
            node = node.slice(1)
          }
        }
        return { browsers: pkg.browserslist, node }
      })(),
      useBuiltIns: '@babel/polyfill' in (pkg.dependencies || {}) && 'usage',
    }
  },
@@ -38,11 +42,11 @@ const getConfig = (key, ...args) => {
  return config === undefined
    ? {}
    : typeof config === 'function'
      ? config(...args)
      : config
    ? config(...args)
    : config
}

module.exports = function (pkg, plugins, presets) {
module.exports = function(pkg, plugins, presets) {
  plugins === undefined && (plugins = {})
  presets === undefined && (presets = {})
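The new `targets` logic always derives the compilation targets from the package itself: `engines.node` is stripped of any leading `^`, `=`, `>`, `~` characters and combined with `pkg.browserslist`. A worked example of that trimming, restated as a standalone helper purely for illustration (not part of the actual config module):

```js
// Standalone restatement of the trimming performed above, for illustration only.
const targetsFromPkg = pkg => {
  let node = (pkg.engines || {}).node // e.g. '>=6'
  if (node !== undefined) {
    const trimChars = '^=>~'
    while (trimChars.includes(node[0])) {
      node = node.slice(1) // '>=6' -> '=6' -> '6'
    }
  }
  return { browsers: pkg.browserslist, node }
}

console.log(targetsFromPkg({ engines: { node: '>=6' }, browserslist: ['>2%'] }))
// -> { browsers: [ '>2%' ], node: '6' }
```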
@xen-orchestra/cr-seed-cli/index.js (new executable file, 117 lines)
@@ -0,0 +1,117 @@
#!/usr/bin/env node

const defer = require('golike-defer').default
const { NULL_REF, Xapi } = require('xen-api')

const pkg = require('./package.json')

Xapi.prototype.getVmDisks = async function(vm) {
  const disks = { __proto__: null }
  await Promise.all([
    ...vm.VBDs.map(async vbdRef => {
      const vbd = await this.getRecord('VBD', vbdRef)
      let vdiRef
      if (vbd.type === 'Disk' && (vdiRef = vbd.VDI) !== NULL_REF) {
        disks[vbd.userdevice] = await this.getRecord('VDI', vdiRef)
      }
    }),
  ])
  return disks
}

defer(async function main($defer, args) {
  if (args.length === 0 || args.includes('-h') || args.includes('--help')) {
    const cliName = Object.keys(pkg.bin)[0]
    return console.error(
      '%s',
      `
Usage: ${cliName} <source XAPI URL> <source snapshot UUID> <target XAPI URL> <target VM UUID> <backup job id> <backup schedule id>

${cliName} v${pkg.version}
`
    )
  }

  const [
    srcXapiUrl,
    srcSnapshotUuid,
    tgtXapiUrl,
    tgtVmUuid,
    jobId,
    scheduleId,
  ] = args

  const srcXapi = new Xapi({
    allowUnauthorized: true,
    url: srcXapiUrl,
    watchEvents: false,
  })
  await srcXapi.connect()
  defer.call(srcXapi, 'disconnect')

  const tgtXapi = new Xapi({
    allowUnauthorized: true,
    url: tgtXapiUrl,
    watchEvents: false,
  })
  await tgtXapi.connect()
  defer.call(tgtXapi, 'disconnect')

  const [srcSnapshot, tgtVm] = await Promise.all([
    srcXapi.getRecordByUuid('VM', srcSnapshotUuid),
    tgtXapi.getRecordByUuid('VM', tgtVmUuid),
  ])
  const srcVm = await srcXapi.getRecord('VM', srcSnapshot.snapshot_of)

  const metadata = {
    'xo:backup:datetime': srcSnapshot.snapshot_time,
    'xo:backup:job': jobId,
    'xo:backup:schedule': scheduleId,
    'xo:backup:vm': srcVm.uuid,
  }

  const [srcDisks, tgtDisks] = await Promise.all([
    srcXapi.getVmDisks(srcSnapshot),
    tgtXapi.getVmDisks(tgtVm),
  ])
  const userDevices = Object.keys(tgtDisks)

  const tgtSr = await tgtXapi.getRecord(
    'SR',
    tgtDisks[Object.keys(tgtDisks)[0]].SR
  )

  await Promise.all([
    srcXapi.setFieldEntries(srcSnapshot, 'other_config', metadata),
    srcXapi.setFieldEntries(srcSnapshot, 'other_config', {
      'xo:backup:exported': 'true',
    }),
    tgtXapi.setField(
      tgtVm,
      'name_label',
      `${srcVm.name_label} (${srcSnapshot.snapshot_time})`
    ),
    tgtXapi.setFieldEntries(tgtVm, 'other_config', metadata),
    tgtXapi.setFieldEntries(tgtVm, 'other_config', {
      'xo:backup:sr': tgtSr.uuid,
      'xo:copy_of': srcSnapshotUuid,
    }),
    tgtXapi.setFieldEntries(tgtVm, 'blocked_operations', {
      start:
        'Start operation for this vm is blocked, clone it if you want to use it.',
    }),
    Promise.all(
      userDevices.map(userDevice => {
        const srcDisk = srcDisks[userDevice]
        const tgtDisk = tgtDisks[userDevice]

        return tgtXapi.setFieldEntry(
          tgtDisk,
          'other_config',
          'xo:copy_of',
          srcDisk.uuid
        )
      })
    ),
  ])
})(process.argv.slice(2)).catch(console.error.bind(console, 'Fatal error:'))
@xen-orchestra/cr-seed-cli/package.json (new file, 20 lines)
@@ -0,0 +1,20 @@
{
  "name": "@xen-orchestra/cr-seed-cli",
  "version": "0.2.0",
  "homepage": "https://github.com/vatesfr/xen-orchestra/tree/master/@xen-orchestra/cr-seed-cli",
  "bugs": "https://github.com/vatesfr/xen-orchestra/issues",
  "repository": {
    "type": "git",
    "url": "https://github.com/vatesfr/xen-orchestra.git"
  },
  "engines": {
    "node": ">=8"
  },
  "bin": {
    "xo-cr-seed": "./index.js"
  },
  "dependencies": {
    "golike-defer": "^0.4.1",
    "xen-api": "^0.22.0"
  }
}
@@ -41,10 +41,10 @@
    "moment-timezone": "^0.5.14"
  },
  "devDependencies": {
    "@babel/cli": "7.0.0-beta.49",
    "@babel/core": "7.0.0-beta.49",
    "@babel/preset-env": "7.0.0-beta.49",
    "@babel/preset-flow": "7.0.0-beta.49",
    "@babel/cli": "^7.0.0",
    "@babel/core": "^7.0.0",
    "@babel/preset-env": "^7.0.0",
    "@babel/preset-flow": "^7.0.0",
    "cross-env": "^5.1.3",
    "rimraf": "^2.6.2"
  },
@@ -6,9 +6,14 @@ import parse from './parse'
|
||||
const MAX_DELAY = 2 ** 31 - 1
|
||||
|
||||
class Job {
|
||||
constructor (schedule, fn) {
|
||||
constructor(schedule, fn) {
|
||||
const wrapper = () => {
|
||||
const result = fn()
|
||||
let result
|
||||
try {
|
||||
result = fn()
|
||||
} catch (_) {
|
||||
// catch any thrown value to ensure it does not break the job
|
||||
}
|
||||
let then
|
||||
if (result != null && typeof (then = result.then) === 'function') {
|
||||
then.call(result, scheduleNext, scheduleNext)
|
||||
@@ -28,30 +33,32 @@ class Job {
|
||||
this._timeout = undefined
|
||||
}
|
||||
|
||||
start () {
|
||||
start() {
|
||||
this.stop()
|
||||
this._scheduleNext()
|
||||
}
|
||||
|
||||
stop () {
|
||||
stop() {
|
||||
clearTimeout(this._timeout)
|
||||
}
|
||||
}
|
||||
|
||||
class Schedule {
|
||||
constructor (pattern, zone = 'utc') {
|
||||
constructor(pattern, zone = 'utc') {
|
||||
this._schedule = parse(pattern)
|
||||
this._createDate =
|
||||
zone.toLowerCase() === 'utc'
|
||||
? moment.utc
|
||||
: zone === 'local' ? moment : () => moment.tz(zone)
|
||||
: zone === 'local'
|
||||
? moment
|
||||
: () => moment.tz(zone)
|
||||
}
|
||||
|
||||
createJob (fn) {
|
||||
createJob(fn) {
|
||||
return new Job(this, fn)
|
||||
}
|
||||
|
||||
next (n) {
|
||||
next(n) {
|
||||
const dates = new Array(n)
|
||||
const schedule = this._schedule
|
||||
let date = this._createDate()
|
||||
@@ -61,12 +68,12 @@ class Schedule {
|
||||
return dates
|
||||
}
|
||||
|
||||
_nextDelay () {
|
||||
_nextDelay() {
|
||||
const now = this._createDate()
|
||||
return next(this._schedule, now) - now
|
||||
}
|
||||
|
||||
startJob (fn) {
|
||||
startJob(fn) {
|
||||
const job = this.createJob(fn)
|
||||
job.start()
|
||||
return job.stop.bind(job)
|
||||
|
||||
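The hunks above touch `@xen-orchestra/cron`: `Job` now catches synchronous throws from the scheduled function so a failing run cannot break rescheduling, and `Schedule` exposes `createJob()`, `next(n)` and `startJob()` (which returns a stop function). A hedged usage sketch — how the class is exported from the package is not shown in this diff, so the construction and the task below are assumptions:

```js
// Export name assumed for this sketch; the hunk above only shows the class itself.
const schedule = new Schedule('0 * * * *', 'utc') // top of every hour, UTC

// peek at the next three occurrences (moment instances)
console.log(schedule.next(3).map(date => date.toISOString()))

// run a (hypothetical) task on that schedule; startJob() hands back a stop function
const stop = schedule.startJob(async () => {
  await doHourlyWork() // placeholder; throws/rejections do not prevent the next run
})

// ...later
stop()
```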
@xen-orchestra/defined/.babelrc.js (new file, 3 lines)
@@ -0,0 +1,3 @@
module.exports = require('../../@xen-orchestra/babel-config')(
  require('./package.json')
)
24
@xen-orchestra/defined/.npmignore
Normal file
24
@xen-orchestra/defined/.npmignore
Normal file
@@ -0,0 +1,24 @@
|
||||
/benchmark/
|
||||
/benchmarks/
|
||||
*.bench.js
|
||||
*.bench.js.map
|
||||
|
||||
/examples/
|
||||
example.js
|
||||
example.js.map
|
||||
*.example.js
|
||||
*.example.js.map
|
||||
|
||||
/fixture/
|
||||
/fixtures/
|
||||
*.fixture.js
|
||||
*.fixture.js.map
|
||||
*.fixtures.js
|
||||
*.fixtures.js.map
|
||||
|
||||
/test/
|
||||
/tests/
|
||||
*.spec.js
|
||||
*.spec.js.map
|
||||
|
||||
__snapshots__/
|
||||
49
@xen-orchestra/defined/README.md
Normal file
49
@xen-orchestra/defined/README.md
Normal file
@@ -0,0 +1,49 @@
|
||||
# ${pkg.name} [](https://travis-ci.org/${pkg.shortGitHubPath})
|
||||
|
||||
> ${pkg.description}
|
||||
|
||||
## Install
|
||||
|
||||
Installation of the [npm package](https://npmjs.org/package/${pkg.name}):
|
||||
|
||||
```
|
||||
> npm install --save ${pkg.name}
|
||||
```
|
||||
|
||||
## Usage
|
||||
|
||||
**TODO**
|
||||
|
||||
## Development
|
||||
|
||||
```
|
||||
# Install dependencies
|
||||
> yarn
|
||||
|
||||
# Run the tests
|
||||
> yarn test
|
||||
|
||||
# Continuously compile
|
||||
> yarn dev
|
||||
|
||||
# Continuously run the tests
|
||||
> yarn dev-test
|
||||
|
||||
# Build for production (automatically called by npm install)
|
||||
> yarn build
|
||||
```
|
||||
|
||||
## Contributions
|
||||
|
||||
Contributions are *very* welcomed, either on the documentation or on
|
||||
the code.
|
||||
|
||||
You may:
|
||||
|
||||
- report any [issue](${pkg.bugs})
|
||||
you've encountered;
|
||||
- fork and create a pull request.
|
||||
|
||||
## License
|
||||
|
||||
${pkg.license} © [${pkg.author.name}](${pkg.author.url})
|
||||
47
@xen-orchestra/defined/package.json
Normal file
47
@xen-orchestra/defined/package.json
Normal file
@@ -0,0 +1,47 @@
|
||||
{
|
||||
"name": "@xen-orchestra/defined",
|
||||
"version": "0.0.0",
|
||||
"license": "ISC",
|
||||
"description": "",
|
||||
"keywords": [],
|
||||
"homepage": "https://github.com/vatesfr/xen-orchestra/tree/master/@xen-orchestra/defined",
|
||||
"bugs": "https://github.com/vatesfr/xen-orchestra/issues",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://github.com/vatesfr/xen-orchestra.git"
|
||||
},
|
||||
"author": {
|
||||
"name": "Julien Fontanet",
|
||||
"email": "julien.fontanet@vates.fr"
|
||||
},
|
||||
"preferGlobal": false,
|
||||
"main": "dist/",
|
||||
"bin": {},
|
||||
"files": [
|
||||
"dist/"
|
||||
],
|
||||
"browserslist": [
|
||||
">2%"
|
||||
],
|
||||
"engines": {
|
||||
"node": ">=6"
|
||||
},
|
||||
"dependencies": {},
|
||||
"devDependencies": {
|
||||
"@babel/cli": "^7.0.0",
|
||||
"@babel/core": "^7.0.0",
|
||||
"@babel/preset-env": "^7.0.0",
|
||||
"@babel/preset-flow": "^7.0.0",
|
||||
"babel-plugin-lodash": "^3.3.2",
|
||||
"cross-env": "^5.1.3",
|
||||
"rimraf": "^2.6.2"
|
||||
},
|
||||
"scripts": {
|
||||
"build": "cross-env NODE_ENV=production babel --source-maps --out-dir=dist/ src/",
|
||||
"clean": "rimraf dist/",
|
||||
"dev": "cross-env NODE_ENV=development babel --watch --source-maps --out-dir=dist/ src/",
|
||||
"prebuild": "yarn run clean",
|
||||
"predev": "yarn run prebuild",
|
||||
"prepublishOnly": "yarn run build"
|
||||
}
|
||||
}
|
||||
@@ -1,3 +1,5 @@
|
||||
// @flow
|
||||
|
||||
// Usage:
|
||||
//
|
||||
// ```js
|
||||
@@ -11,7 +13,7 @@
|
||||
// process.env.http_proxy
|
||||
// ])
|
||||
// ```
|
||||
export default function defined () {
|
||||
export default function defined() {
|
||||
let args = arguments
|
||||
let n = args.length
|
||||
if (n === 1) {
|
||||
@@ -39,7 +41,7 @@ export default function defined () {
|
||||
// const getFriendName = _ => _.friends[0].name
|
||||
// const friendName = get(getFriendName, props.user)
|
||||
// ```
|
||||
export const get = (accessor, arg) => {
|
||||
export const get = (accessor: (input: ?any) => any, arg: ?any) => {
|
||||
try {
|
||||
return accessor(arg)
|
||||
} catch (error) {
|
||||
@@ -58,5 +60,5 @@ export const get = (accessor, arg) => {
|
||||
// _ => new ProxyAgent(_)
|
||||
// )
|
||||
// ```
|
||||
export const ifDef = (value, thenFn) =>
|
||||
export const ifDef = (value: ?any, thenFn: (value: any) => any) =>
|
||||
value !== undefined ? thenFn(value) : value
|
||||
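The `@xen-orchestra/defined` hunks above adjust the signatures of `defined()`, `get()` and `ifDef()` (adding Flow annotations) without changing their behaviour: `defined()` returns its first non-`undefined` argument, `get()` swallows property-access errors, and `ifDef()` applies a function only to defined values. A consolidated usage sketch based on the usage comments visible in the diff — the concrete values are illustrative, not from the repository:

```js
import defined, { get, ifDef } from '@xen-orchestra/defined'

// first argument that is not undefined wins
const proxyUrl = defined(process.env.HTTP_PROXY, process.env.http_proxy)

// deep property access that returns undefined instead of throwing
const friendName = get(_ => _.friends[0].name, { friends: [] }) // undefined, no TypeError

// apply a function only when the value is defined
const agentOptions = ifDef(proxyUrl, url => ({ proxy: url }))

console.log({ proxyUrl, friendName, agentOptions })
```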
@xen-orchestra/emit-async/.babelrc.js (new file, 3 lines)
@@ -0,0 +1,3 @@
module.exports = require('../../@xen-orchestra/babel-config')(
  require('./package.json')
)
24
@xen-orchestra/emit-async/.npmignore
Normal file
24
@xen-orchestra/emit-async/.npmignore
Normal file
@@ -0,0 +1,24 @@
|
||||
/benchmark/
|
||||
/benchmarks/
|
||||
*.bench.js
|
||||
*.bench.js.map
|
||||
|
||||
/examples/
|
||||
example.js
|
||||
example.js.map
|
||||
*.example.js
|
||||
*.example.js.map
|
||||
|
||||
/fixture/
|
||||
/fixtures/
|
||||
*.fixture.js
|
||||
*.fixture.js.map
|
||||
*.fixtures.js
|
||||
*.fixtures.js.map
|
||||
|
||||
/test/
|
||||
/tests/
|
||||
*.spec.js
|
||||
*.spec.js.map
|
||||
|
||||
__snapshots__/
|
||||
71
@xen-orchestra/emit-async/README.md
Normal file
71
@xen-orchestra/emit-async/README.md
Normal file
@@ -0,0 +1,71 @@
|
||||
# @xen-orchestra/emit-async [](https://travis-ci.org/${pkg.shortGitHubPath})
|
||||
|
||||
> ${pkg.description}
|
||||
|
||||
## Install
|
||||
|
||||
Installation of the [npm package](https://npmjs.org/package/@xen-orchestra/emit-async):
|
||||
|
||||
```
|
||||
> npm install --save @xen-orchestra/emit-async
|
||||
```
|
||||
|
||||
## Usage
|
||||
|
||||
```js
|
||||
import EE from 'events'
|
||||
import emitAsync from '@xen-orchestra/emit-async'
|
||||
|
||||
const ee = new EE()
|
||||
ee.emitAsync = emitAsync
|
||||
|
||||
ee.on('start', async function () {
|
||||
// whatever
|
||||
})
|
||||
|
||||
// similar to EventEmmiter#emit() but returns a promise which resolves when all
|
||||
// listeners have resolved
|
||||
await ee.emitAsync('start')
|
||||
|
||||
// by default, it will rejects as soon as one listener reject, you can customise
|
||||
// error handling though:
|
||||
await ee.emitAsync({
|
||||
onError (error) {
|
||||
console.warn(error)
|
||||
}
|
||||
}, 'start')
|
||||
```
|
||||
|
||||
## Development
|
||||
|
||||
```
|
||||
# Install dependencies
|
||||
> yarn
|
||||
|
||||
# Run the tests
|
||||
> yarn test
|
||||
|
||||
# Continuously compile
|
||||
> yarn dev
|
||||
|
||||
# Continuously run the tests
|
||||
> yarn dev-test
|
||||
|
||||
# Build for production (automatically called by npm install)
|
||||
> yarn build
|
||||
```
|
||||
|
||||
## Contributions
|
||||
|
||||
Contributions are *very* welcomed, either on the documentation or on
|
||||
the code.
|
||||
|
||||
You may:
|
||||
|
||||
- report any [issue](${pkg.bugs})
|
||||
you've encountered;
|
||||
- fork and create a pull request.
|
||||
|
||||
## License
|
||||
|
||||
${pkg.license} © [${pkg.author.name}](${pkg.author.url})
|
||||
46
@xen-orchestra/emit-async/package.json
Normal file
46
@xen-orchestra/emit-async/package.json
Normal file
@@ -0,0 +1,46 @@
|
||||
{
|
||||
"name": "@xen-orchestra/emit-async",
|
||||
"version": "0.0.0",
|
||||
"license": "ISC",
|
||||
"description": "",
|
||||
"keywords": [],
|
||||
"homepage": "https://github.com/vatesfr/xen-orchestra/tree/master/@xen-orchestra/emit-async",
|
||||
"bugs": "https://github.com/vatesfr/xen-orchestra/issues",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://github.com/vatesfr/xen-orchestra.git"
|
||||
},
|
||||
"author": {
|
||||
"name": "Julien Fontanet",
|
||||
"email": "julien.fontanet@vates.fr"
|
||||
},
|
||||
"preferGlobal": false,
|
||||
"main": "dist/",
|
||||
"bin": {},
|
||||
"files": [
|
||||
"dist/"
|
||||
],
|
||||
"browserslist": [
|
||||
">2%"
|
||||
],
|
||||
"engines": {
|
||||
"node": ">=6"
|
||||
},
|
||||
"dependencies": {},
|
||||
"devDependencies": {
|
||||
"@babel/cli": "^7.0.0",
|
||||
"@babel/core": "^7.0.0",
|
||||
"@babel/preset-env": "^7.0.0",
|
||||
"babel-plugin-lodash": "^3.3.2",
|
||||
"cross-env": "^5.1.3",
|
||||
"rimraf": "^2.6.2"
|
||||
},
|
||||
"scripts": {
|
||||
"build": "cross-env NODE_ENV=production babel --source-maps --out-dir=dist/ src/",
|
||||
"clean": "rimraf dist/",
|
||||
"dev": "cross-env NODE_ENV=development babel --watch --source-maps --out-dir=dist/ src/",
|
||||
"prebuild": "yarn run clean",
|
||||
"predev": "yarn run prebuild",
|
||||
"prepublishOnly": "yarn run build"
|
||||
}
|
||||
}
|
||||
@xen-orchestra/emit-async/src/index.js (new file, 26 lines)
@@ -0,0 +1,26 @@
export default function emitAsync(event) {
  let opts
  let i = 1

  // an option object has been passed as first param
  if (typeof event !== 'string') {
    opts = event
    event = arguments[i++]
  }

  const n = arguments.length - i
  const args = new Array(n)
  for (let j = 0; j < n; ++j) {
    args[j] = arguments[j + i]
  }

  const onError = opts != null && opts.onError

  return Promise.all(
    this.listeners(event).map(listener =>
      new Promise(resolve => {
        resolve(listener.apply(this, args))
      }).catch(onError)
    )
  )
}
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@xen-orchestra/fs",
|
||||
"version": "0.2.0",
|
||||
"version": "0.5.0",
|
||||
"license": "AGPL-3.0",
|
||||
"description": "The File System for Xen Orchestra backups.",
|
||||
"keywords": [],
|
||||
@@ -20,26 +20,29 @@
|
||||
"node": ">=6"
|
||||
},
|
||||
"dependencies": {
|
||||
"@babel/runtime": "^7.0.0-beta.49",
|
||||
"@marsaud/smb2": "^0.8.0",
|
||||
"execa": "^0.10.0",
|
||||
"fs-extra": "^6.0.1",
|
||||
"get-stream": "^3.0.0",
|
||||
"@marsaud/smb2": "^0.13.0",
|
||||
"@xen-orchestra/async-map": "^0.0.0",
|
||||
"execa": "^1.0.0",
|
||||
"fs-extra": "^7.0.0",
|
||||
"get-stream": "^4.0.0",
|
||||
"lodash": "^4.17.4",
|
||||
"promise-toolbox": "^0.9.5",
|
||||
"through2": "^2.0.3",
|
||||
"promise-toolbox": "^0.11.0",
|
||||
"readable-stream": "^3.0.6",
|
||||
"through2": "^3.0.0",
|
||||
"tmp": "^0.0.33",
|
||||
"xo-remote-parser": "^0.4.0"
|
||||
"xo-remote-parser": "^0.5.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@babel/cli": "7.0.0-beta.49",
|
||||
"@babel/core": "7.0.0-beta.49",
|
||||
"@babel/plugin-proposal-function-bind": "7.0.0-beta.49",
|
||||
"@babel/plugin-transform-runtime": "^7.0.0-beta.49",
|
||||
"@babel/preset-env": "7.0.0-beta.49",
|
||||
"@babel/preset-flow": "7.0.0-beta.49",
|
||||
"@babel/cli": "^7.0.0",
|
||||
"@babel/core": "^7.0.0",
|
||||
"@babel/plugin-proposal-decorators": "^7.1.6",
|
||||
"@babel/plugin-proposal-function-bind": "^7.0.0",
|
||||
"@babel/preset-env": "^7.0.0",
|
||||
"@babel/preset-flow": "^7.0.0",
|
||||
"async-iterator-to-stream": "^1.1.0",
|
||||
"babel-plugin-lodash": "^3.3.2",
|
||||
"cross-env": "^5.1.3",
|
||||
"dotenv": "^6.1.0",
|
||||
"index-modules": "^0.3.0",
|
||||
"rimraf": "^2.6.2"
|
||||
},
|
||||
|
||||
@@ -1,13 +1,18 @@
|
||||
// @flow
|
||||
|
||||
// $FlowFixMe
|
||||
import asyncMap from '@xen-orchestra/async-map'
|
||||
import getStream from 'get-stream'
|
||||
import { randomBytes } from 'crypto'
|
||||
import { fromCallback, fromEvent, ignoreErrors } from 'promise-toolbox'
|
||||
import { type Readable, type Writable } from 'stream'
|
||||
import path from 'path'
|
||||
import { fromCallback, fromEvent, ignoreErrors, timeout } from 'promise-toolbox'
|
||||
import { parse } from 'xo-remote-parser'
|
||||
import { randomBytes } from 'crypto'
|
||||
import { type Readable, type Writable } from 'stream'
|
||||
|
||||
import { createChecksumStream, validChecksumOfReadStream } from './checksum'
|
||||
|
||||
const { dirname, resolve } = path.posix
|
||||
|
||||
type Data = Buffer | Readable | string
|
||||
type FileDescriptor = {| fd: mixed, path: string |}
|
||||
type LaxReadable = Readable & Object
|
||||
@@ -17,176 +22,152 @@ type File = FileDescriptor | string
|
||||
|
||||
const checksumFile = file => file + '.checksum'
|
||||
|
||||
export default class RemoteHandlerAbstract {
|
||||
_remote: Object
|
||||
constructor (remote: any) {
|
||||
this._remote = { ...remote, ...parse(remote.url) }
|
||||
if (this._remote.type !== this.type) {
|
||||
throw new Error('Incorrect remote type')
|
||||
}
|
||||
// normalize the path:
|
||||
// - does not contains `.` or `..` (cannot escape root dir)
|
||||
// - always starts with `/`
|
||||
const normalizePath = path => resolve('/', path)
|
||||
|
||||
const DEFAULT_TIMEOUT = 6e5 // 10 min
|
||||
|
||||
const ignoreEnoent = error => {
|
||||
if (error == null || error.code !== 'ENOENT') {
|
||||
throw error
|
||||
}
|
||||
}
|
||||
|
||||
class PrefixWrapper {
|
||||
constructor(remote, prefix) {
|
||||
this._prefix = prefix
|
||||
this._remote = remote
|
||||
}
|
||||
|
||||
get type (): string {
|
||||
throw new Error('Not implemented')
|
||||
get type() {
|
||||
return this._remote.type
|
||||
}
|
||||
|
||||
/**
|
||||
* Asks the handler to sync the state of the effective remote with its' metadata
|
||||
*/
|
||||
async sync (): Promise<mixed> {
|
||||
return this._sync()
|
||||
}
|
||||
|
||||
async _sync (): Promise<mixed> {
|
||||
throw new Error('Not implemented')
|
||||
}
|
||||
|
||||
/**
|
||||
* Free the resources possibly dedicated to put the remote at work, when it is no more needed
|
||||
*/
|
||||
async forget (): Promise<void> {
|
||||
await this._forget()
|
||||
}
|
||||
|
||||
async _forget (): Promise<void> {
|
||||
throw new Error('Not implemented')
|
||||
}
|
||||
|
||||
async test (): Promise<Object> {
|
||||
const testFileName = `${Date.now()}.test`
|
||||
const data = await fromCallback(cb => randomBytes(1024 * 1024, cb))
|
||||
let step = 'write'
|
||||
try {
|
||||
await this.outputFile(testFileName, data)
|
||||
step = 'read'
|
||||
const read = await this.readFile(testFileName)
|
||||
if (data.compare(read) !== 0) {
|
||||
throw new Error('output and input did not match')
|
||||
}
|
||||
return {
|
||||
success: true,
|
||||
}
|
||||
} catch (error) {
|
||||
return {
|
||||
success: false,
|
||||
step,
|
||||
file: testFileName,
|
||||
error: error.message || String(error),
|
||||
}
|
||||
} finally {
|
||||
ignoreErrors.call(this.unlink(testFileName))
|
||||
}
|
||||
}
|
||||
|
||||
async outputFile (file: string, data: Data, options?: Object): Promise<void> {
|
||||
return this._outputFile(file, data, {
|
||||
flags: 'wx',
|
||||
...options,
|
||||
})
|
||||
}
|
||||
|
||||
async _outputFile (file: string, data: Data, options?: Object): Promise<void> {
|
||||
const stream = await this.createOutputStream(file, options)
|
||||
const promise = fromEvent(stream, 'finish')
|
||||
stream.end(data)
|
||||
await promise
|
||||
}
|
||||
|
||||
async read (
|
||||
file: File,
|
||||
buffer: Buffer,
|
||||
position?: number
|
||||
): Promise<{| bytesRead: number, buffer: Buffer |}> {
|
||||
return this._read(file, buffer, position)
|
||||
}
|
||||
|
||||
_read (
|
||||
file: File,
|
||||
buffer: Buffer,
|
||||
position?: number
|
||||
): Promise<{| bytesRead: number, buffer: Buffer |}> {
|
||||
throw new Error('Not implemented')
|
||||
}
|
||||
|
||||
async readFile (file: string, options?: Object): Promise<Buffer> {
|
||||
return this._readFile(file, options)
|
||||
}
|
||||
|
||||
_readFile (file: string, options?: Object): Promise<Buffer> {
|
||||
return this.createReadStream(file, options).then(getStream.buffer)
|
||||
}
|
||||
|
||||
async rename (
|
||||
oldPath: string,
|
||||
newPath: string,
|
||||
{ checksum = false }: Object = {}
|
||||
) {
|
||||
let p = this._rename(oldPath, newPath)
|
||||
if (checksum) {
|
||||
p = Promise.all([
|
||||
p,
|
||||
this._rename(checksumFile(oldPath), checksumFile(newPath)),
|
||||
])
|
||||
}
|
||||
return p
|
||||
}
|
||||
|
||||
async _rename (oldPath: string, newPath: string) {
|
||||
throw new Error('Not implemented')
|
||||
}
|
||||
|
||||
async list (
|
||||
dir: string = '.',
|
||||
{
|
||||
filter,
|
||||
prependDir = false,
|
||||
}: { filter?: (name: string) => boolean, prependDir?: boolean } = {}
|
||||
): Promise<string[]> {
|
||||
let entries = await this._list(dir)
|
||||
if (filter !== undefined) {
|
||||
entries = entries.filter(filter)
|
||||
}
|
||||
|
||||
if (prependDir) {
|
||||
entries.forEach((entry, i) => {
|
||||
entries[i] = dir + '/' + entry
|
||||
// necessary to remove the prefix from the path with `prependDir` option
|
||||
async list(dir, opts) {
|
||||
const entries = await this._remote.list(this._resolve(dir), opts)
|
||||
if (opts != null && opts.prependDir) {
|
||||
const n = this._prefix.length
|
||||
entries.forEach((entry, i, entries) => {
|
||||
entries[i] = entry.slice(n)
|
||||
})
|
||||
}
|
||||
|
||||
return entries
|
||||
}
|
||||
|
||||
async _list (dir: string): Promise<string[]> {
|
||||
rename(oldPath, newPath) {
|
||||
return this._remote.rename(this._resolve(oldPath), this._resolve(newPath))
|
||||
}
|
||||
|
||||
_resolve(path) {
|
||||
return this._prefix + normalizePath(path)
|
||||
}
|
||||
}
|
||||
|
||||
export default class RemoteHandlerAbstract {
|
||||
_remote: Object
|
||||
_timeout: number
|
||||
|
||||
constructor(remote: any, options: Object = {}) {
|
||||
if (remote.url === 'test://') {
|
||||
this._remote = remote
|
||||
} else {
|
||||
this._remote = { ...remote, ...parse(remote.url) }
|
||||
if (this._remote.type !== this.type) {
|
||||
throw new Error('Incorrect remote type')
|
||||
}
|
||||
}
|
||||
;({ timeout: this._timeout = DEFAULT_TIMEOUT } = options)
|
||||
}
|
||||
|
||||
// Public members
|
||||
|
||||
get type(): string {
|
||||
throw new Error('Not implemented')
|
||||
}
|
||||
|
||||
createReadStream (
|
||||
file: string,
|
||||
addPrefix(prefix: string) {
|
||||
prefix = normalizePath(prefix)
|
||||
return prefix === '/' ? this : new PrefixWrapper(this, prefix)
|
||||
}
|
||||
|
||||
async closeFile(fd: FileDescriptor): Promise<void> {
|
||||
await timeout.call(this._closeFile(fd.fd), this._timeout)
|
||||
}
|
||||
|
||||
async createOutputStream(
|
||||
file: File,
|
||||
{ checksum = false, ...options }: Object = {}
|
||||
): Promise<LaxWritable> {
|
||||
if (typeof file === 'string') {
|
||||
file = normalizePath(file)
|
||||
}
|
||||
const path = typeof file === 'string' ? file : file.path
|
||||
const streamP = timeout.call(
|
||||
this._createOutputStream(file, {
|
||||
flags: 'wx',
|
||||
...options,
|
||||
}),
|
||||
this._timeout
|
||||
)
|
||||
|
||||
if (!checksum) {
|
||||
return streamP
|
||||
}
|
||||
|
||||
const checksumStream = createChecksumStream()
|
||||
const forwardError = error => {
|
||||
checksumStream.emit('error', error)
|
||||
}
|
||||
|
||||
const stream = await streamP
|
||||
stream.on('error', forwardError)
|
||||
checksumStream.pipe(stream)
|
||||
|
||||
// $FlowFixMe
|
||||
checksumStream.checksumWritten = checksumStream.checksum
|
||||
.then(value =>
|
||||
this._outputFile(checksumFile(path), value, { flags: 'wx' })
|
||||
)
|
||||
.catch(forwardError)
|
||||
|
||||
return checksumStream
|
||||
}
|
||||
|
||||
createReadStream(
|
||||
file: File,
|
||||
{ checksum = false, ignoreMissingChecksum = false, ...options }: Object = {}
|
||||
): Promise<LaxReadable> {
|
||||
if (typeof file === 'string') {
|
||||
file = normalizePath(file)
|
||||
}
|
||||
const path = typeof file === 'string' ? file : file.path
|
||||
const streamP = this._createReadStream(file, options).then(stream => {
|
||||
// detect early errors
|
||||
let promise = fromEvent(stream, 'readable')
|
||||
const streamP = timeout
|
||||
.call(this._createReadStream(file, options), this._timeout)
|
||||
.then(stream => {
|
||||
// detect early errors
|
||||
let promise = fromEvent(stream, 'readable')
|
||||
|
||||
// try to add the length prop if missing and not a range stream
|
||||
if (
|
||||
stream.length === undefined &&
|
||||
options.end === undefined &&
|
||||
options.start === undefined
|
||||
) {
|
||||
promise = Promise.all([
|
||||
promise,
|
||||
ignoreErrors.call(
|
||||
this.getSize(file).then(size => {
|
||||
stream.length = size
|
||||
})
|
||||
),
|
||||
])
|
||||
}
|
||||
// try to add the length prop if missing and not a range stream
|
||||
if (
|
||||
stream.length === undefined &&
|
||||
options.end === undefined &&
|
||||
options.start === undefined
|
||||
) {
|
||||
promise = Promise.all([
|
||||
promise,
|
||||
ignoreErrors.call(
|
||||
this._getSize(file).then(size => {
|
||||
stream.length = size
|
||||
})
|
||||
),
|
||||
])
|
||||
}
|
||||
|
||||
return promise.then(() => stream)
|
||||
})
|
||||
return promise.then(() => stream)
|
||||
})
|
||||
|
||||
if (!checksum) {
|
||||
return streamP
|
||||
@@ -195,7 +176,7 @@ export default class RemoteHandlerAbstract {
|
||||
// avoid a unhandled rejection warning
|
||||
ignoreErrors.call(streamP)
|
||||
|
||||
return this.readFile(checksumFile(path)).then(
|
||||
return this._readFile(checksumFile(path), { flags: 'r' }).then(
|
||||
checksum =>
|
||||
streamP.then(stream => {
|
||||
const { length } = stream
|
||||
@@ -216,92 +197,372 @@ export default class RemoteHandlerAbstract {
|
||||
)
|
||||
}
|
||||
|
||||
async _createReadStream (
|
||||
createWriteStream(
|
||||
file: File,
|
||||
options: { end?: number, flags?: string, start?: number } = {}
|
||||
): Promise<LaxWritable> {
|
||||
return timeout.call(
|
||||
this._createWriteStream(
|
||||
typeof file === 'string' ? normalizePath(file) : file,
|
||||
{
|
||||
flags: 'wx',
|
||||
...options,
|
||||
}
|
||||
)
|
||||
)
|
||||
}
|
||||
|
||||
// Free the resources possibly dedicated to put the remote at work, when it
|
||||
// is no more needed
|
||||
//
|
||||
// FIXME: Some handlers are implemented based on system-wide mecanisms (such
|
||||
// as mount), forgetting them might breaking other processes using the same
|
||||
// remote.
|
||||
async forget(): Promise<void> {
|
||||
await this._forget()
|
||||
}
|
||||
|
||||
async getSize(file: File): Promise<number> {
|
||||
return timeout.call(
|
||||
this._getSize(typeof file === 'string' ? normalizePath(file) : file),
|
||||
this._timeout
|
||||
)
|
||||
}
|
||||
|
||||
async list(
|
||||
dir: string,
|
||||
{
|
||||
filter,
|
||||
prependDir = false,
|
||||
}: { filter?: (name: string) => boolean, prependDir?: boolean } = {}
|
||||
): Promise<string[]> {
|
||||
const virtualDir = normalizePath(dir)
|
||||
dir = normalizePath(dir)
|
||||
|
||||
let entries = await timeout.call(this._list(dir), this._timeout)
|
||||
if (filter !== undefined) {
|
||||
entries = entries.filter(filter)
|
||||
}
|
||||
|
||||
if (prependDir) {
|
||||
entries.forEach((entry, i) => {
|
||||
entries[i] = virtualDir + '/' + entry
|
||||
})
|
||||
}
|
||||
|
||||
return entries
|
||||
}
|
||||
|
||||
async mkdir(dir: string): Promise<void> {
|
||||
dir = normalizePath(dir)
|
||||
try {
|
||||
await this._mkdir(dir)
|
||||
} catch (error) {
|
||||
if (error == null || error.code !== 'EEXIST') {
|
||||
throw error
|
||||
}
|
||||
|
||||
// this operation will throw if it's not already a directory
|
||||
await this._list(dir)
|
||||
}
|
||||
}
|
||||
|
||||
async mktree(dir: string): Promise<void> {
|
||||
await this._mktree(normalizePath(dir))
|
||||
}
|
||||
|
||||
async openFile(path: string, flags: string): Promise<FileDescriptor> {
|
||||
path = normalizePath(path)
|
||||
|
||||
return {
|
||||
fd: await timeout.call(this._openFile(path, flags), this._timeout),
|
||||
path,
|
||||
}
|
||||
}
|
||||
|
||||
async outputFile(
|
||||
file: string,
|
||||
options?: Object
|
||||
): Promise<LaxReadable> {
|
||||
throw new Error('Not implemented')
|
||||
data: Data,
|
||||
{ flags = 'wx' }: { flags?: string } = {}
|
||||
): Promise<void> {
|
||||
await this._outputFile(normalizePath(file), data, { flags })
|
||||
}
|
||||
|
||||
async openFile (path: string, flags?: string): Promise<FileDescriptor> {
|
||||
return { fd: await this._openFile(path, flags), path }
|
||||
async read(
|
||||
file: File,
|
||||
buffer: Buffer,
|
||||
position?: number
|
||||
): Promise<{| bytesRead: number, buffer: Buffer |}> {
|
||||
return this._read(
|
||||
typeof file === 'string' ? normalizePath(file) : file,
|
||||
buffer,
|
||||
position
|
||||
)
|
||||
}
|
||||
|
||||
async _openFile (path: string, flags?: string): Promise<mixed> {
|
||||
throw new Error('Not implemented')
|
||||
async readFile(
|
||||
file: string,
|
||||
{ flags = 'r' }: { flags?: string } = {}
|
||||
): Promise<Buffer> {
|
||||
return this._readFile(normalizePath(file), { flags })
|
||||
}
|
||||
|
||||
async closeFile (fd: FileDescriptor): Promise<void> {
|
||||
await this._closeFile(fd.fd)
|
||||
}
|
||||
async refreshChecksum(path: string): Promise<void> {
|
||||
path = normalizePath(path)
|
||||
|
||||
async _closeFile (fd: mixed): Promise<void> {
|
||||
throw new Error('Not implemented')
|
||||
}
|
||||
|
||||
async refreshChecksum (path: string): Promise<void> {
|
||||
const stream = (await this.createReadStream(path)).pipe(
|
||||
const stream = (await this._createReadStream(path, { flags: 'r' })).pipe(
|
||||
createChecksumStream()
|
||||
)
|
||||
stream.resume() // start reading the whole file
|
||||
await this.outputFile(checksumFile(path), await stream.checksum)
|
||||
}
|
||||
|
||||
async createOutputStream (
|
||||
file: File,
|
||||
{ checksum = false, ...options }: Object = {}
|
||||
): Promise<LaxWritable> {
|
||||
const path = typeof file === 'string' ? file : file.path
|
||||
const streamP = this._createOutputStream(file, {
|
||||
await this._outputFile(checksumFile(path), await stream.checksum, {
|
||||
flags: 'wx',
|
||||
...options,
|
||||
})
|
||||
|
||||
if (!checksum) {
|
||||
return streamP
|
||||
}
|
||||
|
||||
const checksumStream = createChecksumStream()
|
||||
const forwardError = error => {
|
||||
checksumStream.emit('error', error)
|
||||
}
|
||||
|
||||
const stream = await streamP
|
||||
stream.on('error', forwardError)
|
||||
checksumStream.pipe(stream)
|
||||
|
||||
// $FlowFixMe
|
||||
checksumStream.checksumWritten = checksumStream.checksum
|
||||
.then(value => this.outputFile(checksumFile(path), value))
|
||||
.catch(forwardError)
|
||||
|
||||
return checksumStream
|
||||
}
|
||||
|
||||
async rename(
|
||||
oldPath: string,
|
||||
newPath: string,
|
||||
{ checksum = false }: Object = {}
|
||||
) {
|
||||
oldPath = normalizePath(oldPath)
|
||||
newPath = normalizePath(newPath)
|
||||
|
||||
let p = timeout.call(this._rename(oldPath, newPath), this._timeout)
|
||||
if (checksum) {
|
||||
p = Promise.all([
|
||||
p,
|
||||
this._rename(checksumFile(oldPath), checksumFile(newPath)),
|
||||
])
|
||||
}
|
||||
return p
|
||||
}
|
||||
|
||||
async rmdir(dir: string): Promise<void> {
|
||||
await timeout.call(
|
||||
this._rmdir(normalizePath(dir)).catch(ignoreEnoent),
|
||||
this._timeout
|
||||
)
|
||||
}
|
||||
|
||||
async rmtree(dir: string): Promise<void> {
|
||||
await this._rmtree(normalizePath(dir))
|
||||
}
|
||||
|
||||
// Asks the handler to sync the state of the effective remote with its
|
||||
// metadata
|
||||
//
|
||||
// This method MUST ALWAYS be called before using the handler.
|
||||
async sync(): Promise<void> {
|
||||
await this._sync()
|
||||
}
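// Illustrative lifecycle sketch (not part of this class, values are
// hypothetical): a handler obtained from getHandler() must be synced before
// any other call and forgotten once done.
//
//   const handler = getHandler({ url: 'file:///tmp/backups' })
//   await handler.sync()
//   try {
//     await handler.outputFile('dir/file', Buffer.from('some data'))
//   } finally {
//     await handler.forget()
//   }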
|
||||
|
||||
async test(): Promise<Object> {
|
||||
const testFileName = normalizePath(`${Date.now()}.test`)
|
||||
const data = await fromCallback(cb => randomBytes(1024 * 1024, cb))
|
||||
let step = 'write'
|
||||
try {
|
||||
await this._outputFile(testFileName, data, { flags: 'wx' })
|
||||
step = 'read'
|
||||
const read = await this._readFile(testFileName, { flags: 'r' })
|
||||
if (!data.equals(read)) {
|
||||
throw new Error('output and input did not match')
|
||||
}
|
||||
return {
|
||||
success: true,
|
||||
}
|
||||
} catch (error) {
|
||||
return {
|
||||
success: false,
|
||||
step,
|
||||
file: testFileName,
|
||||
error: error.message || String(error),
|
||||
}
|
||||
} finally {
|
||||
ignoreErrors.call(this._unlink(testFileName))
|
||||
}
|
||||
}
|
||||
|
||||
async unlink(file: string, { checksum = true }: Object = {}): Promise<void> {
|
||||
file = normalizePath(file)
|
||||
|
||||
if (checksum) {
|
||||
ignoreErrors.call(this._unlink(checksumFile(file)))
|
||||
}
|
||||
|
||||
await this._unlink(file).catch(ignoreEnoent)
|
||||
}
|
||||
|
||||
async writeFile(
|
||||
file: string,
|
||||
data: Data,
|
||||
{ flags = 'wx' }: { flags?: string } = {}
|
||||
): Promise<void> {
|
||||
await this._writeFile(normalizePath(file), data, { flags })
|
||||
}
|
||||
|
||||
// Methods that can be implemented by inheriting classes
|
||||
|
||||
async _closeFile(fd: mixed): Promise<void> {
|
||||
throw new Error('Not implemented')
|
||||
}
|
||||
|
||||
async _createOutputStream(file: File, options: Object): Promise<LaxWritable> {
|
||||
try {
|
||||
return await this._createWriteStream(file, options)
|
||||
} catch (error) {
|
||||
if (typeof file !== 'string' || error.code !== 'ENOENT') {
|
||||
throw error
|
||||
}
|
||||
}
|
||||
|
||||
await this._mktree(dirname(file))
|
||||
return this._createOutputStream(file, options)
|
||||
}
|
||||
|
||||
async _createReadStream(file: File, options?: Object): Promise<LaxReadable> {
|
||||
throw new Error('Not implemented')
|
||||
}
|
||||
|
||||
async _createWriteStream(file: File, options: Object): Promise<LaxWritable> {
|
||||
throw new Error('Not implemented')
|
||||
}
|
||||
|
||||
// called to finalize the remote
|
||||
async _forget(): Promise<void> {}
|
||||
|
||||
async _getSize(file: File): Promise<number> {
|
||||
throw new Error('Not implemented')
|
||||
}
|
||||
|
||||
async _list(dir: string): Promise<string[]> {
|
||||
throw new Error('Not implemented')
|
||||
}
|
||||
|
||||
async _mkdir(dir: string): Promise<void> {
|
||||
throw new Error('Not implemented')
|
||||
}
|
||||
|
||||
async _mktree(dir: string): Promise<void> {
|
||||
try {
|
||||
return await this.mkdir(dir)
|
||||
} catch (error) {
|
||||
if (error.code !== 'ENOENT') {
|
||||
throw error
|
||||
}
|
||||
}
|
||||
|
||||
await this._mktree(dirname(dir))
|
||||
return this._mktree(dir)
|
||||
}
|
||||
|
||||
async _openFile(path: string, flags: string): Promise<mixed> {
|
||||
throw new Error('Not implemented')
|
||||
}
|
||||
|
||||
async _outputFile(
|
||||
file: string,
|
||||
data: Data,
|
||||
options: { flags?: string }
|
||||
): Promise<void> {
|
||||
try {
|
||||
return await this._writeFile(file, data, options)
|
||||
} catch (error) {
|
||||
if (error.code !== 'ENOENT') {
|
||||
throw error
|
||||
}
|
||||
}
|
||||
|
||||
await this._mktree(dirname(file))
|
||||
return this._outputFile(file, data, options)
|
||||
}
|
||||
|
||||
_read(
|
||||
file: File,
|
||||
buffer: Buffer,
|
||||
position?: number
|
||||
): Promise<{| bytesRead: number, buffer: Buffer |}> {
|
||||
throw new Error('Not implemented')
|
||||
}
|
||||
|
||||
_readFile(file: string, options?: Object): Promise<Buffer> {
|
||||
return this._createReadStream(file, options).then(getStream.buffer)
|
||||
}
|
||||
|
||||
async _rename(oldPath: string, newPath: string) {
|
||||
throw new Error('Not implemented')
|
||||
}
|
||||
|
||||
async _rmdir(dir: string) {
|
||||
throw new Error('Not implemented')
|
||||
}
|
||||
|
||||
async _rmtree(dir: string) {
|
||||
try {
|
||||
return await this._rmdir(dir)
|
||||
} catch (error) {
|
||||
if (error.code !== 'ENOTEMPTY') {
|
||||
throw error
|
||||
}
|
||||
}
|
||||
|
||||
const files = await this._list(dir)
|
||||
await asyncMap(files, file =>
|
||||
this._unlink(`${dir}/${file}`).catch(error => {
|
||||
if (error.code === 'EISDIR') {
|
||||
return this._rmtree(`${dir}/${file}`)
|
||||
}
|
||||
throw error
|
||||
})
|
||||
)
|
||||
return this._rmtree(dir)
|
||||
}
|
||||
|
||||
// called to initialize the remote
|
||||
async _sync(): Promise<void> {}
|
||||
|
||||
async _unlink(file: string): Promise<void> {
|
||||
throw new Error('Not implemented')
|
||||
}
|
||||
|
||||
async _writeFile(
|
||||
file: string,
|
||||
data: Data,
|
||||
options: { flags?: string }
|
||||
): Promise<void> {
|
||||
throw new Error('Not implemented')
|
||||
}
|
||||
}
|
||||
|
||||
function createPrefixWrapperMethods() {
|
||||
const pPw = PrefixWrapper.prototype
|
||||
const pRha = RemoteHandlerAbstract.prototype
|
||||
|
||||
const {
|
||||
defineProperty,
|
||||
getOwnPropertyDescriptor,
|
||||
prototype: { hasOwnProperty },
|
||||
} = Object
|
||||
|
||||
Object.getOwnPropertyNames(pRha).forEach(name => {
|
||||
let descriptor, value
|
||||
if (
|
||||
hasOwnProperty.call(pPw, name) ||
|
||||
name[0] === '_' ||
|
||||
typeof (value = (descriptor = getOwnPropertyDescriptor(pRha, name))
|
||||
.value) !== 'function'
|
||||
) {
|
||||
return
|
||||
}
|
||||
|
||||
descriptor.value = function() {
|
||||
let path
|
||||
if (arguments.length !== 0 && typeof (path = arguments[0]) === 'string') {
|
||||
arguments[0] = this._resolve(path)
|
||||
}
|
||||
return value.apply(this._remote, arguments)
|
||||
}
|
||||
|
||||
defineProperty(pPw, name, descriptor)
|
||||
})
|
||||
}
|
||||
createPrefixWrapperMethods()
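// The loop above copies every public method of RemoteHandlerAbstract onto
// PrefixWrapper, rewriting a leading string argument through _resolve().
// A hand-written equivalent for a single method would look roughly like this
// (illustrative sketch only):
//
//   PrefixWrapper.prototype.readFile = function(file, options) {
//     return this._remote.readFile(this._resolve(file), options)
//   }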
|
||||
|
||||
@xen-orchestra/fs/src/abstract.spec.js (new file, 113 lines)
@@ -0,0 +1,113 @@
|
||||
/* eslint-env jest */
|
||||
|
||||
import { TimeoutError } from 'promise-toolbox'
|
||||
|
||||
import AbstractHandler from './abstract'
|
||||
|
||||
const TIMEOUT = 10e3
|
||||
|
||||
class TestHandler extends AbstractHandler {
|
||||
constructor(impl) {
|
||||
super({ url: 'test://' }, { timeout: TIMEOUT })
|
||||
|
||||
Object.keys(impl).forEach(method => {
|
||||
this[`_${method}`] = impl[method]
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
describe('closeFile()', () => {
|
||||
it(`throws in case of timeout`, async () => {
|
||||
const testHandler = new TestHandler({
|
||||
closeFile: () => new Promise(() => {}),
|
||||
})
|
||||
|
||||
const promise = testHandler.closeFile({ fd: undefined, path: '' })
|
||||
jest.advanceTimersByTime(TIMEOUT)
|
||||
await expect(promise).rejects.toThrowError(TimeoutError)
|
||||
})
|
||||
})
|
||||
|
||||
describe('createOutputStream()', () => {
|
||||
it(`throws in case of timeout`, async () => {
|
||||
const testHandler = new TestHandler({
|
||||
createOutputStream: () => new Promise(() => {}),
|
||||
})
|
||||
|
||||
const promise = testHandler.createOutputStream('File')
|
||||
jest.advanceTimersByTime(TIMEOUT)
|
||||
await expect(promise).rejects.toThrowError(TimeoutError)
|
||||
})
|
||||
})
|
||||
|
||||
describe('createReadStream()', () => {
|
||||
it(`throws in case of timeout`, async () => {
|
||||
const testHandler = new TestHandler({
|
||||
createReadStream: () => new Promise(() => {}),
|
||||
})
|
||||
|
||||
const promise = testHandler.createReadStream('file')
|
||||
jest.advanceTimersByTime(TIMEOUT)
|
||||
await expect(promise).rejects.toThrowError(TimeoutError)
|
||||
})
|
||||
})
|
||||
|
||||
describe('getSize()', () => {
|
||||
it(`throws in case of timeout`, async () => {
|
||||
const testHandler = new TestHandler({
|
||||
getSize: () => new Promise(() => {}),
|
||||
})
|
||||
|
||||
const promise = testHandler.getSize('')
|
||||
jest.advanceTimersByTime(TIMEOUT)
|
||||
await expect(promise).rejects.toThrowError(TimeoutError)
|
||||
})
|
||||
})
|
||||
|
||||
describe('list()', () => {
|
||||
it(`throws in case of timeout`, async () => {
|
||||
const testHandler = new TestHandler({
|
||||
list: () => new Promise(() => {}),
|
||||
})
|
||||
|
||||
const promise = testHandler.list('.')
|
||||
jest.advanceTimersByTime(TIMEOUT)
|
||||
await expect(promise).rejects.toThrowError(TimeoutError)
|
||||
})
|
||||
})
|
||||
|
||||
describe('openFile()', () => {
|
||||
it(`throws in case of timeout`, async () => {
|
||||
const testHandler = new TestHandler({
|
||||
openFile: () => new Promise(() => {}),
|
||||
})
|
||||
|
||||
const promise = testHandler.openFile('path')
|
||||
jest.advanceTimersByTime(TIMEOUT)
|
||||
await expect(promise).rejects.toThrowError(TimeoutError)
|
||||
})
|
||||
})
|
||||
|
||||
describe('rename()', () => {
|
||||
it(`throws in case of timeout`, async () => {
|
||||
const testHandler = new TestHandler({
|
||||
rename: () => new Promise(() => {}),
|
||||
})
|
||||
|
||||
const promise = testHandler.rename('oldPath', 'newPath')
|
||||
jest.advanceTimersByTime(TIMEOUT)
|
||||
await expect(promise).rejects.toThrowError(TimeoutError)
|
||||
})
|
||||
})
|
||||
|
||||
describe('rmdir()', () => {
|
||||
it(`throws in case of timeout`, async () => {
|
||||
const testHandler = new TestHandler({
|
||||
rmdir: () => new Promise(() => {}),
|
||||
})
|
||||
|
||||
const promise = testHandler.rmdir('dir')
|
||||
jest.advanceTimersByTime(TIMEOUT)
|
||||
await expect(promise).rejects.toThrowError(TimeoutError)
|
||||
})
|
||||
})
|
||||
@@ -1,6 +1,5 @@
|
||||
// @flow
|
||||
|
||||
// $FlowFixMe
|
||||
import through2 from 'through2'
|
||||
import { createHash } from 'crypto'
|
||||
import { defer, fromEvent } from 'promise-toolbox'
|
||||
@@ -85,8 +84,8 @@ export const validChecksumOfReadStream = (
|
||||
callback(
|
||||
checksum !== expectedChecksum
|
||||
? new Error(
|
||||
`Bad checksum (${checksum}), expected: ${expectedChecksum}`
|
||||
)
|
||||
: null
|
||||
)
|
||||
}
|
||||
|
||||
@@ -1,26 +0,0 @@
|
||||
/* eslint-env jest */
|
||||
|
||||
import rimraf from 'rimraf'
|
||||
import tmp from 'tmp'
|
||||
|
||||
import { fromCallback as pFromCallback } from 'promise-toolbox'
|
||||
import { getHandler } from '.'
|
||||
|
||||
const initialDir = process.cwd()
|
||||
|
||||
beforeEach(async () => {
|
||||
const dir = await pFromCallback(cb => tmp.dir(cb))
|
||||
process.chdir(dir)
|
||||
})
|
||||
|
||||
afterEach(async () => {
|
||||
const tmpDir = process.cwd()
|
||||
process.chdir(initialDir)
|
||||
await pFromCallback(cb => rimraf(tmpDir, cb))
|
||||
})
|
||||
|
||||
test("fs test doesn't crash", async () => {
|
||||
const handler = getHandler({ url: 'file://' + process.cwd() })
|
||||
const result = await handler.test()
|
||||
expect(result.success).toBeTruthy()
|
||||
})
|
||||
@xen-orchestra/fs/src/fs.spec.js (new file, 292 lines)
@@ -0,0 +1,292 @@
|
||||
/* eslint-env jest */
|
||||
|
||||
import 'dotenv/config'
|
||||
import asyncIteratorToStream from 'async-iterator-to-stream'
|
||||
import getStream from 'get-stream'
|
||||
import { fromCallback } from 'promise-toolbox'
|
||||
import { pipeline } from 'readable-stream'
|
||||
import { random } from 'lodash'
|
||||
import { tmpdir } from 'os'
|
||||
|
||||
import { getHandler } from '.'
|
||||
|
||||
// https://gist.github.com/julien-f/3228c3f34fdac01ade09
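// (presumably preferred over crypto.randomBytes to keep test setup fast: the
// test data only needs to be arbitrary, not cryptographically secure)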
|
||||
const unsecureRandomBytes = n => {
|
||||
const bytes = Buffer.alloc(n)
|
||||
|
||||
const odd = n & 1
|
||||
for (let i = 0, m = n - odd; i < m; i += 2) {
|
||||
bytes.writeUInt16BE((Math.random() * 65536) | 0, i)
|
||||
}
|
||||
|
||||
if (odd) {
|
||||
bytes.writeUInt8((Math.random() * 256) | 0, n - 1)
|
||||
}
|
||||
|
||||
return bytes
|
||||
}
|
||||
|
||||
const TEST_DATA_LEN = 1024
|
||||
const TEST_DATA = unsecureRandomBytes(TEST_DATA_LEN)
|
||||
const createTestDataStream = asyncIteratorToStream(function*() {
|
||||
yield TEST_DATA
|
||||
})
|
||||
|
||||
const rejectionOf = p =>
|
||||
p.then(
|
||||
value => {
|
||||
throw value
|
||||
},
|
||||
reason => reason
|
||||
)
|
||||
|
||||
const handlers = [`file://${tmpdir()}`]
|
||||
if (process.env.xo_fs_nfs) handlers.push(process.env.xo_fs_nfs)
|
||||
if (process.env.xo_fs_smb) handlers.push(process.env.xo_fs_smb)
|
||||
|
||||
handlers.forEach(url => {
|
||||
describe(url, () => {
|
||||
let handler
|
||||
|
||||
const testWithFileDescriptor = (path, flags, fn) => {
|
||||
it('with path', () => fn({ file: path, flags }))
|
||||
it('with file descriptor', async () => {
|
||||
const file = await handler.openFile(path, flags)
|
||||
try {
|
||||
await fn({ file })
|
||||
} finally {
|
||||
await handler.closeFile(file)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
beforeAll(async () => {
|
||||
handler = getHandler({ url }).addPrefix(`xo-fs-tests-${Date.now()}`)
|
||||
await handler.sync()
|
||||
})
|
||||
afterAll(async () => {
|
||||
await handler.forget()
|
||||
handler = undefined
|
||||
})
|
||||
|
||||
beforeEach(async () => {
|
||||
// ensure test dir exists
|
||||
await handler.mkdir('.')
|
||||
})
|
||||
afterEach(async () => {
|
||||
await handler.rmtree('.')
|
||||
})
|
||||
|
||||
describe('#type', () => {
|
||||
it('returns the type of the remote', () => {
|
||||
expect(typeof handler.type).toBe('string')
|
||||
})
|
||||
})
|
||||
|
||||
describe('#createOutputStream()', () => {
|
||||
it('creates parent dir if missing', async () => {
|
||||
const stream = await handler.createOutputStream('dir/file')
|
||||
await fromCallback(cb => pipeline(createTestDataStream(), stream, cb))
|
||||
await expect(await handler.readFile('dir/file')).toEqual(TEST_DATA)
|
||||
})
|
||||
})
|
||||
|
||||
describe('#createReadStream()', () => {
|
||||
beforeEach(() => handler.outputFile('file', TEST_DATA))
|
||||
|
||||
testWithFileDescriptor('file', 'r', async ({ file, flags }) => {
|
||||
await expect(
|
||||
await getStream.buffer(
|
||||
await handler.createReadStream(file, { flags })
|
||||
)
|
||||
).toEqual(TEST_DATA)
|
||||
})
|
||||
})
|
||||
|
||||
describe('#createWriteStream()', () => {
|
||||
testWithFileDescriptor('file', 'wx', async ({ file, flags }) => {
|
||||
const stream = await handler.createWriteStream(file, { flags })
|
||||
await fromCallback(cb => pipeline(createTestDataStream(), stream, cb))
|
||||
await expect(await handler.readFile('file')).toEqual(TEST_DATA)
|
||||
})
|
||||
|
||||
it('fails if parent dir is missing', async () => {
|
||||
const error = await rejectionOf(handler.createWriteStream('dir/file'))
|
||||
expect(error.code).toBe('ENOENT')
|
||||
})
|
||||
})
|
||||
|
||||
describe('#getSize()', () => {
|
||||
beforeEach(() => handler.outputFile('file', TEST_DATA))
|
||||
|
||||
testWithFileDescriptor('file', 'r', async () => {
|
||||
expect(await handler.getSize('file')).toEqual(TEST_DATA_LEN)
|
||||
})
|
||||
})
|
||||
|
||||
describe('#list()', () => {
|
||||
it(`should list the content of folder`, async () => {
|
||||
await handler.outputFile('file', TEST_DATA)
|
||||
await expect(await handler.list('.')).toEqual(['file'])
|
||||
})
|
||||
|
||||
it('can prepend the directory to entries', async () => {
|
||||
await handler.outputFile('dir/file', '')
|
||||
expect(await handler.list('dir', { prependDir: true })).toEqual([
|
||||
'/dir/file',
|
||||
])
|
||||
})
|
||||
|
||||
})
|
||||
|
||||
describe('#mkdir()', () => {
|
||||
it('creates a directory', async () => {
|
||||
await handler.mkdir('dir')
|
||||
await expect(await handler.list('.')).toEqual(['dir'])
|
||||
})
|
||||
|
||||
it('does not throw on existing directory', async () => {
|
||||
await handler.mkdir('dir')
|
||||
await handler.mkdir('dir')
|
||||
})
|
||||
|
||||
it('throws ENOTDIR on existing file', async () => {
|
||||
await handler.outputFile('file', '')
|
||||
const error = await rejectionOf(handler.mkdir('file'))
|
||||
expect(error.code).toBe('ENOTDIR')
|
||||
})
|
||||
})
|
||||
|
||||
describe('#mktree()', () => {
|
||||
it('creates a tree of directories', async () => {
|
||||
await handler.mktree('dir/dir')
|
||||
await expect(await handler.list('.')).toEqual(['dir'])
|
||||
await expect(await handler.list('dir')).toEqual(['dir'])
|
||||
})
|
||||
|
||||
it('does not throw on existing directory', async () => {
|
||||
await handler.mktree('dir/dir')
|
||||
await handler.mktree('dir/dir')
|
||||
})
|
||||
|
||||
it('throws ENOTDIR on existing file', async () => {
|
||||
await handler.outputFile('dir/file', '')
|
||||
const error = await rejectionOf(handler.mktree('dir/file'))
|
||||
expect(error.code).toBe('ENOTDIR')
|
||||
})
|
||||
|
||||
it('throws ENOTDIR on existing file in path', async () => {
|
||||
await handler.outputFile('file', '')
|
||||
const error = await rejectionOf(handler.mktree('file/dir'))
|
||||
expect(error.code).toBe('ENOTDIR')
|
||||
})
|
||||
})
|
||||
|
||||
describe('#outputFile()', () => {
|
||||
it('writes data to a file', async () => {
|
||||
await handler.outputFile('file', TEST_DATA)
|
||||
expect(await handler.readFile('file')).toEqual(TEST_DATA)
|
||||
})
|
||||
|
||||
it('throws on existing files', async () => {
|
||||
await handler.outputFile('file', '')
|
||||
const error = await rejectionOf(handler.outputFile('file', ''))
|
||||
expect(error.code).toBe('EEXIST')
|
||||
})
|
||||
})
|
||||
|
||||
describe('#read()', () => {
|
||||
beforeEach(() => handler.outputFile('file', TEST_DATA))
|
||||
|
||||
const start = random(TEST_DATA_LEN)
|
||||
const size = random(TEST_DATA_LEN)
|
||||
|
||||
testWithFileDescriptor('file', 'r', async ({ file }) => {
|
||||
const buffer = Buffer.alloc(size)
|
||||
const result = await handler.read(file, buffer, start)
|
||||
expect(result.buffer).toBe(buffer)
|
||||
expect(result).toEqual({
|
||||
buffer,
|
||||
bytesRead: Math.min(size, TEST_DATA_LEN - start),
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe('#readFile', () => {
|
||||
it('returns a buffer containing the contents of the file', async () => {
|
||||
await handler.outputFile('file', TEST_DATA)
|
||||
expect(await handler.readFile('file')).toEqual(TEST_DATA)
|
||||
})
|
||||
|
||||
it('throws on missing file', async () => {
|
||||
const error = await rejectionOf(handler.readFile('file'))
|
||||
expect(error.code).toBe('ENOENT')
|
||||
})
|
||||
})
|
||||
|
||||
describe('#rename()', () => {
|
||||
it(`should rename the file`, async () => {
|
||||
await handler.outputFile('file', TEST_DATA)
|
||||
await handler.rename('file', `file2`)
|
||||
|
||||
expect(await handler.list('.')).toEqual(['file2'])
|
||||
expect(await handler.readFile(`file2`)).toEqual(TEST_DATA)
|
||||
})
|
||||
})
|
||||
|
||||
describe('#rmdir()', () => {
|
||||
it('should remove an empty directory', async () => {
|
||||
await handler.mkdir('dir')
|
||||
await handler.rmdir('dir')
|
||||
expect(await handler.list('.')).toEqual([])
|
||||
})
|
||||
|
||||
it(`should throw on non-empty directory`, async () => {
|
||||
await handler.outputFile('dir/file', '')
|
||||
|
||||
const error = await rejectionOf(handler.rmdir('.'))
|
||||
await expect(error.code).toEqual('ENOTEMPTY')
|
||||
})
|
||||
|
||||
it('does not throw on missing directory', async () => {
|
||||
await handler.rmdir('dir')
|
||||
})
|
||||
})
|
||||
|
||||
describe('#rmtree', () => {
|
||||
it(`should remove a directory recursively`, async () => {
|
||||
await handler.outputFile('dir/file', '')
|
||||
await handler.rmtree('dir')
|
||||
|
||||
expect(await handler.list('.')).toEqual([])
|
||||
})
|
||||
})
|
||||
|
||||
describe('#test()', () => {
|
||||
it('tests the remote appears to be working', async () => {
|
||||
expect(await handler.test()).toEqual({
|
||||
success: true,
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe('#unlink()', () => {
|
||||
it(`should remove the file`, async () => {
|
||||
await handler.outputFile('file', TEST_DATA)
|
||||
await handler.unlink('file')
|
||||
|
||||
await expect(await handler.list('.')).toEqual([])
|
||||
})
|
||||
|
||||
it('does not throw on missing file', async () => {
|
||||
await handler.unlink('file')
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
@@ -14,7 +14,7 @@ const HANDLERS = {
|
||||
nfs: RemoteHandlerNfs,
|
||||
}
|
||||
|
||||
export const getHandler = (remote: Remote): RemoteHandler => {
|
||||
export const getHandler = (remote: Remote, ...rest: any): RemoteHandler => {
|
||||
// FIXME: should be done in xo-remote-parser.
|
||||
const type = remote.url.split('://')[0]
|
||||
|
||||
@@ -22,5 +22,5 @@ export const getHandler = (remote: Remote): RemoteHandler => {
|
||||
if (!Handler) {
|
||||
throw new Error('Unhandled remote type')
|
||||
}
|
||||
return new Handler(remote)
|
||||
return new Handler(remote, ...rest)
|
||||
}
|
||||
|
||||
@@ -1,56 +1,71 @@
|
||||
import fs from 'fs-extra'
|
||||
import { dirname, resolve } from 'path'
|
||||
import { noop, startsWith } from 'lodash'
|
||||
import { fromEvent } from 'promise-toolbox'
|
||||
|
||||
import RemoteHandlerAbstract from './abstract'
|
||||
|
||||
export default class LocalHandler extends RemoteHandlerAbstract {
|
||||
get type() {
|
||||
return 'file'
|
||||
}
|
||||
|
||||
_getRealPath() {
|
||||
return this._remote.path
|
||||
}
|
||||
|
||||
_getFilePath(file) {
return this._getRealPath() + file
}

async _closeFile(fd) {
return fs.close(fd)
}

async _createReadStream(file, options) {
if (typeof file === 'string') {
const stream = fs.createReadStream(this._getFilePath(file), options)
await fromEvent(stream, 'open')
return stream
}
return fs.createReadStream('', {
autoClose: false,
...options,
fd: file.fd,
})
}
|
||||
|
||||
async _createWriteStream(file, options) {
|
||||
if (typeof file === 'string') {
|
||||
const stream = fs.createWriteStream(this._getFilePath(file), options)
|
||||
await fromEvent(stream, 'open')
|
||||
return stream
|
||||
}
|
||||
return fs.createWriteStream('', {
|
||||
autoClose: false,
|
||||
...options,
|
||||
fd: file.fd,
|
||||
})
|
||||
}
|
||||
|
||||
async _getSize(file) {
|
||||
const stats = await fs.stat(
|
||||
this._getFilePath(typeof file === 'string' ? file : file.path)
|
||||
)
|
||||
return stats.size
|
||||
}
|
||||
|
||||
async _list(dir) {
|
||||
return fs.readdir(this._getFilePath(dir))
|
||||
}
|
||||
|
||||
_mkdir(dir) {
|
||||
return fs.mkdir(this._getFilePath(dir))
|
||||
}
|
||||
|
||||
async _openFile(path, flags) {
|
||||
return fs.open(this._getFilePath(path), flags)
|
||||
}
|
||||
|
||||
async _read(file, buffer, position) {
|
||||
const needsClose = typeof file === 'string'
|
||||
file = needsClose ? await fs.open(this._getFilePath(file), 'r') : file.fd
|
||||
try {
|
||||
@@ -68,62 +83,29 @@ export default class LocalHandler extends RemoteHandlerAbstract {
|
||||
}
|
||||
}
|
||||
|
||||
async _readFile(file, options) {
|
||||
return fs.readFile(this._getFilePath(file), options)
|
||||
}
|
||||
|
||||
async _rename(oldPath, newPath) {
|
||||
return fs.rename(this._getFilePath(oldPath), this._getFilePath(newPath))
|
||||
}
|
||||
|
||||
async _rmdir(dir) {
|
||||
return fs.rmdir(this._getFilePath(dir))
|
||||
}
|
||||
|
||||
async _sync() {
|
||||
const path = this._getRealPath('/')
|
||||
await fs.ensureDir(path)
|
||||
await fs.access(path, fs.R_OK | fs.W_OK)
|
||||
}
|
||||
|
||||
async _unlink(file) {
|
||||
return fs.unlink(this._getFilePath(file))
|
||||
}
|
||||
|
||||
_writeFile(file, data, { flags }) {
|
||||
return fs.writeFile(this._getFilePath(file), data, { flag: flags })
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,89 +1,82 @@
|
||||
import execa from 'execa'
|
||||
import fs from 'fs-extra'
|
||||
import { forEach } from 'lodash'
|
||||
import { join } from 'path'
|
||||
import { tmpdir } from 'os'
|
||||
|
||||
import LocalHandler from './local'
|
||||
|
||||
const DEFAULT_NFS_OPTIONS = 'vers=3'
|
||||
|
||||
export default class NfsHandler extends LocalHandler {
|
||||
constructor(
|
||||
remote,
|
||||
{ mountsDir = join(tmpdir(), 'xo-fs-mounts'), ...opts } = {}
|
||||
) {
|
||||
super(remote, opts)
|
||||
|
||||
this._realPath = join(
|
||||
mountsDir,
|
||||
remote.id ||
|
||||
Math.random()
|
||||
.toString(36)
|
||||
.slice(2)
|
||||
)
|
||||
}
|
||||
|
||||
get type() {
|
||||
return 'nfs'
|
||||
}
|
||||
|
||||
_getRealPath() {
|
||||
return this._realPath
|
||||
}
|
||||
|
||||
async _loadRealMounts () {
|
||||
let stdout
|
||||
const mounted = {}
|
||||
try {
|
||||
stdout = await execa.stdout('findmnt', [
|
||||
'-P',
|
||||
'-t',
|
||||
'nfs,nfs4',
|
||||
'--output',
|
||||
'SOURCE,TARGET',
|
||||
'--noheadings',
|
||||
])
|
||||
const regex = /^SOURCE="([^:]*):(.*)" TARGET="(.*)"$/
|
||||
forEach(stdout.split('\n'), m => {
|
||||
if (m) {
|
||||
const match = regex.exec(m)
|
||||
mounted[match[3]] = {
|
||||
host: match[1],
|
||||
share: match[2],
|
||||
}
|
||||
}
|
||||
})
|
||||
} catch (exc) {
|
||||
// When no mounts are found, the call pretends to fail...
|
||||
if (exc.stderr !== '') {
|
||||
throw exc
|
||||
}
|
||||
}
|
||||
|
||||
this._realMounts = mounted
|
||||
return mounted
|
||||
}
|
||||
|
||||
_matchesRealMount () {
|
||||
return this._getRealPath() in this._realMounts
|
||||
}
|
||||
|
||||
async _mount() {
|
||||
await fs.ensureDir(this._getRealPath())
|
||||
const { host, path, port, options } = this._remote
|
||||
return execa(
|
||||
'mount',
|
||||
[
|
||||
'-t',
|
||||
'nfs',
|
||||
'-o',
|
||||
DEFAULT_NFS_OPTIONS + (options !== undefined ? `,${options}` : ''),
|
||||
`${host}${port !== undefined ? ':' + port : ''}:${path}`,
|
||||
this._getRealPath(),
|
||||
],
|
||||
{
|
||||
env: {
|
||||
LANG: 'C',
|
||||
},
|
||||
}
|
||||
).catch(error => {
|
||||
if (
|
||||
error == null ||
|
||||
typeof error.stderr !== 'string' ||
|
||||
!error.stderr.includes('already mounted')
|
||||
) {
|
||||
throw error
|
||||
}
|
||||
})
|
||||
}
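// For illustration, with a hypothetical remote such as
// { host: '10.0.0.1', path: '/export', options: 'soft' }, the command built
// above is roughly equivalent to:
//
//   LANG=C mount -t nfs -o vers=3,soft 10.0.0.1:/export <mountsDir>/<remote id>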
|
||||
|
||||
async _umount() {
await execa('umount', ['--force', this._getRealPath()], {
env: {
LANG: 'C',
},
}).catch(error => {
if (
error == null ||
typeof error.stderr !== 'string' ||
!error.stderr.includes('not mounted')
) {
throw error
}
})
}
|
||||
|
||||
async _forget() {
|
||||
try {
|
||||
await this._umount(this._remote)
|
||||
} catch (_) {
|
||||
@@ -91,7 +84,9 @@ export default class NfsHandler extends LocalHandler {
|
||||
}
|
||||
}
|
||||
|
||||
async _sync() {
|
||||
await this._mount()
|
||||
|
||||
return this._remote
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,247 +1,167 @@
|
||||
import Smb2 from '@marsaud/smb2'
|
||||
import { lastly as pFinally } from 'promise-toolbox'
|
||||
|
||||
import RemoteHandlerAbstract from './abstract'
|
||||
|
||||
const noop = () => {}
|
||||
|
||||
// Normalize SMB error codes to their Node/POSIX equivalents (ENOENT, EEXIST, ENOTDIR, ...).
|
||||
const wrapError = (error, code) => ({
|
||||
__proto__: error,
|
||||
cause: error,
|
||||
code,
|
||||
})
|
||||
const normalizeError = (error, shouldBeDirectory) => {
|
||||
const { code } = error
|
||||
|
||||
throw code === 'STATUS_DIRECTORY_NOT_EMPTY'
|
||||
? wrapError(error, 'ENOTEMPTY')
|
||||
: code === 'STATUS_FILE_IS_A_DIRECTORY'
|
||||
? wrapError(error, 'EISDIR')
|
||||
: code === 'STATUS_NOT_A_DIRECTORY'
|
||||
? wrapError(error, 'ENOTDIR')
|
||||
: code === 'STATUS_OBJECT_NAME_NOT_FOUND' ||
|
||||
code === 'STATUS_OBJECT_PATH_NOT_FOUND'
|
||||
? wrapError(error, 'ENOENT')
|
||||
: code === 'STATUS_OBJECT_NAME_COLLISION'
|
||||
? wrapError(error, 'EEXIST')
|
||||
: code === 'STATUS_NOT_SUPPORTED' || code === 'STATUS_INVALID_PARAMETER'
|
||||
? wrapError(error, shouldBeDirectory ? 'ENOTDIR' : 'EISDIR')
|
||||
: error
|
||||
}
|
||||
const normalizeDirError = error => normalizeError(error, true)
|
||||
|
||||
export default class SmbHandler extends RemoteHandlerAbstract {
|
||||
constructor(remote, opts) {
|
||||
super(remote, opts)
|
||||
|
||||
// defined in _sync()
|
||||
this._client = undefined
|
||||
|
||||
const prefix = this._remote.path
|
||||
this._prefix = prefix !== '' ? prefix + '\\' : prefix
|
||||
}
|
||||
|
||||
get type() {
|
||||
return 'smb'
|
||||
}
|
||||
|
||||
_getFilePath(file) {
|
||||
return (
|
||||
this._prefix +
|
||||
(typeof file === 'string' ? file : file.path)
|
||||
.slice(1)
|
||||
.replace(/\//g, '\\')
|
||||
)
|
||||
}
|
||||
|
||||
_dirname(file) {
|
||||
const parts = file.split('\\')
|
||||
parts.pop()
|
||||
return parts.join('\\')
|
||||
}
|
||||
|
||||
_closeFile(file) {
|
||||
return this._client.close(file).catch(normalizeError)
|
||||
}
|
||||
|
||||
_createReadStream(file, options) {
|
||||
if (typeof file === 'string') {
|
||||
file = this._getFilePath(file)
|
||||
} else {
|
||||
options = { autoClose: false, ...options, fd: file.fd }
|
||||
file = ''
|
||||
}
|
||||
return this._client.createReadStream(file, options).catch(normalizeError)
|
||||
}
|
||||
|
||||
_createWriteStream(file, options) {
|
||||
if (typeof file === 'string') {
|
||||
file = this._getFilePath(file)
|
||||
} else {
|
||||
options = { autoClose: false, ...options, fd: file.fd }
|
||||
file = ''
|
||||
}
|
||||
return this._client.createWriteStream(file, options).catch(normalizeError)
|
||||
}
|
||||
|
||||
_forget() {
|
||||
const client = this._client
|
||||
this._client = undefined
|
||||
return client.disconnect()
|
||||
}
|
||||
|
||||
_getSize(file) {
|
||||
return this._client.getSize(this._getFilePath(file)).catch(normalizeError)
|
||||
}
|
||||
|
||||
_list(dir) {
|
||||
return this._client.readdir(this._getFilePath(dir)).catch(normalizeDirError)
|
||||
}
|
||||
|
||||
_mkdir(dir) {
|
||||
return this._client.mkdir(this._getFilePath(dir)).catch(normalizeDirError)
|
||||
}
|
||||
|
||||
// TODO: add flags
|
||||
_openFile(path, flags) {
|
||||
return this._client
|
||||
.open(this._getFilePath(path), flags)
|
||||
.catch(normalizeError)
|
||||
}
|
||||
|
||||
async _read(file, buffer, position) {
|
||||
const client = this._client
|
||||
const needsClose = typeof file === 'string'
|
||||
file = needsClose ? await client.open(this._getFilePath(file)) : file.fd
|
||||
try {
|
||||
return await client.read(file, buffer, 0, buffer.length, position)
|
||||
} catch (error) {
|
||||
normalizeError(error)
|
||||
} finally {
|
||||
if (needsClose) {
|
||||
await client.close(file)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
_readFile(file, options) {
|
||||
return this._client
|
||||
.readFile(this._getFilePath(file), options)
|
||||
.catch(normalizeError)
|
||||
}
|
||||
|
||||
_rename(oldPath, newPath) {
|
||||
return this._client
|
||||
.rename(this._getFilePath(oldPath), this._getFilePath(newPath), {
|
||||
replace: true,
|
||||
})
|
||||
.catch(normalizeError)
|
||||
}
|
||||
|
||||
_rmdir(dir) {
|
||||
return this._client.rmdir(this._getFilePath(dir)).catch(normalizeDirError)
|
||||
}
|
||||
|
||||
_sync() {
|
||||
const remote = this._remote
|
||||
|
||||
this._client = new Smb2({
|
||||
share: `\\\\${remote.host}`,
|
||||
domain: remote.domain,
|
||||
username: remote.username,
|
||||
password: remote.password,
|
||||
autoCloseTimeout: 0,
|
||||
})
|
||||
|
||||
// Check access (smb2 does not expose connect in public so far...)
|
||||
return this.list('.')
|
||||
}
|
||||
|
||||
_getFilePath (file) {
|
||||
if (file === '.') {
|
||||
file = undefined
|
||||
}
|
||||
|
||||
let path = this._remote.path !== '' ? this._remote.path : ''
|
||||
|
||||
// Ensure remote path is a directory.
|
||||
if (path !== '' && path[path.length - 1] !== '\\') {
|
||||
path += '\\'
|
||||
}
|
||||
|
||||
if (file) {
|
||||
path += file.replace(/\//g, '\\')
|
||||
}
|
||||
|
||||
return path
|
||||
_unlink(file) {
|
||||
return this._client.unlink(this._getFilePath(file)).catch(normalizeError)
|
||||
}
|
||||
|
||||
_dirname (file) {
|
||||
const parts = file.split('\\')
|
||||
parts.pop()
|
||||
return parts.join('\\')
|
||||
}
|
||||
|
||||
async _sync () {
|
||||
if (this._remote.enabled) {
|
||||
try {
|
||||
// Check access (smb2 does not expose connect in public so far...)
|
||||
await this.list()
|
||||
} catch (error) {
|
||||
this._remote.enabled = false
|
||||
this._remote.error = error.message
|
||||
}
|
||||
}
|
||||
return this._remote
|
||||
}
|
||||
|
||||
async _outputFile (file, data, options = {}) {
|
||||
const client = this._getClient()
|
||||
const path = this._getFilePath(file)
|
||||
const dir = this._dirname(path)
|
||||
|
||||
if (dir) {
|
||||
await client.ensureDir(dir)
|
||||
}
|
||||
|
||||
return client.writeFile(path, data, options)::pFinally(() => {
|
||||
client.disconnect()
|
||||
})
|
||||
}
|
||||
|
||||
async _read (file, buffer, position) {
|
||||
const needsClose = typeof file === 'string'
|
||||
|
||||
let client
|
||||
if (needsClose) {
|
||||
client = this._getClient()
|
||||
file = await client.open(this._getFilePath(file))
|
||||
} else {
|
||||
;({ client, file } = file.fd)
|
||||
}
|
||||
|
||||
try {
|
||||
return await client.read(file, buffer, 0, buffer.length, position)
|
||||
} finally {
|
||||
if (needsClose) {
|
||||
await client.close(file)
|
||||
client.disconnect()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async _readFile (file, options = {}) {
|
||||
const client = this._getClient()
|
||||
let content
|
||||
|
||||
try {
|
||||
content = await client
|
||||
.readFile(this._getFilePath(file), options)
|
||||
::pFinally(() => {
|
||||
client.disconnect()
|
||||
})
|
||||
} catch (error) {
|
||||
throw normalizeError(error)
|
||||
}
|
||||
|
||||
return content
|
||||
}
|
||||
|
||||
async _rename (oldPath, newPath) {
|
||||
const client = this._getClient()
|
||||
|
||||
try {
|
||||
await client
|
||||
.rename(this._getFilePath(oldPath), this._getFilePath(newPath))
|
||||
::pFinally(() => {
|
||||
client.disconnect()
|
||||
})
|
||||
} catch (error) {
|
||||
throw normalizeError(error)
|
||||
}
|
||||
}
|
||||
|
||||
async _list (dir = '.') {
|
||||
const client = this._getClient()
|
||||
let list
|
||||
|
||||
try {
|
||||
list = await client.readdir(this._getFilePath(dir))::pFinally(() => {
|
||||
client.disconnect()
|
||||
})
|
||||
} catch (error) {
|
||||
throw normalizeError(error)
|
||||
}
|
||||
|
||||
return list
|
||||
}
|
||||
|
||||
async _createReadStream (file, options = {}) {
|
||||
if (typeof file !== 'string') {
|
||||
file = file.path
|
||||
}
|
||||
const client = this._getClient()
|
||||
let stream
|
||||
|
||||
try {
|
||||
// FIXME ensure that options are properly handled by @marsaud/smb2
|
||||
stream = await client.createReadStream(this._getFilePath(file), options)
|
||||
stream.on('end', () => client.disconnect())
|
||||
} catch (error) {
|
||||
throw normalizeError(error)
|
||||
}
|
||||
|
||||
return stream
|
||||
}
|
||||
|
||||
async _createOutputStream (file, options = {}) {
|
||||
if (typeof file !== 'string') {
|
||||
file = file.path
|
||||
}
|
||||
const client = this._getClient()
|
||||
const path = this._getFilePath(file)
|
||||
const dir = this._dirname(path)
|
||||
let stream
|
||||
try {
|
||||
if (dir) {
|
||||
await client.ensureDir(dir)
|
||||
}
|
||||
stream = await client.createWriteStream(path, options) // FIXME ensure that options are properly handled by @marsaud/smb2
|
||||
} catch (err) {
|
||||
client.disconnect()
|
||||
throw err
|
||||
}
|
||||
stream.on('finish', () => client.disconnect())
|
||||
return stream
|
||||
}
|
||||
|
||||
async _unlink (file) {
|
||||
const client = this._getClient()
|
||||
|
||||
try {
|
||||
await client.unlink(this._getFilePath(file))::pFinally(() => {
|
||||
client.disconnect()
|
||||
})
|
||||
} catch (error) {
|
||||
throw normalizeError(error)
|
||||
}
|
||||
}
|
||||
|
||||
async _getSize (file) {
|
||||
const client = await this._getClient()
|
||||
let size
|
||||
|
||||
try {
|
||||
size = await client
|
||||
.getSize(this._getFilePath(typeof file === 'string' ? file : file.path))
|
||||
::pFinally(() => {
|
||||
client.disconnect()
|
||||
})
|
||||
} catch (error) {
|
||||
throw normalizeError(error)
|
||||
}
|
||||
|
||||
return size
|
||||
}
|
||||
|
||||
// TODO: add flags
|
||||
async _openFile (path) {
|
||||
const client = this._getClient()
|
||||
return {
|
||||
client,
|
||||
file: await client.open(this._getFilePath(path)),
|
||||
}
|
||||
}
|
||||
|
||||
async _closeFile ({ client, file }) {
|
||||
await client.close(file)
|
||||
client.disconnect()
|
||||
_writeFile(file, data, options) {
|
||||
return this._client
|
||||
.writeFile(this._getFilePath(file), data, options)
|
||||
.catch(normalizeError)
|
||||
}
|
||||
}
|
||||
|
||||
@xen-orchestra/log/.babelrc.js (new file, 3 lines)
@@ -0,0 +1,3 @@
|
||||
module.exports = require('../../@xen-orchestra/babel-config')(
|
||||
require('./package.json')
|
||||
)
|
||||
@xen-orchestra/log/.npmignore (new file, 24 lines)
@@ -0,0 +1,24 @@
|
||||
/benchmark/
|
||||
/benchmarks/
|
||||
*.bench.js
|
||||
*.bench.js.map
|
||||
|
||||
/examples/
|
||||
example.js
|
||||
example.js.map
|
||||
*.example.js
|
||||
*.example.js.map
|
||||
|
||||
/fixture/
|
||||
/fixtures/
|
||||
*.fixture.js
|
||||
*.fixture.js.map
|
||||
*.fixtures.js
|
||||
*.fixtures.js.map
|
||||
|
||||
/test/
|
||||
/tests/
|
||||
*.spec.js
|
||||
*.spec.js.map
|
||||
|
||||
__snapshots__/
|
||||
@xen-orchestra/log/README.md (new file, 160 lines)
@@ -0,0 +1,160 @@
|
||||
# @xen-orchestra/log [](https://travis-ci.org/vatesfr/xen-orchestra)
|
||||
|
||||
> ${pkg.description}
|
||||
|
||||
## Install
|
||||
|
||||
Installation of the [npm package](https://npmjs.org/package/@xen-orchestra/log):
|
||||
|
||||
```
|
||||
> npm install --save @xen-orchestra/log
|
||||
```
|
||||
|
||||
## Usage
|
||||
|
||||
Everywhere something should be logged:
|
||||
|
||||
```js
|
||||
import createLogger from '@xen-orchestra/log'
|
||||
|
||||
const log = createLogger('my-module')
|
||||
|
||||
log.debug('only useful for debugging')
|
||||
log.info('this information is relevant to the user')
|
||||
log.warn('something went wrong but did not prevent current action')
|
||||
log.error('something went wrong')
|
||||
log.fatal('service/app is going down')
|
||||
```
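Logger instances also expose a small `wrap(message, fn)` helper (see `src/index.js`): the returned function forwards to `fn`, and any error thrown or rejected by `fn` is logged as a warning with the given message before being rethrown. A minimal sketch:

```js
const handleJob = log.wrap('handling job', job => {
  // may throw or return a rejecting promise
})
```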
|
||||
|
||||
Then, at the application level, configure how the logs are handled:
|
||||
|
||||
```js
|
||||
import { configure, catchGlobalErrors } from '@xen-orchestra/log/configure'
|
||||
import transportConsole from '@xen-orchestra/log/transports/console'
|
||||
import transportEmail from '@xen-orchestra/log/transports/email'
|
||||
|
||||
const transport = transportEmail({
|
||||
service: 'gmail',
|
||||
auth: {
|
||||
user: 'jane.smith@gmail.com',
|
||||
pass: 'H&NbECcpXF|pyXe#%ZEb'
|
||||
},
|
||||
from: 'jane.smith@gmail.com',
|
||||
to: [
|
||||
'jane.smith@gmail.com',
|
||||
'sam.doe@yahoo.com'
|
||||
]
|
||||
})
|
||||
|
||||
configure([
|
||||
{
|
||||
// if filter is a string, then it is a pattern
|
||||
// (https://github.com/visionmedia/debug#wildcards) which is
|
||||
// matched against the namespace of the logs
|
||||
filter: process.env.DEBUG,
|
||||
|
||||
transport: transportConsole()
|
||||
},
|
||||
{
|
||||
// only levels >= warn
|
||||
level: 'warn',
|
||||
|
||||
transport
|
||||
}
|
||||
])
|
||||
|
||||
// send all global errors (uncaught exceptions, warnings, unhandled rejections)
|
||||
// to this transport
|
||||
catchGlobalErrors(transport)
|
||||
```
|
||||
|
||||
### Transports
|
||||
|
||||
#### Console
|
||||
|
||||
```js
|
||||
import transportConsole from '@xen-orchestra/log/transports/console'
|
||||
|
||||
configure(transports.console())
|
||||
```
|
||||
|
||||
#### Email
|
||||
|
||||
Optional dependency:
|
||||
|
||||
```
|
||||
> yarn add nodemailer pretty-format
|
||||
```
|
||||
|
||||
Configuration:
|
||||
|
||||
```js
|
||||
import transportEmail from '@xen-orchestra/log/transports/email'
|
||||
|
||||
configure(transportEmail({
|
||||
service: 'gmail',
|
||||
auth: {
|
||||
user: 'jane.smith@gmail.com',
|
||||
pass: 'H&NbECcpXF|pyXe#%ZEb'
|
||||
},
|
||||
from: 'jane.smith@gmail.com',
|
||||
to: [
|
||||
'jane.smith@gmail.com',
|
||||
'sam.doe@yahoo.com'
|
||||
]
|
||||
}))
|
||||
```
|
||||
|
||||
#### Syslog
|
||||
|
||||
Optional dependency:
|
||||
|
||||
```
|
||||
> yarn add split-host syslog-client
|
||||
```
|
||||
|
||||
Configuration:
|
||||
|
||||
```js
|
||||
import transportSyslog from '@xen-orchestra/log/transports/syslog'
|
||||
|
||||
// By default, log to udp://localhost:514
|
||||
configure(transportSyslog())
|
||||
|
||||
// But TCP, a different host, or a different port can be used
|
||||
configure(transportSyslog('tcp://syslog.company.lan'))
|
||||
```
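
#### Memory

Mainly useful in tests: an in-memory transport that simply collects the logs it receives in a `logs` array (assuming it is exposed like the other transports, i.e. as `@xen-orchestra/log/transports/memory`):

```js
import transportMemory from '@xen-orchestra/log/transports/memory'

const transport = transportMemory()
configure(transport)

// every log entry received so far
console.log(transport.logs)
```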
|
||||
|
||||
## Development
|
||||
|
||||
```
|
||||
# Install dependencies
|
||||
> yarn
|
||||
|
||||
# Run the tests
|
||||
> yarn test
|
||||
|
||||
# Continuously compile
|
||||
> yarn dev
|
||||
|
||||
# Continuously run the tests
|
||||
> yarn dev-test
|
||||
|
||||
# Build for production (automatically called by npm install)
|
||||
> yarn build
|
||||
```
|
||||
|
||||
## Contributions
|
||||
|
||||
Contributions are *very* welcome, either on the documentation or on
|
||||
the code.
|
||||
|
||||
You may:
|
||||
|
||||
- report any [issue](https://github.com/vatesfr/xo-web/issues/)
|
||||
you've encountered;
|
||||
- fork and create a pull request.
|
||||
|
||||
## License
|
||||
|
||||
ISC © [Vates SAS](https://vates.fr)
|
||||
@xen-orchestra/log/configure.js (new file, 1 line)
@@ -0,0 +1 @@
|
||||
module.exports = require('./dist/configure')
|
||||
@xen-orchestra/log/package.json (new file, 52 lines)
@@ -0,0 +1,52 @@
|
||||
{
|
||||
"name": "@xen-orchestra/log",
|
||||
"version": "0.1.4",
|
||||
"license": "ISC",
|
||||
"description": "",
|
||||
"keywords": [],
|
||||
"homepage": "https://github.com/vatesfr/xen-orchestra/tree/master/@xen-orchestra/log",
|
||||
"bugs": "https://github.com/vatesfr/xen-orchestra/issues",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://github.com/vatesfr/xen-orchestra.git"
|
||||
},
|
||||
"author": {
|
||||
"name": "Julien Fontanet",
|
||||
"email": "julien.fontanet@vates.fr"
|
||||
},
|
||||
"preferGlobal": false,
|
||||
"main": "dist/",
|
||||
"bin": {},
|
||||
"files": [
|
||||
"configure.js",
|
||||
"dist/",
|
||||
"transports/"
|
||||
],
|
||||
"browserslist": [
|
||||
">2%"
|
||||
],
|
||||
"engines": {
|
||||
"node": ">=4"
|
||||
},
|
||||
"dependencies": {
|
||||
"lodash": "^4.17.4",
|
||||
"promise-toolbox": "^0.11.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@babel/cli": "^7.0.0",
|
||||
"@babel/core": "^7.0.0",
|
||||
"@babel/preset-env": "^7.0.0",
|
||||
"babel-plugin-lodash": "^3.3.2",
|
||||
"cross-env": "^5.1.3",
|
||||
"index-modules": "^0.3.0",
|
||||
"rimraf": "^2.6.2"
|
||||
},
|
||||
"scripts": {
|
||||
"build": "cross-env NODE_ENV=production babel --source-maps --out-dir=dist/ src/",
|
||||
"clean": "rimraf dist/",
|
||||
"dev": "cross-env NODE_ENV=development babel --watch --source-maps --out-dir=dist/ src/",
|
||||
"prebuild": "yarn run clean",
|
||||
"predev": "yarn run prebuild",
|
||||
"prepublishOnly": "yarn run build"
|
||||
}
|
||||
}
|
||||
@xen-orchestra/log/src/configure.js (new file, 106 lines)
@@ -0,0 +1,106 @@
|
||||
import createConsoleTransport from './transports/console'
|
||||
import LEVELS, { resolve } from './levels'
|
||||
import { compileGlobPattern } from './utils'
|
||||
|
||||
// ===================================================================
|
||||
|
||||
const createTransport = config => {
|
||||
if (typeof config === 'function') {
|
||||
return config
|
||||
}
|
||||
|
||||
if (Array.isArray(config)) {
|
||||
const transports = config.map(createTransport)
|
||||
const { length } = transports
|
||||
return function() {
|
||||
for (let i = 0; i < length; ++i) {
|
||||
transports[i].apply(this, arguments)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let { filter, transport } = config
|
||||
const level = resolve(config.level)
|
||||
|
||||
if (filter !== undefined) {
|
||||
if (typeof filter === 'string') {
|
||||
const re = compileGlobPattern(filter)
|
||||
filter = log => re.test(log.namespace)
|
||||
}
|
||||
|
||||
const orig = transport
|
||||
transport = function(log) {
|
||||
if ((level !== undefined && log.level >= level) || filter(log)) {
|
||||
return orig.apply(this, arguments)
|
||||
}
|
||||
}
|
||||
} else if (level !== undefined) {
|
||||
const orig = transport
|
||||
transport = function(log) {
|
||||
if (log.level >= level) {
|
||||
return orig.apply(this, arguments)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return transport
|
||||
}
|
||||
|
||||
const symbol =
|
||||
typeof Symbol !== 'undefined'
|
||||
? Symbol.for('@xen-orchestra/log')
|
||||
: '@@@xen-orchestra/log'
|
||||
|
||||
global[symbol] = createTransport({
|
||||
// display warnings or above, and all that are enabled via DEBUG or
|
||||
// NODE_DEBUG env
|
||||
filter: process.env.DEBUG || process.env.NODE_DEBUG,
|
||||
level: LEVELS.INFO,
|
||||
|
||||
transport: createConsoleTransport(),
|
||||
})
|
||||
|
||||
export const configure = config => {
|
||||
global[symbol] = createTransport(config)
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
export const catchGlobalErrors = logger => {
|
||||
// patch process
|
||||
const onUncaughtException = error => {
|
||||
logger.error('uncaught exception', { error })
|
||||
}
|
||||
const onUnhandledRejection = error => {
|
||||
logger.warn('possibly unhandled rejection', { error })
|
||||
}
|
||||
const onWarning = error => {
|
||||
logger.warn('Node warning', { error })
|
||||
}
|
||||
process.on('uncaughtException', onUncaughtException)
|
||||
process.on('unhandledRejection', onUnhandledRejection)
|
||||
process.on('warning', onWarning)
|
||||
|
||||
// patch EventEmitter
|
||||
const EventEmitter = require('events')
|
||||
const { prototype } = EventEmitter
|
||||
const { emit } = prototype
|
||||
function patchedEmit(event, error) {
|
||||
if (event === 'error' && this.listenerCount(event) === 0) {
|
||||
logger.error('unhandled error event', { error })
|
||||
return false
|
||||
}
|
||||
return emit.apply(this, arguments)
|
||||
}
|
||||
prototype.emit = patchedEmit
|
||||
|
||||
return () => {
|
||||
process.removeListener('uncaughtException', onUncaughtException)
|
||||
process.removeListener('unhandledRejection', onUnhandledRejection)
|
||||
process.removeListener('warning', onWarning)
|
||||
|
||||
if (prototype.emit === patchedEmit) {
|
||||
prototype.emit = emit
|
||||
}
|
||||
}
|
||||
}
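// Illustrative usage: the function returned above undoes the process and
// EventEmitter patches, which is mainly useful in tests.
//
//   const restore = catchGlobalErrors(logger) // logger: anything with error()/warn()
//   // ... exercise code that may emit unhandled errors ...
//   restore()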
|
||||
@xen-orchestra/log/src/index.js (new file, 76 lines)
@@ -0,0 +1,76 @@
|
||||
import createTransport from './transports/console'
|
||||
import LEVELS from './levels'
|
||||
|
||||
const symbol =
|
||||
typeof Symbol !== 'undefined'
|
||||
? Symbol.for('@xen-orchestra/log')
|
||||
: '@@@xen-orchestra/log'
|
||||
if (!(symbol in global)) {
|
||||
// the default behavior, without requiring `configure` is to avoid
|
||||
// logging anything unless it's a real error
|
||||
const transport = createTransport()
|
||||
global[symbol] = log => log.level > LEVELS.WARN && transport(log)
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
function Log(data, level, namespace, message, time) {
|
||||
this.data = data
|
||||
this.level = level
|
||||
this.namespace = namespace
|
||||
this.message = message
|
||||
this.time = time
|
||||
}
|
||||
|
||||
function Logger(namespace) {
|
||||
this._namespace = namespace
|
||||
|
||||
// bind all logging methods
|
||||
for (const name in LEVELS) {
|
||||
const lowerCase = name.toLowerCase()
|
||||
this[lowerCase] = this[lowerCase].bind(this)
|
||||
}
|
||||
}
|
||||
|
||||
const { prototype } = Logger
|
||||
|
||||
for (const name in LEVELS) {
|
||||
const level = LEVELS[name]
|
||||
|
||||
prototype[name.toLowerCase()] = function(message, data) {
|
||||
if (typeof message !== 'string') {
|
||||
if (message instanceof Error) {
|
||||
data = { error: message }
|
||||
;({ message = 'an error has occurred' } = message)
|
||||
} else {
|
||||
return this.warn('incorrect value passed to logger', {
|
||||
level,
|
||||
value: message,
|
||||
})
|
||||
}
|
||||
}
|
||||
global[symbol](new Log(data, level, this._namespace, message, new Date()))
|
||||
}
|
||||
}
|
||||
|
||||
prototype.wrap = function(message, fn) {
|
||||
const logger = this
|
||||
const warnAndRethrow = error => {
|
||||
logger.warn(message, { error })
|
||||
throw error
|
||||
}
|
||||
return function() {
|
||||
try {
|
||||
const result = fn.apply(this, arguments)
|
||||
const then = result != null && result.then
|
||||
return typeof then === 'function'
|
||||
? then.call(result, undefined, warnAndRethrow)
|
||||
: result
|
||||
} catch (error) {
|
||||
warnAndRethrow(error)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const createLogger = namespace => new Logger(namespace)
|
||||
export { createLogger as default }
|
||||
@xen-orchestra/log/src/levels.js (new file, 24 lines)
@@ -0,0 +1,24 @@
|
||||
const LEVELS = Object.create(null)
|
||||
export { LEVELS as default }
|
||||
|
||||
// https://github.com/trentm/node-bunyan#levels
|
||||
LEVELS.FATAL = 60 // service/app is going down
|
||||
LEVELS.ERROR = 50 // fatal for current action
|
||||
LEVELS.WARN = 40 // something went wrong but it's not fatal
|
||||
LEVELS.INFO = 30 // detail on unusual but normal operation
|
||||
LEVELS.DEBUG = 20
|
||||
|
||||
export const NAMES = Object.create(null)
|
||||
for (const name in LEVELS) {
|
||||
NAMES[LEVELS[name]] = name
|
||||
}
|
||||
|
||||
export const resolve = level => {
|
||||
if (typeof level === 'string') {
|
||||
level = LEVELS[level.toUpperCase()]
|
||||
}
|
||||
return level
|
||||
}
|
||||
|
||||
Object.freeze(LEVELS)
|
||||
Object.freeze(NAMES)
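// For example: resolve('warn') === LEVELS.WARN === 40, resolve(40) === 40 and
// NAMES[40] === 'WARN'.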
|
||||
@xen-orchestra/log/src/levels.spec.js (new file, 32 lines)
@@ -0,0 +1,32 @@
|
||||
/* eslint-env jest */
|
||||
|
||||
import { forEach, isInteger } from 'lodash'
|
||||
|
||||
import LEVELS, { NAMES, resolve } from './levels'
|
||||
|
||||
describe('LEVELS', () => {
|
||||
it('maps level names to their integer values', () => {
|
||||
forEach(LEVELS, (value, name) => {
|
||||
expect(isInteger(value)).toBe(true)
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe('NAMES', () => {
|
||||
it('maps level values to their names', () => {
|
||||
forEach(LEVELS, (value, name) => {
|
||||
expect(NAMES[value]).toBe(name)
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe('resolve()', () => {
|
||||
it('returns level values either from values or names', () => {
|
||||
forEach(LEVELS, value => {
|
||||
expect(resolve(value)).toBe(value)
|
||||
})
|
||||
forEach(NAMES, (name, value) => {
|
||||
expect(resolve(name)).toBe(+value)
|
||||
})
|
||||
})
|
||||
})
|
||||
@xen-orchestra/log/src/transports/console.js (new file, 24 lines)
@@ -0,0 +1,24 @@
|
||||
import LEVELS, { NAMES } from '../levels'
|
||||
|
||||
// Bind console methods (necessary for browsers)
|
||||
const debugConsole = console.log.bind(console)
|
||||
const infoConsole = console.info.bind(console)
|
||||
const warnConsole = console.warn.bind(console)
|
||||
const errorConsole = console.error.bind(console)
|
||||
|
||||
const { ERROR, INFO, WARN } = LEVELS
|
||||
|
||||
const consoleTransport = ({ data, level, namespace, message, time }) => {
|
||||
const fn =
|
||||
level < INFO
|
||||
? debugConsole
|
||||
: level < WARN
|
||||
? infoConsole
|
||||
: level < ERROR
|
||||
? warnConsole
|
||||
: errorConsole
|
||||
|
||||
fn('%s - %s - [%s] %s', time.toISOString(), namespace, NAMES[level], message)
|
||||
data != null && fn(data)
|
||||
}
|
||||
export default () => consoleTransport
|
||||
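As a quick illustration of the transport contract used above (a transport is simply a function that receives one log record at a time), here is a hedged sketch; the record fields mirror the destructuring in `consoleTransport`, and the relative import and namespace are invented for the example.

```js
// Sketch only: feed one hand-built log record to the console transport above.
import LEVELS from '../levels'
import createConsoleTransport from './console' // hypothetical sibling import

const transport = createConsoleTransport()
transport({
  data: { user: 'admin' }, // optional extra payload
  level: LEVELS.INFO,
  namespace: 'xo:example', // invented namespace
  message: 'user logged in',
  time: new Date(),
})
// → "2018-…T…Z - xo:example - [INFO] user logged in", followed by the data object
```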
68
@xen-orchestra/log/src/transports/email.js
Normal file
@@ -0,0 +1,68 @@
import fromCallback from 'promise-toolbox/fromCallback'
import prettyFormat from 'pretty-format' // eslint-disable-line node/no-extraneous-import
import { createTransport } from 'nodemailer' // eslint-disable-line node/no-extraneous-import

import { evalTemplate, required } from '../utils'
import { NAMES } from '../levels'

export default ({
  // transport options (https://nodemailer.com/smtp/)
  auth,
  authMethod,
  host,
  ignoreTLS,
  port,
  proxy,
  requireTLS,
  secure,
  service,
  tls,

  // message options (https://nodemailer.com/message/)
  bcc,
  cc,
  from = required('from'),
  to = required('to'),
  subject = '[{{level}} - {{namespace}}] {{time}} {{message}}',
}) => {
  const transporter = createTransport(
    {
      auth,
      authMethod,
      host,
      ignoreTLS,
      port,
      proxy,
      requireTLS,
      secure,
      service,
      tls,

      disableFileAccess: true,
      disableUrlAccess: true,
    },
    {
      bcc,
      cc,
      from,
      to,
    }
  )

  return log =>
    fromCallback(cb =>
      transporter.sendMail(
        {
          subject: evalTemplate(subject, key =>
            key === 'level'
              ? NAMES[log.level]
              : key === 'time'
                ? log.time.toISOString()
                : log[key]
          ),
          text: prettyFormat(log.data),
        },
        cb
      )
    )
}
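A hedged configuration sketch for the factory above: every value below is a placeholder, and per the defaults in the destructuring only `from` and `to` are actually required (see `required()` in `../utils`); the relative import is hypothetical.

```js
// Sketch only: placeholder SMTP settings for the email transport above.
import createEmailTransport from './email' // hypothetical sibling import

const transport = createEmailTransport({
  host: 'smtp.example.net',
  port: 465,
  secure: true,
  auth: { user: 'xo', pass: 'secret' },
  from: 'xo@example.net',
  to: 'admin@example.net',
  // subject defaults to '[{{level}} - {{namespace}}] {{time}} {{message}}'
})
```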
7
@xen-orchestra/log/src/transports/memory.js
Normal file
@@ -0,0 +1,7 @@
export default () => {
  const memoryLogger = log => {
    logs.push(log)
  }
  const logs = (memoryLogger.logs = [])
  return memoryLogger
}
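The memory transport above simply accumulates records on a `logs` array attached to the transport function, which makes it convenient in tests; a small hedged sketch (the record below is hand-built):

```js
// Sketch only: records pushed through the transport end up on `transport.logs`.
import createMemoryTransport from './memory' // hypothetical sibling import

const transport = createMemoryTransport()
transport({ level: 30, namespace: 'test', message: 'hello', time: new Date() })
transport.logs.length // → 1
```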
42
@xen-orchestra/log/src/transports/syslog.js
Normal file
@@ -0,0 +1,42 @@
import fromCallback from 'promise-toolbox/fromCallback'
import splitHost from 'split-host' // eslint-disable-line node/no-extraneous-import node/no-missing-import
import startsWith from 'lodash/startsWith'
import { createClient, Facility, Severity, Transport } from 'syslog-client' // eslint-disable-line node/no-extraneous-import node/no-missing-import

import LEVELS from '../levels'

// https://github.com/paulgrove/node-syslog-client#syslogseverity
const LEVEL_TO_SEVERITY = {
  [LEVELS.FATAL]: Severity.Critical,
  [LEVELS.ERROR]: Severity.Error,
  [LEVELS.WARN]: Severity.Warning,
  [LEVELS.INFO]: Severity.Informational,
  [LEVELS.DEBUG]: Severity.Debug,
}

const facility = Facility.User

export default target => {
  const opts = {}
  if (target !== undefined) {
    if (startsWith(target, 'tcp://')) {
      target = target.slice(6)
      opts.transport = Transport.Tcp
    } else if (startsWith(target, 'udp://')) {
      target = target.slice(6)
      opts.transport = Transport.Udp
    }

    ;({ host: target, port: opts.port } = splitHost(target))
  }

  const client = createClient(target, opts)

  return log =>
    fromCallback(cb =>
      client.log(log.message, {
        facility,
        severity: LEVEL_TO_SEVERITY[log.level],
      })
    )
}
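Based on the target parsing above, the factory accepts an optional `host[:port]` string, optionally prefixed with `tcp://` or `udp://`; a hedged sketch of the accepted forms (hostnames are made up, and the no-argument case is left to syslog-client's own default target):

```js
// Sketch only: the hostnames below are placeholders.
import createSyslogTransport from './syslog' // hypothetical sibling import

createSyslogTransport() // no target given: syslog-client picks its default
createSyslogTransport('syslog.example.net') // default transport, custom host
createSyslogTransport('tcp://syslog.example.net:514') // TCP on an explicit port
createSyslogTransport('udp://syslog.example.net') // UDP
```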
62
@xen-orchestra/log/src/utils.js
Normal file
@@ -0,0 +1,62 @@
import escapeRegExp from 'lodash/escapeRegExp'

// ===================================================================

const TPL_RE = /\{\{(.+?)\}\}/g
export const evalTemplate = (tpl, data) => {
  const getData =
    typeof data === 'function' ? (_, key) => data(key) : (_, key) => data[key]

  return tpl.replace(TPL_RE, getData)
}

// -------------------------------------------------------------------

const compileGlobPatternFragment = pattern =>
  pattern
    .split('*')
    .map(escapeRegExp)
    .join('.*')

export const compileGlobPattern = pattern => {
  const no = []
  const yes = []
  pattern.split(/[\s,]+/).forEach(pattern => {
    if (pattern[0] === '-') {
      no.push(pattern.slice(1))
    } else {
      yes.push(pattern)
    }
  })

  const raw = ['^']

  if (no.length !== 0) {
    raw.push('(?!', no.map(compileGlobPatternFragment).join('|'), ')')
  }

  if (yes.length !== 0) {
    raw.push('(?:', yes.map(compileGlobPatternFragment).join('|'), ')')
  } else {
    raw.push('.*')
  }

  raw.push('$')

  return new RegExp(raw.join(''))
}

// -------------------------------------------------------------------

export const required = name => {
  throw new Error(`missing required arg ${name}`)
}

// -------------------------------------------------------------------

export const serializeError = error => ({
  ...error,
  message: error.message,
  name: error.name,
  stack: error.stack,
})
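The spec file below only covers `compileGlobPattern()`; as a quick illustration of `evalTemplate()` (the other helper, used by the email transport above), here is a hedged sketch showing both accepted `data` forms:

```js
// Sketch only: evalTemplate accepts either a plain data object or a lookup function.
import { evalTemplate } from './utils'

evalTemplate('[{{level}}] {{message}}', { level: 'INFO', message: 'hi' })
// → '[INFO] hi'

evalTemplate('{{a}}-{{b}}', key => key.toUpperCase())
// → 'A-B'
```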
13
@xen-orchestra/log/src/utils.spec.js
Normal file
@@ -0,0 +1,13 @@
/* eslint-env jest */

import { compileGlobPattern } from './utils'

describe('compileGlobPattern()', () => {
  it('works', () => {
    const re = compileGlobPattern('foo, ba*, -bar')
    expect(re.test('foo')).toBe(true)
    expect(re.test('bar')).toBe(false)
    expect(re.test('baz')).toBe(true)
    expect(re.test('qux')).toBe(false)
  })
})
1
@xen-orchestra/log/transports/console.js
Normal file
@@ -0,0 +1 @@
module.exports = require('../dist/transports/console.js')
1
@xen-orchestra/log/transports/email.js
Normal file
@@ -0,0 +1 @@
module.exports = require('../dist/transports/email.js')
1
@xen-orchestra/log/transports/memory.js
Normal file
@@ -0,0 +1 @@
module.exports = require('../dist/transports/memory.js')
1
@xen-orchestra/log/transports/syslog.js
Normal file
@@ -0,0 +1 @@
module.exports = require('../dist/transports/syslog.js')
3
@xen-orchestra/mixin/.babelrc.js
Normal file
@@ -0,0 +1,3 @@
module.exports = require('../../@xen-orchestra/babel-config')(
  require('./package.json')
)
24
@xen-orchestra/mixin/.npmignore
Normal file
@@ -0,0 +1,24 @@
/benchmark/
/benchmarks/
*.bench.js
*.bench.js.map

/examples/
example.js
example.js.map
*.example.js
*.example.js.map

/fixture/
/fixtures/
*.fixture.js
*.fixture.js.map
*.fixtures.js
*.fixtures.js.map

/test/
/tests/
*.spec.js
*.spec.js.map

__snapshots__/
49
@xen-orchestra/mixin/README.md
Normal file
@@ -0,0 +1,49 @@
# ${pkg.name} [](https://travis-ci.org/${pkg.shortGitHubPath})

> ${pkg.description}

## Install

Installation of the [npm package](https://npmjs.org/package/${pkg.name}):

```
> npm install --save ${pkg.name}
```

## Usage

**TODO**

## Development

```
# Install dependencies
> yarn

# Run the tests
> yarn test

# Continuously compile
> yarn dev

# Continuously run the tests
> yarn dev-test

# Build for production (automatically called by npm install)
> yarn build
```

## Contributions

Contributions are *very* welcomed, either on the documentation or on
the code.

You may:

- report any [issue](${pkg.bugs})
  you've encountered;
- fork and create a pull request.

## License

${pkg.license} © [${pkg.author.name}](${pkg.author.url})
49
@xen-orchestra/mixin/package.json
Normal file
@@ -0,0 +1,49 @@
{
  "name": "@xen-orchestra/mixin",
  "version": "0.0.0",
  "license": "ISC",
  "description": "",
  "keywords": [],
  "homepage": "https://github.com/vatesfr/xen-orchestra/tree/master/@xen-orchestra/mixin",
  "bugs": "https://github.com/vatesfr/xen-orchestra/issues",
  "repository": {
    "type": "git",
    "url": "https://github.com/vatesfr/xen-orchestra.git"
  },
  "author": {
    "name": "Julien Fontanet",
    "email": "julien.fontanet@vates.fr"
  },
  "preferGlobal": false,
  "main": "dist/",
  "bin": {},
  "files": [
    "dist/"
  ],
  "browserslist": [
    ">2%"
  ],
  "engines": {
    "node": ">=6"
  },
  "dependencies": {
    "bind-property-descriptor": "^1.0.0"
  },
  "devDependencies": {
    "@babel/cli": "^7.0.0",
    "@babel/core": "^7.0.0",
    "@babel/preset-env": "^7.0.0",
    "babel-plugin-dev": "^1.0.0",
    "babel-plugin-lodash": "^3.3.2",
    "cross-env": "^5.1.3",
    "rimraf": "^2.6.2"
  },
  "scripts": {
    "build": "cross-env NODE_ENV=production babel --source-maps --out-dir=dist/ src/",
    "clean": "rimraf dist/",
    "dev": "cross-env NODE_ENV=development babel --watch --source-maps --out-dir=dist/ src/",
    "prebuild": "yarn run clean",
    "predev": "yarn run prebuild",
    "prepublishOnly": "yarn run build"
  }
}
130
@xen-orchestra/mixin/src/index.js
Normal file
@@ -0,0 +1,130 @@
import { getBoundPropertyDescriptor } from 'bind-property-descriptor'

// ===================================================================

const { defineProperties, getOwnPropertyDescriptor } = Object

const isIgnoredProperty = name => name[0] === '_' || name === 'constructor'

const IGNORED_STATIC_PROPERTIES = {
  __proto__: null,

  arguments: true,
  caller: true,
  length: true,
  name: true,
  prototype: true,
}
const isIgnoredStaticProperty = name => name in IGNORED_STATIC_PROPERTIES

const ownKeys =
  (typeof Reflect !== 'undefined' && Reflect.ownKeys) ||
  (({ getOwnPropertyNames: names, getOwnPropertySymbols: symbols }) =>
    symbols !== undefined ? obj => names(obj).concat(symbols(obj)) : names)(
    Object
  )

// -------------------------------------------------------------------

const mixin = Mixins => Class => {
  if (__DEV__ && !Array.isArray(Mixins)) {
    throw new TypeError('Mixins should be an array')
  }

  const { name } = Class

  // Copy properties of plain object mix-ins to the prototype.
  {
    const allMixins = Mixins
    Mixins = []
    const { prototype } = Class
    const descriptors = { __proto__: null }
    allMixins.forEach(Mixin => {
      if (typeof Mixin === 'function') {
        Mixins.push(Mixin)
        return
      }

      for (const prop of ownKeys(Mixin)) {
        if (__DEV__ && prop in prototype) {
          throw new Error(`${name}#${prop} is already defined`)
        }

        ;(descriptors[prop] = getOwnPropertyDescriptor(
          Mixin,
          prop
        )).enumerable = false // Object methods are enumerable but class methods are not.
      }
    })
    defineProperties(prototype, descriptors)
  }

  const n = Mixins.length

  function DecoratedClass(...args) {
    const instance = new Class(...args)

    for (let i = 0; i < n; ++i) {
      const Mixin = Mixins[i]
      const { prototype } = Mixin
      const mixinInstance = new Mixin(instance, ...args)
      const descriptors = { __proto__: null }
      const props = ownKeys(prototype)
      for (let j = 0, m = props.length; j < m; ++j) {
        const prop = props[j]

        if (isIgnoredProperty(prop)) {
          continue
        }

        if (prop in instance) {
          throw new Error(`${name}#${prop} is already defined`)
        }

        descriptors[prop] = getBoundPropertyDescriptor(
          prototype,
          prop,
          mixinInstance
        )
      }
      defineProperties(instance, descriptors)
    }

    return instance
  }

  // Copy original and mixed-in static properties on Decorator class.
  const descriptors = { __proto__: null }
  ownKeys(Class).forEach(prop => {
    let descriptor
    if (
      !(
        isIgnoredStaticProperty(prop) &&
        // if they already exist...
        (descriptor = getOwnPropertyDescriptor(DecoratedClass, prop)) !==
          undefined &&
        // and are not configurable.
        !descriptor.configurable
      )
    ) {
      descriptors[prop] = getOwnPropertyDescriptor(Class, prop)
    }
  })
  Mixins.forEach(Mixin => {
    ownKeys(Mixin).forEach(prop => {
      if (isIgnoredStaticProperty(prop)) {
        return
      }

      if (__DEV__ && prop in descriptors) {
        throw new Error(`${name}.${prop} is already defined`)
      }

      descriptors[prop] = getOwnPropertyDescriptor(Mixin, prop)
    })
  })
  defineProperties(DecoratedClass, descriptors)

  return DecoratedClass
}
export { mixin as default }
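A hedged usage sketch of the decorator above: the class names and properties are invented, and it assumes the compiled package exposes this file as its default export (per the `"main": "dist/"` entry in the package.json shown above). Class mix-ins are instantiated with the decorated instance followed by the constructor arguments, and their prototype methods are bound onto the instance.

```js
// Sketch only: Greeter is a mix-in class, App is the decorated class.
import mixin from '@xen-orchestra/mixin'

class Greeter {
  constructor(app) {
    this._app = app // receives the decorated instance, then the ctor args
  }

  greet() {
    return `hello from ${this._app.name}`
  }
}

const App = mixin([Greeter])(
  class {
    constructor(name) {
      this.name = name
    }
  }
)

new App('xo').greet() // → 'hello from xo'; greet() is bound to the Greeter instance
```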
376
CHANGELOG.md
@@ -2,15 +2,391 @@
|
||||
|
||||
## *next*
|
||||
|
||||
|
||||
### Enhancements
|
||||
|
||||
- [Users] Display user groups [#3719](https://github.com/vatesfr/xen-orchestra/issues/3719) (PR [#3740](https://github.com/vatesfr/xen-orchestra/pull/3740))
|
||||
- [VDI] Display VDI's SR [3021](https://github.com/vatesfr/xen-orchestra/issues/3021) (PR [#3285](https://github.com/vatesfr/xen-orchestra/pull/3285))
|
||||
- [Health, VM/disks] Display SR's container [#3021](https://github.com/vatesfr/xen-orchestra/issues/3021) (PRs [#3747](https://github.com/vatesfr/xen-orchestra/pull/3747), [#3751](https://github.com/vatesfr/xen-orchestra/pull/3751))
|
||||
- [Servers] Auto-connect to ejected host [#2238](https://github.com/vatesfr/xen-orchestra/issues/2238) (PR [#3738](https://github.com/vatesfr/xen-orchestra/pull/3738))
|
||||
- [Backup NG] Add "XOSAN" in excluded tags by default [#2128](https://github.com/vatesfr/xen-orchestra/issues/3563) (PR [#3559](https://github.com/vatesfr/xen-orchestra/pull/3563))
|
||||
- [VM] add tooltip for VM status icon [#3749](https://github.com/vatesfr/xen-orchestra/issues/3749) (PR [#3765](https://github.com/vatesfr/xen-orchestra/pull/3765))
|
||||
- [New XOSAN] Improve view and possibility to sort SRs by name/size/free space [#2416](https://github.com/vatesfr/xen-orchestra/issues/2416) (PR [#3691](https://github.com/vatesfr/xen-orchestra/pull/3691))
|
||||
- [Backup NG] Disable HA on replicated VM (CR, DR) [#2359](https://github.com/vatesfr/xen-orchestra/issues/2359) (PR [#3755](https://github.com/vatesfr/xen-orchestra/pull/3755))
|
||||
- [Backup NG] Display the last run status for each schedule with the possibility to show the associated log [#3769](https://github.com/vatesfr/xen-orchestra/issues/3769) (PR [#3779](https://github.com/vatesfr/xen-orchestra/pull/3779))
|
||||
- [Backup NG] Add a link to the documentation [#3789](https://github.com/vatesfr/xen-orchestra/issues/3789) (PR [#3790](https://github.com/vatesfr/xen-orchestra/pull/3790))
|
||||
- [Backup NG] Ability to copy schedule/job id to the clipboard [#3753](https://github.com/vatesfr/xen-orchestra/issues/3753) (PR [#3791](https://github.com/vatesfr/xen-orchestra/pull/3791))
|
||||
- [Backup NG / logs] Merge the job log status with the display details button [#3797](https://github.com/vatesfr/xen-orchestra/issues/3797) (PR [#3800](https://github.com/vatesfr/xen-orchestra/pull/3800))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- [Home/SRs] Fixed SR status for non admin users [#2204](https://github.com/vatesfr/xen-orchestra/issues/2204) (PR [#3742](https://github.com/vatesfr/xen-orchestra/pull/3742))
|
||||
- [Servers] Fix occasional "server's pool already connected" errors when pool is not connected (PR [#3782](https://github.com/vatesfr/xen-orchestra/pull/3782))
|
||||
- [Self] Fix missing objects when the self service view is the first one to be loaded when opening XO [#2689](https://github.com/vatesfr/xen-orchestra/issues/2689) (PR [#3096](https://github.com/vatesfr/xen-orchestra/pull/3096))
|
||||
|
||||
### Released packages
|
||||
|
||||
- @xen-orchestra/fs v0.5.0
|
||||
- xen-api v0.23.0
|
||||
- xo-acl-resolver v0.4.1
|
||||
- xo-server v5.32.0
|
||||
- xo-web v5.32.0
|
||||
|
||||
## **5.29.0** (2018-11-29)
|
||||
|
||||
- [Perf alert] Ability to trigger an alarm if a host/VM/SR usage value is below the threshold [#3612](https://github.com/vatesfr/xen-orchestra/issues/3612) (PR [#3675](https://github.com/vatesfr/xen-orchestra/pull/3675))
|
||||
- [Home/VMs] Display pool's name [#2226](https://github.com/vatesfr/xen-orchestra/issues/2226) (PR [#3709](https://github.com/vatesfr/xen-orchestra/pull/3709))
|
||||
- [Servers] Prevent new connection if pool is already connected [#2238](https://github.com/vatesfr/xen-orchestra/issues/2238) (PR [#3724](https://github.com/vatesfr/xen-orchestra/pull/3724))
|
||||
- [VM] Pause (like Suspend but doesn't copy RAM on disk) [#3727](https://github.com/vatesfr/xen-orchestra/issues/3727) (PR [#3731](https://github.com/vatesfr/xen-orchestra/pull/3731))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- [Servers] Fix deleting server on joining a pool [#2238](https://github.com/vatesfr/xen-orchestra/issues/2238) (PR [#3728](https://github.com/vatesfr/xen-orchestra/pull/3728))
|
||||
|
||||
### Released packages
|
||||
|
||||
- xen-api v0.22.0
|
||||
- xo-server-perf-alert v0.2.0
|
||||
- xo-server-usage-report v0.7.1
|
||||
- xo-server v5.31.0
|
||||
- xo-web v5.31.0
|
||||
|
||||
## **5.28.2** (2018-11-16)
|
||||
|
||||
### Enhancements
|
||||
|
||||
- [VM] Ability to set nested virtualization in settings [#3619](https://github.com/vatesfr/xen-orchestra/issues/3619) (PR [#3625](https://github.com/vatesfr/xen-orchestra/pull/3625))
|
||||
- [Legacy Backup] Restore and File restore functionalities moved to the Backup NG view [#3499](https://github.com/vatesfr/xen-orchestra/issues/3499) (PR [#3610](https://github.com/vatesfr/xen-orchestra/pull/3610))
|
||||
- [Backup NG logs] Display warning in case of missing VMs instead of a ghosts VMs tasks (PR [#3647](https://github.com/vatesfr/xen-orchestra/pull/3647))
|
||||
- [VM] On migration, automatically selects the host and SR when only one is available [#3502](https://github.com/vatesfr/xen-orchestra/issues/3502) (PR [#3654](https://github.com/vatesfr/xen-orchestra/pull/3654))
|
||||
- [VM] Display VGA and video RAM for PVHVM guests [#3576](https://github.com/vatesfr/xen-orchestra/issues/3576) (PR [#3664](https://github.com/vatesfr/xen-orchestra/pull/3664))
|
||||
- [Backup NG form] Display a warning to let the user know that the Delta Backup and the Continuous Replication are not supported on XenServer < 6.5 [#3540](https://github.com/vatesfr/xen-orchestra/issues/3540) (PR [#3668](https://github.com/vatesfr/xen-orchestra/pull/3668))
|
||||
- [Backup NG form] Omit VMs(Simple Backup)/pools(Smart Backup/Resident on) with XenServer < 6.5 from the selection when the Delta Backup mode or the Continuous Replication mode are selected [#3540](https://github.com/vatesfr/xen-orchestra/issues/3540) (PR [#3668](https://github.com/vatesfr/xen-orchestra/pull/3668))
|
||||
- [VM] Allow to switch the Virtualization mode [#2372](https://github.com/vatesfr/xen-orchestra/issues/2372) (PR [#3669](https://github.com/vatesfr/xen-orchestra/pull/3669))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- [Backup ng logs] Fix restarting VMs with concurrency issue [#3603](https://github.com/vatesfr/xen-orchestra/issues/3603) (PR [#3634](https://github.com/vatesfr/xen-orchestra/pull/3634))
|
||||
- Validate modal containing a confirm text input by pressing the Enter key [#2735](https://github.com/vatesfr/xen-orchestra/issues/2735) (PR [#2890](https://github.com/vatesfr/xen-orchestra/pull/2890))
|
||||
- [Patches] Bulk install correctly ignores upgrade patches on licensed hosts (PR [#3651](https://github.com/vatesfr/xen-orchestra/pull/3651))
|
||||
- [Backup NG logs] Handle failed restores (PR [#3648](https://github.com/vatesfr/xen-orchestra/pull/3648))
|
||||
- [Self/New VM] Incorrect limit computation [#3658](https://github.com/vatesfr/xen-orchestra/issues/3658) (PR [#3666](https://github.com/vatesfr/xen-orchestra/pull/3666))
|
||||
- [Plugins] Don't expose credentials in config to users (PR [#3671](https://github.com/vatesfr/xen-orchestra/pull/3671))
|
||||
- [Self/New VM] `not enough … available in the set …` error in some cases (PR [#3667](https://github.com/vatesfr/xen-orchestra/pull/3667))
|
||||
- [XOSAN] Creation stuck at "Configuring VMs" [#3688](https://github.com/vatesfr/xen-orchestra/issues/3688) (PR [#3689](https://github.com/vatesfr/xen-orchestra/pull/3689))
|
||||
- [Backup NG] Errors listing backups on SMB remotes with extraneous files (PR [#3685](https://github.com/vatesfr/xen-orchestra/pull/3685))
|
||||
- [Remotes] Don't expose credentials to users [#3682](https://github.com/vatesfr/xen-orchestra/issues/3682) (PR [#3687](https://github.com/vatesfr/xen-orchestra/pull/3687))
|
||||
- [VM] Correctly display guest metrics updates (tools, network, etc.) [#3533](https://github.com/vatesfr/xen-orchestra/issues/3533) (PR [#3694](https://github.com/vatesfr/xen-orchestra/pull/3694))
|
||||
- [VM Templates] Fix deletion [#3498](https://github.com/vatesfr/xen-orchestra/issues/3498) (PR [#3695](https://github.com/vatesfr/xen-orchestra/pull/3695))
|
||||
|
||||
### Released packages
|
||||
|
||||
- xen-api v0.21.0
|
||||
- xo-common v0.2.0
|
||||
- xo-acl-resolver v0.4.0
|
||||
- xo-server v5.30.1
|
||||
- xo-web v5.30.0
|
||||
|
||||
## **5.28.1** (2018-11-05)
|
||||
|
||||
### Enhancements
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- [Backup NG] Increase timeout in stale remotes detection to limit false positives (PR [#3632](https://github.com/vatesfr/xen-orchestra/pull/3632))
|
||||
- Fix re-registration issue ([4e35b19ac](https://github.com/vatesfr/xen-orchestra/commit/4e35b19ac56c60f61c0e771cde70a50402797b8a))
|
||||
- [Backup NG logs] Fix started jobs filter [#3636](https://github.com/vatesfr/xen-orchestra/issues/3636) (PR [#3641](https://github.com/vatesfr/xen-orchestra/pull/3641))
|
||||
- [New VM] CPU and memory user inputs were ignored since previous release [#3644](https://github.com/vatesfr/xen-orchestra/issues/3644) (PR [#3646](https://github.com/vatesfr/xen-orchestra/pull/3646))
|
||||
|
||||
### Released packages
|
||||
|
||||
- @xen-orchestra/fs v0.4.1
|
||||
- xo-server v5.29.4
|
||||
- xo-web v5.29.3
|
||||
|
||||
## **5.28.0** (2018-10-31)
|
||||
|
||||
### Enhancements
|
||||
|
||||
- [Usage Report] Add IOPS read/write/total per VM [#3309](https://github.com/vatesfr/xen-orchestra/issues/3309) (PR [#3455](https://github.com/vatesfr/xen-orchestra/pull/3455))
|
||||
- [Self service] Sort resource sets by name (PR [#3507](https://github.com/vatesfr/xen-orchestra/pull/3507))
|
||||
- [Usage Report] Add top 3 SRs which use the most IOPS read/write/total [#3306](https://github.com/vatesfr/xen-orchestra/issues/3306) (PR [#3508](https://github.com/vatesfr/xen-orchestra/pull/3508))
|
||||
- [New VM] Display a warning when the memory is below the template memory static min [#3496](https://github.com/vatesfr/xen-orchestra/issues/3496) (PR [#3513](https://github.com/vatesfr/xen-orchestra/pull/3513))
|
||||
- [Backup NG form] Add link to plugins setting [#3457](https://github.com/vatesfr/xen-orchestra/issues/3457) (PR [#3514](https://github.com/vatesfr/xen-orchestra/pull/3514))
|
||||
- [Backup reports] Add job and run ID [#3488](https://github.com/vatesfr/xen-orchestra/issues/3488) (PR [#3516](https://github.com/vatesfr/xen-orchestra/pull/3516))
|
||||
- [Usage Report] Add top 3 VMs which use the most IOPS read/write/total [#3308](https://github.com/vatesfr/xen-orchestra/issues/3308) (PR [#3463](https://github.com/vatesfr/xen-orchestra/pull/3463))
|
||||
- [Settings/logs] Homogenize action buttons in table and enable bulk deletion [#3179](https://github.com/vatesfr/xen-orchestra/issues/3179) (PR [#3528](https://github.com/vatesfr/xen-orchestra/pull/3528))
|
||||
- [Settings/acls] Add bulk deletion [#3179](https://github.com/vatesfr/xen-orchestra/issues/3179) (PR [#3536](https://github.com/vatesfr/xen-orchestra/pull/3536))
|
||||
- [Home] Improve search usage: raw numbers also match in names [#2906](https://github.com/vatesfr/xen-orchestra/issues/2906) (PR [#3552](https://github.com/vatesfr/xen-orchestra/pull/3552))
|
||||
- [Backup NG] Timeout of a job is now in hours [#3550](https://github.com/vatesfr/xen-orchestra/issues/3550) (PR [#3553](https://github.com/vatesfr/xen-orchestra/pull/3553))
|
||||
- [Backup NG] Explicit error if a VM is missing [#3434](https://github.com/vatesfr/xen-orchestra/issues/3434) (PR [#3522](https://github.com/vatesfr/xen-orchestra/pull/3522))
|
||||
- [Backup NG] Show all advanced settings with non-default values in overview [#3549](https://github.com/vatesfr/xen-orchestra/issues/3549) (PR [#3554](https://github.com/vatesfr/xen-orchestra/pull/3554))
|
||||
- [Backup NG] Collapse advanced settings by default [#3551](https://github.com/vatesfr/xen-orchestra/issues/3551) (PR [#3559](https://github.com/vatesfr/xen-orchestra/pull/3559))
|
||||
- [Scheduling] Merge selection and interval tabs [#1902](https://github.com/vatesfr/xen-orchestra/issues/1902) (PR [#3519](https://github.com/vatesfr/xen-orchestra/pull/3519))
|
||||
- [Backup NG/Restore] The backup selector now also shows the job name [#3366](https://github.com/vatesfr/xen-orchestra/issues/3366) (PR [#3564](https://github.com/vatesfr/xen-orchestra/pull/3564))
|
||||
- Sort buttons by criticality in tables [#3168](https://github.com/vatesfr/xen-orchestra/issues/3168) (PR [#3545](https://github.com/vatesfr/xen-orchestra/pull/3545))
|
||||
- [Usage Report] Ability to send a daily report [#3544](https://github.com/vatesfr/xen-orchestra/issues/3544) (PR [#3582](https://github.com/vatesfr/xen-orchestra/pull/3582))
|
||||
- [Backup NG logs] Disable state filters with no entries [#3438](https://github.com/vatesfr/xen-orchestra/issues/3438) (PR [#3442](https://github.com/vatesfr/xen-orchestra/pull/3442))
|
||||
- [ACLs] Global performance improvement on UI for non-admin users [#3578](https://github.com/vatesfr/xen-orchestra/issues/3578) (PR [#3584](https://github.com/vatesfr/xen-orchestra/pull/3584))
|
||||
- [Backup NG] Improve the Schedule's view (Replace table by list) [#3491](https://github.com/vatesfr/xen-orchestra/issues/3491) (PR [#3586](https://github.com/vatesfr/xen-orchestra/pull/3586))
|
||||
- ([Host/Storage], [Sr/hosts]) add bulk deletion [#3179](https://github.com/vatesfr/xen-orchestra/issues/3179) (PR [#3539](https://github.com/vatesfr/xen-orchestra/pull/3539))
|
||||
- [xo-server] Use @xen-orchestra/log for basic logging [#3555](https://github.com/vatesfr/xen-orchestra/issues/3555) (PR [#3579](https://github.com/vatesfr/xen-orchestra/pull/3579))
|
||||
- [Backup Report] Log error when job failed [#3458](https://github.com/vatesfr/xen-orchestra/issues/3458) (PR [#3593](https://github.com/vatesfr/xen-orchestra/pull/3593))
|
||||
- [Backup NG] Display logs for backup restoration [#2511](https://github.com/vatesfr/xen-orchestra/issues/2511) (PR [#3609](https://github.com/vatesfr/xen-orchestra/pull/3609))
|
||||
- [XOA] Display product version and list of all installed packages [#3560](https://github.com/vatesfr/xen-orchestra/issues/3560) (PR [#3621](https://github.com/vatesfr/xen-orchestra/pull/3621))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- [Remotes] Fix removal of broken remotes [#3327](https://github.com/vatesfr/xen-orchestra/issues/3327) (PR [#3521](https://github.com/vatesfr/xen-orchestra/pull/3521))
|
||||
- [Backups] Fix stuck backups due to broken NFS remotes [#3467](https://github.com/vatesfr/xen-orchestra/issues/3467) (PR [#3534](https://github.com/vatesfr/xen-orchestra/pull/3534))
|
||||
- [New VM] Fix missing cloud config when creating multiple VMs at once in some cases [#3532](https://github.com/vatesfr/xen-orchestra/issues/3532) (PR [#3535](https://github.com/vatesfr/xen-orchestra/pull/3535))
|
||||
- [VM] Fix an error when an admin tried to add a disk on a Self VM whose resource set had been deleted [#2814](https://github.com/vatesfr/xen-orchestra/issues/2814) (PR [#3530](https://github.com/vatesfr/xen-orchestra/pull/3530))
|
||||
- [Self/Create VM] Fix some quotas based on the template instead of the user inputs [#2683](https://github.com/vatesfr/xen-orchestra/issues/2683) (PR [#3546](https://github.com/vatesfr/xen-orchestra/pull/3546))
|
||||
- [Self] Ignore DR and CR VMs when computing quotas [#3064](https://github.com/vatesfr/xen-orchestra/issues/3064) (PR [#3561](https://github.com/vatesfr/xen-orchestra/pull/3561))
|
||||
- [Patches] Wrongly requiring to eject CDs from halted VMs and snapshots before installing patches (PR [#3611](https://github.com/vatesfr/xen-orchestra/pull/3611))
|
||||
- [Jobs] Ensure the scheduling is not interrupted in rare cases (PR [#3617](https://github.com/vatesfr/xen-orchestra/pull/3617))
|
||||
- [Home] Fix `server.getAll` error at login when user is not admin [#2335](https://github.com/vatesfr/xen-orchestra/issues/2335) (PR [#3613](https://github.com/vatesfr/xen-orchestra/pull/3613))
|
||||
|
||||
### Released packages
|
||||
|
||||
- xo-server-backup-reports v0.15.0
|
||||
- xo-common v0.1.2
|
||||
- @xen-orchestra/log v0.1.0
|
||||
- @xen-orchestra/fs v0.4.0
|
||||
- complex-matcher v0.5.0
|
||||
- vhd-lib v0.4.0
|
||||
- xen-api v0.20.0
|
||||
- xo-server-usage-report v0.7.0
|
||||
- xo-server v5.29.0
|
||||
- xo-web v5.29.0
|
||||
|
||||
## **5.27.2** (2018-10-05)
|
||||
|
||||
### Enhancements
|
||||
|
||||
- [Host/Networks] Remove "Add network" button [#3386](https://github.com/vatesfr/xen-orchestra/issues/3386) (PR [#3478](https://github.com/vatesfr/xen-orchestra/pull/3478))
|
||||
- [Host/networks] Private networks table [#3387](https://github.com/vatesfr/xen-orchestra/issues/3387) (PR [#3481](https://github.com/vatesfr/xen-orchestra/pull/3481))
|
||||
- [Home/pool] Patch count pill now shows the number of unique patches in the pool [#3321](https://github.com/vatesfr/xen-orchestra/issues/3321) (PR [#3483](https://github.com/vatesfr/xen-orchestra/pull/3483))
|
||||
- [Patches] Pre-install checks to avoid errors [#3252](https://github.com/vatesfr/xen-orchestra/issues/3252) (PR [#3484](https://github.com/vatesfr/xen-orchestra/pull/3484))
|
||||
- [Vm/Snapshots] Allow VM operators to create snapshots and delete those they created [#3443](https://github.com/vatesfr/xen-orchestra/issues/3443) (PR [#3482](https://github.com/vatesfr/xen-orchestra/pull/3482))
|
||||
- [VM/clone] Handle ACLs and Self Service [#3139](https://github.com/vatesfr/xen-orchestra/issues/3139) (PR [#3493](https://github.com/vatesfr/xen-orchestra/pull/3493))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- [Backup NG] Fix `Cannot read property 'uuid' of undefined` when a disk is removed from a VM to backup (PR [#3479](https://github.com/vatesfr/xen-orchestra/pull/3479))
|
||||
- [Backup NG] Fix unexpected full after failure, interruption or basic rolling snapshot (PR [#3485](https://github.com/vatesfr/xen-orchestra/pull/3485))
|
||||
- [Usage report] Display top 3 used SRs instead of top 3 biggest SRs [#3307](https://github.com/vatesfr/xen-orchestra/issues/3307) (PR [#3475](https://github.com/vatesfr/xen-orchestra/pull/3475))
|
||||
|
||||
### Released packages
|
||||
|
||||
- vhd-lib v0.3.2
|
||||
- xo-vmdk-to-vhd v0.1.5
|
||||
- xo-server-usage-report v0.6.0
|
||||
- xo-acl-resolver v0.3.0
|
||||
- xo-server v5.28.0
|
||||
- xo-web v5.28.0
|
||||
|
||||
## **5.27.1** (2018-09-28)
|
||||
|
||||
### Enhancements
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- [OVA Import] Allow import of files bigger than 127GB (PR [#3451](https://github.com/vatesfr/xen-orchestra/pull/3451))
|
||||
- [File restore] Fix a path issue when going back to the parent folder (PR [#3446](https://github.com/vatesfr/xen-orchestra/pull/3446))
|
||||
- [File restore] Fix a minor issue when showing which selected files are redundant (PR [#3447](https://github.com/vatesfr/xen-orchestra/pull/3447))
|
||||
- [Memory] Fix a major leak [#2580](https://github.com/vatesfr/xen-orchestra/issues/2580) [#2820](https://github.com/vatesfr/xen-orchestra/issues/2820) (PR [#3453](https://github.com/vatesfr/xen-orchestra/pull/3453))
|
||||
- [NFS Remotes] Fix `already mounted` race condition [#3380](https://github.com/vatesfr/xen-orchestra/issues/3380) (PR [#3460](https://github.com/vatesfr/xen-orchestra/pull/3460))
|
||||
- Fix `Cannot read property 'type' of undefined` when deleting a VM (PR [#3465](https://github.com/vatesfr/xen-orchestra/pull/3465))
|
||||
|
||||
### Released packages
|
||||
|
||||
- @xen-orchestra/fs v0.3.1
|
||||
- vhd-lib v0.3.1
|
||||
- xo-vmdk-to-vhd v0.1.4
|
||||
- xo-server v5.27.2
|
||||
- xo-web v5.27.1
|
||||
|
||||
## **5.27.0** (2018-09-24)
|
||||
|
||||
### Enhancements
|
||||
|
||||
- [Remotes] Test the remote automatically on changes [#3323](https://github.com/vatesfr/xen-orchestra/issues/3323) (PR [#3397](https://github.com/vatesfr/xen-orchestra/pull/3397))
|
||||
- [Remotes] Use *WORKGROUP* as default domain for new SMB remote (PR [#3398](https://github.com/vatesfr/xen-orchestra/pull/3398))
|
||||
- [Backup NG form] Display a tip to encourage users to create vms on a thin-provisioned storage [#3334](https://github.com/vatesfr/xen-orchestra/issues/3334) (PR [#3402](https://github.com/vatesfr/xen-orchestra/pull/3402))
|
||||
- [Backup NG form] improve schedule's form [#3138](https://github.com/vatesfr/xen-orchestra/issues/3138) (PR [#3359](https://github.com/vatesfr/xen-orchestra/pull/3359))
|
||||
- [Backup NG Overview] Display transferred and merged data size for backup jobs [#3340](https://github.com/vatesfr/xen-orchestra/issues/3340) (PR [#3408](https://github.com/vatesfr/xen-orchestra/pull/3408))
|
||||
- [VM] Display the PVHVM status [#3014](https://github.com/vatesfr/xen-orchestra/issues/3014) (PR [#3418](https://github.com/vatesfr/xen-orchestra/pull/3418))
|
||||
- [Backup reports] Ability to test the plugin (PR [#3421](https://github.com/vatesfr/xen-orchestra/pull/3421))
|
||||
- [Backup NG] Ability to restart failed VMs' backup [#3339](https://github.com/vatesfr/xen-orchestra/issues/3339) (PR [#3420](https://github.com/vatesfr/xen-orchestra/pull/3420))
|
||||
- [VM] Ability to change the NIC type [#3423](https://github.com/vatesfr/xen-orchestra/issues/3423) (PR [#3440](https://github.com/vatesfr/xen-orchestra/pull/3440))
|
||||
- [Backup NG Overview] Display the schedule's name [#3444](https://github.com/vatesfr/xen-orchestra/issues/3444) (PR [#3445](https://github.com/vatesfr/xen-orchestra/pull/3445))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- [Remotes] Rename connect(ed)/disconnect(ed) to enable(d)/disable(d) [#3323](https://github.com/vatesfr/xen-orchestra/issues/3323) (PR [#3396](https://github.com/vatesfr/xen-orchestra/pull/3396))
|
||||
- [Remotes] Fix error appears twice on testing (PR [#3399](https://github.com/vatesfr/xen-orchestra/pull/3399))
|
||||
- [Backup NG] Don't fail on VMs with empty VBDs (like CDs or floppy disks) (PR [#3410](https://github.com/vatesfr/xen-orchestra/pull/3410))
|
||||
- [XOA updater] Fix issue where trial request would fail [#3407](https://github.com/vatesfr/xen-orchestra/issues/3407) (PR [#3412](https://github.com/vatesfr/xen-orchestra/pull/3412))
|
||||
- [Backup NG logs] Fix log's value not being updated in the copy and report button [#3273](https://github.com/vatesfr/xen-orchestra/issues/3273) (PR [#3360](https://github.com/vatesfr/xen-orchestra/pull/3360))
|
||||
- [Backup NG] Fix issue when *Delete first* was enabled for some of the remotes [#3424](https://github.com/vatesfr/xen-orchestra/issues/3424) (PR [#3427](https://github.com/vatesfr/xen-orchestra/pull/3427))
|
||||
- [VM/host consoles] Work around a XenServer/XCP-ng issue which lead to some consoles not working [#3432](https://github.com/vatesfr/xen-orchestra/issues/3432) (PR [#3435](https://github.com/vatesfr/xen-orchestra/pull/3435))
|
||||
- [Backup NG] Remove extraneous snapshots in case of multiple schedules [#3132](https://github.com/vatesfr/xen-orchestra/issues/3132) (PR [#3439](https://github.com/vatesfr/xen-orchestra/pull/3439))
|
||||
- [Backup NG] Fix page reloaded on creating a schedule [#3461](https://github.com/vatesfr/xen-orchestra/issues/3461) (PR [#3462](https://github.com/vatesfr/xen-orchestra/pull/3462))
|
||||
|
||||
### Released packages
|
||||
|
||||
- xo-server-backup-reports v0.14.0
|
||||
- @xen-orchestra/async-map v0.0.0
|
||||
- @xen-orchestra/defined v0.0.0
|
||||
- @xen-orchestra/emit-async v0.0.0
|
||||
- @xen-orchestra/mixin v0.0.0
|
||||
- xo-server v5.27.0
|
||||
- xo-web v5.27.0
|
||||
|
||||
## **5.26.0** (2018-09-07)
|
||||
|
||||
### Enhancements
|
||||
|
||||
- [Backup (file) restore] Order backups by date in selector [#3294](https://github.com/vatesfr/xen-orchestra/issues/3294) (PR [#3374](https://github.com/vatesfr/xen-orchestra/pull/3374))
|
||||
- [Self] Hide Tasks entry in menu for self users [#3311](https://github.com/vatesfr/xen-orchestra/issues/3311) (PR [#3373](https://github.com/vatesfr/xen-orchestra/pull/3373))
|
||||
- [Tasks] Show previous tasks [#3266](https://github.com/vatesfr/xen-orchestra/issues/3266) (PR [#3377](https://github.com/vatesfr/xen-orchestra/pull/3377))
|
||||
- [Backup NG] Add job name in names of replicated VMs (PR [#3379](https://github.com/vatesfr/xen-orchestra/pull/3379))
|
||||
- [Backup NG] Restore directories [#1924](https://github.com/vatesfr/xen-orchestra/issues/1924) (PR [#3384](https://github.com/vatesfr/xen-orchestra/pull/3384))
|
||||
- [VM] Start a VM on a specific host [#3191](https://github.com/vatesfr/xen-orchestra/issues/3191) (PR [#3389](https://github.com/vatesfr/xen-orchestra/pull/3389))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- [Self] Fix Self Service quotas not being correctly updated when deleting multiple VMs at a time (PR [#3368](https://github.com/vatesfr/xen-orchestra/pull/3368))
|
||||
- [Backup NG] Don't fail listing backups when a remote is broken [#3365](https://github.com/vatesfr/xen-orchestra/issues/3365) (PR [#3367](https://github.com/vatesfr/xen-orchestra/pull/3367))
|
||||
- [New XOSAN] Fix error sometimes occurring when selecting the pool (PR [#3370](https://github.com/vatesfr/xen-orchestra/pull/3370))
|
||||
- [New VM] Selecting multiple VMs and clicking Create then Cancel used to redirect to Home [#3268](https://github.com/vatesfr/xen-orchestra/issues/3268) (PR [#3371](https://github.com/vatesfr/xen-orchestra/pull/3371))
|
||||
- [Remotes] `cannot read 'properties' of undefined` error (PR [#3382](https://github.com/vatesfr/xen-orchestra/pull/3382))
|
||||
- [Servers] Various issues when adding a new server [#3385](https://github.com/vatesfr/xen-orchestra/issues/3385) (PR [#3388](https://github.com/vatesfr/xen-orchestra/pull/3388))
|
||||
- [Backup NG] Always delete the correct old replications [#3391](https://github.com/vatesfr/xen-orchestra/issues/3391) (PR [#3394](https://github.com/vatesfr/xen-orchestra/pull/3394))
|
||||
|
||||
### Released packages
|
||||
|
||||
- xo-server v5.26.0
|
||||
- xo-web v5.26.0
|
||||
|
||||
## **5.25.2** (2018-08-27)
|
||||
|
||||
### Enhancements
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- [Remotes] Fix "undefined" mount option issue [#3361](https://github.com/vatesfr/xen-orchestra/issues/3361) (PR [#3363](https://github.com/vatesfr/xen-orchestra/pull/3363))
|
||||
- [Continuous Replication] Don't try to import/export VDIs on halted host [#3354](https://github.com/vatesfr/xen-orchestra/issues/3354) (PR [#3355](https://github.com/vatesfr/xen-orchestra/pull/3355))
|
||||
- [Disaster Recovery] Don't try to import/export VMs on halted host (PR [#3364](https://github.com/vatesfr/xen-orchestra/pull/3364))
|
||||
- [Backup NG] A successful backup job reported as Interrupted [#3018](https://github.com/vatesfr/xen-orchestra/issues/3018) (PR [#3238](https://github.com/vatesfr/xen-orchestra/pull/3238))
|
||||
|
||||
### Released packages
|
||||
|
||||
- xo-server v5.25.2
|
||||
- xo-web v5.25.1
|
||||
|
||||
## **5.25.0** (2018-08-23)
|
||||
|
||||
### Enhancements
|
||||
|
||||
- [Tables] Filter input now always shows up even if the table is empty [#3295](https://github.com/vatesfr/xen-orchestra/issues/3295) (PR [#3296](https://github.com/vatesfr/xen-orchestra/pull/3296))
|
||||
- [Tasks] The table is now still shown when there are no tasks (PR [#3305](https://github.com/vatesfr/xen-orchestra/pull/3305))
|
||||
- [Host / Logs] Homogenize action buttons in table and enable bulk deletion [#3179](https://github.com/vatesfr/xen-orchestra/issues/3179) (PR [#3313](https://github.com/vatesfr/xen-orchestra/pull/3313))
|
||||
- [VM/Advanced] Change "Convert" to "Convert to template" and always show the button [#3201](https://github.com/vatesfr/xen-orchestra/issues/3201) (PR [#3319](https://github.com/vatesfr/xen-orchestra/pull/3319))
|
||||
- [Backup NG form] Display a tip when doing a CR on a thick-provisioned SR [#3291](https://github.com/vatesfr/xen-orchestra/issues/3291) (PR [#3333](https://github.com/vatesfr/xen-orchestra/pull/3333))
|
||||
- [SR/new] Add local ext SR type [#3332](https://github.com/vatesfr/xen-orchestra/issues/3332) (PR [#3335](https://github.com/vatesfr/xen-orchestra/pull/3335))
|
||||
- [Backup reports] Send report for the interrupted backup jobs on the server startup [#2998](https://github.com/vatesfr/xen-orchestra/issues/#2998) (PR [3164](https://github.com/vatesfr/xen-orchestra/pull/3164) [3154](https://github.com/vatesfr/xen-orchestra/pull/3154))
|
||||
- [Backup NG form] Move VMs' selection to a dedicated card [#2711](https://github.com/vatesfr/xen-orchestra/issues/2711) (PR [#3338](https://github.com/vatesfr/xen-orchestra/pull/3338))
|
||||
- [Backup NG smart mode] Exclude replicated VMs [#2338](https://github.com/vatesfr/xen-orchestra/issues/2338) (PR [#3312](https://github.com/vatesfr/xen-orchestra/pull/3312))
|
||||
- [Backup NG form] Show the compression checkbox when the full mode is active [#3236](https://github.com/vatesfr/xen-orchestra/issues/3236) (PR [#3345](https://github.com/vatesfr/xen-orchestra/pull/3345))
|
||||
- [New VM] Display an error when the getting of the coreOS default template fails [#3227](https://github.com/vatesfr/xen-orchestra/issues/3227) (PR [#3343](https://github.com/vatesfr/xen-orchestra/pull/3343))
|
||||
- [Backup NG form] Set default retention to 1 [#3134](https://github.com/vatesfr/xen-orchestra/issues/3134) (PR [#3290](https://github.com/vatesfr/xen-orchestra/pull/3290))
|
||||
- [Backup NG] New logs are searchable by job name [#3272](https://github.com/vatesfr/xen-orchestra/issues/3272) (PR [#3351](https://github.com/vatesfr/xen-orchestra/pull/3351))
|
||||
- [Remotes] Add a field for NFS remotes to set mount options [#1793](https://github.com/vatesfr/xen-orchestra/issues/1793) (PR [#3353](https://github.com/vatesfr/xen-orchestra/pull/3353))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- [Backup NG form] Fix schedule's name overridden with undefined if it's not been edited [#3286](https://github.com/vatesfr/xen-orchestra/issues/3286) (PR [#3288](https://github.com/vatesfr/xen-orchestra/pull/3288))
|
||||
- [Remotes] Don't change `enabled` state on errors (PR [#3318](https://github.com/vatesfr/xen-orchestra/pull/3318))
|
||||
- [Remotes] Auto-reconnect on use if necessary [#2852](https://github.com/vatesfr/xen-orchestra/issues/2852) (PR [#3320](https://github.com/vatesfr/xen-orchestra/pull/3320))
|
||||
- [XO items' select] Fix adding or removing a XO item from a select make the missing XO items disappear [#3322](https://github.com/vatesfr/xen-orchestra/issues/3322) (PR [#3315](https://github.com/vatesfr/xen-orchestra/pull/3315))
|
||||
- [New VM / Self] Filter out SRs that are not in the template's pool [#3068](https://github.com/vatesfr/xen-orchestra/issues/3068) (PR [#3070](https://github.com/vatesfr/xen-orchestra/pull/3070))
|
||||
- [New VM / Self] Fix 'unknown item' displayed in SR selector [#3267](https://github.com/vatesfr/xen-orchestra/issues/3267) (PR [#3070](https://github.com/vatesfr/xen-orchestra/pull/3070))
|
||||
|
||||
### Released packages
|
||||
|
||||
- xo-server-backup-reports v0.13.0
|
||||
- @xen-orchestra/fs 0.3.0
|
||||
- xo-server v5.25.0
|
||||
- xo-web v5.25.0
|
||||
|
||||
## **5.24.0** (2018-08-09)
|
||||
|
||||
### Enhancements
|
||||
|
||||
- [Remotes] Make SMB subfolder field optional [#3249](https://github.com/vatesfr/xen-orchestra/issues/3249) (PR [#3250](https://github.com/vatesfr/xen-orchestra/pull/3250))
|
||||
- [Backup NG form] Make the smart mode's toggle more visible [#2711](https://github.com/vatesfr/xen-orchestra/issues/2711) (PR [#3263](https://github.com/vatesfr/xen-orchestra/pull/3263))
|
||||
- Move the copy clipboard of the VM's UUID to the header [#3221](https://github.com/vatesfr/xen-orchestra/issues/3221) (PR [#3248](https://github.com/vatesfr/xen-orchestra/pull/3248))
|
||||
- [Health / Orphaned VMs] Homogenize action buttons in table and enable bulk deletion [#3179](https://github.com/vatesfr/xen-orchestra/issues/3179) (PR [#3274](https://github.com/vatesfr/xen-orchestra/pull/3274))
|
||||
- [Health / Orphaned snapshot VDIs] Homogenize action buttons in table and enable bulk deletion [#3179](https://github.com/vatesfr/xen-orchestra/issues/3179) (PR [#3270](https://github.com/vatesfr/xen-orchestra/pull/3270))
|
||||
- [Health / Alarms] Homogenize action buttons in table and enable bulk deletion [#3179](https://github.com/vatesfr/xen-orchestra/issues/3179) (PR [#3271](https://github.com/vatesfr/xen-orchestra/pull/3271))
|
||||
- [Backup NG Overview] List the Backup NG job's modes [#3169](https://github.com/vatesfr/xen-orchestra/issues/3169) (PR [#3277](https://github.com/vatesfr/xen-orchestra/pull/3277))
|
||||
- [Backup NG form] Move "Use compression" checkbox in the advanced settings [#2711](https://github.com/vatesfr/xen-orchestra/issues/2711) (PR [#3281](https://github.com/vatesfr/xen-orchestra/pull/3281))
|
||||
- [Backup NG form] Ability to remove previous backups first before backup the VMs [#3212](https://github.com/vatesfr/xen-orchestra/issues/3212) (PR [#3260](https://github.com/vatesfr/xen-orchestra/pull/3260))
|
||||
- [Patching] Check date consistency before patching to avoid error on install [#3056](https://github.com/vatesfr/xen-orchestra/issues/3056)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- [Pools] Filter GPU groups by pool [#3176](https://github.com/vatesfr/xen-orchestra/issues/3176) (PR [#3253](https://github.com/vatesfr/xen-orchestra/pull/3253))
|
||||
- [Backup NG] Fix delta backups with SMB remotes [#3224](https://github.com/vatesfr/xen-orchestra/issues/3224) (PR [#3278](https://github.com/vatesfr/xen-orchestra/pull/3278))
|
||||
- Fix VM restoration getting stuck on local SRs [#3245](https://github.com/vatesfr/xen-orchestra/issues/3245) (PR [#3243](https://github.com/vatesfr/xen-orchestra/pull/3243))
|
||||
|
||||
### Released packages
|
||||
|
||||
- xen-api v0.17.0
|
||||
- @xen-orchestra/fs 0.2.1
|
||||
- xo-server v5.24.0
|
||||
- xo-web v5.24.0
|
||||
|
||||
## **5.23.0** (2018-07-26)
|
||||
|
||||
### Enhancements
|
||||
|
||||
- Export VDI content [#2432](https://github.com/vatesfr/xen-orchestra/issues/2432) (PR [#3194](https://github.com/vatesfr/xen-orchestra/pull/3194))
|
||||
- Search syntax support wildcard (`*`) and regular expressions [#3190](https://github.com/vatesfr/xen-orchestra/issues/3190) (PRs [#3198](https://github.com/vatesfr/xen-orchestra/pull/3198) & [#3199](https://github.com/vatesfr/xen-orchestra/pull/3199))
|
||||
- Import VDI content [#2432](https://github.com/vatesfr/xen-orchestra/issues/2432) (PR [#3216](https://github.com/vatesfr/xen-orchestra/pull/3216))
|
||||
- [Backup NG form] Ability to edit a schedule's name [#2711](https://github.com/vatesfr/xen-orchestra/issues/2711) [#3071](https://github.com/vatesfr/xen-orchestra/issues/3071) (PR [#3143](https://github.com/vatesfr/xen-orchestra/pull/3143))
|
||||
- [Remotes] Ability to change the type of a remote [#2423](https://github.com/vatesfr/xen-orchestra/issues/2423) (PR [#3207](https://github.com/vatesfr/xen-orchestra/pull/3207))
|
||||
- [Backup NG new] Ability to set a job's timeout [#2978](https://github.com/vatesfr/xen-orchestra/issues/2978) (PR [#3222](https://github.com/vatesfr/xen-orchestra/pull/3222))
|
||||
- [Remotes] Ability to edit/delete a remote with an invalid URL [#3182](https://github.com/vatesfr/xen-orchestra/issues/3182) (PR [#3226](https://github.com/vatesfr/xen-orchestra/pull/3226))
|
||||
- [Backup NG logs] Prevent user from deleting logs to help resolving issues [#3153](https://github.com/vatesfr/xen-orchestra/issues/3153) (PR [#3235](https://github.com/vatesfr/xen-orchestra/pull/3235))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- [Backup Reports] Report not sent if reportWhen failure and at least a VM is successfull [#3181](https://github.com/vatesfr/xen-orchestra/issues/3181) (PR [#3185](https://github.com/vatesfr/xen-orchestra/pull/3185))
|
||||
- [Backup NG] Correctly migrate report setting from legacy jobs [#3180](https://github.com/vatesfr/xen-orchestra/issues/3180) (PR [#3206](https://github.com/vatesfr/xen-orchestra/pull/3206))
|
||||
- [Backup NG] remove incomplete XVA files [#3159](https://github.com/vatesfr/xen-orchestra/issues/3159) (PR [#3215](https://github.com/vatesfr/xen-orchestra/pull/3215))
|
||||
- [Backup NG form] Ability to edit a schedule's state [#3223](https://github.com/vatesfr/xen-orchestra/issues/3223) (PR [#3228](https://github.com/vatesfr/xen-orchestra/pull/3228))
|
||||
|
||||
### Released packages
|
||||
|
||||
- xo-remote-parser v0.5.0
|
||||
- complex-matcher v0.4.0
|
||||
- xo-server-backup-reports v0.12.3
|
||||
- xo-server v5.23.0
|
||||
- xo-web v5.23.0
|
||||
|
||||
## **5.22.1** (2018-07-13)
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- [Remote select] Gracefully ignore remotes with invalid URL (PR [#3178](https://github.com/vatesfr/xen-orchestra/pull/3178))
|
||||
|
||||
### Released packages
|
||||
|
||||
- xo-web v5.22.1
|
||||
|
||||
## **5.22.0** (2018-07-12)
|
||||
|
||||
### Enhancements
|
||||
|
||||
@@ -12,6 +12,7 @@
* [Trial activation](trial_activation.md)
* [Plugins](plugins.md)
* [Logs](logs.md)
* [Compatibility](supported-version.md)
* [Troubleshooting](troubleshooting.md)
* [From the sources](from_the_sources.md)
* [Configuration](configuration.md)
BIN
docs/assets/disabled-cr-ha-tag.png
Normal file
Binary file not shown. (After: 32 KiB)
BIN
docs/assets/disabled-dr-ha-tag.png
Normal file
Binary file not shown. (After: 32 KiB)
BIN
docs/assets/log-runId.png
Normal file
Binary file not shown. (After: 38 KiB)
@@ -10,24 +10,40 @@ Another good way to check if there is activity is the XOA VM stats view (on the
|
||||
|
||||
## Error messages
|
||||
|
||||
### `VDI chain protection`
|
||||
### VDI chain protection
|
||||
|
||||
> This message is relevant for *Continuous Delta Backup* or *Continuous Replication* only.
|
||||
|
||||
This means your previous VM disks and snapshots should be "merged" (*coalesced* in the XenServer world) before we take a new snapshot. This mechanism is handled by XenServer itself, not Xen Orchestra. But we can check your existing VDI chain and avoid creating more snapshots than your storage can merge. Otherwise, this will lead to catastrophic consequences. Xen Orchestra is the **only** XenServer/XCP backup product dealing with this.
|
||||
This means your previous VM disks and snapshots should be "merged" (*coalesced* in the XenServer world) before we can take a new snapshot. This mechanism is handled by XenServer itself, not Xen Orchestra. But we can check your existing VDI chain and avoid creating more snapshots than your storage can merge. Otherwise, this will lead to catastrophic consequences. Xen Orchestra is the **only** XenServer/XCP backup product dealing with this.
|
||||
|
||||
Without this detection, you could have 2 potential issues:
|
||||
|
||||
* `The Snapshot Chain is too Long`
|
||||
* `SR_BACKEND_FAILURE_44 (insufficient space)`
|
||||
|
||||
The first issue is a chain that contains more than 30 elements (fixed XenServer limit), and the other one means it's full because the "coalesce" process couldn't keep up the pace and the storage became filled.
|
||||
The first issue is a chain that contains more than 30 elements (fixed XenServer limit), and the other one means it's full because the "coalesce" process couldn't keep up the pace and the storage filled up.
|
||||
|
||||
In the end, this message is a **protection mechanism against damaging your SR**. The backup job will fail, but XenServer itself should eventually coalesce the snapshot chain automatically, and the next run of the backup job should then complete.
|
||||
|
||||
Just remember this: **coalesce will happen every time a snapshot is removed**.
|
||||
|
||||
> You can read more on this on our dedicated blog post regarding [XenServer coalesce detection](https://xen-orchestra.com/blog/xenserver-coalesce-detection-in-xen-orchestra/).
|
||||
|
||||
### `SR_BACKEND_FAILURE_44 (insufficient space)`
|
||||
### Troubleshooting a constant VDI Chain Protection message (XenServer failure to coalesce)
|
||||
|
||||
As previously mentioned, this message can be normal and it just means XenServer needs to perform a coalesce to merge old snapshots. However, if you repeatedly get this message and it seems XenServer is not coalescing, you can take a few steps to determine why.
|
||||
|
||||
First check SMlog on the XenServer host for messages relating to VDI corruption or coalesce job failure. For example, by running `cat /var/log/SMlog | grep -i exception` or `cat /var/log/SMlog | grep -i error` on the XenServer host with the affected storage.
|
||||
|
||||
Coalesce jobs can also fail to run if the SR does not have enough free space. Check the problematic SR and make sure it has enough free space, generally 30% or more free is recommended depending on VM size.
|
||||
|
||||
You can check if a coalesce job is currently active by running `ps axf | grep vhd` on the XenServer host and looking for a VHD process in the results (one of the resulting processes will be the grep command you just ran, ignore that one).
|
||||
|
||||
If you don't see any running coalesce jobs, and can't find any other reason that XenServer has not started one, you can attempt to make it start a coalesce job by rescanning the SR. This is harmless to try, but will not always result in a coalesce. Visit the problematic SR in the XOA UI, then click the "Rescan All Disks" button towards the top right: it looks like a refresh circle icon. This should begin the coalesce process - if you click the Advanced tab in the SR view, the "disks needing to be coalesced" list should become smaller and smaller.
|
||||
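If you prefer the host CLI over the XOA UI, a rescan can also be triggered with `xe` (the UUID below is a placeholder for your SR):

```
# equivalent of the "Rescan All Disks" button; may kick off the coalesce process
xe sr-scan uuid=<sr-uuid>
```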
|
||||
### Parse Error
|
||||
|
||||
This is most likely due to running a backup job that uses Delta functionality (e.g. delta backups or continuous replication) on a version of XenServer older than 6.5. To use delta functionality you must run [XenServer 6.5 or later](https://xen-orchestra.com/docs/supported-version.html).
|
||||
|
||||
### SR_BACKEND_FAILURE_44 (insufficient space)
|
||||
|
||||
> This message can be triggered by any backup method.
|
||||
|
||||
@@ -44,7 +60,7 @@ Workarounds:
|
||||
|
||||
This message appears when the previous replicated VM has been deleted on the target side, which breaks the replication. To reset the process, it's necessary to delete the VM snapshot related to this CR job on the original VM. The name of this snapshot is: `XO_DELTA_EXPORT: <name label of target SR> (<UUID of target SR>)`
|
||||
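To locate that snapshot from the host CLI before deleting it in Xen Orchestra, you can search the snapshot list by name label (the exact label depends on your target SR):

```
# show snapshot UUIDs and name labels matching the CR export snapshot
xe snapshot-list | grep -B1 "XO_DELTA_EXPORT"
```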
|
||||
### LICENCE_RESTRICTION(PCI_device_for_auto_update)
|
||||
### LICENSE_RESTRICTION(PCI_device_for_auto_update)
|
||||
|
||||
This message appears when you try to do a backup/snapshot from a VM that was previously on a host with an active commercial XenServer license but is now on a host with a free edition of XenServer.
|
||||
|
||||
@@ -54,4 +70,10 @@ To solve it, you have to change a parameter in your VM. `xe vm-param-set has-ven
|
||||
|
||||
This message appears when you do not have enough free space on the target remote when running a backup to it.
|
||||
|
||||
To check your free space, enter your XOA and run `xoa check` to check free system space and `df -h` to check free space on your chosen remote storage.
|
||||
To check your free space, enter your XOA and run `xoa check` to check free system space and `df -h` to check free space on your chosen remote storage.
|
||||
|
||||
### Error: no VMs match this pattern
|
||||
|
||||
This happens when you have a *smart backup job* that doesn't match any VMs. For example: you created a job to back up all running VMs. If no VMs are running when the backup is scheduled, you'll get this message. This could also happen if you lost the connection with your pool master (the VMs aren't visible anymore from Xen Orchestra).
|
||||
|
||||
Edit your job to check which VMs match, or verify that your pool is still connected to XOA.
|
||||
|
||||
@@ -29,13 +29,19 @@ You also have a filter to search anything related to these logs.
|
||||
|
||||
> Logs are not "live" tasks. If you restart XOA during a backup, the log associated with the job will stay orange (in progress) forever, because the job was interrupted and never finished.
|
||||
|
||||
## Backups execution
|
||||
|
||||
Each backup job execution is identified by a `runId`. You can find this `runId` in its detailed log.
|
||||
|
||||

|
||||
|
||||
## Consistent backup (with quiesce snapshots)
|
||||
|
||||
All backup rely on snapshots. But what about data consistency? By default, Xen Orchestra will try to make a **quiesce snapshot** every time a snapshot is done (and fallback to normal snapshot if it's not possible).
|
||||
All backup types rely on snapshots. But what about data consistency? By default, Xen Orchestra will try to take a **quiesced snapshot** every time a snapshot is taken (and fall back to a normal snapshot if that's not possible).
|
||||
|
||||
All your Windows VMs can be protected (especially MS SQL or Exchange services) after you have installed Xen Tools in your VMs. A quiesce snapshots means the operating system will be notified and the cache will be flushed to disks. This way, your backups will always be consistent.
|
||||
Snapshots of Windows VMs can be quiesced (especially MS SQL or Exchange services) after you have installed Xen Tools in your VMs. However, [there is an extra step to install the VSS provider on Windows](quiesce). A quiesced snapshot means the operating system will be notified and the cache will be flushed to disks. This way, your backups will always be consistent.
|
||||
|
||||
To see if you have quiesced snapshots for a VM, just go into its snapshot tab, the "info" icon means it is a quiesced snapshot:
|
||||
To see if you have quiesced snapshots for a VM, just go to its snapshot tab; the "info" icon means it is a quiesced snapshot:
|
||||
|
||||

|
||||
|
||||
@@ -135,3 +141,12 @@ To make the mount point persistent in XOA, edit the `/etc/fstab` file, and add:
|
||||
```
|
||||
|
||||
This way, without modifying your previously scheduled snapshots, they will be written to this new local mountpoint!
|
||||
|
||||
## High availability (HA) disabled on replicated VMs
|
||||
|
||||
HA on replicated VMs is taken into account by XS/XCP-ng. To avoid the resulting issues, HA will be disabled on the replicated VMs and a tag indicating this change will be added.
|
||||
|
||||

|
||||

|
||||
|
||||
> The tag won't be automatically removed by XO on the replicated VMs, even if HA is re-enabled.
|
||||
|
||||
@@ -1,10 +1,10 @@
|
||||
# File level restore
|
||||
|
||||
You can also restore individual files inside a VM. It works with all your existing delta backups.
|
||||
You can also restore specific files and directories inside a VM. It works with all your existing delta backups.
|
||||
|
||||
> You must use the latest XOA release. When you connect with the console, you should see `Build number: 16.12.20`. If you have this or higher (e.g. `17.*`), it means that's OK! Otherwise, please update your XOA.
|
||||
|
||||
> Restoring individual files from an SMB remote is not yet possible, but it's planned for the future!
|
||||
> Restoring individual files from an SMB remote is not possible yet, but it's planned for the future!
|
||||
|
||||
> File level restore **is only possible on delta backups**
|
||||
|
||||
|
||||
@@ -14,13 +14,13 @@ As you may have seen, in other parts of the documentation, XO is composed of two
|
||||
|
||||
### NodeJS
|
||||
|
||||
XO needs Node.js. **Please always use the LTS version of Node**.
|
||||
XO needs Node.js. **Please use Node 8**.
|
||||
|
||||
We'll assume at this point that you have a working Node.js installation on your box, e.g.:
|
||||
|
||||
```
|
||||
$ node -v
|
||||
v8.9.1
|
||||
v8.12.0
|
||||
```
|
||||
|
||||
If not, see [this page](https://nodejs.org/en/download/package-manager/) for instructions on how to install Node.
|
||||
@@ -38,7 +38,7 @@ XO needs the following packages to be installed. Redis is used as a database by
|
||||
For example, on Debian:
|
||||
|
||||
```
|
||||
apt-get install build-essential redis-server libpng-dev git python-minimal
|
||||
apt-get install build-essential redis-server libpng-dev git python-minimal libvhdi-utils lvm2
|
||||
```
|
||||
|
||||
## Fetching the Code
|
||||
@@ -100,6 +100,9 @@ That's it! Use your browser to visit the xo-server IP address, and it works! :)
|
||||
If you would like to update your current version, enter your `xen-orchestra` directory and run the following:
|
||||
|
||||
```
|
||||
# This will clear any changes you made in the repository!!
|
||||
$ git checkout .
|
||||
|
||||
$ git pull --ff-only
|
||||
$ yarn
|
||||
$ yarn build
|
||||
|
||||
@@ -52,6 +52,19 @@ Simply type the string, if it contains special characters just surround it with
|
||||
|
||||
> The search is recursive, case insensitive and non-anchored (i.e. matches if the pattern is contained in a string).
|
||||
|
||||
A simple string can also contain a wildcard character (`*`) to match any character in a portion of the string:
|
||||
|
||||
- `foo*bar`: matches `foobar`, `foo - bar`, etc.
|
||||
|
||||
#### Regular expression
|
||||
|
||||
For more advanced string matching, you can use regular expressions:
|
||||
|
||||
- `/^DNS server \d+$/`: matches `DNS server 1`, `DNS server 05` but not `DNS server`
|
||||
- `/foo/i`: with the `i` flag, it ignores the case, therefore it matches `Foo` and `FOO`
|
||||
|
||||
[More information about supported regular expressions](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Regular_Expressions).
|
||||
|
||||
#### Searching a specific property
|
||||
|
||||
Type the property name, followed by a colon `:` and a subfilter:
|
||||
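For example (the property name below is purely illustrative):

- `name_label:prod`: matches objects whose `name_label` property contains `prod`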
|
||||
34
docs/supported-version.md
Normal file
@@ -0,0 +1,34 @@
|
||||
# Xen Orchestra compatibility
|
||||
|
||||
Xen Orchestra is designed to work exclusively with the [XCP-ng](https://xcp-ng.org/) and [Citrix XenServer](https://xenserver.org/) hypervisors (any version). Xen Orchestra should be fully functional with any version of these two hypervisors. However, to benefit from the best support quality, our product is tested to work best with the following versions:
|
||||
|
||||
## Citrix XenServer (Citrix hypervisor)
|
||||
|
||||
Backup restore for large VM disks (>1TiB usage) is [broken on all XenServer versions](https://bugs.xenserver.org/browse/XSO-868) until Citrix releases a fix.
|
||||
|
||||
* XenServer 7.6
|
||||
* XenServer 7.5
|
||||
* [VDI I/O error](https://bugs.xenserver.org/browse/XSO-873), waiting for Citrix to release our fix
|
||||
* XenServer 7.4
|
||||
* XenServer 7.3
|
||||
* XenServer 7.2
|
||||
* XenServer 7.1
|
||||
* XenServer 7.0
|
||||
* XenServer 6.5
|
||||
* Random Delta backup issues
|
||||
* XenServer 6.1 and 6.2
|
||||
* No Delta backup and CR support
|
||||
* XenServer 5.x
|
||||
* Basic administration features
|
||||
|
||||

|
||||
|
||||
## XCP-ng
|
||||
|
||||
All pending fixes are already integrated in the latest XCP-ng version. We strongly suggest staying on the latest XCP-ng version whenever possible.
|
||||
|
||||
* XCP-ng 7.6
|
||||
* XCP-ng 7.5
|
||||
* XCP-ng 7.4.1
|
||||
|
||||

|
||||
@@ -1,4 +1,3 @@
|
||||
|
||||
# Troubleshooting
|
||||
|
||||
This page recaps the actions you can perform if you have any problems with your XOA.
|
||||
@@ -7,12 +6,21 @@ This page recaps the actions you can perform if you have any problems with your
|
||||
|
||||
> Auto deploy failed. - No SR specified and Pool default SR is null
|
||||
|
||||
It means you don't have a default SR set on the pool you are importing XOA to. To set a default SR, you must first find the SR UUID you want, with `xe sr-list`. When you have the UUID, you can set the default SR with: `xe pool-param-set default-SR=<SR_UUID>`. When this is done, re-enter the deploy script command and it will work!
|
||||
It means you don't have a default SR set on the pool you are importing XOA on. To set a default SR, you must first find the SR UUID you want, with `xe sr-list`. When you have the UUID, you can set the default SR with: `xe pool-param-set uuid=<pool-uuid> default-SR=<sr-uuid>`. For the pool UUID, just press tab after `xe pool-param-set uuid=` and it will autofill your pool UUID. When this is done, re-enter the deploy script command and it will work!
|
||||
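Put together, the session on the pool master looks roughly like this (UUIDs are placeholders):

```
# find the UUID of the SR you want as the pool default
xe sr-list

# set it as the default SR (press Tab after uuid= to autofill the pool UUID)
xe pool-param-set uuid=<pool-uuid> default-SR=<sr-uuid>
```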
|
||||
## XOA unreachable after boot
|
||||
|
||||
XOA uses HVM mode. If your physical host doesn't support virtualization extensions, XOA won't work. To check if your XenServer supports hardware assisted virtualization (HVM), you can enter this command in your host: `grep --color vmx /proc/cpuinfo`. If you don't have any result, it means XOA won't work on this hardware.
|
||||
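A minimal check, run directly on the host (note: `vmx` is the Intel flag; on AMD hardware the equivalent flag is `svm`):

```
# any output means hardware virtualization is exposed; no output means XOA cannot run here
grep --color vmx /proc/cpuinfo
```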
|
||||
|
||||
## Recover XOA Web-UI login password
|
||||
|
||||
If you have lost your password to log in to the XOA webpage, you can reset it. From the XOA CLI (for login/access info for the CLI, [see here](xoa.md#first-console-connection)), use the following command and insert the email/account you wish to recover:
|
||||
|
||||
`xo-server-recover-account youremail@here.com`
|
||||
|
||||
It will prompt you to set a new password. If you provide an email here that does not exist in XOA yet, it will create a new account using it, with admin permissions - you can use that new account to log in as well.
|
||||
|
||||
## Empty page after login
|
||||
|
||||
This happens when your antivirus or firewall is blocking the websocket protocol. This is what we use to communicate between `xo-server` and `xo-web` (see the [architecture page](architecture.md)).
|
||||
@@ -93,6 +101,13 @@ memory to xo-server itself:
|
||||
+ ExecStart=/usr/local/bin/node --max-old-space-size=8192 /usr/local/bin/xo-server
|
||||
```
|
||||
|
||||
The last step is to refresh and restart the service:
|
||||
|
||||
```
|
||||
$ systemctl daemon-reload
|
||||
$ systemctl restart xo-server
|
||||
```
|
||||
|
||||
### Behind a transparent proxy
|
||||
|
||||
If you are behind a transparent proxy, you'll probably have issues with the updater (SSL/TLS issues).
|
||||
@@ -120,7 +135,7 @@ $ openssl req -x509 -newkey rsa:2048 -keyout server.key -out server.crt -nodes -
|
||||
$ systemctl restart xo-server.service
|
||||
```
|
||||
|
||||
## XO configuration
|
||||
## XO Configuration
|
||||
|
||||
The system logs are visible by using this command:
|
||||
|
||||
@@ -128,11 +143,11 @@ The system logs are visible by using this command:
|
||||
$ tail -f /var/log/syslog
|
||||
```
|
||||
|
||||
You can read more about logs [in the dedicated chapter](logs.md).
|
||||
You can read more about logs [in the dedicated logs chapter](logs.md).
|
||||
|
||||
### Ghost tasks
|
||||
|
||||
If you have ghost tasks accumulating in your XenOrchestra you can try the following actions in order:
|
||||
If you have ghost tasks accumulating in your Xen Orchestra, you can try the following actions in order:
|
||||
|
||||
1. refresh the web page
|
||||
1. disconnect and reconnect the Xen pool/server owning the tasks
|
||||
@@ -150,7 +165,7 @@ If a package disappears due to a build problem or human error, you can redownloa
|
||||
|
||||
### Reset XO configuration
|
||||
|
||||
If you have problems with your `xo-server` configuration, you can reset the database. **This operation will delete all your configured users and servers**:
|
||||
If you have problems with your `xo-server` configuration, you can reset the database. **This operation will delete all your configured users and servers, plus any backup jobs**:
|
||||
|
||||
1. `redis-cli`
|
||||
2. `FLUSHALL`
|
||||
|
||||
@@ -1,4 +1,3 @@
|
||||
|
||||
# xo-cli
|
||||
|
||||
This is another client of `xo-server` - this time in command line form.
|
||||
@@ -104,5 +103,6 @@ encoding by prefixing with `json:`:
|
||||
##### VM import
|
||||
|
||||
```
|
||||
> xo-cli vm.import host=60a6939e-8b0a-4352-9954-5bde44bcdf7d @=vm.xva
|
||||
> xo-cli vm.import sr=60a6939e-8b0a-4352-9954-5bde44bcdf7d @=vm.xva
|
||||
```
|
||||
> Note: `xo-cli` only supports the import of XVA files. It will not import OVA files. To import OVA images, you must use the XOA web UI.
|
||||
|
||||
35
package.json
@@ -1,33 +1,40 @@
|
||||
{
|
||||
"devDependencies": {
|
||||
"@babel/core": "^7.0.0-beta.49",
|
||||
"@babel/register": "^7.0.0-beta.49",
|
||||
"@babel/core": "^7.0.0",
|
||||
"@babel/register": "^7.0.0",
|
||||
"babel-core": "^7.0.0-0",
|
||||
"babel-eslint": "^8.1.2",
|
||||
"babel-eslint": "^10.0.1",
|
||||
"babel-jest": "^23.0.1",
|
||||
"benchmark": "^2.1.4",
|
||||
"eslint": "^4.14.0",
|
||||
"eslint-config-standard": "^11.0.0-beta.0",
|
||||
"eslint-config-standard-jsx": "^5.0.0",
|
||||
"eslint": "^5.1.0",
|
||||
"eslint-config-prettier": "^3.3.0",
|
||||
"eslint-config-standard": "12.0.0",
|
||||
"eslint-config-standard-jsx": "^6.0.2",
|
||||
"eslint-plugin-import": "^2.8.0",
|
||||
"eslint-plugin-node": "^6.0.0",
|
||||
"eslint-plugin-promise": "^3.6.0",
|
||||
"eslint-plugin-node": "^8.0.0",
|
||||
"eslint-plugin-promise": "^4.0.0",
|
||||
"eslint-plugin-react": "^7.6.1",
|
||||
"eslint-plugin-standard": "^3.0.1",
|
||||
"eslint-plugin-standard": "^4.0.0",
|
||||
"exec-promise": "^0.7.0",
|
||||
"flow-bin": "^0.73.0",
|
||||
"flow-bin": "^0.87.0",
|
||||
"globby": "^8.0.0",
|
||||
"husky": "^0.14.3",
|
||||
"husky": "^1.0.0-rc.15",
|
||||
"jest": "^23.0.1",
|
||||
"lodash": "^4.17.4",
|
||||
"prettier": "^1.10.2",
|
||||
"promise-toolbox": "^0.9.5",
|
||||
"promise-toolbox": "^0.11.0",
|
||||
"sorted-object": "^2.0.1"
|
||||
},
|
||||
"engines": {
|
||||
"yarn": "^1.7.0"
|
||||
},
|
||||
"husky": {
|
||||
"hooks": {
|
||||
"pre-commit": "scripts/lint-staged"
|
||||
}
|
||||
},
|
||||
"jest": {
|
||||
"timers": "fake",
|
||||
"collectCoverage": true,
|
||||
"projects": [
|
||||
"<rootDir>"
|
||||
@@ -49,11 +56,11 @@
|
||||
"dev": "scripts/run-script --parallel dev",
|
||||
"dev-test": "jest --bail --watch \"^(?!.*\\.integ\\.spec\\.js$)\"",
|
||||
"posttest": "scripts/run-script test",
|
||||
"precommit": "scripts/lint-staged",
|
||||
"prepare": "scripts/run-script prepare",
|
||||
"pretest": "eslint --ignore-path .gitignore .",
|
||||
"test": "jest \"^(?!.*\\.integ\\.spec\\.js$)\"",
|
||||
"test-integration": "jest \".integ\\.spec\\.js$\""
|
||||
"test-integration": "jest \".integ\\.spec\\.js$\"",
|
||||
"travis-tests": "scripts/travis-tests"
|
||||
},
|
||||
"workspaces": [
|
||||
"@xen-orchestra/*",
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "complex-matcher",
|
||||
"version": "0.3.0",
|
||||
"version": "0.5.0",
|
||||
"license": "ISC",
|
||||
"description": "",
|
||||
"keywords": [],
|
||||
@@ -30,9 +30,9 @@
|
||||
"lodash": "^4.17.4"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@babel/cli": "7.0.0-beta.49",
|
||||
"@babel/core": "7.0.0-beta.49",
|
||||
"@babel/preset-env": "7.0.0-beta.49",
|
||||
"@babel/cli": "^7.0.0",
|
||||
"@babel/core": "^7.0.0",
|
||||
"@babel/preset-env": "^7.0.0",
|
||||
"babel-plugin-lodash": "^3.3.2",
|
||||
"cross-env": "^5.1.1",
|
||||
"rimraf": "^2.6.2"
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
import * as CM from './'
|
||||
|
||||
export const pattern =
|
||||
'foo !"\\\\ \\"" name:|(wonderwoman batman) hasCape? age:32'
|
||||
'foo !"\\\\ \\"" name:|(wonderwoman batman) hasCape? age:32 chi*go /^foo\\/bar\\./i'
|
||||
|
||||
export const ast = new CM.And([
|
||||
new CM.String('foo'),
|
||||
@@ -11,5 +11,7 @@ export const ast = new CM.And([
|
||||
new CM.Or([new CM.String('wonderwoman'), new CM.String('batman')])
|
||||
),
|
||||
new CM.TruthyProperty('hasCape'),
|
||||
new CM.Property('age', new CM.Number(32)),
|
||||
new CM.Property('age', new CM.NumberOrStringNode('32')),
|
||||
new CM.GlobPattern('chi*go'),
|
||||
new CM.RegExp('^foo/bar\\.', 'i'),
|
||||
])
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { isPlainObject, some } from 'lodash'
|
||||
import { escapeRegExp, isPlainObject, some } from 'lodash'
|
||||
|
||||
// ===================================================================
|
||||
|
||||
@@ -33,17 +33,17 @@ const isRawString = string => {
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
class Node {
|
||||
createPredicate () {
|
||||
createPredicate() {
|
||||
return value => this.match(value)
|
||||
}
|
||||
}
|
||||
|
||||
export class Null extends Node {
|
||||
match () {
|
||||
match() {
|
||||
return true
|
||||
}
|
||||
|
||||
toString () {
|
||||
toString() {
|
||||
return ''
|
||||
}
|
||||
}
|
||||
@@ -51,7 +51,7 @@ export class Null extends Node {
|
||||
const formatTerms = terms => terms.map(term => term.toString(true)).join(' ')
|
||||
|
||||
export class And extends Node {
|
||||
constructor (children) {
|
||||
constructor(children) {
|
||||
super()
|
||||
|
||||
if (children.length === 1) {
|
||||
@@ -60,29 +60,29 @@ export class And extends Node {
|
||||
this.children = children
|
||||
}
|
||||
|
||||
match (value) {
|
||||
match(value) {
|
||||
return this.children.every(child => child.match(value))
|
||||
}
|
||||
|
||||
toString (isNested) {
|
||||
toString(isNested) {
|
||||
const terms = formatTerms(this.children)
|
||||
return isNested ? `(${terms})` : terms
|
||||
}
|
||||
}
|
||||
|
||||
export class Comparison extends Node {
|
||||
constructor (operator, value) {
|
||||
constructor(operator, value) {
|
||||
super()
|
||||
this._comparator = Comparison.comparators[operator]
|
||||
this._operator = operator
|
||||
this._value = value
|
||||
}
|
||||
|
||||
match (value) {
|
||||
match(value) {
|
||||
return typeof value === 'number' && this._comparator(value, this._value)
|
||||
}
|
||||
|
||||
toString () {
|
||||
toString() {
|
||||
return this._operator + String(this._value)
|
||||
}
|
||||
}
|
||||
@@ -94,7 +94,7 @@ Comparison.comparators = {
|
||||
}
|
||||
|
||||
export class Or extends Node {
|
||||
constructor (children) {
|
||||
constructor(children) {
|
||||
super()
|
||||
|
||||
if (children.length === 1) {
|
||||
@@ -103,33 +103,33 @@ export class Or extends Node {
|
||||
this.children = children
|
||||
}
|
||||
|
||||
match (value) {
|
||||
match(value) {
|
||||
return this.children.some(child => child.match(value))
|
||||
}
|
||||
|
||||
toString () {
|
||||
toString() {
|
||||
return `|(${formatTerms(this.children)})`
|
||||
}
|
||||
}
|
||||
|
||||
export class Not extends Node {
|
||||
constructor (child) {
|
||||
constructor(child) {
|
||||
super()
|
||||
|
||||
this.child = child
|
||||
}
|
||||
|
||||
match (value) {
|
||||
match(value) {
|
||||
return !this.child.match(value)
|
||||
}
|
||||
|
||||
toString () {
|
||||
toString() {
|
||||
return '!' + this.child.toString(true)
|
||||
}
|
||||
}
|
||||
|
||||
export class NumberNode extends Node {
|
||||
constructor (value) {
|
||||
constructor(value) {
|
||||
super()
|
||||
|
||||
this.value = value
|
||||
@@ -140,32 +140,60 @@ export class NumberNode extends Node {
|
||||
})
|
||||
}
|
||||
|
||||
match (value) {
|
||||
match(value) {
|
||||
return (
|
||||
value === this.value ||
|
||||
(value !== null && typeof value === 'object' && some(value, this.match))
|
||||
)
|
||||
}
|
||||
|
||||
toString () {
|
||||
toString() {
|
||||
return String(this.value)
|
||||
}
|
||||
}
|
||||
export { NumberNode as Number }
|
||||
|
||||
export class NumberOrStringNode extends Node {
|
||||
constructor(value) {
|
||||
super()
|
||||
|
||||
this.value = value
|
||||
|
||||
// should not be enumerable for the tests
|
||||
Object.defineProperty(this, 'match', {
|
||||
value: this.match.bind(this, value.toLowerCase(), +value),
|
||||
})
|
||||
}
|
||||
|
||||
match(lcValue, numValue, value) {
|
||||
return (
|
||||
value === numValue ||
|
||||
(typeof value === 'string'
|
||||
? value.toLowerCase().indexOf(lcValue) !== -1
|
||||
: (Array.isArray(value) || isPlainObject(value)) &&
|
||||
some(value, this.match))
|
||||
)
|
||||
}
|
||||
|
||||
toString() {
|
||||
return this.value
|
||||
}
|
||||
}
|
||||
export { NumberOrStringNode as NumberOrString }
|
||||
|
||||
export class Property extends Node {
|
||||
constructor (name, child) {
|
||||
constructor(name, child) {
|
||||
super()
|
||||
|
||||
this.name = name
|
||||
this.child = child
|
||||
}
|
||||
|
||||
match (value) {
|
||||
match(value) {
|
||||
return value != null && this.child.match(value[this.name])
|
||||
}
|
||||
|
||||
toString () {
|
||||
toString() {
|
||||
return `${formatString(this.name)}:${this.child.toString(true)}`
|
||||
}
|
||||
}
|
||||
@@ -173,25 +201,40 @@ export class Property extends Node {
|
||||
const escapeChar = char => '\\' + char
|
||||
const formatString = value =>
|
||||
Number.isNaN(+value)
|
||||
? isRawString(value) ? value : `"${value.replace(/\\|"/g, escapeChar)}"`
|
||||
? isRawString(value)
|
||||
? value
|
||||
: `"${value.replace(/\\|"/g, escapeChar)}"`
|
||||
: `"${value}"`
|
||||
|
||||
export class StringNode extends Node {
|
||||
constructor (value) {
|
||||
export class GlobPattern extends Node {
|
||||
constructor(value) {
|
||||
// fallback to string node if no wildcard
|
||||
if (value.indexOf('*') === -1) {
|
||||
return new StringNode(value)
|
||||
}
|
||||
|
||||
super()
|
||||
|
||||
this.lcValue = value.toLowerCase()
|
||||
this.value = value
|
||||
|
||||
// should not be enumerable for the tests
|
||||
Object.defineProperty(this, 'match', {
|
||||
value: this.match.bind(this),
|
||||
value: this.match.bind(
|
||||
this,
|
||||
new RegExp(
|
||||
value
|
||||
.split('*')
|
||||
.map(escapeRegExp)
|
||||
.join('.*'),
|
||||
'i'
|
||||
)
|
||||
),
|
||||
})
|
||||
}
|
||||
|
||||
match (value) {
|
||||
match(re, value) {
|
||||
if (typeof value === 'string') {
|
||||
return value.toLowerCase().indexOf(this.lcValue) !== -1
|
||||
return re.test(value)
|
||||
}
|
||||
|
||||
if (Array.isArray(value) || isPlainObject(value)) {
|
||||
@@ -201,24 +244,83 @@ export class StringNode extends Node {
|
||||
return false
|
||||
}
|
||||
|
||||
toString () {
|
||||
toString() {
|
||||
return this.value
|
||||
}
|
||||
}
|
||||
|
||||
export class RegExpNode extends Node {
|
||||
constructor(pattern, flags) {
|
||||
super()
|
||||
|
||||
this.re = new RegExp(pattern, flags)
|
||||
|
||||
// should not be enumerable for the tests
|
||||
Object.defineProperty(this, 'match', {
|
||||
value: this.match.bind(this),
|
||||
})
|
||||
}
|
||||
|
||||
match(value) {
|
||||
if (typeof value === 'string') {
|
||||
return this.re.test(value)
|
||||
}
|
||||
|
||||
if (Array.isArray(value) || isPlainObject(value)) {
|
||||
return some(value, this.match)
|
||||
}
|
||||
|
||||
return false
|
||||
}
|
||||
|
||||
toString() {
|
||||
return this.re.toString()
|
||||
}
|
||||
}
|
||||
export { RegExpNode as RegExp }
|
||||
|
||||
export class StringNode extends Node {
|
||||
constructor(value) {
|
||||
super()
|
||||
|
||||
this.value = value
|
||||
|
||||
// should not be enumerable for the tests
|
||||
Object.defineProperty(this, 'match', {
|
||||
value: this.match.bind(this, value.toLowerCase()),
|
||||
})
|
||||
}
|
||||
|
||||
match(lcValue, value) {
|
||||
if (typeof value === 'string') {
|
||||
return value.toLowerCase().indexOf(lcValue) !== -1
|
||||
}
|
||||
|
||||
if (Array.isArray(value) || isPlainObject(value)) {
|
||||
return some(value, this.match)
|
||||
}
|
||||
|
||||
return false
|
||||
}
|
||||
|
||||
toString() {
|
||||
return formatString(this.value)
|
||||
}
|
||||
}
|
||||
export { StringNode as String }
|
||||
|
||||
export class TruthyProperty extends Node {
|
||||
constructor (name) {
|
||||
constructor(name) {
|
||||
super()
|
||||
|
||||
this.name = name
|
||||
}
|
||||
|
||||
match (value) {
|
||||
match(value) {
|
||||
return value != null && !!value[this.name]
|
||||
}
|
||||
|
||||
toString () {
|
||||
toString() {
|
||||
return formatString(this.name) + '?'
|
||||
}
|
||||
}
|
||||
@@ -228,12 +330,12 @@ export class TruthyProperty extends Node {
|
||||
// https://gist.github.com/yelouafi/556e5159e869952335e01f6b473c4ec1
|
||||
|
||||
class Failure {
|
||||
constructor (pos, expected) {
|
||||
constructor(pos, expected) {
|
||||
this.expected = expected
|
||||
this.pos = pos
|
||||
}
|
||||
|
||||
get value () {
|
||||
get value() {
|
||||
throw new Error(
|
||||
`parse error: expected ${this.expected} at position ${this.pos}`
|
||||
)
|
||||
@@ -241,7 +343,7 @@ class Failure {
|
||||
}
|
||||
|
||||
class Success {
|
||||
constructor (pos, value) {
|
||||
constructor(pos, value) {
|
||||
this.pos = pos
|
||||
this.value = value
|
||||
}
|
||||
@@ -250,7 +352,7 @@ class Success {
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
class P {
|
||||
static alt (...parsers) {
|
||||
static alt(...parsers) {
|
||||
const { length } = parsers
|
||||
return new P((input, pos, end) => {
|
||||
for (let i = 0; i < length; ++i) {
|
||||
@@ -263,7 +365,7 @@ class P {
|
||||
})
|
||||
}
|
||||
|
||||
static grammar (rules) {
|
||||
static grammar(rules) {
|
||||
const grammar = {}
|
||||
Object.keys(rules).forEach(k => {
|
||||
const rule = rules[k]
|
||||
@@ -272,14 +374,14 @@ class P {
|
||||
return grammar
|
||||
}
|
||||
|
||||
static lazy (parserCreator, arg) {
|
||||
static lazy(parserCreator, arg) {
|
||||
const parser = new P((input, pos, end) =>
|
||||
(parser._parse = parserCreator(arg)._parse)(input, pos, end)
|
||||
)
|
||||
return parser
|
||||
}
|
||||
|
||||
static regex (regex) {
|
||||
static regex(regex) {
|
||||
regex = new RegExp(regex.source, 'y')
|
||||
return new P((input, pos) => {
|
||||
regex.lastIndex = pos
|
||||
@@ -290,7 +392,7 @@ class P {
|
||||
})
|
||||
}
|
||||
|
||||
static seq (...parsers) {
|
||||
static seq(...parsers) {
|
||||
const { length } = parsers
|
||||
return new P((input, pos, end) => {
|
||||
const values = new Array(length)
|
||||
@@ -306,21 +408,20 @@ class P {
|
||||
})
|
||||
}
|
||||
|
||||
static text (text) {
|
||||
static text(text) {
|
||||
const { length } = text
|
||||
return new P(
|
||||
(input, pos) =>
|
||||
input.startsWith(text, pos)
|
||||
? new Success(pos + length, text)
|
||||
: new Failure(pos, `'${text}'`)
|
||||
return new P((input, pos) =>
|
||||
input.startsWith(text, pos)
|
||||
? new Success(pos + length, text)
|
||||
: new Failure(pos, `'${text}'`)
|
||||
)
|
||||
}
|
||||
|
||||
constructor (parse) {
|
||||
constructor(parse) {
|
||||
this._parse = parse
|
||||
}
|
||||
|
||||
map (fn) {
|
||||
map(fn) {
|
||||
return new P((input, pos, end) => {
|
||||
const result = this._parse(input, pos, end)
|
||||
if (result instanceof Success) {
|
||||
@@ -330,11 +431,11 @@ class P {
|
||||
})
|
||||
}
|
||||
|
||||
parse (input, pos = 0, end = input.length) {
|
||||
parse(input, pos = 0, end = input.length) {
|
||||
return this._parse(input, pos, end).value
|
||||
}
|
||||
|
||||
repeat (min = 0, max = Infinity) {
|
||||
repeat(min = 0, max = Infinity) {
|
||||
return new P((input, pos, end) => {
|
||||
const value = []
|
||||
let result
|
||||
@@ -360,7 +461,7 @@ class P {
|
||||
})
|
||||
}
|
||||
|
||||
skip (otherParser) {
|
||||
skip(otherParser) {
|
||||
return new P((input, pos, end) => {
|
||||
const result = this._parse(input, pos, end)
|
||||
if (result instanceof Failure) {
|
||||
@@ -376,18 +477,28 @@ class P {
|
||||
}
|
||||
}
|
||||
|
||||
P.eof = new P(
|
||||
(input, pos, end) =>
|
||||
pos < end ? new Failure(pos, 'end of input') : new Success(pos)
|
||||
P.eof = new P((input, pos, end) =>
|
||||
pos < end ? new Failure(pos, 'end of input') : new Success(pos)
|
||||
)
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
const parser = P.grammar({
|
||||
default: r =>
|
||||
P.seq(r.ws, r.term.repeat(), P.eof).map(
|
||||
([, terms]) => (terms.length === 0 ? new Null() : new And(terms))
|
||||
P.seq(r.ws, r.term.repeat(), P.eof).map(([, terms]) =>
|
||||
terms.length === 0 ? new Null() : new And(terms)
|
||||
),
|
||||
globPattern: new P((input, pos, end) => {
|
||||
let value = ''
|
||||
let c
|
||||
while (pos < end && ((c = input[pos]) === '*' || c in RAW_STRING_CHARS)) {
|
||||
++pos
|
||||
value += c
|
||||
}
|
||||
return value.length === 0
|
||||
? new Failure(pos, 'a raw string')
|
||||
: new Success(pos, value)
|
||||
}),
|
||||
quotedString: new P((input, pos, end) => {
|
||||
if (input[pos] !== '"') {
|
||||
return new Failure(pos, '"')
|
||||
@@ -405,6 +516,7 @@ const parser = P.grammar({
|
||||
|
||||
return new Success(pos, value.join(''))
|
||||
}),
|
||||
property: r => P.alt(r.quotedString, r.rawString),
|
||||
rawString: new P((input, pos, end) => {
|
||||
let value = ''
|
||||
let c
|
||||
@@ -416,7 +528,33 @@ const parser = P.grammar({
|
||||
? new Failure(pos, 'a raw string')
|
||||
: new Success(pos, value)
|
||||
}),
|
||||
string: r => P.alt(r.quotedString, r.rawString),
|
||||
regex: new P((input, pos, end) => {
|
||||
if (input[pos] !== '/') {
|
||||
return new Failure(pos, '/')
|
||||
}
|
||||
++pos
|
||||
|
||||
let c
|
||||
|
||||
let pattern = ''
|
||||
let escaped = false
|
||||
while (pos < end && ((c = input[pos++]) !== '/' || escaped)) {
|
||||
escaped = c === '\\'
|
||||
pattern += c
|
||||
}
|
||||
|
||||
if (c !== '/') {
|
||||
return new Failure(pos, '/')
|
||||
}
|
||||
|
||||
let flags = ''
|
||||
if (pos < end && (c = input[pos]) === 'i') {
|
||||
++pos
|
||||
flags += c
|
||||
}
|
||||
|
||||
return new Success(pos, new RegExpNode(pattern, flags))
|
||||
}),
|
||||
term: r =>
|
||||
P.alt(
|
||||
P.seq(P.text('('), r.ws, r.term.repeat(1), P.text(')')).map(
|
||||
@@ -438,20 +576,23 @@ const parser = P.grammar({
|
||||
}
|
||||
return new Comparison(op, val)
|
||||
}),
|
||||
P.seq(r.string, r.ws, P.text(':'), r.ws, r.term).map(
|
||||
P.seq(r.property, r.ws, P.text(':'), r.ws, r.term).map(
|
||||
_ => new Property(_[0], _[4])
|
||||
),
|
||||
P.seq(r.string, P.text('?')).map(_ => new TruthyProperty(_[0])),
|
||||
P.alt(
|
||||
r.quotedString.map(_ => new StringNode(_)),
|
||||
r.rawString.map(str => {
|
||||
const asNum = +str
|
||||
return Number.isNaN(asNum)
|
||||
? new StringNode(str)
|
||||
: new NumberNode(asNum)
|
||||
})
|
||||
)
|
||||
P.seq(r.property, P.text('?')).map(_ => new TruthyProperty(_[0])),
|
||||
r.value
|
||||
).skip(r.ws),
|
||||
value: r =>
|
||||
P.alt(
|
||||
r.quotedString.map(_ => new StringNode(_)),
|
||||
r.regex,
|
||||
r.globPattern.map(str => {
|
||||
const asNum = +str
|
||||
return Number.isNaN(asNum)
|
||||
? new GlobPattern(str)
|
||||
: new NumberOrStringNode(str)
|
||||
})
|
||||
),
|
||||
ws: P.regex(/\s*/),
|
||||
}).default
|
||||
export const parse = parser.parse.bind(parser)
|
||||
|
||||
@@ -3,8 +3,10 @@
|
||||
import { ast, pattern } from './index.fixtures'
|
||||
import {
|
||||
getPropertyClausesStrings,
|
||||
GlobPattern,
|
||||
Null,
|
||||
NumberNode,
|
||||
NumberOrStringNode,
|
||||
parse,
|
||||
setPropertyClause,
|
||||
} from './'
|
||||
@@ -31,7 +33,7 @@ describe('parse', () => {
|
||||
|
||||
node = parse('32')
|
||||
expect(node.match(32)).toBe(true)
|
||||
expect(node.match('32')).toBe(false)
|
||||
expect(node.match('32')).toBe(true)
|
||||
expect(node.toString()).toBe('32')
|
||||
|
||||
node = parse('"32"')
|
||||
@@ -41,12 +43,24 @@ describe('parse', () => {
|
||||
})
|
||||
})
|
||||
|
||||
describe('GlobPattern', () => {
|
||||
it('matches a glob pattern recursively', () => {
|
||||
expect(new GlobPattern('b*r').match({ foo: 'bar' })).toBe(true)
|
||||
})
|
||||
})
|
||||
|
||||
describe('Number', () => {
|
||||
it('match a number recursively', () => {
|
||||
expect(new NumberNode(3).match([{ foo: 3 }])).toBe(true)
|
||||
})
|
||||
})
|
||||
|
||||
describe('NumberOrStringNode', () => {
|
||||
it('match a string', () => {
|
||||
expect(new NumberOrStringNode('123').match([{ foo: '123' }])).toBe(true)
|
||||
})
|
||||
})
|
||||
|
||||
describe('setPropertyClause', () => {
|
||||
it('creates a node if none passed', () => {
|
||||
expect(setPropertyClause(undefined, 'foo', 'bar').toString()).toBe(
|
||||
@@ -82,5 +96,5 @@ describe('setPropertyClause', () => {
|
||||
})
|
||||
|
||||
it('toString', () => {
|
||||
expect(pattern).toBe(ast.toString())
|
||||
expect(ast.toString()).toBe(pattern)
|
||||
})
|
||||
|
||||
@@ -28,10 +28,10 @@
|
||||
},
|
||||
"dependencies": {},
|
||||
"devDependencies": {
|
||||
"@babel/cli": "7.0.0-beta.49",
|
||||
"@babel/core": "7.0.0-beta.49",
|
||||
"@babel/preset-env": "7.0.0-beta.49",
|
||||
"@babel/preset-flow": "7.0.0-beta.49",
|
||||
"@babel/cli": "^7.0.0",
|
||||
"@babel/core": "^7.0.0",
|
||||
"@babel/preset-env": "^7.0.0",
|
||||
"@babel/preset-flow": "^7.0.0",
|
||||
"cross-env": "^5.1.3",
|
||||
"rimraf": "^2.6.2"
|
||||
},
|
||||
|
||||
@@ -26,22 +26,21 @@
|
||||
"node": ">=6"
|
||||
},
|
||||
"dependencies": {
|
||||
"@xen-orchestra/fs": "^0.2.0",
|
||||
"@xen-orchestra/fs": "^0.5.0",
|
||||
"cli-progress": "^2.0.0",
|
||||
"exec-promise": "^0.7.0",
|
||||
"struct-fu": "^1.2.0",
|
||||
"vhd-lib": "^0.3.0"
|
||||
"vhd-lib": "^0.4.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@babel/cli": "^7.0.0-beta.49",
|
||||
"@babel/core": "^7.0.0-beta.49",
|
||||
"@babel/plugin-transform-runtime": "^7.0.0-beta.49",
|
||||
"@babel/preset-env": "^7.0.0-beta.49",
|
||||
"@babel/cli": "^7.0.0",
|
||||
"@babel/core": "^7.0.0",
|
||||
"@babel/preset-env": "^7.0.0",
|
||||
"babel-plugin-lodash": "^3.3.2",
|
||||
"cross-env": "^5.1.3",
|
||||
"execa": "^0.10.0",
|
||||
"execa": "^1.0.0",
|
||||
"index-modules": "^0.3.0",
|
||||
"promise-toolbox": "^0.9.5",
|
||||
"promise-toolbox": "^0.11.0",
|
||||
"rimraf": "^2.6.1",
|
||||
"tmp": "^0.0.33"
|
||||
},
|
||||
|
||||
@@ -5,7 +5,12 @@ import { resolve } from 'path'
|
||||
export default async args => {
|
||||
const vhd = new Vhd(getHandler({ url: 'file:///' }), resolve(args[0]))
|
||||
|
||||
await vhd.readHeaderAndFooter()
|
||||
try {
|
||||
await vhd.readHeaderAndFooter()
|
||||
} catch (error) {
|
||||
console.warn(error)
|
||||
await vhd.readHeaderAndFooter(false)
|
||||
}
|
||||
|
||||
console.log(vhd.header)
|
||||
console.log(vhd.footer)
|
||||
|
||||
@@ -3,7 +3,7 @@ import { mergeVhd } from 'vhd-lib'
|
||||
import { getHandler } from '@xen-orchestra/fs'
|
||||
import { resolve } from 'path'
|
||||
|
||||
export default async function main (args) {
|
||||
export default async function main(args) {
|
||||
if (args.length < 2 || args.some(_ => _ === '-h' || _ === '--help')) {
|
||||
return `Usage: ${this.command} <child VHD> <parent VHD>`
|
||||
}
|
||||
@@ -11,10 +11,11 @@ export default async function main (args) {
|
||||
const handler = getHandler({ url: 'file:///' })
|
||||
let bar
|
||||
await mergeVhd(handler, resolve(args[1]), handler, resolve(args[0]), {
|
||||
onProgress ({ done, total }) {
|
||||
onProgress({ done, total }) {
|
||||
if (bar === undefined) {
|
||||
bar = new Bar({
|
||||
format: 'merging [{bar}] {percentage}% | ETA: {eta}s | {value}/{total}',
|
||||
format:
|
||||
'merging [{bar}] {percentage}% | ETA: {eta}s | {value}/{total}',
|
||||
})
|
||||
bar.start(total, done)
|
||||
} else {
|
||||
|
||||
@@ -3,19 +3,18 @@ import { createSyntheticStream } from 'vhd-lib'
|
||||
import { createWriteStream } from 'fs'
|
||||
import { getHandler } from '@xen-orchestra/fs'
|
||||
|
||||
export default async function main (args) {
|
||||
export default async function main(args) {
|
||||
if (args.length < 2 || args.some(_ => _ === '-h' || _ === '--help')) {
|
||||
return `Usage: ${this.command} <input VHD> <output VHD>`
|
||||
}
|
||||
|
||||
const handler = getHandler({ url: 'file:///' })
|
||||
const stream = await createSyntheticStream(handler, path.resolve(args[0]))
|
||||
return new Promise((resolve, reject) => {
|
||||
createSyntheticStream(handler, path.resolve(args[0]))
|
||||
.on('error', reject)
|
||||
.pipe(
|
||||
createWriteStream(args[1])
|
||||
.on('error', reject)
|
||||
.on('finish', resolve)
|
||||
)
|
||||
stream.on('error', reject).pipe(
|
||||
createWriteStream(args[1])
|
||||
.on('error', reject)
|
||||
.on('finish', resolve)
|
||||
)
|
||||
})
|
||||
}
|
||||
|
||||
@@ -4,7 +4,7 @@ import execPromise from 'exec-promise'
|
||||
|
||||
import commands from './commands'
|
||||
|
||||
function runCommand (commands, [command, ...args]) {
|
||||
function runCommand(commands, [command, ...args]) {
|
||||
if (command === undefined || command === '-h' || command === '--help') {
|
||||
command = 'help'
|
||||
}
|
||||
|
||||
@@ -3,7 +3,7 @@
|
||||
import execa from 'execa'
|
||||
import rimraf from 'rimraf'
|
||||
import tmp from 'tmp'
|
||||
import { fromCallback as pFromCallback } from 'promise-toolbox'
|
||||
import { pFromCallback } from 'promise-toolbox'
|
||||
|
||||
import command from './commands/info'
|
||||
|
||||
|
||||
@@ -5,15 +5,13 @@ import fs from 'fs-extra'
|
||||
import getStream from 'get-stream'
|
||||
import rimraf from 'rimraf'
|
||||
import tmp from 'tmp'
|
||||
import { fromEvent, pFromCallback } from 'promise-toolbox'
|
||||
import { getHandler } from '@xen-orchestra/fs'
|
||||
import { randomBytes } from 'crypto'
|
||||
import { fromEvent, fromCallback as pFromCallback } from 'promise-toolbox'
|
||||
|
||||
import chainVhd from './chain'
|
||||
import createReadStream from './createSyntheticStream'
|
||||
import Vhd from './vhd'
|
||||
import vhdMerge from './merge'
|
||||
import { SECTOR_SIZE } from './_constants'
|
||||
import Vhd, { chainVhd, createSyntheticStream, mergeVhd as vhdMerge } from './'
|
||||
|
||||
import { SECTOR_SIZE } from './src/_constants'
|
||||
|
||||
const initialDir = process.cwd()
|
||||
|
||||
@@ -30,18 +28,18 @@ afterEach(async () => {
|
||||
await pFromCallback(cb => rimraf(tmpDir, cb))
|
||||
})
|
||||
|
||||
async function createRandomFile (name, sizeMb) {
|
||||
async function createRandomFile(name, sizeMb) {
|
||||
await execa('bash', [
|
||||
'-c',
|
||||
`< /dev/urandom tr -dc "\\t\\n [:alnum:]" | head -c ${sizeMb}M >${name}`,
|
||||
])
|
||||
}
|
||||
|
||||
async function checkFile (vhdName) {
|
||||
async function checkFile(vhdName) {
|
||||
await execa('vhd-util', ['check', '-p', '-b', '-t', '-n', vhdName])
|
||||
}
|
||||
|
||||
async function recoverRawContent (vhdName, rawName, originalSize) {
|
||||
async function recoverRawContent(vhdName, rawName, originalSize) {
|
||||
await checkFile(vhdName)
|
||||
await execa('qemu-img', ['convert', '-fvpc', '-Oraw', vhdName, rawName])
|
||||
if (originalSize !== undefined) {
|
||||
@@ -49,7 +47,7 @@ async function recoverRawContent (vhdName, rawName, originalSize) {
|
||||
}
|
||||
}
|
||||
|
||||
async function convertFromRawToVhd (rawName, vhdName) {
|
||||
async function convertFromRawToVhd(rawName, vhdName) {
|
||||
await execa('qemu-img', ['convert', '-f', 'raw', '-Ovpc', rawName, vhdName])
|
||||
}
|
||||
|
||||
@@ -270,14 +268,18 @@ test('coalesce works in normal cases', async () => {
|
||||
|
||||
test('createSyntheticStream passes vhd-util check', async () => {
|
||||
const initalSize = 4
|
||||
const expectedVhdSize = 4197888
|
||||
await createRandomFile('randomfile', initalSize)
|
||||
await convertFromRawToVhd('randomfile', 'randomfile.vhd')
|
||||
const handler = getHandler({ url: 'file://' + process.cwd() })
|
||||
const stream = createReadStream(handler, 'randomfile.vhd')
|
||||
const stream = await createSyntheticStream(handler, 'randomfile.vhd')
|
||||
expect(stream.length).toEqual(expectedVhdSize)
|
||||
await fromEvent(
|
||||
stream.pipe(await fs.createWriteStream('recovered.vhd')),
|
||||
'finish'
|
||||
)
|
||||
await checkFile('recovered.vhd')
|
||||
const stats = await fs.stat('recovered.vhd')
|
||||
expect(stats.size).toEqual(expectedVhdSize)
|
||||
await execa('qemu-img', ['compare', 'recovered.vhd', 'randomfile'])
|
||||
})
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "vhd-lib",
|
||||
"version": "0.3.0",
|
||||
"version": "0.4.0",
|
||||
"license": "AGPL-3.0",
|
||||
"description": "Primitives for VHD file handling",
|
||||
"keywords": [],
|
||||
@@ -20,27 +20,26 @@
|
||||
"node": ">=6"
|
||||
},
|
||||
"dependencies": {
|
||||
"@babel/runtime": "^7.0.0-beta.49",
|
||||
"async-iterator-to-stream": "^1.0.2",
|
||||
"core-js": "3.0.0-beta.3",
|
||||
"from2": "^2.3.0",
|
||||
"fs-extra": "^6.0.1",
|
||||
"fs-extra": "^7.0.0",
|
||||
"limit-concurrency-decorator": "^0.4.0",
|
||||
"promise-toolbox": "^0.9.5",
|
||||
"promise-toolbox": "^0.11.0",
|
||||
"struct-fu": "^1.2.0",
|
||||
"uuid": "^3.0.1"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@babel/cli": "7.0.0-beta.49",
|
||||
"@babel/core": "7.0.0-beta.49",
|
||||
"@babel/plugin-transform-runtime": "^7.0.0-beta.49",
|
||||
"@babel/preset-env": "7.0.0-beta.49",
|
||||
"@babel/preset-flow": "7.0.0-beta.49",
|
||||
"@xen-orchestra/fs": "^0.2.0",
|
||||
"@babel/cli": "^7.0.0",
|
||||
"@babel/core": "^7.0.0",
|
||||
"@babel/preset-env": "^7.0.0",
|
||||
"@babel/preset-flow": "^7.0.0",
|
||||
"@xen-orchestra/fs": "^0.5.0",
|
||||
"babel-plugin-lodash": "^3.3.2",
|
||||
"cross-env": "^5.1.3",
|
||||
"execa": "^0.10.0",
|
||||
"execa": "^1.0.0",
|
||||
"fs-promise": "^2.0.0",
|
||||
"get-stream": "^3.0.0",
|
||||
"get-stream": "^4.0.0",
|
||||
"index-modules": "^0.3.0",
|
||||
"rimraf": "^2.6.2",
|
||||
"tmp": "^0.0.33"
|
||||
|
||||
@@ -1,13 +1,10 @@
|
||||
import { SECTOR_SIZE } from './_constants'
|
||||
|
||||
export default function computeGeometryForSize (size) {
|
||||
const totalSectors = Math.ceil(size / 512)
|
||||
export default function computeGeometryForSize(size) {
|
||||
const totalSectors = Math.min(Math.ceil(size / 512), 65535 * 16 * 255)
|
||||
let sectorsPerTrackCylinder
|
||||
let heads
|
||||
let cylinderTimesHeads
|
||||
if (totalSectors > 65535 * 16 * 255) {
|
||||
throw Error('disk is too big')
|
||||
}
|
||||
// straight copypasta from the file spec appendix on CHS Calculation
|
||||
if (totalSectors >= 65535 * 16 * 63) {
|
||||
sectorsPerTrackCylinder = 255
|
||||
|
||||
@@ -14,7 +14,7 @@ import {
|
||||
PLATFORM_WI2K,
|
||||
} from './_constants'
|
||||
|
||||
export function createFooter (
|
||||
export function createFooter(
|
||||
size,
|
||||
timestamp,
|
||||
geometry,
|
||||
@@ -39,7 +39,7 @@ export function createFooter (
|
||||
return footer
|
||||
}
|
||||
|
||||
export function createHeader (
|
||||
export function createHeader(
|
||||
maxTableEntries,
|
||||
tableOffset = HEADER_SIZE + FOOTER_SIZE,
|
||||
blockSize = VHD_BLOCK_SIZE_BYTES
|
||||
|
||||
@@ -95,7 +95,7 @@ export const unpackField = (field, buf) => {
|
||||
|
||||
// Returns the checksum of a raw struct.
|
||||
// The raw struct (footer or header) is altered with the new sum.
|
||||
export function checksumStruct (buf, struct) {
|
||||
export function checksumStruct(buf, struct) {
|
||||
const checksumField = struct.fields.checksum
|
||||
let sum = 0
|
||||
|
||||
|
||||
@@ -3,7 +3,7 @@ import { dirname, relative } from 'path'
|
||||
import Vhd from './vhd'
|
||||
import { DISK_TYPE_DIFFERENCING } from './_constants'
|
||||
|
||||
export default async function chain (
|
||||
export default async function chain(
|
||||
parentHandler,
|
||||
parentPath,
|
||||
childHandler,
|
||||
|
||||
@@ -2,7 +2,7 @@ import asyncIteratorToStream from 'async-iterator-to-stream'
|
||||
|
||||
import Vhd from './vhd'
|
||||
|
||||
export default asyncIteratorToStream(async function * (handler, path) {
|
||||
export default asyncIteratorToStream(async function*(handler, path) {
|
||||
const fd = await handler.openFile(path, 'r')
|
||||
try {
|
||||
const vhd = new Vhd(handler, fd)
|
||||
|
||||
@@ -3,7 +3,7 @@ import asyncIteratorToStream from 'async-iterator-to-stream'
|
||||
import computeGeometryForSize from './_computeGeometryForSize'
|
||||
import { createFooter } from './_createFooterHeader'
|
||||
|
||||
export default asyncIteratorToStream(async function * (size, blockParser) {
|
||||
export default asyncIteratorToStream(async function*(size, blockParser) {
|
||||
const geometry = computeGeometryForSize(size)
|
||||
const actualSize = geometry.actualSize
|
||||
const footer = createFooter(
|
||||
@@ -13,7 +13,7 @@ export default asyncIteratorToStream(async function * (size, blockParser) {
|
||||
)
|
||||
let position = 0
|
||||
|
||||
function * filePadding (paddingLength) {
|
||||
function* filePadding(paddingLength) {
|
||||
if (paddingLength > 0) {
|
||||
const chunkSize = 1024 * 1024 // 1Mo
|
||||
for (
|
||||
@@ -33,10 +33,10 @@ export default asyncIteratorToStream(async function * (size, blockParser) {
|
||||
if (paddingLength < 0) {
|
||||
throw new Error('Received out of order blocks')
|
||||
}
|
||||
yield * filePadding(paddingLength)
|
||||
yield* filePadding(paddingLength)
|
||||
yield next.data
|
||||
position = next.offsetBytes + next.data.length
|
||||
}
|
||||
yield * filePadding(actualSize - position)
|
||||
yield* filePadding(actualSize - position)
|
||||
yield footer
|
||||
})
|
||||
|
||||
@@ -17,9 +17,9 @@ import { set as setBitmap } from './_bitmap'
|
||||
const VHD_BLOCK_SIZE_SECTORS = VHD_BLOCK_SIZE_BYTES / SECTOR_SIZE
|
||||
|
||||
/**
|
||||
* @returns {Array} an array of occupation bitmap, each bit mapping an input block size of bytes
|
||||
* @returns currentVhdPositionSector the first free sector after the data
|
||||
*/
|
||||
function createBAT (
|
||||
function createBAT(
|
||||
firstBlockPosition,
|
||||
blockAddressList,
|
||||
ratio,
|
||||
@@ -36,9 +36,10 @@ function createBAT (
|
||||
(bitmapSize + VHD_BLOCK_SIZE_BYTES) / SECTOR_SIZE
|
||||
}
|
||||
})
|
||||
return currentVhdPositionSector
|
||||
}
|
||||
|
||||
export default asyncIteratorToStream(async function * (
|
||||
export default async function createReadableStream(
|
||||
diskSize,
|
||||
incomingBlockSize,
|
||||
blockAddressList,
|
||||
@@ -58,7 +59,7 @@ export default asyncIteratorToStream(async function * (
|
||||
|
||||
const maxTableEntries = Math.ceil(diskSize / VHD_BLOCK_SIZE_BYTES) + 1
|
||||
const tablePhysicalSizeBytes =
|
||||
Math.ceil(maxTableEntries * 4 / SECTOR_SIZE) * SECTOR_SIZE
|
||||
Math.ceil((maxTableEntries * 4) / SECTOR_SIZE) * SECTOR_SIZE
|
||||
|
||||
const batPosition = FOOTER_SIZE + HEADER_SIZE
|
||||
const firstBlockPosition = batPosition + tablePhysicalSizeBytes
|
||||
@@ -79,9 +80,16 @@ export default asyncIteratorToStream(async function * (
|
||||
const bitmapSize =
|
||||
Math.ceil(VHD_BLOCK_SIZE_SECTORS / 8 / SECTOR_SIZE) * SECTOR_SIZE
|
||||
const bat = Buffer.alloc(tablePhysicalSizeBytes, 0xff)
|
||||
createBAT(firstBlockPosition, blockAddressList, ratio, bat, bitmapSize)
|
||||
const endOfData = createBAT(
|
||||
firstBlockPosition,
|
||||
blockAddressList,
|
||||
ratio,
|
||||
bat,
|
||||
bitmapSize
|
||||
)
|
||||
const fileSize = endOfData * SECTOR_SIZE + FOOTER_SIZE
|
||||
let position = 0
|
||||
function * yieldAndTrack (buffer, expectedPosition) {
|
||||
function* yieldAndTrack(buffer, expectedPosition) {
|
||||
if (expectedPosition !== undefined) {
|
||||
assert.strictEqual(position, expectedPosition)
|
||||
}
|
||||
@@ -90,7 +98,7 @@ export default asyncIteratorToStream(async function * (
|
||||
position += buffer.length
|
||||
}
|
||||
}
|
||||
async function * generateFileContent (blockIterator, bitmapSize, ratio) {
|
||||
async function* generateFileContent(blockIterator, bitmapSize, ratio) {
|
||||
let currentBlock = -1
|
||||
let currentVhdBlockIndex = -1
|
||||
let currentBlockWithBitmap = Buffer.alloc(0)
|
||||
@@ -100,7 +108,7 @@ export default asyncIteratorToStream(async function * (
|
||||
const batIndex = Math.floor(next.offsetBytes / VHD_BLOCK_SIZE_BYTES)
|
||||
if (batIndex !== currentVhdBlockIndex) {
|
||||
if (currentVhdBlockIndex >= 0) {
|
||||
yield * yieldAndTrack(
|
||||
yield* yieldAndTrack(
|
||||
currentBlockWithBitmap,
|
||||
bat.readUInt32BE(currentVhdBlockIndex * 4) * SECTOR_SIZE
|
||||
)
|
||||
@@ -115,14 +123,21 @@ export default asyncIteratorToStream(async function * (
|
||||
}
|
||||
next.data.copy(
|
||||
currentBlockWithBitmap,
|
||||
bitmapSize + next.offsetBytes % VHD_BLOCK_SIZE_BYTES
|
||||
bitmapSize + (next.offsetBytes % VHD_BLOCK_SIZE_BYTES)
|
||||
)
|
||||
}
|
||||
yield * yieldAndTrack(currentBlockWithBitmap)
|
||||
yield* yieldAndTrack(currentBlockWithBitmap)
|
||||
}
|
||||
yield * yieldAndTrack(footer, 0)
|
||||
yield * yieldAndTrack(header, FOOTER_SIZE)
|
||||
yield * yieldAndTrack(bat, FOOTER_SIZE + HEADER_SIZE)
|
||||
yield * generateFileContent(blockIterator, bitmapSize, ratio)
|
||||
yield * yieldAndTrack(footer)
|
||||
})
|
||||
|
||||
async function* iterator() {
|
||||
yield* yieldAndTrack(footer, 0)
|
||||
yield* yieldAndTrack(header, FOOTER_SIZE)
|
||||
yield* yieldAndTrack(bat, FOOTER_SIZE + HEADER_SIZE)
|
||||
yield* generateFileContent(blockIterator, bitmapSize, ratio)
|
||||
yield* yieldAndTrack(footer)
|
||||
}
|
||||
|
||||
const stream = asyncIteratorToStream(iterator())
|
||||
stream.length = fileSize
|
||||
return stream
|
||||
}
|
||||
|
||||
@@ -15,18 +15,24 @@ import { test as mapTestBit } from './_bitmap'
|
||||
const resolveRelativeFromFile = (file, path) =>
|
||||
resolve('/', dirname(file), path).slice(1)
|
||||
|
||||
export default asyncIteratorToStream(function * (handler, path) {
|
||||
export default async function createSyntheticStream(handler, path) {
|
||||
const fds = []
|
||||
|
||||
const cleanup = () => {
|
||||
for (let i = 0, n = fds.length; i < n; ++i) {
|
||||
handler.closeFile(fds[i]).catch(error => {
|
||||
console.warn('createReadStream, closeFd', i, error)
|
||||
})
|
||||
}
|
||||
}
|
||||
try {
|
||||
const vhds = []
|
||||
while (true) {
|
||||
const fd = yield handler.openFile(path, 'r')
|
||||
const fd = await handler.openFile(path, 'r')
|
||||
fds.push(fd)
|
||||
const vhd = new Vhd(handler, fd)
|
||||
vhds.push(vhd)
|
||||
yield vhd.readHeaderAndFooter()
|
||||
yield vhd.readBlockAllocationTable()
|
||||
await vhd.readHeaderAndFooter()
|
||||
await vhd.readBlockAllocationTable()
|
||||
|
||||
if (vhd.footer.diskType === DISK_TYPE_DYNAMIC) {
|
||||
break
|
||||
@@ -64,14 +70,8 @@ export default asyncIteratorToStream(function * (handler, path) {
|
||||
const nBlocks = Math.ceil(footer.currentSize / header.blockSize)
|
||||
|
||||
const blocksOwner = new Array(nBlocks)
|
||||
for (
|
||||
let iBlock = 0,
|
||||
blockOffset = Math.ceil(
|
||||
(header.tableOffset + bat.length) / SECTOR_SIZE
|
||||
);
|
||||
iBlock < nBlocks;
|
||||
++iBlock
|
||||
) {
|
||||
let blockOffset = Math.ceil((header.tableOffset + bat.length) / SECTOR_SIZE)
|
||||
for (let iBlock = 0; iBlock < nBlocks; ++iBlock) {
|
||||
let blockSector = BLOCK_UNUSED
|
||||
for (let i = 0; i < nVhds; ++i) {
|
||||
if (vhds[i].containsBlock(iBlock)) {
|
||||
@@ -83,71 +83,78 @@ export default asyncIteratorToStream(function * (handler, path) {
|
||||
}
|
||||
bat.writeUInt32BE(blockSector, iBlock * 4)
|
||||
}
|
||||
const fileSize = blockOffset * SECTOR_SIZE + FOOTER_SIZE
|
||||
|
||||
footer = fuFooter.pack(footer)
|
||||
checksumStruct(footer, fuFooter)
|
||||
yield footer
|
||||
const iterator = function*() {
|
||||
try {
|
||||
footer = fuFooter.pack(footer)
|
||||
checksumStruct(footer, fuFooter)
|
||||
yield footer
|
||||
|
||||
header = fuHeader.pack(header)
|
||||
checksumStruct(header, fuHeader)
|
||||
yield header
|
||||
header = fuHeader.pack(header)
|
||||
checksumStruct(header, fuHeader)
|
||||
yield header
|
||||
|
||||
yield bat
|
||||
yield bat
|
||||
|
||||
// TODO: for generic usage the bitmap needs to be properly computed for each block
|
||||
const bitmap = Buffer.alloc(vhd.bitmapSize, 0xff)
|
||||
for (let iBlock = 0; iBlock < nBlocks; ++iBlock) {
|
||||
const owner = blocksOwner[iBlock]
|
||||
if (owner === undefined) {
|
||||
continue
|
||||
}
|
||||
|
||||
yield bitmap
|
||||
|
||||
const blocksByVhd = new Map()
|
||||
const emitBlockSectors = function * (iVhd, i, n) {
|
||||
const vhd = vhds[iVhd]
|
||||
const isRootVhd = vhd === rootVhd
|
||||
if (!vhd.containsBlock(iBlock)) {
|
||||
if (isRootVhd) {
|
||||
yield Buffer.alloc((n - i) * SECTOR_SIZE)
|
||||
} else {
|
||||
yield * emitBlockSectors(iVhd + 1, i, n)
|
||||
// TODO: for generic usage the bitmap needs to be properly computed for each block
|
||||
const bitmap = Buffer.alloc(vhd.bitmapSize, 0xff)
|
||||
for (let iBlock = 0; iBlock < nBlocks; ++iBlock) {
|
||||
const owner = blocksOwner[iBlock]
|
||||
if (owner === undefined) {
|
||||
continue
|
||||
}
|
||||
return
|
||||
}
|
||||
let block = blocksByVhd.get(vhd)
|
||||
if (block === undefined) {
|
||||
block = yield vhd._readBlock(iBlock)
|
||||
blocksByVhd.set(vhd, block)
|
||||
}
|
||||
const { bitmap, data } = block
|
||||
if (isRootVhd) {
|
||||
yield data.slice(i * SECTOR_SIZE, n * SECTOR_SIZE)
|
||||
return
|
||||
}
|
||||
while (i < n) {
|
||||
const hasData = mapTestBit(bitmap, i)
|
||||
const start = i
|
||||
do {
|
||||
++i
|
||||
} while (i < n && mapTestBit(bitmap, i) === hasData)
|
||||
if (hasData) {
|
||||
yield data.slice(start * SECTOR_SIZE, i * SECTOR_SIZE)
|
||||
} else {
|
||||
yield * emitBlockSectors(iVhd + 1, start, i)
|
||||
|
||||
yield bitmap
|
||||
|
||||
const blocksByVhd = new Map()
|
||||
const emitBlockSectors = function*(iVhd, i, n) {
|
||||
const vhd = vhds[iVhd]
|
||||
const isRootVhd = vhd === rootVhd
|
||||
if (!vhd.containsBlock(iBlock)) {
|
||||
if (isRootVhd) {
|
||||
yield Buffer.alloc((n - i) * SECTOR_SIZE)
|
||||
} else {
|
||||
yield* emitBlockSectors(iVhd + 1, i, n)
|
||||
}
|
||||
return
|
||||
}
|
||||
let block = blocksByVhd.get(vhd)
|
||||
if (block === undefined) {
|
||||
block = yield vhd._readBlock(iBlock)
|
||||
blocksByVhd.set(vhd, block)
|
||||
}
|
||||
const { bitmap, data } = block
|
||||
if (isRootVhd) {
|
||||
yield data.slice(i * SECTOR_SIZE, n * SECTOR_SIZE)
|
||||
return
|
||||
}
|
||||
while (i < n) {
|
||||
const hasData = mapTestBit(bitmap, i)
|
||||
const start = i
|
||||
do {
|
||||
++i
|
||||
} while (i < n && mapTestBit(bitmap, i) === hasData)
|
||||
if (hasData) {
|
||||
yield data.slice(start * SECTOR_SIZE, i * SECTOR_SIZE)
|
||||
} else {
|
||||
yield* emitBlockSectors(iVhd + 1, start, i)
|
||||
}
|
||||
}
|
||||
}
|
||||
yield* emitBlockSectors(owner, 0, sectorsPerBlockData)
|
||||
}
|
||||
      yield footer
    } finally {
      cleanup()
    }

        yield * emitBlockSectors(owner, 0, sectorsPerBlockData)
      }

      yield footer
    } finally {
      for (let i = 0, n = fds.length; i < n; ++i) {
        handler.closeFile(fds[i]).catch(error => {
          console.warn('createReadStream, closeFd', i, error)
        })
      }
    }

    const stream = asyncIteratorToStream(iterator())
    stream.length = fileSize
    return stream
  } catch (e) {
    cleanup()
    throw e
  }
})
}
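The heart of emitBlockSectors above is the do/while loop that groups consecutive sectors whose bitmap bit has the same value, so runs of allocated sectors are served from the child VHD while gaps fall through to the parent. A standalone sketch of that grouping follows; the testBit helper and the MSB-first bit order are assumptions made only for illustration, the real code uses mapTestBit from the library.

// Assumed helper: test bit i of a sector bitmap, most-significant bit first.
const testBit = (bitmap, i) => (bitmap[i >> 3] & (0x80 >> (i & 7))) !== 0

// Split sectors [0, n) into runs of identical bitmap bits, mirroring the
// do/while loop in emitBlockSectors.
const sectorRuns = (bitmap, n) => {
  const runs = []
  let i = 0
  while (i < n) {
    const hasData = testBit(bitmap, i)
    const start = i
    do {
      ++i
    } while (i < n && testBit(bitmap, i) === hasData)
    runs.push({ start, end: i, hasData })
  }
  return runs
}

// bitmap 0b11100001 -> sectors 0-2 and 7 are allocated
console.log(sectorRuns(Buffer.from([0b11100001]), 8))
// [ { start: 0, end: 3, hasData: true },
//   { start: 3, end: 7, hasData: false },
//   { start: 7, end: 8, hasData: true } ]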
@@ -1,3 +1,6 @@
// see https://github.com/babel/babel/issues/8450
import 'core-js/features/symbol/async-iterator'

export { default } from './vhd'
export { default as chainVhd } from './chain'
export { default as createContentStream } from './createContentStream'
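The core-js import above only has to define Symbol.asyncIterator before any async generator runs, because the Babel transform linked in the comment looks the symbol up at runtime. A minimal sketch of the situation it guards against; the counter helper is hypothetical and only for illustration.

// Load the polyfill first so Symbol.asyncIterator exists on older engines.
import 'core-js/features/symbol/async-iterator'

// hypothetical async generator, not part of vhd-lib
async function* counter(n) {
  for (let i = 0; i < n; ++i) {
    yield i
  }
}

const main = async () => {
  // for await...of relies on Symbol.asyncIterator being defined
  for await (const value of counter(3)) {
    console.log(value)
  }
}

main().catch(console.error)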
@@ -10,7 +10,7 @@ import { DISK_TYPE_DIFFERENCING, DISK_TYPE_DYNAMIC } from './_constants'
// Merge vhd child into vhd parent.
//
// TODO: rename the VHD file during the merge
export default concurrency(2)(async function merge (
export default concurrency(2)(async function merge(
  parentHandler,
  parentPath,
  childHandler,
@@ -51,7 +51,10 @@ export default concurrency(2)(async function merge (

  // finds first allocated block for the 2 following loops
  let firstBlock = 0
  while (firstBlock < maxTableEntries && !childVhd.containsBlock(firstBlock)) {
  while (
    firstBlock < maxTableEntries &&
    !childVhd.containsBlock(firstBlock)
  ) {
    ++firstBlock
  }
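For context, concurrency(2)(...) wraps merge so that at most two merges run at the same time. The decorator below is a hypothetical stand-in written only to illustrate that behaviour; it is not the implementation the project actually imports.

// Hypothetical stand-in for a concurrency-limiting decorator: at most `max`
// calls of `fn` run at once, later calls wait in a queue for a free slot.
const concurrency = max => fn => {
  let running = 0
  const queue = []
  const runNext = () => {
    if (running >= max || queue.length === 0) {
      return
    }
    ++running
    const { args, resolve, reject } = queue.shift()
    Promise.resolve()
      .then(() => fn(...args))
      .then(resolve, reject)
      .then(() => {
        --running
        runNext()
      })
  }
  return (...args) =>
    new Promise((resolve, reject) => {
      queue.push({ args, resolve, reject })
      runNext()
    })
}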
@@ -79,11 +79,11 @@ BUF_BLOCK_UNUSED.writeUInt32BE(BLOCK_UNUSED, 0)
// - sectorSize = 512

export default class Vhd {
  get batSize () {
  get batSize() {
    return computeBatSize(this.header.maxTableEntries)
  }

  constructor (handler, path) {
  constructor(handler, path) {
    this._handler = handler
    this._path = path
  }
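computeBatSize itself is not part of this hunk; a plausible sketch of what the batSize getter relies on, assuming helper names that are not shown here, is that each BAT entry is 4 bytes and the table is padded up to whole 512-byte sectors.

// Assumed helpers, for illustration only.
const SECTOR_SIZE = 512

const sectorsRoundUpNoZero = bytes => Math.ceil(bytes / SECTOR_SIZE) || 1

const computeBatSize = maxTableEntries =>
  sectorsRoundUpNoZero(maxTableEntries * 4) * SECTOR_SIZE

console.log(computeBatSize(2048)) // 8192 bytes, exactly 16 sectors
console.log(computeBatSize(100)) // 512 bytes, 400 bytes rounded up to one sector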
@@ -92,22 +92,22 @@ export default class Vhd {
  // Read functions.
  // =================================================================

  async _read (start, n) {
  async _read(start, n) {
    const { bytesRead, buffer } = await this._handler.read(
      this._path,
      Buffer.alloc(n),
      start
    )
    assert.equal(bytesRead, n)
    assert.strictEqual(bytesRead, n)
    return buffer
  }

  containsBlock (id) {
  containsBlock(id) {
    return this._getBatEntry(id) !== BLOCK_UNUSED
  }

  // Returns the first address after metadata. (In bytes)
  getEndOfHeaders () {
  getEndOfHeaders() {
    const { header } = this

    let end = FOOTER_SIZE + HEADER_SIZE
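The switch from assert.equal to assert.strictEqual in this hunk tightens the check from loose (==) to strict (===) comparison. A quick illustration with Node's built-in assert module:

import assert from 'assert'

// loose comparison: passes even across types
assert.equal(512, '512') // no error, because 512 == '512'

// strict comparison: value and type must both match
assert.strictEqual(512, 512) // no error
// assert.strictEqual(512, '512') // would throw an AssertionError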
@@ -132,7 +132,7 @@ export default class Vhd {
  }

  // Returns the first sector after data.
  getEndOfData () {
  getEndOfData() {
    let end = Math.ceil(this.getEndOfHeaders() / SECTOR_SIZE)

    const fullBlockSize = this.sectorsOfBitmap + this.sectorsPerBlock
@@ -153,7 +153,7 @@ export default class Vhd {
  // TODO: extract the checks into reusable functions:
  // - better human reporting
  // - auto repair if possible
  async readHeaderAndFooter (checkSecondFooter = true) {
  async readHeaderAndFooter(checkSecondFooter = true) {
    const buf = await this._read(0, FOOTER_SIZE + HEADER_SIZE)
    const bufFooter = buf.slice(0, FOOTER_SIZE)
    const bufHeader = buf.slice(FOOTER_SIZE)
@@ -206,7 +206,7 @@ export default class Vhd {
  }

  // Returns a buffer that contains the block allocation table of a vhd file.
  async readBlockAllocationTable () {
  async readBlockAllocationTable() {
    const { header } = this
    this.blockTable = await this._read(
      header.tableOffset,
@@ -215,11 +215,11 @@ export default class Vhd {
  }

  // return the first sector (bitmap) of a block
  _getBatEntry (block) {
  _getBatEntry(block) {
    return this.blockTable.readUInt32BE(block * 4)
  }

  _readBlock (blockId, onlyBitmap = false) {
  _readBlock(blockId, onlyBitmap = false) {
    const blockAddr = this._getBatEntry(blockId)
    if (blockAddr === BLOCK_UNUSED) {
      throw new Error(`no such block ${blockId}`)
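_getBatEntry above reads the block allocation table as a flat buffer of 4-byte big-endian sector addresses, with BLOCK_UNUSED (0xffffffff) marking blocks that are not allocated. A small standalone sketch of that layout:

// Standalone sketch of the BAT layout used above.
const BLOCK_UNUSED = 0xffffffff

// two entries, both initialised to "unused"
const blockTable = Buffer.alloc(8, 0xff)
// pretend block 1 starts at sector 0x600
blockTable.writeUInt32BE(0x600, 1 * 4)

const getBatEntry = block => blockTable.readUInt32BE(block * 4)

console.log(getBatEntry(0) === BLOCK_UNUSED) // true: block 0 is not allocated
console.log(getBatEntry(1).toString(16)) // '600'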
@@ -228,23 +228,22 @@ export default class Vhd {
    return this._read(
      sectorsToBytes(blockAddr),
      onlyBitmap ? this.bitmapSize : this.fullBlockSize
    ).then(
      buf =>
        onlyBitmap
          ? { id: blockId, bitmap: buf }
          : {
              id: blockId,
              bitmap: buf.slice(0, this.bitmapSize),
              data: buf.slice(this.bitmapSize),
              buffer: buf,
            }
    ).then(buf =>
      onlyBitmap
        ? { id: blockId, bitmap: buf }
        : {
            id: blockId,
            bitmap: buf.slice(0, this.bitmapSize),
            data: buf.slice(this.bitmapSize),
            buffer: buf,
          }
    )
  }
  // get the identifiers and first sectors of the first and last block
  // in the file
  //
  _getFirstAndLastBlocks () {
  _getFirstAndLastBlocks() {
    const n = this.header.maxTableEntries
    const bat = this.blockTable
    let i = 0
@@ -289,7 +288,7 @@ export default class Vhd {
  // =================================================================

  // Write a buffer/stream at a given position in a vhd file.
  async _write (data, offset) {
  async _write(data, offset) {
    debug(
      `_write offset=${offset} size=${
        Buffer.isBuffer(data) ? data.length : '???'
@@ -308,7 +307,7 @@ export default class Vhd {
      : fromEvent(data.pipe(stream), 'finish')
  }

  async _freeFirstBlockSpace (spaceNeededBytes) {
  async _freeFirstBlockSpace(spaceNeededBytes) {
    try {
      const { first, firstSector, lastSector } = this._getFirstAndLastBlocks()
      const tableOffset = this.header.tableOffset
@@ -348,7 +347,7 @@ export default class Vhd {
    }
  }

  async ensureBatSize (entries) {
  async ensureBatSize(entries) {
    const { header } = this
    const prevMaxTableEntries = header.maxTableEntries
    if (prevMaxTableEntries >= entries) {
@@ -373,7 +372,7 @@ export default class Vhd {
  }

  // set the first sector (bitmap) of a block
  _setBatEntry (block, blockSector) {
  _setBatEntry(block, blockSector) {
    const i = block * 4
    const { blockTable } = this
@@ -384,7 +383,7 @@ export default class Vhd {

  // Make a new empty block at vhd end.
  // Update block allocation table in context and in file.
  async createBlock (blockId) {
  async createBlock(blockId) {
    const blockAddr = Math.ceil(this.getEndOfData() / SECTOR_SIZE)

    debug(`create block ${blockId} at ${blockAddr}`)
@@ -403,7 +402,7 @@ export default class Vhd {
  }

  // Write a bitmap at a block address.
  async writeBlockBitmap (blockAddr, bitmap) {
  async writeBlockBitmap(blockAddr, bitmap) {
    const { bitmapSize } = this

    if (bitmap.length !== bitmapSize) {
@@ -420,7 +419,7 @@ export default class Vhd {
    await this._write(bitmap, sectorsToBytes(blockAddr))
  }

  async writeEntireBlock (block) {
  async writeEntireBlock(block) {
    let blockAddr = this._getBatEntry(block.id)

    if (blockAddr === BLOCK_UNUSED) {
@@ -429,7 +428,7 @@ export default class Vhd {
    await this._write(block.buffer, sectorsToBytes(blockAddr))
  }

  async writeBlockSectors (block, beginSectorId, endSectorId, parentBitmap) {
  async writeBlockSectors(block, beginSectorId, endSectorId, parentBitmap) {
    let blockAddr = this._getBatEntry(block.id)

    if (blockAddr === BLOCK_UNUSED) {
@@ -461,7 +460,7 @@ export default class Vhd {
    )
  }

  async coalesceBlock (child, blockId) {
  async coalesceBlock(child, blockId) {
    const block = await child._readBlock(blockId)
    const { bitmap, data } = block
@@ -503,7 +502,7 @@ export default class Vhd {
  }

  // Write a context footer. (At the end and beginning of a vhd file.)
  async writeFooter (onlyEndFooter = false) {
  async writeFooter(onlyEndFooter = false) {
    const { footer } = this

    const rawFooter = fuFooter.pack(footer)
@@ -523,7 +522,7 @@ export default class Vhd {
    await this._write(rawFooter, offset)
  }

  writeHeader () {
  writeHeader() {
    const { header } = this
    const rawHeader = fuHeader.pack(header)
    header.checksum = checksumStruct(rawHeader, fuHeader)
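checksumStruct is not shown in this hunk; for reference, the VHD format computes a structure checksum as the one's complement of the byte sum of the packed structure with its checksum field zeroed. A standalone sketch under that assumption, using the footer layout where the checksum field starts at byte 64:

// Sketch of the VHD checksum rule: sum every byte of the packed structure
// except the 4-byte checksum field, then take the one's complement.
const computeVhdChecksum = (buf, checksumOffset) => {
  let sum = 0
  for (let i = 0, n = buf.length; i < n; ++i) {
    if (i < checksumOffset || i >= checksumOffset + 4) {
      sum += buf[i]
    }
  }
  return ~sum >>> 0
}

// Example: a 512-byte footer whose checksum field starts at offset 64.
const footer = Buffer.alloc(512)
footer.writeUInt32BE(computeVhdChecksum(footer, 64), 64)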
@@ -536,7 +535,7 @@ export default class Vhd {
    return this._write(rawHeader, offset)
  }

  async writeData (offsetSectors, buffer) {
  async writeData(offsetSectors, buffer) {
    const bufferSizeSectors = Math.ceil(buffer.length / SECTOR_SIZE)
    const startBlock = Math.floor(offsetSectors / this.sectorsPerBlock)
    const endBufferSectors = offsetSectors + bufferSizeSectors
@@ -589,7 +588,7 @@ export default class Vhd {
    await this.writeFooter()
  }

  async ensureSpaceForParentLocators (neededSectors) {
  async ensureSpaceForParentLocators(neededSectors) {
    const firstLocatorOffset = FOOTER_SIZE + HEADER_SIZE
    const currentSpace =
      Math.floor(this.header.tableOffset / SECTOR_SIZE) -
@@ -603,7 +602,7 @@ export default class Vhd {
    return firstLocatorOffset
  }

  async setUniqueParentLocator (fileNameString) {
  async setUniqueParentLocator(fileNameString) {
    const { header } = this
    header.parentLocatorEntry[0].platformCode = PLATFORM_W2KU
    const encodedFilename = Buffer.from(fileNameString, 'utf16le')
@@ -3,11 +3,11 @@ import execa from 'execa'
import rimraf from 'rimraf'
import tmp from 'tmp'
import { createWriteStream, readFile } from 'fs-promise'
import { fromCallback as pFromCallback, fromEvent } from 'promise-toolbox'
import { fromEvent, pFromCallback } from 'promise-toolbox'

import { createFooter } from './_createFooterHeader'
import createReadableRawVHDStream from './createReadableRawStream'
import createReadableSparseVHDStream from './createReadableSparseStream'
import { createReadableRawStream, createReadableSparseStream } from './'

import { createFooter } from './src/_createFooterHeader'

const initialDir = process.cwd()
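Both the old fromCallback as pFromCallback alias and the new pFromCallback export promisify a Node-style callback API. A small usage sketch; the tmp.dir call mirrors the kind of setup these tests do and is shown only as an illustration:

import tmp from 'tmp'
import { pFromCallback } from 'promise-toolbox'

// Wrap a callback-style API into a promise: the helper hands a Node-style
// (error, result) callback to the function you give it.
const createTempDir = () => pFromCallback(cb => tmp.dir(cb))

createTempDir().then(dir => {
  console.log('working in', dir)
})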
@@ -54,7 +54,7 @@ test('ReadableRawVHDStream does not crash', async () => {
    },
  }
  const fileSize = 1000
  const stream = createReadableRawVHDStream(fileSize, mockParser)
  const stream = createReadableRawStream(fileSize, mockParser)
  const pipe = stream.pipe(createWriteStream('output.vhd'))
  await fromEvent(pipe, 'finish')
  await execa('vhd-util', ['check', '-t', '-i', '-n', 'output.vhd'])
@@ -85,7 +85,7 @@ test('ReadableRawVHDStream detects when blocks are out of order', async () => {
  }
  return expect(
    new Promise((resolve, reject) => {
      const stream = createReadableRawVHDStream(100000, mockParser)
      const stream = createReadableRawStream(100000, mockParser)
      stream.on('error', reject)
      const pipe = stream.pipe(createWriteStream('outputStream'))
      pipe.on('finish', resolve)
@@ -107,12 +107,13 @@ test('ReadableSparseVHDStream can handle a sparse file', async () => {
    },
  ]
  const fileSize = blockSize * 110
  const stream = createReadableSparseVHDStream(
  const stream = await createReadableSparseStream(
    fileSize,
    blockSize,
    blocks.map(b => b.offsetBytes),
    blocks
  )
  expect(stream.length).toEqual(4197888)
  const pipe = stream.pipe(createWriteStream('output.vhd'))
  await fromEvent(pipe, 'finish')
  await execa('vhd-util', ['check', '-t', '-i', '-n', 'output.vhd'])
3 packages/xapi-explore-sr/.babelrc.js Normal file
@@ -0,0 +1,3 @@
module.exports = require('../../@xen-orchestra/babel-config')(
  require('./package.json')
)
24 packages/xapi-explore-sr/.npmignore Normal file
@@ -0,0 +1,24 @@
/benchmark/
/benchmarks/
*.bench.js
*.bench.js.map

/examples/
example.js
example.js.map
*.example.js
*.example.js.map

/fixture/
/fixtures/
*.fixture.js
*.fixture.js.map
*.fixtures.js
*.fixtures.js.map

/test/
/tests/
*.spec.js
*.spec.js.map

__snapshots__/