Compare commits

..

3 Commits

Author SHA1 Message Date
Florent BEAUCHAMP
9bfa6db64a fix(@xen-orchestra/backups): save the vm and vmSnapshot metadata 2023-05-12 14:30:35 +02:00
Florent BEAUCHAMP
cfd3cf78f8 feat(@xen-orchestra/backups): implement secondary backups 2023-05-12 14:30:35 +02:00
Florent BEAUCHAMP
c85323baa4 refactor(@xen-orchestra/backups): reorganize backup job to prepare for secondary backup
* full and delta backup jobs are renamed to full and incremental
* inside an incremental job, the full and delta transfers are renamed to base and delta transfer
* the baseVm used for computing the snapshot is renamed to comparisasonBasisVm
* split a lot of files to extract reusable methods
* use a factory to instantiate BackupJob, the Backup* classes are now private by convention
* the VmBackups classes are instantiated by a XapiBackupJob => move them to the _backup folder
* the writers need a VmBackup in their constructor: move them to the vmBackup folder
2023-05-12 14:30:35 +02:00
334 changed files with 6671 additions and 11184 deletions

View File

@@ -28,7 +28,7 @@ module.exports = {
},
},
{
files: ['*.{integ,spec,test}.{,c,m}js'],
files: ['*.{spec,test}.{,c,m}js'],
rules: {
'n/no-unpublished-require': 'off',
'n/no-unpublished-import': 'off',

View File

@@ -21,7 +21,7 @@
"fuse-native": "^2.2.6",
"lru-cache": "^7.14.0",
"promise-toolbox": "^0.21.0",
"vhd-lib": "^4.5.0"
"vhd-lib": "^4.4.0"
},
"scripts": {
"postversion": "npm publish --access public"

View File

@@ -313,8 +313,8 @@ module.exports = class NbdClient {
const exportSize = this.#exportSize
const chunkSize = 2 * 1024 * 1024
indexGenerator = function* () {
const nbBlocks = Math.ceil(Number(exportSize / BigInt(chunkSize)))
for (let index = 0; BigInt(index) < nbBlocks; index++) {
const nbBlocks = Math.ceil(exportSize / chunkSize)
for (let index = 0; index < nbBlocks; index++) {
yield { index, size: chunkSize }
}
}

View File

@@ -0,0 +1,76 @@
'use strict'
const NbdClient = require('./index.js')
const { spawn } = require('node:child_process')
const fs = require('node:fs/promises')
const { test } = require('tap')
const tmp = require('tmp')
const { pFromCallback } = require('promise-toolbox')
const { asyncEach } = require('@vates/async-each')
const FILE_SIZE = 2 * 1024 * 1024
/**
 * Creates a temporary file of `size` bytes in which every aligned
 * 4-byte word stores its own offset as a big-endian uint32, so any
 * later read can be verified against its position in the file.
 *
 * @param {number} size - file size in bytes (expected multiple of 4)
 * @returns {Promise<string>} path of the generated file
 */
async function createTempFile(size) {
  const filePath = await pFromCallback(cb => tmp.file(cb))
  const content = Buffer.alloc(size, 0)
  for (let offset = 0; offset < size; offset += 4) {
    content.writeUInt32BE(offset, offset)
  }
  await fs.writeFile(filePath, content)
  return filePath
}
// End-to-end check of NbdClient over a plain (non-TLS) socket against a
// local nbdkit server. Requires the `nbdkit` binary on the PATH.
test('it works with unsecured network', async tap => {
  const path = await createTempFile(FILE_SIZE)
  // serve the file read-only; --exit-with-parent ties the server's
  // lifetime to this test process so it cannot leak
  const nbdServer = spawn(
    'nbdkit',
    [
      'file',
      path,
      '--newstyle', //
      '--exit-with-parent',
      '--read-only',
      '--export-name=MY_SECRET_EXPORT',
    ],
    {
      stdio: ['inherit', 'inherit', 'inherit'],
    }
  )
  const client = new NbdClient({
    address: 'localhost',
    exportname: 'MY_SECRET_EXPORT',
    secure: false,
  })
  await client.connect()
  tap.equal(client.exportSize, BigInt(FILE_SIZE))
  const CHUNK_SIZE = 128 * 1024 // non default size
  const indexes = []
  for (let i = 0; i < FILE_SIZE / CHUNK_SIZE; i++) {
    indexes.push(i)
  }
  // read multiple blocks in parallel
  await asyncEach(
    indexes,
    async i => {
      const block = await client.readBlock(i, CHUNK_SIZE)
      // each uint32 in the file encodes its own offset (see
      // createTempFile), so byte j of chunk i must read i * CHUNK_SIZE + j
      let blockOk = true
      let firstFail
      for (let j = 0; j < CHUNK_SIZE; j += 4) {
        const wanted = i * CHUNK_SIZE + j
        const found = block.readUInt32BE(j)
        blockOk = blockOk && found === wanted
        if (!blockOk && firstFail === undefined) {
          firstFail = j
        }
      }
      tap.ok(blockOk, `check block ${i} content`)
    },
    { concurrency: 8 }
  )
  await client.disconnect()
  nbdServer.kill()
  await fs.unlink(path)
})

View File

@@ -13,7 +13,7 @@
"url": "https://vates.fr"
},
"license": "ISC",
"version": "1.2.1",
"version": "1.2.0",
"engines": {
"node": ">=14.0"
},
@@ -23,7 +23,7 @@
"@xen-orchestra/async-map": "^0.1.2",
"@xen-orchestra/log": "^0.6.0",
"promise-toolbox": "^0.21.0",
"xen-api": "^1.3.3"
"xen-api": "^1.3.0"
},
"devDependencies": {
"tap": "^16.3.0",
@@ -31,6 +31,6 @@
},
"scripts": {
"postversion": "npm publish --access public",
"test-integration": "tap --lines 97 --functions 95 --branches 74 --statements 97 tests/*.integ.js"
"test-integration": "tap *.integ.js"
}
}

View File

@@ -1,182 +0,0 @@
Public Key Info:
Public Key Algorithm: RSA
Key Security Level: High (3072 bits)
modulus:
00:be:92:be:df:de:0a:ab:38:fc:1a:c0:1a:58:4d:86
b8:1f:25:10:7d:19:05:17:bf:02:3d:e9:ef:f8:c0:04
5d:6f:98:de:5c:dd:c3:0f:e2:61:61:e4:b5:9c:42:ac
3e:af:fd:30:10:e1:54:32:66:75:f6:80:90:85:05:a0
6a:14:a2:6f:a7:2e:f0:f3:52:94:2a:f2:34:fc:0d:b4
fb:28:5d:1c:11:5c:59:6e:63:34:ba:b3:fd:73:b1:48
35:00:84:53:da:6a:9b:84:ab:64:b1:a1:2b:3a:d1:5a
d7:13:7c:12:2a:4e:72:e9:96:d6:30:74:c5:71:05:14
4b:2d:01:94:23:67:4e:37:3c:1e:c1:a0:bc:34:04:25
21:11:fb:4b:6b:53:74:8f:90:93:57:af:7f:3b:78:d6
a4:87:fe:7d:ed:20:11:8b:70:54:67:b8:c9:f5:c0:6b
de:4e:e7:a5:79:ff:f7:ad:cf:10:57:f5:51:70:7b:54
68:28:9e:b9:c2:10:7b:ab:aa:11:47:9f:ec:e6:2f:09
44:4a:88:5b:dd:8c:10:b4:c4:03:25:06:d9:e0:9f:a0
0d:cf:94:4b:3b:fa:a5:17:2c:e4:67:c4:17:6a:ab:d8
c8:7a:16:41:b9:91:b7:9c:ae:8c:94:be:26:61:51:71
c1:a6:39:39:97:75:28:a9:0e:21:ea:f0:bd:71:4a:8c
e1:f8:1d:a9:22:2f:10:a8:1b:e5:a4:9a:fd:0f:fa:c6
20:bc:96:99:79:c6:ba:a4:1f:3e:d4:91:c5:af:bb:71
0a:5a:ef:69:9c:64:69:ce:5a:fe:3f:c2:24:f4:26:d4
3d:ab:ab:9a:f0:f6:f1:b1:64:a9:f4:e2:34:6a:ab:2e
95:47:b9:07:5a:39:c6:95:9c:a9:e8:ed:71:dd:c1:21
16:c8:2d:4c:2c:af:06:9d:c6:fa:fe:c5:2a:6c:b4:c3
d5:96:fc:5e:fd:ec:1c:30:b4:9d:cb:29:ef:a8:50:1c
21:
public exponent:
01:00:01:
private exponent:
25:37:c5:7d:35:01:02:65:73:9e:c9:cb:9b:59:30:a9
3e:b3:df:5f:7f:06:66:97:d0:19:45:59:af:4b:d8:ce
62:a0:09:35:3b:bd:ff:99:27:89:95:bf:fe:0f:6b:52
26:ce:9c:97:7f:5a:11:29:bf:79:ef:ab:c9:be:ca:90
4d:0d:58:1e:df:65:01:30:2c:6d:a2:b5:c4:4f:ec:fb
6b:eb:9b:32:ac:c5:6e:70:83:78:be:f4:0d:a7:1e:c1
f3:22:e4:b9:70:3e:85:0f:6f:ef:dc:d8:f3:78:b5:73
f1:83:36:8c:fa:9b:28:91:63:ad:3c:f0:de:5c:ae:94
eb:ea:36:03:20:06:bf:74:c7:50:eb:52:36:1a:65:21
eb:40:17:7f:93:61:dd:33:d0:02:bc:ec:6d:31:f1:41
5a:a9:d1:f0:00:66:4c:c4:18:47:d5:67:e3:cd:bb:83
44:07:ab:62:83:21:dc:d8:e6:89:37:08:bb:9d:ea:62
c2:5d:ce:85:c2:dc:48:27:0c:a4:23:61:b7:30:e7:26
44:dc:1e:5c:2e:16:35:2b:2e:a6:e6:a4:ce:1f:9b:e9
fe:96:fa:49:1d:fb:2a:df:bc:bf:46:da:52:f8:37:8a
84:ab:e4:73:e6:46:56:b5:b4:3d:e1:63:eb:02:8e:d7
67:96:c4:dc:28:6d:6b:b6:0c:a3:0b:db:87:29:ad:f9
ec:73:b6:55:a3:40:32:13:84:c7:2f:33:74:04:dc:42
00:11:9c:fb:fc:62:35:b3:82:c3:3c:28:80:e8:09:a8
97:c7:c1:2e:3d:27:fa:4f:9b:fc:c2:34:58:41:5c:a1
e2:70:2e:2f:82:ad:bd:bd:8e:dd:23:12:25:de:89:70
60:75:48:90:80:ac:55:74:51:6f:49:9e:7f:63:41:8b
3c:b1:f5:c3:6b:4b:5a:50:a6:4d:38:e8:82:c2:04:c8
30:fd:06:9b:c1:04:27:b6:63:3a:5e:f5:4d:00:c3:d1
prime1:
00:f6:00:2e:7d:89:61:24:16:5e:87:ca:18:6c:03:b8
b4:33:df:4a:a7:7f:db:ed:39:15:41:12:61:4f:4e:b4
de:ab:29:d9:0c:6c:01:7e:53:2e:ee:e7:5f:a2:e4:6d
c6:4b:07:4e:d8:a3:ae:45:06:97:bd:18:a3:e9:dd:29
54:64:6d:f0:af:08:95:ae:ae:3e:71:63:76:2a:a1:18
c4:b1:fc:bc:3d:42:15:74:b3:c5:38:1f:5d:92:f1:b2
c6:3f:10:fe:35:1a:c6:b1:ce:70:38:ff:08:5c:de:61
79:c7:50:91:22:4d:e9:c8:18:49:e2:5c:91:84:86:e2
4d:0f:6e:9b:0d:81:df:aa:f3:59:75:56:e9:33:18:dd
ab:39:da:e2:25:01:05:a1:6e:23:59:15:2c:89:35:c7
ae:9c:c7:ea:88:9a:1a:f3:48:07:11:82:59:79:8c:62
53:06:37:30:14:b3:82:b1:50:fc:ae:b8:f7:1c:57:44
7d:
prime2:
00:c6:51:cc:dc:88:2e:cf:98:90:10:19:e0:d3:a4:d1
3f:dc:b0:29:d3:bb:26:ee:eb:00:17:17:d1:d1:bb:9b
34:b1:4e:af:b5:6c:1c:54:53:b4:bb:55:da:f7:78:cd
38:b4:2e:3a:8c:63:80:3b:64:9c:b4:2b:cd:dd:50:0b
05:d2:00:7a:df:8e:c3:e6:29:e0:9c:d8:40:b7:11:09
f4:38:df:f6:ed:93:1e:18:d4:93:fa:8d:ee:82:9c:0f
c1:88:26:84:9d:4f:ae:8a:17:d5:55:54:4c:c6:0a:ac
4d:ec:33:51:68:0f:4b:92:2e:04:57:fe:15:f5:00:46
5c:8e:ad:09:2c:e7:df:d5:36:7a:4e:bd:da:21:22:d7
58:b4:72:93:94:af:34:cc:e2:b8:d0:4f:0b:5d:97:08
12:19:17:34:c5:15:49:00:48:56:13:b8:45:4e:3b:f8
bc:d5:ab:d9:6d:c2:4a:cc:01:1a:53:4d:46:50:49:3b
75:
coefficient:
63:67:50:29:10:6a:85:a3:dc:51:90:20:76:86:8c:83
8e:d5:ff:aa:75:fd:b5:f8:31:b0:96:6c:18:1d:5b:ed
a4:2e:47:8d:9c:c2:1e:2c:a8:6d:4b:10:a5:c2:53:46
8a:9a:84:91:d7:fc:f5:cc:03:ce:b9:3d:5c:01:d2:27
99:7b:79:89:4f:a1:12:e3:05:5d:ee:10:f6:8c:e6:ce
5e:da:32:56:6d:6f:eb:32:b4:75:7b:94:49:d8:2d:9e
4d:19:59:2e:e4:0b:bc:95:df:df:65:67:a1:dd:c6:2b
99:f4:76:e8:9f:fa:57:1d:ca:f9:58:a9:ce:9b:30:5c
42:8a:ba:05:e7:e2:15:45:25:bc:e9:68:c1:8b:1a:37
cc:e1:aa:45:2e:94:f5:81:47:1e:64:7f:c0:c1:b7:a8
21:58:18:a9:a0:ed:e0:27:75:bf:65:81:6b:e4:1d:5a
b7:7e:df:d8:28:c6:36:21:19:c8:6e:da:ca:9e:da:84
exp1:
00:ba:d7:fe:77:a9:0d:98:2c:49:56:57:c0:5e:e2:20
ba:f6:1f:26:03:bc:d0:5d:08:9b:45:16:61:c4:ab:e2
22:b1:dc:92:17:a6:3d:28:26:a4:22:1e:a8:7b:ff:86
05:33:5d:74:9c:85:0d:cb:2d:ab:b8:9b:6b:7c:28:57
c8:da:92:ca:59:17:6b:21:07:05:34:78:37:fb:3e:ea
a2:13:12:04:23:7e:fa:ee:ed:cf:e0:c5:a9:fb:ff:0a
2b:1b:21:9c:02:d7:b8:8c:ba:60:70:59:fc:8f:14:f4
f2:5a:d9:ad:b2:61:7d:2c:56:8e:5f:98:b1:89:f8:2d
10:1c:a5:84:ad:28:b4:aa:92:34:a3:34:04:e1:a3:84
52:16:1a:52:e3:8a:38:2d:99:8a:cd:91:90:87:12:ca
fc:ab:e6:08:14:03:00:6f:41:88:e4:da:9d:7c:fd:8c
7c:c4:de:cb:ed:1d:3f:29:d0:7a:6b:76:df:71:ae:32
bd:
exp2:
4a:e9:d3:6c:ea:b4:64:0e:c9:3c:8b:c9:f5:a8:a8:b2
6a:f6:d0:95:fe:78:32:7f:ea:c4:ce:66:9f:c7:32:55
b1:34:7c:03:18:17:8b:73:23:2e:30:bc:4a:07:03:de
8b:91:7a:e4:55:21:b7:4d:c6:33:f8:e8:06:d5:99:94
55:43:81:26:b9:93:1e:7a:6b:32:54:2d:fd:f9:1d:bd
77:4e:82:c4:33:72:87:06:a5:ef:5b:75:e1:38:7a:6b
2c:b7:00:19:3c:64:3e:1d:ca:a4:34:f7:db:47:64:d6
fa:86:58:15:ea:d1:2d:22:dc:d9:30:4d:b3:02:ab:91
83:03:b2:17:98:6f:60:e6:f7:44:8f:4a:ba:81:a2:bf
0b:4a:cc:9c:b9:a2:44:52:d0:65:3f:b6:97:5f:d9:d8
9c:49:bb:d1:46:bd:10:b2:42:71:a8:85:e5:8b:99:e6
1b:00:93:5d:76:ab:32:6c:a8:39:17:53:9c:38:4d:91
Public Key PIN:
pin-sha256:ISh/UeFjUG5Gwrpx6hMUGQPvg9wOKjOkHmRbs4YjZqs=
Public Key ID:
sha256:21287f51e163506e46c2ba71ea13141903ef83dc0e2a33a41e645bb3862366ab
sha1:1a48455111ac45fb5807c5cdb7b20b896c52f0b6
-----BEGIN RSA PRIVATE KEY-----
MIIG4wIBAAKCAYEAvpK+394Kqzj8GsAaWE2GuB8lEH0ZBRe/Aj3p7/jABF1vmN5c
3cMP4mFh5LWcQqw+r/0wEOFUMmZ19oCQhQWgahSib6cu8PNSlCryNPwNtPsoXRwR
XFluYzS6s/1zsUg1AIRT2mqbhKtksaErOtFa1xN8EipOcumW1jB0xXEFFEstAZQj
Z043PB7BoLw0BCUhEftLa1N0j5CTV69/O3jWpIf+fe0gEYtwVGe4yfXAa95O56V5
//etzxBX9VFwe1RoKJ65whB7q6oRR5/s5i8JREqIW92MELTEAyUG2eCfoA3PlEs7
+qUXLORnxBdqq9jIehZBuZG3nK6MlL4mYVFxwaY5OZd1KKkOIerwvXFKjOH4Haki
LxCoG+Wkmv0P+sYgvJaZeca6pB8+1JHFr7txClrvaZxkac5a/j/CJPQm1D2rq5rw
9vGxZKn04jRqqy6VR7kHWjnGlZyp6O1x3cEhFsgtTCyvBp3G+v7FKmy0w9WW/F79
7BwwtJ3LKe+oUBwhAgMBAAECggGAJTfFfTUBAmVznsnLm1kwqT6z319/BmaX0BlF
Wa9L2M5ioAk1O73/mSeJlb/+D2tSJs6cl39aESm/ee+ryb7KkE0NWB7fZQEwLG2i
tcRP7Ptr65syrMVucIN4vvQNpx7B8yLkuXA+hQ9v79zY83i1c/GDNoz6myiRY608
8N5crpTr6jYDIAa/dMdQ61I2GmUh60AXf5Nh3TPQArzsbTHxQVqp0fAAZkzEGEfV
Z+PNu4NEB6tigyHc2OaJNwi7nepiwl3OhcLcSCcMpCNhtzDnJkTcHlwuFjUrLqbm
pM4fm+n+lvpJHfsq37y/RtpS+DeKhKvkc+ZGVrW0PeFj6wKO12eWxNwobWu2DKML
24cprfnsc7ZVo0AyE4THLzN0BNxCABGc+/xiNbOCwzwogOgJqJfHwS49J/pPm/zC
NFhBXKHicC4vgq29vY7dIxIl3olwYHVIkICsVXRRb0mef2NBizyx9cNrS1pQpk04
6ILCBMgw/QabwQQntmM6XvVNAMPRAoHBAPYALn2JYSQWXofKGGwDuLQz30qnf9vt
ORVBEmFPTrTeqynZDGwBflMu7udfouRtxksHTtijrkUGl70Yo+ndKVRkbfCvCJWu
rj5xY3YqoRjEsfy8PUIVdLPFOB9dkvGyxj8Q/jUaxrHOcDj/CFzeYXnHUJEiTenI
GEniXJGEhuJND26bDYHfqvNZdVbpMxjdqzna4iUBBaFuI1kVLIk1x66cx+qImhrz
SAcRgll5jGJTBjcwFLOCsVD8rrj3HFdEfQKBwQDGUczciC7PmJAQGeDTpNE/3LAp
07sm7usAFxfR0bubNLFOr7VsHFRTtLtV2vd4zTi0LjqMY4A7ZJy0K83dUAsF0gB6
347D5ingnNhAtxEJ9Djf9u2THhjUk/qN7oKcD8GIJoSdT66KF9VVVEzGCqxN7DNR
aA9Lki4EV/4V9QBGXI6tCSzn39U2ek692iEi11i0cpOUrzTM4rjQTwtdlwgSGRc0
xRVJAEhWE7hFTjv4vNWr2W3CSswBGlNNRlBJO3UCgcEAutf+d6kNmCxJVlfAXuIg
uvYfJgO80F0Im0UWYcSr4iKx3JIXpj0oJqQiHqh7/4YFM110nIUNyy2ruJtrfChX
yNqSylkXayEHBTR4N/s+6qITEgQjfvru7c/gxan7/worGyGcAte4jLpgcFn8jxT0
8lrZrbJhfSxWjl+YsYn4LRAcpYStKLSqkjSjNATho4RSFhpS44o4LZmKzZGQhxLK
/KvmCBQDAG9BiOTanXz9jHzE3svtHT8p0Hprdt9xrjK9AoHASunTbOq0ZA7JPIvJ
9aiosmr20JX+eDJ/6sTOZp/HMlWxNHwDGBeLcyMuMLxKBwPei5F65FUht03GM/jo
BtWZlFVDgSa5kx56azJULf35Hb13ToLEM3KHBqXvW3XhOHprLLcAGTxkPh3KpDT3
20dk1vqGWBXq0S0i3NkwTbMCq5GDA7IXmG9g5vdEj0q6gaK/C0rMnLmiRFLQZT+2
l1/Z2JxJu9FGvRCyQnGoheWLmeYbAJNddqsybKg5F1OcOE2RAoHAY2dQKRBqhaPc
UZAgdoaMg47V/6p1/bX4MbCWbBgdW+2kLkeNnMIeLKhtSxClwlNGipqEkdf89cwD
zrk9XAHSJ5l7eYlPoRLjBV3uEPaM5s5e2jJWbW/rMrR1e5RJ2C2eTRlZLuQLvJXf
32Vnod3GK5n0duif+lcdyvlYqc6bMFxCiroF5+IVRSW86WjBixo3zOGqRS6U9YFH
HmR/wMG3qCFYGKmg7eAndb9lgWvkHVq3ft/YKMY2IRnIbtrKntqE
-----END RSA PRIVATE KEY-----

View File

@@ -1,169 +0,0 @@
'use strict'
const NbdClient = require('../index.js')
const { spawn, exec } = require('node:child_process')
const fs = require('node:fs/promises')
const { test } = require('tap')
const tmp = require('tmp')
const { pFromCallback } = require('promise-toolbox')
const { Socket } = require('node:net')
const { NBD_DEFAULT_PORT } = require('../constants.js')
const assert = require('node:assert')
const FILE_SIZE = 10 * 1024 * 1024
/**
 * Builds a temporary file of `size` bytes where each aligned 4-byte
 * word contains its own offset encoded as a big-endian uint32; reads
 * performed later can therefore be checked against their position.
 *
 * @param {number} size - file size in bytes (expected multiple of 4)
 * @returns {Promise<string>} path of the generated file
 */
async function createTempFile(size) {
  const tempPath = await pFromCallback(cb => tmp.file(cb))
  const payload = Buffer.alloc(size, 0)
  for (let pos = 0; pos < size; pos += 4) {
    payload.writeUInt32BE(pos, pos)
  }
  await fs.writeFile(tempPath, payload)
  return tempPath
}
/**
 * Starts a nbdkit server exposing `path` (read-only, TLS enabled) and
 * waits until its TCP port accepts connections.
 *
 * Fix: the original passed '--exit-with-parent' twice; the duplicate
 * argument has been removed.
 *
 * @param {string} path - file to expose as the NBD export
 * @returns {Promise<import('node:child_process').ChildProcess>} the running nbdkit process
 * @throws if the NBD port is still unreachable after 5 attempts
 */
async function spawnNbdKit(path) {
  let tries = 5
  const nbdServer = spawn(
    'nbdkit',
    [
      'file',
      path,
      '--newstyle', //
      '--exit-with-parent', // die with the test process so it cannot leak
      '--read-only',
      '--export-name=MY_SECRET_EXPORT',
      '--tls=on',
      '--tls-certificates=./tests/',
      // '--tls-verify-peer',
      // '--verbose',
    ],
    {
      stdio: ['inherit', 'inherit', 'inherit'],
    }
  )
  nbdServer.on('error', err => {
    console.error(err)
  })
  // wait for the server to be ready: poll the NBD port until a plain TCP
  // connection succeeds, retrying once per second, up to `tries` times
  do {
    try {
      const socket = new Socket()
      await new Promise((resolve, reject) => {
        socket.connect(NBD_DEFAULT_PORT, 'localhost')
        socket.once('error', reject)
        socket.once('connect', resolve)
      })
      socket.destroy()
      break
    } catch (err) {
      tries--
      if (tries <= 0) {
        throw err
      } else {
        await new Promise(resolve => setTimeout(resolve, 1000))
      }
    }
  } while (true)
  return nbdServer
}
/**
 * Kills the oldest (-o) running nbdkit process with SIGKILL, used to
 * simulate an abrupt server crash in the middle of a transfer.
 *
 * @returns {Promise<void>} resolves when pkill succeeds, rejects otherwise
 */
async function killNbdKit() {
  return new Promise((resolve, reject) => {
    exec('pkill -9 -f -o nbdkit', err => {
      if (err) {
        reject(err)
      } else {
        resolve()
      }
    })
  })
}
// NOTE(review): despite its name, this test exercises the TLS code path
// (nbdkit is spawned with --tls=on by spawnNbdKit) and also restarts the
// server mid-transfer to verify the client's reconnection handling.
test('it works with unsecured network', async tap => {
  const path = await createTempFile(FILE_SIZE)
  let nbdServer = await spawnNbdKit(path)
  const client = new NbdClient(
    {
      address: '127.0.0.1',
      exportname: 'MY_SECRET_EXPORT',
      // self-signed certificate matching the one served by nbdkit
      cert: `-----BEGIN CERTIFICATE-----
MIIDazCCAlOgAwIBAgIUeHpQ0IeD6BmP2zgsv3LV3J4BI/EwDQYJKoZIhvcNAQEL
BQAwRTELMAkGA1UEBhMCQVUxEzARBgNVBAgMClNvbWUtU3RhdGUxITAfBgNVBAoM
GEludGVybmV0IFdpZGdpdHMgUHR5IEx0ZDAeFw0yMzA1MTcxMzU1MzBaFw0yNDA1
MTYxMzU1MzBaMEUxCzAJBgNVBAYTAkFVMRMwEQYDVQQIDApTb21lLVN0YXRlMSEw
HwYDVQQKDBhJbnRlcm5ldCBXaWRnaXRzIFB0eSBMdGQwggEiMA0GCSqGSIb3DQEB
AQUAA4IBDwAwggEKAoIBAQC/8wLopj/iZY6ijmpvgCJsl+zY0hQZQcIoaCs0H75u
8PPSzHedtOLURAkJeMmIS40UY/eIvHh7yZolevaSJLNT2Iolscvc2W9NCF4N1V6y
zs4pDzP+YPF7Q8ldNaQIX0bAk4PfaMSM+pLh67u+uI40732AfQqD01BNCTD/uHRB
lKnQuqQpe9UM9UzRRVejpu1r19D4dJruAm6y2SJVTeT4a1sSJixl6I1YPmt80FJh
gq9O2KRGbXp1xIjemWgW99MHg63pTgxEiULwdJOGgmqGRDzgZKJS5UUpxe/ViEO4
59I18vIkgibaRYhENgmnP3lIzTOLlUe07tbSML5RGBbBAgMBAAGjUzBRMB0GA1Ud
DgQWBBR/8+zYoL0H0LdWfULHg1LynFdSbzAfBgNVHSMEGDAWgBR/8+zYoL0H0LdW
fULHg1LynFdSbzAPBgNVHRMBAf8EBTADAQH/MA0GCSqGSIb3DQEBCwUAA4IBAQBD
OF5bTmbDEGoZ6OuQaI0vyya/T4FeaoWmh22gLeL6dEEmUVGJ1NyMTOvG9GiGJ8OM
QhD1uHJei45/bXOYIDGey2+LwLWye7T4vtRFhf8amYh0ReyP/NV4/JoR/U3pTSH6
tns7GZ4YWdwUhvOOlm17EQKVO/hP3t9mp74gcjdL4bCe5MYSheKuNACAakC1OR0U
ZakJMP9ijvQuq8spfCzrK+NbHKNHR9tEgQw+ax/t1Au4dGVtFbcoxqCrx2kTl0RP
CYu1Xn/FVPx1HoRgWc7E8wFhDcA/P3SJtfIQWHB9FzSaBflKGR4t8WCE2eE8+cTB
57ABhfYpMlZ4aHjuN1bL
-----END CERTIFICATE-----
`,
    },
    {
      readAhead: 2,
    }
  )
  await client.connect()
  tap.equal(client.exportSize, BigInt(FILE_SIZE))
  const CHUNK_SIZE = 1024 * 1024 // non default size
  const indexes = []
  for (let i = 0; i < FILE_SIZE / CHUNK_SIZE; i++) {
    indexes.push(i)
  }
  const nbdIterator = client.readBlocks(function* () {
    for (const index of indexes) {
      yield { index, size: CHUNK_SIZE }
    }
  })
  let i = 0
  for await (const block of nbdIterator) {
    // each uint32 in the file stores its own offset (see createTempFile)
    let blockOk = true
    let firstFail
    for (let j = 0; j < CHUNK_SIZE; j += 4) {
      const wanted = i * CHUNK_SIZE + j
      const found = block.readUInt32BE(j)
      blockOk = blockOk && found === wanted
      if (!blockOk && firstFail === undefined) {
        firstFail = j
      }
    }
    tap.ok(blockOk, `check block ${i} content`)
    i++
    // flaky server is flaky: restart nbdkit every 7 blocks to make sure
    // the client survives a server crash mid-transfer
    if (i % 7 === 0) {
      // kill the older nbdkit process
      await killNbdKit()
      nbdServer = await spawnNbdKit(path)
    }
  }
  // we can reuse the connection to read other blocks
  // default iterator
  const nbdIteratorWithDefaultBlockIterator = client.readBlocks()
  let nb = 0
  for await (const block of nbdIteratorWithDefaultBlockIterator) {
    nb++
    tap.equal(block.length, 2 * 1024 * 1024)
  }
  tap.equal(nb, 5)
  // NOTE(review): this rejection check is not awaited — a failure would
  // surface as an unhandled rejection; consider `await assert.rejects(...)`
  assert.rejects(() => client.readBlock(100, CHUNK_SIZE))
  await client.disconnect()
  // double disconnection shouldn't pose any problem
  await client.disconnect()
  nbdServer.kill()
  await fs.unlink(path)
})

View File

@@ -1,21 +0,0 @@
-----BEGIN CERTIFICATE-----
MIIDazCCAlOgAwIBAgIUeHpQ0IeD6BmP2zgsv3LV3J4BI/EwDQYJKoZIhvcNAQEL
BQAwRTELMAkGA1UEBhMCQVUxEzARBgNVBAgMClNvbWUtU3RhdGUxITAfBgNVBAoM
GEludGVybmV0IFdpZGdpdHMgUHR5IEx0ZDAeFw0yMzA1MTcxMzU1MzBaFw0yNDA1
MTYxMzU1MzBaMEUxCzAJBgNVBAYTAkFVMRMwEQYDVQQIDApTb21lLVN0YXRlMSEw
HwYDVQQKDBhJbnRlcm5ldCBXaWRnaXRzIFB0eSBMdGQwggEiMA0GCSqGSIb3DQEB
AQUAA4IBDwAwggEKAoIBAQC/8wLopj/iZY6ijmpvgCJsl+zY0hQZQcIoaCs0H75u
8PPSzHedtOLURAkJeMmIS40UY/eIvHh7yZolevaSJLNT2Iolscvc2W9NCF4N1V6y
zs4pDzP+YPF7Q8ldNaQIX0bAk4PfaMSM+pLh67u+uI40732AfQqD01BNCTD/uHRB
lKnQuqQpe9UM9UzRRVejpu1r19D4dJruAm6y2SJVTeT4a1sSJixl6I1YPmt80FJh
gq9O2KRGbXp1xIjemWgW99MHg63pTgxEiULwdJOGgmqGRDzgZKJS5UUpxe/ViEO4
59I18vIkgibaRYhENgmnP3lIzTOLlUe07tbSML5RGBbBAgMBAAGjUzBRMB0GA1Ud
DgQWBBR/8+zYoL0H0LdWfULHg1LynFdSbzAfBgNVHSMEGDAWgBR/8+zYoL0H0LdW
fULHg1LynFdSbzAPBgNVHRMBAf8EBTADAQH/MA0GCSqGSIb3DQEBCwUAA4IBAQBD
OF5bTmbDEGoZ6OuQaI0vyya/T4FeaoWmh22gLeL6dEEmUVGJ1NyMTOvG9GiGJ8OM
QhD1uHJei45/bXOYIDGey2+LwLWye7T4vtRFhf8amYh0ReyP/NV4/JoR/U3pTSH6
tns7GZ4YWdwUhvOOlm17EQKVO/hP3t9mp74gcjdL4bCe5MYSheKuNACAakC1OR0U
ZakJMP9ijvQuq8spfCzrK+NbHKNHR9tEgQw+ax/t1Au4dGVtFbcoxqCrx2kTl0RP
CYu1Xn/FVPx1HoRgWc7E8wFhDcA/P3SJtfIQWHB9FzSaBflKGR4t8WCE2eE8+cTB
57ABhfYpMlZ4aHjuN1bL
-----END CERTIFICATE-----

View File

@@ -1,28 +0,0 @@
-----BEGIN PRIVATE KEY-----
MIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQC/8wLopj/iZY6i
jmpvgCJsl+zY0hQZQcIoaCs0H75u8PPSzHedtOLURAkJeMmIS40UY/eIvHh7yZol
evaSJLNT2Iolscvc2W9NCF4N1V6yzs4pDzP+YPF7Q8ldNaQIX0bAk4PfaMSM+pLh
67u+uI40732AfQqD01BNCTD/uHRBlKnQuqQpe9UM9UzRRVejpu1r19D4dJruAm6y
2SJVTeT4a1sSJixl6I1YPmt80FJhgq9O2KRGbXp1xIjemWgW99MHg63pTgxEiULw
dJOGgmqGRDzgZKJS5UUpxe/ViEO459I18vIkgibaRYhENgmnP3lIzTOLlUe07tbS
ML5RGBbBAgMBAAECggEATLYiafcTHfgnZmjTOad0WoDnC4n9tVBV948WARlUooLS
duL3RQRHCLz9/ZaTuFA1XDpNcYyc/B/IZoU7aJGZR3+JSmJBjowpUphu+klVNNG4
i6lDRrzYlUI0hfdLjHsDTDBIKi91KcB0lix/VkvsrVQvDHwsiR2ZAIiVWAWQFKrR
5O3DhSTHbqyq47uR58rWr4Zf3zvZaUl841AS1yELzCiZqz7AenvyWphim0c0XA5d
I63CEShntHnEAA9OMcP8+BNf/3AmqB4welY+m8elB3aJNH+j7DKq/AWqaM5nl2PC
cS6qgpxwOyTxEOyj1xhwK5ZMRR3heW3NfutIxSOPlwKBgQDB9ZkrBeeGVtCISO7C
eCANzSLpeVrahTvaCSQLdPHsLRLDUc+5mxdpi3CaRlzYs3S1OWdAtyWX9mBryltF
qDPhCNjFDyHok4D3wLEWdS9oUVwEKUM8fOPW3tXLLiMM7p4862Qo7LqnqHzPqsnz
22iZo5yjcc7aLJ+VmFrbAowwOwKBgQD9WNCvczTd7Ymn7zEvdiAyNoS0OZ0orwEJ
zGaxtjqVguGklNfrb/UB+eKNGE80+YnMiSaFc9IQPetLntZdV0L7kWYdCI8kGDNA
DbVRCOp+z8DwAojlrb/zsYu23anQozT3WeHxVU66lNuyEQvSW2tJa8gN1htrD7uY
5KLibYrBMwKBgEM0iiHyJcrSgeb2/mO7o7+keJhVSDm3OInP6QFfQAQJihrLWiKB
rpcPjbCm+LzNUX8JqNEvpIMHB1nR/9Ye9frfSdzd5W3kzicKSVHywL5wkmWOtpFa
5Mcq5wFDtzlf5MxO86GKhRJauwRptRgdyhySKFApuva1x4XaCIEiXNjJAoGBAN82
t3c+HCBEv3o05rMYcrmLC1T3Rh6oQlPtwbVmByvfywsFEVCgrc/16MPD3VWhXuXV
GRmPuE8THxLbead30M5xhvShq+xzXgRbj5s8Lc9ZIHbW5OLoOS1vCtgtaQcoJOyi
Rs4pCVqe+QpktnO6lEZ2Libys+maTQEiwNibBxu9AoGAUG1V5aKMoXa7pmGeuFR6
ES+1NDiCt6yDq9BsLZ+e2uqvWTkvTGLLwvH6xf9a0pnnILd0AUTKAAaoUdZS6++E
cGob7fxMwEE+UETp0QBgLtfjtExMOFwr2avw8PV4CYEUkPUAm2OFB2Twh+d/PNfr
FAxF1rN47SBPNbFI8N4TFsg=
-----END PRIVATE KEY-----

View File

@@ -1 +0,0 @@
../../scripts/npmignore

View File

@@ -1,22 +0,0 @@
The MIT License (MIT)
Copyright (c) 2015 reedog117
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

View File

@@ -1,127 +0,0 @@
forked from https://github.com/reedog117/node-vsphere-soap
# node-vsphere-soap
[![Join the chat at https://gitter.im/reedog117/node-vsphere-soap](https://badges.gitter.im/Join%20Chat.svg)](https://gitter.im/reedog117/node-vsphere-soap?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge)
This is a Node.js module to connect to VMware vCenter servers and/or ESXi hosts and perform operations using the [vSphere Web Services API]. If you're feeling really adventurous, you can use this module to port vSphere operations from other languages (such as the Perl, Python, and Go libraries that exist) and have fully native Node.js code controlling your VMware virtual infrastructure!
This is very much in alpha.
## Authors
- Patrick C - [@reedog117]
## Version
0.0.2-5
## Installation
```sh
$ npm install node-vsphere-soap --save
```
## Sample Code
### To connect to a vCenter server:
var nvs = require('node-vsphere-soap');
var vc = new nvs.Client(host, user, password, sslVerify);
vc.once('ready', function() {
// perform work here
});
vc.once('error', function(err) {
// handle error here
});
#### Arguments
- host = hostname or IP of vCenter/ESX/ESXi server
- user = username
- password = password
- sslVerify = true|false - set to false if you have self-signed/unverified certificates
#### Events
- ready = emits when session authenticated with server
- error = emits when there's an error
- _err_ contains the error
#### Client instance variables
- serviceContent - ServiceContent object retrieved by RetrieveServiceContent API call
- userName - username of authenticated user
- fullName - full name of authenticated user
### To run a command:
var vcCmd = vc.runCommand( commandToRun, arguments );
vcCmd.once('result', function( result, raw, soapHeader) {
// handle results
});
vcCmd.once('error', function( err) {
// handle errors
});
#### Arguments
- commandToRun = Method from the vSphere API
- arguments = JSON document containing arguments to send
#### Events
- result = emits when session authenticated with server
- _result_ contains the JSON-formatted result from the server
- _raw_ contains the raw SOAP XML response from the server
- _soapHeader_ contains any soapHeaders from the server
- error = emits when there's an error
- _err_ contains the error
Make sure you check out tests/vsphere-soap.test.js for examples on how to create commands to run
## Development
node-vsphere-soap uses a number of open source projects to work properly:
- [node.js] - evented I/O for the backend
- [node-soap] - SOAP client for Node.js
- [soap-cookie] - cookie authentication for the node-soap module
- [lodash] - for quickly manipulating JSON
- [lab] - testing engine
- [code] - assertion engine used with lab
Want to contribute? Great!
### Todo's
- Write More Tests
- Create Travis CI test harness with a fake vCenter Instance
- Add Code Comments
### Testing
I have been testing on a Mac with node v0.10.36 and both ESXi and vCenter 5.5.
To edit tests, edit the file **test/vsphere-soap.test.js**
To point the module at your own vCenter/ESXi host, edit **config-test.stub.js** and save it as **config-test.js**
To run test scripts:
```sh
$ npm test
```
## License
MIT
[vSphere Web Services API]: http://pubs.vmware.com/vsphere-55/topic/com.vmware.wssdk.apiref.doc/right-pane.html
[node-soap]: https://github.com/vpulim/node-soap
[node.js]: http://nodejs.org/
[soap-cookie]: https://github.com/shanestillwell/soap-cookie
[code]: https://github.com/hapijs/code
[lab]: https://github.com/hapijs/lab
[lodash]: https://lodash.com/
[@reedog117]: http://www.twitter.com/reedog117

View File

@@ -1,231 +0,0 @@
'use strict'
/*
node-vsphere-soap
client.js
This file creates the Client class
- when the class is instantiated, a connection will be made to the ESXi/vCenter server to verify that the creds are good
- upon a bad login, the connection will be terminated
*/
const EventEmitter = require('events').EventEmitter
const axios = require('axios')
const https = require('node:https')
const util = require('util')
const soap = require('soap')
const Cookie = require('soap-cookie') // required for session persistence
// Client class
// inherits from EventEmitter
// possible events: connect, error, ready
/**
 * vSphere SOAP client. Inherits from EventEmitter.
 *
 * Connecting starts immediately on construction (via the self-emitted
 * 'connect' event, handled by _connect).
 *
 * @param {string} vCenterHostname - hostname or IP of the vCenter/ESXi server
 * @param {string} username
 * @param {string} password
 * @param {boolean} [sslVerify=false] - when false, TLS certificate errors
 *   are ignored (self-signed certificates accepted)
 *
 * Events: 'connect' (connection requested), 'ready' (authenticated),
 * 'error' (any failure), 'close' (logout requested).
 */
function Client(vCenterHostname, username, password, sslVerify) {
  this.status = 'disconnected'
  this.reconnectCount = 0
  sslVerify = typeof sslVerify !== 'undefined' ? sslVerify : false
  EventEmitter.call(this)
  // sslVerify argument handling
  if (sslVerify) {
    this.clientopts = {}
  } else {
    // disable certificate validation on the underlying HTTPS agent
    this.clientopts = {
      request: axios.create({
        httpsAgent: new https.Agent({
          rejectUnauthorized: false,
        }),
      }),
    }
  }
  this.connectionInfo = {
    host: vCenterHostname,
    user: username,
    password,
    sslVerify,
  }
  // arguments re-used for the SessionManager Login call (see _connect)
  this._loginArgs = {
    userName: this.connectionInfo.user,
    password: this.connectionInfo.password,
  }
  this._vcUrl = 'https://' + this.connectionInfo.host + '/sdk/vimService.wsdl'
  // connect to the vCenter / ESXi host
  this.on('connect', this._connect)
  this.emit('connect')
  // close session
  this.on('close', this._close)
  return this
}
util.inherits(Client, EventEmitter)
/**
 * Runs a vSphere API method and returns an EventEmitter that fires
 * 'result' (result, raw, soapHeader) on success or 'error' on failure.
 * If the client is disconnected, a reconnection is triggered first and
 * the command runs once the client becomes ready.
 *
 * Refactor: the SOAP completion callback was duplicated verbatim in both
 * branches; it is now a single shared closure (behavior unchanged).
 *
 * @param {string} command - vSphere API method name (e.g. 'CurrentTime')
 * @param {Object} [args] - arguments for the method
 * @returns {EventEmitter}
 */
Client.prototype.runCommand = function (command, args) {
  const self = this
  const cmdargs = args || {}
  const emitter = new EventEmitter()
  // shared completion callback for the SOAP call
  const onComplete = function (err, result, raw, soapHeader) {
    if (err) {
      // either retries transparently or emits 'error' and throws
      _soapErrorHandler(self, emitter, command, cmdargs, err)
    }
    if (command === 'Logout') {
      self.status = 'disconnected'
      process.removeAllListeners('beforeExit')
    }
    emitter.emit('result', result, raw, soapHeader)
  }
  // check if client has successfully connected
  if (self.status === 'ready' || self.status === 'connecting') {
    self.client.VimService.VimPort[command](cmdargs, onComplete)
  } else {
    // if connection not ready or connecting, reconnect to instance
    if (self.status === 'disconnected') {
      self.emit('connect')
    }
    self.once('ready', function () {
      self.client.VimService.VimPort[command](cmdargs, onComplete)
    })
  }
  return emitter
}
// Public shutdown entry point: fires the 'close' event, which runs
// _close to log out of the vSphere session.
Client.prototype.close = function () {
  this.emit('close')
}
// Establishes the session in three chained steps: create the SOAP client
// from the WSDL, retrieve the ServiceContent, then log in through the
// SessionManager. Emits 'ready' on success; on any failure resets status
// to 'disconnected' and emits 'error'. No-op unless currently disconnected.
Client.prototype._connect = function () {
  const self = this
  if (self.status !== 'disconnected') {
    return
  }
  self.status = 'connecting'
  soap.createClient(
    self._vcUrl,
    self.clientopts,
    function (err, client) {
      if (err) {
        self.emit('error', err)
        throw err
      }
      self.client = client // save client for later use
      self
        .runCommand('RetrieveServiceContent', { _this: 'ServiceInstance' })
        .once('result', function (result, raw, soapHeader) {
          if (!result.returnval) {
            self.status = 'disconnected'
            self.emit('error', raw)
            return
          }
          self.serviceContent = result.returnval
          self.sessionManager = result.returnval.sessionManager
          const loginArgs = { _this: self.sessionManager, ...self._loginArgs }
          self
            .runCommand('Login', loginArgs)
            .once('result', function (result, raw, soapHeader) {
              self.authCookie = new Cookie(client.lastResponseHeaders)
              self.client.setSecurity(self.authCookie) // needed since vSphere SOAP WS uses cookies
              self.userName = result.returnval.userName
              self.fullName = result.returnval.fullName
              self.reconnectCount = 0
              self.status = 'ready'
              self.emit('ready')
              // log out cleanly when the process is about to exit
              process.once('beforeExit', self._close)
            })
            .once('error', function (err) {
              self.status = 'disconnected'
              self.emit('error', err)
            })
        })
        .once('error', function (err) {
          self.status = 'disconnected'
          self.emit('error', err)
        })
    },
    self._vcUrl
  )
}
// Logs out of the vSphere session (if one is active) and marks the
// client disconnected regardless of the outcome.
Client.prototype._close = function () {
  const self = this
  if (self.status !== 'ready') {
    self.status = 'disconnected'
    return
  }
  self
    .runCommand('Logout', { _this: self.sessionManager })
    .once('result', function () {
      self.status = 'disconnected'
    })
    .once('error', function () {
      // errors during disconnection are deliberately ignored
      self.status = 'disconnected'
    })
}
/**
 * Routes a SOAP fault either to an automatic re-login retry (when the
 * session expired) or to the caller's 'error' event.
 *
 * Fix: `err.body` may be absent or a non-string (e.g. a parsed fault
 * object or a transport error); calling `.match` on it threw a TypeError
 * that masked the real error. It is now only matched when it is a string.
 *
 * @param {Client} self - client instance (used for reconnection state)
 * @param {EventEmitter} emitter - per-command emitter returned by runCommand
 * @param {string} command - the command that failed, for the retry
 * @param {Object} args - the original command arguments, for the retry
 * @param {Object} err - SOAP error; `err.body` carries the fault text
 * @throws the original error after emitting 'error' (legacy behavior)
 */
function _soapErrorHandler(self, emitter, command, args, err) {
  err = err || { body: 'general error' }
  const body = typeof err.body === 'string' ? err.body : ''
  if (body.match(/session is not authenticated/)) {
    // session expired: mark disconnected and retry up to 10 times;
    // runCommand will transparently reconnect before re-running
    self.status = 'disconnected'
    process.removeAllListeners('beforeExit')
    if (self.reconnectCount < 10) {
      self.reconnectCount += 1
      self
        .runCommand(command, args)
        .once('result', function (result, raw, soapHeader) {
          emitter.emit('result', result, raw, soapHeader)
        })
        .once('error', function (err) {
          emitter.emit('error', err.body)
          throw err
        })
    } else {
      emitter.emit('error', err.body)
      throw err
    }
  } else {
    emitter.emit('error', err.body)
    throw err
  }
}
// end
exports.Client = Client

View File

@@ -1,38 +0,0 @@
{
"name": "@vates/node-vsphere-soap",
"version": "1.0.0",
"description": "interface to vSphere SOAP/WSDL from node for interfacing with vCenter or ESXi, forked from node-vsphere-soap",
"main": "lib/client.js",
"author": "reedog117",
"repository": {
"directory": "@vates/node-vsphere-soap",
"type": "git",
"url": "https://github.com/vatesfr/xen-orchestra.git"
},
"bugs": "https://github.com/vatesfr/xen-orchestra/issues",
"dependencies": {
"axios": "^1.4.0",
"soap": "^1.0.0",
"soap-cookie": "^0.10.1"
},
"devDependencies": {
"test": "^3.3.0"
},
"keywords": [
"vsphere",
"vcenter",
"api",
"soap",
"wsdl"
],
"preferGlobal": false,
"license": "MIT",
"private": false,
"homepage": "https://github.com/vatesfr/xen-orchestra/tree/master/@vates/node-vsphere-soap",
"engines": {
"node": ">=8.10"
},
"scripts": {
"postversion": "npm publish --access public"
}
}

View File

@@ -1,15 +0,0 @@
'use strict'
// place your own credentials here for a vCenter or ESXi server
// this information will be used for connecting to a vCenter instance
// for module testing
// name the file config-test.js
// Stub credentials for the vCenter/ESXi host the test-suite connects to.
// Copy this file to config-test.js (see header comment) and fill in real
// values before running the tests.
const vCenterTestCreds = {
  vCenterIP: 'vcsa',
  vCenterUser: 'vcuser',
  vCenterPassword: 'vcpw',
  vCenter: true, // presumably: target is a vCenter rather than a bare ESXi host — TODO confirm
}
exports.vCenterTestCreds = vCenterTestCreds

View File

@@ -1,140 +0,0 @@
'use strict'
/*
vsphere-soap.test.js
tests for the vCenterConnectionInstance class
*/
const assert = require('assert')
const { describe, it } = require('test')
const vc = require('../lib/client')
// eslint-disable-next-line n/no-missing-require
const TestCreds = require('../config-test.js').vCenterTestCreds
const VItest = new vc.Client(TestCreds.vCenterIP, TestCreds.vCenterUser, TestCreds.vCenterPassword, false)
// Verifies that constructing a Client performs a login and populates the
// session fields. NOTE(review): the error path runs the same assertions,
// so a failed login surfaces as assertion failures instead of a timeout.
describe('Client object initialization:', function () {
  it('provides a successful login', { timeout: 5000 }, function (t, done) {
    VItest.once('ready', function () {
      assert.notEqual(VItest.userName, null)
      assert.notEqual(VItest.fullName, null)
      assert.notEqual(VItest.serviceContent, null)
      done()
    }).once('error', function (err) {
      console.error(err)
      // this should fail if there's a problem
      assert.notEqual(VItest.userName, null)
      assert.notEqual(VItest.fullName, null)
      assert.notEqual(VItest.serviceContent, null)
      done()
    })
  })
})
// After an explicit Logout, running any command should transparently
// re-authenticate (the reconnection path in _soapErrorHandler/runCommand).
describe('Client reconnection test:', function () {
  it('can successfully reconnect', { timeout: 5000 }, function (t, done) {
    VItest.runCommand('Logout', { _this: VItest.serviceContent.sessionManager })
      .once('result', function (result) {
        // now we're logged out, so let's try running a command to test automatic re-login
        VItest.runCommand('CurrentTime', { _this: 'ServiceInstance' })
          .once('result', function (result) {
            assert(result.returnval instanceof Date)
            done()
          })
          .once('error', function (err) {
            console.error(err)
          })
      })
      .once('error', function (err) {
        console.error(err)
      })
  })
})
// these tests don't work yet
describe('Client tests - query commands:', function () {
it('retrieves current time', { timeout: 5000 }, function (t, done) {
VItest.runCommand('CurrentTime', { _this: 'ServiceInstance' }).once('result', function (result) {
assert(result.returnval instanceof Date)
done()
})
})
it('retrieves current time 2 (check for event clobbering)', { timeout: 5000 }, function (t, done) {
VItest.runCommand('CurrentTime', { _this: 'ServiceInstance' }).once('result', function (result) {
assert(result.returnval instanceof Date)
done()
})
})
it('can obtain the names of all Virtual Machines in the inventory', { timeout: 20000 }, function (t, done) {
// get property collector
const propertyCollector = VItest.serviceContent.propertyCollector
// get view manager
const viewManager = VItest.serviceContent.viewManager
// get root folder
const rootFolder = VItest.serviceContent.rootFolder
let containerView, objectSpec, traversalSpec, propertySpec, propertyFilterSpec
// this is the equivalent to
VItest.runCommand('CreateContainerView', {
_this: viewManager,
container: rootFolder,
type: ['VirtualMachine'],
recursive: true,
}).once('result', function (result) {
// build all the data structures needed to query all the vm names
containerView = result.returnval
objectSpec = {
attributes: { 'xsi:type': 'ObjectSpec' }, // setting attributes xsi:type is important or else the server may mis-recognize types!
obj: containerView,
skip: true,
}
traversalSpec = {
attributes: { 'xsi:type': 'TraversalSpec' },
name: 'traverseEntities',
type: 'ContainerView',
path: 'view',
skip: false,
}
objectSpec = { ...objectSpec, selectSet: [traversalSpec] }
propertySpec = {
attributes: { 'xsi:type': 'PropertySpec' },
type: 'VirtualMachine',
pathSet: ['name'],
}
propertyFilterSpec = {
attributes: { 'xsi:type': 'PropertyFilterSpec' },
propSet: [propertySpec],
objectSet: [objectSpec],
}
// TODO: research why it fails if propSet is declared after objectSet
VItest.runCommand('RetrievePropertiesEx', {
_this: propertyCollector,
specSet: [propertyFilterSpec],
options: { attributes: { type: 'RetrieveOptions' } },
})
.once('result', function (result, raw) {
assert.notEqual(result.returnval.objects, null)
if (Array.isArray(result.returnval.objects)) {
assert.strictEqual(result.returnval.objects[0].obj.attributes.type, 'VirtualMachine')
} else {
assert.strictEqual(result.returnval.objects.obj.attributes.type, 'VirtualMachine')
}
done()
})
.once('error', function (err) {
console.error('\n\nlast request : ' + VItest.client.lastRequest, err)
})
})
})
})

View File

@@ -2,8 +2,10 @@
import { Task } from '@vates/task'
const task = new Task({
// this object will be sent in the *start* event
properties: {
// data in this object will be sent along the *start* event
//
// property names should be chosen as not to clash with properties used by `Task` or `combineEvents`
data: {
name: 'my task',
},
@@ -14,15 +16,13 @@ const task = new Task({
// this function is called each time this task or one of it's subtasks change state
const { id, timestamp, type } = event
if (type === 'start') {
const { name, parentId, properties } = event
const { name, parentId } = event
} else if (type === 'end') {
const { result, status } = event
} else if (type === 'info' || type === 'warning') {
const { data, message } = event
} else if (type === 'property') {
const { name, value } = event
} else if (type === 'abortionRequested') {
const { reason } = event
}
},
})
@@ -36,6 +36,7 @@ task.id
// - pending
// - success
// - failure
// - aborted
task.status
// Triggers the abort signal associated to the task.
@@ -88,30 +89,6 @@ const onProgress = makeOnProgress({
onRootTaskStart(taskLog) {
// `taskLog` is an object reflecting the state of this task and all its subtasks,
// and will be mutated in real-time to reflect the changes of the task.
// timestamp at which the task started
taskLog.start
// current status of the task as described in the previous section
taskLog.status
// undefined or a dictionary of properties attached to the task
taskLog.properties
// timestamp at which the abortion was requested, undefined otherwise
taskLog.abortionRequestedAt
// undefined or an array of infos emitted on the task
taskLog.infos
// undefined or an array of warnings emitted on the task
taskLog.warnings
// timestamp at which the task ended, undefined otherwise
taskLog.end
// undefined or the result value of the task
taskLog.result
},
// This function is called each time a root task ends.

View File

@@ -18,8 +18,10 @@ npm install --save @vates/task
import { Task } from '@vates/task'
const task = new Task({
// this object will be sent in the *start* event
properties: {
// data in this object will be sent along the *start* event
//
// property names should be chosen as not to clash with properties used by `Task` or `combineEvents`
data: {
name: 'my task',
},
@@ -30,15 +32,13 @@ const task = new Task({
// this function is called each time this task or one of it's subtasks change state
const { id, timestamp, type } = event
if (type === 'start') {
const { name, parentId, properties } = event
const { name, parentId } = event
} else if (type === 'end') {
const { result, status } = event
} else if (type === 'info' || type === 'warning') {
const { data, message } = event
} else if (type === 'property') {
const { name, value } = event
} else if (type === 'abortionRequested') {
const { reason } = event
}
},
})
@@ -52,6 +52,7 @@ task.id
// - pending
// - success
// - failure
// - aborted
task.status
// Triggers the abort signal associated to the task.
@@ -104,30 +105,6 @@ const onProgress = makeOnProgress({
onRootTaskStart(taskLog) {
// `taskLog` is an object reflecting the state of this task and all its subtasks,
// and will be mutated in real-time to reflect the changes of the task.
// timestamp at which the task started
taskLog.start
// current status of the task as described in the previous section
taskLog.status
// undefined or a dictionnary of properties attached to the task
taskLog.properties
// timestamp at which the abortion was requested, undefined otherwise
taskLog.abortionRequestedAt
// undefined or an array of infos emitted on the task
taskLog.infos
// undefined or an array of warnings emitted on the task
taskLog.warnings
// timestamp at which the task ended, undefined otherwise
taskLog.end
// undefined or the result value of the task
taskLog.result
},
// This function is called each time a root task ends.

View File

@@ -4,18 +4,36 @@ const assert = require('node:assert').strict
const noop = Function.prototype
function omit(source, keys, target = { __proto__: null }) {
for (const key of Object.keys(source)) {
if (!keys.has(key)) {
target[key] = source[key]
}
}
return target
}
const IGNORED_START_PROPS = new Set([
'end',
'infos',
'properties',
'result',
'status',
'tasks',
'timestamp',
'type',
'warnings',
])
exports.makeOnProgress = function ({ onRootTaskEnd = noop, onRootTaskStart = noop, onTaskUpdate = noop }) {
const taskLogs = new Map()
return function onProgress(event) {
const { id, type } = event
let taskLog
if (type === 'start') {
taskLog = {
id,
properties: { __proto__: null, ...event.properties },
start: event.timestamp,
status: 'pending',
}
taskLog = omit(event, IGNORED_START_PROPS)
taskLog.start = event.timestamp
taskLog.status = 'pending'
taskLogs.set(id, taskLog)
const { parentId } = event
@@ -47,8 +65,6 @@ exports.makeOnProgress = function ({ onRootTaskEnd = noop, onRootTaskStart = noo
taskLog.end = event.timestamp
taskLog.result = event.result
taskLog.status = event.status
} else if (type === 'abortionRequested') {
taskLog.abortionRequestedAt = event.timestamp
}
if (type === 'end' && taskLog.$root === taskLog) {

View File

@@ -11,7 +11,7 @@ describe('makeOnProgress()', function () {
const events = []
let log
const task = new Task({
properties: { name: 'task' },
data: { name: 'task' },
onProgress: makeOnProgress({
onRootTaskStart(log_) {
assert.equal(log, undefined)
@@ -32,50 +32,36 @@ describe('makeOnProgress()', function () {
assert.equal(events.length, 0)
let i = 0
await task.run(async () => {
assert.equal(events[i++], 'onRootTaskStart')
assert.equal(events[i++], 'onTaskUpdate')
assert.equal(log.id, task.id)
assert.equal(log.properties.name, 'task')
assert(Math.abs(log.start - Date.now()) < 10)
Task.set('name', 'new name')
assert.equal(events[i++], 'onTaskUpdate')
assert.equal(log.properties.name, 'new name')
assert.equal(events[0], 'onRootTaskStart')
assert.equal(events[1], 'onTaskUpdate')
assert.equal(log.name, 'task')
Task.set('progress', 0)
assert.equal(events[i++], 'onTaskUpdate')
assert.equal(events[2], 'onTaskUpdate')
assert.equal(log.properties.progress, 0)
Task.info('foo', {})
assert.equal(events[i++], 'onTaskUpdate')
assert.equal(events[3], 'onTaskUpdate')
assert.deepEqual(log.infos, [{ data: {}, message: 'foo' }])
const subtask = new Task({ properties: { name: 'subtask' } })
await subtask.run(() => {
assert.equal(events[i++], 'onTaskUpdate')
assert.equal(log.tasks[0].properties.name, 'subtask')
await Task.run({ data: { name: 'subtask' } }, () => {
assert.equal(events[4], 'onTaskUpdate')
assert.equal(log.tasks[0].name, 'subtask')
Task.warning('bar', {})
assert.equal(events[i++], 'onTaskUpdate')
assert.equal(events[5], 'onTaskUpdate')
assert.deepEqual(log.tasks[0].warnings, [{ data: {}, message: 'bar' }])
subtask.abort()
assert.equal(events[i++], 'onTaskUpdate')
assert(Math.abs(log.tasks[0].abortionRequestedAt - Date.now()) < 10)
})
assert.equal(events[i++], 'onTaskUpdate')
assert.equal(events[6], 'onTaskUpdate')
assert.equal(log.tasks[0].status, 'success')
Task.set('progress', 100)
assert.equal(events[i++], 'onTaskUpdate')
assert.equal(events[7], 'onTaskUpdate')
assert.equal(log.properties.progress, 100)
})
assert.equal(events[i++], 'onRootTaskEnd')
assert.equal(events[i++], 'onTaskUpdate')
assert(Math.abs(log.end - Date.now()) < 10)
assert.equal(events[8], 'onRootTaskEnd')
assert.equal(events[9], 'onTaskUpdate')
assert.equal(log.status, 'success')
})
})

View File

@@ -10,10 +10,11 @@ function define(object, property, value) {
const noop = Function.prototype
const ABORTED = 'aborted'
const FAILURE = 'failure'
const PENDING = 'pending'
const SUCCESS = 'success'
exports.STATUS = { FAILURE, PENDING, SUCCESS }
exports.STATUS = { ABORTED, FAILURE, PENDING, SUCCESS }
// stored in the global context so that various versions of the library can interact.
const asyncStorageKey = '@vates/task@0'
@@ -82,8 +83,8 @@ exports.Task = class Task {
return this.#status
}
constructor({ properties, onProgress } = {}) {
this.#startData = { properties }
constructor({ data = {}, onProgress } = {}) {
this.#startData = data
if (onProgress !== undefined) {
this.#onProgress = onProgress
@@ -104,16 +105,12 @@ exports.Task = class Task {
const { signal } = this.#abortController
signal.addEventListener('abort', () => {
if (this.status === PENDING) {
if (this.status === PENDING && !this.#running) {
this.#maybeStart()
this.#emit('abortionRequested', { reason: signal.reason })
if (!this.#running) {
const status = FAILURE
this.#status = status
this.#emit('end', { result: signal.reason, status })
}
const status = ABORTED
this.#status = status
this.#emit('end', { result: signal.reason, status })
}
})
}
@@ -159,7 +156,9 @@ exports.Task = class Task {
this.#running = false
return result
} catch (result) {
const status = FAILURE
const { signal } = this.#abortController
const aborted = signal.aborted && result === signal.reason
const status = aborted ? ABORTED : FAILURE
this.#status = status
this.#emit('end', { status, result })

View File

@@ -15,7 +15,7 @@ function assertEvent(task, expected, eventIndex = -1) {
assert.equal(typeof actual.id, 'string')
assert.equal(typeof actual.timestamp, 'number')
for (const keys of Object.keys(expected)) {
assert.deepEqual(actual[keys], expected[keys])
assert.equal(actual[keys], expected[keys])
}
}
@@ -30,10 +30,10 @@ function createTask(opts) {
describe('Task', function () {
describe('contructor', function () {
it('data properties are passed to the start event', async function () {
const properties = { foo: 0, bar: 1 }
const task = createTask({ properties })
const data = { foo: 0, bar: 1 }
const task = createTask({ data })
await task.run(noop)
assertEvent(task, { type: 'start', properties }, 0)
assertEvent(task, { ...data, type: 'start' }, 0)
})
})
@@ -79,22 +79,20 @@ describe('Task', function () {
})
.catch(noop)
assert.equal(task.status, 'failure')
assert.equal(task.status, 'aborted')
assert.equal(task.$events.length, 3)
assert.equal(task.$events.length, 2)
assertEvent(task, { type: 'start' }, 0)
assertEvent(task, { type: 'abortionRequested', reason }, 1)
assertEvent(task, { type: 'end', status: 'failure', result: reason }, 2)
assertEvent(task, { type: 'end', status: 'aborted', result: reason }, 1)
})
it('does not abort if the task fails without the abort reason', async function () {
const task = createTask()
const reason = {}
const result = new Error()
await task
.run(() => {
task.abort(reason)
task.abort({})
throw result
})
@@ -102,20 +100,18 @@ describe('Task', function () {
assert.equal(task.status, 'failure')
assert.equal(task.$events.length, 3)
assert.equal(task.$events.length, 2)
assertEvent(task, { type: 'start' }, 0)
assertEvent(task, { type: 'abortionRequested', reason }, 1)
assertEvent(task, { type: 'end', status: 'failure', result }, 2)
assertEvent(task, { type: 'end', status: 'failure', result }, 1)
})
it('does not abort if the task succeed', async function () {
const task = createTask()
const reason = {}
const result = {}
await task
.run(() => {
task.abort(reason)
task.abort({})
return result
})
@@ -123,10 +119,9 @@ describe('Task', function () {
assert.equal(task.status, 'success')
assert.equal(task.$events.length, 3)
assert.equal(task.$events.length, 2)
assertEvent(task, { type: 'start' }, 0)
assertEvent(task, { type: 'abortionRequested', reason }, 1)
assertEvent(task, { type: 'end', status: 'success', result }, 2)
assertEvent(task, { type: 'end', status: 'success', result }, 1)
})
it('aborts before task is running', function () {
@@ -135,12 +130,11 @@ describe('Task', function () {
task.abort(reason)
assert.equal(task.status, 'failure')
assert.equal(task.status, 'aborted')
assert.equal(task.$events.length, 3)
assert.equal(task.$events.length, 2)
assertEvent(task, { type: 'start' }, 0)
assertEvent(task, { type: 'abortionRequested', reason }, 1)
assertEvent(task, { type: 'end', status: 'failure', result: reason }, 2)
assertEvent(task, { type: 'end', status: 'aborted', result: reason }, 1)
})
})
@@ -249,7 +243,7 @@ describe('Task', function () {
assert.equal(task.status, 'failure')
})
it('changes to failure if aborted after run is complete', async function () {
it('changes to aborted after run is complete', async function () {
const task = createTask()
await task
.run(() => {
@@ -258,13 +252,13 @@ describe('Task', function () {
Task.abortSignal.throwIfAborted()
})
.catch(noop)
assert.equal(task.status, 'failure')
assert.equal(task.status, 'aborted')
})
it('changes to failure if aborted when not running', function () {
it('changes to aborted if aborted when not running', async function () {
const task = createTask()
task.abort()
assert.equal(task.status, 'failure')
assert.equal(task.status, 'aborted')
})
})

View File

@@ -13,7 +13,7 @@
"url": "https://vates.fr"
},
"license": "ISC",
"version": "0.2.0",
"version": "0.1.2",
"engines": {
"node": ">=14"
},

View File

@@ -7,8 +7,8 @@
"bugs": "https://github.com/vatesfr/xen-orchestra/issues",
"dependencies": {
"@xen-orchestra/async-map": "^0.1.2",
"@xen-orchestra/backups": "^0.39.0",
"@xen-orchestra/fs": "^4.0.1",
"@xen-orchestra/backups": "^0.36.0",
"@xen-orchestra/fs": "^3.3.4",
"filenamify": "^4.1.0",
"getopts": "^2.2.5",
"lodash": "^4.17.15",
@@ -27,7 +27,7 @@
"scripts": {
"postversion": "npm publish --access public"
},
"version": "1.0.9",
"version": "1.0.6",
"license": "AGPL-3.0-or-later",
"author": {
"name": "Vates SAS",

View File

@@ -1,19 +0,0 @@
'use strict'
const { Metadata } = require('./_runners/Metadata.js')
const { VmsRemote } = require('./_runners/VmsRemote.js')
const { VmsXapi } = require('./_runners/VmsXapi.js')
exports.createRunner = function createRunner(opts) {
const { type } = opts.job
switch (type) {
case 'backup':
return new VmsXapi(opts)
case 'mirrorBackup':
return new VmsRemote(opts)
case 'metadataBackup':
return new Metadata(opts)
default:
throw new Error(`No runner for the backup type ${type}`)
}
}

View File

@@ -2,10 +2,10 @@
const { asyncMap } = require('@xen-orchestra/async-map')
const { DIR_XO_POOL_METADATA_BACKUPS } = require('../RemoteAdapter.js')
const { forkStreamUnpipe } = require('./_forkStreamUnpipe.js')
const { formatFilenameDate } = require('../_filenameDate.js')
const { Task } = require('../Task.js')
const { DIR_XO_POOL_METADATA_BACKUPS } = require('./RemoteAdapter.js')
const { formatFilenameDate } = require('./_filenameDate.js')
const { Task } = require('./Task.js')
const { forkStreamUnpipe } = require('./_backupJob/forkStreamUnpipe.js')
const PATH_DB_DUMP = '/pool/xmldbdump'
exports.PATH_DB_DUMP = PATH_DB_DUMP

View File

@@ -252,7 +252,7 @@ class RemoteAdapter {
)
}
async deleteDeltaVmBackups(backups) {
async deleteIncrementalVmBackups(backups) {
const handler = this._handler
// this will delete the json, unused VHDs will be detected by `cleanVm`
@@ -304,7 +304,7 @@ class RemoteAdapter {
}
await Promise.all([
delta !== undefined && this.deleteDeltaVmBackups(delta),
delta !== undefined && this.deleteIncrementalVmBackups(delta),
full !== undefined && this.deleteFullVmBackups(full),
])
@@ -333,7 +333,7 @@ class RemoteAdapter {
const RE_VHDI = /^vhdi(\d+)$/
const handler = this._handler
const diskPath = handler.getFilePath('/' + diskId)
const diskPath = handler._getFilePath('/' + diskId)
const mountDir = yield getTmpDir()
await fromCallback(execFile, 'vhdimount', [diskPath, mountDir])
try {
@@ -404,27 +404,20 @@ class RemoteAdapter {
return `${baseName}.vhd`
}
async listAllVms() {
async listAllVmBackups() {
const handler = this._handler
const vmsUuids = []
await asyncEach(await handler.list(BACKUP_DIR), async entry => {
const backups = { __proto__: null }
await asyncMap(await handler.list(BACKUP_DIR), async entry => {
// ignore hidden and lock files
if (entry[0] !== '.' && !entry.endsWith('.lock')) {
vmsUuids.push(entry)
const vmBackups = await this.listVmBackups(entry)
if (vmBackups.length !== 0) {
backups[entry] = vmBackups
}
}
})
return vmsUuids
}
async listAllVmBackups() {
const vmsUuids = await this.listAllVms()
const backups = { __proto__: null }
await asyncEach(vmsUuids, async vmUuid => {
const vmBackups = await this.listVmBackups(vmUuid)
if (vmBackups.length !== 0) {
backups[vmUuid] = vmBackups
}
})
return backups
}
@@ -698,8 +691,8 @@ class RemoteAdapter {
}
// open the hierarchy of ancestors until we find a full one
async _createVhdStream(handler, path, { useChain }) {
const disposableSynthetic = useChain ? await VhdSynthetic.fromVhdChain(handler, path) : await openVhd(handler, path)
async _createSyntheticStream(handler, path) {
const disposableSynthetic = await VhdSynthetic.fromVhdChain(handler, path)
// I don't want the vhds to be disposed on return
// but only when the stream is done ( or failed )
@@ -724,7 +717,7 @@ class RemoteAdapter {
return stream
}
async readIncrementalVmBackup(metadata, ignoredVdis, { useChain = true } = {}) {
async readIncrementalVmBackup(metadata, ignoredVdis, { useSynthetic = true } = {}) {
const handler = this._handler
const { vbds, vhds, vifs, vm, vmSnapshot } = metadata
const dir = dirname(metadata._filename)
@@ -732,7 +725,9 @@ class RemoteAdapter {
const streams = {}
await asyncMapSettled(Object.keys(vdis), async ref => {
streams[`${ref}.vhd`] = await this._createVhdStream(handler, join(dir, vhds[ref]), { useChain })
streams[`${ref}.vhd`] = useSynthetic
? await this._createSyntheticStream(handler, join(dir, vhds[ref]))
: await this._handler.createReadStream(join(dir, vhds[ref]))
})
return {

View File

@@ -1,9 +1,7 @@
'use strict'
const { join, resolve } = require('node:path/posix')
const { DIR_XO_POOL_METADATA_BACKUPS } = require('./RemoteAdapter.js')
const { PATH_DB_DUMP } = require('./_runners/_PoolMetadataBackup.js')
const { PATH_DB_DUMP } = require('./PoolMetadataBackup.js')
exports.RestoreMetadataBackup = class RestoreMetadataBackup {
constructor({ backupId, handler, xapi }) {
@@ -22,8 +20,7 @@ exports.RestoreMetadataBackup = class RestoreMetadataBackup {
task: xapi.task_create('Import pool metadata'),
})
} else {
const metadata = JSON.parse(await handler.readFile(join(backupId, 'metadata.json')))
return String(await handler.readFile(resolve(backupId, metadata.data ?? 'data.json')))
return String(await handler.readFile(`${backupId}/data.json`))
}
}
}

View File

@@ -3,15 +3,15 @@
const Disposable = require('promise-toolbox/Disposable')
const pTimeout = require('promise-toolbox/timeout')
const { compileTemplate } = require('@xen-orchestra/template')
const { runTask } = require('./_runTask.js')
const { RemoteTimeoutError } = require('./_RemoteTimeoutError.js')
const { RemoteTimeoutError } = require('./RemoteTimeoutError.js')
const { runTask } = require('./runTask.js')
exports.DEFAULT_SETTINGS = {
getRemoteTimeout: 300e3,
reportWhen: 'failure',
}
exports.Abstract = class AbstractRunner {
exports.AbstractBackupJob = class AbstractBackupJob {
constructor({ config, getAdapter, getConnectedRecord, job, schedule }) {
this._config = config
this._getRecord = getConnectedRecord

View File

@@ -5,18 +5,18 @@ const Disposable = require('promise-toolbox/Disposable')
const ignoreErrors = require('promise-toolbox/ignoreErrors')
const { extractIdsFromSimplePattern } = require('../extractIdsFromSimplePattern.js')
const { PoolMetadataBackup } = require('./_PoolMetadataBackup.js')
const { XoMetadataBackup } = require('./_XoMetadataBackup.js')
const { DEFAULT_SETTINGS, Abstract } = require('./_Abstract.js')
const { runTask } = require('./_runTask.js')
const { getAdaptersByRemote } = require('./_getAdaptersByRemote.js')
const { PoolMetadataBackup } = require('../PoolMetadataBackup.js')
const { XoMetadataBackup } = require('./XoMetadataBackup.js')
const { DEFAULT_SETTINGS, AbstractBackupJob } = require('./AbstractBackupJob.js')
const { runTask } = require('./runTask.js')
const { getAdaptersByRemote } = require('./getAdapterByRemote.js')
const DEFAULT_METADATA_SETTINGS = {
retentionPoolMetadata: 0,
retentionXoMetadata: 0,
}
exports.Metadata = class MetadataBackupRunner extends Abstract {
exports.MetadatasBackupJob = class MetadatasBackupJob extends AbstractBackupJob {
_computeBaseSettings(config, job) {
const baseSettings = { ...DEFAULT_SETTINGS }
Object.assign(baseSettings, DEFAULT_METADATA_SETTINGS, config.defaultSettings, config.metadata?.defaultSettings)

View File

@@ -1,8 +1,7 @@
'use strict'
class RemoteTimeoutError extends Error {
exports.RemoteTimeoutError = class RemoteTimeoutError extends Error {
constructor(remoteId) {
super('timeout while getting the remote ' + remoteId)
this.remoteId = remoteId
}
}
exports.RemoteTimeoutError = RemoteTimeoutError

View File

@@ -1,23 +1,24 @@
'use strict'
const { asyncMapSettled } = require('@xen-orchestra/async-map')
const { asyncMapSettled, asyncMap } = require('@xen-orchestra/async-map')
const Disposable = require('promise-toolbox/Disposable')
const { limitConcurrency } = require('limit-concurrency-decorator')
const { extractIdsFromSimplePattern } = require('../extractIdsFromSimplePattern.js')
const { Task } = require('../Task.js')
const createStreamThrottle = require('./_createStreamThrottle.js')
const { DEFAULT_SETTINGS, Abstract } = require('./_Abstract.js')
const { runTask } = require('./_runTask.js')
const { getAdaptersByRemote } = require('./_getAdaptersByRemote.js')
const { FullRemote } = require('./_vmRunners/FullRemote.js')
const { IncrementalRemote } = require('./_vmRunners/IncrementalRemote.js')
const createStreamThrottle = require('./createStreamThrottle.js')
const { DEFAULT_SETTINGS, AbstractBackupJob } = require('./AbstractBackupJob.js')
const { runTask } = require('./runTask.js')
const { getAdaptersByRemote } = require('./getAdapterByRemote.js')
const { IncrementalRemoteVmBackup } = require('./VmBackup/IncrementalRemoteVmBackup.js')
const { FullRemoteVmBackup } = require('./VmBackup/FullRemoteVmBackup.js')
const DEFAULT_REMOTE_VM_SETTINGS = {
concurrency: 2,
copyRetention: 0,
deleteFirst: false,
exportRetention: 0,
fullInterval: 0,
healthCheckSr: undefined,
healthCheckVmsWithTags: [],
maxExportRate: 0,
@@ -27,7 +28,7 @@ const DEFAULT_REMOTE_VM_SETTINGS = {
vmTimeout: 0,
}
exports.VmsRemote = class RemoteVmsBackupRunner extends Abstract {
exports.RemoteVmBackupJob = class RemoteVmBackupJob extends AbstractBackupJob {
_computeBaseSettings(config, job) {
const baseSettings = { ...DEFAULT_SETTINGS }
Object.assign(baseSettings, DEFAULT_REMOTE_VM_SETTINGS, config.defaultSettings, config.vm?.defaultSettings)
@@ -56,7 +57,13 @@ exports.VmsRemote = class RemoteVmsBackupRunner extends Abstract {
return
}
const vmsUuids = await sourceRemoteAdapter.listAllVms()
const vmsUuids = []
await asyncMap(await sourceRemoteAdapter._handler.list('xo-vm-backups'), async entry => {
// ignore hidden and lock files
if (entry[0] !== '.' && !entry.endsWith('.lock')) {
vmsUuids.push(entry)
}
})
Task.info('vms', { vms: vmsUuids })
@@ -81,11 +88,13 @@ exports.VmsRemote = class RemoteVmsBackupRunner extends Abstract {
}
let vmBackup
if (job.mode === 'delta') {
vmBackup = new IncrementalRemote(opts)
} else if (job.mode === 'full') {
vmBackup = new FullRemote(opts)
vmBackup = new IncrementalRemoteVmBackup(opts)
} else {
throw new Error(`Job mode ${job.mode} not implemented for mirror backup`)
if (job.mode === 'full') {
vmBackup = new FullRemoteVmBackup(opts)
} else {
throw new Error(`Job mode ${job.mode} not implemented`)
}
}
return runTask(taskStart, () => vmBackup.run())

View File

@@ -1,11 +1,10 @@
'use strict'
const { Abstract } = require('./_Abstract')
const { AbstractVmBackup } = require('./AbstractVmBackup')
const { getVmBackupDir } = require('../../_getVmBackupDir')
const { asyncEach } = require('@vates/async-each')
const { Disposable } = require('promise-toolbox')
exports.AbstractRemote = class AbstractRemoteVmBackupRunner extends Abstract {
const { decorateMethodsWith } = require('@vates/decorate-with')
const { defer } = require('golike-defer')
class AbstractRemoteVmBackup extends AbstractVmBackup {
constructor({
config,
job,
@@ -24,74 +23,78 @@ exports.AbstractRemote = class AbstractRemoteVmBackupRunner extends Abstract {
this.scheduleId = schedule.id
this.timestamp = undefined
// the vm object is used in writers
// remoteWriter only need vm.uuid
// @todo : how to do better ?
// missing tags for healthcheck
this.vm = { uuid: vmUuid }
this._healthCheckSr = healthCheckSr
this._sourceRemoteAdapter = sourceRemoteAdapter
this._throttleStream = throttleStream
this._vmUuid = vmUuid
const allSettings = job.settings
const writers = new Set()
this._writers = writers
const RemoteWriter = this._getRemoteWriter()
Object.entries(remoteAdapters).forEach(([remoteId, adapter]) => {
Object.keys(remoteAdapters).forEach(remoteId => {
const targetSettings = {
...settings,
...allSettings[remoteId],
}
writers.add(
new RemoteWriter({
adapter,
config,
healthCheckSr,
job,
scheduleId: schedule.id,
vmUuid,
remoteId,
settings: targetSettings,
})
)
if (targetSettings.exportRetention !== 0) {
writers.add(new RemoteWriter({ backup: this, remoteId, settings: targetSettings }))
}
})
}
async _computeTransferList(predicate) {
const vmBackups = await this._sourceRemoteAdapter.listVmBackups(this._vmUuid, predicate)
const localMetada = new Map()
const vmBackups = await this._sourceRemoteAdapter.listVmBackups(this.vm.uuid, predicate)
const localMetada = {}
Object.values(vmBackups).forEach(metadata => {
const timestamp = metadata.timestamp
localMetada.set(timestamp, metadata)
localMetada[timestamp] = metadata
})
const nbRemotes = Object.keys(this.remoteAdapters).length
const remoteMetadatas = {}
await asyncEach(Object.values(this.remoteAdapters), async remoteAdapter => {
const remoteMetadata = await remoteAdapter.listVmBackups(this._vmUuid, predicate)
remoteMetadata.forEach(metadata => {
const timestamp = metadata.timestamp
remoteMetadatas[timestamp] = (remoteMetadatas[timestamp] ?? 0) + 1
await Promise.all(
Object.values(this.remoteAdapters).map(async remoteAdapter => {
const remoteMetadata = await remoteAdapter.listVmBackups(this.vm.uuid, predicate)
remoteMetadata.forEach(metadata => {
const timestamp = metadata.timestamp
remoteMetadatas[timestamp] = (remoteMetadatas[timestamp] ?? 0) + 1
})
})
})
)
let chain = []
const timestamps = [...localMetada.keys()]
timestamps.sort()
for (const timestamp of timestamps) {
for (const timestamp in localMetada) {
if (remoteMetadatas[timestamp] !== nbRemotes) {
// this backup is not present in all the remote
// should be retransfered if not found later
chain.push(localMetada.get(timestamp))
chain.push(localMetada[timestamp])
} else {
// backup is present in local and remote : the chain has already been transferred
chain = []
}
}
return chain
}
async run() {
async run($defer) {
const handler = this._sourceRemoteAdapter._handler
await Disposable.use(await handler.lock(getVmBackupDir(this._vmUuid)), async () => {
await this._run()
await this._healthCheck()
const sourceLock = await handler.lock(getVmBackupDir(this.vm.uuid))
$defer(async () => {
sourceLock.dispose()
})
await this._run()
}
}
exports.AbstractRemoteVmBackup = AbstractRemoteVmBackup
decorateMethodsWith(AbstractRemoteVmBackup, {
run: defer,
})

View File

@@ -1,10 +1,9 @@
'use strict'
const { asyncMap } = require('@xen-orchestra/async-map')
const { createLogger } = require('@xen-orchestra/log')
const { Task } = require('../../Task.js')
const { debug, warn } = createLogger('xo:backups:AbstractVmRunner')
const { Task } = require('../../Task')
const { asyncMap } = require('@xen-orchestra/async-map')
const { debug, warn } = createLogger('xo:backups:AbstractVmBackup')
class AggregateError extends Error {
constructor(errors, message) {
@@ -19,7 +18,7 @@ const asyncEach = async (iterable, fn, thisArg = iterable) => {
}
}
exports.Abstract = class AbstractVmBackupRunner {
class AbstractVmBackup {
// calls fn for each function, warns of any errors, and throws only if there are no writers left
async _callWriters(fn, step, parallel = true) {
const writers = this._writers
@@ -75,21 +74,17 @@ exports.Abstract = class AbstractVmBackupRunner {
}
// check if current VM has tags
const tags = this._tags
const { tags } = this.vm
const intersect = settings.healthCheckVmsWithTags.some(t => tags.includes(t))
if (settings.healthCheckVmsWithTags.length !== 0 && !intersect) {
// create a task to have an info in the logs and reports
return Task.run(
{
name: 'health check',
},
() => {
Task.info(`This VM doesn't match the health check's tags for this schedule`)
}
)
return
}
await this._callWriters(writer => writer.healthCheck(), 'writer.healthCheck()')
await this._callWriters(writer => writer.healthCheck(this._healthCheckSr), 'writer.healthCheck()')
}
async run() {
throw new Error('not implemented')
}
}
exports.AbstractVmBackup = AbstractVmBackup

View File

@@ -1,25 +1,24 @@
'use strict'
const assert = require('assert')
const groupBy = require('lodash/groupBy.js')
const ignoreErrors = require('promise-toolbox/ignoreErrors')
const groupBy = require('lodash/groupBy.js')
const { asyncMap } = require('@xen-orchestra/async-map')
const { decorateMethodsWith } = require('@vates/decorate-with')
const { defer } = require('golike-defer')
const { formatDateTime } = require('@xen-orchestra/xapi')
const { getOldEntries } = require('./writers/_getOldEntries.js')
const { getOldEntries } = require('../../_getOldEntries.js')
const { Task } = require('../../Task.js')
const { Abstract } = require('./_Abstract.js')
const { AbstractVmBackup } = require('./AbstractVmBackup.js')
class AbstractXapiVmBackupRunner extends Abstract {
class AbstractXapiVmBackup extends AbstractVmBackup {
constructor({
config,
getSnapshotNameLabel,
healthCheckSr,
job,
remoteAdapters,
remotes,
schedule,
settings,
srs,
@@ -40,23 +39,23 @@ class AbstractXapiVmBackupRunner extends Abstract {
this.timestamp = undefined
// VM currently backed up
const tags = (this._tags = vm.tags)
this.vm = vm
const { tags } = this.vm
// VM (snapshot) that is really exported
this._exportedVm = undefined
this._vm = vm
this.exportedVm = undefined
this._fullVdisRequired = undefined
this._getSnapshotNameLabel = getSnapshotNameLabel
this._isIncremental = job.mode === 'delta'
this._healthCheckSr = healthCheckSr
this._jobId = job.id
this._jobSnapshots = undefined
this._throttleStream = throttleStream
this._xapi = vm.$xapi
// Base VM for the export
this._baseVm = undefined
// Reference VM for the incremental export
// if possible we will only export the difference between this VM and now
this._vmComparisonBasis = undefined
// Settings for this specific run (job, schedule, VM)
if (tags.includes('xo-memory-backup')) {
@@ -66,32 +65,22 @@ class AbstractXapiVmBackupRunner extends Abstract {
settings.offlineSnapshot = true
}
this._settings = settings
// Create writers
{
const writers = new Set()
this._writers = writers
const [BackupWriter, ReplicationWriter] = this._getWriters()
const [RemoteWriter, XapiWriter] = this._getWriters()
const allSettings = job.settings
Object.entries(remoteAdapters).forEach(([remoteId, adapter]) => {
Object.keys(remoteAdapters).forEach(remoteId => {
const targetSettings = {
...settings,
...allSettings[remoteId],
}
if (targetSettings.exportRetention !== 0) {
writers.add(
new BackupWriter({
adapter,
config,
healthCheckSr,
job,
scheduleId: schedule.id,
vmUuid: vm.uuid,
remoteId,
settings: targetSettings,
})
)
writers.add(new RemoteWriter({ backup: this, remoteId, settings: targetSettings }))
}
})
srs.forEach(sr => {
@@ -100,17 +89,7 @@ class AbstractXapiVmBackupRunner extends Abstract {
...allSettings[sr.uuid],
}
if (targetSettings.copyRetention !== 0) {
writers.add(
new ReplicationWriter({
config,
healthCheckSr,
job,
scheduleId: schedule.id,
vmUuid: vm.uuid,
sr,
settings: targetSettings,
})
)
writers.add(new XapiWriter({ backup: this, sr, settings: targetSettings }))
}
})
}
@@ -119,7 +98,7 @@ class AbstractXapiVmBackupRunner extends Abstract {
// ensure the VM itself does not have any backup metadata which would be
// copied on manual snapshots and interfere with the backup jobs
async _cleanMetadata() {
const vm = this._vm
const { vm } = this
if ('xo:backup:job' in vm.other_config) {
await vm.update_other_config({
'xo:backup:datetime': null,
@@ -133,7 +112,7 @@ class AbstractXapiVmBackupRunner extends Abstract {
}
async _snapshot() {
const vm = this._vm
const { vm } = this
const xapi = this._xapi
const settings = this._settings
@@ -158,19 +137,19 @@ class AbstractXapiVmBackupRunner extends Abstract {
'xo:backup:vm': vm.uuid,
})
this._exportedVm = await xapi.getRecord('VM', snapshotRef)
this.exportedVm = await xapi.getRecord('VM', snapshotRef)
return this._exportedVm.uuid
return this.exportedVm.uuid
})
} else {
this._exportedVm = vm
this.exportedVm = vm
this.timestamp = Date.now()
}
}
async _fetchJobSnapshots() {
const jobId = this._jobId
const vmRef = this._vm.$ref
const vmRef = this.vm.$ref
const xapi = this._xapi
const snapshotsRef = await xapi.getField('VM', vmRef, 'snapshots')
@@ -189,7 +168,7 @@ class AbstractXapiVmBackupRunner extends Abstract {
async _removeUnusedSnapshots() {
const allSettings = this.job.settings
const baseSettings = this._baseSettings
const baseVmRef = this._baseVm?.$ref
const vmComparisonBasisRef = this._vmComparisonBasis?.$ref
const snapshotsPerSchedule = groupBy(this._jobSnapshots, _ => _.other_config['xo:backup:schedule'])
const xapi = this._xapi
@@ -197,10 +176,10 @@ class AbstractXapiVmBackupRunner extends Abstract {
const settings = {
...baseSettings,
...allSettings[scheduleId],
...allSettings[this._vm.uuid],
...allSettings[this.vm.uuid],
}
return asyncMap(getOldEntries(settings.snapshotRetention, snapshots), ({ $ref }) => {
if ($ref !== baseVmRef) {
if ($ref !== vmComparisonBasisRef) {
return xapi.VM_destroy($ref)
}
})
@@ -239,12 +218,13 @@ class AbstractXapiVmBackupRunner extends Abstract {
await this._fetchJobSnapshots()
// will only do something during incremental Backup
await this._selectBaseVm()
await this._cleanMetadata()
await this._removeUnusedSnapshots()
const vm = this._vm
const { vm } = this
const isRunning = vm.power_state === 'Running'
const startAfter = isRunning && (settings.offlineBackup ? 'backup' : settings.offlineSnapshot && 'snapshot')
if (startAfter) {
@@ -271,8 +251,8 @@ class AbstractXapiVmBackupRunner extends Abstract {
await this._healthCheck()
}
}
exports.AbstractXapi = AbstractXapiVmBackupRunner
exports.AbstractXapiVmBackup = AbstractXapiVmBackup
decorateMethodsWith(AbstractXapiVmBackupRunner, {
decorateMethodsWith(AbstractXapiVmBackup, {
run: defer,
})

View File

@@ -0,0 +1,43 @@
'use strict'
const { decorateMethodsWith } = require('@vates/decorate-with')
const { defer } = require('golike-defer')
const { AbstractRemoteVmBackup } = require('./AbstractRemoteVmBackup')
const { FullRemoteWriter } = require('./writers/FullRemoteWriter')
const { forkStreamUnpipe } = require('../forkStreamUnpipe')
const FullRemoteVmBackup = class FullRemoteVmBackup extends AbstractRemoteVmBackup {
_getRemoteWriter() {
return FullRemoteWriter
}
async _run($defer) {
const transferList = await this._computeTransferList(({ mode }) => mode === 'full')
await this._callWriters(async writer => {
await writer.beforeBackup()
$defer(async () => {
await writer.afterBackup()
})
}, 'writer.beforeBackup()')
for (const metadata of transferList) {
const stream = await this._sourceRemoteAdapter.readFullVmBackup(metadata)
// @todo should skip if backup is already there (success on only one remote)
await this._callWriters(
writer =>
writer.run({
stream: forkStreamUnpipe(stream),
timestamp: metadata.timestamp,
vm: metadata.vm,
vmSnapshot: metadata.vmSnapshot,
}),
'writer.run()'
)
}
}
}
exports.FullRemoteVmBackup = FullRemoteVmBackup
decorateMethodsWith(FullRemoteVmBackup, {
_run: defer,
})

View File

@@ -2,21 +2,21 @@
const { createLogger } = require('@xen-orchestra/log')
const { forkStreamUnpipe } = require('../_forkStreamUnpipe.js')
const { FullRemoteWriter } = require('../_writers/FullRemoteWriter.js')
const { FullXapiWriter } = require('../_writers/FullXapiWriter.js')
const { forkStreamUnpipe } = require('../forkStreamUnpipe.js')
const { watchStreamSize } = require('../../_watchStreamSize.js')
const { AbstractXapi } = require('./_AbstractXapi.js')
const { FullRemoteWriter } = require('./writers/FullRemoteWriter.js')
const { FullXapiWriter } = require('./writers/FullXapiWriter.js')
const { AbstractXapiVmBackup } = require('./AbstractXapiVMBackup.js')
const { debug } = createLogger('xo:backups:FullXapiVmBackup')
const { debug } = createLogger('xo:backups:FullVmBackup')
exports.FullXapi = class FullXapiVmBackupRunner extends AbstractXapi {
class FullXapiVmBackup extends AbstractXapiVmBackup {
_getWriters() {
return [FullRemoteWriter, FullXapiWriter]
}
_mustDoSnapshot() {
const vm = this._vm
const { vm } = this
const settings = this._settings
return (
@@ -29,10 +29,8 @@ exports.FullXapi = class FullXapiVmBackupRunner extends AbstractXapi {
async _copy() {
const { compression } = this.job
const vm = this._vm
const exportedVm = this._exportedVm
const stream = this._throttleStream(
await this._xapi.VM_export(exportedVm.$ref, {
await this._xapi.VM_export(this.exportedVm.$ref, {
compress: Boolean(compression) && (compression === 'native' ? 'gzip' : 'zstd'),
useSnapshot: false,
})
@@ -47,8 +45,6 @@ exports.FullXapi = class FullXapiVmBackupRunner extends AbstractXapi {
sizeContainer,
stream: forkStreamUnpipe(stream),
timestamp,
vm,
vmSnapshot: exportedVm,
}),
'writer.run()'
)
@@ -61,5 +57,7 @@ exports.FullXapi = class FullXapiVmBackupRunner extends AbstractXapi {
speed: duration !== 0 ? (size * 1e3) / 1024 / 1024 / duration : 0,
size,
})
this._healthCheck()
}
}
exports.FullXapiVmBackup = FullXapiVmBackup

View File

@@ -3,15 +3,20 @@ const assert = require('node:assert')
const { decorateMethodsWith } = require('@vates/decorate-with')
const { defer } = require('golike-defer')
const { AbstractRemoteVmBackup } = require('./AbstractRemoteVmBackup')
const { mapValues } = require('lodash')
const { IncrementalRemoteWriter } = require('./writers/IncrementalRemoteWriter')
const { forkStreamUnpipe } = require('../forkStreamUnpipe')
const { Task } = require('../../Task')
const { AbstractRemote } = require('./_AbstractRemote')
const { IncrementalRemoteWriter } = require('../_writers/IncrementalRemoteWriter')
const { forkDeltaExport } = require('./_forkDeltaExport')
const isVhdDifferencingDisk = require('vhd-lib/isVhdDifferencingDisk')
const { asyncEach } = require('@vates/async-each')
class IncrementalRemoteVmBackupRunner extends AbstractRemote {
const forkDeltaExport = deltaExport =>
Object.create(deltaExport, {
streams: {
value: mapValues(deltaExport.streams, forkStreamUnpipe),
},
})
class IncrementalRemoteVmBackup extends AbstractRemoteVmBackup {
_getRemoteWriter() {
return IncrementalRemoteWriter
}
@@ -30,13 +35,7 @@ class IncrementalRemoteVmBackupRunner extends AbstractRemote {
await this._callWriters(writer => writer.prepare({ isBase: metadata.isBase }), 'writer.prepare()')
const incrementalExport = await this._sourceRemoteAdapter.readIncrementalVmBackup(metadata, undefined, {
useChain: false,
})
const differentialVhds = {}
await asyncEach(Object.entries(incrementalExport.streams), async ([key, stream]) => {
differentialVhds[key] = await isVhdDifferencingDisk(stream)
useSynthetic: false,
})
incrementalExport.streams = mapValues(incrementalExport.streams, this._throttleStream)
@@ -44,7 +43,6 @@ class IncrementalRemoteVmBackupRunner extends AbstractRemote {
writer =>
writer.transfer({
deltaExport: forkDeltaExport(incrementalExport),
differentialVhds,
timestamp: metadata.timestamp,
vm: metadata.vm,
vmSnapshot: metadata.vmSnapshot,
@@ -52,16 +50,15 @@ class IncrementalRemoteVmBackupRunner extends AbstractRemote {
'writer.transfer()'
)
await this._callWriters(writer => writer.cleanup(), 'writer.cleanup()')
// for healthcheck
this._tags = metadata.vm.tags
}
this._healthCheck()
} else {
Task.info('No new data to upload for this VM')
}
}
}
exports.IncrementalRemote = IncrementalRemoteVmBackupRunner
decorateMethodsWith(IncrementalRemoteVmBackupRunner, {
exports.IncrementalRemoteVmBackup = IncrementalRemoteVmBackup
decorateMethodsWith(IncrementalRemoteVmBackup, {
_run: defer,
})

View File

@@ -8,21 +8,26 @@ const { asyncMap } = require('@xen-orchestra/async-map')
const { createLogger } = require('@xen-orchestra/log')
const { pipeline } = require('node:stream')
const { IncrementalRemoteWriter } = require('../_writers/IncrementalRemoteWriter.js')
const { IncrementalXapiWriter } = require('../_writers/IncrementalXapiWriter.js')
const { exportIncrementalVm } = require('../../_incrementalVm.js')
const { forkStreamUnpipe } = require('../forkStreamUnpipe.js')
const { Task } = require('../../Task.js')
const { watchStreamSize } = require('../../_watchStreamSize.js')
const { AbstractXapi } = require('./_AbstractXapi.js')
const { forkDeltaExport } = require('./_forkDeltaExport.js')
const isVhdDifferencingDisk = require('vhd-lib/isVhdDifferencingDisk')
const { asyncEach } = require('@vates/async-each')
const { debug } = createLogger('xo:backups:IncrementalXapiVmBackup')
const { IncrementalRemoteWriter } = require('./writers/IncrementalRemoteWriter.js')
const { IncrementalXapiWriter } = require('./writers/IncrementalXapiWriter.js')
const { AbstractXapiVmBackup } = require('./AbstractXapiVMBackup.js')
const { debug } = createLogger('xo:backups:VmBackup')
const forkDeltaExport = deltaExport =>
Object.create(deltaExport, {
streams: {
value: mapValues(deltaExport.streams, forkStreamUnpipe),
},
})
const noop = Function.prototype
exports.IncrementalXapi = class IncrementalXapiVmBackupRunner extends AbstractXapi {
class IncrementalXapiVmBackup extends AbstractXapiVmBackup {
_getWriters() {
return [IncrementalRemoteWriter, IncrementalXapiWriter]
}
@@ -32,58 +37,50 @@ exports.IncrementalXapi = class IncrementalXapiVmBackupRunner extends AbstractXa
}
async _copy() {
const baseVm = this._baseVm
const vm = this._vm
const exportedVm = this._exportedVm
const { exportedVm } = this
const vmComparisonBasis = this._vmComparisonBasis
const fullVdisRequired = this._fullVdisRequired
const isFull = fullVdisRequired === undefined || fullVdisRequired.size !== 0
const isBase = fullVdisRequired === undefined || fullVdisRequired.size !== 0
await this._callWriters(writer => writer.prepare({ isFull }), 'writer.prepare()')
await this._callWriters(writer => writer.prepare({ isBase }), 'writer.prepare()')
const deltaExport = await exportIncrementalVm(exportedVm, baseVm, {
const incrementalExport = await exportIncrementalVm(exportedVm, vmComparisonBasis, {
fullVdisRequired,
})
// since NBD is network based, if one disk use nbd , all the disk use them
// except the suspended VDI
if (Object.values(deltaExport.streams).some(({ _nbd }) => _nbd)) {
if (Object.values(incrementalExport.streams).some(({ _nbd }) => _nbd)) {
Task.info('Transfer data using NBD')
}
const differentialVhds = {}
// since isVhdDifferencingDisk is reading and unshifting data in stream
// it should be done BEFORE any other stream transform
await asyncEach(Object.entries(deltaExport.streams), async ([key, stream]) => {
differentialVhds[key] = await isVhdDifferencingDisk(stream)
})
const sizeContainers = mapValues(deltaExport.streams, stream => watchStreamSize(stream))
const sizeContainers = mapValues(incrementalExport.streams, stream => watchStreamSize(stream))
if (this._settings.validateVhdStreams) {
deltaExport.streams = mapValues(deltaExport.streams, stream => pipeline(stream, vhdStreamValidator, noop))
incrementalExport.streams = mapValues(incrementalExport.streams, stream =>
pipeline(stream, vhdStreamValidator, noop)
)
}
deltaExport.streams = mapValues(deltaExport.streams, this._throttleStream)
incrementalExport.streams = mapValues(incrementalExport.streams, this._throttleStream)
const timestamp = Date.now()
await this._callWriters(
writer =>
writer.transfer({
deltaExport: forkDeltaExport(deltaExport),
differentialVhds,
deltaExport: forkDeltaExport(incrementalExport),
sizeContainers,
timestamp,
vm,
vmSnapshot: exportedVm,
}),
'writer.transfer()'
)
this._baseVm = exportedVm
this._vmComparisonBasis = exportedVm
if (baseVm !== undefined) {
if (vmComparisonBasis !== undefined) {
await exportedVm.update_other_config(
'xo:backup:deltaChainLength',
String(+(baseVm.other_config['xo:backup:deltaChainLength'] ?? 0) + 1)
String(+(vmComparisonBasis.other_config['xo:backup:deltaChainLength'] ?? 0) + 1)
)
}
@@ -116,11 +113,11 @@ exports.IncrementalXapi = class IncrementalXapiVmBackupRunner extends AbstractXa
const fullInterval = this._settings.fullInterval
const deltaChainLength = +(baseVm.other_config['xo:backup:deltaChainLength'] ?? 0) + 1
if (!(fullInterval === 0 || fullInterval > deltaChainLength)) {
debug('not using base VM becaust fullInterval reached')
debug('not using base VM because fullInterval reached')
return
}
const srcVdis = keyBy(await xapi.getRecords('VDI', await this._vm.$getDisks()), '$ref')
const srcVdis = keyBy(await xapi.getRecords('VDI', await this.vm.$getDisks()), '$ref')
// resolve full record
baseVm = await xapi.getRecord('VM', baseVm.$ref)
@@ -169,7 +166,8 @@ exports.IncrementalXapi = class IncrementalXapiVmBackupRunner extends AbstractXa
}
})
this._baseVm = baseVm
this._vmComparisonBasis = baseVm
this._fullVdisRequired = fullVdisRequired
}
}
exports.IncrementalXapiVmBackup = IncrementalXapiVmBackup

View File

@@ -1,8 +1,8 @@
'use strict'
const { formatFilenameDate } = require('../../_filenameDate.js')
const { getOldEntries } = require('../../_getOldEntries.js')
const { Task } = require('../../Task.js')
const { formatFilenameDate } = require('../../../_filenameDate.js')
const { getOldEntries } = require('./_getOldEntries.js')
const { Task } = require('../../../Task.js')
const { MixinRemoteWriter } = require('./_MixinRemoteWriter.js')
const { AbstractFullWriter } = require('./_AbstractFullWriter.js')
@@ -26,17 +26,15 @@ exports.FullRemoteWriter = class FullRemoteWriter extends MixinRemoteWriter(Abst
)
}
async _run({ timestamp, sizeContainer, stream, vm, vmSnapshot }) {
async _run({ timestamp, sizeContainer, stream, vm = this._backup.vm, vmSnapshot = this._backup.exportedVm }) {
const backup = this._backup
const settings = this._settings
const job = this._job
const scheduleId = this._scheduleId
const { job, scheduleId } = backup
const adapter = this._adapter
let metadata = await this._isAlreadyTransferred(timestamp)
if (metadata !== undefined) {
// @todo : should skip backup while being vigilant to not stuck the forked stream
Task.info('This backup has already been transfered')
}
// TODO: clean VM backup directory
const oldBackups = getOldEntries(
settings.exportRetention - 1,
@@ -49,7 +47,7 @@ exports.FullRemoteWriter = class FullRemoteWriter extends MixinRemoteWriter(Abst
const dataBasename = basename + '.xva'
const dataFilename = this._vmBackupDir + '/' + dataBasename
metadata = {
const metadata = {
jobId: job.id,
mode: job.mode,
scheduleId,
@@ -69,9 +67,9 @@ exports.FullRemoteWriter = class FullRemoteWriter extends MixinRemoteWriter(Abst
await adapter.outputStream(dataFilename, stream, {
validator: tmpPath => adapter.isValidXva(tmpPath),
})
return { size: sizeContainer.size }
return { size: sizeContainer?.size }
})
metadata.size = sizeContainer.size
metadata.size = sizeContainer?.size ?? 0
this._metadataFileName = await adapter.writeVmBackupMetadata(vm.uuid, metadata)
if (!deleteFirst) {

View File

@@ -4,9 +4,9 @@ const ignoreErrors = require('promise-toolbox/ignoreErrors')
const { asyncMap, asyncMapSettled } = require('@xen-orchestra/async-map')
const { formatDateTime } = require('@xen-orchestra/xapi')
const { formatFilenameDate } = require('../../_filenameDate.js')
const { getOldEntries } = require('../../_getOldEntries.js')
const { Task } = require('../../Task.js')
const { formatFilenameDate } = require('../../../_filenameDate.js')
const { getOldEntries } = require('./_getOldEntries.js')
const { Task } = require('../../../Task.js')
const { AbstractFullWriter } = require('./_AbstractFullWriter.js')
const { MixinXapiWriter } = require('./_MixinXapiWriter.js')
@@ -32,11 +32,10 @@ exports.FullXapiWriter = class FullXapiWriter extends MixinXapiWriter(AbstractFu
)
}
async _run({ timestamp, sizeContainer, stream, vm }) {
async _run({ timestamp, sizeContainer, stream }) {
const sr = this._sr
const settings = this._settings
const job = this._job
const scheduleId = this._scheduleId
const { job, scheduleId, vm } = this._backup
const { uuid: srUuid, $xapi: xapi } = sr

View File

@@ -1,9 +1,9 @@
'use strict'
const assert = require('assert')
const map = require('lodash/map.js')
const mapValues = require('lodash/mapValues.js')
const ignoreErrors = require('promise-toolbox/ignoreErrors')
const { asyncEach } = require('@vates/async-each')
const { asyncMap } = require('@xen-orchestra/async-map')
const { chainVhd, checkVhdChain, openVhd, VhdAbstract } = require('vhd-lib')
const { createLogger } = require('@xen-orchestra/log')
@@ -11,9 +11,9 @@ const { decorateClass } = require('@vates/decorate-with')
const { defer } = require('golike-defer')
const { dirname } = require('path')
const { formatFilenameDate } = require('../../_filenameDate.js')
const { getOldEntries } = require('../../_getOldEntries.js')
const { Task } = require('../../Task.js')
const { formatFilenameDate } = require('../../../_filenameDate.js')
const { getOldEntries } = require('./_getOldEntries.js')
const { Task } = require('../../../Task.js')
const { MixinRemoteWriter } = require('./_MixinRemoteWriter.js')
const { AbstractIncrementalWriter } = require('./_AbstractIncrementalWriter.js')
@@ -21,14 +21,15 @@ const { checkVhd } = require('./_checkVhd.js')
const { packUuid } = require('./_packUuid.js')
const { Disposable } = require('promise-toolbox')
const { warn } = createLogger('xo:backups:DeltaBackupWriter')
const { warn } = createLogger('xo:backups:IncrementalRemoteWriter')
class IncrementalRemoteWriter extends MixinRemoteWriter(AbstractIncrementalWriter) {
async checkBaseVdis(baseUuidToSrcVdi) {
const { handler } = this._adapter
const backup = this._backup
const adapter = this._adapter
const vdisDir = `${this._vmBackupDir}/vdis/${this._job.id}`
const vdisDir = `${this._vmBackupDir}/vdis/${backup.job.id}`
await asyncMap(baseUuidToSrcVdi, async ([baseUuid, srcVdi]) => {
let found = false
@@ -69,13 +70,13 @@ class IncrementalRemoteWriter extends MixinRemoteWriter(AbstractIncrementalWrite
return this._cleanVm({ merge: true })
}
prepare({ isFull }) {
prepare({ isBase }) {
// create the task related to this export and ensure all methods are called in this context
const task = new Task({
name: 'export',
data: {
id: this._remoteId,
isFull,
isBase,
type: 'remote',
},
})
@@ -90,12 +91,11 @@ class IncrementalRemoteWriter extends MixinRemoteWriter(AbstractIncrementalWrite
async _prepare() {
const adapter = this._adapter
const settings = this._settings
const scheduleId = this._scheduleId
const vmUuid = this._vmUuid
const { scheduleId, vm } = this._backup
const oldEntries = getOldEntries(
settings.exportRetention - 1,
await adapter.listVmBackups(vmUuid, _ => _.mode === 'delta' && _.scheduleId === scheduleId)
await adapter.listVmBackups(vm.uuid, _ => _.mode === 'delta' && _.scheduleId === scheduleId)
)
this._oldEntries = oldEntries
@@ -130,23 +130,20 @@ class IncrementalRemoteWriter extends MixinRemoteWriter(AbstractIncrementalWrite
// delete sequentially from newest to oldest to avoid unnecessary merges
for (let i = oldEntries.length; i-- > 0; ) {
await adapter.deleteDeltaVmBackups([oldEntries[i]])
await adapter.deleteIncrementalVmBackups([oldEntries[i]])
}
}
async _transfer($defer, { differentialVhds, timestamp, deltaExport, vm, vmSnapshot }) {
async _transfer($defer, { timestamp, deltaExport, vm = this._backup.vm, vmSnapshot = this._backup.exportedVm }) {
const adapter = this._adapter
const job = this._job
const scheduleId = this._scheduleId
const settings = this._settings
const backup = this._backup
const { job, scheduleId } = backup
const jobId = job.id
const handler = adapter.handler
let metadataContent = await this._isAlreadyTransferred(timestamp)
if (metadataContent !== undefined) {
// @todo : should skip backup while being vigilant to not stuck the forked stream
Task.info('This backup has already been transfered')
}
// TODO: clean VM backup directory
const basename = formatFilenameDate(timestamp)
const vhds = mapValues(
@@ -161,7 +158,7 @@ class IncrementalRemoteWriter extends MixinRemoteWriter(AbstractIncrementalWrite
}/${adapter.getVhdFileName(basename)}`
)
metadataContent = {
const metadataContent = {
jobId,
mode: job.mode,
scheduleId,
@@ -174,14 +171,14 @@ class IncrementalRemoteWriter extends MixinRemoteWriter(AbstractIncrementalWrite
vm,
vmSnapshot,
}
const { size } = await Task.run({ name: 'transfer' }, async () => {
let transferSize = 0
await asyncEach(
Object.entries(deltaExport.vdis),
async ([id, vdi]) => {
await Promise.all(
map(deltaExport.vdis, async (vdi, id) => {
const path = `${this._vmBackupDir}/${vhds[id]}`
const isDelta = differentialVhds[`${id}.vhd`]
const isDelta = vdi.other_config['xo:base_delta'] !== undefined
let parentPath
if (isDelta) {
const vdiDir = dirname(path)
@@ -194,11 +191,7 @@ class IncrementalRemoteWriter extends MixinRemoteWriter(AbstractIncrementalWrite
.sort()
.pop()
assert.notStrictEqual(
parentPath,
undefined,
`missing parent of ${id} in ${dirname(path)}, looking for ${vdi.other_config['xo:base_delta']}`
)
assert.notStrictEqual(parentPath, undefined, `missing parent of ${id}`)
parentPath = parentPath.slice(1) // remove leading slash
@@ -211,11 +204,18 @@ class IncrementalRemoteWriter extends MixinRemoteWriter(AbstractIncrementalWrite
// merges and chainings
checksum: false,
validator: tmpPath => checkVhd(handler, tmpPath),
writeBlockConcurrency: this._config.writeBlockConcurrency,
writeBlockConcurrency: this._backup.config.writeBlockConcurrency,
})
if (isDelta) {
await chainVhd(handler, parentPath, handler, path)
try {
await chainVhd(handler, parentPath, handler, path)
} catch (err) {
// @todo: check why this attempts to chain onto a full disk
if (err.message !== 'cannot chain disk of type 3') {
throw err
}
}
}
// set the correct UUID in the VHD
@@ -224,17 +224,12 @@ class IncrementalRemoteWriter extends MixinRemoteWriter(AbstractIncrementalWrite
await vhd.readBlockAllocationTable() // required by writeFooter()
await vhd.writeFooter()
})
},
{
concurrency: settings.diskPerVmConcurrency,
}
})
)
return { size: transferSize }
})
metadataContent.size = size
this._metadataFileName = await adapter.writeVmBackupMetadata(vm.uuid, metadataContent)
// TODO: run cleanup?
}
}

View File

@@ -4,10 +4,10 @@ const { asyncMap, asyncMapSettled } = require('@xen-orchestra/async-map')
const ignoreErrors = require('promise-toolbox/ignoreErrors')
const { formatDateTime } = require('@xen-orchestra/xapi')
const { formatFilenameDate } = require('../../_filenameDate.js')
const { getOldEntries } = require('../../_getOldEntries.js')
const { importIncrementalVm, TAG_COPY_SRC } = require('../../_incrementalVm.js')
const { Task } = require('../../Task.js')
const { formatFilenameDate } = require('../../../_filenameDate.js')
const { getOldEntries } = require('./_getOldEntries.js')
const { importIncrementalVm, TAG_COPY_SRC } = require('../../../_incrementalVm.js')
const { Task } = require('../../../Task.js')
const { AbstractIncrementalWriter } = require('./_AbstractIncrementalWriter.js')
const { MixinXapiWriter } = require('./_MixinXapiWriter.js')
@@ -16,7 +16,7 @@ const { listReplicatedVms } = require('./_listReplicatedVms.js')
exports.IncrementalXapiWriter = class IncrementalXapiWriter extends MixinXapiWriter(AbstractIncrementalWriter) {
async checkBaseVdis(baseUuidToSrcVdi, baseVm) {
const sr = this._sr
const replicatedVm = listReplicatedVms(sr.$xapi, this._job.id, sr.uuid, this._vmUuid).find(
const replicatedVm = listReplicatedVms(sr.$xapi, this._backup.job.id, sr.uuid, this._backup.vm.uuid).find(
vm => vm.other_config[TAG_COPY_SRC] === baseVm.uuid
)
if (replicatedVm === undefined) {
@@ -38,21 +38,20 @@ exports.IncrementalXapiWriter = class IncrementalXapiWriter extends MixinXapiWri
}
}
prepare({ isFull }) {
prepare({ isBase }) {
// create the task related to this export and ensure all methods are called in this context
const task = new Task({
name: 'export',
data: {
id: this._sr.uuid,
isFull,
isBase,
name_label: this._sr.name_label,
type: 'SR',
},
})
const hasHealthCheckSr = this._healthCheckSr !== undefined
this.transfer = task.wrapFn(this.transfer)
this.cleanup = task.wrapFn(this.cleanup, !hasHealthCheckSr)
this.healthCheck = task.wrapFn(this.healthCheck, hasHealthCheckSr)
this.cleanup = task.wrapFn(this.cleanup)
this.healthCheck = task.wrapFn(this.healthCheck, true)
return task.run(() => this._prepare())
}
@@ -60,13 +59,12 @@ exports.IncrementalXapiWriter = class IncrementalXapiWriter extends MixinXapiWri
async _prepare() {
const settings = this._settings
const { uuid: srUuid, $xapi: xapi } = this._sr
const vmUuid = this._vmUuid
const scheduleId = this._scheduleId
const { scheduleId, vm } = this._backup
// delete previous interrupted copies
ignoreErrors.call(asyncMapSettled(listReplicatedVms(xapi, scheduleId, undefined, vmUuid), vm => vm.$destroy))
ignoreErrors.call(asyncMapSettled(listReplicatedVms(xapi, scheduleId, undefined, vm.uuid), vm => vm.$destroy))
this._oldEntries = getOldEntries(settings.copyRetention - 1, listReplicatedVms(xapi, scheduleId, srUuid, vmUuid))
this._oldEntries = getOldEntries(settings.copyRetention - 1, listReplicatedVms(xapi, scheduleId, srUuid, vm.uuid))
if (settings.deleteFirst) {
await this._deleteOldEntries()
@@ -83,11 +81,10 @@ exports.IncrementalXapiWriter = class IncrementalXapiWriter extends MixinXapiWri
return asyncMapSettled(this._oldEntries, vm => vm.$destroy())
}
async _transfer({ timestamp, deltaExport, sizeContainers, vm }) {
async _transfer({ timestamp, deltaExport, sizeContainers }) {
const { _warmMigration } = this._settings
const sr = this._sr
const job = this._job
const scheduleId = this._scheduleId
const { job, scheduleId, vm } = this._backup
const { uuid: srUuid, $xapi: xapi } = sr

View File

@@ -3,9 +3,9 @@
const { AbstractWriter } = require('./_AbstractWriter.js')
exports.AbstractFullWriter = class AbstractFullWriter extends AbstractWriter {
async run({ timestamp, sizeContainer, stream, vm, vmSnapshot }) {
async run({ timestamp, sizeContainer, stream }) {
try {
return await this._run({ timestamp, sizeContainer, stream, vm, vmSnapshot })
return await this._run({ timestamp, sizeContainer, stream })
} finally {
// ensure stream is properly closed
stream.destroy()

View File

@@ -11,13 +11,13 @@ exports.AbstractIncrementalWriter = class AbstractIncrementalWriter extends Abst
throw new Error('Not implemented')
}
prepare({ isFull }) {
prepare({ isBase }) {
throw new Error('Not implemented')
}
async transfer({ deltaExport, ...other }) {
async transfer({ timestamp, deltaExport, sizeContainers }) {
try {
return await this._transfer({ deltaExport, ...other })
return await this._transfer({ timestamp, deltaExport, sizeContainers })
} finally {
// ensure all streams are properly closed
for (const stream of Object.values(deltaExport.streams)) {

View File

@@ -0,0 +1,14 @@
'use strict'
exports.AbstractWriter = class AbstractWriter {
constructor({ backup, settings }) {
this._backup = backup
this._settings = settings
}
beforeBackup() {}
afterBackup() {}
healthCheck(sr) {}
}

View File

@@ -4,26 +4,26 @@ const { createLogger } = require('@xen-orchestra/log')
const { join } = require('path')
const assert = require('assert')
const { formatFilenameDate } = require('../../_filenameDate.js')
const { getVmBackupDir } = require('../../_getVmBackupDir.js')
const { HealthCheckVmBackup } = require('../../HealthCheckVmBackup.js')
const { ImportVmBackup } = require('../../ImportVmBackup.js')
const { Task } = require('../../Task.js')
const MergeWorker = require('../../merge-worker/index.js')
const { formatFilenameDate } = require('../../../_filenameDate.js')
const { getVmBackupDir } = require('../../../_getVmBackupDir.js')
const { HealthCheckVmBackup } = require('../../../HealthCheckVmBackup.js')
const { ImportVmBackup } = require('../../../ImportVmBackup.js')
const { Task } = require('../../../Task.js')
const MergeWorker = require('../../../merge-worker/index.js')
const { info, warn } = createLogger('xo:backups:MixinBackupWriter')
const { info, warn } = createLogger('xo:backups:MixinRemoteWriter')
exports.MixinRemoteWriter = (BaseClass = Object) =>
class MixinRemoteWriter extends BaseClass {
#lock
constructor({ remoteId, adapter, ...rest }) {
constructor({ remoteId, ...rest }) {
super(rest)
this._adapter = adapter
this._adapter = rest.backup.remoteAdapters[remoteId]
this._remoteId = remoteId
this._vmBackupDir = getVmBackupDir(rest.vmUuid)
this._vmBackupDir = getVmBackupDir(this._backup.vm.uuid)
}
async _cleanVm(options) {
@@ -38,7 +38,7 @@ exports.MixinRemoteWriter = (BaseClass = Object) =>
Task.warning(message, data)
},
lock: false,
mergeBlockConcurrency: this._config.mergeBlockConcurrency,
mergeBlockConcurrency: this._backup.config.mergeBlockConcurrency,
})
})
} catch (error) {
@@ -55,10 +55,10 @@ exports.MixinRemoteWriter = (BaseClass = Object) =>
}
async afterBackup() {
const { disableMergeWorker } = this._config
const { disableMergeWorker } = this._backup.config
// merge worker only compatible with local remotes
const { handler } = this._adapter
const willMergeInWorker = !disableMergeWorker && typeof handler.getRealPath === 'function'
const willMergeInWorker = !disableMergeWorker && typeof handler._getRealPath === 'function'
const { merge } = await this._cleanVm({ remove: true, merge: !willMergeInWorker })
await this.#lock.dispose()
@@ -70,15 +70,13 @@ exports.MixinRemoteWriter = (BaseClass = Object) =>
// add a random suffix to avoid collision in case multiple tasks are created at the same second
Math.random().toString(36).slice(2)
await handler.outputFile(taskFile, this._vmUuid)
const remotePath = handler.getRealPath()
await handler.outputFile(taskFile, this._backup.vm.uuid)
const remotePath = handler._getRealPath()
await MergeWorker.run(remotePath)
}
}
healthCheck() {
const sr = this._healthCheckSr
assert.notStrictEqual(sr, undefined, 'SR should be defined before making a health check')
healthCheck(sr) {
assert.notStrictEqual(
this._metadataFileName,
undefined,
@@ -111,16 +109,4 @@ exports.MixinRemoteWriter = (BaseClass = Object) =>
}
)
}
_isAlreadyTransferred(timestamp) {
const vmUuid = this._vmUuid
const adapter = this._adapter
const backupDir = getVmBackupDir(vmUuid)
try {
const actualMetadata = JSON.parse(
adapter._handler.readFile(`${backupDir}/${formatFilenameDate(timestamp)}.json`)
)
return actualMetadata
} catch (error) {}
}
}

View File

@@ -0,0 +1,50 @@
'use strict'
const { Task } = require('../../../Task')
const assert = require('node:assert/strict')
const { HealthCheckVmBackup } = require('../../../HealthCheckVmBackup')
function extractOpaqueRef(str) {
const OPAQUE_REF_RE = /OpaqueRef:[0-9a-z-]+/
const matches = OPAQUE_REF_RE.exec(str)
if (!matches) {
throw new Error('no opaque ref found')
}
return matches[0]
}
exports.MixinXapiWriter = (BaseClass = Object) =>
class MixinXapiWriter extends BaseClass {
constructor({ sr, ...rest }) {
super(rest)
this._sr = sr
}
healthCheck(sr) {
assert.notEqual(this._targetVmRef, undefined, 'A vm should have been transfered to be health checked')
// copy VM
return Task.run(
{
name: 'health check',
},
async () => {
const { $xapi: xapi } = sr
let clonedVm
try {
const baseVm = xapi.getObject(this._targetVmRef) ?? (await xapi.waitObject(this._targetVmRef))
const clonedRef = await xapi
.callAsync('VM.clone', this._targetVmRef, `Health Check - ${baseVm.name_label}`)
.then(extractOpaqueRef)
clonedVm = xapi.getObject(clonedRef) ?? (await xapi.waitObject(clonedRef))
await new HealthCheckVmBackup({
restoredVm: clonedVm,
xapi,
}).run()
} finally {
clonedVm && (await xapi.VM_destroy(clonedVm.$ref))
}
}
)
}
}

View File

@@ -6,12 +6,12 @@ const { limitConcurrency } = require('limit-concurrency-decorator')
const { extractIdsFromSimplePattern } = require('../extractIdsFromSimplePattern.js')
const { Task } = require('../Task.js')
const createStreamThrottle = require('./_createStreamThrottle.js')
const { DEFAULT_SETTINGS, Abstract } = require('./_Abstract.js')
const { runTask } = require('./_runTask.js')
const { getAdaptersByRemote } = require('./_getAdaptersByRemote.js')
const { IncrementalXapi } = require('./_vmRunners/IncrementalXapi.js')
const { FullXapi } = require('./_vmRunners/FullXapi.js')
const createStreamThrottle = require('./createStreamThrottle.js')
const { IncrementalXapiVmBackup } = require('./VmBackup/IncrementalXapiVmBackup.js')
const { FullXapiVmBackup } = require('./VmBackup/FullXapiVmBackup.js')
const { DEFAULT_SETTINGS, AbstractBackupJob } = require('./AbstractBackupJob.js')
const { runTask } = require('./runTask.js')
const { getAdaptersByRemote } = require('./getAdapterByRemote.js')
const DEFAULT_XAPI_VM_SETTINGS = {
bypassVdiChainsCheck: false,
@@ -19,7 +19,6 @@ const DEFAULT_XAPI_VM_SETTINGS = {
concurrency: 2,
copyRetention: 0,
deleteFirst: false,
diskPerVmConcurrency: 0, // not limited by default
exportRetention: 0,
fullInterval: 0,
healthCheckSr: undefined,
@@ -36,7 +35,7 @@ const DEFAULT_XAPI_VM_SETTINGS = {
vmTimeout: 0,
}
exports.VmsXapi = class VmsXapiBackupRunner extends Abstract {
exports.XapiVmBackupJob = class XapiVmBackupJob extends AbstractBackupJob {
_computeBaseSettings(config, job) {
const baseSettings = { ...DEFAULT_SETTINGS }
Object.assign(baseSettings, DEFAULT_XAPI_VM_SETTINGS, config.defaultSettings, config.vm?.defaultSettings)
@@ -114,10 +113,10 @@ exports.VmsXapi = class VmsXapiBackupRunner extends Abstract {
}
let vmBackup
if (job.mode === 'delta') {
vmBackup = new IncrementalXapi(opts)
vmBackup = new IncrementalXapiVmBackup(opts)
} else {
if (job.mode === 'full') {
vmBackup = new FullXapi(opts)
vmBackup = new FullXapiVmBackup(opts)
} else {
throw new Error(`Job mode ${job.mode} not implemented`)
}

View File

@@ -1,7 +1,6 @@
'use strict'
const { asyncMap } = require('@xen-orchestra/async-map')
const { join } = require('@xen-orchestra/fs/path')
const { DIR_XO_CONFIG_BACKUPS } = require('../RemoteAdapter.js')
const { formatFilenameDate } = require('../_filenameDate.js')
@@ -24,11 +23,10 @@ exports.XoMetadataBackup = class XoMetadataBackup {
const dir = `${scheduleDir}/${formatFilenameDate(timestamp)}`
const data = job.xoMetadata
const dataBaseName = './data.json'
const fileName = `${dir}/data.json`
const metadata = JSON.stringify(
{
data: dataBaseName,
jobId: job.id,
jobName: job.name,
scheduleId: schedule.id,
@@ -38,8 +36,6 @@ exports.XoMetadataBackup = class XoMetadataBackup {
null,
2
)
const dataFileName = join(dir, dataBaseName)
const metaDataFileName = `${dir}/metadata.json`
await asyncMap(
@@ -56,7 +52,7 @@ exports.XoMetadataBackup = class XoMetadataBackup {
async () => {
const handler = adapter.handler
const dirMode = this._config.dirMode
await handler.outputFile(dataFileName, data, { dirMode })
await handler.outputFile(fileName, data, { dirMode })
await handler.outputFile(metaDataFileName, metadata, {
dirMode,
})

View File

@@ -1,9 +1,9 @@
'use strict'
const getAdaptersByRemote = adapters => {
exports.getAdaptersByRemote = adapters => {
const adaptersByRemote = {}
adapters.forEach(({ adapter, remoteId }) => {
adaptersByRemote[remoteId] = adapter
})
return adaptersByRemote
}
exports.getAdaptersByRemote = getAdaptersByRemote

View File

@@ -0,0 +1,7 @@
'use strict'
const { Task } = require('../Task')
const noop = Function.prototype
exports.runTask = (...args) => Task.run(...args).catch(noop) // errors are handled by logs

View File

@@ -13,10 +13,10 @@ const { createDebounceResource } = require('@vates/disposable/debounceResource.j
const { decorateMethodsWith } = require('@vates/decorate-with')
const { deduped } = require('@vates/disposable/deduped.js')
const { getHandler } = require('@xen-orchestra/fs')
const { createRunner } = require('./Backup.js')
const { parseDuration } = require('@vates/parse-duration')
const { Xapi } = require('@xen-orchestra/xapi')
const { instantiateBackupJob } = require('./backupJob.js')
const { RemoteAdapter } = require('./RemoteAdapter.js')
const { Task } = require('./Task.js')
@@ -48,7 +48,7 @@ class BackupWorker {
}
run() {
return createRunner({
return instantiateBackupJob({
config: this.#config,
getAdapter: remoteId => this.getAdapter(this.#remotes[remoteId]),
getConnectedRecord: Disposable.factory(async function* getConnectedRecord(type, uuid) {

View File

@@ -3,6 +3,7 @@
const { beforeEach, afterEach, test, describe } = require('test')
const assert = require('assert').strict
const rimraf = require('rimraf')
const tmp = require('tmp')
const fs = require('fs-extra')
const uuid = require('uuid')
@@ -13,7 +14,6 @@ const { VHDFOOTER, VHDHEADER } = require('./tests.fixtures.js')
const { VhdFile, Constants, VhdDirectory, VhdAbstract } = require('vhd-lib')
const { checkAliases } = require('./_cleanVm')
const { dirname, basename } = require('path')
const { rimraf } = require('rimraf')
let tempDir, adapter, handler, jobId, vdiId, basePath, relativePath
const rootPath = 'xo-vm-backups/VMUUID/'

View File

@@ -35,7 +35,7 @@ const resolveUuid = async (xapi, cache, uuid, type) => {
exports.exportIncrementalVm = async function exportIncrementalVm(
vm,
baseVm,
vmComparisonBasis,
{
cancelToken = CancelToken.none,
@@ -46,12 +46,12 @@ exports.exportIncrementalVm = async function exportIncrementalVm(
} = {}
) {
// refs of VM's VDIs → base's VDIs.
const baseVdis = {}
baseVm &&
baseVm.$VBDs.forEach(vbd => {
const vdisCompaisonBasis = {}
vmComparisonBasis &&
vmComparisonBasis.$VBDs.forEach(vbd => {
let vdi, snapshotOf
if ((vdi = vbd.$VDI) && (snapshotOf = vdi.$snapshot_of) && !fullVdisRequired.has(snapshotOf.uuid)) {
baseVdis[vdi.snapshot_of] = vdi
vdisCompaisonBasis[vdi.snapshot_of] = vdi
}
})
@@ -74,20 +74,20 @@ exports.exportIncrementalVm = async function exportIncrementalVm(
}
// Look for a snapshot of this vdi in the base VM.
const baseVdi = baseVdis[vdi.snapshot_of]
const vdiComparisonBasis = vdisCompaisonBasis[vdi.snapshot_of]
vdis[vdiRef] = {
...vdi,
other_config: {
...vdi.other_config,
[TAG_BASE_DELTA]: baseVdi && !disableBaseTags ? baseVdi.uuid : undefined,
[TAG_BASE_DELTA]: vdiComparisonBasis && !disableBaseTags ? vdiComparisonBasis.uuid : undefined,
},
$snapshot_of$uuid: vdi.$snapshot_of?.uuid,
$SR$uuid: vdi.$SR.uuid,
}
streams[`${vdiRef}.vhd`] = await vdi.$exportContent({
baseRef: baseVdi?.$ref,
baseRef: vdiComparisonBasis?.$ref,
cancelToken,
format: 'vhd',
})
@@ -126,10 +126,10 @@ exports.exportIncrementalVm = async function exportIncrementalVm(
vm: {
...vm,
other_config:
baseVm && !disableBaseTags
vmComparisonBasis && !disableBaseTags
? {
...vm.other_config,
[TAG_BASE_DELTA]: baseVm.uuid,
[TAG_BASE_DELTA]: vmComparisonBasis.uuid,
}
: omit(vm.other_config, TAG_BASE_DELTA),
},

View File

@@ -1,6 +0,0 @@
'use strict'
const { Task } = require('../Task.js')
const noop = Function.prototype
const runTask = (...args) => Task.run(...args).catch(noop) // errors are handled by logs
exports.runTask = runTask

View File

@@ -1,53 +0,0 @@
'use strict'
const { decorateMethodsWith } = require('@vates/decorate-with')
const { defer } = require('golike-defer')
const { AbstractRemote } = require('./_AbstractRemote')
const { FullRemoteWriter } = require('../_writers/FullRemoteWriter')
const { forkStreamUnpipe } = require('../_forkStreamUnpipe')
const { watchStreamSize } = require('../../_watchStreamSize')
const { Task } = require('../../Task')
class FullRemoteVmBackupRunner extends AbstractRemote {
_getRemoteWriter() {
return FullRemoteWriter
}
async _run($defer) {
const transferList = await this._computeTransferList(({ mode }) => mode === 'full')
await this._callWriters(async writer => {
await writer.beforeBackup()
$defer(async () => {
await writer.afterBackup()
})
}, 'writer.beforeBackup()')
if (transferList.length > 0) {
for (const metadata of transferList) {
const stream = await this._sourceRemoteAdapter.readFullVmBackup(metadata)
const sizeContainer = watchStreamSize(stream)
// @todo shouldn't transfer backup if it will be deleted by retention policy (higher retention on source than destination)
await this._callWriters(
writer =>
writer.run({
stream: forkStreamUnpipe(stream),
timestamp: metadata.timestamp,
vm: metadata.vm,
vmSnapshot: metadata.vmSnapshot,
sizeContainer,
}),
'writer.run()'
)
// for healthcheck
this._tags = metadata.vm.tags
}
} else {
Task.info('No new data to upload for this VM')
}
}
}
exports.FullRemote = FullRemoteVmBackupRunner
decorateMethodsWith(FullRemoteVmBackupRunner, {
_run: defer,
})

View File

@@ -1,12 +0,0 @@
'use strict'
const { mapValues } = require('lodash')
const { forkStreamUnpipe } = require('../_forkStreamUnpipe')
exports.forkDeltaExport = function forkDeltaExport(deltaExport) {
return Object.create(deltaExport, {
streams: {
value: mapValues(deltaExport.streams, forkStreamUnpipe),
},
})
}

View File

@@ -1,31 +0,0 @@
'use strict'
const { formatFilenameDate } = require('../../_filenameDate')
const { getVmBackupDir } = require('../../_getVmBackupDir')
exports.AbstractWriter = class AbstractWriter {
constructor({ config, healthCheckSr, job, vmUuid, scheduleId, settings }) {
this._config = config
this._healthCheckSr = healthCheckSr
this._job = job
this._scheduleId = scheduleId
this._settings = settings
this._vmUuid = vmUuid
}
beforeBackup() {}
afterBackup() {}
healthCheck(sr) {}
_isAlreadyTransferred(timestamp) {
const vmUuid = this._vmUuid
const adapter = this._adapter
const backupDir = getVmBackupDir(vmUuid)
try {
const actualMetadata = JSON.parse(adapter._handler.readFile(`${backupDir}/${formatFilenameDate(timestamp)}.json`))
return actualMetadata
} catch (error) {}
}
}

View File

@@ -1,74 +0,0 @@
'use strict'
const { extractOpaqueRef } = require('@xen-orchestra/xapi')
const { Task } = require('../../Task')
const assert = require('node:assert/strict')
const { HealthCheckVmBackup } = require('../../HealthCheckVmBackup')
exports.MixinXapiWriter = (BaseClass = Object) =>
class MixinXapiWriter extends BaseClass {
constructor({ sr, ...rest }) {
super(rest)
this._sr = sr
}
// check whether the base VM has all of its disks on the health check SR
async #isAlreadyOnHealthCheckSr(baseVm) {
const xapi = baseVm.$xapi
const vdiRefs = await xapi.VM_getDisks(baseVm.$ref)
for (const vdiRef of vdiRefs) {
const vdi = xapi.getObject(vdiRef)
if (vdi.$SR.uuid !== this._heathCheckSr.uuid) {
return false
}
}
return true
}
healthCheck() {
const sr = this._healthCheckSr
assert.notStrictEqual(sr, undefined, 'SR should be defined before making a health check')
assert.notEqual(this._targetVmRef, undefined, 'A vm should have been transfered to be health checked')
// copy VM
return Task.run(
{
name: 'health check',
},
async () => {
const { $xapi: xapi } = sr
let healthCheckVmRef
try {
const baseVm = xapi.getObject(this._targetVmRef) ?? (await xapi.waitObject(this._targetVmRef))
if (await this.#isAlreadyOnHealthCheckSr(baseVm)) {
healthCheckVmRef = await Task.run(
{ name: 'cloning-vm' },
async () =>
await xapi
.callAsync('VM.clone', this._targetVmRef, `Health Check - ${baseVm.name_label}`)
.then(extractOpaqueRef)
)
} else {
healthCheckVmRef = await Task.run(
{ name: 'copying-vm' },
async () =>
await xapi
.callAsync('VM.copy', this._targetVmRef, `Health Check - ${baseVm.name_label}`, sr.$ref)
.then(extractOpaqueRef)
)
}
const healthCheckVm = xapi.getObject(healthCheckVmRef) ?? (await xapi.waitObject(healthCheckVmRef))
await new HealthCheckVmBackup({
restoredVm: healthCheckVm,
xapi,
}).run()
} finally {
healthCheckVmRef && (await xapi.VM_destroy(healthCheckVmRef))
}
}
)
}
}

View File

@@ -0,0 +1,24 @@
'use strict'
const { MetadatasBackupJob } = require('./_backupJob/MetadatasBackupJob.js')
const { RemoteVmBackupJob } = require('./_backupJob/RemoteVmBackupJob.js')
const { XapiVmBackupJob } = require('./_backupJob/XapiVmBackupJob.js')
exports.instantiateBackupJob = function instantiateBackupJob({
config,
getAdapter,
getConnectedRecord,
job,
schedule,
}) {
switch (job.type) {
case 'backup':
return new XapiVmBackupJob({ config, getAdapter, getConnectedRecord, job, schedule })
case 'remote-to-remote':
return new RemoteVmBackupJob({ config, getAdapter, getConnectedRecord, job, schedule })
case 'metadataBackup':
return new MetadatasBackupJob({ config, getAdapter, getConnectedRecord, job, schedule })
default:
throw new Error(`No runners for the backup type ${job.type}`)
}
}

View File

@@ -136,7 +136,7 @@ task.start(message: 'restore', data: { jobId: string, srId: string, time: number
## API
### Run description object
### Run description object Metadata / Pool backup
This is a JavaScript object containing all the information necessary to run a backup job.
@@ -150,11 +150,65 @@ job:
# Human readable identifier
name: string
# Whether this job is doing Full Backup / Disaster Recovery or
# Delta Backup / Continuous Replication
# which pools to save, can be undefined
pools : IdPattern
# which remotes to use
remotes: IdPattern
settings:
# Used for the whole job
'': Settings
# Used for a specific schedule
[ScheduleId]: Settings
# Used for a specific VM
[VmId]: Settings
# if defined : backup the xo metadata
xoMetadata : string
type: 'metadataBackup'
# Information necessary to connect to each remote
remotes:
[RemoteId]:
url: string
# Indicates which schedule is used for this run
schedule:
id: ScheduleId
# Information necessary to connect to each XAPI
xapis:
[XapiId]:
allowUnauthorized: boolean
credentials:
password: string
username: string
url: string
```
### Run description object Vms Xapi to remote and/or Xapi to Xapi
This is a JavaScript object containing all the information necessary to run a backup job.
```coffee
# Information about the job itself
job:
# Unique identifier
id: string
# Human readable identifier
name: string
# Whether this job is doing Full Backup / Full Replication or
# Incremental Backup / Incremental Replication
mode: 'full' | 'delta'
# For backup jobs, indicates which remotes to use
# indicates which remotes to write to. Can be empty.
remotes: IdPattern
settings:
@@ -168,19 +222,16 @@ job:
# Used for a specific VM
[VmId]: Settings
# For replication jobs, indicates which SRs to use
# indicates which SRs to use for replication jobs. Can be empty.
srs: IdPattern
type: 'backup' | 'mirrorBackup'
# Here for historical reasons, xapi to remote or xapi to xapi
type: 'backup'
# Indicates which VMs to backup/replicate for a xapi to remote backup job
# Indicates which VMs to backup/replicate
vms: IdPattern
# Indicates which remote to read from for a mirror backup job
sourceRemote: IdPattern
# Indicates which XAPI to use to connect to a specific VM or SR
# for remote-to-remote backup jobs, this is only needed if there is a health check
recordToXapi:
[ObjectId]: XapiId
@@ -203,6 +254,62 @@ xapis:
url: string
```
### Run description object Vms remote to remote
This is a JavaScript object containing all the information necessary to run a backup job.
```coffee
# Information about the job itself
job:
# Unique identifier
id: string
# Human readable identifier
name: string
# Whether this job is doing Full Backup / Full Replication or
# Incremental Backup / Incremental Replication
mode: 'full' | 'delta'
# Indicates which remotes to write VMs to
remotes: IdPattern
settings:
# Used for the whole job
'': Settings
# Used for a specific schedule
[ScheduleId]: Settings
# Used for a specific VM
[VmId]: Settings
# only transfer data saved by one of these jobs
# transfer all if empty
sourceJobIds: IdPattern
# Here for historical reasons, xapi to remote or xapi to xapi
type: 'remote-to-remote'
# Indicates which VMs to backup/replicate
vms: IdPattern
# Indicates the remote used to read VMs
sourceRemote:
[ObjectId]: XapiId
# Information necessary to connect to each remote (read or write)
remotes:
[RemoteId]:
url: string
# Indicates which schedule is used for this run
schedule:
id: ScheduleId
```
### `IdPattern`
For a single object:
@@ -231,7 +338,7 @@ Settings are described in [`@xen-orchestra/backups/Backup.js](https://github.com
- `prepare({ isFull })`
- `transfer({ timestamp, deltaExport, sizeContainers })`
- `cleanup()`
- `healthCheck()` // is not executed if no health check sr or tag doesn't match
- `healthCheck(sr)`
- **Full**
- `run({ timestamp, sizeContainer, stream })`
- `afterBackup()`

View File

@@ -8,9 +8,9 @@
"type": "git",
"url": "https://github.com/vatesfr/xen-orchestra.git"
},
"version": "0.39.0",
"version": "0.36.0",
"engines": {
"node": ">=14.18"
"node": ">=14.6"
},
"scripts": {
"postversion": "npm publish --access public",
@@ -24,10 +24,10 @@
"@vates/decorate-with": "^2.0.0",
"@vates/disposable": "^0.1.4",
"@vates/fuse-vhd": "^1.0.0",
"@vates/nbd-client": "^1.2.1",
"@vates/nbd-client": "^1.2.0",
"@vates/parse-duration": "^0.1.1",
"@xen-orchestra/async-map": "^0.1.2",
"@xen-orchestra/fs": "^4.0.1",
"@xen-orchestra/fs": "^3.3.4",
"@xen-orchestra/log": "^0.6.0",
"@xen-orchestra/template": "^0.1.0",
"compare-versions": "^5.0.1",
@@ -42,18 +42,17 @@
"promise-toolbox": "^0.21.0",
"proper-lockfile": "^4.1.2",
"uuid": "^9.0.0",
"vhd-lib": "^4.5.0",
"xen-api": "^1.3.3",
"vhd-lib": "^4.4.0",
"yazl": "^2.5.1"
},
"devDependencies": {
"rimraf": "^5.0.1",
"rimraf": "^4.1.1",
"sinon": "^15.0.1",
"test": "^3.2.1",
"tmp": "^0.2.1"
},
"peerDependencies": {
"@xen-orchestra/xapi": "^2.2.1"
"@xen-orchestra/xapi": "^2.2.0"
},
"license": "AGPL-3.0-or-later",
"author": {

View File

@@ -18,7 +18,7 @@
"preferGlobal": true,
"dependencies": {
"golike-defer": "^0.5.1",
"xen-api": "^1.3.3"
"xen-api": "^1.3.0"
},
"scripts": {
"postversion": "npm publish"

View File

@@ -1,7 +1,7 @@
{
"private": false,
"name": "@xen-orchestra/fs",
"version": "4.0.1",
"version": "3.3.4",
"license": "AGPL-3.0-or-later",
"description": "The File System for Xen Orchestra backups.",
"homepage": "https://github.com/vatesfr/xen-orchestra/tree/master/@xen-orchestra/fs",
@@ -30,6 +30,7 @@
"@vates/coalesce-calls": "^0.1.0",
"@vates/decorate-with": "^2.0.0",
"@vates/read-chunk": "^1.1.1",
"@xen-orchestra/async-map": "^0.1.2",
"@xen-orchestra/log": "^0.6.0",
"bind-property-descriptor": "^2.0.0",
"decorator-synchronized": "^0.6.0",
@@ -52,9 +53,7 @@
"@babel/preset-env": "^7.8.0",
"cross-env": "^7.0.2",
"dotenv": "^16.0.0",
"rimraf": "^5.0.1",
"sinon": "^15.0.4",
"test": "^3.3.0",
"rimraf": "^4.1.1",
"tmp": "^0.2.1"
},
"scripts": {
@@ -64,9 +63,7 @@
"prebuild": "yarn run clean",
"predev": "yarn run clean",
"prepublishOnly": "yarn run build",
"pretest": "yarn run build",
"postversion": "npm publish",
"test": "node--test ./dist/"
"postversion": "npm publish"
},
"author": {
"name": "Vates SAS",

View File

@@ -1,5 +1,4 @@
import { describe, it } from 'test'
import { strict as assert } from 'assert'
/* eslint-env jest */
import { Readable } from 'readable-stream'
import copyStreamToBuffer from './_copyStreamToBuffer.js'
@@ -17,6 +16,6 @@ describe('copyStreamToBuffer', () => {
await copyStreamToBuffer(stream, buffer)
assert.equal(buffer.toString(), 'hel')
expect(buffer.toString()).toBe('hel')
})
})

View File

@@ -1,5 +1,4 @@
import { describe, it } from 'test'
import { strict as assert } from 'assert'
/* eslint-env jest */
import { Readable } from 'readable-stream'
import createBufferFromStream from './_createBufferFromStream.js'
@@ -15,6 +14,6 @@ describe('createBufferFromStream', () => {
const buffer = await createBufferFromStream(stream)
assert.equal(buffer.toString(), 'hello')
expect(buffer.toString()).toBe('hello')
})
})

View File

@@ -1,6 +1,4 @@
import { describe, it } from 'test'
import { strict as assert } from 'assert'
/* eslint-env jest */
import { Readable } from 'node:stream'
import { _getEncryptor } from './_encryptor'
import crypto from 'crypto'
@@ -27,13 +25,13 @@ algorithms.forEach(algorithm => {
it('handle buffer', () => {
const encrypted = encryptor.encryptData(buffer)
if (algorithm !== 'none') {
assert.equal(encrypted.equals(buffer), false) // encrypted should be different
expect(encrypted.equals(buffer)).toEqual(false) // encrypted should be different
// ivlength, auth tag, padding
assert.notEqual(encrypted.length, buffer.length)
expect(encrypted.length).not.toEqual(buffer.length)
}
const decrypted = encryptor.decryptData(encrypted)
assert.equal(decrypted.equals(buffer), true)
expect(decrypted.equals(buffer)).toEqual(true)
})
it('handle stream', async () => {
@@ -41,12 +39,12 @@ algorithms.forEach(algorithm => {
stream.length = buffer.length
const encrypted = encryptor.encryptStream(stream)
if (algorithm !== 'none') {
assert.equal(encrypted.length, undefined)
expect(encrypted.length).toEqual(undefined)
}
const decrypted = encryptor.decryptStream(encrypted)
const decryptedBuffer = await streamToBuffer(decrypted)
assert.equal(decryptedBuffer.equals(buffer), true)
expect(decryptedBuffer.equals(buffer)).toEqual(true)
})
})
})

View File

@@ -1,5 +1,4 @@
import { describe, it } from 'test'
import { strict as assert } from 'assert'
/* eslint-env jest */
import guessAwsRegion from './_guessAwsRegion.js'
@@ -7,12 +6,12 @@ describe('guessAwsRegion', () => {
it('should return region from AWS URL', async () => {
const region = guessAwsRegion('s3.test-region.amazonaws.com')
assert.equal(region, 'test-region')
expect(region).toBe('test-region')
})
it('should return default region if none is found is AWS URL', async () => {
const region = guessAwsRegion('s3.amazonaws.com')
assert.equal(region, 'us-east-1')
expect(region).toBe('us-east-1')
})
})

View File

@@ -9,32 +9,28 @@ import LocalHandler from './local'
const sudoExeca = (command, args, opts) => execa('sudo', [command, ...args], opts)
export default class MountHandler extends LocalHandler {
#execa
#keeper
#params
#realPath
constructor(remote, { mountsDir = join(tmpdir(), 'xo-fs-mounts'), useSudo = false, ...opts } = {}, params) {
super(remote, opts)
this.#execa = useSudo ? sudoExeca : execa
this.#params = {
this._execa = useSudo ? sudoExeca : execa
this._keeper = undefined
this._params = {
...params,
options: [params.options, remote.options ?? params.defaultOptions].filter(_ => _ !== undefined).join(','),
}
this.#realPath = join(mountsDir, remote.id || Math.random().toString(36).slice(2))
this._realPath = join(mountsDir, remote.id || Math.random().toString(36).slice(2))
}
async _forget() {
const keeper = this.#keeper
const keeper = this._keeper
if (keeper === undefined) {
return
}
this.#keeper = undefined
this._keeper = undefined
await fs.close(keeper)
await ignoreErrors.call(
this.#execa('umount', [this.getRealPath()], {
this._execa('umount', [this._getRealPath()], {
env: {
LANG: 'C',
},
@@ -42,30 +38,30 @@ export default class MountHandler extends LocalHandler {
)
}
getRealPath() {
return this.#realPath
_getRealPath() {
return this._realPath
}
async _sync() {
// in case of multiple `sync`s, ensure we properly close previous keeper
{
const keeper = this.#keeper
const keeper = this._keeper
if (keeper !== undefined) {
this.#keeper = undefined
this._keeper = undefined
ignoreErrors.call(fs.close(keeper))
}
}
const realPath = this.getRealPath()
const realPath = this._getRealPath()
await fs.ensureDir(realPath)
try {
const { type, device, options, env } = this.#params
const { type, device, options, env } = this._params
// Linux mount is more flexible in which order the mount arguments appear.
// But FreeBSD requires this order of the arguments.
await this.#execa('mount', ['-o', options, '-t', type, device, realPath], {
await this._execa('mount', ['-o', options, '-t', type, device, realPath], {
env: {
LANG: 'C',
...env,
@@ -75,7 +71,7 @@ export default class MountHandler extends LocalHandler {
try {
// the failure may mean it's already mounted, use `findmnt` to check
// that's the case
await this.#execa('findmnt', [realPath], {
await this._execa('findmnt', [realPath], {
stdio: 'ignore',
})
} catch (_) {
@@ -86,7 +82,7 @@ export default class MountHandler extends LocalHandler {
// keep an open file on the mount to prevent it from being unmounted if used
// by another handler/process
const keeperPath = `${realPath}/.keeper_${Math.random().toString(36).slice(2)}`
this.#keeper = await fs.open(keeperPath, 'w')
this._keeper = await fs.open(keeperPath, 'w')
ignoreErrors.call(fs.unlink(keeperPath))
}
}

View File

@@ -1,6 +1,6 @@
import asyncMapSettled from '@xen-orchestra/async-map/legacy'
import assert from 'assert'
import getStream from 'get-stream'
import { asyncEach } from '@vates/async-each'
import { coalesceCalls } from '@vates/coalesce-calls'
import { createLogger } from '@xen-orchestra/log'
import { fromCallback, fromEvent, ignoreErrors, timeout } from 'promise-toolbox'
@@ -37,13 +37,8 @@ const ignoreEnoent = error => {
const noop = Function.prototype
class PrefixWrapper {
#prefix
constructor(handler, prefix) {
this.#prefix = prefix
// cannot be a private field because used by methods dynamically added
// outside of the class
this._prefix = prefix
this._handler = handler
}
@@ -55,7 +50,7 @@ class PrefixWrapper {
async list(dir, opts) {
const entries = await this._handler.list(this._resolve(dir), opts)
if (opts != null && opts.prependDir) {
const n = this.#prefix.length
const n = this._prefix.length
entries.forEach((entry, i, entries) => {
entries[i] = entry.slice(n)
})
@@ -67,21 +62,19 @@ class PrefixWrapper {
return this._handler.rename(this._resolve(oldPath), this._resolve(newPath))
}
// cannot be a private method because used by methods dynamically added
// outside of the class
_resolve(path) {
return this.#prefix + normalizePath(path)
return this._prefix + normalizePath(path)
}
}
export default class RemoteHandlerAbstract {
#rawEncryptor
#encryptor
get #encryptor() {
if (this.#rawEncryptor === undefined) {
get _encryptor() {
if (this.#encryptor === undefined) {
throw new Error(`Can't access to encryptor before remote synchronization`)
}
return this.#rawEncryptor
return this.#encryptor
}
constructor(remote, options = {}) {
@@ -118,10 +111,6 @@ export default class RemoteHandlerAbstract {
}
// Public members
//
// Should not be called directly because:
// - some concurrency limits may be applied which may lead to deadlocks
// - some preprocessing may be applied on parameters that should not be done multiple times (e.g. prefixing paths)
get type() {
throw new Error('Not implemented')
@@ -132,6 +121,10 @@ export default class RemoteHandlerAbstract {
return prefix === '/' ? this : new PrefixWrapper(this, prefix)
}
async closeFile(fd) {
await this.__closeFile(fd)
}
async createReadStream(file, { checksum = false, ignoreMissingChecksum = false, ...options } = {}) {
if (options.end !== undefined || options.start !== undefined) {
assert.strictEqual(this.isEncrypted, false, `Can't read part of a file when encryption is active ${file}`)
@@ -164,7 +157,7 @@ export default class RemoteHandlerAbstract {
}
if (this.isEncrypted) {
stream = this.#encryptor.decryptStream(stream)
stream = this._encryptor.decryptStream(stream)
} else {
// try to add the length prop if missing and not a range stream
if (stream.length === undefined && options.end === undefined && options.start === undefined) {
@@ -193,7 +186,7 @@ export default class RemoteHandlerAbstract {
path = normalizePath(path)
let checksumStream
input = this.#encryptor.encryptStream(input)
input = this._encryptor.encryptStream(input)
if (checksum) {
checksumStream = createChecksumStream()
pipeline(input, checksumStream, noop)
@@ -231,10 +224,10 @@ export default class RemoteHandlerAbstract {
assert.strictEqual(this.isEncrypted, false, `Can't compute size of an encrypted file ${file}`)
const size = await timeout.call(this._getSize(typeof file === 'string' ? normalizePath(file) : file), this._timeout)
return size - this.#encryptor.ivLength
return size - this._encryptor.ivLength
}
async __list(dir, { filter, ignoreMissing = false, prependDir = false } = {}) {
async list(dir, { filter, ignoreMissing = false, prependDir = false } = {}) {
try {
const virtualDir = normalizePath(dir)
dir = normalizePath(dir)
@@ -264,12 +257,20 @@ export default class RemoteHandlerAbstract {
return { dispose: await this._lock(path) }
}
async mkdir(dir, { mode } = {}) {
await this.__mkdir(normalizePath(dir), { mode })
}
async mktree(dir, { mode } = {}) {
await this._mktree(normalizePath(dir), { mode })
}
openFile(path, flags) {
return this.__openFile(path, flags)
}
async outputFile(file, data, { dirMode, flags = 'wx' } = {}) {
const encryptedData = this.#encryptor.encryptData(data)
const encryptedData = this._encryptor.encryptData(data)
await this._outputFile(normalizePath(file), encryptedData, { dirMode, flags })
}
@@ -278,9 +279,9 @@ export default class RemoteHandlerAbstract {
return this._read(typeof file === 'string' ? normalizePath(file) : file, buffer, position)
}
async __readFile(file, { flags = 'r' } = {}) {
async readFile(file, { flags = 'r' } = {}) {
const data = await this._readFile(normalizePath(file), { flags })
return this.#encryptor.decryptData(data)
return this._encryptor.decryptData(data)
}
async #rename(oldPath, newPath, { checksum }, createTree = true) {
@@ -300,11 +301,11 @@ export default class RemoteHandlerAbstract {
}
}
__rename(oldPath, newPath, { checksum = false } = {}) {
rename(oldPath, newPath, { checksum = false } = {}) {
return this.#rename(normalizePath(oldPath), normalizePath(newPath), { checksum })
}
async __copy(oldPath, newPath, { checksum = false } = {}) {
async copy(oldPath, newPath, { checksum = false } = {}) {
oldPath = normalizePath(oldPath)
newPath = normalizePath(newPath)
@@ -331,33 +332,33 @@ export default class RemoteHandlerAbstract {
async sync() {
await this._sync()
try {
await this.#checkMetadata()
await this._checkMetadata()
} catch (error) {
await this._forget()
throw error
}
}
async #canWriteMetadata() {
const list = await this.__list('/', {
async _canWriteMetadata() {
const list = await this.list('/', {
filter: e => !e.startsWith('.') && e !== ENCRYPTION_DESC_FILENAME && e !== ENCRYPTION_METADATA_FILENAME,
})
return list.length === 0
}
async #createMetadata() {
async _createMetadata() {
const encryptionAlgorithm = this._remote.encryptionKey === undefined ? 'none' : DEFAULT_ENCRYPTION_ALGORITHM
this.#rawEncryptor = _getEncryptor(encryptionAlgorithm, this._remote.encryptionKey)
this.#encryptor = _getEncryptor(encryptionAlgorithm, this._remote.encryptionKey)
await Promise.all([
this._writeFile(normalizePath(ENCRYPTION_DESC_FILENAME), JSON.stringify({ algorithm: encryptionAlgorithm }), {
flags: 'w',
}), // not encrypted
this.__writeFile(ENCRYPTION_METADATA_FILENAME, `{"random":"${randomUUID()}"}`, { flags: 'w' }), // encrypted
this.writeFile(ENCRYPTION_METADATA_FILENAME, `{"random":"${randomUUID()}"}`, { flags: 'w' }), // encrypted
])
}
async #checkMetadata() {
async _checkMetadata() {
let encryptionAlgorithm = 'none'
let data
try {
@@ -373,18 +374,18 @@ export default class RemoteHandlerAbstract {
}
try {
this.#rawEncryptor = _getEncryptor(encryptionAlgorithm, this._remote.encryptionKey)
this.#encryptor = _getEncryptor(encryptionAlgorithm, this._remote.encryptionKey)
// this file is encrypted
const data = await this.__readFile(ENCRYPTION_METADATA_FILENAME, 'utf-8')
const data = await this.readFile(ENCRYPTION_METADATA_FILENAME, 'utf-8')
JSON.parse(data)
} catch (error) {
// can be enoent, bad algorithm, or broeken json ( bad key or algorithm)
if (encryptionAlgorithm !== 'none') {
if (await this.#canWriteMetadata()) {
if (await this._canWriteMetadata()) {
// any other error , but on empty remote => update with remote settings
info('will update metadata of this remote')
return this.#createMetadata()
return this._createMetadata()
} else {
warn(
`The encryptionKey settings of this remote does not match the key used to create it. You won't be able to read any data from this remote`,
@@ -437,7 +438,7 @@ export default class RemoteHandlerAbstract {
await this._truncate(file, len)
}
async __unlink(file, { checksum = true } = {}) {
async unlink(file, { checksum = true } = {}) {
file = normalizePath(file)
if (checksum) {
@@ -452,8 +453,8 @@ export default class RemoteHandlerAbstract {
await this._write(typeof file === 'string' ? normalizePath(file) : file, buffer, position)
}
async __writeFile(file, data, { flags = 'wx' } = {}) {
const encryptedData = this.#encryptor.encryptData(data)
async writeFile(file, data, { flags = 'wx' } = {}) {
const encryptedData = this._encryptor.encryptData(data)
await this._writeFile(normalizePath(file), encryptedData, { flags })
}
@@ -464,8 +465,6 @@ export default class RemoteHandlerAbstract {
}
async __mkdir(dir, { mode } = {}) {
dir = normalizePath(dir)
try {
await this._mkdir(dir, { mode })
} catch (error) {
@@ -587,9 +586,9 @@ export default class RemoteHandlerAbstract {
if (validator !== undefined) {
await validator.call(this, tmpPath)
}
await this.__rename(tmpPath, path)
await this.rename(tmpPath, path)
} catch (error) {
await this.__unlink(tmpPath)
await this.unlink(tmpPath)
throw error
}
}
@@ -623,7 +622,7 @@ export default class RemoteHandlerAbstract {
}
const files = await this._list(dir)
await asyncEach(files, file =>
await asyncMapSettled(files, file =>
this._unlink(`${dir}/${file}`).catch(error => {
// Unlink dir behavior is not consistent across platforms
// https://github.com/nodejs/node-v0.x-archive/issues/5791
@@ -666,22 +665,7 @@ export default class RemoteHandlerAbstract {
}
get isEncrypted() {
return this.#encryptor.id !== 'NULL_ENCRYPTOR'
}
}
// from implementation methods, which names start with `__`, create public
// accessors on which external behaviors can be added (e.g. concurrency limits, path rewriting)
{
const proto = RemoteHandlerAbstract.prototype
for (const method of Object.getOwnPropertyNames(proto)) {
if (method.startsWith('__')) {
const publicName = method.slice(2)
assert(!Object.hasOwn(proto, publicName))
Object.defineProperty(proto, publicName, Object.getOwnPropertyDescriptor(proto, method))
}
return this._encryptor.id !== 'NULL_ENCRYPTOR'
}
}

View File

@@ -1,13 +1,11 @@
import { after, beforeEach, describe, it } from 'test'
import { strict as assert } from 'assert'
import sinon from 'sinon'
/* eslint-env jest */
import { DEFAULT_ENCRYPTION_ALGORITHM, _getEncryptor } from './_encryptor'
import { Disposable, pFromCallback, TimeoutError } from 'promise-toolbox'
import { getSyncedHandler } from '.'
import { rimraf } from 'rimraf'
import AbstractHandler from './abstract'
import fs from 'fs-extra'
import rimraf from 'rimraf'
import tmp from 'tmp'
const TIMEOUT = 10e3
@@ -26,7 +24,7 @@ class TestHandler extends AbstractHandler {
const noop = Function.prototype
const clock = sinon.useFakeTimers()
jest.useFakeTimers()
describe('closeFile()', () => {
it(`throws in case of timeout`, async () => {
@@ -35,8 +33,8 @@ describe('closeFile()', () => {
})
const promise = testHandler.closeFile({ fd: undefined, path: '' })
clock.tick(TIMEOUT)
await assert.rejects(promise, TimeoutError)
jest.advanceTimersByTime(TIMEOUT)
await expect(promise).rejects.toThrowError(TimeoutError)
})
})
@@ -47,8 +45,8 @@ describe('getInfo()', () => {
})
const promise = testHandler.getInfo()
clock.tick(TIMEOUT)
await assert.rejects(promise, TimeoutError)
jest.advanceTimersByTime(TIMEOUT)
await expect(promise).rejects.toThrowError(TimeoutError)
})
})
@@ -59,8 +57,8 @@ describe('getSize()', () => {
})
const promise = testHandler.getSize('')
clock.tick(TIMEOUT)
await assert.rejects(promise, TimeoutError)
jest.advanceTimersByTime(TIMEOUT)
await expect(promise).rejects.toThrowError(TimeoutError)
})
})
@@ -71,8 +69,8 @@ describe('list()', () => {
})
const promise = testHandler.list('.')
clock.tick(TIMEOUT)
await assert.rejects(promise, TimeoutError)
jest.advanceTimersByTime(TIMEOUT)
await expect(promise).rejects.toThrowError(TimeoutError)
})
})
@@ -83,8 +81,8 @@ describe('openFile()', () => {
})
const promise = testHandler.openFile('path')
clock.tick(TIMEOUT)
await assert.rejects(promise, TimeoutError)
jest.advanceTimersByTime(TIMEOUT)
await expect(promise).rejects.toThrowError(TimeoutError)
})
})
@@ -95,8 +93,8 @@ describe('rename()', () => {
})
const promise = testHandler.rename('oldPath', 'newPath')
clock.tick(TIMEOUT)
await assert.rejects(promise, TimeoutError)
jest.advanceTimersByTime(TIMEOUT)
await expect(promise).rejects.toThrowError(TimeoutError)
})
})
@@ -107,8 +105,8 @@ describe('rmdir()', () => {
})
const promise = testHandler.rmdir('dir')
clock.tick(TIMEOUT)
await assert.rejects(promise, TimeoutError)
jest.advanceTimersByTime(TIMEOUT)
await expect(promise).rejects.toThrowError(TimeoutError)
})
})
@@ -117,14 +115,14 @@ describe('encryption', () => {
beforeEach(async () => {
dir = await pFromCallback(cb => tmp.dir(cb))
})
after(async () => {
afterAll(async () => {
await rimraf(dir)
})
it('sync should NOT create metadata if missing (not encrypted)', async () => {
await Disposable.use(getSyncedHandler({ url: `file://${dir}` }), noop)
assert.deepEqual(await fs.readdir(dir), [])
expect(await fs.readdir(dir)).toEqual([])
})
it('sync should create metadata if missing (encrypted)', async () => {
@@ -133,12 +131,12 @@ describe('encryption', () => {
noop
)
assert.deepEqual(await fs.readdir(dir), ['encryption.json', 'metadata.json'])
expect(await fs.readdir(dir)).toEqual(['encryption.json', 'metadata.json'])
const encryption = JSON.parse(await fs.readFile(`${dir}/encryption.json`, 'utf-8'))
assert.equal(encryption.algorithm, DEFAULT_ENCRYPTION_ALGORITHM)
expect(encryption.algorithm).toEqual(DEFAULT_ENCRYPTION_ALGORITHM)
// encrypted , should not be parsable
assert.rejects(async () => JSON.parse(await fs.readFile(`${dir}/metadata.json`)))
expect(async () => JSON.parse(await fs.readFile(`${dir}/metadata.json`))).rejects.toThrowError()
})
it('sync should not modify existing metadata', async () => {
@@ -148,9 +146,9 @@ describe('encryption', () => {
await Disposable.use(await getSyncedHandler({ url: `file://${dir}` }), noop)
const encryption = JSON.parse(await fs.readFile(`${dir}/encryption.json`, 'utf-8'))
assert.equal(encryption.algorithm, 'none')
expect(encryption.algorithm).toEqual('none')
const metadata = JSON.parse(await fs.readFile(`${dir}/metadata.json`, 'utf-8'))
assert.equal(metadata.random, 'NOTSORANDOM')
expect(metadata.random).toEqual('NOTSORANDOM')
})
it('should modify metadata if empty', async () => {
@@ -162,11 +160,11 @@ describe('encryption', () => {
noop
)
let encryption = JSON.parse(await fs.readFile(`${dir}/encryption.json`, 'utf-8'))
assert.equal(encryption.algorithm, DEFAULT_ENCRYPTION_ALGORITHM)
expect(encryption.algorithm).toEqual(DEFAULT_ENCRYPTION_ALGORITHM)
await Disposable.use(getSyncedHandler({ url: `file://${dir}` }), noop)
encryption = JSON.parse(await fs.readFile(`${dir}/encryption.json`, 'utf-8'))
assert.equal(encryption.algorithm, 'none')
expect(encryption.algorithm).toEqual('none')
})
it(
@@ -180,9 +178,9 @@ describe('encryption', () => {
const handler = yield getSyncedHandler({ url: `file://${dir}?encryptionKey="73c1838d7d8a6088ca2317fb5f29cd91"` })
const encryption = JSON.parse(await fs.readFile(`${dir}/encryption.json`, 'utf-8'))
assert.equal(encryption.algorithm, DEFAULT_ENCRYPTION_ALGORITHM)
expect(encryption.algorithm).toEqual(DEFAULT_ENCRYPTION_ALGORITHM)
const metadata = JSON.parse(await handler.readFile(`./metadata.json`))
assert.equal(metadata.random, 'NOTSORANDOM')
expect(metadata.random).toEqual('NOTSORANDOM')
})
)
@@ -200,9 +198,9 @@ describe('encryption', () => {
// remote is now non empty : can't modify key anymore
await fs.writeFile(`${dir}/nonempty.json`, 'content')
await assert.rejects(
await expect(
Disposable.use(getSyncedHandler({ url: `file://${dir}?encryptionKey="73c1838d7d8a6088ca2317fb5f29cd10"` }), noop)
)
).rejects.toThrowError()
})
it('sync should fail when changing algorithm', async () => {
@@ -215,8 +213,8 @@ describe('encryption', () => {
// remote is now non empty : can't modify key anymore
await fs.writeFile(`${dir}/nonempty.json`, 'content')
await assert.rejects(
await expect(
Disposable.use(getSyncedHandler({ url: `file://${dir}?encryptionKey="73c1838d7d8a6088ca2317fb5f29cd91"` }), noop)
)
).rejects.toThrowError()
})
})

View File

@@ -1,5 +1,4 @@
import { after, afterEach, before, beforeEach, describe, it } from 'test'
import { strict as assert } from 'assert'
/* eslint-env jest */
import 'dotenv/config'
import { forOwn, random } from 'lodash'
@@ -54,11 +53,11 @@ handlers.forEach(url => {
})
}
before(async () => {
beforeAll(async () => {
handler = getHandler({ url }).addPrefix(`xo-fs-tests-${Date.now()}`)
await handler.sync()
})
after(async () => {
afterAll(async () => {
await handler.forget()
handler = undefined
})
@@ -73,63 +72,67 @@ handlers.forEach(url => {
describe('#type', () => {
it('returns the type of the remote', () => {
assert.equal(typeof handler.type, 'string')
expect(typeof handler.type).toBe('string')
})
})
describe('#getInfo()', () => {
let info
before(async () => {
beforeAll(async () => {
info = await handler.getInfo()
})
it('should return an object with info', async () => {
assert.equal(typeof info, 'object')
expect(typeof info).toBe('object')
})
it('should return correct type of attribute', async () => {
if (info.size !== undefined) {
assert.equal(typeof info.size, 'number')
expect(typeof info.size).toBe('number')
}
if (info.used !== undefined) {
assert.equal(typeof info.used, 'number')
expect(typeof info.used).toBe('number')
}
})
})
describe('#getSize()', () => {
before(() => handler.outputFile('file', TEST_DATA))
beforeEach(() => handler.outputFile('file', TEST_DATA))
testWithFileDescriptor('file', 'r', async () => {
assert.equal(await handler.getSize('file'), TEST_DATA_LEN)
expect(await handler.getSize('file')).toEqual(TEST_DATA_LEN)
})
})
describe('#list()', () => {
it(`should list the content of folder`, async () => {
await handler.outputFile('file', TEST_DATA)
assert.deepEqual(await handler.list('.'), ['file'])
await expect(await handler.list('.')).toEqual(['file'])
})
it('can prepend the directory to entries', async () => {
await handler.outputFile('dir/file', '')
assert.deepEqual(await handler.list('dir', { prependDir: true }), ['/dir/file'])
expect(await handler.list('dir', { prependDir: true })).toEqual(['/dir/file'])
})
it('can prepend the directory to entries', async () => {
await handler.outputFile('dir/file', '')
expect(await handler.list('dir', { prependDir: true })).toEqual(['/dir/file'])
})
it('throws ENOENT if no such directory', async () => {
await handler.rmtree('dir')
assert.equal((await rejectionOf(handler.list('dir'))).code, 'ENOENT')
expect((await rejectionOf(handler.list('dir'))).code).toBe('ENOENT')
})
it('can returns empty for missing directory', async () => {
assert.deepEqual(await handler.list('dir', { ignoreMissing: true }), [])
expect(await handler.list('dir', { ignoreMissing: true })).toEqual([])
})
})
describe('#mkdir()', () => {
it('creates a directory', async () => {
await handler.mkdir('dir')
assert.deepEqual(await handler.list('.'), ['dir'])
await expect(await handler.list('.')).toEqual(['dir'])
})
it('does not throw on existing directory', async () => {
@@ -140,15 +143,15 @@ handlers.forEach(url => {
it('throws ENOTDIR on existing file', async () => {
await handler.outputFile('file', '')
const error = await rejectionOf(handler.mkdir('file'))
assert.equal(error.code, 'ENOTDIR')
expect(error.code).toBe('ENOTDIR')
})
})
describe('#mktree()', () => {
it('creates a tree of directories', async () => {
await handler.mktree('dir/dir')
assert.deepEqual(await handler.list('.'), ['dir'])
assert.deepEqual(await handler.list('dir'), ['dir'])
await expect(await handler.list('.')).toEqual(['dir'])
await expect(await handler.list('dir')).toEqual(['dir'])
})
it('does not throw on existing directory', async () => {
@@ -159,27 +162,26 @@ handlers.forEach(url => {
it('throws ENOTDIR on existing file', async () => {
await handler.outputFile('dir/file', '')
const error = await rejectionOf(handler.mktree('dir/file'))
assert.equal(error.code, 'ENOTDIR')
expect(error.code).toBe('ENOTDIR')
})
it('throws ENOTDIR on existing file in path', async () => {
await handler.outputFile('file', '')
const error = await rejectionOf(handler.mktree('file/dir'))
assert.equal(error.code, 'ENOTDIR')
expect(error.code).toBe('ENOTDIR')
})
})
describe('#outputFile()', () => {
it('writes data to a file', async () => {
await handler.outputFile('file', TEST_DATA)
assert.deepEqual(await handler.readFile('file'), TEST_DATA)
expect(await handler.readFile('file')).toEqual(TEST_DATA)
})
it('throws on existing files', async () => {
await handler.unlink('file')
await handler.outputFile('file', '')
const error = await rejectionOf(handler.outputFile('file', ''))
assert.equal(error.code, 'EEXIST')
expect(error.code).toBe('EEXIST')
})
it("shouldn't timeout in case of the respect of the parallel execution restriction", async () => {
@@ -190,7 +192,7 @@ handlers.forEach(url => {
})
describe('#read()', () => {
before(() => handler.outputFile('file', TEST_DATA))
beforeEach(() => handler.outputFile('file', TEST_DATA))
const start = random(TEST_DATA_LEN)
const size = random(TEST_DATA_LEN)
@@ -198,8 +200,8 @@ handlers.forEach(url => {
testWithFileDescriptor('file', 'r', async ({ file }) => {
const buffer = Buffer.alloc(size)
const result = await handler.read(file, buffer, start)
assert.deepEqual(result.buffer, buffer)
assert.deepEqual(result, {
expect(result.buffer).toBe(buffer)
expect(result).toEqual({
buffer,
bytesRead: Math.min(size, TEST_DATA_LEN - start),
})
@@ -209,13 +211,12 @@ handlers.forEach(url => {
describe('#readFile', () => {
it('returns a buffer containing the contents of the file', async () => {
await handler.outputFile('file', TEST_DATA)
assert.deepEqual(await handler.readFile('file'), TEST_DATA)
expect(await handler.readFile('file')).toEqual(TEST_DATA)
})
it('throws on missing file', async () => {
await handler.unlink('file')
const error = await rejectionOf(handler.readFile('file'))
assert.equal(error.code, 'ENOENT')
expect(error.code).toBe('ENOENT')
})
})
@@ -224,19 +225,19 @@ handlers.forEach(url => {
await handler.outputFile('file', TEST_DATA)
await handler.rename('file', `file2`)
assert.deepEqual(await handler.list('.'), ['file2'])
assert.deepEqual(await handler.readFile(`file2`), TEST_DATA)
expect(await handler.list('.')).toEqual(['file2'])
expect(await handler.readFile(`file2`)).toEqual(TEST_DATA)
})
it(`should rename the file and create dest directory`, async () => {
await handler.outputFile('file', TEST_DATA)
await handler.rename('file', `sub/file2`)
assert.deepEqual(await handler.list('sub'), ['file2'])
assert.deepEqual(await handler.readFile(`sub/file2`), TEST_DATA)
expect(await handler.list('sub')).toEqual(['file2'])
expect(await handler.readFile(`sub/file2`)).toEqual(TEST_DATA)
})
it(`should fail with enoent if source file is missing`, async () => {
const error = await rejectionOf(handler.rename('file', `sub/file2`))
assert.equal(error.code, 'ENOENT')
expect(error.code).toBe('ENOENT')
})
})
@@ -244,15 +245,14 @@ handlers.forEach(url => {
it('should remove an empty directory', async () => {
await handler.mkdir('dir')
await handler.rmdir('dir')
assert.deepEqual(await handler.list('.'), [])
expect(await handler.list('.')).toEqual([])
})
it(`should throw on non-empty directory`, async () => {
await handler.outputFile('dir/file', '')
const error = await rejectionOf(handler.rmdir('.'))
assert.equal(error.code, 'ENOTEMPTY')
await handler.unlink('dir/file')
await expect(error.code).toEqual('ENOTEMPTY')
})
it('does not throw on missing directory', async () => {
@@ -265,7 +265,7 @@ handlers.forEach(url => {
await handler.outputFile('dir/file', '')
await handler.rmtree('dir')
assert.deepEqual(await handler.list('.'), [])
expect(await handler.list('.')).toEqual([])
})
})
@@ -273,9 +273,9 @@ handlers.forEach(url => {
it('tests the remote appears to be working', async () => {
const answer = await handler.test()
assert.equal(answer.success, true)
assert.equal(typeof answer.writeRate, 'number')
assert.equal(typeof answer.readRate, 'number')
expect(answer.success).toBe(true)
expect(typeof answer.writeRate).toBe('number')
expect(typeof answer.readRate).toBe('number')
})
})
@@ -284,7 +284,7 @@ handlers.forEach(url => {
await handler.outputFile('file', TEST_DATA)
await handler.unlink('file')
assert.deepEqual(await handler.list('.'), [])
await expect(await handler.list('.')).toEqual([])
})
it('does not throw on missing file', async () => {
@@ -294,7 +294,6 @@ handlers.forEach(url => {
describe('#write()', () => {
beforeEach(() => handler.outputFile('file', TEST_DATA))
afterEach(() => handler.unlink('file'))
const PATCH_DATA_LEN = Math.ceil(TEST_DATA_LEN / 2)
const PATCH_DATA = unsecureRandomBytes(PATCH_DATA_LEN)
@@ -323,7 +322,7 @@ handlers.forEach(url => {
describe(title, () => {
testWithFileDescriptor('file', 'r+', async ({ file }) => {
await handler.write(file, PATCH_DATA, offset)
assert.deepEqual(await handler.readFile('file'), expected)
await expect(await handler.readFile('file')).toEqual(expected)
})
})
}
@@ -331,7 +330,6 @@ handlers.forEach(url => {
})
describe('#truncate()', () => {
afterEach(() => handler.unlink('file'))
forOwn(
{
'shrinks file': (() => {
@@ -350,7 +348,7 @@ handlers.forEach(url => {
it(title, async () => {
await handler.outputFile('file', TEST_DATA)
await handler.truncate('file', length)
assert.deepEqual(await handler.readFile('file'), expected)
await expect(await handler.readFile('file')).toEqual(expected)
})
}
)

View File

@@ -34,14 +34,11 @@ function dontAddSyncStackTrace(fn, ...args) {
}
export default class LocalHandler extends RemoteHandlerAbstract {
#addSyncStackTrace
#retriesOnEagain
constructor(remote, opts = {}) {
super(remote)
this.#addSyncStackTrace = opts.syncStackTraces ?? true ? addSyncStackTrace : dontAddSyncStackTrace
this.#retriesOnEagain = {
this._addSyncStackTrace = opts.syncStackTraces ?? true ? addSyncStackTrace : dontAddSyncStackTrace
this._retriesOnEagain = {
delay: 1e3,
retries: 9,
...opts.retriesOnEagain,
@@ -54,26 +51,26 @@ export default class LocalHandler extends RemoteHandlerAbstract {
return 'file'
}
getRealPath() {
_getRealPath() {
return this._remote.path
}
getFilePath(file) {
return this.getRealPath() + file
_getFilePath(file) {
return this._getRealPath() + file
}
async _closeFile(fd) {
return this.#addSyncStackTrace(fs.close, fd)
return this._addSyncStackTrace(fs.close, fd)
}
async _copy(oldPath, newPath) {
return this.#addSyncStackTrace(fs.copy, this.getFilePath(oldPath), this.getFilePath(newPath))
return this._addSyncStackTrace(fs.copy, this._getFilePath(oldPath), this._getFilePath(newPath))
}
async _createReadStream(file, options) {
if (typeof file === 'string') {
const stream = fs.createReadStream(this.getFilePath(file), options)
await this.#addSyncStackTrace(fromEvent, stream, 'open')
const stream = fs.createReadStream(this._getFilePath(file), options)
await this._addSyncStackTrace(fromEvent, stream, 'open')
return stream
}
return fs.createReadStream('', {
@@ -85,8 +82,8 @@ export default class LocalHandler extends RemoteHandlerAbstract {
async _createWriteStream(file, options) {
if (typeof file === 'string') {
const stream = fs.createWriteStream(this.getFilePath(file), options)
await this.#addSyncStackTrace(fromEvent, stream, 'open')
const stream = fs.createWriteStream(this._getFilePath(file), options)
await this._addSyncStackTrace(fromEvent, stream, 'open')
return stream
}
return fs.createWriteStream('', {
@@ -101,7 +98,7 @@ export default class LocalHandler extends RemoteHandlerAbstract {
// filesystem, type, size, used, available, capacity and mountpoint.
// size, used, available and capacity may be `NaN` so we remove any `NaN`
// value from the object.
const info = await df.file(this.getFilePath('/'))
const info = await df.file(this._getFilePath('/'))
Object.keys(info).forEach(key => {
if (Number.isNaN(info[key])) {
delete info[key]
@@ -112,16 +109,16 @@ export default class LocalHandler extends RemoteHandlerAbstract {
}
async _getSize(file) {
const stats = await this.#addSyncStackTrace(fs.stat, this.getFilePath(typeof file === 'string' ? file : file.path))
const stats = await this._addSyncStackTrace(fs.stat, this._getFilePath(typeof file === 'string' ? file : file.path))
return stats.size
}
async _list(dir) {
return this.#addSyncStackTrace(fs.readdir, this.getFilePath(dir))
return this._addSyncStackTrace(fs.readdir, this._getFilePath(dir))
}
async _lock(path) {
const acquire = lockfile.lock.bind(undefined, this.getFilePath(path), {
const acquire = lockfile.lock.bind(undefined, this._getFilePath(path), {
async onCompromised(error) {
warn('lock compromised', { error })
try {
@@ -133,11 +130,11 @@ export default class LocalHandler extends RemoteHandlerAbstract {
},
})
let release = await this.#addSyncStackTrace(acquire)
let release = await this._addSyncStackTrace(acquire)
return async () => {
try {
await this.#addSyncStackTrace(release)
await this._addSyncStackTrace(release)
} catch (error) {
warn('lock could not be released', { error })
}
@@ -145,18 +142,18 @@ export default class LocalHandler extends RemoteHandlerAbstract {
}
_mkdir(dir, { mode }) {
return this.#addSyncStackTrace(fs.mkdir, this.getFilePath(dir), { mode })
return this._addSyncStackTrace(fs.mkdir, this._getFilePath(dir), { mode })
}
async _openFile(path, flags) {
return this.#addSyncStackTrace(fs.open, this.getFilePath(path), flags)
return this._addSyncStackTrace(fs.open, this._getFilePath(path), flags)
}
async _read(file, buffer, position) {
const needsClose = typeof file === 'string'
file = needsClose ? await this.#addSyncStackTrace(fs.open, this.getFilePath(file), 'r') : file.fd
file = needsClose ? await this._addSyncStackTrace(fs.open, this._getFilePath(file), 'r') : file.fd
try {
return await this.#addSyncStackTrace(
return await this._addSyncStackTrace(
fs.read,
file,
buffer,
@@ -166,44 +163,44 @@ export default class LocalHandler extends RemoteHandlerAbstract {
)
} finally {
if (needsClose) {
await this.#addSyncStackTrace(fs.close, file)
await this._addSyncStackTrace(fs.close, file)
}
}
}
async _readFile(file, options) {
const filePath = this.getFilePath(file)
return await this.#addSyncStackTrace(retry, () => fs.readFile(filePath, options), this.#retriesOnEagain)
const filePath = this._getFilePath(file)
return await this._addSyncStackTrace(retry, () => fs.readFile(filePath, options), this._retriesOnEagain)
}
async _rename(oldPath, newPath) {
return this.#addSyncStackTrace(fs.rename, this.getFilePath(oldPath), this.getFilePath(newPath))
return this._addSyncStackTrace(fs.rename, this._getFilePath(oldPath), this._getFilePath(newPath))
}
async _rmdir(dir) {
return this.#addSyncStackTrace(fs.rmdir, this.getFilePath(dir))
return this._addSyncStackTrace(fs.rmdir, this._getFilePath(dir))
}
async _sync() {
const path = this.getRealPath('/')
await this.#addSyncStackTrace(fs.ensureDir, path)
await this.#addSyncStackTrace(fs.access, path, fs.R_OK | fs.W_OK)
const path = this._getRealPath('/')
await this._addSyncStackTrace(fs.ensureDir, path)
await this._addSyncStackTrace(fs.access, path, fs.R_OK | fs.W_OK)
}
_truncate(file, len) {
return this.#addSyncStackTrace(fs.truncate, this.getFilePath(file), len)
return this._addSyncStackTrace(fs.truncate, this._getFilePath(file), len)
}
async _unlink(file) {
const filePath = this.getFilePath(file)
return await this.#addSyncStackTrace(retry, () => fs.unlink(filePath), this.#retriesOnEagain)
const filePath = this._getFilePath(file)
return await this._addSyncStackTrace(retry, () => fs.unlink(filePath), this._retriesOnEagain)
}
_writeFd(file, buffer, position) {
return this.#addSyncStackTrace(fs.write, file.fd, buffer, 0, buffer.length, position)
return this._addSyncStackTrace(fs.write, file.fd, buffer, 0, buffer.length, position)
}
_writeFile(file, data, { flags }) {
return this.#addSyncStackTrace(fs.writeFile, this.getFilePath(file), data, { flag: flags })
return this._addSyncStackTrace(fs.writeFile, this._getFilePath(file), data, { flag: flags })
}
}

View File

@@ -34,10 +34,6 @@ const MAX_PART_SIZE = 1024 * 1024 * 1024 * 5 // 5GB
const { warn } = createLogger('xo:fs:s3')
export default class S3Handler extends RemoteHandlerAbstract {
#bucket
#dir
#s3
constructor(remote, _opts) {
super(remote)
const {
@@ -50,7 +46,7 @@ export default class S3Handler extends RemoteHandlerAbstract {
region = guessAwsRegion(host),
} = parse(remote.url)
this.#s3 = new S3Client({
this._s3 = new S3Client({
apiVersion: '2006-03-01',
endpoint: `${protocol}://${host}`,
forcePathStyle: true,
@@ -73,27 +69,27 @@ export default class S3Handler extends RemoteHandlerAbstract {
})
// Workaround for https://github.com/aws/aws-sdk-js-v3/issues/2673
this.#s3.middlewareStack.use(getApplyMd5BodyChecksumPlugin(this.#s3.config))
this._s3.middlewareStack.use(getApplyMd5BodyChecksumPlugin(this._s3.config))
const parts = split(path)
this.#bucket = parts.shift()
this.#dir = join(...parts)
this._bucket = parts.shift()
this._dir = join(...parts)
}
get type() {
return 's3'
}
#makeCopySource(path) {
return join(this.#bucket, this.#dir, path)
_makeCopySource(path) {
return join(this._bucket, this._dir, path)
}
#makeKey(file) {
return join(this.#dir, file)
_makeKey(file) {
return join(this._dir, file)
}
#makePrefix(dir) {
const prefix = join(this.#dir, dir, '/')
_makePrefix(dir) {
const prefix = join(this._dir, dir, '/')
// no prefix for root
if (prefix !== './') {
@@ -101,20 +97,20 @@ export default class S3Handler extends RemoteHandlerAbstract {
}
}
#createParams(file) {
return { Bucket: this.#bucket, Key: this.#makeKey(file) }
_createParams(file) {
return { Bucket: this._bucket, Key: this._makeKey(file) }
}
async #multipartCopy(oldPath, newPath) {
async _multipartCopy(oldPath, newPath) {
const size = await this._getSize(oldPath)
const CopySource = this.#makeCopySource(oldPath)
const multipartParams = await this.#s3.send(new CreateMultipartUploadCommand({ ...this.#createParams(newPath) }))
const CopySource = this._makeCopySource(oldPath)
const multipartParams = await this._s3.send(new CreateMultipartUploadCommand({ ...this._createParams(newPath) }))
try {
const parts = []
let start = 0
while (start < size) {
const partNumber = parts.length + 1
const upload = await this.#s3.send(
const upload = await this._s3.send(
new UploadPartCopyCommand({
...multipartParams,
CopySource,
@@ -125,31 +121,31 @@ export default class S3Handler extends RemoteHandlerAbstract {
parts.push({ ETag: upload.CopyPartResult.ETag, PartNumber: partNumber })
start += MAX_PART_SIZE
}
await this.#s3.send(
await this._s3.send(
new CompleteMultipartUploadCommand({
...multipartParams,
MultipartUpload: { Parts: parts },
})
)
} catch (e) {
await this.#s3.send(new AbortMultipartUploadCommand(multipartParams))
await this._s3.send(new AbortMultipartUploadCommand(multipartParams))
throw e
}
}
async _copy(oldPath, newPath) {
const CopySource = this.#makeCopySource(oldPath)
const CopySource = this._makeCopySource(oldPath)
try {
await this.#s3.send(
await this._s3.send(
new CopyObjectCommand({
...this.#createParams(newPath),
...this._createParams(newPath),
CopySource,
})
)
} catch (e) {
// object > 5GB must be copied part by part
if (e.name === 'EntityTooLarge') {
return this.#multipartCopy(oldPath, newPath)
return this._multipartCopy(oldPath, newPath)
}
// normalize this error code
if (e.name === 'NoSuchKey') {
@@ -163,20 +159,20 @@ export default class S3Handler extends RemoteHandlerAbstract {
}
}
async #isNotEmptyDir(path) {
const result = await this.#s3.send(
async _isNotEmptyDir(path) {
const result = await this._s3.send(
new ListObjectsV2Command({
Bucket: this.#bucket,
Bucket: this._bucket,
MaxKeys: 1,
Prefix: this.#makePrefix(path),
Prefix: this._makePrefix(path),
})
)
return result.Contents?.length > 0
}
async #isFile(path) {
async _isFile(path) {
try {
await this.#s3.send(new HeadObjectCommand(this.#createParams(path)))
await this._s3.send(new HeadObjectCommand(this._createParams(path)))
return true
} catch (error) {
if (error.name === 'NotFound') {
@@ -193,9 +189,9 @@ export default class S3Handler extends RemoteHandlerAbstract {
pipeline(input, Body, () => {})
const upload = new Upload({
client: this.#s3,
client: this._s3,
params: {
...this.#createParams(path),
...this._createParams(path),
Body,
},
})
@@ -206,7 +202,7 @@ export default class S3Handler extends RemoteHandlerAbstract {
try {
await validator.call(this, path)
} catch (error) {
await this.__unlink(path)
await this.unlink(path)
throw error
}
}
@@ -228,9 +224,9 @@ export default class S3Handler extends RemoteHandlerAbstract {
},
})
async _writeFile(file, data, options) {
return this.#s3.send(
return this._s3.send(
new PutObjectCommand({
...this.#createParams(file),
...this._createParams(file),
Body: data,
})
)
@@ -238,7 +234,7 @@ export default class S3Handler extends RemoteHandlerAbstract {
async _createReadStream(path, options) {
try {
return (await this.#s3.send(new GetObjectCommand(this.#createParams(path)))).Body
return (await this._s3.send(new GetObjectCommand(this._createParams(path)))).Body
} catch (e) {
if (e.name === 'NoSuchKey') {
const error = new Error(`ENOENT: no such file '${path}'`)
@@ -251,9 +247,9 @@ export default class S3Handler extends RemoteHandlerAbstract {
}
async _unlink(path) {
await this.#s3.send(new DeleteObjectCommand(this.#createParams(path)))
await this._s3.send(new DeleteObjectCommand(this._createParams(path)))
if (await this.#isNotEmptyDir(path)) {
if (await this._isNotEmptyDir(path)) {
const error = new Error(`EISDIR: illegal operation on a directory, unlink '${path}'`)
error.code = 'EISDIR'
error.path = path
@@ -264,12 +260,12 @@ export default class S3Handler extends RemoteHandlerAbstract {
async _list(dir) {
let NextContinuationToken
const uniq = new Set()
const Prefix = this.#makePrefix(dir)
const Prefix = this._makePrefix(dir)
do {
const result = await this.#s3.send(
const result = await this._s3.send(
new ListObjectsV2Command({
Bucket: this.#bucket,
Bucket: this._bucket,
Prefix,
Delimiter: '/',
// will only return path until delimiters
@@ -299,7 +295,7 @@ export default class S3Handler extends RemoteHandlerAbstract {
}
async _mkdir(path) {
if (await this.#isFile(path)) {
if (await this._isFile(path)) {
const error = new Error(`ENOTDIR: file already exists, mkdir '${path}'`)
error.code = 'ENOTDIR'
error.path = path
@@ -310,15 +306,15 @@ export default class S3Handler extends RemoteHandlerAbstract {
// s3 doesn't have a rename operation, so copy + delete source
async _rename(oldPath, newPath) {
await this.__copy(oldPath, newPath)
await this.#s3.send(new DeleteObjectCommand(this.#createParams(oldPath)))
await this.copy(oldPath, newPath)
await this._s3.send(new DeleteObjectCommand(this._createParams(oldPath)))
}
async _getSize(file) {
if (typeof file !== 'string') {
file = file.fd
}
const result = await this.#s3.send(new HeadObjectCommand(this.#createParams(file)))
const result = await this._s3.send(new HeadObjectCommand(this._createParams(file)))
return +result.ContentLength
}
@@ -326,15 +322,15 @@ export default class S3Handler extends RemoteHandlerAbstract {
if (typeof file !== 'string') {
file = file.fd
}
const params = this.#createParams(file)
const params = this._createParams(file)
params.Range = `bytes=${position}-${position + buffer.length - 1}`
try {
const result = await this.#s3.send(new GetObjectCommand(params))
const result = await this._s3.send(new GetObjectCommand(params))
const bytesRead = await copyStreamToBuffer(result.Body, buffer)
return { bytesRead, buffer }
} catch (e) {
if (e.name === 'NoSuchKey') {
if (await this.#isNotEmptyDir(file)) {
if (await this._isNotEmptyDir(file)) {
const error = new Error(`${file} is a directory`)
error.code = 'EISDIR'
error.path = file
@@ -346,7 +342,7 @@ export default class S3Handler extends RemoteHandlerAbstract {
}
async _rmdir(path) {
if (await this.#isNotEmptyDir(path)) {
if (await this._isNotEmptyDir(path)) {
const error = new Error(`ENOTEMPTY: directory not empty, rmdir '${path}`)
error.code = 'ENOTEMPTY'
error.path = path
@@ -360,11 +356,11 @@ export default class S3Handler extends RemoteHandlerAbstract {
// @todo : use parallel processing for unlink
async _rmtree(path) {
let NextContinuationToken
const Prefix = this.#makePrefix(path)
const Prefix = this._makePrefix(path)
do {
const result = await this.#s3.send(
const result = await this._s3.send(
new ListObjectsV2Command({
Bucket: this.#bucket,
Bucket: this._bucket,
Prefix,
ContinuationToken: NextContinuationToken,
})
@@ -376,9 +372,9 @@ export default class S3Handler extends RemoteHandlerAbstract {
async ({ Key }) => {
// _unlink will add the prefix, but Key contains everything
// also we don't need to check if we delete a directory, since the list only return files
await this.#s3.send(
await this._s3.send(
new DeleteObjectCommand({
Bucket: this.#bucket,
Bucket: this._bucket,
Key,
})
)

View File

@@ -1,8 +1,6 @@
# ChangeLog
## **next**
## **0.1.1** (2023-07-03)
## **0.2.0**
- Invalidate sessionId token after logout (PR [#6480](https://github.com/vatesfr/xen-orchestra/pull/6480))
- Settings page (PR [#6418](https://github.com/vatesfr/xen-orchestra/pull/6418))
@@ -17,9 +15,6 @@
- Add a star icon near the pool master (PR [#6712](https://github.com/vatesfr/xen-orchestra/pull/6712))
- Display an error message if the data cannot be fetched (PR [#6525](https://github.com/vatesfr/xen-orchestra/pull/6525))
- Add "Under Construction" views (PR [#6673](https://github.com/vatesfr/xen-orchestra/pull/6673))
- Ability to change the state of selected VMs from the pool's list of VMs (PR [#6782](https://github.com/vatesfr/xen-orchestra/pull/6782))
- Ability to copy selected VMs from the pool's list of VMs (PR [#6847](https://github.com/vatesfr/xen-orchestra/pull/6847))
- Ability to delete selected VMs from the pool's list of VMs (PR [#6673](https://github.com/vatesfr/xen-orchestra/pull/6860))
## **0.1.0**

View File

@@ -157,6 +157,35 @@ export const useFoobarStore = defineStore("foobar", () => {
});
```
#### Xen Api Collection Stores
When creating a store for a Xen Api objects collection, use the `createXenApiCollectionStoreContext` helper.
```typescript
export const useConsoleStore = defineStore("console", () =>
createXenApiCollectionStoreContext("console")
);
```
##### Extending the base context
Here is how to extend the base context:
```typescript
import { computed } from "vue";
export const useFoobarStore = defineStore("foobar", () => {
const baseContext = createXenApiCollectionStoreContext("foobar");
const myCustomGetter = computed(() => baseContext.ids.reverse());
return {
...baseContext,
myCustomGetter,
};
});
```
### I18n
Internationalization of the app is done with [Vue-i18n](https://vue-i18n.intlify.dev/).

View File

@@ -1,144 +0,0 @@
# Stores for XenApiRecord collections
All collections of `XenApiRecord` are stored inside the `xapiCollectionStore`.
To retrieve a collection, invoke `useXapiCollectionStore().get(type)`.
## Accessing a collection
In order to use a collection, you'll need to subscribe to it.
```typescript
const consoleStore = useXapiCollectionStore().get("console");
const { records, getByUuid /* ... */ } = consoleStore.subscribe();
```
## Deferred subscription
If you wish to initialize the subscription on demand, you can pass `{ immediate: false }` as options to `subscribe()`.
```typescript
const consoleStore = useXapiCollectionStore().get("console");
const { records, start, isStarted /* ... */ } = consoleStore.subscribe({
immediate: false,
});
// Later, you can then use start() to initialize the subscription.
```
## Create a dedicated store for a collection
To create a dedicated store for a specific `XenApiRecord`, simply return the collection from the XAPI Collection Store:
```typescript
export const useConsoleStore = defineStore("console", () =>
useXapiCollectionStore().get("console")
);
```
## Extending the base Subscription
To extend the base Subscription, you'll need to override the `subscribe` method.
For that, you can use the `createSubscribe<XenApiRecord, Extensions>((options) => { /* ... */})` helper.
### Define the extensions
Subscription extensions are defined as `(object | [object, RequiredOptions])[]`.
When using a tuple (`[object, RequiredOptions]`), the corresponding `object` type will be added to the subscription if
the `RequiredOptions` for that tuple are present in the options passed to `subscribe`.
```typescript
// Always present extension
type DefaultExtension = {
propA: string;
propB: ComputedRef<number>;
};
// Conditional extension 1
type FirstConditionalExtension = [
{ propC: ComputedRef<string> }, // <- This signature will be added
{ optC: string } // <- if this condition is met
];
// Conditional extension 2
type SecondConditionalExtension = [
{ propD: () => void }, // <- This signature will be added
{ optD: number } // <- if this condition is met
];
// Create the extensions array
type Extensions = [
DefaultExtension,
FirstConditionalExtension,
SecondConditionalExtension
];
```
### Define the subscription
```typescript
export const useConsoleStore = defineStore("console", () => {
const consoleCollection = useXapiCollectionStore().get("console");
const subscribe = createSubscribe<XenApiConsole, Extensions>((options) => {
const originalSubscription = consoleCollection.subscribe(options);
const extendedSubscription = {
propA: "Some string",
propB: computed(() => 42),
};
const propCSubscription = options?.optC !== undefined && {
propC: computed(() => "Some other string"),
};
const propDSubscription = options?.optD !== undefined && {
propD: () => console.log("Hello"),
};
return {
...originalSubscription,
...extendedSubscription,
...propCSubscription,
...propDSubscription,
};
});
return {
...consoleCollection,
subscribe,
};
});
```
The generated `subscribe` method will then automatically have the following `options` signature:
```typescript
type Options = {
immediate?: false;
optC?: string;
optD?: number;
};
```
### Use the subscription
In each case, all the default properties (`records`, `getByUuid`, etc.) will be present.
```typescript
const store = useConsoleStore();
// No options (propA and propB will be present)
const subscription = store.subscribe();
// optC option (propA, propB and propC will be present)
const subscription = store.subscribe({ optC: "Hello" });
// optD option (propA, propB and propD will be present)
const subscription = store.subscribe({ optD: 12 });
// optC and optD options (propA, propB, propC and propD will be present)
const subscription = store.subscribe({ optC: "Hello", optD: 12 });
```

View File

@@ -1,6 +1,6 @@
{
"name": "@xen-orchestra/lite",
"version": "0.1.1",
"version": "0.1.0",
"scripts": {
"dev": "GIT_HEAD=$(git rev-parse HEAD) vite",
"build": "run-p type-check build-only",
@@ -19,8 +19,8 @@
"@types/d3-time-format": "^4.0.0",
"@types/lodash-es": "^4.17.6",
"@types/marked": "^4.0.8",
"@vueuse/core": "^10.1.2",
"@vueuse/math": "^10.1.2",
"@vueuse/core": "^9.5.0",
"@vueuse/math": "^9.5.0",
"complex-matcher": "^0.7.0",
"d3-time-format": "^4.1.0",
"decorator-synchronized": "^0.6.0",
@@ -34,19 +34,19 @@
"lodash-es": "^4.17.21",
"make-error": "^1.3.6",
"marked": "^4.2.12",
"pinia": "^2.1.2",
"pinia": "^2.0.14",
"placement.js": "^1.0.0-beta.5",
"vue": "^3.3.4",
"vue": "^3.2.37",
"vue-echarts": "^6.2.3",
"vue-i18n": "^9.2.2",
"vue-router": "^4.2.1"
"vue-i18n": "9",
"vue-router": "^4.0.16"
},
"devDependencies": {
"@intlify/unplugin-vue-i18n": "^0.10.0",
"@intlify/vite-plugin-vue-i18n": "^6.0.1",
"@limegrass/eslint-plugin-import-alias": "^1.0.5",
"@rushstack/eslint-patch": "^1.1.0",
"@types/node": "^16.11.41",
"@vitejs/plugin-vue": "^4.2.3",
"@vitejs/plugin-vue": "^3.2.0",
"@vue/eslint-config-prettier": "^7.0.0",
"@vue/eslint-config-typescript": "^11.0.0",
"@vue/tsconfig": "^0.1.3",
@@ -56,9 +56,9 @@
"postcss-custom-media": "^9.0.1",
"postcss-nested": "^6.0.0",
"typescript": "^4.9.3",
"vite": "^4.3.8",
"vite-plugin-pages": "^0.29.1",
"vue-tsc": "^1.6.5"
"vite": "^3.2.4",
"vite-plugin-pages": "^0.27.1",
"vue-tsc": "^1.0.9"
},
"private": true,
"homepage": "https://github.com/vatesfr/xen-orchestra/tree/master/@xen-orchestra/lite",

View File

@@ -1,5 +1,25 @@
<template>
<UnreachableHostsModal />
<UiModal
v-if="isSslModalOpen"
:icon="faServer"
color="error"
@close="clearUnreachableHostsUrls"
>
<template #title>{{ $t("unreachable-hosts") }}</template>
<template #subtitle>{{ $t("following-hosts-unreachable") }}</template>
<p>{{ $t("allow-self-signed-ssl") }}</p>
<ul>
<li v-for="url in unreachableHostsUrls" :key="url.hostname">
<a :href="url.href" rel="noopener" target="_blank">{{ url.href }}</a>
</li>
</ul>
<template #buttons>
<UiButton color="success" @click="reload">
{{ $t("unreachable-hosts-reload-page") }}
</UiButton>
<UiButton @click="clearUnreachableHostsUrls">{{ $t("cancel") }}</UiButton>
</template>
</UiModal>
<div v-if="!$route.meta.hasStoryNav && !xenApiStore.isConnected">
<AppLogin />
</div>
@@ -21,15 +41,21 @@ import AppHeader from "@/components/AppHeader.vue";
import AppLogin from "@/components/AppLogin.vue";
import AppNavigation from "@/components/AppNavigation.vue";
import AppTooltips from "@/components/AppTooltips.vue";
import UnreachableHostsModal from "@/components/UnreachableHostsModal.vue";
import UiButton from "@/components/ui/UiButton.vue";
import UiModal from "@/components/ui/UiModal.vue";
import { useChartTheme } from "@/composables/chart-theme.composable";
import { useHostStore } from "@/stores/host.store";
import { usePoolStore } from "@/stores/pool.store";
import { useUiStore } from "@/stores/ui.store";
import { useXenApiStore } from "@/stores/xen-api.store";
import { faServer } from "@fortawesome/free-solid-svg-icons";
import { useActiveElement, useMagicKeys, whenever } from "@vueuse/core";
import { logicAnd } from "@vueuse/math";
import { computed } from "vue";
import { useI18n } from "vue-i18n";
import { difference } from "lodash-es";
import { computed, ref, watch } from "vue";
const unreachableHostsUrls = ref<URL[]>([]);
const clearUnreachableHostsUrls = () => (unreachableHostsUrls.value = []);
let link = document.querySelector(
"link[rel~='icon']"
@@ -44,16 +70,16 @@ link.href = favicon;
document.title = "XO Lite";
const xenApiStore = useXenApiStore();
const { records: hosts } = useHostStore().subscribe();
const { pool } = usePoolStore().subscribe();
useChartTheme();
const uiStore = useUiStore();
if (import.meta.env.DEV) {
const { locale } = useI18n();
const activeElement = useActiveElement();
const { D, L } = useMagicKeys();
const { D } = useMagicKeys();
const canToggle = computed(() => {
const canToggleDarkMode = computed(() => {
if (activeElement.value == null) {
return true;
}
@@ -62,16 +88,22 @@ if (import.meta.env.DEV) {
});
whenever(
logicAnd(D, canToggle),
logicAnd(D, canToggleDarkMode),
() => (uiStore.colorMode = uiStore.colorMode === "dark" ? "light" : "dark")
);
whenever(
logicAnd(L, canToggle),
() => (locale.value = locale.value === "en" ? "fr" : "en")
);
}
watch(hosts, (hosts, previousHosts) => {
difference(hosts, previousHosts).forEach((host) => {
const url = new URL("http://localhost");
url.protocol = window.location.protocol;
url.hostname = host.address;
fetch(url, { mode: "no-cors" }).catch(() =>
unreachableHostsUrls.value.push(url)
);
});
});
whenever(
() => pool.value?.$ref,
async (poolRef) => {
@@ -80,6 +112,9 @@ whenever(
await xenApi.startWatch();
}
);
const isSslModalOpen = computed(() => unreachableHostsUrls.value.length > 0);
const reload = () => window.location.reload();
</script>
<style lang="postcss">

View File

@@ -13,10 +13,13 @@
v-model="password"
:placeholder="$t('password')"
:readonly="isConnecting"
required
/>
</FormInputWrapper>
<UiButton type="submit" :busy="isConnecting">
<UiButton
type="submit"
:busy="isConnecting"
:disabled="password.trim().length < 1"
>
{{ $t("login") }}
</UiButton>
</form>

View File

@@ -1,15 +1,15 @@
<template>
<div v-if="!isDisabled" ref="tooltipElement" class="app-tooltip">
<span class="triangle" />
<span class="label">{{ options.content }}</span>
<span class="label">{{ content }}</span>
</div>
</template>
<script lang="ts" setup>
import type { TooltipOptions } from "@/stores/tooltip.store";
import { isString } from "lodash-es";
import { isEmpty, isFunction, isString } from "lodash-es";
import place from "placement.js";
import { computed, ref, watchEffect } from "vue";
import type { TooltipOptions } from "@/stores/tooltip.store";
const props = defineProps<{
target: HTMLElement;
@@ -18,13 +18,29 @@ const props = defineProps<{
const tooltipElement = ref<HTMLElement>();
const isDisabled = computed(() =>
isString(props.options.content)
? props.options.content.trim() === ""
: props.options.content === false
const content = computed(() =>
isString(props.options) ? props.options : props.options.content
);
const placement = computed(() => props.options.placement ?? "top");
const isDisabled = computed(() => {
if (isEmpty(content.value)) {
return true;
}
if (isString(props.options)) {
return false;
}
if (isFunction(props.options.disabled)) {
return props.options.disabled(props.target);
}
return props.options.disabled ?? false;
});
const placement = computed(() =>
isString(props.options) ? "top" : props.options.placement ?? "top"
);
watchEffect(() => {
if (tooltipElement.value) {

View File

@@ -6,26 +6,23 @@
<slot v-else />
</template>
<script
generic="T extends XenApiRecord<string>, I extends T['uuid']"
lang="ts"
setup
>
<script lang="ts" setup>
import UiSpinner from "@/components/ui/UiSpinner.vue";
import type { XenApiRecord } from "@/libs/xen-api";
import ObjectNotFoundView from "@/views/ObjectNotFoundView.vue";
import { computed } from "vue";
import { useRouter } from "vue-router";
const props = defineProps<{
isReady: boolean;
uuidChecker: (uuid: I) => boolean;
id?: I;
uuidChecker: (uuid: string) => boolean;
id?: string;
}>();
const { currentRoute } = useRouter();
const id = computed(() => props.id ?? (currentRoute.value.params.uuid as I));
const id = computed(
() => props.id ?? (currentRoute.value.params.uuid as string)
);
const isRecordNotFound = computed(
() => props.isReady && !props.uuidChecker(id.value)

View File

@@ -4,7 +4,7 @@
<script lang="ts" setup>
import UiIcon from "@/components/ui/icon/UiIcon.vue";
import { POWER_STATE } from "@/libs/xen-api";
import type { PowerState } from "@/libs/xen-api";
import {
faMoon,
faPause,
@@ -15,14 +15,14 @@ import {
import { computed } from "vue";
const props = defineProps<{
state: POWER_STATE;
state: PowerState;
}>();
const icons = {
[POWER_STATE.RUNNING]: faPlay,
[POWER_STATE.PAUSED]: faPause,
[POWER_STATE.SUSPENDED]: faMoon,
[POWER_STATE.HALTED]: faStop,
Running: faPlay,
Paused: faPause,
Suspended: faMoon,
Halted: faStop,
};
const icon = computed(() => icons[props.state] ?? faQuestion);

View File

@@ -4,7 +4,7 @@
<script lang="ts" setup>
import { fibonacci } from "iterable-backoff";
import { computed, onBeforeUnmount, ref, watchEffect } from "vue";
import { computed, onBeforeUnmount, ref, watch, watchEffect } from "vue";
import VncClient from "@novnc/novnc/core/rfb";
import { useXenApiStore } from "@/stores/xen-api.store";
import { promiseTimeout } from "@vueuse/shared";
@@ -87,6 +87,7 @@ const createVncConnection = async () => {
vncClient.addEventListener("connect", handleConnectionEvent);
};
watch(url, clearVncClient);
watchEffect(() => {
if (
url.value === undefined ||
@@ -97,8 +98,6 @@ watchEffect(() => {
}
nConnectionAttempts = 0;
clearVncClient();
createVncConnection();
});

View File

@@ -1,59 +0,0 @@
<template>
<UiModal
v-if="isSslModalOpen"
:icon="faServer"
color="error"
@close="clearUnreachableHostsUrls"
>
<template #title>{{ $t("unreachable-hosts") }}</template>
<div class="description">
<p>{{ $t("following-hosts-unreachable") }}</p>
<p>{{ $t("allow-self-signed-ssl") }}</p>
<ul>
<li v-for="url in unreachableHostsUrls" :key="url">
<a :href="url" class="link" rel="noopener" target="_blank">{{
url
}}</a>
</li>
</ul>
</div>
<template #buttons>
<UiButton color="success" @click="reload">
{{ $t("unreachable-hosts-reload-page") }}
</UiButton>
<UiButton @click="clearUnreachableHostsUrls">{{ $t("cancel") }}</UiButton>
</template>
</UiModal>
</template>
<script lang="ts" setup>
import { faServer } from "@fortawesome/free-solid-svg-icons";
import UiModal from "@/components/ui/UiModal.vue";
import UiButton from "@/components/ui/UiButton.vue";
import { computed, ref, watch } from "vue";
import { difference } from "lodash";
import { useHostStore } from "@/stores/host.store";
const { records: hosts } = useHostStore().subscribe();
const unreachableHostsUrls = ref<Set<string>>(new Set());
const clearUnreachableHostsUrls = () => unreachableHostsUrls.value.clear();
const isSslModalOpen = computed(() => unreachableHostsUrls.value.size > 0);
const reload = () => window.location.reload();
watch(hosts, (nextHosts, previousHosts) => {
difference(nextHosts, previousHosts).forEach((host) => {
const url = new URL("http://localhost");
url.protocol = window.location.protocol;
url.hostname = host.address;
fetch(url, { mode: "no-cors" }).catch(() =>
unreachableHostsUrls.value.add(url.toString())
);
});
});
</script>
<style lang="postcss" scoped>
.description p {
margin: 1rem 0;
}
</style>

View File

@@ -4,11 +4,11 @@
<div
v-for="item in computedData.sortedArray"
:key="item.id"
class="progress-item"
:class="{
warning: item.value > MIN_WARNING_VALUE,
error: item.value > MIN_DANGEROUS_VALUE,
}"
class="progress-item"
>
<UiProgressBar :value="item.value" color="custom" />
<UiProgressLegend
@@ -18,15 +18,15 @@
</div>
<slot :total-percent="computedData.totalPercentUsage" name="footer" />
</template>
<UiCardSpinner v-else />
<UiSpinner v-else class="spinner" />
</div>
</template>
<script lang="ts" setup>
import { computed } from "vue";
import UiProgressBar from "@/components/ui/progress/UiProgressBar.vue";
import UiProgressLegend from "@/components/ui/progress/UiProgressLegend.vue";
import UiCardSpinner from "@/components/ui/UiCardSpinner.vue";
import { computed } from "vue";
import UiSpinner from "@/components/ui/UiSpinner.vue";
interface Data {
id: string;
@@ -67,6 +67,14 @@ const computedData = computed(() => {
</script>
<style lang="postcss" scoped>
.spinner {
color: var(--color-extra-blue-base);
display: flex;
margin: auto;
width: 40px;
height: 40px;
}
.progress-item:nth-child(1) {
--progress-bar-color: var(--color-extra-blue-d60);
}
@@ -83,11 +91,9 @@ const computedData = computed(() => {
--progress-bar-height: 1.2rem;
--progress-bar-color: var(--color-extra-blue-l20);
--progress-bar-background-color: var(--color-blue-scale-400);
&.warning {
--progress-bar-color: var(--color-orange-world-base);
}
&.error {
--progress-bar-color: var(--color-red-vates-base);
}

View File

@@ -18,19 +18,33 @@
</component>
</template>
<script lang="ts">
export default {
name: "FormCheckbox",
inheritAttrs: false,
};
</script>
<script lang="ts" setup>
import { type HTMLAttributes, computed, inject, ref } from "vue";
import {
type HTMLAttributes,
type InputHTMLAttributes,
computed,
inject,
ref,
} from "vue";
import { faCheck, faCircle, faMinus } from "@fortawesome/free-solid-svg-icons";
import { useVModel } from "@vueuse/core";
import UiIcon from "@/components/ui/icon/UiIcon.vue";
defineOptions({ inheritAttrs: false });
const props = defineProps<{
// Temporary workaround for https://github.com/vuejs/core/issues/4294
interface Props extends Omit<InputHTMLAttributes, ""> {
modelValue?: unknown;
disabled?: boolean;
wrapperAttrs?: HTMLAttributes;
}>();
}
const props = defineProps<Props>();
const emit = defineEmits<{
(event: "update:modelValue", value: boolean): void;

View File

@@ -5,7 +5,6 @@
v-model="value"
:class="inputClass"
:disabled="disabled || isLabelDisabled"
:required="required"
class="select"
ref="inputElement"
v-bind="$attrs"
@@ -22,7 +21,6 @@
v-model="value"
:class="inputClass"
:disabled="disabled || isLabelDisabled"
:required="required"
class="textarea"
v-bind="$attrs"
/>
@@ -31,7 +29,6 @@
v-model="value"
:class="inputClass"
:disabled="disabled || isLabelDisabled"
:required="required"
class="input"
ref="inputElement"
v-bind="$attrs"
@@ -47,9 +44,17 @@
</span>
</template>
<script lang="ts">
export default {
name: "FormInput",
inheritAttrs: false,
};
</script>
<script lang="ts" setup>
import {
type HTMLAttributes,
type InputHTMLAttributes,
computed,
inject,
nextTick,
@@ -62,23 +67,20 @@ import { faAngleDown } from "@fortawesome/free-solid-svg-icons";
import { useTextareaAutosize, useVModel } from "@vueuse/core";
import UiIcon from "@/components/ui/icon/UiIcon.vue";
defineOptions({ inheritAttrs: false });
// Temporary workaround for https://github.com/vuejs/core/issues/4294
interface Props extends Omit<InputHTMLAttributes, ""> {
modelValue?: unknown;
color?: Color;
before?: Omit<IconDefinition, ""> | string;
after?: Omit<IconDefinition, ""> | string;
beforeWidth?: string;
afterWidth?: string;
disabled?: boolean;
right?: boolean;
wrapperAttrs?: HTMLAttributes;
}
const props = withDefaults(
defineProps<{
modelValue?: any;
color?: Color;
before?: IconDefinition | string;
after?: IconDefinition | string;
beforeWidth?: string;
afterWidth?: string;
disabled?: boolean;
required?: boolean;
right?: boolean;
wrapperAttrs?: HTMLAttributes;
}>(),
{ color: "info" }
);
const props = withDefaults(defineProps<Props>(), { color: "info" });
const inputElement = ref();
@@ -92,7 +94,7 @@ const isEmpty = computed(
);
const inputType = inject("inputType", "input");
const isLabelDisabled = inject("isLabelDisabled", ref(false));
const parentColor = inject(
const color = inject(
"color",
computed(() => undefined)
);
@@ -106,7 +108,7 @@ const wrapperClass = computed(() => [
]);
const inputClass = computed(() => [
parentColor.value ?? props.color,
color.value ?? props.color,
{
right: props.right,
"has-before": props.before !== undefined,

View File

@@ -1,41 +0,0 @@
<template>
<div class="form-input-group">
<slot />
</div>
</template>
<style lang="postcss" scoped>
.form-input-group {
display: inline-flex;
align-items: center;
:slotted(.form-input),
:slotted(.form-select) {
&:hover {
z-index: 1;
}
&:focus-within {
z-index: 2;
}
&:not(:first-child) {
margin-left: -1px;
.input,
.select {
border-top-left-radius: 0;
border-bottom-left-radius: 0;
}
}
&:not(:last-child) {
.input,
.select {
border-top-right-radius: 0;
border-bottom-right-radius: 0;
}
}
}
}
</style>

View File

@@ -1,5 +1,12 @@
<template>
<li v-if="host !== undefined" class="infra-host-item">
<li
v-if="host !== undefined"
v-tooltip="{
content: host.name_label,
disabled: isTooltipDisabled,
}"
class="infra-host-item"
>
<InfraItemLabel
:active="isCurrentHost"
:icon="faServer"
@@ -29,7 +36,7 @@ import InfraAction from "@/components/infra/InfraAction.vue";
import InfraItemLabel from "@/components/infra/InfraItemLabel.vue";
import InfraVmList from "@/components/infra/InfraVmList.vue";
import { vTooltip } from "@/directives/tooltip.directive";
import type { XenApiHost } from "@/libs/xen-api";
import { hasEllipsis } from "@/libs/utils";
import { useHostStore } from "@/stores/host.store";
import { usePoolStore } from "@/stores/pool.store";
import { useUiStore } from "@/stores/ui.store";
@@ -43,7 +50,7 @@ import { useToggle } from "@vueuse/core";
import { computed } from "vue";
const props = defineProps<{
hostOpaqueRef: XenApiHost["$ref"];
hostOpaqueRef: string;
}>();
const { getByOpaqueRef } = useHostStore().subscribe();
@@ -59,6 +66,9 @@ const isCurrentHost = computed(
() => props.hostOpaqueRef === uiStore.currentHostOpaqueRef
);
const [isExpanded, toggle] = useToggle(true);
const isTooltipDisabled = (target: HTMLElement) =>
!hasEllipsis(target.querySelector(".text"));
</script>
<style lang="postcss" scoped>

View File

@@ -7,9 +7,9 @@
class="infra-item-label"
v-bind="$attrs"
>
<a :href="href" class="link" @click="navigate" v-tooltip="hasTooltip">
<a :href="href" class="link" @click="navigate">
<UiIcon :icon="icon" class="icon" />
<div ref="textElement" class="text">
<div class="text">
<slot />
</div>
</a>
@@ -22,10 +22,7 @@
<script lang="ts" setup>
import UiIcon from "@/components/ui/icon/UiIcon.vue";
import { vTooltip } from "@/directives/tooltip.directive";
import { hasEllipsis } from "@/libs/utils";
import type { IconDefinition } from "@fortawesome/fontawesome-common-types";
import { computed, ref } from "vue";
import type { RouteLocationRaw } from "vue-router";
defineProps<{
@@ -33,9 +30,6 @@ defineProps<{
route: RouteLocationRaw;
active?: boolean;
}>();
const textElement = ref<HTMLElement>();
const hasTooltip = computed(() => hasEllipsis(textElement.value));
</script>
<style lang="postcss" scoped>

View File

@@ -1,5 +1,13 @@
<template>
<li v-if="vm !== undefined" ref="rootElement" class="infra-vm-item">
<li
v-if="vm !== undefined"
ref="rootElement"
v-tooltip="{
content: vm.name_label,
disabled: isTooltipDisabled,
}"
class="infra-vm-item"
>
<InfraItemLabel
v-if="isVisible"
:icon="faDisplay"
@@ -19,14 +27,15 @@
import InfraAction from "@/components/infra/InfraAction.vue";
import InfraItemLabel from "@/components/infra/InfraItemLabel.vue";
import PowerStateIcon from "@/components/PowerStateIcon.vue";
import type { XenApiVm } from "@/libs/xen-api";
import { vTooltip } from "@/directives/tooltip.directive";
import { hasEllipsis } from "@/libs/utils";
import { useVmStore } from "@/stores/vm.store";
import { faDisplay } from "@fortawesome/free-solid-svg-icons";
import { useIntersectionObserver } from "@vueuse/core";
import { computed, ref } from "vue";
const props = defineProps<{
vmOpaqueRef: XenApiVm["$ref"];
vmOpaqueRef: string;
}>();
const { getByOpaqueRef } = useVmStore().subscribe();
@@ -40,6 +49,9 @@ const { stop } = useIntersectionObserver(rootElement, ([entry]) => {
stop();
}
});
const isTooltipDisabled = (target: HTMLElement) =>
!hasEllipsis(target.querySelector(".text"));
</script>
<style lang="postcss" scoped>

View File

@@ -11,21 +11,18 @@
<script lang="ts" setup>
import InfraLoadingItem from "@/components/infra/InfraLoadingItem.vue";
import InfraVmItem from "@/components/infra/InfraVmItem.vue";
import type { XenApiHost } from "@/libs/xen-api";
import { useVmStore } from "@/stores/vm.store";
import { faDisplay } from "@fortawesome/free-solid-svg-icons";
import { computed } from "vue";
const props = defineProps<{
hostOpaqueRef?: XenApiHost["$ref"];
hostOpaqueRef?: string;
}>();
const { isReady, recordsByHostRef, hasError } = useVmStore().subscribe();
const vms = computed(() =>
recordsByHostRef.value.get(
props.hostOpaqueRef ?? ("OpaqueRef:NULL" as XenApiHost["$ref"])
)
recordsByHostRef.value.get(props.hostOpaqueRef ?? "OpaqueRef:NULL")
);
</script>

Some files were not shown because too many files have changed in this diff Show More