Compare commits

81 commits: `xen-api-v0` ... `smart-sele`
Commits (SHA1): 048877d653, 0938804947, 851bcf9816, 9f6fc785bc, 56636bf5d4, 3899a65167, 628e53c1c3, 9fa424dd8d, 3e6f2eecfa, cc655c8ba8, 78aa0474ee, 9caefa2f49, 478726fa3b, f64917ec52, 2bc25f91c4, 623d7ffe2f, 07510b5099, 9f21f9a7bc, 93da70709e, 00436e744a, 1e642fc512, 6baef2450c, 600f34f85a, 6c0c6bc5c4, fcd62ed3cd, 785f2e3a6d, c2925f7c1e, 60814d8b58, 2dec448f2c, b71f4f6800, 558083a916, d507ed9dff, 7ed0242662, d7b3d989d7, 707b2f77f0, 5ddbb76979, 97b0fe62d4, 8ac9b2cdc7, bc4c1a13e6, d3ec303ade, 6cfc2a1ba6, e15cadc863, 2f9284c263, 2465852fd6, a9f48a0d50, 4ed0035c67, b66f2dfb80, 3cb155b129, df7efc04e2, a21a8457a4, 020955f535, 51f23a5f03, d024319441, f8f35938c0, 2573ace368, 6bf7269814, 6695c7bf5e, 44a83fd817, 08ddfe0649, 5ba170bf1f, 8150d3110c, 312b33ae85, 008eb995ed, 6d8848043c, cf572c0cc5, 18cfa7dd29, 72cac2bbd6, 48ffa28e0b, 2e6baeb95a, 3b5650dc1e, 3279728e4b, fe0dcbacc5, 7c5d90fe40, 944dad6e36, 6713d3ec66, 6adadb2359, b01096876c, 60243d8517, 94d0809380, e935dd9bad, 30aa2b83d0
11  .eslintrc.js

@@ -16,6 +16,16 @@ module.exports = {
      $PropertyType: true,
      $Shape: true,
    },

  overrides: [
    {
      files: ['packages/*cli*/**/*.js', '*-cli.js'],
      rules: {
        'no-console': 'off',
      },
    },
  ],

  parser: 'babel-eslint',
  parserOptions: {
    ecmaFeatures: {
@@ -23,6 +33,7 @@ module.exports = {
    },
  },
  rules: {
    'no-console': ['error', { allow: ['warn', 'error'] }],
    'no-var': 'error',
    'node/no-extraneous-import': 'error',
    'node/no-extraneous-require': 'error',

@@ -16,6 +16,6 @@
  },
  "dependencies": {
    "golike-defer": "^0.4.1",
    "xen-api": "^0.24.3"
    "xen-api": "^0.24.5"
  }
}
@@ -1,6 +1,6 @@
{
  "name": "@xen-orchestra/fs",
  "version": "0.7.0",
  "version": "0.7.1",
  "license": "AGPL-3.0",
  "description": "The File System for Xen Orchestra backups.",
  "keywords": [],
@@ -24,12 +24,12 @@
    "@marsaud/smb2": "^0.13.0",
    "@sindresorhus/df": "^2.1.0",
    "@xen-orchestra/async-map": "^0.0.0",
    "decorator-synchronized": "^0.3.0",
    "decorator-synchronized": "^0.5.0",
    "execa": "^1.0.0",
    "fs-extra": "^7.0.0",
    "get-stream": "^4.0.0",
    "lodash": "^4.17.4",
    "promise-toolbox": "^0.11.0",
    "promise-toolbox": "^0.12.1",
    "readable-stream": "^3.0.6",
    "through2": "^3.0.0",
    "tmp": "^0.0.33",
@@ -45,7 +45,7 @@
    "async-iterator-to-stream": "^1.1.0",
    "babel-plugin-lodash": "^3.3.2",
    "cross-env": "^5.1.3",
    "dotenv": "^6.1.0",
    "dotenv": "^7.0.0",
    "index-modules": "^0.3.0",
    "rimraf": "^2.6.2"
  },

@@ -89,7 +89,7 @@ export default class MountHandler extends LocalHandler {
      try {
        // the failure may mean it's already mounted, use `findmnt` to check
        // that's the case
        await this._execa('findmnt', ['--target', realPath], {
        await this._execa('findmnt', [realPath], {
          stdio: 'ignore',
        })
      } catch (_) {

@@ -25,6 +25,10 @@ type RemoteInfo = { used?: number, size?: number }
type File = FileDescriptor | string

const checksumFile = file => file + '.checksum'
const computeRate = (hrtime: number[], size: number) => {
  const seconds = hrtime[0] + hrtime[1] / 1e9
  return size / seconds
}

const DEFAULT_TIMEOUT = 6e5 // 10 min

@@ -362,18 +366,27 @@ export default class RemoteHandlerAbstract {
  }

  async test(): Promise<Object> {
    const SIZE = 1024 * 1024 * 10
    const testFileName = normalizePath(`${Date.now()}.test`)
    const data = await fromCallback(cb => randomBytes(1024 * 1024, cb))
    const data = await fromCallback(cb => randomBytes(SIZE, cb))
    let step = 'write'
    try {
      const writeStart = process.hrtime()
      await this._outputFile(testFileName, data, { flags: 'wx' })
      const writeDuration = process.hrtime(writeStart)

      step = 'read'
      const readStart = process.hrtime()
      const read = await this._readFile(testFileName, { flags: 'r' })
      const readDuration = process.hrtime(readStart)

      if (!data.equals(read)) {
        throw new Error('output and input did not match')
      }
      return {
        success: true,
        writeRate: computeRate(writeDuration, SIZE),
        readRate: computeRate(readDuration, SIZE),
      }
    } catch (error) {
      return {

@@ -290,9 +290,11 @@ handlers.forEach(url => {

  describe('#test()', () => {
    it('tests the remote appears to be working', async () => {
      expect(await handler.test()).toEqual({
        success: true,
      })
      const answer = await handler.test()

      expect(answer.success).toBe(true)
      expect(typeof answer.writeRate).toBe('number')
      expect(typeof answer.readRate).toBe('number')
    })
  })
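The `test()` benchmark above boils down to one formula: `process.hrtime()` returns a `[seconds, nanoseconds]` tuple, so the elapsed time is `hrtime[0] + hrtime[1] / 1e9` seconds and the rate is the byte count divided by that duration. A minimal sketch (the sample duration below is made up; only the formula comes from the diff):

```js
// Same logic as computeRate in the hunk above.
const computeRate = (hrtime, size) => size / (hrtime[0] + hrtime[1] / 1e9)

const SIZE = 1024 * 1024 * 10 // the 10 MiB test payload written by test()
const writeDuration = [1, 250e6] // hypothetical: 1 s + 250,000,000 ns = 1.25 s
computeRate(writeDuration, SIZE) // → 8388608 bytes/s (8 MiB/s), reported as writeRate
```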
@@ -31,7 +31,7 @@
  },
  "dependencies": {
    "lodash": "^4.17.4",
    "promise-toolbox": "^0.11.0"
    "promise-toolbox": "^0.12.1"
  },
  "devDependencies": {
    "@babel/cli": "^7.0.0",
64  CHANGELOG.md

@@ -1,5 +1,69 @@
# ChangeLog

## Next (2019-03-19)

### Enhancements

- [SR/Disk] Disable actions on unmanaged VDIs [#3988](https://github.com/vatesfr/xen-orchestra/issues/3988) (PR [#4000](https://github.com/vatesfr/xen-orchestra/pull/4000))
- [Pool] Specify automatic networks on a Pool [#3916](https://github.com/vatesfr/xen-orchestra/issues/3916) (PR [#3958](https://github.com/vatesfr/xen-orchestra/pull/3958))
- [VM/advanced] Manage start delay for VM [#3909](https://github.com/vatesfr/xen-orchestra/issues/3909) (PR [#4002](https://github.com/vatesfr/xen-orchestra/pull/4002))
- [New/Vm] SR section: Display warning message when the selected SRs aren't in the same host [#3911](https://github.com/vatesfr/xen-orchestra/issues/3911) (PR [#3967](https://github.com/vatesfr/xen-orchestra/pull/3967))
- Enable compression for HTTP requests (and initial objects fetch)
- [VDI migration] Display same-pool SRs first in the selector [#3945](https://github.com/vatesfr/xen-orchestra/issues/3945) (PR [#3996](https://github.com/vatesfr/xen-orchestra/pull/3996))
- [Home] Save the current page in url [#3993](https://github.com/vatesfr/xen-orchestra/issues/3993) (PR [#3999](https://github.com/vatesfr/xen-orchestra/pull/3999))
- [VDI] Ensure suspend VDI is destroyed when destroying a VM [#4027](https://github.com/vatesfr/xen-orchestra/issues/4027) (PR [#4038](https://github.com/vatesfr/xen-orchestra/pull/4038))
- [VM/disk] Warning when 2 VDIs are on 2 different hosts' local SRs [#3911](https://github.com/vatesfr/xen-orchestra/issues/3911) (PR [#3969](https://github.com/vatesfr/xen-orchestra/pull/3969))

### Bug fixes

- [New network] PIF was wrongly required, which prevented creating a private network (PR [#4010](https://github.com/vatesfr/xen-orchestra/pull/4010))
- [Google authentication] Migrate to new endpoint
- [Backup NG] Better handling of huge logs [#4025](https://github.com/vatesfr/xen-orchestra/issues/4025) (PR [#4026](https://github.com/vatesfr/xen-orchestra/pull/4026))
- [Home/VM] Bulk migration: fixed VM VDIs not migrated to the selected SR [#3986](https://github.com/vatesfr/xen-orchestra/issues/3986) (PR [#3987](https://github.com/vatesfr/xen-orchestra/pull/3987))
- [Stats] Fix cache usage with simultaneous requests [#4017](https://github.com/vatesfr/xen-orchestra/issues/4017) (PR [#4028](https://github.com/vatesfr/xen-orchestra/pull/4028))
- [Backup NG] Fix compression displayed for the wrong backup mode (PR [#4021](https://github.com/vatesfr/xen-orchestra/pull/4021))

## **5.32.2** (2019-02-28)

### Bug fixes

- Fix XAPI events monitoring on old versions (XenServer 7.2)

## **5.32.1** (2019-02-28)

### Bug fixes

- Fix a very short timeout in the monitoring of XAPI events which may lead to unresponsive XenServer hosts

## **5.32.0** (2019-02-28)

### Enhancements

- [VM migration] Display same-pool hosts first in the selector [#3262](https://github.com/vatesfr/xen-orchestra/issues/3262) (PR [#3890](https://github.com/vatesfr/xen-orchestra/pull/3890))
- [Home/VM] Sort VM by start time [#3955](https://github.com/vatesfr/xen-orchestra/issues/3955) (PR [#3970](https://github.com/vatesfr/xen-orchestra/pull/3970))
- [Editable fields] Unfocusing (clicking outside) submits the change instead of canceling (PR [#3980](https://github.com/vatesfr/xen-orchestra/pull/3980))
- [Network] Dedicated page for network creation [#3895](https://github.com/vatesfr/xen-orchestra/issues/3895) (PR [#3906](https://github.com/vatesfr/xen-orchestra/pull/3906))
- [Logs] Add button to download the log [#3957](https://github.com/vatesfr/xen-orchestra/issues/3957) (PR [#3985](https://github.com/vatesfr/xen-orchestra/pull/3985))
- [Continuous Replication] Share full copy between schedules [#3973](https://github.com/vatesfr/xen-orchestra/issues/3973) (PR [#3995](https://github.com/vatesfr/xen-orchestra/pull/3995))
- [Backup] Ability to backup XO configuration and pool metadata [#808](https://github.com/vatesfr/xen-orchestra/issues/808) [#3501](https://github.com/vatesfr/xen-orchestra/issues/3501) (PR [#3912](https://github.com/vatesfr/xen-orchestra/pull/3912))

### Bug fixes

- [Host] Fix multipathing status for XenServer < 7.5 [#3956](https://github.com/vatesfr/xen-orchestra/issues/3956) (PR [#3961](https://github.com/vatesfr/xen-orchestra/pull/3961))
- [Home/VM] Show creation date of the VM if it is available [#3953](https://github.com/vatesfr/xen-orchestra/issues/3953) (PR [#3959](https://github.com/vatesfr/xen-orchestra/pull/3959))
- [Notifications] Fix invalid notifications when not registered (PR [#3966](https://github.com/vatesfr/xen-orchestra/pull/3966))
- [Import] Fix import of some OVA files [#3962](https://github.com/vatesfr/xen-orchestra/issues/3962) (PR [#3974](https://github.com/vatesfr/xen-orchestra/pull/3974))
- [Servers] Fix *already connected error* after a server has been removed during connection [#3976](https://github.com/vatesfr/xen-orchestra/issues/3976) (PR [#3977](https://github.com/vatesfr/xen-orchestra/pull/3977))
- [Backup] Fix random _mount_ issues with NFS/SMB remotes [#3973](https://github.com/vatesfr/xen-orchestra/issues/3973) (PR [#4003](https://github.com/vatesfr/xen-orchestra/pull/4003))

### Released packages

- @xen-orchestra/fs v0.7.0
- xen-api v0.24.3
- xoa-updater v0.15.2
- xo-server v5.36.0
- xo-web v5.36.0

## **5.31.2** (2019-02-08)

### Enhancements

@@ -2,27 +2,19 @@

### Enhancements

- [VM migration] Display same-pool hosts first in the selector [#3262](https://github.com/vatesfr/xen-orchestra/issues/3262) (PR [#3890](https://github.com/vatesfr/xen-orchestra/pull/3890))
- [Home/VM] Sort VM by start time [#3955](https://github.com/vatesfr/xen-orchestra/issues/3955) (PR [#3970](https://github.com/vatesfr/xen-orchestra/pull/3970))
- [Editable fields] Unfocusing (clicking outside) submits the change instead of canceling (PR [#3980](https://github.com/vatesfr/xen-orchestra/pull/3980))
- [Network] Dedicated page for network creation [#3895](https://github.com/vatesfr/xen-orchestra/issues/3895) (PR [#3906](https://github.com/vatesfr/xen-orchestra/pull/3906))
- [Logs] Add button to download the log [#3957](https://github.com/vatesfr/xen-orchestra/issues/3957) (PR [#3985](https://github.com/vatesfr/xen-orchestra/pull/3985))
- [Continuous Replication] Share full copy between schedules [#3973](https://github.com/vatesfr/xen-orchestra/issues/3973) (PR [#3995](https://github.com/vatesfr/xen-orchestra/pull/3995))
- [Backup] Ability to backup XO configuration and pool metadata [#808](https://github.com/vatesfr/xen-orchestra/issues/808) [#3501](https://github.com/vatesfr/xen-orchestra/issues/3501) (PR [#3912](https://github.com/vatesfr/xen-orchestra/pull/3912))
- [Remotes] Benchmarks (read and write speed) added when a remote is tested [#3991](https://github.com/vatesfr/xen-orchestra/issues/3991) (PR [#4015](https://github.com/vatesfr/xen-orchestra/pull/4015))
- [Cloud Config] Support both NoCloud and Config Drive 2 datasources for maximum compatibility (PR [#4053](https://github.com/vatesfr/xen-orchestra/pull/4053))
- [Advanced] Configurable cookie validity (PR [#4059](https://github.com/vatesfr/xen-orchestra/pull/4059))
- [Plugins] Display number of installed plugins [#4008](https://github.com/vatesfr/xen-orchestra/issues/4008) (PR [#4050](https://github.com/vatesfr/xen-orchestra/pull/4050))

### Bug fixes

- [Host] Fix multipathing status for XenServer < 7.5 [#3956](https://github.com/vatesfr/xen-orchestra/issues/3956) (PR [#3961](https://github.com/vatesfr/xen-orchestra/pull/3961))
- [Home/VM] Show creation date of the VM if it is available [#3953](https://github.com/vatesfr/xen-orchestra/issues/3953) (PR [#3959](https://github.com/vatesfr/xen-orchestra/pull/3959))
- [Notifications] Fix invalid notifications when not registered (PR [#3966](https://github.com/vatesfr/xen-orchestra/pull/3966))
- [Import] Fix import of some OVA files [#3962](https://github.com/vatesfr/xen-orchestra/issues/3962) (PR [#3974](https://github.com/vatesfr/xen-orchestra/pull/3974))
- [Servers] Fix *already connected error* after a server has been removed during connection [#3976](https://github.com/vatesfr/xen-orchestra/issues/3976) (PR [#3977](https://github.com/vatesfr/xen-orchestra/pull/3977))
- [Backup] Fix random _mount_ issues with NFS/SMB remotes [#3973](https://github.com/vatesfr/xen-orchestra/issues/3973) (PR [#4003](https://github.com/vatesfr/xen-orchestra/pull/4003))
- [Home] Always sort the items by their names as a secondary sort criteria [#3983](https://github.com/vatesfr/xen-orchestra/issues/3983) (PR [#4047](https://github.com/vatesfr/xen-orchestra/pull/4047))
- [Remotes] Fixes `spawn mount EMFILE` error during backup
- Properly redirect to sign in page instead of being stuck in a refresh loop

### Released packages

- @xen-orchestra/fs v0.7.0
- xen-api v0.24.3
- xoa-updater v0.15.2
- xo-server v5.36.0
- xo-web v5.36.0
- @xen-orchestra/fs v0.8.0
- xo-server v5.38.0
- xo-web v5.38.0
@@ -33,6 +33,7 @@
* [Disaster recovery](disaster_recovery.md)
* [Smart Backup](smart_backup.md)
* [File level Restore](file_level_restore.md)
* [Metadata Backup](metadata_backup.md)
* [Backup Concurrency](concurrency.md)
* [Configure backup reports](backup_reports.md)
* [Backup troubleshooting](backup_troubleshooting.md)

BIN  docs/assets/cr-seed-1.png (new binary file, 12 KiB)
BIN  docs/assets/cr-seed-2.png (new binary file, 14 KiB)
BIN  docs/assets/cr-seed-3.png (new binary file, 15 KiB)
BIN  docs/assets/cr-seed-4.png (new binary file, 17 KiB)
@@ -12,7 +12,9 @@ Another good way to check if there is activity is the XOA VM stats view (on the

### VDI chain protection

This means your previous VM disks and snapshots should be "merged" (*coalesced* in the XenServer world) before we can take a new snapshot. This mechanism is handled by XenServer itself, not Xen Orchestra. But we can check your existing VDI chain and avoid creating more snapshots than your storage can merge. Otherwise, this will lead to catastrophic consequences. Xen Orchestra is the **only** XenServer/XCP backup product dealing with this.
Backup jobs regularly delete snapshots. When a snapshot is deleted, either manually or via a backup job, it triggers the need for XenServer to coalesce the VDI chain, i.e. to merge the remaining VDIs and base copies in the chain. This generally means we cannot take too many new snapshots on that VM until XenServer has finished running a coalesce job on the VDI chain.

This mechanism and its scheduling are handled by XenServer itself, not Xen Orchestra. But we can check your existing VDI chain and avoid creating more snapshots than your storage can merge. If we don't, this will lead to catastrophic consequences. Xen Orchestra is the **only** XenServer/XCP backup product that takes this into account and offers protection.

Without this detection, you could have 2 potential issues:

@@ -21,9 +23,9 @@ Without this detection, you could have 2 potential issues:

The first issue is a chain that contains more than 30 elements (a fixed XenServer limit), and the other one means it's full because the "coalesce" process couldn't keep up the pace and the storage filled up.

In the end, this message is a **protection mechanism against damaging your SR**. The backup job will fail, but XenServer itself should eventually automatically coalesce the snapshot chain, and the next time the backup job should complete.
In the end, this message is a **protection mechanism preventing damage to your SR**. The backup job will fail, but XenServer itself should eventually automatically coalesce the snapshot chain, and the next time the backup job should complete.

Just remember this: **coalesce will happen every time a snapshot is removed**.
Just remember this: **a coalesce should happen every time a snapshot is removed**.

> You can read more on this on our dedicated blog post regarding [XenServer coalesce detection](https://xen-orchestra.com/blog/xenserver-coalesce-detection-in-xen-orchestra/).

@@ -37,7 +39,9 @@ Coalesce jobs can also fail to run if the SR does not have enough free space. Ch

You can check if a coalesce job is currently active by running `ps axf | grep vhd` on the XenServer host and looking for a VHD process in the results (one of the resulting processes will be the grep command you just ran, ignore that one).

If you don't see any running coalesce jobs, and can't find any other reason that XenServer has not started one, you can attempt to make it start a coalesce job by rescanning the SR. This is harmless to try, but will not always result in a coalesce. Visit the problematic SR in the XOA UI, then click the "Rescan All Disks" button towards the top right: it looks like a refresh circle icon. This should begin the coalesce process - if you click the Advanced tab in the SR view, the "disks needing to be coalesced" list should become smaller and smaller.

As a last resort, migrating the VM (more specifically, its disks) to a new storage repository will also force a coalesce and solve this issue. That means migrating a VM to another host (with its own storage) and back will force the VDI chain for that VM to be coalesced, and get rid of the `VDI Chain Protection` message.

### Parse Error
@@ -10,6 +10,7 @@ There are several ways to protect your VMs:
* [Rolling Snapshots](rolling_snapshots.md) [*Starter Edition*]
* [Delta Backups](delta_backups.md) (best of both previous ones) [*Enterprise Edition*]
* [Disaster Recovery](disaster_recovery.md) [*Enterprise Edition*]
* [Metadata Backups](metadata_backup.md) [*Enterprise Edition*]
* [Continuous Replication](continuous_replication.md) [*Premium Edition*]
* [File Level Restore](file_level_restore.md) [*Premium Edition*]
@@ -43,11 +43,19 @@ To protect the replication, we removed the possibility to boot your copied VM di

### Job creation

Create the Continuous Replication backup job, and leave it disabled for now. On the main Backup-NG page, note its identifiers: the main `backupJobId` and the ID of one of the schedules for the job, `backupScheduleId`.
Create the Continuous Replication backup job, and leave it disabled for now. On the main Backup-NG page, copy the job's `backupJobId` by hovering to the left of the shortened ID and clicking the copy to clipboard button:



Copy it somewhere temporarily. Now we need to also copy the ID of the job schedule, `backupScheduleId`. Do this by hovering over the schedule name in the same panel as before, and clicking the copy to clipboard button. Keep it with the `backupJobId` you copied previously as we will need them all later:



### Seed creation

Manually create a snapshot on the VM to backup, and note its UUID as `snapshotUuid` from the snapshot panel for the VM.
Manually create a snapshot on the VM being backed up, then copy this snapshot UUID, `snapshotUuid`, from the snapshot panel of the VM:



> DO NOT ever delete or alter this snapshot; feel free to rename it to make that clear.

@@ -55,7 +63,9 @@ Manually create a snapshot on the VM to backup, and note its UUID as `snapshotUu

Export this snapshot to a file, then import it on the target SR.

Note the UUID of this newly created VM as `targetVmUuid`.
We need to copy the UUID of this newly created VM as well, `targetVmUuid`:



> DO NOT start this VM or it will break the Continuous Replication job! You can rename this VM to make this easier to remember.

@@ -66,7 +76,7 @@ The XOA backup system requires metadata to correctly associate the source snapsh

First install the tool (all the following is done from the XOA VM CLI):

```
npm i -g xo-cr-seed
sudo npm i -g --unsafe-perm @xen-orchestra/cr-seed-cli
```

Here is an example of how the utility expects the UUIDs and info passed to it:
31  docs/metadata_backup.md (new file)

@@ -0,0 +1,31 @@
# Metadata backup

> WARNING: Metadata backup is an experimental feature. Restore is not yet available and some unexpected issues may occur.

## Introduction

XCP-ng and Citrix Hypervisor (XenServer) hosts use a database to store metadata about VMs and their associated resources such as storage and networking. This metadata forms a complete view of all the VMs available on your pool. Backing up the metadata of your pool allows you to recover from a physical hardware failure scenario in which you lose your hosts without losing your storage (SAN, NAS...).

In Xen Orchestra, Metadata backup is divided into two different options:

* Pool metadata backup
* XO configuration backup

### How to use metadata backup

In the backup job section, when creating a new backup job, you will now have a choice between backing up VMs and backing up Metadata.

When you select Metadata backup, you will have a new backup job screen, letting you choose between a pool metadata backup and an XO configuration backup (or both at the same time):

Define the name and retention for the job.

Once created, the job is displayed with the other classic jobs.

> Restore for metadata backup jobs should be available in XO 5.33
10  package.json

@@ -4,10 +4,10 @@
    "@babel/register": "^7.0.0",
    "babel-core": "^7.0.0-0",
    "babel-eslint": "^10.0.1",
    "babel-jest": "^23.0.1",
    "babel-jest": "^24.1.0",
    "benchmark": "^2.1.4",
    "eslint": "^5.1.0",
    "eslint-config-prettier": "^3.3.0",
    "eslint-config-prettier": "^4.1.0",
    "eslint-config-standard": "12.0.0",
    "eslint-config-standard-jsx": "^6.0.2",
    "eslint-plugin-import": "^2.8.0",
@@ -16,13 +16,13 @@
    "eslint-plugin-react": "^7.6.1",
    "eslint-plugin-standard": "^4.0.0",
    "exec-promise": "^0.7.0",
    "flow-bin": "^0.90.0",
    "flow-bin": "^0.95.1",
    "globby": "^9.0.0",
    "husky": "^1.2.1",
    "jest": "^23.0.1",
    "jest": "^24.1.0",
    "lodash": "^4.17.4",
    "prettier": "^1.10.2",
    "promise-toolbox": "^0.11.0",
    "promise-toolbox": "^0.12.1",
    "sorted-object": "^2.0.1"
  },
  "engines": {
3  packages/smart-selector/.babelrc.js (new file)

@@ -0,0 +1,3 @@
module.exports = require('../../@xen-orchestra/babel-config')(
  require('./package.json')
)

24  packages/smart-selector/.npmignore (new file)

@@ -0,0 +1,24 @@
/benchmark/
/benchmarks/
*.bench.js
*.bench.js.map

/examples/
example.js
example.js.map
*.example.js
*.example.js.map

/fixture/
/fixtures/
*.fixture.js
*.fixture.js.map
*.fixtures.js
*.fixtures.js.map

/test/
/tests/
*.spec.js
*.spec.js.map

__snapshots__/

76  packages/smart-selector/README.md (new file)

@@ -0,0 +1,76 @@
# ${pkg.name} [](https://travis-ci.org/${pkg.shortGitHubPath})

> ${pkg.description}

Differences with [reselect](https://github.com/reactjs/reselect):

- simpler: no custom memoization
- inputs (and their selectors): are stored in objects, not arrays
- lazy:
  - inputs are not computed before accessed
  - unused inputs do not trigger a call to the transform function

## Install

Installation of the [npm package](https://npmjs.org/package/${pkg.name}):

```
> npm install --save ${pkg.name}
```

## Usage

```js
import createSelector from 'smart-selector'

const getVisibleTodos = createSelector(
  {
    filter: state => state.filter,
    todos: state => state.todos,
  },
  inputs => {
    switch (inputs.filter) {
      case 'ALL':
        return inputs.todos
      case 'COMPLETED':
        return inputs.todos.filter(todo => todo.completed)
      case 'ACTIVE':
        return inputs.todos.filter(todo => !todo.completed)
    }
  }
)
```

## Development

```
# Install dependencies
> yarn

# Run the tests
> yarn test

# Continuously compile
> yarn dev

# Continuously run the tests
> yarn dev-test

# Build for production (automatically called by npm install)
> yarn build
```

## Contributions

Contributions are *very* welcomed, either on the documentation or on
the code.

You may:

- report any [issue](https://github.com/vatesfr/xo-web/issues)
  you've encountered;
- fork and create a pull request.

## License

ISC © [Vates SAS](https://vates.fr)
43  packages/smart-selector/package.json (new file)

@@ -0,0 +1,43 @@
{
  "private": true,
  "name": "smart-selector",
  "version": "0.0.0",
  "license": "ISC",
  "description": "",
  "keywords": [],
  "homepage": "https://github.com/vatesfr/xen-orchestra/tree/master/packages/smart-selector",
  "bugs": "https://github.com/vatesfr/xen-orchestra/issues",
  "repository": {
    "type": "git",
    "url": "https://github.com/vatesfr/xen-orchestra.git"
  },
  "author": {
    "name": "Julien Fontanet",
    "email": "julien.fontanet@isonoe.net"
  },
  "preferGlobal": false,
  "main": "dist/",
  "bin": {},
  "files": [
    "dist/"
  ],
  "engines": {
    "node": ">=8"
  },
  "devDependencies": {
    "@babel/cli": "7.1.5",
    "@babel/core": "7.1.5",
    "@babel/preset-env": "7.1.5",
    "babel-plugin-lodash": "^3.3.2",
    "cross-env": "^5.1.1",
    "rimraf": "^2.6.2"
  },
  "scripts": {
    "build": "cross-env NODE_ENV=production babel --source-maps --out-dir=dist/ src/",
    "clean": "rimraf dist/",
    "dev": "cross-env NODE_ENV=development babel --watch --source-maps --out-dir=dist/ src/",
    "prebuild": "yarn run clean",
    "predev": "yarn run clean",
    "prepublishOnly": "yarn run build"
  }
}

82  packages/smart-selector/src/index.js (new file)

@@ -0,0 +1,82 @@
const { create, keys } = Object

const createSelector = (inputSelectors, transform) => {
  const previousArgs = [{}] // initialize with non-repeatable args
  let cache, previousResult, previousThisArg
  let previousInputs = {}

  const spyDescriptors = {}
  const inputs = keys(inputSelectors)
  for (let i = 0, n = inputs.length; i < n; ++i) {
    const input = inputs[i]
    spyDescriptors[input] = {
      enumerable: true,
      get: () =>
        input in previousInputs
          ? previousInputs[input]
          : (previousInputs[input] =
              input in cache
                ? cache[input]
                : inputSelectors[input].apply(previousThisArg, previousArgs)),
    }
  }
  const spy = create(null, spyDescriptors)

  function selector () {
    // handle arguments
    {
      const { length } = arguments
      let i = 0
      if (this === previousThisArg && length === previousArgs.length) {
        while (i < length && arguments[i] === previousArgs[i]) {
          ++i
        }
        if (i === length) {
          return previousResult
        }
      } else {
        previousArgs.length = length
        previousThisArg = this
      }
      while (i < length) {
        previousArgs[i] = arguments[i]
        ++i
      }
    }

    // handle inputs
    cache = previousInputs
    previousInputs = {}
    {
      const inputs = keys(cache)
      const { length } = inputs
      if (length !== 0) {
        let i = 0
        while (true) {
          if (i === length) {
            // inputs are unchanged
            return previousResult
          }

          const input = inputs[i++]
          const value = inputSelectors[input].apply(this, arguments)
          if (value !== cache[input]) {
            // update the value
            cache[input] = value

            // remove non-computed values
            while (i < length) {
              delete cache[inputs[i++]]
            }

            break
          }
        }
      }
    }

    return (previousResult = transform(spy))
  }
  return selector
}
export { createSelector as default }

99  packages/smart-selector/src/index.spec.js (new file)

@@ -0,0 +1,99 @@
/* eslint-env jest */

import createSelector from './'

const noop = () => {}

describe('createSelector', () => {
  it('calls input selectors with this and arguments', () => {
    const thisArg = {}
    const args = ['arg1', 'arg2']
    const foo = jest.fn()

    createSelector({ foo }, ({ foo }) => {}).apply(thisArg, args)

    expect(foo.mock.instances).toEqual([thisArg])
    expect(foo.mock.calls).toEqual([args])
  })

  it('calls input selectors only when accessed', () => {
    const foo = jest.fn()
    createSelector({ foo }, inputs => {
      expect(foo.mock.calls.length).toBe(0)
      noop(inputs.foo)
      expect(foo.mock.calls.length).toBe(1)
    })()
  })

  it('does not call the input selectors if this arguments did not change', () => {
    const foo = jest.fn()
    const selector = createSelector({ foo }, ({ foo }) => {})

    selector('arg1')
    expect(foo.mock.calls.length).toBe(1)

    selector('arg1')
    expect(foo.mock.calls.length).toBe(1)

    selector('arg1', 'arg2')
    expect(foo.mock.calls.length).toBe(2)

    selector.call({}, 'arg1', 'arg2')
    expect(foo.mock.calls.length).toBe(3)
  })

  it('does not call the transform if inputs did not change', () => {
    const transform = jest.fn(({ foo }) => {})
    const selector = createSelector(
      {
        foo: () => 'foo',
      },
      transform
    )

    selector({})
    expect(transform.mock.calls.length).toBe(1)

    selector({})
    expect(transform.mock.calls.length).toBe(1)
  })

  it('computes only the necessary inputs to determine if transform should be called', () => {
    let foo = 'foo 1'
    const bar = 'bar 1'
    const inputs = {
      foo: jest.fn(() => foo),
      bar: jest.fn(() => bar),
    }
    const transform = jest.fn(inputs => {
      if (inputs.foo !== 'foo 1') {
        return inputs.bar
      }
    })
    const selector = createSelector(inputs, transform)

    selector({})
    expect(inputs.foo.mock.calls.length).toBe(1)
    expect(inputs.bar.mock.calls.length).toBe(0)

    selector({})
    expect(inputs.foo.mock.calls.length).toBe(2)
    expect(inputs.bar.mock.calls.length).toBe(0)

    foo = 'foo 2'

    selector({})
    expect(inputs.foo.mock.calls.length).toBe(3)
    expect(inputs.bar.mock.calls.length).toBe(1)

    foo = 'foo 1'

    selector({})
    expect(inputs.foo.mock.calls.length).toBe(4)
    expect(inputs.bar.mock.calls.length).toBe(1)

    selector({})
    expect(inputs.foo.mock.calls.length).toBe(5)
    expect(inputs.bar.mock.calls.length).toBe(1)
  })
})
@@ -1,6 +1,6 @@
{
  "name": "vhd-cli",
  "version": "0.2.0",
  "version": "0.3.0",
  "license": "ISC",
  "description": "",
  "keywords": [],
@@ -27,7 +27,7 @@
    "node": ">=6"
  },
  "dependencies": {
    "@xen-orchestra/fs": "^0.7.0",
    "@xen-orchestra/fs": "^0.7.1",
    "cli-progress": "^2.0.0",
    "exec-promise": "^0.7.0",
    "getopts": "^2.2.3",
@@ -42,7 +42,7 @@
    "cross-env": "^5.1.3",
    "execa": "^1.0.0",
    "index-modules": "^0.3.0",
    "promise-toolbox": "^0.11.0",
    "promise-toolbox": "^0.12.1",
    "rimraf": "^2.6.1",
    "tmp": "^0.0.33"
  },

33  packages/vhd-cli/src/commands/repl.js (new file)

@@ -0,0 +1,33 @@
import { asCallback, fromCallback, fromEvent } from 'promise-toolbox'
import { getHandler } from '@xen-orchestra/fs'
import { relative } from 'path'
import { start as createRepl } from 'repl'
import Vhd, * as vhdLib from 'vhd-lib'

export default async args => {
  const cwd = process.cwd()
  const handler = getHandler({ url: 'file://' + cwd })
  await handler.sync()
  try {
    const repl = createRepl({
      prompt: 'vhd> ',
    })
    Object.assign(repl.context, vhdLib)
    repl.context.handler = handler
    repl.context.open = path => new Vhd(handler, relative(cwd, path))

    // Make the REPL waits for promise completion.
    repl.eval = (evaluate => (cmd, context, filename, cb) => {
      asCallback.call(
        fromCallback(cb => {
          evaluate.call(repl, cmd, context, filename, cb)
        }).then(value => (Array.isArray(value) ? Promise.all(value) : value)),
        cb
      )
    })(repl.eval)

    await fromEvent(repl, 'exit')
  } finally {
    await handler.forget()
  }
}
@@ -22,11 +22,11 @@
  },
  "dependencies": {
    "async-iterator-to-stream": "^1.0.2",
    "core-js": "3.0.0-beta.3",
    "core-js": "3.0.0",
    "from2": "^2.3.0",
    "fs-extra": "^7.0.0",
    "limit-concurrency-decorator": "^0.4.0",
    "promise-toolbox": "^0.11.0",
    "promise-toolbox": "^0.12.1",
    "struct-fu": "^1.2.0",
    "uuid": "^3.0.1"
  },
@@ -35,7 +35,7 @@
    "@babel/core": "^7.0.0",
    "@babel/preset-env": "^7.0.0",
    "@babel/preset-flow": "^7.0.0",
    "@xen-orchestra/fs": "^0.7.0",
    "@xen-orchestra/fs": "^0.7.1",
    "babel-plugin-lodash": "^3.3.2",
    "cross-env": "^5.1.3",
    "execa": "^1.0.0",

@@ -41,7 +41,7 @@
    "human-format": "^0.10.0",
    "lodash": "^4.17.4",
    "pw": "^0.0.4",
    "xen-api": "^0.24.3"
    "xen-api": "^0.24.5"
  },
  "devDependencies": {
    "@babel/cli": "^7.1.5",

@@ -1,6 +1,6 @@
{
  "name": "xen-api",
  "version": "0.24.3",
  "version": "0.24.5",
  "license": "ISC",
  "description": "Connector to the Xen API",
  "keywords": [
@@ -37,16 +37,16 @@
    "debug": "^4.0.1",
    "event-to-promise": "^0.8.0",
    "exec-promise": "^0.7.0",
    "http-request-plus": "^0.7.2",
    "iterable-backoff": "^0.0.0",
    "jest-diff": "^23.5.0",
    "http-request-plus": "^0.8.0",
    "iterable-backoff": "^0.1.0",
    "jest-diff": "^24.0.0",
    "json-rpc-protocol": "^0.13.1",
    "kindof": "^2.0.0",
    "lodash": "^4.17.4",
    "make-error": "^1.3.0",
    "minimist": "^1.2.0",
    "ms": "^2.1.1",
    "promise-toolbox": "^0.11.0",
    "promise-toolbox": "^0.12.1",
    "pw": "0.0.4",
    "xmlrpc": "^1.3.2",
    "xo-collection": "^0.4.1"
@@ -37,7 +37,7 @@ const debug = createDebug('xen-api')

// ===================================================================

// in seconds
// in seconds!
const EVENT_TIMEOUT = 60

// http://www.gnu.org/software/libc/manual/html_node/Error-Codes.html
@@ -956,7 +956,11 @@ export class Xapi extends EventEmitter {
  async _watchEventsWrapper() {
    if (!this._watching) {
      this._watching = true
      await ignoreErrors.call(this._watchEvents())
      try {
        await this._watchEvents()
      } catch (error) {
        console.error('_watchEventsWrapper', error)
      }
      this._watching = false
    }
  }
@@ -997,9 +1001,12 @@ export class Xapi extends EventEmitter {
            this._addObject(type, ref, record)
          }
        )
      } catch (_) {
      } catch (error) {
        // there is nothing ideal to do here, do not interrupt event
        // handling
        if (error != null && error.code !== 'MESSAGE_REMOVED') {
          console.warn('_watchEvents', 'initial fetch', type, error)
        }
      }
    })
  )
@@ -1019,8 +1026,12 @@ export class Xapi extends EventEmitter {
      try {
        result = await this._sessionCall(
          'event.from',
          [types, fromToken, EVENT_TIMEOUT],
          EVENT_TIMEOUT * 1.1
          [
            types,
            fromToken,
            EVENT_TIMEOUT + 0.1, // must be float for XML-RPC transport
          ],
          EVENT_TIMEOUT * 1e3 * 1.1
        )
      } catch (error) {
        if (error instanceof TimeoutError) {
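The arithmetic in this hunk is worth spelling out. My reading, consistent with the 5.32.1 changelog entry "Fix a very short timeout in the monitoring of XAPI events", is that `event.from` takes its timeout in seconds while the last `_sessionCall` argument is a local timeout in milliseconds, so the old `EVENT_TIMEOUT * 1.1` (66) meant a 66 ms client-side timeout. The numbers only, as a sketch:

```js
const EVENT_TIMEOUT = 60 // seconds, as defined earlier in this file

const serverSideWait = EVENT_TIMEOUT + 0.1 // 60.1 s sent to event.from, kept a float for XML-RPC
const oldLocalTimeout = EVENT_TIMEOUT * 1.1 // 66, far too short if interpreted as milliseconds
const newLocalTimeout = EVENT_TIMEOUT * 1e3 * 1.1 // 66000 ms, a 10% margin over the server-side wait
```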
@@ -3,6 +3,7 @@ import { format, parse } from 'json-rpc-protocol'

import { UnsupportedTransport } from './_utils'

// https://github.com/xenserver/xenadmin/blob/0df39a9d83cd82713f32d24704852a0fd57b8a64/XenModel/XenAPI/Session.cs#L403-L433
export default ({ allowUnauthorized, url }) => {
  return (method, args) =>
    httpRequestPlus

@@ -34,7 +34,7 @@
    "chalk": "^2.2.0",
    "exec-promise": "^0.7.0",
    "fs-promise": "^2.0.3",
    "http-request-plus": "^0.7.2",
    "http-request-plus": "^0.8.0",
    "human-format": "^0.10.0",
    "l33teral": "^3.0.3",
    "lodash": "^4.17.4",
@@ -43,7 +43,7 @@
    "nice-pipe": "0.0.0",
    "pretty-ms": "^4.0.0",
    "progress-stream": "^2.0.0",
    "promise-toolbox": "^0.11.0",
    "promise-toolbox": "^0.12.1",
    "pump": "^3.0.0",
    "pw": "^0.0.4",
    "strip-indent": "^2.0.0",

@@ -43,7 +43,7 @@
    "xo-lib": "^0.9.0"
  },
  "devDependencies": {
    "@types/node": "^10.12.2",
    "@types/node": "^11.11.4",
    "@types/through2": "^2.0.31",
    "tslint": "^5.9.1",
    "tslint-config-standard": "^8.0.1",

@@ -1,8 +1,3 @@
declare module 'csv-parser' {
  function csvParser(opts?: Object): any
  export = csvParser
}

declare module 'exec-promise' {
  function execPromise(cb: (args: string[]) => any): void
  export = execPromise

@@ -1,6 +1,6 @@
{
  "name": "xo-server-auth-google",
  "version": "0.2.0",
  "version": "0.2.1",
  "license": "AGPL-3.0",
  "description": "Google authentication plugin for XO-Server",
  "keywords": [
@@ -33,7 +33,7 @@
    "node": ">=6"
  },
  "dependencies": {
    "passport-google-oauth20": "^1.0.0"
    "passport-google-oauth20": "^2.0.0"
  },
  "devDependencies": {
    "@babel/cli": "^7.0.0",

@@ -39,7 +39,7 @@
    "inquirer": "^6.0.0",
    "ldapjs": "^1.0.1",
    "lodash": "^4.17.4",
    "promise-toolbox": "^0.11.0"
    "promise-toolbox": "^0.12.1"
  },
  "devDependencies": {
    "@babel/cli": "^7.0.0",

@@ -32,7 +32,7 @@
    "node": ">=6"
  },
  "dependencies": {
    "http-request-plus": "^0.7.2",
    "http-request-plus": "^0.8.0",
    "jsonrpc-websocket-client": "^0.4.1"
  },
  "devDependencies": {

@@ -34,7 +34,7 @@
  "dependencies": {
    "nodemailer": "^5.0.0",
    "nodemailer-markdown": "^1.0.1",
    "promise-toolbox": "^0.11.0"
    "promise-toolbox": "^0.12.1"
  },
  "devDependencies": {
    "@babel/cli": "^7.0.0",

@@ -33,7 +33,7 @@
    "node": ">=6"
  },
  "dependencies": {
    "promise-toolbox": "^0.11.0",
    "promise-toolbox": "^0.12.1",
    "slack-node": "^0.1.8"
  },
  "devDependencies": {

@@ -42,7 +42,7 @@
    "html-minifier": "^3.5.8",
    "human-format": "^0.10.0",
    "lodash": "^4.17.4",
    "promise-toolbox": "^0.11.0"
    "promise-toolbox": "^0.12.1"
  },
  "devDependencies": {
    "@babel/cli": "^7.0.0",

@@ -4,6 +4,11 @@

// ===================================================================

// https://expressjs.com/en/advanced/best-practice-performance.html#set-node_env-to-production
if (process.env.NODE_ENV === undefined) {
  process.env.NODE_ENV = 'production'
}

// Better stack traces if possible.
require('../better-stacks')

@@ -21,6 +21,18 @@ verboseApiLogsOnErrors = false
[apiWebSocketOptions]
perMessageDeflate = { threshold = 524288 } # 512kiB

[authentication]
defaultTokenValidity = '30 days'
maxTokenValidity = '0.5 year'

# Default to `maxTokenValidity`
#permanentCookieValidity = '30 days'

# Default to `undefined`, ie as long as the browser is not restarted
#
# https://developer.mozilla.org/fr/docs/Web/HTTP/Headers/Set-Cookie#Session_cookie
#sessionCookieValidity = '10 hours'

[[http.listen]]
port = 80
@@ -1,6 +1,6 @@
{
  "name": "xo-server",
  "version": "5.35.0",
  "version": "5.37.0",
  "license": "AGPL-3.0",
  "description": "Server part of Xen-Orchestra",
  "keywords": [
@@ -35,8 +35,9 @@
    "@iarna/toml": "^2.2.1",
    "@xen-orchestra/async-map": "^0.0.0",
    "@xen-orchestra/cron": "^1.0.3",
    "@xen-orchestra/defined": "^0.0.0",
    "@xen-orchestra/emit-async": "^0.0.0",
    "@xen-orchestra/fs": "^0.7.0",
    "@xen-orchestra/fs": "^0.7.1",
    "@xen-orchestra/log": "^0.1.4",
    "@xen-orchestra/mixin": "^0.0.0",
    "ajv": "^6.1.1",
@@ -47,12 +48,13 @@
    "blocked": "^1.2.1",
    "bluebird": "^3.5.1",
    "body-parser": "^1.18.2",
    "compression": "^1.7.3",
    "connect-flash": "^0.1.1",
    "cookie": "^0.3.1",
    "cookie-parser": "^1.4.3",
    "d3-time-format": "^2.1.1",
    "debug": "^4.0.1",
    "decorator-synchronized": "^0.3.0",
    "decorator-synchronized": "^0.5.0",
    "deptree": "^1.0.0",
    "escape-string-regexp": "^1.0.5",
    "event-to-promise": "^0.8.0",
@@ -69,11 +71,11 @@
    "helmet": "^3.9.0",
    "highland": "^2.11.1",
    "http-proxy": "^1.16.2",
    "http-request-plus": "^0.7.2",
    "http-request-plus": "^0.8.0",
    "http-server-plus": "^0.10.0",
    "human-format": "^0.10.0",
    "is-redirect": "^1.0.0",
    "iterable-backoff": "^0.0.0",
    "iterable-backoff": "^0.1.0",
    "jest-worker": "^24.0.0",
    "js-yaml": "^3.10.0",
    "json-rpc-peer": "^0.15.3",
@@ -93,17 +95,18 @@
    "ms": "^2.1.1",
    "multikey-hash": "^1.0.4",
    "ndjson": "^1.5.0",
    "otplib": "^10.0.1",
    "otplib": "^11.0.0",
    "parse-pairs": "^0.2.2",
    "partial-stream": "0.0.0",
    "passport": "^0.4.0",
    "passport-local": "^1.0.0",
    "pretty-format": "^23.0.0",
    "promise-toolbox": "^0.11.0",
    "pretty-format": "^24.0.0",
    "promise-toolbox": "^0.12.1",
    "proxy-agent": "^3.0.0",
    "pug": "^2.0.0-rc.4",
    "pump": "^3.0.0",
    "pw": "^0.0.4",
    "readable-stream": "^3.2.0",
    "redis": "^2.8.0",
    "schema-inspector": "^1.6.8",
    "semver": "^5.4.1",
@@ -112,14 +115,14 @@
    "stack-chain": "^2.0.0",
    "stoppable": "^1.0.5",
    "struct-fu": "^1.2.0",
    "tar-stream": "^1.5.5",
    "tar-stream": "^2.0.1",
    "through2": "^3.0.0",
    "tmp": "^0.0.33",
    "uuid": "^3.0.1",
    "value-matcher": "^0.2.0",
    "vhd-lib": "^0.5.1",
    "ws": "^6.0.0",
    "xen-api": "^0.24.3",
    "xen-api": "^0.24.5",
    "xml2js": "^0.4.19",
    "xo-acl-resolver": "^0.4.1",
    "xo-collection": "^0.4.1",

@@ -44,6 +44,14 @@

#=====================================================================

# Directory containing the database of XO.
# Currently used for logs.
#
# Default: '/var/lib/xo-server/data'
#datadir = '/var/lib/xo-server/data'

#=====================================================================

# Configuration of the embedded HTTP server.
[http]
# If set to true, all HTTP traffic will be redirected to the first HTTPs
@@ -136,12 +144,6 @@ port = 80
# del = '3dda29ad-3015-44f9-b13b-fa570de92489'
# srem = '3fd758c9-5610-4e9d-a058-dbf4cb6d8bf0'

# Directory containing the database of XO.
# Currently used for logs.
#
# Default: '/var/lib/xo-server/data'
#datadir = '/var/lib/xo-server/data'

#=====================================================================

# Configuration for remotes
87  packages/xo-server/src/_MultiKeyMap.js (new file)

@@ -0,0 +1,87 @@
class Node {
  constructor(value) {
    this.children = new Map()
    this.value = value
  }
}

function del(node, i, keys) {
  if (i === keys.length) {
    if (node instanceof Node) {
      node.value = undefined
      return node
    }
    return
  }
  if (!(node instanceof Node)) {
    return node
  }
  const key = keys[i]
  const { children } = node
  const child = children.get(key)
  if (child === undefined) {
    return node
  }
  const newChild = del(child, i + 1, keys)
  if (newChild === undefined) {
    if (children.size === 1) {
      return node.value
    }
    children.delete(key)
  } else if (newChild !== child) {
    children.set(key, newChild)
  }
  return node
}

function get(node, i, keys) {
  return i === keys.length
    ? node instanceof Node
      ? node.value
      : node
    : node instanceof Node
    ? get(node.children.get(keys[i]), i + 1, keys)
    : undefined
}

function set(node, i, keys, value) {
  if (i === keys.length) {
    if (node instanceof Node) {
      node.value = value
      return node
    }
    return value
  }
  const key = keys[i]
  if (!(node instanceof Node)) {
    node = new Node(node)
    node.children.set(key, set(undefined, i + 1, keys, value))
  } else {
    const { children } = node
    const child = children.get(key)
    const newChild = set(child, i + 1, keys, value)
    if (newChild !== child) {
      children.set(key, newChild)
    }
  }
  return node
}

export default class MultiKeyMap {
  constructor() {
    // each node is either a value or a Node if it contains children
    this._root = undefined
  }

  delete(keys) {
    this._root = del(this._root, 0, keys)
  }

  get(keys) {
    return get(this._root, 0, keys)
  }

  set(keys, value) {
    this._root = set(this._root, 0, keys, value)
  }
}
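A short usage sketch for the `MultiKeyMap` introduced above (the keys and values are made up for illustration): entries are addressed by an ordered array of keys, each compared with `===`, which is what `_pDebounceWithKey` later in this diff relies on.

```js
import MultiKeyMap from './_MultiKeyMap'

const cache = new MultiKeyMap()

// keys are ordered arrays; values can be anything
cache.set(['VM', 'stats', 3600], 'hourly stats')
cache.set(['VM', 'stats', 86400], 'daily stats')

cache.get(['VM', 'stats', 3600]) // → 'hourly stats'
cache.get(['VM', 'stats', 60]) // → undefined (no entry for that key tuple)

cache.delete(['VM', 'stats', 86400])
cache.get(['VM', 'stats', 86400]) // → undefined
```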
22  packages/xo-server/src/_createNdJsonStream.js (new file)

@@ -0,0 +1,22 @@
import asyncIteratorToStream from 'async-iterator-to-stream'

function* values(object) {
  const keys = Object.keys(object)
  for (let i = 0, n = keys.length; i < n; ++i) {
    yield object[keys[i]]
  }
}

/**
 * Creates a NDJSON stream of all the values
 *
 * @param {(Array|Object)} collection
 */
module.exports = asyncIteratorToStream(function*(collection) {
  for (const value of Array.isArray(collection)
    ? collection
    : values(collection)) {
    yield JSON.stringify(value)
    yield '\n'
  }
})
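A quick sketch of how the helper above is meant to be consumed (the sample data and destination are invented): calling the exported function with an array or a plain object returns a readable stream that emits one JSON document per line.

```js
const createNdJsonStream = require('./_createNdJsonStream')

const logs = {
  a: { id: 'a', status: 'success' },
  b: { id: 'b', status: 'failure' },
}

// Prints:
// {"id":"a","status":"success"}
// {"id":"b","status":"failure"}
createNdJsonStream(logs).pipe(process.stdout)
```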
3  packages/xo-server/src/_ensureArray.js (new file)

@@ -0,0 +1,3 @@
// Ensure the value is an array, wrap it if necessary.
export default value =>
  value === undefined ? [] : Array.isArray(value) ? value : [value]

21  packages/xo-server/src/_ensureArray.spec.js (new file)

@@ -0,0 +1,21 @@
/* eslint-env jest */

import ensureArray from './_ensureArray'

describe('ensureArray()', function() {
  it('wrap the value in an array', function() {
    const value = 'foo'

    expect(ensureArray(value)).toEqual([value])
  })

  it('returns an empty array for undefined', function() {
    expect(ensureArray(undefined)).toEqual([])
  })

  it('returns the object itself if is already an array', function() {
    const array = ['foo', 'bar', 'baz']

    expect(ensureArray(array)).toBe(array)
  })
})
39  packages/xo-server/src/_pDebounceWithKey.js (new file)

@@ -0,0 +1,39 @@
import ensureArray from './_ensureArray'
import MultiKeyMap from './_MultiKeyMap'

function removeCacheEntry(cache, keys) {
  cache.delete(keys)
}

function scheduleRemoveCacheEntry(keys, expires) {
  const delay = expires - Date.now()
  if (delay <= 0) {
    removeCacheEntry(this, keys)
  } else {
    setTimeout(removeCacheEntry, delay, this, keys)
  }
}

const defaultKeyFn = () => []

// debounce an async function so that all subsequent calls in a delay receive
// the same result
//
// similar to `p-debounce` with `leading` set to `true` but with key support
export default (fn, delay, keyFn = defaultKeyFn) => {
  const cache = new MultiKeyMap()
  return function() {
    const keys = ensureArray(keyFn.apply(this, arguments))
    let promise = cache.get(keys)
    if (promise === undefined) {
      cache.set(keys, (promise = fn.apply(this, arguments)))
      const remove = scheduleRemoveCacheEntry.bind(
        cache,
        keys,
        Date.now() + delay
      )
      promise.then(remove, remove)
    }
    return promise
  }
}
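A usage sketch for the debounce helper above; the wrapped function, the 5-second delay and the `app` object are hypothetical. All calls that resolve to the same key within the delay share the first call's promise:

```js
import pDebounceWithKey from './_pDebounceWithKey'

// Hypothetical expensive async lookup, cached for 5 seconds per host id.
const getHostStats = pDebounceWithKey(
  function(hostId) {
    return this.fetchStats(hostId) // placeholder for the real async work (must return a promise)
  },
  5e3,
  hostId => [hostId] // key function: one cache entry per host
)

getHostStats.call(app, 'host1') // performs the call
getHostStats.call(app, 'host1') // within 5 s: returns the same promise
getHostStats.call(app, 'host2') // different key: its own call
```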
12  packages/xo-server/src/_parseDuration.js (new file)

@@ -0,0 +1,12 @@
import ms from 'ms'

export default value => {
  if (typeof value === 'number') {
    return value
  }
  const duration = ms(value)
  if (duration === undefined) {
    throw new TypeError(`not a valid duration: ${duration}`)
  }
  return duration
}
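A tiny illustration of the helper above (the examples are mine): numbers are passed through as milliseconds, strings are converted with `ms`, and anything `ms` cannot parse raises a `TypeError`.

```js
import parseDuration from './_parseDuration'

parseDuration(60000) // → 60000 (already a number of milliseconds)
parseDuration('30 days') // → 2592000000 (converted to milliseconds by `ms`)
parseDuration('not a duration') // → throws TypeError: not a valid duration
```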
@@ -1,5 +1,8 @@
import { basename } from 'path'
import { fromCallback } from 'promise-toolbox'
import { pipeline } from 'readable-stream'

import createNdJsonStream from '../_createNdJsonStream'
import { safeDateFormat } from '../utils'

export function createJob({ schedules, ...job }) {
@@ -150,12 +153,26 @@ runJob.params = {

// -----------------------------------------------------------------------------

export function getAllLogs() {
  return this.getBackupNgLogs()
async function handleGetAllLogs(req, res) {
  const logs = await this.getBackupNgLogs()
  res.set('Content-Type', 'application/json')
  return fromCallback(cb => pipeline(createNdJsonStream(logs), res, cb))
}

export function getAllLogs({ ndjson = false }) {
  return ndjson
    ? this.registerHttpRequest(handleGetAllLogs).then($getFrom => ({
        $getFrom,
      }))
    : this.getBackupNgLogs()
}

getAllLogs.permission = 'admin'

getAllLogs.params = {
  ndjson: { type: 'boolean', optional: true },
}

export function getLogs({ after, before, limit, ...filter }) {
  return this.getBackupNgLogsSorted({ after, before, limit, filter })
}
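For context, the `$getFrom` value returned when `ndjson: true` appears to be the path of an HTTP endpoint serving the logs as NDJSON, i.e. one JSON record per line. A minimal client-side sketch of consuming it; the xo-web changes further down in this branch do essentially this through the `_parseNdJson` helper.

```js
// minimal sketch, assuming `$getFrom` was obtained from backupNg.getAllLogs
const response = await fetch($getFrom)
const text = await response.text()

const logs = text
  .split('\n')
  .filter(line => line !== '')
  .map(JSON.parse)
```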
@@ -85,34 +85,35 @@ createBonded.description =
|
||||
// ===================================================================
|
||||
|
||||
export async function set({
|
||||
network,
|
||||
|
||||
automatic,
|
||||
defaultIsLocked,
|
||||
name_description: nameDescription,
|
||||
name_label: nameLabel,
|
||||
defaultIsLocked,
|
||||
id,
|
||||
network,
|
||||
}) {
|
||||
await this.getXapi(network).setNetworkProperties(network._xapiId, {
|
||||
automatic,
|
||||
defaultIsLocked,
|
||||
nameDescription,
|
||||
nameLabel,
|
||||
defaultIsLocked,
|
||||
})
|
||||
}
|
||||
|
||||
set.params = {
|
||||
id: {
|
||||
type: 'string',
|
||||
automatic: {
|
||||
type: 'boolean',
|
||||
optional: true,
|
||||
},
|
||||
name_label: {
|
||||
type: 'string',
|
||||
defaultIsLocked: {
|
||||
type: 'boolean',
|
||||
optional: true,
|
||||
},
|
||||
name_description: {
|
||||
type: 'string',
|
||||
optional: true,
|
||||
},
|
||||
defaultIsLocked: {
|
||||
type: 'boolean',
|
||||
name_label: {
|
||||
type: 'string',
|
||||
optional: true,
|
||||
},
|
||||
}
|
||||
|
||||
@@ -1,16 +1,20 @@
|
||||
import { deprecate } from 'util'
|
||||
|
||||
import { getUserPublicProperties } from '../utils'
|
||||
import { invalidCredentials } from 'xo-common/api-errors'
|
||||
|
||||
// ===================================================================
|
||||
|
||||
export async function signIn(credentials) {
|
||||
const user = await this.authenticateUser(credentials)
|
||||
if (!user) {
|
||||
throw invalidCredentials()
|
||||
const { session } = this
|
||||
|
||||
const { user, expiration } = await this.authenticateUser(credentials)
|
||||
session.set('user_id', user.id)
|
||||
|
||||
if (expiration === undefined) {
|
||||
session.unset('expiration')
|
||||
} else {
|
||||
session.set('expiration', expiration)
|
||||
}
|
||||
this.session.set('user_id', user.id)
|
||||
|
||||
return getUserPublicProperties(user)
|
||||
}
|
||||
|
||||
@@ -1,8 +1,9 @@
|
||||
import asyncMap from '@xen-orchestra/async-map'
|
||||
import { some } from 'lodash'
|
||||
|
||||
import ensureArray from '../_ensureArray'
|
||||
import { asInteger } from '../xapi/utils'
|
||||
import { ensureArray, forEach, parseXml } from '../utils'
|
||||
import { forEach, parseXml } from '../utils'
|
||||
|
||||
// ===================================================================
|
||||
|
||||
|
||||
@@ -612,6 +612,8 @@ set.params = {
|
||||
|
||||
share: { type: 'boolean', optional: true },
|
||||
|
||||
startDelay: { type: 'integer', optional: true },
|
||||
|
||||
// set the VM network interface controller
|
||||
nicType: { type: ['string', 'null'], optional: true },
|
||||
}
|
||||
@@ -1461,14 +1463,25 @@ getCloudInitConfig.resolve = {
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
export async function createCloudInitConfigDrive({ vm, sr, config, coreos }) {
|
||||
export async function createCloudInitConfigDrive({
|
||||
config,
|
||||
coreos,
|
||||
networkConfig,
|
||||
sr,
|
||||
vm,
|
||||
}) {
|
||||
const xapi = this.getXapi(vm)
|
||||
if (coreos) {
|
||||
// CoreOS is a special CloudConfig drive created by XS plugin
|
||||
await xapi.createCoreOsCloudInitConfigDrive(vm._xapiId, sr._xapiId, config)
|
||||
} else {
|
||||
// use generic Cloud Init drive
|
||||
await xapi.createCloudInitConfigDrive(vm._xapiId, sr._xapiId, config)
|
||||
await xapi.createCloudInitConfigDrive(
|
||||
vm._xapiId,
|
||||
sr._xapiId,
|
||||
config,
|
||||
networkConfig
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1476,6 +1489,7 @@ createCloudInitConfigDrive.params = {
|
||||
vm: { type: 'string' },
|
||||
sr: { type: 'string' },
|
||||
config: { type: 'string' },
|
||||
networkConfig: { type: 'string', optional: true },
|
||||
}
|
||||
|
||||
createCloudInitConfigDrive.resolve = {
|
||||
|
||||
@@ -1,5 +1,8 @@
|
||||
import getStream from 'get-stream'
|
||||
import { forEach } from 'lodash'
|
||||
import { fromCallback } from 'promise-toolbox'
|
||||
import { pipeline } from 'readable-stream'
|
||||
|
||||
import createNdJsonStream from '../_createNdJsonStream'
|
||||
|
||||
// ===================================================================
|
||||
|
||||
@@ -17,6 +20,7 @@ export async function exportConfig() {
|
||||
(req, res) => {
|
||||
res.writeHead(200, 'OK', {
|
||||
'content-disposition': 'attachment',
|
||||
'content-type': 'application/json',
|
||||
})
|
||||
|
||||
return this.exportConfig()
|
||||
@@ -32,11 +36,9 @@ exportConfig.permission = 'admin'
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
function handleGetAllObjects(req, res, { filter, limit }) {
|
||||
forEach(this.getObjects({ filter, limit }), object => {
|
||||
res.write(JSON.stringify(object))
|
||||
res.write('\n')
|
||||
})
|
||||
res.end()
|
||||
const objects = this.getObjects({ filter, limit })
|
||||
res.set('Content-Type', 'application/json')
|
||||
return fromCallback(cb => pipeline(createNdJsonStream(objects), res, cb))
|
||||
}
|
||||
|
||||
export function getAllObjects({ filter, limit, ndjson = false }) {
|
||||
|
||||
@@ -10,8 +10,9 @@ import { invalidParameters } from 'xo-common/api-errors'
|
||||
import { v4 as generateUuid } from 'uuid'
|
||||
import { includes, remove, filter, find, range } from 'lodash'
|
||||
|
||||
import ensureArray from '../_ensureArray'
|
||||
import { asInteger } from '../xapi/utils'
|
||||
import { parseXml, ensureArray } from '../utils'
|
||||
import { parseXml } from '../utils'
|
||||
|
||||
const log = createLogger('xo:xosan')
|
||||
|
||||
|
||||
@@ -3,6 +3,7 @@ import assert from 'assert'
|
||||
import authenticator from 'otplib/authenticator'
|
||||
import bind from 'lodash/bind'
|
||||
import blocked from 'blocked'
|
||||
import compression from 'compression'
|
||||
import createExpress from 'express'
|
||||
import createLogger from '@xen-orchestra/log'
|
||||
import crypto from 'crypto'
|
||||
@@ -14,17 +15,20 @@ import pw from 'pw'
|
||||
import serveStatic from 'serve-static'
|
||||
import startsWith from 'lodash/startsWith'
|
||||
import stoppable from 'stoppable'
|
||||
import WebServer from 'http-server-plus'
|
||||
import WebSocket from 'ws'
|
||||
|
||||
import { compile as compilePug } from 'pug'
|
||||
import { createServer as createProxyServer } from 'http-proxy'
|
||||
import { fromEvent } from 'promise-toolbox'
|
||||
import { ifDef } from '@xen-orchestra/defined'
|
||||
import { join as joinPath } from 'path'
|
||||
|
||||
import JsonRpcPeer from 'json-rpc-peer'
|
||||
import { invalidCredentials } from 'xo-common/api-errors'
|
||||
import { ensureDir, readdir, readFile } from 'fs-extra'
|
||||
|
||||
import WebServer from 'http-server-plus'
|
||||
import parseDuration from './_parseDuration'
|
||||
import Xo from './xo'
|
||||
import {
|
||||
forEach,
|
||||
@@ -91,6 +95,8 @@ function createExpressApp(config) {
|
||||
|
||||
app.use(helmet())
|
||||
|
||||
app.use(compression())
|
||||
|
||||
// Registers the cookie-parser and express-session middlewares,
|
||||
// necessary for connect-flash.
|
||||
app.use(cookieParser(null, config.http.cookies))
|
||||
@@ -118,7 +124,7 @@ function createExpressApp(config) {
|
||||
return app
|
||||
}
|
||||
|
||||
async function setUpPassport(express, xo) {
|
||||
async function setUpPassport(express, xo, { authentication: authCfg }) {
|
||||
const strategies = { __proto__: null }
|
||||
xo.registerPassportStrategy = strategy => {
|
||||
passport.use(strategy)
|
||||
@@ -176,16 +182,24 @@ async function setUpPassport(express, xo) {
|
||||
}
|
||||
})
|
||||
|
||||
const PERMANENT_VALIDITY = ifDef(
|
||||
authCfg.permanentCookieValidity,
|
||||
parseDuration
|
||||
)
|
||||
const SESSION_VALIDITY = ifDef(authCfg.sessionCookieValidity, parseDuration)
|
||||
const setToken = async (req, res, next) => {
|
||||
const { user, isPersistent } = req.session
|
||||
const token = (await xo.createAuthenticationToken({ userId: user.id })).id
|
||||
const token = await xo.createAuthenticationToken({
|
||||
expiresIn: isPersistent ? PERMANENT_VALIDITY : SESSION_VALIDITY,
|
||||
userId: user.id,
|
||||
})
|
||||
|
||||
// Persistent cookie ? => 1 year
|
||||
// Non-persistent : external provider as Github, Twitter...
|
||||
res.cookie(
|
||||
'token',
|
||||
token,
|
||||
isPersistent ? { maxAge: 1000 * 60 * 60 * 24 * 365 } : undefined
|
||||
token.id,
|
||||
// a session (non-permanent) cookie must not have an expiration date
|
||||
// because it must not survive browser restart
|
||||
isPersistent ? { expires: new Date(token.expiration) } : undefined
|
||||
)
|
||||
|
||||
delete req.session.isPersistent
|
||||
@@ -237,7 +251,7 @@ async function setUpPassport(express, xo) {
|
||||
xo.registerPassportStrategy(
|
||||
new LocalStrategy(async (username, password, done) => {
|
||||
try {
|
||||
const user = await xo.authenticateUser({ username, password })
|
||||
const { user } = await xo.authenticateUser({ username, password })
|
||||
done(null, user)
|
||||
} catch (error) {
|
||||
done(null, false, { message: error.message })
|
||||
@@ -356,6 +370,7 @@ async function makeWebServerListen(
|
||||
;[opts.cert, opts.key] = await Promise.all([readFile(cert), readFile(key)])
|
||||
if (opts.key.includes('ENCRYPTED')) {
|
||||
opts.passphrase = await new Promise(resolve => {
|
||||
// eslint-disable-next-line no-console
|
||||
console.log('Encrypted key %s', key)
|
||||
process.stdout.write(`Enter pass phrase: `)
|
||||
pw(resolve)
|
||||
@@ -503,6 +518,11 @@ const setUpApi = (webServer, xo, config) => {
|
||||
|
||||
// Connect the WebSocket to the JSON-RPC server.
|
||||
socket.on('message', message => {
|
||||
const expiration = connection.get('expiration', undefined)
|
||||
if (expiration !== undefined && expiration < Date.now()) {
|
||||
return void connection.close()
|
||||
}
|
||||
|
||||
jsonRpc.write(message)
|
||||
})
|
||||
|
||||
@@ -550,7 +570,7 @@ const setUpConsoleProxy = (webServer, xo) => {
|
||||
{
|
||||
const { token } = parseCookies(req.headers.cookie)
|
||||
|
||||
const user = await xo.authenticateUser({ token })
|
||||
const { user } = await xo.authenticateUser({ token })
|
||||
if (!(await xo.hasPermissions(user.id, [[id, 'operate']]))) {
|
||||
throw invalidCredentials()
|
||||
}
|
||||
@@ -570,6 +590,9 @@ const setUpConsoleProxy = (webServer, xo) => {
|
||||
proxyConsole(connection, vmConsole, xapi.sessionId)
|
||||
})
|
||||
} catch (error) {
|
||||
try {
|
||||
socket.end()
|
||||
} catch (_) {}
|
||||
console.error((error && error.stack) || error)
|
||||
}
|
||||
})
|
||||
@@ -667,7 +690,7 @@ export default async function main(args) {
|
||||
|
||||
// Everything above is not protected by the sign in, allowing xo-cli
|
||||
// to work properly.
|
||||
await setUpPassport(express, xo)
|
||||
await setUpPassport(express, xo, config)
|
||||
|
||||
// Attaches express to the web server.
|
||||
webServer.on('request', express)
|
||||
|
||||
@@ -14,6 +14,10 @@ export class Remotes extends Collection {
|
||||
async get(properties) {
|
||||
const remotes = await super.get(properties)
|
||||
forEach(remotes, remote => {
|
||||
remote.benchmarks =
|
||||
remote.benchmarks !== undefined
|
||||
? JSON.parse(remote.benchmarks)
|
||||
: undefined
|
||||
remote.enabled = remote.enabled === 'true'
|
||||
})
|
||||
return remotes
|
||||
|
||||
@@ -10,7 +10,7 @@ const recoverAccount = async ([name]) => {
|
||||
xo-server-recover-account <user name or email>
|
||||
|
||||
If the user does not exist, it is created, if it exists, updates
|
||||
its password and resets its permission to Admin.
|
||||
its password, remove any configured OTP and resets its permission to Admin.
|
||||
`
|
||||
}
|
||||
|
||||
@@ -32,7 +32,11 @@ xo-server-recover-account <user name or email>
|
||||
|
||||
const user = await xo.getUserByName(name, true)
|
||||
if (user !== null) {
|
||||
await xo.updateUser(user.id, { password, permission: 'admin' })
|
||||
await xo.updateUser(user.id, {
|
||||
password,
|
||||
permission: 'admin',
|
||||
preferences: { otp: null },
|
||||
})
|
||||
console.log(`user ${name} has been successfully updated`)
|
||||
} else {
|
||||
await xo.createUser({ name, password, permission: 'admin' })
|
||||
|
||||
@@ -3,7 +3,6 @@ import forEach from 'lodash/forEach'
|
||||
import has from 'lodash/has'
|
||||
import highland from 'highland'
|
||||
import humanFormat from 'human-format'
|
||||
import isArray from 'lodash/isArray'
|
||||
import isString from 'lodash/isString'
|
||||
import keys from 'lodash/keys'
|
||||
import multiKeyHashInt from 'multikey-hash'
|
||||
@@ -49,17 +48,6 @@ export const diffItems = (coll1, coll2) => {
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
// Ensure the value is an array, wrap it if necessary.
|
||||
export function ensureArray(value) {
|
||||
if (value === undefined) {
|
||||
return []
|
||||
}
|
||||
|
||||
return isArray(value) ? value : [value]
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
// Returns the value of a property and removes it from the object.
|
||||
export function extractProperty(obj, prop) {
|
||||
const value = obj[prop]
|
||||
|
||||
@@ -3,7 +3,6 @@
|
||||
import {
|
||||
camelToSnakeCase,
|
||||
diffItems,
|
||||
ensureArray,
|
||||
extractProperty,
|
||||
formatXml,
|
||||
generateToken,
|
||||
@@ -42,26 +41,6 @@ describe('diffItems', () => {
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
describe('ensureArray()', function() {
|
||||
it('wrap the value in an array', function() {
|
||||
const value = 'foo'
|
||||
|
||||
expect(ensureArray(value)).toEqual([value])
|
||||
})
|
||||
|
||||
it('returns an empty array for undefined', function() {
|
||||
expect(ensureArray(undefined)).toEqual([])
|
||||
})
|
||||
|
||||
it('returns the object itself if is already an array', function() {
|
||||
const array = ['foo', 'bar', 'baz']
|
||||
|
||||
expect(ensureArray(array)).toBe(array)
|
||||
})
|
||||
})
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
describe('extractProperty()', function() {
|
||||
it('returns the value of the property', function() {
|
||||
const value = {}
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
import { startsWith } from 'lodash'
|
||||
|
||||
import ensureArray from './_ensureArray'
|
||||
import {
|
||||
ensureArray,
|
||||
extractProperty,
|
||||
forEach,
|
||||
isArray,
|
||||
@@ -347,6 +347,7 @@ const TRANSFORMS = {
|
||||
hasVendorDevice: obj.has_vendor_device,
|
||||
resourceSet,
|
||||
snapshots: link(obj, 'snapshots'),
|
||||
startDelay: +obj.start_delay,
|
||||
startTime: metrics && toTimestamp(metrics.start_time),
|
||||
tags: obj.tags,
|
||||
VIFs: link(obj, 'VIFs'),
|
||||
@@ -578,6 +579,7 @@ const TRANSFORMS = {
|
||||
|
||||
network(obj) {
|
||||
return {
|
||||
automatic: obj.other_config?.automatic === 'true',
|
||||
bridge: obj.bridge,
|
||||
defaultIsLocked: obj.default_locking_mode === 'disabled',
|
||||
MTU: +obj.MTU,
|
||||
|
||||
@@ -1,11 +1,12 @@
|
||||
import JSON5 from 'json5'
|
||||
import limitConcurrency from 'limit-concurrency-decorator'
|
||||
import synchronized from 'decorator-synchronized'
|
||||
import { BaseError } from 'make-error'
|
||||
import {
|
||||
defaults,
|
||||
endsWith,
|
||||
findKey,
|
||||
forEach,
|
||||
get,
|
||||
identity,
|
||||
map,
|
||||
mapValues,
|
||||
@@ -51,11 +52,6 @@ const RRD_POINTS_PER_STEP = {
|
||||
// Utils
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
// Return current local timestamp in seconds
|
||||
function getCurrentTimestamp() {
|
||||
return Date.now() / 1000
|
||||
}
|
||||
|
||||
function convertNanToNull(value) {
|
||||
return isNaN(value) ? null : value
|
||||
}
|
||||
@@ -77,23 +73,8 @@ const computeValues = (dataRow, legendIndex, transformValue = identity) =>
|
||||
const combineStats = (stats, path, combineValues) =>
|
||||
zipWith(...map(stats, path), (...values) => combineValues(values))
|
||||
|
||||
// It browse the object in depth and initialise it's properties
|
||||
// The targerPath can be a string or an array containing the depth
|
||||
// targetPath: [a, b, c] => a.b.c
|
||||
const getValuesFromDepth = (obj, targetPath) => {
|
||||
if (typeof targetPath === 'string') {
|
||||
return (obj[targetPath] = [])
|
||||
}
|
||||
|
||||
forEach(targetPath, (path, key) => {
|
||||
if (obj[path] === undefined) {
|
||||
obj = obj[path] = targetPath.length - 1 === key ? [] : {}
|
||||
return
|
||||
}
|
||||
obj = obj[path]
|
||||
})
|
||||
return obj
|
||||
}
|
||||
const createGetProperty = (obj, property, defaultValue) =>
|
||||
defaults(obj, { [property]: defaultValue })[property]
|
||||
|
||||
const testMetric = (test, type) =>
|
||||
typeof test === 'string'
|
||||
@@ -245,6 +226,34 @@ const STATS = {
|
||||
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
// RRD
|
||||
// json: {
|
||||
// meta: {
|
||||
// start: Number,
|
||||
// step: Number,
|
||||
// end: Number,
|
||||
// rows: Number,
|
||||
// columns: Number,
|
||||
// legend: String[rows]
|
||||
// },
|
||||
// data: Item[columns] // Item = { t: Number, values: Number[rows] }
|
||||
// }
|
||||
|
||||
// Local cache
|
||||
// _statsByObject : {
|
||||
// [uuid]: {
|
||||
// [step]: {
|
||||
// endTimestamp: Number, // the timestamp of the last statistic point
|
||||
// interval: Number, // step
|
||||
// stats: {
|
||||
// [metric1]: Number[],
|
||||
// [metric2]: {
|
||||
// [subMetric]: Number[],
|
||||
// }
|
||||
// }
|
||||
// }
|
||||
// }
|
||||
// }
|
||||
export default class XapiStats {
|
||||
constructor() {
|
||||
this._statsByObject = {}
|
||||
@@ -268,44 +277,26 @@ export default class XapiStats {
|
||||
.then(response => response.readAll().then(JSON5.parse))
|
||||
}
|
||||
|
||||
async _getNextTimestamp(xapi, host, step) {
|
||||
const currentTimeStamp = await getServerTimestamp(xapi, host.$ref)
|
||||
const maxDuration = step * RRD_POINTS_PER_STEP[step]
|
||||
const lastTimestamp = get(this._statsByObject, [
|
||||
host.uuid,
|
||||
step,
|
||||
'endTimestamp',
|
||||
])
|
||||
// To avoid multiple requests, we keep a cash for the stats and
|
||||
// only return it if we not exceed a step
|
||||
_getCachedStats(uuid, step, currentTimeStamp) {
|
||||
const statsByObject = this._statsByObject
|
||||
|
||||
if (
|
||||
lastTimestamp === undefined ||
|
||||
currentTimeStamp - lastTimestamp + step > maxDuration
|
||||
) {
|
||||
return currentTimeStamp - maxDuration + step
|
||||
const stats = statsByObject[uuid]?.[step]
|
||||
if (stats === undefined) {
|
||||
return
|
||||
}
|
||||
return lastTimestamp
|
||||
|
||||
if (stats.endTimestamp + step < currentTimeStamp) {
|
||||
delete statsByObject[uuid][step]
|
||||
return
|
||||
}
|
||||
|
||||
return stats
|
||||
}
|
||||
|
||||
_getStats(hostUuid, step, vmUuid) {
|
||||
const hostStats = this._statsByObject[hostUuid][step]
|
||||
|
||||
// Return host stats
|
||||
if (vmUuid === undefined) {
|
||||
return {
|
||||
interval: step,
|
||||
...hostStats,
|
||||
}
|
||||
}
|
||||
|
||||
// Return vm stats
|
||||
return {
|
||||
interval: step,
|
||||
endTimestamp: hostStats.endTimestamp,
|
||||
...this._statsByObject[vmUuid][step],
|
||||
}
|
||||
}
|
||||
|
||||
async _getAndUpdateStats(xapi, { host, vmUuid, granularity }) {
|
||||
@synchronized.withKey((_, { host }) => host.uuid)
|
||||
async _getAndUpdateStats(xapi, { host, uuid, granularity }) {
|
||||
const step =
|
||||
granularity === undefined
|
||||
? RRD_STEP_SECONDS
|
||||
@@ -317,103 +308,93 @@ export default class XapiStats {
|
||||
)
|
||||
}
|
||||
|
||||
// Limit the number of http requests
|
||||
const hostUuid = host.uuid
|
||||
const currentTimeStamp = await getServerTimestamp(xapi, host.$ref)
|
||||
|
||||
if (
|
||||
!(
|
||||
vmUuid !== undefined &&
|
||||
get(this._statsByObject, [vmUuid, step]) === undefined
|
||||
) &&
|
||||
get(this._statsByObject, [hostUuid, step, 'localTimestamp']) + step >
|
||||
getCurrentTimestamp()
|
||||
) {
|
||||
return this._getStats(hostUuid, step, vmUuid)
|
||||
const stats = this._getCachedStats(uuid, step, currentTimeStamp)
|
||||
if (stats !== undefined) {
|
||||
return stats
|
||||
}
|
||||
|
||||
const timestamp = await this._getNextTimestamp(xapi, host, step)
|
||||
const json = await this._getJson(xapi, host, timestamp, step)
|
||||
if (json.meta.step !== step) {
|
||||
const maxDuration = step * RRD_POINTS_PER_STEP[step]
|
||||
|
||||
// To avoid crossing over the boundary, we ask for one less step
|
||||
const optimumTimestamp = currentTimeStamp - maxDuration + step
|
||||
const json = await this._getJson(xapi, host, optimumTimestamp, step)
|
||||
|
||||
const actualStep = json.meta.step
|
||||
if (json.data.length > 0) {
|
||||
// fetched data is organized from the newest to the oldest
|
||||
// but this implementation requires it in the other direction
|
||||
json.data.reverse()
|
||||
json.meta.legend.forEach((legend, index) => {
|
||||
const [, type, uuid, metricType] = /^AVERAGE:([^:]+):(.+):(.+)$/.exec(
|
||||
legend
|
||||
)
|
||||
|
||||
const metrics = STATS[type]
|
||||
if (metrics === undefined) {
|
||||
return
|
||||
}
|
||||
|
||||
const { metric, testResult } = findMetric(metrics, metricType)
|
||||
if (metric === undefined) {
|
||||
return
|
||||
}
|
||||
|
||||
const xoObjectStats = createGetProperty(this._statsByObject, uuid, {})
|
||||
let stepStats = xoObjectStats[actualStep]
|
||||
if (
|
||||
stepStats === undefined ||
|
||||
stepStats.endTimestamp !== json.meta.end
|
||||
) {
|
||||
stepStats = xoObjectStats[actualStep] = {
|
||||
endTimestamp: json.meta.end,
|
||||
interval: actualStep,
|
||||
}
|
||||
}
|
||||
|
||||
const path =
|
||||
metric.getPath !== undefined
|
||||
? metric.getPath(testResult)
|
||||
: [findKey(metrics, metric)]
|
||||
|
||||
const lastKey = path.length - 1
|
||||
let metricStats = createGetProperty(stepStats, 'stats', {})
|
||||
path.forEach((property, key) => {
|
||||
if (key === lastKey) {
|
||||
metricStats[property] = computeValues(
|
||||
json.data,
|
||||
index,
|
||||
metric.transformValue
|
||||
)
|
||||
return
|
||||
}
|
||||
|
||||
metricStats = createGetProperty(metricStats, property, {})
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
if (actualStep !== step) {
|
||||
throw new FaultyGranularity(
|
||||
`Unable to get the true granularity: ${json.meta.step}`
|
||||
`Unable to get the true granularity: ${actualStep}`
|
||||
)
|
||||
}
|
||||
|
||||
// It exists data
|
||||
if (json.data.length !== 0) {
|
||||
// Warning: Sometimes, the json.xport.meta.start value does not match with the
|
||||
// timestamp of the oldest data value
|
||||
// So, we use the timestamp of the oldest data value !
|
||||
const startTimestamp = json.data[json.meta.rows - 1].t
|
||||
const endTimestamp = get(this._statsByObject, [
|
||||
hostUuid,
|
||||
step,
|
||||
'endTimestamp',
|
||||
])
|
||||
|
||||
const statsOffset = endTimestamp - startTimestamp + step
|
||||
if (endTimestamp !== undefined && statsOffset > 0) {
|
||||
const parseOffset = statsOffset / step
|
||||
// Remove useless data
|
||||
// Note: Older values are at end of json.data.row
|
||||
json.data.splice(json.data.length - parseOffset)
|
||||
return (
|
||||
this._statsByObject[uuid]?.[step] ?? {
|
||||
endTimestamp: currentTimeStamp,
|
||||
interval: step,
|
||||
stats: {},
|
||||
}
|
||||
|
||||
// It exists useful data
|
||||
if (json.data.length > 0) {
|
||||
// reorder data
|
||||
json.data.reverse()
|
||||
forEach(json.meta.legend, (legend, index) => {
|
||||
const [, type, uuid, metricType] = /^AVERAGE:([^:]+):(.+):(.+)$/.exec(
|
||||
legend
|
||||
)
|
||||
|
||||
const metrics = STATS[type]
|
||||
if (metrics === undefined) {
|
||||
return
|
||||
}
|
||||
|
||||
const { metric, testResult } = findMetric(metrics, metricType)
|
||||
|
||||
if (metric === undefined) {
|
||||
return
|
||||
}
|
||||
|
||||
const path =
|
||||
metric.getPath !== undefined
|
||||
? metric.getPath(testResult)
|
||||
: [findKey(metrics, metric)]
|
||||
|
||||
const metricValues = getValuesFromDepth(this._statsByObject, [
|
||||
uuid,
|
||||
step,
|
||||
'stats',
|
||||
...path,
|
||||
])
|
||||
|
||||
metricValues.push(
|
||||
...computeValues(json.data, index, metric.transformValue)
|
||||
)
|
||||
|
||||
// remove older Values
|
||||
metricValues.splice(
|
||||
0,
|
||||
metricValues.length - RRD_POINTS_PER_STEP[step]
|
||||
)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
// Update timestamp
|
||||
const hostStats = this._statsByObject[hostUuid][step]
|
||||
hostStats.endTimestamp = json.meta.end
|
||||
hostStats.localTimestamp = getCurrentTimestamp()
|
||||
return this._getStats(hostUuid, step, vmUuid)
|
||||
)
|
||||
}
|
||||
|
||||
getHostStats(xapi, hostId, granularity) {
|
||||
const host = xapi.getObject(hostId)
|
||||
return this._getAndUpdateStats(xapi, {
|
||||
host: xapi.getObject(hostId),
|
||||
host,
|
||||
uuid: host.uuid,
|
||||
granularity,
|
||||
})
|
||||
}
|
||||
@@ -427,7 +408,7 @@ export default class XapiStats {
|
||||
|
||||
return this._getAndUpdateStats(xapi, {
|
||||
host,
|
||||
vmUuid: vm.uuid,
|
||||
uuid: vm.uuid,
|
||||
granularity,
|
||||
})
|
||||
}
|
||||
|
||||
@@ -35,11 +35,11 @@ import {
|
||||
import { satisfies as versionSatisfies } from 'semver'
|
||||
|
||||
import createSizeStream from '../size-stream'
|
||||
import ensureArray from '../_ensureArray'
|
||||
import fatfsBuffer, { init as fatfsBufferInit } from '../fatfs-buffer'
|
||||
import pRetry from '../_pRetry'
|
||||
import {
|
||||
camelToSnakeCase,
|
||||
ensureArray,
|
||||
forEach,
|
||||
isFunction,
|
||||
map,
|
||||
@@ -305,17 +305,24 @@ export default class Xapi extends XapiBase {
|
||||
|
||||
async setNetworkProperties(
|
||||
id,
|
||||
{ nameLabel, nameDescription, defaultIsLocked }
|
||||
{ automatic, defaultIsLocked, nameDescription, nameLabel }
|
||||
) {
|
||||
let defaultLockingMode
|
||||
if (defaultIsLocked != null) {
|
||||
defaultLockingMode = defaultIsLocked ? 'disabled' : 'unlocked'
|
||||
}
|
||||
await this._setObjectProperties(this.getObject(id), {
|
||||
nameLabel,
|
||||
nameDescription,
|
||||
defaultLockingMode,
|
||||
})
|
||||
const network = this.getObject(id)
|
||||
await Promise.all([
|
||||
this._setObjectProperties(network, {
|
||||
defaultLockingMode,
|
||||
nameDescription,
|
||||
nameLabel,
|
||||
}),
|
||||
this._updateObjectMapProperty(network, 'other_config', {
|
||||
automatic:
|
||||
automatic === undefined ? undefined : automatic ? 'true' : null,
|
||||
}),
|
||||
])
|
||||
}
|
||||
|
||||
// =================================================================
|
||||
@@ -730,6 +737,10 @@ export default class Xapi extends XapiBase {
|
||||
this._deleteVm(snapshot)
|
||||
)::ignoreErrors(),
|
||||
|
||||
vm.power_state === 'Suspended' &&
|
||||
vm.suspend_VDI !== NULL_REF &&
|
||||
this._deleteVdi(vm.suspend_VDI)::ignoreErrors(),
|
||||
|
||||
deleteDisks &&
|
||||
asyncMap(disks, ({ $ref: vdiRef }) => {
|
||||
let onFailure = () => {
|
||||
@@ -754,7 +765,7 @@ export default class Xapi extends XapiBase {
|
||||
return (
|
||||
// Only remove VBDs not attached to other VMs.
|
||||
vdi.VBDs.length < 2 || every(vdi.$VBDs, vbd => vbd.VM === $ref)
|
||||
? this._deleteVdi(vdi)
|
||||
? this._deleteVdi(vdiRef)
|
||||
: onFailure(vdi)
|
||||
)
|
||||
}
|
||||
@@ -924,7 +935,7 @@ export default class Xapi extends XapiBase {
|
||||
//
|
||||
// The snapshot must not exist otherwise it could break the
|
||||
// next export.
|
||||
this._deleteVdi(vdi)::ignoreErrors()
|
||||
this._deleteVdi(vdi.$ref)::ignoreErrors()
|
||||
return
|
||||
}
|
||||
|
||||
@@ -1080,7 +1091,7 @@ export default class Xapi extends XapiBase {
|
||||
}
|
||||
|
||||
newVdi = await this._getOrWaitObject(await this._cloneVdi(baseVdi))
|
||||
$defer.onFailure(() => this._deleteVdi(newVdi))
|
||||
$defer.onFailure(() => this._deleteVdi(newVdi.$ref))
|
||||
|
||||
await this._updateObjectMapProperty(newVdi, 'other_config', {
|
||||
[TAG_COPY_SRC]: vdi.uuid,
|
||||
@@ -1095,7 +1106,7 @@ export default class Xapi extends XapiBase {
|
||||
},
|
||||
sr: mapVdisSrs[vdi.uuid] || srId,
|
||||
})
|
||||
$defer.onFailure(() => this._deleteVdi(newVdi))
|
||||
$defer.onFailure(() => this._deleteVdi(newVdi.$ref))
|
||||
}
|
||||
|
||||
await asyncMap(vbds[vdiId], vbd =>
|
||||
@@ -1258,7 +1269,7 @@ export default class Xapi extends XapiBase {
|
||||
return loop()
|
||||
}
|
||||
|
||||
@synchronized
|
||||
@synchronized()
|
||||
_callInstallationPlugin(hostRef, vdi) {
|
||||
return this.call(
|
||||
'host.call_plugin',
|
||||
@@ -1286,7 +1297,7 @@ export default class Xapi extends XapiBase {
|
||||
'[XO] Supplemental pack ISO',
|
||||
'small temporary VDI to store a supplemental pack ISO'
|
||||
)
|
||||
$defer(() => this._deleteVdi(vdi))
|
||||
$defer(() => this._deleteVdi(vdi.$ref))
|
||||
|
||||
await this._callInstallationPlugin(this.getObject(hostId).$ref, vdi.uuid)
|
||||
}
|
||||
@@ -1314,7 +1325,7 @@ export default class Xapi extends XapiBase {
|
||||
'[XO] Supplemental pack ISO',
|
||||
'small temporary VDI to store a supplemental pack ISO'
|
||||
)
|
||||
$defer(() => this._deleteVdi(vdi))
|
||||
$defer(() => this._deleteVdi(vdi.$ref))
|
||||
|
||||
// Install pack sequentially to prevent concurrent access to the unique VDI
|
||||
for (const host of hosts) {
|
||||
@@ -1345,7 +1356,7 @@ export default class Xapi extends XapiBase {
|
||||
'[XO] Supplemental pack ISO',
|
||||
'small temporary VDI to store a supplemental pack ISO'
|
||||
)
|
||||
$defer(() => this._deleteVdi(vdi))
|
||||
$defer(() => this._deleteVdi(vdi.$ref))
|
||||
|
||||
await this._callInstallationPlugin(host.$ref, vdi.uuid)
|
||||
})
|
||||
@@ -1431,7 +1442,7 @@ export default class Xapi extends XapiBase {
|
||||
size: disk.capacity,
|
||||
sr: sr.$ref,
|
||||
}))
|
||||
$defer.onFailure(() => this._deleteVdi(vdi))
|
||||
$defer.onFailure(() => this._deleteVdi(vdi.$ref))
|
||||
|
||||
return this.createVbd({
|
||||
userdevice: disk.position,
|
||||
@@ -1906,15 +1917,21 @@ export default class Xapi extends XapiBase {
|
||||
vdi: newVdi,
|
||||
})
|
||||
})
|
||||
await this._deleteVdi(vdi)
|
||||
await this._deleteVdi(vdi.$ref)
|
||||
}
|
||||
}
|
||||
|
||||
// TODO: check whether the VDI is attached.
|
||||
async _deleteVdi(vdi) {
|
||||
log.debug(`Deleting VDI ${vdi.name_label}`)
|
||||
async _deleteVdi(vdiRef) {
|
||||
log.debug(`Deleting VDI ${vdiRef}`)
|
||||
|
||||
await this.call('VDI.destroy', vdi.$ref)
|
||||
try {
|
||||
await this.call('VDI.destroy', vdiRef)
|
||||
} catch (error) {
|
||||
if (error?.code !== 'HANDLE_INVALID') {
|
||||
throw error
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
_resizeVdi(vdi, size) {
|
||||
@@ -2009,7 +2026,7 @@ export default class Xapi extends XapiBase {
|
||||
}
|
||||
|
||||
async deleteVdi(vdiId) {
|
||||
await this._deleteVdi(this.getObject(vdiId))
|
||||
await this._deleteVdi(this.getObject(vdiId).$ref)
|
||||
}
|
||||
|
||||
async resizeVdi(vdiId, size) {
|
||||
@@ -2362,7 +2379,13 @@ export default class Xapi extends XapiBase {
|
||||
|
||||
// Generic Config Drive
|
||||
@deferrable
|
||||
async createCloudInitConfigDrive($defer, vmId, srId, config) {
|
||||
async createCloudInitConfigDrive(
|
||||
$defer,
|
||||
vmId,
|
||||
srId,
|
||||
userConfig,
|
||||
networkConfig
|
||||
) {
|
||||
const vm = this.getObject(vmId)
|
||||
const sr = this.getObject(srId)
|
||||
|
||||
@@ -2373,14 +2396,35 @@ export default class Xapi extends XapiBase {
|
||||
size: buffer.length,
|
||||
sr: sr.$ref,
|
||||
})
|
||||
$defer.onFailure(() => this._deleteVdi(vdi))
|
||||
$defer.onFailure(() => this._deleteVdi(vdi.$ref))
|
||||
|
||||
// Then, generate a FAT fs
|
||||
const fs = promisifyAll(fatfs.createFileSystem(fatfsBuffer(buffer)))
|
||||
const { mkdir, writeFile } = promisifyAll(
|
||||
fatfs.createFileSystem(fatfsBuffer(buffer))
|
||||
)
|
||||
|
||||
await Promise.all([
|
||||
fs.writeFile('meta-data', 'instance-id: ' + vm.uuid + '\n'),
|
||||
fs.writeFile('user-data', config),
|
||||
// preferred datasource: NoCloud
|
||||
//
|
||||
// https://cloudinit.readthedocs.io/en/latest/topics/datasources/nocloud.html
|
||||
writeFile('meta-data', 'instance-id: ' + vm.uuid + '\n'),
|
||||
writeFile('user-data', userConfig),
|
||||
networkConfig !== undefined && writeFile('network-config', networkConfig),
|
||||
|
||||
// fallback datasource: Config Drive 2
|
||||
//
|
||||
// https://cloudinit.readthedocs.io/en/latest/topics/datasources/configdrive.html#version-2
|
||||
mkdir('openstack').then(() =>
|
||||
mkdir('openstack/latest').then(() =>
|
||||
Promise.all([
|
||||
writeFile(
|
||||
'openstack/latest/meta_data.json',
|
||||
JSON.stringify({ uuid: vm.uuid })
|
||||
),
|
||||
writeFile('openstack/latest/user_data', userConfig),
|
||||
])
|
||||
)
|
||||
),
|
||||
])
|
||||
|
||||
// ignore errors, I (JFT) don't understand why they are emitted
|
||||
@@ -2406,7 +2450,7 @@ export default class Xapi extends XapiBase {
|
||||
size: stream.length,
|
||||
sr: sr.$ref,
|
||||
})
|
||||
$defer.onFailure(() => this._deleteVdi(vdi))
|
||||
$defer.onFailure(() => this._deleteVdi(vdi.$ref))
|
||||
|
||||
await this.importVdiContent(vdi.$id, stream, { format: VDI_FORMAT_RAW })
|
||||
|
||||
|
||||
@@ -12,14 +12,9 @@ import sortBy from 'lodash/sortBy'
|
||||
import assign from 'lodash/assign'
|
||||
import unzip from 'julien-f-unzip'
|
||||
|
||||
import ensureArray from '../../_ensureArray'
|
||||
import { debounce } from '../../decorators'
|
||||
import {
|
||||
ensureArray,
|
||||
forEach,
|
||||
mapFilter,
|
||||
mapToArray,
|
||||
parseXml,
|
||||
} from '../../utils'
|
||||
import { forEach, mapFilter, mapToArray, parseXml } from '../../utils'
|
||||
|
||||
import { extractOpaqueRef, useUpdateSystem } from '../utils'
|
||||
|
||||
@@ -300,7 +295,7 @@ export default {
|
||||
'small temporary VDI to store a patch ISO'
|
||||
)
|
||||
}
|
||||
$defer(() => this._deleteVdi(vdi))
|
||||
$defer(() => this._deleteVdi(vdi.$ref))
|
||||
|
||||
return vdi
|
||||
},
|
||||
|
||||
@@ -468,6 +468,13 @@ export default {
|
||||
return this._updateObjectMapProperty(vm, 'platform', { videoram })
|
||||
},
|
||||
},
|
||||
|
||||
startDelay: {
|
||||
get: vm => +vm.start_delay,
|
||||
set(startDelay, vm) {
|
||||
return this.call('VM.set_start_delay', vm.$ref, startDelay)
|
||||
},
|
||||
},
|
||||
}),
|
||||
|
||||
async editVm(id, props, checkLimits) {
|
||||
|
||||
@@ -1,28 +1,27 @@
|
||||
import createLogger from '@xen-orchestra/log'
|
||||
import ms from 'ms'
|
||||
import { noSuchObject } from 'xo-common/api-errors'
|
||||
import { ignoreErrors } from 'promise-toolbox'
|
||||
import { invalidCredentials, noSuchObject } from 'xo-common/api-errors'
|
||||
|
||||
import parseDuration from '../_parseDuration'
|
||||
import Token, { Tokens } from '../models/token'
|
||||
import { forEach, generateToken } from '../utils'
|
||||
|
||||
// ===================================================================
|
||||
|
||||
const log = createLogger('xo:authentification')
|
||||
|
||||
const noSuchAuthenticationToken = id => noSuchObject(id, 'authenticationToken')
|
||||
|
||||
const ONE_MONTH = 1e3 * 60 * 60 * 24 * 30
|
||||
|
||||
export default class {
|
||||
constructor(xo) {
|
||||
constructor(xo, config) {
|
||||
this._config = config.authentication
|
||||
this._providers = new Set()
|
||||
this._xo = xo
|
||||
|
||||
// Store last failures by user to throttle tries (slow bruteforce
|
||||
// attacks).
|
||||
this._failures = { __proto__: null }
|
||||
|
||||
this._providers = new Set()
|
||||
|
||||
// Creates persistent collections.
|
||||
const tokensDb = (this._tokens = new Tokens({
|
||||
connection: xo._redis,
|
||||
@@ -38,7 +37,7 @@ export default class {
|
||||
|
||||
const user = await xo.getUserByName(username, true)
|
||||
if (user && (await xo.checkUserPassword(user.id, password))) {
|
||||
return user.id
|
||||
return { userId: user.id }
|
||||
}
|
||||
})
|
||||
|
||||
@@ -49,7 +48,8 @@ export default class {
|
||||
}
|
||||
|
||||
try {
|
||||
return (await xo.getAuthenticationToken(tokenId)).user_id
|
||||
const token = await xo.getAuthenticationToken(tokenId)
|
||||
return { expiration: token.expiration, userId: token.user_id }
|
||||
} catch (error) {}
|
||||
})
|
||||
|
||||
@@ -87,32 +87,47 @@ export default class {
|
||||
for (const provider of this._providers) {
|
||||
try {
|
||||
// A provider can return:
|
||||
// - `null` if the user could not be authenticated
|
||||
// - `undefined`/`null` if the user could not be authenticated
|
||||
// - the identifier of the authenticated user
|
||||
// - an object containing:
|
||||
// - `userId`
|
||||
// - optionally `expiration` to indicate when the session is no longer
|
||||
// valid
|
||||
// - an object with a property `username` containing the name
|
||||
// of the authenticated user
|
||||
const result = await provider(credentials)
|
||||
|
||||
// No match.
|
||||
if (!result) {
|
||||
if (result == null) {
|
||||
continue
|
||||
}
|
||||
|
||||
return result.username
|
||||
? await this._xo.registerUser(undefined, result.username)
|
||||
: await this._xo.getUser(result)
|
||||
if (typeof result === 'string') {
|
||||
return {
|
||||
user: await this._getUser(result),
|
||||
}
|
||||
}
|
||||
|
||||
const { userId, username, expiration } = result
|
||||
|
||||
return {
|
||||
user: await (userId !== undefined
|
||||
? this._xo.getUser(userId)
|
||||
: this._xo.registerUser(undefined, username)),
|
||||
expiration,
|
||||
}
|
||||
} catch (error) {
|
||||
// DEPRECATED: Authentication providers may just throw `null`
|
||||
// to indicate they could not authenticate the user without
|
||||
// any special errors.
|
||||
if (error) log.error(error)
|
||||
if (error !== null) log.error(error)
|
||||
}
|
||||
}
|
||||
|
||||
return false
|
||||
}
|
||||
|
||||
async authenticateUser(credentials) {
|
||||
async authenticateUser(
|
||||
credentials
|
||||
): Promise<{| user: Object, expiration?: number |}> {
|
||||
// don't even attempt to authenticate with empty password
|
||||
const { password } = credentials
|
||||
if (password === '') {
|
||||
@@ -139,25 +154,31 @@ export default class {
|
||||
throw new Error('too fast authentication tries')
|
||||
}
|
||||
|
||||
const user = await this._authenticateUser(credentials)
|
||||
if (user) {
|
||||
delete failures[username]
|
||||
} else {
|
||||
const result = await this._authenticateUser(credentials)
|
||||
if (result === undefined) {
|
||||
failures[username] = now
|
||||
throw invalidCredentials()
|
||||
}
|
||||
|
||||
return user
|
||||
delete failures[username]
|
||||
return result
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------
|
||||
|
||||
async createAuthenticationToken({ expiresIn = ONE_MONTH, userId }) {
|
||||
async createAuthenticationToken({
|
||||
expiresIn = this._config.defaultTokenValidity,
|
||||
userId,
|
||||
}) {
|
||||
const token = new Token({
|
||||
id: await generateToken(),
|
||||
user_id: userId,
|
||||
expiration:
|
||||
Date.now() +
|
||||
(typeof expiresIn === 'string' ? ms(expiresIn) : expiresIn),
|
||||
Math.min(
|
||||
parseDuration(expiresIn),
|
||||
parseDuration(this._config.maxTokenValidity)
|
||||
),
|
||||
})
|
||||
|
||||
await this._tokens.add(token)
|
||||
|
||||
@@ -158,7 +158,7 @@ export default class IpPools {
|
||||
return countBy(ipPools, ({ id }) => `ipPool:${id}`)
|
||||
}
|
||||
|
||||
@synchronized
|
||||
@synchronized()
|
||||
allocIpAddresses(vifId, addAddresses, removeAddresses) {
|
||||
const updatedIpPools = {}
|
||||
const limits = {}
|
||||
|
||||
@@ -166,7 +166,7 @@ export default class {
|
||||
// save the new configuration.
|
||||
async configurePlugin(id, configuration) {
|
||||
const plugin = this._getRawPlugin(id)
|
||||
const metadata = await this._getPluginMetadata()
|
||||
const metadata = await this._getPluginMetadata(id)
|
||||
|
||||
if (metadata !== undefined) {
|
||||
configuration = sensitiveValues.merge(
|
||||
|
||||
@@ -68,22 +68,40 @@ export default class {
|
||||
let handler = handlers[id]
|
||||
if (handler === undefined) {
|
||||
handler = handlers[id] = getHandler(remote, this._remoteOptions)
|
||||
}
|
||||
|
||||
try {
|
||||
await handler.sync()
|
||||
ignoreErrors.call(this._updateRemote(id, { error: '' }))
|
||||
} catch (error) {
|
||||
ignoreErrors.call(this._updateRemote(id, { error: error.message }))
|
||||
throw error
|
||||
try {
|
||||
await handler.sync()
|
||||
ignoreErrors.call(this._updateRemote(id, { error: '' }))
|
||||
} catch (error) {
|
||||
ignoreErrors.call(this._updateRemote(id, { error: error.message }))
|
||||
throw error
|
||||
}
|
||||
}
|
||||
|
||||
return handler
|
||||
}
|
||||
|
||||
async testRemote(remote) {
|
||||
const handler = await this.getRemoteHandler(remote)
|
||||
return handler.test()
|
||||
async testRemote(remoteId) {
|
||||
const handler = await this.getRemoteHandler(remoteId)
|
||||
const { readRate, writeRate, ...answer } = await handler.test()
|
||||
|
||||
if (answer.success) {
|
||||
const benchmark = {
|
||||
readRate,
|
||||
timestamp: Date.now(),
|
||||
writeRate,
|
||||
}
|
||||
const remote = await this._getRemote(remoteId)
|
||||
|
||||
await this._updateRemote(remoteId, {
|
||||
benchmarks:
|
||||
remote.benchmarks !== undefined
|
||||
? [...remote.benchmarks.slice(-49), benchmark] // store 50 benchmarks
|
||||
: [benchmark],
|
||||
})
|
||||
}
|
||||
|
||||
return answer
|
||||
}
|
||||
|
||||
async getAllRemotesInfo() {
|
||||
@@ -150,7 +168,7 @@ export default class {
|
||||
}
|
||||
|
||||
@synchronized()
|
||||
async _updateRemote(id, { url, ...props }) {
|
||||
async _updateRemote(id, { benchmarks, url, ...props }) {
|
||||
const remote = await this._getRemote(id)
|
||||
|
||||
// url is handled separately to take care of obfuscated values
|
||||
@@ -158,6 +176,13 @@ export default class {
|
||||
remote.url = format(sensitiveValues.merge(parse(url), parse(remote.url)))
|
||||
}
|
||||
|
||||
if (
|
||||
benchmarks !== undefined ||
|
||||
(benchmarks = remote.benchmarks) !== undefined
|
||||
) {
|
||||
remote.benchmarks = JSON.stringify(benchmarks)
|
||||
}
|
||||
|
||||
patch(remote, props)
|
||||
|
||||
return (await this._remotes.update(remote)).properties
|
||||
|
||||
@@ -51,8 +51,8 @@ const levelPromise = db => {
|
||||
// ===================================================================
|
||||
|
||||
export default class {
|
||||
constructor(xo) {
|
||||
const dir = `${xo._config.datadir}/leveldb`
|
||||
constructor(xo, config) {
|
||||
const dir = `${config.datadir}/leveldb`
|
||||
this._db = ensureDir(dir).then(() => {
|
||||
return sublevel(
|
||||
levelup(dir, {
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
// @flow
|
||||
|
||||
import defer from 'golike-defer'
|
||||
import { type Remote, getHandler } from '@xen-orchestra/fs'
|
||||
import { mergeVhd as mergeVhd_ } from 'vhd-lib'
|
||||
|
||||
@@ -12,7 +13,8 @@ global.Promise = require('bluebird')
|
||||
// $FlowFixMe
|
||||
const config: Object = JSON.parse(process.env.XO_CONFIG)
|
||||
|
||||
export async function mergeVhd(
|
||||
export const mergeVhd = defer(async function(
|
||||
$defer: any,
|
||||
parentRemote: Remote,
|
||||
parentPath: string,
|
||||
childRemote: Remote,
|
||||
@@ -21,9 +23,11 @@ export async function mergeVhd(
|
||||
const parentHandler = getHandler(parentRemote, config.remoteOptions)
|
||||
const childHandler = getHandler(childRemote, config.remoteOptions)
|
||||
|
||||
// DO NOT forget the handlers as it they are still in use in the main process
|
||||
await parentHandler.sync()
|
||||
$defer.call(parentHandler, 'forget')
|
||||
|
||||
await childHandler.sync()
|
||||
$defer.call(childHandler, 'forget')
|
||||
|
||||
return mergeVhd_(parentHandler, parentPath, childHandler, childPath)
|
||||
}
|
||||
})
|
||||
|
||||
@@ -25,9 +25,9 @@
|
||||
},
|
||||
"dependencies": {
|
||||
"child-process-promise": "^2.0.3",
|
||||
"core-js": "3.0.0-beta.3",
|
||||
"core-js": "3.0.0",
|
||||
"pipette": "^0.9.3",
|
||||
"promise-toolbox": "^0.11.0",
|
||||
"promise-toolbox": "^0.12.1",
|
||||
"tmp": "^0.0.33",
|
||||
"vhd-lib": "^0.5.1"
|
||||
},
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
{
|
||||
"private": false,
|
||||
"name": "xo-web",
|
||||
"version": "5.35.0",
|
||||
"version": "5.37.0",
|
||||
"license": "AGPL-3.0",
|
||||
"description": "Web interface client for Xen-Orchestra",
|
||||
"keywords": [
|
||||
@@ -34,7 +34,7 @@
|
||||
"@nraynaud/novnc": "0.6.1",
|
||||
"@xen-orchestra/cron": "^1.0.3",
|
||||
"@xen-orchestra/defined": "^0.0.0",
|
||||
"ansi_up": "^3.0.0",
|
||||
"ansi_up": "^4.0.3",
|
||||
"asap": "^2.0.6",
|
||||
"babel-core": "^6.26.0",
|
||||
"babel-plugin-dev": "^1.0.0",
|
||||
@@ -96,7 +96,7 @@
|
||||
"moment-timezone": "^0.5.14",
|
||||
"notifyjs": "^3.0.0",
|
||||
"otplib": "^10.0.1",
|
||||
"promise-toolbox": "^0.11.0",
|
||||
"promise-toolbox": "^0.12.1",
|
||||
"prop-types": "^15.6.0",
|
||||
"qrcode": "^1.3.2",
|
||||
"random-password": "^0.1.2",
|
||||
|
||||
@@ -517,6 +517,8 @@ const messages = {
|
||||
remoteState: 'State',
|
||||
remoteDevice: 'Device',
|
||||
remoteDisk: 'Disk (Used / Total)',
|
||||
remoteSpeed: 'Speed (Write / Read)',
|
||||
remoteSpeedInfo: 'Read and write rate speed performed during latest test',
|
||||
remoteOptions: 'Options',
|
||||
remoteShare: 'Share',
|
||||
remoteAction: 'Action',
|
||||
@@ -733,6 +735,7 @@ const messages = {
|
||||
memoryLeftTooltip: '{used}% used ({free} free)',
|
||||
// ----- Pool network tab -----
|
||||
pif: 'PIF',
|
||||
poolNetworkAutomatic: 'Automatic',
|
||||
poolNetworkNameLabel: 'Name',
|
||||
poolNetworkDescription: 'Description',
|
||||
poolNetworkPif: 'PIFs',
|
||||
@@ -1022,6 +1025,8 @@ const messages = {
|
||||
importVdi: 'Import VDI content',
|
||||
importVdiNoFile: 'No file selected',
|
||||
selectVdiMessage: 'Drop VHD file here',
|
||||
srsNotOnSameHost:
|
||||
'The SRs must either be shared or on the same host for the VM to be able to start.',
|
||||
useQuotaWarning:
|
||||
'Creating this disk will use the disk space quota from the resource set {resourceSet} ({spaceLeft} left)',
|
||||
notEnoughSpaceInResourceSet:
|
||||
@@ -1092,6 +1097,7 @@ const messages = {
|
||||
guestOsLabel: 'Guest OS',
|
||||
miscLabel: 'Misc',
|
||||
virtualizationMode: 'Virtualization mode',
|
||||
startDelayLabel: 'Start delay (seconds)',
|
||||
cpuMaskLabel: 'CPU mask',
|
||||
selectCpuMask: 'Select core(s)…',
|
||||
cpuWeightLabel: 'CPU weight',
|
||||
@@ -1314,6 +1320,7 @@ const messages = {
|
||||
newVmShowAdvanced: 'Show advanced settings',
|
||||
newVmHideAdvanced: 'Hide advanced settings',
|
||||
newVmShare: 'Share this VM',
|
||||
newVmSrsNotOnSameHost: 'The SRs must either be on the same host or shared',
|
||||
|
||||
// ----- Self -----
|
||||
resourceSets: 'Resource sets',
|
||||
@@ -1403,11 +1410,11 @@ const messages = {
|
||||
scheduleName: 'Name',
|
||||
scheduleCopyId: 'Copy ID {id}',
|
||||
scheduleTimezone: 'Timezone',
|
||||
scheduleExportRetention: 'Backup ret.',
|
||||
scheduleCopyRetention: 'Replication ret.',
|
||||
scheduleSnapshotRetention: 'Snapshot ret.',
|
||||
poolMetadataRetention: 'Pool ret.',
|
||||
xoMetadataRetention: 'XO ret.',
|
||||
scheduleExportRetention: 'Backup retention',
|
||||
scheduleCopyRetention: 'Replication retention',
|
||||
scheduleSnapshotRetention: 'Snapshot retention',
|
||||
poolMetadataRetention: 'Pool retention',
|
||||
xoMetadataRetention: 'XO retention',
|
||||
getRemote: 'Get remote',
|
||||
listRemote: 'List Remote',
|
||||
simpleBackup: 'simple',
|
||||
|
||||
@@ -618,3 +618,28 @@ export const downloadLog = ({ log, date, type }) => {
  anchor.click()
  document.body.removeChild(anchor)
}

// ===================================================================

// Creates compare function based on different criterias
//
// ```js
// [{ name: 'bar', value: v2 }, { name: 'foo', value: v1 }].sort(
//   createCompare([
//     o => o.value === v1,
//     'name'
//   ])
// )
// ```
export const createCompare = criterias => (...items) => {
  let res = 0
  // Array.find to stop when the result is != 0
  criterias.find(fn => {
    const [v1, v2] = items.map(item => {
      const v = typeof fn === 'string' ? item[fn] : fn(item)
      return v === true ? -1 : v === false ? 1 : v
    })
    return (res = v1 < v2 ? -1 : v1 > v2 ? 1 : 0)
  })
  return res
}
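A concrete (made-up) example of the comparator in action, listing shared SRs first and sorting alphabetically within each group: boolean criteria sort `true` before `false`, string criteria compare the named property.

```js
const srs = [
  { name_label: 'Local storage', shared: false },
  { name_label: 'NFS share', shared: true },
  { name_label: 'iSCSI LUN', shared: true },
]

srs.sort(createCompare([sr => sr.shared, 'name_label']))
// → NFS share, iSCSI LUN, Local storage
```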
19 packages/xo-web/src/common/xo/_parseNdJson.js (Normal file)
@@ -0,0 +1,19 @@
export default (string, cb) => {
  const { length } = string
  let i = 0
  while (i < length) {
    let j = string.indexOf('\n', i)

    // no final \n
    if (j === -1) {
      j = length
    }

    // non empty line
    if (j !== i) {
      cb(JSON.parse(string.slice(i, j)))
    }

    i = j + 1
  }
}
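A minimal usage sketch (the payload string is made up); this mirrors how `subscribeBackupNgLogs` below indexes the fetched records by id.

```js
import parseNdJson from './_parseNdJson'

const payload =
  '{"id":"a","status":"success"}\n{"id":"b","status":"failure"}\n'

const byId = {}
parseNdJson(payload, record => {
  byId[record.id] = record
})
// byId → { a: { id: 'a', … }, b: { id: 'b', … } }
```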
@@ -49,6 +49,8 @@ import {
|
||||
updatePermissions,
|
||||
} from '../store/actions'
|
||||
|
||||
import parseNdJson from './_parseNdJson'
|
||||
|
||||
// ===================================================================
|
||||
|
||||
export const XEN_DEFAULT_CPU_WEIGHT = 256
|
||||
@@ -67,6 +69,9 @@ export const isVmRunning = vm => vm && vm.power_state === 'Running'
|
||||
// ===================================================================
|
||||
|
||||
export const signOut = () => {
|
||||
// prevent automatic reconnection
|
||||
xo.removeListener('closed', connect)
|
||||
|
||||
cookies.expire('token')
|
||||
window.location.reload(true)
|
||||
}
|
||||
@@ -141,26 +146,9 @@ export const connectStore = store => {
|
||||
.then(response => response.text())
|
||||
.then(data => {
|
||||
const objects = Object.create(null)
|
||||
|
||||
const { length } = data
|
||||
let i = 0
|
||||
while (i < length) {
|
||||
let j = data.indexOf('\n', i)
|
||||
|
||||
// no final \n
|
||||
if (j === -1) {
|
||||
j = length
|
||||
}
|
||||
|
||||
// non empty line
|
||||
if (j !== i) {
|
||||
const object = JSON.parse(data.slice(i, j))
|
||||
objects[object.id] = object
|
||||
}
|
||||
|
||||
i = j + 1
|
||||
}
|
||||
|
||||
parseNdJson(data, object => {
|
||||
objects[object.id] = object
|
||||
})
|
||||
store.dispatch(updateObjects(objects))
|
||||
})
|
||||
})
|
||||
@@ -1918,9 +1906,17 @@ export const subscribeBackupNgJobs = createSubscription(() =>
|
||||
_call('backupNg.getAllJobs')
|
||||
)
|
||||
|
||||
export const subscribeBackupNgLogs = createSubscription(() =>
|
||||
_call('backupNg.getAllLogs')
|
||||
)
|
||||
export const subscribeBackupNgLogs = createSubscription(async () => {
|
||||
const { $getFrom } = await _call('backupNg.getAllLogs', { ndjson: true })
|
||||
const response = await fetch(`.${$getFrom}`)
|
||||
const data = await response.text()
|
||||
|
||||
const logs = { __proto__: null }
|
||||
parseNdJson(data, log => {
|
||||
logs[log.id] = log
|
||||
})
|
||||
return logs
|
||||
})
|
||||
|
||||
export const subscribeMetadataBackupJobs = createSubscription(() =>
|
||||
_call('metadataBackup.getAllJobs')
|
||||
|
||||
@@ -133,7 +133,6 @@ export default class MigrateVmsModalBody extends BaseComponent {
|
||||
}
|
||||
const { networks, pifs, vbdsByVm, vifsByVm } = this.props
|
||||
const {
|
||||
intraPool,
|
||||
doNotMigrateVdi,
|
||||
doNotMigrateVmVdis,
|
||||
migrationNetworkId,
|
||||
@@ -152,8 +151,9 @@ export default class MigrateVmsModalBody extends BaseComponent {
|
||||
forEach(vbds, vbd => {
|
||||
const vdi = vbd.VDI
|
||||
if (!vbd.is_cd_drive && vdi) {
|
||||
mapVdisSrs[vdi] =
|
||||
intraPool && doNotMigrateVdi[vdi] ? this._getObject(vdi).SR : srId
|
||||
mapVdisSrs[vdi] = doNotMigrateVdi[vdi]
|
||||
? this._getObject(vdi).SR
|
||||
: srId
|
||||
}
|
||||
})
|
||||
mapVmsMapVdisSrs[vm] = mapVdisSrs
|
||||
@@ -218,29 +218,34 @@ export default class MigrateVmsModalBody extends BaseComponent {
|
||||
host.$PBDs,
|
||||
pbd => this._getObject(pbd).SR === defaultSrId
|
||||
)
|
||||
|
||||
const intraPool = every(this.props.vms, vm => vm.$pool === host.$pool)
|
||||
const doNotMigrateVmVdis = {}
|
||||
const doNotMigrateVdi = {}
|
||||
forEach(this.props.vbdsByVm, (vbds, vm) => {
|
||||
if (this._getObject(vm).$container === host.id) {
|
||||
doNotMigrateVmVdis[vm] = true
|
||||
return
|
||||
}
|
||||
const _doNotMigrateVdi = {}
|
||||
forEach(vbds, vbd => {
|
||||
if (vbd.VDI != null) {
|
||||
doNotMigrateVdi[vbd.VDI] = _doNotMigrateVdi[vbd.VDI] = isSrShared(
|
||||
this._getObject(this._getObject(vbd.VDI).$SR)
|
||||
)
|
||||
let noVdisMigration = false
|
||||
if (intraPool) {
|
||||
forEach(this.props.vbdsByVm, (vbds, vm) => {
|
||||
if (this._getObject(vm).$container === host.id) {
|
||||
doNotMigrateVmVdis[vm] = true
|
||||
return
|
||||
}
|
||||
const _doNotMigrateVdi = {}
|
||||
forEach(vbds, vbd => {
|
||||
if (vbd.VDI != null) {
|
||||
doNotMigrateVdi[vbd.VDI] = _doNotMigrateVdi[vbd.VDI] = isSrShared(
|
||||
this._getObject(this._getObject(vbd.VDI).$SR)
|
||||
)
|
||||
}
|
||||
})
|
||||
doNotMigrateVmVdis[vm] = every(_doNotMigrateVdi)
|
||||
})
|
||||
doNotMigrateVmVdis[vm] = every(_doNotMigrateVdi)
|
||||
})
|
||||
const noVdisMigration = every(doNotMigrateVmVdis)
|
||||
noVdisMigration = every(doNotMigrateVmVdis)
|
||||
}
|
||||
this.setState({
|
||||
defaultSrConnectedToHost,
|
||||
defaultSrId,
|
||||
host,
|
||||
intraPool: every(this.props.vms, vm => vm.$pool === host.$pool),
|
||||
intraPool,
|
||||
doNotMigrateVdi,
|
||||
doNotMigrateVmVdis,
|
||||
migrationNetworkId: defaultMigrationNetworkId,
|
||||
|
||||
@@ -23,6 +23,10 @@
|
||||
@extend .fa;
|
||||
@extend .fa-thumb-tack;
|
||||
}
|
||||
&-plugin {
|
||||
@extend .fa;
|
||||
@extend .fa-puzzle-piece;
|
||||
}
|
||||
&-message {
|
||||
@extend .fa;
|
||||
@extend .fa-envelope-o;
|
||||
|
||||
@@ -0,0 +1,29 @@
import { pickBy } from 'lodash'

const DEFAULTS = {
  __proto__: null,

  compression: '',
  concurrency: 0,
  offlineSnapshot: false,
  timeout: 0,
}

const MODES = {
  __proto__: null,

  compression: 'full',
}

const getSettingsWithNonDefaultValue = (mode, settings) =>
  pickBy(settings, (value, key) => {
    const settingMode = MODES[key]

    return (
      (settingMode === undefined || settingMode === mode) &&
      value !== undefined &&
      value !== DEFAULTS[key]
    )
  })

export { getSettingsWithNonDefaultValue as default }
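A short sketch of the filtering behaviour (the settings object is illustrative): values equal to their defaults are dropped, and `compression` is only kept when the job mode is `full`.

```js
import getSettingsWithNonDefaultValue from './_getSettingsWithNonDefaultValue'

getSettingsWithNonDefaultValue('delta', {
  compression: 'zstd',   // dropped: only relevant to the 'full' mode
  concurrency: 0,        // dropped: equal to the default
  offlineSnapshot: true, // kept
  timeout: 7200e3,       // kept
})
// → { offlineSnapshot: true, timeout: 7200000 }
```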
@@ -37,10 +37,11 @@ import {
|
||||
import LogsTable, { LogStatus } from '../logs/backup-ng'
|
||||
import Page from '../page'
|
||||
|
||||
import NewVmBackup, { NewMetadataBackup } from './new'
|
||||
import Edit from './edit'
|
||||
import FileRestore from './file-restore'
|
||||
import getSettingsWithNonDefaultValue from './_getSettingsWithNonDefaultValue'
|
||||
import Health from './health'
|
||||
import NewVmBackup, { NewMetadataBackup } from './new'
|
||||
import Restore from './restore'
|
||||
import { destructPattern } from './utils'
|
||||
|
||||
@@ -244,28 +245,38 @@ class JobsTable extends React.Component {
|
||||
name: _('jobSchedules'),
|
||||
},
|
||||
{
|
||||
itemRenderer: ({ compression = '', settings }) => {
|
||||
const { concurrency, offlineSnapshot, reportWhen, timeout } =
|
||||
settings[''] || {}
|
||||
itemRenderer: job => {
|
||||
const {
|
||||
compression,
|
||||
concurrency,
|
||||
offlineSnapshot,
|
||||
reportWhen,
|
||||
timeout,
|
||||
} = getSettingsWithNonDefaultValue(job.mode, {
|
||||
compression: job.compression,
|
||||
...job.settings[''],
|
||||
})
|
||||
|
||||
return (
|
||||
<Ul>
|
||||
{reportWhen && <Li>{_.keyValue(_('reportWhen'), reportWhen)}</Li>}
|
||||
{concurrency > 0 && (
|
||||
{reportWhen !== undefined && (
|
||||
<Li>{_.keyValue(_('reportWhen'), reportWhen)}</Li>
|
||||
)}
|
||||
{concurrency !== undefined && (
|
||||
<Li>{_.keyValue(_('concurrency'), concurrency)}</Li>
|
||||
)}
|
||||
{timeout > 0 && (
|
||||
{timeout !== undefined && (
|
||||
<Li>{_.keyValue(_('timeout'), timeout / 3600e3)} hours</Li>
|
||||
)}
|
||||
{offlineSnapshot && (
|
||||
{offlineSnapshot !== undefined && (
|
||||
<Li>
|
||||
{_.keyValue(
|
||||
_('offlineSnapshot'),
|
||||
<span className='text-success'>{_('stateEnabled')}</span>
|
||||
_(offlineSnapshot ? 'stateEnabled' : 'stateDisabled')
|
||||
)}
|
||||
</Li>
|
||||
)}
|
||||
{compression !== '' && (
|
||||
{compression !== undefined && (
|
||||
<Li>
|
||||
{_.keyValue(
|
||||
_('compression'),
|
||||
|
||||
@@ -15,7 +15,7 @@ import { Card, CardBlock, CardHeader } from 'card'
import { constructSmartPattern, destructSmartPattern } from 'smart-backup'
import { Container, Col, Row } from 'grid'
import { createGetObjectsOfType } from 'selectors'
import { flatten, includes, isEmpty, map, mapValues, some } from 'lodash'
import { flatten, includes, isEmpty, map, mapValues, omit, some } from 'lodash'
import { form } from 'modal'
import { generateId } from 'reaclette-utils'
import { injectIntl } from 'react-intl'
@@ -44,6 +44,7 @@ import {
import NewSchedule from './new-schedule'
import Schedules from './schedules'
import SmartBackup from './smart-backup'
import getSettingsWithNonDefaultValue from '../_getSettingsWithNonDefaultValue'
import {
canDeltaBackup,
constructPattern,
@@ -137,12 +138,12 @@ const createDoesRetentionExist = name => {
}

const getInitialState = () => ({
_displayAdvancedSettings: undefined,
_vmsPattern: undefined,
backupMode: false,
compression: undefined,
crMode: false,
deltaMode: false,
displayAdvancedSettings: undefined,
drMode: false,
name: '',
paramsUpdated: false,
@@ -489,6 +490,9 @@ export default decorate([
return getInitialState()
},
setCompression: (_, compression) => ({ compression }),
toggleDisplayAdvancedSettings: () => ({ displayAdvancedSettings }) => ({
_displayAdvancedSettings: !displayAdvancedSettings,
}),
setGlobalSettings: (_, { name, value }) => ({
propSettings,
settings = propSettings,
@@ -611,6 +615,16 @@ export default decorate([
}
: setting
),
displayAdvancedSettings: (state, props) =>
defined(
state._displayAdvancedSettings,
!isEmpty(
getSettingsWithNonDefaultValue(state.isFull ? 'full' : 'delta', {
compression: get(() => props.job.compression),
...get(() => omit(props.job.settings[''], 'reportWhen')),
})
)
),
},
}),
injectState,
@@ -620,10 +634,6 @@ export default decorate([
const { concurrency, reportWhen = 'failure', offlineSnapshot, timeout } =
settings.get('') || {}
const compression = defined(state.compression, job.compression, '')
const displayAdvancedSettings = defined(
state.displayAdvancedSettings,
compression !== '' || concurrency > 0 || timeout > 0 || offlineSnapshot
)

if (state.needUpdateParams) {
effects.updateParams()
@@ -848,11 +858,13 @@ export default decorate([
{_('newBackupSettings')}
<ActionButton
className='pull-right'
data-mode='displayAdvancedSettings'
handler={effects.toggleMode}
icon={displayAdvancedSettings ? 'toggle-on' : 'toggle-off'}
data-mode='_displayAdvancedSettings'
handler={effects.toggleDisplayAdvancedSettings}
icon={
state.displayAdvancedSettings ? 'toggle-on' : 'toggle-off'
}
iconColor={
displayAdvancedSettings ? 'text-success' : undefined
state.displayAdvancedSettings ? 'text-success' : undefined
}
size='small'
>
@@ -885,7 +897,7 @@ export default decorate([
valueKey='value'
/>
</FormGroup>
{displayAdvancedSettings && (
{state.displayAdvancedSettings && (
<div>
<FormGroup>
<label htmlFor={state.inputConcurrencyId}>

@@ -480,23 +480,18 @@ export default class Home extends Component {
selectedItems: {},
}

get page() {
return this.state.page
}
set page(activePage) {
this.setState({ activePage })
}

componentWillMount() {
this._initFilterAndSortBy(this.props)
}

componentWillReceiveProps(props) {
const { type } = props

if (this._getFilter() !== this._getFilter(props)) {
this._initFilterAndSortBy(props)
}
if (props.type !== this.props.type) {
this.setState({ activePage: undefined, highlighted: undefined })
if (type !== this.props.type) {
this.setState({ highlighted: undefined })
}
}

@@ -523,6 +518,14 @@ export default class Home extends Component {
identity,
])

_getPage() {
const {
location: { query },
} = this.props
const queryPage = +query.p
return Number.isNaN(queryPage) ? 1 : queryPage
}

_getType() {
return this.props.type
}
@@ -531,7 +534,7 @@ export default class Home extends Component {
const { pathname, query } = this.props.location
this.context.router.push({
pathname,
query: { ...query, t: type, s: undefined },
query: { ...query, t: type, s: undefined, p: 1 },
})
}

@@ -655,10 +658,8 @@ export default class Home extends Component {
const { pathname, query } = props.location
this.context.router[replace ? 'replace' : 'push']({
pathname,
query: { ...query, s: filter },
query: { ...query, s: filter, p: 1 },
})

this.page = 1
}

_clearFilter = () => this._setFilter('')
@@ -673,20 +674,27 @@ export default class Home extends Component {

_getFilteredItems = createSort(
createFilter(() => this.props.items, this._getFilterFunction),
() => this.state.sortBy,
createSelector(
() => this.state.sortBy,
sortBy => [sortBy, 'name_label']
),
() => this.state.sortOrder
)

_getVisibleItems = createPager(
this._getFilteredItems,
() => this.state.activePage || 1,
() => this._getPage(),
ITEMS_PER_PAGE
)

_expandAll = () => this.setState({ expandAll: !this.state.expandAll })

_onPageSelection = page => {
this.page = page
const { pathname, query } = this.props.location
this.context.router.replace({
pathname,
query: { ...query, p: page },
})
}

_tick = isCriteria => (
@@ -1151,7 +1159,7 @@ export default class Home extends Component {
const filteredItems = this._getFilteredItems()
const visibleItems = this._getVisibleItems()
const { Item } = OPTIONS[this.props.type]
const { activePage, expandAll, highlighted, selectedItems } = this.state
const { expandAll, highlighted, selectedItems } = this.state

// Necessary because indeterminate cannot be used as an attribute
if (this.refs.masterCheckbox) {
@@ -1201,7 +1209,7 @@ export default class Home extends Component {
<Pagination
onChange={this._onPageSelection}
pages={ceil(filteredItems.length / ITEMS_PER_PAGE)}
value={activePage || 1}
value={this._getPage()}
/>
</div>
</div>

@@ -45,6 +45,7 @@ import {
createVm,
createVms,
getCloudInitConfig,
isSrShared,
subscribeCurrentUser,
subscribeIpPools,
subscribeResourceSets,
@@ -81,6 +82,7 @@ import {
resolveResourceSet,
} from 'utils'
import {
createFilter,
createSelector,
createGetObject,
createGetObjectsOfType,
@@ -485,20 +487,19 @@ export default class NewVm extends BaseComponent {
}
})

const VIFs = []
let VIFs = []
const defaultNetworkIds = this._getDefaultNetworkIds(template)
forEach(template.VIFs, vifId => {
const vif = getObject(storeState, vifId, resourceSet)
VIFs.push({
network:
pool || isInResourceSet(vif.$network)
? vif.$network
: this._getDefaultNetworkId(template),
: defaultNetworkIds[0],
})
})
if (VIFs.length === 0) {
VIFs.push({
network: this._getDefaultNetworkId(template),
})
VIFs = defaultNetworkIds.map(id => ({ network: id }))
}
const name_label =
state.name_label === '' || !state.name_labelHasChanged
@@ -631,21 +632,38 @@ export default class NewVm extends BaseComponent {
)
}
)
_getDefaultNetworkId = template => {

_getAutomaticNetworks = createSelector(
createFilter(this._getPoolNetworks, [network => network.automatic]),
networks => networks.map(_ => _.id)
)

_getDefaultNetworkIds = template => {
if (template === undefined) {
return
return []
}

const network =
this.props.pool === undefined
? find(this._getResolvedResourceSet().objectsByType.network, {
$pool: template.$pool,
})
: find(this._getPoolNetworks(), network => {
const pif = getObject(store.getState(), network.PIFs[0])
return pif && pif.management
})
return network && network.id
if (this.props.pool === undefined) {
const network = find(
this._getResolvedResourceSet().objectsByType.network,
{
$pool: template.$pool,
}
)
return network !== undefined ? [network.id] : []
}

const automaticNetworks = this._getAutomaticNetworks()
if (automaticNetworks.length !== 0) {
return automaticNetworks
}

const network = find(this._getPoolNetworks(), network => {
const pif = getObject(store.getState(), network.PIFs[0])
return pif && pif.management
})

return network !== undefined ? [network.id] : []
}

_buildVmsNameTemplate = createSelector(
@@ -788,9 +806,7 @@ export default class NewVm extends BaseComponent {
this._setState({
VIFs: [
...state.VIFs,
{
network: this._getDefaultNetworkId(state.template),
},
{ network: this._getDefaultNetworkIds(state.template)[0] },
],
})
}
@@ -1329,6 +1345,36 @@ export default class NewVm extends BaseComponent {

// DISKS -----------------------------------------------------------------------

_getDiskSrs = createSelector(
() => this.state.state.existingDisks,
() => this.state.state.VDIs,
(existingDisks, vdis) => {
const diskSrs = new Set()
forEach(existingDisks, disk => diskSrs.add(disk.$SR))
vdis.forEach(disk => diskSrs.add(disk.SR))
return [...diskSrs]
}
)

_srsNotOnSameHost = createSelector(
this._getDiskSrs,
() => this.props.srs,
(diskSrs, srs) => {
let container
let sr
return diskSrs.some(srId => {
sr = srs[srId]
return (
sr !== undefined &&
!isSrShared(sr) &&
(container !== undefined
? container !== sr.$container
: ((container = sr.$container), false))
)
})
}
)
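As a reading aid only (not code from this changeset), the check above can be restated as a standalone function; `srsById` and `isSrShared` are assumed inputs standing in for the component's props and the imported helper:

```js
// Sketch of the selector's predicate: warn once two non-shared SRs live on
// different containers (hosts).
const srsNotOnSameHost = (diskSrIds, srsById, isSrShared) => {
  let container
  return diskSrIds.some(srId => {
    const sr = srsById[srId]
    if (sr === undefined || isSrShared(sr)) {
      return false // unknown or shared SRs never trigger the warning
    }
    if (container === undefined) {
      container = sr.$container // remember the first local SR's host
      return false
    }
    return container !== sr.$container
  })
}
```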

_renderDisks = () => {
const {
state: { installMethod, existingDisks, VDIs },
@@ -1455,6 +1501,11 @@ export default class NewVm extends BaseComponent {
{index < VDIs.length - 1 && <hr />}
</div>
))}
{this._srsNotOnSameHost() && (
<span className='text-danger'>
<Icon icon='alarm' /> {_('newVmSrsNotOnSameHost')}
</span>
)}
<Item>
<Button onClick={this._addVdi}>
<Icon icon='new-vm-add' /> {_('newVmAddDisk')}

@@ -92,7 +92,7 @@ const NewNetwork = decorate([
description,
mtu,
name,
pif: pif.id,
pif: pif == null ? undefined : pif.id,
pool: pool.id,
vlan,
})
@@ -160,7 +160,7 @@ const NewNetwork = decorate([
multi={bonded}
onChange={effects.onChangePif}
predicate={pifPredicate}
required
required={bonded}
value={bonded ? pifs : pif}
/>
<label>{_('newNetworkName')}</label>

@@ -79,6 +79,21 @@ class Name extends Component {

// -----------------------------------------------------------------------------

class AutomaticNetwork extends Component {
_editAutomaticNetwork = automatic =>
editNetwork(this.props.network, { automatic })

render() {
const { network } = this.props

return (
<Toggle onChange={this._editAutomaticNetwork} value={network.automatic} />
)
}
}

// -----------------------------------------------------------------------------

class Description extends Component {
_editDescription = value =>
editNetwork(this.props.network, { name_description: value })
@@ -343,6 +358,10 @@ const NETWORKS_COLUMNS = [
itemRenderer: network =>
!isEmpty(network.PIFs) && <PifsItem network={network} />,
},
{
name: _('poolNetworkAutomatic'),
itemRenderer: network => <AutomaticNetwork network={network} />,
},
{
name: '',
itemRenderer: network => <NetworkActions network={network} />,

@@ -317,6 +317,13 @@ export default decorate([
value={state.search}
/>
</p>
<span>
{_('homeDisplayedItems', {
displayed: state.sortedPlugins.length,
icon: <Icon icon='plugin' />,
total: plugins.length,
})}
</span>
<ul style={{ paddingLeft: 0 }}>
{state.sortedPlugins.map(plugin => (
<li key={plugin.id} className='list-group-item clearfix'>

@@ -6,9 +6,16 @@ import React from 'react'
import SortedTable from 'sorted-table'
import StateButton from 'state-button'
import Tooltip from 'tooltip'
import { addSubscriptions, formatSize, generateRandomId, noop } from 'utils'
import {
addSubscriptions,
formatSize,
formatSpeed,
generateRandomId,
noop,
} from 'utils'
import { alert } from 'modal'
import { format, parse } from 'xo-remote-parser'
import { get } from '@xen-orchestra/defined'
import { groupBy, map, isEmpty } from 'lodash'
import { injectIntl } from 'react-intl'
import { injectState, provideState } from 'reaclette'
@@ -77,7 +84,7 @@ const COLUMN_STATE = {
name: _('remoteState'),
}
const COLUMN_DISK = {
itemRenderer: (remote, { formatMessage }) =>
itemRenderer: remote =>
remote.info !== undefined &&
remote.info.used !== undefined &&
remote.info.size !== undefined && (
@@ -87,6 +94,30 @@ const COLUMN_DISK = {
),
name: _('remoteDisk'),
}
const COLUMN_SPEED = {
itemRenderer: remote => {
const benchmark = get(() => remote.benchmarks[remote.benchmarks.length - 1])

return (
benchmark !== undefined &&
benchmark.readRate !== undefined &&
benchmark.writeRate !== undefined && (
<span>{`${formatSpeed(benchmark.writeRate, 1e3)} / ${formatSpeed(
benchmark.readRate,
1e3
)}`}</span>
)
)
},
name: (
<span>
{_('remoteSpeed')}{' '}
<Tooltip content={_('remoteSpeedInfo')}>
<Icon icon='info' size='lg' />
</Tooltip>
</span>
),
}

const fixRemoteUrl = remote => editRemote(remote, { url: format(remote) })
const COLUMNS_LOCAL_REMOTE = [
@@ -105,6 +136,7 @@ const COLUMNS_LOCAL_REMOTE = [
},
COLUMN_STATE,
COLUMN_DISK,
COLUMN_SPEED,
]
const COLUMNS_NFS_REMOTE = [
COLUMN_NAME,
@@ -166,6 +198,7 @@ const COLUMNS_NFS_REMOTE = [
},
COLUMN_STATE,
COLUMN_DISK,
COLUMN_SPEED,
]
const COLUMNS_SMB_REMOTE = [
COLUMN_NAME,
@@ -222,6 +255,7 @@ const COLUMNS_SMB_REMOTE = [
),
name: _('remoteAuth'),
},
COLUMN_SPEED,
]

const GROUPED_ACTIONS = [

@@ -182,6 +182,7 @@ const COLUMNS = [

const GROUPED_ACTIONS = [
{
disabled: vdis => some(vdis, { type: 'VDI-unmanaged' }),
handler: deleteVdis,
icon: 'delete',
label: _('deleteSelectedVdis'),
@@ -193,12 +194,14 @@ const INDIVIDUAL_ACTIONS = [
...(process.env.XOA_PLAN > 1
? [
{
disabled: ({ id }, { isVdiAttached }) => isVdiAttached[id],
disabled: ({ id, type }, { isVdiAttached }) =>
isVdiAttached[id] || type === 'VDI-unmanaged',
handler: importVdi,
icon: 'import',
label: _('importVdi'),
},
{
disabled: ({ type }) => type === 'VDI-unmanaged',
handler: exportVdi,
icon: 'export',
label: _('exportVdi'),
@@ -211,6 +214,7 @@ const INDIVIDUAL_ACTIONS = [
label: vdi => _('copyUuid', { uuid: vdi.uuid }),
},
{
disabled: ({ type }) => type === 'VDI-unmanaged',
handler: deleteVdi,
icon: 'delete',
label: _('deleteSelectedVdi'),

@@ -673,6 +673,15 @@ export default class TabAdvanced extends Component {
</td>
</tr>
)}
<tr>
<th>{_('startDelayLabel')}</th>
<td>
<Number
value={vm.startDelay}
onChange={value => editVm(vm, { startDelay: value })}
/>
</td>
</tr>
<tr>
<th>{_('cpuMaskLabel')}</th>
<td>

@@ -14,6 +14,7 @@ import TabButton from 'tab-button'
import { Sr } from 'render-xo-item'
import { Container, Row, Col } from 'grid'
import {
createCollectionWrapper,
createGetObjectsOfType,
createSelector,
createFinder,
@@ -23,10 +24,11 @@ import {
import { DragDropContext, DragSource, DropTarget } from 'react-dnd'
import { injectIntl } from 'react-intl'
import {
noop,
addSubscriptions,
formatSize,
connectStore,
createCompare,
formatSize,
noop,
resolveResourceSet,
} from 'utils'
import { SelectSr, SelectVdi, SelectResourceSetsSr } from 'select-objects'
@@ -35,6 +37,7 @@ import { XoSelect, Size, Text } from 'editable'
import { confirm } from 'modal'
import { error } from 'notification'
import {
every,
filter,
find,
forEach,
@@ -44,6 +47,7 @@ import {
mapValues,
pick,
some,
sortedUniq,
} from 'lodash'
import {
attachDiskToVm,
@@ -57,6 +61,7 @@ import {
editVdi,
exportVdi,
importVdi,
isSrShared,
isSrWritable,
isVmRunning,
migrateVdi,
@@ -65,6 +70,45 @@ import {
subscribeResourceSets,
} from 'xo'

const createCompareContainers = poolId =>
createCompare([c => c.$pool === poolId, c => c.type === 'pool'])
const compareSrs = createCompare([isSrShared])

class VdiSr extends Component {
_getCompareContainers = createSelector(
() => this.props.userData.vm.$pool,
poolId => createCompareContainers(poolId)
)

_getSrPredicate = createSelector(
() => this.props.userData.vm.$pool,
poolId => sr => sr.$pool === poolId && isSrWritable(sr)
)

_onChangeSr = sr => migrateVdi(this.props.item, sr)

render() {
const { item: vdi, userData } = this.props
const sr = userData.srs[vdi.$SR]
return (
sr !== undefined && (
<XoSelect
compareContainers={this._getCompareContainers()}
compareOptions={compareSrs}
labelProp='name_label'
onChange={this._onChangeSr}
predicate={this._getSrPredicate()}
useLongClick
value={sr}
xoType='SR'
>
<Sr id={sr.id} link />
</XoSelect>
)
)
}
}

const COLUMNS_VM_PV = [
{
itemRenderer: vdi => (
@@ -98,23 +142,7 @@ const COLUMNS_VM_PV = [
sortCriteria: 'size',
},
{
itemRenderer: (vdi, userData) => {
const sr = userData.srs[vdi.$SR]
return (
sr !== undefined && (
<XoSelect
labelProp='name_label'
onChange={sr => migrateVdi(vdi, sr)}
predicate={sr => sr.$pool === userData.vm.$pool && isSrWritable(sr)}
useLongClick
value={sr}
xoType='SR'
>
<Sr id={sr.id} link />
</XoSelect>
)
)
},
component: VdiSr,
name: _('vdiSr'),
sortCriteria: (vdi, userData) => {
const sr = userData.srs[vdi.$SR]
@@ -580,17 +608,31 @@ class BootOrder extends Component {
}

class MigrateVdiModalBody extends Component {
static propTypes = {
pool: PropTypes.string.isRequired,
}

get value() {
return this.state
}

_getCompareContainers = createSelector(
() => this.props.pool,
poolId => createCompareContainers(poolId)
)

render() {
return (
<Container>
<SingleLineRow>
<Col size={6}>{_('vdiMigrateSelectSr')}</Col>
<Col size={6}>
<SelectSr onChange={this.linkState('sr')} required />
<SelectSr
compareContainers={this._getCompareContainers()}
compareOptions={compareSrs}
onChange={this.linkState('sr')}
required
/>
</Col>
</SingleLineRow>
<SingleLineRow className='mt-1'>
@@ -621,6 +663,30 @@ export default class TabDisks extends Component {
}
}

_areSrsOnSameHost = createSelector(
createSelector(
() => this.props.vdis,
createCollectionWrapper(vdis => sortedUniq(map(vdis, '$SR').sort()))
),
() => this.props.srs,
(vdiSrs, srs) => {
if (some(vdiSrs, srId => srs[srId] === undefined)) {
return true // the user doesn't have permissions on one of the SRs: no warning
}
let container
let sr
return every(vdiSrs, srId => {
sr = srs[srId]
if (isSrShared(sr)) {
return true
}
return container === undefined
? ((container = sr.$container), true)
: container === sr.$container
})
}
)

_toggleNewDisk = () =>
this.setState({
newDisk: !this.state.newDisk,
@@ -645,7 +711,7 @@ export default class TabDisks extends Component {
_migrateVdi = vdi => {
return confirm({
title: _('vdiMigrate'),
body: <MigrateVdiModalBody />,
body: <MigrateVdiModalBody pool={this.props.vm.$pool} />,
}).then(({ sr, migrateAll }) => {
if (!sr) {
return error(_('vdiMigrateNoSr'), _('vdiMigrateNoSrMessage'))
@@ -774,6 +840,13 @@ export default class TabDisks extends Component {
</Col>
</Row>
<Row>
{!this._areSrsOnSameHost() && (
<div>
<span className='text-danger'>
<Icon icon='alarm' /> {_('srsNotOnSameHost')}
</span>
</div>
)}
<Col>
<SortedTable
actions={ACTIONS}