Compare commits

..

123 Commits

Author SHA1 Message Date
Julien Fontanet
63b5ee6f96 feat(xo-server): 5.17.4 2018-03-16 17:49:11 +01:00
Julien Fontanet
36d2de049f feat(xo-server/vhd): createReadStream (#2763)
A stream to a synthetic full VHD.
2018-03-16 17:47:10 +01:00
Julien Fontanet
86b0d5e2b7 fix(xo-server/backupNg.importVmBackup): do not try to detect base VDI 2018-03-16 17:38:56 +01:00
Julien Fontanet
d34f641130 fix(xo-server/backupNg.importVmBackup): do not try to detect base VM 2018-03-16 17:30:42 +01:00
Julien Fontanet
39d7b4c7bd fix(xo-server/backupNg.importVmBackup): fix VM name for delta 2018-03-16 17:15:18 +01:00
Julien Fontanet
ad0d4156fb fix(xo-server/backupNg.importVmBackup): add missing version for delta 2018-03-16 17:15:18 +01:00
badrAZ
80187e2789 feat(xo-web/self-service): add the internal networks to the networks' select (#2664) 2018-03-16 16:57:44 +01:00
badrAZ
89e25c9b81 fix(xo-web/dashboard/overview): missing patches not fetched (#2772)
Fixes #2768
2018-03-16 16:37:57 +01:00
Julien Fontanet
ca51d59815 chore(xo-server/chainVhd): allow parent and child to be in different dirs (#2762) 2018-03-16 16:24:45 +01:00
Julien Fontanet
433f445e99 fix(xo-server/backups): no checksum files for VHDs (#2761)
Because keeping them up-to-date after chainings and merges is too expensive (requires reading the whole file).

In legacy backups they were kept up-to-date at great cost and never used for verification anyway.
2018-03-16 16:24:25 +01:00
Julien Fontanet
474a765e1b chore(xo-server/chainVhd): remove checksum recomputing (#2759)
It's high time to remove this.
2018-03-16 16:24:02 +01:00
badrAZ
7d4b17380d feat(Backups NG): fourth iteration (#2756) 2018-03-16 16:23:19 +01:00
Julien Fontanet
b58b1d94cd fix(xo-server/xapi): add missing import 2018-03-16 16:21:20 +01:00
Olivier Lambert
16e7257e3b feat(host/pool): clearer memory info (#2771)
Fixes #2750
2018-03-16 14:19:59 +01:00
Julien Fontanet
ca1a46f980 chore(xo-server/backups-ng): add todo items 2018-03-14 15:52:26 +01:00
Julien Fontanet
596bd12f59 chore(xo-server/vhd): add format cheatsheet 2018-03-14 15:51:02 +01:00
badrAZ
301ab65c01 fix(xo-web/backup/overview): fix the race condition between subscriptions (#2766)
Fixes #2733
2018-03-14 14:15:47 +01:00
Pierre Donias
35f210e074 fix(xo-server/xosan): make tmpBoundObjectId unique (#2760)
Fixes #2758
2018-03-13 16:05:38 +01:00
Julien Fontanet
c239b518e0 chore(xo-server/checksum): documentation 2018-03-13 15:57:08 +01:00
Julien Fontanet
f45935aa44 chore(xo-server/vhd-merge): abstract FD handling (#2757)
Due to our smart implementation, the Vhd class does not need to be aware of the fact that the file is already opened.
2018-03-13 15:09:21 +01:00
Julien Fontanet
782505b292 feat(xo-server): close inactive HTTP connections on stop 2018-03-13 10:22:06 +01:00
Julien Fontanet
1368e3b86d chore: update dependencies 2018-03-13 10:06:23 +01:00
Julien Fontanet
ab9c24401e feat(xo-web): 5.17.2 2018-03-12 17:54:09 +01:00
Julien Fontanet
831f4e48d1 feat(xo-server): 5.17.3 2018-03-12 17:53:34 +01:00
Julien Fontanet
f5511449af fix(xo-server/vm.create): work around a race condition (#2755)
Fixes #2747
2018-03-12 17:52:53 +01:00
Julien Fontanet
80c1e39b53 feat(Backups NG): third iteration (#2729) 2018-03-12 17:26:20 +01:00
badrAZ
3ce4e86784 fix(xo-web/sorted-table): returns undefined if userData is empty (#2752)
Fixes #2748
2018-03-12 17:19:15 +01:00
Julien Fontanet
fb617418bb feat(xo-server): 5.17.2 2018-03-09 19:12:00 +01:00
Pierre Donias
9fb0f793b2 fix(prettierrc): add trailingComma to avoid conflicts with eslint (#2744) 2018-03-09 11:55:02 +01:00
Julien Fontanet
3b21a097ab fix(xo-web): handle incorrect filters (#2743)
Fixes #2740
2018-03-09 11:30:22 +01:00
Rajaa.BARHTAOUI
ef09a42a89 feat(xo-web): disconnect VDI from Health view (#2655)
See #2505
2018-03-08 14:48:30 +01:00
Julien Fontanet
74d8f2a859 fix(xo-server): test with Babel 7 2018-03-08 11:00:00 +01:00
Julien Fontanet
48910f9c0f fix(xo-server/remote-handlers): do not swallow sync() value 2018-03-08 10:51:24 +01:00
Julien Fontanet
788a1accbd feat(xo-server): update to Babel 7 (#2731) 2018-03-08 10:11:14 +01:00
Julien Fontanet
b254e7e852 chore: update dependencies 2018-03-08 00:13:54 +01:00
Julien Fontanet
e288fa1b8a feat(xo-web): 5.17.1 2018-03-07 21:22:18 +01:00
Julien Fontanet
eb9ec68494 feat(xo-server): 5.17.1 2018-03-07 21:21:33 +01:00
Julien Fontanet
10ab4f2d79 fix(xo-server): work around minor Babel issue 2018-03-07 21:11:20 +01:00
badrAZ
b1986dc275 feat(Backups NG): second iteration (#2718) 2018-03-07 20:57:28 +01:00
Julien Fontanet
831e36ae5f fix(xo-server/exportDeltaVm): cannot assign ro name_label 2018-03-07 20:41:20 +01:00
Julien Fontanet
77a2d37d98 fix(xo-server/exportDeltaVm): do not leak the snapshot name
Fixes #2727
2018-03-07 20:30:09 +01:00
Julien Fontanet
37b90e25dc fix(xo-server/jobs): userIds are strings
Fixes #2728
2018-03-07 20:17:22 +01:00
Julien Fontanet
41f16846b6 chore(xo-server): addChecksumToReadStream → createChecksumStream (#2725)
`addChecksumToReadStream` was overly complicated and its usage was limited.

`createChecksumStream` is similar but does not pipe the readable stream in by itself.
2018-03-06 17:48:21 +01:00
Julien Fontanet
3e89c62e72 chore(xo-server): replace eventToPromise with fromEvent 2018-03-06 16:40:29 +01:00
Julien Fontanet
b7d3762c06 chore(xo-server): delete unused schedules on clean 2018-03-06 16:39:00 +01:00
Julien Fontanet
481bc9430a chore(xo-server/utils): remove unnecessary moment-timezone import 2018-03-06 16:38:07 +01:00
Julien Fontanet
13f2470887 chore(xo-server): remove createRawObject
Replace both `createRawObject()` and `Object.create()` by `{ __proto__: null }`.
2018-03-06 16:36:41 +01:00
Julien Fontanet
0308fe4e6e chore(xo-server): add checksum handling for VM import 2018-03-06 16:36:13 +01:00
Julien Fontanet
197273193e chore(xo-server): explicitly check for a schedule 2018-03-06 16:35:42 +01:00
Julien Fontanet
e4b11a793b chore(xo-server): move checksum streams into own module 2018-03-06 16:34:22 +01:00
Julien Fontanet
927d3135c4 chore(xo-server): rename removeSchedule to deleteSchedule 2018-03-06 16:32:59 +01:00
Julien Fontanet
aa533c20d6 fix(xo-server): respect compression param 2018-03-06 16:31:11 +01:00
Julien Fontanet
7fd615525a chore(xen-api): TODO do not cancel a finished task 2018-03-06 16:26:05 +01:00
Julien Fontanet
6abf3fc0af feat: add code of conduct 2018-03-06 10:18:05 +01:00
Julien Fontanet
6bb0929822 chore(xo-server/backupNg): remove unnecessary destructuring 2018-03-03 10:51:57 +01:00
Julien Fontanet
feebc04e55 chore(xo-server/BackupsNg): remove schedule default value
A backup NG job cannot be run without a schedule anyway
2018-03-03 10:26:45 +01:00
Julien Fontanet
2d406cd7c1 chore(xo-server/backupNg): rename importVmbackup{Ng,} 2018-03-03 10:25:01 +01:00
Julien Fontanet
788bfe632f chore(xo-server/exportDeltaVm): pass cancel token to _snapshotVm 2018-03-03 10:21:10 +01:00
Julien Fontanet
1149102f37 chore(xo-server/exportDeltaVm): pass name to _snapshotVm
Instead of setting it manually afterward.
2018-03-03 10:20:35 +01:00
Julien Fontanet
8bd949f618 chore(xo-server/exportDeltaVm): use _snapshotVm directly 2018-03-03 10:19:51 +01:00
Julien Fontanet
489b142a66 chore(xo-server): remove unnecessary getObject in exportDeltaVm 2018-03-03 10:15:24 +01:00
Julien Fontanet
cbbbb6da4f chore(xo-server): doc attrs of VMs created by Backup NG 2018-03-03 10:14:06 +01:00
Julien Fontanet
6701c7e3af chore(xo-server): use checksumFile helper unlink 2018-03-03 10:07:09 +01:00
Julien Fontanet
ecd460a991 feat(xo-web): 5.17.0 2018-03-02 19:57:24 +01:00
Julien Fontanet
b4d7648ffe feat(xo-server): 5.17.0 2018-03-02 19:57:04 +01:00
Julien Fontanet
eb3dfb0f30 feat(Backups NG): first iteration (#2705) 2018-03-02 19:56:08 +01:00
Julien Fontanet
2b9ba69480 fix(xo-server): getJob return the correct job 2018-03-02 19:53:16 +01:00
Julien Fontanet
8f784162ea chore(xo-server): Xapi#exportDeltaVm make streams writable 2018-03-02 19:52:35 +01:00
Julien Fontanet
a2ab64b142 chore(xo-server): Xapi#exportDeltaVm accept a snapshot 2018-03-02 19:52:00 +01:00
Julien Fontanet
052817ccbf chore(xo-server): RemoteHandler#rename handle checksum 2018-03-02 19:51:03 +01:00
Julien Fontanet
48b2297bc1 chore(xo-server): handle nested job props (#2712) 2018-03-02 19:29:08 +01:00
Nicolas Raynaud
e76a0ad4bd feat(xo-server): improve VHD merge speed (#2643)
Avoid re-opening/closing the files multiple times, which introduces a lot of latency in remote FS.
2018-03-02 19:08:01 +01:00
Olivier Lambert
baf6d30348 fix(changelog): remove useless spaces 2018-03-02 18:31:32 +01:00
Olivier Lambert
7d250dd90b feat(changelog): move and update changelog 2018-03-02 18:30:22 +01:00
Rajaa.BARHTAOUI
efaabb02e8 feat(xo-web): confirm modal before host emergency shutdown (#2714)
Fixes #2230
2018-03-02 18:05:58 +01:00
Julien Fontanet
0c3b98d451 fix(xo-server): forward createOutputStream errors with checksum 2018-03-02 15:29:26 +01:00
Julien Fontanet
28d1539ea6 fix(xo-server): fix Xapi#snapshotVm
It was broken by #2701.
2018-03-02 10:53:49 +01:00
Julien Fontanet
8ad02d2d51 feat(xo-web): ActionButton accept data-* props instead of handlerParam (#2713) 2018-03-02 09:57:26 +01:00
Julien Fontanet
1947a066e0 chore: disable flow for test
Still some config issues which I have to fix.
2018-03-01 16:30:02 +01:00
Julien Fontanet
d99e643634 chore(xo-server): inject schedule in jobs (#2710) 2018-03-01 16:27:51 +01:00
Rajaa.BARHTAOUI
65e1ac2ef9 chore(xo-web): consistently use "Username" label (#2709)
Fixes #2651
2018-03-01 15:58:48 +01:00
Julien Fontanet
64a768090f fix(xo-server): typo, executor → executors 2018-03-01 13:37:40 +01:00
Julien Fontanet
488eed046e chore(xo-server): pluggable job executors (#2707) 2018-03-01 12:10:08 +01:00
Julien Fontanet
dccddd78a6 chore(xo-web): rewrite smart-backup-pattern (#2698)
Fix a few issues
2018-02-28 17:07:16 +01:00
Julien Fontanet
3c247abcf9 chore(xo-web): add exact prop to NavLink (#2699) 2018-02-28 17:05:44 +01:00
Julien Fontanet
db795e91fd feat(complex-matcher): 0.3.0 2018-02-28 16:40:18 +01:00
Julien Fontanet
f060f56c93 feat(complex-matcher): number comparison (#2702)
`foo:>=42` matches `{ foo: 42 }` but not `"bar"` nor `{ foo: 37 }`.
2018-02-28 16:36:54 +01:00
Julien Fontanet
51be573f5e chore(xo-web): rewrite smart-backup-pattern 2018-02-28 16:23:29 +01:00
Julien Fontanet
4257cbb618 chore(xo-server): improve jobs code (#2696)
- add type filtering (default to `call`)
- support passing extra params to the call
- Flow typing
2018-02-28 16:22:41 +01:00
Julien Fontanet
e25d6b712d chore(xo-web): addSubscriptions provide initial props (#2697) 2018-02-28 16:09:56 +01:00
Julien Fontanet
b499d60130 chore(xo-server): improve scheduling code (#2695) 2018-02-28 15:59:19 +01:00
Julien Fontanet
68e06303a4 chore(xo-server): more cancelable Xapi methods (#2701) 2018-02-28 15:25:22 +01:00
badrAZ
60085798f2 fix(xo-web/jobs/vm.revert): use the snapshot's id instead of the vm's id (#2685)
Fixes #2498
2018-02-28 14:33:05 +01:00
badrAZ
c62cab39f1 feat(xo-web/VM): change the "share" button position (#2667)
Fixes #2663
2018-02-28 14:10:27 +01:00
Julien Fontanet
30483ab2d9 feat(xo-web): pass userData to SortedTable actions (#2700) 2018-02-28 13:43:41 +01:00
Julien Fontanet
c38c716616 chore(xo-server): use specific Babel plugins instead of stage-0 (#2694) 2018-02-28 12:59:23 +01:00
Julien Fontanet
ded1127d64 chore: mutualize Babel 7 config 2018-02-26 22:30:37 +01:00
Julien Fontanet
38d6130e89 chore(xo-cli): remove flow test 2018-02-26 21:58:32 +01:00
Julien Fontanet
ee47e40d1a feat(xo-web/logs): display real job status (#2688) 2018-02-26 18:02:39 +01:00
Julien Fontanet
80e66415d7 feat(xo-server): 5.16.2 2018-02-26 11:26:02 +01:00
Julien Fontanet
81e6372070 feat(xen-api): 0.16.6 2018-02-26 11:23:20 +01:00
Julien Fontanet
dbfbd42d29 fix(xo-server): identifiable names for VM export snapshots
Fixes #2668
2018-02-24 23:00:50 +01:00
Julien Fontanet
e0d34b1747 fix(xo-server): CR with lazy streams (#2675) 2018-02-23 17:50:17 +01:00
Julien Fontanet
9a8f9dd1d7 feat(xo-web): display attached VDI snapshots in Health (#2684)
Fixes #2634
2018-02-23 16:30:40 +01:00
Pierre Donias
75521f8757 fix(xo-server): do not count snapshots in self quotas (#2682)
Fixes #2626
2018-02-23 15:00:23 +01:00
Julien Fontanet
11d4cb2f04 fix(xo-server): detect interruption of full backups (#2686) 2018-02-23 13:07:48 +01:00
Rajaa.BARHTAOUI
d90cb09b56 feat(xo-web): disconnect VDIs from SR/disks view (#2602)
See #2505
2018-02-23 10:03:20 +01:00
Rajaa.BARHTAOUI
a02d393457 fix(xo-web/VM): allow self-service user to insert CD (#2647)
Fixes #2503
2018-02-22 16:42:43 +01:00
Julien Fontanet
01a5963947 feat(xen-api): allow createTask in read-only mode (#2679)
Fixes #2678
2018-02-22 15:50:35 +01:00
Julien Fontanet
7ef314d9f4 chore(lint-staged): rewritten in JS (#2676)
- simpler code, no need to hack around the shell
- no more double formatting
- no longer use git stash, simply cache files in memory
2018-02-22 11:45:44 +01:00
Julien Fontanet
2ff25d1f61 fix(xo-server): limit number of VDI exports (#2673)
Fixes #2672
2018-02-21 19:26:39 +01:00
Julien Fontanet
ede12b6732 fix(xo-server): limit number of VM exports (#2671)
Fixes #2669
2018-02-21 17:37:07 +01:00
Julien Fontanet
8a010f62fd chore(xo-server): remove unused Xapi#exportVdi 2018-02-21 17:32:30 +01:00
badrAZ
51da4a7e70 fix(xo-web/VM): show error when setting resource set fails (#2638)
Fixes #2620
2018-02-20 14:44:24 +01:00
Julien Fontanet
fd2580f5da feat(xo-cli): document config export (#2662) 2018-02-20 11:43:38 +01:00
Julien Fontanet
c5fdab7d47 feat(cron): 1.0.2 2018-02-20 11:42:19 +01:00
Julien Fontanet
ae094438b1 fix(cron): Schedule#next() with moment 2018-02-20 11:41:12 +01:00
Julien Fontanet
3e5af9e894 chore: update dependencies 2018-02-19 18:10:05 +01:00
Julien Fontanet
10093afb91 feat(cron): 1.0.1 2018-02-19 17:06:17 +01:00
Julien Fontanet
58032738b9 chore(cron): replace luxon with moment-timezone (#2657) 2018-02-19 17:04:04 +01:00
Julien Fontanet
89cbbaeeea chore: fix yarn.lock 2018-02-19 15:52:17 +01:00
Julien Fontanet
5ca08eb400 feat(xo-server): 5.16.1 2018-02-19 14:11:10 +01:00
Julien Fontanet
fad049d2ac feat(xo-web): 5.16.2 2018-02-19 14:10:37 +01:00
128 changed files with 7456 additions and 3323 deletions

View File

@@ -2,6 +2,12 @@ module.exports = {
extends: ['standard', 'standard-jsx'],
globals: {
__DEV__: true,
$Dict: true,
$Diff: true,
$Exact: true,
$Keys: true,
$PropertyType: true,
$Shape: true,
},
parser: 'babel-eslint',
rules: {

View File

@@ -8,6 +8,7 @@
[lints]
[options]
esproposal.decorators=ignore
include_warnings=true
module.use_strict=true

View File

@@ -1,4 +1,5 @@
module.exports = {
semi: false,
singleQuote: true,
trailingComma: 'es5',
}

View File

@@ -0,0 +1,47 @@
'use strict'
const PLUGINS_RE = /^(?:@babel\/plugin-.+|babel-plugin-lodash)$/
const PRESETS_RE = /^@babel\/preset-.+$/
const NODE_ENV = process.env.NODE_ENV || 'development'
const __PROD__ = NODE_ENV === 'production'
const __TEST__ = NODE_ENV === 'test'
module.exports = function (pkg, plugins, presets) {
plugins === undefined && (plugins = {})
presets === undefined && (presets = {})
presets['@babel/preset-env'] = {
debug: !__TEST__,
loose: true,
shippedProposals: true,
targets: __PROD__
? (() => {
let node = (pkg.engines || {}).node
if (node !== undefined) {
const trimChars = '^=>~'
while (trimChars.includes(node[0])) {
node = node.slice(1)
}
return { node: node }
}
})()
: { browsers: '', node: 'current' },
useBuiltIns: '@babel/polyfill' in (pkg.dependencies || {}) && 'usage',
}
Object.keys(pkg.devDependencies || {}).forEach(name => {
if (!(name in presets) && PLUGINS_RE.test(name)) {
plugins[name] = {}
} else if (!(name in presets) && PRESETS_RE.test(name)) {
presets[name] = {}
}
})
return {
comments: !__PROD__,
ignore: __TEST__ ? undefined : [/\.spec\.js$/],
plugins: Object.keys(plugins).map(plugin => [plugin, plugins[plugin]]),
presets: Object.keys(presets).map(preset => [preset, presets[preset]]),
}
}

View File

@@ -0,0 +1,11 @@
{
"private": true,
"name": "@xen-orchestra/babel-config",
"version": "0.0.0",
"homepage": "https://github.com/vatesfr/xen-orchestra/tree/master/packages/@xen-orchestra/babel-config",
"bugs": "https://github.com/vatesfr/xen-orchestra/issues",
"repository": {
"type": "git",
"url": "https://github.com/vatesfr/xen-orchestra.git"
}
}

View File

@@ -1,47 +1,3 @@
'use strict'
const NODE_ENV = process.env.NODE_ENV || 'development'
const __PROD__ = NODE_ENV === 'production'
const __TEST__ = NODE_ENV === 'test'
const pkg = require('./package')
const plugins = {
lodash: {},
}
const presets = {
'@babel/preset-env': {
debug: !__TEST__,
loose: true,
shippedProposals: true,
targets: __PROD__
? (() => {
let node = (pkg.engines || {}).node
if (node !== undefined) {
const trimChars = '^=>~'
while (trimChars.includes(node[0])) {
node = node.slice(1)
}
return { node: node }
}
})()
: { browsers: '', node: 'current' },
useBuiltIns: '@babel/polyfill' in (pkg.dependencies || {}) && 'usage',
},
}
Object.keys(pkg.devDependencies || {}).forEach(name => {
if (!(name in presets) && /@babel\/plugin-.+/.test(name)) {
plugins[name] = {}
} else if (!(name in presets) && /@babel\/preset-.+/.test(name)) {
presets[name] = {}
}
})
module.exports = {
comments: !__PROD__,
ignore: __TEST__ ? undefined : [/\.spec\.js$/],
plugins: Object.keys(plugins).map(plugin => [plugin, plugins[plugin]]),
presets: Object.keys(presets).map(preset => [preset, presets[preset]]),
}
module.exports = require('../../@xen-orchestra/babel-config')(
require('./package.json')
)

View File

@@ -1,6 +1,6 @@
{
"name": "@xen-orchestra/cron",
"version": "1.0.0",
"version": "1.0.2",
"license": "ISC",
"description": "Focused, well maintained, cron parser/scheduler",
"keywords": [
@@ -38,7 +38,7 @@
},
"dependencies": {
"lodash": "^4.17.4",
"luxon": "^0.5.2"
"moment-timezone": "^0.5.14"
},
"devDependencies": {
"@babel/cli": "7.0.0-beta.40",

View File

@@ -1,4 +1,4 @@
import { DateTime } from 'luxon'
import moment from 'moment-timezone'
import next from './next'
import parse from './parse'
@@ -41,7 +41,10 @@ class Job {
class Schedule {
constructor (pattern, zone = 'utc') {
this._schedule = parse(pattern)
this._dateTimeOpts = { zone }
this._createDate =
zone.toLowerCase() === 'utc'
? moment.utc
: zone === 'local' ? moment : () => moment.tz(zone)
}
createJob (fn) {
@@ -51,15 +54,15 @@ class Schedule {
next (n) {
const dates = new Array(n)
const schedule = this._schedule
let date = DateTime.fromObject(this._dateTimeOpts)
let date = this._createDate()
for (let i = 0; i < n; ++i) {
dates[i] = (date = next(schedule, date)).toJSDate()
dates[i] = (date = next(schedule, date)).toDate()
}
return dates
}
_nextDelay () {
const now = DateTime.fromObject(this._dateTimeOpts)
const now = this._createDate()
return next(this._schedule, now) - now
}

View File

@@ -1,10 +1,10 @@
import moment from 'moment-timezone'
import sortedIndex from 'lodash/sortedIndex'
import { DateTime } from 'luxon'
const NEXT_MAPPING = {
month: { year: 1 },
day: { month: 1 },
weekday: { week: 1 },
date: { month: 1 },
day: { week: 1 },
hour: { day: 1 },
minute: { hour: 1 },
}
@@ -13,38 +13,37 @@ const getFirst = values => (values !== undefined ? values[0] : 0)
const setFirstAvailable = (date, unit, values) => {
if (values === undefined) {
return date
return
}
const curr = date.get(unit)
const next = values[sortedIndex(values, curr) % values.length]
if (curr === next) {
return date
return
}
const newDate = date.set({ [unit]: next })
return newDate > date ? newDate : newDate.plus(NEXT_MAPPING[unit])
const timestamp = +date
date.set(unit, next)
if (timestamp > +date) {
date.add(NEXT_MAPPING[unit])
}
return true
}
// returns the next run, after the passed date
export default (schedule, fromDate) => {
let date = fromDate
let date = moment(fromDate)
.set({
second: 0,
millisecond: 0,
})
.plus({ minute: 1 })
.add({ minute: 1 })
const { minute, hour, dayOfMonth, month, dayOfWeek } = schedule
date = setFirstAvailable(date, 'minute', minute)
setFirstAvailable(date, 'minute', minute)
let tmp
tmp = setFirstAvailable(date, 'hour', hour)
if (tmp !== date) {
date = tmp.set({
minute: getFirst(minute),
})
if (setFirstAvailable(date, 'hour', hour)) {
date.set('minute', getFirst(minute))
}
let loop
@@ -52,30 +51,30 @@ export default (schedule, fromDate) => {
do {
loop = false
tmp = setFirstAvailable(date, 'month', month)
if (tmp !== date) {
date = tmp.set({
day: 1,
if (setFirstAvailable(date, 'month', month)) {
date.set({
date: 1,
hour: getFirst(hour),
minute: getFirst(minute),
})
}
let newDate = date.clone()
if (dayOfMonth === undefined) {
if (dayOfWeek !== undefined) {
tmp = setFirstAvailable(date, 'weekday', dayOfWeek)
setFirstAvailable(newDate, 'day', dayOfWeek)
}
} else if (dayOfWeek === undefined) {
tmp = setFirstAvailable(date, 'day', dayOfMonth)
setFirstAvailable(newDate, 'date', dayOfMonth)
} else {
tmp = DateTime.min(
setFirstAvailable(date, 'day', dayOfMonth),
setFirstAvailable(date, 'weekday', dayOfWeek)
)
const dateDay = newDate.clone()
setFirstAvailable(dateDay, 'date', dayOfMonth)
setFirstAvailable(newDate, 'day', dayOfWeek)
newDate = moment.min(dateDay, newDate)
}
if (tmp !== date) {
loop = tmp.month !== date.month
date = tmp.set({
if (+date !== +newDate) {
loop = date.month() !== newDate.month()
date = newDate.set({
hour: getFirst(hour),
minute: getFirst(minute),
})

View File

@@ -1,17 +1,15 @@
/* eslint-env jest */
import mapValues from 'lodash/mapValues'
import { DateTime } from 'luxon'
import moment from 'moment-timezone'
import next from './next'
import parse from './parse'
const N = (pattern, fromDate = '2018-04-09T06:25') =>
next(parse(pattern), DateTime.fromISO(fromDate, { zone: 'utc' })).toISO({
includeOffset: false,
suppressMilliseconds: true,
suppressSeconds: true,
})
const N = (pattern, fromDate = '2018-04-09T06:25') => {
const iso = next(parse(pattern), moment.utc(fromDate)).toISOString()
return iso.slice(0, iso.lastIndexOf(':'))
}
describe('next()', () => {
mapValues(

View File

@@ -173,12 +173,7 @@ export default createParser({
{
aliases: 'jan feb mar apr may jun jul aug sep oct nov dec'.split(' '),
name: 'month',
range: [1, 12],
// this function is applied to numeric entries (not steps)
//
// currently parse month 0-11
post: value => value + 1,
range: [0, 11],
},
{
aliases: 'mon tue wen thu fri sat sun'.split(' '),

View File

@@ -8,16 +8,16 @@ describe('parse()', () => {
minute: [0],
hour: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10],
dayOfMonth: [1, 11, 21, 31],
month: [1, 3, 5, 8, 11],
month: [0, 2, 4, 7, 10],
})
})
it('correctly parse months', () => {
expect(parse('* * * 0,11 *')).toEqual({
month: [1, 12],
month: [0, 11],
})
expect(parse('* * * jan,dec *')).toEqual({
month: [1, 12],
month: [0, 11],
})
})

View File

@@ -1,121 +1,184 @@
# ChangeLog
## **5.17.0** (2018-03-02)
### Enhancements
- Add modal confirmation for host emergency mode [#2230](https://github.com/vatesfr/xen-orchestra/issues/2230)
- Authorize stats fetching in RO mode [#2678](https://github.com/vatesfr/xen-orchestra/issues/2678)
- Limit VM.export concurrency [#2669](https://github.com/vatesfr/xen-orchestra/issues/2669)
- Basic backup: snapshots names [#2668](https://github.com/vatesfr/xen-orchestra/issues/2668)
- Change placement of "share" button for self [#2663](https://github.com/vatesfr/xen-orchestra/issues/2663)
- Username field labeled inconsistently [#2651](https://github.com/vatesfr/xen-orchestra/issues/2651)
- Backup report for VDI chain status [#2639](https://github.com/vatesfr/xen-orchestra/issues/2639)
- [Dashboard/Health] Control domain VDIs should includes snapshots [#2634](https://github.com/vatesfr/xen-orchestra/issues/2634)
- Do not count VM-snapshot in self quota [#2626](https://github.com/vatesfr/xen-orchestra/issues/2626)
- [xo-web] Backup logs [#2618](https://github.com/vatesfr/xen-orchestra/issues/2618)
- [VM/Snapshots] grouped deletion [#2595](https://github.com/vatesfr/xen-orchestra/issues/2595)
- [Backups] add a new state for a VM: skipped [#2591](https://github.com/vatesfr/xen-orchestra/issues/2591)
- Set a self-service VM at "share" after creation [#2589](https://github.com/vatesfr/xen-orchestra/issues/2589)
- [Backup logs] Improve Unhealthy VDI Chain message [#2586](https://github.com/vatesfr/xen-orchestra/issues/2586)
- [SortedTable] Put sort criteria in URL like the filter [#2584](https://github.com/vatesfr/xen-orchestra/issues/2584)
- Cant attach XenTools on User side. [#2503](https://github.com/vatesfr/xen-orchestra/issues/2503)
- Pool filter for health view [#2302](https://github.com/vatesfr/xen-orchestra/issues/2302)
- [Smart Backup] Improve feedback [#2253](https://github.com/vatesfr/xen-orchestra/issues/2253)
### Bugs
- Limit VDI export concurrency [#2672](https://github.com/vatesfr/xen-orchestra/issues/2672)
- Select is broken outside dev mode [#2645](https://github.com/vatesfr/xen-orchestra/issues/2645)
- "New" XOSAN automatically register the user [#2625](https://github.com/vatesfr/xen-orchestra/issues/2625)
- [VM/Advanced] Error on resource set change should not be hidden [#2620](https://github.com/vatesfr/xen-orchestra/issues/2620)
- misspelled word [#2606](https://github.com/vatesfr/xen-orchestra/issues/2606)
- Jobs vm.revert failing all the time [#2498](https://github.com/vatesfr/xen-orchestra/issues/2498)
## **5.16.0** (2018-01-31)
### Enhancements
- Use @xen-orchestra/cron everywhere [#2616](https://github.com/vatesfr/xen-orchestra/issues/2616)
- [SortedTable] Possibility to specify grouped/individual actions together [#2596](https://github.com/vatesfr/xen-orchestra/issues/2596)
- Self-service: allow VIF create [#2593](https://github.com/vatesfr/xen-orchestra/issues/2593)
- Ghost tasks [#2579](https://github.com/vatesfr/xen-orchestra/issues/2579)
- Autopatching: ignore 7.3 update patch for 7.2 [#2564](https://github.com/vatesfr/xen-orchestra/issues/2564)
- Allow deleting VMs for which `destroy` is blocked [#2525](https://github.com/vatesfr/xen-orchestra/issues/2525)
- Better confirmation on mass destructive actions [#2522](https://github.com/vatesfr/xen-orchestra/issues/2522)
- Move VM In to/Out of Self Service Group [#1913](https://github.com/vatesfr/xen-orchestra/issues/1913)
- Two factor auth [#1897](https://github.com/vatesfr/xen-orchestra/issues/1897)
- token.create should accept an expiration [#1769](https://github.com/vatesfr/xen-orchestra/issues/1769)
- Self Service User - User don't have quota in his dashboard [#1538](https://github.com/vatesfr/xen-orchestra/issues/1538)
- Remove CoffeeScript in xo-server [#189](https://github.com/vatesfr/xen-orchestra/issues/189)
- Better Handling of suspending VMs from the Home screen [#2547](https://github.com/vatesfr/xen-orchestra/issues/2547)
- [xen-api] Stronger reconnection policy [#2410](https://github.com/vatesfr/xen-orchestra/issues/2410)
### Bugs
- [cron] toJSDate is not a function [#2661](https://github.com/vatesfr/xen-orchestra/issues/2661)
- [Delta backup] Merge should not fail when delta contains no data [#2635](https://github.com/vatesfr/xen-orchestra/issues/2635)
- Select issues [#2590](https://github.com/vatesfr/xen-orchestra/issues/2590)
- Fix selects display [#2575](https://github.com/vatesfr/xen-orchestra/issues/2575)
- [SortedTable] Stuck when displaying last page [#2569](https://github.com/vatesfr/xen-orchestra/issues/2569)
- [vm/network] Duplicate key error [#2553](https://github.com/vatesfr/xen-orchestra/issues/2553)
- Jobs vm.revert failing all the time [#2498](https://github.com/vatesfr/xen-orchestra/issues/2498)
- TZ selector is not used for backup schedule preview [#2464](https://github.com/vatesfr/xen-orchestra/issues/2464)
- Remove filter in VM/network view [#2548](https://github.com/vatesfr/xen-orchestra/issues/2548)
## **5.15.0** (2017-12-29)
### Enhancements
* VDI resize online method removed in 7.3 [#2542](https://github.com/vatesfr/xen-orchestra/issues/2542)
* Smart replace VDI.pool_migrate removed from XenServer 7.3 Free [#2541](https://github.com/vatesfr/xen-orchestra/issues/2541)
* New memory constraints in XenServer 7.3 [#2540](https://github.com/vatesfr/xen-orchestra/issues/2540)
* Link to Settings/Logs for admins in error notifications [#2516](https://github.com/vatesfr/xen-orchestra/issues/2516)
* [Self Service] Do not use placeholders to describe inputs [#2509](https://github.com/vatesfr/xen-orchestra/issues/2509)
* Obfuscate password in log in LDAP plugin test [#2506](https://github.com/vatesfr/xen-orchestra/issues/2506)
* Log rotation [#2492](https://github.com/vatesfr/xen-orchestra/issues/2492)
* Continuous Replication TAG [#2473](https://github.com/vatesfr/xen-orchestra/issues/2473)
* Graphs in VM list view [#2469](https://github.com/vatesfr/xen-orchestra/issues/2469)
* [Delta Backups] Do not include merge duration in transfer speed stat [#2426](https://github.com/vatesfr/xen-orchestra/issues/2426)
* Warning for disperse mode [#2537](https://github.com/vatesfr/xen-orchestra/issues/2537)
- VDI resize online method removed in 7.3 [#2542](https://github.com/vatesfr/xen-orchestra/issues/2542)
- Smart replace VDI.pool_migrate removed from XenServer 7.3 Free [#2541](https://github.com/vatesfr/xen-orchestra/issues/2541)
- New memory constraints in XenServer 7.3 [#2540](https://github.com/vatesfr/xen-orchestra/issues/2540)
- Link to Settings/Logs for admins in error notifications [#2516](https://github.com/vatesfr/xen-orchestra/issues/2516)
- [Self Service] Do not use placeholders to describe inputs [#2509](https://github.com/vatesfr/xen-orchestra/issues/2509)
- Obfuscate password in log in LDAP plugin test [#2506](https://github.com/vatesfr/xen-orchestra/issues/2506)
- Log rotation [#2492](https://github.com/vatesfr/xen-orchestra/issues/2492)
- Continuous Replication TAG [#2473](https://github.com/vatesfr/xen-orchestra/issues/2473)
- Graphs in VM list view [#2469](https://github.com/vatesfr/xen-orchestra/issues/2469)
- [Delta Backups] Do not include merge duration in transfer speed stat [#2426](https://github.com/vatesfr/xen-orchestra/issues/2426)
- Warning for disperse mode [#2537](https://github.com/vatesfr/xen-orchestra/issues/2537)
### Bugs
* VM console doesn't work when using IPv6 in URL [#2530](https://github.com/vatesfr/xen-orchestra/issues/2530)
* Retention issue with failed basic backup [#2524](https://github.com/vatesfr/xen-orchestra/issues/2524)
* [VM/Advanced] Check that the autopower on setting is working [#2489](https://github.com/vatesfr/xen-orchestra/issues/2489)
* Cloud config drive create fail on XenServer < 7 [#2478](https://github.com/vatesfr/xen-orchestra/issues/2478)
* VM create fails due to missing vGPU id [#2466](https://github.com/vatesfr/xen-orchestra/issues/2466)
- VM console doesn't work when using IPv6 in URL [#2530](https://github.com/vatesfr/xen-orchestra/issues/2530)
- Retention issue with failed basic backup [#2524](https://github.com/vatesfr/xen-orchestra/issues/2524)
- [VM/Advanced] Check that the autopower on setting is working [#2489](https://github.com/vatesfr/xen-orchestra/issues/2489)
- Cloud config drive create fail on XenServer < 7 [#2478](https://github.com/vatesfr/xen-orchestra/issues/2478)
- VM create fails due to missing vGPU id [#2466](https://github.com/vatesfr/xen-orchestra/issues/2466)
## **5.14.0** (2017-10-31)
### Enhancements
* VM snapshot description display [#2458](https://github.com/vatesfr/xen-orchestra/issues/2458)
* [Home] Ability to sort VM by number of snapshots [#2450](https://github.com/vatesfr/xen-orchestra/issues/2450)
* Display XS version in host view [#2439](https://github.com/vatesfr/xen-orchestra/issues/2439)
* [File restore]: Clarify the possibility to select multiple files [#2438](https://github.com/vatesfr/xen-orchestra/issues/2438)
* [Continuous Replication] Time in replicated VMs [#2431](https://github.com/vatesfr/xen-orchestra/issues/2431)
* [SortedTable] Active page in URL param [#2405](https://github.com/vatesfr/xen-orchestra/issues/2405)
* replace all '...' with the UTF-8 equivalent [#2391](https://github.com/vatesfr/xen-orchestra/issues/2391)
* [SortedTable] Explicit when no items [#2388](https://github.com/vatesfr/xen-orchestra/issues/2388)
* Handle patching licenses [#2382](https://github.com/vatesfr/xen-orchestra/issues/2382)
* Credential leaking in logs for messages regarding invalid credentials and "too fast authentication" [#2363](https://github.com/vatesfr/xen-orchestra/issues/2363)
* [SortedTable] Keyboard support [#2330](https://github.com/vatesfr/xen-orchestra/issues/2330)
* token.create should accept an expiration [#1769](https://github.com/vatesfr/xen-orchestra/issues/1769)
* On updater error, display link to documentation [#1610](https://github.com/vatesfr/xen-orchestra/issues/1610)
* Add basic vGPU support [#2413](https://github.com/vatesfr/xen-orchestra/issues/2413)
* Storage View - Disk Tab - real disk usage [#2475](https://github.com/vatesfr/xen-orchestra/issues/2475)
- VM snapshot description display [#2458](https://github.com/vatesfr/xen-orchestra/issues/2458)
- [Home] Ability to sort VM by number of snapshots [#2450](https://github.com/vatesfr/xen-orchestra/issues/2450)
- Display XS version in host view [#2439](https://github.com/vatesfr/xen-orchestra/issues/2439)
- [File restore]: Clarify the possibility to select multiple files [#2438](https://github.com/vatesfr/xen-orchestra/issues/2438)
- [Continuous Replication] Time in replicated VMs [#2431](https://github.com/vatesfr/xen-orchestra/issues/2431)
- [SortedTable] Active page in URL param [#2405](https://github.com/vatesfr/xen-orchestra/issues/2405)
- replace all '...' with the UTF-8 equivalent [#2391](https://github.com/vatesfr/xen-orchestra/issues/2391)
- [SortedTable] Explicit when no items [#2388](https://github.com/vatesfr/xen-orchestra/issues/2388)
- Handle patching licenses [#2382](https://github.com/vatesfr/xen-orchestra/issues/2382)
- Credential leaking in logs for messages regarding invalid credentials and "too fast authentication" [#2363](https://github.com/vatesfr/xen-orchestra/issues/2363)
- [SortedTable] Keyboard support [#2330](https://github.com/vatesfr/xen-orchestra/issues/2330)
- token.create should accept an expiration [#1769](https://github.com/vatesfr/xen-orchestra/issues/1769)
- On updater error, display link to documentation [#1610](https://github.com/vatesfr/xen-orchestra/issues/1610)
- Add basic vGPU support [#2413](https://github.com/vatesfr/xen-orchestra/issues/2413)
- Storage View - Disk Tab - real disk usage [#2475](https://github.com/vatesfr/xen-orchestra/issues/2475)
### Bugs
* Config drive - Custom config not working properly [#2449](https://github.com/vatesfr/xen-orchestra/issues/2449)
* Snapshot sorted table breaks copyVm [#2446](https://github.com/vatesfr/xen-orchestra/issues/2446)
* [vm/snapshots] Incorrect default sort order [#2442](https://github.com/vatesfr/xen-orchestra/issues/2442)
* [Backups/Jobs] Incorrect months mapping [#2427](https://github.com/vatesfr/xen-orchestra/issues/2427)
* [Xapi#barrier()] Not compatible with XenServer < 6.1 [#2418](https://github.com/vatesfr/xen-orchestra/issues/2418)
* [SortedTable] Change page when no more items on the page [#2401](https://github.com/vatesfr/xen-orchestra/issues/2401)
* Review and fix creating a VM from a snapshot [#2343](https://github.com/vatesfr/xen-orchestra/issues/2343)
* Unable to edit / save restored backup job [#1922](https://github.com/vatesfr/xen-orchestra/issues/1922)
- Config drive - Custom config not working properly [#2449](https://github.com/vatesfr/xen-orchestra/issues/2449)
- Snapshot sorted table breaks copyVm [#2446](https://github.com/vatesfr/xen-orchestra/issues/2446)
- [vm/snapshots] Incorrect default sort order [#2442](https://github.com/vatesfr/xen-orchestra/issues/2442)
- [Backups/Jobs] Incorrect months mapping [#2427](https://github.com/vatesfr/xen-orchestra/issues/2427)
- [Xapi#barrier()] Not compatible with XenServer < 6.1 [#2418](https://github.com/vatesfr/xen-orchestra/issues/2418)
- [SortedTable] Change page when no more items on the page [#2401](https://github.com/vatesfr/xen-orchestra/issues/2401)
- Review and fix creating a VM from a snapshot [#2343](https://github.com/vatesfr/xen-orchestra/issues/2343)
- Unable to edit / save restored backup job [#1922](https://github.com/vatesfr/xen-orchestra/issues/1922)
## **5.13.0** (2017-09-29)
### Enhancements
* replace all '...' with the UTF-8 equivalent [#2391](https://github.com/vatesfr/xen-orchestra/issues/2391)
* [SortedTable] Explicit when no items [#2388](https://github.com/vatesfr/xen-orchestra/issues/2388)
* Auto select iqn or lun if there is only one [#2379](https://github.com/vatesfr/xen-orchestra/issues/2379)
* [Sparklines] Hide points [#2370](https://github.com/vatesfr/xen-orchestra/issues/2370)
* Allow xo-server-recover-account to generate a random password [#2360](https://github.com/vatesfr/xen-orchestra/issues/2360)
* Add disk in existing VM as self user [#2348](https://github.com/vatesfr/xen-orchestra/issues/2348)
* Sorted table for Settings/server [#2340](https://github.com/vatesfr/xen-orchestra/issues/2340)
* Sign in should be case insensitive [#2337](https://github.com/vatesfr/xen-orchestra/issues/2337)
* [SortedTable] Extend checkbox click to whole column [#2329](https://github.com/vatesfr/xen-orchestra/issues/2329)
* [SortedTable] Ability to select all items (across pages) [#2324](https://github.com/vatesfr/xen-orchestra/issues/2324)
* [SortedTable] Range selection [#2323](https://github.com/vatesfr/xen-orchestra/issues/2323)
* Warning on SMB remote creation [#2316](https://github.com/vatesfr/xen-orchestra/issues/2316)
* [Home | SortedTable] Add link to syntax doc in the filter input [#2305](https://github.com/vatesfr/xen-orchestra/issues/2305)
* [SortedTable] Add optional binding of filter to an URL query [#2301](https://github.com/vatesfr/xen-orchestra/issues/2301)
* [Home][Keyboard navigation] Allow selecting the objects [#2214](https://github.com/vatesfr/xen-orchestra/issues/2214)
* SR view / Disks: option to display non managed VDIs [#1724](https://github.com/vatesfr/xen-orchestra/issues/1724)
* Continuous Replication Retention [#1692](https://github.com/vatesfr/xen-orchestra/issues/1692)
- replace all '...' with the UTF-8 equivalent [#2391](https://github.com/vatesfr/xen-orchestra/issues/2391)
- [SortedTable] Explicit when no items [#2388](https://github.com/vatesfr/xen-orchestra/issues/2388)
- Auto select iqn or lun if there is only one [#2379](https://github.com/vatesfr/xen-orchestra/issues/2379)
- [Sparklines] Hide points [#2370](https://github.com/vatesfr/xen-orchestra/issues/2370)
- Allow xo-server-recover-account to generate a random password [#2360](https://github.com/vatesfr/xen-orchestra/issues/2360)
- Add disk in existing VM as self user [#2348](https://github.com/vatesfr/xen-orchestra/issues/2348)
- Sorted table for Settings/server [#2340](https://github.com/vatesfr/xen-orchestra/issues/2340)
- Sign in should be case insensitive [#2337](https://github.com/vatesfr/xen-orchestra/issues/2337)
- [SortedTable] Extend checkbox click to whole column [#2329](https://github.com/vatesfr/xen-orchestra/issues/2329)
- [SortedTable] Ability to select all items (across pages) [#2324](https://github.com/vatesfr/xen-orchestra/issues/2324)
- [SortedTable] Range selection [#2323](https://github.com/vatesfr/xen-orchestra/issues/2323)
- Warning on SMB remote creation [#2316](https://github.com/vatesfr/xen-orchestra/issues/2316)
- [Home | SortedTable] Add link to syntax doc in the filter input [#2305](https://github.com/vatesfr/xen-orchestra/issues/2305)
- [SortedTable] Add optional binding of filter to an URL query [#2301](https://github.com/vatesfr/xen-orchestra/issues/2301)
- [Home][Keyboard navigation] Allow selecting the objects [#2214](https://github.com/vatesfr/xen-orchestra/issues/2214)
- SR view / Disks: option to display non managed VDIs [#1724](https://github.com/vatesfr/xen-orchestra/issues/1724)
- Continuous Replication Retention [#1692](https://github.com/vatesfr/xen-orchestra/issues/1692)
### Bugs
* iSCSI issue on LUN selector [#2374](https://github.com/vatesfr/xen-orchestra/issues/2374)
* Errors in VM copy are not properly reported [#2347](https://github.com/vatesfr/xen-orchestra/issues/2347)
* Removing a PIF IP fails [#2346](https://github.com/vatesfr/xen-orchestra/issues/2346)
* Review and fix creating a VM from a snapshot [#2343](https://github.com/vatesfr/xen-orchestra/issues/2343)
* iSCSI LUN Detection fails with authentication [#2339](https://github.com/vatesfr/xen-orchestra/issues/2339)
* Fix PoolActionBar to add a new SR [#2307](https://github.com/vatesfr/xen-orchestra/issues/2307)
* [VM migration] Error if default SR not accessible to target host [#2180](https://github.com/vatesfr/xen-orchestra/issues/2180)
* A job shouldn't be executable more than once at the same time [#2053](https://github.com/vatesfr/xen-orchestra/issues/2053)
- iSCSI issue on LUN selector [#2374](https://github.com/vatesfr/xen-orchestra/issues/2374)
- Errors in VM copy are not properly reported [#2347](https://github.com/vatesfr/xen-orchestra/issues/2347)
- Removing a PIF IP fails [#2346](https://github.com/vatesfr/xen-orchestra/issues/2346)
- Review and fix creating a VM from a snapshot [#2343](https://github.com/vatesfr/xen-orchestra/issues/2343)
- iSCSI LUN Detection fails with authentication [#2339](https://github.com/vatesfr/xen-orchestra/issues/2339)
- Fix PoolActionBar to add a new SR [#2307](https://github.com/vatesfr/xen-orchestra/issues/2307)
- [VM migration] Error if default SR not accessible to target host [#2180](https://github.com/vatesfr/xen-orchestra/issues/2180)
- A job shouldn't be executable more than once at the same time [#2053](https://github.com/vatesfr/xen-orchestra/issues/2053)
## **5.12.0** (2017-08-31)
### Enhancements
* PIF selector with physical status [#2326](https://github.com/vatesfr/xen-orchestra/issues/2326)
* [SortedTable] Range selection [#2323](https://github.com/vatesfr/xen-orchestra/issues/2323)
* Self service filter for home/VM view [#2303](https://github.com/vatesfr/xen-orchestra/issues/2303)
* SR/Disks Display total of VDIs to coalesce [#2300](https://github.com/vatesfr/xen-orchestra/issues/2300)
* Pool filter in the task view [#2293](https://github.com/vatesfr/xen-orchestra/issues/2293)
* "Loading" while fetching objects [#2285](https://github.com/vatesfr/xen-orchestra/issues/2285)
* [SortedTable] Add grouped actions feature [#2276](https://github.com/vatesfr/xen-orchestra/issues/2276)
* Add a filter to the backups' log [#2246](https://github.com/vatesfr/xen-orchestra/issues/2246)
* It should not be possible to migrate a halted VM. [#2233](https://github.com/vatesfr/xen-orchestra/issues/2233)
* [Home][Keyboard navigation] Allow selecting the objects [#2214](https://github.com/vatesfr/xen-orchestra/issues/2214)
* Allow to set pool master [#2213](https://github.com/vatesfr/xen-orchestra/issues/2213)
* Continuous Replication Retention [#1692](https://github.com/vatesfr/xen-orchestra/issues/1692)
- PIF selector with physical status [#2326](https://github.com/vatesfr/xen-orchestra/issues/2326)
- [SortedTable] Range selection [#2323](https://github.com/vatesfr/xen-orchestra/issues/2323)
- Self service filter for home/VM view [#2303](https://github.com/vatesfr/xen-orchestra/issues/2303)
- SR/Disks Display total of VDIs to coalesce [#2300](https://github.com/vatesfr/xen-orchestra/issues/2300)
- Pool filter in the task view [#2293](https://github.com/vatesfr/xen-orchestra/issues/2293)
- "Loading" while fetching objects [#2285](https://github.com/vatesfr/xen-orchestra/issues/2285)
- [SortedTable] Add grouped actions feature [#2276](https://github.com/vatesfr/xen-orchestra/issues/2276)
- Add a filter to the backups' log [#2246](https://github.com/vatesfr/xen-orchestra/issues/2246)
- It should not be possible to migrate a halted VM. [#2233](https://github.com/vatesfr/xen-orchestra/issues/2233)
- [Home][Keyboard navigation] Allow selecting the objects [#2214](https://github.com/vatesfr/xen-orchestra/issues/2214)
- Allow to set pool master [#2213](https://github.com/vatesfr/xen-orchestra/issues/2213)
- Continuous Replication Retention [#1692](https://github.com/vatesfr/xen-orchestra/issues/1692)
### Bugs
* Home pagination bug [#2310](https://github.com/vatesfr/xen-orchestra/issues/2310)
* Fix PoolActionBar to add a new SR [#2307](https://github.com/vatesfr/xen-orchestra/issues/2307)
* VM snapshots are not correctly deleted [#2304](https://github.com/vatesfr/xen-orchestra/issues/2304)
* Parallel deletion of VMs fails [#2297](https://github.com/vatesfr/xen-orchestra/issues/2297)
* Continuous replication creates multiple zombie disks [#2292](https://github.com/vatesfr/xen-orchestra/issues/2292)
* Add user to Group issue [#2196](https://github.com/vatesfr/xen-orchestra/issues/2196)
* [VM migration] Error if default SR not accessible to target host [#2180](https://github.com/vatesfr/xen-orchestra/issues/2180)
- Home pagination bug [#2310](https://github.com/vatesfr/xen-orchestra/issues/2310)
- Fix PoolActionBar to add a new SR [#2307](https://github.com/vatesfr/xen-orchestra/issues/2307)
- VM snapshots are not correctly deleted [#2304](https://github.com/vatesfr/xen-orchestra/issues/2304)
- Parallel deletion of VMs fails [#2297](https://github.com/vatesfr/xen-orchestra/issues/2297)
- Continuous replication creates multiple zombie disks [#2292](https://github.com/vatesfr/xen-orchestra/issues/2292)
- Add user to Group issue [#2196](https://github.com/vatesfr/xen-orchestra/issues/2196)
- [VM migration] Error if default SR not accessible to target host [#2180](https://github.com/vatesfr/xen-orchestra/issues/2180)
## **5.11.0** (2017-07-31)

46
CODE_OF_CONDUCT.md Normal file
View File

@@ -0,0 +1,46 @@
# Contributor Covenant Code of Conduct
## Our Pledge
In the interest of fostering an open and welcoming environment, we as contributors and maintainers pledge to making participation in our project and our community a harassment-free experience for everyone, regardless of age, body size, disability, ethnicity, gender identity and expression, level of experience, nationality, personal appearance, race, religion, or sexual identity and orientation.
## Our Standards
Examples of behavior that contributes to creating a positive environment include:
* Using welcoming and inclusive language
* Being respectful of differing viewpoints and experiences
* Gracefully accepting constructive criticism
* Focusing on what is best for the community
* Showing empathy towards other community members
Examples of unacceptable behavior by participants include:
* The use of sexualized language or imagery and unwelcome sexual attention or advances
* Trolling, insulting/derogatory comments, and personal or political attacks
* Public or private harassment
* Publishing others' private information, such as a physical or electronic address, without explicit permission
* Other conduct which could reasonably be considered inappropriate in a professional setting
## Our Responsibilities
Project maintainers are responsible for clarifying the standards of acceptable behavior and are expected to take appropriate and fair corrective action in response to any instances of unacceptable behavior.
Project maintainers have the right and responsibility to remove, edit, or reject comments, commits, code, wiki edits, issues, and other contributions that are not aligned to this Code of Conduct, or to ban temporarily or permanently any contributor for other behaviors that they deem inappropriate, threatening, offensive, or harmful.
## Scope
This Code of Conduct applies both within project spaces and in public spaces when an individual is representing the project or its community. Examples of representing a project or community include using an official project e-mail address, posting via an official social media account, or acting as an appointed representative at an online or offline event. Representation of a project may be further defined and clarified by project maintainers.
## Enforcement
Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by contacting the project team at julien.fontanet@vates.fr. The project team will review and investigate all complaints, and will respond in a way that it deems appropriate to the circumstances. The project team is obligated to maintain confidentiality with regard to the reporter of an incident. Further details of specific enforcement policies may be posted separately.
Project maintainers who do not follow or enforce the Code of Conduct in good faith may face temporary or permanent repercussions as determined by other members of the project's leadership.
## Attribution
This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, available at [http://contributor-covenant.org/version/1/4][version]
[homepage]: http://contributor-covenant.org
[version]: http://contributor-covenant.org/version/1/4/

16
flow-typed/lodash.js vendored Normal file
View File

@@ -0,0 +1,16 @@
declare module 'lodash' {
declare export function invert<K, V>(object: { [K]: V }): { [V]: K }
declare export function isEmpty(mixed): boolean
declare export function keyBy<T>(array: T[], iteratee: string): boolean
declare export function last<T>(array?: T[]): T | void
declare export function map<T1, T2>(
collection: T1[],
iteratee: (T1) => T2
): T2[]
declare export function mapValues<K, V1, V2>(
object: { [K]: V1 },
iteratee: (V1, K) => V2
): { [K]: V2 }
declare export function noop(...args: mixed[]): void
declare export function values<K, V>(object: { [K]: V }): V[]
}

11
flow-typed/promise-toolbox.js vendored Normal file
View File

@@ -0,0 +1,11 @@
// Minimal Flow library definitions for the subset of promise-toolbox used
// by this project; see the promise-toolbox package for the full contracts.
declare module 'promise-toolbox' {
// Wraps a function to make its returned promise cancelable — TODO confirm
// exact contract against promise-toolbox docs.
declare export function cancelable(Function): Function
// Creates a deferred: a promise together with its resolve/reject handles.
declare export function defer<T>(): {|
promise: Promise<T>,
reject: T => void,
resolve: T => void
|}
// Presumably resolves with the first matching event emitted by `emitter` —
// NOTE(review): verify event-name parameter semantics against callers.
declare export function fromEvent(emitter: mixed, string): Promise<mixed>
// Swallows rejections of the bound promise (used as `p::ignoreErrors()`).
declare export function ignoreErrors(): Promise<void>
// Presumably rejects the bound promise after `delay` ms — TODO confirm.
declare export function timeout<T>(delay: number): Promise<T>
}

2
flow-typed/xo.js vendored Normal file
View File

@@ -0,0 +1,2 @@
// Generic dictionary type: maps keys of type K (string by default) to
// values of type T.
// eslint-disable-next-line no-undef
declare type $Dict<T, K = string> = { [K]: T }

View File

@@ -6,14 +6,14 @@
"benchmark": "^2.1.4",
"eslint": "^4.14.0",
"eslint-config-standard": "^11.0.0-beta.0",
"eslint-config-standard-jsx": "^4.0.2",
"eslint-config-standard-jsx": "^5.0.0",
"eslint-plugin-import": "^2.8.0",
"eslint-plugin-node": "^6.0.0",
"eslint-plugin-promise": "^3.6.0",
"eslint-plugin-react": "^7.6.1",
"eslint-plugin-standard": "^3.0.1",
"exec-promise": "^0.7.0",
"flow-bin": "^0.66.0",
"flow-bin": "^0.67.1",
"globby": "^8.0.0",
"husky": "^0.14.3",
"jest": "^22.0.4",
@@ -43,6 +43,7 @@
"/packages/complex-matcher/.+\\.jsx?$": "babel-7-jest",
"/packages/value-matcher/.+\\.jsx?$": "babel-7-jest",
"/packages/xo-cli/.+\\.jsx?$": "babel-7-jest",
"/packages/xo-server/.+\\.jsx?$": "babel-7-jest",
"\\.jsx?$": "babel-jest"
}
},
@@ -56,7 +57,7 @@
"precommit": "scripts/lint-staged",
"prepare": "scripts/run-script prepare",
"pretest": "eslint --ignore-path .gitignore .",
"test": "jest && flow status"
"test": "jest"
},
"workspaces": [
"@xen-orchestra/*",

View File

@@ -1,47 +1,3 @@
'use strict'
const NODE_ENV = process.env.NODE_ENV || 'development'
const __PROD__ = NODE_ENV === 'production'
const __TEST__ = NODE_ENV === 'test'
const pkg = require('./package')
const plugins = {
lodash: {},
}
const presets = {
'@babel/preset-env': {
debug: !__TEST__,
loose: true,
shippedProposals: true,
targets: __PROD__
? (() => {
let node = (pkg.engines || {}).node
if (node !== undefined) {
const trimChars = '^=>~'
while (trimChars.includes(node[0])) {
node = node.slice(1)
}
return { node: node }
}
})()
: { browsers: '', node: 'current' },
useBuiltIns: '@babel/polyfill' in (pkg.dependencies || {}) && 'usage',
},
}
Object.keys(pkg.devDependencies || {}).forEach(name => {
if (!(name in presets) && /@babel\/plugin-.+/.test(name)) {
plugins[name] = {}
} else if (!(name in presets) && /@babel\/preset-.+/.test(name)) {
presets[name] = {}
}
})
module.exports = {
comments: !__PROD__,
ignore: __TEST__ ? undefined : [/\.spec\.js$/],
plugins: Object.keys(plugins).map(plugin => [plugin, plugins[plugin]]),
presets: Object.keys(presets).map(preset => [preset, presets[preset]]),
}
module.exports = require('../../@xen-orchestra/babel-config')(
require('./package.json')
)

View File

@@ -1,6 +1,6 @@
{
"name": "complex-matcher",
"version": "0.2.1",
"version": "0.3.0",
"license": "ISC",
"description": "",
"keywords": [],

View File

@@ -70,6 +70,29 @@ export class And extends Node {
}
}
// Comparison node: matches numeric values against a constant using a
// relational operator ('>', '>=', '<' or '<=').
export class Comparison extends Node {
  constructor (operator, value) {
    super()
    this._operator = operator
    this._value = value
    this._comparator = Comparison.comparators[operator]
  }

  // A value matches only if it is a number and satisfies the comparison.
  match (value) {
    if (typeof value !== 'number') {
      return false
    }
    return this._comparator(value, this._value)
  }

  // Serializes back to source form, e.g. '>=42'.
  toString () {
    return `${this._operator}${this._value}`
  }
}

Comparison.comparators = {
  '>': (a, b) => a > b,
  '>=': (a, b) => a >= b,
  '<': (a, b) => a < b,
  '<=': (a, b) => a <= b,
}
export class Or extends Node {
constructor (children) {
super()
@@ -408,6 +431,13 @@ const parser = P.grammar({
P.text(')')
).map(_ => new Or(_[4])),
P.seq(P.text('!'), r.ws, r.term).map(_ => new Not(_[2])),
P.seq(P.regex(/[<>]=?/), r.rawString).map(([op, val]) => {
val = +val
if (Number.isNaN(val)) {
throw new TypeError('value must be a number')
}
return new Comparison(op, val)
}),
P.seq(r.string, r.ws, P.text(':'), r.ws, r.term).map(
_ => new Property(_[0], _[4])
),

View File

@@ -1,47 +1,3 @@
'use strict'
// Babel build configuration, parameterized by NODE_ENV and by the
// package.json of the package being built.
const NODE_ENV = process.env.NODE_ENV || 'development'
const __PROD__ = NODE_ENV === 'production'
const __TEST__ = NODE_ENV === 'test'
const pkg = require('./package')
const plugins = {
lodash: {},
}
const presets = {
'@babel/preset-env': {
debug: !__TEST__,
loose: true,
shippedProposals: true,
// In production, target the minimal Node version declared in
// `engines.node` (any leading range operator ^ = > ~ stripped);
// otherwise target the current Node / no specific browsers.
targets: __PROD__
? (() => {
let node = (pkg.engines || {}).node
if (node !== undefined) {
const trimChars = '^=>~'
while (trimChars.includes(node[0])) {
node = node.slice(1)
}
return { node: node }
}
})()
: { browsers: '', node: 'current' },
// Enable on-demand polyfills only when @babel/polyfill is a runtime dep.
useBuiltIns: '@babel/polyfill' in (pkg.dependencies || {}) && 'usage',
},
}
// Automatically enable any @babel/plugin-* or @babel/preset-* listed in
// devDependencies (a name already present in `presets` is left alone).
Object.keys(pkg.devDependencies || {}).forEach(name => {
if (!(name in presets) && /@babel\/plugin-.+/.test(name)) {
plugins[name] = {}
} else if (!(name in presets) && /@babel\/preset-.+/.test(name)) {
presets[name] = {}
}
})
module.exports = {
// Strip comments from production output.
comments: !__PROD__,
// Skip spec files except when running tests.
ignore: __TEST__ ? undefined : [/\.spec\.js$/],
plugins: Object.keys(plugins).map(plugin => [plugin, plugins[plugin]]),
presets: Object.keys(presets).map(preset => [preset, presets[preset]]),
}
module.exports = require('../../@xen-orchestra/babel-config')(
require('./package.json')
)

View File

@@ -1,6 +1,6 @@
{
"name": "xen-api",
"version": "0.16.5",
"version": "0.16.6",
"license": "ISC",
"description": "Connector to the Xen API",
"keywords": [

View File

@@ -408,6 +408,7 @@ export class Xapi extends EventEmitter {
? Promise.reject(new Error(`cannot call ${method}() in read only mode`))
: this._sessionCall(`Async.${method}`, args).then(taskRef => {
$cancelToken.promise.then(() => {
// TODO: do not trigger if the task is already over
this._sessionCall('task.cancel', [taskRef]).catch(noop)
})
@@ -418,11 +419,10 @@ export class Xapi extends EventEmitter {
}
// create a task and automatically destroy it when settled
//
// allowed even in read-only mode because it does not have impact on the
// XenServer and it's necessary for getResource()
createTask (nameLabel, nameDescription = '') {
if (this._readOnly) {
return Promise.reject(new Error('cannot create task in read only mode'))
}
const promise = this._sessionCall('task.create', [
nameLabel,
nameDescription,

View File

@@ -1,47 +1,3 @@
'use strict'
const NODE_ENV = process.env.NODE_ENV || 'development'
const __PROD__ = NODE_ENV === 'production'
const __TEST__ = NODE_ENV === 'test'
const pkg = require('./package')
const plugins = {
lodash: {},
}
const presets = {
'@babel/preset-env': {
debug: !__TEST__,
loose: true,
shippedProposals: true,
targets: __PROD__
? (() => {
let node = (pkg.engines || {}).node
if (node !== undefined) {
const trimChars = '^=>~'
while (trimChars.includes(node[0])) {
node = node.slice(1)
}
return { node: node }
}
})()
: { browsers: '', node: 'current' },
useBuiltIns: '@babel/polyfill' in (pkg.dependencies || {}) && 'usage',
},
}
Object.keys(pkg.devDependencies || {}).forEach(name => {
if (!(name in presets) && /@babel\/plugin-.+/.test(name)) {
plugins[name] = {}
} else if (!(name in presets) && /@babel\/preset-.+/.test(name)) {
presets[name] = {}
}
})
module.exports = {
comments: !__PROD__,
ignore: __TEST__ ? undefined : [/\.spec\.js$/],
plugins: Object.keys(plugins).map(plugin => [plugin, plugins[plugin]]),
presets: Object.keys(presets).map(preset => [preset, presets[preset]]),
}
module.exports = require('../../@xen-orchestra/babel-config')(
require('./package.json')
)

View File

@@ -105,6 +105,12 @@ encoding by prefixing with `json:`:
> xo-cli foo.bar baz='json:[1, 2, 3]'
```
##### Configuration export
```
> xo-cli xo.exportConfig @=config.json
```
##### VM export
```

View File

@@ -62,7 +62,6 @@
"dev": "cross-env NODE_ENV=development babel --watch --source-maps --out-dir=dist/ src/",
"prebuild": "rimraf dist/",
"predev": "yarn run prebuild",
"prepublishOnly": "yarn run build",
"pretest": "flow status"
"prepublishOnly": "yarn run build"
}
}

View File

@@ -328,6 +328,15 @@ async function listObjects (args) {
}
exports.listObjects = listObjects
// Ensure the special `@` parameter of an API call is a filesystem path.
//
// method - name of the API method, used in the error message
// value  - value of the `@` parameter to validate
//
// Throws a TypeError when the value is not a string.
function ensurePathParam (method, value) {
  if (typeof value !== 'string') {
    // throw a proper Error instance (not a bare string) so callers get a
    // stack trace and `instanceof`/`.message` work as expected
    throw new TypeError(
      method + ' requires the @ parameter to be a path (e.g. @=/tmp/config.json)'
    )
  }
}
async function call (args) {
if (!args.length) {
throw new Error('missing command name')
@@ -350,6 +359,7 @@ async function call (args) {
key = keys[0]
if (key === '$getFrom') {
ensurePathParam(method, file)
url = resolveUrl(baseUrl, result[key])
const output = createWriteStream(file)
@@ -371,6 +381,7 @@ async function call (args) {
}
if (key === '$sendTo') {
ensurePathParam(method, file)
url = resolveUrl(baseUrl, result[key])
const stats = await stat(file)

View File

@@ -30,7 +30,7 @@
"node": ">=4"
},
"dependencies": {
"@xen-orchestra/cron": "^1.0.0",
"@xen-orchestra/cron": "^1.0.2",
"babel-runtime": "^6.11.6",
"lodash": "^4.16.2"
},

View File

@@ -1,47 +1,3 @@
'use strict'
const NODE_ENV = process.env.NODE_ENV || 'development'
const __PROD__ = NODE_ENV === 'production'
const __TEST__ = NODE_ENV === 'test'
const pkg = require('./package')
const plugins = {
lodash: {},
}
const presets = {
'@babel/preset-env': {
debug: !__TEST__,
loose: true,
shippedProposals: true,
targets: __PROD__
? (() => {
let node = (pkg.engines || {}).node
if (node !== undefined) {
const trimChars = '^=>~'
while (trimChars.includes(node[0])) {
node = node.slice(1)
}
return { node: node }
}
})()
: { browsers: '', node: 'current' },
useBuiltIns: '@babel/polyfill' in (pkg.dependencies || {}) && 'usage',
},
}
Object.keys(pkg.devDependencies || {}).forEach(name => {
if (!(name in presets) && /@babel\/plugin-.+/.test(name)) {
plugins[name] = {}
} else if (!(name in presets) && /@babel\/preset-.+/.test(name)) {
presets[name] = {}
}
})
module.exports = {
comments: !__PROD__,
ignore: __TEST__ ? undefined : [/\.spec\.js$/],
plugins: Object.keys(plugins).map(plugin => [plugin, plugins[plugin]]),
presets: Object.keys(presets).map(preset => [preset, presets[preset]]),
}
module.exports = require('../../@xen-orchestra/babel-config')(
require('./package.json')
)

View File

@@ -20,9 +20,9 @@
"node": ">=6"
},
"dependencies": {
"@xen-orchestra/cron": "^1.0.0",
"@xen-orchestra/cron": "^1.0.2",
"d3-time-format": "^2.1.1",
"json5": "^0.5.1",
"json5": "^1.0.0",
"lodash": "^4.17.4"
},
"devDependencies": {

View File

@@ -34,7 +34,7 @@
"node": ">=4"
},
"dependencies": {
"@xen-orchestra/cron": "^1.0.0",
"@xen-orchestra/cron": "^1.0.2",
"babel-runtime": "^6.23.0",
"handlebars": "^4.0.6",
"html-minifier": "^3.5.8",

View File

@@ -0,0 +1,3 @@
module.exports = require('../../@xen-orchestra/babel-config')(
require('./package.json')
)

View File

@@ -0,0 +1,13 @@
#!/usr/bin/env node
'use strict'
// Replace the global Promise implementation with Bluebird before anything
// else loads.
global.Promise = require('bluebird')
// Log possibly unhandled rejections instead of staying silent.
process.on('unhandledRejection', function (reason) {
  console.warn('[Warn] Possibly unhandled rejection:', reason && reason.stack || reason)
})
// Run the compiled vhd-test entry point through exec-promise, which handles
// the process exit code and error reporting.
require("exec-promise")(require("../dist/vhd-test").default)

View File

@@ -1,6 +1,6 @@
{
"name": "xo-server",
"version": "5.16.0",
"version": "5.17.4",
"license": "AGPL-3.0",
"description": "Server part of Xen-Orchestra",
"keywords": [
@@ -15,7 +15,6 @@
"type": "git",
"url": "https://github.com/vatesfr/xen-orchestra.git"
},
"author": "Julien Fontanet <julien.fontanet@vates.fr>",
"preferGlobal": true,
"files": [
"better-stacks.js",
@@ -29,16 +28,16 @@
"bin": "bin"
},
"engines": {
"node": ">=4.5"
"node": ">=6"
},
"dependencies": {
"@babel/polyfill": "7.0.0-beta.40",
"@marsaud/smb2-promise": "^0.2.1",
"@nraynaud/struct-fu": "^1.0.1",
"@xen-orchestra/cron": "^1.0.0",
"@xen-orchestra/cron": "^1.0.2",
"ajv": "^6.1.1",
"app-conf": "^0.5.0",
"archiver": "^2.1.0",
"babel-runtime": "^6.26.0",
"base64url": "^2.0.0",
"bind-property-descriptor": "^1.0.0",
"blocked": "^1.2.1",
@@ -71,14 +70,14 @@
"is-redirect": "^1.0.0",
"js-yaml": "^3.10.0",
"json-rpc-peer": "^0.15.3",
"json5": "^0.5.1",
"json5": "^1.0.0",
"julien-f-source-map-support": "0.1.0",
"julien-f-unzip": "^0.2.1",
"kindof": "^2.0.0",
"level": "^3.0.0",
"level-party": "^3.0.4",
"level-sublevel": "^6.6.1",
"limit-concurrency-decorator": "^0.3.0",
"limit-concurrency-decorator": "^0.4.0",
"lodash": "^4.17.4",
"make-error": "^1",
"micromatch": "^3.1.4",
@@ -102,13 +101,14 @@
"serve-static": "^1.13.1",
"split-lines": "^1.1.0",
"stack-chain": "^2.0.0",
"stoppable": "^1.0.5",
"tar-stream": "^1.5.5",
"through2": "^2.0.3",
"tmp": "^0.0.33",
"uuid": "^3.0.1",
"value-matcher": "^0.1.0",
"ws": "^4.0.0",
"xen-api": "^0.16.5",
"value-matcher": "^0.2.0",
"ws": "^5.0.0",
"xen-api": "^0.16.6",
"xml2js": "^0.4.19",
"xo-acl-resolver": "^0.2.3",
"xo-collection": "^0.4.1",
@@ -117,13 +117,18 @@
"xo-vmdk-to-vhd": "0.0.12"
},
"devDependencies": {
"babel-cli": "^6.26.0",
"babel-core": "^6.26.0",
"@babel/cli": "7.0.0-beta.40",
"@babel/core": "7.0.0-beta.40",
"@babel/plugin-proposal-decorators": "7.0.0-beta.40",
"@babel/plugin-proposal-export-default-from": "7.0.0-beta.40",
"@babel/plugin-proposal-export-namespace-from": "7.0.0-beta.40",
"@babel/plugin-proposal-function-bind": "7.0.0-beta.40",
"@babel/plugin-proposal-optional-chaining": "^7.0.0-beta.40",
"@babel/plugin-proposal-pipeline-operator": "^7.0.0-beta.40",
"@babel/plugin-proposal-throw-expressions": "^7.0.0-beta.40",
"@babel/preset-env": "7.0.0-beta.40",
"@babel/preset-flow": "7.0.0-beta.40",
"babel-plugin-lodash": "^3.3.2",
"babel-plugin-transform-decorators-legacy": "^1.3.4",
"babel-plugin-transform-runtime": "^6.23.0",
"babel-preset-env": "^1.6.1",
"babel-preset-stage-0": "^6.24.1",
"cross-env": "^5.1.3",
"index-modules": "^0.3.0",
"rimraf": "^2.6.2"
@@ -136,23 +141,5 @@
"predev": "yarn run prebuild",
"prepublishOnly": "yarn run build",
"start": "node bin/xo-server"
},
"babel": {
"plugins": [
"lodash",
"transform-decorators-legacy",
"transform-runtime"
],
"presets": [
[
"env",
{
"targets": {
"node": 4
}
}
],
"stage-0"
]
}
}

View File

@@ -0,0 +1,68 @@
import { Readable } from 'stream'

// Return the next value of the iterator but if it is a promise, resolve it
// and reinject it.
//
// This enables the use of a simple generator instead of an async generator
// (which are less widely supported).
const next = async (iterator, arg) => {
  let cursor = iterator.next(arg)
  if (typeof cursor.then === 'function') {
    // async iterator: the cursor itself is a promise, let the caller await it
    return cursor
  }
  let value
  while (
    !cursor.done &&
    (value = cursor.value) != null &&
    typeof value.then === 'function'
  ) {
    // the generator yielded a promise: await it, then feed the result (or
    // the error) back into the generator
    let success = false
    try {
      value = await value
      success = true
    } catch (error) {
      cursor = iterator.throw(error)
    }
    if (success) {
      cursor = iterator.next(value)
    }
  }
  return cursor
}

// Create a readable stream from a generator
//
// generator can be async or can yield promises to wait for them
export const createReadable = (generator, options) => {
  const readable = new Readable(options)
  // the generator is only instantiated on the first _read call, so no work
  // is done if the stream is never consumed
  readable._read = size => {
    const iterator = generator(size)
    readable._destroy = (error, cb) => {
      // propagate the destruction into the generator so its cleanup runs
      iterator.throw(error)
      cb(error)
    }
    let running = false
    // replace _read with the real implementation now that the iterator exists
    const read = (readable._read = async size => {
      // Readable may call _read again while a previous async call is still
      // pending: ignore re-entrant calls
      if (running) {
        return
      }
      running = true
      try {
        let cursor
        do {
          cursor = await next(iterator, size)
          if (cursor.done) {
            // generator exhausted: signal end-of-stream
            return readable.push(null)
          }
          // keep pushing while the internal buffer accepts more data
        } while (readable.push(cursor.value))
      } catch (error) {
        // NOTE(review): emits 'error' instead of destroying the stream —
        // presumably predates Readable#destroy; confirm before changing
        readable.emit('error', error)
      } finally {
        running = false
      }
    })
    return read(size)
  }
  return readable
}

View File

@@ -0,0 +1,155 @@
// Create a new NG backup job owned by the calling user.
//
// `schedules` is split out of the job properties and passed separately so
// the schedules can be created alongside the job.
export function createJob ({ schedules, ...job }) {
  return this.createBackupNgJob({ ...job, userId: this.user.id }, schedules)
}

createJob.permission = 'admin'

createJob.params = {
  compression: { enum: ['', 'native'], optional: true },
  mode: { enum: ['full', 'delta'] },
  name: { type: 'string', optional: true },
  remotes: { type: 'object', optional: true },
  schedules: { type: 'object', optional: true },
  settings: { type: 'object' },
  vms: { type: 'object' },
}
// Delete a backup NG job by id.
export function deleteJob (params) {
  return this.deleteBackupNgJob(params.id)
}

deleteJob.permission = 'admin'

deleteJob.params = {
  id: { type: 'string' },
}
// Update an existing backup NG job; only the provided properties change.
export function editJob (props) {
  return this.updateJob(props)
}

editJob.permission = 'admin'

editJob.params = {
  compression: { enum: ['', 'native'], optional: true },
  id: { type: 'string' },
  mode: { enum: ['full', 'delta'], optional: true },
  name: { type: 'string', optional: true },
  remotes: { type: 'object', optional: true },
  settings: { type: 'object', optional: true },
  vms: { type: 'object', optional: true },
}
// List every job of type `backup` (the type used by backups NG).
export function getAllJobs () {
  return this.getAllJobs('backup')
}

getAllJobs.permission = 'admin'

// Fetch a single `backup` job by id.
export function getJob (params) {
  return this.getJob(params.id, 'backup')
}

getJob.permission = 'admin'

getJob.params = {
  id: { type: 'string' },
}
// Run a job immediately, in the context of the given schedule.
export async function runJob ({ id, schedule }) {
  const loadedSchedule = await this.getSchedule(schedule)
  return this.runJobSequence([id], loadedSchedule)
}

runJob.permission = 'admin'

runJob.params = {
  id: { type: 'string' },
  schedule: { type: 'string' },
}
// -----------------------------------------------------------------------------

// Delete a single NG VM backup by id.
export function deleteVmBackup (params) {
  return this.deleteVmBackupNg(params.id)
}

deleteVmBackup.permission = 'admin'

deleteVmBackup.params = {
  id: { type: 'string' },
}

// List the VM backups available on the given remotes.
export function listVmBackups (params) {
  return this.listVmBackupsNg(params.remotes)
}

listVmBackups.permission = 'admin'

listVmBackups.params = {
  remotes: {
    type: 'array',
    items: { type: 'string' },
  },
}

// Restore a VM backup onto the given SR.
export function importVmBackup (params) {
  return this.importVmBackupNg(params.id, params.sr)
}

importVmBackup.permission = 'admin'

importVmBackup.params = {
  id: { type: 'string' },
  sr: { type: 'string' },
}

View File

@@ -1,14 +1,14 @@
// FIXME so far, no acls for jobs
export async function getAll () {
return /* await */ this.getAllJobs()
return /* await */ this.getAllJobs('call')
}
getAll.permission = 'admin'
getAll.description = 'Gets all available jobs'
export async function get (id) {
return /* await */ this.getJob(id)
return /* await */ this.getJob(id, 'call')
}
get.permission = 'admin'

View File

@@ -99,11 +99,14 @@ set.params = {
// -------------------------------------------------------------------
export function get ({ id }) {
const { user } = this
if (!user) {
throw unauthorized()
}
return this.getResourceSet(id)
}
get.permission = 'admin'
get.params = {
id: {
type: 'string',

View File

@@ -17,41 +17,44 @@ get.params = {
id: { type: 'string' },
}
export async function create ({ jobId, cron, enabled, name, timezone }) {
return /* await */ this.createSchedule(this.session.get('user_id'), {
job: jobId,
export function create ({ cron, enabled, jobId, name, timezone }) {
return this.createSchedule({
cron,
enabled,
jobId,
name,
timezone,
userId: this.session.get('user_id'),
})
}
create.permission = 'admin'
create.description = 'Creates a new schedule'
create.params = {
jobId: { type: 'string' },
cron: { type: 'string' },
enabled: { type: 'boolean', optional: true },
jobId: { type: 'string' },
name: { type: 'string', optional: true },
timezone: { type: 'string', optional: true },
}
export async function set ({ id, jobId, cron, enabled, name, timezone }) {
await this.updateSchedule(id, { job: jobId, cron, enabled, name, timezone })
export async function set ({ cron, enabled, id, jobId, name, timezone }) {
await this.updateSchedule({ cron, enabled, id, jobId, name, timezone })
}
set.permission = 'admin'
set.description = 'Modifies an existing schedule'
set.params = {
id: { type: 'string' },
jobId: { type: 'string', optional: true },
cron: { type: 'string', optional: true },
enabled: { type: 'boolean', optional: true },
id: { type: 'string' },
jobId: { type: 'string', optional: true },
name: { type: 'string', optional: true },
timezone: { type: 'string', optional: true },
}
async function delete_ ({ id }) {
await this.removeSchedule(id)
await this.deleteSchedule(id)
}
delete_.permission = 'admin'

View File

@@ -1,30 +0,0 @@
// Enable a schedule so its job runs automatically as scheduled.
//
// id - identifier of the schedule to enable
export async function enable ({ id }) {
  const schedule = await this.getSchedule(id)
  schedule.enabled = true
  await this.updateSchedule(id, schedule)
}

enable.permission = 'admin'

// fixed typo in the user-facing description: "it's" → "its"
enable.description = "Enables a schedule to run its job as scheduled"

enable.params = {
  id: { type: 'string' },
}
// Disable a schedule so its job stops running automatically.
export async function disable (params) {
  const schedule = await this.getSchedule(params.id)
  schedule.enabled = false
  await this.updateSchedule(params.id, schedule)
}

disable.permission = 'admin'
disable.description = 'Disables a schedule'
disable.params = {
  id: { type: 'string' },
}
// Returns the map of schedule id → enabled/disabled state.
export function getScheduleTable () {
  return this.scheduleTable
}

// FIX: these two assignments were mistakenly made to `disable`, which
// clobbered disable's description and left getScheduleTable without any
// permission check or description.
getScheduleTable.permission = 'admin'
getScheduleTable.description = 'Get a map of existing schedules enabled/disabled state'

View File

@@ -65,7 +65,11 @@ export async function copyVm ({ vm, sr }) {
console.log('export delta VM...')
const input = await srcXapi.exportDeltaVm(vm)
console.log('import delta VM...')
await tgtXapi.deleteVm(await tgtXapi.importDeltaVm(input, { srId: sr }))
const { transferSize, vm: copyVm } = await tgtXapi.importDeltaVm(input, {
srId: sr,
})
console.log('transfered size:', transferSize)
await tgtXapi.deleteVm(copyVm)
}
}

View File

@@ -160,12 +160,11 @@ export async function create (params) {
])
}
for (const vifId of vm.VIFs) {
const vif = this.getObject(vifId, 'VIF')
for (const vif of xapiVm.$VIFs) {
await this.allocIpAddresses(
vifId,
concat(vif.allowedIpv4Addresses, vif.allowedIpv6Addresses)
).catch(() => xapi.deleteVif(vif._xapiId))
vif.$id,
concat(vif.ipv4_allowed, vif.ipv6_allowed)
).catch(() => xapi.deleteVif(vif))
}
if (params.bootAfterCreate) {
@@ -356,8 +355,10 @@ async function delete_ ({
)
// Update resource sets
const resourceSet = xapi.xo.getData(vm._xapiId, 'resourceSet')
if (resourceSet != null) {
if (
vm.type === 'VM' && // only regular VMs
xapi.xo.getData(vm._xapiId, 'resourceSet') != null
) {
;this.setVmResourceSet(vm._xapiId, null)::ignoreErrors()
}
@@ -411,7 +412,9 @@ insertCd.params = {
insertCd.resolve = {
vm: ['id', 'VM', 'operate'],
vdi: ['cd_id', 'VDI', 'view'],
// Not compatible with resource sets.
// FIXME: find a workaround.
vdi: ['cd_id', 'VDI', ''],
}
// -------------------------------------------------------------------
@@ -773,7 +776,7 @@ export function importDeltaBackup ({ sr, remote, filePath, mapVdisSrs }) {
remoteId: remote,
filePath,
mapVdisSrs: mapVdisSrsXapi,
})
}).then(_ => _.vm)
}
importDeltaBackup.params = {
@@ -1054,12 +1057,12 @@ export function revert ({ snapshot, snapshotBefore }) {
}
revert.params = {
id: { type: 'string' },
snapshot: { type: 'string' },
snapshotBefore: { type: 'boolean', optional: true },
}
revert.resolve = {
snapshot: ['id', 'VM-snapshot', 'administrate'],
snapshot: ['snapshot', 'VM-snapshot', 'administrate'],
}
// -------------------------------------------------------------------

View File

@@ -665,7 +665,9 @@ export const createSR = defer(async function (
CURRENT_POOL_OPERATIONS[poolId] = { ...OPERATION_OBJECT, state: 0 }
const tmpBoundObjectId = srs.join(',')
const tmpBoundObjectId = `tmp_${srs.join(',')}_${Math.random()
.toString(32)
.slice(2)}`
const license = await this.createBoundXosanTrialLicense({
boundObjectId: tmpBoundObjectId,
})

View File

@@ -1,6 +1,6 @@
import { EventEmitter } from 'events'
import { createRawObject, noop } from './utils'
import { noop } from './utils'
// ===================================================================
@@ -8,7 +8,7 @@ export default class Connection extends EventEmitter {
constructor () {
super()
this._data = createRawObject()
this._data = { __proto__: null }
}
// Close the connection.

View File

@@ -3,16 +3,17 @@ import bind from 'lodash/bind'
import blocked from 'blocked'
import createExpress from 'express'
import createLogger from 'debug'
import eventToPromise from 'event-to-promise'
import has from 'lodash/has'
import helmet from 'helmet'
import includes from 'lodash/includes'
import proxyConsole from './proxy-console'
import serveStatic from 'serve-static'
import startsWith from 'lodash/startsWith'
import stoppable from 'stoppable'
import WebSocket from 'ws'
import { compile as compilePug } from 'pug'
import { createServer as createProxyServer } from 'http-proxy'
import { fromEvent } from 'promise-toolbox'
import { join as joinPath } from 'path'
import JsonRpcPeer from 'json-rpc-peer'
@@ -22,7 +23,6 @@ import { ensureDir, readdir, readFile } from 'fs-extra'
import WebServer from 'http-server-plus'
import Xo from './xo'
import {
createRawObject,
forEach,
isArray,
isFunction,
@@ -103,7 +103,7 @@ function createExpressApp () {
}
async function setUpPassport (express, xo) {
const strategies = createRawObject()
const strategies = { __proto__: null }
xo.registerPassportStrategy = strategy => {
passport.use(strategy)
@@ -333,7 +333,7 @@ async function makeWebServerListen (
}
async function createWebServer ({ listen, listenOptions }) {
const webServer = new WebServer()
const webServer = stoppable(new WebServer())
await Promise.all(
mapToArray(listen, opts =>
@@ -566,7 +566,7 @@ export default async function main (args) {
const xo = new Xo(config)
// Register web server close on XO stop.
xo.on('stop', () => pFromCallback(cb => webServer.close(cb)))
xo.on('stop', () => pFromCallback(cb => webServer.stop(cb)))
// Connects to all registered servers.
await xo.start()
@@ -645,7 +645,7 @@ export default async function main (args) {
})
})
await eventToPromise(xo, 'stopped')
await fromEvent(xo, 'stopped')
debug('bye :-)')
}

View File

@@ -1,186 +0,0 @@
import { BaseError } from 'make-error'
import { createPredicate } from 'value-matcher'
import { timeout } from 'promise-toolbox'
import { assign, filter, find, isEmpty, map, mapValues } from 'lodash'
import { crossProduct } from './math'
import { asyncMap, serializeError, thunkToArray } from './utils'
// Base class of every error raised by the job executor.
export class JobExecutorError extends BaseError {}
// Raised when a job's `type` is not handled by this executor (only 'call'
// jobs are supported by `exec()`).
export class UnsupportedJobType extends JobExecutorError {
  constructor (job) {
    super('Unknown job type: ' + job.type)
  }
}
// Raised when a params vector `type` is not supported.
// NOTE(review): not thrown anywhere in this file — resolveParamsVector throws
// a plain Error instead; confirm external usage before removing.
export class UnsupportedVectorType extends JobExecutorError {
  constructor (vector) {
    super('Unknown vector type: ' + vector.type)
  }
}
// ===================================================================
// Map of params vector types to the function that resolves them into values.
//
// Each action is invoked with `this` bound to the JobExecutor so it can
// access `this.xo`.
const paramsVectorActionsMap = {
  // pick properties of `value`, renamed according to `mapping` (newKey: oldKey)
  extractProperties ({ mapping, value }) {
    return mapValues(mapping, key => value[key])
  },
  // cartesian product of the resolved `items` vectors
  crossProduct ({ items }) {
    return thunkToArray(
      crossProduct(map(items, value => resolveParamsVector.call(this, value)))
    )
  },
  // all XO objects matching `pattern` (see value-matcher); throws when the
  // pattern matches nothing
  fetchObjects ({ pattern }) {
    const objects = filter(this.xo.getObjects(), createPredicate(pattern))
    if (isEmpty(objects)) {
      throw new Error('no objects match this pattern')
    }
    return objects
  },
  // resolve `collection`, then resolve `iteratee` once per value, injecting
  // each value under the `paramName` key
  map ({ collection, iteratee, paramName = 'value' }) {
    return map(resolveParamsVector.call(this, collection), value => {
      return resolveParamsVector.call(this, {
        ...iteratee,
        [paramName]: value,
      })
    })
  },
  // constant list of values
  set: ({ values }) => values,
}

// Resolve a params vector into the list of parameter objects for a job.
//
// Must be called with `this` bound to the JobExecutor.
export function resolveParamsVector (paramsVector) {
  const visitor = paramsVectorActionsMap[paramsVector.type]
  if (!visitor) {
    throw new Error(`Unsupported function '${paramsVector.type}'.`)
  }
  return visitor.call(this, paramsVector)
}
// ===================================================================
// Executes 'call' jobs: resolves their params vector into a list of
// parameter objects and invokes the job's API method once per object.
export default class JobExecutor {
  constructor (xo) {
    this.xo = xo
    // The logger is not available until Xo has started.
    xo.on('start', () =>
      xo.getLogger('jobs').then(logger => {
        this._logger = logger
      })
    )
  }
  // Run a job, logging job.start/job.end events; rethrows any failure after
  // logging it.
  async exec (job) {
    const runJobId = this._logger.notice(`Starting execution of ${job.id}.`, {
      event: 'job.start',
      userId: job.userId,
      jobId: job.id,
      key: job.key,
    })
    try {
      if (job.type === 'call') {
        const execStatus = await this._execCall(job, runJobId)
        this.xo.emit('job:terminated', execStatus)
      } else {
        throw new UnsupportedJobType(job)
      }
      this._logger.notice(`Execution terminated for ${job.id}.`, {
        event: 'job.end',
        runJobId,
      })
    } catch (error) {
      this._logger.error(`The execution of ${job.id} has failed.`, {
        event: 'job.end',
        runJobId,
        error: serializeError(error),
      })
      throw error
    }
  }
  // Execute a 'call' job: one API call per entry of the resolved params
  // vector, all performed through a dedicated user connection.
  //
  // Returns an execStatus object describing every call (params, returned
  // value or error, timings).
  async _execCall (job, runJobId) {
    const { paramsVector } = job
    const paramsFlatVector = paramsVector
      ? resolveParamsVector.call(this, paramsVector)
      : [{}] // One call with no parameters
    // impersonate the job's owner for API permission checks
    const connection = this.xo.createUserConnection()
    connection.set('user_id', job.userId)
    const schedule = find(await this.xo.getAllSchedules(), { job: job.id })
    const execStatus = {
      calls: {},
      runJobId,
      start: Date.now(),
      timezone: schedule !== undefined ? schedule.timezone : undefined,
    }
    await asyncMap(paramsFlatVector, params => {
      const runCallId = this._logger.notice(
        `Starting ${job.method} call. (${job.id})`,
        {
          event: 'jobCall.start',
          runJobId,
          method: job.method,
          params,
        }
      )
      const call = (execStatus.calls[runCallId] = {
        method: job.method,
        params,
        start: Date.now(),
      })
      let promise = this.xo.callApiMethod(
        connection,
        job.method,
        assign({}, params)
      )
      if (job.timeout) {
        promise = promise::timeout(job.timeout)
      }
      return promise.then(
        value => {
          this._logger.notice(
            `Call ${job.method} (${runCallId}) is a success. (${job.id})`,
            {
              event: 'jobCall.end',
              runJobId,
              runCallId,
              returnedValue: value,
            }
          )
          call.returnedValue = value
          call.end = Date.now()
        },
        reason => {
          // a failed call is recorded but does not abort the other calls
          this._logger.notice(
            `Call ${job.method} (${runCallId}) has failed. (${job.id})`,
            {
              event: 'jobCall.end',
              runJobId,
              runCallId,
              error: serializeError(reason),
            }
          )
          call.error = reason
          call.end = Date.now()
        }
      )
    })
    connection.close()
    execStatus.end = Date.now()
    return execStatus
  }
}

View File

@@ -1,43 +0,0 @@
import Collection from '../collection/redis'
import Model from '../model'
import { forEach } from '../utils'
import { parseProp } from './utils'
// ===================================================================
// Model representing a job record.
export default class Job extends Model {}
// Redis-backed collection of jobs.
//
// `paramsVector` is an object: it is serialized to JSON on create/save and
// parsed back on get, because the backing store only holds strings.
export class Jobs extends Collection {
  get Model () {
    return Job
  }
  async create (job) {
    // Serializes.
    job.paramsVector = JSON.stringify(job.paramsVector)
    return /* await */ this.add(new Job(job))
  }
  async save (job) {
    // Serializes.
    job.paramsVector = JSON.stringify(job.paramsVector)
    return /* await */ this.update(job)
  }
  async get (properties) {
    const jobs = await super.get(properties)
    // Deserializes.
    forEach(jobs, job => {
      job.paramsVector = parseProp('job', job, 'paramsVector', {})
      // stored numbers come back as strings: convert `timeout` to a number
      const { timeout } = job
      if (timeout !== undefined) {
        job.timeout = +timeout
      }
    })
    return jobs
  }
}

View File

@@ -1,38 +0,0 @@
import Collection from '../collection/redis'
import Model from '../model'
import { forEach } from '../utils'
// ===================================================================
// Model representing a schedule record.
export default class Schedule extends Model {}
// Redis-backed collection of schedules.
export class Schedules extends Collection {
  get Model () {
    return Schedule
  }
  create (userId, job, cron, enabled, name = undefined, timezone = undefined) {
    return this.add(
      new Schedule({
        userId,
        job,
        cron,
        enabled,
        name,
        timezone,
      })
    )
  }
  async save (schedule) {
    return /* await */ this.update(schedule)
  }
  async get (properties) {
    const schedules = await super.get(properties)
    // stored booleans come back as strings: convert `enabled` to a boolean
    forEach(schedules, schedule => {
      schedule.enabled = schedule.enabled === 'true'
    })
    return schedules
  }
}

View File

@@ -0,0 +1,15 @@
// @flow
// Patch `o` in place with the properties of `p`:
// - a `null` value deletes the corresponding property from `o`
// - an `undefined` value leaves `o` untouched (allows sparse patches)
// - any other value is assigned
const patch = <T: {}>(o: T, p: $Shape<T>) => {
  Object.keys(p).forEach(k => {
    const v: any = p[k]
    if (v === null) {
      delete o[k]
    } else if (v !== undefined) {
      o[k] = v
    }
  })
}
export { patch as default }

View File

@@ -1,50 +1,58 @@
import eventToPromise from 'event-to-promise'
import through2 from 'through2'
import { ignoreErrors } from 'promise-toolbox'
// @flow
import { type Readable, type Writable } from 'stream'
import { fromEvent, ignoreErrors } from 'promise-toolbox'
import { parse } from 'xo-remote-parser'
import {
addChecksumToReadStream,
getPseudoRandomBytes,
streamToBuffer,
validChecksumOfReadStream,
} from '../utils'
import { getPseudoRandomBytes, streamToBuffer } from '../utils'
import { createChecksumStream, validChecksumOfReadStream } from './checksum'
type Data = Buffer | Readable | string
type FileDescriptor = {| fd: mixed, path: string |}
type LaxReadable = Readable & Object
type LaxWritable = Writable & Object
type File = FileDescriptor | string
const checksumFile = file => file + '.checksum'
export default class RemoteHandlerAbstract {
constructor (remote) {
_remote: Object
constructor (remote: any) {
this._remote = { ...remote, ...parse(remote.url) }
if (this._remote.type !== this.type) {
throw new Error('Incorrect remote type')
}
}
get type () {
get type (): string {
throw new Error('Not implemented')
}
/**
 * Asks the handler to sync the state of the effective remote with its metadata
*/
async sync () {
async sync (): Promise<mixed> {
return this._sync()
}
async _sync () {
async _sync (): Promise<mixed> {
throw new Error('Not implemented')
}
/**
* Free the resources possibly dedicated to put the remote at work, when it is no more needed
*/
async forget () {
return this._forget()
async forget (): Promise<void> {
await this._forget()
}
async _forget () {
async _forget (): Promise<void> {
throw new Error('Not implemented')
}
async test () {
async test (): Promise<Object> {
const testFileName = `${Date.now()}.test`
const data = getPseudoRandomBytes(1024 * 1024)
let step = 'write'
@@ -66,55 +74,81 @@ export default class RemoteHandlerAbstract {
error: error.message || String(error),
}
} finally {
;this.unlink(testFileName)::ignoreErrors()
ignoreErrors.call(this.unlink(testFileName))
}
}
async outputFile (file, data, options) {
async outputFile (file: string, data: Data, options?: Object): Promise<void> {
return this._outputFile(file, data, {
flags: 'wx',
...options,
})
}
async _outputFile (file, data, options) {
async _outputFile (file: string, data: Data, options?: Object): Promise<void> {
const stream = await this.createOutputStream(file, options)
const promise = eventToPromise(stream, 'finish')
const promise = fromEvent(stream, 'finish')
stream.end(data)
return promise
await promise
}
async readFile (file, options) {
async readFile (file: string, options?: Object): Promise<Buffer> {
return this._readFile(file, options)
}
_readFile (file, options) {
_readFile (file: string, options?: Object): Promise<Buffer> {
return this.createReadStream(file, options).then(streamToBuffer)
}
async rename (oldPath, newPath) {
return this._rename(oldPath, newPath)
async rename (
oldPath: string,
newPath: string,
{ checksum = false }: Object = {}
) {
let p = this._rename(oldPath, newPath)
if (checksum) {
p = Promise.all([
p,
this._rename(checksumFile(oldPath), checksumFile(newPath)),
])
}
return p
}
async _rename (oldPath, newPath) {
async _rename (oldPath: string, newPath: string) {
throw new Error('Not implemented')
}
async list (dir = '.') {
return this._list(dir)
async list (
dir: string = '.',
{
filter,
prependDir = false,
}: { filter?: (name: string) => boolean, prependDir?: boolean } = {}
): Promise<string[]> {
const entries = await this._list(dir)
if (prependDir) {
entries.forEach((entry, i) => {
entries[i] = dir + '/' + entry
})
}
return filter === undefined ? entries : entries.filter(filter)
}
async _list (dir) {
async _list (dir: string): Promise<string[]> {
throw new Error('Not implemented')
}
createReadStream (
file,
{ checksum = false, ignoreMissingChecksum = false, ...options } = {}
) {
file: string,
{ checksum = false, ignoreMissingChecksum = false, ...options }: Object = {}
): Promise<LaxReadable> {
const path = typeof file === 'string' ? file : file.path
const streamP = this._createReadStream(file, options).then(stream => {
// detect early errors
let promise = eventToPromise(stream, 'readable')
let promise = fromEvent(stream, 'readable')
// try to add the length prop if missing and not a range stream
if (
@@ -124,11 +158,11 @@ export default class RemoteHandlerAbstract {
) {
promise = Promise.all([
promise,
this.getSize(file)
.then(size => {
ignoreErrors.call(
this.getSize(file).then(size => {
stream.length = size
})
::ignoreErrors(),
),
])
}
@@ -140,13 +174,16 @@ export default class RemoteHandlerAbstract {
}
// avoid an unhandled rejection warning
;streamP::ignoreErrors()
ignoreErrors.call(streamP)
return this.readFile(`${file}.checksum`).then(
return this.readFile(checksumFile(path)).then(
checksum =>
streamP.then(stream => {
const { length } = stream
stream = validChecksumOfReadStream(stream, String(checksum).trim())
stream = (validChecksumOfReadStream(
stream,
String(checksum).trim()
): LaxReadable)
stream.length = length
return stream
@@ -160,18 +197,42 @@ export default class RemoteHandlerAbstract {
)
}
async _createReadStream (file, options) {
async _createReadStream (
file: string,
options?: Object
): Promise<LaxReadable> {
throw new Error('Not implemented')
}
async refreshChecksum (path) {
const stream = addChecksumToReadStream(await this.createReadStream(path))
stream.resume() // start reading the whole file
const checksum = await stream.checksum
await this.outputFile(`${path}.checksum`, checksum)
async openFile (path: string, flags?: string): Promise<FileDescriptor> {
return { fd: await this._openFile(path, flags), path }
}
async createOutputStream (file, { checksum = false, ...options } = {}) {
async _openFile (path: string, flags?: string): Promise<mixed> {
throw new Error('Not implemented')
}
async closeFile (fd: FileDescriptor): Promise<void> {
await this._closeFile(fd.fd)
}
async _closeFile (fd: mixed): Promise<void> {
throw new Error('Not implemented')
}
async refreshChecksum (path: string): Promise<void> {
const stream = (await this.createReadStream(path)).pipe(
createChecksumStream()
)
stream.resume() // start reading the whole file
await this.outputFile(checksumFile(path), await stream.checksum)
}
async createOutputStream (
file: File,
{ checksum = false, ...options }: Object = {}
): Promise<LaxWritable> {
const path = typeof file === 'string' ? file : file.path
const streamP = this._createOutputStream(file, {
flags: 'wx',
...options,
@@ -181,42 +242,47 @@ export default class RemoteHandlerAbstract {
return streamP
}
const connectorStream = through2()
const checksumStream = createChecksumStream()
const forwardError = error => {
connectorStream.emit('error', error)
checksumStream.emit('error', error)
}
const streamWithChecksum = addChecksumToReadStream(connectorStream)
streamWithChecksum.pipe(await streamP)
const stream = await streamP
stream.on('error', forwardError)
checksumStream.pipe(stream)
streamWithChecksum.checksum
.then(value => this.outputFile(`${file}.checksum`, value))
// $FlowFixMe
checksumStream.checksumWritten = checksumStream.checksum
.then(value => this.outputFile(checksumFile(path), value))
.catch(forwardError)
return connectorStream
return checksumStream
}
async _createOutputStream (file, options) {
async _createOutputStream (
file: mixed,
options?: Object
): Promise<LaxWritable> {
throw new Error('Not implemented')
}
async unlink (file, { checksum = true } = {}) {
async unlink (file: string, { checksum = true }: Object = {}): Promise<void> {
if (checksum) {
;this._unlink(`${file}.checksum`)::ignoreErrors()
ignoreErrors.call(this._unlink(checksumFile(file)))
}
return this._unlink(file)
await this._unlink(file)
}
async _unlink (file) {
async _unlink (file: mixed): Promise<void> {
throw new Error('Not implemented')
}
async getSize (file) {
async getSize (file: mixed): Promise<number> {
return this._getSize(file)
}
async _getSize (file) {
async _getSize (file: mixed): Promise<number> {
throw new Error('Not implemented')
}
}

View File

@@ -0,0 +1,100 @@
// @flow
// $FlowFixMe
import through2 from 'through2'
import { createHash } from 'crypto'
import { defer, fromEvent } from 'promise-toolbox'
import { invert } from 'lodash'
import { type Readable, type Transform } from 'stream'
// Format: $<algorithm>$<salt>$<encrypted>
//
// http://man7.org/linux/man-pages/man3/crypt.3.html#NOTES
const ALGORITHM_TO_ID = {
md5: '1',
sha256: '5',
sha512: '6',
}
const ID_TO_ALGORITHM = invert(ALGORITHM_TO_ID)
// Create a through stream which computes the checksum of all data going
// through.
//
// The `checksum` attribute is a promise which resolves at the end of the stream
// with a string representation of the checksum.
//
// const source = ...
// const checksumStream = source.pipe(createChecksumStream())
// checksumStream.resume() // make the data flow without an output
// console.log(await checksumStream.checksum)
export const createChecksumStream = (
algorithm: string = 'md5'
): Transform & { checksum: Promise<string> } => {
const algorithmId = ALGORITHM_TO_ID[algorithm]
if (!algorithmId) {
throw new Error(`unknown algorithm: ${algorithm}`)
}
const hash = createHash(algorithm)
const { promise, resolve, reject } = defer()
const stream = through2(
(chunk, enc, callback) => {
hash.update(chunk)
callback(null, chunk)
},
callback => {
resolve(`$${algorithmId}$$${hash.digest('hex')}`)
callback()
}
).once('error', reject)
stream.checksum = promise
return stream
}
// Check whether the checksum of a readable stream equals an expected checksum.
// The given stream is wrapped in a stream which emits an error event
// if the computed checksum does not equal the expected checksum.
export const validChecksumOfReadStream = (
stream: Readable,
expectedChecksum: string
): Readable & { checksumVerified: Promise<void> } => {
const algorithmId = expectedChecksum.slice(
1,
expectedChecksum.indexOf('$', 1)
)
if (!algorithmId) {
throw new Error(`unknown algorithm: ${algorithmId}`)
}
const hash = createHash(ID_TO_ALGORITHM[algorithmId])
const wrapper: any = stream.pipe(
through2(
{ highWaterMark: 0 },
(chunk, enc, callback) => {
hash.update(chunk)
callback(null, chunk)
},
callback => {
const checksum = `$${algorithmId}$$${hash.digest('hex')}`
callback(
checksum !== expectedChecksum
? new Error(
`Bad checksum (${checksum}), expected: ${expectedChecksum}`
)
: null
)
}
)
)
stream.on('error', error => wrapper.emit('error', error))
wrapper.checksumVerified = fromEvent(wrapper, 'end')
return wrapper
}

View File

@@ -63,13 +63,29 @@ export default class LocalHandler extends RemoteHandlerAbstract {
}
async _createReadStream (file, options) {
return fs.createReadStream(this._getFilePath(file), options)
if (typeof file === 'string') {
return fs.createReadStream(this._getFilePath(file), options)
} else {
return fs.createReadStream('', {
autoClose: false,
...options,
fd: file.fd,
})
}
}
async _createOutputStream (file, options) {
const path = this._getFilePath(file)
await fs.ensureDir(dirname(path))
return fs.createWriteStream(path, options)
if (typeof file === 'string') {
const path = this._getFilePath(file)
await fs.ensureDir(dirname(path))
return fs.createWriteStream(path, options)
} else {
return fs.createWriteStream('', {
autoClose: false,
...options,
fd: file.fd,
})
}
}
async _unlink (file) {
@@ -82,7 +98,17 @@ export default class LocalHandler extends RemoteHandlerAbstract {
}
async _getSize (file) {
const stats = await fs.stat(this._getFilePath(file))
const stats = await fs.stat(
this._getFilePath(typeof file === 'string' ? file : file.path)
)
return stats.size
}
// Opens the file at `path` (resolved against the remote root via
// `_getFilePath`) and resolves with its numeric file descriptor.
async _openFile (path, flags) {
  return fs.open(this._getFilePath(path), flags)
}

// Closes a numeric file descriptor returned by `_openFile`.
async _closeFile (fd) {
  return fs.close(fd)
}
}

View File

@@ -139,6 +139,9 @@ export default class SmbHandler extends RemoteHandlerAbstract {
}
async _createReadStream (file, options = {}) {
if (typeof file !== 'string') {
file = file.path
}
const client = this._getClient(this._remote)
let stream
@@ -154,6 +157,9 @@ export default class SmbHandler extends RemoteHandlerAbstract {
}
async _createOutputStream (file, options = {}) {
if (typeof file !== 'string') {
file = file.path
}
const client = this._getClient(this._remote)
const path = this._getFilePath(file)
const dir = this._dirname(path)
@@ -188,13 +194,22 @@ export default class SmbHandler extends RemoteHandlerAbstract {
let size
try {
size = await client.getSize(this._getFilePath(file))::pFinally(() => {
client.close()
})
size = await client
.getSize(this._getFilePath(typeof file === 'string' ? file : file.path))
::pFinally(() => {
client.close()
})
} catch (error) {
throw normalizeError(error)
}
return size
}
// This is a fake: SMB does not keep a persistent handle here, so "opening"
// simply resolves with the full remote path, and "closing" is a no-op.
async _openFile (path) {
  return this._getFilePath(path)
}
async _closeFile (fd) {}
}

View File

@@ -1,6 +1,10 @@
import through2 from 'through2'
// @flow
const createSizeStream = () => {
// $FlowFixMe
import through2 from 'through2'
import { type Readable } from 'stream'
const createSizeStream = (): Readable & { size: number } => {
const wrapper = through2((chunk, enc, cb) => {
wrapper.size += chunk.length
cb(null, chunk)

View File

@@ -1,10 +1,8 @@
import base64url from 'base64url'
import eventToPromise from 'event-to-promise'
import forEach from 'lodash/forEach'
import has from 'lodash/has'
import highland from 'highland'
import humanFormat from 'human-format'
import invert from 'lodash/invert'
import isArray from 'lodash/isArray'
import isString from 'lodash/isString'
import keys from 'lodash/keys'
@@ -14,24 +12,16 @@ import multiKeyHashInt from 'multikey-hash'
import pick from 'lodash/pick'
import tmp from 'tmp'
import xml2js from 'xml2js'
import { resolve } from 'path'
// Moment timezone can be loaded only one time, it's a workaround to load
// the latest version because cron module uses an old version of moment which
// does not implement `guess` function for example.
import 'moment-timezone'
import through2 from 'through2'
import { randomBytes } from 'crypto'
import { dirname, resolve } from 'path'
import { utcFormat, utcParse } from 'd3-time-format'
import {
all as pAll,
defer,
fromCallback,
isPromise,
promisify,
reflect as pReflect,
} from 'promise-toolbox'
import { createHash, randomBytes } from 'crypto'
// ===================================================================
@@ -78,16 +68,9 @@ export function camelToSnakeCase (string) {
// -------------------------------------------------------------------
// Returns an empty object without prototype (if possible).
export const createRawObject = Object.create
? (createObject => () => createObject(null))(Object.create)
: () => ({})
// -------------------------------------------------------------------
// Only works with string items!
export const diffItems = (coll1, coll2) => {
const removed = createRawObject()
const removed = { __proto__: null }
forEach(coll2, value => {
removed[value] = true
})
@@ -106,99 +89,6 @@ export const diffItems = (coll1, coll2) => {
// -------------------------------------------------------------------
const ALGORITHM_TO_ID = {
md5: '1',
sha256: '5',
sha512: '6',
}
const ID_TO_ALGORITHM = invert(ALGORITHM_TO_ID)
// Wrap a readable stream in a stream with a checksum promise
// attribute which is resolved at the end of an input stream.
// (Finally .checksum contains the checksum of the input stream)
//
// Example:
// const sourceStream = ...
// const targetStream = ...
// const checksumStream = addChecksumToReadStream(sourceStream)
// await Promise.all([
// eventToPromise(checksumStream.pipe(targetStream), 'finish'),
// checksumStream.checksum.then(console.log)
// ])
export const addChecksumToReadStream = (stream, algorithm = 'md5') => {
const algorithmId = ALGORITHM_TO_ID[algorithm]
if (!algorithmId) {
throw new Error(`unknown algorithm: ${algorithm}`)
}
const hash = createHash(algorithm)
const { promise, resolve } = defer()
const wrapper = stream.pipe(
through2(
(chunk, enc, callback) => {
hash.update(chunk)
callback(null, chunk)
},
callback => {
resolve(hash.digest('hex'))
callback()
}
)
)
stream.on('error', error => wrapper.emit('error', error))
wrapper.checksum = promise.then(hash => `$${algorithmId}$$${hash}`)
return wrapper
}
// Check if the checksum of a readable stream is equals to an expected checksum.
// The given stream is wrapped in a stream which emits an error event
// if the computed checksum is not equals to the expected checksum.
export const validChecksumOfReadStream = (stream, expectedChecksum) => {
const algorithmId = expectedChecksum.slice(
1,
expectedChecksum.indexOf('$', 1)
)
if (!algorithmId) {
throw new Error(`unknown algorithm: ${algorithmId}`)
}
const hash = createHash(ID_TO_ALGORITHM[algorithmId])
const wrapper = stream.pipe(
through2(
{ highWaterMark: 0 },
(chunk, enc, callback) => {
hash.update(chunk)
callback(null, chunk)
},
callback => {
const checksum = `$${algorithmId}$$${hash.digest('hex')}`
callback(
checksum !== expectedChecksum
? new Error(
`Bad checksum (${checksum}), expected: ${expectedChecksum}`
)
: null
)
}
)
)
stream.on('error', error => wrapper.emit('error', error))
wrapper.checksumVerified = eventToPromise(wrapper, 'end')
return wrapper
}
// -------------------------------------------------------------------
// Ensure the value is an array, wrap it if necessary.
export function ensureArray (value) {
if (value === undefined) {
@@ -307,7 +197,7 @@ export const parseXml = (function () {
// - works only with strings
// - methods are already bound and chainable
export const lightSet = collection => {
let data = createRawObject()
let data = { __proto__: null }
if (collection) {
forEach(collection, value => {
data[value] = true
@@ -321,7 +211,7 @@ export const lightSet = collection => {
return set
},
clear: () => {
data = createRawObject()
data = { __proto__: null }
return set
},
delete: value => {
@@ -429,6 +319,12 @@ export const popProperty = obj => {
// -------------------------------------------------------------------
// resolve a relative path from a file
export const resolveRelativeFromFile = (file, path) =>
resolve('/', dirname(file), path).slice(1)
// -------------------------------------------------------------------
// Format a date in ISO 8601 in a safe way to be used in filenames
// (even on Windows).
export const safeDateFormat = utcFormat('%Y%m%dT%H%M%SZ')

View File

@@ -0,0 +1,24 @@
// @flow
import { type Readable } from 'stream'
type MaybePromise<T> = Promise<T> | T
declare export function asyncMap<T1, T2>(
collection: MaybePromise<T1[]>,
(T1, number) => MaybePromise<T2>
): Promise<T2[]>
declare export function asyncMap<K, V1, V2>(
collection: MaybePromise<{ [K]: V1 }>,
(V1, K) => MaybePromise<V2>
): Promise<V2[]>
declare export function getPseudoRandomBytes(n: number): Buffer
declare export function resolveRelativeFromFile(file: string, path: string): string
declare export function safeDateFormat(timestamp: number): string
declare export function serializeError(error: Error): Object
declare export function streamToBuffer(stream: Readable): Promise<Buffer>

View File

@@ -2,7 +2,6 @@
import {
camelToSnakeCase,
createRawObject,
diffItems,
ensureArray,
extractProperty,
@@ -32,24 +31,6 @@ describe('camelToSnakeCase()', function () {
// -------------------------------------------------------------------
describe('createRawObject()', () => {
it('returns an empty object', () => {
expect(createRawObject()).toEqual({})
})
it('creates a new object each time', () => {
expect(createRawObject()).not.toBe(createRawObject())
})
if (Object.getPrototypeOf) {
it('creates an object without a prototype', () => {
expect(Object.getPrototypeOf(createRawObject())).toBe(null)
})
}
})
// -------------------------------------------------------------------
describe('diffItems', () => {
it('computes the added/removed items between 2 iterables', () => {
expect(diffItems(['foo', 'bar'], ['baz', 'foo'])).toEqual([

View File

@@ -2,12 +2,15 @@
import assert from 'assert'
import concurrency from 'limit-concurrency-decorator'
import eventToPromise from 'event-to-promise'
import fu from '@nraynaud/struct-fu'
import isEqual from 'lodash/isEqual'
import { dirname, relative } from 'path'
import { fromEvent } from 'promise-toolbox'
import type RemoteHandler from './remote-handlers/abstract'
import constantStream from './constant-stream'
import { noop, streamToBuffer } from './utils'
import { createReadable } from './ag2s'
import { noop, resolveRelativeFromFile, streamToBuffer } from './utils'
const VHD_UTIL_DEBUG = 0
const debug = VHD_UTIL_DEBUG ? str => console.log(`[vhd-util]${str}`) : noop
@@ -34,8 +37,8 @@ const VHD_PARENT_LOCATOR_ENTRIES = 8
const VHD_PLATFORM_CODE_NONE = 0
// Types of backup treated. Others are not supported.
const HARD_DISK_TYPE_DYNAMIC = 3 // Full backup.
const HARD_DISK_TYPE_DIFFERENCING = 4 // Delta backup.
export const HARD_DISK_TYPE_DYNAMIC = 3 // Full backup.
export const HARD_DISK_TYPE_DIFFERENCING = 4 // Delta backup.
// Other.
const BLOCK_UNUSED = 0xffffffff
@@ -182,7 +185,28 @@ function checksumStruct (rawStruct, struct) {
// ===================================================================
class Vhd {
// Format:
//
// 1. Footer (512)
// 2. Header (1024)
// 3. Unordered entries
// - BAT (batSize @ header.tableOffset)
// - Blocks (@ blockOffset(i))
// - bitmap (blockBitmapSize)
// - data (header.blockSize)
// - Parent locators (parentLocatorSize(i) @ parentLocatorOffset(i))
// 4. Footer (512 @ vhdSize - 512)
//
// Variables:
//
// - batSize = min(1, ceil(header.maxTableEntries * 4 / sectorSize)) * sectorSize
// - blockBitmapSize = ceil(header.blockSize / sectorSize / 8 / sectorSize) * sectorSize
// - blockOffset(i) = bat[i] * sectorSize
// - nBlocks = ceil(footer.currentSize / header.blockSize)
// - parentLocatorOffset(i) = header.parentLocatorEntry[i].platformDataOffset
// - parentLocatorSize(i) = header.parentLocatorEntry[i].platformDataSpace * sectorSize
// - sectorSize = 512
export class Vhd {
constructor (handler, path) {
this._handler = handler
this._path = path
@@ -203,6 +227,10 @@ class Vhd {
return this._readStream(start, n).then(streamToBuffer)
}
// Returns true if the block identified by `id` is allocated in the BAT
// (i.e. its entry is not BLOCK_UNUSED).
containsBlock (id) {
  return this._getBatEntry(id) !== BLOCK_UNUSED
}
// Returns the first address after metadata. (In bytes)
getEndOfHeaders () {
const { header } = this
@@ -328,10 +356,12 @@ class Vhd {
).then(
buf =>
onlyBitmap
? { bitmap: buf }
? { id: blockId, bitmap: buf }
: {
id: blockId,
bitmap: buf.slice(0, this.bitmapSize),
data: buf.slice(this.bitmapSize),
buffer: buf,
}
)
}
@@ -339,7 +369,6 @@ class Vhd {
// get the identifiers and first sectors of the first and last block
// in the file
//
// return undefined if none
_getFirstAndLastBlocks () {
const n = this.header.maxTableEntries
const bat = this.blockTable
@@ -353,7 +382,9 @@ class Vhd {
j += VHD_ENTRY_SIZE
if (i === n) {
throw new Error('no allocated block found')
const error = new Error('no allocated block found')
error.noBlock = true
throw error
}
}
lastSector = firstSector
@@ -383,27 +414,23 @@ class Vhd {
// =================================================================
// Write a buffer/stream at a given position in a vhd file.
_write (data, offset) {
async _write (data, offset) {
debug(
`_write offset=${offset} size=${
Buffer.isBuffer(data) ? data.length : '???'
}`
)
// TODO: could probably be merged in remote handlers.
return this._handler
.createOutputStream(this._path, {
flags: 'r+',
start: offset,
const stream = await this._handler.createOutputStream(this._path, {
flags: 'r+',
start: offset,
})
return Buffer.isBuffer(data)
? new Promise((resolve, reject) => {
stream.on('error', reject)
stream.end(data, resolve)
})
.then(
Buffer.isBuffer(data)
? stream =>
new Promise((resolve, reject) => {
stream.on('error', reject)
stream.end(data, resolve)
})
: stream => eventToPromise(data.pipe(stream), 'finish')
)
: fromEvent(data.pipe(stream), 'finish')
}
async ensureBatSize (size) {
@@ -415,11 +442,11 @@ class Vhd {
}
const tableOffset = uint32ToUint64(header.tableOffset)
const { first, firstSector, lastSector } = this._getFirstAndLastBlocks()
// extend BAT
const maxTableEntries = (header.maxTableEntries = size)
const batSize = maxTableEntries * VHD_ENTRY_SIZE
const batSize = sectorsToBytes(
sectorsRoundUpNoZero(maxTableEntries * VHD_ENTRY_SIZE)
)
const prevBat = this.blockTable
const bat = (this.blockTable = Buffer.allocUnsafe(batSize))
prevBat.copy(bat)
@@ -428,7 +455,7 @@ class Vhd {
`ensureBatSize: extend in memory BAT ${prevMaxTableEntries} -> ${maxTableEntries}`
)
const extendBat = () => {
const extendBat = async () => {
debug(
`ensureBatSize: extend in file BAT ${prevMaxTableEntries} -> ${maxTableEntries}`
)
@@ -438,25 +465,37 @@ class Vhd {
tableOffset + prevBat.length
)
}
try {
const { first, firstSector, lastSector } = this._getFirstAndLastBlocks()
if (tableOffset + batSize < sectorsToBytes(firstSector)) {
return Promise.all([extendBat(), this.writeHeader()])
}
if (tableOffset + batSize < sectorsToBytes(firstSector)) {
return Promise.all([extendBat(), this.writeHeader()])
}
const { fullBlockSize } = this
const newFirstSector = lastSector + fullBlockSize / VHD_SECTOR_SIZE
debug(
`ensureBatSize: move first block ${firstSector} -> ${newFirstSector}`
)
const { fullBlockSize } = this
const newFirstSector = lastSector + fullBlockSize / VHD_SECTOR_SIZE
debug(`ensureBatSize: move first block ${firstSector} -> ${newFirstSector}`)
return Promise.all([
// copy the first block at the end
this._readStream(sectorsToBytes(firstSector), fullBlockSize)
.then(stream => this._write(stream, sectorsToBytes(newFirstSector)))
.then(extendBat),
this._setBatEntry(first, newFirstSector),
this.writeHeader(),
this.writeFooter(),
])
const stream = await this._readStream(
sectorsToBytes(firstSector),
fullBlockSize
)
await this._write(stream, sectorsToBytes(newFirstSector))
await extendBat()
await this._setBatEntry(first, newFirstSector)
await this.writeHeader()
await this.writeFooter()
} catch (e) {
if (e.noBlock) {
await extendBat()
await this.writeHeader()
await this.writeFooter()
} else {
throw e
}
}
}
// set the first sector (bitmap) of a block
@@ -510,7 +549,16 @@ class Vhd {
await this._write(bitmap, sectorsToBytes(blockAddr))
}
async writeBlockSectors (block, beginSectorId, endSectorId) {
// Writes a whole block (`block.buffer`, i.e. bitmap + data) into this VHD,
// allocating the block first if it is not yet present in the BAT.
async writeEntireBlock (block) {
  let blockAddr = this._getBatEntry(block.id)
  if (blockAddr === BLOCK_UNUSED) {
    blockAddr = await this.createBlock(block.id)
  }
  await this._write(block.buffer, sectorsToBytes(blockAddr))
}
async writeBlockSectors (block, beginSectorId, endSectorId, parentBitmap) {
let blockAddr = this._getBatEntry(block.id)
if (blockAddr === BLOCK_UNUSED) {
@@ -525,6 +573,11 @@ class Vhd {
}, sectors=${beginSectorId}...${endSectorId}`
)
for (let i = beginSectorId; i < endSectorId; ++i) {
mapSetBit(parentBitmap, i)
}
await this.writeBlockBitmap(blockAddr, parentBitmap)
await this._write(
block.data.slice(
sectorsToBytes(beginSectorId),
@@ -532,20 +585,11 @@ class Vhd {
),
sectorsToBytes(offset)
)
const { bitmap } = await this._readBlock(block.id, true)
for (let i = beginSectorId; i < endSectorId; ++i) {
mapSetBit(bitmap, i)
}
await this.writeBlockBitmap(blockAddr, bitmap)
}
// Merge block id (of vhd child) into vhd parent.
async coalesceBlock (child, blockId) {
// Get block data and bitmap of block id.
const { bitmap, data } = await child._readBlock(blockId)
const block = await child._readBlock(blockId)
const { bitmap, data } = block
debug(`coalesceBlock block=${blockId}`)
@@ -556,7 +600,7 @@ class Vhd {
if (!mapTestBit(bitmap, i)) {
continue
}
let parentBitmap = null
let endSector = i + 1
// Count changed sectors.
@@ -566,7 +610,16 @@ class Vhd {
// Write n sectors into parent.
debug(`coalesceBlock: write sectors=${i}...${endSector}`)
await this.writeBlockSectors({ id: blockId, data }, i, endSector)
const isFullBlock = i === 0 && endSector === sectorsPerBlock
if (isFullBlock) {
await this.writeEntireBlock(block)
} else {
if (parentBitmap === null) {
parentBitmap = (await this._readBlock(blockId, true)).bitmap
}
await this.writeBlockSectors(block, i, endSector, parentBitmap)
}
i = endSector
}
@@ -619,61 +672,70 @@ export default concurrency(2)(async function vhdMerge (
childHandler,
childPath
) {
const parentVhd = new Vhd(parentHandler, parentPath)
const childVhd = new Vhd(childHandler, childPath)
const parentFd = await parentHandler.openFile(parentPath, 'r+')
try {
const parentVhd = new Vhd(parentHandler, parentFd)
const childFd = await childHandler.openFile(childPath, 'r')
try {
const childVhd = new Vhd(childHandler, childFd)
// Reading footer and header.
await Promise.all([
parentVhd.readHeaderAndFooter(),
childVhd.readHeaderAndFooter(),
])
// Reading footer and header.
await Promise.all([
parentVhd.readHeaderAndFooter(),
childVhd.readHeaderAndFooter(),
])
assert(childVhd.header.blockSize === parentVhd.header.blockSize)
assert(childVhd.header.blockSize === parentVhd.header.blockSize)
// Child must be a delta.
if (childVhd.footer.diskType !== HARD_DISK_TYPE_DIFFERENCING) {
throw new Error('Unable to merge, child is not a delta backup.')
}
// Child must be a delta.
if (childVhd.footer.diskType !== HARD_DISK_TYPE_DIFFERENCING) {
throw new Error('Unable to merge, child is not a delta backup.')
}
// Merging in differencing disk is prohibited in our case.
if (parentVhd.footer.diskType !== HARD_DISK_TYPE_DYNAMIC) {
throw new Error('Unable to merge, parent is not a full backup.')
}
// Allocation table map is not yet implemented.
if (
parentVhd.hasBlockAllocationTableMap() ||
childVhd.hasBlockAllocationTableMap()
) {
throw new Error('Unsupported allocation table map.')
}
// Allocation table map is not yet implemented.
if (
parentVhd.hasBlockAllocationTableMap() ||
childVhd.hasBlockAllocationTableMap()
) {
throw new Error('Unsupported allocation table map.')
}
// Read allocation table of child/parent.
await Promise.all([parentVhd.readBlockTable(), childVhd.readBlockTable()])
// Read allocation table of child/parent.
await Promise.all([parentVhd.readBlockTable(), childVhd.readBlockTable()])
await parentVhd.ensureBatSize(childVhd.header.maxTableEntries)
await parentVhd.ensureBatSize(childVhd.header.maxTableEntries)
let mergedDataSize = 0
for (
let blockId = 0;
blockId < childVhd.header.maxTableEntries;
blockId++
) {
if (childVhd.containsBlock(blockId)) {
mergedDataSize += await parentVhd.coalesceBlock(childVhd, blockId)
}
}
let mergedDataSize = 0
const cFooter = childVhd.footer
const pFooter = parentVhd.footer
for (let blockId = 0; blockId < childVhd.header.maxTableEntries; blockId++) {
if (childVhd._getBatEntry(blockId) !== BLOCK_UNUSED) {
mergedDataSize += await parentVhd.coalesceBlock(childVhd, blockId)
pFooter.currentSize = { ...cFooter.currentSize }
pFooter.diskGeometry = { ...cFooter.diskGeometry }
pFooter.originalSize = { ...cFooter.originalSize }
pFooter.timestamp = cFooter.timestamp
pFooter.uuid = cFooter.uuid
// necessary to update values and to recreate the footer after block
// creation
await parentVhd.writeFooter()
return mergedDataSize
} finally {
await childHandler.closeFile(childFd)
}
} finally {
await parentHandler.closeFile(parentFd)
}
const cFooter = childVhd.footer
const pFooter = parentVhd.footer
pFooter.currentSize = { ...cFooter.currentSize }
pFooter.diskGeometry = { ...cFooter.diskGeometry }
pFooter.originalSize = { ...cFooter.originalSize }
pFooter.timestamp = cFooter.timestamp
// necessary to update values and to recreate the footer after block
// creation
await parentVhd.writeFooter()
return mergedDataSize
})
// returns true if the child was actually modified
@@ -692,7 +754,7 @@ export async function chainVhd (
const { header } = childVhd
const parentName = parentPath.split('/').pop()
const parentName = relative(dirname(childPath), parentPath)
const parentUuid = parentVhd.footer.uuid
if (
header.parentUnicodeName !== parentName ||
@@ -704,19 +766,147 @@ export async function chainVhd (
return true
}
// The checksum was broken between xo-server v5.2.4 and v5.2.5
//
// Replace by a correct checksum if necessary.
//
// TODO: remove when enough time as passed (6 months).
{
const rawHeader = fuHeader.pack(header)
const checksum = checksumStruct(rawHeader, fuHeader)
if (checksum !== header.checksum) {
await childVhd._write(rawHeader, VHD_FOOTER_SIZE)
return true
}
}
return false
}
// Returns a read stream over a "synthetic" full VHD: the delta chain
// starting at `path` is resolved up to its dynamic (full) ancestor and
// flattened on the fly into a single dynamic VHD.
//
// Emitted layout:
// 1. footer (512 B)
// 2. header (1024 B)
// 3. BAT
// 4. allocated blocks (bitmap + data), in ascending block id order
// 5. footer again (512 B)
//
// All VHDs of the chain stay open for the duration of the stream and are
// closed (best effort) when the generator terminates.
export const createReadStream = (handler, path) =>
  createReadable(function * () {
    const fds = []
    try {
      // open the whole chain, from the requested VHD up to the full ancestor
      const vhds = []
      while (true) {
        const fd = yield handler.openFile(path, 'r')
        fds.push(fd)
        const vhd = new Vhd(handler, fd)
        vhds.push(vhd)
        yield vhd.readHeaderAndFooter()
        yield vhd.readBlockTable()
        if (vhd.footer.diskType === HARD_DISK_TYPE_DYNAMIC) {
          break
        }
        path = resolveRelativeFromFile(path, vhd.header.parentUnicodeName)
      }
      const nVhds = vhds.length

      // this is the VHD we want to synthesize
      const vhd = vhds[0]

      // data of our synthetic VHD
      // TODO: empty parentUuid and parentLocatorEntry-s in header
      let header = {
        ...vhd.header,
        tableOffset: {
          high: 0,
          // the BAT immediately follows footer (512) + header (1024)
          low: 512 + 1024,
        },
        parentUnicodeName: '',
      }

      // BAT size is rounded up to a whole number of sectors
      const bat = Buffer.allocUnsafe(
        Math.ceil(4 * header.maxTableEntries / VHD_SECTOR_SIZE) *
          VHD_SECTOR_SIZE
      )

      let footer = {
        ...vhd.footer,
        diskType: HARD_DISK_TYPE_DYNAMIC,
      }

      // a stored block = its sector bitmap followed by its data sectors
      const sectorsPerBlockData = vhd.sectorsPerBlock
      const sectorsPerBlock =
        sectorsPerBlockData + vhd.bitmapSize / VHD_SECTOR_SIZE

      const nBlocks = Math.ceil(
        uint32ToUint64(footer.currentSize) / header.blockSize
      )

      // for each block, the index of the shallowest VHD of the chain which
      // contains it; used to lay out the synthetic BAT
      const blocksOwner = new Array(nBlocks)
      for (
        let iBlock = 0,
          blockOffset = Math.ceil((512 + 1024 + bat.length) / VHD_SECTOR_SIZE);
        iBlock < nBlocks;
        ++iBlock
      ) {
        let blockSector = BLOCK_UNUSED
        for (let i = 0; i < nVhds; ++i) {
          if (vhds[i].containsBlock(iBlock)) {
            blocksOwner[iBlock] = i
            blockSector = blockOffset
            blockOffset += sectorsPerBlock
            break
          }
        }
        bat.writeUInt32BE(blockSector, iBlock * 4)
      }

      footer = fuFooter.pack(footer)
      checksumStruct(footer, fuFooter)
      yield footer

      header = fuHeader.pack(header)
      checksumStruct(header, fuHeader)
      yield header

      yield bat

      // the synthetic VHD is full: every sector of an emitted block is
      // present, so a constant all-ones bitmap can be shared by all blocks
      const bitmap = Buffer.alloc(vhd.bitmapSize, 0xff)
      for (let iBlock = 0; iBlock < nBlocks; ++iBlock) {
        const owner = blocksOwner[iBlock]
        if (owner === undefined) {
          continue
        }

        yield bitmap

        const blocksByVhd = new Map()

        // emits sectors [i, n) of the current block, reading each sector
        // from the shallowest VHD (starting at iVhd) which provides it
        const emitBlockSectors = function * (iVhd, i, n) {
          const vhd = vhds[iVhd]
          if (!vhd.containsBlock(iBlock)) {
            yield * emitBlockSectors(iVhd + 1, i, n)
            return
          }
          let block = blocksByVhd.get(vhd)
          if (block === undefined) {
            block = yield vhd._readBlock(iBlock)
            blocksByVhd.set(vhd, block)
          }
          const { bitmap, data } = block
          if (vhd.footer.diskType === HARD_DISK_TYPE_DYNAMIC) {
            // full VHD: all sectors are present, no need to check the bitmap
            yield data.slice(i * VHD_SECTOR_SIZE, n * VHD_SECTOR_SIZE)
            return
          }
          while (i < n) {
            const hasData = mapTestBit(bitmap, i)
            const start = i
            do {
              ++i
            } while (i < n && mapTestBit(bitmap, i) === hasData)
            if (hasData) {
              yield data.slice(start * VHD_SECTOR_SIZE, i * VHD_SECTOR_SIZE)
            } else {
              // sectors absent from this delta: take them from the parents
              yield * emitBlockSectors(iVhd + 1, start, i)
            }
          }
        }

        // FIX: emit only the data sectors — the bitmap has already been
        // yielded above.  Using `sectorsPerBlock` (bitmap + data sectors)
        // made every block `bitmapSize` bytes too long and read past the
        // end of the sector bitmaps, corrupting the synthetic VHD layout.
        yield * emitBlockSectors(owner, 0, sectorsPerBlockData)
      }

      yield footer
    } finally {
      for (let i = 0, n = fds.length; i < n; ++i) {
        handler.closeFile(fds[i]).catch(error => {
          console.warn('createReadStream, closeFd', i, error)
        })
      }
    }
  })
export async function readVhdMetadata (handler: RemoteHandler, path: string) {
const vhd = new Vhd(handler, path)
await vhd.readHeaderAndFooter()
return {
footer: vhd.footer,
header: vhd.header,
}
}

View File

@@ -0,0 +1,72 @@
import execa from 'execa'
import vhdMerge, { chainVhd, Vhd } from './vhd-merge'
import LocalHandler from './remote-handlers/local.js'
// Manual end-to-end test for chainVhd/vhdMerge.
//
// Requires `bash`, `qemu-img`, `vhd-util`, `truncate`, `ls` and `diff` in
// PATH, and writes its work files to the current directory.
//
// Scenario: generate a random file, convert it to a VHD, chain it onto an
// empty VHD, merge the chain back into the parent and check (via `diff`)
// that the recovered raw content matches the original random file.
async function testVhdMerge () {
  console.log('before merge')
  const moOfRandom = 4 // size of the random test file, in MiB
  await execa('bash', [
    '-c',
    `head -c ${moOfRandom}M < /dev/urandom >randomfile`,
  ])
  await execa('bash', [
    '-c',
    `head -c ${moOfRandom / 2}M < /dev/urandom >small_randomfile`,
  ])
  // convert the raw random file to a (full) VHD
  await execa('qemu-img', [
    'convert',
    '-f',
    'raw',
    '-Ovpc',
    'randomfile',
    'randomfile.vhd',
  ])
  await execa('vhd-util', ['check', '-t', '-n', 'randomfile.vhd'])
  await execa('vhd-util', ['create', '-s', moOfRandom, '-n', 'empty.vhd'])
  // await execa('vhd-util', ['snapshot', '-n', 'randomfile_delta.vhd', '-p', 'randomfile.vhd'])
  const handler = new LocalHandler({ url: 'file://' + process.cwd() })
  const originalSize = await handler._getSize('randomfile')
  // make randomfile.vhd a child of empty.vhd…
  await chainVhd(handler, 'empty.vhd', handler, 'randomfile.vhd')
  const childVhd = new Vhd(handler, 'randomfile.vhd')
  console.log('changing type')
  await childVhd.readHeaderAndFooter()
  console.log('child vhd', childVhd.footer.currentSize, originalSize)
  await childVhd.readBlockTable()
  // …and mark it as a differencing disk so vhdMerge accepts it
  childVhd.footer.diskType = 4 // Delta backup.
  await childVhd.writeFooter()
  console.log('chained')
  await vhdMerge(handler, 'empty.vhd', handler, 'randomfile.vhd')
  console.log('merged')
  const parentVhd = new Vhd(handler, 'empty.vhd')
  await parentVhd.readHeaderAndFooter()
  console.log('parent vhd', parentVhd.footer.currentSize)
  // convert the merged VHD back to raw and compare with the original
  await execa('qemu-img', [
    'convert',
    '-f',
    'vpc',
    '-Oraw',
    'empty.vhd',
    'recovered',
  ])
  await execa('truncate', ['-s', originalSize, 'recovered'])
  console.log('ls', (await execa('ls', ['-lt'])).stdout)
  console.log(
    'diff',
    (await execa('diff', ['-q', 'randomfile', 'recovered'])).stdout
  )
  /* const vhd = new Vhd(handler, 'randomfile_delta.vhd')
  await vhd.readHeaderAndFooter()
  await vhd.readBlockTable()
  console.log('vhd.header.maxTableEntries', vhd.header.maxTableEntries)
  await vhd.ensureBatSize(300)
  console.log('vhd.header.maxTableEntries', vhd.header.maxTableEntries)
  */
  console.log(await handler.list())
  console.log('lol')
}

export { testVhdMerge as default }

View File

@@ -6,9 +6,10 @@ import synchronized from 'decorator-synchronized'
import tarStream from 'tar-stream'
import vmdkToVhd from 'xo-vmdk-to-vhd'
import {
cancellable,
cancelable,
catchPlus as pCatch,
defer,
fromEvent,
ignoreErrors,
} from 'promise-toolbox'
import { PassThrough } from 'stream'
@@ -34,7 +35,6 @@ import { mixin } from '../decorators'
import {
asyncMap,
camelToSnakeCase,
createRawObject,
ensureArray,
forEach,
isFunction,
@@ -50,6 +50,7 @@ import {
import mixins from './mixins'
import OTHER_CONFIG_TEMPLATE from './other-config-template'
import { type DeltaVmExport } from './'
import {
asBoolean,
asInteger,
@@ -84,9 +85,6 @@ export const VDI_FORMAT_RAW = 'raw'
export const IPV4_CONFIG_MODES = ['None', 'DHCP', 'Static']
export const IPV6_CONFIG_MODES = ['None', 'DHCP', 'Static', 'Autoconf']
// do not share the same limit for export and import, it could lead to deadlocks
const importLimit = concurrency(2)
// ===================================================================
@mixin(mapToArray(mixins))
@@ -103,8 +101,8 @@ export default class Xapi extends XapiBase {
return getObject.apply(this, args)
})(this.getObject)
const genericWatchers = (this._genericWatchers = createRawObject())
const objectsWatchers = (this._objectWatchers = createRawObject())
const genericWatchers = (this._genericWatchers = { __proto__: null })
const objectsWatchers = (this._objectWatchers = { __proto__: null })
const onAddOrUpdate = objects => {
forEach(objects, object => {
@@ -711,17 +709,23 @@ export default class Xapi extends XapiBase {
}
// Returns a stream to the exported VM.
async exportVm (vmId, { compress = true } = {}) {
@concurrency(2, stream => stream.then(stream => fromEvent(stream, 'end')))
@cancelable
async exportVm ($cancelToken, vmId, { compress = true } = {}) {
const vm = this.getObject(vmId)
let host
let snapshotRef
if (isVmRunning(vm)) {
host = vm.$resident_on
snapshotRef = (await this._snapshotVm(vm)).$ref
snapshotRef = (await this._snapshotVm(
$cancelToken,
vm,
`[XO Export] ${vm.name_label}`
)).$ref
}
const promise = this.getResource('/export/', {
const promise = this.getResource($cancelToken, '/export/', {
host,
query: {
ref: snapshotRef || vm.$ref,
@@ -772,20 +776,21 @@ export default class Xapi extends XapiBase {
}
_assertHealthyVdiChains (vm) {
const cache = createRawObject()
const cache = { __proto__: null }
forEach(vm.$VBDs, ({ $VDI }) => {
this._assertHealthyVdiChain($VDI, cache)
})
}
// Create a snapshot of the VM and returns a delta export object.
@cancellable
// Create a snapshot (if necessary) of the VM and returns a delta export
// object.
@cancelable
@deferrable
async exportDeltaVm (
$defer,
$cancelToken,
vmId,
baseVmId = undefined,
vmId: string,
baseVmId?: string,
{
bypassVdiChainsCheck = false,
@@ -795,17 +800,16 @@ export default class Xapi extends XapiBase {
disableBaseTags = false,
snapshotNameLabel = undefined,
} = {}
) {
): Promise<DeltaVmExport> {
let vm = this.getObject(vmId)
if (!bypassVdiChainsCheck) {
this._assertHealthyVdiChains(this.getObject(vmId))
this._assertHealthyVdiChains(vm)
}
const vm = await this.snapshotVm(vmId)
$defer.onFailure(() => this._deleteVm(vm))
if (snapshotNameLabel) {
;this._setObjectProperties(vm, {
nameLabel: snapshotNameLabel,
})::ignoreErrors()
// do not use the snapshot name in the delta export
const exportedNameLabel = vm.name_label
if (!vm.is_a_snapshot) {
vm = await this._snapshotVm($cancelToken, vm, snapshotNameLabel)
$defer.onFailure(() => this._deleteVm(vm))
}
const baseVm = baseVmId && this.getObject(baseVmId)
@@ -870,13 +874,8 @@ export default class Xapi extends XapiBase {
...vdi,
$SR$uuid: vdi.$SR.uuid,
}
const stream = (streams[`${vdiRef}.vhd`] = this._exportVdi(
$cancelToken,
vdi,
baseVdi,
VDI_FORMAT_VHD
))
$defer.onFailure(stream.cancel)
streams[`${vdiRef}.vhd`] = () =>
this._exportVdi($cancelToken, vdi, baseVdi, VDI_FORMAT_VHD)
})
const vifs = {}
@@ -895,6 +894,7 @@ export default class Xapi extends XapiBase {
vifs,
vm: {
...vm,
name_label: exportedNameLabel,
other_config:
baseVm && !disableBaseTags
? {
@@ -906,7 +906,9 @@ export default class Xapi extends XapiBase {
},
'streams',
{
value: await streams::pAll(),
configurable: true,
value: streams,
writable: true,
}
)
}
@@ -914,9 +916,10 @@ export default class Xapi extends XapiBase {
@deferrable
async importDeltaVm (
$defer,
delta,
delta: DeltaVmExport,
{
deleteBase = false,
detectBase = true,
disableStartAfterImport = true,
mapVdisSrs = {},
name_label = delta.vm.name_label,
@@ -929,17 +932,19 @@ export default class Xapi extends XapiBase {
throw new Error(`Unsupported delta backup version: ${version}`)
}
const remoteBaseVmUuid = delta.vm.other_config[TAG_BASE_DELTA]
let baseVm
if (remoteBaseVmUuid) {
baseVm = find(
this.objects.all,
obj =>
(obj = obj.other_config) && obj[TAG_COPY_SRC] === remoteBaseVmUuid
)
if (detectBase) {
const remoteBaseVmUuid = delta.vm.other_config[TAG_BASE_DELTA]
if (remoteBaseVmUuid) {
baseVm = find(
this.objects.all,
obj =>
(obj = obj.other_config) && obj[TAG_COPY_SRC] === remoteBaseVmUuid
)
if (!baseVm) {
throw new Error('could not find the base VM')
if (!baseVm) {
throw new Error('could not find the base VM')
}
}
}
@@ -949,8 +954,6 @@ export default class Xapi extends XapiBase {
baseVdis[vbd.VDI] = vbd.$VDI
})
const { streams } = delta
// 1. Create the VMs.
const vm = await this._getOrWaitObject(
await this._createVmRecord({
@@ -978,7 +981,7 @@ export default class Xapi extends XapiBase {
// 3. Create VDIs.
const newVdis = await map(delta.vdis, async vdi => {
const remoteBaseVdiUuid = vdi.other_config[TAG_BASE_DELTA]
const remoteBaseVdiUuid = detectBase && vdi.other_config[TAG_BASE_DELTA]
if (!remoteBaseVdiUuid) {
const newVdi = await this.createVdi({
...vdi,
@@ -1018,6 +1021,9 @@ export default class Xapi extends XapiBase {
defaultNetwork = networksOnPoolMasterByDevice[pif.device] = pif.$network
})
const { streams } = delta
let transferSize = 0
await Promise.all([
// Create VBDs.
asyncMap(delta.vbds, vbd =>
@@ -1030,8 +1036,17 @@ export default class Xapi extends XapiBase {
// Import VDI contents.
asyncMap(newVdis, async (vdi, id) => {
for (const stream of ensureArray(streams[`${id}.vhd`])) {
await this._importVdiContent(vdi, stream, VDI_FORMAT_VHD)
for (let stream of ensureArray(streams[`${id}.vhd`])) {
if (typeof stream === 'function') {
stream = await stream()
}
const sizeStream = stream
.pipe(createSizeStream())
.once('finish', () => {
transferSize += sizeStream.size
})
stream.task = sizeStream.task
await this._importVdiContent(vdi, sizeStream, VDI_FORMAT_VHD)
}
}),
@@ -1067,7 +1082,7 @@ export default class Xapi extends XapiBase {
}),
])
return vm
return { transferSize, vm }
}
async _migrateVmWithStorageMotion (
@@ -1227,8 +1242,8 @@ export default class Xapi extends XapiBase {
)
}
@importLimit
async _importVm (stream, sr, onVmCreation = undefined) {
@cancelable
async _importVm ($cancelToken, stream, sr, onVmCreation = undefined) {
const taskRef = await this.createTask('VM import')
const query = {}
@@ -1238,16 +1253,18 @@ export default class Xapi extends XapiBase {
query.sr_id = sr.$ref
}
if (onVmCreation) {
if (onVmCreation != null) {
;this._waitObject(
obj =>
obj && obj.current_operations && taskRef in obj.current_operations
obj != null &&
obj.current_operations != null &&
taskRef in obj.current_operations
)
.then(onVmCreation)
::ignoreErrors()
}
const vmRef = await this.putResource(stream, '/import/', {
const vmRef = await this.putResource($cancelToken, stream, '/import/', {
host,
query,
task: taskRef,
@@ -1256,7 +1273,6 @@ export default class Xapi extends XapiBase {
return vmRef
}
@importLimit
@deferrable
async _importOvaVm (
$defer,
@@ -1409,7 +1425,8 @@ export default class Xapi extends XapiBase {
}
@synchronized() // like @concurrency(1) but more efficient
async _snapshotVm (vm, nameLabel = vm.name_label) {
@cancelable
async _snapshotVm ($cancelToken, vm, nameLabel = vm.name_label) {
debug(
`Snapshotting VM ${vm.name_label}${
nameLabel !== vm.name_label ? ` as ${nameLabel}` : ''
@@ -1418,7 +1435,12 @@ export default class Xapi extends XapiBase {
let ref
try {
ref = await this.call('VM.snapshot_with_quiesce', vm.$ref, nameLabel)
ref = await this.callAsync(
$cancelToken,
'VM.snapshot_with_quiesce',
vm.$ref,
nameLabel
).then(extractOpaqueRef)
this.addTag(ref, 'quiesce')::ignoreErrors()
await this._waitObjectState(ref, vm => includes(vm.tags, 'quiesce'))
@@ -1434,7 +1456,12 @@ export default class Xapi extends XapiBase {
) {
throw error
}
ref = await this.call('VM.snapshot', vm.$ref, nameLabel)
ref = await this.callAsync(
$cancelToken,
'VM.snapshot',
vm.$ref,
nameLabel
).then(extractOpaqueRef)
}
// Convert the template to a VM and wait to have receive the up-
// to-date object.
@@ -1850,7 +1877,8 @@ export default class Xapi extends XapiBase {
return snap
}
@cancellable
@concurrency(12, stream => stream.then(stream => fromEvent(stream, 'end')))
@cancelable
_exportVdi ($cancelToken, vdi, base, format = VDI_FORMAT_VHD) {
const host = vdi.$SR.$PBDs[0].$host
@@ -1875,15 +1903,6 @@ export default class Xapi extends XapiBase {
})
}
// Returns a stream to the exported VDI.
exportVdi (vdiId, { baseId, format } = {}) {
return this._exportVdi(
this.getObject(vdiId),
baseId && this.getObject(baseId),
format
)
}
// -----------------------------------------------------------------
async _importVdiContent (vdi, body, format = VDI_FORMAT_VHD) {

View File

@@ -0,0 +1,84 @@
// @flow
import { type Readable } from 'stream'
type AugmentedReadable = Readable & {
size?: number,
task?: Promise<mixed>
}
type MaybeArray<T> = Array<T> | T
export type DeltaVmExport = {|
streams: $Dict < () => Promise < AugmentedReadable >>,
vbds: { [ref: string]: Object },
vdis: {
[ref: string]: {
$SR$uuid: string,
snapshot_of: string,
}
},
version: '1.0.0',
vifs: { [ref: string]: Object },
vm: Vm,
|}
export type DeltaVmImport = {|
...DeltaVmExport,
streams: $Dict < MaybeArray < AugmentedReadable | () => Promise < AugmentedReadable >>>,
|}
declare class XapiObject {
$id: string;
$ref: string;
$type: string;
}
type Id = string | XapiObject
declare export class Vm extends XapiObject {
$snapshots: Vm[];
name_label: string;
other_config: $Dict<string>;
snapshot_time: number;
uuid: string;
}
declare export class Xapi {
objects: { all: $Dict<Object> };
_importVm(
cancelToken: mixed,
stream: AugmentedReadable,
sr?: XapiObject,
onVmCreation?: (XapiObject) => any
): Promise<string>;
_updateObjectMapProperty(
object: XapiObject,
property: string,
entries: $Dict<string>
): Promise<void>;
_setObjectProperties(
object: XapiObject,
properties: $Dict<mixed>
): Promise<void>;
_snapshotVm(cancelToken: mixed, vm: Vm, nameLabel?: string): Promise<Vm>;
addTag(object: Id, tag: string): Promise<void>;
barrier(): void;
barrier(ref: string): XapiObject;
deleteVm(vm: Id): Promise<void>;
editVm(vm: Id, $Dict<mixed>): Promise<void>;
exportDeltaVm(
cancelToken: mixed,
snapshot: Id,
baseSnapshot ?: Id
): Promise<DeltaVmExport>;
exportVm(
cancelToken: mixed,
vm: Vm,
options ?: Object
): Promise<AugmentedReadable>;
getObject(object: Id): XapiObject;
importDeltaVm(data: DeltaVmImport, options: Object): Promise<{ vm: Vm }>;
importVm(stream: AugmentedReadable, options: Object): Promise<Vm>;
}

View File

@@ -11,7 +11,6 @@ import unzip from 'julien-f-unzip'
import { debounce } from '../../decorators'
import {
createRawObject,
ensureArray,
forEach,
mapFilter,
@@ -35,7 +34,7 @@ export default {
const data = parseXml(await readAll()).patchdata
const patches = createRawObject()
const patches = { __proto__: null }
forEach(data.patches.patch, patch => {
patches[patch.uuid] = {
date: patch.timestamp,
@@ -65,7 +64,7 @@ export default {
})
const resolveVersionPatches = function (uuids) {
const versionPatches = createRawObject()
const versionPatches = { __proto__: null }
forEach(ensureArray(uuids), ({ uuid }) => {
versionPatches[uuid] = patches[uuid]
@@ -74,7 +73,7 @@ export default {
return versionPatches
}
const versions = createRawObject()
const versions = { __proto__: null }
let latestVersion
forEach(data.serverversions.version, version => {
versions[version.value] = {
@@ -112,7 +111,7 @@ export default {
},
_getInstalledPoolPatchesOnHost (host) {
const installed = createRawObject()
const installed = { __proto__: null }
// platform_version < 2.1.1
forEach(host.$patches, hostPatch => {
@@ -131,7 +130,7 @@ export default {
const all = await this._getPoolPatchesForHost(host)
const installed = this._getInstalledPoolPatchesOnHost(host)
const installable = createRawObject()
const installable = { __proto__: null }
forEach(all, (patch, uuid) => {
if (installed[uuid]) {
return

View File

@@ -1,6 +1,6 @@
import { forEach, groupBy } from 'lodash'
import { createRawObject, mapToArray } from '../../utils'
import { mapToArray } from '../../utils'
export default {
_connectAllSrPbds (sr) {
@@ -67,9 +67,9 @@ export default {
getUnhealthyVdiChainsLength (sr) {
const vdis = this.getObject(sr).$VDIs
const unhealthyVdis = createRawObject()
const unhealthyVdis = { __proto__: null }
const children = groupBy(vdis, 'sm_config.vhd-parent')
const cache = createRawObject()
const cache = { __proto__: null }
forEach(vdis, vdi => {
if (vdi.managed && !vdi.is_a_snapshot) {
const { uuid } = vdi

View File

@@ -9,7 +9,6 @@ import { satisfies as versionSatisfies } from 'semver'
import {
camelToSnakeCase,
createRawObject,
forEach,
isArray,
isBoolean,
@@ -77,7 +76,7 @@ export const extractOpaqueRef = str => {
// -------------------------------------------------------------------
const TYPE_TO_NAMESPACE = createRawObject()
const TYPE_TO_NAMESPACE = { __proto__: null }
forEach(
[
'Bond',
@@ -116,7 +115,7 @@ export const getNamespaceForType = type => TYPE_TO_NAMESPACE[type] || type
// -------------------------------------------------------------------
export const getVmDisks = vm => {
const disks = createRawObject(null)
const disks = { __proto__: null }
forEach(vm.$VBDs, vbd => {
let vdi
if (

View File

@@ -3,7 +3,6 @@ import { forEach, includes, map } from 'lodash'
import { ModelAlreadyExists } from '../collection'
import { Acls } from '../models/acl'
import { createRawObject } from '../utils'
// ===================================================================
@@ -86,10 +85,10 @@ export default class {
this._getPermissionsByRole(),
])
const permissions = createRawObject()
const permissions = { __proto__: null }
for (const { action, object: objectId } of acls) {
const current =
permissions[objectId] || (permissions[objectId] = createRawObject())
permissions[objectId] || (permissions[objectId] = { __proto__: null })
const permissionsForRole = permissionsByRole[action]
if (permissionsForRole) {
@@ -128,7 +127,7 @@ export default class {
async _getPermissionsByRole () {
const roles = await this.getRoles()
const permissions = createRawObject()
const permissions = { __proto__: null }
for (const role of roles) {
permissions[role.id] = role.permissions
}

View File

@@ -6,7 +6,7 @@ import { forEach, isArray, isFunction, map, mapValues } from 'lodash'
import * as methods from '../api'
import { MethodNotFound } from 'json-rpc-peer'
import { createRawObject, noop, serializeError } from '../utils'
import { noop, serializeError } from '../utils'
import * as errors from 'xo-common/api-errors'
@@ -164,7 +164,7 @@ const removeSensitiveParams = (value, name) => {
export default class Api {
constructor (xo) {
this._logger = null
this._methods = createRawObject()
this._methods = { __proto__: null }
this._xo = xo
this.addApiMethods(methods)

View File

@@ -3,7 +3,7 @@ import { noSuchObject } from 'xo-common/api-errors'
import { ignoreErrors } from 'promise-toolbox'
import Token, { Tokens } from '../models/token'
import { createRawObject, forEach, generateToken } from '../utils'
import { forEach, generateToken } from '../utils'
// ===================================================================
@@ -17,7 +17,7 @@ export default class {
// Store last failures by user to throttle tries (slow bruteforce
// attacks).
this._failures = createRawObject()
this._failures = { __proto__: null }
this._providers = new Set()

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,153 @@
// @flow
import assert from 'assert'
import { type BackupJob } from '../backups-ng'
import { type CallJob } from '../jobs'
import { type Schedule } from '../scheduling'
const createOr = (children: Array<any>): any =>
children.length === 1 ? children[0] : { __or: children }
const methods = {
'vm.deltaCopy': (
job: CallJob,
{ retention = 1, sr, vms },
schedule: Schedule
) => ({
mode: 'delta',
settings: {
[schedule.id]: {
exportRetention: retention,
vmTimeout: job.timeout,
},
},
srs: { id: sr },
userId: job.userId,
vms,
}),
'vm.rollingDeltaBackup': (
job: CallJob,
{ depth = 1, retention = depth, remote, vms },
schedule: Schedule
) => ({
mode: 'delta',
remotes: { id: remote },
settings: {
[schedule.id]: {
exportRetention: retention,
vmTimeout: job.timeout,
},
},
vms,
}),
'vm.rollingDrCopy': (
job: CallJob,
{ deleteOldBackupsFirst, depth = 1, retention = depth, sr, vms },
schedule: Schedule
) => ({
mode: 'full',
settings: {
[schedule.id]: {
deleteFirst: deleteOldBackupsFirst,
exportRetention: retention,
vmTimeout: job.timeout,
},
},
srs: { id: sr },
vms,
}),
'vm.rollingBackup': (
job: CallJob,
{ compress, depth = 1, retention = depth, remoteId, vms },
schedule: Schedule
) => ({
compression: compress ? 'native' : undefined,
mode: 'full',
remotes: { id: remoteId },
settings: {
[schedule.id]: {
exportRetention: retention,
vmTimeout: job.timeout,
},
},
vms,
}),
'vm.rollingSnapshot': (
job: CallJob,
{ depth = 1, retention = depth, vms },
schedule: Schedule
) => ({
mode: 'full',
settings: {
[schedule.id]: {
snapshotRetention: retention,
vmTimeout: job.timeout,
},
},
vms,
}),
}
const parseParamsVector = vector => {
assert.strictEqual(vector.type, 'crossProduct')
const { items } = vector
assert.strictEqual(items.length, 2)
let vms, params
if (items[1].type === 'map') {
;[params, vms] = items
vms = vms.collection
assert.strictEqual(vms.type, 'fetchObjects')
vms = vms.pattern
} else {
;[vms, params] = items
assert.strictEqual(vms.type, 'set')
vms = vms.values
if (vms.length !== 0) {
assert.deepStrictEqual(Object.keys(vms[0]), ['id'])
vms = { id: createOr(vms.map(_ => _.id)) }
}
}
assert.strictEqual(params.type, 'set')
params = params.values
assert.strictEqual(params.length, 1)
params = params[0]
return { ...params, vms }
}
export const translateOldJobs = async (app: any): Promise<Array<BackupJob>> => {
const backupJobs: Array<BackupJob> = []
const [jobs, schedules] = await Promise.all([
app.getAllJobs('call'),
app.getAllSchedules(),
])
jobs.forEach(job => {
try {
const { id } = job
let method, schedule
if (
job.type === 'call' &&
(method = methods[job.method]) !== undefined &&
(schedule = schedules.find(_ => _.jobId === id)) !== undefined
) {
const params = parseParamsVector(job.paramsVector)
backupJobs.push({
id,
name: params.tag || job.name,
type: 'backup',
userId: job.userId,
// $FlowFixMe `method` is initialized but Flow fails to see this
...method(job, params, schedule),
})
}
} catch (error) {
console.warn('translateOldJobs', job, error)
}
})
return backupJobs
}

View File

@@ -1,9 +1,8 @@
import deferrable from 'golike-defer'
import escapeStringRegexp from 'escape-string-regexp'
import eventToPromise from 'event-to-promise'
import execa from 'execa'
import splitLines from 'split-lines'
import { CancelToken, ignoreErrors } from 'promise-toolbox'
import { CancelToken, fromEvent, ignoreErrors } from 'promise-toolbox'
import { createParser as createPairsParser } from 'parse-pairs'
import { createReadStream, readdir, stat } from 'fs'
import { satisfies as versionSatisfies } from 'semver'
@@ -102,30 +101,6 @@ const getDeltaBackupNameWithoutExt = name =>
name.slice(0, -DELTA_BACKUP_EXT_LENGTH)
const isDeltaBackup = name => endsWith(name, DELTA_BACKUP_EXT)
// Checksums have been corrupted between 5.2.6 and 5.2.7.
//
// For a short period of time, bad checksums will be regenerated
// instead of rejected.
//
// TODO: restore when enough time has passed (a week/a month).
async function checkFileIntegrity (handler, name) {
await handler.refreshChecksum(name)
// let stream
//
// try {
// stream = await handler.createReadStream(name, { checksum: true })
// } catch (error) {
// if (error.code === 'ENOENT') {
// return
// }
//
// throw error
// }
//
// stream.resume()
// await eventToPromise(stream, 'finish')
}
// -------------------------------------------------------------------
const listPartitions = (() => {
@@ -429,8 +404,7 @@ export default class {
})(srcVm.other_config[TAG_LAST_BASE_DELTA])
// 2. Copy.
let size = 0
const dstVm = await (async () => {
const { transferSize, vm: dstVm } = await (async () => {
const { cancel, token } = CancelToken.source()
const delta = await srcXapi.exportDeltaVm(
token,
@@ -452,17 +426,6 @@ export default class {
delta.vm.other_config[TAG_EXPORT_TIME] = date
delta.vm.tags = [...delta.vm.tags, 'Continuous Replication']
const { streams } = delta
forEach(delta.vdis, (vdi, key) => {
const id = `${key}.vhd`
const stream = streams[id]
const sizeStream = createSizeStream().once('finish', () => {
size += sizeStream.size
})
sizeStream.task = stream.task
streams[id] = stream.pipe(sizeStream)
})
let toRemove = filter(
targetXapi.objects.all,
obj => obj.$type === 'vm' && obj.other_config[TAG_SOURCE_VM] === uuid
@@ -508,7 +471,7 @@ export default class {
// 5. Return the identifier of the new XO VM object.
id: xapiObjectToXo(dstVm).id,
transferDuration: Date.now() - transferStart,
transferSize: size,
transferSize,
}
}
@@ -558,15 +521,7 @@ export default class {
const backups = await this._listVdiBackups(handler, dir)
for (let i = 1; i < backups.length; i++) {
const childPath = dir + '/' + backups[i]
const modified = await chainVhd(
handler,
dir + '/' + backups[i - 1],
handler,
childPath
)
if (modified) {
await handler.refreshChecksum(childPath)
}
await chainVhd(handler, dir + '/' + backups[i - 1], handler, childPath)
}
}
@@ -582,8 +537,6 @@ export default class {
const timestamp = getVdiTimestamp(backups[i])
const newFullBackup = `${dir}/${timestamp}_full.vhd`
await checkFileIntegrity(handler, `${dir}/${backups[i]}`)
let j = i
for (; j > 0 && isDeltaVdiBackup(backups[j]); j--);
const fullBackupId = j
@@ -598,7 +551,6 @@ export default class {
const backup = `${dir}/${backups[j]}`
try {
await checkFileIntegrity(handler, backup)
mergedDataSize += await vhdMerge(handler, parent, handler, backup)
} catch (e) {
console.error('Unable to use vhd-util.', e)
@@ -652,6 +604,10 @@ export default class {
xapi,
{ vdiParent, isFull, handler, stream, dir, retention }
) {
if (typeof stream === 'function') {
stream = await stream()
}
const backupDirectory = `vdi_${vdiParent.uuid}`
dir = `${dir}/${backupDirectory}`
@@ -675,18 +631,12 @@ export default class {
const sizeStream = createSizeStream()
try {
const targetStream = await handler.createOutputStream(backupFullPath, {
// FIXME: Checksum is not computed for full vdi backups.
// The problem is in the merge case, a delta merged in a full vdi
// backup forces us to browse the resulting file =>
// Significant transfer time on the network !
checksum: !isFull,
})
const targetStream = await handler.createOutputStream(backupFullPath)
stream.on('error', error => targetStream.emit('error', error))
await Promise.all([
eventToPromise(stream.pipe(sizeStream).pipe(targetStream), 'finish'),
fromEvent(stream.pipe(sizeStream).pipe(targetStream), 'finish'),
stream.task,
])
} catch (error) {
@@ -898,10 +848,7 @@ export default class {
streams[`${id}.vhd`] = await Promise.all(
mapToArray(backups, async backup =>
handler.createReadStream(`${vdisFolder}/${backup}`, {
checksum: true,
ignoreMissingChecksum: true,
})
handler.createReadStream(`${vdisFolder}/${backup}`)
)
)
})
@@ -910,11 +857,11 @@ export default class {
delta.vm.name_label += ` (${shortDate(datetime * 1e3)})`
delta.vm.tags.push('restored from backup')
vm = await xapi.importDeltaVm(delta, {
vm = (await xapi.importDeltaVm(delta, {
disableStartAfterImport: false,
srId: sr !== undefined && sr._xapiId,
mapVdisSrs,
})
})).vm
} else {
throw new Error(`Unsupported delta backup version: ${version}`)
}
@@ -935,8 +882,6 @@ export default class {
$defer.onFailure.call(handler, 'unlink', file)
$defer.onFailure.call(targetStream, 'close')
const promise = eventToPromise(targetStream, 'finish')
const sourceStream = await this._xo.getXapi(vm).exportVm(vm._xapiId, {
compress,
})
@@ -945,7 +890,7 @@ export default class {
sourceStream.pipe(sizeStream).pipe(targetStream)
await promise
await Promise.all([sourceStream.task, fromEvent(targetStream, 'finish')])
return {
transferSize: sizeStream.size,

View File

@@ -1,87 +0,0 @@
import { assign } from 'lodash'
import { lastly } from 'promise-toolbox'
import { noSuchObject } from 'xo-common/api-errors'
import JobExecutor from '../job-executor'
import { Jobs as JobsDb } from '../models/job'
import { mapToArray } from '../utils'
// ===================================================================
export default class Jobs {
constructor (xo) {
this._executor = new JobExecutor(xo)
const jobsDb = (this._jobs = new JobsDb({
connection: xo._redis,
prefix: 'xo:job',
indexes: ['user_id', 'key'],
}))
this._runningJobs = Object.create(null)
xo.on('clean', () => jobsDb.rebuildIndexes())
xo.on('start', () => {
xo.addConfigManager(
'jobs',
() => jobsDb.get(),
jobs => Promise.all(mapToArray(jobs, job => jobsDb.save(job))),
['users']
)
})
}
async getAllJobs () {
return /* await */ this._jobs.get()
}
async getJob (id) {
const job = await this._jobs.first(id)
if (!job) {
throw noSuchObject(id, 'job')
}
return job.properties
}
async createJob (job) {
// TODO: use plain objects
const job_ = await this._jobs.create(job)
return job_.properties
}
async updateJob ({ id, ...props }) {
const job = await this.getJob(id)
assign(job, props)
if (job.timeout === null) {
delete job.timeout
}
return /* await */ this._jobs.save(job)
}
async removeJob (id) {
return /* await */ this._jobs.remove(id)
}
_runJob (job) {
const { id } = job
const runningJobs = this._runningJobs
if (runningJobs[id]) {
throw new Error(`job ${id} is already running`)
}
runningJobs[id] = true
return this._executor.exec(job)::lastly(() => {
delete runningJobs[id]
})
}
async runJobSequence (idSequence) {
const jobs = await Promise.all(
mapToArray(idSequence, id => this.getJob(id))
)
for (const job of jobs) {
await this._runJob(job)
}
}
}

View File

@@ -0,0 +1,124 @@
import { createPredicate } from 'value-matcher'
import { timeout } from 'promise-toolbox'
import { assign, filter, isEmpty, map, mapValues } from 'lodash'
import { crossProduct } from '../../math'
import { asyncMap, serializeError, thunkToArray } from '../../utils'
// ===================================================================
const paramsVectorActionsMap = {
extractProperties ({ mapping, value }) {
return mapValues(mapping, key => value[key])
},
crossProduct ({ items }) {
return thunkToArray(
crossProduct(map(items, value => resolveParamsVector.call(this, value)))
)
},
fetchObjects ({ pattern }) {
const objects = filter(this.getObjects(), createPredicate(pattern))
if (isEmpty(objects)) {
throw new Error('no objects match this pattern')
}
return objects
},
map ({ collection, iteratee, paramName = 'value' }) {
return map(resolveParamsVector.call(this, collection), value => {
return resolveParamsVector.call(this, {
...iteratee,
[paramName]: value,
})
})
},
set: ({ values }) => values,
}
export function resolveParamsVector (paramsVector) {
const visitor = paramsVectorActionsMap[paramsVector.type]
if (!visitor) {
throw new Error(`Unsupported function '${paramsVector.type}'.`)
}
return visitor.call(this, paramsVector)
}
// ===================================================================
export default async function executeJobCall ({
app,
job,
logger,
runJobId,
schedule,
session,
}) {
const { paramsVector } = job
const paramsFlatVector = paramsVector
? resolveParamsVector.call(app, paramsVector)
: [{}] // One call with no parameters
const execStatus = {
calls: {},
runJobId,
start: Date.now(),
timezone: schedule !== undefined ? schedule.timezone : undefined,
}
await asyncMap(paramsFlatVector, params => {
const runCallId = logger.notice(
`Starting ${job.method} call. (${job.id})`,
{
event: 'jobCall.start',
runJobId,
method: job.method,
params,
}
)
const call = (execStatus.calls[runCallId] = {
method: job.method,
params,
start: Date.now(),
})
let promise = app.callApiMethod(session, job.method, assign({}, params))
if (job.timeout) {
promise = promise::timeout(job.timeout)
}
return promise.then(
value => {
logger.notice(
`Call ${job.method} (${runCallId}) is a success. (${job.id})`,
{
event: 'jobCall.end',
runJobId,
runCallId,
returnedValue: value,
}
)
call.returnedValue = value
call.end = Date.now()
},
reason => {
logger.notice(
`Call ${job.method} (${runCallId}) has failed. (${job.id})`,
{
event: 'jobCall.end',
runJobId,
runCallId,
error: serializeError(reason),
}
)
call.error = reason
call.end = Date.now()
}
)
})
execStatus.end = Date.now()
return execStatus
}

View File

@@ -1,7 +1,7 @@
/* eslint-env jest */
import { forEach } from 'lodash'
import { resolveParamsVector } from './job-executor'
import { resolveParamsVector } from './execute-call'
describe('resolveParamsVector', function () {
forEach(
@@ -68,37 +68,35 @@ describe('resolveParamsVector', function () {
// Context.
{
xo: {
getObjects: function () {
return [
{
id: 'vm:1',
$pool: 'pool:1',
tags: [],
type: 'VM',
power_state: 'Halted',
},
{
id: 'vm:2',
$pool: 'pool:1',
tags: ['foo'],
type: 'VM',
power_state: 'Running',
},
{
id: 'host:1',
type: 'host',
power_state: 'Running',
},
{
id: 'vm:3',
$pool: 'pool:8',
tags: ['foo'],
type: 'VM',
power_state: 'Halted',
},
]
},
getObjects: function () {
return [
{
id: 'vm:1',
$pool: 'pool:1',
tags: [],
type: 'VM',
power_state: 'Halted',
},
{
id: 'vm:2',
$pool: 'pool:1',
tags: ['foo'],
type: 'VM',
power_state: 'Running',
},
{
id: 'host:1',
type: 'host',
power_state: 'Running',
},
{
id: 'vm:3',
$pool: 'pool:8',
tags: ['foo'],
type: 'VM',
power_state: 'Halted',
},
]
},
},
],

View File

@@ -0,0 +1,275 @@
// @flow
import type { Pattern } from 'value-matcher'
import { cancelable } from 'promise-toolbox'
import { map as mapToArray } from 'lodash'
import { noSuchObject } from 'xo-common/api-errors'
import Collection from '../../collection/redis'
import patch from '../../patch'
import { serializeError } from '../../utils'
import type Logger from '../logs/loggers/abstract'
import { type Schedule } from '../scheduling'
import executeCall from './execute-call'
// ===================================================================
export type Job = {
id: string,
name: string,
type: string,
userId: string
}
type ParamsVector =
| {|
items: Array<Object>,
type: 'crossProduct'
|}
| {|
mapping: Object,
type: 'extractProperties',
value: Object
|}
| {|
pattern: Pattern,
type: 'fetchObjects'
|}
| {|
collection: Object,
iteratee: Function,
paramName?: string,
type: 'map'
|}
| {|
type: 'set',
values: any
|}
export type CallJob = {|
...$Exact<Job>,
method: string,
paramsVector: ParamsVector,
timeout?: number,
type: 'call'
|}
export type Executor = ({|
app: Object,
cancelToken: any,
job: Job,
logger: Logger,
runJobId: string,
schedule?: Schedule,
session: Object
|}) => Promise<any>
// -----------------------------------------------------------------------------
const normalize = job => {
Object.keys(job).forEach(key => {
try {
const value = (job[key] = JSON.parse(job[key]))
// userId are always strings, even if the value is numeric, which might to
// them being parsed as numbers.
//
// The issue has been introduced by
// 48b2297bc151df582160be7c1bf1e8ee160320b8.
if (key === 'userId' && typeof value === 'number') {
job[key] = String(value)
}
} catch (_) {}
})
return job
}
const serialize = (job: {| [string]: any |}) => {
Object.keys(job).forEach(key => {
const value = job[key]
if (typeof value !== 'string') {
job[key] = JSON.stringify(job[key])
}
})
return job
}
class JobsDb extends Collection {
async create (job): Promise<Job> {
return normalize((await this.add(serialize((job: any)))).properties)
}
async save (job): Promise<void> {
await this.update(serialize((job: any)))
}
async get (properties): Promise<Array<Job>> {
const jobs = await super.get(properties)
jobs.forEach(normalize)
return jobs
}
}
// -----------------------------------------------------------------------------
export default class Jobs {
_app: any
_executors: { __proto__: null, [string]: Executor }
_jobs: JobsDb
_logger: Logger
_runningJobs: { __proto__: null, [string]: boolean }
constructor (xo: any) {
this._app = xo
const executors = (this._executors = { __proto__: null })
const jobsDb = (this._jobs = new JobsDb({
connection: xo._redis,
prefix: 'xo:job',
indexes: ['user_id', 'key'],
}))
this._logger = undefined
this._runningJobs = { __proto__: null }
executors.call = executeCall
xo.on('clean', () => jobsDb.rebuildIndexes())
xo.on('start', () => {
xo.addConfigManager(
'jobs',
() => jobsDb.get(),
jobs => Promise.all(mapToArray(jobs, job => jobsDb.save(job))),
['users']
)
xo.getLogger('jobs').then(logger => {
this._logger = logger
})
})
}
async getAllJobs (type?: string): Promise<Array<Job>> {
// $FlowFixMe don't know what is the problem (JFT)
const jobs = await this._jobs.get()
const runningJobs = this._runningJobs
const result = []
jobs.forEach(job => {
if (type === undefined || job.type === type) {
job.runId = runningJobs[job.id]
result.push(job)
}
})
return result
}
async getJob (id: string, type?: string): Promise<Job> {
let job = await this._jobs.first(id)
if (job === null || (type !== undefined && job.properties.type !== type)) {
throw noSuchObject(id, 'job')
}
job = job.properties
job.runId = this._runningJobs[id]
return job
}
createJob (job: $Diff<Job, {| id: string |}>): Promise<Job> {
return this._jobs.create(job)
}
async updateJob ({ id, ...props }: $Shape<Job>) {
const job = await this.getJob(id)
patch(job, props)
return /* await */ this._jobs.save(job)
}
registerJobExecutor (type: string, executor: Executor): void {
const executors = this._executors
if (type in executors) {
throw new Error(`there is already a job executor for type ${type}`)
}
executors[type] = executor
}
async removeJob (id: string) {
return /* await */ this._jobs.remove(id)
}
async _runJob (cancelToken: any, job: Job, schedule?: Schedule) {
const { id } = job
const runningJobs = this._runningJobs
if (id in runningJobs) {
throw new Error(`job ${id} is already running`)
}
const executor = this._executors[job.type]
if (executor === undefined) {
throw new Error(`cannot run job ${id}: no executor for type ${job.type}`)
}
const logger = this._logger
const runJobId = logger.notice(`Starting execution of ${id}.`, {
event: 'job.start',
userId: job.userId,
jobId: id,
// $FlowFixMe only defined for CallJob
key: job.key,
})
runningJobs[id] = runJobId
try {
const app = this._app
const session = app.createUserConnection()
session.set('user_id', job.userId)
const status = await executor({
app,
cancelToken,
job,
logger,
runJobId,
schedule,
session,
})
logger.notice(`Execution terminated for ${job.id}.`, {
event: 'job.end',
runJobId,
})
session.close()
app.emit('job:terminated', status)
} catch (error) {
logger.error(`The execution of ${id} has failed.`, {
event: 'job.end',
runJobId,
error: serializeError(error),
})
throw error
} finally {
delete runningJobs[id]
}
}
@cancelable
async runJobSequence (
$cancelToken: any,
idSequence: Array<string>,
schedule?: Schedule
) {
const jobs = await Promise.all(
mapToArray(idSequence, id => this.getJob(id))
)
for (const job of jobs) {
if ($cancelToken.requested) {
break
}
await this._runJob($cancelToken, job, schedule)
}
}
}

View File

@@ -2,7 +2,7 @@ import Ajv from 'ajv'
import { PluginsMetadata } from '../models/plugin-metadata'
import { invalidParameters, noSuchObject } from 'xo-common/api-errors'
import { createRawObject, isFunction, mapToArray } from '../utils'
import { isFunction, mapToArray } from '../utils'
// ===================================================================
@@ -11,7 +11,7 @@ export default class {
this._ajv = new Ajv({
useDefaults: true,
})
this._plugins = createRawObject()
this._plugins = { __proto__: null }
this._pluginsMetadata = new PluginsMetadata({
connection: xo._redis,

View File

@@ -335,6 +335,7 @@ export default class {
let set
if (
object.$type !== 'vm' ||
object.is_a_snapshot ||
// No set for this VM.
!(id = xapi.xo.getData(object, 'resourceSet')) ||
// Not our set.

View File

@@ -1,204 +1,169 @@
import { BaseError } from 'make-error'
// @flow
import { createSchedule } from '@xen-orchestra/cron'
import { noSuchObject } from 'xo-common/api-errors.js'
import { keyBy } from 'lodash'
import { noSuchObject } from 'xo-common/api-errors'
import { Schedules } from '../models/schedule'
import { forEach, mapToArray } from '../utils'
import Collection from '../collection/redis'
import patch from '../patch'
import { asyncMap } from '../utils'
// ===================================================================
export type Schedule = {|
cron: string,
enabled: boolean,
id: string,
jobId: string,
name: string,
timezone?: string,
userId: string
|}
const _resolveId = scheduleOrId => scheduleOrId.id || scheduleOrId
const normalize = schedule => {
const { enabled } = schedule
if (typeof enabled !== 'boolean') {
schedule.enabled = enabled === 'true'
}
if ('job' in schedule) {
schedule.jobId = schedule.job
delete schedule.job
}
return schedule
}
export class SchedulerError extends BaseError {}
export class ScheduleOverride extends SchedulerError {
constructor (scheduleOrId) {
super('Schedule ID ' + _resolveId(scheduleOrId) + ' is already added')
class Schedules extends Collection {
async get (properties) {
const schedules = await super.get(properties)
schedules.forEach(normalize)
return schedules
}
}
export class ScheduleNotEnabled extends SchedulerError {
constructor (scheduleOrId) {
super('Schedule ' + _resolveId(scheduleOrId) + ' is not enabled')
}
}
export default class Scheduling {
_app: any
_db: {|
add: Function,
first: Function,
get: Function,
remove: Function,
update: Function
|}
_runs: { __proto__: null, [string]: () => void }
export class ScheduleAlreadyEnabled extends SchedulerError {
constructor (scheduleOrId) {
super('Schedule ' + _resolveId(scheduleOrId) + ' is already enabled')
}
}
constructor (app: any) {
this._app = app
// ===================================================================
export default class {
constructor (xo) {
this.xo = xo
const schedules = (this._redisSchedules = new Schedules({
connection: xo._redis,
const db = (this._db = new Schedules({
connection: app._redis,
prefix: 'xo:schedule',
indexes: ['user_id', 'job'],
}))
this._scheduleTable = undefined
xo.on('clean', () => schedules.rebuildIndexes())
xo.on('start', () => {
xo.addConfigManager(
this._runs = { __proto__: null }
app.on('clean', async () => {
const [jobsById, schedules] = await Promise.all([
app.getAllJobs().then(_ => keyBy(_, 'id')),
app.getAllSchedules(),
])
await db.remove(
schedules.filter(_ => !(_.jobId in jobsById)).map(_ => _.id)
)
return db.rebuildIndexes()
})
app.on('start', async () => {
app.addConfigManager(
'schedules',
() => schedules.get(),
schedules_ =>
Promise.all(
mapToArray(schedules_, schedule => schedules.save(schedule))
),
() => db.get(),
schedules =>
asyncMap(schedules, schedule => db.update(normalize(schedule))),
['jobs']
)
return this._loadSchedules()
const schedules = await this.getAllSchedules()
schedules.forEach(schedule => this._start(schedule))
})
xo.on('stop', () => this._disableAll())
}
_add (schedule) {
const { id } = schedule
this._schedules[id] = schedule
this._scheduleTable[id] = false
try {
if (schedule.enabled) {
this._enable(schedule)
}
} catch (error) {
console.warn('Scheduling#_add(%s)', id, error)
}
}
_exists (scheduleOrId) {
const id_ = _resolveId(scheduleOrId)
return id_ in this._schedules
}
_isEnabled (scheduleOrId) {
return this._scheduleTable[_resolveId(scheduleOrId)]
}
_enable ({ cron, id, job, timezone = 'local' }) {
this._cronJobs[id] = createSchedule(cron, timezone).startJob(() =>
this.xo.runJobSequence([job])
)
this._scheduleTable[id] = true
}
_disable (scheduleOrId) {
if (!this._exists(scheduleOrId)) {
throw noSuchObject(scheduleOrId, 'schedule')
}
if (!this._isEnabled(scheduleOrId)) {
throw new ScheduleNotEnabled(scheduleOrId)
}
const id = _resolveId(scheduleOrId)
this._cronJobs[id]() // Stop cron job.
delete this._cronJobs[id]
this._scheduleTable[id] = false
}
_disableAll () {
forEach(this._scheduleTable, (enabled, id) => {
if (enabled) {
this._disable(id)
}
app.on('stop', () => {
const runs = this._runs
Object.keys(runs).forEach(id => {
runs[id]()
delete runs[id]
})
})
}
get scheduleTable () {
return this._scheduleTable
}
async _loadSchedules () {
this._schedules = {}
this._scheduleTable = {}
this._cronJobs = {}
const schedules = await this.xo.getAllSchedules()
forEach(schedules, schedule => {
this._add(schedule)
})
}
async _getSchedule (id) {
const schedule = await this._redisSchedules.first(id)
if (!schedule) {
throw noSuchObject(id, 'schedule')
}
return schedule
}
async getSchedule (id) {
return (await this._getSchedule(id)).properties
}
async getAllSchedules () {
return /* await */ this._redisSchedules.get()
}
async createSchedule (userId, { job, cron, enabled, name, timezone }) {
const schedule_ = await this._redisSchedules.create(
userId,
job,
async createSchedule ({
cron,
enabled,
jobId,
name,
timezone,
userId,
}: $Diff<Schedule, {| id: string |}>) {
const schedule = (await this._db.add({
cron,
enabled,
jobId,
name,
timezone
)
const schedule = schedule_.properties
this._add(schedule)
timezone,
userId,
})).properties
this._start(schedule)
return schedule
}
async updateSchedule (id, { job, cron, enabled, name, timezone }) {
const schedule = await this._getSchedule(id)
if (job !== undefined) schedule.set('job', job)
if (cron !== undefined) schedule.set('cron', cron)
if (enabled !== undefined) schedule.set('enabled', enabled)
if (name !== undefined) schedule.set('name', name)
if (timezone === null) {
schedule.set('timezone', undefined) // Remove current timezone
} else if (timezone !== undefined) {
schedule.set('timezone', timezone)
}
await this._redisSchedules.save(schedule)
const { properties } = schedule
if (!this._exists(id)) {
async getSchedule (id: string): Promise<Schedule> {
const schedule = await this._db.first(id)
if (schedule === null) {
throw noSuchObject(id, 'schedule')
}
// disable the schedule, _add() will enable it if necessary
if (this._isEnabled(id)) {
this._disable(id)
}
this._add(properties)
return schedule.properties
}
async removeSchedule (id) {
await this._redisSchedules.remove(id)
async getAllSchedules (): Promise<Array<Schedule>> {
return this._db.get()
}
try {
this._disable(id)
} catch (exc) {
if (!(exc instanceof SchedulerError)) {
throw exc
}
} finally {
delete this._schedules[id]
delete this._scheduleTable[id]
async deleteSchedule (id: string) {
this._stop(id)
await this._db.remove(id)
}
async updateSchedule ({
cron,
enabled,
id,
jobId,
name,
timezone,
userId,
}: $Shape<Schedule>) {
const schedule = await this.getSchedule(id)
patch(schedule, { cron, enabled, jobId, name, timezone, userId })
this._start(schedule)
await this._db.update(schedule)
}
_start (schedule: Schedule) {
const { id } = schedule
this._stop(id)
if (schedule.enabled) {
this._runs[id] = createSchedule(
schedule.cron,
schedule.timezone
).startJob(() => this._app.runJobSequence([schedule.jobId], schedule))
}
}
_stop (id: string) {
const runs = this._runs
if (id in runs) {
runs[id]()
delete runs[id]
}
}
}

View File

@@ -6,7 +6,6 @@ import xapiObjectToXo from '../xapi-object-to-xo'
import XapiStats from '../xapi-stats'
import {
camelToSnakeCase,
createRawObject,
forEach,
isEmpty,
isString,
@@ -19,15 +18,15 @@ import { Servers } from '../models/server'
export default class {
constructor (xo) {
this._objectConflicts = createRawObject() // TODO: clean when a server is disconnected.
this._objectConflicts = { __proto__: null } // TODO: clean when a server is disconnected.
const serversDb = (this._servers = new Servers({
connection: xo._redis,
prefix: 'xo:server',
indexes: ['host'],
}))
this._stats = new XapiStats()
this._xapis = createRawObject()
this._xapisByPool = createRawObject()
this._xapis = { __proto__: null }
this._xapisByPool = { __proto__: null }
this._xo = xo
xo.on('clean', () => serversDb.rebuildIndexes())
@@ -173,7 +172,7 @@ export default class {
const previous = objects.get(xoId, undefined)
if (previous && previous._xapiRef !== xapiObject.$ref) {
const conflicts_ =
conflicts[xoId] || (conflicts[xoId] = createRawObject())
conflicts[xoId] || (conflicts[xoId] = { __proto__: null })
conflicts_[conId] = xoObject
} else {
objects.set(xoId, xoObject)
@@ -235,7 +234,7 @@ export default class {
const conId = server.id
// Maps ids of XAPI objects to ids of XO objects.
const xapiIdsToXo = createRawObject()
const xapiIdsToXo = { __proto__: null }
// Map of XAPI objects which failed to be transformed to XO
// objects.
@@ -243,7 +242,7 @@ export default class {
// At each `finish` there will be another attempt to transform
// until they succeed.
let toRetry
let toRetryNext = createRawObject()
let toRetryNext = { __proto__: null }
const onAddOrUpdate = objects => {
this._onXenAdd(objects, xapiIdsToXo, toRetryNext, conId)
@@ -266,7 +265,7 @@ export default class {
if (!isEmpty(toRetryNext)) {
toRetry = toRetryNext
toRetryNext = createRawObject()
toRetryNext = { __proto__: null }
}
}

View File

@@ -17,7 +17,7 @@ import {
import mixins from './xo-mixins'
import Connection from './connection'
import { mixin } from './decorators'
import { createRawObject, generateToken, noop } from './utils'
import { generateToken, noop } from './utils'
// ===================================================================
@@ -36,9 +36,9 @@ export default class Xo extends EventEmitter {
// Connections to users.
this._nextConId = 0
this._connections = createRawObject()
this._connections = { __proto__: null }
this._httpRequestWatchers = createRawObject()
this._httpRequestWatchers = { __proto__: null }
// Connects to Redis.
{
@@ -90,7 +90,7 @@ export default class Xo extends EventEmitter {
}
}
const results = createRawObject(null)
const results = { __proto__: null }
for (const id in all) {
const object = all[id]
if (filter(object, id, all)) {
@@ -251,8 +251,8 @@ export default class Xo extends EventEmitter {
let entered, exited
function reset () {
entered = createRawObject()
exited = createRawObject()
entered = { __proto__: null }
exited = { __proto__: null }
}
reset()

View File

@@ -1,7 +1,7 @@
{
"private": false,
"name": "xo-web",
"version": "5.16.1",
"version": "5.17.2",
"license": "AGPL-3.0",
"description": "Web interface client for Xen-Orchestra",
"keywords": [
@@ -30,8 +30,9 @@
"node": ">=6"
},
"devDependencies": {
"@julien-f/freactal": "0.1.0",
"@nraynaud/novnc": "0.6.1",
"@xen-orchestra/cron": "^1.0.0",
"@xen-orchestra/cron": "^1.0.2",
"ansi_up": "^2.0.2",
"asap": "^2.0.6",
"babel-core": "^6.26.0",
@@ -56,7 +57,7 @@
"chartist-plugin-legend": "^0.6.1",
"chartist-plugin-tooltip": "0.0.11",
"classnames": "^2.2.3",
"complex-matcher": "^0.2.1",
"complex-matcher": "^0.3.0",
"cookies-js": "^1.2.2",
"d3": "^4.12.2",
"debounce-input-decorator": "^0.1.0",
@@ -68,7 +69,7 @@
"font-mfizz": "^2.4.1",
"get-stream": "^3.0.0",
"gulp": "^4.0.0",
"gulp-autoprefixer": "^4.1.0",
"gulp-autoprefixer": "^5.0.0",
"gulp-csso": "^3.0.0",
"gulp-embedlr": "^0.5.2",
"gulp-plumber": "^1.1.0",
@@ -128,7 +129,7 @@
"uglify-es": "^3.3.4",
"uncontrollable-input": "^0.1.1",
"url-parse": "^1.2.0",
"value-matcher": "^0.1.0",
"value-matcher": "^0.2.0",
"vinyl": "^2.1.0",
"watchify": "^3.7.0",
"whatwg-fetch": "^2.0.3",

View File

@@ -1,5 +1,5 @@
import isFunction from 'lodash/isFunction'
import React from 'react'
import { isFunction, startsWith } from 'lodash'
import Button from './button'
import Component from './base-component'
@@ -27,6 +27,9 @@ import { error as _error } from './notification'
handler: propTypes.func.isRequired,
// optional value which will be passed as first param to the handler
//
// if you need multiple values, you can provide `data-*` props instead of
// `handlerParam`
handlerParam: propTypes.any,
// XO icon to use for this button
@@ -50,11 +53,30 @@ export default class ActionButton extends Component {
}
async _execute () {
if (this.props.pending || this.state.working) {
const { props } = this
if (props.pending || this.state.working) {
return
}
const { children, handler, handlerParam, tooltip } = this.props
const { children, handler, tooltip } = props
let handlerParam
if ('handlerParam' in props) {
handlerParam = props.handlerParam
} else {
let empty = true
handlerParam = {}
Object.keys(props).forEach(key => {
if (startsWith(key, 'data-')) {
empty = false
handlerParam[key.slice(5)] = props[key]
}
})
if (empty) {
handlerParam = undefined
}
}
try {
this.setState({
@@ -64,7 +86,7 @@ export default class ActionButton extends Component {
const result = await handler(handlerParam)
const { redirectOnSuccess } = this.props
const { redirectOnSuccess } = props
if (redirectOnSuccess) {
return this.context.router.push(
isFunction(redirectOnSuccess)

View File

@@ -7,6 +7,16 @@ const call = fn => fn()
// callbacks have been correctly initialized when there are circular dependencies
const addSubscriptions = subscriptions => Component =>
class SubscriptionWrapper extends React.PureComponent {
constructor () {
super()
// provide all props since the beginning (better behavior with Freactal)
const state = (this.state = {})
Object.keys(subscriptions).forEach(key => {
state[key] = undefined
})
}
_unsubscribes = null
componentWillMount () {

View File

@@ -1,6 +1,6 @@
import React from 'react'
import { Portal } from 'react-overlays'
import { forEach, isEmpty, keys, map, noop } from 'lodash'
import { forEach, isEmpty, keys, map } from 'lodash'
import _ from './intl'
import ActionButton from './action-button'
@@ -76,9 +76,6 @@ const ActionButton_ = ({ children, labelId, ...props }) => (
// ===================================================================
@connectStore({
hostsById: createGetObjectsOfType('host').groupBy('id'),
})
class HostsPatchesTable extends Component {
constructor (props) {
super(props)
@@ -94,21 +91,15 @@ class HostsPatchesTable extends Component {
)
_subscribeMissingPatches = (hosts = this.props.hosts) => {
const { hostsById } = this.props
const unsubs = map(
hosts,
host =>
hostsById
? subscribeHostMissingPatches(hostsById[host.id][0], patches =>
this.setState({
missingPatches: {
...this.state.missingPatches,
[host.id]: patches.length,
},
})
)
: noop
const unsubs = map(hosts, host =>
subscribeHostMissingPatches(host, patches =>
this.setState({
missingPatches: {
...this.state.missingPatches,
[host.id]: patches.length,
},
})
)
)
if (this.unsubscribeMissingPatches !== undefined) {

View File

@@ -207,6 +207,7 @@ const messages = {
selectSshKey: 'Select SSH key(s)…',
selectSrs: 'Select SR(s)…',
selectVms: 'Select VM(s)…',
selectVmSnapshots: 'Select snapshot(s)…',
selectVmTemplates: 'Select VM template(s)…',
selectTags: 'Select tag(s)…',
selectVdis: 'Select disk(s)…',
@@ -259,6 +260,7 @@ const messages = {
jobId: 'ID',
jobType: 'Type',
jobName: 'Name',
jobMode: 'Mode',
jobNamePlaceholder: 'Name of your job (forbidden: "_")',
jobStart: 'Start',
jobEnd: 'End',
@@ -274,9 +276,12 @@ const messages = {
jobServerTimezone: 'Server',
runJob: 'Run job',
runJobVerbose: 'One shot running started. See overview for logs.',
jobStarted: 'Started',
jobFinished: 'Finished',
jobInterrupted: 'Interrupted',
jobStarted: 'Started',
saveBackupJob: 'Save',
resetBackupJob: 'Reset',
createBackupJob: 'Create',
deleteBackupSchedule: 'Remove backup job',
deleteBackupScheduleQuestion:
'Are you sure you want to delete this backup job?',
@@ -286,6 +291,9 @@ const messages = {
jobEditMessage:
'You are editing job {name} ({id}). Saving will override previous job state.',
scheduleEdit: 'Edit',
scheduleSave: 'Save',
cancelScheduleEdition: 'Cancel',
scheduleAdd: 'Add a schedule',
scheduleDelete: 'Delete',
deleteSelectedSchedules: 'Delete selected schedules',
noScheduledJobs: 'No scheduled jobs.',
@@ -311,6 +319,14 @@ const messages = {
smartBackupModeSelection: 'Select backup mode:',
normalBackup: 'Normal backup',
smartBackup: 'Smart backup',
exportRetention: 'Export retention',
snapshotRetention: 'Snapshot retention',
backupName: 'Name',
useDelta: 'Use delta',
useCompression: 'Use compression',
smartBackupModeTitle: 'Smart mode',
backupTargetRemotes: 'Target remotes (for Export)',
backupTargetSrs: 'Target SRs (for Replication)',
localRemoteWarningTitle: 'Local remote selected',
localRemoteWarningMessage:
'Warning: local remotes will use limited XOA disk space. Only for advanced users.',
@@ -319,10 +335,12 @@ const messages = {
editBackupVmsTitle: 'VMs',
editBackupSmartStatusTitle: 'VMs statuses',
editBackupSmartResidentOn: 'Resident on',
editBackupSmartNotResidentOn: 'Not resident on',
editBackupSmartPools: 'Pools',
editBackupSmartTags: 'Tags',
sampleOfMatchingVms: 'Sample of matching Vms',
editBackupSmartTagsTitle: 'VMs Tags',
editBackupSmartExcludedTagsTitle: 'Excluded VMs tags',
editBackupNot: 'Reverse',
editBackupTagTitle: 'Tag',
editBackupReportTitle: 'Report',
@@ -352,6 +370,13 @@ const messages = {
remoteTestSuccessMessage: 'The remote appears to work correctly',
remoteConnectionFailed: 'Connection failed',
// ------ Backup job -----
confirmDeleteBackupJobsTitle:
'Delete backup job{nJobs, plural, one {} other {s}}',
confirmDeleteBackupJobsBody:
'Are you sure you want to delete {nJobs, number} backup job{nJobs, plural, one {} other {s}}?',
// ------ Remote -----
remoteName: 'Name',
remotePath: 'Path',
@@ -431,10 +456,10 @@ const messages = {
groupNameColumn: 'Name',
groupUsersColumn: 'Users',
addUserToGroupColumn: 'Add User',
userNameColumn: 'Email',
userNameColumn: 'Username',
userPermissionColumn: 'Permissions',
userPasswordColumn: 'Password',
userName: 'Email',
userName: 'Username',
userPassword: 'Password',
createUserButton: 'Create',
noUserFound: 'No user found',
@@ -514,7 +539,7 @@ const messages = {
srNoVdis: 'No VDIs in this storage',
// ----- Pool general -----
poolTitleRamUsage: 'Pool RAM usage:',
poolRamUsage: '{used} used on {total}',
poolRamUsage: '{used} used on {total} ({free} free)',
poolMaster: 'Master:',
displayAllHosts: 'Display all hosts of this pool',
displayAllStorages: 'Display all storages of this pool',
@@ -585,7 +610,9 @@ const messages = {
// ----- host stat tab -----
statLoad: 'Load average',
// ----- host advanced tab -----
memoryHostState: 'RAM Usage: {memoryUsed}',
hostTitleRamUsage: 'Host RAM usage:',
memoryHostState:
'RAM: {memoryUsed} used on {memoryTotal} ({memoryFree} free)',
hardwareHostSettingsLabel: 'Hardware',
hostAddress: 'Address',
hostStatus: 'Status',
@@ -759,12 +786,13 @@ const messages = {
vdiTags: 'Tags',
vdiSize: 'Size',
vdiSr: 'SR',
vdiVm: 'VM',
vdiVms: 'VMs',
vdiMigrate: 'Migrate VDI',
vdiMigrateSelectSr: 'Destination SR:',
vdiMigrateAll: 'Migrate all VDIs',
vdiMigrateNoSr: 'No SR',
vdiMigrateNoSrMessage: 'A target SR is required to migrate a VDI',
vdiDelete: 'Delete VDI',
vdiForget: 'Forget',
vdiRemove: 'Remove VDI',
noControlDomainVdis: 'No VDIs attached to Control Domain',
@@ -775,6 +803,7 @@ const messages = {
vbdNoVbd: 'No disks',
vbdConnect: 'Connect VBD',
vbdDisconnect: 'Disconnect VBD',
vbdsDisconnect: 'Disconnect VBDs',
vbdBootable: 'Bootable',
vbdReadonly: 'Readonly',
vbdAction: 'Action',
@@ -928,6 +957,7 @@ const messages = {
vmStateHalted: 'Halted',
vmStateOther: 'Other',
vmStateRunning: 'Running',
vmStateAll: 'All',
taskStatePanel: 'Pending tasks',
usersStatePanel: 'Users',
srStatePanel: 'Storage state',
@@ -1089,6 +1119,7 @@ const messages = {
importVmsCleanList: 'Reset',
vmImportSuccess: 'VM import success',
vmImportFailed: 'VM import failed',
setVmFailed: 'Error on setting the VM: {vm}',
startVmImport: 'Import starting…',
startVmExport: 'Export starting…',
nCpus: 'N CPUs',
@@ -1116,6 +1147,14 @@ const messages = {
// ---- Backup views ---
backupSchedules: 'Schedules',
backupSavedSchedules: 'Saved schedules',
backupNewSchedules: 'New schedules',
scheduleCron: 'Cron pattern',
scheduleName: 'Name',
scheduleTimezone: 'Timezone',
scheduleExportRetention: 'Export ret.',
scheduleSnapshotRetention: 'Snapshot ret.',
scheduleRun: 'Run',
getRemote: 'Get remote',
listRemote: 'List Remote',
simpleBackup: 'simple',
@@ -1128,8 +1167,10 @@ const messages = {
remoteError: 'Error',
noBackup: 'No backup available',
backupVmNameColumn: 'VM Name',
backupVmDescriptionColumn: 'VM Description',
backupTags: 'Tags',
lastBackupColumn: 'Last Backup',
firstBackupColumn: 'Oldest backup',
lastBackupColumn: 'Latest backup',
availableBackupsColumn: 'Available Backups',
backupRestoreErrorTitle: 'Missing parameters',
backupRestoreErrorMessage: 'Choose a SR and a backup',
@@ -1141,6 +1182,28 @@ const messages = {
importBackupTitle: 'Import VM',
importBackupMessage: 'Starting your backup import',
vmsToBackup: 'VMs to backup',
restoreResfreshList: 'Refresh backup list',
restoreVmBackups: 'Restore',
restoreVmBackupsTitle: 'Restore {vm}',
restoreVmBackupsBulkTitle:
'Restore {nVms, number} VM{nVms, plural, one {} other {s}}',
restoreVmBackupsBulkMessage:
'Restore {nVms, number} VM{nVms, plural, one {} other {s}} from {nVms, plural, one {its} other {their}} {oldestOrLatest} backup.',
oldest: 'oldest',
latest: 'latest',
restoreVmBackupsStart:
'Start VM{nVms, plural, one {} other {s}} after restore',
restoreVmBackupsBulkErrorTitle: 'Multi-restore error',
restoreVmBackupsBulkErrorMessage: 'You need to select a destination SR',
deleteVmBackups: 'Delete backups…',
deleteVmBackupsTitle: 'Delete {vm} backups',
deleteVmBackupsSelect: 'Select backups to delete:',
deleteVmBackupsSelectAll: 'All',
deleteVmBackupsBulkTitle: 'Delete backups',
deleteVmBackupsBulkMessage:
'Are you sure you want to delete all the backups from {nVms, number} VM{nVms, plural, one {} other {s}}?',
deleteVmBackupsBulkConfirmText:
'delete {nBackups} backup{nBackups, plural, one {} other {s}}',
// ----- Restore files view -----
listRemoteBackups: 'List remote backups',
@@ -1160,6 +1223,9 @@ const messages = {
restoreFilesUnselectAll: 'Unselect all files',
// ----- Modals -----
emergencyShutdownHostModalTitle: 'Emergency shutdown Host',
emergencyShutdownHostModalMessage:
'Are you sure you want to shutdown {host}?',
emergencyShutdownHostsModalTitle:
'Emergency shutdown Host{nHosts, plural, one {} other {s}}',
emergencyShutdownHostsModalMessage:
@@ -1240,6 +1306,9 @@ const messages = {
chooseSrForEachVdisModalVdiLabel: 'VDI',
chooseSrForEachVdisModalSrLabel: 'SR*',
chooseSrForEachVdisModalOptionalEntry: '* optional',
deleteVbdsModalTitle: 'Delete VBD{nVbds, plural, one {} other {s}}',
deleteVbdsModalMessage:
'Are you sure you want to delete {nVbds, number} VBD{nVbds, plural, one {} other {s}}?',
deleteVdiModalTitle: 'Delete VDI',
deleteVdiModalMessage:
'Are you sure you want to delete this disk? ALL DATA ON THIS DISK WILL BE LOST',
@@ -1265,12 +1334,16 @@ const messages = {
deleteSnapshotsModalTitle: 'Delete snapshot{nVms, plural, one {} other {s}}',
deleteSnapshotsModalMessage:
'Are you sure you want to delete {nVms, number} snapshot{nVms, plural, one {} other {s}}?',
disconnectVbdsModalTitle: 'Disconnect VBD{nVbds, plural, one {} other {s}}',
disconnectVbdsModalMessage:
'Are you sure you want to disconnect {nVbds, number} VBD{nVbds, plural, one {} other {s}}?',
revertVmModalMessage:
'Are you sure you want to revert this VM to the snapshot state? This operation is irreversible.',
revertVmModalSnapshotBefore: 'Snapshot before',
importBackupModalTitle: 'Import a {name} Backup',
importBackupModalStart: 'Start VM after restore',
importBackupModalSelectBackup: 'Select your backup…',
importBackupModalSelectSr: 'Select a destination SR…',
removeAllOrphanedModalWarning:
'Are you sure you want to remove all orphaned snapshot VDIs?',
removeAllLogsModalTitle: 'Remove all logs',

View File

@@ -7,19 +7,23 @@ import Icon from 'icon'
import propTypes from './prop-types-decorator'
import Tooltip from 'tooltip'
import { alert } from 'modal'
import { connectStore } from './utils'
import { SelectVdi } from './select-objects'
import { isAdmin } from 'selectors'
import { SelectVdi, SelectResourceSetsVdi } from './select-objects'
import { addSubscriptions, connectStore, resolveResourceSet } from './utils'
import { ejectCd, insertCd, subscribeResourceSets } from './xo'
import {
createGetObjectsOfType,
createFinder,
createGetObject,
createSelector,
} from './selectors'
import { ejectCd, insertCd } from './xo'
@propTypes({
vm: propTypes.object.isRequired,
})
@addSubscriptions({
resourceSets: subscribeResourceSets,
})
@connectStore(() => {
const getCdDrive = createFinder(
createGetObjectsOfType('VBD').pick((_, { vm }) => vm.$VBDs),
@@ -35,6 +39,7 @@ import { ejectCd, insertCd } from './xo'
return {
cdDrive: getCdDrive,
isAdmin,
mountedIso: getMountedIso,
}
})
@@ -55,6 +60,17 @@ export default class IsoDevice extends Component {
}
)
_getResolvedResourceSet = createSelector(
createFinder(
() => this.props.resourceSets,
createSelector(
() => this.props.vm.resourceSet,
id => resourceSet => resourceSet.id === id
)
),
resolveResourceSet
)
_handleInsert = iso => {
const { vm } = this.props
@@ -70,13 +86,17 @@ export default class IsoDevice extends Component {
_showWarning = () => alert(_('cdDriveNotInstalled'), _('cdDriveInstallation'))
render () {
const { cdDrive, mountedIso } = this.props
const { cdDrive, isAdmin, mountedIso } = this.props
const resourceSet = this._getResolvedResourceSet()
const useResourceSet = !(isAdmin || resourceSet === undefined)
const SelectVdi_ = useResourceSet ? SelectResourceSetsVdi : SelectVdi
return (
<div className='input-group'>
<SelectVdi
srPredicate={this._getPredicate()}
<SelectVdi_
onChange={this._handleInsert}
resourceSet={useResourceSet ? resourceSet : undefined}
srPredicate={this._getPredicate()}
value={mountedIso}
/>
<span className='input-group-btn'>

View File

@@ -235,7 +235,8 @@ export const confirm = ({ body, icon = 'alarm', title, strongConfirm }) =>
resolve={resolve}
strongConfirm={strongConfirm}
title={title}
/>
/>,
reject
)
})
: chooseAction({

View File

@@ -3,9 +3,14 @@ import React from 'react'
import Link from './link'
export const NavLink = ({ children, to }) => (
export const NavLink = ({ children, exact, to }) => (
<li className='nav-item' role='tab'>
<Link className='nav-link' activeClassName='active' to={to}>
<Link
activeClassName='active'
className='nav-link'
onlyActiveOnIndex={exact}
to={to}
>
{children}
</Link>
</li>

View File

@@ -420,6 +420,27 @@ export const SelectVm = makeStoreSelect(
// ===================================================================
export const SelectVmSnapshot = makeStoreSelect(
() => {
const getSnapshotsByVms = createGetObjectsOfType('VM-snapshot')
.filter(getPredicate)
.sort()
.groupBy('$snapshot_of')
const getVms = createGetObjectsOfType('VM')
.pick(createSelector(getSnapshotsByVms, keys))
.sort()
return {
xoObjects: getSnapshotsByVms,
xoContainers: getVms,
}
},
{ placeholder: _('selectVmSnapshots') }
)
// ===================================================================
export const SelectHostVm = makeStoreSelect(
() => {
const getHosts = createGetObjectsOfType('host')

View File

@@ -1,53 +0,0 @@
import * as CM from 'complex-matcher'
import { flatten, identity, map } from 'lodash'
import { EMPTY_OBJECT } from './utils'
export const destructPattern = (pattern, valueTransform = identity) =>
pattern && {
not: !!pattern.__not,
values: valueTransform((pattern.__not || pattern).__or),
}
export const constructPattern = (
{ not, values } = EMPTY_OBJECT,
valueTransform = identity
) => {
if (values == null || !values.length) {
return
}
const pattern = { __or: valueTransform(values) }
return not ? { __not: pattern } : pattern
}
const parsePattern = pattern => {
const patternValues = flatten(
pattern.__not !== undefined ? pattern.__not.__or : pattern.__or
)
const queryString = new CM.Or(
map(patternValues, array => new CM.String(array))
)
return pattern.__not !== undefined ? CM.Not(queryString) : queryString
}
export const constructQueryString = pattern => {
const powerState = pattern.power_state
const pool = pattern.$pool
const tags = pattern.tags
const filter = []
if (powerState !== undefined) {
filter.push(new CM.Property('power_state', new CM.String(powerState)))
}
if (pool !== undefined) {
filter.push(new CM.Property('$pool', parsePattern(pool)))
}
if (tags !== undefined) {
filter.push(new CM.Property('tags', parsePattern(tags)))
}
return filter.length !== 0 ? new CM.And(filter).toString() : ''
}

View File

@@ -0,0 +1,112 @@
import * as CM from 'complex-matcher'
import { get, identity, isEmpty } from 'lodash'
import { EMPTY_OBJECT } from './../utils'
// Decompose an `{ __or }` pattern — possibly wrapped in `{ __not }` — into
// `{ not, values }`, applying `valueTransform` to the value list.
// Falsy patterns are passed through untouched.
export const destructPattern = (pattern, valueTransform = identity) => {
  if (!pattern) {
    return pattern
  }
  const source = pattern.__not || pattern
  return {
    not: !!pattern.__not,
    values: valueTransform(source.__or),
  }
}
// Inverse of `destructPattern`: rebuild the raw pattern object from a
// `{ not, values }` descriptor. Yields `undefined` for an empty value list.
export const constructPattern = (
  { not, values } = EMPTY_OBJECT,
  valueTransform = identity
) => {
  if (values == null || values.length === 0) {
    return
  }
  const orPattern = { __or: valueTransform(values) }
  return not ? { __not: orPattern } : orPattern
}
// ===================================================================
// Decompose a smart pattern into `{ values, notValues }`.
// Two shapes are accepted:
//   { __and: [ { __or }, { __not: { __or } } ] }  — both parts present
//   { __or } or { __not: { __or } }               — a single part
export const destructSmartPattern = (pattern, valueTransform = identity) => {
  if (!pattern) {
    return pattern
  }
  const combined = pattern.__and
  const rawValues = combined !== undefined ? combined[0].__or : pattern.__or
  const rawNotValues =
    combined !== undefined
      ? combined[1].__not.__or
      : get(pattern, '__not.__or')
  return {
    values: valueTransform(rawValues),
    notValues: valueTransform(rawNotValues),
  }
}
// Inverse of `destructSmartPattern`: rebuild the raw smart pattern from
// `{ values, notValues }`. Returns `undefined` when both lists are empty;
// combines both parts under `__and` only when both are present.
export const constructSmartPattern = (
  { values, notValues } = EMPTY_OBJECT,
  valueTransform = identity
) => {
  const hasValues = !isEmpty(values)
  const hasNotValues = !isEmpty(notValues)
  if (!hasValues && !hasNotValues) {
    return
  }
  const positive = hasValues && { __or: valueTransform(values) }
  const negative = hasNotValues && {
    __not: { __or: valueTransform(notValues) },
  }
  if (positive && negative) {
    return { __and: [positive, negative] }
  }
  return positive || negative
}
// ===================================================================
// Recursively translate a pattern value into a complex-matcher AST node:
// strings become CM.String, arrays become an AND of their items, and
// objects are either a logical combinator (__and / __or / __not when it is
// the sole key) or a conjunction of per-property matchers. Anything else
// (numbers, booleans, …) is rejected.
const valueToComplexMatcher = pattern => {
  if (typeof pattern === 'string') {
    return new CM.String(pattern)
  }
  if (Array.isArray(pattern)) {
    return new CM.And(pattern.map(valueToComplexMatcher))
  }
  if (pattern !== null && typeof pattern === 'object') {
    const keys = Object.keys(pattern)
    if (keys.length === 1) {
      switch (keys[0]) {
        case '__and':
          return new CM.And(pattern.__and.map(valueToComplexMatcher))
        case '__or':
          return new CM.Or(pattern.__or.map(valueToComplexMatcher))
        case '__not':
          return new CM.Not(valueToComplexMatcher(pattern.__not))
      }
    }
    // Plain object: match every defined property; an object with no
    // defined properties matches null.
    const children = []
    for (const property of keys) {
      const subpattern = pattern[property]
      if (subpattern !== undefined) {
        children.push(
          new CM.Property(property, valueToComplexMatcher(subpattern))
        )
      }
    }
    return children.length === 0 ? new CM.Null() : new CM.And(children)
  }
  throw new Error('could not transform this pattern')
}
// Render a pattern as a complex-matcher query string. Unsupported pattern
// shapes are logged and mapped to the empty string instead of throwing, so
// callers can always use the result directly in a URL.
export const constructQueryString = pattern => {
  try {
    const matcher = valueToComplexMatcher(pattern)
    return matcher.toString()
  } catch (error) {
    console.warn('constructQueryString', pattern, error)
    return ''
  }
}
// ===================================================================
export default from './preview'

View File

@@ -0,0 +1,87 @@
import _ from 'intl'
import PropTypes from 'prop-types'
import React from 'react'
import { createPredicate } from 'value-matcher'
import { createSelector } from 'reselect'
import { filter, map, pickBy } from 'lodash'
import Component from './../base-component'
import Icon from './../icon'
import Link from './../link'
import renderXoItem from './../render-xo-item'
import Tooltip from './../tooltip'
import { Card, CardBlock, CardHeader } from './../card'
import { constructQueryString } from './index'
const SAMPLE_SIZE_OF_MATCHING_VMS = 3
// Live preview card for smart-backup mode: shows how many VMs currently
// match the given pattern, lists a small sample of them and links to the
// /home view pre-filtered with the equivalent query string.
export default class SmartBackupPreview extends Component {
  static propTypes = {
    // smart-backup pattern (power_state / $pool / tags criteria)
    pattern: PropTypes.object.isRequired,
    // collection of candidate VMs to match against
    vms: PropTypes.object.isRequired,
  }

  // All VMs matching the pattern; null-valued criteria are dropped before
  // building the predicate so they do not over-restrict the match.
  _getMatchingVms = createSelector(
    () => this.props.vms,
    createSelector(
      () => this.props.pattern,
      pattern => createPredicate(pickBy(pattern, val => val != null))
    ),
    (vms, predicate) => filter(vms, predicate)
  )

  // First few matching VMs, displayed in the card.
  _getSampleOfMatchingVms = createSelector(this._getMatchingVms, vms =>
    vms.slice(0, SAMPLE_SIZE_OF_MATCHING_VMS)
  )

  // Complex-matcher query string equivalent to the pattern, used as the
  // `s` search param of the /home link below.
  _getQueryString = createSelector(
    () => this.props.pattern,
    constructQueryString
  )

  render () {
    const nMatchingVms = this._getMatchingVms().length
    const sampleOfMatchingVms = this._getSampleOfMatchingVms()
    const queryString = this._getQueryString()
    return (
      <Card>
        <CardHeader>{_('sampleOfMatchingVms')}</CardHeader>
        <CardBlock>
          {nMatchingVms === 0 ? (
            <p className='text-xs-center'>{_('noMatchingVms')}</p>
          ) : (
            <div>
              <ul className='list-group'>
                {map(sampleOfMatchingVms, vm => (
                  <li className='list-group-item' key={vm.id}>
                    {renderXoItem(vm)}
                  </li>
                ))}
              </ul>
              <br />
              <Tooltip content={_('redirectToMatchingVms')}>
                <Link
                  className='pull-right'
                  target='_blank'
                  to={{
                    pathname: '/home',
                    query: {
                      t: 'VM',
                      s: queryString,
                    },
                  }}
                >
                  {_('allMatchingVms', {
                    icon: <Icon icon='preview' />,
                    nMatchingVms,
                  })}
                </Link>
              </Tooltip>
            </div>
          )}
        </CardBlock>
      </Card>
    )
  }
}

View File

@@ -18,6 +18,7 @@ import {
isEmpty,
isFunction,
map,
startsWith,
} from 'lodash'
import ActionRowButton from '../action-row-button'
@@ -33,6 +34,7 @@ import Tooltip from '../tooltip'
import { BlockLink } from '../link'
import { Container, Col } from '../grid'
import {
createCollectionWrapper,
createCounter,
createFilter,
createPager,
@@ -207,15 +209,19 @@ class IndividualAction extends Component {
isFunction(disabled) ? disabled(item, userData) : disabled
)
_executeAction = () => {
const p = this.props
return p.handler(p.item, p.userData)
}
render () {
const { icon, label, level, handler, item } = this.props
const { icon, label, level } = this.props
return (
<ActionRowButton
btnStyle={level}
disabled={this._getIsDisabled()}
handler={handler}
handlerParam={item}
handler={this._executeAction}
icon={icon}
tooltip={label}
/>
@@ -232,15 +238,19 @@ class GroupedAction extends Component {
isFunction(disabled) ? disabled(selectedItems, userData) : disabled
)
_executeAction = () => {
const p = this.props
return p.handler(p.selectedItems, p.userData)
}
render () {
const { icon, label, level, handler, selectedItems } = this.props
const { icon, label, level } = this.props
return (
<ActionRowButton
btnStyle={level}
disabled={this._getIsDisabled()}
handler={handler}
handlerParam={selectedItems}
handler={this._executeAction}
icon={icon}
tooltip={label}
/>
@@ -276,7 +286,12 @@ const URL_STATE_RE = /^(?:(\d+)(?:_(\d+)(_desc)?)?-)?(.*)$/
disabled: propTypes.oneOfType([propTypes.bool, propTypes.func]),
handler: propTypes.func.isRequired,
icon: propTypes.string.isRequired,
individualDisabled: propTypes.oneOfType([
propTypes.bool,
propTypes.func,
]),
individualHandler: propTypes.func,
individualLabel: propTypes.node,
label: propTypes.node.isRequired,
level: propTypes.oneOf(['primary', 'warning', 'danger']),
})
@@ -287,10 +302,13 @@ const URL_STATE_RE = /^(?:(\d+)(?:_(\d+)(_desc)?)?-)?(.*)$/
paginationContainer: propTypes.func,
rowAction: propTypes.func,
rowLink: propTypes.oneOfType([propTypes.func, propTypes.string]),
rowTransform: propTypes.func,
// DOM node selector like body or .my-class
// The shortcuts will be enabled when the node is focused
shortcutsTarget: propTypes.string,
stateUrlParam: propTypes.string,
// @deprecated, use `data-${key}` instead
userData: propTypes.any,
},
{
@@ -305,6 +323,20 @@ export default class SortedTable extends Component {
constructor (props, context) {
super(props, context)
this._getUserData =
'userData' in props
? () => this.props.userData
: createCollectionWrapper(() => {
const { props } = this
const userData = {}
Object.keys(props).forEach(key => {
if (startsWith(key, 'data-')) {
userData[key.slice(5)] = props[key]
}
})
return isEmpty(userData) ? undefined : userData
})
let selectedColumn = props.defaultColumn
if (selectedColumn == null) {
selectedColumn = findIndex(props.columns, 'default')
@@ -350,17 +382,33 @@ export default class SortedTable extends Component {
this._getSelectedColumn = () =>
this.props.columns[this.state.selectedColumn]
this._getTotalNumberOfItems = createCounter(() => this.props.collection)
let getAllItems = () => this.props.collection
if ('rowTransform' in props) {
getAllItems = createSelector(
getAllItems,
this._getUserData,
() => this.props.rowTransform,
(items, userData, rowTransform) =>
map(items, item => rowTransform(item, userData))
)
}
this._getTotalNumberOfItems = createCounter(getAllItems)
const createMatcher = str => CM.parse(str).createPredicate()
this._getItems = createSort(
createFilter(
() => this.props.collection,
createSelector(() => this.state.filter, createMatcher)
getAllItems,
createSelector(
() => this.state.filter,
filter => {
try {
return CM.parse(filter).createPredicate()
} catch (_) {}
}
)
),
createSelector(
() => this._getSelectedColumn().sortCriteria,
() => this.props.userData,
this._getUserData,
(sortCriteria, userData) =>
typeof sortCriteria === 'function'
? object => sortCriteria(object, userData)
@@ -396,7 +444,7 @@ export default class SortedTable extends Component {
() => this.state.highlighted,
() => this.props.rowLink,
() => this.props.rowAction,
() => this.props.userData,
this._getUserData,
(
visibleItems,
hasGroupedActions,
@@ -643,7 +691,8 @@ export default class SortedTable extends Component {
_renderItem = (item, i) => {
const { props, state } = this
const { actions, individualActions, rowAction, rowLink, userData } = props
const { actions, individualActions, rowAction, rowLink } = props
const userData = this._getUserData()
const hasGroupedActions = this._hasGroupedActions()
const hasIndividualActions =
@@ -689,9 +738,11 @@ export default class SortedTable extends Component {
{map(actions, (props, key) => (
<IndividualAction
{...props}
disabled={props.individualDisabled || props.disabled}
handler={props.individualHandler || props.handler}
item={props.individualHandler !== undefined ? item : [item]}
key={key}
label={props.individualLabel || props.label}
userData={userData}
/>
))}
@@ -736,7 +787,6 @@ export default class SortedTable extends Component {
itemsPerPage,
paginationContainer,
shortcutsTarget,
userData,
} = props
const { all } = state
const groupedActions = this._getGroupedActions()
@@ -773,6 +823,8 @@ export default class SortedTable extends Component {
/>
)
const userData = this._getUserData()
return (
<div>
{shortcutsTarget !== undefined && (

View File

@@ -5,9 +5,11 @@ import XoHostInput from './xo-host-input'
import XoPoolInput from './xo-pool-input'
import XoRemoteInput from './xo-remote-input'
import XoRoleInput from './xo-role-input'
import xoSnapshotInput from './xo-snapshot-input'
import XoSrInput from './xo-sr-input'
import XoSubjectInput from './xo-subject-input'
import XoTagInput from './xo-tag-input'
import XoVdiInput from './xo-vdi-input'
import XoVmInput from './xo-vm-input'
import { getType, getXoType } from '../json-schema-input/helpers'
@@ -18,9 +20,11 @@ const XO_TYPE_TO_COMPONENT = {
pool: XoPoolInput,
remote: XoRemoteInput,
role: XoRoleInput,
snapshot: xoSnapshotInput,
sr: XoSrInput,
subject: XoSubjectInput,
tag: XoTagInput,
vdi: XoVdiInput,
vm: XoVmInput,
xoobject: XoHighLevelObjectInput,
}

View File

@@ -0,0 +1,27 @@
import React from 'react'
import XoAbstractInput from './xo-abstract-input'
import { PrimitiveInputWrapper } from '../json-schema-input/helpers'
import { SelectVmSnapshot } from '../select-objects'
// ===================================================================
export default class snapshotInput extends XoAbstractInput {
render () {
const { props } = this
return (
<PrimitiveInputWrapper {...props}>
<SelectVmSnapshot
disabled={props.disabled}
hasSelectAll
multi={props.multi}
onChange={this._onChange}
ref='input'
required={props.required}
value={props.value}
/>
</PrimitiveInputWrapper>
)
}
}

View File

@@ -0,0 +1,27 @@
import React from 'react'
import { SelectVdi } from 'select-objects'
import XoAbstractInput from './xo-abstract-input'
import { PrimitiveInputWrapper } from '../json-schema-input/helpers'
// ===================================================================
// JSON-schema input allowing the user to pick one or several VDIs, rendered
// inside the standard primitive-input wrapper (label, description, …).
// NOTE(review): `_onChange` is presumably inherited from XoAbstractInput —
// confirm against that base class.
export default class VdiInput extends XoAbstractInput {
  render () {
    const { props } = this
    return (
      <PrimitiveInputWrapper {...props}>
        <SelectVdi
          disabled={props.disabled}
          hasSelectAll
          multi={props.multi}
          onChange={this._onChange}
          ref='input'
          required={props.required}
          value={props.value}
        />
      </PrimitiveInputWrapper>
    )
  }
}

View File

@@ -25,7 +25,7 @@ import _ from '../intl'
import fetch, { post } from '../fetch'
import invoke from '../invoke'
import logError from '../log-error'
import renderXoItem from '../render-xo-item'
import renderXoItem, { renderXoItemFromId } from '../render-xo-item'
import store from 'store'
import { alert, chooseAction, confirm } from '../modal'
import { error, info, success } from '../notification'
@@ -300,10 +300,6 @@ export const subscribeResourceSets = createSubscription(() =>
_call('resourceSet.getAll')
)
export const subscribeScheduleTable = createSubscription(() =>
_call('scheduler.getScheduleTable')
)
export const subscribeSchedules = createSubscription(() =>
_call('schedule.getAll')
)
@@ -660,7 +656,12 @@ export const getHostMissingPatches = host =>
)
export const emergencyShutdownHost = host =>
_call('host.emergencyShutdownHost', { host: resolveId(host) })
confirm({
title: _('emergencyShutdownHostModalTitle'),
body: _('emergencyShutdownHostModalMessage', {
host: <strong>{host.name_label}</strong>,
}),
}).then(() => _call('host.emergencyShutdownHost', { host: resolveId(host) }))
export const emergencyShutdownHosts = hosts => {
const nHosts = size(hosts)
@@ -1118,17 +1119,23 @@ export const importDeltaBackup = ({ remote, file, sr, mapVdisSrs }) =>
)
import RevertSnapshotModalBody from './revert-snapshot-modal' // eslint-disable-line import/first
export const revertSnapshot = vm =>
export const revertSnapshot = snapshot =>
confirm({
title: _('revertVmModalTitle'),
body: <RevertSnapshotModalBody />,
}).then(
snapshotBefore => _call('vm.revert', { id: resolveId(vm), snapshotBefore }),
snapshotBefore =>
_call('vm.revert', { snapshot: resolveId(snapshot), snapshotBefore }),
noop
)
export const editVm = (vm, props) =>
_call('vm.set', { ...props, id: resolveId(vm) })
_call('vm.set', { ...props, id: resolveId(vm) }).catch(err => {
error(
_('setVmFailed', { vm: renderXoItemFromId(resolveId(vm)) }),
err.message
)
})
export const fetchVmStats = (vm, granularity) =>
_call('vm.stats', { id: resolveId(vm), granularity })
@@ -1196,11 +1203,14 @@ export const createVgpu = (vm, { gpuGroup, vgpuType }) =>
export const deleteVgpu = vgpu => _call('vm.deleteVgpu', resolveIds({ vgpu }))
export const shareVm = (vm, resourceSet) =>
export const shareVm = async (vm, resourceSet) =>
confirm({
title: _('shareVmInResourceSetModalTitle'),
body: _('shareVmInResourceSetModalMessage', {
self: renderXoItem(resourceSet),
self: renderXoItem({
...(await getResourceSet(resourceSet)),
type: 'resourceSet',
}),
}),
}).then(() => editVm(vm, { share: true }), noop)
@@ -1264,8 +1274,32 @@ export const connectVbd = vbd => _call('vbd.connect', { id: resolveId(vbd) })
export const disconnectVbd = vbd =>
_call('vbd.disconnect', { id: resolveId(vbd) })
// Bulk-disconnect VBDs after user confirmation.
// `noop` as the rejection handler swallows the cancellation of the confirm
// dialog, so cancelling resolves quietly; API errors from the disconnect
// calls themselves still propagate.
export const disconnectVbds = vbds =>
  confirm({
    title: _('disconnectVbdsModalTitle', { nVbds: vbds.length }),
    body: _('disconnectVbdsModalMessage', { nVbds: vbds.length }),
  }).then(
    () =>
      Promise.all(
        map(vbds, vbd => _call('vbd.disconnect', { id: resolveId(vbd) }))
      ),
    noop
  )
// Delete a single VBD (no confirmation — callers confirm when needed).
export const deleteVbd = vbd => _call('vbd.delete', { id: resolveId(vbd) })

// Bulk-delete VBDs after user confirmation; cancelling the dialog is
// swallowed by `noop`, deletions run in parallel.
export const deleteVbds = vbds =>
  confirm({
    title: _('deleteVbdsModalTitle', { nVbds: vbds.length }),
    body: _('deleteVbdsModalMessage', { nVbds: vbds.length }),
  }).then(
    () =>
      Promise.all(
        map(vbds, vbd => _call('vbd.delete', { id: resolveId(vbd) }))
      ),
    noop
  )
export const editVbd = (vbd, props) =>
_call('vbd.set', { ...props, id: resolveId(vbd) })
@@ -1558,7 +1592,7 @@ export const deleteBackupSchedule = async schedule => {
body: _('deleteBackupScheduleQuestion'),
})
await _call('schedule.delete', { id: schedule.id })
await _call('job.delete', { id: schedule.job })
await _call('job.delete', { id: schedule.jobId })
subscribeSchedules.forceRefresh()
subscribeJobs.forceRefresh()
@@ -1581,26 +1615,74 @@ export const deleteSchedules = schedules =>
)
)
export const disableSchedule = id =>
_call('scheduler.disable', { id })::tap(subscribeScheduleTable.forceRefresh)
export const disableSchedule = id => editSchedule({ id, enabled: false })
export const editSchedule = ({
id,
job: jobId,
cron,
enabled,
name,
timezone,
}) =>
export const editSchedule = ({ id, jobId, cron, enabled, name, timezone }) =>
_call('schedule.set', { id, jobId, cron, enabled, name, timezone })::tap(
subscribeSchedules.forceRefresh
)
export const enableSchedule = id =>
_call('scheduler.enable', { id })::tap(subscribeScheduleTable.forceRefresh)
export const enableSchedule = id => editSchedule({ id, enabled: true })
export const getSchedule = id => _call('schedule.get', { id })
// Backup NG ---------------------------------------------------------
export const subscribeBackupNgJobs = createSubscription(() =>
_call('backupNg.getAllJobs')
)
export const createBackupNgJob = props =>
_call('backupNg.createJob', props)::tap(subscribeBackupNgJobs.forceRefresh)
// Delete several backup-NG jobs after a single confirmation dialog.
// Resolves early (undefined) when the list is empty or the user cancels;
// on success, refreshes the jobs subscription via the bound `tap`.
export const deleteBackupNgJobs = async ids => {
  const { length } = ids
  if (length === 0) {
    return
  }
  const vars = { nJobs: length }
  try {
    await confirm({
      title: _('confirmDeleteBackupJobsTitle', vars),
      body: <p>{_('confirmDeleteBackupJobsBody', vars)}</p>,
    })
  } catch (_) {
    // dialog dismissed: deliberate no-op
    return
  }
  return Promise.all(
    ids.map(id => _call('backupNg.deleteJob', { id: resolveId(id) }))
  )::tap(subscribeBackupNgJobs.forceRefresh)
}
export const editBackupNgJob = props =>
_call('backupNg.editJob', props)::tap(subscribeBackupNgJobs.forceRefresh)
export const getBackupNgJob = id => _call('backupNg.getJob', { id })
export const runBackupNgJob = params => _call('backupNg.runJob', params)
export const listVmBackups = remotes =>
_call('backupNg.listVmBackups', { remotes: resolveIds(remotes) })
// Restore a backup-NG VM backup onto the given SR.
// When `startOnRestore` is truthy, the restored VM is started as soon as
// the import resolves.
export const restoreBackup = (backup, sr, startOnRestore) => {
  const restoring = _call('backupNg.importVmBackup', {
    id: resolveId(backup),
    sr: resolveId(sr),
  })
  return startOnRestore ? restoring.then(startVm) : restoring
}
// Delete one backup-NG VM backup.
export const deleteBackup = backup =>
  _call('backupNg.deleteVmBackup', { id: resolveId(backup) })

// Delete several backups in parallel (no confirmation here).
export const deleteBackups = backups => Promise.all(map(backups, deleteBackup))
// Plugins -----------------------------------------------------------
export const loadPlugin = async id =>
@@ -1677,6 +1759,9 @@ export const deleteResourceSet = async id => {
export const recomputeResourceSetsLimits = () =>
_call('resourceSet.recomputeAllLimits')
// Fetch a single resource set by id (accepts an id or an object with one).
export const getResourceSet = id =>
  _call('resourceSet.get', { id: resolveId(id) })
// Remote ------------------------------------------------------------
export const getRemote = remote =>
@@ -1861,10 +1946,28 @@ export const createSrLvm = (host, nameLabel, nameDescription, device) =>
// Job logs ----------------------------------------------------------
// Delete a single log entry from the jobs namespace, then refresh the
// jobs-logs subscription so the UI updates.
export const deleteJobsLog = id =>
  _call('log.delete', { namespace: 'jobs', id })::tap(
    subscribeJobsLogs.forceRefresh
  )
// Delete several jobs-namespace log entries in one API call.
// A confirmation dialog is shown only when more than one entry is targeted;
// cancelling resolves quietly. On success the logs subscription is refreshed
// via the bound `tap`.
export const deleteJobsLogs = async ids => {
  const { length } = ids
  if (length === 0) {
    return
  }
  if (length !== 1) {
    const vars = { nLogs: length }
    try {
      await confirm({
        title: _('logDeleteMultiple', vars),
        body: <p>{_('logDeleteMultipleMessage', vars)}</p>,
      })
    } catch (_) {
      // dialog dismissed: deliberate no-op
      return
    }
  }
  return _call('log.delete', {
    namespace: 'jobs',
    id: ids.map(resolveId),
  })::tap(subscribeJobsLogs.forceRefresh)
}
// Logs

View File

@@ -259,6 +259,10 @@
@extend .fa;
@extend .fa-download;
}
&-restore {
@extend .fa;
@extend .fa-upload;
}
&-rolling-snapshot {
@extend .fa;
@extend .fa-camera;

View File

@@ -0,0 +1,26 @@
import addSubscriptions from 'add-subscriptions'
import React from 'react'
import { injectState, provideState } from '@julien-f/freactal'
import { subscribeBackupNgJobs, subscribeSchedules } from 'xo'
import { find, groupBy } from 'lodash'
import New from './new'
// Route component for editing an existing backup-NG job: subscribes to the
// jobs and schedules, derives the job and its schedules from the :id route
// param, and renders the <New /> form pre-filled with them.
// The array is a decorator pipeline applied right-to-left to the final
// stateless component.
export default [
  addSubscriptions({
    jobs: subscribeBackupNgJobs,
    // group schedules by their jobId so lookup by route id is O(1)
    schedulesByJob: cb =>
      subscribeSchedules(schedules => {
        cb(groupBy(schedules, 'jobId'))
      }),
  }),
  provideState({
    computed: {
      job: (_, { jobs, routeParams: { id } }) => find(jobs, { id }),
      schedules: (_, { schedulesByJob, routeParams: { id } }) =>
        schedulesByJob && schedulesByJob[id],
    },
  }),
  injectState,
  ({ state: { job, schedules } }) => <New job={job} schedules={schedules} />,
].reduceRight((value, decorator) => decorator(value))

Some files were not shown because too many files have changed in this diff Show More