Compare commits
42 Commits

Author SHA1 Message Date
Julien Fontanet
74f4a83aea feat(xo-server-usage-report): 0.4.0 2018-03-30 17:55:57 +02:00
Julien Fontanet
e67038a04d feat(xo-server-auth-saml): 0.5.1 2018-03-30 17:54:30 +02:00
badrAZ
1fa73b57a2 feat(xo-web/dashboard/overview): add filters for pools and hosts (#2769)
Fixes #1631
2018-03-30 17:44:36 +02:00
badrAZ
73c746fdd3 fix(xo-web/backup-ng/new): xoa plan verification (#2813) 2018-03-30 17:22:21 +02:00
Julien Fontanet
ab1413b741 feat(xen-api): more info to task destroyed before completion error 2018-03-30 15:28:53 +02:00
Julien Fontanet
c087eaf229 chore(xo-server): increase blocked threshold from 10 to 50 2018-03-30 15:09:47 +02:00
Julien Fontanet
8b9f9ffa3e feat(xo-server/snapshotVm): increase concurrency to 2 2018-03-30 12:39:51 +02:00
Julien Fontanet
a83fa90d87 chore(xo-server/snapshotVm): avoid using waitObjectState 2018-03-30 12:39:50 +02:00
Julien Fontanet
505f06c1d8 chore(xo-server/backups-ng): dont fork streams if 1 target 2018-03-30 12:39:50 +02:00
Julien Fontanet
2ac1093543 chore(xo-server/backups-ng): rm unneeded defer decorators 2018-03-30 12:39:50 +02:00
Pierre Donias
b3d8ce2041 feat(xo-web/new-vm): hide IP field if IP pools are not configured (#2811)
Fixes #2739
2018-03-29 17:19:38 +02:00
Rajaa.BARHTAOUI
b47789bf82 feat(xo-web): confirm modal before manual backup run (#2717)
Fixes #2355
2018-03-29 15:06:50 +02:00
Julien Fontanet
0a5e1a9bce fix(xo-server/backups-ng): discriminate replicated against the VM (#2809)
Fixes #2807
2018-03-29 13:47:16 +02:00
Julien Fontanet
f333679319 fix(xo-server/backups-ng): dont snapshot on unhealthy vdi chain 2018-03-29 10:51:22 +02:00
Julien Fontanet
20d3faa306 fix(xo-server/backups-ng): delete unused snapshot on delta failure 2018-03-29 10:49:48 +02:00
Julien Fontanet
cf11ed0830 fix(xo-server/backups-ng): dont delete snapshot on failure 2018-03-29 09:47:32 +02:00
Julien Fontanet
acd390ac42 todo(xo-server/backups-ng): do not delete rolling snapshot in case of failure 2018-03-28 17:52:07 +02:00
badrAZ
8a2fbe3ab5 feat(xo-web/backup): ability to migrate legacy to NG (#2801)
Fixes #2711
2018-03-28 14:33:43 +02:00
Julien Fontanet
7a6e7ec153 fix(xo-web/backup-ng): display ids like in logs 2018-03-28 11:48:42 +02:00
Julien Fontanet
7d90346c91 feat(xen-api): 0.16.7 2018-03-28 11:46:45 +02:00
Julien Fontanet
abb5193ced chore(xen-api/getObject*): clearer error messages 2018-03-28 11:46:45 +02:00
Julien Fontanet
52e845834e chore(xen-api): more explicit tests 2018-03-28 11:46:45 +02:00
Julien Fontanet
c1c17fad44 fix(xen-api/getObject): match obj.$id against refs 2018-03-28 11:46:45 +02:00
Julien Fontanet
d7b4025893 todo(xo-server/backups-ng): detect and gc uncomplete replications 2018-03-28 11:46:45 +02:00
Rajaa.BARHTAOUI
934356571c feat(xo-web/home): fix toolbar in header (#2798)
Fixes #1581
2018-03-28 11:29:27 +02:00
Julien Fontanet
738d98eb42 chore(xo-server): update http-server-plus to 0.10
Fixes #2803
2018-03-28 00:11:17 +02:00
Nicolas Raynaud
7e689076d8 chore(xo-server/vhd-merge): various updates (#2767)
Fixes #2746 

- implement parent locators
- tests
- remove `@nraynaud/struct-fu`
2018-03-27 18:39:36 +02:00
Rajaa.BARHTAOUI
0b9d031965 feat(xo-web/jobs/overview): use SortedTable (#2677)
See #2416
2018-03-27 16:56:56 +02:00
badrAZ
53f470518b feat(xo-server-usage-report): various improvements (#2788)
Fixes #2770
2018-03-27 16:07:29 +02:00
Rajaa.BARHTAOUI
664d648435 feat(xo-web/vm/disks): use SortedTable (#2429)
See #2416
2018-03-27 11:13:05 +02:00
Julien Fontanet
0d718bd632 feat(xo-server/backup NG): merge VHD in a worker (#2799) 2018-03-27 10:13:05 +02:00
badrAZ
ed5e0c3509 feat(xo-web/xoa/update): warn before upgrade if jobs running (#2795)
Fixes #2250
2018-03-26 18:01:29 +02:00
Julien Fontanet
20d5047b55 chore(xo-server/ag2s): use async-iterator-to-stream instead 2018-03-26 16:32:46 +02:00
Pierre Donias
4cfe3ec06e fix(xo-server/new-vm): race condition on VIFs (#2796)
Fixes #2794
2018-03-26 11:18:36 +02:00
Julien Fontanet
87664ff16a chore(xo-server-auth-saml): config description 2018-03-26 11:10:16 +02:00
Pierre Donias
adf278fc83 fix(xo-web/home): pagination (#2791)
Fixes #2730
2018-03-21 16:54:29 +01:00
Pierre Donias
a4d0fa62d2 chore(xo-web/restore): minor improvements & fixes (#2789)
Fixes #2692
2018-03-21 15:51:12 +01:00
Pierre Donias
ff59d091f1 fix(xo-server-cloud): check token before getResourceDownloadToken call (#2783) 2018-03-20 15:36:03 +01:00
Pierre Donias
4cac99d79a feat(xo-web/home): put sort criteria in URL (#2780)
Fixes #2585
2018-03-20 10:41:03 +01:00
Rajaa.BARHTAOUI
d1a046279d feat(xo-web/modal): autofocus strong-confirm text input (#2749) 2018-03-19 15:34:52 +01:00
Julien Fontanet
cb9fa5c42b chore: update dependencies 2018-03-19 14:55:19 +01:00
Julien Fontanet
05f9e6895b feat(xo-web): 5.17.3 2018-03-16 17:49:31 +01:00
53 changed files with 2968 additions and 2074 deletions

View File

@@ -7,6 +7,11 @@ node_js:
# Use containers.
# http://docs.travis-ci.com/user/workers/container-based-infrastructure/
sudo: false
addons:
apt:
packages:
- qemu-utils
- blktap-utils
before_install:
- curl -o- -L https://yarnpkg.com/install.sh | bash
@@ -14,3 +19,7 @@ before_install:
cache:
yarn: true
script:
- yarn run test
- yarn run test-integration

View File

@@ -41,10 +41,10 @@
"moment-timezone": "^0.5.14"
},
"devDependencies": {
"@babel/cli": "7.0.0-beta.40",
"@babel/core": "7.0.0-beta.40",
"@babel/preset-env": "7.0.0-beta.40",
"@babel/preset-flow": "7.0.0-beta.40",
"@babel/cli": "7.0.0-beta.42",
"@babel/core": "7.0.0-beta.42",
"@babel/preset-env": "7.0.0-beta.42",
"@babel/preset-flow": "7.0.0-beta.42",
"cross-env": "^5.1.3",
"rimraf": "^2.6.2"
},

View File

@@ -13,7 +13,7 @@
"eslint-plugin-react": "^7.6.1",
"eslint-plugin-standard": "^3.0.1",
"exec-promise": "^0.7.0",
"flow-bin": "^0.67.1",
"flow-bin": "^0.68.0",
"globby": "^8.0.0",
"husky": "^0.14.3",
"jest": "^22.0.4",
@@ -52,12 +52,13 @@
"build": "scripts/run-script --parallel build",
"clean": "scripts/run-script --parallel clean",
"dev": "scripts/run-script --parallel dev",
"dev-test": "jest --bail --watch",
"dev-test": "jest --bail --watch \"^(?!.*\\.integ\\.spec\\.js$)\"",
"posttest": "scripts/run-script test",
"precommit": "scripts/lint-staged",
"prepare": "scripts/run-script prepare",
"pretest": "eslint --ignore-path .gitignore .",
"test": "jest"
"test": "jest \"^(?!.*\\.integ\\.spec\\.js$)\"",
"test-integration": "jest \".integ\\.spec\\.js$\""
},
"workspaces": [
"@xen-orchestra/*",

View File

@@ -30,9 +30,9 @@
"lodash": "^4.17.4"
},
"devDependencies": {
"@babel/cli": "7.0.0-beta.40",
"@babel/core": "7.0.0-beta.40",
"@babel/preset-env": "7.0.0-beta.40",
"@babel/cli": "7.0.0-beta.42",
"@babel/core": "7.0.0-beta.42",
"@babel/preset-env": "7.0.0-beta.42",
"babel-plugin-lodash": "^3.3.2",
"cross-env": "^5.1.1",
"rimraf": "^2.6.2"

View File

@@ -28,10 +28,10 @@
},
"dependencies": {},
"devDependencies": {
"@babel/cli": "7.0.0-beta.40",
"@babel/core": "7.0.0-beta.40",
"@babel/preset-env": "7.0.0-beta.40",
"@babel/preset-flow": "7.0.0-beta.40",
"@babel/cli": "7.0.0-beta.42",
"@babel/core": "7.0.0-beta.42",
"@babel/preset-env": "7.0.0-beta.42",
"@babel/preset-flow": "7.0.0-beta.42",
"cross-env": "^5.1.3",
"rimraf": "^2.6.2"
},

View File

@@ -26,7 +26,7 @@
"node": ">=4"
},
"dependencies": {
"@nraynaud/struct-fu": "^1.0.1",
"struct-fu": "^1.2.0",
"@nraynaud/xo-fs": "^0.0.5",
"babel-runtime": "^6.22.0",
"exec-promise": "^0.7.0"

View File

@@ -1,5 +1,5 @@
import assert from 'assert'
import fu from '@nraynaud/struct-fu'
import fu from 'struct-fu'
import { dirname } from 'path'
// ===================================================================

View File

@@ -1,6 +1,6 @@
{
"name": "xen-api",
"version": "0.16.6",
"version": "0.16.7",
"license": "ISC",
"description": "Connector to the Xen API",
"keywords": [

View File

@@ -441,16 +441,18 @@ export class Xapi extends EventEmitter {
// this lib), UUID (unique identifier that some objects have) or
// opaque reference (internal to XAPI).
getObject (idOrUuidOrRef, defaultValue) {
const object =
typeof idOrUuidOrRef === 'string'
? this._objects.all[idOrUuidOrRef] || this._objectsByRefs[idOrUuidOrRef]
: this._objects.all[idOrUuidOrRef.$id]
if (typeof idOrUuidOrRef === 'object') {
idOrUuidOrRef = idOrUuidOrRef.$id
}
if (object) return object
const object =
this._objects.all[idOrUuidOrRef] || this._objectsByRefs[idOrUuidOrRef]
if (object !== undefined) return object
if (arguments.length > 1) return defaultValue
throw new Error('there is not object can be matched to ' + idOrUuidOrRef)
throw new Error('no object with UUID or opaque ref: ' + idOrUuidOrRef)
}
// Returns the object for a given opaque reference (internal to
@@ -458,11 +460,11 @@ export class Xapi extends EventEmitter {
getObjectByRef (ref, defaultValue) {
const object = this._objectsByRefs[ref]
if (object) return object
if (object !== undefined) return object
if (arguments.length > 1) return defaultValue
throw new Error('there is no object with the ref ' + ref)
throw new Error('no object with opaque ref: ' + ref)
}
// Returns the object for a given UUID (unique identifier that some
@@ -475,7 +477,7 @@ export class Xapi extends EventEmitter {
if (arguments.length > 1) return defaultValue
throw new Error('there is no object with the UUID ' + uuid)
throw new Error('no object with UUID: ' + uuid)
}
getRecord (type, ref) {
@@ -813,7 +815,10 @@ export class Xapi extends EventEmitter {
const taskWatchers = this._taskWatchers
const taskWatcher = taskWatchers[ref]
if (taskWatcher !== undefined) {
taskWatcher.reject(new Error('task has been destroyed before completion'))
const error = new Error('task has been destroyed before completion')
error.task = object
error.taskRef = ref
taskWatcher.reject(error)
delete taskWatchers[ref]
}
}
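
The "task destroyed before completion" error now carries the task record and its opaque ref. A minimal sketch of how a caller might use these extra fields — the `watchTask` call and the logging are illustrative, not part of this diff:

// Hypothetical caller: reacts to the richer "task destroyed" error.
try {
  await xapi.watchTask(taskRef) // assumed to reject with the error built above
} catch (error) {
  if (error.taskRef !== undefined) {
    console.error(
      'task destroyed before completion:',
      error.taskRef,
      error.task && error.task.name_label
    )
  }
  throw error
}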

View File

@@ -28,7 +28,7 @@
"node": ">=6"
},
"dependencies": {
"@babel/polyfill": "7.0.0-beta.40",
"@babel/polyfill": "7.0.0-beta.42",
"bluebird": "^3.5.1",
"chalk": "^2.2.0",
"event-to-promise": "^0.8.0",
@@ -49,10 +49,10 @@
"xo-lib": "^0.9.0"
},
"devDependencies": {
"@babel/cli": "7.0.0-beta.40",
"@babel/core": "7.0.0-beta.40",
"@babel/preset-env": "7.0.0-beta.40",
"@babel/preset-flow": "7.0.0-beta.40",
"@babel/cli": "7.0.0-beta.42",
"@babel/core": "7.0.0-beta.42",
"@babel/preset-env": "7.0.0-beta.42",
"@babel/preset-flow": "7.0.0-beta.42",
"babel-plugin-lodash": "^3.3.2",
"cross-env": "^5.1.3",
"rimraf": "^2.6.2"

View File

@@ -40,7 +40,7 @@
"dev": "cross-env NODE_ENV=development babel --watch --source-maps --out-dir=dist/ src/",
"prebuild": "rimraf dist/",
"predev": "yarn run prebuild",
"prepublishOnly": "yarn run build"
"prepare": "yarn run build"
},
"babel": {
"plugins": [

View File

@@ -1,6 +1,6 @@
{
"name": "xo-server-auth-saml",
"version": "0.5.0",
"version": "0.5.1",
"license": "AGPL-3.0",
"description": "SAML authentication plugin for XO-Server",
"keywords": [

View File

@@ -3,22 +3,32 @@ import { Strategy } from 'passport-saml'
// ===================================================================
export const configurationSchema = {
description:
'Important: When registering your instance to your identity provider, you must configure its callback URL to `https://<xo.company.net>/signin/saml/callback`!',
type: 'object',
properties: {
cert: {
title: 'Certificate',
description: "Copy/paste the identity provider's certificate",
type: 'string',
},
entryPoint: {
title: 'Entry point',
description: 'Entry point of the identity provider',
type: 'string',
},
issuer: {
title: 'Issuer',
description: 'Issuer string to supply to the identity provider',
type: 'string',
},
usernameField: {
title: 'Username field',
description: 'Field to use as the XO username',
type: 'string',
},
},
required: ['cert', 'entryPoint', 'issuer'],
required: ['cert', 'entryPoint', 'issuer', 'usernameField'],
}
// ===================================================================
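
For reference, a configuration object matching this schema might look like the following — values are placeholders, and `usernameField` is now required alongside `cert`, `entryPoint` and `issuer`:

// Example values only; supply your identity provider's real settings.
const samlConfiguration = {
  cert: '-----BEGIN CERTIFICATE-----\n…\n-----END CERTIFICATE-----',
  entryPoint: 'https://idp.example.net/sso/saml',
  issuer: 'xen-orchestra',
  usernameField: 'uid',
}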

View File

@@ -128,10 +128,15 @@ class XoServerCloud {
throw new Error(`cannot get resource: ${namespace} not registered`)
}
const namespaceCatalog = await this._getNamespaceCatalog(namespace)
const { _token: token } = await this._getNamespaceCatalog(namespace)
// 2018-03-20 Extra check: getResourceDownloadToken seems to be called without a token in some cases
if (token === undefined) {
throw new Error(`${namespace} namespace token is undefined`)
}
const downloadToken = await this._updater.call('getResourceDownloadToken', {
token: namespaceCatalog._token,
token,
id,
version,
})

View File

@@ -26,9 +26,9 @@
"lodash": "^4.17.4"
},
"devDependencies": {
"@babel/cli": "7.0.0-beta.40",
"@babel/core": "7.0.0-beta.40",
"@babel/preset-env": "7.0.0-beta.40",
"@babel/cli": "7.0.0-beta.42",
"@babel/core": "7.0.0-beta.42",
"@babel/preset-env": "7.0.0-beta.42",
"@babel/preset-flow": "^7.0.0-beta.40",
"babel-plugin-lodash": "^3.3.2",
"cross-env": "^5.1.3",

View File

@@ -1,6 +1,6 @@
{
"name": "xo-server-usage-report",
"version": "0.3.2",
"version": "0.4.0",
"license": "AGPL-3.0",
"description": "",
"keywords": [

View File

@@ -90,7 +90,7 @@
.top table{
margin: auto;
margin-top: 20px;
width: 400px;
min-width: 30em;
}
.top table caption {
@@ -121,6 +121,10 @@
border:1px solid #95a5a6;
text-align: center;
}
.allResources table {
min-width: 60em
}
</style>
</head>
<body>
@@ -151,86 +155,34 @@
</tr>
<tr>
<td>Number:</td>
<td>{{global.vms.number}}</td>
<td>
{{#if global.vmsEvolution.number}}
{{#compare global.vmsEvolution.number ">" 0}}+{{/compare}}
{{global.vmsEvolution.number}}
{{else}}
0
{{/if}}
</td>
<td>{{global.vms.number}} {{normaliseEvolution global.vmsEvolution.number}}</td>
</tr>
<tr>
<td>CPU:</td>
<td>{{global.vms.cpu}} %</td> <!-- One condition doesn't work -->
<td style='color:{{#compare global.vmsEvolution.cpu ">" 0}} red {{else}} green {{/compare}}'>
{{#if global.vmsEvolution.cpu}}
{{#compare global.vmsEvolution.cpu ">" 0}}+{{/compare}}
{{global.vmsEvolution.cpu}}%
{{else}}
0
{{/if}}
</td>
<td>{{global.vms.cpu}} % {{normaliseEvolution global.vmsEvolution.cpu}}</td>
<tr>
<tr>
<td>RAM:</td>
<td>{{global.vms.ram}} GiB</td>
<td style='color:{{#compare global.vmsEvolution.ram ">" 0}} red {{else}} green {{/compare}}'>
{{#if global.vmsEvolution.ram}}
{{#compare global.vmsEvolution.ram ">" 0}}+{{/compare}}
{{global.vmsEvolution.ram}}%
{{else}}
0
{{/if}}
</td>
<td>{{global.vms.ram}} GiB {{normaliseEvolution global.vmsEvolution.ram}}</td>
<tr>
<tr>
<td>Disk read:</td>
<td>{{global.vms.diskRead}} MiB</td>
<td style='color:{{#compare global.vmsEvolution.diskRead ">" 0}} red {{else}} green {{/compare}}'>
{{#if global.vmsEvolution.diskRead}}
{{#compare global.vmsEvolution.diskRead ">" 0}}+{{/compare}}
{{global.vmsEvolution.diskRead}}%
{{else}}
0
{{/if}}
<td>{{global.vms.diskRead}} MiB {{normaliseEvolution global.vmsEvolution.diskRead}}
</td>
<tr>
<tr>
<td>Disk write:</td>
<td>{{global.vms.diskWrite}} MiB</td>
<td style='color:{{#compare global.vmsEvolution.diskWrite ">" 0}} red {{else}} green {{/compare}}'>
{{#if global.vmsEvolution.diskWrite}}
{{#compare global.vmsEvolution.diskWrite ">" 0}}+{{/compare}}
{{global.vmsEvolution.diskWrite}}%
{{else}}
0
{{/if}}
<td>{{global.vms.diskWrite}} MiB {{normaliseEvolution global.vmsEvolution.diskWrite}}
</td>
<tr>
<tr>
<td>Net reception:</td>
<td>{{global.vms.netReception}} KiB</td>
<td style='color:{{#compare global.vmsEvolution.netReception ">" 0}} red {{else}} green {{/compare}}'>
{{#if global.vmsEvolution.netReception}}
{{#compare global.vmsEvolution.netReception ">" 0}}+{{/compare}}
{{global.vmsEvolution.netReception}}%
{{else}}
0
{{/if}}
<td>Network RX:</td>
<td>{{global.vms.netReception}} KiB {{normaliseEvolution global.vmsEvolution.netReception}}
</td>
<tr>
<tr>
<td>Net transmission:</td>
<td>{{global.vms.netTransmission}} KiB</td>
<td style='color:{{#compare global.vmsEvolution.netTransmission ">" 0}} red {{else}} green {{/compare}}'>
{{#if global.vmsEvolution.netTransmission}}
{{#compare global.vmsEvolution.netTransmission ">" 0}}+{{/compare}}
{{global.vmsEvolution.netTransmission}}%
{{else}}
0
{{/if}}
<td>Network TX:</td>
<td>{{global.vms.netTransmission}} KiB {{normaliseEvolution global.vmsEvolution.netTransmission}}
</td>
<tr>
</table>
@@ -288,7 +240,7 @@
</tr>
{{/each}}
<tr>
<td rowspan='{{math topVms.netReception.length "+" 1}}' class="tableHeader">Net reception</td>
<td rowspan='{{math topVms.netReception.length "+" 1}}' class="tableHeader">Network RX</td>
</tr>
{{#each topVms.netReception}}
<tr>
@@ -298,7 +250,7 @@
</tr>
{{/each}}
<tr>
<td rowspan='{{math topVms.netTransmission.length "+" 1}}' class="tableHeader">Net transmission</td>
<td rowspan='{{math topVms.netTransmission.length "+" 1}}' class="tableHeader">Network TX</td>
</tr>
{{#each topVms.netTransmission}}
<tr>
@@ -318,75 +270,33 @@
</tr>
<tr>
<td>Number:</td>
<td>{{global.hosts.number}}</td>
<td>
{{#if global.hostsEvolution.number}}
{{#compare global.hostsEvolution.number ">" 0}}+{{/compare}}
{{global.hostsEvolution.number}}
{{else}}
0
{{/if}}
<td>{{global.hosts.number}} {{normaliseEvolution global.hostsEvolution.number}}
</td>
</tr>
<tr>
<td>CPU:</td>
<td>{{global.hosts.cpu}} %</td>
<td style='color:{{#compare global.hostsEvolution.cpu ">" 0}} red {{else}} green {{/compare}}'>
{{#if global.hostsEvolution.cpu}}
{{#compare global.hostsEvolution.cpu ">" 0}}+{{/compare}}
{{global.hostsEvolution.cpu}}%
{{else}}
0
{{/if}}
<td>{{global.hosts.cpu}} % {{normaliseEvolution global.hostsEvolution.cpu}}
</td>
<tr>
<tr>
<td>RAM:</td>
<td>{{global.hosts.ram}} GiB</td>
<td style='color:{{#compare global.hostsEvolution.ram ">" 0}} red {{else}} green {{/compare}}'>
{{#if global.hostsEvolution.ram}}
{{#compare global.hostsEvolution.ram ">" 0}}+{{/compare}}
{{global.hostsEvolution.ram}}%
{{else}}
0
{{/if}}
<td>{{global.hosts.ram}} GiB {{normaliseEvolution global.hostsEvolution.ram}}
</td>
</td>
<tr>
<tr>
<td>Load average:</td>
<td>{{global.hosts.load}} </td>
<td style='color:{{#compare global.hostsEvolution.load ">" 0}} red {{else}} green {{/compare}}'>
{{#if global.hostsEvolution.load}}
{{#compare global.hostsEvolution.load ">" 0}}+{{/compare}}
{{global.hostsEvolution.load}}%
{{else}}
0
{{/if}}
<td>{{global.hosts.load}} {{normaliseEvolution global.hostsEvolution.load}}
</td>
<tr>
<tr>
<td>Net reception:</td>
<td>{{global.hosts.netReception}} KiB</td>
<td style='color:{{#compare global.hostsEvolution.netReception ">" 0}} red {{else}} green {{/compare}}'>
{{#if global.hostsEvolution.netReception}}
{{#compare global.hostsEvolution.netReception ">" 0}}+{{/compare}}
{{global.hostsEvolution.netReception}}%
{{else}}
0
{{/if}}
<td>Network RX:</td>
<td>{{global.hosts.netReception}} KiB {{normaliseEvolution global.hostsEvolution.netReception}}
</td>
<tr>
<tr>
<td>Net transmission:</td>
<td>{{global.hosts.netTransmission}} KiB</td>
<td style='color:{{#compare global.hostsEvolution.netTransmission ">" 0}} red {{else}} green {{/compare}}'>
{{#if global.hostsEvolution.netTransmission}}
{{#compare global.hostsEvolution.netTransmission ">" 0}}+{{/compare}}
{{global.hostsEvolution.netTransmission}}%
{{else}}
0
{{/if}}
<td>Network TX:</td>
<td>{{global.hosts.netTransmission}} KiB {{normaliseEvolution global.hostsEvolution.netTransmission}}
</td>
<tr>
</table>
@@ -432,7 +342,7 @@
</tr>
{{/each}}
<tr>
<td rowspan='{{math topHosts.netReception.length "+" 1}}' class="tableHeader">Net reception</td>
<td rowspan='{{math topHosts.netReception.length "+" 1}}' class="tableHeader">Network RX</td>
</tr>
{{#each topHosts.netReception}}
<tr>
@@ -442,7 +352,7 @@
</tr>
{{/each}}
<tr>
<td rowspan='{{math topHosts.netTransmission.length "+" 1}}' class="tableHeader">Net transmission</td>
<td rowspan='{{math topHosts.netTransmission.length "+" 1}}' class="tableHeader">Network TX</td>
</tr>
{{#each topHosts.netTransmission}}
<tr>
@@ -464,11 +374,11 @@
<th>Name</th>
<th>value</th>
</tr>
{{#each topAllocation}}
{{#each topSrs}}
<tr>
<td>{{shortUUID this.uuid}}</td>
<td>{{this.name}}</td>
<td>{{this.size}} GiB</td>
<td>{{this.value}} GiB</td>
</tr>
{{/each}}
</table>
@@ -533,8 +443,8 @@
<th>UUID</th>
<th>Name</th>
</tr>
{{#if vmsRessourcesEvolution.added}}
{{#each vmsRessourcesEvolution.added}}
{{#if vmsResourcesEvolution.added}}
{{#each vmsResourcesEvolution.added}}
<tr>
<td>{{shortUUID this.uuid}}</td>
<td>{{this.name}}</td>
@@ -553,8 +463,8 @@
<th>UUID</th>
<th>Name</th>
</tr>
{{#if vmsRessourcesEvolution.removed}}
{{#each vmsRessourcesEvolution.removed}}
{{#if vmsResourcesEvolution.removed}}
{{#each vmsResourcesEvolution.removed}}
<tr>
<td>{{shortUUID this.uuid}}</td>
<td>{{this.name}}</td>
@@ -572,8 +482,8 @@
<th>UUID</th>
<th>Name</th>
</tr>
{{#if hostsRessourcesEvolution.added}}
{{#each hostsRessourcesEvolution.added}}
{{#if hostsResourcesEvolution.added}}
{{#each hostsResourcesEvolution.added}}
<tr>
<td>{{shortUUID this.uuid}}</td>
<td>{{this.name}}</td>
@@ -591,8 +501,8 @@
<th>UUID</th>
<th>Name</th>
</tr>
{{#if hostsRessourcesEvolution.removed}}
{{#each hostsRessourcesEvolution.removed}}
{{#if hostsResourcesEvolution.removed}}
{{#each hostsResourcesEvolution.removed}}
<tr>
<td>{{shortUUID this.uuid}}</td>
<td>{{this.name}}</td>
@@ -606,5 +516,81 @@
</table>
</div>
</div>
{{#if allResources}}
<div class="page">
<div class="top allResources">
<hr color="#95a5a6" size="1px"/>
<h3 style="text-align: center;">All resources</h3>
<hr color="#95a5a6" size="1px"/>
<table>
<caption>VMs</caption>
<tr>
<th>UUID</th>
<th>Name</th>
<th>CPU</th>
<th>RAM (GiB)</th>
<th>Disk read (MiB)</th>
<th>Disk write (MiB)</th>
<th>Network RX (KiB)</th>
<th>Network TX (KiB)</th>
</tr>
{{#each allResources.vms}}
<tr>
<td>{{shortUUID this.uuid}}</td>
<td>{{this.name}}</td>
<td>{{normaliseValue this.cpu}} % {{normaliseEvolution this.evolution.cpu}}</td>
<td>{{normaliseValue this.ram}} {{normaliseEvolution this.evolution.ram}}</td>
<td>{{normaliseValue this.diskRead}} {{normaliseEvolution this.evolution.diskRead}}</td>
<td>{{normaliseValue this.diskWrite}} {{normaliseEvolution this.evolution.diskWrite}}</td>
<td>{{normaliseValue this.netReception}} {{normaliseEvolution this.evolution.netReception}}</td>
<td>{{normaliseValue this.netTransmission}} {{normaliseEvolution this.evolution.netTransmission}}</td>
</tr>
{{/each}}
</table>
<table>
<caption>Hosts</caption>
<tr>
<th>UUID</th>
<th>Name</th>
<th>CPU</th>
<th>RAM (GiB)</th>
<th>Load average</th>
<th>Network RX (KiB)</th>
<th>Network TX (KiB)</th>
</tr>
{{#each allResources.hosts}}
<tr>
<td>{{shortUUID this.uuid}}</td>
<td>{{this.name}}</td>
<td>{{normaliseValue this.cpu}} % {{normaliseEvolution this.evolution.cpu}}</td>
<td>{{normaliseValue this.ram}} {{normaliseEvolution this.evolution.ram}}</td>
<td>{{normaliseValue this.load}} {{normaliseEvolution this.evolution.load}}</td>
<td>{{normaliseValue this.netReception}} {{normaliseEvolution this.evolution.netReception}}</td>
<td>{{normaliseValue this.netTransmission}} {{normaliseEvolution this.evolution.netTransmission}}</td>
</tr>
{{/each}}
</table>
<table>
<caption>SRs</caption>
<tr>
<th>UUID</th>
<th>Name</th>
<th>Total space (GiB)</th>
<th>Used space (GiB)</th>
<th>Free space (GiB)</th>
</tr>
{{#each allResources.srs}}
<tr>
<td>{{shortUUID this.uuid}}</td>
<td>{{this.name}}</td>
<td>{{normaliseValue this.total}} {{normaliseEvolution this.evolution.total}}</td>
<td>{{normaliseValue this.used}}</td>
<td>{{normaliseValue this.free}}</td>
</tr>
{{/each}}
</table>
</div>
</div>
{{/if}}
</body>
</html>

View File

@@ -6,6 +6,7 @@ import {
concat,
differenceBy,
filter,
find,
forEach,
isFinite,
map,
@@ -67,6 +68,10 @@ export const configurationSchema = {
type: 'string',
},
},
all: {
type: 'boolean',
description: "It includes all resources' stats if on.",
},
periodicity: {
type: 'string',
enum: ['monthly', 'weekly'],
@@ -88,12 +93,12 @@ Handlebars.registerHelper('compare', function (
options
) {
if (arguments.length < 3) {
throw new Error('Handlerbars Helper "compare" needs 2 parameters')
throw new Error('Handlebars Helper "compare" needs 2 parameters')
}
if (!compareOperators[operator]) {
throw new Error(
`Handlerbars Helper "compare" doesn't know the operator ${operator}`
`Handlebars Helper "compare" doesn't know the operator ${operator}`
)
}
@@ -104,12 +109,12 @@ Handlebars.registerHelper('compare', function (
Handlebars.registerHelper('math', function (lvalue, operator, rvalue, options) {
if (arguments.length < 3) {
throw new Error('Handlerbars Helper "math" needs 2 parameters')
throw new Error('Handlebars Helper "math" needs 2 parameters')
}
if (!mathOperators[operator]) {
throw new Error(
`Handlerbars Helper "math" doesn't know the operator ${operator}`
`Handlebars Helper "math" doesn't know the operator ${operator}`
)
}
@@ -122,6 +127,23 @@ Handlebars.registerHelper('shortUUID', uuid => {
}
})
Handlebars.registerHelper(
'normaliseValue',
value => (isFinite(value) ? round(value, 2) : '-')
)
Handlebars.registerHelper(
'normaliseEvolution',
value =>
new Handlebars.SafeString(
isFinite(+value) && +value !== 0
? value > 0
? `(<b style="color: green;">▲ ${value}</b>)`
: `(<b style="color: red;">▼ ${String(value).slice(1)}</b>)`
: ''
)
)
// ===================================================================
function computeMean (values) {
@@ -170,13 +192,13 @@ function getTop (objects, options) {
)
}
function conputePercentage (curr, prev, options) {
function computePercentage (curr, prev, options) {
return zipObject(
options,
map(
options,
opt =>
prev[opt] === 0
prev[opt] === 0 || prev[opt] === null
? 'NONE'
: `${round((curr[opt] - prev[opt]) * 100 / prev[opt], 2)}`
)
@@ -185,45 +207,72 @@ function conputePercentage (curr, prev, options) {
function getDiff (oldElements, newElements) {
return {
added: differenceBy(oldElements, newElements, 'uuid'),
removed: differenceBy(newElements, oldElements, 'uuid'),
added: differenceBy(newElements, oldElements, 'uuid'),
removed: differenceBy(oldElements, newElements, 'uuid'),
}
}
// ===================================================================
function getVmsStats ({ runningVms, xo }) {
return Promise.all(
map(runningVms, async vm => {
const vmStats = await xo.getXapiVmStats(vm, 'days')
return {
uuid: vm.uuid,
name: vm.name_label,
cpu: computeDoubleMean(vmStats.stats.cpus),
ram: computeMean(vmStats.stats.memoryUsed) / gibPower,
diskRead: computeDoubleMean(values(vmStats.stats.xvds.r)) / mibPower,
diskWrite: computeDoubleMean(values(vmStats.stats.xvds.w)) / mibPower,
netReception: computeDoubleMean(vmStats.stats.vifs.rx) / kibPower,
netTransmission: computeDoubleMean(vmStats.stats.vifs.tx) / kibPower,
}
})
async function getVmsStats ({ runningVms, xo }) {
return orderBy(
await Promise.all(
map(runningVms, async vm => {
const vmStats = await xo.getXapiVmStats(vm, 'days')
return {
uuid: vm.uuid,
name: vm.name_label,
cpu: computeDoubleMean(vmStats.stats.cpus),
ram: computeMean(vmStats.stats.memoryUsed) / gibPower,
diskRead: computeDoubleMean(values(vmStats.stats.xvds.r)) / mibPower,
diskWrite: computeDoubleMean(values(vmStats.stats.xvds.w)) / mibPower,
netReception: computeDoubleMean(vmStats.stats.vifs.rx) / kibPower,
netTransmission: computeDoubleMean(vmStats.stats.vifs.tx) / kibPower,
}
})
),
'name',
'asc'
)
}
function getHostsStats ({ runningHosts, xo }) {
return Promise.all(
map(runningHosts, async host => {
const hostStats = await xo.getXapiHostStats(host, 'days')
async function getHostsStats ({ runningHosts, xo }) {
return orderBy(
await Promise.all(
map(runningHosts, async host => {
const hostStats = await xo.getXapiHostStats(host, 'days')
return {
uuid: host.uuid,
name: host.name_label,
cpu: computeDoubleMean(hostStats.stats.cpus),
ram: computeMean(hostStats.stats.memoryUsed) / gibPower,
load: computeMean(hostStats.stats.load),
netReception: computeDoubleMean(hostStats.stats.pifs.rx) / kibPower,
netTransmission:
computeDoubleMean(hostStats.stats.pifs.tx) / kibPower,
}
})
),
'name',
'asc'
)
}
function getSrsStats (xoObjects) {
return orderBy(
map(filter(xoObjects, { type: 'SR' }), sr => {
const total = sr.size / gibPower
const used = sr.physical_usage / gibPower
return {
uuid: host.uuid,
name: host.name_label,
cpu: computeDoubleMean(hostStats.stats.cpus),
ram: computeMean(hostStats.stats.memoryUsed) / gibPower,
load: computeMean(hostStats.stats.load),
netReception: computeDoubleMean(hostStats.stats.pifs.rx) / kibPower,
netTransmission: computeDoubleMean(hostStats.stats.pifs.tx) / kibPower,
uuid: sr.uuid,
name: sr.name_label,
total,
used,
free: total - used,
}
})
}),
'total',
'desc'
)
}
@@ -303,20 +352,21 @@ function getTopHosts ({ hostsStats, xo }) {
])
}
function getMostAllocatedSpaces ({ disks, xo }) {
return map(orderBy(disks, ['size'], ['desc']).slice(0, 3), disk => ({
uuid: disk.uuid,
name: disk.name_label,
size: round(disk.size / gibPower, 2),
}))
function getTopSrs ({ srsStats, xo }) {
return getTop(srsStats, ['total']).total
}
async function getHostsMissingPatches ({ runningHosts, xo }) {
const hostsMissingPatches = await Promise.all(
map(runningHosts, async host => {
const hostsPatches = await xo
let hostsPatches = await xo
.getXapi(host)
.listMissingPoolPatchesOnHost(host._xapiId)
if (host.license_params.sku_type === 'free') {
hostsPatches = filter(hostsPatches, { paid: false })
}
if (hostsPatches.length > 0) {
return {
uuid: host.uuid,
@@ -347,46 +397,75 @@ async function computeEvolution ({ storedStatsPath, ...newStats }) {
const prevDate = oldStats.style.currDate
const vmsEvolution = {
number: newStatsVms.number - oldStatsVms.number,
...conputePercentage(newStatsVms, oldStatsVms, [
const resourcesOptions = {
vms: [
'cpu',
'ram',
'diskRead',
'diskWrite',
'netReception',
'netTransmission',
]),
],
hosts: ['cpu', 'ram', 'load', 'netReception', 'netTransmission'],
srs: ['total'],
}
const vmsEvolution = {
number: newStatsVms.number - oldStatsVms.number,
...computePercentage(newStatsVms, oldStatsVms, resourcesOptions.vms),
}
const hostsEvolution = {
number: newStatsHosts.number - oldStatsHosts.number,
...conputePercentage(newStatsHosts, oldStatsHosts, [
'cpu',
'ram',
'load',
'netReception',
'netTransmission',
]),
...computePercentage(
newStatsHosts,
oldStatsHosts,
resourcesOptions.hosts
),
}
const vmsRessourcesEvolution = getDiff(
const vmsResourcesEvolution = getDiff(
oldStatsVms.allVms,
newStatsVms.allVms
)
const hostsRessourcesEvolution = getDiff(
const hostsResourcesEvolution = getDiff(
oldStatsHosts.allHosts,
newStatsHosts.allHosts
)
const usersEvolution = getDiff(oldStats.users, newStats.users)
const newAllResourcesStats = newStats.allResources
const oldAllResourcesStats = oldStats.allResources
// adding for each resource its evolution
if (
newAllResourcesStats !== undefined &&
oldAllResourcesStats !== undefined
) {
forEach(newAllResourcesStats, (resource, key) => {
const option = resourcesOptions[key]
if (option !== undefined) {
forEach(resource, newItem => {
const oldItem = find(oldAllResourcesStats[key], {
uuid: newItem.uuid,
})
if (oldItem !== undefined) {
newItem.evolution = computePercentage(newItem, oldItem, option)
}
})
}
})
}
return {
vmsEvolution,
hostsEvolution,
prevDate,
vmsRessourcesEvolution,
hostsRessourcesEvolution,
vmsResourcesEvolution,
hostsResourcesEvolution,
usersEvolution,
}
} catch (err) {
@@ -394,7 +473,7 @@ async function computeEvolution ({ storedStatsPath, ...newStats }) {
}
}
async function dataBuilder ({ xo, storedStatsPath }) {
async function dataBuilder ({ xo, storedStatsPath, all }) {
const xoObjects = values(xo.getObjects())
const runningVms = filter(xoObjects, { type: 'VM', power_state: 'Running' })
const haltedVms = filter(xoObjects, { type: 'VM', power_state: 'Halted' })
@@ -403,18 +482,17 @@ async function dataBuilder ({ xo, storedStatsPath }) {
power_state: 'Running',
})
const haltedHosts = filter(xoObjects, { type: 'host', power_state: 'Halted' })
const disks = filter(xoObjects, { type: 'SR' })
const [
users,
vmsStats,
hostsStats,
topAllocation,
srsStats,
hostsMissingPatches,
] = await Promise.all([
xo.getAllUsers(),
getVmsStats({ xo, runningVms }),
getHostsStats({ xo, runningHosts }),
getMostAllocatedSpaces({ xo, disks }),
getSrsStats(xoObjects),
getHostsMissingPatches({ xo, runningHosts }),
])
@@ -423,35 +501,50 @@ async function dataBuilder ({ xo, storedStatsPath }) {
globalHostsStats,
topVms,
topHosts,
topSrs,
usersEmail,
] = await Promise.all([
computeGlobalVmsStats({ xo, vmsStats, haltedVms }),
computeGlobalHostsStats({ xo, hostsStats, haltedHosts }),
getTopVms({ xo, vmsStats }),
getTopHosts({ xo, hostsStats }),
getTopSrs({ xo, srsStats }),
getAllUsersEmail(users),
])
let allResources
if (all) {
allResources = {
vms: vmsStats,
hosts: hostsStats,
srs: srsStats,
date: currDate,
}
}
const evolution = await computeEvolution({
allResources,
storedStatsPath,
hosts: globalHostsStats,
usersEmail,
vms: globalVmsStats,
})
const data = {
return {
allResources,
global: {
vms: globalVmsStats,
hosts: globalHostsStats,
vmsEvolution: evolution && evolution.vmsEvolution,
hostsEvolution: evolution && evolution.hostsEvolution,
},
topVms,
topHosts,
topSrs,
topVms,
hostsMissingPatches,
usersEmail,
topAllocation,
vmsRessourcesEvolution: evolution && evolution.vmsRessourcesEvolution,
hostsRessourcesEvolution: evolution && evolution.hostsRessourcesEvolution,
vmsResourcesEvolution: evolution && evolution.vmsResourcesEvolution,
hostsResourcesEvolution: evolution && evolution.hostsResourcesEvolution,
usersEvolution: evolution && evolution.usersEvolution,
style: {
imgXo,
@@ -460,8 +553,6 @@ async function dataBuilder ({ xo, storedStatsPath }) {
page: '{{page}}',
},
}
return data
}
// ===================================================================
@@ -472,6 +563,10 @@ class UsageReportPlugin {
this._dir = getDataDir
// Defined in configure().
this._conf = null
this._xo.addApiMethod(
'plugin.usageReport.send',
this._sendReport.bind(this, false)
)
}
configure (configuration, state) {
@@ -485,7 +580,7 @@ class UsageReportPlugin {
configuration.periodicity === 'monthly' ? '00 06 1 * *' : '00 06 * * 0'
).createJob(async () => {
try {
await this._sendReport()
await this._sendReport(true)
} catch (error) {
console.error(
'[WARN] scheduled function:',
@@ -511,13 +606,14 @@ class UsageReportPlugin {
}
test () {
return this._sendReport()
return this._sendReport(true)
}
async _sendReport () {
async _sendReport (storeData) {
const data = await dataBuilder({
xo: this._xo,
storedStatsPath: this._storedStatsPath,
all: this._conf.all,
})
await Promise.all([
@@ -537,10 +633,11 @@ class UsageReportPlugin {
},
],
}),
storeStats({
data,
storedStatsPath: this._storedStatsPath,
}),
storeData &&
storeStats({
data,
storedStatsPath: this._storedStatsPath,
}),
])
}
}
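
With the new `all` option enabled, the report also embeds the per-resource tables added to the template above. A hedged example of the plugin configuration — only `all` and `periodicity` appear in this diff, other fields are unchanged:

// Illustrative plugin configuration for xo-server-usage-report.
const usageReportConfiguration = {
  all: true, // include every VM, host and SR in the report
  periodicity: 'weekly', // or 'monthly'
}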

View File

@@ -31,13 +31,13 @@
"node": ">=6"
},
"dependencies": {
"@babel/polyfill": "7.0.0-beta.40",
"@babel/polyfill": "7.0.0-beta.42",
"@marsaud/smb2-promise": "^0.2.1",
"@nraynaud/struct-fu": "^1.0.1",
"@xen-orchestra/cron": "^1.0.2",
"ajv": "^6.1.1",
"app-conf": "^0.5.0",
"archiver": "^2.1.0",
"async-iterator-to-stream": "^1.0.1",
"base64url": "^2.0.0",
"bind-property-descriptor": "^1.0.0",
"blocked": "^1.2.1",
@@ -53,7 +53,7 @@
"escape-string-regexp": "^1.0.5",
"event-to-promise": "^0.8.0",
"exec-promise": "^0.7.0",
"execa": "^0.9.0",
"execa": "^0.10.0",
"express": "^4.16.2",
"express-session": "^1.15.6",
"fatfs": "^0.10.4",
@@ -65,9 +65,10 @@
"highland": "^2.11.1",
"http-proxy": "^1.16.2",
"http-request-plus": "^0.5.0",
"http-server-plus": "^0.8.0",
"http-server-plus": "^0.10.0",
"human-format": "^0.10.0",
"is-redirect": "^1.0.0",
"jest-worker": "^22.4.3",
"js-yaml": "^3.10.0",
"json-rpc-peer": "^0.15.3",
"json5": "^1.0.0",
@@ -102,13 +103,14 @@
"split-lines": "^1.1.0",
"stack-chain": "^2.0.0",
"stoppable": "^1.0.5",
"struct-fu": "^1.2.0",
"tar-stream": "^1.5.5",
"through2": "^2.0.3",
"tmp": "^0.0.33",
"uuid": "^3.0.1",
"value-matcher": "^0.2.0",
"ws": "^5.0.0",
"xen-api": "^0.16.6",
"xen-api": "^0.16.7",
"xml2js": "^0.4.19",
"xo-acl-resolver": "^0.2.3",
"xo-collection": "^0.4.1",
@@ -117,17 +119,17 @@
"xo-vmdk-to-vhd": "0.0.12"
},
"devDependencies": {
"@babel/cli": "7.0.0-beta.40",
"@babel/core": "7.0.0-beta.40",
"@babel/plugin-proposal-decorators": "7.0.0-beta.40",
"@babel/plugin-proposal-export-default-from": "7.0.0-beta.40",
"@babel/plugin-proposal-export-namespace-from": "7.0.0-beta.40",
"@babel/plugin-proposal-function-bind": "7.0.0-beta.40",
"@babel/cli": "7.0.0-beta.42",
"@babel/core": "7.0.0-beta.42",
"@babel/plugin-proposal-decorators": "7.0.0-beta.42",
"@babel/plugin-proposal-export-default-from": "7.0.0-beta.42",
"@babel/plugin-proposal-export-namespace-from": "7.0.0-beta.42",
"@babel/plugin-proposal-function-bind": "7.0.0-beta.42",
"@babel/plugin-proposal-optional-chaining": "^7.0.0-beta.40",
"@babel/plugin-proposal-pipeline-operator": "^7.0.0-beta.40",
"@babel/plugin-proposal-throw-expressions": "^7.0.0-beta.40",
"@babel/preset-env": "7.0.0-beta.40",
"@babel/preset-flow": "7.0.0-beta.40",
"@babel/preset-env": "7.0.0-beta.42",
"@babel/preset-flow": "7.0.0-beta.42",
"babel-plugin-lodash": "^3.3.2",
"cross-env": "^5.1.3",
"index-modules": "^0.3.0",

View File

@@ -1,68 +0,0 @@
import { Readable } from 'stream'
// return the next value of the iterator but if it is a promise, resolve it and
// reinject it
//
// this enables the use of a simple generator instead of an async generator
// (which are less widely supported)
const next = async (iterator, arg) => {
let cursor = iterator.next(arg)
if (typeof cursor.then === 'function') {
return cursor
}
let value
while (
!cursor.done &&
(value = cursor.value) != null &&
typeof value.then === 'function'
) {
let success = false
try {
value = await value
success = true
} catch (error) {
cursor = iterator.throw(error)
}
if (success) {
cursor = iterator.next(value)
}
}
return cursor
}
// Create a readable stream from a generator
//
// generator can be async or can yield promises to wait for them
export const createReadable = (generator, options) => {
const readable = new Readable(options)
readable._read = size => {
const iterator = generator(size)
readable._destroy = (error, cb) => {
iterator.throw(error)
cb(error)
}
let running = false
const read = (readable._read = async size => {
if (running) {
return
}
running = true
try {
let cursor
do {
cursor = await next(iterator, size)
if (cursor.done) {
return readable.push(null)
}
} while (readable.push(cursor.value))
} catch (error) {
readable.emit('error', error)
} finally {
running = false
}
})
return read(size)
}
return readable
}
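
This helper is removed in favour of the async-iterator-to-stream package (see the import added to vhd-merge below). A minimal sketch of the intended replacement, assuming the package's default export wraps an (async) iterator into a readable stream — check the package's docs for the exact signature:

import asyncIteratorToStream from 'async-iterator-to-stream'

// Assumed usage: turn an async generator's iterator into a Readable.
async function * chunks () {
  yield Buffer.from('foo')
  yield Buffer.from('bar')
}
const readable = asyncIteratorToStream(chunks())
readable.pipe(process.stdout)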

View File

@@ -32,6 +32,16 @@ createJob.params = {
},
}
export function migrateLegacyJob ({ id }) {
return this.migrateLegacyBackupJob(id)
}
migrateLegacyJob.permission = 'admin'
migrateLegacyJob.params = {
id: {
type: 'string',
},
}
export function deleteJob ({ id }) {
return this.deleteBackupNgJob(id)
}

View File

@@ -161,6 +161,7 @@ export async function create (params) {
}
for (const vif of xapiVm.$VIFs) {
xapi.xo.addObject(vif)
await this.allocIpAddresses(
vif.$id,
concat(vif.ipv4_allowed, vif.ipv6_allowed)

View File

@@ -538,9 +538,14 @@ export default async function main (args) {
{
const debug = createLogger('xo:perf')
blocked(ms => {
debug('blocked for %sms', ms | 0)
})
blocked(
ms => {
debug('blocked for %sms', ms | 0)
},
{
threshold: 50,
}
)
}
const config = await loadConfiguration()

View File

@@ -0,0 +1,25 @@
// @flow
import type RemoteHandler from './abstract'
import RemoteHandlerLocal from './local'
import RemoteHandlerNfs from './nfs'
import RemoteHandlerSmb from './smb'
export type Remote = { url: string }
const HANDLERS = {
file: RemoteHandlerLocal,
smb: RemoteHandlerSmb,
nfs: RemoteHandlerNfs,
}
export const getHandler = (remote: Remote): RemoteHandler => {
// FIXME: should be done in xo-remote-parser.
const type = remote.url.split('://')[0]
const Handler = HANDLERS[type]
if (!Handler) {
throw new Error('Unhandled remote type')
}
return new Handler(remote)
}
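
A short usage sketch for this new helper, mirroring how the integration tests below build a local handler:

// Chooses the handler class from the URL scheme (file, nfs or smb) and instantiates it.
const handler = getHandler({ url: 'file://' + process.cwd() })
await handler.getSize('randomfile.vhd') // methods come from the RemoteHandler classes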

View File

@@ -0,0 +1,284 @@
/* eslint-env jest */
import execa from 'execa'
import fs from 'fs-extra'
import rimraf from 'rimraf'
import { randomBytes } from 'crypto'
import { fromEvent } from 'promise-toolbox'
import LocalHandler from './remote-handlers/local'
import vhdMerge, {
chainVhd,
createReadStream,
Vhd,
VHD_SECTOR_SIZE,
} from './vhd-merge'
import { pFromCallback, streamToBuffer, tmpDir } from './utils'
const initialDir = process.cwd()
jest.setTimeout(10000)
beforeEach(async () => {
const dir = await tmpDir()
process.chdir(dir)
})
afterEach(async () => {
const tmpDir = process.cwd()
process.chdir(initialDir)
await pFromCallback(cb => rimraf(tmpDir, cb))
})
async function createRandomFile (name, sizeMb) {
await execa('bash', [
'-c',
`< /dev/urandom tr -dc "\\t\\n [:alnum:]" | head -c ${sizeMb}M >${name}`,
])
}
async function checkFile (vhdName) {
await execa('vhd-util', ['check', '-p', '-b', '-t', '-n', vhdName])
}
async function recoverRawContent (vhdName, rawName, originalSize) {
await checkFile(vhdName)
await execa('qemu-img', ['convert', '-fvpc', '-Oraw', vhdName, rawName])
if (originalSize !== undefined) {
await execa('truncate', ['-s', originalSize, rawName])
}
}
async function convertFromRawToVhd (rawName, vhdName) {
await execa('qemu-img', ['convert', '-f', 'raw', '-Ovpc', rawName, vhdName])
}
test('blocks can be moved', async () => {
const initalSize = 4
await createRandomFile('randomfile', initalSize)
await convertFromRawToVhd('randomfile', 'randomfile.vhd')
const handler = new LocalHandler({ url: 'file://' + process.cwd() })
const originalSize = await handler.getSize('randomfile')
const newVhd = new Vhd(handler, 'randomfile.vhd')
await newVhd.readHeaderAndFooter()
await newVhd.readBlockTable()
await newVhd._freeFirstBlockSpace(8000000)
await recoverRawContent('randomfile.vhd', 'recovered', originalSize)
expect(await fs.readFile('recovered')).toEqual(
await fs.readFile('randomfile')
)
})
test('the BAT MSB is not used for sign', async () => {
const randomBuffer = await pFromCallback(cb =>
randomBytes(VHD_SECTOR_SIZE, cb)
)
await execa('qemu-img', ['create', '-fvpc', 'empty.vhd', '1.8T'])
const handler = new LocalHandler({ url: 'file://' + process.cwd() })
const vhd = new Vhd(handler, 'empty.vhd')
await vhd.readHeaderAndFooter()
await vhd.readBlockTable()
// we want the bit 31 to be on, to prove it's not been used for sign
const hugeWritePositionSectors = Math.pow(2, 31) + 200
await vhd.writeData(hugeWritePositionSectors, randomBuffer)
await checkFile('empty.vhd')
// here we are moving the first sector very far in the VHD to prove the BAT doesn't use signed int32
const hugePositionBytes = hugeWritePositionSectors * VHD_SECTOR_SIZE
await vhd._freeFirstBlockSpace(hugePositionBytes)
// we recover the data manually for speed reasons.
// fs.write() with offset is way faster than qemu-img when there is a 1.5To
// hole before the block of data
const recoveredFile = await fs.open('recovered', 'w')
try {
const vhd2 = new Vhd(handler, 'empty.vhd')
await vhd2.readHeaderAndFooter()
await vhd2.readBlockTable()
for (let i = 0; i < vhd.header.maxTableEntries; i++) {
const entry = vhd._getBatEntry(i)
if (entry !== 0xffffffff) {
const block = (await vhd2._readBlock(i)).data
await fs.write(
recoveredFile,
block,
0,
block.length,
vhd2.header.blockSize * i
)
}
}
} finally {
fs.close(recoveredFile)
}
const recovered = await streamToBuffer(
await fs.createReadStream('recovered', {
start: hugePositionBytes,
end: hugePositionBytes + randomBuffer.length - 1,
})
)
expect(recovered).toEqual(randomBuffer)
})
test('writeData on empty file', async () => {
const mbOfRandom = 3
await createRandomFile('randomfile', mbOfRandom)
await execa('qemu-img', ['create', '-fvpc', 'empty.vhd', mbOfRandom + 'M'])
const randomData = await fs.readFile('randomfile')
const handler = new LocalHandler({ url: 'file://' + process.cwd() })
const originalSize = await handler.getSize('randomfile')
const newVhd = new Vhd(handler, 'empty.vhd')
await newVhd.readHeaderAndFooter()
await newVhd.readBlockTable()
await newVhd.writeData(0, randomData)
await recoverRawContent('empty.vhd', 'recovered', originalSize)
expect(await fs.readFile('recovered')).toEqual(randomData)
})
test('writeData in 2 non-overlaping operations', async () => {
const mbOfRandom = 3
await createRandomFile('randomfile', mbOfRandom)
await execa('qemu-img', ['create', '-fvpc', 'empty.vhd', mbOfRandom + 'M'])
const randomData = await fs.readFile('randomfile')
const handler = new LocalHandler({ url: 'file://' + process.cwd() })
const originalSize = await handler.getSize('randomfile')
const newVhd = new Vhd(handler, 'empty.vhd')
await newVhd.readHeaderAndFooter()
await newVhd.readBlockTable()
const splitPointSectors = 2
await newVhd.writeData(0, randomData.slice(0, splitPointSectors * 512))
await newVhd.writeData(
splitPointSectors,
randomData.slice(splitPointSectors * 512)
)
await recoverRawContent('empty.vhd', 'recovered', originalSize)
expect(await fs.readFile('recovered')).toEqual(randomData)
})
test('writeData in 2 overlaping operations', async () => {
const mbOfRandom = 3
await createRandomFile('randomfile', mbOfRandom)
await execa('qemu-img', ['create', '-fvpc', 'empty.vhd', mbOfRandom + 'M'])
const randomData = await fs.readFile('randomfile')
const handler = new LocalHandler({ url: 'file://' + process.cwd() })
const originalSize = await handler.getSize('randomfile')
const newVhd = new Vhd(handler, 'empty.vhd')
await newVhd.readHeaderAndFooter()
await newVhd.readBlockTable()
const endFirstWrite = 3
const startSecondWrite = 2
await newVhd.writeData(0, randomData.slice(0, endFirstWrite * 512))
await newVhd.writeData(
startSecondWrite,
randomData.slice(startSecondWrite * 512)
)
await recoverRawContent('empty.vhd', 'recovered', originalSize)
expect(await fs.readFile('recovered')).toEqual(randomData)
})
test('BAT can be extended and blocks moved', async () => {
const initalSize = 4
await createRandomFile('randomfile', initalSize)
await convertFromRawToVhd('randomfile', 'randomfile.vhd')
const handler = new LocalHandler({ url: 'file://' + process.cwd() })
const originalSize = await handler.getSize('randomfile')
const newVhd = new Vhd(handler, 'randomfile.vhd')
await newVhd.readHeaderAndFooter()
await newVhd.readBlockTable()
await newVhd.ensureBatSize(2000)
await recoverRawContent('randomfile.vhd', 'recovered', originalSize)
expect(await fs.readFile('recovered')).toEqual(
await fs.readFile('randomfile')
)
})
test('coalesce works with empty parent files', async () => {
const mbOfRandom = 2
await createRandomFile('randomfile', mbOfRandom)
await convertFromRawToVhd('randomfile', 'randomfile.vhd')
await execa('qemu-img', [
'create',
'-fvpc',
'empty.vhd',
mbOfRandom + 1 + 'M',
])
await checkFile('randomfile.vhd')
await checkFile('empty.vhd')
const handler = new LocalHandler({ url: 'file://' + process.cwd() })
const originalSize = await handler._getSize('randomfile')
await chainVhd(handler, 'empty.vhd', handler, 'randomfile.vhd', true)
await checkFile('randomfile.vhd')
await checkFile('empty.vhd')
await vhdMerge(handler, 'empty.vhd', handler, 'randomfile.vhd')
await recoverRawContent('empty.vhd', 'recovered', originalSize)
expect(await fs.readFile('recovered')).toEqual(
await fs.readFile('randomfile')
)
})
test('coalesce works in normal cases', async () => {
const mbOfRandom = 5
await createRandomFile('randomfile', mbOfRandom)
await createRandomFile('small_randomfile', Math.ceil(mbOfRandom / 2))
await execa('qemu-img', [
'create',
'-fvpc',
'parent.vhd',
mbOfRandom + 1 + 'M',
])
await convertFromRawToVhd('randomfile', 'child1.vhd')
const handler = new LocalHandler({ url: 'file://' + process.cwd() })
await execa('vhd-util', ['snapshot', '-n', 'child2.vhd', '-p', 'child1.vhd'])
const vhd = new Vhd(handler, 'child2.vhd')
await vhd.readHeaderAndFooter()
await vhd.readBlockTable()
vhd.footer.creatorApplication = 'xoa'
await vhd.writeFooter()
const originalSize = await handler._getSize('randomfile')
await chainVhd(handler, 'parent.vhd', handler, 'child1.vhd', true)
await execa('vhd-util', ['check', '-t', '-n', 'child1.vhd'])
await chainVhd(handler, 'child1.vhd', handler, 'child2.vhd', true)
await execa('vhd-util', ['check', '-t', '-n', 'child2.vhd'])
const smallRandom = await fs.readFile('small_randomfile')
const newVhd = new Vhd(handler, 'child2.vhd')
await newVhd.readHeaderAndFooter()
await newVhd.readBlockTable()
await newVhd.writeData(5, smallRandom)
await checkFile('child2.vhd')
await checkFile('child1.vhd')
await checkFile('parent.vhd')
await vhdMerge(handler, 'parent.vhd', handler, 'child1.vhd')
await checkFile('parent.vhd')
await chainVhd(handler, 'parent.vhd', handler, 'child2.vhd', true)
await checkFile('child2.vhd')
await vhdMerge(handler, 'parent.vhd', handler, 'child2.vhd')
await checkFile('parent.vhd')
await recoverRawContent(
'parent.vhd',
'recovered_from_coalescing',
originalSize
)
await execa('cp', ['randomfile', 'randomfile2'])
const fd = await fs.open('randomfile2', 'r+')
try {
await fs.write(fd, smallRandom, 0, smallRandom.length, 5 * VHD_SECTOR_SIZE)
} finally {
await fs.close(fd)
}
expect(await fs.readFile('recovered_from_coalescing')).toEqual(
await fs.readFile('randomfile2')
)
})
test('createReadStream passes vhd-util check', async () => {
const initalSize = 4
await createRandomFile('randomfile', initalSize)
await convertFromRawToVhd('randomfile', 'randomfile.vhd')
const handler = new LocalHandler({ url: 'file://' + process.cwd() })
const stream = createReadStream(handler, 'randomfile.vhd')
await fromEvent(
stream.pipe(await fs.createWriteStream('recovered.vhd')),
'finish'
)
await checkFile('recovered.vhd')
})

View File

@@ -1,19 +1,18 @@
// TODO: remove once completely merged in vhd.js
import assert from 'assert'
import asyncIteratorToStream from 'async-iterator-to-stream'
import concurrency from 'limit-concurrency-decorator'
import fu from '@nraynaud/struct-fu'
import isEqual from 'lodash/isEqual'
import fu from 'struct-fu'
import { dirname, relative } from 'path'
import { fromEvent } from 'promise-toolbox'
import type RemoteHandler from './remote-handlers/abstract'
import constantStream from './constant-stream'
import { createReadable } from './ag2s'
import { noop, resolveRelativeFromFile, streamToBuffer } from './utils'
const VHD_UTIL_DEBUG = 0
const debug = VHD_UTIL_DEBUG ? str => console.log(`[vhd-util]${str}`) : noop
const debug = VHD_UTIL_DEBUG ? str => console.log(`[vhd-merge]${str}`) : noop
// ===================================================================
//
@@ -28,7 +27,7 @@ const debug = VHD_UTIL_DEBUG ? str => console.log(`[vhd-util]${str}`) : noop
// Sizes in bytes.
const VHD_FOOTER_SIZE = 512
const VHD_HEADER_SIZE = 1024
const VHD_SECTOR_SIZE = 512
export const VHD_SECTOR_SIZE = 512
// Block allocation table entry size. (Block addr)
const VHD_ENTRY_SIZE = 4
@@ -40,6 +39,12 @@ const VHD_PLATFORM_CODE_NONE = 0
export const HARD_DISK_TYPE_DYNAMIC = 3 // Full backup.
export const HARD_DISK_TYPE_DIFFERENCING = 4 // Delta backup.
export const PLATFORM_NONE = 0
export const PLATFORM_W2RU = 0x57327275
export const PLATFORM_W2KU = 0x57326b75
export const PLATFORM_MAC = 0x4d616320
export const PLATFORM_MACX = 0x4d616358
// Other.
const BLOCK_UNUSED = 0xffffffff
const BIT_MASK = 0x80
@@ -50,28 +55,24 @@ BUF_BLOCK_UNUSED.writeUInt32BE(BLOCK_UNUSED, 0)
// ===================================================================
const SIZE_OF_32_BITS = Math.pow(2, 32)
const uint64 = fu.derive(
fu.uint32(2),
number => [Math.floor(number / SIZE_OF_32_BITS), number % SIZE_OF_32_BITS],
_ => _[0] * SIZE_OF_32_BITS + _[1]
)
const fuFooter = fu.struct([
fu.char('cookie', 8), // 0
fu.uint32('features'), // 8
fu.uint32('fileFormatVersion'), // 12
fu.struct('dataOffset', [
fu.uint32('high'), // 16
fu.uint32('low'), // 20
]),
uint64('dataOffset'), // offset of the header, should always be 512
fu.uint32('timestamp'), // 24
fu.char('creatorApplication', 4), // 28
fu.uint32('creatorVersion'), // 32
fu.uint32('creatorHostOs'), // 36
fu.struct('originalSize', [
// At the creation, current size of the hard disk.
fu.uint32('high'), // 40
fu.uint32('low'), // 44
]),
fu.struct('currentSize', [
// Current size of the virtual disk. At the creation: currentSize = originalSize.
fu.uint32('high'), // 48
fu.uint32('low'), // 52
]),
uint64('originalSize'),
uint64('currentSize'),
fu.struct('diskGeometry', [
fu.uint16('cylinders'), // 56
fu.uint8('heads'), // 58
@@ -87,12 +88,8 @@ const fuFooter = fu.struct([
const fuHeader = fu.struct([
fu.char('cookie', 8),
fu.struct('dataOffset', [fu.uint32('high'), fu.uint32('low')]),
fu.struct('tableOffset', [
// Absolute byte offset of the Block Allocation Table.
fu.uint32('high'),
fu.uint32('low'),
]),
fu.uint8('dataOffsetUnused', 8),
uint64('tableOffset'),
fu.uint32('headerVersion'),
fu.uint32('maxTableEntries'), // Max entries in the Block Allocation Table.
fu.uint32('blockSize'), // Block size in bytes. Default (2097152 => 2MB)
@@ -108,11 +105,7 @@ const fuHeader = fu.struct([
fu.uint32('platformDataSpace'),
fu.uint32('platformDataLength'),
fu.uint32('reserved'),
fu.struct('platformDataOffset', [
// Absolute byte offset of the locator data.
fu.uint32('high'),
fu.uint32('low'),
]),
uint64('platformDataOffset'), // Absolute byte offset of the locator data.
],
VHD_PARENT_LOCATOR_ENTRIES
),
@@ -123,16 +116,14 @@ const fuHeader = fu.struct([
// Helpers
// ===================================================================
const SIZE_OF_32_BITS = Math.pow(2, 32)
const uint32ToUint64 = fu => fu.high * SIZE_OF_32_BITS + fu.low
const computeBatSize = entries =>
sectorsToBytes(sectorsRoundUpNoZero(entries * VHD_ENTRY_SIZE))
// Returns a 32 bits integer corresponding to a Vhd version.
const getVhdVersion = (major, minor) => (major << 16) | (minor & 0x0000ffff)
// Sectors conversions.
const sectorsRoundUp = bytes =>
Math.floor((bytes + VHD_SECTOR_SIZE - 1) / VHD_SECTOR_SIZE)
const sectorsRoundUpNoZero = bytes => sectorsRoundUp(bytes) || 1
const sectorsRoundUpNoZero = bytes => Math.ceil(bytes / VHD_SECTOR_SIZE) || 1
const sectorsToBytes = sectors => sectors * VHD_SECTOR_SIZE
// Check/Set a bit on a vhd map.
@@ -163,26 +154,39 @@ const unpackField = (field, buf) => {
// Returns the checksum of a raw struct.
// The raw struct (footer or header) is altered with the new sum.
function checksumStruct (rawStruct, struct) {
function checksumStruct (buf, struct) {
const checksumField = struct.fields.checksum
let sum = 0
// Reset current sum.
packField(checksumField, 0, rawStruct)
for (let i = 0, n = struct.size; i < n; i++) {
sum = (sum + rawStruct[i]) & 0xffffffff
// Do not use the stored checksum to compute the new checksum.
const checksumOffset = checksumField.offset
for (let i = 0, n = checksumOffset; i < n; ++i) {
sum += buf[i]
}
for (
let i = checksumOffset + checksumField.size, n = struct.size;
i < n;
++i
) {
sum += buf[i]
}
sum = 0xffffffff - sum
sum = ~sum >>> 0
// Write new sum.
packField(checksumField, sum, rawStruct)
packField(checksumField, sum, buf)
return sum
}
const assertChecksum = (name, buf, struct) => {
const actual = unpackField(struct.fields.checksum, buf)
const expected = checksumStruct(buf, struct)
if (actual !== expected) {
throw new Error(`invalid ${name} checksum ${actual}, expected ${expected}`)
}
}
// ===================================================================
// Format:
@@ -207,6 +211,10 @@ function checksumStruct (rawStruct, struct) {
// - parentLocatorSize(i) = header.parentLocatorEntry[i].platformDataSpace * sectorSize
// - sectorSize = 512
export class Vhd {
get batSize () {
return computeBatSize(this.header.maxTableEntries)
}
constructor (handler, path) {
this._handler = handler
this._path = path
@@ -235,17 +243,10 @@ export class Vhd {
getEndOfHeaders () {
const { header } = this
let end = uint32ToUint64(this.footer.dataOffset) + VHD_HEADER_SIZE
const blockAllocationTableSize = sectorsToBytes(
sectorsRoundUpNoZero(header.maxTableEntries * VHD_ENTRY_SIZE)
)
let end = VHD_FOOTER_SIZE + VHD_HEADER_SIZE
// Max(end, block allocation table end)
end = Math.max(
end,
uint32ToUint64(header.tableOffset) + blockAllocationTableSize
)
end = Math.max(end, header.tableOffset + this.batSize)
for (let i = 0; i < VHD_PARENT_LOCATOR_ENTRIES; i++) {
const entry = header.parentLocatorEntry[i]
@@ -253,8 +254,7 @@ export class Vhd {
if (entry.platformCode !== VHD_PLATFORM_CODE_NONE) {
end = Math.max(
end,
uint32ToUint64(entry.platformDataOffset) +
sectorsToBytes(entry.platformDataSpace)
entry.platformDataOffset + sectorsToBytes(entry.platformDataSpace)
)
}
}
@@ -286,21 +286,16 @@ export class Vhd {
// Get the beginning (footer + header) of a vhd file.
async readHeaderAndFooter () {
const buf = await this._read(0, VHD_FOOTER_SIZE + VHD_HEADER_SIZE)
const bufFooter = buf.slice(0, VHD_FOOTER_SIZE)
const bufHeader = buf.slice(VHD_FOOTER_SIZE)
const sum = unpackField(fuFooter.fields.checksum, buf)
const sumToTest = checksumStruct(buf, fuFooter)
assertChecksum('footer', bufFooter, fuFooter)
assertChecksum('header', bufHeader, fuHeader)
// Checksum child & parent.
if (sumToTest !== sum) {
throw new Error(
`Bad checksum in vhd. Expected: ${sum}. Given: ${sumToTest}. (data=${buf.toString(
'hex'
)})`
)
}
const footer = (this.footer = fuFooter.unpack(bufFooter))
assert.strictEqual(footer.dataOffset, VHD_FOOTER_SIZE)
const header = (this.header = fuHeader.unpack(buf.slice(VHD_FOOTER_SIZE)))
this.footer = fuFooter.unpack(buf)
const header = (this.header = fuHeader.unpack(bufHeader))
// Compute the number of sectors in one block.
// Default: One block contains 4096 sectors of 512 bytes.
@@ -330,13 +325,10 @@ export class Vhd {
// Returns a buffer that contains the block allocation table of a vhd file.
async readBlockTable () {
const { header } = this
const offset = uint32ToUint64(header.tableOffset)
const size = sectorsToBytes(
sectorsRoundUpNoZero(header.maxTableEntries * VHD_ENTRY_SIZE)
this.blockTable = await this._read(
header.tableOffset,
header.maxTableEntries * VHD_ENTRY_SIZE
)
this.blockTable = await this._read(offset, size)
}
// return the first sector (bitmap) of a block
@@ -433,71 +425,70 @@ export class Vhd {
: fromEvent(data.pipe(stream), 'finish')
}
async ensureBatSize (size) {
const { header } = this
const prevMaxTableEntries = header.maxTableEntries
if (prevMaxTableEntries >= size) {
return
}
const tableOffset = uint32ToUint64(header.tableOffset)
// extend BAT
const maxTableEntries = (header.maxTableEntries = size)
const batSize = sectorsToBytes(
sectorsRoundUpNoZero(maxTableEntries * VHD_ENTRY_SIZE)
)
const prevBat = this.blockTable
const bat = (this.blockTable = Buffer.allocUnsafe(batSize))
prevBat.copy(bat)
bat.fill(BUF_BLOCK_UNUSED, prevBat.length)
debug(
`ensureBatSize: extend in memory BAT ${prevMaxTableEntries} -> ${maxTableEntries}`
)
const extendBat = async () => {
debug(
`ensureBatSize: extend in file BAT ${prevMaxTableEntries} -> ${maxTableEntries}`
)
return this._write(
constantStream(BUF_BLOCK_UNUSED, maxTableEntries - prevMaxTableEntries),
tableOffset + prevBat.length
)
}
async _freeFirstBlockSpace (spaceNeededBytes) {
try {
const { first, firstSector, lastSector } = this._getFirstAndLastBlocks()
if (tableOffset + batSize < sectorsToBytes(firstSector)) {
return Promise.all([extendBat(), this.writeHeader()])
const tableOffset = this.header.tableOffset
const { batSize } = this
const newMinSector = Math.ceil(
(tableOffset + batSize + spaceNeededBytes) / VHD_SECTOR_SIZE
)
if (
tableOffset + batSize + spaceNeededBytes >=
sectorsToBytes(firstSector)
) {
const { fullBlockSize } = this
const newFirstSector = Math.max(
lastSector + fullBlockSize / VHD_SECTOR_SIZE,
newMinSector
)
debug(
`freeFirstBlockSpace: move first block ${firstSector} -> ${newFirstSector}`
)
// copy the first block at the end
const stream = await this._readStream(
sectorsToBytes(firstSector),
fullBlockSize
)
await this._write(stream, sectorsToBytes(newFirstSector))
await this._setBatEntry(first, newFirstSector)
await this.writeFooter(true)
spaceNeededBytes -= this.fullBlockSize
if (spaceNeededBytes > 0) {
return this._freeFirstBlockSpace(spaceNeededBytes)
}
}
const { fullBlockSize } = this
const newFirstSector = lastSector + fullBlockSize / VHD_SECTOR_SIZE
debug(
`ensureBatSize: move first block ${firstSector} -> ${newFirstSector}`
)
// copy the first block at the end
const stream = await this._readStream(
sectorsToBytes(firstSector),
fullBlockSize
)
await this._write(stream, sectorsToBytes(newFirstSector))
await extendBat()
await this._setBatEntry(first, newFirstSector)
await this.writeHeader()
await this.writeFooter()
} catch (e) {
if (e.noBlock) {
await extendBat()
await this.writeHeader()
await this.writeFooter()
} else {
if (!e.noBlock) {
throw e
}
}
}
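
A worked example of the relocation arithmetic above, with made-up numbers and the default geometry assumed (2 MiB of block data plus one 512-byte bitmap sector per block):

// Illustrative values only: BAT at byte 1536, 4096-byte BAT, 8192 extra bytes
// requested, first data block at sector 12, last block starting at sector 4109.
const VHD_SECTOR_SIZE = 512
const tableOffset = 1536
const batSize = 4096
const spaceNeededBytes = 8192
const fullBlockSize = 2 * 1024 * 1024 + 512 // data + bitmap sector

const newMinSector = Math.ceil(
  (tableOffset + batSize + spaceNeededBytes) / VHD_SECTOR_SIZE
) // 27: first sector that leaves room for the BAT to grow
const lastSector = 4109
const newFirstSector = Math.max(
  lastSector + fullBlockSize / VHD_SECTOR_SIZE, // 4109 + 4097 = 8206
  newMinSector
) // the first block is copied to sector 8206, freeing sectors 12..4108
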
async ensureBatSize (entries) {
const { header } = this
const prevMaxTableEntries = header.maxTableEntries
if (prevMaxTableEntries >= entries) {
return
}
const newBatSize = computeBatSize(entries)
await this._freeFirstBlockSpace(newBatSize - this.batSize)
const maxTableEntries = (header.maxTableEntries = entries)
const prevBat = this.blockTable
const bat = (this.blockTable = Buffer.allocUnsafe(newBatSize))
prevBat.copy(bat)
bat.fill(BUF_BLOCK_UNUSED, prevMaxTableEntries * VHD_ENTRY_SIZE)
debug(
`ensureBatSize: extend BAT ${prevMaxTableEntries} -> ${maxTableEntries}`
)
await this._write(
constantStream(BUF_BLOCK_UNUSED, maxTableEntries - prevMaxTableEntries),
header.tableOffset + prevBat.length
)
await this.writeHeader()
}
// set the first sector (bitmap) of a block
_setBatEntry (block, blockSector) {
const i = block * VHD_ENTRY_SIZE
@@ -507,7 +498,7 @@ export class Vhd {
return this._write(
blockTable.slice(i, i + VHD_ENTRY_SIZE),
uint32ToUint64(this.header.tableOffset) + i
this.header.tableOffset + i
)
}
@@ -563,6 +554,9 @@ export class Vhd {
if (blockAddr === BLOCK_UNUSED) {
blockAddr = await this.createBlock(block.id)
parentBitmap = Buffer.alloc(this.bitmapSize, 0)
} else if (parentBitmap === undefined) {
parentBitmap = (await this._readBlock(block.id, true)).bitmap
}
const offset = blockAddr + this.sectorsOfBitmap + beginSectorId
@@ -629,11 +623,13 @@ export class Vhd {
}
// Write a context footer. (At the end and beginning of a vhd file.)
async writeFooter () {
async writeFooter (onlyEndFooter = false) {
const { footer } = this
const offset = this.getEndOfData()
const rawFooter = fuFooter.pack(footer)
const eof = await this._handler.getSize(this._path)
// sometimes the file is longer than anticipated; we still need to put the footer at the end
const offset = Math.max(this.getEndOfData(), eof - rawFooter.length)
footer.checksum = checksumStruct(rawFooter, fuFooter)
debug(
@@ -641,8 +637,9 @@ export class Vhd {
footer.checksum
}). (data=${rawFooter.toString('hex')})`
)
await this._write(rawFooter, 0)
if (!onlyEndFooter) {
await this._write(rawFooter, 0)
}
await this._write(rawFooter, offset)
}
@@ -658,6 +655,73 @@ export class Vhd {
)
return this._write(rawHeader, offset)
}
async writeData (offsetSectors, buffer) {
const bufferSizeSectors = Math.ceil(buffer.length / VHD_SECTOR_SIZE)
const startBlock = Math.floor(offsetSectors / this.sectorsPerBlock)
const endBufferSectors = offsetSectors + bufferSizeSectors
const lastBlock = Math.ceil(endBufferSectors / this.sectorsPerBlock) - 1
await this.ensureBatSize(lastBlock)
const blockSizeBytes = this.sectorsPerBlock * VHD_SECTOR_SIZE
const coversWholeBlock = (offsetInBlockSectors, endInBlockSectors) =>
offsetInBlockSectors === 0 && endInBlockSectors === this.sectorsPerBlock
for (
let currentBlock = startBlock;
currentBlock <= lastBlock;
currentBlock++
) {
const offsetInBlockSectors = Math.max(
0,
offsetSectors - currentBlock * this.sectorsPerBlock
)
const endInBlockSectors = Math.min(
endBufferSectors - currentBlock * this.sectorsPerBlock,
this.sectorsPerBlock
)
const startInBuffer = Math.max(
0,
(currentBlock * this.sectorsPerBlock - offsetSectors) * VHD_SECTOR_SIZE
)
const endInBuffer = Math.min(
((currentBlock + 1) * this.sectorsPerBlock - offsetSectors) *
VHD_SECTOR_SIZE,
buffer.length
)
let inputBuffer
if (coversWholeBlock(offsetInBlockSectors, endInBlockSectors)) {
inputBuffer = buffer.slice(startInBuffer, endInBuffer)
} else {
inputBuffer = Buffer.alloc(blockSizeBytes, 0)
buffer.copy(
inputBuffer,
offsetInBlockSectors * VHD_SECTOR_SIZE,
startInBuffer,
endInBuffer
)
}
await this.writeBlockSectors(
{ id: currentBlock, data: inputBuffer },
offsetInBlockSectors,
endInBlockSectors
)
}
await this.writeFooter()
}
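
The per-block split in writeData can be followed with small made-up numbers (sectorsPerBlock = 8 purely for readability; real dynamic VHDs default to 4096):

// Made-up geometry: 8 sectors per block, writing 6 sectors starting at
// absolute sector 5, so the write straddles blocks 0 and 1.
const sectorsPerBlock = 8
const offsetSectors = 5
const bufferSizeSectors = 6
const endBufferSectors = offsetSectors + bufferSizeSectors // 11

const startBlock = Math.floor(offsetSectors / sectorsPerBlock) // 0
const lastBlock = Math.ceil(endBufferSectors / sectorsPerBlock) - 1 // 1

for (let block = startBlock; block <= lastBlock; ++block) {
  const offsetInBlockSectors = Math.max(
    0,
    offsetSectors - block * sectorsPerBlock
  )
  const endInBlockSectors = Math.min(
    endBufferSectors - block * sectorsPerBlock,
    sectorsPerBlock
  )
  console.log(block, offsetInBlockSectors, endInBlockSectors)
  // block 0 -> sectors [5, 8): partial, padded to a full block before writing
  // block 1 -> sectors [0, 3): partial as well
}
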
async ensureSpaceForParentLocators (neededSectors) {
const firstLocatorOffset = VHD_FOOTER_SIZE + VHD_HEADER_SIZE
const currentSpace =
Math.floor(this.header.tableOffset / VHD_SECTOR_SIZE) -
firstLocatorOffset / VHD_SECTOR_SIZE
if (currentSpace < neededSectors) {
const deltaSectors = neededSectors - currentSpace
await this._freeFirstBlockSpace(sectorsToBytes(deltaSectors))
this.header.tableOffset += sectorsToBytes(deltaSectors)
await this._write(this.blockTable, this.header.tableOffset)
}
return firstLocatorOffset
}
}
// Merge vhd child into vhd parent.
@@ -719,9 +783,9 @@ export default concurrency(2)(async function vhdMerge (
const cFooter = childVhd.footer
const pFooter = parentVhd.footer
pFooter.currentSize = { ...cFooter.currentSize }
pFooter.currentSize = cFooter.currentSize
pFooter.diskGeometry = { ...cFooter.diskGeometry }
pFooter.originalSize = { ...cFooter.originalSize }
pFooter.originalSize = cFooter.originalSize
pFooter.timestamp = cFooter.timestamp
pFooter.uuid = cFooter.uuid
@@ -743,164 +807,178 @@ export async function chainVhd (
parentHandler,
parentPath,
childHandler,
childPath
childPath,
force = false
) {
const parentVhd = new Vhd(parentHandler, parentPath)
const childVhd = new Vhd(childHandler, childPath)
await Promise.all([
parentVhd.readHeaderAndFooter(),
childVhd.readHeaderAndFooter(),
])
const { header } = childVhd
await childVhd.readHeaderAndFooter()
const { header, footer } = childVhd
const parentName = relative(dirname(childPath), parentPath)
const parentUuid = parentVhd.footer.uuid
if (
header.parentUnicodeName !== parentName ||
!isEqual(header.parentUuid, parentUuid)
) {
header.parentUuid = parentUuid
header.parentUnicodeName = parentName
await childVhd.writeHeader()
return true
if (footer.diskType !== HARD_DISK_TYPE_DIFFERENCING) {
if (!force) {
throw new Error('cannot chain disk of type ' + footer.diskType)
}
footer.diskType = HARD_DISK_TYPE_DIFFERENCING
}
return false
await Promise.all([
childVhd.readBlockTable(),
parentVhd.readHeaderAndFooter(),
])
const parentName = relative(dirname(childPath), parentPath)
header.parentUuid = parentVhd.footer.uuid
header.parentUnicodeName = parentName
header.parentLocatorEntry[0].platformCode = PLATFORM_W2KU
const encodedFilename = Buffer.from(parentName, 'utf16le')
const dataSpaceSectors = Math.ceil(encodedFilename.length / VHD_SECTOR_SIZE)
const position = await childVhd.ensureSpaceForParentLocators(dataSpaceSectors)
await childVhd._write(encodedFilename, position)
header.parentLocatorEntry[0].platformDataSpace = sectorsToBytes(
dataSpaceSectors
)
header.parentLocatorEntry[0].platformDataLength = encodedFilename.length
header.parentLocatorEntry[0].platformDataOffset = position
for (let i = 1; i < 8; i++) {
header.parentLocatorEntry[i].platformCode = VHD_PLATFORM_CODE_NONE
header.parentLocatorEntry[i].platformDataSpace = 0
header.parentLocatorEntry[i].platformDataLength = 0
header.parentLocatorEntry[i].platformDataOffset = 0
}
await childVhd.writeHeader()
await childVhd.writeFooter()
return true
}
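
A hedged usage sketch of the rewritten chainVhd (paths and handler are hypothetical): the delta backup code further below chains each freshly written VHD onto its parent on the same remote, leaving force at its default so an unexpected disk type still raises.

// Hypothetical invocation on a single remote handler.
const rechain = (handler, parentPath, childPath) =>
  // passing true as the fifth argument would instead convert a dynamic child
  // into a differencing disk rather than throwing
  chainVhd(handler, parentPath, handler, childPath)
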
export const createReadStream = (handler, path) =>
createReadable(function * () {
const fds = []
export const createReadStream = asyncIteratorToStream(function * (handler, path) {
const fds = []
try {
const vhds = []
while (true) {
const fd = yield handler.openFile(path, 'r')
fds.push(fd)
const vhd = new Vhd(handler, fd)
vhds.push(vhd)
yield vhd.readHeaderAndFooter()
yield vhd.readBlockTable()
try {
const vhds = []
while (true) {
const fd = yield handler.openFile(path, 'r')
fds.push(fd)
const vhd = new Vhd(handler, fd)
vhds.push(vhd)
yield vhd.readHeaderAndFooter()
yield vhd.readBlockTable()
if (vhd.footer.diskType === HARD_DISK_TYPE_DYNAMIC) {
if (vhd.footer.diskType === HARD_DISK_TYPE_DYNAMIC) {
break
}
path = resolveRelativeFromFile(path, vhd.header.parentUnicodeName)
}
const nVhds = vhds.length
// this is the VHD we want to synthesize
const vhd = vhds[0]
// data of our synthetic VHD
// TODO: empty parentUuid and parentLocatorEntry-s in header
let header = {
...vhd.header,
tableOffset: 512 + 1024,
parentUnicodeName: '',
}
const bat = Buffer.allocUnsafe(
Math.ceil(4 * header.maxTableEntries / VHD_SECTOR_SIZE) * VHD_SECTOR_SIZE
)
let footer = {
...vhd.footer,
diskType: HARD_DISK_TYPE_DYNAMIC,
}
const sectorsPerBlockData = vhd.sectorsPerBlock
const sectorsPerBlock =
sectorsPerBlockData + vhd.bitmapSize / VHD_SECTOR_SIZE
const nBlocks = Math.ceil(footer.currentSize / header.blockSize)
const blocksOwner = new Array(nBlocks)
for (
let iBlock = 0,
blockOffset = Math.ceil((512 + 1024 + bat.length) / VHD_SECTOR_SIZE);
iBlock < nBlocks;
++iBlock
) {
let blockSector = BLOCK_UNUSED
for (let i = 0; i < nVhds; ++i) {
if (vhds[i].containsBlock(iBlock)) {
blocksOwner[iBlock] = i
blockSector = blockOffset
blockOffset += sectorsPerBlock
break
}
path = resolveRelativeFromFile(path, vhd.header.parentUnicodeName)
}
const nVhds = vhds.length
// this is the VHD we want to synthesize
const vhd = vhds[0]
// data of our synthetic VHD
// TODO: empty parentUuid and parentLocatorEntry-s in header
let header = {
...vhd.header,
tableOffset: {
high: 0,
low: 512 + 1024,
},
parentUnicodeName: '',
}
const bat = Buffer.allocUnsafe(
Math.ceil(4 * header.maxTableEntries / VHD_SECTOR_SIZE) *
VHD_SECTOR_SIZE
)
let footer = {
...vhd.footer,
diskType: HARD_DISK_TYPE_DYNAMIC,
}
const sectorsPerBlockData = vhd.sectorsPerBlock
const sectorsPerBlock =
sectorsPerBlockData + vhd.bitmapSize / VHD_SECTOR_SIZE
const nBlocks = Math.ceil(
uint32ToUint64(footer.currentSize) / header.blockSize
)
const blocksOwner = new Array(nBlocks)
for (
let iBlock = 0,
blockOffset = Math.ceil((512 + 1024 + bat.length) / VHD_SECTOR_SIZE);
iBlock < nBlocks;
++iBlock
) {
let blockSector = BLOCK_UNUSED
for (let i = 0; i < nVhds; ++i) {
if (vhds[i].containsBlock(iBlock)) {
blocksOwner[iBlock] = i
blockSector = blockOffset
blockOffset += sectorsPerBlock
break
}
}
bat.writeUInt32BE(blockSector, iBlock * 4)
}
footer = fuFooter.pack(footer)
checksumStruct(footer, fuFooter)
yield footer
header = fuHeader.pack(header)
checksumStruct(header, fuHeader)
yield header
yield bat
const bitmap = Buffer.alloc(vhd.bitmapSize, 0xff)
for (let iBlock = 0; iBlock < nBlocks; ++iBlock) {
const owner = blocksOwner[iBlock]
if (owner === undefined) {
continue
}
yield bitmap
const blocksByVhd = new Map()
const emitBlockSectors = function * (iVhd, i, n) {
const vhd = vhds[iVhd]
if (!vhd.containsBlock(iBlock)) {
yield * emitBlockSectors(iVhd + 1, i, n)
return
}
let block = blocksByVhd.get(vhd)
if (block === undefined) {
block = yield vhd._readBlock(iBlock)
blocksByVhd.set(vhd, block)
}
const { bitmap, data } = block
if (vhd.footer.diskType === HARD_DISK_TYPE_DYNAMIC) {
yield data.slice(i * VHD_SECTOR_SIZE, n * VHD_SECTOR_SIZE)
return
}
while (i < n) {
const hasData = mapTestBit(bitmap, i)
const start = i
do {
++i
} while (i < n && mapTestBit(bitmap, i) === hasData)
if (hasData) {
yield data.slice(start * VHD_SECTOR_SIZE, i * VHD_SECTOR_SIZE)
} else {
yield * emitBlockSectors(iVhd + 1, start, i)
}
}
}
yield * emitBlockSectors(owner, 0, sectorsPerBlock)
}
yield footer
} finally {
for (let i = 0, n = fds.length; i < n; ++i) {
handler.closeFile(fds[i]).catch(error => {
console.warn('createReadStream, closeFd', i, error)
})
}
bat.writeUInt32BE(blockSector, iBlock * 4)
}
})
footer = fuFooter.pack(footer)
checksumStruct(footer, fuFooter)
yield footer
header = fuHeader.pack(header)
checksumStruct(header, fuHeader)
yield header
yield bat
const bitmap = Buffer.alloc(vhd.bitmapSize, 0xff)
for (let iBlock = 0; iBlock < nBlocks; ++iBlock) {
const owner = blocksOwner[iBlock]
if (owner === undefined) {
continue
}
yield bitmap
const blocksByVhd = new Map()
const emitBlockSectors = function * (iVhd, i, n) {
const vhd = vhds[iVhd]
if (!vhd.containsBlock(iBlock)) {
yield * emitBlockSectors(iVhd + 1, i, n)
return
}
let block = blocksByVhd.get(vhd)
if (block === undefined) {
block = yield vhd._readBlock(iBlock)
blocksByVhd.set(vhd, block)
}
const { bitmap, data } = block
if (vhd.footer.diskType === HARD_DISK_TYPE_DYNAMIC) {
yield data.slice(i * VHD_SECTOR_SIZE, n * VHD_SECTOR_SIZE)
return
}
while (i < n) {
const hasData = mapTestBit(bitmap, i)
const start = i
do {
++i
} while (i < n && mapTestBit(bitmap, i) === hasData)
if (hasData) {
yield data.slice(start * VHD_SECTOR_SIZE, i * VHD_SECTOR_SIZE)
} else {
yield * emitBlockSectors(iVhd + 1, start, i)
}
}
}
yield * emitBlockSectors(owner, 0, sectorsPerBlock)
}
yield footer
} finally {
for (let i = 0, n = fds.length; i < n; ++i) {
handler.closeFile(fds[i]).catch(error => {
console.warn('createReadStream, closeFd', i, error)
})
}
}
})
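
The generator above emits a fixed layout: footer, header, BAT, then one bitmap sector plus the data sectors of each allocated block, and finally a copy of the footer. A small sketch of the offsets it implies (maxTableEntries is a hypothetical value for a 2 GiB disk with 2 MiB blocks):

// Offsets implied by the synthetic stream (sizes in bytes unless noted).
const VHD_SECTOR_SIZE = 512
const maxTableEntries = 1024 // hypothetical
const batBytes =
  Math.ceil((4 * maxTableEntries) / VHD_SECTOR_SIZE) * VHD_SECTOR_SIZE // 4096
const tableOffset = 512 + 1024 // footer (512) + header (1024), as in the code
const firstBlockSector = Math.ceil((512 + 1024 + batBytes) / VHD_SECTOR_SIZE) // 11
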
export async function readVhdMetadata (handler: RemoteHandler, path: string) {
const vhd = new Vhd(handler, path)


@@ -1424,7 +1424,7 @@ export default class Xapi extends XapiBase {
}
}
@synchronized() // like @concurrency(1) but more efficient
@concurrency(2)
@cancelable
async _snapshotVm ($cancelToken, vm, nameLabel = vm.name_label) {
debug(
@@ -1442,8 +1442,6 @@ export default class Xapi extends XapiBase {
nameLabel
).then(extractOpaqueRef)
this.addTag(ref, 'quiesce')::ignoreErrors()
await this._waitObjectState(ref, vm => includes(vm.tags, 'quiesce'))
} catch (error) {
const { code } = error
if (
@@ -1467,7 +1465,7 @@ export default class Xapi extends XapiBase {
// to-date object.
const [, snapshot] = await Promise.all([
this.call('VM.set_is_a_template', ref, false),
this._waitObjectState(ref, snapshot => !snapshot.is_a_template),
this.barrier(ref),
])
return snapshot


@@ -46,6 +46,7 @@ declare export class Vm extends XapiObject {
declare export class Xapi {
objects: { all: $Dict<Object> };
_assertHealthyVdiChains(vm: Vm): void;
_importVm(
cancelToken: mixed,
stream: AugmentedReadable,


@@ -8,7 +8,7 @@ import { basename, dirname } from 'path'
import { isEmpty, last, mapValues, noop, values } from 'lodash'
import { timeout as pTimeout } from 'promise-toolbox'
import { type Executor, type Job } from '../jobs'
import { type CallJob, type Executor, type Job } from '../jobs'
import { type Schedule } from '../scheduling'
import type RemoteHandler from '../../remote-handlers/abstract'
@@ -25,12 +25,14 @@ import {
safeDateFormat,
serializeError,
} from '../../utils'
import mergeVhd, {
import {
chainVhd,
createReadStream as createVhdReadStream,
readVhdMetadata,
} from '../../vhd-merge'
import { translateLegacyJob } from './migration'
type Mode = 'full' | 'delta'
type Settings = {|
@@ -133,16 +135,21 @@ const isVhd = (filename: string) => filename.endsWith('.vhd')
const listReplicatedVms = (
xapi: Xapi,
scheduleId: string,
srId: string
srId: string,
vmUuid?: string
): Vm[] => {
const { all } = xapi.objects
const vms = {}
for (const key in all) {
const object = all[key]
const oc = object.other_config
if (
object.$type === 'vm' &&
object.other_config['xo:backup:schedule'] === scheduleId &&
object.other_config['xo:backup:sr'] === srId
oc['xo:backup:schedule'] === scheduleId &&
oc['xo:backup:sr'] === srId &&
(oc['xo:backup:vm'] === vmUuid ||
// 2018-03-28, JFT: to catch VMs replicated before this fix
oc['xo:backup:vm'] === undefined)
) {
vms[object.$id] = object
}
@@ -305,9 +312,12 @@ export default class BackupNg {
getAllSchedules: () => Promise<Schedule[]>,
getRemoteHandler: (id: string) => Promise<RemoteHandler>,
getXapi: (id: string) => Xapi,
getJob: (id: string, 'backup') => Promise<BackupJob>,
updateJob: ($Shape<BackupJob>) => Promise<BackupJob>,
getJob: ((id: string, 'backup') => Promise<BackupJob>) &
((id: string, 'call') => Promise<CallJob>),
updateJob: (($Shape<BackupJob>, ?boolean) => Promise<BackupJob>) &
(($Shape<CallJob>, ?boolean) => Promise<CallJob>),
removeJob: (id: string) => Promise<void>,
worker: $Dict<any>,
}
constructor (app: any) {
@@ -517,9 +527,9 @@ export default class BackupNg {
const backupsByVm = (backupsByVmByRemote[remoteId] = {})
await Promise.all(
entries.map(async vmId => {
entries.map(async vmUuid => {
// $FlowFixMe don't know what is the problem (JFT)
const backups = await this._listVmBackups(handler, vmId)
const backups = await this._listVmBackups(handler, vmUuid)
if (backups.length === 0) {
return
@@ -530,7 +540,7 @@ export default class BackupNg {
backup.id = `${remoteId}/${backup._filename}`
})
backupsByVm[vmId] = backups
backupsByVm[vmUuid] = backups
})
)
})
@@ -539,6 +549,14 @@ export default class BackupNg {
return backupsByVmByRemote
}
async migrateLegacyBackupJob (jobId: string) {
const [job, schedules] = await Promise.all([
this._app.getJob(jobId, 'call'),
this._app.getAllSchedules(),
])
await this._app.updateJob(translateLegacyJob(job, schedules), false)
}
// High:
// - [ ] clones of replicated VMs should not be garbage collected
// - if storing uuids in source VM, how to detect them if the source is
@@ -558,10 +576,10 @@ export default class BackupNg {
// - [ ] snapshots and files of an old job should be detected and removed
// - [ ] delta import should support mapVdisSrs
// - [ ] size of the path? (base64url(Buffer.from(uuid.split('-').join(''), 'hex')))
// - [ ] do not create snapshot if unhealthy vdi chain
// - [ ] fix backup reports
// - [ ] what does the vmTimeout mean with the new concurrency? a VM can take
// a very long time to finish if there are other VMs before…
// - [ ] detect and gc incomplete replications
//
// Triage:
// - [ ] logs
@@ -576,17 +594,20 @@ export default class BackupNg {
// - [x] backups should be deletable from the API
// - [x] adding and removing VDIs should behave
// - [x] isolate VHD chains by job
// - [x] do not delete rolling snapshot in case of failure!
// - [x] do not create snapshot if unhealthy vdi chain
// - [x] replicated VMs should be discriminated by VM (vatesfr/xen-orchestra#2807)
@defer
async _backupVm (
$defer: any,
$cancelToken: any,
vmId: string,
vmUuid: string,
job: BackupJob,
schedule: Schedule
): Promise<BackupResult> {
const app = this._app
const xapi = app.getXapi(vmId)
const vm: Vm = (xapi.getObject(vmId): any)
const xapi = app.getXapi(vmUuid)
const vm: Vm = (xapi.getObject(vmUuid): any)
const { id: jobId, settings } = job
const { id: scheduleId } = schedule
@@ -611,6 +632,20 @@ export default class BackupNg {
const snapshots = vm.$snapshots
.filter(_ => _.other_config['xo:backup:job'] === jobId)
.sort(compareSnapshotTime)
await xapi._assertHealthyVdiChains(vm)
let snapshot: Vm = (await xapi._snapshotVm(
$cancelToken,
vm,
`[XO Backup ${job.name}] ${vm.name_label}`
): any)
await xapi._updateObjectMapProperty(snapshot, 'other_config', {
'xo:backup:job': jobId,
'xo:backup:schedule': scheduleId,
'xo:backup:vm': vmUuid,
})
$defer(() =>
asyncMap(
getOldEntries(
@@ -623,16 +658,6 @@ export default class BackupNg {
)
)
let snapshot: Vm = (await xapi._snapshotVm(
$cancelToken,
vm,
`[XO Backup ${job.name}] ${vm.name_label}`
): any)
$defer.onFailure.call(xapi, '_deleteVm', snapshot)
await xapi._updateObjectMapProperty(snapshot, 'other_config', {
'xo:backup:job': jobId,
'xo:backup:schedule': scheduleId,
})
snapshot = ((await xapi.barrier(snapshot.$ref): any): Vm)
if (exportRetention === 0) {
@@ -646,12 +671,13 @@ export default class BackupNg {
const remotes = unboxIds(job.remotes)
const srs = unboxIds(job.srs)
if (remotes.length === 0 && srs.length === 0) {
const nTargets = remotes.length + srs.length
if (nTargets === 0) {
throw new Error('export retention must be 0 without remotes and SRs')
}
const now = Date.now()
const vmDir = getVmBackupDir(vm.uuid)
const vmDir = getVmBackupDir(vmUuid)
const basename = safeDateFormat(now)
@@ -670,6 +696,15 @@ export default class BackupNg {
const exportTask = xva.task
xva = xva.pipe(createSizeStream())
const forkExport =
nTargets === 1
? () => xva
: () => {
const fork = xva.pipe(new PassThrough())
fork.task = exportTask
return fork
}
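
The PassThrough forks above exist because a Node readable can only be consumed once: with several targets, each one pipes from its own PassThrough instead of competing for the same stream. A minimal sketch of the pattern, independent of the backup code:

import { PassThrough } from 'stream'

// Fan a single readable out to several consumers; the source advances at the
// pace allowed by the slowest destination thanks to pipe's backpressure.
const forkStream = source => {
  const fork = new PassThrough()
  source.pipe(fork)
  return fork
}

// e.g. one forkStream(xva) per remote and per SR target
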
const dataBasename = `${basename}.xva`
const metadata: MetadataFull = {
@@ -689,79 +724,74 @@ export default class BackupNg {
const errors = []
await waitAll(
[
...remotes.map(
defer(async ($defer, remoteId) => {
const fork = xva.pipe(new PassThrough())
...remotes.map(async remoteId => {
const fork = forkExport()
const handler = await app.getRemoteHandler(remoteId)
const handler = await app.getRemoteHandler(remoteId)
const oldBackups: MetadataFull[] = (getOldEntries(
exportRetention,
await this._listVmBackups(
handler,
vm,
_ => _.mode === 'full' && _.scheduleId === scheduleId
)
): any)
const deleteFirst = getSetting(settings, 'deleteFirst', remoteId)
if (deleteFirst) {
await this._deleteFullVmBackups(handler, oldBackups)
}
await writeStream(fork, handler, dataFilename)
await handler.outputFile(metadataFilename, jsonMetadata)
if (!deleteFirst) {
await this._deleteFullVmBackups(handler, oldBackups)
}
})
),
...srs.map(
defer(async ($defer, srId) => {
const fork = xva.pipe(new PassThrough())
fork.task = exportTask
const xapi = app.getXapi(srId)
const sr = xapi.getObject(srId)
const oldVms = getOldEntries(
exportRetention,
listReplicatedVms(xapi, scheduleId, srId)
const oldBackups: MetadataFull[] = (getOldEntries(
exportRetention,
await this._listVmBackups(
handler,
vm,
_ => _.mode === 'full' && _.scheduleId === scheduleId
)
): any)
const deleteFirst = getSetting(settings, 'deleteFirst', srId)
if (deleteFirst) {
await this._deleteVms(xapi, oldVms)
}
const deleteFirst = getSetting(settings, 'deleteFirst', remoteId)
if (deleteFirst) {
await this._deleteFullVmBackups(handler, oldBackups)
}
const vm = await xapi.barrier(
await xapi._importVm($cancelToken, fork, sr, vm =>
xapi._setObjectProperties(vm, {
nameLabel: `${metadata.vm.name_label} (${safeDateFormat(
metadata.timestamp
)})`,
})
)
await writeStream(fork, handler, dataFilename)
await handler.outputFile(metadataFilename, jsonMetadata)
if (!deleteFirst) {
await this._deleteFullVmBackups(handler, oldBackups)
}
}),
...srs.map(async srId => {
const fork = forkExport()
const xapi = app.getXapi(srId)
const sr = xapi.getObject(srId)
const oldVms = getOldEntries(
exportRetention,
listReplicatedVms(xapi, scheduleId, srId, vmUuid)
)
const deleteFirst = getSetting(settings, 'deleteFirst', srId)
if (deleteFirst) {
await this._deleteVms(xapi, oldVms)
}
const vm = await xapi.barrier(
await xapi._importVm($cancelToken, fork, sr, vm =>
xapi._setObjectProperties(vm, {
nameLabel: `${metadata.vm.name_label} (${safeDateFormat(
metadata.timestamp
)})`,
})
)
)
await Promise.all([
xapi.addTag(vm.$ref, 'Disaster Recovery'),
xapi._updateObjectMapProperty(vm, 'blocked_operations', {
start:
'Start operation for this vm is blocked, clone it if you want to use it.',
}),
xapi._updateObjectMapProperty(vm, 'other_config', {
'xo:backup:sr': srId,
}),
])
await Promise.all([
xapi.addTag(vm.$ref, 'Disaster Recovery'),
xapi._updateObjectMapProperty(vm, 'blocked_operations', {
start:
'Start operation for this vm is blocked, clone it if you want to use it.',
}),
xapi._updateObjectMapProperty(vm, 'other_config', {
'xo:backup:sr': srId,
}),
])
if (!deleteFirst) {
await this._deleteVms(xapi, oldVms)
}
})
),
if (!deleteFirst) {
await this._deleteVms(xapi, oldVms)
}
}),
],
error => {
console.warn(error)
@@ -779,6 +809,11 @@ export default class BackupNg {
transferSize: xva.size,
}
} else if (job.mode === 'delta') {
if (snapshotRetention === 0) {
// only keep the snapshot in case of success
$defer.onFailure.call(xapi, 'deleteVm', snapshot)
}
const baseSnapshot = last(snapshots)
if (baseSnapshot !== undefined) {
console.log(baseSnapshot.$id) // TODO: remove
@@ -815,49 +850,55 @@ export default class BackupNg {
const jsonMetadata = JSON.stringify(metadata)
// create a fork of the delta export
const forkExport = (() => {
// replace the stream factories by fork factories
const streams: any = mapValues(deltaExport.streams, lazyStream => {
let forks = []
return () => {
if (forks === undefined) {
throw new Error(
'cannot fork the stream after it has been created'
)
}
if (forks.length === 0) {
lazyStream().then(
stream => {
// $FlowFixMe
forks.forEach(({ resolve }) => {
const fork: any = stream.pipe(new PassThrough())
fork.task = stream.task
resolve(fork)
const forkExport =
nTargets === 1
? () => deltaExport
: (() => {
// replace the stream factories by fork factories
const streams: any = mapValues(
deltaExport.streams,
lazyStream => {
let forks = []
return () => {
if (forks === undefined) {
throw new Error(
'cannot fork the stream after it has been created'
)
}
if (forks.length === 0) {
lazyStream().then(
stream => {
// $FlowFixMe
forks.forEach(({ resolve }) => {
const fork: any = stream.pipe(new PassThrough())
fork.task = stream.task
resolve(fork)
})
forks = undefined
},
error => {
// $FlowFixMe
forks.forEach(({ reject }) => {
reject(error)
})
forks = undefined
}
)
}
return new Promise((resolve, reject) => {
// $FlowFixMe
forks.push({ reject, resolve })
})
forks = undefined
},
error => {
// $FlowFixMe
forks.forEach(({ reject }) => {
reject(error)
})
forks = undefined
}
)
}
)
return () => {
return {
__proto__: deltaExport,
streams,
}
}
return new Promise((resolve, reject) => {
// $FlowFixMe
forks.push({ reject, resolve })
})
}
})
return () => {
return {
__proto__: deltaExport,
streams,
}
}
})()
})()
const mergeStart = 0
const mergeEnd = 0
@@ -866,112 +907,103 @@ export default class BackupNg {
const errors = []
await waitAll(
[
...remotes.map(
defer(async ($defer, remoteId) => {
const fork = forkExport()
...remotes.map(async remoteId => {
const fork = forkExport()
const handler = await app.getRemoteHandler(remoteId)
const handler = await app.getRemoteHandler(remoteId)
const oldBackups: MetadataDelta[] = (getOldEntries(
exportRetention,
await this._listVmBackups(
handler,
vm,
_ => _.mode === 'delta' && _.scheduleId === scheduleId
)
): any)
const oldBackups: MetadataDelta[] = (getOldEntries(
exportRetention,
await this._listVmBackups(
handler,
vm,
_ => _.mode === 'delta' && _.scheduleId === scheduleId
)
): any)
const deleteFirst = getSetting(settings, 'deleteFirst', remoteId)
if (deleteFirst) {
this._deleteDeltaVmBackups(handler, oldBackups)
}
const deleteFirst = getSetting(settings, 'deleteFirst', remoteId)
if (deleteFirst) {
this._deleteDeltaVmBackups(handler, oldBackups)
}
await asyncMap(
fork.vdis,
defer(async ($defer, vdi, id) => {
const path = `${vmDir}/${metadata.vhds[id]}`
await asyncMap(
fork.vdis,
defer(async ($defer, vdi, id) => {
const path = `${vmDir}/${metadata.vhds[id]}`
const isDelta = 'xo:base_delta' in vdi.other_config
let parentPath
if (isDelta) {
const vdiDir = dirname(path)
const parent = (await handler.list(vdiDir))
.filter(isVhd)
.sort()
.pop()
parentPath = `${vdiDir}/${parent}`
}
const isDelta = 'xo:base_delta' in vdi.other_config
let parentPath
if (isDelta) {
const vdiDir = dirname(path)
const parent = (await handler.list(vdiDir))
.filter(isVhd)
.sort()
.pop()
parentPath = `${vdiDir}/${parent}`
}
await writeStream(
fork.streams[`${id}.vhd`](),
handler,
path,
{
// no checksum for VHDs, because they will be invalidated by
// merges and chainings
checksum: false,
}
)
$defer.onFailure.call(handler, 'unlink', path)
if (isDelta) {
await chainVhd(handler, parentPath, handler, path)
}
await writeStream(fork.streams[`${id}.vhd`](), handler, path, {
// no checksum for VHDs, because they will be invalidated by
// merges and chainings
checksum: false,
})
)
$defer.onFailure.call(handler, 'unlink', path)
await handler.outputFile(metadataFilename, jsonMetadata)
if (!deleteFirst) {
this._deleteDeltaVmBackups(handler, oldBackups)
}
})
),
...srs.map(
defer(async ($defer, srId) => {
const fork = forkExport()
const xapi = app.getXapi(srId)
const sr = xapi.getObject(srId)
const oldVms = getOldEntries(
exportRetention,
listReplicatedVms(xapi, scheduleId, srId)
)
const deleteFirst = getSetting(settings, 'deleteFirst', srId)
if (deleteFirst) {
await this._deleteVms(xapi, oldVms)
}
transferStart = Math.min(transferStart, Date.now())
const { vm } = await xapi.importDeltaVm(fork, {
disableStartAfterImport: false, // we'll take care of that
name_label: `${metadata.vm.name_label} (${safeDateFormat(
metadata.timestamp
)})`,
srId: sr.$id,
if (isDelta) {
await chainVhd(handler, parentPath, handler, path)
}
})
)
transferEnd = Math.max(transferEnd, Date.now())
await handler.outputFile(metadataFilename, jsonMetadata)
await Promise.all([
xapi.addTag(vm.$ref, 'Continuous Replication'),
xapi._updateObjectMapProperty(vm, 'blocked_operations', {
start:
'Start operation for this vm is blocked, clone it if you want to use it.',
}),
xapi._updateObjectMapProperty(vm, 'other_config', {
'xo:backup:sr': srId,
}),
])
if (!deleteFirst) {
this._deleteDeltaVmBackups(handler, oldBackups)
}
}),
...srs.map(async srId => {
const fork = forkExport()
if (!deleteFirst) {
await this._deleteVms(xapi, oldVms)
}
const xapi = app.getXapi(srId)
const sr = xapi.getObject(srId)
const oldVms = getOldEntries(
exportRetention,
listReplicatedVms(xapi, scheduleId, srId, vmUuid)
)
const deleteFirst = getSetting(settings, 'deleteFirst', srId)
if (deleteFirst) {
await this._deleteVms(xapi, oldVms)
}
transferStart = Math.min(transferStart, Date.now())
const { vm } = await xapi.importDeltaVm(fork, {
disableStartAfterImport: false, // we'll take care of that
name_label: `${metadata.vm.name_label} (${safeDateFormat(
metadata.timestamp
)})`,
srId: sr.$id,
})
),
transferEnd = Math.max(transferEnd, Date.now())
await Promise.all([
xapi.addTag(vm.$ref, 'Continuous Replication'),
xapi._updateObjectMapProperty(vm, 'blocked_operations', {
start:
'Start operation for this vm is blocked, clone it if you want to use it.',
}),
xapi._updateObjectMapProperty(vm, 'other_config', {
'xo:backup:sr': srId,
}),
])
if (!deleteFirst) {
await this._deleteVms(xapi, oldVms)
}
}),
],
error => {
console.warn(error)
@@ -1044,7 +1076,12 @@ export default class BackupNg {
$defer.onFailure.call(handler, 'unlink', path)
const childPath = child.path
await mergeVhd(handler, path, handler, childPath)
await this._app.worker.mergeVhd(
handler._remote,
path,
handler._remote,
childPath
)
await handler.rename(path, childPath)
}


@@ -89,7 +89,7 @@ const methods = {
}),
}
const parseParamsVector = vector => {
const parseParamsVector = (vector: any) => {
assert.strictEqual(vector.type, 'crossProduct')
const { items } = vector
assert.strictEqual(items.length, 2)
@@ -120,34 +120,26 @@ const parseParamsVector = vector => {
return { ...params, vms }
}
export const translateOldJobs = async (app: any): Promise<Array<BackupJob>> => {
const backupJobs: Array<BackupJob> = []
const [jobs, schedules] = await Promise.all([
app.getAllJobs('call'),
app.getAllSchedules(),
])
jobs.forEach(job => {
try {
const { id } = job
let method, schedule
if (
job.type === 'call' &&
(method = methods[job.method]) !== undefined &&
(schedule = schedules.find(_ => _.jobId === id)) !== undefined
) {
const params = parseParamsVector(job.paramsVector)
backupJobs.push({
id,
name: params.tag || job.name,
type: 'backup',
userId: job.userId,
// $FlowFixMe `method` is initialized but Flow fails to see this
...method(job, params, schedule),
})
}
} catch (error) {
console.warn('translateOldJobs', job, error)
}
})
return backupJobs
export const translateLegacyJob = (
job: CallJob,
schedules: Schedule[]
): BackupJob => {
const { id } = job
let method, schedule
if (
job.type !== 'call' ||
(method = methods[job.method]) === undefined ||
(schedule = schedules.find(_ => _.jobId === id)) === undefined
) {
throw new Error(`cannot convert job ${job.id}`)
}
const params = parseParamsVector(job.paramsVector)
return {
id,
name: params.tag || job.name,
type: 'backup',
userId: job.userId,
// $FlowFixMe `method` is initialized but Flow fails to see this
...method(job, params, schedule),
}
}


@@ -21,32 +21,32 @@ export type Job = {
id: string,
name: string,
type: string,
userId: string
userId: string,
}
type ParamsVector =
| {|
items: Array<Object>,
type: 'crossProduct'
type: 'crossProduct',
|}
| {|
mapping: Object,
type: 'extractProperties',
value: Object
value: Object,
|}
| {|
pattern: Pattern,
type: 'fetchObjects'
type: 'fetchObjects',
|}
| {|
collection: Object,
iteratee: Function,
paramName?: string,
type: 'map'
type: 'map',
|}
| {|
type: 'set',
values: any
values: any,
|}
export type CallJob = {|
@@ -54,7 +54,7 @@ export type CallJob = {|
method: string,
paramsVector: ParamsVector,
timeout?: number,
type: 'call'
type: 'call',
|}
export type Executor = ({|
@@ -64,7 +64,7 @@ export type Executor = ({|
logger: Logger,
runJobId: string,
schedule?: Schedule,
session: Object
session: Object,
|}) => Promise<any>
// -----------------------------------------------------------------------------
@@ -180,9 +180,12 @@ export default class Jobs {
return this._jobs.create(job)
}
async updateJob ({ id, ...props }: $Shape<Job>) {
const job = await this.getJob(id)
patch(job, props)
async updateJob (job: $Shape<Job>, merge: boolean = true) {
if (merge) {
const { id, ...props } = job
job = await this.getJob(id)
patch(job, props)
}
return /* await */ this._jobs.save(job)
}


@@ -1,9 +1,7 @@
import { noSuchObject } from 'xo-common/api-errors'
import RemoteHandlerLocal from '../remote-handlers/local'
import RemoteHandlerNfs from '../remote-handlers/nfs'
import RemoteHandlerSmb from '../remote-handlers/smb'
import { forEach, mapToArray } from '../utils'
import { getHandler } from '../remote-handlers'
import { Remotes } from '../models/remote'
// ===================================================================
@@ -40,20 +38,7 @@ export default class {
throw new Error('remote is disabled')
}
const HANDLERS = {
file: RemoteHandlerLocal,
smb: RemoteHandlerSmb,
nfs: RemoteHandlerNfs,
}
// FIXME: should be done in xo-remote-parser.
const type = remote.url.split('://')[0]
const Handler = HANDLERS[type]
if (!Handler) {
throw new Error('Unhandled remote type')
}
return new Handler(remote)
return getHandler(remote)
}
async testRemote (remote) {


@@ -0,0 +1,14 @@
import Worker from 'jest-worker'
export default class Workers {
get worker () {
return this._worker
}
constructor (app) {
app.on('start', () => {
this._worker = new Worker(require.resolve('./worker'))
})
app.on('stop', () => this._worker.end())
}
}
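
jest-worker turns the exports of ./worker into async methods on the pool and runs them in child processes, which is why the worker's mergeVhd below receives plain remote descriptors rather than handlers or streams (those cannot cross the process boundary). A hedged sketch of how the mixin is consumed (the app wiring is assumed):

// Hypothetical consumer, mirroring the backups-ng call shown earlier:
// app.worker.mergeVhd(handler._remote, path, handler._remote, childPath)
const mergeInWorker = (app, remote, parentPath, childPath) =>
  app.worker.mergeVhd(remote, parentPath, remote, childPath)
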


@@ -0,0 +1,18 @@
// @flow
import mergeVhd_ from '../../vhd-merge'
import { type Remote, getHandler } from '../../remote-handlers'
export function mergeVhd (
parentRemote: Remote,
parentPath: string,
childRemote: Remote,
childPath: string
) {
return mergeVhd_(
getHandler(parentRemote),
parentPath,
getHandler(childRemote),
childPath
)
}


@@ -1,7 +1,7 @@
{
"private": false,
"name": "xo-web",
"version": "5.17.2",
"version": "5.17.3",
"license": "AGPL-3.0",
"description": "Web interface client for Xen-Orchestra",
"keywords": [


@@ -256,6 +256,7 @@ const messages = {
jobMergedDataSpeed: 'Merge speed:',
allJobCalls: 'All',
job: 'Job',
jobEdit: 'Edit job',
jobModalTitle: 'Job {job}',
jobId: 'ID',
jobType: 'Type',
@@ -275,6 +276,7 @@ const messages = {
jobTimezone: 'Timezone',
jobServerTimezone: 'Server',
runJob: 'Run job',
runJobConfirm: 'Are you sure you want to run {backupType} {id} ({tag})?',
runJobVerbose: 'One shot running started. See overview for logs.',
jobFinished: 'Finished',
jobInterrupted: 'Interrupted',
@@ -290,11 +292,12 @@ const messages = {
'You are editing Schedule {name} ({id}). Saving will override previous schedule state.',
jobEditMessage:
'You are editing job {name} ({id}). Saving will override previous job state.',
scheduleEdit: 'Edit',
scheduleEdit: 'Edit schedule',
scheduleSave: 'Save',
cancelScheduleEdition: 'Cancel',
scheduleAdd: 'Add a schedule',
scheduleDelete: 'Delete',
scheduleRun: 'Run schedule',
deleteSelectedSchedules: 'Delete selected schedules',
noScheduledJobs: 'No scheduled jobs.',
newSchedule: 'New schedule',
@@ -313,6 +316,10 @@ const messages = {
noMatchingVms: 'There are no matching VMs!',
allMatchingVms: '{icon} See the matching VMs ({nMatchingVms, number})',
backupOwner: 'Backup owner',
migrateBackupSchedule: 'Migrate to backup NG',
migrateBackupScheduleMessage:
'This will migrate this backup to a backup NG. This operation is not reversible. Do you want to continue?',
runBackupNgJobConfirm: 'Are you sure you want to run {name} ({id})?',
// ------ New backup -----
newBackupSelection: 'Select your backup type:',
@@ -324,6 +331,8 @@ const messages = {
backupName: 'Name',
useDelta: 'Use delta',
useCompression: 'Use compression',
dbAndDrRequireEntreprisePlan: 'Delta Backup and DR require Enterprise plan',
crRequiresPremiumPlan: 'CR requires Premium plan',
smartBackupModeTitle: 'Smart mode',
backupTargetRemotes: 'Target remotes (for Export)',
backupTargetSrs: 'Target SRs (for Replication)',
@@ -980,6 +989,12 @@ const messages = {
dashboardStatsButtonRemoveAll: 'Clear selection',
dashboardStatsButtonAddAllHost: 'Add all hosts',
dashboardStatsButtonAddAllVM: 'Add all VMs',
dashboardSendReport: 'Send report',
dashboardReport: 'Report',
dashboardSendReportMessage:
'This will send a usage report to your configured emails.',
dashboardSendReportInfo:
'The usage report and transport email plugins need to be loaded!',
// --- Stats board --
weekHeatmapData: '{value} {date, date, medium}',
@@ -1154,7 +1169,6 @@ const messages = {
scheduleTimezone: 'Timezone',
scheduleExportRetention: 'Export ret.',
scheduleSnapshotRetention: 'Snapshot ret.',
scheduleRun: 'Run',
getRemote: 'Get remote',
listRemote: 'List Remote',
simpleBackup: 'simple',
@@ -1524,6 +1538,9 @@ const messages = {
promptUpgradeReloadTitle: 'Upgrade successful',
promptUpgradeReloadMessage:
'Your XOA has successfully upgraded, and your browser must reload the application. Do you want to reload now ?',
upgradeWarningTitle: 'Upgrade warning',
upgradeWarningMessage:
'You have some backup jobs in progress. If you upgrade now, these jobs will be interrupted! Are you sure you want to continue?',
// ----- OS Disclaimer -----
disclaimerTitle: 'Xen Orchestra from the sources',


@@ -199,7 +199,13 @@ class StrongConfirm extends Component {
<strong className='no-text-selection'>{_(messageId, values)}</strong>
</div>
<div>
<input className='form-control' onChange={this._onInputChange} />
<input
className='form-control'
ref={ref => {
ref && ref.focus()
}}
onChange={this._onInputChange}
/>
</div>
</GenericModal>
)


@@ -1,8 +1,7 @@
import Component from 'base-component'
import forEach from 'lodash/forEach'
import React from 'react'
import remove from 'lodash/remove'
import { Shortcuts as ReactShortcuts } from 'react-shortcuts'
import { forEach, remove } from 'lodash'
let enabled = true
const instances = []
@@ -29,7 +28,20 @@ export default class Shortcuts extends Component {
remove(instances, this)
}
_handler = (command, event) => {
// When an input is focused, shortcuts are disabled by default *except* for
// non-printable keys (Esc, Enter, ...) but we want to disable them as well
// https://github.com/avocode/react-shortcuts/issues/13#issuecomment-255868423
if (event.target.tagName === 'INPUT') {
return
}
this.props.handler(command, event)
}
render () {
return enabled ? <ReactShortcuts {...this.props} /> : null
return enabled ? (
<ReactShortcuts {...this.props} handler={this._handler} />
) : null
}
}


@@ -830,8 +830,8 @@ export default class SortedTable extends Component {
{shortcutsTarget !== undefined && (
<Shortcuts
handler={this._getShortcutsHandler()}
isolate
name='SortedTable'
stopPropagation
targetNodeSelector={shortcutsTarget}
/>
)}


@@ -1598,6 +1598,12 @@ export const deleteBackupSchedule = async schedule => {
subscribeJobs.forceRefresh()
}
export const migrateBackupSchedule = id =>
confirm({
title: _('migrateBackupSchedule'),
body: _('migrateBackupScheduleMessage'),
}).then(() => _call('backupNg.migrateLegacyJob', { id: resolveId(id) }))
export const deleteSchedule = schedule =>
_call('schedule.delete', { id: resolveId(schedule) })::tap(
subscribeSchedules.forceRefresh
@@ -1726,6 +1732,8 @@ export const purgePluginConfiguration = async id => {
export const testPlugin = async (id, data) => _call('plugin.test', { id, data })
export const sendUsageReport = () => _call('plugin.usageReport.send')
// Resource set ------------------------------------------------------
export const createResourceSet = (name, { subjects, objects, limits } = {}) =>


@@ -285,6 +285,10 @@
@extend .fa;
@extend .fa-cogs;
}
&-migrate-job {
@extend .fa;
@extend .fa-share;
}
// VM
&-vm {


@@ -5,6 +5,7 @@ import Icon from 'icon'
import React from 'react'
import SortedTable from 'sorted-table'
import StateButton from 'state-button'
import { confirm } from 'modal'
import { map, groupBy } from 'lodash'
import { Card, CardHeader, CardBlock } from 'card'
import { constructQueryString } from 'smart-backup'
@@ -28,6 +29,15 @@ import New from './new'
import FileRestore from './file-restore'
import Restore from './restore'
const _runBackupNgJob = ({ id, name, schedule }) =>
confirm({
title: _('runJob'),
body: _('runBackupNgJobConfirm', {
id: id.slice(0, 5),
name: <strong>{name}</strong>,
}),
}).then(() => runBackupNgJob({ id, schedule }))
const SchedulePreviewBody = ({ item: job, userData: { schedulesByJob } }) => (
<table>
<tr className='text-muted'>
@@ -57,12 +67,13 @@ const SchedulePreviewBody = ({ item: job, userData: { schedulesByJob } }) => (
</td>
<td>
<ActionButton
handler={runBackupNgJob}
btnStyle='primary'
data-id={job.id}
data-name={job.name}
data-schedule={schedule.id}
handler={_runBackupNgJob}
icon='run-schedule'
size='small'
data-id={job.id}
data-schedule={schedule.id}
btnStyle='primary'
/>
</td>
</tr>
@@ -93,7 +104,7 @@ class JobsTable extends React.Component {
],
columns: [
{
itemRenderer: _ => _.id.slice(0, 5),
itemRenderer: _ => _.id.slice(4, 8),
name: _('jobId'),
},
{


@@ -1,5 +1,6 @@
import _ from 'intl'
import ActionButton from 'action-button'
import Icon from 'icon'
import React from 'react'
import renderXoItem, { renderXoItemFromId } from 'render-xo-item'
import Tooltip from 'tooltip'
@@ -29,13 +30,18 @@ import { FormGroup, getRandomId, Input, Ul, Li } from './utils'
const normaliseTagValues = values => resolveIds(values).map(value => [value])
const constructPattern = values => ({
id: {
__or: resolveIds(values),
},
})
const constructPattern = values =>
values.length === 1
? {
id: resolveId(values[0]),
}
: {
id: {
__or: resolveIds(values),
},
}
const destructPattern = pattern => pattern.id.__or
const destructPattern = pattern => pattern.id.__or || [pattern.id]
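
The single-value special case above keeps the stored pattern minimal; for illustration (ids are made up):

// constructPattern(['vm1'])                         -> { id: 'vm1' }
// constructPattern(['vm1', 'vm2'])                  -> { id: { __or: ['vm1', 'vm2'] } }
// destructPattern({ id: 'vm1' })                    -> ['vm1']
// destructPattern({ id: { __or: ['vm1', 'vm2'] } }) -> ['vm1', 'vm2']
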
const destructVmsPattern = pattern =>
pattern.id === undefined
@@ -554,14 +560,14 @@ export default [
</Card>
<Card>
<CardBlock>
<div className='btn-toolbar text-xs-center'>
<div className='text-xs-center'>
<ActionButton
active={state.snapshotMode}
handler={effects.setSnapshotMode}
icon='rolling-snapshot'
>
{_('rollingSnapshot')}
</ActionButton>
</ActionButton>{' '}
<ActionButton
active={state.backupMode}
disabled={state.isDelta}
@@ -569,31 +575,50 @@ export default [
icon='backup'
>
{_('backup')}
</ActionButton>
</ActionButton>{' '}
<ActionButton
active={state.deltaMode}
disabled={state.isFull}
disabled={
state.isFull ||
(!state.deltaMode && process.env.XOA_PLAN < 3)
}
handler={effects.setDeltaMode}
icon='delta-backup'
>
{_('deltaBackup')}
</ActionButton>
</ActionButton>{' '}
<ActionButton
active={state.drMode}
disabled={state.isDelta}
disabled={
state.isDelta ||
(!state.drMode && process.env.XOA_PLAN < 3)
}
handler={effects.setDrMode}
icon='disaster-recovery'
>
{_('disasterRecovery')}
</ActionButton>
</ActionButton>{' '}
{process.env.XOA_PLAN < 3 && (
<Tooltip content={_('dbAndDrRequireEntreprisePlan')}>
<Icon icon='info' />
</Tooltip>
)}{' '}
<ActionButton
active={state.crMode}
disabled={state.isFull}
disabled={
state.isFull ||
(!state.crMode && process.env.XOA_PLAN < 4)
}
handler={effects.setCrMode}
icon='continuous-replication'
>
{_('continuousReplication')}
</ActionButton>
</ActionButton>{' '}
{process.env.XOA_PLAN < 4 && (
<Tooltip content={_('crRequiresPremiumPlan')}>
<Icon icon='info' />
</Tooltip>
)}
</div>
</CardBlock>
</Card>


@@ -10,6 +10,7 @@ import React from 'react'
import SortedTable from 'sorted-table'
import StateButton from 'state-button'
import Tooltip from 'tooltip'
import { confirm } from 'modal'
import { addSubscriptions } from 'utils'
import { constructQueryString } from 'smart-backup'
import { createSelector } from 'selectors'
@@ -19,6 +20,7 @@ import {
deleteBackupSchedule,
disableSchedule,
enableSchedule,
migrateBackupSchedule,
runJob,
subscribeJobs,
subscribeSchedules,
@@ -35,6 +37,16 @@ const jobKeyToLabel = {
rollingSnapshot: _('rollingSnapshot'),
}
const _runJob = ({ jobLabel, jobId, scheduleTag }) =>
confirm({
title: _('runJob'),
body: _('runJobConfirm', {
backupType: <strong>{jobLabel}</strong>,
id: <strong>{jobId.slice(4, 8)}</strong>,
tag: scheduleTag,
}),
}).then(() => runJob(jobId))
const JOB_COLUMNS = [
{
name: _('jobId'),
@@ -80,44 +92,55 @@ const JOB_COLUMNS = [
},
{
name: _('jobAction'),
itemRenderer: ({ redirect, schedule }, isScheduleUserMissing) => (
<fieldset>
{!isScheduleUserMissing[schedule.id] && (
<Tooltip content={_('backupUserNotFound')}>
<Icon className='mr-1' icon='error' />
</Tooltip>
)}
<Link
className='btn btn-sm btn-primary mr-1'
to={`/backup/${schedule.id}/edit`}
>
<Icon icon='edit' />
</Link>
<ButtonGroup>
{redirect && (
<ActionRowButton
btnStyle='primary'
handler={redirect}
icon='preview'
tooltip={_('redirectToMatchingVms')}
/>
itemRenderer: (item, isScheduleUserMissing) => {
const { redirect, schedule } = item
const { id } = schedule
return (
<fieldset>
{isScheduleUserMissing[id] && (
<Tooltip content={_('backupUserNotFound')}>
<Icon className='mr-1' icon='error' />
</Tooltip>
)}
<ActionRowButton
icon='delete'
btnStyle='danger'
handler={deleteBackupSchedule}
handlerParam={schedule}
/>
<ActionRowButton
disabled={!isScheduleUserMissing[schedule.id]}
icon='run-schedule'
btnStyle='warning'
handler={runJob}
handlerParam={schedule.jobId}
/>
</ButtonGroup>
</fieldset>
),
<Link
className='btn btn-sm btn-primary mr-1'
to={`/backup/${id}/edit`}
>
<Icon icon='edit' />
</Link>
<ButtonGroup>
{redirect && (
<ActionRowButton
btnStyle='primary'
handler={redirect}
icon='preview'
tooltip={_('redirectToMatchingVms')}
/>
)}
<ActionRowButton
btnStyle='warning'
disabled={isScheduleUserMissing[id]}
handler={_runJob}
handlerParam={item}
icon='run-schedule'
/>
<ActionRowButton
icon='migrate-job'
btnStyle='danger'
handler={migrateBackupSchedule}
handlerParam={schedule.jobId}
/>
<ActionRowButton
btnStyle='danger'
handler={deleteBackupSchedule}
handlerParam={schedule}
icon='delete'
/>
</ButtonGroup>
</fieldset>
)
},
textAlign: 'right',
},
]
@@ -193,7 +216,7 @@ export default class Overview extends Component {
(schedules, jobs, users) => {
const isScheduleUserMissing = {}
forEach(schedules, schedule => {
isScheduleUserMissing[schedule.id] = !!(
isScheduleUserMissing[schedule.id] = !(
jobs && find(users, user => user.id === jobs[schedule.jobId].userId)
)
})


@@ -1,20 +1,10 @@
import _, { messages } from 'intl'
import ChooseSrForEachVdisModal from 'xo/choose-sr-for-each-vdis-modal'
import Component from 'base-component'
import every from 'lodash/every'
import filter from 'lodash/filter'
import find from 'lodash/find'
import forEach from 'lodash/forEach'
import groupBy from 'lodash/groupBy'
import Icon from 'icon'
import isEmpty from 'lodash/isEmpty'
import map from 'lodash/map'
import mapValues from 'lodash/mapValues'
import moment from 'moment'
import React from 'react'
import reduce from 'lodash/reduce'
import SortedTable from 'sorted-table'
import uniq from 'lodash/uniq'
import Upgrade from 'xoa-upgrade'
import { confirm } from 'modal'
import { createSelector } from 'selectors'
@@ -23,6 +13,19 @@ import { Container, Row, Col } from 'grid'
import { FormattedDate, injectIntl } from 'react-intl'
import { info, error } from 'notification'
import { Select, Toggle } from 'form'
import {
countBy,
every,
filter,
find,
forEach,
groupBy,
isEmpty,
map,
mapValues,
reduce,
uniq,
} from 'lodash'
import {
importBackup,
@@ -98,19 +101,19 @@ const VM_COLUMNS = [
},
{
name: _('availableBackupsColumn'),
itemRenderer: ({ simpleCount, deltaCount }) => (
itemRenderer: ({ count }) => (
<span>
{!!simpleCount && (
{count.simple > 0 && (
<span>
{_('simpleBackup')}{' '}
<span className='tag tag-pill tag-primary'>{simpleCount}</span>
<span className='tag tag-pill tag-primary'>{count.simple}</span>
</span>
)}
{!!simpleCount && !!deltaCount && ', '}
{!!deltaCount && (
{count.simple > 0 && count.delta > 0 && ', '}
{count.delta > 0 && (
<span>
{_('delta')}{' '}
<span className='tag tag-pill tag-primary'>{deltaCount}</span>
<span className='tag tag-pill tag-primary'>{count.delta}</span>
</span>
)}
</span>
@@ -140,8 +143,6 @@ const doImport = ({ backup, targetSrs, start }) => {
mapVdisSrs: targetSrs.mapVdisSrs,
remote: backup.remoteId,
sr: targetSrs.mainSr,
}).then(id => {
return id
})
if (start) {
importPromise.then(id => startVm({ id }))
@@ -242,16 +243,20 @@ const ImportModalBody = injectIntl(_ModalBody, { withRef: true })
})
export default class Restore extends Component {
componentWillReceiveProps ({ rawRemotes }) {
let filteredRemotes
if (
(filteredRemotes = filter(rawRemotes, 'enabled')) !==
filter(this.props.rawRemotes, 'enabled')
) {
this._listAll(filteredRemotes).catch(noop)
if (rawRemotes !== this.props.rawRemotes) {
this._listAll(rawRemotes).catch(noop)
}
}
_listAll = async remotes => {
componentDidMount () {
const { rawRemotes } = this.props
if (rawRemotes !== undefined) {
this._listAll(rawRemotes).catch(noop)
}
}
_listAll = async rawRemotes => {
const remotes = filter(rawRemotes, 'enabled')
const remotesInfo = await Promise.all(
map(remotes, async remote => ({
files: await listRemote(remote.id),
@@ -309,24 +314,15 @@ export default class Restore extends Component {
forEach(backupInfoByVm, (backups, vm) => {
backupInfoByVm[vm] = {
backups,
count: countBy(backups, 'type'),
last: reduce(backups, (last, b) => (b.date > last.date ? b : last)),
tagsByRemote: mapValues(
groupBy(backups, 'remoteId'),
(backups, remoteId) => ({
remoteName: find(remotes, remote => remote.id === remoteId).name,
remoteName: backups[0].remoteName,
tags: uniq(map(backups, 'tag')),
})
),
simpleCount: reduce(
backups,
(sum, b) => (b.type === 'simple' ? ++sum : sum),
0
),
deltaCount: reduce(
backups,
(sum, b) => (b.type === 'delta' ? ++sum : sum),
0
),
}
})
this.setState({ backupInfoByVm })


@@ -1,4 +1,5 @@
import _, { messages } from 'intl'
import ActionButton from 'action-button'
import ButtonGroup from 'button-group'
import ChartistGraph from 'react-chartist'
import Component from 'base-component'
@@ -9,23 +10,27 @@ import PropTypes from 'prop-types'
import React from 'react'
import ResourceSetQuotas from 'resource-set-quotas'
import Upgrade from 'xoa-upgrade'
import { addSubscriptions, connectStore, formatSize } from 'utils'
import { Card, CardBlock, CardHeader } from 'card'
import { Container, Row, Col } from 'grid'
import { forEach, isEmpty, map, size } from 'lodash'
import { compact, filter, forEach, includes, isEmpty, map, size } from 'lodash'
import { injectIntl } from 'react-intl'
import { SelectHost, SelectPool } from 'select-objects'
import {
createCollectionWrapper,
createCounter,
createGetObjectsOfType,
createFilter,
createGetHostMetrics,
createGetObjectsOfType,
createSelector,
createTop,
isAdmin,
} from 'selectors'
import { addSubscriptions, connectStore, formatSize } from 'utils'
import {
isSrWritable,
sendUsageReport,
subscribePermissions,
subscribePlugins,
subscribeResourceSets,
subscribeUsers,
} from 'xo'
@@ -65,29 +70,83 @@ class PatchesCard extends Component {
}
}
@connectStore(() => {
const getHosts = createGetObjectsOfType('host')
const getVms = createGetObjectsOfType('VM')
const getHostMetrics = createGetHostMetrics(getHosts)
const writableSrs = createGetObjectsOfType('SR').filter([isSrWritable])
const getSrMetrics = createCollectionWrapper(
createSelector(writableSrs, writableSrs => {
const metrics = {
srTotal: 0,
srUsage: 0,
}
forEach(writableSrs, sr => {
metrics.srUsage += sr.physical_usage
metrics.srTotal += sr.size
})
return metrics
})
@connectStore({
hosts: createGetObjectsOfType('host'),
pools: createGetObjectsOfType('pool'),
srs: createGetObjectsOfType('SR').filter([isSrWritable]),
vms: createGetObjectsOfType('VM'),
alarmMessages: createGetObjectsOfType('message').filter([
message => message.name === 'ALARM',
]),
tasks: createGetObjectsOfType('task').filter([
task => task.status === 'pending',
]),
})
@addSubscriptions({
plugins: subscribePlugins,
users: subscribeUsers,
})
@injectIntl
class DefaultCard extends Component {
_getPoolWisePredicate = createSelector(
() => map(this.state.pools, 'id'),
poolsIds => item => isEmpty(poolsIds) || includes(poolsIds, item.$pool)
)
const getVmMetrics = createCollectionWrapper(
createSelector(getVms, vms => {
_getPredicate = createSelector(
this._getPoolWisePredicate,
() => map(this.state.hosts, 'id'),
(poolWisePredicate, hostsIds) => item =>
isEmpty(hostsIds)
? poolWisePredicate(item)
: includes(hostsIds, item.$container || item.$host)
)
_onPoolsChange = pools => {
const { hosts } = this.state
const poolIds = map(pools, 'id')
this.setState({
pools,
hosts: isEmpty(pools)
? hosts
: filter(hosts, host => includes(poolIds, host.$pool)),
})
}
_getHosts = createSelector(
createFilter(() => this.props.hosts, this._getPoolWisePredicate),
() => this.state.hosts,
(hosts, selectedHosts) => (isEmpty(selectedHosts) ? hosts : selectedHosts)
)
_getVms = createFilter(() => this.props.vms, this._getPredicate)
_getSrs = createFilter(() => this.props.srs, this._getPredicate)
_getPoolsNumber = createCounter(
createSelector(
() => this.props.pools,
() => this.state.pools,
(pools, selectedPools) => (isEmpty(selectedPools) ? pools : selectedPools)
)
)
_getHostsNumber = createCounter(this._getHosts)
_getVmsNumber = createCounter(this._getVms)
_getAlarmMessagesNumber = createCounter(
createFilter(() => this.props.alarmMessages, this._getPoolWisePredicate)
)
_getTasksNumber = createCounter(
createFilter(() => this.props.tasks, this._getPredicate)
)
_getHostMetrics = createGetHostMetrics(this._getHosts)
_getVmMetrics = createCollectionWrapper(
createSelector(this._getVms, vms => {
const metrics = {
vcpus: 0,
running: 0,
@@ -105,60 +164,90 @@ class PatchesCard extends Component {
return metrics
})
)
const getNumberOfAlarmMessages = createCounter(
createGetObjectsOfType('message'),
[message => message.name === 'ALARM']
)
const getNumberOfHosts = createCounter(getHosts)
const getNumberOfPools = createCounter(createGetObjectsOfType('pool'))
const getNumberOfTasks = createCounter(
createGetObjectsOfType('task').filter([task => task.status === 'pending'])
)
const getNumberOfVms = createCounter(getVms)
return {
hostMetrics: getHostMetrics,
hosts: getHosts,
nAlarmMessages: getNumberOfAlarmMessages,
nHosts: getNumberOfHosts,
nPools: getNumberOfPools,
nTasks: getNumberOfTasks,
nVms: getNumberOfVms,
srMetrics: getSrMetrics,
topWritableSrs: createTop(
writableSrs,
[sr => sr.physical_usage / sr.size],
5
),
vmMetrics: getVmMetrics,
}
})
@injectIntl
class DefaultCard extends Component {
componentWillMount () {
this.componentWillUnmount = subscribeUsers(users => {
this.setState({ users })
_getSrMetrics = createCollectionWrapper(
createSelector(this._getSrs, srs => {
const metrics = {
srTotal: 0,
srUsage: 0,
}
forEach(srs, sr => {
metrics.srUsage += sr.physical_usage
metrics.srTotal += sr.size
})
return metrics
})
)
_getTopSrs = createTop(this._getSrs, [sr => sr.physical_usage / sr.size], 5)
_onHostsChange = hosts => {
this.setState({
hosts: compact(hosts),
})
}
_canSendTheReport = createSelector(
() => this.props.plugins,
(plugins = []) => {
let count = 0
for (const { id, loaded } of plugins) {
if (
(id === 'usage-report' || id === 'transport-email') &&
loaded &&
++count === 2
) {
return true
}
}
}
)
render () {
const { props, state } = this
const users = state && state.users
const users = props.users
const nUsers = size(users)
const canSendTheReport = this._canSendTheReport()
const nPools = this._getPoolsNumber()
const nHosts = this._getHostsNumber()
const nVms = this._getVmsNumber()
const nAlarmMessages = this._getAlarmMessagesNumber()
const hostMetrics = this._getHostMetrics()
const vmMetrics = this._getVmMetrics()
const srMetrics = this._getSrMetrics()
const topSrs = this._getTopSrs()
const { formatMessage } = props.intl
return (
<Container>
<Row>
<Col mediumSize={6}>
<SelectPool
multi
onChange={this._onPoolsChange}
value={state.pools}
/>
</Col>
<Col mediumSize={6}>
<SelectHost
multi
onChange={this._onHostsChange}
predicate={this._getPoolWisePredicate()}
value={state.hosts}
/>
</Col>
</Row>
<br />
<Row>
<Col mediumSize={4}>
<Card>
<CardHeader>
<Icon icon='pool' /> {_('poolPanel', { pools: props.nPools })}
<Icon icon='pool' /> {_('poolPanel', { pools: nPools })}
</CardHeader>
<CardBlock>
<p className={styles.bigCardContent}>
<Link to='/home?t=pool'>{props.nPools}</Link>
<Link to='/home?t=pool'>{nPools}</Link>
</p>
</CardBlock>
</Card>
@@ -166,11 +255,11 @@ class DefaultCard extends Component {
<Col mediumSize={4}>
<Card>
<CardHeader>
<Icon icon='host' /> {_('hostPanel', { hosts: props.nHosts })}
<Icon icon='host' /> {_('hostPanel', { hosts: nHosts })}
</CardHeader>
<CardBlock>
<p className={styles.bigCardContent}>
<Link to='/home?t=host'>{props.nHosts}</Link>
<Link to='/home?t=host'>{nHosts}</Link>
</p>
</CardBlock>
</Card>
@@ -178,11 +267,11 @@ class DefaultCard extends Component {
<Col mediumSize={4}>
<Card>
<CardHeader>
<Icon icon='vm' /> {_('vmPanel', { vms: props.nVms })}
<Icon icon='vm' /> {_('vmPanel', { vms: nVms })}
</CardHeader>
<CardBlock>
<p className={styles.bigCardContent}>
<Link to='/home?s=&t=VM'>{props.nVms}</Link>
<Link to='/home?s=&t=VM'>{nVms}</Link>
</p>
</CardBlock>
</Card>
@@ -202,9 +291,8 @@ class DefaultCard extends Component {
formatMessage(messages.totalMemory),
],
series: [
props.hostMetrics.memoryUsage,
props.hostMetrics.memoryTotal -
props.hostMetrics.memoryUsage,
hostMetrics.memoryUsage,
hostMetrics.memoryTotal - hostMetrics.memoryUsage,
],
}}
options={PIE_GRAPH_OPTIONS}
@@ -212,8 +300,8 @@ class DefaultCard extends Component {
/>
<p className='text-xs-center'>
{_('ofUsage', {
total: formatSize(props.hostMetrics.memoryTotal),
usage: formatSize(props.hostMetrics.memoryUsage),
total: formatSize(hostMetrics.memoryTotal),
usage: formatSize(hostMetrics.memoryUsage),
})}
</p>
</CardBlock>
@@ -232,7 +320,7 @@ class DefaultCard extends Component {
formatMessage(messages.usedVCpus),
formatMessage(messages.totalCpus),
],
series: [props.vmMetrics.vcpus, props.hostMetrics.cpus],
series: [vmMetrics.vcpus, hostMetrics.cpus],
}}
options={{
showLabel: false,
@@ -243,8 +331,8 @@ class DefaultCard extends Component {
/>
<p className='text-xs-center'>
{_('ofCpusUsage', {
nCpus: props.hostMetrics.cpus,
nVcpus: props.vmMetrics.vcpus,
nCpus: hostMetrics.cpus,
nVcpus: vmMetrics.vcpus,
})}
</p>
</div>
@@ -266,8 +354,8 @@ class DefaultCard extends Component {
formatMessage(messages.totalSpace),
],
series: [
props.srMetrics.srUsage,
props.srMetrics.srTotal - props.srMetrics.srUsage,
srMetrics.srUsage,
srMetrics.srTotal - srMetrics.srUsage,
],
}}
options={PIE_GRAPH_OPTIONS}
@@ -275,8 +363,8 @@ class DefaultCard extends Component {
/>
<p className='text-xs-center'>
{_('ofUsage', {
total: formatSize(props.srMetrics.srTotal),
usage: formatSize(props.srMetrics.srUsage),
total: formatSize(srMetrics.srTotal),
usage: formatSize(srMetrics.srUsage),
})}
</p>
</BlockLink>
@@ -295,9 +383,9 @@ class DefaultCard extends Component {
<p className={styles.bigCardContent}>
<Link
to='/dashboard/health'
className={props.nAlarmMessages > 0 ? 'text-warning' : ''}
className={nAlarmMessages > 0 ? 'text-warning' : ''}
>
{props.nAlarmMessages}
{nAlarmMessages}
</Link>
</p>
</CardBlock>
@@ -310,7 +398,7 @@ class DefaultCard extends Component {
</CardHeader>
<CardBlock>
<p className={styles.bigCardContent}>
<Link to='/tasks'>{props.nTasks}</Link>
<Link to='/tasks'>{this._getTasksNumber()}</Link>
</p>
</CardBlock>
</Card>
@@ -348,9 +436,9 @@ class DefaultCard extends Component {
formatMessage(messages.vmStateOther),
],
series: [
props.vmMetrics.running,
props.vmMetrics.halted,
props.vmMetrics.other,
vmMetrics.running,
vmMetrics.halted,
vmMetrics.other,
],
}}
options={{ showLabel: false }}
@@ -358,8 +446,8 @@ class DefaultCard extends Component {
/>
<p className='text-xs-center'>
{_('vmsStates', {
running: props.vmMetrics.running,
halted: props.vmMetrics.halted,
running: vmMetrics.running,
halted: vmMetrics.halted,
})}
</p>
</BlockLink>
@@ -376,9 +464,9 @@ class DefaultCard extends Component {
<ChartistGraph
style={{ strokeWidth: '30px' }}
data={{
labels: map(props.topWritableSrs, 'name_label'),
labels: map(topSrs, 'name_label'),
series: map(
props.topWritableSrs,
topSrs,
sr => sr.physical_usage / sr.size * 100
),
}}
@@ -397,7 +485,36 @@ class DefaultCard extends Component {
</Row>
<Row>
<Col>
<PatchesCard hosts={props.hosts} />
<Card>
<CardHeader>
<Icon icon='menu-dashboard-stats' /> {_('dashboardReport')}
</CardHeader>
<CardBlock className='text-xs-center'>
<ActionButton
btnStyle='primary'
disabled={!canSendTheReport}
handler={sendUsageReport}
icon=''
>
{_('dashboardSendReport')}
</ActionButton>
<br />
{!canSendTheReport && (
<span>
<Link to='/settings/plugins' className='text-info'>
<Icon icon='info' /> {_('dashboardSendReportInfo')}
</Link>
<br />
</span>
)}
{_('dashboardSendReportMessage')}
</CardBlock>
</Card>
</Col>
</Row>
<Row>
<Col>
<PatchesCard hosts={this._getHosts()} />
</Col>
</Row>
</Container>
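For reference, a minimal sketch (not part of the diff) of how the pool and host predicates defined in this file compose; the item shape ($pool, $container/$host) comes from the selectors above, everything else is illustrative:

// Plain-JS equivalent of _getPoolWisePredicate / _getPredicate.
// An empty selection means "no restriction"; a host selection takes precedence.
const poolWisePredicate = poolIds => item =>
  poolIds.length === 0 || poolIds.includes(item.$pool)

const predicate = (poolIds, hostIds) => item =>
  hostIds.length === 0
    ? poolWisePredicate(poolIds)(item)
    : hostIds.includes(item.$container || item.$host)

// With pool p1 selected and no host selected, a VM running on p1 passes:
predicate(['p1'], [])({ $pool: 'p1', $container: 'h1' }) // → true
// With host h2 selected, the same VM is filtered out:
predicate(['p1'], ['h2'])({ $pool: 'p1', $container: 'h1' }) // → false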


@@ -4,6 +4,7 @@ import _ from 'intl'
import ActionButton from 'action-button'
import Button from 'button'
import CenterPanel from 'center-panel'
import classNames from 'classnames'
import Component from 'base-component'
import defined, { get } from 'xo-defined'
import Icon from 'icon'
@@ -14,7 +15,6 @@ import Pagination from 'pagination'
import propTypes from 'prop-types-decorator'
import React from 'react'
import Shortcuts from 'shortcuts'
import SingleLineRow from 'single-line-row'
import Tooltip from 'tooltip'
import { Card, CardHeader, CardBlock } from 'card'
import {
@@ -124,7 +124,11 @@ const OPTIONS = {
sortOrder: 'desc',
},
{ labelId: 'homeSortByRAM', sortBy: 'memory.size', sortOrder: 'desc' },
{ labelId: 'homeSortByCpus', sortBy: 'CPUs.cpu_count', sortOrder: 'desc' },
{
labelId: 'homeSortByCpus',
sortBy: 'CPUs.cpu_count',
sortOrder: 'desc',
},
],
},
VM: {
@@ -199,7 +203,11 @@ const OPTIONS = {
defaultFilter: '',
filters: homeFilters.vmTemplate,
mainActions: [
{ handler: deleteTemplates, icon: 'delete', tooltip: _('templateDelete') },
{
handler: deleteTemplates,
icon: 'delete',
tooltip: _('templateDelete'),
},
],
Item: TemplateItem,
showPoolsSelector: true,
@@ -485,14 +493,36 @@ export default class Home extends Component {
}
_getDefaultSort (props = this.props) {
const sortOption = find(OPTIONS[props.type].sortOptions, 'default')
const { sortOptions } = OPTIONS[props.type]
const defaultSort = find(sortOptions, 'default')
const urlSort = find(sortOptions, { sortBy: props.location.query.sortBy })
return {
sortBy: defined(() => sortOption.sortBy, 'name_label'),
sortOrder: defined(() => sortOption.sortOrder, 'asc'),
sortBy: defined(
() => urlSort.sortBy,
() => defaultSort.sortBy,
'name_label'
),
sortOrder: defined(
() => urlSort.sortOrder,
() => defaultSort.sortOrder,
'asc'
),
}
}
_setSort (event) {
const { sortBy, sortOrder } = event.currentTarget.dataset
const { pathname, query } = this.props.location
this.setState({ sortBy, sortOrder })
this.context.router.replace({
pathname,
query: { ...query, sortBy },
})
}
_setSort = this._setSort.bind(this)
_initFilterAndSortBy (props) {
const filter = this._getFilter(props)
@@ -752,9 +782,28 @@ export default class Home extends Component {
// Header --------------------------------------------------------------------
_renderHeader () {
const { isAdmin, noResourceSets, type } = this.props
const { filters } = OPTIONS[type]
const customFilters = this._getCustomFilters()
const filteredItems = this._getFilteredItems()
const nItems = this._getNumberOfItems()
const { isAdmin, items, noResourceSets, type } = this.props
const {
selectedHosts,
selectedPools,
selectedResourceSets,
selectedTags,
sortBy,
} = this.state
const options = OPTIONS[type]
const {
filters,
mainActions,
otherActions,
showHostsSelector,
showPoolsSelector,
showResourceSetsSelector,
} = options
return (
<Container>
@@ -832,6 +881,190 @@ export default class Home extends Component {
</Col>
)}
</Row>
<Row className={classNames(styles.itemRowHeader, 'mt-1')}>
<Col smallSize={11} mediumSize={3}>
<input
checked={this._getIsAllSelected()}
onChange={this._toggleMaster}
ref='masterCheckbox'
type='checkbox'
/>{' '}
<span className='text-muted'>
{this._getNumberOfSelectedItems()
? _('homeSelectedItems', {
icon: <Icon icon={type.toLowerCase()} />,
selected: this._getNumberOfSelectedItems(),
total: nItems,
})
: _('homeDisplayedItems', {
displayed: filteredItems.length,
icon: <Icon icon={type.toLowerCase()} />,
total: nItems,
})}
</span>
</Col>
<Col mediumSize={8} className='text-xs-right hidden-sm-down'>
{this._getNumberOfSelectedItems() ? (
<div>
{mainActions && (
<div className='btn-group'>
{map(mainActions, (action, key) => (
<Tooltip content={action.tooltip} key={key}>
<ActionButton
{...action}
handlerParam={this._getSelectedItemsIds()}
/>
</Tooltip>
))}
</div>
)}
{otherActions && (
<DropdownButton
bsStyle='secondary'
id='advanced'
title={_('homeMore')}
>
{map(otherActions, (action, key) => (
<MenuItem
key={key}
onClick={() => {
action.handler(
this._getSelectedItemsIds(),
action.params
)
}}
>
<Icon icon={action.icon} fixedWidth />{' '}
{_(action.labelId)}
</MenuItem>
))}
</DropdownButton>
)}
</div>
) : (
<div>
{showPoolsSelector && (
<OverlayTrigger
trigger='click'
rootClose
placement='bottom'
overlay={
<Popover className={styles.selectObject} id='poolPopover'>
<SelectPool
autoFocus
multi
onChange={this._updateSelectedPools}
value={selectedPools}
/>
</Popover>
}
>
<Button btnStyle='link'>
<Icon icon='pool' /> {_('homeAllPools')}
</Button>
</OverlayTrigger>
)}
{showHostsSelector && (
<OverlayTrigger
trigger='click'
rootClose
placement='bottom'
overlay={
<Popover className={styles.selectObject} id='HostPopover'>
<SelectHost
autoFocus
multi
onChange={this._updateSelectedHosts}
value={selectedHosts}
/>
</Popover>
}
>
<Button btnStyle='link'>
<Icon icon='host' /> {_('homeAllHosts')}
</Button>
</OverlayTrigger>
)}
<OverlayTrigger
autoFocus
trigger='click'
rootClose
placement='bottom'
overlay={
<Popover className={styles.selectObject} id='tagPopover'>
<SelectTag
autoFocus
multi
objects={items}
onChange={this._updateSelectedTags}
value={selectedTags}
/>
</Popover>
}
>
<Button btnStyle='link'>
<Icon icon='tags' /> {_('homeAllTags')}
</Button>
</OverlayTrigger>
{showResourceSetsSelector &&
isAdmin &&
!noResourceSets && (
<OverlayTrigger
trigger='click'
rootClose
placement='bottom'
overlay={
<Popover
className={styles.selectObject}
id='resourceSetPopover'
>
<SelectResourceSet
autoFocus
multi
onChange={this._updateSelectedResourceSets}
value={selectedResourceSets}
/>
</Popover>
}
>
<Button btnStyle='link'>
<Icon icon='resource-set' /> {_('homeAllResourceSets')}
</Button>
</OverlayTrigger>
)}
<DropdownButton
bsStyle='link'
id='sort'
title={_('homeSortBy')}
>
{map(
options.sortOptions,
({ labelId, sortBy: _sortBy, sortOrder }, key) => (
<MenuItem
key={key}
data-sort-by={_sortBy}
data-sort-order={sortOrder}
onClick={this._setSort}
>
{this._tick(_sortBy === sortBy)}
{_sortBy === sortBy ? (
<strong>{_(labelId)}</strong>
) : (
_(labelId)
)}
</MenuItem>
)
)}
</DropdownButton>
</div>
)}
</Col>
<Col smallSize={1} mediumSize={1} className='text-xs-right'>
<Button onClick={this._expandAll}>
<Icon icon='nav' />
</Button>
</Col>
</Row>
</Container>
)
}
@@ -849,29 +1082,8 @@ export default class Home extends Component {
const filteredItems = this._getFilteredItems()
const visibleItems = this._getVisibleItems()
const {
activePage,
expandAll,
highlighted,
selectedHosts,
selectedItems,
selectedPools,
selectedResourceSets,
selectedTags,
sortBy,
} = this.state
const { items, type } = this.props
const options = OPTIONS[type]
const {
Item,
mainActions,
otherActions,
showHostsSelector,
showPoolsSelector,
showResourceSetsSelector,
} = options
const { Item } = OPTIONS[this.props.type]
const { activePage, expandAll, highlighted, selectedItems } = this.state
// Necessary because indeterminate cannot be used as an attribute
if (this.refs.masterCheckbox) {
@@ -882,207 +1094,13 @@ export default class Home extends Component {
return (
<Page header={this._renderHeader()}>
<Shortcuts
name='Home'
handler={this._getShortcutsHandler()}
isolate
name='Home'
targetNodeSelector='body'
stopPropagation={false}
/>
<div>
<div className={styles.itemContainer}>
<SingleLineRow className={styles.itemContainerHeader}>
<Col smallsize={11} mediumSize={3}>
<input
checked={this._getIsAllSelected()}
onChange={this._toggleMaster}
ref='masterCheckbox'
type='checkbox'
/>{' '}
<span className='text-muted'>
{this._getNumberOfSelectedItems()
? _('homeSelectedItems', {
icon: <Icon icon={type.toLowerCase()} />,
selected: this._getNumberOfSelectedItems(),
total: nItems,
})
: _('homeDisplayedItems', {
displayed: filteredItems.length,
icon: <Icon icon={type.toLowerCase()} />,
total: nItems,
})}
</span>
</Col>
<Col mediumSize={8} className='text-xs-right hidden-sm-down'>
{this._getNumberOfSelectedItems() ? (
<div>
{mainActions && (
<div className='btn-group'>
{map(mainActions, (action, key) => (
<Tooltip content={action.tooltip} key={key}>
<ActionButton
{...action}
handlerParam={this._getSelectedItemsIds()}
/>
</Tooltip>
))}
</div>
)}
{otherActions && (
<DropdownButton
bsStyle='secondary'
id='advanced'
title={_('homeMore')}
>
{map(otherActions, (action, key) => (
<MenuItem
key={key}
onClick={() => {
action.handler(
this._getSelectedItemsIds(),
action.params
)
}}
>
<Icon icon={action.icon} fixedWidth />{' '}
{_(action.labelId)}
</MenuItem>
))}
</DropdownButton>
)}
</div>
) : (
<div>
{showPoolsSelector && (
<OverlayTrigger
trigger='click'
rootClose
placement='bottom'
overlay={
<Popover
className={styles.selectObject}
id='poolPopover'
>
<SelectPool
autoFocus
multi
onChange={this._updateSelectedPools}
value={selectedPools}
/>
</Popover>
}
>
<Button btnStyle='link'>
<Icon icon='pool' /> {_('homeAllPools')}
</Button>
</OverlayTrigger>
)}
{showHostsSelector && (
<OverlayTrigger
trigger='click'
rootClose
placement='bottom'
overlay={
<Popover
className={styles.selectObject}
id='HostPopover'
>
<SelectHost
autoFocus
multi
onChange={this._updateSelectedHosts}
value={selectedHosts}
/>
</Popover>
}
>
<Button btnStyle='link'>
<Icon icon='host' /> {_('homeAllHosts')}
</Button>
</OverlayTrigger>
)}
<OverlayTrigger
autoFocus
trigger='click'
rootClose
placement='bottom'
overlay={
<Popover
className={styles.selectObject}
id='tagPopover'
>
<SelectTag
autoFocus
multi
objects={items}
onChange={this._updateSelectedTags}
value={selectedTags}
/>
</Popover>
}
>
<Button btnStyle='link'>
<Icon icon='tags' /> {_('homeAllTags')}
</Button>
</OverlayTrigger>
{showResourceSetsSelector &&
isAdmin &&
!noResourceSets && (
<OverlayTrigger
trigger='click'
rootClose
placement='bottom'
overlay={
<Popover
className={styles.selectObject}
id='resourceSetPopover'
>
<SelectResourceSet
autoFocus
multi
onChange={this._updateSelectedResourceSets}
value={selectedResourceSets}
/>
</Popover>
}
>
<Button btnStyle='link'>
<Icon icon='resource-set' />{' '}
{_('homeAllResourceSets')}
</Button>
</OverlayTrigger>
)}
<DropdownButton
bsStyle='link'
id='sort'
title={_('homeSortBy')}
>
{map(
options.sortOptions,
({ labelId, sortBy: _sortBy, sortOrder }, key) => (
<MenuItem
key={key}
onClick={() =>
this.setState({ sortBy: _sortBy, sortOrder })
}
>
{this._tick(_sortBy === sortBy)}
{_sortBy === sortBy ? (
<strong>{_(labelId)}</strong>
) : (
_(labelId)
)}
</MenuItem>
)
)}
</DropdownButton>
</div>
)}
</Col>
<Col smallsize={1} mediumSize={1} className='text-xs-right'>
<Button onClick={this._expandAll}>
<Icon icon='nav' />
</Button>
</Col>
</SingleLineRow>
{isEmpty(filteredItems) ? (
<p className='text-xs-center mt-1'>
<a className='btn btn-link' onClick={this._clearFilter}>
@@ -1115,7 +1133,7 @@ export default class Home extends Component {
<Pagination
onChange={this._onPageSelection}
pages={ceil(filteredItems.length / ITEMS_PER_PAGE)}
value={activePage}
value={activePage || 1}
/>
</div>
</div>
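A minimal sketch of the sort fallback chain introduced in _getDefaultSort above, assuming defined() from xo-defined returns the first argument that does not resolve to undefined, calling function arguments and treating their exceptions as undefined; the sample values are illustrative:

import defined from 'xo-defined'

const urlSort = undefined // e.g. the URL query has no sortBy entry
const defaultSort = { sortBy: 'name_label', sortOrder: 'asc' }

const sortBy = defined(
  () => urlSort.sortBy,     // throws because urlSort is undefined → skipped
  () => defaultSort.sortBy, // → 'name_label'
  'name_label'              // literal fallback if no sort option is defined
)
// _setSort then writes sortBy back into the URL query, so the choice survives a reload.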


@@ -1,14 +1,9 @@
import _ from 'intl'
import ActionRowButton from 'action-row-button'
import filter from 'lodash/filter'
import find from 'lodash/find'
import forEach from 'lodash/forEach'
import Icon from 'icon'
import Link from 'link'
import LogList from '../../logs'
import map from 'lodash/map'
import orderBy from 'lodash/orderBy'
import React, { Component } from 'react'
import SortedTable from 'sorted-table'
import StateButton from 'state-button'
import Tooltip from 'tooltip'
import Upgrade from 'xoa-upgrade'
@@ -16,8 +11,10 @@ import { addSubscriptions } from 'utils'
import { Container } from 'grid'
import { createSelector } from 'selectors'
import { Card, CardHeader, CardBlock } from 'card'
import { filter, find, forEach, orderBy } from 'lodash'
import {
deleteSchedule,
deleteSchedules,
disableSchedule,
enableSchedule,
runJob,
@@ -32,12 +29,88 @@ const jobKeyToLabel = {
genericTask: _('customJob'),
}
const SCHEDULES_COLUMNS = [
{
itemRenderer: schedule => (
<span>{`${schedule.name} (${schedule.id.slice(4, 8)})`}</span>
),
name: _('schedule'),
sortCriteria: 'name',
},
{
itemRenderer: (schedule, { jobs, isScheduleUserMissing }) => {
const jobId = schedule.jobId
const job = jobs[jobId]
return (
job !== undefined && (
<div>
<span>{`${job.name} - ${job.method} (${jobId.slice(4, 8)})`}</span>{' '}
{isScheduleUserMissing[schedule.id] && (
<Tooltip content={_('jobUserNotFound')}>
<Icon className='mr-1' icon='error' />
</Tooltip>
)}
<Link
className='btn btn-sm btn-primary ml-1'
to={`/jobs/${job.id}/edit`}
>
<Tooltip content={_('jobEdit')}>
<Icon icon='edit' />
</Tooltip>
</Link>
</div>
)
)
},
name: _('job'),
sortCriteria: (schedule, { jobs }) => {
const job = jobs[schedule.jobId]
return job !== undefined && job.name
},
},
{
itemRenderer: schedule => schedule.cron,
name: _('jobScheduling'),
},
{
itemRenderer: schedule => (
<StateButton
disabledLabel={_('jobStateDisabled')}
disabledHandler={enableSchedule}
disabledTooltip={_('logIndicationToEnable')}
enabledLabel={_('jobStateEnabled')}
enabledHandler={disableSchedule}
enabledTooltip={_('logIndicationToDisable')}
handlerParam={schedule.id}
state={schedule.enabled}
/>
),
name: _('jobState'),
},
]
const ACTIONS = [
{
handler: deleteSchedules,
icon: 'delete',
individualHandler: deleteSchedule,
individualLabel: _('scheduleDelete'),
label: _('deleteSelectedSchedules'),
level: 'danger',
},
]
// ===================================================================
@addSubscriptions({
users: subscribeUsers,
})
export default class Overview extends Component {
static contextTypes = {
router: React.PropTypes.object,
}
constructor (props) {
super(props)
this.state = {
@@ -82,31 +155,6 @@ export default class Overview extends Component {
return jobs[schedule.jobId]
}
_getJobLabel (job = {}) {
return `${job.name} - ${job.method} (${job.id.slice(4, 8)})`
}
_getScheduleLabel (schedule) {
return `${schedule.name} (${schedule.id.slice(4, 8)})`
}
_getScheduleToggle (schedule) {
const { id } = schedule
return (
<StateButton
disabledLabel={_('jobStateDisabled')}
disabledHandler={enableSchedule}
disabledTooltip={_('logIndicationToEnable')}
enabledLabel={_('jobStateEnabled')}
enabledHandler={disableSchedule}
enabledTooltip={_('logIndicationToDisable')}
handlerParam={id}
state={schedule.enabled}
/>
)
}
_getIsScheduleUserMissing = createSelector(
() => this.state.schedules,
() => this.props.users,
@@ -114,7 +162,7 @@ export default class Overview extends Component {
const isScheduleUserMissing = {}
forEach(schedules, schedule => {
isScheduleUserMissing[schedule.id] = !!find(
isScheduleUserMissing[schedule.id] = !find(
users,
user => user.id === this._getScheduleJob(schedule).userId
)
@@ -124,11 +172,29 @@ export default class Overview extends Component {
}
)
_individualActions = [
{
disabled: (schedule, { isScheduleUserMissing }) =>
isScheduleUserMissing[schedule.id],
handler: schedule => runJob(schedule.jobId),
icon: 'run-schedule',
label: _('scheduleRun'),
level: 'warning',
},
{
handler: schedule =>
this.context.router.push({
pathname: `/jobs/schedules/${schedule.id}/edit`,
}),
icon: 'edit',
label: _('scheduleEdit'),
level: 'primary',
},
]
render () {
const { schedules } = this.state
const isScheduleUserMissing = this._getIsScheduleUserMissing()
return process.env.XOA_PLAN > 3 ? (
<Container>
<Card>
@@ -136,73 +202,16 @@ export default class Overview extends Component {
<Icon icon='schedule' /> {_('backupSchedules')}
</CardHeader>
<CardBlock>
{schedules.length ? (
<table className='table'>
<thead className='thead-default'>
<tr>
<th>{_('schedule')}</th>
<th>{_('job')}</th>
<th className='hidden-xs-down'>{_('jobScheduling')}</th>
<th>{_('jobState')}</th>
<th className='text-xs-right'>{_('jobAction')}</th>
</tr>
</thead>
<tbody>
{map(schedules, (schedule, key) => {
const job = this._getScheduleJob(schedule)
return (
<tr key={key}>
<td>
{this._getScheduleLabel(schedule)}
<Link
className='btn btn-sm btn-primary ml-1'
to={`/jobs/schedules/${schedule.id}/edit`}
>
<Icon icon='edit' />
</Link>
</td>
<td>
{this._getJobLabel(job)}
<Link
className='btn btn-sm btn-primary ml-1'
to={`/jobs/${job.id}/edit`}
>
<Icon icon='edit' />
</Link>
</td>
<td className='hidden-xs-down'>{schedule.cron}</td>
<td>{this._getScheduleToggle(schedule)}</td>
<td className='text-xs-right'>
<fieldset>
{!isScheduleUserMissing[schedule.id] && (
<Tooltip content={_('jobUserNotFound')}>
<Icon className='mr-1' icon='error' />
</Tooltip>
)}
<ActionRowButton
icon='delete'
btnStyle='danger'
handler={deleteSchedule}
handlerParam={schedule}
/>
<ActionRowButton
disabled={!isScheduleUserMissing[schedule.id]}
icon='run-schedule'
btnStyle='warning'
handler={runJob}
handlerParam={schedule.jobId}
/>
</fieldset>
</td>
</tr>
)
})}
</tbody>
</table>
) : (
<p>{_('noScheduledJobs')}</p>
)}
<SortedTable
actions={ACTIONS}
collection={schedules}
columns={SCHEDULES_COLUMNS}
data-isScheduleUserMissing={this._getIsScheduleUserMissing()}
data-jobs={this.state.jobs || {}}
individualActions={this._individualActions}
shortcutsTarget='body'
stateUrlParam='s'
/>
</CardBlock>
</Card>
<LogList jobKeys={Object.keys(jobKeyToLabel)} />
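A minimal sketch of the !find(...) fix in the hunk above: the flag means "the schedule's job owner no longer exists", so it must be the negation of finding the user (the previous !!find(...) inverted the condition); the sample data is illustrative:

import { find } from 'lodash'

const users = [{ id: 'u1' }]
const isScheduleUserMissing = userId =>
  !find(users, user => user.id === userId)

isScheduleUserMissing('u1') // → false: owner exists, no warning icon
isScheduleUserMissing('u2') // → true: owner not found, warning icon shown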


@@ -41,6 +41,7 @@ import {
createVms,
getCloudInitConfig,
subscribeCurrentUser,
subscribeIpPools,
subscribePermissions,
subscribeResourceSets,
XEN_DEFAULT_CPU_CAP,
@@ -115,12 +116,21 @@ const Item = ({ label, children, className }) => (
</span>
)
@addSubscriptions({
// eslint-disable-next-line standard/no-callback-literal
ipPoolsConfigured: cb => subscribeIpPools(ipPools => cb(ipPools.length > 0)),
})
@injectIntl
class Vif extends BaseComponent {
_getIpPoolPredicate = createSelector(
() => this.props.vif,
vif => ipPool => includes(ipPool.networks, vif.network)
)
render () {
const {
intl: { formatMessage },
ipPoolPredicate,
ipPoolsConfigured,
networkPredicate,
onChangeAddresses,
onChangeMac,
@@ -159,26 +169,28 @@ class Vif extends BaseComponent {
)}
</span>
</Item>
<LineItem>
<span className={styles.inlineSelect}>
{pool ? (
<SelectIp
containerPredicate={ipPoolPredicate}
multi
onChange={onChangeAddresses}
value={vif.addresses}
/>
) : (
<SelectResourceSetIp
containerPredicate={ipPoolPredicate}
multi
onChange={onChangeAddresses}
resourceSetId={resourceSet.id}
value={vif.addresses}
/>
)}
</span>
</LineItem>
{ipPoolsConfigured && (
<LineItem>
<span className={styles.inlineSelect}>
{pool ? (
<SelectIp
containerPredicate={this._getIpPoolPredicate()}
multi
onChange={onChangeAddresses}
value={vif.addresses}
/>
) : (
<SelectResourceSetIp
containerPredicate={this._getIpPoolPredicate()}
multi
onChange={onChangeAddresses}
resourceSetId={resourceSet.id}
value={vif.addresses}
/>
)}
</span>
</LineItem>
)}
<Item>
<Button onClick={onDelete}>
<Icon icon='new-vm-remove' />
@@ -531,25 +543,6 @@ export default class NewVm extends BaseComponent {
poolId => sr =>
(poolId == null || poolId === sr.$pool) && sr.SR_type === 'iso'
)
_getIpPoolPredicate = createSelector(
() => !!this.props.pool,
() => {
const { resourceSet } = this.props
return resourceSet && resourceSet.ipPools
},
() => this.props.vif,
(pool, ipPools, vif) => ipPool => {
if (!ipPool) {
return false
}
return (
pool ||
(ipPools &&
includes(ipPools, ipPool.id) &&
find(ipPool.networks, ipPoolNetwork => ipPoolNetwork === vif.network))
)
}
)
_getNetworkPredicate = createSelector(
this._getIsInPool,
this._getIsInResourceSet,
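A minimal sketch of the ipPoolsConfigured subscription and the per-VIF predicate added above, assuming addSubscriptions forwards each value passed to the callback as the prop of the same name; only a boolean reaches the Vif component, which hides the IP field when it is false:

import { includes } from 'lodash'
import { subscribeIpPools } from 'xo'

// The component never sees the full list, only whether at least one IP pool exists.
const ipPoolsConfigured = cb => subscribeIpPools(ipPools => cb(ipPools.length > 0))

// Per-VIF predicate (_getIpPoolPredicate): keep only pools covering the VIF's network.
const ipPoolPredicate = vif => ipPool => includes(ipPool.networks, vif.network)
ipPoolPredicate({ network: 'net1' })({ networks: ['net1', 'net2'] }) // → true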


@@ -1,6 +1,5 @@
import _, { messages } from 'intl'
import ActionButton from 'action-button'
import ActionRowButton from 'action-row-button'
import Component from 'base-component'
import HTML5Backend from 'react-dnd-html5-backend'
import Icon from 'icon'
@@ -10,8 +9,8 @@ import propTypes from 'prop-types-decorator'
import React from 'react'
import SingleLineRow from 'single-line-row'
import StateButton from 'state-button'
import SortedTable from 'sorted-table'
import TabButton from 'tab-button'
import Tooltip from 'tooltip'
import { Container, Row, Col } from 'grid'
import {
createSelector,
@@ -33,13 +32,15 @@ import { SizeInput, Toggle } from 'form'
import { XoSelect, Size, Text } from 'editable'
import { confirm } from 'modal'
import { error } from 'notification'
import { forEach, get, isEmpty, map, some } from 'lodash'
import { filter, find, forEach, get, map, mapValues, some } from 'lodash'
import {
attachDiskToVm,
createDisk,
connectVbd,
deleteVbd,
deleteVbds,
deleteVdi,
deleteVdis,
disconnectVbd,
editVdi,
isSrWritable,
@@ -50,6 +51,132 @@ import {
subscribeResourceSets,
} from 'xo'
const COLUMNS_VM_PV = [
{
itemRenderer: vdi => (
<Text
value={vdi.name_label}
onChange={value => editVdi(vdi, { name_label: value })}
/>
),
name: _('vdiNameLabel'),
sortCriteria: 'name_label',
default: true,
},
{
itemRenderer: vdi => (
<Text
value={vdi.name_description}
onChange={value => editVdi(vdi, { name_description: value })}
/>
),
name: _('vdiNameDescription'),
sortCriteria: 'name_description',
},
{
itemRenderer: vdi => (
<Size
value={vdi.size || null}
onChange={size => editVdi(vdi, { size })}
/>
),
name: _('vdiSize'),
sortCriteria: 'size',
},
{
itemRenderer: (vdi, userData) => {
const sr = userData.srs[vdi.$SR]
return (
sr !== undefined && (
<XoSelect
labelProp='name_label'
onChange={sr => migrateVdi(vdi, sr)}
predicate={sr => sr.$pool === userData.vm.$pool && isSrWritable(sr)}
useLongClick
value={sr}
xoType='SR'
>
<Link to={`/srs/${sr.id}`}>{sr.name_label}</Link>
</XoSelect>
)
)
},
name: _('vdiSr'),
sortCriteria: (vdi, userData) => {
const sr = userData.srs[vdi.$SR]
return sr !== undefined && sr.name_label
},
},
{
itemRenderer: (vdi, userData) => {
const vbd = userData.vbdsByVdi[vdi.id]
return (
<Toggle
onChange={bootable => setBootableVbd(vbd, bootable)}
value={vbd.bootable}
/>
)
},
name: _('vbdBootableStatus'),
id: 'vbdBootableStatus',
},
{
itemRenderer: (vdi, userData) => {
const vbd = userData.vbdsByVdi[vdi.id]
return (
<StateButton
disabledLabel={_('vbdStatusDisconnected')}
disabledHandler={connectVbd}
disabledTooltip={_('vbdConnect')}
enabledLabel={_('vbdStatusConnected')}
enabledHandler={disconnectVbd}
enabledTooltip={_('vbdDisconnect')}
disabled={!(vbd.attached || isVmRunning(userData.vm))}
handlerParam={vbd}
state={vbd.attached}
/>
)
},
name: _('vbdStatus'),
},
]
const COLUMNS = filter(COLUMNS_VM_PV, col => col.id !== 'vbdBootableStatus')
const ACTIONS = [
{
disabled: (selectedItems, userData) =>
some(map(selectedItems, vdi => userData.vbdsByVdi[vdi.id]), 'attached'),
handler: (selectedItems, userData) =>
deleteVbds(map(selectedItems, vdi => userData.vbdsByVdi[vdi.id])),
individualDisabled: (vdi, userData) => {
const vbd = userData.vbdsByVdi[vdi.id]
return vbd !== undefined && vbd.attached
},
individualHandler: (vdi, userData) => {
const vbd = userData.vbdsByVdi[vdi.id]
return vbd !== undefined && deleteVbd(vbd)
},
icon: 'vdi-forget',
label: _('vdiForget'),
level: 'danger',
},
{
disabled: (selectedItems, userData) =>
some(map(selectedItems, vdi => userData.vbdsByVdi[vdi.id]), 'attached'),
handler: deleteVdis,
individualDisabled: (vdi, userData) => {
const vbd = userData.vbdsByVdi[vdi.id]
return vbd !== undefined && vbd.attached
},
individualHandler: deleteVdi,
individualLabel: _('vdiRemove'),
icon: 'vdi-remove',
label: _('deleteSelectedVdis'),
level: 'danger',
},
]
const parseBootOrder = bootOrder => {
// FIXME missing translation
const bootOptions = {
@@ -516,6 +643,21 @@ export default class TabDisks extends Component {
isAdmin || (resourceSet == null && isVmAdmin)
)
_getVbdsByVdi = createSelector(
() => this.props.vdis,
() => this.props.vbds,
() => this.props.vm,
(vdis, vbds, vm) => mapValues(vdis, vdi => find(vbds, { VDI: vdi.id }))
)
individualActions = [
{
handler: this._migrateVdi,
icon: 'vdi-migrate',
label: _('vdiMigrate'),
},
]
render () {
const { srs, vbds, vdis, vm } = this.props
@@ -577,129 +719,17 @@ export default class TabDisks extends Component {
</Row>
<Row>
<Col>
{!isEmpty(vbds) ? (
<table className='table'>
<thead className='thead-default'>
<tr>
<th>{_('vdiNameLabel')}</th>
<th>{_('vdiNameDescription')}</th>
<th>{_('vdiSize')}</th>
<th>{_('vdiSr')}</th>
{vm.virtualizationMode === 'pv' && (
<th>{_('vbdBootableStatus')}</th>
)}
<th>{_('vbdStatus')}</th>
<th className='text-xs-right'>{_('vbdAction')}</th>
</tr>
</thead>
<tbody>
{map(vbds, vbd => {
const vdi = vdis[vbd.VDI]
if (vbd.is_cd_drive || !vdi) {
return
}
const sr = srs[vdi.$SR]
return (
<tr key={vbd.id}>
<td>
<Text
value={vdi.name_label}
onChange={value =>
editVdi(vdi, { name_label: value })
}
/>
</td>
<td>
<Text
value={vdi.name_description}
onChange={value =>
editVdi(vdi, { name_description: value })
}
/>
</td>
<td>
<Size
value={vdi.size || null}
onChange={size => editVdi(vdi, { size })}
/>
</td>
<td>
{' '}
{sr && (
<XoSelect
onChange={sr => migrateVdi(vdi, sr)}
xoType='SR'
predicate={sr =>
sr.$pool === vm.$pool && isSrWritable(sr)
}
labelProp='name_label'
value={sr}
useLongClick
>
<Link to={`/srs/${sr.id}`}>{sr.name_label}</Link>
</XoSelect>
)}
</td>
{vm.virtualizationMode === 'pv' && (
<td>
<Toggle
value={vbd.bootable}
onChange={bootable =>
setBootableVbd(vbd, bootable)
}
/>
</td>
)}
<td>
<StateButton
disabledLabel={_('vbdStatusDisconnected')}
disabledHandler={connectVbd}
disabledTooltip={_('vbdConnect')}
enabledLabel={_('vbdStatusConnected')}
enabledHandler={disconnectVbd}
enabledTooltip={_('vbdDisconnect')}
disabled={!(vbd.attached || isVmRunning(vm))}
handlerParam={vbd}
state={vbd.attached}
/>
</td>
<td className='text-xs-right'>
<Tooltip content={_('vdiMigrate')}>
<ActionRowButton
icon='vdi-migrate'
handler={this._migrateVdi}
handlerParam={vdi}
/>
</Tooltip>
{!vbd.attached && (
<span>
<Tooltip content={_('vdiForget')}>
<ActionRowButton
icon='vdi-forget'
handler={deleteVbd}
handlerParam={vbd}
/>
</Tooltip>
<Tooltip content={_('vdiRemove')}>
<ActionRowButton
icon='vdi-remove'
handler={deleteVdi}
handlerParam={vdi}
/>
</Tooltip>
</span>
)}
</td>
</tr>
)
})}
</tbody>
</table>
) : (
<h4 className='text-xs-center'>{_('vbdNoVbd')}</h4>
)}
<SortedTable
actions={ACTIONS}
collection={vdis}
columns={vm.virtualizationMode === 'pv' ? COLUMNS_VM_PV : COLUMNS}
data-srs={srs}
data-vbdsByVdi={this._getVbdsByVdi()}
data-vm={vm}
individualActions={this.individualActions}
shortcutsTarget='body'
stateUrlParam='s'
/>
</Col>
</Row>
<Row>
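A minimal sketch of the _getVbdsByVdi index used above: one lookup table keyed by VDI id, so each SortedTable column renderer and action can fetch the matching VBD without rescanning the VBD list (sample data is illustrative):

import { find, mapValues } from 'lodash'

const vdis = { v1: { id: 'v1' }, v2: { id: 'v2' } }
const vbds = [{ id: 'b1', VDI: 'v1', attached: true }]

const vbdsByVdi = mapValues(vdis, vdi => find(vbds, { VDI: vdi.id }))
// → { v1: { id: 'b1', VDI: 'v1', attached: true }, v2: undefined }
// The actions above therefore guard with `vbd !== undefined` before dereferencing.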


@@ -7,15 +7,16 @@ import Icon from 'icon'
import React from 'react'
import Tooltip from 'tooltip'
import xoaUpdater, { exposeTrial, isTrialRunning } from 'xoa-updater'
import { confirm } from 'modal'
import { connectStore } from 'utils'
import { addSubscriptions, connectStore } from 'utils'
import { assign, includes, isEmpty, map, some } from 'lodash'
import { Card, CardBlock, CardHeader } from 'card'
import { confirm } from 'modal'
import { Container, Row, Col } from 'grid'
import { createSelector } from 'selectors'
import { error } from 'notification'
import { injectIntl } from 'react-intl'
import { Password } from 'form'
import { serverVersion } from 'xo'
import { assign, includes, isEmpty, map } from 'lodash'
import { serverVersion, subscribeBackupNgJobs, subscribeJobs } from 'xo'
import pkg from '../../../../package'
@@ -50,8 +51,18 @@ const states = {
}
const update = () => xoaUpdater.update()
const upgrade = () => xoaUpdater.upgrade()
const upgrade = ({ runningJobsExist }) =>
runningJobsExist
? confirm({
title: _('upgradeWarningTitle'),
body: _('upgradeWarningMessage'),
}).then(() => xoaUpdater.upgrade())
: xoaUpdater.upgrade()
@addSubscriptions({
backupNgJobs: subscribeBackupNgJobs,
jobs: subscribeJobs,
})
@connectStore(state => {
return {
configuration: state.xoaConfiguration,
@@ -156,6 +167,15 @@ export default class XoaUpdates extends Component {
update()
}
_getRunningJobsExist = createSelector(
() => this.props.jobs,
() => this.props.backupNgJobs,
(jobs, backupNgJobs) =>
jobs !== undefined &&
backupNgJobs !== undefined &&
some(jobs.concat(backupNgJobs), job => job.runId !== undefined)
)
render () {
const textClasses = {
info: 'text-info',
@@ -209,6 +229,7 @@ export default class XoaUpdates extends Component {
</ActionButton>{' '}
<ActionButton
btnStyle='success'
data-runningJobsExist={this._getRunningJobsExist()}
handler={upgrade}
icon='upgrade'
>
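A minimal sketch of the _getRunningJobsExist check above: a job run is considered active when it carries a runId, across both legacy jobs and backup NG jobs, and the upgrade button asks for confirmation in that case (sample data is illustrative):

import { some } from 'lodash'

const runningJobsExist = (jobs, backupNgJobs) =>
  jobs !== undefined &&
  backupNgJobs !== undefined &&
  some(jobs.concat(backupNgJobs), job => job.runId !== undefined)

runningJobsExist([{ id: 'j1' }], [{ id: 'j2', runId: 'r1' }]) // → true: confirm first
runningJobsExist([{ id: 'j1' }], []) // → false: upgrade immediately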


@@ -10,7 +10,7 @@ const formatFiles = files => {
const testFiles = files =>
run(
'./node_modules/.bin/jest',
['--findRelatedTests', '--passWithNoTests'].concat(files)
['--testRegex=^(?!.*.integ.spec.js$).*.spec.js$', '--findRelatedTests', '--passWithNoTests'].concat(files)
)
// -----------------------------------------------------------------------------
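A minimal sketch of what the --testRegex above selects: *.spec.js files are kept while *.integ.spec.js files are excluded by the negative lookahead (the dots are unescaped, but they still match the literal dots in these names):

const testRegex = new RegExp('^(?!.*.integ.spec.js$).*.spec.js$')

testRegex.test('merge.spec.js')       // → true: unit test, runs on commit
testRegex.test('merge.integ.spec.js') // → false: integration test, skipped here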

yarn.lock: 1088 changed lines (diff suppressed because it is too large)