Compare commits

..

183 Commits

Author SHA1 Message Date
Julien Fontanet
07829918e4 5.1.2 2016-07-28 15:21:12 +02:00
Julien Fontanet
b0d400b6eb fix(Xapi#exportDeltaVm): better handling of removed VDIs (#370)
Fixes vatesfr/xo-web#1333
2016-07-28 15:19:44 +02:00
Julien Fontanet
706cb895ad 5.1.1 2016-07-27 16:36:51 +02:00
Julien Fontanet
45bf539b3c fix(user.delete): fix tokens deletion 2016-07-27 13:23:16 +02:00
Julien Fontanet
0923981f8d fix(user.set): typo in error message 2016-07-27 13:01:32 +02:00
Julien Fontanet
b0ac14363d 5.1.0 2016-07-26 16:52:49 +02:00
Julien Fontanet
5d346aba37 fix(vm.create): cloudConfig handling 2016-07-26 14:26:24 +02:00
Julien Fontanet
124cb15ebe fix(resource sets): fix VM resources computation
Fixes vatesfr/xo-web#1276
2016-07-25 17:08:09 +02:00
Julien Fontanet
a244ab898d fix(vm.create): correctly store the resource set 2016-07-25 17:08:08 +02:00
Julien Fontanet
3c551590eb fix(vm.set): correctly save memory in limits 2016-07-25 17:08:07 +02:00
ABHAMON Ronan
10e30cccbc feat(models/schedule): null properly remove timezone (#368)
Related to vatesfr/xo-web#1314
2016-07-25 15:54:27 +02:00
Julien Fontanet
806a6b86a2 fix(signin): fix styles when /v4 2016-07-25 13:40:57 +02:00
Julien Fontanet
9719fdf5cc fix(sr.probe*): correctly prepare port param 2016-07-23 16:18:03 +02:00
Julien Fontanet
6d8764f8cb fix(Xapi#createVm): add missing param 2016-07-23 15:49:27 +02:00
Julien Fontanet
d9fd9cb408 fix(vm.create): better VBDs creation (#361)
Fixes vatesfr/xo-web#1257
2016-07-23 15:31:15 +02:00
Julien Fontanet
7710ec0aba feat(schemas): add user schema 2016-07-20 12:10:23 +02:00
Julien Fontanet
c97bd78cd0 fix(VM): cpuCap & cpuWeight are integers 2016-07-20 10:57:15 +02:00
ABHAMON Ronan
728c5aa86e feat(plugins): supports predefined configurations (#365)
See vatesfr/xo-web#1289
2016-07-19 17:28:53 +02:00
Pierre Donias
83d68ca293 feat(vm.set): make cpuWeight and cpuCap nullable (#364) 2016-07-19 16:53:47 +02:00
Julien Fontanet
47d7561db4 fix(VM): cpuCap can be defined when cpuWeight is not 2016-07-19 15:37:07 +02:00
ABHAMON Ronan
7d993e8319 feat(schedules): schedules support timezones (#363)
Fixes vatesfr/xo-web#1258
2016-07-19 13:32:27 +02:00
Julien Fontanet
1d1a597b22 feat(VM): expose cpuCap 2016-07-19 11:02:38 +02:00
Julien Fontanet
23082f9300 feat(vm.set): support for cpuCap (#362) 2016-07-19 10:35:03 +02:00
Julien Fontanet
ea1a7f9376 chore(Xapi#_getXenUpdates): use ensureArray() 2016-07-15 12:57:20 +02:00
Greenkeeper
1796c7bab8 chore(package): update nyc to version 7.0.0 (#358)
https://greenkeeper.io/
2016-07-14 13:09:12 +02:00
Greenkeeper
65ad76479a chore(package): update base64url to version 2.0.0 (#360)
https://greenkeeper.io/
2016-07-14 11:33:12 +02:00
Olivier Lambert
422db04ec8 5.0.5 2016-07-13 15:20:56 +02:00
Olivier Lambert
d12f60fe37 Merge pull request #359 from vatesfr/pierre-fix-create-vm
fix(vm/create): missing single quotes
2016-07-13 09:37:23 +02:00
Pierre Donias
194c1c991c fix(vm/create): missing single quotes 2016-07-12 16:40:32 +02:00
Olivier Lambert
3e8e2222c1 Merge pull request #357 from vatesfr/marsaudf-fix-job-log-error
Add message to job log error
2016-07-07 15:26:15 +02:00
Fabrice Marsaud
1620327a33 Add message to job log error 2016-07-07 14:55:43 +02:00
Olivier Lambert
b1131e3667 5.0.4 2016-07-07 12:12:54 +02:00
Olivier Lambert
db0250ac08 Merge pull request #356 from vatesfr/marsaudf-fix-patch-conflicts
Fix(xapi): handle correctly single XML elements
2016-07-07 11:22:27 +02:00
Fabrice Marsaud
0a6b605760 Handle single patch elements in parsed XML 2016-07-07 10:11:21 +02:00
Olivier Lambert
81ac2375e5 5.0.3 2016-07-06 23:23:14 +02:00
Olivier Lambert
6bcaca6cd7 Merge pull request #355 from vatesfr/issue-1233
fix(Xapi#importDeltaVm): correctly handle missing network
2016-07-06 23:21:55 +02:00
Olivier Lambert
ec8375252e fix(Xapi#importDeltaVm): correctly handle missing network 2016-07-06 23:11:47 +02:00
Julien Fontanet
766aa1762f 5.0.2 2016-07-05 17:56:02 +02:00
Julien Fontanet
5165e0a54c feat(user.set): support preferences 2016-07-05 17:19:38 +02:00
Julien Fontanet
a2f7ad627e feat(Xapi#migrateVm): allow non-running VMs
Fixes vatesfr/xo-web#1216
2016-07-05 17:09:54 +02:00
Julien Fontanet
1176c162d4 5.0.1 2016-06-30 15:46:27 +02:00
Fabrice Marsaud
a4880cd017 feat(remote.test): perform a write/read test on a remote (#354)
See vatesfr/xo-web#1075
2016-06-30 15:00:00 +02:00
Julien Fontanet
383bdce416 fix(plugin.configure): fix undefined handling 2016-06-29 13:08:02 +02:00
Julien Fontanet
7cc300dd83 fix(Xapi#createVif): fix handling when neither device nor position is not provided 2016-06-28 17:36:24 +02:00
Fabrice Marsaud
687809db9d fix(user.set): cannot change self permission (#353) 2016-06-28 13:28:31 +02:00
Julien Fontanet
1127ec3a90 feat(vif.set): allowed IPv4/IPv6 addresses (#328) 2016-06-27 15:11:46 +02:00
Julien Fontanet
a797edfae9 chore(xapi/mixins/vm): simplify _editVm() specs 2016-06-27 12:10:57 +02:00
Julien Fontanet
938e106252 feat(xapi/utils/makeEditObject): support camelCase and snake_case aliases 2016-06-27 12:10:54 +02:00
Julien Fontanet
a0eb9caaa2 feat(xapi/utils/makeEditObject): set, set.get, set.set can be true 2016-06-27 11:54:13 +02:00
Julien Fontanet
442f53d45e fix(xapi/utils/makeEditObject): use deep equality 2016-06-27 09:52:02 +02:00
Greenkeeper
68de1ca248 chore(package): update ws to version 1.1.1 (#348)
https://greenkeeper.io/
2016-06-26 20:19:47 +02:00
Greenkeeper
e16061141e chore(package): update d3-time-format to version 2.0.0 (#350)
https://greenkeeper.io/
2016-06-26 20:18:24 +02:00
Julien Fontanet
64cbe3d209 feat(build): delete dist before building 2016-06-26 17:47:56 +02:00
Julien Fontanet
ebdc6376d8 5.0.0 2016-06-24 18:34:31 +02:00
Julien Fontanet
68335123a1 feat(vm.create): all vm.set params are supported (#340) 2016-06-24 18:33:43 +02:00
Julien Fontanet
25b18f4ef8 chore(package): update xo-acl-resolver to 0.2.1 2016-06-24 14:43:18 +02:00
Julien Fontanet
9ad615b0ff fix(Xapi#_waitObjectState): fix failure when object is initially missing 2016-06-22 12:20:22 +02:00
Julien Fontanet
12eaceb032 fix(xapi-objects-to-xo): fix CPUs.number when no tools 2016-06-21 13:19:29 +02:00
Julien Fontanet
3263511b72 fix(Xapi#snapshotVm): fallback if quiesce failed
Fixes vatesfr/xo-web#1088
2016-06-21 11:21:01 +02:00
Julien Fontanet
75cae8c647 fix(Xapi#_updateObjectMapProperty): prepare XAPI param 2016-06-21 11:21:00 +02:00
Julien Fontanet
9991ef624c feat(Xapi#getObject): accept objects with _xapiId property 2016-06-21 11:21:00 +02:00
Julien Fontanet
489e9fce27 fix(xapi/index): work around Babel T2877 2016-06-21 11:21:00 +02:00
Julien Fontanet
0655628073 fix(xapi/index): incorrect import 2016-06-21 11:20:59 +02:00
Fabrice Marsaud
9460822529 feat(vm.importBackup): returns the new VM id (#345) 2016-06-20 18:07:14 +02:00
Julien Fontanet
d02358ac0d chore(xapi): move utilities into dedicated module 2016-06-17 18:43:10 +02:00
ABHAMON Ronan
366237a625 fix(XapiStats): fix unit for host free memory (#339) 2016-06-17 10:16:58 +02:00
Julien Fontanet
2f2da18994 chore: remove some unnecessary logs 2016-06-16 09:22:26 +02:00
Greenkeeper
ecd30db215 chore(package): update d3-time-format to version 1.0.0 (#338)
https://greenkeeper.io/
2016-06-15 08:40:56 +02:00
ABHAMON Ronan
1980854f6f feat(Xapi#importDeltaVm): attach VIFs to original networks if available (#335)
Fixes vatesfr/xo-web#1016
2016-06-10 11:05:54 +02:00
Julien Fontanet
7d4f006c25 feat(Xapi#exportDeltaVm): inject network/SR UUIDs in VIF/VDI records 2016-06-09 17:25:02 +02:00
Julien Fontanet
b697be2383 fix(Xapi#_snapshotVm): returns the up-to-date snapshot record 2016-06-09 17:17:14 +02:00
Fabrice Marsaud
143e53c43f chore(package): update xo-remote-parser to version 0.3.0 (#333) 2016-06-08 17:26:08 +02:00
Julien Fontanet
6dde1ade01 fix(xo-server-logs): fix broken require since Babel 6 2016-06-08 11:12:45 +02:00
Greenkeeper
d4de391ac5 chore(package): update d3-time-format to version 0.4.0 (#332)
https://greenkeeper.io/
2016-06-08 09:05:45 +02:00
Greenkeeper
af15f4bc6a chore(package): update xo-acl-resolver to version 0.2.0 (#330)
https://greenkeeper.io/
2016-06-07 16:46:23 +02:00
Fabrice Marsaud
d4ace24caa fix(job.set): protects userId from modification (#329) 2016-06-07 09:25:15 +02:00
Julien Fontanet
c5ab47fa66 chore(package): fix deps order 2016-06-06 13:38:16 +02:00
Julien Fontanet
d60051b629 fix(package): update xo-remote-parser to 0.2.1 2016-06-06 13:37:47 +02:00
Julien Fontanet
22ff330ee7 fix(package): update @marsaud/smb2 to 0.7.1 2016-06-03 18:22:37 +02:00
Olivier Lambert
dd62bef66d feat(host): expose correct timestamp for license expiry value 2016-05-31 17:24:49 +02:00
Julien Fontanet
e7feb99f8d feat(vm.create): clone param may be use to disable cloning (#318)
See vatesfr/xo-web#960
2016-05-30 11:34:39 +02:00
Julien Fontanet
6358accece fix(plugin.configure): correctly handle undefined 2016-05-30 11:12:11 +02:00
Olivier Lambert
9ce8a24eea feat(sr): add disconnect and connect all PBDs to a SR (#324) 2016-05-27 18:31:09 +02:00
Julien Fontanet
4d0673f489 feat(sr.forget): automatically disconnect PBDs (#323) 2016-05-27 18:15:09 +02:00
Olivier Lambert
fbe1e6a7d5 fix(vm): missing parameters and wrong value for set_memory_static_max 2016-05-27 15:03:49 +02:00
Greenkeeper
4ed02ca501 chore(package): update cookie to version 0.3.0 (#322)
https://greenkeeper.io/
2016-05-27 04:36:35 +02:00
Julien Fontanet
af245ed9fe fix(log.delete): id can be an array 2016-05-26 13:34:47 +02:00
Julien Fontanet
fc86a3e882 fix(vm): always consider memory dynamic max when updating resource set 2016-05-24 16:22:55 +02:00
Julien Fontanet
f9109edcf1 fix(vm.set): memoryMax should update resource set 2016-05-24 16:21:21 +02:00
Julien Fontanet
ec100e1a91 fix(vm.set): memoryMax should change dynamic max 2016-05-24 16:20:25 +02:00
Julien Fontanet
746c5f4a79 fix(vm.set): cpusMax (shame) 2016-05-24 15:13:53 +02:00
Julien Fontanet
b2611728a1 fix(vm): fix indent 2016-05-24 14:38:11 +02:00
Julien Fontanet
fc6cc4234d chore(vm.set): fix some comments 2016-05-24 14:33:40 +02:00
Julien Fontanet
7706c1cb63 feat(vm.set): memoryStaticMax 2016-05-24 14:33:02 +02:00
Julien Fontanet
4d7a07220c feat(vm.set): memoryMax increase static max if necessary 2016-05-24 14:32:14 +02:00
Julien Fontanet
436875f7dc fix(vm.set): memoryMin should not change static min 2016-05-24 14:30:26 +02:00
Julien Fontanet
21c6f53ecc fix(vm.set): cpusMax 2016-05-24 14:23:21 +02:00
Julien Fontanet
5472be8b72 4.17.0 2016-05-24 11:51:15 +02:00
Julien Fontanet
d22542fcf3 Revert "fix(leveldown): fix leveldown to version 1.4.4"
This reverts commit 5fa4c95480.
2016-05-24 11:50:36 +02:00
Julien Fontanet
1d8341eb27 Merge branch 'next-release' into stable 2016-05-24 11:49:47 +02:00
Julien Fontanet
1897a7ada3 fix(log.get): only usable by admins 2016-05-23 16:18:21 +02:00
Julien Fontanet
a048698c66 feat(log.*): add params schemas 2016-05-23 16:17:54 +02:00
Julien Fontanet
f891e57f4a fix(xapi-objects-to-xo): a SR should always have a container 2016-05-23 16:00:51 +02:00
Olivier Lambert
fcc590e48a feat(vm.snapshot): name param is optional (#320) 2016-05-23 12:56:20 +02:00
Julien Fontanet
9a02a2a65b fix(vm.set): fix call to $isVmRunning
Fixes #319
2016-05-23 10:02:34 +02:00
Julien Fontanet
536a6c5c60 feat(vm.rollingDrCopy): accepts a sr param (#315)
See vatesfr/xo-web#955
2016-05-21 14:24:16 +02:00
Julien Fontanet
86a6871ee8 fix(vm.set): correctly change min dynamic memory if necessary (#317)
Fixes vatesfr/xo-web#970
2016-05-21 14:14:57 +02:00
Julien Fontanet
6046045151 feat(vm.createInterface): position param is now optional 2016-05-21 13:11:51 +02:00
Julien Fontanet
9c3ddd4ba4 fix(Xapi#_createVm()): license issue with Dundee (#316)
Fixes vatesfr/xo-web#964.
2016-05-20 12:22:42 +02:00
Julien Fontanet
6c9f55c1d7 style(utils): fix lightSet 2016-05-17 09:08:51 +02:00
Julien Fontanet
5bec3d7dcd fix(xapi-object-to-xo): correctly set host memory size 2016-05-16 11:50:01 +02:00
Julien Fontanet
a4c309efe8 fix(package): ship signin.pug 2016-05-12 18:18:56 +02:00
Jon Sands
4e22a208dd fix(autopoweron): set pool other_config entry to true instead of on (#310)
Fixes #309 
Fixes vatesfr/xo-web#937
2016-05-12 13:17:02 +02:00
Julien Fontanet
ff9e77118e fix(Xapi): VM creation on Dundee (#303) 2016-05-11 18:03:58 +02:00
Julien Fontanet
6c6dfa9ac4 perf(Promise): use Bluebird as default implementation 2016-05-11 18:01:52 +02:00
Greenkeeper
d60d5207d8 chore(package): update xen-api to version 0.9.0 (#308)
https://greenkeeper.io/
2016-05-11 17:55:35 +02:00
ABHAMON Ronan
8c0ae892f5 feat(api): rename <namespace> param to id (#305) 2016-05-11 14:35:49 +02:00
Greenkeeper
f570492a11 chore(package): update xo-remote-parser to version 0.2.0 (#307)
https://greenkeeper.io/
2016-05-11 14:07:23 +02:00
Julien Fontanet
cc447304f5 fix(bin/xo-server): remove ES6 syntax 2016-05-10 13:43:53 +02:00
Julien Fontanet
8f8c6366e3 chore(xo-mixins/backup): use default value for remote handler flags 2016-05-05 18:22:19 +02:00
Julien Fontanet
3b13bcb098 fix(Xapi#exportDeltaVm): make streams property non-enumerable 2016-05-05 18:19:41 +02:00
Julien Fontanet
df60784b51 chore(signin): jade renamed to pug 2016-05-04 16:00:28 +02:00
Julien Fontanet
bae3122bb5 chore: various updates 2016-05-04 12:16:02 +02:00
Julien Fontanet
0770aef4bf chore(package): update standard to version 7.0.0 2016-05-04 11:59:56 +02:00
ABHAMON Ronan
c198350bfa feat(remote-handlers): cannot overwrite files by default (#297) 2016-05-03 16:56:26 +02:00
Greenkeeper
a2ed388777 chore(package): update helmet to version 2.0.0 (#298)
https://greenkeeper.io/
2016-04-30 08:10:22 +02:00
Julien Fontanet
f6670c699a 4.16.1 2016-04-29 10:28:03 +02:00
Julien Fontanet
5fa4c95480 fix(leveldown): fix leveldown to version 1.4.4
Due to Level/leveldown#276.
2016-04-29 10:27:37 +02:00
Julien Fontanet
5b8608c186 feat(utils/streamToBuffer): rebase on get-stream and use everywhere (#295) 2016-04-29 09:52:36 +02:00
Julien Fontanet
bb75d42ede 4.16.0 2016-04-29 09:44:42 +02:00
Julien Fontanet
b4b6def07a Merge branch 'next-release' into stable 2016-04-29 09:43:30 +02:00
Greenkeeper
b305700987 chore(package): update get-stream to version 2.1.0 (#294)
https://greenkeeper.io/
2016-04-29 09:14:30 +02:00
Greenkeeper
40232b7eb1 chore(package): update fs-extra to version 0.30.0 (#293)
https://greenkeeper.io/
2016-04-28 18:17:34 +02:00
Julien Fontanet
67ff666db4 Use the new execa.stdout() 2016-04-28 10:18:05 +02:00
Greenkeeper
5960fd4fe0 chore(package): update fs-extra to version 0.29.0 (#292)
https://greenkeeper.io/
2016-04-28 09:04:39 +02:00
Greenkeeper
f8b28c519c chore(package): update xo-acl-resolver to version 0.1.0 (#291)
https://greenkeeper.io/
2016-04-28 08:56:31 +02:00
Julien Fontanet
ee1105b6dd fix(Xapi#importVdiContent): find first attached PBD (#279) 2016-04-27 09:37:30 +02:00
Julien Fontanet
4778274c97 fix(Xapi#call): retries on TOO_MANY_PENDING_TASKS
Fixes fix vatesfr/xo-web#861
2016-04-27 09:28:36 +02:00
Julien Fontanet
d7ecb32238 Xapi#snapshotVm(): wait for the uptodate obj on quiesce. (#282)
Fixes vatesfr/xo-web#904
2016-04-27 09:24:00 +02:00
Greenkeeper
744306fc50 chore(package): update execa to version 0.4.0 (#290)
https://greenkeeper.io/
2016-04-27 09:13:16 +02:00
Olivier Lambert
11bbb8ed4d add host startTime and agentStartTime 2016-04-26 11:30:57 +02:00
Julien Fontanet
b5092a4444 feat(toTimestamp): handle timestamps. 2016-04-26 11:27:26 +02:00
Greenkeeper
e2442c07a9
https://greenkeeper.io/
Fixes vatesfr/xo-web#769.
2016-04-26 09:07:33 +02:00
Julien Fontanet
6f924d4e83 fix(user.delete): fix vatesfr/xo-web#901. 2016-04-25 14:33:29 +02:00
Greenkeeper
faf1508914 chore(package): update execa to version 0.3.0 (#284)
https://greenkeeper.io/
2016-04-23 17:51:56 +01:00
Julien Fontanet
7eb8152835 4.15.3 2016-04-22 16:18:21 +02:00
Julien Fontanet
8f45905831 fix(vm.deltaCopy()): delete snapshot when import fails. 2016-04-22 16:18:03 +02:00
Julien Fontanet
4ba2ffce5b fix(vm.deltaCopy()): delete snapshot when import fails. 2016-04-22 13:39:21 +02:00
Greenkeeper
ffb3659ef5 chore(package): update fs-extra to version 0.28.0 (#280)
http://greenkeeper.io/
2016-04-18 12:09:06 +01:00
Julien Fontanet
6dec07d562 signin form: fix redirect on success. 2016-04-18 11:57:58 +01:00
Julien Fontanet
afb22f3279 Merge pull request #278 from vatesfr/greenkeeper-fs-extra-0.27.0
Update fs-extra to version 0.27.0 🚀
2016-04-15 14:14:31 +02:00
greenkeeperio-bot
f2f369db64 chore(package): update fs-extra to version 0.27.0
http://greenkeeper.io/
2016-04-15 14:05:41 +02:00
Julien Fontanet
635c76db93 Deprecate host#CPUs and introduce host#cpus. 2016-04-13 10:59:29 +02:00
Julien Fontanet
5f50f1928d Merge pull request #276 from vatesfr/fix-auto-poweron
Fix auto poweron (fix vatesfr/xo-web#879).
2016-04-11 15:53:37 +02:00
Julien Fontanet
32c9ed1dc2 Fix auto poweron (fix vatesfr/xo-web#879). 2016-04-11 15:31:59 +02:00
Julien Fontanet
71741e144e Merge pull request #274 from vatesfr/abhamonr-set-vm-ram-min-max-values
api.vm: Set the min/max ram values.
2016-04-07 10:08:34 +02:00
wescoeur
f2e64cdd5e api.vm: Set the min/max ram values. 2016-04-07 09:25:45 +02:00
Julien Fontanet
afaa5d5e9e Merge pull request #275 from vatesfr/abhamonr-set-vm-cpus-max
api.vm: Set vcpus max.
2016-04-06 17:39:16 +02:00
wescoeur
d82861727d api.vm: Set vcpus max. 2016-04-06 17:32:51 +02:00
Julien Fontanet
90f0795416 Merge pull request #272 from vatesfr/abhamonr-fix-smb-backup-location
Ensure remote smb path is a directory. (fix vatesfr/xo-web#865)
2016-04-06 16:25:28 +02:00
Julien Fontanet
9efbe7771c Merge pull request #273 from vatesfr/abhamonr-consistent-stats-object
vm.stats(): Returns empty stats if none can be found.
2016-04-06 12:10:34 +02:00
wescoeur
a75caac13d Vm stats are consistents. Even without RRD stats. 2016-04-06 11:55:14 +02:00
wescoeur
279d0d20ea Ensure remote smb path is a directory. (fix vatesfr/xo-web#865) 2016-04-06 09:51:20 +02:00
Julien Fontanet
332ba96d34 ghooks: use commit-msg instead of pre-commit. 2016-04-04 11:33:30 +02:00
Julien Fontanet
3f6e5b7606 decorators/@autobind: Minor improvements. 2016-04-04 11:29:31 +02:00
Julien Fontanet
94703492fd Use http-proxy for HTTP/ws proxy. 2016-03-30 17:33:50 +02:00
Julien Fontanet
df78117617 Do not crash on error in the console proxy. 2016-03-30 17:33:50 +02:00
Julien Fontanet
909b9480e4 Better error message in console proxy. 2016-03-30 17:33:49 +02:00
Julien Fontanet
21762ac1aa Return to the correct page after sign in. 2016-03-30 17:33:49 +02:00
Julien Fontanet
412bc175b4 Merge pull request #270 from vatesfr/abhamonr-stats-object-contains-interval
Stats object contains interval attr.
2016-03-30 14:49:25 +02:00
wescoeur
dc0eb76e88 Stats object contains interval attr. 2016-03-30 14:34:37 +02:00
Julien Fontanet
2695941a3c Remove pFinally() tests, now implemented in promise-toolbox. 2016-03-29 18:05:32 +02:00
Julien Fontanet
3506be1a70 Update promise-toolbox to 0.3.2. 2016-03-29 09:54:24 +02:00
Julien Fontanet
cbf4786b39 Do not crash on unhandled error event. 2016-03-27 13:31:31 +02:00
Julien Fontanet
8dbf334208 Merge pull request #267 from vatesfr/back-to-babel-6
Back to babel 6
2016-03-25 17:37:52 +01:00
Julien Fontanet
60ba5fbc72 Merge pull request #268 from vatesfr/abhamonr-stats-with-halted-vm
Throw an error if a vm is halted and its stats are requested.
2016-03-25 17:37:27 +01:00
Julien Fontanet
c3ace0c44f Simply do npm test before git commit. 2016-03-25 17:36:37 +01:00
Olivier Lambert
8eceb90e63 add startTime 2016-03-25 17:33:34 +01:00
wescoeur
4754e19e83 Throw an error if a vm is halted and its stats are requested. 2016-03-25 15:49:52 +01:00
Julien Fontanet
a0559d0dc9 Revert "Work around Babel T7172."
This reverts commit ca8476d466.
2016-03-23 09:45:59 +01:00
Julien Fontanet
8d03ce19b0 Revert "Merge pull request #248 from vatesfr/babel-t7172"
This reverts commit f125b593bf, reversing
changes made to ca8476d466.
2016-03-23 09:43:30 +01:00
Julien Fontanet
2470d851e9 Revert "Merge pull request #266 from vatesfr/babel-5-workaround"
This reverts commit b77d3f123d, reversing
changes made to c10b0afaa8.
2016-03-23 09:41:54 +01:00
Julien Fontanet
df99f5c0a5 Revert "Merge pull request #265 from vatesfr/revert-babel-6"
This reverts commit 8907290d27, reversing
changes made to eb55cba34a.
2016-03-23 09:41:08 +01:00
61 changed files with 1794 additions and 1056 deletions

@@ -1,15 +0,0 @@
{
"comments": false,
"compact": true,
"optional": [
// Experimental features.
// "minification.constantFolding",
// "minification.deadCodeElimination",
"es7.asyncFunctions",
"es7.decorators",
"es7.exportExtensions",
"es7.functionBind",
"runtime"
]
}

.gitignore (vendored): 2 changes
@@ -1,6 +1,8 @@
/.nyc_output/
/dist/
/node_modules/
/src/api/index.js
/src/xapi/mixins/index.js
/src/xo-mixins/index.js
npm-debug.log

@@ -1,5 +1,6 @@
/examples/
example.js
example.js.map
*.example.js
*.example.js.map

@@ -7,9 +7,25 @@
// Better stack traces if possible.
require('../better-stacks')
// Use Bluebird for all promises as it provides better performance and
// less memory usage.
global.Promise = require('bluebird')
// Make unhandled rejected promises visible.
process.on('unhandledRejection', (reason) => {
console.log('[Warn] Possibly unhandled rejection:', reason && reason.stack || reason)
process.on('unhandledRejection', function (reason) {
console.warn('[Warn] Possibly unhandled rejection:', reason && reason.stack || reason)
})
;(function (EE) {
var proto = EE.prototype
var emit = proto.emit
proto.emit = function patchedError (event, error) {
if (event === 'error' && !this.listenerCount(event)) {
return console.warn('[Warn] Unhandled error event:', error && error.stack || error)
}
return emit.apply(this, arguments)
}
})(require('events').EventEmitter)
require('exec-promise')(require('../'))

@@ -7,4 +7,4 @@
// Better stack traces if possible.
require('../better-stacks')
require('exec-promise')(require('../dist/logs-cli'))
require('exec-promise')(require('../dist/logs-cli').default)

@@ -7,13 +7,16 @@ var gulp = require('gulp')
var babel = require('gulp-babel')
var coffee = require('gulp-coffee')
var plumber = require('gulp-plumber')
var rimraf = require('rimraf')
var sourceMaps = require('gulp-sourcemaps')
var watch = require('gulp-watch')
var join = require('path').join
// ===================================================================
var SRC_DIR = __dirname + '/src'
var DIST_DIR = __dirname + '/dist'
var SRC_DIR = join(__dirname, 'src')
var DIST_DIR = join(__dirname, 'dist')
var PRODUCTION = process.argv.indexOf('--production') !== -1
@@ -36,6 +39,10 @@ function src (patterns) {
// ===================================================================
gulp.task(function clean (cb) {
rimraf(DIST_DIR, cb)
})
gulp.task(function buildCoffee () {
return src('**/*.coffee')
.pipe(sourceMaps.init())
@@ -60,4 +67,4 @@ gulp.task(function buildEs6 () {
// ===================================================================
gulp.task('build', gulp.parallel('buildCoffee', 'buildEs6'))
gulp.task('build', gulp.series('clean', gulp.parallel('buildCoffee', 'buildEs6')))

@@ -8,4 +8,4 @@ if (process.env.DEBUG === undefined) {
}
// Import the real main module.
module.exports = require('./dist')
module.exports = require('./dist').default

@@ -1,6 +1,6 @@
{
"name": "xo-server",
"version": "4.15.2",
"version": "5.1.2",
"license": "AGPL-3.0",
"description": "Server part of Xen-Orchestra",
"keywords": [
@@ -13,6 +13,10 @@
"bugs": {
"url": "https://github.com/vatesfr/xo-web/issues"
},
"repository": {
"type": "git",
"url": "git://github.com/vatesfr/xo-server.git"
},
"author": "Julien Fontanet <julien.fontanet@vates.fr>",
"preferGlobal": true,
"files": [
@@ -21,49 +25,46 @@
"dist/",
"config.json",
"index.js",
"signin.jade"
"signin.pug"
],
"directories": {
"bin": "bin"
},
"repository": {
"type": "git",
"url": "git://github.com/vatesfr/xo-server.git"
},
"engines": {
"node": ">=0.12 <5"
"node": ">=0.12"
},
"dependencies": {
"@marsaud/smb2": "^0.7.1",
"@marsaud/smb2-promise": "^0.2.0",
"app-conf": "^0.4.0",
"babel-runtime": "^5",
"base64url": "^1.0.5",
"babel-runtime": "^6.5.0",
"base64url": "^2.0.0",
"blocked": "^1.1.0",
"bluebird": "^3.1.1",
"body-parser": "^1.13.3",
"connect-flash": "^0.1.1",
"cookie": "^0.2.3",
"cookie": "^0.3.0",
"cookie-parser": "^1.3.5",
"cron": "^1.0.9",
"d3-time-format": "^0.3.0",
"d3-time-format": "^2.0.0",
"debug": "^2.1.3",
"escape-string-regexp": "^1.0.3",
"event-to-promise": "^0.7.0",
"exec-promise": "^0.6.1",
"execa": "^0.2.2",
"execa": "^0.4.0",
"express": "^4.13.3",
"express-session": "^1.11.3",
"fatfs": "^0.10.3",
"fs-extra": "^0.26.2",
"fs-extra": "^0.30.0",
"fs-promise": "^0.4.1",
"get-stream": "^1.1.0",
"get-stream": "^2.1.0",
"hashy": "~0.4.2",
"helmet": "^1.1.0",
"helmet": "^2.0.0",
"highland": "^2.5.1",
"http-proxy": "^1.13.2",
"http-server-plus": "^0.6.4",
"human-format": "^0.6.0",
"is-my-json-valid": "^2.12.2",
"jade": "^1.11.0",
"js-yaml": "^3.2.7",
"json-rpc-peer": "^0.11.0",
"json5": "^0.4.0",
@@ -74,69 +75,46 @@
"level-party": "^3.0.4",
"level-sublevel": "^6.5.2",
"leveldown": "^1.4.2",
"lodash.assign": "^4.0.3",
"lodash.bind": "^4.1.0",
"lodash.difference": "^4.1.0",
"lodash.endswith": "^4.0.0",
"lodash.every": "^4.0.0",
"lodash.filter": "^4.2.0",
"lodash.find": "^4.2.0",
"lodash.findindex": "^4.2.0",
"lodash.foreach": "^4.1.0",
"lodash.get": "^4.1.2",
"lodash.has": "^4.2.0",
"lodash.includes": "^4.1.0",
"lodash.invert": "^4.0.1",
"lodash.isarray": "^4.0.0",
"lodash.isboolean": "^3.0.2",
"lodash.isempty": "^4.1.2",
"lodash.isfunction": "^3.0.1",
"lodash.isinteger": "^4.0.0",
"lodash.isobject": "^3.0.0",
"lodash.isstring": "^4.0.1",
"lodash.keys": "^4.0.3",
"lodash.map": "^4.2.0",
"lodash.pick": "^4.1.0",
"lodash.pickby": "^4.2.0",
"lodash.remove": "^4.0.1",
"lodash.some": "^4.2.0",
"lodash.sortby": "^4.2.0",
"lodash.startswith": "^4.0.0",
"lodash.trim": "^4.2.0",
"lodash": "^4.13.1",
"make-error": "^1",
"micromatch": "^2.3.2",
"minimist": "^1.2.0",
"moment-timezone": "^0.5.4",
"ms": "^0.7.1",
"multikey-hash": "^1.0.1",
"ndjson": "^1.4.3",
"partial-stream": "0.0.0",
"passport": "^0.3.0",
"passport-local": "^1.0.0",
"promise-toolbox": "^0.2.0",
"promise-toolbox": "^0.3.2",
"proxy-agent": "^2.0.0",
"proxy-http-request": "0.1.0",
"pug": "^2.0.0-alpha6",
"redis": "^2.0.1",
"schema-inspector": "^1.5.1",
"semver": "^5.1.0",
"serve-static": "^1.9.2",
"stack-chain": "^1.3.3",
"through2": "^2.0.0",
"struct-fu": "^1.0.0",
"through2": "^2.0.0",
"trace": "^2.0.1",
"ws": "~1.0.1",
"xen-api": "^0.7.4",
"ws": "^1.1.1",
"xen-api": "^0.9.0",
"xml2js": "~0.4.6",
"xo-acl-resolver": "0.0.0",
"xo-acl-resolver": "^0.2.1",
"xo-collection": "^0.4.0",
"xo-remote-parser": "^0.1.0"
"xo-remote-parser": "^0.3"
},
"devDependencies": {
"babel-eslint": "^4.0.10",
"babel-eslint": "^6.0.4",
"babel-plugin-transform-decorators-legacy": "^1.3.4",
"babel-plugin-transform-runtime": "^6.5.2",
"babel-preset-es2015": "^6.5.0",
"babel-preset-stage-0": "^6.5.0",
"chai": "^3.0.0",
"dependency-check": "^2.4.0",
"ghooks": "^1.0.3",
"gulp": "git://github.com/gulpjs/gulp#4.0",
"gulp-babel": "^5",
"gulp-babel": "^6",
"gulp-coffee": "^2.3.1",
"gulp-plumber": "^1.0.0",
"gulp-sourcemaps": "^1.5.1",
@@ -144,30 +122,44 @@
"leche": "^2.1.1",
"mocha": "^2.2.1",
"must": "^0.13.1",
"nyc": "^7.0.0",
"rimraf": "^2.5.2",
"sinon": "^1.14.1",
"standard": "^5.2.1"
"standard": "^7.0.0"
},
"scripts": {
"build": "npm run build-indexes && gulp build --production",
"build-indexes": "./tools/generate-index src/api src/xo-mixins",
"depcheck": "dependency-check ./package.json",
"build-indexes": "./tools/generate-index src/api src/xapi/mixins src/xo-mixins",
"dev": "npm run build-indexes && gulp build",
"dev-test": "mocha --opts .mocha.opts --watch --reporter=min \"dist/**/*.spec.js\"",
"lint": "standard",
"postrelease": "git checkout master && git merge --ff-only stable && git checkout next-release && git merge --ff-only stable",
"posttest": "npm run lint && npm run depcheck",
"prepublish": "npm run build",
"start": "node bin/xo-server",
"test": "mocha --opts .mocha.opts \"dist/**/*.spec.js\"",
"posttest": "npm run lint && dependency-check ./package.json",
"prerelease": "git checkout next-release && git pull --ff-only && git checkout stable && git pull --ff-only && git merge next-release",
"release": "npm version",
"postrelease": "git checkout master && git merge --ff-only stable && git checkout next-release && git merge --ff-only stable"
"start": "node bin/xo-server",
"test": "nyc mocha --opts .mocha.opts \"dist/**/*.spec.js\""
},
"babel": {
"plugins": [
"transform-decorators-legacy",
"transform-runtime"
],
"presets": [
"stage-0",
"es2015"
]
},
"config": {
"ghooks": {
"pre-commit": "npm it"
"commit-msg": "npm test"
}
},
"standard": {
"ignore": [
"dist/**"
"dist"
],
"parser": "babel-eslint"
}

@@ -7,6 +7,7 @@ html
title Xen Orchestra
meta(name = 'author' content = 'Vates SAS')
link(rel = 'stylesheet' href = 'styles/main.css')
link(rel = 'stylesheet' href = 'v4/styles/main.css')
body
.container
.row-login

@@ -3,7 +3,7 @@ import {JsonRpcError} from 'json-rpc-peer'
// ===================================================================
// Export standard JSON-RPC errors.
export {
export { // eslint-disable-line no-duplicate-imports
InvalidJson,
InvalidParameters,
InvalidRequest,

@@ -1,8 +1,9 @@
import createDebug from 'debug'
const debug = createDebug('xo:api')
import getKeys from 'lodash.keys'
import getKeys from 'lodash/keys'
import kindOf from 'kindof'
import moment from 'moment-timezone'
import ms from 'ms'
import schemaInspector from 'schema-inspector'
@@ -188,6 +189,11 @@ methodSignature.description = 'returns the signature of an API method'
// ===================================================================
const getServerTimezone = (tz => () => tz)(moment.tz.guess())
getServerTimezone.description = 'return the timezone server'
// ===================================================================
export default class Api {
constructor ({
context,
@@ -201,6 +207,7 @@ export default class Api {
system: {
getMethodsInfo,
getServerVersion,
getServerTimezone,
getVersion,
listMethods,
methodSignature
@@ -272,6 +279,20 @@ export default class Api {
try {
await checkPermission.call(context, method)
// API methods are in a namespace.
// Some methods use the namespace or an id parameter like:
//
// vm.detachPci vm=<string>
// vm.ejectCd id=<string>
//
// The goal here is to standardize the calls by always providing
// an id parameter when possible to simplify calls to the API.
if (params && params.id === undefined) {
const namespace = name.slice(0, name.indexOf('.'))
params.id = params[namespace]
}
checkParams(method, params)
const resolvedParams = await resolveParams.call(context, method, params)

@@ -1,9 +1,9 @@
$debug = (require 'debug') 'xo:api:vm'
$find = require 'lodash.find'
$findIndex = require 'lodash.findindex'
$forEach = require 'lodash.foreach'
endsWith = require 'lodash.endswith'
startsWith = require 'lodash.startswith'
$find = require 'lodash/find'
$findIndex = require 'lodash/findIndex'
$forEach = require 'lodash/forEach'
endsWith = require 'lodash/endsWith'
startsWith = require 'lodash/startsWith'
{coroutine: $coroutine} = require 'bluebird'
{
extractProperty,

@@ -16,13 +16,23 @@ export async function get ({namespace}) {
}
get.description = 'returns logs list for one namespace'
get.params = {
namespace: { type: 'string' }
}
get.permission = 'admin'
// -------------------------------------------------------------------
async function delete_ ({namespace, id}) {
const logger = await this.getLogger(namespace)
logger.del(id)
}
delete_.description = 'deletes on or several logs from a namespace'
delete_.description = 'deletes one or several logs from a namespace'
delete_.params = {
id: { type: [ 'array', 'string' ] },
namespace: { type: 'string' }
}
delete_.permission = 'admin'
export {delete_ as delete}

@@ -1,7 +1,3 @@
import {
GenericError
} from '../api-errors'
// FIXME: too low level, should be removed.
// ===================================================================
@@ -24,17 +20,8 @@ delete_.resolve = {
// ===================================================================
// Disconnect
export async function disconnect ({PBD}) {
// TODO: check if PBD is attached before
try {
await this.getXapi(PBD).call('PBD.unplug', PBD._xapiRef)
} catch (error) {
if (error.code === 'VDI_IN_USE') {
throw new GenericError('VDI in use')
} else {
throw error
}
}
export async function disconnect ({ pbd }) {
return this.getXapi(pbd).unplugPbd(pbd._xapiId)
}
disconnect.params = {
@@ -42,7 +29,7 @@ disconnect.params = {
}
disconnect.resolve = {
PBD: ['id', 'PBD', 'administrate']
pbd: ['id', 'PBD', 'administrate']
}
// ===================================================================

@@ -74,7 +74,7 @@ installPatch.resolve = {
// -------------------------------------------------------------------
async function handlePatchUpload (req, res, {pool}) {
const {headers: {['content-length']: contentLength}} = req
const contentLength = req.headers['content-length']
if (!contentLength) {
res.writeHead(411)
res.end('Content length is mandatory')

@@ -1,12 +1,12 @@
export async function getAll () {
return /* await */ this.getAllRemotes()
return this.getAllRemotes()
}
getAll.permission = 'admin'
getAll.description = 'Gets all existing fs remote points'
export async function get ({id}) {
return /* await */ this.getRemote(id)
return this.getRemote(id)
}
get.permission = 'admin'
@@ -15,8 +15,18 @@ get.params = {
id: {type: 'string'}
}
export async function test ({id}) {
return this.testRemote(id)
}
test.permission = 'admin'
test.description = 'Performs a read/write matching test on a remote point'
test.params = {
id: {type: 'string'}
}
export async function list ({id}) {
return /* await */ this.listRemoteBackups(id)
return this.listRemoteBackups(id)
}
list.permission = 'admin'
@@ -26,7 +36,7 @@ list.params = {
}
export async function create ({name, url}) {
return /* await */ this.createRemote({name, url})
return this.createRemote({name, url})
}
create.permission = 'admin'

@@ -17,8 +17,8 @@ get.params = {
id: {type: 'string'}
}
export async function create ({jobId, cron, enabled, name}) {
return /* await */ this.createSchedule(this.session.get('user_id'), {job: jobId, cron, enabled, name})
export async function create ({ jobId, cron, enabled, name, timezone }) {
return /* await */ this.createSchedule(this.session.get('user_id'), { job: jobId, cron, enabled, name, timezone })
}
create.permission = 'admin'
@@ -30,8 +30,8 @@ create.params = {
name: {type: 'string', optional: true}
}
export async function set ({id, jobId, cron, enabled, name}) {
await this.updateSchedule(id, {job: jobId, cron, enabled, name})
export async function set ({ id, jobId, cron, enabled, name, timezone }) {
await this.updateSchedule(id, { job: jobId, cron, enabled, name, timezone })
}
set.permission = 'admin'

@@ -1,3 +1,4 @@
import { asInteger } from '../xapi/utils'
import {
ensureArray,
forEach,
@@ -48,8 +49,8 @@ scan.resolve = {
// -------------------------------------------------------------------
// TODO: find a way to call this "delete" and not destroy
export async function destroy ({SR}) {
await this.getXapi(SR).call('SR.destroy', SR._xapiRef)
export async function destroy ({ sr }) {
await this.getXapi(sr).destroySr(sr._xapiId)
}
destroy.params = {
@@ -57,13 +58,13 @@ destroy.params = {
}
destroy.resolve = {
SR: ['id', 'SR', 'administrate']
sr: ['id', 'SR', 'administrate']
}
// -------------------------------------------------------------------
export async function forget ({SR}) {
await this.getXapi(SR).call('SR.forget', SR._xapiRef)
await this.getXapi(SR).forgetSr(SR._xapiId)
}
forget.params = {
@@ -76,6 +77,34 @@ forget.resolve = {
// -------------------------------------------------------------------
export async function connectAllPbds ({SR}) {
await this.getXapi(SR).connectAllSrPbds(SR._xapiId)
}
connectAllPbds.params = {
id: { type: 'string' }
}
connectAllPbds.resolve = {
SR: ['id', 'SR', 'administrate']
}
// -------------------------------------------------------------------
export async function disconnectAllPbds ({SR}) {
await this.getXapi(SR).disconnectAllSrPbds(SR._xapiId)
}
disconnectAllPbds.params = {
id: { type: 'string' }
}
disconnectAllPbds.resolve = {
SR: ['id', 'SR', 'administrate']
}
// -------------------------------------------------------------------
export async function createIso ({
host,
nameLabel,
@@ -316,7 +345,7 @@ export async function createIscsi ({
// if we give another port than default iSCSI
if (port) {
deviceConfig.port = port
deviceConfig.port = asInteger(port)
}
const srRef = await xapi.call(
@@ -377,7 +406,7 @@ export async function probeIscsiIqns ({
// if we give another port than default iSCSI
if (port) {
deviceConfig.port = port
deviceConfig.port = asInteger(port)
}
let xml
@@ -455,7 +484,7 @@ export async function probeIscsiLuns ({
// if we give another port than default iSCSI
if (port) {
deviceConfig.port = port
deviceConfig.port = asInteger(port)
}
let xml
@@ -534,7 +563,7 @@ export async function probeIscsiExists ({
// if we give another port than default iSCSI
if (port) {
deviceConfig.port = port
deviceConfig.port = asInteger(port)
}
const xml = parseXml(await xapi.call('SR.probe', host._xapiRef, deviceConfig, 'lvmoiscsi', {}))

View File

@@ -22,7 +22,7 @@ create.params = {
// Deletes an existing user.
async function delete_ ({id}) {
if (id === this.session.get('user_id')) {
throw new InvalidParameters('an user cannot delete itself')
throw new InvalidParameters('a user cannot delete itself')
}
await this.deleteUser(id)
@@ -57,8 +57,11 @@ getAll.permission = 'admin'
// -------------------------------------------------------------------
export async function set ({id, email, password, permission}) {
await this.updateUser(id, {email, password, permission})
export async function set ({id, email, password, permission, preferences}) {
if (permission && id === this.session.get('user_id')) {
throw new InvalidParameters('a user cannot change its own permission')
}
await this.updateUser(id, {email, password, permission, preferences})
}
set.description = 'changes the properties of an existing user'
@@ -69,7 +72,8 @@ set.params = {
id: { type: 'string' },
email: { type: 'string', optional: true },
password: { type: 'string', optional: true },
permission: { type: 'string', optional: true }
permission: { type: 'string', optional: true },
preferences: { type: 'object', optional: true }
}
// -------------------------------------------------------------------

@@ -1,11 +1,10 @@
# FIXME: rename to disk.*
$isArray = require 'lodash.isarray'
{coroutine: $coroutine} = require 'bluebird'
{format} = require 'json-rpc-peer'
{InvalidParameters} = require '../api-errors'
{parseSize} = require '../utils'
{isArray: $isArray, parseSize} = require '../utils'
{JsonRpcError} = require '../api-errors'
#=====================================================================

@@ -41,3 +41,31 @@ connect.params = {
connect.resolve = {
vif: ['id', 'VIF', 'operate']
}
// -------------------------------------------------------------------
export const set = ({ vif, allowedIpv4Addresses, allowedIpv6Addresses }) => (
this.getXapi(vif._xapiId).editVif({
ipv4Allowed: allowedIpv4Addresses,
ipv6Allowed: allowedIpv6Addresses
})
)
set.params = {
allowedIpv4Addresses: {
type: 'array',
items: {
type: 'string'
}
},
allowedIpv6Addresses: {
type: 'array',
items: {
type: 'string'
}
}
}
set.resolve = {
vif: ['id', 'VIF', 'operate']
}

@@ -1,14 +1,13 @@
$assign = require 'lodash.assign'
$assign = require 'lodash/assign'
$debug = (require 'debug') 'xo:api:vm'
$filter = require 'lodash.filter'
$findIndex = require 'lodash.findindex'
$findWhere = require 'lodash.find'
$isArray = require 'lodash.isarray'
endsWith = require 'lodash.endswith'
$filter = require 'lodash/filter'
$findIndex = require 'lodash/findIndex'
$findWhere = require 'lodash/find'
endsWith = require 'lodash/endsWith'
escapeStringRegexp = require 'escape-string-regexp'
eventToPromise = require 'event-to-promise'
sortBy = require 'lodash.sortby'
startsWith = require 'lodash.startswith'
sortBy = require 'lodash/sortBy'
startsWith = require 'lodash/startsWith'
{coroutine: $coroutine} = require 'bluebird'
{format} = require 'json-rpc-peer'
@@ -19,6 +18,7 @@ startsWith = require 'lodash.startswith'
{
forEach,
formatXml: $js2xml,
isArray: $isArray,
map,
mapToArray,
noop,
@@ -27,7 +27,7 @@ startsWith = require 'lodash.startswith'
pCatch,
pFinally
} = require '../utils'
{isVmRunning: $isVMRunning} = require('../xapi')
{isVmRunning: $isVmRunning} = require('../xapi')
#=====================================================================
@@ -53,33 +53,40 @@ checkPermissionOnSrs = (vm, permission = 'operate') -> (
#=====================================================================
# TODO: Implement ACLs
create = $coroutine ({
resourceSet
installation
name_description
name_label
template
pv_args
VDIs
VIFs
existingDisks
}) ->
{ user } = this
unless user
throw new Unauthorized()
extract = (obj, prop) ->
value = obj[prop]
delete obj[prop]
return value
# TODO: Implement ACLs
create = $coroutine (params) ->
template = extract(params, 'template')
params.template = template._xapiId
xapi = this.getXapi(template)
limits = {
cpus: template.CPUs.number,
disk: 0,
memory: template.memory.size,
vms: 1
}
objectIds = [
template.id
]
limits = {
cpus: template.CPUs.number,
disk: 0,
memory: template.memory.dynamic[1],
vms: 1
}
vdiSizesByDevice = {}
forEach(xapi.getObject(template._xapiId).$VBDs, (vbd) =>
if (
vbd.type is 'Disk' and
(vdi = vbd.$VDI)
)
vdiSizesByDevice[vbd.device] = +vdi.virtual_size
xapiVdis = VDIs and map(VDIs, (vdi) =>
return
)
vdis = extract(params, 'VDIs')
params.vdis = vdis and map(vdis, (vdi) =>
sr = @getObject(vdi.SR)
size = parseSize(vdi.size)
@@ -94,24 +101,11 @@ create = $coroutine ({
})
)
xapi = @getXapi(template)
diskSizesByDevice = {}
forEach(xapi.getObject(template._xapiId).$VBDs, (vbd) =>
if (
vbd.type is 'Disk' and
(vdi = vbd.$VDI)
)
diskSizesByDevice[vbd.device] = +vdi.virtual_size
return
)
xapiExistingVdis = existingDisks and map(existingDisks, (vdi, device) =>
existingVdis = extract(params, 'existingDisks')
params.existingVdis = existingVdis and map(existingVdis, (vdi, device) =>
if vdi.size?
size = parseSize(vdi.size)
diskSizesByDevice[device] = size
vdiSizesByDevice[device] = size
if vdi.$SR
sr = @getObject(vdi.$SR)
@@ -123,9 +117,10 @@ create = $coroutine ({
})
)
forEach(diskSizesByDevice, (size) => limits.disk += size)
forEach(vdiSizesByDevice, (size) => limits.disk += size)
xapiVifs = VIFs and map(VIFs, (vif) =>
vifs = extract(params, 'VIFs')
params.vifs = vifs and map(vifs, (vif) =>
network = @getObject(vif.network)
objectIds.push(network.id)
@@ -136,33 +131,45 @@ create = $coroutine ({
}
)
installation = extract(params, 'installation')
params.installRepository = installation && installation.repository
resourceSet = extract(params, 'resourceSet')
xapiVm = yield xapi.createVm(template._xapiId, params)
vm = xapi.xo.addObject(xapiVm)
{ user } = this
if resourceSet
yield this.checkResourceSetConstraints(resourceSet, user.id, objectIds)
yield this.allocateLimitsInResourceSet(limits, resourceSet)
else unless user.permission is 'admin'
throw new Unauthorized()
xapiVm = yield xapi.createVm(template._xapiId, {
installRepository: installation && installation.repository,
nameDescription: name_description,
nameLabel: name_label,
pvArgs: pv_args,
vdis: xapiVdis,
vifs: xapiVifs,
existingVdis: xapiExistingVdis
})
vm = xapi.xo.addObject(xapiVm)
if resourceSet
yield Promise.all([
@addAcl(user.id, vm.id, 'admin'),
@addAcl(user.id, vm.id, 'admin')
xapi.xo.setData(xapiVm.$id, 'resourceSet', resourceSet)
])
return vm.id
create.params = {
cloudConfig: {
type: 'string'
optional: true
}
coreOs: {
type: 'boolean'
optional: true
}
clone: {
type: 'boolean'
optional: true
}
resourceSet: {
type: 'string',
optional: true
@@ -398,99 +405,19 @@ exports.migrate = migrate
#---------------------------------------------------------------------
# FIXME: human readable strings should be handled.
set = $coroutine (params) ->
{VM} = params
xapi = @getXapi VM
set = (params) ->
VM = extract(params, 'VM')
xapi = @getXapi(VM)
{_xapiRef: ref} = VM
return xapi.editVm(VM._xapiId, params, (limits, vm) =>
resourceSet = xapi.xo.getData(vm, 'resourceSet')
resourceSet = xapi.xo.getData(ref, 'resourceSet')
if (resourceSet)
return @allocateLimitsInResourceSet(limits, resourceSet)
# Memory.
if 'memory' of params
memory = parseSize(params.memory)
if memory < VM.memory.static[0]
@throw(
'INVALID_PARAMS'
"cannot set memory below the static minimum (#{VM.memory.static[0]})"
)
if ($isVMRunning VM) and memory > VM.memory.static[1]
@throw(
'INVALID_PARAMS'
"cannot set memory above the static maximum (#{VM.memory.static[1]}) "+
"for a running VM"
)
if memory < VM.memory.dynamic[0]
yield xapi.call 'VM.set_memory_dynamic_min', ref, "#{memory}"
else if memory > VM.memory.static[1]
yield xapi.call 'VM.set_memory_static_max', ref, "#{memory}"
if resourceSet?
yield @allocateLimitsInResourceSet({
memory: memory - VM.memory.size
}, resourceSet)
yield xapi.call 'VM.set_memory_dynamic_max', ref, "#{memory}"
# Number of CPUs.
if 'CPUs' of params
{CPUs} = params
if resourceSet?
yield @allocateLimitsInResourceSet({
cpus: CPUs - VM.CPUs.number
}, resourceSet)
if $isVMRunning VM
if CPUs > VM.CPUs.max
@throw(
'INVALID_PARAMS'
"cannot set CPUs above the static maximum (#{VM.CPUs.max}) "+
"for a running VM"
)
yield xapi.call 'VM.set_VCPUs_number_live', ref, "#{CPUs}"
else
if CPUs > VM.CPUs.max
yield xapi.call 'VM.set_VCPUs_max', ref, "#{CPUs}"
yield xapi.call 'VM.set_VCPUs_at_startup', ref, "#{CPUs}"
# HA policy
# TODO: also handle "best-effort" case
if 'high_availability' of params
{high_availability} = params
if high_availability
yield xapi.call 'VM.set_ha_restart_priority', ref, "restart"
else
yield xapi.call 'VM.set_ha_restart_priority', ref, ""
if 'auto_poweron' of params
{auto_poweron} = params
if auto_poweron
yield xapi.call 'VM.add_to_other_config', ref, 'auto_poweron', 'true'
yield xapi.setPoolProperties({autoPowerOn: true})
else
yield xapi.call 'VM.remove_from_other_config', ref, 'auto_poweron'
if 'cpuWeight' of params
if resourceSet? and this.user.permission isnt 'admin'
if (limits.cpuWeight && this.user.permission != 'admin')
throw new Unauthorized()
yield xapi.setVcpuWeight(VM._xapiId, params.cpuWeight)
# Other fields.
for param, fields of {
'name_label'
'name_description'
'PV_args'
}
continue unless param of params
for field in (if $isArray fields then fields else [fields])
yield xapi.call "VM.set_#{field}", ref, "#{params[param]}"
return true
)
set.params = {
# Identifier of the VM to update.
@@ -509,15 +436,28 @@ set.params = {
# Number of virtual CPUs to allocate.
CPUs: { type: 'integer', optional: true }
cpusMax: { type: ['integer', 'string'], optional: true }
# Memory to allocate (in bytes).
#
# Note: static_min ≤ dynamic_min ≤ dynamic_max ≤ static_max
memory: { type: ['integer', 'string'], optional: true }
# Set dynamic_min
memoryMin: { type: ['integer', 'string'], optional: true }
# Set dynamic_max
memoryMax: { type: ['integer', 'string'], optional: true }
# Set static_max
memoryStaticMax: { type: ['integer', 'string'], optional: true }
# Kernel arguments for PV VM.
PV_args: { type: 'string', optional: true }
cpuWeight: { type: 'integer', optional: true}
cpuWeight: { type: ['integer', 'null'], optional: true }
cpuCap: { type: ['integer', 'null'], optional: true }
}
set.resolve = {
@@ -646,12 +586,12 @@ exports.convert = convertToTemplate
snapshot = $coroutine ({vm, name}) ->
yield checkPermissionOnSrs.call(this, vm)
snapshot = yield @getXapi(vm).snapshotVm(vm._xapiRef, name)
snapshot = yield @getXapi(vm).snapshotVm(vm._xapiRef, name ? "#{vm.name_label}_#{new Date().toISOString()}")
return snapshot.$id
snapshot.params = {
id: { type: 'string' }
name: { type: 'string' }
name: { type: 'string', optional: true }
}
snapshot.resolve = {
@@ -670,14 +610,14 @@ rollingDeltaBackup = $coroutine ({vm, remote, tag, depth}) ->
})
rollingDeltaBackup.params = {
vm: { type: 'string' }
id: { type: 'string' }
remote: { type: 'string' }
tag: { type: 'string'}
depth: { type: ['string', 'number'] }
}
rollingDeltaBackup.resolve = {
vm: ['vm', ['VM', 'VM-snapshot'], 'administrate']
vm: ['id', ['VM', 'VM-snapshot'], 'administrate']
}
rollingDeltaBackup.permission = 'admin'
@@ -708,12 +648,12 @@ exports.importDeltaBackup = importDeltaBackup
deltaCopy = ({ vm, sr }) -> @deltaCopyVm(vm, sr)
deltaCopy.params = {
vm: { type: 'string' },
id: { type: 'string' },
sr: { type: 'string' }
}
deltaCopy.resolve = {
vm: [ 'vm', 'VM', 'operate'],
vm: [ 'id', 'VM', 'operate'],
sr: [ 'sr', 'SR', 'operate']
}
@@ -764,9 +704,7 @@ exports.backup = backup
#---------------------------------------------------------------------
importBackup = $coroutine ({remote, file, sr}) ->
yield @importVmBackup(remote, file, sr)
return
importBackup = ({remote, file, sr}) -> @importVmBackup(remote, file, sr)
importBackup.permission = 'admin'
importBackup.description = 'Imports a VM into host, from a file found in the chosen remote'
@@ -816,21 +754,30 @@ exports.rollingBackup = rollingBackup
#---------------------------------------------------------------------
rollingDrCopy = ({vm, pool, tag, depth}) ->
if vm.$pool is pool.id
throw new GenericError('Disaster Recovery attempts to copy on the same pool')
return @rollingDrCopyVm({vm, sr: @getObject(pool.default_SR, 'SR'), tag, depth})
rollingDrCopy = ({vm, pool, sr, tag, depth}) ->
unless sr
unless pool
throw new InvalidParameters('either pool or sr param should be specified')
if vm.$pool is pool.id
throw new GenericError('Disaster Recovery attempts to copy on the same pool')
sr = @getObject(pool.default_SR, 'SR')
return @rollingDrCopyVm({vm, sr, tag, depth})
rollingDrCopy.params = {
id: { type: 'string' }
pool: { type: 'string' }
tag: { type: 'string'}
depth: { type: 'number' }
id: { type: 'string' }
pool: { type: 'string', optional: true }
sr: { type: 'string', optional: true }
tag: { type: 'string'}
}
rollingDrCopy.resolve = {
vm: ['id', ['VM', 'VM-snapshot'], 'administrate'],
pool: ['pool', 'pool', 'administrate']
sr: ['sr', 'SR', 'administrate']
}
rollingDrCopy.description = 'Copies a VM to a different pool, with a tagged name, and removes the oldest VM with the same tag from this pool, according to depth'
@@ -1074,8 +1021,6 @@ exports.attachDisk = attachDisk
#---------------------------------------------------------------------
# FIXME: position should be optional and default to last.
# TODO: implement resource sets
createInterface = $coroutine ({vm, network, position, mtu, mac}) ->
vif = yield @getXapi(vm).createVif(vm._xapiId, network._xapiId, {
@@ -1089,7 +1034,7 @@ createInterface = $coroutine ({vm, network, position, mtu, mac}) ->
createInterface.params = {
vm: { type: 'string' }
network: { type: 'string' }
position: { type: 'string' }
position: { type: 'string', optional: true }
mtu: { type: 'string', optional: true }
mac: { type: 'string', optional: true }
}

@@ -33,10 +33,6 @@ export default class Collection extends EventEmitter {
})
}
constructor () {
super()
}
async add (models, opts) {
const array = isArray(models)
if (!array) {

@@ -1,7 +1,7 @@
import Collection, {ModelAlreadyExists} from '../collection'
import difference from 'lodash.difference'
import filter from 'lodash.filter'
import getKey from 'lodash.keys'
import difference from 'lodash/difference'
import filter from 'lodash/filter'
import getKey from 'lodash/keys'
import {createClient as createRedisClient} from 'redis'
import {

@@ -1,4 +1,4 @@
import bind from 'lodash.bind'
import bind from 'lodash/bind'
import {
isArray,
@@ -31,34 +31,30 @@ export const autobind = (target, key, {
enumerable,
get () {
const bounded = bind(fn, this)
if (this === target) {
return fn
}
const bound = bind(fn, this)
defineProperty(this, key, {
configurable: true,
enumerable: false,
value: bounded,
value: bound,
writable: true
})
return bounded
return bound
},
set (newValue) {
if (this === target) {
// New value directly set on the prototype.
delete this[key]
this[key] = newValue
} else {
// New value set on a child object.
// Cannot use assignment because it will call the setter on
// the prototype.
defineProperty(this, key, {
configurable: true,
enumerable: true,
value: newValue,
writable: true
})
}
// Cannot use assignment because it will call the setter on
// the prototype.
defineProperty(this, key, {
configurable: true,
enumerable: true,
value: newValue,
writable: true
})
}
})
@@ -263,6 +259,31 @@ export const mixin = MixIns => Class => {
const { name } = Class
// Copy properties of plain object mix-ins to the prototype.
{
const allMixIns = MixIns
MixIns = []
const { prototype } = Class
const descriptors = { __proto__: null }
for (const MixIn of allMixIns) {
if (isFunction(MixIn)) {
MixIns.push(MixIn)
continue
}
for (const prop of _ownKeys(MixIn)) {
if (prop in prototype) {
throw new Error(`${name}#${prop} is already defined`)
}
(
descriptors[prop] = getOwnPropertyDescriptor(MixIn, prop)
).enumerable = false // Object methods are enumerable but class methods are not.
}
}
defineProperties(prototype, descriptors)
}
const Decorator = (...args) => {
const instance = new Class(...args)

@@ -1,13 +1,13 @@
import assign from 'lodash.assign'
import getStream from 'get-stream'
import startsWith from 'lodash.startswith'
import assign from 'lodash/assign'
import startsWith from 'lodash/startsWith'
import { parse as parseUrl } from 'url'
import { request as httpRequest } from 'http'
import { request as httpsRequest } from 'https'
import { stringify as formatQueryString } from 'querystring'
import {
isString
isString,
streamToBuffer
} from './utils'
// -------------------------------------------------------------------
@@ -90,7 +90,7 @@ export default (...args) => {
response.cancel = () => {
req.abort()
}
response.readAll = () => getStream(response)
response.readAll = () => streamToBuffer(response)
const length = response.headers['content-length']
if (length) {

@@ -2,20 +2,21 @@ import createLogger from 'debug'
const debug = createLogger('xo:main')
import appConf from 'app-conf'
import bind from 'lodash.bind'
import bind from 'lodash/bind'
import blocked from 'blocked'
import createExpress from 'express'
import eventToPromise from 'event-to-promise'
import has from 'lodash.has'
import has from 'lodash/has'
import helmet from 'helmet'
import includes from 'lodash.includes'
import pick from 'lodash.pick'
import includes from 'lodash/includes'
import pick from 'lodash/pick'
import proxyConsole from './proxy-console'
import proxyRequest from 'proxy-http-request'
import serveStatic from 'serve-static'
import startsWith from 'lodash.startswith'
import startsWith from 'lodash/startsWith'
import WebSocket from 'ws'
import {compile as compileJade} from 'jade'
import { compile as compilePug } from 'pug'
import { createServer as createProxyServer } from 'http-proxy'
import { join as joinPath } from 'path'
import {
AlreadyAuthenticated,
@@ -33,7 +34,6 @@ import {
import * as apiMethods from './api/index'
import Api from './api'
import WebServer from 'http-server-plus'
import wsProxy from './ws-proxy'
import Xo from './xo'
import {
setup as setupHttpProxy
@@ -129,8 +129,8 @@ async function setUpPassport (express, xo) {
}
// Registers the sign in form.
const signInPage = compileJade(
await readFile(__dirname + '/../signin.jade')
const signInPage = compilePug(
await readFile(joinPath(__dirname, '..', 'signin.pug'))
)
express.get('/signin', (req, res, next) => {
res.send(signInPage({
@@ -141,7 +141,8 @@ async function setUpPassport (express, xo) {
const SIGNIN_STRATEGY_RE = /^\/signin\/([^/]+)(\/callback)?(:?\?.*)?$/
express.use(async (req, res, next) => {
const matches = req.url.match(SIGNIN_STRATEGY_RE)
const { url } = req
const matches = url.match(SIGNIN_STRATEGY_RE)
if (matches) {
return passport.authenticate(matches[1], async (err, user, info) => {
@@ -167,7 +168,7 @@ async function setUpPassport (express, xo) {
matches[1] === 'local' && req.body['remember-me'] === 'on'
)
res.redirect('/')
res.redirect(req.flash('return-url')[0] || '/')
})(req, res, next)
}
@@ -187,9 +188,10 @@ async function setUpPassport (express, xo) {
next()
} else if (req.cookies.token) {
next()
} else if (/favicon|fontawesome|images|styles/.test(req.url)) {
} else if (/favicon|fontawesome|images|styles/.test(url)) {
next()
} else {
req.flash('return-url', url)
return res.redirect('/signin')
}
})
@@ -222,7 +224,8 @@ async function registerPlugin (pluginPath, pluginName) {
// Supports both “normal” CommonJS and Babel's ES2015 modules.
const {
default: factory = plugin,
configurationSchema
configurationSchema,
configurationPresets
} = plugin
// The default export can be either a factory or directly a plugin
@@ -235,6 +238,7 @@ async function registerPlugin (pluginPath, pluginName) {
pluginName,
instance,
configurationSchema,
configurationPresets,
version
)
}
@@ -337,13 +341,29 @@ const setUpProxies = (express, opts, xo) => {
return
}
const proxy = createProxyServer({
ignorePath: true
}).on('error', (error) => console.error(error))
// TODO: sort proxies by descending prefix length.
// HTTP request proxy.
forEach(opts, (target, url) => {
express.use(url, (req, res) => {
proxyRequest(target + req.url, req, res)
})
express.use((req, res, next) => {
const { url } = req
for (const prefix in opts) {
if (startsWith(url, prefix)) {
const target = opts[prefix]
proxy.web(req, res, {
target: target + url.slice(prefix.length)
})
return
}
}
next()
})
// WebSocket proxy.
@@ -353,14 +373,16 @@ const setUpProxies = (express, opts, xo) => {
xo.on('stop', () => pFromCallback(cb => webSocketServer.close(cb)))
express.on('upgrade', (req, socket, head) => {
const {url} = req
const { url } = req
for (let prefix in opts) {
if (url.lastIndexOf(prefix, 0) !== -1) {
const target = opts[prefix] + url.slice(prefix.length)
webSocketServer.handleUpgrade(req, socket, head, socket => {
wsProxy(socket, target)
for (const prefix in opts) {
if (startsWith(url, prefix)) {
const target = opts[prefix]
proxy.ws(req, socket, head, {
target: target + url.slice(prefix.length)
})
return
}
}
@@ -398,7 +420,7 @@ const apiHelpers = {
// Handles both properties and wrapped models.
const properties = user.properties || user
return pick(properties, 'id', 'email', 'groups', 'permission', 'provider')
return pick(properties, 'id', 'email', 'groups', 'permission', 'preferences', 'provider')
},
throw (errorId, data) {
@@ -494,7 +516,7 @@ const setUpConsoleProxy = (webServer, xo) => {
const { token } = parseCookies(req.headers.cookie)
const user = await xo.authenticateUser({ token })
if (!await xo.hasPermissions(user.id, [ [ id, 'operate' ] ])) { // eslint-disable-line space-before-keywords
if (!await xo.hasPermissions(user.id, [ [ id, 'operate' ] ])) {
throw new InvalidCredential()
}
@@ -512,8 +534,8 @@ const setUpConsoleProxy = (webServer, xo) => {
webSocketServer.handleUpgrade(req, socket, head, connection => {
proxyConsole(connection, vmConsole, xapi.sessionId)
})
} catch (_) {
console.error(_)
} catch (error) {
console.error(error && error.stack || error)
}
})
}
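
A minimal sketch of the new prefix-based proxying, assuming an illustrative `opts` map of URL prefixes to targets. Both the HTTP middleware and the `upgrade` handler now walk the same table and hand matching requests to a single http-proxy instance, which is what lets `proxy.web()` and `proxy.ws()` share configuration:

import express from 'express'
import startsWith from 'lodash/startsWith'
import { createServer as createProxyServer } from 'http-proxy'

const opts = { '/api/': 'http://localhost:9001' } // illustrative mapping
const proxy = createProxyServer({ ignorePath: true })
  .on('error', error => console.error(error))

const app = express()
app.use((req, res, next) => {
  const { url } = req
  for (const prefix in opts) {
    if (startsWith(url, prefix)) {
      // ignorePath is set, so the full target URL must be rebuilt here.
      proxy.web(req, res, { target: opts[prefix] + url.slice(prefix.length) })
      return
    }
  }
  next() // no prefix matched, fall through to the next middleware
})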

View File

@@ -1,4 +1,4 @@
import assign from 'lodash.assign'
import assign from 'lodash/assign'
import {BaseError} from 'make-error'
import {
@@ -141,7 +141,7 @@ export default class JobExecutor {
event: 'jobCall.end',
runJobId,
runCallId,
error: reason
error: {...reason, message: reason.message}
})
call.error = reason
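
The spread plus the explicit `message` is needed because `Error#message` (like `stack`) is a non-enumerable own property, so `{ ...reason }` alone would serialize to an empty object. A quick illustration in plain Node:

const reason = new Error('boom')
JSON.stringify({ ...reason })                           // -> '{}'
JSON.stringify({ ...reason, message: reason.message })  // -> '{"message":"boom"}'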

View File

@@ -3,8 +3,10 @@
import {expect} from 'chai'
import leche from 'leche'
import {productParams} from './job-executor'
import {_computeCrossProduct} from './job-executor'
import {
_computeCrossProduct,
productParams
} from './job-executor'
describe('productParams', function () {
leche.withData({

View File

@@ -1,5 +1,5 @@
import appConf from 'app-conf'
import get from 'lodash.get'
import get from 'lodash/get'
import highland from 'highland'
import levelup from 'level-party'
import ndjson from 'ndjson'

View File

@@ -15,13 +15,14 @@ export class Schedules extends Collection {
return 'schedule:'
}
create (userId, job, cron, enabled, name = undefined) {
create (userId, job, cron, enabled, name = undefined, timezone = undefined) {
return this.add(new Schedule({
userId,
job,
cron,
enabled,
name
name,
timezone
}))
}

View File

@@ -1,3 +1,5 @@
import isEmpty from 'lodash/isEmpty'
import Collection from '../collection/redis'
import Model from '../model'
import { forEach } from '../utils'
@@ -12,6 +14,18 @@ User.prototype.default = {
// -------------------------------------------------------------------
const parseProp = (obj, name) => {
const value = obj[name]
if (value == null) {
return
}
try {
return JSON.parse(value)
} catch (error) {
console.warn('cannot parse user[%s] (%s):', name, value, error)
}
}
export class Users extends Collection {
get Model () {
return User
@@ -35,7 +49,13 @@ export class Users extends Collection {
async save (user) {
// Serializes.
user.groups = JSON.stringify(user.groups)
let tmp
if (!isEmpty(tmp = user.groups)) {
user.groups = JSON.stringify(tmp)
}
if (!isEmpty(tmp = user.preferences)) {
user.preferences = JSON.stringify(tmp)
}
return /* await */ this.update(user)
}
@@ -45,13 +65,11 @@ export class Users extends Collection {
// Deserializes
forEach(users, user => {
const {groups} = user
try {
user.groups = groups ? JSON.parse(groups) : []
} catch (_) {
console.warn('cannot parse user.groups:', groups)
user.groups = []
}
let tmp
user.groups = ((tmp = parseProp(user, 'groups')) && tmp.length)
? tmp
: undefined
user.preferences = parseProp(user, 'preferences')
})
return users
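
A short sketch of what the new deserialization tolerates, with made-up field values; malformed JSON is logged and swallowed instead of corrupting the whole user record:

const raw = { preferences: '{"lang":"fr"}', groups: 'oops, not json' } // illustrative
parseProp(raw, 'preferences')  // -> { lang: 'fr' }
parseProp(raw, 'groups')       // -> undefined (a warning is logged, nothing throws)
parseProp(raw, 'missing')      // -> undefined (absent fields are simply skipped)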

View File

@@ -23,13 +23,19 @@ export default function proxyConsole (ws, vmConsole, sessionId) {
'', ''
].join('\r\n'))
const onSend = (error) => {
if (error) {
debug('error sending to the XO client: %s', error.stack || error.message || error)
}
}
socket.pipe(partialStream('\r\n\r\n', headers => {
// TODO: check status code 200.
debug('connected')
})).on('data', data => {
if (!closed) {
// Encode to base 64.
ws.send(data.toString('base64'))
ws.send(data.toString('base64'), onSend)
}
}).on('end', () => {
if (!closed) {

View File

@@ -1,5 +1,4 @@
import eventToPromise from 'event-to-promise'
import getStream from 'get-stream'
import through2 from 'through2'
import {
@@ -8,14 +7,16 @@ import {
import {
addChecksumToReadStream,
getPseudoRandomBytes,
noop,
pCatch,
streamToBuffer,
validChecksumOfReadStream
} from '../utils'
export default class RemoteHandlerAbstract {
constructor (remote) {
this._remote = parse({...remote})
this._remote = {...remote, ...parse(remote.url)}
if (this._remote.type !== this.type) {
throw new Error('Incorrect remote type')
}
@@ -47,12 +48,41 @@ export default class RemoteHandlerAbstract {
throw new Error('Not implemented')
}
async test () {
const testFileName = `${Date.now()}.test`
const data = getPseudoRandomBytes(1024 * 1024)
let step = 'write'
try {
await this.outputFile(testFileName, data)
step = 'read'
const read = await this.readFile(testFileName)
if (data.compare(read) !== 0) {
throw new Error('output and input did not match')
}
return {
success: true
}
} catch (error) {
return {
success: false,
step,
file: testFileName,
error: error.message || String(error)
}
} finally {
this.unlink(testFileName).catch(noop)
}
}
async outputFile (file, data, options) {
return this._outputFile(file, data, options)
return this._outputFile(file, data, {
flags: 'wx',
...options
})
}
async _outputFile (file, data, options) {
const stream = await this.createOutputStream(file)
const stream = await this.createOutputStream(file, options)
const promise = eventToPromise(stream, 'finish')
stream.end(data)
return promise
@@ -62,8 +92,8 @@ export default class RemoteHandlerAbstract {
return this._readFile(file, options)
}
async _readFile (file, options) {
return getStream(await this.createReadStream(file, options))
_readFile (file, options) {
return this.createReadStream(file, options).then(streamToBuffer)
}
async rename (oldPath, newPath) {
@@ -128,7 +158,10 @@ export default class RemoteHandlerAbstract {
checksum = false,
...options
} = {}) {
const streamP = this._createOutputStream(file, options)
const streamP = this._createOutputStream(file, {
flags: 'wx',
...options
})
if (!checksum) {
return streamP
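
A hedged usage sketch of the new `test()` helper, here on the local handler (the import path and remote URL are assumptions): it writes a 1 MiB random file, reads it back, compares the two buffers and always unlinks the test file:

import LocalHandler from './local'  // assumed path inside src/remote-handlers/

const handler = new LocalHandler({
  url: 'file:///var/lib/xo-remotes/test',  // illustrative remote
  enabled: true
})

handler.test().then(result => {
  if (result.success) {
    console.log('remote is usable')
  } else {
    // `step` says whether the write or the read failed.
    console.error(`remote test failed at ${result.step}: ${result.error}`)
  }
})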

View File

@@ -1,5 +1,5 @@
import fs from 'fs-promise'
import startsWith from 'lodash.startswith'
import startsWith from 'lodash/startsWith'
import {
dirname,
resolve
@@ -12,16 +12,21 @@ import {
export default class LocalHandler extends RemoteHandlerAbstract {
get type () {
return 'local'
return 'file'
}
_getRealPath () {
return this._remote.path
}
_getFilePath (file) {
const parts = [this._remote.path]
const realPath = this._getRealPath()
const parts = [realPath]
if (file) {
parts.push(file)
}
const path = resolve.apply(null, parts)
if (!startsWith(path, this._remote.path)) {
if (!startsWith(path, realPath)) {
throw new Error('Remote path is unavailable')
}
return path
@@ -30,8 +35,9 @@ export default class LocalHandler extends RemoteHandlerAbstract {
async _sync () {
if (this._remote.enabled) {
try {
await fs.ensureDir(this._remote.path)
await fs.access(this._remote.path, fs.R_OK | fs.W_OK)
const path = this._getRealPath()
await fs.ensureDir(path)
await fs.access(path, fs.R_OK | fs.W_OK)
} catch (exc) {
this._remote.enabled = false
this._remote.error = exc.message
@@ -47,7 +53,7 @@ export default class LocalHandler extends RemoteHandlerAbstract {
async _outputFile (file, data, options) {
const path = this._getFilePath(file)
await fs.ensureDir(dirname(path))
await fs.writeFile(this._getFilePath(file), data, options)
await fs.writeFile(path, data, options)
}
async _readFile (file, options) {

View File

@@ -11,11 +11,15 @@ export default class NfsHandler extends LocalHandler {
return 'nfs'
}
_getRealPath () {
return `/tmp/xo-server/mounts/${this._remote.id}`
}
async _loadRealMounts () {
let stdout
const mounted = {}
try {
({stdout} = await execa('findmnt', ['-P', '-t', 'nfs,nfs4', '--output', 'SOURCE,TARGET', '--noheadings']))
stdout = await execa.stdout('findmnt', ['-P', '-t', 'nfs,nfs4', '--output', 'SOURCE,TARGET', '--noheadings'])
const regex = /^SOURCE="([^:]*):(.*)" TARGET="(.*)"$/
forEach(stdout.split('\n'), m => {
if (m) {
@@ -37,27 +41,27 @@ export default class NfsHandler extends LocalHandler {
return mounted
}
_matchesRealMount (remote) {
return remote.path in this._realMounts
_matchesRealMount () {
return this._getRealPath() in this._realMounts
}
async _mount (remote) {
await fs.ensureDir(remote.path)
return execa('mount', ['-t', 'nfs', '-o', 'vers=3', `${remote.host}:/${remote.share}`, remote.path])
async _mount () {
await fs.ensureDir(this._getRealPath())
return execa('mount', ['-t', 'nfs', '-o', 'vers=3', `${this._remote.host}:${this._remote.path}`, this._getRealPath()])
}
async _sync () {
await this._loadRealMounts()
if (this._matchesRealMount(this._remote) && !this._remote.enabled) {
if (this._matchesRealMount() && !this._remote.enabled) {
try {
await this._umount(this._remote)
} catch (exc) {
this._remote.enabled = true
this._remote.error = exc.message
}
} else if (!this._matchesRealMount(this._remote) && this._remote.enabled) {
} else if (!this._matchesRealMount() && this._remote.enabled) {
try {
await this._mount(this._remote)
await this._mount()
} catch (exc) {
this._remote.enabled = false
this._remote.error = exc.message

View File

@@ -54,6 +54,11 @@ export default class SmbHandler extends RemoteHandlerAbstract {
? this._remote.path
: ''
// Ensure remote path is a directory.
if (path !== '' && path[path.length - 1] !== '\\') {
path += '\\'
}
if (file) {
path += file.replace(/\//g, '\\')
}

src/schemas/user.js (new file, 50 lines)
View File

@@ -0,0 +1,50 @@
export default {
$schema: 'http://json-schema.org/draft-04/schema#',
type: 'object',
properties: {
id: {
type: 'string',
description: 'unique identifier for this user'
},
email: {
type: 'string',
description: 'email address of this user'
},
groups: {
type: 'array',
items: {
type: 'string'
},
description: 'identifiers of the groups this user belongs to'
},
permission: {
enum: ['none', 'read', 'write', 'admin'],
description: 'root permission for this user, none and admin are the only significant ones'
},
preferences: {
type: 'object',
properties: {
lang: { type: 'string' },
sshKeys: {
type: 'array',
items: {
type: 'object',
properties: {
key: { type: 'string' },
title: { type: 'string' }
},
required: [
'key',
'title'
]
}
}
},
description: 'various user preferences'
}
},
required: [
'id',
'email'
]
}
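
An illustrative document that validates against this schema (all values are made up):

const exampleUser = {
  id: '0b1f7c23',
  email: 'jane@example.org',
  groups: ['group-admins'],
  permission: 'admin',
  preferences: {
    lang: 'en',
    sshKeys: [
      { title: 'laptop', key: 'ssh-rsa AAAAB3Nza jane@laptop' }
    ]
  }
}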

View File

@@ -1,15 +1,23 @@
import base64url from 'base64url'
import eventToPromise from 'event-to-promise'
import forEach from 'lodash.foreach'
import has from 'lodash.has'
import forEach from 'lodash/forEach'
import getStream from 'get-stream'
import has from 'lodash/has'
import highland from 'highland'
import humanFormat from 'human-format'
import invert from 'lodash.invert'
import isArray from 'lodash.isarray'
import isString from 'lodash.isstring'
import invert from 'lodash/invert'
import isArray from 'lodash/isArray'
import isString from 'lodash/isString'
import keys from 'lodash/keys'
import kindOf from 'kindof'
import multiKeyHashInt from 'multikey-hash'
import xml2js from 'xml2js'
// Moment timezone can be loaded only one time, it's a workaround to load
// the latest version because cron module uses an old version of moment which
// does not implement `guess` function for example.
import 'moment-timezone'
import { CronJob } from 'cron'
import {
all as pAll,
@@ -45,17 +53,7 @@ export function bufferToStream (buf) {
return stream
}
export async function streamToBuffer (stream) {
return new Promise((resolve, reject) => {
const bufs = []
stream.on('error', reject)
stream.on('data', data => {
bufs.push(data)
})
stream.on('end', () => resolve(Buffer.concat(bufs)))
})
}
export const streamToBuffer = getStream.buffer
// -------------------------------------------------------------------
@@ -179,7 +177,7 @@ export function extractProperty (obj, prop) {
// -------------------------------------------------------------------
export const generateUnsecureToken = (n = 32) => {
export const getPseudoRandomBytes = n => {
const bytes = new Buffer(n)
const odd = n & 1
@@ -191,9 +189,11 @@ export const generateUnsecureToken = (n = 32) => {
bytes.writeUInt8(Math.random() * 256 | 0, n - 1)
}
return base64url(bytes)
return bytes
}
export const generateUnsecureToken = (n = 32) => base64url(getPseudoRandomBytes(n))
// Generate a secure random Base64 string.
export const generateToken = (randomBytes => {
return (n = 32) => randomBytes(n).then(base64url)
@@ -239,21 +239,30 @@ export const parseXml = (function () {
// - methods are already bound and chainable
export const lightSet = collection => {
const data = createRawObject()
collection && forEach(collection, value => {
data[value] = true
})
collection = null
if (collection) {
forEach(collection, value => {
data[value] = true
})
collection = null
}
const set = {
add: value => (data[value] = true, set),
add: value => {
data[value] = true
return set
},
clear: () => {
for (const value in data) {
delete data[value]
}
return set
},
delete: value => (delete data[value], set),
has: value => data[value]
delete: value => {
delete data[value]
return set
},
has: value => data[value],
toArray: () => keys(data)
}
return set
}
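
The reworked `lightSet` keeps the chainable API and gains `toArray()`; a short usage sketch:

const set = lightSet(['a', 'b'])
set.add('c').delete('a')
set.has('b')           // -> true
set.has('a')           // -> undefined (falsy)
set.toArray()          // -> ['b', 'c']
set.clear().toArray()  // -> []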
@@ -306,7 +315,7 @@ export function pSettle (promises) {
// -------------------------------------------------------------------
export {
export { // eslint-disable-line no-duplicate-imports
all as pAll,
catchPlus as pCatch,
delay as pDelay,
@@ -359,16 +368,16 @@ export const safeDateFormat = d3TimeFormat('%Y%m%dT%H%M%SZ')
// This functions are often used throughout xo-server.
//
// Exports them from here to avoid direct dependencies on lodash.
export { default as forEach } from 'lodash.foreach'
export { default as isArray } from 'lodash.isarray'
export { default as isBoolean } from 'lodash.isboolean'
export { default as isEmpty } from 'lodash.isempty'
export { default as isFunction } from 'lodash.isfunction'
export { default as isInteger } from 'lodash.isinteger'
export { default as isObject } from 'lodash.isobject'
export { default as isString } from 'lodash.isstring'
export { default as mapToArray } from 'lodash.map'
// Exports them from here to avoid direct dependencies on lodash/
export { default as forEach } from 'lodash/forEach' // eslint-disable-line no-duplicate-imports
export { default as isArray } from 'lodash/isArray' // eslint-disable-line no-duplicate-imports
export { default as isBoolean } from 'lodash/isBoolean'
export { default as isEmpty } from 'lodash/isEmpty'
export { default as isFunction } from 'lodash/isFunction'
export { default as isInteger } from 'lodash/isInteger'
export { default as isObject } from 'lodash/isObject'
export { default as isString } from 'lodash/isString' // eslint-disable-line no-duplicate-imports
export { default as mapToArray } from 'lodash/map'
// -------------------------------------------------------------------
@@ -436,27 +445,30 @@ export const streamToArray = (stream, {
// -------------------------------------------------------------------
export const scheduleFn = (cronPattern, fn) => {
export const scheduleFn = (cronTime, fn, timeZone) => {
let running = false
const job = new CronJob(cronPattern, async () => {
if (running) {
return
}
const job = new CronJob({
cronTime,
onTick: async () => {
if (running) {
return
}
running = true
running = true
try {
await fn()
} catch (error) {
console.error('[WARN] scheduled function:', error && error.stack || error)
} finally {
running = false
}
try {
await fn()
} catch (error) {
console.error('[WARN] scheduled function:', error && error.stack || error)
} finally {
running = false
}
},
start: true,
timeZone
})
job.start()
return () => {
job.stop()
}
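
A usage sketch of the timezone-aware `scheduleFn` (the cron expression, timezone and job are examples): overlapping runs are skipped thanks to the `running` flag, and the returned function stops the underlying CronJob:

const stop = scheduleFn('0 2 * * *', async () => {
  // runs every day at 02:00 in the given timezone
  await runNightlyBackup()  // hypothetical job
}, 'Europe/Paris')

// later, e.g. when the schedule is disabled:
stop()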

View File

@@ -1,7 +1,6 @@
/* eslint-env mocha */
import expect from 'must'
import sinon from 'sinon'
// ===================================================================
@@ -13,7 +12,6 @@ import {
formatXml,
generateToken,
parseSize,
pFinally,
pSettle
} from './utils'
@@ -144,36 +142,6 @@ describe('parseSize()', function () {
// -------------------------------------------------------------------
describe('pFinally()', () => {
it('calls a callback on resolution', async () => {
const value = {}
const spy = sinon.spy()
await expect(
Promise.resolve(value)::pFinally(spy)
).to.resolve.to.equal(
value
)
expect(spy.callCount).to.equal(1)
})
it('calls a callback on rejection', async () => {
const reason = {}
const spy = sinon.spy()
await expect(
Promise.reject(reason)::pFinally(spy)
).to.reject.to.equal(
reason
)
expect(spy.callCount).to.equal(1)
})
})
// -------------------------------------------------------------------
describe('pSettle()', () => {
it('works with arrays', async () => {
const [

View File

@@ -528,12 +528,12 @@ export default async function vhdMerge (
// Child must be a delta.
if (childVhd.footer.diskType !== HARD_DISK_TYPE_DIFFERENCING) {
throw new Error(`Unable to merge, child is not a delta backup.`)
throw new Error('Unable to merge, child is not a delta backup.')
}
// Merging in differencing disk is prohibited in our case.
if (parentVhd.footer.diskType !== HARD_DISK_TYPE_DYNAMIC) {
throw new Error(`Unable to merge, parent is not a full backup.`)
throw new Error('Unable to merge, parent is not a full backup.')
}
// Allocation table map is not yet implemented.
@@ -541,7 +541,7 @@ export default async function vhdMerge (
parentVhd.hasBlockAllocationTableMap() ||
childVhd.hasBlockAllocationTableMap()
) {
throw new Error(`Unsupported allocation table map.`)
throw new Error('Unsupported allocation table map.')
}
// Read allocation table of child/parent.

View File

@@ -1,53 +0,0 @@
import createDebug from 'debug'
import WebSocket from 'ws'
const debug = createDebug('xo:wsProxy')
const defaults = {
// Automatically close the client connection when the remote close.
autoClose: true
}
// Proxy a WebSocket `client` to a remote server which has `url` as
// address.
export default function wsProxy (client, url, opts) {
opts = {
...defaults,
protocol: client.protocol,
...opts
}
const autoClose = !!opts.autoClose
delete opts.autoClose
function onClientSend (error) {
if (error) {
debug('client send error', error)
}
}
function onRemoteSend (error) {
if (error) {
debug('remote send error', error)
}
}
const remote = new WebSocket(url, opts).once('open', function () {
debug('connected to %s', url)
}).once('close', function () {
debug('remote closed')
if (autoClose) {
client.close()
}
}).once('error', function (error) {
debug('remote error: %s', error)
}).on('message', function (message) {
client.send(message, onClientSend)
})
client.once('close', function () {
debug('client closed')
remote.close()
}).on('message', function (message) {
remote.send(message, onRemoteSend)
})
}

View File

@@ -35,18 +35,28 @@ function link (obj, prop, idField = '$id') {
// Parse a string date time to a Unix timestamp (in seconds).
//
// If the value is a number or can be converted as one, it is assumed
// to already be a timestamp and returned.
//
// If there are no data or if the timestamp is 0, returns null.
function toTimestamp (date) {
if (!date) {
return null
}
const ms = parseDateTime(date).getTime()
const timestamp = +date
// Not NaN.
if (timestamp === timestamp) { // eslint-disable-line no-self-compare
return timestamp
}
const ms = parseDateTime(date)
if (!ms) {
return null
}
return Math.round(ms / 1000)
return Math.round(ms.getTime() / 1000)
}
// ===================================================================
@@ -85,16 +95,23 @@ const TRANSFORMS = {
const isRunning = isHostRunning(obj)
return {
// Deprecated
CPUs: obj.cpu_info,
address: obj.address,
bios_strings: obj.bios_strings,
build: obj.software_version.build_number,
CPUs: obj.cpu_info,
enabled: Boolean(obj.enabled),
cpus: {
cores: +obj.cpu_info.cpu_count,
sockets: +obj.cpu_info.socket_count
},
current_operations: obj.current_operations,
hostname: obj.hostname,
iSCSI_name: otherConfig.iscsi_iqn || null,
license_params: obj.license_params,
license_server: obj.license_server,
license_expiry: toTimestamp(obj.license_params.expiry),
name_description: obj.name_description,
name_label: obj.name_label,
memory: (function () {
@@ -110,6 +127,9 @@ const TRANSFORMS = {
return {
usage: 0,
size: 0,
// Deprecated
total: 0
}
})(),
@@ -118,6 +138,8 @@ const TRANSFORMS = {
power_state: metrics
? (isRunning ? 'Running' : 'Halted')
: 'Unknown',
startTime: toTimestamp(otherConfig.boot_time),
agentStartTime: toTimestamp(otherConfig.agent_start_time),
tags: obj.tags,
version: obj.software_version.product_version,
@@ -152,6 +174,25 @@ const TRANSFORMS = {
const isHvm = isVmHvm(obj)
const isRunning = isVmRunning(obj)
const xenTools = (() => {
if (!isRunning || !metrics) {
// Unknown status, returns nothing.
return
}
if (!guestMetrics) {
return false
}
const { PV_drivers_version: { major, minor } } = guestMetrics
if (major === undefined || minor === undefined) {
return false
}
return guestMetrics.PV_drivers_up_to_date
? 'up to date'
: 'out of date'
})()
const vm = {
// type is redefined after for controllers/, templates &
@@ -164,7 +205,7 @@ const TRANSFORMS = {
CPUs: {
max: +obj.VCPUs_max,
number: (
isRunning && metrics
isRunning && metrics && xenTools
? +metrics.VCPUs_number
: +obj.VCPUs_at_startup
)
@@ -231,6 +272,7 @@ const TRANSFORMS = {
os_version: guestMetrics && guestMetrics.os_version || null,
power_state: obj.power_state,
snapshots: link(obj, 'snapshots'),
startTime: metrics && toTimestamp(metrics.start_time),
tags: obj.tags,
VIFs: link(obj, 'VIFs'),
virtualizationMode: isHvm ? 'hvm' : 'pv',
@@ -241,25 +283,7 @@ const TRANSFORMS = {
// - false: not optimized
// - 'out of date': optimized but drivers should be updated
// - 'up to date': optimized
xenTools: (() => {
if (!isRunning || !metrics) {
// Unknown status, returns nothing.
return
}
if (!guestMetrics) {
return false
}
const { PV_drivers_version: { major, minor } } = guestMetrics
if (major === undefined || minor === undefined) {
return false
}
return guestMetrics.PV_drivers_up_to_date
? 'up to date'
: 'out of date'
})(),
xenTools,
$container: (
isRunning
@@ -303,7 +327,7 @@ const TRANSFORMS = {
return disks
})(),
install_methods: (function () {
const {['install-methods']: methods} = otherConfig
const methods = otherConfig['install-methods']
return methods ? methods.split(',') : []
})(),
@@ -311,8 +335,10 @@ const TRANSFORMS = {
}
}
if (obj.VCPUs_params && obj.VCPUs_params.weight) {
vm.cpuWeight = obj.VCPUs_params.weight
let tmp
if ((tmp = obj.VCPUs_params)) {
tmp.cap && (vm.cpuCap = +tmp.cap)
tmp.weight && (vm.cpuWeight = +tmp.weight)
}
if (!isHvm) {
@@ -342,9 +368,9 @@ const TRANSFORMS = {
VDIs: link(obj, 'VDIs'),
$container: (
obj.shared
obj.shared || !obj.$PBDs[0]
? link(obj, 'pool')
: obj.$PBDs[0] && link(obj.$PBDs[0], 'host')
: link(obj.$PBDs[0], 'host')
),
$PBDs: link(obj, 'PBDs')
}
@@ -440,6 +466,8 @@ const TRANSFORMS = {
return {
type: 'VIF',
allowedIpv4Addresses: obj.ipv4_allowed,
allowedIpv6Addresses: obj.ipv6_allowed,
attached: Boolean(obj.currently_attached),
device: obj.device, // TODO: should it be cast to a number?
MAC: obj.MAC,
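
The reworked `toTimestamp` now accepts values that are already Unix timestamps as well as XAPI date strings; a few illustrative inputs:

toTimestamp('1469716800')          // -> 1469716800 (numeric string, returned as is)
toTimestamp('20160728T14:40:00Z')  // -> 1469716800 (parsed by parseDateTime, converted to seconds)
toTimestamp(undefined)             // -> null
toTimestamp(0)                     // -> null (a zero timestamp is treated as no data)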

View File

@@ -1,4 +1,4 @@
import endsWith from 'lodash.endswith'
import endsWith from 'lodash/endsWith'
import JSON5 from 'json5'
import { BaseError } from 'make-error'
@@ -32,11 +32,7 @@ export class UnknownLegendFormat extends XapiStatsError {
}
}
export class FaultyGranularity extends XapiStatsError {
constructor (msg) {
super(msg)
}
}
export class FaultyGranularity extends XapiStatsError {}
// -------------------------------------------------------------------
// Utils
@@ -289,9 +285,10 @@ export default class XapiStats {
// Load
hostStats.load.push(convertNanToNull(values[hostLegends.load]))
// Memory
const memory = values[hostLegends.memory]
const memoryFree = values[hostLegends.memoryFree]
// Memory.
// WARNING! memory/memoryFree are in kB.
const memory = values[hostLegends.memory] * 1024
const memoryFree = values[hostLegends.memoryFree] * 1024
hostStats.memory.push(memory)
@@ -405,19 +402,24 @@ export default class XapiStats {
}
_getPoints (hostname, step, vmId) {
const hostStats = this._hosts[hostname][step]
// Return host points
if (vmId === undefined) {
return this._hosts[hostname][step]
return {
interval: step,
...hostStats
}
}
const vmsStats = this._vms[hostname][step]
// Return vm points
const points = { endTimestamp: this._hosts[hostname][step].endTimestamp }
if (this._vms[hostname][step] !== undefined) {
points.stats = this._vms[hostname][step][vmId]
return {
interval: step,
endTimestamp: hostStats.endTimestamp,
stats: (vmsStats && vmsStats[vmId]) || getNewVmStats()
}
return points
}
async _getAndUpdatePoints (xapi, host, vmId, granularity) {
@@ -528,6 +530,11 @@ export default class XapiStats {
async getVmPoints (xapi, vmId, granularity) {
const vm = xapi.getObject(vmId)
const host = vm.$resident_on
if (!host) {
throw new Error(`VM ${vmId} is halted or host could not be found.`)
}
return this._getAndUpdatePoints(xapi, host, vm.uuid, granularity)
}
}

View File

@@ -1,14 +1,13 @@
/* eslint-disable camelcase */
import createDebug from 'debug'
import every from 'lodash.every'
import every from 'lodash/every'
import fatfs from 'fatfs'
import fatfsBuffer, { init as fatfsBufferInit } from './fatfs-buffer'
import find from 'lodash.find'
import includes from 'lodash.includes'
// import isFinite from 'lodash.isfinite'
import pickBy from 'lodash.pickby'
import sortBy from 'lodash.sortby'
import find from 'lodash/find'
import includes from 'lodash/includes'
import sortBy from 'lodash/sortBy'
import unzip from 'julien-f-unzip'
import { utcFormat, utcParse } from 'd3-time-format'
import { defer } from 'promise-toolbox'
import {
wrapError as wrapXapiError,
Xapi as XapiBase
@@ -17,22 +16,21 @@ import {
satisfies as versionSatisfies
} from 'semver'
import httpRequest from './http-request'
import httpRequest from '../http-request'
import fatfsBuffer, { init as fatfsBufferInit } from '../fatfs-buffer'
import {
debounce,
deferrable
} from './decorators'
import httpProxy from './http-proxy'
deferrable,
mixin
} from '../decorators'
import httpProxy from '../http-proxy'
import {
bufferToStream,
camelToSnakeCase,
createRawObject,
ensureArray,
forEach,
isBoolean,
isFunction,
isInteger,
isObject,
map,
mapToArray,
noop,
@@ -43,11 +41,24 @@ import {
pFinally,
promisifyAll,
pSettle
} from './utils'
} from '../utils'
import {
GenericError,
ForbiddenOperation
} from './api-errors'
} from '../api-errors'
import mixins from './mixins'
import {
asBoolean,
asInteger,
extractOpaqueRef,
filterUndefineds,
getNamespaceForType,
isVmHvm,
isVmRunning,
optional,
prepareXapiParam
} from './utils'
const debug = createDebug('xo:xapi')
@@ -58,15 +69,6 @@ const TAG_COPY_SRC = 'xo:copy_of'
// ===================================================================
const OPAQUE_REF_RE = /OpaqueRef:[0-9a-z-]+/
function extractOpaqueRef (str) {
const matches = OPAQUE_REF_RE.exec(str)
if (!matches) {
throw new Error('no opaque ref found')
}
return matches[0]
}
// HTTP put, use an ugly hack if the length is not known because XAPI
// does not support chunk encoding.
const put = (stream, {
@@ -104,88 +106,11 @@ const put = (stream, {
return makeRequest().readAll()
}
const asBoolean = value => Boolean(value)
// const asFloat = value => {
// value = String(value)
// return value.indexOf('.') === -1
// ? `${value}.0`
// : value
// }
const asInteger = value => String(value)
const filterUndefineds = obj => pickBy(obj, value => value !== undefined)
const prepareXapiParam = param => {
// if (isFinite(param) && !isInteger(param)) {
// return asFloat(param)
// }
if (isInteger(param)) {
return asInteger(param)
}
if (isBoolean(param)) {
return asBoolean(param)
}
if (isObject(param)) {
return map(filterUndefineds(param), prepareXapiParam)
}
return param
}
// ===================================================================
const typeToNamespace = createRawObject()
forEach([
'Bond',
'DR_task',
'GPU_group',
'PBD',
'PCI',
'PGPU',
'PIF',
'PIF_metrics',
'SM',
'SR',
'VBD',
'VBD_metrics',
'VDI',
'VGPU',
'VGPU_type',
'VLAN',
'VM',
'VM_appliance',
'VM_guest_metrics',
'VM_metrics',
'VMPP',
'VTPM'
], namespace => {
typeToNamespace[namespace.toLowerCase()] = namespace
})
// Object types given by `xen-api` are always lowercase but the
// namespaces in the Xen API can have a different casing.
const getNamespaceForType = (type) => typeToNamespace[type] || type
// ===================================================================
// Format a date (pseudo ISO 8601) from one XenServer get by
// xapi.call('host.get_servertime', host.$ref) for example
export const formatDateTime = utcFormat('%Y%m%dT%H:%M:%SZ')
export const parseDateTime = utcParse('%Y%m%dT%H:%M:%SZ')
export const isHostRunning = (host) => {
const {$metrics: metrics} = host
return metrics && metrics.live
}
const VM_RUNNING_POWER_STATES = {
Running: true,
Paused: true
}
export const isVmRunning = (vm) => VM_RUNNING_POWER_STATES[vm.power_state]
export const isVmHvm = (vm) => Boolean(vm.HVM_boot_policy)
// FIXME: remove this work around when fixed, https://phabricator.babeljs.io/T2877
// export * from './utils'
require('lodash/assign')(module.exports, require('./utils'))
// VDI formats. (Raw is not available for delta vdi.)
export const VDI_FORMAT_VHD = 'vhd'
@@ -193,10 +118,20 @@ export const VDI_FORMAT_RAW = 'raw'
// ===================================================================
@mixin(mapToArray(mixins))
export default class Xapi extends XapiBase {
constructor (...args) {
super(...args)
// Patch getObject to resolve _xapiId property.
this.getObject = (getObject => (...args) => {
let tmp
if ((tmp = args[0]) != null && (tmp = tmp._xapiId) != null) {
args[0] = tmp
}
return getObject.apply(this, args)
})(this.getObject)
const genericWatchers = this._genericWatchers = createRawObject()
const objectsWatchers = this._objectWatchers = createRawObject()
const taskWatchers = this._taskWatchers = createRawObject()
@@ -243,6 +178,16 @@ export default class Xapi extends XapiBase {
this.objects.on('update', onAddOrUpdate)
}
call (...args) {
const fn = super.call
const loop = () => fn.apply(this, args)::pCatch({
code: 'TOO_MANY_PENDING_TASKS'
}, () => pDelay(5e3).then(loop))
return loop()
}
// =================================================================
_registerGenericWatcher (fn) {
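
A stripped-down sketch of the retry pattern used by the overridden `call` (import path as in xo-server's src/, error code and delay as in the diff): the call is repeated every 5 seconds for as long as XAPI rejects it with TOO_MANY_PENDING_TASKS.

import { pCatch, pDelay } from './utils'

const callWithRetry = (xapi, ...args) => {
  const loop = () => xapi.call(...args)::pCatch(
    { code: 'TOO_MANY_PENDING_TASKS' },  // only this specific XAPI error is retried
    () => pDelay(5e3).then(loop)
  )
  return loop()
}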
@@ -264,8 +209,7 @@ export default class Xapi extends XapiBase {
// TODO: implements a timeout.
_waitObject (predicate) {
if (isFunction(predicate)) {
let resolve
const promise = new Promise(resolve_ => resolve = resolve_)
const { promise, resolve } = defer()
const unregister = this._registerGenericWatcher(obj => {
if (predicate(obj)) {
@@ -280,10 +224,7 @@ export default class Xapi extends XapiBase {
let watcher = this._objectWatchers[predicate]
if (!watcher) {
let resolve
const promise = new Promise(resolve_ => {
resolve = resolve_
})
const { promise, resolve } = defer()
// Register the watcher.
watcher = this._objectWatchers[predicate] = {
@@ -295,10 +236,26 @@ export default class Xapi extends XapiBase {
return watcher.promise
}
// Wait for an object to be in a given state.
//
// Faster than _waitObject() with a function.
_waitObjectState (idOrUuidOrRef, predicate) {
const object = this.getObject(idOrUuidOrRef, null)
if (object && predicate(object)) {
return object
}
const loop = () => this._waitObject(idOrUuidOrRef).then(
(object) => predicate(object) ? object : loop()
)
return loop()
}
// Returns the objects if already presents or waits for it.
async _getOrWaitObject (idOrUuidOrRef) {
return (
this.getObject(idOrUuidOrRef, undefined) ||
this.getObject(idOrUuidOrRef, null) ||
this._waitObject(idOrUuidOrRef)
)
}
@@ -326,18 +283,8 @@ export default class Xapi extends XapiBase {
let watcher = this._taskWatchers[ref]
if (!watcher) {
let resolve, reject
const promise = new Promise((resolve_, reject_) => {
resolve = resolve_
reject = reject_
})
// Register the watcher.
watcher = this._taskWatchers[ref] = {
promise,
resolve,
reject
}
watcher = this._taskWatchers[ref] = defer()
}
return watcher.promise
@@ -345,7 +292,15 @@ export default class Xapi extends XapiBase {
// =================================================================
async _setObjectProperties (object, props) {
_setObjectProperty (object, name, value) {
return this.call(
`${getNamespaceForType(object.$type)}.set_${camelToSnakeCase(name)}`,
object.$ref,
prepareXapiParam(value)
)
}
_setObjectProperties (object, props) {
const {
$ref: ref,
$type: type
@@ -355,11 +310,11 @@ export default class Xapi extends XapiBase {
// TODO: the thrown error should contain the name of the
// properties that failed to be set.
await Promise.all(mapToArray(props, (value, name) => {
return Promise.all(mapToArray(props, (value, name) => {
if (value != null) {
return this.call(`${namespace}.set_${camelToSnakeCase(name)}`, ref, value)
return this.call(`${namespace}.set_${camelToSnakeCase(name)}`, ref, prepareXapiParam(value))
}
}))
}))::pCatch(noop)
}
async _updateObjectMapProperty (object, prop, values) {
@@ -408,8 +363,8 @@ export default class Xapi extends XapiBase {
nameLabel,
nameDescription
}),
this._updateObjectMapProperty(pool, 'other_config', {
autoPoweron
autoPoweron != null && this._updateObjectMapProperty(pool, 'other_config', {
autoPoweron: autoPoweron ? 'true' : null
})
])
}
@@ -500,7 +455,7 @@ export default class Xapi extends XapiBase {
const resolveVersionPatches = function (uuids) {
const versionPatches = createRawObject()
forEach(uuids, ({uuid}) => {
forEach(ensureArray(uuids), ({uuid}) => {
versionPatches[uuid] = patches[uuid]
})
@@ -819,9 +774,9 @@ export default class Xapi extends XapiBase {
// If a SR is specified, it will contains the copies of the VDIs,
// otherwise they will use the SRs they are on.
async _copyVm (vm, nameLabel = vm.name_label, sr = undefined) {
let snapshotRef
let snapshot
if (isVmRunning(vm)) {
snapshotRef = await this._snapshotVm(vm)
snapshot = await this._snapshotVm(vm)
}
debug(`Copying VM ${vm.name_label}${
@@ -837,46 +792,17 @@ export default class Xapi extends XapiBase {
try {
return await this.call(
'VM.copy',
snapshotRef || vm.$ref,
snapshot ? snapshot.$ref : vm.$ref,
nameLabel,
sr ? sr.$ref : ''
)
} finally {
if (snapshotRef) {
await this._deleteVm(
await this._getOrWaitObject(snapshotRef),
true
)
if (snapshot) {
await this._deleteVm(snapshot, true)
}
}
}
async _snapshotVm (vm, nameLabel = vm.name_label) {
debug(`Snapshotting VM ${vm.name_label}${
nameLabel !== vm.name_label
? ` as ${nameLabel}`
: ''
}`)
let ref
try {
ref = await this.call('VM.snapshot_with_quiesce', vm.$ref, nameLabel)
this.addTag(ref, 'quiesce')::pCatch(noop) // ignore any failures
} catch (error) {
if (
error.code !== 'VM_SNAPSHOT_WITH_QUIESCE_NOT_SUPPORTED' &&
error.code !== 'VM_BAD_POWER_STATE' // quiesce only work on a running VM
) {
throw error
}
ref = await this.call('VM.snapshot', vm.$ref, nameLabel)
}
// Convert the template to a VM.
await this.call('VM.set_is_a_template', ref, false)
return ref
}
async cloneVm (vmId, {
nameLabel = undefined,
fast = true
@@ -938,7 +864,7 @@ export default class Xapi extends XapiBase {
}
// Low level create VM.
_createVm ({
_createVmRecord ({
actions_after_crash,
actions_after_reboot,
actions_after_shutdown,
@@ -948,6 +874,7 @@ export default class Xapi extends XapiBase {
generation_id,
ha_always_run,
ha_restart_priority,
has_vendor_device = false, // Avoid issue with some Dundee builds.
hardware_platform_version,
HVM_boot_params,
HVM_boot_policy,
@@ -1016,11 +943,12 @@ export default class Xapi extends XapiBase {
generation_id,
ha_always_run: asBoolean(ha_always_run),
ha_restart_priority,
hardware_platform_version,
has_vendor_device,
hardware_platform_version: optional(hardware_platform_version, asInteger),
// HVM_shadow_multiplier: asFloat(HVM_shadow_multiplier), // FIXME: does not work FIELD_TYPE_ERROR(hVM_shadow_multiplier)
name_description,
name_label,
order,
order: optional(order, asInteger),
protection_policy,
shutdown_delay: asInteger(shutdown_delay),
start_delay: asInteger(start_delay),
@@ -1030,161 +958,6 @@ export default class Xapi extends XapiBase {
}))
}
// TODO: clean up on error.
async createVm (templateId, {
nameDescription = undefined,
nameLabel = undefined,
pvArgs = undefined,
cpus = undefined,
installRepository = undefined,
vdis = undefined,
vifs = undefined,
existingVdis = undefined
} = {}) {
const installMethod = (() => {
if (installRepository == null) {
return 'none'
}
try {
installRepository = this.getObject(installRepository)
return 'cd'
} catch (_) {
return 'network'
}
})()
const template = this.getObject(templateId)
// Clones the template.
const vm = await this._getOrWaitObject(
await this._cloneVm(template, nameLabel)
)
// TODO: copy BIOS strings?
// Removes disks from the provision XML, we will create them by
// ourselves.
await this.call('VM.remove_from_other_config', vm.$ref, 'disks')::pCatch(noop)
// Creates the VDIs and executes the initial steps of the
// installation.
await this.call('VM.provision', vm.$ref)
// Set VMs params.
this._setObjectProperties(vm, {
nameDescription,
PV_args: pvArgs,
VCPUs_at_startup: cpus
})
// Sets boot parameters.
{
const isHvm = isVmHvm(vm)
if (isHvm) {
if (!vdis.length || installMethod === 'network') {
const { HVM_boot_params: bootParams } = vm
let order = bootParams.order
if (order) {
order = 'n' + order.replace('n', '')
} else {
order = 'ncd'
}
this._setObjectProperties(vm, {
HVM_boot_params: { ...bootParams, order }
})
}
} else { // PV
if (vm.PV_bootloader === 'eliloader') {
if (installMethod === 'network') {
// TODO: normalize RHEL URL?
await this._updateObjectMapProperty(vm, 'other_config', {
'install-repository': installRepository
})
} else if (installMethod === 'cd') {
await this._updateObjectMapProperty(vm, 'other_config', {
'install-repository': 'cdrom'
})
}
}
}
}
// Inserts the CD if necessary.
if (installMethod === 'cd') {
// When the VM is started, if PV, the CD drive will become not
// bootable and the first disk bootable.
await this._insertCdIntoVm(installRepository, vm, {
bootable: true
})
}
// Modify existing (previous template) disks if necessary
const this_ = this // Work around http://phabricator.babeljs.io/T7172
existingVdis && await Promise.all(mapToArray(existingVdis, async ({ size, $SR: srId, ...properties }, userdevice) => {
const vbd = find(vm.$VBDs, { userdevice })
if (!vbd) {
return
}
const vdi = vbd.$VDI
await this_._setObjectProperties(vdi, properties)
// if the disk is bigger
if (
size != null &&
size > vdi.virtual_size
) {
await this_.resizeVdi(vdi.$id, size)
}
// if another SR is set, move it there
if (srId) {
await this_.moveVdi(vdi.$id, srId)
}
}))
// Creates the user defined VDIs.
//
// TODO: set vm.suspend_SR
vdis && await Promise.all(mapToArray(vdis, (vdiDescription, i) => {
return this._createVdi(
vdiDescription.size, // FIXME: Should not be done in Xapi.
{
name_label: vdiDescription.name_label,
name_description: vdiDescription.name_description,
sr: vdiDescription.sr || vdiDescription.SR
}
)
.then(ref => this._getOrWaitObject(ref))
.then(vdi => this._createVbd(vm, vdi, {
// Only the first VBD if installMethod is not cd is bootable.
bootable: installMethod !== 'cd' && !i
}))
}))
// Destroys the VIFs cloned from the template.
await Promise.all(mapToArray(vm.$VIFs, vif => this._deleteVif(vif)))
// Creates the VIFs specified by the user.
{
let position = 0
vifs && await Promise.all(mapToArray(vifs, vif => this._createVif(
vm,
this.getObject(vif.network),
{
position: position++,
mac: vif.mac,
mtu: vif.mtu
}
)))
}
// TODO: Assign VGPUs.
return this._waitObject(vm.$id)
}
async _deleteVm (vm, deleteDisks) {
debug(`Deleting VM ${vm.name_label}`)
@@ -1259,7 +1032,7 @@ export default class Xapi extends XapiBase {
// It's not needed to snapshot the VM to get the metadata
if (isVmRunning(vm) && !onlyMetadata) {
host = vm.$resident_on
snapshotRef = await this._snapshotVm(vm)
snapshotRef = (await this._snapshotVm(vm)).$ref
} else {
host = this.pool.$master
}
@@ -1301,11 +1074,16 @@ export default class Xapi extends XapiBase {
const baseVm = baseVmId && this.getObject(baseVmId)
// refs of VM's VDIs → base's VDIs.
const baseVdis = {}
baseVm && forEach(baseVm.$VBDs, vbd => {
const vdi = vbd.$VDI
if (vdi && !find(fullVdisRequired, id => vdi.$snapshot_of.$id === id)) {
baseVdis[vbd.VDI] = vdi
let vdi, snapshotOf
if (
(vdi = vbd.$VDI) &&
(snapshotOf = vdi.$snapshot_of) &&
!find(fullVdisRequired, id => snapshotOf.$id === id)
) {
baseVdis[vdi.snapshot_of] = vdi
}
})
@@ -1329,15 +1107,7 @@ export default class Xapi extends XapiBase {
const vdi = vbd.$VDI
// Look for a snapshot of this vdi in the base VM.
let baseVdi
baseVm && forEach(vdi.$snapshot_of.$snapshots, vdi => {
if (baseVdis[vdi.$ref]) {
baseVdi = vdi
// Stop iterating.
return false
}
})
const baseVdi = baseVdis[vdi.snapshot_of]
vdis[vdiId] = baseVdi && !disableBaseTags
? {
@@ -1345,23 +1115,27 @@ export default class Xapi extends XapiBase {
other_config: {
...vdi.other_config,
[TAG_BASE_DELTA]: baseVdi.uuid
}
},
$SR$uuid: vdi.$SR.uuid
}
: {
...vdi,
$SR$uuid: vdi.$SR.uuid
}
: vdi
const stream = streams[`${vdiId}.vhd`] = this._exportVdi(vdi, baseVdi, VDI_FORMAT_VHD)
$onFailure(() => stream.cancel())
})
const vifs = {}
forEach(vm.$VIFs, vif => {
vifs[vif.$ref] = vif
vifs[vif.$ref] = {
...vif,
$network$uuid: vif.$network.uuid
}
})
return {
// TODO: make non-enumerable?
streams: await streams::pAll(),
version: '1.0.0',
return Object.defineProperty({
version: '1.1.0',
vbds,
vdis,
vifs,
@@ -1374,7 +1148,9 @@ export default class Xapi extends XapiBase {
}
}
: vm
}
}, 'streams', {
value: await streams::pAll()
})
}
@deferrable.onFailure
@@ -1414,7 +1190,7 @@ export default class Xapi extends XapiBase {
// 1. Create the VMs.
const vm = await this._getOrWaitObject(
await this._createVm({
await this._createVmRecord({
...delta.vm,
affinity: null,
is_a_template: false
@@ -1508,11 +1284,20 @@ export default class Xapi extends XapiBase {
)),
// Create VIFs.
defaultNetwork && Promise.all(mapToArray(delta.vifs, vif => this._createVif(
vm,
networksOnPoolMasterByDevice[vif.device] || defaultNetwork,
vif
)))
Promise.all(mapToArray(delta.vifs, vif => {
const network =
this.getObject(vif.$network$uuid, null) ||
networksOnPoolMasterByDevice[vif.device] ||
defaultNetwork
if (network) {
return this._createVif(
vm,
network,
vif
)
}
}))
])
if (deleteBase && baseVm) {
@@ -1569,8 +1354,7 @@ export default class Xapi extends XapiBase {
{}
)
const this_ = this
const loop = () => this_.call(
const loop = () => this.call(
'VM.migrate_send',
vm.$ref,
token,
@@ -1652,10 +1436,6 @@ export default class Xapi extends XapiBase {
mapVdisSrs
} = {}) {
const vm = this.getObject(vmId)
if (!isVmRunning(vm)) {
throw new Error('cannot migrate a non-running VM')
}
const host = hostXapi.getObject(hostId)
const accrossPools = vm.$pool !== host.$pool
@@ -1686,12 +1466,49 @@ export default class Xapi extends XapiBase {
}
}
async _snapshotVm (vm, nameLabel = vm.name_label) {
debug(`Snapshotting VM ${vm.name_label}${
nameLabel !== vm.name_label
? ` as ${nameLabel}`
: ''
}`)
let ref
try {
ref = await this.call('VM.snapshot_with_quiesce', vm.$ref, nameLabel)
this.addTag(ref, 'quiesce')::pCatch(noop) // ignore any failures
await this._waitObjectState(ref, vm => includes(vm.tags, 'quiesce'))
} catch (error) {
const { code } = error
if (
code !== 'VM_SNAPSHOT_WITH_QUIESCE_NOT_SUPPORTED' &&
// quiesce only work on a running VM
code !== 'VM_BAD_POWER_STATE' &&
// quiesce failed, fallback on standard snapshot
// TODO: emit warning
code !== 'VM_SNAPSHOT_WITH_QUIESCE_FAILED'
) {
throw error
}
ref = await this.call('VM.snapshot', vm.$ref, nameLabel)
}
// Convert the template to a VM and wait to receive the up-to-date object.
const [ , snapshot ] = await Promise.all([
this.call('VM.set_is_a_template', ref, false),
this._waitObjectState(ref, snapshot => !snapshot.is_a_template)
])
return snapshot
}
async snapshotVm (vmId, nameLabel = undefined) {
return /* await */ this._getOrWaitObject(
await this._snapshotVm(
this.getObject(vmId),
nameLabel
)
return /* await */ this._snapshotVm(
this.getObject(vmId),
nameLabel
)
}
@@ -2138,14 +1955,17 @@ export default class Xapi extends XapiBase {
vdi: vdi.$ref
}
const host = vdi.$SR.$PBDs[0].$host
const pbd = find(vdi.$SR.$PBDs, 'currently_attached')
if (!pbd) {
throw new Error('no valid PBDs found')
}
const task = this._watchTask(taskRef)
await Promise.all([
stream.checksumVerified,
task,
put(stream, {
hostname: host.address,
hostname: pbd.$host.address,
method: 'put',
path: '/import_raw_vdi/',
query
@@ -2170,7 +1990,7 @@ export default class Xapi extends XapiBase {
mtu = 1500,
position = undefined,
device = position != null && String(position),
device = position != null ? String(position) : undefined,
ipv4_allowed = undefined,
ipv6_allowed = undefined,
locking_mode = undefined,

View File

@@ -0,0 +1,10 @@
import {
makeEditObject
} from '../utils'
export default {
editVif: makeEditObject({
ipv4Allowed: true,
ipv6Allowed: true
})
}

View File

@@ -0,0 +1,53 @@
import {
mapToArray
} from '../../utils'
export default {
_connectAllSrPbds (sr) {
return Promise.all(
mapToArray(sr.$PBDs, pbd => this._plugPbd(pbd))
)
},
async connectAllSrPbds (id) {
await this._connectAllSrPbds(this.getObject(id))
},
_disconnectAllSrPbds (sr) {
return Promise.all(
mapToArray(sr.$PBDs, pbd => this._unplugPbd(pbd))
)
},
async disconnectAllSrPbds (id) {
await this._disconnectAllSrPbds(this.getObject(id))
},
async destroySr (id) {
const sr = this.getObject(id)
await this._disconnectAllSrPbds(sr)
await this.call('SR.destroy', sr.$ref)
},
async forgetSr (id) {
const sr = this.getObject(id)
await this._disconnectAllSrPbds(sr)
await this.call('SR.forget', sr.$ref)
},
_plugPbd (pbd) {
return this.call('PBD.plug', pbd.$ref)
},
async plugPbd (id) {
await this._plugPbd(this.getObject(id))
},
_unplugPbd (pbd) {
return this.call('PBD.unplug', pbd.$ref)
},
async unplugPbd (id) {
await this._unplugPbd(this.getObject(id))
}
}

src/xapi/mixins/vm.js (new file, 316 lines)
View File

@@ -0,0 +1,316 @@
import find from 'lodash/find'
import gte from 'lodash/gte'
import lte from 'lodash/lte'
import {
forEach,
mapToArray,
noop,
parseSize,
pCatch
} from '../../utils'
import {
isVmHvm,
isVmRunning,
makeEditObject
} from '../utils'
export default {
// TODO: clean up on error.
async createVm (templateId, {
name_label, // deprecated
nameLabel = name_label, // eslint-disable-line camelcase
bootAfterCreate = false,
clone = true,
installRepository = undefined,
vdis = undefined,
vifs = undefined,
existingVdis = undefined,
coreOs = false,
cloudConfig = undefined,
...props
} = {}) {
const installMethod = (() => {
if (installRepository == null) {
return 'none'
}
try {
installRepository = this.getObject(installRepository)
return 'cd'
} catch (_) {
return 'network'
}
})()
const template = this.getObject(templateId)
// Clones the template.
let vm = await this._getOrWaitObject(
await this[clone ? '_cloneVm' : '_copyVm'](template, nameLabel)
)
// TODO: copy BIOS strings?
// Removes disks from the provision XML, we will create them by
// ourselves.
await this.call('VM.remove_from_other_config', vm.$ref, 'disks')::pCatch(noop)
// Creates the VDIs and executes the initial steps of the
// installation.
await this.call('VM.provision', vm.$ref)
// Set VMs params.
// TODO: checkLimits
this._editVm(vm, props)
// Sets boot parameters.
{
const isHvm = isVmHvm(vm)
if (isHvm) {
if (!vdis.length || installMethod === 'network') {
const { HVM_boot_params: bootParams } = vm
let order = bootParams.order
if (order) {
order = 'n' + order.replace('n', '')
} else {
order = 'ncd'
}
this._setObjectProperties(vm, {
HVM_boot_params: { ...bootParams, order }
})
}
} else { // PV
if (vm.PV_bootloader === 'eliloader') {
if (installMethod === 'network') {
// TODO: normalize RHEL URL?
await this._updateObjectMapProperty(vm, 'other_config', {
'install-repository': installRepository
})
} else if (installMethod === 'cd') {
await this._updateObjectMapProperty(vm, 'other_config', {
'install-repository': 'cdrom'
})
}
}
}
}
// Inserts the CD if necessary.
if (installMethod === 'cd') {
// When the VM is started, if PV, the CD drive will become not
// bootable and the first disk bootable.
await this._insertCdIntoVm(installRepository, vm, {
bootable: true
})
}
// Modify existing (previous template) disks if necessary
existingVdis && await Promise.all(mapToArray(existingVdis, async ({ size, $SR: srId, ...properties }, userdevice) => {
const vbd = find(vm.$VBDs, { userdevice })
if (!vbd) {
return
}
const vdi = vbd.$VDI
await this._setObjectProperties(vdi, properties)
// if the disk is bigger
if (
size != null &&
size > vdi.virtual_size
) {
await this.resizeVdi(vdi.$id, size)
}
// if another SR is set, move it there
if (srId) {
await this.moveVdi(vdi.$id, srId)
}
}))
// Creates the user defined VDIs.
//
// TODO: set vm.suspend_SR
if (vdis) {
const devices = await this.call('VM.get_allowed_VBD_devices', vm.$ref)
await Promise.all(mapToArray(vdis, (vdiDescription, i) => {
return this._createVdi(
vdiDescription.size, // FIXME: Should not be done in Xapi.
{
name_label: vdiDescription.name_label,
name_description: vdiDescription.name_description,
sr: vdiDescription.sr || vdiDescription.SR
}
)
.then(ref => this._getOrWaitObject(ref))
.then(vdi => this._createVbd(vm, vdi, {
// Only the first VBD if installMethod is not cd is bootable.
bootable: installMethod !== 'cd' && !i,
userdevice: devices[i]
}))
}))
}
// Destroys the VIFs cloned from the template.
await Promise.all(mapToArray(vm.$VIFs, vif => this._deleteVif(vif)))
// Creates the VIFs specified by the user.
if (vifs) {
const devices = await this.call('VM.get_allowed_VIF_devices', vm.$ref)
await Promise.all(mapToArray(vifs, (vif, index) => this._createVif(
vm,
this.getObject(vif.network),
{
device: devices[index],
mac: vif.mac,
mtu: vif.mtu
}
)))
}
// TODO: Assign VGPUs.
if (cloudConfig != null) {
// Refresh the record.
vm = this.getObject(vm.$id)
// Find the SR of the first VDI.
let srRef
forEach(vm.$VBDs, vbd => {
const vdi = vbd.$VDI
if (vdi) {
srRef = vdi.SR
return false
}
})
const method = coreOs
? 'createCoreOsCloudInitConfigDrive'
: 'createCloudInitConfigDrive'
await this[method](vm.$id, srRef, cloudConfig)
}
if (bootAfterCreate) {
this._startVm(vm)::pCatch(noop)
}
return this._waitObject(vm.$id)
},
// High level method to edit a VM.
//
// Params do not correspond directly to XAPI props.
_editVm: makeEditObject({
autoPoweron: {
set (value, vm) {
return Promise.all([
this._updateObjectMapProperty(vm, 'other_config', {
autoPoweron: value ? 'true' : null
}),
value && this.setPoolProperties({
autoPoweron: true
})
])
}
},
CPUs: 'cpus',
cpus: {
addToLimits: true,
// Current value may have constraints with other values.
//
// If the other value is not set and the constraint is not
// respected, the other value is changed first.
constraints: {
cpusStaticMax: gte
},
get: vm => +vm.VCPUs_at_startup,
set: [
'VCPUs_at_startup',
function (value, vm) {
return isVmRunning(vm) && this._set('VCPUs_number_live', value)
}
]
},
cpuCap: {
addToLimits: true,
get: vm => vm.VCPUs_params.cap && +vm.VCPUs_params.cap,
set (cap, vm) {
return this._updateObjectMapProperty(vm, 'VCPUs_params', { cap })
}
},
cpusMax: 'cpusStaticMax',
cpusStaticMax: {
constraints: {
cpus: lte
},
get: vm => +vm.VCPUs_max,
set: 'VCPUs_max'
},
cpuWeight: {
addToLimits: true,
get: vm => vm.VCPUs_params.weight && +vm.VCPUs_params.weight,
set (weight, vm) {
return this._updateObjectMapProperty(vm, 'VCPUs_params', { weight })
}
},
highAvailability: {
set (ha, vm) {
return this.call('VM.set_ha_restart_priority', vm.$ref, ha ? 'restart' : '')
}
},
memoryMin: {
constraints: {
memoryMax: gte
},
get: vm => +vm.memory_dynamic_min,
preprocess: parseSize,
set: 'memory_dynamic_min'
},
memory: 'memoryMax',
memoryMax: {
addToLimits: true,
constraints: {
memoryMin: lte,
memoryStaticMax: gte
},
get: vm => +vm.memory_dynamic_max,
preprocess: parseSize,
set: 'memory_dynamic_max'
},
memoryStaticMax: {
constraints: {
memoryMax: lte
},
get: vm => +vm.memory_static_max,
preprocess: parseSize,
set: 'memory_static_max'
},
nameDescription: true,
nameLabel: true,
PV_args: true
}),
async editVm (id, props) {
return /* await */ this._editVm(this.getObject(id), props)
}
}
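
A sketch of how the `_editVm` specs are consumed through the public `editVm` (VM id and values are made up): aliases such as `memory` and `CPUs` resolve to canonical names, constrained values like `memoryStaticMax` are raised first when needed, and `addToLimits` entries feed resource-set accounting.

await xapi.editVm('0f1d4e12', {
  nameLabel: 'web-01',
  cpus: 4,                          // also sets VCPUs_number_live when the VM is running
  memory: 8 * 1024 * 1024 * 1024,   // bytes, preprocessed by parseSize
  cpuWeight: 512,
  highAvailability: true            // maps to VM.set_ha_restart_priority('restart')
})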

src/xapi/utils.js (new file, 332 lines)
View File

@@ -0,0 +1,332 @@
// import isFinite from 'lodash/isFinite'
import camelCase from 'lodash/camelCase'
import isEqual from 'lodash/isEqual'
import pickBy from 'lodash/pickBy'
import { utcFormat, utcParse } from 'd3-time-format'
import {
camelToSnakeCase,
createRawObject,
forEach,
isArray,
isBoolean,
isFunction,
isInteger,
isObject,
isString,
map,
mapToArray,
noop
} from '../utils'
// ===================================================================
export const asBoolean = value => Boolean(value)
// const asFloat = value => {
// value = String(value)
// return value.indexOf('.') === -1
// ? `${value}.0`
// : value
// }
export const asInteger = value => String(value)
export const filterUndefineds = obj => pickBy(obj, value => value !== undefined)
export const optional = (value, fn) => value == null
? undefined
: fn ? fn(value) : value
export const prepareXapiParam = param => {
// if (isFinite(param) && !isInteger(param)) {
// return asFloat(param)
// }
if (isInteger(param)) {
return asInteger(param)
}
if (isBoolean(param)) {
return asBoolean(param)
}
if (isObject(param)) {
return map(filterUndefineds(param), prepareXapiParam)
}
return param
}
// -------------------------------------------------------------------
const OPAQUE_REF_RE = /OpaqueRef:[0-9a-z-]+/
export const extractOpaqueRef = str => {
const matches = OPAQUE_REF_RE.exec(str)
if (!matches) {
throw new Error('no opaque ref found')
}
return matches[0]
}
// -------------------------------------------------------------------
const TYPE_TO_NAMESPACE = createRawObject()
forEach([
'Bond',
'DR_task',
'GPU_group',
'PBD',
'PCI',
'PGPU',
'PIF',
'PIF_metrics',
'SM',
'SR',
'VBD',
'VBD_metrics',
'VDI',
'VGPU',
'VGPU_type',
'VLAN',
'VM',
'VM_appliance',
'VM_guest_metrics',
'VM_metrics',
'VMPP',
'VTPM'
], namespace => {
TYPE_TO_NAMESPACE[namespace.toLowerCase()] = namespace
})
// Object types given by `xen-api` are always lowercase but the
// namespaces in the Xen API can have a different casing.
export const getNamespaceForType = type => TYPE_TO_NAMESPACE[type] || type
// -------------------------------------------------------------------
// Format a date (pseudo ISO 8601) from one XenServer get by
// xapi.call('host.get_servertime', host.$ref) for example
export const formatDateTime = utcFormat('%Y%m%dT%H:%M:%SZ')
export const parseDateTime = utcParse('%Y%m%dT%H:%M:%SZ')
// -------------------------------------------------------------------
export const isHostRunning = host => {
const { $metrics } = host
return $metrics && $metrics.live
}
// -------------------------------------------------------------------
export const isVmHvm = vm => Boolean(vm.HVM_boot_policy)
const VM_RUNNING_POWER_STATES = {
Running: true,
Paused: true
}
export const isVmRunning = vm => VM_RUNNING_POWER_STATES[vm.power_state]
// -------------------------------------------------------------------
const _DEFAULT_ADD_TO_LIMITS = (next, current) => next - current
const _mapFilter = (collection, iteratee) => {
const result = []
forEach(collection, (...args) => {
const value = iteratee(...args)
if (value) {
result.push(value)
}
})
return result
}
export const makeEditObject = specs => {
const normalizeGet = (get, name) => {
if (get === true) {
const prop = camelToSnakeCase(name)
return object => object[prop]
}
if (isString(get)) {
return object => object[get]
}
return get
}
const normalizeSet = (set, name) => {
if (isFunction(set)) {
return set
}
if (set === true) {
const prop = camelToSnakeCase(name)
return function (value) {
return this._set(prop, value)
}
}
if (isString(set)) {
const index = set.indexOf('.')
if (index === -1) {
return function (value) {
return this._set(set, value)
}
}
const map = set.slice(0, index)
const prop = set.slice(index + 1)
return function (value, object) {
return this._updateObjectMapProperty(object, map, { [prop]: value })
}
}
if (!isArray(set)) {
throw new Error('must be an array, a function or a string')
}
set = mapToArray(set, normalizeSet)
const { length } = set
if (!length) {
throw new Error('invalid setter')
}
if (length === 1) {
return set[0]
}
return function (value, object) {
return Promise.all(mapToArray(set, set => set.call(this, value, object)))
}
}
const normalizeSpec = (spec, name) => {
if (spec === true) {
spec = {
get: true,
set: true
}
}
if (spec.addToLimits === true) {
spec.addToLimits = _DEFAULT_ADD_TO_LIMITS
}
forEach(spec.constraints, (constraint, constraintName) => {
if (!isFunction(constraint)) {
throw new Error('constraint must be a function')
}
const constraintSpec = specs[constraintName]
if (!constraintSpec.get) {
throw new Error('constraint values must have a get')
}
})
const { get } = spec
if (get) {
spec.get = normalizeGet(get, name)
} else if (spec.addToLimits) {
throw new Error('addToLimits cannot be defined without get')
}
spec.set = normalizeSet(spec.set, name)
return spec
}
forEach(specs, (spec, name) => {
isString(spec) || (specs[name] = normalizeSpec(spec, name))
})
// Resolves aliases and adds camelCase and snake_case aliases.
forEach(specs, (spec, name) => {
if (isString(spec)) {
do {
spec = specs[spec]
} while (isString(spec))
specs[name] = spec
}
let tmp
specs[tmp = camelCase(name)] || (specs[tmp] = spec)
specs[tmp = camelToSnakeCase(name)] || (specs[tmp] = spec)
})
return async function _editObject_ (id, values, checkLimits) {
const limits = checkLimits && {}
const object = this.getObject(id)
const _objectRef = object.$ref
const _setMethodPrefix = `${getNamespaceForType(object.$type)}.set_`
// Context used to execute functions.
const context = {
__proto__: this,
_set: (prop, value) => this.call(_setMethodPrefix + prop, _objectRef, prepareXapiParam(value))
}
const set = (value, name) => {
if (value === undefined) {
return
}
const spec = specs[name]
if (!spec) {
return
}
const { preprocess } = spec
if (preprocess) {
value = preprocess(value)
}
const { get } = spec
if (get) {
const current = get(object)
if (isEqual(value, current)) {
return
}
let addToLimits
if (limits && (addToLimits = spec.addToLimits)) {
limits[name] = addToLimits(value, current)
}
}
const cb = () => spec.set.call(context, value, object)
const { constraints } = spec
if (constraints) {
const cbs = []
forEach(constraints, (constraint, constraintName) => {
// The value this constraint refers to is already being set explicitly: bypass the constraint.
if (values[constraintName] != null) {
return
}
if (!constraint(specs[constraintName].get(object), value)) {
const cb = set(value, constraintName)
cbs.push(cb)
}
})
if (cbs.length) {
return () => Promise.all(mapToArray(cbs, cb => cb())).then(cb)
}
}
return cb
}
const cbs = _mapFilter(values, set)
if (checkLimits) {
await checkLimits(limits, object)
}
return Promise.all(mapToArray(cbs, cb => cb())).then(noop)
}
}
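To make the machinery above concrete, here is a hedged sketch of how a mixin could declare an editor with makeEditObject; the property names are loosely modelled on the real specs in src/xapi/index.js (see the memory/nameLabel spec at the top of this diff) but are not taken from the repository:

// Illustration only.
const _editVm = makeEditObject({
  // `true`: getter reads `name_label`, setter calls <namespace>.set_name_label.
  nameLabel: true,

  // A string setter targets a differently named field…
  highAvailability: {
    set: 'ha_restart_priority'
  },

  // …and a 'map.prop' setter updates one entry of a map property.
  autoPoweron: {
    set: 'other_config.auto_poweron'
  }
})

// Invoked with the Xapi instance as `this`, e.g.:
// await xapi::_editVm(vmId, { nameLabel: 'web-01', autoPoweron: true })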

View File

@@ -27,16 +27,19 @@ export default class {
}
async _getAclsForUser (userId) {
const subjects = (await this._xo.getUser(userId)).groups.concat(userId)
const user = await this._xo.getUser(userId)
const { groups } = user
const subjects = groups
? groups.concat(userId)
: [ userId ]
const acls = []
const pushAcls = (function (push) {
return function (entries) {
push.apply(acls, entries)
}
const pushAcls = (push => entries => {
push.apply(acls, entries)
})(acls.push)
const {_acls: collection} = this
const collection = this._acls
await Promise.all(mapToArray(
subjects,
subject => collection.get({subject}).then(pushAcls)

View File

@@ -151,7 +151,7 @@ export default class {
}
async deleteAuthenticationToken (id) {
if (!await this._tokens.remove(id)) { // eslint-disable-line space-before-keywords
if (!await this._tokens.remove(id)) {
throw new NoSuchAuthenticationToken(id)
}
}
@@ -175,7 +175,7 @@ export default class {
return token
}
async _getAuthenticationTokensForUser (userId) {
async getAuthenticationTokensForUser (userId) {
return this._tokens.get({ user_id: userId })
}
}

View File

@@ -1,11 +1,11 @@
import endsWith from 'lodash.endswith'
import endsWith from 'lodash/endsWith'
import escapeStringRegexp from 'escape-string-regexp'
import eventToPromise from 'event-to-promise'
import filter from 'lodash.filter'
import find from 'lodash.find'
import findIndex from 'lodash.findindex'
import sortBy from 'lodash.sortby'
import startsWith from 'lodash.startswith'
import filter from 'lodash/filter'
import find from 'lodash/find'
import findIndex from 'lodash/findIndex'
import sortBy from 'lodash/sortBy'
import startsWith from 'lodash/startsWith'
import {
basename,
dirname
@@ -106,7 +106,8 @@ export default class {
const stream = await handler.createReadStream(file)
const xapi = this._xo.getXapi(sr)
await xapi.importVm(stream, { srId: sr._xapiId })
const vm = await xapi.importVm(stream, { srId: sr._xapiId })
return xapiObjectToXo(vm).id
}
// -----------------------------------------------------------------
@@ -140,7 +141,7 @@ export default class {
stream => stream.cancel()
))
return srcXapi.deleteVm(delta.vm.$id, true)
return srcXapi.deleteVm(delta.vm.uuid, true)
})
const promise = targetXapi.importDeltaVm(
@@ -391,8 +392,7 @@ export default class {
// The problem is that, in the merge case, a delta merged into a full VDI
// backup forces us to browse the resulting file =>
// significant transfer time on the network!
checksum: !isFull,
flags: 'wx'
checksum: !isFull
})
stream.on('error', error => targetStream.emit('error', error))
@@ -545,12 +545,8 @@ export default class {
$onFailure(() => handler.unlink(infoPath)::pCatch(noop))
const { streams,
...infos
} = delta
// Write Metadata.
await handler.outputFile(infoPath, JSON.stringify(infos, null, 2), {flag: 'wx'})
await handler.outputFile(infoPath, JSON.stringify(delta, null, 2))
// Here we have a completed backup. We can merge old vdis.
await Promise.all(
@@ -631,7 +627,7 @@ export default class {
}
async _backupVm (vm, handler, file, {compress, onlyMetadata}) {
const targetStream = await handler.createOutputStream(file, { flags: 'wx' })
const targetStream = await handler.createOutputStream(file)
const promise = eventToPromise(targetStream, 'finish')
const sourceStream = await this._xo.getXapi(vm).exportVm(vm._xapiId, {

View File

@@ -1,3 +1,4 @@
import assign from 'lodash/assign'
import JobExecutor from '../job-executor'
import { Jobs } from '../models/job'
import {
@@ -44,8 +45,10 @@ export default class {
return job_.properties
}
async updateJob (job) {
return /* await */ this._jobs.save(job)
async updateJob ({id, type, name, key, method, paramsVector}) {
const oldJob = await this.getJob(id)
assign(oldJob, {type, name, key, method, paramsVector})
return /* await */ this._jobs.save(oldJob)
}
async removeJob (id) {

View File

@@ -50,12 +50,13 @@ export default class {
name,
instance,
configurationSchema,
configurationPresets,
version
) {
const id = name
const plugin = this._plugins[id] = {
configured: !configurationSchema,
configurationPresets,
configurationSchema,
id,
instance,
@@ -102,6 +103,7 @@ export default class {
async _getPlugin (id) {
const {
configurationPresets,
configurationSchema,
loaded,
name,
@@ -121,6 +123,7 @@ export default class {
unloadable,
version,
configuration,
configurationPresets,
configurationSchema
}
}
@@ -133,11 +136,21 @@ export default class {
// Validate the configuration and configure the plugin instance.
async _configurePlugin (plugin, configuration) {
if (!plugin.configurationSchema) {
const { configurationSchema } = plugin
if (!configurationSchema) {
throw new InvalidParameters('plugin not configurable')
}
const validate = createJsonSchemaValidator(plugin.configurationSchema)
// See: https://github.com/mafintosh/is-my-json-valid/issues/116
if (configuration == null) {
throw new InvalidParameters([{
field: 'data',
message: 'is the wrong type'
}])
}
const validate = createJsonSchemaValidator(configurationSchema)
if (!validate(configuration)) {
throw new InvalidParameters(validate.errors)
}
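Editor's note on the new null check above: is-my-json-valid reports nothing useful for a null document (see the linked issue), so the plugin mixin now rejects it explicitly. A hypothetical call site (the public method name is an assumption) would fail like this:

// Illustration only.
await xo.configurePlugin('some-plugin', null)
// → rejects with InvalidParameters([{ field: 'data', message: 'is the wrong type' }])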

View File

@@ -54,6 +54,11 @@ export default class {
return new Handler[type](remote)
}
async testRemote (remote) {
const handler = await this.getRemoteHandler(remote)
return handler.test()
}
async getAllRemotes () {
return this._remotes.get()
}

View File

@@ -1,6 +1,7 @@
import every from 'lodash.every'
import remove from 'lodash.remove'
import some from 'lodash.some'
import every from 'lodash/every'
import keyBy from 'lodash/keyBy'
import remove from 'lodash/remove'
import some from 'lodash/some'
import {
NoSuchObject,
@@ -12,6 +13,7 @@ import {
isObject,
lightSet,
map,
mapToArray,
streamToArray
} from '../utils'
@@ -23,6 +25,33 @@ class NoSuchResourceSet extends NoSuchObject {
}
}
const computeVmResourcesUsage = vm => {
const processed = {}
let disks = 0
let disk = 0
forEach(vm.$VBDs, vbd => {
let vdi, vdiId
if (
vbd.type === 'Disk' &&
!processed[vdiId = vbd.VDI] &&
(vdi = vbd.$VDI)
) {
processed[vdiId] = true
++disks
disk += +vdi.virtual_size
}
})
return {
cpus: vm.VCPUs_at_startup,
disk,
disks,
memory: vm.memory_dynamic_max,
vms: 1
}
}
const normalize = set => ({
id: set.id,
limits: set.limits
@@ -84,29 +113,9 @@ export default class {
}
computeVmResourcesUsage (vm) {
const processed = {}
let disks = 0
let disk = 0
forEach(this._xo.getXapi(vm).getObject(vm._xapiId).$VBDs, (vbd) => {
let vdi, vdiId
if (
vbd.type === 'Disk' &&
!processed[vdiId = vbd.VDI] &&
(vdi = vbd.$VDI)
) {
processed[vdiId] = true
++disks
disk += +vdi.virtual_size
}
})
return {
cpus: vm.CPUs.number,
disk,
disks,
memory: vm.memory.size,
vms: 1
}
return computeVmResourcesUsage(
this._xo.getXapi(vm).getObject(vm._xapiId)
)
}
async createResourceSet (name, subjects = undefined, objects = undefined, limits = undefined) {
@@ -268,7 +277,7 @@ export default class {
}
async recomputeResourceSetsLimits () {
const sets = await this.getAllResourceSets()
const sets = keyBy(await this.getAllResourceSets(), 'id')
forEach(sets, ({ limits }) => {
forEach(limits, (limit, id) => {
limit.available = limit.total
@@ -292,13 +301,15 @@ export default class {
}
const { limits } = set
forEach(this.computeVmResourcesUsage(object), (usage, resource) => {
forEach(computeVmResourcesUsage(object), (usage, resource) => {
const limit = limits[resource]
limit.available -= usage
if (limit) {
limit.available -= usage
}
})
})
})
await Promise.all(map(sets, (set) => this._save(set)))
await Promise.all(mapToArray(sets, set => this._save(set)))
}
}
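For reference (editor's illustration), the usage object produced by the hoisted computeVmResourcesUsage helper — and now subtracted from each limit's `available` counter only when that limit exists — has roughly this shape:

// Hypothetical values for a VM with 2 vCPUs, two 10 GiB disks and 4 GiB of dynamic memory.
{
  cpus: 2,             // vm.VCPUs_at_startup
  disk: 21474836480,   // sum of the attached VDIs' virtual_size, in bytes
  disks: 2,            // number of distinct VDIs reached through 'Disk' VBDs
  memory: 4294967296,  // vm.memory_dynamic_max, in bytes
  vms: 1
}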

View File

@@ -74,8 +74,10 @@ export default class {
_enable (schedule) {
const { id } = schedule
const stopSchedule = scheduleFn(schedule.cron, () =>
this.xo.runJobSequence([ schedule.job ])
const stopSchedule = scheduleFn(
schedule.cron,
() => this.xo.runJobSequence([ schedule.job ]),
schedule.timezone
)
this._cronJobs[id] = stopSchedule
@@ -137,8 +139,8 @@ export default class {
return /* await */ this._redisSchedules.get()
}
async createSchedule (userId, {job, cron, enabled, name}) {
const schedule_ = await this._redisSchedules.create(userId, job, cron, enabled, name)
async createSchedule (userId, { job, cron, enabled, name, timezone }) {
const schedule_ = await this._redisSchedules.create(userId, job, cron, enabled, name, timezone)
const schedule = schedule_.properties
this._add(schedule)
@@ -146,13 +148,18 @@ export default class {
return schedule
}
async updateSchedule (id, {job, cron, enabled, name}) {
async updateSchedule (id, { job, cron, enabled, name, timezone }) {
const schedule = await this._getSchedule(id)
if (job) schedule.set('job', job)
if (cron) schedule.set('cron', cron)
if (enabled !== undefined) schedule.set('enabled', enabled)
if (name !== undefined) schedule.set('name', name)
if (timezone === null) {
schedule.set('timezone', undefined) // Remove current timezone
} else if (timezone !== undefined) {
schedule.set('timezone', timezone)
}
await this._redisSchedules.save(schedule)
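The timezone handling above distinguishes three cases; a hedged summary with an assumed call site:

// Illustration only.
await updateSchedule(id, { cron: '0 2 * * *' })          // timezone absent → left untouched
await updateSchedule(id, { timezone: 'Europe/Paris' })   // defined → replaced
await updateSchedule(id, { timezone: null })             // null → removed (stored as undefined)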

View File

@@ -1,6 +1,6 @@
import endsWith from 'lodash.endswith'
import endsWith from 'lodash/endsWith'
import levelup from 'level-party'
import startsWith from 'lodash.startswith'
import startsWith from 'lodash/startsWith'
import sublevel from 'level-sublevel'
import { ensureDir } from 'fs-promise'

View File

@@ -1,5 +1,5 @@
import filter from 'lodash.filter'
import includes from 'lodash.includes'
import filter from 'lodash/filter'
import includes from 'lodash/includes'
import {
hash,
needsRehash,
@@ -17,8 +17,9 @@ import {
Users
} from '../models/user'
import {
createRawObject,
forEach,
isEmpty,
lightSet,
mapToArray,
noop,
pCatch
@@ -38,6 +39,11 @@ class NoSuchUser extends NoSuchObject {
}
}
const addToArraySet = (set, value) => set && !includes(set, value)
? set.concat(value)
: [ value ]
const removeFromArraySet = (set, value) => set && filter(set, current => current !== value)
// ===================================================================
export default class {
@@ -57,7 +63,7 @@ export default class {
})
xo.on('start', async () => {
if (!(await users.exists())) {
if (!await users.exists()) {
const email = 'admin@admin.net'
const password = 'admin'
@@ -86,11 +92,10 @@ export default class {
await this._users.remove(id)
// Remove tokens of user.
this._xo._getAuthenticationTokensForUser(id)
this._xo.getAuthenticationTokensForUser(id)
.then(tokens => {
forEach(tokens, token => {
this._xo._tokens.remove(token.id)
::pCatch(noop)
this._xo.deleteAuthenticationToken(token.id)::pCatch(noop)
})
})
::pCatch(noop) // Ignore any failures.
@@ -109,7 +114,8 @@ export default class {
name = email,
password,
permission
permission,
preferences
}) {
const user = await this.getUser(id)
@@ -123,6 +129,18 @@ export default class {
user.pw_hash = await hash(password)
}
const newPreferences = { ...user.preferences }
forEach(preferences, (value, name) => {
if (value == null) {
delete newPreferences[name]
} else {
newPreferences[name] = value
}
})
user.preferences = isEmpty(newPreferences)
? undefined
: newPreferences
// TODO: remove
user.email = user.name
delete user.name
@@ -246,7 +264,7 @@ export default class {
}
async getGroup (id) {
const group = (await this._groups.first(id))
const group = await this._groups.first(id)
if (!group) {
throw new NoSuchGroup(id)
}
@@ -264,15 +282,8 @@ export default class {
this.getGroup(groupId)
])
const {groups} = user
if (!includes(groups, groupId)) {
user.groups.push(groupId)
}
const {users} = group
if (!includes(users, userId)) {
group.users.push(userId)
}
user.groups = addToArraySet(user.groups, groupId)
group.users = addToArraySet(group.users, userId)
await Promise.all([
this._users.save(user),
@@ -281,14 +292,12 @@ export default class {
}
async _removeUserFromGroup (userId, group) {
// TODO: maybe not iterating through the whole arrays?
group.users = filter(group.users, id => id !== userId)
group.users = removeFromArraySet(group.users, userId)
return this._groups.save(group)
}
async _removeGroupFromUser (groupId, user) {
// TODO: maybe not iterating through the whole arrays?
user.groups = filter(user.groups, id => id !== groupId)
user.groups = removeFromArraySet(user.groups, groupId)
return this._users.save(user)
}
@@ -307,39 +316,36 @@ export default class {
async setGroupUsers (groupId, userIds) {
const group = await this.getGroup(groupId)
const newUsersIds = createRawObject()
const oldUsersIds = createRawObject()
forEach(userIds, id => {
newUsersIds[id] = null
})
let newUsersIds = lightSet(userIds)
const oldUsersIds = []
forEach(group.users, id => {
if (id in newUsersIds) {
delete newUsersIds[id]
if (newUsersIds.has(id)) {
newUsersIds.delete(id)
} else {
oldUsersIds[id] = null
oldUsersIds.push(id)
}
})
newUsersIds = newUsersIds.toArray()
const getUser = ::this.getUser
const [newUsers, oldUsers] = await Promise.all([
Promise.all(mapToArray(newUsersIds, (_, id) => this.getUser(id))),
Promise.all(mapToArray(oldUsersIds, (_, id) => this.getUser(id)))
Promise.all(newUsersIds.map(getUser)),
Promise.all(oldUsersIds.map(getUser))
])
forEach(newUsers, user => {
const {groups} = user
if (!includes(groups, groupId)) {
user.groups.push(groupId)
}
user.groups = addToArraySet(user.groups, groupId)
})
forEach(oldUsers, user => {
user.groups = filter(user.groups, id => id !== groupId)
user.groups = removeFromArraySet(user.groups, groupId)
})
group.users = userIds
const saveUser = ::this._users.save
await Promise.all([
Promise.all(mapToArray(newUsers, ::this._users.save)),
Promise.all(mapToArray(oldUsers, ::this._users.save)),
Promise.all(mapToArray(newUsers, saveUser)),
Promise.all(mapToArray(oldUsers, saveUser)),
this._groups.save(group)
])
}
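To illustrate the two set helpers introduced at the top of this file (editor's sketch, values invented):

addToArraySet([ 'group1' ], 'group2')                 // → [ 'group1', 'group2' ]
addToArraySet(undefined, 'group1')                    // → [ 'group1' ]
removeFromArraySet([ 'group1', 'group2' ], 'group1')  // → [ 'group2' ]
removeFromArraySet(undefined, 'group1')               // → undefined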

View File

@@ -78,7 +78,7 @@ export default class {
async unregisterXenServer (id) {
this.disconnectXenServer(id)::pCatch(noop)
if (!await this._servers.remove(id)) { // eslint-disable-line space-before-keywords
if (!await this._servers.remove(id)) {
throw new NoSuchXenServer(id)
}
}

View File

@@ -1,4 +1,4 @@
import includes from 'lodash.includes'
import includes from 'lodash/includes'
import XoCollection from 'xo-collection'
import XoUniqueIndex from 'xo-collection/unique-index'
import {createClient as createRedisClient} from 'redis'