Compare commits

..

664 Commits

Author SHA1 Message Date
Julien Fontanet
07829918e4 5.1.2 2016-07-28 15:21:12 +02:00
Julien Fontanet
b0d400b6eb fix(Xapi#exportDeltaVm): better handling of removed VDIs (#370)
Fixes vatesfr/xo-web#1333
2016-07-28 15:19:44 +02:00
Julien Fontanet
706cb895ad 5.1.1 2016-07-27 16:36:51 +02:00
Julien Fontanet
45bf539b3c fix(user.delete): fix tokens deletion 2016-07-27 13:23:16 +02:00
Julien Fontanet
0923981f8d fix(user.set): typo in error message 2016-07-27 13:01:32 +02:00
Julien Fontanet
b0ac14363d 5.1.0 2016-07-26 16:52:49 +02:00
Julien Fontanet
5d346aba37 fix(vm.create): cloudConfig handling 2016-07-26 14:26:24 +02:00
Julien Fontanet
124cb15ebe fix(resource sets): fix VM resources computation
Fixes vatesfr/xo-web#1276
2016-07-25 17:08:09 +02:00
Julien Fontanet
a244ab898d fix(vm.create): correctly store the resource set 2016-07-25 17:08:08 +02:00
Julien Fontanet
3c551590eb fix(vm.set): correctly save memory in limits 2016-07-25 17:08:07 +02:00
ABHAMON Ronan
10e30cccbc feat(models/schedule): null properly remove timezone (#368)
Related to vatesfr/xo-web#1314
2016-07-25 15:54:27 +02:00
Julien Fontanet
806a6b86a2 fix(signin): fix styles when /v4 2016-07-25 13:40:57 +02:00
Julien Fontanet
9719fdf5cc fix(sr.probe*): correctly prepare port param 2016-07-23 16:18:03 +02:00
Julien Fontanet
6d8764f8cb fix(Xapi#createVm): add missing param 2016-07-23 15:49:27 +02:00
Julien Fontanet
d9fd9cb408 fix(vm.create): better VBDs creation (#361)
Fixes vatesfr/xo-web#1257
2016-07-23 15:31:15 +02:00
Julien Fontanet
7710ec0aba feat(schemas): add user schema 2016-07-20 12:10:23 +02:00
Julien Fontanet
c97bd78cd0 fix(VM): cpuCap & cpuWeight are integers 2016-07-20 10:57:15 +02:00
ABHAMON Ronan
728c5aa86e feat(plugins): supports predefined configurations (#365)
See vatesfr/xo-web#1289
2016-07-19 17:28:53 +02:00
Pierre Donias
83d68ca293 feat(vm.set): make cpuWeight and cpuCap nullable (#364) 2016-07-19 16:53:47 +02:00
Julien Fontanet
47d7561db4 fix(VM): cpuCap can be defined when cpuWeight is not 2016-07-19 15:37:07 +02:00
ABHAMON Ronan
7d993e8319 feat(schedules): schedules support timezones (#363)
Fixes vatesfr/xo-web#1258
2016-07-19 13:32:27 +02:00
Julien Fontanet
1d1a597b22 feat(VM): expose cpuCap 2016-07-19 11:02:38 +02:00
Julien Fontanet
23082f9300 feat(vm.set): support for cpuCap (#362) 2016-07-19 10:35:03 +02:00
Julien Fontanet
ea1a7f9376 chore(Xapi#_getXenUpdates): use ensureArray() 2016-07-15 12:57:20 +02:00
Greenkeeper
1796c7bab8 chore(package): update nyc to version 7.0.0 (#358)
https://greenkeeper.io/
2016-07-14 13:09:12 +02:00
Greenkeeper
65ad76479a chore(package): update base64url to version 2.0.0 (#360)
https://greenkeeper.io/
2016-07-14 11:33:12 +02:00
Olivier Lambert
422db04ec8 5.0.5 2016-07-13 15:20:56 +02:00
Olivier Lambert
d12f60fe37 Merge pull request #359 from vatesfr/pierre-fix-create-vm
fix(vm/create): missing single quotes
2016-07-13 09:37:23 +02:00
Pierre Donias
194c1c991c fix(vm/create): missing single quotes 2016-07-12 16:40:32 +02:00
Olivier Lambert
3e8e2222c1 Merge pull request #357 from vatesfr/marsaudf-fix-job-log-error
Add message to job log error
2016-07-07 15:26:15 +02:00
Fabrice Marsaud
1620327a33 Add message to job log error 2016-07-07 14:55:43 +02:00
Olivier Lambert
b1131e3667 5.0.4 2016-07-07 12:12:54 +02:00
Olivier Lambert
db0250ac08 Merge pull request #356 from vatesfr/marsaudf-fix-patch-conflicts
Fix(xapi): handle correctly single XML elements
2016-07-07 11:22:27 +02:00
Fabrice Marsaud
0a6b605760 Handle single patch elements in parsed XML 2016-07-07 10:11:21 +02:00
Olivier Lambert
81ac2375e5 5.0.3 2016-07-06 23:23:14 +02:00
Olivier Lambert
6bcaca6cd7 Merge pull request #355 from vatesfr/issue-1233
fix(Xapi#importDeltaVm): correctly handle missing network
2016-07-06 23:21:55 +02:00
Olivier Lambert
ec8375252e fix(Xapi#importDeltaVm): correctly handle missing network 2016-07-06 23:11:47 +02:00
Julien Fontanet
766aa1762f 5.0.2 2016-07-05 17:56:02 +02:00
Julien Fontanet
5165e0a54c feat(user.set): support preferences 2016-07-05 17:19:38 +02:00
Julien Fontanet
a2f7ad627e feat(Xapi#migrateVm): allow non-running VMs
Fixes vatesfr/xo-web#1216
2016-07-05 17:09:54 +02:00
Julien Fontanet
1176c162d4 5.0.1 2016-06-30 15:46:27 +02:00
Fabrice Marsaud
a4880cd017 feat(remote.test): perform a write/read test on a remote (#354)
See vatesfr/xo-web#1075
2016-06-30 15:00:00 +02:00
Julien Fontanet
383bdce416 fix(plugin.configure): fix undefined handling 2016-06-29 13:08:02 +02:00
Julien Fontanet
7cc300dd83 fix(Xapi#createVif): fix handling when neither device nor position is provided 2016-06-28 17:36:24 +02:00
Fabrice Marsaud
687809db9d fix(user.set): cannot change self permission (#353) 2016-06-28 13:28:31 +02:00
Julien Fontanet
1127ec3a90 feat(vif.set): allowed IPv4/IPv6 addresses (#328) 2016-06-27 15:11:46 +02:00
Julien Fontanet
a797edfae9 chore(xapi/mixins/vm): simplify _editVm() specs 2016-06-27 12:10:57 +02:00
Julien Fontanet
938e106252 feat(xapi/utils/makeEditObject): support camelCase and snake_case aliases 2016-06-27 12:10:54 +02:00
Julien Fontanet
a0eb9caaa2 feat(xapi/utils/makeEditObject): set, set.get, set.set can be true 2016-06-27 11:54:13 +02:00
Julien Fontanet
442f53d45e fix(xapi/utils/makeEditObject): use deep equality 2016-06-27 09:52:02 +02:00
Greenkeeper
68de1ca248 chore(package): update ws to version 1.1.1 (#348)
https://greenkeeper.io/
2016-06-26 20:19:47 +02:00
Greenkeeper
e16061141e chore(package): update d3-time-format to version 2.0.0 (#350)
https://greenkeeper.io/
2016-06-26 20:18:24 +02:00
Julien Fontanet
64cbe3d209 feat(build): delete dist before building 2016-06-26 17:47:56 +02:00
Julien Fontanet
ebdc6376d8 5.0.0 2016-06-24 18:34:31 +02:00
Julien Fontanet
68335123a1 feat(vm.create): all vm.set params are supported (#340) 2016-06-24 18:33:43 +02:00
Julien Fontanet
25b18f4ef8 chore(package): update xo-acl-resolver to 0.2.1 2016-06-24 14:43:18 +02:00
Julien Fontanet
9ad615b0ff fix(Xapi#_waitObjectState): fix failure when object is initially missing 2016-06-22 12:20:22 +02:00
Julien Fontanet
12eaceb032 fix(xapi-objects-to-xo): fix CPUs.number when no tools 2016-06-21 13:19:29 +02:00
Julien Fontanet
3263511b72 fix(Xapi#snapshotVm): fallback if quiesce failed
Fixes vatesfr/xo-web#1088
2016-06-21 11:21:01 +02:00
Julien Fontanet
75cae8c647 fix(Xapi#_updateObjectMapProperty): prepare XAPI param 2016-06-21 11:21:00 +02:00
Julien Fontanet
9991ef624c feat(Xapi#getObject): accept objects with _xapiId property 2016-06-21 11:21:00 +02:00
Julien Fontanet
489e9fce27 fix(xapi/index): work around Babel T2877 2016-06-21 11:21:00 +02:00
Julien Fontanet
0655628073 fix(xapi/index): incorrect import 2016-06-21 11:20:59 +02:00
Fabrice Marsaud
9460822529 feat(vm.importBackup): returns the new VM id (#345) 2016-06-20 18:07:14 +02:00
Julien Fontanet
d02358ac0d chore(xapi): move utilities into dedicated module 2016-06-17 18:43:10 +02:00
ABHAMON Ronan
366237a625 fix(XapiStats): fix unit for host free memory (#339) 2016-06-17 10:16:58 +02:00
Julien Fontanet
2f2da18994 chore: remove some unnecessary logs 2016-06-16 09:22:26 +02:00
Greenkeeper
ecd30db215 chore(package): update d3-time-format to version 1.0.0 (#338)
https://greenkeeper.io/
2016-06-15 08:40:56 +02:00
ABHAMON Ronan
1980854f6f feat(Xapi#importDeltaVm): attach VIFs to original networks if available (#335)
Fixes vatesfr/xo-web#1016
2016-06-10 11:05:54 +02:00
Julien Fontanet
7d4f006c25 feat(Xapi#exportDeltaVm): inject network/SR UUIDs in VIF/VDI records 2016-06-09 17:25:02 +02:00
Julien Fontanet
b697be2383 fix(Xapi#_snapshotVm): returns the up-to-date snapshot record 2016-06-09 17:17:14 +02:00
Fabrice Marsaud
143e53c43f chore(package): update xo-remote-parser to version 0.3.0 (#333) 2016-06-08 17:26:08 +02:00
Julien Fontanet
6dde1ade01 fix(xo-server-logs): fix broken require since Babel 6 2016-06-08 11:12:45 +02:00
Greenkeeper
d4de391ac5 chore(package): update d3-time-format to version 0.4.0 (#332)
https://greenkeeper.io/
2016-06-08 09:05:45 +02:00
Greenkeeper
af15f4bc6a chore(package): update xo-acl-resolver to version 0.2.0 (#330)
https://greenkeeper.io/
2016-06-07 16:46:23 +02:00
Fabrice Marsaud
d4ace24caa fix(job.set): protects userId from modification (#329) 2016-06-07 09:25:15 +02:00
Julien Fontanet
c5ab47fa66 chore(package): fix deps order 2016-06-06 13:38:16 +02:00
Julien Fontanet
d60051b629 fix(package): update xo-remote-parser to 0.2.1 2016-06-06 13:37:47 +02:00
Julien Fontanet
22ff330ee7 fix(package): update @marsaud/smb2 to 0.7.1 2016-06-03 18:22:37 +02:00
Olivier Lambert
dd62bef66d feat(host): expose correct timestamp for license expiry value 2016-05-31 17:24:49 +02:00
Julien Fontanet
e7feb99f8d feat(vm.create): clone param may be use to disable cloning (#318)
See vatesfr/xo-web#960
2016-05-30 11:34:39 +02:00
Julien Fontanet
6358accece fix(plugin.configure): correctly handle undefined 2016-05-30 11:12:11 +02:00
Olivier Lambert
9ce8a24eea feat(sr): add disconnect and connect all PBDs to a SR (#324) 2016-05-27 18:31:09 +02:00
Julien Fontanet
4d0673f489 feat(sr.forget): automatically disconnect PBDs (#323) 2016-05-27 18:15:09 +02:00
Olivier Lambert
fbe1e6a7d5 fix(vm): missing parameters and wrong value for set_memory_static_max 2016-05-27 15:03:49 +02:00
Greenkeeper
4ed02ca501 chore(package): update cookie to version 0.3.0 (#322)
https://greenkeeper.io/
2016-05-27 04:36:35 +02:00
Julien Fontanet
af245ed9fe fix(log.delete): id can be an array 2016-05-26 13:34:47 +02:00
Julien Fontanet
fc86a3e882 fix(vm): always consider memory dynamic max when updating resource set 2016-05-24 16:22:55 +02:00
Julien Fontanet
f9109edcf1 fix(vm.set): memoryMax should update resource set 2016-05-24 16:21:21 +02:00
Julien Fontanet
ec100e1a91 fix(vm.set): memoryMax should change dynamic max 2016-05-24 16:20:25 +02:00
Julien Fontanet
746c5f4a79 fix(vm.set): cpusMax (shame) 2016-05-24 15:13:53 +02:00
Julien Fontanet
b2611728a1 fix(vm): fix indent 2016-05-24 14:38:11 +02:00
Julien Fontanet
fc6cc4234d chore(vm.set): fix some comments 2016-05-24 14:33:40 +02:00
Julien Fontanet
7706c1cb63 feat(vm.set): memoryStaticMax 2016-05-24 14:33:02 +02:00
Julien Fontanet
4d7a07220c feat(vm.set): memoryMax increase static max if necessary 2016-05-24 14:32:14 +02:00
Julien Fontanet
436875f7dc fix(vm.set): memoryMin should not change static min 2016-05-24 14:30:26 +02:00
Julien Fontanet
21c6f53ecc fix(vm.set): cpusMax 2016-05-24 14:23:21 +02:00
Julien Fontanet
5472be8b72 4.17.0 2016-05-24 11:51:15 +02:00
Julien Fontanet
d22542fcf3 Revert "fix(leveldown): fix leveldown to version 1.4.4"
This reverts commit 5fa4c95480.
2016-05-24 11:50:36 +02:00
Julien Fontanet
1d8341eb27 Merge branch 'next-release' into stable 2016-05-24 11:49:47 +02:00
Julien Fontanet
1897a7ada3 fix(log.get): only usable by admins 2016-05-23 16:18:21 +02:00
Julien Fontanet
a048698c66 feat(log.*): add params schemas 2016-05-23 16:17:54 +02:00
Julien Fontanet
f891e57f4a fix(xapi-objects-to-xo): a SR should always have a container 2016-05-23 16:00:51 +02:00
Olivier Lambert
fcc590e48a feat(vm.snapshot): name param is optional (#320) 2016-05-23 12:56:20 +02:00
Julien Fontanet
9a02a2a65b fix(vm.set): fix call to $isVmRunning
Fixes #319
2016-05-23 10:02:34 +02:00
Julien Fontanet
536a6c5c60 feat(vm.rollingDrCopy): accepts a sr param (#315)
See vatesfr/xo-web#955
2016-05-21 14:24:16 +02:00
Julien Fontanet
86a6871ee8 fix(vm.set): correctly change min dynamic memory if necessary (#317)
Fixes vatesfr/xo-web#970
2016-05-21 14:14:57 +02:00
Julien Fontanet
6046045151 feat(vm.createInterface): position param is now optional 2016-05-21 13:11:51 +02:00
Julien Fontanet
9c3ddd4ba4 fix(Xapi#_createVm()): license issue with Dundee (#316)
Fixes vatesfr/xo-web#964.
2016-05-20 12:22:42 +02:00
Julien Fontanet
6c9f55c1d7 style(utils): fix lightSet 2016-05-17 09:08:51 +02:00
Julien Fontanet
5bec3d7dcd fix(xapi-object-to-xo): correctly set host memory size 2016-05-16 11:50:01 +02:00
Julien Fontanet
a4c309efe8 fix(package): ship signin.pug 2016-05-12 18:18:56 +02:00
Jon Sands
4e22a208dd fix(autopoweron): set pool other_config entry to true instead of on (#310)
Fixes #309 
Fixes vatesfr/xo-web#937
2016-05-12 13:17:02 +02:00
Julien Fontanet
ff9e77118e fix(Xapi): VM creation on Dundee (#303) 2016-05-11 18:03:58 +02:00
Julien Fontanet
6c6dfa9ac4 perf(Promise): use Bluebird as default implementation 2016-05-11 18:01:52 +02:00
Greenkeeper
d60d5207d8 chore(package): update xen-api to version 0.9.0 (#308)
https://greenkeeper.io/
2016-05-11 17:55:35 +02:00
ABHAMON Ronan
8c0ae892f5 feat(api): rename <namespace> param to id (#305) 2016-05-11 14:35:49 +02:00
Greenkeeper
f570492a11 chore(package): update xo-remote-parser to version 0.2.0 (#307)
https://greenkeeper.io/
2016-05-11 14:07:23 +02:00
Julien Fontanet
cc447304f5 fix(bin/xo-server): remove ES6 syntax 2016-05-10 13:43:53 +02:00
Julien Fontanet
8f8c6366e3 chore(xo-mixins/backup): use default value for remote handler flags 2016-05-05 18:22:19 +02:00
Julien Fontanet
3b13bcb098 fix(Xapi#exportDeltaVm): make streams property non-enumerable 2016-05-05 18:19:41 +02:00
Julien Fontanet
df60784b51 chore(signin): jade renamed to pug 2016-05-04 16:00:28 +02:00
Julien Fontanet
bae3122bb5 chore: various updates 2016-05-04 12:16:02 +02:00
Julien Fontanet
0770aef4bf chore(package): update standard to version 7.0.0 2016-05-04 11:59:56 +02:00
ABHAMON Ronan
c198350bfa feat(remote-handlers): cannot overwrite files by default (#297) 2016-05-03 16:56:26 +02:00
Greenkeeper
a2ed388777 chore(package): update helmet to version 2.0.0 (#298)
https://greenkeeper.io/
2016-04-30 08:10:22 +02:00
Julien Fontanet
f6670c699a 4.16.1 2016-04-29 10:28:03 +02:00
Julien Fontanet
5fa4c95480 fix(leveldown): fix leveldown to version 1.4.4
Due to Level/leveldown#276.
2016-04-29 10:27:37 +02:00
Julien Fontanet
5b8608c186 feat(utils/streamToBuffer): rebase on get-stream and use everywhere (#295) 2016-04-29 09:52:36 +02:00
Julien Fontanet
bb75d42ede 4.16.0 2016-04-29 09:44:42 +02:00
Julien Fontanet
b4b6def07a Merge branch 'next-release' into stable 2016-04-29 09:43:30 +02:00
Greenkeeper
b305700987 chore(package): update get-stream to version 2.1.0 (#294)
https://greenkeeper.io/
2016-04-29 09:14:30 +02:00
Greenkeeper
40232b7eb1 chore(package): update fs-extra to version 0.30.0 (#293)
https://greenkeeper.io/
2016-04-28 18:17:34 +02:00
Julien Fontanet
67ff666db4 Use the new execa.stdout() 2016-04-28 10:18:05 +02:00
Greenkeeper
5960fd4fe0 chore(package): update fs-extra to version 0.29.0 (#292)
https://greenkeeper.io/
2016-04-28 09:04:39 +02:00
Greenkeeper
f8b28c519c chore(package): update xo-acl-resolver to version 0.1.0 (#291)
https://greenkeeper.io/
2016-04-28 08:56:31 +02:00
Julien Fontanet
ee1105b6dd fix(Xapi#importVdiContent): find first attached PBD (#279) 2016-04-27 09:37:30 +02:00
Julien Fontanet
4778274c97 fix(Xapi#call): retries on TOO_MANY_PENDING_TASKS
Fixes vatesfr/xo-web#861
2016-04-27 09:28:36 +02:00
Julien Fontanet
d7ecb32238 Xapi#snapshotVm(): wait for the uptodate obj on quiesce. (#282)
Fixes vatesfr/xo-web#904
2016-04-27 09:24:00 +02:00
Greenkeeper
744306fc50 chore(package): update execa to version 0.4.0 (#290)
https://greenkeeper.io/
2016-04-27 09:13:16 +02:00
Olivier Lambert
11bbb8ed4d add host startTime and agentStartTime 2016-04-26 11:30:57 +02:00
Julien Fontanet
b5092a4444 feat(toTimestamp): handle timestamps. 2016-04-26 11:27:26 +02:00
Greenkeeper
e2442c07a9 https://greenkeeper.io/ Fixes vatesfr/xo-web#769
https://greenkeeper.io/

Fixes vatesfr/xo-web#769.
2016-04-26 09:07:33 +02:00
Julien Fontanet
6f924d4e83 fix(user.delete): fix vatesfr/xo-web#901. 2016-04-25 14:33:29 +02:00
Greenkeeper
faf1508914 chore(package): update execa to version 0.3.0 (#284)
https://greenkeeper.io/
2016-04-23 17:51:56 +01:00
Julien Fontanet
7eb8152835 4.15.3 2016-04-22 16:18:21 +02:00
Julien Fontanet
8f45905831 fix(vm.deltaCopy()): delete snapshot when import fails. 2016-04-22 16:18:03 +02:00
Julien Fontanet
4ba2ffce5b fix(vm.deltaCopy()): delete snapshot when import fails. 2016-04-22 13:39:21 +02:00
Greenkeeper
ffb3659ef5 chore(package): update fs-extra to version 0.28.0 (#280)
http://greenkeeper.io/
2016-04-18 12:09:06 +01:00
Julien Fontanet
6dec07d562 signin form: fix redirect on success. 2016-04-18 11:57:58 +01:00
Julien Fontanet
afb22f3279 Merge pull request #278 from vatesfr/greenkeeper-fs-extra-0.27.0
Update fs-extra to version 0.27.0 🚀
2016-04-15 14:14:31 +02:00
greenkeeperio-bot
f2f369db64 chore(package): update fs-extra to version 0.27.0
http://greenkeeper.io/
2016-04-15 14:05:41 +02:00
Julien Fontanet
635c76db93 Deprecate host#CPUs and introduce host#cpus. 2016-04-13 10:59:29 +02:00
Julien Fontanet
5f50f1928d Merge pull request #276 from vatesfr/fix-auto-poweron
Fix auto poweron (fix vatesfr/xo-web#879).
2016-04-11 15:53:37 +02:00
Julien Fontanet
32c9ed1dc2 Fix auto poweron (fix vatesfr/xo-web#879). 2016-04-11 15:31:59 +02:00
Julien Fontanet
0536926a1f 4.15.2 2016-04-08 11:17:47 +02:00
Julien Fontanet
3959c98479 Update xo-acl-resolver to 0.0.0. 2016-04-08 11:17:26 +02:00
Julien Fontanet
2ce5735676 Fix ACLs in API calls (fix vatesfr/xo-web#870). 2016-04-08 11:05:17 +02:00
Julien Fontanet
71741e144e Merge pull request #274 from vatesfr/abhamonr-set-vm-ram-min-max-values
api.vm: Set the min/max ram values.
2016-04-07 10:08:34 +02:00
wescoeur
f2e64cdd5e api.vm: Set the min/max ram values. 2016-04-07 09:25:45 +02:00
Julien Fontanet
afaa5d5e9e Merge pull request #275 from vatesfr/abhamonr-set-vm-cpus-max
api.vm: Set vcpus max.
2016-04-06 17:39:16 +02:00
wescoeur
d82861727d api.vm: Set vcpus max. 2016-04-06 17:32:51 +02:00
Julien Fontanet
90f0795416 Merge pull request #272 from vatesfr/abhamonr-fix-smb-backup-location
Ensure remote smb path is a directory. (fix vatesfr/xo-web#865)
2016-04-06 16:25:28 +02:00
Julien Fontanet
9efbe7771c Merge pull request #273 from vatesfr/abhamonr-consistent-stats-object
vm.stats(): Returns empty stats if none can be found.
2016-04-06 12:10:34 +02:00
wescoeur
a75caac13d Vm stats are consistent. Even without RRD stats. 2016-04-06 11:55:14 +02:00
wescoeur
279d0d20ea Ensure remote smb path is a directory. (fix vatesfr/xo-web#865) 2016-04-06 09:51:20 +02:00
Julien Fontanet
332ba96d34 ghooks: use commit-msg instead of pre-commit. 2016-04-04 11:33:30 +02:00
Julien Fontanet
3f6e5b7606 decorators/@autobind: Minor improvements. 2016-04-04 11:29:31 +02:00
Julien Fontanet
94703492fd Use http-proxy for HTTP/ws proxy. 2016-03-30 17:33:50 +02:00
Julien Fontanet
df78117617 Do not crash on error in the console proxy. 2016-03-30 17:33:50 +02:00
Julien Fontanet
909b9480e4 Better error message in console proxy. 2016-03-30 17:33:49 +02:00
Julien Fontanet
21762ac1aa Return to the correct page after sign in. 2016-03-30 17:33:49 +02:00
Julien Fontanet
412bc175b4 Merge pull request #270 from vatesfr/abhamonr-stats-object-contains-interval
Stats object contains interval attr.
2016-03-30 14:49:25 +02:00
wescoeur
dc0eb76e88 Stats object contains interval attr. 2016-03-30 14:34:37 +02:00
Julien Fontanet
2695941a3c Remove pFinally() tests, now implemented in promise-toolbox. 2016-03-29 18:05:32 +02:00
Julien Fontanet
3506be1a70 Update promise-toolbox to 0.3.2. 2016-03-29 09:54:24 +02:00
Julien Fontanet
cbf4786b39 Do not crash on unhandled error event. 2016-03-27 13:31:31 +02:00
Julien Fontanet
8dbf334208 Merge pull request #267 from vatesfr/back-to-babel-6
Back to babel 6
2016-03-25 17:37:52 +01:00
Julien Fontanet
60ba5fbc72 Merge pull request #268 from vatesfr/abhamonr-stats-with-halted-vm
Throw an error if a vm is halted and its stats are requested.
2016-03-25 17:37:27 +01:00
Julien Fontanet
c3ace0c44f Simply do npm test before git commit. 2016-03-25 17:36:37 +01:00
Olivier Lambert
8eceb90e63 add startTime 2016-03-25 17:33:34 +01:00
wescoeur
4754e19e83 Throw an error if a vm is halted and its stats are requested. 2016-03-25 15:49:52 +01:00
Julien Fontanet
a0559d0dc9 Revert "Work around Babel T7172."
This reverts commit ca8476d466.
2016-03-23 09:45:59 +01:00
Julien Fontanet
8d03ce19b0 Revert "Merge pull request #248 from vatesfr/babel-t7172"
This reverts commit f125b593bf, reversing
changes made to ca8476d466.
2016-03-23 09:43:30 +01:00
Julien Fontanet
2470d851e9 Revert "Merge pull request #266 from vatesfr/babel-5-workaround"
This reverts commit b77d3f123d, reversing
changes made to c10b0afaa8.
2016-03-23 09:41:54 +01:00
Julien Fontanet
df99f5c0a5 Revert "Merge pull request #265 from vatesfr/revert-babel-6"
This reverts commit 8907290d27, reversing
changes made to eb55cba34a.
2016-03-23 09:41:08 +01:00
Julien Fontanet
36f5084c52 4.15.1 2016-03-22 15:13:54 +01:00
Olivier Lambert
b77d3f123d Merge pull request #266 from vatesfr/babel-5-workaround
Xapi#migrateVm(): Babel 5 workaround. Fix vatesfr/xo-web/issues/831
2016-03-22 13:46:31 +01:00
Julien Fontanet
3c14405155 Xapi#migrateVm(): Babel 5 workaround. 2016-03-22 13:21:56 +01:00
Julien Fontanet
c10b0afaa8 Xapi#copyVm(): wait copy to finish before deleting snapshot. 2016-03-22 12:14:15 +01:00
Julien Fontanet
3f7a2d6bfb Xapi#copyVm(): fix snapshot deletion. 2016-03-22 12:07:35 +01:00
Julien Fontanet
f2a0d56e01 Update xen-api to 0.7.4. 2016-03-22 10:23:29 +01:00
Julien Fontanet
0736cc8414 4.15.0 2016-03-21 16:36:17 +01:00
Julien Fontanet
53240d40a0 vm.delete(): Fix :: usage in CoffeeScript. 2016-03-21 15:55:19 +01:00
Julien Fontanet
4137dd7cc8 Xo/ResourceSet#computeVmResourcesUsage(): fix access to Xo#getXapi(). 2016-03-21 15:29:41 +01:00
Julien Fontanet
8907290d27 Merge pull request #265 from vatesfr/revert-babel-6
Revert "Merge pull request #193 from vatesfr/babel-6"
2016-03-21 15:09:37 +01:00
Julien Fontanet
401dc1cb10 Revert "Minor fixes related to Babel 6."
This reverts commit a9a1472cb7.
2016-03-21 13:02:34 +01:00
Julien Fontanet
a6b5d26f56 Revert "Merge pull request #193 from vatesfr/babel-6"
This reverts commit 250b0eee28, reversing
changes made to 57ebd5bb7a.
2016-03-21 13:02:34 +01:00
Julien Fontanet
eb55cba34a Xapi#exportDeltaVm(): fix handling of empty VBDs. 2016-03-21 13:00:56 +01:00
Julien Fontanet
b0b41d984e Update @marsaud/smb2-promise to 0.2. 2016-03-21 12:38:12 +01:00
Julien Fontanet
947f64e32d SmbHandler: fix error normalization. 2016-03-18 16:54:58 +01:00
Julien Fontanet
24ccbfa9b6 Test before commit. 2016-03-18 16:40:15 +01:00
Olivier Lambert
8110acb795 removing the debug mode for Vhd merge 2016-03-18 16:30:48 +01:00
Olivier Lambert
7473aede60 Merge pull request #259 from vatesfr/abhamonr-delta-backups-with-smb
Delta backups works with samba
2016-03-18 16:28:44 +01:00
wescoeur
6f204f721b Delta backups works with samba.
Many fixes (linux paths, windows errors support...)
All options in smb handlers have a default value.
Remove the handler._remote.path usage.
...
2016-03-18 14:57:14 +01:00
Olivier Lambert
7b0e08094a Merge pull request #264 from vatesfr/abhamonr-delta-backup-with-quiesce
Delta backup works with quiesce. (fix vatesfr/xo-web#812)
2016-03-18 14:47:12 +01:00
wescoeur
322e1a75b9 Delta backup works with quiesce. (fix vatesfr/xo-web#812) 2016-03-18 10:20:49 +01:00
Olivier Lambert
a0806d98a1 Merge pull request #263 from vatesfr/pierre-feedback-when-error-on-sr
`GenericXoError` to throw errors with human readable message.
2016-03-16 17:48:33 +01:00
Olivier Lambert
182897d971 Merge branch 'next-release' into pierre-feedback-when-error-on-sr 2016-03-16 17:42:32 +01:00
Pierre
f90a639fcc Minor fix. 2016-03-16 15:52:29 +01:00
Pierre
d95d7208a2 Minor fix. 2016-03-16 15:42:53 +01:00
Pierre
bbac8ffe64 Substituted all JsonRpcErrors by GenericErrors. 2016-03-16 15:34:19 +01:00
Pierre
801a649fb1 GenericXoError: Error with human readable message property. Used on PBD.unplug 2016-03-16 14:59:32 +01:00
Olivier Lambert
7c09ceecfd Merge pull request #262 from vatesfr/pierre-network-management
PIFs and networks management (See vatesfr/xo-web#805)
2016-03-16 11:22:06 +01:00
Pierre
8c4954fb9b Camel case fixes in pif.js 2016-03-16 10:55:15 +01:00
Pierre
fbe892105b Cast. Removed unused pieces of code. Minor fixes. 2016-03-16 10:29:42 +01:00
Pierre
584e1bb847 reconfigureIp to set the IP, netmask, DNS and gateway of a PIF 2016-03-16 10:12:40 +01:00
Pierre
c437ab282e Destroy VLANs and destroy network 2016-03-16 10:12:40 +01:00
Pierre
42a100d138 Delete network 2016-03-16 10:12:40 +01:00
Pierre
65807bf35d Fixes. 2016-03-16 10:12:40 +01:00
Pierre
2995f48ede network.create instead of createNetwork for host and pool 2016-03-16 10:12:40 +01:00
Julien Fontanet
d452702aef Better error messages on web server failure. 2016-03-11 23:41:51 +01:00
Julien Fontanet
f8ed9c7357 Merge pull request #260 from vatesfr/pierre-pool-networks
`createNetwork` on pool: network creation on pool.master (See vatesfr/xo-web#226)
2016-03-11 15:26:37 +01:00
Pierre
9143120177 network.create instead of createNetwork for host and pool 2016-03-11 11:57:12 +01:00
Pierre
fd3b1bee92 Code mutualization between host and pool. 2016-03-11 11:57:11 +01:00
Pierre
bff42954d1 createNetwork on pool: network creation on pool.master 2016-03-11 11:57:11 +01:00
Julien Fontanet
6b74fd6a02 plugin.get(): Expose versions (see vatesfr/xo-web#807). 2016-03-09 14:51:34 +01:00
Julien Fontanet
0547cebfe2 Import package.json explicitely to make dependency-check happy. 2016-03-09 11:46:09 +01:00
Julien Fontanet
caefdf4300 system.getServerVersion() (see vatesfr/xo-web#807). 2016-03-09 11:37:03 +01:00
Olivier Lambert
a59df15994 Merge pull request #255 from vatesfr/olivierlambert-cifs-iso
Prepare user/pass for CIFS ISO share
2016-03-04 13:49:08 +01:00
Olivier Lambert
33304eb8d9 add type in the new SR API 2016-03-04 13:37:37 +01:00
Olivier Lambert
eb21a1bfb3 support SMB ISO SR 2016-03-04 13:37:37 +01:00
Olivier Lambert
ce0333b0a7 prepare user/pass for CIFS ISO share 2016-03-04 13:37:37 +01:00
Julien Fontanet
25a1b53a91 Prints unhandled rejection ASAP. 2016-03-04 11:46:15 +01:00
Julien Fontanet
6aba73f970 Use ::pCatch(noop) instead of .catch(noop).
Avoid hiding programmer errors.
2016-03-04 11:21:33 +01:00
Julien Fontanet
6406bb7fb6 FIXME: remote type guessing should be done in xo-remote-parser. 2016-03-03 17:32:09 +01:00
Julien Fontanet
2458107903 Merge pull request #258 from vatesfr/marsaudf-fix-nfsHandler
Fix NFS remote handler.
2016-03-03 12:41:42 +01:00
Fabrice Marsaud
628f9bd9b5 Fixed findmnt call 2016-03-03 12:32:22 +01:00
Julien Fontanet
2d791571d5 Merge pull request #257 from vatesfr/julien-f-remove-unnecessary-await
Remove unnecessary `await`s.
2016-03-03 12:28:50 +01:00
Olivier Lambert
ed57127a79 Merge pull request #251 from vatesfr/host-state-unknown
New host state: Unknown.
2016-03-03 12:25:00 +01:00
Julien Fontanet
6d9bcff8e1 Remove unnecessary awaits.
- slightly improve perf
- slightly better stack traces
2016-03-03 12:24:22 +01:00
Julien Fontanet
8126cd1879 New host state: Unknown. 2016-03-03 12:15:30 +01:00
Julien Fontanet
ab34c2261c server.getAll(): fix related to xen-servers mixin. 2016-03-03 11:25:20 +01:00
Olivier Lambert
6953f65970 Merge pull request #256 from vatesfr/fix-vm-recoveryStart
Xapi#startVmOnCd(): fix for HVM (fix vatesfr/xo-web#794).
2016-03-03 10:10:10 +01:00
Julien Fontanet
52073e79fa Xapi#startVmOnCd(): fix for HVM (fix vatesfr/xo-web#794). 2016-03-03 10:02:09 +01:00
Julien Fontanet
8e3484bb17 Merge pull request #252 from vatesfr/better-default-https
Better defaults HTTPs config.
2016-03-02 18:04:05 +01:00
Julien Fontanet
7110da8a36 Better defaults HTTPs config. 2016-03-02 17:12:32 +01:00
Julien Fontanet
7ffd6ded51 Merge pull request #250 from vatesfr/pierre-licenses
Host and pool licenses (Fix vatesfr/xo-web#763)
2016-03-02 16:12:04 +01:00
Julien Fontanet
5e04547ecf Merge pull request #249 from vatesfr/recompute-resource-sets-limits
resourceSet.recomputeAllLimits()
2016-03-02 15:43:40 +01:00
Julien Fontanet
7cbe5f64ce Fix Xo#getXapi(). 2016-03-02 15:41:27 +01:00
Julien Fontanet
47ed78031a Xo#getAllXenServers(). 2016-03-02 15:23:32 +01:00
Pierre
fd3d24b834 getLicenseState 2016-03-02 15:15:10 +01:00
Pierre
c2f607b452 license_params and license_servers in XO host object 2016-03-02 15:15:09 +01:00
Julien Fontanet
b1328bb6e2 resourceSet.recomputeAllLimits() 2016-03-02 15:09:16 +01:00
Julien Fontanet
2a02583e27 Fix Xo#getXapi() with identifier. 2016-03-02 13:48:33 +01:00
Julien Fontanet
cfb49f9136 Merge pull request #247 from vatesfr/too-many-storage-migrates
Xapi#migrateVm(): handle TOO_MANY_STORAGE_MIGRATES (fix vatesfr/xo-we…
2016-03-02 12:47:00 +01:00
Julien Fontanet
5f20091f24 Xapi#migrateVm(): handle TOO_MANY_STORAGE_MIGRATES (fix vatesfr/xo-web#783).
When this error occurs, simply wait 10 seconds and retry.
2016-03-02 12:45:25 +01:00
Julien Fontanet
a37b8e35a1 utils: isInteger(). 2016-03-02 11:30:23 +01:00
Julien Fontanet
84c980c3ea utils: isObject(). 2016-03-02 11:07:13 +01:00
Julien Fontanet
5823057b41 utils: isArray(), isBoolean(), isFunction() & isString(). 2016-03-02 11:02:42 +01:00
Julien Fontanet
024a9b1763 Merge pull request #246 from vatesfr/xo-mixins-2
Xo mixins 2
2016-03-02 10:50:17 +01:00
Julien Fontanet
0425780cd3 vm.create(): fix setting name/description for disks (vatesfr/xo-web#774). 2016-03-02 10:49:15 +01:00
Julien Fontanet
20734dc7f3 Move authorization to a new mixin. 2016-03-02 10:38:06 +01:00
Julien Fontanet
0574c58f16 Move ACLs management to a new mixin. 2016-03-02 10:38:06 +01:00
Julien Fontanet
31e3117190 Move Xen servers management to a new mixin. 2016-03-02 10:38:06 +01:00
Julien Fontanet
f780ba2c5a vm.create(): fix updating existing disks (vatesfr/xo-web#774). 2016-03-02 10:34:44 +01:00
Olivier Lambert
f125b593bf Merge pull request #248 from vatesfr/babel-t7172
Work around Babel T7172 (second attempt).
2016-03-02 10:22:47 +01:00
Julien Fontanet
baee4e185d Work around Babel T7172 (second attempt). 2016-03-02 10:14:50 +01:00
Julien Fontanet
ca8476d466 Work around Babel T7172. 2016-03-02 10:04:12 +01:00
Julien Fontanet
757bf82a78 Merge branch 'stable' into next-release 2016-02-29 09:31:12 +01:00
Olivier Lambert
644887f727 4.14.4 2016-02-28 17:20:17 +01:00
Olivier Lambert
563b643461 Fix NFS mount issues for Linux target 2016-02-28 17:20:02 +01:00
Julien Fontanet
0e4a6fd2e1 Merge pull request #245 from vatesfr/pierre-cpu-weight-in-resource-set
vm.set(): only admins can change cpuWeight if VM in a resource set
2016-02-26 12:29:31 +01:00
Pierre
d452bf1f1c vm.set(): only admins can change cpuWeight if VM in a resource set 2016-02-26 12:22:40 +01:00
Julien Fontanet
126828a813 http-request: another work around for the Babel issue 2016-02-26 11:26:02 +01:00
Julien Fontanet
03dc6fb73a http-request: work around Babel issue. 2016-02-26 11:16:43 +01:00
Julien Fontanet
3653e89714 Merge branch 'stable' into next-release 2016-02-26 10:23:51 +01:00
Julien Fontanet
318dd14e42 4.14.3 2016-02-26 10:23:27 +01:00
Julien Fontanet
2d13844b5d Disable npm {pre,post}version magic for now. 2016-02-26 10:23:20 +01:00
Julien Fontanet
b777b7432a Merge pull request #244 from vatesfr/olivierlambert-nfs-fix
Add v3 parameter for NFS. Fix vatesfr/xo-web/issues/771
2016-02-26 10:12:24 +01:00
Olivier Lambert
6f91c225c2 Add v3 parameter for NFS. Fix vatesfr/xo-web/issues/771 2016-02-26 10:06:18 +01:00
Julien Fontanet
c355e9ca4a Xo/subjects: fix access to Xo private props (fix #755). 2016-02-25 11:36:52 +01:00
Julien Fontanet
4514ea8123 Minor fixes related to promise-toolbox. 2016-02-24 18:22:18 +01:00
Julien Fontanet
a9a1472cb7 Minor fixes related to Babel 6. 2016-02-24 18:22:00 +01:00
Julien Fontanet
250b0eee28 Merge pull request #193 from vatesfr/babel-6
Upgrade to Babel 6.
2016-02-24 16:18:36 +01:00
Julien Fontanet
5cd7527937 Upgrade to Babel 6. 2016-02-24 16:11:35 +01:00
Julien Fontanet
57ebd5bb7a Merge pull request #242 from vatesfr/use-promise-toolbox
Use promise-toolbox.
2016-02-24 16:11:13 +01:00
Julien Fontanet
c18a697d6b Use promise-toolbox. 2016-02-24 15:17:11 +01:00
Julien Fontanet
ad40b72508 4.14.2 2016-02-24 13:51:49 +01:00
Julien Fontanet
3a72e5910d Add preversion/postversion scripts. 2016-02-24 13:50:47 +01:00
Julien Fontanet
8f3eb65a05 Various fixes. 2016-02-24 13:34:12 +01:00
Julien Fontanet
700cd83ff5 4.14.1 2016-02-24 10:34:10 +01:00
Julien Fontanet
0c27881eaf vm.create(): register up-to-date VM object (fix vatesfr/xo-web#749) 2016-02-24 10:21:53 +01:00
Julien Fontanet
f7fdc6acd2 4.14.0 2016-02-23 19:10:57 +01:00
Julien Fontanet
2c5f844edc Merge pull request #229 from vatesfr/resource-sets
Resource sets
2016-02-23 19:03:34 +01:00
Julien Fontanet
a253de43c5 vm.create(): correctly handle updating SR on existing disks. 2016-02-23 18:57:00 +01:00
Julien Fontanet
dbaf67a986 vm.set(): correctly count limits for CPUs. 2016-02-23 18:03:54 +01:00
Julien Fontanet
5175d06e37 vm.create(): handle limits on template disks. 2016-02-23 17:53:41 +01:00
Julien Fontanet
651a27b558 resource set: limit.available cannot be > limit.total. 2016-02-23 17:16:13 +01:00
Julien Fontanet
fd41f8def6 vm.delete(): correctly free resource set limits. 2016-02-23 17:12:19 +01:00
Julien Fontanet
208ea04fd5 resourceSet.set(): better handling of limits. 2016-02-23 15:20:20 +01:00
Julien Fontanet
5ee83a1af9 Merge pull request #241 from vatesfr/a-schedule-links-a-function
Scheduler is now in scheduling.js and use ScheduleFn function.
2016-02-23 15:06:55 +01:00
wescoeur
901c7704f4 Scheduler is now in scheduling.js and use ScheduleFn function. 2016-02-23 15:01:30 +01:00
Julien Fontanet
c6f7290f92 resourceSet.{create,set}(): limits are optional. 2016-02-23 13:11:40 +01:00
Julien Fontanet
5368eda98b vm.set(): works with resource sets. 2016-02-23 13:06:36 +01:00
Julien Fontanet
7b9be209c8 resourceSet: initial limits. 2016-02-23 10:33:50 +01:00
Julien Fontanet
cee05fea7c Some permission fixes for vm.*(). 2016-02-22 19:23:32 +01:00
Julien Fontanet
b87acb47e2 Lots of TODOs in vm.*(). 2016-02-22 18:44:18 +01:00
Julien Fontanet
cb192bf9ea vm.convertToTemplate(): require pool admin permission. 2016-02-22 18:30:28 +01:00
Julien Fontanet
16351ba7f3 Merge pull request #236 from vatesfr/abhamonr-vhd-util-without-binary
Remove dependency on vhd-util for VHD merging
2016-02-22 14:02:40 +01:00
wescoeur
96ba128942 Remove vhd-util dependency. Merging vhd works with a node script. (fix vatesfr/xo-web#646) 2016-02-22 13:59:33 +01:00
Julien Fontanet
76c8d4af25 API: fix various permissions. 2016-02-19 16:37:27 +01:00
Julien Fontanet
3ea2b3cc00 vm.createCloudInitConfigDrive(): disable permission check on SR. 2016-02-19 16:37:27 +01:00
Julien Fontanet
0df0936022 resourceSet: fix getAll() and checkConstraints(). 2016-02-19 16:37:27 +01:00
Julien Fontanet
4fc11a7fd3 resourceSet.{add,remove}Subject(): minor fixes. 2016-02-19 16:37:27 +01:00
Julien Fontanet
8c509271a6 resourceSet.getAll(): code has been refactored around streamToArray(). 2016-02-19 16:37:27 +01:00
Julien Fontanet
67d5b63ef9 vm.create(): works with resource sets. 2016-02-19 16:37:27 +01:00
Julien Fontanet
4f999511a6 resourceSet.getAll(): code has been refactored around lightSet(). 2016-02-19 16:37:27 +01:00
Julien Fontanet
cfbf239175 resourceSet.getAll(): works also for non-admins. 2016-02-19 16:37:27 +01:00
Julien Fontanet
1aedf9bb07 resourceSet: fix deletion. 2016-02-19 16:37:27 +01:00
Julien Fontanet
c2d4423720 Fix: start event instead of starting. 2016-02-19 16:37:27 +01:00
Julien Fontanet
c2f7a2620c api: fix access to private Xo properties. 2016-02-19 16:37:27 +01:00
Julien Fontanet
6f0cda34b4 Xo/Subjects: Fix admin user creation. 2016-02-19 16:37:27 +01:00
Julien Fontanet
1a472fdf1f Xo: subjects management moved into new mixin. 2016-02-19 16:37:27 +01:00
Julien Fontanet
0551f61228 Resource sets: use store. 2016-02-19 16:37:27 +01:00
Julien Fontanet
b900adfddd Promisify store. 2016-02-19 16:37:27 +01:00
Julien Fontanet
0e339daef5 Typo. 2016-02-19 16:37:27 +01:00
Julien Fontanet
5f5733e8b9 Various fixes and additions to resourceSet API. 2016-02-19 16:37:27 +01:00
Julien Fontanet
1372050a7b LevelDbLogger: avoid increments in keys when not necessary. 2016-02-19 16:37:27 +01:00
Julien Fontanet
1960951c5e Initial resource sets. 2016-02-19 16:37:27 +01:00
Julien Fontanet
bc070407c7 generate-indexes: rewritten in Node
- cross-platform
- properly camel-cases identifiers
2016-02-19 16:37:27 +01:00
Julien Fontanet
0172ee0b6b Remove unused import. 2016-02-19 16:37:27 +01:00
Julien Fontanet
2953bc6bb8 loggers: LevelDbLogger now inherits from AbstractLogger. 2016-02-19 16:37:27 +01:00
Julien Fontanet
c0ed3a9e3c Xo#getStore(namespace). 2016-02-19 16:37:27 +01:00
Julien Fontanet
5456e4fe75 Merge branch 'stable' into next-release 2016-02-19 16:36:59 +01:00
Julien Fontanet
867a1e960e Merge pull request #240 from vatesfr/stream-to-array
util: streamToArray(Stream, filter?: Predicate) => Promise.
2016-02-18 17:02:25 +01:00
Julien Fontanet
48dc68c3fe util: streamToArray(Stream, filter?: Predicate) => Promise. 2016-02-18 16:58:17 +01:00
Julien Fontanet
2c719f326b Xapi: lodash.pick() → lodash.pickBy(). 2016-02-18 15:45:28 +01:00
Julien Fontanet
201f92eb93 system.getMethodsInfo(): Fix breakage due to upgrade to Lodash v4. 2016-02-18 15:39:08 +01:00
Julien Fontanet
46f055b216 Merge pull request #137 from vatesfr/abhamonr-job-schema
Add: Job and crossProduct schemas.
2016-02-17 10:29:40 +01:00
Julien Fontanet
08305e679b http-proxy: agent is now the default export. 2016-02-17 09:53:00 +01:00
Julien Fontanet
e9e0b70199 Merge pull request #238 from bartekrutkowski/next-release
Correct the redis protocol in sample config.
2016-02-16 16:49:50 +01:00
Bartek Rutkowski
441d784027 Correct the redis protocol in sample config
The existing sample configuration file documents the Redis uri string with 'tcp://' prefix string, while xo-server actually expects it to be 'redis://' instead.
2016-02-16 15:47:21 +00:00
Julien Fontanet
558956bf55 Merge pull request #237 from vatesfr/http-proxy
HTTP proxy support (fix vatesfr/xo-web#737).
2016-02-16 15:57:29 +01:00
Julien Fontanet
0d8250a3ac HTTP proxy support (fix vatesfr/xo-web#737). 2016-02-16 15:50:28 +01:00
Julien Fontanet
dc1f5826f8 Lodash 4 iteration: no thisArg argument. 2016-02-16 15:50:10 +01:00
Julien Fontanet
06fb06829b Update lodash.* deps. 2016-02-16 11:53:53 +01:00
Julien Fontanet
bbf52d2611 Update deps. 2016-02-16 11:53:22 +01:00
Julien Fontanet
f55a6617e9 Remove graceful-fs: not used directly by xo-server. 2016-02-16 11:48:52 +01:00
Julien Fontanet
3bd273fbdd Merge pull request #235 from vatesfr/abhamonr-avoid-merge-delta-delta
Avoid merge between two delta vdi backups. (fix vatesfr/xo-web#702)
2016-02-11 17:32:28 +01:00
wescoeur
1b64a543f1 Avoid merge between two delta vdi backups. (fix vatesfr/xo-web#702) 2016-02-11 16:42:50 +01:00
Julien Fontanet
97b07f7d42 The main bug tracker is xo-web. 2016-02-08 15:34:13 +01:00
Olivier Lambert
ebb472b8f6 4.13.2 2016-02-05 13:01:49 +01:00
Olivier Lambert
1a2ef6479e Merge pull request #232 from vatesfr/fix-xapi-updateObjectMapProperty
Xapi#_updateObjectMapProperty(): fix major issue.
2016-02-05 13:00:58 +01:00
Olivier Lambert
876c63fe80 4.13.1 2016-02-05 13:00:02 +01:00
Julien Fontanet
32236962f5 Xapi#_updateObjectMapProperty(): fix major issue. 2016-02-05 12:53:39 +01:00
Julien Fontanet
ba66af922f 4.13.0 2016-02-04 19:43:43 +01:00
Julien Fontanet
28b9bbe54f 4.13.0-0 2016-02-04 18:01:04 +01:00
Julien Fontanet
bf6bd7cbdc Merge pull request #230 from vatesfr/pierre-vm-migration-details
Fix intra-pool migration on different SRs
2016-02-04 17:30:14 +01:00
Pierre
ddcb2468a6 Minor fixes 2016-02-04 17:26:27 +01:00
Pierre
f048b58935 Fix intra-pool migration on different SRs 2016-02-04 17:17:09 +01:00
Julien Fontanet
09f6200c2e Merge pull request #209 from vatesfr/abhamonr-checksum-verification-delta-backup
Create and verify checksums for VDI delta backups
2016-02-04 16:12:54 +01:00
wescoeur
354692fb06 Add checksum verification for delta backup on restore/merge. (fix vatesfr/xo-web#617) 2016-02-04 15:22:14 +01:00
Julien Fontanet
2c5858c2e0 Merge pull request #228 from vatesfr/pierre-vm-migration-details
Fix default migration
2016-02-04 15:18:31 +01:00
Pierre
1f41fd0436 Better handle of undefined maps 2016-02-04 11:36:04 +01:00
Pierre
e0bbefdfae Fix default migration 2016-02-04 11:02:43 +01:00
Julien Fontanet
bc6fbb2797 Xo#registerPlugin(): log errors. 2016-02-04 10:32:36 +01:00
Julien Fontanet
b579cf8128 Merge pull request #227 from vatesfr/https-redirect
Can redirect to HTTPs (fix vatesfr/xo-web#626).
2016-02-04 09:55:12 +01:00
Julien Fontanet
a94ed014b7 sample config: add redirectToHttps. 2016-02-04 09:52:37 +01:00
Julien Fontanet
0db991b668 Can redirect to HTTPs. 2016-02-03 17:39:39 +01:00
Julien Fontanet
347ced6942 Merge pull request #214 from vatesfr/better-https
Better https (fix vatesfr/xo-web#685)
2016-02-03 14:32:25 +01:00
Olivier Lambert
5d7a775b2b Merge pull request #225 from vatesfr/xo-acl-resolver
Use xo-acl-resolver.
2016-02-03 14:29:31 +01:00
Julien Fontanet
df732ab4bf Merge pull request #216 from vatesfr/vdi-snapshot-type
VDI-snapshot type
2016-02-03 14:14:42 +01:00
Fabrice Marsaud
31cd3953d6 Fixing VM object properties 2016-02-03 13:56:15 +01:00
Julien Fontanet
4666b13892 Use xo-acl-resolver. 2016-02-03 11:47:02 +01:00
Julien Fontanet
37d7ddb4b0 Merge pull request #224 from vatesfr/pierre-vm-migration-details
Custom VM migration (See vatesfr/xo-web#567)
2016-02-03 11:39:53 +01:00
Fabrice Marsaud
3abbaeb44b resolving VDI snapshots 2016-02-03 10:38:09 +01:00
Fabrice Marsaud
847ea49042 VDI-snapshot type 2016-02-03 09:57:42 +01:00
Julien Fontanet
779068c2ee HTTP security: use Helmet. 2016-02-02 20:49:33 +01:00
Julien Fontanet
140cd6882d Allows full TLS config. 2016-02-02 20:45:06 +01:00
Julien Fontanet
2e295c2391 Merge pull request #213 from vatesfr/fix-cpu-weight
Fixed cpuWeight removal for default
2016-02-02 10:24:32 +01:00
Fabrice Marsaud
596b0995f4 Prepare object values for xapi 2016-02-02 10:11:39 +01:00
Fabrice Marsaud
b61fe97893 Fixed cpuWeight removal for default 2016-02-02 09:55:40 +01:00
Julien Fontanet
209aa2ebe6 Add a TODO. 2016-02-01 17:14:05 +01:00
Julien Fontanet
c03a0e857e Merge pull request #188 from vatesfr/olivierlambert-cpu-weight
Ability to set vCPU weight
2016-02-01 16:00:21 +01:00
Olivier Lambert
2854d698e6 Implement vCPU weight 2016-02-01 15:56:36 +01:00
Pierre
944163be0e Bug fix: VDIs should be on chosen SRs. 2016-01-29 10:07:02 +01:00
Julien Fontanet
269a9eaff0 Xapi: small but important fix concerning imports. 2016-01-28 17:22:44 +01:00
Olivier Lambert
7f9c49cbc4 Merge pull request #208 from vatesfr/pierre-vm-migration-details
Custom VM migration. (See vatesfr/xo-web#567)
2016-01-28 17:02:28 +01:00
Julien Fontanet
2b6bfeeb15 Merge pull request #212 from vatesfr/contrep-better-snapshot-names
continuous replication: clearer VM snapshot names.
2016-01-28 16:05:03 +01:00
Julien Fontanet
fa9742bc92 continuous replication: clearer VM snapshot names. 2016-01-28 15:57:25 +01:00
Pierre
472e419abc Using forEach instead of for. Minor fixes. 2016-01-28 13:33:43 +01:00
Pierre
169d11387b Custom VM migration (See vatesfr/xo-web#567)
Optional parameters for migratePool:
- Migration network
- Map each VDI to an SR on destination host
- Map each VIF to a network on destination host
2016-01-28 13:33:43 +01:00
Julien Fontanet
e59ac6d947 Fixes regarding #660. 2016-01-28 11:42:06 +01:00
Olivier Lambert
e193b45562 Merge pull request #207 from vatesfr/abhamonr-avoid-metadata-imp-exp-delta-backups
Avoid metadata import/export in delta backups. (fix vatesfr/xo-web#651)
2016-01-28 11:35:04 +01:00
wescoeur
1ac34f810e Avoid metadata import/export in delta backups. (fix vatesfr/xo-web#651) 2016-01-28 11:12:21 +01:00
Olivier Lambert
e65e5c6e5f Merge pull request #211 from vatesfr/marsaudf-clear-logs#661
Marsaudf clear logs#661
2016-01-28 10:59:37 +01:00
Fabrice Marsaud
af6365c76a logger.delete 2016-01-28 09:00:15 +01:00
Julien Fontanet
8c672b23b5 Merge pull request #159 from vatesfr/marsaudf-smb-mounts#338
Remotes refactoring + SMB implementation.
2016-01-27 11:24:52 +01:00
Fabrice Marsaud
3b53f5ac11 fixes 2016-01-27 10:58:16 +01:00
Fabrice Marsaud
ccdc744748 fixes 2016-01-27 10:08:59 +01:00
Fabrice Marsaud
261f0b4bf0 typo fix 2016-01-27 09:11:45 +01:00
Fabrice Marsaud
495b59c2e5 update dependency 2016-01-26 17:34:00 +01:00
Fabrice Marsaud
d6e1c13c39 Handler and remotes reworked 2016-01-26 17:28:27 +01:00
Fabrice Marsaud
f7f13b9e07 PR feedback 2 2016-01-26 09:47:47 +01:00
Fabrice Marsaud
62564d747f Errors moved from API to core 2016-01-25 17:29:18 +01:00
Fabrice Marsaud
1d5d59c4c0 Remote handler reworked 2016-01-25 17:01:14 +01:00
Fabrice Marsaud
e8380b8a12 PR feedback 2016-01-25 11:45:53 +01:00
Fabrice Marsaud
c304d9cc62 No vdi merge through smb 2016-01-25 11:45:53 +01:00
Fabrice Marsaud
aad4ebf287 Remote handlers refactored, and adding a smb handler 2016-01-25 11:45:53 +01:00
Olivier Lambert
6c2f48181c Merge pull request #210 from vatesfr/handle-objects-conflicts
Properly handle multiple XAPI objects with the same XO id.
2016-01-22 16:02:19 +01:00
Julien Fontanet
480b6ff7d6 Properly handle multiple XAPI objects with the same XO id.
When there is a conflict, the existing object keeps its place, but when
it is removed, the other object (which is in the waiting list) will
take its place.
2016-01-22 15:57:44 +01:00
Julien Fontanet
4bdd6f972c Remove node-inspector. 2016-01-21 16:44:42 +01:00
Olivier Lambert
6674d8456a Merge pull request #206 from vatesfr/olivierlambert-fixMigration
Correctly use destination host SR and network
2016-01-20 18:31:33 +01:00
Olivier Lambert
d1478ff694 select the correct migration network 2016-01-20 18:12:37 +01:00
Julien Fontanet
cb20d46b74 Merge pull request #205 from vatesfr/abhamonr-fix-avoid-errors-delta-backups
Delta backups: Fix various issues.
2016-01-20 17:36:33 +01:00
Olivier Lambert
9dd2538043 correctly use destination host SR and network 2016-01-20 17:26:47 +01:00
wescoeur
f25136a512 Avoid errors in delta backups. (fix)
- Wait the task end of vdi export.
- Now, in the error case of vdi backup,
  the current vdi snapshot is removed with catch(noop).
2016-01-20 17:19:41 +01:00
Julien Fontanet
03eb56ad2a Xapi#_updateObjectMapProperty(): do not hide remove errors. 2016-01-20 16:04:23 +01:00
Julien Fontanet
2508840701 4.12.1 2016-01-19 12:49:37 +01:00
Julien Fontanet
6e098f5a4f Merge pull request #203 from vatesfr/fix-scheduling
Scheduler: properly use Xo#runJobSequense() (fix vatesfr/xo-web#657).
2016-01-19 12:45:36 +01:00
Julien Fontanet
31b33406fd Scheduler: properly use Xo#runJobSequense() (fix vatesfr/xo-web#657). 2016-01-19 12:12:29 +01:00
Julien Fontanet
7ab7c763ed startup: ignore non existent paths in plugins lookup. 2016-01-19 11:49:07 +01:00
Julien Fontanet
06258e757a 4.12.0 2016-01-18 10:25:41 +01:00
Julien Fontanet
5919b43a21 @mixin(): compatibility with Node 0.12 (fix #202). 2016-01-18 10:18:02 +01:00
Julien Fontanet
7d4b9521e7 Merge pull request #199 from vatesfr/continuous-replication
Continuous VM replication.
2016-01-17 23:51:29 +01:00
Julien Fontanet
f9d2fd7997 Xapi: Ugly hack seems to be working. 2016-01-17 23:28:45 +01:00
Julien Fontanet
bdbc20c3c6 Xapi: fix private put() when length is known. 2016-01-17 21:05:18 +01:00
Julien Fontanet
69d6d03714 Better debugs in Xapi. 2016-01-17 21:03:19 +01:00
Julien Fontanet
f40e1e55b0 Xapi#importVdiContent(): revert to use Promise.all() instead of Promise.race(). 2016-01-17 12:52:41 +01:00
Julien Fontanet
b9082ed838 Xapi#deleteVm(): Correctly remove VDIs with more than one VBD on the same VM. 2016-01-17 12:52:04 +01:00
Julien Fontanet
4edfefa9a2 Homogenise task names. 2016-01-17 12:52:04 +01:00
Julien Fontanet
0f98ee5407 Xapi#importVdiContent(): better task name. 2016-01-17 12:50:44 +01:00
Julien Fontanet
7fdf119873 Temporarily disable the ugly put hack. 2016-01-17 12:50:44 +01:00
Julien Fontanet
3c054e6ea1 Various changes. 2016-01-17 12:50:42 +01:00
Julien Fontanet
98899ece72 Use $ to prefix injected params names. 2016-01-17 12:49:23 +01:00
Julien Fontanet
2061a006d0 Xapi#createDeltaVdi(): correctly set the source of cloned VDI. 2016-01-17 12:49:23 +01:00
Julien Fontanet
5496c2d7fd Various fixes. 2016-01-17 12:49:22 +01:00
Julien Fontanet
d6b862a4a9 Xapi#_createVif(): Various fixes. 2016-01-17 12:49:22 +01:00
Julien Fontanet
d581f8a852 Xapi#importDeltaVm(): explicit error when base VDI is not found. 2016-01-17 12:49:22 +01:00
Julien Fontanet
3a593ee35a Xapi#_createVm(): clearer type handling. 2016-01-17 12:49:22 +01:00
Julien Fontanet
415d34fdaa Xo#copyDeltaVm(): Cancel exports on failures. 2016-01-17 12:49:22 +01:00
Julien Fontanet
7d28191bb5 Xapi#exportDeltaVm(): full export if the base is not found. 2016-01-17 12:49:20 +01:00
Julien Fontanet
e2c7693370 Xapi#importVdiContent(): do not wait for connection closure. 2016-01-17 12:48:38 +01:00
Julien Fontanet
f17ff02f4d Continuous replication: do not rely on metadata import/export. 2016-01-17 12:48:35 +01:00
Julien Fontanet
225043e01d Properly identify last snapshot as future base. 2016-01-16 19:34:35 +01:00
Julien Fontanet
56f78349f8 Xen expects keys(other_config) to be snake or it will change them itself! 2016-01-16 19:34:35 +01:00
Julien Fontanet
8839d4f55a Delete exportDeltaVm() snapshot on failure. 2016-01-16 19:34:34 +01:00
Julien Fontanet
2562aec1d2 Missing space in utils.pDebug(). 2016-01-16 19:34:34 +01:00
Julien Fontanet
db2361be84 Fix createVbd(). 2016-01-16 19:34:34 +01:00
Julien Fontanet
d08fcbfef3 Various fixes. 2016-01-16 19:34:29 +01:00
Julien Fontanet
7601b93e65 Various fixes. 2016-01-16 19:19:51 +01:00
Julien Fontanet
1103ec40e0 Xapi#importDeltaVm(): clean after failure. 2016-01-16 19:19:51 +01:00
Julien Fontanet
af32c7e3db Properly exports vm.deltaCopy(). 2016-01-16 19:19:51 +01:00
Julien Fontanet
170918eb3b Initial continuous replication. 2016-01-16 19:19:51 +01:00
Julien Fontanet
a91e615a8d @deferrable.onSuccess() 2016-01-16 19:19:51 +01:00
Julien Fontanet
cc92c26fe3 Xapi#_importVdiContent() 2016-01-16 19:19:46 +01:00
Julien Fontanet
937135db32 Xapi#_exportVdi() 2016-01-16 18:57:15 +01:00
Julien Fontanet
01366558b4 Xapi#_deleteVbd() 2016-01-16 18:57:15 +01:00
Julien Fontanet
b0dbd54ea4 Xapi#_disconnectVbd() 2016-01-16 18:57:15 +01:00
Julien Fontanet
f113915307 Xapi#_updateObjectMapProperty() can remove a property. 2016-01-16 18:57:15 +01:00
Julien Fontanet
0a3c3d9bb1 Xapi#remoteCopyVm() falls back on local copy if possible. 2016-01-16 18:50:09 +01:00
Julien Fontanet
ba2e005c3e Merge pull request #201 from vatesfr/custom-http-request
Custom HTTP request implementation instead of got.
2016-01-16 18:31:21 +01:00
Julien Fontanet
b9ea52d65f Add missing space in forbidden operations description. 2016-01-16 18:27:23 +01:00
Julien Fontanet
f1e328d333 Better error handling in patch unzipping. 2016-01-16 18:13:13 +01:00
Julien Fontanet
23f1965398 Custom HTTP request implementation instead of got. 2016-01-16 18:13:04 +01:00
Olivier Lambert
fc82f185cb Merge pull request #200 from vatesfr/abhamonr-forever-forward-incremental-backup-fix
Old vdi bases must be removed at the backup end.
2016-01-15 14:31:04 +01:00
wescoeur
56b25f373f Old vdi bases must be removed at the backup end. 2016-01-15 14:20:11 +01:00
Olivier Lambert
1ac6add122 Merge pull request #196 from vatesfr/abhamonr-forever-forward-incremental-backup
Forever forward incremental backup (fix vatesfr/xo-web#576)
2016-01-15 14:13:02 +01:00
wescoeur
91b1a903f9 Fix rejected backup. 2016-01-15 13:48:15 +01:00
wescoeur
a8d6654ef5 Forever forward incremental backup (fix vatesfr/xo-web#576) 2016-01-15 13:12:05 +01:00
Olivier Lambert
63093b1be6 Merge pull request #198 from vatesfr/abhamonr-vbd-set-bootable-fix-getxapi
vbd.setBootable use xo.getXapi() instead of xo.getXAPI()
2016-01-14 16:28:16 +01:00
wescoeur
60abe8f37e vbd.setBootable use xo.getXapi() instead of xo.getXAPI() 2016-01-14 16:22:16 +01:00
Olivier Lambert
7ba3909aa1 Merge pull request #175 from vatesfr/abhamonr-button-bootable-disk
Add vbd.setBootable api call.
2016-01-14 16:04:30 +01:00
Julien Fontanet
eecdba2d05 Merge pull request #197 from vatesfr/deferrable-decorator
deferrable() decorator.
2016-01-14 14:33:20 +01:00
Julien Fontanet
7bdc005aa7 @deferrable() works with async functions. 2016-01-14 14:24:09 +01:00
Julien Fontanet
d46703fdc4 Cosmetic changes in decorators spec. 2016-01-14 11:58:18 +01:00
Julien Fontanet
e4aa85f603 Cosmetic changes in decorators. 2016-01-14 11:58:18 +01:00
Julien Fontanet
233124ef50 deferrable.onFailure() 2016-01-14 11:58:11 +01:00
Julien Fontanet
36a3012de2 deferrable() decorator. 2016-01-14 11:16:51 +01:00
Olivier Lambert
2b4ee96ed7 Fix issue vatesfr/xo-web/issues/643 2016-01-13 18:55:35 +01:00
Julien Fontanet
85a2afd55c Add --safe-mode which do not registers plugins. 2016-01-13 15:53:55 +01:00
Julien Fontanet
6cd0d8456a Fix plugins (broken by Xo split). 2016-01-13 15:22:14 +01:00
Julien Fontanet
7750a0a773 Integrate api/xo-mixins indexes to the build. 2016-01-13 15:21:03 +01:00
Julien Fontanet
a5364b9257 Camel case: Xo#getXAPI() → Xo#getXapi(). 2016-01-13 14:39:40 +01:00
Julien Fontanet
e0e7b1406d Fix backups listing (broken by Xo split). 2016-01-13 12:12:40 +01:00
Julien Fontanet
38b67a0002 Merge pull request #192 from vatesfr/mixins
Split Xo with mixins.
2016-01-13 11:47:49 +01:00
Julien Fontanet
18dd4f8a52 Print start/stop errors. 2016-01-13 11:40:52 +01:00
Julien Fontanet
879f9b4ea9 Remove listeners after start/stop. 2016-01-13 11:40:51 +01:00
Julien Fontanet
3db0dda67a Fix a race condition in the scheduler. 2016-01-13 11:40:51 +01:00
Julien Fontanet
ed9ee15b90 Expose Xo#scheduler. 2016-01-13 11:40:51 +01:00
Julien Fontanet
44ff85e8e9 Rename Xo {start,stop}{,ing} events. 2016-01-13 11:40:51 +01:00
Julien Fontanet
cb07e9ba11 Split Xo with mixins. 2016-01-13 11:40:48 +01:00
Julien Fontanet
bfe05ce5fc Merge pull request #184 from vatesfr/abhamonr-disable-vm-start-during-delta-import
Disable vm start during delta import and explicit notification.
2016-01-13 11:25:56 +01:00
wescoeur
64ee23cec0 Disable vm start during delta import and explicit notification. (fix vatesfr/xo-web#613) 2016-01-13 11:20:58 +01:00
Julien Fontanet
c022d3c4a4 Merge pull request #182 from vatesfr/abhamonr-properly-remove-vdi-backups
Only delete VDI exports when VM backup is successful.
2016-01-13 10:38:15 +01:00
wescoeur
69c764301f Only delete VDI exports when VM backup is successful (fix vatesfr/xo-web#644). 2016-01-13 10:33:44 +01:00
Julien Fontanet
2f777daef6 Merge pull request #168 from vatesfr/cleaner-xo-stop
xo-server should stop properly on SIGINT/SIGTERM.
2016-01-12 17:38:30 +01:00
Julien Fontanet
a10bf7330e xo-server should stop properly on SIGINT/SIGTERM. 2016-01-12 17:33:32 +01:00
Julien Fontanet
782bb5967d Update level-party to 3.0.4. 2016-01-12 15:17:15 +01:00
Olivier Lambert
aeb2f55f0d Merge pull request #191 from vatesfr/prevent-concurrent-schedule-runs
A schedule cannot have concurrent runs (fix vatesfr/xo-web#642).
2016-01-11 14:43:24 +01:00
Julien Fontanet
ae68749b1b A schedule cannot have concurrent runs (fix vatesfr/xo-web#642). 2016-01-11 14:00:52 +01:00
Julien Fontanet
a3c25d56a0 Update deps. 2016-01-08 18:39:59 +01:00
Julien Fontanet
d2b9cc8df9 Merge pull request #189 from vatesfr/olivierlambert-change-name-during-import
Change VM name during VM delta import. Fix vatesfr/xo-web/issues/641
2016-01-07 14:08:45 +01:00
Olivier Lambert
2027daa75c Change name during VM delta import. Fix vatesfr/xo-web/issues/641 2016-01-07 14:02:53 +01:00
Julien Fontanet
f3493a08bd Api#addMethod() returns a remover function. 2016-01-05 18:16:04 +01:00
Julien Fontanet
f3963269ae Initialize FAT buffer with null bytes. 2016-01-04 14:43:49 +01:00
Julien Fontanet
ae2212c245 Merge pull request #183 from vatesfr/pierre-delete-running-vm
Delete not halted VMs. (vatesfr/xo-web/issues/616)
2015-12-31 09:48:37 +01:00
Julien Fontanet
3a19ac4c93 Merge pull request #187 from vatesfr/olivierlambert-vif-deletion
VIF delete typo. Fix issue vatesfr/xo-web/issues/632
2015-12-30 20:01:49 +01:00
Olivier Lambert
666f546cf0 VIF delete typo. Fix issue vatesfr/xo-web/issues/632 2015-12-30 19:57:48 +01:00
Julien Fontanet
464f57d7da Merge pull request #186 from vatesfr/olivierlambert-custom-templates
add 'install_repository' support for vatesfr/xo-web/issues/627
2015-12-30 17:21:12 +01:00
Olivier Lambert
2a192f33a1 add 'install_repository' support for vatesfr/xo-web/issues/627 2015-12-30 17:12:26 +01:00
Julien Fontanet
9ca2674261 Make unhandled rejected promises visible on exit. 2015-12-29 10:39:26 +01:00
Julien Fontanet
24bc91dc0c Minor optimizations. 2015-12-23 13:57:29 +01:00
Julien Fontanet
cf2d5b502f Do not remove VDIs attached to other VMs. 2015-12-22 16:27:34 +01:00
Julien Fontanet
61450ef602 Typo. 2015-12-22 16:24:50 +01:00
Julien Fontanet
78f1d1738e Properly ignore snapshot deletion failures after export. 2015-12-22 16:24:00 +01:00
Pierre
9f595cf5f7 Delete not halted VMs. (See vatesfr/xo-web#616) 2015-12-22 15:45:13 +01:00
Julien Fontanet
25b8e49975 4.11.0 2015-12-22 13:35:24 +01:00
Julien Fontanet
d40086cd13 Merge branch 'next-release' into stable 2015-12-22 13:34:57 +01:00
Olivier Lambert
8f9d8d93b9 Merge pull request #181 from vatesfr/fix-vbd-state-after-metadata-import
Fix vbd state after metadata import
2015-12-22 12:23:38 +01:00
Julien Fontanet
1080c10004 Call VM.power_state_reset after a metadata import (fix vatesfr/xo-web#615). 2015-12-22 12:18:02 +01:00
Julien Fontanet
866aeca220 Revert "Snapshots running VM for metadata export (see vatesfr/xo-web#615)."
This reverts commit 121b3afc61.

It is not possible to export metadata of a snapshot.
2015-12-22 11:41:04 +01:00
Julien Fontanet
121b3afc61 Snapshots running VM for metadata export (see vatesfr/xo-web#615). 2015-12-22 11:26:03 +01:00
Julien Fontanet
e8406b04b4 Merge pull request #180 from vatesfr/fix-memory-issue-importVmBackup
Fix memory issue on Xo#importVmBackup().
2015-12-21 19:46:06 +01:00
Julien Fontanet
8e7fe81806 Disable async traces for now (fix vatesfr/xo-web#608). 2015-12-21 19:20:48 +01:00
Olivier Lambert
852807b5d7 Merge pull request #178 from vatesfr/abhamonr-incremental-backups-integration
Some corrections for integration.
2015-12-21 19:05:57 +01:00
Olivier Lambert
9928d47fa2 PR comment review 2015-12-21 19:00:36 +01:00
wescoeur
412a1bd62a Some corrections for delta integration in xo-web.
- List delta backups in subfolders.
- Fix unhandled exception. (ENOENT)
- ...
2015-12-21 17:48:34 +01:00
Julien Fontanet
b290520951 vm.import() accepts a SR id. 2015-12-21 11:13:42 +01:00
Olivier Lambert
dde677b6d3 do NOT backup a CD drive 2015-12-18 21:45:03 +01:00
Julien Fontanet
75030847bd Merge pull request #177 from vatesfr/abhamonr-fix-remote-importVm
The vm import call uses an SR instead of a host.
2015-12-18 17:23:48 +01:00
wescoeur
e7b9cb76bc The vm import call uses an SR instead of a host. 2015-12-18 17:18:39 +01:00
Olivier Lambert
e96c4c0dd3 restore accidentally removed code 2015-12-18 16:56:25 +01:00
Julien Fontanet
b553b3fa50 Merge pull request #176 from vatesfr/olivierlambert-xenstorefix
Fix undefined xenstore_data sent to XAPI
2015-12-18 16:06:50 +01:00
Olivier Lambert
c6fb924b8f Fix undefined xenstore_data sent to XAPI 2015-12-18 16:00:06 +01:00
Julien Fontanet
b13844c4a6 Merge pull request #170 from vatesfr/pierre-read-only-connection
Connection to a Xen Server in read-only mode. (Fix vatesfr/xo-web#439)
2015-12-18 12:09:34 +01:00
Pierre
ab6c83a3fc Connection to a Xen Server in read-only mode. (Fix vatesfr/xo-web#439)
`updateXenServer` applies changes in database and also changes the connection's read-only status if the client is connected to this server.
2015-12-18 11:59:54 +01:00
Julien Fontanet
7e0a97973f Merge pull request #163 from vatesfr/abhamonr-incremental-backups
Implement rolling delta VM backup and import. (vatesfr/xo-web#494)
2015-12-18 11:24:31 +01:00
wescoeur
6a8a79bba5 Implement rolling delta VM backup and import. (vatesfr/xo-web#494) 2015-12-18 11:20:13 +01:00
wescoeur
4a0c58c50a Add vbd.setBootable api call. (fix vatesfr/xo-web/#583) 2015-12-17 17:07:35 +01:00
Julien Fontanet
eb0c963332 Coding style. 2015-12-17 16:21:25 +01:00
Julien Fontanet
023fe82932 Merge pull request #150 from vatesfr/olivierlambert-configdrive
Generic CloudConfig Drive
2015-12-16 18:19:51 +01:00
Julien Fontanet
2e1a06c7bf Generic Cloud Config drive. Fix vatesfr/xo-web/issues/549 2015-12-16 18:12:47 +01:00
Julien Fontanet
8b6961d40c VDI.resize{,_online} expect a string contrary to what the doc says. 2015-12-16 16:33:55 +01:00
Julien Fontanet
53351877da Fix typo: size → parseSize. 2015-12-16 15:48:33 +01:00
Julien Fontanet
522445894e Always parse sizes. 2015-12-16 15:36:01 +01:00
Julien Fontanet
550351bb16 Merge pull request #174 from vatesfr/abhamonr-import-vdi-content
Import VDI content is implemented.
2015-12-16 15:06:32 +01:00
wescoeur
328adbb56f Import VDI content is implemented. 2015-12-16 14:59:53 +01:00
Julien Fontanet
44a36bbba3 Use human-format v6 (much nicer with incorrect casing). 2015-12-16 13:17:07 +01:00
Julien Fontanet
4cc4adeda6 disk.{create,resize}() accept integers for size. 2015-12-16 11:34:46 +01:00
Julien Fontanet
c14e6f2a63 disk.resize() accepts human readable size. 2015-12-16 11:34:46 +01:00
Julien Fontanet
cfcb2d54d8 Merge pull request #172 from vatesfr/olivierlambert-vdimove
Allow offline VDI moving. Fix vatesfr/xo-web#591
2015-12-16 10:46:33 +01:00
Julien Fontanet
010d60e504 Coding style. 2015-12-16 09:46:39 +01:00
Julien Fontanet
eabde07ff6 Remove incorrect export. 2015-12-16 09:46:39 +01:00
Olivier Lambert
be19ad5f2a Allow offline VDI moving. Fix https://github.com/vatesfr/xo-web/issues/591 2015-12-15 19:07:20 +01:00
Julien Fontanet
d1d0816961 Merge pull request #171 from vatesfr/olivierlambert-existingdisks
allow edition of existing disks during VM creation
2015-12-15 17:56:38 +01:00
Olivier Lambert
7be7170504 allow edition of existing disks during VM creation 2015-12-15 17:51:03 +01:00
Julien Fontanet
478272f515 Merge pull request #167 from vatesfr/remoteCopyVm-set-nameLabel-asap
Xapi#remoteCopyVm() sets name label ASAP.
2015-12-15 13:49:13 +01:00
Julien Fontanet
09af6958c8 Xapi#remoteCopyVm() sets name label ASAP. 2015-12-15 11:35:03 +01:00
Julien Fontanet
adb3a2b64e Merge pull request #169 from vatesfr/olivierlambert-vdiresize
Clean VDI resize support
2015-12-14 17:09:12 +01:00
Olivier Lambert
1ee7e842dc Clean VDI resize support 2015-12-14 17:05:59 +01:00
Julien Fontanet
b080a57406 Merge pull request #162 from vatesfr/pierre-aborting-vm-export-cancels-operation
VM-export interruption properly transferred to Xen
2015-12-11 18:18:05 +01:00
Julien Fontanet
7c017e345a Minor updates. 2015-12-11 18:02:41 +01:00
Pierre
4b91343155 VM-export interruption properly transferred to Xen. (Fix vatesfr/xo-web#490)
When the connection with the client is lost, the export task is cancelled and the connection is closed.
As the task is over, the snapshot used for the export is deleted.

Cancelling the task is useless as it is cancelled by Xen
2015-12-11 18:02:41 +01:00
Julien Fontanet
02a3df8ad0 Merge pull request #164 from vatesfr/olivierlambert-import-metadata
Support VM metadata import. Fix vatesfr/xo-web#579
2015-12-11 17:01:47 +01:00
Olivier Lambert
6a7080f4ee Support VM metadata import. Fix vatesfr/xo-web#579 2015-12-11 16:56:56 +01:00
Julien Fontanet
4547042577 Fix issue in utils.createRawObject(). 2015-12-10 14:06:22 +01:00
Julien Fontanet
0e39eea7f8 Always use noop from utils. 2015-12-10 14:06:22 +01:00
Olivier Lambert
1e5aefea63 Merge pull request #161 from vatesfr/olivierlambert-set-default-sr
Set default sr
2015-12-10 12:14:27 +01:00
Olivier Lambert
02c4f333b0 minor fixes 2015-12-10 12:08:30 +01:00
Olivier Lambert
1e8fc4020b Add setDefaultSr, fix #572 2015-12-10 11:01:05 +01:00
Olivier Lambert
f969701ac1 Merge pull request #155 from vatesfr/pierre-plugins-autodiscovery
Installed plugins are detected even if the config file does not show …
2015-12-09 20:41:46 +01:00
Olivier Lambert
b236243857 Merge pull request #160 from vatesfr/marsaudf-no-backup-if-unmounted
Rolling backup fails immediately if remote is disabled. Fix https://github.com/vatesfr/xo-web/issues/561
2015-12-09 19:13:01 +01:00
Fabrice Marsaud
39edc64922 Rolling backup fails immediately if remote is disabled 2015-12-09 17:18:50 +01:00
Pierre
f22ece403f Installed plugins are automatically detected when the server starts. They are no more loaded from the config file.
Plugins (ie installed modules of which names start with `xo-server-`) are automatically detected and registered from `${__dirname}/node_modules/` and `/usr/local/lib/node_modules/`.
2015-12-09 16:56:48 +01:00
Julien Fontanet
f5423bb314 Merge pull request #158 from vatesfr/olivierlambert-recoveryStart
Generic recovery start (PV or HVM)
2015-12-07 18:11:03 +01:00
Julien Fontanet
b1e5945ebe Xapi#startVm() and Xapi#startVmOnCd() (fix vatesfr/xo-web#563). 2015-12-07 18:02:55 +01:00
Julien Fontanet
76b5be8171 Revert "Freeze config object before configuring plugin."
This reverts commit 789f51bd2a.

The change introduced issues with nodemailer used in xo-server-transport-email.
2015-12-03 16:00:32 +01:00
Julien Fontanet
804bca2041 Merge pull request #148 from vatesfr/abhamonr-purge-plugin-config
The plugins configurations can be cleaned.
2015-12-03 14:37:32 +01:00
Julien Fontanet
10602b47b4 4.10.2 2015-12-03 12:20:48 +01:00
Julien Fontanet
8d7c522596 Merge pull request #154 from vatesfr/julienf-fix-patches-for-6.1
Fix patches handling for XenServer 6.1.
2015-12-03 12:20:05 +01:00
Julien Fontanet
3ac455c5a7 Fix patches handling for XenServer 6.1. 2015-12-03 12:16:56 +01:00
Julien Fontanet
2b19a459df Merge pull request #152 from vatesfr/julienf-handle-xapiToXo-failures
Handle XAPI to XO objects failures.
2015-12-03 12:15:29 +01:00
Julien Fontanet
41ba2d9bf6 Properly schedule retry for the next loop. 2015-12-03 12:10:29 +01:00
Julien Fontanet
a7b5eb69d3 Handle XAPI to XO objects failures. 2015-12-03 11:13:00 +01:00
Julien Fontanet
67c209bb5e Properly handle the case where the pool object is not here yet. 2015-12-03 10:27:23 +01:00
Julien Fontanet
a6d436d9ea 4.10.1 2015-12-02 17:45:30 +01:00
Julien Fontanet
652c784e13 Update xen-api to 0.6.8 (fix vatesfr/xo-web#552). 2015-12-02 17:41:32 +01:00
wescoeur
a0a3b7a158 The plugins configurations can be cleaned. 2015-12-02 16:15:23 +01:00
Julien Fontanet
789f51bd2a Freeze config object before configuring plugin. 2015-12-02 15:20:38 +01:00
Olivier Lambert
c2f1a74f96 Merge pull request #149 from vatesfr/julienf-fix-vm-migration-collision
Use a different id (opaque ref) for VMs which are under migration.
2015-11-30 17:40:31 +01:00
Julien Fontanet
a9ed7a3f3b Use a different id (opaque ref) for VMs which are under migration. 2015-11-30 17:34:12 +01:00
Julien Fontanet
b348e88a5f 4.10.0 2015-11-27 14:24:08 +01:00
Julien Fontanet
1615395866 Merge branch 'next-release' into stable 2015-11-27 14:23:57 +01:00
Julien Fontanet
e483abcad0 Merge pull request #130 from vatesfr/marsaudf-generic-job-schedules
Enhancements to prepare generic job scheduling
2015-11-27 12:03:27 +01:00
Fabrice Marsaud
12b6760f6e Extend job & schedule API with job & schedule names, and job.runSequence 2015-11-27 11:56:37 +01:00
Julien Fontanet
6fde6d7eac Expose plugin config validation errors (vatesfr/xo-web#530). 2015-11-26 16:18:39 +01:00
Julien Fontanet
a7ef891217 Merge pull request #146 from vatesfr/abhamonr-abort-vm-export-import
Start VM export only when necessary.
2015-11-26 16:10:59 +01:00
wescoeur
8f22dfe87b Start VM export only when necessary. 2015-11-26 15:42:07 +01:00
Julien Fontanet
2dc7fab39a Merge pull request #134 from vatesfr/abhamonr-backup-jobs-notifications
Xo event `job:terminated` on job termination.
2015-11-26 15:29:39 +01:00
wescoeur
74cb2e3c63 'job:terminated' signal is emitted after job execution. (with one executionStatus object) 2015-11-26 11:07:37 +01:00
Julien Fontanet
6e763a58f1 Expose UUIDs on all Xapi objects which have one. 2015-11-25 16:53:25 +01:00
Julien Fontanet
a8e72ed410 Merge pull request #140 from vatesfr/julienf-xo-defineProperty
Xo#defineProperty() to properly expose attributes
2015-11-25 14:16:55 +01:00
Julien Fontanet
fcdfd5f936 Merge pull request #139 from vatesfr/abhamonr-remember-disabled-servers
Save enabled state for each server
2015-11-25 13:54:44 +01:00
Julien Fontanet
f1faa463c1 Xo#defineProperty() allows (plugins) to define property on Xo instance 2015-11-25 12:27:09 +01:00
wescoeur
a0f4952b54 Save enabled state for each server 2015-11-25 12:15:15 +01:00
Olivier Lambert
bd82ded07d automatically set autopoweron on the pool 2015-11-25 12:06:16 +01:00
Julien Fontanet
016e17dedb Merge pull request #145 from vatesfr/julienf-pool-autopoweron
Xapi#setPoolProperties() supports autoPowerOn.
2015-11-25 12:03:23 +01:00
Julien Fontanet
5cd3e1b368 Xapi#setPoolProperties() supports autoPowerOn. 2015-11-25 11:03:33 +01:00
Julien Fontanet
b2b39458da Merge pull request #144 from vatesfr/julienf-fix-redis-items-updates
Fix items updates in Redis.
2015-11-25 10:57:54 +01:00
wescoeur
556bbe394d Add: Job and params-vector schemas. 2015-11-25 10:44:40 +01:00
Julien Fontanet
07288b3f26 Fix items updates in Redis. 2015-11-25 10:39:50 +01:00
Julien Fontanet
90f79b7708 Shallow copy the configuration object before configuring a plugin. (fix vatesfr/xo-web#513) 2015-11-24 11:25:52 +01:00
Julien Fontanet
e220786a20 Merge pull request #142 from vatesfr/marsaudf-parameter-fix
Fix: group.getAll() has no params.
2015-11-24 10:49:51 +01:00
Fabrice Marsaud
f16b993294 Removed unwanted API parameter 2015-11-24 10:42:43 +01:00
Julien Fontanet
c241bea3bf Add vendor config file. 2015-11-20 18:28:32 +01:00
Julien Fontanet
084654cd3c Merge branch 'stable' into next-release 2015-11-20 18:17:08 +01:00
Olivier Lambert
ab3577c369 Merge pull request #136 from vatesfr/olivierlambert-cloudconfig
Cloud config management for CoreOS
2015-11-20 17:30:53 +01:00
Olivier Lambert
6efb90c94e Merge pull request #135 from vatesfr/pierre-emergency-host-shutdown
emergencyHostShutdown(hostId) : suspends all the VMs running on the host
2015-11-20 17:30:27 +01:00
Olivier Lambert
cbcc400eb4 Cloud config management for CoreOS 2015-11-20 17:12:12 +01:00
Julien Fontanet
15aec7da7e vm.clone() requires permissions on SRs. 2015-11-20 16:19:33 +01:00
Julien Fontanet
46535e4f56 Utils: pAll() & pReflect() 2015-11-20 15:31:55 +01:00
Julien Fontanet
e3f945c079 Minor fixes. 2015-11-20 14:34:07 +01:00
Julien Fontanet
04239c57fe pSettle() returns an object for an object. 2015-11-20 11:42:49 +01:00
Pierre
ad4439ed55 emergencyHostShutdown(hostId) : suspends all the VMs running on the host and then shuts the host down 2015-11-20 11:26:03 +01:00
Julien Fontanet
9fe3ef430f More tests for pSettle(). 2015-11-20 10:43:56 +01:00
Julien Fontanet
ff30773097 Fix pSettle() to accept non-promises. 2015-11-20 10:43:56 +01:00
Julien Fontanet
f7531d1e18 pSettle() accepts either arrays or objects. 2015-11-20 10:43:56 +01:00
Olivier Lambert
658008ab64 add comment for quiesce 2015-11-19 16:03:28 +01:00
Olivier Lambert
b089d63112 allow snapshots on halted VMs 2015-11-19 16:01:08 +01:00
Julien Fontanet
ee9b1b7f57 Merge pull request #133 from vatesfr/abhamonr-validate-config-plugins-registration
Avoid plugin loading if config is not valid
2015-11-19 14:40:33 +01:00
wescoeur
cd0fc8176f Avoid plugin loading if config is not valid 2015-11-19 13:51:25 +01:00
Julien Fontanet
8e291e3e46 Define CoffeeScript API modules as ES6 (fix default value). 2015-11-19 13:13:53 +01:00
Julien Fontanet
e3024076cd Merge pull request #131 from vatesfr/abhamonr-delete-user-with-tokens
remove tokens on user deletion
2015-11-19 12:55:38 +01:00
wescoeur
6105874abc remove tokens on user deletion 2015-11-19 12:40:31 +01:00
Julien Fontanet
1855f7829d Advanced setting: verboseApiLogsOnErrors. 2015-11-19 11:11:54 +01:00
Julien Fontanet
456e8bd9c0 New FIXME. 2015-11-19 11:11:54 +01:00
Julien Fontanet
d5f2efac26 Merge pull request #132 from vatesfr/abhamonr-add-leveldown-dep
Add leveldown dep
2015-11-19 10:45:17 +01:00
wescoeur
21e692623c Add leveldown dep 2015-11-19 10:40:11 +01:00
Julien Fontanet
80e9589af5 Initial --repair command in xo-server-logs. 2015-11-19 10:11:01 +01:00
Julien Fontanet
b2b9ae0677 Quick fix for groups.getAll() attributes. 2015-11-19 09:26:34 +01:00
Julien Fontanet
63122905e6 Comments. 2015-11-18 18:36:27 +01:00
Julien Fontanet
f99b6f4646 Merge pull request #123 from vatesfr/abhamonr-logs-cli
CLI to explore xo-server's logs.
2015-11-18 17:01:26 +01:00
wescoeur
39090c2a22 Logs CLI:
- Can print logs for one namespace or all namespaces
- Can sort logs since one start timestamp/until one end timestamp
- The sort results can be limited by one value
2015-11-18 16:57:28 +01:00
Julien Fontanet
76baa8c791 Minor fix. 2015-11-18 16:40:08 +01:00
Julien Fontanet
74e4b9d6d2 Merge imports. 2015-11-18 15:24:38 +01:00
Julien Fontanet
bbfc5039f7 Merge pull request #129 from vatesfr/abhamonr-ghost-user
Avoid ghost user and ghost group.
2015-11-18 15:14:31 +01:00
wescoeur
b2fd694483 Avoid ghost user and ghost group. 2015-11-18 15:02:54 +01:00
Julien Fontanet
b03f38ff22 Include user name in console proxy logs. 2015-11-17 16:24:22 +01:00
Julien Fontanet
fe48811047 Include user name in API logs. 2015-11-17 16:24:07 +01:00
Julien Fontanet
bd9396b031 Ability to not create users on first sign in (fix #497). 2015-11-17 15:59:31 +01:00
Julien Fontanet
f0497ec16d Move default configuration to config.json. 2015-11-17 15:49:45 +01:00
Julien Fontanet
7e9e179fa7 Minor fixes. 2015-11-17 15:01:58 +01:00
Julien Fontanet
de62464ad8 Improve security: check token for console access. 2015-11-17 15:01:58 +01:00
Julien Fontanet
f6911ca195 Merge pull request #128 from vatesfr/olivierlambert-setbootorder
Rename vm.bootOrder() to vm.setBootOrder() and ensure VM is HVM.
2015-11-17 15:00:03 +01:00
Olivier Lambert
aec09ed8d2 Rename vm.bootOrder() to vm.setBootOrder() and ensure VM is HVM 2015-11-17 14:56:15 +01:00
Julien Fontanet
51a983e460 Logs clients IPs for WebSocket connections. 2015-11-17 13:24:07 +01:00
Julien Fontanet
0eb46e29c7 Merge pull request #122 from vatesfr/fix-poolPatches-removal
Properly remove objects for which `xo.id !== xapi.$id`.
2015-11-17 11:00:05 +01:00
Julien Fontanet
5ee11c7b6b Properly remove objects for which xo.id !== xapi.$id. 2015-11-17 10:33:56 +01:00
Olivier Lambert
b55accd76f add tag for quiesced snapshots 2015-11-16 12:41:16 +01:00
Julien Fontanet
fef2be1bc7 Merge pull request #125 from vatesfr/olivierlambert-snapquiesce
Add VM snapshot quiesce support
2015-11-16 10:59:39 +01:00
Olivier Lambert
0b3858f91d Add VM snapshot quiesce support 2015-11-16 10:55:51 +01:00
Julien Fontanet
d07ea1b337 Explicit Node versions compatibility. 2015-11-16 10:31:28 +01:00
106 changed files with 11078 additions and 4855 deletions

View File

@@ -1,15 +0,0 @@
{
"comments": false,
"compact": true,
"optional": [
// Experimental features.
// "minification.constantFolding",
// "minification.deadCodeElimination",
"es7.asyncFunctions",
"es7.decorators",
"es7.exportExtensions",
"es7.functionBind",
"runtime"
]
}

4
.gitignore vendored
View File

@@ -1,5 +1,9 @@
/.nyc_output/
/dist/
/node_modules/
/src/api/index.js
/src/xapi/mixins/index.js
/src/xo-mixins/index.js
npm-debug.log
npm-debug.log.*

View File

@@ -1,5 +1,6 @@
/examples/
example.js
example.js.map
*.example.js
*.example.js.map

View File

@@ -3,7 +3,6 @@ node_js:
# - 'stable'
- '4'
- '0.12'
- '0.10'
# Use containers.
# http://docs.travis-ci.com/user/workers/container-based-infrastructure/

View File

@@ -37,6 +37,4 @@ $ npm run dev
## How to report a bug?
If you are certain the bug is exclusively related to XO-Server, you may use the [bugtracker of this repository](https://github.com/vatesfr/xo-server/issues).
Otherwise, please consider using the [bugtracker of the general repository](https://github.com/vatesfr/xo/issues).
All bug reports should go into the [bugtracker of xo-web](https://github.com/vatesfr/xo-web/issues).

View File

@@ -1,7 +1,12 @@
Error.stackTraceLimit = 100
// Async stacks.
try { require('trace') } catch (_) {}
//
// Disabled for now as it cause a huge memory usage with
// fs.createReadStream().
// TODO: find a way to reenable.
//
// try { require('trace') } catch (_) {}
// Removes internal modules.
try {

View File

@@ -7,4 +7,25 @@
// Better stack traces if possible.
require('../better-stacks')
// Use Bluebird for all promises as it provides better performance and
// less memory usage.
global.Promise = require('bluebird')
// Make unhandled rejected promises visible.
process.on('unhandledRejection', function (reason) {
console.warn('[Warn] Possibly unhandled rejection:', reason && reason.stack || reason)
})
;(function (EE) {
var proto = EE.prototype
var emit = proto.emit
proto.emit = function patchedError (event, error) {
if (event === 'error' && !this.listenerCount(event)) {
return console.warn('[Warn] Unhandled error event:', error && error.stack || error)
}
return emit.apply(this, arguments)
}
})(require('events').EventEmitter)
require('exec-promise')(require('../'))

10
bin/xo-server-logs Executable file
View File

@@ -0,0 +1,10 @@
#!/usr/bin/env node
'use strict'
// ===================================================================
// Better stack traces if possible.
require('../better-stacks')
require('exec-promise')(require('../dist/logs-cli').default)

40
config.json Normal file
View File

@@ -0,0 +1,40 @@
// Vendor config: DO NOT TOUCH!
//
// See sample.config.yaml to override.
{
"http": {
"listen": [
{
"port": 80
}
],
"mounts": {},
// Ciphers to use.
//
// These are the default ciphers in Node 4.2.6, we are setting
// them explicitly for older Node versions.
"ciphers": "ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-AES256-GCM-SHA384:DHE-RSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-SHA256:DHE-RSA-AES128-SHA256:ECDHE-RSA-AES256-SHA384:DHE-RSA-AES256-SHA384:ECDHE-RSA-AES256-SHA256:DHE-RSA-AES256-SHA256:HIGH:!aNULL:!eNULL:!EXPORT:!DES:!RC4:!MD5:!PSK:!SRP:!CAMELLIA",
// Tell Node to respect the cipher order.
"honorCipherOrder": true,
// Specify to use at least TLSv1.1.
// See: https://github.com/certsimple/minimum-tls-version
"secureOptions": 117440512
},
"datadir": "/var/lib/xo-server/data",
// Should users be created on first sign in?
//
// Necessary for external authentication providers.
"createUserOnFirstSignin": true,
// Whether API logs should contains the full request/response on
// errors.
//
// This is disabled by default for performance (lots of data) and
// security concerns (avoiding sensitive data in the logs) but can
// be turned for investigation by the administrator.
"verboseApiLogsOnErrors": false
}

View File

@@ -7,13 +7,16 @@ var gulp = require('gulp')
var babel = require('gulp-babel')
var coffee = require('gulp-coffee')
var plumber = require('gulp-plumber')
var rimraf = require('rimraf')
var sourceMaps = require('gulp-sourcemaps')
var watch = require('gulp-watch')
var join = require('path').join
// ===================================================================
var SRC_DIR = __dirname + '/src'
var DIST_DIR = __dirname + '/dist'
var SRC_DIR = join(__dirname, 'src')
var DIST_DIR = join(__dirname, 'dist')
var PRODUCTION = process.argv.indexOf('--production') !== -1
@@ -36,6 +39,10 @@ function src (patterns) {
// ===================================================================
gulp.task(function clean (cb) {
rimraf(DIST_DIR, cb)
})
gulp.task(function buildCoffee () {
return src('**/*.coffee')
.pipe(sourceMaps.init())
@@ -60,4 +67,4 @@ gulp.task(function buildEs6 () {
// ===================================================================
gulp.task('build', gulp.parallel('buildCoffee', 'buildEs6'))
gulp.task('build', gulp.series('clean', gulp.parallel('buildCoffee', 'buildEs6')))

View File

@@ -8,4 +8,4 @@ if (process.env.DEBUG === undefined) {
}
// Import the real main module.
module.exports = require('./dist')
module.exports = require('./dist').default

View File

@@ -1,6 +1,6 @@
{
"name": "xo-server",
"version": "4.9.2",
"version": "5.1.2",
"license": "AGPL-3.0",
"description": "Server part of Xen-Orchestra",
"keywords": [
@@ -11,7 +11,11 @@
],
"homepage": "http://github.com/vatesfr/xo-server/",
"bugs": {
"url": "https://github.com/vatesfr/xo-server/issues"
"url": "https://github.com/vatesfr/xo-web/issues"
},
"repository": {
"type": "git",
"url": "git://github.com/vatesfr/xo-server.git"
},
"author": "Julien Fontanet <julien.fontanet@vates.fr>",
"preferGlobal": true,
@@ -19,94 +23,98 @@
"better-stacks.js",
"bin/",
"dist/",
"config.json",
"index.js",
"signin.jade"
"signin.pug"
],
"directories": {
"bin": "bin"
},
"repository": {
"type": "git",
"url": "git://github.com/vatesfr/xo-server.git"
"engines": {
"node": ">=0.12"
},
"dependencies": {
"@marsaud/smb2": "^0.7.1",
"@marsaud/smb2-promise": "^0.2.0",
"app-conf": "^0.4.0",
"babel-runtime": "^5",
"base64url": "1.0.4",
"babel-runtime": "^6.5.0",
"base64url": "^2.0.0",
"blocked": "^1.1.0",
"bluebird": "^2.9.14",
"bluebird": "^3.1.1",
"body-parser": "^1.13.3",
"connect-flash": "^0.1.1",
"cookie": "^0.3.0",
"cookie-parser": "^1.3.5",
"cron": "^1.0.9",
"d3-time-format": "^0.1.3",
"d3-time-format": "^2.0.0",
"debug": "^2.1.3",
"escape-string-regexp": "^1.0.3",
"event-to-promise": "^0.4.0",
"exec-promise": "^0.5.1",
"event-to-promise": "^0.7.0",
"exec-promise": "^0.6.1",
"execa": "^0.4.0",
"express": "^4.13.3",
"express-session": "^1.11.3",
"fs-extra": "^0.26.2",
"fs-promise": "^0.3.1",
"got": "^5.0.0",
"graceful-fs": "^4.1.2",
"fatfs": "^0.10.3",
"fs-extra": "^0.30.0",
"fs-promise": "^0.4.1",
"get-stream": "^2.1.0",
"hashy": "~0.4.2",
"helmet": "^2.0.0",
"highland": "^2.5.1",
"http-server-plus": "^0.5.1",
"human-format": "^0.5.0",
"http-proxy": "^1.13.2",
"http-server-plus": "^0.6.4",
"human-format": "^0.6.0",
"is-my-json-valid": "^2.12.2",
"jade": "^1.11.0",
"js-yaml": "^3.2.7",
"json5": "^0.4.0",
"json-rpc-peer": "^0.11.0",
"json5": "^0.4.0",
"julien-f-source-map-support": "0.0.0",
"julien-f-unzip": "^0.2.1",
"kindof": "^2.0.0",
"level": "^1.3.0",
"level-party": "^3.0.4",
"level-sublevel": "^6.5.2",
"lodash.assign": "^3.0.0",
"lodash.bind": "^3.0.0",
"lodash.difference": "^3.2.0",
"lodash.endswith": "^3.0.2",
"lodash.filter": "^3.1.0",
"lodash.find": "^3.0.0",
"lodash.findindex": "^3.0.0",
"lodash.foreach": "^3.0.1",
"lodash.has": "^3.0.0",
"lodash.includes": "^3.1.1",
"lodash.isarray": "^3.0.0",
"lodash.isempty": "^3.0.0",
"lodash.isfunction": "^3.0.1",
"lodash.isobject": "^3.0.0",
"lodash.isstring": "^3.0.0",
"lodash.keys": "^3.0.4",
"lodash.map": "^3.0.0",
"lodash.pick": "^3.0.0",
"lodash.sortby": "^3.1.4",
"lodash.startswith": "^3.0.1",
"leveldown": "^1.4.2",
"lodash": "^4.13.1",
"make-error": "^1",
"micromatch": "^2.3.2",
"minimist": "^1.2.0",
"moment-timezone": "^0.5.4",
"ms": "^0.7.1",
"multikey-hash": "^1.0.1",
"ndjson": "^1.4.3",
"partial-stream": "0.0.0",
"passport": "^0.3.0",
"passport-local": "^1.0.0",
"proxy-http-request": "0.1.0",
"promise-toolbox": "^0.3.2",
"proxy-agent": "^2.0.0",
"pug": "^2.0.0-alpha6",
"redis": "^2.0.1",
"schema-inspector": "^1.5.1",
"semver": "^5.1.0",
"serve-static": "^1.9.2",
"stack-chain": "^1.3.3",
"struct-fu": "^1.0.0",
"through2": "^2.0.0",
"trace": "^2.0.1",
"ws": "~0.8.0",
"xen-api": "^0.6.4",
"ws": "^1.1.1",
"xen-api": "^0.9.0",
"xml2js": "~0.4.6",
"xo-collection": "^0.4.0"
"xo-acl-resolver": "^0.2.1",
"xo-collection": "^0.4.0",
"xo-remote-parser": "^0.3"
},
"devDependencies": {
"babel-eslint": "^4.0.10",
"babel-eslint": "^6.0.4",
"babel-plugin-transform-decorators-legacy": "^1.3.4",
"babel-plugin-transform-runtime": "^6.5.2",
"babel-preset-es2015": "^6.5.0",
"babel-preset-stage-0": "^6.5.0",
"chai": "^3.0.0",
"dependency-check": "^2.4.0",
"ghooks": "^1.0.3",
"gulp": "git://github.com/gulpjs/gulp#4.0",
"gulp-babel": "^5",
"gulp-babel": "^6",
"gulp-coffee": "^2.3.1",
"gulp-plumber": "^1.0.0",
"gulp-sourcemaps": "^1.5.1",
@@ -114,22 +122,44 @@
"leche": "^2.1.1",
"mocha": "^2.2.1",
"must": "^0.13.1",
"node-inspector": "^0.12.2",
"nyc": "^7.0.0",
"rimraf": "^2.5.2",
"sinon": "^1.14.1",
"standard": "^5.2.1"
"standard": "^7.0.0"
},
"scripts": {
"build": "gulp build --production",
"dev": "gulp build",
"build": "npm run build-indexes && gulp build --production",
"depcheck": "dependency-check ./package.json",
"build-indexes": "./tools/generate-index src/api src/xapi/mixins src/xo-mixins",
"dev": "npm run build-indexes && gulp build",
"dev-test": "mocha --opts .mocha.opts --watch --reporter=min \"dist/**/*.spec.js\"",
"lint": "standard",
"postrelease": "git checkout master && git merge --ff-only stable && git checkout next-release && git merge --ff-only stable",
"posttest": "npm run lint && npm run depcheck",
"prepublish": "npm run build",
"prerelease": "git checkout next-release && git pull --ff-only && git checkout stable && git pull --ff-only && git merge next-release",
"release": "npm version",
"start": "node bin/xo-server",
"test": "mocha --opts .mocha.opts \"dist/**/*.spec.js\"",
"posttest": "npm run lint && dependency-check ./package.json"
"test": "nyc mocha --opts .mocha.opts \"dist/**/*.spec.js\""
},
"babel": {
"plugins": [
"transform-decorators-legacy",
"transform-runtime"
],
"presets": [
"stage-0",
"es2015"
]
},
"config": {
"ghooks": {
"commit-msg": "npm test"
}
},
"standard": {
"ignore": [
"dist/**"
"dist"
],
"parser": "babel-eslint"
}

View File

@@ -66,6 +66,8 @@ http:
#socket: './http.sock'
# Basic HTTPS.
#
# You can find the list of possible options there https://nodejs.org/docs/latest/api/tls.html#tls.createServer
# -
# # The only difference is the presence of the certificate and the
# # key.
@@ -83,7 +85,7 @@ http:
# # certificate authority up to the root.
# #
# # Default: undefined
# certificate: './certificate.pem'
# cert: './certificate.pem'
# # File containing the private key (PEM format).
# #
@@ -93,6 +95,10 @@ http:
# # Default: undefined
# key: './key.pem'
# If set to true, all HTTP traffic will be redirected to the first
# HTTPs configuration.
#redirectToHttps: true
# List of files/directories which will be served.
mounts:
#'/': '/path/to/xo-web/dist/'
@@ -101,13 +107,19 @@ http:
proxies:
# '/any/url': 'http://localhost:54722'
# HTTP proxy configuration used by xo-server to fetch resources on the
# Internet.
#
# See: https://github.com/TooTallNate/node-proxy-agent#maps-proxy-protocols-to-httpagent-implementations
#httpProxy: 'http://jsmith:qwerty@proxy.lan:3128'
#=====================================================================
# Connection to the Redis server.
redis:
# Syntax: tcp://[db[:password]@]hostname[:port]
# Syntax: redis://[db[:password]@]hostname[:port]
#
# Default: tcp://localhost:6379
# Default: redis://localhost:6379
#uri: ''
# Directory containing the database of XO.

View File

@@ -7,6 +7,7 @@ html
title Xen Orchestra
meta(name = 'author' content = 'Vates SAS')
link(rel = 'stylesheet' href = 'styles/main.css')
link(rel = 'stylesheet' href = 'v4/styles/main.css')
body
.container
.row-login

View File

@@ -1,123 +0,0 @@
// These global variables are not a problem because the algorithm is
// synchronous.
let permissionsByObject
let getObject
// -------------------------------------------------------------------
const authorized = () => true // eslint-disable-line no-unused-vars
const forbiddden = () => false // eslint-disable-line no-unused-vars
function and (...checkers) { // eslint-disable-line no-unused-vars
return function (object, permission) {
for (const checker of checkers) {
if (!checker(object, permission)) {
return false
}
}
return true
}
}
function or (...checkers) { // eslint-disable-line no-unused-vars
return function (object, permission) {
for (const checker of checkers) {
if (checker(object, permission)) {
return true
}
}
return false
}
}
// -------------------------------------------------------------------
function checkMember (memberName) {
return function (object, permission) {
const member = object[memberName]
return checkAuthorization(member, permission)
}
}
function checkSelf ({ id }, permission) {
const permissionsForObject = permissionsByObject[id]
return (
permissionsForObject &&
permissionsForObject[permission]
)
}
// ===================================================================
const checkAuthorizationByTypes = {
host: or(checkSelf, checkMember('$poolId')),
message: checkMember('$object'),
network: or(checkSelf, checkMember('$poolId')),
SR: or(checkSelf, checkMember('$poolId')),
task: checkMember('$host'),
VBD: checkMember('VDI'),
// Access to a VDI is granted if the user has access to the
// containing SR or to a linked VM.
VDI (vdi, permission) {
// Check authorization for the containing SR.
if (checkAuthorization(vdi.$SR, permission)) {
return true
}
// Check authorization for each of the connected VMs.
for (const { VM: vm } of vdi.$VBDs) {
if (checkAuthorization(vm, permission)) {
return true
}
}
return false
},
VIF: or(checkMember('$network'), checkMember('$VM')),
VM: or(checkSelf, checkMember('$container')),
'VM-snapshot': checkMember('$snapshot_of'),
'VM-template': authorized
}
function checkAuthorization (objectId, permission) {
const object = getObject(objectId)
const checker = checkAuthorizationByTypes[object.type] || checkSelf
return checker(object, permission)
}
// -------------------------------------------------------------------
export default function (
permissionsByObject_,
getObject_,
permissions
) {
// Assign global variables.
permissionsByObject = permissionsByObject_
getObject = getObject_
try {
for (const [objectId, permission] of permissions) {
if (!checkAuthorization(objectId, permission)) {
return false
}
}
return true
} finally {
// Free the global variables.
permissionsByObject = getObject = null
}
}

View File

@@ -3,7 +3,7 @@ import {JsonRpcError} from 'json-rpc-peer'
// ===================================================================
// Export standard JSON-RPC errors.
export {
export { // eslint-disable-line no-duplicate-imports
InvalidJson,
InvalidParameters,
InvalidRequest,
@@ -50,3 +50,21 @@ export class AlreadyAuthenticated extends JsonRpcError {
super('already authenticated', 4)
}
}
// -------------------------------------------------------------------
export class ForbiddenOperation extends JsonRpcError {
constructor (operation, reason) {
super(`forbidden operation: ${operation}`, 5, reason)
}
}
// -------------------------------------------------------------------
// To be used with a user-readable message.
// The message can be destined to be displayed to the front-end user.
export class GenericError extends JsonRpcError {
constructor (message) {
super(message, 6)
}
}

View File

@@ -1,9 +1,9 @@
import createDebug from 'debug'
const debug = createDebug('xo:api')
import getKeys from 'lodash.keys'
import isFunction from 'lodash.isfunction'
import getKeys from 'lodash/keys'
import kindOf from 'kindof'
import moment from 'moment-timezone'
import ms from 'ms'
import schemaInspector from 'schema-inspector'
@@ -13,13 +13,29 @@ import {
NoSuchObject,
Unauthorized
} from './api-errors'
import {
version as xoServerVersion
} from '../package.json'
import {
createRawObject,
forEach
forEach,
isFunction,
noop
} from './utils'
// ===================================================================
const PERMISSIONS = {
none: 0,
read: 1,
write: 2,
admin: 3
}
const hasPermission = (user, permission) => (
PERMISSIONS[user.permission] >= PERMISSIONS[permission]
)
// FIXME: this function is specific to XO and should not be defined in
// this file.
function checkPermission (method) {
@@ -42,7 +58,7 @@ function checkPermission (method) {
return
}
if (!user.hasPermission(permission)) {
if (!hasPermission(user, permission)) {
throw new Unauthorized()
}
}
@@ -78,7 +94,10 @@ function resolveParams (method, params) {
throw new Unauthorized()
}
const userId = user.get('id')
const userId = user.id
// Do not alter the original object.
params = { ...params }
const permissions = []
forEach(resolve, ([param, types, permission = 'administrate'], key) => {
@@ -95,7 +114,12 @@ function resolveParams (method, params) {
// Register this new value.
params[key] = object
permissions.push([ object.id, permission ])
// Permission default to 'administrate' but can be set to a falsy
// value (except null or undefined which trigger the default
// value) to simply do a resolve without checking any permissions.
if (permission) {
permissions.push([ object.id, permission ])
}
})
return this.hasPermissions(userId, permissions).then(success => {
@@ -112,13 +136,13 @@ function resolveParams (method, params) {
function getMethodsInfo () {
const methods = {}
forEach(this.api._methods, function (method, name) {
this[name] = {
forEach(this.api._methods, (method, name) => {
methods[name] = {
description: method.description,
params: method.params || {},
permission: method.permission
}
}, methods)
})
return methods
}
@@ -126,6 +150,11 @@ getMethodsInfo.description = 'returns the signatures of all available API method
// -------------------------------------------------------------------
const getServerVersion = () => xoServerVersion
getServerVersion.description = 'return the version of xo-server'
// -------------------------------------------------------------------
const getVersion = () => '0.1'
getVersion.description = 'API version (unstable)'
@@ -160,14 +189,25 @@ methodSignature.description = 'returns the signature of an API method'
// ===================================================================
const getServerTimezone = (tz => () => tz)(moment.tz.guess())
getServerTimezone.description = 'return the timezone server'
// ===================================================================
export default class Api {
constructor ({context} = {}) {
constructor ({
context,
verboseLogsOnErrors
} = {}) {
this._methods = createRawObject()
this._verboseLogsOnErrors = verboseLogsOnErrors
this.context = context
this.addMethods({
system: {
getMethodsInfo,
getServerVersion,
getServerTimezone,
getVersion,
listMethods,
methodSignature
@@ -176,12 +216,25 @@ export default class Api {
}
addMethod (name, method) {
this._methods[name] = method
const methods = this._methods
if (name in methods) {
throw new Error(`API method ${name} already exists`)
}
methods[name] = method
let unset = () => {
delete methods[name]
unset = noop
}
return () => unset()
}
addMethods (methods) {
let base = ''
forEach(methods, function addMethod (method, name) {
const addMethod = (method, name) => {
name = base + name
if (isFunction(method)) {
@@ -191,9 +244,10 @@ export default class Api {
const oldBase = base
base = name + '.'
forEach(method, addMethod, this)
forEach(method, addMethod)
base = oldBase
}, this)
}
forEach(methods, addMethod)
}
async call (session, name, params) {
@@ -204,24 +258,46 @@ export default class Api {
throw new MethodNotFound(name)
}
const context = Object.create(this.context)
context.api = this // Used by system.*().
context.session = session
// FIXME: it can cause issues if there any property assignments in
// XO methods called from the API.
const context = Object.create(this.context, {
api: { // Used by system.*().
value: this
},
session: {
value: session
}
})
// FIXME: too coupled with XO.
// Fetch and inject the current user.
const userId = session.get('user_id', undefined)
if (userId) {
context.user = await context._getUser(userId)
}
context.user = userId && await context.getUser(userId)
const userName = context.user
? context.user.email
: '(unknown user)'
try {
await checkPermission.call(context, method)
// API methods are in a namespace.
// Some methods use the namespace or an id parameter like:
//
// vm.detachPci vm=<string>
// vm.ejectCd id=<string>
//
// The goal here is to standardize the calls by always providing
// an id parameter when possible to simplify calls to the API.
if (params && params.id === undefined) {
const namespace = name.slice(0, name.indexOf('.'))
params.id = params[namespace]
}
checkParams(method, params)
await resolveParams.call(context, method, params)
const resolvedParams = await resolveParams.call(context, method, params)
let result = await method.call(context, params)
let result = await method.call(context, resolvedParams)
// If nothing was returned, consider this operation a success
// and return true.
@@ -230,7 +306,8 @@ export default class Api {
}
debug(
'%s(...) [%s] ==> %s',
'%s | %s(...) [%s] ==> %s',
userName,
name,
ms(Date.now() - startTime),
kindOf(result)
@@ -238,16 +315,28 @@ export default class Api {
return result
} catch (error) {
debug(
'%s(...) [%s] =!> %s',
name,
ms(Date.now() - startTime),
error
)
if (this._verboseLogsOnErrors) {
debug(
'%s | %s(%j) [%s] =!> %s',
userName,
name,
params,
ms(Date.now() - startTime),
error
)
const stack = error && error.stack
if (stack) {
console.error(stack)
const stack = error && error.stack
if (stack) {
console.error(stack)
}
} else {
debug(
'%s | %s(...) [%s] =!> %s',
userName,
name,
ms(Date.now() - startTime),
error
)
}
throw error

View File

@@ -1,26 +0,0 @@
#!/usr/bin/env sh
# TODO: this generation should probably be automated and integrated
# into the build system.
set -e -u
cd "$(dirname "$(which "$0")")"
{
printf %s '//
// This file has been generated by ./.generate-index.sh
//
// It MUST be re-generated each time an API namespace (read file) is
// added or removed.
//
'
for f in *.js *.coffee
do
base=${f%.*}
[ "$base" != index ] || continue
printf '%s\n' "export * as $base from './$base'"
done | sort
} > index.js

View File

@@ -1,5 +1,5 @@
export async function get () {
return await this.getAllAcls()
return /* await */ this.getAllAcls()
}
get.permission = 'admin'
@@ -9,7 +9,7 @@ get.description = 'get existing ACLs'
// -------------------------------------------------------------------
export async function getCurrentPermissions () {
return await this.getPermissionsForUser(this.session.get('user_id'))
return /* await */ this.getPermissionsForUser(this.session.get('user_id'))
}
getCurrentPermissions.permission = ''

View File

@@ -3,9 +3,9 @@ import {parseSize} from '../utils'
// ===================================================================
export async function create ({name, size, sr}) {
const vdi = await this.getXAPI(sr).createVdi(parseSize(size), {
const vdi = await this.getXapi(sr).createVdi(parseSize(size), {
name_label: name,
sr: sr.id
sr: sr._xapiId
})
return vdi.$id
}
@@ -14,10 +14,27 @@ create.description = 'create a new disk on a SR'
create.params = {
name: { type: 'string' },
size: { type: 'string' },
size: { type: ['integer', 'string'] },
sr: { type: 'string' }
}
create.resolve = {
sr: ['sr', 'SR', 'administrate']
}
// -------------------------------------------------------------------
export async function resize ({ vdi, size }) {
await this.getXapi(vdi).resizeVdi(vdi._xapiId, parseSize(size))
}
resize.description = 'resize an existing VDI'
resize.params = {
id: { type: 'string' },
size: { type: ['integer', 'string'] }
}
resize.resolve = {
vdi: ['id', ['VDI', 'VDI-snapshot'], 'administrate']
}

View File

@@ -1,8 +1,6 @@
export async function register ({vm}) {
await this.getXAPI(vm).registerDockerContainer(vm.id)
await this.getXapi(vm).registerDockerContainer(vm._xapiId)
}
register.permission = 'admin'
register.description = 'Register the VM for Docker management'
register.params = {
@@ -16,10 +14,8 @@ register.resolve = {
// -----------------------------------------------------------------------------
export async function deregister ({vm}) {
await this.getXAPI(vm).unregisterDockerContainer(vm.id)
await this.getXapi(vm).unregisterDockerContainer(vm._xapiId)
}
deregister.permission = 'admin'
deregister.description = 'Deregister the VM for Docker management'
deregister.params = {
@@ -33,28 +29,26 @@ deregister.resolve = {
// -----------------------------------------------------------------------------
export async function start ({vm, container}) {
await this.getXAPI(vm).startDockerContainer(vm.id, container)
await this.getXapi(vm).startDockerContainer(vm._xapiId, container)
}
export async function stop ({vm, container}) {
await this.getXAPI(vm).stopDockerContainer(vm.id, container)
await this.getXapi(vm).stopDockerContainer(vm._xapiId, container)
}
export async function restart ({vm, container}) {
await this.getXAPI(vm).restartDockerContainer(vm.id, container)
await this.getXapi(vm).restartDockerContainer(vm._xapiId, container)
}
export async function pause ({vm, container}) {
await this.getXAPI(vm).pauseDockerContainer(vm.id, container)
await this.getXapi(vm).pauseDockerContainer(vm._xapiId, container)
}
export async function unpause ({vm, container}) {
await this.getXAPI(vm).unpauseDockerContainer(vm.id, container)
await this.getXapi(vm).unpauseDockerContainer(vm._xapiId, container)
}
for (let fn of [start, stop, restart, pause, unpause]) {
fn.permission = 'admin'
fn.params = {
vm: { type: 'string' },
container: { type: 'string' }

View File

@@ -27,14 +27,11 @@ delete_.params = {
// -------------------------------------------------------------------
export async function getAll () {
return await this._groups.get()
return /* await */ this.getAllGroups()
}
delete_.description = 'returns all the existing group'
delete_.permission = 'admin'
delete_.params = {
id: {type: 'string'}
}
getAll.description = 'returns all the existing group'
getAll.permission = 'admin'
// -------------------------------------------------------------------

View File

@@ -1,28 +1,29 @@
$debug = (require 'debug') 'xo:api:vm'
$find = require 'lodash.find'
$findIndex = require 'lodash.findindex'
$forEach = require 'lodash.foreach'
endsWith = require 'lodash.endswith'
got = require('got')
startsWith = require 'lodash.startswith'
$find = require 'lodash/find'
$findIndex = require 'lodash/findIndex'
$forEach = require 'lodash/forEach'
endsWith = require 'lodash/endsWith'
startsWith = require 'lodash/startsWith'
{coroutine: $coroutine} = require 'bluebird'
{parseXml, promisify} = require '../utils'
{
extractProperty,
parseXml,
promisify
} = require '../utils'
#=====================================================================
set = $coroutine (params) ->
{host} = params
xapi = @getXAPI host
set = ({
host,
for param, field of {
'name_label'
'name_description'
}
continue unless param of params
yield xapi.call "host.set_#{field}", host.ref, params[param]
return true
# TODO: use camel case.
name_label: nameLabel,
name_description: nameDescription
}) ->
return @getXapi(host).setHostProperties(host._xapiId, {
nameLabel,
nameDescription
})
set.description = 'changes the properties of an host'
@@ -43,18 +44,19 @@ exports.set = set
#---------------------------------------------------------------------
restart = $coroutine ({host}) ->
xapi = @getXAPI host
yield xapi.call 'host.disable', host.ref
yield xapi.call 'host.reboot', host.ref
return true
# FIXME: set force to false per default when correctly implemented in
# UI.
restart = ({host, force = true}) ->
return @getXapi(host).rebootHost(host._xapiId, force)
restart.description = 'restart the host'
restart.params = {
id: { type: 'string' }
id: { type: 'string' },
force: {
type: 'boolean',
optional: true
}
}
restart.resolve = {
@@ -65,12 +67,8 @@ exports.restart = restart
#---------------------------------------------------------------------
restartAgent = $coroutine ({host}) ->
xapi = @getXAPI host
yield xapi.call 'host.restart_agent', host.ref
return true
restartAgent = ({host}) ->
return @getXapi(host).restartHostAgent(host._xapiId)
restartAgent.description = 'restart the Xen agent on the host'
@@ -79,7 +77,7 @@ restartAgent.params = {
}
restartAgent.resolve = {
host: ['id', 'host', 'operate'],
host: ['id', 'host', 'administrate'],
}
# TODO camel case
@@ -87,12 +85,8 @@ exports.restart_agent = restartAgent
#---------------------------------------------------------------------
start = $coroutine ({host}) ->
xapi = @getXAPI host
yield xapi.call 'host.power_on', host.ref
return true
start = ({host}) ->
return @getXapi(host).powerOnHost(host._xapiId)
start.description = 'start the host'
@@ -108,13 +102,8 @@ exports.start = start
#---------------------------------------------------------------------
stop = $coroutine ({host}) ->
xapi = @getXAPI host
yield xapi.call 'host.disable', host.ref
yield xapi.call 'host.shutdown', host.ref
return true
stop = ({host}) ->
return @getXapi(host).shutdownHost(host._xapiId)
stop.description = 'stop the host'
@@ -130,12 +119,8 @@ exports.stop = stop
#---------------------------------------------------------------------
detach = $coroutine ({host}) ->
xapi = @getXAPI host
yield xapi.call 'pool.eject', host.ref
return true
detach = ({host}) ->
return @getXapi(host).ejectHostFromPool(host._xapiId)
detach.description = 'eject the host of a pool'
@@ -151,12 +136,8 @@ exports.detach = detach
#---------------------------------------------------------------------
enable = $coroutine ({host}) ->
xapi = @getXAPI host
yield xapi.call 'host.enable', host.ref
return true
enable = ({host}) ->
return @getXapi(host).enableHost(host._xapiId)
enable.description = 'enable to create VM on the host'
@@ -172,12 +153,8 @@ exports.enable = enable
#---------------------------------------------------------------------
disable = $coroutine ({host}) ->
xapi = @getXAPI host
yield xapi.call 'host.disable', host.ref
return true
disable = ({host}) ->
return @getXapi(host).disableHost(host._xapiId)
disable.description = 'disable to create VM on the hsot'
@@ -191,49 +168,13 @@ disable.resolve = {
exports.disable = disable
#---------------------------------------------------------------------
createNetwork = $coroutine ({host, name, description, pif, mtu, vlan}) ->
xapi = @getXAPI host
description = description ? 'Created with Xen Orchestra'
network_ref = yield xapi.call 'network.create', {
name_label: name,
name_description: description,
MTU: mtu ? '1500'
other_config: {}
}
if pif?
vlan = vlan ? '0'
pif = @getObject pif, 'PIF'
yield xapi.call 'pool.create_VLAN_from_PIF', pif.ref, network_ref, vlan
return true
createNetwork.params = {
host: { type: 'string' }
name: { type: 'string' }
description: { type: 'string', optional: true }
pif: { type: 'string', optional: true }
mtu: { type: 'string', optional: true }
vlan: { type: 'string', optional: true }
}
createNetwork.resolve = {
host: ['host', 'host', 'administrate'],
}
createNetwork.permission = 'admin'
exports.createNetwork = createNetwork
#---------------------------------------------------------------------
# Returns an array of missing new patches in the host
# Returns an empty array if up-to-date
# Throws an error if the host is not running the latest XS version
listMissingPatches = ({host}) ->
return @getXAPI(host).listMissingPoolPatchesOnHost(host.id)
return @getXapi(host).listMissingPoolPatchesOnHost(host._xapiId)
listMissingPatches.params = {
host: { type: 'string' }
@@ -250,7 +191,7 @@ listMissingPatches.description = 'return an array of missing new patches in the
#---------------------------------------------------------------------
installPatch = ({host, patch: patchUuid}) ->
return @getXAPI(host).installPoolPatchOnHost(patchUuid, host.id)
return @getXapi(host).installPoolPatchOnHost(patchUuid, host._xapiId)
installPatch.description = 'install a patch on an host'
@@ -268,7 +209,7 @@ exports.installPatch = installPatch
#---------------------------------------------------------------------
installAllPatches = ({host}) ->
return @getXAPI(host).installAllPoolPatchesOnHost(host.id)
return @getXapi(host).installAllPoolPatchesOnHost(host._xapiId)
installAllPatches.description = 'install all the missing patches on a host'
@@ -284,9 +225,25 @@ exports.installAllPatches = installAllPatches
#---------------------------------------------------------------------
stats = $coroutine ({host, granularity}) ->
stats = yield @getXapiHostStats(host, granularity)
return stats
emergencyShutdownHost = ({host}) ->
return @getXapi(host).emergencyShutdownHost(host._xapiId)
emergencyShutdownHost.description = 'suspend all VMs and shutdown host'
emergencyShutdownHost.params = {
host: { type: 'string' }
}
emergencyShutdownHost.resolve = {
host: ['host', 'host', 'administrate']
}
exports.emergencyShutdownHost = emergencyShutdownHost
#---------------------------------------------------------------------
stats = ({host, granularity}) ->
return @getXapiHostStats(host, granularity)
stats.description = 'returns statistic of the host'
@@ -303,3 +260,9 @@ stats.resolve = {
}
exports.stats = stats;
#=====================================================================
Object.defineProperty(exports, '__esModule', {
value: true
})

View File

@@ -1,35 +0,0 @@
//
// This file has been generated by ./.generate-index.sh
//
// It MUST be re-generated each time an API namespace (read file) is
// added or removed.
//
export * as acl from './acl'
export * as disk from './disk'
export * as docker from './docker'
export * as group from './group'
export * as host from './host'
export * as job from './job'
export * as log from './log'
export * as message from './message'
export * as pbd from './pbd'
export * as pif from './pif'
export * as plugin from './plugin'
export * as pool from './pool'
export * as remote from './remote'
export * as role from './role'
export * as schedule from './schedule'
export * as scheduler from './scheduler'
export * as server from './server'
export * as session from './session'
export * as sr from './sr'
export * as tag from './tag'
export * as task from './task'
export * as test from './test'
export * as token from './token'
export * as user from './user'
export * as vbd from './vbd'
export * as vdi from './vdi'
export * as vif from './vif'
export * as vm from './vm'
export * as xo from './xo'

View File

@@ -1,14 +1,14 @@
// FIXME so far, no acls for jobs
export async function getAll () {
return await this.getAllJobs()
return /* await */ this.getAllJobs()
}
getAll.permission = 'admin'
getAll.description = 'Gets all available jobs'
export async function get (id) {
return await this.getJob(id)
return /* await */ this.getJob(id)
}
get.permission = 'admin'
@@ -27,6 +27,7 @@ create.params = {
job: {
type: 'object',
properties: {
name: {type: 'string', optional: true},
type: {type: 'string'},
key: {type: 'string'},
method: {type: 'string'},
@@ -47,7 +48,8 @@ create.params = {
}
}
}
}
},
optional: true
}
}
}
@@ -64,6 +66,7 @@ set.params = {
type: 'object',
properties: {
id: {type: 'string'},
name: {type: 'string', optional: true},
type: {type: 'string'},
key: {type: 'string'},
method: {type: 'string'},
@@ -84,7 +87,8 @@ set.params = {
}
}
}
}
},
optional: true
}
}
}
@@ -101,3 +105,13 @@ delete_.params = {
}
export {delete_ as delete}
export async function runSequence ({idSequence}) {
await this.runJobSequence(idSequence)
}
runSequence.permission = 'admin'
runSequence.description = 'Runs jobs sequentially, in the provided order'
runSequence.params = {
idSequence: {type: 'array', items: {type: 'string'}}
}

View File

@@ -1,5 +1,5 @@
export async function get ({namespace}) {
const logger = this.getLogger(namespace)
const logger = await this.getLogger(namespace)
return new Promise((resolve, reject) => {
const logs = {}
@@ -16,3 +16,23 @@ export async function get ({namespace}) {
}
get.description = 'returns logs list for one namespace'
get.params = {
namespace: { type: 'string' }
}
get.permission = 'admin'
// -------------------------------------------------------------------
async function delete_ ({namespace, id}) {
const logger = await this.getLogger(namespace)
logger.del(id)
}
delete_.description = 'deletes one or several logs from a namespace'
delete_.params = {
id: { type: [ 'array', 'string' ] },
namespace: { type: 'string' }
}
delete_.permission = 'admin'
export {delete_ as delete}

View File

@@ -1,5 +1,5 @@
async function delete_ ({message}) {
await this.getXAPI(message).call('message.destroy', message.ref)
async function delete_ ({ message }) {
await this.getXapi(message).call('message.destroy', message._xapiRef)
}
export {delete_ as delete}

38
src/api/network.js Normal file
View File

@@ -0,0 +1,38 @@
export async function create ({ pool, name, description, pif, mtu = 1500, vlan = 0 }) {
return this.getXapi(pool).createNetwork({
name,
description,
pifId: pif && this.getObject(pif, 'PIF')._xapiId,
mtu: +mtu,
vlan: +vlan
})
}
create.params = {
pool: { type: 'string' },
name: { type: 'string' },
description: { type: 'string', optional: true },
pif: { type: 'string', optional: true },
mtu: { type: ['integer', 'string'], optional: true },
vlan: { type: ['integer', 'string'], optional: true }
}
create.resolve = {
pool: ['pool', 'pool', 'administrate']
}
create.permission = 'admin'
// =================================================================
export async function delete_ ({ network }) {
return this.getXapi(network).deleteNetwork(network._xapiId)
}
export {delete_ as delete}
delete_.params = {
id: { type: 'string' }
}
delete_.resolve = {
network: ['id', 'network', 'administrate']
}

View File

@@ -5,7 +5,7 @@
async function delete_ ({PBD}) {
// TODO: check if PBD is attached before
await this.getXAPI(PBD).call('PBD.destroy', PBD.ref)
await this.getXapi(PBD).call('PBD.destroy', PBD._xapiRef)
}
export {delete_ as delete}
@@ -20,9 +20,8 @@ delete_.resolve = {
// ===================================================================
// Disconnect
export async function disconnect ({PBD}) {
// TODO: check if PBD is attached before
await this.getXAPI(PBD).call('PBD.unplug', PBD.ref)
export async function disconnect ({ pbd }) {
return this.getXapi(pbd).unplugPbd(pbd._xapiId)
}
disconnect.params = {
@@ -30,7 +29,7 @@ disconnect.params = {
}
disconnect.resolve = {
PBD: ['id', 'PBD', 'administrate']
pbd: ['id', 'PBD', 'administrate']
}
// ===================================================================
@@ -38,7 +37,7 @@ disconnect.resolve = {
export async function connect ({PBD}) {
// TODO: check if PBD is attached before
await this.getXAPI(PBD).call('PBD.plug', PBD.ref)
await this.getXapi(PBD).call('PBD.plug', PBD._xapiRef)
}
connect.params = {

View File

@@ -1,9 +1,11 @@
// TODO: too low level, move into host.
// ===================================================================
// Delete
async function delete_ ({PIF}) {
async function delete_ ({pif}) {
// TODO: check if PIF is attached before
await this.getXAPI(PIF).call('PIF.destroy', PIF.ref)
await this.getXapi(pif).call('PIF.destroy', pif._xapiRef)
}
export {delete_ as delete}
@@ -12,15 +14,15 @@ delete_.params = {
}
delete_.resolve = {
PIF: ['id', 'PIF', 'administrate']
pif: ['id', 'PIF', 'administrate']
}
// ===================================================================
// Disconnect
export async function disconnect ({PIF}) {
export async function disconnect ({pif}) {
// TODO: check if PIF is attached before
await this.getXAPI(PIF).call('PIF.unplug', PIF.ref)
await this.getXapi(pif).call('PIF.unplug', pif._xapiRef)
}
disconnect.params = {
@@ -28,14 +30,14 @@ disconnect.params = {
}
disconnect.resolve = {
PIF: ['id', 'PIF', 'administrate']
pif: ['id', 'PIF', 'administrate']
}
// ===================================================================
// Connect
export async function connect ({PIF}) {
export async function connect ({pif}) {
// TODO: check if PIF is attached before
await this.getXAPI(PIF).call('PIF.plug', PIF.ref)
await this.getXapi(pif).call('PIF.plug', pif._xapiRef)
}
connect.params = {
@@ -43,5 +45,24 @@ connect.params = {
}
connect.resolve = {
PIF: ['id', 'PIF', 'administrate']
pif: ['id', 'PIF', 'administrate']
}
// ===================================================================
// Reconfigure IP
export async function reconfigureIp ({ pif, mode = 'DHCP', ip, netmask, gateway, dns }) {
await this.getXapi(pif).call('PIF.reconfigure_ip', pif._xapiRef, mode, ip, netmask, gateway, dns)
}
reconfigureIp.params = {
id: { type: 'string', optional: true },
mode: { type: 'string', optional: true },
ip: { type: 'string', optional: true },
netmask: { type: 'string', optional: true },
gateway: { type: 'string', optional: true },
dns: { type: 'string', optional: true }
}
reconfigureIp.resolve = {
pif: ['id', 'PIF', 'administrate']
}

View File

@@ -1,5 +1,5 @@
export async function get () {
return await this.getPlugins()
return /* await */ this.getPlugins()
}
get.description = 'returns a list of all installed plugins'
@@ -86,3 +86,19 @@ unload.params = {
}
unload.permission = 'admin'
// -------------------------------------------------------------------
export async function purgeConfiguration ({ id }) {
await this.purgePluginConfiguration(id)
}
purgeConfiguration.description = 'removes a plugin configuration'
purgeConfiguration.params = {
id: {
type: 'string'
}
}
purgeConfiguration.permission = 'admin'

View File

@@ -1,12 +1,18 @@
import {JsonRpcError} from '../api-errors'
import {extractProperty} from '../utils'
import {GenericError} from '../api-errors'
// ===================================================================
export async function set (params) {
const pool = extractProperty(params, 'pool')
export async function set ({
pool,
await this.getXAPI(pool).setPoolProperties(params)
// TODO: use camel case.
name_description: nameDescription,
name_label: nameLabel
}) {
await this.getXapi(pool).setPoolProperties({
nameDescription,
nameLabel
})
}
set.params = {
@@ -29,8 +35,27 @@ set.resolve = {
// -------------------------------------------------------------------
export async function setDefaultSr ({pool, sr}) {
await this.getXapi(pool).setDefaultSr(sr._xapiId)
}
setDefaultSr.params = {
pool: {
type: 'string'
},
sr: {
type: 'string'
}
}
setDefaultSr.resolve = {
pool: ['pool', 'pool', 'administrate'],
sr: ['sr', 'SR']
}
// -------------------------------------------------------------------
export async function installPatch ({pool, patch: patchUuid}) {
await this.getXAPI(pool).installPoolPatchOnAllHosts(patchUuid)
await this.getXapi(pool).installPoolPatchOnAllHosts(patchUuid)
}
installPatch.params = {
@@ -49,14 +74,14 @@ installPatch.resolve = {
// -------------------------------------------------------------------
async function handlePatchUpload (req, res, {pool}) {
const {headers: {['content-length']: contentLength}} = req
const contentLength = req.headers['content-length']
if (!contentLength) {
res.writeHead(411)
res.end('Content length is mandatory')
return
}
await this.getXAPI(pool).uploadPoolPatch(req, contentLength)
await this.getXapi(pool).uploadPoolPatch(req, contentLength)
}
export async function uploadPatch ({pool}) {
@@ -82,10 +107,10 @@ export {uploadPatch as patch}
export async function mergeInto ({ source, target, force }) {
try {
await this.mergeXenPools(source.id, target.id, force)
await this.mergeXenPools(source._xapiId, target._xapiId, force)
} catch (e) {
// FIXME: should we expose plain XAPI error messages?
throw new JsonRpcError(e.message)
throw new GenericError(e.message)
}
}
@@ -99,3 +124,22 @@ mergeInto.resolve = {
source: ['source', 'pool', 'administrate'],
target: ['target', 'pool', 'administrate']
}
// -------------------------------------------------------------------
export async function getLicenseState ({pool}) {
return this.getXapi(pool).call(
'pool.get_license_state',
pool._xapiId.$ref,
)
}
getLicenseState.params = {
pool: {
type: 'string'
}
}
getLicenseState.resolve = {
pool: ['pool', 'pool', 'administrate']
}

View File

@@ -1,12 +1,12 @@
export async function getAll () {
return await this.getAllRemotes()
return this.getAllRemotes()
}
getAll.permission = 'admin'
getAll.description = 'Gets all existing fs remote points'
export async function get (id) {
return await this.getRemote(id)
export async function get ({id}) {
return this.getRemote(id)
}
get.permission = 'admin'
@@ -15,8 +15,18 @@ get.params = {
id: {type: 'string'}
}
export async function list (id) {
return await this.listRemote(id)
export async function test ({id}) {
return this.testRemote(id)
}
test.permission = 'admin'
test.description = 'Performs a read/write matching test on a remote point'
test.params = {
id: {type: 'string'}
}
export async function list ({id}) {
return this.listRemoteBackups(id)
}
list.permission = 'admin'
@@ -26,7 +36,7 @@ list.params = {
}
export async function create ({name, url}) {
return await this.createRemote({name, url})
return this.createRemote({name, url})
}
create.permission = 'admin'
@@ -49,22 +59,6 @@ set.params = {
enabled: {type: 'boolean', optional: true}
}
export async function importVm ({id, file, host}) {
await this.importVmFromRemote(id, file, host)
}
importVm.permission = 'admin'
importVm.description = 'Imports a VM into host, from a file found in the chosen remote'
importVm.params = {
id: {type: 'string'},
file: {type: 'string'},
host: {type: 'string'}
}
importVm.resolve = {
host: ['host', 'host', 'administrate']
}
async function delete_ ({id}) {
await this.removeRemote(id)
}

229
src/api/resource-set.js Normal file
View File

@@ -0,0 +1,229 @@
import {
Unauthorized
} from '../api-errors'
// ===================================================================
export function create ({ name, subjects, objects, limits }) {
return this.createResourceSet(name, subjects, objects, limits)
}
create.permission = 'admin'
create.params = {
name: {
type: 'string'
},
subjects: {
type: 'array',
items: {
type: 'string'
},
optional: true
},
objects: {
type: 'array',
items: {
type: 'string'
},
optional: true
},
limits: {
type: 'object',
optional: true
}
}
// -------------------------------------------------------------------
function delete_ ({ id }) {
return this.deleteResourceSet(id)
}
export { delete_ as delete }
delete_.permission = 'admin'
delete_.params = {
id: {
type: 'string'
}
}
// -------------------------------------------------------------------
export function set ({ id, name, subjects, objects, limits }) {
return this.updateResourceSet(id, {
limits,
name,
objects,
subjects
})
}
set.permission = 'admin'
set.params = {
id: {
type: 'string'
},
name: {
type: 'string',
optional: true
},
subjects: {
type: 'array',
items: {
type: 'string'
},
optional: true
},
objects: {
type: 'array',
items: {
type: 'string'
},
optional: true
},
limits: {
type: 'object',
optional: true
}
}
// -------------------------------------------------------------------
export function get ({ id }) {
return this.getResourceSet(id)
}
get.permission = 'admin'
get.params = {
id: {
type: 'string'
}
}
// -------------------------------------------------------------------
export async function getAll () {
const { user } = this
if (!user) {
throw new Unauthorized()
}
return this.getAllResourceSets(user.id)
}
// -------------------------------------------------------------------
export function addObject ({ id, object }) {
return this.addObjectToResourceSet(object, id)
}
addObject.permission = 'admin'
addObject.params = {
id: {
type: 'string'
},
object: {
type: 'string'
}
}
// -------------------------------------------------------------------
export function removeObject ({ id, object }) {
return this.removeObjectFromResourceSet(object, id)
}
removeObject.permission = 'admin'
removeObject.params = {
id: {
type: 'string'
},
object: {
type: 'string'
}
}
// -------------------------------------------------------------------
export function addSubject ({ id, subject }) {
return this.addSubjectToResourceSet(subject, id)
}
addSubject.permission = 'admin'
addSubject.params = {
id: {
type: 'string'
},
subject: {
type: 'string'
}
}
// -------------------------------------------------------------------
export function removeSubject ({ id, subject }) {
return this.removeSubjectFromResourceSet(subject, id)
}
removeSubject.permission = 'admin'
removeSubject.params = {
id: {
type: 'string'
},
subject: {
type: 'string'
}
}
// -------------------------------------------------------------------
export function addLimit ({ id, limitId, quantity }) {
return this.addLimitToResourceSet(limitId, quantity, id)
}
addLimit.permission = 'admin'
addLimit.params = {
id: {
type: 'string'
},
limitId: {
type: 'string'
},
quantity: {
type: 'integer'
}
}
// -------------------------------------------------------------------
export function removeLimit ({ id, limitId }) {
return this.removeLimitFromResourceSet(limitId, id)
}
removeLimit.permission = 'admin'
removeLimit.params = {
id: {
type: 'string'
},
limitId: {
type: 'string'
}
}
// -------------------------------------------------------------------
export function recomputeAllLimits () {
return this.recomputeResourceSetsLimits()
}
recomputeAllLimits.permission = 'admin'

View File

@@ -1,3 +1,3 @@
export async function getAll () {
return await this.getRoles()
return /* await */ this.getRoles()
}

View File

@@ -1,14 +1,14 @@
// FIXME so far, no acls for schedules
export async function getAll () {
return await this.getAllSchedules()
return /* await */ this.getAllSchedules()
}
getAll.permission = 'admin'
getAll.description = 'Gets all existing schedules'
export async function get (id) {
return await this.getSchedule(id)
return /* await */ this.getSchedule(id)
}
get.permission = 'admin'
@@ -17,8 +17,8 @@ get.params = {
id: {type: 'string'}
}
export async function create ({jobId, cron, enabled}) {
return await this.createSchedule(this.session.get('user_id'), {job: jobId, cron, enabled})
export async function create ({ jobId, cron, enabled, name, timezone }) {
return /* await */ this.createSchedule(this.session.get('user_id'), { job: jobId, cron, enabled, name, timezone })
}
create.permission = 'admin'
@@ -26,11 +26,12 @@ create.description = 'Creates a new schedule'
create.params = {
jobId: {type: 'string'},
cron: {type: 'string'},
enabled: {type: 'boolean', optional: true}
enabled: {type: 'boolean', optional: true},
name: {type: 'string', optional: true}
}
export async function set ({id, jobId, cron, enabled}) {
await this.updateSchedule(id, {job: jobId, cron, enabled})
export async function set ({ id, jobId, cron, enabled, name, timezone }) {
await this.updateSchedule(id, { job: jobId, cron, enabled, name, timezone })
}
set.permission = 'admin'
@@ -39,7 +40,8 @@ set.params = {
id: {type: 'string'},
jobId: {type: 'string', optional: true},
cron: {type: 'string', optional: true},
enabled: {type: 'boolean', optional: true}
enabled: {type: 'boolean', optional: true},
name: {type: 'string', optional: true}
}
async function delete_ ({id}) {

View File

@@ -23,7 +23,7 @@ disable.params = {
}
export function getScheduleTable () {
return this.scheduler.scheduleTable
return this.scheduleTable
}
disable.permission = 'admin'

View File

@@ -1,14 +1,20 @@
import {
noop,
pCatch
} from '../utils'
export async function add ({
host,
username,
password,
readOnly,
autoConnect = true
}) {
const server = await this.registerXenServer({host, username, password})
const server = await this.registerXenServer({host, username, password, readOnly})
if (autoConnect) {
// Connect asynchronously, ignore any error.
this.connectXenServer(server.id).catch(() => {})
// Connect asynchronously, ignore any errors.
this.connectXenServer(server.id)::pCatch(noop)
}
return server.id
@@ -54,14 +60,8 @@ remove.params = {
// TODO: remove this function when users are integrated to the main
// collection.
export async function getAll () {
const servers = await this._servers.get()
for (let i = 0, n = servers.length; i < n; ++i) {
servers[i] = this.getServerPublicProperties(servers[i])
}
return servers
export function getAll () {
return this.getAllXenServers()
}
getAll.description = 'returns all the registered Xen server'
@@ -70,11 +70,11 @@ getAll.permission = 'admin'
// -------------------------------------------------------------------
export async function set ({id, host, username, password}) {
await this.updateXenServer(id, {host, username, password})
export async function set ({id, host, username, password, readOnly}) {
await this.updateXenServer(id, {host, username, password, readOnly})
}
set.description = 'changes the propeorties of a Xen server'
set.description = 'changes the properties of a Xen server'
set.permission = 'admin'
@@ -99,6 +99,7 @@ set.params = {
// -------------------------------------------------------------------
export async function connect ({id}) {
this.updateXenServer(id, {enabled: true})::pCatch(noop)
await this.connectXenServer(id)
}
@@ -115,6 +116,7 @@ connect.params = {
// -------------------------------------------------------------------
export async function disconnect ({id}) {
this.updateXenServer(id, {enabled: false})::pCatch(noop)
await this.disconnectXenServer(id)
}

View File

@@ -1,3 +1,4 @@
import { asInteger } from '../xapi/utils'
import {
ensureArray,
forEach,
@@ -6,11 +7,17 @@ import {
// ===================================================================
export async function set (params) {
const {sr} = params
delete params.sr
export async function set ({
sr,
await this.getXAPI(sr).setSrProperties(sr.id, params)
// TODO: use camel case.
name_description: nameDescription,
name_label: nameLabel
}) {
await this.getXapi(sr).setSrProperties(sr._xapiId, {
nameDescription,
nameLabel
})
}
set.params = {
@@ -28,7 +35,7 @@ set.resolve = {
// -------------------------------------------------------------------
export async function scan ({SR}) {
await this.getXAPI(SR).call('SR.scan', SR.ref)
await this.getXapi(SR).call('SR.scan', SR._xapiRef)
}
scan.params = {
@@ -42,8 +49,8 @@ scan.resolve = {
// -------------------------------------------------------------------
// TODO: find a way to call this "delete" and not destroy
export async function destroy ({SR}) {
await this.getXAPI(SR).call('SR.destroy', SR.ref)
export async function destroy ({ sr }) {
await this.getXapi(sr).destroySr(sr._xapiId)
}
destroy.params = {
@@ -51,13 +58,13 @@ destroy.params = {
}
destroy.resolve = {
SR: ['id', 'SR', 'administrate']
sr: ['id', 'SR', 'administrate']
}
// -------------------------------------------------------------------
export async function forget ({SR}) {
await this.getXAPI(SR).call('SR.forget', SR.ref)
await this.getXapi(SR).forgetSr(SR._xapiId)
}
forget.params = {
@@ -70,24 +77,59 @@ forget.resolve = {
// -------------------------------------------------------------------
export async function connectAllPbds ({SR}) {
await this.getXapi(SR).connectAllSrPbds(SR._xapiId)
}
connectAllPbds.params = {
id: { type: 'string' }
}
connectAllPbds.resolve = {
SR: ['id', 'SR', 'administrate']
}
// -------------------------------------------------------------------
export async function disconnectAllPbds ({SR}) {
await this.getXapi(SR).disconnectAllSrPbds(SR._xapiId)
}
disconnectAllPbds.params = {
id: { type: 'string' }
}
disconnectAllPbds.resolve = {
SR: ['id', 'SR', 'administrate']
}
// -------------------------------------------------------------------
export async function createIso ({
host,
nameLabel,
nameDescription,
path
path,
type,
user,
password
}) {
const xapi = this.getXAPI(host)
const xapi = this.getXapi(host)
// FIXME: won't work for IPv6
// Detect if NFS or local path for ISO files
const deviceConfig = {location: path}
if (path.indexOf(':') === -1) { // not NFS share
// TODO: legacy will be removed in XAPI soon by FileSR
const deviceConfig = {}
if (type === 'local') {
deviceConfig.legacy_mode = 'true'
} else if (type === 'smb') {
path = path.replace(/\\/g, '/')
deviceConfig.username = user
deviceConfig.cifspassword = password
}
deviceConfig.location = path
const srRef = await xapi.call(
'SR.create',
host.ref,
host._xapiRef,
deviceConfig,
'0', // SR size 0 because ISO
nameLabel,
@@ -106,7 +148,10 @@ createIso.params = {
host: { type: 'string' },
nameLabel: { type: 'string' },
nameDescription: { type: 'string' },
path: { type: 'string' }
path: { type: 'string' },
type: { type: 'string' },
user: { type: 'string', optional: true },
password: { type: 'string', optional: true }
}
createIso.resolve = {
@@ -126,7 +171,7 @@ export async function createNfs ({
serverPath,
nfsVersion
}) {
const xapi = this.getXAPI(host)
const xapi = this.getXapi(host)
const deviceConfig = {
server,
@@ -140,7 +185,7 @@ export async function createNfs ({
const srRef = await xapi.call(
'SR.create',
host.ref,
host._xapiRef,
deviceConfig,
'0',
nameLabel,
@@ -179,7 +224,7 @@ export async function createLvm ({
nameDescription,
device
}) {
const xapi = this.getXAPI(host)
const xapi = this.getXapi(host)
const deviceConfig = {
device
@@ -187,7 +232,7 @@ export async function createLvm ({
const srRef = await xapi.call(
'SR.create',
host.ref,
host._xapiRef,
deviceConfig,
'0',
nameLabel,
@@ -221,7 +266,7 @@ export async function probeNfs ({
host,
server
}) {
const xapi = this.getXAPI(host)
const xapi = this.getXapi(host)
const deviceConfig = {
server
@@ -232,7 +277,7 @@ export async function probeNfs ({
try {
await xapi.call(
'SR.probe',
host.ref,
host._xapiRef,
deviceConfig,
'nfs',
{}
@@ -284,7 +329,7 @@ export async function createIscsi ({
chapUser,
chapPassword
}) {
const xapi = this.getXAPI(host)
const xapi = this.getXapi(host)
const deviceConfig = {
target,
@@ -300,12 +345,12 @@ export async function createIscsi ({
// if we give another port than default iSCSI
if (port) {
deviceConfig.port = port
deviceConfig.port = asInteger(port)
}
const srRef = await xapi.call(
'SR.create',
host.ref,
host._xapiRef,
deviceConfig,
'0',
nameLabel,
@@ -347,7 +392,7 @@ export async function probeIscsiIqns ({
chapUser,
chapPassword
}) {
const xapi = this.getXAPI(host)
const xapi = this.getXapi(host)
const deviceConfig = {
target: targetIp
@@ -361,7 +406,7 @@ export async function probeIscsiIqns ({
// if we give another port than default iSCSI
if (port) {
deviceConfig.port = port
deviceConfig.port = asInteger(port)
}
let xml
@@ -369,7 +414,7 @@ export async function probeIscsiIqns ({
try {
await xapi.call(
'SR.probe',
host.ref,
host._xapiRef,
deviceConfig,
'lvmoiscsi',
{}
@@ -424,7 +469,7 @@ export async function probeIscsiLuns ({
chapUser,
chapPassword
}) {
const xapi = this.getXAPI(host)
const xapi = this.getXapi(host)
const deviceConfig = {
target: targetIp,
@@ -439,7 +484,7 @@ export async function probeIscsiLuns ({
// if we give another port than default iSCSI
if (port) {
deviceConfig.port = port
deviceConfig.port = asInteger(port)
}
let xml
@@ -447,7 +492,7 @@ export async function probeIscsiLuns ({
try {
await xapi.call(
'SR.probe',
host.ref,
host._xapiRef,
deviceConfig,
'lvmoiscsi',
{}
@@ -502,7 +547,7 @@ export async function probeIscsiExists ({
chapUser,
chapPassword
}) {
const xapi = this.getXAPI(host)
const xapi = this.getXapi(host)
const deviceConfig = {
target: targetIp,
@@ -518,10 +563,10 @@ export async function probeIscsiExists ({
// if we give another port than default iSCSI
if (port) {
deviceConfig.port = port
deviceConfig.port = asInteger(port)
}
const xml = parseXml(await xapi.call('SR.probe', host.ref, deviceConfig, 'lvmoiscsi', {}))
const xml = parseXml(await xapi.call('SR.probe', host._xapiRef, deviceConfig, 'lvmoiscsi', {}))
const srs = []
forEach(ensureArray(xml['SRlist'].SR), sr => {
@@ -555,14 +600,14 @@ export async function probeNfsExists ({
server,
serverPath
}) {
const xapi = this.getXAPI(host)
const xapi = this.getXapi(host)
const deviceConfig = {
server,
serverpath: serverPath
}
const xml = parseXml(await xapi.call('SR.probe', host.ref, deviceConfig, 'nfs', {}))
const xml = parseXml(await xapi.call('SR.probe', host._xapiRef, deviceConfig, 'nfs', {}))
const srs = []
@@ -594,7 +639,7 @@ export async function reattach ({
nameDescription,
type
}) {
const xapi = this.getXAPI(host)
const xapi = this.getXapi(host)
if (type === 'iscsi') {
type = 'lvmoiscsi' // the internal XAPI name
@@ -637,7 +682,7 @@ export async function reattachIso ({
nameDescription,
type
}) {
const xapi = this.getXAPI(host)
const xapi = this.getXapi(host)
if (type === 'iscsi') {
type = 'lvmoiscsi' // the internal XAPI name

View File

@@ -1,5 +1,5 @@
export async function add ({tag, object}) {
await this.getXAPI(object).addTag(object.id, tag)
await this.getXapi(object).addTag(object._xapiId, tag)
}
add.description = 'add a new tag to an object'
@@ -16,7 +16,7 @@ add.params = {
// -------------------------------------------------------------------
export async function remove ({tag, object}) {
await this.getXAPI(object).removeTag(object.id, tag)
await this.getXapi(object).removeTag(object._xapiId, tag)
}
remove.description = 'remove an existing tag from an object'

View File

@@ -1,5 +1,5 @@
export async function cancel ({task}) {
await this.getXAPI(task).call('task.cancel', task.ref)
await this.getXapi(task).call('task.cancel', task._xapiRef)
}
cancel.params = {
@@ -13,7 +13,7 @@ cancel.resolve = {
// -------------------------------------------------------------------
export async function destroy ({task}) {
await this.getXAPI(task).call('task.destroy', task.ref)
await this.getXapi(task).call('task.destroy', task._xapiRef)
}
destroy.params = {

View File

@@ -22,7 +22,7 @@ create.params = {
// Deletes an existing user.
async function delete_ ({id}) {
if (id === this.session.get('user_id')) {
throw new InvalidParameters('an user cannot delete itself')
throw new InvalidParameters('a user cannot delete itself')
}
await this.deleteUser(id)
@@ -45,7 +45,7 @@ delete_.params = {
// collection.
export async function getAll () {
// Retrieves the users.
const users = await this._users.get()
const users = await this.getAllUsers()
// Filters out private properties.
return mapToArray(users, this.getUserPublicProperties)
@@ -57,8 +57,11 @@ getAll.permission = 'admin'
// -------------------------------------------------------------------
export async function set ({id, email, password, permission}) {
await this.updateUser(id, {email, password, permission})
export async function set ({id, email, password, permission, preferences}) {
if (permission && id === this.session.get('user_id')) {
throw new InvalidParameters('a user cannot change its own permission')
}
await this.updateUser(id, {email, password, permission, preferences})
}
set.description = 'changes the properties of an existing user'
@@ -69,7 +72,8 @@ set.params = {
id: { type: 'string' },
email: { type: 'string', optional: true },
password: { type: 'string', optional: true },
permission: { type: 'string', optional: true }
permission: { type: 'string', optional: true },
preferences: { type: 'object', optional: true }
}
// -------------------------------------------------------------------

View File

@@ -5,10 +5,10 @@
#=====================================================================
delete_ = $coroutine ({vbd}) ->
xapi = @getXAPI vbd
xapi = @getXapi vbd
# TODO: check if VBD is attached before
yield xapi.call 'VBD.destroy', vbd.ref
yield xapi.call 'VBD.destroy', vbd._xapiRef
return true
@@ -25,12 +25,9 @@ exports.delete = delete_
#---------------------------------------------------------------------
disconnect = $coroutine ({vbd}) ->
xapi = @getXAPI vbd
# TODO: check if VBD is attached before
yield xapi.call 'VBD.unplug_force', vbd.ref
return true
xapi = @getXapi vbd
yield xapi.disconnectVbd(vbd._xapiRef)
return
disconnect.params = {
id: { type: 'string' }
@@ -45,12 +42,9 @@ exports.disconnect = disconnect
#---------------------------------------------------------------------
connect = $coroutine ({vbd}) ->
xapi = @getXAPI vbd
# TODO: check if VBD is attached before
yield xapi.call 'VBD.plug', vbd.ref
return true
xapi = @getXapi vbd
yield xapi.connectVbd(vbd._xapiRef)
return
connect.params = {
id: { type: 'string' }
@@ -66,9 +60,9 @@ exports.connect = connect
set = $coroutine (params) ->
{vbd} = params
xapi = @getXAPI vbd
xapi = @getXapi vbd
{ref} = vbd
{ _xapiRef: ref } = vbd
# VBD position
if 'position' of params
@@ -87,3 +81,29 @@ set.resolve = {
}
exports.set = set
#---------------------------------------------------------------------
setBootable = $coroutine ({vbd, bootable}) ->
xapi = @getXapi vbd
{ _xapiRef: ref } = vbd
yield xapi.call 'VBD.set_bootable', ref, bootable
return
setBootable.params = {
vbd: { type: 'string' }
bootable: { type: 'boolean' }
}
setBootable.resolve = {
vbd: ['vbd', 'VBD', 'administrate'],
}
exports.setBootable = setBootable
#=====================================================================
Object.defineProperty(exports, '__esModule', {
value: true
})

View File

@@ -1,15 +1,16 @@
# FIXME: rename to disk.*
$isArray = require 'lodash.isarray'
{coroutine: $coroutine} = require 'bluebird'
{format} = require 'json-rpc-peer'
{InvalidParameters} = require '../api-errors'
{parseSize} = require '../utils'
{isArray: $isArray, parseSize} = require '../utils'
{JsonRpcError} = require '../api-errors'
#=====================================================================
delete_ = $coroutine ({vdi}) ->
yield @getXAPI(vdi).deleteVdi(vdi.id)
yield @getXapi(vdi).deleteVdi(vdi._xapiId)
return
@@ -18,7 +19,7 @@ delete_.params = {
}
delete_.resolve = {
vdi: ['id', 'VDI', 'administrate'],
vdi: ['id', ['VDI', 'VDI-snapshot'], 'administrate'],
}
exports.delete = delete_
@@ -28,9 +29,9 @@ exports.delete = delete_
# FIXME: human readable strings should be handled.
set = $coroutine (params) ->
{vdi} = params
xapi = @getXAPI vdi
xapi = @getXapi vdi
{ref} = vdi
{_xapiRef: ref} = vdi
# Size.
if 'size' of params
@@ -40,8 +41,7 @@ set = $coroutine (params) ->
throw new InvalidParameters(
"cannot set new size (#{size}) below the current size (#{vdi.size})"
)
yield xapi.call 'VDI.resize_online', ref, "#{size}"
yield xapi.resizeVdi(ref, size)
# Other fields.
for param, fields of {
@@ -68,7 +68,7 @@ set.params = {
}
set.resolve = {
vdi: ['id', 'VDI', 'administrate'],
vdi: ['id', ['VDI', 'VDI-snapshot'], 'administrate'],
}
exports.set = set
@@ -76,10 +76,9 @@ exports.set = set
#---------------------------------------------------------------------
migrate = $coroutine ({vdi, sr}) ->
xapi = @getXAPI vdi
xapi = @getXapi vdi
# TODO: check if VDI is attached before
yield xapi.call 'VDI.pool_migrate', vdi.ref, sr.ref, {}
yield xapi.moveVdi(vdi._xapiRef, sr._xapiRef)
return true
@@ -89,8 +88,14 @@ migrate.params = {
}
migrate.resolve = {
vdi: ['id', 'VDI', 'administrate'],
vdi: ['id', ['VDI', 'VDI-snapshot'], 'administrate'],
sr: ['sr_id', 'SR', 'administrate'],
}
exports.migrate = migrate
#=====================================================================
Object.defineProperty(exports, '__esModule', {
value: true
})

View File

@@ -1,6 +1,6 @@
// TODO: move into vm and rename to removeInterface
async function delete_ ({vif}) {
await this.getXAPI(vif).deleteVif(vif.id)
await this.getXapi(vif).deleteVif(vif._xapiId)
}
export {delete_ as delete}
@@ -16,7 +16,7 @@ delete_.resolve = {
// TODO: move into vm and rename to disconnectInterface
export async function disconnect ({vif}) {
// TODO: check if VIF is attached before
await this.getXAPI(vif).call('VIF.unplug_force', vif.ref)
await this.getXapi(vif).call('VIF.unplug_force', vif._xapiRef)
}
disconnect.params = {
@@ -31,7 +31,7 @@ disconnect.resolve = {
// TODO: move into vm and rename to connectInterface
export async function connect ({vif}) {
// TODO: check if VIF is attached before
await this.getXAPI(vif).call('VIF.plug', vif.ref)
await this.getXapi(vif).call('VIF.plug', vif._xapiRef)
}
connect.params = {
@@ -41,3 +41,31 @@ connect.params = {
connect.resolve = {
vif: ['id', 'VIF', 'operate']
}
// -------------------------------------------------------------------
// Sets the allowed IP addresses of a VIF.
//
// Must be a regular function (not an arrow function): the API layer
// invokes API methods with `this` bound to the Xo instance, which an
// arrow function defined at module scope would ignore (`this` would be
// undefined in an ES module).
export function set ({ vif, allowedIpv4Addresses, allowedIpv6Addresses }) {
  // `getXapi()` takes an XO object (as in the other methods of this
  // file), not a XAPI id, and `editVif()` needs the id of the VIF to
  // edit as its first argument.
  return this.getXapi(vif).editVif(vif._xapiId, {
    ipv4Allowed: allowedIpv4Addresses,
    ipv6Allowed: allowedIpv6Addresses
  })
}

set.params = {
  // Required by `set.resolve` below to look the VIF up.
  id: { type: 'string' },
  allowedIpv4Addresses: {
    type: 'array',
    items: {
      type: 'string'
    }
  },
  allowedIpv6Addresses: {
    type: 'array',
    items: {
      type: 'string'
    }
  }
}

set.resolve = {
  vif: ['id', 'VIF', 'operate']
}

File diff suppressed because it is too large Load Diff

View File

@@ -1,9 +1,11 @@
import isArray from 'lodash.isarray'
import isObject from 'lodash.isobject'
import Model from './model'
import {BaseError} from 'make-error'
import {EventEmitter} from 'events'
import {mapInPlace} from './utils'
import {
isArray,
isObject,
map
} from './utils'
// ===================================================================
@@ -31,10 +33,6 @@ export default class Collection extends EventEmitter {
})
}
constructor () {
super()
}
async add (models, opts) {
const array = isArray(models)
if (!array) {
@@ -42,7 +40,7 @@ export default class Collection extends EventEmitter {
}
const {Model} = this
mapInPlace(models, model => {
map(models, model => {
if (!(model instanceof Model)) {
model = new Model(model)
}
@@ -54,7 +52,7 @@ export default class Collection extends EventEmitter {
}
return model.properties
})
}, models)
models = await this._add(models, opts)
this.emit('add', models)
@@ -82,7 +80,7 @@ export default class Collection extends EventEmitter {
: {}
}
return await this._get(properties)
return /* await */ this._get(properties)
}
async remove (ids) {
@@ -103,7 +101,7 @@ export default class Collection extends EventEmitter {
}
const {Model} = this
mapInPlace(models, model => {
map(models, model => {
if (!(model instanceof Model)) {
// TODO: Problems, we may be mixing in some default
// properties which will overwrite existing ones.
@@ -125,7 +123,7 @@ export default class Collection extends EventEmitter {
}
return model.properties
})
}, models)
models = await this._update(models)
this.emit('update', models)

View File

@@ -1,8 +1,8 @@
import Collection, {ModelAlreadyExists} from '../collection'
import difference from 'lodash.difference'
import filter from 'lodash.filter'
import getKey from 'lodash.keys'
import {createClient as createRedisClient, RedisClient, Multi} from 'redis'
import difference from 'lodash/difference'
import filter from 'lodash/filter'
import getKey from 'lodash/keys'
import {createClient as createRedisClient} from 'redis'
import {
forEach,
@@ -13,11 +13,6 @@ import {
// ===================================================================
promisifyAll(RedisClient.prototype)
promisifyAll(Multi.prototype)
// ===================================================================
// ///////////////////////////////////////////////////////////////////
// Data model:
// - prefix +'_id': value of the last generated identifier;
@@ -46,7 +41,7 @@ export default class Redis extends Collection {
this.indexes = indexes
this.prefix = prefix
this.redis = connection || createRedisClient(uri)
this.redis = promisifyAll.call(connection || createRedisClient(uri))
}
_extract (ids) {
@@ -55,7 +50,7 @@ export default class Redis extends Collection {
const models = []
return Promise.all(mapToArray(ids, id => {
return redis.hgetallAsync(prefix + id).then(model => {
return redis.hgetall(prefix + id).then(model => {
// If empty, consider it a no match.
if (isEmpty(model)) {
return
@@ -78,10 +73,10 @@ export default class Redis extends Collection {
return Promise.all(mapToArray(models, async model => {
// Generate a new identifier if necessary.
if (model.id === undefined) {
model.id = idPrefix + String(await redis.incrAsync(prefix + '_id'))
model.id = idPrefix + String(await redis.incr(prefix + '_id'))
}
const success = await redis.saddAsync(prefix + '_ids', model.id)
const success = await redis.sadd(prefix + '_ids', model.id)
// The entry already exists an we are not in replace mode.
if (!success && !replace) {
@@ -100,8 +95,10 @@ export default class Redis extends Collection {
params.push(name, value)
})
const key = `${prefix}:${model.id}`
const promises = [
redis.hmsetAsync(prefix + ':' + model.id, ...params)
redis.del(key),
redis.hmset(key, ...params)
]
// Update indexes.
@@ -112,7 +109,7 @@ export default class Redis extends Collection {
}
const key = prefix + '_' + index + ':' + value
promises.push(redis.saddAsync(key, model.id))
promises.push(redis.sadd(key, model.id))
})
await Promise.all(promises)
@@ -125,7 +122,7 @@ export default class Redis extends Collection {
const {prefix, redis} = this
if (isEmpty(properties)) {
return redis.smembersAsync(prefix + '_ids').then(ids => this._extract(ids))
return redis.smembers(prefix + '_ids').then(ids => this._extract(ids))
}
// Special treatment for the identifier.
@@ -148,7 +145,7 @@ export default class Redis extends Collection {
}
const keys = mapToArray(properties, (value, index) => `${prefix}_${index}:${value}`)
return redis.sinterAsync(...keys).then(ids => this._extract(ids))
return redis.sinter(...keys).then(ids => this._extract(ids))
}
_remove (ids) {
@@ -158,10 +155,10 @@ export default class Redis extends Collection {
return Promise.all([
// Remove the identifiers from the main index.
redis.sremAsync(prefix + '_ids', ...ids),
redis.srem(prefix + '_ids', ...ids),
// Remove the models.
redis.delAsync(mapToArray(ids, id => `${prefix}:${id}`))
redis.del(mapToArray(ids, id => `${prefix}:${id}`))
])
}

View File

@@ -1,10 +1,6 @@
import {EventEmitter} from 'events'
import {createRawObject} from './utils'
// ===================================================================
// const noop = () => {}
import {createRawObject, noop} from './utils'
// ===================================================================
@@ -18,7 +14,7 @@ export default class Connection extends EventEmitter {
// Close the connection.
close () {
// Prevent errors when the connection is closed more than once.
// this.close = noop
this.close = noop
this.emit('close')
}

View File

@@ -1,69 +1,79 @@
import bind from 'lodash.bind'
import bind from 'lodash/bind'
import {
isArray,
isPromise,
isFunction,
noop,
pFinally
} from './utils'
// ===================================================================
const {defineProperty} = Object
const {
defineProperties,
defineProperty,
getOwnPropertyDescriptor
} = Object
// ===================================================================
// See: https://github.com/jayphelps/core-decorators.js#autobind
export function autobind (target, key, {
//
// TODO: make it work for all class methods.
export const autobind = (target, key, {
configurable,
enumerable,
value: fn,
writable
}) {
return {
configurable,
enumerable,
}) => ({
configurable,
enumerable,
get () {
const bounded = bind(fn, this)
defineProperty(this, key, {
configurable: true,
enumerable: false,
value: bounded,
writable: true
})
return bounded
},
set (newValue) {
if (this === target) {
// New value directly set on the prototype.
delete this[key]
this[key] = newValue
} else {
// New value set on a child object.
// Cannot use assignment because it will call the setter on
// the prototype.
defineProperty(this, key, {
configurable: true,
enumerable: true,
value: newValue,
writable: true
})
}
get () {
if (this === target) {
return fn
}
const bound = bind(fn, this)
defineProperty(this, key, {
configurable: true,
enumerable: false,
value: bound,
writable: true
})
return bound
},
set (newValue) {
// Cannot use assignment because it will call the setter on
// the prototype.
defineProperty(this, key, {
configurable: true,
enumerable: true,
value: newValue,
writable: true
})
}
}
})
// -------------------------------------------------------------------
// Debounce decorator for methods.
//
// See: https://github.com/wycats/javascript-decorators
export const debounce = (duration) => (target, name, descriptor) => {
const {value: fn} = descriptor
//
// TODO: make it work for single functions.
export const debounce = duration => (target, name, descriptor) => {
const fn = descriptor.value
// This symbol is used to store the related data directly on the
// current object.
const s = Symbol()
function debounced () {
let data = this[s] || (this[s] = {
const data = this[s] || (this[s] = {
lastCall: 0,
wrapper: null
})
@@ -80,8 +90,258 @@ export const debounce = (duration) => (target, name, descriptor) => {
}
return data.wrapper()
}
debounced.reset = (obj) => { delete obj[s] }
debounced.reset = obj => { delete obj[s] }
descriptor.value = debounced
return descriptor
}
// -------------------------------------------------------------------
const _push = Array.prototype.push
// Decorator (or wrapper when applied to a plain function) making a
// function "deferrable": the wrapped function receives an extra first
// argument, `defer`, which registers clean-up callbacks.
//
// Registered callbacks run in reverse registration order (LIFO) when
// the call terminates: in the `finally` block for a synchronous
// return/throw, or when the returned promise settles for an async
// result. `defer.clear()` drops all callbacks registered so far.
//
// Usable both as a method decorator — (target, name, descriptor) —
// and directly on a function: deferrable(fn).
export const deferrable = (target, name, descriptor) => {
  let fn
  function newFn () {
    const deferreds = []
    const defer = fn => {
      deferreds.push(fn)
    }
    defer.clear = () => {
      deferreds.length = 0
    }

    // Prepend `defer` to the original arguments.
    const args = [ defer ]
    _push.apply(args, arguments)

    // Runs the deferreds in reverse (LIFO) order.
    let executeDeferreds = () => {
      let i = deferreds.length
      while (i) {
        deferreds[--i]()
      }
    }

    try {
      const result = fn.apply(this, args)

      if (isPromise(result)) {
        // Async case: run the deferreds when the promise settles,
        // whether it fulfills or rejects.
        result::pFinally(executeDeferreds)

        // Do not execute the deferreds in the finally block.
        executeDeferreds = noop
      }

      return result
    } finally {
      // Sync case (or immediate throw): run the deferreds now.
      executeDeferreds()
    }
  }

  if (descriptor) {
    // Used as a method decorator: wrap the method in place.
    fn = descriptor.value
    descriptor.value = newFn
    return descriptor
  }

  // Used as a plain function wrapper.
  fn = target
  return newFn
}
// Deferred functions are only executed on failures.
//
// i.e.: defer.clear() is automatically called in case of success.
deferrable.onFailure = (target, name, descriptor) => {
  let fn

  // Wrapper inserted between deferrable() and the original function:
  // it receives the `defer` argument created by deferrable() and
  // clears the registered deferreds when the call succeeds, so they
  // only run on a sync throw or a promise rejection.
  function newFn (defer) {
    const result = fn.apply(this, arguments)
    return isPromise(result)
      ? result.then(result => {
        // Fulfilled: cancel the deferreds, pass the value through.
        defer.clear()
        return result
      })
      : (defer.clear(), result) // Sync success: cancel the deferreds.
  }

  if (descriptor) {
    // Method decorator usage.
    fn = descriptor.value
    descriptor.value = newFn
  } else {
    // Plain function usage.
    fn = target
    target = newFn
  }

  // Delegate the defer machinery itself to deferrable().
  return deferrable(target, name, descriptor)
}
// Deferred functions are only executed on success.
//
// i.e.: defer.clear() is automatically called in case of failure.
deferrable.onSuccess = (target, name, descriptor) => {
  let fn

  // Wrapper inserted between deferrable() and the original function:
  // clears the registered deferreds when the call fails (sync throw or
  // promise rejection), so they only run on success.
  function newFn (defer) {
    try {
      const result = fn.apply(this, arguments)
      return isPromise(result)
        ? result.then(null, error => {
          // Rejected: cancel the deferreds, re-propagate the error.
          defer.clear()
          throw error
        })
        : result
    } catch (error) {
      // Sync throw: cancel the deferreds, re-propagate.
      defer.clear()
      throw error
    }
  }

  if (descriptor) {
    // Method decorator usage.
    fn = descriptor.value
    descriptor.value = newFn
  } else {
    // Plain function usage.
    fn = target
    target = newFn
  }

  // Delegate the defer machinery itself to deferrable().
  return deferrable(target, name, descriptor)
}
// -------------------------------------------------------------------
// Reflect.ownKeys() when available (all own string and symbol keys),
// otherwise a polyfill built from Object.getOwnPropertyNames() plus,
// when supported, Object.getOwnPropertySymbols().
const _ownKeys = (
  typeof Reflect !== 'undefined' && Reflect.ownKeys ||
  (({
    getOwnPropertyNames: names,
    getOwnPropertySymbols: symbols
  }) => symbols
    ? obj => names(obj).concat(symbols(obj))
    : names
  )(Object)
)

// Binds the functional parts of a property descriptor (getter, setter,
// method value) to `thisArg`.
//
// NOTE: mutates and returns the given descriptor.
const _bindPropertyDescriptor = (descriptor, thisArg) => {
  const { get, set, value } = descriptor
  if (get) {
    descriptor.get = bind(get, thisArg)
  }
  if (set) {
    descriptor.set = bind(set, thisArg)
  }
  // Only bind `value` when it is a function: data properties holding
  // non-functions are left as-is.
  if (isFunction(value)) {
    descriptor.value = bind(value, thisArg)
  }
  return descriptor
}
// Tells whether a mix-in prototype property must not be copied:
// conventionally-private members (leading underscore) and the
// constructor itself.
function _isIgnoredProperty (name) {
  if (name === 'constructor') {
    return true
  }
  return name[0] === '_'
}
// Static properties owned by every function/class which must never be
// copied onto a wrapper: they are non-writable or meaningless there.
//
// The null prototype prevents false positives from Object.prototype
// members (e.g. 'toString').
const _IGNORED_STATIC_PROPERTIES = {
  __proto__: null,
  arguments: true,
  caller: true,
  length: true,
  name: true,
  prototype: true
}

// Returns true for ignored names, undefined otherwise (plain lookup,
// callers only rely on truthiness).
function _isIgnoredStaticProperty (name) {
  return _IGNORED_STATIC_PROPERTIES[name]
}
// Class decorator which merges mix-ins into a class:
//
// - plain-object mix-ins: their properties are copied onto the class
//   prototype (non-enumerable, like regular class methods);
// - class mix-ins: at construction time each one is instantiated with
//   the decorated instance, and its prototype methods — bound to that
//   mix-in instance — are defined on the decorated instance.
//
// Name collisions (instance or static) throw eagerly.
export const mixin = MixIns => Class => {
  if (!isArray(MixIns)) {
    MixIns = [ MixIns ]
  }

  const { name } = Class

  // Copy properties of plain object mix-ins to the prototype.
  {
    const allMixIns = MixIns
    MixIns = []
    const { prototype } = Class
    const descriptors = { __proto__: null }
    for (const MixIn of allMixIns) {
      if (isFunction(MixIn)) {
        // Class mix-ins are kept for instantiation time (below).
        MixIns.push(MixIn)
        continue
      }

      for (const prop of _ownKeys(MixIn)) {
        if (prop in prototype) {
          throw new Error(`${name}#${prop} is already defined`)
        }

        (
          descriptors[prop] = getOwnPropertyDescriptor(MixIn, prop)
        ).enumerable = false // Object methods are enumerable but class methods are not.
      }
    }
    defineProperties(prototype, descriptors)
  }

  // Replacement constructor: builds the real instance, then grafts the
  // bound members of each class mix-in onto it.
  const Decorator = (...args) => {
    const instance = new Class(...args)

    for (const MixIn of MixIns) {
      const { prototype } = MixIn
      // Each mix-in receives the decorated instance as its only
      // constructor argument.
      const mixinInstance = new MixIn(instance)
      const descriptors = { __proto__: null }
      for (const prop of _ownKeys(prototype)) {
        if (_isIgnoredProperty(prop)) {
          continue
        }

        if (prop in instance) {
          throw new Error(`${name}#${prop} is already defined`)
        }

        descriptors[prop] = _bindPropertyDescriptor(
          getOwnPropertyDescriptor(prototype, prop),
          mixinInstance
        )
      }
      defineProperties(instance, descriptors)
    }

    return instance
  }

  // Copy original and mixed-in static properties on Decorator class.
  const descriptors = { __proto__: null }
  for (const prop of _ownKeys(Class)) {
    let descriptor

    if (!(
      // Special properties are not defined...
      _isIgnoredStaticProperty(prop) &&

      // if they already exist...
      (descriptor = getOwnPropertyDescriptor(Decorator, prop)) &&

      // and are not configurable.
      !descriptor.configurable
    )) {
      descriptors[prop] = getOwnPropertyDescriptor(Class, prop)
    }
  }
  for (const MixIn of MixIns) {
    for (const prop of _ownKeys(MixIn)) {
      if (_isIgnoredStaticProperty(prop)) {
        continue
      }

      if (prop in descriptors) {
        throw new Error(`${name}.${prop} is already defined`)
      }

      descriptors[prop] = getOwnPropertyDescriptor(MixIn, prop)
    }
  }
  defineProperties(Decorator, descriptors)

  return Decorator
}

View File

@@ -4,11 +4,11 @@ import expect from 'must'
// ===================================================================
import {autobind, debounce} from './decorators'
import {autobind, debounce, deferrable} from './decorators'
// ===================================================================
describe('autobind', function () {
describe('autobind()', () => {
class Foo {
@autobind
getFoo () {
@@ -16,25 +16,25 @@ describe('autobind', function () {
}
}
it('returns a bound instance for a method', function () {
it('returns a bound instance for a method', () => {
const foo = new Foo()
const {getFoo} = foo
const { getFoo } = foo
expect(getFoo()).to.equal(foo)
})
it('returns the same bound instance each time', function () {
it('returns the same bound instance each time', () => {
const foo = new Foo()
expect(foo.getFoo).to.equal(foo.getFoo)
})
it('works with multiple instances of the same class', function () {
it('works with multiple instances of the same class', () => {
const foo1 = new Foo()
const foo2 = new Foo()
const {getFoo: getFoo1} = foo1
const {getFoo: getFoo2} = foo2
const getFoo1 = foo1.getFoo
const getFoo2 = foo2.getFoo
expect(getFoo1()).to.equal(foo1)
expect(getFoo2()).to.equal(foo2)
@@ -43,7 +43,7 @@ describe('autobind', function () {
// -------------------------------------------------------------------
describe('debounce', function () {
describe('debounce()', () => {
let i
class Foo {
@@ -53,11 +53,11 @@ describe('debounce', function () {
}
}
beforeEach(function () {
beforeEach(() => {
i = 0
})
it('works', function (done) {
it('works', done => {
const foo = new Foo()
expect(i).to.equal(0)
@@ -68,7 +68,7 @@ describe('debounce', function () {
foo.foo()
expect(i).to.equal(1)
setTimeout(function () {
setTimeout(() => {
foo.foo()
expect(i).to.equal(2)
@@ -76,3 +76,98 @@ describe('debounce', function () {
}, 2e1)
})
})
// -------------------------------------------------------------------
describe('deferrable()', () => {
it('works with normal termination', () => {
let i = 0
const fn = deferrable(defer => {
i += 2
defer(() => { i -= 2 })
i *= 2
defer(() => { i /= 2 })
return i
})
expect(fn()).to.equal(4)
expect(i).to.equal(0)
})
it('defer.clear() removes previous deferreds', () => {
let i = 0
const fn = deferrable(defer => {
i += 2
defer(() => { i -= 2 })
defer.clear()
i *= 2
defer(() => { i /= 2 })
return i
})
expect(fn()).to.equal(4)
expect(i).to.equal(2)
})
it('works with exception', () => {
let i = 0
const fn = deferrable(defer => {
i += 2
defer(() => { i -= 2 })
i *= 2
defer(() => { i /= 2 })
throw i
})
expect(() => fn()).to.throw(4)
expect(i).to.equal(0)
})
it('works with promise resolution', async () => {
let i = 0
const fn = deferrable(async defer => {
i += 2
defer(() => { i -= 2 })
i *= 2
defer(() => { i /= 2 })
// Wait a turn of the events loop.
await Promise.resolve()
return i
})
await expect(fn()).to.eventually.equal(4)
expect(i).to.equal(0)
})
it('works with promise rejection', async () => {
let i = 0
const fn = deferrable(async defer => {
// Wait a turn of the events loop.
await Promise.resolve()
i += 2
defer(() => { i -= 2 })
i *= 2
defer(() => { i /= 2 })
// Wait a turn of the events loop.
await Promise.resolve()
throw i
})
await expect(fn()).to.reject.to.equal(4)
expect(i).to.equal(0)
})
})

84
src/fatfs-buffer.js Normal file
View File

@@ -0,0 +1,84 @@
// Buffer driver for [fatfs](https://github.com/natevw/fatfs).
//
// Usage:
//
// ```js
// import fatfs from 'fatfs'
// import fatfsBuffer, { init as fatfsBufferInit } from './fatfs-buffer'
//
// const buffer = fatfsBufferInit()
//
// const fs = fatfs.createFileSystem(fatfsBuffer(buffer))
//
// fs.writeFile('/foo', 'content of foo', function (err, content) {
// if (err) {
// console.error(err)
// }
// })
import { boot16 as fat16 } from 'fatfs/structs'
const SECTOR_SIZE = 512
// Creates a 10MB buffer and initializes it as a FAT 16 volume.
//
// Returns the buffer; feed it to the default export of this module to
// obtain a fatfs driver backed by it.
export function init () {
  // Buffer.alloc() both allocates and zero-fills, replacing the
  // deprecated and unsafe `new Buffer(size)` + `fill(0)` pair (DEP0005).
  const buf = Buffer.alloc(10 * 1024 * 1024) // 10MB

  // Boot-sector fields, see
  // https://github.com/natevw/fatfs/blob/master/structs.js
  fat16.pack({
    jmpBoot: Buffer.from('eb3c90', 'hex'),
    OEMName: 'mkfs.fat',
    BytsPerSec: SECTOR_SIZE,
    SecPerClus: 4,
    ResvdSecCnt: 1,
    NumFATs: 2,
    RootEntCnt: 512,
    TotSec16: 20480,
    Media: 248,
    FATSz16: 20,
    SecPerTrk: 32,
    NumHeads: 64,
    HiddSec: 0,
    TotSec32: 0,
    DrvNum: 128,
    Reserved1: 0,
    BootSig: 41,
    VolID: 895111106,
    VolLab: 'NO NAME    ',
    FilSysType: 'FAT16   '
  }, buf)

  // End of sector.
  buf[0x1fe] = 0x55
  buf[0x1ff] = 0xaa

  // Mark sector as reserved.
  buf[0x200] = 0xf8
  buf[0x201] = 0xff
  buf[0x202] = 0xff
  buf[0x203] = 0xff

  // Mark sector as reserved.
  buf[0x2a00] = 0xf8
  buf[0x2a01] = 0xff
  buf[0x2a02] = 0xff
  buf[0x2a03] = 0xff

  return buf
}
// Builds a fatfs volume driver backed by an in-memory buffer.
//
// The returned object implements the driver interface expected by
// fatfs.createFileSystem(): sectorSize, numSectors, readSectors and
// writeSectors.
export default buffer => ({
  sectorSize: SECTOR_SIZE,
  numSectors: Math.floor(buffer.length / SECTOR_SIZE),

  // Copies the requested sectors from the volume into `target`.
  readSectors (i, target, cb) {
    buffer.copy(target, 0, i * SECTOR_SIZE)
    cb()
  },

  // Writes `source` into the volume at the requested sector.
  writeSectors (i, source, cb) {
    source.copy(buffer, i * SECTOR_SIZE, 0)
    cb()
  }
})

54
src/glob-matcher.js Normal file
View File

@@ -0,0 +1,54 @@
// See: https://gist.github.com/julien-f/5b9a3537eb82a34b04e2
var matcher = require('micromatch').matcher
module.exports = function globMatcher (patterns, opts) {
if (!Array.isArray(patterns)) {
if (patterns[0] === '!') {
var m = matcher(patterns.slice(1), opts)
return function (string) {
return !m(string)
}
} else {
return matcher(patterns, opts)
}
}
var noneMustMatch = []
var anyMustMatch = []
// TODO: could probably be optimized by combining all positive patterns (and all negative patterns) as a single matcher.
for (var i = 0, n = patterns.length; i < n; ++i) {
var pattern = patterns[i]
if (pattern[0] === '!') {
noneMustMatch.push(matcher(pattern.slice(1), opts))
} else {
anyMustMatch.push(matcher(pattern, opts))
}
}
var nNone = noneMustMatch.length
var nAny = anyMustMatch.length
return function (string) {
var i
for (i = 0; i < nNone; ++i) {
if (noneMustMatch[i](string)) {
return false
}
}
if (nAny === 0) {
return true
}
for (i = 0; i < nAny; ++i) {
if (anyMustMatch[i](string)) {
return true
}
}
return false
}
}

13
src/http-proxy.js Normal file
View File

@@ -0,0 +1,13 @@
import ProxyAgent from 'proxy-agent'
let agent
export { agent as default }
// Configures the module-level proxy agent.
//
// A nullish `uri` clears the agent (direct connections); otherwise a
// new ProxyAgent is created for the given proxy URI.
export function setup (uri) {
  if (uri == null) {
    agent = undefined
  } else {
    agent = new ProxyAgent(uri)
  }
}
const { env } = process
setup(env.http_proxy || env.HTTP_PROXY)

124
src/http-request.js Normal file
View File

@@ -0,0 +1,124 @@
import assign from 'lodash/assign'
import startsWith from 'lodash/startsWith'
import { parse as parseUrl } from 'url'
import { request as httpRequest } from 'http'
import { request as httpsRequest } from 'https'
import { stringify as formatQueryString } from 'querystring'
import {
isString,
streamToBuffer
} from './utils'
// -------------------------------------------------------------------
// Makes an HTTP(S) request and returns a promise for the response.
//
// Accepts any number of arguments, each either a URL string (parsed
// with url.parse()) or an options object; they are merged, in order,
// into a single options object (later arguments win).
//
// Recognized options (on top of the standard http.request() ones):
// - body: string, buffer-like or readable stream sent as the request
//   body
// - headers: plain object; a header explicitly set to null is removed
//   from the request (overriding Node's defaults)
// - query: string or object appended to the path as a query string
//
// The returned promise carries extra members:
// - cancel(): aborts the request and rejects the promise
// - readAll(): resolves with the full response body (via
//   streamToBuffer)
// - request: the underlying ClientRequest
//
// The resolved response object also gains cancel() and readAll() and,
// when a content-length header is present, a `length` property.
// Responses with a non-2xx status code make the promise reject with
// an error carrying `code` (the status code) and `response`.
export default (...args) => {
  let req
  const pResponse = new Promise((resolve, reject) => {
    // Merge all arguments into a single options object.
    const opts = {}
    for (let i = 0, length = args.length; i < length; ++i) {
      const arg = args[i]
      assign(opts, isString(arg) ? parseUrl(arg) : arg)
    }

    const {
      body,
      // Shallow-copied because `headers` is mutated below.
      headers: { ...headers } = {},
      protocol,
      query,
      ...rest
    } = opts

    // Compute content-length from the body when not explicitly set:
    // from Buffer.byteLength for strings, otherwise from the body's
    // own content-length header (e.g. a piped response) or its
    // `length` property.
    if (headers['content-length'] == null && body != null) {
      let tmp
      if (isString(body)) {
        headers['content-length'] = Buffer.byteLength(body)
      } else if (
        (
          (tmp = body.headers) &&
          (tmp = tmp['content-length']) != null
        ) ||
        (tmp = body.length) != null
      ) {
        headers['content-length'] = tmp
      }
    }

    // Append the query string to the path.
    if (query) {
      rest.path = `${rest.pathname || rest.path || '/'}?${
        isString(query)
          ? query
          : formatQueryString(query)
      }`
    }

    // Some headers can be explicitly removed by setting them to null.
    const headersToRemove = []
    for (const header in headers) {
      if (headers[header] === null) {
        delete headers[header]
        headersToRemove.push(header)
      }
    }

    // Pick the HTTPS implementation when the protocol requires it.
    req = (
      protocol && startsWith(protocol.toLowerCase(), 'https')
        ? httpsRequest
        : httpRequest
    )({
      ...rest,
      headers
    })
    // Strip the headers flagged for removal (including Node defaults).
    for (let i = 0, length = headersToRemove.length; i < length; ++i) {
      req.removeHeader(headersToRemove[i])
    }

    // Send the body: pipe streams, write everything else directly.
    if (body) {
      if (typeof body.pipe === 'function') {
        body.pipe(req)
      } else {
        req.end(body)
      }
    } else {
      req.end()
    }

    req.on('error', reject)
    req.once('response', resolve)
  }).then(response => {
    // Decorate the response with cancel()/readAll() helpers.
    response.cancel = () => {
      req.abort()
    }
    response.readAll = () => streamToBuffer(response)

    const length = response.headers['content-length']
    if (length) {
      response.length = length
    }

    // Reject on non-2xx status codes, exposing the response on the
    // error for callers that need the body or headers.
    const code = response.statusCode
    if (code < 200 || code >= 300) {
      const error = new Error(response.statusMessage)
      error.code = code
      Object.defineProperty(error, 'response', {
        configurable: true,
        value: response,
        writable: true
      })

      throw error
    }

    return response
  })

  // cancel() rejects the pending promise then aborts the request.
  pResponse.cancel = () => {
    req.emit('error', new Error('HTTP request canceled!'))
    req.abort()
  }
  pResponse.readAll = () => pResponse.then(response => response.readAll())
  pResponse.request = req

  return pResponse
}

View File

@@ -2,19 +2,21 @@ import createLogger from 'debug'
const debug = createLogger('xo:main')
import appConf from 'app-conf'
import bind from 'lodash.bind'
import bind from 'lodash/bind'
import blocked from 'blocked'
import createExpress from 'express'
import eventToPromise from 'event-to-promise'
import has from 'lodash.has'
import isArray from 'lodash.isarray'
import isFunction from 'lodash.isfunction'
import pick from 'lodash.pick'
import has from 'lodash/has'
import helmet from 'helmet'
import includes from 'lodash/includes'
import pick from 'lodash/pick'
import proxyConsole from './proxy-console'
import proxyRequest from 'proxy-http-request'
import serveStatic from 'serve-static'
import startsWith from 'lodash/startsWith'
import WebSocket from 'ws'
import {compile as compileJade} from 'jade'
import { compile as compilePug } from 'pug'
import { createServer as createProxyServer } from 'http-proxy'
import { join as joinPath } from 'path'
import {
AlreadyAuthenticated,
@@ -24,20 +26,25 @@ import {
NotImplemented
} from './api-errors'
import JsonRpcPeer from 'json-rpc-peer'
import {readFile} from 'fs-promise'
import {
readFile,
readdir
} from 'fs-promise'
import * as apiMethods from './api/index'
import Api from './api'
import JobExecutor from './job-executor'
import RemoteHandler from './remote-handler'
import Scheduler from './scheduler'
import WebServer from 'http-server-plus'
import wsProxy from './ws-proxy'
import Xo from './xo'
import {
setup as setupHttpProxy
} from './http-proxy'
import {
createRawObject,
forEach,
mapToArray
isArray,
isFunction,
mapToArray,
pFromCallback
} from './utils'
import bodyParser from 'body-parser'
@@ -45,30 +52,17 @@ import connectFlash from 'connect-flash'
import cookieParser from 'cookie-parser'
import expressSession from 'express-session'
import passport from 'passport'
import {Strategy as LocalStrategy} from 'passport-local'
import { parse as parseCookies } from 'cookie'
import { Strategy as LocalStrategy } from 'passport-local'
// ===================================================================
const info = (...args) => {
console.info('[Info]', ...args)
}
const warn = (...args) => {
console.warn('[Warn]', ...args)
}
// ===================================================================
const DEFAULTS = {
http: {
listen: [
{ port: 80 }
],
mounts: {}
},
datadir: '/var/lib/xo-server/data'
}
const DEPRECATED_ENTRIES = [
'users',
'servers'
@@ -76,7 +70,6 @@ const DEPRECATED_ENTRIES = [
async function loadConfiguration () {
const config = await appConf.load('xo-server', {
defaults: DEFAULTS,
ignoreUnknownFormats: true
})
@@ -97,6 +90,8 @@ async function loadConfiguration () {
function createExpressApp () {
const app = createExpress()
app.use(helmet())
// Registers the cookie-parser and express-session middlewares,
// necessary for connect-flash.
app.use(cookieParser())
@@ -134,8 +129,8 @@ async function setUpPassport (express, xo) {
}
// Registers the sign in form.
const signInPage = compileJade(
await readFile(__dirname + '/../signin.jade')
const signInPage = compilePug(
await readFile(joinPath(__dirname, '..', 'signin.pug'))
)
express.get('/signin', (req, res, next) => {
res.send(signInPage({
@@ -146,7 +141,8 @@ async function setUpPassport (express, xo) {
const SIGNIN_STRATEGY_RE = /^\/signin\/([^/]+)(\/callback)?(:?\?.*)?$/
express.use(async (req, res, next) => {
const matches = req.url.match(SIGNIN_STRATEGY_RE)
const { url } = req
const matches = url.match(SIGNIN_STRATEGY_RE)
if (matches) {
return passport.authenticate(matches[1], async (err, user, info) => {
@@ -172,7 +168,7 @@ async function setUpPassport (express, xo) {
matches[1] === 'local' && req.body['remember-me'] === 'on'
)
res.redirect('/')
res.redirect(req.flash('return-url')[0] || '/')
})(req, res, next)
}
@@ -192,9 +188,10 @@ async function setUpPassport (express, xo) {
next()
} else if (req.cookies.token) {
next()
} else if (/favicon|fontawesome|images|styles/.test(req.url)) {
} else if (/favicon|fontawesome|images|styles/.test(url)) {
next()
} else {
req.flash('return-url', url)
return res.redirect('/signin')
}
})
@@ -214,25 +211,21 @@ async function setUpPassport (express, xo) {
// ===================================================================
const debugPlugin = createLogger('xo:plugin')
async function registerPlugin (pluginConf, pluginName) {
debugPlugin('register %s', pluginName)
const pluginPath = (function (name) {
try {
return require.resolve('xo-server-' + name)
} catch (e) {
return require.resolve(name)
}
})(pluginName)
async function registerPlugin (pluginPath, pluginName) {
const plugin = require(pluginPath)
const { version = 'unknown' } = (() => {
try {
return require(pluginPath + '/package.json')
} catch (_) {
return {}
}
})()
// Supports both “normal” CommonJS and Babel's ES2015 modules.
const {
default: factory = plugin,
configurationSchema
configurationSchema,
configurationPresets
} = plugin
// The default export can be either a factory or directly a plugin
@@ -241,36 +234,74 @@ async function registerPlugin (pluginConf, pluginName) {
? factory({ xo: this })
: factory
await this._registerPlugin(
await this.registerPlugin(
pluginName,
instance,
configurationSchema,
pluginConf
configurationPresets,
version
)
}
function registerPlugins (plugins, xo) {
return Promise.all(mapToArray(plugins, (conf, name) => {
return registerPlugin.call(xo, conf, name).then(
() => {
debugPlugin(`successfully register ${name}`)
},
error => {
debugPlugin(`failed register ${name}`)
debugPlugin(error)
}
)
const debugPlugin = createLogger('xo:plugin')
function registerPluginWrapper (pluginPath, pluginName) {
debugPlugin('register %s', pluginName)
return registerPlugin.call(this, pluginPath, pluginName).then(
() => {
debugPlugin(`successfully register ${pluginName}`)
},
error => {
debugPlugin(`failed register ${pluginName}`)
debugPlugin(error)
}
)
}
const PLUGIN_PREFIX = 'xo-server-'
const PLUGIN_PREFIX_LENGTH = PLUGIN_PREFIX.length
async function registerPluginsInPath (path) {
const files = await readdir(path).catch(error => {
if (error.code === 'ENOENT') {
return []
}
throw error
})
await Promise.all(mapToArray(files, name => {
if (startsWith(name, PLUGIN_PREFIX)) {
return registerPluginWrapper.call(
this,
`${path}/${name}`,
name.slice(PLUGIN_PREFIX_LENGTH)
)
}
}))
}
async function registerPlugins (xo) {
await Promise.all(mapToArray([
`${__dirname}/../node_modules/`,
'/usr/local/lib/node_modules/'
], xo::registerPluginsInPath))
}
// ===================================================================
async function makeWebServerListen (opts) {
// Read certificate and key if necessary.
const {certificate, key} = opts
if (certificate && key) {
[opts.certificate, opts.key] = await Promise.all([
readFile(certificate),
async function makeWebServerListen ({
certificate,
// The properties was called `certificate` before.
cert = certificate,
key,
...opts
}) {
if (cert && key) {
[opts.cert, opts.key] = await Promise.all([
readFile(cert),
readFile(key)
])
}
@@ -279,14 +310,18 @@ async function makeWebServerListen (opts) {
const niceAddress = await this.listen(opts)
debug(`Web server listening on ${niceAddress}`)
} catch (error) {
warn(`Web server could not listen on ${error.niceAddress}`)
if (error.niceAddress) {
warn(`Web server could not listen on ${error.niceAddress}`)
const {code} = error
if (code === 'EACCES') {
warn(' Access denied.')
warn(' Ports < 1024 are often reserved to privileges users.')
} else if (code === 'EADDRINUSE') {
warn(' Address already in use.')
const {code} = error
if (code === 'EACCES') {
warn(' Access denied.')
warn(' Ports < 1024 are often reserved to privileges users.')
} else if (code === 'EADDRINUSE') {
warn(' Address already in use.')
}
} else {
warn('Web server could not listen:', error.message)
}
}
}
@@ -294,40 +329,60 @@ async function makeWebServerListen (opts) {
async function createWebServer (opts) {
const webServer = new WebServer()
await Promise.all(mapToArray(opts, makeWebServerListen, webServer))
await Promise.all(mapToArray(opts, webServer::makeWebServerListen))
return webServer
}
// ===================================================================
const setUpProxies = (express, opts) => {
const setUpProxies = (express, opts, xo) => {
if (!opts) {
return
}
const proxy = createProxyServer({
ignorePath: true
}).on('error', (error) => console.error(error))
// TODO: sort proxies by descending prefix length.
// HTTP request proxy.
forEach(opts, (target, url) => {
express.use(url, (req, res) => {
proxyRequest(target + req.url, req, res)
})
express.use((req, res, next) => {
const { url } = req
for (const prefix in opts) {
if (startsWith(url, prefix)) {
const target = opts[prefix]
proxy.web(req, res, {
target: target + url.slice(prefix.length)
})
return
}
}
next()
})
// WebSocket proxy.
const webSocketServer = new WebSocket.Server({
noServer: true
})
express.on('upgrade', (req, socket, head) => {
const {url} = req
xo.on('stop', () => pFromCallback(cb => webSocketServer.close(cb)))
for (let prefix in opts) {
if (url.lastIndexOf(prefix, 0) !== -1) {
const target = opts[prefix] + url.slice(prefix.length)
webSocketServer.handleUpgrade(req, socket, head, socket => {
wsProxy(socket, target)
express.on('upgrade', (req, socket, head) => {
const { url } = req
for (const prefix in opts) {
if (startsWith(url, prefix)) {
const target = opts[prefix]
proxy.ws(req, socket, head, {
target: target + url.slice(prefix.length)
})
return
}
}
@@ -352,13 +407,6 @@ const setUpStaticFiles = (express, opts) => {
// ===================================================================
function setUpWebSocketServer (webServer) {
return new WebSocket.Server({
server: webServer,
path: '/api/'
})
}
const errorClasses = {
ALREADY_AUTHENTICATED: AlreadyAuthenticated,
INVALID_CREDENTIAL: InvalidCredential,
@@ -372,20 +420,7 @@ const apiHelpers = {
// Handles both properties and wrapped models.
const properties = user.properties || user
return pick(properties, 'id', 'email', 'groups', 'permission', 'provider')
},
getServerPublicProperties (server) {
// Handles both properties and wrapped models.
const properties = server.properties || server
server = pick(properties, 'id', 'host', 'username')
// Injects connection status.
const xapi = this._xapis[server.id]
server.status = xapi ? xapi.status : 'disconnected'
return server
return pick(properties, 'id', 'email', 'groups', 'permission', 'preferences', 'provider')
},
throw (errorId, data) {
@@ -393,16 +428,29 @@ const apiHelpers = {
}
}
const setUpApi = (webSocketServer, xo) => {
const setUpApi = (webServer, xo, verboseLogsOnErrors) => {
const webSocketServer = new WebSocket.Server({
server: webServer,
path: '/api/'
})
xo.on('stop', () => pFromCallback(cb => webSocketServer.close(cb)))
// FIXME: it can cause issues if there any property assignments in
// XO methods called from the API.
const context = { __proto__: xo, ...apiHelpers }
const api = new Api({
context
context,
verboseLogsOnErrors
})
xo.defineProperty('api', api)
api.addMethods(apiMethods)
webSocketServer.on('connection', socket => {
debug('+ WebSocket connection')
const { remoteAddress } = socket.upgradeReq.socket
debug('+ WebSocket connection (%s)', remoteAddress)
// Create the abstract XO object for this connection.
const connection = xo.createUserConnection()
@@ -420,7 +468,7 @@ const setUpApi = (webSocketServer, xo) => {
// Close the XO connection with this WebSocket.
socket.once('close', () => {
debug('- WebSocket connection')
debug('- WebSocket connection (%s)', remoteAddress)
connection.close()
})
@@ -443,25 +491,6 @@ const setUpApi = (webSocketServer, xo) => {
}
})
})
return api
}
const setUpScheduler = (api, xo) => {
const jobExecutor = new JobExecutor(xo, api)
const scheduler = new Scheduler(xo, {executor: jobExecutor})
xo.scheduler = scheduler
return scheduler
}
const setUpRemoteHandler = async xo => {
const remoteHandler = new RemoteHandler()
xo.remoteHandler = remoteHandler
xo.initRemotes()
xo.syncAllRemotes()
return remoteHandler
}
// ===================================================================
@@ -472,8 +501,9 @@ const setUpConsoleProxy = (webServer, xo) => {
const webSocketServer = new WebSocket.Server({
noServer: true
})
xo.on('stop', () => pFromCallback(cb => webSocketServer.close(cb)))
webServer.on('upgrade', (req, socket, head) => {
webServer.on('upgrade', async (req, socket, head) => {
const matches = CONSOLE_PROXY_PATH_RE.exec(req.url)
if (!matches) {
return
@@ -481,68 +511,49 @@ const setUpConsoleProxy = (webServer, xo) => {
const [, id] = matches
try {
const xapi = xo.getXAPI(id, ['VM', 'VM-controller'])
// TODO: factorize permissions checking in an Express middleware.
{
const { token } = parseCookies(req.headers.cookie)
const user = await xo.authenticateUser({ token })
if (!await xo.hasPermissions(user.id, [ [ id, 'operate' ] ])) {
throw new InvalidCredential()
}
const { remoteAddress } = socket
debug('+ Console proxy (%s - %s)', user.name, remoteAddress)
socket.on('close', () => {
debug('- Console proxy (%s - %s)', user.name, remoteAddress)
})
}
const xapi = xo.getXapi(id, ['VM', 'VM-controller'])
const vmConsole = xapi.getVmConsole(id)
// FIXME: lost connection due to VM restart is not detected.
webSocketServer.handleUpgrade(req, socket, head, connection => {
proxyConsole(connection, vmConsole, xapi.sessionId)
})
} catch (_) {
console.error(_)
} catch (error) {
console.error(error && error.stack || error)
}
})
}
// ===================================================================
const registerPasswordAuthenticationProvider = xo => {
async function passwordAuthenticationProvider ({
username,
password
}) {
if (username === undefined || password === undefined) {
return
}
const USAGE = (({
name,
version
}) => `Usage: ${name} [--safe-mode]
const user = await xo.getUserByName(username, true)
if (user && await xo.checkUserPassword(user.id, password)) {
return user.id
}
}
xo.registerAuthenticationProvider(passwordAuthenticationProvider)
}
const registerTokenAuthenticationProvider = xo => {
async function tokenAuthenticationProvider ({
token: tokenId
}) {
if (!tokenId) {
return
}
try {
return (await xo.getAuthenticationToken(tokenId)).user_id
} catch (e) {
return
}
}
xo.registerAuthenticationProvider(tokenAuthenticationProvider)
}
// ===================================================================
const help = (function ({name, version}) {
return () => `${name} v${version}`
})(require('../package.json'))
${name} v${version}`)(require('../package.json'))
// ===================================================================
export default async function main (args) {
if (args.indexOf('--help') !== -1 || args.indexOf('-h') !== -1) {
return help()
if (includes(args, '--help') || includes(args, '-h')) {
return USAGE
}
{
@@ -571,18 +582,47 @@ export default async function main (args) {
warn('Failed to change user/group:', error)
}
// Create the main object which will connects to Xen servers and
// manages all the models.
const xo = new Xo()
await xo.start(config)
if (config.httpProxy) {
setupHttpProxy(config.httpProxy)
}
// Loads default authentication providers.
registerPasswordAuthenticationProvider(xo)
registerTokenAuthenticationProvider(xo)
// Creates main object.
const xo = new Xo(config)
// Register web server close on XO stop.
xo.on('stop', () => pFromCallback(cb => webServer.close(cb)))
// Connects to all registered servers.
await xo.start()
// Express is used to manage non WebSocket connections.
const express = createExpressApp()
if (config.http.redirectToHttps) {
let port
forEach(config.http.listen, listen => {
if (
listen.port &&
(listen.cert || listen.certificate)
) {
port = listen.port
return false
}
})
if (port === undefined) {
warn('Could not setup HTTPs redirection: no HTTPs port found')
} else {
express.use((req, res, next) => {
if (req.secure) {
return next()
}
res.redirect(`https://${req.hostname}:${port}${req.originalUrl}`)
})
}
}
// Must be set up before the API.
setUpConsoleProxy(webServer, xo)
@@ -600,50 +640,28 @@ export default async function main (args) {
})
// Must be set up before the static files.
const webSocketServer = setUpWebSocketServer(webServer)
const api = setUpApi(webSocketServer, xo)
setUpApi(webServer, xo, config.verboseApiLogsOnErrors)
const scheduler = setUpScheduler(api, xo)
setUpRemoteHandler(xo)
setUpProxies(express, config.http.proxies)
setUpProxies(express, config.http.proxies, xo)
setUpStaticFiles(express, config.http.mounts)
if (config.plugins) {
await registerPlugins(config.plugins, xo)
if (!includes(args, '--safe-mode')) {
await registerPlugins(xo)
}
if (!(await xo._users.exists())) {
const email = 'admin@admin.net'
const password = 'admin'
await xo.createUser(email, {password, permission: 'admin'})
info('Default user created:', email, ' with password', password)
// TODO: implements a timeout? (or maybe it is the services launcher
// responsibility?)
const shutdown = signal => {
debug('%s caught, closing…', signal)
xo.stop()
}
// Gracefully shutdown on signals.
//
// TODO: implements a timeout? (or maybe it is the services launcher
// responsibility?)
process.on('SIGINT', async () => {
debug('SIGINT caught, closing web server…')
process.on('SIGINT', () => shutdown('SIGINT'))
process.on('SIGTERM', () => shutdown('SIGTERM'))
webServer.close()
await eventToPromise(xo, 'stopped')
webSocketServer.close()
scheduler.disableAll()
await xo.disableAllRemotes()
})
process.on('SIGTERM', async () => {
debug('SIGTERM caught, closing web server…')
webServer.close()
webSocketServer.close()
scheduler.disableAll()
await xo.disableAllRemotes()
})
return eventToPromise(webServer, 'close')
debug('bye :-)')
}

View File

@@ -1,4 +1,4 @@
import assign from 'lodash.assign'
import assign from 'lodash/assign'
import {BaseError} from 'make-error'
import {
@@ -43,13 +43,16 @@ export function _computeCrossProduct (items, productCb, extractValueMap = {}) {
}
export default class JobExecutor {
constructor (xo, api) {
constructor (xo) {
this.xo = xo
this.api = api
this._extractValueCb = {
'set': items => items.values
}
this._logger = this.xo.getLogger('jobs')
// The logger is not available until Xo has started.
xo.on('start', () => xo.getLogger('jobs').then(logger => {
this._logger = logger
}))
}
async exec (job) {
@@ -62,7 +65,9 @@ export default class JobExecutor {
try {
if (job.type === 'call') {
await this._execCall(job, runJobId)
const execStatus = await this._execCall(job, runJobId)
this.xo.emit('job:terminated', execStatus)
} else {
throw new UnsupportedJobType(job)
}
@@ -83,10 +88,14 @@ export default class JobExecutor {
async _execCall (job, runJobId) {
let paramsFlatVector
if (job.paramsVector.type === 'crossProduct') {
paramsFlatVector = _computeCrossProduct(job.paramsVector.items, productParams, this._extractValueCb)
if (job.paramsVector) {
if (job.paramsVector.type === 'crossProduct') {
paramsFlatVector = _computeCrossProduct(job.paramsVector.items, productParams, this._extractValueCb)
} else {
throw new UnsupportedVectorType(job.paramsVector)
}
} else {
throw new UnsupportedVectorType(job.paramsVector)
paramsFlatVector = [{}] // One call with no parameters
}
const connection = this.xo.createUserConnection()
@@ -94,6 +103,12 @@ export default class JobExecutor {
connection.set('user_id', job.userId)
const execStatus = {
runJobId,
start: Date.now(),
calls: {}
}
forEach(paramsFlatVector, params => {
const runCallId = this._logger.notice(`Starting ${job.method} call. (${job.id})`, {
event: 'jobCall.start',
@@ -102,8 +117,14 @@ export default class JobExecutor {
params
})
const call = execStatus.calls[runCallId] = {
method: job.method,
params,
start: Date.now()
}
promises.push(
this.api.call(connection, job.method, assign({}, params)).then(
this.xo.api.call(connection, job.method, assign({}, params)).then(
value => {
this._logger.notice(`Call ${job.method} (${runCallId}) is a success. (${job.id})`, {
event: 'jobCall.end',
@@ -111,21 +132,29 @@ export default class JobExecutor {
runCallId,
returnedValue: value
})
call.returnedValue = value
call.end = Date.now()
},
reason => {
this._logger.notice(`Call ${job.method} (${runCallId}) has failed. (${job.id})`, {
event: 'jobCall.end',
runJobId,
runCallId,
error: reason
error: {...reason, message: reason.message}
})
call.error = reason
call.end = Date.now()
}
)
)
})
connection.close()
await Promise.all(promises)
execStatus.end = Date.now()
return execStatus
}
}

View File

@@ -3,8 +3,10 @@
import {expect} from 'chai'
import leche from 'leche'
import {productParams} from './job-executor'
import {_computeCrossProduct} from './job-executor'
import {
_computeCrossProduct,
productParams
} from './job-executor'
describe('productParams', function () {
leche.withData({

22
src/loggers/abstract.js Normal file
View File

@@ -0,0 +1,22 @@
// Base class for loggers: subclasses implement
// _add(level, message, data) and inherit one convenience method per
// syslog severity level (emergency(), alert(), …, debug()).
export default class AbstractLogger {}

// Severity levels, from most to least severe.
// See: https://en.wikipedia.org/wiki/Syslog#Severity_level
const LEVELS = [
  'emergency',
  'alert',
  'critical',
  'error',
  'warning',
  'notice',
  'informational',
  'debug'
]

// Create one high level log method per level, delegating to _add().
LEVELS.forEach(level => {
  Object.defineProperty(AbstractLogger.prototype, level, {
    value (message, data) {
      return this._add(level, message, data)
    }
  })
})

View File

@@ -1,44 +1,41 @@
import highland from 'highland'
// See: https://en.wikipedia.org/wiki/Syslog#Severity_level
const LEVELS = [
'emergency',
'alert',
'critical',
'error',
'warning',
'notice',
'informational',
'debug'
]
import AbstractLogger from './abstract'
import { forEach, noop } from '../utils'
let lastDate = 0
let lastId = 0
let increment = 0
function generateUniqueKey (date) {
lastId = (date === lastDate) ? (lastId + 1) : 0
lastDate = date
if (date === lastDate) {
return `${date}:${increment++}`
}
return `${lastDate}:${lastId}`
increment = 0
return String(lastDate = date)
}
export default class LevelDbLogger {
export default class LevelDbLogger extends AbstractLogger {
constructor (db, namespace) {
super()
this._db = db
this._namespace = namespace
}
_add (level, message, data) {
const time = Date.now()
const log = {
level,
message,
data,
namespace: this._namespace,
time: Date.now()
time
}
const key = generateUniqueKey(log.time)
this._db.put(key, log)
const key = generateUniqueKey(time)
this._db.putSync(key, log)
return key
}
@@ -46,13 +43,17 @@ export default class LevelDbLogger {
return highland(this._db.createReadStream())
.filter(({value}) => value.namespace === this._namespace)
}
}
// Create high level log methods.
for (const level of LEVELS) {
Object.defineProperty(LevelDbLogger.prototype, level, {
value (message, data) {
return this._add(level, message, data)
del (id) {
if (!Array.isArray(id)) {
id = [id]
}
})
forEach(id, id => {
this._db.get(id).then(value => {
if (value.namespace === this._namespace) {
this._db.delSync(id, noop)
}
})
})
}
}

202
src/logs-cli.js Normal file
View File

@@ -0,0 +1,202 @@
import appConf from 'app-conf'
import get from 'lodash/get'
import highland from 'highland'
import levelup from 'level-party'
import ndjson from 'ndjson'
import parseArgs from 'minimist'
import sublevel from 'level-sublevel'
import util from 'util'
import { repair as repairDb } from 'leveldown'
import {forEach} from './utils'
import globMatcher from './glob-matcher'
// ===================================================================
// Streams the logs stored in `db` to stdout, newest first.
//
// `args` (as produced by getArgs()):
// - since/until: inclusive timestamp bounds on the record's `time`
// - matchers: map of field path -> predicate; a record is rejected
//   when the field is missing or the predicate does not match
// - limit: maximum number of records emitted
// - json: emit newline-delimited JSON instead of util.inspect output
//
// Resolves once the stream has been fully consumed.
async function printLogs (db, args) {
  let stream = highland(db.createReadStream({reverse: true}))

  if (args.since) {
    stream = stream.filter(({value}) => (value.time >= args.since))
  }

  if (args.until) {
    stream = stream.filter(({value}) => (value.time <= args.until))
  }

  const fields = Object.keys(args.matchers)

  if (fields.length > 0) {
    // Keep only records matching every field matcher.
    stream = stream.filter(({value}) => {
      for (const field of fields) {
        const fieldValue = get(value, field)
        if (fieldValue === undefined || !args.matchers[field](fieldValue)) {
          return false
        }
      }
      return true
    })
  }

  stream = stream.take(args.limit)

  if (args.json) {
    // Newline-delimited JSON, suitable for consumption by other
    // programs.
    stream = highland(stream.pipe(ndjson.serialize()))
      .each(value => {
        process.stdout.write(value)
      })
  } else {
    stream = stream.each(value => {
      console.log(util.inspect(value, { depth: null }))
    })
  }

  return new Promise(resolve => {
    stream.done(resolve)
  })
}
// ===================================================================
// Prints the CLI usage message to stderr.
function helper () {
  const usage = `
xo-server-logs --help, -h
Display this help message.
xo-server-logs [--json] [--limit=<limit>] [--since=<date>] [--until=<date>] [<pattern>...]
Prints the logs.
--json
Display the results as new line delimited JSON for consumption
by another program.
--limit=<limit>, -n <limit>
Limit the number of results to be displayed (default 100)
--since=<date>, --until=<date>
Start showing entries on or newer than the specified date, or on
or older than the specified date.
<date> should use the format \`YYYY-MM-DD\`.
<pattern>
Patterns can be used to filter the entries.
Patterns have the following format \`<field>=<value>\`/\`<field>\`.
xo-server-logs --repair
Repair/compact the database.
This is an advanced operation and should be used only when necessary and offline (xo-server should be stopped).
`
  console.error(usage)
}
// ===================================================================
// Parses the CLI arguments into an object:
// - help/json/repair: booleans
// - since/until: parsed into millisecond timestamps (throws on bad
//   dates)
// - limit: coerced to a number (throws when not numeric)
// - matchers: map of field name -> predicate, built from the
//   positional `<field>=<pattern>` / `<field>` arguments; a bare
//   `<field>` only requires the field to be present
function getArgs () {
  const stringArgs = ['since', 'until', 'limit']
  const args = parseArgs(process.argv.slice(2), {
    string: stringArgs,
    boolean: ['help', 'json', 'repair'],
    default: {
      limit: 100,
      json: false,
      help: false
    },
    alias: {
      limit: 'n',
      help: 'h'
    }
  })

  const patterns = {}
  for (let value of args._) {
    value = String(value)

    const i = value.indexOf('=')
    if (i !== -1) {
      const field = value.slice(0, i)
      const pattern = value.slice(i + 1)

      // Group patterns by field.
      //
      // FIXED: the existence test used to read `patterns[pattern]`
      // instead of `patterns[field]`, which could overwrite patterns
      // already collected for this field, or throw when a pattern
      // value collided with another field's key.
      patterns[field]
        ? patterns[field].push(pattern)
        : patterns[field] = [ pattern ]
    } else if (!patterns[value]) {
      // Bare field: match any value (presence check only).
      patterns[value] = null
    }
  }

  const trueFunction = () => true
  args.matchers = {}
  for (const field in patterns) {
    const values = patterns[field]
    args.matchers[field] = (values === null) ? trueFunction : globMatcher(values)
  }

  // Warning: minimist makes one array of values if the same option is used many times.
  // (But only for strings args, not boolean)
  forEach(stringArgs, arg => {
    if (args[arg] instanceof Array) {
      throw new Error(`error: too many values for ${arg} argument`)
    }
  })

  // Convert date bounds to timestamps, rejecting unparsable values.
  ;['since', 'until'].forEach(arg => {
    if (args[arg] !== undefined) {
      args[arg] = Date.parse(args[arg])
      if (isNaN(args[arg])) {
        throw new Error(`error: bad ${arg} timestamp format`)
      }
    }
  })

  if (isNaN(args.limit = +args.limit)) {
    throw new Error('error: limit is not a valid number')
  }

  return args
}
// ===================================================================
// CLI entry point.
//
// Modes:
// - --help: print the usage message and exit
// - --repair: repair/compact the LevelDB database and exit (advanced,
//   offline operation — xo-server should be stopped)
// - otherwise: open the `logs` sublevel and print matching entries
export default async function main () {
  const args = getArgs()

  if (args.help) {
    helper()
    return
  }

  // Reuse xo-server's configuration to locate the data directory.
  const config = await appConf.load('xo-server', {
    ignoreUnknownFormats: true
  })

  if (args.repair) {
    // Promisify leveldown's callback-style repair().
    await new Promise((resolve, reject) => {
      repairDb(`${config.datadir}/leveldb`, error => {
        if (error) {
          reject(error)
        } else {
          resolve()
        }
      })
    })

    return
  }

  const db = sublevel(levelup(
    `${config.datadir}/leveldb`,
    { valueEncoding: 'json' }
  )).sublevel('logs')

  return printLogs(db, args)
}

View File

@@ -2,7 +2,8 @@ import {EventEmitter} from 'events'
import {
forEach,
isEmpty
isEmpty,
isString
} from './utils'
// ===================================================================
@@ -41,7 +42,7 @@ export default class Model extends EventEmitter {
set (properties, value) {
// This method can also be used with two arguments to set a single
// property.
if (value !== undefined) {
if (isString(properties)) {
properties = { [properties]: value }
}

View File

@@ -29,7 +29,7 @@ export class Groups extends Collection {
// Serializes.
group.users = JSON.stringify(group.users)
return await this.update(group)
return /* await */ this.update(group)
}
async get (properties) {

View File

@@ -19,13 +19,13 @@ export class Jobs extends Collection {
job.userId = userId
// Serializes.
job.paramsVector = JSON.stringify(job.paramsVector)
return await this.add(new Job(job))
return /* await */ this.add(new Job(job))
}
async save (job) {
// Serializes.
job.paramsVector = JSON.stringify(job.paramsVector)
return await this.update(job)
return /* await */ this.update(job)
}
async get (properties) {

View File

@@ -18,7 +18,7 @@ export class PluginsMetadata extends Collection {
}
async save ({ id, autoload, configuration }) {
return await this.update({
return /* await */ this.update({
id,
autoload: autoload ? 'true' : 'false',
configuration: configuration && JSON.stringify(configuration)
@@ -31,7 +31,7 @@ export class PluginsMetadata extends Collection {
throw new Error('no such plugin metadata')
}
return await this.save({
return /* await */ this.save({
...pluginMetadata.properties,
...data
})

View File

@@ -1,6 +1,8 @@
import Collection from '../collection/redis'
import Model from '../model'
import { forEach } from '../utils'
import {
forEach
} from '../utils'
// ===================================================================
@@ -25,7 +27,7 @@ export class Remotes extends Collection {
}
async save (remote) {
return await this.update(remote)
return /* await */ this.update(remote)
}
async get (properties) {

View File

@@ -15,17 +15,19 @@ export class Schedules extends Collection {
return 'schedule:'
}
create (userId, job, cron, enabled) {
create (userId, job, cron, enabled, name = undefined, timezone = undefined) {
return this.add(new Schedule({
userId,
job,
cron,
enabled
enabled,
name,
timezone
}))
}
async save (schedule) {
return await this.update(schedule)
return /* await */ this.update(schedule)
}
async get (properties) {

View File

@@ -12,11 +12,11 @@ export class Servers extends Collection {
return Server
}
async create ({host, username, password}) {
async create ({host, username, password, readOnly}) {
if (await this.exists({host})) {
throw new Error('server already exists')
}
return await this.add({host, username, password})
return /* await */ this.add({host, username, password, readOnly})
}
}

View File

@@ -1,4 +1,4 @@
import { hash } from 'hashy'
import isEmpty from 'lodash/isEmpty'
import Collection from '../collection/redis'
import Model from '../model'
@@ -6,26 +6,7 @@ import { forEach } from '../utils'
// ===================================================================
const PERMISSIONS = {
none: 0,
read: 1,
write: 2,
admin: 3
}
// ===================================================================
export default class User extends Model {
hasPermission (permission) {
return PERMISSIONS[this.get('permission')] >= PERMISSIONS[permission]
}
setPassword (password) {
return hash(password).then(hash => {
return this.set('pw_hash', hash)
})
}
}
export default class User extends Model {}
User.prototype.default = {
permission: 'none'
@@ -33,6 +14,18 @@ User.prototype.default = {
// -------------------------------------------------------------------
const parseProp = (obj, name) => {
const value = obj[name]
if (value == null) {
return
}
try {
return JSON.parse(value)
} catch (error) {
console.warn('cannot parse user[%s] (%s):', name, value, error)
}
}
export class Users extends Collection {
get Model () {
return User
@@ -44,30 +37,27 @@ export class Users extends Collection {
throw new Error(`the user ${email} already exists`)
}
// Password is a special case.
const password = properties.password
delete properties.password
// Adds the email to the user's properties.
properties.email = email
// Create the user object.
const user = new User(properties)
// Sets the password if any.
if (password != null) {
await user.setPassword(password)
}
// Adds the user to the collection.
return await this.add(user)
return /* await */ this.add(user)
}
async save (user) {
// Serializes.
user.groups = JSON.stringify(user.groups)
let tmp
if (!isEmpty(tmp = user.groups)) {
user.groups = JSON.stringify(tmp)
}
if (!isEmpty(tmp = user.preferences)) {
user.preferences = JSON.stringify(tmp)
}
return await this.update(user)
return /* await */ this.update(user)
}
async get (properties) {
@@ -75,13 +65,11 @@ export class Users extends Collection {
// Deserializes
forEach(users, user => {
const {groups} = user
try {
user.groups = groups ? JSON.parse(groups) : []
} catch (_) {
console.warn('cannot parse user.groups:', groups)
user.groups = []
}
let tmp
user.groups = ((tmp = parseProp(user, 'groups')) && tmp.length)
? tmp
: undefined
user.preferences = parseProp(user, 'preferences')
})
return users

View File

@@ -23,13 +23,19 @@ export default function proxyConsole (ws, vmConsole, sessionId) {
'', ''
].join('\r\n'))
const onSend = (error) => {
if (error) {
debug('error sending to the XO client: %s', error.stack || error.message || error)
}
}
socket.pipe(partialStream('\r\n\r\n', headers => {
// TODO: check status code 200.
debug('connected')
})).on('data', data => {
if (!closed) {
// Encode to base 64.
ws.send(data.toString('base64'))
ws.send(data.toString('base64'), onSend)
}
}).on('end', () => {
if (!closed) {

View File

@@ -1,141 +0,0 @@
import filter from 'lodash.filter'
import fs from 'fs-promise'
import {exec} from 'child_process'
import {
forEach,
promisify
} from './utils'
const execAsync = promisify(exec)
const noop = () => {}
// Legacy NFS remote handler: reconciles configured NFS mounts with the
// mounts actually present on the system (via findmnt/mount/umount).
class NfsMounter {
  // Builds a map of the NFS mounts currently present on the system,
  // keyed by mount point: { host, share }.
  async _loadRealMounts () {
    let stdout
    try {
      [stdout] = await execAsync('findmnt -P -t nfs,nfs4 --output SOURCE,TARGET --noheadings')
    } catch (exc) {
      // When no mounts are found, the call pretends to fail...
    }
    const mounted = {}
    if (stdout) {
      const regex = /^SOURCE="([^:]*):(.*)" TARGET="(.*)"$/
      forEach(stdout.split('\n'), m => {
        if (m) {
          const match = regex.exec(m)
          mounted[match[3]] = {
            host: match[1],
            share: match[2]
          }
        }
      })
    }
    this._realMounts = mounted
    return mounted
  }

  // Identity here; subclasses/variants may prefix the path.
  _fullPath (path) {
    return path
  }

  _matchesRealMount (mount) {
    return this._fullPath(mount.path) in this._realMounts
  }

  async _mount (mount) {
    const path = this._fullPath(mount.path)
    await fs.ensureDir(path)
    return execAsync(`mount -t nfs ${mount.host}:${mount.share} ${path}`)
  }

  // Best-effort unmount: errors are deliberately ignored.
  async forget (mount) {
    try {
      await this._umount(mount)
    } catch (_) {
      // We have to go on...
    }
  }

  async _umount (mount) {
    const path = this._fullPath(mount.path)
    await execAsync(`umount ${path}`)
  }

  // Reconciles the actual mount state with the `enabled` flag, recording
  // failures on the mount object instead of throwing.
  async sync (mount) {
    await this._loadRealMounts()
    if (this._matchesRealMount(mount) && !mount.enabled) {
      try {
        await this._umount(mount)
      } catch (exc) {
        mount.enabled = true
        mount.error = exc.message
      }
    } else if (!this._matchesRealMount(mount) && mount.enabled) {
      try {
        await this._mount(mount)
      } catch (exc) {
        mount.enabled = false
        mount.error = exc.message
      }
    }
    return mount
  }

  // Unmounts every currently-mounted entry of `mounts`.
  //
  // Fixed: the previous implementation passed an async callback to forEach,
  // so the returned promises were ignored and callers could neither wait for
  // nor observe completion. Unmount errors remain ignored on purpose
  // (delegated to the best-effort `forget`).
  async disableAll (mounts) {
    await this._loadRealMounts()
    const promises = []
    forEach(mounts, mount => {
      if (this._matchesRealMount(mount)) {
        promises.push(this.forget(mount))
      }
    })
    await Promise.all(promises)
  }
}
// Legacy local-directory handler: only checks that the configured path
// exists and is readable/writable.
class LocalHandler {
  constructor () {
    // Nothing to mount/unmount for local paths.
    this.forget = noop
    this.disableAll = noop
  }

  // Validates the local path; on failure, disables the entry and records
  // the error message instead of throwing.
  async sync (local) {
    if (!local.enabled) {
      return local
    }
    try {
      await fs.ensureDir(local.path)
      await fs.access(local.path, fs.R_OK | fs.W_OK)
    } catch (error) {
      local.enabled = false
      local.error = error.message
    }
    return local
  }
}
// Dispatches remote operations to the handler matching the remote's type.
export default class RemoteHandler {
  constructor () {
    this.handlers = {
      nfs: new NfsMounter(),
      local: new LocalHandler()
    }
  }

  async sync (remote) {
    return this.handlers[remote.type].sync(remote)
  }

  async forget (remote) {
    return this.handlers[remote.type].forget(remote)
  }

  // Disables the remotes of every known type, in parallel.
  async disableAll (remotes) {
    const promises = []
    forEach(['local', 'nfs'], type => {
      const ofType = filter(remotes, remote => remote.type === type)
      promises.push(this.handlers[type].disableAll(ofType))
    })
    await Promise.all(promises)
  }
}

View File

@@ -0,0 +1,210 @@
import eventToPromise from 'event-to-promise'
import through2 from 'through2'
import {
parse
} from 'xo-remote-parser'
import {
addChecksumToReadStream,
getPseudoRandomBytes,
noop,
pCatch,
streamToBuffer,
validChecksumOfReadStream
} from '../utils'
// Base class for remote storage handlers (file/NFS/SMB). Concrete
// subclasses implement the `_`-prefixed hooks; the public methods add the
// shared behaviour (default options, checksum handling, self-test).
export default class RemoteHandlerAbstract {
  constructor (remote) {
    // Merge the stored remote metadata with the fields parsed from its URL.
    this._remote = {...remote, ...parse(remote.url)}
    if (this._remote.type !== this.type) {
      throw new Error('Incorrect remote type')
    }
  }

  // Must be overridden: identifier of the handled remote type ('file', 'nfs'…).
  get type () {
    throw new Error('Not implemented')
  }

  /**
   * Asks the handler to sync the state of the effective remote with its' metadata
   */
  async sync () {
    return this._sync()
  }

  async _sync () {
    throw new Error('Not implemented')
  }

  /**
   * Free the resources possibly dedicated to put the remote at work, when it is no more needed
   */
  async forget () {
    return this._forget()
  }

  async _forget () {
    throw new Error('Not implemented')
  }

  // Self-test: writes then reads back 1 MiB of pseudo-random data.
  // Never throws; returns { success: true } or
  // { success: false, step, file, error }.
  async test () {
    const testFileName = `${Date.now()}.test`
    const data = getPseudoRandomBytes(1024 * 1024)
    let step = 'write'
    try {
      await this.outputFile(testFileName, data)
      step = 'read'
      const read = await this.readFile(testFileName)
      if (data.compare(read) !== 0) {
        throw new Error('output and input did not match')
      }
      return {
        success: true
      }
    } catch (error) {
      return {
        success: false,
        step,
        file: testFileName,
        error: error.message || String(error)
      }
    } finally {
      // Best-effort clean up of the test file.
      this.unlink(testFileName).catch(noop)
    }
  }

  async outputFile (file, data, options) {
    // 'wx' makes the write fail if the file already exists (no overwrite).
    return this._outputFile(file, data, {
      flags: 'wx',
      ...options
    })
  }

  // Default implementation on top of createOutputStream.
  async _outputFile (file, data, options) {
    const stream = await this.createOutputStream(file, options)
    const promise = eventToPromise(stream, 'finish')
    stream.end(data)
    return promise
  }

  async readFile (file, options) {
    return this._readFile(file, options)
  }

  _readFile (file, options) {
    return this.createReadStream(file, options).then(streamToBuffer)
  }

  async rename (oldPath, newPath) {
    return this._rename(oldPath, newPath)
  }

  async _rename (oldPath, newPath) {
    throw new Error('Not implemented')
  }

  async list (dir = '.') {
    return this._list(dir)
  }

  async _list (dir) {
    throw new Error('Not implemented')
  }

  // Opens a read stream on `file`. When `checksum` is true, the returned
  // stream also validates the content against the stored `<file>.checksum`
  // (missing checksum file: rethrown unless ignoreMissingChecksum).
  async createReadStream (file, {
    checksum = false,
    ignoreMissingChecksum = false,
    ...options
  } = {}) {
    const streamP = this._createReadStream(file, options).then(async stream => {
      await eventToPromise(stream, 'readable')
      if (stream.length === undefined) {
        // The length is advisory only: errors while fetching it are ignored.
        stream.length = await this.getSize(file)::pCatch(noop)
      }
      return stream
    })
    if (!checksum) {
      return streamP
    }
    try {
      checksum = await this.readFile(`${file}.checksum`)
    } catch (error) {
      if (error.code === 'ENOENT' && ignoreMissingChecksum) {
        return streamP
      }
      throw error
    }
    // Preserve the advisory length across the checksum wrapper.
    let stream = await streamP
    const { length } = stream
    stream = validChecksumOfReadStream(stream, checksum.toString())
    stream.length = length
    return stream
  }

  async _createReadStream (file, options) {
    throw new Error('Not implemented')
  }

  // Opens a write stream on `file`. When `checksum` is true, the content
  // checksum is computed on the fly and written to `<file>.checksum`.
  async createOutputStream (file, {
    checksum = false,
    ...options
  } = {}) {
    const streamP = this._createOutputStream(file, {
      flags: 'wx',
      ...options
    })
    if (!checksum) {
      return streamP
    }
    // The caller writes into connectorStream; data is hashed on the way
    // through before reaching the real output stream.
    const connectorStream = through2()
    const forwardError = error => {
      connectorStream.emit('error', error)
    }
    const streamWithChecksum = addChecksumToReadStream(connectorStream)
    streamWithChecksum.pipe(await streamP)
    streamWithChecksum.checksum
      .then(value => this.outputFile(`${file}.checksum`, value))
      .catch(forwardError)
    return connectorStream
  }

  async _createOutputStream (file, options) {
    throw new Error('Not implemented')
  }

  async unlink (file, {
    checksum = false
  } = {}) {
    if (checksum) {
      // Best effort: a missing checksum file is silently ignored.
      this._unlink(`${file}.checksum`)::pCatch(noop)
    }
    return this._unlink(file)
  }

  async _unlink (file) {
    throw new Error('Not implemented')
  }

  async getSize (file) {
    return this._getSize(file)
  }

  async _getSize (file) {
    throw new Error('Not implemented')
  }
}

View File

@@ -0,0 +1,90 @@
import fs from 'fs-promise'
import startsWith from 'lodash/startsWith'
import {
dirname,
resolve
} from 'path'
import RemoteHandlerAbstract from './abstract'
import {
noop
} from '../utils'
// Remote handler backed by a directory on the local file system.
export default class LocalHandler extends RemoteHandlerAbstract {
  get type () {
    return 'file'
  }

  // Root directory of this remote.
  _getRealPath () {
    return this._remote.path
  }

  // Resolves `file` inside the remote's root and refuses any path which
  // would escape it.
  _getFilePath (file) {
    const root = this._getRealPath()
    const segments = [root]
    if (file) {
      segments.push(file)
    }
    const resolved = resolve.apply(null, segments)
    if (!startsWith(resolved, root)) {
      throw new Error('Remote path is unavailable')
    }
    return resolved
  }

  // Checks the directory exists and is readable/writable; on failure,
  // disables the remote and records the error instead of throwing.
  async _sync () {
    if (this._remote.enabled) {
      try {
        const path = this._getRealPath()
        await fs.ensureDir(path)
        await fs.access(path, fs.R_OK | fs.W_OK)
      } catch (error) {
        this._remote.enabled = false
        this._remote.error = error.message
      }
    }
    return this._remote
  }

  // Nothing to release for a local directory.
  async _forget () {
    return noop()
  }

  async _outputFile (file, data, options) {
    const filePath = this._getFilePath(file)
    await fs.ensureDir(dirname(filePath))
    await fs.writeFile(filePath, data, options)
  }

  async _readFile (file, options) {
    return fs.readFile(this._getFilePath(file), options)
  }

  async _rename (oldPath, newPath) {
    return fs.rename(this._getFilePath(oldPath), this._getFilePath(newPath))
  }

  async _list (dir = '.') {
    return fs.readdir(this._getFilePath(dir))
  }

  async _createReadStream (file, options) {
    return fs.createReadStream(this._getFilePath(file), options)
  }

  async _createOutputStream (file, options) {
    const filePath = this._getFilePath(file)
    await fs.ensureDir(dirname(filePath))
    return fs.createWriteStream(filePath, options)
  }

  async _unlink (file) {
    return fs.unlink(this._getFilePath(file))
  }

  async _getSize (file) {
    const stats = await fs.stat(this._getFilePath(file))
    return stats.size
  }
}

View File

@@ -0,0 +1,84 @@
import execa from 'execa'
import fs from 'fs-promise'
import LocalHandler from './local'
import {
forEach
} from '../utils'
// NFS remote handler: mounts the share under /tmp/xo-server/mounts and then
// behaves like a LocalHandler rooted at the mount point.
export default class NfsHandler extends LocalHandler {
  get type () {
    return 'nfs'
  }

  _getRealPath () {
    return `/tmp/xo-server/mounts/${this._remote.id}`
  }

  // Lists the NFS mounts currently present on the system, keyed by target
  // mount point: { host, share }.
  async _loadRealMounts () {
    let stdout
    const mounted = {}
    try {
      stdout = await execa.stdout('findmnt', ['-P', '-t', 'nfs,nfs4', '--output', 'SOURCE,TARGET', '--noheadings'])
      const regex = /^SOURCE="([^:]*):(.*)" TARGET="(.*)"$/
      forEach(stdout.split('\n'), m => {
        if (m) {
          const match = regex.exec(m)
          mounted[match[3]] = {
            host: match[1],
            share: match[2]
          }
        }
      })
    } catch (exc) {
      // When no mounts are found, the call pretends to fail...
      if (exc.stderr !== '') {
        throw exc
      }
    }
    this._realMounts = mounted
    return mounted
  }

  _matchesRealMount () {
    return this._getRealPath() in this._realMounts
  }

  async _mount () {
    await fs.ensureDir(this._getRealPath())
    // NFSv3 is forced (vers=3) — assumption: newer versions are not
    // required by the targeted servers; TODO confirm.
    return execa('mount', ['-t', 'nfs', '-o', 'vers=3', `${this._remote.host}:${this._remote.path}`, this._getRealPath()])
  }

  // Reconciles the actual mount state with the remote's `enabled` flag,
  // recording any error on the metadata instead of throwing.
  async _sync () {
    await this._loadRealMounts()
    if (this._matchesRealMount() && !this._remote.enabled) {
      try {
        await this._umount(this._remote)
      } catch (exc) {
        this._remote.enabled = true
        this._remote.error = exc.message
      }
    } else if (!this._matchesRealMount() && this._remote.enabled) {
      try {
        await this._mount()
      } catch (exc) {
        this._remote.enabled = false
        this._remote.error = exc.message
      }
    }
    return this._remote
  }

  // Best-effort unmount: errors are deliberately ignored.
  async _forget () {
    try {
      await this._umount(this._remote)
    } catch (_) {
      // We have to go on...
    }
  }

  // NOTE(review): unmounts `remote.path` (the share path) rather than
  // `_getRealPath()` (the actual mount point used by `_mount`) — verify
  // which one `umount` should receive.
  async _umount (remote) {
    await execa('umount', [remote.path])
  }
}

191
src/remote-handlers/smb.js Normal file
View File

@@ -0,0 +1,191 @@
import Smb2 from '@marsaud/smb2-promise'
import RemoteHandlerAbstract from './abstract'
import {
noop,
pFinally
} from '../utils'
// Normalize the error code for file not found.
// Normalize the error code for file not found.
//
// SMB reports missing files/paths with STATUS_* codes; callers expect the
// POSIX-style 'ENOENT'. The original error object is left untouched: the
// normalized code is defined on a wrapper whose prototype is the original.
const normalizeError = error => {
  const { code } = error
  return (
    code === 'STATUS_OBJECT_NAME_NOT_FOUND' ||
    code === 'STATUS_OBJECT_PATH_NOT_FOUND'
  )
    ? Object.create(error, {
      code: {
        configurable: true,
        // Fixed: `readable` is not a valid property-descriptor attribute
        // (it was silently ignored, leaving `code` non-enumerable).
        enumerable: true,
        value: 'ENOENT',
        writable: true
      }
    })
    : error
}
// SMB remote handler built on @marsaud/smb2-promise.
//
// A new SMB client is created (and closed) for every operation: connections
// are not pooled.
export default class SmbHandler extends RemoteHandlerAbstract {
  constructor (remote) {
    super(remote)
    // Nothing to release on forget: clients are per-operation.
    this._forget = noop
  }

  get type () {
    return 'smb'
  }

  _getClient (remote) {
    return new Smb2({
      share: `\\\\${remote.host}`,
      domain: remote.domain,
      username: remote.username,
      password: remote.password,
      autoCloseTimeout: 0
    })
  }

  // Converts a handler path to a backslash-separated SMB path relative to
  // the remote's base path.
  _getFilePath (file) {
    if (file === '.') {
      file = undefined
    }
    let path = (this._remote.path !== '')
      ? this._remote.path
      : ''
    // Ensure remote path is a directory.
    if (path !== '' && path[path.length - 1] !== '\\') {
      path += '\\'
    }
    if (file) {
      path += file.replace(/\//g, '\\')
    }
    return path
  }

  // Like path.dirname, but for backslash-separated SMB paths.
  _dirname (file) {
    const parts = file.split('\\')
    parts.pop()
    return parts.join('\\')
  }

  async _sync () {
    if (this._remote.enabled) {
      try {
        // Check access (smb2 does not expose connect in public so far...)
        await this.list()
      } catch (error) {
        this._remote.enabled = false
        this._remote.error = error.message
      }
    }
    return this._remote
  }

  async _outputFile (file, data, options = {}) {
    const client = this._getClient(this._remote)
    const path = this._getFilePath(file)
    const dir = this._dirname(path)
    if (dir) {
      await client.ensureDir(dir)
    }
    // ::pFinally guarantees the client is closed whatever the outcome.
    return client.writeFile(path, data, options)::pFinally(() => { client.close() })
  }

  async _readFile (file, options = {}) {
    const client = this._getClient(this._remote)
    let content
    try {
      content = await client.readFile(this._getFilePath(file), options)::pFinally(() => { client.close() })
    } catch (error) {
      throw normalizeError(error)
    }
    return content
  }

  async _rename (oldPath, newPath) {
    const client = this._getClient(this._remote)
    try {
      await client.rename(this._getFilePath(oldPath), this._getFilePath(newPath))::pFinally(() => { client.close() })
    } catch (error) {
      throw normalizeError(error)
    }
  }

  async _list (dir = '.') {
    const client = this._getClient(this._remote)
    let list
    try {
      list = await client.readdir(this._getFilePath(dir))::pFinally(() => { client.close() })
    } catch (error) {
      throw normalizeError(error)
    }
    return list
  }

  async _createReadStream (file, options = {}) {
    const client = this._getClient(this._remote)
    let stream
    try {
      // FIXME ensure that options are properly handled by @marsaud/smb2
      stream = await client.createReadStream(this._getFilePath(file), options)
      // The client is closed once the stream has been fully consumed.
      stream.on('end', () => client.close())
    } catch (error) {
      // NOTE(review): the client is not closed on this error path — verify
      // whether an explicit client.close() is needed here.
      throw normalizeError(error)
    }
    return stream
  }

  async _createOutputStream (file, options = {}) {
    const client = this._getClient(this._remote)
    const path = this._getFilePath(file)
    const dir = this._dirname(path)
    let stream
    try {
      if (dir) {
        await client.ensureDir(dir)
      }
      stream = await client.createWriteStream(path, options) // FIXME ensure that options are properly handled by @marsaud/smb2
    } catch (err) {
      client.close()
      throw err
    }
    // The client is closed once the stream has been fully written.
    stream.on('finish', () => client.close())
    return stream
  }

  async _unlink (file) {
    const client = this._getClient(this._remote)
    try {
      await client.unlink(this._getFilePath(file))::pFinally(() => { client.close() })
    } catch (error) {
      throw normalizeError(error)
    }
  }

  async _getSize (file) {
    const client = await this._getClient(this._remote)
    let size
    try {
      size = await client.getSize(this._getFilePath(file))::pFinally(() => { client.close() })
    } catch (error) {
      throw normalizeError(error)
    }
    return size
  }
}

View File

@@ -1,164 +0,0 @@
import {BaseError} from 'make-error'
import {CronJob} from 'cron'
import { forEach } from './utils'
// Accepts either a schedule object or a plain ID and returns the ID.
const _resolveId = scheduleOrId => {
  const { id } = scheduleOrId
  return id || scheduleOrId
}
// Base class for all scheduler-related errors.
export class SchedulerError extends BaseError {}
// Raised when adding a schedule whose ID is already registered.
export class ScheduleOverride extends SchedulerError {
  constructor (scheduleOrId) {
    const id = _resolveId(scheduleOrId)
    super(`Schedule ID ${id} is already added`)
  }
}
// Raised when the requested schedule ID is unknown.
export class NoSuchSchedule extends SchedulerError {
  constructor (scheduleOrId) {
    super(`No schedule found for ID ${_resolveId(scheduleOrId)}`)
  }
}
// Raised when disabling a schedule which is not currently enabled.
export class ScheduleNotEnabled extends SchedulerError {
  constructor (scheduleOrId) {
    // Fixed: the ' is not enabled' suffix was previously concatenated
    // OUTSIDE the super() call (misplaced parenthesis), so it was silently
    // discarded and the error message was truncated.
    super('Schedule ' + _resolveId(scheduleOrId) + ' is not enabled')
  }
}
// Raised when enabling a schedule which is already enabled.
export class ScheduleAlreadyEnabled extends SchedulerError {
  constructor (scheduleOrId) {
    super(`Schedule ${_resolveId(scheduleOrId)} is already enabled`)
  }
}
// Raised when the job referenced by a schedule does not exist.
export class ScheduleJobNotFound extends SchedulerError {
  constructor (jobId, scheduleId) {
    super(`Job ${jobId} not found for Schedule ${scheduleId}`)
  }
}
// In-memory cron scheduler: tracks known schedules and, for the enabled
// ones, a running CronJob which executes the associated job through
// `executor`.
export default class Scheduler {
  constructor (xo, {executor}) {
    this.executor = executor
    this.xo = xo
    this._scheduleTable = undefined
    // Deliberately not awaited: schedules are loaded in the background.
    this._loadSchedules()
  }

  // Loads all schedules from the store and (re)builds the internal state:
  // - _schedules: id → schedule
  // - _scheduleTable: id → enabled flag
  // - _cronJobs: id → running CronJob (enabled schedules only)
  async _loadSchedules () {
    this._schedules = {}
    const schedules = await this.xo.getAllSchedules()
    this._scheduleTable = {}
    this._cronJobs = {}
    forEach(schedules, schedule => {
      this._add(schedule)
    })
  }

  // Registers a new schedule; throws ScheduleOverride if already known.
  add (schedule) {
    if (this.exists(schedule)) {
      throw new ScheduleOverride(schedule)
    }
    this._add(schedule)
  }

  _add (schedule) {
    const id = _resolveId(schedule)
    this._schedules[id] = schedule
    this._scheduleTable[id] = false
    if (schedule.enabled) {
      this._enable(schedule)
    }
  }

  // Unregisters a schedule, disabling it first if necessary. Scheduler
  // errors (unknown schedule / not enabled) are ignored; anything else is
  // rethrown.
  remove (id) {
    try {
      this._disable(id)
    } catch (exc) {
      // Fixed precedence bug: `!exc instanceof SchedulerError` parsed as
      // `(!exc) instanceof SchedulerError`, which is always false, so
      // unexpected errors were silently swallowed.
      if (!(exc instanceof SchedulerError)) {
        throw exc
      }
    } finally {
      delete this._schedules[id]
      delete this._scheduleTable[id]
    }
  }

  exists (scheduleOrId) {
    const id_ = _resolveId(scheduleOrId)
    return id_ in this._schedules
  }

  // Returns the schedule from the in-memory cache; throws if unknown.
  async get (id) {
    if (!this.exists(id)) {
      throw new NoSuchSchedule(id)
    }
    return this._schedules[id]
  }

  // Fetches a schedule from the store (not from the in-memory cache).
  async _get (id) {
    const schedule = await this.xo.getSchedule(id)
    if (!schedule) {
      throw new NoSuchSchedule(id)
    }
    return schedule
  }

  // Replaces a known schedule, restarting its cron job if it was enabled.
  async update (schedule) {
    if (!this.exists(schedule)) {
      throw new NoSuchSchedule(schedule)
    }
    const enabled = this.isEnabled(schedule)
    if (enabled) {
      await this._disable(schedule)
    }
    this._add(schedule)
  }

  isEnabled (scheduleOrId) {
    return this._scheduleTable[_resolveId(scheduleOrId)]
  }

  // Starts the cron job for a schedule and marks it enabled.
  _enable (schedule) {
    const jobId = schedule.job
    const cronJob = new CronJob(schedule.cron, async () => {
      try {
        const job = await this._getJob(jobId, schedule.id)
        this.executor.exec(job)
      } catch (_) {
        // FIXME What do we do ?
      }
    })
    this._cronJobs[schedule.id] = cronJob
    cronJob.start()
    this._scheduleTable[schedule.id] = true
  }

  async _getJob (id, scheduleId) {
    const job = await this.xo.getJob(id)
    if (!job) {
      throw new ScheduleJobNotFound(id, scheduleId)
    }
    return job
  }

  // Stops the cron job of a schedule and marks it disabled; throws if the
  // schedule is unknown or not enabled.
  _disable (scheduleOrId) {
    if (!this.exists(scheduleOrId)) {
      throw new NoSuchSchedule(scheduleOrId)
    }
    if (!this.isEnabled(scheduleOrId)) {
      throw new ScheduleNotEnabled(scheduleOrId)
    }
    const id = _resolveId(scheduleOrId)
    this._cronJobs[id].stop()
    delete this._cronJobs[id]
    this._scheduleTable[id] = false
  }

  disableAll () {
    forEach(this.scheduleTable, (enabled, id) => {
      if (enabled) {
        this._disable(id)
      }
    })
  }

  get scheduleTable () {
    return this._scheduleTable
  }
}

39
src/schemas/job.js Normal file
View File

@@ -0,0 +1,39 @@
import paramsVector from 'job/params-vector'
// JSON schema (draft 04) describing a job: a recorded method call executed
// on behalf of a user, typically triggered by a schedule.
export default {
  $schema: 'http://json-schema.org/draft-04/schema#',
  type: 'object',
  properties: {
    type: {
      enum: ['call']
    },
    id: {
      type: 'string',
      description: 'job identifier'
    },
    name: {
      type: 'string',
      description: 'human readable name'
    },
    userId: {
      type: 'string',
      description: 'identifier of the user who have created the job (the permissions of the user are used by the job)'
    },
    key: {
      type: 'string'
      // TODO description
    },
    method: {
      type: 'string',
      description: 'called method'
    },
    // Sub-schema of the parameters vector, imported from job/params-vector.
    paramsVector
  },
  required: [
    'type',
    'id',
    'userId',
    'key',
    'method'
  ]
}

View File

@@ -0,0 +1,59 @@
// JSON schema (draft 04) describing a `paramsVector`: the cross product of
// sets of parameter objects used to generate the calls of a job.
export default {
  $schema: 'http://json-schema.org/draft-04/schema#',
  type: 'object',
  properties: {
    type: {
      enum: ['crossProduct']
    },
    items: {
      type: 'array',
      description: 'vector of values to multiply with others vectors',
      items: {
        type: 'object',
        properties: {
          type: {
            enum: ['set']
          },
          values: {
            type: 'array',
            items: {
              type: 'object'
            },
            minItems: 1
          }
        },
        required: [
          'type',
          'values'
        ]
      },
      minItems: 1
    }
  },
  required: [
    'type',
    'items'
  ]
}
/* Example:
{
  "type": "crossProduct",
  "items": [
    {
      "type": "set",
      "values": [
        {"id": 0, "name": "snapshot of 0"},
        {"id": 1, "name": "snapshot of 1"}
      ]
    },
    {
      "type": "set",
      "values": [
        {"force": true}
      ]
    }
  ]
}
*/

View File

@@ -8,7 +8,7 @@ export default {
},
time: {
type: 'string',
description: 'timestamp (in miliseconds) of this log'
description: 'timestamp (in milliseconds) of this log'
},
message: {
type: 'string',
@@ -18,14 +18,7 @@ export default {
type: 'string',
description: 'space to store logs'
},
data: {
oneOf: [
{ '$ref': 'log/jobStart.js' },
{ '$ref': 'log/jobEnd.js' },
{ '$ref': 'log/jobCallStart.js' },
{ '$ref': 'log/jobCallEnd.js' }
]
}
data: {}
},
required: [
'id',

50
src/schemas/user.js Normal file
View File

@@ -0,0 +1,50 @@
// JSON schema (draft 04) describing a user account as exposed by the API.
export default {
  $schema: 'http://json-schema.org/draft-04/schema#',
  type: 'object',
  properties: {
    id: {
      type: 'string',
      description: 'unique identifier for this user'
    },
    email: {
      type: 'string',
      description: 'email address of this user'
    },
    groups: {
      type: 'array',
      items: {
        type: 'string'
      },
      description: 'identifier of groups this user belong to'
    },
    permission: {
      enum: ['none', 'read', 'write', 'admin'],
      description: 'root permission for this user, none and admin are the only significant ones'
    },
    preferences: {
      type: 'object',
      properties: {
        lang: { type: 'string' },
        sshKeys: {
          type: 'array',
          items: {
            type: 'object',
            properties: {
              key: { type: 'string' },
              title: { type: 'string' }
            },
            required: [
              'key',
              'title'
            ]
          }
        }
      },
      description: 'various user preferences'
    }
  },
  required: [
    'id',
    'email'
  ]
}

View File

@@ -1,17 +1,62 @@
import base64url from 'base64url'
import forEach from 'lodash.foreach'
import has from 'lodash.has'
import eventToPromise from 'event-to-promise'
import forEach from 'lodash/forEach'
import getStream from 'get-stream'
import has from 'lodash/has'
import highland from 'highland'
import humanFormat from 'human-format'
import isArray from 'lodash.isarray'
import isString from 'lodash.isstring'
import invert from 'lodash/invert'
import isArray from 'lodash/isArray'
import isString from 'lodash/isString'
import keys from 'lodash/keys'
import kindOf from 'kindof'
import multiKeyHashInt from 'multikey-hash'
import xml2js from 'xml2js'
import {promisify} from 'bluebird'
import {randomBytes} from 'crypto'
// Moment timezone can be loaded only one time, it's a workaround to load
// the latest version because cron module uses an old version of moment which
// does not implement `guess` function for example.
import 'moment-timezone'
import { CronJob } from 'cron'
import {
all as pAll,
defer,
promisify,
reflect as pReflect
} from 'promise-toolbox'
import {
createHash,
randomBytes
} from 'crypto'
import { Readable } from 'stream'
import through2 from 'through2'
import {utcFormat as d3TimeFormat} from 'd3-time-format'
// ===================================================================
// Wraps a Buffer in a Readable stream which emits its content in chunks
// bounded by the requested read size.
export function bufferToStream (buf) {
  const { length } = buf
  let offset = 0
  return new Readable({
    read (size) {
      if (offset === length) {
        // Whole buffer consumed: signal end of stream.
        this.push(null)
        return
      }
      const end = Math.min(offset + size, length)
      this.push(buf.slice(offset, end))
      offset = end
    }
  })
}
export const streamToBuffer = getStream.buffer
// -------------------------------------------------------------------
export function camelToSnakeCase (string) {
return string.replace(
/([a-z])([A-Z])/g,
@@ -24,7 +69,91 @@ export function camelToSnakeCase (string) {
// Returns an empty object without prototype (if possible).
export const createRawObject = Object.create
? (createObject => () => createObject(null))(Object.create)
: () => {}
: () => ({})
// -------------------------------------------------------------------
// Hash algorithm identifiers used in the stored checksum format
// `$<algorithmId>$$<hex digest>` (crypt(3)-like prefix).
const ALGORITHM_TO_ID = {
  md5: '1',
  sha256: '5',
  sha512: '6'
}

const ID_TO_ALGORITHM = invert(ALGORITHM_TO_ID)

// Wrap a readable stream in a stream with a checksum promise
// attribute which is resolved at the end of an input stream.
// (Finally .checksum contains the checksum of the input stream)
//
// Example:
//   const sourceStream = ...
//   const targetStream = ...
//   const checksumStream = addChecksumToReadStream(sourceStream)
//   await Promise.all([
//     eventToPromise(checksumStream.pipe(targetStream), 'finish'),
//     checksumStream.checksum.then(console.log)
//   ])
export const addChecksumToReadStream = (stream, algorithm = 'md5') => {
  const algorithmId = ALGORITHM_TO_ID[algorithm]
  if (!algorithmId) {
    throw new Error(`unknown algorithm: ${algorithm}`)
  }
  const hash = createHash(algorithm)
  const { promise, resolve } = defer()
  // Pass the data through unchanged while feeding the hash; the checksum
  // promise is resolved in the flush callback, i.e. at end of input.
  const wrapper = stream.pipe(through2(
    (chunk, enc, callback) => {
      hash.update(chunk)
      callback(null, chunk)
    },
    callback => {
      resolve(hash.digest('hex'))
      callback()
    }
  ))
  // pipe() does not forward error events: do it manually.
  stream.on('error', error => wrapper.emit('error', error))
  wrapper.checksum = promise.then(hash => `$${algorithmId}$$${hash}`)
  return wrapper
}
// Check if the checksum of a readable stream is equals to an expected checksum.
// The given stream is wrapped in a stream which emits an error event
// if the computed checksum is not equals to the expected checksum.
//
// `expectedChecksum` uses the format produced by addChecksumToReadStream:
// `$<algorithmId>$$<hex digest>`.
export const validChecksumOfReadStream = (stream, expectedChecksum) => {
  // Extract the algorithm identifier between the first two '$'.
  const algorithmId = expectedChecksum.slice(1, expectedChecksum.indexOf('$', 1))
  if (!algorithmId) {
    throw new Error(`unknown algorithm: ${algorithmId}`)
  }
  const hash = createHash(ID_TO_ALGORITHM[algorithmId])
  // highWaterMark: 0 keeps internal buffering minimal while hashing.
  const wrapper = stream.pipe(through2(
    { highWaterMark: 0 },
    (chunk, enc, callback) => {
      hash.update(chunk)
      callback(null, chunk)
    },
    callback => {
      // At end of input: emit an error if the checksums do not match.
      const checksum = `$${algorithmId}$$${hash.digest('hex')}`
      callback(
        checksum !== expectedChecksum
          ? new Error(`Bad checksum (${checksum}), expected: ${expectedChecksum}`)
          : null
      )
    }
  ))
  // pipe() does not forward error events: do it manually.
  stream.on('error', error => wrapper.emit('error', error))
  // Resolves once the wrapped stream has ended (checksum verified).
  wrapper.checksumVerified = eventToPromise(wrapper, 'end')
  return wrapper
}
// -------------------------------------------------------------------
@@ -48,10 +177,27 @@ export function extractProperty (obj, prop) {
// -------------------------------------------------------------------
// Generates n pseudo-random bytes.
//
// NOT cryptographically secure (Math.random based) — use crypto.randomBytes
// for anything security-sensitive.
export const getPseudoRandomBytes = n => {
  // Buffer.allocUnsafe replaces the deprecated `new Buffer(n)` constructor;
  // every byte is overwritten below, so the uninitialized memory never leaks.
  const bytes = Buffer.allocUnsafe(n)
  const odd = n & 1
  // Fill two bytes at a time with a random 16-bit integer.
  for (let i = 0, m = n - odd; i < m; i += 2) {
    bytes.writeUInt16BE(Math.random() * 65536 | 0, i)
  }
  // Fill the trailing byte when n is odd.
  if (odd) {
    bytes.writeUInt8(Math.random() * 256 | 0, n - 1)
  }
  return bytes
}
export const generateUnsecureToken = (n = 32) => base64url(getPseudoRandomBytes(n))
// Generate a secure random Base64 string.
export const generateToken = (function (randomBytes) {
export const generateToken = (randomBytes => {
return (n = 32) => randomBytes(n).then(base64url)
})(promisify(randomBytes))
})(randomBytes::promisify())
// -------------------------------------------------------------------
@@ -87,63 +233,99 @@ export const parseXml = (function () {
// -------------------------------------------------------------------
// Very light and fast set.
//
// - works only with strings
// - methods are already bound and chainable
export const lightSet = collection => {
  // Backing store: prototype-less object used as a string → true map.
  const data = createRawObject()
  if (collection) {
    forEach(collection, item => {
      data[item] = true
    })
    // Drop the reference so the input collection can be collected.
    collection = null
  }

  const api = {
    add: item => {
      data[item] = true
      return api
    },
    clear: () => {
      for (const item in data) {
        delete data[item]
      }
      return api
    },
    delete: item => {
      delete data[item]
      return api
    },
    // Returns true or undefined (not coerced to a boolean).
    has: item => data[item],
    toArray: () => keys(data)
  }
  return api
}
// -------------------------------------------------------------------
// This function does nothing and returns undefined.
//
// It is often used to swallow promise's errors.
export function noop () {}
export const noop = () => {}
// -------------------------------------------------------------------
// Ponyfill for Promise.finally(cb)
//
// Usage: promise::pFinally(cb)
export function pFinally (cb) {
return this.then(
value => this.constructor.resolve(cb()).then(() => value),
reason => this.constructor.resolve(cb()).then(() => {
throw reason
})
// Logs the settlement of a promise (for debugging) and returns it as-is.
//
// Usage: pDebug(promise, name) or promise::pDebug(name)
export function pDebug (promise, name) {
  if (arguments.length === 1) {
    // Bound form: the promise is `this`, the single argument is the name.
    name = promise
    promise = this
  }
  const report = (verb, outcome) => {
    const suffix = outcome !== undefined ? ` with ${kindOf(outcome)}` : ''
    console.log('%s', `Promise ${name} ${verb}${suffix}`)
  }
  Promise.resolve(promise).then(
    value => report('resolved', value),
    reason => report('rejected', reason)
  )
  return promise
}
// Given an array which contains promises return a promise that is
// fulfilled when all the items in the array are either fulfilled or
// rejected.
// Given a collection (array or object) which contains promises,
// return a promise that is fulfilled when all the items in the
// collection are either fulfilled or rejected.
//
// This promise will be fulfilled with a collection (of the same type,
// array or object) containing promise inspections.
//
// Usage: pSettle(promises) or promises::pSettle()
export function pSettle (promises) {
  // Map each promise to an inspection object; neither callback ever
  // rejects, so the resulting `Promise.all()` settles once every item
  // is either fulfilled or rejected.
  //
  // Fix: a second, unreachable `return` (diff residue using the
  // non-standard `::` bind syntax) followed the first one; removed.
  const statuses = promises.map(promise => promise.then(
    value => ({
      isFulfilled: () => true,
      isRejected: () => false,
      value: () => value,
      reason: () => {
        throw new Error('no reason, the promise has been fulfilled')
      }
    }),
    reason => ({
      isFulfilled: () => false,
      isRejected: () => true,
      value: () => {
        throw new Error('no value, the promise has been rejected')
      },
      reason: () => reason
    })
  ))

  return Promise.all(statuses)
}
// -------------------------------------------------------------------
export {
// Create a function which returns promises instead of taking a
// callback.
export { // eslint-disable-line no-duplicate-imports
all as pAll,
catchPlus as pCatch,
delay as pDelay,
fromCallback as pFromCallback,
isPromise,
lastly as pFinally,
promisify,
// For all enumerable methods of an object, create a new method
// which name is suffixed with `Async` which return promises instead
// of taking a callback.
promisifyAll
} from 'bluebird'
promisifyAll,
reflect as pReflect
} from 'promise-toolbox'
// -------------------------------------------------------------------
@@ -165,6 +347,19 @@ export function parseSize (size) {
// -------------------------------------------------------------------
const _has = Object.prototype.hasOwnProperty
// Removes an own property from an object and returns its value.
export const popProperty = obj => {
  // Return (and remove) the value of the first own enumerable
  // property found; `undefined` when the object has none.
  for (const name in obj) {
    if (Object.prototype.hasOwnProperty.call(obj, name)) {
      return extractProperty(obj, name)
    }
  }
}
// -------------------------------------------------------------------
// Format a date in ISO 8601 in a safe way to be used in filenames
// (even on Windows).
export const safeDateFormat = d3TimeFormat('%Y%m%dT%H%M%SZ')
@@ -173,10 +368,16 @@ export const safeDateFormat = d3TimeFormat('%Y%m%dT%H%M%SZ')
// This functions are often used throughout xo-server.
//
// Exports them from here to avoid direct dependencies on lodash.
export { default as forEach } from 'lodash.foreach'
export { default as isEmpty } from 'lodash.isempty'
export { default as mapToArray } from 'lodash.map'
// Exports them from here to avoid direct dependencies on lodash/
export { default as forEach } from 'lodash/forEach' // eslint-disable-line no-duplicate-imports
export { default as isArray } from 'lodash/isArray' // eslint-disable-line no-duplicate-imports
export { default as isBoolean } from 'lodash/isBoolean'
export { default as isEmpty } from 'lodash/isEmpty'
export { default as isFunction } from 'lodash/isFunction'
export { default as isInteger } from 'lodash/isInteger'
export { default as isObject } from 'lodash/isObject'
export { default as isString } from 'lodash/isString' // eslint-disable-line no-duplicate-imports
export { default as mapToArray } from 'lodash/map'
// -------------------------------------------------------------------
@@ -200,11 +401,10 @@ export const DONE = {}
export function map (
collection,
iteratee,
thisArg,
target = has(collection, 'length') ? [] : {}
) {
forEach(collection, (item, i) => {
const value = iteratee.call(thisArg, item, i, collection, DONE)
const value = iteratee(item, i, collection, DONE)
if (value === DONE) {
return false
}
@@ -215,11 +415,6 @@ export function map (
return target
}
// Helper to `map()` to update the current collection.
export const mapInPlace = (collection, iteratee, thisArg) =>
  // the results are written back into `collection` itself
  map(collection, iteratee, thisArg, collection)
// -------------------------------------------------------------------
// Create a hash from multiple values.
@@ -234,5 +429,52 @@ export const multiKeyHash = (...args) => new Promise(resolve => {
// -------------------------------------------------------------------
// Collect a (node) stream into an array, optionally filtering and
// mapping its items on the way; rejects on the first stream error.
export const streamToArray = (stream, {
  filter,
  mapper
} = {}) => new Promise((resolve, reject) => {
  let wrapped = highland(stream).stopOnError(reject)
  if (filter) {
    wrapped = wrapped.filter(filter)
  }
  if (mapper) {
    wrapped = wrapped.map(mapper)
  }
  wrapped.toArray(resolve)
})
// -------------------------------------------------------------------
// Periodically run `fn` according to `cronTime` (in `timeZone`);
// returns a function which stops the scheduling.
//
// Overlapping executions are prevented: ticks are skipped while a
// previous run is still pending.
export const scheduleFn = (cronTime, fn, timeZone) => {
  let busy = false

  const job = new CronJob({
    cronTime,
    onTick: async () => {
      if (busy) {
        return
      }
      busy = true

      try {
        await fn()
      } catch (error) {
        console.error('[WARN] scheduled function:', error && error.stack || error)
      } finally {
        busy = false
      }
    },
    start: true,
    timeZone
  })

  return () => {
    job.stop()
  }
}
// -------------------------------------------------------------------
// Wrap a value in a function.
export function wrap (value) {
  // Constant function: every call returns the wrapped value.
  return () => value
}

View File

@@ -1,7 +1,6 @@
/* eslint-env mocha */
import expect from 'must'
import sinon from 'sinon'
// ===================================================================
@@ -13,7 +12,6 @@ import {
formatXml,
generateToken,
parseSize,
pFinally,
pSettle
} from './utils'
@@ -122,32 +120,6 @@ describe('generateToken()', () => {
// -------------------------------------------------------------------
describe('pSettle()', () => {
it('makes an array of PromiseInspection', async () => {
const [
status1,
status2
] = await pSettle([
Promise.resolve(42),
Promise.reject('fatality')
])
expect(status1.isRejected()).to.equal(false)
expect(status2.isRejected()).to.equal(true)
expect(status1.isFulfilled()).to.equal(true)
expect(status2.isFulfilled()).to.equal(false)
expect(status1.value()).to.equal(42)
expect(::status2.value).to.throw()
expect(::status1.reason).to.throw()
expect(status2.reason()).to.equal('fatality')
})
})
// -------------------------------------------------------------------
describe('parseSize()', function () {
it('parses a human size', function () {
expect(parseSize('1G')).to.equal(1e9)
@@ -170,56 +142,60 @@ describe('parseSize()', function () {
// -------------------------------------------------------------------
describe('pFinally()', () => {
it('calls a callback on resolution', async () => {
const value = {}
const spy = sinon.spy()
await expect(
Promise.resolve(value)::pFinally(spy)
).to.resolve.to.equal(
value
)
expect(spy.callCount).to.equal(1)
})
it('calls a callback on rejection', async () => {
const reason = {}
const spy = sinon.spy()
await expect(
Promise.reject(reason)::pFinally(spy)
).to.reject.to.equal(
reason
)
expect(spy.callCount).to.equal(1)
})
})
// -------------------------------------------------------------------
describe('pSettle()', () => {
it('makes an array of PromiseInspection', async () => {
it('works with arrays', async () => {
const [
status1,
status2
status2,
status3
] = await pSettle([
Promise.resolve(42),
Math.PI,
Promise.reject('fatality')
])
expect(status1.isRejected()).to.equal(false)
expect(status2.isRejected()).to.equal(true)
expect(status2.isRejected()).to.equal(false)
expect(status3.isRejected()).to.equal(true)
expect(status1.isFulfilled()).to.equal(true)
expect(status2.isFulfilled()).to.equal(false)
expect(status2.isFulfilled()).to.equal(true)
expect(status3.isFulfilled()).to.equal(false)
expect(status1.value()).to.equal(42)
expect(::status2.value).to.throw()
expect(status2.value()).to.equal(Math.PI)
expect(::status3.value).to.throw()
expect(::status1.reason).to.throw()
expect(status2.reason()).to.equal('fatality')
expect(::status2.reason).to.throw()
expect(status3.reason()).to.equal('fatality')
})
it('works with objects', async () => {
const {
a: status1,
b: status2,
c: status3
} = await pSettle({
a: Promise.resolve(42),
b: Math.PI,
c: Promise.reject('fatality')
})
expect(status1.isRejected()).to.equal(false)
expect(status2.isRejected()).to.equal(false)
expect(status3.isRejected()).to.equal(true)
expect(status1.isFulfilled()).to.equal(true)
expect(status2.isFulfilled()).to.equal(true)
expect(status3.isFulfilled()).to.equal(false)
expect(status1.value()).to.equal(42)
expect(status2.value()).to.equal(Math.PI)
expect(::status3.value).to.throw()
expect(::status1.reason).to.throw()
expect(::status2.reason).to.throw()
expect(status3.reason()).to.equal('fatality')
})
})

566
src/vhd-merge.js Normal file
View File

@@ -0,0 +1,566 @@
import fu from 'struct-fu'
import {
noop,
streamToBuffer
} from './utils'
// Set to a truthy value to enable tagged `[vhd-util]` console logging.
const VHD_UTIL_DEBUG = 0

// Conditional logger: a real logger when debugging is enabled above,
// otherwise the shared `noop` so calls are no-ops.
const debug = VHD_UTIL_DEBUG
  ? str => console.log(`[vhd-util]${str}`)
  : noop
// ===================================================================
//
// Spec:
// https://www.microsoft.com/en-us/download/details.aspx?id=23850
//
// C implementation:
// https://github.com/rubiojr/vhd-util-convert
//
// ===================================================================
// Sizes in bytes.
const VHD_FOOTER_SIZE = 512
const VHD_HEADER_SIZE = 1024
const VHD_SECTOR_SIZE = 512
// Block allocation table entry size. (Block addr)
const VHD_ENTRY_SIZE = 4
const VHD_PARENT_LOCATOR_ENTRIES = 8
const VHD_PLATFORM_CODE_NONE = 0
// Types of backup treated. Others are not supported.
const HARD_DISK_TYPE_DYNAMIC = 3 // Full backup.
const HARD_DISK_TYPE_DIFFERENCING = 4 // Delta backup.
// Other.
const BLOCK_UNUSED = 0xFFFFFFFF
const BIT_MASK = 0x80
// ===================================================================
const fuFooter = fu.struct([
fu.char('cookie', 8), // 0
fu.uint32('features'), // 8
fu.uint32('fileFormatVersion'), // 12
fu.struct('dataOffset', [
fu.uint32('high'), // 16
fu.uint32('low') // 20
]),
fu.uint32('timestamp'), // 24
fu.char('creatorApplication', 4), // 28
fu.uint32('creatorVersion'), // 32
fu.uint32('creatorHostOs'), // 36
fu.struct('originalSize', [ // At the creation, current size of the hard disk.
fu.uint32('high'), // 40
fu.uint32('low') // 44
]),
fu.struct('currentSize', [ // Current size of the virtual disk. At the creation: currentSize = originalSize.
fu.uint32('high'), // 48
fu.uint32('low') // 52
]),
fu.struct('diskGeometry', [
fu.uint16('cylinders'), // 56
fu.uint8('heads'), // 58
fu.uint8('sectorsPerTrackCylinder') // 59
]),
fu.uint32('diskType'), // 60 Disk type, must be equal to HARD_DISK_TYPE_DYNAMIC/HARD_DISK_TYPE_DIFFERENCING.
fu.uint32('checksum'), // 64
fu.uint8('uuid', 16), // 68
fu.char('saved'), // 84
fu.char('hidden'), // 85
fu.char('reserved', 426) // 86
])
const fuHeader = fu.struct([
fu.char('cookie', 8),
fu.struct('dataOffset', [
fu.uint32('high'),
fu.uint32('low')
]),
fu.struct('tableOffset', [ // Absolute byte offset of the Block Allocation Table.
fu.uint32('high'),
fu.uint32('low')
]),
fu.uint32('headerVersion'),
fu.uint32('maxTableEntries'), // Max entries in the Block Allocation Table.
fu.uint32('blockSize'), // Block size in bytes. Default (2097152 => 2MB)
fu.uint32('checksum'),
fu.uint8('parentUuid', 16),
fu.uint32('parentTimestamp'),
fu.uint32('reserved1'),
fu.char('parentUnicodeName', 512),
fu.struct('parentLocatorEntry', [
fu.uint32('platformCode'),
fu.uint32('platformDataSpace'),
fu.uint32('platformDataLength'),
fu.uint32('reserved'),
fu.struct('platformDataOffset', [ // Absolute byte offset of the locator data.
fu.uint32('high'),
fu.uint32('low')
])
], VHD_PARENT_LOCATOR_ENTRIES),
fu.char('reserved2', 256)
])
// ===================================================================
// Helpers
// ===================================================================
const SIZE_OF_32_BITS = Math.pow(2, 32)

// Converts a {high, low} pair of unsigned 32 bits integers (as used by
// struct-fu for 64 bits fields) to a single JS number.
//
// Fix: the parameter was named `fu`, shadowing the `struct-fu` module
// import; renamed to avoid confusion.
const uint32ToUint64 = uint64 => uint64.high * SIZE_OF_32_BITS + uint64.low
// Returns a 32 bits integer corresponding to a Vhd version:
// major in the high 16 bits, minor in the low 16 bits.
const getVhdVersion = (major, minor) => {
  const low16 = minor & 0x0000FFFF
  return (major << 16) | low16
}
// Sectors conversions.

// Number of sectors needed to store `bytes` bytes (rounded up).
const sectorsRoundUp = bytes => Math.floor((bytes + VHD_SECTOR_SIZE - 1) / VHD_SECTOR_SIZE)

// Same, but never returns 0: a structure occupies at least one sector.
const sectorsRoundUpNoZero = bytes => sectorsRoundUp(bytes) || 1

// Byte size of `sectors` sectors.
const sectorsToBytes = sectors => sectors * VHD_SECTOR_SIZE

// Check/Set a bit on a vhd map.
// The bitmap is big-endian bit-wise: bit 0 is the MSB of byte 0
// (BIT_MASK === 0x80).
const mapTestBit = (map, bit) => ((map[bit >> 3] << (bit & 7)) & BIT_MASK) !== 0
const mapSetBit = (map, bit) => { map[bit >> 3] |= (BIT_MASK >> (bit & 7)) }
// Write `value` into `buf` at the position described by a struct-fu
// field, normalizing a plain byte offset into {bytes, bits} form.
const packField = (field, value, buf) => {
  const { offset } = field
  const position = typeof offset === 'object'
    ? offset
    : { bytes: offset, bits: 0 }
  field.pack(value, buf, position)
}
// Read a struct-fu field from `buf`, normalizing a plain byte offset
// into {bytes, bits} form.
const unpackField = (field, buf) => {
  const { offset } = field
  const position = typeof offset === 'object'
    ? offset
    : { bytes: offset, bits: 0 }
  return field.unpack(buf, position)
}
// ===================================================================
// Returns the checksum of a raw footer.
// The raw footer is altered with the new sum.
function checksumFooter (rawFooter) {
  // Checksum of the raw footer bytes, computed with the checksum
  // field zeroed out, then written back into the buffer (per the VHD
  // spec: one's complement of the byte sum).
  const checksumField = fuFooter.fields.checksum

  // Reset current sum.
  packField(checksumField, 0, rawFooter)

  let sum = 0
  for (let i = 0; i < VHD_FOOTER_SIZE; i++) {
    sum = (sum + rawFooter[i]) & 0xFFFFFFFF
  }
  sum = 0xFFFFFFFF - sum

  // Write new sum.
  packField(checksumField, sum, rawFooter)

  return sum
}
// Returns the size (in bytes) of a parent locator entry's data area.
//
// NOTE(review): when `platformDataSpace` is smaller than one sector it
// is treated as a sector count, otherwise as a byte count which must
// be sector-aligned (0 is returned when it is not). This dual
// interpretation presumably works around inconsistent producers —
// confirm against the VHD spec and the tools that write these files.
function getParentLocatorSize (parentLocatorEntry) {
  const { platformDataSpace } = parentLocatorEntry

  if (platformDataSpace < VHD_SECTOR_SIZE) {
    return sectorsToBytes(platformDataSpace)
  }

  return (platformDataSpace % VHD_SECTOR_SIZE === 0)
    ? platformDataSpace
    : 0
}
// ===================================================================
// Read/write access to a VHD file through a remote handler (an object
// providing createReadStream/createOutputStream/getSize).
class Vhd {
  constructor (handler, path) {
    this._handler = handler
    this._path = path
  }

  // =================================================================
  // Read functions.
  // =================================================================

  // Returns the first address after metadata. (In bytes)
  getEndOfHeaders () {
    const { header } = this

    let end = uint32ToUint64(this.footer.dataOffset) + VHD_HEADER_SIZE

    const blockAllocationTableSize = sectorsToBytes(
      sectorsRoundUpNoZero(header.maxTableEntries * VHD_ENTRY_SIZE)
    )

    // Max(end, block allocation table end)
    end = Math.max(end, uint32ToUint64(header.tableOffset) + blockAllocationTableSize)

    for (let i = 0; i < VHD_PARENT_LOCATOR_ENTRIES; i++) {
      const entry = header.parentLocatorEntry[i]

      if (entry.platformCode !== VHD_PLATFORM_CODE_NONE) {
        const dataOffset = uint32ToUint64(entry.platformDataOffset)

        // Max(end, locator end)
        end = Math.max(end, dataOffset + getParentLocatorSize(entry))
      }
    }

    debug(`End of headers: ${end}.`)

    return end
  }

  // Returns the first sector after data.
  getEndOfData () {
    let end = Math.floor(this.getEndOfHeaders() / VHD_SECTOR_SIZE)

    const { maxTableEntries } = this.header
    for (let i = 0; i < maxTableEntries; i++) {
      let blockAddr = this.readAllocationTableEntry(i)

      if (blockAddr !== BLOCK_UNUSED) {
        // Compute next block address.
        blockAddr += this.sectorsPerBlock + this.sectorsOfBitmap

        end = Math.max(end, blockAddr)
      }
    }

    debug(`End of data: ${end}.`)

    return sectorsToBytes(end)
  }

  // Returns the start position of the vhd footer.
  // The real footer, not the copy at the beginning of the vhd file.
  async getFooterStart () {
    const stats = await this._handler.getSize(this._path)
    return stats.size - VHD_FOOTER_SIZE
  }

  // Get the beginning (footer + header) of a vhd file.
  async readHeaderAndFooter () {
    const buf = await streamToBuffer(
      await this._handler.createReadStream(this._path, {
        start: 0,
        end: VHD_FOOTER_SIZE + VHD_HEADER_SIZE - 1
      })
    )

    const sum = unpackField(fuFooter.fields.checksum, buf)
    const sumToTest = checksumFooter(buf)

    // Checksum child & parent.
    if (sumToTest !== sum) {
      throw new Error(`Bad checksum in vhd. Expected: ${sum}. Given: ${sumToTest}. (data=${buf.toString('hex')})`)
    }

    const header = this.header = fuHeader.unpack(buf.slice(VHD_FOOTER_SIZE))
    this.footer = fuFooter.unpack(buf)

    // Compute the number of sectors in one block.
    // Default: One block contains 4096 sectors of 512 bytes.
    const sectorsPerBlock = this.sectorsPerBlock = Math.floor(header.blockSize / VHD_SECTOR_SIZE)

    // Compute bitmap size in sectors.
    // Default: 1.
    const sectorsOfBitmap = this.sectorsOfBitmap = sectorsRoundUpNoZero(sectorsPerBlock >> 3)

    // Full block size => data block size + bitmap size.
    this.fullBlockSize = sectorsToBytes(sectorsPerBlock + sectorsOfBitmap)

    // In bytes.
    // Default: 512.
    this.bitmapSize = sectorsToBytes(sectorsOfBitmap)
  }

  // Check if a vhd object has a block allocation table.
  hasBlockAllocationTableMap () {
    return this.footer.fileFormatVersion > getVhdVersion(1, 0)
  }

  // Returns a buffer that contains the block allocation table of a vhd file.
  async readBlockTable () {
    const { header } = this

    const offset = uint32ToUint64(header.tableOffset)
    const size = sectorsToBytes(
      sectorsRoundUpNoZero(header.maxTableEntries * VHD_ENTRY_SIZE)
    )

    this.blockTable = await streamToBuffer(
      await this._handler.createReadStream(this._path, {
        start: offset,
        end: offset + size - 1
      })
    )
  }

  // Returns the address block at the entry location of one table.
  readAllocationTableEntry (entry) {
    return this.blockTable.readUInt32BE(entry * VHD_ENTRY_SIZE)
  }

  // Returns the data content of a block. (Not the bitmap !)
  async readBlockData (blockAddr) {
    const { blockSize } = this.header
    const handler = this._handler
    const path = this._path

    const blockDataAddr = sectorsToBytes(blockAddr + this.sectorsOfBitmap)
    const footerStart = await this.getFooterStart()
    const isPadded = footerStart < (blockDataAddr + blockSize)

    // Size of the current block in the vhd file.
    const size = isPadded ? (footerStart - blockDataAddr) : sectorsToBytes(this.sectorsPerBlock)

    debug(`Read block data at: ${blockDataAddr}. (size=${size})`)

    const buf = await streamToBuffer(
      await handler.createReadStream(path, {
        start: blockDataAddr,
        end: blockDataAddr + size - 1
      })
    )

    // Padded by zero !
    if (isPadded) {
      return Buffer.concat([buf, new Buffer(blockSize - size).fill(0)])
    }

    return buf
  }

  // Returns a buffer that contains the bitmap of a block.
  //
  // TODO: merge with readBlockData().
  async readBlockBitmap (blockAddr) {
    const { bitmapSize } = this
    const offset = sectorsToBytes(blockAddr)

    debug(`Read bitmap at: ${offset}. (size=${bitmapSize})`)

    return streamToBuffer(
      await this._handler.createReadStream(this._path, {
        start: offset,
        end: offset + bitmapSize - 1
      })
    )
  }

  // =================================================================
  // Write functions.
  // =================================================================

  // Write a buffer at a given position in a vhd file.
  async _write (buffer, offset) {
    // TODO: could probably be merged in remote handlers.
    return this._handler.createOutputStream(this._path, {
      start: offset,
      flags: 'r+'
    }).then(stream => new Promise((resolve, reject) => {
      stream.on('error', reject)
      stream.write(buffer, () => {
        stream.end()
        resolve()
      })
    }))
  }

  // Write an entry in the allocation table.
  writeAllocationTableEntry (entry, value) {
    this.blockTable.writeUInt32BE(value, entry * VHD_ENTRY_SIZE)
  }

  // Make a new empty block at vhd end.
  // Update block allocation table in context and in file.
  async createBlock (blockId) {
    // End of file !
    let offset = this.getEndOfData()

    // Padded on bound sector.
    if (offset % VHD_SECTOR_SIZE) {
      offset += (VHD_SECTOR_SIZE - (offset % VHD_SECTOR_SIZE))
    }

    const blockAddr = Math.floor(offset / VHD_SECTOR_SIZE)

    const {
      blockTable,
      fullBlockSize
    } = this

    debug(`Create block at ${blockAddr}. (size=${fullBlockSize}, offset=${offset})`)

    // New entry in block allocation table.
    this.writeAllocationTableEntry(blockId, blockAddr)

    const tableOffset = uint32ToUint64(this.header.tableOffset)
    const entry = blockId * VHD_ENTRY_SIZE

    // Write an empty block and addr in vhd file.
    await this._write(new Buffer(fullBlockSize).fill(0), offset)
    await this._write(blockTable.slice(entry, entry + VHD_ENTRY_SIZE), tableOffset + entry)

    return blockAddr
  }

  // Write a bitmap at a block address.
  async writeBlockBitmap (blockAddr, bitmap) {
    const { bitmapSize } = this

    if (bitmap.length !== bitmapSize) {
      throw new Error(`Bitmap length is not correct ! ${bitmap.length}`)
    }

    const offset = sectorsToBytes(blockAddr)

    debug(`Write bitmap at: ${offset}. (size=${bitmapSize}, data=${bitmap.toString('hex')})`)
    await this._write(bitmap, offset)
  }

  // Write `n` sectors of `block.data` starting at `beginSectorId` into
  // this VHD, allocating the block if necessary and updating its
  // bitmap.
  async writeBlockSectors (block, beginSectorId, n) {
    let blockAddr = this.readAllocationTableEntry(block.id)

    if (blockAddr === BLOCK_UNUSED) {
      blockAddr = await this.createBlock(block.id)
    }

    const endSectorId = beginSectorId + n
    const offset = blockAddr + this.sectorsOfBitmap + beginSectorId

    debug(`Write block data at: ${offset}. (counter=${n}, blockId=${block.id}, blockSector=${beginSectorId})`)

    await this._write(
      block.data.slice(
        sectorsToBytes(beginSectorId),
        sectorsToBytes(endSectorId)
      ),
      sectorsToBytes(offset)
    )

    // FIX: readBlockBitmap() takes the block address as its only
    // parameter; an extra leading `this.bitmapSize` argument was
    // previously passed, shifting the address out and reading the
    // bitmap from the wrong offset.
    const bitmap = await this.readBlockBitmap(blockAddr)

    for (let i = beginSectorId; i < endSectorId; ++i) {
      mapSetBit(bitmap, i)
    }

    await this.writeBlockBitmap(blockAddr, bitmap)
  }

  // Merge block id (of vhd child) into vhd parent.
  async coalesceBlock (child, blockAddr, blockId) {
    // Get block data and bitmap of block id.
    const blockData = await child.readBlockData(blockAddr)
    const blockBitmap = await child.readBlockBitmap(blockAddr)

    debug(`Coalesce block ${blockId} at ${blockAddr}.`)

    // For each sector of block data...
    const { sectorsPerBlock } = child
    for (let i = 0; i < sectorsPerBlock; i++) {
      // If no changes on one sector, skip.
      if (!mapTestBit(blockBitmap, i)) {
        continue
      }

      let sectors = 0

      // Count contiguous changed sectors starting at `i`.
      for (; sectors + i < sectorsPerBlock; sectors++) {
        if (!mapTestBit(blockBitmap, sectors + i)) {
          break
        }
      }

      // Write n sectors into parent.
      debug(`Coalesce block: write. (offset=${i}, sectors=${sectors})`)
      await this.writeBlockSectors(
        { id: blockId, data: blockData },
        i,
        sectors
      )

      i += sectors
    }
  }

  // Write a context footer. (At the end and beginning of a vhd file.)
  async writeFooter () {
    const { footer } = this

    const offset = this.getEndOfData()
    const rawFooter = fuFooter.pack(footer)

    footer.checksum = checksumFooter(rawFooter)

    debug(`Write footer at: ${offset} (checksum=${footer.checksum}). (data=${rawFooter.toString('hex')})`)

    await this._write(rawFooter, 0)
    await this._write(rawFooter, offset)
  }
}
// Merge vhd child into vhd parent.
//
// Child must be a delta backup !
// Parent must be a full backup !
export default async function vhdMerge (
  parentHandler, parentPath,
  childHandler, childPath
) {
  // Coalesce a differencing (delta) VHD into its dynamic (full)
  // parent, block by block.
  const parentVhd = new Vhd(parentHandler, parentPath)
  const childVhd = new Vhd(childHandler, childPath)

  // Reading footer and header.
  await Promise.all([
    parentVhd.readHeaderAndFooter(),
    childVhd.readHeaderAndFooter()
  ])

  // Child must be a delta.
  if (childVhd.footer.diskType !== HARD_DISK_TYPE_DIFFERENCING) {
    throw new Error('Unable to merge, child is not a delta backup.')
  }

  // Merging in differencing disk is prohibited in our case.
  if (parentVhd.footer.diskType !== HARD_DISK_TYPE_DYNAMIC) {
    throw new Error('Unable to merge, parent is not a full backup.')
  }

  // Allocation table map is not yet implemented.
  if (
    parentVhd.hasBlockAllocationTableMap() ||
    childVhd.hasBlockAllocationTableMap()
  ) {
    throw new Error('Unsupported allocation table map.')
  }

  // Read allocation table of child/parent.
  await Promise.all([
    parentVhd.readBlockTable(),
    childVhd.readBlockTable()
  ])

  const { maxTableEntries } = childVhd.header
  for (let blockId = 0; blockId < maxTableEntries; blockId++) {
    const blockAddr = childVhd.readAllocationTableEntry(blockId)
    if (blockAddr !== BLOCK_UNUSED) {
      // Sequential on purpose: new blocks are appended to the parent.
      await parentVhd.coalesceBlock(childVhd, blockAddr, blockId)
    }
  }

  await parentVhd.writeFooter()
}

View File

@@ -1,53 +0,0 @@
import createDebug from 'debug'
import WebSocket from 'ws'
const debug = createDebug('xo:wsProxy')
const defaults = {
  // Automatically close the client connection when the remote close.
  autoClose: true
}

// Proxy a WebSocket `client` to a remote server which has `url` as
// address.
export default function wsProxy (client, url, opts) {
  opts = {
    ...defaults,
    protocol: client.protocol,
    ...opts
  }
  const autoClose = !!opts.autoClose
  delete opts.autoClose

  const onClientSend = error => {
    if (error) {
      debug('client send error', error)
    }
  }
  const onRemoteSend = error => {
    if (error) {
      debug('remote send error', error)
    }
  }

  const remote = new WebSocket(url, opts)
    .once('open', () => {
      debug('connected to %s', url)
    })
    .once('close', () => {
      debug('remote closed')
      if (autoClose) {
        client.close()
      }
    })
    .once('error', error => {
      debug('remote error: %s', error)
    })
    .on('message', message => {
      client.send(message, onClientSend)
    })

  client
    .once('close', () => {
      debug('client closed')
      remote.close()
    })
    .on('message', message => {
      remote.send(message, onRemoteSend)
    })
}

641
src/xapi-object-to-xo.js Normal file
View File

@@ -0,0 +1,641 @@
import {
ensureArray,
extractProperty,
forEach,
isArray,
mapToArray,
parseXml
} from './utils'
import {
isHostRunning,
isVmHvm,
isVmRunning,
parseDateTime
} from './xapi'
// ===================================================================
const {
defineProperties,
freeze
} = Object
function link (obj, prop, idField = '$id') {
  // Follow the dynamic `$prop` reference and return its id (or the
  // ids of its items when it references an array).
  const value = obj[`$${prop}`]
  if (value == null) {
    return value // Properly handles null and undefined.
  }

  if (Array.isArray(value)) {
    return value.map(item => item[idField])
  }

  return value[idField]
}
// Parse a string date time to a Unix timestamp (in seconds).
//
// If the value is a number or can be converted as one, it is assumed
// to already be a timestamp and returned.
//
// If there are no data or if the timestamp is 0, returns null.
function toTimestamp (date) {
  // No data (or timestamp 0) => null.
  if (!date) {
    return null
  }

  // Already a number (or a numeric string): assume it is a timestamp.
  const asNumber = +date
  if (!Number.isNaN(asNumber)) {
    return asNumber
  }

  const parsed = parseDateTime(date)
  if (!parsed) {
    return null
  }

  return Math.round(parsed.getTime() / 1000)
}
// ===================================================================
const TRANSFORMS = {
pool (obj) {
return {
default_SR: link(obj, 'default_SR'),
HA_enabled: Boolean(obj.ha_enabled),
master: link(obj, 'master'),
tags: obj.tags,
name_description: obj.name_description,
name_label: obj.name_label || obj.$master.name_label
// TODO
// - ? networks = networksByPool.items[pool.id] (network.$pool.id)
// - hosts = hostsByPool.items[pool.id] (host.$pool.$id)
// - patches = poolPatchesByPool.items[pool.id] (poolPatch.$pool.id)
// - SRs = srsByContainer.items[pool.id] (sr.$container.id)
// - templates = vmTemplatesByContainer.items[pool.id] (vmTemplate.$container.$id)
// - VMs = vmsByContainer.items[pool.id] (vm.$container.id)
// - $running_hosts = runningHostsByPool.items[pool.id] (runningHost.$pool.id)
// - $running_VMs = runningVmsByPool.items[pool.id] (runningHost.$pool.id)
// - $VMs = vmsByPool.items[pool.id] (vm.$pool.id)
}
},
// -----------------------------------------------------------------
host (obj) {
const {
$metrics: metrics,
other_config: otherConfig
} = obj
const isRunning = isHostRunning(obj)
return {
// Deprecated
CPUs: obj.cpu_info,
address: obj.address,
bios_strings: obj.bios_strings,
build: obj.software_version.build_number,
enabled: Boolean(obj.enabled),
cpus: {
cores: +obj.cpu_info.cpu_count,
sockets: +obj.cpu_info.socket_count
},
current_operations: obj.current_operations,
hostname: obj.hostname,
iSCSI_name: otherConfig.iscsi_iqn || null,
license_params: obj.license_params,
license_server: obj.license_server,
license_expiry: toTimestamp(obj.license_params.expiry),
name_description: obj.name_description,
name_label: obj.name_label,
memory: (function () {
if (metrics) {
const free = +metrics.memory_free
const total = +metrics.memory_total
return {
usage: total - free,
size: total
}
}
return {
usage: 0,
size: 0,
// Deprecated
total: 0
}
})(),
patches: link(obj, 'patches'),
powerOnMode: obj.power_on_mode,
power_state: metrics
? (isRunning ? 'Running' : 'Halted')
: 'Unknown',
startTime: toTimestamp(otherConfig.boot_time),
agentStartTime: toTimestamp(otherConfig.agent_start_time),
tags: obj.tags,
version: obj.software_version.product_version,
// TODO: dedupe.
PIFs: link(obj, 'PIFs'),
$PIFs: link(obj, 'PIFs'),
PCIs: link(obj, 'PCIs'),
$PCIs: link(obj, 'PCIs'),
PGPUs: link(obj, 'PGPUs'),
$PGPUs: link(obj, 'PGPUs'),
$PBDs: link(obj, 'PBDs')
// TODO:
// - controller = vmControllersByContainer.items[host.id]
// - SRs = srsByContainer.items[host.id]
// - tasks = tasksByHost.items[host.id]
// - templates = vmTemplatesByContainer.items[host.id]
// - VMs = vmsByContainer.items[host.id]
// - $vCPUs = sum(host.VMs, vm => host.CPUs.number)
}
},
// -----------------------------------------------------------------
vm (obj) {
const {
$guest_metrics: guestMetrics,
$metrics: metrics,
other_config: otherConfig
} = obj
const isHvm = isVmHvm(obj)
const isRunning = isVmRunning(obj)
const xenTools = (() => {
if (!isRunning || !metrics) {
// Unknown status, returns nothing.
return
}
if (!guestMetrics) {
return false
}
const { PV_drivers_version: { major, minor } } = guestMetrics
if (major === undefined || minor === undefined) {
return false
}
return guestMetrics.PV_drivers_up_to_date
? 'up to date'
: 'out of date'
})()
const vm = {
// type is redefined after for controllers/, templates &
// snapshots.
type: 'VM',
addresses: guestMetrics && guestMetrics.networks || null,
auto_poweron: Boolean(otherConfig.auto_poweron),
boot: obj.HVM_boot_params,
CPUs: {
max: +obj.VCPUs_max,
number: (
isRunning && metrics && xenTools
? +metrics.VCPUs_number
: +obj.VCPUs_at_startup
)
},
current_operations: obj.current_operations,
docker: (function () {
const monitor = otherConfig['xscontainer-monitor']
if (!monitor) {
return
}
if (monitor === 'False') {
return {
enabled: false
}
}
const {
docker_ps: process,
docker_info: info,
docker_version: version
} = otherConfig
return {
enabled: true,
info: info && parseXml(info).docker_info,
process: process && parseXml(process).docker_ps,
version: version && parseXml(version).docker_version
}
})(),
// TODO: there is two possible value: "best-effort" and "restart"
high_availability: Boolean(obj.ha_restart_priority),
memory: (function () {
const dynamicMin = +obj.memory_dynamic_min
const dynamicMax = +obj.memory_dynamic_max
const staticMin = +obj.memory_static_min
const staticMax = +obj.memory_static_max
const memory = {
dynamic: [ dynamicMin, dynamicMax ],
static: [ staticMin, staticMax ]
}
const gmMemory = guestMetrics && guestMetrics.memory
if (!isRunning) {
memory.size = dynamicMax
} else if (gmMemory && gmMemory.used) {
memory.usage = +gmMemory.used
memory.size = +gmMemory.total
} else if (metrics) {
memory.size = +metrics.memory_actual
} else {
memory.size = dynamicMax
}
return memory
})(),
name_description: obj.name_description,
name_label: obj.name_label,
other: otherConfig,
os_version: guestMetrics && guestMetrics.os_version || null,
power_state: obj.power_state,
snapshots: link(obj, 'snapshots'),
startTime: metrics && toTimestamp(metrics.start_time),
tags: obj.tags,
VIFs: link(obj, 'VIFs'),
virtualizationMode: isHvm ? 'hvm' : 'pv',
// <=> Are the Xen Server tools installed?
//
// - undefined: unknown status
// - false: not optimized
// - 'out of date': optimized but drivers should be updated
// - 'up to date': optimized
xenTools,
$container: (
isRunning
? link(obj, 'resident_on')
: link(obj, 'pool') // TODO: handle local VMs (`VM.get_possible_hosts()`).
),
$VBDs: link(obj, 'VBDs'),
// TODO: dedupe
VGPUs: link(obj, 'VGPUs'),
$VGPUs: link(obj, 'VGPUs')
}
if (obj.is_control_domain) {
vm.type += '-controller'
} else if (obj.is_a_snapshot) {
vm.type += '-snapshot'
vm.snapshot_time = toTimestamp(obj.snapshot_time)
vm.$snapshot_of = link(obj, 'snapshot_of')
} else if (obj.is_a_template) {
vm.type += '-template'
vm.CPUs.number = +obj.VCPUs_at_startup
vm.template_info = {
arch: otherConfig['install-arch'],
disks: (function () {
const {disks: xml} = otherConfig
let data
if (!xml || !(data = parseXml(xml)).provision) {
return []
}
const disks = ensureArray(data.provision.disk)
forEach(disks, function normalize (disk) {
disk.bootable = disk.bootable === 'true'
disk.size = +disk.size
disk.SR = extractProperty(disk, 'sr')
})
return disks
})(),
install_methods: (function () {
const methods = otherConfig['install-methods']
return methods ? methods.split(',') : []
})(),
install_repository: otherConfig['install-repository']
}
}
let tmp
if ((tmp = obj.VCPUs_params)) {
tmp.cap && (vm.cpuCap = +tmp.cap)
tmp.weight && (vm.cpuWeight = +tmp.weight)
}
if (!isHvm) {
vm.PV_args = obj.PV_args
}
return vm
},
// -----------------------------------------------------------------
sr (obj) {
return {
type: 'SR',
content_type: obj.content_type,
// TODO: Should it replace usage?
physical_usage: +obj.physical_utilisation,
name_description: obj.name_description,
name_label: obj.name_label,
size: +obj.physical_size,
SR_type: obj.type,
tags: obj.tags,
usage: +obj.virtual_allocation,
VDIs: link(obj, 'VDIs'),
$container: (
obj.shared || !obj.$PBDs[0]
? link(obj, 'pool')
: link(obj.$PBDs[0], 'host')
),
$PBDs: link(obj, 'PBDs')
}
},
// -----------------------------------------------------------------
pbd (obj) {
return {
type: 'PBD',
attached: obj.currently_attached,
host: link(obj, 'host'),
SR: link(obj, 'SR')
}
},
// -----------------------------------------------------------------
pif (obj) {
return {
type: 'PIF',
attached: Boolean(obj.currently_attached),
device: obj.device,
dns: obj.DNS,
disallowUnplug: Boolean(obj.disallow_unplug),
gateway: obj.gateway,
ip: obj.IP,
mac: obj.MAC,
management: Boolean(obj.management), // TODO: find a better name.
mode: obj.ip_configuration_mode,
mtu: +obj.MTU,
netmask: obj.netmask,
// A non physical PIF is a "copy" of an existing physical PIF (same device)
// A physical PIF cannot be unplugged
physical: Boolean(obj.physical),
vlan: +obj.VLAN,
$host: link(obj, 'host'),
$network: link(obj, 'network')
}
},
// -----------------------------------------------------------------
vdi (obj) {
if (!obj.managed) {
return
}
const vdi = {
type: 'VDI',
name_description: obj.name_description,
name_label: obj.name_label,
size: +obj.virtual_size,
snapshots: link(obj, 'snapshots'),
tags: obj.tags,
usage: +obj.physical_utilisation,
$SR: link(obj, 'SR'),
$VBDs: link(obj, 'VBDs')
}
if (obj.is_a_snapshot) {
vdi.type += '-snapshot'
vdi.snapshot_time = toTimestamp(obj.snapshot_time)
vdi.$snapshot_of = link(obj, 'snapshot_of')
}
return vdi
},
// -----------------------------------------------------------------
vbd (obj) {
return {
type: 'VBD',
attached: Boolean(obj.currently_attached),
bootable: Boolean(obj.bootable),
is_cd_drive: obj.type === 'CD',
position: obj.userdevice,
read_only: obj.mode === 'RO',
VDI: link(obj, 'VDI'),
VM: link(obj, 'VM')
}
},
// -----------------------------------------------------------------
vif (obj) {
return {
type: 'VIF',
allowedIpv4Addresses: obj.ipv4_allowed,
allowedIpv6Addresses: obj.ipv6_allowed,
attached: Boolean(obj.currently_attached),
device: obj.device, // TODO: should it be cast to a number?
MAC: obj.MAC,
MTU: +obj.MTU,
$network: link(obj, 'network'),
$VM: link(obj, 'VM')
}
},
// -----------------------------------------------------------------
network (obj) {
return {
bridge: obj.bridge,
MTU: +obj.MTU,
name_description: obj.name_description,
name_label: obj.name_label,
tags: obj.tags,
PIFs: link(obj, 'PIFs'),
VIFs: link(obj, 'VIFs')
}
},
// -----------------------------------------------------------------
message (obj) {
return {
body: obj.body,
name: obj.name,
time: toTimestamp(obj.timestamp),
$object: obj.obj_uuid // Special link as it is already an UUID.
}
},
// -----------------------------------------------------------------
task (obj) {
return {
created: toTimestamp(obj.created),
current_operations: obj.current_operations,
finished: toTimestamp(obj.finished),
name_description: obj.name_description,
name_label: obj.name_label,
progress: +obj.progress,
result: obj.result,
status: obj.status,
$host: link(obj, 'resident_on')
}
},
// -----------------------------------------------------------------
host_patch (obj) {
return {
applied: Boolean(obj.applied),
time: toTimestamp(obj.timestamp_applied),
pool_patch: link(obj, 'pool_patch', '$ref'),
$host: link(obj, 'host')
}
},
// -----------------------------------------------------------------
pool_patch (obj) {
return {
id: obj.$ref,
applied: Boolean(obj.pool_applied),
description: obj.name_description,
guidance: obj.after_apply_guidance,
name: obj.name_label,
size: +obj.size,
uuid: obj.uuid,
// TODO: what does it mean, should we handle it?
// version: obj.version,
// TODO: host.[$]pool_patches ←→ pool.[$]host_patches
$host_patches: link(obj, 'host_patches')
}
},
// -----------------------------------------------------------------
pci (obj) {
return {
type: 'PCI',
class_name: obj.class_name,
device_name: obj.device_name,
pci_id: obj.pci_id,
$host: link(obj, 'host')
}
},
// -----------------------------------------------------------------
pgpu (obj) {
return {
type: 'PGPU',
pci: link(obj, 'PCI'),
// TODO: dedupe.
host: link(obj, 'host'),
$host: link(obj, 'host'),
vgpus: link(obj, 'resident_VGPUs'),
$vgpus: link(obj, 'resident_VGPUs')
}
},
// -----------------------------------------------------------------
vgpu (obj) {
return {
type: 'VGPU',
currentlyAttached: Boolean(obj.currently_attached),
device: obj.device,
resident_on: link(obj, 'resident_on'),
vm: link(obj, 'VM')
}
}
}
// ===================================================================
export default xapiObj => {
  // Unsupported XAPI types are simply ignored.
  const transform = TRANSFORMS[xapiObj.$type.toLowerCase()]
  if (!transform) {
    return
  }

  // The transform itself may refuse the object (e.g. unmanaged VDIs).
  const xoObj = transform(xapiObj)
  if (!xoObj) {
    return
  }

  // Default id/type/uuid unless the transform already set them.
  if (!('id' in xoObj)) {
    xoObj.id = xapiObj.$id
  }
  if (!('type' in xoObj)) {
    xoObj.type = xapiObj.$type
  }
  if ('uuid' in xapiObj && !('uuid' in xoObj)) {
    xoObj.uuid = xapiObj.uuid
  }

  xoObj.$pool = xapiObj.$pool.$id
  // TODO: deprecated, remove when no longer used in xo-web
  xoObj.$poolId = xoObj.$pool

  // Internal properties (non-enumerable by default).
  defineProperties(xoObj, {
    _xapiId: {
      value: xapiObj.$id
    },
    _xapiRef: {
      value: xapiObj.$ref
    }
  })

  // Freezes and returns the new object.
  return freeze(xoObj)
}

View File

@@ -1,549 +0,0 @@
import isArray from 'lodash.isarray'
import {
ensureArray,
extractProperty,
forEach,
mapToArray,
parseXml
} from './utils'
import {
isHostRunning,
isVmHvm,
isVmRunning,
parseDateTime
} from './xapi'
// ===================================================================
// Resolves the `$<prop>` dynamic value of a XAPI object to the id(s)
// of the linked object(s).
function link (obj, prop, idField = '$id') {
  const value = obj[`$${prop}`]
  if (value == null) {
    // Properly handles null and undefined.
    return value
  }
  return isArray(value)
    ? mapToArray(value, idField)
    : value[idField]
}
// Parse a string date time to a Unix timestamp (in seconds).
//
// If there are no data or if the timestamp is 0, returns null.
function toTimestamp (date) {
  if (!date) {
    return null
  }
  const ms = parseDateTime(date).getTime()
  return ms
    ? Math.round(ms / 1000)
    : null
}
// ===================================================================
export function pool (obj) {
  const { tags, name_description } = obj
  return {
    default_SR: link(obj, 'default_SR'),
    HA_enabled: Boolean(obj.ha_enabled),
    master: link(obj, 'master'),
    tags,
    name_description,
    // A pool may have no name: fall back on its master's.
    name_label: obj.name_label || obj.$master.name_label

    // TODO
    // - ? networks = networksByPool.items[pool.id] (network.$pool.id)
    // - hosts = hostsByPool.items[pool.id] (host.$pool.$id)
    // - patches = poolPatchesByPool.items[pool.id] (poolPatch.$pool.id)
    // - SRs = srsByContainer.items[pool.id] (sr.$container.id)
    // - templates = vmTemplatesByContainer.items[pool.id] (vmTemplate.$container.$id)
    // - VMs = vmsByContainer.items[pool.id] (vm.$container.id)
    // - $running_hosts = runningHostsByPool.items[pool.id] (runningHost.$pool.id)
    // - $running_VMs = runningVmsByPool.items[pool.id] (runningHost.$pool.id)
    // - $VMs = vmsByPool.items[pool.id] (vm.$pool.id)
  }
}
// -------------------------------------------------------------------
// Transforms a XAPI host record into its XO representation.
export function host (obj) {
  const {
    $metrics: metrics,
    other_config: otherConfig
  } = obj

  const isRunning = isHostRunning(obj)

  return {
    address: obj.address,
    bios_strings: obj.bios_strings,
    build: obj.software_version.build_number,
    CPUs: obj.cpu_info,
    enabled: Boolean(obj.enabled),
    current_operations: obj.current_operations,
    hostname: obj.hostname,
    iSCSI_name: otherConfig.iscsi_iqn || null,
    name_description: obj.name_description,
    name_label: obj.name_label,
    memory: (function () {
      if (metrics) {
        const free = +metrics.memory_free
        const total = +metrics.memory_total

        return {
          usage: total - free,
          size: total
        }
      }

      // No metrics available: return a zeroed value with the SAME
      // shape as above.
      //
      // FIX: this fallback used to expose `total: 0` instead of
      // `size: 0`, which did not match the key used by the branch
      // above (consumers read `memory.size`).
      return {
        usage: 0,
        size: 0
      }
    })(),
    patches: link(obj, 'patches'),
    powerOnMode: obj.power_on_mode,
    power_state: isRunning ? 'Running' : 'Halted',
    tags: obj.tags,
    version: obj.software_version.product_version,

    // TODO: dedupe.
    PIFs: link(obj, 'PIFs'),
    $PIFs: link(obj, 'PIFs'),
    PCIs: link(obj, 'PCIs'),
    $PCIs: link(obj, 'PCIs'),
    PGPUs: link(obj, 'PGPUs'),
    $PGPUs: link(obj, 'PGPUs'),

    $PBDs: link(obj, 'PBDs')

    // TODO:
    // - controller = vmControllersByContainer.items[host.id]
    // - SRs = srsByContainer.items[host.id]
    // - tasks = tasksByHost.items[host.id]
    // - templates = vmTemplatesByContainer.items[host.id]
    // - VMs = vmsByContainer.items[host.id]
    // - $vCPUs = sum(host.VMs, vm => host.CPUs.number)
  }
}
// -------------------------------------------------------------------
export function vm (obj) {
const {
$guest_metrics: guestMetrics,
$metrics: metrics,
other_config: otherConfig
} = obj
const isHvm = isVmHvm(obj)
const isRunning = isVmRunning(obj)
const vm = {
// type is redefined after for controllers/, templates &
// snapshots.
type: 'VM',
addresses: guestMetrics && guestMetrics.networks || null,
auto_poweron: Boolean(otherConfig.auto_poweron),
boot: obj.HVM_boot_params,
CPUs: {
max: +obj.VCPUs_max,
number: (
isRunning && metrics
? +metrics.VCPUs_number
: +obj.VCPUs_at_startup
)
},
current_operations: obj.current_operations,
docker: (function () {
const monitor = otherConfig['xscontainer-monitor']
if (!monitor) {
return
}
if (monitor === 'False') {
return {
enabled: false
}
}
const {
docker_ps: process,
docker_info: info,
docker_version: version
} = otherConfig
return {
enabled: true,
info: info && parseXml(info).docker_info,
process: process && parseXml(process).docker_ps,
version: version && parseXml(version).docker_version
}
})(),
// TODO: there is two possible value: "best-effort" and "restart"
high_availability: Boolean(obj.ha_restart_priority),
memory: (function () {
const dynamicMin = +obj.memory_dynamic_min
const dynamicMax = +obj.memory_dynamic_max
const staticMin = +obj.memory_static_min
const staticMax = +obj.memory_static_max
const memory = {
dynamic: [ dynamicMin, dynamicMax ],
static: [ staticMin, staticMax ]
}
const gmMemory = guestMetrics && guestMetrics.memory
if (!isRunning) {
memory.size = dynamicMax
} else if (gmMemory && gmMemory.used) {
memory.usage = +gmMemory.used
memory.size = +gmMemory.total
} else if (metrics) {
memory.size = +metrics.memory_actual
} else {
memory.size = dynamicMax
}
return memory
})(),
name_description: obj.name_description,
name_label: obj.name_label,
other: otherConfig,
os_version: guestMetrics && guestMetrics.os_version || null,
power_state: obj.power_state,
snapshot_time: toTimestamp(obj.snapshot_time),
snapshots: link(obj, 'snapshots'),
tags: obj.tags,
VIFs: link(obj, 'VIFs'),
virtualizationMode: isHvm ? 'hvm' : 'pv',
// <=> Are the Xen Server tools installed?
//
// - undefined: unknown status
// - false: not optimized
// - 'out of date': optimized but drivers should be updated
// - 'up to date': optimized
xenTools: (() => {
if (!isRunning || !metrics) {
// Unknown status, returns nothing.
return
}
if (!guestMetrics) {
return false
}
const { PV_drivers_version: { major, minor } } = guestMetrics
if (major === undefined || minor === undefined) {
return false
}
return guestMetrics.PV_drivers_up_to_date
? 'up to date'
: 'out of date'
})(),
$container: (
isRunning
? link(obj, 'resident_on')
: link(obj, 'pool') // TODO: handle local VMs (`VM.get_possible_hosts()`).
),
$VBDs: link(obj, 'VBDs'),
// TODO: dedupe
VGPUs: link(obj, 'VGPUs'),
$VGPUs: link(obj, 'VGPUs')
}
if (obj.is_control_domain) {
vm.type += '-controller'
} else if (obj.is_a_snapshot) {
vm.type += '-snapshot'
vm.$snapshot_of = link(obj, 'snapshot_of')
} else if (obj.is_a_template) {
vm.type += '-template'
vm.CPUs.number = +obj.VCPUs_at_startup
vm.template_info = {
arch: otherConfig['install-arch'],
disks: (function () {
const {disks: xml} = otherConfig
let data
if (!xml || !(data = parseXml(xml)).provision) {
return []
}
const disks = ensureArray(data.provision.disk)
forEach(disks, function normalize (disk) {
disk.bootable = disk.bootable === 'true'
disk.size = +disk.size
disk.SR = extractProperty(disk, 'sr')
})
return disks
})(),
install_methods: (function () {
const {['install-methods']: methods} = otherConfig
return methods ? methods.split(',') : []
})()
}
}
if (!isHvm) {
vm.PV_args = obj.PV_args
}
return vm
}
// -------------------------------------------------------------------
export function sr (obj) {
return {
type: 'SR',
content_type: obj.content_type,
name_description: obj.name_description,
name_label: obj.name_label,
physical_usage: +obj.physical_utilisation,
size: +obj.physical_size,
SR_type: obj.type,
tags: obj.tags,
usage: +obj.virtual_allocation,
VDIs: link(obj, 'VDIs'),
$container: (
obj.shared
? link(obj, 'pool')
: obj.$PBDs[0] && link(obj.$PBDs[0], 'host')
),
$PBDs: link(obj, 'PBDs')
}
}
// -------------------------------------------------------------------
export function pbd (obj) {
return {
type: 'PBD',
attached: obj.currently_attached,
host: link(obj, 'host'),
SR: link(obj, 'SR')
}
}
// -------------------------------------------------------------------
export function pif (obj) {
return {
type: 'PIF',
attached: Boolean(obj.currently_attached),
device: obj.device,
IP: obj.IP,
MAC: obj.MAC,
management: Boolean(obj.management), // TODO: find a better name.
mode: obj.ip_configuration_mode,
MTU: +obj.MTU,
netmask: obj.netmask,
vlan: +obj.VLAN,
// TODO: What is it?
//
// Could it mean “is this a physical interface?”.
// How could a PIF not be physical?
// physical: obj.physical,
$host: link(obj, 'host'),
$network: link(obj, 'network')
}
}
// -------------------------------------------------------------------
// TODO: should we have a VDI-snapshot type like we have with VMs?
export function vdi (obj) {
if (!obj.managed) {
return
}
return {
type: 'VDI',
name_description: obj.name_description,
name_label: obj.name_label,
size: +obj.virtual_size,
snapshots: link(obj, 'snapshots'),
snapshot_time: toTimestamp(obj.snapshot_time),
tags: obj.tags,
usage: +obj.physical_utilisation,
$snapshot_of: link(obj, 'snapshot_of'),
$SR: link(obj, 'SR'),
$VBDs: link(obj, 'VBDs')
}
}
// -------------------------------------------------------------------
export function vbd (obj) {
return {
type: 'VBD',
attached: Boolean(obj.currently_attached),
bootable: Boolean(obj.bootable),
is_cd_drive: obj.type === 'CD',
position: obj.userdevice,
read_only: obj.mode === 'RO',
VDI: link(obj, 'VDI'),
VM: link(obj, 'VM')
}
}
// -------------------------------------------------------------------
export function vif (obj) {
return {
type: 'VIF',
attached: Boolean(obj.currently_attached),
device: obj.device, // TODO: should it be cast to a number?
MAC: obj.MAC,
MTU: +obj.MTU,
$network: link(obj, 'network'),
$VM: link(obj, 'VM')
}
}
// -------------------------------------------------------------------
export function network (obj) {
return {
bridge: obj.bridge,
MTU: +obj.MTU,
name_description: obj.name_description,
name_label: obj.name_label,
tags: obj.tags,
PIFs: link(obj, 'PIFs'),
VIFs: link(obj, 'VIFs')
}
}
// -------------------------------------------------------------------
export function message (obj) {
return {
body: obj.body,
name: obj.name,
time: toTimestamp(obj.timestamp),
$object: obj.obj_uuid // Special link as it is already an UUID.
}
}
// -------------------------------------------------------------------
export function task (obj) {
return {
created: toTimestamp(obj.created),
current_operations: obj.current_operations,
finished: toTimestamp(obj.finished),
name_description: obj.name_description,
name_label: obj.name_label,
progress: +obj.progress,
result: obj.result,
status: obj.status,
$host: link(obj, 'resident_on')
}
}
// -------------------------------------------------------------------
export function host_patch (obj) {
return {
applied: Boolean(obj.applied),
time: toTimestamp(obj.timestamp_applied),
pool_patch: link(obj, 'pool_patch', '$ref'),
$host: link(obj, 'host')
}
}
// -------------------------------------------------------------------
export function pool_patch (obj) {
return {
id: obj.$ref,
applied: Boolean(obj.pool_applied),
description: obj.name_description,
guidance: obj.after_apply_guidance,
name: obj.name_label,
size: +obj.size,
uuid: obj.uuid,
// TODO: what does it mean, should we handle it?
// version: obj.version,
// TODO: host.[$]pool_patches ←→ pool.[$]host_patches
$host_patches: link(obj, 'host_patches')
}
}
// -------------------------------------------------------------------
export function pci (obj) {
return {
type: 'PCI',
class_name: obj.class_name,
device_name: obj.device_name,
pci_id: obj.pci_id,
$host: link(obj, 'host')
}
}
// -------------------------------------------------------------------
export function pgpu (obj) {
return {
type: 'PGPU',
pci: link(obj, 'PCI'),
// TODO: dedupe.
host: link(obj, 'host'),
$host: link(obj, 'host'),
vgpus: link(obj, 'resident_VGPUs'),
$vgpus: link(obj, 'resident_VGPUs')
}
}
// -------------------------------------------------------------------
export function vgpu (obj) {
return {
type: 'VGPU',
currentlyAttached: Boolean(obj.currently_attached),
device: obj.device,
resident_on: link(obj, 'resident_on'),
vm: link(obj, 'VM')
}
}

View File

@@ -1,8 +1,8 @@
import endsWith from 'lodash.endswith'
import got from 'got'
import endsWith from 'lodash/endsWith'
import JSON5 from 'json5'
import { BaseError } from 'make-error'
import httpRequest from './http-request'
import { parseDateTime } from './xapi'
const RRD_STEP_SECONDS = 5
@@ -32,11 +32,7 @@ export class UnknownLegendFormat extends XapiStatsError {
}
}
export class FaultyGranularity extends XapiStatsError {
constructor (msg) {
super(msg)
}
}
export class FaultyGranularity extends XapiStatsError {}
// -------------------------------------------------------------------
// Utils
@@ -289,9 +285,10 @@ export default class XapiStats {
// Load
hostStats.load.push(convertNanToNull(values[hostLegends.load]))
// Memory
const memory = values[hostLegends.memory]
const memoryFree = values[hostLegends.memoryFree]
// Memory.
// WARNING! memory/memoryFree are in kB.
const memory = values[hostLegends.memory] * 1024
const memoryFree = values[hostLegends.memoryFree] * 1024
hostStats.memory.push(memory)
@@ -391,8 +388,8 @@ export default class XapiStats {
// Execute one http request on a XenServer for get stats
// Return stats (Json format) or throws got exception
async _getJson (url) {
const response = await got(url, { rejectUnauthorized: false })
return JSON5.parse(response.body)
const body = await httpRequest(url, { rejectUnauthorized: false }).readAll()
return JSON5.parse(body)
}
async _getLastTimestamp (xapi, host, step) {
@@ -405,19 +402,24 @@ export default class XapiStats {
}
_getPoints (hostname, step, vmId) {
const hostStats = this._hosts[hostname][step]
// Return host points
if (vmId === undefined) {
return this._hosts[hostname][step]
return {
interval: step,
...hostStats
}
}
const vmsStats = this._vms[hostname][step]
// Return vm points
const points = { endTimestamp: this._hosts[hostname][step].endTimestamp }
if (this._vms[hostname][step] !== undefined) {
points.stats = this._vms[hostname][step][vmId]
return {
interval: step,
endTimestamp: hostStats.endTimestamp,
stats: (vmsStats && vmsStats[vmId]) || getNewVmStats()
}
return points
}
async _getAndUpdatePoints (xapi, host, vmId, granularity) {
@@ -528,6 +530,11 @@ export default class XapiStats {
async getVmPoints (xapi, vmId, granularity) {
const vm = xapi.getObject(vmId)
const host = vm.$resident_on
if (!host) {
throw new Error(`VM ${vmId} is halted or host could not be found.`)
}
return this._getAndUpdatePoints(xapi, host, vm.uuid, granularity)
}
}

File diff suppressed because it is too large Load Diff

2169
src/xapi/index.js Normal file

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,10 @@
import {
makeEditObject
} from '../utils'
// VIF-related high-level methods.
//
// The allowed-address lists map directly to the corresponding XAPI
// fields through `makeEditObject`.
const EDIT_VIF_SPEC = {
  ipv4Allowed: true,
  ipv6Allowed: true
}

export default {
  editVif: makeEditObject(EDIT_VIF_SPEC)
}

View File

@@ -0,0 +1,53 @@
import {
mapToArray
} from '../../utils'
export default {
  // -----------------------------------------------------------------
  // PBD (SR ↔ host connection) helpers.

  _plugPbd (pbd) {
    return this.call('PBD.plug', pbd.$ref)
  },

  async plugPbd (id) {
    await this._plugPbd(this.getObject(id))
  },

  _unplugPbd (pbd) {
    return this.call('PBD.unplug', pbd.$ref)
  },

  async unplugPbd (id) {
    await this._unplugPbd(this.getObject(id))
  },

  // -----------------------------------------------------------------
  // SR-level operations.

  _connectAllSrPbds (sr) {
    // Plugs every PBD of the SR in parallel.
    return Promise.all(
      mapToArray(sr.$PBDs, pbd => this._plugPbd(pbd))
    )
  },

  async connectAllSrPbds (id) {
    await this._connectAllSrPbds(this.getObject(id))
  },

  _disconnectAllSrPbds (sr) {
    // Unplugs every PBD of the SR in parallel.
    return Promise.all(
      mapToArray(sr.$PBDs, pbd => this._unplugPbd(pbd))
    )
  },

  async disconnectAllSrPbds (id) {
    await this._disconnectAllSrPbds(this.getObject(id))
  },

  // An SR must be disconnected from all its hosts before it can be
  // destroyed or forgotten.

  async destroySr (id) {
    const sr = this.getObject(id)
    await this._disconnectAllSrPbds(sr)
    await this.call('SR.destroy', sr.$ref)
  },

  async forgetSr (id) {
    const sr = this.getObject(id)
    await this._disconnectAllSrPbds(sr)
    await this.call('SR.forget', sr.$ref)
  }
}

316
src/xapi/mixins/vm.js Normal file
View File

@@ -0,0 +1,316 @@
import find from 'lodash/find'
import gte from 'lodash/gte'
import lte from 'lodash/lte'
import {
forEach,
mapToArray,
noop,
parseSize,
pCatch
} from '../../utils'
import {
isVmHvm,
isVmRunning,
makeEditObject
} from '../utils'
export default {
// TODO: clean up on error.
async createVm (templateId, {
name_label, // deprecated
nameLabel = name_label, // eslint-disable-line camelcase
bootAfterCreate = false,
clone = true,
installRepository = undefined,
vdis = undefined,
vifs = undefined,
existingVdis = undefined,
coreOs = false,
cloudConfig = undefined,
...props
} = {}) {
const installMethod = (() => {
if (installRepository == null) {
return 'none'
}
try {
installRepository = this.getObject(installRepository)
return 'cd'
} catch (_) {
return 'network'
}
})()
const template = this.getObject(templateId)
// Clones the template.
let vm = await this._getOrWaitObject(
await this[clone ? '_cloneVm' : '_copyVm'](template, nameLabel)
)
// TODO: copy BIOS strings?
// Removes disks from the provision XML, we will create them by
// ourselves.
await this.call('VM.remove_from_other_config', vm.$ref, 'disks')::pCatch(noop)
// Creates the VDIs and executes the initial steps of the
// installation.
await this.call('VM.provision', vm.$ref)
// Set VMs params.
// TODO: checkLimits
this._editVm(vm, props)
// Sets boot parameters.
{
const isHvm = isVmHvm(vm)
if (isHvm) {
if (!vdis.length || installMethod === 'network') {
const { HVM_boot_params: bootParams } = vm
let order = bootParams.order
if (order) {
order = 'n' + order.replace('n', '')
} else {
order = 'ncd'
}
this._setObjectProperties(vm, {
HVM_boot_params: { ...bootParams, order }
})
}
} else { // PV
if (vm.PV_bootloader === 'eliloader') {
if (installMethod === 'network') {
// TODO: normalize RHEL URL?
await this._updateObjectMapProperty(vm, 'other_config', {
'install-repository': installRepository
})
} else if (installMethod === 'cd') {
await this._updateObjectMapProperty(vm, 'other_config', {
'install-repository': 'cdrom'
})
}
}
}
}
// Inserts the CD if necessary.
if (installMethod === 'cd') {
// When the VM is started, if PV, the CD drive will become not
// bootable and the first disk bootable.
await this._insertCdIntoVm(installRepository, vm, {
bootable: true
})
}
// Modify existing (previous template) disks if necessary
existingVdis && await Promise.all(mapToArray(existingVdis, async ({ size, $SR: srId, ...properties }, userdevice) => {
const vbd = find(vm.$VBDs, { userdevice })
if (!vbd) {
return
}
const vdi = vbd.$VDI
await this._setObjectProperties(vdi, properties)
// if the disk is bigger
if (
size != null &&
size > vdi.virtual_size
) {
await this.resizeVdi(vdi.$id, size)
}
// if another SR is set, move it there
if (srId) {
await this.moveVdi(vdi.$id, srId)
}
}))
// Creates the user defined VDIs.
//
// TODO: set vm.suspend_SR
if (vdis) {
const devices = await this.call('VM.get_allowed_VBD_devices', vm.$ref)
await Promise.all(mapToArray(vdis, (vdiDescription, i) => {
return this._createVdi(
vdiDescription.size, // FIXME: Should not be done in Xapi.
{
name_label: vdiDescription.name_label,
name_description: vdiDescription.name_description,
sr: vdiDescription.sr || vdiDescription.SR
}
)
.then(ref => this._getOrWaitObject(ref))
.then(vdi => this._createVbd(vm, vdi, {
// Only the first VBD if installMethod is not cd is bootable.
bootable: installMethod !== 'cd' && !i,
userdevice: devices[i]
}))
}))
}
// Destroys the VIFs cloned from the template.
await Promise.all(mapToArray(vm.$VIFs, vif => this._deleteVif(vif)))
// Creates the VIFs specified by the user.
if (vifs) {
const devices = await this.call('VM.get_allowed_VIF_devices', vm.$ref)
await Promise.all(mapToArray(vifs, (vif, index) => this._createVif(
vm,
this.getObject(vif.network),
{
device: devices[index],
mac: vif.mac,
mtu: vif.mtu
}
)))
}
// TODO: Assign VGPUs.
if (cloudConfig != null) {
// Refresh the record.
vm = this.getObject(vm.$id)
// Find the SR of the first VDI.
let srRef
forEach(vm.$VBDs, vbd => {
const vdi = vbd.$VDI
if (vdi) {
srRef = vdi.SR
return false
}
})
const method = coreOs
? 'createCoreOsCloudInitConfigDrive'
: 'createCloudInitConfigDrive'
await this[method](vm.$id, srRef, cloudConfig)
}
if (bootAfterCreate) {
this._startVm(vm)::pCatch(noop)
}
return this._waitObject(vm.$id)
},
// High level method to edit a VM.
//
// Params do not correspond directly to XAPI props: each spec entry
// below maps a user-facing name to its getter/setter (see
// `makeEditObject` in ../utils).
_editVm: makeEditObject({
  autoPoweron: {
    set (value, vm) {
      return Promise.all([
        this._updateObjectMapProperty(vm, 'other_config', {
          // `null` presumably removes the entry from the map —
          // TODO confirm against _updateObjectMapProperty.
          autoPoweron: value ? 'true' : null
        }),
        // NOTE(review): looks like enabling it also requires the
        // pool-level flag — confirm setPoolProperties semantics.
        value && this.setPoolProperties({
          autoPoweron: true
        })
      ])
    }
  },

  // `CPUs` is an alias of `cpus`.
  CPUs: 'cpus',
  cpus: {
    addToLimits: true,

    // Current value may have constraints with other values.
    //
    // If the other value is not set and the constraint is not
    // respected, the other value is changed first.
    constraints: {
      cpusStaticMax: gte
    },

    get: vm => +vm.VCPUs_at_startup,
    set: [
      'VCPUs_at_startup',
      function (value, vm) {
        // Also live-updates the vCPU count when the VM is running.
        return isVmRunning(vm) && this._set('VCPUs_number_live', value)
      }
    ]
  },

  cpuCap: {
    addToLimits: true,
    // Unset cap yields a falsy value instead of NaN.
    get: vm => vm.VCPUs_params.cap && +vm.VCPUs_params.cap,
    set (cap, vm) {
      return this._updateObjectMapProperty(vm, 'VCPUs_params', { cap })
    }
  },

  cpusMax: 'cpusStaticMax',
  cpusStaticMax: {
    constraints: {
      cpus: lte
    },
    get: vm => +vm.VCPUs_max,
    set: 'VCPUs_max'
  },

  cpuWeight: {
    addToLimits: true,
    // Unset weight yields a falsy value instead of NaN.
    get: vm => vm.VCPUs_params.weight && +vm.VCPUs_params.weight,
    set (weight, vm) {
      return this._updateObjectMapProperty(vm, 'VCPUs_params', { weight })
    }
  },

  highAvailability: {
    set (ha, vm) {
      // An empty priority disables HA for this VM.
      return this.call('VM.set_ha_restart_priority', vm.$ref, ha ? 'restart' : '')
    }
  },

  memoryMin: {
    constraints: {
      memoryMax: gte
    },
    get: vm => +vm.memory_dynamic_min,
    // parseSize presumably accepts human-readable sizes — TODO
    // confirm against ../../utils.
    preprocess: parseSize,
    set: 'memory_dynamic_min'
  },

  memory: 'memoryMax',
  memoryMax: {
    addToLimits: true,
    constraints: {
      memoryMin: lte,
      memoryStaticMax: gte
    },
    get: vm => +vm.memory_dynamic_max,
    preprocess: parseSize,
    set: 'memory_dynamic_max'
  },

  memoryStaticMax: {
    constraints: {
      memoryMax: lte
    },
    get: vm => +vm.memory_static_max,
    preprocess: parseSize,
    set: 'memory_static_max'
  },

  nameDescription: true,
  nameLabel: true,

  PV_args: true
}),

// Public wrapper: resolves the id then delegates to `_editVm`.
async editVm (id, props) {
  return /* await */ this._editVm(this.getObject(id), props)
}
}

332
src/xapi/utils.js Normal file
View File

@@ -0,0 +1,332 @@
// import isFinite from 'lodash/isFinite'
import camelCase from 'lodash/camelCase'
import isEqual from 'lodash/isEqual'
import pickBy from 'lodash/pickBy'
import { utcFormat, utcParse } from 'd3-time-format'
import {
camelToSnakeCase,
createRawObject,
forEach,
isArray,
isBoolean,
isFunction,
isInteger,
isObject,
isString,
map,
mapToArray,
noop
} from '../utils'
// ===================================================================
// Coerces a value to a boolean for XAPI.
export const asBoolean = value => Boolean(value)

// const asFloat = value => {
//   value = String(value)
//   return value.indexOf('.') === -1
//     ? `${value}.0`
//     : value
// }

// XAPI (XML-RPC) expects integers as strings.
export const asInteger = value => String(value)

// Returns a copy of `obj` without its `undefined` properties.
export const filterUndefineds = obj => pickBy(obj, value => value !== undefined)

// Applies `fn` to `value` unless the value is null-ish.
export const optional = (value, fn) => {
  if (value == null) {
    return undefined
  }
  return fn ? fn(value) : value
}

// Converts a JS value to a XAPI-compatible parameter.
export const prepareXapiParam = value => {
  // if (isFinite(value) && !isInteger(value)) {
  //   return asFloat(value)
  // }
  if (isInteger(value)) {
    return asInteger(value)
  }
  if (isBoolean(value)) {
    return asBoolean(value)
  }
  if (isObject(value)) {
    return map(filterUndefineds(value), prepareXapiParam)
  }

  return value
}

// -------------------------------------------------------------------

const OPAQUE_REF_RE = /OpaqueRef:[0-9a-z-]+/

// Extracts the first opaque reference found in `str`, throws if none.
export const extractOpaqueRef = str => {
  const matches = OPAQUE_REF_RE.exec(str)
  if (matches) {
    return matches[0]
  }
  throw new Error('no opaque ref found')
}
// -------------------------------------------------------------------
// Object types given by `xen-api` are always lowercase but the
// namespaces in the Xen API can have a different casing: build a
// lowercase → canonical-casing lookup once at module load.
const TYPE_TO_NAMESPACE = createRawObject()
for (const namespace of [
  'Bond',
  'DR_task',
  'GPU_group',
  'PBD',
  'PCI',
  'PGPU',
  'PIF',
  'PIF_metrics',
  'SM',
  'SR',
  'VBD',
  'VBD_metrics',
  'VDI',
  'VGPU',
  'VGPU_type',
  'VLAN',
  'VM',
  'VM_appliance',
  'VM_guest_metrics',
  'VM_metrics',
  'VMPP',
  'VTPM'
]) {
  TYPE_TO_NAMESPACE[namespace.toLowerCase()] = namespace
}

// Falls back on the type itself when no canonical casing is known.
export const getNamespaceForType = type => TYPE_TO_NAMESPACE[type] || type
// -------------------------------------------------------------------
// Format a date (pseudo ISO 8601) from one XenServer get by
// xapi.call('host.get_servertime', host.$ref) for example
export const formatDateTime = utcFormat('%Y%m%dT%H:%M:%SZ')
// Inverse of `formatDateTime`: parses the XAPI date format into a
// `Date` (UTC); d3's utcParse returns null when the string does not
// match the pattern.
export const parseDateTime = utcParse('%Y%m%dT%H:%M:%SZ')
// -------------------------------------------------------------------
// A host is considered running when its metrics report it live.
export const isHostRunning = host => {
  const metrics = host.$metrics
  return metrics && metrics.live
}

// -------------------------------------------------------------------

// A VM is HVM when it has a (non-empty) HVM boot policy.
export const isVmHvm = vm => Boolean(vm.HVM_boot_policy)

// Paused VMs are considered running: they hold resources.
const RUNNING_POWER_STATES = {
  Running: true,
  Paused: true
}
export const isVmRunning = vm => RUNNING_POWER_STATES[vm.power_state]
// -------------------------------------------------------------------
// By default a limit entry is the delta between new and current value.
const _DEFAULT_ADD_TO_LIMITS = (next, current) => next - current

// Like map() but drops falsy results.
const _mapFilter = (collection, iteratee) => {
  const results = []
  forEach(collection, (...args) => {
    const item = iteratee(...args)
    if (item) {
      results.push(item)
    }
  })
  return results
}
// Builds a generic `editObject (id, values, checkLimits)` method from a
// declarative map of property specs.
//
// Each spec describes one editable property and may define:
// - get: true | string | function — how to read the current value
//   (true → snake_case of the spec name, string → that property name)
// - set: true | string | function | array — how to write the new value
// - preprocess: function applied to the incoming value before anything else
// - constraints: map of { <otherSpecName>: predicate } — when the predicate
//   fails, the other property is updated (to the same value) first
// - addToLimits: true | function — computes the delta recorded in the
//   resource-set limits (true → _DEFAULT_ADD_TO_LIMITS)
// A spec may also be a string: an alias to another spec name.
export const makeEditObject = specs => {
  // Normalizes `get` to a reader function (or leaves it undefined).
  const normalizeGet = (get, name) => {
    if (get === true) {
      // true → read the snake_case variant of the spec name.
      const prop = camelToSnakeCase(name)
      return object => object[prop]
    }
    if (isString(get)) {
      return object => object[get]
    }
    // Already a function (or undefined).
    return get
  }
  // Normalizes `set` to a writer function; writers are invoked with the
  // edit context as `this` (which provides `_set`).
  const normalizeSet = (set, name) => {
    if (isFunction(set)) {
      return set
    }
    if (set === true) {
      // true → set the snake_case variant of the spec name through XAPI.
      const prop = camelToSnakeCase(name)
      return function (value) {
        return this._set(prop, value)
      }
    }
    if (isString(set)) {
      const index = set.indexOf('.')
      if (index === -1) {
        // Plain property name.
        return function (value) {
          return this._set(set, value)
        }
      }
      // `map.prop` → update one entry of a map property
      // (e.g. `other_config.foo`).
      const map = set.slice(0, index)
      const prop = set.slice(index + 1)
      return function (value, object) {
        return this._updateObjectMapProperty(object, map, { [prop]: value })
      }
    }
    if (!isArray(set)) {
      throw new Error('must be an array, a function or a string')
    }
    // Array of setters → all of them are applied in parallel.
    set = mapToArray(set, normalizeSet)
    const { length } = set
    if (!length) {
      throw new Error('invalid setter')
    }
    if (length === 1) {
      return set[0]
    }
    return function (value, object) {
      return Promise.all(mapToArray(set, set => set.call(this, value, object)))
    }
  }
  // Validates and normalizes a whole (non-alias) spec.
  const normalizeSpec = (spec, name) => {
    if (spec === true) {
      // Shorthand for a simple get/set property.
      spec = {
        get: true,
        set: true
      }
    }
    if (spec.addToLimits === true) {
      spec.addToLimits = _DEFAULT_ADD_TO_LIMITS
    }
    forEach(spec.constraints, (constraint, constraintName) => {
      if (!isFunction(constraint)) {
        throw new Error('constraint must be a function')
      }
      const constraintSpec = specs[constraintName]
      if (!constraintSpec.get) {
        // Constraints are evaluated against the current value of the
        // other property, so that property must be readable.
        throw new Error('constraint values must have a get')
      }
    })
    const { get } = spec
    if (get) {
      spec.get = normalizeGet(get, name)
    } else if (spec.addToLimits) {
      // The limit delta is computed from the current value.
      throw new Error('addToLimits cannot be defined without get')
    }
    spec.set = normalizeSet(spec.set, name)
    return spec
  }
  // Normalize every non-alias spec in place (aliases are handled below).
  forEach(specs, (spec, name) => {
    isString(spec) || (specs[name] = normalizeSpec(spec, name))
  })
  // Resolves aliases and add camelCase and snake_case aliases.
  forEach(specs, (spec, name) => {
    if (isString(spec)) {
      // Follow alias chains until a real (normalized) spec is reached.
      do {
        spec = specs[spec]
      } while (isString(spec))
      specs[name] = spec
    }
    let tmp
    specs[tmp = camelCase(name)] || (specs[tmp] = spec)
    specs[tmp = camelToSnakeCase(name)] || (specs[tmp] = spec)
  })
  // The generated method (meant to be attached to a Xapi instance).
  //
  // @param id          identifier of the XAPI object to edit
  // @param values      map of spec name → new value (undefined values and
  //                    unknown names are ignored)
  // @param checkLimits optional async callback `(limits, object)` used to
  //                    validate resource-set limits before applying changes
  return async function _editObject_ (id, values, checkLimits) {
    const limits = checkLimits && {}
    const object = this.getObject(id)
    const _objectRef = object.$ref
    const _setMethodPrefix = `${getNamespaceForType(object.$type)}.set_`
    // Context used to execute functions.
    const context = {
      __proto__: this,
      _set: (prop, value) => this.call(_setMethodPrefix + prop, _objectRef, prepareXapiParam(value))
    }
    // Returns a callback applying the change for one (value, name) pair,
    // or undefined when there is nothing to do.
    const set = (value, name) => {
      if (value === undefined) {
        return
      }
      const spec = specs[name]
      if (!spec) {
        return
      }
      const { preprocess } = spec
      if (preprocess) {
        value = preprocess(value)
      }
      const { get } = spec
      if (get) {
        const current = get(object)
        // No-op when the value is unchanged.
        if (isEqual(value, current)) {
          return
        }
        let addToLimits
        if (limits && (addToLimits = spec.addToLimits)) {
          limits[name] = addToLimits(value, current)
        }
      }
      const cb = () => spec.set.call(context, value, object)
      const { constraints } = spec
      if (constraints) {
        const cbs = []
        forEach(constraints, (constraint, constraintName) => {
          // This constraint value is already defined: bypass the constraint.
          if (values[constraintName] != null) {
            return
          }
          if (!constraint(specs[constraintName].get(object), value)) {
            // Constraint violated: update the other property (to the same
            // value) first.
            const cb = set(value, constraintName)
            cbs.push(cb)
          }
        })
        if (cbs.length) {
          // Apply the constrained properties before this one.
          return () => Promise.all(mapToArray(cbs, cb => cb())).then(cb)
        }
      }
      return cb
    }
    // Collect all effective changes…
    const cbs = _mapFilter(values, set)
    // …validate the limit deltas…
    if (checkLimits) {
      await checkLimits(limits, object)
    }
    // …then apply everything in parallel.
    return Promise.all(mapToArray(cbs, cb => cb())).then(noop)
  }
}

167
src/xo-mixins/acls.js Normal file
View File

@@ -0,0 +1,167 @@
import checkAuthorization from 'xo-acl-resolver'
import {
ModelAlreadyExists
} from '../collection'
import {
Acls
} from '../models/acl'
import {
createRawObject,
forEach,
includes,
mapToArray
} from '../utils'
// ===================================================================
// ACLs mixin: stores access-control entries (subject — user or group —,
// object, action/role) and resolves them into per-object permissions.
export default class {
  constructor (xo) {
    this._xo = xo
    // Persistent ACL collection (Redis backed).
    this._acls = new Acls({
      connection: xo._redis,
      prefix: 'xo:acl',
      indexes: ['subject', 'object']
    })
  }
  // Returns every ACL entry whose subject is the user itself or one of
  // its groups.
  async _getAclsForUser (userId) {
    const user = await this._xo.getUser(userId)
    const { groups } = user
    const subjects = groups
      ? groups.concat(userId)
      : [ userId ]
    const acls = []
    // Appends whole arrays of entries to `acls` without creating
    // intermediate copies.
    const pushAcls = (push => entries => {
      push.apply(acls, entries)
    })(acls.push)
    const collection = this._acls
    await Promise.all(mapToArray(
      subjects,
      subject => collection.get({subject}).then(pushAcls)
    ))
    return acls
  }
  // Creates an ACL entry; a duplicate entry is silently ignored.
  async addAcl (subjectId, objectId, action) {
    try {
      await this._acls.create(subjectId, objectId, action)
    } catch (error) {
      if (!(error instanceof ModelAlreadyExists)) {
        throw error
      }
    }
  }
  // Deletes an ACL entry.
  async removeAcl (subjectId, objectId, action) {
    await this._acls.delete(subjectId, objectId, action)
  }
  // TODO: remove when new collection.
  async getAllAcls () {
    return this._acls.get()
  }
  // Returns `{ [objectId]: { [permission]: 1 } }` for the given user,
  // expanding role actions into their individual permissions.
  async getPermissionsForUser (userId) {
    const [
      acls,
      permissionsByRole
    ] = await Promise.all([
      this._getAclsForUser(userId),
      this._getPermissionsByRole()
    ])
    const permissions = createRawObject()
    for (const { action, object: objectId } of acls) {
      const current = (
        permissions[objectId] ||
        (permissions[objectId] = createRawObject())
      )
      const permissionsForRole = permissionsByRole[action]
      if (permissionsForRole) {
        // The action is a role: grant each of its permissions.
        for (const permission of permissionsForRole) {
          current[permission] = 1
        }
      } else {
        // The action is a plain permission.
        current[action] = 1
      }
    }
    return permissions
  }
  // Whether the user satisfies all the requested (object, permission)
  // pairs; administrators always do.
  async hasPermissions (userId, permissions) {
    const user = await this._xo.getUser(userId)
    // Special case for super XO administrators.
    if (user.permission === 'admin') {
      return true
    }
    return checkAuthorization(
      await this.getPermissionsForUser(userId),
      id => this._xo.getObject(id),
      permissions
    )
  }
  // -----------------------------------------------------------------
  // Returns `{ [roleId]: [permission…] }`.
  async _getPermissionsByRole () {
    const roles = await this.getRoles()
    const permissions = createRawObject()
    for (const role of roles) {
      permissions[role.id] = role.permissions
    }
    return permissions
  }
  // TODO: delete when merged with the new collection.
  //
  // Roles are hardcoded for now; each one extends the previous:
  // viewer ⊂ operator ⊂ admin.
  async getRoles () {
    return [
      {
        id: 'viewer',
        name: 'Viewer',
        permissions: [
          'view'
        ]
      },
      {
        id: 'operator',
        name: 'Operator',
        permissions: [
          'view',
          'operate'
        ]
      },
      {
        id: 'admin',
        name: 'Admin',
        permissions: [
          'view',
          'operate',
          'administrate'
        ]
      }
    ]
  }
  // Returns an array of roles which have a given permission.
  async getRolesForPermission (permission) {
    const roles = []
    forEach(await this.getRoles(), role => {
      if (includes(role.permissions, permission)) {
        roles.push(role.id)
      }
    })
    return roles
  }
}

View File

@@ -0,0 +1,181 @@
import Token, { Tokens } from '../models/token'
import {
NoSuchObject
} from '../api-errors'
import {
createRawObject,
generateToken,
pCatch,
noop
} from '../utils'
// ===================================================================
// Error thrown when an authentication token cannot be found — including
// when it existed but has expired and was discarded.
class NoSuchAuthenticationToken extends NoSuchObject {
  constructor (id) {
    super(id, 'authentication token')
  }
}
// ===================================================================
// Authentication mixin: pluggable authentication providers, failed-login
// throttling and persistent authentication tokens.
export default class {
  constructor (xo) {
    this._xo = xo
    // Store last failures by user to throttle tries (slow bruteforce
    // attacks).
    this._failures = createRawObject()
    this._providers = new Set()
    // Creates persistent collections.
    this._tokens = new Tokens({
      connection: xo._redis,
      prefix: 'xo:token',
      indexes: ['user_id']
    })
    // Password authentication provider.
    this.registerAuthenticationProvider(async ({
      username,
      password
    }) => {
      if (username === undefined || password === undefined) {
        return
      }
      const user = await xo.getUserByName(username, true)
      if (user && await xo.checkUserPassword(user.id, password)) {
        return user.id
      }
    })
    // Token authentication provider.
    this.registerAuthenticationProvider(async ({
      token: tokenId
    }) => {
      if (!tokenId) {
        return
      }
      try {
        return (await xo.getAuthenticationToken(tokenId)).user_id
      } catch (e) {
        // Invalid/expired token: simply not a match for this provider.
        return
      }
    })
  }
  // Adds an authentication provider (see `_authenticateUser` for the
  // provider contract).
  registerAuthenticationProvider (provider) {
    return this._providers.add(provider)
  }
  // Removes a previously registered provider.
  unregisterAuthenticationProvider (provider) {
    return this._providers.delete(provider)
  }
  // Tries each registered provider in turn; returns the user object on
  // success, `false` when no provider matched.
  async _authenticateUser (credentials) {
    for (const provider of this._providers) {
      try {
        // A provider can return:
        // - `null` if the user could not be authenticated
        // - the identifier of the authenticated user
        // - an object with a property `username` containing the name
        //   of the authenticated user
        const result = await provider(credentials)
        // No match.
        if (!result) {
          continue
        }
        return result.username
          ? await this._xo.registerUser(undefined, result.username)
          : await this._xo.getUser(result)
      } catch (error) {
        // DEPRECATED: Authentication providers may just throw `null`
        // to indicate they could not authenticate the user without
        // any special errors.
        if (error) console.error(error.stack || error)
      }
    }
    return false
  }
  // Authenticates a user, throttling repeated failures (at most one try
  // every 2 seconds per username).
  async authenticateUser (credentials) {
    // TODO: remove when email has been replaced by username.
    if (credentials.email) {
      credentials.username = credentials.email
    } else if (credentials.username) {
      credentials.email = credentials.username
    }
    const failures = this._failures
    const { username } = credentials
    const now = Date.now()
    let lastFailure
    if (
      username &&
      (lastFailure = failures[username]) &&
      (lastFailure + 2e3) > now
    ) {
      throw new Error('too fast authentication tries')
    }
    const user = await this._authenticateUser(credentials)
    if (user) {
      delete failures[username]
    } else {
      failures[username] = now
    }
    return user
  }
  // -----------------------------------------------------------------
  // Creates a new token for the user and returns its plain properties.
  async createAuthenticationToken ({userId}) {
    const token = new Token({
      id: await generateToken(),
      user_id: userId,
      expiration: Date.now() + 1e3 * 60 * 60 * 24 * 30 // 1 month validity.
    })
    await this._tokens.add(token)
    // TODO: use plain properties directly.
    return token.properties
  }
  // Deletes a token; throws NoSuchAuthenticationToken if absent.
  async deleteAuthenticationToken (id) {
    if (!await this._tokens.remove(id)) {
      throw new NoSuchAuthenticationToken(id)
    }
  }
  // Returns a valid token's properties; an expired token is deleted
  // (best effort) and reported as missing.
  async getAuthenticationToken (id) {
    let token = await this._tokens.first(id)
    if (!token) {
      throw new NoSuchAuthenticationToken(id)
    }
    token = token.properties
    if (!(
      token.expiration > Date.now()
    )) {
      this._tokens.remove(id)::pCatch(noop)
      throw new NoSuchAuthenticationToken(id)
    }
    return token
  }
  // Returns all the tokens belonging to a user.
  async getAuthenticationTokensForUser (userId) {
    return this._tokens.get({ user_id: userId })
  }
}

716
src/xo-mixins/backups.js Normal file
View File

@@ -0,0 +1,716 @@
import endsWith from 'lodash/endsWith'
import escapeStringRegexp from 'escape-string-regexp'
import eventToPromise from 'event-to-promise'
import filter from 'lodash/filter'
import find from 'lodash/find'
import findIndex from 'lodash/findIndex'
import sortBy from 'lodash/sortBy'
import startsWith from 'lodash/startsWith'
import {
basename,
dirname
} from 'path'
import { satisfies as versionSatisfies } from 'semver'
import vhdMerge from '../vhd-merge'
import xapiObjectToXo from '../xapi-object-to-xo'
import {
deferrable
} from '../decorators'
import {
forEach,
mapToArray,
noop,
pCatch,
pSettle,
safeDateFormat
} from '../utils'
import {
VDI_FORMAT_VHD
} from '../xapi'
// ===================================================================
// Delta VM backups are described by a JSON metadata file.
const DELTA_BACKUP_EXT = '.json'
const DELTA_BACKUP_EXT_LENGTH = DELTA_BACKUP_EXT.length

// VDI backup file names look like `<timestamp>T<time>Z_(full|delta).vhd`;
// group 1 captures the timestamp, group 2 the backup type.
const VDI_BACKUP_RE = /^(\d+T\d+Z)_(full|delta)\.vhd$/

// Parses a VDI backup file name; null when it is not one.
const parseVdiBackupName = name => VDI_BACKUP_RE.exec(name)

// Test if a file is a vdi backup. (full or delta)
const isVdiBackup = name => VDI_BACKUP_RE.test(name)

// Test if a file is a delta/full vdi backup.
const isDeltaVdiBackup = name => {
  const parsed = parseVdiBackupName(name)
  return parsed !== null && parsed[2] === 'delta'
}
const isFullVdiBackup = name => {
  const parsed = parseVdiBackupName(name)
  return parsed !== null && parsed[2] === 'full'
}

// Get the timestamp of a vdi backup. (full or delta)
const getVdiTimestamp = name => parseVdiBackupName(name)[1]

// Strips the `.json` extension of a delta VM backup name.
const getDeltaBackupNameWithoutExt = name => name.slice(0, -DELTA_BACKUP_EXT_LENGTH)

// Test if a file is a delta VM backup (JSON metadata file).
const isDeltaBackup = name => name.endsWith(DELTA_BACKUP_EXT)
// Verifies the checksum of a remote file by streaming it entirely.
//
// A missing file (ENOENT) is silently ignored; any other error —
// including a checksum failure raised by the stream — propagates.
async function checkFileIntegrity (handler, name) {
  let stream
  try {
    stream = await handler.createReadStream(name, { checksum: true })
  } catch (error) {
    // Nothing to check when the file does not exist.
    if (error.code !== 'ENOENT') {
      throw error
    }
    return
  }

  // Drain the stream: the checksum is validated once it finishes.
  stream.resume()
  await eventToPromise(stream, 'finish')
}
// ===================================================================
// Backups mixin: full (XVA) and delta VM backups, disaster-recovery
// copies and rolling snapshots, stored on remotes via their handlers.
export default class {
  constructor (xo) {
    this._xo = xo
  }
  // Lists the backups available on a remote: plain `.xva` full backups
  // plus `<deltaDir>/<name>` entries for delta backups.
  async listRemoteBackups (remoteId) {
    const handler = await this._xo.getRemoteHandler(remoteId)
    // List backups. (No delta)
    const backupFilter = file => endsWith(file, '.xva')
    const files = await handler.list()
    const backups = filter(files, backupFilter)
    // List delta backups.
    const deltaDirs = filter(files, file => startsWith(file, 'vm_delta_'))
    for (const deltaDir of deltaDirs) {
      const files = await handler.list(deltaDir)
      const deltaBackups = filter(files, isDeltaBackup)
      backups.push(...mapToArray(
        deltaBackups,
        deltaBackup => {
          return `${deltaDir}/${getDeltaBackupNameWithoutExt(deltaBackup)}`
        }
      ))
    }
    return backups
  }
  // Imports a full (XVA) backup into the given SR and returns the id of
  // the new XO VM object.
  async importVmBackup (remoteId, file, sr) {
    const handler = await this._xo.getRemoteHandler(remoteId)
    const stream = await handler.createReadStream(file)
    const xapi = this._xo.getXapi(sr)
    const vm = await xapi.importVm(stream, { srId: sr._xapiId })
    return xapiObjectToXo(vm).id
  }
  // -----------------------------------------------------------------
  // Replicates a VM to a (possibly remote) SR using a delta export based
  // on the previous replication snapshot; on failure, pending streams and
  // the new snapshot are cleaned up through `$onFailure`.
  @deferrable.onFailure
  async deltaCopyVm ($onFailure, srcVm, targetSr) {
    const srcXapi = this._xo.getXapi(srcVm)
    const targetXapi = this._xo.getXapi(targetSr)
    // Get Xen objects from XO objects.
    srcVm = srcXapi.getObject(srcVm._xapiId)
    targetSr = targetXapi.getObject(targetSr._xapiId)
    // 1. Find the local base for this SR (if any).
    const TAG_LAST_BASE_DELTA = `xo:base_delta:${targetSr.uuid}`
    const localBaseUuid = (id => {
      if (id != null) {
        const base = srcXapi.getObject(id, null)
        return base && base.uuid
      }
    })(srcVm.other_config[TAG_LAST_BASE_DELTA])
    // 2. Copy.
    const dstVm = await (async () => {
      const delta = await srcXapi.exportDeltaVm(srcVm.$id, localBaseUuid, {
        snapshotNameLabel: `XO_DELTA_EXPORT: ${targetSr.name_label} (${targetSr.uuid})`
      })
      $onFailure(async () => {
        await Promise.all(mapToArray(
          delta.streams,
          stream => stream.cancel()
        ))
        return srcXapi.deleteVm(delta.vm.uuid, true)
      })
      const promise = targetXapi.importDeltaVm(
        delta,
        {
          deleteBase: true, // Remove the remote base.
          srId: targetSr.$id
        }
      )
      // Once done, (asynchronously) remove the (now obsolete) local
      // base.
      if (localBaseUuid) {
        promise.then(() => srcXapi.deleteVm(localBaseUuid, true))::pCatch(noop)
      }
      // (Asynchronously) Identify snapshot as future base.
      promise.then(() => {
        return srcXapi._updateObjectMapProperty(srcVm, 'other_config', {
          [TAG_LAST_BASE_DELTA]: delta.vm.uuid
        })
      })::pCatch(noop)
      return promise
    })()
    // 5. Return the identifier of the new XO VM object.
    return xapiObjectToXo(dstVm).id
  }
  // -----------------------------------------------------------------
  // Removes the `n` oldest entries of `backups` from the remote.
  //
  // TODO: The other backup methods must use this function !
  // Prerequisite: The backups array must be ordered. (old to new backups)
  async _removeOldBackups (backups, handler, dir, n) {
    if (n <= 0) {
      return
    }
    const getPath = (file, dir) => dir ? `${dir}/${file}` : file
    await Promise.all(
      mapToArray(backups.slice(0, n), async backup => /* await */ handler.unlink(getPath(backup, dir)))
    )
  }
  // -----------------------------------------------------------------
  // Recreates a VDI from a legacy (v0) delta backup chain by importing
  // every VHD of the chain in order. (`vmId` is currently unused here.)
  async _legacyImportDeltaVdiBackup (xapi, { vmId, handler, dir, vdiInfo }) {
    const vdi = await xapi.createVdi(vdiInfo.virtual_size, vdiInfo)
    const vdiId = vdi.$id
    // dir = vm_delta_xxx
    // xoPath = vdi_xxx/timestamp_(full|delta).vhd
    // vdiDir = vdi_xxx
    const { xoPath } = vdiInfo
    const filePath = `${dir}/${xoPath}`
    const vdiDir = dirname(xoPath)
    const backups = await this._listDeltaVdiDependencies(handler, filePath)
    for (const backup of backups) {
      const stream = await handler.createReadStream(`${dir}/${vdiDir}/${backup}`)
      await xapi.importVdiContent(vdiId, stream, {
        format: VDI_FORMAT_VHD
      })
    }
    return vdiId
  }
  // Restores a whole VM from a legacy (v0) delta backup: metadata-only
  // XVA import, then VDIs and VBDs recreated from the backup info.
  async _legacyImportDeltaVmBackup (xapi, { remoteId, handler, filePath, info, sr }) {
    // Import vm metadata.
    const vm = await (async () => {
      const stream = await handler.createReadStream(`${filePath}.xva`)
      return /* await */ xapi.importVm(stream, { onlyMetadata: true })
    })()
    const vmName = vm.name_label
    const dir = dirname(filePath)
    // Disable start and change the VM name label during import.
    await Promise.all([
      xapi.addForbiddenOperationToVm(vm.$id, 'start', 'Delta backup import...'),
      xapi._setObjectProperties(vm, { name_label: `[Importing...] ${vmName}` })
    ])
    // Destroy vbds if necessary. Why ?
    // Because XenServer creates Vbds linked to the vdis of the backup vm if it exists.
    await xapi.destroyVbdsFromVm(vm.uuid)
    // Import VDIs.
    const vdiIds = {}
    await Promise.all(
      mapToArray(
        info.vdis,
        async vdiInfo => {
          vdiInfo.sr = sr._xapiId
          const vdiId = await this._legacyImportDeltaVdiBackup(xapi, { vmId: vm.$id, handler, dir, vdiInfo })
          vdiIds[vdiInfo.uuid] = vdiId
        }
      )
    )
    await Promise.all(
      mapToArray(
        info.vbds,
        vbdInfo => {
          xapi.attachVdiToVm(vdiIds[vbdInfo.xoVdi], vm.$id, vbdInfo)
        }
      )
    )
    // Import done, reenable start and set real vm name.
    await Promise.all([
      xapi.removeForbiddenOperationFromVm(vm.$id, 'start'),
      xapi._setObjectProperties(vm, { name_label: vmName })
    ])
    return vm
  }
  // -----------------------------------------------------------------
  // Lists the VDI backups of a directory (sorted old → new), dropping —
  // and deleting from the remote — any leading deltas that have no full
  // backup as base.
  async _listVdiBackups (handler, dir) {
    let files
    try {
      files = await handler.list(dir)
    } catch (error) {
      // Missing directory → no backups yet.
      if (error.code === 'ENOENT') {
        files = []
      } else {
        throw error
      }
    }
    const backups = sortBy(filter(files, fileName => isVdiBackup(fileName)))
    let i
    // Avoid unstable state: No full vdi found to the beginning of array. (base)
    for (i = 0; i < backups.length && isDeltaVdiBackup(backups[i]); i++);
    await this._removeOldBackups(backups, handler, dir, i)
    return backups.slice(i)
  }
  // Merges surplus delta VDI backups (beyond `depth`) into a new full
  // backup so that the retained chain always starts with a full.
  async _mergeDeltaVdiBackups ({handler, dir, depth}) {
    const backups = await this._listVdiBackups(handler, dir)
    let i = backups.length - depth
    // No merge.
    if (i <= 0) {
      return
    }
    const timestamp = getVdiTimestamp(backups[i])
    const newFullBackup = `${dir}/${timestamp}_full.vhd`
    await checkFileIntegrity(handler, `${dir}/${backups[i]}`)
    // Walk back to the most recent full backup at or before index i.
    let j = i
    for (; j > 0 && isDeltaVdiBackup(backups[j]); j--);
    const fullBackupId = j
    // Remove old backups before the most recent full.
    if (j > 0) {
      for (j--; j >= 0; j--) {
        await handler.unlink(`${dir}/${backups[j]}`, { checksum: true })
      }
    }
    // Merge all deltas from the full up to index i into the full,
    // deleting each delta once merged.
    const parent = `${dir}/${backups[fullBackupId]}`
    for (j = fullBackupId + 1; j <= i; j++) {
      const backup = `${dir}/${backups[j]}`
      try {
        await checkFileIntegrity(handler, backup)
        await vhdMerge(handler, parent, handler, backup)
      } catch (e) {
        console.error('Unable to use vhd-util.', e)
        throw e
      }
      await handler.unlink(backup, { checksum: true })
    }
    // Rename the first old full backup to the new full backup.
    await handler.rename(parent, newFullBackup)
  }
  // Returns the chain of VDI backups (full followed by deltas) required
  // to rebuild the backup designated by `filePath`.
  async _listDeltaVdiDependencies (handler, filePath) {
    const dir = dirname(filePath)
    const filename = basename(filePath)
    const backups = await this._listVdiBackups(handler, dir)
    // Search file. (delta or full backup)
    const i = findIndex(backups, backup =>
      getVdiTimestamp(backup) === getVdiTimestamp(filename)
    )
    if (i === -1) {
      throw new Error('VDI to import not found in this remote.')
    }
    // Search full backup.
    let j
    for (j = i; j >= 0 && isDeltaVdiBackup(backups[j]); j--);
    if (j === -1) {
      throw new Error(`Unable to found full vdi backup of: ${filePath}`)
    }
    return backups.slice(j, i + 1)
  }
  // -----------------------------------------------------------------
  // Lists the delta VM backups (JSON metadata files) of a directory,
  // sorted old → new.
  async _listDeltaVmBackups (handler, dir) {
    const files = await handler.list(dir)
    return sortBy(filter(files, isDeltaBackup))
  }
  // Writes one exported VDI stream to the remote as a dated full or
  // delta VHD and returns its path relative to `dir`.
  // (`depth` is currently unused here.)
  async _saveDeltaVdiBackup (xapi, { vdiParent, isFull, handler, stream, dir, depth }) {
    const backupDirectory = `vdi_${vdiParent.uuid}`
    dir = `${dir}/${backupDirectory}`
    const date = safeDateFormat(new Date())
    // For old versions: remove old bases if exists.
    const bases = sortBy(
      filter(vdiParent.$snapshots, { name_label: 'XO_DELTA_BASE_VDI_SNAPSHOT' }),
      base => base.snapshot_time
    )
    forEach(bases, base => { xapi.deleteVdi(base.$id)::pCatch(noop) })
    // Export full or delta backup.
    const vdiFilename = `${date}_${isFull ? 'full' : 'delta'}.vhd`
    const backupFullPath = `${dir}/${vdiFilename}`
    try {
      const targetStream = await handler.createOutputStream(backupFullPath, {
        // FIXME: Checksum is not computed for full vdi backups.
        // The problem is in the merge case, a delta merged in a full vdi
        // backup forces us to browse the resulting file =>
        // Significant transfer time on the network !
        checksum: !isFull
      })
      // Propagate source errors to the target so the write fails too.
      stream.on('error', error => targetStream.emit('error', error))
      await Promise.all([
        eventToPromise(stream.pipe(targetStream), 'finish'),
        stream.task
      ])
    } catch (error) {
      // Remove new backup. (corrupt).
      await handler.unlink(backupFullPath, { checksum: true })::pCatch(noop)
      throw error
    }
    // Returns relative path.
    return `${backupDirectory}/${vdiFilename}`
  }
  // Deletes delta VM backups beyond the retention `depth` (metadata file
  // plus, best effort, the legacy v0 `.xva` companion file).
  async _removeOldDeltaVmBackups (xapi, { handler, dir, depth }) {
    const backups = await this._listDeltaVmBackups(handler, dir)
    const nOldBackups = backups.length - depth
    if (nOldBackups > 0) {
      await Promise.all(
        mapToArray(backups.slice(0, nOldBackups), async backup => {
          // Remove json file.
          await handler.unlink(`${dir}/${backup}`)
          // Remove xva file.
          // Version 0.0.0 (Legacy) Delta Backup.
          handler.unlink(`${dir}/${getDeltaBackupNameWithoutExt(backup)}.xva`)::pCatch(noop)
        })
      )
    }
  }
  // Performs one rolling delta backup of a VM to a remote: exports a
  // delta against the last tagged base snapshot, saves every VDI stream
  // plus a JSON metadata file, then merges/prunes per the retention
  // `depth`. On failure, streams, snapshot and written files are cleaned
  // up through `$onFailure`.
  @deferrable.onFailure
  async rollingDeltaVmBackup ($onFailure, {vm, remoteId, tag, depth}) {
    const remote = await this._xo.getRemote(remoteId)
    if (!remote) {
      throw new Error(`No such Remote ${remoteId}`)
    }
    if (!remote.enabled) {
      throw new Error(`Remote ${remoteId} is disabled`)
    }
    const handler = await this._xo.getRemoteHandler(remote)
    const xapi = this._xo.getXapi(vm)
    vm = xapi.getObject(vm._xapiId)
    // Get most recent base.
    const bases = sortBy(
      filter(vm.$snapshots, { name_label: `XO_DELTA_BASE_VM_SNAPSHOT_${tag}` }),
      base => base.snapshot_time
    )
    const baseVm = bases.pop()
    // Clean up any older, superseded base snapshots (best effort).
    forEach(bases, base => { xapi.deleteVm(base.$id, true)::pCatch(noop) })
    // Check backup dirs.
    const dir = `vm_delta_${tag}_${vm.uuid}`
    const fullVdisRequired = []
    await Promise.all(
      mapToArray(vm.$VBDs, async vbd => {
        if (!vbd.VDI || vbd.type !== 'Disk') {
          return
        }
        const vdi = vbd.$VDI
        const backups = await this._listVdiBackups(handler, `${dir}/vdi_${vdi.uuid}`)
        // Force full if missing full.
        if (!find(backups, isFullVdiBackup)) {
          fullVdisRequired.push(vdi.$id)
        }
      })
    )
    // Export...
    const delta = await xapi.exportDeltaVm(vm.$id, baseVm && baseVm.$id, {
      snapshotNameLabel: `XO_DELTA_BASE_VM_SNAPSHOT_${tag}`,
      fullVdisRequired,
      disableBaseTags: true
    })
    $onFailure(async () => {
      await Promise.all(mapToArray(
        delta.streams,
        stream => stream.cancel()
      ))
      await xapi.deleteVm(delta.vm.$id, true)
    })
    // Save vdis.
    const vdiBackups = await pSettle(
      mapToArray(delta.vdis, async (vdi, key) => {
        const vdiParent = xapi.getObject(vdi.snapshot_of)
        return this._saveDeltaVdiBackup(xapi, {
          vdiParent,
          isFull: !baseVm || find(fullVdisRequired, id => vdiParent.$id === id),
          handler,
          stream: delta.streams[`${key}.vhd`],
          dir,
          depth
        })
        .then(path => {
          delta.vdis[key] = {
            ...delta.vdis[key],
            xoPath: path
          }
          return path
        })
      })
    )
    const fulFilledVdiBackups = []
    let success = true
    // One or many vdi backups have failed.
    for (const vdiBackup of vdiBackups) {
      if (vdiBackup.isFulfilled()) {
        fulFilledVdiBackups.push(vdiBackup)
      } else {
        console.error(`Rejected backup: ${vdiBackup.reason()}`)
        success = false
      }
    }
    // On failure, remove the VDI files that were successfully written.
    $onFailure(async () => {
      await Promise.all(
        mapToArray(fulFilledVdiBackups, vdiBackup => {
          return handler.unlink(`${dir}/${vdiBackup.value()}`, { checksum: true })::pCatch(noop)
        })
      )
    })
    if (!success) {
      throw new Error('Rolling delta vm backup failed.')
    }
    const date = safeDateFormat(new Date())
    const backupFormat = `${date}_${vm.name_label}`
    const infoPath = `${dir}/${backupFormat}${DELTA_BACKUP_EXT}`
    $onFailure(() => handler.unlink(infoPath)::pCatch(noop))
    // Write Metadata.
    await handler.outputFile(infoPath, JSON.stringify(delta, null, 2))
    // Here we have a completed backup. We can merge old vdis.
    await Promise.all(
      mapToArray(vdiBackups, vdiBackup => {
        const backupName = vdiBackup.value()
        const backupDirectory = backupName.slice(0, backupName.lastIndexOf('/'))
        return this._mergeDeltaVdiBackups({ handler, dir: `${dir}/${backupDirectory}`, depth })
      })
    )
    // Delete old backups.
    await this._removeOldDeltaVmBackups(xapi, { vm, handler, dir, depth })
    if (baseVm) {
      xapi.deleteVm(baseVm.$id, true)::pCatch(noop)
    }
    // Returns relative path.
    return `${dir}/${backupFormat}`
  }
  // Restores a VM from a delta backup (legacy v0 or v1 format) into the
  // given SR and returns the id of the new XO VM object.
  async importDeltaVmBackup ({sr, remoteId, filePath}) {
    const handler = await this._xo.getRemoteHandler(remoteId)
    const xapi = this._xo.getXapi(sr)
    const delta = JSON.parse(await handler.readFile(`${filePath}${DELTA_BACKUP_EXT}`))
    let vm
    const { version } = delta
    if (!version) {
      // Legacy import. (Version 0.0.0)
      vm = await this._legacyImportDeltaVmBackup(xapi, {
        remoteId, handler, filePath, info: delta, sr
      })
    } else if (versionSatisfies(delta.version, '^1')) {
      const basePath = dirname(filePath)
      const streams = delta.streams = {}
      // Open the read streams for each VDI backup chain.
      await Promise.all(
        mapToArray(
          delta.vdis,
          async (vdi, id) => {
            const vdisFolder = `${basePath}/${dirname(vdi.xoPath)}`
            const backups = await this._listDeltaVdiDependencies(handler, `${basePath}/${vdi.xoPath}`)
            streams[`${id}.vhd`] = await Promise.all(mapToArray(backups, async backup =>
              handler.createReadStream(`${vdisFolder}/${backup}`, { checksum: true, ignoreMissingChecksum: true })
            ))
          }
        )
      )
      vm = await xapi.importDeltaVm(delta, {
        srId: sr._xapiId,
        disableStartAfterImport: false
      })
    } else {
      throw new Error(`Unsupported delta backup version: ${version}`)
    }
    return xapiObjectToXo(vm).id
  }
  // -----------------------------------------------------------------
  // Exports a VM as an XVA file to an enabled remote.
  async backupVm ({vm, remoteId, file, compress, onlyMetadata}) {
    const remote = await this._xo.getRemote(remoteId)
    if (!remote) {
      throw new Error(`No such Remote ${remoteId}`)
    }
    if (!remote.enabled) {
      throw new Error(`Backup remote ${remoteId} is disabled`)
    }
    const handler = await this._xo.getRemoteHandler(remote)
    return this._backupVm(vm, handler, file, {compress, onlyMetadata})
  }
  // Streams the XVA export of a VM into `file` on the remote.
  async _backupVm (vm, handler, file, {compress, onlyMetadata}) {
    const targetStream = await handler.createOutputStream(file)
    const promise = eventToPromise(targetStream, 'finish')
    const sourceStream = await this._xo.getXapi(vm).exportVm(vm._xapiId, {
      compress,
      onlyMetadata: onlyMetadata || false
    })
    sourceStream.pipe(targetStream)
    await promise
  }
  // Creates a new dated XVA backup and prunes old ones so that at most
  // `depth` backups are kept for this tag/VM.
  async rollingBackupVm ({vm, remoteId, tag, depth, compress, onlyMetadata}) {
    const remote = await this._xo.getRemote(remoteId)
    if (!remote) {
      throw new Error(`No such Remote ${remoteId}`)
    }
    if (!remote.enabled) {
      throw new Error(`Backup remote ${remoteId} is disabled`)
    }
    const handler = await this._xo.getRemoteHandler(remote)
    const files = await handler.list()
    const reg = new RegExp('^[^_]+_' + escapeStringRegexp(`${tag}_${vm.name_label}.xva`))
    const backups = sortBy(filter(files, (fileName) => reg.test(fileName)))
    const date = safeDateFormat(new Date())
    const file = `${date}_${tag}_${vm.name_label}.xva`
    await this._backupVm(vm, handler, file, {compress, onlyMetadata})
    await this._removeOldBackups(backups, handler, undefined, backups.length - (depth - 1))
  }
  // Takes a new dated snapshot and deletes the oldest ones so that at
  // most `depth` rolling snapshots are kept for this tag.
  async rollingSnapshotVm (vm, tag, depth) {
    const xapi = this._xo.getXapi(vm)
    vm = xapi.getObject(vm._xapiId)
    const reg = new RegExp('^rollingSnapshot_[^_]+_' + escapeStringRegexp(tag) + '_')
    const snapshots = sortBy(filter(vm.$snapshots, snapshot => reg.test(snapshot.name_label)), 'name_label')
    const date = safeDateFormat(new Date())
    await xapi.snapshotVm(vm.$id, `rollingSnapshot_${date}_${tag}_${vm.name_label}`)
    const promises = []
    for (let surplus = snapshots.length - (depth - 1); surplus > 0; surplus--) {
      const oldSnap = snapshots.shift()
      promises.push(xapi.deleteVm(oldSnap.uuid, true))
    }
    await Promise.all(promises)
  }
  // Copies a VM to another SR for disaster recovery (full copy, tagged
  // 'Disaster Recovery') and deletes the oldest DR copies beyond `depth`.
  async rollingDrCopyVm ({vm, sr, tag, depth}) {
    tag = 'DR_' + tag
    const reg = new RegExp('^' + escapeStringRegexp(`${vm.name_label}_${tag}_`) + '[0-9]{8}T[0-9]{6}Z$')
    const targetXapi = this._xo.getXapi(sr)
    sr = targetXapi.getObject(sr._xapiId)
    const sourceXapi = this._xo.getXapi(vm)
    vm = sourceXapi.getObject(vm._xapiId)
    // Find the previous DR copies hosted on the target SR.
    const vms = []
    forEach(sr.$VDIs, vdi => {
      const vbds = vdi.$VBDs
      const vm = vbds && vbds[0] && vbds[0].$VM
      if (vm && reg.test(vm.name_label)) {
        vms.push(vm)
      }
    })
    const olderCopies = sortBy(vms, 'name_label')
    const copyName = `${vm.name_label}_${tag}_${safeDateFormat(new Date())}`
    const drCopy = await sourceXapi.remoteCopyVm(vm.$id, targetXapi, sr.$id, {
      nameLabel: copyName
    })
    await targetXapi.addTag(drCopy.$id, 'Disaster Recovery')
    const promises = []
    for (let surplus = olderCopies.length - (depth - 1); surplus > 0; surplus--) {
      const oldDRVm = olderCopies.shift()
      promises.push(targetXapi.deleteVm(oldDRVm.$id, true))
    }
    await Promise.all(promises)
  }
}

79
src/xo-mixins/jobs.js Normal file
View File

@@ -0,0 +1,79 @@
import assign from 'lodash/assign'
import JobExecutor from '../job-executor'
import { Jobs } from '../models/job'
import {
GenericError,
NoSuchObject
} from '../api-errors'
// ===================================================================
// Error thrown when a job id does not match any stored job.
class NoSuchJob extends NoSuchObject {
  constructor (id) {
    super(id, 'job')
  }
}
// ===================================================================
// Jobs mixin: CRUD for job records (persisted in Redis) and sequential
// execution of job batches through the JobExecutor.
export default class {
  // @param xo - main XO instance, provides the Redis connection.
  constructor (xo) {
    this._executor = new JobExecutor(xo)
    this._jobs = new Jobs({
      connection: xo._redis,
      prefix: 'xo:job',
      indexes: ['user_id', 'key']
    })
  }

  // Returns all stored jobs.
  async getAllJobs () {
    return /* await */ this._jobs.get()
  }

  // Returns the properties of the job `id`; throws NoSuchJob when it
  // does not exist.
  async getJob (id) {
    const model = await this._jobs.first(id)
    if (!model) {
      throw new NoSuchJob(id)
    }
    return model.properties
  }

  // Creates a new job owned by `userId` and returns its properties.
  async createJob (userId, job) {
    // TODO: use plain objects
    const created = await this._jobs.create(userId, job)
    return created.properties
  }

  // Updates an existing job, overwriting only the provided fields.
  async updateJob ({id, type, name, key, method, paramsVector}) {
    const job = await this.getJob(id)
    assign(job, {type, name, key, method, paramsVector})
    return /* await */ this._jobs.save(job)
  }

  // Deletes the job `id`.
  async removeJob (id) {
    return /* await */ this._jobs.remove(id)
  }

  // Runs the given jobs one after the other.
  //
  // Missing ids are collected while the sequence continues and reported
  // in a single GenericError at the end.
  async runJobSequence (idSequence) {
    const missingIds = []
    for (const id of idSequence) {
      let job
      try {
        job = await this.getJob(id)
      } catch (error) {
        if (!(error instanceof NoSuchJob)) {
          throw error
        }
        missingIds.push(id)
      }
      if (job) {
        await this._executor.exec(job)
      }
    }
    if (missingIds.length > 0) {
      throw new GenericError(`The following jobs were not found: ${missingIds.join()}`)
    }
  }
}

222
src/xo-mixins/plugins.js Normal file
View File

@@ -0,0 +1,222 @@
import createJsonSchemaValidator from 'is-my-json-valid'
import { PluginsMetadata } from '../models/plugin-metadata'
import {
InvalidParameters,
NoSuchObject
} from '../api-errors'
import {
createRawObject,
isFunction,
mapToArray
} from '../utils'
// ===================================================================
// Error thrown when a plugin id does not match any registered plugin.
class NoSuchPlugin extends NoSuchObject {
  constructor (id) {
    super(id, 'plugin')
  }
}
// ===================================================================
export default class {
constructor (xo) {
this._plugins = createRawObject()
this._pluginsMetadata = new PluginsMetadata({
connection: xo._redis,
prefix: 'xo:plugin-metadata'
})
}
_getRawPlugin (id) {
const plugin = this._plugins[id]
if (!plugin) {
throw new NoSuchPlugin(id)
}
return plugin
}
async _getPluginMetadata (id) {
const metadata = await this._pluginsMetadata.first(id)
return metadata
? metadata.properties
: null
}
async registerPlugin (
name,
instance,
configurationSchema,
configurationPresets,
version
) {
const id = name
const plugin = this._plugins[id] = {
configured: !configurationSchema,
configurationPresets,
configurationSchema,
id,
instance,
name,
unloadable: isFunction(instance.unload),
version
}
const metadata = await this._getPluginMetadata(id)
let autoload = true
let configuration
if (metadata) {
({
autoload,
configuration
} = metadata)
} else {
console.log(`[NOTICE] register plugin ${name} for the first time`)
await this._pluginsMetadata.save({
id,
autoload
})
}
// Configure plugin if necessary. (i.e. configurationSchema)
// Load plugin.
// Ignore configuration and loading errors.
Promise.resolve()
.then(() => {
if (!plugin.configured) {
return this._configurePlugin(plugin, configuration)
}
})
.then(() => {
if (autoload) {
return this.loadPlugin(id)
}
})
.catch(error => {
console.error('register plugin %s: %s', name, error && error.stack || error)
})
}
async _getPlugin (id) {
const {
configurationPresets,
configurationSchema,
loaded,
name,
unloadable,
version
} = this._getRawPlugin(id)
const {
autoload,
configuration
} = (await this._getPluginMetadata(id)) || {}
return {
id,
name,
autoload,
loaded,
unloadable,
version,
configuration,
configurationPresets,
configurationSchema
}
}
async getPlugins () {
return /* await */ Promise.all(
mapToArray(this._plugins, ({ id }) => this._getPlugin(id))
)
}
// Validate the configuration and configure the plugin instance.
async _configurePlugin (plugin, configuration) {
const { configurationSchema } = plugin
if (!configurationSchema) {
throw new InvalidParameters('plugin not configurable')
}
// See: https://github.com/mafintosh/is-my-json-valid/issues/116
if (configuration == null) {
throw new InvalidParameters([{
field: 'data',
message: 'is the wrong type'
}])
}
const validate = createJsonSchemaValidator(configurationSchema)
if (!validate(configuration)) {
throw new InvalidParameters(validate.errors)
}
// Sets the plugin configuration.
await plugin.instance.configure({
// Shallow copy of the configuration object to avoid most of the
// errors when the plugin is altering the configuration object
// which is handed over to it.
...configuration
})
plugin.configured = true
}
// Validate the configuration, configure the plugin instance and
// save the new configuration.
async configurePlugin (id, configuration) {
const plugin = this._getRawPlugin(id)
await this._configurePlugin(plugin, configuration)
// Saves the configuration.
await this._pluginsMetadata.merge(id, { configuration })
}
// Persistently disables automatic loading of this plugin at server
// start-up (stored in the plugin metadata).
async disablePluginAutoload (id) {
  // TODO: handle case where autoload is already disabled.
  await this._pluginsMetadata.merge(id, { autoload: false })
}
// Persistently enables automatic loading of this plugin at server
// start-up (stored in the plugin metadata).
async enablePluginAutoload (id) {
  // TODO: handle case where autoload is already enabled.
  await this._pluginsMetadata.merge(id, { autoload: true })
}
async loadPlugin (id) {
const plugin = this._getRawPlugin(id)
if (plugin.loaded) {
throw new InvalidParameters('plugin already loaded')
}
if (!plugin.configured) {
throw new InvalidParameters('plugin not configured')
}
await plugin.instance.load()
plugin.loaded = true
}
async unloadPlugin (id) {
const plugin = this._getRawPlugin(id)
if (!plugin.loaded) {
throw new InvalidParameters('plugin already unloaded')
}
if (plugin.unloadable === false) {
throw new InvalidParameters('plugin cannot be unloaded')
}
await plugin.instance.unload()
plugin.loaded = false
}
// Removes the persisted configuration of this plugin by merging
// undefined over it in the metadata store.
async purgePluginConfiguration (id) {
  await this._pluginsMetadata.merge(id, { configuration: undefined })
}
}

135
src/xo-mixins/remotes.js Normal file
View File

@@ -0,0 +1,135 @@
import RemoteHandlerLocal from '../remote-handlers/local'
import RemoteHandlerNfs from '../remote-handlers/nfs'
import RemoteHandlerSmb from '../remote-handlers/smb'
import {
forEach
} from '../utils'
import {
NoSuchObject
} from '../api-errors'
import {
Remotes
} from '../models/remote'
// ===================================================================
// Error thrown when a remote id does not match any known remote.
class NoSuchRemote extends NoSuchObject {
  constructor (id) {
    super(id, 'remote')
  }
}
// ===================================================================
// Xo mixin managing backup remotes: persistence (Redis), handler
// instantiation (local file / NFS / SMB) and lifecycle hooks.
export default class {
  constructor (xo) {
    this._remotes = new Remotes({
      connection: xo._redis,
      prefix: 'xo:remote',
      indexes: ['enabled']
    })

    xo.on('start', async () => {
      await this.initRemotes()
      await this.syncAllRemotes()
    })
    xo.on('stop', () => this.forgetAllRemotes())
  }

  // Returns a handler instance for a remote object (or a remote id,
  // which is resolved first).
  //
  // Throws when the URL scheme does not map to a known handler.
  async getRemoteHandler (remote) {
    if (typeof remote === 'string') {
      remote = await this.getRemote(remote)
    }

    const Handler = {
      file: RemoteHandlerLocal,
      smb: RemoteHandlerSmb,
      nfs: RemoteHandlerNfs
    }

    // FIXME: should be done in xo-remote-parser.
    const type = remote.url.split('://')[0]
    if (!Handler[type]) {
      throw new Error('Unhandled remote type')
    }

    return new Handler[type](remote)
  }

  // Tests a remote through its handler and returns the test result.
  async testRemote (remote) {
    const handler = await this.getRemoteHandler(remote)
    return handler.test()
  }

  async getAllRemotes () {
    return this._remotes.get()
  }

  // Returns the model object for a remote id, or throws NoSuchRemote.
  async _getRemote (id) {
    const remote = await this._remotes.first(id)
    if (!remote) {
      throw new NoSuchRemote(id)
    }

    return remote
  }

  async getRemote (id) {
    return (await this._getRemote(id)).properties
  }

  // Creates a remote, enables it immediately and returns its
  // synchronized properties.
  async createRemote ({name, url}) {
    const remote = await this._remotes.create(name, url)
    return /* await */ this.updateRemote(remote.get('id'), {enabled: true})
  }

  // Updates a remote's properties, re-syncs it through its handler and
  // persists the result.
  async updateRemote (id, {name, url, enabled, error}) {
    const remote = await this._getRemote(id)
    this._updateRemote(remote, {name, url, enabled, error})
    const handler = await this.getRemoteHandler(remote.properties)
    const props = await handler.sync()
    this._updateRemote(remote, props)
    return (await this._remotes.save(remote)).properties
  }

  // Applies defined properties onto the model; a falsy error clears
  // the stored error message.
  _updateRemote (remote, {name, url, enabled, error}) {
    if (name) remote.set('name', name)
    if (url) remote.set('url', url)
    if (enabled !== undefined) remote.set('enabled', enabled)
    if (error) {
      remote.set('error', error)
    } else {
      remote.set('error', '')
    }
  }

  async removeRemote (id) {
    const handler = await this.getRemoteHandler(id)
    await handler.forget()
    await this._remotes.remove(id)
  }

  // TODO: Should it be private?
  async syncAllRemotes () {
    const remotes = await this.getAllRemotes()
    forEach(remotes, remote => {
      // FIXME: these promises are neither awaited nor error-handled,
      // a failing sync results in an unhandled rejection.
      this.updateRemote(remote.id, {})
    })
  }

  // TODO: Should it be private?
  //
  // Best effort: asks every handler to forget its remote, ignoring
  // individual failures.
  async forgetAllRemotes () {
    const remotes = await this.getAllRemotes()
    for (const remote of remotes) {
      try {
        // The forget() promise is awaited so that its rejection is
        // actually caught by this try/catch (it previously escaped as
        // an unhandled rejection).
        await (await this.getRemoteHandler(remote)).forget()
      } catch (_) {}
    }
  }

  // TODO: Should it be private?
  //
  // Creates a default local remote when none exist yet.
  async initRemotes () {
    const remotes = await this.getAllRemotes()
    if (!remotes || !remotes.length) {
      await this.createRemote({name: 'default', url: 'file://var/lib/xoa-backups'})
    }
  }
}

View File

@@ -0,0 +1,315 @@
import every from 'lodash/every'
import keyBy from 'lodash/keyBy'
import remove from 'lodash/remove'
import some from 'lodash/some'
import {
NoSuchObject,
Unauthorized
} from '../api-errors'
import {
forEach,
generateUnsecureToken,
isObject,
lightSet,
map,
mapToArray,
streamToArray
} from '../utils'
// ===================================================================
// Error thrown when an id does not match any known resource set.
class NoSuchResourceSet extends NoSuchObject {
  constructor (id) {
    super(id, 'resource set')
  }
}
// Computes the resources a VM consumes with regard to resource sets:
// vCPUs at start-up, number of disks and their cumulated virtual
// size (each VDI counted once even when referenced by several VBDs),
// dynamic max memory, and a VM count of 1.
const computeVmResourcesUsage = vm => {
  const seenVdis = {}
  let nDisks = 0
  let diskSize = 0

  forEach(vm.$VBDs, vbd => {
    if (vbd.type !== 'Disk') {
      return
    }

    const vdiId = vbd.VDI
    if (seenVdis[vdiId]) {
      return
    }

    const vdi = vbd.$VDI
    if (vdi) {
      seenVdis[vdiId] = true
      ++nDisks
      diskSize += +vdi.virtual_size
    }
  })

  return {
    cpus: vm.VCPUs_at_startup,
    disk: diskSize,
    disks: nDisks,
    memory: vm.memory_dynamic_max,
    vms: 1
  }
}
// Normalizes a stored resource set: fills in default values and
// converts scalar limits (legacy format) into { available, total }
// objects.
const normalize = set => {
  const limits = set.limits
    ? map(set.limits, limit =>
      isObject(limit)
        ? limit
        : { available: limit, total: limit }
    )
    : {}

  return {
    id: set.id,
    limits,
    name: set.name || '',
    objects: set.objects || [],
    subjects: set.subjects || []
  }
}
// ===================================================================
// Xo mixin managing resource sets: named collections of objects,
// subjects (users/groups) and quantified limits (cpus, disk, disks,
// memory, vms) persisted in a key/value store.
export default class {
  constructor (xo) {
    this._xo = xo
    this._store = null
    xo.on('start', async () => {
      this._store = await xo.getStore('resourceSets')
    })
  }

  // Generates a short random id not already present in the store.
  async _generateId () {
    let id
    do {
      id = generateUnsecureToken(8)
    } while (await this._store.has(id))
    return id
  }

  _save (set) {
    return this._store.put(set.id, set)
  }

  // Throws Unauthorized unless the user is an admin or is (directly or
  // through one of its groups) a subject of the set, and — when
  // objectIds is provided — unless the set contains ALL these objects.
  async checkResourceSetConstraints (id, userId, objectIds) {
    const set = await this.getResourceSet(id)
    const user = await this._xo.getUser(userId)

    if ((
      user.permission !== 'admin' &&

      // The set does not contain ANY subjects related to this user
      // (itself or its groups).
      !some(set.subjects, lightSet(user.groups).add(user.id).has)
    ) || (
      objectIds &&

      // The set does not contain ALL objects.
      !every(objectIds, lightSet(set.objects).has)
    )) {
      throw new Unauthorized()
    }
  }

  // Computes the resource usage of an XO VM object.
  computeVmResourcesUsage (vm) {
    return computeVmResourcesUsage(
      this._xo.getXapi(vm).getObject(vm._xapiId)
    )
  }

  async createResourceSet (name, subjects = undefined, objects = undefined, limits = undefined) {
    const id = await this._generateId()
    const set = normalize({
      id,
      name,
      objects,
      subjects,
      limits
    })

    await this._store.put(id, set)

    return set
  }

  // Deletes a set, or throws NoSuchResourceSet.
  async deleteResourceSet (id) {
    const store = this._store
    if (await store.has(id)) {
      return store.del(id)
    }

    throw new NoSuchResourceSet(id)
  }

  async updateResourceSet (id, {
    name = undefined,
    subjects = undefined,
    objects = undefined,
    limits = undefined
  }) {
    const set = await this.getResourceSet(id)
    if (name) {
      set.name = name
    }
    if (subjects) {
      set.subjects = subjects
    }
    if (objects) {
      set.objects = objects
    }
    if (limits) {
      const previousLimits = set.limits
      set.limits = map(limits, (quantity, id) => {
        const previous = previousLimits[id]
        if (!previous) {
          return {
            available: quantity,
            total: quantity
          }
        }

        // Preserve the current consumption: the delta between the new
        // and old totals is applied to the available amount.
        const { available, total } = previous
        return {
          available: available - total + quantity,
          total: quantity
        }
      })
    }

    await this._save(set)
  }

  // If userId is provided, only resource sets available to that user
  // will be returned.
  async getAllResourceSets (userId = undefined) {
    let filter
    if (userId != null) {
      const user = await this._xo.getUser(userId)
      if (user.permission !== 'admin') {
        const userHasSubject = lightSet(user.groups).add(user.id).has
        filter = set => some(set.subjects, userHasSubject)
      }
    }

    return streamToArray(this._store.createValueStream(), {
      filter,
      mapper: normalize
    })
  }

  // Returns the normalized set, or throws NoSuchResourceSet.
  getResourceSet (id) {
    return this._store.get(id).then(normalize, error => {
      if (error.notFound) {
        throw new NoSuchResourceSet(id)
      }

      throw error
    })
  }

  async addObjectToResourceSet (objectId, setId) {
    const set = await this.getResourceSet(setId)
    set.objects.push(objectId)
    await this._save(set)
  }

  async removeObjectFromResourceSet (objectId, setId) {
    const set = await this.getResourceSet(setId)

    // FIX: objectId was previously not passed to remove(), whose
    // default identity predicate then emptied the whole list. An
    // explicit predicate is required because lodash's iteratee
    // shorthand does not compare values by equality.
    remove(set.objects, id => id === objectId)

    await this._save(set)
  }

  async addSubjectToResourceSet (subjectId, setId) {
    const set = await this.getResourceSet(setId)
    set.subjects.push(subjectId)
    await this._save(set)
  }

  // NOTE: misnamed (`To` instead of `From`) but kept as is because it
  // is part of the public interface.
  async removeSubjectToResourceSet (subjectId, setId) {
    const set = await this.getResourceSet(setId)

    // FIX: passing the raw value was interpreted by lodash as a
    // property-iteratee shorthand and matched nothing; an explicit
    // equality predicate is required.
    remove(set.subjects, id => id === subjectId)

    await this._save(set)
  }

  async addLimitToResourceSet (limitId, quantity, setId) {
    const set = await this.getResourceSet(setId)
    set.limits[limitId] = quantity
    await this._save(set)
  }

  async removeLimitFromResourceSet (limitId, setId) {
    const set = await this.getResourceSet(setId)
    delete set.limits[limitId]
    await this._save(set)
  }

  // Decreases the available amounts of the given limits; throws (and
  // persists nothing) if any limit would become negative. Unknown
  // limit ids are ignored.
  async allocateLimitsInResourceSet (limits, setId) {
    const set = await this.getResourceSet(setId)
    forEach(limits, (quantity, id) => {
      const limit = set.limits[id]
      if (!limit) {
        return
      }

      if ((limit.available -= quantity) < 0) {
        throw new Error(`not enough ${id} available in the set ${setId}`)
      }
    })
    await this._save(set)
  }

  // Increases the available amounts of the given limits, clamped to
  // their totals. Unknown limit ids are ignored.
  async releaseLimitsInResourceSet (limits, setId) {
    const set = await this.getResourceSet(setId)
    forEach(limits, (quantity, id) => {
      const limit = set.limits[id]
      if (!limit) {
        return
      }

      if ((limit.available += quantity) > limit.total) {
        limit.available = limit.total
      }
    })
    await this._save(set)
  }

  // Recomputes the available amount of every limit of every set from
  // the VMs currently attached to a set, then persists all sets.
  async recomputeResourceSetsLimits () {
    const sets = keyBy(await this.getAllResourceSets(), 'id')

    // Reset every limit to its total…
    forEach(sets, ({ limits }) => {
      forEach(limits, (limit, id) => {
        limit.available = limit.total
      })
    })

    // …then subtract the usage of every VM attached to one of our
    // sets.
    forEach(this._xo.getAllXapis(), xapi => {
      forEach(xapi.objects.all, object => {
        let id
        let set
        if (
          object.$type !== 'vm' ||

          // No set for this VM.
          !(id = xapi.xo.getData(object, 'resourceSet')) ||

          // Not our set.
          !(set = sets[id])
        ) {
          return
        }

        const { limits } = set
        forEach(computeVmResourcesUsage(object), (usage, resource) => {
          const limit = limits[resource]
          if (limit) {
            limit.available -= usage
          }
        })
      })
    })

    await Promise.all(mapToArray(sets, set => this._save(set)))
  }
}

Some files were not shown because too many files have changed in this diff Show More