Compare commits

..

517 Commits

Author SHA1 Message Date
Julien Fontanet
3989bfa832 5.7.2 2017-04-11 17:02:57 +02:00
Julien Fontanet
740e1ccd5b fix(job.set): make all fields optional (2) 2017-04-10 16:45:27 +02:00
Julien Fontanet
bc03f2ed17 fix(job.set): make all fields optional 2017-04-10 16:15:45 +02:00
Julien Fontanet
4b5cb86d87 chore(Xapi#_createVbd): base default empty prop on VDI presence 2017-04-10 16:15:45 +02:00
Julien Fontanet
b877d6088b fix(models/utils/parseProp): reduce verbosity 2017-04-10 16:15:45 +02:00
Julien Fontanet
26cfd439e1 fix(xapi-objects-to-xo): use $ref for VM-template ids
Fixes vatesfr/xo-web#2075
2017-04-10 16:15:44 +02:00
Julien Fontanet
eafcf66f4d 5.7.1 2017-04-06 19:30:09 +02:00
Pierre Donias
a8fad8193b feat(pack,patch): XS 7.1 mechanism (#527)
See vatesfr/xo-web#2058
2017-04-06 19:05:43 +02:00
badrAZ
ca695c38cd feat(vm.revertSnapshot): restore power state (#526)
Fixes vatesfr/xo-web#834
2017-04-06 15:24:30 +02:00
badrAZ
6e0f98a809 feat(VBD): expose the VBD device (#525)
See vatesfr/xo-web#2064
2017-04-06 11:07:16 +02:00
Julien Fontanet
8901d3ce64 fix(sample config): IPv6 used by default 2017-03-31 18:25:30 +02:00
Julien Fontanet
2641966285 5.7.0 2017-03-31 16:37:22 +02:00
Julien Fontanet
09dc2265fe fix(Xapi#uploadPoolPatch): use HTTPs 2017-03-31 11:19:27 +02:00
Julien Fontanet
a1fa139ef1 fix(Xapi): accept self-signed certs for export/import VDI/VM 2017-03-31 11:19:26 +02:00
badrAZ
df26a500c4 feat(servers): add a label to servers (#523)
See vatesfr/xo-web#1965
2017-03-29 16:32:02 +02:00
badrAZ
a465218ba3 feat(jobs): configure job timeout (#522)
See vatesfr/xo-web#1956
2017-03-29 10:39:52 +02:00
Julien Fontanet
1ead6eb916 fix(Xapi): export/import of VDI/VM should use HTTPs 2017-03-28 17:33:17 +02:00
Julien Fontanet
44fe1b31ba chore(package): update all dependencies 2017-03-24 11:39:35 +01:00
Julien Fontanet
0856d3d5c9 fix: update yarn.lock 2017-03-24 11:38:32 +01:00
Pierre Donias
f6b74ea836 feat(vm): affinity host (#521)
See vatesfr/xo-web#1983
2017-03-24 11:35:00 +01:00
greenkeeper[bot]
e0cef71700 fix(package): update ms to version 1.0.0 (#520)
https://greenkeeper.io/
2017-03-19 22:49:02 +01:00
Julien Fontanet
19d1f70458 fix(xapi-objects-to-xo): PV drivers check for XS >= 7.1
Fixes vatesfr/xo-web#2024
2017-03-17 18:05:05 +01:00
badrAZ
43c103e436 feat(new-vm): shared VM in resource set (#518)
Fixes vatesfr/xo-web#1964.
2017-03-15 14:45:36 +01:00
Julien Fontanet
6ff4f096a3 chore(build): do not build tests 2017-03-10 14:46:06 +01:00
Pierre Donias
d8d82441c3 fix(Xapi#createVm): only one disk should be bootable (#516)
See vatesfr/xo-web#1994.

For more complex setups, create the VMs and change bootable flags afterward.
2017-03-08 16:31:22 +01:00
Olivier Lambert
4f489e1854 feat(sr): add lvmohba SR type management (#517) 2017-03-08 14:03:57 +01:00
Julien Fontanet
9ab275df5d fix(vm.set): behave if the resource set no longer exist 2017-03-04 16:27:25 +01:00
Julien Fontanet
66c1754eb8 chore(package): update xo-common to v0.1.1 2017-03-04 16:16:58 +01:00
Julien Fontanet
e67bab1f5c fix(package): require Node >= 4.5 2017-03-04 03:24:49 +01:00
Julien Fontanet
ceb6667112 fix: coding style 2017-03-03 16:32:58 +01:00
badrAZ
cafba0b361 fix(vm.resume): remove unused force param (#514) 2017-03-03 15:49:12 +01:00
Julien Fontanet
3e4efcf297 fix(vm.importBackup): fix datetime in VM name 2017-03-01 14:57:02 +01:00
Julien Fontanet
4ccadac148 5.6.4 2017-03-01 14:46:09 +01:00
Julien Fontanet
6e148c18b0 fix(vm.importDeltaBackup): fix datetime in VM name (#513) 2017-03-01 14:34:29 +01:00
Julien Fontanet
75f849982e fix(Xapi#exportVdi): returned promise has a cancel() method (#512) 2017-03-01 14:12:56 +01:00
Julien Fontanet
8722ef45ac chore: add mention-bot config 2017-03-01 13:27:54 +01:00
Julien Fontanet
b347c78a8c fix(vm.create): do not get stuck when installing from ISO 2017-02-28 15:43:06 +01:00
Julien Fontanet
88ae24855a feat(models/utils/parseProp): do not warn on empty string 2017-02-28 09:58:41 +01:00
Julien Fontanet
356884ea53 feat: reduce the default debugging level 2017-02-28 09:58:41 +01:00
badrAZ
51fba21dd6 feat(vm.import): throw if type is xva and data is provided (#506) 2017-02-27 11:57:40 +01:00
Julien Fontanet
6aa5d608bf chore(package): babel-preset-es2015 → babel-preset-env 2017-02-27 11:22:34 +01:00
Julien Fontanet
21ad2c5744 fix(package): commit-msg → commitmsg 2017-02-27 11:20:40 +01:00
Julien Fontanet
dea1163159 fix(package): jest is a dev dependency 2017-02-27 11:20:19 +01:00
Julien Fontanet
c4c2e8cf74 chore(package): fix for jest 19 2017-02-27 11:19:38 +01:00
Julien Fontanet
5883c35cf3 chore(package): update all dependencies 2017-02-27 11:18:46 +01:00
Julien Fontanet
4d2719a424 5.6.3 2017-02-24 16:12:25 +01:00
Nicolas Raynaud
7cf2d0d01c fix(xosan) (#509)
- fix 2 bricks configuration
- limit bricks size to 2TB
- fix arbiter cleanup
2017-02-24 16:11:59 +01:00
Julien Fontanet
5a08b512a7 fix(xosan.createSR): use address prop 2017-02-24 14:56:27 +01:00
Pierre Donias
aa6ff6cd64 fix(xapi.installSupplementalPackOnAllHosts): multi-pipe stream (#508)
Fixes vatesfr/xo-web#1957
2017-02-24 11:41:47 +01:00
Julien Fontanet
89421d292c 5.6.2 2017-02-23 18:38:14 +01:00
Julien Fontanet
55c6515bac fix(vhdMerge): ensure parent.bat.size >= child.bat.size
Fixes vatesfr/xo-web#1939
2017-02-23 18:35:52 +01:00
Pierre Donias
5db6f6a58c feat(XOSAN) (#505) 2017-02-23 18:33:23 +01:00
Julien Fontanet
eeedf6ab28 feat(Vhd#ensurebatSize): no need to wait before writing footer 2017-02-23 18:31:05 +01:00
Julien Fontanet
3758cd207b fix(Vhd#ensureBatSize): Buffer#size → Buffer#length 2017-02-23 18:31:05 +01:00
Julien Fontanet
c15ede6239 fix(Vhd#ensureBatSize): avoid style error 2017-02-23 18:31:05 +01:00
Julien Fontanet
e54e31e059 feat(vhdMerge): avoid unnecessary footer write 2017-02-23 18:31:05 +01:00
Julien Fontanet
8c573aa8e4 chore(Vhd): add debugs 2017-02-23 18:31:05 +01:00
Julien Fontanet
654559086c chore(Vhd#writeBlockSectors): expect end sector instead of # of sectors 2017-02-23 18:31:05 +01:00
Julien Fontanet
b0d9679568 chore(Vhd#coalesceBlock): do not pass blockAddr 2017-02-23 18:31:05 +01:00
Julien Fontanet
87fdaf7fa7 chore(Vhd): merge readBlockBitmap() readBlockData() into _readBlock() 2017-02-23 18:31:05 +01:00
Julien Fontanet
bb3a365166 fix(Vhd#writeBlockSectors): pass correct params to Vhd#readBlockBitmap() 2017-02-23 18:31:05 +01:00
Julien Fontanet
2be183d980 various updates 2017-02-23 18:31:05 +01:00
Julien Fontanet
c6dc846838 fix(Vhd#ensureBatSize): extend BAT at correct offset 2017-02-23 18:31:05 +01:00
Julien Fontanet
1142f1d59a various updates 2017-02-23 18:31:05 +01:00
Julien Fontanet
126c470979 fix(Vhd#getEndOfHeaders): correctly compute parent locator size 2017-02-23 18:31:05 +01:00
Julien Fontanet
d679dc3e8b fix(vhdMerge): update diskGeometry and originalSize 2017-02-23 18:31:05 +01:00
Julien Fontanet
d5422dfe89 fix(Vhd#ensureBatSize): do not round maxTableEntries 2017-02-23 18:31:05 +01:00
Julien Fontanet
d64237b4f2 fix(package): dependency-check should ignore constant-stream 2017-02-23 18:31:05 +01:00
Julien Fontanet
7b7e4942f2 fix(constant-stream): optimize data length 2017-02-23 18:31:05 +01:00
Julien Fontanet
e4c343a587 chore(Vhd#_writeStream): merge back into Vhd#_write() 2017-02-23 18:31:05 +01:00
Julien Fontanet
1a8ae21478 feat(Vhd#_write): can also handle streams 2017-02-23 18:31:05 +01:00
Julien Fontanet
dd37a5b584 chore(Vhd#_write): simplify code 2017-02-23 18:31:05 +01:00
Julien Fontanet
eec340e6c0 chore(Vhd#_getBatEntry): remove logging 2017-02-23 18:31:05 +01:00
Julien Fontanet
c2fb5ba1f0 fix(vhdMerge): update currentSize and timestamp 2017-02-23 18:31:05 +01:00
Julien Fontanet
36d7e17b86 fix(Vhd#ensureBatSize): update BAT before writing footer 2017-02-23 18:31:05 +01:00
Julien Fontanet
5a1dc49428 fix(Vhd#ensureBatSize): various fixes 2017-02-23 18:31:05 +01:00
Julien Fontanet
47caf54772 fix(Vhd#ensureBatSize): extend local BAT buffer as well 2017-02-23 18:31:05 +01:00
Julien Fontanet
6af50a8c44 feat(Vhd#ensurebatSize): do not move first block if not necessary 2017-02-23 18:31:05 +01:00
Julien Fontanet
1b27407970 chore(Vhd#_getFirstAndLastBlocks): value → sector 2017-02-23 18:31:05 +01:00
Julien Fontanet
4da6306c67 fix(Vhd#_getFirstAndLastBlocks): should not be async 2017-02-23 18:31:05 +01:00
Julien Fontanet
f950b7a725 fix(Vhd#_readBlockData) 2017-02-23 18:31:05 +01:00
Julien Fontanet
930cf9ed04 fix(Vhd#ensureBatSize): update header.maxTableEntries 2017-02-23 18:31:05 +01:00
Julien Fontanet
744016e752 chore(vhd-merge): Vhd#_read() 2017-02-23 18:31:05 +01:00
Julien Fontanet
2fb4e907df fix(vhdMerge): ensure parent.bat.size >= child.bat.size
Fixes vatesfr/xo-web#1939
2017-02-23 18:31:05 +01:00
Julien Fontanet
ef2a815e52 chore(vhdMerge): ensure block sizes are identical 2017-02-23 18:31:05 +01:00
Julien Fontanet
33a81d4f3c chore(vhd-merge): rename and clean BAT methods
- readAllocationTableEntry() → _getBatEntry()
- writeAllocationTableEntry() → _setBatEntry()
2017-02-23 18:31:05 +01:00
Julien Fontanet
4911df0bf9 chore(Vhd#writeAllocationTableEntry): commit the change in the file 2017-02-23 18:31:05 +01:00
Julien Fontanet
2b612b5db7 feat(constant-stream): emit the same data n times 2017-02-23 18:31:05 +01:00
Julien Fontanet
bfe81b52ef chore(vhd-merge): Vhd#_writeStream() 2017-02-23 18:31:05 +01:00
Julien Fontanet
26f6a4beb9 chore(vhd-merge): Vhd#_readStream() 2017-02-23 18:31:05 +01:00
Julien Fontanet
52e9c3053a fix(console proxy): works with ws v2
Fixes vatesfr/xo-web#1954
2017-02-23 16:59:41 +01:00
Julien Fontanet
908d1f1ec8 feat(Xo#defineProperties): helper to define multiple properties 2017-02-23 12:23:18 +01:00
Julien Fontanet
6a1120f95b feat(Xo#defineProperty): accept a thisArg param 2017-02-23 12:18:54 +01:00
Julien Fontanet
2a2780c25c feat(Xapi#deleteVm): delete disks by default 2017-02-23 11:06:10 +01:00
Julien Fontanet
7d4152197b chore: update yarn.lock 2017-02-22 22:07:52 +01:00
greenkeeper[bot]
9c742600ff fix(package): update jest to version 19.0.1 (#504)
https://greenkeeper.io/
2017-02-22 12:10:12 +01:00
greenkeeper[bot]
a035bf132a fix(package): update pretty-format to version 19.0.0 (#503)
https://greenkeeper.io/
2017-02-21 10:52:37 +01:00
Julien Fontanet
b989d157a0 feat(redis): can connect via Unix socket
Fixes vatesfr/xo-web#1944
2017-02-16 11:32:35 +01:00
Julien Fontanet
261587511b chore(package): update all dependencies 2017-02-13 16:13:19 +01:00
Julien Fontanet
ff798801fb chore(package): update xen-api to v0.10.0-2 2017-02-13 16:08:09 +01:00
Julien Fontanet
9b4aab0d19 chore: update yarn.lock 2017-02-13 16:07:20 +01:00
greenkeeper[bot]
6e42cf9952 fix(package): update fs-promise to version 2.0.0 (#501)
https://greenkeeper.io/
2017-02-13 10:34:33 +01:00
Julien Fontanet
4c3a8ca312 fix(Xapi#createVm): wait for VM record to have all its VBDs/VIFs (#500) 2017-02-09 16:49:58 +01:00
Julien Fontanet
a63eb48f03 fix(Xapi#createVm): do not wait for a new record 2017-02-09 16:06:25 +01:00
Julien Fontanet
d0214f805e fix(Xapi#createVm): _waitObject → _waitObjectState 2017-02-09 16:06:25 +01:00
badrAZ
d736bd6501 fix(vm.create): VIFs param is optional (#499) 2017-02-09 11:16:20 +01:00
Pierre Donias
2ce4a11e0a fix(vm.delete): import mapFilter (#497) 2017-02-07 11:27:30 +01:00
Pierre Donias
e5ab8fe3e4 feat(vm.delete): remove ACLs (#496) 2017-02-02 22:11:13 +01:00
Julien Fontanet
657b74a084 Revert "feat(vm.snapshot): copy ACLs from VM to snapshot (#495)"
This reverts commit dfee98b66b.

Should not be necessary: snapshots inherit ACLs from their VM.
2017-02-01 10:24:26 +01:00
Pierre Donias
dfee98b66b feat(vm.snapshot): copy ACLs from VM to snapshot (#495)
See vatesfr/xo-web#1865

Also, correctly remove ACLs on VM deletion.
2017-02-01 10:22:17 +01:00
Julien Fontanet
f65b9f695e 5.6.1 2017-01-30 18:07:39 +01:00
Julien Fontanet
4056385cd3 feat(backups): do not rely on JSON format for lvm commands (#493) 2017-01-30 18:04:27 +01:00
Pierre Donias
96d56d43bc feat(Xapi#installSupplementalPackOnAllHosts) (#491)
See vatesfr/xo-web#1896
2017-01-30 10:53:26 +01:00
Julien Fontanet
eba8f95e58 5.6.0 2017-01-27 16:42:07 +01:00
Julien Fontanet
7e2da1ff93 [WiP] feat(backups): implements file restore for LVM (#490)
Fixes vatesfr/xo-web#1878
2017-01-27 16:37:34 +01:00
Pierre Donias
b7b7e81468 feat(host.installSupplementalPack) (#487)
See vatesfr/xo-web#1460
2017-01-25 16:08:31 +01:00
Pierre Donias
0c7768f5d2 fix(vm.delete): IP addresses should always be deallocated (#488)
Fixes vatesfr/xo-web#1906
2017-01-25 15:46:33 +01:00
Pierre Donias
8fe6a56dfc fix(Xapi#installAllPoolPatchesOnHost): ignore PATCH_ALREADY_APPLIED error (#489)
Fixes vatesfr/xo-web#1904
2017-01-25 15:46:15 +01:00
Julien Fontanet
7b9dae980d fix(vm.create): properly handle optional param VDIs 2017-01-24 13:36:36 +01:00
Olivier Lambert
b59ba6b7bb feat(api): add description for some API calls (#486)
Fixes vatesfr/xo-web#1882
2017-01-17 15:15:18 +01:00
Julien Fontanet
8cdee4d173 chore(xo): disable too many listeners warning 2017-01-16 15:50:18 +01:00
Julien Fontanet
c9ed5fbe00 chore: update yarn.lock 2017-01-16 15:18:46 +01:00
Julien Fontanet
e698e89968 feat(/signout): URL to sign out 2017-01-16 14:33:58 +01:00
Pierre Donias
02f198d42c feat(backup.fetchFiles): multiple files support (#485)
See vatesfr/xo-web#1877
2017-01-16 09:33:22 +01:00
Pierre Donias
61d2d0263b feat(patching): eject tools ISOs before patching host (#479)
Fixes #1798
2017-01-13 18:20:31 +01:00
badrAZ
ed477e99a8 feat(plugin): provide a getDataDir() to plugins (#483)
It returns the path of a directory where the plugin can store data.
2017-01-13 18:13:44 +01:00
Olivier Lambert
1449be8d66 feat(host): expose supplemental packs (#480) 2017-01-12 17:54:48 +01:00
greenkeeper[bot]
28902d8747 fix(package): update execa to version 0.6.0 (#478)
https://greenkeeper.io/
2017-01-09 10:50:31 +01:00
Julien Fontanet
d534592479 5.5.4 2017-01-06 16:57:47 +01:00
Pierre Donias
b2f6ea9116 fix(vm.set): allocate resources when editing VM (#477)
Fixes vatesfr/xo-web#1695
2017-01-06 16:54:49 +01:00
Pierre Donias
8bf38bb29b feat(server): store connection error in database (#472)
See vatesfr/xo-web#1833
2017-01-06 16:38:17 +01:00
greenkeeper[bot]
9c6a78b678 fix(package): update promise-toolbox to version 0.8.0 (#476)
https://greenkeeper.io/
2017-01-06 11:34:27 +01:00
Pierre Donias
850199d7fc fix(resource-sets): recompute limits (#475)
Fixes vatesfr/xo-web#1866
2017-01-06 10:09:36 +01:00
Pierre Donias
4282928960 fix(vif/create): locking mode when allocating IP addresses (#474)
Fixes vatesfr/xo-web#1747
2017-01-06 09:55:55 +01:00
Julien Fontanet
356dd89d9f chore(package): upgrade jest to v 0.18.1 2017-01-03 18:30:28 +01:00
Julien Fontanet
7dd2391e5a fix(group.setUsers): oldUsers → oldUsersIds 2017-01-03 11:20:25 +01:00
Julien Fontanet
e0093f236a fix(group.create): do not attempt to parse empty prop 2017-01-03 10:47:10 +01:00
Julien Fontanet
8c5c32268a fix: users and groups serialization in Redis
Fixes vatesfr/xo-web#1852.
2017-01-02 16:52:51 +01:00
greenkeeper[bot]
b61ccc1af1 fix(package): update hashy to version 0.6.0 (#470)
https://greenkeeper.io/
2017-01-02 13:01:29 +01:00
Julien Fontanet
7caf0e40f4 5.5.3 2017-01-02 10:56:08 +01:00
Julien Fontanet
a16508db10 fix(remotes): do not error on disabled remote
- testRemote()
- updateRemote()
- remoteRemote()
- forgetAllRemotes()
2016-12-25 20:07:42 +01:00
Julien Fontanet
81bff342b9 chore(package): update decorator-synchronized to version 0.2.3 2016-12-22 16:25:46 +01:00
Julien Fontanet
49d41a76a0 5.5.2 2016-12-22 11:22:45 +01:00
Julien Fontanet
b1732b3298 fix(file restore): work around for invalid delta VHD path (#467)
See vatesfr/xo-web#1842.
2016-12-22 11:20:51 +01:00
Julien Fontanet
9372cdb6c7 fix(vm.rollingDeltaBackup): do not hide error 2016-12-22 10:21:38 +01:00
Julien Fontanet
1d8e54b83e chore(backups): use directly Xo#getRemoteHandler() 2016-12-22 09:50:16 +01:00
Julien Fontanet
30c5600271 chore(Xo#getRemoteHandler): use intermediary variable 2016-12-22 09:49:36 +01:00
Julien Fontanet
9f7e5c3a9a fix(Xo#getRemoteHandler): throws if remote is disabled 2016-12-22 09:49:04 +01:00
Julien Fontanet
37c9342717 fix(vm.rollingDeltaBackup): correctly delete snapshot in case of failure 2016-12-21 22:35:43 +01:00
Julien Fontanet
8827f8e940 fix(backup.fetchFiles): encode URI suffix
Fix issue with whitespaces in the filename.
2016-12-20 17:07:13 +01:00
Julien Fontanet
58334bf4a1 fix(backup.list): timestamps should be integers 2016-12-20 17:07:13 +01:00
Julien Fontanet
b898a6702c chore(package): use husky instead of ghooks 2016-12-20 17:07:13 +01:00
Julien Fontanet
6d78a810b9 perf(RemoteHandlerAbstract/createReadStream): optimise code
- avoid async function: overhead with transpilation
- do as much as possible in parallel
- fix: do not add length property in range mode
2016-12-20 17:07:13 +01:00
Julien Fontanet
8fc4eb8cdf 5.5.1 2016-12-20 13:38:02 +01:00
Julien Fontanet
b3fac0c56f fix(backup.list): datetimes should be timestamps 2016-12-20 12:50:17 +01:00
Julien Fontanet
0b063b1f5e 5.5.0 2016-12-20 12:29:16 +01:00
Olivier Lambert
480f05e676 feat(vm): add install time (#465) 2016-12-20 12:19:11 +01:00
Julien Fontanet
1ac8af34ec feat(backup): implement file restore (#461)
See vatesfr/xo-web#1590

Current implementation has following limitations:

- only support local and NFS remotes
- requires installation of libvhdi-utils
- files can only be recovered one by one
2016-12-20 12:18:22 +01:00
Julien Fontanet
34ff8b0f02 feat(Xapi#exportDeltaVm): don't export VDIs with names starting with [NOBAK] (#464)
Fixes vatesfr/xo-web#826
2016-12-14 10:57:25 +01:00
Julien Fontanet
77c3684e28 chore(tests): execute tests directly in src/ 2016-12-13 18:20:17 +01:00
Julien Fontanet
93038ea838 chore(package): remove unused trace 2016-12-13 14:08:38 +01:00
Julien Fontanet
46348f7cba feat: yarn integration 2016-12-13 12:15:26 +01:00
Julien Fontanet
ccc0e45daf feat(tests): use Jest instead of mocha/chai/must 2016-12-13 12:15:03 +01:00
Julien Fontanet
46ca03b017 chore(package): clean scripts 2016-12-13 11:55:12 +01:00
Julien Fontanet
1bfe3197a5 chore(Travis): test with Node stable 2016-12-13 11:51:04 +01:00
Julien Fontanet
4d2617fe68 chore(package): requires Node >= 4 2016-12-13 11:49:54 +01:00
Julien Fontanet
92e289f9da fix(decorators/mixin): do not use arrow function for constructor
It works because of the transpilation but it's not valid ES2015.
2016-12-13 11:41:41 +01:00
greenkeeper[bot]
a8c7558a77 chore(package): update index-modules to version 0.2.1 (#463) 2016-12-12 16:49:10 +01:00
greenkeeper[bot]
c756e7ecbe chore(package): update index-modules to version 0.2.0 (#462)
https://greenkeeper.io/
2016-12-12 16:16:44 +01:00
Pierre Donias
1998c56e84 feat(vm.delete): release resource set and IP-pool addresses (#460)
Fixes vatesfr/xo-web#1657, fixes vatesfr/xo-web#1748
2016-12-12 15:14:31 +01:00
Julien Fontanet
2ed55b1616 chore(decorators): remove unused @autobind. 2016-12-08 11:47:17 +01:00
Julien Fontanet
0c8d456fd3 chore(package): use bind-property-descriptor instead of custom implementation 2016-12-08 11:46:29 +01:00
Julien Fontanet
9e4924caf6 5.4.1 2016-12-02 16:37:17 +01:00
Julien Fontanet
7f391a5860 Merge branch 'next-release' into stable 2016-12-02 16:37:13 +01:00
Julien Fontanet
5c7249c8fc fix(Xapi#exportDeltaVm): remove TAG_BASE_DELTA if full export
Fixes vatesfr/xo-web#1811
2016-12-02 16:09:27 +01:00
Pierre Donias
932d00133d feat(job-executor.match): __not pattern property (#459)
See vatesfr/xo-web#1503
2016-12-01 14:56:52 +01:00
Julien Fontanet
32a371bf13 chore(package): use golike-defer instead of custom implementation 2016-11-30 15:40:30 +01:00
Julien Fontanet
5d0622d2cf 5.4.0 2016-11-23 11:10:01 +01:00
Pierre Donias
9ab9155bf0 fix(vif.set): remove old VIF before creating new one (#457)
Fixes #1784
2016-11-23 10:38:24 +01:00
Julien Fontanet
86a1ed6d46 chore(package): remove unused nyc 2016-11-23 10:00:45 +01:00
Julien Fontanet
b3c9936d74 chore(package): update xen-api to v0.9.6 2016-11-23 09:58:04 +01:00
greenkeeper[bot]
21b4d7cf11 chore(package): update nyc to version 10.0.0 (#456)
https://greenkeeper.io/
2016-11-23 09:12:26 +01:00
greenkeeper[bot]
4ec07f9ff8 fix(package): update get-stream to version 3.0.0 (#458)
https://greenkeeper.io/
2016-11-23 09:11:39 +01:00
greenkeeper[bot]
b7c89d6f64 fix(package): update http-server-plus to version 0.8.0 (#454)
https://greenkeeper.io/
2016-11-18 14:44:50 +01:00
greenkeeper[bot]
0eb168ec70 fix(package): update uuid to version 3.0.0 (#453)
https://greenkeeper.io/
2016-11-18 09:10:07 +01:00
Olivier Lambert
8ac1a66e93 feat(sr.shared): new boolean property (#452) 2016-11-17 14:33:45 +01:00
badrAZ
301da3662a fix(plugin.test): data param is optional (#451) 2016-11-16 16:08:11 +01:00
greenkeeper[bot]
e474946cb7 fix(package): update xo-common to version 0.1.0 (#450)
https://greenkeeper.io/
2016-11-16 12:01:27 +01:00
Pierre Donias
9a0ca1ebb2 feat(api): map 10 XAPI errors to XO errors (#449)
Fixes vatesfr/xo-web#1481
2016-11-16 11:22:31 +01:00
Julien Fontanet
520f7b2a77 feat(job.create,job.set): ability to set userId (#448)
See vatesfr/xo-web#1733
2016-11-14 17:42:19 +01:00
Pierre Donias
c0b3b3aab8 Fix userId. 2016-11-14 16:59:10 +01:00
Pierre Donias
d499332ce3 It should be possible to not change a job's user. 2016-11-14 15:56:54 +01:00
Pierre Donias
19ce06e0bb feat(job#create,job#set): userId parameter
See vatesfr/xo-web#1733
2016-11-14 15:33:09 +01:00
greenkeeper[bot]
ea6ff4224e fix(package): update fs-promise to version 1.0.0 (#447)
https://greenkeeper.io/
2016-11-10 08:56:37 +01:00
Julien Fontanet
871d1f8632 fix(plugins registration): params order 2016-11-09 17:05:10 +01:00
badrAZ
77ce2ff6d1 feat(plugin.test): plugins can be tested (#446)
See vatesfr/xo-web#1749
2016-11-09 14:58:19 +01:00
Pierre Donias
6383104796 fix(Xapi#editPif): destroy VLAN from each PIF before creating new VLAN (#444) 2016-11-08 16:50:12 +01:00
Julien Fontanet
b99b4159c8 feat(Redis): support aliased commands
Fixes #443
2016-11-08 10:23:53 +01:00
Olivier Lambert
8bedb1f3b9 Merge pull request #442 from vatesfr/pierre-fix-xo-error
fix(api): xoError is not an object
2016-11-07 18:18:45 +01:00
Pierre Donias
dc85804a27 fix(api): xoError is not an object 2016-11-07 17:58:16 +01:00
greenkeeper[bot]
42a31e512a fix(package): update json-rpc-peer to version 0.13.0 (#441)
https://greenkeeper.io/
2016-11-07 14:57:53 +01:00
Pierre Donias
2be7388696 feat(api-errors): throw custom errors when XAPI error is caught (#440)
See vatesfr/xo-web#1717
2016-11-07 14:15:23 +01:00
Julien Fontanet
bc5b00781b 5.3.3 2016-11-04 11:44:09 +01:00
Olivier Lambert
313e2b3de6 fix(Sr): add type cifs in deviceConfig. Fixes vatesfr/xo-web#1615 (#439) 2016-11-04 11:42:03 +01:00
Julien Fontanet
0bbd002060 fix(xo.importConfig): dont unnecessarily delete existing users
Do not delete existing users with same name & id
2016-11-04 09:42:56 +01:00
Julien Fontanet
5e785266a5 fix(xo.importConfig): correctly import ACLs
Fixes vatesfr/xo-web#1722
2016-11-04 09:40:41 +01:00
Julien Fontanet
5870769e7d fix(vm.import{,Delta}Backup): make restored VMs identifiable
Their names is prefixed with the exported date and they have a specific tag (*restored from backup*).

Fixes vatesfr/xo-web#1719
2016-11-03 16:22:42 +01:00
Julien Fontanet
79b80dcd07 fix(pif#carrier): cast to boolean 2016-11-02 16:50:12 +01:00
Olivier Lambert
6f6e547e6c feat(pif): add carrier (#438)
Fixes vatesfr/xo-web#1702
2016-11-02 16:25:44 +01:00
greenkeeper[bot]
352c9357df chore(package): update dependencies (#437)
https://greenkeeper.io/
2016-11-01 19:05:11 +01:00
Pierre Donias
1ba4641641 feat(acls): handle xo.clean (#436) 2016-10-31 15:53:50 +01:00
Greenkeeper
60e0047285 chore(package): update helmet to version 3.0.0 (#435)
https://greenkeeper.io/
2016-10-29 12:52:18 +02:00
Pierre Donias
235e7c143c fix(signin): new Bootstrap classes (#434) 2016-10-28 10:11:41 +02:00
Julien Fontanet
522d6eed92 5.3.2 2016-10-27 18:49:32 +02:00
Julien Fontanet
9d1d6ea4c5 feat(xo): export/import config (#427)
See vatesfr/xo-web#786
2016-10-27 18:48:19 +02:00
Julien Fontanet
0afd506a41 5.3.1 2016-10-27 18:25:16 +02:00
Julien Fontanet
9dfb837e3f fix(Xapi#importDeltaVm): gracefully handle missing vif.$network$uuid (#433) 2016-10-27 16:46:45 +02:00
fufroma
4ab63b569f fix(RemoteHandlerNfs): move mount points in /run/xo-server/mounts
Fixes vatesfr/xo-web#1405
2016-10-27 15:56:33 +02:00
Julien Fontanet
8d390d256d fix(http-request): handle redirections (#432) 2016-10-27 15:34:54 +02:00
Julien Fontanet
4eec5e06fc fix(package): test on Node 6, not 7 (#431) 2016-10-27 12:24:40 +02:00
Julien Fontanet
e4063b1ba8 feat(sample.config.yaml): add warning about YAML 2016-10-24 22:52:11 +02:00
Greenkeeper
0c3227cf8e chore(package): update promise-toolbox to version 0.7.0 (#428)
https://greenkeeper.io/
2016-10-24 15:01:17 +02:00
Pierre Donias
7bed200bf5 feat(pif): editVlan (#426)
Fix vatesfr/xo-web#1092
2016-10-24 10:24:44 +02:00
Julien Fontanet
4f763e2109 5.3.0 2016-10-20 16:01:53 +02:00
Pierre Donias
75167fb65b feat(pif): expose IP config modes (#424)
See vatesfr/xo-web#1651
2016-10-20 12:44:35 +02:00
Julien Fontanet
675588f780 feat(delta backups): force checksums refresh
See vatesfr/xo-web#1672
2016-10-20 12:38:26 +02:00
Julien Fontanet
2d6f94edd8 fix(vhd-merge/chainVhd): correctly await _write()
Fixes vatesfr/xo-web#1672
2016-10-20 12:31:20 +02:00
Julien Fontanet
247c66ef4b feat(IP pools): can be used in resource sets (#413)
See vatesfr/xo-web#1565
2016-10-19 11:17:05 +02:00
Greenkeeper
1076fac40f Update gulp-sourcemaps to version 2.1.1 🚀 (#422)
https://greenkeeper.io/
2016-10-14 10:44:27 +02:00
Julien Fontanet
14a4a415a2 5.2.6 2016-10-13 18:51:16 +02:00
Julien Fontanet
524355b59c fix(vhd-merge/chainVhd): correctly compute header checksum (#419)
Fixes vatesfr/xo-web#1656
2016-10-13 18:49:58 +02:00
Greenkeeper
36fe49f3f5 Update promise-toolbox to version 0.6.0 🚀 (#416)
https://greenkeeper.io/
2016-10-12 09:19:19 +02:00
Greenkeeper
c0c0af9b14 chore(package): update execa to version 0.5.0 (#411)
https://greenkeeper.io/
2016-10-05 10:40:31 +02:00
Julien Fontanet
d1e472d482 chore(package): use babel-plugin-lodash 2016-10-04 16:05:01 +02:00
Julien Fontanet
c80e43ad0d fix(vm.create): don't require view perm on VM template 2016-10-04 16:03:06 +02:00
Julien Fontanet
fdd395e2b6 fix(vm.create): correctly check resourceSet objects
Related to vatesfr/xo-web#1620
2016-10-04 15:51:04 +02:00
Julien Fontanet
e094437168 fix(package): update xo-acl-resolver to version 0.2.2
See vatesfr/xo-web#1620
2016-10-04 15:24:01 +02:00
Pierre Donias
2ee0be7466 fix(xapi/utils/makeEditObject): constraints works with user props (#410) 2016-10-04 15:02:27 +02:00
Julien Fontanet
2784a7cc92 Create ISSUE_TEMPLATE.md 2016-10-03 16:24:24 +02:00
Julien Fontanet
b09f998d6c 5.2.5 2016-10-03 09:39:52 +02:00
Nicolas Raynaud
bdeb5895f6 fix(deltaBackups): update checksum after altering VHD files (#408)
Fixes vatesfr/xo-web#1606
2016-09-30 14:31:33 +02:00
Pierre Donias
3944b8aaee feat(network): create a bonded network (#407)
Fixes vatesfr/xo-web#876
2016-09-30 13:51:33 +02:00
Nicolas Raynaud
6e66cffb92 feat(deltaBackups): correctly chain VHDs (#406)
The goal is for a tool like vhdimount to be able to mount any file and use it as a disk to recover specific file in it.
2016-09-29 17:31:36 +02:00
Pierre Donias
57092ee788 feat(vif.set): support for network, MAC and currently_attached (#403)
Fixes vatesfr/xo-web#1446
2016-09-28 15:09:17 +02:00
Julien Fontanet
70e9e1c706 chore(package): update human-format to version 0.7.0 2016-09-28 09:58:54 +02:00
Greenkeeper
9662b8fbee chore(package): update babel-eslint to version 7.0.0 (#404)
https://greenkeeper.io/
2016-09-27 23:39:30 +02:00
Julien Fontanet
9f66421ae7 fix(bootstrap): C-c twice force stop the server 2016-09-27 10:44:24 +02:00
Greenkeeper
50584c2e50 chore(package): update http-server-plus to version 0.7.0 (#402)
https://greenkeeper.io/
2016-09-27 09:30:16 +02:00
Julien Fontanet
7be4e1901a chore(package): use index-modules 2016-09-26 15:41:41 +02:00
Julien Fontanet
b47146de45 fix(pbd/attached): should be a boolean 2016-09-22 13:20:49 +02:00
Julien Fontanet
97b229b2c7 fix(vm.set): works with VM templates
Fixes vatesfr/xo-web#1569
2016-09-22 10:39:20 +02:00
Julien Fontanet
6bb5bb9403 5.2.4 2016-09-21 10:20:46 +02:00
Julien Fontanet
8c4b8271d8 fix(pool.setDefaultSr): remove pool param
Fixes vatesfr/xo-web#1558
2016-09-20 11:45:36 +02:00
Julien Fontanet
69291c0574 chore(package): update xo-vmdk-to-vhd to version 0.0.12
Fixes vatesfr/xo-web#1551
2016-09-20 10:41:42 +02:00
Julien Fontanet
2dc073dcd6 fix(vm.resourceSet): handle xo namespace 2016-09-19 13:15:23 +02:00
Julien Fontanet
1894cb35d2 feat(vm): expose resourceSet prop 2016-09-19 12:10:09 +02:00
Julien Fontanet
cd37420b07 Merge pull request #398 from vatesfr/greenkeeper-standard-8.1.0
Update standard to version 8.1.0 🚀
2016-09-18 05:17:41 +02:00
Julien Fontanet
55cb6b39db fix(Xo#removeSchedule): correctly test instance of SchedulerError 2016-09-18 05:12:36 +02:00
greenkeeperio-bot
89d13b2285 chore(package): update standard to version 8.1.0
https://greenkeeper.io/
2016-09-17 20:51:59 +02:00
Julien Fontanet
1b64b0468a fix(group.delete): remove associated ACLs
Fixes vatesfr/xo-web#899
2016-09-16 16:04:41 +02:00
Julien Fontanet
085fb83294 fix(user.delete): remove associated ACLs
See vatesfr/xo-web#899
2016-09-16 16:04:41 +02:00
Julien Fontanet
edd606563f feat(vm.revert): can snapshot before (#395)
See vatesfr/xo-web#1445
2016-09-15 14:59:43 +02:00
Julien Fontanet
fb804e99f0 5.2.3 2016-09-14 18:02:32 +02:00
Pierre Donias
1707cbcb54 feat(signin): use XO 5 style (#394)
Fixes vatesfr/xo-web#1161
2016-09-14 17:56:05 +02:00
Julien Fontanet
6d6a630c31 5.2.2 2016-09-14 17:37:42 +02:00
Julien Fontanet
ff2990e8e5 chore(package): update @marsaud/smb2-promise to version 0.2.1
Fixes vatesfr/xo-web#1511
2016-09-14 17:32:52 +02:00
Nicolas Raynaud
d679aff0fb chore(package): remove node-smb2 dependency (#393) 2016-09-14 16:23:28 +02:00
Julien Fontanet
603a444905 fix(Xapi#importVm): remove VM's VDIs on failure 2016-09-14 14:11:20 +02:00
Julien Fontanet
a002958448 fix(DR): remove previous VDIs
Fixes vatesfr/xo-web#1510
2016-09-14 14:11:20 +02:00
Julien Fontanet
cb4bc37424 fix(DR): delete VMs in all cases
Previous copies were not deleted when there were as many as the depth.

Fixes vatesfr/xo-web#1509
2016-09-14 14:11:19 +02:00
Julien Fontanet
0fc6f917e6 5.2.1 2016-09-13 16:44:35 +02:00
Julien Fontanet
ec0d012b24 feat(vm.set): support tags (#392)
Fixes vatesfr/xo-web#1431
2016-09-13 16:35:40 +02:00
Julien Fontanet
2cd4b171a1 chore(package): update json5 to version 0.5.0 2016-09-13 11:28:56 +02:00
Julien Fontanet
0cb6906c4d chore(package): is-my-json-valid to v2.13.1 2016-09-13 11:25:22 +02:00
Julien Fontanet
4c19b93c30 chore(package): update fs-promise to version 0.5.0 2016-09-13 11:23:42 +02:00
Julien Fontanet
6165f1b405 fix(vm.create): select SR of first disk-VDI (#391)
Fixes vatesfr/xo-web#1493
2016-09-12 16:32:43 +02:00
Julien Fontanet
37a4221e43 fix(vm.docker.containers): yes, again 2016-09-12 12:13:45 +02:00
Julien Fontanet
9831b222b5 fix(vm.docker.containers) 2016-09-12 12:11:15 +02:00
Julien Fontanet
7b6f44fb74 fix(vm.createInterface): syntax fix 2016-09-12 12:06:34 +02:00
Julien Fontanet
399f4d0ea3 feat(vm.docker.containers): like vm.docker.process.items but always an array 2016-09-12 11:43:36 +02:00
Julien Fontanet
26a668a875 fix(vm.createInterface): accept integers for position and mtu 2016-09-12 11:36:30 +02:00
Julien Fontanet
bf96262b6e feat(Xapi#createVif): default MTU is network's MTU 2016-09-12 11:05:31 +02:00
Julien Fontanet
1155fa1fe9 chore(vm.create): remove some console.log()s 2016-09-09 15:31:25 +02:00
Julien Fontanet
1875d31731 5.2.0 2016-09-09 15:16:03 +02:00
Julien Fontanet
6f855fd14e feat(IP pools): groups of IP addresses (#371) 2016-09-09 15:12:30 +02:00
Julien Fontanet
08e392bb46 fix(vm.create): correctly compute limits usage (#389)
Fixes vatesfr/xo-web#1365
2016-09-09 12:55:10 +02:00
Julien Fontanet
66d63e0546 fix(test.wait): fix setTimeout params order 2016-09-08 18:40:55 +02:00
Julien Fontanet
7ee56fe8bc feat(pool.installAllPatches): install all patches on a pool (#388)
See vatesfr/xo-web#1392
2016-09-07 17:54:00 +02:00
Julien Fontanet
669d04ee48 fix(vm.migrate): error on unused default SR
Fixes #1466
2016-09-05 14:21:17 +02:00
Julien Fontanet
cb1b37326e fix(vm.rollingDrCopy): avoid duplicates in VMs list (#387)
Fixes vatesfr/xo-web#1464
2016-09-05 13:41:20 +02:00
Julien Fontanet
7bb73bee67 feat(vm.rollingDrCopy): failure to destroy old copies is not fatal 2016-09-05 11:29:54 +02:00
Julien Fontanet
7286ddc338 chore(JobExecutor): use utils/serializeError() 2016-09-05 11:29:53 +02:00
Olivier Lambert
7d1f9e33fe feat(network): add defaultIsLocked to API (#385) 2016-09-01 14:49:20 +02:00
Ronan Abhamon
63c676ebfe feat(vm.import): supports OVA (#375)
See vatesfr/xo-web#709
2016-09-01 14:11:15 +02:00
Greenkeeper
fcaf6b7923 chore(package): update json-rpc-peer to version 0.12.0 (#383)
https://greenkeeper.io/
2016-08-25 11:56:54 -04:00
Julien Fontanet
9f347a170a fix(xapi/utils): correctly isPlainObject 2016-08-18 16:21:34 +02:00
Julien Fontanet
2f7cd4426d fix(xapi/utils/prepareXapiParam): array handling 2016-08-18 16:15:51 +02:00
Julien Fontanet
854f256470 fix(xapi/getNamespaceForType): add missing VIF 2016-08-18 15:27:47 +02:00
Julien Fontanet
5d0b40f752 fix(utils/camelToSnakeCase): better number handling 2016-08-18 15:23:57 +02:00
Julien Fontanet
27a2853ee8 fix(vif.set): add missing param 2016-08-18 15:13:46 +02:00
Julien Fontanet
67f6b80312 fix(vif.set): do not use an arrow function 2016-08-18 15:01:13 +02:00
Julien Fontanet
016037adc1 fix(user.set): can be used by non admins 2016-08-18 14:17:07 +02:00
Julien Fontanet
70d5c1034d 5.1.6 2016-08-18 10:54:36 +02:00
Greenkeeper
ed6fb8754f chore(package): update mocha to version 3.0.2 (#376)
https://greenkeeper.io/
2016-08-18 10:53:05 +02:00
Julien Fontanet
6d08a9b11c feat(JobExecutor): a current job will only run 2 calls at a time (#382)
Fixes vatesfr/xo-web#915
2016-08-18 10:52:29 +02:00
Julien Fontanet
cf6aa7cf79 fix(package): update xen-api to 0.9.4
Again, fixes vatesfr/xo-web#1384
2016-08-18 09:42:28 +02:00
Julien Fontanet
6c4e57aae0 chore(JobExecutor#_execCall): forEach+Array#push → mapToArray 2016-08-17 18:13:30 +02:00
Julien Fontanet
d08a04959c 5.1.5 2016-08-16 19:15:52 +02:00
Julien Fontanet
2762f74ce5 fix(package): update xen-api to 0.9.3 2016-08-16 19:12:46 +02:00
Julien Fontanet
6ebcf6eec5 5.1.4 2016-08-16 18:18:04 +02:00
Julien Fontanet
25b78fb7e1 fix(package): update xen-api to 0.9.2
Fixes vatesfr/xo-web#1384
2016-08-16 18:15:32 +02:00
Greenkeeper
670dd2dd96 chore(package): update promise-toolbox to version 0.5.0 (#381)
https://greenkeeper.io/
2016-08-16 12:22:57 +02:00
Julien Fontanet
1baf04f786 fix(NfsHandler#_unmount): use _getRealPath() (#380)
Fixes vatesfr/xo-web#1396.
2016-08-15 14:22:19 +02:00
Greenkeeper
ce05b7a041 chore(package): update nyc to version 8.1.0 (#379)
https://greenkeeper.io/
2016-08-14 19:06:00 +02:00
Olivier Lambert
290cc146c8 fix(xapi): allow to unplug VBDs when VM is running 2016-08-11 16:32:06 +02:00
Olivier Lambert
db4d46a584 fix(sr): don't share a local ISO SR. Fixes vatesfr/xo-web#1389 2016-08-10 14:39:05 +02:00
Olivier Lambert
8ed2e51dde feat(network): add network.set method 2016-08-08 14:54:23 +02:00
Olivier Lambert
33702c09a6 feat(vm copy): allow snapshot copy. Related to vatesfr/xo-web#1353 2016-08-08 14:07:27 +02:00
Olivier Lambert
45aeca3753 5.1.3 2016-08-05 11:08:11 +02:00
Olivier Lambert
deae7dfb4d fix(xen-api): avoid reserved key conflicts. Fixes vatesfr/xo-web#1369 2016-08-05 11:06:58 +02:00
Julien Fontanet
2af043ebdd chore(jshint): remove unused config file 2016-08-03 09:46:52 +02:00
Olivier Lambert
e121295735 Merge pull request #373 from nraynaud/next-release
fix (readme): fix installation documentation link
2016-08-02 12:32:05 +02:00
Nicolas Raynaud
7c1c405a64 fix installation documentation link 2016-08-02 12:22:39 +02:00
Olivier Lambert
5d7c95a34d fix(xapi): typo on host disable method. Fixes vatesfr/xo-web#1351 2016-07-30 20:22:12 +02:00
Julien Fontanet
504c934fc9 fix(JobExecutor#_execCall): xo.api.call() → xo.callApiMethod() 2016-07-29 15:28:24 +02:00
Julien Fontanet
81b0223f73 fix(JobExecutor#exec): forward the error 2016-07-29 15:27:58 +02:00
Julien Fontanet
6d1e410bfd fix(JobExecutor#exec): correctly log the error 2016-07-29 15:27:32 +02:00
Julien Fontanet
26c5c6152d fix(job-executor/map): paramName handling 2016-07-29 14:37:42 +02:00
Julien Fontanet
d83bf0ebaf fix(Xo#_watchObject): check for notify() 2016-07-29 14:29:57 +02:00
Julien Fontanet
5adfe9a552 chore(index): remove debug trace 2016-07-29 13:54:54 +02:00
ABHAMON Ronan
883f461dc7 feat(job-executor): supports dynamic param vectors (#369)
See vatesfr/xo-web#837
2016-07-29 13:26:53 +02:00
Julien Fontanet
8595ebc258 feat(api): generate logs on errors
See vatesfr/xo-web#1344
2016-07-29 10:32:48 +02:00
Julien Fontanet
2bd31f4560 chore(api): remove legacy helpers 2016-07-28 15:21:59 +02:00
Julien Fontanet
6df85ecadd fix(vm.*): add missing import 2016-07-28 15:21:59 +02:00
Julien Fontanet
07829918e4 5.1.2 2016-07-28 15:21:12 +02:00
Julien Fontanet
b0d400b6eb fix(Xapi#exportDeltaVm): better handling of removed VDIs (#370)
Fixes vatesfr/xo-web#1333
2016-07-28 15:19:44 +02:00
Julien Fontanet
706cb895ad 5.1.1 2016-07-27 16:36:51 +02:00
Julien Fontanet
45bf539b3c fix(user.delete): fix tokens deletion 2016-07-27 13:23:16 +02:00
Julien Fontanet
0923981f8d fix(user.set): typo in error message 2016-07-27 13:01:32 +02:00
Julien Fontanet
b0ac14363d 5.1.0 2016-07-26 16:52:49 +02:00
Julien Fontanet
5d346aba37 fix(vm.create): cloudConfig handling 2016-07-26 14:26:24 +02:00
Julien Fontanet
124cb15ebe fix(resource sets): fix VM resources computation
Fixes vatesfr/xo-web#1276
2016-07-25 17:08:09 +02:00
Julien Fontanet
a244ab898d fix(vm.create): correctly store the resource set 2016-07-25 17:08:08 +02:00
Julien Fontanet
3c551590eb fix(vm.set): correctly save memory in limits 2016-07-25 17:08:07 +02:00
ABHAMON Ronan
10e30cccbc feat(models/schedule): null properly remove timezone (#368)
Related to vatesfr/xo-web#1314
2016-07-25 15:54:27 +02:00
Julien Fontanet
806a6b86a2 fix(signin): fix styles when /v4 2016-07-25 13:40:57 +02:00
Julien Fontanet
9719fdf5cc fix(sr.probe*): correctly prepare port param 2016-07-23 16:18:03 +02:00
Julien Fontanet
6d8764f8cb fix(Xapi#createVm): add missing param 2016-07-23 15:49:27 +02:00
Julien Fontanet
d9fd9cb408 fix(vm.create): better VBDs creation (#361)
Fixes vatesfr/xo-web#1257
2016-07-23 15:31:15 +02:00
Julien Fontanet
7710ec0aba feat(schemas): add user schema 2016-07-20 12:10:23 +02:00
Julien Fontanet
c97bd78cd0 fix(VM): cpuCap & cpuWeight are integers 2016-07-20 10:57:15 +02:00
ABHAMON Ronan
728c5aa86e feat(plugins): supports predefined configurations (#365)
See vatesfr/xo-web#1289
2016-07-19 17:28:53 +02:00
Pierre Donias
83d68ca293 feat(vm.set): make cpuWeight and cpuCap nullable (#364) 2016-07-19 16:53:47 +02:00
Julien Fontanet
47d7561db4 fix(VM): cpuCap can be defined when cpuWeight is not 2016-07-19 15:37:07 +02:00
ABHAMON Ronan
7d993e8319 feat(schedules): schedules support timezones (#363)
Fixes vatesfr/xo-web#1258
2016-07-19 13:32:27 +02:00
Julien Fontanet
1d1a597b22 feat(VM): expose cpuCap 2016-07-19 11:02:38 +02:00
Julien Fontanet
23082f9300 feat(vm.set): support for cpuCap (#362) 2016-07-19 10:35:03 +02:00
Julien Fontanet
ea1a7f9376 chore(Xapi#_getXenUpdates): use ensureArray() 2016-07-15 12:57:20 +02:00
Greenkeeper
1796c7bab8 chore(package): update nyc to version 7.0.0 (#358)
https://greenkeeper.io/
2016-07-14 13:09:12 +02:00
Greenkeeper
65ad76479a chore(package): update base64url to version 2.0.0 (#360)
https://greenkeeper.io/
2016-07-14 11:33:12 +02:00
Olivier Lambert
422db04ec8 5.0.5 2016-07-13 15:20:56 +02:00
Olivier Lambert
d12f60fe37 Merge pull request #359 from vatesfr/pierre-fix-create-vm
fix(vm/create): missing single quotes
2016-07-13 09:37:23 +02:00
Pierre Donias
194c1c991c fix(vm/create): missing single quotes 2016-07-12 16:40:32 +02:00
Olivier Lambert
3e8e2222c1 Merge pull request #357 from vatesfr/marsaudf-fix-job-log-error
Add message to job log error
2016-07-07 15:26:15 +02:00
Fabrice Marsaud
1620327a33 Add message to job log error 2016-07-07 14:55:43 +02:00
Olivier Lambert
b1131e3667 5.0.4 2016-07-07 12:12:54 +02:00
Olivier Lambert
db0250ac08 Merge pull request #356 from vatesfr/marsaudf-fix-patch-conflicts
Fix(xapi): handle correctly single XML elements
2016-07-07 11:22:27 +02:00
Fabrice Marsaud
0a6b605760 Handle single patch elements in parsed XML 2016-07-07 10:11:21 +02:00
Olivier Lambert
81ac2375e5 5.0.3 2016-07-06 23:23:14 +02:00
Olivier Lambert
6bcaca6cd7 Merge pull request #355 from vatesfr/issue-1233
fix(Xapi#importDeltaVm): correctly handle missing network
2016-07-06 23:21:55 +02:00
Olivier Lambert
ec8375252e fix(Xapi#importDeltaVm): correctly handle missing network 2016-07-06 23:11:47 +02:00
Julien Fontanet
766aa1762f 5.0.2 2016-07-05 17:56:02 +02:00
Julien Fontanet
5165e0a54c feat(user.set): support preferences 2016-07-05 17:19:38 +02:00
Julien Fontanet
a2f7ad627e feat(Xapi#migrateVm): allow non-running VMs
Fixes vatesfr/xo-web#1216
2016-07-05 17:09:54 +02:00
Julien Fontanet
1176c162d4 5.0.1 2016-06-30 15:46:27 +02:00
Fabrice Marsaud
a4880cd017 feat(remote.test): perform a write/read test on a remote (#354)
See vatesfr/xo-web#1075
2016-06-30 15:00:00 +02:00
Julien Fontanet
383bdce416 fix(plugin.configure): fix undefined handling 2016-06-29 13:08:02 +02:00
Julien Fontanet
7cc300dd83 fix(Xapi#createVif): fix handling when neither device nor position is provided 2016-06-28 17:36:24 +02:00
Fabrice Marsaud
687809db9d fix(user.set): cannot change self permission (#353) 2016-06-28 13:28:31 +02:00
Julien Fontanet
1127ec3a90 feat(vif.set): allowed IPv4/IPv6 addresses (#328) 2016-06-27 15:11:46 +02:00
Julien Fontanet
a797edfae9 chore(xapi/mixins/vm): simplify _editVm() specs 2016-06-27 12:10:57 +02:00
Julien Fontanet
938e106252 feat(xapi/utils/makeEditObject): support camelCase and snake_case aliases 2016-06-27 12:10:54 +02:00
Julien Fontanet
a0eb9caaa2 feat(xapi/utils/makeEditObject): set, set.get, set.set can be true 2016-06-27 11:54:13 +02:00
Julien Fontanet
442f53d45e fix(xapi/utils/makeEditObject): use deep equality 2016-06-27 09:52:02 +02:00
Greenkeeper
68de1ca248 chore(package): update ws to version 1.1.1 (#348)
https://greenkeeper.io/
2016-06-26 20:19:47 +02:00
Greenkeeper
e16061141e chore(package): update d3-time-format to version 2.0.0 (#350)
https://greenkeeper.io/
2016-06-26 20:18:24 +02:00
Julien Fontanet
64cbe3d209 feat(build): delete dist before building 2016-06-26 17:47:56 +02:00
Julien Fontanet
ebdc6376d8 5.0.0 2016-06-24 18:34:31 +02:00
Julien Fontanet
68335123a1 feat(vm.create): all vm.set params are supported (#340) 2016-06-24 18:33:43 +02:00
Julien Fontanet
25b18f4ef8 chore(package): update xo-acl-resolver to 0.2.1 2016-06-24 14:43:18 +02:00
Julien Fontanet
9ad615b0ff fix(Xapi#_waitObjectState): fix failure when object is initially missing 2016-06-22 12:20:22 +02:00
Julien Fontanet
12eaceb032 fix(xapi-objects-to-xo): fix CPUs.number when no tools 2016-06-21 13:19:29 +02:00
Julien Fontanet
3263511b72 fix(Xapi#snapshotVm): fallback if quiesce failed
Fixes vatesfr/xo-web#1088
2016-06-21 11:21:01 +02:00
Julien Fontanet
75cae8c647 fix(Xapi#_updateObjectMapProperty): prepare XAPI param 2016-06-21 11:21:00 +02:00
Julien Fontanet
9991ef624c feat(Xapi#getObject): accept objects with _xapiId property 2016-06-21 11:21:00 +02:00
Julien Fontanet
489e9fce27 fix(xapi/index): work around Babel T2877 2016-06-21 11:21:00 +02:00
Julien Fontanet
0655628073 fix(xapi/index): incorrect import 2016-06-21 11:20:59 +02:00
Fabrice Marsaud
9460822529 feat(vm.importBackup): returns the new VM id (#345) 2016-06-20 18:07:14 +02:00
Julien Fontanet
d02358ac0d chore(xapi): move utilities into dedicated module 2016-06-17 18:43:10 +02:00
ABHAMON Ronan
366237a625 fix(XapiStats): fix unit for host free memory (#339) 2016-06-17 10:16:58 +02:00
Julien Fontanet
2f2da18994 chore: remove some unnecessary logs 2016-06-16 09:22:26 +02:00
Greenkeeper
ecd30db215 chore(package): update d3-time-format to version 1.0.0 (#338)
https://greenkeeper.io/
2016-06-15 08:40:56 +02:00
ABHAMON Ronan
1980854f6f feat(Xapi#importDeltaVm): attach VIFs to original networks if available (#335)
Fixes vatesfr/xo-web#1016
2016-06-10 11:05:54 +02:00
Julien Fontanet
7d4f006c25 feat(Xapi#exportDeltaVm): inject network/SR UUIDs in VIF/VDI records 2016-06-09 17:25:02 +02:00
Julien Fontanet
b697be2383 fix(Xapi#_snapshotVm): returns the up-to-date snapshot record 2016-06-09 17:17:14 +02:00
Fabrice Marsaud
143e53c43f chore(package): update xo-remote-parser to version 0.3.0 (#333) 2016-06-08 17:26:08 +02:00
Julien Fontanet
6dde1ade01 fix(xo-server-logs): fix broken require since Babel 6 2016-06-08 11:12:45 +02:00
Greenkeeper
d4de391ac5 chore(package): update d3-time-format to version 0.4.0 (#332)
https://greenkeeper.io/
2016-06-08 09:05:45 +02:00
Greenkeeper
af15f4bc6a chore(package): update xo-acl-resolver to version 0.2.0 (#330)
https://greenkeeper.io/
2016-06-07 16:46:23 +02:00
Fabrice Marsaud
d4ace24caa fix(job.set): protects userId from modification (#329) 2016-06-07 09:25:15 +02:00
Julien Fontanet
c5ab47fa66 chore(package): fix deps order 2016-06-06 13:38:16 +02:00
Julien Fontanet
d60051b629 fix(package): update xo-remote-parser to 0.2.1 2016-06-06 13:37:47 +02:00
Julien Fontanet
22ff330ee7 fix(package): update @marsaud/smb2 to 0.7.1 2016-06-03 18:22:37 +02:00
Olivier Lambert
dd62bef66d feat(host): expose correct timestamp for license expiry value 2016-05-31 17:24:49 +02:00
Julien Fontanet
e7feb99f8d feat(vm.create): clone param may be use to disable cloning (#318)
See vatesfr/xo-web#960
2016-05-30 11:34:39 +02:00
Julien Fontanet
6358accece fix(plugin.configure): correctly handle undefined 2016-05-30 11:12:11 +02:00
Olivier Lambert
9ce8a24eea feat(sr): add disconnect and connect all PBDs to a SR (#324) 2016-05-27 18:31:09 +02:00
Julien Fontanet
4d0673f489 feat(sr.forget): automatically disconnect PBDs (#323) 2016-05-27 18:15:09 +02:00
Olivier Lambert
fbe1e6a7d5 fix(vm): missing parameters and wrong value for set_memory_static_max 2016-05-27 15:03:49 +02:00
Greenkeeper
4ed02ca501 chore(package): update cookie to version 0.3.0 (#322)
https://greenkeeper.io/
2016-05-27 04:36:35 +02:00
Julien Fontanet
af245ed9fe fix(log.delete): id can be an array 2016-05-26 13:34:47 +02:00
Julien Fontanet
fc86a3e882 fix(vm): always consider memory dynamic max when updating resource set 2016-05-24 16:22:55 +02:00
Julien Fontanet
f9109edcf1 fix(vm.set): memoryMax should update resource set 2016-05-24 16:21:21 +02:00
Julien Fontanet
ec100e1a91 fix(vm.set): memoryMax should change dynamic max 2016-05-24 16:20:25 +02:00
Julien Fontanet
746c5f4a79 fix(vm.set): cpusMax (shame) 2016-05-24 15:13:53 +02:00
Julien Fontanet
b2611728a1 fix(vm): fix indent 2016-05-24 14:38:11 +02:00
Julien Fontanet
fc6cc4234d chore(vm.set): fix some comments 2016-05-24 14:33:40 +02:00
Julien Fontanet
7706c1cb63 feat(vm.set): memoryStaticMax 2016-05-24 14:33:02 +02:00
Julien Fontanet
4d7a07220c feat(vm.set): memoryMax increase static max if necessary 2016-05-24 14:32:14 +02:00
Julien Fontanet
436875f7dc fix(vm.set): memoryMin should not change static min 2016-05-24 14:30:26 +02:00
Julien Fontanet
21c6f53ecc fix(vm.set): cpusMax 2016-05-24 14:23:21 +02:00
Julien Fontanet
5472be8b72 4.17.0 2016-05-24 11:51:15 +02:00
Julien Fontanet
d22542fcf3 Revert "fix(leveldown): fix leveldown to version 1.4.4"
This reverts commit 5fa4c95480.
2016-05-24 11:50:36 +02:00
Julien Fontanet
1d8341eb27 Merge branch 'next-release' into stable 2016-05-24 11:49:47 +02:00
Julien Fontanet
1897a7ada3 fix(log.get): only usable by admins 2016-05-23 16:18:21 +02:00
Julien Fontanet
a048698c66 feat(log.*): add params schemas 2016-05-23 16:17:54 +02:00
Julien Fontanet
f891e57f4a fix(xapi-objects-to-xo): a SR should always have a container 2016-05-23 16:00:51 +02:00
Olivier Lambert
fcc590e48a feat(vm.snapshot): name param is optional (#320) 2016-05-23 12:56:20 +02:00
Julien Fontanet
9a02a2a65b fix(vm.set): fix call to $isVmRunning
Fixes #319
2016-05-23 10:02:34 +02:00
Julien Fontanet
536a6c5c60 feat(vm.rollingDrCopy): accepts a sr param (#315)
See vatesfr/xo-web#955
2016-05-21 14:24:16 +02:00
Julien Fontanet
86a6871ee8 fix(vm.set): correctly change min dynamic memory if necessary (#317)
Fixes vatesfr/xo-web#970
2016-05-21 14:14:57 +02:00
Julien Fontanet
6046045151 feat(vm.createInterface): position param is now optional 2016-05-21 13:11:51 +02:00
Julien Fontanet
9c3ddd4ba4 fix(Xapi#_createVm()): license issue with Dundee (#316)
Fixes vatesfr/xo-web#964.
2016-05-20 12:22:42 +02:00
Julien Fontanet
6c9f55c1d7 style(utils): fix lightSet 2016-05-17 09:08:51 +02:00
Julien Fontanet
5bec3d7dcd fix(xapi-object-to-xo): correctly set host memory size 2016-05-16 11:50:01 +02:00
Julien Fontanet
a4c309efe8 fix(package): ship signin.pug 2016-05-12 18:18:56 +02:00
Jon Sands
4e22a208dd fix(autopoweron): set pool other_config entry to true instead of on (#310)
Fixes #309 
Fixes vatesfr/xo-web#937
2016-05-12 13:17:02 +02:00
Julien Fontanet
ff9e77118e fix(Xapi): VM creation on Dundee (#303) 2016-05-11 18:03:58 +02:00
Julien Fontanet
6c6dfa9ac4 perf(Promise): use Bluebird as default implementation 2016-05-11 18:01:52 +02:00
Greenkeeper
d60d5207d8 chore(package): update xen-api to version 0.9.0 (#308)
https://greenkeeper.io/
2016-05-11 17:55:35 +02:00
ABHAMON Ronan
8c0ae892f5 feat(api): rename <namespace> param to id (#305) 2016-05-11 14:35:49 +02:00
Greenkeeper
f570492a11 chore(package): update xo-remote-parser to version 0.2.0 (#307)
https://greenkeeper.io/
2016-05-11 14:07:23 +02:00
Julien Fontanet
cc447304f5 fix(bin/xo-server): remove ES6 syntax 2016-05-10 13:43:53 +02:00
Julien Fontanet
8f8c6366e3 chore(xo-mixins/backup): use default value for remote handler flags 2016-05-05 18:22:19 +02:00
Julien Fontanet
3b13bcb098 fix(Xapi#exportDeltaVm): make streams property non-enumerable 2016-05-05 18:19:41 +02:00
Julien Fontanet
df60784b51 chore(signin): jade renamed to pug 2016-05-04 16:00:28 +02:00
Julien Fontanet
bae3122bb5 chore: various updates 2016-05-04 12:16:02 +02:00
Julien Fontanet
0770aef4bf chore(package): update standard to version 7.0.0 2016-05-04 11:59:56 +02:00
ABHAMON Ronan
c198350bfa feat(remote-handlers): cannot overwrite files by default (#297) 2016-05-03 16:56:26 +02:00
Greenkeeper
a2ed388777 chore(package): update helmet to version 2.0.0 (#298)
https://greenkeeper.io/
2016-04-30 08:10:22 +02:00
Julien Fontanet
f6670c699a 4.16.1 2016-04-29 10:28:03 +02:00
Julien Fontanet
5fa4c95480 fix(leveldown): fix leveldown to version 1.4.4
Due to Level/leveldown#276.
2016-04-29 10:27:37 +02:00
Julien Fontanet
5b8608c186 feat(utils/streamToBuffer): rebase on get-stream and use everywhere (#295) 2016-04-29 09:52:36 +02:00
Julien Fontanet
bb75d42ede 4.16.0 2016-04-29 09:44:42 +02:00
Julien Fontanet
b4b6def07a Merge branch 'next-release' into stable 2016-04-29 09:43:30 +02:00
Greenkeeper
b305700987 chore(package): update get-stream to version 2.1.0 (#294)
https://greenkeeper.io/
2016-04-29 09:14:30 +02:00
Greenkeeper
40232b7eb1 chore(package): update fs-extra to version 0.30.0 (#293)
https://greenkeeper.io/
2016-04-28 18:17:34 +02:00
Julien Fontanet
67ff666db4 Use the new execa.stdout() 2016-04-28 10:18:05 +02:00
Greenkeeper
5960fd4fe0 chore(package): update fs-extra to version 0.29.0 (#292)
https://greenkeeper.io/
2016-04-28 09:04:39 +02:00
Greenkeeper
f8b28c519c chore(package): update xo-acl-resolver to version 0.1.0 (#291)
https://greenkeeper.io/
2016-04-28 08:56:31 +02:00
Julien Fontanet
ee1105b6dd fix(Xapi#importVdiContent): find first attached PBD (#279) 2016-04-27 09:37:30 +02:00
Julien Fontanet
4778274c97 fix(Xapi#call): retries on TOO_MANY_PENDING_TASKS
Fixes vatesfr/xo-web#861
2016-04-27 09:28:36 +02:00
Julien Fontanet
d7ecb32238 Xapi#snapshotVm(): wait for the uptodate obj on quiesce. (#282)
Fixes vatesfr/xo-web#904
2016-04-27 09:24:00 +02:00
Greenkeeper
744306fc50 chore(package): update execa to version 0.4.0 (#290)
https://greenkeeper.io/
2016-04-27 09:13:16 +02:00
Olivier Lambert
11bbb8ed4d add host startTime and agentStartTime 2016-04-26 11:30:57 +02:00
Julien Fontanet
b5092a4444 feat(toTimestamp): handle timestamps. 2016-04-26 11:27:26 +02:00
Greenkeeper
e2442c07a9 https://greenkeeper.io/ Fixes vatesfr/xo-web#769
https://greenkeeper.io/

Fixes vatesfr/xo-web#769.
2016-04-26 09:07:33 +02:00
Julien Fontanet
6f924d4e83 fix(user.delete): fix vatesfr/xo-web#901. 2016-04-25 14:33:29 +02:00
Greenkeeper
faf1508914 chore(package): update execa to version 0.3.0 (#284)
https://greenkeeper.io/
2016-04-23 17:51:56 +01:00
Julien Fontanet
7eb8152835 4.15.3 2016-04-22 16:18:21 +02:00
Julien Fontanet
8f45905831 fix(vm.deltaCopy()): delete snapshot when import fails. 2016-04-22 16:18:03 +02:00
Julien Fontanet
4ba2ffce5b fix(vm.deltaCopy()): delete snapshot when import fails. 2016-04-22 13:39:21 +02:00
Greenkeeper
ffb3659ef5 chore(package): update fs-extra to version 0.28.0 (#280)
http://greenkeeper.io/
2016-04-18 12:09:06 +01:00
Julien Fontanet
6dec07d562 signin form: fix redirect on success. 2016-04-18 11:57:58 +01:00
Julien Fontanet
afb22f3279 Merge pull request #278 from vatesfr/greenkeeper-fs-extra-0.27.0
Update fs-extra to version 0.27.0 🚀
2016-04-15 14:14:31 +02:00
greenkeeperio-bot
f2f369db64 chore(package): update fs-extra to version 0.27.0
http://greenkeeper.io/
2016-04-15 14:05:41 +02:00
Julien Fontanet
635c76db93 Deprecate host#CPUs and introduce host#cpus. 2016-04-13 10:59:29 +02:00
Julien Fontanet
5f50f1928d Merge pull request #276 from vatesfr/fix-auto-poweron
Fix auto poweron (fix vatesfr/xo-web#879).
2016-04-11 15:53:37 +02:00
Julien Fontanet
32c9ed1dc2 Fix auto poweron (fix vatesfr/xo-web#879). 2016-04-11 15:31:59 +02:00
Julien Fontanet
71741e144e Merge pull request #274 from vatesfr/abhamonr-set-vm-ram-min-max-values
api.vm: Set the min/max ram values.
2016-04-07 10:08:34 +02:00
wescoeur
f2e64cdd5e api.vm: Set the min/max ram values. 2016-04-07 09:25:45 +02:00
Julien Fontanet
afaa5d5e9e Merge pull request #275 from vatesfr/abhamonr-set-vm-cpus-max
api.vm: Set vcpus max.
2016-04-06 17:39:16 +02:00
wescoeur
d82861727d api.vm: Set vcpus max. 2016-04-06 17:32:51 +02:00
Julien Fontanet
90f0795416 Merge pull request #272 from vatesfr/abhamonr-fix-smb-backup-location
Ensure remote smb path is a directory. (fix vatesfr/xo-web#865)
2016-04-06 16:25:28 +02:00
Julien Fontanet
9efbe7771c Merge pull request #273 from vatesfr/abhamonr-consistent-stats-object
vm.stats(): Returns empty stats if none can be found.
2016-04-06 12:10:34 +02:00
wescoeur
a75caac13d Vm stats are consistent. Even without RRD stats. 2016-04-06 11:55:14 +02:00
wescoeur
279d0d20ea Ensure remote smb path is a directory. (fix vatesfr/xo-web#865) 2016-04-06 09:51:20 +02:00
Julien Fontanet
332ba96d34 ghooks: use commit-msg instead of pre-commit. 2016-04-04 11:33:30 +02:00
Julien Fontanet
3f6e5b7606 decorators/@autobind: Minor improvements. 2016-04-04 11:29:31 +02:00
Julien Fontanet
94703492fd Use http-proxy for HTTP/ws proxy. 2016-03-30 17:33:50 +02:00
Julien Fontanet
df78117617 Do not crash on error in the console proxy. 2016-03-30 17:33:50 +02:00
Julien Fontanet
909b9480e4 Better error message in console proxy. 2016-03-30 17:33:49 +02:00
Julien Fontanet
21762ac1aa Return to the correct page after sign in. 2016-03-30 17:33:49 +02:00
Julien Fontanet
412bc175b4 Merge pull request #270 from vatesfr/abhamonr-stats-object-contains-interval
Stats object contains interval attr.
2016-03-30 14:49:25 +02:00
wescoeur
dc0eb76e88 Stats object contains interval attr. 2016-03-30 14:34:37 +02:00
Julien Fontanet
2695941a3c Remove pFinally() tests, now implemented in promise-toolbox. 2016-03-29 18:05:32 +02:00
Julien Fontanet
3506be1a70 Update promise-toolbox to 0.3.2. 2016-03-29 09:54:24 +02:00
Julien Fontanet
cbf4786b39 Do not crash on unhandled error event. 2016-03-27 13:31:31 +02:00
Julien Fontanet
8dbf334208 Merge pull request #267 from vatesfr/back-to-babel-6
Back to babel 6
2016-03-25 17:37:52 +01:00
Julien Fontanet
60ba5fbc72 Merge pull request #268 from vatesfr/abhamonr-stats-with-halted-vm
Throw an error if a vm is halted and its stats are requested.
2016-03-25 17:37:27 +01:00
Julien Fontanet
c3ace0c44f Simply do npm test before git commit. 2016-03-25 17:36:37 +01:00
Olivier Lambert
8eceb90e63 add startTime 2016-03-25 17:33:34 +01:00
wescoeur
4754e19e83 Throw an error if a vm is halted and its stats are requested. 2016-03-25 15:49:52 +01:00
Julien Fontanet
a0559d0dc9 Revert "Work around Babel T7172."
This reverts commit ca8476d466.
2016-03-23 09:45:59 +01:00
Julien Fontanet
8d03ce19b0 Revert "Merge pull request #248 from vatesfr/babel-t7172"
This reverts commit f125b593bf, reversing
changes made to ca8476d466.
2016-03-23 09:43:30 +01:00
Julien Fontanet
2470d851e9 Revert "Merge pull request #266 from vatesfr/babel-5-workaround"
This reverts commit b77d3f123d, reversing
changes made to c10b0afaa8.
2016-03-23 09:41:54 +01:00
Julien Fontanet
df99f5c0a5 Revert "Merge pull request #265 from vatesfr/revert-babel-6"
This reverts commit 8907290d27, reversing
changes made to eb55cba34a.
2016-03-23 09:41:08 +01:00
111 changed files with 13492 additions and 3195 deletions

View File

@@ -1,15 +0,0 @@
{
"comments": false,
"compact": true,
"optional": [
// Experimental features.
// "minification.constantFolding",
// "minification.deadCodeElimination",
"es7.asyncFunctions",
"es7.decorators",
"es7.exportExtensions",
"es7.functionBind",
"runtime"
]
}

2
.gitignore vendored
View File

@@ -1,6 +1,8 @@
/.nyc_output/
/dist/
/node_modules/
/src/api/index.js
/src/xapi/mixins/index.js
/src/xo-mixins/index.js
npm-debug.log

View File

@@ -1,93 +0,0 @@
{
// Julien Fontanet JSHint configuration
// https://gist.github.com/julien-f/8095615
//
// Changes from defaults:
// - all enforcing options (except `++` & `--`) enabled
// - single quotes
// - indentation set to 2 instead of 4
// - almost all relaxing options disabled
// - environments are set to Node.js
//
// See http://jshint.com/docs/ for more details
"maxerr" : 50, // {int} Maximum error before stopping
// Enforcing
"bitwise" : true, // true: Prohibit bitwise operators (&, |, ^, etc.)
"camelcase" : true, // true: Identifiers must be in camelCase
"curly" : true, // true: Require {} for every new block or scope
"eqeqeq" : true, // true: Require triple equals (===) for comparison
"forin" : true, // true: Require filtering for..in loops with obj.hasOwnProperty()
"freeze" : true, // true: Prohibit overwriting prototypes of native objects (Array, Date, ...)
"immed" : true, // true: Require immediate invocations to be wrapped in parens e.g. `(function () { } ());`
"indent" : 2, // {int} Number of spaces to use for indentation
"latedef" : true, // true: Require variables/functions to be defined before being used
"newcap" : true, // true: Require capitalization of all constructor functions e.g. `new F()`
"noarg" : true, // true: Prohibit use of `arguments.caller` and `arguments.callee`
"noempty" : true, // true: Prohibit use of empty blocks
"nonbsp" : true, // true: Prohibit use of non breakable spaces
"nonew" : true, // true: Prohibit use of constructors for side-effects (without assignment)
"plusplus" : false, // true: Prohibit use of `++` & `--`
"quotmark" : "single", // Quotation mark consistency:
// false : do nothing (default)
// true : ensure whatever is used is consistent
// "single" : require single quotes
// "double" : require double quotes
"undef" : true, // true: Require all non-global variables to be declared (prevents global leaks)
"unused" : true, // true: Require all defined variables be used
"strict" : false, // true: Requires all functions run in ES5 Strict Mode
"maxcomplexity" : 7, // {int} Max cyclomatic complexity per function
"maxdepth" : 3, // {int} Max depth of nested blocks (within functions)
"maxlen" : 80, // {int} Max number of characters per line
"maxparams" : 4, // {int} Max number of formal params allowed per function
"maxstatements" : 20, // {int} Max number statements per function
// Relaxing
"asi" : false, // true: Tolerate Automatic Semicolon Insertion (no semicolons)
"boss" : false, // true: Tolerate assignments where comparisons would be expected
"debug" : false, // true: Allow debugger statements e.g. browser breakpoints.
"eqnull" : false, // true: Tolerate use of `== null`
"esnext" : true, // true: Allow ES.next (ES6) syntax (ex: `const`)
"evil" : false, // true: Tolerate use of `eval` and `new Function()`
"expr" : false, // true: Tolerate `ExpressionStatement` as Programs
"funcscope" : false, // true: Tolerate defining variables inside control statements
"globalstrict" : false, // true: Allow global "use strict" (also enables 'strict')
"iterator" : false, // true: Tolerate using the `__iterator__` property
"lastsemic" : false, // true: Tolerate omitting a semicolon for the last statement of a 1-line block
"laxbreak" : false, // true: Tolerate possibly unsafe line breakings
"laxcomma" : false, // true: Tolerate comma-first style coding
"loopfunc" : false, // true: Tolerate functions being defined in loops
"moz" : false, // true: Allow Mozilla specific syntax (extends and overrides esnext features)
// (ex: `for each`, multiple try/catch, function expression…)
"multistr" : false, // true: Tolerate multi-line strings
"notypeof" : false, // true: Tolerate typeof comparison with unknown values.
"proto" : false, // true: Tolerate using the `__proto__` property
"scripturl" : false, // true: Tolerate script-targeted URLs
"shadow" : false, // true: Allows re-define variables later in code e.g. `var x=1; x=2;`
"sub" : false, // true: Tolerate using `[]` notation when it can still be expressed in dot notation
"supernew" : false, // true: Tolerate `new function () { ... };` and `new Object;`
"validthis" : false, // true: Tolerate using this in a non-constructor function
"noyield" : false, // true: Tolerate generators without yields
// Environments
"browser" : false, // Web Browser (window, document, etc)
"browserify" : false, // Browserify (node.js code in the browser)
"couch" : false, // CouchDB
"devel" : false, // Development/debugging (alert, confirm, etc)
"dojo" : false, // Dojo Toolkit
"jquery" : false, // jQuery
"mocha" : false, // mocha
"mootools" : false, // MooTools
"node" : true, // Node.js
"nonstandard" : false, // Widely adopted globals (escape, unescape, etc)
"phantom" : false, // PhantomJS
"prototypejs" : false, // Prototype and Scriptaculous
"rhino" : false, // Rhino
"worker" : false, // Web Workers
"wsh" : false, // Windows Scripting Host
"yui" : false, // Yahoo User Interface
// Custom Globals
"globals" : {} // additional predefined global variables
}

3
.mention-bot Normal file
View File

@@ -0,0 +1,3 @@
{
"userBlacklist": [ "greenkeeper", "Wescoeur" ]
}

View File

@@ -1 +0,0 @@
--require ./better-stacks.js

View File

@@ -1,5 +1,6 @@
/examples/
example.js
example.js.map
*.example.js
*.example.js.map

View File

@@ -1,8 +1,8 @@
language: node_js
node_js:
# - 'stable'
- '4'
- '0.12'
- stable
- 6
- 4
# Use containers.
# http://docs.travis-ci.com/user/workers/container-based-infrastructure/

3
ISSUE_TEMPLATE.md Normal file
View File

@@ -0,0 +1,3 @@
# ALL ISSUES SHOULD BE CREATED IN XO-WEB'S TRACKER!
https://github.com/vatesfr/xo-web/issues

View File

@@ -19,7 +19,7 @@ ___
## Installation
Manual install procedure is [available here](https://github.com/vatesfr/xo/blob/master/doc/installation/README.md#installation).
Manual install procedure is [available here](https://xen-orchestra.com/docs/from_the_sources.html).
## Compilation

View File

@@ -1,13 +1,5 @@
Error.stackTraceLimit = 100
// Async stacks.
//
// Disabled for now as it cause a huge memory usage with
// fs.createReadStream().
// TODO: find a way to reenable.
//
// try { require('trace') } catch (_) {}
// Removes internal modules.
try {
var sep = require('path').sep

View File

@@ -7,9 +7,25 @@
// Better stack traces if possible.
require('../better-stacks')
// Use Bluebird for all promises as it provides better performance and
// less memory usage.
global.Promise = require('bluebird')
// Make unhandled rejected promises visible.
process.on('unhandledRejection', (reason) => {
console.log('[Warn] Possibly unhandled rejection:', reason && reason.stack || reason)
process.on('unhandledRejection', function (reason) {
console.warn('[Warn] Possibly unhandled rejection:', reason && reason.stack || reason)
})
;(function (EE) {
var proto = EE.prototype
var emit = proto.emit
proto.emit = function patchedError (event, error) {
if (event === 'error' && !this.listenerCount(event)) {
return console.warn('[Warn] Unhandled error event:', error && error.stack || error)
}
return emit.apply(this, arguments)
}
})(require('events').EventEmitter)
require('exec-promise')(require('../'))

View File

@@ -7,4 +7,4 @@
// Better stack traces if possible.
require('../better-stacks')
require('exec-promise')(require('../dist/logs-cli'))
require('exec-promise')(require('../dist/logs-cli').default)

View File

@@ -7,13 +7,16 @@ var gulp = require('gulp')
var babel = require('gulp-babel')
var coffee = require('gulp-coffee')
var plumber = require('gulp-plumber')
var rimraf = require('rimraf')
var sourceMaps = require('gulp-sourcemaps')
var watch = require('gulp-watch')
var join = require('path').join
// ===================================================================
var SRC_DIR = __dirname + '/src'
var DIST_DIR = __dirname + '/dist'
var SRC_DIR = join(__dirname, 'src')
var DIST_DIR = join(__dirname, 'dist')
var PRODUCTION = process.argv.indexOf('--production') !== -1
@@ -36,6 +39,10 @@ function src (patterns) {
// ===================================================================
gulp.task(function clean (cb) {
rimraf(DIST_DIR, cb)
})
gulp.task(function buildCoffee () {
return src('**/*.coffee')
.pipe(sourceMaps.init())
@@ -51,7 +58,7 @@ gulp.task(function buildCoffee () {
})
gulp.task(function buildEs6 () {
return src('**/*.js')
return src([ '**/*.js', '!*.spec.js' ])
.pipe(sourceMaps.init())
.pipe(babel())
.pipe(sourceMaps.write('.'))
@@ -60,4 +67,4 @@ gulp.task(function buildEs6 () {
// ===================================================================
gulp.task('build', gulp.parallel('buildCoffee', 'buildEs6'))
gulp.task('build', gulp.series('clean', gulp.parallel('buildCoffee', 'buildEs6')))

View File

@@ -4,8 +4,8 @@
// Enable xo logs by default.
if (process.env.DEBUG === undefined) {
process.env.DEBUG = 'app-conf,xen-api,xo:*'
process.env.DEBUG = 'app-conf,xo:*,-xo:api'
}
// Import the real main module.
module.exports = require('./dist')
module.exports = require('./dist').default

View File

@@ -1,6 +1,6 @@
{
"name": "xo-server",
"version": "4.15.2",
"version": "5.7.2",
"license": "AGPL-3.0",
"description": "Server part of Xen-Orchestra",
"keywords": [
@@ -13,6 +13,10 @@
"bugs": {
"url": "https://github.com/vatesfr/xo-web/issues"
},
"repository": {
"type": "git",
"url": "git://github.com/vatesfr/xo-server.git"
},
"author": "Julien Fontanet <julien.fontanet@vates.fr>",
"preferGlobal": true,
"files": [
@@ -21,153 +25,156 @@
"dist/",
"config.json",
"index.js",
"signin.jade"
"signin.pug"
],
"directories": {
"bin": "bin"
},
"repository": {
"type": "git",
"url": "git://github.com/vatesfr/xo-server.git"
},
"engines": {
"node": ">=0.12 <5"
"node": ">=4.5"
},
"dependencies": {
"@marsaud/smb2-promise": "^0.2.0",
"app-conf": "^0.4.0",
"babel-runtime": "^5",
"base64url": "^1.0.5",
"blocked": "^1.1.0",
"bluebird": "^3.1.1",
"body-parser": "^1.13.3",
"@marsaud/smb2-promise": "^0.2.1",
"@nraynaud/struct-fu": "^1.0.1",
"app-conf": "^0.4.1",
"archiver": "^1.3.0",
"arp-a": "^0.5.1",
"babel-runtime": "^6.23.0",
"base64url": "^2.0.0",
"bind-property-descriptor": "^0.0.0",
"blocked": "^1.2.1",
"bluebird": "^3.5.0",
"body-parser": "^1.17.1",
"connect-flash": "^0.1.1",
"cookie": "^0.2.3",
"cookie-parser": "^1.3.5",
"cron": "^1.0.9",
"d3-time-format": "^0.3.0",
"debug": "^2.1.3",
"escape-string-regexp": "^1.0.3",
"event-to-promise": "^0.7.0",
"cookie": "^0.3.1",
"cookie-parser": "^1.4.3",
"cron": "^1.2.1",
"d3-time-format": "^2.0.5",
"debug": "^2.6.3",
"decorator-synchronized": "^0.2.3",
"escape-string-regexp": "^1.0.5",
"event-to-promise": "^0.8.0",
"exec-promise": "^0.6.1",
"execa": "^0.2.2",
"express": "^4.13.3",
"express-session": "^1.11.3",
"fatfs": "^0.10.3",
"fs-extra": "^0.26.2",
"fs-promise": "^0.4.1",
"get-stream": "^1.1.0",
"hashy": "~0.4.2",
"helmet": "^1.1.0",
"highland": "^2.5.1",
"http-server-plus": "^0.6.4",
"human-format": "^0.6.0",
"is-my-json-valid": "^2.12.2",
"jade": "^1.11.0",
"js-yaml": "^3.2.7",
"json-rpc-peer": "^0.11.0",
"json5": "^0.4.0",
"execa": "^0.6.3",
"express": "^4.15.2",
"express-session": "^1.15.1",
"fatfs": "^0.10.4",
"from2": "^2.3.0",
"fs-extra": "^2.1.2",
"fs-promise": "^2.0.1",
"golike-defer": "^0.0.0",
"hashy": "~0.6.1",
"helmet": "^3.5.0",
"highland": "^2.10.5",
"http-proxy": "^1.16.2",
"http-server-plus": "^0.8.0",
"human-format": "^0.8.0",
"is-my-json-valid": "^2.16.0",
"is-redirect": "^1.0.0",
"js-yaml": "^3.8.2",
"json-rpc-peer": "^0.13.1",
"json5": "^0.5.1",
"julien-f-source-map-support": "0.0.0",
"julien-f-unzip": "^0.2.1",
"kindof": "^2.0.0",
"level": "^1.3.0",
"level": "^1.6.0",
"level-party": "^3.0.4",
"level-sublevel": "^6.5.2",
"leveldown": "^1.4.2",
"lodash.assign": "^4.0.3",
"lodash.bind": "^4.1.0",
"lodash.difference": "^4.1.0",
"lodash.endswith": "^4.0.0",
"lodash.every": "^4.0.0",
"lodash.filter": "^4.2.0",
"lodash.find": "^4.2.0",
"lodash.findindex": "^4.2.0",
"lodash.foreach": "^4.1.0",
"lodash.get": "^4.1.2",
"lodash.has": "^4.2.0",
"lodash.includes": "^4.1.0",
"lodash.invert": "^4.0.1",
"lodash.isarray": "^4.0.0",
"lodash.isboolean": "^3.0.2",
"lodash.isempty": "^4.1.2",
"lodash.isfunction": "^3.0.1",
"lodash.isinteger": "^4.0.0",
"lodash.isobject": "^3.0.0",
"lodash.isstring": "^4.0.1",
"lodash.keys": "^4.0.3",
"lodash.map": "^4.2.0",
"lodash.pick": "^4.1.0",
"lodash.pickby": "^4.2.0",
"lodash.remove": "^4.0.1",
"lodash.some": "^4.2.0",
"lodash.sortby": "^4.2.0",
"lodash.startswith": "^4.0.0",
"lodash.trim": "^4.2.0",
"level-sublevel": "^6.6.1",
"leveldown": "^1.6.0",
"lodash": "^4.17.4",
"make-error": "^1",
"micromatch": "^2.3.2",
"micromatch": "^2.3.11",
"minimist": "^1.2.0",
"ms": "^0.7.1",
"multikey-hash": "^1.0.1",
"ndjson": "^1.4.3",
"moment-timezone": "^0.5.11",
"ms": "^1.0.0",
"multikey-hash": "^1.0.4",
"ndjson": "^1.5.0",
"parse-pairs": "^0.2.2",
"partial-stream": "0.0.0",
"passport": "^0.3.0",
"passport": "^0.3.2",
"passport-local": "^1.0.0",
"promise-toolbox": "^0.2.0",
"pretty-format": "^19.0.0",
"promise-toolbox": "^0.8.2",
"proxy-agent": "^2.0.0",
"proxy-http-request": "0.1.0",
"redis": "^2.0.1",
"schema-inspector": "^1.5.1",
"semver": "^5.1.0",
"serve-static": "^1.9.2",
"stack-chain": "^1.3.3",
"through2": "^2.0.0",
"struct-fu": "^1.0.0",
"trace": "^2.0.1",
"ws": "~1.0.1",
"xen-api": "^0.7.4",
"xml2js": "~0.4.6",
"xo-acl-resolver": "0.0.0",
"xo-collection": "^0.4.0",
"xo-remote-parser": "^0.1.0"
"pug": "^2.0.0-beta11",
"redis": "^2.7.1",
"schema-inspector": "^1.6.8",
"semver": "^5.3.0",
"serve-static": "^1.12.1",
"split-lines": "^1.1.0",
"stack-chain": "^1.3.7",
"tar-stream": "^1.5.2",
"through2": "^2.0.3",
"tmp": "^0.0.31",
"uuid": "^3.0.1",
"ws": "^2.2.2",
"xen-api": "^0.10.0-2",
"xml2js": "~0.4.17",
"xo-acl-resolver": "^0.2.3",
"xo-collection": "^0.4.1",
"xo-common": "^0.1.1",
"xo-remote-parser": "^0.3",
"xo-vmdk-to-vhd": "0.0.12"
},
"devDependencies": {
"babel-eslint": "^4.0.10",
"chai": "^3.0.0",
"dependency-check": "^2.4.0",
"ghooks": "^1.0.3",
"babel-eslint": "^7.2.1",
"babel-plugin-lodash": "^3.2.11",
"babel-plugin-transform-decorators-legacy": "^1.3.4",
"babel-plugin-transform-runtime": "^6.23.0",
"babel-preset-env": "^1.2.2",
"babel-preset-stage-0": "^6.22.0",
"dependency-check": "^2.8.0",
"gulp": "git://github.com/gulpjs/gulp#4.0",
"gulp-babel": "^5",
"gulp-coffee": "^2.3.1",
"gulp-plumber": "^1.0.0",
"gulp-sourcemaps": "^1.5.1",
"gulp-watch": "^4.2.2",
"leche": "^2.1.1",
"mocha": "^2.2.1",
"must": "^0.13.1",
"sinon": "^1.14.1",
"standard": "^5.2.1"
"gulp-babel": "^6",
"gulp-coffee": "^2.3.4",
"gulp-plumber": "^1.1.0",
"gulp-sourcemaps": "^2.4.1",
"gulp-watch": "^4.3.11",
"husky": "^0.13.2",
"index-modules": "^0.3.0",
"jest": "^19.0.2",
"rimraf": "^2.6.1",
"standard": "^9.0.2"
},
"scripts": {
"build": "npm run build-indexes && gulp build --production",
"build-indexes": "./tools/generate-index src/api src/xo-mixins",
"dev": "npm run build-indexes && gulp build",
"lint": "standard",
"build": "gulp build --production",
"commitmsg": "npm test",
"dev": "gulp build",
"dev-test": "jest --bail --watch",
"posttest": "standard && dependency-check -i constant-stream ./package.json",
"prebuild": "index-modules src/api src/xapi/mixins src/xo-mixins",
"predev": "npm run prebuild",
"prepublish": "npm run build",
"start": "node bin/xo-server",
"test": "mocha --opts .mocha.opts \"dist/**/*.spec.js\"",
"posttest": "npm run lint && dependency-check ./package.json",
"prerelease": "git checkout next-release && git pull --ff-only && git checkout stable && git pull --ff-only && git merge next-release",
"release": "npm version",
"postrelease": "git checkout master && git merge --ff-only stable && git checkout next-release && git merge --ff-only stable"
"test": "jest"
},
"config": {
"ghooks": {
"pre-commit": "npm it"
}
"babel": {
"plugins": [
"lodash",
"transform-decorators-legacy",
"transform-runtime"
],
"presets": [
[
"env",
{
"targets": {
"node": 4
}
}
],
"stage-0"
]
},
"jest": {
"roots": [
"<rootDir>/src"
],
"testRegex": "\\.spec\\.js$"
},
"standard": {
"ignore": [
"dist/**"
"dist"
],
"parser": "babel-eslint"
}

View File

@@ -1,11 +1,17 @@
# Example XO-Server configuration.
# BE *VERY* CAREFUL WHEN EDITING!
# YAML FILES ARE SUPER SUPER SENSITIVE TO MISTAKES IN WHITESPACE OR ALIGNMENT!
# visit http://www.yamllint.com/ to validate this file as needed
#=====================================================================
# Example XO-Server configuration.
#
# This file is automatically looking for at the following places:
# - `$HOME/.config/xo-server/config.yaml`
# - `/etc/xo-server/config.yaml`
#
# The first entries have priority.
#
# Note: paths are relative to the configuration file.
#=====================================================================
@@ -43,16 +49,17 @@ http:
# Hosts & ports on which to listen.
#
# By default, the server listens on 0.0.0.0:80.
# By default, the server listens on [::]:80.
listen:
# Basic HTTP.
-
# Address on which the server is listening on.
#
# Sets it to '127.0.0.1' to listen only on the local host.
# Sets it to 'localhost' for IP to listen only on the local host.
#
# Default: '0.0.0.0' (all addresses)
#hostname: '127.0.0.1'
# Default: all IPv6 addresses if available, otherwise all IPv4
# addresses.
#hostname: 'localhost'
# Port on which the server is listening on.
#
@@ -117,10 +124,23 @@ http:
# Connection to the Redis server.
redis:
# Syntax: redis://[db[:password]@]hostname[:port]
# Unix sockets can be used
#
# Default: redis://localhost:6379
#uri: ''
# Default: undefined
#socket: /var/run/redis/redis.sock
# Syntax: redis://[db[:password]@]hostname[:port][/db-number]
#
# Default: redis://localhost:6379/0
#uri: redis://redis.company.lan/42
# List of aliased commands.
#
# See http://redis.io/topics/security#disabling-of-specific-commands
#renameCommands:
# del: '3dda29ad-3015-44f9-b13b-fa570de92489'
# srem: '3fd758c9-5610-4e9d-a058-dbf4cb6d8bf0'
# Directory containing the database of XO.
# Currently used for logs.

View File

@@ -1,59 +0,0 @@
doctype html
html
head
meta(charset = 'utf-8')
meta(http-equiv = 'X-UA-Compatible' content = 'IE=edge,chrome=1')
meta(name = 'viewport' content = 'width=device-width, initial-scale=1.0')
title Xen Orchestra
meta(name = 'author' content = 'Vates SAS')
link(rel = 'stylesheet' href = 'styles/main.css')
body
.container
.row-login
.page-header
img(src = 'images/logo_small.png')
h2 Xen Orchestra
form.form-horizontal(action = 'signin/local' method = 'post')
fieldset
legend.login
h3 Sign in
if error
p.text-danger #{error}
.form-group
.col-sm-12
.input-group
span.input-group-addon
i.xo-icon-user.fa-fw
input.form-control.input-sm(
name = 'username'
type = 'text'
placeholder = 'Username'
required
)
.form-group
.col-sm-12
.input-group
span.input-group-addon
i.fa.fa-key.fa-fw
input.form-control.input-sm(
name = 'password'
type = 'password'
placeholder = 'Password'
required
)
.form-group
.col-sm-5
.checkbox
label
input(
name = 'remember-me'
type = 'checkbox'
)
| Remember me
.form-group
.col-sm-12
button.btn.btn-login.btn-block.btn-success
i.fa.fa-sign-in
| Sign in
each label, id in strategies
div: a(href = 'signin/' + id) Sign in with #{label}

50
signin.pug Normal file
View File

@@ -0,0 +1,50 @@
doctype html
html
head
meta(charset = 'utf-8')
meta(http-equiv = 'X-UA-Compatible' content = 'IE=edge,chrome=1')
meta(name = 'viewport' content = 'width=device-width, initial-scale=1.0')
title Xen Orchestra
meta(name = 'author' content = 'Vates SAS')
link(rel = 'stylesheet' href = 'index.css')
body(style = 'display: flex; height: 100vh;')
div(style = 'margin: auto; width: 20em;')
div.mb-2(style = 'display: flex;')
img(src = 'assets/logo.png' style = 'margin: auto;')
h2.text-xs-center.mb-2 Xen Orchestra
form(action = 'signin/local' method = 'post')
fieldset
if error
p.text-danger #{error}
.input-group.mb-1
span.input-group-addon
i.xo-icon-user.fa-fw
input.form-control(
name = 'username'
type = 'text'
placeholder = 'Username'
required
)
.input-group.mb-1
span.input-group-addon
i.fa.fa-key.fa-fw
input.form-control(
name = 'password'
type = 'password'
placeholder = 'Password'
required
)
.checkbox
label
input(
name = 'remember-me'
type = 'checkbox'
)
| &nbsp;
| Remember me
div
button.btn.btn-block.btn-info
i.fa.fa-sign-in
| Sign in
each label, id in strategies
div: a(href = 'signin/' + id) Sign in with #{label}

View File

@@ -1,70 +0,0 @@
import {JsonRpcError} from 'json-rpc-peer'
// ===================================================================
// Export standard JSON-RPC errors.
export {
InvalidJson,
InvalidParameters,
InvalidRequest,
JsonRpcError,
MethodNotFound
} from 'json-rpc-peer'
// -------------------------------------------------------------------
export class NotImplemented extends JsonRpcError {
constructor () {
super('not implemented', 0)
}
}
// -------------------------------------------------------------------
export class NoSuchObject extends JsonRpcError {
constructor (id, type) {
super('no such object', 1, {id, type})
}
}
// -------------------------------------------------------------------
export class Unauthorized extends JsonRpcError {
constructor () {
super('not authenticated or not enough permissions', 2)
}
}
// -------------------------------------------------------------------
export class InvalidCredential extends JsonRpcError {
constructor () {
super('invalid credential', 3)
}
}
// -------------------------------------------------------------------
export class AlreadyAuthenticated extends JsonRpcError {
constructor () {
super('already authenticated', 4)
}
}
// -------------------------------------------------------------------
export class ForbiddenOperation extends JsonRpcError {
constructor (operation, reason) {
super(`forbidden operation: ${operation}`, 5, reason)
}
}
// -------------------------------------------------------------------
// To be used with a user-readable message.
// The message can be destined to be displayed to the front-end user.
export class GenericError extends JsonRpcError {
constructor (message) {
super(message, 6)
}
}

0
src/api/.index-modules Normal file
View File

98
src/api/backup.js Normal file
View File

@@ -0,0 +1,98 @@
import archiver from 'archiver'
import { basename } from 'path'
import { format } from 'json-rpc-peer'
import { forEach } from 'lodash'
// ===================================================================
// List the VM backups stored on the given remote.
export function list (params) {
  return this.listVmBackups(params.remote)
}

list.permission = 'admin'

list.params = {
  remote: { type: 'string' }
}
// -------------------------------------------------------------------
// Scan a backed-up disk on a remote and return its metadata.
export function scanDisk (params) {
  const { remote, disk } = params
  return this.scanDiskBackup(remote, disk)
}

scanDisk.permission = 'admin'

scanDisk.params = {
  remote: { type: 'string' },
  disk: { type: 'string' }
}
// -------------------------------------------------------------------
// Scan the files of a (possibly partitioned) disk backup at the given path.
export function scanFiles (params) {
  const { remote, disk, partition, path } = params
  return this.scanFilesInDiskBackup(remote, disk, partition, path)
}

scanFiles.permission = 'admin'

scanFiles.params = {
  remote: { type: 'string' },
  disk: { type: 'string' },
  partition: { type: 'string', optional: true },
  path: { type: 'string' }
}
// -------------------------------------------------------------------
// HTTP handler: stream the requested backup files to the response.
//
// A single file is sent as-is; multiple files are bundled into an
// archive of the requested format (e.g. zip).
function handleFetchFiles (req, res, { remote, disk, partition, paths, format: archiveFormat }) {
  this.fetchFilesInDiskBackup(remote, disk, partition, paths).then(files => {
    res.setHeader('content-disposition', 'attachment')
    res.setHeader('content-type', 'application/octet-stream')

    // Send a lone file directly, no need to archive it.
    if (paths.length === 1) {
      files[0].pipe(res)
      return
    }

    const archive = archiver(archiveFormat)
    archive.on('error', error => {
      console.error(error)
      res.end(format.error(0, error))
    })

    // Pipe to the response *before* finalizing so the archive streams
    // out as it is built instead of accumulating in memory.
    archive.pipe(res)

    forEach(files, file => {
      archive.append(file, { name: basename(file.path) })
    })
    archive.finalize()
  }).catch(error => {
    console.error(error)
    res.writeHead(500)
    res.end(format.error(0, error))
  })
}

// Register a one-shot HTTP endpoint from which the requested backup
// files can be downloaded, and return its URL.
//
// When several paths are requested the download name is a timestamped
// `restore_<date>.<format>` archive, otherwise the file's own name.
export async function fetchFiles ({ format = 'zip', ...params }) {
  const fileName = params.paths.length > 1
    ? `restore_${new Date().toJSON()}.${format}`
    : basename(params.paths[0])

  return this.registerHttpRequest(handleFetchFiles, { ...params, format }, {
    suffix: encodeURI(`/${fileName}`)
  }).then(url => ({ $getFrom: url }))
}

fetchFiles.permission = 'admin'

fetchFiles.params = {
  remote: { type: 'string' },
  disk: { type: 'string' },
  format: { type: 'string', optional: true },
  partition: { type: 'string', optional: true },
  paths: {
    type: 'array',
    items: { type: 'string' },
    minLength: 1
  }
}

View File

@@ -1,14 +1,15 @@
$debug = (require 'debug') 'xo:api:vm'
$find = require 'lodash.find'
$findIndex = require 'lodash.findindex'
$forEach = require 'lodash.foreach'
endsWith = require 'lodash.endswith'
startsWith = require 'lodash.startswith'
$find = require 'lodash/find'
$findIndex = require 'lodash/findIndex'
$forEach = require 'lodash/forEach'
endsWith = require 'lodash/endsWith'
startsWith = require 'lodash/startsWith'
{coroutine: $coroutine} = require 'bluebird'
{format} = require 'json-rpc-peer'
{
extractProperty,
parseXml,
promisify
mapToArray,
parseXml
} = require '../utils'
#=====================================================================
@@ -261,6 +262,42 @@ stats.resolve = {
exports.stats = stats;
#---------------------------------------------------------------------
handleInstallSupplementalPack = $coroutine (req, res, { hostId }) ->
xapi = @getXapi(hostId)
# Timeout seems to be broken in Node 4.
# See https://github.com/nodejs/node/issues/3319
req.setTimeout(43200000) # 12 hours
req.length = req.headers['content-length']
try
yield xapi.installSupplementalPack(req, { hostId })
res.end(format.response(0))
catch e
res.writeHead(500)
res.end(format.error(0, new Error(e.message)))
return
installSupplementalPack = $coroutine ({host}) ->
return {
$sendTo: yield @registerHttpRequest(handleInstallSupplementalPack, { hostId: host.id })
}
installSupplementalPack.description = 'installs supplemental pack from ISO file'
installSupplementalPack.params = {
host: { type: 'string' }
}
installSupplementalPack.resolve = {
host: ['host', 'host', 'admin']
}
exports.installSupplementalPack = installSupplementalPack;
#=====================================================================
Object.defineProperty(exports, '__esModule', {

44
src/api/ip-pool.js Normal file
View File

@@ -0,0 +1,44 @@
import { unauthorized } from 'xo-common/api-errors'
// Create a new IP pool from the given properties.
export function create (properties) {
  return this.createIpPool(properties)
}

create.permission = 'admin'

create.description = 'Creates a new ipPool'
// -------------------------------------------------------------------
// Delete the IP pool identified by `id`.
function delete_ (params) {
  return this.deleteIpPool(params.id)
}
export { delete_ as delete }

delete_.permission = 'admin'

delete_.description = 'Delete an ipPool'
// -------------------------------------------------------------------
// List IP pools.
//
// Admins may list any user's pools (selected via `params.userId`);
// other authenticated users only see their own.
export function getAll (params) {
  const { user } = this
  if (!user) {
    throw unauthorized()
  }

  const userId = user.permission === 'admin'
    ? params && params.userId
    : user.id

  return this.getAllIpPools(userId)
}

getAll.description = 'List all ipPools'
// -------------------------------------------------------------------
// Update an existing IP pool: `id` selects the pool, every other
// property is forwarded as a new value.
export function set (params) {
  const { id, ...props } = params
  return this.updateIpPool(id, props)
}

set.permission = 'admin'

set.description = 'Allow to modify an existing ipPool'

View File

@@ -18,7 +18,11 @@ get.params = {
}
export async function create ({job}) {
return (await this.createJob(this.session.get('user_id'), job)).id
if (!job.userId) {
job.userId = this.session.get('user_id')
}
return (await this.createJob(job)).id
}
create.permission = 'admin'
@@ -27,7 +31,9 @@ create.params = {
job: {
type: 'object',
properties: {
userId: {type: 'string', optional: true},
name: {type: 'string', optional: true},
timeout: {type: 'number', optional: true},
type: {type: 'string'},
key: {type: 'string'},
method: {type: 'string'},
@@ -38,14 +44,7 @@ create.params = {
items: {
type: 'array',
items: {
type: 'object',
properties: {
type: {type: 'string'},
values: {
type: 'array',
items: {type: 'object'}
}
}
type: 'object'
}
}
},
@@ -67,9 +66,10 @@ set.params = {
properties: {
id: {type: 'string'},
name: {type: 'string', optional: true},
type: {type: 'string'},
key: {type: 'string'},
method: {type: 'string'},
timeout: {type: ['number', 'null'], optional: true},
type: {type: 'string', optional: true},
key: {type: 'string', optional: true},
method: {type: 'string', optional: true},
paramsVector: {
type: 'object',
properties: {
@@ -77,14 +77,7 @@ set.params = {
items: {
type: 'array',
items: {
type: 'object',
properties: {
type: {type: 'string'},
values: {
type: 'array',
items: {type: 'object'}
}
}
type: 'object'
}
}
},

View File

@@ -16,13 +16,23 @@ export async function get ({namespace}) {
}
get.description = 'returns logs list for one namespace'
get.params = {
namespace: { type: 'string' }
}
get.permission = 'admin'
// -------------------------------------------------------------------
async function delete_ ({namespace, id}) {
const logger = await this.getLogger(namespace)
logger.del(id)
}
delete_.description = 'deletes on or several logs from a namespace'
delete_.description = 'deletes one or several logs from a namespace'
delete_.params = {
id: { type: [ 'array', 'string' ] },
namespace: { type: 'string' }
}
delete_.permission = 'admin'
export {delete_ as delete}

View File

@@ -1,3 +1,9 @@
import { mapToArray } from '../utils'
// Bond modes accepted by XAPI for bonded networks.
export function getBondModes () {
  return [ 'balance-slb', 'active-backup', 'lacp' ]
}
export async function create ({ pool, name, description, pif, mtu = 1500, vlan = 0 }) {
return this.getXapi(pool).createNetwork({
name,
@@ -24,6 +30,81 @@ create.permission = 'admin'
// =================================================================
// Create a bonded network on a pool over the given PIFs.
export async function createBonded ({ pool, name, description, pifs, mtu = 1500, mac, bondMode }) {
  const xapi = this.getXapi(pool)
  // Resolve the XO PIF ids into XAPI ids.
  const pifIds = mapToArray(pifs, pif =>
    this.getObject(pif, 'PIF')._xapiId
  )

  return xapi.createBondedNetwork({
    name,
    description,
    pifIds,
    mtu: +mtu,
    mac,
    bondMode
  })
}

createBonded.params = {
  pool: { type: 'string' },
  name: { type: 'string' },
  description: { type: 'string', optional: true },
  pifs: {
    type: 'array',
    items: {
      type: 'string'
    }
  },
  mtu: { type: ['integer', 'string'], optional: true },
  // RegExp since schema-inspector does not provide a param check based on an enumeration
  bondMode: { type: 'string', pattern: new RegExp(`^(${getBondModes().join('|')})$`) }
}

createBonded.resolve = {
  pool: ['pool', 'pool', 'administrate']
}

createBonded.permission = 'admin'

createBonded.description = 'Create a bonded network. bondMode can be balance-slb, active-backup or lacp'
// ===================================================================
// Update mutable properties (label, description, default lock state)
// of a network.
export async function set (params) {
  const {
    network,
    name_description: nameDescription,
    name_label: nameLabel,
    defaultIsLocked
  } = params

  await this.getXapi(network).setNetworkProperties(network._xapiId, {
    nameDescription,
    nameLabel,
    defaultIsLocked
  })
}

set.params = {
  id: {
    type: 'string'
  },
  name_label: {
    type: 'string',
    optional: true
  },
  name_description: {
    type: 'string',
    optional: true
  },
  defaultIsLocked: {
    type: 'boolean',
    optional: true
  }
}

set.resolve = {
  network: ['id', 'network', 'administrate']
}
// =================================================================
// Destroy the given network through its pool's XAPI connection.
export async function delete_ ({ network }) {
  const xapi = this.getXapi(network)
  return xapi.deleteNetwork(network._xapiId)
}

View File

@@ -1,7 +1,3 @@
import {
GenericError
} from '../api-errors'
// FIXME: too low level, should be removed.
// ===================================================================
@@ -24,17 +20,8 @@ delete_.resolve = {
// ===================================================================
// Disconnect
export async function disconnect ({PBD}) {
// TODO: check if PBD is attached before
try {
await this.getXapi(PBD).call('PBD.unplug', PBD._xapiRef)
} catch (error) {
if (error.code === 'VDI_IN_USE') {
throw new GenericError('VDI in use')
} else {
throw error
}
}
export async function disconnect ({ pbd }) {
return this.getXapi(pbd).unplugPbd(pbd._xapiId)
}
disconnect.params = {
@@ -42,7 +29,7 @@ disconnect.params = {
}
disconnect.resolve = {
PBD: ['id', 'PBD', 'administrate']
pbd: ['id', 'PBD', 'administrate']
}
// ===================================================================

View File

@@ -1,5 +1,15 @@
// TODO: too low level, move into host.
import { IPV4_CONFIG_MODES, IPV6_CONFIG_MODES } from '../xapi'
// Expose the IPv4 PIF configuration modes supported by XAPI.
// The list itself comes from the xapi layer (IPV4_CONFIG_MODES).
export function getIpv4ConfigurationModes () {
  return IPV4_CONFIG_MODES
}
// Expose the IPv6 PIF configuration modes supported by XAPI.
// The list itself comes from the xapi layer (IPV6_CONFIG_MODES).
export function getIpv6ConfigurationModes () {
  return IPV6_CONFIG_MODES
}
// ===================================================================
// Delete
@@ -66,3 +76,18 @@ reconfigureIp.params = {
reconfigureIp.resolve = {
pif: ['id', 'PIF', 'administrate']
}
// ===================================================================
// Reconfigure the VLAN of a PIF.
export async function editPif (params) {
  const { pif, vlan } = params
  await this.getXapi(pif).editPif(pif._xapiId, { vlan })
}

editPif.params = {
  id: { type: 'string' },
  vlan: { type: ['integer', 'string'] }
}

editPif.resolve = {
  pif: ['id', 'PIF', 'administrate']
}

View File

@@ -102,3 +102,24 @@ purgeConfiguration.params = {
}
purgeConfiguration.permission = 'admin'
// ---------------------------------------------------------------------
// Run a plugin's self-test against its current configuration,
// optionally passing extra test data.
export async function test (params) {
  await this.testPlugin(params.id, params.data)
}

test.description = 'Test a plugin with its current configuration'

test.params = {
  id: {
    type: 'string'
  },
  data: {
    optional: true
  }
}

test.permission = 'admin'
// ---------------------------------------------------------------------

View File

@@ -1,4 +1,4 @@
import {GenericError} from '../api-errors'
import { format } from 'json-rpc-peer'
// ===================================================================
@@ -35,21 +35,21 @@ set.resolve = {
// -------------------------------------------------------------------
export async function setDefaultSr ({pool, sr}) {
await this.getXapi(pool).setDefaultSr(sr._xapiId)
export async function setDefaultSr ({ sr }) {
await this.hasPermissions(this.user.id, [ [ sr.$pool, 'administrate' ] ])
await this.getXapi(sr).setDefaultSr(sr._xapiId)
}
setDefaultSr.permission = '' // signed in
setDefaultSr.params = {
pool: {
type: 'string'
},
sr: {
type: 'string'
}
}
setDefaultSr.resolve = {
pool: ['pool', 'pool', 'administrate'],
sr: ['sr', 'SR']
}
// -------------------------------------------------------------------
@@ -70,11 +70,28 @@ installPatch.params = {
installPatch.resolve = {
pool: ['pool', 'pool', 'administrate']
}
// -------------------------------------------------------------------
export async function installAllPatches ({ pool }) {
await this.getXapi(pool).installAllPoolPatchesOnAllHosts()
}
installAllPatches.params = {
pool: {
type: 'string'
}
}
installAllPatches.resolve = {
pool: ['pool', 'pool', 'administrate']
}
installAllPatches.description = 'Install automatically all patches for every hosts of a pool'
// -------------------------------------------------------------------
async function handlePatchUpload (req, res, {pool}) {
const {headers: {['content-length']: contentLength}} = req
const contentLength = req.headers['content-length']
if (!contentLength) {
res.writeHead(411)
res.end('Content length is mandatory')
@@ -106,12 +123,7 @@ export {uploadPatch as patch}
// -------------------------------------------------------------------
export async function mergeInto ({ source, target, force }) {
try {
await this.mergeXenPools(source._xapiId, target._xapiId, force)
} catch (e) {
// FIXME: should we expose plain XAPI error messages?
throw new GenericError(e.message)
}
await this.mergeXenPools(source._xapiId, target._xapiId, force)
}
mergeInto.params = {
@@ -130,7 +142,7 @@ mergeInto.resolve = {
export async function getLicenseState ({pool}) {
return this.getXapi(pool).call(
'pool.get_license_state',
pool._xapiId.$ref,
pool._xapiId.$ref
)
}
@@ -143,3 +155,38 @@ getLicenseState.params = {
getLicenseState.resolve = {
pool: ['pool', 'pool', 'administrate']
}
// -------------------------------------------------------------------
// HTTP handler: receive a supplemental-pack ISO upload and install it
// on every host of the pool.
async function handleInstallSupplementalPack (req, res, { poolId }) {
  const xapi = this.getXapi(poolId)

  // Timeout seems to be broken in Node 4.
  // See https://github.com/nodejs/node/issues/3319
  req.setTimeout(43200000) // 12 hours
  req.length = req.headers['content-length']

  try {
    await xapi.installSupplementalPackOnAllHosts(req)
    res.end(format.response(0))
  } catch (e) {
    res.writeHead(500)
    res.end(format.error(0, new Error(e.message)))
  }
}

// Register an HTTP upload endpoint for the supplemental pack and return
// its URL to the client.
export async function installSupplementalPack ({ pool }) {
  const url = await this.registerHttpRequest(handleInstallSupplementalPack, { poolId: pool.id })
  return { $sendTo: url }
}

installSupplementalPack.description = 'installs supplemental pack from ISO file on all hosts'

installSupplementalPack.params = {
  pool: { type: 'string' }
}

installSupplementalPack.resolve = {
  pool: ['pool', 'pool', 'admin']
}

View File

@@ -1,12 +1,12 @@
export async function getAll () {
return /* await */ this.getAllRemotes()
return this.getAllRemotes()
}
getAll.permission = 'admin'
getAll.description = 'Gets all existing fs remote points'
export async function get ({id}) {
return /* await */ this.getRemote(id)
return this.getRemote(id)
}
get.permission = 'admin'
@@ -15,8 +15,18 @@ get.params = {
id: {type: 'string'}
}
export async function test ({id}) {
return this.testRemote(id)
}
test.permission = 'admin'
test.description = 'Performs a read/write matching test on a remote point'
test.params = {
id: {type: 'string'}
}
export async function list ({id}) {
return /* await */ this.listRemoteBackups(id)
return this.listRemoteBackups(id)
}
list.permission = 'admin'
@@ -26,7 +36,7 @@ list.params = {
}
export async function create ({name, url}) {
return /* await */ this.createRemote({name, url})
return this.createRemote({name, url})
}
create.permission = 'admin'

View File

@@ -1,6 +1,6 @@
import {
Unauthorized
} from '../api-errors'
unauthorized
} from 'xo-common/api-errors'
// ===================================================================
@@ -51,11 +51,12 @@ delete_.params = {
// -------------------------------------------------------------------
export function set ({ id, name, subjects, objects, limits }) {
export function set ({ id, name, subjects, objects, ipPools, limits }) {
return this.updateResourceSet(id, {
limits,
name,
objects,
ipPools,
subjects
})
}
@@ -84,6 +85,13 @@ set.params = {
},
optional: true
},
ipPools: {
type: 'array',
items: {
type: 'string'
},
optional: true
},
limits: {
type: 'object',
optional: true
@@ -109,12 +117,14 @@ get.params = {
export async function getAll () {
const { user } = this
if (!user) {
throw new Unauthorized()
throw unauthorized()
}
return this.getAllResourceSets(user.id)
}
getAll.description = 'Get the list of all existing resource set'
// -------------------------------------------------------------------
export function addObject ({ id, object }) {
@@ -227,3 +237,4 @@ export function recomputeAllLimits () {
}
recomputeAllLimits.permission = 'admin'
recomputeAllLimits.description = 'Recompute manually the current resource set usage'

View File

@@ -1,3 +1,5 @@
export async function getAll () {
return /* await */ this.getRoles()
}
getAll.description = 'Returns the list of all existing roles'

View File

@@ -17,8 +17,8 @@ get.params = {
id: {type: 'string'}
}
export async function create ({jobId, cron, enabled, name}) {
return /* await */ this.createSchedule(this.session.get('user_id'), {job: jobId, cron, enabled, name})
export async function create ({ jobId, cron, enabled, name, timezone }) {
return /* await */ this.createSchedule(this.session.get('user_id'), { job: jobId, cron, enabled, name, timezone })
}
create.permission = 'admin'
@@ -30,8 +30,8 @@ create.params = {
name: {type: 'string', optional: true}
}
export async function set ({id, jobId, cron, enabled, name}) {
await this.updateSchedule(id, {job: jobId, cron, enabled, name})
export async function set ({ id, jobId, cron, enabled, name, timezone }) {
await this.updateSchedule(id, { job: jobId, cron, enabled, name, timezone })
}
set.permission = 'admin'

View File

@@ -4,13 +4,14 @@ import {
} from '../utils'
export async function add ({
label,
host,
username,
password,
readOnly,
autoConnect = true
}) {
const server = await this.registerXenServer({host, username, password, readOnly})
const server = await this.registerXenServer({label, host, username, password, readOnly})
if (autoConnect) {
// Connect asynchronously, ignore any errors.
@@ -25,6 +26,10 @@ add.description = 'register a new Xen server'
add.permission = 'admin'
add.params = {
label: {
optional: true,
type: 'string'
},
host: {
type: 'string'
},
@@ -70,8 +75,8 @@ getAll.permission = 'admin'
// -------------------------------------------------------------------
export async function set ({id, host, username, password, readOnly}) {
await this.updateXenServer(id, {host, username, password, readOnly})
export async function set ({id, label, host, username, password, readOnly}) {
await this.updateXenServer(id, {label, host, username, password, readOnly})
}
set.description = 'changes the properties of a Xen server'
@@ -82,6 +87,10 @@ set.params = {
id: {
type: 'string'
},
label: {
type: 'string',
optional: true
},
host: {
type: 'string',
optional: true

View File

@@ -1,21 +1,18 @@
import {deprecate} from 'util'
import {InvalidCredential, AlreadyAuthenticated} from '../api-errors'
import { getUserPublicProperties } from '../utils'
import {invalidCredentials} from 'xo-common/api-errors'
// ===================================================================
export async function signIn (credentials) {
if (this.session.has('user_id')) {
throw new AlreadyAuthenticated()
}
const user = await this.authenticateUser(credentials)
if (!user) {
throw new InvalidCredential()
throw invalidCredentials()
}
this.session.set('user_id', user.id)
return this.getUserPublicProperties(user)
return getUserPublicProperties(user)
}
signIn.description = 'sign in'
@@ -55,7 +52,7 @@ export async function getUser () {
return userId === undefined
? null
: this.getUserPublicProperties(await this.getUser(userId))
: getUserPublicProperties(await this.getUser(userId))
}
getUser.description = 'return the currently connected user'

View File

@@ -1,3 +1,4 @@
import { asInteger } from '../xapi/utils'
import {
ensureArray,
forEach,
@@ -33,7 +34,7 @@ set.resolve = {
// -------------------------------------------------------------------
export async function scan ({SR}) {
export async function scan ({ SR }) {
await this.getXapi(SR).call('SR.scan', SR._xapiRef)
}
@@ -48,8 +49,16 @@ scan.resolve = {
// -------------------------------------------------------------------
// TODO: find a way to call this "delete" and not destroy
export async function destroy ({SR}) {
await this.getXapi(SR).call('SR.destroy', SR._xapiRef)
export async function destroy ({ sr }) {
const xapi = this.getXapi(sr)
if (sr.SR_type === 'xosan') {
const config = xapi.xo.getData(sr, 'xosan_config')
// we simply forget because the hosted disks are been destroyed with the VMs
await xapi.forgetSr(sr._xapiId)
await Promise.all(config.nodes.map(node => xapi.deleteVm(node.vm.id, true)))
return xapi.deleteNetwork(config.network)
}
await xapi.destroySr(sr._xapiId)
}
destroy.params = {
@@ -57,13 +66,13 @@ destroy.params = {
}
destroy.resolve = {
SR: ['id', 'SR', 'administrate']
sr: ['id', 'SR', 'administrate']
}
// -------------------------------------------------------------------
export async function forget ({SR}) {
await this.getXapi(SR).call('SR.forget', SR._xapiRef)
export async function forget ({ SR }) {
await this.getXapi(SR).forgetSr(SR._xapiId)
}
forget.params = {
@@ -76,6 +85,34 @@ forget.resolve = {
// -------------------------------------------------------------------
export async function connectAllPbds ({ SR }) {
await this.getXapi(SR).connectAllSrPbds(SR._xapiId)
}
connectAllPbds.params = {
id: { type: 'string' }
}
connectAllPbds.resolve = {
SR: ['id', 'SR', 'administrate']
}
// -------------------------------------------------------------------
export async function disconnectAllPbds ({ SR }) {
await this.getXapi(SR).disconnectAllSrPbds(SR._xapiId)
}
disconnectAllPbds.params = {
id: { type: 'string' }
}
disconnectAllPbds.resolve = {
SR: ['id', 'SR', 'administrate']
}
// -------------------------------------------------------------------
export async function createIso ({
host,
nameLabel,
@@ -92,6 +129,7 @@ export async function createIso ({
deviceConfig.legacy_mode = 'true'
} else if (type === 'smb') {
path = path.replace(/\\/g, '/')
deviceConfig.type = 'cifs'
deviceConfig.username = user
deviceConfig.cifspassword = password
}
@@ -107,7 +145,7 @@ export async function createIso ({
nameDescription,
'iso', // SR type ISO
'iso', // SR content type ISO
true,
type !== 'local',
{}
)
@@ -184,6 +222,51 @@ createNfs.resolve = {
host: ['host', 'host', 'administrate']
}
// -------------------------------------------------------------------
// HBA SR
// This functions creates an HBA SR
export async function createHba ({
host,
nameLabel,
nameDescription,
scsiId
}) {
const xapi = this.getXapi(host)
const deviceConfig = {
scsiId
}
const srRef = await xapi.call(
'SR.create',
host._xapiRef,
deviceConfig,
'0',
nameLabel,
nameDescription,
'lvmoohba', // SR LVM over HBA
'user', // recommended by Citrix
true,
{}
)
const sr = await xapi.call('SR.get_record', srRef)
return sr.uuid
}
createHba.params = {
host: { type: 'string' },
nameLabel: { type: 'string' },
nameDescription: { type: 'string' },
scsiId: { type: 'string' }
}
createHba.resolve = {
host: ['host', 'host', 'administrate']
}
// -------------------------------------------------------------------
// Local LVM SR
@@ -283,6 +366,55 @@ probeNfs.resolve = {
host: ['host', 'host', 'administrate']
}
// -------------------------------------------------------------------
// This function helps to detect all HBA devices on the host
export async function probeHba ({
host
}) {
const xapi = this.getXapi(host)
let xml
try {
await xapi.call(
'SR.probe',
host._xapiRef,
'type',
{}
)
throw new Error('the call above should have thrown an error')
} catch (error) {
if (error.code !== 'SR_BACKEND_FAILURE_107') {
throw error
}
xml = parseXml(error.params[2])
}
const hbaDevices = []
forEach(ensureArray(xml.Devlist.BlockDevice), hbaDevice => {
hbaDevices.push({
hba: hbaDevice.hba.trim(),
path: hbaDevice.path.trim(),
scsciId: hbaDevice.SCSIid.trim(),
size: hbaDevice.size.trim(),
vendor: hbaDevice.vendor.trim()
})
})
return hbaDevices
}
probeHba.params = {
host: { type: 'string' }
}
probeHba.resolve = {
host: ['host', 'host', 'administrate']
}
// -------------------------------------------------------------------
// ISCSI SR
@@ -316,7 +448,7 @@ export async function createIscsi ({
// if we give another port than default iSCSI
if (port) {
deviceConfig.port = port
deviceConfig.port = asInteger(port)
}
const srRef = await xapi.call(
@@ -377,7 +509,7 @@ export async function probeIscsiIqns ({
// if we give another port than default iSCSI
if (port) {
deviceConfig.port = port
deviceConfig.port = asInteger(port)
}
let xml
@@ -455,7 +587,7 @@ export async function probeIscsiLuns ({
// if we give another port than default iSCSI
if (port) {
deviceConfig.port = port
deviceConfig.port = asInteger(port)
}
let xml
@@ -534,7 +666,7 @@ export async function probeIscsiExists ({
// if we give another port than default iSCSI
if (port) {
deviceConfig.port = port
deviceConfig.port = asInteger(port)
}
const xml = parseXml(await xapi.call('SR.probe', host._xapiRef, deviceConfig, 'lvmoiscsi', {}))
@@ -542,7 +674,7 @@ export async function probeIscsiExists ({
const srs = []
forEach(ensureArray(xml['SRlist'].SR), sr => {
// get the UUID of SR connected to this LUN
srs.push({uuid: sr.UUID.trim()})
srs.push({ uuid: sr.UUID.trim() })
})
return srs
@@ -584,7 +716,7 @@ export async function probeNfsExists ({
forEach(ensureArray(xml['SRlist'].SR), sr => {
// get the UUID of SR connected to this LUN
srs.push({uuid: sr.UUID.trim()})
srs.push({ uuid: sr.UUID.trim() })
})
return srs

67
src/api/system.js Normal file
View File

@@ -0,0 +1,67 @@
import forEach from 'lodash/forEach'
import getKeys from 'lodash/keys'
import moment from 'moment-timezone'
import { noSuchObject } from 'xo-common/api-errors'
import { version as xoServerVersion } from '../../package.json'
// ===================================================================
export function getMethodsInfo () {
const methods = {}
forEach(this.apiMethods, (method, name) => {
methods[name] = {
description: method.description,
params: method.params || {},
permission: method.permission
}
})
return methods
}
getMethodsInfo.description = 'returns the signatures of all available API methods'
// -------------------------------------------------------------------
export const getServerTimezone = (tz => () => tz)(moment.tz.guess())
getServerTimezone.description = 'return the timezone server'
// -------------------------------------------------------------------
export const getServerVersion = () => xoServerVersion
getServerVersion.description = 'return the version of xo-server'
// -------------------------------------------------------------------
export const getVersion = () => '0.1'
getVersion.description = 'API version (unstable)'
// -------------------------------------------------------------------
export function listMethods () {
return getKeys(this.apiMethods)
}
listMethods.description = 'returns the name of all available API methods'
// -------------------------------------------------------------------
export function methodSignature ({method: name}) {
const method = this.apiMethods[name]
if (!method) {
throw noSuchObject()
}
// Return an array for compatibility with XML-RPC.
return [
// XML-RPC require the name of the method.
{
name,
description: method.description,
params: method.params || {},
permission: method.permission
}
]
}
methodSignature.description = 'returns the signature of an API method'

View File

@@ -36,9 +36,9 @@ hasPermission.params = {
export function wait ({duration, returnValue}) {
return new Promise(resolve => {
setTimeout(+duration, () => {
setTimeout(() => {
resolve(returnValue)
})
}, +duration)
})
}

View File

@@ -1,10 +1,10 @@
import {InvalidParameters} from '../api-errors'
import { mapToArray } from '../utils'
import {invalidParameters} from 'xo-common/api-errors'
import { getUserPublicProperties, mapToArray } from '../utils'
// ===================================================================
export async function create ({email, password, permission}) {
return (await this.createUser(email, {password, permission})).id
return (await this.createUser({email, password, permission})).id
}
create.description = 'creates a new user'
@@ -22,7 +22,7 @@ create.params = {
// Deletes an existing user.
async function delete_ ({id}) {
if (id === this.session.get('user_id')) {
throw new InvalidParameters('an user cannot delete itself')
throw invalidParameters('a user cannot delete itself')
}
await this.deleteUser(id)
@@ -48,7 +48,7 @@ export async function getAll () {
const users = await this.getAllUsers()
// Filters out private properties.
return mapToArray(users, this.getUserPublicProperties)
return mapToArray(users, getUserPublicProperties)
}
getAll.description = 'returns all the existing users'
@@ -57,19 +57,29 @@ getAll.permission = 'admin'
// -------------------------------------------------------------------
export async function set ({id, email, password, permission}) {
await this.updateUser(id, {email, password, permission})
export async function set ({id, email, password, permission, preferences}) {
const isAdmin = this.user && this.user.permission === 'admin'
if (isAdmin) {
if (permission && id === this.session.get('user_id')) {
throw invalidParameters('a user cannot change its own permission')
}
} else if (email || password || permission) {
throw invalidParameters('this properties can only changed by an administrator')
}
await this.updateUser(id, {email, password, permission, preferences})
}
set.description = 'changes the properties of an existing user'
set.permission = 'admin'
set.permission = ''
set.params = {
id: { type: 'string' },
email: { type: 'string', optional: true },
password: { type: 'string', optional: true },
permission: { type: 'string', optional: true }
permission: { type: 'string', optional: true },
preferences: { type: 'object', optional: true }
}
// -------------------------------------------------------------------

View File

@@ -1,12 +1,11 @@
# FIXME: rename to disk.*
$isArray = require 'lodash.isarray'
{coroutine: $coroutine} = require 'bluebird'
{format} = require 'json-rpc-peer'
{InvalidParameters} = require '../api-errors'
{parseSize} = require '../utils'
{JsonRpcError} = require '../api-errors'
{invalidParameters} = require 'xo-common/api-errors'
{isArray: $isArray, parseSize} = require '../utils'
{JsonRpcError} = require 'json-rpc-peer'
#=====================================================================
@@ -39,7 +38,7 @@ set = $coroutine (params) ->
size = parseSize(params.size)
if size < vdi.size
throw new InvalidParameters(
throw invalidParameters(
"cannot set new size (#{size}) below the current size (#{vdi.size})"
)
yield xapi.resizeVdi(ref, size)

View File

@@ -1,5 +1,19 @@
import {
diffItems,
noop,
pCatch
} from '../utils'
// ===================================================================
// TODO: move into vm and rename to removeInterface
async function delete_ ({vif}) {
this.allocIpAddresses(
vif.id,
null,
vif.allowedIpv4Addresses.concat(vif.allowedIpv6Addresses)
)::pCatch(noop)
await this.getXapi(vif).deleteVif(vif._xapiId)
}
export {delete_ as delete}
@@ -13,10 +27,11 @@ delete_.resolve = {
}
// -------------------------------------------------------------------
// TODO: move into vm and rename to disconnectInterface
export async function disconnect ({vif}) {
// TODO: check if VIF is attached before
await this.getXapi(vif).call('VIF.unplug_force', vif._xapiRef)
await this.getXapi(vif).disconnectVif(vif._xapiId)
}
disconnect.params = {
@@ -31,7 +46,7 @@ disconnect.resolve = {
// TODO: move into vm and rename to connectInterface
export async function connect ({vif}) {
// TODO: check if VIF is attached before
await this.getXapi(vif).call('VIF.plug', vif._xapiRef)
await this.getXapi(vif).connectVif(vif._xapiId)
}
connect.params = {
@@ -41,3 +56,86 @@ connect.params = {
connect.resolve = {
vif: ['id', 'VIF', 'operate']
}
// -------------------------------------------------------------------
export async function set ({
vif,
network,
mac,
allowedIpv4Addresses,
allowedIpv6Addresses,
attached
}) {
const oldIpAddresses = vif.allowedIpv4Addresses.concat(vif.allowedIpv6Addresses)
const newIpAddresses = []
{
const { push } = newIpAddresses
push.apply(newIpAddresses, allowedIpv4Addresses || vif.allowedIpv4Addresses)
push.apply(newIpAddresses, allowedIpv6Addresses || vif.allowedIpv6Addresses)
}
if (network || mac) {
const xapi = this.getXapi(vif)
const vm = xapi.getObject(vif.$VM)
mac == null && (mac = vif.MAC)
network = xapi.getObject((network && network.id) || vif.$network)
attached == null && (attached = vif.attached)
await this.allocIpAddresses(vif.id, null, oldIpAddresses)
await xapi.deleteVif(vif._xapiId)
// create new VIF with new parameters
const newVif = await xapi.createVif(vm.$id, network.$id, {
mac,
currently_attached: attached,
ipv4_allowed: newIpAddresses
})
await this.allocIpAddresses(newVif.$id, newIpAddresses)
return
}
const [ addAddresses, removeAddresses ] = diffItems(
newIpAddresses,
oldIpAddresses
)
await this.allocIpAddresses(
vif.id,
addAddresses,
removeAddresses
)
return this.getXapi(vif).editVif(vif._xapiId, {
ipv4Allowed: allowedIpv4Addresses,
ipv6Allowed: allowedIpv6Addresses
})
}
set.params = {
id: { type: 'string' },
network: { type: 'string', optional: true },
mac: { type: 'string', optional: true },
allowedIpv4Addresses: {
type: 'array',
items: {
type: 'string'
},
optional: true
},
allowedIpv6Addresses: {
type: 'array',
items: {
type: 'string'
},
optional: true
},
attached: { type: 'boolean', optional: true }
}
set.resolve = {
vif: ['id', 'VIF', 'operate'],
network: ['network', 'network', 'operate']
}

View File

@@ -1,25 +1,29 @@
$assign = require 'lodash.assign'
$assign = require 'lodash/assign'
$debug = (require 'debug') 'xo:api:vm'
$filter = require 'lodash.filter'
$findIndex = require 'lodash.findindex'
$findWhere = require 'lodash.find'
$isArray = require 'lodash.isarray'
endsWith = require 'lodash.endswith'
$filter = require 'lodash/filter'
$findIndex = require 'lodash/findIndex'
$findWhere = require 'lodash/find'
concat = require 'lodash/concat'
endsWith = require 'lodash/endsWith'
escapeStringRegexp = require 'escape-string-regexp'
eventToPromise = require 'event-to-promise'
sortBy = require 'lodash.sortby'
startsWith = require 'lodash.startswith'
merge = require 'lodash/merge'
sortBy = require 'lodash/sortBy'
startsWith = require 'lodash/startsWith'
{coroutine: $coroutine} = require 'bluebird'
{format} = require 'json-rpc-peer'
{
GenericError,
Unauthorized
} = require('../api-errors')
forbiddenOperation,
invalidParameters,
unauthorized
} = require('xo-common/api-errors')
{
forEach,
formatXml: $js2xml,
isArray: $isArray,
map,
mapFilter,
mapToArray,
noop,
parseSize,
@@ -27,7 +31,7 @@ startsWith = require 'lodash.startswith'
pCatch,
pFinally
} = require '../utils'
{isVmRunning: $isVMRunning} = require('../xapi')
{isVmRunning: $isVmRunning} = require('../xapi')
#=====================================================================
@@ -47,39 +51,51 @@ checkPermissionOnSrs = (vm, permission = 'operate') -> (
)
return @hasPermissions(@session.get('user_id'), permissions).then((success) => (
throw new Unauthorized() unless success
throw unauthorized() unless success
))
)
#=====================================================================
# TODO: Implement ACLs
create = $coroutine ({
resourceSet
installation
name_description
name_label
template
pv_args
VDIs
VIFs
existingDisks
}) ->
{ user } = this
unless user
throw new Unauthorized()
extract = (obj, prop) ->
value = obj[prop]
delete obj[prop]
return value
# TODO: Implement ACLs
create = $coroutine (params) ->
{ user } = this
resourceSet = extract(params, 'resourceSet')
if not resourceSet and user.permission isnt 'admin'
throw unauthorized()
template = extract(params, 'template')
params.template = template._xapiId
xapi = this.getXapi(template)
limits = {
cpus: template.CPUs.number,
disk: 0,
memory: template.memory.size,
vms: 1
}
objectIds = [
template.id
]
limits = {
cpus: template.CPUs.number,
disk: 0,
memory: template.memory.dynamic[1],
vms: 1
}
vdiSizesByDevice = {}
forEach(xapi.getObject(template._xapiId).$VBDs, (vbd) =>
if (
vbd.type is 'Disk' and
(vdi = vbd.$VDI)
)
vdiSizesByDevice[vbd.userdevice] = +vdi.virtual_size
xapiVdis = VDIs and map(VDIs, (vdi) =>
return
)
vdis = extract(params, 'VDIs')
params.vdis = vdis and map(vdis, (vdi) =>
sr = @getObject(vdi.SR)
size = parseSize(vdi.size)
@@ -87,31 +103,18 @@ create = $coroutine ({
limits.disk += size
return $assign({}, vdi, {
device: vdi.device ? vdi.position,
device: vdi.userdevice ? vdi.device ? vdi.position,
size,
SR: sr._xapiId,
type: vdi.type
})
)
xapi = @getXapi(template)
diskSizesByDevice = {}
forEach(xapi.getObject(template._xapiId).$VBDs, (vbd) =>
if (
vbd.type is 'Disk' and
(vdi = vbd.$VDI)
)
diskSizesByDevice[vbd.device] = +vdi.virtual_size
return
)
xapiExistingVdis = existingDisks and map(existingDisks, (vdi, device) =>
existingVdis = extract(params, 'existingDisks')
params.existingVdis = existingVdis and map(existingVdis, (vdi, userdevice) =>
if vdi.size?
size = parseSize(vdi.size)
diskSizesByDevice[device] = size
vdiSizesByDevice[userdevice] = size
if vdi.$SR
sr = @getObject(vdi.$SR)
@@ -123,9 +126,10 @@ create = $coroutine ({
})
)
forEach(diskSizesByDevice, (size) => limits.disk += size)
forEach(vdiSizesByDevice, (size) => limits.disk += size)
xapiVifs = VIFs and map(VIFs, (vif) =>
vifs = extract(params, 'VIFs')
params.vifs = vifs and map(vifs, (vif) =>
network = @getObject(vif.network)
objectIds.push(network.id)
@@ -133,36 +137,70 @@ create = $coroutine ({
return {
mac: vif.mac
network: network._xapiId
ipv4_allowed: vif.allowedIpv4Addresses
ipv6_allowed: vif.allowedIpv6Addresses
}
)
installation = extract(params, 'installation')
params.installRepository = installation && installation.repository
checkLimits = null
if resourceSet
yield this.checkResourceSetConstraints(resourceSet, user.id, objectIds)
yield this.allocateLimitsInResourceSet(limits, resourceSet)
else unless user.permission is 'admin'
throw new Unauthorized()
xapiVm = yield xapi.createVm(template._xapiId, {
installRepository: installation && installation.repository,
nameDescription: name_description,
nameLabel: name_label,
pvArgs: pv_args,
vdis: xapiVdis,
vifs: xapiVifs,
existingVdis: xapiExistingVdis
})
checkLimits = $coroutine (limits2) =>
yield this.allocateLimitsInResourceSet(limits, resourceSet)
yield this.allocateLimitsInResourceSet(limits2, resourceSet)
xapiVm = yield xapi.createVm(template._xapiId, params, checkLimits)
vm = xapi.xo.addObject(xapiVm)
if resourceSet
yield Promise.all([
@addAcl(user.id, vm.id, 'admin'),
if params.share
$resourceSet = yield @getResourceSet(resourceSet)
Promise.all(map($resourceSet.subjects, (subjectId) => @addAcl(subjectId, vm.id, 'admin')))
else
@addAcl(user.id, vm.id, 'admin')
xapi.xo.setData(xapiVm.$id, 'resourceSet', resourceSet)
])
for vifId in vm.VIFs
vif = @getObject(vifId, 'VIF')
yield this.allocIpAddresses(vifId, concat(vif.allowedIpv4Addresses, vif.allowedIpv6Addresses)).catch(() =>
xapi.deleteVif(vif._xapiId)
)
if params.bootAfterCreate
pCatch.call(xapi.startVm(vm._xapiId), noop)
return vm.id
create.params = {
affinityHost: { type: 'string', optional: true }
bootAfterCreate: {
type: 'boolean'
optional: true
}
cloudConfig: {
type: 'string'
optional: true
}
coreOs: {
type: 'boolean'
optional: true
}
clone: {
type: 'boolean'
optional: true
}
resourceSet: {
type: 'string',
optional: true
@@ -184,6 +222,12 @@ create.params = {
# PV Args
pv_args: { type: 'string', optional: true }
share: {
type: 'boolean',
optional: true
}
# TODO: add the install repository!
# VBD.insert/eject
# Also for the console!
@@ -193,6 +237,7 @@ create.params = {
# Virtual interfaces to create for the new VM.
VIFs: {
optional: true
type: 'array'
items: {
type: 'object'
@@ -204,6 +249,18 @@ create.params = {
optional: true # Auto-generated per default.
type: 'string'
}
allowedIpv4Addresses: {
optional: true
type: 'array'
items: { type: 'string' }
}
allowedIpv6Addresses: {
optional: true
type: 'array'
items: { type: 'string' }
}
}
}
}
@@ -246,19 +303,43 @@ create.params = {
}
create.resolve = {
template: ['template', 'VM-template', 'administrate'],
template: ['template', 'VM-template', ''],
}
exports.create = create
#---------------------------------------------------------------------
delete_ = ({vm, delete_disks: deleteDisks}) ->
delete_ = $coroutine ({vm, delete_disks: deleteDisks = false }) ->
cpus = vm.CPUs.number
memory = vm.memory.size
xapi = @getXapi(vm)
@getAllAcls().then((acls) =>
Promise.all(mapFilter(acls, (acl) =>
if (acl.object == vm.id)
return pCatch.call(
@removeAcl(acl.subject, acl.object, acl.action),
noop
)
))
)
# Update IP pools
yield Promise.all(map(vm.VIFs, (vifId) =>
vif = xapi.getObject(vifId)
return pCatch.call(
this.allocIpAddresses(
vifId,
null,
concat(vif.ipv4_allowed, vif.ipv6_allowed)
),
noop
)
))
# Update resource sets
resourceSet = xapi.xo.getData(vm._xapiId, 'resourceSet')
if resourceSet?
disk = 0
@@ -275,10 +356,16 @@ delete_ = ({vm, delete_disks: deleteDisks}) ->
return
)
pCatch.call(@releaseLimitsInResourceSet(
@computeVmResourcesUsage(vm),
resourceSet
), noop)
resourceSetUsage = @computeVmResourcesUsage(vm)
ipPoolsUsage = yield @computeVmIpPoolsUsage(vm)
pCatch.call(
@releaseLimitsInResourceSet(
merge(resourceSetUsage, ipPoolsUsage),
resourceSet
),
noop
)
return xapi.deleteVm(vm._xapiId, deleteDisks)
@@ -361,7 +448,7 @@ migrate = $coroutine ({
])
unless yield @hasPermissions(@session.get('user_id'), permissions)
throw new Unauthorized()
throw unauthorized()
yield @getXapi(vm).migrateVm(vm._xapiId, @getXapi(host), host._xapiId, {
migrationNetworkId: migrationNetwork?._xapiId
@@ -398,99 +485,23 @@ exports.migrate = migrate
#---------------------------------------------------------------------
# FIXME: human readable strings should be handled.
set = $coroutine (params) ->
{VM} = params
xapi = @getXapi VM
set = (params) ->
VM = extract(params, 'VM')
xapi = @getXapi(VM)
{_xapiRef: ref} = VM
return xapi.editVm(VM._xapiId, params, $coroutine (limits, vm) =>
resourceSet = xapi.xo.getData(vm, 'resourceSet')
resourceSet = xapi.xo.getData(ref, 'resourceSet')
if (resourceSet)
try
return yield @allocateLimitsInResourceSet(limits, resourceSet)
catch error
# if the resource set no longer exist, behave as if the VM is free
throw error unless noSuchObject.is(error)
# Memory.
if 'memory' of params
memory = parseSize(params.memory)
if memory < VM.memory.static[0]
@throw(
'INVALID_PARAMS'
"cannot set memory below the static minimum (#{VM.memory.static[0]})"
)
if ($isVMRunning VM) and memory > VM.memory.static[1]
@throw(
'INVALID_PARAMS'
"cannot set memory above the static maximum (#{VM.memory.static[1]}) "+
"for a running VM"
)
if memory < VM.memory.dynamic[0]
yield xapi.call 'VM.set_memory_dynamic_min', ref, "#{memory}"
else if memory > VM.memory.static[1]
yield xapi.call 'VM.set_memory_static_max', ref, "#{memory}"
if resourceSet?
yield @allocateLimitsInResourceSet({
memory: memory - VM.memory.size
}, resourceSet)
yield xapi.call 'VM.set_memory_dynamic_max', ref, "#{memory}"
# Number of CPUs.
if 'CPUs' of params
{CPUs} = params
if resourceSet?
yield @allocateLimitsInResourceSet({
cpus: CPUs - VM.CPUs.number
}, resourceSet)
if $isVMRunning VM
if CPUs > VM.CPUs.max
@throw(
'INVALID_PARAMS'
"cannot set CPUs above the static maximum (#{VM.CPUs.max}) "+
"for a running VM"
)
yield xapi.call 'VM.set_VCPUs_number_live', ref, "#{CPUs}"
else
if CPUs > VM.CPUs.max
yield xapi.call 'VM.set_VCPUs_max', ref, "#{CPUs}"
yield xapi.call 'VM.set_VCPUs_at_startup', ref, "#{CPUs}"
# HA policy
# TODO: also handle "best-effort" case
if 'high_availability' of params
{high_availability} = params
if high_availability
yield xapi.call 'VM.set_ha_restart_priority', ref, "restart"
else
yield xapi.call 'VM.set_ha_restart_priority', ref, ""
if 'auto_poweron' of params
{auto_poweron} = params
if auto_poweron
yield xapi.call 'VM.add_to_other_config', ref, 'auto_poweron', 'true'
yield xapi.setPoolProperties({autoPowerOn: true})
else
yield xapi.call 'VM.remove_from_other_config', ref, 'auto_poweron'
if 'cpuWeight' of params
if resourceSet? and this.user.permission isnt 'admin'
throw new Unauthorized()
yield xapi.setVcpuWeight(VM._xapiId, params.cpuWeight)
# Other fields.
for param, fields of {
'name_label'
'name_description'
'PV_args'
}
continue unless param of params
for field in (if $isArray fields then fields else [fields])
yield xapi.call "VM.set_#{field}", ref, "#{params[param]}"
return true
if (limits.cpuWeight && this.user.permission != 'admin')
throw unauthorized()
)
set.params = {
# Identifier of the VM to update.
@@ -509,19 +520,34 @@ set.params = {
# Number of virtual CPUs to allocate.
CPUs: { type: 'integer', optional: true }
cpusMax: { type: ['integer', 'string'], optional: true }
# Memory to allocate (in bytes).
#
# Note: static_min ≤ dynamic_min ≤ dynamic_max ≤ static_max
memory: { type: ['integer', 'string'], optional: true }
# Set dynamic_min
memoryMin: { type: ['integer', 'string'], optional: true }
# Set dynamic_max
memoryMax: { type: ['integer', 'string'], optional: true }
# Set static_max
memoryStaticMax: { type: ['integer', 'string'], optional: true }
# Kernel arguments for PV VM.
PV_args: { type: 'string', optional: true }
cpuWeight: { type: 'integer', optional: true}
cpuWeight: { type: ['integer', 'null'], optional: true }
cpuCap: { type: ['integer', 'null'], optional: true }
affinityHost: { type: ['string', 'null'], optional: true }
}
set.resolve = {
VM: ['id', ['VM', 'VM-snapshot'], 'administrate']
VM: ['id', ['VM', 'VM-snapshot', 'VM-template'], 'administrate']
}
exports.set = set
@@ -609,7 +635,7 @@ copy.params = {
}
copy.resolve = {
vm: [ 'vm', 'VM', 'administrate' ]
vm: [ 'vm', ['VM', 'VM-snapshot'], 'administrate' ]
sr: [ 'sr', 'SR', 'operate' ]
}
@@ -622,7 +648,7 @@ convertToTemplate = $coroutine ({vm}) ->
unless yield @hasPermissions(@session.get('user_id'), [
[ vm.$pool, 'administrate' ]
])
throw new Unauthorized()
throw unauthorized()
yield @getXapi(vm).call 'VM.set_is_a_template', vm._xapiRef, true
@@ -646,12 +672,12 @@ exports.convert = convertToTemplate
snapshot = $coroutine ({vm, name}) ->
yield checkPermissionOnSrs.call(this, vm)
snapshot = yield @getXapi(vm).snapshotVm(vm._xapiRef, name)
snapshot = yield @getXapi(vm).snapshotVm(vm._xapiRef, name ? "#{vm.name_label}_#{new Date().toISOString()}")
return snapshot.$id
snapshot.params = {
id: { type: 'string' }
name: { type: 'string' }
name: { type: 'string', optional: true }
}
snapshot.resolve = {
@@ -670,14 +696,14 @@ rollingDeltaBackup = $coroutine ({vm, remote, tag, depth}) ->
})
rollingDeltaBackup.params = {
vm: { type: 'string' }
id: { type: 'string' }
remote: { type: 'string' }
tag: { type: 'string'}
depth: { type: ['string', 'number'] }
}
rollingDeltaBackup.resolve = {
vm: ['vm', ['VM', 'VM-snapshot'], 'administrate']
vm: ['id', ['VM', 'VM-snapshot'], 'administrate']
}
rollingDeltaBackup.permission = 'admin'
@@ -708,12 +734,12 @@ exports.importDeltaBackup = importDeltaBackup
deltaCopy = ({ vm, sr }) -> @deltaCopyVm(vm, sr)
deltaCopy.params = {
vm: { type: 'string' },
id: { type: 'string' },
sr: { type: 'string' }
}
deltaCopy.resolve = {
vm: [ 'vm', 'VM', 'operate'],
vm: [ 'id', 'VM', 'operate'],
sr: [ 'sr', 'SR', 'operate']
}
@@ -764,9 +790,7 @@ exports.backup = backup
#---------------------------------------------------------------------
importBackup = $coroutine ({remote, file, sr}) ->
yield @importVmBackup(remote, file, sr)
return
importBackup = ({remote, file, sr}) -> @importVmBackup(remote, file, sr)
importBackup.permission = 'admin'
importBackup.description = 'Imports a VM into host, from a file found in the chosen remote'
@@ -816,21 +840,30 @@ exports.rollingBackup = rollingBackup
#---------------------------------------------------------------------
rollingDrCopy = ({vm, pool, tag, depth}) ->
if vm.$pool is pool.id
throw new GenericError('Disaster Recovery attempts to copy on the same pool')
return @rollingDrCopyVm({vm, sr: @getObject(pool.default_SR, 'SR'), tag, depth})
rollingDrCopy = ({vm, pool, sr, tag, depth}) ->
unless sr
unless pool
throw invalidParameters('either pool or sr param should be specified')
if vm.$pool is pool.id
throw forbiddenOperation('Disaster Recovery attempts to copy on the same pool')
sr = @getObject(pool.default_SR, 'SR')
return @rollingDrCopyVm({vm, sr, tag, depth})
rollingDrCopy.params = {
id: { type: 'string' }
pool: { type: 'string' }
tag: { type: 'string'}
depth: { type: 'number' }
id: { type: 'string' }
pool: { type: 'string', optional: true }
sr: { type: 'string', optional: true }
tag: { type: 'string'}
}
rollingDrCopy.resolve = {
vm: ['id', ['VM', 'VM-snapshot'], 'administrate'],
pool: ['pool', 'pool', 'administrate']
sr: ['sr', 'SR', 'administrate']
}
rollingDrCopy.description = 'Copies a VM to a different pool, with a tagged name, and removes the oldest VM with the same tag from this pool, according to depth'
@@ -871,8 +904,7 @@ stop = $coroutine ({vm, force}) ->
yield xapi.call 'VM.clean_shutdown', vm._xapiRef
catch error
if error.code is 'VM_MISSING_PV_DRIVERS' or error.code is 'VM_LACKS_FEATURE_SHUTDOWN'
# TODO: Improve reporting: this message is unclear.
@throw 'INVALID_PARAMS'
throw invalidParameters('clean shutdown requires PV drivers')
else
throw error
@@ -907,18 +939,11 @@ exports.suspend = suspend
#---------------------------------------------------------------------
resume = $coroutine ({vm, force}) ->
# FIXME: WTF this is?
if not force
force = true
yield @getXapi(vm).call 'VM.resume', vm._xapiRef, false, force
return true
resume = ({vm}) ->
return @getXapi(vm).resumeVm(vm._xapiId)
resume.params = {
id: { type: 'string' }
force: { type: 'boolean', optional: true }
}
resume.resolve = {
@@ -928,15 +953,12 @@ exports.resume = resume
#---------------------------------------------------------------------
# revert a snapshot to its parent VM
revert = $coroutine ({snapshot}) ->
# Attempts a revert from this snapshot to its parent VM
yield @getXapi(snapshot).call 'VM.revert', snapshot._xapiRef
return true
revert = ({snapshot, snapshotBefore}) ->
return @getXapi(snapshot).revertVm(snapshot._xapiId, snapshotBefore)
revert.params = {
id: { type: 'string' }
id: { type: 'string' },
snapshotBefore: { type: 'boolean', optional: true }
}
revert.resolve = {
@@ -996,30 +1018,33 @@ exports.export = export_;
#---------------------------------------------------------------------
handleVmImport = $coroutine (req, res, { xapi, srId }) ->
handleVmImport = $coroutine (req, res, { data, srId, type, xapi }) ->
# Timeout seems to be broken in Node 4.
# See https://github.com/nodejs/node/issues/3319
req.setTimeout(43200000) # 12 hours
try
vm = yield xapi.importVm(req, { srId })
vm = yield xapi.importVm(req, { data, srId, type })
res.end(format.response(0, vm.$id))
catch e
res.writeHead(500)
res.end(format.error(0, new GenericError(e.message)))
res.end(format.error(0, new Error(e.message)))
return
# TODO: "sr_id" can be passed in URL to target a specific SR
import_ = $coroutine ({host, sr}) ->
import_ = $coroutine ({ data, host, sr, type }) ->
if data and type is 'xva'
throw invalidParameters('unsupported field data for the file type xva')
if not sr
if not host
throw new InvalidParameters('you must provide either host or SR')
throw invalidParameters('you must provide either host or SR')
xapi = @getXapi(host)
sr = xapi.pool.$default_SR
if not sr
throw new InvalidParameters('there is not default SR in this pool')
throw invalidParameters('there is not default SR in this pool')
# FIXME: must have administrate permission on default SR.
else
@@ -1027,13 +1052,45 @@ import_ = $coroutine ({host, sr}) ->
return {
$sendTo: yield @registerHttpRequest(handleVmImport, {
data,
srId: sr._xapiId,
type,
xapi
})
}
import_.params = {
data: {
type: 'object',
optional: true,
properties: {
descriptionLabel: { type: 'string' },
disks: {
type: 'array',
items: {
type: 'object',
properties: {
capacity: { type: 'integer' },
descriptionLabel: { type: 'string' },
nameLabel: { type: 'string' },
path: { type: 'string' },
position: { type: 'integer' }
}
},
optional: true
},
memory: { type: 'integer' },
nameLabel: { type: 'string' },
nCpus: { type: 'integer' },
networks: {
type: 'array',
items: { type: 'string' },
optional: true
},
}
},
host: { type: 'string', optional: true },
type: { type: 'string', optional: true },
sr: { type: 'string', optional: true }
}
@@ -1074,24 +1131,48 @@ exports.attachDisk = attachDisk
#---------------------------------------------------------------------
# FIXME: position should be optional and default to last.
# TODO: implement resource sets
createInterface = $coroutine ({vm, network, position, mtu, mac}) ->
createInterface = $coroutine ({
vm,
network,
position,
mac,
allowedIpv4Addresses,
allowedIpv6Addresses
}) ->
vif = yield @getXapi(vm).createVif(vm._xapiId, network._xapiId, {
mac,
mtu,
position
position,
ipv4_allowed: allowedIpv4Addresses,
ipv6_allowed: allowedIpv6Addresses
})
{ push } = ipAddresses = []
push.apply(ipAddresses, allowedIpv4Addresses) if allowedIpv4Addresses
push.apply(ipAddresses, allowedIpv6Addresses) if allowedIpv6Addresses
pCatch.call(@allocIpAddresses(vif.$id, ipAddresses), noop) if ipAddresses.length
return vif.$id
createInterface.params = {
vm: { type: 'string' }
network: { type: 'string' }
position: { type: 'string' }
mtu: { type: 'string', optional: true }
position: { type: ['integer', 'string'], optional: true }
mac: { type: 'string', optional: true }
allowedIpv4Addresses: {
type: 'array',
items: {
type: 'string'
},
optional: true
},
allowedIpv6Addresses: {
type: 'array',
items: {
type: 'string'
},
optional: true
}
}
createInterface.resolve = {
@@ -1170,10 +1251,7 @@ setBootOrder = $coroutine ({vm, order}) ->
yield xapi.call 'VM.set_HVM_boot_params', vm._xapiRef, order
return true
@throw(
'INVALID_PARAMS'
'You can only set the boot order on a HVM guest'
)
throw invalidParameters('You can only set the boot order on a HVM guest')
setBootOrder.params = {
vm: { type: 'string' },

View File

@@ -1,5 +1,50 @@
import { streamToBuffer } from '../utils'
// ===================================================================
// Trigger a cleanup pass of the XO data store.
//
// `this` is the XO instance (bound by the API layer); this method is a
// thin passthrough to its `clean()` implementation.
export function clean () {
  const xo = this
  return xo.clean()
}

clean.permission = 'admin'
// -------------------------------------------------------------------
// Expose the server configuration as a downloadable file.
//
// Registers a one-shot HTTP handler and returns its URL under `$getFrom`;
// the handler answers with an attachment disposition (so browsers save the
// response as a file) and streams the result of `this.exportConfig()`.
export async function exportConfig () {
  const handler = (req, res) => {
    res.writeHead(200, 'OK', {
      'content-disposition': 'attachment'
    })

    return this.exportConfig()
  }

  const $getFrom = await this.registerHttpRequest(handler, undefined, {
    suffix: '/config.json'
  })

  return { $getFrom }
}

exportConfig.permission = 'admin'
// -------------------------------------------------------------------
// Return every object known to XO.
//
// Delegates to `this.getObjects()` on the XO instance; empty permission
// string means any authenticated user may call it.
export function getAllObjects () {
  const xo = this
  return xo.getObjects()
}

getAllObjects.permission = ''

getAllObjects.description = 'Returns all XO objects'
// -------------------------------------------------------------------
// Accept an uploaded configuration file and import it.
//
// Registers a one-shot HTTP upload handler and returns its URL under
// `$sendTo`; the handler buffers the request body, parses it as JSON and
// feeds it to `this.importConfig()`.
export async function importConfig () {
  const handler = async (req, res) => {
    const buffer = await streamToBuffer(req)
    await this.importConfig(JSON.parse(buffer))

    res.end('config successfully imported')
  }

  const $sendTo = await this.registerHttpRequest(handler)
  return { $sendTo }
}

importConfig.permission = 'admin'

478
src/api/xosan.js Normal file
View File

@@ -0,0 +1,478 @@
import arp from 'arp-a'
import createLogger from 'debug'
import defer from 'golike-defer'
import execa from 'execa'
import fromPairs from 'lodash/fromPairs'
import fs from 'fs-promise'
import map from 'lodash/map'
import splitLines from 'split-lines'
import {
filter,
includes
} from 'lodash'
import {
noop,
pCatch,
pFromCallback,
splitFirst
} from '../utils'
const debug = createLogger('xo:xosan')

// Name of the SSH key pair files generated for the XOSAN VMs.
// NOTE(review): the path is relative to the process' working directory —
// confirm the cwd is stable across restarts.
const SSH_KEY_FILE = 'id_rsa_xosan'
// Private /24 prefix dedicated to gluster traffic; host and VM addresses
// are built by appending a final byte to it.
const NETWORK_PREFIX = '172.31.100.'
// Size of a XOSAN VM's system disk: 10 GiB.
const XOSAN_VM_SYSTEM_DISK_SIZE = 10 * 1024 * 1024 * 1024
// Fraction of the free SR space allotted to the data disk (see
// prepareGlusterVm: a ratio is used because the VDI overhead appears to
// be proportional to its size).
const XOSAN_DATA_DISK_USEAGE_RATIO = 0.99
const XOSAN_MAX_DISK_SIZE = 2093050 * 1024 * 1024 // a bit under 2 TB
// Per-pool lock: pool ids mapped to true while a createSR call is running.
const CURRENTLY_CREATING_SRS = {}
// Run `gluster volume info xosan` over SSH on one of the XOSAN VMs and
// return its parsed output, enriched with the matching VM objects.
//
// Returns `null` when the SR carries no `xosan_config` data, otherwise a
// plain object mapping each `key: value` output line, plus a `Bricks`
// array of { config, ip, vm } sorted by brick number. When the local ARP
// table resolves one of the brick IPs, that brick also gets a `mac` field.
export async function getVolumeInfo ({ sr }) {
  const xapi = this.getXapi(sr)
  const giantIPtoVMDict = {}
  const data = xapi.xo.getData(sr, 'xosan_config')
  if (!data || !data.nodes) {
    return null
  }
  const nodes = data.nodes
  // index the XOSAN VMs by their gluster IP so bricks can be matched below
  nodes.forEach(conf => {
    giantIPtoVMDict[conf.vm.ip] = xapi.getObject(conf.vm.id)
  })
  // any node can answer for the whole volume; use the first one
  const oneHostAndVm = nodes[0]
  const resultCmd = await remoteSsh(xapi, {
    host: xapi.getObject(oneHostAndVm.host),
    address: oneHostAndVm.vm.ip
  }, 'gluster volume info xosan')
  const result = resultCmd['stdout']
  /* sample output parsed below:
  Volume Name: xosan
  Type: Disperse
  Volume ID: 1d4d0e57-8b6b-43f9-9d40-c48be1df7548
  Status: Started
  Snapshot Count: 0
  Number of Bricks: 1 x (2 + 1) = 3
  Transport-type: tcp
  Bricks:
  Brick1: 192.168.0.201:/bricks/brick1/xosan1
  Brick2: 192.168.0.202:/bricks/brick1/xosan1
  Brick3: 192.168.0.203:/bricks/brick1/xosan1
  Options Reconfigured:
  client.event-threads: 16
  server.event-threads: 16
  performance.client-io-threads: on
  nfs.disable: on
  performance.readdir-ahead: on
  transport.address-family: inet
  features.shard: on
  features.shard-block-size: 64MB
  network.remote-dio: enable
  cluster.eager-lock: enable
  performance.io-cache: off
  performance.read-ahead: off
  performance.quick-read: off
  performance.stat-prefetch: on
  performance.strict-write-ordering: off
  cluster.server-quorum-type: server
  cluster.quorum-type: auto
  */
  // split each line on its first ':' to build a key/value dictionary
  const info = fromPairs(
    splitLines(result.trim()).map(line =>
      splitFirst(line, ':').map(val => val.trim())
    )
  )
  // 'BrickN' -> N (skip the 5-char 'Brick' prefix)
  const getNumber = item => +item.substr(5)
  const brickKeys = filter(Object.keys(info), key => key.match(/^Brick[1-9]/)).sort((i1, i2) => getNumber(i1) - getNumber(i2))
  // expected brickKeys : [ 'Brick1', 'Brick2', 'Brick3' ]
  info['Bricks'] = brickKeys.map(key => {
    const ip = info[key].split(':')[0]
    return { config: info[key], ip: ip, vm: giantIPtoVMDict[ip] }
  })
  // NOTE(review): arp.table invokes its callback once per ARP entry;
  // only the first resolution is used here — confirm this is intended.
  const entry = await pFromCallback(cb => arp.table(cb))
  if (entry) {
    const brick = info['Bricks'].find(element => element.config.split(':')[0] === entry.ip)
    if (brick) {
      brick.mac = entry.mac
    }
  }
  return info
}

getVolumeInfo.description = 'info on gluster volume'
getVolumeInfo.permission = 'admin'

getVolumeInfo.params = {
  sr: {
    type: 'string'
  }
}

getVolumeInfo.resolve = {
  sr: ['sr', 'SR', 'administrate']
}
/**
 * Round `value` down to the nearest multiple of 2048.
 *
 * @param {number} value
 * @returns {number} the largest multiple of 2048 that is <= `value`
 */
function floor2048 (value) {
  const chunks = Math.floor(value / 2048)
  return chunks * 2048
}
/**
 * Copy `originalVm` to the SR designated by `params.sr`.
 *
 * @returns {Promise<{vm: object, params: object}>} the new VM together
 *   with the untouched `params`, so callers can keep both paired.
 */
async function copyVm (xapi, originalVm, params) {
  const vm = await xapi.copyVm(originalVm, params.sr)
  return { vm, params }
}
// Finalize a copied XOSAN VM: move its VIF onto the gluster network,
// apply its name and xenstore configuration, optionally grow its data
// disk to fill the SR, then boot it and wait until it is reachable.
//
// Returns { address, host, vm }: the VM's gluster IP, the host backing
// its SR and the refreshed VM object.
async function prepareGlusterVm (xapi, vmAndParam, xosanNetwork, increaseDataDisk = true) {
  let vm = vmAndParam.vm
  // refresh the object so that sizes are correct
  const params = vmAndParam.params
  const ip = params.xenstore_data['vm-data/ip']
  const sr = xapi.getObject(params.sr.$id)
  // wait until the SR object exposes its PBDs before dereferencing them
  await xapi._waitObjectState(sr.$id, sr => Boolean(sr.$PBDs))
  const host = sr.$PBDs[0].$host
  const firstVif = vm.$VIFs[0]
  // ensure the first VIF is plugged into the dedicated XOSAN network
  if (xosanNetwork.$id !== firstVif.$network.$id) {
    await xapi.call('VIF.move', firstVif.$ref, xosanNetwork.$ref)
  }
  await xapi.editVm(vm, {
    name_label: params.name_label,
    name_description: params.name_description
  })
  // the guest reads its IP/MTU/VLAN/ssh key from xenstore at boot
  await xapi.call('VM.set_xenstore_data', vm.$ref, params.xenstore_data)
  if (increaseDataDisk) {
    const dataDisk = vm.$VBDs.map(vbd => vbd.$VDI).find(vdi => vdi && vdi.name_label === 'xosan_data')
    const srFreeSpace = sr.physical_size - sr.physical_utilisation
    // we use a percentage because it looks like the VDI overhead is proportional
    const newSize = floor2048((srFreeSpace + dataDisk.virtual_size) * XOSAN_DATA_DISK_USEAGE_RATIO)
    await xapi._resizeVdi(dataDisk, Math.min(newSize, XOSAN_MAX_DISK_SIZE))
  }
  await xapi.startVm(vm)
  debug('waiting for boot of ', ip)
  // wait until we find the assigned IP in the networks, we are just checking the boot is complete
  const vmIsUp = vm => Boolean(vm.$guest_metrics && includes(vm.$guest_metrics.networks, ip))
  vm = await xapi._waitObjectState(vm.$id, vmIsUp)
  debug('booted ', ip)
  return { address: ip, host, vm }
}
/**
 * Invoke a `command` of the `xosan.py` XAPI plugin on `host` and return
 * its JSON-decoded output.
 *
 * @throws {SyntaxError} when the plugin output is not valid JSON.
 */
async function callPlugin (xapi, host, command, params) {
  debug('calling plugin', host.address, command)

  const output = await xapi.call('host.call_plugin', host.$ref, 'xosan.py', command, params)
  return JSON.parse(output)
}
/**
 * Run `cmd` as root over SSH on the VM at `hostAndAddress.address`,
 * tunnelled through the xosan plugin of `hostAndAddress.host`.
 *
 * @returns {Promise<object>} the plugin result (includes `exit`, `stdout`…)
 * @throws {Error} when the remote command exits with a non-zero status.
 */
async function remoteSsh (xapi, hostAndAddress, cmd) {
  const { host, address } = hostAndAddress
  const result = await callPlugin(xapi, host, 'run_ssh', {
    destination: 'root@' + address,
    cmd: cmd
  })

  if (result.exit !== 0) {
    throw new Error('ssh error: ' + JSON.stringify(result))
  }

  debug(result)
  return result
}
/**
 * Statically configure `pif` with `address` on the XOSAN /24 network
 * (netmask 255.255.255.0, gateway <prefix>1, no DNS).
 */
async function setPifIp (xapi, pif, address) {
  const netmask = '255.255.255.0'
  const gateway = NETWORK_PREFIX + '1'
  await xapi.call('PIF.reconfigure_ip', pif.$ref, 'Static', address, netmask, gateway, '')
}
// Create the XOSAN private network (MTU 9000, tagged with `vlan`) on top
// of `pif`, then give each host's PIF on it a static address
// (172.31.100.1, 172.31.100.2, …).
//
// On failure the half-created network is deleted (best effort via
// $onFailure + pCatch).
// NOTE(review): the first host receives <prefix>1, which is also the
// gateway address set by setPifIp — confirm this is intended.
const createNetworkAndInsertHosts = defer.onFailure(async function ($onFailure, xapi, pif, vlan) {
  let hostIpLastNumber = 1
  const xosanNetwork = await xapi.createNetwork({
    name: 'XOSAN network',
    description: 'XOSAN network',
    pifId: pif._xapiId,
    mtu: 9000,
    vlan: +vlan
  })
  $onFailure(() => xapi.deleteNetwork(xosanNetwork)::pCatch(noop))
  await Promise.all(xosanNetwork.$PIFs.map(pif => setPifIp(xapi, pif, NETWORK_PREFIX + (hostIpLastNumber++))))

  return xosanNetwork
})
// Return the SSH key pair stored on the pool, generating it with
// `ssh-keygen` first when none exists yet.
//
// The key files are read from SSH_KEY_FILE (relative path) —
// NOTE(review): this depends on the process' working directory; confirm
// it is stable.
async function getOrCreateSshKey (xapi) {
  let sshKey = xapi.xo.getData(xapi.pool, 'xosan_ssh_key')

  if (!sshKey) {
    // read both halves of the pair and persist them on the pool object
    const readKeys = async () => {
      sshKey = {
        private: await fs.readFile(SSH_KEY_FILE, 'ascii'),
        public: await fs.readFile(SSH_KEY_FILE + '.pub', 'ascii')
      }
      xapi.xo.setData(xapi.pool, 'xosan_ssh_key', sshKey)
    }

    try {
      await readKeys()
    } catch (e) {
      // files missing (or unreadable): generate the pair, then read again
      await execa('ssh-keygen', ['-q', '-f', SSH_KEY_FILE, '-t', 'rsa', '-b', '4096', '-N', ''])
      await readKeys()
    }
  }

  return sshKey
}
// Assemble the gluster cluster over SSH: probe the peers from the first
// VM, create the `xosan` volume with the requested layout, apply the
// tuning options and start it.
//
// `glusterType` is one of 'replica_arbiter', 'replica' or 'disperse';
// `arbiter`, when given, is an extra quorum-only brick appended to the
// brick list. All commands run through `remoteSsh`, in order.
async function configureGluster (redundancy, ipAndHosts, xapi, firstIpAndHost, glusterType, arbiter = null) {
  // per-layout `volume create` arguments and extra tuning commands
  const configByType = {
    replica_arbiter: {
      creation: 'replica 3 arbiter 1',
      extra: []
    },
    replica: {
      creation: 'replica ' + redundancy + ' ',
      extra: ['gluster volume set xosan cluster.data-self-heal on']
    },
    disperse: {
      creation: 'disperse ' + ipAndHosts.length + ' redundancy ' + redundancy + ' ',
      extra: []
    }
  }
  let brickVms = arbiter ? ipAndHosts.concat(arbiter) : ipAndHosts
  // the first VM already knows itself: probe the other peers from it
  for (let i = 1; i < brickVms.length; i++) {
    await remoteSsh(xapi, firstIpAndHost, 'gluster peer probe ' + brickVms[i].address)
  }
  const creation = configByType[glusterType].creation
  const volumeCreation = 'gluster volume create xosan ' + creation +
    ' ' + brickVms.map(ipAndHost => (ipAndHost.address + ':/bricks/xosan/xosandir')).join(' ')
  debug('creating volume: ', volumeCreation)
  await remoteSsh(xapi, firstIpAndHost, volumeCreation)
  // volume tuning, applied one option at a time
  await remoteSsh(xapi, firstIpAndHost, 'gluster volume set xosan network.remote-dio enable')
  await remoteSsh(xapi, firstIpAndHost, 'gluster volume set xosan cluster.eager-lock enable')
  await remoteSsh(xapi, firstIpAndHost, 'gluster volume set xosan performance.io-cache off')
  await remoteSsh(xapi, firstIpAndHost, 'gluster volume set xosan performance.read-ahead off')
  await remoteSsh(xapi, firstIpAndHost, 'gluster volume set xosan performance.quick-read off')
  await remoteSsh(xapi, firstIpAndHost, 'gluster volume set xosan performance.strict-write-ordering off')
  await remoteSsh(xapi, firstIpAndHost, 'gluster volume set xosan client.event-threads 8')
  await remoteSsh(xapi, firstIpAndHost, 'gluster volume set xosan server.event-threads 8')
  await remoteSsh(xapi, firstIpAndHost, 'gluster volume set xosan performance.io-thread-count 64')
  await remoteSsh(xapi, firstIpAndHost, 'gluster volume set xosan performance.stat-prefetch on')
  await remoteSsh(xapi, firstIpAndHost, 'gluster volume set xosan features.shard on')
  await remoteSsh(xapi, firstIpAndHost, 'gluster volume set xosan features.shard-block-size 512MB')
  // layout-specific extra options
  for (const confChunk of configByType[glusterType].extra) {
    await remoteSsh(xapi, firstIpAndHost, confChunk)
  }
  await remoteSsh(xapi, firstIpAndHost, 'gluster volume start xosan')
}
// Create a XOSAN SR backed by a gluster volume spread over the given
// local SRs.
//
// Steps: create the private network, import the XOSAN template VM on the
// first SR and copy it to the others (plus an arbiter VM when only 2 SRs
// are used), prepare/boot every VM, configure gluster, create the
// XenServer SR pointing at the volume, then persist the topology on it.
// Most failure paths clean up after themselves via $onFailure; a per-pool
// lock prevents concurrent runs.
export const createSR = defer.onFailure(async function ($onFailure, { template, pif, vlan, srs, glusterType, redundancy }) {
  if (!this.requestResource) {
    throw new Error('requestResource is not a function')
  }

  if (srs.length < 1) {
    return // TODO: throw an error
  }

  // VM addresses start at <prefix>101 to stay clear of the host range
  let vmIpLastNumber = 101
  const xapi = this.getXapi(srs[0])
  if (CURRENTLY_CREATING_SRS[xapi.pool.$id]) {
    throw new Error('createSR is already running for this pool')
  }
  CURRENTLY_CREATING_SRS[xapi.pool.$id] = true
  try {
    const xosanNetwork = await createNetworkAndInsertHosts(xapi, pif, vlan)
    $onFailure(() => xapi.deleteNetwork(xosanNetwork)::pCatch(noop))
    const sshKey = await getOrCreateSshKey(xapi)
    const srsObjects = map(srs, srId => xapi.getObject(srId))

    // one VM per SR, each with a static IP on the XOSAN network
    const vmParameters = map(srs, srId => {
      const sr = xapi.getObject(srId)
      const host = sr.$PBDs[0].$host
      return {
        sr,
        host,
        name_label: `XOSAN - ${sr.name_label} - ${host.name_label}`,
        name_description: 'Xosan VM storing data on volume ' + sr.name_label,
        // the values of the xenstore_data object *have* to be string, don't forget.
        xenstore_data: {
          'vm-data/hostname': 'XOSAN' + sr.name_label,
          'vm-data/sshkey': sshKey.public,
          'vm-data/ip': NETWORK_PREFIX + (vmIpLastNumber++),
          'vm-data/mtu': String(xosanNetwork.MTU),
          'vm-data/vlan': String(vlan)
        }
      }
    })
    // push the SSH key pair to every involved host so run_ssh works
    await Promise.all(vmParameters.map(vmParam => callPlugin(xapi, vmParam.host, 'receive_ssh_keys', {
      private_key: sshKey.private,
      public_key: sshKey.public,
      force: 'true'
    })))

    // import the template once, then clone it for the other SRs
    // NOTE(review): `srId` is given a `$ref` here while other call sites
    // pass `_xapiId` — confirm importVm accepts both.
    const firstVM = await xapi.importVm(
      await this.requestResource('xosan', template.id, template.version),
      { srId: vmParameters[0].sr.$ref, type: 'xva' }
    )
    $onFailure(() => xapi.deleteVm(firstVM, true)::pCatch(noop))
    await xapi.editVm(firstVM, {
      autoPoweron: true
    })
    const copiedVms = await Promise.all(vmParameters.slice(1).map(param => copyVm(xapi, firstVM, param)))
    // TODO: Promise.all() is certainly not the right operation to execute all the given promises whether they fulfill or reject.
    $onFailure(() => Promise.all(copiedVms.map(vm => xapi.deleteVm(vm.vm, true)))::pCatch(noop))
    const vmsAndParams = [{
      vm: firstVM,
      params: vmParameters[0]
    }].concat(copiedVms)
    let arbiter = null
    // with only 2 data bricks an arbiter brick is needed to keep quorum
    if (srs.length === 2) {
      const sr = vmParameters[0].sr
      const arbiterConfig = {
        sr: sr,
        host: vmParameters[0].host,
        name_label: vmParameters[0].name_label + ' arbiter',
        name_description: 'Xosan VM storing data on volume ' + sr.name_label,
        xenstore_data: {
          'vm-data/hostname': 'XOSAN' + sr.name_label + '_arb',
          'vm-data/sshkey': sshKey.public,
          'vm-data/ip': NETWORK_PREFIX + (vmIpLastNumber++),
          'vm-data/mtu': String(xosanNetwork.MTU),
          'vm-data/vlan': String(vlan)
        }
      }
      const arbiterVm = await copyVm(xapi, firstVM, arbiterConfig)
      $onFailure(() => xapi.deleteVm(arbiterVm.vm, true)::pCatch(noop))
      // the arbiter's data disk is NOT grown (last argument false)
      arbiter = await prepareGlusterVm(xapi, arbiterVm, xosanNetwork, false)
    }
    const ipAndHosts = await Promise.all(map(vmsAndParams, vmAndParam => prepareGlusterVm(xapi, vmAndParam, xosanNetwork)))
    const firstIpAndHost = ipAndHosts[0]
    await configureGluster(redundancy, ipAndHosts, xapi, firstIpAndHost, glusterType, arbiter)
    debug('xosan gluster volume started')
    // NOTE(review): `ipAndHosts[1]` assumes at least 2 bricks — with a
    // single SR this is undefined; confirm callers enforce srs.length >= 2.
    const config = { server: firstIpAndHost.address + ':/xosan', backupserver: ipAndHosts[1].address }
    const xosanSr = await xapi.call('SR.create', srsObjects[0].$PBDs[0].$host.$ref, config, 0, 'XOSAN', 'XOSAN', 'xosan', '', true, {})
    if (arbiter) {
      ipAndHosts.push(arbiter)
    }
    // we just forget because the cleanup actions will be executed before.
    $onFailure(() => xapi.forgetSr(xosanSr)::pCatch(noop))
    // persist the cluster topology on the SR (consumed by getVolumeInfo)
    await xapi.xo.setData(xosanSr, 'xosan_config', {
      nodes: ipAndHosts.map(param => ({
        host: param.host.$id,
        vm: { id: param.vm.$id, ip: param.address }
      })),
      network: xosanNetwork.$id
    })
  } finally {
    // always release the per-pool lock
    delete CURRENTLY_CREATING_SRS[xapi.pool.$id]
  }
})

createSR.description = 'create gluster VM'
createSR.permission = 'admin'

createSR.params = {
  srs: {
    type: 'array',
    items: {
      type: 'string'
    }
  },
  pif: {
    type: 'string'
  },
  vlan: {
    type: 'string'
  },
  glusterType: {
    type: 'string'
  },
  redundancy: {
    type: 'number'
  }
}

createSR.resolve = {
  srs: ['sr', 'SR', 'administrate'],
  pif: ['pif', 'PIF', 'administrate']
}
/**
 * Report whether a XOSAN SR is currently being created on the given pool.
 *
 * Reads the module-level CURRENTLY_CREATING_SRS lock table maintained by
 * `createSR`.
 *
 * @param {{poolId: string}} params
 * @returns {boolean}
 */
export function checkSrIsBusy ({ poolId }) {
  return !!CURRENTLY_CREATING_SRS[poolId]
}
// fix: "curently" -> "currently" in the user-facing description
checkSrIsBusy.description = 'checks if there is a xosan SR currently being created on the given pool id'
checkSrIsBusy.permission = 'admin'
checkSrIsBusy.params = { poolId: { type: 'string' } }
// Supported gluster layouts, indexed by the number of participating SRs.
// For each entry, `capacity` is the number of bricks' worth of usable
// space and `redundancy` the number of bricks that may be lost.
const POSSIBLE_CONFIGURATIONS = {
  2: [{ layout: 'replica_arbiter', redundancy: 3, capacity: 1 }],
  3: [
    { layout: 'disperse', redundancy: 1, capacity: 2 },
    { layout: 'replica', redundancy: 3, capacity: 1 }
  ],
  4: [{ layout: 'replica', redundancy: 2, capacity: 1 }],
  5: [{ layout: 'disperse', redundancy: 1, capacity: 4 }],
  6: [
    { layout: 'disperse', redundancy: 2, capacity: 4 },
    { layout: 'replica', redundancy: 2, capacity: 3 },
    { layout: 'replica', redundancy: 3, capacity: 2 }
  ],
  7: [{ layout: 'disperse', redundancy: 3, capacity: 4 }],
  8: [{ layout: 'replica', redundancy: 2, capacity: 4 }],
  9: [
    { layout: 'disperse', redundancy: 1, capacity: 8 },
    { layout: 'replica', redundancy: 3, capacity: 3 }
  ],
  10: [
    { layout: 'disperse', redundancy: 2, capacity: 8 },
    { layout: 'replica', redundancy: 2, capacity: 5 }
  ],
  11: [{ layout: 'disperse', redundancy: 3, capacity: 8 }],
  12: [
    { layout: 'disperse', redundancy: 4, capacity: 8 },
    { layout: 'replica', redundancy: 2, capacity: 6 }
  ],
  13: [{ layout: 'disperse', redundancy: 5, capacity: 8 }],
  14: [
    { layout: 'disperse', redundancy: 6, capacity: 8 },
    { layout: 'replica', redundancy: 2, capacity: 7 }
  ],
  15: [
    { layout: 'disperse', redundancy: 7, capacity: 8 },
    { layout: 'replica', redundancy: 3, capacity: 5 }
  ],
  16: [{ layout: 'replica', redundancy: 2, capacity: 8 }]
}
/**
 * List the XOSAN configurations achievable with the given LVM SRs, along
 * with the usable space each would provide.
 *
 * @param {{lvmSrs: string[]}} params - ids of the candidate SRs
 * @returns {Promise<Array|null>} possible configurations augmented with
 *   `availableSpace`, or `null` when no layout exists for that SR count.
 */
export async function computeXosanPossibleOptions ({ lvmSrs }) {
  const count = lvmSrs.length
  const configurations = POSSIBLE_CONFIGURATIONS[count]
  if (!configurations) {
    return null
  }

  if (count > 0) {
    const xapi = this.getXapi(lvmSrs[0])
    // the brick size is bounded by the SR with the least free space
    const freeSpaces = lvmSrs.map(srId => {
      const sr = xapi.getObject(srId)
      return sr.physical_size - sr.physical_utilisation
    })
    const minSize = Math.min(...freeSpaces)
    const brickSize = (minSize - XOSAN_VM_SYSTEM_DISK_SIZE) * XOSAN_DATA_DISK_USEAGE_RATIO
    return configurations.map(conf => ({ ...conf, availableSpace: brickSize * conf.capacity }))
  }
}

computeXosanPossibleOptions.params = {
  lvmSrs: {
    type: 'array',
    items: {
      type: 'string'
    }
  }
}
// ---------------------------------------------------------------------
/**
 * Fetch the XOSAN supplemental pack from the updates service and install
 * it on every host of the pool.
 *
 * @param {{id: string, version: string, pool: object}} params
 * @throws {Error} when the cloud plugin (`requestResource`) is not loaded.
 */
export async function downloadAndInstallXosanPack ({ id, version, pool }) {
  // the cloud plugin provides requestResource; bail out early without it
  if (!this.requestResource) {
    throw new Error('requestResource is not a function')
  }

  const xapi = this.getXapi(pool.id)
  const pack = await this.requestResource('xosan', id, version)

  return xapi.installSupplementalPackOnAllHosts(pack)
}

downloadAndInstallXosanPack.description = 'Register a resource via cloud plugin'

downloadAndInstallXosanPack.params = {
  id: { type: 'string' },
  version: { type: 'string' },
  pool: { type: 'string' }
}

downloadAndInstallXosanPack.resolve = {
  pool: ['pool', 'pool', 'administrate']
}

downloadAndInstallXosanPack.permission = 'admin'

View File

@@ -33,10 +33,6 @@ export default class Collection extends EventEmitter {
})
}
constructor () {
super()
}
async add (models, opts) {
const array = isArray(models)
if (!array) {

View File

@@ -1,8 +1,9 @@
import Collection, {ModelAlreadyExists} from '../collection'
import difference from 'lodash.difference'
import filter from 'lodash.filter'
import getKey from 'lodash.keys'
import difference from 'lodash/difference'
import filter from 'lodash/filter'
import getKey from 'lodash/keys'
import {createClient as createRedisClient} from 'redis'
import {v4 as generateUuid} from 'uuid'
import {
forEach,
@@ -35,13 +36,13 @@ export default class Redis extends Collection {
connection,
indexes = [],
prefix,
uri = 'tcp://localhost:6379'
uri
}) {
super()
this.indexes = indexes
this.prefix = prefix
this.redis = promisifyAll.call(connection || createRedisClient(uri))
this.redis = promisifyAll(connection || createRedisClient(uri))
}
_extract (ids) {
@@ -68,12 +69,12 @@ export default class Redis extends Collection {
// TODO: remove “replace” which is a temporary measure, implement
// “set()” instead.
const {indexes, prefix, redis, idPrefix = ''} = this
const {indexes, prefix, redis} = this
return Promise.all(mapToArray(models, async model => {
// Generate a new identifier if necessary.
if (model.id === undefined) {
model.id = idPrefix + String(await redis.incr(prefix + '_id'))
model.id = generateUuid()
}
const success = await redis.sadd(prefix + '_ids', model.id)
@@ -149,6 +150,10 @@ export default class Redis extends Collection {
}
_remove (ids) {
if (isEmpty(ids)) {
return
}
const {prefix, redis} = this
// TODO: handle indexes.

View File

@@ -1,69 +1,19 @@
import bind from 'lodash.bind'
import { getBoundPropertyDescriptor } from 'bind-property-descriptor'
import {
isArray,
isPromise,
isFunction,
noop,
pFinally
isFunction
} from './utils'
// ===================================================================
const {
defineProperties,
defineProperty,
getOwnPropertyDescriptor
} = Object
// ===================================================================
// See: https://github.com/jayphelps/core-decorators.js#autobind
//
// TODO: make it work for all class methods.
export const autobind = (target, key, {
configurable,
enumerable,
value: fn,
writable
}) => ({
configurable,
enumerable,
get () {
const bounded = bind(fn, this)
defineProperty(this, key, {
configurable: true,
enumerable: false,
value: bounded,
writable: true
})
return bounded
},
set (newValue) {
if (this === target) {
// New value directly set on the prototype.
delete this[key]
this[key] = newValue
} else {
// New value set on a child object.
// Cannot use assignment because it will call the setter on
// the prototype.
defineProperty(this, key, {
configurable: true,
enumerable: true,
value: newValue,
writable: true
})
}
}
})
// -------------------------------------------------------------------
// Debounce decorator for methods.
//
// See: https://github.com/wycats/javascript-decorators
@@ -74,7 +24,7 @@ export const debounce = duration => (target, name, descriptor) => {
// This symbol is used to store the related data directly on the
// current object.
const s = Symbol()
const s = Symbol(`debounced ${name} data`)
function debounced () {
const data = this[s] || (this[s] = {
@@ -102,119 +52,8 @@ export const debounce = duration => (target, name, descriptor) => {
// -------------------------------------------------------------------
const _push = Array.prototype.push
export const deferrable = (target, name, descriptor) => {
let fn
function newFn () {
const deferreds = []
const defer = fn => {
deferreds.push(fn)
}
defer.clear = () => {
deferreds.length = 0
}
const args = [ defer ]
_push.apply(args, arguments)
let executeDeferreds = () => {
let i = deferreds.length
while (i) {
deferreds[--i]()
}
}
try {
const result = fn.apply(this, args)
if (isPromise(result)) {
result::pFinally(executeDeferreds)
// Do not execute the deferreds in the finally block.
executeDeferreds = noop
}
return result
} finally {
executeDeferreds()
}
}
if (descriptor) {
fn = descriptor.value
descriptor.value = newFn
return descriptor
}
fn = target
return newFn
}
// Deferred functions are only executed on failures.
//
// i.e.: defer.clear() is automatically called in case of success.
deferrable.onFailure = (target, name, descriptor) => {
let fn
function newFn (defer) {
const result = fn.apply(this, arguments)
return isPromise(result)
? result.then(result => {
defer.clear()
return result
})
: (defer.clear(), result)
}
if (descriptor) {
fn = descriptor.value
descriptor.value = newFn
} else {
fn = target
target = newFn
}
return deferrable(target, name, descriptor)
}
// Deferred functions are only executed on success.
//
// i.e.: defer.clear() is automatically called in case of failure.
deferrable.onSuccess = (target, name, descriptor) => {
let fn
function newFn (defer) {
try {
const result = fn.apply(this, arguments)
return isPromise(result)
? result.then(null, error => {
defer.clear()
throw error
})
: result
} catch (error) {
defer.clear()
throw error
}
}
if (descriptor) {
fn = descriptor.value
descriptor.value = newFn
} else {
fn = target
target = newFn
}
return deferrable(target, name, descriptor)
}
// -------------------------------------------------------------------
const _ownKeys = (
typeof Reflect !== 'undefined' && Reflect.ownKeys ||
(typeof Reflect !== 'undefined' && Reflect.ownKeys) ||
(({
getOwnPropertyNames: names,
getOwnPropertySymbols: symbols
@@ -224,22 +63,6 @@ const _ownKeys = (
)(Object)
)
const _bindPropertyDescriptor = (descriptor, thisArg) => {
const { get, set, value } = descriptor
if (get) {
descriptor.get = bind(get, thisArg)
}
if (set) {
descriptor.set = bind(set, thisArg)
}
if (isFunction(value)) {
descriptor.value = bind(value, thisArg)
}
return descriptor
}
const _isIgnoredProperty = name => (
name[0] === '_' ||
name === 'constructor'
@@ -263,7 +86,32 @@ export const mixin = MixIns => Class => {
const { name } = Class
const Decorator = (...args) => {
// Copy properties of plain object mix-ins to the prototype.
{
const allMixIns = MixIns
MixIns = []
const { prototype } = Class
const descriptors = { __proto__: null }
for (const MixIn of allMixIns) {
if (isFunction(MixIn)) {
MixIns.push(MixIn)
continue
}
for (const prop of _ownKeys(MixIn)) {
if (prop in prototype) {
throw new Error(`${name}#${prop} is already defined`)
}
(
descriptors[prop] = getOwnPropertyDescriptor(MixIn, prop)
).enumerable = false // Object methods are enumerable but class methods are not.
}
}
defineProperties(prototype, descriptors)
}
function Decorator (...args) {
const instance = new Class(...args)
for (const MixIn of MixIns) {
@@ -279,8 +127,9 @@ export const mixin = MixIns => Class => {
throw new Error(`${name}#${prop} is already defined`)
}
descriptors[prop] = _bindPropertyDescriptor(
getOwnPropertyDescriptor(prototype, prop),
descriptors[prop] = getBoundPropertyDescriptor(
prototype,
prop,
mixinInstance
)
}

View File

@@ -1,48 +1,9 @@
/* eslint-env mocha */
/* eslint-env jest */
import expect from 'must'
import {debounce} from './decorators'
// ===================================================================
import {autobind, debounce, deferrable} from './decorators'
// ===================================================================
describe('autobind()', () => {
class Foo {
@autobind
getFoo () {
return this
}
}
it('returns a bound instance for a method', () => {
const foo = new Foo()
const { getFoo } = foo
expect(getFoo()).to.equal(foo)
})
it('returns the same bound instance each time', () => {
const foo = new Foo()
expect(foo.getFoo).to.equal(foo.getFoo)
})
it('works with multiple instances of the same class', () => {
const foo1 = new Foo()
const foo2 = new Foo()
const getFoo1 = foo1.getFoo
const getFoo2 = foo2.getFoo
expect(getFoo1()).to.equal(foo1)
expect(getFoo2()).to.equal(foo2)
})
})
// -------------------------------------------------------------------
describe('debounce()', () => {
let i
@@ -60,114 +21,19 @@ describe('debounce()', () => {
it('works', done => {
const foo = new Foo()
expect(i).to.equal(0)
expect(i).toBe(0)
foo.foo()
expect(i).to.equal(1)
expect(i).toBe(1)
foo.foo()
expect(i).to.equal(1)
expect(i).toBe(1)
setTimeout(() => {
foo.foo()
expect(i).to.equal(2)
expect(i).toBe(2)
done()
}, 2e1)
})
})
// -------------------------------------------------------------------
describe('deferrable()', () => {
it('works with normal termination', () => {
let i = 0
const fn = deferrable(defer => {
i += 2
defer(() => { i -= 2 })
i *= 2
defer(() => { i /= 2 })
return i
})
expect(fn()).to.equal(4)
expect(i).to.equal(0)
})
it('defer.clear() removes previous deferreds', () => {
let i = 0
const fn = deferrable(defer => {
i += 2
defer(() => { i -= 2 })
defer.clear()
i *= 2
defer(() => { i /= 2 })
return i
})
expect(fn()).to.equal(4)
expect(i).to.equal(2)
})
it('works with exception', () => {
let i = 0
const fn = deferrable(defer => {
i += 2
defer(() => { i -= 2 })
i *= 2
defer(() => { i /= 2 })
throw i
})
expect(() => fn()).to.throw(4)
expect(i).to.equal(0)
})
it('works with promise resolution', async () => {
let i = 0
const fn = deferrable(async defer => {
i += 2
defer(() => { i -= 2 })
i *= 2
defer(() => { i /= 2 })
// Wait a turn of the events loop.
await Promise.resolve()
return i
})
await expect(fn()).to.eventually.equal(4)
expect(i).to.equal(0)
})
it('works with promise rejection', async () => {
let i = 0
const fn = deferrable(async defer => {
// Wait a turn of the events loop.
await Promise.resolve()
i += 2
defer(() => { i -= 2 })
i *= 2
defer(() => { i /= 2 })
// Wait a turn of the events loop.
await Promise.resolve()
throw i
})
await expect(fn()).to.reject.to.equal(4)
expect(i).to.equal(0)
})
})

View File

@@ -20,14 +20,15 @@ import { boot16 as fat16 } from 'fatfs/structs'
const SECTOR_SIZE = 512
const TEN_MIB = 10 * 1024 * 1024
// Creates a 10MB buffer and initializes it as a FAT 16 volume.
export function init () {
const buf = new Buffer(10 * 1024 * 1024) // 10MB
buf.fill(0)
const buf = Buffer.alloc(TEN_MIB)
// https://github.com/natevw/fatfs/blob/master/structs.js
fat16.pack({
jmpBoot: new Buffer('eb3c90', 'hex'),
jmpBoot: Buffer.from('eb3c90', 'hex'),
OEMName: 'mkfs.fat',
BytsPerSec: SECTOR_SIZE,
SecPerClus: 4,

View File

@@ -1,27 +1,23 @@
import assign from 'lodash.assign'
import getStream from 'get-stream'
import startsWith from 'lodash.startswith'
import { parse as parseUrl } from 'url'
import isRedirect from 'is-redirect'
import { assign, isString, startsWith } from 'lodash'
import { cancellable } from 'promise-toolbox'
import { request as httpRequest } from 'http'
import { request as httpsRequest } from 'https'
import { stringify as formatQueryString } from 'querystring'
import {
isString
} from './utils'
format as formatUrl,
parse as parseUrl,
resolve as resolveUrl
} from 'url'
import { streamToBuffer } from './utils'
// -------------------------------------------------------------------
export default (...args) => {
const raw = opts => {
let req
const pResponse = new Promise((resolve, reject) => {
const opts = {}
for (let i = 0, length = args.length; i < length; ++i) {
const arg = args[i]
assign(opts, isString(arg) ? parseUrl(arg) : arg)
}
const {
body,
headers: { ...headers } = {},
@@ -62,11 +58,16 @@ export default (...args) => {
}
}
req = (
protocol && startsWith(protocol.toLowerCase(), 'https')
? httpsRequest
: httpRequest
)({
const secure = protocol && startsWith(protocol.toLowerCase(), 'https')
let requestFn
if (secure) {
requestFn = httpsRequest
} else {
requestFn = httpRequest
delete rest.rejectUnauthorized
}
req = requestFn({
...rest,
headers
})
@@ -90,7 +91,7 @@ export default (...args) => {
response.cancel = () => {
req.abort()
}
response.readAll = () => getStream(response)
response.readAll = () => streamToBuffer(response)
const length = response.headers['content-length']
if (length) {
@@ -98,6 +99,11 @@ export default (...args) => {
}
const code = response.statusCode
const { location } = response.headers
if (isRedirect(code) && location) {
assign(opts, parseUrl(resolveUrl(formatUrl(opts), location)))
return raw(opts)
}
if (code < 200 || code >= 300) {
const error = new Error(response.statusMessage)
error.code = code
@@ -112,13 +118,27 @@ export default (...args) => {
return response
})
pResponse.cancel = () => {
req.emit('error', new Error('HTTP request canceled!'))
req.abort()
}
pResponse.readAll = () => pResponse.then(response => response.readAll())
pResponse.request = req
return pResponse
}
const httpRequestPlus = ($cancelToken, ...args) => {
const opts = {}
for (let i = 0, length = args.length; i < length; ++i) {
const arg = args[i]
assign(opts, isString(arg) ? parseUrl(arg) : arg)
}
const pResponse = raw(opts)
$cancelToken.promise.then(() => {
const { request } = pResponse
request.emit('error', new Error('HTTP request canceled!'))
request.abort()
})
pResponse.readAll = () => pResponse.then(response => response.readAll())
return pResponse
}
export default cancellable(httpRequestPlus)

View File

@@ -1,39 +1,29 @@
import createLogger from 'debug'
const debug = createLogger('xo:main')
import appConf from 'app-conf'
import bind from 'lodash.bind'
import bind from 'lodash/bind'
import blocked from 'blocked'
import createExpress from 'express'
import createLogger from 'debug'
import eventToPromise from 'event-to-promise'
import has from 'lodash.has'
import has from 'lodash/has'
import helmet from 'helmet'
import includes from 'lodash.includes'
import pick from 'lodash.pick'
import includes from 'lodash/includes'
import proxyConsole from './proxy-console'
import proxyRequest from 'proxy-http-request'
import serveStatic from 'serve-static'
import startsWith from 'lodash.startswith'
import startsWith from 'lodash/startsWith'
import WebSocket from 'ws'
import {compile as compileJade} from 'jade'
import { compile as compilePug } from 'pug'
import { createServer as createProxyServer } from 'http-proxy'
import { join as joinPath } from 'path'
import {
AlreadyAuthenticated,
InvalidCredential,
InvalidParameters,
NoSuchObject,
NotImplemented
} from './api-errors'
import JsonRpcPeer from 'json-rpc-peer'
import { invalidCredentials } from 'xo-common/api-errors'
import {
readFile,
readdir
ensureDir,
readdir,
readFile
} from 'fs-promise'
import * as apiMethods from './api/index'
import Api from './api'
import WebServer from 'http-server-plus'
import wsProxy from './ws-proxy'
import Xo from './xo'
import {
setup as setupHttpProxy
@@ -57,6 +47,8 @@ import { Strategy as LocalStrategy } from 'passport-local'
// ===================================================================
const debug = createLogger('xo:main')
const warn = (...args) => {
console.warn('[Warn]', ...args)
}
@@ -129,8 +121,8 @@ async function setUpPassport (express, xo) {
}
// Registers the sign in form.
const signInPage = compileJade(
await readFile(__dirname + '/../signin.jade')
const signInPage = compilePug(
await readFile(joinPath(__dirname, '..', 'signin.pug'))
)
express.get('/signin', (req, res, next) => {
res.send(signInPage({
@@ -139,9 +131,15 @@ async function setUpPassport (express, xo) {
}))
})
express.get('/signout', (req, res) => {
res.clearCookie('token')
res.redirect('/')
})
const SIGNIN_STRATEGY_RE = /^\/signin\/([^/]+)(\/callback)?(:?\?.*)?$/
express.use(async (req, res, next) => {
const matches = req.url.match(SIGNIN_STRATEGY_RE)
const { url } = req
const matches = url.match(SIGNIN_STRATEGY_RE)
if (matches) {
return passport.authenticate(matches[1], async (err, user, info) => {
@@ -167,7 +165,7 @@ async function setUpPassport (express, xo) {
matches[1] === 'local' && req.body['remember-me'] === 'on'
)
res.redirect('/')
res.redirect(req.flash('return-url')[0] || '/')
})(req, res, next)
}
@@ -187,9 +185,10 @@ async function setUpPassport (express, xo) {
next()
} else if (req.cookies.token) {
next()
} else if (/favicon|fontawesome|images|styles/.test(req.url)) {
} else if (/favicon|fontawesome|images|styles|\.(?:css|jpg|png)$/.test(url)) {
next()
} else {
req.flash('return-url', url)
return res.redirect('/signin')
}
})
@@ -222,19 +221,28 @@ async function registerPlugin (pluginPath, pluginName) {
// Supports both “normal” CommonJS and Babel's ES2015 modules.
const {
default: factory = plugin,
configurationSchema
configurationSchema,
configurationPresets,
testSchema
} = plugin
// The default export can be either a factory or directly a plugin
// instance.
const instance = isFunction(factory)
? factory({ xo: this })
? factory({
xo: this,
getDataDir: () => {
const dir = `${this._config.datadir}/${pluginName}`
return ensureDir(dir).then(() => dir)
}})
: factory
await this.registerPlugin(
pluginName,
instance,
configurationSchema,
configurationPresets,
testSchema,
version
)
}
@@ -337,13 +345,29 @@ const setUpProxies = (express, opts, xo) => {
return
}
const proxy = createProxyServer({
ignorePath: true
}).on('error', (error) => console.error(error))
// TODO: sort proxies by descending prefix length.
// HTTP request proxy.
forEach(opts, (target, url) => {
express.use(url, (req, res) => {
proxyRequest(target + req.url, req, res)
})
express.use((req, res, next) => {
const { url } = req
for (const prefix in opts) {
if (startsWith(url, prefix)) {
const target = opts[prefix]
proxy.web(req, res, {
target: target + url.slice(prefix.length)
})
return
}
}
next()
})
// WebSocket proxy.
@@ -353,14 +377,16 @@ const setUpProxies = (express, opts, xo) => {
xo.on('stop', () => pFromCallback(cb => webSocketServer.close(cb)))
express.on('upgrade', (req, socket, head) => {
const {url} = req
const { url } = req
for (let prefix in opts) {
if (url.lastIndexOf(prefix, 0) !== -1) {
const target = opts[prefix] + url.slice(prefix.length)
webSocketServer.handleUpgrade(req, socket, head, socket => {
wsProxy(socket, target)
for (const prefix in opts) {
if (startsWith(url, prefix)) {
const target = opts[prefix]
proxy.ws(req, socket, head, {
target: target + url.slice(prefix.length)
})
return
}
}
@@ -385,47 +411,13 @@ const setUpStaticFiles = (express, opts) => {
// ===================================================================
const errorClasses = {
ALREADY_AUTHENTICATED: AlreadyAuthenticated,
INVALID_CREDENTIAL: InvalidCredential,
INVALID_PARAMS: InvalidParameters,
NO_SUCH_OBJECT: NoSuchObject,
NOT_IMPLEMENTED: NotImplemented
}
const apiHelpers = {
getUserPublicProperties (user) {
// Handles both properties and wrapped models.
const properties = user.properties || user
return pick(properties, 'id', 'email', 'groups', 'permission', 'provider')
},
throw (errorId, data) {
throw new (errorClasses[errorId])(data)
}
}
const setUpApi = (webServer, xo, verboseLogsOnErrors) => {
const webSocketServer = new WebSocket.Server({
server: webServer,
path: '/api/'
noServer: true
})
xo.on('stop', () => pFromCallback(cb => webSocketServer.close(cb)))
// FIXME: it can cause issues if there any property assignments in
// XO methods called from the API.
const context = { __proto__: xo, ...apiHelpers }
const api = new Api({
context,
verboseLogsOnErrors
})
xo.defineProperty('api', api)
api.addMethods(apiMethods)
webSocketServer.on('connection', socket => {
const onConnection = socket => {
const { remoteAddress } = socket.upgradeReq.socket
debug('+ WebSocket connection (%s)', remoteAddress)
@@ -439,7 +431,7 @@ const setUpApi = (webServer, xo, verboseLogsOnErrors) => {
// Create the JSON-RPC server for this connection.
const jsonRpc = new JsonRpcPeer(message => {
if (message.type === 'request') {
return api.call(connection, message.method, message.params)
return xo.callApiMethod(connection, message.method, message.params)
}
})
connection.notify = bind(jsonRpc.notify, jsonRpc)
@@ -468,6 +460,11 @@ const setUpApi = (webServer, xo, verboseLogsOnErrors) => {
socket.send(data, onSend)
}
})
}
webServer.on('upgrade', (req, socket, head) => {
if (req.url === '/api/') {
webSocketServer.handleUpgrade(req, socket, head, onConnection)
}
})
}
@@ -494,8 +491,8 @@ const setUpConsoleProxy = (webServer, xo) => {
const { token } = parseCookies(req.headers.cookie)
const user = await xo.authenticateUser({ token })
if (!await xo.hasPermissions(user.id, [ [ id, 'operate' ] ])) { // eslint-disable-line space-before-keywords
throw new InvalidCredential()
if (!await xo.hasPermissions(user.id, [ [ id, 'operate' ] ])) {
throw invalidCredentials()
}
const { remoteAddress } = socket
@@ -512,8 +509,8 @@ const setUpConsoleProxy = (webServer, xo) => {
webSocketServer.handleUpgrade(req, socket, head, connection => {
proxyConsole(connection, vmConsole, xapi.sessionId)
})
} catch (_) {
console.error(_)
} catch (error) {
console.error((error && error.stack) || error)
}
})
}
@@ -628,16 +625,24 @@ export default async function main (args) {
await registerPlugins(xo)
}
// Gracefully shutdown on signals.
//
// TODO: implements a timeout? (or maybe it is the services launcher
// responsibility?)
const shutdown = signal => {
debug('%s caught, closing…', signal)
xo.stop()
}
forEach([ 'SIGINT', 'SIGTERM' ], signal => {
let alreadyCalled = false
// Gracefully shutdown on signals.
process.on('SIGINT', () => shutdown('SIGINT'))
process.on('SIGTERM', () => shutdown('SIGTERM'))
process.on(signal, () => {
if (alreadyCalled) {
warn('forced exit')
process.exit(1)
}
alreadyCalled = true
debug('%s caught, closing…', signal)
xo.stop()
})
})
await eventToPromise(xo, 'stopped')

View File

@@ -1,9 +1,20 @@
import assign from 'lodash.assign'
import {BaseError} from 'make-error'
import assign from 'lodash/assign'
import Bluebird from 'bluebird'
import every from 'lodash/every'
import filter from 'lodash/filter'
import isArray from 'lodash/isArray'
import isPlainObject from 'lodash/isPlainObject'
import map from 'lodash/map'
import mapValues from 'lodash/mapValues'
import size from 'lodash/size'
import some from 'lodash/some'
import { BaseError } from 'make-error'
import { timeout } from 'promise-toolbox'
import { crossProduct } from './math'
import {
createRawObject,
forEach
serializeError,
thunkToArray
} from './utils'
export class JobExecutorError extends BaseError {}
@@ -18,30 +29,67 @@ export class UnsupportedVectorType extends JobExecutorError {
}
}
export const productParams = (...args) => {
let product = createRawObject()
assign(product, ...args)
return product
// ===================================================================
const match = (pattern, value) => {
if (isPlainObject(pattern)) {
if (size(pattern) === 1) {
if (pattern.__or) {
return some(pattern.__or, subpattern => match(subpattern, value))
}
if (pattern.__not) {
return !match(pattern.__not, value)
}
}
return isPlainObject(value) && every(pattern, (subpattern, key) => (
value[key] !== undefined && match(subpattern, value[key])
))
}
if (isArray(pattern)) {
return isArray(value) && every(pattern, subpattern =>
some(value, subvalue => match(subpattern, subvalue))
)
}
return pattern === value
}
export function _computeCrossProduct (items, productCb, extractValueMap = {}) {
const upstreamValues = []
const itemsCopy = items.slice()
const item = itemsCopy.pop()
const values = extractValueMap[item.type] && extractValueMap[item.type](item) || item
forEach(values, value => {
if (itemsCopy.length) {
let downstreamValues = _computeCrossProduct(itemsCopy, productCb, extractValueMap)
forEach(downstreamValues, downstreamValue => {
upstreamValues.push(productCb(value, downstreamValue))
const paramsVectorActionsMap = {
extractProperties ({ mapping, value }) {
return mapValues(mapping, key => value[key])
},
crossProduct ({ items }) {
return thunkToArray(crossProduct(
map(items, value => resolveParamsVector.call(this, value))
))
},
fetchObjects ({ pattern }) {
return filter(this.xo.getObjects(), object => match(pattern, object))
},
map ({ collection, iteratee, paramName = 'value' }) {
return map(resolveParamsVector.call(this, collection), value => {
return resolveParamsVector.call(this, {
...iteratee,
[paramName]: value
})
} else {
upstreamValues.push(value)
}
})
return upstreamValues
})
},
set: ({ values }) => values
}
export function resolveParamsVector (paramsVector) {
const visitor = paramsVectorActionsMap[paramsVector.type]
if (!visitor) {
throw new Error(`Unsupported function '${paramsVector.type}'.`)
}
return visitor.call(this, paramsVector)
}
// ===================================================================
export default class JobExecutor {
constructor (xo) {
this.xo = xo
@@ -76,30 +124,24 @@ export default class JobExecutor {
event: 'job.end',
runJobId
})
} catch (e) {
} catch (error) {
this._logger.error(`The execution of ${job.id} has failed.`, {
event: 'job.end',
runJobId,
error: e
error: serializeError(error)
})
throw error
}
}
async _execCall (job, runJobId) {
let paramsFlatVector
if (job.paramsVector) {
if (job.paramsVector.type === 'crossProduct') {
paramsFlatVector = _computeCrossProduct(job.paramsVector.items, productParams, this._extractValueCb)
} else {
throw new UnsupportedVectorType(job.paramsVector)
}
} else {
paramsFlatVector = [{}] // One call with no parameters
}
const { paramsVector } = job
const paramsFlatVector = paramsVector
? resolveParamsVector.call(this, paramsVector)
: [{}] // One call with no parameters
const connection = this.xo.createUserConnection()
const promises = []
connection.set('user_id', job.userId)
@@ -109,7 +151,7 @@ export default class JobExecutor {
calls: {}
}
forEach(paramsFlatVector, params => {
await Bluebird.map(paramsFlatVector, params => {
const runCallId = this._logger.notice(`Starting ${job.method} call. (${job.id})`, {
event: 'jobCall.start',
runJobId,
@@ -122,37 +164,40 @@ export default class JobExecutor {
params,
start: Date.now()
}
let promise = this.xo.callApiMethod(connection, job.method, assign({}, params))
if (job.timeout) {
promise = promise::timeout(job.timeout)
}
promises.push(
this.xo.api.call(connection, job.method, assign({}, params)).then(
value => {
this._logger.notice(`Call ${job.method} (${runCallId}) is a success. (${job.id})`, {
event: 'jobCall.end',
runJobId,
runCallId,
returnedValue: value
})
return promise.then(
value => {
this._logger.notice(`Call ${job.method} (${runCallId}) is a success. (${job.id})`, {
event: 'jobCall.end',
runJobId,
runCallId,
returnedValue: value
})
call.returnedValue = value
call.end = Date.now()
},
reason => {
this._logger.notice(`Call ${job.method} (${runCallId}) has failed. (${job.id})`, {
event: 'jobCall.end',
runJobId,
runCallId,
error: reason
})
call.returnedValue = value
call.end = Date.now()
},
reason => {
this._logger.notice(`Call ${job.method} (${runCallId}) has failed. (${job.id})`, {
event: 'jobCall.end',
runJobId,
runCallId,
error: serializeError(reason)
})
call.error = reason
call.end = Date.now()
}
)
call.error = reason
call.end = Date.now()
}
)
}, {
concurrency: 2
})
connection.close()
await Promise.all(promises)
execStatus.end = Date.now()
return execStatus

View File

@@ -1,69 +1,100 @@
/* eslint-env mocha */
/* eslint-env jest */
import {expect} from 'chai'
import leche from 'leche'
import { forEach } from 'lodash'
import { resolveParamsVector } from './job-executor'
import {productParams} from './job-executor'
import {_computeCrossProduct} from './job-executor'
describe('resolveParamsVector', function () {
forEach({
'cross product with three sets': [
// Expected result.
[ { id: 3, value: 'foo', remote: 'local' },
{ id: 7, value: 'foo', remote: 'local' },
{ id: 10, value: 'foo', remote: 'local' },
{ id: 3, value: 'bar', remote: 'local' },
{ id: 7, value: 'bar', remote: 'local' },
{ id: 10, value: 'bar', remote: 'local' } ],
// Entry.
{
type: 'crossProduct',
items: [{
type: 'set',
values: [ { id: 3 }, { id: 7 }, { id: 10 } ]
}, {
type: 'set',
values: [ { value: 'foo' }, { value: 'bar' } ]
}, {
type: 'set',
values: [ { remote: 'local' } ]
}]
}
],
'cross product with `set` and `map`': [
// Expected result.
[
{ remote: 'local', id: 'vm:2' },
{ remote: 'smb', id: 'vm:2' }
],
describe('productParams', function () {
leche.withData({
'Two sets of one': [
{a: 1, b: 2}, {a: 1}, {b: 2}
],
'Two sets of two': [
{a: 1, b: 2, c: 3, d: 4}, {a: 1, b: 2}, {c: 3, d: 4}
],
'Three sets': [
{a: 1, b: 2, c: 3, d: 4, e: 5, f: 6}, {a: 1}, {b: 2, c: 3}, {d: 4, e: 5, f: 6}
],
'One set': [
{a: 1, b: 2}, {a: 1, b: 2}
],
'Empty set': [
{a: 1}, {a: 1}, {}
],
'All empty': [
{}, {}, {}
],
'No set': [
{}
// Entry.
{
type: 'crossProduct',
items: [{
type: 'set',
values: [ { remote: 'local' }, { remote: 'smb' } ]
}, {
type: 'map',
collection: {
type: 'fetchObjects',
pattern: {
$pool: { __or: [ 'pool:1', 'pool:8', 'pool:12' ] },
power_state: 'Running',
tags: [ 'foo' ],
type: 'VM'
}
},
iteratee: {
type: 'extractProperties',
mapping: { id: 'id' }
}
}]
},
// Context.
{
xo: {
getObjects: function () {
return [{
id: 'vm:1',
$pool: 'pool:1',
tags: [],
type: 'VM',
power_state: 'Halted'
}, {
id: 'vm:2',
$pool: 'pool:1',
tags: [ 'foo' ],
type: 'VM',
power_state: 'Running'
}, {
id: 'host:1',
type: 'host',
power_state: 'Running'
}, {
id: 'vm:3',
$pool: 'pool:8',
tags: [ 'foo' ],
type: 'VM',
power_state: 'Halted'
}]
}
}
}
]
}, function (resultSet, ...sets) {
it('Assembles all given param sets in on set', function () {
expect(productParams(...sets)).to.eql(resultSet)
})
})
})
describe('_computeCrossProduct', function () {
// Gives the sum of all args
const addTest = (...args) => args.reduce((prev, curr) => prev + curr, 0)
// Gives the product of all args
const multiplyTest = (...args) => args.reduce((prev, curr) => prev * curr, 1)
leche.withData({
'2 sets of 2 items to multiply': [
[10, 14, 15, 21], [[2, 3], [5, 7]], multiplyTest
],
'3 sets of 2 items to multiply': [
[110, 130, 154, 182, 165, 195, 231, 273], [[2, 3], [5, 7], [11, 13]], multiplyTest
],
'2 sets of 3 items to multiply': [
[14, 22, 26, 21, 33, 39, 35, 55, 65], [[2, 3, 5], [7, 11, 13]], multiplyTest
],
'2 sets of 2 items to add': [
[7, 9, 8, 10], [[2, 3], [5, 7]], addTest
],
'3 sets of 2 items to add': [
[18, 20, 20, 22, 19, 21, 21, 23], [[2, 3], [5, 7], [11, 13]], addTest
],
'2 sets of 3 items to add': [
[9, 13, 15, 10, 14, 16, 12, 16, 18], [[2, 3, 5], [7, 11, 13]], addTest
]
}, function (product, items, cb) {
it('Crosses sets of values with a crossProduct callback', function () {
expect(_computeCrossProduct(items, cb)).to.have.members(product)
}, ([ expectedResult, entry, context ], name) => {
describe(`with ${name}`, () => {
it('Resolves params vector', () => {
expect(resolveParamsVector.call(context, entry)).toEqual(expectedResult)
})
})
})
})

View File

@@ -1,5 +1,5 @@
import appConf from 'app-conf'
import get from 'lodash.get'
import get from 'lodash/get'
import highland from 'highland'
import levelup from 'level-party'
import ndjson from 'ndjson'

33
src/lvm.js Normal file
View File

@@ -0,0 +1,33 @@
import execa from 'execa'
import splitLines from 'split-lines'
import { createParser } from 'parse-pairs'
import { isArray, map } from 'lodash'
// ===================================================================
// Parser for `--nameprefixes` output: each key is emitted as
// `LVM2_<FIELD>`; the transform drops the first five characters (the
// `LVM2_` prefix) and lowercases the remainder.
const parse = createParser({
  keyTransform: key => key.slice(5).toLowerCase()
})

// Builds a reporting helper around an LVM command (`lvs`, `pvs`).
//
// The returned function takes the field(s) to report plus any extra
// CLI arguments, runs the command and resolves with one entry per
// output line:
// - `fields` is an array  -> an object mapping field name to value;
// - `fields` is a string  -> the single field's value.
//
// `--units b` with `--nosuffix` reports sizes as bare byte counts so
// they can be parsed as plain numbers.
const makeFunction = command => (fields, ...args) =>
  execa.stdout(command, [
    '--noheading',
    '--nosuffix',
    '--nameprefixes',
    '--unbuffered',
    '--units',
    'b',
    '-o',
    String(fields),
    ...args
  ]).then(stdout => map(
    splitLines(stdout),
    isArray(fields)
      ? parse
      : line => {
        const data = parse(line)
        return data[fields]
      }
  ))

export const lvs = makeFunction('lvs')
export const pvs = makeFunction('pvs')

48
src/math.js Normal file
View File

@@ -0,0 +1,48 @@
// Recursively enumerates every combination taking one element from
// each of the first `n` vectors, invoking `cb` once per combination.
//
// Combinations are assembled from the last vector towards the first,
// so the first vector varies fastest in the emission order.
const _combine = (vectors, n, cb) => {
  if (!n) {
    return
  }

  const nLast = n - 1
  const vector = vectors[nLast]
  const m = vector.length
  if (n === 1) {
    for (let i = 0; i < m; ++i) {
      cb([ vector[i] ]) // eslint-disable-line standard/no-callback-literal
    }
    return
  }

  for (let i = 0; i < m; ++i) {
    const value = vector[i]
    _combine(vectors, nLast, (vector) => {
      vector.push(value)
      cb(vector)
    })
  }
}

// Compute all combinations from vectors.
//
// Returns a thunk: call it with a callback to receive each combination.
//
// Ex: combine([[2, 3], [5, 7]])
// => [ [ 2, 5 ], [ 3, 5 ], [ 2, 7 ], [ 3, 7 ] ]
export const combine = vectors => cb => _combine(vectors, vectors.length, cb)

// Merge the properties of an objects set in one object.
//
// Uses the native `Object.assign` (behaviorally identical to lodash's
// `assign` for this use) instead of pulling in a dependency.
//
// Ex: mergeObjects([ { a: 1 }, { b: 2 } ]) => { a: 1, b: 2 }
export const mergeObjects = objects => Object.assign({}, ...objects)

// Compute a cross product between vectors.
//
// Returns a thunk: call it with a callback to receive each product,
// each built by merging one element of every vector with `mergeFn`.
//
// Ex: crossProduct([ [ { a: 2 }, { b: 3 } ], [ { c: 5 }, { d: 7 } ] ] )
// => [ { a: 2, c: 5 }, { b: 3, c: 5 }, { a: 2, d: 7 }, { b: 3, d: 7 } ]
export const crossProduct = (vectors, mergeFn = mergeObjects) => cb => (
  combine(vectors)(vector => {
    cb(mergeFn(vector))
  })
)

74
src/math.spec.js Normal file
View File

@@ -0,0 +1,74 @@
/* eslint-env jest */
import { forEach } from 'lodash'
import { thunkToArray } from './utils'
import {
crossProduct,
mergeObjects
} from './math'
// Data-driven suite for mergeObjects(): each entry under a descriptive
// name is [ expectedResult, ...inputSets ]; the inputs are merged and
// compared against the expected single object. Covers multiple sets,
// a single set, empty sets and no sets at all.
describe('mergeObjects', function () {
  forEach({
    'Two sets of one': [
      {a: 1, b: 2}, {a: 1}, {b: 2}
    ],
    'Two sets of two': [
      {a: 1, b: 2, c: 3, d: 4}, {a: 1, b: 2}, {c: 3, d: 4}
    ],
    'Three sets': [
      {a: 1, b: 2, c: 3, d: 4, e: 5, f: 6}, {a: 1}, {b: 2, c: 3}, {d: 4, e: 5, f: 6}
    ],
    'One set': [
      {a: 1, b: 2}, {a: 1, b: 2}
    ],
    'Empty set': [
      {a: 1}, {a: 1}, {}
    ],
    'All empty': [
      {}, {}, {}
    ],
    'No set': [
      {}
    ]
  }, ([ resultSet, ...sets ], name) => {
    // One nested describe/it per data entry for readable test output.
    describe(`with ${name}`, () => {
      it('Assembles all given param sets in on set', function () {
        expect(mergeObjects(sets)).toEqual(resultSet)
      })
    })
  })
})
// Data-driven suite for crossProduct(): each entry is
// [ expectedProducts, inputVectors, mergeFn ]. Numeric vectors are
// crossed with either a sum or a product merge function; results are
// sorted on both sides because emission order is not part of the
// contract being tested here.
describe('crossProduct', function () {
  // Gives the sum of all args
  const addTest = args => args.reduce((prev, curr) => prev + curr, 0)
  // Gives the product of all args
  const multiplyTest = args => args.reduce((prev, curr) => prev * curr, 1)

  forEach({
    '2 sets of 2 items to multiply': [
      [10, 14, 15, 21], [[2, 3], [5, 7]], multiplyTest
    ],
    '3 sets of 2 items to multiply': [
      [110, 130, 154, 182, 165, 195, 231, 273], [[2, 3], [5, 7], [11, 13]], multiplyTest
    ],
    '2 sets of 3 items to multiply': [
      [14, 22, 26, 21, 33, 39, 35, 55, 65], [[2, 3, 5], [7, 11, 13]], multiplyTest
    ],
    '2 sets of 2 items to add': [
      [7, 9, 8, 10], [[2, 3], [5, 7]], addTest
    ],
    '3 sets of 2 items to add': [
      [18, 20, 20, 22, 19, 21, 21, 23], [[2, 3], [5, 7], [11, 13]], addTest
    ],
    '2 sets of 3 items to add': [
      [9, 13, 15, 10, 14, 16, 12, 16, 18], [[2, 3, 5], [7, 11, 13]], addTest
    ]
  }, ([ product, items, cb ], name) => {
    describe(`with ${name}`, () => {
      it('Crosses sets of values with a crossProduct callback', function () {
        // crossProduct returns a thunk; thunkToArray drains it.
        expect(thunkToArray(crossProduct(items, cb)).sort()).toEqual(product.sort())
      })
    })
  })
})

View File

@@ -1,8 +1,12 @@
import isEmpty from 'lodash/isEmpty'
import Collection from '../collection/redis'
import Model from '../model'
import { forEach } from '../utils'
import { parseProp } from './utils'
// ===================================================================
export default class Group extends Model {}
@@ -14,20 +18,16 @@ export class Groups extends Collection {
return Group
}
get idPrefix () {
return 'group:'
}
create (name) {
return this.add(new Group({
name,
users: '[]'
}))
return this.add(new Group({ name }))
}
async save (group) {
// Serializes.
group.users = JSON.stringify(group.users)
let tmp
group.users = isEmpty(tmp = group.users)
? undefined
: JSON.stringify(tmp)
return /* await */ this.update(group)
}
@@ -37,13 +37,7 @@ export class Groups extends Collection {
// Deserializes.
forEach(groups, group => {
const {users} = group
try {
group.users = JSON.parse(users)
} catch (error) {
console.warn('cannot parse group.users:', users)
group.users = []
}
group.users = parseProp('group', group, 'users', [])
})
return groups

View File

@@ -11,12 +11,7 @@ export class Jobs extends Collection {
return Job
}
get idPrefix () {
return 'job:'
}
async create (userId, job) {
job.userId = userId
async create (job) {
// Serializes.
job.paramsVector = JSON.stringify(job.paramsVector)
return /* await */ this.add(new Job(job))

View File

@@ -13,10 +13,6 @@ export class PluginsMetadata extends Collection {
return PluginMetadata
}
get idPrefix () {
return 'plugin-metadata:'
}
async save ({ id, autoload, configuration }) {
return /* await */ this.update({
id,

View File

@@ -13,10 +13,6 @@ export class Remotes extends Collection {
return Remote
}
get idPrefix () {
return 'remote-'
}
create (name, url) {
return this.add(new Remote({
name,

View File

@@ -11,17 +11,14 @@ export class Schedules extends Collection {
return Schedule
}
get idPrefix () {
return 'schedule:'
}
create (userId, job, cron, enabled, name = undefined) {
create (userId, job, cron, enabled, name = undefined, timezone = undefined) {
return this.add(new Schedule({
userId,
job,
cron,
enabled,
name
name,
timezone
}))
}

View File

@@ -1,5 +1,8 @@
import Collection from '../collection/redis'
import Model from '../model'
import { forEach } from '../utils'
import { parseProp } from './utils'
// ===================================================================
@@ -12,11 +15,26 @@ export class Servers extends Collection {
return Server
}
async create ({host, username, password, readOnly}) {
async create ({label, host, username, password, readOnly}) {
if (await this.exists({host})) {
throw new Error('server already exists')
}
return /* await */ this.add({host, username, password, readOnly})
return /* await */ this.add({label, host, username, password, readOnly})
}
async get (properties) {
const servers = await super.get(properties)
// Deserializes
forEach(servers, server => {
if (server.error) {
server.error = parseProp('server', server, 'error', '')
} else {
delete server.error
}
})
return servers
}
}

View File

@@ -1,7 +1,11 @@
import isEmpty from 'lodash/isEmpty'
import Collection from '../collection/redis'
import Model from '../model'
import { forEach } from '../utils'
import { parseProp } from './utils'
// ===================================================================
export default class User extends Model {}
@@ -17,15 +21,14 @@ export class Users extends Collection {
return User
}
async create (email, properties = {}) {
async create (properties) {
const { email } = properties
// Avoid duplicates.
if (await this.exists({email})) {
throw new Error(`the user ${email} already exists`)
}
// Adds the email to the user's properties.
properties.email = email
// Create the user object.
const user = new User(properties)
@@ -35,7 +38,13 @@ export class Users extends Collection {
async save (user) {
// Serializes.
user.groups = JSON.stringify(user.groups)
let tmp
user.groups = isEmpty(tmp = user.groups)
? undefined
: JSON.stringify(tmp)
user.preferences = isEmpty(tmp = user.preferences)
? undefined
: JSON.stringify(tmp)
return /* await */ this.update(user)
}
@@ -45,13 +54,8 @@ export class Users extends Collection {
// Deserializes
forEach(users, user => {
const {groups} = user
try {
user.groups = groups ? JSON.parse(groups) : []
} catch (_) {
console.warn('cannot parse user.groups:', groups)
user.groups = []
}
user.groups = parseProp('user', user, 'groups', [])
user.preferences = parseProp('user', user, 'preferences', {})
})
return users

16
src/models/utils.js Normal file
View File

@@ -0,0 +1,16 @@
// Deserializes a JSON-encoded property of a stored model object.
//
// `type` identifies the model kind (e.g. 'user', 'group'); it is kept
// in the signature for callers but unused here.
//
// Returns `defaultValue` when the property is absent, empty or cannot
// be parsed as JSON.
export const parseProp = (type, obj, name, defaultValue) => {
  const serialized = obj[name]

  // Absent or empty value: trivial and minor, not worth a warning.
  if (serialized == null || serialized === '') {
    return defaultValue
  }

  let parsed
  try {
    parsed = JSON.parse(serialized)
  } catch (_) {
    // Intentionally silent: this can occur a lot and would fill up
    // the log files.
    return defaultValue
  }
  return parsed
}

42
src/node_modules/constant-stream.js generated vendored Normal file
View File

@@ -0,0 +1,42 @@
import from2 from 'from2'
// Creates a readable stream which emits `data` repeated `n` times.
//
// `data` may be a Buffer or anything accepted by `Buffer.from()`.
const constantStream = (data, n = 1) => {
  if (!Buffer.isBuffer(data)) {
    data = Buffer.from(data)
  }

  const { length } = data
  if (!length) {
    throw new Error('data should not be empty')
  }

  // From here on, `n` is the total number of BYTES left to emit.
  n *= length

  // Current length of the `data` buffer, which may be grown below to
  // serve large reads in a single chunk.
  let currentLength = length

  return from2((size, next) => {
    if (n <= 0) {
      // Nothing left: signal end of stream.
      return next(null, null)
    }

    // Never emit more than what remains.
    if (n < size) {
      size = n
    }

    if (size < currentLength) {
      // Emit the largest whole number of repetitions that fits in
      // `size` — at least one (the `|| length` fallback).
      const m = Math.floor(size / length) * length || length
      n -= m
      return next(null, data.slice(0, m))
    }

    // if more than twice the data length is requested, repeat the data
    if (size > currentLength * 2) {
      currentLength = Math.floor(size / length) * length
      data = Buffer.alloc(currentLength, data)
    }

    n -= currentLength
    return next(null, data)
  })
}
export { constantStream as default }

View File

@@ -23,13 +23,19 @@ export default function proxyConsole (ws, vmConsole, sessionId) {
'', ''
].join('\r\n'))
const onSend = (error) => {
if (error) {
debug('error sending to the XO client: %s', error.stack || error.message || error)
}
}
socket.pipe(partialStream('\r\n\r\n', headers => {
// TODO: check status code 200.
debug('connected')
})).on('data', data => {
if (!closed) {
// Encode to base 64.
ws.send(data.toString('base64'))
ws.send(data.toString('base64'), onSend)
}
}).on('end', () => {
if (!closed) {
@@ -50,7 +56,7 @@ export default function proxyConsole (ws, vmConsole, sessionId) {
.on('message', data => {
if (!closed) {
// Decode from base 64.
socket.write(new Buffer(data, 'base64'))
socket.write(Buffer.from(data, 'base64'))
}
})
.on('close', () => {

View File

@@ -1,5 +1,4 @@
import eventToPromise from 'event-to-promise'
import getStream from 'get-stream'
import through2 from 'through2'
import {
@@ -8,14 +7,16 @@ import {
import {
addChecksumToReadStream,
getPseudoRandomBytes,
noop,
pCatch,
streamToBuffer,
validChecksumOfReadStream
} from '../utils'
export default class RemoteHandlerAbstract {
constructor (remote) {
this._remote = parse({...remote})
this._remote = {...remote, ...parse(remote.url)}
if (this._remote.type !== this.type) {
throw new Error('Incorrect remote type')
}
@@ -47,12 +48,41 @@ export default class RemoteHandlerAbstract {
throw new Error('Not implemented')
}
async test () {
const testFileName = `${Date.now()}.test`
const data = getPseudoRandomBytes(1024 * 1024)
let step = 'write'
try {
await this.outputFile(testFileName, data)
step = 'read'
const read = await this.readFile(testFileName)
if (data.compare(read) !== 0) {
throw new Error('output and input did not match')
}
return {
success: true
}
} catch (error) {
return {
success: false,
step,
file: testFileName,
error: error.message || String(error)
}
} finally {
this.unlink(testFileName).catch(noop)
}
}
async outputFile (file, data, options) {
return this._outputFile(file, data, options)
return this._outputFile(file, data, {
flags: 'wx',
...options
})
}
async _outputFile (file, data, options) {
const stream = await this.createOutputStream(file)
const stream = await this.createOutputStream(file, options)
const promise = eventToPromise(stream, 'finish')
stream.end(data)
return promise
@@ -62,8 +92,8 @@ export default class RemoteHandlerAbstract {
return this._readFile(file, options)
}
async _readFile (file, options) {
return getStream(await this.createReadStream(file, options))
_readFile (file, options) {
return this.createReadStream(file, options).then(streamToBuffer)
}
async rename (oldPath, newPath) {
@@ -82,53 +112,72 @@ export default class RemoteHandlerAbstract {
throw new Error('Not implemented')
}
async createReadStream (file, {
createReadStream (file, {
checksum = false,
ignoreMissingChecksum = false,
...options
} = {}) {
const streamP = this._createReadStream(file, options).then(async stream => {
await eventToPromise(stream, 'readable')
const streamP = this._createReadStream(file, options).then(stream => {
// detect early errors
let promise = eventToPromise(stream, 'readable')
if (stream.length === undefined) {
stream.length = await this.getSize(file)::pCatch(noop)
// try to add the length prop if missing and not a range stream
if (
stream.length === undefined &&
options.end === undefined &&
options.start === undefined
) {
promise = Promise.all([ promise, this.getSize(file).then(size => {
stream.length = size
}, noop) ])
}
return stream
return promise.then(() => stream)
})
if (!checksum) {
return streamP
}
try {
checksum = await this.readFile(`${file}.checksum`)
} catch (error) {
if (error.code === 'ENOENT' && ignoreMissingChecksum) {
return streamP
// avoid a unhandled rejection warning
streamP.catch(noop)
return this.readFile(`${file}.checksum`).then(
checksum => streamP.then(stream => {
const { length } = stream
stream = validChecksumOfReadStream(stream, String(checksum).trim())
stream.length = length
return stream
}),
error => {
if (ignoreMissingChecksum && error && error.code === 'ENOENT') {
return streamP
}
throw error
}
throw error
}
let stream = await streamP
const { length } = stream
stream = validChecksumOfReadStream(stream, checksum.toString())
stream.length = length
return stream
)
}
async _createReadStream (file, options) {
throw new Error('Not implemented')
}
async refreshChecksum (path) {
const stream = addChecksumToReadStream(await this.createReadStream(path))
stream.resume() // start reading the whole file
const checksum = await stream.checksum
await this.outputFile(`${path}.checksum`, checksum)
}
async createOutputStream (file, {
checksum = false,
...options
} = {}) {
const streamP = this._createOutputStream(file, options)
const streamP = this._createOutputStream(file, {
flags: 'wx',
...options
})
if (!checksum) {
return streamP

View File

@@ -1,5 +1,5 @@
import fs from 'fs-promise'
import startsWith from 'lodash.startswith'
import startsWith from 'lodash/startsWith'
import {
dirname,
resolve
@@ -12,16 +12,21 @@ import {
export default class LocalHandler extends RemoteHandlerAbstract {
get type () {
return 'local'
return 'file'
}
_getRealPath () {
return this._remote.path
}
_getFilePath (file) {
const parts = [this._remote.path]
const realPath = this._getRealPath()
const parts = [realPath]
if (file) {
parts.push(file)
}
const path = resolve.apply(null, parts)
if (!startsWith(path, this._remote.path)) {
if (!startsWith(path, realPath)) {
throw new Error('Remote path is unavailable')
}
return path
@@ -30,8 +35,9 @@ export default class LocalHandler extends RemoteHandlerAbstract {
async _sync () {
if (this._remote.enabled) {
try {
await fs.ensureDir(this._remote.path)
await fs.access(this._remote.path, fs.R_OK | fs.W_OK)
const path = this._getRealPath()
await fs.ensureDir(path)
await fs.access(path, fs.R_OK | fs.W_OK)
} catch (exc) {
this._remote.enabled = false
this._remote.error = exc.message
@@ -47,7 +53,7 @@ export default class LocalHandler extends RemoteHandlerAbstract {
async _outputFile (file, data, options) {
const path = this._getFilePath(file)
await fs.ensureDir(dirname(path))
await fs.writeFile(this._getFilePath(file), data, options)
await fs.writeFile(path, data, options)
}
async _readFile (file, options) {
@@ -80,5 +86,4 @@ export default class LocalHandler extends RemoteHandlerAbstract {
const stats = await fs.stat(this._getFilePath(file))
return stats.size
}
}

View File

@@ -11,11 +11,15 @@ export default class NfsHandler extends LocalHandler {
return 'nfs'
}
_getRealPath () {
return `/run/xo-server/mounts/${this._remote.id}`
}
async _loadRealMounts () {
let stdout
const mounted = {}
try {
({stdout} = await execa('findmnt', ['-P', '-t', 'nfs,nfs4', '--output', 'SOURCE,TARGET', '--noheadings']))
stdout = await execa.stdout('findmnt', ['-P', '-t', 'nfs,nfs4', '--output', 'SOURCE,TARGET', '--noheadings'])
const regex = /^SOURCE="([^:]*):(.*)" TARGET="(.*)"$/
forEach(stdout.split('\n'), m => {
if (m) {
@@ -37,27 +41,27 @@ export default class NfsHandler extends LocalHandler {
return mounted
}
_matchesRealMount (remote) {
return remote.path in this._realMounts
_matchesRealMount () {
return this._getRealPath() in this._realMounts
}
async _mount (remote) {
await fs.ensureDir(remote.path)
return execa('mount', ['-t', 'nfs', '-o', 'vers=3', `${remote.host}:/${remote.share}`, remote.path])
async _mount () {
await fs.ensureDir(this._getRealPath())
return execa('mount', ['-t', 'nfs', '-o', 'vers=3', `${this._remote.host}:${this._remote.path}`, this._getRealPath()])
}
async _sync () {
await this._loadRealMounts()
if (this._matchesRealMount(this._remote) && !this._remote.enabled) {
if (this._matchesRealMount() && !this._remote.enabled) {
try {
await this._umount(this._remote)
} catch (exc) {
this._remote.enabled = true
this._remote.error = exc.message
}
} else if (!this._matchesRealMount(this._remote) && this._remote.enabled) {
} else if (!this._matchesRealMount() && this._remote.enabled) {
try {
await this._mount(this._remote)
await this._mount()
} catch (exc) {
this._remote.enabled = false
this._remote.error = exc.message
@@ -75,6 +79,6 @@ export default class NfsHandler extends LocalHandler {
}
async _umount (remote) {
await execa('umount', [remote.path])
await execa('umount', [this._getRealPath()])
}
}

View File

@@ -54,6 +54,11 @@ export default class SmbHandler extends RemoteHandlerAbstract {
? this._remote.path
: ''
// Ensure remote path is a directory.
if (path !== '' && path[path.length - 1] !== '\\') {
path += '\\'
}
if (file) {
path += file.replace(/\//g, '\\')
}

View File

@@ -1,5 +1,3 @@
import paramsVector from 'job/params-vector'
export default {
$schema: 'http://json-schema.org/draft-04/schema#',
type: 'object',
@@ -27,7 +25,13 @@ export default {
type: 'string',
description: 'called method'
},
paramsVector
paramsVector: {
type: 'object'
},
timeout: {
type: 'number',
description: 'number of milliseconds after which the job is considered failed'
}
},
required: [
'type',

View File

@@ -1,59 +0,0 @@
export default {
$schema: 'http://json-schema.org/draft-04/schema#',
type: 'object',
properties: {
type: {
enum: ['crossProduct']
},
items: {
type: 'array',
description: 'vector of values to multiply with others vectors',
items: {
type: 'object',
properties: {
type: {
enum: ['set']
},
values: {
type: 'array',
items: {
type: 'object'
},
minItems: 1
}
},
required: [
'type',
'values'
]
},
minItems: 1
}
},
required: [
'type',
'items'
]
}
/* Example:
{
"type": "cross product",
"items": [
{
"type": "set",
"values": [
{"id": 0, "name": "snapshost de 0"},
{"id": 1, "name": "snapshost de 1"}
],
},
{
"type": "set",
"values": [
{"force": true}
]
}
]
}
*/

View File

@@ -20,7 +20,7 @@ export default {
},
unloadable: {
type: 'boolean',
default: 'true',
default: true,
description: 'whether or not this plugin can be unloaded'
},
configuration: {
@@ -30,6 +30,14 @@ export default {
configurationSchema: {
$ref: 'http://json-schema.org/draft-04/schema#',
description: 'configuration schema for this plugin (not present if not configurable)'
},
testable: {
type: 'boolean',
description: 'whether or not this plugin can be tested'
},
testSchema: {
$ref: 'http://json-schema.org/draft-04/schema#',
description: 'test schema for this plugin'
}
},
required: [

50
src/schemas/user.js Normal file
View File

@@ -0,0 +1,50 @@
// JSON schema (draft-04) describing a user record as exposed by the API.
export default {
  $schema: 'http://json-schema.org/draft-04/schema#',
  type: 'object',
  properties: {
    id: {
      type: 'string',
      description: 'unique identifier for this user'
    },
    email: {
      type: 'string',
      description: 'email address of this user'
    },
    groups: {
      type: 'array',
      items: {
        type: 'string'
      },
      description: 'identifier of groups this user belong to'
    },
    permission: {
      // Intermediate levels exist for completeness but only `none` and
      // `admin` carry meaning (see description below).
      enum: ['none', 'read', 'write', 'admin'],
      description: 'root permission for this user, none and admin are the only significant ones'
    },
    preferences: {
      type: 'object',
      properties: {
        lang: { type: 'string' },
        // SSH keys are stored as { key, title } pairs, both required.
        sshKeys: {
          type: 'array',
          items: {
            type: 'object',
            properties: {
              key: { type: 'string' },
              title: { type: 'string' }
            },
            required: [
              'key',
              'title'
            ]
          }
        }
      },
      description: 'various user preferences'
    }
  },
  // Only identity fields are mandatory; everything else is optional.
  required: [
    'id',
    'email'
  ]
}

View File

@@ -0,0 +1,44 @@
import { ok as assertOk } from 'assert'

// Reads the whole content of `stream` into the slice [offset, end) of a
// pre-allocated `buffer`.
//
// Resolves with the number of bytes written, or rejects if the stream
// produces more data than fits in the slice or emits an error.
const streamToExistingBuffer = (
  stream,
  buffer,
  offset = 0,
  end = buffer.length
) => new Promise((resolve, reject) => {
  // Sanity-check the requested slice before listening to the stream.
  assertOk(offset >= 0)
  assertOk(end > offset)
  assertOk(end <= buffer.length)

  // Position of the next write into `buffer`.
  let cursor = offset

  const handleData = chunk => {
    const writeAt = cursor
    cursor += chunk.length
    if (cursor > end) {
      // The stream does not fit in the slice: abort.
      return handleError(new Error('too much data'))
    }
    chunk.copy(buffer, writeAt)
  }

  // Detach all listeners once the promise is settled.
  const detach = () => {
    stream.removeListener('data', handleData)
    stream.removeListener('end', handleEnd)
    stream.removeListener('error', handleError)
  }

  const handleEnd = () => {
    resolve(cursor - offset)
    detach()
  }

  const handleError = error => {
    reject(error)
    detach()
  }

  stream.on('data', handleData)
  stream.on('end', handleEnd)
  stream.on('error', handleError)
})
export { streamToExistingBuffer as default }

View File

@@ -0,0 +1,20 @@
/* eslint-env jest */

import { createReadStream, readFile } from 'fs'
import { fromCallback } from 'promise-toolbox'

import streamToExistingBuffer from './stream-to-existing-buffer'

describe('streamToExistingBuffer()', () => {
  it('read the content of a stream in a buffer', async () => {
    // Read this very file both ways: via the function under test and
    // via fs.readFile as the reference value.
    const expected = await fromCallback(cb => readFile(__filename, 'utf-8', cb))

    // Reserve one extra leading byte to check the offset handling.
    const target = Buffer.allocUnsafe(expected.length + 1)
    target[0] = 'A'.charCodeAt()

    await streamToExistingBuffer(createReadStream(__filename), target, 1)

    expect(String(target)).toBe(`A${expected}`)
  })
})

View File

@@ -0,0 +1,27 @@
// Collects the whole content of a readable stream into a freshly
// allocated Buffer.
//
// Resolves with the concatenated data once the stream ends, rejects on
// the first stream error.
const streamToNewBuffer = stream => new Promise((resolve, reject) => {
  const pieces = []
  let total = 0

  const handleData = piece => {
    pieces.push(piece)
    total += piece.length
  }

  // Remove every listener once the promise is settled.
  const detach = () => {
    stream.removeListener('data', handleData)
    stream.removeListener('end', handleEnd)
    stream.removeListener('error', handleError)
  }

  const handleEnd = () => {
    // Pass the precomputed total to avoid a second pass in concat.
    resolve(Buffer.concat(pieces, total))
    detach()
  }

  const handleError = error => {
    reject(error)
    detach()
  }

  stream.on('data', handleData)
  stream.on('end', handleEnd)
  stream.on('error', handleError)
})
export { streamToNewBuffer as default }

View File

@@ -1,19 +1,33 @@
import base64url from 'base64url'
import eventToPromise from 'event-to-promise'
import forEach from 'lodash.foreach'
import has from 'lodash.has'
import forEach from 'lodash/forEach'
import has from 'lodash/has'
import highland from 'highland'
import humanFormat from 'human-format'
import invert from 'lodash.invert'
import isArray from 'lodash.isarray'
import isString from 'lodash.isstring'
import invert from 'lodash/invert'
import isArray from 'lodash/isArray'
import isString from 'lodash/isString'
import keys from 'lodash/keys'
import kindOf from 'kindof'
import multiKeyHashInt from 'multikey-hash'
import pick from 'lodash/pick'
import tmp from 'tmp'
import xml2js from 'xml2js'
import { resolve } from 'path'
// Moment timezone can be loaded only one time, it's a workaround to load
// the latest version because cron module uses an old version of moment which
// does not implement `guess` function for example.
import 'moment-timezone'
import through2 from 'through2'
import { CronJob } from 'cron'
import { Readable } from 'stream'
import { utcFormat, utcParse } from 'd3-time-format'
import {
all as pAll,
defer,
fromCallback,
promisify,
reflect as pReflect
} from 'promise-toolbox'
@@ -21,9 +35,6 @@ import {
createHash,
randomBytes
} from 'crypto'
import { Readable } from 'stream'
import through2 from 'through2'
import {utcFormat as d3TimeFormat} from 'd3-time-format'
// ===================================================================
@@ -45,23 +56,13 @@ export function bufferToStream (buf) {
return stream
}
export async function streamToBuffer (stream) {
return new Promise((resolve, reject) => {
const bufs = []
stream.on('error', reject)
stream.on('data', data => {
bufs.push(data)
})
stream.on('end', () => resolve(Buffer.concat(bufs)))
})
}
export streamToBuffer from './stream-to-new-buffer'
// -------------------------------------------------------------------
export function camelToSnakeCase (string) {
return string.replace(
/([a-z])([A-Z])/g,
/([a-z0-9])([A-Z])/g,
(_, prevChar, currChar) => `${prevChar}_${currChar.toLowerCase()}`
)
}
@@ -75,6 +76,27 @@ export const createRawObject = Object.create
// -------------------------------------------------------------------
// Only works with string items!
export const diffItems = (coll1, coll2) => {
const removed = createRawObject()
forEach(coll2, value => {
removed[value] = true
})
const added = []
forEach(coll1, value => {
if (value in removed) {
delete removed[value]
} else {
added.push(value)
}
})
return [ added, keys(removed) ]
}
// -------------------------------------------------------------------
const ALGORITHM_TO_ID = {
md5: '1',
sha256: '5',
@@ -179,8 +201,15 @@ export function extractProperty (obj, prop) {
// -------------------------------------------------------------------
export const generateUnsecureToken = (n = 32) => {
const bytes = new Buffer(n)
export const getUserPublicProperties = user => pick(
user.properties || user,
'id', 'email', 'groups', 'permission', 'preferences', 'provider'
)
// -------------------------------------------------------------------
export const getPseudoRandomBytes = n => {
const bytes = Buffer.allocUnsafe(n)
const odd = n & 1
for (let i = 0, m = n - odd; i < m; i += 2) {
@@ -191,13 +220,15 @@ export const generateUnsecureToken = (n = 32) => {
bytes.writeUInt8(Math.random() * 256 | 0, n - 1)
}
return base64url(bytes)
return bytes
}
export const generateUnsecureToken = (n = 32) => base64url(getPseudoRandomBytes(n))
// Generate a secure random Base64 string.
export const generateToken = (randomBytes => {
return (n = 32) => randomBytes(n).then(base64url)
})(randomBytes::promisify())
})(promisify(randomBytes))
// -------------------------------------------------------------------
@@ -239,21 +270,30 @@ export const parseXml = (function () {
// - methods are already bound and chainable
export const lightSet = collection => {
const data = createRawObject()
collection && forEach(collection, value => {
data[value] = true
})
collection = null
if (collection) {
forEach(collection, value => {
data[value] = true
})
collection = null
}
const set = {
add: value => (data[value] = true, set),
add: value => {
data[value] = true
return set
},
clear: () => {
for (const value in data) {
delete data[value]
}
return set
},
delete: value => (delete data[value], set),
has: value => data[value]
delete: value => {
delete data[value]
return set
},
has: value => data[value],
toArray: () => keys(data)
}
return set
}
@@ -306,7 +346,7 @@ export function pSettle (promises) {
// -------------------------------------------------------------------
export {
export { // eslint-disable-line no-duplicate-imports
all as pAll,
catchPlus as pCatch,
delay as pDelay,
@@ -353,22 +393,24 @@ export const popProperty = obj => {
// Format a date in ISO 8601 in a safe way to be used in filenames
// (even on Windows).
export const safeDateFormat = d3TimeFormat('%Y%m%dT%H%M%SZ')
export const safeDateFormat = utcFormat('%Y%m%dT%H%M%SZ')
export const safeDateParse = utcParse('%Y%m%dT%H%M%SZ')
// -------------------------------------------------------------------
// This functions are often used throughout xo-server.
//
// Exports them from here to avoid direct dependencies on lodash.
export { default as forEach } from 'lodash.foreach'
export { default as isArray } from 'lodash.isarray'
export { default as isBoolean } from 'lodash.isboolean'
export { default as isEmpty } from 'lodash.isempty'
export { default as isFunction } from 'lodash.isfunction'
export { default as isInteger } from 'lodash.isinteger'
export { default as isObject } from 'lodash.isobject'
export { default as isString } from 'lodash.isstring'
export { default as mapToArray } from 'lodash.map'
// Exports them from here to avoid direct dependencies on lodash/
export { default as forEach } from 'lodash/forEach' // eslint-disable-line no-duplicate-imports
export { default as isArray } from 'lodash/isArray' // eslint-disable-line no-duplicate-imports
export { default as isBoolean } from 'lodash/isBoolean'
export { default as isEmpty } from 'lodash/isEmpty'
export { default as isFunction } from 'lodash/isFunction'
export { default as isInteger } from 'lodash/isInteger'
export { default as isObject } from 'lodash/isObject'
export { default as isString } from 'lodash/isString' // eslint-disable-line no-duplicate-imports
export { default as mapToArray } from 'lodash/map'
// -------------------------------------------------------------------
@@ -412,7 +454,7 @@ export function map (
export const multiKeyHash = (...args) => new Promise(resolve => {
const hash = multiKeyHashInt(...args)
const buf = new Buffer(4)
const buf = Buffer.allocUnsafe(4)
buf.writeUInt32LE(hash, 0)
resolve(base64url(buf))
@@ -420,6 +462,11 @@ export const multiKeyHash = (...args) => new Promise(resolve => {
// -------------------------------------------------------------------
export const resolveSubpath = (root, path) =>
resolve(root, `./${resolve('/', path)}`)
// -------------------------------------------------------------------
export const streamToArray = (stream, {
filter,
mapper
@@ -436,27 +483,30 @@ export const streamToArray = (stream, {
// -------------------------------------------------------------------
export const scheduleFn = (cronPattern, fn) => {
export const scheduleFn = (cronTime, fn, timeZone) => {
let running = false
const job = new CronJob(cronPattern, async () => {
if (running) {
return
}
const job = new CronJob({
cronTime,
onTick: async () => {
if (running) {
return
}
running = true
running = true
try {
await fn()
} catch (error) {
console.error('[WARN] scheduled function:', error && error.stack || error)
} finally {
running = false
}
try {
await fn()
} catch (error) {
console.error('[WARN] scheduled function:', (error && error.stack) || error)
} finally {
running = false
}
},
start: true,
timeZone
})
job.start()
return () => {
job.stop()
}
@@ -464,5 +514,68 @@ export const scheduleFn = (cronPattern, fn) => {
// -------------------------------------------------------------------
// Create a serializable object from an error.
export const serializeError = error => ({
message: error.message,
stack: error.stack,
...error // Copy enumerable properties.
})
// -------------------------------------------------------------------
// Create an array which contains the results of one thunk function.
// Only works with synchronous thunks.
export const thunkToArray = thunk => {
const values = []
thunk(::values.push)
return values
}
// -------------------------------------------------------------------
// Creates a new function which throws an error.
//
// ```js
// promise.catch(throwFn('an error has occurred'))
//
// function foo (param = throwFn('param is required')()) {}
// ```
export const throwFn = error => () => {
throw (
isString(error)
? new Error(error)
: error
)
}
// -------------------------------------------------------------------
export const tmpDir = () => fromCallback(cb => tmp.dir(cb))
// -------------------------------------------------------------------
// Wrap a value in a function.
export const wrap = value => () => value
// -------------------------------------------------------------------
export const mapFilter = (collection, iteratee) => {
const result = []
forEach(collection, (...args) => {
const value = iteratee(...args)
if (value) {
result.push(value)
}
})
return result
}
// -------------------------------------------------------------------
export const splitFirst = (string, separator) => {
const i = string.indexOf(separator)
return i === -1 ? null : [
string.slice(0, i),
string.slice(i + separator.length)
]
}

View File

@@ -1,19 +1,14 @@
/* eslint-env mocha */
import expect from 'must'
import sinon from 'sinon'
// ===================================================================
/* eslint-env jest */
import {
camelToSnakeCase,
createRawObject,
diffItems,
ensureArray,
extractProperty,
formatXml,
generateToken,
parseSize,
pFinally,
pSettle
} from './utils'
@@ -21,57 +16,69 @@ import {
describe('camelToSnakeCase()', function () {
it('converts a string from camelCase to snake_case', function () {
expect(camelToSnakeCase('fooBar')).to.equal('foo_bar')
expect(camelToSnakeCase('fooBar')).toBe('foo_bar')
expect(camelToSnakeCase('ipv4Allowed')).toBe('ipv4_allowed')
})
it('does not alter snake_case strings', function () {
expect(camelToSnakeCase('foo_bar')).to.equal('foo_bar')
expect(camelToSnakeCase('foo_bar')).toBe('foo_bar')
expect(camelToSnakeCase('ipv4_allowed')).toBe('ipv4_allowed')
})
it('does not alter upper case letters expect those from the camelCase', function () {
expect(camelToSnakeCase('fooBar_BAZ')).to.equal('foo_bar_BAZ')
expect(camelToSnakeCase('fooBar_BAZ')).toBe('foo_bar_BAZ')
})
})
// -------------------------------------------------------------------
describe('createRawObject()', () => {
it('returns an object', () => {
expect(createRawObject()).to.be.an.object()
})
it('returns an empty object', () => {
expect(createRawObject()).to.be.empty()
expect(createRawObject()).toEqual({})
})
it('creates a new object each time', () => {
expect(createRawObject()).to.not.equal(createRawObject())
expect(createRawObject()).not.toBe(createRawObject())
})
if (Object.getPrototypeOf) {
it('creates an object without a prototype', () => {
expect(Object.getPrototypeOf(createRawObject())).to.be.null()
expect(Object.getPrototypeOf(createRawObject())).toBe(null)
})
}
})
// -------------------------------------------------------------------
describe('diffItems', () => {
it('computes the added/removed items between 2 iterables', () => {
expect(diffItems(
['foo', 'bar'],
['baz', 'foo']
)).toEqual([
['bar'],
['baz']
])
})
})
// -------------------------------------------------------------------
describe('ensureArray()', function () {
it('wrap the value in an array', function () {
const value = 'foo'
expect(ensureArray(value)).to.eql([value])
expect(ensureArray(value)).toEqual([value])
})
it('returns an empty array for undefined', function () {
expect(ensureArray(undefined)).to.eql([])
expect(ensureArray(undefined)).toEqual([])
})
it('returns the object itself if is already an array', function () {
const array = ['foo', 'bar', 'baz']
expect(ensureArray(array)).to.equal(array)
expect(ensureArray(array)).toBe(array)
})
})
@@ -82,15 +89,15 @@ describe('extractProperty()', function () {
const value = {}
const obj = { prop: value }
expect(extractProperty(obj, 'prop')).to.equal(value)
expect(extractProperty(obj, 'prop')).toBe(value)
})
it('removes the property from the object', function () {
const value = {}
const obj = { prop: value }
expect(extractProperty(obj, 'prop')).to.equal(value)
expect(obj).to.not.have.property('prop')
expect(extractProperty(obj, 'prop')).toBe(value)
expect(obj.prop).not.toBeDefined()
})
})
@@ -105,7 +112,7 @@ describe('formatXml()', function () {
{$: {baz: 'plip'}}
]
}
})).to.equal(`<foo>
})).toBe(`<foo>
<bar baz="plop"/>
<bar baz="plip"/>
</foo>`)
@@ -116,7 +123,7 @@ describe('formatXml()', function () {
describe('generateToken()', () => {
it('generates a string', async () => {
expect(await generateToken()).to.be.a.string()
expect(typeof await generateToken()).toBe('string')
})
})
@@ -124,51 +131,21 @@ describe('generateToken()', () => {
describe('parseSize()', function () {
it('parses a human size', function () {
expect(parseSize('1G')).to.equal(1e9)
expect(parseSize('1G')).toBe(1e9)
})
it('returns the parameter if already a number', function () {
expect(parseSize(1e6)).to.equal(1e6)
expect(parseSize(1e6)).toBe(1e6)
})
it('throws if the string cannot be parsed', function () {
expect(function () {
parseSize('foo')
}).to.throw()
}).toThrow()
})
it('supports the B unit as suffix', function () {
expect(parseSize('3MB')).to.equal(3e6)
})
})
// -------------------------------------------------------------------
describe('pFinally()', () => {
it('calls a callback on resolution', async () => {
const value = {}
const spy = sinon.spy()
await expect(
Promise.resolve(value)::pFinally(spy)
).to.resolve.to.equal(
value
)
expect(spy.callCount).to.equal(1)
})
it('calls a callback on rejection', async () => {
const reason = {}
const spy = sinon.spy()
await expect(
Promise.reject(reason)::pFinally(spy)
).to.reject.to.equal(
reason
)
expect(spy.callCount).to.equal(1)
expect(parseSize('3MB')).toBe(3e6)
})
})
@@ -176,6 +153,7 @@ describe('pFinally()', () => {
describe('pSettle()', () => {
it('works with arrays', async () => {
const rejection = 'fatality'
const [
status1,
status2,
@@ -183,27 +161,29 @@ describe('pSettle()', () => {
] = await pSettle([
Promise.resolve(42),
Math.PI,
Promise.reject('fatality')
Promise.reject(rejection)
])
expect(status1.isRejected()).to.equal(false)
expect(status2.isRejected()).to.equal(false)
expect(status3.isRejected()).to.equal(true)
expect(status1.isRejected()).toBe(false)
expect(status2.isRejected()).toBe(false)
expect(status3.isRejected()).toBe(true)
expect(status1.isFulfilled()).to.equal(true)
expect(status2.isFulfilled()).to.equal(true)
expect(status3.isFulfilled()).to.equal(false)
expect(status1.isFulfilled()).toBe(true)
expect(status2.isFulfilled()).toBe(true)
expect(status3.isFulfilled()).toBe(false)
expect(status1.value()).to.equal(42)
expect(status2.value()).to.equal(Math.PI)
expect(::status3.value).to.throw()
expect(status1.value()).toBe(42)
expect(status2.value()).toBe(Math.PI)
expect(::status3.value).toThrow()
expect(::status1.reason).to.throw()
expect(::status2.reason).to.throw()
expect(status3.reason()).to.equal('fatality')
expect(::status1.reason).toThrow()
expect(::status2.reason).toThrow()
expect(status3.reason()).toBe(rejection)
})
it('works with objects', async () => {
const rejection = 'fatality'
const {
a: status1,
b: status2,
@@ -211,23 +191,23 @@ describe('pSettle()', () => {
} = await pSettle({
a: Promise.resolve(42),
b: Math.PI,
c: Promise.reject('fatality')
c: Promise.reject(rejection)
})
expect(status1.isRejected()).to.equal(false)
expect(status2.isRejected()).to.equal(false)
expect(status3.isRejected()).to.equal(true)
expect(status1.isRejected()).toBe(false)
expect(status2.isRejected()).toBe(false)
expect(status3.isRejected()).toBe(true)
expect(status1.isFulfilled()).to.equal(true)
expect(status2.isFulfilled()).to.equal(true)
expect(status3.isFulfilled()).to.equal(false)
expect(status1.isFulfilled()).toBe(true)
expect(status2.isFulfilled()).toBe(true)
expect(status3.isFulfilled()).toBe(false)
expect(status1.value()).to.equal(42)
expect(status2.value()).to.equal(Math.PI)
expect(::status3.value).to.throw()
expect(status1.value()).toBe(42)
expect(status2.value()).toBe(Math.PI)
expect(::status3.value).toThrow()
expect(::status1.reason).to.throw()
expect(::status2.reason).to.throw()
expect(status3.reason()).to.equal('fatality')
expect(::status1.reason).toThrow()
expect(::status2.reason).toThrow()
expect(status3.reason()).toBe(rejection)
})
})

View File

@@ -1,4 +1,10 @@
import fu from 'struct-fu'
// TODO: remove once completely merged in vhd.js
import assert from 'assert'
import constantStream from 'constant-stream'
import eventToPromise from 'event-to-promise'
import fu from '@nraynaud/struct-fu'
import isEqual from 'lodash/isEqual'
import {
noop,
@@ -39,6 +45,10 @@ const HARD_DISK_TYPE_DIFFERENCING = 4 // Delta backup.
const BLOCK_UNUSED = 0xFFFFFFFF
const BIT_MASK = 0x80
// unused block as buffer containing a uint32BE
const BUF_BLOCK_UNUSED = Buffer.allocUnsafe(VHD_ENTRY_SIZE)
BUF_BLOCK_UNUSED.writeUInt32BE(BLOCK_UNUSED, 0)
// ===================================================================
const fuFooter = fu.struct([
@@ -91,7 +101,7 @@ const fuHeader = fu.struct([
fu.uint8('parentUuid', 16),
fu.uint32('parentTimestamp'),
fu.uint32('reserved1'),
fu.char('parentUnicodeName', 512),
fu.char16be('parentUnicodeName', 512),
fu.struct('parentLocatorEntry', [
fu.uint32('platformCode'),
fu.uint32('platformDataSpace'),
@@ -144,40 +154,28 @@ const unpackField = (field, buf) => {
}
// ===================================================================
// Returns the checksum of a raw footer.
// The raw footer is altered with the new sum.
function checksumFooter (rawFooter) {
const checksumField = fuFooter.fields.checksum
// Returns the checksum of a raw struct.
// The raw struct (footer or header) is altered with the new sum.
function checksumStruct (rawStruct, struct) {
const checksumField = struct.fields.checksum
let sum = 0
// Reset current sum.
packField(checksumField, 0, rawFooter)
packField(checksumField, 0, rawStruct)
for (let i = 0; i < VHD_FOOTER_SIZE; i++) {
sum = (sum + rawFooter[i]) & 0xFFFFFFFF
for (let i = 0, n = struct.size; i < n; i++) {
sum = (sum + rawStruct[i]) & 0xFFFFFFFF
}
sum = 0xFFFFFFFF - sum
// Write new sum.
packField(checksumField, sum, rawFooter)
packField(checksumField, sum, rawStruct)
return sum
}
function getParentLocatorSize (parentLocatorEntry) {
const { platformDataSpace } = parentLocatorEntry
if (platformDataSpace < VHD_SECTOR_SIZE) {
return sectorsToBytes(platformDataSpace)
}
return (platformDataSpace % VHD_SECTOR_SIZE === 0)
? platformDataSpace
: 0
}
// ===================================================================
class Vhd {
@@ -190,6 +188,17 @@ class Vhd {
// Read functions.
// =================================================================
_readStream (start, n) {
return this._handler.createReadStream(this._path, {
start,
end: start + n - 1 // end is inclusive
})
}
_read (start, n) {
return this._readStream(start, n).then(streamToBuffer)
}
// Returns the first address after metadata. (In bytes)
getEndOfHeaders () {
const { header } = this
@@ -207,10 +216,10 @@ class Vhd {
const entry = header.parentLocatorEntry[i]
if (entry.platformCode !== VHD_PLATFORM_CODE_NONE) {
const dataOffset = uint32ToUint64(entry.platformDataOffset)
// Max(end, locator end)
end = Math.max(end, dataOffset + getParentLocatorSize(entry))
end = Math.max(end,
uint32ToUint64(entry.platformDataOffset) +
sectorsToBytes(entry.platformDataSpace)
)
}
}
@@ -221,17 +230,15 @@ class Vhd {
// Returns the first sector after data.
getEndOfData () {
let end = Math.floor(this.getEndOfHeaders() / VHD_SECTOR_SIZE)
let end = Math.ceil(this.getEndOfHeaders() / VHD_SECTOR_SIZE)
const fullBlockSize = this.sectorsOfBitmap + this.sectorsPerBlock
const { maxTableEntries } = this.header
for (let i = 0; i < maxTableEntries; i++) {
let blockAddr = this.readAllocationTableEntry(i)
const blockAddr = this._getBatEntry(i)
if (blockAddr !== BLOCK_UNUSED) {
// Compute next block address.
blockAddr += this.sectorsPerBlock + this.sectorsOfBitmap
end = Math.max(end, blockAddr)
end = Math.max(end, blockAddr + fullBlockSize)
}
}
@@ -240,24 +247,12 @@ class Vhd {
return sectorsToBytes(end)
}
// Returns the start position of the vhd footer.
// The real footer, not the copy at the beginning of the vhd file.
async getFooterStart () {
const stats = await this._handler.getSize(this._path)
return stats.size - VHD_FOOTER_SIZE
}
// Get the beginning (footer + header) of a vhd file.
async readHeaderAndFooter () {
const buf = await streamToBuffer(
await this._handler.createReadStream(this._path, {
start: 0,
end: VHD_FOOTER_SIZE + VHD_HEADER_SIZE - 1
})
)
const buf = await this._read(0, VHD_FOOTER_SIZE + VHD_HEADER_SIZE)
const sum = unpackField(fuFooter.fields.checksum, buf)
const sumToTest = checksumFooter(buf)
const sumToTest = checksumStruct(buf, fuFooter)
// Checksum child & parent.
if (sumToTest !== sum) {
@@ -297,119 +292,176 @@ class Vhd {
sectorsRoundUpNoZero(header.maxTableEntries * VHD_ENTRY_SIZE)
)
this.blockTable = await streamToBuffer(
await this._handler.createReadStream(this._path, {
start: offset,
end: offset + size - 1
})
)
this.blockTable = await this._read(offset, size)
}
// Returns the address block at the entry location of one table.
readAllocationTableEntry (entry) {
return this.blockTable.readUInt32BE(entry * VHD_ENTRY_SIZE)
// return the first sector (bitmap) of a block
_getBatEntry (block) {
return this.blockTable.readUInt32BE(block * VHD_ENTRY_SIZE)
}
// Returns the data content of a block. (Not the bitmap !)
async readBlockData (blockAddr) {
const { blockSize } = this.header
const handler = this._handler
const path = this._path
const blockDataAddr = sectorsToBytes(blockAddr + this.sectorsOfBitmap)
const footerStart = await this.getFooterStart()
const isPadded = footerStart < (blockDataAddr + blockSize)
// Size ot the current block in the vhd file.
const size = isPadded ? (footerStart - blockDataAddr) : sectorsToBytes(this.sectorsPerBlock)
debug(`Read block data at: ${blockDataAddr}. (size=${size})`)
const buf = await streamToBuffer(
await handler.createReadStream(path, {
start: blockDataAddr,
end: blockDataAddr + size - 1
})
)
// Padded by zero !
if (isPadded) {
return Buffer.concat([buf, new Buffer(blockSize - size).fill(0)])
_readBlock (blockId, onlyBitmap = false) {
const blockAddr = this._getBatEntry(blockId)
if (blockAddr === BLOCK_UNUSED) {
throw new Error(`no such block ${blockId}`)
}
return buf
return this._read(
sectorsToBytes(blockAddr),
onlyBitmap ? this.bitmapSize : this.fullBlockSize
).then(buf => onlyBitmap
? { bitmap: buf }
: {
bitmap: buf.slice(0, this.bitmapSize),
data: buf.slice(this.bitmapSize)
}
)
}
// Returns a buffer that contains the bitmap of a block.
// get the identifiers and first sectors of the first and last block
// in the file
//
// TODO: merge with readBlockData().
async readBlockBitmap (blockAddr) {
const { bitmapSize } = this
const offset = sectorsToBytes(blockAddr)
// return undefined if none
_getFirstAndLastBlocks () {
const n = this.header.maxTableEntries
const bat = this.blockTable
let i = 0
let j = 0
let first, firstSector, last, lastSector
debug(`Read bitmap at: ${offset}. (size=${bitmapSize})`)
// get first allocated block for initialization
while ((firstSector = bat.readUInt32BE(j)) === BLOCK_UNUSED) {
i += 1
j += VHD_ENTRY_SIZE
return streamToBuffer(
await this._handler.createReadStream(this._path, {
start: offset,
end: offset + bitmapSize - 1
})
)
if (i === n) {
return
}
}
lastSector = firstSector
first = last = i
while (i < n) {
const sector = bat.readUInt32BE(j)
if (sector !== BLOCK_UNUSED) {
if (sector < firstSector) {
first = i
firstSector = sector
} else if (sector > lastSector) {
last = i
lastSector = sector
}
}
i += 1
j += VHD_ENTRY_SIZE
}
return { first, firstSector, last, lastSector }
}
// =================================================================
// Write functions.
// =================================================================
// Write a buffer at a given position in a vhd file.
async _write (buffer, offset) {
// Write a buffer/stream at a given position in a vhd file.
_write (data, offset) {
debug(`_write offset=${offset} size=${Buffer.isBuffer(data) ? data.length : '???'}`)
// TODO: could probably be merged in remote handlers.
return this._handler.createOutputStream(this._path, {
start: offset,
flags: 'r+'
}).then(stream => new Promise((resolve, reject) => {
stream.on('error', reject)
stream.write(buffer, () => {
stream.end()
resolve()
})
}))
flags: 'r+',
start: offset
}).then(
Buffer.isBuffer(data)
? stream => new Promise((resolve, reject) => {
stream.on('error', reject)
stream.end(data, resolve)
})
: stream => eventToPromise(data.pipe(stream), 'finish')
)
}
// Write an entry in the allocation table.
writeAllocationTableEntry (entry, value) {
this.blockTable.writeUInt32BE(value, entry * VHD_ENTRY_SIZE)
async ensureBatSize (size) {
const { header } = this
const prevMaxTableEntries = header.maxTableEntries
if (prevMaxTableEntries >= size) {
return
}
const tableOffset = uint32ToUint64(header.tableOffset)
const { first, firstSector, lastSector } = this._getFirstAndLastBlocks()
// extend BAT
const maxTableEntries = header.maxTableEntries = size
const batSize = maxTableEntries * VHD_ENTRY_SIZE
const prevBat = this.blockTable
const bat = this.blockTable = Buffer.allocUnsafe(batSize)
prevBat.copy(bat)
bat.fill(BUF_BLOCK_UNUSED, prevBat.length)
debug(`ensureBatSize: extend in memory BAT ${prevMaxTableEntries} -> ${maxTableEntries}`)
const extendBat = () => {
debug(`ensureBatSize: extend in file BAT ${prevMaxTableEntries} -> ${maxTableEntries}`)
return this._write(
constantStream(BUF_BLOCK_UNUSED, maxTableEntries - prevMaxTableEntries),
tableOffset + prevBat.length
)
}
if (tableOffset + batSize < sectorsToBytes(firstSector)) {
return Promise.all([
extendBat(),
this.writeHeader()
])
}
const { fullBlockSize } = this
const newFirstSector = lastSector + fullBlockSize / VHD_SECTOR_SIZE
debug(`ensureBatSize: move first block ${firstSector} -> ${newFirstSector}`)
return Promise.all([
// copy the first block at the end
this._readStream(sectorsToBytes(firstSector), fullBlockSize).then(stream =>
this._write(stream, sectorsToBytes(newFirstSector))
).then(extendBat),
this._setBatEntry(first, newFirstSector),
this.writeHeader(),
this.writeFooter()
])
}
// set the first sector (bitmap) of a block
_setBatEntry (block, blockSector) {
const i = block * VHD_ENTRY_SIZE
const { blockTable } = this
blockTable.writeUInt32BE(blockSector, i)
return this._write(
blockTable.slice(i, i + VHD_ENTRY_SIZE),
uint32ToUint64(this.header.tableOffset) + i
)
}
// Make a new empty block at vhd end.
// Update block allocation table in context and in file.
async createBlock (blockId) {
// End of file !
let offset = this.getEndOfData()
const blockAddr = Math.ceil(this.getEndOfData() / VHD_SECTOR_SIZE)
// Padded on bound sector.
if (offset % VHD_SECTOR_SIZE) {
offset += (VHD_SECTOR_SIZE - (offset % VHD_SECTOR_SIZE))
}
debug(`create block ${blockId} at ${blockAddr}`)
const blockAddr = Math.floor(offset / VHD_SECTOR_SIZE)
await Promise.all([
// Write an empty block and addr in vhd file.
this._write(
constantStream([ 0 ], this.fullBlockSize),
sectorsToBytes(blockAddr)
),
const {
blockTable,
fullBlockSize
} = this
debug(`Create block at ${blockAddr}. (size=${fullBlockSize}, offset=${offset})`)
// New entry in block allocation table.
this.writeAllocationTableEntry(blockId, blockAddr)
const tableOffset = uint32ToUint64(this.header.tableOffset)
const entry = blockId * VHD_ENTRY_SIZE
// Write an empty block and addr in vhd file.
await this._write(new Buffer(fullBlockSize).fill(0), offset)
await this._write(blockTable.slice(entry, entry + VHD_ENTRY_SIZE), tableOffset + entry)
this._setBatEntry(blockId, blockAddr)
])
return blockAddr
}
@@ -428,17 +480,16 @@ class Vhd {
await this._write(bitmap, sectorsToBytes(blockAddr))
}
async writeBlockSectors (block, beginSectorId, n) {
let blockAddr = this.readAllocationTableEntry(block.id)
async writeBlockSectors (block, beginSectorId, endSectorId) {
let blockAddr = this._getBatEntry(block.id)
if (blockAddr === BLOCK_UNUSED) {
blockAddr = await this.createBlock(block.id)
}
const endSectorId = beginSectorId + n
const offset = blockAddr + this.sectorsOfBitmap + beginSectorId
debug(`Write block data at: ${offset}. (counter=${n}, blockId=${block.id}, blockSector=${beginSectorId})`)
debug(`writeBlockSectors at ${offset} block=${block.id}, sectors=${beginSectorId}...${endSectorId}`)
await this._write(
block.data.slice(
@@ -448,7 +499,7 @@ class Vhd {
sectorsToBytes(offset)
)
const bitmap = await this.readBlockBitmap(this.bitmapSize, blockAddr)
const { bitmap } = await this._readBlock(block.id, true)
for (let i = beginSectorId; i < endSectorId; ++i) {
mapSetBit(bitmap, i)
@@ -458,61 +509,69 @@ class Vhd {
}
// Merge block id (of vhd child) into vhd parent.
async coalesceBlock (child, blockAddr, blockId) {
async coalesceBlock (child, blockId) {
// Get block data and bitmap of block id.
const blockData = await child.readBlockData(blockAddr)
const blockBitmap = await child.readBlockBitmap(blockAddr)
const { bitmap, data } = await child._readBlock(blockId)
debug(`Coalesce block ${blockId} at ${blockAddr}.`)
debug(`coalesceBlock block=${blockId}`)
// For each sector of block data...
const { sectorsPerBlock } = child
for (let i = 0; i < sectorsPerBlock; i++) {
// If no changes on one sector, skip.
if (!mapTestBit(blockBitmap, i)) {
if (!mapTestBit(bitmap, i)) {
continue
}
let sectors = 0
let endSector = i + 1
// Count changed sectors.
for (; sectors + i < sectorsPerBlock; sectors++) {
if (!mapTestBit(blockBitmap, sectors + i)) {
break
}
while (endSector < sectorsPerBlock && mapTestBit(bitmap, endSector)) {
++endSector
}
// Write n sectors into parent.
debug(`Coalesce block: write. (offset=${i}, sectors=${sectors})`)
debug(`coalesceBlock: write sectors=${i}...${endSector}`)
await this.writeBlockSectors(
{ id: blockId, data: blockData },
{ id: blockId, data },
i,
sectors
endSector
)
i += sectors
i = endSector
}
}
// Write a context footer. (At the end and beggining of a vhd file.)
// Write a context footer. (At the end and beginning of a vhd file.)
async writeFooter () {
const { footer } = this
const offset = this.getEndOfData()
const rawFooter = fuFooter.pack(footer)
footer.checksum = checksumFooter(rawFooter)
footer.checksum = checksumStruct(rawFooter, fuFooter)
debug(`Write footer at: ${offset} (checksum=${footer.checksum}). (data=${rawFooter.toString('hex')})`)
await this._write(rawFooter, 0)
await this._write(rawFooter, offset)
}
writeHeader () {
const { header } = this
const rawHeader = fuHeader.pack(header)
header.checksum = checksumStruct(rawHeader, fuHeader)
const offset = VHD_FOOTER_SIZE
debug(`Write header at: ${offset} (checksum=${header.checksum}). (data=${rawHeader.toString('hex')})`)
return this._write(rawHeader, offset)
}
}
// Merge vhd child into vhd parent.
//
// Child must be a delta backup !
// Parent must be a full backup !
//
// TODO: update the identifier of the parent VHD.
export default async function vhdMerge (
parentHandler, parentPath,
childHandler, childPath
@@ -526,14 +585,16 @@ export default async function vhdMerge (
childVhd.readHeaderAndFooter()
])
assert(childVhd.header.blockSize === parentVhd.header.blockSize)
// Child must be a delta.
if (childVhd.footer.diskType !== HARD_DISK_TYPE_DIFFERENCING) {
throw new Error(`Unable to merge, child is not a delta backup.`)
throw new Error('Unable to merge, child is not a delta backup.')
}
// Merging in differencing disk is prohibited in our case.
if (parentVhd.footer.diskType !== HARD_DISK_TYPE_DYNAMIC) {
throw new Error(`Unable to merge, parent is not a full backup.`)
throw new Error('Unable to merge, parent is not a full backup.')
}
// Allocation table map is not yet implemented.
@@ -541,7 +602,7 @@ export default async function vhdMerge (
parentVhd.hasBlockAllocationTableMap() ||
childVhd.hasBlockAllocationTableMap()
) {
throw new Error(`Unsupported allocation table map.`)
throw new Error('Unsupported allocation table map.')
}
// Read allocation table of child/parent.
@@ -550,17 +611,66 @@ export default async function vhdMerge (
childVhd.readBlockTable()
])
for (let blockId = 0; blockId < childVhd.header.maxTableEntries; blockId++) {
const blockAddr = childVhd.readAllocationTableEntry(blockId)
await parentVhd.ensureBatSize(childVhd.header.maxTableEntries)
if (blockAddr !== BLOCK_UNUSED) {
await parentVhd.coalesceBlock(
childVhd,
blockAddr,
blockId
)
for (let blockId = 0; blockId < childVhd.header.maxTableEntries; blockId++) {
if (childVhd._getBatEntry(blockId) !== BLOCK_UNUSED) {
await parentVhd.coalesceBlock(childVhd, blockId)
}
}
const cFooter = childVhd.footer
const pFooter = parentVhd.footer
pFooter.currentSize = { ...cFooter.currentSize }
pFooter.diskGeometry = { ...cFooter.diskGeometry }
pFooter.originalSize = { ...cFooter.originalSize }
pFooter.timestamp = cFooter.timestamp
// necessary to update values and to recreate the footer after block
// creation
await parentVhd.writeFooter()
}
// returns true if the child was actually modified
export async function chainVhd (
parentHandler, parentPath,
childHandler, childPath
) {
const parentVhd = new Vhd(parentHandler, parentPath)
const childVhd = new Vhd(childHandler, childPath)
await Promise.all([
parentVhd.readHeaderAndFooter(),
childVhd.readHeaderAndFooter()
])
const { header } = childVhd
const parentName = parentPath.split('/').pop()
const parentUuid = parentVhd.footer.uuid
if (
header.parentUnicodeName !== parentName ||
!isEqual(header.parentUuid, parentUuid)
) {
header.parentUuid = parentUuid
header.parentUnicodeName = parentName
await childVhd.writeHeader()
return true
}
// The checksum was broken between xo-server v5.2.4 and v5.2.5
//
// Replace by a correct checksum if necessary.
//
// TODO: remove when enough time as passed (6 months).
{
const rawHeader = fuHeader.pack(header)
const checksum = checksumStruct(rawHeader, fuHeader)
if (checksum !== header.checksum) {
await childVhd._write(rawHeader, VHD_FOOTER_SIZE)
return true
}
}
return false
}

View File

@@ -1,53 +0,0 @@
import createDebug from 'debug'
import WebSocket from 'ws'
const debug = createDebug('xo:wsProxy')
const defaults = {
// Automatically close the client connection when the remote close.
autoClose: true
}
// Proxy a WebSocket `client` to a remote server which has `url` as
// address.
export default function wsProxy (client, url, opts) {
opts = {
...defaults,
protocol: client.protocol,
...opts
}
const autoClose = !!opts.autoClose
delete opts.autoClose
function onClientSend (error) {
if (error) {
debug('client send error', error)
}
}
function onRemoteSend (error) {
if (error) {
debug('remote send error', error)
}
}
const remote = new WebSocket(url, opts).once('open', function () {
debug('connected to %s', url)
}).once('close', function () {
debug('remote closed')
if (autoClose) {
client.close()
}
}).once('error', function (error) {
debug('remote error: %s', error)
}).on('message', function (message) {
client.send(message, onClientSend)
})
client.once('close', function () {
debug('client closed')
remote.close()
}).on('message', function (message) {
remote.send(message, onRemoteSend)
})
}

View File

@@ -1,8 +1,14 @@
import {
startsWith
} from 'lodash'
import {
ensureArray,
extractProperty,
forEach,
isArray,
isEmpty,
mapFilter,
mapToArray,
parseXml
} from './utils'
@@ -12,6 +18,9 @@ import {
isVmRunning,
parseDateTime
} from './xapi'
import {
useUpdateSystem
} from './xapi/utils'
// ===================================================================
@@ -35,18 +44,28 @@ function link (obj, prop, idField = '$id') {
// Parse a string date time to a Unix timestamp (in seconds).
//
// If the value is a number or can be converted as one, it is assumed
// to already be a timestamp and returned.
//
// If there are no data or if the timestamp is 0, returns null.
function toTimestamp (date) {
if (!date) {
return null
}
const ms = parseDateTime(date).getTime()
const timestamp = +date
// Not NaN.
if (timestamp === timestamp) { // eslint-disable-line no-self-compare
return timestamp
}
const ms = parseDateTime(date)
if (!ms) {
return null
}
return Math.round(ms / 1000)
return Math.round(ms.getTime() / 1000)
}
// ===================================================================
@@ -83,18 +102,51 @@ const TRANSFORMS = {
} = obj
const isRunning = isHostRunning(obj)
const { software_version } = obj
let supplementalPacks, patches
if (useUpdateSystem(obj)) {
supplementalPacks = []
patches = []
forEach(obj.$updates, update => {
const formattedUpdate = {
name: update.name_label,
description: update.name_description,
author: update.key.split('-')[3],
version: update.version,
guidance: update.after_apply_guidance,
hosts: link(update, 'hosts'),
vdi: link(update, 'vdi'),
size: update.installation_size
}
if (startsWith(update.name_label, 'XS')) {
patches.push(formattedUpdate)
} else {
supplementalPacks.push(formattedUpdate)
}
})
}
return {
// Deprecated
CPUs: obj.cpu_info,
address: obj.address,
bios_strings: obj.bios_strings,
build: obj.software_version.build_number,
CPUs: obj.cpu_info,
enabled: Boolean(obj.enabled),
cpus: {
cores: +obj.cpu_info.cpu_count,
sockets: +obj.cpu_info.socket_count
},
current_operations: obj.current_operations,
hostname: obj.hostname,
iSCSI_name: otherConfig.iscsi_iqn || null,
license_params: obj.license_params,
license_server: obj.license_server,
license_expiry: toTimestamp(obj.license_params.expiry),
name_description: obj.name_description,
name_label: obj.name_label,
memory: (function () {
@@ -110,14 +162,32 @@ const TRANSFORMS = {
return {
usage: 0,
size: 0,
// Deprecated
total: 0
}
})(),
patches: link(obj, 'patches'),
patches: patches || link(obj, 'patches'),
powerOnMode: obj.power_on_mode,
power_state: metrics
? (isRunning ? 'Running' : 'Halted')
: 'Unknown',
startTime: toTimestamp(otherConfig.boot_time),
supplementalPacks: supplementalPacks ||
mapFilter(software_version, (value, key) => {
let author, name
if (([ author, name ] = key.split(':')).length === 2) {
const [ description, version ] = value.split(', ')
return {
name,
description,
author,
version: version.split(' ')[1]
}
}
}),
agentStartTime: toTimestamp(otherConfig.agent_start_time),
tags: obj.tags,
version: obj.software_version.product_version,
@@ -152,19 +222,49 @@ const TRANSFORMS = {
const isHvm = isVmHvm(obj)
const isRunning = isVmRunning(obj)
const xenTools = (() => {
if (!isRunning || !metrics) {
// Unknown status, returns nothing.
return
}
if (!guestMetrics) {
return false
}
const { major, minor } = guestMetrics.PV_drivers_version
const [ hostMajor, hostMinor ] = (obj.$resident_on || obj.$pool.$master)
.software_version
.product_version
.split('.')
return major >= hostMajor && minor >= hostMinor
? 'up to date'
: 'out of date'
})()
let resourceSet = otherConfig['xo:resource_set']
if (resourceSet) {
try {
resourceSet = JSON.parse(resourceSet)
} catch (_) {
resourceSet = undefined
}
}
const vm = {
// type is redefined after for controllers/, templates &
// snapshots.
type: 'VM',
addresses: guestMetrics && guestMetrics.networks || null,
addresses: (guestMetrics && guestMetrics.networks) || null,
affinityHost: link(obj, 'affinity'),
auto_poweron: Boolean(otherConfig.auto_poweron),
boot: obj.HVM_boot_params,
CPUs: {
max: +obj.VCPUs_max,
number: (
isRunning && metrics
isRunning && metrics && xenTools
? +metrics.VCPUs_number
: +obj.VCPUs_at_startup
)
@@ -191,7 +291,8 @@ const TRANSFORMS = {
return {
enabled: true,
info: info && parseXml(info).docker_info,
process: process && parseXml(process).docker_ps,
containers: ensureArray(process && parseXml(process).docker_ps.item),
process: process && parseXml(process).docker_ps, // deprecated (only used in v4)
version: version && parseXml(version).docker_version
}
})(),
@@ -225,12 +326,15 @@ const TRANSFORMS = {
return memory
})(),
installTime: metrics && toTimestamp(metrics.install_time),
name_description: obj.name_description,
name_label: obj.name_label,
other: otherConfig,
os_version: guestMetrics && guestMetrics.os_version || null,
os_version: (guestMetrics && guestMetrics.os_version) || null,
power_state: obj.power_state,
resourceSet,
snapshots: link(obj, 'snapshots'),
startTime: metrics && toTimestamp(metrics.start_time),
tags: obj.tags,
VIFs: link(obj, 'VIFs'),
virtualizationMode: isHvm ? 'hvm' : 'pv',
@@ -241,25 +345,7 @@ const TRANSFORMS = {
// - false: not optimized
// - 'out of date': optimized but drivers should be updated
// - 'up to date': optimized
xenTools: (() => {
if (!isRunning || !metrics) {
// Unknown status, returns nothing.
return
}
if (!guestMetrics) {
return false
}
const { PV_drivers_version: { major, minor } } = guestMetrics
if (major === undefined || minor === undefined) {
return false
}
return guestMetrics.PV_drivers_up_to_date
? 'up to date'
: 'out of date'
})(),
xenTools,
$container: (
isRunning
@@ -281,6 +367,7 @@ const TRANSFORMS = {
vm.snapshot_time = toTimestamp(obj.snapshot_time)
vm.$snapshot_of = link(obj, 'snapshot_of')
} else if (obj.is_a_template) {
vm.id = obj.$ref // use refs for templates as they
vm.type += '-template'
vm.CPUs.number = +obj.VCPUs_at_startup
@@ -303,7 +390,7 @@ const TRANSFORMS = {
return disks
})(),
install_methods: (function () {
const {['install-methods']: methods} = otherConfig
const methods = otherConfig['install-methods']
return methods ? methods.split(',') : []
})(),
@@ -311,8 +398,10 @@ const TRANSFORMS = {
}
}
if (obj.VCPUs_params && obj.VCPUs_params.weight) {
vm.cpuWeight = obj.VCPUs_params.weight
let tmp
if ((tmp = obj.VCPUs_params)) {
tmp.cap && (vm.cpuCap = +tmp.cap)
tmp.weight && (vm.cpuWeight = +tmp.weight)
}
if (!isHvm) {
@@ -336,15 +425,18 @@ const TRANSFORMS = {
name_description: obj.name_description,
name_label: obj.name_label,
size: +obj.physical_size,
shared: Boolean(obj.shared),
SR_type: obj.type,
tags: obj.tags,
usage: +obj.virtual_allocation,
VDIs: link(obj, 'VDIs'),
other_config: obj.other_config,
sm_config: obj.sm_config,
$container: (
obj.shared
obj.shared || !obj.$PBDs[0]
? link(obj, 'pool')
: obj.$PBDs[0] && link(obj.$PBDs[0], 'host')
: link(obj.$PBDs[0], 'host')
),
$PBDs: link(obj, 'PBDs')
}
@@ -356,26 +448,32 @@ const TRANSFORMS = {
return {
type: 'PBD',
attached: obj.currently_attached,
attached: Boolean(obj.currently_attached),
host: link(obj, 'host'),
SR: link(obj, 'SR')
SR: link(obj, 'SR'),
device_config: obj.device_config
}
},
// -----------------------------------------------------------------
pif (obj) {
const metrics = obj.$metrics
return {
type: 'PIF',
attached: Boolean(obj.currently_attached),
isBondMaster: !isEmpty(obj.bond_master_of),
device: obj.device,
deviceName: metrics && metrics.device_name,
dns: obj.DNS,
disallowUnplug: Boolean(obj.disallow_unplug),
gateway: obj.gateway,
ip: obj.IP,
mac: obj.MAC,
management: Boolean(obj.management), // TODO: find a better name.
carrier: Boolean(metrics && metrics.carrier),
mode: obj.ip_configuration_mode,
mtu: +obj.MTU,
netmask: obj.netmask,
@@ -426,6 +524,7 @@ const TRANSFORMS = {
attached: Boolean(obj.currently_attached),
bootable: Boolean(obj.bootable),
device: obj.device || null,
is_cd_drive: obj.type === 'CD',
position: obj.userdevice,
read_only: obj.mode === 'RO',
@@ -440,6 +539,8 @@ const TRANSFORMS = {
return {
type: 'VIF',
allowedIpv4Addresses: obj.ipv4_allowed,
allowedIpv6Addresses: obj.ipv6_allowed,
attached: Boolean(obj.currently_attached),
device: obj.device, // TODO: should it be cast to a number?
MAC: obj.MAC,
@@ -455,9 +556,11 @@ const TRANSFORMS = {
network (obj) {
return {
bridge: obj.bridge,
defaultIsLocked: obj.default_locking_mode === 'disabled',
MTU: +obj.MTU,
name_description: obj.name_description,
name_label: obj.name_label,
other_config: obj.other_config,
tags: obj.tags,
PIFs: link(obj, 'PIFs'),
VIFs: link(obj, 'VIFs')

View File

@@ -1,4 +1,4 @@
import endsWith from 'lodash.endswith'
import endsWith from 'lodash/endsWith'
import JSON5 from 'json5'
import { BaseError } from 'make-error'
@@ -32,11 +32,7 @@ export class UnknownLegendFormat extends XapiStatsError {
}
}
export class FaultyGranularity extends XapiStatsError {
constructor (msg) {
super(msg)
}
}
export class FaultyGranularity extends XapiStatsError {}
// -------------------------------------------------------------------
// Utils
@@ -289,9 +285,10 @@ export default class XapiStats {
// Load
hostStats.load.push(convertNanToNull(values[hostLegends.load]))
// Memory
const memory = values[hostLegends.memory]
const memoryFree = values[hostLegends.memoryFree]
// Memory.
// WARNING! memory/memoryFree are in kB.
const memory = values[hostLegends.memory] * 1024
const memoryFree = values[hostLegends.memoryFree] * 1024
hostStats.memory.push(memory)
@@ -405,19 +402,24 @@ export default class XapiStats {
}
_getPoints (hostname, step, vmId) {
const hostStats = this._hosts[hostname][step]
// Return host points
if (vmId === undefined) {
return this._hosts[hostname][step]
return {
interval: step,
...hostStats
}
}
const vmsStats = this._vms[hostname][step]
// Return vm points
const points = { endTimestamp: this._hosts[hostname][step].endTimestamp }
if (this._vms[hostname][step] !== undefined) {
points.stats = this._vms[hostname][step][vmId]
return {
interval: step,
endTimestamp: hostStats.endTimestamp,
stats: (vmsStats && vmsStats[vmId]) || getNewVmStats()
}
return points
}
async _getAndUpdatePoints (xapi, host, vmId, granularity) {
@@ -528,6 +530,11 @@ export default class XapiStats {
async getVmPoints (xapi, vmId, granularity) {
const vm = xapi.getObject(vmId)
const host = vm.$resident_on
if (!host) {
throw new Error(`VM ${vmId} is halted or host could not be found.`)
}
return this._getAndUpdatePoints(xapi, host, vm.uuid, granularity)
}
}

File diff suppressed because it is too large Load Diff

View File

View File

@@ -0,0 +1,60 @@
import { isEmpty } from '../../utils'
import { makeEditObject } from '../utils'
export default {
async _connectVif (vif) {
await this.call('VIF.plug', vif.$ref)
},
async connectVif (vifId) {
await this._connectVif(this.getObject(vifId))
},
async _deleteVif (vif) {
await this.call('VIF.destroy', vif.$ref)
},
async deleteVif (vifId) {
const vif = this.getObject(vifId)
if (vif.currently_attached) {
await this._disconnectVif(vif)
}
await this._deleteVif(vif)
},
async _disconnectVif (vif) {
await this.call('VIF.unplug_force', vif.$ref)
},
async disconnectVif (vifId) {
await this._disconnectVif(this.getObject(vifId))
},
editVif: makeEditObject({
ipv4Allowed: {
get: true,
set: [
'ipv4Allowed',
function (value, vif) {
const lockingMode = isEmpty(value) && isEmpty(vif.ipv6_allowed)
? 'network_default'
: 'locked'
if (lockingMode !== vif.locking_mode) {
return this._set('locking_mode', lockingMode)
}
}
]
},
ipv6Allowed: {
get: true,
set: [
'ipv6Allowed',
function (value, vif) {
const lockingMode = isEmpty(value) && isEmpty(vif.ipv4_allowed)
? 'network_default'
: 'locked'
if (lockingMode !== vif.locking_mode) {
return this._set('locking_mode', lockingMode)
}
}
]
}
})
}

380
src/xapi/mixins/patching.js Normal file
View File

@@ -0,0 +1,380 @@
import deferrable from 'golike-defer'
import filter from 'lodash/filter'
import includes from 'lodash/includes'
import some from 'lodash/some'
import sortBy from 'lodash/sortBy'
import unzip from 'julien-f-unzip'
import httpProxy from '../../http-proxy'
import httpRequest from '../../http-request'
import { debounce } from '../../decorators'
import {
createRawObject,
ensureArray,
forEach,
mapFilter,
mapToArray,
parseXml
} from '../../utils'
import {
debug,
put,
useUpdateSystem
} from '../utils'
export default {
// FIXME: should be static
@debounce(24 * 60 * 60 * 1000)
async _getXenUpdates () {
const { readAll, statusCode } = await httpRequest(
'http://updates.xensource.com/XenServer/updates.xml',
{ agent: httpProxy }
)
if (statusCode !== 200) {
throw new Error('cannot fetch patches list from Citrix')
}
const data = parseXml(await readAll()).patchdata
const patches = createRawObject()
forEach(data.patches.patch, patch => {
patches[patch.uuid] = {
date: patch.timestamp,
description: patch['name-description'],
documentationUrl: patch.url,
guidance: patch['after-apply-guidance'],
name: patch['name-label'],
url: patch['patch-url'],
uuid: patch.uuid,
conflicts: mapToArray(ensureArray(patch.conflictingpatches), patch => {
return patch.conflictingpatch.uuid
}),
requirements: mapToArray(ensureArray(patch.requiredpatches), patch => {
return patch.requiredpatch.uuid
})
// TODO: what does it mean, should we handle it?
// version: patch.version,
}
if (patches[patch.uuid].conflicts[0] === undefined) {
patches[patch.uuid].conflicts.length = 0
}
if (patches[patch.uuid].requirements[0] === undefined) {
patches[patch.uuid].requirements.length = 0
}
})
const resolveVersionPatches = function (uuids) {
const versionPatches = createRawObject()
forEach(ensureArray(uuids), ({uuid}) => {
versionPatches[uuid] = patches[uuid]
})
return versionPatches
}
const versions = createRawObject()
let latestVersion
forEach(data.serverversions.version, version => {
versions[version.value] = {
date: version.timestamp,
name: version.name,
id: version.value,
documentationUrl: version.url,
patches: resolveVersionPatches(version.patch)
}
if (version.latest) {
latestVersion = versions[version.value]
}
})
return {
patches,
latestVersion,
versions
}
},
// =================================================================
// Returns installed and not installed patches for a given host.
async _getPoolPatchesForHost (host) {
const versions = (await this._getXenUpdates()).versions
const hostVersions = host.software_version
const version =
versions[hostVersions.product_version] ||
versions[hostVersions.product_version_text]
return version
? version.patches
: []
},
_getInstalledPoolPatchesOnHost (host) {
const installed = createRawObject()
// platform_version < 2.1.1
forEach(host.$patches, hostPatch => {
installed[hostPatch.$pool_patch.uuid] = true
})
// platform_version >= 2.1.1
forEach(host.$updates, update => {
installed[update.uuid] = true // TODO: ignore packs
})
return installed
},
async _listMissingPoolPatchesOnHost (host) {
const all = await this._getPoolPatchesForHost(host)
const installed = this._getInstalledPoolPatchesOnHost(host)
const installable = createRawObject()
forEach(all, (patch, uuid) => {
if (installed[uuid]) {
return
}
for (const uuid of patch.conflicts) {
if (uuid in installed) {
return
}
}
installable[uuid] = patch
})
return installable
},
async listMissingPoolPatchesOnHost (hostId) {
// Returns an array to not break compatibility.
return mapToArray(
await this._listMissingPoolPatchesOnHost(this.getObject(hostId))
)
},
async _ejectToolsIsos (hostRef) {
return Promise.all(mapFilter(
this.objects.all,
vm => {
if (vm.$type !== 'vm' || (hostRef && vm.resident_on !== hostRef)) {
return
}
const shouldEjectCd = some(vm.$VBDs, vbd => {
const vdi = vbd.$VDI
return vdi && vdi.is_tools_iso
})
if (shouldEjectCd) {
return this.ejectCdFromVm(vm.$id)
}
}
))
},
// -----------------------------------------------------------------
// Whether the pool patch `patchUuid` can be installed on `host`:
// it must not be installed already nor conflict with an installed
// patch.
_isPoolPatchInstallableOnHost (patchUuid, host) {
  const installed = this._getInstalledPoolPatchesOnHost(host)

  if (installed[patchUuid]) {
    return false
  }

  let installable = true

  // NOTE(review): `installed` maps UUIDs to `true` (see
  // _getInstalledPoolPatchesOnHost), so `patch` below is a boolean
  // and `patch.conflicts` is always undefined — this conflict check
  // appears to be a no-op. Confirm whether the map was meant to hold
  // patch objects instead.
  forEach(installed, patch => {
    if (includes(patch.conflicts, patchUuid)) {
      installable = false

      // Returning false stops the iteration early.
      return false
    }
  })

  return installable
},
// -----------------------------------------------------------------
// platform_version < 2.1.1 ----------------------------------------
// Uploads a pool patch (legacy, pre pool_update system) to the pool
// master over HTTPS and returns the resulting pool_patch object.
async uploadPoolPatch (stream, patchName = 'unknown') {
  // A XAPI task tracks the upload and yields the new patch ref.
  const taskRef = await this._createTask('Patch upload', patchName)
  const task = this._watchTask(taskRef)

  const [ patchRef ] = await Promise.all([
    task,
    put(stream, {
      hostname: this.pool.$master.address,
      path: '/pool_patch_upload',
      protocol: 'https',
      query: {
        session_id: this.sessionId,
        task_id: taskRef
      },
      // Accept the host's self-signed certificate.
      rejectUnauthorized: false
    }, task)
  ])

  return this._getOrWaitObject(patchRef)
},
// Returns the pool_patch object for `uuid`, downloading and
// uploading it first when it is not already present on the pool.
async _getOrUploadPoolPatch (uuid) {
  // Best effort: if the patch is already known, use it; otherwise
  // fall through to the download below.
  try {
    return this.getObjectByUuid(uuid)
  } catch (error) {}

  debug('downloading patch %s', uuid)

  const patchInfo = (await this._getXenUpdates()).patches[uuid]
  if (!patchInfo) {
    throw new Error('no such patch ' + uuid)
  }

  let stream = await httpRequest(patchInfo.url, { agent: httpProxy })
  stream = await new Promise((resolve, reject) => {
    const PATCH_RE = /\.xsupdate$/
    stream.pipe(unzip.Parse()).on('entry', entry => {
      if (PATCH_RE.test(entry.path)) {
        // Expose the ZIP entry size as `length` so the upload can
        // send a content length (see put()).
        entry.length = entry.size
        resolve(entry)
      } else {
        // Discard the other entries of the archive.
        entry.autodrain()
      }
    }).on('error', reject)
  })

  return this.uploadPoolPatch(stream, patchInfo.name)
},
// platform_version >= 2.1.1 ----------------------------------------
// Installs a patch ISO (new update system) on the host `hostId`:
// the stream is written to a temporary VDI (deleted afterwards),
// introduced as a pool update, then applied.
_installPatch: deferrable(async function ($defer, stream, { hostId }) {
  // The VDI creation requires a known content length.
  if (!stream.length) {
    throw new Error('stream must have a length')
  }

  const vdi = await this.createTemporaryVdiOnHost(stream, hostId, '[XO] Patch ISO', 'small temporary VDI to store a patch ISO')
  // Clean the temporary VDI up whatever the outcome.
  $defer(() => this._deleteVdi(vdi))

  const updateRef = await this.call('pool_update.introduce', vdi.$ref)
  // TODO: check update status
  // await this.call('pool_update.precheck', updateRef, host.$ref)
  // - ok_livepatch_complete An applicable live patch exists for every required component
  // - ok_livepatch_incomplete An applicable live patch exists but it is not sufficient
  // - ok There is no applicable live patch
  await this.call('pool_update.apply', updateRef, this.getObject(hostId).$ref)
}),
// Downloads the update `uuid` and installs it on the host `hostId`
// (new update system).
async _downloadPatchAndInstall (uuid, hostId) {
  debug('downloading patch %s', uuid)

  const patchInfo = (await this._getXenUpdates()).patches[uuid]
  if (!patchInfo) {
    throw new Error('no such patch ' + uuid)
  }

  let stream = await httpRequest(patchInfo.url, { agent: httpProxy })
  stream = await new Promise((resolve, reject) => {
    // The archive is expected to contain the update as its first
    // entry; expose its size as `length` for the upload.
    stream.pipe(unzip.Parse()).on('entry', entry => {
      entry.length = entry.size
      resolve(entry)
    }).on('error', reject)
  })

  return this._installPatch(stream, { hostId })
},
// -----------------------------------------------------------------
async _installPoolPatchOnHost (patchUuid, host) {
const [ patch ] = await Promise.all([ this._getOrUploadPoolPatch(patchUuid), this._ejectToolsIsos(host.$ref) ])
await this.call('pool_patch.apply', patch.$ref, host.$ref)
},
_installPatchUpdateOnHost (patchUuid, host) {
return Promise.all([ this._downloadPatchAndInstall(patchUuid, host.$id), this._ejectToolsIsos(host.$ref) ])
},
async _checkSoftwareVersionAndInstallPatch (patchUuid, hostId) {
const host = this.getObject(hostId)
return useUpdateSystem(host)
? this._installPatchUpdateOnHost(patchUuid, host)
: this._installPoolPatchOnHost(patchUuid, host)
},
async installPoolPatchOnHost (patchUuid, hostId) {
debug('installing patch %s', patchUuid)
return this._checkSoftwareVersionAndInstallPatch(patchUuid, hostId)
},
// -----------------------------------------------------------------
async installPoolPatchOnAllHosts (patchUuid) {
const [ patch ] = await Promise.all([ this._getOrUploadPoolPatch(patchUuid), this._ejectToolsIsos() ])
await this.call('pool_patch.pool_apply', patch.$ref)
},
// -----------------------------------------------------------------
// Installs `patch` on `host`, recursively installing its missing
// requirements first.
//
// `patchesByUuid` maps UUIDs to installable patches and is used to
// resolve the requirements.
async _installPoolPatchOnHostAndRequirements (patch, host, patchesByUuid) {
  const { requirements } = patch
  if (requirements.length) {
    for (const requirementUuid of requirements) {
      if (this._isPoolPatchInstallableOnHost(requirementUuid, host)) {
        const requirement = patchesByUuid[requirementUuid]
        await this._installPoolPatchOnHostAndRequirements(requirement, host, patchesByUuid)

        // Refetch the host object: its installed patches changed.
        host = this.getObject(host.$id)
      }
    }
  }

  await this._checkSoftwareVersionAndInstallPatch(patch.uuid, host)
},
// Installs every missing patch on the host `hostId`, resolving
// requirements along the way.
async installAllPoolPatchesOnHost (hostId) {
  let host = this.getObject(hostId)

  const installableByUuid = await this._listMissingPoolPatchesOnHost(host)

  // List of all installable patches sorted from the newest to the
  // oldest.
  const installable = sortBy(
    installableByUuid,
    patch => -Date.parse(patch.date)
  )

  for (let i = 0, n = installable.length; i < n; ++i) {
    const patch = installable[i]

    if (this._isPoolPatchInstallableOnHost(patch.uuid, host)) {
      await this._installPoolPatchOnHostAndRequirements(patch, host, installableByUuid).catch(error => {
        // A patch may already have been installed as a requirement
        // of a previous one: ignore only that specific error.
        if (error.code !== 'PATCH_ALREADY_APPLIED') {
          throw error
        }
      })
      // Refetch the host object: its installed patches changed.
      host = this.getObject(host.$id)
    }
  }
},
async installAllPoolPatchesOnAllHosts () {
await this.installAllPoolPatchesOnHost(this.pool.master)
// TODO: use pool_update.pool_apply for platform_version ^2.1.1
await Promise.all(mapToArray(
filter(this.objects.all, { $type: 'host' }),
host => this.installAllPoolPatchesOnHost(host.$id)
))
}
}

View File

@@ -0,0 +1,53 @@
import {
mapToArray
} from '../../utils'
export default {
  // Low level PBD helpers -------------------------------------------

  _plugPbd (pbd) {
    return this.call('PBD.plug', pbd.$ref)
  },

  _unplugPbd (pbd) {
    return this.call('PBD.unplug', pbd.$ref)
  },

  // Plugs the PBD identified by `id`.
  async plugPbd (id) {
    await this._plugPbd(this.getObject(id))
  },

  // Unplugs the PBD identified by `id`.
  async unplugPbd (id) {
    await this._unplugPbd(this.getObject(id))
  },

  // SR-wide operations ----------------------------------------------

  _connectAllSrPbds (sr) {
    return Promise.all(
      mapToArray(sr.$PBDs, pbd => this._plugPbd(pbd))
    )
  },

  // Plugs every PBD of the SR identified by `id`.
  async connectAllSrPbds (id) {
    await this._connectAllSrPbds(this.getObject(id))
  },

  _disconnectAllSrPbds (sr) {
    return Promise.all(
      mapToArray(sr.$PBDs, pbd => this._unplugPbd(pbd))
    )
  },

  // Unplugs every PBD of the SR identified by `id`.
  async disconnectAllSrPbds (id) {
    await this._disconnectAllSrPbds(this.getObject(id))
  },

  // Destroys the SR identified by `id` after unplugging its PBDs.
  async destroySr (id) {
    const sr = this.getObject(id)
    await this._disconnectAllSrPbds(sr)
    await this.call('SR.destroy', sr.$ref)
  },

  // Forgets the SR identified by `id` after unplugging its PBDs.
  async forgetSr (id) {
    const sr = this.getObject(id)
    await this._disconnectAllSrPbds(sr)
    await this.call('SR.forget', sr.$ref)
  }
}

371
src/xapi/mixins/vm.js Normal file
View File

@@ -0,0 +1,371 @@
import deferrable from 'golike-defer'
import find from 'lodash/find'
import gte from 'lodash/gte'
import isEmpty from 'lodash/isEmpty'
import lte from 'lodash/lte'
import {
forEach,
mapToArray,
noop,
parseSize,
pCatch
} from '../../utils'
import {
isVmHvm,
isVmRunning,
makeEditObject,
NULL_REF
} from '../utils'
export default {
// TODO: clean up on error.
//
// Creates a VM from the template `templateId`: clones (or copies)
// the template, provisions it, applies `props` via _editVm, sets up
// boot parameters, disks (existing and new), VIFs, and optionally an
// install CD and a cloud-init config drive.
//
// On failure the cloned VM is deleted (best effort).
//
// Returns the VM record once all created VBDs/VIFs are visible.
@deferrable.onFailure
async createVm ($onFailure, templateId, {
  name_label, // deprecated
  nameLabel = name_label, // eslint-disable-line camelcase

  clone = true,
  installRepository = undefined,
  vdis = undefined,
  vifs = undefined,
  existingVdis = undefined,

  coreOs = false,
  cloudConfig = undefined,

  ...props
} = {}, checkLimits) {
  // 'cd' when installRepository resolves to a local object,
  // 'network' otherwise, 'none' when not provided.
  const installMethod = (() => {
    if (installRepository == null) {
      return 'none'
    }

    try {
      installRepository = this.getObject(installRepository)
      return 'cd'
    } catch (_) {
      return 'network'
    }
  })()
  const template = this.getObject(templateId)

  // Clones the template.
  const vmRef = await this[clone ? '_cloneVm' : '_copyVm'](template, nameLabel)
  $onFailure(() => this.deleteVm(vmRef)::pCatch(noop))

  // TODO: copy BIOS strings?

  // Removes disks from the provision XML, we will create them by
  // ourselves.
  await this.call('VM.remove_from_other_config', vmRef, 'disks')::pCatch(noop)

  // Creates the VDIs and executes the initial steps of the
  // installation.
  await this.call('VM.provision', vmRef)

  let vm = await this._getOrWaitObject(vmRef)

  // Set VMs params.
  await this._editVm(vm, props, checkLimits)

  // Sets boot parameters.
  {
    const isHvm = isVmHvm(vm)

    if (isHvm) {
      if (!isEmpty(vdis) || installMethod === 'network') {
        const { HVM_boot_params: bootParams } = vm
        let order = bootParams.order
        if (order) {
          // Move network boot ('n') to the front of the boot order.
          order = 'n' + order.replace('n', '')
        } else {
          order = 'ncd'
        }

        this._setObjectProperties(vm, {
          HVM_boot_params: { ...bootParams, order }
        })
      }
    } else { // PV
      if (vm.PV_bootloader === 'eliloader') {
        if (installMethod === 'network') {
          // TODO: normalize RHEL URL?

          await this._updateObjectMapProperty(vm, 'other_config', {
            'install-repository': installRepository
          })
        } else if (installMethod === 'cd') {
          await this._updateObjectMapProperty(vm, 'other_config', {
            'install-repository': 'cdrom'
          })
        }
      }
    }
  }

  // Counters used at the end to wait for all devices to appear.
  let nVbds = vm.VBDs.length
  let hasBootableDisk = !!find(vm.$VBDs, 'bootable')

  // Inserts the CD if necessary.
  if (installMethod === 'cd') {
    // When the VM is started, if PV, the CD drive will become not
    // bootable and the first disk bootable.
    await this._insertCdIntoVm(installRepository, vm, {
      bootable: true
    })
    hasBootableDisk = true

    ++nVbds
  }

  // Modify existing (previous template) disks if necessary
  existingVdis && await Promise.all(mapToArray(existingVdis, async ({ size, $SR: srId, ...properties }, userdevice) => {
    const vbd = find(vm.$VBDs, { userdevice })
    if (!vbd) {
      return
    }
    const vdi = vbd.$VDI
    await this._setObjectProperties(vdi, properties)

    // if the disk is bigger
    if (
      size != null &&
      size > vdi.virtual_size
    ) {
      await this.resizeVdi(vdi.$id, size)
    }
    // if another SR is set, move it there
    if (srId) {
      await this.moveVdi(vdi.$id, srId)
    }
  }))

  // Creates the user defined VDIs.
  //
  // TODO: set vm.suspend_SR
  if (!isEmpty(vdis)) {
    const devices = await this.call('VM.get_allowed_VBD_devices', vm.$ref)
    await Promise.all(mapToArray(vdis, (vdiDescription, i) => {
      ++nVbds

      return this._createVdi(
        vdiDescription.size, // FIXME: Should not be done in Xapi.
        {
          name_label: vdiDescription.name_label,
          name_description: vdiDescription.name_description,
          sr: vdiDescription.sr || vdiDescription.SR
        }
      )
        .then(ref => this._getOrWaitObject(ref))
        .then(vdi => this._createVbd(vm, vdi, {
          // Either the CD or the 1st disk is bootable (only useful for PV VMs)
          bootable: !(hasBootableDisk || i),

          userdevice: devices[i]
        }))
    }))
  }

  // Destroys the VIFs cloned from the template.
  await Promise.all(mapToArray(vm.$VIFs, vif => this._deleteVif(vif)))

  // Creates the VIFs specified by the user.
  let nVifs = 0
  if (vifs) {
    const devices = await this.call('VM.get_allowed_VIF_devices', vm.$ref)
    await Promise.all(mapToArray(vifs, (vif, index) => {
      ++nVifs

      return this._createVif(
        vm,
        this.getObject(vif.network),
        {
          ipv4_allowed: vif.ipv4_allowed,
          ipv6_allowed: vif.ipv6_allowed,
          device: devices[index],
          // Lock the VIF as soon as any allowed-IP list is provided.
          locking_mode: isEmpty(vif.ipv4_allowed) && isEmpty(vif.ipv6_allowed) ? 'network_default' : 'locked',
          mac: vif.mac,
          mtu: vif.mtu
        }
      )
    }))
  }

  // TODO: Assign VGPUs.

  if (cloudConfig != null) {
    // Refresh the record.
    vm = await this._waitObjectState(vm.$id, vm => vm.VBDs.length === nVbds)

    // Find the SR of the first VDI.
    let srRef
    forEach(vm.$VBDs, vbd => {
      let vdi
      if (
        vbd.type === 'Disk' &&
        (vdi = vbd.$VDI)
      ) {
        srRef = vdi.SR
        return false
      }
    })

    const method = coreOs
      ? 'createCoreOsCloudInitConfigDrive'
      : 'createCloudInitConfigDrive'
    await this[method](vm.$id, srRef, cloudConfig)

    ++nVbds
  }

  // wait for the record with all the VBDs and VIFs
  return this._waitObjectState(vm.$id, vm =>
    vm.VBDs.length === nVbds &&
    vm.VIFs.length === nVifs
  )
},
// High level method to edit a VM.
//
// Params do not correspond directly to XAPI props.
//
// See makeEditObject for the spec format: `get`/`set` accessors,
// `constraints` between properties, `preprocess` value conversion,
// and `addToLimits` for resource accounting.
_editVm: makeEditObject({
  affinityHost: {
    get: 'affinity',
    set (value, vm) {
      return this._setObjectProperty(
        vm,
        'affinity',
        // A falsy value clears the affinity host.
        value ? this.getObject(value).$ref : NULL_REF
      )
    }
  },

  autoPoweron: {
    set (value, vm) {
      return Promise.all([
        this._updateObjectMapProperty(vm, 'other_config', {
          // `null` removes the entry from other_config.
          autoPoweron: value ? 'true' : null
        }),
        // Enabling it on a VM also enables it at the pool level.
        value && this.setPoolProperties({
          autoPoweron: true
        })
      ])
    }
  },

  CPUs: 'cpus',
  cpus: {
    addToLimits: true,

    // Current value may have constraints with other values.
    //
    // If the other value is not set and the constraint is not
    // respected, the other value is changed first.
    constraints: {
      cpusStaticMax: gte
    },

    get: vm => +vm.VCPUs_at_startup,
    set: [
      'VCPUs_at_startup',
      function (value, vm) {
        // Also live-update the vCPU count when the VM is running.
        return isVmRunning(vm) && this._set('VCPUs_number_live', value)
      }
    ]
  },

  cpuCap: {
    get: vm => vm.VCPUs_params.cap && +vm.VCPUs_params.cap,
    set (cap, vm) {
      return this._updateObjectMapProperty(vm, 'VCPUs_params', { cap })
    }
  },

  cpusMax: 'cpusStaticMax',
  cpusStaticMax: {
    constraints: {
      cpus: lte
    },
    get: vm => +vm.VCPUs_max,
    set: 'VCPUs_max'
  },

  cpuWeight: {
    get: vm => vm.VCPUs_params.weight && +vm.VCPUs_params.weight,
    set (weight, vm) {
      return this._updateObjectMapProperty(vm, 'VCPUs_params', { weight })
    }
  },

  highAvailability: {
    set (ha, vm) {
      return this.call('VM.set_ha_restart_priority', vm.$ref, ha ? 'restart' : '')
    }
  },

  memoryMin: {
    constraints: {
      memoryMax: gte
    },
    get: vm => +vm.memory_dynamic_min,
    preprocess: parseSize,
    set: 'memory_dynamic_min'
  },

  memory: 'memoryMax',
  memoryMax: {
    addToLimits: true,
    limitName: 'memory',
    constraints: {
      memoryMin: lte,
      memoryStaticMax: gte
    },
    get: vm => +vm.memory_dynamic_max,
    preprocess: parseSize,
    set: 'memory_dynamic_max'
  },

  memoryStaticMax: {
    constraints: {
      memoryMax: lte
    },
    get: vm => +vm.memory_static_max,
    preprocess: parseSize,
    set: 'memory_static_max'
  },

  nameDescription: true,

  nameLabel: true,

  PV_args: true,

  tags: true
}),
async editVm (id, props, checkLimits) {
return /* await */ this._editVm(this.getObject(id), props, checkLimits)
},
// Reverts a VM to the snapshot `snapshotId`, optionally snapshotting
// the current state first, then restores the power state recorded at
// snapshot time (best effort).
async revertVm (snapshotId, snapshotBefore = true) {
  const snapshot = this.getObject(snapshotId)
  if (snapshotBefore) {
    // Safety snapshot of the current state before reverting.
    await this._snapshotVm(snapshot.$snapshot_of)
  }
  await this.call('VM.revert', snapshot.$ref)

  if (snapshot.snapshot_info['power-state-at-snapshot'] === 'Running') {
    const vm = snapshot.$snapshot_of
    if (vm.power_state === 'Halted') {
      // Deliberately not awaited: best-effort start.
      this.startVm(vm.$id)::pCatch(noop)
    } else if (vm.power_state === 'Suspended') {
      // Deliberately not awaited: best-effort resume.
      this.resumeVm(vm.$id)::pCatch(noop)
    }
  }
},
async resumeVm (vmId) {
// the force parameter is always true
return this.call('VM.resume', this.getObject(vmId).$ref, false, true)
}
}

View File

@@ -0,0 +1,53 @@
import { NULL_REF } from './utils'
// Default record used when creating a VM.
//
// NOTE(review): despite its name, this object appears to be a full
// VM record template, not only an `other_config` value — confirm
// before relying on the name.
const OTHER_CONFIG_TEMPLATE = {
  actions_after_crash: 'restart',
  actions_after_reboot: 'restart',
  actions_after_shutdown: 'destroy',
  affinity: null,
  blocked_operations: {},
  ha_always_run: false,
  HVM_boot_params: {
    order: 'cdn'
  },
  HVM_boot_policy: 'BIOS order',
  HVM_shadow_multiplier: 1,
  is_a_template: false,
  memory_dynamic_max: 4294967296, // 4 GiB
  memory_dynamic_min: 4294967296, // 4 GiB
  memory_static_max: 4294967296, // 4 GiB
  memory_static_min: 134217728, // 128 MiB
  order: 0,
  other_config: {
    vgpu_pci: '',
    base_template_name: 'Other install media',
    mac_seed: '5e88eb6a-d680-c47f-a94a-028886971ba4',
    'install-methods': 'cdrom'
  },
  PCI_bus: '',
  platform: {
    timeoffset: '0',
    nx: 'true',
    acpi: '1',
    apic: 'true',
    pae: 'true',
    hpet: 'true',
    viridian: 'true'
  },
  protection_policy: NULL_REF,
  PV_args: '',
  PV_bootloader: '',
  PV_bootloader_args: '',
  PV_kernel: '',
  PV_legacy_args: '',
  PV_ramdisk: '',
  recommendations: '<restrictions><restriction field="memory-static-max" max="137438953472" /><restriction field="vcpus-max" max="32" /><restriction property="number-of-vbds" max="255" /><restriction property="number-of-vifs" max="7" /><restriction field="has-vendor-device" value="false" /></restrictions>',
  shutdown_delay: 0,
  start_delay: 0,
  user_version: 1,
  VCPUs_at_startup: 1,
  VCPUs_max: 1,
  VCPUs_params: {},
  version: 0
}
export { OTHER_CONFIG_TEMPLATE as default }

392
src/xapi/utils.js Normal file
View File

@@ -0,0 +1,392 @@
// import isFinite from 'lodash/isFinite'
import camelCase from 'lodash/camelCase'
import createDebug from 'debug'
import isEqual from 'lodash/isEqual'
import isPlainObject from 'lodash/isPlainObject'
import pickBy from 'lodash/pickBy'
import { utcFormat, utcParse } from 'd3-time-format'
import { satisfies as versionSatisfies } from 'semver'
import httpRequest from '../http-request'
import {
camelToSnakeCase,
createRawObject,
forEach,
isArray,
isBoolean,
isFunction,
isInteger,
isString,
map,
mapFilter,
mapToArray,
noop,
pFinally
} from '../utils'
// ===================================================================
// Coerces any value to a real boolean.
export const asBoolean = value => !!value
// const asFloat = value => {
// value = String(value)
// return value.indexOf('.') === -1
// ? `${value}.0`
// : value
// }
// XAPI expects integers as strings.
export const asInteger = value => {
  return String(value)
}
// Returns a shallow copy of `obj` without the properties whose value
// is `undefined`.
export const filterUndefineds = obj => pickBy(obj, value => value !== undefined)
// Returns undefined when `value` is nullish, otherwise `fn(value)`
// (or `value` itself when no `fn` is given).
export const optional = (value, fn) => {
  if (value == null) {
    return undefined
  }

  return fn ? fn(value) : value
}
// Converts a JS value to the representation expected by XAPI:
// - integers are stringified,
// - booleans are coerced,
// - arrays and plain objects are converted recursively (undefined
//   properties are dropped from objects),
// - anything else is passed through unchanged.
export const prepareXapiParam = param => {
  // if (isFinite(param) && !isInteger(param)) {
  //   return asFloat(param)
  // }
  if (isInteger(param)) {
    return asInteger(param)
  }
  if (isBoolean(param)) {
    return asBoolean(param)
  }
  if (isArray(param)) {
    return map(param, prepareXapiParam)
  }
  if (isPlainObject(param)) {
    return map(filterUndefineds(param), prepareXapiParam)
  }

  return param
}
// -------------------------------------------------------------------
export const debug = createDebug('xo:xapi')
// -------------------------------------------------------------------
// Matches a XAPI opaque reference embedded in an arbitrary string.
const OPAQUE_REF_RE = /OpaqueRef:[0-9a-z-]+/

// Returns the first opaque ref found in `str`, throws when there is
// none.
export const extractOpaqueRef = str => {
  const match = OPAQUE_REF_RE.exec(str)
  if (match === null) {
    throw new Error('no opaque ref found')
  }

  return match[0]
}
// -------------------------------------------------------------------
// Maps lowercased XAPI type names to their case-sensitive namespaces.
const TYPE_TO_NAMESPACE = createRawObject()
forEach([
  'Bond',
  'DR_task',
  'GPU_group',
  'PBD',
  'PCI',
  'PGPU',
  'PIF',
  'PIF_metrics',
  'SM',
  'SR',
  'VBD',
  'VBD_metrics',
  'VDI',
  'VGPU',
  'VGPU_type',
  'VIF',
  'VLAN',
  'VM',
  'VM_appliance',
  'VM_guest_metrics',
  'VM_metrics',
  'VMPP',
  'VTPM'
], namespace => {
  TYPE_TO_NAMESPACE[namespace.toLowerCase()] = namespace
})

// Object types given by `xen-api` are always lowercase but the
// namespaces in the Xen API can have a different casing.
export const getNamespaceForType = type => TYPE_TO_NAMESPACE[type] || type
// -------------------------------------------------------------------
// Format a date (pseudo ISO 8601) from one XenServer get by
// xapi.call('host.get_servertime', host.$ref) for example
export const formatDateTime = utcFormat('%Y%m%dT%H:%M:%SZ')
export const parseDateTime = utcParse('%Y%m%dT%H:%M:%SZ')
// -------------------------------------------------------------------
// Whether the host is up according to its metrics (falsy when the
// metrics are missing).
export const isHostRunning = host => {
  const metrics = host.$metrics

  return metrics && metrics.live
}
// -------------------------------------------------------------------
// An HVM VM is detected by the presence of a HVM boot policy.
export const isVmHvm = vm => !!vm.HVM_boot_policy
// Power states in which a VM is considered running.
const RUNNING_POWER_STATES = {
  Paused: true,
  Running: true
}

// Returns `true` for a running/paused VM, `undefined` otherwise.
export const isVmRunning = vm => RUNNING_POWER_STATES[vm.power_state]
// -------------------------------------------------------------------
// Default limits accounting: the consumed amount is the delta between
// the new and the current value.
const _DEFAULT_ADD_TO_LIMITS = (next, current) => next - current

// Builds a high-level "edit object" function from a map of property
// specs.
//
// Each spec may define:
// - get: true (snake_case of the name) | string (prop name) | fn
// - set: true | string ('prop' or 'map.prop') | fn | array of those
// - preprocess: value conversion applied before comparison/set
// - constraints: { otherSpecName: predicate } — when the predicate
//   fails, the other property is updated first
// - addToLimits/limitName: resource-limits accounting
// A string spec is an alias to another spec.
export const makeEditObject = specs => {
  const normalizeGet = (get, name) => {
    if (get === true) {
      const prop = camelToSnakeCase(name)
      return object => object[prop]
    }

    if (isString(get)) {
      return object => object[get]
    }

    return get
  }
  const normalizeSet = (set, name) => {
    if (isFunction(set)) {
      return set
    }

    if (set === true) {
      const prop = camelToSnakeCase(name)
      return function (value) {
        return this._set(prop, value)
      }
    }

    if (isString(set)) {
      const index = set.indexOf('.')
      if (index === -1) {
        const prop = camelToSnakeCase(set)
        return function (value) {
          return this._set(prop, value)
        }
      }

      // 'map.prop' form: update a key inside a map property.
      const map = set.slice(0, index)
      const prop = set.slice(index + 1)

      return function (value, object) {
        return this._updateObjectMapProperty(object, map, { [prop]: value })
      }
    }

    if (!isArray(set)) {
      throw new Error('must be an array, a function or a string')
    }

    set = mapToArray(set, normalizeSet)

    const { length } = set
    if (!length) {
      throw new Error('invalid setter')
    }

    if (length === 1) {
      return set[0]
    }

    // Multiple setters: run them all in parallel.
    return function (value, object) {
      return Promise.all(mapToArray(set, set => set.call(this, value, object)))
    }
  }

  const normalizeSpec = (spec, name) => {
    if (spec === true) {
      spec = {
        get: true,
        set: true
      }
    }

    if (spec.addToLimits === true) {
      spec.addToLimits = _DEFAULT_ADD_TO_LIMITS
    }
    if (!spec.limitName) {
      spec.limitName = name
    }

    forEach(spec.constraints, (constraint, constraintName) => {
      if (!isFunction(constraint)) {
        throw new Error('constraint must be a function')
      }

      const constraintSpec = specs[constraintName]
      if (!constraintSpec.get) {
        throw new Error('constraint values must have a get')
      }
    })

    const { get } = spec
    if (get) {
      spec.get = normalizeGet(get, name)
    } else if (spec.addToLimits) {
      throw new Error('addToLimits cannot be defined without get')
    }

    spec.set = normalizeSet(spec.set, name)

    return spec
  }
  forEach(specs, (spec, name) => {
    isString(spec) || (specs[name] = normalizeSpec(spec, name))
  })

  // Resolves aliases and add camelCase and snake_case aliases.
  forEach(specs, (spec, name) => {
    if (isString(spec)) {
      do {
        spec = specs[spec]
      } while (isString(spec))
      specs[name] = spec
    }

    let tmp
    specs[tmp = camelCase(name)] || (specs[tmp] = spec)
    specs[tmp = camelToSnakeCase(name)] || (specs[tmp] = spec)
  })

  return async function _editObject_ (id, values, checkLimits) {
    const limits = checkLimits && {}
    const object = this.getObject(id)

    const _objectRef = object.$ref
    const _setMethodPrefix = `${getNamespaceForType(object.$type)}.set_`

    // Context used to execute functions.
    const context = {
      __proto__: this,
      _set: (prop, value) => this.call(_setMethodPrefix + prop, _objectRef, prepareXapiParam(value))
    }

    // Returns a callback performing the update for (value, name), or
    // undefined when there is nothing to do.
    const set = (value, name) => {
      if (value === undefined) {
        return
      }

      const spec = specs[name]
      if (!spec) {
        return
      }

      const { preprocess } = spec
      if (preprocess) {
        value = preprocess(value)
      }

      const { get } = spec
      if (get) {
        const current = get(object)
        // No-op when the value is already the current one.
        if (isEqual(value, current)) {
          return
        }

        let addToLimits
        if (limits && (addToLimits = spec.addToLimits)) {
          limits[spec.limitName] = addToLimits(value, current)
        }
      }

      const cb = () => spec.set.call(context, value, object)

      const { constraints } = spec
      if (constraints) {
        const cbs = []

        forEach(constraints, (constraint, constraintName) => {
          // Before setting a property to a new value, if the constraint check fails (e.g. memoryMin > memoryMax):
          // - if the user wants to set the constraint (ie constraintNewValue is defined):
          //   constraint <-- constraintNewValue THEN property <-- value (e.g. memoryMax <-- 2048 THEN memoryMin <-- 1024)
          // - if the user DOES NOT want to set the constraint (ie constraintNewValue is NOT defined):
          //   constraint <-- value THEN property <-- value (e.g. memoryMax <-- 1024 THEN memoryMin <-- 1024)
          // FIXME: Some values combinations will lead to setting the same property twice, which is not perfect but works for now.
          const constraintCurrentValue = specs[constraintName].get(object)
          const constraintNewValue = values[constraintName]

          if (!constraint(constraintCurrentValue, value)) {
            const cb = set(constraintNewValue == null ? value : constraintNewValue, constraintName)
            if (cb) {
              cbs.push(cb)
            }
          }
        })

        if (cbs.length) {
          // Apply the constraint updates first, then the property.
          return () => Promise.all(mapToArray(cbs, cb => cb())).then(cb)
        }
      }

      return cb
    }

    const cbs = mapFilter(values, set)

    if (checkLimits) {
      await checkLimits(limits, object)
    }

    return Promise.all(mapToArray(cbs, cb => cb())).then(noop)
  }
}
// ===================================================================
export const NULL_REF = 'OpaqueRef:NULL'
// ===================================================================
// HTTP put, use an ugly hack if the length is not known because XAPI
// does not support chunk encoding.
//
// `task` (a promise) is used to cancel the request once the XAPI
// task has settled when the length is unknown.
export const put = (stream, {
  headers: { ...headers } = {},
  ...opts
}, task) => {
  const makeRequest = () => httpRequest({
    ...opts,
    body: stream,
    headers,
    method: 'put'
  })

  // Xen API does not support chunk encoding.
  if (stream.length == null) {
    // Remove the `transfer-encoding` header.
    headers['transfer-encoding'] = null

    const promise = makeRequest()

    if (task) {
      // Some connections need the task to resolve (VDI import).
      task::pFinally(() => {
        promise.cancel()
      })
    } else {
      // Some tasks need the connection to close (VM import).
      promise.request.once('finish', () => {
        promise.cancel()
      })
    }

    return promise.readAll()
  }

  return makeRequest().readAll()
}
// Whether `host` uses the new pool_update system instead of the
// legacy pool_patch one (platform_version ^2.1.1).
export const useUpdateSystem = host => {
  // Match Xen Center's condition: https://github.com/xenserver/xenadmin/blob/f3a64fc54bbff239ca6f285406d9034f57537d64/XenModel/Utils/Helpers.cs#L420
  return versionSatisfies(host.software_version.platform_version, '^2.1.1')
}

View File

View File

@@ -19,27 +19,47 @@ export default class {
constructor (xo) {
this._xo = xo
this._acls = new Acls({
const aclsDb = this._acls = new Acls({
connection: xo._redis,
prefix: 'xo:acl',
indexes: ['subject', 'object']
})
xo.on('start', () => {
xo.addConfigManager('acls',
() => aclsDb.get(),
acls => aclsDb.update(acls)
)
})
xo.on('clean', async () => {
const acls = await aclsDb.get()
const toRemove = []
forEach(acls, ({ subject, object, action, id }) => {
if (!subject || !object || !action) {
toRemove.push(id)
}
})
await aclsDb.remove(toRemove)
})
}
async _getAclsForUser (userId) {
const subjects = (await this._xo.getUser(userId)).groups.concat(userId)
const user = await this._xo.getUser(userId)
const { groups } = user
const subjects = groups
? groups.concat(userId)
: [ userId ]
const acls = []
const pushAcls = (function (push) {
return function (entries) {
push.apply(acls, entries)
}
const pushAcls = (push => entries => {
push.apply(acls, entries)
})(acls.push)
const {_acls: collection} = this
await Promise.all(mapToArray(
subjects,
subject => collection.get({subject}).then(pushAcls)
subject => this.getAclsForSubject(subject).then(pushAcls)
))
return acls
@@ -64,6 +84,10 @@ export default class {
return this._acls.get()
}
async getAclsForSubject (subjectId) {
return this._acls.get({ subject: subjectId })
}
async getPermissionsForUser (userId) {
const [
acls,

View File

@@ -1,29 +1,26 @@
import createDebug from 'debug'
const debug = createDebug('xo:api')
import getKeys from 'lodash.keys'
import kindOf from 'kindof'
import ms from 'ms'
import schemaInspector from 'schema-inspector'
import * as methods from '../api'
import {
InvalidParameters,
MethodNotFound,
NoSuchObject,
Unauthorized
} from './api-errors'
import {
version as xoServerVersion
} from '../package.json'
MethodNotFound
} from 'json-rpc-peer'
import {
createRawObject,
forEach,
isFunction,
noop
} from './utils'
noop,
serializeError
} from '../utils'
import * as errors from 'xo-common/api-errors'
// ===================================================================
const debug = createDebug('xo:api')
const PERMISSIONS = {
none: 0,
read: 1,
@@ -31,39 +28,30 @@ const PERMISSIONS = {
admin: 3
}
// TODO:
// - error when adding a server to a pool with incompatible version
// - error when halted VM migration failure is due to XS < 7
const XAPI_ERROR_TO_XO_ERROR = {
EHOSTUNREACH: errors.serverUnreachable,
HOST_OFFLINE: ([ host ], getId) => errors.hostOffline({ host: getId(host) }),
NO_HOSTS_AVAILABLE: errors.noHostsAvailable,
NOT_SUPPORTED_DURING_UPGRADE: errors.notSupportedDuringUpgrade,
OPERATION_BLOCKED: ([ ref, code ], getId) => errors.operationBlocked({ objectId: getId(ref), code }),
PATCH_PRECHECK_FAILED_ISO_MOUNTED: ([ patch ]) => errors.patchPrecheck({ errorType: 'isoMounted', patch }),
PIF_VLAN_EXISTS: ([ pif ], getId) => errors.objectAlreadyExists({ objectId: getId(pif), objectType: 'PIF' }),
SESSION_AUTHENTICATION_FAILED: errors.authenticationFailed,
VDI_IN_USE: ([ vdi, operation ], getId) => errors.vdiInUse({ vdi: getId(vdi), operation }),
VM_BAD_POWER_STATE: ([ vm, expected, actual ], getId) => errors.vmBadPowerState({ vm: getId(vm), expected, actual }),
VM_IS_TEMPLATE: errors.vmIsTemplate,
VM_LACKS_FEATURE: ([ vm ], getId) => errors.vmLacksFeature({ vm: getId(vm) }),
VM_LACKS_FEATURE_SHUTDOWN: ([ vm ], getId) => errors.vmLacksFeature({ vm: getId(vm), feature: 'shutdown' }),
VM_MISSING_PV_DRIVERS: ([ vm ], getId) => errors.vmMissingPvDrivers({ vm: getId(vm) })
}
const hasPermission = (user, permission) => (
PERMISSIONS[user.permission] >= PERMISSIONS[permission]
)
// FIXME: this function is specific to XO and should not be defined in
// this file.
function checkPermission (method) {
/* jshint validthis: true */
const {permission} = method
// No requirement.
if (permission === undefined) {
return
}
const {user} = this
if (!user) {
throw new Unauthorized()
}
// The only requirement is login.
if (!permission) {
return
}
if (!hasPermission(user, permission)) {
throw new Unauthorized()
}
}
// -------------------------------------------------------------------
function checkParams (method, params) {
const schema = method.params
if (!schema) {
@@ -76,11 +64,34 @@ function checkParams (method, params) {
}, params)
if (!result.valid) {
throw new InvalidParameters(result.error)
throw errors.invalidParameters(result.error)
}
}
// -------------------------------------------------------------------
function checkPermission (method) {
/* jshint validthis: true */
const {permission} = method
// No requirement.
if (permission === undefined) {
return
}
const {user} = this
if (!user) {
throw errors.unauthorized()
}
// The only requirement is login.
if (!permission) {
return
}
if (!hasPermission(user, permission)) {
throw errors.unauthorized()
}
}
function resolveParams (method, params) {
const resolve = method.resolve
@@ -90,7 +101,7 @@ function resolveParams (method, params) {
const {user} = this
if (!user) {
throw new Unauthorized()
throw errors.unauthorized()
}
const userId = user.id
@@ -126,89 +137,29 @@ function resolveParams (method, params) {
return params
}
throw new Unauthorized()
throw errors.unauthorized()
})
}
// ===================================================================
function getMethodsInfo () {
const methods = {}
forEach(this.api._methods, (method, name) => {
methods[name] = {
description: method.description,
params: method.params || {},
permission: method.permission
}
})
return methods
}
getMethodsInfo.description = 'returns the signatures of all available API methods'
// -------------------------------------------------------------------
const getServerVersion = () => xoServerVersion
getServerVersion.description = 'return the version of xo-server'
// -------------------------------------------------------------------
const getVersion = () => '0.1'
getVersion.description = 'API version (unstable)'
// -------------------------------------------------------------------
function listMethods () {
return getKeys(this.api._methods)
}
listMethods.description = 'returns the name of all available API methods'
// -------------------------------------------------------------------
function methodSignature ({method: name}) {
const method = this.api.getMethod(name)
if (!method) {
throw new NoSuchObject()
}
// Return an array for compatibility with XML-RPC.
return [
// XML-RPC require the name of the method.
{
name,
description: method.description,
params: method.params || {},
permission: method.permission
}
]
}
methodSignature.description = 'returns the signature of an API method'
// ===================================================================
export default class Api {
constructor ({
context,
verboseLogsOnErrors
} = {}) {
constructor (xo) {
this._logger = null
this._methods = createRawObject()
this._verboseLogsOnErrors = verboseLogsOnErrors
this.context = context
this._xo = xo
this.addMethods({
system: {
getMethodsInfo,
getServerVersion,
getVersion,
listMethods,
methodSignature
}
this.addApiMethods(methods)
xo.on('start', async () => {
this._logger = await xo.getLogger('api')
})
}
addMethod (name, method) {
get apiMethods () {
return this._methods
}
addApiMethod (name, method) {
const methods = this._methods
if (name in methods) {
@@ -217,21 +168,22 @@ export default class Api {
methods[name] = method
let unset = () => {
let remove = () => {
delete methods[name]
unset = noop
remove = noop
}
return () => unset()
return () => remove()
}
addMethods (methods) {
addApiMethods (methods) {
let base = ''
const removes = []
const addMethod = (method, name) => {
name = base + name
if (isFunction(method)) {
this.addMethod(name, method)
removes.push(this.addApiMethod(name, method))
return
}
@@ -240,20 +192,35 @@ export default class Api {
forEach(method, addMethod)
base = oldBase
}
forEach(methods, addMethod)
try {
forEach(methods, addMethod)
} catch (error) {
// Remove all added methods.
forEach(removes, remove => remove())
// Forward the error
throw error
}
let remove = () => {
forEach(removes, remove => remove())
remove = noop
}
return remove
}
async call (session, name, params) {
async callApiMethod (session, name, params) {
const startTime = Date.now()
const method = this.getMethod(name)
const method = this._methods[name]
if (!method) {
throw new MethodNotFound(name)
}
// FIXME: it can cause issues if there any property assignments in
// XO methods called from the API.
const context = Object.create(this.context, {
const context = Object.create(this._xo, {
api: { // Used by system.*().
value: this
},
@@ -262,17 +229,30 @@ export default class Api {
}
})
// FIXME: too coupled with XO.
// Fetch and inject the current user.
const userId = session.get('user_id', undefined)
context.user = userId && await context.getUser(userId)
context.user = userId && await this._xo.getUser(userId)
const userName = context.user
? context.user.email
: '(unknown user)'
try {
await checkPermission.call(context, method)
checkParams(method, params)
// API methods are in a namespace.
// Some methods use the namespace or an id parameter like:
//
// vm.detachPci vm=<string>
// vm.ejectCd id=<string>
//
// The goal here is to standardize the calls by always providing
// an id parameter when possible to simplify calls to the API.
if (params && params.id === undefined) {
const namespace = name.slice(0, name.indexOf('.'))
params.id = params[namespace]
}
checkParams.call(context, method, params)
const resolvedParams = await resolveParams.call(context, method, params)
@@ -294,15 +274,19 @@ export default class Api {
return result
} catch (error) {
if (this._verboseLogsOnErrors) {
debug(
'%s | %s(%j) [%s] =!> %s',
userName,
name,
params,
ms(Date.now() - startTime),
error
)
const data = {
userId,
method: name,
params,
duration: Date.now() - startTime,
error: serializeError(error)
}
const message = `${userName} | ${name}(${JSON.stringify(params)}) [${ms(Date.now() - startTime)}] =!> ${error}`
this._logger.error(message, data)
if (this._xo._config.verboseLogsOnErrors) {
debug(message)
const stack = error && error.stack
if (stack) {
@@ -318,11 +302,18 @@ export default class Api {
)
}
const xoError = XAPI_ERROR_TO_XO_ERROR[error.code]
if (xoError) {
throw xoError(error.params, ref => {
try {
return this._xo.getObject(ref).id
} catch (e) {
return ref
}
})
}
throw error
}
}
getMethod (name) {
return this._methods[name]
}
}

View File

@@ -1,9 +1,8 @@
import Token, { Tokens } from '../models/token'
import {
NoSuchObject
} from '../api-errors'
import { noSuchObject } from 'xo-common/api-errors'
import {
createRawObject,
forEach,
generateToken,
pCatch,
noop
@@ -11,13 +10,10 @@ import {
// ===================================================================
class NoSuchAuthenticationToken extends NoSuchObject {
constructor (id) {
super(id, 'authentication token')
}
}
const noSuchAuthenticationToken = id =>
noSuchObject(id, 'authenticationToken')
// ===================================================================
const ONE_MONTH = 1e3 * 60 * 60 * 24 * 30
export default class {
constructor (xo) {
@@ -30,7 +26,7 @@ export default class {
this._providers = new Set()
// Creates persistent collections.
this._tokens = new Tokens({
const tokensDb = this._tokens = new Tokens({
connection: xo._redis,
prefix: 'xo:token',
indexes: ['user_id']
@@ -61,9 +57,26 @@ export default class {
try {
return (await xo.getAuthenticationToken(tokenId)).user_id
} catch (e) {
return
}
} catch (error) {}
})
xo.on('clean', async () => {
const tokens = await tokensDb.get()
const toRemove = []
const now = Date.now()
forEach(tokens, ({ expiration, id }) => {
if (!expiration || expiration < now) {
toRemove.push(id)
}
})
await tokensDb.remove(toRemove)
})
xo.on('start', () => {
xo.addConfigManager('authTokens',
() => tokensDb.get(),
tokens => tokensDb.update(tokens)
)
})
}
@@ -141,7 +154,7 @@ export default class {
const token = new Token({
id: await generateToken(),
user_id: userId,
expiration: Date.now() + 1e3 * 60 * 60 * 24 * 30 // 1 month validity.
expiration: Date.now() + ONE_MONTH
})
await this._tokens.add(token)
@@ -151,15 +164,15 @@ export default class {
}
async deleteAuthenticationToken (id) {
if (!await this._tokens.remove(id)) { // eslint-disable-line space-before-keywords
throw new NoSuchAuthenticationToken(id)
if (!await this._tokens.remove(id)) {
throw noSuchAuthenticationToken(id)
}
}
async getAuthenticationToken (id) {
let token = await this._tokens.first(id)
if (!token) {
throw new NoSuchAuthenticationToken(id)
throw noSuchAuthenticationToken(id)
}
token = token.properties
@@ -169,13 +182,13 @@ export default class {
)) {
this._tokens.remove(id)::pCatch(noop)
throw new NoSuchAuthenticationToken(id)
throw noSuchAuthenticationToken(id)
}
return token
}
async _getAuthenticationTokensForUser (userId) {
async getAuthenticationTokensForUser (userId) {
return this._tokens.get({ user_id: userId })
}
}

View File

@@ -1,29 +1,44 @@
import endsWith from 'lodash.endswith'
import deferrable from 'golike-defer'
import escapeStringRegexp from 'escape-string-regexp'
import eventToPromise from 'event-to-promise'
import filter from 'lodash.filter'
import find from 'lodash.find'
import findIndex from 'lodash.findindex'
import sortBy from 'lodash.sortby'
import startsWith from 'lodash.startswith'
import execa from 'execa'
import splitLines from 'split-lines'
import { createParser as createPairsParser } from 'parse-pairs'
import { createReadStream, readdir, stat } from 'fs'
import { satisfies as versionSatisfies } from 'semver'
import { utcFormat } from 'd3-time-format'
import {
basename,
dirname
} from 'path'
import { satisfies as versionSatisfies } from 'semver'
import vhdMerge from '../vhd-merge'
import xapiObjectToXo from '../xapi-object-to-xo'
import {
deferrable
} from '../decorators'
endsWith,
filter,
find,
findIndex,
includes,
once,
sortBy,
startsWith,
trim
} from 'lodash'
import vhdMerge, { chainVhd } from '../vhd-merge'
import xapiObjectToXo from '../xapi-object-to-xo'
import { lvs, pvs } from '../lvm'
import {
forEach,
mapFilter,
mapToArray,
noop,
pCatch,
pFinally,
pFromCallback,
pSettle,
safeDateFormat
resolveSubpath,
safeDateFormat,
safeDateParse,
tmpDir
} from '../utils'
import {
VDI_FORMAT_VHD
@@ -34,6 +49,8 @@ import {
const DELTA_BACKUP_EXT = '.json'
const DELTA_BACKUP_EXT_LENGTH = DELTA_BACKUP_EXT.length
const shortDate = utcFormat('%Y-%m-%d')
// Test if a file is a vdi backup. (full or delta)
const isVdiBackup = name => /^\d+T\d+Z_(?:full|delta)\.vhd$/.test(name)
@@ -41,6 +58,41 @@ const isVdiBackup = name => /^\d+T\d+Z_(?:full|delta)\.vhd$/.test(name)
const isDeltaVdiBackup = name => /^\d+T\d+Z_delta\.vhd$/.test(name)
const isFullVdiBackup = name => /^\d+T\d+Z_full\.vhd$/.test(name)
const toTimestamp = date => date && Math.round(date.getTime() / 1000)
const parseVmBackupPath = name => {
const base = basename(name)
let baseMatches
baseMatches = /^([^_]+)_([^_]+)_(.+)\.xva$/.exec(base)
if (baseMatches) {
return {
datetime: toTimestamp(safeDateParse(baseMatches[1])),
id: name,
name: baseMatches[3],
tag: baseMatches[2],
type: 'xva'
}
}
let dirMatches
if (
(baseMatches = /^([^_]+)_(.+)\.json$/.exec(base)) &&
(dirMatches = /^vm_delta_([^_]+)_(.+)$/.exec(basename(dirname(name))))
) {
return {
datetime: toTimestamp(safeDateParse(baseMatches[1])),
id: name,
name: baseMatches[2],
tag: dirMatches[1],
type: 'delta',
uuid: dirMatches[2]
}
}
throw new Error('invalid VM backup filename')
}
// Get the timestamp of a vdi backup. (full or delta)
const getVdiTimestamp = name => {
const arr = /^(\d+T\d+Z)_(?:full|delta)\.vhd$/.exec(name)
@@ -50,21 +102,200 @@ const getVdiTimestamp = name => {
const getDeltaBackupNameWithoutExt = name => name.slice(0, -DELTA_BACKUP_EXT_LENGTH)
const isDeltaBackup = name => endsWith(name, DELTA_BACKUP_EXT)
// Checksums have been corrupted between 5.2.6 and 5.2.7.
//
// For a short period of time, bad checksums will be regenerated
// instead of rejected.
//
// TODO: restore when enough time has passed (a week/a month).
async function checkFileIntegrity (handler, name) {
let stream
await handler.refreshChecksum(name)
// let stream
//
// try {
// stream = await handler.createReadStream(name, { checksum: true })
// } catch (error) {
// if (error.code === 'ENOENT') {
// return
// }
//
// throw error
// }
//
// stream.resume()
// await eventToPromise(stream, 'finish')
}
try {
stream = await handler.createReadStream(name, { checksum: true })
} catch (error) {
if (error.code === 'ENOENT') {
return
}
// -------------------------------------------------------------------
throw error
const listPartitions = (() => {
const IGNORED = {}
forEach([
// https://github.com/jhermsmeier/node-mbr/blob/master/lib/partition.js#L38
0x05, 0x0F, 0x85, 0x15, 0x91, 0x9B, 0x5E, 0x5F, 0xCF, 0xD5, 0xC5,
0x82 // swap
], type => {
IGNORED[type] = true
})
const TYPES = {
0x7: 'NTFS',
0x83: 'linux',
0xc: 'FAT'
}
stream.resume()
await eventToPromise(stream, 'finish')
const parseLine = createPairsParser({
keyTransform: key => key === 'UUID'
? 'id'
: key.toLowerCase(),
valueTransform: (value, key) => key === 'start' || key === 'size'
? +value
: key === 'type'
? TYPES[+value] || value
: value
})
return device => execa.stdout('partx', [
'--bytes',
'--output=NR,START,SIZE,NAME,UUID,TYPE',
'--pairs',
device.path
]).then(stdout => mapFilter(splitLines(stdout), line => {
const partition = parseLine(line)
const { type } = partition
if (type != null && !IGNORED[+type]) {
return partition
}
}))
})()
// handle LVM logical volumes automatically
const listPartitions2 = device => listPartitions(device).then(partitions => {
const partitions2 = []
const promises = []
forEach(partitions, partition => {
if (+partition.type === 0x8e) {
promises.push(mountLvmPv(device, partition).then(device => {
const promise = listLvmLvs(device).then(lvs => {
forEach(lvs, lv => {
partitions2.push({
name: lv.lv_name,
size: +lv.lv_size,
id: `${partition.id}/${lv.vg_name}/${lv.lv_name}`
})
})
})
promise::pFinally(device.unmount)
return promise
}))
} else {
partitions2.push(partition)
}
})
return Promise.all(promises).then(() => partitions2)
})
const mountPartition = (device, partitionId) => Promise.all([
partitionId != null && listPartitions(device),
tmpDir()
]).then(([ partitions, path ]) => {
const options = [
'loop',
'ro'
]
if (partitions) {
const partition = find(partitions, { id: partitionId })
const { start } = partition
if (start != null) {
options.push(`offset=${start * 512}`)
}
}
const mount = options => execa('mount', [
`--options=${options.join(',')}`,
`--source=${device.path}`,
`--target=${path}`
])
// `noload` option is used for ext3/ext4, if it fails it might
// `be another fs, try without
return mount([ ...options, 'noload' ]).catch(() =>
mount(options)
).then(() => ({
path,
unmount: once(() => execa('umount', [ '--lazy', path ]))
}), error => {
console.log(error)
throw error
})
})
// handle LVM logical volumes automatically
const mountPartition2 = (device, partitionId) => {
if (
partitionId == null ||
!includes(partitionId, '/')
) {
return mountPartition(device, partitionId)
}
const [ pvId, vgName, lvName ] = partitionId.split('/')
return listPartitions(device).then(partitions =>
find(partitions, { id: pvId })
).then(pvId => mountLvmPv(device, pvId)).then(device1 =>
execa('vgchange', [ '-ay', vgName ]).then(() =>
lvs([ 'lv_name', 'lv_path' ], vgName).then(lvs =>
find(lvs, { lv_name: lvName }).lv_path
)
).then(path =>
mountPartition({ path }).then(device2 => ({
...device2,
unmount: () => device2.unmount().then(device1.unmount)
}))
).catch(error => device1.unmount().then(() => {
throw error
}))
)
}
// -------------------------------------------------------------------
const listLvmLvs = device => pvs([
'lv_name',
'lv_path',
'lv_size',
'vg_name'
], device.path).then(pvs => filter(pvs, 'lv_name'))
const mountLvmPv = (device, partition) => {
const args = []
if (partition) {
args.push('-o', partition.start * 512)
}
args.push(
'--show',
'-f',
device.path
)
return execa.stdout('losetup', args).then(stdout => {
const path = trim(stdout)
return {
path,
unmount: once(() => Promise.all([
execa('losetup', [ '-d', path ]),
pvs('vg_name', path).then(vgNames => execa('vgchange', [
'-an',
...vgNames
]))
]))
}
})
}
// ===================================================================
@@ -72,6 +303,15 @@ async function checkFileIntegrity (handler, name) {
export default class {
constructor (xo) {
this._xo = xo
// clean any LVM volumes that might have not been properly
// unmounted
xo.on('start', () => Promise.all([
execa('losetup', [ '-D' ]),
execa('vgchange', [ '-an' ])
]).then(() =>
execa('pvscan', [ '--cache' ])
))
}
async listRemoteBackups (remoteId) {
@@ -101,12 +341,53 @@ export default class {
return backups
}
async listVmBackups (remoteId) {
const handler = await this._xo.getRemoteHandler(remoteId)
const backups = []
await Promise.all(mapToArray(await handler.list(), entry => {
if (endsWith(entry, '.xva')) {
backups.push(parseVmBackupPath(entry))
} else if (startsWith(entry, 'vm_delta_')) {
return handler.list(entry).then(children => Promise.all(mapToArray(children, child => {
if (endsWith(child, '.json')) {
const path = `${entry}/${child}`
const record = parseVmBackupPath(path)
backups.push(record)
return handler.readFile(path).then(data => {
record.disks = mapToArray(JSON.parse(data).vdis, vdi => ({
id: `${entry}/${vdi.xoPath}`,
name: vdi.name_label,
uuid: vdi.uuid
}))
}).catch(noop)
}
})))
}
}))
return backups
}
async importVmBackup (remoteId, file, sr) {
const handler = await this._xo.getRemoteHandler(remoteId)
const stream = await handler.createReadStream(file)
const xapi = this._xo.getXapi(sr)
await xapi.importVm(stream, { srId: sr._xapiId })
const vm = await xapi.importVm(stream, { srId: sr._xapiId })
const { datetime } = parseVmBackupPath(file)
await Promise.all([
xapi.addTag(vm.$id, 'restored from backup'),
xapi.editVm(vm.$id, {
name_label: `${vm.name_label} (${shortDate(datetime * 1e3)})`
})
])
return xapiObjectToXo(vm).id
}
// -----------------------------------------------------------------
@@ -140,7 +421,7 @@ export default class {
stream => stream.cancel()
))
return srcXapi.deleteVm(delta.vm.$id, true)
return srcXapi.deleteVm(delta.vm.uuid)
})
const promise = targetXapi.importDeltaVm(
@@ -154,7 +435,7 @@ export default class {
// Once done, (asynchronously) remove the (now obsolete) local
// base.
if (localBaseUuid) {
promise.then(() => srcXapi.deleteVm(localBaseUuid, true))::pCatch(noop)
promise.then(() => srcXapi.deleteVm(localBaseUuid))::pCatch(noop)
}
// (Asynchronously) Identify snapshot as future base.
@@ -290,6 +571,18 @@ export default class {
return backups.slice(i)
}
// fix the parent UUID and filename in delta files after download from xapi or backup compression
async _chainDeltaVdiBackups ({handler, dir}) {
const backups = await this._listVdiBackups(handler, dir)
for (let i = 1; i < backups.length; i++) {
const childPath = dir + '/' + backups[i]
const modified = await chainVhd(handler, dir + '/' + backups[i - 1], handler, childPath)
if (modified) {
await handler.refreshChecksum(childPath)
}
}
}
async _mergeDeltaVdiBackups ({handler, dir, depth}) {
const backups = await this._listVdiBackups(handler, dir)
let i = backups.length - depth
@@ -391,8 +684,7 @@ export default class {
// The problem is in the merge case, a delta merged in a full vdi
// backup forces us to browse the resulting file =>
// Significant transfer time on the network !
checksum: !isFull,
flags: 'wx'
checksum: !isFull
})
stream.on('error', error => targetStream.emit('error', error))
@@ -432,16 +724,7 @@ export default class {
@deferrable.onFailure
async rollingDeltaVmBackup ($onFailure, {vm, remoteId, tag, depth}) {
const remote = await this._xo.getRemote(remoteId)
if (!remote) {
throw new Error(`No such Remote ${remoteId}`)
}
if (!remote.enabled) {
throw new Error(`Remote ${remoteId} is disabled`)
}
const handler = await this._xo.getRemoteHandler(remote)
const handler = await this._xo.getRemoteHandler(remoteId)
const xapi = this._xo.getXapi(vm)
vm = xapi.getObject(vm._xapiId)
@@ -452,7 +735,7 @@ export default class {
base => base.snapshot_time
)
const baseVm = bases.pop()
forEach(bases, base => { xapi.deleteVm(base.$id, true)::pCatch(noop) })
forEach(bases, base => { xapi.deleteVm(base.$id)::pCatch(noop) })
// Check backup dirs.
const dir = `vm_delta_${tag}_${vm.uuid}`
@@ -487,7 +770,7 @@ export default class {
stream => stream.cancel()
))
await xapi.deleteVm(delta.vm.$id, true)
await xapi.deleteVm(delta.vm.uuid)
})
// Save vdis.
@@ -515,15 +798,15 @@ export default class {
)
const fulFilledVdiBackups = []
let success = true
let error
// One or many vdi backups have failed.
for (const vdiBackup of vdiBackups) {
if (vdiBackup.isFulfilled()) {
fulFilledVdiBackups.push(vdiBackup)
} else {
console.error(`Rejected backup: ${vdiBackup.reason()}`)
success = false
error = vdiBackup.reason()
console.error('Rejected backup:', error)
}
}
@@ -535,8 +818,8 @@ export default class {
)
})
if (!success) {
throw new Error('Rolling delta vm backup failed.')
if (error) {
throw error
}
const date = safeDateFormat(new Date())
@@ -545,19 +828,17 @@ export default class {
$onFailure(() => handler.unlink(infoPath)::pCatch(noop))
const { streams,
...infos
} = delta
// Write Metadata.
await handler.outputFile(infoPath, JSON.stringify(infos, null, 2), {flag: 'wx'})
await handler.outputFile(infoPath, JSON.stringify(delta, null, 2))
// Here we have a completed backup. We can merge old vdis.
await Promise.all(
mapToArray(vdiBackups, vdiBackup => {
const backupName = vdiBackup.value()
const backupDirectory = backupName.slice(0, backupName.lastIndexOf('/'))
return this._mergeDeltaVdiBackups({ handler, dir: `${dir}/${backupDirectory}`, depth })
const backupDir = `${dir}/${backupDirectory}`
return this._mergeDeltaVdiBackups({ handler, dir: backupDir, depth })
.then(() => { this._chainDeltaVdiBackups({ handler, dir: backupDir }) })
})
)
@@ -565,7 +846,7 @@ export default class {
await this._removeOldDeltaVmBackups(xapi, { vm, handler, dir, depth })
if (baseVm) {
xapi.deleteVm(baseVm.$id, true)::pCatch(noop)
xapi.deleteVm(baseVm.$id)::pCatch(noop)
}
// Returns relative path.
@@ -573,10 +854,13 @@ export default class {
}
async importDeltaVmBackup ({sr, remoteId, filePath}) {
filePath = `${filePath}${DELTA_BACKUP_EXT}`
const { datetime } = parseVmBackupPath(filePath)
const handler = await this._xo.getRemoteHandler(remoteId)
const xapi = this._xo.getXapi(sr)
const delta = JSON.parse(await handler.readFile(`${filePath}${DELTA_BACKUP_EXT}`))
const delta = JSON.parse(await handler.readFile(filePath))
let vm
const { version } = delta
@@ -603,9 +887,12 @@ export default class {
)
)
delta.vm.name_label += ` (${shortDate(datetime * 1e3)})`
delta.vm.tags.push('restored from backup')
vm = await xapi.importDeltaVm(delta, {
srId: sr._xapiId,
disableStartAfterImport: false
disableStartAfterImport: false,
srId: sr._xapiId
})
} else {
throw new Error(`Unsupported delta backup version: ${version}`)
@@ -617,21 +904,12 @@ export default class {
// -----------------------------------------------------------------
async backupVm ({vm, remoteId, file, compress, onlyMetadata}) {
const remote = await this._xo.getRemote(remoteId)
if (!remote) {
throw new Error(`No such Remote ${remoteId}`)
}
if (!remote.enabled) {
throw new Error(`Backup remote ${remoteId} is disabled`)
}
const handler = await this._xo.getRemoteHandler(remote)
const handler = await this._xo.getRemoteHandler(remoteId)
return this._backupVm(vm, handler, file, {compress, onlyMetadata})
}
async _backupVm (vm, handler, file, {compress, onlyMetadata}) {
const targetStream = await handler.createOutputStream(file, { flags: 'wx' })
const targetStream = await handler.createOutputStream(file)
const promise = eventToPromise(targetStream, 'finish')
const sourceStream = await this._xo.getXapi(vm).exportVm(vm._xapiId, {
@@ -644,16 +922,7 @@ export default class {
}
async rollingBackupVm ({vm, remoteId, tag, depth, compress, onlyMetadata}) {
const remote = await this._xo.getRemote(remoteId)
if (!remote) {
throw new Error(`No such Remote ${remoteId}`)
}
if (!remote.enabled) {
throw new Error(`Backup remote ${remoteId} is disabled`)
}
const handler = await this._xo.getRemoteHandler(remote)
const handler = await this._xo.getRemoteHandler(remoteId)
const files = await handler.list()
@@ -680,7 +949,7 @@ export default class {
const promises = []
for (let surplus = snapshots.length - (depth - 1); surplus > 0; surplus--) {
const oldSnap = snapshots.shift()
promises.push(xapi.deleteVm(oldSnap.uuid, true))
promises.push(xapi.deleteVm(oldSnap.uuid))
}
await Promise.all(promises)
}
@@ -694,12 +963,12 @@ export default class {
const sourceXapi = this._xo.getXapi(vm)
vm = sourceXapi.getObject(vm._xapiId)
const vms = []
const vms = {}
forEach(sr.$VDIs, vdi => {
const vbds = vdi.$VBDs
const vm = vbds && vbds[0] && vbds[0].$VM
if (vm && reg.test(vm.name_label)) {
vms.push(vm)
vms[vm.$id] = vm
}
})
const olderCopies = sortBy(vms, 'name_label')
@@ -710,11 +979,121 @@ export default class {
})
await targetXapi.addTag(drCopy.$id, 'Disaster Recovery')
const promises = []
for (let surplus = olderCopies.length - (depth - 1); surplus > 0; surplus--) {
const oldDRVm = olderCopies.shift()
promises.push(targetXapi.deleteVm(oldDRVm.$id, true))
const n = 1 - depth
await Promise.all(mapToArray(n ? olderCopies.slice(0, n) : olderCopies, vm =>
// Do not consider a failure to delete an old copy as a fatal error.
targetXapi.deleteVm(vm.$id)::pCatch(noop)
))
}
// -----------------------------------------------------------------
_mountVhd (remoteId, vhdPath) {
return Promise.all([
this._xo.getRemoteHandler(remoteId),
tmpDir()
]).then(([ handler, mountDir ]) => {
if (!handler._getRealPath) {
throw new Error(`this remote is not supported`)
}
const remotePath = handler._getRealPath()
vhdPath = resolveSubpath(remotePath, vhdPath)
return Promise.resolve().then(() => {
// TODO: remove when no longer necessary.
//
// Currently, the filenames of the VHD changes over time
// (delta → full), but the JSON is not updated, therefore the
// VHD path may need to be fixed.
return endsWith(vhdPath, '_delta.vhd')
? pFromCallback(cb => stat(vhdPath, cb)).then(
() => vhdPath,
error => {
if (error && error.code === 'ENOENT') {
return `${vhdPath.slice(0, -10)}_full.vhd`
}
}
)
: vhdPath
}).then(vhdPath => execa('vhdimount', [ vhdPath, mountDir ])).then(() =>
pFromCallback(cb => readdir(mountDir, cb)).then(entries => {
let max = 0
forEach(entries, entry => {
const matches = /^vhdi(\d+)/.exec(entry)
if (matches) {
const value = +matches[1]
if (value > max) {
max = value
}
}
})
if (!max) {
throw new Error('no disks found')
}
return {
path: `${mountDir}/vhdi${max}`,
unmount: once(() => execa('fusermount', [ '-uz', mountDir ]))
}
})
)
})
}
_mountPartition (remoteId, vhdPath, partitionId) {
return this._mountVhd(remoteId, vhdPath).then(device =>
mountPartition2(device, partitionId).then(partition => ({
...partition,
unmount: () => partition.unmount().then(device.unmount)
})).catch(error => device.unmount().then(() => {
throw error
}))
)
}
@deferrable
async scanDiskBackup ($defer, remoteId, vhdPath) {
const device = await this._mountVhd(remoteId, vhdPath)
$defer(device.unmount)
return {
partitions: await listPartitions2(device)
}
await Promise.all(promises)
}
@deferrable
async scanFilesInDiskBackup ($defer, remoteId, vhdPath, partitionId, path) {
const partition = await this._mountPartition(remoteId, vhdPath, partitionId)
$defer(partition.unmount)
path = resolveSubpath(partition.path, path)
const entries = await pFromCallback(cb => readdir(path, cb))
const entriesMap = {}
await Promise.all(mapToArray(entries, async name => {
const stats = await pFromCallback(cb => stat(`${path}/${name}`, cb))::pCatch(noop)
if (stats) {
entriesMap[stats.isDirectory() ? `${name}/` : name] = {}
}
}))
return entriesMap
}
async fetchFilesInDiskBackup (remoteId, vhdPath, partitionId, paths) {
const partition = await this._mountPartition(remoteId, vhdPath, partitionId)
let i = 0
const onEnd = () => {
if (!--i) {
partition.unmount()
}
}
return mapToArray(paths, path => {
++i
return createReadStream(resolveSubpath(partition.path, path)).once('end', onEnd)
})
}
}

View File

@@ -0,0 +1,33 @@
import { map, noop } from '../utils'
import { all as pAll } from 'promise-toolbox'
export default class ConfigManagement {
constructor () {
this._managers = { __proto__: null }
}
addConfigManager (id, exporter, importer) {
const managers = this._managers
if (id in managers) {
throw new Error(`${id} is already taken`)
}
this._managers[id] = { exporter, importer }
}
exportConfig () {
return map(this._managers, ({ exporter }, key) => exporter())::pAll()
}
importConfig (config) {
const managers = this._managers
return map(config, (entry, key) => {
const manager = managers[key]
if (manager) {
return manager.importer(entry)
}
})::pAll().then(noop)
}
}

307
src/xo-mixins/ip-pools.js Normal file
View File

@@ -0,0 +1,307 @@
import concat from 'lodash/concat'
import countBy from 'lodash/countBy'
import diff from 'lodash/difference'
import findIndex from 'lodash/findIndex'
import flatten from 'lodash/flatten'
import highland from 'highland'
import includes from 'lodash/includes'
import isObject from 'lodash/isObject'
import keys from 'lodash/keys'
import mapValues from 'lodash/mapValues'
import pick from 'lodash/pick'
import remove from 'lodash/remove'
import synchronized from 'decorator-synchronized'
import { noSuchObject } from 'xo-common/api-errors'
import { fromCallback } from 'promise-toolbox'
import {
forEach,
generateUnsecureToken,
isEmpty,
lightSet,
mapToArray,
streamToArray,
throwFn
} from '../utils'
// ===================================================================
const normalize = ({
addresses,
id = throwFn('id is a required field'),
name = '',
networks,
resourceSets
}) => ({
addresses,
id,
name,
networks,
resourceSets
})
const _isAddressInIpPool = (address, network, ipPool) => (
ipPool.addresses && (address in ipPool.addresses) &&
includes(ipPool.networks, isObject(network) ? network.id : network)
)
// ===================================================================
// Note: an address cannot be in two different pools sharing a
// network.
export default class IpPools {
constructor (xo) {
this._store = null
this._xo = xo
xo.on('start', async () => {
this._store = await xo.getStore('ipPools')
xo.addConfigManager('ipPools',
() => this.getAllIpPools(),
ipPools => Promise.all(mapToArray(ipPools, ipPool => this._save(ipPool)))
)
})
}
async createIpPool ({ addresses, name, networks }) {
const id = await this._generateId()
await this._save({
addresses,
id,
name,
networks
})
return id
}
async deleteIpPool (id) {
const store = this._store
if (await store.has(id)) {
await Promise.all(mapToArray(await this._xo.getAllResourceSets(), async set => {
await this._xo.removeLimitFromResourceSet(`ipPool:${id}`, set.id)
return this._xo.removeIpPoolFromResourceSet(id, set.id)
}))
await this._removeIpAddressesFromVifs(
mapValues((await this.getIpPool(id)).addresses, 'vifs')
)
return store.del(id)
}
throw noSuchObject(id, 'ipPool')
}
_getAllIpPools (filter) {
return streamToArray(this._store.createValueStream(), {
filter,
mapper: normalize
})
}
async getAllIpPools (userId) {
let filter
if (userId != null) {
const user = await this._xo.getUser(userId)
if (user.permission !== 'admin') {
const resourceSets = await this._xo.getAllResourceSets(userId)
const ipPools = lightSet(flatten(mapToArray(resourceSets, 'ipPools')))
filter = ({ id }) => ipPools.has(id)
}
}
return this._getAllIpPools(filter)
}
getIpPool (id) {
return this._store.get(id).then(normalize, error => {
throw error.notFound ? noSuchObject(id, 'ipPool') : error
})
}
async _getAddressIpPool (address, network) {
const ipPools = await this._getAllIpPools(ipPool => _isAddressInIpPool(address, network, ipPool))
return ipPools && ipPools[0]
}
// Returns a map that indicates how many IPs from each IP pool the VM uses
// e.g.: { 'ipPool:abc': 3, 'ipPool:xyz': 7 }
async computeVmIpPoolsUsage (vm) {
const vifs = vm.VIFs
const ipPools = []
for (const vifId of vifs) {
const { allowedIpv4Addresses, allowedIpv6Addresses, $network } = this._xo.getObject(vifId)
for (const address of concat(allowedIpv4Addresses, allowedIpv6Addresses)) {
const ipPool = await this._getAddressIpPool(address, $network)
ipPool && ipPools.push(ipPool.id)
}
}
return countBy(ipPools, ({ id }) => `ipPool:${id}`)
}
@synchronized
allocIpAddresses (vifId, addAddresses, removeAddresses) {
const updatedIpPools = {}
const limits = {}
const xoVif = this._xo.getObject(vifId)
const xapi = this._xo.getXapi(xoVif)
const vif = xapi.getObject(xoVif._xapiId)
const allocAndSave = (() => {
const resourseSetId = xapi.xo.getData(vif.VM, 'resourceSet')
return () => {
const saveIpPools = () => Promise.all(mapToArray(updatedIpPools, ipPool => this._save(ipPool)))
return resourseSetId
? this._xo.allocateLimitsInResourceSet(limits, resourseSetId).then(
saveIpPools
)
: saveIpPools()
}
})()
return fromCallback(cb => {
const network = vif.$network
const networkId = network.$id
const isVif = id => id === vifId
highland(this._store.createValueStream()).each(ipPool => {
const { addresses, networks } = updatedIpPools[ipPool.id] || ipPool
if (!(addresses && networks && includes(networks, networkId))) {
return false
}
let allocations = 0
let changed = false
forEach(removeAddresses, address => {
let vifs, i
if (
(vifs = addresses[address]) &&
(vifs = vifs.vifs) &&
(i = findIndex(vifs, isVif)) !== -1
) {
vifs.splice(i, 1)
--allocations
changed = true
}
})
forEach(addAddresses, address => {
const data = addresses[address]
if (!data) {
return
}
const vifs = data.vifs || (data.vifs = [])
if (!includes(vifs, vifId)) {
vifs.push(vifId)
++allocations
changed = true
}
})
if (changed) {
const { id } = ipPool
updatedIpPools[id] = ipPool
limits[`ipPool:${id}`] = (limits[`ipPool:${id}`] || 0) + allocations
}
}).toCallback(cb)
}).then(allocAndSave)
}
async _removeIpAddressesFromVifs (mapAddressVifs) {
const mapVifAddresses = {}
forEach(mapAddressVifs, (vifs, address) => {
forEach(vifs, vifId => {
if (mapVifAddresses[vifId]) {
mapVifAddresses[vifId].push(address)
} else {
mapVifAddresses[vifId] = [ address ]
}
})
})
const { getXapi } = this._xo
return Promise.all(mapToArray(mapVifAddresses, (addresses, vifId) => {
let vif
try {
// The IP may not have been correctly deallocated from the IP pool when the VIF was deleted
vif = this._xo.getObject(vifId)
} catch (error) {
return
}
const { allowedIpv4Addresses, allowedIpv6Addresses } = vif
remove(allowedIpv4Addresses, address => includes(addresses, address))
remove(allowedIpv6Addresses, address => includes(addresses, address))
this.allocIpAddresses(vifId, undefined, concat(allowedIpv4Addresses, allowedIpv6Addresses))
return getXapi(vif).editVif(vif._xapiId, {
ipv4Allowed: allowedIpv4Addresses,
ipv6Allowed: allowedIpv6Addresses
})
}))
}
async updateIpPool (id, {
addresses,
name,
networks,
resourceSets
}) {
const ipPool = await this.getIpPool(id)
const previousAddresses = { ...ipPool.addresses }
name != null && (ipPool.name = name)
if (addresses) {
const addresses_ = ipPool.addresses || {}
forEach(addresses, (props, address) => {
if (props === null) {
delete addresses_[address]
} else {
addresses_[address] = props
}
})
// Remove the addresses that are no longer in the IP pool from the concerned VIFs
const deletedAddresses = diff(keys(previousAddresses), keys(addresses_))
await this._removeIpAddressesFromVifs(pick(previousAddresses, deletedAddresses))
if (isEmpty(addresses_)) {
delete ipPool.addresses
} else {
ipPool.addresses = addresses_
}
}
// TODO: Implement patching like for addresses.
if (networks) {
ipPool.networks = networks
}
// TODO: Implement patching like for addresses.
if (resourceSets) {
ipPool.resourceSets = resourceSets
}
await this._save(ipPool)
}
async _generateId () {
let id
do {
id = generateUnsecureToken(8)
} while (await this._store.has(id))
return id
}
_save (ipPool) {
ipPool = normalize(ipPool)
return this._store.put(ipPool.id, ipPool)
}
}

Some files were not shown because too many files have changed in this diff Show More