Compare commits

..

345 Commits

Author SHA1 Message Date
Julien Fontanet
2340045dd1 5.7.4 2017-04-24 15:51:01 +02:00
Julien Fontanet
ddfbf1d6c4 feat(plugin): expose plugin description 2017-04-24 15:37:52 +02:00
Pierre Donias
8639faeffc fix(Xapi#installSupplementalPackOnAllHosts): synchronize call_plugin (#532) 2017-04-21 16:32:58 +02:00
Julien Fontanet
ff1b643cf4 5.7.3 2017-04-12 17:14:10 +02:00
Pierre Donias
a73acedc4d fix(XOSAN): ignore error if pack has already been installed (#529)
Fixes vatesfr/xo-web#2084
2017-04-12 16:33:56 +02:00
Julien Fontanet
7ffeb44e77 fix(xapi-objects-to-xo): only use refs for XS templates (#528)
Using refs as ids for user templates causes issues on VM creation
2017-04-12 13:58:51 +02:00
Julien Fontanet
3989bfa832 5.7.2 2017-04-11 17:02:57 +02:00
Julien Fontanet
740e1ccd5b fix(job.set): make all fields optional (2) 2017-04-10 16:45:27 +02:00
Julien Fontanet
bc03f2ed17 fix(job.set): make all fields optional 2017-04-10 16:15:45 +02:00
Julien Fontanet
4b5cb86d87 chore(Xapi#_createVbd): base default empty prop on VDI presence 2017-04-10 16:15:45 +02:00
Julien Fontanet
b877d6088b fix(models/utils/parseProp): reduce verbosity 2017-04-10 16:15:45 +02:00
Julien Fontanet
26cfd439e1 fix(xapi-objects-to-xo): use $ref for VM-template ids
Fixes vatesfr/xo-web#2075
2017-04-10 16:15:44 +02:00
Julien Fontanet
eafcf66f4d 5.7.1 2017-04-06 19:30:09 +02:00
Pierre Donias
a8fad8193b feat(pack,patch): XS 7.1 mechanism (#527)
See vatesfr/xo-web#2058
2017-04-06 19:05:43 +02:00
badrAZ
ca695c38cd feat(vm.revertSnapshot): restore power state (#526)
Fixes vatesfr/xo-web#834
2017-04-06 15:24:30 +02:00
badrAZ
6e0f98a809 feat(VBD): expose the VBD device (#525)
See vatesfr/xo-web#2064
2017-04-06 11:07:16 +02:00
Julien Fontanet
8901d3ce64 fix(sample config): IPv6 used by default 2017-03-31 18:25:30 +02:00
Julien Fontanet
2641966285 5.7.0 2017-03-31 16:37:22 +02:00
Julien Fontanet
09dc2265fe fix(Xapi#uploadPoolPatch): use HTTPs 2017-03-31 11:19:27 +02:00
Julien Fontanet
a1fa139ef1 fix(Xapi): accept self-signed certs for export/import VDI/VM 2017-03-31 11:19:26 +02:00
badrAZ
df26a500c4 feat(servers): add a label to servers (#523)
See vatesfr/xo-web#1965
2017-03-29 16:32:02 +02:00
badrAZ
a465218ba3 feat(jobs): configure job timeout (#522)
See vatesfr/xo-web#1956
2017-03-29 10:39:52 +02:00
Julien Fontanet
1ead6eb916 fix(Xapi): export/import of VDI/VM should use HTTPs 2017-03-28 17:33:17 +02:00
Julien Fontanet
44fe1b31ba chore(package): update all dependencies 2017-03-24 11:39:35 +01:00
Julien Fontanet
0856d3d5c9 fix: update yarn.lock 2017-03-24 11:38:32 +01:00
Pierre Donias
f6b74ea836 feat(vm): affinity host (#521)
See vatesfr/xo-web#1983
2017-03-24 11:35:00 +01:00
greenkeeper[bot]
e0cef71700 fix(package): update ms to version 1.0.0 (#520)
https://greenkeeper.io/
2017-03-19 22:49:02 +01:00
Julien Fontanet
19d1f70458 fix(xapi-objects-to-xo): PV drivers check for XS >= 7.1
Fixes vatesfr/xo-web#2024
2017-03-17 18:05:05 +01:00
badrAZ
43c103e436 feat(new-vm): shared VM in resource set (#518)
Fixes vatesfr/xo-web#1964.
2017-03-15 14:45:36 +01:00
Julien Fontanet
6ff4f096a3 chore(build): do not build tests 2017-03-10 14:46:06 +01:00
Pierre Donias
d8d82441c3 fix(Xapi#createVm): only one disk should be bootable (#516)
See vatesfr/xo-web#1994.

For more complex setups, create the VMs and change bootable flags afterward.
2017-03-08 16:31:22 +01:00
Olivier Lambert
4f489e1854 feat(sr): add lvmohba SR type management (#517) 2017-03-08 14:03:57 +01:00
Julien Fontanet
9ab275df5d fix(vm.set): behave if the resource set no longer exist 2017-03-04 16:27:25 +01:00
Julien Fontanet
66c1754eb8 chore(package): update xo-common to v0.1.1 2017-03-04 16:16:58 +01:00
Julien Fontanet
e67bab1f5c fix(package): require Node >= 4.5 2017-03-04 03:24:49 +01:00
Julien Fontanet
ceb6667112 fix: coding style 2017-03-03 16:32:58 +01:00
badrAZ
cafba0b361 fix(vm.resume): remove unused force param (#514) 2017-03-03 15:49:12 +01:00
Julien Fontanet
3e4efcf297 fix(vm.importBackup): fix datetime in VM name 2017-03-01 14:57:02 +01:00
Julien Fontanet
4ccadac148 5.6.4 2017-03-01 14:46:09 +01:00
Julien Fontanet
6e148c18b0 fix(vm.importDeltaBackup): fix datetime in VM name (#513) 2017-03-01 14:34:29 +01:00
Julien Fontanet
75f849982e fix(Xapi#exportVdi): returned promise has a cancel() method (#512) 2017-03-01 14:12:56 +01:00
Julien Fontanet
8722ef45ac chore: add mention-bot config 2017-03-01 13:27:54 +01:00
Julien Fontanet
b347c78a8c fix(vm.create): do not get stuck when installing from ISO 2017-02-28 15:43:06 +01:00
Julien Fontanet
88ae24855a feat(models/utils/parseProp): do not warn on empty string 2017-02-28 09:58:41 +01:00
Julien Fontanet
356884ea53 feat: reduce the default debugging level 2017-02-28 09:58:41 +01:00
badrAZ
51fba21dd6 feat(vm.import): throw if type is xva and data is provided (#506) 2017-02-27 11:57:40 +01:00
Julien Fontanet
6aa5d608bf chore(package): babel-preset-es2015 → babel-preset-env 2017-02-27 11:22:34 +01:00
Julien Fontanet
21ad2c5744 fix(package): commit-msg → commitmsg 2017-02-27 11:20:40 +01:00
Julien Fontanet
dea1163159 fix(package): jest is a dev dependency 2017-02-27 11:20:19 +01:00
Julien Fontanet
c4c2e8cf74 chore(package): fix for jest 19 2017-02-27 11:19:38 +01:00
Julien Fontanet
5883c35cf3 chore(package): update all dependencies 2017-02-27 11:18:46 +01:00
Julien Fontanet
4d2719a424 5.6.3 2017-02-24 16:12:25 +01:00
Nicolas Raynaud
7cf2d0d01c fix(xosan) (#509)
- fix 2 bricks configuration
- limit bricks size to 2TB
- fix arbiter cleanup
2017-02-24 16:11:59 +01:00
Julien Fontanet
5a08b512a7 fix(xosan.createSR): use address prop 2017-02-24 14:56:27 +01:00
Pierre Donias
aa6ff6cd64 fix(xapi.installSupplementalPackOnAllHosts): multi-pipe stream (#508)
Fixes vatesfr/xo-web#1957
2017-02-24 11:41:47 +01:00
Julien Fontanet
89421d292c 5.6.2 2017-02-23 18:38:14 +01:00
Julien Fontanet
55c6515bac fix(vhdMerge): ensure parent.bat.size >= child.bat.size
Fixes vatesfr/xo-web#1939
2017-02-23 18:35:52 +01:00
Pierre Donias
5db6f6a58c feat(XOSAN) (#505) 2017-02-23 18:33:23 +01:00
Julien Fontanet
eeedf6ab28 feat(Vhd#ensureBatSize): no need to wait before writing footer 2017-02-23 18:31:05 +01:00
Julien Fontanet
3758cd207b fix(Vhd#ensureBatSize): Buffer#size → Buffer#length 2017-02-23 18:31:05 +01:00
Julien Fontanet
c15ede6239 fix(Vhd#ensureBatSize): avoid style error 2017-02-23 18:31:05 +01:00
Julien Fontanet
e54e31e059 feat(vhdMerge): avoid unnecessary footer write 2017-02-23 18:31:05 +01:00
Julien Fontanet
8c573aa8e4 chore(Vhd): add debugs 2017-02-23 18:31:05 +01:00
Julien Fontanet
654559086c chore(Vhd#writeBlockSectors): expect end sector instead of # of sectors 2017-02-23 18:31:05 +01:00
Julien Fontanet
b0d9679568 chore(Vhd#coalesceBlock): do not pass blockAddr 2017-02-23 18:31:05 +01:00
Julien Fontanet
87fdaf7fa7 chore(Vhd): merge readBlockBitmap() readBlockData() into _readBlock() 2017-02-23 18:31:05 +01:00
Julien Fontanet
bb3a365166 fix(Vhd#writeBlockSectors): pass correct params to Vhd#readBlockBitmap() 2017-02-23 18:31:05 +01:00
Julien Fontanet
2be183d980 various updates 2017-02-23 18:31:05 +01:00
Julien Fontanet
c6dc846838 fix(Vhd#ensureBatSize): extend BAT at correct offset 2017-02-23 18:31:05 +01:00
Julien Fontanet
1142f1d59a various updates 2017-02-23 18:31:05 +01:00
Julien Fontanet
126c470979 fix(Vhd#getEndOfHeaders): correctly compute parent locator size 2017-02-23 18:31:05 +01:00
Julien Fontanet
d679dc3e8b fix(vhdMerge): update diskGeometry and originalSize 2017-02-23 18:31:05 +01:00
Julien Fontanet
d5422dfe89 fix(Vhd#ensureBatSize): do not round maxTableEntries 2017-02-23 18:31:05 +01:00
Julien Fontanet
d64237b4f2 fix(package): dependency-check should ignore constant-stream 2017-02-23 18:31:05 +01:00
Julien Fontanet
7b7e4942f2 fix(constant-stream): optimize data length 2017-02-23 18:31:05 +01:00
Julien Fontanet
e4c343a587 chore(Vhd#_writeStream): merge back into Vhd#_write() 2017-02-23 18:31:05 +01:00
Julien Fontanet
1a8ae21478 feat(Vhd#_write): can also handle streams 2017-02-23 18:31:05 +01:00
Julien Fontanet
dd37a5b584 chore(Vhd#_write): simplify code 2017-02-23 18:31:05 +01:00
Julien Fontanet
eec340e6c0 chore(Vhd#_getBatEntry): remove logging 2017-02-23 18:31:05 +01:00
Julien Fontanet
c2fb5ba1f0 fix(vhdMerge): update currentSize and timestamp 2017-02-23 18:31:05 +01:00
Julien Fontanet
36d7e17b86 fix(Vhd#ensureBatSize): update BAT before writing footer 2017-02-23 18:31:05 +01:00
Julien Fontanet
5a1dc49428 fix(Vhd#ensureBatSize): various fixes 2017-02-23 18:31:05 +01:00
Julien Fontanet
47caf54772 fix(Vhd#ensureBatSize): extend local BAT buffer as well 2017-02-23 18:31:05 +01:00
Julien Fontanet
6af50a8c44 feat(Vhd#ensureBatSize): do not move first block if not necessary 2017-02-23 18:31:05 +01:00
Julien Fontanet
1b27407970 chore(Vhd#_getFirstAndLastBlocks): value → sector 2017-02-23 18:31:05 +01:00
Julien Fontanet
4da6306c67 fix(Vhd#_getFirstAndLastBlocks): should not be async 2017-02-23 18:31:05 +01:00
Julien Fontanet
f950b7a725 fix(Vhd#_readBlockData) 2017-02-23 18:31:05 +01:00
Julien Fontanet
930cf9ed04 fix(Vhd#ensureBatSize): update header.maxTableEntries 2017-02-23 18:31:05 +01:00
Julien Fontanet
744016e752 chore(vhd-merge): Vhd#_read() 2017-02-23 18:31:05 +01:00
Julien Fontanet
2fb4e907df fix(vhdMerge): ensure parent.bat.size >= child.bat.size
Fixes vatesfr/xo-web#1939
2017-02-23 18:31:05 +01:00
Julien Fontanet
ef2a815e52 chore(vhdMerge): ensure block sizes are identical 2017-02-23 18:31:05 +01:00
Julien Fontanet
33a81d4f3c chore(vhd-merge): rename and clean BAT methods
- readAllocationTableEntry() → _getBatEntry()
- writeAllocationTableEntry() → _setBatEntry()
2017-02-23 18:31:05 +01:00
Julien Fontanet
4911df0bf9 chore(Vhd#writeAllocationTableEntry): commit the change in the file 2017-02-23 18:31:05 +01:00
Julien Fontanet
2b612b5db7 feat(constant-stream): emit the same data n times 2017-02-23 18:31:05 +01:00
Julien Fontanet
bfe81b52ef chore(vhd-merge): Vhd#_writeStream() 2017-02-23 18:31:05 +01:00
Julien Fontanet
26f6a4beb9 chore(vhd-merge): Vhd#_readStream() 2017-02-23 18:31:05 +01:00
Julien Fontanet
52e9c3053a fix(console proxy): works with ws v2
Fixes vatesfr/xo-web#1954
2017-02-23 16:59:41 +01:00
Julien Fontanet
908d1f1ec8 feat(Xo#defineProperties): helper to define multiple properties 2017-02-23 12:23:18 +01:00
Julien Fontanet
6a1120f95b feat(Xo#defineProperty): accept a thisArg param 2017-02-23 12:18:54 +01:00
Julien Fontanet
2a2780c25c feat(Xapi#deleteVm): delete disks by default 2017-02-23 11:06:10 +01:00
Julien Fontanet
7d4152197b chore: update yarn.lock 2017-02-22 22:07:52 +01:00
greenkeeper[bot]
9c742600ff fix(package): update jest to version 19.0.1 (#504)
https://greenkeeper.io/
2017-02-22 12:10:12 +01:00
greenkeeper[bot]
a035bf132a fix(package): update pretty-format to version 19.0.0 (#503)
https://greenkeeper.io/
2017-02-21 10:52:37 +01:00
Julien Fontanet
b989d157a0 feat(redis): can connect via Unix socket
Fixes vatesfr/xo-web#1944
2017-02-16 11:32:35 +01:00
Julien Fontanet
261587511b chore(package): update all dependencies 2017-02-13 16:13:19 +01:00
Julien Fontanet
ff798801fb chore(package): update xen-api to v0.10.0-2 2017-02-13 16:08:09 +01:00
Julien Fontanet
9b4aab0d19 chore: update yarn.lock 2017-02-13 16:07:20 +01:00
greenkeeper[bot]
6e42cf9952 fix(package): update fs-promise to version 2.0.0 (#501)
https://greenkeeper.io/
2017-02-13 10:34:33 +01:00
Julien Fontanet
4c3a8ca312 fix(Xapi#createVm): wait for VM record to have all its VBDs/VIFs (#500) 2017-02-09 16:49:58 +01:00
Julien Fontanet
a63eb48f03 fix(Xapi#createVm): do not wait for a new record 2017-02-09 16:06:25 +01:00
Julien Fontanet
d0214f805e fix(Xapi#createVm): _waitObject → _waitObjectState 2017-02-09 16:06:25 +01:00
badrAZ
d736bd6501 fix(vm.create): VIFs param is optional (#499) 2017-02-09 11:16:20 +01:00
Pierre Donias
2ce4a11e0a fix(vm.delete): import mapFilter (#497) 2017-02-07 11:27:30 +01:00
Pierre Donias
e5ab8fe3e4 feat(vm.delete): remove ACLs (#496) 2017-02-02 22:11:13 +01:00
Julien Fontanet
657b74a084 Revert "feat(vm.snapshot): copy ACLs from VM to snapshot (#495)"
This reverts commit dfee98b66b.

Should not be necessary: snapshots inherit ACLs from their VM.
2017-02-01 10:24:26 +01:00
Pierre Donias
dfee98b66b feat(vm.snapshot): copy ACLs from VM to snapshot (#495)
See vatesfr/xo-web#1865

Also, correctly remove ACLs on VM deletion.
2017-02-01 10:22:17 +01:00
Julien Fontanet
f65b9f695e 5.6.1 2017-01-30 18:07:39 +01:00
Julien Fontanet
4056385cd3 feat(backups): do not rely on JSON format for lvm commands (#493) 2017-01-30 18:04:27 +01:00
Pierre Donias
96d56d43bc feat(Xapi#installSupplementalPackOnAllHosts) (#491)
See vatesfr/xo-web#1896
2017-01-30 10:53:26 +01:00
Julien Fontanet
eba8f95e58 5.6.0 2017-01-27 16:42:07 +01:00
Julien Fontanet
7e2da1ff93 [WiP] feat(backups): implements file restore for LVM (#490)
Fixes vatesfr/xo-web#1878
2017-01-27 16:37:34 +01:00
Pierre Donias
b7b7e81468 feat(host.installSupplementalPack) (#487)
See vatesfr/xo-web#1460
2017-01-25 16:08:31 +01:00
Pierre Donias
0c7768f5d2 fix(vm.delete): IP addresses should always be deallocated (#488)
Fixes vatesfr/xo-web#1906
2017-01-25 15:46:33 +01:00
Pierre Donias
8fe6a56dfc fix(Xapi#installAllPoolPatchesOnHost): ignore PATCH_ALREADY_APPLIED error (#489)
Fixes vatesfr/xo-web#1904
2017-01-25 15:46:15 +01:00
Julien Fontanet
7b9dae980d fix(vm.create): properly handle optional param VDIs 2017-01-24 13:36:36 +01:00
Olivier Lambert
b59ba6b7bb feat(api): add description for some API calls (#486)
Fixes vatesfr/xo-web#1882
2017-01-17 15:15:18 +01:00
Julien Fontanet
8cdee4d173 chore(xo): disable too many listeners warning 2017-01-16 15:50:18 +01:00
Julien Fontanet
c9ed5fbe00 chore: update yarn.lock 2017-01-16 15:18:46 +01:00
Julien Fontanet
e698e89968 feat(/signout): URL to sign out 2017-01-16 14:33:58 +01:00
Pierre Donias
02f198d42c feat(backup.fetchFiles): multiple files support (#485)
See vatesfr/xo-web#1877
2017-01-16 09:33:22 +01:00
Pierre Donias
61d2d0263b feat(patching): eject tools ISOs before patching host (#479)
Fixes #1798
2017-01-13 18:20:31 +01:00
badrAZ
ed477e99a8 feat(plugin): provide a getDataDir() to plugins (#483)
It returns the path of a directory where the plugin can store data.
2017-01-13 18:13:44 +01:00
Olivier Lambert
1449be8d66 feat(host): expose supplemental packs (#480) 2017-01-12 17:54:48 +01:00
greenkeeper[bot]
28902d8747 fix(package): update execa to version 0.6.0 (#478)
https://greenkeeper.io/
2017-01-09 10:50:31 +01:00
Julien Fontanet
d534592479 5.5.4 2017-01-06 16:57:47 +01:00
Pierre Donias
b2f6ea9116 fix(vm.set): allocate resources when editing VM (#477)
Fixes vatesfr/xo-web#1695
2017-01-06 16:54:49 +01:00
Pierre Donias
8bf38bb29b feat(server): store connection error in database (#472)
See vatesfr/xo-web#1833
2017-01-06 16:38:17 +01:00
greenkeeper[bot]
9c6a78b678 fix(package): update promise-toolbox to version 0.8.0 (#476)
https://greenkeeper.io/
2017-01-06 11:34:27 +01:00
Pierre Donias
850199d7fc fix(resource-sets): recompute limits (#475)
Fixes vatesfr/xo-web#1866
2017-01-06 10:09:36 +01:00
Pierre Donias
4282928960 fix(vif/create): locking mode when allocating IP addresses (#474)
Fixes vatesfr/xo-web#1747
2017-01-06 09:55:55 +01:00
Julien Fontanet
356dd89d9f chore(package): upgrade jest to v 0.18.1 2017-01-03 18:30:28 +01:00
Julien Fontanet
7dd2391e5a fix(group.setUsers): oldUsers → oldUsersIds 2017-01-03 11:20:25 +01:00
Julien Fontanet
e0093f236a fix(group.create): do not attempt to parse empty prop 2017-01-03 10:47:10 +01:00
Julien Fontanet
8c5c32268a fix: users and groups serialization in Redis
Fixes vatesfr/xo-web#1852.
2017-01-02 16:52:51 +01:00
greenkeeper[bot]
b61ccc1af1 fix(package): update hashy to version 0.6.0 (#470)
https://greenkeeper.io/
2017-01-02 13:01:29 +01:00
Julien Fontanet
7caf0e40f4 5.5.3 2017-01-02 10:56:08 +01:00
Julien Fontanet
a16508db10 fix(remotes): do not error on disabled remote
- testRemote()
- updateRemote()
- removeRemote()
- forgetAllRemotes()
2016-12-25 20:07:42 +01:00
Julien Fontanet
81bff342b9 chore(package): update decorator-synchronized to version 0.2.3 2016-12-22 16:25:46 +01:00
Julien Fontanet
49d41a76a0 5.5.2 2016-12-22 11:22:45 +01:00
Julien Fontanet
b1732b3298 fix(file restore): work around for invalid delta VHD path (#467)
See vatesfr/xo-web#1842.
2016-12-22 11:20:51 +01:00
Julien Fontanet
9372cdb6c7 fix(vm.rollingDeltaBackup): do not hide error 2016-12-22 10:21:38 +01:00
Julien Fontanet
1d8e54b83e chore(backups): use directly Xo#getRemoteHandler() 2016-12-22 09:50:16 +01:00
Julien Fontanet
30c5600271 chore(Xo#getRemoteHandler): use intermediary variable 2016-12-22 09:49:36 +01:00
Julien Fontanet
9f7e5c3a9a fix(Xo#getRemoteHandler): throws if remote is disabled 2016-12-22 09:49:04 +01:00
Julien Fontanet
37c9342717 fix(vm.rollingDeltaBackup): correctly delete snapshot in case of failure 2016-12-21 22:35:43 +01:00
Julien Fontanet
8827f8e940 fix(backup.fetchFiles): encode URI suffix
Fix issue with whitespaces in the filename.
2016-12-20 17:07:13 +01:00
Julien Fontanet
58334bf4a1 fix(backup.list): timestamps should be integers 2016-12-20 17:07:13 +01:00
Julien Fontanet
b898a6702c chore(package): use husky instead of ghooks 2016-12-20 17:07:13 +01:00
Julien Fontanet
6d78a810b9 perf(RemoteHandlerAbstract/createReadStream): optimise code
- avoid async function: overhead with transpilation
- do as much as possible in parallel
- fix: do not add length property in range mode
2016-12-20 17:07:13 +01:00
Julien Fontanet
8fc4eb8cdf 5.5.1 2016-12-20 13:38:02 +01:00
Julien Fontanet
b3fac0c56f fix(backup.list): datetimes should be timestamps 2016-12-20 12:50:17 +01:00
Julien Fontanet
0b063b1f5e 5.5.0 2016-12-20 12:29:16 +01:00
Olivier Lambert
480f05e676 feat(vm): add install time (#465) 2016-12-20 12:19:11 +01:00
Julien Fontanet
1ac8af34ec feat(backup): implement file restore (#461)
See vatesfr/xo-web#1590

Current implementation has following limitations:

- only support local and NFS remotes
- requires installation of libvhdi-utils
- files can only be recovered one by one
2016-12-20 12:18:22 +01:00
Julien Fontanet
34ff8b0f02 feat(Xapi#exportDeltaVm): don't export VDIs with names starting with [NOBAK] (#464)
Fixes vatesfr/xo-web#826
2016-12-14 10:57:25 +01:00
Julien Fontanet
77c3684e28 chore(tests): execute tests directly in src/ 2016-12-13 18:20:17 +01:00
Julien Fontanet
93038ea838 chore(package): remove unused trace 2016-12-13 14:08:38 +01:00
Julien Fontanet
46348f7cba feat: yarn integration 2016-12-13 12:15:26 +01:00
Julien Fontanet
ccc0e45daf feat(tests): use Jest instead of mocha/chai/must 2016-12-13 12:15:03 +01:00
Julien Fontanet
46ca03b017 chore(package): clean scripts 2016-12-13 11:55:12 +01:00
Julien Fontanet
1bfe3197a5 chore(Travis): test with Node stable 2016-12-13 11:51:04 +01:00
Julien Fontanet
4d2617fe68 chore(package): requires Node >= 4 2016-12-13 11:49:54 +01:00
Julien Fontanet
92e289f9da fix(decorators/mixin): do not use arrow function for constructor
It works because of the transpilation but it's not valid ES2015.
2016-12-13 11:41:41 +01:00
greenkeeper[bot]
a8c7558a77 chore(package): update index-modules to version 0.2.1 (#463) 2016-12-12 16:49:10 +01:00
greenkeeper[bot]
c756e7ecbe chore(package): update index-modules to version 0.2.0 (#462)
https://greenkeeper.io/
2016-12-12 16:16:44 +01:00
Pierre Donias
1998c56e84 feat(vm.delete): release resource set and IP-pool addresses (#460)
Fixes vatesfr/xo-web#1657, fixes vatesfr/xo-web#1748
2016-12-12 15:14:31 +01:00
Julien Fontanet
2ed55b1616 chore(decorators): remove unused @autobind. 2016-12-08 11:47:17 +01:00
Julien Fontanet
0c8d456fd3 chore(package): use bind-property-descriptor instead of custom implementation 2016-12-08 11:46:29 +01:00
Julien Fontanet
9e4924caf6 5.4.1 2016-12-02 16:37:17 +01:00
Julien Fontanet
7f391a5860 Merge branch 'next-release' into stable 2016-12-02 16:37:13 +01:00
Julien Fontanet
5c7249c8fc fix(Xapi#exportDeltaVm): remove TAG_BASE_DELTA if full export
Fixes vatesfr/xo-web#1811
2016-12-02 16:09:27 +01:00
Pierre Donias
932d00133d feat(job-executor.match): __not pattern property (#459)
See vatesfr/xo-web#1503
2016-12-01 14:56:52 +01:00
Julien Fontanet
32a371bf13 chore(package): use golike-defer instead of custom implementation 2016-11-30 15:40:30 +01:00
Julien Fontanet
5d0622d2cf 5.4.0 2016-11-23 11:10:01 +01:00
Pierre Donias
9ab9155bf0 fix(vif.set): remove old VIF before creating new one (#457)
Fixes #1784
2016-11-23 10:38:24 +01:00
Julien Fontanet
86a1ed6d46 chore(package): remove unused nyc 2016-11-23 10:00:45 +01:00
Julien Fontanet
b3c9936d74 chore(package): update xen-api to v0.9.6 2016-11-23 09:58:04 +01:00
greenkeeper[bot]
21b4d7cf11 chore(package): update nyc to version 10.0.0 (#456)
https://greenkeeper.io/
2016-11-23 09:12:26 +01:00
greenkeeper[bot]
4ec07f9ff8 fix(package): update get-stream to version 3.0.0 (#458)
https://greenkeeper.io/
2016-11-23 09:11:39 +01:00
greenkeeper[bot]
b7c89d6f64 fix(package): update http-server-plus to version 0.8.0 (#454)
https://greenkeeper.io/
2016-11-18 14:44:50 +01:00
greenkeeper[bot]
0eb168ec70 fix(package): update uuid to version 3.0.0 (#453)
https://greenkeeper.io/
2016-11-18 09:10:07 +01:00
Olivier Lambert
8ac1a66e93 feat(sr.shared): new boolean property (#452) 2016-11-17 14:33:45 +01:00
badrAZ
301da3662a fix(plugin.test): data param is optional (#451) 2016-11-16 16:08:11 +01:00
greenkeeper[bot]
e474946cb7 fix(package): update xo-common to version 0.1.0 (#450)
https://greenkeeper.io/
2016-11-16 12:01:27 +01:00
Pierre Donias
9a0ca1ebb2 feat(api): map 10 XAPI errors to XO errors (#449)
Fixes vatesfr/xo-web#1481
2016-11-16 11:22:31 +01:00
Julien Fontanet
520f7b2a77 feat(job.create,job.set): ability to set userId (#448)
See vatesfr/xo-web#1733
2016-11-14 17:42:19 +01:00
Pierre Donias
c0b3b3aab8 Fix userId. 2016-11-14 16:59:10 +01:00
Pierre Donias
d499332ce3 It should be possible to not change a job's user. 2016-11-14 15:56:54 +01:00
Pierre Donias
19ce06e0bb feat(job#create,job#set): userId parameter
See vatesfr/xo-web#1733
2016-11-14 15:33:09 +01:00
greenkeeper[bot]
ea6ff4224e fix(package): update fs-promise to version 1.0.0 (#447)
https://greenkeeper.io/
2016-11-10 08:56:37 +01:00
Julien Fontanet
871d1f8632 fix(plugins registration): params order 2016-11-09 17:05:10 +01:00
badrAZ
77ce2ff6d1 feat(plugin.test): plugins can be tested (#446)
See vatesfr/xo-web#1749
2016-11-09 14:58:19 +01:00
Pierre Donias
6383104796 fix(Xapi#editPif): destroy VLAN from each PIF before creating new VLAN (#444) 2016-11-08 16:50:12 +01:00
Julien Fontanet
b99b4159c8 feat(Redis): support aliased commands
Fixes #443
2016-11-08 10:23:53 +01:00
Olivier Lambert
8bedb1f3b9 Merge pull request #442 from vatesfr/pierre-fix-xo-error
fix(api): xoError is not an object
2016-11-07 18:18:45 +01:00
Pierre Donias
dc85804a27 fix(api): xoError is not an object 2016-11-07 17:58:16 +01:00
greenkeeper[bot]
42a31e512a fix(package): update json-rpc-peer to version 0.13.0 (#441)
https://greenkeeper.io/
2016-11-07 14:57:53 +01:00
Pierre Donias
2be7388696 feat(api-errors): throw custom errors when XAPI error is caught (#440)
See vatesfr/xo-web#1717
2016-11-07 14:15:23 +01:00
Julien Fontanet
bc5b00781b 5.3.3 2016-11-04 11:44:09 +01:00
Olivier Lambert
313e2b3de6 fix(Sr): add type cifs in deviceConfig. Fixes vatesfr/xo-web#1615 (#439) 2016-11-04 11:42:03 +01:00
Julien Fontanet
0bbd002060 fix(xo.importConfig): don't unnecessarily delete existing users
Do not delete existing users with same name & id
2016-11-04 09:42:56 +01:00
Julien Fontanet
5e785266a5 fix(xo.importConfig): correctly import ACLs
Fixes vatesfr/xo-web#1722
2016-11-04 09:40:41 +01:00
Julien Fontanet
5870769e7d fix(vm.import{,Delta}Backup): make restored VMs identifiable
Their names is prefixed with the exported date and they have a specific tag (*restored from backup*).

Fixes vatesfr/xo-web#1719
2016-11-03 16:22:42 +01:00
Julien Fontanet
79b80dcd07 fix(pif#carrier): cast to boolean 2016-11-02 16:50:12 +01:00
Olivier Lambert
6f6e547e6c feat(pif): add carrier (#438)
Fixes vatesfr/xo-web#1702
2016-11-02 16:25:44 +01:00
greenkeeper[bot]
352c9357df chore(package): update dependencies (#437)
https://greenkeeper.io/
2016-11-01 19:05:11 +01:00
Pierre Donias
1ba4641641 feat(acls): handle xo.clean (#436) 2016-10-31 15:53:50 +01:00
Greenkeeper
60e0047285 chore(package): update helmet to version 3.0.0 (#435)
https://greenkeeper.io/
2016-10-29 12:52:18 +02:00
Pierre Donias
235e7c143c fix(signin): new Bootstrap classes (#434) 2016-10-28 10:11:41 +02:00
Julien Fontanet
522d6eed92 5.3.2 2016-10-27 18:49:32 +02:00
Julien Fontanet
9d1d6ea4c5 feat(xo): export/import config (#427)
See vatesfr/xo-web#786
2016-10-27 18:48:19 +02:00
Julien Fontanet
0afd506a41 5.3.1 2016-10-27 18:25:16 +02:00
Julien Fontanet
9dfb837e3f fix(Xapi#importDeltaVm): gracefully handle missing vif.$network$uuid (#433) 2016-10-27 16:46:45 +02:00
fufroma
4ab63b569f fix(RemoteHandlerNfs): move mount points in /run/xo-server/mounts
Fixes vatesfr/xo-web#1405
2016-10-27 15:56:33 +02:00
Julien Fontanet
8d390d256d fix(http-request): handle redirections (#432) 2016-10-27 15:34:54 +02:00
Julien Fontanet
4eec5e06fc fix(package): test on Node 6, not 7 (#431) 2016-10-27 12:24:40 +02:00
Julien Fontanet
e4063b1ba8 feat(sample.config.yaml): add warning about YAML 2016-10-24 22:52:11 +02:00
Greenkeeper
0c3227cf8e chore(package): update promise-toolbox to version 0.7.0 (#428)
https://greenkeeper.io/
2016-10-24 15:01:17 +02:00
Pierre Donias
7bed200bf5 feat(pif): editVlan (#426)
Fix vatesfr/xo-web#1092
2016-10-24 10:24:44 +02:00
Julien Fontanet
4f763e2109 5.3.0 2016-10-20 16:01:53 +02:00
Pierre Donias
75167fb65b feat(pif): expose IP config modes (#424)
See vatesfr/xo-web#1651
2016-10-20 12:44:35 +02:00
Julien Fontanet
675588f780 feat(delta backups): force checksums refresh
See vatesfr/xo-web#1672
2016-10-20 12:38:26 +02:00
Julien Fontanet
2d6f94edd8 fix(vhd-merge/chainVhd): correctly await _write()
Fixes vatesfr/xo-web#1672
2016-10-20 12:31:20 +02:00
Julien Fontanet
247c66ef4b feat(IP pools): can be used in resource sets (#413)
See vatesfr/xo-web#1565
2016-10-19 11:17:05 +02:00
Greenkeeper
1076fac40f Update gulp-sourcemaps to version 2.1.1 🚀 (#422)
https://greenkeeper.io/
2016-10-14 10:44:27 +02:00
Julien Fontanet
14a4a415a2 5.2.6 2016-10-13 18:51:16 +02:00
Julien Fontanet
524355b59c fix(vhd-merge/chainVhd): correctly compute header checksum (#419)
Fixes vatesfr/xo-web#1656
2016-10-13 18:49:58 +02:00
Greenkeeper
36fe49f3f5 Update promise-toolbox to version 0.6.0 🚀 (#416)
https://greenkeeper.io/
2016-10-12 09:19:19 +02:00
Greenkeeper
c0c0af9b14 chore(package): update execa to version 0.5.0 (#411)
https://greenkeeper.io/
2016-10-05 10:40:31 +02:00
Julien Fontanet
d1e472d482 chore(package): use babel-plugin-lodash 2016-10-04 16:05:01 +02:00
Julien Fontanet
c80e43ad0d fix(vm.create): don't require view perm on VM template 2016-10-04 16:03:06 +02:00
Julien Fontanet
fdd395e2b6 fix(vm.create): correctly check resourceSet objects
Related to vatesfr/xo-web#1620
2016-10-04 15:51:04 +02:00
Julien Fontanet
e094437168 fix(package): update xo-acl-resolver to version 0.2.2
See vatesfr/xo-web#1620
2016-10-04 15:24:01 +02:00
Pierre Donias
2ee0be7466 fix(xapi/utils/makeEditObject): constraints works with user props (#410) 2016-10-04 15:02:27 +02:00
Julien Fontanet
2784a7cc92 Create ISSUE_TEMPLATE.md 2016-10-03 16:24:24 +02:00
Julien Fontanet
b09f998d6c 5.2.5 2016-10-03 09:39:52 +02:00
Nicolas Raynaud
bdeb5895f6 fix(deltaBackups): update checksum after altering VHD files (#408)
Fixes vatesfr/xo-web#1606
2016-09-30 14:31:33 +02:00
Pierre Donias
3944b8aaee feat(network): create a bonded network (#407)
Fixes vatesfr/xo-web#876
2016-09-30 13:51:33 +02:00
Nicolas Raynaud
6e66cffb92 feat(deltaBackups): correctly chain VHDs (#406)
The goal is for a tool like vhdimount to be able to mount any file and use it as a disk to recover specific file in it.
2016-09-29 17:31:36 +02:00
Pierre Donias
57092ee788 feat(vif.set): support for network, MAC and currently_attached (#403)
Fixes vatesfr/xo-web#1446
2016-09-28 15:09:17 +02:00
Julien Fontanet
70e9e1c706 chore(package): update human-format to version 0.7.0 2016-09-28 09:58:54 +02:00
Greenkeeper
9662b8fbee chore(package): update babel-eslint to version 7.0.0 (#404)
https://greenkeeper.io/
2016-09-27 23:39:30 +02:00
Julien Fontanet
9f66421ae7 fix(bootstrap): C-c twice force stop the server 2016-09-27 10:44:24 +02:00
Greenkeeper
50584c2e50 chore(package): update http-server-plus to version 0.7.0 (#402)
https://greenkeeper.io/
2016-09-27 09:30:16 +02:00
Julien Fontanet
7be4e1901a chore(package): use index-modules 2016-09-26 15:41:41 +02:00
Julien Fontanet
b47146de45 fix(pbd/attached): should be a boolean 2016-09-22 13:20:49 +02:00
Julien Fontanet
97b229b2c7 fix(vm.set): works with VM templates
Fixes vatesfr/xo-web#1569
2016-09-22 10:39:20 +02:00
Julien Fontanet
6bb5bb9403 5.2.4 2016-09-21 10:20:46 +02:00
Julien Fontanet
8c4b8271d8 fix(pool.setDefaultSr): remove pool param
Fixes vatesfr/xo-web#1558
2016-09-20 11:45:36 +02:00
Julien Fontanet
69291c0574 chore(package): update xo-vmdk-to-vhd to version 0.0.12
Fixes vatesfr/xo-web#1551
2016-09-20 10:41:42 +02:00
Julien Fontanet
2dc073dcd6 fix(vm.resourceSet): handle xo namespace 2016-09-19 13:15:23 +02:00
Julien Fontanet
1894cb35d2 feat(vm): expose resourceSet prop 2016-09-19 12:10:09 +02:00
Julien Fontanet
cd37420b07 Merge pull request #398 from vatesfr/greenkeeper-standard-8.1.0
Update standard to version 8.1.0 🚀
2016-09-18 05:17:41 +02:00
Julien Fontanet
55cb6b39db fix(Xo#removeSchedule): correctly test instance of SchedulerError 2016-09-18 05:12:36 +02:00
greenkeeperio-bot
89d13b2285 chore(package): update standard to version 8.1.0
https://greenkeeper.io/
2016-09-17 20:51:59 +02:00
Julien Fontanet
1b64b0468a fix(group.delete): remove associated ACLs
Fixes vatesfr/xo-web#899
2016-09-16 16:04:41 +02:00
Julien Fontanet
085fb83294 fix(user.delete): remove associated ACLs
See vatesfr/xo-web#899
2016-09-16 16:04:41 +02:00
Julien Fontanet
edd606563f feat(vm.revert): can snapshot before (#395)
See vatesfr/xo-web#1445
2016-09-15 14:59:43 +02:00
Julien Fontanet
fb804e99f0 5.2.3 2016-09-14 18:02:32 +02:00
Pierre Donias
1707cbcb54 feat(signin): use XO 5 style (#394)
Fixes vatesfr/xo-web#1161
2016-09-14 17:56:05 +02:00
Julien Fontanet
6d6a630c31 5.2.2 2016-09-14 17:37:42 +02:00
Julien Fontanet
ff2990e8e5 chore(package): update @marsaud/smb2-promise to version 0.2.1
Fixes vatesfr/xo-web#1511
2016-09-14 17:32:52 +02:00
Nicolas Raynaud
d679aff0fb chore(package): remove node-smb2 dependency (#393) 2016-09-14 16:23:28 +02:00
Julien Fontanet
603a444905 fix(Xapi#importVm): remove VM's VDIs on failure 2016-09-14 14:11:20 +02:00
Julien Fontanet
a002958448 fix(DR): remove previous VDIs
Fixes vatesfr/xo-web#1510
2016-09-14 14:11:20 +02:00
Julien Fontanet
cb4bc37424 fix(DR): delete VMs in all cases
Previous copies were not deleted when there were as many as the depth.

Fixes vatesfr/xo-web#1509
2016-09-14 14:11:19 +02:00
Julien Fontanet
0fc6f917e6 5.2.1 2016-09-13 16:44:35 +02:00
Julien Fontanet
ec0d012b24 feat(vm.set): support tags (#392)
Fixes vatesfr/xo-web#1431
2016-09-13 16:35:40 +02:00
Julien Fontanet
2cd4b171a1 chore(package): update json5 to version 0.5.0 2016-09-13 11:28:56 +02:00
Julien Fontanet
0cb6906c4d chore(package): is-my-json-valid to v2.13.1 2016-09-13 11:25:22 +02:00
Julien Fontanet
4c19b93c30 chore(package): update fs-promise to version 0.5.0 2016-09-13 11:23:42 +02:00
Julien Fontanet
6165f1b405 fix(vm.create): select SR of first disk-VDI (#391)
Fixes vatesfr/xo-web#1493
2016-09-12 16:32:43 +02:00
Julien Fontanet
37a4221e43 fix(vm.docker.containers): yes, again 2016-09-12 12:13:45 +02:00
Julien Fontanet
9831b222b5 fix(vm.docker.containers) 2016-09-12 12:11:15 +02:00
Julien Fontanet
7b6f44fb74 fix(vm.createInterface): syntax fix 2016-09-12 12:06:34 +02:00
Julien Fontanet
399f4d0ea3 feat(vm.docker.containers): like vm.docker.process.items but always an array 2016-09-12 11:43:36 +02:00
Julien Fontanet
26a668a875 fix(vm.createInterface): accept integers for position and mtu 2016-09-12 11:36:30 +02:00
Julien Fontanet
bf96262b6e feat(Xapi#createVif): default MTU is network's MTU 2016-09-12 11:05:31 +02:00
Julien Fontanet
1155fa1fe9 chore(vm.create): remove some console.log()s 2016-09-09 15:31:25 +02:00
Julien Fontanet
1875d31731 5.2.0 2016-09-09 15:16:03 +02:00
Julien Fontanet
6f855fd14e feat(IP pools): groups of IP addresses (#371) 2016-09-09 15:12:30 +02:00
Julien Fontanet
08e392bb46 fix(vm.create): correctly compute limits usage (#389)
Fixes vatesfr/xo-web#1365
2016-09-09 12:55:10 +02:00
Julien Fontanet
66d63e0546 fix(test.wait): fix setTimeout params order 2016-09-08 18:40:55 +02:00
Julien Fontanet
7ee56fe8bc feat(pool.installAllPatches): install all patches on a pool (#388)
See vatesfr/xo-web#1392
2016-09-07 17:54:00 +02:00
Julien Fontanet
669d04ee48 fix(vm.migrate): error on unused default SR
Fixes #1466
2016-09-05 14:21:17 +02:00
Julien Fontanet
cb1b37326e fix(vm.rollingDrCopy): avoid duplicates in VMs list (#387)
Fixes vatesfr/xo-web#1464
2016-09-05 13:41:20 +02:00
Julien Fontanet
7bb73bee67 feat(vm.rollingDrCopy): failure to destroy old copies is not fatal 2016-09-05 11:29:54 +02:00
Julien Fontanet
7286ddc338 chore(JobExecutor): use utils/serializeError() 2016-09-05 11:29:53 +02:00
Olivier Lambert
7d1f9e33fe feat(network): add defaultIsLocked to API (#385) 2016-09-01 14:49:20 +02:00
Ronan Abhamon
63c676ebfe feat(vm.import): supports OVA (#375)
See vatesfr/xo-web#709
2016-09-01 14:11:15 +02:00
Greenkeeper
fcaf6b7923 chore(package): update json-rpc-peer to version 0.12.0 (#383)
https://greenkeeper.io/
2016-08-25 11:56:54 -04:00
Julien Fontanet
9f347a170a fix(xapi/utils): correctly isPlainObject 2016-08-18 16:21:34 +02:00
Julien Fontanet
2f7cd4426d fix(xapi/utils/prepareXapiParam): array handling 2016-08-18 16:15:51 +02:00
Julien Fontanet
854f256470 fix(xapi/getNamespaceForType): add missing VIF 2016-08-18 15:27:47 +02:00
Julien Fontanet
5d0b40f752 fix(utils/camelToSnakeCase): better number handling 2016-08-18 15:23:57 +02:00
Julien Fontanet
27a2853ee8 fix(vif.set): add missing param 2016-08-18 15:13:46 +02:00
Julien Fontanet
67f6b80312 fix(vif.set): do not use an arrow function 2016-08-18 15:01:13 +02:00
Julien Fontanet
016037adc1 fix(user.set): can be used by non admins 2016-08-18 14:17:07 +02:00
Julien Fontanet
70d5c1034d 5.1.6 2016-08-18 10:54:36 +02:00
Greenkeeper
ed6fb8754f chore(package): update mocha to version 3.0.2 (#376)
https://greenkeeper.io/
2016-08-18 10:53:05 +02:00
Julien Fontanet
6d08a9b11c feat(JobExecutor): a current job will only run 2 calls at a time (#382)
Fixes vatesfr/xo-web#915
2016-08-18 10:52:29 +02:00
Julien Fontanet
cf6aa7cf79 fix(package): update xen-api to 0.9.4
Again, fixes vatesfr/xo-web#1384
2016-08-18 09:42:28 +02:00
Julien Fontanet
6c4e57aae0 chore(JobExecutor#_execCall): forEach+Array#push → mapToArray 2016-08-17 18:13:30 +02:00
Julien Fontanet
d08a04959c 5.1.5 2016-08-16 19:15:52 +02:00
Julien Fontanet
2762f74ce5 fix(package): update xen-api to 0.9.3 2016-08-16 19:12:46 +02:00
Julien Fontanet
6ebcf6eec5 5.1.4 2016-08-16 18:18:04 +02:00
Julien Fontanet
25b78fb7e1 fix(package): update xen-api to 0.9.2
Fixes vatesfr/xo-web#1384
2016-08-16 18:15:32 +02:00
Greenkeeper
670dd2dd96 chore(package): update promise-toolbox to version 0.5.0 (#381)
https://greenkeeper.io/
2016-08-16 12:22:57 +02:00
Julien Fontanet
1baf04f786 fix(NfsHandler#_unmount): use _getRealPath() (#380)
Fixes vatesfr/xo-web#1396.
2016-08-15 14:22:19 +02:00
Greenkeeper
ce05b7a041 chore(package): update nyc to version 8.1.0 (#379)
https://greenkeeper.io/
2016-08-14 19:06:00 +02:00
Olivier Lambert
290cc146c8 fix(xapi): allow to unplug VBDs when VM is running 2016-08-11 16:32:06 +02:00
Olivier Lambert
db4d46a584 fix(sr): don't share a local ISO SR. Fixes vatesfr/xo-web#1389 2016-08-10 14:39:05 +02:00
Olivier Lambert
8ed2e51dde feat(network): add network.set method 2016-08-08 14:54:23 +02:00
Olivier Lambert
33702c09a6 feat(vm copy): allow snapshot copy. Related to vatesfr/xo-web#1353 2016-08-08 14:07:27 +02:00
Olivier Lambert
45aeca3753 5.1.3 2016-08-05 11:08:11 +02:00
Olivier Lambert
deae7dfb4d fix(xen-api): avoid reserved key conflicts. Fixes vatesfr/xo-web#1369 2016-08-05 11:06:58 +02:00
Julien Fontanet
2af043ebdd chore(jshint): remove unused config file 2016-08-03 09:46:52 +02:00
Olivier Lambert
e121295735 Merge pull request #373 from nraynaud/next-release
fix (readme): fix installation documentation link
2016-08-02 12:32:05 +02:00
Nicolas Raynaud
7c1c405a64 fix installation documentation link 2016-08-02 12:22:39 +02:00
Olivier Lambert
5d7c95a34d fix(xapi): typo on host disable method. Fixes vatesfr/xo-web#1351 2016-07-30 20:22:12 +02:00
Julien Fontanet
504c934fc9 fix(JobExecutor#_execCall): xo.api.call() → xo.callApiMethod() 2016-07-29 15:28:24 +02:00
Julien Fontanet
81b0223f73 fix(JobExecutor#exec): forward the error 2016-07-29 15:27:58 +02:00
Julien Fontanet
6d1e410bfd fix(JobExecutor#exec): correctly log the error 2016-07-29 15:27:32 +02:00
Julien Fontanet
26c5c6152d fix(job-executor/map): paramName handling 2016-07-29 14:37:42 +02:00
Julien Fontanet
d83bf0ebaf fix(Xo#_watchObject): check for notify() 2016-07-29 14:29:57 +02:00
Julien Fontanet
5adfe9a552 chore(index): remove debug trace 2016-07-29 13:54:54 +02:00
ABHAMON Ronan
883f461dc7 feat(job-executor): supports dynamic param vectors (#369)
See vatesfr/xo-web#837
2016-07-29 13:26:53 +02:00
Julien Fontanet
8595ebc258 feat(api): generate logs on errors
See vatesfr/xo-web#1344
2016-07-29 10:32:48 +02:00
Julien Fontanet
2bd31f4560 chore(api): remove legacy helpers 2016-07-28 15:21:59 +02:00
Julien Fontanet
6df85ecadd fix(vm.*): add missing import 2016-07-28 15:21:59 +02:00
Julien Fontanet
07829918e4 5.1.2 2016-07-28 15:21:12 +02:00
Julien Fontanet
b0d400b6eb fix(Xapi#exportDeltaVm): better handling of removed VDIs (#370)
Fixes vatesfr/xo-web#1333
2016-07-28 15:19:44 +02:00
Julien Fontanet
706cb895ad 5.1.1 2016-07-27 16:36:51 +02:00
Julien Fontanet
45bf539b3c fix(user.delete): fix tokens deletion 2016-07-27 13:23:16 +02:00
Julien Fontanet
0923981f8d fix(user.set): typo in error message 2016-07-27 13:01:32 +02:00
94 changed files with 11961 additions and 2387 deletions

View File

@@ -1,93 +0,0 @@
{
// Julien Fontanet JSHint configuration
// https://gist.github.com/julien-f/8095615
//
// Changes from defaults:
// - all enforcing options (except `++` & `--`) enabled
// - single quotes
// - indentation set to 2 instead of 4
// - almost all relaxing options disabled
// - environments are set to Node.js
//
// See http://jshint.com/docs/ for more details
"maxerr" : 50, // {int} Maximum error before stopping
// Enforcing
"bitwise" : true, // true: Prohibit bitwise operators (&, |, ^, etc.)
"camelcase" : true, // true: Identifiers must be in camelCase
"curly" : true, // true: Require {} for every new block or scope
"eqeqeq" : true, // true: Require triple equals (===) for comparison
"forin" : true, // true: Require filtering for..in loops with obj.hasOwnProperty()
"freeze" : true, // true: Prohibit overwriting prototypes of native objects (Array, Date, ...)
"immed" : true, // true: Require immediate invocations to be wrapped in parens e.g. `(function () { } ());`
"indent" : 2, // {int} Number of spaces to use for indentation
"latedef" : true, // true: Require variables/functions to be defined before being used
"newcap" : true, // true: Require capitalization of all constructor functions e.g. `new F()`
"noarg" : true, // true: Prohibit use of `arguments.caller` and `arguments.callee`
"noempty" : true, // true: Prohibit use of empty blocks
"nonbsp" : true, // true: Prohibit use of non breakable spaces
"nonew" : true, // true: Prohibit use of constructors for side-effects (without assignment)
"plusplus" : false, // true: Prohibit use of `++` & `--`
"quotmark" : "single", // Quotation mark consistency:
// false : do nothing (default)
// true : ensure whatever is used is consistent
// "single" : require single quotes
// "double" : require double quotes
"undef" : true, // true: Require all non-global variables to be declared (prevents global leaks)
"unused" : true, // true: Require all defined variables be used
"strict" : false, // true: Requires all functions run in ES5 Strict Mode
"maxcomplexity" : 7, // {int} Max cyclomatic complexity per function
"maxdepth" : 3, // {int} Max depth of nested blocks (within functions)
"maxlen" : 80, // {int} Max number of characters per line
"maxparams" : 4, // {int} Max number of formal params allowed per function
"maxstatements" : 20, // {int} Max number statements per function
// Relaxing
"asi" : false, // true: Tolerate Automatic Semicolon Insertion (no semicolons)
"boss" : false, // true: Tolerate assignments where comparisons would be expected
"debug" : false, // true: Allow debugger statements e.g. browser breakpoints.
"eqnull" : false, // true: Tolerate use of `== null`
"esnext" : true, // true: Allow ES.next (ES6) syntax (ex: `const`)
"evil" : false, // true: Tolerate use of `eval` and `new Function()`
"expr" : false, // true: Tolerate `ExpressionStatement` as Programs
"funcscope" : false, // true: Tolerate defining variables inside control statements
"globalstrict" : false, // true: Allow global "use strict" (also enables 'strict')
"iterator" : false, // true: Tolerate using the `__iterator__` property
"lastsemic" : false, // true: Tolerate omitting a semicolon for the last statement of a 1-line block
"laxbreak" : false, // true: Tolerate possibly unsafe line breakings
"laxcomma" : false, // true: Tolerate comma-first style coding
"loopfunc" : false, // true: Tolerate functions being defined in loops
"moz" : false, // true: Allow Mozilla specific syntax (extends and overrides esnext features)
// (ex: `for each`, multiple try/catch, function expression…)
"multistr" : false, // true: Tolerate multi-line strings
"notypeof" : false, // true: Tolerate typeof comparison with unknown values.
"proto" : false, // true: Tolerate using the `__proto__` property
"scripturl" : false, // true: Tolerate script-targeted URLs
"shadow" : false, // true: Allows re-define variables later in code e.g. `var x=1; x=2;`
"sub" : false, // true: Tolerate using `[]` notation when it can still be expressed in dot notation
"supernew" : false, // true: Tolerate `new function () { ... };` and `new Object;`
"validthis" : false, // true: Tolerate using this in a non-constructor function
"noyield" : false, // true: Tolerate generators without yields
// Environments
"browser" : false, // Web Browser (window, document, etc)
"browserify" : false, // Browserify (node.js code in the browser)
"couch" : false, // CouchDB
"devel" : false, // Development/debugging (alert, confirm, etc)
"dojo" : false, // Dojo Toolkit
"jquery" : false, // jQuery
"mocha" : false, // mocha
"mootools" : false, // MooTools
"node" : true, // Node.js
"nonstandard" : false, // Widely adopted globals (escape, unescape, etc)
"phantom" : false, // PhantomJS
"prototypejs" : false, // Prototype and Scriptaculous
"rhino" : false, // Rhino
"worker" : false, // Web Workers
"wsh" : false, // Windows Scripting Host
"yui" : false, // Yahoo User Interface
// Custom Globals
"globals" : {} // additional predefined global variables
}

3
.mention-bot Normal file
View File

@@ -0,0 +1,3 @@
{
"userBlacklist": [ "greenkeeper", "Wescoeur" ]
}

View File

@@ -1 +0,0 @@
--require ./better-stacks.js

View File

@@ -1,8 +1,8 @@
language: node_js
node_js:
# - 'stable'
- '4'
- '0.12'
- stable
- 6
- 4
# Use containers.
# http://docs.travis-ci.com/user/workers/container-based-infrastructure/

3
ISSUE_TEMPLATE.md Normal file
View File

@@ -0,0 +1,3 @@
# ALL ISSUES SHOULD BE CREATED IN XO-WEB'S TRACKER!
https://github.com/vatesfr/xo-web/issues

View File

@@ -19,7 +19,7 @@ ___
## Installation
Manual install procedure is [available here](https://github.com/vatesfr/xo/blob/master/doc/installation/README.md#installation).
Manual install procedure is [available here](https://xen-orchestra.com/docs/from_the_sources.html).
## Compilation

View File

@@ -1,13 +1,5 @@
Error.stackTraceLimit = 100
// Async stacks.
//
// Disabled for now as it cause a huge memory usage with
// fs.createReadStream().
// TODO: find a way to reenable.
//
// try { require('trace') } catch (_) {}
// Removes internal modules.
try {
var sep = require('path').sep

View File

@@ -58,7 +58,7 @@ gulp.task(function buildCoffee () {
})
gulp.task(function buildEs6 () {
return src('**/*.js')
return src([ '**/*.js', '!*.spec.js' ])
.pipe(sourceMaps.init())
.pipe(babel())
.pipe(sourceMaps.write('.'))

View File

@@ -4,7 +4,7 @@
// Enable xo logs by default.
if (process.env.DEBUG === undefined) {
process.env.DEBUG = 'app-conf,xen-api,xo:*'
process.env.DEBUG = 'app-conf,xo:*,-xo:api'
}
// Import the real main module.

View File

@@ -1,6 +1,6 @@
{
"name": "xo-server",
"version": "5.1.0",
"version": "5.7.4",
"license": "AGPL-3.0",
"description": "Server part of Xen-Orchestra",
"keywords": [
@@ -31,131 +31,146 @@
"bin": "bin"
},
"engines": {
"node": ">=0.12"
"node": ">=4.5"
},
"dependencies": {
"@marsaud/smb2": "^0.7.1",
"@marsaud/smb2-promise": "^0.2.0",
"app-conf": "^0.4.0",
"babel-runtime": "^6.5.0",
"@marsaud/smb2-promise": "^0.2.1",
"@nraynaud/struct-fu": "^1.0.1",
"app-conf": "^0.4.1",
"archiver": "^1.3.0",
"arp-a": "^0.5.1",
"babel-runtime": "^6.23.0",
"base64url": "^2.0.0",
"blocked": "^1.1.0",
"bluebird": "^3.1.1",
"body-parser": "^1.13.3",
"bind-property-descriptor": "^0.0.0",
"blocked": "^1.2.1",
"bluebird": "^3.5.0",
"body-parser": "^1.17.1",
"connect-flash": "^0.1.1",
"cookie": "^0.3.0",
"cookie-parser": "^1.3.5",
"cron": "^1.0.9",
"d3-time-format": "^2.0.0",
"debug": "^2.1.3",
"escape-string-regexp": "^1.0.3",
"event-to-promise": "^0.7.0",
"cookie": "^0.3.1",
"cookie-parser": "^1.4.3",
"cron": "^1.2.1",
"d3-time-format": "^2.0.5",
"debug": "^2.6.3",
"decorator-synchronized": "^0.2.3",
"escape-string-regexp": "^1.0.5",
"event-to-promise": "^0.8.0",
"exec-promise": "^0.6.1",
"execa": "^0.4.0",
"express": "^4.13.3",
"express-session": "^1.11.3",
"fatfs": "^0.10.3",
"fs-extra": "^0.30.0",
"fs-promise": "^0.4.1",
"get-stream": "^2.1.0",
"hashy": "~0.4.2",
"helmet": "^2.0.0",
"highland": "^2.5.1",
"http-proxy": "^1.13.2",
"http-server-plus": "^0.6.4",
"human-format": "^0.6.0",
"is-my-json-valid": "^2.12.2",
"js-yaml": "^3.2.7",
"json-rpc-peer": "^0.11.0",
"json5": "^0.4.0",
"execa": "^0.6.3",
"express": "^4.15.2",
"express-session": "^1.15.1",
"fatfs": "^0.10.4",
"from2": "^2.3.0",
"fs-extra": "^2.1.2",
"fs-promise": "^2.0.1",
"golike-defer": "^0.0.0",
"hashy": "~0.6.1",
"helmet": "^3.5.0",
"highland": "^2.10.5",
"http-proxy": "^1.16.2",
"http-server-plus": "^0.8.0",
"human-format": "^0.8.0",
"is-my-json-valid": "^2.16.0",
"is-redirect": "^1.0.0",
"js-yaml": "^3.8.2",
"json-rpc-peer": "^0.13.1",
"json5": "^0.5.1",
"julien-f-source-map-support": "0.0.0",
"julien-f-unzip": "^0.2.1",
"kindof": "^2.0.0",
"level": "^1.3.0",
"level": "^1.6.0",
"level-party": "^3.0.4",
"level-sublevel": "^6.5.2",
"leveldown": "^1.4.2",
"lodash": "^4.13.1",
"level-sublevel": "^6.6.1",
"leveldown": "^1.6.0",
"lodash": "^4.17.4",
"make-error": "^1",
"micromatch": "^2.3.2",
"micromatch": "^2.3.11",
"minimist": "^1.2.0",
"moment-timezone": "^0.5.4",
"ms": "^0.7.1",
"multikey-hash": "^1.0.1",
"ndjson": "^1.4.3",
"moment-timezone": "^0.5.11",
"ms": "^1.0.0",
"multikey-hash": "^1.0.4",
"ndjson": "^1.5.0",
"parse-pairs": "^0.2.2",
"partial-stream": "0.0.0",
"passport": "^0.3.0",
"passport": "^0.3.2",
"passport-local": "^1.0.0",
"promise-toolbox": "^0.3.2",
"pretty-format": "^19.0.0",
"promise-toolbox": "^0.8.2",
"proxy-agent": "^2.0.0",
"pug": "^2.0.0-alpha6",
"redis": "^2.0.1",
"schema-inspector": "^1.5.1",
"semver": "^5.1.0",
"serve-static": "^1.9.2",
"stack-chain": "^1.3.3",
"struct-fu": "^1.0.0",
"through2": "^2.0.0",
"trace": "^2.0.1",
"ws": "^1.1.1",
"xen-api": "^0.9.0",
"xml2js": "~0.4.6",
"xo-acl-resolver": "^0.2.1",
"xo-collection": "^0.4.0",
"xo-remote-parser": "^0.3"
"pug": "^2.0.0-beta11",
"redis": "^2.7.1",
"schema-inspector": "^1.6.8",
"semver": "^5.3.0",
"serve-static": "^1.12.1",
"split-lines": "^1.1.0",
"stack-chain": "^1.3.7",
"tar-stream": "^1.5.2",
"through2": "^2.0.3",
"tmp": "^0.0.31",
"uuid": "^3.0.1",
"ws": "^2.2.2",
"xen-api": "^0.10.0-2",
"xml2js": "~0.4.17",
"xo-acl-resolver": "^0.2.3",
"xo-collection": "^0.4.1",
"xo-common": "^0.1.1",
"xo-remote-parser": "^0.3",
"xo-vmdk-to-vhd": "0.0.12"
},
"devDependencies": {
"babel-eslint": "^6.0.4",
"babel-eslint": "^7.2.1",
"babel-plugin-lodash": "^3.2.11",
"babel-plugin-transform-decorators-legacy": "^1.3.4",
"babel-plugin-transform-runtime": "^6.5.2",
"babel-preset-es2015": "^6.5.0",
"babel-preset-stage-0": "^6.5.0",
"chai": "^3.0.0",
"dependency-check": "^2.4.0",
"ghooks": "^1.0.3",
"babel-plugin-transform-runtime": "^6.23.0",
"babel-preset-env": "^1.2.2",
"babel-preset-stage-0": "^6.22.0",
"dependency-check": "^2.8.0",
"gulp": "git://github.com/gulpjs/gulp#4.0",
"gulp-babel": "^6",
"gulp-coffee": "^2.3.1",
"gulp-plumber": "^1.0.0",
"gulp-sourcemaps": "^1.5.1",
"gulp-watch": "^4.2.2",
"leche": "^2.1.1",
"mocha": "^2.2.1",
"must": "^0.13.1",
"nyc": "^7.0.0",
"rimraf": "^2.5.2",
"sinon": "^1.14.1",
"standard": "^7.0.0"
"gulp-coffee": "^2.3.4",
"gulp-plumber": "^1.1.0",
"gulp-sourcemaps": "^2.4.1",
"gulp-watch": "^4.3.11",
"husky": "^0.13.2",
"index-modules": "^0.3.0",
"jest": "^19.0.2",
"rimraf": "^2.6.1",
"standard": "^9.0.2"
},
"scripts": {
"build": "npm run build-indexes && gulp build --production",
"depcheck": "dependency-check ./package.json",
"build-indexes": "./tools/generate-index src/api src/xapi/mixins src/xo-mixins",
"dev": "npm run build-indexes && gulp build",
"dev-test": "mocha --opts .mocha.opts --watch --reporter=min \"dist/**/*.spec.js\"",
"lint": "standard",
"postrelease": "git checkout master && git merge --ff-only stable && git checkout next-release && git merge --ff-only stable",
"posttest": "npm run lint && npm run depcheck",
"build": "gulp build --production",
"commitmsg": "npm test",
"dev": "gulp build",
"dev-test": "jest --bail --watch",
"posttest": "standard && dependency-check -i constant-stream ./package.json",
"prebuild": "index-modules src/api src/xapi/mixins src/xo-mixins",
"predev": "npm run prebuild",
"prepublish": "npm run build",
"prerelease": "git checkout next-release && git pull --ff-only && git checkout stable && git pull --ff-only && git merge next-release",
"release": "npm version",
"start": "node bin/xo-server",
"test": "nyc mocha --opts .mocha.opts \"dist/**/*.spec.js\""
"test": "jest"
},
"babel": {
"plugins": [
"lodash",
"transform-decorators-legacy",
"transform-runtime"
],
"presets": [
"stage-0",
"es2015"
[
"env",
{
"targets": {
"node": 4
}
}
],
"stage-0"
]
},
"config": {
"ghooks": {
"commit-msg": "npm test"
}
"jest": {
"roots": [
"<rootDir>/src"
],
"testRegex": "\\.spec\\.js$"
},
"standard": {
"ignore": [

View File

@@ -1,11 +1,17 @@
# Example XO-Server configuration.
# BE *VERY* CAREFUL WHEN EDITING!
# YAML FILES ARE SUPER SUPER SENSITIVE TO MISTAKES IN WHITESPACE OR ALIGNMENT!
# visit http://www.yamllint.com/ to validate this file as needed
#=====================================================================
# Example XO-Server configuration.
#
# This file is automatically looking for at the following places:
# - `$HOME/.config/xo-server/config.yaml`
# - `/etc/xo-server/config.yaml`
#
# The first entries have priority.
#
# Note: paths are relative to the configuration file.
#=====================================================================
@@ -43,16 +49,17 @@ http:
# Hosts & ports on which to listen.
#
# By default, the server listens on 0.0.0.0:80.
# By default, the server listens on [::]:80.
listen:
# Basic HTTP.
-
# Address on which the server is listening on.
#
# Sets it to '127.0.0.1' to listen only on the local host.
# Sets it to 'localhost' for IP to listen only on the local host.
#
# Default: '0.0.0.0' (all addresses)
#hostname: '127.0.0.1'
# Default: all IPv6 addresses if available, otherwise all IPv4
# addresses.
#hostname: 'localhost'
# Port on which the server is listening on.
#
@@ -117,10 +124,23 @@ http:
# Connection to the Redis server.
redis:
# Syntax: redis://[db[:password]@]hostname[:port]
# Unix sockets can be used
#
# Default: redis://localhost:6379
#uri: ''
# Default: undefined
#socket: /var/run/redis/redis.sock
# Syntax: redis://[db[:password]@]hostname[:port][/db-number]
#
# Default: redis://localhost:6379/0
#uri: redis://redis.company.lan/42
# List of aliased commands.
#
# See http://redis.io/topics/security#disabling-of-specific-commands
#renameCommands:
# del: '3dda29ad-3015-44f9-b13b-fa570de92489'
# srem: '3fd758c9-5610-4e9d-a058-dbf4cb6d8bf0'
# Directory containing the database of XO.
# Currently used for logs.

View File

@@ -6,55 +6,45 @@ html
meta(name = 'viewport' content = 'width=device-width, initial-scale=1.0')
title Xen Orchestra
meta(name = 'author' content = 'Vates SAS')
link(rel = 'stylesheet' href = 'styles/main.css')
link(rel = 'stylesheet' href = 'v4/styles/main.css')
body
.container
.row-login
.page-header
img(src = 'images/logo_small.png')
h2 Xen Orchestra
form.form-horizontal(action = 'signin/local' method = 'post')
fieldset
legend.login
h3 Sign in
if error
p.text-danger #{error}
.form-group
.col-sm-12
.input-group
span.input-group-addon
i.xo-icon-user.fa-fw
input.form-control.input-sm(
name = 'username'
type = 'text'
placeholder = 'Username'
required
)
.form-group
.col-sm-12
.input-group
span.input-group-addon
i.fa.fa-key.fa-fw
input.form-control.input-sm(
name = 'password'
type = 'password'
placeholder = 'Password'
required
)
.form-group
.col-sm-5
.checkbox
label
input(
name = 'remember-me'
type = 'checkbox'
)
| Remember me
.form-group
.col-sm-12
button.btn.btn-login.btn-block.btn-success
i.fa.fa-sign-in
| Sign in
each label, id in strategies
div: a(href = 'signin/' + id) Sign in with #{label}
link(rel = 'stylesheet' href = 'index.css')
body(style = 'display: flex; height: 100vh;')
div(style = 'margin: auto; width: 20em;')
div.mb-2(style = 'display: flex;')
img(src = 'assets/logo.png' style = 'margin: auto;')
h2.text-xs-center.mb-2 Xen Orchestra
form(action = 'signin/local' method = 'post')
fieldset
if error
p.text-danger #{error}
.input-group.mb-1
span.input-group-addon
i.xo-icon-user.fa-fw
input.form-control(
name = 'username'
type = 'text'
placeholder = 'Username'
required
)
.input-group.mb-1
span.input-group-addon
i.fa.fa-key.fa-fw
input.form-control(
name = 'password'
type = 'password'
placeholder = 'Password'
required
)
.checkbox
label
input(
name = 'remember-me'
type = 'checkbox'
)
| &nbsp;
| Remember me
div
button.btn.btn-block.btn-info
i.fa.fa-sign-in
| Sign in
each label, id in strategies
div: a(href = 'signin/' + id) Sign in with #{label}

View File

@@ -1,70 +0,0 @@
import {JsonRpcError} from 'json-rpc-peer'
// ===================================================================
// Export standard JSON-RPC errors.
export { // eslint-disable-line no-duplicate-imports
InvalidJson,
InvalidParameters,
InvalidRequest,
JsonRpcError,
MethodNotFound
} from 'json-rpc-peer'
// -------------------------------------------------------------------
export class NotImplemented extends JsonRpcError {
constructor () {
super('not implemented', 0)
}
}
// -------------------------------------------------------------------
export class NoSuchObject extends JsonRpcError {
constructor (id, type) {
super('no such object', 1, {id, type})
}
}
// -------------------------------------------------------------------
export class Unauthorized extends JsonRpcError {
constructor () {
super('not authenticated or not enough permissions', 2)
}
}
// -------------------------------------------------------------------
export class InvalidCredential extends JsonRpcError {
constructor () {
super('invalid credential', 3)
}
}
// -------------------------------------------------------------------
export class AlreadyAuthenticated extends JsonRpcError {
constructor () {
super('already authenticated', 4)
}
}
// -------------------------------------------------------------------
export class ForbiddenOperation extends JsonRpcError {
constructor (operation, reason) {
super(`forbidden operation: ${operation}`, 5, reason)
}
}
// -------------------------------------------------------------------
// To be used with a user-readable message.
// The message can be destined to be displayed to the front-end user.
export class GenericError extends JsonRpcError {
constructor (message) {
super(message, 6)
}
}

0
src/api/.index-modules Normal file
View File

98
src/api/backup.js Normal file
View File

@@ -0,0 +1,98 @@
import archiver from 'archiver'
import { basename } from 'path'
import { format } from 'json-rpc-peer'
import { forEach } from 'lodash'
// ===================================================================
export function list ({ remote }) {
return this.listVmBackups(remote)
}
list.permission = 'admin'
list.params = {
remote: { type: 'string' }
}
// -------------------------------------------------------------------
export function scanDisk ({ remote, disk }) {
return this.scanDiskBackup(remote, disk)
}
scanDisk.permission = 'admin'
scanDisk.params = {
remote: { type: 'string' },
disk: { type: 'string' }
}
// -------------------------------------------------------------------
export function scanFiles ({ remote, disk, partition, path }) {
return this.scanFilesInDiskBackup(remote, disk, partition, path)
}
scanFiles.permission = 'admin'
scanFiles.params = {
remote: { type: 'string' },
disk: { type: 'string' },
partition: { type: 'string', optional: true },
path: { type: 'string' }
}
// -------------------------------------------------------------------
function handleFetchFiles (req, res, { remote, disk, partition, paths, format: archiveFormat }) {
this.fetchFilesInDiskBackup(remote, disk, partition, paths).then(files => {
res.setHeader('content-disposition', 'attachment')
res.setHeader('content-type', 'application/octet-stream')
const nFiles = paths.length
// Send lone file directly
if (nFiles === 1) {
files[0].pipe(res)
return
}
const archive = archiver(archiveFormat)
archive.on('error', error => {
console.error(error)
res.end(format.error(0, error))
})
forEach(files, file => {
archive.append(file, { name: basename(file.path) })
})
archive.finalize()
archive.pipe(res)
}).catch(error => {
console.error(error)
res.writeHead(500)
res.end(format.error(0, error))
})
}
export async function fetchFiles ({ format = 'zip', ...params }) {
const fileName = params.paths.length > 1
? `restore_${new Date().toJSON()}.${format}`
: basename(params.paths[0])
return this.registerHttpRequest(handleFetchFiles, { ...params, format }, {
suffix: encodeURI(`/${fileName}`)
}).then(url => ({ $getFrom: url }))
}
fetchFiles.permission = 'admin'
fetchFiles.params = {
remote: { type: 'string' },
disk: { type: 'string' },
format: { type: 'string', optional: true },
partition: { type: 'string', optional: true },
paths: {
type: 'array',
items: { type: 'string' },
minLength: 1
}
}

View File

@@ -5,10 +5,11 @@ $forEach = require 'lodash/forEach'
endsWith = require 'lodash/endsWith'
startsWith = require 'lodash/startsWith'
{coroutine: $coroutine} = require 'bluebird'
{format} = require 'json-rpc-peer'
{
extractProperty,
parseXml,
promisify
mapToArray,
parseXml
} = require '../utils'
#=====================================================================
@@ -261,6 +262,42 @@ stats.resolve = {
exports.stats = stats;
#---------------------------------------------------------------------
handleInstallSupplementalPack = $coroutine (req, res, { hostId }) ->
xapi = @getXapi(hostId)
# Timeout seems to be broken in Node 4.
# See https://github.com/nodejs/node/issues/3319
req.setTimeout(43200000) # 12 hours
req.length = req.headers['content-length']
try
yield xapi.installSupplementalPack(req, { hostId })
res.end(format.response(0))
catch e
res.writeHead(500)
res.end(format.error(0, new Error(e.message)))
return
installSupplementalPack = $coroutine ({host}) ->
return {
$sendTo: yield @registerHttpRequest(handleInstallSupplementalPack, { hostId: host.id })
}
installSupplementalPack.description = 'installs supplemental pack from ISO file'
installSupplementalPack.params = {
host: { type: 'string' }
}
installSupplementalPack.resolve = {
host: ['host', 'host', 'admin']
}
exports.installSupplementalPack = installSupplementalPack;
#=====================================================================
Object.defineProperty(exports, '__esModule', {

44
src/api/ip-pool.js Normal file
View File

@@ -0,0 +1,44 @@
import { unauthorized } from 'xo-common/api-errors'
export function create (props) {
return this.createIpPool(props)
}
create.permission = 'admin'
create.description = 'Creates a new ipPool'
// -------------------------------------------------------------------
function delete_ ({ id }) {
return this.deleteIpPool(id)
}
export { delete_ as delete }
delete_.permission = 'admin'
delete_.description = 'Delete an ipPool'
// -------------------------------------------------------------------
export function getAll (params) {
const { user } = this
if (!user) {
throw unauthorized()
}
return this.getAllIpPools(user.permission === 'admin'
? params && params.userId
: user.id
)
}
getAll.description = 'List all ipPools'
// -------------------------------------------------------------------
export function set ({ id, ...props }) {
return this.updateIpPool(id, props)
}
set.permission = 'admin'
set.description = 'Allow to modify an existing ipPool'

View File

@@ -18,7 +18,11 @@ get.params = {
}
export async function create ({job}) {
return (await this.createJob(this.session.get('user_id'), job)).id
if (!job.userId) {
job.userId = this.session.get('user_id')
}
return (await this.createJob(job)).id
}
create.permission = 'admin'
@@ -27,7 +31,9 @@ create.params = {
job: {
type: 'object',
properties: {
userId: {type: 'string', optional: true},
name: {type: 'string', optional: true},
timeout: {type: 'number', optional: true},
type: {type: 'string'},
key: {type: 'string'},
method: {type: 'string'},
@@ -38,14 +44,7 @@ create.params = {
items: {
type: 'array',
items: {
type: 'object',
properties: {
type: {type: 'string'},
values: {
type: 'array',
items: {type: 'object'}
}
}
type: 'object'
}
}
},
@@ -67,9 +66,10 @@ set.params = {
properties: {
id: {type: 'string'},
name: {type: 'string', optional: true},
type: {type: 'string'},
key: {type: 'string'},
method: {type: 'string'},
timeout: {type: ['number', 'null'], optional: true},
type: {type: 'string', optional: true},
key: {type: 'string', optional: true},
method: {type: 'string', optional: true},
paramsVector: {
type: 'object',
properties: {
@@ -77,14 +77,7 @@ set.params = {
items: {
type: 'array',
items: {
type: 'object',
properties: {
type: {type: 'string'},
values: {
type: 'array',
items: {type: 'object'}
}
}
type: 'object'
}
}
},

View File

@@ -1,3 +1,9 @@
import { mapToArray } from '../utils'
export function getBondModes () {
return ['balance-slb', 'active-backup', 'lacp']
}
export async function create ({ pool, name, description, pif, mtu = 1500, vlan = 0 }) {
return this.getXapi(pool).createNetwork({
name,
@@ -24,6 +30,81 @@ create.permission = 'admin'
// =================================================================
export async function createBonded ({ pool, name, description, pifs, mtu = 1500, mac, bondMode }) {
return this.getXapi(pool).createBondedNetwork({
name,
description,
pifIds: mapToArray(pifs, pif =>
this.getObject(pif, 'PIF')._xapiId
),
mtu: +mtu,
mac,
bondMode
})
}
createBonded.params = {
pool: { type: 'string' },
name: { type: 'string' },
description: { type: 'string', optional: true },
pifs: {
type: 'array',
items: {
type: 'string'
}
},
mtu: { type: ['integer', 'string'], optional: true },
// RegExp since schema-inspector does not provide a param check based on an enumeration
bondMode: { type: 'string', pattern: new RegExp(`^(${getBondModes().join('|')})$`) }
}
createBonded.resolve = {
pool: ['pool', 'pool', 'administrate']
}
createBonded.permission = 'admin'
createBonded.description = 'Create a bonded network. bondMode can be balance-slb, active-backup or lacp'
// ===================================================================
export async function set ({
network,
name_description: nameDescription,
name_label: nameLabel,
defaultIsLocked,
id
}) {
await this.getXapi(network).setNetworkProperties(network._xapiId, {
nameDescription,
nameLabel,
defaultIsLocked
})
}
set.params = {
id: {
type: 'string'
},
name_label: {
type: 'string',
optional: true
},
name_description: {
type: 'string',
optional: true
},
defaultIsLocked: {
type: 'boolean',
optional: true
}
}
set.resolve = {
network: ['id', 'network', 'administrate']
}
// =================================================================
export async function delete_ ({ network }) {
return this.getXapi(network).deleteNetwork(network._xapiId)
}

View File

@@ -1,5 +1,15 @@
// TODO: too low level, move into host.
import { IPV4_CONFIG_MODES, IPV6_CONFIG_MODES } from '../xapi'
export function getIpv4ConfigurationModes () {
return IPV4_CONFIG_MODES
}
export function getIpv6ConfigurationModes () {
return IPV6_CONFIG_MODES
}
// ===================================================================
// Delete
@@ -66,3 +76,18 @@ reconfigureIp.params = {
reconfigureIp.resolve = {
pif: ['id', 'PIF', 'administrate']
}
// ===================================================================
export async function editPif ({ pif, vlan }) {
await this.getXapi(pif).editPif(pif._xapiId, { vlan })
}
editPif.params = {
id: { type: 'string' },
vlan: { type: ['integer', 'string'] }
}
editPif.resolve = {
pif: ['id', 'PIF', 'administrate']
}

View File

@@ -102,3 +102,24 @@ purgeConfiguration.params = {
}
purgeConfiguration.permission = 'admin'
// ---------------------------------------------------------------------
export async function test ({ id, data }) {
await this.testPlugin(id, data)
}
test.description = 'Test a plugin with its current configuration'
test.params = {
id: {
type: 'string'
},
data: {
optional: true
}
}
test.permission = 'admin'
// ---------------------------------------------------------------------

View File

@@ -1,4 +1,4 @@
import {GenericError} from '../api-errors'
import { format } from 'json-rpc-peer'
// ===================================================================
@@ -35,21 +35,21 @@ set.resolve = {
// -------------------------------------------------------------------
export async function setDefaultSr ({pool, sr}) {
await this.getXapi(pool).setDefaultSr(sr._xapiId)
export async function setDefaultSr ({ sr }) {
await this.hasPermissions(this.user.id, [ [ sr.$pool, 'administrate' ] ])
await this.getXapi(sr).setDefaultSr(sr._xapiId)
}
setDefaultSr.permission = '' // signed in
setDefaultSr.params = {
pool: {
type: 'string'
},
sr: {
type: 'string'
}
}
setDefaultSr.resolve = {
pool: ['pool', 'pool', 'administrate'],
sr: ['sr', 'SR']
}
// -------------------------------------------------------------------
@@ -70,6 +70,23 @@ installPatch.params = {
installPatch.resolve = {
pool: ['pool', 'pool', 'administrate']
}
// -------------------------------------------------------------------
export async function installAllPatches ({ pool }) {
await this.getXapi(pool).installAllPoolPatchesOnAllHosts()
}
installAllPatches.params = {
pool: {
type: 'string'
}
}
installAllPatches.resolve = {
pool: ['pool', 'pool', 'administrate']
}
installAllPatches.description = 'Install automatically all patches for every hosts of a pool'
// -------------------------------------------------------------------
@@ -106,12 +123,7 @@ export {uploadPatch as patch}
// -------------------------------------------------------------------
export async function mergeInto ({ source, target, force }) {
try {
await this.mergeXenPools(source._xapiId, target._xapiId, force)
} catch (e) {
// FIXME: should we expose plain XAPI error messages?
throw new GenericError(e.message)
}
await this.mergeXenPools(source._xapiId, target._xapiId, force)
}
mergeInto.params = {
@@ -130,7 +142,7 @@ mergeInto.resolve = {
export async function getLicenseState ({pool}) {
return this.getXapi(pool).call(
'pool.get_license_state',
pool._xapiId.$ref,
pool._xapiId.$ref
)
}
@@ -143,3 +155,38 @@ getLicenseState.params = {
getLicenseState.resolve = {
pool: ['pool', 'pool', 'administrate']
}
// -------------------------------------------------------------------
async function handleInstallSupplementalPack (req, res, { poolId }) {
const xapi = this.getXapi(poolId)
// Timeout seems to be broken in Node 4.
// See https://github.com/nodejs/node/issues/3319
req.setTimeout(43200000) // 12 hours
req.length = req.headers['content-length']
try {
await xapi.installSupplementalPackOnAllHosts(req)
res.end(format.response(0))
} catch (e) {
res.writeHead(500)
res.end(format.error(0, new Error(e.message)))
}
}
export async function installSupplementalPack ({ pool }) {
return {
$sendTo: await this.registerHttpRequest(handleInstallSupplementalPack, { poolId: pool.id })
}
}
installSupplementalPack.description = 'installs supplemental pack from ISO file on all hosts'
installSupplementalPack.params = {
pool: { type: 'string' }
}
installSupplementalPack.resolve = {
pool: ['pool', 'pool', 'admin']
}

View File

@@ -1,6 +1,6 @@
import {
Unauthorized
} from '../api-errors'
unauthorized
} from 'xo-common/api-errors'
// ===================================================================
@@ -51,11 +51,12 @@ delete_.params = {
// -------------------------------------------------------------------
export function set ({ id, name, subjects, objects, limits }) {
export function set ({ id, name, subjects, objects, ipPools, limits }) {
return this.updateResourceSet(id, {
limits,
name,
objects,
ipPools,
subjects
})
}
@@ -84,6 +85,13 @@ set.params = {
},
optional: true
},
ipPools: {
type: 'array',
items: {
type: 'string'
},
optional: true
},
limits: {
type: 'object',
optional: true
@@ -109,12 +117,14 @@ get.params = {
export async function getAll () {
const { user } = this
if (!user) {
throw new Unauthorized()
throw unauthorized()
}
return this.getAllResourceSets(user.id)
}
getAll.description = 'Get the list of all existing resource set'
// -------------------------------------------------------------------
export function addObject ({ id, object }) {
@@ -227,3 +237,4 @@ export function recomputeAllLimits () {
}
recomputeAllLimits.permission = 'admin'
recomputeAllLimits.description = 'Recompute manually the current resource set usage'

View File

@@ -1,3 +1,5 @@
export async function getAll () {
return /* await */ this.getRoles()
}
getAll.description = 'Returns the list of all existing roles'

View File

@@ -4,13 +4,14 @@ import {
} from '../utils'
export async function add ({
label,
host,
username,
password,
readOnly,
autoConnect = true
}) {
const server = await this.registerXenServer({host, username, password, readOnly})
const server = await this.registerXenServer({label, host, username, password, readOnly})
if (autoConnect) {
// Connect asynchronously, ignore any errors.
@@ -25,6 +26,10 @@ add.description = 'register a new Xen server'
add.permission = 'admin'
add.params = {
label: {
optional: true,
type: 'string'
},
host: {
type: 'string'
},
@@ -70,8 +75,8 @@ getAll.permission = 'admin'
// -------------------------------------------------------------------
export async function set ({id, host, username, password, readOnly}) {
await this.updateXenServer(id, {host, username, password, readOnly})
export async function set ({id, label, host, username, password, readOnly}) {
await this.updateXenServer(id, {label, host, username, password, readOnly})
}
set.description = 'changes the properties of a Xen server'
@@ -82,6 +87,10 @@ set.params = {
id: {
type: 'string'
},
label: {
type: 'string',
optional: true
},
host: {
type: 'string',
optional: true

View File

@@ -1,21 +1,18 @@
import {deprecate} from 'util'
import {InvalidCredential, AlreadyAuthenticated} from '../api-errors'
import { getUserPublicProperties } from '../utils'
import {invalidCredentials} from 'xo-common/api-errors'
// ===================================================================
export async function signIn (credentials) {
if (this.session.has('user_id')) {
throw new AlreadyAuthenticated()
}
const user = await this.authenticateUser(credentials)
if (!user) {
throw new InvalidCredential()
throw invalidCredentials()
}
this.session.set('user_id', user.id)
return this.getUserPublicProperties(user)
return getUserPublicProperties(user)
}
signIn.description = 'sign in'
@@ -55,7 +52,7 @@ export async function getUser () {
return userId === undefined
? null
: this.getUserPublicProperties(await this.getUser(userId))
: getUserPublicProperties(await this.getUser(userId))
}
getUser.description = 'return the currently connected user'

View File

@@ -34,7 +34,7 @@ set.resolve = {
// -------------------------------------------------------------------
export async function scan ({SR}) {
export async function scan ({ SR }) {
await this.getXapi(SR).call('SR.scan', SR._xapiRef)
}
@@ -50,7 +50,15 @@ scan.resolve = {
// TODO: find a way to call this "delete" and not destroy
export async function destroy ({ sr }) {
await this.getXapi(sr).destroySr(sr._xapiId)
const xapi = this.getXapi(sr)
if (sr.SR_type === 'xosan') {
const config = xapi.xo.getData(sr, 'xosan_config')
// we simply forget because the hosted disks are been destroyed with the VMs
await xapi.forgetSr(sr._xapiId)
await Promise.all(config.nodes.map(node => xapi.deleteVm(node.vm.id, true)))
return xapi.deleteNetwork(config.network)
}
await xapi.destroySr(sr._xapiId)
}
destroy.params = {
@@ -63,7 +71,7 @@ destroy.resolve = {
// -------------------------------------------------------------------
export async function forget ({SR}) {
export async function forget ({ SR }) {
await this.getXapi(SR).forgetSr(SR._xapiId)
}
@@ -77,7 +85,7 @@ forget.resolve = {
// -------------------------------------------------------------------
export async function connectAllPbds ({SR}) {
export async function connectAllPbds ({ SR }) {
await this.getXapi(SR).connectAllSrPbds(SR._xapiId)
}
@@ -91,7 +99,7 @@ connectAllPbds.resolve = {
// -------------------------------------------------------------------
export async function disconnectAllPbds ({SR}) {
export async function disconnectAllPbds ({ SR }) {
await this.getXapi(SR).disconnectAllSrPbds(SR._xapiId)
}
@@ -121,6 +129,7 @@ export async function createIso ({
deviceConfig.legacy_mode = 'true'
} else if (type === 'smb') {
path = path.replace(/\\/g, '/')
deviceConfig.type = 'cifs'
deviceConfig.username = user
deviceConfig.cifspassword = password
}
@@ -136,7 +145,7 @@ export async function createIso ({
nameDescription,
'iso', // SR type ISO
'iso', // SR content type ISO
true,
type !== 'local',
{}
)
@@ -213,6 +222,51 @@ createNfs.resolve = {
host: ['host', 'host', 'administrate']
}
// -------------------------------------------------------------------
// HBA SR
// This functions creates an HBA SR
export async function createHba ({
host,
nameLabel,
nameDescription,
scsiId
}) {
const xapi = this.getXapi(host)
const deviceConfig = {
scsiId
}
const srRef = await xapi.call(
'SR.create',
host._xapiRef,
deviceConfig,
'0',
nameLabel,
nameDescription,
'lvmoohba', // SR LVM over HBA
'user', // recommended by Citrix
true,
{}
)
const sr = await xapi.call('SR.get_record', srRef)
return sr.uuid
}
createHba.params = {
host: { type: 'string' },
nameLabel: { type: 'string' },
nameDescription: { type: 'string' },
scsiId: { type: 'string' }
}
createHba.resolve = {
host: ['host', 'host', 'administrate']
}
// -------------------------------------------------------------------
// Local LVM SR
@@ -312,6 +366,55 @@ probeNfs.resolve = {
host: ['host', 'host', 'administrate']
}
// -------------------------------------------------------------------
// This function helps to detect all HBA devices on the host
export async function probeHba ({
host
}) {
const xapi = this.getXapi(host)
let xml
try {
await xapi.call(
'SR.probe',
host._xapiRef,
'type',
{}
)
throw new Error('the call above should have thrown an error')
} catch (error) {
if (error.code !== 'SR_BACKEND_FAILURE_107') {
throw error
}
xml = parseXml(error.params[2])
}
const hbaDevices = []
forEach(ensureArray(xml.Devlist.BlockDevice), hbaDevice => {
hbaDevices.push({
hba: hbaDevice.hba.trim(),
path: hbaDevice.path.trim(),
scsciId: hbaDevice.SCSIid.trim(),
size: hbaDevice.size.trim(),
vendor: hbaDevice.vendor.trim()
})
})
return hbaDevices
}
probeHba.params = {
host: { type: 'string' }
}
probeHba.resolve = {
host: ['host', 'host', 'administrate']
}
// -------------------------------------------------------------------
// ISCSI SR
@@ -571,7 +674,7 @@ export async function probeIscsiExists ({
const srs = []
forEach(ensureArray(xml['SRlist'].SR), sr => {
// get the UUID of SR connected to this LUN
srs.push({uuid: sr.UUID.trim()})
srs.push({ uuid: sr.UUID.trim() })
})
return srs
@@ -613,7 +716,7 @@ export async function probeNfsExists ({
forEach(ensureArray(xml['SRlist'].SR), sr => {
// get the UUID of SR connected to this LUN
srs.push({uuid: sr.UUID.trim()})
srs.push({ uuid: sr.UUID.trim() })
})
return srs

67
src/api/system.js Normal file
View File

@@ -0,0 +1,67 @@
import forEach from 'lodash/forEach'
import getKeys from 'lodash/keys'
import moment from 'moment-timezone'
import { noSuchObject } from 'xo-common/api-errors'
import { version as xoServerVersion } from '../../package.json'
// ===================================================================
export function getMethodsInfo () {
const methods = {}
forEach(this.apiMethods, (method, name) => {
methods[name] = {
description: method.description,
params: method.params || {},
permission: method.permission
}
})
return methods
}
getMethodsInfo.description = 'returns the signatures of all available API methods'
// -------------------------------------------------------------------
export const getServerTimezone = (tz => () => tz)(moment.tz.guess())
getServerTimezone.description = 'return the timezone server'
// -------------------------------------------------------------------
export const getServerVersion = () => xoServerVersion
getServerVersion.description = 'return the version of xo-server'
// -------------------------------------------------------------------
export const getVersion = () => '0.1'
getVersion.description = 'API version (unstable)'
// -------------------------------------------------------------------
export function listMethods () {
return getKeys(this.apiMethods)
}
listMethods.description = 'returns the name of all available API methods'
// -------------------------------------------------------------------
export function methodSignature ({method: name}) {
const method = this.apiMethods[name]
if (!method) {
throw noSuchObject()
}
// Return an array for compatibility with XML-RPC.
return [
// XML-RPC require the name of the method.
{
name,
description: method.description,
params: method.params || {},
permission: method.permission
}
]
}
methodSignature.description = 'returns the signature of an API method'

View File

@@ -36,9 +36,9 @@ hasPermission.params = {
export function wait ({duration, returnValue}) {
return new Promise(resolve => {
setTimeout(+duration, () => {
setTimeout(() => {
resolve(returnValue)
})
}, +duration)
})
}

View File

@@ -1,10 +1,10 @@
import {InvalidParameters} from '../api-errors'
import { mapToArray } from '../utils'
import {invalidParameters} from 'xo-common/api-errors'
import { getUserPublicProperties, mapToArray } from '../utils'
// ===================================================================
export async function create ({email, password, permission}) {
return (await this.createUser(email, {password, permission})).id
return (await this.createUser({email, password, permission})).id
}
create.description = 'creates a new user'
@@ -22,7 +22,7 @@ create.params = {
// Deletes an existing user.
async function delete_ ({id}) {
if (id === this.session.get('user_id')) {
throw new InvalidParameters('a user cannot delete itself')
throw invalidParameters('a user cannot delete itself')
}
await this.deleteUser(id)
@@ -48,7 +48,7 @@ export async function getAll () {
const users = await this.getAllUsers()
// Filters out private properties.
return mapToArray(users, this.getUserPublicProperties)
return mapToArray(users, getUserPublicProperties)
}
getAll.description = 'returns all the existing users'
@@ -58,15 +58,21 @@ getAll.permission = 'admin'
// -------------------------------------------------------------------
export async function set ({id, email, password, permission, preferences}) {
if (permission && id === this.session.get('user_id')) {
throw new InvalidParameters('a user cannot change it\'s own permission')
const isAdmin = this.user && this.user.permission === 'admin'
if (isAdmin) {
if (permission && id === this.session.get('user_id')) {
throw invalidParameters('a user cannot change its own permission')
}
} else if (email || password || permission) {
throw invalidParameters('this properties can only changed by an administrator')
}
await this.updateUser(id, {email, password, permission, preferences})
}
set.description = 'changes the properties of an existing user'
set.permission = 'admin'
set.permission = ''
set.params = {
id: { type: 'string' },

View File

@@ -3,9 +3,9 @@
{coroutine: $coroutine} = require 'bluebird'
{format} = require 'json-rpc-peer'
{InvalidParameters} = require '../api-errors'
{invalidParameters} = require 'xo-common/api-errors'
{isArray: $isArray, parseSize} = require '../utils'
{JsonRpcError} = require '../api-errors'
{JsonRpcError} = require 'json-rpc-peer'
#=====================================================================
@@ -38,7 +38,7 @@ set = $coroutine (params) ->
size = parseSize(params.size)
if size < vdi.size
throw new InvalidParameters(
throw invalidParameters(
"cannot set new size (#{size}) below the current size (#{vdi.size})"
)
yield xapi.resizeVdi(ref, size)

View File

@@ -1,5 +1,19 @@
import {
diffItems,
noop,
pCatch
} from '../utils'
// ===================================================================
// TODO: move into vm and rename to removeInterface
async function delete_ ({vif}) {
this.allocIpAddresses(
vif.id,
null,
vif.allowedIpv4Addresses.concat(vif.allowedIpv6Addresses)
)::pCatch(noop)
await this.getXapi(vif).deleteVif(vif._xapiId)
}
export {delete_ as delete}
@@ -13,10 +27,11 @@ delete_.resolve = {
}
// -------------------------------------------------------------------
// TODO: move into vm and rename to disconnectInterface
export async function disconnect ({vif}) {
// TODO: check if VIF is attached before
await this.getXapi(vif).call('VIF.unplug_force', vif._xapiRef)
await this.getXapi(vif).disconnectVif(vif._xapiId)
}
disconnect.params = {
@@ -31,7 +46,7 @@ disconnect.resolve = {
// TODO: move into vm and rename to connectInterface
export async function connect ({vif}) {
// TODO: check if VIF is attached before
await this.getXapi(vif).call('VIF.plug', vif._xapiRef)
await this.getXapi(vif).connectVif(vif._xapiId)
}
connect.params = {
@@ -44,28 +59,83 @@ connect.resolve = {
// -------------------------------------------------------------------
export const set = ({ vif, allowedIpv4Addresses, allowedIpv6Addresses }) => (
this.getXapi(vif._xapiId).editVif({
export async function set ({
vif,
network,
mac,
allowedIpv4Addresses,
allowedIpv6Addresses,
attached
}) {
const oldIpAddresses = vif.allowedIpv4Addresses.concat(vif.allowedIpv6Addresses)
const newIpAddresses = []
{
const { push } = newIpAddresses
push.apply(newIpAddresses, allowedIpv4Addresses || vif.allowedIpv4Addresses)
push.apply(newIpAddresses, allowedIpv6Addresses || vif.allowedIpv6Addresses)
}
if (network || mac) {
const xapi = this.getXapi(vif)
const vm = xapi.getObject(vif.$VM)
mac == null && (mac = vif.MAC)
network = xapi.getObject((network && network.id) || vif.$network)
attached == null && (attached = vif.attached)
await this.allocIpAddresses(vif.id, null, oldIpAddresses)
await xapi.deleteVif(vif._xapiId)
// create new VIF with new parameters
const newVif = await xapi.createVif(vm.$id, network.$id, {
mac,
currently_attached: attached,
ipv4_allowed: newIpAddresses
})
await this.allocIpAddresses(newVif.$id, newIpAddresses)
return
}
const [ addAddresses, removeAddresses ] = diffItems(
newIpAddresses,
oldIpAddresses
)
await this.allocIpAddresses(
vif.id,
addAddresses,
removeAddresses
)
return this.getXapi(vif).editVif(vif._xapiId, {
ipv4Allowed: allowedIpv4Addresses,
ipv6Allowed: allowedIpv6Addresses
})
)
}
set.params = {
id: { type: 'string' },
network: { type: 'string', optional: true },
mac: { type: 'string', optional: true },
allowedIpv4Addresses: {
type: 'array',
items: {
type: 'string'
}
},
optional: true
},
allowedIpv6Addresses: {
type: 'array',
items: {
type: 'string'
}
}
},
optional: true
},
attached: { type: 'boolean', optional: true }
}
set.resolve = {
vif: ['id', 'VIF', 'operate']
vif: ['id', 'VIF', 'operate'],
network: ['network', 'network', 'operate']
}

View File

@@ -3,23 +3,27 @@ $debug = (require 'debug') 'xo:api:vm'
$filter = require 'lodash/filter'
$findIndex = require 'lodash/findIndex'
$findWhere = require 'lodash/find'
concat = require 'lodash/concat'
endsWith = require 'lodash/endsWith'
escapeStringRegexp = require 'escape-string-regexp'
eventToPromise = require 'event-to-promise'
merge = require 'lodash/merge'
sortBy = require 'lodash/sortBy'
startsWith = require 'lodash/startsWith'
{coroutine: $coroutine} = require 'bluebird'
{format} = require 'json-rpc-peer'
{
GenericError,
Unauthorized
} = require('../api-errors')
forbiddenOperation,
invalidParameters,
unauthorized
} = require('xo-common/api-errors')
{
forEach,
formatXml: $js2xml,
isArray: $isArray,
map,
mapFilter,
mapToArray,
noop,
parseSize,
@@ -47,7 +51,7 @@ checkPermissionOnSrs = (vm, permission = 'operate') -> (
)
return @hasPermissions(@session.get('user_id'), permissions).then((success) => (
throw new Unauthorized() unless success
throw unauthorized() unless success
))
)
@@ -60,6 +64,11 @@ extract = (obj, prop) ->
# TODO: Implement ACLs
create = $coroutine (params) ->
{ user } = this
resourceSet = extract(params, 'resourceSet')
if not resourceSet and user.permission isnt 'admin'
throw unauthorized()
template = extract(params, 'template')
params.template = template._xapiId
@@ -80,7 +89,7 @@ create = $coroutine (params) ->
vbd.type is 'Disk' and
(vdi = vbd.$VDI)
)
vdiSizesByDevice[vbd.device] = +vdi.virtual_size
vdiSizesByDevice[vbd.userdevice] = +vdi.virtual_size
return
)
@@ -94,7 +103,7 @@ create = $coroutine (params) ->
limits.disk += size
return $assign({}, vdi, {
device: vdi.device ? vdi.position,
device: vdi.userdevice ? vdi.device ? vdi.position,
size,
SR: sr._xapiId,
type: vdi.type
@@ -102,10 +111,10 @@ create = $coroutine (params) ->
)
existingVdis = extract(params, 'existingDisks')
params.existingVdis = existingVdis and map(existingVdis, (vdi, device) =>
params.existingVdis = existingVdis and map(existingVdis, (vdi, userdevice) =>
if vdi.size?
size = parseSize(vdi.size)
vdiSizesByDevice[device] = size
vdiSizesByDevice[userdevice] = size
if vdi.$SR
sr = @getObject(vdi.$SR)
@@ -128,33 +137,55 @@ create = $coroutine (params) ->
return {
mac: vif.mac
network: network._xapiId
ipv4_allowed: vif.allowedIpv4Addresses
ipv6_allowed: vif.allowedIpv6Addresses
}
)
installation = extract(params, 'installation')
params.installRepository = installation && installation.repository
resourceSet = extract(params, 'resourceSet')
checkLimits = null
xapiVm = yield xapi.createVm(template._xapiId, params)
vm = xapi.xo.addObject(xapiVm)
{ user } = this
if resourceSet
yield this.checkResourceSetConstraints(resourceSet, user.id, objectIds)
yield this.allocateLimitsInResourceSet(limits, resourceSet)
else unless user.permission is 'admin'
throw new Unauthorized()
checkLimits = $coroutine (limits2) =>
yield this.allocateLimitsInResourceSet(limits, resourceSet)
yield this.allocateLimitsInResourceSet(limits2, resourceSet)
xapiVm = yield xapi.createVm(template._xapiId, params, checkLimits)
vm = xapi.xo.addObject(xapiVm)
if resourceSet
yield Promise.all([
@addAcl(user.id, vm.id, 'admin')
if params.share
$resourceSet = yield @getResourceSet(resourceSet)
Promise.all(map($resourceSet.subjects, (subjectId) => @addAcl(subjectId, vm.id, 'admin')))
else
@addAcl(user.id, vm.id, 'admin')
xapi.xo.setData(xapiVm.$id, 'resourceSet', resourceSet)
])
for vifId in vm.VIFs
vif = @getObject(vifId, 'VIF')
yield this.allocIpAddresses(vifId, concat(vif.allowedIpv4Addresses, vif.allowedIpv6Addresses)).catch(() =>
xapi.deleteVif(vif._xapiId)
)
if params.bootAfterCreate
pCatch.call(xapi.startVm(vm._xapiId), noop)
return vm.id
create.params = {
affinityHost: { type: 'string', optional: true }
bootAfterCreate: {
type: 'boolean'
optional: true
}
cloudConfig: {
type: 'string'
optional: true
@@ -191,6 +222,12 @@ create.params = {
# PV Args
pv_args: { type: 'string', optional: true }
share: {
type: 'boolean',
optional: true
}
# TODO: add the install repository!
# VBD.insert/eject
# Also for the console!
@@ -200,6 +237,7 @@ create.params = {
# Virtual interfaces to create for the new VM.
VIFs: {
optional: true
type: 'array'
items: {
type: 'object'
@@ -211,6 +249,18 @@ create.params = {
optional: true # Auto-generated per default.
type: 'string'
}
allowedIpv4Addresses: {
optional: true
type: 'array'
items: { type: 'string' }
}
allowedIpv6Addresses: {
optional: true
type: 'array'
items: { type: 'string' }
}
}
}
}
@@ -253,19 +303,43 @@ create.params = {
}
create.resolve = {
template: ['template', 'VM-template', 'administrate'],
template: ['template', 'VM-template', ''],
}
exports.create = create
#---------------------------------------------------------------------
delete_ = ({vm, delete_disks: deleteDisks}) ->
delete_ = $coroutine ({vm, delete_disks: deleteDisks = false }) ->
cpus = vm.CPUs.number
memory = vm.memory.size
xapi = @getXapi(vm)
@getAllAcls().then((acls) =>
Promise.all(mapFilter(acls, (acl) =>
if (acl.object == vm.id)
return pCatch.call(
@removeAcl(acl.subject, acl.object, acl.action),
noop
)
))
)
# Update IP pools
yield Promise.all(map(vm.VIFs, (vifId) =>
vif = xapi.getObject(vifId)
return pCatch.call(
this.allocIpAddresses(
vifId,
null,
concat(vif.ipv4_allowed, vif.ipv6_allowed)
),
noop
)
))
# Update resource sets
resourceSet = xapi.xo.getData(vm._xapiId, 'resourceSet')
if resourceSet?
disk = 0
@@ -282,10 +356,16 @@ delete_ = ({vm, delete_disks: deleteDisks}) ->
return
)
pCatch.call(@releaseLimitsInResourceSet(
@computeVmResourcesUsage(vm),
resourceSet
), noop)
resourceSetUsage = @computeVmResourcesUsage(vm)
ipPoolsUsage = yield @computeVmIpPoolsUsage(vm)
pCatch.call(
@releaseLimitsInResourceSet(
merge(resourceSetUsage, ipPoolsUsage),
resourceSet
),
noop
)
return xapi.deleteVm(vm._xapiId, deleteDisks)
@@ -368,7 +448,7 @@ migrate = $coroutine ({
])
unless yield @hasPermissions(@session.get('user_id'), permissions)
throw new Unauthorized()
throw unauthorized()
yield @getXapi(vm).migrateVm(vm._xapiId, @getXapi(host), host._xapiId, {
migrationNetworkId: migrationNetwork?._xapiId
@@ -409,14 +489,18 @@ set = (params) ->
VM = extract(params, 'VM')
xapi = @getXapi(VM)
return xapi.editVm(VM._xapiId, params, (limits, vm) =>
return xapi.editVm(VM._xapiId, params, $coroutine (limits, vm) =>
resourceSet = xapi.xo.getData(vm, 'resourceSet')
if (resourceSet)
return @allocateLimitsInResourceSet(limits, resourceSet)
try
return yield @allocateLimitsInResourceSet(limits, resourceSet)
catch error
# if the resource set no longer exist, behave as if the VM is free
throw error unless noSuchObject.is(error)
if (limits.cpuWeight && this.user.permission != 'admin')
throw new Unauthorized()
throw unauthorized()
)
set.params = {
@@ -458,10 +542,12 @@ set.params = {
cpuWeight: { type: ['integer', 'null'], optional: true }
cpuCap: { type: ['integer', 'null'], optional: true }
affinityHost: { type: ['string', 'null'], optional: true }
}
set.resolve = {
VM: ['id', ['VM', 'VM-snapshot'], 'administrate']
VM: ['id', ['VM', 'VM-snapshot', 'VM-template'], 'administrate']
}
exports.set = set
@@ -549,7 +635,7 @@ copy.params = {
}
copy.resolve = {
vm: [ 'vm', 'VM', 'administrate' ]
vm: [ 'vm', ['VM', 'VM-snapshot'], 'administrate' ]
sr: [ 'sr', 'SR', 'operate' ]
}
@@ -562,7 +648,7 @@ convertToTemplate = $coroutine ({vm}) ->
unless yield @hasPermissions(@session.get('user_id'), [
[ vm.$pool, 'administrate' ]
])
throw new Unauthorized()
throw unauthorized()
yield @getXapi(vm).call 'VM.set_is_a_template', vm._xapiRef, true
@@ -757,10 +843,10 @@ exports.rollingBackup = rollingBackup
rollingDrCopy = ({vm, pool, sr, tag, depth}) ->
unless sr
unless pool
throw new InvalidParameters('either pool or sr param should be specified')
throw invalidParameters('either pool or sr param should be specified')
if vm.$pool is pool.id
throw new GenericError('Disaster Recovery attempts to copy on the same pool')
throw forbiddenOperation('Disaster Recovery attempts to copy on the same pool')
sr = @getObject(pool.default_SR, 'SR')
@@ -818,8 +904,7 @@ stop = $coroutine ({vm, force}) ->
yield xapi.call 'VM.clean_shutdown', vm._xapiRef
catch error
if error.code is 'VM_MISSING_PV_DRIVERS' or error.code is 'VM_LACKS_FEATURE_SHUTDOWN'
# TODO: Improve reporting: this message is unclear.
@throw 'INVALID_PARAMS'
throw invalidParameters('clean shutdown requires PV drivers')
else
throw error
@@ -854,18 +939,11 @@ exports.suspend = suspend
#---------------------------------------------------------------------
resume = $coroutine ({vm, force}) ->
# FIXME: WTF this is?
if not force
force = true
yield @getXapi(vm).call 'VM.resume', vm._xapiRef, false, force
return true
resume = ({vm}) ->
return @getXapi(vm).resumeVm(vm._xapiId)
resume.params = {
id: { type: 'string' }
force: { type: 'boolean', optional: true }
}
resume.resolve = {
@@ -875,15 +953,12 @@ exports.resume = resume
#---------------------------------------------------------------------
# revert a snapshot to its parent VM
revert = $coroutine ({snapshot}) ->
# Attempts a revert from this snapshot to its parent VM
yield @getXapi(snapshot).call 'VM.revert', snapshot._xapiRef
return true
revert = ({snapshot, snapshotBefore}) ->
return @getXapi(snapshot).revertVm(snapshot._xapiId, snapshotBefore)
revert.params = {
id: { type: 'string' }
id: { type: 'string' },
snapshotBefore: { type: 'boolean', optional: true }
}
revert.resolve = {
@@ -943,30 +1018,33 @@ exports.export = export_;
#---------------------------------------------------------------------
handleVmImport = $coroutine (req, res, { xapi, srId }) ->
handleVmImport = $coroutine (req, res, { data, srId, type, xapi }) ->
# Timeout seems to be broken in Node 4.
# See https://github.com/nodejs/node/issues/3319
req.setTimeout(43200000) # 12 hours
try
vm = yield xapi.importVm(req, { srId })
vm = yield xapi.importVm(req, { data, srId, type })
res.end(format.response(0, vm.$id))
catch e
res.writeHead(500)
res.end(format.error(0, new GenericError(e.message)))
res.end(format.error(0, new Error(e.message)))
return
# TODO: "sr_id" can be passed in URL to target a specific SR
import_ = $coroutine ({host, sr}) ->
import_ = $coroutine ({ data, host, sr, type }) ->
if data and type is 'xva'
throw invalidParameters('unsupported field data for the file type xva')
if not sr
if not host
throw new InvalidParameters('you must provide either host or SR')
throw invalidParameters('you must provide either host or SR')
xapi = @getXapi(host)
sr = xapi.pool.$default_SR
if not sr
throw new InvalidParameters('there is not default SR in this pool')
throw invalidParameters('there is not default SR in this pool')
# FIXME: must have administrate permission on default SR.
else
@@ -974,13 +1052,45 @@ import_ = $coroutine ({host, sr}) ->
return {
$sendTo: yield @registerHttpRequest(handleVmImport, {
data,
srId: sr._xapiId,
type,
xapi
})
}
import_.params = {
data: {
type: 'object',
optional: true,
properties: {
descriptionLabel: { type: 'string' },
disks: {
type: 'array',
items: {
type: 'object',
properties: {
capacity: { type: 'integer' },
descriptionLabel: { type: 'string' },
nameLabel: { type: 'string' },
path: { type: 'string' },
position: { type: 'integer' }
}
},
optional: true
},
memory: { type: 'integer' },
nameLabel: { type: 'string' },
nCpus: { type: 'integer' },
networks: {
type: 'array',
items: { type: 'string' },
optional: true
},
}
},
host: { type: 'string', optional: true },
type: { type: 'string', optional: true },
sr: { type: 'string', optional: true }
}
@@ -1022,21 +1132,47 @@ exports.attachDisk = attachDisk
#---------------------------------------------------------------------
# TODO: implement resource sets
createInterface = $coroutine ({vm, network, position, mtu, mac}) ->
createInterface = $coroutine ({
vm,
network,
position,
mac,
allowedIpv4Addresses,
allowedIpv6Addresses
}) ->
vif = yield @getXapi(vm).createVif(vm._xapiId, network._xapiId, {
mac,
mtu,
position
position,
ipv4_allowed: allowedIpv4Addresses,
ipv6_allowed: allowedIpv6Addresses
})
{ push } = ipAddresses = []
push.apply(ipAddresses, allowedIpv4Addresses) if allowedIpv4Addresses
push.apply(ipAddresses, allowedIpv6Addresses) if allowedIpv6Addresses
pCatch.call(@allocIpAddresses(vif.$id, allo), noop) if ipAddresses.length
return vif.$id
createInterface.params = {
vm: { type: 'string' }
network: { type: 'string' }
position: { type: 'string', optional: true }
mtu: { type: 'string', optional: true }
position: { type: ['integer', 'string'], optional: true }
mac: { type: 'string', optional: true }
allowedIpv4Addresses: {
type: 'array',
items: {
type: 'string'
},
optional: true
},
allowedIpv6Addresses: {
type: 'array',
items: {
type: 'string'
},
optional: true
}
}
createInterface.resolve = {
@@ -1115,10 +1251,7 @@ setBootOrder = $coroutine ({vm, order}) ->
yield xapi.call 'VM.set_HVM_boot_params', vm._xapiRef, order
return true
@throw(
'INVALID_PARAMS'
'You can only set the boot order on a HVM guest'
)
throw invalidParameters('You can only set the boot order on a HVM guest')
setBootOrder.params = {
vm: { type: 'string' },

View File

@@ -1,5 +1,50 @@
import { streamToBuffer } from '../utils'
// ===================================================================
// Run the XO instance's database clean-up routine.
export function clean () {
  const result = this.clean()
  return result
}
clean.permission = 'admin'
// -------------------------------------------------------------------
// Expose the XO configuration as a downloadable JSON file: register a
// temporary HTTP endpoint and return its URL as `$getFrom`.
export async function exportConfig () {
  const handler = (req, res) => {
    res.writeHead(200, 'OK', {
      'content-disposition': 'attachment'
    })
    return this.exportConfig()
  }
  const url = await this.registerHttpRequest(handler, undefined, { suffix: '/config.json' })
  return { $getFrom: url }
}
exportConfig.permission = 'admin'
// -------------------------------------------------------------------
// Return every object known to this XO instance.
export function getAllObjects () {
  const objects = this.getObjects()
  return objects
}
getAllObjects.permission = ''
getAllObjects.description = 'Returns all XO objects'
// -------------------------------------------------------------------
// Expose an HTTP upload endpoint accepting a JSON configuration; the parsed
// body is handed to the XO instance's importConfig().
export async function importConfig () {
  const handler = async (req, res) => {
    const body = await streamToBuffer(req)
    await this.importConfig(JSON.parse(body))
    res.end('config successfully imported')
  }
  return {
    $sendTo: await this.registerHttpRequest(handler)
  }
}
importConfig.permission = 'admin'

478
src/api/xosan.js Normal file
View File

@@ -0,0 +1,478 @@
import arp from 'arp-a'
import createLogger from 'debug'
import defer from 'golike-defer'
import execa from 'execa'
import fromPairs from 'lodash/fromPairs'
import fs from 'fs-promise'
import map from 'lodash/map'
import splitLines from 'split-lines'
import {
filter,
includes
} from 'lodash'
import {
noop,
pCatch,
pFromCallback,
splitFirst
} from '../utils'
// Logger scoped to the XOSAN subsystem.
const debug = createLogger('xo:xosan')

// Name of the SSH key pair files (path relative to the process working directory).
const SSH_KEY_FILE = 'id_rsa_xosan'
// Private /24 subnet used for the dedicated XOSAN storage network.
const NETWORK_PREFIX = '172.31.100.'
// Space reserved for the XOSAN VM's system disk (10 GiB).
const XOSAN_VM_SYSTEM_DISK_SIZE = 10 * 1024 * 1024 * 1024
// Fraction of the SR free space allotted to the data disk (leaves headroom for VDI overhead).
const XOSAN_DATA_DISK_USEAGE_RATIO = 0.99
const XOSAN_MAX_DISK_SIZE = 2093050 * 1024 * 1024 // a bit under 2To
// Pools with a createSR() currently in progress (pool id -> true); guards reentrancy.
const CURRENTLY_CREATING_SRS = {}
// Return the parsed output of `gluster volume info xosan` for the given SR,
// augmented with per-brick entries (`info.Bricks`) carrying the brick config
// string, its IP and its xapi VM object, plus — when a local ARP entry
// matches a brick — that brick's MAC address. Returns null when the SR has no
// xosan_config data (i.e. it is not a configured XOSAN SR).
export async function getVolumeInfo ({ sr }) {
  const xapi = this.getXapi(sr)
  // Maps a XOSAN node VM's IP to its xapi VM object.
  const giantIPtoVMDict = {}
  const data = xapi.xo.getData(sr, 'xosan_config')
  if (!data || !data.nodes) {
    return null
  }
  const nodes = data.nodes
  nodes.forEach(conf => {
    giantIPtoVMDict[conf.vm.ip] = xapi.getObject(conf.vm.id)
  })
  // Any node can answer the query; use the first one.
  const oneHostAndVm = nodes[0]
  const resultCmd = await remoteSsh(xapi, {
    host: xapi.getObject(oneHostAndVm.host),
    address: oneHostAndVm.vm.ip
  }, 'gluster volume info xosan')
  const result = resultCmd['stdout']
  /*
  Volume Name: xosan
  Type: Disperse
  Volume ID: 1d4d0e57-8b6b-43f9-9d40-c48be1df7548
  Status: Started
  Snapshot Count: 0
  Number of Bricks: 1 x (2 + 1) = 3
  Transport-type: tcp
  Bricks:
  Brick1: 192.168.0.201:/bricks/brick1/xosan1
  Brick2: 192.168.0.202:/bricks/brick1/xosan1
  Brick3: 192.168.0.203:/bricks/brick1/xosan1
  Options Reconfigured:
  client.event-threads: 16
  server.event-threads: 16
  performance.client-io-threads: on
  nfs.disable: on
  performance.readdir-ahead: on
  transport.address-family: inet
  features.shard: on
  features.shard-block-size: 64MB
  network.remote-dio: enable
  cluster.eager-lock: enable
  performance.io-cache: off
  performance.read-ahead: off
  performance.quick-read: off
  performance.stat-prefetch: on
  performance.strict-write-ordering: off
  cluster.server-quorum-type: server
  cluster.quorum-type: auto
  */
  // Turn the "key: value" lines above into a plain object (split on the FIRST
  // colon only, so brick values like "ip:/path" stay intact).
  const info = fromPairs(
    splitLines(result.trim()).map(line =>
      splitFirst(line, ':').map(val => val.trim())
    )
  )
  // 'BrickN' -> N ('Brick'.length === 5); used to sort bricks numerically.
  const getNumber = item => +item.substr(5)
  const brickKeys = filter(Object.keys(info), key => key.match(/^Brick[1-9]/)).sort((i1, i2) => getNumber(i1) - getNumber(i2))
  // expected brickKeys : [ 'Brick1', 'Brick2', 'Brick3' ]
  info['Bricks'] = brickKeys.map(key => {
    const ip = info[key].split(':')[0]
    return { config: info[key], ip: ip, vm: giantIPtoVMDict[ip] }
  })
  // NOTE(review): only the first ARP table entry delivered by arp.table() is
  // considered here — confirm this single-entry behavior is intended.
  const entry = await pFromCallback(cb => arp.table(cb))
  if (entry) {
    const brick = info['Bricks'].find(element => element.config.split(':')[0] === entry.ip)
    if (brick) {
      brick.mac = entry.mac
    }
  }
  return info
}
getVolumeInfo.description = 'info on gluster volume'
getVolumeInfo.permission = 'admin'
getVolumeInfo.params = {
  sr: {
    type: 'string'
  }
}
getVolumeInfo.resolve = {
  sr: ['sr', 'SR', 'administrate']
}
// Round `value` down to the nearest multiple of 2048 — used to align disk
// sizes before resizing VDIs.
function floor2048 (value) {
  const chunks = Math.floor(value / 2048)
  return chunks * 2048
}
// Copy `originalVm` onto the SR carried by `params`; return both the new VM
// and the untouched params so later steps can keep them paired.
async function copyVm (xapi, originalVm, params) {
  const vm = await xapi.copyVm(originalVm, params.sr)
  return { vm, params }
}
// Configure one XOSAN node VM: move its first VIF onto the XOSAN network,
// push its xenstore parameters, optionally grow its data disk to fill the SR,
// boot it and wait until its assigned IP appears in the guest metrics.
// Returns { address, host, vm } describing the booted node.
async function prepareGlusterVm (xapi, vmAndParam, xosanNetwork, increaseDataDisk = true) {
  let vm = vmAndParam.vm
  // refresh the object so that sizes are correct
  const params = vmAndParam.params
  const ip = params.xenstore_data['vm-data/ip']
  const sr = xapi.getObject(params.sr.$id)
  // Wait for the SR object to expose its PBDs so the hosting server can be found.
  await xapi._waitObjectState(sr.$id, sr => Boolean(sr.$PBDs))
  const host = sr.$PBDs[0].$host
  const firstVif = vm.$VIFs[0]
  if (xosanNetwork.$id !== firstVif.$network.$id) {
    await xapi.call('VIF.move', firstVif.$ref, xosanNetwork.$ref)
  }
  await xapi.editVm(vm, {
    name_label: params.name_label,
    name_description: params.name_description
  })
  await xapi.call('VM.set_xenstore_data', vm.$ref, params.xenstore_data)
  if (increaseDataDisk) {
    // Find the VDI labelled 'xosan_data' among the VM's disks.
    const dataDisk = vm.$VBDs.map(vbd => vbd.$VDI).find(vdi => vdi && vdi.name_label === 'xosan_data')
    const srFreeSpace = sr.physical_size - sr.physical_utilisation
    // we use a percentage because it looks like the VDI overhead is proportional
    const newSize = floor2048((srFreeSpace + dataDisk.virtual_size) * XOSAN_DATA_DISK_USEAGE_RATIO)
    await xapi._resizeVdi(dataDisk, Math.min(newSize, XOSAN_MAX_DISK_SIZE))
  }
  await xapi.startVm(vm)
  debug('waiting for boot of ', ip)
  // wait until we find the assigned IP in the networks, we are just checking the boot is complete
  const vmIsUp = vm => Boolean(vm.$guest_metrics && includes(vm.$guest_metrics.networks, ip))
  vm = await xapi._waitObjectState(vm.$id, vmIsUp)
  debug('booted ', ip)
  return { address: ip, host, vm }
}
// Invoke a command of the xosan.py XAPI plugin on `host` and return its
// JSON-parsed output.
async function callPlugin (xapi, host, command, params) {
  debug('calling plugin', host.address, command)
  const output = await xapi.call('host.call_plugin', host.$ref, 'xosan.py', command, params)
  return JSON.parse(output)
}
// Run `cmd` over SSH on the XOSAN VM at `hostAndAddress.address`, relayed
// through the xosan.py plugin of the corresponding host. Throws on a
// non-zero exit status; otherwise returns the plugin result (with stdout).
async function remoteSsh (xapi, hostAndAddress, cmd) {
  const { host, address } = hostAndAddress
  const result = await callPlugin(xapi, host, 'run_ssh', {
    destination: 'root@' + address,
    cmd: cmd
  })
  if (result.exit !== 0) {
    throw new Error('ssh error: ' + JSON.stringify(result))
  }
  debug(result)
  return result
}
// Statically assign `address` to the PIF inside the XOSAN /24 subnet; the
// gateway is the subnet's .1 address.
async function setPifIp (xapi, pif, address) {
  const netmask = '255.255.255.0'
  const gateway = NETWORK_PREFIX + '1'
  await xapi.call('PIF.reconfigure_ip', pif.$ref, 'Static', address, netmask, gateway, '')
}
// Create the private XOSAN network (MTU 9000, on the given PIF/VLAN) and give
// each host's PIF a static IP in the NETWORK_PREFIX subnet, starting at .1.
// On failure of a later caller step, the network is deleted (best effort).
const createNetworkAndInsertHosts = defer.onFailure(async function ($onFailure, xapi, pif, vlan) {
  let hostIpLastNumber = 1
  const xosanNetwork = await xapi.createNetwork({
    name: 'XOSAN network',
    description: 'XOSAN network',
    pifId: pif._xapiId,
    mtu: 9000,
    vlan: +vlan
  })
  // Roll back the network if anything after this point fails.
  $onFailure(() => xapi.deleteNetwork(xosanNetwork)::pCatch(noop))
  await Promise.all(xosanNetwork.$PIFs.map(pif => setPifIp(xapi, pif, NETWORK_PREFIX + (hostIpLastNumber++))))
  return xosanNetwork
})
// Return the XOSAN SSH key pair cached on the pool, generating it on first
// use: read the key files from disk, and if that fails (files missing),
// run ssh-keygen and read again. The pair is persisted as pool data.
async function getOrCreateSshKey (xapi) {
  let sshKey = xapi.xo.getData(xapi.pool, 'xosan_ssh_key')
  if (!sshKey) {
    // Load both halves of the key pair from disk and persist them on the pool.
    const readKeys = async () => {
      sshKey = {
        private: await fs.readFile(SSH_KEY_FILE, 'ascii'),
        public: await fs.readFile(SSH_KEY_FILE + '.pub', 'ascii')
      }
      xapi.xo.setData(xapi.pool, 'xosan_ssh_key', sshKey)
    }
    try {
      await readKeys()
    } catch (e) {
      // Key files unavailable: generate a passphrase-less 4096-bit RSA pair,
      // then retry the read (errors from this second attempt propagate).
      await execa('ssh-keygen', ['-q', '-f', SSH_KEY_FILE, '-t', 'rsa', '-b', '4096', '-N', ''])
      await readKeys()
    }
  }
  return sshKey
}
// Build the "xosan" gluster volume: peer-probe every brick VM from the first
// node, create the volume with the layout selected by `glusterType`, apply
// the tuning options (in a fixed order), then start the volume.
async function configureGluster (redundancy, ipAndHosts, xapi, firstIpAndHost, glusterType, arbiter = null) {
  // Per-layout creation clause and layout-specific extra settings.
  const configByType = {
    replica_arbiter: {
      creation: 'replica 3 arbiter 1',
      extra: []
    },
    replica: {
      creation: 'replica ' + redundancy + ' ',
      extra: ['gluster volume set xosan cluster.data-self-heal on']
    },
    disperse: {
      creation: 'disperse ' + ipAndHosts.length + ' redundancy ' + redundancy + ' ',
      extra: []
    }
  }
  const brickVms = arbiter ? ipAndHosts.concat(arbiter) : ipAndHosts
  // Probe every node but the first (the first is the one issuing the commands).
  for (const brickVm of brickVms.slice(1)) {
    await remoteSsh(xapi, firstIpAndHost, 'gluster peer probe ' + brickVm.address)
  }
  const { creation, extra } = configByType[glusterType]
  const brickList = brickVms.map(ipAndHost => (ipAndHost.address + ':/bricks/xosan/xosandir')).join(' ')
  const volumeCreation = 'gluster volume create xosan ' + creation + ' ' + brickList
  debug('creating volume: ', volumeCreation)
  await remoteSsh(xapi, firstIpAndHost, volumeCreation)
  // Tuning applied to every layout, in this exact order.
  const commonSettings = [
    'gluster volume set xosan network.remote-dio enable',
    'gluster volume set xosan cluster.eager-lock enable',
    'gluster volume set xosan performance.io-cache off',
    'gluster volume set xosan performance.read-ahead off',
    'gluster volume set xosan performance.quick-read off',
    'gluster volume set xosan performance.strict-write-ordering off',
    'gluster volume set xosan client.event-threads 8',
    'gluster volume set xosan server.event-threads 8',
    'gluster volume set xosan performance.io-thread-count 64',
    'gluster volume set xosan performance.stat-prefetch on',
    'gluster volume set xosan features.shard on',
    'gluster volume set xosan features.shard-block-size 512MB'
  ]
  for (const setting of commonSettings.concat(extra)) {
    await remoteSsh(xapi, firstIpAndHost, setting)
  }
  await remoteSsh(xapi, firstIpAndHost, 'gluster volume start xosan')
}
// Create a XOSAN SR: build the private network, generate/distribute the SSH
// key, import the template VM on the first SR and copy it to the others
// (plus an arbiter VM when only 2 SRs are given), configure the gluster
// volume across the nodes, then create the XAPI SR backed by it.
// Every intermediate resource registers an $onFailure rollback, and the
// per-pool reentrancy guard is always released in the finally block.
export const createSR = defer.onFailure(async function ($onFailure, { template, pif, vlan, srs, glusterType, redundancy }) {
  if (!this.requestResource) {
    throw new Error('requestResource is not a function')
  }
  if (srs.length < 1) {
    return // TODO: throw an error
  }
  // XOSAN node VMs get IPs starting at NETWORK_PREFIX.101.
  let vmIpLastNumber = 101
  const xapi = this.getXapi(srs[0])
  if (CURRENTLY_CREATING_SRS[xapi.pool.$id]) {
    throw new Error('createSR is already running for this pool')
  }
  CURRENTLY_CREATING_SRS[xapi.pool.$id] = true
  try {
    const xosanNetwork = await createNetworkAndInsertHosts(xapi, pif, vlan)
    $onFailure(() => xapi.deleteNetwork(xosanNetwork)::pCatch(noop))
    const sshKey = await getOrCreateSshKey(xapi)
    const srsObjects = map(srs, srId => xapi.getObject(srId))
    // One VM description per SR: where it lives and what goes in xenstore.
    const vmParameters = map(srs, srId => {
      const sr = xapi.getObject(srId)
      const host = sr.$PBDs[0].$host
      return {
        sr,
        host,
        name_label: `XOSAN - ${sr.name_label} - ${host.name_label}`,
        name_description: 'Xosan VM storing data on volume ' + sr.name_label,
        // the values of the xenstore_data object *have* to be string, don't forget.
        xenstore_data: {
          'vm-data/hostname': 'XOSAN' + sr.name_label,
          'vm-data/sshkey': sshKey.public,
          'vm-data/ip': NETWORK_PREFIX + (vmIpLastNumber++),
          'vm-data/mtu': String(xosanNetwork.MTU),
          'vm-data/vlan': String(vlan)
        }
      }
    })
    // Push the SSH key pair to every involved host so they can drive the VMs.
    await Promise.all(vmParameters.map(vmParam => callPlugin(xapi, vmParam.host, 'receive_ssh_keys', {
      private_key: sshKey.private,
      public_key: sshKey.public,
      force: 'true'
    })))
    // Import the template once, on the first SR; the other nodes are copies.
    const firstVM = await xapi.importVm(
      await this.requestResource('xosan', template.id, template.version),
      { srId: vmParameters[0].sr.$ref, type: 'xva' }
    )
    $onFailure(() => xapi.deleteVm(firstVM, true)::pCatch(noop))
    await xapi.editVm(firstVM, {
      autoPoweron: true
    })
    const copiedVms = await Promise.all(vmParameters.slice(1).map(param => copyVm(xapi, firstVM, param)))
    // TODO: Promise.all() is certainly not the right operation to execute all the given promises whether they fulfill or reject.
    $onFailure(() => Promise.all(copiedVms.map(vm => xapi.deleteVm(vm.vm, true)))::pCatch(noop))
    const vmsAndParams = [{
      vm: firstVM,
      params: vmParameters[0]
    }].concat(copiedVms)
    let arbiter = null
    // With exactly 2 SRs an arbiter VM is added on the first SR (no data
    // disk growth for it — prepareGlusterVm is called with false).
    if (srs.length === 2) {
      const sr = vmParameters[0].sr
      const arbiterConfig = {
        sr: sr,
        host: vmParameters[0].host,
        name_label: vmParameters[0].name_label + ' arbiter',
        name_description: 'Xosan VM storing data on volume ' + sr.name_label,
        xenstore_data: {
          'vm-data/hostname': 'XOSAN' + sr.name_label + '_arb',
          'vm-data/sshkey': sshKey.public,
          'vm-data/ip': NETWORK_PREFIX + (vmIpLastNumber++),
          'vm-data/mtu': String(xosanNetwork.MTU),
          'vm-data/vlan': String(vlan)
        }
      }
      const arbiterVm = await copyVm(xapi, firstVM, arbiterConfig)
      $onFailure(() => xapi.deleteVm(arbiterVm.vm, true)::pCatch(noop))
      arbiter = await prepareGlusterVm(xapi, arbiterVm, xosanNetwork, false)
    }
    const ipAndHosts = await Promise.all(map(vmsAndParams, vmAndParam => prepareGlusterVm(xapi, vmAndParam, xosanNetwork)))
    const firstIpAndHost = ipAndHosts[0]
    await configureGluster(redundancy, ipAndHosts, xapi, firstIpAndHost, glusterType, arbiter)
    debug('xosan gluster volume started')
    const config = { server: firstIpAndHost.address + ':/xosan', backupserver: ipAndHosts[1].address }
    const xosanSr = await xapi.call('SR.create', srsObjects[0].$PBDs[0].$host.$ref, config, 0, 'XOSAN', 'XOSAN', 'xosan', '', true, {})
    if (arbiter) {
      ipAndHosts.push(arbiter)
    }
    // we just forget because the cleanup actions will be executed before.
    $onFailure(() => xapi.forgetSr(xosanSr)::pCatch(noop))
    // Record the node layout on the SR so getVolumeInfo can find the VMs.
    await xapi.xo.setData(xosanSr, 'xosan_config', {
      nodes: ipAndHosts.map(param => ({
        host: param.host.$id,
        vm: { id: param.vm.$id, ip: param.address }
      })),
      network: xosanNetwork.$id
    })
  } finally {
    delete CURRENTLY_CREATING_SRS[xapi.pool.$id]
  }
})
createSR.description = 'create gluster VM'
createSR.permission = 'admin'

// API schema: SRs hosting the bricks, the PIF/VLAN carrying the XOSAN
// network, and the gluster layout (type + redundancy).
createSR.params = {
  srs: { type: 'array', items: { type: 'string' } },
  pif: { type: 'string' },
  vlan: { type: 'string' },
  glusterType: { type: 'string' },
  redundancy: { type: 'number' }
}
createSR.resolve = {
  srs: ['sr', 'SR', 'administrate'],
  pif: ['pif', 'PIF', 'administrate']
}
// Report whether a XOSAN SR creation is in progress on the given pool.
// A truthy entry in CURRENTLY_CREATING_SRS means createSR() is still running.
export function checkSrIsBusy ({ poolId }) {
  return Boolean(CURRENTLY_CREATING_SRS[poolId])
}
// Fixed typo in the user-facing description ("curently" -> "currently").
checkSrIsBusy.description = 'checks if there is a xosan SR currently being created on the given pool id'
checkSrIsBusy.permission = 'admin'
checkSrIsBusy.params = { poolId: { type: 'string' } }
// Gluster layouts available per node count (2..16): the brick layout, its
// redundancy (tolerated failures / replica count) and the capacity factor
// used to derive the usable space.
const POSSIBLE_CONFIGURATIONS = {
  2: [{ layout: 'replica_arbiter', redundancy: 3, capacity: 1 }],
  3: [
    { layout: 'disperse', redundancy: 1, capacity: 2 },
    { layout: 'replica', redundancy: 3, capacity: 1 }
  ],
  4: [{ layout: 'replica', redundancy: 2, capacity: 1 }],
  5: [{ layout: 'disperse', redundancy: 1, capacity: 4 }],
  6: [
    { layout: 'disperse', redundancy: 2, capacity: 4 },
    { layout: 'replica', redundancy: 2, capacity: 3 },
    { layout: 'replica', redundancy: 3, capacity: 2 }
  ],
  7: [{ layout: 'disperse', redundancy: 3, capacity: 4 }],
  8: [{ layout: 'replica', redundancy: 2, capacity: 4 }],
  9: [
    { layout: 'disperse', redundancy: 1, capacity: 8 },
    { layout: 'replica', redundancy: 3, capacity: 3 }
  ],
  10: [
    { layout: 'disperse', redundancy: 2, capacity: 8 },
    { layout: 'replica', redundancy: 2, capacity: 5 }
  ],
  11: [{ layout: 'disperse', redundancy: 3, capacity: 8 }],
  12: [
    { layout: 'disperse', redundancy: 4, capacity: 8 },
    { layout: 'replica', redundancy: 2, capacity: 6 }
  ],
  13: [{ layout: 'disperse', redundancy: 5, capacity: 8 }],
  14: [
    { layout: 'disperse', redundancy: 6, capacity: 8 },
    { layout: 'replica', redundancy: 2, capacity: 7 }
  ],
  15: [
    { layout: 'disperse', redundancy: 7, capacity: 8 },
    { layout: 'replica', redundancy: 3, capacity: 5 }
  ],
  16: [{ layout: 'replica', redundancy: 2, capacity: 8 }]
}
// List the gluster layouts possible for the given LVM SRs, each annotated
// with the usable space (`availableSpace`) it would yield. The brick size is
// bounded by the most-filled SR, minus the system disk, scaled by the data
// disk usage ratio. Returns null when no layout exists for this node count.
export async function computeXosanPossibleOptions ({ lvmSrs }) {
  const count = lvmSrs.length
  const configurations = POSSIBLE_CONFIGURATIONS[count]
  if (!configurations) {
    return null
  }
  // configurations only exist for counts >= 2, so this guard always holds
  // here; kept for safety.
  if (count > 0) {
    const xapi = this.getXapi(lvmSrs[0])
    const srs = map(lvmSrs, srId => xapi.getObject(srId))
    const srSizes = map(srs, sr => sr.physical_size - sr.physical_utilisation)
    // Idiomatic spread instead of Math.min.apply(null, ...).
    const minSize = Math.min(...srSizes)
    const brickSize = (minSize - XOSAN_VM_SYSTEM_DISK_SIZE) * XOSAN_DATA_DISK_USEAGE_RATIO
    return configurations.map(conf => ({ ...conf, availableSpace: brickSize * conf.capacity }))
  }
}
computeXosanPossibleOptions.params = {
  lvmSrs: {
    type: 'array',
    items: {
      type: 'string'
    }
  }
}
// ---------------------------------------------------------------------
export async function downloadAndInstallXosanPack ({ id, version, pool }) {
if (!this.requestResource) {
throw new Error('requestResource is not a function')
}
const xapi = this.getXapi(pool.id)
const res = await this.requestResource('xosan', id, version)
return xapi.installSupplementalPackOnAllHosts(res)
}
downloadAndInstallXosanPack.description = 'Register a resource via cloud plugin'
downloadAndInstallXosanPack.params = {
id: { type: 'string' },
version: { type: 'string' },
pool: { type: 'string' }
}
downloadAndInstallXosanPack.resolve = {
pool: ['pool', 'pool', 'administrate']
}
downloadAndInstallXosanPack.permission = 'admin'

View File

@@ -3,6 +3,7 @@ import difference from 'lodash/difference'
import filter from 'lodash/filter'
import getKey from 'lodash/keys'
import {createClient as createRedisClient} from 'redis'
import {v4 as generateUuid} from 'uuid'
import {
forEach,
@@ -35,13 +36,13 @@ export default class Redis extends Collection {
connection,
indexes = [],
prefix,
uri = 'tcp://localhost:6379'
uri
}) {
super()
this.indexes = indexes
this.prefix = prefix
this.redis = promisifyAll.call(connection || createRedisClient(uri))
this.redis = promisifyAll(connection || createRedisClient(uri))
}
_extract (ids) {
@@ -68,12 +69,12 @@ export default class Redis extends Collection {
// TODO: remove “replace” which is a temporary measure, implement
// “set()” instead.
const {indexes, prefix, redis, idPrefix = ''} = this
const {indexes, prefix, redis} = this
return Promise.all(mapToArray(models, async model => {
// Generate a new identifier if necessary.
if (model.id === undefined) {
model.id = idPrefix + String(await redis.incr(prefix + '_id'))
model.id = generateUuid()
}
const success = await redis.sadd(prefix + '_ids', model.id)
@@ -149,6 +150,10 @@ export default class Redis extends Collection {
}
_remove (ids) {
if (isEmpty(ids)) {
return
}
const {prefix, redis} = this
// TODO: handle indexes.

View File

@@ -1,65 +1,19 @@
import bind from 'lodash/bind'
import { getBoundPropertyDescriptor } from 'bind-property-descriptor'
import {
isArray,
isPromise,
isFunction,
noop,
pFinally
isFunction
} from './utils'
// ===================================================================
const {
defineProperties,
defineProperty,
getOwnPropertyDescriptor
} = Object
// ===================================================================
// See: https://github.com/jayphelps/core-decorators.js#autobind
//
// TODO: make it work for all class methods.
export const autobind = (target, key, {
configurable,
enumerable,
value: fn,
writable
}) => ({
configurable,
enumerable,
get () {
if (this === target) {
return fn
}
const bound = bind(fn, this)
defineProperty(this, key, {
configurable: true,
enumerable: false,
value: bound,
writable: true
})
return bound
},
set (newValue) {
// Cannot use assignment because it will call the setter on
// the prototype.
defineProperty(this, key, {
configurable: true,
enumerable: true,
value: newValue,
writable: true
})
}
})
// -------------------------------------------------------------------
// Debounce decorator for methods.
//
// See: https://github.com/wycats/javascript-decorators
@@ -70,7 +24,7 @@ export const debounce = duration => (target, name, descriptor) => {
// This symbol is used to store the related data directly on the
// current object.
const s = Symbol()
const s = Symbol(`debounced ${name} data`)
function debounced () {
const data = this[s] || (this[s] = {
@@ -98,119 +52,8 @@ export const debounce = duration => (target, name, descriptor) => {
// -------------------------------------------------------------------
const _push = Array.prototype.push
export const deferrable = (target, name, descriptor) => {
let fn
function newFn () {
const deferreds = []
const defer = fn => {
deferreds.push(fn)
}
defer.clear = () => {
deferreds.length = 0
}
const args = [ defer ]
_push.apply(args, arguments)
let executeDeferreds = () => {
let i = deferreds.length
while (i) {
deferreds[--i]()
}
}
try {
const result = fn.apply(this, args)
if (isPromise(result)) {
result::pFinally(executeDeferreds)
// Do not execute the deferreds in the finally block.
executeDeferreds = noop
}
return result
} finally {
executeDeferreds()
}
}
if (descriptor) {
fn = descriptor.value
descriptor.value = newFn
return descriptor
}
fn = target
return newFn
}
// Deferred functions are only executed on failures.
//
// i.e.: defer.clear() is automatically called in case of success.
deferrable.onFailure = (target, name, descriptor) => {
let fn
function newFn (defer) {
const result = fn.apply(this, arguments)
return isPromise(result)
? result.then(result => {
defer.clear()
return result
})
: (defer.clear(), result)
}
if (descriptor) {
fn = descriptor.value
descriptor.value = newFn
} else {
fn = target
target = newFn
}
return deferrable(target, name, descriptor)
}
// Deferred functions are only executed on success.
//
// i.e.: defer.clear() is automatically called in case of failure.
deferrable.onSuccess = (target, name, descriptor) => {
let fn
function newFn (defer) {
try {
const result = fn.apply(this, arguments)
return isPromise(result)
? result.then(null, error => {
defer.clear()
throw error
})
: result
} catch (error) {
defer.clear()
throw error
}
}
if (descriptor) {
fn = descriptor.value
descriptor.value = newFn
} else {
fn = target
target = newFn
}
return deferrable(target, name, descriptor)
}
// -------------------------------------------------------------------
const _ownKeys = (
typeof Reflect !== 'undefined' && Reflect.ownKeys ||
(typeof Reflect !== 'undefined' && Reflect.ownKeys) ||
(({
getOwnPropertyNames: names,
getOwnPropertySymbols: symbols
@@ -220,22 +63,6 @@ const _ownKeys = (
)(Object)
)
const _bindPropertyDescriptor = (descriptor, thisArg) => {
const { get, set, value } = descriptor
if (get) {
descriptor.get = bind(get, thisArg)
}
if (set) {
descriptor.set = bind(set, thisArg)
}
if (isFunction(value)) {
descriptor.value = bind(value, thisArg)
}
return descriptor
}
const _isIgnoredProperty = name => (
name[0] === '_' ||
name === 'constructor'
@@ -284,7 +111,7 @@ export const mixin = MixIns => Class => {
defineProperties(prototype, descriptors)
}
const Decorator = (...args) => {
function Decorator (...args) {
const instance = new Class(...args)
for (const MixIn of MixIns) {
@@ -300,8 +127,9 @@ export const mixin = MixIns => Class => {
throw new Error(`${name}#${prop} is already defined`)
}
descriptors[prop] = _bindPropertyDescriptor(
getOwnPropertyDescriptor(prototype, prop),
descriptors[prop] = getBoundPropertyDescriptor(
prototype,
prop,
mixinInstance
)
}

View File

@@ -1,48 +1,9 @@
/* eslint-env mocha */
/* eslint-env jest */
import expect from 'must'
import {debounce} from './decorators'
// ===================================================================
import {autobind, debounce, deferrable} from './decorators'
// ===================================================================
describe('autobind()', () => {
class Foo {
@autobind
getFoo () {
return this
}
}
it('returns a bound instance for a method', () => {
const foo = new Foo()
const { getFoo } = foo
expect(getFoo()).to.equal(foo)
})
it('returns the same bound instance each time', () => {
const foo = new Foo()
expect(foo.getFoo).to.equal(foo.getFoo)
})
it('works with multiple instances of the same class', () => {
const foo1 = new Foo()
const foo2 = new Foo()
const getFoo1 = foo1.getFoo
const getFoo2 = foo2.getFoo
expect(getFoo1()).to.equal(foo1)
expect(getFoo2()).to.equal(foo2)
})
})
// -------------------------------------------------------------------
describe('debounce()', () => {
let i
@@ -60,114 +21,19 @@ describe('debounce()', () => {
it('works', done => {
const foo = new Foo()
expect(i).to.equal(0)
expect(i).toBe(0)
foo.foo()
expect(i).to.equal(1)
expect(i).toBe(1)
foo.foo()
expect(i).to.equal(1)
expect(i).toBe(1)
setTimeout(() => {
foo.foo()
expect(i).to.equal(2)
expect(i).toBe(2)
done()
}, 2e1)
})
})
// -------------------------------------------------------------------
describe('deferrable()', () => {
it('works with normal termination', () => {
let i = 0
const fn = deferrable(defer => {
i += 2
defer(() => { i -= 2 })
i *= 2
defer(() => { i /= 2 })
return i
})
expect(fn()).to.equal(4)
expect(i).to.equal(0)
})
it('defer.clear() removes previous deferreds', () => {
let i = 0
const fn = deferrable(defer => {
i += 2
defer(() => { i -= 2 })
defer.clear()
i *= 2
defer(() => { i /= 2 })
return i
})
expect(fn()).to.equal(4)
expect(i).to.equal(2)
})
it('works with exception', () => {
let i = 0
const fn = deferrable(defer => {
i += 2
defer(() => { i -= 2 })
i *= 2
defer(() => { i /= 2 })
throw i
})
expect(() => fn()).to.throw(4)
expect(i).to.equal(0)
})
it('works with promise resolution', async () => {
let i = 0
const fn = deferrable(async defer => {
i += 2
defer(() => { i -= 2 })
i *= 2
defer(() => { i /= 2 })
// Wait a turn of the events loop.
await Promise.resolve()
return i
})
await expect(fn()).to.eventually.equal(4)
expect(i).to.equal(0)
})
it('works with promise rejection', async () => {
let i = 0
const fn = deferrable(async defer => {
// Wait a turn of the events loop.
await Promise.resolve()
i += 2
defer(() => { i -= 2 })
i *= 2
defer(() => { i /= 2 })
// Wait a turn of the events loop.
await Promise.resolve()
throw i
})
await expect(fn()).to.reject.to.equal(4)
expect(i).to.equal(0)
})
})

View File

@@ -20,14 +20,15 @@ import { boot16 as fat16 } from 'fatfs/structs'
const SECTOR_SIZE = 512
const TEN_MIB = 10 * 1024 * 1024
// Creates a 10MB buffer and initializes it as a FAT 16 volume.
export function init () {
const buf = new Buffer(10 * 1024 * 1024) // 10MB
buf.fill(0)
const buf = Buffer.alloc(TEN_MIB)
// https://github.com/natevw/fatfs/blob/master/structs.js
fat16.pack({
jmpBoot: new Buffer('eb3c90', 'hex'),
jmpBoot: Buffer.from('eb3c90', 'hex'),
OEMName: 'mkfs.fat',
BytsPerSec: SECTOR_SIZE,
SecPerClus: 4,

View File

@@ -1,27 +1,23 @@
import assign from 'lodash/assign'
import startsWith from 'lodash/startsWith'
import { parse as parseUrl } from 'url'
import isRedirect from 'is-redirect'
import { assign, isString, startsWith } from 'lodash'
import { cancellable } from 'promise-toolbox'
import { request as httpRequest } from 'http'
import { request as httpsRequest } from 'https'
import { stringify as formatQueryString } from 'querystring'
import {
isString,
streamToBuffer
} from './utils'
format as formatUrl,
parse as parseUrl,
resolve as resolveUrl
} from 'url'
import { streamToBuffer } from './utils'
// -------------------------------------------------------------------
export default (...args) => {
const raw = opts => {
let req
const pResponse = new Promise((resolve, reject) => {
const opts = {}
for (let i = 0, length = args.length; i < length; ++i) {
const arg = args[i]
assign(opts, isString(arg) ? parseUrl(arg) : arg)
}
const {
body,
headers: { ...headers } = {},
@@ -62,11 +58,16 @@ export default (...args) => {
}
}
req = (
protocol && startsWith(protocol.toLowerCase(), 'https')
? httpsRequest
: httpRequest
)({
const secure = protocol && startsWith(protocol.toLowerCase(), 'https')
let requestFn
if (secure) {
requestFn = httpsRequest
} else {
requestFn = httpRequest
delete rest.rejectUnauthorized
}
req = requestFn({
...rest,
headers
})
@@ -98,6 +99,11 @@ export default (...args) => {
}
const code = response.statusCode
const { location } = response.headers
if (isRedirect(code) && location) {
assign(opts, parseUrl(resolveUrl(formatUrl(opts), location)))
return raw(opts)
}
if (code < 200 || code >= 300) {
const error = new Error(response.statusMessage)
error.code = code
@@ -112,13 +118,27 @@ export default (...args) => {
return response
})
pResponse.cancel = () => {
req.emit('error', new Error('HTTP request canceled!'))
req.abort()
}
pResponse.readAll = () => pResponse.then(response => response.readAll())
pResponse.request = req
return pResponse
}
const httpRequestPlus = ($cancelToken, ...args) => {
const opts = {}
for (let i = 0, length = args.length; i < length; ++i) {
const arg = args[i]
assign(opts, isString(arg) ? parseUrl(arg) : arg)
}
const pResponse = raw(opts)
$cancelToken.promise.then(() => {
const { request } = pResponse
request.emit('error', new Error('HTTP request canceled!'))
request.abort()
})
pResponse.readAll = () => pResponse.then(response => response.readAll())
return pResponse
}
export default cancellable(httpRequestPlus)

View File

@@ -1,15 +1,12 @@
import createLogger from 'debug'
const debug = createLogger('xo:main')
import appConf from 'app-conf'
import bind from 'lodash/bind'
import blocked from 'blocked'
import createExpress from 'express'
import createLogger from 'debug'
import eventToPromise from 'event-to-promise'
import has from 'lodash/has'
import helmet from 'helmet'
import includes from 'lodash/includes'
import pick from 'lodash/pick'
import proxyConsole from './proxy-console'
import serveStatic from 'serve-static'
import startsWith from 'lodash/startsWith'
@@ -18,21 +15,14 @@ import { compile as compilePug } from 'pug'
import { createServer as createProxyServer } from 'http-proxy'
import { join as joinPath } from 'path'
import {
AlreadyAuthenticated,
InvalidCredential,
InvalidParameters,
NoSuchObject,
NotImplemented
} from './api-errors'
import JsonRpcPeer from 'json-rpc-peer'
import { invalidCredentials } from 'xo-common/api-errors'
import {
readFile,
readdir
ensureDir,
readdir,
readFile
} from 'fs-promise'
import * as apiMethods from './api/index'
import Api from './api'
import WebServer from 'http-server-plus'
import Xo from './xo'
import {
@@ -57,6 +47,8 @@ import { Strategy as LocalStrategy } from 'passport-local'
// ===================================================================
const debug = createLogger('xo:main')
const warn = (...args) => {
console.warn('[Warn]', ...args)
}
@@ -139,6 +131,11 @@ async function setUpPassport (express, xo) {
}))
})
express.get('/signout', (req, res) => {
res.clearCookie('token')
res.redirect('/')
})
const SIGNIN_STRATEGY_RE = /^\/signin\/([^/]+)(\/callback)?(:?\?.*)?$/
express.use(async (req, res, next) => {
const { url } = req
@@ -188,7 +185,7 @@ async function setUpPassport (express, xo) {
next()
} else if (req.cookies.token) {
next()
} else if (/favicon|fontawesome|images|styles/.test(url)) {
} else if (/favicon|fontawesome|images|styles|\.(?:css|jpg|png)$/.test(url)) {
next()
} else {
req.flash('return-url', url)
@@ -213,7 +210,7 @@ async function setUpPassport (express, xo) {
async function registerPlugin (pluginPath, pluginName) {
const plugin = require(pluginPath)
const { version = 'unknown' } = (() => {
const { description, version = 'unknown' } = (() => {
try {
return require(pluginPath + '/package.json')
} catch (_) {
@@ -225,13 +222,19 @@ async function registerPlugin (pluginPath, pluginName) {
const {
default: factory = plugin,
configurationSchema,
configurationPresets
configurationPresets,
testSchema
} = plugin
// The default export can be either a factory or directly a plugin
// instance.
const instance = isFunction(factory)
? factory({ xo: this })
? factory({
xo: this,
getDataDir: () => {
const dir = `${this._config.datadir}/${pluginName}`
return ensureDir(dir).then(() => dir)
}})
: factory
await this.registerPlugin(
@@ -239,6 +242,8 @@ async function registerPlugin (pluginPath, pluginName) {
instance,
configurationSchema,
configurationPresets,
description,
testSchema,
version
)
}
@@ -407,47 +412,13 @@ const setUpStaticFiles = (express, opts) => {
// ===================================================================
const errorClasses = {
ALREADY_AUTHENTICATED: AlreadyAuthenticated,
INVALID_CREDENTIAL: InvalidCredential,
INVALID_PARAMS: InvalidParameters,
NO_SUCH_OBJECT: NoSuchObject,
NOT_IMPLEMENTED: NotImplemented
}
const apiHelpers = {
getUserPublicProperties (user) {
// Handles both properties and wrapped models.
const properties = user.properties || user
return pick(properties, 'id', 'email', 'groups', 'permission', 'preferences', 'provider')
},
throw (errorId, data) {
throw new (errorClasses[errorId])(data)
}
}
const setUpApi = (webServer, xo, verboseLogsOnErrors) => {
const webSocketServer = new WebSocket.Server({
server: webServer,
path: '/api/'
noServer: true
})
xo.on('stop', () => pFromCallback(cb => webSocketServer.close(cb)))
// FIXME: it can cause issues if there any property assignments in
// XO methods called from the API.
const context = { __proto__: xo, ...apiHelpers }
const api = new Api({
context,
verboseLogsOnErrors
})
xo.defineProperty('api', api)
api.addMethods(apiMethods)
webSocketServer.on('connection', socket => {
const onConnection = socket => {
const { remoteAddress } = socket.upgradeReq.socket
debug('+ WebSocket connection (%s)', remoteAddress)
@@ -461,7 +432,7 @@ const setUpApi = (webServer, xo, verboseLogsOnErrors) => {
// Create the JSON-RPC server for this connection.
const jsonRpc = new JsonRpcPeer(message => {
if (message.type === 'request') {
return api.call(connection, message.method, message.params)
return xo.callApiMethod(connection, message.method, message.params)
}
})
connection.notify = bind(jsonRpc.notify, jsonRpc)
@@ -490,6 +461,11 @@ const setUpApi = (webServer, xo, verboseLogsOnErrors) => {
socket.send(data, onSend)
}
})
}
webServer.on('upgrade', (req, socket, head) => {
if (req.url === '/api/') {
webSocketServer.handleUpgrade(req, socket, head, onConnection)
}
})
}
@@ -517,7 +493,7 @@ const setUpConsoleProxy = (webServer, xo) => {
const user = await xo.authenticateUser({ token })
if (!await xo.hasPermissions(user.id, [ [ id, 'operate' ] ])) {
throw new InvalidCredential()
throw invalidCredentials()
}
const { remoteAddress } = socket
@@ -535,7 +511,7 @@ const setUpConsoleProxy = (webServer, xo) => {
proxyConsole(connection, vmConsole, xapi.sessionId)
})
} catch (error) {
console.error(error && error.stack || error)
console.error((error && error.stack) || error)
}
})
}
@@ -650,16 +626,24 @@ export default async function main (args) {
await registerPlugins(xo)
}
// Gracefully shutdown on signals.
//
// TODO: implements a timeout? (or maybe it is the services launcher
// responsibility?)
const shutdown = signal => {
debug('%s caught, closing…', signal)
xo.stop()
}
forEach([ 'SIGINT', 'SIGTERM' ], signal => {
let alreadyCalled = false
// Gracefully shutdown on signals.
process.on('SIGINT', () => shutdown('SIGINT'))
process.on('SIGTERM', () => shutdown('SIGTERM'))
process.on(signal, () => {
if (alreadyCalled) {
warn('forced exit')
process.exit(1)
}
alreadyCalled = true
debug('%s caught, closing…', signal)
xo.stop()
})
})
await eventToPromise(xo, 'stopped')

View File

@@ -1,9 +1,20 @@
import assign from 'lodash/assign'
import {BaseError} from 'make-error'
import Bluebird from 'bluebird'
import every from 'lodash/every'
import filter from 'lodash/filter'
import isArray from 'lodash/isArray'
import isPlainObject from 'lodash/isPlainObject'
import map from 'lodash/map'
import mapValues from 'lodash/mapValues'
import size from 'lodash/size'
import some from 'lodash/some'
import { BaseError } from 'make-error'
import { timeout } from 'promise-toolbox'
import { crossProduct } from './math'
import {
createRawObject,
forEach
serializeError,
thunkToArray
} from './utils'
export class JobExecutorError extends BaseError {}
@@ -18,30 +29,67 @@ export class UnsupportedVectorType extends JobExecutorError {
}
}
export const productParams = (...args) => {
let product = createRawObject()
assign(product, ...args)
return product
// ===================================================================
const match = (pattern, value) => {
if (isPlainObject(pattern)) {
if (size(pattern) === 1) {
if (pattern.__or) {
return some(pattern.__or, subpattern => match(subpattern, value))
}
if (pattern.__not) {
return !match(pattern.__not, value)
}
}
return isPlainObject(value) && every(pattern, (subpattern, key) => (
value[key] !== undefined && match(subpattern, value[key])
))
}
if (isArray(pattern)) {
return isArray(value) && every(pattern, subpattern =>
some(value, subvalue => match(subpattern, subvalue))
)
}
return pattern === value
}
export function _computeCrossProduct (items, productCb, extractValueMap = {}) {
const upstreamValues = []
const itemsCopy = items.slice()
const item = itemsCopy.pop()
const values = extractValueMap[item.type] && extractValueMap[item.type](item) || item
forEach(values, value => {
if (itemsCopy.length) {
let downstreamValues = _computeCrossProduct(itemsCopy, productCb, extractValueMap)
forEach(downstreamValues, downstreamValue => {
upstreamValues.push(productCb(value, downstreamValue))
const paramsVectorActionsMap = {
extractProperties ({ mapping, value }) {
return mapValues(mapping, key => value[key])
},
crossProduct ({ items }) {
return thunkToArray(crossProduct(
map(items, value => resolveParamsVector.call(this, value))
))
},
fetchObjects ({ pattern }) {
return filter(this.xo.getObjects(), object => match(pattern, object))
},
map ({ collection, iteratee, paramName = 'value' }) {
return map(resolveParamsVector.call(this, collection), value => {
return resolveParamsVector.call(this, {
...iteratee,
[paramName]: value
})
} else {
upstreamValues.push(value)
}
})
return upstreamValues
})
},
set: ({ values }) => values
}
export function resolveParamsVector (paramsVector) {
const visitor = paramsVectorActionsMap[paramsVector.type]
if (!visitor) {
throw new Error(`Unsupported function '${paramsVector.type}'.`)
}
return visitor.call(this, paramsVector)
}
// ===================================================================
export default class JobExecutor {
constructor (xo) {
this.xo = xo
@@ -76,30 +124,24 @@ export default class JobExecutor {
event: 'job.end',
runJobId
})
} catch (e) {
} catch (error) {
this._logger.error(`The execution of ${job.id} has failed.`, {
event: 'job.end',
runJobId,
error: e
error: serializeError(error)
})
throw error
}
}
async _execCall (job, runJobId) {
let paramsFlatVector
if (job.paramsVector) {
if (job.paramsVector.type === 'crossProduct') {
paramsFlatVector = _computeCrossProduct(job.paramsVector.items, productParams, this._extractValueCb)
} else {
throw new UnsupportedVectorType(job.paramsVector)
}
} else {
paramsFlatVector = [{}] // One call with no parameters
}
const { paramsVector } = job
const paramsFlatVector = paramsVector
? resolveParamsVector.call(this, paramsVector)
: [{}] // One call with no parameters
const connection = this.xo.createUserConnection()
const promises = []
connection.set('user_id', job.userId)
@@ -109,7 +151,7 @@ export default class JobExecutor {
calls: {}
}
forEach(paramsFlatVector, params => {
await Bluebird.map(paramsFlatVector, params => {
const runCallId = this._logger.notice(`Starting ${job.method} call. (${job.id})`, {
event: 'jobCall.start',
runJobId,
@@ -122,37 +164,40 @@ export default class JobExecutor {
params,
start: Date.now()
}
let promise = this.xo.callApiMethod(connection, job.method, assign({}, params))
if (job.timeout) {
promise = promise::timeout(job.timeout)
}
promises.push(
this.xo.api.call(connection, job.method, assign({}, params)).then(
value => {
this._logger.notice(`Call ${job.method} (${runCallId}) is a success. (${job.id})`, {
event: 'jobCall.end',
runJobId,
runCallId,
returnedValue: value
})
return promise.then(
value => {
this._logger.notice(`Call ${job.method} (${runCallId}) is a success. (${job.id})`, {
event: 'jobCall.end',
runJobId,
runCallId,
returnedValue: value
})
call.returnedValue = value
call.end = Date.now()
},
reason => {
this._logger.notice(`Call ${job.method} (${runCallId}) has failed. (${job.id})`, {
event: 'jobCall.end',
runJobId,
runCallId,
error: {...reason, message: reason.message}
})
call.returnedValue = value
call.end = Date.now()
},
reason => {
this._logger.notice(`Call ${job.method} (${runCallId}) has failed. (${job.id})`, {
event: 'jobCall.end',
runJobId,
runCallId,
error: serializeError(reason)
})
call.error = reason
call.end = Date.now()
}
)
call.error = reason
call.end = Date.now()
}
)
}, {
concurrency: 2
})
connection.close()
await Promise.all(promises)
execStatus.end = Date.now()
return execStatus

View File

@@ -1,71 +1,100 @@
/* eslint-env mocha */
/* eslint-env jest */
import {expect} from 'chai'
import leche from 'leche'
import { forEach } from 'lodash'
import { resolveParamsVector } from './job-executor'
import {
_computeCrossProduct,
productParams
} from './job-executor'
describe('resolveParamsVector', function () {
forEach({
'cross product with three sets': [
// Expected result.
[ { id: 3, value: 'foo', remote: 'local' },
{ id: 7, value: 'foo', remote: 'local' },
{ id: 10, value: 'foo', remote: 'local' },
{ id: 3, value: 'bar', remote: 'local' },
{ id: 7, value: 'bar', remote: 'local' },
{ id: 10, value: 'bar', remote: 'local' } ],
// Entry.
{
type: 'crossProduct',
items: [{
type: 'set',
values: [ { id: 3 }, { id: 7 }, { id: 10 } ]
}, {
type: 'set',
values: [ { value: 'foo' }, { value: 'bar' } ]
}, {
type: 'set',
values: [ { remote: 'local' } ]
}]
}
],
'cross product with `set` and `map`': [
// Expected result.
[
{ remote: 'local', id: 'vm:2' },
{ remote: 'smb', id: 'vm:2' }
],
describe('productParams', function () {
leche.withData({
'Two sets of one': [
{a: 1, b: 2}, {a: 1}, {b: 2}
],
'Two sets of two': [
{a: 1, b: 2, c: 3, d: 4}, {a: 1, b: 2}, {c: 3, d: 4}
],
'Three sets': [
{a: 1, b: 2, c: 3, d: 4, e: 5, f: 6}, {a: 1}, {b: 2, c: 3}, {d: 4, e: 5, f: 6}
],
'One set': [
{a: 1, b: 2}, {a: 1, b: 2}
],
'Empty set': [
{a: 1}, {a: 1}, {}
],
'All empty': [
{}, {}, {}
],
'No set': [
{}
// Entry.
{
type: 'crossProduct',
items: [{
type: 'set',
values: [ { remote: 'local' }, { remote: 'smb' } ]
}, {
type: 'map',
collection: {
type: 'fetchObjects',
pattern: {
$pool: { __or: [ 'pool:1', 'pool:8', 'pool:12' ] },
power_state: 'Running',
tags: [ 'foo' ],
type: 'VM'
}
},
iteratee: {
type: 'extractProperties',
mapping: { id: 'id' }
}
}]
},
// Context.
{
xo: {
getObjects: function () {
return [{
id: 'vm:1',
$pool: 'pool:1',
tags: [],
type: 'VM',
power_state: 'Halted'
}, {
id: 'vm:2',
$pool: 'pool:1',
tags: [ 'foo' ],
type: 'VM',
power_state: 'Running'
}, {
id: 'host:1',
type: 'host',
power_state: 'Running'
}, {
id: 'vm:3',
$pool: 'pool:8',
tags: [ 'foo' ],
type: 'VM',
power_state: 'Halted'
}]
}
}
}
]
}, function (resultSet, ...sets) {
it('Assembles all given param sets in on set', function () {
expect(productParams(...sets)).to.eql(resultSet)
})
})
})
describe('_computeCrossProduct', function () {
// Gives the sum of all args
const addTest = (...args) => args.reduce((prev, curr) => prev + curr, 0)
// Gives the product of all args
const multiplyTest = (...args) => args.reduce((prev, curr) => prev * curr, 1)
leche.withData({
'2 sets of 2 items to multiply': [
[10, 14, 15, 21], [[2, 3], [5, 7]], multiplyTest
],
'3 sets of 2 items to multiply': [
[110, 130, 154, 182, 165, 195, 231, 273], [[2, 3], [5, 7], [11, 13]], multiplyTest
],
'2 sets of 3 items to multiply': [
[14, 22, 26, 21, 33, 39, 35, 55, 65], [[2, 3, 5], [7, 11, 13]], multiplyTest
],
'2 sets of 2 items to add': [
[7, 9, 8, 10], [[2, 3], [5, 7]], addTest
],
'3 sets of 2 items to add': [
[18, 20, 20, 22, 19, 21, 21, 23], [[2, 3], [5, 7], [11, 13]], addTest
],
'2 sets of 3 items to add': [
[9, 13, 15, 10, 14, 16, 12, 16, 18], [[2, 3, 5], [7, 11, 13]], addTest
]
}, function (product, items, cb) {
it('Crosses sets of values with a crossProduct callback', function () {
expect(_computeCrossProduct(items, cb)).to.have.members(product)
}, ([ expectedResult, entry, context ], name) => {
describe(`with ${name}`, () => {
it('Resolves params vector', () => {
expect(resolveParamsVector.call(context, entry)).toEqual(expectedResult)
})
})
})
})

33
src/lvm.js Normal file
View File

@@ -0,0 +1,33 @@
import execa from 'execa'
import splitLines from 'split-lines'
import { createParser } from 'parse-pairs'
import { isArray, map } from 'lodash'
// ===================================================================
// Parser for LVM report lines: `--nameprefixes` makes LVM emit
// `LVM2_FIELD='value'` pairs; strip the `LVM2_` prefix and lower-case
// the key so it matches the field name the caller asked for.
const parse = createParser({
  keyTransform: key => key.slice(5).toLowerCase()
})

// Build a query helper around an LVM reporting command (`lvs`, `pvs`).
//
// The returned function takes the field name(s) to report plus any
// extra CLI arguments, runs the command with machine-friendly output
// flags (no header, no unit suffix, bytes), and resolves with one
// entry per output line:
// - `fields` is an array  -> an object mapping field name to value,
// - `fields` is a string  -> the bare value of that single field.
const makeFunction = command => (fields, ...args) => {
  const cliArgs = [
    '--noheading',
    '--nosuffix',
    '--nameprefixes',
    '--unbuffered',
    '--units',
    'b',
    '-o',
    String(fields),
    ...args
  ]

  // For a single field, unwrap the parsed pair object down to the value.
  const extractLine = isArray(fields)
    ? parse
    : line => parse(line)[fields]

  return execa.stdout(command, cliArgs).then(stdout =>
    map(splitLines(stdout), extractLine)
  )
}
export const lvs = makeFunction('lvs')
export const pvs = makeFunction('pvs')

48
src/math.js Normal file
View File

@@ -0,0 +1,48 @@
import assign from 'lodash/assign'
// Recursive worker for `combine`.
//
// Enumerates every combination that picks one element from each of the
// first `n` vectors, invoking `cb` with a freshly-built array for each.
// Elements appear in vector order (vectors[0] first) because each
// recursion level appends its own element after the deeper levels have
// seeded the array.
const _combine = (vectors, n, cb) => {
  if (n === 0) {
    return
  }

  const current = vectors[n - 1]

  if (n === 1) {
    // Innermost level: start a new combination array per element.
    for (const item of current) {
      cb([ item ]) // eslint-disable-line standard/no-callback-literal
    }
    return
  }

  for (const item of current) {
    _combine(vectors, n - 1, partial => {
      partial.push(item)
      cb(partial)
    })
  }
}

// Compute all combinations from vectors.
//
// Ex: combine([[2, 3], [5, 7]])
// => [ [ 2, 5 ], [ 3, 5 ], [ 2, 7 ], [ 3, 7 ] ]
export function combine (vectors) {
  return cb => _combine(vectors, vectors.length, cb)
}
// Merge the properties of an objects set in one object.
//
// Later objects win on key conflicts (`Object.assign` semantics).
// Uses the native `Object.assign` rather than the lodash import: the
// behavior is identical for plain-object merging and it drops an
// unnecessary dependency from this code path.
//
// Ex: mergeObjects([ { a: 1 }, { b: 2 } ]) => { a: 1, b: 2 }
export const mergeObjects = objects => Object.assign({}, ...objects)
// Compute a cross product between vectors.
//
// Every combination of one item per vector is collapsed into a single
// value with `mergeFn` (property merge by default) and handed to `cb`.
// Returns a thunk: nothing runs until the returned function is called
// with a consumer callback.
//
// Ex: crossProduct([ [ { a: 2 }, { b: 3 } ], [ { c: 5 }, { d: 7 } ] ] )
// => [ { a: 2, c: 5 }, { b: 3, c: 5 }, { a: 2, d: 7 }, { b: 3, d: 7 } ]
export function crossProduct (vectors, mergeFn = mergeObjects) {
  return cb => combine(vectors)(parts => cb(mergeFn(parts)))
}

74
src/math.spec.js Normal file
View File

@@ -0,0 +1,74 @@
/* eslint-env jest */
import { forEach } from 'lodash'
import { thunkToArray } from './utils'
import {
crossProduct,
mergeObjects
} from './math'
// Table-driven spec for `mergeObjects`: each entry maps a label to
// `[ expectedResult, ...inputSets ]`; the input sets are merged and the
// result deep-compared with the expected object.
describe('mergeObjects', function () {
  forEach({
    'Two sets of one': [
      {a: 1, b: 2}, {a: 1}, {b: 2}
    ],
    'Two sets of two': [
      {a: 1, b: 2, c: 3, d: 4}, {a: 1, b: 2}, {c: 3, d: 4}
    ],
    'Three sets': [
      {a: 1, b: 2, c: 3, d: 4, e: 5, f: 6}, {a: 1}, {b: 2, c: 3}, {d: 4, e: 5, f: 6}
    ],
    'One set': [
      {a: 1, b: 2}, {a: 1, b: 2}
    ],
    'Empty set': [
      {a: 1}, {a: 1}, {}
    ],
    'All empty': [
      {}, {}, {}
    ],
    // No input sets at all: the merge of nothing is the empty object.
    'No set': [
      {}
    ]
  }, ([ resultSet, ...sets ], name) => {
    describe(`with ${name}`, () => {
      it('Assembles all given param sets in on set', function () {
        expect(mergeObjects(sets)).toEqual(resultSet)
      })
    })
  })
})
// Table-driven spec for `crossProduct`: each entry maps a label to
// `[ expectedProducts, inputVectors, mergeFn ]`. The lazy product is
// materialized with `thunkToArray` and compared order-insensitively.
describe('crossProduct', function () {
  // Gives the sum of all args
  const addTest = args => args.reduce((prev, curr) => prev + curr, 0)
  // Gives the product of all args
  const multiplyTest = args => args.reduce((prev, curr) => prev * curr, 1)

  forEach({
    '2 sets of 2 items to multiply': [
      [10, 14, 15, 21], [[2, 3], [5, 7]], multiplyTest
    ],
    '3 sets of 2 items to multiply': [
      [110, 130, 154, 182, 165, 195, 231, 273], [[2, 3], [5, 7], [11, 13]], multiplyTest
    ],
    '2 sets of 3 items to multiply': [
      [14, 22, 26, 21, 33, 39, 35, 55, 65], [[2, 3, 5], [7, 11, 13]], multiplyTest
    ],
    '2 sets of 2 items to add': [
      [7, 9, 8, 10], [[2, 3], [5, 7]], addTest
    ],
    '3 sets of 2 items to add': [
      [18, 20, 20, 22, 19, 21, 21, 23], [[2, 3], [5, 7], [11, 13]], addTest
    ],
    '2 sets of 3 items to add': [
      [9, 13, 15, 10, 14, 16, 12, 16, 18], [[2, 3, 5], [7, 11, 13]], addTest
    ]
  }, ([ product, items, cb ], name) => {
    describe(`with ${name}`, () => {
      it('Crosses sets of values with a crossProduct callback', function () {
        // NOTE(review): `.sort()` without a comparator sorts numbers
        // lexicographically, but since BOTH sides are sorted the same
        // way the equality check stays consistent — confirm if exact
        // ordering ever becomes part of the contract.
        expect(thunkToArray(crossProduct(items, cb)).sort()).toEqual(product.sort())
      })
    })
  })
})

View File

@@ -1,8 +1,12 @@
import isEmpty from 'lodash/isEmpty'
import Collection from '../collection/redis'
import Model from '../model'
import { forEach } from '../utils'
import { parseProp } from './utils'
// ===================================================================
export default class Group extends Model {}
@@ -14,20 +18,16 @@ export class Groups extends Collection {
return Group
}
get idPrefix () {
return 'group:'
}
create (name) {
return this.add(new Group({
name,
users: '[]'
}))
return this.add(new Group({ name }))
}
async save (group) {
// Serializes.
group.users = JSON.stringify(group.users)
let tmp
group.users = isEmpty(tmp = group.users)
? undefined
: JSON.stringify(tmp)
return /* await */ this.update(group)
}
@@ -37,13 +37,7 @@ export class Groups extends Collection {
// Deserializes.
forEach(groups, group => {
const {users} = group
try {
group.users = JSON.parse(users)
} catch (error) {
console.warn('cannot parse group.users:', users)
group.users = []
}
group.users = parseProp('group', group, 'users', [])
})
return groups

View File

@@ -11,12 +11,7 @@ export class Jobs extends Collection {
return Job
}
get idPrefix () {
return 'job:'
}
async create (userId, job) {
job.userId = userId
async create (job) {
// Serializes.
job.paramsVector = JSON.stringify(job.paramsVector)
return /* await */ this.add(new Job(job))

View File

@@ -13,10 +13,6 @@ export class PluginsMetadata extends Collection {
return PluginMetadata
}
get idPrefix () {
return 'plugin-metadata:'
}
async save ({ id, autoload, configuration }) {
return /* await */ this.update({
id,

View File

@@ -13,10 +13,6 @@ export class Remotes extends Collection {
return Remote
}
get idPrefix () {
return 'remote-'
}
create (name, url) {
return this.add(new Remote({
name,

View File

@@ -11,10 +11,6 @@ export class Schedules extends Collection {
return Schedule
}
get idPrefix () {
return 'schedule:'
}
create (userId, job, cron, enabled, name = undefined, timezone = undefined) {
return this.add(new Schedule({
userId,

View File

@@ -1,5 +1,8 @@
import Collection from '../collection/redis'
import Model from '../model'
import { forEach } from '../utils'
import { parseProp } from './utils'
// ===================================================================
@@ -12,11 +15,26 @@ export class Servers extends Collection {
return Server
}
async create ({host, username, password, readOnly}) {
async create ({label, host, username, password, readOnly}) {
if (await this.exists({host})) {
throw new Error('server already exists')
}
return /* await */ this.add({host, username, password, readOnly})
return /* await */ this.add({label, host, username, password, readOnly})
}
async get (properties) {
const servers = await super.get(properties)
// Deserializes
forEach(servers, server => {
if (server.error) {
server.error = parseProp('server', server, 'error', '')
} else {
delete server.error
}
})
return servers
}
}

View File

@@ -4,6 +4,8 @@ import Collection from '../collection/redis'
import Model from '../model'
import { forEach } from '../utils'
import { parseProp } from './utils'
// ===================================================================
export default class User extends Model {}
@@ -14,32 +16,19 @@ User.prototype.default = {
// -------------------------------------------------------------------
const parseProp = (obj, name) => {
const value = obj[name]
if (value == null) {
return
}
try {
return JSON.parse(value)
} catch (error) {
console.warn('cannot parse user[%s] (%s):', name, value, error)
}
}
export class Users extends Collection {
get Model () {
return User
}
async create (email, properties = {}) {
async create (properties) {
const { email } = properties
// Avoid duplicates.
if (await this.exists({email})) {
throw new Error(`the user ${email} already exists`)
}
// Adds the email to the user's properties.
properties.email = email
// Create the user object.
const user = new User(properties)
@@ -50,12 +39,12 @@ export class Users extends Collection {
async save (user) {
// Serializes.
let tmp
if (!isEmpty(tmp = user.groups)) {
user.groups = JSON.stringify(tmp)
}
if (!isEmpty(tmp = user.preferences)) {
user.preferences = JSON.stringify(tmp)
}
user.groups = isEmpty(tmp = user.groups)
? undefined
: JSON.stringify(tmp)
user.preferences = isEmpty(tmp = user.preferences)
? undefined
: JSON.stringify(tmp)
return /* await */ this.update(user)
}
@@ -65,11 +54,8 @@ export class Users extends Collection {
// Deserializes
forEach(users, user => {
let tmp
user.groups = ((tmp = parseProp(user, 'groups')) && tmp.length)
? tmp
: undefined
user.preferences = parseProp(user, 'preferences')
user.groups = parseProp('user', user, 'groups', [])
user.preferences = parseProp('user', user, 'preferences', {})
})
return users

16
src/models/utils.js Normal file
View File

@@ -0,0 +1,16 @@
/**
 * Safely deserialize a JSON-encoded property of a persisted model.
 *
 * @param {string} type - model type name (context only; not used here)
 * @param {Object} obj - raw model record
 * @param {string} name - name of the property to parse
 * @param {*} defaultValue - returned when the property is absent, empty
 *   or contains invalid JSON
 * @returns {*} the parsed value, or `defaultValue`
 */
export const parseProp = (type, obj, name, defaultValue) => {
  const serialized = obj[name]

  // Missing (`null`/`undefined`) or empty-string values are trivial,
  // expected cases: fall back silently.
  if (serialized == null || serialized === '') {
    return defaultValue
  }

  try {
    return JSON.parse(serialized)
  } catch (_) {
    // Deliberately swallowed: this can happen a lot and logging every
    // occurrence would fill up the log files.
    return defaultValue
  }
}

42
src/node_modules/constant-stream.js generated vendored Normal file
View File

@@ -0,0 +1,42 @@
import from2 from 'from2'
// Create a readable stream which emits `data` repeated `n` times.
//
// `data` may be a Buffer or anything `Buffer.from` accepts. Backed by
// `from2`, chunks are produced lazily on demand, so large amounts of
// constant data can be streamed without allocating them up front.
const constantStream = (data, n = 1) => {
  if (!Buffer.isBuffer(data)) {
    data = Buffer.from(data)
  }

  const { length } = data
  if (!length) {
    throw new Error('data should not be empty')
  }

  // From here on, `n` counts the remaining bytes to emit.
  n *= length

  // Size of the buffer currently used to serve reads; grown below when
  // consumers request chunks much larger than one repetition.
  let currentLength = length
  return from2((size, next) => {
    if (n <= 0) {
      // Everything emitted: signal end-of-stream.
      return next(null, null)
    }

    if (n < size) {
      size = n
    }

    if (size < currentLength) {
      // Serve a slice rounded down to a whole number of `data`
      // repetitions (but at least one repetition).
      const m = Math.floor(size / length) * length || length
      n -= m
      return next(null, data.slice(0, m))
    }

    // if more than twice the data length is requested, repeat the data
    if (size > currentLength * 2) {
      currentLength = Math.floor(size / length) * length
      data = Buffer.alloc(currentLength, data)
    }

    n -= currentLength
    return next(null, data)
  })
}
export { constantStream as default }

View File

@@ -56,7 +56,7 @@ export default function proxyConsole (ws, vmConsole, sessionId) {
.on('message', data => {
if (!closed) {
// Decode from base 64.
socket.write(new Buffer(data, 'base64'))
socket.write(Buffer.from(data, 'base64'))
}
})
.on('close', () => {

View File

@@ -112,48 +112,64 @@ export default class RemoteHandlerAbstract {
throw new Error('Not implemented')
}
async createReadStream (file, {
createReadStream (file, {
checksum = false,
ignoreMissingChecksum = false,
...options
} = {}) {
const streamP = this._createReadStream(file, options).then(async stream => {
await eventToPromise(stream, 'readable')
const streamP = this._createReadStream(file, options).then(stream => {
// detect early errors
let promise = eventToPromise(stream, 'readable')
if (stream.length === undefined) {
stream.length = await this.getSize(file)::pCatch(noop)
// try to add the length prop if missing and not a range stream
if (
stream.length === undefined &&
options.end === undefined &&
options.start === undefined
) {
promise = Promise.all([ promise, this.getSize(file).then(size => {
stream.length = size
}, noop) ])
}
return stream
return promise.then(() => stream)
})
if (!checksum) {
return streamP
}
try {
checksum = await this.readFile(`${file}.checksum`)
} catch (error) {
if (error.code === 'ENOENT' && ignoreMissingChecksum) {
return streamP
// avoid a unhandled rejection warning
streamP.catch(noop)
return this.readFile(`${file}.checksum`).then(
checksum => streamP.then(stream => {
const { length } = stream
stream = validChecksumOfReadStream(stream, String(checksum).trim())
stream.length = length
return stream
}),
error => {
if (ignoreMissingChecksum && error && error.code === 'ENOENT') {
return streamP
}
throw error
}
throw error
}
let stream = await streamP
const { length } = stream
stream = validChecksumOfReadStream(stream, checksum.toString())
stream.length = length
return stream
)
}
async _createReadStream (file, options) {
throw new Error('Not implemented')
}
async refreshChecksum (path) {
const stream = addChecksumToReadStream(await this.createReadStream(path))
stream.resume() // start reading the whole file
const checksum = await stream.checksum
await this.outputFile(`${path}.checksum`, checksum)
}
async createOutputStream (file, {
checksum = false,
...options

View File

@@ -86,5 +86,4 @@ export default class LocalHandler extends RemoteHandlerAbstract {
const stats = await fs.stat(this._getFilePath(file))
return stats.size
}
}

View File

@@ -12,7 +12,7 @@ export default class NfsHandler extends LocalHandler {
}
_getRealPath () {
return `/tmp/xo-server/mounts/${this._remote.id}`
return `/run/xo-server/mounts/${this._remote.id}`
}
async _loadRealMounts () {
@@ -79,6 +79,6 @@ export default class NfsHandler extends LocalHandler {
}
async _umount (remote) {
await execa('umount', [remote.path])
await execa('umount', [this._getRealPath()])
}
}

View File

@@ -1,5 +1,3 @@
import paramsVector from 'job/params-vector'
export default {
$schema: 'http://json-schema.org/draft-04/schema#',
type: 'object',
@@ -27,7 +25,13 @@ export default {
type: 'string',
description: 'called method'
},
paramsVector
paramsVector: {
type: 'object'
},
timeout: {
type: 'number',
description: 'number of milliseconds after which the job is considered failed'
}
},
required: [
'type',

View File

@@ -1,59 +0,0 @@
export default {
$schema: 'http://json-schema.org/draft-04/schema#',
type: 'object',
properties: {
type: {
enum: ['crossProduct']
},
items: {
type: 'array',
description: 'vector of values to multiply with others vectors',
items: {
type: 'object',
properties: {
type: {
enum: ['set']
},
values: {
type: 'array',
items: {
type: 'object'
},
minItems: 1
}
},
required: [
'type',
'values'
]
},
minItems: 1
}
},
required: [
'type',
'items'
]
}
/* Example:
{
"type": "cross product",
"items": [
{
"type": "set",
"values": [
{"id": 0, "name": "snapshost de 0"},
{"id": 1, "name": "snapshost de 1"}
],
},
{
"type": "set",
"values": [
{"force": true}
]
}
]
}
*/

View File

@@ -20,7 +20,7 @@ export default {
},
unloadable: {
type: 'boolean',
default: 'true',
default: true,
description: 'whether or not this plugin can be unloaded'
},
configuration: {
@@ -30,6 +30,14 @@ export default {
configurationSchema: {
$ref: 'http://json-schema.org/draft-04/schema#',
description: 'configuration schema for this plugin (not present if not configurable)'
},
testable: {
type: 'boolean',
description: 'whether or not this plugin can be tested'
},
testSchema: {
$ref: 'http://json-schema.org/draft-04/schema#',
description: 'test schema for this plugin'
}
},
required: [

View File

@@ -0,0 +1,44 @@
import assert from 'assert'
// Pipe a stream's content into a pre-allocated section of `buffer`.
//
// Data is written into `buffer[offset, end)`. Resolves with the number
// of bytes written once the stream ends; rejects if the stream errors
// or if it produces more data than the target range can hold.
const streamToExistingBuffer = (
  stream,
  buffer,
  offset = 0,
  end = buffer.length
) => new Promise((resolve, reject) => {
  assert(offset >= 0)
  assert(end > offset)
  assert(end <= buffer.length)

  // Next write position within `buffer`.
  let cursor = offset

  const onData = chunk => {
    const writeAt = cursor
    cursor += chunk.length
    if (cursor > end) {
      // Overflow: fail before touching the buffer with this chunk.
      onError(new Error('too much data'))
      return
    }
    chunk.copy(buffer, writeAt)
  }
  const onEnd = () => {
    resolve(cursor - offset)
    removeListeners()
  }
  const onError = error => {
    reject(error)
    removeListeners()
  }
  const removeListeners = () => {
    stream.removeListener('data', onData)
    stream.removeListener('end', onEnd)
    stream.removeListener('error', onError)
  }

  stream
    .on('data', onData)
    .on('end', onEnd)
    .on('error', onError)
})
export { streamToExistingBuffer as default }

View File

@@ -0,0 +1,20 @@
/* eslint-env jest */
import { createReadStream, readFile } from 'fs'
import { fromCallback } from 'promise-toolbox'
import streamToExistingBuffer from './stream-to-existing-buffer'
// Reads this very spec file both through the function under test and
// through `fs.readFile`, then compares the results.
describe('streamToExistingBuffer()', () => {
  it('read the content of a stream in a buffer', async () => {
    const stream = createReadStream(__filename)
    const expected = await fromCallback(cb => readFile(__filename, 'utf-8', cb))

    // One extra leading byte holding a sentinel proves the `offset`
    // parameter is honored and nothing before it is overwritten.
    const buf = Buffer.allocUnsafe(expected.length + 1)
    buf[0] = 'A'.charCodeAt()
    await streamToExistingBuffer(stream, buf, 1)

    // NOTE(review): assumes this file is pure ASCII so the character
    // count of `expected` equals its byte length — confirm if non-ASCII
    // text is ever added to this spec.
    expect(String(buf)).toBe(`A${expected}`)
  })
})

View File

@@ -0,0 +1,27 @@
// Collect a whole stream into a single freshly-allocated Buffer.
//
// Resolves with the concatenated content once the stream ends; rejects
// on stream error. Chunks are buffered as they arrive and concatenated
// exactly once at the end.
const streamToNewBuffer = stream => new Promise((resolve, reject) => {
  const chunks = []
  let total = 0

  const onData = chunk => {
    chunks.push(chunk)
    total += chunk.length
  }
  const onEnd = () => {
    // `total` is passed so `Buffer.concat` skips re-measuring chunks.
    resolve(Buffer.concat(chunks, total))
    removeListeners()
  }
  const onError = error => {
    reject(error)
    removeListeners()
  }
  const removeListeners = () => {
    stream.removeListener('data', onData)
    stream.removeListener('end', onEnd)
    stream.removeListener('error', onError)
  }

  stream
    .on('data', onData)
    .on('end', onEnd)
    .on('error', onError)
})
export { streamToNewBuffer as default }

View File

@@ -1,7 +1,6 @@
import base64url from 'base64url'
import eventToPromise from 'event-to-promise'
import forEach from 'lodash/forEach'
import getStream from 'get-stream'
import has from 'lodash/has'
import highland from 'highland'
import humanFormat from 'human-format'
@@ -11,17 +10,24 @@ import isString from 'lodash/isString'
import keys from 'lodash/keys'
import kindOf from 'kindof'
import multiKeyHashInt from 'multikey-hash'
import pick from 'lodash/pick'
import tmp from 'tmp'
import xml2js from 'xml2js'
import { resolve } from 'path'
// Moment timezone can be loaded only one time, it's a workaround to load
// the latest version because cron module uses an old version of moment which
// does not implement `guess` function for example.
import 'moment-timezone'
import through2 from 'through2'
import { CronJob } from 'cron'
import { Readable } from 'stream'
import { utcFormat, utcParse } from 'd3-time-format'
import {
all as pAll,
defer,
fromCallback,
promisify,
reflect as pReflect
} from 'promise-toolbox'
@@ -29,9 +35,6 @@ import {
createHash,
randomBytes
} from 'crypto'
import { Readable } from 'stream'
import through2 from 'through2'
import {utcFormat as d3TimeFormat} from 'd3-time-format'
// ===================================================================
@@ -53,13 +56,13 @@ export function bufferToStream (buf) {
return stream
}
export const streamToBuffer = getStream.buffer
export streamToBuffer from './stream-to-new-buffer'
// -------------------------------------------------------------------
export function camelToSnakeCase (string) {
return string.replace(
/([a-z])([A-Z])/g,
/([a-z0-9])([A-Z])/g,
(_, prevChar, currChar) => `${prevChar}_${currChar.toLowerCase()}`
)
}
@@ -73,6 +76,27 @@ export const createRawObject = Object.create
// -------------------------------------------------------------------
// Only works with string items!
export const diffItems = (coll1, coll2) => {
const removed = createRawObject()
forEach(coll2, value => {
removed[value] = true
})
const added = []
forEach(coll1, value => {
if (value in removed) {
delete removed[value]
} else {
added.push(value)
}
})
return [ added, keys(removed) ]
}
// -------------------------------------------------------------------
const ALGORITHM_TO_ID = {
md5: '1',
sha256: '5',
@@ -177,8 +201,15 @@ export function extractProperty (obj, prop) {
// -------------------------------------------------------------------
export const getUserPublicProperties = user => pick(
user.properties || user,
'id', 'email', 'groups', 'permission', 'preferences', 'provider'
)
// -------------------------------------------------------------------
export const getPseudoRandomBytes = n => {
const bytes = new Buffer(n)
const bytes = Buffer.allocUnsafe(n)
const odd = n & 1
for (let i = 0, m = n - odd; i < m; i += 2) {
@@ -197,7 +228,7 @@ export const generateUnsecureToken = (n = 32) => base64url(getPseudoRandomBytes(
// Generate a secure random Base64 string.
export const generateToken = (randomBytes => {
return (n = 32) => randomBytes(n).then(base64url)
})(randomBytes::promisify())
})(promisify(randomBytes))
// -------------------------------------------------------------------
@@ -362,7 +393,9 @@ export const popProperty = obj => {
// Format a date in ISO 8601 in a safe way to be used in filenames
// (even on Windows).
export const safeDateFormat = d3TimeFormat('%Y%m%dT%H%M%SZ')
export const safeDateFormat = utcFormat('%Y%m%dT%H%M%SZ')
export const safeDateParse = utcParse('%Y%m%dT%H%M%SZ')
// -------------------------------------------------------------------
@@ -421,7 +454,7 @@ export function map (
export const multiKeyHash = (...args) => new Promise(resolve => {
const hash = multiKeyHashInt(...args)
const buf = new Buffer(4)
const buf = Buffer.allocUnsafe(4)
buf.writeUInt32LE(hash, 0)
resolve(base64url(buf))
@@ -429,6 +462,11 @@ export const multiKeyHash = (...args) => new Promise(resolve => {
// -------------------------------------------------------------------
export const resolveSubpath = (root, path) =>
resolve(root, `./${resolve('/', path)}`)
// -------------------------------------------------------------------
export const streamToArray = (stream, {
filter,
mapper
@@ -460,7 +498,7 @@ export const scheduleFn = (cronTime, fn, timeZone) => {
try {
await fn()
} catch (error) {
console.error('[WARN] scheduled function:', error && error.stack || error)
console.error('[WARN] scheduled function:', (error && error.stack) || error)
} finally {
running = false
}
@@ -476,5 +514,68 @@ export const scheduleFn = (cronTime, fn, timeZone) => {
// -------------------------------------------------------------------
// Create a serializable object from an error.
export const serializeError = error => ({
message: error.message,
stack: error.stack,
...error // Copy enumerable properties.
})
// -------------------------------------------------------------------
// Create an array which contains the results of one thunk function.
// Only works with synchronous thunks.
export const thunkToArray = thunk => {
const values = []
thunk(::values.push)
return values
}
// -------------------------------------------------------------------
// Creates a new function which throws an error.
//
// ```js
// promise.catch(throwFn('an error has occured'))
//
// function foo (param = throwFn('param is required')()) {}
// ```
export const throwFn = error => () => {
throw (
isString(error)
? new Error(error)
: error
)
}
// -------------------------------------------------------------------
export const tmpDir = () => fromCallback(cb => tmp.dir(cb))
// -------------------------------------------------------------------
// Wrap a value in a function.
export const wrap = value => () => value
// -------------------------------------------------------------------
export const mapFilter = (collection, iteratee) => {
const result = []
forEach(collection, (...args) => {
const value = iteratee(...args)
if (value) {
result.push(value)
}
})
return result
}
// -------------------------------------------------------------------
export const splitFirst = (string, separator) => {
const i = string.indexOf(separator)
return i === -1 ? null : [
string.slice(0, i),
string.slice(i + separator.length)
]
}

View File

@@ -1,12 +1,9 @@
/* eslint-env mocha */
import expect from 'must'
// ===================================================================
/* eslint-env jest */
import {
camelToSnakeCase,
createRawObject,
diffItems,
ensureArray,
extractProperty,
formatXml,
@@ -19,57 +16,69 @@ import {
describe('camelToSnakeCase()', function () {
it('converts a string from camelCase to snake_case', function () {
expect(camelToSnakeCase('fooBar')).to.equal('foo_bar')
expect(camelToSnakeCase('fooBar')).toBe('foo_bar')
expect(camelToSnakeCase('ipv4Allowed')).toBe('ipv4_allowed')
})
it('does not alter snake_case strings', function () {
expect(camelToSnakeCase('foo_bar')).to.equal('foo_bar')
expect(camelToSnakeCase('foo_bar')).toBe('foo_bar')
expect(camelToSnakeCase('ipv4_allowed')).toBe('ipv4_allowed')
})
it('does not alter upper case letters expect those from the camelCase', function () {
expect(camelToSnakeCase('fooBar_BAZ')).to.equal('foo_bar_BAZ')
expect(camelToSnakeCase('fooBar_BAZ')).toBe('foo_bar_BAZ')
})
})
// -------------------------------------------------------------------
describe('createRawObject()', () => {
it('returns an object', () => {
expect(createRawObject()).to.be.an.object()
})
it('returns an empty object', () => {
expect(createRawObject()).to.be.empty()
expect(createRawObject()).toEqual({})
})
it('creates a new object each time', () => {
expect(createRawObject()).to.not.equal(createRawObject())
expect(createRawObject()).not.toBe(createRawObject())
})
if (Object.getPrototypeOf) {
it('creates an object without a prototype', () => {
expect(Object.getPrototypeOf(createRawObject())).to.be.null()
expect(Object.getPrototypeOf(createRawObject())).toBe(null)
})
}
})
// -------------------------------------------------------------------
describe('diffItems', () => {
it('computes the added/removed items between 2 iterables', () => {
expect(diffItems(
['foo', 'bar'],
['baz', 'foo']
)).toEqual([
['bar'],
['baz']
])
})
})
// -------------------------------------------------------------------
describe('ensureArray()', function () {
it('wrap the value in an array', function () {
const value = 'foo'
expect(ensureArray(value)).to.eql([value])
expect(ensureArray(value)).toEqual([value])
})
it('returns an empty array for undefined', function () {
expect(ensureArray(undefined)).to.eql([])
expect(ensureArray(undefined)).toEqual([])
})
it('returns the object itself if is already an array', function () {
const array = ['foo', 'bar', 'baz']
expect(ensureArray(array)).to.equal(array)
expect(ensureArray(array)).toBe(array)
})
})
@@ -80,15 +89,15 @@ describe('extractProperty()', function () {
const value = {}
const obj = { prop: value }
expect(extractProperty(obj, 'prop')).to.equal(value)
expect(extractProperty(obj, 'prop')).toBe(value)
})
it('removes the property from the object', function () {
const value = {}
const obj = { prop: value }
expect(extractProperty(obj, 'prop')).to.equal(value)
expect(obj).to.not.have.property('prop')
expect(extractProperty(obj, 'prop')).toBe(value)
expect(obj.prop).not.toBeDefined()
})
})
@@ -103,7 +112,7 @@ describe('formatXml()', function () {
{$: {baz: 'plip'}}
]
}
})).to.equal(`<foo>
})).toBe(`<foo>
<bar baz="plop"/>
<bar baz="plip"/>
</foo>`)
@@ -114,7 +123,7 @@ describe('formatXml()', function () {
describe('generateToken()', () => {
it('generates a string', async () => {
expect(await generateToken()).to.be.a.string()
expect(typeof await generateToken()).toBe('string')
})
})
@@ -122,21 +131,21 @@ describe('generateToken()', () => {
describe('parseSize()', function () {
it('parses a human size', function () {
expect(parseSize('1G')).to.equal(1e9)
expect(parseSize('1G')).toBe(1e9)
})
it('returns the parameter if already a number', function () {
expect(parseSize(1e6)).to.equal(1e6)
expect(parseSize(1e6)).toBe(1e6)
})
it('throws if the string cannot be parsed', function () {
expect(function () {
parseSize('foo')
}).to.throw()
}).toThrow()
})
it('supports the B unit as suffix', function () {
expect(parseSize('3MB')).to.equal(3e6)
expect(parseSize('3MB')).toBe(3e6)
})
})
@@ -144,6 +153,7 @@ describe('parseSize()', function () {
describe('pSettle()', () => {
it('works with arrays', async () => {
const rejection = 'fatality'
const [
status1,
status2,
@@ -151,27 +161,29 @@ describe('pSettle()', () => {
] = await pSettle([
Promise.resolve(42),
Math.PI,
Promise.reject('fatality')
Promise.reject(rejection)
])
expect(status1.isRejected()).to.equal(false)
expect(status2.isRejected()).to.equal(false)
expect(status3.isRejected()).to.equal(true)
expect(status1.isRejected()).toBe(false)
expect(status2.isRejected()).toBe(false)
expect(status3.isRejected()).toBe(true)
expect(status1.isFulfilled()).to.equal(true)
expect(status2.isFulfilled()).to.equal(true)
expect(status3.isFulfilled()).to.equal(false)
expect(status1.isFulfilled()).toBe(true)
expect(status2.isFulfilled()).toBe(true)
expect(status3.isFulfilled()).toBe(false)
expect(status1.value()).to.equal(42)
expect(status2.value()).to.equal(Math.PI)
expect(::status3.value).to.throw()
expect(status1.value()).toBe(42)
expect(status2.value()).toBe(Math.PI)
expect(::status3.value).toThrow()
expect(::status1.reason).to.throw()
expect(::status2.reason).to.throw()
expect(status3.reason()).to.equal('fatality')
expect(::status1.reason).toThrow()
expect(::status2.reason).toThrow()
expect(status3.reason()).toBe(rejection)
})
it('works with objects', async () => {
const rejection = 'fatality'
const {
a: status1,
b: status2,
@@ -179,23 +191,23 @@ describe('pSettle()', () => {
} = await pSettle({
a: Promise.resolve(42),
b: Math.PI,
c: Promise.reject('fatality')
c: Promise.reject(rejection)
})
expect(status1.isRejected()).to.equal(false)
expect(status2.isRejected()).to.equal(false)
expect(status3.isRejected()).to.equal(true)
expect(status1.isRejected()).toBe(false)
expect(status2.isRejected()).toBe(false)
expect(status3.isRejected()).toBe(true)
expect(status1.isFulfilled()).to.equal(true)
expect(status2.isFulfilled()).to.equal(true)
expect(status3.isFulfilled()).to.equal(false)
expect(status1.isFulfilled()).toBe(true)
expect(status2.isFulfilled()).toBe(true)
expect(status3.isFulfilled()).toBe(false)
expect(status1.value()).to.equal(42)
expect(status2.value()).to.equal(Math.PI)
expect(::status3.value).to.throw()
expect(status1.value()).toBe(42)
expect(status2.value()).toBe(Math.PI)
expect(::status3.value).toThrow()
expect(::status1.reason).to.throw()
expect(::status2.reason).to.throw()
expect(status3.reason()).to.equal('fatality')
expect(::status1.reason).toThrow()
expect(::status2.reason).toThrow()
expect(status3.reason()).toBe(rejection)
})
})

View File

@@ -1,4 +1,10 @@
import fu from 'struct-fu'
// TODO: remove once completely merged in vhd.js
import assert from 'assert'
import constantStream from 'constant-stream'
import eventToPromise from 'event-to-promise'
import fu from '@nraynaud/struct-fu'
import isEqual from 'lodash/isEqual'
import {
noop,
@@ -39,6 +45,10 @@ const HARD_DISK_TYPE_DIFFERENCING = 4 // Delta backup.
const BLOCK_UNUSED = 0xFFFFFFFF
const BIT_MASK = 0x80
// unused block as buffer containing a uint32BE
const BUF_BLOCK_UNUSED = Buffer.allocUnsafe(VHD_ENTRY_SIZE)
BUF_BLOCK_UNUSED.writeUInt32BE(BLOCK_UNUSED, 0)
// ===================================================================
const fuFooter = fu.struct([
@@ -91,7 +101,7 @@ const fuHeader = fu.struct([
fu.uint8('parentUuid', 16),
fu.uint32('parentTimestamp'),
fu.uint32('reserved1'),
fu.char('parentUnicodeName', 512),
fu.char16be('parentUnicodeName', 512),
fu.struct('parentLocatorEntry', [
fu.uint32('platformCode'),
fu.uint32('platformDataSpace'),
@@ -144,40 +154,28 @@ const unpackField = (field, buf) => {
}
// ===================================================================
// Returns the checksum of a raw footer.
// The raw footer is altered with the new sum.
function checksumFooter (rawFooter) {
const checksumField = fuFooter.fields.checksum
// Returns the checksum of a raw struct.
// The raw struct (footer or header) is altered with the new sum.
function checksumStruct (rawStruct, struct) {
const checksumField = struct.fields.checksum
let sum = 0
// Reset current sum.
packField(checksumField, 0, rawFooter)
packField(checksumField, 0, rawStruct)
for (let i = 0; i < VHD_FOOTER_SIZE; i++) {
sum = (sum + rawFooter[i]) & 0xFFFFFFFF
for (let i = 0, n = struct.size; i < n; i++) {
sum = (sum + rawStruct[i]) & 0xFFFFFFFF
}
sum = 0xFFFFFFFF - sum
// Write new sum.
packField(checksumField, sum, rawFooter)
packField(checksumField, sum, rawStruct)
return sum
}
function getParentLocatorSize (parentLocatorEntry) {
const { platformDataSpace } = parentLocatorEntry
if (platformDataSpace < VHD_SECTOR_SIZE) {
return sectorsToBytes(platformDataSpace)
}
return (platformDataSpace % VHD_SECTOR_SIZE === 0)
? platformDataSpace
: 0
}
// ===================================================================
class Vhd {
@@ -190,6 +188,17 @@ class Vhd {
// Read functions.
// =================================================================
_readStream (start, n) {
return this._handler.createReadStream(this._path, {
start,
end: start + n - 1 // end is inclusive
})
}
_read (start, n) {
return this._readStream(start, n).then(streamToBuffer)
}
// Returns the first address after metadata. (In bytes)
getEndOfHeaders () {
const { header } = this
@@ -207,10 +216,10 @@ class Vhd {
const entry = header.parentLocatorEntry[i]
if (entry.platformCode !== VHD_PLATFORM_CODE_NONE) {
const dataOffset = uint32ToUint64(entry.platformDataOffset)
// Max(end, locator end)
end = Math.max(end, dataOffset + getParentLocatorSize(entry))
end = Math.max(end,
uint32ToUint64(entry.platformDataOffset) +
sectorsToBytes(entry.platformDataSpace)
)
}
}
@@ -221,17 +230,15 @@ class Vhd {
// Returns the first sector after data.
getEndOfData () {
let end = Math.floor(this.getEndOfHeaders() / VHD_SECTOR_SIZE)
let end = Math.ceil(this.getEndOfHeaders() / VHD_SECTOR_SIZE)
const fullBlockSize = this.sectorsOfBitmap + this.sectorsPerBlock
const { maxTableEntries } = this.header
for (let i = 0; i < maxTableEntries; i++) {
let blockAddr = this.readAllocationTableEntry(i)
const blockAddr = this._getBatEntry(i)
if (blockAddr !== BLOCK_UNUSED) {
// Compute next block address.
blockAddr += this.sectorsPerBlock + this.sectorsOfBitmap
end = Math.max(end, blockAddr)
end = Math.max(end, blockAddr + fullBlockSize)
}
}
@@ -240,24 +247,12 @@ class Vhd {
return sectorsToBytes(end)
}
// Returns the start position of the vhd footer.
// The real footer, not the copy at the beginning of the vhd file.
async getFooterStart () {
const stats = await this._handler.getSize(this._path)
return stats.size - VHD_FOOTER_SIZE
}
// Get the beginning (footer + header) of a vhd file.
async readHeaderAndFooter () {
const buf = await streamToBuffer(
await this._handler.createReadStream(this._path, {
start: 0,
end: VHD_FOOTER_SIZE + VHD_HEADER_SIZE - 1
})
)
const buf = await this._read(0, VHD_FOOTER_SIZE + VHD_HEADER_SIZE)
const sum = unpackField(fuFooter.fields.checksum, buf)
const sumToTest = checksumFooter(buf)
const sumToTest = checksumStruct(buf, fuFooter)
// Checksum child & parent.
if (sumToTest !== sum) {
@@ -297,119 +292,176 @@ class Vhd {
sectorsRoundUpNoZero(header.maxTableEntries * VHD_ENTRY_SIZE)
)
this.blockTable = await streamToBuffer(
await this._handler.createReadStream(this._path, {
start: offset,
end: offset + size - 1
})
)
this.blockTable = await this._read(offset, size)
}
// Returns the address block at the entry location of one table.
readAllocationTableEntry (entry) {
return this.blockTable.readUInt32BE(entry * VHD_ENTRY_SIZE)
// return the first sector (bitmap) of a block
_getBatEntry (block) {
return this.blockTable.readUInt32BE(block * VHD_ENTRY_SIZE)
}
// Returns the data content of a block. (Not the bitmap !)
async readBlockData (blockAddr) {
const { blockSize } = this.header
const handler = this._handler
const path = this._path
const blockDataAddr = sectorsToBytes(blockAddr + this.sectorsOfBitmap)
const footerStart = await this.getFooterStart()
const isPadded = footerStart < (blockDataAddr + blockSize)
// Size ot the current block in the vhd file.
const size = isPadded ? (footerStart - blockDataAddr) : sectorsToBytes(this.sectorsPerBlock)
debug(`Read block data at: ${blockDataAddr}. (size=${size})`)
const buf = await streamToBuffer(
await handler.createReadStream(path, {
start: blockDataAddr,
end: blockDataAddr + size - 1
})
)
// Padded by zero !
if (isPadded) {
return Buffer.concat([buf, new Buffer(blockSize - size).fill(0)])
_readBlock (blockId, onlyBitmap = false) {
const blockAddr = this._getBatEntry(blockId)
if (blockAddr === BLOCK_UNUSED) {
throw new Error(`no such block ${blockId}`)
}
return buf
return this._read(
sectorsToBytes(blockAddr),
onlyBitmap ? this.bitmapSize : this.fullBlockSize
).then(buf => onlyBitmap
? { bitmap: buf }
: {
bitmap: buf.slice(0, this.bitmapSize),
data: buf.slice(this.bitmapSize)
}
)
}
// Returns a buffer that contains the bitmap of a block.
// get the identifiers and first sectors of the first and last block
// in the file
//
// TODO: merge with readBlockData().
async readBlockBitmap (blockAddr) {
const { bitmapSize } = this
const offset = sectorsToBytes(blockAddr)
// return undefined if none
_getFirstAndLastBlocks () {
const n = this.header.maxTableEntries
const bat = this.blockTable
let i = 0
let j = 0
let first, firstSector, last, lastSector
debug(`Read bitmap at: ${offset}. (size=${bitmapSize})`)
// get first allocated block for initialization
while ((firstSector = bat.readUInt32BE(j)) === BLOCK_UNUSED) {
i += 1
j += VHD_ENTRY_SIZE
return streamToBuffer(
await this._handler.createReadStream(this._path, {
start: offset,
end: offset + bitmapSize - 1
})
)
if (i === n) {
return
}
}
lastSector = firstSector
first = last = i
while (i < n) {
const sector = bat.readUInt32BE(j)
if (sector !== BLOCK_UNUSED) {
if (sector < firstSector) {
first = i
firstSector = sector
} else if (sector > lastSector) {
last = i
lastSector = sector
}
}
i += 1
j += VHD_ENTRY_SIZE
}
return { first, firstSector, last, lastSector }
}
// =================================================================
// Write functions.
// =================================================================
// Write a buffer at a given position in a vhd file.
async _write (buffer, offset) {
// Write a buffer/stream at a given position in a vhd file.
_write (data, offset) {
debug(`_write offset=${offset} size=${Buffer.isBuffer(data) ? data.length : '???'}`)
// TODO: could probably be merged in remote handlers.
return this._handler.createOutputStream(this._path, {
start: offset,
flags: 'r+'
}).then(stream => new Promise((resolve, reject) => {
stream.on('error', reject)
stream.write(buffer, () => {
stream.end()
resolve()
})
}))
flags: 'r+',
start: offset
}).then(
Buffer.isBuffer(data)
? stream => new Promise((resolve, reject) => {
stream.on('error', reject)
stream.end(data, resolve)
})
: stream => eventToPromise(data.pipe(stream), 'finish')
)
}
// Write an entry in the allocation table.
writeAllocationTableEntry (entry, value) {
this.blockTable.writeUInt32BE(value, entry * VHD_ENTRY_SIZE)
async ensureBatSize (size) {
const { header } = this
const prevMaxTableEntries = header.maxTableEntries
if (prevMaxTableEntries >= size) {
return
}
const tableOffset = uint32ToUint64(header.tableOffset)
const { first, firstSector, lastSector } = this._getFirstAndLastBlocks()
// extend BAT
const maxTableEntries = header.maxTableEntries = size
const batSize = maxTableEntries * VHD_ENTRY_SIZE
const prevBat = this.blockTable
const bat = this.blockTable = Buffer.allocUnsafe(batSize)
prevBat.copy(bat)
bat.fill(BUF_BLOCK_UNUSED, prevBat.length)
debug(`ensureBatSize: extend in memory BAT ${prevMaxTableEntries} -> ${maxTableEntries}`)
const extendBat = () => {
debug(`ensureBatSize: extend in file BAT ${prevMaxTableEntries} -> ${maxTableEntries}`)
return this._write(
constantStream(BUF_BLOCK_UNUSED, maxTableEntries - prevMaxTableEntries),
tableOffset + prevBat.length
)
}
if (tableOffset + batSize < sectorsToBytes(firstSector)) {
return Promise.all([
extendBat(),
this.writeHeader()
])
}
const { fullBlockSize } = this
const newFirstSector = lastSector + fullBlockSize / VHD_SECTOR_SIZE
debug(`ensureBatSize: move first block ${firstSector} -> ${newFirstSector}`)
return Promise.all([
// copy the first block at the end
this._readStream(sectorsToBytes(firstSector), fullBlockSize).then(stream =>
this._write(stream, sectorsToBytes(newFirstSector))
).then(extendBat),
this._setBatEntry(first, newFirstSector),
this.writeHeader(),
this.writeFooter()
])
}
// set the first sector (bitmap) of a block
_setBatEntry (block, blockSector) {
const i = block * VHD_ENTRY_SIZE
const { blockTable } = this
blockTable.writeUInt32BE(blockSector, i)
return this._write(
blockTable.slice(i, i + VHD_ENTRY_SIZE),
uint32ToUint64(this.header.tableOffset) + i
)
}
// Make a new empty block at vhd end.
// Update block allocation table in context and in file.
async createBlock (blockId) {
// End of file !
let offset = this.getEndOfData()
const blockAddr = Math.ceil(this.getEndOfData() / VHD_SECTOR_SIZE)
// Padded on bound sector.
if (offset % VHD_SECTOR_SIZE) {
offset += (VHD_SECTOR_SIZE - (offset % VHD_SECTOR_SIZE))
}
debug(`create block ${blockId} at ${blockAddr}`)
const blockAddr = Math.floor(offset / VHD_SECTOR_SIZE)
await Promise.all([
// Write an empty block and addr in vhd file.
this._write(
constantStream([ 0 ], this.fullBlockSize),
sectorsToBytes(blockAddr)
),
const {
blockTable,
fullBlockSize
} = this
debug(`Create block at ${blockAddr}. (size=${fullBlockSize}, offset=${offset})`)
// New entry in block allocation table.
this.writeAllocationTableEntry(blockId, blockAddr)
const tableOffset = uint32ToUint64(this.header.tableOffset)
const entry = blockId * VHD_ENTRY_SIZE
// Write an empty block and addr in vhd file.
await this._write(new Buffer(fullBlockSize).fill(0), offset)
await this._write(blockTable.slice(entry, entry + VHD_ENTRY_SIZE), tableOffset + entry)
this._setBatEntry(blockId, blockAddr)
])
return blockAddr
}
@@ -428,17 +480,16 @@ class Vhd {
await this._write(bitmap, sectorsToBytes(blockAddr))
}
async writeBlockSectors (block, beginSectorId, n) {
let blockAddr = this.readAllocationTableEntry(block.id)
async writeBlockSectors (block, beginSectorId, endSectorId) {
let blockAddr = this._getBatEntry(block.id)
if (blockAddr === BLOCK_UNUSED) {
blockAddr = await this.createBlock(block.id)
}
const endSectorId = beginSectorId + n
const offset = blockAddr + this.sectorsOfBitmap + beginSectorId
debug(`Write block data at: ${offset}. (counter=${n}, blockId=${block.id}, blockSector=${beginSectorId})`)
debug(`writeBlockSectors at ${offset} block=${block.id}, sectors=${beginSectorId}...${endSectorId}`)
await this._write(
block.data.slice(
@@ -448,7 +499,7 @@ class Vhd {
sectorsToBytes(offset)
)
const bitmap = await this.readBlockBitmap(this.bitmapSize, blockAddr)
const { bitmap } = await this._readBlock(block.id, true)
for (let i = beginSectorId; i < endSectorId; ++i) {
mapSetBit(bitmap, i)
@@ -458,61 +509,69 @@ class Vhd {
}
// Merge block id (of vhd child) into vhd parent.
async coalesceBlock (child, blockAddr, blockId) {
async coalesceBlock (child, blockId) {
// Get block data and bitmap of block id.
const blockData = await child.readBlockData(blockAddr)
const blockBitmap = await child.readBlockBitmap(blockAddr)
const { bitmap, data } = await child._readBlock(blockId)
debug(`Coalesce block ${blockId} at ${blockAddr}.`)
debug(`coalesceBlock block=${blockId}`)
// For each sector of block data...
const { sectorsPerBlock } = child
for (let i = 0; i < sectorsPerBlock; i++) {
// If no changes on one sector, skip.
if (!mapTestBit(blockBitmap, i)) {
if (!mapTestBit(bitmap, i)) {
continue
}
let sectors = 0
let endSector = i + 1
// Count changed sectors.
for (; sectors + i < sectorsPerBlock; sectors++) {
if (!mapTestBit(blockBitmap, sectors + i)) {
break
}
while (endSector < sectorsPerBlock && mapTestBit(bitmap, endSector)) {
++endSector
}
// Write n sectors into parent.
debug(`Coalesce block: write. (offset=${i}, sectors=${sectors})`)
debug(`coalesceBlock: write sectors=${i}...${endSector}`)
await this.writeBlockSectors(
{ id: blockId, data: blockData },
{ id: blockId, data },
i,
sectors
endSector
)
i += sectors
i = endSector
}
}
// Write a context footer. (At the end and beggining of a vhd file.)
// Write a context footer. (At the end and beginning of a vhd file.)
async writeFooter () {
const { footer } = this
const offset = this.getEndOfData()
const rawFooter = fuFooter.pack(footer)
footer.checksum = checksumFooter(rawFooter)
footer.checksum = checksumStruct(rawFooter, fuFooter)
debug(`Write footer at: ${offset} (checksum=${footer.checksum}). (data=${rawFooter.toString('hex')})`)
await this._write(rawFooter, 0)
await this._write(rawFooter, offset)
}
writeHeader () {
const { header } = this
const rawHeader = fuHeader.pack(header)
header.checksum = checksumStruct(rawHeader, fuHeader)
const offset = VHD_FOOTER_SIZE
debug(`Write header at: ${offset} (checksum=${header.checksum}). (data=${rawHeader.toString('hex')})`)
return this._write(rawHeader, offset)
}
}
// Merge vhd child into vhd parent.
//
// Child must be a delta backup !
// Parent must be a full backup !
//
// TODO: update the identifier of the parent VHD.
export default async function vhdMerge (
parentHandler, parentPath,
childHandler, childPath
@@ -526,6 +585,8 @@ export default async function vhdMerge (
childVhd.readHeaderAndFooter()
])
assert(childVhd.header.blockSize === parentVhd.header.blockSize)
// Child must be a delta.
if (childVhd.footer.diskType !== HARD_DISK_TYPE_DIFFERENCING) {
throw new Error('Unable to merge, child is not a delta backup.')
@@ -550,17 +611,66 @@ export default async function vhdMerge (
childVhd.readBlockTable()
])
for (let blockId = 0; blockId < childVhd.header.maxTableEntries; blockId++) {
const blockAddr = childVhd.readAllocationTableEntry(blockId)
await parentVhd.ensureBatSize(childVhd.header.maxTableEntries)
if (blockAddr !== BLOCK_UNUSED) {
await parentVhd.coalesceBlock(
childVhd,
blockAddr,
blockId
)
for (let blockId = 0; blockId < childVhd.header.maxTableEntries; blockId++) {
if (childVhd._getBatEntry(blockId) !== BLOCK_UNUSED) {
await parentVhd.coalesceBlock(childVhd, blockId)
}
}
const cFooter = childVhd.footer
const pFooter = parentVhd.footer
pFooter.currentSize = { ...cFooter.currentSize }
pFooter.diskGeometry = { ...cFooter.diskGeometry }
pFooter.originalSize = { ...cFooter.originalSize }
pFooter.timestamp = cFooter.timestamp
// necessary to update values and to recreate the footer after block
// creation
await parentVhd.writeFooter()
}
// returns true if the child was actually modified
export async function chainVhd (
parentHandler, parentPath,
childHandler, childPath
) {
const parentVhd = new Vhd(parentHandler, parentPath)
const childVhd = new Vhd(childHandler, childPath)
await Promise.all([
parentVhd.readHeaderAndFooter(),
childVhd.readHeaderAndFooter()
])
const { header } = childVhd
const parentName = parentPath.split('/').pop()
const parentUuid = parentVhd.footer.uuid
if (
header.parentUnicodeName !== parentName ||
!isEqual(header.parentUuid, parentUuid)
) {
header.parentUuid = parentUuid
header.parentUnicodeName = parentName
await childVhd.writeHeader()
return true
}
// The checksum was broken between xo-server v5.2.4 and v5.2.5
//
// Replace by a correct checksum if necessary.
//
// TODO: remove when enough time as passed (6 months).
{
const rawHeader = fuHeader.pack(header)
const checksum = checksumStruct(rawHeader, fuHeader)
if (checksum !== header.checksum) {
await childVhd._write(rawHeader, VHD_FOOTER_SIZE)
return true
}
}
return false
}

View File

@@ -1,8 +1,14 @@
import {
startsWith
} from 'lodash'
import {
ensureArray,
extractProperty,
forEach,
isArray,
isEmpty,
mapFilter,
mapToArray,
parseXml
} from './utils'
@@ -12,6 +18,9 @@ import {
isVmRunning,
parseDateTime
} from './xapi'
import {
useUpdateSystem
} from './xapi/utils'
// ===================================================================
@@ -93,6 +102,32 @@ const TRANSFORMS = {
} = obj
const isRunning = isHostRunning(obj)
const { software_version } = obj
let supplementalPacks, patches
if (useUpdateSystem(obj)) {
supplementalPacks = []
patches = []
forEach(obj.$updates, update => {
const formattedUpdate = {
name: update.name_label,
description: update.name_description,
author: update.key.split('-')[3],
version: update.version,
guidance: update.after_apply_guidance,
hosts: link(update, 'hosts'),
vdi: link(update, 'vdi'),
size: update.installation_size
}
if (startsWith(update.name_label, 'XS')) {
patches.push(formattedUpdate)
} else {
supplementalPacks.push(formattedUpdate)
}
})
}
return {
// Deprecated
@@ -133,12 +168,25 @@ const TRANSFORMS = {
total: 0
}
})(),
patches: link(obj, 'patches'),
patches: patches || link(obj, 'patches'),
powerOnMode: obj.power_on_mode,
power_state: metrics
? (isRunning ? 'Running' : 'Halted')
: 'Unknown',
startTime: toTimestamp(otherConfig.boot_time),
supplementalPacks: supplementalPacks ||
mapFilter(software_version, (value, key) => {
let author, name
if (([ author, name ] = key.split(':')).length === 2) {
const [ description, version ] = value.split(', ')
return {
name,
description,
author,
version: version.split(' ')[1]
}
}
}),
agentStartTime: toTimestamp(otherConfig.agent_start_time),
tags: obj.tags,
version: obj.software_version.product_version,
@@ -184,22 +232,33 @@ const TRANSFORMS = {
return false
}
const { PV_drivers_version: { major, minor } } = guestMetrics
if (major === undefined || minor === undefined) {
return false
}
const { major, minor } = guestMetrics.PV_drivers_version
const [ hostMajor, hostMinor ] = (obj.$resident_on || obj.$pool.$master)
.software_version
.product_version
.split('.')
return guestMetrics.PV_drivers_up_to_date
return major >= hostMajor && minor >= hostMinor
? 'up to date'
: 'out of date'
})()
let resourceSet = otherConfig['xo:resource_set']
if (resourceSet) {
try {
resourceSet = JSON.parse(resourceSet)
} catch (_) {
resourceSet = undefined
}
}
const vm = {
// type is redefined after for controllers/, templates &
// snapshots.
type: 'VM',
addresses: guestMetrics && guestMetrics.networks || null,
addresses: (guestMetrics && guestMetrics.networks) || null,
affinityHost: link(obj, 'affinity'),
auto_poweron: Boolean(otherConfig.auto_poweron),
boot: obj.HVM_boot_params,
CPUs: {
@@ -232,7 +291,8 @@ const TRANSFORMS = {
return {
enabled: true,
info: info && parseXml(info).docker_info,
process: process && parseXml(process).docker_ps,
containers: ensureArray(process && parseXml(process).docker_ps.item),
process: process && parseXml(process).docker_ps, // deprecated (only used in v4)
version: version && parseXml(version).docker_version
}
})(),
@@ -266,11 +326,13 @@ const TRANSFORMS = {
return memory
})(),
installTime: metrics && toTimestamp(metrics.install_time),
name_description: obj.name_description,
name_label: obj.name_label,
other: otherConfig,
os_version: guestMetrics && guestMetrics.os_version || null,
os_version: (guestMetrics && guestMetrics.os_version) || null,
power_state: obj.power_state,
resourceSet,
snapshots: link(obj, 'snapshots'),
startTime: metrics && toTimestamp(metrics.start_time),
tags: obj.tags,
@@ -307,6 +369,10 @@ const TRANSFORMS = {
} else if (obj.is_a_template) {
vm.type += '-template'
if (obj.other_config.default_template === 'true') {
vm.id = obj.$ref // use refs for templates as they
}
vm.CPUs.number = +obj.VCPUs_at_startup
vm.template_info = {
arch: otherConfig['install-arch'],
@@ -362,10 +428,13 @@ const TRANSFORMS = {
name_description: obj.name_description,
name_label: obj.name_label,
size: +obj.physical_size,
shared: Boolean(obj.shared),
SR_type: obj.type,
tags: obj.tags,
usage: +obj.virtual_allocation,
VDIs: link(obj, 'VDIs'),
other_config: obj.other_config,
sm_config: obj.sm_config,
$container: (
obj.shared || !obj.$PBDs[0]
@@ -382,26 +451,32 @@ const TRANSFORMS = {
return {
type: 'PBD',
attached: obj.currently_attached,
attached: Boolean(obj.currently_attached),
host: link(obj, 'host'),
SR: link(obj, 'SR')
SR: link(obj, 'SR'),
device_config: obj.device_config
}
},
// -----------------------------------------------------------------
pif (obj) {
const metrics = obj.$metrics
return {
type: 'PIF',
attached: Boolean(obj.currently_attached),
isBondMaster: !isEmpty(obj.bond_master_of),
device: obj.device,
deviceName: metrics && metrics.device_name,
dns: obj.DNS,
disallowUnplug: Boolean(obj.disallow_unplug),
gateway: obj.gateway,
ip: obj.IP,
mac: obj.MAC,
management: Boolean(obj.management), // TODO: find a better name.
carrier: Boolean(metrics && metrics.carrier),
mode: obj.ip_configuration_mode,
mtu: +obj.MTU,
netmask: obj.netmask,
@@ -452,6 +527,7 @@ const TRANSFORMS = {
attached: Boolean(obj.currently_attached),
bootable: Boolean(obj.bootable),
device: obj.device || null,
is_cd_drive: obj.type === 'CD',
position: obj.userdevice,
read_only: obj.mode === 'RO',
@@ -483,9 +559,11 @@ const TRANSFORMS = {
network (obj) {
return {
bridge: obj.bridge,
defaultIsLocked: obj.default_locking_mode === 'disabled',
MTU: +obj.MTU,
name_description: obj.name_description,
name_label: obj.name_label,
other_config: obj.other_config,
tags: obj.tags,
PIFs: link(obj, 'PIFs'),
VIFs: link(obj, 'VIFs')

File diff suppressed because it is too large Load Diff

View File

View File

@@ -1,10 +1,60 @@
import {
makeEditObject
} from '../utils'
import { isEmpty } from '../../utils'
import { makeEditObject } from '../utils'
export default {
async _connectVif (vif) {
await this.call('VIF.plug', vif.$ref)
},
async connectVif (vifId) {
await this._connectVif(this.getObject(vifId))
},
async _deleteVif (vif) {
await this.call('VIF.destroy', vif.$ref)
},
async deleteVif (vifId) {
const vif = this.getObject(vifId)
if (vif.currently_attached) {
await this._disconnectVif(vif)
}
await this._deleteVif(vif)
},
async _disconnectVif (vif) {
await this.call('VIF.unplug_force', vif.$ref)
},
async disconnectVif (vifId) {
await this._disconnectVif(this.getObject(vifId))
},
editVif: makeEditObject({
ipv4Allowed: true,
ipv6Allowed: true
ipv4Allowed: {
get: true,
set: [
'ipv4Allowed',
function (value, vif) {
const lockingMode = isEmpty(value) && isEmpty(vif.ipv6_allowed)
? 'network_default'
: 'locked'
if (lockingMode !== vif.locking_mode) {
return this._set('locking_mode', lockingMode)
}
}
]
},
ipv6Allowed: {
get: true,
set: [
'ipv6Allowed',
function (value, vif) {
const lockingMode = isEmpty(value) && isEmpty(vif.ipv4_allowed)
? 'network_default'
: 'locked'
if (lockingMode !== vif.locking_mode) {
return this._set('locking_mode', lockingMode)
}
}
]
}
})
}

380
src/xapi/mixins/patching.js Normal file
View File

@@ -0,0 +1,380 @@
import deferrable from 'golike-defer'
import filter from 'lodash/filter'
import includes from 'lodash/includes'
import some from 'lodash/some'
import sortBy from 'lodash/sortBy'
import unzip from 'julien-f-unzip'
import httpProxy from '../../http-proxy'
import httpRequest from '../../http-request'
import { debounce } from '../../decorators'
import {
createRawObject,
ensureArray,
forEach,
mapFilter,
mapToArray,
parseXml
} from '../../utils'
import {
debug,
put,
useUpdateSystem
} from '../utils'
export default {
// FIXME: should be static
//
// Fetches the patch/update catalog published at updates.xensource.com
// and indexes it by patch UUID and by XenServer product version.
//
// Cached for 24 hours via @debounce to avoid hammering the remote
// server.
//
// Returns { patches, latestVersion, versions }:
// - patches: map of patch UUID → normalized patch info
// - versions: map of product version → { date, name, id, documentationUrl, patches }
// - latestVersion: the entry of `versions` flagged `latest` in the feed
//
// Throws when the catalog cannot be fetched (non-200 status).
@debounce(24 * 60 * 60 * 1000)
async _getXenUpdates () {
  const { readAll, statusCode } = await httpRequest(
    'http://updates.xensource.com/XenServer/updates.xml',
    { agent: httpProxy }
  )

  if (statusCode !== 200) {
    throw new Error('cannot fetch patches list from Citrix')
  }

  const data = parseXml(await readAll()).patchdata

  const patches = createRawObject()
  forEach(data.patches.patch, patch => {
    patches[patch.uuid] = {
      date: patch.timestamp,
      description: patch['name-description'],
      documentationUrl: patch.url,
      guidance: patch['after-apply-guidance'],
      name: patch['name-label'],
      url: patch['patch-url'],
      uuid: patch.uuid,
      conflicts: mapToArray(ensureArray(patch.conflictingpatches), patch => {
        return patch.conflictingpatch.uuid
      }),
      requirements: mapToArray(ensureArray(patch.requiredpatches), patch => {
        return patch.requiredpatch.uuid
      })
      // TODO: what does it mean, should we handle it?
      // version: patch.version,
    }

    // An empty XML element apparently maps to [ undefined ] — clear the
    // placeholder in that case. (presumably an artifact of
    // ensureArray()/parseXml — TODO confirm)
    if (patches[patch.uuid].conflicts[0] === undefined) {
      patches[patch.uuid].conflicts.length = 0
    }
    if (patches[patch.uuid].requirements[0] === undefined) {
      patches[patch.uuid].requirements.length = 0
    }
  })

  // Maps a (possibly single) list of { uuid } refs from a version
  // element to the corresponding entries of `patches`.
  const resolveVersionPatches = function (uuids) {
    const versionPatches = createRawObject()

    forEach(ensureArray(uuids), ({uuid}) => {
      versionPatches[uuid] = patches[uuid]
    })

    return versionPatches
  }

  const versions = createRawObject()
  let latestVersion
  forEach(data.serverversions.version, version => {
    versions[version.value] = {
      date: version.timestamp,
      name: version.name,
      id: version.value,
      documentationUrl: version.url,
      patches: resolveVersionPatches(version.patch)
    }

    // Remember the entry the feed marks as the latest release.
    if (version.latest) {
      latestVersion = versions[version.value]
    }
  })

  return {
    patches,
    latestVersion,
    versions
  }
},
// =================================================================

// Returns installed and not installed patches for a given host.
//
// Looks up, in the Citrix catalog, the patches published for the
// host's product version; returns [] when the version is unknown.
async _getPoolPatchesForHost (host) {
  const versions = (await this._getXenUpdates()).versions

  const hostVersions = host.software_version
  // Fall back on product_version_text when product_version does not
  // match a catalog entry.
  const version =
    versions[hostVersions.product_version] ||
    versions[hostVersions.product_version_text]

  return version
    ? version.patches
    : []
},

// Set (uuid → true) of the patches/updates already installed on the
// host, covering both the legacy and the new update mechanisms.
_getInstalledPoolPatchesOnHost (host) {
  const installed = createRawObject()

  // platform_version < 2.1.1
  forEach(host.$patches, hostPatch => {
    installed[hostPatch.$pool_patch.uuid] = true
  })

  // platform_version >= 2.1.1
  forEach(host.$updates, update => {
    installed[update.uuid] = true // TODO: ignore packs
  })

  return installed
},

// Map (uuid → patch info) of the catalog patches applicable to the
// host which are not yet installed and whose declared conflicts are
// not installed either.
async _listMissingPoolPatchesOnHost (host) {
  const all = await this._getPoolPatchesForHost(host)
  const installed = this._getInstalledPoolPatchesOnHost(host)

  const installable = createRawObject()
  forEach(all, (patch, uuid) => {
    if (installed[uuid]) {
      return
    }

    // Skip patches which conflict with an already installed one.
    for (const uuid of patch.conflicts) {
      if (uuid in installed) {
        return
      }
    }

    installable[uuid] = patch
  })

  return installable
},

// Public wrapper taking a host id.
async listMissingPoolPatchesOnHost (hostId) {
  // Returns an array to not break compatibility.
  return mapToArray(
    await this._listMissingPoolPatchesOnHost(this.getObject(hostId))
  )
},

// Ejects the XenServer Tools ISO from every VM (restricted to VMs
// resident on `hostRef` when given) which currently has one mounted.
// Called before patching — presumably because a mounted Tools ISO
// blocks patch application (TODO confirm).
async _ejectToolsIsos (hostRef) {
  return Promise.all(mapFilter(
    this.objects.all,
    vm => {
      if (vm.$type !== 'vm' || (hostRef && vm.resident_on !== hostRef)) {
        return
      }

      const shouldEjectCd = some(vm.$VBDs, vbd => {
        const vdi = vbd.$VDI

        return vdi && vdi.is_tools_iso
      })

      if (shouldEjectCd) {
        return this.ejectCdFromVm(vm.$id)
      }
    }
  ))
},
// -----------------------------------------------------------------

// A patch is installable on a host when it is not already installed
// and no installed patch declares it as conflicting.
_isPoolPatchInstallableOnHost (patchUuid, host) {
  const installed = this._getInstalledPoolPatchesOnHost(host)

  if (installed[patchUuid]) {
    return false
  }

  let installable = true

  // NOTE(review): `installed` maps uuid → true (booleans, see
  // _getInstalledPoolPatchesOnHost), so `patch.conflicts` here is
  // always undefined and this check looks like dead code — verify and
  // consider resolving installed uuids to catalog patch entries.
  forEach(installed, patch => {
    if (includes(patch.conflicts, patchUuid)) {
      installable = false

      return false
    }
  })

  return installable
},
// -----------------------------------------------------------------
// platform_version < 2.1.1 ----------------------------------------

// Uploads a patch file to the pool master (legacy pool_patch system)
// via the /pool_patch_upload HTTP handler and returns the created
// pool_patch object once it appears in the object cache.
async uploadPoolPatch (stream, patchName = 'unknown') {
  const taskRef = await this._createTask('Patch upload', patchName)

  const task = this._watchTask(taskRef)
  // The task resolves with the ref of the created pool_patch; the PUT
  // runs concurrently and feeds the upload.
  const [ patchRef ] = await Promise.all([
    task,
    put(stream, {
      hostname: this.pool.$master.address,
      path: '/pool_patch_upload',
      protocol: 'https',
      query: {
        session_id: this.sessionId,
        task_id: taskRef
      },
      // XAPI hosts commonly use self-signed certificates.
      rejectUnauthorized: false
    }, task)
  ])

  return this._getOrWaitObject(patchRef)
},

// Returns the pool_patch for `uuid`, downloading it from Citrix and
// uploading it to the pool when it is not already present.
async _getOrUploadPoolPatch (uuid) {
  // Use the existing object if the patch was already uploaded.
  try {
    return this.getObjectByUuid(uuid)
  } catch (error) {}

  debug('downloading patch %s', uuid)

  const patchInfo = (await this._getXenUpdates()).patches[uuid]
  if (!patchInfo) {
    throw new Error('no such patch ' + uuid)
  }

  let stream = await httpRequest(patchInfo.url, { agent: httpProxy })
  // The download is a zip archive: extract the first *.xsupdate entry
  // and stream it directly, draining everything else.
  stream = await new Promise((resolve, reject) => {
    const PATCH_RE = /\.xsupdate$/
    stream.pipe(unzip.Parse()).on('entry', entry => {
      if (PATCH_RE.test(entry.path)) {
        // Expose the size as `length` for the uploader (put() relies
        // on it to decide the transfer mode).
        entry.length = entry.size
        resolve(entry)
      } else {
        entry.autodrain()
      }
    }).on('error', reject)
  })

  return this.uploadPoolPatch(stream, patchInfo.name)
},
// platform_version >= 2.1.1 ----------------------------------------
// Installs a patch ISO (new pool_update system) on a host: stores the
// stream in a temporary VDI, introduces it as a pool_update and
// applies it. The temporary VDI is deleted afterwards ($defer).
//
// The stream must have a known `length` (required to size the VDI).
_installPatch: deferrable(async function ($defer, stream, { hostId }) {
  if (!stream.length) {
    throw new Error('stream must have a length')
  }

  const vdi = await this.createTemporaryVdiOnHost(stream, hostId, '[XO] Patch ISO', 'small temporary VDI to store a patch ISO')
  $defer(() => this._deleteVdi(vdi))

  const updateRef = await this.call('pool_update.introduce', vdi.$ref)
  // TODO: check update status
  // await this.call('pool_update.precheck', updateRef, host.$ref)
  // - ok_livepatch_complete An applicable live patch exists for every required component
  // - ok_livepatch_incomplete An applicable live patch exists but it is not sufficient
  // - ok There is no applicable live patch
  await this.call('pool_update.apply', updateRef, this.getObject(hostId).$ref)
}),

// Downloads a patch from the catalog and installs it on a host through
// the pool_update mechanism.
async _downloadPatchAndInstall (uuid, hostId) {
  debug('downloading patch %s', uuid)

  const patchInfo = (await this._getXenUpdates()).patches[uuid]
  if (!patchInfo) {
    throw new Error('no such patch ' + uuid)
  }

  let stream = await httpRequest(patchInfo.url, { agent: httpProxy })
  // Extract the first entry of the downloaded zip archive and expose
  // its size as `length` (needed by _installPatch).
  stream = await new Promise((resolve, reject) => {
    stream.pipe(unzip.Parse()).on('entry', entry => {
      entry.length = entry.size
      resolve(entry)
    }).on('error', reject)
  })

  return this._installPatch(stream, { hostId })
},
// -----------------------------------------------------------------

// Legacy (pool_patch) installation of a single patch on a host.
// Tools ISOs on the host's VMs are ejected in parallel of the upload.
async _installPoolPatchOnHost (patchUuid, host) {
  const [ patch ] = await Promise.all([ this._getOrUploadPoolPatch(patchUuid), this._ejectToolsIsos(host.$ref) ])

  await this.call('pool_patch.apply', patch.$ref, host.$ref)
},

// New (pool_update) installation of a single patch on a host.
_installPatchUpdateOnHost (patchUuid, host) {
  return Promise.all([ this._downloadPatchAndInstall(patchUuid, host.$id), this._ejectToolsIsos(host.$ref) ])
},

// Dispatches to the legacy or new installation mechanism according to
// the host's platform version (see useUpdateSystem()).
async _checkSoftwareVersionAndInstallPatch (patchUuid, hostId) {
  const host = this.getObject(hostId)

  return useUpdateSystem(host)
    ? this._installPatchUpdateOnHost(patchUuid, host)
    : this._installPoolPatchOnHost(patchUuid, host)
},

// Public entry point: installs a single patch on a single host.
async installPoolPatchOnHost (patchUuid, hostId) {
  debug('installing patch %s', patchUuid)

  return this._checkSoftwareVersionAndInstallPatch(patchUuid, hostId)
},

// -----------------------------------------------------------------

// Applies a patch on every host of the pool (legacy pool_patch
// mechanism only).
async installPoolPatchOnAllHosts (patchUuid) {
  const [ patch ] = await Promise.all([ this._getOrUploadPoolPatch(patchUuid), this._ejectToolsIsos() ])

  await this.call('pool_patch.pool_apply', patch.$ref)
},

// -----------------------------------------------------------------

// Recursively installs a patch's required patches first, then the
// patch itself.
async _installPoolPatchOnHostAndRequirements (patch, host, patchesByUuid) {
  const { requirements } = patch
  if (requirements.length) {
    for (const requirementUuid of requirements) {
      if (this._isPoolPatchInstallableOnHost(requirementUuid, host)) {
        const requirement = patchesByUuid[requirementUuid]

        await this._installPoolPatchOnHostAndRequirements(requirement, host, patchesByUuid)
        // Refresh the host record after each installation.
        host = this.getObject(host.$id)
      }
    }
  }

  // NOTE(review): `host` (a full object) is passed where
  // _checkSoftwareVersionAndInstallPatch() expects a host id — confirm
  // getObject() also accepts objects.
  await this._checkSoftwareVersionAndInstallPatch(patch.uuid, host)
},

// Installs every missing patch on a host, newest first, resolving
// requirements recursively.
async installAllPoolPatchesOnHost (hostId) {
  let host = this.getObject(hostId)

  const installableByUuid = await this._listMissingPoolPatchesOnHost(host)

  // List of all installable patches sorted from the newest to the
  // oldest.
  const installable = sortBy(
    installableByUuid,
    patch => -Date.parse(patch.date)
  )

  for (let i = 0, n = installable.length; i < n; ++i) {
    const patch = installable[i]

    if (this._isPoolPatchInstallableOnHost(patch.uuid, host)) {
      await this._installPoolPatchOnHostAndRequirements(patch, host, installableByUuid).catch(error => {
        // Presumably a requirement pass may have already applied this
        // patch: ignore that specific error.
        if (error.code !== 'PATCH_ALREADY_APPLIED') {
          throw error
        }
      })
      // Refresh the host record after each installation.
      host = this.getObject(host.$id)
    }
  }
},

// Installs every missing patch on every host of the pool, the master
// first, then the remaining hosts in parallel.
async installAllPoolPatchesOnAllHosts () {
  // NOTE(review): `this.pool.master` is a ref — confirm getObject()
  // resolves refs as well as ids.
  await this.installAllPoolPatchesOnHost(this.pool.master)

  // TODO: use pool_update.pool_apply for platform_version ^2.1.1
  await Promise.all(mapToArray(
    filter(this.objects.all, { $type: 'host' }),
    host => this.installAllPoolPatchesOnHost(host.$id)
  ))
}
}

View File

@@ -1,5 +1,7 @@
import deferrable from 'golike-defer'
import find from 'lodash/find'
import gte from 'lodash/gte'
import isEmpty from 'lodash/isEmpty'
import lte from 'lodash/lte'
import {
@@ -13,17 +15,17 @@ import {
import {
isVmHvm,
isVmRunning,
makeEditObject
makeEditObject,
NULL_REF
} from '../utils'
export default {
// TODO: clean up on error.
async createVm (templateId, {
@deferrable.onFailure
async createVm ($onFailure, templateId, {
name_label, // deprecated
nameLabel = name_label, // eslint-disable-line camelcase
bootAfterCreate = false,
clone = true,
installRepository = undefined,
vdis = undefined,
@@ -34,7 +36,7 @@ export default {
cloudConfig = undefined,
...props
} = {}) {
} = {}, checkLimits) {
const installMethod = (() => {
if (installRepository == null) {
return 'none'
@@ -50,30 +52,30 @@ export default {
const template = this.getObject(templateId)
// Clones the template.
let vm = await this._getOrWaitObject(
await this[clone ? '_cloneVm' : '_copyVm'](template, nameLabel)
)
const vmRef = await this[clone ? '_cloneVm' : '_copyVm'](template, nameLabel)
$onFailure(() => this.deleteVm(vmRef)::pCatch(noop))
// TODO: copy BIOS strings?
// Removes disks from the provision XML, we will create them by
// ourselves.
await this.call('VM.remove_from_other_config', vm.$ref, 'disks')::pCatch(noop)
await this.call('VM.remove_from_other_config', vmRef, 'disks')::pCatch(noop)
// Creates the VDIs and executes the initial steps of the
// installation.
await this.call('VM.provision', vm.$ref)
await this.call('VM.provision', vmRef)
let vm = await this._getOrWaitObject(vmRef)
// Set VMs params.
// TODO: checkLimits
this._editVm(vm, props)
await this._editVm(vm, props, checkLimits)
// Sets boot parameters.
{
const isHvm = isVmHvm(vm)
if (isHvm) {
if (!vdis.length || installMethod === 'network') {
if (!isEmpty(vdis) || installMethod === 'network') {
const { HVM_boot_params: bootParams } = vm
let order = bootParams.order
if (order) {
@@ -103,6 +105,9 @@ export default {
}
}
let nVbds = vm.VBDs.length
let hasBootableDisk = !!find(vm.$VBDs, 'bootable')
// Inserts the CD if necessary.
if (installMethod === 'cd') {
// When the VM is started, if PV, the CD drive will become not
@@ -110,6 +115,9 @@ export default {
await this._insertCdIntoVm(installRepository, vm, {
bootable: true
})
hasBootableDisk = true
++nVbds
}
// Modify existing (previous template) disks if necessary
@@ -137,9 +145,11 @@ export default {
// Creates the user defined VDIs.
//
// TODO: set vm.suspend_SR
if (vdis) {
if (!isEmpty(vdis)) {
const devices = await this.call('VM.get_allowed_VBD_devices', vm.$ref)
await Promise.all(mapToArray(vdis, (vdiDescription, i) => {
++nVbds
return this._createVdi(
vdiDescription.size, // FIXME: Should not be done in Xapi.
{
@@ -150,9 +160,8 @@ export default {
)
.then(ref => this._getOrWaitObject(ref))
.then(vdi => this._createVbd(vm, vdi, {
// Only the first VBD if installMethod is not cd is bootable.
bootable: installMethod !== 'cd' && !i,
// Either the CD or the 1st disk is bootable (only useful for PV VMs)
bootable: !(hasBootableDisk || i),
userdevice: devices[i]
}))
}))
@@ -162,30 +171,41 @@ export default {
await Promise.all(mapToArray(vm.$VIFs, vif => this._deleteVif(vif)))
// Creates the VIFs specified by the user.
let nVifs = 0
if (vifs) {
const devices = await this.call('VM.get_allowed_VIF_devices', vm.$ref)
await Promise.all(mapToArray(vifs, (vif, index) => this._createVif(
vm,
this.getObject(vif.network),
{
device: devices[index],
mac: vif.mac,
mtu: vif.mtu
}
)))
await Promise.all(mapToArray(vifs, (vif, index) => {
++nVifs
return this._createVif(
vm,
this.getObject(vif.network),
{
ipv4_allowed: vif.ipv4_allowed,
ipv6_allowed: vif.ipv6_allowed,
device: devices[index],
locking_mode: isEmpty(vif.ipv4_allowed) && isEmpty(vif.ipv6_allowed) ? 'network_default' : 'locked',
mac: vif.mac,
mtu: vif.mtu
}
)
}))
}
// TODO: Assign VGPUs.
if (cloudConfig != null) {
// Refresh the record.
vm = this.getObject(vm.$id)
vm = await this._waitObjectState(vm.$id, vm => vm.VBDs.length === nVbds)
// Find the SR of the first VDI.
let srRef
forEach(vm.$VBDs, vbd => {
const vdi = vbd.$VDI
if (vdi) {
let vdi
if (
vbd.type === 'Disk' &&
(vdi = vbd.$VDI)
) {
srRef = vdi.SR
return false
}
@@ -195,19 +215,32 @@ export default {
? 'createCoreOsCloudInitConfigDrive'
: 'createCloudInitConfigDrive'
await this[method](vm.$id, srRef, cloudConfig)
++nVbds
}
if (bootAfterCreate) {
this._startVm(vm)::pCatch(noop)
}
return this._waitObject(vm.$id)
// wait for the record with all the VBDs and VIFs
return this._waitObjectState(vm.$id, vm =>
vm.VBDs.length === nVbds &&
vm.VIFs.length === nVifs
)
},
// High level method to edit a VM.
//
// Params do not correspond directly to XAPI props.
_editVm: makeEditObject({
affinityHost: {
get: 'affinity',
set (value, vm) {
return this._setObjectProperty(
vm,
'affinity',
value ? this.getObject(value).$ref : NULL_REF
)
}
},
autoPoweron: {
set (value, vm) {
return Promise.all([
@@ -243,7 +276,6 @@ export default {
},
cpuCap: {
addToLimits: true,
get: vm => vm.VCPUs_params.cap && +vm.VCPUs_params.cap,
set (cap, vm) {
return this._updateObjectMapProperty(vm, 'VCPUs_params', { cap })
@@ -260,7 +292,6 @@ export default {
},
cpuWeight: {
addToLimits: true,
get: vm => vm.VCPUs_params.weight && +vm.VCPUs_params.weight,
set (weight, vm) {
return this._updateObjectMapProperty(vm, 'VCPUs_params', { weight })
@@ -285,6 +316,7 @@ export default {
memory: 'memoryMax',
memoryMax: {
addToLimits: true,
limitName: 'memory',
constraints: {
memoryMin: lte,
memoryStaticMax: gte
@@ -307,10 +339,33 @@ export default {
nameLabel: true,
PV_args: true
PV_args: true,
tags: true
}),
async editVm (id, props) {
return /* await */ this._editVm(this.getObject(id), props)
async editVm (id, props, checkLimits) {
return /* await */ this._editVm(this.getObject(id), props, checkLimits)
},
async revertVm (snapshotId, snapshotBefore = true) {
const snapshot = this.getObject(snapshotId)
if (snapshotBefore) {
await this._snapshotVm(snapshot.$snapshot_of)
}
await this.call('VM.revert', snapshot.$ref)
if (snapshot.snapshot_info['power-state-at-snapshot'] === 'Running') {
const vm = snapshot.$snapshot_of
if (vm.power_state === 'Halted') {
this.startVm(vm.$id)::pCatch(noop)
} else if (vm.power_state === 'Suspended') {
this.resumeVm(vm.$id)::pCatch(noop)
}
}
},
async resumeVm (vmId) {
// the force parameter is always true
return this.call('VM.resume', this.getObject(vmId).$ref, false, true)
}
}

View File

@@ -0,0 +1,53 @@
import { NULL_REF } from './utils'
// Base record used when creating a VM from scratch (without a real
// XenServer template).
//
// NOTE(review): values appear to mirror a XenServer "Other install
// media" template (see other_config.base_template_name) — confirm they
// stay valid for the targeted XenServer versions.
const OTHER_CONFIG_TEMPLATE = {
  actions_after_crash: 'restart',
  actions_after_reboot: 'restart',
  actions_after_shutdown: 'destroy',
  affinity: null,
  blocked_operations: {},
  ha_always_run: false,
  HVM_boot_params: {
    // boot order: CD-ROM, disk, network
    order: 'cdn'
  },
  HVM_boot_policy: 'BIOS order',
  HVM_shadow_multiplier: 1,
  is_a_template: false,
  memory_dynamic_max: 4294967296, // 4 GiB
  memory_dynamic_min: 4294967296, // 4 GiB
  memory_static_max: 4294967296, // 4 GiB
  memory_static_min: 134217728, // 128 MiB
  order: 0,
  other_config: {
    vgpu_pci: '',
    base_template_name: 'Other install media',
    mac_seed: '5e88eb6a-d680-c47f-a94a-028886971ba4',
    'install-methods': 'cdrom'
  },
  PCI_bus: '',
  platform: {
    timeoffset: '0',
    nx: 'true',
    acpi: '1',
    apic: 'true',
    pae: 'true',
    hpet: 'true',
    viridian: 'true'
  },
  // No protection policy attached by default.
  protection_policy: NULL_REF,
  PV_args: '',
  PV_bootloader: '',
  PV_bootloader_args: '',
  PV_kernel: '',
  PV_legacy_args: '',
  PV_ramdisk: '',
  recommendations: '<restrictions><restriction field="memory-static-max" max="137438953472" /><restriction field="vcpus-max" max="32" /><restriction property="number-of-vbds" max="255" /><restriction property="number-of-vifs" max="7" /><restriction field="has-vendor-device" value="false" /></restrictions>',
  shutdown_delay: 0,
  start_delay: 0,
  user_version: 1,
  VCPUs_at_startup: 1,
  VCPUs_max: 1,
  VCPUs_params: {},
  version: 0
}
export { OTHER_CONFIG_TEMPLATE as default }

View File

@@ -1,9 +1,13 @@
// import isFinite from 'lodash/isFinite'
import camelCase from 'lodash/camelCase'
import createDebug from 'debug'
import isEqual from 'lodash/isEqual'
import isPlainObject from 'lodash/isPlainObject'
import pickBy from 'lodash/pickBy'
import { utcFormat, utcParse } from 'd3-time-format'
import { satisfies as versionSatisfies } from 'semver'
import httpRequest from '../http-request'
import {
camelToSnakeCase,
createRawObject,
@@ -12,11 +16,12 @@ import {
isBoolean,
isFunction,
isInteger,
isObject,
isString,
map,
mapFilter,
mapToArray,
noop
noop,
pFinally
} from '../utils'
// ===================================================================
@@ -48,7 +53,10 @@ export const prepareXapiParam = param => {
if (isBoolean(param)) {
return asBoolean(param)
}
if (isObject(param)) {
if (isArray(param)) {
return map(param, prepareXapiParam)
}
if (isPlainObject(param)) {
return map(filterUndefineds(param), prepareXapiParam)
}
@@ -57,6 +65,10 @@ export const prepareXapiParam = param => {
// -------------------------------------------------------------------
export const debug = createDebug('xo:xapi')
// -------------------------------------------------------------------
const OPAQUE_REF_RE = /OpaqueRef:[0-9a-z-]+/
export const extractOpaqueRef = str => {
const matches = OPAQUE_REF_RE.exec(str)
@@ -85,6 +97,7 @@ forEach([
'VDI',
'VGPU',
'VGPU_type',
'VIF',
'VLAN',
'VM',
'VM_appliance',
@@ -130,17 +143,6 @@ export const isVmRunning = vm => VM_RUNNING_POWER_STATES[vm.power_state]
const _DEFAULT_ADD_TO_LIMITS = (next, current) => next - current
const _mapFilter = (collection, iteratee) => {
const result = []
forEach(collection, (...args) => {
const value = iteratee(...args)
if (value) {
result.push(value)
}
})
return result
}
export const makeEditObject = specs => {
const normalizeGet = (get, name) => {
if (get === true) {
@@ -169,8 +171,9 @@ export const makeEditObject = specs => {
if (isString(set)) {
const index = set.indexOf('.')
if (index === -1) {
const prop = camelToSnakeCase(set)
return function (value) {
return this._set(set, value)
return this._set(prop, value)
}
}
@@ -213,6 +216,9 @@ export const makeEditObject = specs => {
if (spec.addToLimits === true) {
spec.addToLimits = _DEFAULT_ADD_TO_LIMITS
}
if (!spec.limitName) {
spec.limitName = name
}
forEach(spec.constraints, (constraint, constraintName) => {
if (!isFunction(constraint)) {
@@ -291,7 +297,7 @@ export const makeEditObject = specs => {
let addToLimits
if (limits && (addToLimits = spec.addToLimits)) {
limits[name] = addToLimits(value, current)
limits[spec.limitName] = addToLimits(value, current)
}
}
@@ -302,14 +308,20 @@ export const makeEditObject = specs => {
const cbs = []
forEach(constraints, (constraint, constraintName) => {
// This constraint value is already defined: bypass the constraint.
if (values[constraintName] != null) {
return
}
// Before setting a property to a new value, if the constraint check fails (e.g. memoryMin > memoryMax):
// - if the user wants to set the constraint (ie constraintNewValue is defined):
// constraint <-- constraintNewValue THEN property <-- value (e.g. memoryMax <-- 2048 THEN memoryMin <-- 1024)
// - if the user DOES NOT want to set the constraint (ie constraintNewValue is NOT defined):
// constraint <-- value THEN property <-- value (e.g. memoryMax <-- 1024 THEN memoryMin <-- 1024)
// FIXME: Some values combinations will lead to setting the same property twice, which is not perfect but works for now.
const constraintCurrentValue = specs[constraintName].get(object)
const constraintNewValue = values[constraintName]
if (!constraint(specs[constraintName].get(object), value)) {
const cb = set(value, constraintName)
cbs.push(cb)
if (!constraint(constraintCurrentValue, value)) {
const cb = set(constraintNewValue == null ? value : constraintNewValue, constraintName)
if (cb) {
cbs.push(cb)
}
}
})
@@ -321,7 +333,7 @@ export const makeEditObject = specs => {
return cb
}
const cbs = _mapFilter(values, set)
const cbs = mapFilter(values, set)
if (checkLimits) {
await checkLimits(limits, object)
@@ -330,3 +342,51 @@ export const makeEditObject = specs => {
return Promise.all(mapToArray(cbs, cb => cb())).then(noop)
}
}
// ===================================================================
// XAPI's representation of a null object reference.
export const NULL_REF = 'OpaqueRef:NULL'
// ===================================================================
// HTTP put, use an ugly hack if the length is not known because XAPI
// does not support chunk encoding.
//
// Uploads `stream` with an HTTP PUT described by `opts` (hostname,
// path, query, …). Returns a promise resolving when the response body
// has been fully read.
//
// `task`, when given, is a promise tracking the matching XAPI task:
// its settlement is used to terminate the request when the stream
// length is unknown.
export const put = (stream, {
  headers: { ...headers } = {},
  ...opts
}, task) => {
  const makeRequest = () => httpRequest({
    ...opts,
    body: stream,
    headers,
    method: 'put'
  })

  // Xen API does not support chunk encoding.
  if (stream.length == null) {
    // null presumably suppresses the transfer-encoding header so no
    // chunked encoding is sent — TODO confirm with the HTTP layer.
    headers['transfer-encoding'] = null

    const promise = makeRequest()

    if (task) {
      // Some connections need the task to resolve (VDI import).
      task::pFinally(() => {
        promise.cancel()
      })
    } else {
      // Some tasks need the connection to close (VM import).
      promise.request.once('finish', () => {
        promise.cancel()
      })
    }

    return promise.readAll()
  }

  return makeRequest().readAll()
}
// Whether the host uses the new update system (pool_update) instead of
// the legacy pool_patch mechanism.
//
// Match Xen Center's condition: https://github.com/xenserver/xenadmin/blob/f3a64fc54bbff239ca6f285406d9034f57537d64/XenModel/Utils/Helpers.cs#L420
export const useUpdateSystem = host => {
  const { platform_version: platformVersion } = host.software_version
  return versionSatisfies(platformVersion, '^2.1.1')
}

View File

View File

@@ -19,11 +19,29 @@ export default class {
constructor (xo) {
this._xo = xo
this._acls = new Acls({
const aclsDb = this._acls = new Acls({
connection: xo._redis,
prefix: 'xo:acl',
indexes: ['subject', 'object']
})
xo.on('start', () => {
xo.addConfigManager('acls',
() => aclsDb.get(),
acls => aclsDb.update(acls)
)
})
xo.on('clean', async () => {
const acls = await aclsDb.get()
const toRemove = []
forEach(acls, ({ subject, object, action, id }) => {
if (!subject || !object || !action) {
toRemove.push(id)
}
})
await aclsDb.remove(toRemove)
})
}
async _getAclsForUser (userId) {
@@ -39,10 +57,9 @@ export default class {
push.apply(acls, entries)
})(acls.push)
const collection = this._acls
await Promise.all(mapToArray(
subjects,
subject => collection.get({subject}).then(pushAcls)
subject => this.getAclsForSubject(subject).then(pushAcls)
))
return acls
@@ -67,6 +84,10 @@ export default class {
return this._acls.get()
}
async getAclsForSubject (subjectId) {
return this._acls.get({ subject: subjectId })
}
async getPermissionsForUser (userId) {
const [
acls,

View File

@@ -1,30 +1,26 @@
import createDebug from 'debug'
const debug = createDebug('xo:api')
import getKeys from 'lodash/keys'
import kindOf from 'kindof'
import moment from 'moment-timezone'
import ms from 'ms'
import schemaInspector from 'schema-inspector'
import * as methods from '../api'
import {
InvalidParameters,
MethodNotFound,
NoSuchObject,
Unauthorized
} from './api-errors'
import {
version as xoServerVersion
} from '../package.json'
MethodNotFound
} from 'json-rpc-peer'
import {
createRawObject,
forEach,
isFunction,
noop
} from './utils'
noop,
serializeError
} from '../utils'
import * as errors from 'xo-common/api-errors'
// ===================================================================
const debug = createDebug('xo:api')
const PERMISSIONS = {
none: 0,
read: 1,
@@ -32,39 +28,30 @@ const PERMISSIONS = {
admin: 3
}
// TODO:
// - error when adding a server to a pool with incompatible version
// - error when halted VM migration failure is due to XS < 7
const XAPI_ERROR_TO_XO_ERROR = {
EHOSTUNREACH: errors.serverUnreachable,
HOST_OFFLINE: ([ host ], getId) => errors.hostOffline({ host: getId(host) }),
NO_HOSTS_AVAILABLE: errors.noHostsAvailable,
NOT_SUPPORTED_DURING_UPGRADE: errors.notSupportedDuringUpgrade,
OPERATION_BLOCKED: ([ ref, code ], getId) => errors.operationBlocked({ objectId: getId(ref), code }),
PATCH_PRECHECK_FAILED_ISO_MOUNTED: ([ patch ]) => errors.patchPrecheck({ errorType: 'isoMounted', patch }),
PIF_VLAN_EXISTS: ([ pif ], getId) => errors.objectAlreadyExists({ objectId: getId(pif), objectType: 'PIF' }),
SESSION_AUTHENTICATION_FAILED: errors.authenticationFailed,
VDI_IN_USE: ([ vdi, operation ], getId) => errors.vdiInUse({ vdi: getId(vdi), operation }),
VM_BAD_POWER_STATE: ([ vm, expected, actual ], getId) => errors.vmBadPowerState({ vm: getId(vm), expected, actual }),
VM_IS_TEMPLATE: errors.vmIsTemplate,
VM_LACKS_FEATURE: ([ vm ], getId) => errors.vmLacksFeature({ vm: getId(vm) }),
VM_LACKS_FEATURE_SHUTDOWN: ([ vm ], getId) => errors.vmLacksFeature({ vm: getId(vm), feature: 'shutdown' }),
VM_MISSING_PV_DRIVERS: ([ vm ], getId) => errors.vmMissingPvDrivers({ vm: getId(vm) })
}
const hasPermission = (user, permission) => (
PERMISSIONS[user.permission] >= PERMISSIONS[permission]
)
// FIXME: this function is specific to XO and should not be defined in
// this file.
function checkPermission (method) {
/* jshint validthis: true */
const {permission} = method
// No requirement.
if (permission === undefined) {
return
}
const {user} = this
if (!user) {
throw new Unauthorized()
}
// The only requirement is login.
if (!permission) {
return
}
if (!hasPermission(user, permission)) {
throw new Unauthorized()
}
}
// -------------------------------------------------------------------
function checkParams (method, params) {
const schema = method.params
if (!schema) {
@@ -77,11 +64,34 @@ function checkParams (method, params) {
}, params)
if (!result.valid) {
throw new InvalidParameters(result.error)
throw errors.invalidParameters(result.error)
}
}
// -------------------------------------------------------------------
function checkPermission (method) {
/* jshint validthis: true */
const {permission} = method
// No requirement.
if (permission === undefined) {
return
}
const {user} = this
if (!user) {
throw errors.unauthorized()
}
// The only requirement is login.
if (!permission) {
return
}
if (!hasPermission(user, permission)) {
throw errors.unauthorized()
}
}
function resolveParams (method, params) {
const resolve = method.resolve
@@ -91,7 +101,7 @@ function resolveParams (method, params) {
const {user} = this
if (!user) {
throw new Unauthorized()
throw errors.unauthorized()
}
const userId = user.id
@@ -127,95 +137,29 @@ function resolveParams (method, params) {
return params
}
throw new Unauthorized()
throw errors.unauthorized()
})
}
// ===================================================================
function getMethodsInfo () {
const methods = {}
forEach(this.api._methods, (method, name) => {
methods[name] = {
description: method.description,
params: method.params || {},
permission: method.permission
}
})
return methods
}
getMethodsInfo.description = 'returns the signatures of all available API methods'
// -------------------------------------------------------------------
const getServerVersion = () => xoServerVersion
getServerVersion.description = 'return the version of xo-server'
// -------------------------------------------------------------------
const getVersion = () => '0.1'
getVersion.description = 'API version (unstable)'
// -------------------------------------------------------------------
function listMethods () {
return getKeys(this.api._methods)
}
listMethods.description = 'returns the name of all available API methods'
// -------------------------------------------------------------------
function methodSignature ({method: name}) {
const method = this.api.getMethod(name)
if (!method) {
throw new NoSuchObject()
}
// Return an array for compatibility with XML-RPC.
return [
// XML-RPC require the name of the method.
{
name,
description: method.description,
params: method.params || {},
permission: method.permission
}
]
}
methodSignature.description = 'returns the signature of an API method'
// ===================================================================
const getServerTimezone = (tz => () => tz)(moment.tz.guess())
getServerTimezone.description = 'return the timezone server'
// ===================================================================
export default class Api {
constructor ({
context,
verboseLogsOnErrors
} = {}) {
constructor (xo) {
this._logger = null
this._methods = createRawObject()
this._verboseLogsOnErrors = verboseLogsOnErrors
this.context = context
this._xo = xo
this.addMethods({
system: {
getMethodsInfo,
getServerVersion,
getServerTimezone,
getVersion,
listMethods,
methodSignature
}
this.addApiMethods(methods)
xo.on('start', async () => {
this._logger = await xo.getLogger('api')
})
}
addMethod (name, method) {
get apiMethods () {
return this._methods
}
addApiMethod (name, method) {
const methods = this._methods
if (name in methods) {
@@ -224,21 +168,22 @@ export default class Api {
methods[name] = method
let unset = () => {
let remove = () => {
delete methods[name]
unset = noop
remove = noop
}
return () => unset()
return () => remove()
}
addMethods (methods) {
addApiMethods (methods) {
let base = ''
const removes = []
const addMethod = (method, name) => {
name = base + name
if (isFunction(method)) {
this.addMethod(name, method)
removes.push(this.addApiMethod(name, method))
return
}
@@ -247,20 +192,35 @@ export default class Api {
forEach(method, addMethod)
base = oldBase
}
forEach(methods, addMethod)
try {
forEach(methods, addMethod)
} catch (error) {
// Remove all added methods.
forEach(removes, remove => remove())
// Forward the error
throw error
}
let remove = () => {
forEach(removes, remove => remove())
remove = noop
}
return remove
}
async call (session, name, params) {
async callApiMethod (session, name, params) {
const startTime = Date.now()
const method = this.getMethod(name)
const method = this._methods[name]
if (!method) {
throw new MethodNotFound(name)
}
// FIXME: it can cause issues if there any property assignments in
// XO methods called from the API.
const context = Object.create(this.context, {
const context = Object.create(this._xo, {
api: { // Used by system.*().
value: this
},
@@ -269,10 +229,9 @@ export default class Api {
}
})
// FIXME: too coupled with XO.
// Fetch and inject the current user.
const userId = session.get('user_id', undefined)
context.user = userId && await context.getUser(userId)
context.user = userId && await this._xo.getUser(userId)
const userName = context.user
? context.user.email
: '(unknown user)'
@@ -293,7 +252,7 @@ export default class Api {
params.id = params[namespace]
}
checkParams(method, params)
checkParams.call(context, method, params)
const resolvedParams = await resolveParams.call(context, method, params)
@@ -315,15 +274,19 @@ export default class Api {
return result
} catch (error) {
if (this._verboseLogsOnErrors) {
debug(
'%s | %s(%j) [%s] =!> %s',
userName,
name,
params,
ms(Date.now() - startTime),
error
)
const data = {
userId,
method: name,
params,
duration: Date.now() - startTime,
error: serializeError(error)
}
const message = `${userName} | ${name}(${JSON.stringify(params)}) [${ms(Date.now() - startTime)}] =!> ${error}`
this._logger.error(message, data)
if (this._xo._config.verboseLogsOnErrors) {
debug(message)
const stack = error && error.stack
if (stack) {
@@ -339,11 +302,18 @@ export default class Api {
)
}
const xoError = XAPI_ERROR_TO_XO_ERROR[error.code]
if (xoError) {
throw xoError(error.params, ref => {
try {
return this._xo.getObject(ref).id
} catch (e) {
return ref
}
})
}
throw error
}
}
// Returns the API method registered under `name`, or undefined if none.
getMethod (name) {
  return this._methods[name]
}
}

View File

@@ -1,9 +1,8 @@
import Token, { Tokens } from '../models/token'
import {
NoSuchObject
} from '../api-errors'
import { noSuchObject } from 'xo-common/api-errors'
import {
createRawObject,
forEach,
generateToken,
pCatch,
noop
@@ -11,13 +10,10 @@ import {
// ===================================================================
class NoSuchAuthenticationToken extends NoSuchObject {
constructor (id) {
super(id, 'authentication token')
}
}
// Builds the standard "no such object" API error for an unknown
// authentication token.
function noSuchAuthenticationToken (id) {
  return noSuchObject(id, 'authenticationToken')
}
// ===================================================================
// Validity duration of an authentication token: 30 days in milliseconds.
const ONE_MONTH = 1e3 * 60 * 60 * 24 * 30
export default class {
constructor (xo) {
@@ -30,7 +26,7 @@ export default class {
this._providers = new Set()
// Creates persistent collections.
this._tokens = new Tokens({
const tokensDb = this._tokens = new Tokens({
connection: xo._redis,
prefix: 'xo:token',
indexes: ['user_id']
@@ -61,9 +57,26 @@ export default class {
try {
return (await xo.getAuthenticationToken(tokenId)).user_id
} catch (e) {
return
}
} catch (error) {}
})
xo.on('clean', async () => {
const tokens = await tokensDb.get()
const toRemove = []
const now = Date.now()
forEach(tokens, ({ expiration, id }) => {
if (!expiration || expiration < now) {
toRemove.push(id)
}
})
await tokensDb.remove(toRemove)
})
xo.on('start', () => {
xo.addConfigManager('authTokens',
() => tokensDb.get(),
tokens => tokensDb.update(tokens)
)
})
}
@@ -141,7 +154,7 @@ export default class {
const token = new Token({
id: await generateToken(),
user_id: userId,
expiration: Date.now() + 1e3 * 60 * 60 * 24 * 30 // 1 month validity.
expiration: Date.now() + ONE_MONTH
})
await this._tokens.add(token)
@@ -152,14 +165,14 @@ export default class {
async deleteAuthenticationToken (id) {
if (!await this._tokens.remove(id)) {
throw new NoSuchAuthenticationToken(id)
throw noSuchAuthenticationToken(id)
}
}
async getAuthenticationToken (id) {
let token = await this._tokens.first(id)
if (!token) {
throw new NoSuchAuthenticationToken(id)
throw noSuchAuthenticationToken(id)
}
token = token.properties
@@ -169,7 +182,7 @@ export default class {
)) {
this._tokens.remove(id)::pCatch(noop)
throw new NoSuchAuthenticationToken(id)
throw noSuchAuthenticationToken(id)
}
return token

View File

@@ -1,29 +1,44 @@
import endsWith from 'lodash/endsWith'
import deferrable from 'golike-defer'
import escapeStringRegexp from 'escape-string-regexp'
import eventToPromise from 'event-to-promise'
import filter from 'lodash/filter'
import find from 'lodash/find'
import findIndex from 'lodash/findIndex'
import sortBy from 'lodash/sortBy'
import startsWith from 'lodash/startsWith'
import execa from 'execa'
import splitLines from 'split-lines'
import { createParser as createPairsParser } from 'parse-pairs'
import { createReadStream, readdir, stat } from 'fs'
import { satisfies as versionSatisfies } from 'semver'
import { utcFormat } from 'd3-time-format'
import {
basename,
dirname
} from 'path'
import { satisfies as versionSatisfies } from 'semver'
import vhdMerge from '../vhd-merge'
import xapiObjectToXo from '../xapi-object-to-xo'
import {
deferrable
} from '../decorators'
endsWith,
filter,
find,
findIndex,
includes,
once,
sortBy,
startsWith,
trim
} from 'lodash'
import vhdMerge, { chainVhd } from '../vhd-merge'
import xapiObjectToXo from '../xapi-object-to-xo'
import { lvs, pvs } from '../lvm'
import {
forEach,
mapFilter,
mapToArray,
noop,
pCatch,
pFinally,
pFromCallback,
pSettle,
safeDateFormat
resolveSubpath,
safeDateFormat,
safeDateParse,
tmpDir
} from '../utils'
import {
VDI_FORMAT_VHD
@@ -34,6 +49,8 @@ import {
const DELTA_BACKUP_EXT = '.json'
const DELTA_BACKUP_EXT_LENGTH = DELTA_BACKUP_EXT.length
const shortDate = utcFormat('%Y-%m-%d')
// Test if a file is a vdi backup. (full or delta)
const isVdiBackup = name => /^\d+T\d+Z_(?:full|delta)\.vhd$/.test(name)
@@ -41,6 +58,41 @@ const isVdiBackup = name => /^\d+T\d+Z_(?:full|delta)\.vhd$/.test(name)
const isDeltaVdiBackup = name => /^\d+T\d+Z_delta\.vhd$/.test(name)
const isFullVdiBackup = name => /^\d+T\d+Z_full\.vhd$/.test(name)
// Converts a Date to a UNIX timestamp in seconds (rounded); falsy
// inputs (null/undefined) are passed through unchanged.
function toTimestamp (date) {
  if (!date) {
    return date
  }
  return Math.round(date.getTime() / 1000)
}
// Parses a VM backup path into a descriptor object.
//
// Recognized layouts:
// - `<datetime>_<tag>_<name>.xva`                      → full XVA backup
// - `vm_delta_<tag>_<uuid>/<datetime>_<name>.json`     → delta backup
//
// Throws when the path matches neither layout.
const parseVmBackupPath = name => {
  const base = basename(name)

  const xvaMatches = /^([^_]+)_([^_]+)_(.+)\.xva$/.exec(base)
  if (xvaMatches !== null) {
    return {
      datetime: toTimestamp(safeDateParse(xvaMatches[1])),
      id: name,
      name: xvaMatches[3],
      tag: xvaMatches[2],
      type: 'xva'
    }
  }

  const jsonMatches = /^([^_]+)_(.+)\.json$/.exec(base)
  if (jsonMatches !== null) {
    const dirMatches = /^vm_delta_([^_]+)_(.+)$/.exec(basename(dirname(name)))
    if (dirMatches !== null) {
      return {
        datetime: toTimestamp(safeDateParse(jsonMatches[1])),
        id: name,
        name: jsonMatches[2],
        tag: dirMatches[1],
        type: 'delta',
        uuid: dirMatches[2]
      }
    }
  }

  throw new Error('invalid VM backup filename')
}
// Get the timestamp of a vdi backup. (full or delta)
const getVdiTimestamp = name => {
const arr = /^(\d+T\d+Z)_(?:full|delta)\.vhd$/.exec(name)
@@ -50,21 +102,200 @@ const getVdiTimestamp = name => {
const getDeltaBackupNameWithoutExt = name => name.slice(0, -DELTA_BACKUP_EXT_LENGTH)
const isDeltaBackup = name => endsWith(name, DELTA_BACKUP_EXT)
// Checksums have been corrupted between 5.2.6 and 5.2.7.
//
// For a short period of time, bad checksums will be regenerated
// instead of rejected.
//
// TODO: restore when enough time has passed (a week/a month).
async function checkFileIntegrity (handler, name) {
let stream
await handler.refreshChecksum(name)
// let stream
//
// try {
// stream = await handler.createReadStream(name, { checksum: true })
// } catch (error) {
// if (error.code === 'ENOENT') {
// return
// }
//
// throw error
// }
//
// stream.resume()
// await eventToPromise(stream, 'finish')
}
try {
stream = await handler.createReadStream(name, { checksum: true })
} catch (error) {
if (error.code === 'ENOENT') {
return
}
// -------------------------------------------------------------------
throw error
const listPartitions = (() => {
const IGNORED = {}
forEach([
// https://github.com/jhermsmeier/node-mbr/blob/master/lib/partition.js#L38
0x05, 0x0F, 0x85, 0x15, 0x91, 0x9B, 0x5E, 0x5F, 0xCF, 0xD5, 0xC5,
0x82 // swap
], type => {
IGNORED[type] = true
})
const TYPES = {
0x7: 'NTFS',
0x83: 'linux',
0xc: 'FAT'
}
stream.resume()
await eventToPromise(stream, 'finish')
const parseLine = createPairsParser({
keyTransform: key => key === 'UUID'
? 'id'
: key.toLowerCase(),
valueTransform: (value, key) => key === 'start' || key === 'size'
? +value
: key === 'type'
? TYPES[+value] || value
: value
})
return device => execa.stdout('partx', [
'--bytes',
'--output=NR,START,SIZE,NAME,UUID,TYPE',
'--pairs',
device.path
]).then(stdout => mapFilter(splitLines(stdout), line => {
const partition = parseLine(line)
const { type } = partition
if (type != null && !IGNORED[+type]) {
return partition
}
}))
})()
// handle LVM logical volumes automatically
// Like `listPartitions` but expands LVM physical volumes (type 0x8e):
// each logical volume they contain is listed as a pseudo-partition
// whose id is `<pv id>/<vg name>/<lv name>`.
const listPartitions2 = device => listPartitions(device).then(partitions => {
  const partitions2 = []
  const promises = []
  forEach(partitions, partition => {
    if (+partition.type === 0x8e) {
      // LVM PV: attach it to a loop device and enumerate its LVs.
      promises.push(mountLvmPv(device, partition).then(device => {
        const promise = listLvmLvs(device).then(lvs => {
          forEach(lvs, lv => {
            partitions2.push({
              name: lv.lv_name,
              size: +lv.lv_size,
              id: `${partition.id}/${lv.vg_name}/${lv.lv_name}`
            })
          })
        })
        // Always detach the loop device, whether listing succeeded or not.
        promise::pFinally(device.unmount)
        return promise
      }))
    } else {
      partitions2.push(partition)
    }
  })
  return Promise.all(promises).then(() => partitions2)
})
// Loop-mounts `device.path` read-only in a temporary directory; when
// `partitionId` is given, mounts that partition via an `offset` option.
//
// Resolves with `{ path, unmount }`; `unmount` is idempotent (`once`)
// and performs a lazy umount.
const mountPartition = (device, partitionId) => Promise.all([
  partitionId != null && listPartitions(device),
  tmpDir()
]).then(([ partitions, path ]) => {
  const options = [
    'loop',
    'ro'
  ]

  if (partitions) {
    const partition = find(partitions, { id: partitionId })

    const { start } = partition
    if (start != null) {
      // A 512-byte sector size is assumed here — TODO confirm.
      options.push(`offset=${start * 512}`)
    }
  }

  const mount = options => execa('mount', [
    `--options=${options.join(',')}`,
    `--source=${device.path}`,
    `--target=${path}`
  ])

  // `noload` option is used for ext3/ext4; if it fails it might
  // be another fs, try without.
  return mount([ ...options, 'noload' ]).catch(() =>
    mount(options)
  ).then(() => ({
    path,
    unmount: once(() => execa('umount', [ '--lazy', path ]))
  }), error => {
    // FIXME(review): leftover debug output — use a proper logger or drop.
    console.log(error)
    throw error
  })
})
// handle LVM logical volumes automatically
// Like `mountPartition` but also supports the LVM pseudo-partition ids
// (`<pv id>/<vg name>/<lv name>`) produced by `listPartitions2`.
const mountPartition2 = (device, partitionId) => {
  if (
    partitionId == null ||
    !includes(partitionId, '/')
  ) {
    // Plain partition id (or none): delegate.
    return mountPartition(device, partitionId)
  }

  const [ pvId, vgName, lvName ] = partitionId.split('/')

  return listPartitions(device).then(partitions =>
    find(partitions, { id: pvId })
  // NOTE(review): the `pvId` parameter below is actually the partition
  // *object* found above — it shadows the outer string id.
  ).then(pvId => mountLvmPv(device, pvId)).then(device1 =>
    // Activate the volume group, resolve the LV's device path, then
    // mount it as a plain device.
    execa('vgchange', [ '-ay', vgName ]).then(() =>
      lvs([ 'lv_name', 'lv_path' ], vgName).then(lvs =>
        find(lvs, { lv_name: lvName }).lv_path
      )
    ).then(path =>
      mountPartition({ path }).then(device2 => ({
        ...device2,
        // Unmount the LV mount first, then detach the PV loop device.
        unmount: () => device2.unmount().then(device1.unmount)
      }))
    ).catch(error => device1.unmount().then(() => {
      throw error
    }))
  )
}
// -------------------------------------------------------------------
// Lists the logical volumes present on an LVM physical volume
// (entries without a `lv_name` are filtered out).
const listLvmLvs = device => pvs([
  'lv_name',
  'lv_path',
  'lv_size',
  'vg_name'
], device.path).then(pvs => filter(pvs, 'lv_name'))
// Attaches an LVM physical volume to a loop device (optionally at a
// partition offset) and resolves with `{ path, unmount }`.
//
// `unmount` (idempotent) detaches the loop device and deactivates the
// volume groups it hosted.
const mountLvmPv = (device, partition) => {
  const args = []
  if (partition) {
    // A 512-byte sector size is assumed here — TODO confirm.
    args.push('-o', partition.start * 512)
  }
  args.push(
    '--show',
    '-f',
    device.path
  )

  return execa.stdout('losetup', args).then(stdout => {
    // `losetup --show` prints the path of the created loop device.
    const path = trim(stdout)
    return {
      path,
      unmount: once(() => Promise.all([
        execa('losetup', [ '-d', path ]),
        pvs('vg_name', path).then(vgNames => execa('vgchange', [
          '-an',
          ...vgNames
        ]))
      ]))
    }
  })
}
// ===================================================================
@@ -72,6 +303,15 @@ async function checkFileIntegrity (handler, name) {
export default class {
constructor (xo) {
this._xo = xo
// clean any LVM volumes that might have not been properly
// unmounted
xo.on('start', () => Promise.all([
execa('losetup', [ '-D' ]),
execa('vgchange', [ '-an' ])
]).then(() =>
execa('pvscan', [ '--cache' ])
))
}
async listRemoteBackups (remoteId) {
@@ -101,12 +341,52 @@ export default class {
return backups
}
async listVmBackups (remoteId) {
const handler = await this._xo.getRemoteHandler(remoteId)
const backups = []
await Promise.all(mapToArray(await handler.list(), entry => {
if (endsWith(entry, '.xva')) {
backups.push(parseVmBackupPath(entry))
} else if (startsWith(entry, 'vm_delta_')) {
return handler.list(entry).then(children => Promise.all(mapToArray(children, child => {
if (endsWith(child, '.json')) {
const path = `${entry}/${child}`
const record = parseVmBackupPath(path)
backups.push(record)
return handler.readFile(path).then(data => {
record.disks = mapToArray(JSON.parse(data).vdis, vdi => ({
id: `${entry}/${vdi.xoPath}`,
name: vdi.name_label,
uuid: vdi.uuid
}))
}).catch(noop)
}
})))
}
}))
return backups
}
async importVmBackup (remoteId, file, sr) {
const handler = await this._xo.getRemoteHandler(remoteId)
const stream = await handler.createReadStream(file)
const xapi = this._xo.getXapi(sr)
const vm = await xapi.importVm(stream, { srId: sr._xapiId })
const { datetime } = parseVmBackupPath(file)
await Promise.all([
xapi.addTag(vm.$id, 'restored from backup'),
xapi.editVm(vm.$id, {
name_label: `${vm.name_label} (${shortDate(datetime * 1e3)})`
})
])
return xapiObjectToXo(vm).id
}
@@ -141,7 +421,7 @@ export default class {
stream => stream.cancel()
))
return srcXapi.deleteVm(delta.vm.uuid, true)
return srcXapi.deleteVm(delta.vm.uuid)
})
const promise = targetXapi.importDeltaVm(
@@ -155,7 +435,7 @@ export default class {
// Once done, (asynchronously) remove the (now obsolete) local
// base.
if (localBaseUuid) {
promise.then(() => srcXapi.deleteVm(localBaseUuid, true))::pCatch(noop)
promise.then(() => srcXapi.deleteVm(localBaseUuid))::pCatch(noop)
}
// (Asynchronously) Identify snapshot as future base.
@@ -291,6 +571,18 @@ export default class {
return backups.slice(i)
}
// fix the parent UUID and filename in delta files after download from xapi or backup compression
async _chainDeltaVdiBackups ({handler, dir}) {
const backups = await this._listVdiBackups(handler, dir)
for (let i = 1; i < backups.length; i++) {
const childPath = dir + '/' + backups[i]
const modified = await chainVhd(handler, dir + '/' + backups[i - 1], handler, childPath)
if (modified) {
await handler.refreshChecksum(childPath)
}
}
}
async _mergeDeltaVdiBackups ({handler, dir, depth}) {
const backups = await this._listVdiBackups(handler, dir)
let i = backups.length - depth
@@ -432,16 +724,7 @@ export default class {
@deferrable.onFailure
async rollingDeltaVmBackup ($onFailure, {vm, remoteId, tag, depth}) {
const remote = await this._xo.getRemote(remoteId)
if (!remote) {
throw new Error(`No such Remote ${remoteId}`)
}
if (!remote.enabled) {
throw new Error(`Remote ${remoteId} is disabled`)
}
const handler = await this._xo.getRemoteHandler(remote)
const handler = await this._xo.getRemoteHandler(remoteId)
const xapi = this._xo.getXapi(vm)
vm = xapi.getObject(vm._xapiId)
@@ -452,7 +735,7 @@ export default class {
base => base.snapshot_time
)
const baseVm = bases.pop()
forEach(bases, base => { xapi.deleteVm(base.$id, true)::pCatch(noop) })
forEach(bases, base => { xapi.deleteVm(base.$id)::pCatch(noop) })
// Check backup dirs.
const dir = `vm_delta_${tag}_${vm.uuid}`
@@ -487,7 +770,7 @@ export default class {
stream => stream.cancel()
))
await xapi.deleteVm(delta.vm.$id, true)
await xapi.deleteVm(delta.vm.uuid)
})
// Save vdis.
@@ -515,15 +798,15 @@ export default class {
)
const fulFilledVdiBackups = []
let success = true
let error
// One or many vdi backups have failed.
for (const vdiBackup of vdiBackups) {
if (vdiBackup.isFulfilled()) {
fulFilledVdiBackups.push(vdiBackup)
} else {
console.error(`Rejected backup: ${vdiBackup.reason()}`)
success = false
error = vdiBackup.reason()
console.error('Rejected backup:', error)
}
}
@@ -535,8 +818,8 @@ export default class {
)
})
if (!success) {
throw new Error('Rolling delta vm backup failed.')
if (error) {
throw error
}
const date = safeDateFormat(new Date())
@@ -553,7 +836,9 @@ export default class {
mapToArray(vdiBackups, vdiBackup => {
const backupName = vdiBackup.value()
const backupDirectory = backupName.slice(0, backupName.lastIndexOf('/'))
return this._mergeDeltaVdiBackups({ handler, dir: `${dir}/${backupDirectory}`, depth })
const backupDir = `${dir}/${backupDirectory}`
return this._mergeDeltaVdiBackups({ handler, dir: backupDir, depth })
.then(() => { this._chainDeltaVdiBackups({ handler, dir: backupDir }) })
})
)
@@ -561,7 +846,7 @@ export default class {
await this._removeOldDeltaVmBackups(xapi, { vm, handler, dir, depth })
if (baseVm) {
xapi.deleteVm(baseVm.$id, true)::pCatch(noop)
xapi.deleteVm(baseVm.$id)::pCatch(noop)
}
// Returns relative path.
@@ -569,10 +854,13 @@ export default class {
}
async importDeltaVmBackup ({sr, remoteId, filePath}) {
filePath = `${filePath}${DELTA_BACKUP_EXT}`
const { datetime } = parseVmBackupPath(filePath)
const handler = await this._xo.getRemoteHandler(remoteId)
const xapi = this._xo.getXapi(sr)
const delta = JSON.parse(await handler.readFile(`${filePath}${DELTA_BACKUP_EXT}`))
const delta = JSON.parse(await handler.readFile(filePath))
let vm
const { version } = delta
@@ -599,9 +887,12 @@ export default class {
)
)
delta.vm.name_label += ` (${shortDate(datetime * 1e3)})`
delta.vm.tags.push('restored from backup')
vm = await xapi.importDeltaVm(delta, {
srId: sr._xapiId,
disableStartAfterImport: false
disableStartAfterImport: false,
srId: sr._xapiId
})
} else {
throw new Error(`Unsupported delta backup version: ${version}`)
@@ -613,16 +904,7 @@ export default class {
// -----------------------------------------------------------------
async backupVm ({vm, remoteId, file, compress, onlyMetadata}) {
const remote = await this._xo.getRemote(remoteId)
if (!remote) {
throw new Error(`No such Remote ${remoteId}`)
}
if (!remote.enabled) {
throw new Error(`Backup remote ${remoteId} is disabled`)
}
const handler = await this._xo.getRemoteHandler(remote)
const handler = await this._xo.getRemoteHandler(remoteId)
return this._backupVm(vm, handler, file, {compress, onlyMetadata})
}
@@ -640,16 +922,7 @@ export default class {
}
async rollingBackupVm ({vm, remoteId, tag, depth, compress, onlyMetadata}) {
const remote = await this._xo.getRemote(remoteId)
if (!remote) {
throw new Error(`No such Remote ${remoteId}`)
}
if (!remote.enabled) {
throw new Error(`Backup remote ${remoteId} is disabled`)
}
const handler = await this._xo.getRemoteHandler(remote)
const handler = await this._xo.getRemoteHandler(remoteId)
const files = await handler.list()
@@ -676,7 +949,7 @@ export default class {
const promises = []
for (let surplus = snapshots.length - (depth - 1); surplus > 0; surplus--) {
const oldSnap = snapshots.shift()
promises.push(xapi.deleteVm(oldSnap.uuid, true))
promises.push(xapi.deleteVm(oldSnap.uuid))
}
await Promise.all(promises)
}
@@ -690,12 +963,12 @@ export default class {
const sourceXapi = this._xo.getXapi(vm)
vm = sourceXapi.getObject(vm._xapiId)
const vms = []
const vms = {}
forEach(sr.$VDIs, vdi => {
const vbds = vdi.$VBDs
const vm = vbds && vbds[0] && vbds[0].$VM
if (vm && reg.test(vm.name_label)) {
vms.push(vm)
vms[vm.$id] = vm
}
})
const olderCopies = sortBy(vms, 'name_label')
@@ -706,11 +979,121 @@ export default class {
})
await targetXapi.addTag(drCopy.$id, 'Disaster Recovery')
const promises = []
for (let surplus = olderCopies.length - (depth - 1); surplus > 0; surplus--) {
const oldDRVm = olderCopies.shift()
promises.push(targetXapi.deleteVm(oldDRVm.$id, true))
const n = 1 - depth
await Promise.all(mapToArray(n ? olderCopies.slice(0, n) : olderCopies, vm =>
// Do not consider a failure to delete an old copy as a fatal error.
targetXapi.deleteVm(vm.$id)::pCatch(noop)
))
}
// -----------------------------------------------------------------
_mountVhd (remoteId, vhdPath) {
return Promise.all([
this._xo.getRemoteHandler(remoteId),
tmpDir()
]).then(([ handler, mountDir ]) => {
if (!handler._getRealPath) {
throw new Error(`this remote is not supported`)
}
const remotePath = handler._getRealPath()
vhdPath = resolveSubpath(remotePath, vhdPath)
return Promise.resolve().then(() => {
// TODO: remove when no longer necessary.
//
// Currently, the filenames of the VHD changes over time
// (delta → full), but the JSON is not updated, therefore the
// VHD path may need to be fixed.
return endsWith(vhdPath, '_delta.vhd')
? pFromCallback(cb => stat(vhdPath, cb)).then(
() => vhdPath,
error => {
if (error && error.code === 'ENOENT') {
return `${vhdPath.slice(0, -10)}_full.vhd`
}
}
)
: vhdPath
}).then(vhdPath => execa('vhdimount', [ vhdPath, mountDir ])).then(() =>
pFromCallback(cb => readdir(mountDir, cb)).then(entries => {
let max = 0
forEach(entries, entry => {
const matches = /^vhdi(\d+)/.exec(entry)
if (matches) {
const value = +matches[1]
if (value > max) {
max = value
}
}
})
if (!max) {
throw new Error('no disks found')
}
return {
path: `${mountDir}/vhdi${max}`,
unmount: once(() => execa('fusermount', [ '-uz', mountDir ]))
}
})
)
})
}
_mountPartition (remoteId, vhdPath, partitionId) {
return this._mountVhd(remoteId, vhdPath).then(device =>
mountPartition2(device, partitionId).then(partition => ({
...partition,
unmount: () => partition.unmount().then(device.unmount)
})).catch(error => device.unmount().then(() => {
throw error
}))
)
}
@deferrable
async scanDiskBackup ($defer, remoteId, vhdPath) {
const device = await this._mountVhd(remoteId, vhdPath)
$defer(device.unmount)
return {
partitions: await listPartitions2(device)
}
await Promise.all(promises)
}
@deferrable
async scanFilesInDiskBackup ($defer, remoteId, vhdPath, partitionId, path) {
const partition = await this._mountPartition(remoteId, vhdPath, partitionId)
$defer(partition.unmount)
path = resolveSubpath(partition.path, path)
const entries = await pFromCallback(cb => readdir(path, cb))
const entriesMap = {}
await Promise.all(mapToArray(entries, async name => {
const stats = await pFromCallback(cb => stat(`${path}/${name}`, cb))::pCatch(noop)
if (stats) {
entriesMap[stats.isDirectory() ? `${name}/` : name] = {}
}
}))
return entriesMap
}
async fetchFilesInDiskBackup (remoteId, vhdPath, partitionId, paths) {
const partition = await this._mountPartition(remoteId, vhdPath, partitionId)
let i = 0
const onEnd = () => {
if (!--i) {
partition.unmount()
}
}
return mapToArray(paths, path => {
++i
return createReadStream(resolveSubpath(partition.path, path)).once('end', onEnd)
})
}
}

View File

@@ -0,0 +1,33 @@
import { map, noop } from '../utils'
import { all as pAll } from 'promise-toolbox'
export default class ConfigManagement {
constructor () {
this._managers = { __proto__: null }
}
addConfigManager (id, exporter, importer) {
const managers = this._managers
if (id in managers) {
throw new Error(`${id} is already taken`)
}
this._managers[id] = { exporter, importer }
}
exportConfig () {
return map(this._managers, ({ exporter }, key) => exporter())::pAll()
}
importConfig (config) {
const managers = this._managers
return map(config, (entry, key) => {
const manager = managers[key]
if (manager) {
return manager.importer(entry)
}
})::pAll().then(noop)
}
}

307
src/xo-mixins/ip-pools.js Normal file
View File

@@ -0,0 +1,307 @@
import concat from 'lodash/concat'
import countBy from 'lodash/countBy'
import diff from 'lodash/difference'
import findIndex from 'lodash/findIndex'
import flatten from 'lodash/flatten'
import highland from 'highland'
import includes from 'lodash/includes'
import isObject from 'lodash/isObject'
import keys from 'lodash/keys'
import mapValues from 'lodash/mapValues'
import pick from 'lodash/pick'
import remove from 'lodash/remove'
import synchronized from 'decorator-synchronized'
import { noSuchObject } from 'xo-common/api-errors'
import { fromCallback } from 'promise-toolbox'
import {
forEach,
generateUnsecureToken,
isEmpty,
lightSet,
mapToArray,
streamToArray,
throwFn
} from '../utils'
// ===================================================================
// Returns a copy of an IP pool record restricted to its known fields.
// `id` is mandatory (throws when missing); `name` defaults to ''.
const normalize = ipPool => {
  const {
    addresses,
    id = throwFn('id is a required field'),
    name = '',
    networks,
    resourceSets
  } = ipPool
  return { addresses, id, name, networks, resourceSets }
}
// Whether `address` belongs to `ipPool` AND the pool covers `network`
// (`network` may be an object with an `id` or a plain network id).
const _isAddressInIpPool = (address, network, ipPool) => {
  const { addresses, networks } = ipPool
  return (
    addresses &&
    (address in addresses) &&
    includes(networks, isObject(network) ? network.id : network)
  )
}
// ===================================================================
// Note: an address cannot be in two different pools sharing a
// network.
// Manages IP address pools: named sets of addresses tied to networks,
// persisted in the `ipPools` key/value store and optionally restricted
// through resource sets.
export default class IpPools {
  constructor (xo) {
    // Key/value store; only available once the app has started.
    this._store = null
    this._xo = xo

    xo.on('start', async () => {
      this._store = await xo.getStore('ipPools')

      // Make IP pools part of the exportable/importable configuration.
      xo.addConfigManager('ipPools',
        () => this.getAllIpPools(),
        ipPools => Promise.all(mapToArray(ipPools, ipPool => this._save(ipPool)))
      )
    })
  }

  // Creates a new IP pool and returns its generated id.
  async createIpPool ({ addresses, name, networks }) {
    const id = await this._generateId()

    await this._save({
      addresses,
      id,
      name,
      networks
    })

    return id
  }

  // Deletes a pool: detaches it from every resource set, strips its
  // addresses from the VIFs still using them, then removes the record.
  async deleteIpPool (id) {
    const store = this._store

    if (await store.has(id)) {
      await Promise.all(mapToArray(await this._xo.getAllResourceSets(), async set => {
        await this._xo.removeLimitFromResourceSet(`ipPool:${id}`, set.id)
        return this._xo.removeIpPoolFromResourceSet(id, set.id)
      }))
      await this._removeIpAddressesFromVifs(
        mapValues((await this.getIpPool(id)).addresses, 'vifs')
      )

      return store.del(id)
    }

    throw noSuchObject(id, 'ipPool')
  }

  // Streams all stored pools into an array, optionally filtered.
  _getAllIpPools (filter) {
    return streamToArray(this._store.createValueStream(), {
      filter,
      mapper: normalize
    })
  }

  // Returns all pools; when `userId` is given and that user is not an
  // admin, only the pools attached to the user's resource sets.
  async getAllIpPools (userId) {
    let filter
    if (userId != null) {
      const user = await this._xo.getUser(userId)
      if (user.permission !== 'admin') {
        const resourceSets = await this._xo.getAllResourceSets(userId)
        const ipPools = lightSet(flatten(mapToArray(resourceSets, 'ipPools')))
        filter = ({ id }) => ipPools.has(id)
      }
    }

    return this._getAllIpPools(filter)
  }

  // Fetches one pool by id, translating a store miss into the standard
  // "no such object" API error.
  getIpPool (id) {
    return this._store.get(id).then(normalize, error => {
      throw error.notFound ? noSuchObject(id, 'ipPool') : error
    })
  }

  // Returns the first pool containing `address` for `network`, if any.
  async _getAddressIpPool (address, network) {
    const ipPools = await this._getAllIpPools(ipPool => _isAddressInIpPool(address, network, ipPool))

    return ipPools && ipPools[0]
  }

  // Returns a map that indicates how many IPs from each IP pool the VM uses
  // e.g.: { 'ipPool:abc': 3, 'ipPool:xyz': 7 }
  //
  // FIXME(review): `ipPools` holds plain id strings (`ipPool.id` is
  // pushed below), yet the `countBy` iteratee destructures `{ id }` from
  // each element, which would group everything under `ipPool:undefined`.
  // Confirm and either push the pool objects or count with `id => ...`.
  async computeVmIpPoolsUsage (vm) {
    const vifs = vm.VIFs
    const ipPools = []
    for (const vifId of vifs) {
      const { allowedIpv4Addresses, allowedIpv6Addresses, $network } = this._xo.getObject(vifId)
      for (const address of concat(allowedIpv4Addresses, allowedIpv6Addresses)) {
        const ipPool = await this._getAddressIpPool(address, $network)
        ipPool && ipPools.push(ipPool.id)
      }
    }

    return countBy(ipPools, ({ id }) => `ipPool:${id}`)
  }

  // (De)allocates addresses for a VIF across every pool covering the
  // VIF's network, updating resource-set limits accordingly.
  // `@synchronized` serializes concurrent calls on this method.
  @synchronized
  allocIpAddresses (vifId, addAddresses, removeAddresses) {
    const updatedIpPools = {}
    const limits = {}

    const xoVif = this._xo.getObject(vifId)
    const xapi = this._xo.getXapi(xoVif)
    const vif = xapi.getObject(xoVif._xapiId)

    const allocAndSave = (() => {
      const resourseSetId = xapi.xo.getData(vif.VM, 'resourceSet')

      return () => {
        const saveIpPools = () => Promise.all(mapToArray(updatedIpPools, ipPool => this._save(ipPool)))
        // When the VM belongs to a resource set, account for the
        // allocations before persisting the pools.
        return resourseSetId
          ? this._xo.allocateLimitsInResourceSet(limits, resourseSetId).then(
            saveIpPools
          )
          : saveIpPools()
      }
    })()

    return fromCallback(cb => {
      const network = vif.$network
      const networkId = network.$id

      const isVif = id => id === vifId

      // Walk every stored pool and apply the removals/additions to the
      // ones covering this VIF's network.
      highland(this._store.createValueStream()).each(ipPool => {
        const { addresses, networks } = updatedIpPools[ipPool.id] || ipPool
        if (!(addresses && networks && includes(networks, networkId))) {
          return false
        }

        let allocations = 0
        let changed = false
        forEach(removeAddresses, address => {
          let vifs, i
          if (
            (vifs = addresses[address]) &&
            (vifs = vifs.vifs) &&
            (i = findIndex(vifs, isVif)) !== -1
          ) {
            vifs.splice(i, 1)
            --allocations
            changed = true
          }
        })
        forEach(addAddresses, address => {
          const data = addresses[address]
          if (!data) {
            // Address not managed by this pool.
            return
          }
          const vifs = data.vifs || (data.vifs = [])
          if (!includes(vifs, vifId)) {
            vifs.push(vifId)
            ++allocations
            changed = true
          }
        })

        if (changed) {
          const { id } = ipPool
          updatedIpPools[id] = ipPool
          limits[`ipPool:${id}`] = (limits[`ipPool:${id}`] || 0) + allocations
        }
      }).toCallback(cb)
    }).then(allocAndSave)
  }

  // Given a map address → VIF ids, removes those addresses from the
  // `allowedIpv*Addresses` lists of the corresponding VIFs.
  async _removeIpAddressesFromVifs (mapAddressVifs) {
    // Invert the mapping: VIF id → addresses to remove.
    const mapVifAddresses = {}
    forEach(mapAddressVifs, (vifs, address) => {
      forEach(vifs, vifId => {
        if (mapVifAddresses[vifId]) {
          mapVifAddresses[vifId].push(address)
        } else {
          mapVifAddresses[vifId] = [ address ]
        }
      })
    })

    // NOTE(review): `getXapi` is detached from `this._xo` here — verify
    // it does not rely on `this` internally.
    const { getXapi } = this._xo
    return Promise.all(mapToArray(mapVifAddresses, (addresses, vifId) => {
      let vif
      try {
        // The IP may not have been correctly deallocated from the IP pool when the VIF was deleted
        vif = this._xo.getObject(vifId)
      } catch (error) {
        return
      }
      const { allowedIpv4Addresses, allowedIpv6Addresses } = vif
      remove(allowedIpv4Addresses, address => includes(addresses, address))
      remove(allowedIpv6Addresses, address => includes(addresses, address))
      // NOTE(review): this call passes the *remaining* allowed addresses
      // (after removal) as the removals, and is not awaited — confirm
      // both are intended.
      this.allocIpAddresses(vifId, undefined, concat(allowedIpv4Addresses, allowedIpv6Addresses))

      return getXapi(vif).editVif(vif._xapiId, {
        ipv4Allowed: allowedIpv4Addresses,
        ipv6Allowed: allowedIpv6Addresses
      })
    }))
  }

  // Patches a pool. `addresses` is a partial map: a `null` value deletes
  // an address, any other value upserts it; deleted addresses are also
  // stripped from the VIFs using them.
  async updateIpPool (id, {
    addresses,
    name,
    networks,
    resourceSets
  }) {
    const ipPool = await this.getIpPool(id)
    const previousAddresses = { ...ipPool.addresses }

    name != null && (ipPool.name = name)
    if (addresses) {
      const addresses_ = ipPool.addresses || {}
      forEach(addresses, (props, address) => {
        if (props === null) {
          delete addresses_[address]
        } else {
          addresses_[address] = props
        }
      })

      // Remove the addresses that are no longer in the IP pool from the concerned VIFs
      const deletedAddresses = diff(keys(previousAddresses), keys(addresses_))
      await this._removeIpAddressesFromVifs(pick(previousAddresses, deletedAddresses))

      if (isEmpty(addresses_)) {
        delete ipPool.addresses
      } else {
        ipPool.addresses = addresses_
      }
    }

    // TODO: Implement patching like for addresses.
    if (networks) {
      ipPool.networks = networks
    }

    // TODO: Implement patching like for addresses.
    if (resourceSets) {
      ipPool.resourceSets = resourceSets
    }

    await this._save(ipPool)
  }

  // Generates a short random id, retrying on (unlikely) collisions.
  async _generateId () {
    let id
    do {
      id = generateUnsecureToken(8)
    } while (await this._store.has(id))
    return id
  }

  // Normalizes then persists a pool record under its id.
  _save (ipPool) {
    ipPool = normalize(ipPool)

    return this._store.put(ipPool.id, ipPool)
  }
}

View File

@@ -1,29 +1,29 @@
import assign from 'lodash/assign'
import JobExecutor from '../job-executor'
import { Jobs } from '../models/job'
import {
GenericError,
NoSuchObject
} from '../api-errors'
// ===================================================================
class NoSuchJob extends NoSuchObject {
constructor (id) {
super(id, 'job')
}
}
import { mapToArray } from '../utils'
import { noSuchObject } from 'xo-common/api-errors'
// ===================================================================
export default class {
constructor (xo) {
this._executor = new JobExecutor(xo)
this._jobs = new Jobs({
const jobsDb = this._jobs = new Jobs({
connection: xo._redis,
prefix: 'xo:job',
indexes: ['user_id', 'key']
})
xo.on('start', () => {
xo.addConfigManager('jobs',
() => jobsDb.get(),
jobs => Promise.all(mapToArray(jobs, job =>
jobsDb.save(job)
))
)
})
}
async getAllJobs () {
@@ -33,22 +33,27 @@ export default class {
async getJob (id) {
const job = await this._jobs.first(id)
if (!job) {
throw new NoSuchJob(id)
throw noSuchObject(id, 'job')
}
return job.properties
}
async createJob (userId, job) {
async createJob (job) {
// TODO: use plain objects
const job_ = await this._jobs.create(userId, job)
const job_ = await this._jobs.create(job)
return job_.properties
}
async updateJob ({id, type, name, key, method, paramsVector}) {
const oldJob = await this.getJob(id)
assign(oldJob, {type, name, key, method, paramsVector})
return /* await */ this._jobs.save(oldJob)
async updateJob ({id, ...props}) {
const job = await this.getJob(id)
assign(job, props)
if (job.timeout === null) {
delete job.timeout
}
return /* await */ this._jobs.save(job)
}
async removeJob (id) {
@@ -56,24 +61,10 @@ export default class {
}
async runJobSequence (idSequence) {
const notFound = []
for (const id of idSequence) {
let job
try {
job = await this.getJob(id)
} catch (error) {
if (error instanceof NoSuchJob) {
notFound.push(id)
} else {
throw error
}
}
if (job) {
await this._executor.exec(job)
}
}
if (notFound.length > 0) {
throw new GenericError(`The following jobs were not found: ${notFound.join()}`)
const jobs = await Promise.all(mapToArray(idSequence, id => this.getJob(id)))
for (const job of jobs) {
await this._executor.exec(job)
}
}
}

View File

@@ -2,9 +2,9 @@ import createJsonSchemaValidator from 'is-my-json-valid'
import { PluginsMetadata } from '../models/plugin-metadata'
import {
InvalidParameters,
NoSuchObject
} from '../api-errors'
invalidParameters,
noSuchObject
} from 'xo-common/api-errors'
import {
createRawObject,
isFunction,
@@ -13,14 +13,6 @@ import {
// ===================================================================
class NoSuchPlugin extends NoSuchObject {
constructor (id) {
super(id, 'plugin')
}
}
// ===================================================================
export default class {
constructor (xo) {
this._plugins = createRawObject()
@@ -29,12 +21,21 @@ export default class {
connection: xo._redis,
prefix: 'xo:plugin-metadata'
})
xo.on('start', () => {
xo.addConfigManager('plugins',
() => this._pluginsMetadata.get(),
plugins => Promise.all(mapToArray(plugins, plugin =>
this._pluginsMetadata.save(plugin)
))
)
})
}
_getRawPlugin (id) {
const plugin = this._plugins[id]
if (!plugin) {
throw new NoSuchPlugin(id)
throw noSuchObject(id, 'plugin')
}
return plugin
}
@@ -51,16 +52,21 @@ export default class {
instance,
configurationSchema,
configurationPresets,
description,
testSchema,
version
) {
const id = name
const plugin = this._plugins[id] = {
configured: !configurationSchema,
configurationPresets,
configurationSchema,
configured: !configurationSchema,
description,
id,
instance,
name,
testable: isFunction(instance.test),
testSchema,
unloadable: isFunction(instance.unload),
version
}
@@ -68,7 +74,6 @@ export default class {
const metadata = await this._getPluginMetadata(id)
let autoload = true
let configuration
if (metadata) {
({
autoload,
@@ -97,7 +102,7 @@ export default class {
}
})
.catch(error => {
console.error('register plugin %s: %s', name, error && error.stack || error)
console.error('register plugin %s: %s', name, (error && error.stack) || error)
})
}
@@ -105,8 +110,11 @@ export default class {
const {
configurationPresets,
configurationSchema,
description,
loaded,
name,
testable,
testSchema,
unloadable,
version
} = this._getRawPlugin(id)
@@ -119,12 +127,15 @@ export default class {
id,
name,
autoload,
description,
loaded,
unloadable,
version,
configuration,
configurationPresets,
configurationSchema
configurationSchema,
testable,
testSchema
}
}
@@ -139,12 +150,12 @@ export default class {
const { configurationSchema } = plugin
if (!configurationSchema) {
throw new InvalidParameters('plugin not configurable')
throw invalidParameters('plugin not configurable')
}
// See: https://github.com/mafintosh/is-my-json-valid/issues/116
if (configuration == null) {
throw new InvalidParameters([{
throw invalidParameters([{
field: 'data',
message: 'is the wrong type'
}])
@@ -152,7 +163,7 @@ export default class {
const validate = createJsonSchemaValidator(configurationSchema)
if (!validate(configuration)) {
throw new InvalidParameters(validate.errors)
throw invalidParameters(validate.errors)
}
// Sets the plugin configuration.
@@ -191,11 +202,11 @@ export default class {
async loadPlugin (id) {
const plugin = this._getRawPlugin(id)
if (plugin.loaded) {
throw new InvalidParameters('plugin already loaded')
throw invalidParameters('plugin already loaded')
}
if (!plugin.configured) {
throw new InvalidParameters('plugin not configured')
throw invalidParameters('plugin not configured')
}
await plugin.instance.load()
@@ -205,11 +216,11 @@ export default class {
async unloadPlugin (id) {
const plugin = this._getRawPlugin(id)
if (!plugin.loaded) {
throw new InvalidParameters('plugin already unloaded')
throw invalidParameters('plugin already unloaded')
}
if (plugin.unloadable === false) {
throw new InvalidParameters('plugin cannot be unloaded')
throw invalidParameters('plugin cannot be unloaded')
}
await plugin.instance.unload()
@@ -219,4 +230,31 @@ export default class {
async purgePluginConfiguration (id) {
await this._pluginsMetadata.merge(id, { configuration: undefined })
}
async testPlugin (id, data) {
const plugin = this._getRawPlugin(id)
if (!plugin.testable) {
throw invalidParameters('plugin not testable')
}
if (!plugin.loaded) {
throw invalidParameters('plugin not loaded')
}
const { testSchema } = plugin
if (testSchema) {
if (data == null) {
throw invalidParameters([{
field: 'data',
message: 'is the wrong type'
}])
}
const validate = createJsonSchemaValidator(testSchema)
if (!validate(data)) {
throw invalidParameters(validate.errors)
}
}
await plugin.instance.test(data)
}
}

View File

@@ -1,26 +1,18 @@
import { noSuchObject } from 'xo-common/api-errors'
import RemoteHandlerLocal from '../remote-handlers/local'
import RemoteHandlerNfs from '../remote-handlers/nfs'
import RemoteHandlerSmb from '../remote-handlers/smb'
import {
forEach
forEach,
mapToArray
} from '../utils'
import {
NoSuchObject
} from '../api-errors'
import {
Remotes
} from '../models/remote'
// ===================================================================
class NoSuchRemote extends NoSuchObject {
constructor (id) {
super(id, 'remote')
}
}
// ===================================================================
export default class {
constructor (xo) {
this._remotes = new Remotes({
@@ -30,17 +22,29 @@ export default class {
})
xo.on('start', async () => {
xo.addConfigManager('remotes',
() => this._remotes.get(),
remotes => Promise.all(mapToArray(remotes, remote =>
this._remotes.save(remote)
))
)
await this.initRemotes()
await this.syncAllRemotes()
})
xo.on('stop', () => this.forgetAllRemotes())
}
async getRemoteHandler (remote) {
async getRemoteHandler (remote, ignoreDisabled) {
if (typeof remote === 'string') {
remote = await this.getRemote(remote)
}
const Handler = {
if (!(ignoreDisabled || remote.enabled)) {
throw new Error('remote is disabled')
}
const HANDLERS = {
file: RemoteHandlerLocal,
smb: RemoteHandlerSmb,
nfs: RemoteHandlerNfs
@@ -48,14 +52,16 @@ export default class {
// FIXME: should be done in xo-remote-parser.
const type = remote.url.split('://')[0]
if (!Handler[type]) {
const Handler = HANDLERS[type]
if (!Handler) {
throw new Error('Unhandled remote type')
}
return new Handler[type](remote)
return new Handler(remote)
}
async testRemote (remote) {
const handler = await this.getRemoteHandler(remote)
const handler = await this.getRemoteHandler(remote, true)
return handler.test()
}
@@ -66,7 +72,7 @@ export default class {
async _getRemote (id) {
const remote = await this._remotes.first(id)
if (!remote) {
throw new NoSuchRemote(id)
throw noSuchObject(id, 'remote')
}
return remote
@@ -84,7 +90,7 @@ export default class {
async updateRemote (id, {name, url, enabled, error}) {
const remote = await this._getRemote(id)
this._updateRemote(remote, {name, url, enabled, error})
const handler = await this.getRemoteHandler(remote.properties)
const handler = await this.getRemoteHandler(remote.properties, true)
const props = await handler.sync()
this._updateRemote(remote, props)
return (await this._remotes.save(remote)).properties
@@ -102,7 +108,7 @@ export default class {
}
async removeRemote (id) {
const handler = await this.getRemoteHandler(id)
const handler = await this.getRemoteHandler(id, true)
await handler.forget()
await this._remotes.remove(id)
}
@@ -120,7 +126,7 @@ export default class {
const remotes = await this.getAllRemotes()
for (let remote of remotes) {
try {
(await this.getRemoteHandler(remote)).forget()
(await this.getRemoteHandler(remote, true)).forget()
} catch (_) {}
}
}

View File

@@ -2,11 +2,12 @@ import every from 'lodash/every'
import keyBy from 'lodash/keyBy'
import remove from 'lodash/remove'
import some from 'lodash/some'
import synchronized from 'decorator-synchronized'
import {
NoSuchObject,
Unauthorized
} from '../api-errors'
noSuchObject,
unauthorized
} from 'xo-common/api-errors'
import {
forEach,
generateUnsecureToken,
@@ -19,10 +20,12 @@ import {
// ===================================================================
class NoSuchResourceSet extends NoSuchObject {
constructor (id) {
super(id, 'resource set')
}
const VM_RESOURCES = {
cpus: true,
disk: true,
disks: true,
memory: true,
vms: true
}
const computeVmResourcesUsage = vm => {
@@ -54,6 +57,7 @@ const computeVmResourcesUsage = vm => {
const normalize = set => ({
id: set.id,
ipPools: set.ipPools || [],
limits: set.limits
? map(set.limits, limit => isObject(limit)
? limit
@@ -76,6 +80,13 @@ export default class {
this._store = null
xo.on('start', async () => {
xo.addConfigManager('resourceSets',
() => this.getAllResourceSets(),
resourceSets => Promise.all(mapToArray(resourceSets, resourceSet =>
this._save(resourceSet)
))
)
this._store = await xo.getStore('resourceSets')
})
}
@@ -108,7 +119,7 @@ export default class {
// The set does not contains ALL objects.
!every(objectIds, lightSet(set.objects).has)
)) {
throw new Unauthorized()
throw unauthorized()
}
}
@@ -140,14 +151,15 @@ export default class {
return store.del(id)
}
throw new NoSuchResourceSet(id)
throw noSuchObject(id, 'resourceSet')
}
async updateResourceSet (id, {
name = undefined,
subjects = undefined,
objects = undefined,
limits = undefined
limits = undefined,
ipPools = undefined
}) {
const set = await this.getResourceSet(id)
if (name) {
@@ -178,6 +190,9 @@ export default class {
}
})
}
if (ipPools) {
set.ipPools = ipPools
}
await this._save(set)
}
@@ -203,7 +218,7 @@ export default class {
getResourceSet (id) {
return this._store.get(id).then(normalize, error => {
if (error.notFound) {
throw new NoSuchResourceSet(id)
throw noSuchObject(id, 'resourceSet')
}
throw error
@@ -218,7 +233,19 @@ export default class {
async removeObjectFromResourceSet (objectId, setId) {
const set = await this.getResourceSet(setId)
remove(set.objects)
remove(set.objects, id => id === objectId)
await this._save(set)
}
async addIpPoolToResourceSet (ipPoolId, setId) {
const set = await this.getResourceSet(setId)
set.ipPools.push(ipPoolId)
await this._save(set)
}
async removeIpPoolFromResourceSet (ipPoolId, setId) {
const set = await this.getResourceSet(setId)
remove(set.ipPools, id => id === ipPoolId)
await this._save(set)
}
@@ -230,7 +257,7 @@ export default class {
async removeSubjectToResourceSet (subjectId, setId) {
const set = await this.getResourceSet(setId)
remove(set.subjects, subjectId)
remove(set.subjects, id => id === subjectId)
await this._save(set)
}
@@ -246,6 +273,7 @@ export default class {
await this._save(set)
}
@synchronized
async allocateLimitsInResourceSet (limits, setId) {
const set = await this.getResourceSet(setId)
forEach(limits, (quantity, id) => {
@@ -261,6 +289,7 @@ export default class {
await this._save(set)
}
@synchronized
async releaseLimitsInResourceSet (limits, setId) {
const set = await this.getResourceSet(setId)
forEach(limits, (quantity, id) => {
@@ -280,7 +309,9 @@ export default class {
const sets = keyBy(await this.getAllResourceSets(), 'id')
forEach(sets, ({ limits }) => {
forEach(limits, (limit, id) => {
limit.available = limit.total
if (VM_RESOURCES[id]) { // only reset VMs related limits
limit.available = limit.total
}
})
})

View File

@@ -1,9 +1,10 @@
import { BaseError } from 'make-error'
import { NoSuchObject } from '../api-errors.js'
import { Schedules } from '../models/schedule'
import { noSuchObject } from 'xo-common/api-errors.js'
import { Schedules } from '../models/schedule'
import {
forEach,
mapToArray,
scheduleFn
} from '../utils'
@@ -19,15 +20,9 @@ export class ScheduleOverride extends SchedulerError {
}
}
export class NoSuchSchedule extends NoSuchObject {
constructor (scheduleOrId) {
super(scheduleOrId, 'schedule')
}
}
export class ScheduleNotEnabled extends SchedulerError {
constructor (scheduleOrId) {
super('Schedule ' + _resolveId(scheduleOrId)) + ' is not enabled'
super('Schedule ' + _resolveId(scheduleOrId) + ' is not enabled')
}
}
@@ -42,14 +37,23 @@ export class ScheduleAlreadyEnabled extends SchedulerError {
export default class {
constructor (xo) {
this.xo = xo
this._redisSchedules = new Schedules({
const schedules = this._redisSchedules = new Schedules({
connection: xo._redis,
prefix: 'xo:schedule',
indexes: ['user_id', 'job']
})
this._scheduleTable = undefined
xo.on('start', () => this._loadSchedules())
xo.on('start', () => {
xo.addConfigManager('schedules',
() => schedules.get(),
schedules_ => Promise.all(mapToArray(schedules_, schedule =>
schedules.save(schedule)
))
)
return this._loadSchedules()
})
xo.on('stop', () => this._disableAll())
}
@@ -86,7 +90,7 @@ export default class {
_disable (scheduleOrId) {
if (!this._exists(scheduleOrId)) {
throw new NoSuchSchedule(scheduleOrId)
throw noSuchObject(scheduleOrId, 'schedule')
}
if (!this._isEnabled(scheduleOrId)) {
throw new ScheduleNotEnabled(scheduleOrId)
@@ -125,7 +129,7 @@ export default class {
const schedule = await this._redisSchedules.first(id)
if (!schedule) {
throw new NoSuchSchedule(id)
throw noSuchObject(id, 'schedule')
}
return schedule
@@ -166,7 +170,7 @@ export default class {
const { properties } = schedule
if (!this._exists(properties)) {
throw new NoSuchSchedule(properties)
throw noSuchObject(properties, 'schedule')
}
if (this._isEnabled(properties)) {
@@ -182,7 +186,7 @@ export default class {
try {
this._disable(id)
} catch (exc) {
if (!exc instanceof SchedulerError) {
if (!(exc instanceof SchedulerError)) {
throw exc
}
} finally {

View File

@@ -54,7 +54,7 @@ const levelPromise = db => {
dbP[name] = db::value
} else {
dbP[`${name}Sync`] = db::value
dbP[name] = value::promisify(db)
dbP[name] = promisify(value, db)
}
})

View File

@@ -5,11 +5,11 @@ import {
needsRehash,
verify
} from 'hashy'
import {
InvalidCredential,
NoSuchObject
} from '../api-errors'
invalidCredentials,
noSuchObject
} from 'xo-common/api-errors'
import {
Groups
} from '../models/group'
@@ -27,18 +27,6 @@ import {
// ===================================================================
class NoSuchGroup extends NoSuchObject {
constructor (id) {
super(id, 'group')
}
}
class NoSuchUser extends NoSuchObject {
constructor (id) {
super(id, 'user')
}
}
const addToArraySet = (set, value) => set && !includes(set, value)
? set.concat(value)
: [ value ]
@@ -52,22 +40,40 @@ export default class {
const redis = xo._redis
this._groups = new Groups({
const groupsDb = this._groups = new Groups({
connection: redis,
prefix: 'xo:group'
})
const users = this._users = new Users({
const usersDb = this._users = new Users({
connection: redis,
prefix: 'xo:user',
indexes: ['email']
})
xo.on('start', async () => {
if (!await users.exists()) {
xo.addConfigManager('groups',
() => groupsDb.get(),
groups => Promise.all(mapToArray(groups, group => groupsDb.save(group)))
)
xo.addConfigManager('users',
() => usersDb.get(),
users => Promise.all(mapToArray(users, async user => {
const userId = user.id
const conflictUsers = await usersDb.get({ email: user.email })
if (!isEmpty(conflictUsers)) {
await Promise.all(mapToArray(conflictUsers, ({ id }) =>
(id !== userId) && this.deleteUser(user.id)
))
}
return usersDb.save(user)
}))
)
if (!await usersDb.exists()) {
const email = 'admin@admin.net'
const password = 'admin'
await this.createUser(email, {password, permission: 'admin'})
await this.createUser({email, password, permission: 'admin'})
console.log('[INFO] Default user created:', email, ' with password', password)
}
})
@@ -75,13 +81,17 @@ export default class {
// -----------------------------------------------------------------
async createUser (email, { password, ...properties }) {
async createUser ({ name, password, ...properties }) {
if (name) {
properties.email = name
}
if (password) {
properties.pw_hash = await hash(password)
}
// TODO: use plain objects
const user = await this._users.create(email, properties)
const user = await this._users.create(properties)
return user.properties
}
@@ -95,12 +105,18 @@ export default class {
this._xo.getAuthenticationTokensForUser(id)
.then(tokens => {
forEach(tokens, token => {
this._xo._tokens.remove(token.id)
::pCatch(noop)
this._xo.deleteAuthenticationToken(id)::pCatch(noop)
})
})
::pCatch(noop) // Ignore any failures.
// Remove ACLs for this user.
this._xo.getAclsForSubject(id).then(acls => {
forEach(acls, acl => {
this._xo.removeAcl(id, acl.object, acl.action)::pCatch(noop)
})
})
// Remove the user from all its groups.
forEach(user.groups, groupId => {
this.getGroup(groupId)
@@ -153,7 +169,7 @@ export default class {
async _getUser (id) {
const user = await this._users.first(id)
if (!user) {
throw new NoSuchUser(id)
throw noSuchObject(id, 'user')
}
return user
@@ -186,7 +202,7 @@ export default class {
return null
}
throw new NoSuchUser(username)
throw noSuchObject(username, 'user')
}
// Get or create a user associated with an auth provider.
@@ -204,14 +220,15 @@ export default class {
throw new Error(`registering ${name} user is forbidden`)
}
return /* await */ this.createUser(name, {
return /* await */ this.createUser({
name,
_provider: provider
})
}
async changeUserPassword (userId, oldPassword, newPassword) {
if (!(await this.checkUserPassword(userId, oldPassword, false))) {
throw new InvalidCredential()
throw invalidCredentials()
}
await this.updateUser(userId, { password: newPassword })
@@ -239,7 +256,6 @@ export default class {
// TODO: use plain objects.
const group = (await this._groups.create(name)).properties
group.users = JSON.parse(group.users)
return group
}
@@ -248,6 +264,13 @@ export default class {
await this._groups.remove(id)
// Remove ACLs for this group.
this._xo.getAclsForSubject(id).then(acls => {
forEach(acls, acl => {
this._xo.removeAcl(id, acl.object, acl.action)::pCatch(noop)
})
})
// Remove the group from all its users.
forEach(group.users, userId => {
this.getUser(userId)
@@ -267,7 +290,7 @@ export default class {
async getGroup (id) {
const group = await this._groups.first(id)
if (!group) {
throw new NoSuchGroup(id)
throw noSuchObject(id, 'group')
}
return group.properties
@@ -323,7 +346,7 @@ export default class {
if (newUsersIds.has(id)) {
newUsersIds.delete(id)
} else {
oldUsers.push(id)
oldUsersIds.push(id)
}
})
newUsersIds = newUsersIds.toArray()

View File

@@ -1,10 +1,8 @@
import { noSuchObject } from 'xo-common/api-errors'
import Xapi from '../xapi'
import xapiObjectToXo from '../xapi-object-to-xo'
import XapiStats from '../xapi-stats'
import {
GenericError,
NoSuchObject
} from '../api-errors'
import {
camelToSnakeCase,
createRawObject,
@@ -13,7 +11,8 @@ import {
isString,
noop,
pCatch,
popProperty
popProperty,
serializeError
} from '../utils'
import {
Servers
@@ -21,18 +20,10 @@ import {
// ===================================================================
class NoSuchXenServer extends NoSuchObject {
constructor (id) {
super(id, 'xen server')
}
}
// ===================================================================
export default class {
constructor (xo) {
this._objectConflicts = createRawObject() // TODO: clean when a server is disconnected.
this._servers = new Servers({
const serversDb = this._servers = new Servers({
connection: xo._redis,
prefix: 'xo:server',
indexes: ['host']
@@ -43,8 +34,13 @@ export default class {
this._xo = xo
xo.on('start', async () => {
xo.addConfigManager('xenServers',
() => serversDb.get(),
servers => serversDb.update(servers)
)
// Connects to existing servers.
const servers = await this._servers.get()
const servers = await serversDb.get()
for (let server of servers) {
if (server.enabled) {
this.connectXenServer(server.id).catch(error => {
@@ -60,11 +56,12 @@ export default class {
// TODO: disconnect servers on stop.
}
async registerXenServer ({host, username, password, readOnly = false}) {
async registerXenServer ({label, host, username, password, readOnly = false}) {
// FIXME: We are storing passwords which is bad!
// Could we use tokens instead?
// TODO: use plain objects
const server = await this._servers.create({
label: label || undefined,
host,
username,
password,
@@ -79,17 +76,22 @@ export default class {
this.disconnectXenServer(id)::pCatch(noop)
if (!await this._servers.remove(id)) {
throw new NoSuchXenServer(id)
throw noSuchObject(id, 'xenServer')
}
}
async updateXenServer (id, {host, username, password, readOnly, enabled}) {
async updateXenServer (id, {label, host, username, password, readOnly, enabled, error}) {
const server = await this._getXenServer(id)
if (label !== undefined) server.set('label', label || undefined)
if (host) server.set('host', host)
if (username) server.set('username', username)
if (password) server.set('password', password)
if (error !== undefined) {
server.set('error', error ? JSON.stringify(error) : '')
}
if (enabled !== undefined) {
server.set('enabled', enabled ? 'true' : undefined)
}
@@ -110,7 +112,7 @@ export default class {
async _getXenServer (id) {
const server = await this._servers.first(id)
if (!server) {
throw new NoSuchXenServer(id)
throw noSuchObject(id, 'xenServer')
}
return server
@@ -283,23 +285,20 @@ export default class {
xapi.xo.install()
try {
await xapi.connect()
} catch (error) {
if (error.code === 'SESSION_AUTHENTICATION_FAILED') {
throw new GenericError('authentication failed')
await xapi.connect().then(
() => this.updateXenServer(id, { error: null }),
error => {
this.updateXenServer(id, { error: serializeError(error) })
throw error
}
if (error.code === 'EHOSTUNREACH') {
throw new GenericError('host unreachable')
}
throw error
}
)
}
async disconnectXenServer (id) {
const xapi = this._xapis[id]
if (!xapi) {
throw new NoSuchXenServer(id)
throw noSuchObject(id, 'xenServer')
}
delete this._xapis[id]

View File

@@ -3,6 +3,7 @@ import XoCollection from 'xo-collection'
import XoUniqueIndex from 'xo-collection/unique-index'
import {createClient as createRedisClient} from 'redis'
import {EventEmitter} from 'events'
import { noSuchObject } from 'xo-common/api-errors'
import mixins from './xo-mixins'
import Connection from './connection'
@@ -20,9 +21,6 @@ import {
mapToArray,
noop
} from './utils'
import {
NoSuchObject
} from './api-errors'
// ===================================================================
@@ -31,6 +29,9 @@ export default class Xo extends EventEmitter {
constructor (config) {
super()
// a lot of mixins adds listener for start/stop/… events
this.setMaxListeners(0)
this._config = config
this._objects = new XoCollection()
@@ -43,7 +44,33 @@ export default class Xo extends EventEmitter {
this._httpRequestWatchers = createRawObject()
// Connects to Redis.
this._redis = createRedisClient(config.redis && config.redis.uri)
{
const {
renameCommands: rename_commands,
socket: path,
uri: url
} = config.redis || {}
this._redis = createRedisClient({ path, rename_commands, url })
}
}
// -----------------------------------------------------------------
async clean () {
const handleCleanError = error => {
console.error(
'[WARN] clean error:',
(error && error.stack) || error
)
}
await Promise.all(mapToArray(
this.listeners('clean'),
listener => new Promise(resolve => {
resolve(listener.call(this))
}).catch(handleCleanError)
))
}
// -----------------------------------------------------------------
@@ -58,7 +85,7 @@ export default class Xo extends EventEmitter {
const handleStartError = error => {
console.error(
'[WARN] start error:',
error && error.stack || error
(error && error.stack) || error
)
}
await Promise.all(mapToArray(
@@ -82,7 +109,7 @@ export default class Xo extends EventEmitter {
const handleStopError = error => {
console.error(
'[WARN] stop error:',
error && error.stack || error
(error && error.stack) || error
)
}
await Promise.all(mapToArray(
@@ -122,14 +149,14 @@ export default class Xo extends EventEmitter {
const obj = all[key] || byRef[key]
if (!obj) {
throw new NoSuchObject(key, type)
throw noSuchObject(key, type)
}
if (type != null && (
isString(type) && type !== obj.type ||
(isString(type) && type !== obj.type) ||
!includes(type, obj.type) // Array
)) {
throw new NoSuchObject(key, type)
throw noSuchObject(key, type)
}
return obj
@@ -192,7 +219,7 @@ export default class Xo extends EventEmitter {
const {fn, data} = watcher
new Promise(resolve => {
resolve(fn(req, res, data, next))
resolve(fn.call(this, req, res, data, next))
}).then(
result => {
if (result != null) {
@@ -255,7 +282,7 @@ export default class Xo extends EventEmitter {
// -----------------------------------------------------------------
// Plugins can use this method to expose methods directly on XO.
defineProperty (name, value) {
defineProperty (name, value, thisArg = null) {
if (name in this) {
throw new Error(`Xo#${name} is already defined`)
}
@@ -263,7 +290,7 @@ export default class Xo extends EventEmitter {
// For security, prevent from accessing `this`.
if (isFunction(value)) {
value = (value => function () {
return value.apply(null, arguments)
return value.apply(thisArg, arguments)
})(value)
}
@@ -279,6 +306,23 @@ export default class Xo extends EventEmitter {
return () => unset()
}
// Convenience method to define multiple properties at once.
defineProperties (props, thisArg) {
const unsets = []
const unset = () => forEach(unsets, unset => unset())
try {
forEach(props, (value, name) => {
unsets.push(this.defineProperty(name, value, thisArg))
})
} catch (error) {
unset()
throw error
}
return unset
}
// -----------------------------------------------------------------
// Watches objects changes.
@@ -330,7 +374,7 @@ export default class Xo extends EventEmitter {
forEach(connections, connection => {
// Notifies only authenticated clients.
if (connection.has('user_id')) {
if (connection.has('user_id') && connection.notify) {
if (enteredMessage) {
connection.notify('all', enteredMessage)
}

View File

@@ -1,84 +0,0 @@
#!/usr/bin/env node
var join = require('path').join
var readdir = require('fs').readdirSync
var stat = require('fs').statSync
var writeFile = require('fs').writeFileSync
// ===================================================================
// Returns a wrapper that calls `fn` with `this` fixed to `thisArg`,
// forwarding every argument (minimal stand-in for Function#bind).
function bind (fn, thisArg) {
  return function () {
    var args = Array.prototype.slice.call(arguments)
    return fn.apply(thisArg, args)
  }
}
// Lower-cases `str`, then upper-cases the first alphanumeric character
// following each run of separator characters, e.g. 'foo-bar' -> 'fooBar'.
function camelCase (str) {
  var lowered = str.toLowerCase()
  return lowered.replace(/[^a-z0-9]+([a-z0-9])/g, function (match, chr) {
    return chr.toUpperCase()
  })
}
// Strips `sfx` from the end of `str` and returns the remainder.
// Returns `false` when `str` does not end with `sfx`.
// NOTE: when `str === sfx` the result is '' — also falsy, callers
// relying on truthiness treat that the same as "no match".
function removeSuffix (str, sfx) {
  var cut = str.length - sfx.length
  if (cut >= 0 && str.indexOf(sfx, cut) === cut) {
    return str.slice(0, cut)
  }
  return false
}
// ===================================================================
// Inspects one directory entry and, when it is a module (a directory or
// a `.js`/`.coffee` file), emits its index lines.
//
// `this` is the line writer (a `content.push` bound in generateIndex).
function handleEntry (entry, dir) {
  var stats = stat(join(dir, entry))

  var base
  if (stats.isDirectory()) {
    // Directories are imported under their plain name.
    base = entry
  } else {
    if (!stats.isFile()) {
      return
    }
    base = removeSuffix(entry, '.coffee') || removeSuffix(entry, '.js')
    if (!base) {
      return
    }
  }

  var identifier = camelCase(base)
  this(
    'import ' + identifier + " from './" + base + "'",
    'defaults.' + identifier + ' = ' + identifier,
    'export * as ' + identifier + " from './" + base + "'",
    ''
  )
}
// Generates `<dir>/index.js`: a header, a `defaults` export, then — via
// handleEntry — an import, a `defaults.<id> = <id>` assignment and a
// namespace re-export for every module found in `dir`.
function generateIndex (dir) {
  var content = [
    '//',
    '// This file has been generated by /tools/generate-index',
    '//',
    '// It is automatically re-generated each time a build is started.',
    '//',
    '',
    'const defaults = {}',
    'export default defaults',
    ''
  ]
  var write = bind(content.push, content)

  // forEach, not map: the callback runs purely for its side effects
  // (pushing lines into `content`); the returned array was discarded.
  readdir(dir).forEach(function (entry) {
    if (entry === 'index.js') {
      // Never re-index the generated file itself.
      return
    }
    handleEntry.call(write, entry, dir)
  })

  writeFile(dir + '/index.js', content.join('\n'))
}
process.argv.slice(2).map(generateIndex)

6748
yarn.lock Normal file

File diff suppressed because it is too large Load Diff