Compare commits


318 Commits

Author SHA1 Message Date
Julien Fontanet
eba8f95e58 5.6.0 2017-01-27 16:42:07 +01:00
Julien Fontanet
7e2da1ff93 [WiP] feat(backups): implements file restore for LVM (#490)
Fixes vatesfr/xo-web#1878
2017-01-27 16:37:34 +01:00
Pierre Donias
b7b7e81468 feat(host.installSupplementalPack) (#487)
See vatesfr/xo-web#1460
2017-01-25 16:08:31 +01:00
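
The src/api/host.coffee hunk further down shows that this call returns a { $sendTo } URL to which the supplemental pack ISO is then uploaded over HTTP. A hypothetical client-side sketch of that flow, assuming an authenticated JSON-RPC helper call(method, params) and treating $sendTo as a path on the xo-server host (both assumptions, not shown on this page):

const { createReadStream, statSync } = require('fs')
const { request } = require('http')

async function installSupplementalPack (call, hostId, isoPath) {
  // host.installSupplementalPack only takes the host id and returns an upload URL
  const { $sendTo } = await call('host.installSupplementalPack', { host: hostId })

  // the server-side handler reads the ISO from the request body and uses the
  // content-length header, so it is sent explicitly here
  return new Promise((resolve, reject) => {
    const req = request('http://xo.example.net' + $sendTo, {
      method: 'POST',
      headers: { 'content-length': statSync(isoPath).size }
    }, resolve)
    req.on('error', reject)
    createReadStream(isoPath).pipe(req)
  })
}
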
Pierre Donias
0c7768f5d2 fix(vm.delete): IP addresses should always be deallocated (#488)
Fixes vatesfr/xo-web#1906
2017-01-25 15:46:33 +01:00
Pierre Donias
8fe6a56dfc fix(Xapi#installAllPoolPatchesOnHost): ignore PATCH_ALREADY_APPLIED error (#489)
Fixes vatesfr/xo-web#1904
2017-01-25 15:46:15 +01:00
Julien Fontanet
7b9dae980d fix(vm.create): properly handle optional param VDIs 2017-01-24 13:36:36 +01:00
Olivier Lambert
b59ba6b7bb feat(api): add description for some API calls (#486)
Fixes vatesfr/xo-web#1882
2017-01-17 15:15:18 +01:00
Julien Fontanet
8cdee4d173 chore(xo): disable too many listeners warning 2017-01-16 15:50:18 +01:00
Julien Fontanet
c9ed5fbe00 chore: update yarn.lock 2017-01-16 15:18:46 +01:00
Julien Fontanet
e698e89968 feat(/signout): URL to sign out 2017-01-16 14:33:58 +01:00
Pierre Donias
02f198d42c feat(backup.fetchFiles): multiple files support (#485)
See vatesfr/xo-web#1877
2017-01-16 09:33:22 +01:00
Pierre Donias
61d2d0263b feat(patching): eject tools ISOs before patching host (#479)
Fixes #1798
2017-01-13 18:20:31 +01:00
badrAZ
ed477e99a8 feat(plugin): provide a getDataDir() to plugins (#483)
It returns the path of a directory where the plugin can store data.
2017-01-13 18:13:44 +01:00
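
A hypothetical plugin skeleton illustrating the new helper; the factory shape and the way getDataDir reaches the plugin are assumptions, only its contract (it yields a directory path the plugin may write to) comes from the commit message:

import { writeFileSync } from 'fs'
import { join } from 'path'

// Assumed factory shape: the helper is taken from the factory's argument and the
// plugin persists a small state file in its data directory on load.
export default ({ getDataDir }) => ({
  async load () {
    const dir = await getDataDir() // works whether getDataDir returns a path or a promise of one
    writeFileSync(join(dir, 'state.json'), JSON.stringify({ loadedAt: Date.now() }))
  },
  unload () {}
})
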
Olivier Lambert
1449be8d66 feat(host): expose supplemental packs (#480) 2017-01-12 17:54:48 +01:00
greenkeeper[bot]
28902d8747 fix(package): update execa to version 0.6.0 (#478)
https://greenkeeper.io/
2017-01-09 10:50:31 +01:00
Julien Fontanet
d534592479 5.5.4 2017-01-06 16:57:47 +01:00
Pierre Donias
b2f6ea9116 fix(vm.set): allocate resources when editing VM (#477)
Fixes vatesfr/xo-web#1695
2017-01-06 16:54:49 +01:00
Pierre Donias
8bf38bb29b feat(server): store connection error in database (#472)
See vatesfr/xo-web#1833
2017-01-06 16:38:17 +01:00
greenkeeper[bot]
9c6a78b678 fix(package): update promise-toolbox to version 0.8.0 (#476)
https://greenkeeper.io/
2017-01-06 11:34:27 +01:00
Pierre Donias
850199d7fc fix(resource-sets): recompute limits (#475)
Fixes vatesfr/xo-web#1866
2017-01-06 10:09:36 +01:00
Pierre Donias
4282928960 fix(vif/create): locking mode when allocating IP addresses (#474)
Fixes vatesfr/xo-web#1747
2017-01-06 09:55:55 +01:00
Julien Fontanet
356dd89d9f chore(package): upgrade jest to v 0.18.1 2017-01-03 18:30:28 +01:00
Julien Fontanet
7dd2391e5a fix(group.setUsers): oldUsers → oldUsersIds 2017-01-03 11:20:25 +01:00
Julien Fontanet
e0093f236a fix(group.create): do not attempt to parse empty prop 2017-01-03 10:47:10 +01:00
Julien Fontanet
8c5c32268a fix: users and groups serialization in Redis
Fixes vatesfr/xo-web#1852.
2017-01-02 16:52:51 +01:00
greenkeeper[bot]
b61ccc1af1 fix(package): update hashy to version 0.6.0 (#470)
https://greenkeeper.io/
2017-01-02 13:01:29 +01:00
Julien Fontanet
7caf0e40f4 5.5.3 2017-01-02 10:56:08 +01:00
Julien Fontanet
a16508db10 fix(remotes): do not error on disabled remote
- testRemote()
- updateRemote()
- removeRemote()
- forgetAllRemotes()
2016-12-25 20:07:42 +01:00
Julien Fontanet
81bff342b9 chore(package): update decorator-synchronized to version 0.2.3 2016-12-22 16:25:46 +01:00
Julien Fontanet
49d41a76a0 5.5.2 2016-12-22 11:22:45 +01:00
Julien Fontanet
b1732b3298 fix(file restore): work around for invalid delta VHD path (#467)
See vatesfr/xo-web#1842.
2016-12-22 11:20:51 +01:00
Julien Fontanet
9372cdb6c7 fix(vm.rollingDeltaBackup): do not hide error 2016-12-22 10:21:38 +01:00
Julien Fontanet
1d8e54b83e chore(backups): use directly Xo#getRemoteHandler() 2016-12-22 09:50:16 +01:00
Julien Fontanet
30c5600271 chore(Xo#getRemoteHandler): use intermediary variable 2016-12-22 09:49:36 +01:00
Julien Fontanet
9f7e5c3a9a fix(Xo#getRemoteHandler): throws if remote is disabled 2016-12-22 09:49:04 +01:00
Julien Fontanet
37c9342717 fix(vm.rollingDeltaBackup): correctly delete snapshot in case of failure 2016-12-21 22:35:43 +01:00
Julien Fontanet
8827f8e940 fix(backup.fetchFiles): encode URI suffix
Fix issue with whitespaces in the filename.
2016-12-20 17:07:13 +01:00
Julien Fontanet
58334bf4a1 fix(backup.list): timestamps should be integers 2016-12-20 17:07:13 +01:00
Julien Fontanet
b898a6702c chore(package): use husky instead of ghooks 2016-12-20 17:07:13 +01:00
Julien Fontanet
6d78a810b9 perf(RemoteHandlerAbstract/createReadStream): optimise code
- avoid async function: overhead with transpilation
- do as much as possible in parallel
- fix: do not add length property in range mode
2016-12-20 17:07:13 +01:00
Julien Fontanet
8fc4eb8cdf 5.5.1 2016-12-20 13:38:02 +01:00
Julien Fontanet
b3fac0c56f fix(backup.list): datetimes should be timestamps 2016-12-20 12:50:17 +01:00
Julien Fontanet
0b063b1f5e 5.5.0 2016-12-20 12:29:16 +01:00
Olivier Lambert
480f05e676 feat(vm): add install time (#465) 2016-12-20 12:19:11 +01:00
Julien Fontanet
1ac8af34ec feat(backup): implement file restore (#461)
See vatesfr/xo-web#1590

The current implementation has the following limitations:

- only supports local and NFS remotes
- requires installation of libvhdi-utils
- files can only be recovered one by one
2016-12-20 12:18:22 +01:00
Julien Fontanet
34ff8b0f02 feat(Xapi#exportDeltaVm): don't export VDIs with names starting with [NOBAK] (#464)
Fixes vatesfr/xo-web#826
2016-12-14 10:57:25 +01:00
Julien Fontanet
77c3684e28 chore(tests): execute tests directly in src/ 2016-12-13 18:20:17 +01:00
Julien Fontanet
93038ea838 chore(package): remove unused trace 2016-12-13 14:08:38 +01:00
Julien Fontanet
46348f7cba feat: yarn integration 2016-12-13 12:15:26 +01:00
Julien Fontanet
ccc0e45daf feat(tests): use Jest instead of mocha/chai/must 2016-12-13 12:15:03 +01:00
Julien Fontanet
46ca03b017 chore(package): clean scripts 2016-12-13 11:55:12 +01:00
Julien Fontanet
1bfe3197a5 chore(Travis): test with Node stable 2016-12-13 11:51:04 +01:00
Julien Fontanet
4d2617fe68 chore(package): requires Node >= 4 2016-12-13 11:49:54 +01:00
Julien Fontanet
92e289f9da fix(decorators/mixin): do not use arrow function for constructor
It works because of the transpilation but it's not valid ES2015.
2016-12-13 11:41:41 +01:00
greenkeeper[bot]
a8c7558a77 chore(package): update index-modules to version 0.2.1 (#463) 2016-12-12 16:49:10 +01:00
greenkeeper[bot]
c756e7ecbe chore(package): update index-modules to version 0.2.0 (#462)
https://greenkeeper.io/
2016-12-12 16:16:44 +01:00
Pierre Donias
1998c56e84 feat(vm.delete): release resource set and IP-pool addresses (#460)
Fixes vatesfr/xo-web#1657, fixes vatesfr/xo-web#1748
2016-12-12 15:14:31 +01:00
Julien Fontanet
2ed55b1616 chore(decorators): remove unused @autobind. 2016-12-08 11:47:17 +01:00
Julien Fontanet
0c8d456fd3 chore(package): use bind-property-descriptor instead of custom implementation 2016-12-08 11:46:29 +01:00
Julien Fontanet
9e4924caf6 5.4.1 2016-12-02 16:37:17 +01:00
Julien Fontanet
7f391a5860 Merge branch 'next-release' into stable 2016-12-02 16:37:13 +01:00
Julien Fontanet
5c7249c8fc fix(Xapi#exportDeltaVm): remove TAG_BASE_DELTA if full export
Fixes vatesfr/xo-web#1811
2016-12-02 16:09:27 +01:00
Pierre Donias
932d00133d feat(job-executor.match): __not pattern property (#459)
See vatesfr/xo-web#1503
2016-12-01 14:56:52 +01:00
Julien Fontanet
32a371bf13 chore(package): use golike-defer instead of custom implementation 2016-11-30 15:40:30 +01:00
Julien Fontanet
5d0622d2cf 5.4.0 2016-11-23 11:10:01 +01:00
Pierre Donias
9ab9155bf0 fix(vif.set): remove old VIF before creating new one (#457)
Fixes #1784
2016-11-23 10:38:24 +01:00
Julien Fontanet
86a1ed6d46 chore(package): remove unused nyc 2016-11-23 10:00:45 +01:00
Julien Fontanet
b3c9936d74 chore(package): update xen-api to v0.9.6 2016-11-23 09:58:04 +01:00
greenkeeper[bot]
21b4d7cf11 chore(package): update nyc to version 10.0.0 (#456)
https://greenkeeper.io/
2016-11-23 09:12:26 +01:00
greenkeeper[bot]
4ec07f9ff8 fix(package): update get-stream to version 3.0.0 (#458)
https://greenkeeper.io/
2016-11-23 09:11:39 +01:00
greenkeeper[bot]
b7c89d6f64 fix(package): update http-server-plus to version 0.8.0 (#454)
https://greenkeeper.io/
2016-11-18 14:44:50 +01:00
greenkeeper[bot]
0eb168ec70 fix(package): update uuid to version 3.0.0 (#453)
https://greenkeeper.io/
2016-11-18 09:10:07 +01:00
Olivier Lambert
8ac1a66e93 feat(sr.shared): new boolean property (#452) 2016-11-17 14:33:45 +01:00
badrAZ
301da3662a fix(plugin.test): data param is optional (#451) 2016-11-16 16:08:11 +01:00
greenkeeper[bot]
e474946cb7 fix(package): update xo-common to version 0.1.0 (#450)
https://greenkeeper.io/
2016-11-16 12:01:27 +01:00
Pierre Donias
9a0ca1ebb2 feat(api): map 10 XAPI errors to XO errors (#449)
Fixes vatesfr/xo-web#1481
2016-11-16 11:22:31 +01:00
Julien Fontanet
520f7b2a77 feat(job.create,job.set): ability to set userId (#448)
See vatesfr/xo-web#1733
2016-11-14 17:42:19 +01:00
Pierre Donias
c0b3b3aab8 Fix userId. 2016-11-14 16:59:10 +01:00
Pierre Donias
d499332ce3 It should be possible to not change a job's user. 2016-11-14 15:56:54 +01:00
Pierre Donias
19ce06e0bb feat(job#create,job#set): userId parameter
See vatesfr/xo-web#1733
2016-11-14 15:33:09 +01:00
greenkeeper[bot]
ea6ff4224e fix(package): update fs-promise to version 1.0.0 (#447)
https://greenkeeper.io/
2016-11-10 08:56:37 +01:00
Julien Fontanet
871d1f8632 fix(plugins registration): params order 2016-11-09 17:05:10 +01:00
badrAZ
77ce2ff6d1 feat(plugin.test): plugins can be tested (#446)
See vatesfr/xo-web#1749
2016-11-09 14:58:19 +01:00
Pierre Donias
6383104796 fix(Xapi#editPif): destroy VLAN from each PIF before creating new VLAN (#444) 2016-11-08 16:50:12 +01:00
Julien Fontanet
b99b4159c8 feat(Redis): support aliased commands
Fixes #443
2016-11-08 10:23:53 +01:00
Olivier Lambert
8bedb1f3b9 Merge pull request #442 from vatesfr/pierre-fix-xo-error
fix(api): xoError is not an object
2016-11-07 18:18:45 +01:00
Pierre Donias
dc85804a27 fix(api): xoError is not an object 2016-11-07 17:58:16 +01:00
greenkeeper[bot]
42a31e512a fix(package): update json-rpc-peer to version 0.13.0 (#441)
https://greenkeeper.io/
2016-11-07 14:57:53 +01:00
Pierre Donias
2be7388696 feat(api-errors): throw custom errors when XAPI error is caught (#440)
See vatesfr/xo-web#1717
2016-11-07 14:15:23 +01:00
Julien Fontanet
bc5b00781b 5.3.3 2016-11-04 11:44:09 +01:00
Olivier Lambert
313e2b3de6 fix(Sr): add type cifs in deviceConfig. Fixes vatesfr/xo-web#1615 (#439) 2016-11-04 11:42:03 +01:00
Julien Fontanet
0bbd002060 fix(xo.importConfig): don't unnecessarily delete existing users
Do not delete existing users with the same name & id
2016-11-04 09:42:56 +01:00
Julien Fontanet
5e785266a5 fix(xo.importConfig): correctly import ACLs
Fixes vatesfr/xo-web#1722
2016-11-04 09:40:41 +01:00
Julien Fontanet
5870769e7d fix(vm.import{,Delta}Backup): make restored VMs identifiable
Their names are prefixed with the export date and they have a specific tag (*restored from backup*).

Fixes vatesfr/xo-web#1719
2016-11-03 16:22:42 +01:00
Julien Fontanet
79b80dcd07 fix(pif#carrier): cast to boolean 2016-11-02 16:50:12 +01:00
Olivier Lambert
6f6e547e6c feat(pif): add carrier (#438)
Fixes vatesfr/xo-web#1702
2016-11-02 16:25:44 +01:00
greenkeeper[bot]
352c9357df chore(package): update dependencies (#437)
https://greenkeeper.io/
2016-11-01 19:05:11 +01:00
Pierre Donias
1ba4641641 feat(acls): handle xo.clean (#436) 2016-10-31 15:53:50 +01:00
Greenkeeper
60e0047285 chore(package): update helmet to version 3.0.0 (#435)
https://greenkeeper.io/
2016-10-29 12:52:18 +02:00
Pierre Donias
235e7c143c fix(signin): new Bootstrap classes (#434) 2016-10-28 10:11:41 +02:00
Julien Fontanet
522d6eed92 5.3.2 2016-10-27 18:49:32 +02:00
Julien Fontanet
9d1d6ea4c5 feat(xo): export/import config (#427)
See vatesfr/xo-web#786
2016-10-27 18:48:19 +02:00
Julien Fontanet
0afd506a41 5.3.1 2016-10-27 18:25:16 +02:00
Julien Fontanet
9dfb837e3f fix(Xapi#importDeltaVm): gracefully handle missing vif.$network$uuid (#433) 2016-10-27 16:46:45 +02:00
fufroma
4ab63b569f fix(RemoteHandlerNfs): move mount points in /run/xo-server/mounts
Fixes vatesfr/xo-web#1405
2016-10-27 15:56:33 +02:00
Julien Fontanet
8d390d256d fix(http-request): handle redirections (#432) 2016-10-27 15:34:54 +02:00
Julien Fontanet
4eec5e06fc fix(package): test on Node 6, not 7 (#431) 2016-10-27 12:24:40 +02:00
Julien Fontanet
e4063b1ba8 feat(sample.config.yaml): add warning about YAML 2016-10-24 22:52:11 +02:00
Greenkeeper
0c3227cf8e chore(package): update promise-toolbox to version 0.7.0 (#428)
https://greenkeeper.io/
2016-10-24 15:01:17 +02:00
Pierre Donias
7bed200bf5 feat(pif): editVlan (#426)
Fix vatesfr/xo-web#1092
2016-10-24 10:24:44 +02:00
Julien Fontanet
4f763e2109 5.3.0 2016-10-20 16:01:53 +02:00
Pierre Donias
75167fb65b feat(pif): expose IP config modes (#424)
See vatesfr/xo-web#1651
2016-10-20 12:44:35 +02:00
Julien Fontanet
675588f780 feat(delta backups): force checksums refresh
See vatesfr/xo-web#1672
2016-10-20 12:38:26 +02:00
Julien Fontanet
2d6f94edd8 fix(vhd-merge/chainVhd): correctly await _write()
Fixes vatesfr/xo-web#1672
2016-10-20 12:31:20 +02:00
Julien Fontanet
247c66ef4b feat(IP pools): can be used in resource sets (#413)
See vatesfr/xo-web#1565
2016-10-19 11:17:05 +02:00
Greenkeeper
1076fac40f Update gulp-sourcemaps to version 2.1.1 🚀 (#422)
https://greenkeeper.io/
2016-10-14 10:44:27 +02:00
Julien Fontanet
14a4a415a2 5.2.6 2016-10-13 18:51:16 +02:00
Julien Fontanet
524355b59c fix(vhd-merge/chainVhd): correctly compute header checksum (#419)
Fixes vatesfr/xo-web#1656
2016-10-13 18:49:58 +02:00
Greenkeeper
36fe49f3f5 Update promise-toolbox to version 0.6.0 🚀 (#416)
https://greenkeeper.io/
2016-10-12 09:19:19 +02:00
Greenkeeper
c0c0af9b14 chore(package): update execa to version 0.5.0 (#411)
https://greenkeeper.io/
2016-10-05 10:40:31 +02:00
Julien Fontanet
d1e472d482 chore(package): use babel-plugin-lodash 2016-10-04 16:05:01 +02:00
Julien Fontanet
c80e43ad0d fix(vm.create): don't require view perm on VM template 2016-10-04 16:03:06 +02:00
Julien Fontanet
fdd395e2b6 fix(vm.create): correctly check resourceSet objects
Related to vatesfr/xo-web#1620
2016-10-04 15:51:04 +02:00
Julien Fontanet
e094437168 fix(package): update xo-acl-resolver to version 0.2.2
See vatesfr/xo-web#1620
2016-10-04 15:24:01 +02:00
Pierre Donias
2ee0be7466 fix(xapi/utils/makeEditObject): constraints works with user props (#410) 2016-10-04 15:02:27 +02:00
Julien Fontanet
2784a7cc92 Create ISSUE_TEMPLATE.md 2016-10-03 16:24:24 +02:00
Julien Fontanet
b09f998d6c 5.2.5 2016-10-03 09:39:52 +02:00
Nicolas Raynaud
bdeb5895f6 fix(deltaBackups): update checksum after altering VHD files (#408)
Fixes vatesfr/xo-web#1606
2016-09-30 14:31:33 +02:00
Pierre Donias
3944b8aaee feat(network): create a bonded network (#407)
Fixes vatesfr/xo-web#876
2016-09-30 13:51:33 +02:00
Nicolas Raynaud
6e66cffb92 feat(deltaBackups): correctly chain VHDs (#406)
The goal is for a tool like vhdimount to be able to mount any file and use it as a disk to recover specific files from it.
2016-09-29 17:31:36 +02:00
Pierre Donias
57092ee788 feat(vif.set): support for network, MAC and currently_attached (#403)
Fixes vatesfr/xo-web#1446
2016-09-28 15:09:17 +02:00
Julien Fontanet
70e9e1c706 chore(package): update human-format to version 0.7.0 2016-09-28 09:58:54 +02:00
Greenkeeper
9662b8fbee chore(package): update babel-eslint to version 7.0.0 (#404)
https://greenkeeper.io/
2016-09-27 23:39:30 +02:00
Julien Fontanet
9f66421ae7 fix(bootstrap): C-c twice force stop the server 2016-09-27 10:44:24 +02:00
Greenkeeper
50584c2e50 chore(package): update http-server-plus to version 0.7.0 (#402)
https://greenkeeper.io/
2016-09-27 09:30:16 +02:00
Julien Fontanet
7be4e1901a chore(package): use index-modules 2016-09-26 15:41:41 +02:00
Julien Fontanet
b47146de45 fix(pbd/attached): should be a boolean 2016-09-22 13:20:49 +02:00
Julien Fontanet
97b229b2c7 fix(vm.set): works with VM templates
Fixes vatesfr/xo-web#1569
2016-09-22 10:39:20 +02:00
Julien Fontanet
6bb5bb9403 5.2.4 2016-09-21 10:20:46 +02:00
Julien Fontanet
8c4b8271d8 fix(pool.setDefaultSr): remove pool param
Fixes vatesfr/xo-web#1558
2016-09-20 11:45:36 +02:00
Julien Fontanet
69291c0574 chore(package): update xo-vmdk-to-vhd to version 0.0.12
Fixes vatesfr/xo-web#1551
2016-09-20 10:41:42 +02:00
Julien Fontanet
2dc073dcd6 fix(vm.resourceSet): handle xo namespace 2016-09-19 13:15:23 +02:00
Julien Fontanet
1894cb35d2 feat(vm): expose resourceSet prop 2016-09-19 12:10:09 +02:00
Julien Fontanet
cd37420b07 Merge pull request #398 from vatesfr/greenkeeper-standard-8.1.0
Update standard to version 8.1.0 🚀
2016-09-18 05:17:41 +02:00
Julien Fontanet
55cb6b39db fix(Xo#removeSchedule): correctly test instance of SchedulerError 2016-09-18 05:12:36 +02:00
greenkeeperio-bot
89d13b2285 chore(package): update standard to version 8.1.0
https://greenkeeper.io/
2016-09-17 20:51:59 +02:00
Julien Fontanet
1b64b0468a fix(group.delete): remove associated ACLs
Fixes vatesfr/xo-web#899
2016-09-16 16:04:41 +02:00
Julien Fontanet
085fb83294 fix(user.delete): remove associated ACLs
See vatesfr/xo-web#899
2016-09-16 16:04:41 +02:00
Julien Fontanet
edd606563f feat(vm.revert): can snapshot before (#395)
See vatesfr/xo-web#1445
2016-09-15 14:59:43 +02:00
Julien Fontanet
fb804e99f0 5.2.3 2016-09-14 18:02:32 +02:00
Pierre Donias
1707cbcb54 feat(signin): use XO 5 style (#394)
Fixes vatesfr/xo-web#1161
2016-09-14 17:56:05 +02:00
Julien Fontanet
6d6a630c31 5.2.2 2016-09-14 17:37:42 +02:00
Julien Fontanet
ff2990e8e5 chore(package): update @marsaud/smb2-promise to version 0.2.1
Fixes vatesfr/xo-web#1511
2016-09-14 17:32:52 +02:00
Nicolas Raynaud
d679aff0fb chore(package): remove node-smb2 dependency (#393) 2016-09-14 16:23:28 +02:00
Julien Fontanet
603a444905 fix(Xapi#importVm): remove VM's VDIs on failure 2016-09-14 14:11:20 +02:00
Julien Fontanet
a002958448 fix(DR): remove previous VDIs
Fixes vatesfr/xo-web#1510
2016-09-14 14:11:20 +02:00
Julien Fontanet
cb4bc37424 fix(DR): delete VMs in all cases
Previous copies were not deleted when there were already as many copies as the configured depth.

Fixes vatesfr/xo-web#1509
2016-09-14 14:11:19 +02:00
Julien Fontanet
0fc6f917e6 5.2.1 2016-09-13 16:44:35 +02:00
Julien Fontanet
ec0d012b24 feat(vm.set): support tags (#392)
Fixes vatesfr/xo-web#1431
2016-09-13 16:35:40 +02:00
Julien Fontanet
2cd4b171a1 chore(package): update json5 to version 0.5.0 2016-09-13 11:28:56 +02:00
Julien Fontanet
0cb6906c4d chore(package): is-my-json-valid to v2.13.1 2016-09-13 11:25:22 +02:00
Julien Fontanet
4c19b93c30 chore(package): update fs-promise to version 0.5.0 2016-09-13 11:23:42 +02:00
Julien Fontanet
6165f1b405 fix(vm.create): select SR of first disk-VDI (#391)
Fixes vatesfr/xo-web#1493
2016-09-12 16:32:43 +02:00
Julien Fontanet
37a4221e43 fix(vm.docker.containers): yes, again 2016-09-12 12:13:45 +02:00
Julien Fontanet
9831b222b5 fix(vm.docker.containers) 2016-09-12 12:11:15 +02:00
Julien Fontanet
7b6f44fb74 fix(vm.createInterface): syntax fix 2016-09-12 12:06:34 +02:00
Julien Fontanet
399f4d0ea3 feat(vm.docker.containers): like vm.docker.process.items but always an array 2016-09-12 11:43:36 +02:00
Julien Fontanet
26a668a875 fix(vm.createInterface): accept integers for position and mtu 2016-09-12 11:36:30 +02:00
Julien Fontanet
bf96262b6e feat(Xapi#createVif): default MTU is network's MTU 2016-09-12 11:05:31 +02:00
Julien Fontanet
1155fa1fe9 chore(vm.create): remove some console.log()s 2016-09-09 15:31:25 +02:00
Julien Fontanet
1875d31731 5.2.0 2016-09-09 15:16:03 +02:00
Julien Fontanet
6f855fd14e feat(IP pools): groups of IP addresses (#371) 2016-09-09 15:12:30 +02:00
Julien Fontanet
08e392bb46 fix(vm.create): correctly compute limits usage (#389)
Fixes vatesfr/xo-web#1365
2016-09-09 12:55:10 +02:00
Julien Fontanet
66d63e0546 fix(test.wait): fix setTimeout params order 2016-09-08 18:40:55 +02:00
Julien Fontanet
7ee56fe8bc feat(pool.installAllPatches): install all patches on a pool (#388)
See vatesfr/xo-web#1392
2016-09-07 17:54:00 +02:00
Julien Fontanet
669d04ee48 fix(vm.migrate): error on unused default SR
Fixes #1466
2016-09-05 14:21:17 +02:00
Julien Fontanet
cb1b37326e fix(vm.rollingDrCopy): avoid duplicates in VMs list (#387)
Fixes vatesfr/xo-web#1464
2016-09-05 13:41:20 +02:00
Julien Fontanet
7bb73bee67 feat(vm.rollingDrCopy): failure to destroy old copies is not fatal 2016-09-05 11:29:54 +02:00
Julien Fontanet
7286ddc338 chore(JobExecutor): use utils/serializeError() 2016-09-05 11:29:53 +02:00
Olivier Lambert
7d1f9e33fe feat(network): add defaultIsLocked to API (#385) 2016-09-01 14:49:20 +02:00
Ronan Abhamon
63c676ebfe feat(vm.import): supports OVA (#375)
See vatesfr/xo-web#709
2016-09-01 14:11:15 +02:00
Greenkeeper
fcaf6b7923 chore(package): update json-rpc-peer to version 0.12.0 (#383)
https://greenkeeper.io/
2016-08-25 11:56:54 -04:00
Julien Fontanet
9f347a170a fix(xapi/utils): correctly isPlainObject 2016-08-18 16:21:34 +02:00
Julien Fontanet
2f7cd4426d fix(xapi/utils/prepareXapiParam): array handling 2016-08-18 16:15:51 +02:00
Julien Fontanet
854f256470 fix(xapi/getNamespaceForType): add missing VIF 2016-08-18 15:27:47 +02:00
Julien Fontanet
5d0b40f752 fix(utils/camelToSnakeCase): better number handling 2016-08-18 15:23:57 +02:00
Julien Fontanet
27a2853ee8 fix(vif.set): add missing param 2016-08-18 15:13:46 +02:00
Julien Fontanet
67f6b80312 fix(vif.set): do not use an arrow function 2016-08-18 15:01:13 +02:00
Julien Fontanet
016037adc1 fix(user.set): can be used by non admins 2016-08-18 14:17:07 +02:00
Julien Fontanet
70d5c1034d 5.1.6 2016-08-18 10:54:36 +02:00
Greenkeeper
ed6fb8754f chore(package): update mocha to version 3.0.2 (#376)
https://greenkeeper.io/
2016-08-18 10:53:05 +02:00
Julien Fontanet
6d08a9b11c feat(JobExecutor): a current job will only run 2 calls at a time (#382)
Fixes vatesfr/xo-web#915
2016-08-18 10:52:29 +02:00
Julien Fontanet
cf6aa7cf79 fix(package): update xen-api to 0.9.4
Again, fixes vatesfr/xo-web#1384
2016-08-18 09:42:28 +02:00
Julien Fontanet
6c4e57aae0 chore(JobExecutor#_execCall): forEach+Array#push → mapToArray 2016-08-17 18:13:30 +02:00
Julien Fontanet
d08a04959c 5.1.5 2016-08-16 19:15:52 +02:00
Julien Fontanet
2762f74ce5 fix(package): update xen-api to 0.9.3 2016-08-16 19:12:46 +02:00
Julien Fontanet
6ebcf6eec5 5.1.4 2016-08-16 18:18:04 +02:00
Julien Fontanet
25b78fb7e1 fix(package): update xen-api to 0.9.2
Fixes vatesfr/xo-web#1384
2016-08-16 18:15:32 +02:00
Greenkeeper
670dd2dd96 chore(package): update promise-toolbox to version 0.5.0 (#381)
https://greenkeeper.io/
2016-08-16 12:22:57 +02:00
Julien Fontanet
1baf04f786 fix(NfsHandler#_unmount): use _getRealPath() (#380)
Fixes vatesfr/xo-web#1396.
2016-08-15 14:22:19 +02:00
Greenkeeper
ce05b7a041 chore(package): update nyc to version 8.1.0 (#379)
https://greenkeeper.io/
2016-08-14 19:06:00 +02:00
Olivier Lambert
290cc146c8 fix(xapi): allow to unplug VBDs when VM is running 2016-08-11 16:32:06 +02:00
Olivier Lambert
db4d46a584 fix(sr): don't share a local ISO SR. Fixes vatesfr/xo-web#1389 2016-08-10 14:39:05 +02:00
Olivier Lambert
8ed2e51dde feat(network): add network.set method 2016-08-08 14:54:23 +02:00
Olivier Lambert
33702c09a6 feat(vm copy): allow snapshot copy. Related to vatesfr/xo-web#1353 2016-08-08 14:07:27 +02:00
Olivier Lambert
45aeca3753 5.1.3 2016-08-05 11:08:11 +02:00
Olivier Lambert
deae7dfb4d fix(xen-api): avoid reserved key conflicts. Fixes vatesfr/xo-web#1369 2016-08-05 11:06:58 +02:00
Julien Fontanet
2af043ebdd chore(jshint): remove unused config file 2016-08-03 09:46:52 +02:00
Olivier Lambert
e121295735 Merge pull request #373 from nraynaud/next-release
fix (readme): fix installation documentation link
2016-08-02 12:32:05 +02:00
Nicolas Raynaud
7c1c405a64 fix installation documentation link 2016-08-02 12:22:39 +02:00
Olivier Lambert
5d7c95a34d fix(xapi): typo on host disable method. Fixes vatesfr/xo-web#1351 2016-07-30 20:22:12 +02:00
Julien Fontanet
504c934fc9 fix(JobExecutor#_execCall): xo.api.call() → xo.callApiMethod() 2016-07-29 15:28:24 +02:00
Julien Fontanet
81b0223f73 fix(JobExecutor#exec): forward the error 2016-07-29 15:27:58 +02:00
Julien Fontanet
6d1e410bfd fix(JobExecutor#exec): correctly log the error 2016-07-29 15:27:32 +02:00
Julien Fontanet
26c5c6152d fix(job-executor/map): paramName handling 2016-07-29 14:37:42 +02:00
Julien Fontanet
d83bf0ebaf fix(Xo#_watchObject): check for notify() 2016-07-29 14:29:57 +02:00
Julien Fontanet
5adfe9a552 chore(index): remove debug trace 2016-07-29 13:54:54 +02:00
ABHAMON Ronan
883f461dc7 feat(job-executor): supports dynamic param vectors (#369)
See vatesfr/xo-web#837
2016-07-29 13:26:53 +02:00
Julien Fontanet
8595ebc258 feat(api): generate logs on errors
See vatesfr/xo-web#1344
2016-07-29 10:32:48 +02:00
Julien Fontanet
2bd31f4560 chore(api): remove legacy helpers 2016-07-28 15:21:59 +02:00
Julien Fontanet
6df85ecadd fix(vm.*): add missing import 2016-07-28 15:21:59 +02:00
Julien Fontanet
07829918e4 5.1.2 2016-07-28 15:21:12 +02:00
Julien Fontanet
b0d400b6eb fix(Xapi#exportDeltaVm): better handling of removed VDIs (#370)
Fixes vatesfr/xo-web#1333
2016-07-28 15:19:44 +02:00
Julien Fontanet
706cb895ad 5.1.1 2016-07-27 16:36:51 +02:00
Julien Fontanet
45bf539b3c fix(user.delete): fix tokens deletion 2016-07-27 13:23:16 +02:00
Julien Fontanet
0923981f8d fix(user.set): typo in error message 2016-07-27 13:01:32 +02:00
Julien Fontanet
b0ac14363d 5.1.0 2016-07-26 16:52:49 +02:00
Julien Fontanet
5d346aba37 fix(vm.create): cloudConfig handling 2016-07-26 14:26:24 +02:00
Julien Fontanet
124cb15ebe fix(resource sets): fix VM resources computation
Fixes vatesfr/xo-web#1276
2016-07-25 17:08:09 +02:00
Julien Fontanet
a244ab898d fix(vm.create): correctly store the resource set 2016-07-25 17:08:08 +02:00
Julien Fontanet
3c551590eb fix(vm.set): correctly save memory in limits 2016-07-25 17:08:07 +02:00
ABHAMON Ronan
10e30cccbc feat(models/schedule): null properly removes timezone (#368)
Related to vatesfr/xo-web#1314
2016-07-25 15:54:27 +02:00
Julien Fontanet
806a6b86a2 fix(signin): fix styles when /v4 2016-07-25 13:40:57 +02:00
Julien Fontanet
9719fdf5cc fix(sr.probe*): correctly prepare port param 2016-07-23 16:18:03 +02:00
Julien Fontanet
6d8764f8cb fix(Xapi#createVm): add missing param 2016-07-23 15:49:27 +02:00
Julien Fontanet
d9fd9cb408 fix(vm.create): better VBDs creation (#361)
Fixes vatesfr/xo-web#1257
2016-07-23 15:31:15 +02:00
Julien Fontanet
7710ec0aba feat(schemas): add user schema 2016-07-20 12:10:23 +02:00
Julien Fontanet
c97bd78cd0 fix(VM): cpuCap & cpuWeight are integers 2016-07-20 10:57:15 +02:00
ABHAMON Ronan
728c5aa86e feat(plugins): supports predefined configurations (#365)
See vatesfr/xo-web#1289
2016-07-19 17:28:53 +02:00
Pierre Donias
83d68ca293 feat(vm.set): make cpuWeight and cpuCap nullable (#364) 2016-07-19 16:53:47 +02:00
Julien Fontanet
47d7561db4 fix(VM): cpuCap can be defined when cpuWeight is not 2016-07-19 15:37:07 +02:00
ABHAMON Ronan
7d993e8319 feat(schedules): schedules support timezones (#363)
Fixes vatesfr/xo-web#1258
2016-07-19 13:32:27 +02:00
Julien Fontanet
1d1a597b22 feat(VM): expose cpuCap 2016-07-19 11:02:38 +02:00
Julien Fontanet
23082f9300 feat(vm.set): support for cpuCap (#362) 2016-07-19 10:35:03 +02:00
Julien Fontanet
ea1a7f9376 chore(Xapi#_getXenUpdates): use ensureArray() 2016-07-15 12:57:20 +02:00
Greenkeeper
1796c7bab8 chore(package): update nyc to version 7.0.0 (#358)
https://greenkeeper.io/
2016-07-14 13:09:12 +02:00
Greenkeeper
65ad76479a chore(package): update base64url to version 2.0.0 (#360)
https://greenkeeper.io/
2016-07-14 11:33:12 +02:00
Olivier Lambert
422db04ec8 5.0.5 2016-07-13 15:20:56 +02:00
Olivier Lambert
d12f60fe37 Merge pull request #359 from vatesfr/pierre-fix-create-vm
fix(vm/create): missing single quotes
2016-07-13 09:37:23 +02:00
Pierre Donias
194c1c991c fix(vm/create): missing single quotes 2016-07-12 16:40:32 +02:00
Olivier Lambert
3e8e2222c1 Merge pull request #357 from vatesfr/marsaudf-fix-job-log-error
Add message to job log error
2016-07-07 15:26:15 +02:00
Fabrice Marsaud
1620327a33 Add message to job log error 2016-07-07 14:55:43 +02:00
Olivier Lambert
b1131e3667 5.0.4 2016-07-07 12:12:54 +02:00
Olivier Lambert
db0250ac08 Merge pull request #356 from vatesfr/marsaudf-fix-patch-conflicts
Fix(xapi): handle correctly single XML elements
2016-07-07 11:22:27 +02:00
Fabrice Marsaud
0a6b605760 Handle single patch elements in parsed XML 2016-07-07 10:11:21 +02:00
Olivier Lambert
81ac2375e5 5.0.3 2016-07-06 23:23:14 +02:00
Olivier Lambert
6bcaca6cd7 Merge pull request #355 from vatesfr/issue-1233
fix(Xapi#importDeltaVm): correctly handle missing network
2016-07-06 23:21:55 +02:00
Olivier Lambert
ec8375252e fix(Xapi#importDeltaVm): correctly handle missing network 2016-07-06 23:11:47 +02:00
Julien Fontanet
766aa1762f 5.0.2 2016-07-05 17:56:02 +02:00
Julien Fontanet
5165e0a54c feat(user.set): support preferences 2016-07-05 17:19:38 +02:00
Julien Fontanet
a2f7ad627e feat(Xapi#migrateVm): allow non-running VMs
Fixes vatesfr/xo-web#1216
2016-07-05 17:09:54 +02:00
Julien Fontanet
1176c162d4 5.0.1 2016-06-30 15:46:27 +02:00
Fabrice Marsaud
a4880cd017 feat(remote.test): perform a write/read test on a remote (#354)
See vatesfr/xo-web#1075
2016-06-30 15:00:00 +02:00
Julien Fontanet
383bdce416 fix(plugin.configure): fix undefined handling 2016-06-29 13:08:02 +02:00
Julien Fontanet
7cc300dd83 fix(Xapi#createVif): fix handling when neither device nor position is provided 2016-06-28 17:36:24 +02:00
Fabrice Marsaud
687809db9d fix(user.set): cannot change self permission (#353) 2016-06-28 13:28:31 +02:00
Julien Fontanet
1127ec3a90 feat(vif.set): allowed IPv4/IPv6 addresses (#328) 2016-06-27 15:11:46 +02:00
Julien Fontanet
a797edfae9 chore(xapi/mixins/vm): simplify _editVm() specs 2016-06-27 12:10:57 +02:00
Julien Fontanet
938e106252 feat(xapi/utils/makeEditObject): support camelCase and snake_case aliases 2016-06-27 12:10:54 +02:00
Julien Fontanet
a0eb9caaa2 feat(xapi/utils/makeEditObject): set, set.get, set.set can be true 2016-06-27 11:54:13 +02:00
Julien Fontanet
442f53d45e fix(xapi/utils/makeEditObject): use deep equality 2016-06-27 09:52:02 +02:00
Greenkeeper
68de1ca248 chore(package): update ws to version 1.1.1 (#348)
https://greenkeeper.io/
2016-06-26 20:19:47 +02:00
Greenkeeper
e16061141e chore(package): update d3-time-format to version 2.0.0 (#350)
https://greenkeeper.io/
2016-06-26 20:18:24 +02:00
Julien Fontanet
64cbe3d209 feat(build): delete dist before building 2016-06-26 17:47:56 +02:00
Julien Fontanet
ebdc6376d8 5.0.0 2016-06-24 18:34:31 +02:00
Julien Fontanet
68335123a1 feat(vm.create): all vm.set params are supported (#340) 2016-06-24 18:33:43 +02:00
Julien Fontanet
25b18f4ef8 chore(package): update xo-acl-resolver to 0.2.1 2016-06-24 14:43:18 +02:00
Julien Fontanet
9ad615b0ff fix(Xapi#_waitObjectState): fix failure when object is initially missing 2016-06-22 12:20:22 +02:00
Julien Fontanet
12eaceb032 fix(xapi-objects-to-xo): fix CPUs.number when no tools 2016-06-21 13:19:29 +02:00
Julien Fontanet
3263511b72 fix(Xapi#snapshotVm): fallback if quiesce failed
Fixes vatesfr/xo-web#1088
2016-06-21 11:21:01 +02:00
Julien Fontanet
75cae8c647 fix(Xapi#_updateObjectMapProperty): prepare XAPI param 2016-06-21 11:21:00 +02:00
Julien Fontanet
9991ef624c feat(Xapi#getObject): accept objects with _xapiId property 2016-06-21 11:21:00 +02:00
Julien Fontanet
489e9fce27 fix(xapi/index): work around Babel T2877 2016-06-21 11:21:00 +02:00
Julien Fontanet
0655628073 fix(xapi/index): incorrect import 2016-06-21 11:20:59 +02:00
Fabrice Marsaud
9460822529 feat(vm.importBackup): returns the new VM id (#345) 2016-06-20 18:07:14 +02:00
Julien Fontanet
d02358ac0d chore(xapi): move utilities into dedicated module 2016-06-17 18:43:10 +02:00
ABHAMON Ronan
366237a625 fix(XapiStats): fix unit for host free memory (#339) 2016-06-17 10:16:58 +02:00
Julien Fontanet
2f2da18994 chore: remove some unnecessary logs 2016-06-16 09:22:26 +02:00
Greenkeeper
ecd30db215 chore(package): update d3-time-format to version 1.0.0 (#338)
https://greenkeeper.io/
2016-06-15 08:40:56 +02:00
ABHAMON Ronan
1980854f6f feat(Xapi#importDeltaVm): attach VIFs to original networks if available (#335)
Fixes vatesfr/xo-web#1016
2016-06-10 11:05:54 +02:00
Julien Fontanet
7d4f006c25 feat(Xapi#exportDeltaVm): inject network/SR UUIDs in VIF/VDI records 2016-06-09 17:25:02 +02:00
Julien Fontanet
b697be2383 fix(Xapi#_snapshotVm): returns the up-to-date snapshot record 2016-06-09 17:17:14 +02:00
Fabrice Marsaud
143e53c43f chore(package): update xo-remote-parser to version 0.3.0 (#333) 2016-06-08 17:26:08 +02:00
Julien Fontanet
6dde1ade01 fix(xo-server-logs): fix broken require since Babel 6 2016-06-08 11:12:45 +02:00
Greenkeeper
d4de391ac5 chore(package): update d3-time-format to version 0.4.0 (#332)
https://greenkeeper.io/
2016-06-08 09:05:45 +02:00
Greenkeeper
af15f4bc6a chore(package): update xo-acl-resolver to version 0.2.0 (#330)
https://greenkeeper.io/
2016-06-07 16:46:23 +02:00
Fabrice Marsaud
d4ace24caa fix(job.set): protects userId from modification (#329) 2016-06-07 09:25:15 +02:00
Julien Fontanet
c5ab47fa66 chore(package): fix deps order 2016-06-06 13:38:16 +02:00
Julien Fontanet
d60051b629 fix(package): update xo-remote-parser to 0.2.1 2016-06-06 13:37:47 +02:00
Julien Fontanet
22ff330ee7 fix(package): update @marsaud/smb2 to 0.7.1 2016-06-03 18:22:37 +02:00
Olivier Lambert
dd62bef66d feat(host): expose correct timestamp for license expiry value 2016-05-31 17:24:49 +02:00
Julien Fontanet
e7feb99f8d feat(vm.create): clone param may be used to disable cloning (#318)
See vatesfr/xo-web#960
2016-05-30 11:34:39 +02:00
Julien Fontanet
6358accece fix(plugin.configure): correctly handle undefined 2016-05-30 11:12:11 +02:00
Olivier Lambert
9ce8a24eea feat(sr): add disconnect and connect all PBDs to a SR (#324) 2016-05-27 18:31:09 +02:00
Julien Fontanet
4d0673f489 feat(sr.forget): automatically disconnect PBDs (#323) 2016-05-27 18:15:09 +02:00
Olivier Lambert
fbe1e6a7d5 fix(vm): missing parameters and wrong value for set_memory_static_max 2016-05-27 15:03:49 +02:00
Greenkeeper
4ed02ca501 chore(package): update cookie to version 0.3.0 (#322)
https://greenkeeper.io/
2016-05-27 04:36:35 +02:00
Julien Fontanet
af245ed9fe fix(log.delete): id can be an array 2016-05-26 13:34:47 +02:00
Julien Fontanet
fc86a3e882 fix(vm): always consider memory dynamic max when updating resource set 2016-05-24 16:22:55 +02:00
Julien Fontanet
f9109edcf1 fix(vm.set): memoryMax should update resource set 2016-05-24 16:21:21 +02:00
Julien Fontanet
ec100e1a91 fix(vm.set): memoryMax should change dynamic max 2016-05-24 16:20:25 +02:00
Julien Fontanet
746c5f4a79 fix(vm.set): cpusMax (shame) 2016-05-24 15:13:53 +02:00
Julien Fontanet
b2611728a1 fix(vm): fix indent 2016-05-24 14:38:11 +02:00
Julien Fontanet
fc6cc4234d chore(vm.set): fix some comments 2016-05-24 14:33:40 +02:00
Julien Fontanet
7706c1cb63 feat(vm.set): memoryStaticMax 2016-05-24 14:33:02 +02:00
Julien Fontanet
4d7a07220c feat(vm.set): memoryMax increase static max if necessary 2016-05-24 14:32:14 +02:00
Julien Fontanet
436875f7dc fix(vm.set): memoryMin should not change static min 2016-05-24 14:30:26 +02:00
Julien Fontanet
21c6f53ecc fix(vm.set): cpusMax 2016-05-24 14:23:21 +02:00
96 changed files with 11901 additions and 2706 deletions

.gitignore vendored (1 line changed)
View File

@@ -2,6 +2,7 @@
/dist/
/node_modules/
/src/api/index.js
/src/xapi/mixins/index.js
/src/xo-mixins/index.js
npm-debug.log

View File

@@ -1,93 +0,0 @@
{
// Julien Fontanet JSHint configuration
// https://gist.github.com/julien-f/8095615
//
// Changes from defaults:
// - all enforcing options (except `++` & `--`) enabled
// - single quotes
// - indentation set to 2 instead of 4
// - almost all relaxing options disabled
// - environments are set to Node.js
//
// See http://jshint.com/docs/ for more details
"maxerr" : 50, // {int} Maximum error before stopping
// Enforcing
"bitwise" : true, // true: Prohibit bitwise operators (&, |, ^, etc.)
"camelcase" : true, // true: Identifiers must be in camelCase
"curly" : true, // true: Require {} for every new block or scope
"eqeqeq" : true, // true: Require triple equals (===) for comparison
"forin" : true, // true: Require filtering for..in loops with obj.hasOwnProperty()
"freeze" : true, // true: Prohibit overwriting prototypes of native objects (Array, Date, ...)
"immed" : true, // true: Require immediate invocations to be wrapped in parens e.g. `(function () { } ());`
"indent" : 2, // {int} Number of spaces to use for indentation
"latedef" : true, // true: Require variables/functions to be defined before being used
"newcap" : true, // true: Require capitalization of all constructor functions e.g. `new F()`
"noarg" : true, // true: Prohibit use of `arguments.caller` and `arguments.callee`
"noempty" : true, // true: Prohibit use of empty blocks
"nonbsp" : true, // true: Prohibit use of non breakable spaces
"nonew" : true, // true: Prohibit use of constructors for side-effects (without assignment)
"plusplus" : false, // true: Prohibit use of `++` & `--`
"quotmark" : "single", // Quotation mark consistency:
// false : do nothing (default)
// true : ensure whatever is used is consistent
// "single" : require single quotes
// "double" : require double quotes
"undef" : true, // true: Require all non-global variables to be declared (prevents global leaks)
"unused" : true, // true: Require all defined variables be used
"strict" : false, // true: Requires all functions run in ES5 Strict Mode
"maxcomplexity" : 7, // {int} Max cyclomatic complexity per function
"maxdepth" : 3, // {int} Max depth of nested blocks (within functions)
"maxlen" : 80, // {int} Max number of characters per line
"maxparams" : 4, // {int} Max number of formal params allowed per function
"maxstatements" : 20, // {int} Max number statements per function
// Relaxing
"asi" : false, // true: Tolerate Automatic Semicolon Insertion (no semicolons)
"boss" : false, // true: Tolerate assignments where comparisons would be expected
"debug" : false, // true: Allow debugger statements e.g. browser breakpoints.
"eqnull" : false, // true: Tolerate use of `== null`
"esnext" : true, // true: Allow ES.next (ES6) syntax (ex: `const`)
"evil" : false, // true: Tolerate use of `eval` and `new Function()`
"expr" : false, // true: Tolerate `ExpressionStatement` as Programs
"funcscope" : false, // true: Tolerate defining variables inside control statements
"globalstrict" : false, // true: Allow global "use strict" (also enables 'strict')
"iterator" : false, // true: Tolerate using the `__iterator__` property
"lastsemic" : false, // true: Tolerate omitting a semicolon for the last statement of a 1-line block
"laxbreak" : false, // true: Tolerate possibly unsafe line breakings
"laxcomma" : false, // true: Tolerate comma-first style coding
"loopfunc" : false, // true: Tolerate functions being defined in loops
"moz" : false, // true: Allow Mozilla specific syntax (extends and overrides esnext features)
// (ex: `for each`, multiple try/catch, function expression…)
"multistr" : false, // true: Tolerate multi-line strings
"notypeof" : false, // true: Tolerate typeof comparison with unknown values.
"proto" : false, // true: Tolerate using the `__proto__` property
"scripturl" : false, // true: Tolerate script-targeted URLs
"shadow" : false, // true: Allows re-define variables later in code e.g. `var x=1; x=2;`
"sub" : false, // true: Tolerate using `[]` notation when it can still be expressed in dot notation
"supernew" : false, // true: Tolerate `new function () { ... };` and `new Object;`
"validthis" : false, // true: Tolerate using this in a non-constructor function
"noyield" : false, // true: Tolerate generators without yields
// Environments
"browser" : false, // Web Browser (window, document, etc)
"browserify" : false, // Browserify (node.js code in the browser)
"couch" : false, // CouchDB
"devel" : false, // Development/debugging (alert, confirm, etc)
"dojo" : false, // Dojo Toolkit
"jquery" : false, // jQuery
"mocha" : false, // mocha
"mootools" : false, // MooTools
"node" : true, // Node.js
"nonstandard" : false, // Widely adopted globals (escape, unescape, etc)
"phantom" : false, // PhantomJS
"prototypejs" : false, // Prototype and Scriptaculous
"rhino" : false, // Rhino
"worker" : false, // Web Workers
"wsh" : false, // Windows Scripting Host
"yui" : false, // Yahoo User Interface
// Custom Globals
"globals" : {} // additional predefined global variables
}

View File

@@ -1 +0,0 @@
--require ./better-stacks.js

View File

@@ -1,8 +1,8 @@
language: node_js
node_js:
# - 'stable'
- '4'
- '0.12'
- stable
- 6
- 4
# Use containers.
# http://docs.travis-ci.com/user/workers/container-based-infrastructure/

ISSUE_TEMPLATE.md (new file, 3 lines)
View File

@@ -0,0 +1,3 @@
# ALL ISSUES SHOULD BE CREATED IN XO-WEB'S TRACKER!
https://github.com/vatesfr/xo-web/issues

View File

@@ -19,7 +19,7 @@ ___
## Installation
Manual install procedure is [available here](https://github.com/vatesfr/xo/blob/master/doc/installation/README.md#installation).
Manual install procedure is [available here](https://xen-orchestra.com/docs/from_the_sources.html).
## Compilation

View File

@@ -1,13 +1,5 @@
Error.stackTraceLimit = 100
// Async stacks.
//
// Disabled for now as it cause a huge memory usage with
// fs.createReadStream().
// TODO: find a way to reenable.
//
// try { require('trace') } catch (_) {}
// Removes internal modules.
try {
var sep = require('path').sep

View File

@@ -7,4 +7,4 @@
// Better stack traces if possible.
require('../better-stacks')
require('exec-promise')(require('../dist/logs-cli'))
require('exec-promise')(require('../dist/logs-cli').default)

View File

@@ -7,6 +7,7 @@ var gulp = require('gulp')
var babel = require('gulp-babel')
var coffee = require('gulp-coffee')
var plumber = require('gulp-plumber')
var rimraf = require('rimraf')
var sourceMaps = require('gulp-sourcemaps')
var watch = require('gulp-watch')
@@ -38,6 +39,10 @@ function src (patterns) {
// ===================================================================
gulp.task(function clean (cb) {
rimraf(DIST_DIR, cb)
})
gulp.task(function buildCoffee () {
return src('**/*.coffee')
.pipe(sourceMaps.init())
@@ -62,4 +67,4 @@ gulp.task(function buildEs6 () {
// ===================================================================
gulp.task('build', gulp.parallel('buildCoffee', 'buildEs6'))
gulp.task('build', gulp.series('clean', gulp.parallel('buildCoffee', 'buildEs6')))

View File

@@ -1,6 +1,6 @@
{
"name": "xo-server",
"version": "4.17.0",
"version": "5.6.0",
"license": "AGPL-3.0",
"description": "Server part of Xen-Orchestra",
"keywords": [
@@ -31,42 +31,48 @@
"bin": "bin"
},
"engines": {
"node": ">=0.12"
"node": ">=4"
},
"dependencies": {
"@marsaud/smb2-promise": "^0.2.0",
"@marsaud/smb2-promise": "^0.2.1",
"@nraynaud/struct-fu": "^1.0.1",
"app-conf": "^0.4.0",
"archiver": "^1.3.0",
"babel-runtime": "^6.5.0",
"base64url": "^1.0.5",
"base64url": "^2.0.0",
"bind-property-descriptor": "^0.0.0",
"blocked": "^1.1.0",
"bluebird": "^3.1.1",
"body-parser": "^1.13.3",
"connect-flash": "^0.1.1",
"cookie": "^0.2.3",
"cookie": "^0.3.0",
"cookie-parser": "^1.3.5",
"cron": "^1.0.9",
"d3-time-format": "^0.3.0",
"d3-time-format": "^2.0.0",
"debug": "^2.1.3",
"decorator-synchronized": "^0.2.3",
"escape-string-regexp": "^1.0.3",
"event-to-promise": "^0.7.0",
"exec-promise": "^0.6.1",
"execa": "^0.4.0",
"execa": "^0.6.0",
"express": "^4.13.3",
"express-session": "^1.11.3",
"fatfs": "^0.10.3",
"fs-extra": "^0.30.0",
"fs-promise": "^0.4.1",
"get-stream": "^2.1.0",
"hashy": "~0.4.2",
"helmet": "^2.0.0",
"fs-extra": "^1.0.0",
"fs-promise": "^1.0.0",
"golike-defer": "^0.0.0",
"hashy": "~0.6.0",
"helmet": "^3.0.0",
"highland": "^2.5.1",
"http-proxy": "^1.13.2",
"http-server-plus": "^0.6.4",
"human-format": "^0.6.0",
"is-my-json-valid": "^2.12.2",
"http-server-plus": "^0.8.0",
"human-format": "^0.7.0",
"is-my-json-valid": "^2.13.1",
"is-redirect": "^1.0.0",
"jest": "^18.1.0",
"js-yaml": "^3.2.7",
"json-rpc-peer": "^0.11.0",
"json5": "^0.4.0",
"json-rpc-peer": "^0.13.0",
"json5": "^0.5.0",
"julien-f-source-map-support": "0.0.0",
"julien-f-unzip": "^0.2.1",
"kindof": "^2.0.0",
@@ -74,101 +80,75 @@
"level-party": "^3.0.4",
"level-sublevel": "^6.5.2",
"leveldown": "^1.4.2",
"lodash.assign": "^4.0.3",
"lodash.bind": "^4.1.0",
"lodash.difference": "^4.1.0",
"lodash.endswith": "^4.0.0",
"lodash.every": "^4.0.0",
"lodash.filter": "^4.2.0",
"lodash.find": "^4.2.0",
"lodash.findindex": "^4.2.0",
"lodash.foreach": "^4.1.0",
"lodash.get": "^4.1.2",
"lodash.has": "^4.2.0",
"lodash.includes": "^4.1.0",
"lodash.invert": "^4.0.1",
"lodash.isarray": "^4.0.0",
"lodash.isboolean": "^3.0.2",
"lodash.isempty": "^4.1.2",
"lodash.isfunction": "^3.0.1",
"lodash.isinteger": "^4.0.0",
"lodash.isobject": "^3.0.0",
"lodash.isstring": "^4.0.1",
"lodash.keys": "^4.0.3",
"lodash.map": "^4.2.0",
"lodash.pick": "^4.1.0",
"lodash.pickby": "^4.2.0",
"lodash.remove": "^4.0.1",
"lodash.some": "^4.2.0",
"lodash.sortby": "^4.2.0",
"lodash.startswith": "^4.0.0",
"lodash.trim": "^4.2.0",
"lodash": "^4.13.1",
"make-error": "^1",
"micromatch": "^2.3.2",
"minimist": "^1.2.0",
"moment-timezone": "^0.5.4",
"ms": "^0.7.1",
"multikey-hash": "^1.0.1",
"ndjson": "^1.4.3",
"parse-pairs": "^0.2.2",
"partial-stream": "0.0.0",
"passport": "^0.3.0",
"passport-local": "^1.0.0",
"promise-toolbox": "^0.3.2",
"pretty-format": "^18.1.0",
"promise-toolbox": "^0.8.0",
"proxy-agent": "^2.0.0",
"pug": "^2.0.0-alpha6",
"redis": "^2.0.1",
"schema-inspector": "^1.5.1",
"semver": "^5.1.0",
"serve-static": "^1.9.2",
"split-lines": "^1.1.0",
"stack-chain": "^1.3.3",
"tar-stream": "^1.5.2",
"through2": "^2.0.0",
"struct-fu": "^1.0.0",
"trace": "^2.0.1",
"ws": "~1.0.1",
"xen-api": "^0.9.0",
"tmp": "^0.0.31",
"uuid": "^3.0.0",
"ws": "^1.1.1",
"xen-api": "^0.9.6",
"xml2js": "~0.4.6",
"xo-acl-resolver": "0.1.0",
"xo-acl-resolver": "^0.2.2",
"xo-collection": "^0.4.0",
"xo-remote-parser": "^0.2.0"
"xo-common": "0.1.0",
"xo-remote-parser": "^0.3",
"xo-vmdk-to-vhd": "0.0.12"
},
"devDependencies": {
"babel-eslint": "^6.0.4",
"babel-eslint": "^7.0.0",
"babel-plugin-lodash": "^3.2.9",
"babel-plugin-transform-decorators-legacy": "^1.3.4",
"babel-plugin-transform-runtime": "^6.5.2",
"babel-preset-es2015": "^6.5.0",
"babel-preset-stage-0": "^6.5.0",
"chai": "^3.0.0",
"dependency-check": "^2.4.0",
"ghooks": "^1.0.3",
"gulp": "git://github.com/gulpjs/gulp#4.0",
"gulp-babel": "^6",
"gulp-coffee": "^2.3.1",
"gulp-plumber": "^1.0.0",
"gulp-sourcemaps": "^1.5.1",
"gulp-sourcemaps": "^2.1.1",
"gulp-watch": "^4.2.2",
"leche": "^2.1.1",
"mocha": "^2.2.1",
"must": "^0.13.1",
"nyc": "^6.4.2",
"sinon": "^1.14.1",
"standard": "^7.0.0"
"husky": "^0.12.0",
"index-modules": "^0.2.1",
"rimraf": "^2.5.2",
"standard": "^8.1.0"
},
"scripts": {
"build": "npm run build-indexes && gulp build --production",
"depcheck": "dependency-check ./package.json",
"build-indexes": "./tools/generate-index src/api src/xo-mixins",
"dev": "npm run build-indexes && gulp build",
"dev-test": "mocha --opts .mocha.opts --watch --reporter=min \"dist/**/*.spec.js\"",
"lint": "standard",
"postrelease": "git checkout master && git merge --ff-only stable && git checkout next-release && git merge --ff-only stable",
"posttest": "npm run lint && npm run depcheck",
"build": "gulp build --production",
"commit-msg": "npm test",
"dev": "gulp build",
"dev-test": "jest --bail --watch",
"posttest": "standard && dependency-check ./package.json",
"prebuild": "index-modules src/api src/xapi/mixins src/xo-mixins",
"predev": "npm run prebuild",
"prepublish": "npm run build",
"prerelease": "git checkout next-release && git pull --ff-only && git checkout stable && git pull --ff-only && git merge next-release",
"release": "npm version",
"start": "node bin/xo-server",
"test": "nyc mocha --opts .mocha.opts \"dist/**/*.spec.js\""
"test": "jest"
},
"babel": {
"plugins": [
"lodash",
"transform-decorators-legacy",
"transform-runtime"
],
@@ -177,10 +157,11 @@
"es2015"
]
},
"config": {
"ghooks": {
"commit-msg": "npm test"
}
"jest": {
"testPathDirs": [
"<rootDir>/src"
],
"testRegex": "\\.spec\\.js$"
},
"standard": {
"ignore": [

View File

@@ -1,11 +1,17 @@
# Example XO-Server configuration.
# BE *VERY* CAREFUL WHEN EDITING!
# YAML FILES ARE SUPER SUPER SENSITIVE TO MISTAKES IN WHITESPACE OR ALIGNMENT!
# visit http://www.yamllint.com/ to validate this file as needed
#=====================================================================
# Example XO-Server configuration.
#
# This file is automatically looking for at the following places:
# - `$HOME/.config/xo-server/config.yaml`
# - `/etc/xo-server/config.yaml`
#
# The first entries have priority.
#
# Note: paths are relative to the configuration file.
#=====================================================================
@@ -117,10 +123,18 @@ http:
# Connection to the Redis server.
redis:
# Syntax: redis://[db[:password]@]hostname[:port]
# Syntax: redis://[db[:password]@]hostname[:port][/db-number]
#
# Default: redis://localhost:6379
#uri: ''
# Default: redis://localhost:6379/0
#uri: redis://redis.company.lan/42
# List of aliased commands.
#
# See http://redis.io/topics/security#disabling-of-specific-commands
#renameCommands:
# del: '3dda29ad-3015-44f9-b13b-fa570de92489'
# srem: '3fd758c9-5610-4e9d-a058-dbf4cb6d8bf0'
# Directory containing the database of XO.
# Currently used for logs.

View File

@@ -6,54 +6,45 @@ html
meta(name = 'viewport' content = 'width=device-width, initial-scale=1.0')
title Xen Orchestra
meta(name = 'author' content = 'Vates SAS')
link(rel = 'stylesheet' href = 'styles/main.css')
body
.container
.row-login
.page-header
img(src = 'images/logo_small.png')
h2 Xen Orchestra
form.form-horizontal(action = 'signin/local' method = 'post')
fieldset
legend.login
h3 Sign in
if error
p.text-danger #{error}
.form-group
.col-sm-12
.input-group
span.input-group-addon
i.xo-icon-user.fa-fw
input.form-control.input-sm(
name = 'username'
type = 'text'
placeholder = 'Username'
required
)
.form-group
.col-sm-12
.input-group
span.input-group-addon
i.fa.fa-key.fa-fw
input.form-control.input-sm(
name = 'password'
type = 'password'
placeholder = 'Password'
required
)
.form-group
.col-sm-5
.checkbox
label
input(
name = 'remember-me'
type = 'checkbox'
)
| Remember me
.form-group
.col-sm-12
button.btn.btn-login.btn-block.btn-success
i.fa.fa-sign-in
| Sign in
each label, id in strategies
div: a(href = 'signin/' + id) Sign in with #{label}
link(rel = 'stylesheet' href = 'index.css')
body(style = 'display: flex; height: 100vh;')
div(style = 'margin: auto; width: 20em;')
div.mb-2(style = 'display: flex;')
img(src = 'assets/logo.png' style = 'margin: auto;')
h2.text-xs-center.mb-2 Xen Orchestra
form(action = 'signin/local' method = 'post')
fieldset
if error
p.text-danger #{error}
.input-group.mb-1
span.input-group-addon
i.xo-icon-user.fa-fw
input.form-control(
name = 'username'
type = 'text'
placeholder = 'Username'
required
)
.input-group.mb-1
span.input-group-addon
i.fa.fa-key.fa-fw
input.form-control(
name = 'password'
type = 'password'
placeholder = 'Password'
required
)
.checkbox
label
input(
name = 'remember-me'
type = 'checkbox'
)
| &nbsp;
| Remember me
div
button.btn.btn-block.btn-info
i.fa.fa-sign-in
| Sign in
each label, id in strategies
div: a(href = 'signin/' + id) Sign in with #{label}

View File

@@ -1,70 +0,0 @@
import {JsonRpcError} from 'json-rpc-peer'
// ===================================================================
// Export standard JSON-RPC errors.
export { // eslint-disable-line no-duplicate-imports
InvalidJson,
InvalidParameters,
InvalidRequest,
JsonRpcError,
MethodNotFound
} from 'json-rpc-peer'
// -------------------------------------------------------------------
export class NotImplemented extends JsonRpcError {
constructor () {
super('not implemented', 0)
}
}
// -------------------------------------------------------------------
export class NoSuchObject extends JsonRpcError {
constructor (id, type) {
super('no such object', 1, {id, type})
}
}
// -------------------------------------------------------------------
export class Unauthorized extends JsonRpcError {
constructor () {
super('not authenticated or not enough permissions', 2)
}
}
// -------------------------------------------------------------------
export class InvalidCredential extends JsonRpcError {
constructor () {
super('invalid credential', 3)
}
}
// -------------------------------------------------------------------
export class AlreadyAuthenticated extends JsonRpcError {
constructor () {
super('already authenticated', 4)
}
}
// -------------------------------------------------------------------
export class ForbiddenOperation extends JsonRpcError {
constructor (operation, reason) {
super(`forbidden operation: ${operation}`, 5, reason)
}
}
// -------------------------------------------------------------------
// To be used with a user-readable message.
// The message can be destined to be displayed to the front-end user.
export class GenericError extends JsonRpcError {
constructor (message) {
super(message, 6)
}
}

src/api/.index-modules (new empty file)
View File

src/api/backup.js (new file, 98 lines)
View File

@@ -0,0 +1,98 @@
import archiver from 'archiver'
import { basename } from 'path'
import { format } from 'json-rpc-peer'
import { forEach } from 'lodash'
// ===================================================================
export function list ({ remote }) {
return this.listVmBackups(remote)
}
list.permission = 'admin'
list.params = {
remote: { type: 'string' }
}
// -------------------------------------------------------------------
export function scanDisk ({ remote, disk }) {
return this.scanDiskBackup(remote, disk)
}
scanDisk.permission = 'admin'
scanDisk.params = {
remote: { type: 'string' },
disk: { type: 'string' }
}
// -------------------------------------------------------------------
export function scanFiles ({ remote, disk, partition, path }) {
return this.scanFilesInDiskBackup(remote, disk, partition, path)
}
scanFiles.permission = 'admin'
scanFiles.params = {
remote: { type: 'string' },
disk: { type: 'string' },
partition: { type: 'string', optional: true },
path: { type: 'string' }
}
// -------------------------------------------------------------------
function handleFetchFiles (req, res, { remote, disk, partition, paths, format: archiveFormat }) {
this.fetchFilesInDiskBackup(remote, disk, partition, paths).then(files => {
res.setHeader('content-disposition', 'attachment')
res.setHeader('content-type', 'application/octet-stream')
const nFiles = paths.length
// Send lone file directly
if (nFiles === 1) {
files[0].pipe(res)
return
}
const archive = archiver(archiveFormat)
archive.on('error', error => {
console.error(error)
res.end(format.error(0, error))
})
forEach(files, file => {
archive.append(file, { name: basename(file.path) })
})
archive.finalize()
archive.pipe(res)
}).catch(error => {
console.error(error)
res.writeHead(500)
res.end(format.error(0, error))
})
}
export async function fetchFiles ({ format = 'zip', ...params }) {
const fileName = params.paths.length > 1
? `restore_${new Date().toJSON()}.${format}`
: basename(params.paths[0])
return this.registerHttpRequest(handleFetchFiles, { ...params, format }, {
suffix: encodeURI(`/${fileName}`)
}).then(url => ({ $getFrom: url }))
}
fetchFiles.permission = 'admin'
fetchFiles.params = {
remote: { type: 'string' },
disk: { type: 'string' },
format: { type: 'string', optional: true },
partition: { type: 'string', optional: true },
paths: {
type: 'array',
items: { type: 'string' },
minLength: 1
}
}
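
A minimal usage sketch for the methods defined above, assuming an authenticated JSON-RPC client exposed as call(method, params); the parameter names come from this file, while the identifiers and the shapes of the intermediate results are placeholders:

async function exploreBackup (call, { remote, disk, partition }) {
  const backups = await call('backup.list', { remote })              // VM backups on the remote
  const partitions = await call('backup.scanDisk', { remote, disk }) // partitions of one backed-up disk
  const entries = await call('backup.scanFiles', { remote, disk, partition, path: '/etc/' })

  // fetchFiles resolves to { $getFrom: url }; the file (or a zip archive when several
  // paths are requested) is then downloaded over plain HTTP from that URL
  const { $getFrom } = await call('backup.fetchFiles', { remote, disk, partition, paths: ['/etc/hostname'] })
  return { backups, partitions, entries, $getFrom }
}
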

View File

@@ -1,14 +1,15 @@
$debug = (require 'debug') 'xo:api:vm'
$find = require 'lodash.find'
$findIndex = require 'lodash.findindex'
$forEach = require 'lodash.foreach'
endsWith = require 'lodash.endswith'
startsWith = require 'lodash.startswith'
$find = require 'lodash/find'
$findIndex = require 'lodash/findIndex'
$forEach = require 'lodash/forEach'
endsWith = require 'lodash/endsWith'
startsWith = require 'lodash/startsWith'
{coroutine: $coroutine} = require 'bluebird'
{format} = require 'json-rpc-peer'
{
extractProperty,
parseXml,
promisify
mapToArray,
parseXml
} = require '../utils'
#=====================================================================
@@ -261,6 +262,42 @@ stats.resolve = {
exports.stats = stats;
#---------------------------------------------------------------------
handleInstallSupplementalPack = $coroutine (req, res, { hostId }) ->
xapi = @getXapi(hostId)
# Timeout seems to be broken in Node 4.
# See https://github.com/nodejs/node/issues/3319
req.setTimeout(43200000) # 12 hours
req.length = req.headers['content-length']
try
yield xapi.installSupplementalPack(req, { hostId })
res.end(format.response(0))
catch e
res.writeHead(500)
res.end(format.error(0, new Error(e.message)))
return
installSupplementalPack = $coroutine ({host}) ->
return {
$sendTo: yield @registerHttpRequest(handleInstallSupplementalPack, { hostId: host.id })
}
installSupplementalPack.description = 'installs supplemental pack from ISO file'
installSupplementalPack.params = {
host: { type: 'string' }
}
installSupplementalPack.resolve = {
host: ['host', 'host', 'admin']
}
exports.installSupplementalPack = installSupplementalPack;
#=====================================================================
Object.defineProperty(exports, '__esModule', {

src/api/ip-pool.js (new file, 44 lines added)
View File

@@ -0,0 +1,44 @@
import { unauthorized } from 'xo-common/api-errors'
export function create (props) {
return this.createIpPool(props)
}
create.permission = 'admin'
create.description = 'Creates a new ipPool'
// -------------------------------------------------------------------
function delete_ ({ id }) {
return this.deleteIpPool(id)
}
export { delete_ as delete }
delete_.permission = 'admin'
delete_.description = 'Delete an ipPool'
// -------------------------------------------------------------------
export function getAll (params) {
const { user } = this
if (!user) {
throw unauthorized()
}
return this.getAllIpPools(user.permission === 'admin'
? params && params.userId
: user.id
)
}
getAll.description = 'List all ipPools'
// -------------------------------------------------------------------
export function set ({ id, ...props }) {
return this.updateIpPool(id, props)
}
set.permission = 'admin'
set.description = 'Modifies an existing ipPool'
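A hedged sketch of driving this new module over JSON-RPC, assuming it is exposed under the ipPool.* namespace; `call` is a hypothetical client helper, and since create/set forward their properties untouched, the property names are illustrative:
// Hypothetical sketch -- all values are placeholders.
async function manageIpPools (call) {
  await call('ipPool.create', { name: 'lab-pool' })   // admin only; props forwarded to createIpPool
  const pools = await call('ipPool.getAll', {})       // admins may pass { userId } to filter
  await call('ipPool.set', { id: '<ipPool id>', name: 'lab-pool-renamed' })
  await call('ipPool.delete', { id: '<ipPool id>' })
  return pools
}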

View File

@@ -18,7 +18,11 @@ get.params = {
}
export async function create ({job}) {
return (await this.createJob(this.session.get('user_id'), job)).id
if (!job.userId) {
job.userId = this.session.get('user_id')
}
return (await this.createJob(job)).id
}
create.permission = 'admin'
@@ -27,6 +31,7 @@ create.params = {
job: {
type: 'object',
properties: {
userId: {type: 'string', optional: true},
name: {type: 'string', optional: true},
type: {type: 'string'},
key: {type: 'string'},
@@ -38,14 +43,7 @@ create.params = {
items: {
type: 'array',
items: {
type: 'object',
properties: {
type: {type: 'string'},
values: {
type: 'array',
items: {type: 'object'}
}
}
type: 'object'
}
}
},
@@ -77,14 +75,7 @@ set.params = {
items: {
type: 'array',
items: {
type: 'object',
properties: {
type: {type: 'string'},
values: {
type: 'array',
items: {type: 'object'}
}
}
type: 'object'
}
}
},

View File

@@ -30,7 +30,7 @@ async function delete_ ({namespace, id}) {
delete_.description = 'deletes one or several logs from a namespace'
delete_.params = {
id: { type: 'string' },
id: { type: [ 'array', 'string' ] },
namespace: { type: 'string' }
}
delete_.permission = 'admin'
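Since id now accepts an array as well as a string, several log entries can be dropped in one call. A hedged sketch, with `call` a hypothetical JSON-RPC helper and the namespace and ids as placeholders:
async function purgeLogs (call) {
  // A single entry…
  await call('log.delete', { namespace: 'jobs', id: '<log id>' })
  // …or several at once, thanks to the widened type above.
  await call('log.delete', { namespace: 'jobs', id: ['<log id 1>', '<log id 2>'] })
}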

View File

@@ -1,3 +1,9 @@
import { mapToArray } from '../utils'
export function getBondModes () {
return ['balance-slb', 'active-backup', 'lacp']
}
export async function create ({ pool, name, description, pif, mtu = 1500, vlan = 0 }) {
return this.getXapi(pool).createNetwork({
name,
@@ -24,6 +30,81 @@ create.permission = 'admin'
// =================================================================
export async function createBonded ({ pool, name, description, pifs, mtu = 1500, mac, bondMode }) {
return this.getXapi(pool).createBondedNetwork({
name,
description,
pifIds: mapToArray(pifs, pif =>
this.getObject(pif, 'PIF')._xapiId
),
mtu: +mtu,
mac,
bondMode
})
}
createBonded.params = {
pool: { type: 'string' },
name: { type: 'string' },
description: { type: 'string', optional: true },
pifs: {
type: 'array',
items: {
type: 'string'
}
},
mtu: { type: ['integer', 'string'], optional: true },
// RegExp since schema-inspector does not provide a param check based on an enumeration
bondMode: { type: 'string', pattern: new RegExp(`^(${getBondModes().join('|')})$`) }
}
createBonded.resolve = {
pool: ['pool', 'pool', 'administrate']
}
createBonded.permission = 'admin'
createBonded.description = 'Create a bonded network. bondMode can be balance-slb, active-backup or lacp'
// ===================================================================
export async function set ({
network,
name_description: nameDescription,
name_label: nameLabel,
defaultIsLocked,
id
}) {
await this.getXapi(network).setNetworkProperties(network._xapiId, {
nameDescription,
nameLabel,
defaultIsLocked
})
}
set.params = {
id: {
type: 'string'
},
name_label: {
type: 'string',
optional: true
},
name_description: {
type: 'string',
optional: true
},
defaultIsLocked: {
type: 'boolean',
optional: true
}
}
set.resolve = {
network: ['id', 'network', 'administrate']
}
// =================================================================
export async function delete_ ({ network }) {
return this.getXapi(network).deleteNetwork(network._xapiId)
}
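For reference, a hedged sketch of the new bonded-network and network-edit calls, matching the params schemas above; `call` is a hypothetical JSON-RPC helper and ids are placeholders:
async function reworkNetworks (call) {
  await call('network.createBonded', {
    pool: '<pool id>',
    name: 'bond0',
    pifs: ['<pif id 1>', '<pif id 2>'],
    mtu: 9000,                    // integer or string, per the schema
    bondMode: 'active-backup'     // must match balance-slb | active-backup | lacp
  })
  await call('network.set', {
    id: '<network id>',
    name_label: 'storage',
    defaultIsLocked: true         // default locking mode for new VIFs on this network
  })
}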

View File

@@ -1,7 +1,3 @@
import {
GenericError
} from '../api-errors'
// FIXME: too low level, should be removed.
// ===================================================================
@@ -24,17 +20,8 @@ delete_.resolve = {
// ===================================================================
// Disconnect
export async function disconnect ({PBD}) {
// TODO: check if PBD is attached before
try {
await this.getXapi(PBD).call('PBD.unplug', PBD._xapiRef)
} catch (error) {
if (error.code === 'VDI_IN_USE') {
throw new GenericError('VDI in use')
} else {
throw error
}
}
export async function disconnect ({ pbd }) {
return this.getXapi(pbd).unplugPbd(pbd._xapiId)
}
disconnect.params = {
@@ -42,7 +29,7 @@ disconnect.params = {
}
disconnect.resolve = {
PBD: ['id', 'PBD', 'administrate']
pbd: ['id', 'PBD', 'administrate']
}
// ===================================================================

View File

@@ -1,5 +1,15 @@
// TODO: too low level, move into host.
import { IPV4_CONFIG_MODES, IPV6_CONFIG_MODES } from '../xapi'
export function getIpv4ConfigurationModes () {
return IPV4_CONFIG_MODES
}
export function getIpv6ConfigurationModes () {
return IPV6_CONFIG_MODES
}
// ===================================================================
// Delete
@@ -66,3 +76,18 @@ reconfigureIp.params = {
reconfigureIp.resolve = {
pif: ['id', 'PIF', 'administrate']
}
// ===================================================================
export async function editPif ({ pif, vlan }) {
await this.getXapi(pif).editPif(pif._xapiId, { vlan })
}
editPif.params = {
id: { type: 'string' },
vlan: { type: ['integer', 'string'] }
}
editPif.resolve = {
pif: ['id', 'PIF', 'administrate']
}

View File

@@ -102,3 +102,24 @@ purgeConfiguration.params = {
}
purgeConfiguration.permission = 'admin'
// ---------------------------------------------------------------------
export async function test ({ id, data }) {
await this.testPlugin(id, data)
}
test.description = 'Test a plugin with its current configuration'
test.params = {
id: {
type: 'string'
},
data: {
optional: true
}
}
test.permission = 'admin'
// ---------------------------------------------------------------------
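A hedged sketch of invoking the new test method; `call` is a hypothetical JSON-RPC helper, and the plugin id and free-form data payload are placeholders whose actual shape depends on the plugin's own testSchema:
async function testPluginConfig (call) {
  await call('plugin.test', {
    id: '<plugin id>',
    data: { to: 'admin@example.org' }  // optional, forwarded to the plugin's test hook
  })
}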

View File

@@ -1,5 +1,3 @@
import {GenericError} from '../api-errors'
// ===================================================================
export async function set ({
@@ -35,21 +33,21 @@ set.resolve = {
// -------------------------------------------------------------------
export async function setDefaultSr ({pool, sr}) {
await this.getXapi(pool).setDefaultSr(sr._xapiId)
export async function setDefaultSr ({ sr }) {
await this.hasPermissions(this.user.id, [ [ sr.$pool, 'administrate' ] ])
await this.getXapi(sr).setDefaultSr(sr._xapiId)
}
setDefaultSr.permission = '' // signed in
setDefaultSr.params = {
pool: {
type: 'string'
},
sr: {
type: 'string'
}
}
setDefaultSr.resolve = {
pool: ['pool', 'pool', 'administrate'],
sr: ['sr', 'SR']
}
// -------------------------------------------------------------------
@@ -70,6 +68,23 @@ installPatch.params = {
installPatch.resolve = {
pool: ['pool', 'pool', 'administrate']
}
// -------------------------------------------------------------------
export async function installAllPatches ({ pool }) {
await this.getXapi(pool).installAllPoolPatchesOnAllHosts()
}
installAllPatches.params = {
pool: {
type: 'string'
}
}
installAllPatches.resolve = {
pool: ['pool', 'pool', 'administrate']
}
installAllPatches.description = 'Automatically install all patches on every host of a pool'
// -------------------------------------------------------------------
@@ -106,12 +121,7 @@ export {uploadPatch as patch}
// -------------------------------------------------------------------
export async function mergeInto ({ source, target, force }) {
try {
await this.mergeXenPools(source._xapiId, target._xapiId, force)
} catch (e) {
// FIXME: should we expose plain XAPI error messages?
throw new GenericError(e.message)
}
await this.mergeXenPools(source._xapiId, target._xapiId, force)
}
mergeInto.params = {

View File

@@ -1,12 +1,12 @@
export async function getAll () {
return /* await */ this.getAllRemotes()
return this.getAllRemotes()
}
getAll.permission = 'admin'
getAll.description = 'Gets all existing fs remote points'
export async function get ({id}) {
return /* await */ this.getRemote(id)
return this.getRemote(id)
}
get.permission = 'admin'
@@ -15,8 +15,18 @@ get.params = {
id: {type: 'string'}
}
export async function test ({id}) {
return this.testRemote(id)
}
test.permission = 'admin'
test.description = 'Performs a read/write matching test on a remote point'
test.params = {
id: {type: 'string'}
}
export async function list ({id}) {
return /* await */ this.listRemoteBackups(id)
return this.listRemoteBackups(id)
}
list.permission = 'admin'
@@ -26,7 +36,7 @@ list.params = {
}
export async function create ({name, url}) {
return /* await */ this.createRemote({name, url})
return this.createRemote({name, url})
}
create.permission = 'admin'

View File

@@ -1,6 +1,6 @@
import {
Unauthorized
} from '../api-errors'
unauthorized
} from 'xo-common/api-errors'
// ===================================================================
@@ -51,11 +51,12 @@ delete_.params = {
// -------------------------------------------------------------------
export function set ({ id, name, subjects, objects, limits }) {
export function set ({ id, name, subjects, objects, ipPools, limits }) {
return this.updateResourceSet(id, {
limits,
name,
objects,
ipPools,
subjects
})
}
@@ -84,6 +85,13 @@ set.params = {
},
optional: true
},
ipPools: {
type: 'array',
items: {
type: 'string'
},
optional: true
},
limits: {
type: 'object',
optional: true
@@ -109,12 +117,14 @@ get.params = {
export async function getAll () {
const { user } = this
if (!user) {
throw new Unauthorized()
throw unauthorized()
}
return this.getAllResourceSets(user.id)
}
getAll.description = 'Get the list of all existing resource sets'
// -------------------------------------------------------------------
export function addObject ({ id, object }) {
@@ -227,3 +237,4 @@ export function recomputeAllLimits () {
}
recomputeAllLimits.permission = 'admin'
recomputeAllLimits.description = 'Recompute manually the current resource set usage'
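A hedged sketch of attaching IP pools to a resource set through the extended set call, assuming the resourceSet.* naming; `call` is a hypothetical JSON-RPC helper and ids are placeholders:
async function attachIpPools (call) {
  await call('resourceSet.set', {
    id: '<resource set id>',
    ipPools: ['<ipPool id 1>', '<ipPool id 2>']  // new optional array of ipPool ids
  })
  // Optionally recompute usage afterwards (admin only).
  await call('resourceSet.recomputeAllLimits', {})
}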

View File

@@ -1,3 +1,5 @@
export async function getAll () {
return /* await */ this.getRoles()
}
getAll.description = 'Returns the list of all existing roles'

View File

@@ -17,8 +17,8 @@ get.params = {
id: {type: 'string'}
}
export async function create ({jobId, cron, enabled, name}) {
return /* await */ this.createSchedule(this.session.get('user_id'), {job: jobId, cron, enabled, name})
export async function create ({ jobId, cron, enabled, name, timezone }) {
return /* await */ this.createSchedule(this.session.get('user_id'), { job: jobId, cron, enabled, name, timezone })
}
create.permission = 'admin'
@@ -30,8 +30,8 @@ create.params = {
name: {type: 'string', optional: true}
}
export async function set ({id, jobId, cron, enabled, name}) {
await this.updateSchedule(id, {job: jobId, cron, enabled, name})
export async function set ({ id, jobId, cron, enabled, name, timezone }) {
await this.updateSchedule(id, { job: jobId, cron, enabled, name, timezone })
}
set.permission = 'admin'
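A hedged sketch of the timezone-aware schedule calls; `call` is a hypothetical JSON-RPC helper, and the job and schedule ids are placeholders:
async function nightlySchedule (call) {
  await call('schedule.create', {
    jobId: '<job id>',
    cron: '0 2 * * *',
    enabled: true,
    name: 'nightly backup',
    timezone: 'Europe/Paris'   // new optional param, forwarded to createSchedule
  })
  await call('schedule.set', { id: '<schedule id>', timezone: 'UTC' })
}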

View File

@@ -1,21 +1,18 @@
import {deprecate} from 'util'
import {InvalidCredential, AlreadyAuthenticated} from '../api-errors'
import { getUserPublicProperties } from '../utils'
import {invalidCredentials} from 'xo-common/api-errors'
// ===================================================================
export async function signIn (credentials) {
if (this.session.has('user_id')) {
throw new AlreadyAuthenticated()
}
const user = await this.authenticateUser(credentials)
if (!user) {
throw new InvalidCredential()
throw invalidCredentials()
}
this.session.set('user_id', user.id)
return this.getUserPublicProperties(user)
return getUserPublicProperties(user)
}
signIn.description = 'sign in'
@@ -55,7 +52,7 @@ export async function getUser () {
return userId === undefined
? null
: this.getUserPublicProperties(await this.getUser(userId))
: getUserPublicProperties(await this.getUser(userId))
}
getUser.description = 'return the currently connected user'

View File

@@ -1,3 +1,4 @@
import { asInteger } from '../xapi/utils'
import {
ensureArray,
forEach,
@@ -48,8 +49,8 @@ scan.resolve = {
// -------------------------------------------------------------------
// TODO: find a way to call this "delete" and not destroy
export async function destroy ({SR}) {
await this.getXapi(SR).call('SR.destroy', SR._xapiRef)
export async function destroy ({ sr }) {
await this.getXapi(sr).destroySr(sr._xapiId)
}
destroy.params = {
@@ -57,13 +58,13 @@ destroy.params = {
}
destroy.resolve = {
SR: ['id', 'SR', 'administrate']
sr: ['id', 'SR', 'administrate']
}
// -------------------------------------------------------------------
export async function forget ({SR}) {
await this.getXapi(SR).call('SR.forget', SR._xapiRef)
await this.getXapi(SR).forgetSr(SR._xapiId)
}
forget.params = {
@@ -76,6 +77,34 @@ forget.resolve = {
// -------------------------------------------------------------------
export async function connectAllPbds ({SR}) {
await this.getXapi(SR).connectAllSrPbds(SR._xapiId)
}
connectAllPbds.params = {
id: { type: 'string' }
}
connectAllPbds.resolve = {
SR: ['id', 'SR', 'administrate']
}
// -------------------------------------------------------------------
export async function disconnectAllPbds ({SR}) {
await this.getXapi(SR).disconnectAllSrPbds(SR._xapiId)
}
disconnectAllPbds.params = {
id: { type: 'string' }
}
disconnectAllPbds.resolve = {
SR: ['id', 'SR', 'administrate']
}
// -------------------------------------------------------------------
export async function createIso ({
host,
nameLabel,
@@ -92,6 +121,7 @@ export async function createIso ({
deviceConfig.legacy_mode = 'true'
} else if (type === 'smb') {
path = path.replace(/\\/g, '/')
deviceConfig.type = 'cifs'
deviceConfig.username = user
deviceConfig.cifspassword = password
}
@@ -107,7 +137,7 @@ export async function createIso ({
nameDescription,
'iso', // SR type ISO
'iso', // SR content type ISO
true,
type !== 'local',
{}
)
@@ -316,7 +346,7 @@ export async function createIscsi ({
// if we give another port than default iSCSI
if (port) {
deviceConfig.port = port
deviceConfig.port = asInteger(port)
}
const srRef = await xapi.call(
@@ -377,7 +407,7 @@ export async function probeIscsiIqns ({
// if we give another port than default iSCSI
if (port) {
deviceConfig.port = port
deviceConfig.port = asInteger(port)
}
let xml
@@ -455,7 +485,7 @@ export async function probeIscsiLuns ({
// if we give another port than default iSCSI
if (port) {
deviceConfig.port = port
deviceConfig.port = asInteger(port)
}
let xml
@@ -534,7 +564,7 @@ export async function probeIscsiExists ({
// if we give another port than default iSCSI
if (port) {
deviceConfig.port = port
deviceConfig.port = asInteger(port)
}
const xml = parseXml(await xapi.call('SR.probe', host._xapiRef, deviceConfig, 'lvmoiscsi', {}))

src/api/system.js (new file, 67 lines added)
View File

@@ -0,0 +1,67 @@
import forEach from 'lodash/forEach'
import getKeys from 'lodash/keys'
import moment from 'moment-timezone'
import { noSuchObject } from 'xo-common/api-errors'
import { version as xoServerVersion } from '../../package.json'
// ===================================================================
export function getMethodsInfo () {
const methods = {}
forEach(this.apiMethods, (method, name) => {
methods[name] = {
description: method.description,
params: method.params || {},
permission: method.permission
}
})
return methods
}
getMethodsInfo.description = 'returns the signatures of all available API methods'
// -------------------------------------------------------------------
export const getServerTimezone = (tz => () => tz)(moment.tz.guess())
getServerTimezone.description = 'return the server timezone'
// -------------------------------------------------------------------
export const getServerVersion = () => xoServerVersion
getServerVersion.description = 'return the version of xo-server'
// -------------------------------------------------------------------
export const getVersion = () => '0.1'
getVersion.description = 'API version (unstable)'
// -------------------------------------------------------------------
export function listMethods () {
return getKeys(this.apiMethods)
}
listMethods.description = 'returns the name of all available API methods'
// -------------------------------------------------------------------
export function methodSignature ({method: name}) {
const method = this.apiMethods[name]
if (!method) {
throw noSuchObject()
}
// Return an array for compatibility with XML-RPC.
return [
// XML-RPC require the name of the method.
{
name,
description: method.description,
params: method.params || {},
permission: method.permission
}
]
}
methodSignature.description = 'returns the signature of an API method'
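A hedged sketch of the introspection calls this new module exposes; `call` is a hypothetical JSON-RPC helper:
async function inspectApi (call) {
  const methods = await call('system.getMethodsInfo', {})      // { name: { description, params, permission } }
  const [signature] = await call('system.methodSignature', { method: 'vm.set' })
  const timezone = await call('system.getServerTimezone', {})  // guessed once at startup via moment.tz.guess()
  return { methods, signature, timezone }
}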

View File

@@ -36,9 +36,9 @@ hasPermission.params = {
export function wait ({duration, returnValue}) {
return new Promise(resolve => {
setTimeout(+duration, () => {
setTimeout(() => {
resolve(returnValue)
})
}, +duration)
})
}

View File

@@ -1,10 +1,10 @@
import {InvalidParameters} from '../api-errors'
import { mapToArray } from '../utils'
import {invalidParameters} from 'xo-common/api-errors'
import { getUserPublicProperties, mapToArray } from '../utils'
// ===================================================================
export async function create ({email, password, permission}) {
return (await this.createUser(email, {password, permission})).id
return (await this.createUser({email, password, permission})).id
}
create.description = 'creates a new user'
@@ -22,7 +22,7 @@ create.params = {
// Deletes an existing user.
async function delete_ ({id}) {
if (id === this.session.get('user_id')) {
throw new InvalidParameters('an user cannot delete itself')
throw invalidParameters('a user cannot delete itself')
}
await this.deleteUser(id)
@@ -48,7 +48,7 @@ export async function getAll () {
const users = await this.getAllUsers()
// Filters out private properties.
return mapToArray(users, this.getUserPublicProperties)
return mapToArray(users, getUserPublicProperties)
}
getAll.description = 'returns all the existing users'
@@ -57,19 +57,29 @@ getAll.permission = 'admin'
// -------------------------------------------------------------------
export async function set ({id, email, password, permission}) {
await this.updateUser(id, {email, password, permission})
export async function set ({id, email, password, permission, preferences}) {
const isAdmin = this.user && this.user.permission === 'admin'
if (isAdmin) {
if (permission && id === this.session.get('user_id')) {
throw invalidParameters('a user cannot change its own permission')
}
} else if (email || password || permission) {
throw invalidParameters('these properties can only be changed by an administrator')
}
await this.updateUser(id, {email, password, permission, preferences})
}
set.description = 'changes the properties of an existing user'
set.permission = 'admin'
set.permission = ''
set.params = {
id: { type: 'string' },
email: { type: 'string', optional: true },
password: { type: 'string', optional: true },
permission: { type: 'string', optional: true }
permission: { type: 'string', optional: true },
preferences: { type: 'object', optional: true }
}
// -------------------------------------------------------------------
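With the relaxed permission, any signed-in user may now update its own preferences, while email, password and permission changes stay admin-only (and an admin still cannot change its own permission). A hedged sketch, with `call` a hypothetical JSON-RPC helper and the preference keys purely illustrative:
async function updateUsers (call) {
  // As a regular user: only preferences may be changed.
  await call('user.set', { id: '<own user id>', preferences: { lang: 'en' } })
  // As an admin: other properties are allowed, except one's own permission.
  await call('user.set', { id: '<other user id>', permission: 'admin' })
}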

View File

@@ -1,12 +1,11 @@
# FIXME: rename to disk.*
$isArray = require 'lodash.isarray'
{coroutine: $coroutine} = require 'bluebird'
{format} = require 'json-rpc-peer'
{InvalidParameters} = require '../api-errors'
{parseSize} = require '../utils'
{JsonRpcError} = require '../api-errors'
{invalidParameters} = require 'xo-common/api-errors'
{isArray: $isArray, parseSize} = require '../utils'
{JsonRpcError} = require 'json-rpc-peer'
#=====================================================================
@@ -39,7 +38,7 @@ set = $coroutine (params) ->
size = parseSize(params.size)
if size < vdi.size
throw new InvalidParameters(
throw invalidParameters(
"cannot set new size (#{size}) below the current size (#{vdi.size})"
)
yield xapi.resizeVdi(ref, size)

View File

@@ -1,5 +1,19 @@
import {
diffItems,
noop,
pCatch
} from '../utils'
// ===================================================================
// TODO: move into vm and rename to removeInterface
async function delete_ ({vif}) {
this.allocIpAddresses(
vif.id,
null,
vif.allowedIpv4Addresses.concat(vif.allowedIpv6Addresses)
)::pCatch(noop)
await this.getXapi(vif).deleteVif(vif._xapiId)
}
export {delete_ as delete}
@@ -13,10 +27,11 @@ delete_.resolve = {
}
// -------------------------------------------------------------------
// TODO: move into vm and rename to disconnectInterface
export async function disconnect ({vif}) {
// TODO: check if VIF is attached before
await this.getXapi(vif).call('VIF.unplug_force', vif._xapiRef)
await this.getXapi(vif).disconnectVif(vif._xapiId)
}
disconnect.params = {
@@ -31,7 +46,7 @@ disconnect.resolve = {
// TODO: move into vm and rename to connectInterface
export async function connect ({vif}) {
// TODO: check if VIF is attached before
await this.getXapi(vif).call('VIF.plug', vif._xapiRef)
await this.getXapi(vif).connectVif(vif._xapiId)
}
connect.params = {
@@ -41,3 +56,86 @@ connect.params = {
connect.resolve = {
vif: ['id', 'VIF', 'operate']
}
// -------------------------------------------------------------------
export async function set ({
vif,
network,
mac,
allowedIpv4Addresses,
allowedIpv6Addresses,
attached
}) {
const oldIpAddresses = vif.allowedIpv4Addresses.concat(vif.allowedIpv6Addresses)
const newIpAddresses = []
{
const { push } = newIpAddresses
push.apply(newIpAddresses, allowedIpv4Addresses || vif.allowedIpv4Addresses)
push.apply(newIpAddresses, allowedIpv6Addresses || vif.allowedIpv6Addresses)
}
if (network || mac) {
const xapi = this.getXapi(vif)
const vm = xapi.getObject(vif.$VM)
mac == null && (mac = vif.MAC)
network = xapi.getObject(network && network.id || vif.$network)
attached == null && (attached = vif.attached)
await this.allocIpAddresses(vif.id, null, oldIpAddresses)
await xapi.deleteVif(vif._xapiId)
// create new VIF with new parameters
const newVif = await xapi.createVif(vm.$id, network.$id, {
mac,
currently_attached: attached,
ipv4_allowed: newIpAddresses
})
await this.allocIpAddresses(newVif.$id, newIpAddresses)
return
}
const [ addAddresses, removeAddresses ] = diffItems(
newIpAddresses,
oldIpAddresses
)
await this.allocIpAddresses(
vif.id,
addAddresses,
removeAddresses
)
return this.getXapi(vif).editVif(vif._xapiId, {
ipv4Allowed: allowedIpv4Addresses,
ipv6Allowed: allowedIpv6Addresses
})
}
set.params = {
id: { type: 'string' },
network: { type: 'string', optional: true },
mac: { type: 'string', optional: true },
allowedIpv4Addresses: {
type: 'array',
items: {
type: 'string'
},
optional: true
},
allowedIpv6Addresses: {
type: 'array',
items: {
type: 'string'
},
optional: true
},
attached: { type: 'boolean', optional: true }
}
set.resolve = {
vif: ['id', 'VIF', 'operate'],
network: ['network', 'network', 'operate']
}
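A hedged sketch of the new vif.set call; `call` is a hypothetical JSON-RPC helper, ids and addresses are placeholders. Note that passing network or mac makes the server delete and recreate the VIF, as implemented above:
async function lockDownVif (call) {
  await call('vif.set', {
    id: '<vif id>',
    allowedIpv4Addresses: ['192.0.2.10'],  // replaces the current IPv4 allow-list
    allowedIpv6Addresses: []               // clears the IPv6 allow-list
  })
  // Moving the VIF to another network triggers the delete + recreate path.
  await call('vif.set', { id: '<vif id>', network: '<network id>' })
}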

View File

@@ -1,24 +1,27 @@
$assign = require 'lodash.assign'
$assign = require 'lodash/assign'
$debug = (require 'debug') 'xo:api:vm'
$filter = require 'lodash.filter'
$findIndex = require 'lodash.findindex'
$findWhere = require 'lodash.find'
$isArray = require 'lodash.isarray'
endsWith = require 'lodash.endswith'
$filter = require 'lodash/filter'
$findIndex = require 'lodash/findIndex'
$findWhere = require 'lodash/find'
concat = require 'lodash/concat'
endsWith = require 'lodash/endsWith'
escapeStringRegexp = require 'escape-string-regexp'
eventToPromise = require 'event-to-promise'
sortBy = require 'lodash.sortby'
startsWith = require 'lodash.startswith'
merge = require 'lodash/merge'
sortBy = require 'lodash/sortBy'
startsWith = require 'lodash/startsWith'
{coroutine: $coroutine} = require 'bluebird'
{format} = require 'json-rpc-peer'
{
GenericError,
Unauthorized
} = require('../api-errors')
forbiddenOperation,
invalidParameters,
unauthorized
} = require('xo-common/api-errors')
{
forEach,
formatXml: $js2xml,
isArray: $isArray,
map,
mapToArray,
noop,
@@ -47,39 +50,51 @@ checkPermissionOnSrs = (vm, permission = 'operate') -> (
)
return @hasPermissions(@session.get('user_id'), permissions).then((success) => (
throw new Unauthorized() unless success
throw unauthorized() unless success
))
)
#=====================================================================
# TODO: Implement ACLs
create = $coroutine ({
resourceSet
installation
name_description
name_label
template
pv_args
VDIs
VIFs
existingDisks
}) ->
{ user } = this
unless user
throw new Unauthorized()
extract = (obj, prop) ->
value = obj[prop]
delete obj[prop]
return value
# TODO: Implement ACLs
create = $coroutine (params) ->
{ user } = this
resourceSet = extract(params, 'resourceSet')
if not resourceSet and user.permission isnt 'admin'
throw unauthorized()
template = extract(params, 'template')
params.template = template._xapiId
xapi = this.getXapi(template)
limits = {
cpus: template.CPUs.number,
disk: 0,
memory: template.memory.size,
vms: 1
}
objectIds = [
template.id
]
limits = {
cpus: template.CPUs.number,
disk: 0,
memory: template.memory.dynamic[1],
vms: 1
}
vdiSizesByDevice = {}
forEach(xapi.getObject(template._xapiId).$VBDs, (vbd) =>
if (
vbd.type is 'Disk' and
(vdi = vbd.$VDI)
)
vdiSizesByDevice[vbd.userdevice] = +vdi.virtual_size
xapiVdis = VDIs and map(VDIs, (vdi) =>
return
)
vdis = extract(params, 'VDIs')
params.vdis = vdis and map(vdis, (vdi) =>
sr = @getObject(vdi.SR)
size = parseSize(vdi.size)
@@ -87,31 +102,18 @@ create = $coroutine ({
limits.disk += size
return $assign({}, vdi, {
device: vdi.device ? vdi.position,
device: vdi.userdevice ? vdi.device ? vdi.position,
size,
SR: sr._xapiId,
type: vdi.type
})
)
xapi = @getXapi(template)
diskSizesByDevice = {}
forEach(xapi.getObject(template._xapiId).$VBDs, (vbd) =>
if (
vbd.type is 'Disk' and
(vdi = vbd.$VDI)
)
diskSizesByDevice[vbd.device] = +vdi.virtual_size
return
)
xapiExistingVdis = existingDisks and map(existingDisks, (vdi, device) =>
existingVdis = extract(params, 'existingDisks')
params.existingVdis = existingVdis and map(existingVdis, (vdi, userdevice) =>
if vdi.size?
size = parseSize(vdi.size)
diskSizesByDevice[device] = size
vdiSizesByDevice[userdevice] = size
if vdi.$SR
sr = @getObject(vdi.$SR)
@@ -123,9 +125,10 @@ create = $coroutine ({
})
)
forEach(diskSizesByDevice, (size) => limits.disk += size)
forEach(vdiSizesByDevice, (size) => limits.disk += size)
xapiVifs = VIFs and map(VIFs, (vif) =>
vifs = extract(params, 'VIFs')
params.vifs = vifs and map(vifs, (vif) =>
network = @getObject(vif.network)
objectIds.push(network.id)
@@ -133,36 +136,63 @@ create = $coroutine ({
return {
mac: vif.mac
network: network._xapiId
ipv4_allowed: vif.allowedIpv4Addresses
ipv6_allowed: vif.allowedIpv6Addresses
}
)
installation = extract(params, 'installation')
params.installRepository = installation && installation.repository
checkLimits = null
if resourceSet
yield this.checkResourceSetConstraints(resourceSet, user.id, objectIds)
yield this.allocateLimitsInResourceSet(limits, resourceSet)
else unless user.permission is 'admin'
throw new Unauthorized()
xapiVm = yield xapi.createVm(template._xapiId, {
installRepository: installation && installation.repository,
nameDescription: name_description,
nameLabel: name_label,
pvArgs: pv_args,
vdis: xapiVdis,
vifs: xapiVifs,
existingVdis: xapiExistingVdis
})
checkLimits = $coroutine (limits2) =>
yield this.allocateLimitsInResourceSet(limits, resourceSet)
yield this.allocateLimitsInResourceSet(limits2, resourceSet)
xapiVm = yield xapi.createVm(template._xapiId, params, checkLimits)
vm = xapi.xo.addObject(xapiVm)
if resourceSet
yield Promise.all([
@addAcl(user.id, vm.id, 'admin'),
@addAcl(user.id, vm.id, 'admin')
xapi.xo.setData(xapiVm.$id, 'resourceSet', resourceSet)
])
for vifId in vm.VIFs
vif = @getObject(vifId, 'VIF')
yield this.allocIpAddresses(vifId, concat(vif.allowedIpv4Addresses, vif.allowedIpv6Addresses)).catch(() =>
xapi.deleteVif(vif._xapiId)
)
if params.bootAfterCreate
pCatch.call(xapi.startVm(vm._xapiId), noop)
return vm.id
create.params = {
bootAfterCreate: {
type: 'boolean'
optional: true
}
cloudConfig: {
type: 'string'
optional: true
}
coreOs: {
type: 'boolean'
optional: true
}
clone: {
type: 'boolean'
optional: true
}
resourceSet: {
type: 'string',
optional: true
@@ -204,6 +234,18 @@ create.params = {
optional: true # Auto-generated per default.
type: 'string'
}
allowedIpv4Addresses: {
optional: true
type: 'array'
items: { type: 'string' }
}
allowedIpv6Addresses: {
optional: true
type: 'array'
items: { type: 'string' }
}
}
}
}
@@ -246,19 +288,33 @@ create.params = {
}
create.resolve = {
template: ['template', 'VM-template', 'administrate'],
template: ['template', 'VM-template', ''],
}
exports.create = create
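A hedged sketch of a vm.create call exercising the resource-set path above; the call itself is language-agnostic, so the example is plain JavaScript, `call` is a hypothetical JSON-RPC helper, and every id and size is a placeholder:
async function createVmInResourceSet (call) {
  return call('vm.create', {
    name_label: 'web-01',
    template: '<template id>',
    resourceSet: '<resource set id>',   // required here unless the caller is an admin
    bootAfterCreate: true,
    VDIs: [ { SR: '<sr id>', size: '10GiB' } ],  // size goes through parseSize
    VIFs: [ { network: '<network id>', allowedIpv4Addresses: ['192.0.2.20'] } ],
    existingDisks: {}                   // per-userdevice overrides for the template's own disks
  })
}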
#---------------------------------------------------------------------
delete_ = ({vm, delete_disks: deleteDisks}) ->
delete_ = $coroutine ({vm, delete_disks: deleteDisks}) ->
cpus = vm.CPUs.number
memory = vm.memory.size
xapi = @getXapi(vm)
# Update IP pools
yield Promise.all(map(vm.VIFs, (vifId) =>
vif = xapi.getObject(vifId)
return pCatch.call(
this.allocIpAddresses(
vifId,
null,
concat(vif.ipv4_allowed, vif.ipv6_allowed)
),
noop
)
))
# Update resource sets
resourceSet = xapi.xo.getData(vm._xapiId, 'resourceSet')
if resourceSet?
disk = 0
@@ -275,10 +331,16 @@ delete_ = ({vm, delete_disks: deleteDisks}) ->
return
)
pCatch.call(@releaseLimitsInResourceSet(
@computeVmResourcesUsage(vm),
resourceSet
), noop)
resourceSetUsage = @computeVmResourcesUsage(vm)
ipPoolsUsage = yield @computeVmIpPoolsUsage(vm)
pCatch.call(
@releaseLimitsInResourceSet(
merge(resourceSetUsage, ipPoolsUsage),
resourceSet
),
noop
)
return xapi.deleteVm(vm._xapiId, deleteDisks)
@@ -361,7 +423,7 @@ migrate = $coroutine ({
])
unless yield @hasPermissions(@session.get('user_id'), permissions)
throw new Unauthorized()
throw unauthorized()
yield @getXapi(vm).migrateVm(vm._xapiId, @getXapi(host), host._xapiId, {
migrationNetworkId: migrationNetwork?._xapiId
@@ -398,112 +460,19 @@ exports.migrate = migrate
#---------------------------------------------------------------------
# FIXME: human readable strings should be handled.
set = $coroutine (params) ->
{VM} = params
xapi = @getXapi VM
set = (params) ->
VM = extract(params, 'VM')
xapi = @getXapi(VM)
{_xapiRef: ref} = VM
return xapi.editVm(VM._xapiId, params, (limits, vm) =>
resourceSet = xapi.xo.getData(vm, 'resourceSet')
resourceSet = xapi.xo.getData(ref, 'resourceSet')
if (resourceSet)
return @allocateLimitsInResourceSet(limits, resourceSet)
if 'memoryMin' of params
memoryMin = parseSize(params.memoryMin)
yield xapi.call 'VM.set_memory_static_min', ref, "#{memoryMin}"
yield xapi.call 'VM.set_memory_dynamic_min', ref, "#{memoryMin}"
if 'memoryMax' of params
memoryMax = parseSize(params.memoryMax)
yield xapi.call 'VM.set_memory_static_max', ref, "#{memoryMax}"
# Memory.
if 'memory' of params
memory = parseSize(params.memory)
if memory < VM.memory.static[0]
@throw(
'INVALID_PARAMS'
"cannot set memory below the static minimum (#{VM.memory.static[0]})"
)
if memory < VM.memory.dynamic[0]
yield xapi.call 'VM.set_memory_dynamic_min', ref, "#{memory}"
else if memory > VM.memory.static[1]
if $isVmRunning VM
@throw(
'INVALID_PARAMS'
"cannot set memory above the static maximum (#{VM.memory.static[1]}) "+
"for a running VM"
)
yield xapi.call 'VM.set_memory_static_max', ref, "#{memory}"
if resourceSet?
yield @allocateLimitsInResourceSet({
memory: memory - VM.memory.size
}, resourceSet)
yield xapi.call 'VM.set_memory_dynamic_max', ref, "#{memory}"
# Number of CPUs.
if 'CPUs' of params
{CPUs} = params
if resourceSet?
yield @allocateLimitsInResourceSet({
cpus: CPUs - VM.CPUs.number
}, resourceSet)
if $isVmRunning VM
if CPUs > VM.CPUs.max
@throw(
'INVALID_PARAMS'
"cannot set CPUs above the static maximum (#{VM.CPUs.max}) "+
"for a running VM"
)
yield xapi.call 'VM.set_VCPUs_number_live', ref, "#{CPUs}"
else
if CPUs > VM.CPUs.max
yield xapi.call 'VM.set_VCPUs_max', ref, "#{CPUs}"
yield xapi.call 'VM.set_VCPUs_at_startup', ref, "#{CPUs}"
if 'cpusMax' of params
yield xapi.call 'VM.set_VCPUs_max', ref, "#{CPUs}"
# HA policy
# TODO: also handle "best-effort" case
if 'high_availability' of params
{high_availability} = params
if high_availability
yield xapi.call 'VM.set_ha_restart_priority', ref, "restart"
else
yield xapi.call 'VM.set_ha_restart_priority', ref, ""
if 'auto_poweron' of params
{auto_poweron} = params
if auto_poweron
yield xapi.call 'VM.add_to_other_config', ref, 'auto_poweron', 'true'
yield xapi.setPoolProperties({autoPoweron: true})
else
yield xapi.call 'VM.remove_from_other_config', ref, 'auto_poweron'
if 'cpuWeight' of params
if resourceSet? and this.user.permission isnt 'admin'
throw new Unauthorized()
yield xapi.setVcpuWeight(VM._xapiId, params.cpuWeight)
# Other fields.
for param, fields of {
'name_label'
'name_description'
'PV_args'
}
continue unless param of params
for field in (if $isArray fields then fields else [fields])
yield xapi.call "VM.set_#{field}", ref, "#{params[param]}"
return true
if (limits.cpuWeight && this.user.permission != 'admin')
throw unauthorized()
)
set.params = {
# Identifier of the VM to update.
@@ -529,20 +498,25 @@ set.params = {
# Note: static_min ≤ dynamic_min ≤ dynamic_max ≤ static_max
memory: { type: ['integer', 'string'], optional: true }
# Set static_min & dynamic_min
# Set dynamic_min
memoryMin: { type: ['integer', 'string'], optional: true }
# Set static_max
# Set dynamic_max
memoryMax: { type: ['integer', 'string'], optional: true }
# Set static_max
memoryStaticMax: { type: ['integer', 'string'], optional: true }
# Kernel arguments for PV VM.
PV_args: { type: 'string', optional: true }
cpuWeight: { type: 'integer', optional: true}
cpuWeight: { type: ['integer', 'null'], optional: true }
cpuCap: { type: ['integer', 'null'], optional: true }
}
set.resolve = {
VM: ['id', ['VM', 'VM-snapshot'], 'administrate']
VM: ['id', ['VM', 'VM-snapshot', 'VM-template'], 'administrate']
}
exports.set = set
@@ -630,7 +604,7 @@ copy.params = {
}
copy.resolve = {
vm: [ 'vm', 'VM', 'administrate' ]
vm: [ 'vm', ['VM', 'VM-snapshot'], 'administrate' ]
sr: [ 'sr', 'SR', 'operate' ]
}
@@ -643,7 +617,7 @@ convertToTemplate = $coroutine ({vm}) ->
unless yield @hasPermissions(@session.get('user_id'), [
[ vm.$pool, 'administrate' ]
])
throw new Unauthorized()
throw unauthorized()
yield @getXapi(vm).call 'VM.set_is_a_template', vm._xapiRef, true
@@ -785,9 +759,7 @@ exports.backup = backup
#---------------------------------------------------------------------
importBackup = $coroutine ({remote, file, sr}) ->
yield @importVmBackup(remote, file, sr)
return
importBackup = ({remote, file, sr}) -> @importVmBackup(remote, file, sr)
importBackup.permission = 'admin'
importBackup.description = 'Imports a VM into host, from a file found in the chosen remote'
@@ -840,10 +812,10 @@ exports.rollingBackup = rollingBackup
rollingDrCopy = ({vm, pool, sr, tag, depth}) ->
unless sr
unless pool
throw new InvalidParameters('either pool or sr param should be specified')
throw invalidParameters('either pool or sr param should be specified')
if vm.$pool is pool.id
throw new GenericError('Disaster Recovery attempts to copy on the same pool')
throw forbiddenOperation('Disaster Recovery attempts to copy on the same pool')
sr = @getObject(pool.default_SR, 'SR')
@@ -901,8 +873,7 @@ stop = $coroutine ({vm, force}) ->
yield xapi.call 'VM.clean_shutdown', vm._xapiRef
catch error
if error.code is 'VM_MISSING_PV_DRIVERS' or error.code is 'VM_LACKS_FEATURE_SHUTDOWN'
# TODO: Improve reporting: this message is unclear.
@throw 'INVALID_PARAMS'
throw invalidParameters('clean shutdown requires PV drivers')
else
throw error
@@ -958,15 +929,12 @@ exports.resume = resume
#---------------------------------------------------------------------
# revert a snapshot to its parent VM
revert = $coroutine ({snapshot}) ->
# Attempts a revert from this snapshot to its parent VM
yield @getXapi(snapshot).call 'VM.revert', snapshot._xapiRef
return true
revert = ({snapshot, snapshotBefore}) ->
return @getXapi(snapshot).revertVm(snapshot._xapiId, snapshotBefore)
revert.params = {
id: { type: 'string' }
id: { type: 'string' },
snapshotBefore: { type: 'boolean', optional: true }
}
revert.resolve = {
@@ -1026,30 +994,30 @@ exports.export = export_;
#---------------------------------------------------------------------
handleVmImport = $coroutine (req, res, { xapi, srId }) ->
handleVmImport = $coroutine (req, res, { data, srId, type, xapi }) ->
# Timeout seems to be broken in Node 4.
# See https://github.com/nodejs/node/issues/3319
req.setTimeout(43200000) # 12 hours
try
vm = yield xapi.importVm(req, { srId })
vm = yield xapi.importVm(req, { data, srId, type })
res.end(format.response(0, vm.$id))
catch e
res.writeHead(500)
res.end(format.error(0, new GenericError(e.message)))
res.end(format.error(0, new Error(e.message)))
return
# TODO: "sr_id" can be passed in URL to target a specific SR
import_ = $coroutine ({host, sr}) ->
import_ = $coroutine ({ data, host, sr, type }) ->
if not sr
if not host
throw new InvalidParameters('you must provide either host or SR')
throw invalidParameters('you must provide either host or SR')
xapi = @getXapi(host)
sr = xapi.pool.$default_SR
if not sr
throw new InvalidParameters('there is no default SR in this pool')
throw invalidParameters('there is no default SR in this pool')
# FIXME: must have administrate permission on default SR.
else
@@ -1057,13 +1025,45 @@ import_ = $coroutine ({host, sr}) ->
return {
$sendTo: yield @registerHttpRequest(handleVmImport, {
data,
srId: sr._xapiId,
type,
xapi
})
}
import_.params = {
data: {
type: 'object',
optional: true,
properties: {
descriptionLabel: { type: 'string' },
disks: {
type: 'array',
items: {
type: 'object',
properties: {
capacity: { type: 'integer' },
descriptionLabel: { type: 'string' },
nameLabel: { type: 'string' },
path: { type: 'string' },
position: { type: 'integer' }
}
},
optional: true
},
memory: { type: 'integer' },
nameLabel: { type: 'string' },
nCpus: { type: 'integer' },
networks: {
type: 'array',
items: { type: 'string' },
optional: true
},
}
},
host: { type: 'string', optional: true },
type: { type: 'string', optional: true },
sr: { type: 'string', optional: true }
}
@@ -1105,21 +1105,47 @@ exports.attachDisk = attachDisk
#---------------------------------------------------------------------
# TODO: implement resource sets
createInterface = $coroutine ({vm, network, position, mtu, mac}) ->
createInterface = $coroutine ({
vm,
network,
position,
mac,
allowedIpv4Addresses,
allowedIpv6Addresses
}) ->
vif = yield @getXapi(vm).createVif(vm._xapiId, network._xapiId, {
mac,
mtu,
position
position,
ipv4_allowed: allowedIpv4Addresses,
ipv6_allowed: allowedIpv6Addresses
})
{ push } = ipAddresses = []
push.apply(ipAddresses, allowedIpv4Addresses) if allowedIpv4Addresses
push.apply(ipAddresses, allowedIpv6Addresses) if allowedIpv6Addresses
pCatch.call(@allocIpAddresses(vif.$id, ipAddresses), noop) if ipAddresses.length
return vif.$id
createInterface.params = {
vm: { type: 'string' }
network: { type: 'string' }
position: { type: 'string', optional: true }
mtu: { type: 'string', optional: true }
position: { type: ['integer', 'string'], optional: true }
mac: { type: 'string', optional: true }
allowedIpv4Addresses: {
type: 'array',
items: {
type: 'string'
},
optional: true
},
allowedIpv6Addresses: {
type: 'array',
items: {
type: 'string'
},
optional: true
}
}
createInterface.resolve = {
@@ -1198,10 +1224,7 @@ setBootOrder = $coroutine ({vm, order}) ->
yield xapi.call 'VM.set_HVM_boot_params', vm._xapiRef, order
return true
@throw(
'INVALID_PARAMS'
'You can only set the boot order on a HVM guest'
)
throw invalidParameters('You can only set the boot order on a HVM guest')
setBootOrder.params = {
vm: { type: 'string' },

View File

@@ -1,5 +1,50 @@
import { streamToBuffer } from '../utils'
// ===================================================================
export function clean () {
return this.clean()
}
clean.permission = 'admin'
// -------------------------------------------------------------------
export async function exportConfig () {
return {
$getFrom: await this.registerHttpRequest((req, res) => {
res.writeHead(200, 'OK', {
'content-disposition': 'attachment'
})
return this.exportConfig()
},
undefined,
{ suffix: '/config.json' })
}
}
exportConfig.permission = 'admin'
// -------------------------------------------------------------------
export function getAllObjects () {
return this.getObjects()
}
getAllObjects.permission = ''
getAllObjects.description = 'Returns all XO objects'
// -------------------------------------------------------------------
export async function importConfig () {
return {
$sendTo: await this.registerHttpRequest(async (req, res) => {
await this.importConfig(JSON.parse(await streamToBuffer(req)))
res.end('config successfully imported')
})
}
}
importConfig.permission = 'admin'

View File

@@ -1,8 +1,9 @@
import Collection, {ModelAlreadyExists} from '../collection'
import difference from 'lodash.difference'
import filter from 'lodash.filter'
import getKey from 'lodash.keys'
import difference from 'lodash/difference'
import filter from 'lodash/filter'
import getKey from 'lodash/keys'
import {createClient as createRedisClient} from 'redis'
import {v4 as generateUuid} from 'uuid'
import {
forEach,
@@ -35,13 +36,13 @@ export default class Redis extends Collection {
connection,
indexes = [],
prefix,
uri = 'tcp://localhost:6379'
uri
}) {
super()
this.indexes = indexes
this.prefix = prefix
this.redis = promisifyAll.call(connection || createRedisClient(uri))
this.redis = promisifyAll(connection || createRedisClient(uri))
}
_extract (ids) {
@@ -68,12 +69,12 @@ export default class Redis extends Collection {
// TODO: remove “replace” which is a temporary measure, implement
// “set()” instead.
const {indexes, prefix, redis, idPrefix = ''} = this
const {indexes, prefix, redis} = this
return Promise.all(mapToArray(models, async model => {
// Generate a new identifier if necessary.
if (model.id === undefined) {
model.id = idPrefix + String(await redis.incr(prefix + '_id'))
model.id = generateUuid()
}
const success = await redis.sadd(prefix + '_ids', model.id)
@@ -149,6 +150,10 @@ export default class Redis extends Collection {
}
_remove (ids) {
if (isEmpty(ids)) {
return
}
const {prefix, redis} = this
// TODO: handle indexes.

View File

@@ -1,65 +1,19 @@
import bind from 'lodash.bind'
import { getBoundPropertyDescriptor } from 'bind-property-descriptor'
import {
isArray,
isPromise,
isFunction,
noop,
pFinally
isFunction
} from './utils'
// ===================================================================
const {
defineProperties,
defineProperty,
getOwnPropertyDescriptor
} = Object
// ===================================================================
// See: https://github.com/jayphelps/core-decorators.js#autobind
//
// TODO: make it work for all class methods.
export const autobind = (target, key, {
configurable,
enumerable,
value: fn,
writable
}) => ({
configurable,
enumerable,
get () {
if (this === target) {
return fn
}
const bound = bind(fn, this)
defineProperty(this, key, {
configurable: true,
enumerable: false,
value: bound,
writable: true
})
return bound
},
set (newValue) {
// Cannot use assignment because it will call the setter on
// the prototype.
defineProperty(this, key, {
configurable: true,
enumerable: true,
value: newValue,
writable: true
})
}
})
// -------------------------------------------------------------------
// Debounce decorator for methods.
//
// See: https://github.com/wycats/javascript-decorators
@@ -98,117 +52,6 @@ export const debounce = duration => (target, name, descriptor) => {
// -------------------------------------------------------------------
const _push = Array.prototype.push
export const deferrable = (target, name, descriptor) => {
let fn
function newFn () {
const deferreds = []
const defer = fn => {
deferreds.push(fn)
}
defer.clear = () => {
deferreds.length = 0
}
const args = [ defer ]
_push.apply(args, arguments)
let executeDeferreds = () => {
let i = deferreds.length
while (i) {
deferreds[--i]()
}
}
try {
const result = fn.apply(this, args)
if (isPromise(result)) {
result::pFinally(executeDeferreds)
// Do not execute the deferreds in the finally block.
executeDeferreds = noop
}
return result
} finally {
executeDeferreds()
}
}
if (descriptor) {
fn = descriptor.value
descriptor.value = newFn
return descriptor
}
fn = target
return newFn
}
// Deferred functions are only executed on failures.
//
// i.e.: defer.clear() is automatically called in case of success.
deferrable.onFailure = (target, name, descriptor) => {
let fn
function newFn (defer) {
const result = fn.apply(this, arguments)
return isPromise(result)
? result.then(result => {
defer.clear()
return result
})
: (defer.clear(), result)
}
if (descriptor) {
fn = descriptor.value
descriptor.value = newFn
} else {
fn = target
target = newFn
}
return deferrable(target, name, descriptor)
}
// Deferred functions are only executed on success.
//
// i.e.: defer.clear() is automatically called in case of failure.
deferrable.onSuccess = (target, name, descriptor) => {
let fn
function newFn (defer) {
try {
const result = fn.apply(this, arguments)
return isPromise(result)
? result.then(null, error => {
defer.clear()
throw error
})
: result
} catch (error) {
defer.clear()
throw error
}
}
if (descriptor) {
fn = descriptor.value
descriptor.value = newFn
} else {
fn = target
target = newFn
}
return deferrable(target, name, descriptor)
}
// -------------------------------------------------------------------
const _ownKeys = (
typeof Reflect !== 'undefined' && Reflect.ownKeys ||
(({
@@ -220,22 +63,6 @@ const _ownKeys = (
)(Object)
)
const _bindPropertyDescriptor = (descriptor, thisArg) => {
const { get, set, value } = descriptor
if (get) {
descriptor.get = bind(get, thisArg)
}
if (set) {
descriptor.set = bind(set, thisArg)
}
if (isFunction(value)) {
descriptor.value = bind(value, thisArg)
}
return descriptor
}
const _isIgnoredProperty = name => (
name[0] === '_' ||
name === 'constructor'
@@ -259,7 +86,32 @@ export const mixin = MixIns => Class => {
const { name } = Class
const Decorator = (...args) => {
// Copy properties of plain object mix-ins to the prototype.
{
const allMixIns = MixIns
MixIns = []
const { prototype } = Class
const descriptors = { __proto__: null }
for (const MixIn of allMixIns) {
if (isFunction(MixIn)) {
MixIns.push(MixIn)
continue
}
for (const prop of _ownKeys(MixIn)) {
if (prop in prototype) {
throw new Error(`${name}#${prop} is already defined`)
}
(
descriptors[prop] = getOwnPropertyDescriptor(MixIn, prop)
).enumerable = false // Object methods are enumerable but class methods are not.
}
}
defineProperties(prototype, descriptors)
}
function Decorator (...args) {
const instance = new Class(...args)
for (const MixIn of MixIns) {
@@ -275,8 +127,9 @@ export const mixin = MixIns => Class => {
throw new Error(`${name}#${prop} is already defined`)
}
descriptors[prop] = _bindPropertyDescriptor(
getOwnPropertyDescriptor(prototype, prop),
descriptors[prop] = getBoundPropertyDescriptor(
prototype,
prop,
mixinInstance
)
}

View File

@@ -1,48 +1,9 @@
/* eslint-env mocha */
/* eslint-env jest */
import expect from 'must'
import {debounce} from './decorators'
// ===================================================================
import {autobind, debounce, deferrable} from './decorators'
// ===================================================================
describe('autobind()', () => {
class Foo {
@autobind
getFoo () {
return this
}
}
it('returns a bound instance for a method', () => {
const foo = new Foo()
const { getFoo } = foo
expect(getFoo()).to.equal(foo)
})
it('returns the same bound instance each time', () => {
const foo = new Foo()
expect(foo.getFoo).to.equal(foo.getFoo)
})
it('works with multiple instances of the same class', () => {
const foo1 = new Foo()
const foo2 = new Foo()
const getFoo1 = foo1.getFoo
const getFoo2 = foo2.getFoo
expect(getFoo1()).to.equal(foo1)
expect(getFoo2()).to.equal(foo2)
})
})
// -------------------------------------------------------------------
describe('debounce()', () => {
let i
@@ -60,114 +21,19 @@ describe('debounce()', () => {
it('works', done => {
const foo = new Foo()
expect(i).to.equal(0)
expect(i).toBe(0)
foo.foo()
expect(i).to.equal(1)
expect(i).toBe(1)
foo.foo()
expect(i).to.equal(1)
expect(i).toBe(1)
setTimeout(() => {
foo.foo()
expect(i).to.equal(2)
expect(i).toBe(2)
done()
}, 2e1)
})
})
// -------------------------------------------------------------------
describe('deferrable()', () => {
it('works with normal termination', () => {
let i = 0
const fn = deferrable(defer => {
i += 2
defer(() => { i -= 2 })
i *= 2
defer(() => { i /= 2 })
return i
})
expect(fn()).to.equal(4)
expect(i).to.equal(0)
})
it('defer.clear() removes previous deferreds', () => {
let i = 0
const fn = deferrable(defer => {
i += 2
defer(() => { i -= 2 })
defer.clear()
i *= 2
defer(() => { i /= 2 })
return i
})
expect(fn()).to.equal(4)
expect(i).to.equal(2)
})
it('works with exception', () => {
let i = 0
const fn = deferrable(defer => {
i += 2
defer(() => { i -= 2 })
i *= 2
defer(() => { i /= 2 })
throw i
})
expect(() => fn()).to.throw(4)
expect(i).to.equal(0)
})
it('works with promise resolution', async () => {
let i = 0
const fn = deferrable(async defer => {
i += 2
defer(() => { i -= 2 })
i *= 2
defer(() => { i /= 2 })
// Wait a turn of the events loop.
await Promise.resolve()
return i
})
await expect(fn()).to.eventually.equal(4)
expect(i).to.equal(0)
})
it('works with promise rejection', async () => {
let i = 0
const fn = deferrable(async defer => {
// Wait a turn of the events loop.
await Promise.resolve()
i += 2
defer(() => { i -= 2 })
i *= 2
defer(() => { i /= 2 })
// Wait a turn of the events loop.
await Promise.resolve()
throw i
})
await expect(fn()).to.reject.to.equal(4)
expect(i).to.equal(0)
})
})

View File

@@ -1,27 +1,22 @@
import assign from 'lodash.assign'
import startsWith from 'lodash.startswith'
import { parse as parseUrl } from 'url'
import isRedirect from 'is-redirect'
import { assign, isString, startsWith } from 'lodash'
import { request as httpRequest } from 'http'
import { request as httpsRequest } from 'https'
import { stringify as formatQueryString } from 'querystring'
import {
isString,
streamToBuffer
} from './utils'
format as formatUrl,
parse as parseUrl,
resolve as resolveUrl
} from 'url'
import { streamToBuffer } from './utils'
// -------------------------------------------------------------------
export default (...args) => {
const raw = opts => {
let req
const pResponse = new Promise((resolve, reject) => {
const opts = {}
for (let i = 0, length = args.length; i < length; ++i) {
const arg = args[i]
assign(opts, isString(arg) ? parseUrl(arg) : arg)
}
const {
body,
headers: { ...headers } = {},
@@ -62,11 +57,16 @@ export default (...args) => {
}
}
req = (
protocol && startsWith(protocol.toLowerCase(), 'https')
? httpsRequest
: httpRequest
)({
const secure = protocol && startsWith(protocol.toLowerCase(), 'https')
let requestFn
if (secure) {
requestFn = httpsRequest
} else {
requestFn = httpRequest
delete rest.rejectUnauthorized
}
req = requestFn({
...rest,
headers
})
@@ -98,6 +98,11 @@ export default (...args) => {
}
const code = response.statusCode
const { location } = response.headers
if (isRedirect(code) && location) {
assign(opts, parseUrl(resolveUrl(formatUrl(opts), location)))
return raw(opts)
}
if (code < 200 || code >= 300) {
const error = new Error(response.statusMessage)
error.code = code
@@ -112,13 +117,27 @@ export default (...args) => {
return response
})
pResponse.cancel = () => {
req.emit('error', new Error('HTTP request canceled!'))
req.abort()
}
pResponse.readAll = () => pResponse.then(response => response.readAll())
pResponse.request = req
return pResponse
}
const httpRequestPlus = (...args) => {
const opts = {}
for (let i = 0, length = args.length; i < length; ++i) {
const arg = args[i]
assign(opts, isString(arg) ? parseUrl(arg) : arg)
}
const pResponse = raw(opts)
pResponse.cancel = () => {
const { request } = pResponse
request.emit('error', new Error('HTTP request canceled!'))
request.abort()
}
pResponse.readAll = () => pResponse.then(response => response.readAll())
return pResponse
}
export { httpRequestPlus as default }

View File

@@ -2,37 +2,29 @@ import createLogger from 'debug'
const debug = createLogger('xo:main')
import appConf from 'app-conf'
import bind from 'lodash.bind'
import bind from 'lodash/bind'
import blocked from 'blocked'
import createExpress from 'express'
import eventToPromise from 'event-to-promise'
import has from 'lodash.has'
import has from 'lodash/has'
import helmet from 'helmet'
import includes from 'lodash.includes'
import pick from 'lodash.pick'
import includes from 'lodash/includes'
import proxyConsole from './proxy-console'
import serveStatic from 'serve-static'
import startsWith from 'lodash.startswith'
import startsWith from 'lodash/startsWith'
import WebSocket from 'ws'
import { compile as compilePug } from 'pug'
import { createServer as createProxyServer } from 'http-proxy'
import { join as joinPath } from 'path'
import {
AlreadyAuthenticated,
InvalidCredential,
InvalidParameters,
NoSuchObject,
NotImplemented
} from './api-errors'
import JsonRpcPeer from 'json-rpc-peer'
import { invalidCredentials } from 'xo-common/api-errors'
import {
readFile,
readdir
ensureDir,
readdir,
readFile
} from 'fs-promise'
import * as apiMethods from './api/index'
import Api from './api'
import WebServer from 'http-server-plus'
import Xo from './xo'
import {
@@ -139,6 +131,11 @@ async function setUpPassport (express, xo) {
}))
})
express.get('/signout', (req, res) => {
res.clearCookie('token')
res.redirect('/')
})
const SIGNIN_STRATEGY_RE = /^\/signin\/([^/]+)(\/callback)?(:?\?.*)?$/
express.use(async (req, res, next) => {
const { url } = req
@@ -188,7 +185,7 @@ async function setUpPassport (express, xo) {
next()
} else if (req.cookies.token) {
next()
} else if (/favicon|fontawesome|images|styles/.test(url)) {
} else if (/favicon|fontawesome|images|styles|\.(?:css|jpg|png)$/.test(url)) {
next()
} else {
req.flash('return-url', url)
@@ -224,19 +221,28 @@ async function registerPlugin (pluginPath, pluginName) {
// Supports both “normal” CommonJS and Babel's ES2015 modules.
const {
default: factory = plugin,
configurationSchema
configurationSchema,
configurationPresets,
testSchema
} = plugin
// The default export can be either a factory or directly a plugin
// instance.
const instance = isFunction(factory)
? factory({ xo: this })
? factory({
xo: this,
getDataDir: () => {
const dir = `${this._config.datadir}/${pluginName}`
return ensureDir(dir).then(() => dir)
}})
: factory
await this.registerPlugin(
pluginName,
instance,
configurationSchema,
configurationPresets,
testSchema,
version
)
}
@@ -353,7 +359,6 @@ const setUpProxies = (express, opts, xo) => {
if (startsWith(url, prefix)) {
const target = opts[prefix]
console.log('proxy.web', url, target + url.slice(prefix.length))
proxy.web(req, res, {
target: target + url.slice(prefix.length)
})
@@ -378,7 +383,6 @@ const setUpProxies = (express, opts, xo) => {
if (startsWith(url, prefix)) {
const target = opts[prefix]
console.log('proxy.ws', url, target + url.slice(prefix.length))
proxy.ws(req, socket, head, {
target: target + url.slice(prefix.length)
})
@@ -407,27 +411,6 @@ const setUpStaticFiles = (express, opts) => {
// ===================================================================
const errorClasses = {
ALREADY_AUTHENTICATED: AlreadyAuthenticated,
INVALID_CREDENTIAL: InvalidCredential,
INVALID_PARAMS: InvalidParameters,
NO_SUCH_OBJECT: NoSuchObject,
NOT_IMPLEMENTED: NotImplemented
}
const apiHelpers = {
getUserPublicProperties (user) {
// Handles both properties and wrapped models.
const properties = user.properties || user
return pick(properties, 'id', 'email', 'groups', 'permission', 'provider')
},
throw (errorId, data) {
throw new (errorClasses[errorId])(data)
}
}
const setUpApi = (webServer, xo, verboseLogsOnErrors) => {
const webSocketServer = new WebSocket.Server({
server: webServer,
@@ -435,18 +418,6 @@ const setUpApi = (webServer, xo, verboseLogsOnErrors) => {
})
xo.on('stop', () => pFromCallback(cb => webSocketServer.close(cb)))
// FIXME: it can cause issues if there are any property assignments in
// XO methods called from the API.
const context = { __proto__: xo, ...apiHelpers }
const api = new Api({
context,
verboseLogsOnErrors
})
xo.defineProperty('api', api)
api.addMethods(apiMethods)
webSocketServer.on('connection', socket => {
const { remoteAddress } = socket.upgradeReq.socket
@@ -461,7 +432,7 @@ const setUpApi = (webServer, xo, verboseLogsOnErrors) => {
// Create the JSON-RPC server for this connection.
const jsonRpc = new JsonRpcPeer(message => {
if (message.type === 'request') {
return api.call(connection, message.method, message.params)
return xo.callApiMethod(connection, message.method, message.params)
}
})
connection.notify = bind(jsonRpc.notify, jsonRpc)
@@ -517,7 +488,7 @@ const setUpConsoleProxy = (webServer, xo) => {
const user = await xo.authenticateUser({ token })
if (!await xo.hasPermissions(user.id, [ [ id, 'operate' ] ])) {
throw new InvalidCredential()
throw invalidCredentials()
}
const { remoteAddress } = socket
@@ -650,16 +621,24 @@ export default async function main (args) {
await registerPlugins(xo)
}
// Gracefully shut down on signals.
//
// TODO: implement a timeout? (or maybe it is the services launcher's
// responsibility?)
const shutdown = signal => {
debug('%s caught, closing…', signal)
xo.stop()
}
forEach([ 'SIGINT', 'SIGTERM' ], signal => {
let alreadyCalled = false
// Gracefully shut down on signals.
process.on('SIGINT', () => shutdown('SIGINT'))
process.on('SIGTERM', () => shutdown('SIGTERM'))
process.on(signal, () => {
if (alreadyCalled) {
warn('forced exit')
process.exit(1)
}
alreadyCalled = true
debug('%s caught, closing…', signal)
xo.stop()
})
})
await eventToPromise(xo, 'stopped')
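The signal handling above boils down to the following standalone sketch (not xo-server code; `stop` is assumed to resolve once every service is closed): the first SIGINT or SIGTERM triggers a graceful stop, a second one forces the exit.

const installShutdownHandlers = stop => {
  [ 'SIGINT', 'SIGTERM' ].forEach(signal => {
    let alreadyCalled = false

    process.on(signal, () => {
      if (alreadyCalled) {
        console.warn('forced exit')
        process.exit(1)
      }
      alreadyCalled = true

      console.log('%s caught, closing…', signal)
      stop()
    })
  })
}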


@@ -1,9 +1,19 @@
import assign from 'lodash.assign'
import {BaseError} from 'make-error'
import assign from 'lodash/assign'
import Bluebird from 'bluebird'
import every from 'lodash/every'
import filter from 'lodash/filter'
import isArray from 'lodash/isArray'
import isPlainObject from 'lodash/isPlainObject'
import map from 'lodash/map'
import mapValues from 'lodash/mapValues'
import size from 'lodash/size'
import some from 'lodash/some'
import { BaseError } from 'make-error'
import { crossProduct } from './math'
import {
createRawObject,
forEach
serializeError,
thunkToArray
} from './utils'
export class JobExecutorError extends BaseError {}
@@ -18,30 +28,67 @@ export class UnsupportedVectorType extends JobExecutorError {
}
}
export const productParams = (...args) => {
let product = createRawObject()
assign(product, ...args)
return product
// ===================================================================
const match = (pattern, value) => {
if (isPlainObject(pattern)) {
if (size(pattern) === 1) {
if (pattern.__or) {
return some(pattern.__or, subpattern => match(subpattern, value))
}
if (pattern.__not) {
return !match(pattern.__not, value)
}
}
return isPlainObject(value) && every(pattern, (subpattern, key) => (
value[key] !== undefined && match(subpattern, value[key])
))
}
if (isArray(pattern)) {
return isArray(value) && every(pattern, subpattern =>
some(value, subvalue => match(subpattern, subvalue))
)
}
return pattern === value
}
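To make the pattern semantics concrete, a few illustrative calls (match is private to this module, the values are made up):

// plain values must be strictly equal
match('Running', 'Running') // → true

// plain objects: every property of the pattern must match the value
match({ type: 'VM', power_state: 'Running' }, { type: 'VM', power_state: 'Halted' }) // → false

// __or matches if any sub-pattern matches, __not negates a pattern
match({ $pool: { __or: [ 'pool:1', 'pool:8' ] } }, { $pool: 'pool:8' }) // → true
match({ __not: { type: 'host' } }, { type: 'VM' }) // → true

// arrays: every pattern item must match at least one item of the value
match([ 'foo' ], [ 'foo', 'bar' ]) // → true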
export function _computeCrossProduct (items, productCb, extractValueMap = {}) {
const upstreamValues = []
const itemsCopy = items.slice()
const item = itemsCopy.pop()
const values = extractValueMap[item.type] && extractValueMap[item.type](item) || item
forEach(values, value => {
if (itemsCopy.length) {
let downstreamValues = _computeCrossProduct(itemsCopy, productCb, extractValueMap)
forEach(downstreamValues, downstreamValue => {
upstreamValues.push(productCb(value, downstreamValue))
const paramsVectorActionsMap = {
extractProperties ({ mapping, value }) {
return mapValues(mapping, key => value[key])
},
crossProduct ({ items }) {
return thunkToArray(crossProduct(
map(items, value => resolveParamsVector.call(this, value))
))
},
fetchObjects ({ pattern }) {
return filter(this.xo.getObjects(), object => match(pattern, object))
},
map ({ collection, iteratee, paramName = 'value' }) {
return map(resolveParamsVector.call(this, collection), value => {
return resolveParamsVector.call(this, {
...iteratee,
[paramName]: value
})
} else {
upstreamValues.push(value)
}
})
return upstreamValues
})
},
set: ({ values }) => values
}
export function resolveParamsVector (paramsVector) {
const visitor = paramsVectorActionsMap[paramsVector.type]
if (!visitor) {
throw new Error(`Unsupported function '${paramsVector.type}'.`)
}
return visitor.call(this, paramsVector)
}
// ===================================================================
export default class JobExecutor {
constructor (xo) {
this.xo = xo
@@ -76,30 +123,24 @@ export default class JobExecutor {
event: 'job.end',
runJobId
})
} catch (e) {
} catch (error) {
this._logger.error(`The execution of ${job.id} has failed.`, {
event: 'job.end',
runJobId,
error: e
error: serializeError(error)
})
throw error
}
}
async _execCall (job, runJobId) {
let paramsFlatVector
if (job.paramsVector) {
if (job.paramsVector.type === 'crossProduct') {
paramsFlatVector = _computeCrossProduct(job.paramsVector.items, productParams, this._extractValueCb)
} else {
throw new UnsupportedVectorType(job.paramsVector)
}
} else {
paramsFlatVector = [{}] // One call with no parameters
}
const { paramsVector } = job
const paramsFlatVector = paramsVector
? resolveParamsVector.call(this, paramsVector)
: [{}] // One call with no parameters
const connection = this.xo.createUserConnection()
const promises = []
connection.set('user_id', job.userId)
@@ -109,7 +150,7 @@ export default class JobExecutor {
calls: {}
}
forEach(paramsFlatVector, params => {
await Bluebird.map(paramsFlatVector, params => {
const runCallId = this._logger.notice(`Starting ${job.method} call. (${job.id})`, {
event: 'jobCall.start',
runJobId,
@@ -123,36 +164,35 @@ export default class JobExecutor {
start: Date.now()
}
promises.push(
this.xo.api.call(connection, job.method, assign({}, params)).then(
value => {
this._logger.notice(`Call ${job.method} (${runCallId}) is a success. (${job.id})`, {
event: 'jobCall.end',
runJobId,
runCallId,
returnedValue: value
})
return this.xo.callApiMethod(connection, job.method, assign({}, params)).then(
value => {
this._logger.notice(`Call ${job.method} (${runCallId}) is a success. (${job.id})`, {
event: 'jobCall.end',
runJobId,
runCallId,
returnedValue: value
})
call.returnedValue = value
call.end = Date.now()
},
reason => {
this._logger.notice(`Call ${job.method} (${runCallId}) has failed. (${job.id})`, {
event: 'jobCall.end',
runJobId,
runCallId,
error: reason
})
call.returnedValue = value
call.end = Date.now()
},
reason => {
this._logger.notice(`Call ${job.method} (${runCallId}) has failed. (${job.id})`, {
event: 'jobCall.end',
runJobId,
runCallId,
error: serializeError(reason)
})
call.error = reason
call.end = Date.now()
}
)
call.error = reason
call.end = Date.now()
}
)
}, {
concurrency: 2
})
connection.close()
await Promise.all(promises)
execStatus.end = Date.now()
return execStatus


@@ -1,71 +1,100 @@
/* eslint-env mocha */
/* eslint-env jest */
import {expect} from 'chai'
import leche from 'leche'
import { forEach } from 'lodash'
import { resolveParamsVector } from './job-executor'
import {
_computeCrossProduct,
productParams
} from './job-executor'
describe('resolveParamsVector', function () {
forEach({
'cross product with three sets': [
// Expected result.
[ { id: 3, value: 'foo', remote: 'local' },
{ id: 7, value: 'foo', remote: 'local' },
{ id: 10, value: 'foo', remote: 'local' },
{ id: 3, value: 'bar', remote: 'local' },
{ id: 7, value: 'bar', remote: 'local' },
{ id: 10, value: 'bar', remote: 'local' } ],
// Entry.
{
type: 'crossProduct',
items: [{
type: 'set',
values: [ { id: 3 }, { id: 7 }, { id: 10 } ]
}, {
type: 'set',
values: [ { value: 'foo' }, { value: 'bar' } ]
}, {
type: 'set',
values: [ { remote: 'local' } ]
}]
}
],
'cross product with `set` and `map`': [
// Expected result.
[
{ remote: 'local', id: 'vm:2' },
{ remote: 'smb', id: 'vm:2' }
],
describe('productParams', function () {
leche.withData({
'Two sets of one': [
{a: 1, b: 2}, {a: 1}, {b: 2}
],
'Two sets of two': [
{a: 1, b: 2, c: 3, d: 4}, {a: 1, b: 2}, {c: 3, d: 4}
],
'Three sets': [
{a: 1, b: 2, c: 3, d: 4, e: 5, f: 6}, {a: 1}, {b: 2, c: 3}, {d: 4, e: 5, f: 6}
],
'One set': [
{a: 1, b: 2}, {a: 1, b: 2}
],
'Empty set': [
{a: 1}, {a: 1}, {}
],
'All empty': [
{}, {}, {}
],
'No set': [
{}
// Entry.
{
type: 'crossProduct',
items: [{
type: 'set',
values: [ { remote: 'local' }, { remote: 'smb' } ]
}, {
type: 'map',
collection: {
type: 'fetchObjects',
pattern: {
$pool: { __or: [ 'pool:1', 'pool:8', 'pool:12' ] },
power_state: 'Running',
tags: [ 'foo' ],
type: 'VM'
}
},
iteratee: {
type: 'extractProperties',
mapping: { id: 'id' }
}
}]
},
// Context.
{
xo: {
getObjects: function () {
return [{
id: 'vm:1',
$pool: 'pool:1',
tags: [],
type: 'VM',
power_state: 'Halted'
}, {
id: 'vm:2',
$pool: 'pool:1',
tags: [ 'foo' ],
type: 'VM',
power_state: 'Running'
}, {
id: 'host:1',
type: 'host',
power_state: 'Running'
}, {
id: 'vm:3',
$pool: 'pool:8',
tags: [ 'foo' ],
type: 'VM',
power_state: 'Halted'
}]
}
}
}
]
}, function (resultSet, ...sets) {
it('Assembles all given param sets in one set', function () {
expect(productParams(...sets)).to.eql(resultSet)
})
})
})
describe('_computeCrossProduct', function () {
// Gives the sum of all args
const addTest = (...args) => args.reduce((prev, curr) => prev + curr, 0)
// Gives the product of all args
const multiplyTest = (...args) => args.reduce((prev, curr) => prev * curr, 1)
leche.withData({
'2 sets of 2 items to multiply': [
[10, 14, 15, 21], [[2, 3], [5, 7]], multiplyTest
],
'3 sets of 2 items to multiply': [
[110, 130, 154, 182, 165, 195, 231, 273], [[2, 3], [5, 7], [11, 13]], multiplyTest
],
'2 sets of 3 items to multiply': [
[14, 22, 26, 21, 33, 39, 35, 55, 65], [[2, 3, 5], [7, 11, 13]], multiplyTest
],
'2 sets of 2 items to add': [
[7, 9, 8, 10], [[2, 3], [5, 7]], addTest
],
'3 sets of 2 items to add': [
[18, 20, 20, 22, 19, 21, 21, 23], [[2, 3], [5, 7], [11, 13]], addTest
],
'2 sets of 3 items to add': [
[9, 13, 15, 10, 14, 16, 12, 16, 18], [[2, 3, 5], [7, 11, 13]], addTest
]
}, function (product, items, cb) {
it('Crosses sets of values with a crossProduct callback', function () {
expect(_computeCrossProduct(items, cb)).to.have.members(product)
}, ([ expectedResult, entry, context ], name) => {
describe(`with ${name}`, () => {
it('Resolves params vector', () => {
expect(resolveParamsVector.call(context, entry)).toEqual(expectedResult)
})
})
})
})


@@ -1,5 +1,5 @@
import appConf from 'app-conf'
import get from 'lodash.get'
import get from 'lodash/get'
import highland from 'highland'
import levelup from 'level-party'
import ndjson from 'ndjson'

src/math.js (new file, 48 lines)

@@ -0,0 +1,48 @@
import assign from 'lodash/assign'
const _combine = (vectors, n, cb) => {
if (!n) {
return
}
const nLast = n - 1
const vector = vectors[nLast]
const m = vector.length
if (n === 1) {
for (let i = 0; i < m; ++i) {
cb([ vector[i] ])
}
return
}
for (let i = 0; i < m; ++i) {
const value = vector[i]
_combine(vectors, nLast, (vector) => {
vector.push(value)
cb(vector)
})
}
}
// Compute all combinations from vectors.
//
// Ex: combine([[2, 3], [5, 7]])
// => [ [ 2, 5 ], [ 3, 5 ], [ 2, 7 ], [ 3, 7 ] ]
export const combine = vectors => cb => _combine(vectors, vectors.length, cb)
// Merge the properties of an objects set in one object.
//
// Ex: mergeObjects([ { a: 1 }, { b: 2 } ]) => { a: 1, b: 2 }
export const mergeObjects = objects => assign({}, ...objects)
// Compute a cross product between vectors.
//
// Ex: crossProduct([ [ { a: 2 }, { b: 3 } ], [ { c: 5 }, { d: 7 } ] ] )
// => [ { a: 2, c: 5 }, { b: 3, c: 5 }, { a: 2, d: 7 }, { b: 3, d: 7 } ]
export const crossProduct = (vectors, mergeFn = mergeObjects) => cb => (
combine(vectors)(vector => {
cb(mergeFn(vector))
})
)
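A hedged usage sketch: crossProduct returns a thunk, so thunkToArray (from ./utils) is the usual way to materialize the result.

import { crossProduct } from './math'
import { thunkToArray } from './utils'

// every combination of the two sets, merged into single objects
thunkToArray(crossProduct([
  [ { id: 3 }, { id: 7 } ],
  [ { remote: 'local' }, { remote: 'smb' } ]
]))
// → [ { id: 3, remote: 'local' }, { id: 7, remote: 'local' },
//     { id: 3, remote: 'smb' }, { id: 7, remote: 'smb' } ]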

src/math.spec.js (new file, 74 lines)

@@ -0,0 +1,74 @@
/* eslint-env jest */
import { forEach } from 'lodash'
import { thunkToArray } from './utils'
import {
crossProduct,
mergeObjects
} from './math'
describe('mergeObjects', function () {
forEach({
'Two sets of one': [
{a: 1, b: 2}, {a: 1}, {b: 2}
],
'Two sets of two': [
{a: 1, b: 2, c: 3, d: 4}, {a: 1, b: 2}, {c: 3, d: 4}
],
'Three sets': [
{a: 1, b: 2, c: 3, d: 4, e: 5, f: 6}, {a: 1}, {b: 2, c: 3}, {d: 4, e: 5, f: 6}
],
'One set': [
{a: 1, b: 2}, {a: 1, b: 2}
],
'Empty set': [
{a: 1}, {a: 1}, {}
],
'All empty': [
{}, {}, {}
],
'No set': [
{}
]
}, ([ resultSet, ...sets ], name) => {
describe(`with ${name}`, () => {
it('Assembles all given param sets in one set', function () {
expect(mergeObjects(sets)).toEqual(resultSet)
})
})
})
})
describe('crossProduct', function () {
// Gives the sum of all args
const addTest = args => args.reduce((prev, curr) => prev + curr, 0)
// Gives the product of all args
const multiplyTest = args => args.reduce((prev, curr) => prev * curr, 1)
forEach({
'2 sets of 2 items to multiply': [
[10, 14, 15, 21], [[2, 3], [5, 7]], multiplyTest
],
'3 sets of 2 items to multiply': [
[110, 130, 154, 182, 165, 195, 231, 273], [[2, 3], [5, 7], [11, 13]], multiplyTest
],
'2 sets of 3 items to multiply': [
[14, 22, 26, 21, 33, 39, 35, 55, 65], [[2, 3, 5], [7, 11, 13]], multiplyTest
],
'2 sets of 2 items to add': [
[7, 9, 8, 10], [[2, 3], [5, 7]], addTest
],
'3 sets of 2 items to add': [
[18, 20, 20, 22, 19, 21, 21, 23], [[2, 3], [5, 7], [11, 13]], addTest
],
'2 sets of 3 items to add': [
[9, 13, 15, 10, 14, 16, 12, 16, 18], [[2, 3, 5], [7, 11, 13]], addTest
]
}, ([ product, items, cb ], name) => {
describe(`with ${name}`, () => {
it('Crosses sets of values with a crossProduct callback', function () {
expect(thunkToArray(crossProduct(items, cb)).sort()).toEqual(product.sort())
})
})
})
})


@@ -1,8 +1,12 @@
import isEmpty from 'lodash/isEmpty'
import Collection from '../collection/redis'
import Model from '../model'
import { forEach } from '../utils'
import { parseProp } from './utils'
// ===================================================================
export default class Group extends Model {}
@@ -14,20 +18,16 @@ export class Groups extends Collection {
return Group
}
get idPrefix () {
return 'group:'
}
create (name) {
return this.add(new Group({
name,
users: '[]'
}))
return this.add(new Group({ name }))
}
async save (group) {
// Serializes.
group.users = JSON.stringify(group.users)
let tmp
group.users = isEmpty(tmp = group.users)
? undefined
: JSON.stringify(tmp)
return /* await */ this.update(group)
}
@@ -37,13 +37,7 @@ export class Groups extends Collection {
// Deserializes.
forEach(groups, group => {
const {users} = group
try {
group.users = JSON.parse(users)
} catch (error) {
console.warn('cannot parse group.users:', users)
group.users = []
}
group.users = parseProp('group', group, 'users', [])
})
return groups


@@ -11,12 +11,7 @@ export class Jobs extends Collection {
return Job
}
get idPrefix () {
return 'job:'
}
async create (userId, job) {
job.userId = userId
async create (job) {
// Serializes.
job.paramsVector = JSON.stringify(job.paramsVector)
return /* await */ this.add(new Job(job))


@@ -13,10 +13,6 @@ export class PluginsMetadata extends Collection {
return PluginMetadata
}
get idPrefix () {
return 'plugin-metadata:'
}
async save ({ id, autoload, configuration }) {
return /* await */ this.update({
id,


@@ -13,10 +13,6 @@ export class Remotes extends Collection {
return Remote
}
get idPrefix () {
return 'remote-'
}
create (name, url) {
return this.add(new Remote({
name,


@@ -11,17 +11,14 @@ export class Schedules extends Collection {
return Schedule
}
get idPrefix () {
return 'schedule:'
}
create (userId, job, cron, enabled, name = undefined) {
create (userId, job, cron, enabled, name = undefined, timezone = undefined) {
return this.add(new Schedule({
userId,
job,
cron,
enabled,
name
name,
timezone
}))
}


@@ -1,5 +1,8 @@
import Collection from '../collection/redis'
import Model from '../model'
import { forEach } from '../utils'
import { parseProp } from './utils'
// ===================================================================
@@ -19,4 +22,19 @@ export class Servers extends Collection {
return /* await */ this.add({host, username, password, readOnly})
}
async get (properties) {
const servers = await super.get(properties)
// Deserializes
forEach(servers, server => {
if (server.error) {
server.error = parseProp('server', server, 'error', '')
} else {
delete server.error
}
})
return servers
}
}


@@ -1,7 +1,11 @@
import isEmpty from 'lodash/isEmpty'
import Collection from '../collection/redis'
import Model from '../model'
import { forEach } from '../utils'
import { parseProp } from './utils'
// ===================================================================
export default class User extends Model {}
@@ -17,15 +21,14 @@ export class Users extends Collection {
return User
}
async create (email, properties = {}) {
async create (properties) {
const { email } = properties
// Avoid duplicates.
if (await this.exists({email})) {
throw new Error(`the user ${email} already exists`)
}
// Adds the email to the user's properties.
properties.email = email
// Create the user object.
const user = new User(properties)
@@ -35,7 +38,13 @@ export class Users extends Collection {
async save (user) {
// Serializes.
user.groups = JSON.stringify(user.groups)
let tmp
user.groups = isEmpty(tmp = user.groups)
? undefined
: JSON.stringify(tmp)
user.preferences = isEmpty(tmp = user.preferences)
? undefined
: JSON.stringify(tmp)
return /* await */ this.update(user)
}
@@ -45,13 +54,8 @@ export class Users extends Collection {
// Deserializes
forEach(users, user => {
const {groups} = user
try {
user.groups = groups ? JSON.parse(groups) : []
} catch (_) {
console.warn('cannot parse user.groups:', groups)
user.groups = []
}
user.groups = parseProp('user', user, 'groups', [])
user.preferences = parseProp('user', user, 'preferences', {})
})
return users

src/models/utils.js (new file, 14 lines)

@@ -0,0 +1,14 @@
import prettyFormat from 'pretty-format'
export const parseProp = (type, obj, name, defaultValue) => {
const value = obj[name]
if (value == null) {
return defaultValue
}
try {
return JSON.parse(value)
} catch (error) {
console.warn('cannot parse %ss[%j].%s (%s):', type, obj.id, name, prettyFormat(value), error)
return defaultValue
}
}
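Hedged usage sketch, mirroring how the collections below use it: parseProp returns the parsed JSON, or the default value when the property is missing or malformed.

// user.groups is stored as a JSON string in Redis
const user = { id: 'user:1', groups: '["group:1","group:2"]' }
parseProp('user', user, 'groups', []) // → [ 'group:1', 'group:2' ]

// a missing value silently falls back to the default…
parseProp('user', { id: 'user:2' }, 'groups', []) // → []

// …an unparsable one also logs a warning
parseProp('user', { id: 'user:3', groups: '{oops' }, 'groups', []) // → []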


@@ -7,6 +7,7 @@ import {
import {
addChecksumToReadStream,
getPseudoRandomBytes,
noop,
pCatch,
streamToBuffer,
@@ -15,7 +16,7 @@ import {
export default class RemoteHandlerAbstract {
constructor (remote) {
this._remote = parse({...remote})
this._remote = {...remote, ...parse(remote.url)}
if (this._remote.type !== this.type) {
throw new Error('Incorrect remote type')
}
@@ -47,6 +48,32 @@ export default class RemoteHandlerAbstract {
throw new Error('Not implemented')
}
async test () {
const testFileName = `${Date.now()}.test`
const data = getPseudoRandomBytes(1024 * 1024)
let step = 'write'
try {
await this.outputFile(testFileName, data)
step = 'read'
const read = await this.readFile(testFileName)
if (data.compare(read) !== 0) {
throw new Error('output and input did not match')
}
return {
success: true
}
} catch (error) {
return {
success: false,
step,
file: testFileName,
error: error.message || String(error)
}
} finally {
this.unlink(testFileName).catch(noop)
}
}
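A hedged sketch of how a caller might interpret the result of test() (the handler instance is hypothetical):

const reportRemoteStatus = async handler => {
  const result = await handler.test()

  if (result.success) {
    console.log('remote is usable')
  } else {
    // step is either 'write' or 'read', error is the failure message
    console.warn(`remote test failed at step ${result.step}: ${result.error}`)
  }
}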
async outputFile (file, data, options) {
return this._outputFile(file, data, {
flags: 'wx',
@@ -85,48 +112,64 @@ export default class RemoteHandlerAbstract {
throw new Error('Not implemented')
}
async createReadStream (file, {
createReadStream (file, {
checksum = false,
ignoreMissingChecksum = false,
...options
} = {}) {
const streamP = this._createReadStream(file, options).then(async stream => {
await eventToPromise(stream, 'readable')
const streamP = this._createReadStream(file, options).then(stream => {
// detect early errors
let promise = eventToPromise(stream, 'readable')
if (stream.length === undefined) {
stream.length = await this.getSize(file)::pCatch(noop)
// try to add the length prop if missing and not a range stream
if (
stream.length === undefined &&
options.end === undefined &&
options.start === undefined
) {
promise = Promise.all([ promise, this.getSize(file).then(size => {
stream.length = size
}, noop) ])
}
return stream
return promise.then(() => stream)
})
if (!checksum) {
return streamP
}
try {
checksum = await this.readFile(`${file}.checksum`)
} catch (error) {
if (error.code === 'ENOENT' && ignoreMissingChecksum) {
return streamP
// avoid an unhandled rejection warning
streamP.catch(noop)
return this.readFile(`${file}.checksum`).then(
checksum => streamP.then(stream => {
const { length } = stream
stream = validChecksumOfReadStream(stream, String(checksum).trim())
stream.length = length
return stream
}),
error => {
if (ignoreMissingChecksum && error && error.code === 'ENOENT') {
return streamP
}
throw error
}
throw error
}
let stream = await streamP
const { length } = stream
stream = validChecksumOfReadStream(stream, checksum.toString())
stream.length = length
return stream
)
}
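Hedged usage sketch of the reworked createReadStream (the file name is hypothetical): the checksum is validated against `<file>.checksum` while streaming, and a missing checksum file can be tolerated.

const streamBackup = async handler => {
  const stream = await handler.createReadStream('vm-backup.vhd', {
    checksum: true,
    // do not fail if vm-backup.vhd.checksum does not exist
    ignoreMissingChecksum: true
  })

  // length is only populated for full reads (no start/end options)
  console.log('expected size:', stream.length)
  return stream
}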
async _createReadStream (file, options) {
throw new Error('Not implemented')
}
async refreshChecksum (path) {
const stream = addChecksumToReadStream(await this.createReadStream(path))
stream.resume() // start reading the whole file
const checksum = await stream.checksum
await this.outputFile(`${path}.checksum`, checksum)
}
async createOutputStream (file, {
checksum = false,
...options


@@ -1,5 +1,5 @@
import fs from 'fs-promise'
import startsWith from 'lodash.startswith'
import startsWith from 'lodash/startsWith'
import {
dirname,
resolve
@@ -12,16 +12,21 @@ import {
export default class LocalHandler extends RemoteHandlerAbstract {
get type () {
return 'local'
return 'file'
}
_getRealPath () {
return this._remote.path
}
_getFilePath (file) {
const parts = [this._remote.path]
const realPath = this._getRealPath()
const parts = [realPath]
if (file) {
parts.push(file)
}
const path = resolve.apply(null, parts)
if (!startsWith(path, this._remote.path)) {
if (!startsWith(path, realPath)) {
throw new Error('Remote path is unavailable')
}
return path
@@ -30,8 +35,9 @@ export default class LocalHandler extends RemoteHandlerAbstract {
async _sync () {
if (this._remote.enabled) {
try {
await fs.ensureDir(this._remote.path)
await fs.access(this._remote.path, fs.R_OK | fs.W_OK)
const path = this._getRealPath()
await fs.ensureDir(path)
await fs.access(path, fs.R_OK | fs.W_OK)
} catch (exc) {
this._remote.enabled = false
this._remote.error = exc.message
@@ -47,7 +53,7 @@ export default class LocalHandler extends RemoteHandlerAbstract {
async _outputFile (file, data, options) {
const path = this._getFilePath(file)
await fs.ensureDir(dirname(path))
await fs.writeFile(this._getFilePath(file), data, options)
await fs.writeFile(path, data, options)
}
async _readFile (file, options) {


@@ -11,6 +11,10 @@ export default class NfsHandler extends LocalHandler {
return 'nfs'
}
_getRealPath () {
return `/run/xo-server/mounts/${this._remote.id}`
}
async _loadRealMounts () {
let stdout
const mounted = {}
@@ -37,27 +41,27 @@ export default class NfsHandler extends LocalHandler {
return mounted
}
_matchesRealMount (remote) {
return remote.path in this._realMounts
_matchesRealMount () {
return this._getRealPath() in this._realMounts
}
async _mount (remote) {
await fs.ensureDir(remote.path)
return execa('mount', ['-t', 'nfs', '-o', 'vers=3', `${remote.host}:/${remote.share}`, remote.path])
async _mount () {
await fs.ensureDir(this._getRealPath())
return execa('mount', ['-t', 'nfs', '-o', 'vers=3', `${this._remote.host}:${this._remote.path}`, this._getRealPath()])
}
async _sync () {
await this._loadRealMounts()
if (this._matchesRealMount(this._remote) && !this._remote.enabled) {
if (this._matchesRealMount() && !this._remote.enabled) {
try {
await this._umount(this._remote)
} catch (exc) {
this._remote.enabled = true
this._remote.error = exc.message
}
} else if (!this._matchesRealMount(this._remote) && this._remote.enabled) {
} else if (!this._matchesRealMount() && this._remote.enabled) {
try {
await this._mount(this._remote)
await this._mount()
} catch (exc) {
this._remote.enabled = false
this._remote.error = exc.message
@@ -75,6 +79,6 @@ export default class NfsHandler extends LocalHandler {
}
async _umount (remote) {
await execa('umount', [remote.path])
await execa('umount', [this._getRealPath()])
}
}


@@ -1,5 +1,3 @@
import paramsVector from 'job/params-vector'
export default {
$schema: 'http://json-schema.org/draft-04/schema#',
type: 'object',
@@ -27,7 +25,9 @@ export default {
type: 'string',
description: 'called method'
},
paramsVector
paramsVector: {
type: 'object'
}
},
required: [
'type',


@@ -1,59 +0,0 @@
export default {
$schema: 'http://json-schema.org/draft-04/schema#',
type: 'object',
properties: {
type: {
enum: ['crossProduct']
},
items: {
type: 'array',
description: 'vector of values to multiply with other vectors',
items: {
type: 'object',
properties: {
type: {
enum: ['set']
},
values: {
type: 'array',
items: {
type: 'object'
},
minItems: 1
}
},
required: [
'type',
'values'
]
},
minItems: 1
}
},
required: [
'type',
'items'
]
}
/* Example:
{
"type": "cross product",
"items": [
{
"type": "set",
"values": [
{"id": 0, "name": "snapshost de 0"},
{"id": 1, "name": "snapshost de 1"}
],
},
{
"type": "set",
"values": [
{"force": true}
]
}
]
}
*/


@@ -20,7 +20,7 @@ export default {
},
unloadable: {
type: 'boolean',
default: 'true',
default: true,
description: 'whether or not this plugin can be unloaded'
},
configuration: {
@@ -30,6 +30,14 @@ export default {
configurationSchema: {
$ref: 'http://json-schema.org/draft-04/schema#',
description: 'configuration schema for this plugin (not present if not configurable)'
},
testable: {
type: 'boolean',
description: 'whether or not this plugin can be tested'
},
testSchema: {
$ref: 'http://json-schema.org/draft-04/schema#',
description: 'test schema for this plugin'
}
},
required: [

src/schemas/user.js (new file, 50 lines)

@@ -0,0 +1,50 @@
export default {
$schema: 'http://json-schema.org/draft-04/schema#',
type: 'object',
properties: {
id: {
type: 'string',
description: 'unique identifier for this user'
},
email: {
type: 'string',
description: 'email address of this user'
},
groups: {
type: 'array',
items: {
type: 'string'
},
description: 'identifiers of the groups this user belongs to'
},
permission: {
enum: ['none', 'read', 'write', 'admin'],
description: 'root permission for this user, none and admin are the only significant ones'
},
preferences: {
type: 'object',
properties: {
lang: { type: 'string' },
sshKeys: {
type: 'array',
items: {
type: 'object',
properties: {
key: { type: 'string' },
title: { type: 'string' }
},
required: [
'key',
'title'
]
}
}
},
description: 'various user preferences'
}
},
required: [
'id',
'email'
]
}


@@ -0,0 +1,44 @@
import assert from 'assert'
const streamToExistingBuffer = (
stream,
buffer,
offset = 0,
end = buffer.length
) => new Promise((resolve, reject) => {
assert(offset >= 0)
assert(end > offset)
assert(end <= buffer.length)
let i = offset
const onData = chunk => {
const prev = i
i += chunk.length
if (i > end) {
return onError(new Error('too much data'))
}
chunk.copy(buffer, prev)
}
stream.on('data', onData)
const clean = () => {
stream.removeListener('data', onData)
stream.removeListener('end', onEnd)
stream.removeListener('error', onError)
}
const onEnd = () => {
resolve(i - offset)
clean()
}
stream.on('end', onEnd)
const onError = error => {
reject(error)
clean()
}
stream.on('error', onError)
})
export { streamToExistingBuffer as default }


@@ -0,0 +1,20 @@
/* eslint-env jest */
import { createReadStream, readFile } from 'fs'
import { fromCallback } from 'promise-toolbox'
import streamToExistingBuffer from './stream-to-existing-buffer'
describe('streamToExistingBuffer()', () => {
it('reads the content of a stream in a buffer', async () => {
const stream = createReadStream(__filename)
const expected = await fromCallback(cb => readFile(__filename, 'utf-8', cb))
const buf = Buffer.allocUnsafe(expected.length + 1)
buf[0] = 'A'.charCodeAt()
await streamToExistingBuffer(stream, buf, 1)
expect(String(buf)).toBe(`A${expected}`)
})
})


@@ -0,0 +1,27 @@
const streamToNewBuffer = stream => new Promise((resolve, reject) => {
const chunks = []
let length = 0
const onData = chunk => {
chunks.push(chunk)
length += chunk.length
}
stream.on('data', onData)
const clean = () => {
stream.removeListener('data', onData)
stream.removeListener('end', onEnd)
stream.removeListener('error', onError)
}
const onEnd = () => {
resolve(Buffer.concat(chunks, length))
clean()
}
stream.on('end', onEnd)
const onError = error => {
reject(error)
clean()
}
stream.on('error', onError)
})
export { streamToNewBuffer as default }


@@ -1,20 +1,33 @@
import base64url from 'base64url'
import eventToPromise from 'event-to-promise'
import forEach from 'lodash.foreach'
import getStream from 'get-stream'
import has from 'lodash.has'
import forEach from 'lodash/forEach'
import has from 'lodash/has'
import highland from 'highland'
import humanFormat from 'human-format'
import invert from 'lodash.invert'
import isArray from 'lodash.isarray'
import isString from 'lodash.isstring'
import invert from 'lodash/invert'
import isArray from 'lodash/isArray'
import isString from 'lodash/isString'
import keys from 'lodash/keys'
import kindOf from 'kindof'
import multiKeyHashInt from 'multikey-hash'
import pick from 'lodash/pick'
import tmp from 'tmp'
import xml2js from 'xml2js'
import { resolve } from 'path'
// Moment timezone can only be loaded once; importing it here is a workaround to
// load the latest version first, because the cron module depends on an old version
// of moment that does not implement `guess()`, for example.
import 'moment-timezone'
import through2 from 'through2'
import { CronJob } from 'cron'
import { Readable } from 'stream'
import { utcFormat, utcParse } from 'd3-time-format'
import {
all as pAll,
defer,
fromCallback,
promisify,
reflect as pReflect
} from 'promise-toolbox'
@@ -22,9 +35,6 @@ import {
createHash,
randomBytes
} from 'crypto'
import { Readable } from 'stream'
import through2 from 'through2'
import {utcFormat as d3TimeFormat} from 'd3-time-format'
// ===================================================================
@@ -46,13 +56,13 @@ export function bufferToStream (buf) {
return stream
}
export const streamToBuffer = getStream.buffer
export streamToBuffer from './stream-to-new-buffer'
// -------------------------------------------------------------------
export function camelToSnakeCase (string) {
return string.replace(
/([a-z])([A-Z])/g,
/([a-z0-9])([A-Z])/g,
(_, prevChar, currChar) => `${prevChar}_${currChar.toLowerCase()}`
)
}
@@ -66,6 +76,27 @@ export const createRawObject = Object.create
// -------------------------------------------------------------------
// Only works with string items!
export const diffItems = (coll1, coll2) => {
const removed = createRawObject()
forEach(coll2, value => {
removed[value] = true
})
const added = []
forEach(coll1, value => {
if (value in removed) {
delete removed[value]
} else {
added.push(value)
}
})
return [ added, keys(removed) ]
}
// -------------------------------------------------------------------
const ALGORITHM_TO_ID = {
md5: '1',
sha256: '5',
@@ -170,7 +201,14 @@ export function extractProperty (obj, prop) {
// -------------------------------------------------------------------
export const generateUnsecureToken = (n = 32) => {
export const getUserPublicProperties = user => pick(
user.properties || user,
'id', 'email', 'groups', 'permission', 'preferences', 'provider'
)
// -------------------------------------------------------------------
export const getPseudoRandomBytes = n => {
const bytes = new Buffer(n)
const odd = n & 1
@@ -182,13 +220,15 @@ export const generateUnsecureToken = (n = 32) => {
bytes.writeUInt8(Math.random() * 256 | 0, n - 1)
}
return base64url(bytes)
return bytes
}
export const generateUnsecureToken = (n = 32) => base64url(getPseudoRandomBytes(n))
// Generate a secure random Base64 string.
export const generateToken = (randomBytes => {
return (n = 32) => randomBytes(n).then(base64url)
})(randomBytes::promisify())
})(promisify(randomBytes))
// -------------------------------------------------------------------
@@ -230,10 +270,12 @@ export const parseXml = (function () {
// - methods are already bound and chainable
export const lightSet = collection => {
const data = createRawObject()
collection && forEach(collection, value => {
data[value] = true
})
collection = null
if (collection) {
forEach(collection, value => {
data[value] = true
})
collection = null
}
const set = {
add: value => {
@@ -250,7 +292,8 @@ export const lightSet = collection => {
delete data[value]
return set
},
has: value => data[value]
has: value => data[value],
toArray: () => keys(data)
}
return set
}
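Hedged usage sketch of lightSet and its new toArray() method:

const tags = lightSet([ 'prod' ])

tags.add('backup').add('prod') // chainable, duplicates are ignored
tags.has('prod') // → true

tags.delete('prod')
tags.toArray() // → [ 'backup' ]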
@@ -350,22 +393,24 @@ export const popProperty = obj => {
// Format a date in ISO 8601 in a safe way to be used in filenames
// (even on Windows).
export const safeDateFormat = d3TimeFormat('%Y%m%dT%H%M%SZ')
export const safeDateFormat = utcFormat('%Y%m%dT%H%M%SZ')
export const safeDateParse = utcParse('%Y%m%dT%H%M%SZ')
// -------------------------------------------------------------------
// These functions are often used throughout xo-server.
//
// Exports them from here to avoid direct dependencies on lodash.
export { default as forEach } from 'lodash.foreach' // eslint-disable-line no-duplicate-imports
export { default as isArray } from 'lodash.isarray' // eslint-disable-line no-duplicate-imports
export { default as isBoolean } from 'lodash.isboolean'
export { default as isEmpty } from 'lodash.isempty'
export { default as isFunction } from 'lodash.isfunction'
export { default as isInteger } from 'lodash.isinteger'
export { default as isObject } from 'lodash.isobject'
export { default as isString } from 'lodash.isstring' // eslint-disable-line no-duplicate-imports
export { default as mapToArray } from 'lodash.map'
// Exports them from here to avoid direct dependencies on lodash.
export { default as forEach } from 'lodash/forEach' // eslint-disable-line no-duplicate-imports
export { default as isArray } from 'lodash/isArray' // eslint-disable-line no-duplicate-imports
export { default as isBoolean } from 'lodash/isBoolean'
export { default as isEmpty } from 'lodash/isEmpty'
export { default as isFunction } from 'lodash/isFunction'
export { default as isInteger } from 'lodash/isInteger'
export { default as isObject } from 'lodash/isObject'
export { default as isString } from 'lodash/isString' // eslint-disable-line no-duplicate-imports
export { default as mapToArray } from 'lodash/map'
// -------------------------------------------------------------------
@@ -417,6 +462,11 @@ export const multiKeyHash = (...args) => new Promise(resolve => {
// -------------------------------------------------------------------
export const resolveSubpath = (root, path) =>
resolve(root, `./${resolve('/', path)}`)
// -------------------------------------------------------------------
export const streamToArray = (stream, {
filter,
mapper
@@ -433,27 +483,30 @@ export const streamToArray = (stream, {
// -------------------------------------------------------------------
export const scheduleFn = (cronPattern, fn) => {
export const scheduleFn = (cronTime, fn, timeZone) => {
let running = false
const job = new CronJob(cronPattern, async () => {
if (running) {
return
}
const job = new CronJob({
cronTime,
onTick: async () => {
if (running) {
return
}
running = true
running = true
try {
await fn()
} catch (error) {
console.error('[WARN] scheduled function:', error && error.stack || error)
} finally {
running = false
}
try {
await fn()
} catch (error) {
console.error('[WARN] scheduled function:', error && error.stack || error)
} finally {
running = false
}
},
start: true,
timeZone
})
job.start()
return () => {
job.stop()
}
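Hedged usage sketch of the new signature (the cron expression, job function and timezone are examples): scheduleFn returns a function which stops the job.

// run a nightly task at 02:30 Paris time, never overlapping itself
const stopSchedule = scheduleFn('30 2 * * *', async () => {
  await runNightlyBackup() // hypothetical job function
}, 'Europe/Paris')

// later, e.g. on shutdown
stopSchedule()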
@@ -461,5 +514,58 @@ export const scheduleFn = (cronPattern, fn) => {
// -------------------------------------------------------------------
// Create a serializable object from an error.
export const serializeError = error => ({
message: error.message,
stack: error.stack,
...error // Copy enumerable properties.
})
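For context, Error instances lose their message and stack through JSON.stringify, which is why the loggers store a plain object instead; an illustrative sketch:

const error = new Error('no such patch')

JSON.stringify(error) // → '{}' (message and stack are not enumerable)
JSON.stringify(serializeError(error)) // → contains message, stack and any extra enumerable properties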
// -------------------------------------------------------------------
// Create an array which contains the results of one thunk function.
// Only works with synchronous thunks.
export const thunkToArray = thunk => {
const values = []
thunk(::values.push)
return values
}
// -------------------------------------------------------------------
// Creates a new function which throws an error.
//
// ```js
// promise.catch(throwFn('an error has occurred'))
//
// function foo (param = throwFn('param is required')()) {}
// ```
export const throwFn = error => () => {
throw (
isString(error)
? new Error(error)
: error
)
}
// -------------------------------------------------------------------
export const tmpDir = () => fromCallback(cb => tmp.dir(cb))
// -------------------------------------------------------------------
// Wrap a value in a function.
export const wrap = value => () => value
// -------------------------------------------------------------------
export const mapFilter = (collection, iteratee) => {
const result = []
forEach(collection, (...args) => {
const value = iteratee(...args)
if (value) {
result.push(value)
}
})
return result
}


@@ -1,12 +1,9 @@
/* eslint-env mocha */
import expect from 'must'
// ===================================================================
/* eslint-env jest */
import {
camelToSnakeCase,
createRawObject,
diffItems,
ensureArray,
extractProperty,
formatXml,
@@ -19,57 +16,69 @@ import {
describe('camelToSnakeCase()', function () {
it('converts a string from camelCase to snake_case', function () {
expect(camelToSnakeCase('fooBar')).to.equal('foo_bar')
expect(camelToSnakeCase('fooBar')).toBe('foo_bar')
expect(camelToSnakeCase('ipv4Allowed')).toBe('ipv4_allowed')
})
it('does not alter snake_case strings', function () {
expect(camelToSnakeCase('foo_bar')).to.equal('foo_bar')
expect(camelToSnakeCase('foo_bar')).toBe('foo_bar')
expect(camelToSnakeCase('ipv4_allowed')).toBe('ipv4_allowed')
})
it('does not alter upper case letters except those from the camelCase', function () {
expect(camelToSnakeCase('fooBar_BAZ')).to.equal('foo_bar_BAZ')
expect(camelToSnakeCase('fooBar_BAZ')).toBe('foo_bar_BAZ')
})
})
// -------------------------------------------------------------------
describe('createRawObject()', () => {
it('returns an object', () => {
expect(createRawObject()).to.be.an.object()
})
it('returns an empty object', () => {
expect(createRawObject()).to.be.empty()
expect(createRawObject()).toEqual({})
})
it('creates a new object each time', () => {
expect(createRawObject()).to.not.equal(createRawObject())
expect(createRawObject()).not.toBe(createRawObject())
})
if (Object.getPrototypeOf) {
it('creates an object without a prototype', () => {
expect(Object.getPrototypeOf(createRawObject())).to.be.null()
expect(Object.getPrototypeOf(createRawObject())).toBe(null)
})
}
})
// -------------------------------------------------------------------
describe('diffItems', () => {
it('computes the added/removed items between 2 iterables', () => {
expect(diffItems(
['foo', 'bar'],
['baz', 'foo']
)).toEqual([
['bar'],
['baz']
])
})
})
// -------------------------------------------------------------------
describe('ensureArray()', function () {
it('wraps the value in an array', function () {
const value = 'foo'
expect(ensureArray(value)).to.eql([value])
expect(ensureArray(value)).toEqual([value])
})
it('returns an empty array for undefined', function () {
expect(ensureArray(undefined)).to.eql([])
expect(ensureArray(undefined)).toEqual([])
})
it('returns the object itself if it is already an array', function () {
const array = ['foo', 'bar', 'baz']
expect(ensureArray(array)).to.equal(array)
expect(ensureArray(array)).toBe(array)
})
})
@@ -80,15 +89,15 @@ describe('extractProperty()', function () {
const value = {}
const obj = { prop: value }
expect(extractProperty(obj, 'prop')).to.equal(value)
expect(extractProperty(obj, 'prop')).toBe(value)
})
it('removes the property from the object', function () {
const value = {}
const obj = { prop: value }
expect(extractProperty(obj, 'prop')).to.equal(value)
expect(obj).to.not.have.property('prop')
expect(extractProperty(obj, 'prop')).toBe(value)
expect(obj.prop).not.toBeDefined()
})
})
@@ -103,7 +112,7 @@ describe('formatXml()', function () {
{$: {baz: 'plip'}}
]
}
})).to.equal(`<foo>
})).toBe(`<foo>
<bar baz="plop"/>
<bar baz="plip"/>
</foo>`)
@@ -114,7 +123,7 @@ describe('formatXml()', function () {
describe('generateToken()', () => {
it('generates a string', async () => {
expect(await generateToken()).to.be.a.string()
expect(typeof await generateToken()).toBe('string')
})
})
@@ -122,21 +131,21 @@ describe('generateToken()', () => {
describe('parseSize()', function () {
it('parses a human size', function () {
expect(parseSize('1G')).to.equal(1e9)
expect(parseSize('1G')).toBe(1e9)
})
it('returns the parameter if already a number', function () {
expect(parseSize(1e6)).to.equal(1e6)
expect(parseSize(1e6)).toBe(1e6)
})
it('throws if the string cannot be parsed', function () {
expect(function () {
parseSize('foo')
}).to.throw()
}).toThrow()
})
it('supports the B unit as suffix', function () {
expect(parseSize('3MB')).to.equal(3e6)
expect(parseSize('3MB')).toBe(3e6)
})
})
@@ -154,21 +163,21 @@ describe('pSettle()', () => {
Promise.reject('fatality')
])
expect(status1.isRejected()).to.equal(false)
expect(status2.isRejected()).to.equal(false)
expect(status3.isRejected()).to.equal(true)
expect(status1.isRejected()).toBe(false)
expect(status2.isRejected()).toBe(false)
expect(status3.isRejected()).toBe(true)
expect(status1.isFulfilled()).to.equal(true)
expect(status2.isFulfilled()).to.equal(true)
expect(status3.isFulfilled()).to.equal(false)
expect(status1.isFulfilled()).toBe(true)
expect(status2.isFulfilled()).toBe(true)
expect(status3.isFulfilled()).toBe(false)
expect(status1.value()).to.equal(42)
expect(status2.value()).to.equal(Math.PI)
expect(::status3.value).to.throw()
expect(status1.value()).toBe(42)
expect(status2.value()).toBe(Math.PI)
expect(::status3.value).toThrow()
expect(::status1.reason).to.throw()
expect(::status2.reason).to.throw()
expect(status3.reason()).to.equal('fatality')
expect(::status1.reason).toThrow()
expect(::status2.reason).toThrow()
expect(status3.reason()).toBe('fatality')
})
it('works with objects', async () => {
@@ -182,20 +191,20 @@ describe('pSettle()', () => {
c: Promise.reject('fatality')
})
expect(status1.isRejected()).to.equal(false)
expect(status2.isRejected()).to.equal(false)
expect(status3.isRejected()).to.equal(true)
expect(status1.isRejected()).toBe(false)
expect(status2.isRejected()).toBe(false)
expect(status3.isRejected()).toBe(true)
expect(status1.isFulfilled()).to.equal(true)
expect(status2.isFulfilled()).to.equal(true)
expect(status3.isFulfilled()).to.equal(false)
expect(status1.isFulfilled()).toBe(true)
expect(status2.isFulfilled()).toBe(true)
expect(status3.isFulfilled()).toBe(false)
expect(status1.value()).to.equal(42)
expect(status2.value()).to.equal(Math.PI)
expect(::status3.value).to.throw()
expect(status1.value()).toBe(42)
expect(status2.value()).toBe(Math.PI)
expect(::status3.value).toThrow()
expect(::status1.reason).to.throw()
expect(::status2.reason).to.throw()
expect(status3.reason()).to.equal('fatality')
expect(::status1.reason).toThrow()
expect(::status2.reason).toThrow()
expect(status3.reason()).toBe('fatality')
})
})


@@ -1,4 +1,7 @@
import fu from 'struct-fu'
// TODO: remove once completely merged in vhd.js
import fu from '@nraynaud/struct-fu'
import isEqual from 'lodash/isEqual'
import {
noop,
@@ -91,7 +94,7 @@ const fuHeader = fu.struct([
fu.uint8('parentUuid', 16),
fu.uint32('parentTimestamp'),
fu.uint32('reserved1'),
fu.char('parentUnicodeName', 512),
fu.char16be('parentUnicodeName', 512),
fu.struct('parentLocatorEntry', [
fu.uint32('platformCode'),
fu.uint32('platformDataSpace'),
@@ -144,24 +147,24 @@ const unpackField = (field, buf) => {
}
// ===================================================================
// Returns the checksum of a raw footer.
// The raw footer is altered with the new sum.
function checksumFooter (rawFooter) {
const checksumField = fuFooter.fields.checksum
// Returns the checksum of a raw struct.
// The raw struct (footer or header) is altered with the new sum.
function checksumStruct (rawStruct, struct) {
const checksumField = struct.fields.checksum
let sum = 0
// Reset current sum.
packField(checksumField, 0, rawFooter)
packField(checksumField, 0, rawStruct)
for (let i = 0; i < VHD_FOOTER_SIZE; i++) {
sum = (sum + rawFooter[i]) & 0xFFFFFFFF
for (let i = 0, n = struct.size; i < n; i++) {
sum = (sum + rawStruct[i]) & 0xFFFFFFFF
}
sum = 0xFFFFFFFF - sum
// Write new sum.
packField(checksumField, sum, rawFooter)
packField(checksumField, sum, rawStruct)
return sum
}
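In other words, the checksum is the complement of the byte sum computed with the checksum field zeroed; a hedged sketch of verifying a footer read from disk (same logic as readHeaderAndFooter below):

// read the stored sum first: checksumStruct() resets the field and writes the new sum back
const stored = unpackField(fuFooter.fields.checksum, buf)
const computed = checksumStruct(buf, fuFooter)

if (computed !== stored) {
  throw new Error('invalid VHD footer checksum')
}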
@@ -257,7 +260,7 @@ class Vhd {
)
const sum = unpackField(fuFooter.fields.checksum, buf)
const sumToTest = checksumFooter(buf)
const sumToTest = checksumStruct(buf, fuFooter)
// Checksum child & parent.
if (sumToTest !== sum) {
@@ -494,25 +497,36 @@ class Vhd {
}
}
// Write a context footer. (At the end and beggining of a vhd file.)
// Write a context footer. (At the end and beginning of a vhd file.)
async writeFooter () {
const { footer } = this
const offset = this.getEndOfData()
const rawFooter = fuFooter.pack(footer)
footer.checksum = checksumFooter(rawFooter)
footer.checksum = checksumStruct(rawFooter, fuFooter)
debug(`Write footer at: ${offset} (checksum=${footer.checksum}). (data=${rawFooter.toString('hex')})`)
await this._write(rawFooter, 0)
await this._write(rawFooter, offset)
}
async writeHeader () {
const { header } = this
const rawHeader = fuHeader.pack(header)
header.checksum = checksumStruct(rawHeader, fuHeader)
const offset = VHD_FOOTER_SIZE
debug(`Write header at: ${offset} (checksum=${header.checksum}). (data=${rawHeader.toString('hex')})`)
await this._write(rawHeader, offset)
}
}
// Merge vhd child into vhd parent.
//
// Child must be a delta backup!
// Parent must be a full backup!
//
// TODO: update the identifier of the parent VHD.
export default async function vhdMerge (
parentHandler, parentPath,
childHandler, childPath
@@ -564,3 +578,46 @@ export default async function vhdMerge (
await parentVhd.writeFooter()
}
// returns true if the child was actually modified
export async function chainVhd (
parentHandler, parentPath,
childHandler, childPath
) {
const parentVhd = new Vhd(parentHandler, parentPath)
const childVhd = new Vhd(childHandler, childPath)
await Promise.all([
parentVhd.readHeaderAndFooter(),
childVhd.readHeaderAndFooter()
])
const { header } = childVhd
const parentName = parentPath.split('/').pop()
const parentUuid = parentVhd.footer.uuid
if (
header.parentUnicodeName !== parentName ||
!isEqual(header.parentUuid, parentUuid)
) {
header.parentUuid = parentUuid
header.parentUnicodeName = parentName
await childVhd.writeHeader()
return true
}
// The checksum was broken between xo-server v5.2.4 and v5.2.5
//
// Replace it with a correct checksum if necessary.
//
// TODO: remove when enough time has passed (6 months).
{
const rawHeader = fuHeader.pack(header)
const checksum = checksumStruct(rawHeader, fuHeader)
if (checksum !== header.checksum) {
await childVhd._write(rawHeader, VHD_FOOTER_SIZE)
return true
}
}
return false
}
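Hedged usage sketch (the handler and paths are hypothetical): chainVhd points a delta VHD at its parent and reports whether the child header had to be rewritten.

const relinkDelta = async handler => {
  const modified = await chainVhd(
    handler, 'vm/20170127T120000Z_full.vhd',
    handler, 'vm/20170127T180000Z_delta.vhd'
  )

  if (modified) {
    console.log('child header updated to reference its parent')
  }
}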


@@ -1,8 +1,14 @@
import {
includes,
pickBy
} from 'lodash'
import {
ensureArray,
extractProperty,
forEach,
isArray,
isEmpty,
mapToArray,
parseXml
} from './utils'
@@ -111,6 +117,7 @@ const TRANSFORMS = {
iSCSI_name: otherConfig.iscsi_iqn || null,
license_params: obj.license_params,
license_server: obj.license_server,
license_expiry: toTimestamp(obj.license_params.expiry),
name_description: obj.name_description,
name_label: obj.name_label,
memory: (function () {
@@ -138,6 +145,7 @@ const TRANSFORMS = {
? (isRunning ? 'Running' : 'Halted')
: 'Unknown',
startTime: toTimestamp(otherConfig.boot_time),
supplementalPacks: pickBy(obj.software_version, (value, key) => includes(key, ':')),
agentStartTime: toTimestamp(otherConfig.agent_start_time),
tags: obj.tags,
version: obj.software_version.product_version,
@@ -173,6 +181,34 @@ const TRANSFORMS = {
const isHvm = isVmHvm(obj)
const isRunning = isVmRunning(obj)
const xenTools = (() => {
if (!isRunning || !metrics) {
// Unknown status, returns nothing.
return
}
if (!guestMetrics) {
return false
}
const { PV_drivers_version: { major, minor } } = guestMetrics
if (major === undefined || minor === undefined) {
return false
}
return guestMetrics.PV_drivers_up_to_date
? 'up to date'
: 'out of date'
})()
let resourceSet = otherConfig['xo:resource_set']
if (resourceSet) {
try {
resourceSet = JSON.parse(resourceSet)
} catch (_) {
resourceSet = undefined
}
}
const vm = {
// type is redefined after for controllers/, templates &
@@ -185,7 +221,7 @@ const TRANSFORMS = {
CPUs: {
max: +obj.VCPUs_max,
number: (
isRunning && metrics
isRunning && metrics && xenTools
? +metrics.VCPUs_number
: +obj.VCPUs_at_startup
)
@@ -212,7 +248,8 @@ const TRANSFORMS = {
return {
enabled: true,
info: info && parseXml(info).docker_info,
process: process && parseXml(process).docker_ps,
containers: ensureArray(process && parseXml(process).docker_ps.item),
process: process && parseXml(process).docker_ps, // deprecated (only used in v4)
version: version && parseXml(version).docker_version
}
})(),
@@ -246,11 +283,13 @@ const TRANSFORMS = {
return memory
})(),
installTime: metrics && toTimestamp(metrics.install_time),
name_description: obj.name_description,
name_label: obj.name_label,
other: otherConfig,
os_version: guestMetrics && guestMetrics.os_version || null,
power_state: obj.power_state,
resourceSet,
snapshots: link(obj, 'snapshots'),
startTime: metrics && toTimestamp(metrics.start_time),
tags: obj.tags,
@@ -263,25 +302,7 @@ const TRANSFORMS = {
// - false: not optimized
// - 'out of date': optimized but drivers should be updated
// - 'up to date': optimized
xenTools: (() => {
if (!isRunning || !metrics) {
// Unknown status, returns nothing.
return
}
if (!guestMetrics) {
return false
}
const { PV_drivers_version: { major, minor } } = guestMetrics
if (major === undefined || minor === undefined) {
return false
}
return guestMetrics.PV_drivers_up_to_date
? 'up to date'
: 'out of date'
})(),
xenTools,
$container: (
isRunning
@@ -333,8 +354,10 @@ const TRANSFORMS = {
}
}
if (obj.VCPUs_params && obj.VCPUs_params.weight) {
vm.cpuWeight = obj.VCPUs_params.weight
let tmp
if ((tmp = obj.VCPUs_params)) {
tmp.cap && (vm.cpuCap = +tmp.cap)
tmp.weight && (vm.cpuWeight = +tmp.weight)
}
if (!isHvm) {
@@ -358,6 +381,7 @@ const TRANSFORMS = {
name_description: obj.name_description,
name_label: obj.name_label,
size: +obj.physical_size,
shared: Boolean(obj.shared),
SR_type: obj.type,
tags: obj.tags,
usage: +obj.virtual_allocation,
@@ -378,7 +402,7 @@ const TRANSFORMS = {
return {
type: 'PBD',
attached: obj.currently_attached,
attached: Boolean(obj.currently_attached),
host: link(obj, 'host'),
SR: link(obj, 'SR')
}
@@ -387,10 +411,13 @@ const TRANSFORMS = {
// -----------------------------------------------------------------
pif (obj) {
const metrics = obj.$metrics
return {
type: 'PIF',
attached: Boolean(obj.currently_attached),
isBondMaster: !isEmpty(obj.bond_master_of),
device: obj.device,
dns: obj.DNS,
disallowUnplug: Boolean(obj.disallow_unplug),
@@ -398,6 +425,7 @@ const TRANSFORMS = {
ip: obj.IP,
mac: obj.MAC,
management: Boolean(obj.management), // TODO: find a better name.
carrier: Boolean(metrics && metrics.carrier),
mode: obj.ip_configuration_mode,
mtu: +obj.MTU,
netmask: obj.netmask,
@@ -462,6 +490,8 @@ const TRANSFORMS = {
return {
type: 'VIF',
allowedIpv4Addresses: obj.ipv4_allowed,
allowedIpv6Addresses: obj.ipv6_allowed,
attached: Boolean(obj.currently_attached),
device: obj.device, // TODO: should it be cast to a number?
MAC: obj.MAC,
@@ -477,6 +507,7 @@ const TRANSFORMS = {
network (obj) {
return {
bridge: obj.bridge,
defaultIsLocked: obj.default_locking_mode === 'disabled',
MTU: +obj.MTU,
name_description: obj.name_description,
name_label: obj.name_label,


@@ -1,4 +1,4 @@
import endsWith from 'lodash.endswith'
import endsWith from 'lodash/endsWith'
import JSON5 from 'json5'
import { BaseError } from 'make-error'
@@ -285,9 +285,10 @@ export default class XapiStats {
// Load
hostStats.load.push(convertNanToNull(values[hostLegends.load]))
// Memory
const memory = values[hostLegends.memory]
const memoryFree = values[hostLegends.memoryFree]
// Memory.
// WARNING! memory/memoryFree are in kB.
const memory = values[hostLegends.memory] * 1024
const memoryFree = values[hostLegends.memoryFree] * 1024
hostStats.memory.push(memory)

File diff suppressed because it is too large



@@ -0,0 +1,60 @@
import { isEmpty } from '../../utils'
import { makeEditObject } from '../utils'
export default {
async _connectVif (vif) {
await this.call('VIF.plug', vif.$ref)
},
async connectVif (vifId) {
await this._connectVif(this.getObject(vifId))
},
async _deleteVif (vif) {
await this.call('VIF.destroy', vif.$ref)
},
async deleteVif (vifId) {
const vif = this.getObject(vifId)
if (vif.currently_attached) {
await this._disconnectVif(vif)
}
await this._deleteVif(vif)
},
async _disconnectVif (vif) {
await this.call('VIF.unplug_force', vif.$ref)
},
async disconnectVif (vifId) {
await this._disconnectVif(this.getObject(vifId))
},
editVif: makeEditObject({
ipv4Allowed: {
get: true,
set: [
'ipv4Allowed',
function (value, vif) {
const lockingMode = isEmpty(value) && isEmpty(vif.ipv6_allowed)
? 'network_default'
: 'locked'
if (lockingMode !== vif.locking_mode) {
return this._set('locking_mode', lockingMode)
}
}
]
},
ipv6Allowed: {
get: true,
set: [
'ipv6Allowed',
function (value, vif) {
const lockingMode = isEmpty(value) && isEmpty(vif.ipv4_allowed)
? 'network_default'
: 'locked'
if (lockingMode !== vif.locking_mode) {
return this._set('locking_mode', lockingMode)
}
}
]
}
})
}
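Hedged usage sketch, assuming the Xapi instance exposes this mixin as editVif (makeEditObject wires the setters): restricting a VIF to specific addresses switches its locking mode to locked, clearing both lists restores network_default.

const restrictVif = async xapi => {
  // allow a single IPv4 address on this VIF (the id is hypothetical)
  await xapi.editVif('vif-id', { ipv4Allowed: [ '192.168.1.10' ] })

  // clearing both lists restores the network default locking mode
  await xapi.editVif('vif-id', { ipv4Allowed: [], ipv6Allowed: [] })
}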

src/xapi/mixins/patching.js (new file, 322 lines)

@@ -0,0 +1,322 @@
import filter from 'lodash/filter'
import includes from 'lodash/includes'
import some from 'lodash/some'
import sortBy from 'lodash/sortBy'
import unzip from 'julien-f-unzip'
import httpProxy from '../../http-proxy'
import httpRequest from '../../http-request'
import { debounce } from '../../decorators'
import {
createRawObject,
ensureArray,
forEach,
mapFilter,
mapToArray,
parseXml
} from '../../utils'
import {
debug,
put
} from '../utils'
export default {
// FIXME: should be static
@debounce(24 * 60 * 60 * 1000)
async _getXenUpdates () {
const { readAll, statusCode } = await httpRequest(
'http://updates.xensource.com/XenServer/updates.xml',
{ agent: httpProxy }
)
if (statusCode !== 200) {
throw new Error('cannot fetch patches list from Citrix')
}
const data = parseXml(await readAll()).patchdata
const patches = createRawObject()
forEach(data.patches.patch, patch => {
patches[patch.uuid] = {
date: patch.timestamp,
description: patch['name-description'],
documentationUrl: patch.url,
guidance: patch['after-apply-guidance'],
name: patch['name-label'],
url: patch['patch-url'],
uuid: patch.uuid,
conflicts: mapToArray(ensureArray(patch.conflictingpatches), patch => {
return patch.conflictingpatch.uuid
}),
requirements: mapToArray(ensureArray(patch.requiredpatches), patch => {
return patch.requiredpatch.uuid
})
// TODO: what does it mean, should we handle it?
// version: patch.version,
}
if (patches[patch.uuid].conflicts[0] === undefined) {
patches[patch.uuid].conflicts.length = 0
}
if (patches[patch.uuid].requirements[0] === undefined) {
patches[patch.uuid].requirements.length = 0
}
})
const resolveVersionPatches = function (uuids) {
const versionPatches = createRawObject()
forEach(ensureArray(uuids), ({uuid}) => {
versionPatches[uuid] = patches[uuid]
})
return versionPatches
}
const versions = createRawObject()
let latestVersion
forEach(data.serverversions.version, version => {
versions[version.value] = {
date: version.timestamp,
name: version.name,
id: version.value,
documentationUrl: version.url,
patches: resolveVersionPatches(version.patch)
}
if (version.latest) {
latestVersion = versions[version.value]
}
})
return {
patches,
latestVersion,
versions
}
},
// =================================================================
// Returns installed and not installed patches for a given host.
async _getPoolPatchesForHost (host) {
const versions = (await this._getXenUpdates()).versions
const hostVersions = host.software_version
const version =
versions[hostVersions.product_version] ||
versions[hostVersions.product_version_text]
return version
? version.patches
: []
},
_getInstalledPoolPatchesOnHost (host) {
const installed = createRawObject()
forEach(host.$patches, hostPatch => {
installed[hostPatch.$pool_patch.uuid] = true
})
return installed
},
async _listMissingPoolPatchesOnHost (host) {
const all = await this._getPoolPatchesForHost(host)
const installed = this._getInstalledPoolPatchesOnHost(host)
const installable = createRawObject()
forEach(all, (patch, uuid) => {
if (installed[uuid]) {
return
}
for (const uuid of patch.conflicts) {
if (uuid in installed) {
return
}
}
installable[uuid] = patch
})
return installable
},
async listMissingPoolPatchesOnHost (hostId) {
// Returns an array to not break compatibility.
return mapToArray(
await this._listMissingPoolPatchesOnHost(this.getObject(hostId))
)
},
async _ejectToolsIsos (hostRef) {
return Promise.all(mapFilter(
this.objects.all,
vm => {
if (vm.$type !== 'vm' || (hostRef && vm.resident_on !== hostRef)) {
return
}
const shouldEjectCd = some(vm.$VBDs, vbd => {
const vdi = vbd.$VDI
return vdi && vdi.is_tools_iso
})
if (shouldEjectCd) {
return this.ejectCdFromVm(vm.$id)
}
}
))
},
// -----------------------------------------------------------------
_isPoolPatchInstallableOnHost (patchUuid, host) {
const installed = this._getInstalledPoolPatchesOnHost(host)
if (installed[patchUuid]) {
return false
}
let installable = true
forEach(installed, patch => {
if (includes(patch.conflicts, patchUuid)) {
installable = false
return false
}
})
return installable
},
// -----------------------------------------------------------------
async uploadPoolPatch (stream, patchName = 'unknown') {
const taskRef = await this._createTask('Patch upload', patchName)
const task = this._watchTask(taskRef)
const [ patchRef ] = await Promise.all([
task,
put(stream, {
hostname: this.pool.$master.address,
path: '/pool_patch_upload',
query: {
session_id: this.sessionId,
task_id: taskRef
}
}, task)
])
return this._getOrWaitObject(patchRef)
},
async _getOrUploadPoolPatch (uuid) {
try {
return this.getObjectByUuid(uuid)
} catch (error) {}
debug('downloading patch %s', uuid)
const patchInfo = (await this._getXenUpdates()).patches[uuid]
if (!patchInfo) {
throw new Error('no such patch ' + uuid)
}
let stream = await httpRequest(patchInfo.url, { agent: httpProxy })
stream = await new Promise((resolve, reject) => {
const PATCH_RE = /\.xsupdate$/
stream.pipe(unzip.Parse()).on('entry', entry => {
if (PATCH_RE.test(entry.path)) {
entry.length = entry.size
resolve(entry)
} else {
entry.autodrain()
}
}).on('error', reject)
})
return this.uploadPoolPatch(stream, patchInfo.name)
},
// -----------------------------------------------------------------
async _installPoolPatchOnHost (patchUuid, host) {
debug('installing patch %s', patchUuid)
const [ patch ] = await Promise.all([ this._getOrUploadPoolPatch(patchUuid), this._ejectToolsIsos(host.$ref) ])
await this.call('pool_patch.apply', patch.$ref, host.$ref)
},
async installPoolPatchOnHost (patchUuid, hostId) {
return /* await */ this._installPoolPatchOnHost(
patchUuid,
this.getObject(hostId)
)
},
// -----------------------------------------------------------------
async installPoolPatchOnAllHosts (patchUuid) {
const [ patch ] = await Promise.all([ this._getOrUploadPoolPatch(patchUuid), this._ejectToolsIsos() ])
await this.call('pool_patch.pool_apply', patch.$ref)
},
// -----------------------------------------------------------------
async _installPoolPatchOnHostAndRequirements (patch, host, patchesByUuid) {
const { requirements } = patch
if (requirements.length) {
for (const requirementUuid of requirements) {
if (this._isPoolPatchInstallableOnHost(requirementUuid, host)) {
const requirement = patchesByUuid[requirementUuid]
await this._installPoolPatchOnHostAndRequirements(requirement, host, patchesByUuid)
host = this.getObject(host.$id)
}
}
}
await this._installPoolPatchOnHost(patch.uuid, host)
},
async installAllPoolPatchesOnHost (hostId) {
let host = this.getObject(hostId)
const installableByUuid = await this._listMissingPoolPatchesOnHost(host)
// List of all installable patches sorted from the newest to the
// oldest.
const installable = sortBy(
installableByUuid,
patch => -Date.parse(patch.date)
)
for (let i = 0, n = installable.length; i < n; ++i) {
const patch = installable[i]
if (this._isPoolPatchInstallableOnHost(patch.uuid, host)) {
await this._installPoolPatchOnHostAndRequirements(patch, host, installableByUuid).catch(error => {
if (error.code !== 'PATCH_ALREADY_APPLIED') {
throw error
}
})
host = this.getObject(host.$id)
}
}
},
async installAllPoolPatchesOnAllHosts () {
await this.installAllPoolPatchesOnHost(this.pool.master)
await Promise.all(mapToArray(
filter(this.objects.all, { $type: 'host' }),
host => this.installAllPoolPatchesOnHost(host.$id)
))
}
}
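
A hedged usage sketch of the entry points above (xapi stands for an instance carrying this mixin; hostId and somePatchUuid are illustrative):

// List the patches still applicable to the host.
const missing = await xapi.listMissingPoolPatchesOnHost(hostId)
console.log(missing.map(patch => patch.name))

// Install everything applicable, newest first, recursing into requirements;
// tools ISOs are ejected from resident VMs beforehand.
await xapi.installAllPoolPatchesOnHost(hostId)

// Or install a single patch: it is fetched from updates.xensource.com and
// uploaded to the pool unless already present.
await xapi.installPoolPatchOnHost(somePatchUuid, hostId)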

@@ -0,0 +1,53 @@
import {
mapToArray
} from '../../utils'
export default {
_connectAllSrPbds (sr) {
return Promise.all(
mapToArray(sr.$PBDs, pbd => this._plugPbd(pbd))
)
},
async connectAllSrPbds (id) {
await this._connectAllSrPbds(this.getObject(id))
},
_disconnectAllSrPbds (sr) {
return Promise.all(
mapToArray(sr.$PBDs, pbd => this._unplugPbd(pbd))
)
},
async disconnectAllSrPbds (id) {
await this._disconnectAllSrPbds(this.getObject(id))
},
async destroySr (id) {
const sr = this.getObject(id)
await this._disconnectAllSrPbds(sr)
await this.call('SR.destroy', sr.$ref)
},
async forgetSr (id) {
const sr = this.getObject(id)
await this._disconnectAllSrPbds(sr)
await this.call('SR.forget', sr.$ref)
},
_plugPbd (pbd) {
return this.call('PBD.plug', pbd.$ref)
},
async plugPbd (id) {
await this._plugPbd(this.getObject(id))
},
_unplugPbd (pbd) {
return this.call('PBD.unplug', pbd.$ref)
},
async unplugPbd (id) {
await this._unplugPbd(this.getObject(id))
}
}
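
A short hedged sketch of how these helpers compose (srId is illustrative):

// Forgetting an SR unplugs every PBD, then calls SR.forget.
await xapi.forgetSr(srId)
// destroySr follows the same pattern but ends with SR.destroy,
// which removes the SR's contents instead of merely detaching it.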

src/xapi/mixins/vm.js (new file, 334 lines)

@@ -0,0 +1,334 @@
import deferrable from 'golike-defer'
import find from 'lodash/find'
import gte from 'lodash/gte'
import isEmpty from 'lodash/isEmpty'
import lte from 'lodash/lte'
import {
forEach,
mapToArray,
noop,
parseSize,
pCatch
} from '../../utils'
import {
isVmHvm,
isVmRunning,
makeEditObject
} from '../utils'
export default {
// TODO: clean up on error.
@deferrable.onFailure
async createVm ($onFailure, templateId, {
name_label, // deprecated
nameLabel = name_label, // eslint-disable-line camelcase
clone = true,
installRepository = undefined,
vdis = undefined,
vifs = undefined,
existingVdis = undefined,
coreOs = false,
cloudConfig = undefined,
...props
} = {}, checkLimits) {
const installMethod = (() => {
if (installRepository == null) {
return 'none'
}
try {
installRepository = this.getObject(installRepository)
return 'cd'
} catch (_) {
return 'network'
}
})()
const template = this.getObject(templateId)
// Clones the template.
const vmRef = await this[clone ? '_cloneVm' : '_copyVm'](template, nameLabel)
$onFailure(() => this.deleteVm(vmRef, true)::pCatch(noop))
// TODO: copy BIOS strings?
// Removes disks from the provision XML; we will create them
// ourselves.
await this.call('VM.remove_from_other_config', vmRef, 'disks')::pCatch(noop)
// Creates the VDIs and executes the initial steps of the
// installation.
await this.call('VM.provision', vmRef)
let vm = await this._getOrWaitObject(vmRef)
// Sets the VM params.
await this._editVm(vm, props, checkLimits)
// Sets boot parameters.
{
const isHvm = isVmHvm(vm)
if (isHvm) {
if (!isEmpty(vdis) || installMethod === 'network') {
const { HVM_boot_params: bootParams } = vm
let order = bootParams.order
if (order) {
order = 'n' + order.replace('n', '')
} else {
order = 'ncd'
}
this._setObjectProperties(vm, {
HVM_boot_params: { ...bootParams, order }
})
}
} else { // PV
if (vm.PV_bootloader === 'eliloader') {
if (installMethod === 'network') {
// TODO: normalize RHEL URL?
await this._updateObjectMapProperty(vm, 'other_config', {
'install-repository': installRepository
})
} else if (installMethod === 'cd') {
await this._updateObjectMapProperty(vm, 'other_config', {
'install-repository': 'cdrom'
})
}
}
}
}
// Inserts the CD if necessary.
if (installMethod === 'cd') {
// When the VM is started, if PV, the CD drive becomes non-bootable
// and the first disk becomes bootable.
await this._insertCdIntoVm(installRepository, vm, {
bootable: true
})
}
let nDisks = 0
// Modify existing (previous template) disks if necessary
existingVdis && await Promise.all(mapToArray(existingVdis, async ({ size, $SR: srId, ...properties }, userdevice) => {
++nDisks
const vbd = find(vm.$VBDs, { userdevice })
if (!vbd) {
return
}
const vdi = vbd.$VDI
await this._setObjectProperties(vdi, properties)
// if the disk is bigger
if (
size != null &&
size > vdi.virtual_size
) {
await this.resizeVdi(vdi.$id, size)
}
// if another SR is set, move it there
if (srId) {
await this.moveVdi(vdi.$id, srId)
}
}))
// Creates the user defined VDIs.
//
// TODO: set vm.suspend_SR
if (!isEmpty(vdis)) {
const devices = await this.call('VM.get_allowed_VBD_devices', vm.$ref)
await Promise.all(mapToArray(vdis, (vdiDescription, i) => {
++nDisks
return this._createVdi(
vdiDescription.size, // FIXME: Should not be done in Xapi.
{
name_label: vdiDescription.name_label,
name_description: vdiDescription.name_description,
sr: vdiDescription.sr || vdiDescription.SR
}
)
.then(ref => this._getOrWaitObject(ref))
.then(vdi => this._createVbd(vm, vdi, {
// Only the first VBD is bootable, and only when installMethod is not 'cd'.
bootable: installMethod !== 'cd' && !i,
userdevice: devices[i]
}))
}))
}
// Destroys the VIFs cloned from the template.
await Promise.all(mapToArray(vm.$VIFs, vif => this._deleteVif(vif)))
// Creates the VIFs specified by the user.
if (vifs) {
const devices = await this.call('VM.get_allowed_VIF_devices', vm.$ref)
await Promise.all(mapToArray(vifs, (vif, index) => this._createVif(
vm,
this.getObject(vif.network),
{
ipv4_allowed: vif.ipv4_allowed,
ipv6_allowed: vif.ipv6_allowed,
device: devices[index],
locking_mode: isEmpty(vif.ipv4_allowed) && isEmpty(vif.ipv6_allowed) ? 'network_default' : 'locked',
mac: vif.mac,
mtu: vif.mtu
}
)))
}
// TODO: Assign VGPUs.
if (cloudConfig != null) {
// Refresh the record.
vm = await this._waitObject(vm.$id, vm => vm.VBDs.length === nDisks)
// Find the SR of the first VDI.
let srRef
forEach(vm.$VBDs, vbd => {
let vdi
if (
vbd.type === 'Disk' &&
(vdi = vbd.$VDI)
) {
srRef = vdi.SR
return false
}
})
const method = coreOs
? 'createCoreOsCloudInitConfigDrive'
: 'createCloudInitConfigDrive'
await this[method](vm.$id, srRef, cloudConfig)
}
return this._waitObject(vm.$id)
},
// High level method to edit a VM.
//
// Params do not correspond directly to XAPI props.
_editVm: makeEditObject({
autoPoweron: {
set (value, vm) {
return Promise.all([
this._updateObjectMapProperty(vm, 'other_config', {
autoPoweron: value ? 'true' : null
}),
value && this.setPoolProperties({
autoPoweron: true
})
])
}
},
CPUs: 'cpus',
cpus: {
addToLimits: true,
// Current value may have constraints with other values.
//
// If the other value is not set and the constraint is not
// respected, the other value is changed first.
constraints: {
cpusStaticMax: gte
},
get: vm => +vm.VCPUs_at_startup,
set: [
'VCPUs_at_startup',
function (value, vm) {
return isVmRunning(vm) && this._set('VCPUs_number_live', value)
}
]
},
cpuCap: {
get: vm => vm.VCPUs_params.cap && +vm.VCPUs_params.cap,
set (cap, vm) {
return this._updateObjectMapProperty(vm, 'VCPUs_params', { cap })
}
},
cpusMax: 'cpusStaticMax',
cpusStaticMax: {
constraints: {
cpus: lte
},
get: vm => +vm.VCPUs_max,
set: 'VCPUs_max'
},
cpuWeight: {
get: vm => vm.VCPUs_params.weight && +vm.VCPUs_params.weight,
set (weight, vm) {
return this._updateObjectMapProperty(vm, 'VCPUs_params', { weight })
}
},
highAvailability: {
set (ha, vm) {
return this.call('VM.set_ha_restart_priority', vm.$ref, ha ? 'restart' : '')
}
},
memoryMin: {
constraints: {
memoryMax: gte
},
get: vm => +vm.memory_dynamic_min,
preprocess: parseSize,
set: 'memory_dynamic_min'
},
memory: 'memoryMax',
memoryMax: {
addToLimits: true,
limitName: 'memory',
constraints: {
memoryMin: lte,
memoryStaticMax: gte
},
get: vm => +vm.memory_dynamic_max,
preprocess: parseSize,
set: 'memory_dynamic_max'
},
memoryStaticMax: {
constraints: {
memoryMax: lte
},
get: vm => +vm.memory_static_max,
preprocess: parseSize,
set: 'memory_static_max'
},
nameDescription: true,
nameLabel: true,
PV_args: true,
tags: true
}),
async editVm (id, props, checkLimits) {
return /* await */ this._editVm(this.getObject(id), props, checkLimits)
},
async revertVm (snapshotId, snapshotBefore = true) {
const snapshot = this.getObject(snapshotId)
if (snapshotBefore) {
await this._snapshotVm(snapshot.$snapshot_of)
}
return this.call('VM.revert', snapshot.$ref)
}
}
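
A hedged sketch of the createVm call shape handled above (all identifiers and values are illustrative; the $onFailure argument is injected by the deferrable decorator and is never passed by callers):

const vm = await xapi.createVm(templateId, {
  nameLabel: 'web-01',
  clone: true,                 // clone the template instead of copying it
  installRepository: isoId,    // resolvable object → 'cd', anything else → network install
  vdis: [ { size: 10 * 1024 * 1024 * 1024, name_label: 'root', sr: srId } ],
  vifs: [ { network: networkId } ],
  // remaining props go through _editVm:
  cpus: 2,
  memory: '2GiB'               // preprocessed by parseSize
})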

@@ -0,0 +1,53 @@
import { NULL_REF } from './utils'
const OTHER_CONFIG_TEMPLATE = {
actions_after_crash: 'restart',
actions_after_reboot: 'restart',
actions_after_shutdown: 'destroy',
affinity: null,
blocked_operations: {},
ha_always_run: false,
HVM_boot_params: {
order: 'cdn'
},
HVM_boot_policy: 'BIOS order',
HVM_shadow_multiplier: 1,
is_a_template: false,
memory_dynamic_max: 4294967296,
memory_dynamic_min: 4294967296,
memory_static_max: 4294967296,
memory_static_min: 134217728,
order: 0,
other_config: {
vgpu_pci: '',
base_template_name: 'Other install media',
mac_seed: '5e88eb6a-d680-c47f-a94a-028886971ba4',
'install-methods': 'cdrom'
},
PCI_bus: '',
platform: {
timeoffset: '0',
nx: 'true',
acpi: '1',
apic: 'true',
pae: 'true',
hpet: 'true',
viridian: 'true'
},
protection_policy: NULL_REF,
PV_args: '',
PV_bootloader: '',
PV_bootloader_args: '',
PV_kernel: '',
PV_legacy_args: '',
PV_ramdisk: '',
recommendations: '<restrictions><restriction field="memory-static-max" max="137438953472" /><restriction field="vcpus-max" max="32" /><restriction property="number-of-vbds" max="255" /><restriction property="number-of-vifs" max="7" /><restriction field="has-vendor-device" value="false" /></restrictions>',
shutdown_delay: 0,
start_delay: 0,
user_version: 1,
VCPUs_at_startup: 1,
VCPUs_max: 1,
VCPUs_params: {},
version: 0
}
export { OTHER_CONFIG_TEMPLATE as default }

src/xapi/utils.js (new file, 386 lines)

@@ -0,0 +1,386 @@
// import isFinite from 'lodash/isFinite'
import camelCase from 'lodash/camelCase'
import createDebug from 'debug'
import isEqual from 'lodash/isEqual'
import isPlainObject from 'lodash/isPlainObject'
import pickBy from 'lodash/pickBy'
import { utcFormat, utcParse } from 'd3-time-format'
import httpRequest from '../http-request'
import {
camelToSnakeCase,
createRawObject,
forEach,
isArray,
isBoolean,
isFunction,
isInteger,
isString,
map,
mapFilter,
mapToArray,
noop,
pFinally
} from '../utils'
// ===================================================================
export const asBoolean = value => Boolean(value)
// const asFloat = value => {
// value = String(value)
// return value.indexOf('.') === -1
// ? `${value}.0`
// : value
// }
export const asInteger = value => String(value)
export const filterUndefineds = obj => pickBy(obj, value => value !== undefined)
export const optional = (value, fn) => value == null
? undefined
: fn ? fn(value) : value
export const prepareXapiParam = param => {
// if (isFinite(param) && !isInteger(param)) {
// return asFloat(param)
// }
if (isInteger(param)) {
return asInteger(param)
}
if (isBoolean(param)) {
return asBoolean(param)
}
if (isArray(param)) {
return map(param, prepareXapiParam)
}
if (isPlainObject(param)) {
return map(filterUndefineds(param), prepareXapiParam)
}
return param
}
// -------------------------------------------------------------------
export const debug = createDebug('xo:xapi')
// -------------------------------------------------------------------
const OPAQUE_REF_RE = /OpaqueRef:[0-9a-z-]+/
export const extractOpaqueRef = str => {
const matches = OPAQUE_REF_RE.exec(str)
if (!matches) {
throw new Error('no opaque ref found')
}
return matches[0]
}
// -------------------------------------------------------------------
const TYPE_TO_NAMESPACE = createRawObject()
forEach([
'Bond',
'DR_task',
'GPU_group',
'PBD',
'PCI',
'PGPU',
'PIF',
'PIF_metrics',
'SM',
'SR',
'VBD',
'VBD_metrics',
'VDI',
'VGPU',
'VGPU_type',
'VIF',
'VLAN',
'VM',
'VM_appliance',
'VM_guest_metrics',
'VM_metrics',
'VMPP',
'VTPM'
], namespace => {
TYPE_TO_NAMESPACE[namespace.toLowerCase()] = namespace
})
// Object types given by `xen-api` are always lowercase but the
// namespaces in the Xen API can have a different casing.
export const getNamespaceForType = type => TYPE_TO_NAMESPACE[type] || type
// -------------------------------------------------------------------
// Format/parse dates in the pseudo ISO 8601 form used by XenServer,
// e.g. as returned by xapi.call('host.get_servertime', host.$ref)
export const formatDateTime = utcFormat('%Y%m%dT%H:%M:%SZ')
export const parseDateTime = utcParse('%Y%m%dT%H:%M:%SZ')
// -------------------------------------------------------------------
export const isHostRunning = host => {
const { $metrics } = host
return $metrics && $metrics.live
}
// -------------------------------------------------------------------
export const isVmHvm = vm => Boolean(vm.HVM_boot_policy)
const VM_RUNNING_POWER_STATES = {
Running: true,
Paused: true
}
export const isVmRunning = vm => VM_RUNNING_POWER_STATES[vm.power_state]
// -------------------------------------------------------------------
const _DEFAULT_ADD_TO_LIMITS = (next, current) => next - current
export const makeEditObject = specs => {
const normalizeGet = (get, name) => {
if (get === true) {
const prop = camelToSnakeCase(name)
return object => object[prop]
}
if (isString(get)) {
return object => object[get]
}
return get
}
const normalizeSet = (set, name) => {
if (isFunction(set)) {
return set
}
if (set === true) {
const prop = camelToSnakeCase(name)
return function (value) {
return this._set(prop, value)
}
}
if (isString(set)) {
const index = set.indexOf('.')
if (index === -1) {
const prop = camelToSnakeCase(set)
return function (value) {
return this._set(prop, value)
}
}
const map = set.slice(0, index)
const prop = set.slice(index + 1)
return function (value, object) {
return this._updateObjectMapProperty(object, map, { [prop]: value })
}
}
if (!isArray(set)) {
throw new Error('must be an array, a function or a string')
}
set = mapToArray(set, normalizeSet)
const { length } = set
if (!length) {
throw new Error('invalid setter')
}
if (length === 1) {
return set[0]
}
return function (value, object) {
return Promise.all(mapToArray(set, set => set.call(this, value, object)))
}
}
const normalizeSpec = (spec, name) => {
if (spec === true) {
spec = {
get: true,
set: true
}
}
if (spec.addToLimits === true) {
spec.addToLimits = _DEFAULT_ADD_TO_LIMITS
}
if (!spec.limitName) {
spec.limitName = name
}
forEach(spec.constraints, (constraint, constraintName) => {
if (!isFunction(constraint)) {
throw new Error('constraint must be a function')
}
const constraintSpec = specs[constraintName]
if (!constraintSpec.get) {
throw new Error('constraint values must have a get')
}
})
const { get } = spec
if (get) {
spec.get = normalizeGet(get, name)
} else if (spec.addToLimits) {
throw new Error('addToLimits cannot be defined without get')
}
spec.set = normalizeSet(spec.set, name)
return spec
}
forEach(specs, (spec, name) => {
isString(spec) || (specs[name] = normalizeSpec(spec, name))
})
// Resolves aliases and adds camelCase and snake_case aliases.
forEach(specs, (spec, name) => {
if (isString(spec)) {
do {
spec = specs[spec]
} while (isString(spec))
specs[name] = spec
}
let tmp
specs[tmp = camelCase(name)] || (specs[tmp] = spec)
specs[tmp = camelToSnakeCase(name)] || (specs[tmp] = spec)
})
return async function _editObject_ (id, values, checkLimits) {
const limits = checkLimits && {}
const object = this.getObject(id)
const _objectRef = object.$ref
const _setMethodPrefix = `${getNamespaceForType(object.$type)}.set_`
// Context used to execute functions.
const context = {
__proto__: this,
_set: (prop, value) => this.call(_setMethodPrefix + prop, _objectRef, prepareXapiParam(value))
}
const set = (value, name) => {
if (value === undefined) {
return
}
const spec = specs[name]
if (!spec) {
return
}
const { preprocess } = spec
if (preprocess) {
value = preprocess(value)
}
const { get } = spec
if (get) {
const current = get(object)
if (isEqual(value, current)) {
return
}
let addToLimits
if (limits && (addToLimits = spec.addToLimits)) {
limits[spec.limitName] = addToLimits(value, current)
}
}
const cb = () => spec.set.call(context, value, object)
const { constraints } = spec
if (constraints) {
const cbs = []
forEach(constraints, (constraint, constraintName) => {
// Before setting a property to a new value, if the constraint check fails (e.g. memoryMin > memoryMax):
// - if the user wants to set the constraint (ie constraintNewValue is defined):
// constraint <-- constraintNewValue THEN property <-- value (e.g. memoryMax <-- 2048 THEN memoryMin <-- 1024)
// - if the user DOES NOT want to set the constraint (ie constraintNewValue is NOT defined):
// constraint <-- value THEN property <-- value (e.g. memoryMax <-- 1024 THEN memoryMin <-- 1024)
// FIXME: Some value combinations will lead to setting the same property twice, which is not perfect but works for now.
const constraintCurrentValue = specs[constraintName].get(object)
const constraintNewValue = values[constraintName]
if (!constraint(constraintCurrentValue, value)) {
const cb = set(constraintNewValue == null ? value : constraintNewValue, constraintName)
if (cb) {
cbs.push(cb)
}
}
})
if (cbs.length) {
return () => Promise.all(mapToArray(cbs, cb => cb())).then(cb)
}
}
return cb
}
const cbs = mapFilter(values, set)
if (checkLimits) {
await checkLimits(limits, object)
}
return Promise.all(mapToArray(cbs, cb => cb())).then(noop)
}
}
// ===================================================================
export const NULL_REF = 'OpaqueRef:NULL'
// ===================================================================
// HTTP PUT; uses an ugly hack when the length is not known because XAPI
// does not support chunked encoding.
export const put = (stream, {
headers: { ...headers } = {},
...opts
}, task) => {
const makeRequest = () => httpRequest({
...opts,
body: stream,
headers,
method: 'put'
})
// Xen API does not support chunked encoding.
if (stream.length == null) {
headers['transfer-encoding'] = null
const promise = makeRequest()
if (task) {
// Some connections need the task to resolve (VDI import).
task::pFinally(() => {
promise.cancel()
})
} else {
// Some tasks need the connection to close (VM import).
promise.request.once('finish', () => {
promise.cancel()
})
}
return promise.readAll()
}
return makeRequest().readAll()
}
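
makeEditObject is the core of the mixins above: each user-facing property is described by a declarative spec that is normalized into XAPI getters/setters, with aliases, preprocessing, constraints and resource-set limits handled generically. A hedged sketch of a spec and the editor it produces (names are illustrative, not from the diff):

// Assumes lodash's gte and the parseSize helper are in scope.
const editWidget = makeEditObject({
  nameLabel: true,             // get/set widget.name_label via <Type>.set_name_label
  memory: 'memoryMax',         // plain string: alias resolved to the memoryMax spec
  memoryMax: {
    get: obj => +obj.memory_dynamic_max,
    preprocess: parseSize,     // accept human-readable sizes
    set: 'memory_dynamic_max',
    constraints: {
      memoryStaticMax: gte     // raise the static max first when it would be exceeded
    }
  },
  memoryStaticMax: {
    get: obj => +obj.memory_static_max,
    set: 'memory_static_max'
  }
})

// Used as a method of a Xapi instance:
// await editWidget.call(xapi, widgetId, { memory: '4GiB' })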


@@ -19,27 +19,47 @@ export default class {
constructor (xo) {
this._xo = xo
this._acls = new Acls({
const aclsDb = this._acls = new Acls({
connection: xo._redis,
prefix: 'xo:acl',
indexes: ['subject', 'object']
})
xo.on('start', () => {
xo.addConfigManager('acls',
() => aclsDb.get(),
acls => aclsDb.update(acls)
)
})
xo.on('clean', async () => {
const acls = await aclsDb.get()
const toRemove = []
forEach(acls, ({ subject, object, action, id }) => {
if (!subject || !object || !action) {
toRemove.push(id)
}
})
await aclsDb.remove(toRemove)
})
}
async _getAclsForUser (userId) {
const subjects = (await this._xo.getUser(userId)).groups.concat(userId)
const user = await this._xo.getUser(userId)
const { groups } = user
const subjects = groups
? groups.concat(userId)
: [ userId ]
const acls = []
const pushAcls = (function (push) {
return function (entries) {
push.apply(acls, entries)
}
const pushAcls = (push => entries => {
push.apply(acls, entries)
})(acls.push)
const {_acls: collection} = this
await Promise.all(mapToArray(
subjects,
subject => collection.get({subject}).then(pushAcls)
subject => this.getAclsForSubject(subject).then(pushAcls)
))
return acls
@@ -64,6 +84,10 @@ export default class {
return this._acls.get()
}
async getAclsForSubject (subjectId) {
return this._acls.get({ subject: subjectId })
}
async getPermissionsForUser (userId) {
const [
acls,

@@ -1,26 +1,23 @@
import createDebug from 'debug'
const debug = createDebug('xo:api')
import getKeys from 'lodash.keys'
import kindOf from 'kindof'
import ms from 'ms'
import schemaInspector from 'schema-inspector'
import * as methods from '../api'
import {
InvalidParameters,
MethodNotFound,
NoSuchObject,
Unauthorized
} from './api-errors'
import {
version as xoServerVersion
} from '../package.json'
MethodNotFound
} from 'json-rpc-peer'
import {
createRawObject,
forEach,
isFunction,
noop
} from './utils'
noop,
serializeError
} from '../utils'
import * as errors from 'xo-common/api-errors'
// ===================================================================
@@ -31,39 +28,30 @@ const PERMISSIONS = {
admin: 3
}
// TODO:
// - error when adding a server to a pool with incompatible version
// - error when halted VM migration failure is due to XS < 7
const XAPI_ERROR_TO_XO_ERROR = {
EHOSTUNREACH: errors.serverUnreachable,
HOST_OFFLINE: ([ host ], getId) => errors.hostOffline({ host: getId(host) }),
NO_HOSTS_AVAILABLE: errors.noHostsAvailable,
NOT_SUPPORTED_DURING_UPGRADE: errors.notSupportedDuringUpgrade,
OPERATION_BLOCKED: ([ ref, code ], getId) => errors.operationBlocked({ objectId: getId(ref), code }),
PATCH_PRECHECK_FAILED_ISO_MOUNTED: ([ patch ]) => errors.patchPrecheck({ errorType: 'isoMounted', patch }),
PIF_VLAN_EXISTS: ([ pif ], getId) => errors.objectAlreadyExists({ objectId: getId(pif), objectType: 'PIF' }),
SESSION_AUTHENTICATION_FAILED: errors.authenticationFailed,
VDI_IN_USE: ([ vdi, operation ], getId) => errors.vdiInUse({ vdi: getId(vdi), operation }),
VM_BAD_POWER_STATE: ([ vm, expected, actual ], getId) => errors.vmBadPowerState({ vm: getId(vm), expected, actual }),
VM_IS_TEMPLATE: errors.vmIsTemplate,
VM_LACKS_FEATURE: ([ vm ], getId) => errors.vmLacksFeature({ vm: getId(vm) }),
VM_LACKS_FEATURE_SHUTDOWN: ([ vm ], getId) => errors.vmLacksFeature({ vm: getId(vm), feature: 'shutdown' }),
VM_MISSING_PV_DRIVERS: ([ vm ], getId) => errors.vmMissingPvDrivers({ vm: getId(vm) })
}
const hasPermission = (user, permission) => (
PERMISSIONS[user.permission] >= PERMISSIONS[permission]
)
// FIXME: this function is specific to XO and should not be defined in
// this file.
function checkPermission (method) {
/* jshint validthis: true */
const {permission} = method
// No requirement.
if (permission === undefined) {
return
}
const {user} = this
if (!user) {
throw new Unauthorized()
}
// The only requirement is login.
if (!permission) {
return
}
if (!hasPermission(user, permission)) {
throw new Unauthorized()
}
}
// -------------------------------------------------------------------
function checkParams (method, params) {
const schema = method.params
if (!schema) {
@@ -76,11 +64,34 @@ function checkParams (method, params) {
}, params)
if (!result.valid) {
throw new InvalidParameters(result.error)
throw errors.invalidParameters(result.error)
}
}
// -------------------------------------------------------------------
function checkPermission (method) {
/* jshint validthis: true */
const {permission} = method
// No requirement.
if (permission === undefined) {
return
}
const {user} = this
if (!user) {
throw errors.unauthorized()
}
// The only requirement is login.
if (!permission) {
return
}
if (!hasPermission(user, permission)) {
throw errors.unauthorized()
}
}
function resolveParams (method, params) {
const resolve = method.resolve
@@ -90,7 +101,7 @@ function resolveParams (method, params) {
const {user} = this
if (!user) {
throw new Unauthorized()
throw errors.unauthorized()
}
const userId = user.id
@@ -126,89 +137,29 @@ function resolveParams (method, params) {
return params
}
throw new Unauthorized()
throw errors.unauthorized()
})
}
// ===================================================================
function getMethodsInfo () {
const methods = {}
forEach(this.api._methods, (method, name) => {
methods[name] = {
description: method.description,
params: method.params || {},
permission: method.permission
}
})
return methods
}
getMethodsInfo.description = 'returns the signatures of all available API methods'
// -------------------------------------------------------------------
const getServerVersion = () => xoServerVersion
getServerVersion.description = 'return the version of xo-server'
// -------------------------------------------------------------------
const getVersion = () => '0.1'
getVersion.description = 'API version (unstable)'
// -------------------------------------------------------------------
function listMethods () {
return getKeys(this.api._methods)
}
listMethods.description = 'returns the name of all available API methods'
// -------------------------------------------------------------------
function methodSignature ({method: name}) {
const method = this.api.getMethod(name)
if (!method) {
throw new NoSuchObject()
}
// Return an array for compatibility with XML-RPC.
return [
// XML-RPC requires the name of the method.
{
name,
description: method.description,
params: method.params || {},
permission: method.permission
}
]
}
methodSignature.description = 'returns the signature of an API method'
// ===================================================================
export default class Api {
constructor ({
context,
verboseLogsOnErrors
} = {}) {
constructor (xo) {
this._logger = null
this._methods = createRawObject()
this._verboseLogsOnErrors = verboseLogsOnErrors
this.context = context
this._xo = xo
this.addMethods({
system: {
getMethodsInfo,
getServerVersion,
getVersion,
listMethods,
methodSignature
}
this.addApiMethods(methods)
xo.on('start', async () => {
this._logger = await xo.getLogger('api')
})
}
addMethod (name, method) {
get apiMethods () {
return this._methods
}
addApiMethod (name, method) {
const methods = this._methods
if (name in methods) {
@@ -217,21 +168,22 @@ export default class Api {
methods[name] = method
let unset = () => {
let remove = () => {
delete methods[name]
unset = noop
remove = noop
}
return () => unset()
return () => remove()
}
addMethods (methods) {
addApiMethods (methods) {
let base = ''
const removes = []
const addMethod = (method, name) => {
name = base + name
if (isFunction(method)) {
this.addMethod(name, method)
removes.push(this.addApiMethod(name, method))
return
}
@@ -240,20 +192,35 @@ export default class Api {
forEach(method, addMethod)
base = oldBase
}
forEach(methods, addMethod)
try {
forEach(methods, addMethod)
} catch (error) {
// Remove all added methods.
forEach(removes, remove => remove())
// Forward the error
throw error
}
let remove = () => {
forEach(removes, remove => remove())
remove = noop
}
return remove
}
async call (session, name, params) {
async callApiMethod (session, name, params) {
const startTime = Date.now()
const method = this.getMethod(name)
const method = this._methods[name]
if (!method) {
throw new MethodNotFound(name)
}
// FIXME: it can cause issues if there are any property assignments in
// XO methods called from the API.
const context = Object.create(this.context, {
const context = Object.create(this._xo, {
api: { // Used by system.*().
value: this
},
@@ -262,10 +229,9 @@ export default class Api {
}
})
// FIXME: too coupled with XO.
// Fetch and inject the current user.
const userId = session.get('user_id', undefined)
context.user = userId && await context.getUser(userId)
context.user = userId && await this._xo.getUser(userId)
const userName = context.user
? context.user.email
: '(unknown user)'
@@ -286,7 +252,7 @@ export default class Api {
params.id = params[namespace]
}
checkParams(method, params)
checkParams.call(context, method, params)
const resolvedParams = await resolveParams.call(context, method, params)
@@ -308,15 +274,19 @@ export default class Api {
return result
} catch (error) {
if (this._verboseLogsOnErrors) {
debug(
'%s | %s(%j) [%s] =!> %s',
userName,
name,
params,
ms(Date.now() - startTime),
error
)
const data = {
userId,
method: name,
params,
duration: Date.now() - startTime,
error: serializeError(error)
}
const message = `${userName} | ${name}(${JSON.stringify(params)}) [${ms(Date.now() - startTime)}] =!> ${error}`
this._logger.error(message, data)
if (this._xo._config.verboseLogsOnErrors) {
debug(message)
const stack = error && error.stack
if (stack) {
@@ -332,11 +302,18 @@ export default class Api {
)
}
const xoError = XAPI_ERROR_TO_XO_ERROR[error.code]
if (xoError) {
throw xoError(error.params, ref => {
try {
return this._xo.getObject(ref).id
} catch (e) {
return ref
}
})
}
throw error
}
}
getMethod (name) {
return this._methods[name]
}
}
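
The XAPI_ERROR_TO_XO_ERROR table above is what turns raw XAPI failures into typed xo-common errors at the end of callApiMethod. A hedged sketch of that translation step in isolation (the standalone function is hypothetical):

const translateXapiError = (error, getId) => {
  const toXoError = XAPI_ERROR_TO_XO_ERROR[error.code]
  return toXoError !== undefined
    ? toXoError(error.params, getId)  // e.g. HOST_OFFLINE → errors.hostOffline({ host: <id> })
    : error                           // unknown codes are rethrown unchanged
}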

@@ -1,9 +1,8 @@
import Token, { Tokens } from '../models/token'
import {
NoSuchObject
} from '../api-errors'
import { noSuchObject } from 'xo-common/api-errors'
import {
createRawObject,
forEach,
generateToken,
pCatch,
noop
@@ -11,13 +10,8 @@ import {
// ===================================================================
class NoSuchAuthenticationToken extends NoSuchObject {
constructor (id) {
super(id, 'authentication token')
}
}
// ===================================================================
const noSuchAuthenticationToken = id =>
noSuchObject(id, 'authenticationToken')
export default class {
constructor (xo) {
@@ -30,7 +24,7 @@ export default class {
this._providers = new Set()
// Creates persistent collections.
this._tokens = new Tokens({
const tokensDb = this._tokens = new Tokens({
connection: xo._redis,
prefix: 'xo:token',
indexes: ['user_id']
@@ -65,6 +59,25 @@ export default class {
return
}
})
xo.on('clean', async () => {
const tokens = await tokensDb.get()
const toRemove = []
const now = Date.now()
forEach(tokens, ({ expiration, id }) => {
if (!expiration || expiration < now) {
toRemove.push(id)
}
})
await tokensDb.remove(toRemove)
})
xo.on('start', () => {
xo.addConfigManager('authTokens',
() => tokensDb.get(),
tokens => tokensDb.update(tokens)
)
})
}
registerAuthenticationProvider (provider) {
@@ -152,14 +165,14 @@ export default class {
async deleteAuthenticationToken (id) {
if (!await this._tokens.remove(id)) {
throw new NoSuchAuthenticationToken(id)
throw noSuchAuthenticationToken(id)
}
}
async getAuthenticationToken (id) {
let token = await this._tokens.first(id)
if (!token) {
throw new NoSuchAuthenticationToken(id)
throw noSuchAuthenticationToken(id)
}
token = token.properties
@@ -169,7 +182,7 @@ export default class {
)) {
this._tokens.remove(id)::pCatch(noop)
throw new NoSuchAuthenticationToken(id)
throw noSuchAuthenticationToken(id)
}
return token

@@ -1,29 +1,43 @@
import endsWith from 'lodash.endswith'
import deferrable from 'golike-defer'
import escapeStringRegexp from 'escape-string-regexp'
import eventToPromise from 'event-to-promise'
import filter from 'lodash.filter'
import find from 'lodash.find'
import findIndex from 'lodash.findindex'
import sortBy from 'lodash.sortby'
import startsWith from 'lodash.startswith'
import execa from 'execa'
import splitLines from 'split-lines'
import { createParser as createPairsParser } from 'parse-pairs'
import { createReadStream, readdir, stat } from 'fs'
import { satisfies as versionSatisfies } from 'semver'
import { utcFormat } from 'd3-time-format'
import {
basename,
dirname
} from 'path'
import { satisfies as versionSatisfies } from 'semver'
import {
endsWith,
filter,
find,
findIndex,
includes,
once,
sortBy,
startsWith,
trim
} from 'lodash'
import vhdMerge from '../vhd-merge'
import vhdMerge, { chainVhd } from '../vhd-merge'
import xapiObjectToXo from '../xapi-object-to-xo'
import {
deferrable
} from '../decorators'
import {
forEach,
mapFilter,
mapToArray,
noop,
pCatch,
pFinally,
pFromCallback,
pSettle,
safeDateFormat
resolveSubpath,
safeDateFormat,
safeDateParse,
tmpDir
} from '../utils'
import {
VDI_FORMAT_VHD
@@ -34,6 +48,8 @@ import {
const DELTA_BACKUP_EXT = '.json'
const DELTA_BACKUP_EXT_LENGTH = DELTA_BACKUP_EXT.length
const shortDate = utcFormat('%Y-%m-%d')
// Test if a file is a vdi backup. (full or delta)
const isVdiBackup = name => /^\d+T\d+Z_(?:full|delta)\.vhd$/.test(name)
@@ -41,6 +57,41 @@ const isVdiBackup = name => /^\d+T\d+Z_(?:full|delta)\.vhd$/.test(name)
const isDeltaVdiBackup = name => /^\d+T\d+Z_delta\.vhd$/.test(name)
const isFullVdiBackup = name => /^\d+T\d+Z_full\.vhd$/.test(name)
const toTimestamp = date => date && Math.round(date.getTime() / 1000)
const parseVmBackupPath = name => {
const base = basename(name)
let baseMatches
baseMatches = /^([^_]+)_([^_]+)_(.+)\.xva$/.exec(base)
if (baseMatches) {
return {
datetime: toTimestamp(safeDateParse(baseMatches[1])),
id: name,
name: baseMatches[3],
tag: baseMatches[2],
type: 'xva'
}
}
let dirMatches
if (
(baseMatches = /^([^_]+)_(.+)\.json$/.exec(base)) &&
(dirMatches = /^vm_delta_([^_]+)_(.+)$/.exec(basename(dirname(name))))
) {
return {
datetime: toTimestamp(safeDateParse(baseMatches[1])),
id: name,
name: baseMatches[2],
tag: dirMatches[1],
type: 'delta',
uuid: dirMatches[2]
}
}
throw new Error('invalid VM backup filename')
}
// Get the timestamp of a vdi backup. (full or delta)
const getVdiTimestamp = name => {
const arr = /^(\d+T\d+Z)_(?:full|delta)\.vhd$/.exec(name)
@@ -50,21 +101,216 @@ const getVdiTimestamp = name => {
const getDeltaBackupNameWithoutExt = name => name.slice(0, -DELTA_BACKUP_EXT_LENGTH)
const isDeltaBackup = name => endsWith(name, DELTA_BACKUP_EXT)
// Checksums have been corrupted between 5.2.6 and 5.2.7.
//
// For a short period of time, bad checksums will be regenerated
// instead of rejected.
//
// TODO: restore when enough time has passed (a week/a month).
async function checkFileIntegrity (handler, name) {
let stream
await handler.refreshChecksum(name)
// let stream
//
// try {
// stream = await handler.createReadStream(name, { checksum: true })
// } catch (error) {
// if (error.code === 'ENOENT') {
// return
// }
//
// throw error
// }
//
// stream.resume()
// await eventToPromise(stream, 'finish')
}
try {
stream = await handler.createReadStream(name, { checksum: true })
} catch (error) {
if (error.code === 'ENOENT') {
return
}
// -------------------------------------------------------------------
throw error
const listPartitions = (() => {
const IGNORED = {}
forEach([
// https://github.com/jhermsmeier/node-mbr/blob/master/lib/partition.js#L38
0x05, 0x0F, 0x85, 0x15, 0x91, 0x9B, 0x5E, 0x5F, 0xCF, 0xD5, 0xC5,
0x82 // swap
], type => {
IGNORED[type] = true
})
const TYPES = {
0x7: 'NTFS',
0x83: 'linux',
0xc: 'FAT'
}
stream.resume()
await eventToPromise(stream, 'finish')
const parseLine = createPairsParser({
keyTransform: key => key === 'UUID'
? 'id'
: key.toLowerCase(),
valueTransform: (value, key) => key === 'start' || key === 'size'
? +value
: key === 'type'
? TYPES[+value] || value
: value
})
return device => execa.stdout('partx', [
'--bytes',
'--output=NR,START,SIZE,NAME,UUID,TYPE',
'--pairs',
device.path
]).then(stdout => mapFilter(splitLines(stdout), line => {
const partition = parseLine(line)
const { type } = partition
if (type != null && !IGNORED[+type]) {
return partition
}
}))
})()
// handle LVM logical volumes automatically
const listPartitions2 = device => listPartitions(device).then(partitions => {
const partitions2 = []
const promises = []
forEach(partitions, partition => {
if (+partition.type === 0x8e) {
promises.push(mountLvmPv(device, partition).then(device => {
const promise = listLvmLvs(device).then(lvs => {
forEach(lvs, lv => {
partitions2.push({
name: lv.lv_name,
size: +lv.lv_size,
id: `${partition.id}/${lv.vg_name}/${lv.lv_name}`
})
})
})
promise::pFinally(device.unmount)
return promise
}))
} else {
partitions2.push(partition)
}
})
return Promise.all(promises).then(() => partitions2)
})
const mountPartition = (device, partitionId) => Promise.all([
partitionId != null && listPartitions(device),
tmpDir()
]).then(([ partitions, path ]) => {
const options = [
'loop',
'ro'
]
if (partitions) {
const partition = find(partitions, { id: partitionId })
const { start } = partition
if (start != null) {
options.push(`offset=${start * 512}`)
}
}
const mount = options => execa('mount', [
`--options=${options.join(',')}`,
`--source=${device.path}`,
`--target=${path}`
])
// the `noload` option is used for ext3/ext4; if mounting fails it might
// be another fs, so retry without it
return mount([ ...options, 'noload' ]).catch(() =>
mount(options)
).then(() => ({
path,
unmount: once(() => execa('umount', [ '--lazy', path ]))
}), error => {
console.log(error)
throw error
})
})
// handle LVM logical volumes automatically
const mountPartition2 = (device, partitionId) => {
if (
partitionId == null ||
!includes(partitionId, '/')
) {
return mountPartition(device, partitionId)
}
const [ pvId, vgName, lvName ] = partitionId.split('/')
return listPartitions(device).then(partitions =>
find(partitions, { id: pvId })
).then(pvId => mountLvmPv(device, pvId)).then(device1 =>
execa('vgchange', [ '-ay', vgName ]).then(() =>
execa.stdout('lvs', [
'--reportformat',
'json',
'-o',
'lv_name,lv_path',
`${vgName}`
]).then(stdout =>
find(JSON.parse(stdout).report[0].lv, { lv_name: lvName }).lv_path
)
).then(path =>
mountPartition({ path }).then(device2 => ({
...device2,
unmount: () => device2.unmount().then(device1.unmount)
}))
).catch(error => device1.unmount().then(() => {
throw error
}))
)
}
// -------------------------------------------------------------------
const listLvmLvs = device => execa.stdout('pvs', [
'--reportformat',
'json',
'--nosuffix',
'--units',
'b',
'-o',
'lv_name,lv_path,lv_size,vg_name',
device.path
]).then(stdout => filter(JSON.parse(stdout).report[0].pv, pv => pv.lv_name))
const mountLvmPv = (device, partition) => {
const args = []
if (partition) {
args.push('-o', partition.start * 512)
}
args.push(
'--show',
'-f',
device.path
)
return execa.stdout('losetup', args).then(stdout => {
const path = trim(stdout)
return {
path,
unmount: once(() => Promise.all([
execa('losetup', [ '-d', path ]),
execa.stdout('pvs', [
'--reportformat',
'json',
'-o',
'vg_name',
path
]).then(stdout => execa('vgchange', [
'-an',
...mapToArray(JSON.parse(stdout).report[0].pv, 'vg_name')
]))
]))
}
})
}
// ===================================================================
@@ -72,6 +318,15 @@ async function checkFileIntegrity (handler, name) {
export default class {
constructor (xo) {
this._xo = xo
// clean any LVM volumes that might not have been properly
// unmounted
xo.on('start', () => Promise.all([
execa('losetup', [ '-D' ]),
execa('vgchange', [ '-an' ])
]).then(() =>
execa('pvscan', [ '--cache' ])
))
}
async listRemoteBackups (remoteId) {
@@ -101,12 +356,53 @@ export default class {
return backups
}
async listVmBackups (remoteId) {
const handler = await this._xo.getRemoteHandler(remoteId)
const backups = []
await Promise.all(mapToArray(await handler.list(), entry => {
if (endsWith(entry, '.xva')) {
backups.push(parseVmBackupPath(entry))
} else if (startsWith(entry, 'vm_delta_')) {
return handler.list(entry).then(children => Promise.all(mapToArray(children, child => {
if (endsWith(child, '.json')) {
const path = `${entry}/${child}`
const record = parseVmBackupPath(path)
backups.push(record)
return handler.readFile(path).then(data => {
record.disks = mapToArray(JSON.parse(data).vdis, vdi => ({
id: `${entry}/${vdi.xoPath}`,
name: vdi.name_label,
uuid: vdi.uuid
}))
}).catch(noop)
}
})))
}
}))
return backups
}
async importVmBackup (remoteId, file, sr) {
const handler = await this._xo.getRemoteHandler(remoteId)
const stream = await handler.createReadStream(file)
const xapi = this._xo.getXapi(sr)
await xapi.importVm(stream, { srId: sr._xapiId })
const vm = await xapi.importVm(stream, { srId: sr._xapiId })
const { datetime } = parseVmBackupPath(file)
await Promise.all([
xapi.addTag(vm.$id, 'restored from backup'),
xapi.editVm(vm.$id, {
name_label: `${vm.name_label} (${shortDate(datetime)})`
})
])
return xapiObjectToXo(vm).id
}
// -----------------------------------------------------------------
@@ -290,6 +586,18 @@ export default class {
return backups.slice(i)
}
// fix the parent UUID and filename in delta files after download from xapi or backup compression
async _chainDeltaVdiBackups ({handler, dir}) {
const backups = await this._listVdiBackups(handler, dir)
for (let i = 1; i < backups.length; i++) {
const childPath = dir + '/' + backups[i]
const modified = await chainVhd(handler, dir + '/' + backups[i - 1], handler, childPath)
if (modified) {
await handler.refreshChecksum(childPath)
}
}
}
async _mergeDeltaVdiBackups ({handler, dir, depth}) {
const backups = await this._listVdiBackups(handler, dir)
let i = backups.length - depth
@@ -431,16 +739,7 @@ export default class {
@deferrable.onFailure
async rollingDeltaVmBackup ($onFailure, {vm, remoteId, tag, depth}) {
const remote = await this._xo.getRemote(remoteId)
if (!remote) {
throw new Error(`No such Remote ${remoteId}`)
}
if (!remote.enabled) {
throw new Error(`Remote ${remoteId} is disabled`)
}
const handler = await this._xo.getRemoteHandler(remote)
const handler = await this._xo.getRemoteHandler(remoteId)
const xapi = this._xo.getXapi(vm)
vm = xapi.getObject(vm._xapiId)
@@ -486,7 +785,7 @@ export default class {
stream => stream.cancel()
))
await xapi.deleteVm(delta.vm.$id, true)
await xapi.deleteVm(delta.vm.uuid, true)
})
// Save vdis.
@@ -514,15 +813,15 @@ export default class {
)
const fulFilledVdiBackups = []
let success = true
let error
// One or many vdi backups have failed.
for (const vdiBackup of vdiBackups) {
if (vdiBackup.isFulfilled()) {
fulFilledVdiBackups.push(vdiBackup)
} else {
console.error(`Rejected backup: ${vdiBackup.reason()}`)
success = false
error = vdiBackup.reason()
console.error('Rejected backup:', error)
}
}
@@ -534,8 +833,8 @@ export default class {
)
})
if (!success) {
throw new Error('Rolling delta vm backup failed.')
if (error) {
throw error
}
const date = safeDateFormat(new Date())
@@ -552,7 +851,9 @@ export default class {
mapToArray(vdiBackups, vdiBackup => {
const backupName = vdiBackup.value()
const backupDirectory = backupName.slice(0, backupName.lastIndexOf('/'))
return this._mergeDeltaVdiBackups({ handler, dir: `${dir}/${backupDirectory}`, depth })
const backupDir = `${dir}/${backupDirectory}`
return this._mergeDeltaVdiBackups({ handler, dir: backupDir, depth })
.then(() => { this._chainDeltaVdiBackups({ handler, dir: backupDir }) })
})
)
@@ -568,10 +869,13 @@ export default class {
}
async importDeltaVmBackup ({sr, remoteId, filePath}) {
filePath = `${filePath}${DELTA_BACKUP_EXT}`
const { datetime } = parseVmBackupPath(filePath)
const handler = await this._xo.getRemoteHandler(remoteId)
const xapi = this._xo.getXapi(sr)
const delta = JSON.parse(await handler.readFile(`${filePath}${DELTA_BACKUP_EXT}`))
const delta = JSON.parse(await handler.readFile(filePath))
let vm
const { version } = delta
@@ -598,9 +902,12 @@ export default class {
)
)
delta.vm.name_label += ` (${shortDate(datetime)})`
delta.vm.tags.push('restored from backup')
vm = await xapi.importDeltaVm(delta, {
srId: sr._xapiId,
disableStartAfterImport: false
disableStartAfterImport: false,
srId: sr._xapiId
})
} else {
throw new Error(`Unsupported delta backup version: ${version}`)
@@ -612,16 +919,7 @@ export default class {
// -----------------------------------------------------------------
async backupVm ({vm, remoteId, file, compress, onlyMetadata}) {
const remote = await this._xo.getRemote(remoteId)
if (!remote) {
throw new Error(`No such Remote ${remoteId}`)
}
if (!remote.enabled) {
throw new Error(`Backup remote ${remoteId} is disabled`)
}
const handler = await this._xo.getRemoteHandler(remote)
const handler = await this._xo.getRemoteHandler(remoteId)
return this._backupVm(vm, handler, file, {compress, onlyMetadata})
}
@@ -639,16 +937,7 @@ export default class {
}
async rollingBackupVm ({vm, remoteId, tag, depth, compress, onlyMetadata}) {
const remote = await this._xo.getRemote(remoteId)
if (!remote) {
throw new Error(`No such Remote ${remoteId}`)
}
if (!remote.enabled) {
throw new Error(`Backup remote ${remoteId} is disabled`)
}
const handler = await this._xo.getRemoteHandler(remote)
const handler = await this._xo.getRemoteHandler(remoteId)
const files = await handler.list()
@@ -689,12 +978,12 @@ export default class {
const sourceXapi = this._xo.getXapi(vm)
vm = sourceXapi.getObject(vm._xapiId)
const vms = []
const vms = {}
forEach(sr.$VDIs, vdi => {
const vbds = vdi.$VBDs
const vm = vbds && vbds[0] && vbds[0].$VM
if (vm && reg.test(vm.name_label)) {
vms.push(vm)
vms[vm.$id] = vm
}
})
const olderCopies = sortBy(vms, 'name_label')
@@ -705,11 +994,121 @@ export default class {
})
await targetXapi.addTag(drCopy.$id, 'Disaster Recovery')
const promises = []
for (let surplus = olderCopies.length - (depth - 1); surplus > 0; surplus--) {
const oldDRVm = olderCopies.shift()
promises.push(targetXapi.deleteVm(oldDRVm.$id, true))
const n = 1 - depth
await Promise.all(mapToArray(n ? olderCopies.slice(0, n) : olderCopies, vm =>
// Do not consider a failure to delete an old copy as a fatal error.
targetXapi.deleteVm(vm.$id, true)::pCatch(noop)
))
}
// -----------------------------------------------------------------
_mountVhd (remoteId, vhdPath) {
return Promise.all([
this._xo.getRemoteHandler(remoteId),
tmpDir()
]).then(([ handler, mountDir ]) => {
if (!handler._getRealPath) {
throw new Error(`this remote is not supported`)
}
const remotePath = handler._getRealPath()
vhdPath = resolveSubpath(remotePath, vhdPath)
return Promise.resolve().then(() => {
// TODO: remove when no longer necessary.
//
// Currently, the filenames of the VHDs change over time
// (delta → full), but the JSON is not updated, so the
// VHD path may need to be fixed.
return endsWith(vhdPath, '_delta.vhd')
? pFromCallback(cb => stat(vhdPath, cb)).then(
() => vhdPath,
error => {
if (error && error.code === 'ENOENT') {
return `${vhdPath.slice(0, -10)}_full.vhd`
}
}
)
: vhdPath
}).then(vhdPath => execa('vhdimount', [ vhdPath, mountDir ])).then(() =>
pFromCallback(cb => readdir(mountDir, cb)).then(entries => {
let max = 0
forEach(entries, entry => {
const matches = /^vhdi(\d+)/.exec(entry)
if (matches) {
const value = +matches[1]
if (value > max) {
max = value
}
}
})
if (!max) {
throw new Error('no disks found')
}
return {
path: `${mountDir}/vhdi${max}`,
unmount: once(() => execa('fusermount', [ '-uz', mountDir ]))
}
})
)
})
}
_mountPartition (remoteId, vhdPath, partitionId) {
return this._mountVhd(remoteId, vhdPath).then(device =>
mountPartition2(device, partitionId).then(partition => ({
...partition,
unmount: () => partition.unmount().then(device.unmount)
})).catch(error => device.unmount().then(() => {
throw error
}))
)
}
@deferrable
async scanDiskBackup ($defer, remoteId, vhdPath) {
const device = await this._mountVhd(remoteId, vhdPath)
$defer(device.unmount)
return {
partitions: await listPartitions2(device)
}
await Promise.all(promises)
}
@deferrable
async scanFilesInDiskBackup ($defer, remoteId, vhdPath, partitionId, path) {
const partition = await this._mountPartition(remoteId, vhdPath, partitionId)
$defer(partition.unmount)
path = resolveSubpath(partition.path, path)
const entries = await pFromCallback(cb => readdir(path, cb))
const entriesMap = {}
await Promise.all(mapToArray(entries, async name => {
const stats = await pFromCallback(cb => stat(`${path}/${name}`, cb))::pCatch(noop)
if (stats) {
entriesMap[stats.isDirectory() ? `${name}/` : name] = {}
}
}))
return entriesMap
}
async fetchFilesInDiskBackup (remoteId, vhdPath, partitionId, paths) {
const partition = await this._mountPartition(remoteId, vhdPath, partitionId)
let i = 0
const onEnd = () => {
if (!--i) {
partition.unmount()
}
}
return mapToArray(paths, path => {
++i
return createReadStream(resolveSubpath(partition.path, path)).once('end', onEnd)
})
}
}
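
Two conventions from the code above are worth spelling out; the values below are made up:

// parseVmBackupPath only accepts two filename shapes:
//
//   '20170115T020000Z_nightly_my-vm.xva'
//     → { type: 'xva', tag: 'nightly', name: 'my-vm', datetime: <unix timestamp>, id: <path> }
//
//   'vm_delta_nightly_a1b2c3d4-0000-4000-8000-000000000000/20170115T020000Z_my-vm.json'
//     → { type: 'delta', tag: 'nightly', uuid: 'a1b2c3d4-0000-4000-8000-000000000000',
//         name: 'my-vm', datetime: <unix timestamp> }
//
// anything else throws 'invalid VM backup filename'.
//
// Partition ids passed to scanFilesInDiskBackup / fetchFilesInDiskBackup are either a
// plain partx id or '<pvId>/<vgName>/<lvName>' for an LVM logical volume, which
// mountPartition2 resolves via losetup and vgchange before mounting.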

@@ -0,0 +1,33 @@
import { map, noop } from '../utils'
import { all as pAll } from 'promise-toolbox'
export default class ConfigManagement {
constructor () {
this._managers = { __proto__: null }
}
addConfigManager (id, exporter, importer) {
const managers = this._managers
if (id in managers) {
throw new Error(`${id} is already taken`)
}
this._managers[id] = { exporter, importer }
}
exportConfig () {
return map(this._managers, ({ exporter }, key) => exporter())::pAll()
}
importConfig (config) {
const managers = this._managers
return map(config, (entry, key) => {
const manager = managers[key]
if (manager) {
return manager.importer(entry)
}
})::pAll().then(noop)
}
}
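
A hedged usage sketch of this mixin (the 'myFeature' id and db object are illustrative; the ip-pools and jobs registrations below follow exactly this pattern):

xo.addConfigManager('myFeature',
  () => db.get(),                  // exporter: produce the serializable state
  entries => db.update(entries)    // importer: restore a previously exported state
)

// exportConfig() resolves to an object keyed by manager id;
// importConfig(config) dispatches each entry back to the matching importer
// and silently ignores ids with no registered manager.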

src/xo-mixins/ip-pools.js (new file, 307 lines)

@@ -0,0 +1,307 @@
import concat from 'lodash/concat'
import countBy from 'lodash/countBy'
import diff from 'lodash/difference'
import findIndex from 'lodash/findIndex'
import flatten from 'lodash/flatten'
import highland from 'highland'
import includes from 'lodash/includes'
import isObject from 'lodash/isObject'
import keys from 'lodash/keys'
import mapValues from 'lodash/mapValues'
import pick from 'lodash/pick'
import remove from 'lodash/remove'
import synchronized from 'decorator-synchronized'
import { noSuchObject } from 'xo-common/api-errors'
import { fromCallback } from 'promise-toolbox'
import {
forEach,
generateUnsecureToken,
isEmpty,
lightSet,
mapToArray,
streamToArray,
throwFn
} from '../utils'
// ===================================================================
const normalize = ({
addresses,
id = throwFn('id is a required field'),
name = '',
networks,
resourceSets
}) => ({
addresses,
id,
name,
networks,
resourceSets
})
const _isAddressInIpPool = (address, network, ipPool) => (
ipPool.addresses && (address in ipPool.addresses) &&
includes(ipPool.networks, isObject(network) ? network.id : network)
)
// ===================================================================
// Note: an address cannot be in two different pools sharing a
// network.
export default class IpPools {
constructor (xo) {
this._store = null
this._xo = xo
xo.on('start', async () => {
this._store = await xo.getStore('ipPools')
xo.addConfigManager('ipPools',
() => this.getAllIpPools(),
ipPools => Promise.all(mapToArray(ipPools, ipPool => this._save(ipPool)))
)
})
}
async createIpPool ({ addresses, name, networks }) {
const id = await this._generateId()
await this._save({
addresses,
id,
name,
networks
})
return id
}
async deleteIpPool (id) {
const store = this._store
if (await store.has(id)) {
await Promise.all(mapToArray(await this._xo.getAllResourceSets(), async set => {
await this._xo.removeLimitFromResourceSet(`ipPool:${id}`, set.id)
return this._xo.removeIpPoolFromResourceSet(id, set.id)
}))
await this._removeIpAddressesFromVifs(
mapValues((await this.getIpPool(id)).addresses, 'vifs')
)
return store.del(id)
}
throw noSuchObject(id, 'ipPool')
}
_getAllIpPools (filter) {
return streamToArray(this._store.createValueStream(), {
filter,
mapper: normalize
})
}
async getAllIpPools (userId) {
let filter
if (userId != null) {
const user = await this._xo.getUser(userId)
if (user.permission !== 'admin') {
const resourceSets = await this._xo.getAllResourceSets(userId)
const ipPools = lightSet(flatten(mapToArray(resourceSets, 'ipPools')))
filter = ({ id }) => ipPools.has(id)
}
}
return this._getAllIpPools(filter)
}
getIpPool (id) {
return this._store.get(id).then(normalize, error => {
throw error.notFound ? noSuchObject(id, 'ipPool') : error
})
}
async _getAddressIpPool (address, network) {
const ipPools = await this._getAllIpPools(ipPool => _isAddressInIpPool(address, network, ipPool))
return ipPools && ipPools[0]
}
// Returns a map that indicates how many IPs from each IP pool the VM uses
// e.g.: { 'ipPool:abc': 3, 'ipPool:xyz': 7 }
async computeVmIpPoolsUsage (vm) {
const vifs = vm.VIFs
const ipPools = []
for (const vifId of vifs) {
const { allowedIpv4Addresses, allowedIpv6Addresses, $network } = this._xo.getObject(vifId)
for (const address of concat(allowedIpv4Addresses, allowedIpv6Addresses)) {
const ipPool = await this._getAddressIpPool(address, $network)
ipPool && ipPools.push(ipPool.id)
}
}
return countBy(ipPools, ({ id }) => `ipPool:${id}`)
}
@synchronized
allocIpAddresses (vifId, addAddresses, removeAddresses) {
const updatedIpPools = {}
const limits = {}
const xoVif = this._xo.getObject(vifId)
const xapi = this._xo.getXapi(xoVif)
const vif = xapi.getObject(xoVif._xapiId)
const allocAndSave = (() => {
const resourseSetId = xapi.xo.getData(vif.VM, 'resourceSet')
return () => {
const saveIpPools = () => Promise.all(mapToArray(updatedIpPools, ipPool => this._save(ipPool)))
return resourseSetId
? this._xo.allocateLimitsInResourceSet(limits, resourseSetId).then(
saveIpPools
)
: saveIpPools()
}
})()
return fromCallback(cb => {
const network = vif.$network
const networkId = network.$id
const isVif = id => id === vifId
highland(this._store.createValueStream()).each(ipPool => {
const { addresses, networks } = updatedIpPools[ipPool.id] || ipPool
if (!(addresses && networks && includes(networks, networkId))) {
return false
}
let allocations = 0
let changed = false
forEach(removeAddresses, address => {
let vifs, i
if (
(vifs = addresses[address]) &&
(vifs = vifs.vifs) &&
(i = findIndex(vifs, isVif)) !== -1
) {
vifs.splice(i, 1)
--allocations
changed = true
}
})
forEach(addAddresses, address => {
const data = addresses[address]
if (!data) {
return
}
const vifs = data.vifs || (data.vifs = [])
if (!includes(vifs, vifId)) {
vifs.push(vifId)
++allocations
changed = true
}
})
if (changed) {
const { id } = ipPool
updatedIpPools[id] = ipPool
limits[`ipPool:${id}`] = (limits[`ipPool:${id}`] || 0) + allocations
}
}).toCallback(cb)
}).then(allocAndSave)
}
async _removeIpAddressesFromVifs (mapAddressVifs) {
const mapVifAddresses = {}
forEach(mapAddressVifs, (vifs, address) => {
forEach(vifs, vifId => {
if (mapVifAddresses[vifId]) {
mapVifAddresses[vifId].push(address)
} else {
mapVifAddresses[vifId] = [ address ]
}
})
})
const { getXapi } = this._xo
return Promise.all(mapToArray(mapVifAddresses, (addresses, vifId) => {
let vif
try {
// The IP may not have been correctly deallocated from the IP pool when the VIF was deleted
vif = this._xo.getObject(vifId)
} catch (error) {
return
}
const { allowedIpv4Addresses, allowedIpv6Addresses } = vif
remove(allowedIpv4Addresses, address => includes(addresses, address))
remove(allowedIpv6Addresses, address => includes(addresses, address))
this.allocIpAddresses(vifId, undefined, concat(allowedIpv4Addresses, allowedIpv6Addresses))
return getXapi(vif).editVif(vif._xapiId, {
ipv4Allowed: allowedIpv4Addresses,
ipv6Allowed: allowedIpv6Addresses
})
}))
}
async updateIpPool (id, {
addresses,
name,
networks,
resourceSets
}) {
const ipPool = await this.getIpPool(id)
const previousAddresses = { ...ipPool.addresses }
name != null && (ipPool.name = name)
if (addresses) {
const addresses_ = ipPool.addresses || {}
forEach(addresses, (props, address) => {
if (props === null) {
delete addresses_[address]
} else {
addresses_[address] = props
}
})
// Remove the addresses that are no longer in the IP pool from the concerned VIFs
const deletedAddresses = diff(keys(previousAddresses), keys(addresses_))
await this._removeIpAddressesFromVifs(pick(previousAddresses, deletedAddresses))
if (isEmpty(addresses_)) {
delete ipPool.addresses
} else {
ipPool.addresses = addresses_
}
}
// TODO: Implement patching like for addresses.
if (networks) {
ipPool.networks = networks
}
// TODO: Implement patching like for addresses.
if (resourceSets) {
ipPool.resourceSets = resourceSets
}
await this._save(ipPool)
}
async _generateId () {
let id
do {
id = generateUnsecureToken(8)
} while (await this._store.has(id))
return id
}
_save (ipPool) {
ipPool = normalize(ipPool)
return this._store.put(ipPool.id, ipPool)
}
}
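
Each of the mixins in this changeset follows the same pattern visible above: on the xo `start` event it registers an exporter/importer pair with xo.addConfigManager(namespace, exporter, importer) so its data can be dumped and restored as part of a configuration backup. Below is a minimal usage sketch of the IpPools mixin itself, assuming its methods are exposed on the `xo` instance as the API layer uses them (all ids and addresses are made up).

// Not part of the diff — illustrative only.
async function ipPoolExample (xo) {
  // `addresses` maps an IP to a props object; the mixin adds a `vifs` array on allocation.
  const id = await xo.createIpPool({
    name: 'lab',
    networks: [ 'network-id' ],
    addresses: { '192.168.0.10': {}, '192.168.0.11': {} }
  })

  // Add one address to a VIF and release another in a single synchronized call.
  await xo.allocIpAddresses('vif-id', [ '192.168.0.10' ], [ '192.168.0.11' ])

  // Passing null as an address's props removes it from the pool (see updateIpPool above).
  await xo.updateIpPool(id, { addresses: { '192.168.0.11': null } })

  // Per-pool usage for a VM, e.g. { 'ipPool:abc': 3 }.
  return xo.computeVmIpPoolsUsage(xo.getObject('vm-id'))
}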

View File

@@ -1,28 +1,29 @@
import assign from 'lodash/assign'
import JobExecutor from '../job-executor'
import { Jobs } from '../models/job'
import {
GenericError,
NoSuchObject
} from '../api-errors'
// ===================================================================
class NoSuchJob extends NoSuchObject {
constructor (id) {
super(id, 'job')
}
}
import { mapToArray } from '../utils'
import { noSuchObject } from 'xo-common/api-errors'
// ===================================================================
export default class {
constructor (xo) {
this._executor = new JobExecutor(xo)
this._jobs = new Jobs({
const jobsDb = this._jobs = new Jobs({
connection: xo._redis,
prefix: 'xo:job',
indexes: ['user_id', 'key']
})
xo.on('start', () => {
xo.addConfigManager('jobs',
() => jobsDb.get(),
jobs => Promise.all(mapToArray(jobs, job =>
jobsDb.save(job)
))
)
})
}
async getAllJobs () {
@@ -32,20 +33,22 @@ export default class {
async getJob (id) {
const job = await this._jobs.first(id)
if (!job) {
throw new NoSuchJob(id)
throw noSuchObject(id, 'job')
}
return job.properties
}
async createJob (userId, job) {
async createJob (job) {
// TODO: use plain objects
const job_ = await this._jobs.create(userId, job)
const job_ = await this._jobs.create(job)
return job_.properties
}
async updateJob (job) {
return /* await */ this._jobs.save(job)
async updateJob ({id, userId, type, name, key, method, paramsVector}) {
const oldJob = await this.getJob(id)
assign(oldJob, {userId, type, name, key, method, paramsVector})
return /* await */ this._jobs.save(oldJob)
}
async removeJob (id) {
@@ -53,24 +56,10 @@ export default class {
}
async runJobSequence (idSequence) {
const notFound = []
for (const id of idSequence) {
let job
try {
job = await this.getJob(id)
} catch (error) {
if (error instanceof NoSuchJob) {
notFound.push(id)
} else {
throw error
}
}
if (job) {
await this._executor.exec(job)
}
}
if (notFound.length > 0) {
throw new GenericError(`The following jobs were not found: ${notFound.join()}`)
const jobs = await Promise.all(mapToArray(idSequence, id => this.getJob(id)))
for (const job of jobs) {
await this._executor.exec(job)
}
}
}
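
Note the behavioural change in runJobSequence: the old version collected missing job ids and still executed the ones it found, while the new version resolves every job up front with Promise.all, so a single unknown id rejects with noSuchObject before any job runs. A hedged sketch of the flow, assuming the mixin's methods are exposed on `xo` and that a created job's properties carry its generated id (field values are illustrative, mirroring updateJob's signature):

// Not part of the diff — illustrative only.
async function jobExample (xo) {
  const { id } = await xo.createJob({
    userId: 'user-id',
    type: 'call',
    name: 'nightly rolling backup',
    key: 'rollingBackup',
    method: 'vm.rollingBackup',
    paramsVector: { type: 'crossProduct', items: [] }
  })

  // Fails fast if any id in the sequence does not exist, then runs the jobs one by one.
  await xo.runJobSequence([ id ])
}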

View File

@@ -2,9 +2,9 @@ import createJsonSchemaValidator from 'is-my-json-valid'
import { PluginsMetadata } from '../models/plugin-metadata'
import {
InvalidParameters,
NoSuchObject
} from '../api-errors'
invalidParameters,
noSuchObject
} from 'xo-common/api-errors'
import {
createRawObject,
isFunction,
@@ -13,14 +13,6 @@ import {
// ===================================================================
class NoSuchPlugin extends NoSuchObject {
constructor (id) {
super(id, 'plugin')
}
}
// ===================================================================
export default class {
constructor (xo) {
this._plugins = createRawObject()
@@ -29,12 +21,21 @@ export default class {
connection: xo._redis,
prefix: 'xo:plugin-metadata'
})
xo.on('start', () => {
xo.addConfigManager('plugins',
() => this._pluginsMetadata.get(),
plugins => Promise.all(mapToArray(plugins, plugin =>
this._pluginsMetadata.save(plugin)
))
)
})
}
_getRawPlugin (id) {
const plugin = this._plugins[id]
if (!plugin) {
throw new NoSuchPlugin(id)
throw noSuchObject(id, 'plugin')
}
return plugin
}
@@ -50,16 +51,20 @@ export default class {
name,
instance,
configurationSchema,
configurationPresets,
testSchema,
version
) {
const id = name
const plugin = this._plugins[id] = {
configured: !configurationSchema,
configurationPresets,
configurationSchema,
configured: !configurationSchema,
id,
instance,
name,
testable: isFunction(instance.test),
testSchema,
unloadable: isFunction(instance.unload),
version
}
@@ -67,7 +72,6 @@ export default class {
const metadata = await this._getPluginMetadata(id)
let autoload = true
let configuration
if (metadata) {
({
autoload,
@@ -102,9 +106,12 @@ export default class {
async _getPlugin (id) {
const {
configurationPresets,
configurationSchema,
loaded,
name,
testable,
testSchema,
unloadable,
version
} = this._getRawPlugin(id)
@@ -121,7 +128,10 @@ export default class {
unloadable,
version,
configuration,
configurationSchema
configurationPresets,
configurationSchema,
testable,
testSchema
}
}
@@ -133,13 +143,23 @@ export default class {
// Validate the configuration and configure the plugin instance.
async _configurePlugin (plugin, configuration) {
if (!plugin.configurationSchema) {
throw new InvalidParameters('plugin not configurable')
const { configurationSchema } = plugin
if (!configurationSchema) {
throw invalidParameters('plugin not configurable')
}
const validate = createJsonSchemaValidator(plugin.configurationSchema)
// See: https://github.com/mafintosh/is-my-json-valid/issues/116
if (configuration == null) {
throw invalidParameters([{
field: 'data',
message: 'is the wrong type'
}])
}
const validate = createJsonSchemaValidator(configurationSchema)
if (!validate(configuration)) {
throw new InvalidParameters(validate.errors)
throw invalidParameters(validate.errors)
}
// Sets the plugin configuration.
@@ -178,11 +198,11 @@ export default class {
async loadPlugin (id) {
const plugin = this._getRawPlugin(id)
if (plugin.loaded) {
throw new InvalidParameters('plugin already loaded')
throw invalidParameters('plugin already loaded')
}
if (!plugin.configured) {
throw new InvalidParameters('plugin not configured')
throw invalidParameters('plugin not configured')
}
await plugin.instance.load()
@@ -192,11 +212,11 @@ export default class {
async unloadPlugin (id) {
const plugin = this._getRawPlugin(id)
if (!plugin.loaded) {
throw new InvalidParameters('plugin already unloaded')
throw invalidParameters('plugin already unloaded')
}
if (plugin.unloadable === false) {
throw new InvalidParameters('plugin cannot be unloaded')
throw invalidParameters('plugin cannot be unloaded')
}
await plugin.instance.unload()
@@ -206,4 +226,31 @@ export default class {
async purgePluginConfiguration (id) {
await this._pluginsMetadata.merge(id, { configuration: undefined })
}
async testPlugin (id, data) {
const plugin = this._getRawPlugin(id)
if (!plugin.testable) {
throw invalidParameters('plugin not testable')
}
if (!plugin.loaded) {
throw invalidParameters('plugin not loaded')
}
const { testSchema } = plugin
if (testSchema) {
if (data == null) {
throw invalidParameters([{
field: 'data',
message: 'is the wrong type'
}])
}
const validate = createJsonSchemaValidator(testSchema)
if (!validate(data)) {
throw invalidParameters(validate.errors)
}
}
await plugin.instance.test(data)
}
}
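
The new testPlugin method complements the registration changes above: a plugin is testable only if its instance exposes a test() function, it must be loaded, and when a testSchema was provided the test data is validated (null/undefined is rejected) before instance.test(data) is called. A minimal sketch, assuming a plugin registered under the name 'backup-reports' and illustrative test data:

// Not part of the diff — illustrative only.
async function testPluginExample (xo) {
  // Throws invalidParameters('plugin not testable') without a test() method,
  // 'plugin not loaded' if the plugin is not loaded, and schema errors otherwise.
  await xo.testPlugin('backup-reports', { to: 'admin@example.org' })
}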

View File

@@ -1,26 +1,18 @@
import { noSuchObject } from 'xo-common/api-errors'
import RemoteHandlerLocal from '../remote-handlers/local'
import RemoteHandlerNfs from '../remote-handlers/nfs'
import RemoteHandlerSmb from '../remote-handlers/smb'
import {
forEach
forEach,
mapToArray
} from '../utils'
import {
NoSuchObject
} from '../api-errors'
import {
Remotes
} from '../models/remote'
// ===================================================================
class NoSuchRemote extends NoSuchObject {
constructor (id) {
super(id, 'remote')
}
}
// ===================================================================
export default class {
constructor (xo) {
this._remotes = new Remotes({
@@ -30,17 +22,29 @@ export default class {
})
xo.on('start', async () => {
xo.addConfigManager('remotes',
() => this._remotes.get(),
remotes => Promise.all(mapToArray(remotes, remote =>
this._remotes.save(remote)
))
)
await this.initRemotes()
await this.syncAllRemotes()
})
xo.on('stop', () => this.forgetAllRemotes())
}
async getRemoteHandler (remote) {
async getRemoteHandler (remote, ignoreDisabled) {
if (typeof remote === 'string') {
remote = await this.getRemote(remote)
}
const Handler = {
if (!(ignoreDisabled || remote.enabled)) {
throw new Error('remote is disabled')
}
const HANDLERS = {
file: RemoteHandlerLocal,
smb: RemoteHandlerSmb,
nfs: RemoteHandlerNfs
@@ -48,10 +52,17 @@ export default class {
// FIXME: should be done in xo-remote-parser.
const type = remote.url.split('://')[0]
if (!Handler[type]) {
const Handler = HANDLERS[type]
if (!Handler) {
throw new Error('Unhandled remote type')
}
return new Handler[type](remote)
return new Handler(remote)
}
async testRemote (remote) {
const handler = await this.getRemoteHandler(remote, true)
return handler.test()
}
async getAllRemotes () {
@@ -61,7 +72,7 @@ export default class {
async _getRemote (id) {
const remote = await this._remotes.first(id)
if (!remote) {
throw new NoSuchRemote(id)
throw noSuchObject(id, 'remote')
}
return remote
@@ -79,7 +90,7 @@ export default class {
async updateRemote (id, {name, url, enabled, error}) {
const remote = await this._getRemote(id)
this._updateRemote(remote, {name, url, enabled, error})
const handler = await this.getRemoteHandler(remote.properties)
const handler = await this.getRemoteHandler(remote.properties, true)
const props = await handler.sync()
this._updateRemote(remote, props)
return (await this._remotes.save(remote)).properties
@@ -97,7 +108,7 @@ export default class {
}
async removeRemote (id) {
const handler = await this.getRemoteHandler(id)
const handler = await this.getRemoteHandler(id, true)
await handler.forget()
await this._remotes.remove(id)
}
@@ -115,7 +126,7 @@ export default class {
const remotes = await this.getAllRemotes()
for (let remote of remotes) {
try {
(await this.getRemoteHandler(remote)).forget()
(await this.getRemoteHandler(remote, true)).forget()
} catch (_) {}
}
}
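
The handler lookup above dispatches on the scheme prefix of remote.url (still flagged as a FIXME to move into xo-remote-parser), and getRemoteHandler now refuses disabled remotes unless its second argument is true, which testRemote, updateRemote, removeRemote and forgetAllRemotes rely on. A standalone sketch of the dispatch (URL value is illustrative):

// Not part of the diff — illustrative only.
const schemeToHandler = {
  file: 'RemoteHandlerLocal', // local directory
  nfs: 'RemoteHandlerNfs',
  smb: 'RemoteHandlerSmb'
}

function handlerNameFor (remote) {
  const type = remote.url.split('://')[0]
  const name = schemeToHandler[type]
  if (!name) {
    throw new Error('Unhandled remote type')
  }
  return name
}

// handlerNameFor({ url: 'nfs://10.0.0.1:/exports/backups' }) === 'RemoteHandlerNfs'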

View File

@@ -1,30 +1,63 @@
import every from 'lodash.every'
import remove from 'lodash.remove'
import some from 'lodash.some'
import every from 'lodash/every'
import keyBy from 'lodash/keyBy'
import remove from 'lodash/remove'
import some from 'lodash/some'
import synchronized from 'decorator-synchronized'
import {
NoSuchObject,
Unauthorized
} from '../api-errors'
noSuchObject,
unauthorized
} from 'xo-common/api-errors'
import {
forEach,
generateUnsecureToken,
isObject,
lightSet,
map,
mapToArray,
streamToArray
} from '../utils'
// ===================================================================
class NoSuchResourceSet extends NoSuchObject {
constructor (id) {
super(id, 'resource set')
const VM_RESOURCES = {
cpus: true,
disk: true,
disks: true,
memory: true,
vms: true
}
const computeVmResourcesUsage = vm => {
const processed = {}
let disks = 0
let disk = 0
forEach(vm.$VBDs, vbd => {
let vdi, vdiId
if (
vbd.type === 'Disk' &&
!processed[vdiId = vbd.VDI] &&
(vdi = vbd.$VDI)
) {
processed[vdiId] = true
++disks
disk += +vdi.virtual_size
}
})
return {
cpus: vm.VCPUs_at_startup,
disk,
disks,
memory: vm.memory_dynamic_max,
vms: 1
}
}
const normalize = set => ({
id: set.id,
ipPools: set.ipPools || [],
limits: set.limits
? map(set.limits, limit => isObject(limit)
? limit
@@ -47,6 +80,13 @@ export default class {
this._store = null
xo.on('start', async () => {
xo.addConfigManager('resourceSets',
() => this.getAllResourceSets(),
resourceSets => Promise.all(mapToArray(resourceSets, resourceSet =>
this._save(resourceSet)
))
)
this._store = await xo.getStore('resourceSets')
})
}
@@ -79,34 +119,14 @@ export default class {
// The set does not contain ALL objects.
!every(objectIds, lightSet(set.objects).has)
)) {
throw new Unauthorized()
throw unauthorized()
}
}
computeVmResourcesUsage (vm) {
const processed = {}
let disks = 0
let disk = 0
forEach(this._xo.getXapi(vm).getObject(vm._xapiId).$VBDs, (vbd) => {
let vdi, vdiId
if (
vbd.type === 'Disk' &&
!processed[vdiId = vbd.VDI] &&
(vdi = vbd.$VDI)
) {
processed[vdiId] = true
++disks
disk += +vdi.virtual_size
}
})
return {
cpus: vm.CPUs.number,
disk,
disks,
memory: vm.memory.size,
vms: 1
}
return computeVmResourcesUsage(
this._xo.getXapi(vm).getObject(vm._xapiId)
)
}
async createResourceSet (name, subjects = undefined, objects = undefined, limits = undefined) {
@@ -131,14 +151,15 @@ export default class {
return store.del(id)
}
throw new NoSuchResourceSet(id)
throw noSuchObject(id, 'resourceSet')
}
async updateResourceSet (id, {
name = undefined,
subjects = undefined,
objects = undefined,
limits = undefined
limits = undefined,
ipPools = undefined
}) {
const set = await this.getResourceSet(id)
if (name) {
@@ -169,6 +190,9 @@ export default class {
}
})
}
if (ipPools) {
set.ipPools = ipPools
}
await this._save(set)
}
@@ -194,7 +218,7 @@ export default class {
getResourceSet (id) {
return this._store.get(id).then(normalize, error => {
if (error.notFound) {
throw new NoSuchResourceSet(id)
throw noSuchObject(id, 'resourceSet')
}
throw error
@@ -209,7 +233,19 @@ export default class {
async removeObjectFromResourceSet (objectId, setId) {
const set = await this.getResourceSet(setId)
remove(set.objects)
remove(set.objects, id => id === objectId)
await this._save(set)
}
async addIpPoolToResourceSet (ipPoolId, setId) {
const set = await this.getResourceSet(setId)
set.ipPools.push(ipPoolId)
await this._save(set)
}
async removeIpPoolFromResourceSet (ipPoolId, setId) {
const set = await this.getResourceSet(setId)
remove(set.ipPools, id => id === ipPoolId)
await this._save(set)
}
@@ -221,7 +257,7 @@ export default class {
async removeSubjectToResourceSet (subjectId, setId) {
const set = await this.getResourceSet(setId)
remove(set.subjects, subjectId)
remove(set.subjects, id => id === subjectId)
await this._save(set)
}
@@ -237,6 +273,7 @@ export default class {
await this._save(set)
}
@synchronized
async allocateLimitsInResourceSet (limits, setId) {
const set = await this.getResourceSet(setId)
forEach(limits, (quantity, id) => {
@@ -252,6 +289,7 @@ export default class {
await this._save(set)
}
@synchronized
async releaseLimitsInResourceSet (limits, setId) {
const set = await this.getResourceSet(setId)
forEach(limits, (quantity, id) => {
@@ -268,10 +306,12 @@ export default class {
}
async recomputeResourceSetsLimits () {
const sets = await this.getAllResourceSets()
const sets = keyBy(await this.getAllResourceSets(), 'id')
forEach(sets, ({ limits }) => {
forEach(limits, (limit, id) => {
limit.available = limit.total
if (VM_RESOURCES[id]) { // only reset VMs related limits
limit.available = limit.total
}
})
})
@@ -292,13 +332,15 @@ export default class {
}
const { limits } = set
forEach(this.computeVmResourcesUsage(object), (usage, resource) => {
forEach(computeVmResourcesUsage(object), (usage, resource) => {
const limit = limits[resource]
limit.available -= usage
if (limit) {
limit.available -= usage
}
})
})
})
await Promise.all(map(sets, (set) => this._save(set)))
await Promise.all(mapToArray(sets, set => this._save(set)))
}
}
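
The usage object returned by computeVmResourcesUsage is what allocateLimitsInResourceSet charges against a set's limits (presumably by decrementing each limit's `available` counter), and recomputeResourceSetsLimits now resets only the VM-related limits listed in VM_RESOURCES, so ipPool:* counters survive a recompute. An illustrative usage object for a VM with two disks (numbers are made up):

// Not part of the diff — illustrative only.
const usage = {
  cpus: 2,              // VCPUs_at_startup
  disk: 21474836480,    // sum of virtual_size over the VM's 'Disk' VBDs, in bytes
  disks: 2,
  memory: 4294967296,   // memory_dynamic_max
  vms: 1
}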

View File

@@ -1,9 +1,10 @@
import { BaseError } from 'make-error'
import { NoSuchObject } from '../api-errors.js'
import { Schedules } from '../models/schedule'
import { noSuchObject } from 'xo-common/api-errors.js'
import { Schedules } from '../models/schedule'
import {
forEach,
mapToArray,
scheduleFn
} from '../utils'
@@ -19,12 +20,6 @@ export class ScheduleOverride extends SchedulerError {
}
}
export class NoSuchSchedule extends NoSuchObject {
constructor (scheduleOrId) {
super(scheduleOrId, 'schedule')
}
}
export class ScheduleNotEnabled extends SchedulerError {
constructor (scheduleOrId) {
super('Schedule ' + _resolveId(scheduleOrId) + ' is not enabled')
@@ -42,14 +37,23 @@ export class ScheduleAlreadyEnabled extends SchedulerError {
export default class {
constructor (xo) {
this.xo = xo
this._redisSchedules = new Schedules({
const schedules = this._redisSchedules = new Schedules({
connection: xo._redis,
prefix: 'xo:schedule',
indexes: ['user_id', 'job']
})
this._scheduleTable = undefined
xo.on('start', () => this._loadSchedules())
xo.on('start', () => {
xo.addConfigManager('schedules',
() => schedules.get(),
schedules_ => Promise.all(mapToArray(schedules_, schedule =>
schedules.save(schedule)
))
)
return this._loadSchedules()
})
xo.on('stop', () => this._disableAll())
}
@@ -74,8 +78,10 @@ export default class {
_enable (schedule) {
const { id } = schedule
const stopSchedule = scheduleFn(schedule.cron, () =>
this.xo.runJobSequence([ schedule.job ])
const stopSchedule = scheduleFn(
schedule.cron,
() => this.xo.runJobSequence([ schedule.job ]),
schedule.timezone
)
this._cronJobs[id] = stopSchedule
@@ -84,7 +90,7 @@ export default class {
_disable (scheduleOrId) {
if (!this._exists(scheduleOrId)) {
throw new NoSuchSchedule(scheduleOrId)
throw noSuchObject(scheduleOrId, 'schedule')
}
if (!this._isEnabled(scheduleOrId)) {
throw new ScheduleNotEnabled(scheduleOrId)
@@ -123,7 +129,7 @@ export default class {
const schedule = await this._redisSchedules.first(id)
if (!schedule) {
throw new NoSuchSchedule(id)
throw noSuchObject(id, 'schedule')
}
return schedule
@@ -137,8 +143,8 @@ export default class {
return /* await */ this._redisSchedules.get()
}
async createSchedule (userId, {job, cron, enabled, name}) {
const schedule_ = await this._redisSchedules.create(userId, job, cron, enabled, name)
async createSchedule (userId, { job, cron, enabled, name, timezone }) {
const schedule_ = await this._redisSchedules.create(userId, job, cron, enabled, name, timezone)
const schedule = schedule_.properties
this._add(schedule)
@@ -146,20 +152,25 @@ export default class {
return schedule
}
async updateSchedule (id, {job, cron, enabled, name}) {
async updateSchedule (id, { job, cron, enabled, name, timezone }) {
const schedule = await this._getSchedule(id)
if (job) schedule.set('job', job)
if (cron) schedule.set('cron', cron)
if (enabled !== undefined) schedule.set('enabled', enabled)
if (name !== undefined) schedule.set('name', name)
if (timezone === null) {
schedule.set('timezone', undefined) // Remove current timezone
} else if (timezone !== undefined) {
schedule.set('timezone', timezone)
}
await this._redisSchedules.save(schedule)
const { properties } = schedule
if (!this._exists(properties)) {
throw new NoSuchSchedule(properties)
throw noSuchObject(properties, 'schedule')
}
if (this._isEnabled(properties)) {
@@ -175,7 +186,7 @@ export default class {
try {
this._disable(id)
} catch (exc) {
if (!exc instanceof SchedulerError) {
if (!(exc instanceof SchedulerError)) {
throw exc
}
} finally {
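
The timezone handling in updateSchedule distinguishes three cases: an explicit string sets it, undefined leaves it untouched, and null removes it (presumably letting scheduleFn fall back to the server's local timezone). A short sketch, assuming the mixin's methods are exposed on `xo` and an existing schedule id:

// Not part of the diff — illustrative only.
async function scheduleTimezoneExample (xo) {
  await xo.updateSchedule('schedule-id', { timezone: 'Europe/Paris' }) // set it
  await xo.updateSchedule('schedule-id', { cron: '0 2 * * *' })        // timezone untouched
  await xo.updateSchedule('schedule-id', { timezone: null })           // remove it
}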

View File

@@ -1,6 +1,6 @@
import endsWith from 'lodash.endswith'
import endsWith from 'lodash/endsWith'
import levelup from 'level-party'
import startsWith from 'lodash.startswith'
import startsWith from 'lodash/startsWith'
import sublevel from 'level-sublevel'
import { ensureDir } from 'fs-promise'
@@ -54,7 +54,7 @@ const levelPromise = db => {
dbP[name] = db::value
} else {
dbP[`${name}Sync`] = db::value
dbP[name] = value::promisify(db)
dbP[name] = promisify(value, db)
}
})

View File

@@ -1,15 +1,15 @@
import filter from 'lodash.filter'
import includes from 'lodash.includes'
import filter from 'lodash/filter'
import includes from 'lodash/includes'
import {
hash,
needsRehash,
verify
} from 'hashy'
import {
InvalidCredential,
NoSuchObject
} from '../api-errors'
invalidCredentials,
noSuchObject
} from 'xo-common/api-errors'
import {
Groups
} from '../models/group'
@@ -17,8 +17,9 @@ import {
Users
} from '../models/user'
import {
createRawObject,
forEach,
isEmpty,
lightSet,
mapToArray,
noop,
pCatch
@@ -26,17 +27,10 @@ import {
// ===================================================================
class NoSuchGroup extends NoSuchObject {
constructor (id) {
super(id, 'group')
}
}
class NoSuchUser extends NoSuchObject {
constructor (id) {
super(id, 'user')
}
}
const addToArraySet = (set, value) => set
? (includes(set, value) ? set : set.concat(value))
: [ value ]
const removeFromArraySet = (set, value) => set && filter(set, current => current !== value)
// ===================================================================
@@ -46,22 +40,40 @@ export default class {
const redis = xo._redis
this._groups = new Groups({
const groupsDb = this._groups = new Groups({
connection: redis,
prefix: 'xo:group'
})
const users = this._users = new Users({
const usersDb = this._users = new Users({
connection: redis,
prefix: 'xo:user',
indexes: ['email']
})
xo.on('start', async () => {
if (!await users.exists()) {
xo.addConfigManager('groups',
() => groupsDb.get(),
groups => Promise.all(mapToArray(groups, group => groupsDb.save(group)))
)
xo.addConfigManager('users',
() => usersDb.get(),
users => Promise.all(mapToArray(users, async user => {
const userId = user.id
const conflictUsers = await usersDb.get({ email: user.email })
if (!isEmpty(conflictUsers)) {
await Promise.all(mapToArray(conflictUsers, ({ id }) =>
(id !== userId) && this.deleteUser(id)
))
}
return usersDb.save(user)
}))
)
if (!await usersDb.exists()) {
const email = 'admin@admin.net'
const password = 'admin'
await this.createUser(email, {password, permission: 'admin'})
await this.createUser({email, password, permission: 'admin'})
console.log('[INFO] Default user created:', email, ' with password', password)
}
})
@@ -69,13 +81,17 @@ export default class {
// -----------------------------------------------------------------
async createUser (email, { password, ...properties }) {
async createUser ({ name, password, ...properties }) {
if (name) {
properties.email = name
}
if (password) {
properties.pw_hash = await hash(password)
}
// TODO: use plain objects
const user = await this._users.create(email, properties)
const user = await this._users.create(properties)
return user.properties
}
@@ -89,12 +105,18 @@ export default class {
this._xo.getAuthenticationTokensForUser(id)
.then(tokens => {
forEach(tokens, token => {
this._xo._tokens.remove(token.id)
::pCatch(noop)
this._xo.deleteAuthenticationToken(id)::pCatch(noop)
})
})
::pCatch(noop) // Ignore any failures.
// Remove ACLs for this user.
this._xo.getAclsForSubject(id).then(acls => {
forEach(acls, acl => {
this._xo.removeAcl(id, acl.object, acl.action)::pCatch(noop)
})
})
// Remove the user from all its groups.
forEach(user.groups, groupId => {
this.getGroup(groupId)
@@ -109,7 +131,8 @@ export default class {
name = email,
password,
permission
permission,
preferences
}) {
const user = await this.getUser(id)
@@ -123,6 +146,18 @@ export default class {
user.pw_hash = await hash(password)
}
const newPreferences = { ...user.preferences }
forEach(preferences, (value, name) => {
if (value == null) {
delete newPreferences[name]
} else {
newPreferences[name] = value
}
})
user.preferences = isEmpty(newPreferences)
? undefined
: newPreferences
// TODO: remove
user.email = user.name
delete user.name
@@ -134,7 +169,7 @@ export default class {
async _getUser (id) {
const user = await this._users.first(id)
if (!user) {
throw new NoSuchUser(id)
throw noSuchObject(id, 'user')
}
return user
@@ -167,7 +202,7 @@ export default class {
return null
}
throw new NoSuchUser(username)
throw noSuchObject(username, 'user')
}
// Get or create a user associated with an auth provider.
@@ -185,14 +220,15 @@ export default class {
throw new Error(`registering ${name} user is forbidden`)
}
return /* await */ this.createUser(name, {
return /* await */ this.createUser({
name,
_provider: provider
})
}
async changeUserPassword (userId, oldPassword, newPassword) {
if (!(await this.checkUserPassword(userId, oldPassword, false))) {
throw new InvalidCredential()
throw invalidCredentials()
}
await this.updateUser(userId, { password: newPassword })
@@ -220,7 +256,6 @@ export default class {
// TODO: use plain objects.
const group = (await this._groups.create(name)).properties
group.users = JSON.parse(group.users)
return group
}
@@ -229,6 +264,13 @@ export default class {
await this._groups.remove(id)
// Remove ACLs for this group.
this._xo.getAclsForSubject(id).then(acls => {
forEach(acls, acl => {
this._xo.removeAcl(id, acl.object, acl.action)::pCatch(noop)
})
})
// Remove the group from all its users.
forEach(group.users, userId => {
this.getUser(userId)
@@ -248,7 +290,7 @@ export default class {
async getGroup (id) {
const group = await this._groups.first(id)
if (!group) {
throw new NoSuchGroup(id)
throw noSuchObject(id, 'group')
}
return group.properties
@@ -264,15 +306,8 @@ export default class {
this.getGroup(groupId)
])
const {groups} = user
if (!includes(groups, groupId)) {
user.groups.push(groupId)
}
const {users} = group
if (!includes(users, userId)) {
group.users.push(userId)
}
user.groups = addToArraySet(user.groups, groupId)
group.users = addToArraySet(group.users, userId)
await Promise.all([
this._users.save(user),
@@ -281,14 +316,12 @@ export default class {
}
async _removeUserFromGroup (userId, group) {
// TODO: maybe not iterating through the whole arrays?
group.users = filter(group.users, id => id !== userId)
group.users = removeFromArraySet(group.users, userId)
return this._groups.save(group)
}
async _removeGroupFromUser (groupId, user) {
// TODO: maybe not iterating through the whole arrays?
user.groups = filter(user.groups, id => id !== groupId)
user.groups = removeFromArraySet(user.groups, groupId)
return this._users.save(user)
}
@@ -307,39 +340,36 @@ export default class {
async setGroupUsers (groupId, userIds) {
const group = await this.getGroup(groupId)
const newUsersIds = createRawObject()
const oldUsersIds = createRawObject()
forEach(userIds, id => {
newUsersIds[id] = null
})
let newUsersIds = lightSet(userIds)
const oldUsersIds = []
forEach(group.users, id => {
if (id in newUsersIds) {
delete newUsersIds[id]
if (newUsersIds.has(id)) {
newUsersIds.delete(id)
} else {
oldUsersIds[id] = null
oldUsersIds.push(id)
}
})
newUsersIds = newUsersIds.toArray()
const getUser = ::this.getUser
const [newUsers, oldUsers] = await Promise.all([
Promise.all(mapToArray(newUsersIds, (_, id) => this.getUser(id))),
Promise.all(mapToArray(oldUsersIds, (_, id) => this.getUser(id)))
Promise.all(newUsersIds.map(getUser)),
Promise.all(oldUsersIds.map(getUser))
])
forEach(newUsers, user => {
const {groups} = user
if (!includes(groups, groupId)) {
user.groups.push(groupId)
}
user.groups = addToArraySet(user.groups, groupId)
})
forEach(oldUsers, user => {
user.groups = filter(user.groups, id => id !== groupId)
user.groups = removeFromArraySet(user.groups, groupId)
})
group.users = userIds
const saveUser = ::this._users.save
await Promise.all([
Promise.all(mapToArray(newUsers, ::this._users.save)),
Promise.all(mapToArray(oldUsers, ::this._users.save)),
Promise.all(mapToArray(newUsers, saveUser)),
Promise.all(mapToArray(oldUsers, saveUser)),
this._groups.save(group)
])
}
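
The two module-local helpers introduced above make the group/user membership updates idempotent; shown here with their expected results:

// Not part of the diff — illustrative only.
addToArraySet(undefined, 'g1')            // → [ 'g1' ]        (creates the set)
addToArraySet([ 'g1' ], 'g2')             // → [ 'g1', 'g2' ]  (appends once)
addToArraySet([ 'g1' ], 'g1')             // → [ 'g1' ]        (already present, unchanged)
removeFromArraySet([ 'g1', 'g2' ], 'g1')  // → [ 'g2' ]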

View File

@@ -1,10 +1,8 @@
import { noSuchObject } from 'xo-common/api-errors'
import Xapi from '../xapi'
import xapiObjectToXo from '../xapi-object-to-xo'
import XapiStats from '../xapi-stats'
import {
GenericError,
NoSuchObject
} from '../api-errors'
import {
camelToSnakeCase,
createRawObject,
@@ -13,7 +11,8 @@ import {
isString,
noop,
pCatch,
popProperty
popProperty,
serializeError
} from '../utils'
import {
Servers
@@ -21,18 +20,10 @@ import {
// ===================================================================
class NoSuchXenServer extends NoSuchObject {
constructor (id) {
super(id, 'xen server')
}
}
// ===================================================================
export default class {
constructor (xo) {
this._objectConflicts = createRawObject() // TODO: clean when a server is disconnected.
this._servers = new Servers({
const serversDb = this._servers = new Servers({
connection: xo._redis,
prefix: 'xo:server',
indexes: ['host']
@@ -43,8 +34,13 @@ export default class {
this._xo = xo
xo.on('start', async () => {
xo.addConfigManager('xenServers',
() => serversDb.get(),
servers => serversDb.update(servers)
)
// Connects to existing servers.
const servers = await this._servers.get()
const servers = await serversDb.get()
for (let server of servers) {
if (server.enabled) {
this.connectXenServer(server.id).catch(error => {
@@ -79,17 +75,21 @@ export default class {
this.disconnectXenServer(id)::pCatch(noop)
if (!await this._servers.remove(id)) {
throw new NoSuchXenServer(id)
throw noSuchObject(id, 'xenServer')
}
}
async updateXenServer (id, {host, username, password, readOnly, enabled}) {
async updateXenServer (id, {host, username, password, readOnly, enabled, error}) {
const server = await this._getXenServer(id)
if (host) server.set('host', host)
if (username) server.set('username', username)
if (password) server.set('password', password)
if (error !== undefined) {
server.set('error', error ? JSON.stringify(error) : '')
}
if (enabled !== undefined) {
server.set('enabled', enabled ? 'true' : undefined)
}
@@ -110,7 +110,7 @@ export default class {
async _getXenServer (id) {
const server = await this._servers.first(id)
if (!server) {
throw new NoSuchXenServer(id)
throw noSuchObject(id, 'xenServer')
}
return server
@@ -283,23 +283,20 @@ export default class {
xapi.xo.install()
try {
await xapi.connect()
} catch (error) {
if (error.code === 'SESSION_AUTHENTICATION_FAILED') {
throw new GenericError('authentication failed')
await xapi.connect().then(
() => this.updateXenServer(id, { error: null }),
error => {
this.updateXenServer(id, { error: serializeError(error) })
throw error
}
if (error.code === 'EHOSTUNREACH') {
throw new GenericError('host unreachable')
}
throw error
}
)
}
async disconnectXenServer (id) {
const xapi = this._xapis[id]
if (!xapi) {
throw new NoSuchXenServer(id)
throw noSuchObject(id, 'xenServer')
}
delete this._xapis[id]
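
connectXenServer now records the outcome on the server record: on success the stored error is cleared (updateXenServer(id, { error: null })), on failure the serialized error is saved before the rejection is rethrown, and updateXenServer persists it as a JSON string. A hedged sketch of a caller, assuming the mixin's methods are exposed on `xo`:

// Not part of the diff — illustrative only.
async function connectExample (xo, id) {
  try {
    await xo.connectXenServer(id)
  } catch (error) {
    // The serialized error has already been stored on the server record,
    // so it can be shown later without keeping the rejection around.
  }
}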

View File

@@ -1,8 +1,9 @@
import includes from 'lodash.includes'
import includes from 'lodash/includes'
import XoCollection from 'xo-collection'
import XoUniqueIndex from 'xo-collection/unique-index'
import {createClient as createRedisClient} from 'redis'
import {EventEmitter} from 'events'
import { noSuchObject } from 'xo-common/api-errors'
import mixins from './xo-mixins'
import Connection from './connection'
@@ -20,9 +21,6 @@ import {
mapToArray,
noop
} from './utils'
import {
NoSuchObject
} from './api-errors'
// ===================================================================
@@ -31,6 +29,9 @@ export default class Xo extends EventEmitter {
constructor (config) {
super()
// a lot of mixins add listeners for the start/stop/… events
this.setMaxListeners(0)
this._config = config
this._objects = new XoCollection()
@@ -43,7 +44,29 @@ export default class Xo extends EventEmitter {
this._httpRequestWatchers = createRawObject()
// Connects to Redis.
this._redis = createRedisClient(config.redis && config.redis.uri)
const redisConf = config.redis
this._redis = createRedisClient(redisConf && {
rename_commands: redisConf.renameCommands,
url: redisConf.uri
})
}
// -----------------------------------------------------------------
async clean () {
const handleCleanError = error => {
console.error(
'[WARN] clean error:',
error && error.stack || error
)
}
await Promise.all(mapToArray(
this.listeners('clean'),
listener => new Promise(resolve => {
resolve(listener.call(this))
}).catch(handleCleanError)
))
}
// -----------------------------------------------------------------
@@ -122,14 +145,14 @@ export default class Xo extends EventEmitter {
const obj = all[key] || byRef[key]
if (!obj) {
throw new NoSuchObject(key, type)
throw noSuchObject(key, type)
}
if (type != null && (
isString(type) && type !== obj.type ||
!includes(type, obj.type) // Array
)) {
throw new NoSuchObject(key, type)
throw noSuchObject(key, type)
}
return obj
@@ -192,7 +215,7 @@ export default class Xo extends EventEmitter {
const {fn, data} = watcher
new Promise(resolve => {
resolve(fn(req, res, data, next))
resolve(fn.call(this, req, res, data, next))
}).then(
result => {
if (result != null) {
@@ -330,7 +353,7 @@ export default class Xo extends EventEmitter {
forEach(connections, connection => {
// Notifies only authenticated clients.
if (connection.has('user_id')) {
if (connection.has('user_id') && connection.notify) {
if (enteredMessage) {
connection.notify('all', enteredMessage)
}
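
The Redis client is now built from a small options object: the `renameCommands` entry of xo-server's redis configuration is handed to node_redis as rename_commands, next to the connection URI. A sketch of the expected shape, written as a plain object (values are illustrative, and how the on-disk config file spells this depends on its format):

// Not part of the diff — illustrative only.
const config = {
  redis: {
    uri: 'redis://127.0.0.1:6379/0',
    // only needed when the Redis server renames commands, e.g. KEYS → XO_KEYS
    renameCommands: { KEYS: 'XO_KEYS' }
  }
}
// → createRedisClient({ rename_commands: config.redis.renameCommands, url: config.redis.uri })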

View File

@@ -1,84 +0,0 @@
#!/usr/bin/env node
var join = require('path').join
var readdir = require('fs').readdirSync
var stat = require('fs').statSync
var writeFile = require('fs').writeFileSync
// ===================================================================
function bind (fn, thisArg) {
return function () {
return fn.apply(thisArg, arguments)
}
}
function camelCase (str) {
return str.toLowerCase().replace(/[^a-z0-9]+([a-z0-9])/g, function (_, str) {
return str.toUpperCase()
})
}
function removeSuffix (str, sfx) {
var strLength = str.length
var sfxLength = sfx.length
var pos = strLength - sfxLength
if (pos < 0 || str.indexOf(sfx, pos) !== pos) {
return false
}
return str.slice(0, pos)
}
// ===================================================================
function handleEntry (entry, dir) {
var stats = stat(join(dir, entry))
var base
if (stats.isDirectory()) {
base = entry
} else if (!(
stats.isFile() && (
(base = removeSuffix(entry, '.coffee')) ||
(base = removeSuffix(entry, '.js'))
)
)) {
return
}
var identifier = camelCase(base)
this(
'import ' + identifier + " from './" + base + "'",
'defaults.' + identifier + ' = ' + identifier,
'export * as ' + identifier + " from './" + base + "'",
''
)
}
function generateIndex (dir) {
var content = [
'//',
'// This file has been generated by /tools/generate-index',
'//',
'// It is automatically re-generated each time a build is started.',
'//',
'',
'const defaults = {}',
'export default defaults',
''
]
var write = bind(content.push, content)
readdir(dir).map(function (entry) {
if (entry === 'index.js') {
return
}
handleEntry.call(write, entry, dir)
})
writeFile(dir + '/index.js', content.join('\n'))
}
process.argv.slice(2).map(generateIndex)

yarn.lock: diff suppressed because it is too large (6772 lines).