Compare commits

...

361 Commits

Author SHA1 Message Date
Julien Fontanet
d6895e7288 WiP 2023-02-09 10:31:14 +01:00
Julien Fontanet
e279ea01a2 WiP 2023-02-09 10:30:41 +01:00
Julien Fontanet
8df308aa01 feat(xen-api/{get,put}Resource): add 24h timeout 2023-02-09 10:30:01 +01:00
Julien Fontanet
a3e37eca62 fix(xo-server): disable broken requestTimeout
Fixes https://xcp-ng.org/forum/post/58146

Caused by nodejs/node#46574

It caused requests to time out after 0-30 seconds, which broke all uploads.
2023-02-09 10:25:45 +01:00
Julien Fontanet
817911a41e fix(xen-api): fix task watchers when initially not watching events (2)
Introduced by 9f4fce9da
2023-02-08 17:11:27 +01:00
Julien Fontanet
9f4fce9daa fix(xen-api): fix task watchers when initially not watching events
Introduced by bc61dd85c
2023-02-08 12:21:56 +01:00
Julien Fontanet
9ff305d5db fix(xo-server/rest-api): fix VDI import
Introduced by ab0e411ac
2023-02-07 19:20:59 +01:00
Julien Fontanet
055c3e098f fix(xen-api/putResource): better error handling
Tested all combinations with the following conditions:

- success, cancellation and connection loss (ECONNRESET)
- with and without tasks watching
- with and without known length (i.e. content-length hack)
2023-02-07 17:38:01 +01:00
Julien Fontanet
bc61dd85c6 fix(xen-api): correctly handle not watching tasks 2023-02-07 17:23:57 +01:00
Julien Fontanet
db6f1405e9 chore(xo-server/rest-api): match export routes first 2023-02-07 16:36:05 +01:00
Gabriel Gunullu
3dc3376aec chore(test): replace vhd-util check (#6651) 2023-02-07 13:33:25 +01:00
Julien Fontanet
55920a58a3 feat(xo-server/recover-account): -s flag for xoa-support
Simpler process for xoa-support.

```console
$ xo-server-recover-account -s
The generated password is lXJMtCzWDGPOIg
user xoa-support has been successfully updated
```
2023-02-06 15:25:04 +01:00
Julien Fontanet
2a70ebf667 docs: uniformize code blocks
- add missing syntaxes
- don't put a prompt if the command has no output, to ease copy/paste, and use the `sh` syntax
- otherwise, always use `$` as the prompt and the `console` syntax
2023-02-06 11:25:12 +01:00
Julien Fontanet
2f65a86aa0 fix(xen-api/putResource): fix a number of issues
- hide `VDI_IO_ERROR` when using content-length hack
- avoid unhandled rejection in case upload fails
2023-02-06 10:40:42 +01:00
Julien Fontanet
4bf81ac33b docs(xapi): fix typo 2023-02-04 11:14:02 +01:00
Julien Fontanet
263c23ae8f docs(xapi): describe syncHookTimeout 2023-02-04 11:11:41 +01:00
Julien Fontanet
bf51b945c5 chore(vmware-explorer): fix lint issues
Introduced by 9fa15d9c8
2023-02-03 16:36:55 +01:00
Julien Fontanet
9d7a461550 feat(turbo): add dev and test tasks 2023-02-03 16:17:52 +01:00
Julien Fontanet
bbf60818eb chore: update dev deps 2023-02-03 16:17:31 +01:00
Julien Fontanet
103b22ebb2 fix(backups/importDeltaVm): resize cloned VDI if necessary
Fixes zammad#10996
2023-02-03 15:49:08 +01:00
Mathieu
cf4a1d7d40 fix(lite): update stacked ram usage message (#6650) 2023-02-02 11:50:10 +01:00
Julien Fontanet
e94f036aca chore(vmware-explorer): lower requirement to Node 14 2023-02-02 09:43:03 +01:00
Julien Fontanet
675405f7ac feat: release 5.79.0 2023-01-31 17:49:51 +01:00
Thierry Goettelmann
f8a3536a88 feat(lite): RelativeTime component (#6620) 2023-01-31 17:10:26 +01:00
Julien Fontanet
e527a13b50 feat(xo-server): 5.109.0 2023-01-31 17:04:31 +01:00
Julien Fontanet
3be03451f8 feat(@xen-orchestra/vmware-explorer): 0.0.3 2023-01-31 17:02:24 +01:00
Florent BEAUCHAMP
9fa15d9c84 feat(xo-server): import VM from ESXi (#6595) 2023-01-31 16:54:18 +01:00
Mathieu
9c3d39b4a7 feat: technical release (#6648) 2023-01-31 11:18:19 +01:00
Mathieu
28800f43ee fix(lite): use browser timestamps for stats (#6623) 2023-01-31 10:26:07 +01:00
Gabriel Gunullu
5c0b29c51f feat(xo-web/network): NBD option (#6646) 2023-01-30 17:34:21 +01:00
Gabriel Gunullu
62d9d0208b feat(xo-server/network.set): support (un)setting NBD (#6635) 2023-01-30 16:02:28 +01:00
Pierre Donias
4bf871e52f fix(lite): stats.memory is undefined (#6647)
Introduced by 4f31b7007a
2023-01-30 14:40:57 +01:00
Florent BEAUCHAMP
103972808c fix(xo-vmdk-to-vhd): better computation of overprovisioning of very sparse disks (#6639) 2023-01-30 14:15:44 +01:00
Julien Fontanet
dc65bb87b5 feat(upload-ova): special handling of invalid params error (#6626)
Fixes #6622

Similar to 036b30212 & 65daa39eb
2023-01-30 14:09:28 +01:00
Mathieu
bfa0282ecc feat: technical release (#6645) 2023-01-27 16:16:26 +01:00
Mathieu
aa66ec0ccd fix(changelog): fix release type on a package (#6644) 2023-01-27 15:09:10 +01:00
Mathieu
18fe19c680 fix(lite): fix getHostMemoryFunction error (#6643) 2023-01-27 14:43:11 +01:00
Julien Fontanet
ab0e411ac0 chore(xo-server/rest-api): improve code
- mutualize object fetching
- mutualize error handling
2023-01-27 13:01:21 +01:00
Pierre Donias
79671e6d61 fix(lite/build): "Big integer literals are not available in the configured target environment" (#6638)
Introduced by a281682f7a
2023-01-26 11:42:29 +01:00
Mathieu
71ad9773da feat(lite/vm): ability to change state of a VM (#6608) 2023-01-26 09:43:12 +01:00
Julien Fontanet
34ecc2bcbb feat(xo-server/rest-api): support setting name_label/name_description 2023-01-25 17:29:49 +01:00
Pierre Donias
53f4f265dc fix(xo-web/host/network): remove extra "mode" column (#6640)
Introduced by 7ede6bdbce
2023-01-25 17:19:03 +01:00
Florent BEAUCHAMP
97624ef836 fix(xo-vmdk-to-vhd): memory consumption during ova generation (#6637) 2023-01-25 10:23:50 +01:00
Julien Fontanet
fb8d0ed924 fix(xen-api/examples/import-vdi): fix tasks watching
Introduced by 3e351f852
2023-01-24 16:31:03 +01:00
Gabriel Gunullu
fedbdba13d feat(xo-web/recipes): static network config for k8s recipe (#6598) 2023-01-24 11:04:02 +01:00
Julien Fontanet
a281682f7a chore: update dev deps 2023-01-23 18:31:07 +01:00
Julien Fontanet
07e9f09692 docs(xo-server/rest-api): minor fix 2023-01-23 17:16:32 +01:00
Julien Fontanet
29d6e590de feat(xo-server/rest-api): support exporting VDI in raw format 2023-01-23 17:14:24 +01:00
Julien Fontanet
3e351f8529 feat(xen-api/examples/import-vdi): can create the VDI and various flags 2023-01-23 17:13:41 +01:00
Julien Fontanet
bfbfb9379a feat(xo-cli): improve no server message 2023-01-23 09:31:01 +01:00
rajaa-b
4f31b7007a feat(lite): RAM usage graph (#6604) 2023-01-20 11:44:54 +01:00
rajaa-b
fe0cc2ebb9 feat(lite): network throughput chart (#6610) 2023-01-19 16:10:36 +01:00
Mathieu
2fd6f521f8 feat(xo-web/licenses): make id and boundObjectId copyable (#6634) 2023-01-19 15:11:10 +01:00
Florent BEAUCHAMP
ec00728112 feat(xo-web): add toggle for viridian flag (#6631)
Fixes #6572
2023-01-19 09:33:36 +01:00
Julien Fontanet
7174c1edeb chore(xo-server/rest-api): doc fixes and changelog entry
Introduced by 7bd27e743
2023-01-18 23:43:57 +01:00
Julien Fontanet
7bd27e7437 feat(xo-server/rest-api): support to destroy VMs/VDIs 2023-01-18 23:35:49 +01:00
Florent BEAUCHAMP
0a28e30003 fix(xo-web): clarify windows update label (#6632)
Fix #6628
2023-01-18 17:31:28 +01:00
Mathieu
246c793c28 fix(xo-web/licenses): move message for XCP-ng license binding (#6630) 2023-01-18 17:11:21 +01:00
Florent BEAUCHAMP
5f0ea4d586 fix(xo-web): show bootable status for VM running pv_in_pvh virtualisation mode (#6629)
Fix #6432
2023-01-18 17:09:26 +01:00
Julien Fontanet
3c7d316b3c feat(xo-server): initial tasks infrastructure (#6625) 2023-01-17 16:12:04 +01:00
Julien Fontanet
645c8f32e3 chore(xo-server-perf-alert): use @xen-orchestra/log@0.5.0
Introduced by #6550
2023-01-17 15:42:38 +01:00
Gabriel Gunullu
adc5e7d0c0 test(vhd-cli): from Jest to test (#6605) 2023-01-17 10:39:41 +01:00
Thierry Goettelmann
b9b74ab1ac feat(lite/ui): first implementation of responsive UI (#6612) 2023-01-17 10:22:08 +01:00
Thierry Goettelmann
64298c04f2 feat(lite/ui): UiModal fix (#6617) 2023-01-17 09:25:29 +01:00
Gabriel Gunullu
3dfb7db039 chore(xo-server-perf-alert): print error (#6550) 2023-01-16 22:53:53 +01:00
Julien Fontanet
b64d8f5cbf fix(xo-server/rest-api): handle filter parsing errors 2023-01-16 17:34:23 +01:00
Julien Fontanet
c2e5225728 feat(xo-server): expose host.residentVms 2023-01-16 17:33:47 +01:00
Florent BEAUCHAMP
6c44a94bf4 fix(vhd-lib/parseVhdStream): also consume stream in NBD mode (#6613)
Consuming the stream is necessary for all writers, otherwise the other writers (e.g. DeltaBackupWriter, i.e. CR) will stay stuck.
2023-01-16 10:54:45 +01:00
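For readers unfamiliar with Node streams, here is a minimal, generic sketch of what "consuming the stream" means; this is not the actual `parseVhdStream` code.

```js
// A readable stream only flows when someone reads from it.  If one
// consumer ignores its copy of the data, the shared source stays paused
// and every other consumer piping from it appears "stuck".
async function drain(stream) {
  for await (const chunk of stream) {
    // intentionally discard the data: we only read to keep the flow going
    void chunk
  }
}
```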
Florent BEAUCHAMP
a2d9310d0a fix(backups): fix size of NBD backups (#6599) 2023-01-16 10:43:29 +01:00
Julien Fontanet
05197b93ee feat(proxy): dedupe logs 2023-01-15 13:08:57 +01:00
Julien Fontanet
448d115d49 feat(xo-server): dedupe logs 2023-01-15 13:04:52 +01:00
Julien Fontanet
ae993dff45 feat(log/dedupe): helper to remove duplicated logs 2023-01-15 12:59:31 +01:00
Julien Fontanet
1bc4805f3d chore(log): move Log into own module 2023-01-15 12:59:31 +01:00
Julien Fontanet
98fe8f3955 chore(log): move createTransport into own module 2023-01-15 12:59:31 +01:00
Julien Fontanet
e902bcef67 chore(log): prefix internal modules by _ 2023-01-15 12:59:31 +01:00
Julien Fontanet
cb2a6e43a8 chore(log/utils.test.js): rename to _compileLogPattern.test.js 2023-01-15 12:59:31 +01:00
Julien Fontanet
b73a0992f8 feat(log): define public entry points
BREAKING CHANGE: Importing modules with extensions is now unsupported, i.e. use `@xen-orchestra/log/configure` instead of `@xen-orchestra/log/configure.js`.

Allows ESM modules to import modules without specifying extensions (just like CJS modules), which will make migrating this lib to ESM painless in the future.
2023-01-15 12:58:35 +01:00
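A minimal illustration of the entry-point change above; the `configure` export name is taken from the commit message, the exact API surface is an assumption.

```js
// Before: deep import with an explicit extension (no longer supported)
// const { configure } = require('@xen-orchestra/log/configure.js')

// After: extension-less public entry point, which works the same from CJS…
const { configure } = require('@xen-orchestra/log/configure')

// …and from ESM, without having to know the file layout of the package:
// import { configure } from '@xen-orchestra/log/configure'
```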
Julien Fontanet
d0b3d78639 feat(xo-server): round up host memory to nearest GiB
Fixes #5776

Improves the display of the value by ignoring the micro-kernel size (~50 MiB), i.e. `128 GiB` instead of `127.96 GiB`.
2023-01-13 15:06:06 +01:00
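A minimal sketch of the rounding described above; the function name and its exact placement in xo-server are assumptions.

```js
const GIB = 2 ** 30

// Round host memory up to the nearest GiB so the ~50 MiB used by the
// micro-kernel does not turn a 128 GiB host into "127.96 GiB".
function roundMemoryUpToGiB(bytes) {
  return Math.ceil(bytes / GIB) * GIB
}

// 128 GiB minus ~50 MiB still displays as 128 GiB:
console.log(roundMemoryUpToGiB(128 * GIB - 50 * 2 ** 20) === 128 * GIB) // true
```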
Julien Fontanet
e6b8939772 fix(xapi/VM_snapshot): don't fail on NOBAK VDIs destruction failure 2023-01-12 15:25:09 +01:00
Julien Fontanet
bc372a982c fix(xapi/VM_checkpoint): remove unsupported ignoreNobakVdis 2023-01-12 15:20:40 +01:00
Florent Beauchamp
3ff8064f1b feat(backups): add more info about NBD backups in logs 2023-01-12 10:28:30 +01:00
Florent Beauchamp
834459186d fix(backups): useNbd must follow the config 2023-01-12 10:28:30 +01:00
Mathieu
12220ad4cf fix(lite/UsageBar): add color for dangerous cases (#6606) 2023-01-12 09:22:07 +01:00
Julien Fontanet
f6fd1db1ef feat(xo-server): increase HTTP server request timeout to 1 day
Fixes #6590
2023-01-11 22:07:35 +01:00
Julien Fontanet
a1050882ae docs(installation): explicit FreeBSD/OpenBSD not officially supported 2023-01-11 15:11:54 +01:00
Mathieu
687df5ead4 feat(lite/vm): change state button (#6571) 2023-01-11 10:51:16 +01:00
Mathieu
b057881ad0 fix(lite): fix type checking (#6607) 2023-01-10 16:16:32 +01:00
Julien Fontanet
2b23550996 chore(vhd-lib/createVhdStreamWithLength): use readChunkStrict
Related to zammad#10996

Not only does it simplify the code a bit, it also provides better error messages, especially on stream end.
2023-01-10 11:11:38 +01:00
Thierry Goettelmann
afeb20e589 fix(lite/Console): fix isReady condition (#6594) 2023-01-06 10:44:45 +01:00
Julien Fontanet
d7794518a2 chore: update to fs-extra@11 & parse-pairs@2 2023-01-05 11:33:09 +01:00
Julien Fontanet
fee61a43e3 chore: update to sinon@15 2023-01-05 11:16:03 +01:00
Julien Fontanet
b201afd192 chore: update dev deps 2023-01-05 10:21:06 +01:00
Florent Beauchamp
feef1f8b0a fix(backups/cleanVm): fix tests 2023-01-04 10:54:22 +01:00
Florent Beauchamp
1a5e2fde4f fix(vhd-lib/merge): require aliases for VHD directories 2023-01-04 10:54:22 +01:00
Julien Fontanet
609e957a55 fix: build script should build xo-server plugins
Introduced by 3bfd6c697
2023-01-04 10:53:55 +01:00
Thierry Goettelmann
5c18404174 feat(lite): update useCollectionFilter composable (#6538)
- Query String support must now be explicitly enabled with the `queryStringParam` option
- Added `initialFilters` option
- Added generic type support
- Updated documentation
2023-01-04 09:51:39 +01:00
Thierry Goettelmann
866a1dd8ae feat(lite): update useCollectionSorter composable (#6540)
- Query String support must now be explicitly enabled with the `queryStringParam` option
- Added `initialFilters` option
- Added generic type support
- Updated documentation
2023-01-04 09:42:51 +01:00
Julien Fontanet
3bfd6c6979 chore: use Turborepo to build
Why?

- ordering: build dependencies before dependents
- cache: don't rebuild if no changes in files or dependencies
- possibility to restrict to specific scopes

Changes:

- `yarn build` now only builds `xo-server` and `xo-web` (and their dependencies)
- `yarn build:xo-lite` builds `@xen-orchestra/lite` (and its dependencies)
2023-01-03 11:39:20 +01:00
Florent BEAUCHAMP
06564e9091 feat(backups): remove merge limitations (#6591)
Follow-up of 0635b3316ea077fccaa8b2d1e7a4d801eb701811
2023-01-03 11:07:07 +01:00
Thierry Goettelmann
1702783cfb feat(lite): Reactive chart theme (#6587) 2022-12-21 15:00:26 +01:00
rajaa-b
4ea0cbaa37 feat(xo-lite): Pool CPU usage chart (#6577) 2022-12-21 12:03:04 +01:00
Mathieu
2246e065f7 feat: release 5.78.0 (#6588) 2022-12-20 13:54:30 +01:00
Mathieu
29a38cdf1a feat: technical release (#6586) 2022-12-19 14:30:41 +01:00
Julien Fontanet
960c569e86 fix(CHANGELOG): add missing backups changes
Introduced by f95a20173
2022-12-19 11:40:06 +01:00
Julien Fontanet
fa183fc97e fix(CHANGELOG): add missing Kubernetes changes
Introduced by a1d63118c
2022-12-19 10:42:53 +01:00
Gabriel Gunullu
a1d63118c0 feat(xo-web/recipes/kubernetes): CIDR is no longer necessary (#6583)
Related to 6227349725
2022-12-19 09:42:56 +01:00
Julien Fontanet
f95a20173c fix(backups/{Delta,Full}BackupWriter}): fix this._vmBackupDir access
May fix #6584

Introduced by 45dcb914b.
2022-12-17 10:57:49 +01:00
Mathieu
b82d0fadc3 feat: technical release (#6585) 2022-12-16 16:13:07 +01:00
Julien Fontanet
0635b3316e feat(xo-server/backups): remove merge limitations
Since 30fe9764a, merging a range of VHDs is supported via a synthetic VHD, which limits the perf impact.

It's no longer necessary to limit the number of VHDs to merge per run.
2022-12-16 14:42:05 +01:00
Thierry Goettelmann
113235aec3 feat(lite): new useArrayRemovedItemsHistory composable (#6546) 2022-12-16 11:43:50 +01:00
Mathieu
3921401e96 fix(lite): fix 'not connected to xapi' (#6522)
Introduced by 1c3cad9235
2022-12-16 09:54:43 +01:00
Julien Fontanet
2e514478a4 fix(xo-server/remotes): always remove handler from cache when forgetting 2022-12-15 17:58:14 +01:00
Julien Fontanet
b3d53b230e fix(fs/abstract): use standard naming for logger 2022-12-15 17:58:14 +01:00
Julien Fontanet
45dcb914ba chore(backups/{Mixin,Delta,Full}BackupWriter}): mutualize VM backup dir computation 2022-12-15 17:58:14 +01:00
Mathieu
711087b686 feat(lite): feedback on login page (#6464) 2022-12-15 15:00:46 +01:00
Julien Fontanet
b100a59d1d feat(xapi/VM_snapshot): use ignore_vdis param 2022-12-14 23:36:03 +01:00
Mathieu
109b2b0055 feat(lite): not found views (page/object) (#6410) 2022-12-14 16:47:40 +01:00
Julien Fontanet
9dda99eb20 fix(xo-server/_handleBackupLog): fix sendPassiveCheck condition
Introduced by ba782d269

Fixes https://xcp-ng.org/forum/post/56175
2022-12-14 16:26:43 +01:00
Thierry Goettelmann
fa0f75b474 feat(lite): New UiCardTitle component (#6558) 2022-12-12 15:19:43 +01:00
Julien Fontanet
2d93e0d4be feat(xapi/waitObjectState): better timeout error stacktrace
Create the error synchronously for a better stack trace and debuggability.
2022-12-12 15:11:10 +01:00
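A generic sketch of the pattern mentioned above, not the actual `waitObjectState` code: creating the `Error` synchronously makes its stack trace point at the caller instead of at a timer callback.

```js
function withTimeout(promise, ms, what) {
  // created synchronously: the stack trace references the caller
  const error = new Error(`timeout while waiting for ${what}`)
  return new Promise((resolve, reject) => {
    const timer = setTimeout(() => reject(error), ms)
    promise.then(
      value => {
        clearTimeout(timer)
        resolve(value)
      },
      reason => {
        clearTimeout(timer)
        reject(reason)
      }
    )
  })
}
```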
Julien Fontanet
fe6406336d feat: release 5.77.2 2022-12-12 11:49:55 +01:00
Julien Fontanet
1037d44089 feat(xo-server): 5.107.3 2022-12-12 11:27:18 +01:00
Julien Fontanet
a8c3669f43 feat(@xen-orchestra/proxy): 0.26.7 2022-12-12 11:26:55 +01:00
Julien Fontanet
d91753aa82 feat(@xen-orchestra/backups): 0.29.3 2022-12-12 11:26:26 +01:00
Julien Fontanet
b548514d44 fix(backups): wait for cache to be updated before running cleanVm (#6580)
Introduced by 191c12413
2022-12-12 09:30:08 +01:00
Julien Fontanet
ba782d2698 fix(xo-server/_handleBackupLog): bail instead of failing if Nagios plugin is not loaded
Introduced by ed34d9cbc
2022-12-08 17:17:31 +01:00
Julien Fontanet
0552dc23a5 chore(CHANGELOG.unreleased): clarify format description 2022-12-08 17:17:31 +01:00
Cécile Morange
574bbbf5ff docs(manage infrastructure): add how to remove a host from pool (#6574)
Co-authored-by: Jon Sands <fohdeesha@gmail.com>
2022-12-08 15:38:02 +01:00
Julien Fontanet
df11a92cdb feat(scripts/gen-deps-list.js): add debug logs 2022-12-07 14:35:49 +01:00
Julien Fontanet
33ae59adf7 feat: release 5.77.1 2022-12-07 13:41:17 +01:00
Julien Fontanet
e0a115b41d feat(xo-server): 5.107.2 2022-12-07 13:19:15 +01:00
Julien Fontanet
f838d6c179 feat(@xen-orchestra/proxy): 0.26.6 2022-12-07 13:16:51 +01:00
Julien Fontanet
6c3229f517 feat(@xen-orchestra/backups): 0.29.2 2022-12-07 13:16:50 +01:00
Julien Fontanet
6973928b1a feat(backups/cleanVm): detect and fix cache inconsistencies (#6575) 2022-12-07 13:06:03 +01:00
Julien Fontanet
a5daba2a4d fix: work-around VuePress issues #2 2022-12-06 14:43:15 +01:00
Julien Fontanet
40ef83416e fix: work-around VuePress issues 2022-12-06 14:35:00 +01:00
Julien Fontanet
8518146455 fix: force classic Yarn 2022-12-06 10:53:35 +01:00
Florent BEAUCHAMP
d58f563de5 fix(xo-server/vm.warmMigration): fix start/delete params handling (#6570) 2022-12-06 10:42:51 +01:00
Thierry Goettelmann
ad2454adab feat(lite): replace ProgressBar with UiProgressBar & update UsageBar (#6544)
`ProgressBar` component handled too much logic (a progress bar + a circle icon + a label + a badge)

Since we need a simple progress bar in various places, all the additional logic should be handled by `UsageBar`.

- Moved usage-specific logic from `ProgressBar` to `UsageBar`
- Removed the `ProgressBar` component
- Created the `ui/UiProgressBar` component, containing only the bar itself
- Updated `UsageBar` to use `UiProgressBar` and adapted its style
2022-12-06 09:50:50 +01:00
Julien Fontanet
1f32557743 fix(scripts/gen-deps-list): fix packages order (#6564)
The release order computation is now decoupled from the computation of the packages to release, and is done for all packages so that transitive dependencies are still correctly ordered.
2022-11-30 14:52:46 +01:00
Julien Fontanet
e95aae2129 feat: release 5.77.0 2022-11-30 14:05:38 +01:00
Pierre Donias
9176171f20 feat: technical release (#6566) 2022-11-30 11:18:33 +01:00
Florent BEAUCHAMP
d4f2249a4d fix(xo-server/vm.warmMigration): use same job id in subsequent run (#6565)
Introduced by 72c69d7
2022-11-30 11:00:42 +01:00
Julien Fontanet
e0b4069c17 fix(scripts/bump-pkg): don't call git add --patch twice 2022-11-29 18:56:03 +01:00
Julien Fontanet
6b25a21151 feat(scripts/bump-pkg): ignore yarn.lock changes 2022-11-29 18:56:03 +01:00
Julien Fontanet
716dc45d85 chore(CHANGELOG): integrate released changes 2022-11-29 18:56:03 +01:00
Julien Fontanet
57850230c8 feat(xo-web): 5.108.0 2022-11-29 18:47:33 +01:00
Julien Fontanet
362d597031 feat(xo-server-web-hooks): 0.3.2 2022-11-29 18:47:14 +01:00
Julien Fontanet
e89b84b37b feat(xo-server-usage-report): 0.10.2 2022-11-29 18:46:54 +01:00
Julien Fontanet
ae6f6bf536 feat(xo-server-transport-nagios): 1.0.0 2022-11-29 18:46:27 +01:00
Julien Fontanet
6f765bdd6f feat(xo-server-sdn-controller): 1.0.7 2022-11-29 18:45:50 +01:00
Julien Fontanet
1982c6e6e6 feat(xo-server-netbox): 0.3.5 2022-11-29 18:45:30 +01:00
Julien Fontanet
527dceb43f feat(xo-server-load-balancer): 0.7.2 2022-11-29 18:44:12 +01:00
Julien Fontanet
f5a3d68d07 feat(xo-server-backup-reports): 0.17.2 2022-11-29 18:43:50 +01:00
Julien Fontanet
6c904fbc96 feat(xo-server-auth-ldap): 0.10.6 2022-11-29 18:43:22 +01:00
Julien Fontanet
295036a1e3 feat(xo-server-audit): 0.10.2 2022-11-29 18:42:30 +01:00
Julien Fontanet
5601d61b49 feat(xo-server): 5.107.0 2022-11-29 18:32:04 +01:00
Julien Fontanet
1c35c1a61a feat(xo-cli): 0.14.2 2022-11-29 18:31:24 +01:00
Julien Fontanet
4143014466 feat(xo-vmdk-to-vhd): 2.5.0 2022-11-29 18:29:33 +01:00
Julien Fontanet
90fea69b7e feat(@xen-orchestra/proxy): 0.26.5 2022-11-29 18:21:01 +01:00
Julien Fontanet
625663d619 feat(@xen-orchestra/xapi): 1.5.3 2022-11-29 18:18:09 +01:00
Julien Fontanet
403afc7aaf feat(@xen-orchestra/mixins): 0.8.2 2022-11-29 17:50:43 +01:00
Julien Fontanet
d295524c3c feat(@xen-orchestra/backups-cli): 1.0.0 2022-11-29 17:48:21 +01:00
Julien Fontanet
5eb4294e70 feat(@xen-orchestra/backups): 0.29.1 2022-11-29 17:48:21 +01:00
Julien Fontanet
90598522a6 feat(@xen-orchestra/audit-core): 0.2.2 2022-11-29 17:48:21 +01:00
Julien Fontanet
519fa1bcf8 feat(vhd-lib): 4.2.0 2022-11-29 17:48:21 +01:00
Julien Fontanet
7b0e5afe37 feat(@xen-orchestra/fs): 3.3.0 2022-11-29 17:48:21 +01:00
Julien Fontanet
0b6b3a47a2 feat(@vates/disposable): 0.1.3 2022-11-29 17:48:21 +01:00
Julien Fontanet
75db810508 feat(@xen-orchestra/log): 0.5.0 2022-11-29 17:48:21 +01:00
Julien Fontanet
2f52c564f5 chore(backups-cli): format package.json 2022-11-29 17:48:21 +01:00
Florent Beauchamp
011d582b80 fix(vhd-lib/merge): delete old data AFTER the alias has been overwritten 2022-11-29 16:42:57 +01:00
Julien Fontanet
32d21b2308 chore: use caret range for @vates/async-each
Introduced by 08298d328
2022-11-29 16:31:41 +01:00
Pierre Donias
45971ca622 fix(xo-web): remove duplicated imports (#6562) 2022-11-29 16:17:40 +01:00
Mathieu
f3a09f2dad feat(xo-web/VM/advanced): add button for warm migration (#6533)
See #6549
2022-11-29 15:14:41 +01:00
Mathieu
552a9c7b9f feat(xo-web/proxy): register an existing proxy (#6556) 2022-11-29 14:44:51 +01:00
Gabriel Gunullu
ed34d9cbc0 feat(xo-server-transport-nagios): make host and service configurable (#6560) 2022-11-29 14:34:41 +01:00
Julien Fontanet
187ee99931 fix(xo-server/plugin.configure): don't save injected defaults
Default values injected by Ajv from the configuration schema should not be saved.
2022-11-29 12:43:17 +01:00
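For context, Ajv's `useDefaults: true` option mutates the object being validated by injecting the schema defaults. A minimal sketch of the idea behind the fix, assuming a hypothetical `save` callback; this is not the actual xo-server code.

```js
const Ajv = require('ajv')

const ajv = new Ajv({ useDefaults: true })

function configurePlugin(configSchema, userConfig, save) {
  // validate a copy: Ajv injects defaults into the object it validates
  const runtimeConfig = JSON.parse(JSON.stringify(userConfig))
  const validate = ajv.compile(configSchema)
  if (!validate(runtimeConfig)) {
    throw new Error(ajv.errorsText(validate.errors))
  }
  save(userConfig) // persist only what the user actually provided
  return runtimeConfig // use the defaulted values at runtime
}
```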
Cécile Morange
ff78dd8f7c feat(xo-web/i18n): "XenServer" → "XCP-ng" (#6462)
See #6439
2022-11-29 11:47:16 +01:00
Julien Fontanet
b0eadb8ea4 fix: remove concurrency limit for dev script
Introduced by 9d5bc8af6

Limited concurrency (which is the default) is not compatible with never-ending commands.
2022-11-29 11:35:01 +01:00
Julien Fontanet
a95754715a fix: use --verbose for dev script
Introduced by 9d5bc8af6

Silent mode is not compatible with never-ending commands (i.e. it does not show any meaningful output).
2022-11-29 11:14:44 +01:00
Julien Fontanet
18ece4b90c fix(xo-server/MigrateVm): fix uuid import
Introduced by 72c69d791

Fixes #6561
2022-11-29 10:30:09 +01:00
Florent Beauchamp
3862fb2664 fix(fs/rename): throw ENOENT when source file is missing 2022-11-28 17:33:57 +01:00
Florent BEAUCHAMP
72c69d791a feat(xo-server): implement warm migration backend (#6549) 2022-11-28 17:28:19 +01:00
Julien Fontanet
d6192a4a7a chore: remove unused travis-tests.js 2022-11-28 15:51:47 +01:00
Julien Fontanet
0f824ffa70 lint(vhd-lib): remove unused var and fix formatting
Introduced by f6c227e7f
2022-11-26 10:10:08 +01:00
Florent BEAUCHAMP
f6c227e7f5 feat(vhd-lib): merge resume can resume when rename fails (#6530) 2022-11-25 20:51:33 +01:00
Julien Fontanet
9d5bc8af6e feat: run-script.js now only shows output on error by default 2022-11-25 15:45:52 +01:00
Julien Fontanet
9480079770 feat: script test-unit now bails on first error 2022-11-25 15:45:08 +01:00
Julien Fontanet
54fe9147ac chore: only enable Babel debug on prod builds
The output was making test results hard to see.
2022-11-25 14:43:36 +01:00
Gabriel Gunullu
b6a0477232 feat(xo-server-transport-nagios): report backed up VM individually (#6534) 2022-11-25 14:36:41 +01:00
Julien Fontanet
c60644c578 chore(lite): merge lint with the root config 2022-11-25 11:23:04 +01:00
Thierry Goettelmann
abdce94c5f feat(lite): type check on test (#6547) 2022-11-25 11:19:58 +01:00
Mathieu
d7dee04013 feat(xo-web/settings/users): remove OTP of users in admin panel (#6541)
See https://xcp-ng.org/forum/topic/6521
2022-11-25 11:15:07 +01:00
Julien Fontanet
dfc62132b7 fix(xo-web/remote): prevent browser from autocompleting encryption key 2022-11-24 18:48:45 +01:00
Julien Fontanet
36f7f193aa feat: run linter in CI 2022-11-24 17:00:59 +01:00
Julien Fontanet
ca4a82ec38 fix: make test-lint script ignore xo-web
Too many errors in this legacy package.
2022-11-24 16:26:40 +01:00
Julien Fontanet
37aea1888d chore: fix lint issues 2022-11-24 16:26:40 +01:00
Julien Fontanet
92f3b4ddd7 chore(backups/RemoteAdapter): remove unused invalidateVmBackupListCache 2022-11-24 16:26:40 +01:00
Mathieu
647995428c feat(lite/pool/dashboard): top 5 RAM usage (#6419) 2022-11-24 15:57:11 +01:00
Mathieu
407e9c25f3 feat(xo-web/licenses): text to explicit where to bind xcp-ng licenses (#6551)
See zammad#11037
2022-11-24 15:42:16 +01:00
Julien Fontanet
1612ab7335 fix(backups-cli/clean-vms): remove incorrect console.log
Introduced by 94c755b10
2022-11-23 23:03:46 +01:00
Julien Fontanet
b952c36210 fix(vhd-lib/merge): VhdAbstract.rename → handler.rename
Missing change from c5b3acfce
2022-11-23 15:02:56 +01:00
Florent BEAUCHAMP
96b5cb2c61 feat(xo-vmdk-to-vhd): overprovision vmdk size to generate ova in one pass (#6487) 2022-11-23 14:48:18 +01:00
Florent Beauchamp
c5b3acfce2 fix(vhd-lib): remove unsafe VhdAbstract.rename implementation
The actual implementation was deleting the target VHD even if the source did not exist, leading to potential data loss.
2022-11-23 14:31:37 +01:00
Julien Fontanet
20a01bf266 feat(lint-staged): format all files with Prettier 2022-11-22 18:20:01 +01:00
Julien Fontanet
a33b88cf1c chore: format with Prettier 2022-11-22 17:30:14 +01:00
Julien Fontanet
09a2f45ada feat: run test script for all pkgs with changed files 2022-11-22 17:30:14 +01:00
Julien Fontanet
83a7dd7ea1 chore: remove custom scripts/lint-staged 2022-11-22 17:30:14 +01:00
Julien Fontanet
afc1b6a5c0 Revert "feat: run pre-commit script for all packages"
This reverts commit f5b91cd45d.
2022-11-22 17:30:14 +01:00
Thierry Goettelmann
7f4f860735 feat(lite/color mode): "auto" mode + "D" shortcut to toggle (#6536)
The shortcut is only enabled in the dev environment.
2022-11-22 15:35:31 +01:00
Julien Fontanet
d789e3aa0d chore: update to husky@8 2022-11-22 15:33:43 +01:00
Julien Fontanet
f5b91cd45d feat: run pre-commit script for all packages 2022-11-22 11:37:40 +01:00
Julien Fontanet
92ab4b3309 chore(lite): format with Prettier (#6545) 2022-11-22 11:33:03 +01:00
Florent Beauchamp
2c456e4c89 fix(vhd-lib): create directory for merged blocks 2022-11-22 11:05:51 +01:00
Florent Beauchamp
1460e63449 fix(vhd-lib): write state at the begining 2022-11-22 11:05:51 +01:00
Julien Fontanet
8291124c1f feat(xo-server/remote.{create,set}): prevent xo-vm-backups suffix
Fixes zammad#10930
2022-11-21 16:58:24 +01:00
Julien Fontanet
fc4d9accfd feat(mixin): add usage 2022-11-21 11:04:51 +01:00
Julien Fontanet
80969b785f feat(xo-server/proxy.register): authenticationToken is now optional
It's automatically generated if missing, which can be useful when manually registering a proxy.
2022-11-20 23:51:48 +01:00
Julien Fontanet
3dfd7f1835 fix(xo-server/proxy.register): requires either address or vmUuid 2022-11-20 23:50:51 +01:00
Julien Fontanet
65daa39ebe fix(xo-cli): fix invalid parameters error message
Introduced by d7f29e736

The error format has changed due to the switch of xo-server to Ajv.
2022-11-20 23:44:50 +01:00
Julien Fontanet
5ad94504e3 feat(xo-web/downloadLog): use .json extension for JSON values 2022-11-20 23:20:01 +01:00
Julien Fontanet
4101bf3ba5 fix(xo-web): injected task.parent should not be enumerable
Shared task objects are directly altered, and adding an enumerable cyclic property might break JSON.stringify in other components.
2022-11-20 23:19:35 +01:00
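A minimal sketch of the change described above; the task object shape is an assumption.

```js
// Attach the cyclic `parent` reference as a non-enumerable property so
// that JSON.stringify and other enumeration-based code simply skip it.
function injectParent(task, parent) {
  Object.defineProperty(task, 'parent', {
    value: parent,
    enumerable: false,
    configurable: true,
    writable: true,
  })
  return task
}

// JSON.stringify(injectParent({ id: 1 }, someParent)) → '{"id":1}'
```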
Thierry Goettelmann
e9d52864ef fix(lite): remove @trivago/prettier-plugin-sort-imports package breaking monorepo (#6531) 2022-11-18 11:32:27 +01:00
Julien Fontanet
aef2696426 feat(log): respect env.{DEBUG,NODE_DEBUG} by default
Previously, env.{DEBUG,NODE_DEBUG} were only handled if `log/configure` had been imported; now it's the case by default.
2022-11-18 10:42:13 +01:00
Julien Fontanet
94c755b102 fix(backups-cli/clean-vms): use getSyncedHandler 2022-11-18 10:42:13 +01:00
Gabriel Gunullu
279b457348 test(xo-remote-parser): from Jest to test (#6537) 2022-11-17 14:35:01 +01:00
Julien Fontanet
b5988bb8b7 chore(backups-cli): convert to ESM 2022-11-17 10:44:48 +01:00
Mathieu
f73b1d8b40 feat(lite): add loader in pool dashboard (#6468) 2022-11-17 10:15:03 +01:00
Gabriel Gunullu
b2ccb07a95 test(complex-matcher): from Jest to test (#6535) 2022-11-16 23:24:32 +01:00
Thierry Goettelmann
9560cc4e33 chore(lite): upgrade packages (#6532) 2022-11-16 11:18:04 +01:00
Julien Fontanet
e87c380556 chore: update dev deps 2022-11-15 15:16:29 +01:00
Julien Fontanet
b0846876f7 feat: release 5.76.2 2022-11-14 15:55:02 +01:00
Julien Fontanet
477ed67957 feat(xo-server): 5.106.1 2022-11-14 14:52:01 +01:00
Thierry Goettelmann
5acacd7e1e feat(lite): add merge prop to UiButtonGroup (#6494) 2022-11-14 11:08:26 +01:00
Thierry Goettelmann
8d542fe9c0 fix(lite): UiButton should follow UiButtonGroup transparent prop (#6493) 2022-11-14 11:06:54 +01:00
Thierry Goettelmann
b0cb249ae9 docs(lite): update README about UiIcon (#6520) 2022-11-14 10:22:07 +01:00
Julien Fontanet
185509a0cf fix(xo-server/proxy.upgradeAppliance): use getObject method on correct object
Introduced by 572359892
2022-11-10 18:12:57 +01:00
Julien Fontanet
08298d3284 feat: limit concurrency of root build script
Should fix https://xcp-ng.org/forum/post/54567
2022-11-10 18:09:05 +01:00
Mathieu
7a4cec5093 fix(dashboard/health): filter correctly unhealthyVdis (#6519)
See zammad#10720
2022-11-10 15:35:05 +01:00
rajaa-b
f44f5199c6 feat(lite): uncollapse hosts by default (#6428) 2022-11-10 15:12:37 +01:00
kursantkvi
81abc091de feat(xo-web/intl): Russian localization (#6526) 2022-11-10 10:02:16 +01:00
Pierre Donias
7e4f4c445d feat: release 5.76.1 (#6523) 2022-11-08 16:24:01 +01:00
Pierre Donias
5a673c1833 feat: technical release (#6521) 2022-11-08 11:12:48 +01:00
Mathieu
266231ae0f fix(xo-web): "Pro Support" instead of "pool support" in XCP-ng support tooltips (#6517)
See https://xcp-ng.org/forum/topic/6535
2022-11-08 10:11:16 +01:00
Florent BEAUCHAMP
9e87a887cb fix(xo-web/backup): cleanup settings correctly when deselecting health check (#6515)
Fix #6501
2022-11-08 10:06:04 +01:00
Mathieu
12e98bfd31 fix(xo-web/health): fix "an error has occurred" (#6508) 2022-11-08 09:52:54 +01:00
Mathieu
249f124ba6 fix(xo-web/license): display the license product ID in SelectLicense (#6512)
See zammad#10750
2022-11-08 09:50:46 +01:00
Julien Fontanet
131643a91b feat(xo-server/rest-api): expose VDI snapshots 2022-11-07 17:14:08 +01:00
Julien Fontanet
df3df18690 feat(xo-server/rest-api): expose VM snapshots and templates 2022-11-07 17:00:22 +01:00
Julien Fontanet
5401d17610 fix(xo-server/backup): respect httpProxy when connecting to XAPIs (#6513) 2022-11-07 15:07:35 +01:00
Florent BEAUCHAMP
90ea2284c6 fix(xo-vmk-to-vhd): failing tests (#6518)
Sometimes `buffer.allocUnsafe` was generating a buffer containing only zeroes; that buffer was filtered out in packages/xo-vmdk-to-vhd/src/vmdk-generate.js line 140, thus the generated VMDK was variable.
2022-11-07 13:28:43 +01:00
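For context, a small illustration of why the test was flaky; the fixture generation shown here is an assumption, not the actual test code.

```js
const { randomBytes } = require('crypto')

// Buffer.allocUnsafe returns uninitialized memory: occasionally it is
// all zeroes, so a "skip empty blocks" filter drops it and the generated
// VMDK differs between runs.
const flakyBlock = Buffer.allocUnsafe(512) // contents unspecified

// Explicitly random content keeps the fixture deterministic in the way
// that matters here: it is virtually never an all-zero block.
const stableBlock = randomBytes(512)
console.log(flakyBlock.length === stableBlock.length) // true
```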
Mathieu
a4c5792f9e fix(lite): fix .value is undefined (#6469) 2022-11-07 10:18:12 +01:00
Julien Fontanet
5723598923 feat(xo-server/proxy.upgradeAppliance): support proxies with unknown VM 2022-11-07 00:19:13 +01:00
Julien Fontanet
aa0b2ff93a feat(xo-server/proxy.register): vmUuid parameter 2022-11-06 01:10:09 +01:00
Gabriel Gunullu
be6233f12b test(backups): from Jest to test (#6500) 2022-11-04 17:00:02 +01:00
Olivier Lambert
17df749790 chore(ISSUE_TEMPLATE/bug_report): make it mandatory and ask commit (#6509) 2022-11-04 11:25:58 +01:00
Gabriel Gunullu
97f852f8e8 test(log): from Jest to test (#6498) 2022-11-04 10:54:11 +01:00
Gabriel Gunullu
dc3446d61a test(template): from Jest to test (#6499) 2022-11-04 10:53:38 +01:00
Mathieu
c830a0b208 fix(pool): added tooltip for no support icon (#6505)
See zammad#10716
2022-11-04 10:46:17 +01:00
Thierry Goettelmann
ff0307b68f fix(lite): Vite constants declaration (#6511) 2022-11-04 10:27:41 +01:00
Mathieu
1c3cad9235 feat(lite): alert when unreachable hosts (#6378) 2022-11-04 10:02:02 +01:00
Julien Fontanet
ccafc15b66 fix(xo-server): split-log → split-host
Introduced by ed7ff1fad

Fixes https://xcp-ng.org/forum/post/54503
2022-11-03 14:54:23 +01:00
Julien Fontanet
a40d6b32e3 fix(xo-server/sample.config.toml): typo log → logs
Introduced by 2dda1aecc

Fixes https://xcp-ng.org/forum/post/54351
2022-11-03 13:57:34 +01:00
Pierre Donias
de1ee92fe7 chore(lite): normalize package 2022-11-03 10:30:39 +01:00
Pierre Donias
c7227d2f50 feat(lite): settings page (#6418) 2022-11-03 10:30:39 +01:00
Mathieu
b2cebbfaf4 fix(lite): invalidate sessionId token (#6480) 2022-11-03 10:30:39 +01:00
Pierre Donias
30fbbc92ca feat(lite/ProgressBar): use transition instead of animation (#6466) 2022-11-03 10:30:39 +01:00
Mathieu
d1b210cf16 fix(lite/dashboard): add missing 'id' field for storage usage (#6467) 2022-11-03 10:30:39 +01:00
Mathieu
9963568368 feat(lite/pool/dashboard): top 5 CPU usage (#6370) 2022-11-03 10:30:39 +01:00
Thierry Goettelmann
ffc3249b33 feat(lite/component): UiSpinner (#6427) 2022-11-03 10:30:39 +01:00
Thierry Goettelmann
29826db81b fix(lite): fix build errors (#6448) 2022-11-03 10:30:39 +01:00
Pierre Donias
5367a76db5 chore(lite): create CHANGELOG.md (#6457) 2022-11-03 10:30:39 +01:00
Pierre Donias
2512a00205 fix(lite/UiBadge): do not instantiate FontAwesomeIcon if icon is undefined (#6446) 2022-11-03 10:30:39 +01:00
Thierry Goettelmann
72a3a9f04f feat(lite/component): Radio, Checkbox, Select, Input, Toggle (#6426) 2022-11-03 10:30:39 +01:00
Mathieu
b566e0fd46 feat(lite): persit language change (#6443) 2022-11-03 10:30:39 +01:00
Pierre Donias
4621fb4e9b feat(lite): set default language to English (#6442)
See https://xcp-ng.org/forum/topic/4731/xen-orchestra-lite/48?_=1664781482177
2022-11-03 10:30:39 +01:00
Mathieu
7f3d25964f feat(lite): display storage usage (#6421) 2022-11-03 10:30:39 +01:00
Thierry Goettelmann
4b3728e8d8 feat(lite/component): New multicolor modal (#6394) 2022-11-03 10:30:39 +01:00
Thierry Goettelmann
5218d6df1a feat(lite/component): Menu component (#6354)
* feat(lite/component): Menu component

* feat(lite/component): Add disabled prop to AppMenu

* feat(lite/component): Add custom placement to AppMenu + Fix trigger color

* feat(lite/component): Update VmsActionBar to use new AppMenu (#6357)

* fix(lite/menu): Doesn't teleport the root menu if no trigger

* Don't disable a menu item having a submenu

* i18n
2022-11-03 10:30:39 +01:00
Thierry Goettelmann
94b2b8ec70 feat(lite/buttons): Add multiple button colors, outlined and transparent (#6393) 2022-11-03 10:30:39 +01:00
Thierry Goettelmann
6d1086539e feat(lite/component): UiActionButton component (#6386) 2022-11-03 10:30:39 +01:00
Thierry Goettelmann
7f758bbb73 Revert "feat(lite/component): Radio and Checkbox"
This reverts commit abfb6c97a2.
2022-11-03 10:30:39 +01:00
Thierry Goettelmann
62b88200c3 feat(lite/component): Radio and Checkbox 2022-11-03 10:30:39 +01:00
Thierry Goettelmann
ce42883268 feat(lite): tooltips (#6412) 2022-11-03 10:30:39 +01:00
Pierre Donias
6b60cfce4d feat(lite): deploy script (#6413) 2022-11-03 10:30:39 +01:00
Thierry Goettelmann
aebb47ad38 feat(lite/component): Linear Chart (#6376) 2022-11-03 10:30:39 +01:00
Thierry Goettelmann
41f5634b7a feat(lite): i18n (#6399) 2022-11-03 10:30:39 +01:00
Thierry Goettelmann
87ddb01122 feat(lite): use FontAwesome Free (#6405) 2022-11-03 10:30:39 +01:00
Thierry Goettelmann
6898eea45e feat(lite): Update missing colors (#6392) 2022-11-03 10:30:39 +01:00
Thierry Goettelmann
ba2679d3d7 feat(lite/component): Change style of active items in tree view (#6397) 2022-11-03 10:30:39 +01:00
Thierry Goettelmann
971cdaa44f feat(lite): CR feedback (#6341) 2022-11-03 10:30:39 +01:00
Pierre Donias
005d3b5976 feat(lite): placeholders for pool/host/VM name_label (#6391)
Some objects may have an empty `name_label`. This is to avoid confusion in the
tree view.
2022-11-03 10:30:39 +01:00
Thierry Goettelmann
663403cb14 chore(lite): change font-size (#6390) 2022-11-03 10:30:39 +01:00
Pierre Donias
b341e38623 feat(lite): add "coming soon" message on empty views (#6389) 2022-11-03 10:30:39 +01:00
Thierry Goettelmann
8246db30cb chore(lite): remove fake cards from Pool Dashboard (#6385) 2022-11-03 10:30:39 +01:00
Pierre Donias
9c9c656620 feat(lite/signin): smaller logo and few tweaks (#6381) 2022-11-03 10:30:39 +01:00
Pierre Donias
f36be0d5e0 feat(lite/nav): use logo without circle (#6382) 2022-11-03 10:30:39 +01:00
Pierre Donias
72090ea8ff feat(lite/dark mode): persistence + icon (#6383) 2022-11-03 10:30:39 +01:00
Pierre Donias
8d64a0a232 feat(lite): use new XO Lite logo (#6379) 2022-11-03 10:30:39 +01:00
Pierre Donias
35974a0a33 fix(lite): connect to window.origin's XAPI in prod (#6377) 2022-11-03 10:30:39 +01:00
Mathieu
3023439028 fix(lite): fix UiCard height (#6373) 2022-11-03 10:30:39 +01:00
Pierre Donias
77f4a09d74 chore(lite): switch from actual routes to hash routes (#6372)
XCP-ng web servers only serve the HTML on /xolite.html. This allows XO Lite to
still work when opened on a route that is different than /.
2022-11-03 10:30:39 +01:00
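The standard way to do this with Vue Router 4 is hash-based history; a minimal sketch follows (the route table is omitted, and this is not necessarily the exact XO Lite code).

```js
import { createRouter, createWebHashHistory } from 'vue-router'

// With hash history every route lives under /xolite.html#/…, so the
// XCP-ng web server only ever has to serve that single HTML file.
export const router = createRouter({
  history: createWebHashHistory(),
  routes: [], // actual routes omitted
})
```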
Pierre Donias
0fc797f7d0 fix(lite): change HTML main element ID from app to root (#6371)
XCP-ng web servers already serve an HTML file with a #root element. This makes it
possible to use the new version of XO Lite without having to change that HTML file
on XCP-ng hosts.
2022-11-03 10:30:39 +01:00
Mathieu
0b02c84e33 feat(lite): xapiStat with fetchStats composable (#6361) 2022-11-03 10:30:39 +01:00
Thierry Goettelmann
e03ff0a9be feat(lite/component): update UiButton (#6355) 2022-11-03 10:30:39 +01:00
Thierry Goettelmann
d91f1841c0 feat(lite/component): UI Icon utility component (#6353) 2022-11-03 10:30:39 +01:00
Thierry Goettelmann
0effc9cfc1 fix(lite): disconnecting (#6346) 2022-11-03 10:30:39 +01:00
Thierry Goettelmann
f08cbb458d fix(lite): ESLint config (#6344) 2022-11-03 10:30:39 +01:00
Thierry Goettelmann
b8c9770d43 chore(lite): merge old repo to XO 2022-11-03 10:30:39 +01:00
Julien Fontanet
44ff5d0e4d fix(lite): various fixes 2022-11-03 10:30:39 +01:00
Thierry Goettelmann
ecb580a629 feat(lite): initial Vue.js implementation 2022-11-03 10:30:39 +01:00
Julien Fontanet
0623d837c1 feat: release 5.76.0 2022-10-31 13:59:50 +01:00
Julien Fontanet
f92d1ce4ac chore(CHANGELOG): integrate released changes 2022-10-31 13:24:43 +01:00
Julien Fontanet
88f84069d6 feat(xo-web): 5.106.0 2022-10-31 13:23:49 +01:00
Julien Fontanet
b9b7081184 feat(xo-server): 5.105.0 2022-10-31 13:23:21 +01:00
Julien Fontanet
ce3e0817db feat(@xen-orchestra/proxy): 0.26.4 2022-10-31 13:22:52 +01:00
Julien Fontanet
55b65a8bf6 feat(@xen-orchestra/backups): 0.29.0 2022-10-31 13:22:18 +01:00
Julien Fontanet
6767141661 feat(@xen-orchestra/xapi): 1.5.2 2022-10-31 13:17:26 +01:00
rajaa-b
2eb3b15930 feat(xo-web/new-vm): possibility to destroy cloud config drive after first boot (#6486)
Fixes #6438
2022-10-31 12:25:55 +01:00
Julien Fontanet
b63c4a0d4f fix(xapi/waitObjectState): check if state is already correct 2022-10-28 16:15:53 +02:00
Mathieu
1269ddfeae feat(xo-web/pool): XCP-ng license binding (#6453) 2022-10-28 16:04:37 +02:00
rajaa-b
afd47f5522 fix(xo-web/proxies): remove "Bind license" for proxies without VM UUID (#6472) 2022-10-28 11:19:45 +02:00
Florent BEAUCHAMP
7ede6bdbce feat(backups): use NBD to export VDIs when possible (#6461) 2022-10-27 16:50:56 +02:00
Pierre Donias
03b505e40e feat: technical release (#6488) 2022-10-27 15:18:03 +02:00
Julien Fontanet
ed7ff1fad4 feat(xo-server): allow logging to external syslog
Follow-up of 756d2fe4e
2022-10-27 14:23:00 +02:00
Julien Fontanet
2dda1aecce feat(xo-server): use object to configure log transports
Follow-up of 756d2fe4e
2022-10-27 14:18:23 +02:00
Julien Fontanet
720e363577 test(fs/abstract): use getSyncedHandler 2022-10-27 09:26:49 +02:00
Florent Beauchamp
545a65521a fix(vhd-lib): improve openVhd error handling 2022-10-27 09:26:49 +02:00
Florent Beauchamp
0cf6f94677 test: rework tests following 05161bd4df
Tests of cleanVm are still failing until we fix the error condition of cleanVm's broken VHD removal.

- don't use a handler on / (needs root to run)
- don't create files at the root of the remote (conflicts with the metadata and encryption.json)
- test more unhappy paths
2022-10-27 09:26:49 +02:00
Florent Beauchamp
14e205ab69 fix(vhd-cli): sync handler 2022-10-27 09:26:49 +02:00
Florent Beauchamp
c3da87a40c fix(@xen-orchestra/fs): do not create metadata on non encrypted remote
This was creating files in the wrong place during tests or when running the CLI.

Introduced by 05161bd4df5b42e5ecfa0ae11e60c466ab2eabdf
2022-10-27 09:26:49 +02:00
Gabriel Gunullu
5d93b05088 test(cron): from Jest to test (#6485) 2022-10-26 09:41:39 +02:00
Gabriel Gunullu
2cdd33cb7a test(async-map): from Jest to test (#6484) 2022-10-25 16:17:08 +02:00
Julien Fontanet
dc909fdfb0 test(async-each): fix iteratee calls
Introduced by myself in a last-minute change in a43199b75
2022-10-25 13:14:44 +02:00
Gabriel Gunullu
a43199b754 test(async-each): from Jest to test (#6481) 2022-10-25 12:23:59 +02:00
Gabriel Gunullu
876211879f test(decorate-with): from Tap to test (#6474) 2022-10-24 17:46:44 +02:00
Gabriel Gunullu
fe323b8fe5 test: remove unnecessary ESLint comments (#6479) 2022-10-24 17:46:27 +02:00
Florent BEAUCHAMP
b60f5d593b feat(xo-web/remote): show encryption in remote UI (#6465)
* if the remote is disabled, we don't know the used algorithm: only show the lock if there is an encryption key
* if the remote is enabled:
  * if the algorithm is undefined or none: show nothing, the remote is not encrypted
  * if the algorithm is DEFAULT_ENCRYPTION_ALGORITHM: show the lock with the name of the algorithm as a tooltip
  * else: show the lock and a warning advising to create a new remote with an up-to-date algorithm
2022-10-24 16:15:26 +02:00
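A minimal sketch of the display rules above; function and property names are assumptions, not the actual xo-web code.

```js
// Value taken from the aes256-gcm commit further down; the real constant may differ.
const DEFAULT_ENCRYPTION_ALGORITHM = 'aes-256-gcm'

function remoteEncryptionDisplay({ enabled, algorithm, encryptionKey }) {
  if (!enabled) {
    // the algorithm is unknown while the remote is disabled
    return encryptionKey !== undefined ? { lock: true } : {}
  }
  if (algorithm === undefined || algorithm === 'none') {
    return {} // not encrypted
  }
  if (algorithm === DEFAULT_ENCRYPTION_ALGORITHM) {
    return { lock: true, tooltip: algorithm }
  }
  return { lock: true, warning: 'create a new remote with an up-to-date algorithm' }
}
```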
Gabriel Gunullu
2d4317b681 test(read-chunk): from Jest to test (#6478) 2022-10-24 14:38:56 +02:00
Julien Fontanet
caf0eb3762 chore(eslint): accepts Node 16 features in tests 2022-10-24 11:21:19 +02:00
Gabriel Gunullu
c1aa7b9d8a test(multi-key-map): from Jest to test (#6477) 2022-10-24 10:05:51 +02:00
Gabriel Gunullu
6c6efd9cfb test(disposable): from Jest to test and SinonJS (#6476) 2022-10-24 10:04:48 +02:00
Julien Fontanet
551670a8b9 fix(eslint): disable n/no-unpublished-{import,require} in tests 2022-10-24 09:53:55 +02:00
Gabriel Gunullu
ac75225e7d test(compose): from Jest to test (#6473) 2022-10-21 16:25:25 +02:00
Julien Fontanet
20dbbeb38e feat(npmignore): handle *.test.*js files
This naming scheme is used by `node:test` and its userland implementation `test`.
2022-10-20 17:00:49 +02:00
Julien Fontanet
37dea9980e fix(npmignore): handle .cjs and .mjs files 2022-10-20 16:58:30 +02:00
Gabriel Gunullu
5cec2d4cb0 test(coalesce-calls): from Jest to test (#6470) 2022-10-20 16:46:56 +02:00
Julien Fontanet
ed76fa5141 feat(predicates): not operator 2022-10-20 12:47:02 +02:00
Julien Fontanet
389a765825 fix(mixins/_parseBasicAuth): consider empty password as missing
This makes `username:` recognized as a token, just like `username` is.

This fixes token-based authentication in HttpProxy with cURL.
2022-10-20 10:21:09 +02:00
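A generic sketch of the parsing behavior described above; this is not the actual mixin code.

```js
// `curl --user mytoken:` sends the Basic credentials "mytoken:" (empty
// password).  Treating an empty password as missing lets the username
// alone be used as an authentication token, just like a bare `mytoken`.
function parseBasicAuth(header) {
  if (header === undefined || !header.startsWith('Basic ')) return
  const credentials = Buffer.from(header.slice('Basic '.length), 'base64').toString()
  const i = credentials.indexOf(':')
  if (i === -1) return { token: credentials }
  const username = credentials.slice(0, i)
  const password = credentials.slice(i + 1)
  return password === '' ? { token: username } : { username, password }
}
```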
Julien Fontanet
3bad40095a fix(mixins/Config#watch): first run even when undefined
Fixes issue introduced by d157fd352
2022-10-19 18:43:48 +02:00
Gabriel Gunullu
1a51c66028 fix(ci): GitHub actions workflow (#6463) 2022-10-19 12:00:46 +02:00
Florent BEAUCHAMP
05161bd4df feat(fs): use aes256-gcm encryption algorithm (#6447)
Fixes zammad#9788
2022-10-17 11:33:55 +02:00
Florent BEAUCHAMP
db1102750f feat(xo-web): label of vhd directory backup (#6459) 2022-10-13 16:45:03 +02:00
Julien Fontanet
42a974476f feat(@vates/otp): minimal HOTP/TOTP implementation (#6456) 2022-10-12 15:44:43 +02:00
Florent BEAUCHAMP
0dd91c1efe feat(nbd-client): first implementation (#6444) 2022-10-12 14:46:16 +02:00
Julien Fontanet
756d2fe4e7 feat(xo-server): make log transport configurable
See zammad#9799
2022-10-12 14:37:41 +02:00
Julien Fontanet
61c64b49c7 feat(log/configure): can instantiate transport from JSON 2022-10-12 14:37:41 +02:00
Julien Fontanet
c2eb68a31a chore(fuse-vhd): remove unused var/lib
Introduced by 46fe3be32
2022-10-11 16:41:12 +02:00
Julien Fontanet
f1a1b922c7 chore: format with Prettier 2022-10-11 16:40:10 +02:00
Cécile Morange
a2dcceb470 docs(installation): Debian 10 → 11 (#6458)
Signed-off-by: Cécile MORANGE - AtaxyaNetwork <contact@ataxya.net>
2022-10-10 17:29:32 +02:00
Julien Fontanet
1d78fdd673 chore: update dev deps 2022-10-10 15:55:05 +02:00
512 changed files with 26001 additions and 4453 deletions

View File

@@ -28,8 +28,10 @@ module.exports = {
},
},
{
files: ['*.spec.{,c,m}js'],
files: ['*.{spec,test}.{,c,m}js'],
rules: {
'n/no-unpublished-require': 'off',
'n/no-unpublished-import': 'off',
'n/no-unsupported-features/node-builtins': [
'error',
{

View File

@@ -6,7 +6,10 @@ labels: 'status: triaging :triangular_flag_on_post:, type: bug :bug:'
assignees: ''
---
**XOA or XO from the sources?**
1. ⚠️ **If you don't follow this template, the issue will be closed**.
2. ⚠️ **If your issue can't be easily reproduced, please report it [on the forum first](https://xcp-ng.org/forum/category/12/xen-orchestra)**.
Are you using XOA or XO from the sources?
If XOA:
@@ -15,6 +18,7 @@ If XOA:
If XO from the sources:
- Provide **your commit number**. If it's older than a week, we won't investigate
- Don't forget to [read this first](https://xen-orchestra.com/docs/community.html)
- As well as follow [this guide](https://xen-orchestra.com/docs/community.html#report-a-bug)
@@ -38,8 +42,6 @@ If applicable, add screenshots to help explain your problem.
**Environment (please provide the following information):**
- Node: [e.g. 16.12.1]
- xo-server: [e.g. 5.82.3]
- xo-web: [e.g. 5.87.0]
- hypervisor: [e.g. XCP-ng 8.2.0]
**Additional context**

View File

@@ -1,13 +1,12 @@
name: CI
on: [push]
on: push
jobs:
build:
name: Test
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: satackey/action-docker-layer-caching@v0.0.11
# Ignore the failure of a step and avoid terminating the job.
continue-on-error: true
- run: docker-compose -f docker/docker-compose.dev.yml build
- run: docker-compose -f docker/docker-compose.dev.yml up
- name: Checkout
uses: actions/checkout@v3
- name: Build docker image
run: docker-compose -f docker/docker-compose.dev.yml build
- name: Create the container and start the tests
run: docker-compose -f docker/docker-compose.dev.yml up --exit-code-from xo

1
.gitignore vendored
View File

@@ -34,3 +34,4 @@ yarn-error.log.*
# code coverage
.nyc_output/
coverage/
.turbo/

4
.husky/pre-commit Executable file
View File

@@ -0,0 +1,4 @@
#!/usr/bin/env sh
. "$(dirname -- "$0")/_/husky.sh"
npx lint-staged

View File

@@ -10,8 +10,8 @@
Installation of the [npm package](https://npmjs.org/package/@vates/async-each):
```
> npm install --save @vates/async-each
```sh
npm install --save @vates/async-each
```
## Usage

View File

@@ -1,6 +1,8 @@
'use strict'
/* eslint-env jest */
const { describe, it, beforeEach } = require('test')
const assert = require('assert').strict
const { spy } = require('sinon')
const { asyncEach } = require('./')
@@ -34,12 +36,18 @@ describe('asyncEach', () => {
})
it('works', async () => {
const iteratee = jest.fn(async () => {})
const iteratee = spy(async () => {})
await asyncEach.call(thisArg, iterable, iteratee, { concurrency: 1 })
expect(iteratee.mock.instances).toEqual(Array.from(values, () => thisArg))
expect(iteratee.mock.calls).toEqual(Array.from(values, (value, index) => [value, index, iterable]))
assert.deepStrictEqual(
iteratee.thisValues,
Array.from(values, () => thisArg)
)
assert.deepStrictEqual(
iteratee.args,
Array.from(values, (value, index) => [value, index, iterable])
)
})
;[1, 2, 4].forEach(concurrency => {
it('respects a concurrency of ' + concurrency, async () => {
@@ -49,7 +57,7 @@ describe('asyncEach', () => {
values,
async () => {
++running
expect(running).toBeLessThanOrEqual(concurrency)
assert.deepStrictEqual(running <= concurrency, true)
await randomDelay()
--running
},
@@ -59,42 +67,52 @@ describe('asyncEach', () => {
})
it('stops on first error when stopOnError is true', async () => {
const tracker = new assert.CallTracker()
const error = new Error()
const iteratee = jest.fn((_, i) => {
const iteratee = tracker.calls((_, i) => {
if (i === 1) {
throw error
}
})
}, 2)
assert.deepStrictEqual(
await rejectionOf(asyncEach(iterable, iteratee, { concurrency: 1, stopOnError: true })),
error
)
expect(await rejectionOf(asyncEach(iterable, iteratee, { concurrency: 1, stopOnError: true }))).toBe(error)
expect(iteratee).toHaveBeenCalledTimes(2)
tracker.verify()
})
it('rejects AggregateError when stopOnError is false', async () => {
const errors = []
const iteratee = jest.fn(() => {
const iteratee = spy(() => {
const error = new Error()
errors.push(error)
throw error
})
const error = await rejectionOf(asyncEach(iterable, iteratee, { stopOnError: false }))
expect(error.errors).toEqual(errors)
expect(iteratee.mock.calls).toEqual(Array.from(values, (value, index) => [value, index, iterable]))
assert.deepStrictEqual(error.errors, errors)
assert.deepStrictEqual(
iteratee.args,
Array.from(values, (value, index) => [value, index, iterable])
)
})
it('can be interrupted with an AbortSignal', async () => {
const tracker = new assert.CallTracker()
const ac = new AbortController()
const iteratee = jest.fn((_, i) => {
const iteratee = tracker.calls((_, i) => {
if (i === 1) {
ac.abort()
}
}, 2)
await assert.rejects(asyncEach(iterable, iteratee, { concurrency: 1, signal: ac.signal }), {
message: 'asyncEach aborted',
})
await expect(asyncEach(iterable, iteratee, { concurrency: 1, signal: ac.signal })).rejects.toThrow(
'asyncEach aborted'
)
expect(iteratee).toHaveBeenCalledTimes(2)
tracker.verify()
})
})
)

View File

@@ -29,6 +29,12 @@
"node": ">=8.10"
},
"scripts": {
"postversion": "npm publish --access public"
"postversion": "npm publish --access public",
"test": "node--test"
},
"devDependencies": {
"sinon": "^15.0.1",
"tap": "^16.3.0",
"test": "^3.2.1"
}
}

View File

@@ -10,8 +10,8 @@
Installation of the [npm package](https://npmjs.org/package/@vates/cached-dns.lookup):
```
> npm install --save @vates/cached-dns.lookup
```sh
npm install --save @vates/cached-dns.lookup
```
## Usage

View File

@@ -10,8 +10,8 @@
Installation of the [npm package](https://npmjs.org/package/@vates/coalesce-calls):
```
> npm install --save @vates/coalesce-calls
```sh
npm install --save @vates/coalesce-calls
```
## Usage

View File

@@ -1,6 +1,7 @@
'use strict'
/* eslint-env jest */
const { describe, it } = require('test')
const assert = require('assert')
const { coalesceCalls } = require('./')
@@ -23,13 +24,13 @@ describe('coalesceCalls', () => {
const promise2 = fn(defer2.promise)
defer1.resolve('foo')
expect(await promise1).toBe('foo')
expect(await promise2).toBe('foo')
assert.strictEqual(await promise1, 'foo')
assert.strictEqual(await promise2, 'foo')
const defer3 = pDefer()
const promise3 = fn(defer3.promise)
defer3.resolve('bar')
expect(await promise3).toBe('bar')
assert.strictEqual(await promise3, 'bar')
})
})

View File

@@ -30,6 +30,10 @@
"node": ">=8.10"
},
"scripts": {
"postversion": "npm publish --access public"
"postversion": "npm publish --access public",
"test": "node--test"
},
"devDependencies": {
"test": "^3.2.1"
}
}

View File

@@ -10,8 +10,8 @@
Installation of the [npm package](https://npmjs.org/package/@vates/compose):
```
> npm install --save @vates/compose
```sh
npm install --save @vates/compose
```
## Usage

View File

@@ -1,6 +1,7 @@
'use strict'
/* eslint-env jest */
const { describe, it } = require('test')
const assert = require('node:assert').strict
const { compose } = require('./')
@@ -9,43 +10,42 @@ const mul3 = x => x * 3
describe('compose()', () => {
it('throws when no functions is passed', () => {
expect(() => compose()).toThrow(TypeError)
expect(() => compose([])).toThrow(TypeError)
assert.throws(() => compose(), TypeError)
assert.throws(() => compose([]), TypeError)
})
it('applies from left to right', () => {
expect(compose(add2, mul3)(5)).toBe(21)
assert.strictEqual(compose(add2, mul3)(5), 21)
})
it('accepts functions in an array', () => {
expect(compose([add2, mul3])(5)).toBe(21)
assert.strictEqual(compose([add2, mul3])(5), 21)
})
it('can apply from right to left', () => {
expect(compose({ right: true }, add2, mul3)(5)).toBe(17)
assert.strictEqual(compose({ right: true }, add2, mul3)(5), 17)
})
it('accepts options with functions in an array', () => {
expect(compose({ right: true }, [add2, mul3])(5)).toBe(17)
assert.strictEqual(compose({ right: true }, [add2, mul3])(5), 17)
})
it('can compose async functions', async () => {
expect(
assert.strictEqual(
await compose(
{ async: true },
async x => x + 2,
async x => x * 3
)(5)
).toBe(21)
)(5),
21
)
})
it('forwards all args to first function', () => {
expect.assertions(1)
const expectedArgs = [Math.random(), Math.random()]
compose(
(...args) => {
expect(args).toEqual(expectedArgs)
assert.deepEqual(args, expectedArgs)
},
// add a second function to avoid the one function special case
Function.prototype
@@ -53,15 +53,13 @@ describe('compose()', () => {
})
it('forwards context to all functions', () => {
expect.assertions(2)
const expectedThis = {}
compose(
function () {
expect(this).toBe(expectedThis)
assert.strictEqual(this, expectedThis)
},
function () {
expect(this).toBe(expectedThis)
assert.strictEqual(this, expectedThis)
}
).call(expectedThis)
})

View File

@@ -19,6 +19,10 @@
"node": ">=7.6"
},
"scripts": {
"postversion": "npm publish --access public"
"postversion": "npm publish --access public",
"test": "node--test"
},
"devDependencies": {
"test": "^3.2.1"
}
}

View File

@@ -10,8 +10,8 @@
Installation of the [npm package](https://npmjs.org/package/@vates/decorate-with):
```
> npm install --save @vates/decorate-with
```sh
npm install --save @vates/decorate-with
```
## Usage

View File

@@ -1,7 +1,7 @@
'use strict'
const assert = require('assert')
const { describe, it } = require('tap').mocha
const { describe, it } = require('test')
const { decorateClass, decorateWith, decorateMethodsWith, perInstance } = require('./')

View File

@@ -26,9 +26,9 @@
},
"scripts": {
"postversion": "npm publish --access public",
"test": "tap"
"test": "node--test"
},
"devDependencies": {
"tap": "^16.0.1"
"test": "^3.2.1"
}
}

View File

@@ -10,8 +10,8 @@
Installation of the [npm package](https://npmjs.org/package/@vates/disposable):
```
> npm install --save @vates/disposable
```sh
npm install --save @vates/disposable
```
## Usage

View File

@@ -1,16 +1,17 @@
'use strict'
/* eslint-env jest */
const { describe, it } = require('test')
const { useFakeTimers, spy, assert } = require('sinon')
const { createDebounceResource } = require('./debounceResource')
jest.useFakeTimers()
const clock = useFakeTimers()
describe('debounceResource()', () => {
it('calls the resource disposer after 10 seconds', async () => {
const debounceResource = createDebounceResource()
const delay = 10e3
const dispose = jest.fn()
const dispose = spy()
const resource = await debounceResource(
Promise.resolve({
@@ -22,10 +23,10 @@ describe('debounceResource()', () => {
resource.dispose()
expect(dispose).not.toBeCalled()
assert.notCalled(dispose)
jest.advanceTimersByTime(delay)
clock.tick(delay)
expect(dispose).toBeCalled()
assert.called(dispose)
})
})

View File

@@ -1,13 +1,14 @@
'use strict'
/* eslint-env jest */
const { describe, it } = require('test')
const { spy, assert } = require('sinon')
const { deduped } = require('./deduped')
describe('deduped()', () => {
it('calls the resource function only once', async () => {
const value = {}
const getResource = jest.fn(async () => ({
const getResource = spy(async () => ({
value,
dispose: Function.prototype,
}))
@@ -17,13 +18,13 @@ describe('deduped()', () => {
const { value: v1 } = await dedupedGetResource()
const { value: v2 } = await dedupedGetResource()
expect(getResource).toHaveBeenCalledTimes(1)
expect(v1).toBe(value)
expect(v2).toBe(value)
assert.calledOnce(getResource)
assert.match(v1, value)
assert.match(v2, value)
})
it('only disposes the source disposable when its all copies dispose', async () => {
const dispose = jest.fn()
const dispose = spy()
const getResource = async () => ({
value: '',
dispose,
@@ -36,35 +37,35 @@ describe('deduped()', () => {
d1()
expect(dispose).not.toHaveBeenCalled()
assert.notCalled(dispose)
d2()
expect(dispose).toHaveBeenCalledTimes(1)
assert.calledOnce(dispose)
})
it('works with sync factory', () => {
const value = {}
const dispose = jest.fn()
const dispose = spy()
const dedupedGetResource = deduped(() => ({ value, dispose }))
const d1 = dedupedGetResource()
expect(d1.value).toBe(value)
assert.match(d1.value, value)
const d2 = dedupedGetResource()
expect(d2.value).toBe(value)
assert.match(d2.value, value)
d1.dispose()
expect(dispose).not.toHaveBeenCalled()
assert.notCalled(dispose)
d2.dispose()
expect(dispose).toHaveBeenCalledTimes(1)
assert.calledOnce(dispose)
})
it('no race condition on dispose before async acquisition', async () => {
const dispose = jest.fn()
const dispose = spy()
const dedupedGetResource = deduped(async () => ({ value: 42, dispose }))
const d1 = await dedupedGetResource()
@@ -73,6 +74,6 @@ describe('deduped()', () => {
d1.dispose()
expect(dispose).not.toHaveBeenCalled()
assert.notCalled(dispose)
})
})

View File

@@ -14,17 +14,22 @@
"url": "https://vates.fr"
},
"license": "ISC",
"version": "0.1.1",
"version": "0.1.4",
"engines": {
"node": ">=8.10"
},
"scripts": {
"postversion": "npm publish --access public"
"postversion": "npm publish --access public",
"test": "node--test"
},
"dependencies": {
"@vates/multi-key-map": "^0.1.0",
"@xen-orchestra/async-map": "^0.1.2",
"@xen-orchestra/log": "^0.3.0",
"@xen-orchestra/log": "^0.6.0",
"ensure-array": "^1.0.0"
},
"devDependencies": {
"sinon": "^15.0.1",
"test": "^3.2.1"
}
}

View File

@@ -8,8 +8,8 @@
Installation of the [npm package](https://npmjs.org/package/@vates/event-listeners-manager):
```
> npm install --save @vates/event-listeners-manager
```sh
npm install --save @vates/event-listeners-manager
```
## Usage

View File

@@ -4,9 +4,6 @@ const LRU = require('lru-cache')
const Fuse = require('fuse-native')
const { VhdSynthetic } = require('vhd-lib')
const { Disposable, fromCallback } = require('promise-toolbox')
const { createLogger } = require('@xen-orchestra/log')
const { warn } = createLogger('vates:fuse-vhd')
// build a stat object from https://github.com/fuse-friends/fuse-native/blob/master/test/fixtures/stat.js
const stat = st => ({
@@ -57,9 +54,7 @@ exports.mount = Disposable.factory(async function* mount(handler, diskPath, moun
},
read(path, fd, buf, len, pos, cb) {
if (path === '/vhd0') {
return vhd
.readRawData(pos, len, cache, buf)
.then(cb)
return vhd.readRawData(pos, len, cache, buf).then(cb)
}
throw new Error(`read file ${path} not exists`)
},
@@ -67,5 +62,5 @@ exports.mount = Disposable.factory(async function* mount(handler, diskPath, moun
return new Disposable(
() => fromCallback(() => fuse.unmount()),
fromCallback(() => fuse.mount())
)
)
})

View File

@@ -18,11 +18,10 @@
"node": ">=10.0"
},
"dependencies": {
"@xen-orchestra/log": "^0.3.0",
"fuse-native": "^2.2.6",
"lru-cache": "^7.14.0",
"promise-toolbox": "^0.21.0",
"vhd-lib": "^4.1.0"
"vhd-lib": "^4.2.1"
},
"scripts": {
"postversion": "npm publish --access public"

View File

@@ -10,8 +10,8 @@
Installation of the [npm package](https://npmjs.org/package/@vates/multi-key-map):
```
> npm install --save @vates/multi-key-map
```sh
npm install --save @vates/multi-key-map
```
## Usage

View File

@@ -1,6 +1,7 @@
'use strict'
/* eslint-env jest */
const { describe, it } = require('test')
const assert = require('node:assert')
const { MultiKeyMap } = require('./')
@@ -28,9 +29,9 @@ describe('MultiKeyMap', () => {
keys.forEach((key, i) => {
// copy the key to make sure the array itself is not the key
expect(map.get(key.slice())).toBe(values[i])
assert.strictEqual(map.get(key.slice()), values[i])
map.delete(key.slice())
expect(map.get(key.slice())).toBe(undefined)
assert.strictEqual(map.get(key.slice()), undefined)
})
})
})

View File

@@ -23,6 +23,10 @@
"node": ">=8.10"
},
"scripts": {
"postversion": "npm publish --access public"
"postversion": "npm publish --access public",
"test": "node--test"
},
"devDependencies": {
"test": "^3.2.1"
}
}

View File

@@ -0,0 +1,16 @@
### `new NbdClient({ address, exportname, cert, port = 10809 })`
create a new NBD client
```js
import NbdClient from '@vates/nbd-client'
const client = new NbdClient({
address: 'MY_NBD_HOST',
exportname: 'MY_SECRET_EXPORT',
cert: 'Server certificate', // optional, will use encrypted link if provided
})
await client.connect()
const block = await client.readBlock(blockIndex, BlockSize)
await client.disconnect()
```
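The export size is available once connected, which makes it possible to dump a whole export block by block. A minimal sketch, assuming the export size is a multiple of the block size (host and export names are placeholders):
```js
import { createWriteStream } from 'node:fs'
import NbdClient from '@vates/nbd-client'

const client = new NbdClient({ address: 'MY_NBD_HOST', exportname: 'MY_SECRET_EXPORT' })
await client.connect()

const blockSize = 64 * 1024 // NBD default block size
// exportSize is a BigInt (in bytes); assumed here to be a multiple of blockSize
const nBlocks = Number(client.exportSize / BigInt(blockSize))

const out = createWriteStream('export.raw')
for (let i = 0; i < nBlocks; i++) {
  out.write(await client.readBlock(i, blockSize))
}
out.end()

await client.disconnect()
```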

View File

@@ -0,0 +1 @@
../../scripts/npmignore

View File

@@ -0,0 +1,47 @@
<!-- DO NOT EDIT MANUALLY, THIS FILE HAS BEEN GENERATED -->
# @vates/nbd-client
[![Package Version](https://badgen.net/npm/v/@vates/nbd-client)](https://npmjs.org/package/@vates/nbd-client) ![License](https://badgen.net/npm/license/@vates/nbd-client) [![PackagePhobia](https://badgen.net/bundlephobia/minzip/@vates/nbd-client)](https://bundlephobia.com/result?p=@vates/nbd-client) [![Node compatibility](https://badgen.net/npm/node/@vates/nbd-client)](https://npmjs.org/package/@vates/nbd-client)
## Install
Installation of the [npm package](https://npmjs.org/package/@vates/nbd-client):
```sh
npm install --save @vates/nbd-client
```
## Usage
### `new NbdClient({ address, exportname, cert, port = 10809 })`
create a new NBD client
```js
import NbdClient from '@vates/nbd-client'
const client = new NbdClient({
address: 'MY_NBD_HOST',
exportname: 'MY_SECRET_EXPORT',
cert: 'Server certificate', // optional, will use encrypted link if provided
})
await client.connect()
const block = await client.readBlock(blockIndex, BlockSize)
await client.disconnect()
```
## Contributions
Contributions are _very_ welcomed, either on the documentation or on
the code.
You may:
- report any [issue](https://github.com/vatesfr/xen-orchestra/issues)
you've encountered;
- fork and create a pull request.
## License
[ISC](https://spdx.org/licenses/ISC) © [Vates SAS](https://vates.fr)

View File

@@ -0,0 +1,42 @@
'use strict'
exports.INIT_PASSWD = Buffer.from('NBDMAGIC') // "NBDMAGIC" ensures we're connected to an NBD server
exports.OPTS_MAGIC = Buffer.from('IHAVEOPT') // "IHAVEOPT" starts an option block
exports.NBD_OPT_REPLY_MAGIC = 1100100111001001n // magic number received during negotiation
exports.NBD_OPT_EXPORT_NAME = 1
exports.NBD_OPT_ABORT = 2
exports.NBD_OPT_LIST = 3
exports.NBD_OPT_STARTTLS = 5
exports.NBD_OPT_INFO = 6
exports.NBD_OPT_GO = 7
exports.NBD_FLAG_HAS_FLAGS = 1 << 0
exports.NBD_FLAG_READ_ONLY = 1 << 1
exports.NBD_FLAG_SEND_FLUSH = 1 << 2
exports.NBD_FLAG_SEND_FUA = 1 << 3
exports.NBD_FLAG_ROTATIONAL = 1 << 4
exports.NBD_FLAG_SEND_TRIM = 1 << 5
exports.NBD_FLAG_FIXED_NEWSTYLE = 1 << 0
exports.NBD_CMD_FLAG_FUA = 1 << 0
exports.NBD_CMD_FLAG_NO_HOLE = 1 << 1
exports.NBD_CMD_FLAG_DF = 1 << 2
exports.NBD_CMD_FLAG_REQ_ONE = 1 << 3
exports.NBD_CMD_FLAG_FAST_ZERO = 1 << 4
exports.NBD_CMD_READ = 0
exports.NBD_CMD_WRITE = 1
exports.NBD_CMD_DISC = 2
exports.NBD_CMD_FLUSH = 3
exports.NBD_CMD_TRIM = 4
exports.NBD_CMD_CACHE = 5
exports.NBD_CMD_WRITE_ZEROES = 6
exports.NBD_CMD_BLOCK_STATUS = 7
exports.NBD_CMD_RESIZE = 8
exports.NBD_REQUEST_MAGIC = 0x25609513 // magic number to create a new NBD request to send to the server
exports.NBD_REPLY_MAGIC = 0x67446698 // magic number received from the server when reading response to a nbd request
exports.NBD_REPLY_ACK = 1
exports.NBD_DEFAULT_PORT = 10809
exports.NBD_DEFAULT_BLOCK_SIZE = 64 * 1024

243
@vates/nbd-client/index.js Normal file
View File

@@ -0,0 +1,243 @@
'use strict'
const assert = require('node:assert')
const { Socket } = require('node:net')
const { connect } = require('node:tls')
const {
INIT_PASSWD,
NBD_CMD_READ,
NBD_DEFAULT_BLOCK_SIZE,
NBD_DEFAULT_PORT,
NBD_FLAG_FIXED_NEWSTYLE,
NBD_FLAG_HAS_FLAGS,
NBD_OPT_EXPORT_NAME,
NBD_OPT_REPLY_MAGIC,
NBD_OPT_STARTTLS,
NBD_REPLY_ACK,
NBD_REPLY_MAGIC,
NBD_REQUEST_MAGIC,
OPTS_MAGIC,
} = require('./constants.js')
const { fromCallback } = require('promise-toolbox')
const { readChunkStrict } = require('@vates/read-chunk')
// documentation is here : https://github.com/NetworkBlockDevice/nbd/blob/master/doc/proto.md
module.exports = class NbdClient {
#serverAddress
#serverCert
#serverPort
#serverSocket
#exportName
#exportSize
// AFAIK, there is no guarantee the server answers in the same order as the queries,
// so we keep a backlog of commands waiting for a response and handle concurrency manually
#waitingForResponse // whether a listener is already waiting for a response
#nextCommandQueryId = BigInt(0)
#commandQueryBacklog // map of commands waiting for a response: queryId => { size /* in bytes */, resolve, reject }
constructor({ address, port = NBD_DEFAULT_PORT, exportname, cert }) {
this.#serverAddress = address
this.#serverPort = port
this.#exportName = exportname
this.#serverCert = cert
}
get exportSize() {
return this.#exportSize
}
async #tlsConnect() {
return new Promise((resolve, reject) => {
this.#serverSocket = connect({
socket: this.#serverSocket,
rejectUnauthorized: false,
cert: this.#serverCert,
})
this.#serverSocket.once('error', reject)
this.#serverSocket.once('secureConnect', () => {
this.#serverSocket.removeListener('error', reject)
resolve()
})
})
}
// mandatory, at least to start the handshake
async #unsecureConnect() {
this.#serverSocket = new Socket()
return new Promise((resolve, reject) => {
this.#serverSocket.connect(this.#serverPort, this.#serverAddress)
this.#serverSocket.once('error', reject)
this.#serverSocket.once('connect', () => {
this.#serverSocket.removeListener('error', reject)
resolve()
})
})
}
async connect() {
// first we connect to the server without tls, and then we upgrade the connection
// to tls during the handshake
await this.#unsecureConnect()
await this.#handshake()
// reset internal state if we reconnected a nbd client
this.#commandQueryBacklog = new Map()
this.#waitingForResponse = false
}
async disconnect() {
await this.#serverSocket.destroy()
}
// we can use individual read/write from the socket here since there is no concurrency
async #sendOption(option, buffer = Buffer.alloc(0)) {
await this.#write(OPTS_MAGIC)
await this.#writeInt32(option)
await this.#writeInt32(buffer.length)
await this.#write(buffer)
assert.strictEqual(await this.#readInt64(), NBD_OPT_REPLY_MAGIC) // magic number everywhere
assert.strictEqual(await this.#readInt32(), option) // the option passed
assert.strictEqual(await this.#readInt32(), NBD_REPLY_ACK) // ACK
const length = await this.#readInt32()
assert.strictEqual(length, 0) // length
}
// we can use individual read/write from the socket here since there is only one handshake at once, no concurrency
async #handshake() {
assert((await this.#read(8)).equals(INIT_PASSWD))
assert((await this.#read(8)).equals(OPTS_MAGIC))
const flagsBuffer = await this.#read(2)
const flags = flagsBuffer.readInt16BE(0)
assert.strictEqual(flags & NBD_FLAG_FIXED_NEWSTYLE, NBD_FLAG_FIXED_NEWSTYLE) // only FIXED_NEWSTYLE is supported among the server options
await this.#writeInt32(NBD_FLAG_FIXED_NEWSTYLE) // the client also supports NBD_FLAG_C_FIXED_NEWSTYLE
if (this.#serverCert !== undefined) {
// upgrade socket to TLS if needed
await this.#sendOption(NBD_OPT_STARTTLS)
await this.#tlsConnect()
}
// send the export name we want to access.
// this implicitly closes the negotiation phase.
await this.#write(OPTS_MAGIC)
await this.#writeInt32(NBD_OPT_EXPORT_NAME)
const exportNameBuffer = Buffer.from(this.#exportName)
await this.#writeInt32(exportNameBuffer.length)
await this.#write(exportNameBuffer)
// 8 (export size) + 2 (flags) + 124 zeros = 134
// must read all to ensure nothing stays in the buffer
const answer = await this.#read(134)
this.#exportSize = answer.readBigUInt64BE(0)
const transmissionFlags = answer.readInt16BE(8)
assert.strictEqual(transmissionFlags & NBD_FLAG_HAS_FLAGS, NBD_FLAG_HAS_FLAGS, 'NBD_FLAG_HAS_FLAGS') // must always be 1 per the spec
// note: the XAPI server always sends NBD_FLAG_READ_ONLY (3) as a flag
}
#read(length) {
return readChunkStrict(this.#serverSocket, length)
}
#write(buffer) {
return fromCallback.call(this.#serverSocket, 'write', buffer)
}
async #readInt32() {
const buffer = await this.#read(4)
return buffer.readInt32BE(0)
}
async #readInt64() {
const buffer = await this.#read(8)
return buffer.readBigUInt64BE(0)
}
#writeInt32(int) {
const buffer = Buffer.alloc(4)
buffer.writeInt32BE(int)
return this.#write(buffer)
}
// when one read fails, stop everything
async #rejectAll(error) {
this.#commandQueryBacklog.forEach(({ reject }) => {
reject(error)
})
await this.disconnect()
}
async #readBlockResponse() {
// ensure at most one read occurs at a time
if (this.#waitingForResponse) {
return
}
try {
this.#waitingForResponse = true
const magic = await this.#readInt32()
if (magic !== NBD_REPLY_MAGIC) {
throw new Error(`magic number for block answer is wrong : ${magic} ${NBD_REPLY_MAGIC}`)
}
const error = await this.#readInt32()
if (error !== 0) {
// @todo use error codes from constants.js
throw new Error(`GOT ERROR CODE : ${error}`)
}
const blockQueryId = await this.#readInt64()
const query = this.#commandQueryBacklog.get(blockQueryId)
if (!query) {
throw new Error(` no query associated with id ${blockQueryId}`)
}
this.#commandQueryBacklog.delete(blockQueryId)
const data = await this.#read(query.size)
query.resolve(data)
this.#waitingForResponse = false
if (this.#commandQueryBacklog.size > 0) {
await this.#readBlockResponse()
}
} catch (error) {
// reject all the promises
// we don't need to call readBlockResponse on failure
// since we will empty the backlog
await this.#rejectAll(error)
}
}
async readBlock(index, size = NBD_DEFAULT_BLOCK_SIZE) {
const queryId = this.#nextCommandQueryId
this.#nextCommandQueryId++
// create and send command at once to ensure there is no concurrency issue
const buffer = Buffer.alloc(28)
buffer.writeInt32BE(NBD_REQUEST_MAGIC, 0) // it is a nbd request
buffer.writeInt16BE(0, 4) // no command flags for a simple block read
buffer.writeInt16BE(NBD_CMD_READ, 6) // we want to read a data block
buffer.writeBigUInt64BE(queryId, 8)
// byte offset in the raw disk
buffer.writeBigUInt64BE(BigInt(index) * BigInt(size), 16)
buffer.writeInt32BE(size, 24)
return new Promise((resolve, reject) => {
// this will handle one block response, but it may be for another query
// since the server does not guarantee to answer queries in order
this.#commandQueryBacklog.set(queryId, {
size,
resolve,
reject,
})
// really send the command to the server
this.#write(buffer).catch(reject)
// #readBlockResponse never throws directly
// but if it fails it will reject all the promises in the backlog
this.#readBlockResponse()
})
}
}
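Since each pending query is tracked in the backlog by its id, several reads can be in flight at once even when the server answers out of order. A minimal usage sketch (placeholder host and export names):
```js
import NbdClient from '@vates/nbd-client'

const client = new NbdClient({ address: 'MY_NBD_HOST', exportname: 'MY_SECRET_EXPORT' })
await client.connect()

// issue several queries before any response arrives; the backlog matches
// each response to its query id, whatever the order of the answers
const [b0, b1, b2] = await Promise.all([client.readBlock(0), client.readBlock(1), client.readBlock(2)])

await client.disconnect()
```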

View File

@@ -0,0 +1,76 @@
'use strict'
const NbdClient = require('./index.js')
const { spawn } = require('node:child_process')
const fs = require('node:fs/promises')
const { test } = require('tap')
const tmp = require('tmp')
const { pFromCallback } = require('promise-toolbox')
const { asyncEach } = require('@vates/async-each')
const FILE_SIZE = 2 * 1024 * 1024
async function createTempFile(size) {
const tmpPath = await pFromCallback(cb => tmp.file(cb))
const data = Buffer.alloc(size, 0)
for (let i = 0; i < size; i += 4) {
data.writeUInt32BE(i, i)
}
await fs.writeFile(tmpPath, data)
return tmpPath
}
test('it works with unsecured network', async tap => {
const path = await createTempFile(FILE_SIZE)
const nbdServer = spawn(
'nbdkit',
[
'file',
path,
'--newstyle', //
'--exit-with-parent',
'--read-only',
'--export-name=MY_SECRET_EXPORT',
],
{
stdio: ['inherit', 'inherit', 'inherit'],
}
)
const client = new NbdClient({
address: 'localhost',
exportname: 'MY_SECRET_EXPORT',
secure: false,
})
await client.connect()
tap.equal(client.exportSize, BigInt(FILE_SIZE))
const CHUNK_SIZE = 128 * 1024 // non default size
const indexes = []
for (let i = 0; i < FILE_SIZE / CHUNK_SIZE; i++) {
indexes.push(i)
}
// read multiple blocks in parallel
await asyncEach(
indexes,
async i => {
const block = await client.readBlock(i, CHUNK_SIZE)
let blockOk = true
let firstFail
for (let j = 0; j < CHUNK_SIZE; j += 4) {
const wanted = i * CHUNK_SIZE + j
const found = block.readUInt32BE(j)
blockOk = blockOk && found === wanted
if (!blockOk && firstFail === undefined) {
firstFail = j
}
}
tap.ok(blockOk, `check block ${i} content`)
},
{ concurrency: 8 }
)
await client.disconnect()
nbdServer.kill()
await fs.unlink(path)
})

View File

@@ -0,0 +1,35 @@
{
"private": false,
"name": "@vates/nbd-client",
"homepage": "https://github.com/vatesfr/xen-orchestra/tree/master/@vates/nbd-client",
"bugs": "https://github.com/vatesfr/xen-orchestra/issues",
"repository": {
"directory": "@vates/nbd-client",
"type": "git",
"url": "https://github.com/vatesfr/xen-orchestra.git"
},
"author": {
"name": "Vates SAS",
"url": "https://vates.fr"
},
"license": "ISC",
"version": "1.0.0",
"engines": {
"node": ">=14.0"
},
"dependencies": {
"@vates/async-each": "^1.0.0",
"@vates/read-chunk": "^1.0.1",
"@xen-orchestra/async-map": "^0.1.2",
"promise-toolbox": "^0.21.0",
"xen-api": "^1.2.2"
},
"devDependencies": {
"tap": "^16.3.0",
"tmp": "^0.2.1"
},
"scripts": {
"postversion": "npm publish --access public",
"test-integration": "tap *.spec.js"
}
}

130
@vates/otp/.USAGE.md Normal file
View File

@@ -0,0 +1,130 @@
### Usual workflow
> This section presents how this library should be used to implement classic two-factor authentication.
#### Setup
```js
import { generateSecret, generateTotpUri } from '@vates/otp'
import QrCode from 'qrcode'
// Generates a secret that will be shared by both the service and the user:
const secret = generateSecret()
// Stores the secret in the service:
await currentUser.saveOtpSecret(secret)
// Generates a URI to present to the user
const uri = generateTotpUri({ label: 'account name', issuer: 'my app', secret })
// Generates the QR code from the URI to make it easily importable in Authy or Google Authenticator
const qr = await QrCode.toDataURL(uri)
```
#### Authentication
```js
import { verifyTotp } from '@vates/otp'
// Verifies a `token` entered by the user against a `secret` generated during setup.
if (await verifyTotp(token, { secret })) {
console.log('authenticated!')
}
```
### API
#### Secret
```js
import { generateSecret } from '@vates/otp'
const secret = generateSecret()
// 'OJOKA65RY5FQQ2RYWVKD5Y3YG5CSHGYH'
```
#### HOTP
> This is likely not what you want to use, see TOTP below instead.
```js
import { generateHotp, generateHotpUri, verifyHotp } from '@vates/otp'
// a sequence number, see HOTP specification
const counter = 0
// generate a token
//
// optional params:
// - digits
const token = await generateHotp({ counter, secret })
// '239988'
// verify a token
//
// optional params:
// - digits
const isValid = await verifyHotp(token, { counter, secret })
// true
// generate a URI that can be displayed as a QR code to be used with Authy or Google Authenticator
//
// optional params:
// - digits
const uri = generateHotpUri({ counter, label: 'account name', issuer: 'my app', secret })
// 'otpauth://hotp/my%20app:account%20name?counter=0&issuer=my%20app&secret=OJOKA65RY5FQQ2RYWVKD5Y3YG5CSHGYH'
```
Optional params and their default values:
- `digits = 6`: length of the token; avoid changing it, as other values are not compatible with Google Authenticator
#### TOTP
```js
import { generateTotp, generateTotpUri, verifyTotp } from '@vates/otp'
// generate a token
//
// optional params:
// - digits
// - period
// - timestamp
const token = await generateTotp({ secret })
// '632869'
// verify a token
//
// optional params:
// - digits
// - period
// - timestamp
// - window
const isValid = await verifyTotp(token, { secret })
// true
// generate a URI that can be displayed as a QR code to be used with Authy or Google Authenticator
//
// optional params:
// - digits
// - period
const uri = generateTotpUri({ label: 'account name', issuer: 'my app', secret })
// 'otpauth://totp/my%20app:account%20name?issuer=my%20app&secret=OJOKA65RY5FQQ2RYWVKD5Y3YG5CSHGYH'
```
Optional params and their default values:
- `digits = 6`: length of the token; avoid changing it, as other values are not compatible with Google Authenticator
- `period = 30`: number of seconds a token is valid
- `timestamp = Date.now() / 1e3`: Unix timestamp, in seconds, at which the token is valid; defaults to now
- `window = 1`: number of periods before and after `timestamp` for which the token is considered valid
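For example, with the default `period` of 30 seconds and `window` of 1, a token generated during the previous period is still accepted (a sketch with arbitrary timestamps):
```js
import { generateSecret, generateTotp, verifyTotp } from '@vates/otp'

const secret = generateSecret()
const token = await generateTotp({ secret, timestamp: 1000 })

// one period later, the token is still inside the verification window
await verifyTotp(token, { secret, timestamp: 1030 })
// → true

// with a zero window, only the current period is accepted
await verifyTotp(token, { secret, timestamp: 1030, window: 0 })
// → false
```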
#### Verification from URI
```js
import { verifyFromUri } from '@vates/otp'
// Verify the token using all the information contained in the URI
const isValid = await verifyFromUri(token, uri)
// true
```

1
@vates/otp/.npmignore Symbolic link
View File

@@ -0,0 +1 @@
../../scripts/npmignore

163
@vates/otp/README.md Normal file
View File

@@ -0,0 +1,163 @@
<!-- DO NOT EDIT MANUALLY, THIS FILE HAS BEEN GENERATED -->
# @vates/otp
[![Package Version](https://badgen.net/npm/v/@vates/otp)](https://npmjs.org/package/@vates/otp) ![License](https://badgen.net/npm/license/@vates/otp) [![PackagePhobia](https://badgen.net/bundlephobia/minzip/@vates/otp)](https://bundlephobia.com/result?p=@vates/otp) [![Node compatibility](https://badgen.net/npm/node/@vates/otp)](https://npmjs.org/package/@vates/otp)
> Minimal HOTP/TOTP implementation
## Install
Installation of the [npm package](https://npmjs.org/package/@vates/otp):
```sh
npm install --save @vates/otp
```
## Usage
### Usual workflow
> This section presents how this library should be used to implement classic two-factor authentication.
#### Setup
```js
import { generateSecret, generateTotpUri } from '@vates/otp'
import QrCode from 'qrcode'
// Generates a secret that will be shared by both the service and the user:
const secret = generateSecret()
// Stores the secret in the service:
await currentUser.saveOtpSecret(secret)
// Generates a URI to present to the user
const uri = generateTotpUri({ label: 'account name', issuer: 'my app', secret })
// Generates the QR code from the URI to make it easily importable in Authy or Google Authenticator
const qr = await QrCode.toDataURL(uri)
```
#### Authentication
```js
import { verifyTotp } from '@vates/otp'
// Verifies a `token` entered by the user against a `secret` generated during setup.
if (await verifyTotp(token, { secret })) {
console.log('authenticated!')
}
```
### API
#### Secret
```js
import { generateSecret } from '@vates/otp'
const secret = generateSecret()
// 'OJOKA65RY5FQQ2RYWVKD5Y3YG5CSHGYH'
```
#### HOTP
> This is likely not what you want to use, see TOTP below instead.
```js
import { generateHotp, generateHotpUri, verifyHotp } from '@vates/otp'
// a sequence number, see HOTP specification
const counter = 0
// generate a token
//
// optional params:
// - digits
const token = await generateHotp({ counter, secret })
// '239988'
// verify a token
//
// optional params:
// - digits
const isValid = await verifyHotp(token, { counter, secret })
// true
// generate a URI that can be displayed as a QR code to be used with Authy or Google Authenticator
//
// optional params:
// - digits
const uri = generateHotpUri({ counter, label: 'account name', issuer: 'my app', secret })
// 'otpauth://hotp/my%20app:account%20name?counter=0&issuer=my%20app&secret=OJOKA65RY5FQQ2RYWVKD5Y3YG5CSHGYH'
```
Optional params and their default values:
- `digits = 6`: length of the token; avoid changing it, as other values are not compatible with Google Authenticator
#### TOTP
```js
import { generateTotp, generateTotpUri, verifyTotp } from '@vates/otp'
// generate a token
//
// optional params:
// - digits
// - period
// - timestamp
const token = await generateTotp({ secret })
// '632869'
// verify a token
//
// optional params:
// - digits
// - period
// - timestamp
// - window
const isValid = await verifyTotp(token, { secret })
// true
// generate a URI that can be displayed as a QR code to be used with Authy or Google Authenticator
//
// optional params:
// - digits
// - period
const uri = generateTotpUri({ label: 'account name', issuer: 'my app', secret })
// 'otpauth://totp/my%20app:account%20name?issuer=my%20app&secret=OJOKA65RY5FQQ2RYWVKD5Y3YG5CSHGYH'
```
Optional params and their default values:
- `digits = 6`: length of the token; avoid changing it, as other values are not compatible with Google Authenticator
- `period = 30`: number of seconds a token is valid
- `timestamp = Date.now() / 1e3`: Unix timestamp, in seconds, at which the token is valid; defaults to now
- `window = 1`: number of periods before and after `timestamp` for which the token is considered valid
#### Verification from URI
```js
import { verifyFromUri } from '@vates/otp'
// Verify the token using all the information contained in the URI
const isValid = await verifyFromUri(token, uri)
// true
```
## Contributions
Contributions are _very_ welcomed, either on the documentation or on
the code.
You may:
- report any [issue](https://github.com/vatesfr/xen-orchestra/issues)
you've encountered;
- fork and create a pull request.
## License
[ISC](https://spdx.org/licenses/ISC) © [Vates SAS](https://vates.fr)

111
@vates/otp/index.mjs Normal file
View File

@@ -0,0 +1,111 @@
import { base32 } from 'rfc4648'
import { webcrypto } from 'node:crypto'
const { subtle } = webcrypto
function assert(name, value) {
if (!value) {
throw new TypeError('invalid value for param ' + name)
}
}
// https://github.com/google/google-authenticator/wiki/Key-Uri-Format
function generateUri(protocol, label, params) {
assert('label', typeof label === 'string')
assert('secret', typeof params.secret === 'string')
let path = encodeURIComponent(label)
const { issuer } = params
if (issuer !== undefined) {
path = encodeURIComponent(issuer) + ':' + path
}
const query = Object.entries(params)
.filter(_ => _[1] !== undefined)
.map(([key, value]) => key + '=' + encodeURIComponent(value))
.join('&')
return `otpauth://${protocol}/${path}?${query}`
}
export function generateSecret() {
// https://www.rfc-editor.org/rfc/rfc4226 recommends 160 bits (i.e. 20 bytes)
const data = new Uint8Array(20)
webcrypto.getRandomValues(data)
return base32.stringify(data, { pad: false })
}
const DIGITS = 6
// https://www.rfc-editor.org/rfc/rfc4226
export async function generateHotp({ counter, digits = DIGITS, secret }) {
const data = new Uint8Array(8)
new DataView(data.buffer).setBigInt64(0, BigInt(counter), false)
const key = await subtle.importKey(
'raw',
base32.parse(secret, { loose: true }),
{ name: 'HMAC', hash: 'SHA-1' },
false,
['sign', 'verify']
)
const digest = new DataView(await subtle.sign('HMAC', key, data))
const offset = digest.getUint8(digest.byteLength - 1) & 0xf
const p = digest.getUint32(offset) & 0x7f_ff_ff_ff
return String(p % Math.pow(10, digits)).padStart(digits, '0')
}
export function generateHotpUri({ counter, digits, issuer, label, secret }) {
assert('counter', typeof counter === 'number')
return generateUri('hotp', label, { counter, digits, issuer, secret })
}
export async function verifyHotp(token, opts) {
return token === (await generateHotp(opts))
}
function totpCounter(period = 30, timestamp = Math.floor(Date.now() / 1e3)) {
return Math.floor(timestamp / period)
}
// https://www.rfc-editor.org/rfc/rfc6238.html
export async function generateTotp({ period, timestamp, ...opts }) {
opts.counter = totpCounter(period, timestamp)
return await generateHotp(opts)
}
export function generateTotpUri({ digits, issuer, label, period, secret }) {
return generateUri('totp', label, { digits, issuer, period, secret })
}
export async function verifyTotp(token, { period, timestamp, window = 1, ...opts }) {
const counter = totpCounter(period, timestamp)
const end = counter + window
opts.counter = counter - window
while (opts.counter <= end) {
if (token === (await generateHotp(opts))) {
return true
}
opts.counter += 1
}
return false
}
export async function verifyFromUri(token, uri) {
const url = new URL(uri)
assert('protocol', url.protocol === 'otpauth:')
const { host } = url
const opts = Object.fromEntries(url.searchParams.entries())
if (host === 'hotp') {
return await verifyHotp(token, opts)
}
if (host === 'totp') {
return await verifyTotp(token, opts)
}
assert('host', false)
}
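Given the fixed SHA-1 HMAC and the dynamic truncation above, this implementation should reproduce the RFC 4226 Appendix D test vectors; a quick check (the secret is the base32 encoding of the ASCII string `12345678901234567890`):
```js
import { generateHotp } from '@vates/otp'

const secret = 'GEZDGNBVGY3TQOJQGEZDGNBVGY3TQOJQ'

console.log(await generateHotp({ counter: 0, secret }))
// → '755224'
console.log(await generateHotp({ counter: 1, secret }))
// → '287082'
```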

112
@vates/otp/index.spec.mjs Normal file
View File

@@ -0,0 +1,112 @@
import { strict as assert } from 'node:assert'
import { describe, it } from 'tap/mocha'
import {
generateHotp,
generateHotpUri,
generateSecret,
generateTotp,
generateTotpUri,
verifyHotp,
verifyTotp,
} from './index.mjs'
describe('generateSecret', function () {
it('generates a string of 32 chars', async function () {
const secret = generateSecret()
assert.equal(typeof secret, 'string')
assert.equal(secret.length, 32)
})
it('generates a different secret at each call', async function () {
assert.notEqual(generateSecret(), generateSecret())
})
})
describe('HOTP', function () {
it('generate and verify valid tokens', async function () {
for (const [token, opts] of Object.entries({
382752: {
counter: -3088,
secret: 'PJYFSZ3JNVXVQMZXOB2EQYJSKB2HE6TB',
},
163376: {
counter: 30598,
secret: 'GBUDQZ3UKZZGIMRLNVYXA33GMFMEGQKN',
},
})) {
assert.equal(await generateHotp(opts), token)
assert(await verifyHotp(token, opts))
}
})
describe('generateHotpUri', function () {
const opts = {
counter: 59732,
label: 'the label',
secret: 'OGK45BBZAIGNGELHZPXYKN4GUVWWO6YX',
}
Object.entries({
'without optional params': [
opts,
'otpauth://hotp/the%20label?counter=59732&secret=OGK45BBZAIGNGELHZPXYKN4GUVWWO6YX',
],
'with issuer': [
{ ...opts, issuer: 'the issuer' },
'otpauth://hotp/the%20issuer:the%20label?counter=59732&issuer=the%20issuer&secret=OGK45BBZAIGNGELHZPXYKN4GUVWWO6YX',
],
'with digits': [
{ ...opts, digits: 7 },
'otpauth://hotp/the%20label?counter=59732&digits=7&secret=OGK45BBZAIGNGELHZPXYKN4GUVWWO6YX',
],
}).forEach(([title, [opts, uri]]) => {
it(title, async function () {
assert.strictEqual(generateHotpUri(opts), uri)
})
})
})
})
describe('TOTP', function () {
Object.entries({
'033702': {
secret: 'PJYFSZ3JNVXVQMZXOB2EQYJSKB2HE6TB',
timestamp: 1665416296,
period: 30,
},
107250: {
secret: 'GBUDQZ3UKZZGIMRLNVYXA33GMFMEGQKN',
timestamp: 1665416674,
period: 60,
},
}).forEach(([token, opts]) => {
it('works', async function () {
assert.equal(await generateTotp(opts), token)
assert(await verifyTotp(token, opts))
})
})
describe('generateHotpUri', function () {
const opts = {
label: 'the label',
secret: 'OGK45BBZAIGNGELHZPXYKN4GUVWWO6YX',
}
Object.entries({
'without optional params': [opts, 'otpauth://totp/the%20label?secret=OGK45BBZAIGNGELHZPXYKN4GUVWWO6YX'],
'with issuer': [
{ ...opts, issuer: 'the issuer' },
'otpauth://totp/the%20issuer:the%20label?issuer=the%20issuer&secret=OGK45BBZAIGNGELHZPXYKN4GUVWWO6YX',
],
'with digits': [
{ ...opts, digits: 7 },
'otpauth://totp/the%20label?digits=7&secret=OGK45BBZAIGNGELHZPXYKN4GUVWWO6YX',
],
}).forEach(([title, [opts, uri]]) => {
it(title, async function () {
assert.strictEqual(generateTotpUri(opts), uri)
})
})
})
})

39
@vates/otp/package.json Normal file
View File

@@ -0,0 +1,39 @@
{
"private": false,
"name": "@vates/otp",
"description": "Minimal HTOP/TOTP implementation",
"keywords": [
"2fa",
"authenticator",
"hotp",
"otp",
"totp"
],
"homepage": "https://github.com/vatesfr/xen-orchestra/tree/master/@vates/otp",
"bugs": "https://github.com/vatesfr/xen-orchestra/issues",
"main": "index.mjs",
"repository": {
"directory": "@vates/otp",
"type": "git",
"url": "https://github.com/vatesfr/xen-orchestra.git"
},
"author": {
"name": "Vates SAS",
"url": "https://vates.fr"
},
"license": "ISC",
"version": "1.0.0",
"engines": {
"node": ">=15"
},
"dependencies": {
"rfc4648": "^1.5.2"
},
"scripts": {
"postversion": "npm publish --access public",
"test": "tap"
},
"devDependencies": {
"tap": "^16.3.0"
}
}

View File

@@ -10,8 +10,8 @@
Installation of the [npm package](https://npmjs.org/package/@vates/parse-duration):
```
> npm install --save @vates/parse-duration
```sh
npm install --save @vates/parse-duration
```
## Usage

View File

@@ -1,7 +1,7 @@
`undefined` predicates are ignored, and `undefined` is returned if all predicates are `undefined`; this permits the most efficient composition:
```js
const compositePredicate = every(undefined, some(predicate2, undefined))
const compositePredicate = not(every(undefined, some(not(predicate2), undefined)))
// ends up as
@@ -36,6 +36,21 @@ isBetween3And10(10)
// → false
```
### `not(predicate)`
> Returns a predicate that returns the negation of the predicate.
```js
const isEven = n => n % 2 === 0
const isOdd = not(isEven)
isOdd(1)
// true
isOdd(2)
// false
```
### `some(predicates)`
> Returns a predicate that returns `true` iff some predicate returns `true`.

View File

@@ -10,8 +10,8 @@
Installation of the [npm package](https://npmjs.org/package/@vates/predicates):
```
> npm install --save @vates/predicates
```sh
npm install --save @vates/predicates
```
## Usage
@@ -19,7 +19,7 @@ Installation of the [npm package](https://npmjs.org/package/@vates/predicates):
`undefined` predicates are ignored, and `undefined` is returned if all predicates are `undefined`; this permits the most efficient composition:
```js
const compositePredicate = every(undefined, some(predicate2, undefined))
const compositePredicate = not(every(undefined, some(not(predicate2), undefined)))
// ends up as
@@ -54,6 +54,21 @@ isBetween3And10(10)
// → false
```
### `not(predicate)`
> Returns a predicate that returns the negation of the predicate.
```js
const isEven = n => n % 2 === 0
const isOdd = not(isEven)
isOdd(1)
// true
isOdd(2)
// false
```
### `some(predicates)`
> Returns a predicate that returns `true` iff some predicate returns `true`.

View File

@@ -51,6 +51,22 @@ exports.every = function every() {
}
}
const notPredicateTag = {}
exports.not = function not(predicate) {
if (isDefinedPredicate(predicate)) {
if (predicate.tag === notPredicateTag) {
return predicate.predicate
}
function notPredicate() {
return !predicate.apply(this, arguments)
}
notPredicate.predicate = predicate
notPredicate.tag = notPredicateTag
return notPredicate
}
}
exports.some = function some() {
const predicates = handleArgs.apply(this, arguments)
const n = predicates.length

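A negated predicate is tagged so that a second negation unwraps to the original function instead of adding another wrapper; a quick sketch of the observable behavior:
```js
const { not } = require('@vates/predicates')

const isEven = n => n % 2 === 0
const isOdd = not(isEven)

isOdd(3)
// → true

// double negation returns the original predicate
not(isOdd) === isEven
// → true
```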
View File

@@ -3,20 +3,14 @@
const assert = require('assert/strict')
const { describe, it } = require('tap').mocha
const { every, some } = require('./')
const { every, not, some } = require('./')
const T = () => true
const F = () => false
const testArgsHandling = fn => {
it('returns undefined if all predicates are undefined', () => {
const testArgHandling = fn => {
it('returns undefined if predicate is undefined', () => {
assert.equal(fn(undefined), undefined)
assert.equal(fn([undefined]), undefined)
})
it('returns the predicate if only a single one is passed', () => {
assert.equal(fn(undefined, T), T)
assert.equal(fn([undefined, T]), T)
})
it('throws if it receives a non-predicate', () => {
@@ -24,6 +18,15 @@ const testArgsHandling = fn => {
error.value = 3
assert.throws(() => fn(3), error)
})
}
const testArgsHandling = fn => {
testArgHandling(fn)
it('returns the predicate if only a single one is passed', () => {
assert.equal(fn(undefined, T), T)
assert.equal(fn([undefined, T]), T)
})
it('forwards this and arguments to predicates', () => {
const thisArg = 'qux'
@@ -36,17 +39,21 @@ const testArgsHandling = fn => {
})
}
const runTests = (fn, truthTable) =>
const runTests = (fn, acceptMultiple, truthTable) =>
it('works', () => {
truthTable.forEach(([result, ...predicates]) => {
if (acceptMultiple) {
assert.equal(fn(predicates)(), result)
} else {
assert.equal(predicates.length, 1)
}
assert.equal(fn(...predicates)(), result)
assert.equal(fn(predicates)(), result)
})
})
describe('every', () => {
testArgsHandling(every)
runTests(every, [
runTests(every, true, [
[true, T, T],
[false, T, F],
[false, F, T],
@@ -54,9 +61,22 @@ describe('every', () => {
])
})
describe('not', () => {
testArgHandling(not)
it('returns the original predicate if negated twice', () => {
assert.equal(not(not(T)), T)
})
runTests(not, false, [
[true, F],
[false, T],
])
})
describe('some', () => {
testArgsHandling(some)
runTests(some, [
runTests(some, true, [
[true, T, T],
[true, T, F],
[true, F, T],

View File

@@ -26,7 +26,7 @@
"url": "https://vates.fr"
},
"license": "ISC",
"version": "1.0.0",
"version": "1.1.0",
"engines": {
"node": ">=6"
},

View File

@@ -10,8 +10,8 @@
Installation of the [npm package](https://npmjs.org/package/@vates/read-chunk):
```
> npm install --save @vates/read-chunk
```sh
npm install --save @vates/read-chunk
```
## Usage

View File

@@ -1,6 +1,7 @@
'use strict'
/* eslint-env jest */
const { describe, it } = require('test')
const assert = require('node:assert').strict
const { Readable } = require('stream')
@@ -11,42 +12,42 @@ makeStream.obj = Readable.from
describe('readChunk', () => {
it('returns null if stream is empty', async () => {
expect(await readChunk(makeStream([]))).toBe(null)
assert.strictEqual(await readChunk(makeStream([])), null)
})
it('returns null if the stream is already ended', async () => {
const stream = await makeStream([])
await readChunk(stream)
expect(await readChunk(stream)).toBe(null)
assert.strictEqual(await readChunk(stream), null)
})
describe('with binary stream', () => {
it('returns the first chunk of data', async () => {
expect(await readChunk(makeStream(['foo', 'bar']))).toEqual(Buffer.from('foo'))
assert.deepEqual(await readChunk(makeStream(['foo', 'bar'])), Buffer.from('foo'))
})
it('returns a chunk of the specified size (smaller than first)', async () => {
expect(await readChunk(makeStream(['foo', 'bar']), 2)).toEqual(Buffer.from('fo'))
assert.deepEqual(await readChunk(makeStream(['foo', 'bar']), 2), Buffer.from('fo'))
})
it('returns a chunk of the specified size (larger than first)', async () => {
expect(await readChunk(makeStream(['foo', 'bar']), 4)).toEqual(Buffer.from('foob'))
assert.deepEqual(await readChunk(makeStream(['foo', 'bar']), 4), Buffer.from('foob'))
})
it('returns less data if stream ends', async () => {
expect(await readChunk(makeStream(['foo', 'bar']), 10)).toEqual(Buffer.from('foobar'))
assert.deepEqual(await readChunk(makeStream(['foo', 'bar']), 10), Buffer.from('foobar'))
})
it('returns an empty buffer if the specified size is 0', async () => {
expect(await readChunk(makeStream(['foo', 'bar']), 0)).toEqual(Buffer.alloc(0))
assert.deepEqual(await readChunk(makeStream(['foo', 'bar']), 0), Buffer.alloc(0))
})
})
describe('with object stream', () => {
it('returns the first chunk of data verbatim', async () => {
const chunks = [{}, {}]
expect(await readChunk(makeStream.obj(chunks))).toBe(chunks[0])
assert.strictEqual(await readChunk(makeStream.obj(chunks)), chunks[0])
})
})
})
@@ -62,15 +63,15 @@ const rejectionOf = promise =>
describe('readChunkStrict', function () {
it('throws if stream is empty', async () => {
const error = await rejectionOf(readChunkStrict(makeStream([])))
expect(error).toBeInstanceOf(Error)
expect(error.message).toBe('stream has ended without data')
expect(error.chunk).toEqual(undefined)
assert(error instanceof Error)
assert.strictEqual(error.message, 'stream has ended without data')
assert.strictEqual(error.chunk, undefined)
})
it('throws if stream ends with not enough data', async () => {
const error = await rejectionOf(readChunkStrict(makeStream(['foo', 'bar']), 10))
expect(error).toBeInstanceOf(Error)
expect(error.message).toBe('stream has ended with not enough data')
expect(error.chunk).toEqual(Buffer.from('foobar'))
assert(error instanceof Error)
assert.strictEqual(error.message, 'stream has ended with not enough data')
assert.deepEqual(error.chunk, Buffer.from('foobar'))
})
})

View File

@@ -19,15 +19,19 @@
"type": "git",
"url": "https://github.com/vatesfr/xen-orchestra.git"
},
"version": "1.0.0",
"version": "1.0.1",
"engines": {
"node": ">=8.10"
},
"scripts": {
"postversion": "npm publish --access public"
"postversion": "npm publish --access public",
"test": "node--test"
},
"author": {
"name": "Vates SAS",
"url": "https://vates.fr"
},
"devDependencies": {
"test": "^3.2.1"
}
}

54
@vates/task/.USAGE.md Normal file
View File

@@ -0,0 +1,54 @@
```js
import { Task } from '@vates/task'
const task = new Task({
name: 'my task',
// if defined, a new detached task is created
//
// if not defined and created inside an existing task, the new task is considered a subtask
onProgress(event) {
// this function is called each time this task or one of its subtasks changes state
const { id, timestamp, type } = event
if (type === 'start') {
const { name, parentId } = event
} else if (type === 'end') {
const { result, status } = event
} else if (type === 'info' || type === 'warning') {
const { data, message } = event
} else if (type === 'property') {
const { name, value } = event
}
},
})
// this field is settable once before being observed
task.id
task.status
await task.abort()
// if fn rejects, the task will be marked as failed
const result = await task.runInside(fn)
// if fn rejects, the task will be marked as failed
// if fn resolves, the task will be marked as succeeded
const result = await task.run(fn)
// the abort signal of the current task if any, otherwise is `undefined`
Task.abortSignal
// sends an info on the current task if any, otherwise does nothing
Task.info(message, data)
// sends a warning on the current task if any, otherwise does nothing
Task.warning(message, data)
// attaches a property to the current task if any, otherwise does nothing
//
// the latest value takes precedence
//
// examples:
// - progress
Task.set(property, value)
```
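A minimal end-to-end sketch (task and property names are arbitrary): a detached task collects the events of its own run and of a subtask created inside it:
```js
import { Task } from '@vates/task'

const events = []
const task = new Task({
  name: 'backup',
  onProgress: event => events.push(event),
})

await task.run(async () => {
  Task.set('progress', 0)

  // created inside the parent's run, so it is a subtask and
  // reports to the same onProgress, with parentId set
  await new Task({ name: 'copy disks' }).run(async () => {
    Task.set('progress', 50)
  })

  Task.set('progress', 100)
})

// events now contains start/property/end entries for both tasks
```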

1
@vates/task/.npmignore Symbolic link
View File

@@ -0,0 +1 @@
../../scripts/npmignore

85
@vates/task/README.md Normal file
View File

@@ -0,0 +1,85 @@
<!-- DO NOT EDIT MANUALLY, THIS FILE HAS BEEN GENERATED -->
# @vates/task
[![Package Version](https://badgen.net/npm/v/@vates/task)](https://npmjs.org/package/@vates/task) ![License](https://badgen.net/npm/license/@vates/task) [![PackagePhobia](https://badgen.net/bundlephobia/minzip/@vates/task)](https://bundlephobia.com/result?p=@vates/task) [![Node compatibility](https://badgen.net/npm/node/@vates/task)](https://npmjs.org/package/@vates/task)
## Install
Installation of the [npm package](https://npmjs.org/package/@vates/task):
```sh
npm install --save @vates/task
```
## Usage
```js
import { Task } from '@vates/task'
const task = new Task({
name: 'my task',
// if defined, a new detached task is created
//
// if not defined and created inside an existing task, the new task is considered a subtask
onProgress(event) {
// this function is called each time this task or one of its subtasks changes state
const { id, timestamp, type } = event
if (type === 'start') {
const { name, parentId } = event
} else if (type === 'end') {
const { result, status } = event
} else if (type === 'info' || type === 'warning') {
const { data, message } = event
} else if (type === 'property') {
const { name, value } = event
}
},
})
// this field is settable once before being observed
task.id
task.status
await task.abort()
// if fn rejects, the task will be marked as failed
const result = await task.runInside(fn)
// if fn rejects, the task will be marked as failed
// if fn resolves, the task will be marked as succeeded
const result = await task.run(fn)
// the abort signal of the current task if any, otherwise is `undefined`
Task.abortSignal
// sends an info on the current task if any, otherwise does nothing
Task.info(message, data)
// sends a warning on the current task if any, otherwise does nothing
Task.warning(message, data)
// attaches a property to the current task if any, otherwise does nothing
//
// the latest value takes precedence
//
// examples:
// - progress
Task.set(property, value)
```
## Contributions
Contributions are _very_ welcomed, either on the documentation or on
the code.
You may:
- report any [issue](https://github.com/vatesfr/xen-orchestra/issues)
you've encountered;
- fork and create a pull request.
## License
[ISC](https://spdx.org/licenses/ISC) © [Vates SAS](https://vates.fr)

184
@vates/task/index.js Normal file
View File

@@ -0,0 +1,184 @@
'use strict'
const assert = require('node:assert').strict
const { AsyncLocalStorage } = require('node:async_hooks')
// define a read-only, non-enumerable, non-configurable property
function define(object, property, value) {
Object.defineProperty(object, property, { value })
}
const noop = Function.prototype
const ABORTED = 'aborted'
const ABORTING = 'aborting'
const FAILURE = 'failure'
const PENDING = 'pending'
const SUCCESS = 'success'
exports.STATUS = { ABORTED, ABORTING, FAILURE, PENDING, SUCCESS }
const asyncStorage = new AsyncLocalStorage()
const getTask = () => asyncStorage.getStore()
exports.Task = class Task {
static get abortSignal() {
const task = getTask()
if (task !== undefined) {
return task.#abortController.signal
}
}
static info(message, data) {
const task = getTask()
if (task !== undefined) {
task.#emit('info', { data, message })
}
}
static run(opts, fn) {
return new this(opts).run(fn)
}
static set(name, value) {
const task = getTask()
if (task !== undefined) {
task.#emit('property', { name, value })
}
}
static warning(message, data) {
const task = getTask()
if (task !== undefined) {
task.#emit('warning', { data, message })
}
}
static wrap(opts, fn) {
// compatibility with @decorateWith
if (typeof fn !== 'function') {
;[fn, opts] = [opts, fn]
}
return function taskRun() {
return Task.run(typeof opts === 'function' ? opts.apply(this, arguments) : opts, () => fn.apply(this, arguments))
}
}
#abortController = new AbortController()
#onProgress
#parent
get id() {
return (this.id = Math.random().toString(36).slice(2))
}
set id(value) {
define(this, 'id', value)
}
#startData
#status = PENDING
get status() {
return this.#status
}
constructor({ name, onProgress }) {
this.#startData = { name }
if (onProgress !== undefined) {
this.#onProgress = onProgress
} else {
const parent = getTask()
if (parent !== undefined) {
this.#parent = parent
const { signal } = parent.#abortController
signal.addEventListener('abort', () => {
this.#abortController.abort(signal.reason)
})
this.#onProgress = parent.#onProgress
this.#startData.parentId = parent.id
} else {
this.#onProgress = noop
}
}
const { signal } = this.#abortController
signal.addEventListener('abort', () => {
if (this.status === PENDING) {
this.#status = this.#running ? ABORTING : ABORTED
}
})
}
abort(reason) {
this.#abortController.abort(reason)
}
#emit(type, data) {
data.id = this.id
data.timestamp = Date.now()
data.type = type
this.#onProgress(data)
}
#handleMaybeAbortion(result) {
if (this.status === ABORTING) {
this.#status = ABORTED
this.#emit('end', { status: ABORTED, result })
return true
}
return false
}
async run(fn) {
const result = await this.runInside(fn)
if (this.status === PENDING) {
this.#status = SUCCESS
this.#emit('end', { status: SUCCESS, result })
}
return result
}
#running = false
async runInside(fn) {
assert.equal(this.status, PENDING)
assert.equal(this.#running, false)
this.#running = true
const startData = this.#startData
if (startData !== undefined) {
this.#startData = undefined
this.#emit('start', startData)
}
try {
const result = await asyncStorage.run(this, fn)
this.#handleMaybeAbortion(result)
this.#running = false
return result
} catch (result) {
if (!this.#handleMaybeAbortion(result)) {
this.#status = FAILURE
this.#emit('end', { status: FAILURE, result })
}
throw result
}
}
wrap(fn) {
const task = this
return function taskRun() {
return task.run(() => fn.apply(this, arguments))
}
}
wrapInside(fn) {
const task = this
return function taskRunInside() {
return task.runInside(() => fn.apply(this, arguments))
}
}
}

23
@vates/task/package.json Normal file
View File

@@ -0,0 +1,23 @@
{
"private": false,
"name": "@vates/task",
"homepage": "https://github.com/vatesfr/xen-orchestra/tree/master/@vates/task",
"bugs": "https://github.com/vatesfr/xen-orchestra/issues",
"repository": {
"directory": "@vates/task",
"type": "git",
"url": "https://github.com/vatesfr/xen-orchestra.git"
},
"author": {
"name": "Vates SAS",
"url": "https://vates.fr"
},
"license": "ISC",
"version": "0.0.1",
"engines": {
"node": ">=14"
},
"scripts": {
"postversion": "npm publish --access public"
}
}

View File

@@ -10,8 +10,8 @@
Installation of the [npm package](https://npmjs.org/package/@vates/toggle-scripts):
```
> npm install --save @vates/toggle-scripts
```sh
npm install --save @vates/toggle-scripts
```
## Usage

View File

@@ -30,6 +30,7 @@ if (args.length === 0) {
${name} v${version}
`)
// eslint-disable-next-line n/no-process-exit
process.exit()
}

View File

@@ -10,8 +10,8 @@
Installation of the [npm package](https://npmjs.org/package/@xen-orchestra/async-map):
```
> npm install --save @xen-orchestra/async-map
```sh
npm install --save @xen-orchestra/async-map
```
## Usage

View File

@@ -1,6 +1,8 @@
'use strict'
/* eslint-env jest */
const { describe, it } = require('test')
const assert = require('assert').strict
const sinon = require('sinon')
const { asyncMapSettled } = require('./')
@@ -9,26 +11,29 @@ const noop = Function.prototype
describe('asyncMapSettled', () => {
it('works', async () => {
const values = [Math.random(), Math.random()]
const spy = jest.fn(async v => v * 2)
const spy = sinon.spy(async v => v * 2)
const iterable = new Set(values)
// returns an array containing the result of each calls
expect(await asyncMapSettled(iterable, spy)).toEqual(values.map(value => value * 2))
assert.deepStrictEqual(
await asyncMapSettled(iterable, spy),
values.map(value => value * 2)
)
for (let i = 0, n = values.length; i < n; ++i) {
// each call receive the current item as sole argument
expect(spy.mock.calls[i]).toEqual([values[i]])
assert.deepStrictEqual(spy.args[i], [values[i]])
// each call as this bind to the iterable
expect(spy.mock.instances[i]).toBe(iterable)
assert.deepStrictEqual(spy.thisValues[i], iterable)
}
})
it('can use a specified thisArg', () => {
const thisArg = {}
const spy = jest.fn()
const spy = sinon.spy()
asyncMapSettled(['foo'], spy, thisArg)
expect(spy.mock.instances[0]).toBe(thisArg)
assert.deepStrictEqual(spy.thisValues[0], thisArg)
})
it('rejects only when all calls as resolved', async () => {
@@ -55,19 +60,22 @@ describe('asyncMapSettled', () => {
// wait for all microtasks to settle
await new Promise(resolve => setImmediate(resolve))
expect(hasSettled).toBe(false)
assert.strictEqual(hasSettled, false)
defers[1].resolve()
// wait for all microtasks to settle
await new Promise(resolve => setImmediate(resolve))
expect(hasSettled).toBe(true)
await expect(promise).rejects.toBe(error)
assert.strictEqual(hasSettled, true)
await assert.rejects(promise, error)
})
it('issues when latest promise rejects', async () => {
const error = new Error()
await expect(asyncMapSettled([1], () => Promise.reject(error))).rejects.toBe(error)
await assert.rejects(
asyncMapSettled([1], () => Promise.reject(error)),
error
)
})
})

View File

@@ -31,6 +31,11 @@
"lodash": "^4.17.4"
},
"scripts": {
"postversion": "npm publish"
"postversion": "npm publish",
"test": "node--test"
},
"devDependencies": {
"sinon": "^15.0.1",
"test": "^3.2.1"
}
}

View File

@@ -8,8 +8,8 @@
Installation of the [npm package](https://npmjs.org/package/@xen-orchestra/audit-core):
```
> npm install --save @xen-orchestra/audit-core
```sh
npm install --save @xen-orchestra/audit-core
```
## Contributions

View File

@@ -7,7 +7,7 @@
"type": "git",
"url": "https://github.com/vatesfr/xen-orchestra.git"
},
"version": "0.2.0",
"version": "0.2.3",
"engines": {
"node": ">=14"
},
@@ -17,7 +17,7 @@
},
"dependencies": {
"@vates/decorate-with": "^2.0.0",
"@xen-orchestra/log": "^0.3.0",
"@xen-orchestra/log": "^0.6.0",
"golike-defer": "^0.5.1",
"object-hash": "^2.0.1"
},

View File

@@ -5,7 +5,6 @@ const PRESETS_RE = /^@babel\/preset-.+$/
const NODE_ENV = process.env.NODE_ENV || 'development'
const __PROD__ = NODE_ENV === 'production'
const __TEST__ = NODE_ENV === 'test'
const configs = {
'@babel/plugin-proposal-decorators': {
@@ -15,7 +14,7 @@ const configs = {
proposal: 'minimal',
},
'@babel/preset-env': {
debug: !__TEST__,
debug: __PROD__,
// disabled until https://github.com/babel/babel/issues/8323 is resolved
// loose: true,

View File

@@ -8,8 +8,8 @@
Installation of the [npm package](https://npmjs.org/package/@xen-orchestra/backups-cli):
```
> npm install --global @xen-orchestra/backups-cli
```sh
npm install --global @xen-orchestra/backups-cli
```
## Usage

View File

@@ -1,11 +1,10 @@
'use strict'
import { readFileSync } from 'fs'
import getopts from 'getopts'
const getopts = require('getopts')
const { version } = JSON.parse(readFileSync(new URL('package.json', import.meta.url)))
const { version } = require('./package.json')
module.exports = commands =>
async function (args, prefix) {
export function composeCommands(commands) {
return async function (args, prefix) {
const opts = getopts(args, {
alias: {
help: 'h',
@@ -30,5 +29,6 @@ xo-backups v${version}
return
}
return command.main(args.slice(1), prefix + ' ' + commandName)
return (await command.default)(args.slice(1), prefix + ' ' + commandName)
}
}

View File

@@ -1,11 +1,9 @@
'use strict'
import fs from 'fs/promises'
import { dirname } from 'path'
const { dirname } = require('path')
export * from 'fs/promises'
const fs = require('promise-toolbox/promisifyAll')(require('fs'))
module.exports = fs
fs.getSize = path =>
export const getSize = path =>
fs.stat(path).then(
_ => _.size,
error => {
@@ -16,7 +14,7 @@ fs.getSize = path =>
}
)
fs.mktree = async function mkdirp(path) {
export async function mktree(path) {
try {
await fs.mkdir(path)
} catch (error) {
@@ -26,8 +24,8 @@ fs.mktree = async function mkdirp(path) {
return
}
if (code === 'ENOENT') {
await mkdirp(dirname(path))
return mkdirp(path)
await mktree(dirname(path))
return mktree(path)
}
throw error
}
@@ -37,7 +35,7 @@ fs.mktree = async function mkdirp(path) {
// - single param for direct use in `Array#map`
// - files are prefixed with directory path
// - safer: returns empty array if path is missing or not a directory
fs.readdir2 = path =>
export const readdir2 = path =>
fs.readdir(path).then(
entries => {
entries.forEach((entry, i) => {
@@ -59,7 +57,7 @@ fs.readdir2 = path =>
}
)
fs.symlink2 = async (target, path) => {
export async function symlink2(target, path) {
try {
await fs.symlink(target, path)
} catch (error) {

View File

@@ -1,40 +0,0 @@
'use strict'
// -----------------------------------------------------------------------------
const asyncMap = require('lodash/curryRight')(require('@xen-orchestra/async-map').asyncMap)
const getopts = require('getopts')
const { RemoteAdapter } = require('@xen-orchestra/backups/RemoteAdapter')
const { resolve } = require('path')
const adapter = new RemoteAdapter(require('@xen-orchestra/fs').getHandler({ url: 'file://' }))
module.exports = async function main(args) {
const { _, fix, remove, merge } = getopts(args, {
alias: {
fix: 'f',
remove: 'r',
merge: 'm',
},
boolean: ['fix', 'merge', 'remove'],
default: {
merge: false,
remove: false,
},
})
await asyncMap(_, async vmDir => {
vmDir = resolve(vmDir)
try {
await adapter.cleanVm(vmDir, {
fixMetadata: fix,
remove,
merge,
logInfo: (...args) => console.log(...args),
logWarn: (...args) => console.warn(...args),
})
} catch (error) {
console.error('adapter.cleanVm', vmDir, error)
}
})
}

View File

@@ -0,0 +1,38 @@
import { asyncMap } from '@xen-orchestra/async-map'
import { RemoteAdapter } from '@xen-orchestra/backups/RemoteAdapter.js'
import { getSyncedHandler } from '@xen-orchestra/fs'
import getopts from 'getopts'
import { basename, dirname } from 'path'
import Disposable from 'promise-toolbox/Disposable'
import { pathToFileURL } from 'url'
export default async function cleanVms(args) {
const { _, fix, remove, merge } = getopts(args, {
alias: {
fix: 'f',
remove: 'r',
merge: 'm',
},
boolean: ['fix', 'merge', 'remove'],
default: {
merge: false,
remove: false,
},
})
await asyncMap(_, vmDir =>
Disposable.use(getSyncedHandler({ url: pathToFileURL(dirname(vmDir)).href }), async handler => {
try {
await new RemoteAdapter(handler).cleanVm(basename(vmDir), {
fixMetadata: fix,
remove,
merge,
logInfo: (...args) => console.log(...args),
logWarn: (...args) => console.warn(...args),
})
} catch (error) {
console.error('adapter.cleanVm', vmDir, error)
}
})
)
}

View File

@@ -1,13 +1,10 @@
'use strict'
import { mktree, readdir2, readFile, symlink2 } from '../_fs.mjs'
import { asyncMap } from '@xen-orchestra/async-map'
import filenamify from 'filenamify'
import get from 'lodash/get.js'
import { dirname, join, relative } from 'path'
const filenamify = require('filenamify')
const get = require('lodash/get')
const { asyncMap } = require('@xen-orchestra/async-map')
const { dirname, join, relative } = require('path')
const { mktree, readdir2, readFile, symlink2 } = require('../_fs')
module.exports = async function createSymlinkIndex([backupDir, fieldPath]) {
export default async function createSymlinkIndex([backupDir, fieldPath]) {
const indexDir = join(backupDir, 'indexes', filenamify(fieldPath))
await mktree(indexDir)

View File

@@ -1,16 +1,13 @@
'use strict'
const groupBy = require('lodash/groupBy')
const { asyncMap } = require('@xen-orchestra/async-map')
const { createHash } = require('crypto')
const { dirname, resolve } = require('path')
const { readdir2, readFile, getSize } = require('../_fs')
import { readdir2, readFile, getSize } from '../_fs.mjs'
import { asyncMap } from '@xen-orchestra/async-map'
import { createHash } from 'crypto'
import groupBy from 'lodash/groupBy.js'
import { dirname, resolve } from 'path'
const sha512 = str => createHash('sha512').update(str).digest('hex')
const sum = values => values.reduce((a, b) => a + b)
module.exports = async function info(vmDirs) {
export default async function info(vmDirs) {
const jsonFiles = (
await asyncMap(vmDirs, async vmDir => (await readdir2(vmDir)).filter(_ => _.endsWith('.json')))
).flat()

View File

@@ -1,11 +1,12 @@
#!/usr/bin/env node
import { composeCommands } from './_composeCommands.mjs'
'use strict'
const importDefault = async path => (await import(path)).default
require('./_composeCommands')({
composeCommands({
'clean-vms': {
get main() {
return require('./commands/clean-vms')
get default() {
return importDefault('./commands/clean-vms.mjs')
},
usage: `[--fix] [--merge] [--remove] xo-vm-backups/*
@@ -18,14 +19,14 @@ require('./_composeCommands')({
`,
},
'create-symlink-index': {
get main() {
return require('./commands/create-symlink-index')
get default() {
return importDefault('./commands/create-symlink-index.mjs')
},
usage: 'xo-vm-backups <field path>',
},
info: {
get main() {
return require('./commands/info')
get default() {
return importDefault('./commands/info.mjs')
},
usage: 'xo-vm-backups/*',
},
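
The `default` getters make each subcommand lazy: `importDefault()` only runs when the command is actually selected, so unused commands are never loaded. A standalone sketch of the pattern (the `./heavy.mjs` module is hypothetical):

```js
// Lazy-loading getter pattern used by the command table above (./heavy.mjs is hypothetical).
const importDefault = async path => (await import(path)).default

const commands = {
  hello: {
    get default() {
      // the dynamic import is only triggered when `commands.hello.default` is read
      return importDefault('./heavy.mjs')
    },
    usage: 'hello [name]',
  },
}

// resolve the getter, await the module, then call its default export
const run = async (name, args) => (await commands[name].default)(args)
```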

View File

@@ -1,21 +1,21 @@
{
"private": false,
"bin": {
"xo-backups": "index.js"
"xo-backups": "index.mjs"
},
"preferGlobal": true,
"bugs": "https://github.com/vatesfr/xen-orchestra/issues",
"dependencies": {
"@xen-orchestra/async-map": "^0.1.2",
"@xen-orchestra/backups": "^0.28.0",
"@xen-orchestra/fs": "^3.1.0",
"@xen-orchestra/backups": "^0.29.5",
"@xen-orchestra/fs": "^3.3.1",
"filenamify": "^4.1.0",
"getopts": "^2.2.5",
"lodash": "^4.17.15",
"promise-toolbox": "^0.21.0"
},
"engines": {
"node": ">=7.10.1"
"node": ">=14"
},
"homepage": "https://github.com/vatesfr/xen-orchestra/tree/master/@xen-orchestra/backups-cli",
"name": "@xen-orchestra/backups-cli",
@@ -27,7 +27,7 @@
"scripts": {
"postversion": "npm publish --access public"
},
"version": "0.7.8",
"version": "1.0.0",
"license": "AGPL-3.0-or-later",
"author": {
"name": "Vates SAS",

View File

@@ -38,11 +38,12 @@ const DEFAULT_VM_SETTINGS = {
fullInterval: 0,
healthCheckSr: undefined,
healthCheckVmsWithTags: [],
maxMergedDeltasPerRun: 2,
maxMergedDeltasPerRun: Infinity,
offlineBackup: false,
offlineSnapshot: false,
snapshotRetention: 0,
timeout: 0,
useNbd: false,
unconditionalSnapshot: false,
vmTimeout: 0,
}
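
These defaults are merged with job- and VM-level settings elsewhere; the new `useNbd` flag defaults to off and `maxMergedDeltasPerRun` is now unbounded. A hedged sketch of how an override might combine with the defaults (the plain object spread is illustrative, not the actual settings pipeline):

```js
// Illustrative override of the defaults above; not the real settings resolution code.
const DEFAULT_VM_SETTINGS = {
  maxMergedDeltasPerRun: Infinity,
  snapshotRetention: 0,
  timeout: 0,
  useNbd: false,
  vmTimeout: 0,
}

const jobSettings = {
  useNbd: true, // opt in to NBD-based VHD transfer (see DeltaBackupWriter further down)
  maxMergedDeltasPerRun: 2, // cap merge work per run
}

const effectiveSettings = { ...DEFAULT_VM_SETTINGS, ...jobSettings }
console.log(effectiveSettings.useNbd) // true
```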

View File

@@ -8,8 +8,8 @@
Installation of the [npm package](https://npmjs.org/package/@xen-orchestra/backups):
```
> npm install --save @xen-orchestra/backups
```sh
npm install --save @xen-orchestra/backups
```
## Contributions

View File

@@ -28,6 +28,7 @@ const { isMetadataFile } = require('./_backupType.js')
const { isValidXva } = require('./_isValidXva.js')
const { listPartitions, LVM_PARTITION_TYPE } = require('./_listPartitions.js')
const { lvs, pvs } = require('./_lvm.js')
const { watchStreamSize } = require('./_watchStreamSize')
// @todo: this import is marked extraneous, should be fixed when lib is published
const { mount } = require('@vates/fuse-vhd')
const { asyncEach } = require('@vates/async-each')
@@ -76,14 +77,16 @@ const debounceResourceFactory = factory =>
}
class RemoteAdapter {
constructor(handler, { debounceResource = res => res, dirMode, vhdDirectoryCompression, useGetDiskLegacy=false } = {}) {
constructor(
handler,
{ debounceResource = res => res, dirMode, vhdDirectoryCompression, useGetDiskLegacy = false } = {}
) {
this._debounceResource = debounceResource
this._dirMode = dirMode
this._handler = handler
this._vhdDirectoryCompression = vhdDirectoryCompression
this._readCacheListVmBackups = synchronized.withKey()(this._readCacheListVmBackups)
this._useGetDiskLegacy = useGetDiskLegacy
}
get handler() {
@@ -230,21 +233,23 @@ class RemoteAdapter {
return promise
}
#removeVmBackupsFromCache(backups) {
for (const [dir, filenames] of Object.entries(
groupBy(
backups.map(_ => _._filename),
dirname
)
)) {
// detached async action, will not reject
this._updateCache(dir + '/cache.json.gz', backups => {
for (const filename of filenames) {
debug('removing cache entry', { entry: filename })
delete backups[filename]
}
})
}
async #removeVmBackupsFromCache(backups) {
await asyncEach(
Object.entries(
groupBy(
backups.map(_ => _._filename),
dirname
)
),
([dir, filenames]) =>
// will not reject
this._updateCache(dir + '/cache.json.gz', backups => {
for (const filename of filenames) {
debug('removing cache entry', { entry: filename })
delete backups[filename]
}
})
)
}
async deleteDeltaVmBackups(backups) {
@@ -253,7 +258,7 @@ class RemoteAdapter {
// this will delete the json, unused VHDs will be detected by `cleanVm`
await asyncMapSettled(backups, ({ _filename }) => handler.unlink(_filename))
this.#removeVmBackupsFromCache(backups)
await this.#removeVmBackupsFromCache(backups)
}
async deleteMetadataBackup(backupId) {
@@ -282,7 +287,7 @@ class RemoteAdapter {
Promise.all([handler.unlink(_filename), handler.unlink(resolveRelativeFromFile(_filename, xva))])
)
this.#removeVmBackupsFromCache(backups)
await this.#removeVmBackupsFromCache(backups)
}
deleteVmBackup(file) {
@@ -324,9 +329,7 @@ class RemoteAdapter {
return this.#useVhdDirectory()
}
async *#getDiskLegacy(diskId) {
const RE_VHDI = /^vhdi(\d+)$/
const handler = this._handler
@@ -358,8 +361,8 @@ class RemoteAdapter {
}
async *getDisk(diskId) {
if(this._useGetDiskLegacy){
yield * this.#getDiskLegacy(diskId)
if (this._useGetDiskLegacy) {
yield* this.#getDiskLegacy(diskId)
return
}
const handler = this._handler
@@ -508,7 +511,7 @@ class RemoteAdapter {
return `${BACKUP_DIR}/${vmUuid}/cache.json.gz`
}
async #readCache(path) {
async _readCache(path) {
try {
return JSON.parse(await fromCallback(zlib.gunzip, await this.handler.readFile(path)))
} catch (error) {
@@ -521,15 +524,15 @@ class RemoteAdapter {
_updateCache = synchronized.withKey()(this._updateCache)
// eslint-disable-next-line no-dupe-class-members
async _updateCache(path, fn) {
const cache = await this.#readCache(path)
const cache = await this._readCache(path)
if (cache !== undefined) {
fn(cache)
await this.#writeCache(path, cache)
await this._writeCache(path, cache)
}
}
async #writeCache(path, data) {
async _writeCache(path, data) {
try {
await this.handler.writeFile(path, await fromCallback(zlib.gzip, JSON.stringify(data)), { flags: 'w' })
} catch (error) {
@@ -537,10 +540,6 @@ class RemoteAdapter {
}
}
async invalidateVmBackupListCache(vmUuid) {
await this.handler.unlink(this.#getVmBackupsCache(vmUuid))
}
async #getCachabledDataListVmBackups(dir) {
debug('generating cache', { path: dir })
@@ -581,7 +580,7 @@ class RemoteAdapter {
async _readCacheListVmBackups(vmUuid) {
const path = this.#getVmBackupsCache(vmUuid)
const cache = await this.#readCache(path)
const cache = await this._readCache(path)
if (cache !== undefined) {
debug('found VM backups cache, using it', { path })
return cache
@@ -594,7 +593,7 @@ class RemoteAdapter {
}
// detached async action, will not reject
this.#writeCache(path, backups)
this._writeCache(path, backups)
return backups
}
@@ -645,7 +644,7 @@ class RemoteAdapter {
})
// will not throw
this._updateCache(this.#getVmBackupsCache(vmUuid), backups => {
await this._updateCache(this.#getVmBackupsCache(vmUuid), backups => {
debug('adding cache entry', { entry: path })
backups[path] = {
...metadata,
@@ -659,26 +658,28 @@ class RemoteAdapter {
return path
}
async writeVhd(path, input, { checksum = true, validator = noop, writeBlockConcurrency } = {}) {
async writeVhd(path, input, { checksum = true, validator = noop, writeBlockConcurrency, nbdClient } = {}) {
const handler = this._handler
if (this.#useVhdDirectory()) {
const dataPath = `${dirname(path)}/data/${uuidv4()}.vhd`
await createVhdDirectoryFromStream(handler, dataPath, input, {
const size = await createVhdDirectoryFromStream(handler, dataPath, input, {
concurrency: writeBlockConcurrency,
compression: this.#getCompressionType(),
async validator() {
await input.task
return validator.apply(this, arguments)
},
nbdClient,
})
await VhdAbstract.createAlias(handler, path, dataPath)
return size
} else {
await this.outputStream(path, input, { checksum, validator })
return this.outputStream(path, input, { checksum, validator })
}
}
async outputStream(path, input, { checksum = true, validator = noop } = {}) {
const container = watchStreamSize(input)
await this._handler.outputStream(path, input, {
checksum,
dirMode: this._dirMode,
@@ -687,6 +688,7 @@ class RemoteAdapter {
return validator.apply(this, arguments)
},
})
return container.size
}
// open the hierarchy of ancestors until we find a full one
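
`writeVhd` now resolves to the number of bytes written in both branches (the value returned by `createVhdDirectoryFromStream`, or the stream size tracked by `watchStreamSize` in `outputStream`), so callers can accumulate transfer statistics. A hedged sketch of such a caller (`adapter`, `paths` and `streams` are assumed inputs):

```js
// Sketch: sum the sizes returned by adapter.writeVhd (adapter, paths and streams are assumed).
async function transferAll(adapter, paths, streams) {
  let transferSize = 0
  for (const [id, path] of Object.entries(paths)) {
    transferSize += await adapter.writeVhd(path, streams[`${id}.vhd`], {
      // no checksum for VHDs: it would be invalidated by later merges and chainings
      checksum: false,
    })
  }
  return transferSize
}
```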

View File

@@ -100,7 +100,7 @@ class Task {
* In case of error, the task will be failed.
*
* @typedef Result
* @param {() => Result)} fn
* @param {() => Result} fn
* @param {boolean} last - Whether the task should succeed if there is no error
* @returns Result
*/

View File

@@ -1,6 +1,6 @@
'use strict'
require('@xen-orchestra/log/configure.js').catchGlobalErrors(
require('@xen-orchestra/log/configure').catchGlobalErrors(
require('@xen-orchestra/log').createLogger('xo:backups:worker')
)

View File

@@ -311,7 +311,6 @@ exports.cleanVm = async function cleanVm(
}
const jsons = new Set()
let mustInvalidateCache = false
const xvas = new Set()
const xvaSums = []
const entries = await handler.list(vmDir, {
@@ -327,6 +326,20 @@ exports.cleanVm = async function cleanVm(
}
})
const cachePath = vmDir + '/cache.json.gz'
let mustRegenerateCache
{
const cache = await this._readCache(cachePath)
const actual = cache === undefined ? 0 : Object.keys(cache).length
const expected = jsons.size
mustRegenerateCache = actual !== expected
if (mustRegenerateCache) {
logWarn('unexpected number of entries in backup cache', { path: cachePath, actual, expected })
}
}
await asyncMap(xvas, async path => {
// the check is not reliable enough to delete the file; the best we can do is
// report it
@@ -338,6 +351,8 @@ exports.cleanVm = async function cleanVm(
const unusedVhds = new Set(vhds)
const unusedXvas = new Set(xvas)
const backups = new Map()
// compile the list of unused XVAs and VHDs, and remove backup metadata which
// reference a missing XVA/VHD
await asyncMap(jsons, async json => {
@@ -350,19 +365,16 @@ exports.cleanVm = async function cleanVm(
return
}
let isBackupComplete
const { mode } = metadata
if (mode === 'full') {
const linkedXva = resolve('/', vmDir, metadata.xva)
if (xvas.has(linkedXva)) {
isBackupComplete = xvas.has(linkedXva)
if (isBackupComplete) {
unusedXvas.delete(linkedXva)
} else {
logWarn('the XVA linked to the backup is missing', { backup: json, xva: linkedXva })
if (remove) {
logInfo('deleting incomplete backup', { path: json })
jsons.delete(json)
mustInvalidateCache = true
await handler.unlink(json)
}
}
} else if (mode === 'delta') {
const linkedVhds = (() => {
@@ -371,22 +383,28 @@ exports.cleanVm = async function cleanVm(
})()
const missingVhds = linkedVhds.filter(_ => !vhds.has(_))
isBackupComplete = missingVhds.length === 0
// FIXME: find better approach by keeping as much of the backup as
// possible (existing disks) even if one disk is missing
if (missingVhds.length === 0) {
if (isBackupComplete) {
linkedVhds.forEach(_ => unusedVhds.delete(_))
linkedVhds.forEach(path => {
vhdsToJSons[path] = json
})
} else {
logWarn('some VHDs linked to the backup are missing', { backup: json, missingVhds })
if (remove) {
logInfo('deleting incomplete backup', { path: json })
mustInvalidateCache = true
jsons.delete(json)
await handler.unlink(json)
}
}
}
if (isBackupComplete) {
backups.set(json, metadata)
} else {
jsons.delete(json)
if (remove) {
logInfo('deleting incomplete backup', { backup: json })
mustRegenerateCache = true
await handler.unlink(json)
}
}
})
@@ -496,7 +514,7 @@ exports.cleanVm = async function cleanVm(
// check for the other that the size is the same as the real file size
await asyncMap(jsons, async metadataPath => {
const metadata = JSON.parse(await handler.readFile(metadataPath))
const metadata = backups.get(metadataPath)
let fileSystemSize
const merged = metadataWithMergedVhd[metadataPath] !== undefined
@@ -538,6 +556,7 @@ exports.cleanVm = async function cleanVm(
// systematically update size after a merge
if ((merged || fixMetadata) && size !== fileSystemSize) {
metadata.size = fileSystemSize
mustRegenerateCache = true
try {
await handler.writeFile(metadataPath, JSON.stringify(metadata), { flags: 'w' })
} catch (error) {
@@ -546,9 +565,16 @@ exports.cleanVm = async function cleanVm(
}
})
// purge cache if a metadata file has been deleted
if (mustInvalidateCache) {
await handler.unlink(vmDir + '/cache.json.gz')
if (mustRegenerateCache) {
const cache = {}
for (const [path, content] of backups.entries()) {
cache[path] = {
_filename: path,
id: path,
...content,
}
}
await this._writeCache(cachePath, cache)
}
return {
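
Instead of simply unlinking the cache when a metadata file disappears, `cleanVm` now rebuilds `cache.json.gz` from the surviving backups whenever the entry count is off or a size was fixed. A sketch of the payload handed to `_writeCache` (paths and metadata values are illustrative):

```js
// Illustrative content of xo-vm-backups/<uuid>/cache.json.gz, rebuilt from the `backups` map.
const backups = new Map([
  ['xo-vm-backups/VMUUID/20230209T000000Z.json', { mode: 'delta', size: 209920 }],
])

const cache = {}
for (const [path, content] of backups.entries()) {
  cache[path] = {
    _filename: path,
    id: path,
    ...content,
  }
}
// _writeCache then writes gzip(JSON.stringify(cache)) with flags: 'w'
```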

View File

@@ -1,6 +1,7 @@
'use strict'
/* eslint-env jest */
const { beforeEach, afterEach, test, describe } = require('test')
const assert = require('assert').strict
const rimraf = require('rimraf')
const tmp = require('tmp')
@@ -14,9 +15,8 @@ const { VhdFile, Constants, VhdDirectory, VhdAbstract } = require('vhd-lib')
const { checkAliases } = require('./_cleanVm')
const { dirname, basename } = require('path')
let tempDir, adapter, handler, jobId, vdiId, basePath
jest.setTimeout(60000)
let tempDir, adapter, handler, jobId, vdiId, basePath, relativePath
const rootPath = 'xo-vm-backups/VMUUID/'
beforeEach(async () => {
tempDir = await pFromCallback(cb => tmp.dir(cb))
@@ -25,12 +25,13 @@ beforeEach(async () => {
adapter = new RemoteAdapter(handler)
jobId = uniqueId()
vdiId = uniqueId()
basePath = `vdis/${jobId}/${vdiId}`
relativePath = `vdis/${jobId}/${vdiId}`
basePath = `${rootPath}/${relativePath}`
await fs.mkdirp(`${tempDir}/${basePath}`)
})
afterEach(async () => {
await pFromCallback(cb => rimraf(tempDir, cb))
await rimraf(tempDir)
await handler.forget()
})
@@ -76,18 +77,18 @@ test('It remove broken vhd', async () => {
// todo also tests a directory and an alias
await handler.writeFile(`${basePath}/notReallyAVhd.vhd`, 'I AM NOT A VHD')
expect((await handler.list(basePath)).length).toEqual(1)
assert.equal((await handler.list(basePath)).length, 1)
let loggued = ''
const logInfo = message => {
loggued += message
}
await adapter.cleanVm('/', { remove: false, logInfo, logWarn: logInfo, lock: false })
expect(loggued).toEqual(`VHD check error`)
await adapter.cleanVm(rootPath, { remove: false, logInfo, logWarn: logInfo, lock: false })
assert.equal(loggued, `VHD check error`)
// not removed
expect((await handler.list(basePath)).length).toEqual(1)
assert.deepEqual(await handler.list(basePath), ['notReallyAVhd.vhd'])
// really remove it
await adapter.cleanVm('/', { remove: true, logInfo, logWarn: () => {}, lock: false })
expect((await handler.list(basePath)).length).toEqual(0)
await adapter.cleanVm(rootPath, { remove: true, logInfo, logWarn: () => {}, lock: false })
assert.deepEqual(await handler.list(basePath), [])
})
test('it remove vhd with missing or multiple ancestors', async () => {
@@ -121,10 +122,10 @@ test('it remove vhd with missing or multiple ancestors', async () => {
const logInfo = message => {
loggued += message + '\n'
}
await adapter.cleanVm('/', { remove: true, logInfo, logWarn: logInfo, lock: false })
await adapter.cleanVm(rootPath, { remove: true, logInfo, logWarn: logInfo, lock: false })
const deletedOrphanVhd = loggued.match(/deleting orphan VHD/g) || []
expect(deletedOrphanVhd.length).toEqual(1) // only one vhd should have been deleted
assert.equal(deletedOrphanVhd.length, 1) // only one vhd should have been deleted
// we don't test the files on disk, since they will all be marked as unused and deleted without a metadata.json file
})
@@ -132,12 +133,12 @@ test('it remove vhd with missing or multiple ancestors', async () => {
test('it remove backup meta data referencing a missing vhd in delta backup', async () => {
// create a metadata file marking child and orphan as ok
await handler.writeFile(
`metadata.json`,
`${rootPath}/metadata.json`,
JSON.stringify({
mode: 'delta',
vhds: [
`${basePath}/orphan.vhd`,
`${basePath}/child.vhd`,
`${relativePath}/orphan.vhd`,
`${relativePath}/child.vhd`,
// abandonned.json is not here
],
})
@@ -160,39 +161,39 @@ test('it remove backup meta data referencing a missing vhd in delta backup', asy
const logInfo = message => {
loggued += message + '\n'
}
await adapter.cleanVm('/', { remove: true, logInfo, logWarn: logInfo, lock: false })
await adapter.cleanVm(rootPath, { remove: true, logInfo, logWarn: logInfo, lock: false })
let matched = loggued.match(/deleting unused VHD/g) || []
expect(matched.length).toEqual(1) // only one vhd should have been deleted
assert.equal(matched.length, 1) // only one vhd should have been deleted
// a missing vhd cause clean to remove all vhds
await handler.writeFile(
`metadata.json`,
`${rootPath}/metadata.json`,
JSON.stringify({
mode: 'delta',
vhds: [
`${basePath}/deleted.vhd`, // in metadata but not in vhds
`${basePath}/orphan.vhd`,
`${basePath}/child.vhd`,
`deleted.vhd`, // in metadata but not in vhds
`orphan.vhd`,
`child.vhd`,
// abandonned.vhd is not here anymore
],
}),
{ flags: 'w' }
)
loggued = ''
await adapter.cleanVm('/', { remove: true, logInfo, logWarn: () => {}, lock: false })
await adapter.cleanVm(rootPath, { remove: true, logInfo, logWarn: () => {}, lock: false })
matched = loggued.match(/deleting unused VHD/g) || []
expect(matched.length).toEqual(2) // all vhds (orphan and child ) should have been deleted
assert.equal(matched.length, 2) // all vhds (orphan and child ) should have been deleted
})
test('it merges delta of non destroyed chain', async () => {
await handler.writeFile(
`metadata.json`,
`${rootPath}/metadata.json`,
JSON.stringify({
mode: 'delta',
size: 12000, // a size too small
vhds: [
`${basePath}/grandchild.vhd`, // grand child should not be merged
`${basePath}/child.vhd`,
`${relativePath}/grandchild.vhd`, // grand child should not be merged
`${relativePath}/child.vhd`,
// orphan is not here, it should be merged into child
],
})
@@ -219,33 +220,33 @@ test('it merges delta of non destroyed chain', async () => {
const logInfo = message => {
loggued.push(message)
}
await adapter.cleanVm('/', { remove: true, logInfo, logWarn: logInfo, lock: false })
expect(loggued[0]).toEqual(`incorrect backup size in metadata`)
await adapter.cleanVm(rootPath, { remove: true, logInfo, logWarn: logInfo, lock: false })
assert.equal(loggued[0], `unexpected number of entries in backup cache`)
loggued = []
await adapter.cleanVm('/', { remove: true, merge: true, logInfo, logWarn: () => {}, lock: false })
await adapter.cleanVm(rootPath, { remove: true, merge: true, logInfo, logWarn: () => {}, lock: false })
const [merging] = loggued
expect(merging).toEqual(`merging VHD chain`)
assert.equal(merging, `merging VHD chain`)
const metadata = JSON.parse(await handler.readFile(`metadata.json`))
const metadata = JSON.parse(await handler.readFile(`${rootPath}/metadata.json`))
// size should be the size of children + grand children after the merge
expect(metadata.size).toEqual(209920)
assert.equal(metadata.size, 209920)
// merging is already tested in vhd-lib, don't retest it here (and these VHDs are as empty as my stomach at 12h12)
// only check deletion
const remainingVhds = await handler.list(basePath)
expect(remainingVhds.length).toEqual(2)
expect(remainingVhds.includes('child.vhd')).toEqual(true)
expect(remainingVhds.includes('grandchild.vhd')).toEqual(true)
assert.equal(remainingVhds.length, 2)
assert.equal(remainingVhds.includes('child.vhd'), true)
assert.equal(remainingVhds.includes('grandchild.vhd'), true)
})
test('it finish unterminated merge ', async () => {
await handler.writeFile(
`metadata.json`,
`${rootPath}/metadata.json`,
JSON.stringify({
mode: 'delta',
size: 209920,
vhds: [`${basePath}/orphan.vhd`, `${basePath}/child.vhd`],
vhds: [`${relativePath}/orphan.vhd`, `${relativePath}/child.vhd`],
})
)
@@ -271,13 +272,13 @@ test('it finish unterminated merge ', async () => {
})
)
await adapter.cleanVm('/', { remove: true, merge: true, logWarn: () => {}, lock: false })
await adapter.cleanVm(rootPath, { remove: true, merge: true, logWarn: () => {}, lock: false })
// merging is already tested in vhd-lib, don't retest it here (and these VHDs are as empty as my stomach at 12h12)
// only check deletion
const remainingVhds = await handler.list(basePath)
expect(remainingVhds.length).toEqual(1)
expect(remainingVhds.includes('child.vhd')).toEqual(true)
assert.equal(remainingVhds.length, 1)
assert.equal(remainingVhds.includes('child.vhd'), true)
})
// each of the vhd can be a file, a directory, an alias to a file or an alias to a directory
@@ -367,22 +368,34 @@ describe('tests multiple combination ', () => {
// the metadata file
await handler.writeFile(
`metadata.json`,
`${rootPath}/metadata.json`,
JSON.stringify({
mode: 'delta',
vhds: [
`${basePath}/grandchild.vhd` + (useAlias ? '.alias.vhd' : ''), // grand child should not be merged
`${basePath}/child.vhd` + (useAlias ? '.alias.vhd' : ''),
`${basePath}/clean.vhd` + (useAlias ? '.alias.vhd' : ''),
`${relativePath}/grandchild.vhd` + (useAlias ? '.alias.vhd' : ''), // grand child should not be merged
`${relativePath}/child.vhd` + (useAlias ? '.alias.vhd' : ''),
`${relativePath}/clean.vhd` + (useAlias ? '.alias.vhd' : ''),
],
})
)
if (!useAlias && vhdMode === 'directory') {
try {
await adapter.cleanVm(rootPath, { remove: true, merge: true, logWarn: () => {}, lock: false })
} catch (err) {
assert.strictEqual(
err.code,
'NOT_SUPPORTED',
'Merging directory without alias should raise a not supported error'
)
return
}
assert.strictEqual(true, false, 'Merging directory without alias should raise an error')
}
await adapter.cleanVm(rootPath, { remove: true, merge: true, logWarn: () => {}, lock: false })
await adapter.cleanVm('/', { remove: true, merge: true, logWarn: () => {}, lock: false })
const metadata = JSON.parse(await handler.readFile(`metadata.json`))
const metadata = JSON.parse(await handler.readFile(`${rootPath}/metadata.json`))
// size should be the size of children + grand children + clean after the merge
expect(metadata.size).toEqual(vhdMode === 'file' ? 314880 : undefined)
assert.deepEqual(metadata.size, vhdMode === 'file' ? 314880 : undefined)
// broken vhd, non referenced, abandonned should be deleted ( alias and data)
// ancestor and child should be merged
@@ -392,19 +405,19 @@ describe('tests multiple combination ', () => {
if (useAlias) {
const dataSurvivors = await handler.list(basePath + '/data')
// the goal of the alias : do not move a full folder
expect(dataSurvivors).toContain('ancestor.vhd')
expect(dataSurvivors).toContain('grandchild.vhd')
expect(dataSurvivors).toContain('cleanAncestor.vhd')
expect(survivors).toContain('clean.vhd.alias.vhd')
expect(survivors).toContain('child.vhd.alias.vhd')
expect(survivors).toContain('grandchild.vhd.alias.vhd')
expect(survivors.length).toEqual(4) // the 3 ok + data
expect(dataSurvivors.length).toEqual(3) // the 3 ok + data
assert.equal(dataSurvivors.includes('ancestor.vhd'), true)
assert.equal(dataSurvivors.includes('grandchild.vhd'), true)
assert.equal(dataSurvivors.includes('cleanAncestor.vhd'), true)
assert.equal(survivors.includes('clean.vhd.alias.vhd'), true)
assert.equal(survivors.includes('child.vhd.alias.vhd'), true)
assert.equal(survivors.includes('grandchild.vhd.alias.vhd'), true)
assert.equal(survivors.length, 4) // the 3 ok + data
assert.equal(dataSurvivors.length, 3)
} else {
expect(survivors).toContain('clean.vhd')
expect(survivors).toContain('child.vhd')
expect(survivors).toContain('grandchild.vhd')
expect(survivors.length).toEqual(3)
assert.equal(survivors.includes('clean.vhd'), true)
assert.equal(survivors.includes('child.vhd'), true)
assert.equal(survivors.includes('grandchild.vhd'), true)
assert.equal(survivors.length, 3)
}
})
}
@@ -414,9 +427,9 @@ describe('tests multiple combination ', () => {
test('it cleans orphan merge states ', async () => {
await handler.writeFile(`${basePath}/.orphan.vhd.merge.json`, '')
await adapter.cleanVm('/', { remove: true, logWarn: () => {}, lock: false })
await adapter.cleanVm(rootPath, { remove: true, logWarn: () => {}, lock: false })
expect(await handler.list(basePath)).toEqual([])
assert.deepEqual(await handler.list(basePath), [])
})
test('check Aliases should work alone', async () => {
@@ -437,8 +450,8 @@ test('check Aliases should work alone', async () => {
// only the OK ones have survived
const alias = (await handler.list('vhds')).filter(f => f.endsWith('.vhd'))
expect(alias.length).toEqual(1)
assert.equal(alias.length, 1)
const data = await handler.list('vhds/data')
expect(data.length).toEqual(1)
assert.equal(data.length, 1)
})
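
The integration test drops Jest in favour of the `test` runner (`node--test`) and `assert.strict`; the matcher translation is mechanical. A small sketch of the equivalences (values are illustrative):

```js
// Jest matchers vs assert.strict, as used in the migrated test above (values illustrative).
const { test } = require('test')
const assert = require('assert').strict

test('expect() to assert.strict translation', () => {
  const list = ['child.vhd']
  // expect(list.length).toEqual(1)                    -> assert.equal(list.length, 1)
  assert.equal(list.length, 1)
  // expect(list).toEqual(['child.vhd'])               -> assert.deepEqual(list, ['child.vhd'])
  assert.deepEqual(list, ['child.vhd'])
  // expect(list.includes('child.vhd')).toEqual(true)  -> assert.equal(..., true)
  assert.equal(list.includes('child.vhd'), true)
})
```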

View File

@@ -258,6 +258,9 @@ exports.importDeltaVm = defer(async function importDeltaVm(
$defer.onFailure(() => newVdi.$destroy())
await newVdi.update_other_config(TAG_COPY_SRC, vdi.uuid)
if (vdi.virtual_size > newVdi.virtual_size) {
await newVdi.$callAsync('resize', vdi.virtual_size)
}
} else if (vdiRef === vmRecord.suspend_VDI) {
// suspendVDI has already created
newVdi = suspendVdi

View File

@@ -4,7 +4,7 @@
'use strict'
const { catchGlobalErrors } = require('@xen-orchestra/log/configure.js')
const { catchGlobalErrors } = require('@xen-orchestra/log/configure')
const { createLogger } = require('@xen-orchestra/log')
const { getSyncedHandler } = require('@xen-orchestra/fs')
const { join } = require('path')

View File

@@ -8,47 +8,51 @@
"type": "git",
"url": "https://github.com/vatesfr/xen-orchestra.git"
},
"version": "0.28.0",
"version": "0.29.5",
"engines": {
"node": ">=14.6"
},
"scripts": {
"postversion": "npm publish --access public"
"postversion": "npm publish --access public",
"test": "node--test"
},
"dependencies": {
"@vates/async-each": "^1.0.0",
"@vates/cached-dns.lookup": "^1.0.0",
"@vates/compose": "^2.1.0",
"@vates/decorate-with": "^2.0.0",
"@vates/disposable": "^0.1.1",
"@vates/disposable": "^0.1.4",
"@vates/fuse-vhd": "^1.0.0",
"@vates/nbd-client": "*",
"@vates/parse-duration": "^0.1.1",
"@xen-orchestra/async-map": "^0.1.2",
"@xen-orchestra/fs": "^3.1.0",
"@xen-orchestra/log": "^0.3.0",
"@xen-orchestra/fs": "^3.3.1",
"@xen-orchestra/log": "^0.6.0",
"@xen-orchestra/template": "^0.1.0",
"compare-versions": "^5.0.1",
"d3-time-format": "^3.0.0",
"decorator-synchronized": "^0.6.0",
"end-of-stream": "^1.4.4",
"fs-extra": "^10.0.0",
"fs-extra": "^11.1.0",
"golike-defer": "^0.5.1",
"limit-concurrency-decorator": "^0.5.0",
"lodash": "^4.17.20",
"node-zone": "^0.4.0",
"parse-pairs": "^1.1.0",
"parse-pairs": "^2.0.0",
"promise-toolbox": "^0.21.0",
"proper-lockfile": "^4.1.2",
"uuid": "^9.0.0",
"vhd-lib": "^4.1.0",
"vhd-lib": "^4.2.1",
"yazl": "^2.5.1"
},
"devDependencies": {
"rimraf": "^3.0.2",
"rimraf": "^4.1.1",
"sinon": "^15.0.1",
"test": "^3.2.1",
"tmp": "^0.2.1"
},
"peerDependencies": {
"@xen-orchestra/xapi": "^1.5.0"
"@xen-orchestra/xapi": "^1.6.1"
},
"license": "AGPL-3.0-or-later",
"author": {

View File

@@ -11,7 +11,6 @@ const { dirname } = require('path')
const { formatFilenameDate } = require('../_filenameDate.js')
const { getOldEntries } = require('../_getOldEntries.js')
const { getVmBackupDir } = require('../_getVmBackupDir.js')
const { Task } = require('../Task.js')
const { MixinBackupWriter } = require('./_MixinBackupWriter.js')
@@ -19,8 +18,9 @@ const { AbstractDeltaWriter } = require('./_AbstractDeltaWriter.js')
const { checkVhd } = require('./_checkVhd.js')
const { packUuid } = require('./_packUuid.js')
const { Disposable } = require('promise-toolbox')
const NbdClient = require('@vates/nbd-client')
const { warn } = createLogger('xo:backups:DeltaBackupWriter')
const { debug, warn, info } = createLogger('xo:backups:DeltaBackupWriter')
exports.DeltaBackupWriter = class DeltaBackupWriter extends MixinBackupWriter(AbstractDeltaWriter) {
async checkBaseVdis(baseUuidToSrcVdi) {
@@ -28,8 +28,7 @@ exports.DeltaBackupWriter = class DeltaBackupWriter extends MixinBackupWriter(Ab
const backup = this._backup
const adapter = this._adapter
const backupDir = getVmBackupDir(backup.vm.uuid)
const vdisDir = `${backupDir}/vdis/${backup.job.id}`
const vdisDir = `${this._vmBackupDir}/vdis/${backup.job.id}`
await asyncMap(baseUuidToSrcVdi, async ([baseUuid, srcVdi]) => {
let found = false
@@ -134,7 +133,7 @@ exports.DeltaBackupWriter = class DeltaBackupWriter extends MixinBackupWriter(Ab
}
}
async _transfer({ timestamp, deltaExport, sizeContainers }) {
async _transfer({ timestamp, deltaExport }) {
const adapter = this._adapter
const backup = this._backup
@@ -142,7 +141,6 @@ exports.DeltaBackupWriter = class DeltaBackupWriter extends MixinBackupWriter(Ab
const jobId = job.id
const handler = adapter.handler
const backupDir = getVmBackupDir(vm.uuid)
// TODO: clean VM backup directory
@@ -174,9 +172,10 @@ exports.DeltaBackupWriter = class DeltaBackupWriter extends MixinBackupWriter(Ab
}
const { size } = await Task.run({ name: 'transfer' }, async () => {
let transferSize = 0
await Promise.all(
map(deltaExport.vdis, async (vdi, id) => {
const path = `${backupDir}/${vhds[id]}`
const path = `${this._vmBackupDir}/${vhds[id]}`
const isDelta = vdi.other_config['xo:base_delta'] !== undefined
let parentPath
@@ -199,12 +198,34 @@ exports.DeltaBackupWriter = class DeltaBackupWriter extends MixinBackupWriter(Ab
await checkVhd(handler, parentPath)
}
await adapter.writeVhd(path, deltaExport.streams[`${id}.vhd`], {
const vdiRef = vm.$xapi.getObject(vdi.uuid).$ref
let nbdClient
if (this._backup.config.useNbd) {
debug('useNbd is enabled', { vdi: id, path })
// get nbd if possible
try {
// this will always take the first host in the list
const [nbdInfo] = await vm.$xapi.call('VDI.get_nbd_info', vdiRef)
debug('got NBD info', { nbdInfo, vdi: id, path })
nbdClient = new NbdClient(nbdInfo)
await nbdClient.connect()
info('NBD client ready', { vdi: id, path })
} catch (error) {
nbdClient = undefined
warn('error connecting to NBD server', { error, vdi: id, path })
}
} else {
debug('useNbd is disabled', { vdi: id, path })
}
transferSize += await adapter.writeVhd(path, deltaExport.streams[`${id}.vhd`], {
// no checksum for VHDs, because they will be invalidated by
// merges and chainings
checksum: false,
validator: tmpPath => checkVhd(handler, tmpPath),
writeBlockConcurrency: this._backup.config.writeBlockConcurrency,
nbdClient,
})
if (isDelta) {
@@ -219,9 +240,7 @@ exports.DeltaBackupWriter = class DeltaBackupWriter extends MixinBackupWriter(Ab
})
})
)
return {
size: Object.values(sizeContainers).reduce((sum, { size }) => sum + size, 0),
}
return { size: transferSize }
})
metadataContent.size = size
this._metadataFileName = await adapter.writeVmBackupMetadata(vm.uuid, metadataContent)
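
The NBD path is best-effort: if `VDI.get_nbd_info` or the connection fails, `nbdClient` stays undefined and the transfer falls back to the plain export stream. A hedged sketch of that fallback extracted into a helper (the helper name is hypothetical; the `NbdClient` calls match the code above):

```js
// Hypothetical helper mirroring the best-effort NBD connection in the writer above.
const NbdClient = require('@vates/nbd-client')

async function tryGetNbdClient(xapi, vdiRef) {
  try {
    // this will always take the first host in the list
    const [nbdInfo] = await xapi.call('VDI.get_nbd_info', vdiRef)
    const nbdClient = new NbdClient(nbdInfo)
    await nbdClient.connect()
    return nbdClient
  } catch (error) {
    // the real writer logs a warning here; returning undefined selects the stream fallback
    return undefined
  }
}
```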

View File

@@ -2,7 +2,6 @@
const { formatFilenameDate } = require('../_filenameDate.js')
const { getOldEntries } = require('../_getOldEntries.js')
const { getVmBackupDir } = require('../_getVmBackupDir.js')
const { Task } = require('../Task.js')
const { MixinBackupWriter } = require('./_MixinBackupWriter.js')
@@ -34,7 +33,6 @@ exports.FullBackupWriter = class FullBackupWriter extends MixinBackupWriter(Abst
const { job, scheduleId, vm } = backup
const adapter = this._adapter
const backupDir = getVmBackupDir(vm.uuid)
// TODO: clean VM backup directory
@@ -47,9 +45,8 @@ exports.FullBackupWriter = class FullBackupWriter extends MixinBackupWriter(Abst
const basename = formatFilenameDate(timestamp)
const dataBasename = basename + '.xva'
const dataFilename = backupDir + '/' + dataBasename
const dataFilename = this._vmBackupDir + '/' + dataBasename
const metadataFilename = `${backupDir}/${basename}.json`
const metadata = {
jobId: job.id,
mode: job.mode,

View File

@@ -16,7 +16,6 @@ const { info, warn } = createLogger('xo:backups:MixinBackupWriter')
exports.MixinBackupWriter = (BaseClass = Object) =>
class MixinBackupWriter extends BaseClass {
#lock
#vmBackupDir
constructor({ remoteId, ...rest }) {
super(rest)
@@ -24,13 +23,13 @@ exports.MixinBackupWriter = (BaseClass = Object) =>
this._adapter = rest.backup.remoteAdapters[remoteId]
this._remoteId = remoteId
this.#vmBackupDir = getVmBackupDir(this._backup.vm.uuid)
this._vmBackupDir = getVmBackupDir(this._backup.vm.uuid)
}
async _cleanVm(options) {
try {
return await Task.run({ name: 'clean-vm' }, () => {
return this._adapter.cleanVm(this.#vmBackupDir, {
return this._adapter.cleanVm(this._vmBackupDir, {
...options,
fixMetadata: true,
logInfo: info,
@@ -50,7 +49,7 @@ exports.MixinBackupWriter = (BaseClass = Object) =>
async beforeBackup() {
const { handler } = this._adapter
const vmBackupDir = this.#vmBackupDir
const vmBackupDir = this._vmBackupDir
await handler.mktree(vmBackupDir)
this.#lock = await handler.lock(vmBackupDir)
}
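
`#vmBackupDir` becomes `_vmBackupDir` because a `#`-prefixed field is only visible inside the class body that declares it; the `DeltaBackupWriter`/`FullBackupWriter` subclasses now build their paths from the same directory instead of recomputing it with `getVmBackupDir`. A minimal sketch of the language restriction (class names are illustrative):

```js
// Why the rename: a subclass cannot reference a parent's #private field.
class Base {
  #dir = '/backups'
  _dir = '/backups'
}

class Writer extends Base {
  show() {
    // console.log(this.#dir) // SyntaxError: #dir must be declared in an enclosing class
    console.log(this._dir) // works: the underscore prefix is only a convention
  }
}

new Writer().show()
```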

View File

@@ -8,8 +8,8 @@
Installation of the [npm package](https://npmjs.org/package/@xen-orchestra/cr-seed-cli):
```
> npm install --global @xen-orchestra/cr-seed-cli
```sh
npm install --global @xen-orchestra/cr-seed-cli
```
## Contributions

View File

@@ -10,8 +10,8 @@
Installation of the [npm package](https://npmjs.org/package/@xen-orchestra/cron):
```
> npm install --save @xen-orchestra/cron
```sh
npm install --save @xen-orchestra/cron
```
## Usage

View File

@@ -1,24 +1,20 @@
/* eslint-env jest */
'use strict'
const test = require('test')
const assert = require('assert').strict
const sinon = require('sinon')
const { createSchedule } = require('./')
jest.useFakeTimers()
const clock = sinon.useFakeTimers()
const wrap = value => () => value
describe('issues', () => {
test('issues', async t => {
let originalDateNow
beforeAll(() => {
originalDateNow = Date.now
})
afterAll(() => {
Date.now = originalDateNow
originalDateNow = undefined
})
originalDateNow = Date.now
test('stop during async execution', async () => {
await t.test('stop during async execution', async () => {
let nCalls = 0
let resolve, promise
@@ -35,20 +31,20 @@ describe('issues', () => {
job.start()
Date.now = wrap(+schedule.next(1)[0])
jest.runAllTimers()
clock.runAll()
expect(nCalls).toBe(1)
assert.strictEqual(nCalls, 1)
job.stop()
resolve()
await promise
jest.runAllTimers()
expect(nCalls).toBe(1)
clock.runAll()
assert.strictEqual(nCalls, 1)
})
test('stop then start during async job execution', async () => {
await t.test('stop then start during async job execution', async () => {
let nCalls = 0
let resolve, promise
@@ -65,9 +61,9 @@ describe('issues', () => {
job.start()
Date.now = wrap(+schedule.next(1)[0])
jest.runAllTimers()
clock.runAll()
expect(nCalls).toBe(1)
assert.strictEqual(nCalls, 1)
job.stop()
job.start()
@@ -76,7 +72,10 @@ describe('issues', () => {
await promise
Date.now = wrap(+schedule.next(1)[0])
jest.runAllTimers()
expect(nCalls).toBe(2)
clock.runAll()
assert.strictEqual(nCalls, 2)
})
Date.now = originalDateNow
originalDateNow = undefined
})
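
Jest's fake timers give way to Sinon's: `sinon.useFakeTimers()` patches the global clock and `clock.runAll()` (or `clock.tick()`) fires the pending timers synchronously. A standalone sketch of the pattern with the `test` runner (the interval callback is illustrative):

```js
// Sinon fake-timer pattern used in the migrated test above (interval callback is illustrative).
const test = require('test')
const assert = require('assert').strict
const sinon = require('sinon')

test('fake timers', () => {
  const clock = sinon.useFakeTimers()
  let nCalls = 0
  setInterval(() => ++nCalls, 1000)

  clock.tick(3000) // advance the fake clock by 3 seconds, firing the interval 3 times
  assert.strictEqual(nCalls, 3)

  clock.restore() // put the real timers back
})
```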

View File

@@ -1,7 +1,8 @@
/* eslint-env jest */
'use strict'
const { describe, it } = require('test')
const assert = require('assert').strict
const mapValues = require('lodash/mapValues')
const moment = require('moment-timezone')
@@ -25,24 +26,24 @@ describe('next()', () => {
},
([pattern, result], title) =>
it(title, () => {
expect(N(pattern)).toBe(result)
assert.strictEqual(N(pattern), result)
})
)
it('select first between month-day and week-day', () => {
expect(N('* * 10 * wen')).toBe('2018-04-10T00:00')
expect(N('* * 12 * wen')).toBe('2018-04-11T00:00')
assert.strictEqual(N('* * 10 * wen'), '2018-04-10T00:00')
assert.strictEqual(N('* * 12 * wen'), '2018-04-11T00:00')
})
it('select the last available day of a month', () => {
expect(N('* * 29 feb *')).toBe('2020-02-29T00:00')
assert.strictEqual(N('* * 29 feb *'), '2020-02-29T00:00')
})
it('fails when no solutions has been found', () => {
expect(() => N('0 0 30 feb *')).toThrow('no solutions found for this schedule')
assert.throws(() => N('0 0 30 feb *'), { message: 'no solutions found for this schedule' })
})
it('select the first sunday of the month', () => {
expect(N('* * * * 0', '2018-03-31T00:00')).toBe('2018-04-01T00:00')
assert.strictEqual(N('* * * * 0', '2018-03-31T00:00'), '2018-04-01T00:00')
})
})

View File

@@ -38,6 +38,11 @@
"moment-timezone": "^0.5.14"
},
"scripts": {
"postversion": "npm publish"
"postversion": "npm publish",
"test": "node--test"
},
"devDependencies": {
"sinon": "^15.0.1",
"test": "^3.2.1"
}
}

View File

@@ -1,49 +0,0 @@
/* eslint-env jest */
'use strict'
const parse = require('./parse')
describe('parse()', () => {
it('works', () => {
expect(parse('0 0-10 */10 jan,2,4-11/3 *')).toEqual({
minute: [0],
hour: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10],
dayOfMonth: [1, 11, 21, 31],
month: [0, 2, 4, 7, 10],
})
})
it('correctly parse months', () => {
expect(parse('* * * 0,11 *')).toEqual({
month: [0, 11],
})
expect(parse('* * * jan,dec *')).toEqual({
month: [0, 11],
})
})
it('correctly parse days', () => {
expect(parse('* * * * mon,sun')).toEqual({
dayOfWeek: [0, 1],
})
})
it('reports missing integer', () => {
expect(() => parse('*/a')).toThrow('minute: missing integer at character 2')
expect(() => parse('*')).toThrow('hour: missing integer at character 1')
})
it('reports invalid aliases', () => {
expect(() => parse('* * * jan-foo *')).toThrow('month: missing alias or integer at character 10')
})
it('dayOfWeek: 0 and 7 bind to sunday', () => {
expect(parse('* * * * 0')).toEqual({
dayOfWeek: [0],
})
expect(parse('* * * * 7')).toEqual({
dayOfWeek: [0],
})
})
})

View File

@@ -0,0 +1,50 @@
'use strict'
const { describe, it } = require('test')
const assert = require('assert').strict
const parse = require('./parse')
describe('parse()', () => {
it('works', () => {
assert.deepStrictEqual(parse('0 0-10 */10 jan,2,4-11/3 *'), {
minute: [0],
hour: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10],
dayOfMonth: [1, 11, 21, 31],
month: [0, 2, 4, 7, 10],
})
})
it('correctly parse months', () => {
assert.deepStrictEqual(parse('* * * 0,11 *'), {
month: [0, 11],
})
assert.deepStrictEqual(parse('* * * jan,dec *'), {
month: [0, 11],
})
})
it('correctly parse days', () => {
assert.deepStrictEqual(parse('* * * * mon,sun'), {
dayOfWeek: [0, 1],
})
})
it('reports missing integer', () => {
assert.throws(() => parse('*/a'), { message: 'minute: missing integer at character 2' })
assert.throws(() => parse('*'), { message: 'hour: missing integer at character 1' })
})
it('reports invalid aliases', () => {
assert.throws(() => parse('* * * jan-foo *'), { message: 'month: missing alias or integer at character 10' })
})
it('dayOfWeek: 0 and 7 bind to sunday', () => {
assert.deepStrictEqual(parse('* * * * 0'), {
dayOfWeek: [0],
})
assert.deepStrictEqual(parse('* * * * 7'), {
dayOfWeek: [0],
})
})
})

View File

@@ -10,8 +10,8 @@
Installation of the [npm package](https://npmjs.org/package/@xen-orchestra/defined):
```
> npm install --save @xen-orchestra/defined
```sh
npm install --save @xen-orchestra/defined
```
## Contributions

View File

@@ -10,8 +10,8 @@
Installation of the [npm package](https://npmjs.org/package/@xen-orchestra/emit-async):
```
> npm install --save @xen-orchestra/emit-async
```sh
npm install --save @xen-orchestra/emit-async
```
## Usage

View File

@@ -10,8 +10,8 @@
Installation of the [npm package](https://npmjs.org/package/@xen-orchestra/fs):
```
> npm install --global @xen-orchestra/fs
```sh
npm install --global @xen-orchestra/fs
```
## Contributions

View File

@@ -1,7 +1,7 @@
{
"private": false,
"name": "@xen-orchestra/fs",
"version": "3.1.0",
"version": "3.3.1",
"license": "AGPL-3.0-or-later",
"description": "The File System for Xen Orchestra backups.",
"homepage": "https://github.com/vatesfr/xen-orchestra/tree/master/@xen-orchestra/fs",
@@ -28,22 +28,21 @@
"@vates/async-each": "^1.0.0",
"@vates/coalesce-calls": "^0.1.0",
"@vates/decorate-with": "^2.0.0",
"@vates/read-chunk": "^1.0.0",
"@vates/read-chunk": "^1.0.1",
"@xen-orchestra/async-map": "^0.1.2",
"@xen-orchestra/log": "^0.3.0",
"@xen-orchestra/log": "^0.6.0",
"bind-property-descriptor": "^2.0.0",
"decorator-synchronized": "^0.6.0",
"execa": "^5.0.0",
"fs-extra": "^10.0.0",
"fs-extra": "^11.1.0",
"get-stream": "^6.0.0",
"limit-concurrency-decorator": "^0.5.0",
"lodash": "^4.17.4",
"promise-toolbox": "^0.21.0",
"proper-lockfile": "^4.1.2",
"pumpify": "^2.0.1",
"readable-stream": "^4.1.0",
"through2": "^4.0.2",
"xo-remote-parser": "^0.9.1"
"xo-remote-parser": "^0.9.2"
},
"devDependencies": {
"@babel/cli": "^7.0.0",
@@ -54,7 +53,8 @@
"babel-plugin-lodash": "^3.3.2",
"cross-env": "^7.0.2",
"dotenv": "^16.0.0",
"rimraf": "^3.0.0"
"rimraf": "^4.1.1",
"tmp": "^0.2.1"
},
"scripts": {
"build": "cross-env NODE_ENV=production babel --source-maps --out-dir=dist/ src/",

View File

@@ -1,8 +1,15 @@
const { pipeline } = require('node:stream')
const { readChunk } = require('@vates/read-chunk')
const crypto = require('crypto')
const pumpify = require('pumpify')
function getEncryptor(key) {
export const DEFAULT_ENCRYPTION_ALGORITHM = 'aes-256-gcm'
export const UNENCRYPTED_ALGORITHM = 'none'
export function isLegacyEncryptionAlgorithm(algorithm) {
return algorithm !== UNENCRYPTED_ALGORITHM && algorithm !== DEFAULT_ENCRYPTION_ALGORITHM
}
function getEncryptor(algorithm = DEFAULT_ENCRYPTION_ALGORITHM, key) {
if (key === undefined) {
return {
id: 'NULL_ENCRYPTOR',
@@ -15,43 +22,100 @@ function getEncryptor(key) {
decryptStream: stream => stream,
}
}
const algorithm = 'aes-256-cbc'
const ivLength = 16
const info = crypto.getCipherInfo(algorithm, { keyLength: key.length })
if (info === undefined) {
const error = new Error(
`Either the algorithm ${algorithm} is not available, or the key length ${
key.length
} is incorrect. Supported algorithm are ${crypto.getCiphers()}`
)
error.code = 'BAD_ALGORITHM'
throw error
}
const { ivLength, mode } = info
const authTagLength = ['gcm', 'ccm', 'ocb'].includes(mode) ? 16 : 0
function encryptStream(input) {
const iv = crypto.randomBytes(ivLength)
const cipher = crypto.createCipheriv(algorithm, Buffer.from(key), iv)
const encrypted = pumpify(input, cipher)
encrypted.unshift(iv)
return encrypted
return pipeline(
input,
async function* (source) {
const iv = crypto.randomBytes(ivLength)
const cipher = crypto.createCipheriv(algorithm, Buffer.from(key), iv)
yield iv
for await (const data of source) {
yield cipher.update(data)
}
yield cipher.final()
// must write the auth tag at the end of the encryption stream
if (authTagLength > 0) {
yield cipher.getAuthTag()
}
},
() => {}
)
}
async function decryptStream(encryptedStream) {
const iv = await readChunk(encryptedStream, ivLength)
const cipher = crypto.createDecipheriv(algorithm, Buffer.from(key), iv)
/**
* WARNING
*
* the crytped size has an initializtion vector + a padding at the end
* whe can't predict the decrypted size from the start of the encrypted size
* thus, we can't set decrypted.length reliably
*
*/
return pumpify(encryptedStream, cipher)
function decryptStream(encryptedStream) {
return pipeline(
encryptedStream,
async function* (source) {
/**
* WARNING
*
* the encrypted data has an initialization vector + possibly an auth tag + padding at the end:
* we can't predict the decrypted size from the encrypted size,
* thus we can't set decrypted.length reliably
*
*/
const iv = await readChunk(source, ivLength)
const cipher = crypto.createDecipheriv(algorithm, Buffer.from(key), iv)
let authTag = Buffer.alloc(0)
for await (const data of source) {
if (data.length >= authTagLength) {
// fast path, no buffer concat
yield cipher.update(authTag)
authTag = data.slice(data.length - authTagLength)
yield cipher.update(data.slice(0, data.length - authTagLength))
} else {
// slower since there is a concat
const fullData = Buffer.concat([authTag, data])
const fullDataLength = fullData.length
if (fullDataLength > authTagLength) {
authTag = fullData.slice(fullDataLength - authTagLength)
yield cipher.update(fullData.slice(0, fullDataLength - authTagLength))
} else {
authTag = fullData
}
}
}
if (authTagLength > 0) {
cipher.setAuthTag(authTag)
}
yield cipher.final()
},
() => {}
)
}
function encryptData(buffer) {
const iv = crypto.randomBytes(ivLength)
const cipher = crypto.createCipheriv(algorithm, Buffer.from(key), iv)
const encrypted = cipher.update(buffer)
return Buffer.concat([iv, encrypted, cipher.final()])
return Buffer.concat([iv, encrypted, cipher.final(), authTagLength > 0 ? cipher.getAuthTag() : Buffer.alloc(0)])
}
function decryptData(buffer) {
const iv = buffer.slice(0, ivLength)
const encrypted = buffer.slice(ivLength)
const decipher = crypto.createDecipheriv(algorithm, Buffer.from(key), iv)
let encrypted
if (authTagLength > 0) {
const authTag = buffer.slice(buffer.length - authTagLength)
decipher.setAuthTag(authTag)
encrypted = buffer.slice(ivLength, buffer.length - authTagLength)
} else {
encrypted = buffer.slice(ivLength)
}
const decrypted = decipher.update(encrypted)
return Buffer.concat([decrypted, decipher.final()])
}
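
The encryptor now defaults to `aes-256-gcm`; the ciphertext is laid out as IV, data, then the 16-byte auth tag (for buffers and streams alike). A hedged roundtrip sketch using the same export the tests rely on (the relative import path and the key are illustrative):

```js
// Roundtrip sketch; the relative import path and the 32-byte key are illustrative.
import { DEFAULT_ENCRYPTION_ALGORITHM, _getEncryptor } from './_encryptor'
import { strict as assert } from 'assert'

const key = '73c1838d7d8a6088ca2317fb5f29cd91' // 32 bytes -> aes-256
const encryptor = _getEncryptor(DEFAULT_ENCRYPTION_ALGORITHM, key)

const clear = Buffer.from('{"random":"NOTSORANDOM"}')
const encrypted = encryptor.encryptData(clear)
// layout for aes-256-gcm: 12-byte IV | ciphertext | 16-byte auth tag
assert.ok(!encrypted.equals(clear))
assert.ok(encryptor.decryptData(encrypted).equals(clear))
```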

View File

@@ -0,0 +1,50 @@
/* eslint-env jest */
import { Readable } from 'node:stream'
import { _getEncryptor } from './_encryptor'
import crypto from 'crypto'
const algorithms = ['none', 'aes-256-cbc', 'aes-256-gcm']
function streamToBuffer(stream) {
return new Promise(resolve => {
const bufs = []
stream.on('data', function (d) {
bufs.push(d)
})
stream.on('end', function () {
resolve(Buffer.concat(bufs))
})
})
}
algorithms.forEach(algorithm => {
describe(`test algorithm ${algorithm}`, () => {
const key = algorithm === 'none' ? undefined : '73c1838d7d8a6088ca2317fb5f29cd91'
const encryptor = _getEncryptor(algorithm, key)
const buffer = crypto.randomBytes(1024 * 1024 + 1)
it('handle buffer', () => {
const encrypted = encryptor.encryptData(buffer)
if (algorithm !== 'none') {
expect(encrypted.equals(buffer)).toEqual(false) // encrypted should be different
// ivlength, auth tag, padding
expect(encrypted.length).not.toEqual(buffer.length)
}
const decrypted = encryptor.decryptData(encrypted)
expect(decrypted.equals(buffer)).toEqual(true)
})
it('handle stream', async () => {
const stream = Readable.from(buffer)
stream.length = buffer.length
const encrypted = encryptor.encryptStream(stream)
if (algorithm !== 'none') {
expect(encrypted.length).toEqual(undefined)
}
const decrypted = encryptor.decryptStream(encrypted)
const decryptedBuffer = await streamToBuffer(decrypted)
expect(decryptedBuffer.equals(buffer)).toEqual(true)
})
})
})

View File

@@ -12,9 +12,9 @@ import { synchronized } from 'decorator-synchronized'
import { basename, dirname, normalize as normalizePath } from './path'
import { createChecksumStream, validChecksumOfReadStream } from './checksum'
import { _getEncryptor } from './_encryptor'
import { DEFAULT_ENCRYPTION_ALGORITHM, _getEncryptor } from './_encryptor'
const { info, warn } = createLogger('@xen-orchestra:fs')
const { info, warn } = createLogger('xo:fs:abstract')
const checksumFile = file => file + '.checksum'
const computeRate = (hrtime, size) => {
@@ -68,7 +68,15 @@ class PrefixWrapper {
}
export default class RemoteHandlerAbstract {
_encryptor
#encryptor
get _encryptor() {
if (this.#encryptor === undefined) {
throw new Error(`Can't access to encryptor before remote synchronization`)
}
return this.#encryptor
}
constructor(remote, options = {}) {
if (remote.url === 'test://') {
this._remote = remote
@@ -79,7 +87,6 @@ export default class RemoteHandlerAbstract {
}
}
;({ highWaterMark: this._highWaterMark, timeout: this._timeout = DEFAULT_TIMEOUT } = options)
this._encryptor = _getEncryptor(this._remote.encryptionKey)
const sharedLimit = limitConcurrency(options.maxParallelOperations ?? DEFAULT_MAX_PARALLEL_OPERATIONS)
this.closeFile = sharedLimit(this.closeFile)
@@ -277,15 +284,25 @@ export default class RemoteHandlerAbstract {
return this._encryptor.decryptData(data)
}
async rename(oldPath, newPath, { checksum = false } = {}) {
oldPath = normalizePath(oldPath)
newPath = normalizePath(newPath)
let p = timeout.call(this._rename(oldPath, newPath), this._timeout)
if (checksum) {
p = Promise.all([p, this._rename(checksumFile(oldPath), checksumFile(newPath))])
async #rename(oldPath, newPath, { checksum }, createTree = true) {
try {
let p = timeout.call(this._rename(oldPath, newPath), this._timeout)
if (checksum) {
p = Promise.all([p, this._rename(checksumFile(oldPath), checksumFile(newPath))])
}
await p
} catch (error) {
// ENOENT can be a missing target directory OR a missing source
if (error.code === 'ENOENT' && createTree) {
await this._mktree(dirname(newPath))
return this.#rename(oldPath, newPath, { checksum }, false)
}
throw error
}
return p
}
rename(oldPath, newPath, { checksum = false } = {}) {
return this.#rename(normalizePath(oldPath), normalizePath(newPath), { checksum })
}
async copy(oldPath, newPath, { checksum = false } = {}) {
@@ -330,44 +347,54 @@ export default class RemoteHandlerAbstract {
}
async _createMetadata() {
const encryptionAlgorithm = this._remote.encryptionKey === undefined ? 'none' : DEFAULT_ENCRYPTION_ALGORITHM
this.#encryptor = _getEncryptor(encryptionAlgorithm, this._remote.encryptionKey)
await Promise.all([
this._writeFile(
normalizePath(ENCRYPTION_DESC_FILENAME),
JSON.stringify({ algorithm: this._encryptor.algorithm }),
{
flags: 'w',
}
), // not encrypted
this._writeFile(normalizePath(ENCRYPTION_DESC_FILENAME), JSON.stringify({ algorithm: encryptionAlgorithm }), {
flags: 'w',
}), // not encrypted
this.writeFile(ENCRYPTION_METADATA_FILENAME, `{"random":"${randomUUID()}"}`, { flags: 'w' }), // encrypted
])
}
async _checkMetadata() {
let encryptionAlgorithm = 'none'
let data
try {
// this file is not encrypted
const data = await this._readFile(normalizePath(ENCRYPTION_DESC_FILENAME))
JSON.parse(data)
data = await this._readFile(normalizePath(ENCRYPTION_DESC_FILENAME), 'utf-8')
const json = JSON.parse(data)
encryptionAlgorithm = json.algorithm
} catch (error) {
if (error.code !== 'ENOENT') {
throw error
}
encryptionAlgorithm = this._remote.encryptionKey === undefined ? 'none' : DEFAULT_ENCRYPTION_ALGORITHM
}
try {
this.#encryptor = _getEncryptor(encryptionAlgorithm, this._remote.encryptionKey)
// this file is encrypted
const data = await this.readFile(ENCRYPTION_METADATA_FILENAME)
const data = await this.readFile(ENCRYPTION_METADATA_FILENAME, 'utf-8')
JSON.parse(data)
} catch (error) {
if (error.code === 'ENOENT' || (await this._canWriteMetadata())) {
info('will update metadata of this remote')
return this._createMetadata()
// can be ENOENT, a bad algorithm, or broken JSON (bad key or algorithm)
if (encryptionAlgorithm !== 'none') {
if (await this._canWriteMetadata()) {
// any other error , but on empty remote => update with remote settings
info('will update metadata of this remote')
return this._createMetadata()
} else {
warn(
`The encryptionKey settings of this remote does not match the key used to create it. You won't be able to read any data from this remote`,
{ error }
)
// will probably send a ERR_OSSL_EVP_BAD_DECRYPT if key is incorrect
throw error
}
}
warn(
`The encryptionKey settings of this remote does not match the key used to create it. You won't be able to read any data from this remote`,
{ error }
)
// will probably send a ERR_OSSL_EVP_BAD_DECRYPT if key is incorrect
throw error
}
}
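
On sync, `_checkMetadata` reads the plaintext `encryption.json` to select the algorithm, then tries to decrypt `metadata.json`: an empty remote gets its metadata (re)created, while a non-empty remote with a mismatched key or algorithm fails the sync. A sketch of syncing an encrypted `file://` remote (directory and key are illustrative; the `encryptionKey` query parameter matches the handler tests that follow):

```js
// Sketch: syncing an encrypted local remote creates encryption.json + metadata.json.
import { getSyncedHandler } from '@xen-orchestra/fs'
import Disposable from 'promise-toolbox/Disposable'

const url = 'file:///tmp/xo-remote?encryptionKey="73c1838d7d8a6088ca2317fb5f29cd00"' // illustrative

await Disposable.use(getSyncedHandler({ url }), async handler => {
  // encryption.json is written unencrypted ({"algorithm":"aes-256-gcm"}),
  // metadata.json is encrypted with the provided key
  console.log(await handler.list('/'))
})
```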

View File

@@ -1,21 +1,29 @@
/* eslint-env jest */
import { TimeoutError } from 'promise-toolbox'
import { DEFAULT_ENCRYPTION_ALGORITHM, _getEncryptor } from './_encryptor'
import { Disposable, pFromCallback, TimeoutError } from 'promise-toolbox'
import { getSyncedHandler } from '.'
import AbstractHandler from './abstract'
import fs from 'fs-extra'
import rimraf from 'rimraf'
import tmp from 'tmp'
const TIMEOUT = 10e3
class TestHandler extends AbstractHandler {
constructor(impl) {
super({ url: 'test://' }, { timeout: TIMEOUT })
Object.defineProperty(this, 'isEncrypted', {
get: () => false, // encryption is tested separately
})
Object.keys(impl).forEach(method => {
this[`_${method}`] = impl[method]
})
}
}
const noop = Function.prototype
jest.useFakeTimers()
describe('closeFile()', () => {
@@ -101,3 +109,112 @@ describe('rmdir()', () => {
await expect(promise).rejects.toThrowError(TimeoutError)
})
})
describe('encryption', () => {
let dir
beforeEach(async () => {
dir = await pFromCallback(cb => tmp.dir(cb))
})
afterAll(async () => {
await rimraf(dir)
})
it('sync should NOT create metadata if missing (not encrypted)', async () => {
await Disposable.use(getSyncedHandler({ url: `file://${dir}` }), noop)
expect(await fs.readdir(dir)).toEqual([])
})
it('sync should create metadata if missing (encrypted)', async () => {
await Disposable.use(
getSyncedHandler({ url: `file://${dir}?encryptionKey="73c1838d7d8a6088ca2317fb5f29cd00"` }),
noop
)
expect(await fs.readdir(dir)).toEqual(['encryption.json', 'metadata.json'])
const encryption = JSON.parse(await fs.readFile(`${dir}/encryption.json`, 'utf-8'))
expect(encryption.algorithm).toEqual(DEFAULT_ENCRYPTION_ALGORITHM)
// encrypted , should not be parsable
expect(async () => JSON.parse(await fs.readFile(`${dir}/metadata.json`))).rejects.toThrowError()
})
it('sync should not modify existing metadata', async () => {
await fs.writeFile(`${dir}/encryption.json`, `{"algorithm": "none"}`)
await fs.writeFile(`${dir}/metadata.json`, `{"random": "NOTSORANDOM"}`)
await Disposable.use(await getSyncedHandler({ url: `file://${dir}` }), noop)
const encryption = JSON.parse(await fs.readFile(`${dir}/encryption.json`, 'utf-8'))
expect(encryption.algorithm).toEqual('none')
const metadata = JSON.parse(await fs.readFile(`${dir}/metadata.json`, 'utf-8'))
expect(metadata.random).toEqual('NOTSORANDOM')
})
it('should modify metadata if empty', async () => {
await Disposable.use(getSyncedHandler({ url: `file://${dir}` }), noop)
// nothing created without encryption
await Disposable.use(
getSyncedHandler({ url: `file://${dir}?encryptionKey="73c1838d7d8a6088ca2317fb5f29cd00"` }),
noop
)
let encryption = JSON.parse(await fs.readFile(`${dir}/encryption.json`, 'utf-8'))
expect(encryption.algorithm).toEqual(DEFAULT_ENCRYPTION_ALGORITHM)
await Disposable.use(getSyncedHandler({ url: `file://${dir}` }), noop)
encryption = JSON.parse(await fs.readFile(`${dir}/encryption.json`, 'utf-8'))
expect(encryption.algorithm).toEqual('none')
})
it(
'sync should work with encrypted',
Disposable.wrap(async function* () {
const encryptor = _getEncryptor(DEFAULT_ENCRYPTION_ALGORITHM, '73c1838d7d8a6088ca2317fb5f29cd91')
await fs.writeFile(`${dir}/encryption.json`, `{"algorithm": "${DEFAULT_ENCRYPTION_ALGORITHM}"}`)
await fs.writeFile(`${dir}/metadata.json`, encryptor.encryptData(`{"random": "NOTSORANDOM"}`))
const handler = yield getSyncedHandler({ url: `file://${dir}?encryptionKey="73c1838d7d8a6088ca2317fb5f29cd91"` })
const encryption = JSON.parse(await fs.readFile(`${dir}/encryption.json`, 'utf-8'))
expect(encryption.algorithm).toEqual(DEFAULT_ENCRYPTION_ALGORITHM)
const metadata = JSON.parse(await handler.readFile(`./metadata.json`))
expect(metadata.random).toEqual('NOTSORANDOM')
})
)
it('sync should fail when changing key on non empty remote ', async () => {
const encryptor = _getEncryptor(DEFAULT_ENCRYPTION_ALGORITHM, '73c1838d7d8a6088ca2317fb5f29cd91')
await fs.writeFile(`${dir}/encryption.json`, `{"algorithm": "${DEFAULT_ENCRYPTION_ALGORITHM}"}`)
await fs.writeFile(`${dir}/metadata.json`, encryptor.encryptData(`{"random": "NOTSORANDOM"}`))
// different key but empty remote => ok
await Disposable.use(
getSyncedHandler({ url: `file://${dir}?encryptionKey="73c1838d7d8a6088ca2317fb5f29cd00"` }),
noop
)
// remote is now non empty : can't modify key anymore
await fs.writeFile(`${dir}/nonempty.json`, 'content')
await expect(
Disposable.use(getSyncedHandler({ url: `file://${dir}?encryptionKey="73c1838d7d8a6088ca2317fb5f29cd10"` }), noop)
).rejects.toThrowError()
})
it('sync should fail when changing algorithm', async () => {
// encrypt with a non default algorithm
const encryptor = _getEncryptor('aes-256-cbc', '73c1838d7d8a6088ca2317fb5f29cd91')
await fs.writeFile(`${dir}/encryption.json`, `{"algorithm": "aes-256-gmc"}`)
await fs.writeFile(`${dir}/metadata.json`, encryptor.encryptData(`{"random": "NOTSORANDOM"}`))
// remote is now non empty : can't modify key anymore
await fs.writeFile(`${dir}/nonempty.json`, 'content')
await expect(
Disposable.use(getSyncedHandler({ url: `file://${dir}?encryptionKey="73c1838d7d8a6088ca2317fb5f29cd91"` }), noop)
).rejects.toThrowError()
})
})

Some files were not shown because too many files have changed in this diff.