Compare commits
431 Commits
licenses-d
...
complex-ma
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
8e9946b645 | ||
|
|
4911bbe3a2 | ||
|
|
e0b6ab3f8a | ||
|
|
8736c2cf9a | ||
|
|
d825c33b55 | ||
|
|
171ecaaf62 | ||
|
|
5e6d5d4eb0 | ||
|
|
3733a3c335 | ||
|
|
7fca6defd6 | ||
|
|
2a270b399e | ||
|
|
64109aee05 | ||
|
|
e1d9395128 | ||
|
|
32eec95c26 | ||
|
|
f41cca45aa | ||
|
|
48eeab974c | ||
|
|
eed44156ae | ||
|
|
1177d9bdd8 | ||
|
|
d151a94285 | ||
|
|
a7fe6453ee | ||
|
|
313eb136f4 | ||
|
|
98591ff83d | ||
|
|
0b9d78560b | ||
|
|
32a930e598 | ||
|
|
edd8512196 | ||
|
|
7a6aec34ae | ||
|
|
009a0c5703 | ||
|
|
a99086b6bd | ||
|
|
a186672447 | ||
|
|
0b8a7c0d09 | ||
|
|
1990bf3d7a | ||
|
|
ea74a7e401 | ||
|
|
bf12c3ff74 | ||
|
|
9d261aae76 | ||
|
|
3d8c8fd745 | ||
|
|
6ad7db522a | ||
|
|
385984b1d8 | ||
|
|
4f3d4b06b5 | ||
|
|
2291986e2c | ||
|
|
fc81cf4d70 | ||
|
|
fdeab86a87 | ||
|
|
3616b7a67b | ||
|
|
83ea57d825 | ||
|
|
24a69bcade | ||
|
|
58dc3244be | ||
|
|
61e580b992 | ||
|
|
1116530a6b | ||
|
|
8cfaabedeb | ||
|
|
66ba05dcd0 | ||
|
|
d1db616d1e | ||
|
|
aed09b152a | ||
|
|
f755365e23 | ||
|
|
ccd34c1610 | ||
|
|
f9104e6cc9 | ||
|
|
4bb702fe89 | ||
|
|
511a04dad5 | ||
|
|
f3527a44d7 | ||
|
|
fdbe84cb1e | ||
|
|
45fe70f0fa | ||
|
|
2aed2fd534 | ||
|
|
a523fa9733 | ||
|
|
0f42f032e4 | ||
|
|
4575b98fd5 | ||
|
|
3a0cc0d6f6 | ||
|
|
626e2fcb12 | ||
|
|
592feb54b7 | ||
|
|
9c6b63e7e4 | ||
|
|
4364a74b7a | ||
|
|
00f13102f8 | ||
|
|
3f17389871 | ||
|
|
726ba287b1 | ||
|
|
42ee29cb3c | ||
|
|
8a98b6b012 | ||
|
|
14ab694804 | ||
|
|
14b8cda543 | ||
|
|
4264e34ffd | ||
|
|
bd9bf55e43 | ||
|
|
7c802bbd33 | ||
|
|
9e37f3f586 | ||
|
|
1d4f5d068a | ||
|
|
5be5eb80e8 | ||
|
|
12c774a34a | ||
|
|
14c3fa4378 | ||
|
|
2f17420721 | ||
|
|
8d7f8d156f | ||
|
|
38248d8c35 | ||
|
|
edaae02892 | ||
|
|
846eff4984 | ||
|
|
481adf3a1e | ||
|
|
d622f7a65c | ||
|
|
a479501aef | ||
|
|
2456374e5a | ||
|
|
c77016ea44 | ||
|
|
6fd45a37e2 | ||
|
|
9be56d3ab8 | ||
|
|
24b264b6c9 | ||
|
|
7f9130470b | ||
|
|
b82aa1daa5 | ||
|
|
53cb325974 | ||
|
|
1256c320e3 | ||
|
|
15bc30a2d5 | ||
|
|
fc3bc8468f | ||
|
|
b4e068f630 | ||
|
|
08eef80673 | ||
|
|
152f73ebf0 | ||
|
|
38de5048bc | ||
|
|
c4d96fbc49 | ||
|
|
ff25d402c1 | ||
|
|
f957024605 | ||
|
|
006e54e2fd | ||
|
|
5f7bc58788 | ||
|
|
bdd93603aa | ||
|
|
8392a17cb2 | ||
|
|
5f7f0b777e | ||
|
|
3f574606d9 | ||
|
|
45f0f93895 | ||
|
|
af2710135b | ||
|
|
95ed6094fe | ||
|
|
6af8ce9eeb | ||
|
|
3ff37f00fe | ||
|
|
ed5b066cbe | ||
|
|
cec5593c70 | ||
|
|
04924884ad | ||
|
|
3ccf64fcd3 | ||
|
|
8eb7f9b91c | ||
|
|
f25c50c629 | ||
|
|
e524a1b865 | ||
|
|
ac15e3355e | ||
|
|
0930a37819 | ||
|
|
d62f91a9e6 | ||
|
|
2789ead999 | ||
|
|
f25fd267dd | ||
|
|
47999f1f72 | ||
|
|
095bbcd15c | ||
|
|
9177bb8451 | ||
|
|
119bf9b0ff | ||
|
|
015c6037c4 | ||
|
|
452a7e7445 | ||
|
|
407586e2d5 | ||
|
|
ffa431a3cd | ||
|
|
281a5ff991 | ||
|
|
92db9bd284 | ||
|
|
ea8f319f45 | ||
|
|
a11e9fe04e | ||
|
|
27367bd1fc | ||
|
|
c6f48ae054 | ||
|
|
7d6efe3694 | ||
|
|
f4aad05edc | ||
|
|
d8f7637ca0 | ||
|
|
f9a7bd199e | ||
|
|
68b7ed284a | ||
|
|
e782895cf5 | ||
|
|
a5935b40d5 | ||
|
|
035d2cb440 | ||
|
|
2a74a49995 | ||
|
|
902953a1fa | ||
|
|
1ffef91b7a | ||
|
|
3d13d9b0dc | ||
|
|
adcc5d5692 | ||
|
|
c49d70170e | ||
|
|
349a78a5bd | ||
|
|
48734c6896 | ||
|
|
0f60a3b24d | ||
|
|
d3a88011a6 | ||
|
|
9b6e4c605b | ||
|
|
7c91524111 | ||
|
|
e1573069e4 | ||
|
|
f2459c964b | ||
|
|
43aa0b815d | ||
|
|
0740630e05 | ||
|
|
c9244b2b13 | ||
|
|
0d398f867f | ||
|
|
b74ec2d7d3 | ||
|
|
26a295c8ed | ||
|
|
2a71d3d20c | ||
|
|
b79605b692 | ||
|
|
ea0fc68a53 | ||
|
|
1ca5c32de3 | ||
|
|
f51bcfa05a | ||
|
|
e1bf68ab38 | ||
|
|
99e03b7ce5 | ||
|
|
cd70d3ea46 | ||
|
|
d387227cef | ||
|
|
2f4530e426 | ||
|
|
4db181d8bf | ||
|
|
9a7a1cc752 | ||
|
|
59ca6c6708 | ||
|
|
fe7901ca7f | ||
|
|
9351b4a5bb | ||
|
|
dfdd0a0496 | ||
|
|
cda39ec256 | ||
|
|
3720a46ff3 | ||
|
|
7ea50ea41e | ||
|
|
60a696916b | ||
|
|
b6a255d96f | ||
|
|
44a0cce7f2 | ||
|
|
f580e0d26f | ||
|
|
6beefe86e2 | ||
|
|
cbada35788 | ||
|
|
44ff2f872d | ||
|
|
2198853662 | ||
|
|
4636109081 | ||
|
|
1c042778b6 | ||
|
|
34b5962eac | ||
|
|
fc7af59eb7 | ||
|
|
7e557ca059 | ||
|
|
1d0cea8ad0 | ||
|
|
5c901d7c1e | ||
|
|
1dffab0bb8 | ||
|
|
ae89e14ea2 | ||
|
|
908255060c | ||
|
|
88278d0041 | ||
|
|
86bfd91c9d | ||
|
|
0ee412ccb9 | ||
|
|
b8bd6ea820 | ||
|
|
98a1ab3033 | ||
|
|
e360f53a40 | ||
|
|
237ec38003 | ||
|
|
30ea1bbf87 | ||
|
|
0d0aef6014 | ||
|
|
1b7441715c | ||
|
|
e3223b6124 | ||
|
|
41fb06187b | ||
|
|
adf0e8ae3b | ||
|
|
42dd1efb41 | ||
|
|
b6a6694abf | ||
|
|
04f2f50d6d | ||
|
|
6d1048e5c5 | ||
|
|
fe722c8b31 | ||
|
|
0326ce1d85 | ||
|
|
183ddb68d3 | ||
|
|
d7fe1afc08 | ||
|
|
ae9aeaf5fd | ||
|
|
ec9476216f | ||
|
|
619f2ef119 | ||
|
|
52020abde8 | ||
|
|
1bd504d67e | ||
|
|
edc4414de4 | ||
|
|
c1d588264c | ||
|
|
94b84b75ad | ||
|
|
b72a4c5aa9 | ||
|
|
857a9f3efc | ||
|
|
ce53128657 | ||
|
|
d9211053ce | ||
|
|
e8316178a0 | ||
|
|
bf763d2cf4 | ||
|
|
eba5b34982 | ||
|
|
afb8b3dd6b | ||
|
|
c5fa94894b | ||
|
|
4137758caa | ||
|
|
3578d16e9e | ||
|
|
3ef263a5cc | ||
|
|
510460c966 | ||
|
|
f74ecc53ae | ||
|
|
c4121073ad | ||
|
|
9ded2641a7 | ||
|
|
295ca68d02 | ||
|
|
27f53f262b | ||
|
|
3fc16cb414 | ||
|
|
90db25d732 | ||
|
|
bbb359470e | ||
|
|
319652c7c7 | ||
|
|
c9c271fee8 | ||
|
|
ca0755e92b | ||
|
|
acd38597f6 | ||
|
|
f4a5a80f3c | ||
|
|
c45d00fee8 | ||
|
|
ffae59fa1c | ||
|
|
b697178f68 | ||
|
|
83ade5eecb | ||
|
|
6973b92c4a | ||
|
|
6261f8a778 | ||
|
|
6048493ac6 | ||
|
|
1cbd715235 | ||
|
|
703fcbccd6 | ||
|
|
2f9cbec07e | ||
|
|
9f0b22d3e9 | ||
|
|
ab5907c09c | ||
|
|
fae0b168f6 | ||
|
|
f18e98a63e | ||
|
|
3524886d5d | ||
|
|
fb44eea06c | ||
|
|
3ea4c757e6 | ||
|
|
cfb8d79049 | ||
|
|
1ea86da7af | ||
|
|
e289f2dba2 | ||
|
|
7f64cd1801 | ||
|
|
d4526e1ed2 | ||
|
|
34f42216c8 | ||
|
|
a26a24a8ad | ||
|
|
4530fd4164 | ||
|
|
9156b8f48c | ||
|
|
6212109fc1 | ||
|
|
c22a080e23 | ||
|
|
834a7109f9 | ||
|
|
7cbf32202d | ||
|
|
d0b9380dca | ||
|
|
13fd9be566 | ||
|
|
53a9aa6ad2 | ||
|
|
c2ce4aca1b | ||
|
|
567f6d7cc0 | ||
|
|
489c0b27f9 | ||
|
|
343f988584 | ||
|
|
7f676c56c8 | ||
|
|
3c0ca7026f | ||
|
|
2ceba11aa7 | ||
|
|
6db5e0b27c | ||
|
|
dfecb801db | ||
|
|
c10bfe3db2 | ||
|
|
20fb2c99bc | ||
|
|
b4a0b5c58b | ||
|
|
faa46c2a21 | ||
|
|
9691199ae8 | ||
|
|
f736381933 | ||
|
|
f44e5b3b7a | ||
|
|
6e24bf5f8c | ||
|
|
8fb43e31c5 | ||
|
|
0860c80e51 | ||
|
|
66fc25756f | ||
|
|
f008e240cd | ||
|
|
8d7e95d6e9 | ||
|
|
3e3ce543a8 | ||
|
|
6c447a82f1 | ||
|
|
64a0918ff1 | ||
|
|
9274223701 | ||
|
|
1368c18844 | ||
|
|
67a60a7557 | ||
|
|
3d5fd47748 | ||
|
|
b9a18807ae | ||
|
|
088c0b6321 | ||
|
|
ecee11a24c | ||
|
|
ec8df7ce57 | ||
|
|
4159fd2ffb | ||
|
|
1a1d21bbb3 | ||
|
|
be1045fed9 | ||
|
|
e43773c712 | ||
|
|
30d69dadbb | ||
|
|
b138438036 | ||
|
|
d649211330 | ||
|
|
6cf211a9ad | ||
|
|
3388e5e8a4 | ||
|
|
5d497a1908 | ||
|
|
c820646fb6 | ||
|
|
5870f6f734 | ||
|
|
6732150121 | ||
|
|
1dead8b080 | ||
|
|
d547aa8ebd | ||
|
|
1da889e420 | ||
|
|
5d0a308d1d | ||
|
|
f9886d52da | ||
|
|
4f8e48b7d4 | ||
|
|
258e07c2ca | ||
|
|
cc32c50665 | ||
|
|
ec1d91f73e | ||
|
|
eb2f429964 | ||
|
|
1ad067309d | ||
|
|
48ce7df43a | ||
|
|
6555e2c440 | ||
|
|
a05191e112 | ||
|
|
b8eeee1d5d | ||
|
|
4aa87f3fa5 | ||
|
|
40c37d923b | ||
|
|
5a5837b8ed | ||
|
|
1e0b521070 | ||
|
|
35ed58cc5e | ||
|
|
c4a1579197 | ||
|
|
e471706422 | ||
|
|
d78b7350b5 | ||
|
|
47b29d5a49 | ||
|
|
e5946a51d1 | ||
|
|
a88798cc22 | ||
|
|
6fbd32523a | ||
|
|
94b1cc2bdd | ||
|
|
0ed5c8f0ae | ||
|
|
5f883f552b | ||
|
|
9db99ab4a5 | ||
|
|
287214c2b2 | ||
|
|
317a020841 | ||
|
|
b50e3aec5f | ||
|
|
21a9e0e2a7 | ||
|
|
7775df8ef1 | ||
|
|
53f9b5d131 | ||
|
|
bf4d4a4742 | ||
|
|
0dbbe7104d | ||
|
|
561ef00680 | ||
|
|
85428fa72e | ||
|
|
29d0593b86 | ||
|
|
ac524dd799 | ||
|
|
aba8b764b6 | ||
|
|
a9050e0f41 | ||
|
|
15ef84e238 | ||
|
|
09096fef5b | ||
|
|
19b08e1019 | ||
|
|
06d67642dd | ||
|
|
ceb6c450c0 | ||
|
|
a745e42cf5 | ||
|
|
462d6a4450 | ||
|
|
a2e39c5e2e | ||
|
|
ec899be3b5 | ||
|
|
c79ebfdd0a | ||
|
|
cd95e6c552 | ||
|
|
f676145302 | ||
|
|
0847267069 | ||
|
|
6fe1da1587 | ||
|
|
7ab555d869 | ||
|
|
4a35e9e60d | ||
|
|
5e8dfdfd9b | ||
|
|
278a1b8ab3 | ||
|
|
6cb03dded1 | ||
|
|
4752ec1b67 | ||
|
|
e641371544 | ||
|
|
8d9a7e9af1 | ||
|
|
2b8e9bf887 | ||
|
|
a16c55c679 | ||
|
|
9a72c40149 | ||
|
|
8424fc4c19 | ||
|
|
9f29a047a7 | ||
|
|
af4904ce8d | ||
|
|
6f2a323063 | ||
|
|
1da8ecfaac | ||
|
|
1ba386bbd2 | ||
|
|
d00d791cda | ||
|
|
51aabd7b21 | ||
|
|
7840c3bdc8 | ||
|
|
07d13002b0 | ||
|
|
23abe2ba06 | ||
|
|
4d73821d21 | ||
|
|
f2687cf807 | ||
|
|
8abce0d4cf | ||
|
|
62ad3848c4 | ||
|
|
521e9969f1 | ||
|
|
5ee1ceced3 |
@@ -1,6 +1,6 @@
|
||||
module.exports = {
|
||||
arrowParens: 'avoid',
|
||||
jsxSingleQuote: true,
|
||||
semi: false,
|
||||
singleQuote: true,
|
||||
trailingComma: 'es5',
|
||||
}
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
language: node_js
|
||||
node_js:
|
||||
#- stable # disable for now due to an issue of indirect dep upath with Node 9
|
||||
- 8
|
||||
- 12
|
||||
|
||||
# Use containers.
|
||||
# http://docs.travis-ci.com/user/workers/container-based-infrastructure/
|
||||
|
||||
46
@vates/coalesce-calls/README.md
Normal file
46
@vates/coalesce-calls/README.md
Normal file
@@ -0,0 +1,46 @@
|
||||
<!-- DO NOT EDIT MANUALLY, THIS FILE HAS BEEN GENERATED -->
|
||||
|
||||
# @vates/coalesce-calls
|
||||
|
||||
[](https://npmjs.org/package/@vates/coalesce-calls)  [](https://bundlephobia.com/result?p=@vates/coalesce-calls) [](https://npmjs.org/package/@vates/coalesce-calls)
|
||||
|
||||
> Wraps an async function so that concurrent calls will be coalesced
|
||||
|
||||
## Install
|
||||
|
||||
Installation of the [npm package](https://npmjs.org/package/@vates/coalesce-calls):
|
||||
|
||||
```
|
||||
> npm install --save @vates/coalesce-calls
|
||||
```
|
||||
|
||||
## Usage
|
||||
|
||||
```js
|
||||
import { coalesceCalls } from '@vates/coalesce-calls'
|
||||
|
||||
const connect = coalesceCalls(async function () {
|
||||
// async operation
|
||||
})
|
||||
|
||||
connect()
|
||||
|
||||
// the previous promise result will be returned if the operation is not
|
||||
// complete yet
|
||||
connect()
|
||||
```
|
||||
|
||||
## Contributions
|
||||
|
||||
Contributions are _very_ welcomed, either on the documentation or on
|
||||
the code.
|
||||
|
||||
You may:
|
||||
|
||||
- report any [issue](https://github.com/vatesfr/xen-orchestra/issues)
|
||||
you've encountered;
|
||||
- fork and create a pull request.
|
||||
|
||||
## License
|
||||
|
||||
[ISC](https://spdx.org/licenses/ISC) © [Vates SAS](https://vates.fr)
|
||||
13
@vates/coalesce-calls/USAGE.md
Normal file
13
@vates/coalesce-calls/USAGE.md
Normal file
@@ -0,0 +1,13 @@
|
||||
```js
|
||||
import { coalesceCalls } from '@vates/coalesce-calls'
|
||||
|
||||
const connect = coalesceCalls(async function () {
|
||||
// async operation
|
||||
})
|
||||
|
||||
connect()
|
||||
|
||||
// the previous promise result will be returned if the operation is not
|
||||
// complete yet
|
||||
connect()
|
||||
```
|
||||
14
@vates/coalesce-calls/index.js
Normal file
14
@vates/coalesce-calls/index.js
Normal file
@@ -0,0 +1,14 @@
|
||||
exports.coalesceCalls = function (fn) {
|
||||
let promise
|
||||
const clean = () => {
|
||||
promise = undefined
|
||||
}
|
||||
return function () {
|
||||
if (promise !== undefined) {
|
||||
return promise
|
||||
}
|
||||
promise = fn.apply(this, arguments)
|
||||
promise.then(clean, clean)
|
||||
return promise
|
||||
}
|
||||
}
|
||||
33
@vates/coalesce-calls/index.spec.js
Normal file
33
@vates/coalesce-calls/index.spec.js
Normal file
@@ -0,0 +1,33 @@
|
||||
/* eslint-env jest */

const { coalesceCalls } = require('./')

// Builds a manually controlled promise with its resolve/reject exposed.
const pDefer = () => {
  const deferred = {}
  deferred.promise = new Promise((resolve, reject) => {
    deferred.reject = reject
    deferred.resolve = resolve
  })
  return deferred
}

describe('coalesceCalls', () => {
  it('decorates an async function', async () => {
    // The wrapped function returns the promise it receives, letting the test
    // decide exactly when each underlying call settles.
    const coalesced = coalesceCalls(promise => promise)

    // A second call made while the first is pending shares its result.
    const first = pDefer()
    const call1 = coalesced(first.promise)
    const second = pDefer()
    const call2 = coalesced(second.promise)

    first.resolve('foo')
    expect(await call1).toBe('foo')
    expect(await call2).toBe('foo')

    // After the previous call settled, a new call invokes the function again.
    const third = pDefer()
    const call3 = coalesced(third.promise)

    third.resolve('bar')
    expect(await call3).toBe('bar')
  })
})
|
||||
38
@vates/coalesce-calls/package.json
Normal file
38
@vates/coalesce-calls/package.json
Normal file
@@ -0,0 +1,38 @@
|
||||
{
|
||||
"private": false,
|
||||
"name": "@vates/coalesce-calls",
|
||||
"description": "Wraps an async function so that concurrent calls will be coalesced",
|
||||
"keywords": [
|
||||
"async",
|
||||
"calls",
|
||||
"coalesce",
|
||||
"decorate",
|
||||
"decorator",
|
||||
"merge",
|
||||
"promise",
|
||||
"wrap",
|
||||
"wrapper"
|
||||
],
|
||||
"homepage": "https://github.com/vatesfr/xen-orchestra/tree/master/@vates/coalesce-calls",
|
||||
"bugs": "https://github.com/vatesfr/xen-orchestra/issues",
|
||||
"repository": {
|
||||
"directory": "@vates/coalesce-calls",
|
||||
"type": "git",
|
||||
"url": "https://github.com/vatesfr/xen-orchestra.git"
|
||||
},
|
||||
"files": [
|
||||
"index.js"
|
||||
],
|
||||
"author": {
|
||||
"name": "Vates SAS",
|
||||
"url": "https://vates.fr"
|
||||
},
|
||||
"license": "ISC",
|
||||
"version": "0.1.0",
|
||||
"engines": {
|
||||
"node": ">=8.10"
|
||||
},
|
||||
"scripts": {
|
||||
"postversion": "npm publish --access public"
|
||||
}
|
||||
}
|
||||
45
@vates/decorate-with/README.md
Normal file
45
@vates/decorate-with/README.md
Normal file
@@ -0,0 +1,45 @@
|
||||
<!-- DO NOT EDIT MANUALLY, THIS FILE HAS BEEN GENERATED -->
|
||||
|
||||
# @vates/decorate-with
|
||||
|
||||
[](https://npmjs.org/package/@vates/decorate-with)  [](https://bundlephobia.com/result?p=@vates/decorate-with) [](https://npmjs.org/package/@vates/decorate-with)
|
||||
|
||||
> Creates a decorator from a function wrapper
|
||||
|
||||
## Install
|
||||
|
||||
Installation of the [npm package](https://npmjs.org/package/@vates/decorate-with):
|
||||
|
||||
```
|
||||
> npm install --save @vates/decorate-with
|
||||
```
|
||||
|
||||
## Usage
|
||||
|
||||
For instance, allows using Lodash's functions as decorators:
|
||||
|
||||
```js
|
||||
import { decorateWith } from '@vates/decorate-with'
|
||||
|
||||
class Foo {
|
||||
@decorateWith(lodash.debounce, 150)
|
||||
bar() {
|
||||
// body
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
## Contributions
|
||||
|
||||
Contributions are _very_ welcomed, either on the documentation or on
|
||||
the code.
|
||||
|
||||
You may:
|
||||
|
||||
- report any [issue](https://github.com/vatesfr/xen-orchestra/issues)
|
||||
you've encountered;
|
||||
- fork and create a pull request.
|
||||
|
||||
## License
|
||||
|
||||
[ISC](https://spdx.org/licenses/ISC) © [Vates SAS](https://vates.fr)
|
||||
12
@vates/decorate-with/USAGE.md
Normal file
12
@vates/decorate-with/USAGE.md
Normal file
@@ -0,0 +1,12 @@
|
||||
For instance, allows using Lodash's functions as decorators:
|
||||
|
||||
```js
|
||||
import { decorateWith } from '@vates/decorate-with'
|
||||
|
||||
class Foo {
|
||||
@decorateWith(lodash.debounce, 150)
|
||||
bar() {
|
||||
// body
|
||||
}
|
||||
}
|
||||
```
|
||||
4
@vates/decorate-with/index.js
Normal file
4
@vates/decorate-with/index.js
Normal file
@@ -0,0 +1,4 @@
|
||||
exports.decorateWith = (fn, ...args) => (target, name, descriptor) => ({
|
||||
...descriptor,
|
||||
value: fn(descriptor.value, ...args),
|
||||
})
|
||||
30
@vates/decorate-with/package.json
Normal file
30
@vates/decorate-with/package.json
Normal file
@@ -0,0 +1,30 @@
|
||||
{
|
||||
"private": false,
|
||||
"name": "@vates/decorate-with",
|
||||
"description": "Creates a decorator from a function wrapper",
|
||||
"keywords": [
|
||||
"apply",
|
||||
"decorator",
|
||||
"factory",
|
||||
"wrapper"
|
||||
],
|
||||
"homepage": "https://github.com/vatesfr/xen-orchestra/tree/master/@vates/decorate-with",
|
||||
"bugs": "https://github.com/vatesfr/xen-orchestra/issues",
|
||||
"repository": {
|
||||
"directory": "@vates/decorate-with",
|
||||
"type": "git",
|
||||
"url": "https://github.com/vatesfr/xen-orchestra.git"
|
||||
},
|
||||
"author": {
|
||||
"name": "Vates SAS",
|
||||
"url": "https://vates.fr"
|
||||
},
|
||||
"license": "ISC",
|
||||
"version": "0.0.1",
|
||||
"engines": {
|
||||
"node": ">=8.10"
|
||||
},
|
||||
"scripts": {
|
||||
"postversion": "npm publish --access public"
|
||||
}
|
||||
}
|
||||
47
@vates/parse-duration/README.md
Normal file
47
@vates/parse-duration/README.md
Normal file
@@ -0,0 +1,47 @@
|
||||
<!-- DO NOT EDIT MANUALLY, THIS FILE HAS BEEN GENERATED -->
|
||||
|
||||
# @vates/parse-duration
|
||||
|
||||
[](https://npmjs.org/package/@vates/parse-duration)  [](https://bundlephobia.com/result?p=@vates/parse-duration) [](https://npmjs.org/package/@vates/parse-duration)
|
||||
|
||||
> Small wrapper around ms to parse a duration
|
||||
|
||||
## Install
|
||||
|
||||
Installation of the [npm package](https://npmjs.org/package/@vates/parse-duration):
|
||||
|
||||
```
|
||||
> npm install --save @vates/parse-duration
|
||||
```
|
||||
|
||||
## Usage
|
||||
|
||||
`ms` without magic: always parses a duration and throws if invalid.
|
||||
|
||||
```js
|
||||
import { parseDuration } from '@vates/parse-duration'
|
||||
|
||||
parseDuration('2 days')
|
||||
// 172800000
|
||||
|
||||
parseDuration(172800000)
|
||||
// 172800000
|
||||
|
||||
parseDuration(undefined)
|
||||
// throws TypeError('not a valid duration: undefined')
|
||||
```
|
||||
|
||||
## Contributions
|
||||
|
||||
Contributions are _very_ welcomed, either on the documentation or on
|
||||
the code.
|
||||
|
||||
You may:
|
||||
|
||||
- report any [issue](https://github.com/vatesfr/xen-orchestra/issues)
|
||||
you've encountered;
|
||||
- fork and create a pull request.
|
||||
|
||||
## License
|
||||
|
||||
[AGPL-3.0-or-later](https://spdx.org/licenses/AGPL-3.0-or-later) © [Vates SAS](https://vates.fr)
|
||||
14
@vates/parse-duration/USAGE.md
Normal file
14
@vates/parse-duration/USAGE.md
Normal file
@@ -0,0 +1,14 @@
|
||||
`ms` without magic: always parses a duration and throws if invalid.
|
||||
|
||||
```js
|
||||
import { parseDuration } from '@vates/parse-duration'
|
||||
|
||||
parseDuration('2 days')
|
||||
// 172800000
|
||||
|
||||
parseDuration(172800000)
|
||||
// 172800000
|
||||
|
||||
parseDuration(undefined)
|
||||
// throws TypeError('not a valid duration: undefined')
|
||||
```
|
||||
@@ -1,6 +1,6 @@
|
||||
import ms from 'ms'
|
||||
const ms = require('ms')
|
||||
|
||||
export default value => {
|
||||
exports.parseDuration = value => {
|
||||
if (typeof value === 'number') {
|
||||
return value
|
||||
}
|
||||
32
@vates/parse-duration/package.json
Normal file
32
@vates/parse-duration/package.json
Normal file
@@ -0,0 +1,32 @@
|
||||
{
|
||||
"private": false,
|
||||
"name": "@vates/parse-duration",
|
||||
"description": "Small wrapper around ms to parse a duration",
|
||||
"keywords": [
|
||||
"duration",
|
||||
"ms",
|
||||
"parse"
|
||||
],
|
||||
"homepage": "https://github.com/vatesfr/xen-orchestra/tree/master/@vates/parse-duration",
|
||||
"bugs": "https://github.com/vatesfr/xen-orchestra/issues",
|
||||
"repository": {
|
||||
"directory": "@vates/parse-duration",
|
||||
"type": "git",
|
||||
"url": "https://github.com/vatesfr/xen-orchestra.git"
|
||||
},
|
||||
"author": {
|
||||
"name": "Vates SAS",
|
||||
"url": "https://vates.fr"
|
||||
},
|
||||
"license": "AGPL-3.0-or-later",
|
||||
"version": "0.1.0",
|
||||
"engines": {
|
||||
"node": ">=8.10"
|
||||
},
|
||||
"dependencies": {
|
||||
"ms": "^2.1.2"
|
||||
},
|
||||
"scripts": {
|
||||
"postversion": "npm publish --access public"
|
||||
}
|
||||
}
|
||||
46
@vates/read-chunk/README.md
Normal file
46
@vates/read-chunk/README.md
Normal file
@@ -0,0 +1,46 @@
|
||||
<!-- DO NOT EDIT MANUALLY, THIS FILE HAS BEEN GENERATED -->
|
||||
|
||||
# @vates/read-chunk
|
||||
|
||||
[](https://npmjs.org/package/@vates/read-chunk)  [](https://bundlephobia.com/result?p=@vates/read-chunk) [](https://npmjs.org/package/@vates/read-chunk)
|
||||
|
||||
> Read a chunk of a Node stream
|
||||
|
||||
## Install
|
||||
|
||||
Installation of the [npm package](https://npmjs.org/package/@vates/read-chunk):
|
||||
|
||||
```
|
||||
> npm install --save @vates/read-chunk
|
||||
```
|
||||
|
||||
## Usage
|
||||
|
||||
- returns the next available chunk of data
|
||||
- like `stream.read()`, a number of bytes can be specified
|
||||
- returns `null` if the stream has ended
|
||||
|
||||
```js
|
||||
import { readChunk } from '@vates/read-chunk'
|
||||
;(async () => {
|
||||
let chunk
|
||||
while ((chunk = await readChunk(stream, 1024)) !== null) {
|
||||
// do something with chunk
|
||||
}
|
||||
})()
|
||||
```
|
||||
|
||||
## Contributions
|
||||
|
||||
Contributions are _very_ welcomed, either on the documentation or on
|
||||
the code.
|
||||
|
||||
You may:
|
||||
|
||||
- report any [issue](https://github.com/vatesfr/xen-orchestra/issues)
|
||||
you've encountered;
|
||||
- fork and create a pull request.
|
||||
|
||||
## License
|
||||
|
||||
[ISC](https://spdx.org/licenses/ISC) © [Vates SAS](https://vates.fr)
|
||||
13
@vates/read-chunk/USAGE.md
Normal file
13
@vates/read-chunk/USAGE.md
Normal file
@@ -0,0 +1,13 @@
|
||||
- returns the next available chunk of data
|
||||
- like `stream.read()`, a number of bytes can be specified
|
||||
- returns `null` if the stream has ended
|
||||
|
||||
```js
|
||||
import { readChunk } from '@vates/read-chunk'
|
||||
;(async () => {
|
||||
let chunk
|
||||
while ((chunk = await readChunk(stream, 1024)) !== null) {
|
||||
// do something with chunk
|
||||
}
|
||||
})()
|
||||
```
|
||||
27
@vates/read-chunk/index.js
Normal file
27
@vates/read-chunk/index.js
Normal file
@@ -0,0 +1,27 @@
|
||||
// Reads the next chunk of data from a Node readable stream.
//
// Resolves with the chunk as soon as one is available, with `null` once the
// stream has ended, and rejects on stream error. Like `stream.read()`, an
// optional number of bytes can be requested via `size`.
exports.readChunk = (stream, size) =>
  new Promise((resolve, reject) => {
    // Detach every handler once the promise has settled.
    const removeListeners = () => {
      stream.removeListener('end', onEnd)
      stream.removeListener('error', onError)
      stream.removeListener('readable', onReadable)
    }

    // Stream ended before any data became available.
    const onEnd = () => {
      resolve(null)
      removeListeners()
    }

    // Propagate stream errors to the caller.
    const onError = error => {
      reject(error)
      removeListeners()
    }

    // Attempt to pull a chunk; `read` yields null while the internal buffer
    // does not yet hold the requested amount of data.
    const onReadable = () => {
      const chunk = stream.read(size)
      if (chunk !== null) {
        resolve(chunk)
        removeListeners()
      }
    }

    stream.on('end', onEnd)
    stream.on('error', onError)
    stream.on('readable', onReadable)

    // Data may already be buffered: try an immediate read.
    onReadable()
  })
|
||||
33
@vates/read-chunk/package.json
Normal file
33
@vates/read-chunk/package.json
Normal file
@@ -0,0 +1,33 @@
|
||||
{
|
||||
"private": false,
|
||||
"name": "@vates/read-chunk",
|
||||
"homepage": "https://github.com/vatesfr/xen-orchestra/tree/master/@vates/read-chunk",
|
||||
"description": "Read a chunk of a Node stream",
|
||||
"license": "ISC",
|
||||
"keywords": [
|
||||
"async",
|
||||
"chunk",
|
||||
"data",
|
||||
"node",
|
||||
"promise",
|
||||
"read",
|
||||
"stream"
|
||||
],
|
||||
"bugs": "https://github.com/vatesfr/xen-orchestra/issues",
|
||||
"repository": {
|
||||
"directory": "@vates/read-chunk",
|
||||
"type": "git",
|
||||
"url": "https://github.com/vatesfr/xen-orchestra.git"
|
||||
},
|
||||
"version": "0.1.1",
|
||||
"engines": {
|
||||
"node": ">=8.10"
|
||||
},
|
||||
"scripts": {
|
||||
"postversion": "npm publish --access public"
|
||||
},
|
||||
"author": {
|
||||
"name": "Vates SAS",
|
||||
"url": "https://vates.fr"
|
||||
}
|
||||
}
|
||||
@@ -1,6 +1,10 @@
|
||||
# @xen-orchestra/async-map [](https://travis-ci.org/vatesfr/xen-orchestra)
|
||||
<!-- DO NOT EDIT MANUALLY, THIS FILE HAS BEEN GENERATED -->
|
||||
|
||||
> ${pkg.description}
|
||||
# @xen-orchestra/async-map
|
||||
|
||||
[](https://npmjs.org/package/@xen-orchestra/async-map)  [](https://bundlephobia.com/result?p=@xen-orchestra/async-map) [](https://npmjs.org/package/@xen-orchestra/async-map)
|
||||
|
||||
> Similar to Promise.all + lodash.map but wait for all promises to be settled
|
||||
|
||||
## Install
|
||||
|
||||
@@ -12,25 +16,10 @@ Installation of the [npm package](https://npmjs.org/package/@xen-orchestra/async
|
||||
|
||||
## Usage
|
||||
|
||||
**TODO**
|
||||
```js
|
||||
import asyncMap from '@xen-orchestra/async-map'
|
||||
|
||||
## Development
|
||||
|
||||
```
|
||||
# Install dependencies
|
||||
> yarn
|
||||
|
||||
# Run the tests
|
||||
> yarn test
|
||||
|
||||
# Continuously compile
|
||||
> yarn dev
|
||||
|
||||
# Continuously run the tests
|
||||
> yarn dev-test
|
||||
|
||||
# Build for production (automatically called by npm install)
|
||||
> yarn build
|
||||
const array = await asyncMap(collection, iteratee)
|
||||
```
|
||||
|
||||
## Contributions
|
||||
@@ -46,4 +35,4 @@ You may:
|
||||
|
||||
## License
|
||||
|
||||
ISC © [Vates SAS](https://vates.fr)
|
||||
[ISC](https://spdx.org/licenses/ISC) © [Vates SAS](https://vates.fr)
|
||||
|
||||
5
@xen-orchestra/async-map/USAGE.md
Normal file
5
@xen-orchestra/async-map/USAGE.md
Normal file
@@ -0,0 +1,5 @@
|
||||
```js
|
||||
import asyncMap from '@xen-orchestra/async-map'
|
||||
|
||||
const array = await asyncMap(collection, iteratee)
|
||||
```
|
||||
@@ -3,7 +3,7 @@
|
||||
"name": "@xen-orchestra/async-map",
|
||||
"version": "0.0.0",
|
||||
"license": "ISC",
|
||||
"description": "",
|
||||
"description": "Similar to Promise.all + lodash.map but wait for all promises to be settled",
|
||||
"keywords": [],
|
||||
"homepage": "https://github.com/vatesfr/xen-orchestra/tree/master/@xen-orchestra/async-map",
|
||||
"bugs": "https://github.com/vatesfr/xen-orchestra/issues",
|
||||
@@ -13,8 +13,8 @@
|
||||
"url": "https://github.com/vatesfr/xen-orchestra.git"
|
||||
},
|
||||
"author": {
|
||||
"name": "Julien Fontanet",
|
||||
"email": "julien.fontanet@isonoe.net"
|
||||
"name": "Vates SAS",
|
||||
"url": "https://vates.fr"
|
||||
},
|
||||
"preferGlobal": false,
|
||||
"main": "dist/",
|
||||
@@ -37,7 +37,7 @@
|
||||
"@babel/preset-env": "^7.0.0",
|
||||
"@babel/preset-flow": "^7.0.0",
|
||||
"babel-plugin-lodash": "^3.3.2",
|
||||
"cross-env": "^6.0.3",
|
||||
"cross-env": "^7.0.2",
|
||||
"rimraf": "^3.0.0"
|
||||
},
|
||||
"scripts": {
|
||||
@@ -46,7 +46,6 @@
|
||||
"dev": "cross-env NODE_ENV=development babel --watch --source-maps --out-dir=dist/ src/",
|
||||
"prebuild": "yarn run clean",
|
||||
"predev": "yarn run prebuild",
|
||||
"prepare": "yarn run build",
|
||||
"prepublishOnly": "yarn run build",
|
||||
"postversion": "npm publish"
|
||||
}
|
||||
|
||||
28
@xen-orchestra/audit-core/README.md
Normal file
28
@xen-orchestra/audit-core/README.md
Normal file
@@ -0,0 +1,28 @@
|
||||
<!-- DO NOT EDIT MANUALLY, THIS FILE HAS BEEN GENERATED -->
|
||||
|
||||
# @xen-orchestra/audit-core
|
||||
|
||||
[](https://npmjs.org/package/@xen-orchestra/audit-core)  [](https://bundlephobia.com/result?p=@xen-orchestra/audit-core) [](https://npmjs.org/package/@xen-orchestra/audit-core)
|
||||
|
||||
## Install
|
||||
|
||||
Installation of the [npm package](https://npmjs.org/package/@xen-orchestra/audit-core):
|
||||
|
||||
```
|
||||
> npm install --save @xen-orchestra/audit-core
|
||||
```
|
||||
|
||||
## Contributions
|
||||
|
||||
Contributions are _very_ welcomed, either on the documentation or on
|
||||
the code.
|
||||
|
||||
You may:
|
||||
|
||||
- report any [issue](https://github.com/vatesfr/xen-orchestra/issues)
|
||||
you've encountered;
|
||||
- fork and create a pull request.
|
||||
|
||||
## License
|
||||
|
||||
[AGPL-3.0-or-later](https://spdx.org/licenses/AGPL-3.0-or-later) © [Vates SAS](https://vates.fr)
|
||||
0
@xen-orchestra/audit-core/USAGE.md
Normal file
0
@xen-orchestra/audit-core/USAGE.md
Normal file
@@ -26,15 +26,20 @@
|
||||
"@babel/plugin-proposal-decorators": "^7.8.0",
|
||||
"@babel/plugin-proposal-nullish-coalescing-operator": "^7.8.0",
|
||||
"@babel/preset-env": "^7.7.4",
|
||||
"@babel/preset-typescript": "^7.7.4",
|
||||
"cross": "^1.0.0",
|
||||
"rimraf": "^3.0.0"
|
||||
},
|
||||
"dependencies": {
|
||||
"@xen-orchestra/log": "^0.2.0",
|
||||
"core-js": "^3.6.4",
|
||||
"golike-defer": "^0.4.1",
|
||||
"lodash": "^4.17.15",
|
||||
"object-hash": "^2.0.1"
|
||||
},
|
||||
"private": false
|
||||
"private": false,
|
||||
"license": "AGPL-3.0-or-later",
|
||||
"author": {
|
||||
"name": "Vates SAS",
|
||||
"url": "https://vates.fr"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -2,9 +2,12 @@
|
||||
import 'core-js/features/symbol/async-iterator'
|
||||
|
||||
import assert from 'assert'
|
||||
import createLogger from '@xen-orchestra/log'
|
||||
import defer from 'golike-defer'
|
||||
import hash from 'object-hash'
|
||||
|
||||
const log = createLogger('xo:audit-core')
|
||||
|
||||
export class Storage {
|
||||
constructor() {
|
||||
this._lock = Promise.resolve()
|
||||
@@ -25,7 +28,7 @@ export class Storage {
|
||||
//
|
||||
// http://man7.org/linux/man-pages/man3/crypt.3.html#NOTES
|
||||
const ID_TO_ALGORITHM = {
|
||||
'5': 'sha256',
|
||||
5: 'sha256',
|
||||
}
|
||||
|
||||
export class AlteredRecordError extends Error {
|
||||
@@ -65,8 +68,17 @@ export class AuditCore {
|
||||
@defer
|
||||
async add($defer, subject, event, data) {
|
||||
const time = Date.now()
|
||||
$defer(await this._storage.acquireLock())
|
||||
return this._addUnsafe({
|
||||
data,
|
||||
event,
|
||||
subject,
|
||||
time,
|
||||
})
|
||||
}
|
||||
|
||||
async _addUnsafe({ data, event, subject, time }) {
|
||||
const storage = this._storage
|
||||
$defer(await storage.acquireLock())
|
||||
|
||||
// delete "undefined" properties and normalize data with JSON.stringify
|
||||
const record = JSON.parse(
|
||||
@@ -139,4 +151,45 @@ export class AuditCore {
|
||||
await this._storage.del(id)
|
||||
}
|
||||
}
|
||||
|
||||
@defer
|
||||
async deleteRangeAndRewrite($defer, newest, oldest) {
|
||||
assert.notStrictEqual(newest, undefined)
|
||||
assert.notStrictEqual(oldest, undefined)
|
||||
|
||||
const storage = this._storage
|
||||
$defer(await storage.acquireLock())
|
||||
|
||||
assert.notStrictEqual(await storage.get(newest), undefined)
|
||||
const oldestRecord = await storage.get(oldest)
|
||||
assert.notStrictEqual(oldestRecord, undefined)
|
||||
|
||||
const lastId = await storage.getLastId()
|
||||
const recentRecords = []
|
||||
for await (const record of this.getFrom(lastId)) {
|
||||
if (record.id === newest) {
|
||||
break
|
||||
}
|
||||
|
||||
recentRecords.push(record)
|
||||
}
|
||||
|
||||
for await (const record of this.getFrom(newest)) {
|
||||
await storage.del(record.id)
|
||||
if (record.id === oldest) {
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
await storage.setLastId(oldestRecord.previousId)
|
||||
|
||||
for (const record of recentRecords) {
|
||||
try {
|
||||
await this._addUnsafe(record)
|
||||
await storage.del(record.id)
|
||||
} catch (error) {
|
||||
log.error(error)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -17,9 +17,10 @@ interface Record {
|
||||
}
|
||||
|
||||
export class AuditCore {
|
||||
constructor(storage: Storage) {}
|
||||
public add(subject: any, event: string, data: any): Promise<Record> {}
|
||||
public checkIntegrity(oldest: string, newest: string): Promise<number> {}
|
||||
public getFrom(newest?: string): AsyncIterator {}
|
||||
public deleteFrom(newest: string): Promise<void> {}
|
||||
constructor(storage: Storage) { }
|
||||
public add(subject: any, event: string, data: any): Promise<Record> { }
|
||||
public checkIntegrity(oldest: string, newest: string): Promise<number> { }
|
||||
public getFrom(newest?: string): AsyncIterator { }
|
||||
public deleteFrom(newest: string): Promise<void> { }
|
||||
public deleteRangeAndRewrite(newest: string, oldest: string): Promise<void> { }
|
||||
}
|
||||
|
||||
18
@xen-orchestra/babel-config/README.md
Normal file
18
@xen-orchestra/babel-config/README.md
Normal file
@@ -0,0 +1,18 @@
|
||||
<!-- DO NOT EDIT MANUALLY, THIS FILE HAS BEEN GENERATED -->
|
||||
|
||||
# @xen-orchestra/babel-config
|
||||
|
||||
## Contributions
|
||||
|
||||
Contributions are _very_ welcomed, either on the documentation or on
|
||||
the code.
|
||||
|
||||
You may:
|
||||
|
||||
- report any [issue](https://github.com/vatesfr/xen-orchestra/issues)
|
||||
you've encountered;
|
||||
- fork and create a pull request.
|
||||
|
||||
## License
|
||||
|
||||
[AGPL-3.0-or-later](https://spdx.org/licenses/AGPL-3.0-or-later) © [Vates SAS](https://vates.fr)
|
||||
0
@xen-orchestra/babel-config/USAGE.md
Normal file
0
@xen-orchestra/babel-config/USAGE.md
Normal file
@@ -32,7 +32,6 @@ const configs = {
|
||||
}
|
||||
return { browsers: pkg.browserslist, node }
|
||||
})(),
|
||||
useBuiltIns: '@babel/polyfill' in (pkg.dependencies || {}) && 'usage',
|
||||
}
|
||||
},
|
||||
}
|
||||
@@ -52,7 +51,7 @@ const pluginsOrder = [
|
||||
'@babel/plugin-proposal-class-properties',
|
||||
]
|
||||
|
||||
module.exports = function(pkg, plugins, presets) {
|
||||
module.exports = function (pkg, plugins, presets) {
|
||||
plugins === undefined && (plugins = {})
|
||||
presets === undefined && (presets = {})
|
||||
|
||||
|
||||
@@ -11,5 +11,10 @@
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=6"
|
||||
},
|
||||
"license": "AGPL-3.0-or-later",
|
||||
"author": {
|
||||
"name": "Vates SAS",
|
||||
"url": "https://vates.fr"
|
||||
}
|
||||
}
|
||||
|
||||
28
@xen-orchestra/backups-cli/README.md
Normal file
28
@xen-orchestra/backups-cli/README.md
Normal file
@@ -0,0 +1,28 @@
|
||||
<!-- DO NOT EDIT MANUALLY, THIS FILE HAS BEEN GENERATED -->
|
||||
|
||||
# @xen-orchestra/backups-cli
|
||||
|
||||
[](https://npmjs.org/package/@xen-orchestra/backups-cli)  [](https://bundlephobia.com/result?p=@xen-orchestra/backups-cli) [](https://npmjs.org/package/@xen-orchestra/backups-cli)
|
||||
|
||||
## Install
|
||||
|
||||
Installation of the [npm package](https://npmjs.org/package/@xen-orchestra/backups-cli):
|
||||
|
||||
```
|
||||
> npm install --global @xen-orchestra/backups-cli
|
||||
```
|
||||
|
||||
## Contributions
|
||||
|
||||
Contributions are _very_ welcomed, either on the documentation or on
|
||||
the code.
|
||||
|
||||
You may:
|
||||
|
||||
- report any [issue](https://github.com/vatesfr/xen-orchestra/issues)
|
||||
you've encountered;
|
||||
- fork and create a pull request.
|
||||
|
||||
## License
|
||||
|
||||
[AGPL-3.0-or-later](https://spdx.org/licenses/AGPL-3.0-or-later) © [Vates SAS](https://vates.fr)
|
||||
0
@xen-orchestra/backups-cli/USAGE.md
Normal file
0
@xen-orchestra/backups-cli/USAGE.md
Normal file
@@ -3,7 +3,7 @@ const getopts = require('getopts')
|
||||
const { version } = require('./package.json')
|
||||
|
||||
module.exports = commands =>
|
||||
async function(args, prefix) {
|
||||
async function (args, prefix) {
|
||||
const opts = getopts(args, {
|
||||
alias: {
|
||||
help: 'h',
|
||||
|
||||
@@ -3,6 +3,17 @@ const { dirname } = require('path')
|
||||
const fs = require('promise-toolbox/promisifyAll')(require('fs'))
|
||||
module.exports = fs
|
||||
|
||||
fs.getSize = path =>
|
||||
fs.stat(path).then(
|
||||
_ => _.size,
|
||||
error => {
|
||||
if (error.code === 'ENOENT') {
|
||||
return 0
|
||||
}
|
||||
throw error
|
||||
}
|
||||
)
|
||||
|
||||
fs.mktree = async function mkdirp(path) {
|
||||
try {
|
||||
await fs.mkdir(path)
|
||||
@@ -34,14 +45,15 @@ fs.readdir2 = path =>
|
||||
return entries
|
||||
},
|
||||
error => {
|
||||
if (
|
||||
error != null &&
|
||||
(error.code === 'ENOENT' || error.code === 'ENOTDIR')
|
||||
) {
|
||||
const { code } = error
|
||||
if (code === 'ENOENT') {
|
||||
// do nothing
|
||||
} else if (code === 'ENOTDIR') {
|
||||
console.warn('WARN: readdir(%s)', path, error)
|
||||
return []
|
||||
} else {
|
||||
throw error
|
||||
}
|
||||
throw error
|
||||
return []
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
@@ -8,12 +8,13 @@ let force
|
||||
const assert = require('assert')
|
||||
const flatten = require('lodash/flatten')
|
||||
const getopts = require('getopts')
|
||||
const isValidXva = require('@xen-orchestra/backups/isValidXva')
|
||||
const limitConcurrency = require('limit-concurrency-decorator').default
|
||||
const lockfile = require('proper-lockfile')
|
||||
const pipe = require('promise-toolbox/pipe')
|
||||
const { default: Vhd } = require('vhd-lib')
|
||||
const { default: Vhd, mergeVhd } = require('vhd-lib')
|
||||
const { dirname, resolve } = require('path')
|
||||
const { DISK_TYPE_DIFFERENCING } = require('vhd-lib/dist/_constants')
|
||||
const { isValidXva } = require('@xen-orchestra/backups/isValidXva')
|
||||
|
||||
const asyncMap = require('../_asyncMap')
|
||||
const fs = require('../_fs')
|
||||
@@ -26,10 +27,10 @@ const handler = require('@xen-orchestra/fs').getHandler({ url: 'file://' })
|
||||
//
|
||||
// the whole chain will be merged into parent, parent will be renamed to child
|
||||
// and all the others will deleted
|
||||
async function mergeVhdChain(chain) {
|
||||
const mergeVhdChain = limitConcurrency(1)(async function mergeVhdChain(chain) {
|
||||
assert(chain.length >= 2)
|
||||
|
||||
const child = chain[0]
|
||||
let child = chain[0]
|
||||
const parent = chain[chain.length - 1]
|
||||
const children = chain.slice(0, -1).reverse()
|
||||
|
||||
@@ -46,15 +47,36 @@ async function mergeVhdChain(chain) {
|
||||
// `mergeVhd` does not work with a stream, either
|
||||
// - make it accept a stream
|
||||
// - or create synthetic VHD which is not a stream
|
||||
return console.warn('TODO: implement merge')
|
||||
// await mergeVhd(
|
||||
// handler,
|
||||
// parent,
|
||||
// handler,
|
||||
// children.length === 1
|
||||
// ? child
|
||||
// : await createSyntheticStream(handler, children)
|
||||
// )
|
||||
if (children.length !== 1) {
|
||||
console.warn('TODO: implement merging multiple children')
|
||||
children.length = 1
|
||||
child = children[0]
|
||||
}
|
||||
|
||||
let done, total
|
||||
const handle = setInterval(() => {
|
||||
if (done !== undefined) {
|
||||
console.log('merging %s: %s/%s', child, done, total)
|
||||
}
|
||||
}, 10e3)
|
||||
|
||||
await mergeVhd(
|
||||
handler,
|
||||
parent,
|
||||
handler,
|
||||
child,
|
||||
// children.length === 1
|
||||
// ? child
|
||||
// : await createSyntheticStream(handler, children),
|
||||
{
|
||||
onProgress({ done: d, total: t }) {
|
||||
done = d
|
||||
total = t
|
||||
},
|
||||
}
|
||||
)
|
||||
|
||||
clearInterval(handle)
|
||||
}
|
||||
|
||||
await Promise.all([
|
||||
@@ -66,7 +88,7 @@ async function mergeVhdChain(chain) {
|
||||
return force && handler.unlink(child)
|
||||
}),
|
||||
])
|
||||
}
|
||||
})
|
||||
|
||||
const listVhds = pipe([
|
||||
vmDir => vmDir + '/vdis',
|
||||
@@ -152,11 +174,12 @@ async function handleVm(vmDir) {
|
||||
await Promise.all(deletions)
|
||||
}
|
||||
|
||||
const [jsons, xvas] = await fs
|
||||
const [jsons, xvas, xvaSums] = await fs
|
||||
.readdir2(vmDir)
|
||||
.then(entries => [
|
||||
entries.filter(_ => _.endsWith('.json')),
|
||||
new Set(entries.filter(_ => _.endsWith('.xva'))),
|
||||
entries.filter(_ => _.endsWith('.xva.cheksum')),
|
||||
])
|
||||
|
||||
await asyncMap(xvas, async path => {
|
||||
@@ -274,6 +297,15 @@ async function handleVm(vmDir) {
|
||||
console.warn('')
|
||||
return force && handler.unlink(path)
|
||||
}),
|
||||
asyncMap(xvaSums, path => {
|
||||
// no need to handle checksums for XVAs deleted by the script, they will be handled by `unlink()`
|
||||
if (!xvas.has(path.slice(0, -'.checksum'.length))) {
|
||||
console.warn('Unused XVA checksum', path)
|
||||
force && console.warn(' deleting…')
|
||||
console.warn('')
|
||||
return force && handler.unlink(path)
|
||||
}
|
||||
}),
|
||||
])
|
||||
}
|
||||
|
||||
|
||||
58
@xen-orchestra/backups-cli/commands/info.js
Normal file
58
@xen-orchestra/backups-cli/commands/info.js
Normal file
@@ -0,0 +1,58 @@
|
||||
const groupBy = require('lodash/groupBy')
|
||||
const { createHash } = require('crypto')
|
||||
const { dirname, resolve } = require('path')
|
||||
|
||||
const asyncMap = require('../_asyncMap')
|
||||
const { readdir2, readFile, getSize } = require('../_fs')
|
||||
|
||||
const sha512 = str => createHash('sha512').update(str).digest('hex')
|
||||
const sum = values => values.reduce((a, b) => a + b)
|
||||
|
||||
module.exports = async function info(vmDirs) {
|
||||
const jsonFiles = (
|
||||
await asyncMap(vmDirs, async vmDir =>
|
||||
(await readdir2(vmDir)).filter(_ => _.endsWith('.json'))
|
||||
)
|
||||
).flat()
|
||||
|
||||
const hashes = { __proto__: null }
|
||||
|
||||
const info = (
|
||||
await asyncMap(jsonFiles, async jsonFile => {
|
||||
try {
|
||||
const jsonDir = dirname(jsonFile)
|
||||
const json = await readFile(jsonFile)
|
||||
|
||||
const hash = sha512(json)
|
||||
if (hash in hashes) {
|
||||
console.log(jsonFile, 'duplicate of', hashes[hash])
|
||||
return
|
||||
}
|
||||
hashes[hash] = jsonFile
|
||||
|
||||
const metadata = JSON.parse(json)
|
||||
|
||||
return {
|
||||
jsonDir,
|
||||
jsonFile,
|
||||
metadata,
|
||||
size:
|
||||
json.length +
|
||||
(await (metadata.mode === 'delta'
|
||||
? asyncMap(Object.values(metadata.vhds), _ =>
|
||||
getSize(resolve(jsonDir, _))
|
||||
).then(sum)
|
||||
: getSize(resolve(jsonDir, metadata.xva)))),
|
||||
}
|
||||
} catch (error) {
|
||||
console.error(jsonFile, error)
|
||||
}
|
||||
})
|
||||
).filter(_ => _ !== undefined)
|
||||
const byJobs = groupBy(info, 'metadata.jobId')
|
||||
Object.keys(byJobs)
|
||||
.sort()
|
||||
.forEach(jobId => {
|
||||
console.log(jobId, sum(byJobs[jobId].map(_ => _.size)))
|
||||
})
|
||||
}
|
||||
@@ -13,6 +13,12 @@ require('./_composeCommands')({
|
||||
},
|
||||
usage: 'xo-vm-backups <field path>',
|
||||
},
|
||||
info: {
|
||||
get main() {
|
||||
return require('./commands/info')
|
||||
},
|
||||
usage: 'xo-vm-backups/*',
|
||||
},
|
||||
})(process.argv.slice(2), 'xo-backups').catch(error => {
|
||||
console.error('main', error)
|
||||
process.exitCode = 1
|
||||
|
||||
@@ -3,12 +3,14 @@
|
||||
"bin": {
|
||||
"xo-backups": "index.js"
|
||||
},
|
||||
"preferGlobal": true,
|
||||
"bugs": "https://github.com/vatesfr/xen-orchestra/issues",
|
||||
"dependencies": {
|
||||
"@xen-orchestra/backups": "^0.1.1",
|
||||
"@xen-orchestra/fs": "^0.10.3",
|
||||
"@xen-orchestra/fs": "^0.11.1",
|
||||
"filenamify": "^4.1.0",
|
||||
"getopts": "^2.2.5",
|
||||
"limit-concurrency-decorator": "^0.4.0",
|
||||
"lodash": "^4.17.15",
|
||||
"promise-toolbox": "^0.15.0",
|
||||
"proper-lockfile": "^4.1.1",
|
||||
@@ -31,5 +33,10 @@
|
||||
"scripts": {
|
||||
"postversion": "npm publish --access public"
|
||||
},
|
||||
"version": "0.0.0"
|
||||
"version": "0.2.1",
|
||||
"license": "AGPL-3.0-or-later",
|
||||
"author": {
|
||||
"name": "Vates SAS",
|
||||
"url": "https://vates.fr"
|
||||
}
|
||||
}
|
||||
|
||||
28
@xen-orchestra/backups/README.md
Normal file
28
@xen-orchestra/backups/README.md
Normal file
@@ -0,0 +1,28 @@
|
||||
<!-- DO NOT EDIT MANUALLY, THIS FILE HAS BEEN GENERATED -->
|
||||
|
||||
# @xen-orchestra/backups
|
||||
|
||||
[](https://npmjs.org/package/@xen-orchestra/backups)  [](https://bundlephobia.com/result?p=@xen-orchestra/backups) [](https://npmjs.org/package/@xen-orchestra/backups)
|
||||
|
||||
## Install
|
||||
|
||||
Installation of the [npm package](https://npmjs.org/package/@xen-orchestra/backups):
|
||||
|
||||
```
|
||||
> npm install --save @xen-orchestra/backups
|
||||
```
|
||||
|
||||
## Contributions
|
||||
|
||||
Contributions are _very_ welcomed, either on the documentation or on
|
||||
the code.
|
||||
|
||||
You may:
|
||||
|
||||
- report any [issue](https://github.com/vatesfr/xen-orchestra/issues)
|
||||
you've encountered;
|
||||
- fork and create a pull request.
|
||||
|
||||
## License
|
||||
|
||||
[AGPL-3.0-or-later](https://spdx.org/licenses/AGPL-3.0-or-later) © [Vates SAS](https://vates.fr)
|
||||
0
@xen-orchestra/backups/USAGE.md
Normal file
0
@xen-orchestra/backups/USAGE.md
Normal file
@@ -16,7 +16,12 @@
|
||||
"postversion": "npm publish --access public"
|
||||
},
|
||||
"dependencies": {
|
||||
"d3-time-format": "^2.2.3",
|
||||
"fs-extra": "^8.1.0"
|
||||
"d3-time-format": "^3.0.0",
|
||||
"fs-extra": "^9.0.0"
|
||||
},
|
||||
"license": "AGPL-3.0-or-later",
|
||||
"author": {
|
||||
"name": "Vates SAS",
|
||||
"url": "https://vates.fr"
|
||||
}
|
||||
}
|
||||
|
||||
28
@xen-orchestra/cr-seed-cli/README.md
Normal file
28
@xen-orchestra/cr-seed-cli/README.md
Normal file
@@ -0,0 +1,28 @@
|
||||
<!-- DO NOT EDIT MANUALLY, THIS FILE HAS BEEN GENERATED -->
|
||||
|
||||
# @xen-orchestra/cr-seed-cli
|
||||
|
||||
[](https://npmjs.org/package/@xen-orchestra/cr-seed-cli)  [](https://bundlephobia.com/result?p=@xen-orchestra/cr-seed-cli) [](https://npmjs.org/package/@xen-orchestra/cr-seed-cli)
|
||||
|
||||
## Install
|
||||
|
||||
Installation of the [npm package](https://npmjs.org/package/@xen-orchestra/cr-seed-cli):
|
||||
|
||||
```
|
||||
> npm install --global @xen-orchestra/cr-seed-cli
|
||||
```
|
||||
|
||||
## Contributions
|
||||
|
||||
Contributions are _very_ welcomed, either on the documentation or on
|
||||
the code.
|
||||
|
||||
You may:
|
||||
|
||||
- report any [issue](https://github.com/vatesfr/xen-orchestra/issues)
|
||||
you've encountered;
|
||||
- fork and create a pull request.
|
||||
|
||||
## License
|
||||
|
||||
[AGPL-3.0-or-later](https://spdx.org/licenses/AGPL-3.0-or-later) © [Vates SAS](https://vates.fr)
|
||||
0
@xen-orchestra/cr-seed-cli/USAGE.md
Normal file
0
@xen-orchestra/cr-seed-cli/USAGE.md
Normal file
@@ -5,7 +5,7 @@ const { NULL_REF, Xapi } = require('xen-api')
|
||||
|
||||
const pkg = require('./package.json')
|
||||
|
||||
Xapi.prototype.getVmDisks = async function(vm) {
|
||||
Xapi.prototype.getVmDisks = async function (vm) {
|
||||
const disks = { __proto__: null }
|
||||
await Promise.all([
|
||||
...vm.VBDs.map(async vbdRef => {
|
||||
|
||||
@@ -15,11 +15,17 @@
|
||||
"bin": {
|
||||
"xo-cr-seed": "./index.js"
|
||||
},
|
||||
"preferGlobal": true,
|
||||
"dependencies": {
|
||||
"golike-defer": "^0.4.1",
|
||||
"xen-api": "^0.28.3"
|
||||
"xen-api": "^0.29.0"
|
||||
},
|
||||
"scripts": {
|
||||
"postversion": "npm publish"
|
||||
},
|
||||
"license": "AGPL-3.0-or-later",
|
||||
"author": {
|
||||
"name": "Vates SAS",
|
||||
"url": "https://vates.fr"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,4 +1,8 @@
|
||||
# @xen-orchestra/cron [](https://travis-ci.org/vatesfr/xen-orchestra)
|
||||
<!-- DO NOT EDIT MANUALLY, THIS FILE HAS BEEN GENERATED -->
|
||||
|
||||
# @xen-orchestra/cron
|
||||
|
||||
[](https://npmjs.org/package/@xen-orchestra/cron)  [](https://bundlephobia.com/result?p=@xen-orchestra/cron) [](https://npmjs.org/package/@xen-orchestra/cron)
|
||||
|
||||
> Focused, well maintained, cron parser/scheduler
|
||||
|
||||
@@ -10,6 +14,8 @@ Installation of the [npm package](https://npmjs.org/package/@xen-orchestra/cron)
|
||||
> npm install --save @xen-orchestra/cron
|
||||
```
|
||||
|
||||
## Usage
|
||||
|
||||
### Pattern syntax
|
||||
|
||||
```
|
||||
@@ -109,25 +115,6 @@ job.start()
|
||||
job.stop()
|
||||
```
|
||||
|
||||
## Development
|
||||
|
||||
```
|
||||
# Install dependencies
|
||||
> yarn
|
||||
|
||||
# Run the tests
|
||||
> yarn test
|
||||
|
||||
# Continuously compile
|
||||
> yarn dev
|
||||
|
||||
# Continuously run the tests
|
||||
> yarn dev-test
|
||||
|
||||
# Build for production (automatically called by npm install)
|
||||
> yarn build
|
||||
```
|
||||
|
||||
## Contributions
|
||||
|
||||
Contributions are _very_ welcomed, either on the documentation or on
|
||||
@@ -141,4 +128,4 @@ You may:
|
||||
|
||||
## License
|
||||
|
||||
ISC © [Vates SAS](https://vates.fr)
|
||||
[ISC](https://spdx.org/licenses/ISC) © [Vates SAS](https://vates.fr)
|
||||
|
||||
98
@xen-orchestra/cron/USAGE.md
Normal file
98
@xen-orchestra/cron/USAGE.md
Normal file
@@ -0,0 +1,98 @@
|
||||
### Pattern syntax
|
||||
|
||||
```
|
||||
<minute> <hour> <day of month> <month> <day of week>
|
||||
```
|
||||
|
||||
Each entry can be:
|
||||
|
||||
- a single value
|
||||
- a range (`0-23` or `*/2`)
|
||||
- a list of values/ranges (`1,8-12`)
|
||||
|
||||
A wildcard (`*`) can be used as a shortcut for the whole range
|
||||
(`first-last`).
|
||||
|
||||
Step values can be used in conjunctions with ranges. For instance,
|
||||
`1-7/2` is the same as `1,3,5,7`.
|
||||
|
||||
| Field | Allowed values |
|
||||
| ---------------- | ------------------------------------------------------------------ |
|
||||
| minute | 0-59 |
|
||||
| hour | 0-23 |
|
||||
| day of the month | 1-31 or 3-letter names (`jan`, `feb`, …) |
|
||||
| month | 0-11 |
|
||||
| day of week | 0-7 (0 and 7 both mean Sunday) or 3-letter names (`mon`, `tue`, …) |
|
||||
|
||||
> Note: the month range is 0-11 to be compatible with
|
||||
> [cron](https://github.com/kelektiv/node-cron), it does not appear to
|
||||
> be very standard though.
|
||||
|
||||
### API
|
||||
|
||||
`createSchedule(pattern: string, zone: string = 'utc'): Schedule`
|
||||
|
||||
> Create a new schedule.
|
||||
|
||||
- `pattern`: the pattern to use, see [the syntax](#pattern-syntax)
|
||||
- `zone`: the timezone to use, use `'local'` for the local timezone
|
||||
|
||||
```js
|
||||
import { createSchedule } from '@xen-orchestra/cron'
|
||||
|
||||
const schedule = createSchedule('0 0 * * sun', 'America/New_York')
|
||||
```
|
||||
|
||||
`Schedule#createJob(fn: Function): Job`
|
||||
|
||||
> Create a new job from this schedule.
|
||||
|
||||
- `fn`: function to execute, if it returns a promise, it will be
|
||||
awaited before scheduling the next run.
|
||||
|
||||
```js
|
||||
const job = schedule.createJob(() => {
|
||||
console.log(new Date())
|
||||
})
|
||||
```
|
||||
|
||||
`Schedule#next(n: number): Array<Date>`
|
||||
|
||||
> Returns the next dates matching this schedule.
|
||||
|
||||
- `n`: number of dates to return
|
||||
|
||||
```js
|
||||
schedule.next(2)
|
||||
// [ 2018-02-11T05:00:00.000Z, 2018-02-18T05:00:00.000Z ]
|
||||
```
|
||||
|
||||
`Schedule#startJob(fn: Function): () => void`
|
||||
|
||||
> Start a new job from this schedule and return a function to stop it.
|
||||
|
||||
- `fn`: function to execute, if it returns a promise, it will be
|
||||
awaited before scheduling the next run.
|
||||
|
||||
```js
|
||||
const stopJob = schedule.startJob(() => {
|
||||
console.log(new Date())
|
||||
})
|
||||
stopJob()
|
||||
```
|
||||
|
||||
`Job#start(): void`
|
||||
|
||||
> Start this job.
|
||||
|
||||
```js
|
||||
job.start()
|
||||
```
|
||||
|
||||
`Job#stop(): void`
|
||||
|
||||
> Stop this job.
|
||||
|
||||
```js
|
||||
job.stop()
|
||||
```
|
||||
@@ -23,8 +23,8 @@
|
||||
"url": "https://github.com/vatesfr/xen-orchestra.git"
|
||||
},
|
||||
"author": {
|
||||
"name": "Julien Fontanet",
|
||||
"email": "julien.fontanet@isonoe.net"
|
||||
"name": "Vates SAS",
|
||||
"url": "https://vates.fr"
|
||||
},
|
||||
"preferGlobal": false,
|
||||
"main": "dist/",
|
||||
@@ -47,7 +47,7 @@
|
||||
"@babel/core": "^7.0.0",
|
||||
"@babel/preset-env": "^7.0.0",
|
||||
"@babel/preset-flow": "^7.0.0",
|
||||
"cross-env": "^6.0.3",
|
||||
"cross-env": "^7.0.2",
|
||||
"rimraf": "^3.0.0"
|
||||
},
|
||||
"scripts": {
|
||||
|
||||
@@ -1,6 +1,8 @@
|
||||
# @xen-orchestra/defined [](https://travis-ci.org/${pkg.shortGitHubPath})
|
||||
<!-- DO NOT EDIT MANUALLY, THIS FILE HAS BEEN GENERATED -->
|
||||
|
||||
> ${pkg.description}
|
||||
# @xen-orchestra/defined
|
||||
|
||||
[](https://npmjs.org/package/@xen-orchestra/defined)  [](https://bundlephobia.com/result?p=@xen-orchestra/defined) [](https://npmjs.org/package/@xen-orchestra/defined)
|
||||
|
||||
## Install
|
||||
|
||||
@@ -10,29 +12,6 @@ Installation of the [npm package](https://npmjs.org/package/@xen-orchestra/defin
|
||||
> npm install --save @xen-orchestra/defined
|
||||
```
|
||||
|
||||
## Usage
|
||||
|
||||
**TODO**
|
||||
|
||||
## Development
|
||||
|
||||
```
|
||||
# Install dependencies
|
||||
> yarn
|
||||
|
||||
# Run the tests
|
||||
> yarn test
|
||||
|
||||
# Continuously compile
|
||||
> yarn dev
|
||||
|
||||
# Continuously run the tests
|
||||
> yarn dev-test
|
||||
|
||||
# Build for production (automatically called by npm install)
|
||||
> yarn build
|
||||
```
|
||||
|
||||
## Contributions
|
||||
|
||||
Contributions are _very_ welcomed, either on the documentation or on
|
||||
@@ -46,4 +25,4 @@ You may:
|
||||
|
||||
## License
|
||||
|
||||
ISC © [Vates SAS](https://vates.fr)
|
||||
[ISC](https://spdx.org/licenses/ISC) © [Vates SAS](https://vates.fr)
|
||||
|
||||
0
@xen-orchestra/defined/USAGE.md
Normal file
0
@xen-orchestra/defined/USAGE.md
Normal file
@@ -13,8 +13,8 @@
|
||||
"url": "https://github.com/vatesfr/xen-orchestra.git"
|
||||
},
|
||||
"author": {
|
||||
"name": "Julien Fontanet",
|
||||
"email": "julien.fontanet@vates.fr"
|
||||
"name": "Vates SAS",
|
||||
"url": "https://vates.fr"
|
||||
},
|
||||
"preferGlobal": false,
|
||||
"main": "dist/",
|
||||
@@ -35,7 +35,7 @@
|
||||
"@babel/preset-env": "^7.0.0",
|
||||
"@babel/preset-flow": "^7.0.0",
|
||||
"babel-plugin-lodash": "^3.3.2",
|
||||
"cross-env": "^6.0.3",
|
||||
"cross-env": "^7.0.2",
|
||||
"rimraf": "^3.0.0"
|
||||
},
|
||||
"scripts": {
|
||||
|
||||
@@ -1,6 +1,8 @@
|
||||
# @xen-orchestra/emit-async [](https://travis-ci.org/${pkg.shortGitHubPath})
|
||||
<!-- DO NOT EDIT MANUALLY, THIS FILE HAS BEEN GENERATED -->
|
||||
|
||||
> ${pkg.description}
|
||||
# @xen-orchestra/emit-async
|
||||
|
||||
[](https://npmjs.org/package/@xen-orchestra/emit-async)  [](https://bundlephobia.com/result?p=@xen-orchestra/emit-async) [](https://npmjs.org/package/@xen-orchestra/emit-async)
|
||||
|
||||
## Install
|
||||
|
||||
@@ -19,7 +21,7 @@ import emitAsync from '@xen-orchestra/emit-async'
|
||||
const ee = new EE()
|
||||
ee.emitAsync = emitAsync
|
||||
|
||||
ee.on('start', async function() {
|
||||
ee.on('start', async function () {
|
||||
// whatever
|
||||
})
|
||||
|
||||
@@ -39,25 +41,6 @@ await ee.emitAsync(
|
||||
)
|
||||
```
|
||||
|
||||
## Development
|
||||
|
||||
```
|
||||
# Install dependencies
|
||||
> yarn
|
||||
|
||||
# Run the tests
|
||||
> yarn test
|
||||
|
||||
# Continuously compile
|
||||
> yarn dev
|
||||
|
||||
# Continuously run the tests
|
||||
> yarn dev-test
|
||||
|
||||
# Build for production (automatically called by npm install)
|
||||
> yarn build
|
||||
```
|
||||
|
||||
## Contributions
|
||||
|
||||
Contributions are _very_ welcomed, either on the documentation or on
|
||||
@@ -71,4 +54,4 @@ You may:
|
||||
|
||||
## License
|
||||
|
||||
ISC © [Vates SAS](https://vates.fr)
|
||||
[ISC](https://spdx.org/licenses/ISC) © [Vates SAS](https://vates.fr)
|
||||
|
||||
26
@xen-orchestra/emit-async/USAGE.md
Normal file
26
@xen-orchestra/emit-async/USAGE.md
Normal file
@@ -0,0 +1,26 @@
|
||||
```js
|
||||
import EE from 'events'
|
||||
import emitAsync from '@xen-orchestra/emit-async'
|
||||
|
||||
const ee = new EE()
|
||||
ee.emitAsync = emitAsync
|
||||
|
||||
ee.on('start', async function () {
|
||||
// whatever
|
||||
})
|
||||
|
||||
// similar to EventEmmiter#emit() but returns a promise which resolves when all
|
||||
// listeners have resolved
|
||||
await ee.emitAsync('start')
|
||||
|
||||
// by default, it will rejects as soon as one listener reject, you can customise
|
||||
// error handling though:
|
||||
await ee.emitAsync(
|
||||
{
|
||||
onError(error) {
|
||||
console.warn(error)
|
||||
},
|
||||
},
|
||||
'start'
|
||||
)
|
||||
```
|
||||
@@ -13,8 +13,8 @@
|
||||
"url": "https://github.com/vatesfr/xen-orchestra.git"
|
||||
},
|
||||
"author": {
|
||||
"name": "Julien Fontanet",
|
||||
"email": "julien.fontanet@vates.fr"
|
||||
"name": "Vates SAS",
|
||||
"url": "https://vates.fr"
|
||||
},
|
||||
"preferGlobal": false,
|
||||
"main": "dist/",
|
||||
@@ -34,7 +34,7 @@
|
||||
"@babel/core": "^7.0.0",
|
||||
"@babel/preset-env": "^7.0.0",
|
||||
"babel-plugin-lodash": "^3.3.2",
|
||||
"cross-env": "^6.0.3",
|
||||
"cross-env": "^7.0.2",
|
||||
"rimraf": "^3.0.0"
|
||||
},
|
||||
"scripts": {
|
||||
|
||||
30
@xen-orchestra/fs/README.md
Normal file
30
@xen-orchestra/fs/README.md
Normal file
@@ -0,0 +1,30 @@
|
||||
<!-- DO NOT EDIT MANUALLY, THIS FILE HAS BEEN GENERATED -->
|
||||
|
||||
# @xen-orchestra/fs
|
||||
|
||||
[](https://npmjs.org/package/@xen-orchestra/fs)  [](https://bundlephobia.com/result?p=@xen-orchestra/fs) [](https://npmjs.org/package/@xen-orchestra/fs)
|
||||
|
||||
> The File System for Xen Orchestra backups.
|
||||
|
||||
## Install
|
||||
|
||||
Installation of the [npm package](https://npmjs.org/package/@xen-orchestra/fs):
|
||||
|
||||
```
|
||||
> npm install --global @xen-orchestra/fs
|
||||
```
|
||||
|
||||
## Contributions
|
||||
|
||||
Contributions are _very_ welcomed, either on the documentation or on
|
||||
the code.
|
||||
|
||||
You may:
|
||||
|
||||
- report any [issue](https://github.com/vatesfr/xen-orchestra/issues)
|
||||
you've encountered;
|
||||
- fork and create a pull request.
|
||||
|
||||
## License
|
||||
|
||||
[AGPL-3.0-or-later](https://spdx.org/licenses/AGPL-3.0-or-later) © [Vates SAS](https://vates.fr)
|
||||
0
@xen-orchestra/fs/USAGE.md
Normal file
0
@xen-orchestra/fs/USAGE.md
Normal file
@@ -1,8 +1,8 @@
|
||||
{
|
||||
"private": false,
|
||||
"name": "@xen-orchestra/fs",
|
||||
"version": "0.10.3",
|
||||
"license": "AGPL-3.0",
|
||||
"version": "0.11.1",
|
||||
"license": "AGPL-3.0-or-later",
|
||||
"description": "The File System for Xen Orchestra backups.",
|
||||
"keywords": [],
|
||||
"homepage": "https://github.com/vatesfr/xen-orchestra/tree/master/@xen-orchestra/fs",
|
||||
@@ -25,17 +25,18 @@
|
||||
"@marsaud/smb2": "^0.15.0",
|
||||
"@sindresorhus/df": "^3.1.1",
|
||||
"@xen-orchestra/async-map": "^0.0.0",
|
||||
"aws-sdk": "^2.686.0",
|
||||
"decorator-synchronized": "^0.5.0",
|
||||
"execa": "^3.2.0",
|
||||
"fs-extra": "^8.0.1",
|
||||
"get-stream": "^5.1.0",
|
||||
"execa": "^4.0.2",
|
||||
"fs-extra": "^9.0.0",
|
||||
"get-stream": "^6.0.0",
|
||||
"limit-concurrency-decorator": "^0.4.0",
|
||||
"lodash": "^4.17.4",
|
||||
"promise-toolbox": "^0.15.0",
|
||||
"readable-stream": "^3.0.6",
|
||||
"through2": "^3.0.0",
|
||||
"tmp": "^0.1.0",
|
||||
"xo-remote-parser": "^0.5.0"
|
||||
"through2": "^4.0.2",
|
||||
"tmp": "^0.2.1",
|
||||
"xo-remote-parser": "^0.6.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@babel/cli": "^7.0.0",
|
||||
@@ -47,7 +48,7 @@
|
||||
"@babel/preset-flow": "^7.0.0",
|
||||
"async-iterator-to-stream": "^1.1.0",
|
||||
"babel-plugin-lodash": "^3.3.2",
|
||||
"cross-env": "^6.0.3",
|
||||
"cross-env": "^7.0.2",
|
||||
"dotenv": "^8.0.0",
|
||||
"index-modules": "^0.3.0",
|
||||
"rimraf": "^3.0.0"
|
||||
@@ -58,7 +59,11 @@
|
||||
"dev": "cross-env NODE_ENV=development babel --watch --source-maps --out-dir=dist/ src/",
|
||||
"prebuild": "yarn run clean",
|
||||
"predev": "yarn run clean",
|
||||
"prepare": "yarn run build",
|
||||
"prepublishOnly": "yarn run build",
|
||||
"postversion": "npm publish"
|
||||
},
|
||||
"author": {
|
||||
"name": "Vates SAS",
|
||||
"url": "https://vates.fr"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -31,10 +31,7 @@ export default class MountHandler extends LocalHandler {
|
||||
}
|
||||
this._realPath = join(
|
||||
mountsDir,
|
||||
remote.id ||
|
||||
Math.random()
|
||||
.toString(36)
|
||||
.slice(2)
|
||||
remote.id || Math.random().toString(36).slice(2)
|
||||
)
|
||||
}
|
||||
|
||||
@@ -75,9 +72,12 @@ export default class MountHandler extends LocalHandler {
|
||||
|
||||
try {
|
||||
const { type, device, options, env } = this._params
|
||||
|
||||
// Linux mount is more flexible in which order the mount arguments appear.
|
||||
// But FreeBSD requires this order of the arguments.
|
||||
await this._execa(
|
||||
'mount',
|
||||
['-t', type, device, realPath, '-o', options],
|
||||
['-o', options, '-t', type, device, realPath],
|
||||
{
|
||||
env: {
|
||||
LANG: 'C',
|
||||
|
||||
@@ -5,7 +5,7 @@ import getStream from 'get-stream'
|
||||
|
||||
import asyncMap from '@xen-orchestra/async-map'
|
||||
import limit from 'limit-concurrency-decorator'
|
||||
import path from 'path'
|
||||
import path, { basename } from 'path'
|
||||
import synchronized from 'decorator-synchronized'
|
||||
import { fromCallback, fromEvent, ignoreErrors, timeout } from 'promise-toolbox'
|
||||
import { parse } from 'xo-remote-parser'
|
||||
@@ -121,6 +121,7 @@ export default class RemoteHandlerAbstract {
|
||||
await this.__closeFile(fd)
|
||||
}
|
||||
|
||||
// TODO: remove method
|
||||
async createOutputStream(
|
||||
file: File,
|
||||
{ checksum = false, ...options }: Object = {}
|
||||
@@ -221,19 +222,15 @@ export default class RemoteHandlerAbstract {
|
||||
)
|
||||
}
|
||||
|
||||
createWriteStream(
|
||||
file: File,
|
||||
options: { end?: number, flags?: string, start?: number } = {}
|
||||
): Promise<LaxWritable> {
|
||||
return timeout.call(
|
||||
this._createWriteStream(
|
||||
typeof file === 'string' ? normalizePath(file) : file,
|
||||
{
|
||||
flags: 'wx',
|
||||
...options,
|
||||
}
|
||||
)
|
||||
)
|
||||
// write a stream to a file using a temporary file
|
||||
async outputStream(
|
||||
input: Readable | Promise<Readable>,
|
||||
path: string,
|
||||
{ checksum = true }: { checksum?: boolean } = {}
|
||||
): Promise<void> {
|
||||
path = normalizePath(path)
|
||||
input = await input
|
||||
return this._outputStream(await input, normalizePath(path), { checksum })
|
||||
}
|
||||
|
||||
// Free the resources possibly dedicated to put the remote at work, when it
|
||||
@@ -321,18 +318,6 @@ export default class RemoteHandlerAbstract {
|
||||
return this._readFile(normalizePath(file), { flags })
|
||||
}
|
||||
|
||||
async refreshChecksum(path: string): Promise<void> {
|
||||
path = normalizePath(path)
|
||||
|
||||
const stream = (await this._createReadStream(path, { flags: 'r' })).pipe(
|
||||
createChecksumStream()
|
||||
)
|
||||
stream.resume() // start reading the whole file
|
||||
await this._outputFile(checksumFile(path), await stream.checksum, {
|
||||
flags: 'wx',
|
||||
})
|
||||
}
|
||||
|
||||
async rename(
|
||||
oldPath: string,
|
||||
newPath: string,
|
||||
@@ -548,6 +533,22 @@ export default class RemoteHandlerAbstract {
|
||||
return this._outputFile(file, data, options)
|
||||
}
|
||||
|
||||
async _outputStream(input, path, { checksum }) {
|
||||
const tmpPath = `${dirname(path)}/.${basename(path)}`
|
||||
const output = await this.createOutputStream(tmpPath, { checksum })
|
||||
try {
|
||||
input.pipe(output)
|
||||
await fromEvent(output, 'finish')
|
||||
await output.checksumWritten
|
||||
// $FlowFixMe
|
||||
await input.task
|
||||
await this.rename(tmpPath, path, { checksum })
|
||||
} catch (error) {
|
||||
await this.unlink(tmpPath, { checksum })
|
||||
throw error
|
||||
}
|
||||
}
|
||||
|
||||
_read(
|
||||
file: File,
|
||||
buffer: Buffer,
|
||||
@@ -648,7 +649,7 @@ function createPrefixWrapperMethods() {
|
||||
return
|
||||
}
|
||||
|
||||
descriptor.value = function() {
|
||||
descriptor.value = function () {
|
||||
let path
|
||||
if (arguments.length !== 0 && typeof (path = arguments[0]) === 'string') {
|
||||
arguments[0] = this._resolve(path)
|
||||
|
||||
@@ -42,18 +42,6 @@ describe('createOutputStream()', () => {
|
||||
})
|
||||
})
|
||||
|
||||
describe('createReadStream()', () => {
|
||||
it(`throws in case of timeout`, async () => {
|
||||
const testHandler = new TestHandler({
|
||||
createReadStream: () => new Promise(() => {}),
|
||||
})
|
||||
|
||||
const promise = testHandler.createReadStream('file')
|
||||
jest.advanceTimersByTime(TIMEOUT)
|
||||
await expect(promise).rejects.toThrowError(TimeoutError)
|
||||
})
|
||||
})
|
||||
|
||||
describe('getInfo()', () => {
|
||||
it('throws in case of timeout', async () => {
|
||||
const testHandler = new TestHandler({
|
||||
|
||||
@@ -2,7 +2,6 @@
|
||||
|
||||
import 'dotenv/config'
|
||||
import asyncIteratorToStream from 'async-iterator-to-stream'
|
||||
import getStream from 'get-stream'
|
||||
import { forOwn, random } from 'lodash'
|
||||
import { fromCallback } from 'promise-toolbox'
|
||||
import { pipeline } from 'readable-stream'
|
||||
@@ -28,7 +27,7 @@ const unsecureRandomBytes = n => {
|
||||
|
||||
const TEST_DATA_LEN = 1024
|
||||
const TEST_DATA = unsecureRandomBytes(TEST_DATA_LEN)
|
||||
const createTestDataStream = asyncIteratorToStream(function*() {
|
||||
const createTestDataStream = asyncIteratorToStream(function* () {
|
||||
yield TEST_DATA
|
||||
})
|
||||
|
||||
@@ -91,31 +90,6 @@ handlers.forEach(url => {
|
||||
})
|
||||
})
|
||||
|
||||
describe('#createReadStream()', () => {
|
||||
beforeEach(() => handler.outputFile('file', TEST_DATA))
|
||||
|
||||
testWithFileDescriptor('file', 'r', async ({ file, flags }) => {
|
||||
await expect(
|
||||
await getStream.buffer(
|
||||
await handler.createReadStream(file, { flags })
|
||||
)
|
||||
).toEqual(TEST_DATA)
|
||||
})
|
||||
})
|
||||
|
||||
describe('#createWriteStream()', () => {
|
||||
testWithFileDescriptor('file', 'wx', async ({ file, flags }) => {
|
||||
const stream = await handler.createWriteStream(file, { flags })
|
||||
await fromCallback(pipeline, createTestDataStream(), stream)
|
||||
await expect(await handler.readFile('file')).toEqual(TEST_DATA)
|
||||
})
|
||||
|
||||
it('fails if parent dir is missing', async () => {
|
||||
const error = await rejectionOf(handler.createWriteStream('dir/file'))
|
||||
expect(error.code).toBe('ENOENT')
|
||||
})
|
||||
})
|
||||
|
||||
describe('#getInfo()', () => {
|
||||
let info
|
||||
beforeAll(async () => {
|
||||
|
||||
@@ -4,6 +4,7 @@ import execa from 'execa'
|
||||
import type RemoteHandler from './abstract'
|
||||
import RemoteHandlerLocal from './local'
|
||||
import RemoteHandlerNfs from './nfs'
|
||||
import RemoteHandlerS3 from './s3'
|
||||
import RemoteHandlerSmb from './smb'
|
||||
import RemoteHandlerSmbMount from './smb-mount'
|
||||
|
||||
@@ -13,6 +14,7 @@ export type Remote = { url: string }
|
||||
const HANDLERS = {
|
||||
file: RemoteHandlerLocal,
|
||||
nfs: RemoteHandlerNfs,
|
||||
s3: RemoteHandlerS3,
|
||||
}
|
||||
|
||||
try {
|
||||
|
||||
284
@xen-orchestra/fs/src/s3.js
Normal file
284
@xen-orchestra/fs/src/s3.js
Normal file
@@ -0,0 +1,284 @@
|
||||
import AWS from 'aws-sdk'
|
||||
import { parse } from 'xo-remote-parser'
|
||||
|
||||
import RemoteHandlerAbstract from './abstract'
|
||||
import { createChecksumStream } from './checksum'
|
||||
|
||||
// endpoints: https://docs.aws.amazon.com/general/latest/gr/s3.html

// S3 limits: https://docs.aws.amazon.com/AmazonS3/latest/dev/qfacts.html
const MIN_PART_SIZE = 1024 * 1024 * 5 // 5MB, minimum size of a non-last multipart fragment
const MAX_PART_SIZE = 1024 * 1024 * 1024 * 5 // 5GB, maximum size of a multipart fragment
const MAX_PARTS_COUNT = 10000 // maximum number of fragments per multipart upload
const MAX_OBJECT_SIZE = 1024 * 1024 * 1024 * 1024 * 5 // 5TB, maximum object size
const IDEAL_FRAGMENT_SIZE = Math.ceil(MAX_OBJECT_SIZE / MAX_PARTS_COUNT) // the smallest fragment size that still allows a 5TB upload in 10000 fragments, about 524MB
|
||||
// Remote handler storing files as objects in an S3(-compatible) bucket.
//
// The remote URL provides the endpoint host and credentials; the first
// segment of its path is the bucket name, the remaining segments form a key
// prefix under which all files are stored.
export default class S3Handler extends RemoteHandlerAbstract {
  constructor(remote, _opts) {
    super(remote)
    const { host, path, username, password } = parse(remote.url)

    // https://www.zenko.io/blog/first-things-first-getting-started-scality-s3-server/
    this._s3 = new AWS.S3({
      accessKeyId: username,
      apiVersion: '2006-03-01',
      endpoint: host,
      // path-style addressing (endpoint/bucket/key), required by most
      // S3-compatible servers
      s3ForcePathStyle: true,
      secretAccessKey: password,
      signatureVersion: 'v4',
    })
    // first non-empty path segment is the bucket, the rest is the key prefix
    const splitPath = path.split('/').filter(s => s.length)
    this._bucket = splitPath.shift()
    this._dir = splitPath.join('/')
  }

  get type() {
    return 's3'
  }

  // Common request parameters (bucket + key) for the object backing `file`.
  _createParams(file) {
    return { Bucket: this._bucket, Key: this._dir + file }
  }

  // Uploads the `input` stream as the object at `path`.
  //
  // When `checksum` is true, the data is also piped through a checksum
  // stream and the resulting digest is stored in a `<path>.checksum` object.
  async _outputStream(input, path, { checksum }) {
    let inputStream = input
    if (checksum) {
      // pass-through stream exposing a `checksum` promise; errors on the
      // source are forwarded so the upload fails too
      const checksumStream = createChecksumStream()
      const forwardError = error => {
        checksumStream.emit('error', error)
      }
      input.pipe(checksumStream)
      input.on('error', forwardError)
      inputStream = checksumStream
    }
    const upload = this._s3.upload(
      {
        ...this._createParams(path),
        Body: inputStream,
      },
      { partSize: IDEAL_FRAGMENT_SIZE }
    )
    await upload.promise()
    if (checksum) {
      // NOTE: this local `checksum` (the digest string) shadows the boolean
      // `checksum` option destructured from the parameters
      const checksum = await inputStream.checksum
      const params = {
        ...this._createParams(path + '.checksum'),
        Body: checksum,
      }
      await this._s3.upload(params).promise()
    }
    // wait for the task attached to the source stream, if any, to settle
    await input.task
  }

  async _writeFile(file, data, options) {
    return this._s3
      .putObject({ ...this._createParams(file), Body: data })
      .promise()
  }

  async _createReadStream(file, options) {
    return this._s3.getObject(this._createParams(file)).createReadStream()
  }

  async _unlink(file) {
    return this._s3.deleteObject(this._createParams(file)).promise()
  }

  // Lists the entry names directly under `dir`: S3 returns flat keys, so the
  // segment right after the prefix is extracted and deduplicated.
  async _list(dir) {
    function splitPath(path) {
      return path.split('/').filter(d => d.length)
    }

    const prefix = [this._dir, dir].join('/')
    const splitPrefix = splitPath(prefix)
    const request = this._s3.listObjectsV2({
      Bucket: this._bucket,
      Prefix: splitPrefix.join('/'),
    })
    const result = await request.promise()
    // NOTE(review): no ContinuationToken handling here — listObjectsV2 pages
    // its results, so listings with many keys may be truncated; confirm
    // against the expected size of remote directories.
    const uniq = new Set()
    for (const entry of result.Contents) {
      const line = splitPath(entry.Key)
      if (line.length > splitPrefix.length) {
        uniq.add(line[splitPrefix.length])
      }
    }
    return [...uniq]
  }

  // S3 has no rename operation: copy to the new key, then delete the old one.
  async _rename(oldPath, newPath) {
    const params = {
      ...this._createParams(newPath),
      CopySource: `/${this._bucket}/${this._dir}${oldPath}`,
    }
    await this._s3.copyObject(params).promise()
    await this._s3.deleteObject(this._createParams(oldPath)).promise()
  }

  async _getSize(file) {
    // `file` may be a file-descriptor-like object ({ fd: <path> }) — see
    // _openFile, which returns the path itself as the descriptor
    if (typeof file !== 'string') {
      file = file.fd
    }
    const result = await this._s3.headObject(this._createParams(file)).promise()
    return +result.ContentLength
  }

  // Reads `buffer.length` bytes starting at `position` via an HTTP Range
  // request and copies them into `buffer`.
  async _read(file, buffer, position = 0) {
    if (typeof file !== 'string') {
      file = file.fd
    }
    const params = this._createParams(file)
    params.Range = `bytes=${position}-${position + buffer.length - 1}`
    const result = await this._s3.getObject(params).promise()
    result.Body.copy(buffer)
    return { bytesRead: result.Body.length, buffer }
  }

  // Writes `buffer` at `position` inside an existing object.
  //
  // S3 objects are immutable, so the object must be rewritten: small objects
  // are downloaded, patched in memory and re-uploaded; larger ones are
  // rebuilt server-side with a multipart upload.
  async _write(file, buffer, position) {
    if (typeof file !== 'string') {
      file = file.fd
    }
    const uploadParams = this._createParams(file)
    const fileSize = +(await this._s3.headObject(uploadParams).promise())
      .ContentLength
    if (fileSize < MIN_PART_SIZE) {
      // small object: download, patch in memory, re-upload in one putObject
      const resultBuffer = Buffer.alloc(
        Math.max(fileSize, position + buffer.length)
      )
      const fileContent = (await this._s3.getObject(uploadParams).promise())
        .Body
      fileContent.copy(resultBuffer)
      buffer.copy(resultBuffer, position)
      await this._s3
        .putObject({ ...uploadParams, Body: resultBuffer })
        .promise()
      return { buffer, bytesWritten: buffer.length }
    } else {
      // using this trick: https://stackoverflow.com/a/38089437/72637
      // multipart fragments have a minimum size of 5MB and a max of 5GB unless they are last
      // splitting the file in 3 parts: [prefix, edit, suffix]
      // if `prefix` is bigger than 5MB, it will be sourced from uploadPartCopy()
      // otherwise it will be downloaded and concatenated to `edit`
      // `edit` will always be an upload part
      // `suffix` will always be sourced from uploadPartCopy()
      const multipartParams = await this._s3
        .createMultipartUpload(uploadParams)
        .promise()
      try {
        const parts = []
        const prefixSize = position
        let suffixOffset = prefixSize + buffer.length
        let suffixSize = Math.max(0, fileSize - suffixOffset)
        let hasSuffix = suffixSize > 0
        let editBuffer = buffer
        let editBufferOffset = position
        let partNumber = 1
        if (prefixSize < MIN_PART_SIZE) {
          // prefix too small to be its own part: download it and merge it
          // into the edit part
          const downloadParams = {
            ...uploadParams,
            Range: `bytes=0-${prefixSize - 1}`,
          }
          const prefixBuffer =
            prefixSize > 0
              ? (await this._s3.getObject(downloadParams).promise()).Body
              : Buffer.alloc(0)
          editBuffer = Buffer.concat([prefixBuffer, buffer])
          editBufferOffset = 0
        } else {
          // copy the prefix server-side, split into MAX_PART_SIZE fragments
          const fragmentsCount = Math.ceil(prefixSize / MAX_PART_SIZE)
          const prefixFragmentSize = Math.ceil(prefixSize / fragmentsCount)
          const lastFragmentSize =
            prefixFragmentSize * fragmentsCount - prefixSize
          let prefixPosition = 0
          for (let i = 0; i < fragmentsCount; i++) {
            const copyPrefixParams = {
              ...multipartParams,
              PartNumber: partNumber++,
              CopySource: `/${this._bucket}/${this._dir + file}`,
              CopySourceRange: `bytes=${prefixPosition}-${
                prefixPosition + prefixFragmentSize - 1
              }`,
            }
            const prefixPart = (
              await this._s3.uploadPartCopy(copyPrefixParams).promise()
            ).CopyPartResult
            parts.push({
              ETag: prefixPart.ETag,
              PartNumber: copyPrefixParams.PartNumber,
            })
            prefixPosition += prefixFragmentSize
          }
          if (lastFragmentSize) {
            // FIXME(review): empty branch — when `prefixSize` is not a
            // multiple of `prefixFragmentSize` the last copied fragment's
            // range extends past the prefix into the edited region; this
            // branch was apparently meant to handle that case and looks
            // unfinished. Confirm before relying on this code path.
          }
        }
        if (hasSuffix && editBuffer.length < MIN_PART_SIZE) {
          // the edit fragment is too short and is not the last fragment
          // let's steal from the suffix fragment to reach the minimum size
          // the suffix might be too short and itself entirely absorbed in the edit fragment, making it the last one.
          const complementSize = Math.min(
            MIN_PART_SIZE - editBuffer.length,
            suffixSize
          )
          const complementOffset = editBufferOffset + editBuffer.length
          suffixOffset += complementSize
          suffixSize -= complementSize
          hasSuffix = suffixSize > 0
          const prefixRange = `bytes=${complementOffset}-${
            complementOffset + complementSize - 1
          }`
          const downloadParams = { ...uploadParams, Range: prefixRange }
          const complementBuffer = (
            await this._s3.getObject(downloadParams).promise()
          ).Body
          editBuffer = Buffer.concat([editBuffer, complementBuffer])
        }
        // the edited region is always uploaded as a regular part
        const editParams = {
          ...multipartParams,
          Body: editBuffer,
          PartNumber: partNumber++,
        }
        const editPart = await this._s3.uploadPart(editParams).promise()
        parts.push({ ETag: editPart.ETag, PartNumber: editParams.PartNumber })
        if (hasSuffix) {
          // copy the suffix server-side, last fragment clamped to fileSize
          const suffixFragments = Math.ceil(suffixSize / MAX_PART_SIZE)
          const suffixFragmentsSize = Math.ceil(suffixSize / suffixFragments)
          let suffixFragmentOffset = suffixOffset
          for (let i = 0; i < suffixFragments; i++) {
            const fragmentEnd = suffixFragmentOffset + suffixFragmentsSize
            const suffixRange = `bytes=${suffixFragmentOffset}-${
              Math.min(fileSize, fragmentEnd) - 1
            }`
            const copySuffixParams = {
              ...multipartParams,
              PartNumber: partNumber++,
              CopySource: `/${this._bucket}/${this._dir + file}`,
              CopySourceRange: suffixRange,
            }
            const suffixPart = (
              await this._s3.uploadPartCopy(copySuffixParams).promise()
            ).CopyPartResult
            parts.push({
              ETag: suffixPart.ETag,
              PartNumber: copySuffixParams.PartNumber,
            })
            suffixFragmentOffset = fragmentEnd
          }
        }
        await this._s3
          .completeMultipartUpload({
            ...multipartParams,
            MultipartUpload: { Parts: parts },
          })
          .promise()
      } catch (e) {
        // abort so the partial upload does not keep accumulating storage
        await this._s3.abortMultipartUpload(multipartParams).promise()
        throw e
      }
    }
  }

  // Objects need no real open: the path itself acts as the file descriptor
  // (see the `file.fd` unwrapping in _getSize/_read/_write).
  async _openFile(path, flags) {
    return path
  }

  async _closeFile(fd) {}
}
|
||||
@@ -1,6 +1,8 @@
|
||||
# @xen-orchestra/log [](https://travis-ci.org/vatesfr/xen-orchestra)
|
||||
<!-- DO NOT EDIT MANUALLY, THIS FILE HAS BEEN GENERATED -->
|
||||
|
||||
> ${pkg.description}
|
||||
# @xen-orchestra/log
|
||||
|
||||
[](https://npmjs.org/package/@xen-orchestra/log)  [](https://bundlephobia.com/result?p=@xen-orchestra/log) [](https://npmjs.org/package/@xen-orchestra/log)
|
||||
|
||||
## Install
|
||||
|
||||
@@ -135,25 +137,6 @@ configure(transportSyslog())
|
||||
configure(transportSyslog('tcp://syslog.company.lan'))
|
||||
```
|
||||
|
||||
## Development
|
||||
|
||||
```
|
||||
# Install dependencies
|
||||
> yarn
|
||||
|
||||
# Run the tests
|
||||
> yarn test
|
||||
|
||||
# Continuously compile
|
||||
> yarn dev
|
||||
|
||||
# Continuously run the tests
|
||||
> yarn dev-test
|
||||
|
||||
# Build for production (automatically called by npm install)
|
||||
> yarn build
|
||||
```
|
||||
|
||||
## Contributions
|
||||
|
||||
Contributions are _very_ welcomed, either on the documentation or on
|
||||
@@ -161,10 +144,10 @@ the code.
|
||||
|
||||
You may:
|
||||
|
||||
- report any [issue](https://github.com/vatesfr/xo-web/issues/)
|
||||
- report any [issue](https://github.com/vatesfr/xen-orchestra/issues)
|
||||
you've encountered;
|
||||
- fork and create a pull request.
|
||||
|
||||
## License
|
||||
|
||||
ISC © [Vates SAS](https://vates.fr)
|
||||
[ISC](https://spdx.org/licenses/ISC) © [Vates SAS](https://vates.fr)
|
||||
|
||||
122
@xen-orchestra/log/USAGE.md
Normal file
122
@xen-orchestra/log/USAGE.md
Normal file
@@ -0,0 +1,122 @@
|
||||
Everywhere something should be logged:
|
||||
|
||||
```js
|
||||
import { createLogger } from '@xen-orchestra/log'
|
||||
|
||||
const log = createLogger('my-module')
|
||||
|
||||
log.debug('only useful for debugging')
|
||||
log.info('this information is relevant to the user')
|
||||
log.warn('something went wrong but did not prevent current action')
|
||||
log.error('something went wrong')
|
||||
log.fatal('service/app is going down')
|
||||
|
||||
// you can add contextual info
|
||||
log.debug('new API request', {
|
||||
method: 'foo',
|
||||
params: [ 'bar', 'baz' ],
user: 'qux'
|
||||
})
|
||||
|
||||
// by convention, errors go into the `error` field
|
||||
log.error('could not join server', {
|
||||
error,
|
||||
server: 'example.org',
|
||||
})
|
||||
```
|
||||
|
||||
Then, at the application level, configure how the logs are handled:
|
||||
|
||||
```js
|
||||
import { createLogger } from '@xen-orchestra/log'
|
||||
import { configure, catchGlobalErrors } from '@xen-orchestra/log/configure'
|
||||
import transportConsole from '@xen-orchestra/log/transports/console'
|
||||
import transportEmail from '@xen-orchestra/log/transports/email'
|
||||
|
||||
const transport = transportEmail({
|
||||
service: 'gmail',
|
||||
auth: {
|
||||
user: 'jane.smith@gmail.com',
|
||||
pass: 'H&NbECcpXF|pyXe#%ZEb',
|
||||
},
|
||||
from: 'jane.smith@gmail.com',
|
||||
to: ['jane.smith@gmail.com', 'sam.doe@yahoo.com'],
|
||||
})
|
||||
|
||||
configure([
|
||||
{
|
||||
// if filter is a string, then it is a pattern
|
||||
// (https://github.com/visionmedia/debug#wildcards) which is
|
||||
// matched against the namespace of the logs
|
||||
filter: process.env.DEBUG,
|
||||
|
||||
transport: transportConsole(),
|
||||
},
|
||||
{
|
||||
// only levels >= warn
|
||||
level: 'warn',
|
||||
|
||||
transport,
|
||||
},
|
||||
])
|
||||
|
||||
// send all global errors (uncaught exceptions, warnings, unhandled rejections)
|
||||
// to this logger
|
||||
catchGlobalErrors(createLogger('app'))
|
||||
```
|
||||
|
||||
### Transports
|
||||
|
||||
#### Console
|
||||
|
||||
```js
|
||||
import transportConsole from '@xen-orchestra/log/transports/console'
|
||||
|
||||
configure(transportConsole())
|
||||
```
|
||||
|
||||
#### Email
|
||||
|
||||
Optional dependency:
|
||||
|
||||
```
|
||||
> yarn add nodemailer pretty-format
|
||||
```
|
||||
|
||||
Configuration:
|
||||
|
||||
```js
|
||||
import transportEmail from '@xen-orchestra/log/transports/email'
|
||||
|
||||
configure(
|
||||
transportEmail({
|
||||
service: 'gmail',
|
||||
auth: {
|
||||
user: 'jane.smith@gmail.com',
|
||||
pass: 'H&NbECcpXF|pyXe#%ZEb',
|
||||
},
|
||||
from: 'jane.smith@gmail.com',
|
||||
to: ['jane.smith@gmail.com', 'sam.doe@yahoo.com'],
|
||||
})
|
||||
)
|
||||
```
|
||||
|
||||
#### Syslog
|
||||
|
||||
Optional dependency:
|
||||
|
||||
```
|
||||
> yarn add split-host syslog-client
|
||||
```
|
||||
|
||||
Configuration:
|
||||
|
||||
```js
|
||||
import transportSyslog from '@xen-orchestra/log/transports/syslog'
|
||||
|
||||
// By default, log to udp://localhost:514
|
||||
configure(transportSyslog())
|
||||
|
||||
// But TCP, a different host, or a different port can be used
|
||||
configure(transportSyslog('tcp://syslog.company.lan'))
|
||||
```
|
||||
@@ -13,8 +13,8 @@
|
||||
"url": "https://github.com/vatesfr/xen-orchestra.git"
|
||||
},
|
||||
"author": {
|
||||
"name": "Julien Fontanet",
|
||||
"email": "julien.fontanet@vates.fr"
|
||||
"name": "Vates SAS",
|
||||
"url": "https://vates.fr"
|
||||
},
|
||||
"preferGlobal": false,
|
||||
"main": "dist/",
|
||||
@@ -39,7 +39,7 @@
|
||||
"@babel/core": "^7.0.0",
|
||||
"@babel/preset-env": "^7.0.0",
|
||||
"babel-plugin-lodash": "^3.3.2",
|
||||
"cross-env": "^6.0.3",
|
||||
"cross-env": "^7.0.2",
|
||||
"index-modules": "^0.3.0",
|
||||
"rimraf": "^3.0.0"
|
||||
},
|
||||
@@ -49,7 +49,7 @@
|
||||
"dev": "cross-env NODE_ENV=development babel --watch --source-maps --out-dir=dist/ src/",
|
||||
"prebuild": "yarn run clean",
|
||||
"predev": "yarn run prebuild",
|
||||
"prepare": "yarn run build",
|
||||
"prepublishOnly": "yarn run build",
|
||||
"postversion": "npm publish"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -12,7 +12,7 @@ const createTransport = config => {
|
||||
if (Array.isArray(config)) {
|
||||
const transports = config.map(createTransport)
|
||||
const { length } = transports
|
||||
return function() {
|
||||
return function () {
|
||||
for (let i = 0; i < length; ++i) {
|
||||
transports[i].apply(this, arguments)
|
||||
}
|
||||
@@ -30,14 +30,14 @@ const createTransport = config => {
|
||||
}
|
||||
|
||||
const orig = transport
|
||||
transport = function(log) {
|
||||
transport = function (log) {
|
||||
if ((level !== undefined && log.level >= level) || filter(log)) {
|
||||
return orig.apply(this, arguments)
|
||||
}
|
||||
}
|
||||
} else if (level !== undefined) {
|
||||
const orig = transport
|
||||
transport = function(log) {
|
||||
transport = function (log) {
|
||||
if (log.level >= level) {
|
||||
return orig.apply(this, arguments)
|
||||
}
|
||||
|
||||
@@ -38,7 +38,7 @@ const { prototype } = Logger
|
||||
for (const name in LEVELS) {
|
||||
const level = LEVELS[name]
|
||||
|
||||
prototype[name.toLowerCase()] = function(message, data) {
|
||||
prototype[name.toLowerCase()] = function (message, data) {
|
||||
if (typeof message !== 'string') {
|
||||
if (message instanceof Error) {
|
||||
data = { error: message }
|
||||
@@ -54,13 +54,13 @@ for (const name in LEVELS) {
|
||||
}
|
||||
}
|
||||
|
||||
prototype.wrap = function(message, fn) {
|
||||
prototype.wrap = function (message, fn) {
|
||||
const logger = this
|
||||
const warnAndRethrow = error => {
|
||||
logger.warn(message, { error })
|
||||
throw error
|
||||
}
|
||||
return function() {
|
||||
return function () {
|
||||
try {
|
||||
const result = fn.apply(this, arguments)
|
||||
const then = result != null && result.then
|
||||
|
||||
@@ -13,10 +13,7 @@ export const evalTemplate = (tpl, data) => {
|
||||
// -------------------------------------------------------------------
|
||||
|
||||
const compileGlobPatternFragment = pattern =>
|
||||
pattern
|
||||
.split('*')
|
||||
.map(escapeRegExp)
|
||||
.join('.*')
|
||||
pattern.split('*').map(escapeRegExp).join('.*')
|
||||
|
||||
export const compileGlobPattern = pattern => {
|
||||
const no = []
|
||||
|
||||
@@ -1,6 +1,8 @@
|
||||
# @xen-orchestra/mixin [](https://travis-ci.org/${pkg.shortGitHubPath})
|
||||
<!-- DO NOT EDIT MANUALLY, THIS FILE HAS BEEN GENERATED -->
|
||||
|
||||
> ${pkg.description}
|
||||
# @xen-orchestra/mixin
|
||||
|
||||
[](https://npmjs.org/package/@xen-orchestra/mixin)  [](https://bundlephobia.com/result?p=@xen-orchestra/mixin) [](https://npmjs.org/package/@xen-orchestra/mixin)
|
||||
|
||||
## Install
|
||||
|
||||
@@ -10,29 +12,6 @@ Installation of the [npm package](https://npmjs.org/package/@xen-orchestra/mixin
|
||||
> npm install --save @xen-orchestra/mixin
|
||||
```
|
||||
|
||||
## Usage
|
||||
|
||||
**TODO**
|
||||
|
||||
## Development
|
||||
|
||||
```
|
||||
# Install dependencies
|
||||
> yarn
|
||||
|
||||
# Run the tests
|
||||
> yarn test
|
||||
|
||||
# Continuously compile
|
||||
> yarn dev
|
||||
|
||||
# Continuously run the tests
|
||||
> yarn dev-test
|
||||
|
||||
# Build for production (automatically called by npm install)
|
||||
> yarn build
|
||||
```
|
||||
|
||||
## Contributions
|
||||
|
||||
Contributions are _very_ welcomed, either on the documentation or on
|
||||
@@ -46,4 +25,4 @@ You may:
|
||||
|
||||
## License
|
||||
|
||||
ISC © [Vates SAS](https://vates.fr)
|
||||
[ISC](https://spdx.org/licenses/ISC) © [Vates SAS](https://vates.fr)
|
||||
|
||||
0
@xen-orchestra/mixin/USAGE.md
Normal file
0
@xen-orchestra/mixin/USAGE.md
Normal file
@@ -13,8 +13,8 @@
|
||||
"url": "https://github.com/vatesfr/xen-orchestra.git"
|
||||
},
|
||||
"author": {
|
||||
"name": "Julien Fontanet",
|
||||
"email": "julien.fontanet@vates.fr"
|
||||
"name": "Vates SAS",
|
||||
"url": "https://vates.fr"
|
||||
},
|
||||
"preferGlobal": false,
|
||||
"main": "dist/",
|
||||
@@ -37,7 +37,7 @@
|
||||
"@babel/preset-env": "^7.0.0",
|
||||
"babel-plugin-dev": "^1.0.0",
|
||||
"babel-plugin-lodash": "^3.3.2",
|
||||
"cross-env": "^6.0.3",
|
||||
"cross-env": "^7.0.2",
|
||||
"rimraf": "^3.0.0"
|
||||
},
|
||||
"scripts": {
|
||||
|
||||
3
@xen-orchestra/openflow/.babelrc.js
Normal file
3
@xen-orchestra/openflow/.babelrc.js
Normal file
@@ -0,0 +1,3 @@
|
||||
module.exports = require('../../@xen-orchestra/babel-config')(
|
||||
require('./package.json')
|
||||
)
|
||||
24
@xen-orchestra/openflow/.npmignore
Normal file
24
@xen-orchestra/openflow/.npmignore
Normal file
@@ -0,0 +1,24 @@
|
||||
/benchmark/
|
||||
/benchmarks/
|
||||
*.bench.js
|
||||
*.bench.js.map
|
||||
|
||||
/examples/
|
||||
example.js
|
||||
example.js.map
|
||||
*.example.js
|
||||
*.example.js.map
|
||||
|
||||
/fixture/
|
||||
/fixtures/
|
||||
*.fixture.js
|
||||
*.fixture.js.map
|
||||
*.fixtures.js
|
||||
*.fixtures.js.map
|
||||
|
||||
/test/
|
||||
/tests/
|
||||
*.spec.js
|
||||
*.spec.js.map
|
||||
|
||||
__snapshots__/
|
||||
141
@xen-orchestra/openflow/README.md
Normal file
141
@xen-orchestra/openflow/README.md
Normal file
@@ -0,0 +1,141 @@
|
||||
<!-- DO NOT EDIT MANUALLY, THIS FILE HAS BEEN GENERATED -->
|
||||
|
||||
# @xen-orchestra/openflow
|
||||
|
||||
[](https://npmjs.org/package/@xen-orchestra/openflow)  [](https://bundlephobia.com/result?p=@xen-orchestra/openflow) [](https://npmjs.org/package/@xen-orchestra/openflow)
|
||||
|
||||
> Pack and unpack OpenFlow messages
|
||||
|
||||
## Install
|
||||
|
||||
Installation of the [npm package](https://npmjs.org/package/@xen-orchestra/openflow):
|
||||
|
||||
```
|
||||
> npm install --save @xen-orchestra/openflow
|
||||
```
|
||||
|
||||
## Usage
|
||||
|
||||
Unpacking a received OpenFlow message from a socket:
|
||||
|
||||
```js
|
||||
import openflow from '@xen-orchestra/openflow'
|
||||
import parse from '@xen-orchestra/openflow/parse-socket'
|
||||
|
||||
const version = openflow.versions.openFlow11
|
||||
const ofProtocol = openflow.protocols[version]
|
||||
|
||||
function parseOpenFlowMessages(socket) {
|
||||
for await (const msg of parse(socket)) {
|
||||
if (msg.header !== undefined) {
|
||||
const ofType = msg.header.type
|
||||
switch (ofType) {
|
||||
case ofProtocol.type.hello:
|
||||
// Handle OFPT_HELLO
|
||||
break
|
||||
case ofProtocol.type.error:
|
||||
// Handle OFPT_ERROR
|
||||
break
|
||||
case ofProtocol.type.echoRequest:
|
||||
// Handle OFPT_ECHO_REQUEST
|
||||
break
|
||||
case ofProtocol.type.packetIn:
|
||||
// Handle OFPT_PACKET_IN
|
||||
break
|
||||
case ofProtocol.type.featuresReply:
|
||||
// Handle OFPT_FEATURES_REPLY
|
||||
break
|
||||
case ofProtocol.type.getConfigReply:
|
||||
// Handle OFPT_GET_CONFIG_REPLY
|
||||
break
|
||||
case ofProtocol.type.portStatus:
|
||||
// Handle OFPT_PORT_STATUS
|
||||
break
|
||||
case ofProtocol.type.flowRemoved:
|
||||
// Handle OFPT_FLOW_REMOVED
|
||||
break
|
||||
default:
|
||||
// Error: Invalid type
|
||||
break
|
||||
}
|
||||
} else {
|
||||
// Error: Message is unparseable
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
Unpacking an OpenFlow message from a buffer:
|
||||
|
||||
```js
|
||||
import openflow from '@xen-orchestra/openflow'
|
||||
|
||||
const version = openflow.versions.openFlow11
|
||||
const ofProtocol = openflow.protocols[version]
|
||||
|
||||
function processOpenFlowMessage(buf) {
|
||||
const unpacked = openflow.unpack(buf)
|
||||
const ofType = unpacked.header.type
|
||||
switch (ofType) {
|
||||
case ofProtocol.type.hello:
|
||||
// Handle OFPT_HELLO
|
||||
break
|
||||
case ofProtocol.type.error:
|
||||
// Handle OFPT_ERROR
|
||||
break
|
||||
case ofProtocol.type.echoRequest:
|
||||
// Handle OFPT_ECHO_REQUEST
|
||||
break
|
||||
case ofProtocol.type.packetIn:
|
||||
// Handle OFPT_PACKET_IN
|
||||
break
|
||||
case ofProtocol.type.featuresReply:
|
||||
// Handle OFPT_FEATURES_REPLY
|
||||
break
|
||||
case ofProtocol.type.getConfigReply:
|
||||
// Handle OFPT_GET_CONFIG_REPLY
|
||||
break
|
||||
case ofProtocol.type.portStatus:
|
||||
// Handle OFPT_PORT_STATUS
|
||||
break
|
||||
case ofProtocol.type.flowRemoved:
|
||||
// Handle OFPT_FLOW_REMOVED
|
||||
break
|
||||
default:
|
||||
// Error: Invalid type
|
||||
break
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
Packing an OpenFlow OFPT_HELLO message:
|
||||
|
||||
```js
|
||||
import openflow from '@xen-orchestra/openflow'
|
||||
|
||||
const version = openflow.versions.openFlow11
|
||||
const ofProtocol = openflow.protocols[version]
|
||||
|
||||
const buf = openflow.pack({
|
||||
header: {
|
||||
version,
|
||||
type: ofProtocol.type.hello,
|
||||
xid: 1,
|
||||
},
|
||||
})
|
||||
```
|
||||
|
||||
## Contributions
|
||||
|
||||
Contributions are _very_ welcomed, either on the documentation or on
|
||||
the code.
|
||||
|
||||
You may:
|
||||
|
||||
- report any [issue](https://github.com/vatesfr/xen-orchestra/issues)
|
||||
you've encountered;
|
||||
- fork and create a pull request.
|
||||
|
||||
## License
|
||||
|
||||
[ISC](https://spdx.org/licenses/ISC) © [Vates SAS](https://vates.fr)
|
||||
108
@xen-orchestra/openflow/USAGE.md
Normal file
108
@xen-orchestra/openflow/USAGE.md
Normal file
@@ -0,0 +1,108 @@
|
||||
Unpacking a received OpenFlow message from a socket:
|
||||
|
||||
```js
|
||||
import openflow from '@xen-orchestra/openflow'
|
||||
import parse from '@xen-orchestra/openflow/parse-socket'
|
||||
|
||||
const version = openflow.versions.openFlow11
|
||||
const ofProtocol = openflow.protocols[version]
|
||||
|
||||
function parseOpenFlowMessages(socket) {
|
||||
for await (const msg of parse(socket)) {
|
||||
if (msg.header !== undefined) {
|
||||
const ofType = msg.header.type
|
||||
switch (ofType) {
|
||||
case ofProtocol.type.hello:
|
||||
// Handle OFPT_HELLO
|
||||
break
|
||||
case ofProtocol.type.error:
|
||||
// Handle OFPT_ERROR
|
||||
break
|
||||
case ofProtocol.type.echoRequest:
|
||||
// Handle OFPT_ECHO_REQUEST
|
||||
break
|
||||
case ofProtocol.type.packetIn:
|
||||
// Handle OFPT_PACKET_IN
|
||||
break
|
||||
case ofProtocol.type.featuresReply:
|
||||
// Handle OFPT_FEATURES_REPLY
|
||||
break
|
||||
case ofProtocol.type.getConfigReply:
|
||||
// Handle OFPT_GET_CONFIG_REPLY
|
||||
break
|
||||
case ofProtocol.type.portStatus:
|
||||
// Handle OFPT_PORT_STATUS
|
||||
break
|
||||
case ofProtocol.type.flowRemoved:
|
||||
// Handle OFPT_FLOW_REMOVED
|
||||
break
|
||||
default:
|
||||
// Error: Invalid type
|
||||
break
|
||||
}
|
||||
} else {
|
||||
// Error: Message is unparseable
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
Unpacking an OpenFlow message from a buffer:
|
||||
|
||||
```js
|
||||
import openflow from '@xen-orchestra/openflow'
|
||||
|
||||
const version = openflow.versions.openFlow11
|
||||
const ofProtocol = openflow.protocols[version]
|
||||
|
||||
function processOpenFlowMessage(buf) {
|
||||
const unpacked = openflow.unpack(buf)
|
||||
const ofType = unpacked.header.type
|
||||
switch (ofType) {
|
||||
case ofProtocol.type.hello:
|
||||
// Handle OFPT_HELLO
|
||||
break
|
||||
case ofProtocol.type.error:
|
||||
// Handle OFPT_ERROR
|
||||
break
|
||||
case ofProtocol.type.echoRequest:
|
||||
// Handle OFPT_ECHO_REQUEST
|
||||
break
|
||||
case ofProtocol.type.packetIn:
|
||||
// Handle OFPT_PACKET_IN
|
||||
break
|
||||
case ofProtocol.type.featuresReply:
|
||||
// Handle OFPT_FEATURES_REPLY
|
||||
break
|
||||
case ofProtocol.type.getConfigReply:
|
||||
// Handle OFPT_GET_CONFIG_REPLY
|
||||
break
|
||||
case ofProtocol.type.portStatus:
|
||||
// Handle OFPT_PORT_STATUS
|
||||
break
|
||||
case ofProtocol.type.flowRemoved:
|
||||
// Handle OFPT_FLOW_REMOVED
|
||||
break
|
||||
default:
|
||||
// Error: Invalid type
|
||||
break
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
Packing an OpenFlow OFPT_HELLO message:
|
||||
|
||||
```js
|
||||
import openflow from '@xen-orchestra/openflow'
|
||||
|
||||
const version = openflow.versions.openFlow11
|
||||
const ofProtocol = openflow.protocols[version]
|
||||
|
||||
const buf = openflow.pack({
|
||||
header: {
|
||||
version,
|
||||
type: ofProtocol.type.hello,
|
||||
xid: 1,
|
||||
},
|
||||
})
|
||||
```
|
||||
40
@xen-orchestra/openflow/package.json
Normal file
40
@xen-orchestra/openflow/package.json
Normal file
@@ -0,0 +1,40 @@
|
||||
{
|
||||
"description": "Pack and unpack OpenFlow messages",
|
||||
"private": false,
|
||||
"name": "@xen-orchestra/openflow",
|
||||
"homepage": "https://github.com/vatesfr/xen-orchestra/tree/master/@xen-orchestra/openflow",
|
||||
"bugs": "https://github.com/vatesfr/xen-orchestra/issues",
|
||||
"repository": {
|
||||
"directory": "@xen-orchestra/openflow",
|
||||
"type": "git",
|
||||
"url": "https://github.com/vatesfr/xen-orchestra.git"
|
||||
},
|
||||
"version": "0.1.1",
|
||||
"engines": {
|
||||
"node": ">=8.10"
|
||||
},
|
||||
"main": "dist/",
|
||||
"scripts": {
|
||||
"build": "cross-env NODE_ENV=production babel --source-maps --out-dir=dist/ src/",
|
||||
"dev": "cross-env NODE_ENV=development babel --watch --source-maps --out-dir=dist/ src/",
|
||||
"postversion": "npm publish --access public",
|
||||
"prebuild": "rimraf dist/",
|
||||
"predev": "yarn run prebuild",
|
||||
"prepublishOnly": "yarn run build"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@babel/cli": "^7.7.4",
|
||||
"@babel/core": "^7.7.4",
|
||||
"@babel/preset-env": "^7.7.4",
|
||||
"cross": "^1.0.0",
|
||||
"rimraf": "^3.0.0"
|
||||
},
|
||||
"dependencies": {
|
||||
"@vates/read-chunk": "^0.1.0"
|
||||
},
|
||||
"author": {
|
||||
"name": "Vates SAS",
|
||||
"url": "https://vates.fr"
|
||||
},
|
||||
"license": "ISC"
|
||||
}
|
||||
1
@xen-orchestra/openflow/parse-socket.js
Normal file
1
@xen-orchestra/openflow/parse-socket.js
Normal file
@@ -0,0 +1 @@
|
||||
// CommonJS entry point: re-export the Babel-transpiled implementation from dist/.
module.exports = require('./dist/parse-socket')
|
||||
9
@xen-orchestra/openflow/src/default-header-scheme.js
Normal file
9
@xen-orchestra/openflow/src/default-header-scheme.js
Normal file
@@ -0,0 +1,9 @@
|
||||
// Version-independent layout of the OpenFlow message header.
// Every OpenFlow version starts its messages with this same 8-byte header,
// so this scheme can be used to peek at the version before full parsing.
export default {
  // Total header size in bytes.
  size: 8,
  // Byte offset of each field within the header.
  offsets: {
    version: 0, // uint8: OpenFlow protocol version
    type: 1, // uint8: message type
    length: 2, // uint16 (big-endian): total message length, header included
    xid: 4, // uint32 (big-endian): transaction id
  },
}
|
||||
38
@xen-orchestra/openflow/src/index.js
Normal file
38
@xen-orchestra/openflow/src/index.js
Normal file
@@ -0,0 +1,38 @@
|
||||
import get from './util/get-from-map'
import ofVersion from './version'
// TODO: More openflow versions
import of11 from './openflow-11/index'
import scheme from './default-header-scheme'

// =============================================================================

// Supported protocol implementations, keyed by wire version number.
const OPENFLOW = {
  [ofVersion.openFlow11]: of11,
}

// Resolve the implementation for a version or throw a descriptive error.
const getImplementation = version =>
  get(OPENFLOW, version, `Unsupported OpenFlow version: ${version}`)

// =============================================================================

export default {
  versions: ofVersion,
  protocols: { [ofVersion.openFlow11]: of11.protocol },

  // ---------------------------------------------------------------------------

  // Serialize a message object into a Buffer, dispatching on
  // object.header.version.
  pack: object => getImplementation(object.header.version).pack(object),

  // Deserialize the message starting at `offset` in `buffer`; the version is
  // read from the version-independent header to pick the implementation.
  unpack: (buffer, offset = 0) =>
    getImplementation(
      buffer.readUInt8(offset + scheme.offsets.version)
    ).unpack(buffer, offset),
}
|
||||
58
@xen-orchestra/openflow/src/openflow-11/action/action.js
Normal file
58
@xen-orchestra/openflow/src/openflow-11/action/action.js
Normal file
@@ -0,0 +1,58 @@
|
||||
import get from '../../util/get-from-map'

import ofOutput from './output'
import of from '../openflow-11'

// =============================================================================

// Dispatch table: OpenFlow 1.1 action type -> pack/unpack codec.
const ACTION = {
  [of.actionType.output]: ofOutput,
  /* TODO:
  [of.actionType.group]: ,
  [of.actionType.setVlanId]: ,
  [of.actionType.setVlanPcp]: ,
  [of.actionType.setDlSrc]: ,
  [of.actionType.setDlDst]: ,
  [of.actionType.setNwSrc]: ,
  [of.actionType.setNwDst]: ,
  [of.actionType.setNwTos]: ,
  [of.actionType.setNwEcn]: ,
  [of.actionType.setTpSrc]: ,
  [of.actionType.setTpDst]: ,
  [of.actionType.copyTtlOut]: ,
  [of.actionType.copyTtlIn]: ,
  [of.actionType.setMplsLabel]: ,
  [of.actionType.setMplsTc]: ,
  [of.actionType.setMplsTtl]: ,
  [of.actionType.decMplsTtl]: ,
  [of.actionType.pushVlan]: ,
  [of.actionType.popVlan]: ,
  [of.actionType.pushMpls]: ,
  [of.actionType.popMpls]: ,
  [of.actionType.setQueue]: ,
  [of.actionType.setNwTtl]: ,
  [of.actionType.decNwTtl]: ,
  [of.actionType.experimenter]:
  */
}

// Resolve the codec for an action type or throw a descriptive error.
const getCodec = type => get(ACTION, type, `Invalid action type: ${type}`)

// =============================================================================

export default {
  // Serialize an action, dispatching on object.type.
  pack: (object, buffer = undefined, offset = 0) =>
    getCodec(object.type).pack(object, buffer, offset),

  // Deserialize the action at `offset`, dispatching on the wire type field.
  unpack: (buffer, offset = 0) =>
    getCodec(
      buffer.readUInt16BE(offset + of.offsets.actionHeader.type)
    ).unpack(buffer, offset),
}
|
||||
45
@xen-orchestra/openflow/src/openflow-11/action/output.js
Normal file
45
@xen-orchestra/openflow/src/openflow-11/action/output.js
Normal file
@@ -0,0 +1,45 @@
|
||||
import assert from 'assert'

import of from '../openflow-11'

// =============================================================================

const OFFSETS = of.offsets.actionOutput

const PAD_LENGTH = 6

// =============================================================================

export default {
  // Serialize an OFPAT_OUTPUT action. Mutates `object.len` with the fixed
  // action size and returns the buffer (allocating one when none is given).
  pack: (object, buffer = undefined, offset = 0) => {
    assert(object.type === of.actionType.output)
    object.len = of.sizes.actionOutput

    if (buffer === undefined) {
      buffer = Buffer.alloc(object.len)
    }

    buffer.writeUInt16BE(object.type, offset + OFFSETS.type)
    buffer.writeUInt16BE(object.len, offset + OFFSETS.len)
    buffer.writeUInt32BE(object.port, offset + OFFSETS.port)
    buffer.writeUInt16BE(object.max_len, offset + OFFSETS.maxLen)

    // Zero the trailing padding bytes.
    buffer.fill(0, offset + OFFSETS.pad, offset + OFFSETS.pad + PAD_LENGTH)

    return buffer
  },

  // Deserialize an OFPAT_OUTPUT action; asserts the type and length fields
  // match the expected fixed-size structure.
  unpack: (buffer, offset = 0) => {
    const type = buffer.readUInt16BE(offset + OFFSETS.type)
    assert(type === of.actionType.output)

    const len = buffer.readUInt16BE(offset + OFFSETS.len)
    assert(len === of.sizes.actionOutput)

    return {
      type,
      len,
      port: buffer.readUInt32BE(offset + OFFSETS.port),
      max_len: buffer.readUInt16BE(offset + OFFSETS.maxLen),
    }
  },
}
|
||||
49
@xen-orchestra/openflow/src/openflow-11/index.js
Normal file
49
@xen-orchestra/openflow/src/openflow-11/index.js
Normal file
@@ -0,0 +1,49 @@
|
||||
import get from '../util/get-from-map'

import echo from './message/echo'
import error from './message/error'
import hello from './message/hello'
import featuresRequest from './message/features-request'
import featuresReply from './message/features-reply'
import getConfigRequest from './message/get-config-request'
import switchConfig from './message/switch-config'
import flowMod from './message/flow-mod'
import of from './openflow-11'

// =============================================================================

// Dispatch table: OpenFlow 1.1 message type -> pack/unpack codec.
const MESSAGE = {
  [of.type.hello]: hello,
  [of.type.error]: error,
  [of.type.featuresRequest]: featuresRequest,
  [of.type.featuresReply]: featuresReply,
  [of.type.echoRequest]: echo,
  [of.type.echoReply]: echo,
  [of.type.getConfigRequest]: getConfigRequest,
  [of.type.getConfigReply]: switchConfig,
  [of.type.setConfig]: switchConfig,
  [of.type.flowMod]: flowMod,
}

// Resolve the codec for a message type or throw a descriptive error.
const getCodec = type =>
  get(MESSAGE, type, `Invalid OpenFlow message type: ${type}`)

// =============================================================================

export default {
  protocol: of,

  // ---------------------------------------------------------------------------

  // Serialize a message object, dispatching on object.header.type.
  pack: object => getCodec(object.header.type).pack(object),

  // Deserialize the message at `offset`, dispatching on the header type byte.
  unpack: (buffer, offset = 0) =>
    getCodec(buffer.readUInt8(offset + of.offsets.header.type)).unpack(
      buffer,
      offset
    ),
}
|
||||
102
@xen-orchestra/openflow/src/openflow-11/instruction/actions.js
Normal file
102
@xen-orchestra/openflow/src/openflow-11/instruction/actions.js
Normal file
@@ -0,0 +1,102 @@
|
||||
import assert from 'assert'

import get from '../../util/get-from-map'

import ofAction from '../action/action'
import of from '../openflow-11'

// =============================================================================

// Size in bytes of each supported action structure, by action type.
const SIZES = {
  [of.actionType.output]: of.sizes.actionOutput,
  [of.actionType.group]: of.sizes.actionGroup,
  [of.actionType.setVlanId]: of.sizes.actionVlanId,
  [of.actionType.setVlanPcp]: of.sizes.actionVlanPcp,
  [of.actionType.setDlSrc]: of.sizes.actionDlAddr,
  [of.actionType.setDlDst]: of.sizes.actionDlAddr,
  [of.actionType.setNwSrc]: of.sizes.actionNwAddr,
  [of.actionType.setNwDst]: of.sizes.actionNwAddr,
  [of.actionType.setNwTos]: of.sizes.actionNwTos,
  [of.actionType.setNwEcn]: of.sizes.actionNwEcn,
  [of.actionType.setTpSrc]: of.sizes.actionTpPort,
  [of.actionType.setTpDst]: of.sizes.actionTpPort,
  [of.actionType.copyTtlOut]: of.sizes.actionHeader,
  [of.actionType.copyTtlIn]: of.sizes.actionHeader,
  [of.actionType.setMplsLabel]: of.sizes.actionMplsLabel,
  [of.actionType.setMplsTc]: of.sizes.actionMplsTc,
  [of.actionType.setMplsTtl]: of.sizes.actionMplsTtl,
  [of.actionType.decMplsTtl]: of.sizes.actionMplsTtl,
  [of.actionType.pushVlan]: of.sizes.actionPush,
  [of.actionType.popVlan]: of.sizes.actionHeader,
  [of.actionType.pushMpls]: of.sizes.actionPush,
  [of.actionType.popMpls]: of.sizes.actionPopMpls,
  [of.actionType.setQueue]: of.sizes.actionSetQueue,
  [of.actionType.setNwTtl]: of.sizes.actionNwTtl,
  [of.actionType.decNwTtl]: of.sizes.actionNwTtl,
}

// -----------------------------------------------------------------------------

// Instruction types handled by this codec (action-list instructions).
const TYPES = [
  of.instructionType.clearActions,
  of.instructionType.writeActions,
  of.instructionType.applyActions,
]
const OFFSETS = of.offsets.instructionActions

const PAD_LENGTH = 4

// =============================================================================

export default {
  // Serialize a WRITE_ACTIONS / APPLY_ACTIONS / CLEAR_ACTIONS instruction.
  // Mutates `object.len` with the computed total length and returns the
  // buffer (allocating one when none is provided), like the other codecs.
  pack: (object, buffer = undefined, offset = 0) => {
    const { type } = object
    assert(TYPES.includes(type))
    object.len = of.sizes.instructionActions
    const { actions = [] } = object
    actions.forEach(action => {
      assert(Object.values(of.actionType).includes(action.type))
      // TODO: manage experimenter
      object.len += get(
        SIZES,
        action.type,
        `Invalid action type: ${action.type}`
      )
    })

    buffer = buffer !== undefined ? buffer : Buffer.alloc(object.len)

    buffer.writeUInt16BE(type, offset + OFFSETS.type)
    buffer.writeUInt16BE(object.len, offset + OFFSETS.len)
    buffer.fill(0, offset + OFFSETS.pad, offset + OFFSETS.pad + PAD_LENGTH)

    let actionOffset = offset + OFFSETS.actions
    actions.forEach(action => {
      ofAction.pack(action, buffer, actionOffset)
      actionOffset += SIZES[action.type]
    })

    // FIX: return the buffer like every other pack() in this package
    // (previously this function returned undefined).
    return buffer
  },

  // Deserialize an action-list instruction at `offset`.
  unpack: (buffer = undefined, offset = 0) => {
    const type = buffer.readUInt16BE(offset + OFFSETS.type)
    assert(TYPES.includes(type))

    const object = { type }
    object.len = buffer.readUInt16BE(offset + OFFSETS.len)

    if (type === of.instructionType.clearActions) {
      // No actions for this type
      return object
    }

    object.actions = []
    let actionOffset = offset + OFFSETS.actions
    // FIX: the end bound is relative to `offset`; comparing against the bare
    // instruction length made the loop terminate immediately whenever
    // offset > 0 (as when called from flow-mod), dropping all actions.
    const end = offset + object.len
    while (actionOffset < end) {
      const action = ofAction.unpack(buffer, actionOffset)
      actionOffset += action.len
      object.actions.push(action)
    }

    return object
  },
}
|
||||
@@ -0,0 +1,43 @@
|
||||
import get from '../../util/get-from-map'

import actions from './actions'
// import goToTable from './goToTable'
import of from '../openflow-11'
// import writeMetadata from './writeMetadata'

// =============================================================================

// Dispatch table: OpenFlow 1.1 instruction type -> pack/unpack codec.
const INSTRUCTION = {
  /* TODO:
  [of.instructionType.goToTable]: goToTable,
  [of.instructionType.writeMetadata]: writeMetadata,
  */
  [of.instructionType.writeActions]: actions,
  [of.instructionType.applyActions]: actions,
  [of.instructionType.clearActions]: actions,
}

// -----------------------------------------------------------------------------

const OFFSETS = of.offsets.instruction

// Resolve the codec for an instruction type or throw a descriptive error.
const getCodec = type =>
  get(INSTRUCTION, type, `Invalid instruction type: ${type}`)

// =============================================================================

export default {
  // Serialize an instruction, dispatching on object.type.
  pack: (object, buffer = undefined, offset = 0) =>
    getCodec(object.type).pack(object, buffer, offset),

  // Deserialize the instruction at `offset`, dispatching on the wire type.
  unpack: (buffer = undefined, offset = 0) =>
    getCodec(buffer.readUInt16BE(offset + OFFSETS.type)).unpack(
      buffer,
      offset
    ),
}
|
||||
46
@xen-orchestra/openflow/src/openflow-11/message/echo.js
Normal file
46
@xen-orchestra/openflow/src/openflow-11/message/echo.js
Normal file
@@ -0,0 +1,46 @@
|
||||
import assert from 'assert'
import ofHeader from './header'
import of from '../openflow-11'

// =============================================================================

const OFFSETS = of.offsets.echo
const TYPES = [of.type.echoRequest, of.type.echoReply]

// =============================================================================

export default {
  // Serialize an echo request/reply; `data` is an optional payload Buffer
  // copied verbatim after the header.
  pack: object => {
    const { header, data } = object
    assert(TYPES.includes(header.type))

    const payloadSize = data === undefined ? 0 : data.length
    header.length = of.sizes.header + payloadSize

    const buffer = Buffer.alloc(header.length)
    ofHeader.pack(header, buffer, OFFSETS.header)
    if (payloadSize > 0) {
      data.copy(buffer, OFFSETS.data, 0, payloadSize)
    }

    return buffer
  },

  // Deserialize an echo message; the `data` property is only present when
  // the message carries a non-empty payload.
  unpack: (buffer, offset = 0) => {
    const header = ofHeader.unpack(buffer, offset + OFFSETS.header)
    assert(TYPES.includes(header.type))

    const object = { header }
    const payloadSize = header.length - of.sizes.header
    if (payloadSize > 0) {
      const data = Buffer.alloc(payloadSize)
      buffer.copy(
        data,
        0,
        offset + OFFSETS.data,
        offset + OFFSETS.data + payloadSize
      )
      object.data = data
    }

    return object
  },
}
|
||||
79
@xen-orchestra/openflow/src/openflow-11/message/error.js
Normal file
79
@xen-orchestra/openflow/src/openflow-11/message/error.js
Normal file
@@ -0,0 +1,79 @@
|
||||
import assert from 'assert'

import get from '../../util/get-from-map'

import ofHeader from './header'
import of from '../openflow-11'

// =============================================================================

// Valid error codes for each error type (OFPT_ERROR).
const ERROR_CODE = {
  [of.errorType.helloFailed]: of.helloFailedCode,
  [of.errorType.badRequest]: of.badRequestCode,
  [of.errorType.badAction]: of.badActionCode,
  [of.errorType.badInstruction]: of.badInstructionCode,
  [of.errorType.badMatch]: of.badMatchCode,
  [of.errorType.flowModFailed]: of.flowModFailedCode,
  [of.errorType.groupModFailed]: of.groupModFailedCode,
  [of.errorType.portModFailed]: of.portModFailedCode,
  [of.errorType.tableModFailed]: of.tableModFailedCode,
  [of.errorType.queueOpFailed]: of.queueOpFailedCode,
  [of.errorType.switchConfigFailed]: of.switchConfigFailedCode,
}

// -----------------------------------------------------------------------------

const OFFSETS = of.offsets.errorMsg

// =============================================================================

export default {
  // Serialize an OFPT_ERROR message; `data` is an optional Buffer with the
  // offending request (or error text) appended after the fixed fields.
  pack: (object, buffer = undefined, offset = 0) => {
    const { header, type, code, data } = object
    assert(header.type === of.type.error)
    const errorCodes = get(ERROR_CODE, type, `Invalid error type: ${type}`)
    assert(Object.values(errorCodes).includes(code))

    // FIX: the total message length must be stored in header.length — it is
    // what ofHeader.pack() serializes and what unpack() reads back. It was
    // previously written to object.length, leaving header.length undefined
    // (every sibling codec in this package sets header.length).
    header.length = of.sizes.errorMsg
    if (data !== undefined) {
      header.length += data.length
    }

    buffer = buffer !== undefined ? buffer : Buffer.alloc(header.length)

    ofHeader.pack(header, buffer, offset + OFFSETS.header)
    buffer.writeUInt16BE(type, offset + OFFSETS.type)
    buffer.writeUInt16BE(code, offset + OFFSETS.code)

    if (data !== undefined) {
      data.copy(buffer, offset + OFFSETS.data, 0, data.length)
    }

    return buffer
  },

  // Deserialize an OFPT_ERROR message; validates the type/code pair and only
  // sets `data` when trailing bytes are present.
  unpack: (buffer, offset = 0) => {
    const header = ofHeader.unpack(buffer, offset + OFFSETS.header)
    assert(header.type === of.type.error)

    const type = buffer.readUInt16BE(offset + OFFSETS.type)
    const errorCodes = get(ERROR_CODE, type, `Invalid error type: ${type}`)

    const code = buffer.readUInt16BE(offset + OFFSETS.code)
    assert(Object.values(errorCodes).includes(code))

    const object = { header, type, code }
    const dataSize = header.length - of.sizes.errorMsg
    if (dataSize > 0) {
      object.data = Buffer.alloc(dataSize)
      buffer.copy(
        object.data,
        0,
        offset + OFFSETS.data,
        offset + OFFSETS.data + dataSize
      )
    }

    return object
  },
}
|
||||
@@ -0,0 +1,73 @@
|
||||
import assert from 'assert'

import ofHeader from './header'
import ofPort from '../struct/port'
import of from '../openflow-11'

// =============================================================================

const OFFSETS = of.offsets.switchFeatures
const PAD_LENGTH = 3

// =============================================================================

export default {
  // Serialize an OFPT_FEATURES_REPLY message: fixed switch-features fields
  // followed by one port structure per entry in `ports`.
  // NOTE(review): writeBigUInt64BE requires `datapath_id` to be a BigInt
  // here, while unpack() below returns it as a hex string — the two are not
  // round-trip symmetric; confirm which representation callers use.
  pack: object => {
    const {
      header,
      datapath_id: did,
      n_buffers: nBufs,
      n_tables: nTables,
      capabilities,
      reserved,
      ports,
    } = object
    assert(header.type === of.type.featuresReply)

    // Total length = fixed part + one port struct per port.
    header.length = of.sizes.switchFeatures + ports.length * of.sizes.port

    const buffer = Buffer.alloc(header.length)
    ofHeader.pack(header, buffer, OFFSETS.header)

    buffer.writeBigUInt64BE(did, OFFSETS.datapathId)
    buffer.writeUInt32BE(nBufs, OFFSETS.nBuffers)
    buffer.writeUInt8(nTables, OFFSETS.nTables)
    // Zero the 3 padding bytes between n_tables and capabilities.
    buffer.fill(0, OFFSETS.pad, OFFSETS.pad + PAD_LENGTH)
    buffer.writeUInt32BE(capabilities, OFFSETS.capabilities)
    buffer.writeUInt32BE(reserved, OFFSETS.reserved)

    // Append each port structure after the fixed fields.
    let portsOffset = 0
    ports.forEach(port => {
      ofPort.pack(port, buffer, OFFSETS.ports + portsOffset++ * of.sizes.port)
    })

    return buffer
  },

  // Deserialize an OFPT_FEATURES_REPLY message; the number of ports is
  // derived from the header length.
  unpack: (buffer, offset = 0) => {
    const header = ofHeader.unpack(buffer, offset + OFFSETS.header)
    assert(header.type === of.type.featuresReply)

    const object = { header }
    // Datapath id is exposed as a 16-char lowercase hex string.
    object.datapath_id = buffer.toString(
      'hex',
      offset + OFFSETS.datapathId,
      offset + OFFSETS.datapathId + 8
    )
    object.n_buffers = buffer.readUInt32BE(offset + OFFSETS.nBuffers)
    object.n_tables = buffer.readUInt8(offset + OFFSETS.nTables)

    object.capabilities = buffer.readUInt32BE(offset + OFFSETS.capabilities)
    object.reserved = buffer.readUInt32BE(offset + OFFSETS.reserved)

    object.ports = []
    // Remaining bytes after the fixed part are whole port structures.
    const nPorts = (header.length - of.sizes.switchFeatures) / of.sizes.port
    for (let i = 0; i < nPorts; ++i) {
      object.ports.push(
        ofPort.unpack(buffer, offset + OFFSETS.ports + i * of.sizes.port)
      )
    }

    return object
  },
}
|
||||
@@ -0,0 +1,24 @@
|
||||
import assert from 'assert'

import ofHeader from './header'
import of from '../openflow-11'

// =============================================================================

export default {
  // An OFPT_FEATURES_REQUEST is a bare header with no body.
  pack: ({ header }) => {
    assert(header.type === of.type.featuresRequest)
    header.length = of.sizes.featuresRequest

    return ofHeader.pack(header)
  },

  // Deserialize a features request; validates type and fixed length.
  unpack: (buffer, offset = 0) => {
    const header = ofHeader.unpack(buffer, offset)
    assert(header.type === of.type.featuresRequest)
    assert(header.length === of.sizes.featuresRequest)

    return { header }
  },
}
|
||||
197
@xen-orchestra/openflow/src/openflow-11/message/flow-mod.js
Normal file
197
@xen-orchestra/openflow/src/openflow-11/message/flow-mod.js
Normal file
@@ -0,0 +1,197 @@
|
||||
import assert from 'assert'

import get from '../../util/get-from-map'
import ofInstruction from '../instruction/instruction'
import uIntHelper from '../../util/uint-helper'

import ofHeader from './header'
import of from '../openflow-11'
import ofMatch from '../struct/match/match'

// =============================================================================

// Size in bytes of each instruction structure, by instruction type.
// FIX: the goToTable and writeMetadata entries were swapped, producing a
// wrong header length for those instruction types.
const INSTRUCTION_SIZE = {
  [of.instructionType.goToTable]: of.sizes.instructionGotoTable,
  [of.instructionType.writeMetadata]: of.sizes.instructionWriteMetadata,
  [of.instructionType.clearActions]: of.sizes.instructionActions,
  [of.instructionType.writeActions]: of.sizes.instructionActions,
  [of.instructionType.applyActions]: of.sizes.instructionActions,
}

// Size in bytes of each action structure, by action type.
const ACTION_SIZE = {
  [of.actionType.output]: of.sizes.actionOutput,
  [of.actionType.group]: of.sizes.actionGroup,
  [of.actionType.setVlanId]: of.sizes.actionVlanId,
  [of.actionType.setVlanPcp]: of.sizes.actionVlanPcp,
  [of.actionType.setDlSrc]: of.sizes.actionDlAddr,
  [of.actionType.setDlDst]: of.sizes.actionDlAddr,
  [of.actionType.setNwSrc]: of.sizes.actionNwAddr,
  [of.actionType.setNwDst]: of.sizes.actionNwAddr,
  [of.actionType.setNwTos]: of.sizes.actionNwTos,
  [of.actionType.setNwEcn]: of.sizes.actionNwEcn,
  [of.actionType.setTpSrc]: of.sizes.actionTpPort,
  [of.actionType.setTpDst]: of.sizes.actionTpPort,
  [of.actionType.copyTtlOut]: of.sizes.actionHeader,
  [of.actionType.copyTtlIn]: of.sizes.actionHeader,
  [of.actionType.setMplsLabel]: of.sizes.actionMplsLabel,
  [of.actionType.setMplsTc]: of.sizes.actionMplsTc,
  [of.actionType.setMplsTtl]: of.sizes.actionMplsTtl,
  [of.actionType.decMplsTtl]: of.sizes.actionMplsTtl,
  [of.actionType.pushVlan]: of.sizes.actionPush,
  [of.actionType.popVlan]: of.sizes.actionHeader,
  [of.actionType.pushMpls]: of.sizes.actionPush,
  [of.actionType.popMpls]: of.sizes.actionPopMpls,
  [of.actionType.setQueue]: of.sizes.actionSetQueue,
  [of.actionType.setNwTtl]: of.sizes.actionNwTtl,
  [of.actionType.decNwTtl]: of.sizes.actionNwTtl,
}

// -----------------------------------------------------------------------------

const OFFSETS = of.offsets.flowMod

const COOKIE_LENGTH = 8
const PAD_LENGTH = 2

// =============================================================================

export default {
  // Serialize an OFPT_FLOW_MOD message. `cookie`/`cookie_mask` are optional
  // 8-byte Buffers: with no cookie, the mask is filled with 0xff (match-all
  // wildcard); with a cookie and no mask, the mask is zeroed.
  pack: (object, buffer = undefined, offset = 0) => {
    const {
      header,
      cookie,
      cookie_mask,
      table_id = 0,
      command,
      idle_timeout = 0,
      hard_timeout = 0,
      priority = of.defaultPriority,
      buffer_id = 0xffffffff,
      out_port = of.port.any,
      out_group = of.group.any,
      flags = 0,
      match,
      instructions = [],
    } = object
    // Compute header.length: fixed part + each instruction + its actions.
    header.length = of.sizes.flowMod
    instructions.forEach(instruction => {
      header.length += get(
        INSTRUCTION_SIZE,
        instruction.type,
        `Invalid instruction type: ${instruction.type}`
      )
      const { actions = [] } = instruction
      actions.forEach(action => {
        header.length += get(
          ACTION_SIZE,
          action.type,
          // FIX: this lookup is over action types, not instruction types —
          // the error message previously said "Invalid instruction type".
          `Invalid action type: ${action.type}`
        )
      })
    })

    buffer = buffer !== undefined ? buffer : Buffer.alloc(header.length)

    ofHeader.pack(header, buffer, offset + OFFSETS.header)

    if (cookie !== undefined) {
      if (cookie_mask !== undefined) {
        cookie_mask.copy(buffer, offset + OFFSETS.cookieMask)
      } else {
        // FIX: OFFSETS.cookie_mask does not exist (the key is camelCase
        // `cookieMask`), so the fill start was `offset + undefined` (NaN).
        buffer.fill(
          0x00,
          offset + OFFSETS.cookieMask,
          offset + OFFSETS.cookieMask + COOKIE_LENGTH
        )
      }
      cookie.copy(buffer, offset + OFFSETS.cookie)
    } else {
      buffer.fill(
        0x00,
        offset + OFFSETS.cookie,
        offset + OFFSETS.cookie + COOKIE_LENGTH
      )
      buffer.fill(
        0xff,
        offset + OFFSETS.cookieMask,
        offset + OFFSETS.cookieMask + COOKIE_LENGTH
      )
    }

    buffer.writeUInt8(table_id, offset + OFFSETS.tableId)
    assert(Object.values(of.flowModCommand).includes(command))
    buffer.writeUInt8(command, offset + OFFSETS.command)
    buffer.writeUInt16BE(idle_timeout, offset + OFFSETS.idleTimeout)
    buffer.writeUInt16BE(hard_timeout, offset + OFFSETS.hardTimeout)
    buffer.writeUInt16BE(priority, offset + OFFSETS.priority)
    buffer.writeUInt32BE(buffer_id, offset + OFFSETS.bufferId)
    buffer.writeUInt32BE(out_port, offset + OFFSETS.outPort)
    buffer.writeUInt32BE(out_group, offset + OFFSETS.outGroup)
    buffer.writeUInt16BE(flags, offset + OFFSETS.flags)
    buffer.fill(0, offset + OFFSETS.pad, offset + OFFSETS.pad + PAD_LENGTH)

    ofMatch.pack(match, buffer, offset + OFFSETS.match)

    let instructionOffset = offset + OFFSETS.instructions
    instructions.forEach(instruction => {
      ofInstruction.pack(instruction, buffer, instructionOffset)
      instructionOffset += instruction.len
    })

    return buffer
  },

  // Deserialize an OFPT_FLOW_MOD message; `cookie_mask` is only set when it
  // is not the all-ones "no mask" sentinel.
  unpack: (buffer, offset = 0) => {
    const header = ofHeader.unpack(buffer, offset + OFFSETS.header)
    assert(header.type === of.type.flowMod)

    const object = { header }

    object.cookie = Buffer.alloc(COOKIE_LENGTH)
    buffer.copy(
      object.cookie,
      0,
      offset + OFFSETS.cookie,
      offset + OFFSETS.cookie + COOKIE_LENGTH
    )
    if (
      !uIntHelper.isUInt64None([
        buffer.readUInt32BE(offset + OFFSETS.cookieMask),
        buffer.readUInt32BE(offset + OFFSETS.cookieMask + COOKIE_LENGTH / 2),
      ])
    ) {
      object.cookie_mask = Buffer.alloc(COOKIE_LENGTH)
      buffer.copy(
        object.cookie_mask,
        0,
        offset + OFFSETS.cookieMask,
        offset + OFFSETS.cookieMask + COOKIE_LENGTH
      )
    }

    object.table_id = buffer.readUInt8(offset + OFFSETS.tableId)
    object.command = buffer.readUInt8(offset + OFFSETS.command)
    assert(Object.values(of.flowModCommand).includes(object.command))

    object.idle_timeout = buffer.readUInt16BE(offset + OFFSETS.idleTimeout)
    object.hard_timeout = buffer.readUInt16BE(offset + OFFSETS.hardTimeout)
    object.priority = buffer.readUInt16BE(offset + OFFSETS.priority)
    object.buffer_id = buffer.readUInt32BE(offset + OFFSETS.bufferId)
    object.out_port = buffer.readUInt32BE(offset + OFFSETS.outPort)
    object.out_group = buffer.readUInt32BE(offset + OFFSETS.outGroup)
    object.flags = buffer.readUInt16BE(offset + OFFSETS.flags)

    object.match = ofMatch.unpack(buffer, offset + OFFSETS.match)

    object.instructions = []
    let instructionOffset = offset + OFFSETS.instructions
    // FIX: the end bound is relative to `offset`; comparing against the bare
    // header.length skipped all instructions whenever offset > 0.
    const end = offset + header.length
    while (instructionOffset < end) {
      const instruction = ofInstruction.unpack(buffer, instructionOffset)
      object.instructions.push(instruction)
      instructionOffset += instruction.len
    }

    return object
  },
}
|
||||
@@ -0,0 +1,24 @@
|
||||
import assert from 'assert'

import ofHeader from './header'
import of from '../openflow-11'

// =============================================================================

export default {
  // An OFPT_GET_CONFIG_REQUEST is a bare header with no body.
  pack: ({ header }) => {
    assert(header.type === of.type.getConfigRequest)
    header.length = of.sizes.header

    return ofHeader.pack(header)
  },

  // Deserialize a get-config request; validates type and header-only length.
  unpack: (buffer, offset = 0) => {
    const header = ofHeader.unpack(buffer, offset)
    assert(header.type === of.type.getConfigRequest)
    assert(header.length === of.sizes.header)

    return { header }
  },
}
|
||||
39
@xen-orchestra/openflow/src/openflow-11/message/header.js
Normal file
39
@xen-orchestra/openflow/src/openflow-11/message/header.js
Normal file
@@ -0,0 +1,39 @@
|
||||
import assert from 'assert'

import of from '../openflow-11'

// =============================================================================

const OFFSETS = of.offsets.header

// =============================================================================

export default {
  // Serialize an OpenFlow 1.1 message header; allocates a header-sized
  // buffer when none is given and returns it. Asserts the version matches
  // this protocol and the type is a known message type.
  pack: (object, buffer = undefined, offset = 0) => {
    if (buffer === undefined) {
      buffer = Buffer.alloc(of.sizes.header)
    }
    const { version, type, length, xid } = object

    assert(version === of.version)
    assert(Object.values(of.type).includes(type))

    buffer.writeUInt8(version, offset + OFFSETS.version)
    buffer.writeUInt8(type, offset + OFFSETS.type)
    buffer.writeUInt16BE(length, offset + OFFSETS.length)
    buffer.writeUInt32BE(xid, offset + OFFSETS.xid)

    return buffer
  },

  // Deserialize a message header; asserts the version and type are valid.
  unpack: (buffer, offset = 0) => {
    const version = buffer.readUInt8(offset + OFFSETS.version)
    assert(version === of.version)

    const type = buffer.readUInt8(offset + OFFSETS.type)
    assert(Object.values(of.type).includes(type))

    return {
      version,
      type,
      length: buffer.readUInt16BE(offset + OFFSETS.length),
      xid: buffer.readUInt32BE(offset + OFFSETS.xid),
    }
  },
}
|
||||
27
@xen-orchestra/openflow/src/openflow-11/message/hello.js
Normal file
27
@xen-orchestra/openflow/src/openflow-11/message/hello.js
Normal file
@@ -0,0 +1,27 @@
|
||||
import assert from 'assert'

import ofHeader from './header'
import of from '../openflow-11'

// =============================================================================

const OFFSETS = of.offsets.hello

// =============================================================================

export default {
  // An OFPT_HELLO is a bare header with no body.
  pack: ({ header }) => {
    assert(header.type === of.type.hello)
    header.length = of.sizes.hello

    return ofHeader.pack(header)
  },

  // Deserialize a hello message; only the header is meaningful.
  unpack: (buffer, offset = 0) => {
    const header = ofHeader.unpack(buffer, offset + OFFSETS.header)
    assert(header.type === of.type.hello)

    return { header }
  },
}
|
||||
@@ -0,0 +1,38 @@
|
||||
import assert from 'assert'

import ofHeader from './header'
import of from '../openflow-11'

// =============================================================================

const OFFSETS = of.offsets.switchConfig
const TYPES = [of.type.getConfigReply, of.type.setConfig]

// =============================================================================

export default {
  // Serialize an OFPT_GET_CONFIG_REPLY / OFPT_SET_CONFIG message (fixed-size:
  // header + flags + miss_send_len).
  pack: ({ header, flags, miss_send_len }) => {
    assert(TYPES.includes(header.type))
    header.length = of.sizes.switchConfig

    const buffer = Buffer.alloc(header.length)
    ofHeader.pack(header, buffer, OFFSETS.header)
    buffer.writeUInt16BE(flags, OFFSETS.flags)
    buffer.writeUInt16BE(miss_send_len, OFFSETS.missSendLen)

    return buffer
  },

  // Deserialize a switch-config message; asserts the fixed length.
  unpack: (buffer, offset = 0) => {
    const header = ofHeader.unpack(buffer, offset + OFFSETS.header)
    assert(TYPES.includes(header.type))
    assert(header.length === of.sizes.switchConfig)

    return {
      header,
      flags: buffer.readUInt16BE(offset + OFFSETS.flags),
      miss_send_len: buffer.readUInt16BE(offset + OFFSETS.missSendLen),
    }
  },
}
|
||||
1170
@xen-orchestra/openflow/src/openflow-11/openflow-11.js
Normal file
1170
@xen-orchestra/openflow/src/openflow-11/openflow-11.js
Normal file
File diff suppressed because it is too large
Load Diff
374
@xen-orchestra/openflow/src/openflow-11/struct/match/match.js
Normal file
374
@xen-orchestra/openflow/src/openflow-11/struct/match/match.js
Normal file
@@ -0,0 +1,374 @@
|
||||
import assert from 'assert'
|
||||
import addressParser from '../../../util/addrress-parser'
|
||||
import uIntHelper from '../../../util/uint-helper'
|
||||
import of from '../../openflow-11'
|
||||
|
||||
// =============================================================================
|
||||
|
||||
const OFFSETS = of.offsets.match
|
||||
const WILDCARDS = of.flowWildcards
|
||||
|
||||
const IP4_ADDR_LEN = 4
|
||||
const METADATA_LENGTH = 8
|
||||
const PAD_LENGTH = 1
|
||||
const PAD2_LENGTH = 3
|
||||
|
||||
// =============================================================================
|
||||
|
||||
export default {
|
||||
pack: (object, buffer = undefined, offset = 0) => {
|
||||
assert(object.type === of.matchType.standard)
|
||||
object.length = of.sizes.match
|
||||
buffer = buffer !== undefined ? buffer : Buffer.alloc(object.length)
|
||||
|
||||
buffer.writeUInt16BE(object.type, offset + OFFSETS.type)
|
||||
buffer.writeUInt16BE(object.length, offset + OFFSETS.length)
|
||||
|
||||
let wildcards = 0
|
||||
let inPort = 0
|
||||
if (object.in_port !== undefined) {
|
||||
inPort = object.in_port
|
||||
} else {
|
||||
wildcards |= WILDCARDS.inPort
|
||||
}
|
||||
buffer.writeUInt32BE(inPort, offset + OFFSETS.inPort)
|
||||
|
||||
if (object.dl_src !== undefined) {
|
||||
if (object.dl_src_mask !== undefined) {
|
||||
addressParser.stringToEth(
|
||||
object.dl_src_mask,
|
||||
buffer,
|
||||
offset + OFFSETS.dlSrcMask
|
||||
)
|
||||
} else {
|
||||
buffer.fill(
|
||||
0x00,
|
||||
offset + OFFSETS.dlSrcMask,
|
||||
offset + OFFSETS.dlSrcMask + of.ethAddrLen
|
||||
)
|
||||
}
|
||||
addressParser.stringToEth(object.dl_src, buffer, offset + OFFSETS.dlSrc)
|
||||
} else {
|
||||
buffer.fill(
|
||||
0x00,
|
||||
offset + OFFSETS.dlSrc,
|
||||
offset + OFFSETS.dlSrc + of.ethAddrLen
|
||||
)
|
||||
buffer.fill(
|
||||
0xff,
|
||||
offset + OFFSETS.dlSrcMask,
|
||||
offset + OFFSETS.dlSrcMask + of.ethAddrLen
|
||||
)
|
||||
}
|
||||
|
||||
if (object.dl_dst !== undefined) {
|
||||
if (object.dl_dst_mask !== undefined) {
|
||||
addressParser.stringToEth(
|
||||
object.dl_dst_mask,
|
||||
buffer,
|
||||
offset + OFFSETS.dlDstMask
|
||||
)
|
||||
} else {
|
||||
buffer.fill(
|
||||
0x00,
|
||||
offset + OFFSETS.dlDstMask,
|
||||
offset + OFFSETS.dlDstMask + of.ethAddrLen
|
||||
)
|
||||
}
|
||||
addressParser.stringToEth(object.dl_dst, buffer, offset + OFFSETS.dlDst)
|
||||
} else {
|
||||
buffer.fill(
|
||||
0x00,
|
||||
offset + OFFSETS.dlDst,
|
||||
offset + OFFSETS.dlDst + of.ethAddrLen
|
||||
)
|
||||
buffer.fill(
|
||||
0xff,
|
||||
offset + OFFSETS.dlDstMask,
|
||||
offset + OFFSETS.dlDstMask + of.ethAddrLen
|
||||
)
|
||||
}
|
||||
|
||||
let dlVlan = 0
|
||||
if (object.dl_vlan !== undefined) {
|
||||
dlVlan = object.dl_vlan
|
||||
} else {
|
||||
wildcards |= WILDCARDS.dlVlan
|
||||
}
|
||||
buffer.writeUInt16BE(dlVlan, offset + OFFSETS.dlVlan)
|
||||
|
||||
let dlVlanPcp = 0
|
||||
if (object.dl_vlan_pcp !== undefined) {
|
||||
dlVlanPcp = object.dl_vlan_pcp
|
||||
} else {
|
||||
wildcards |= WILDCARDS.dlVlanPcp
|
||||
}
|
||||
buffer.writeUInt8(dlVlanPcp, offset + OFFSETS.dlVlanPcp)
|
||||
|
||||
buffer.fill(0, offset + OFFSETS.pad1, offset + OFFSETS.pad1 + PAD_LENGTH)
|
||||
|
||||
let dlType = 0
|
||||
if (object.dl_type !== undefined) {
|
||||
dlType = object.dl_type
|
||||
} else {
|
||||
wildcards |= WILDCARDS.dlType
|
||||
}
|
||||
buffer.writeUInt16BE(dlType, offset + OFFSETS.dlType)
|
||||
|
||||
let nwTos = 0
|
||||
if (object.nw_tos !== undefined) {
|
||||
nwTos = object.nw_tos
|
||||
} else {
|
||||
wildcards |= WILDCARDS.nwTos
|
||||
}
|
||||
buffer.writeUInt8(nwTos, offset + OFFSETS.nwTos)
|
||||
|
||||
let nwProto = 0
|
||||
if (object.nw_proto !== undefined) {
|
||||
nwProto = object.nw_proto
|
||||
} else {
|
||||
wildcards |= WILDCARDS.nwProto
|
||||
}
|
||||
buffer.writeUInt8(nwProto, offset + OFFSETS.nwProto)
|
||||
|
||||
if (object.nw_src !== undefined) {
|
||||
if (object.nw_src_mask !== undefined) {
|
||||
addressParser.stringToip4(
|
||||
object.nw_src_mask,
|
||||
buffer,
|
||||
offset + OFFSETS.nwSrcMask
|
||||
)
|
||||
} else {
|
||||
buffer.fill(
|
||||
0x00,
|
||||
offset + OFFSETS.nwSrcMask,
|
||||
offset + OFFSETS.nwSrcMask + IP4_ADDR_LEN
|
||||
)
|
||||
}
|
||||
addressParser.stringToip4(object.nw_src, buffer, offset + OFFSETS.nwSrc)
|
||||
} else {
|
||||
buffer.fill(
|
||||
0x00,
|
||||
offset + OFFSETS.nwSrc,
|
||||
offset + OFFSETS.nwSrc + IP4_ADDR_LEN
|
||||
)
|
||||
buffer.fill(
|
||||
0xff,
|
||||
offset + OFFSETS.nwSrcMask,
|
||||
offset + OFFSETS.nwSrcMask + IP4_ADDR_LEN
|
||||
)
|
||||
}
|
||||
|
||||
if (object.nw_dst !== undefined) {
|
||||
if (object.nw_dst_mask !== undefined) {
|
||||
addressParser.stringToip4(
|
||||
object.nw_dst_mask,
|
||||
buffer,
|
||||
offset + OFFSETS.nwDstMask
|
||||
)
|
||||
} else {
|
||||
buffer.fill(
|
||||
0x00,
|
||||
offset + OFFSETS.nwDstMask,
|
||||
offset + OFFSETS.nwDstMask + IP4_ADDR_LEN
|
||||
)
|
||||
}
|
||||
addressParser.stringToip4(object.nw_dst, buffer, offset + OFFSETS.nwDst)
|
||||
} else {
|
||||
buffer.fill(
|
||||
0x00,
|
||||
offset + OFFSETS.nwDst,
|
||||
offset + OFFSETS.nwDst + IP4_ADDR_LEN
|
||||
)
|
||||
buffer.fill(
|
||||
0xff,
|
||||
offset + OFFSETS.nwDstMask,
|
||||
offset + OFFSETS.nwDstMask + IP4_ADDR_LEN
|
||||
)
|
||||
}
|
||||
|
||||
let tpSrc = 0
|
||||
if (object.tp_src !== undefined) {
|
||||
tpSrc = object.tp_src
|
||||
} else {
|
||||
wildcards |= WILDCARDS.tpSrc
|
||||
}
|
||||
buffer.writeUInt16BE(tpSrc, offset + OFFSETS.tpSrc)
|
||||
|
||||
let tpDst = 0
|
||||
if (object.tp_dst !== undefined) {
|
||||
tpDst = object.tp_dst
|
||||
} else {
|
||||
wildcards |= WILDCARDS.tpDst
|
||||
}
|
||||
buffer.writeUInt16BE(tpDst, offset + OFFSETS.tpDst)
|
||||
|
||||
let mplsLabel = 0
|
||||
if (object.mpls_label !== undefined) {
|
||||
mplsLabel = object.mpls_label
|
||||
} else {
|
||||
wildcards |= WILDCARDS.mplsLabel
|
||||
}
|
||||
buffer.writeUInt32BE(mplsLabel, offset + OFFSETS.mplsLabel)
|
||||
|
||||
let mplsTc = 0
|
||||
if (object.mpls_tc !== undefined) {
|
||||
mplsTc = object.mpls_tc
|
||||
} else {
|
||||
wildcards |= WILDCARDS.mplsTc
|
||||
}
|
||||
buffer.writeUInt8(mplsTc, offset + OFFSETS.mplsTc)
|
||||
|
||||
buffer.fill(0, offset + OFFSETS.pad2, offset + OFFSETS.pad2 + PAD2_LENGTH)
|
||||
|
||||
if (object.metadata !== undefined) {
|
||||
if (object.metadata_mask !== undefined) {
|
||||
buffer.copy(
|
||||
object.metadata_mask,
|
||||
0,
|
||||
offset + OFFSETS.metadataMask,
|
||||
offset + OFFSETS.metadataMask + METADATA_LENGTH
|
||||
)
|
||||
} else {
|
||||
buffer.fill(
|
||||
0x00,
|
||||
offset + OFFSETS.metadataMask,
|
||||
offset + OFFSETS.metadataMask + METADATA_LENGTH
|
||||
)
|
||||
}
|
||||
buffer.copy(
|
||||
object.metadata,
|
||||
0,
|
||||
offset + OFFSETS.metadata,
|
||||
offset + OFFSETS.metadata + METADATA_LENGTH
|
||||
)
|
||||
} else {
|
||||
buffer.fill(
|
||||
0x00,
|
||||
offset + OFFSETS.metadata,
|
||||
offset + OFFSETS.metadata + METADATA_LENGTH
|
||||
)
|
||||
buffer.fill(
|
||||
0xff,
|
||||
offset + OFFSETS.metadataMask,
|
||||
offset + OFFSETS.metadataMask + METADATA_LENGTH
|
||||
)
|
||||
}
|
||||
|
||||
buffer.writeUInt32BE(wildcards, offset + OFFSETS.wildcards)
|
||||
|
||||
return buffer
|
||||
},
|
||||
|
||||
unpack: (buffer, offset = 0) => {
|
||||
const object = {}
|
||||
object.type = buffer.readUInt16BE(offset + OFFSETS.type)
|
||||
assert(object.type === of.matchType.standard)
|
||||
|
||||
object.length = buffer.readUInt16BE(offset + OFFSETS.length)
|
||||
assert(object.length === of.sizes.match)
|
||||
|
||||
// Wildcards indicate which value to use for the match.
|
||||
// if `wildcards & of.wildcards.<value>` === 0 then `value` is not wildcarded and must be used.
|
||||
const wildcards = (object.wildcards = buffer.readUInt32BE(
|
||||
offset + OFFSETS.wildcards
|
||||
))
|
||||
if ((wildcards & WILDCARDS.inPort) === 0) {
|
||||
object.in_port = buffer.readUInt32BE(offset + OFFSETS.inPort)
|
||||
}
|
||||
|
||||
if (!addressParser.isEthMaskAll(buffer, offset + OFFSETS.dlSrcMask)) {
|
||||
if (!addressParser.isEthMaskNone(buffer, offset + OFFSETS.dlSrcMask)) {
|
||||
object.dl_src_mask = addressParser.ethToString(
|
||||
buffer,
|
||||
offset + OFFSETS.dlSrcMask
|
||||
)
|
||||
}
|
||||
object.dl_src = addressParser.ethToString(buffer, offset + OFFSETS.dlSrc)
|
||||
}
|
||||
if (!addressParser.isEthMaskAll(buffer, offset + OFFSETS.dlDstMask)) {
|
||||
if (!addressParser.isEthMaskNone(buffer, offset + OFFSETS.dlDstMask)) {
|
||||
object.dl_dst_mask = addressParser.ethToString(
|
||||
buffer,
|
||||
offset + OFFSETS.dlDstMask
|
||||
)
|
||||
}
|
||||
object.dl_dst = addressParser.ethToString(buffer, offset + OFFSETS.dlDst)
|
||||
}
|
||||
|
||||
if ((wildcards & WILDCARDS.dlVlan) === 0) {
|
||||
object.dl_vlan = buffer.readUInt16BE(offset + OFFSETS.dlVlan)
|
||||
}
|
||||
if ((wildcards & WILDCARDS.dlVlanPcp) === 0) {
|
||||
object.dl_vlan_pcp = buffer.readUInt16BE(offset + OFFSETS.dlVlanPcp)
|
||||
}
|
||||
if ((wildcards & WILDCARDS.dlType) === 0) {
|
||||
object.dl_type = buffer.readUInt16BE(offset + OFFSETS.dlType)
|
||||
}
|
||||
|
||||
if ((wildcards & WILDCARDS.nwTos) === 0) {
|
||||
object.nw_tos = buffer.readUInt8(offset + OFFSETS.nwTos)
|
||||
}
|
||||
if ((wildcards & WILDCARDS.nwProto) === 0) {
|
||||
object.nw_proto = buffer.readUInt8(offset + OFFSETS.nwProto)
|
||||
}
|
||||
|
||||
if (!addressParser.isIp4MaskAll(buffer, offset + OFFSETS.nwSrcMask)) {
|
||||
if (!addressParser.isIp4MaskNone(buffer, offset + OFFSETS.nwSrcMask)) {
|
||||
object.nw_src_mask = addressParser.ip4ToString(
|
||||
buffer,
|
||||
offset + OFFSETS.nwSrcMask
|
||||
)
|
||||
}
|
||||
object.nw_src = addressParser.ip4ToString(buffer, offset + OFFSETS.nwSrc)
|
||||
}
|
||||
if (!addressParser.isIp4MaskAll(buffer, offset + OFFSETS.nwDstMask)) {
|
||||
if (!addressParser.isIp4MaskNone(buffer, offset + OFFSETS.nwDstMask)) {
|
||||
object.nw_dst_mask = addressParser.ip4ToString(
|
||||
buffer,
|
||||
offset + OFFSETS.nwDstMask
|
||||
)
|
||||
}
|
||||
object.nw_dst = addressParser.ip4ToString(buffer, offset + OFFSETS.nwDst)
|
||||
}
|
||||
|
||||
if ((wildcards & WILDCARDS.tpSrc) === 0) {
|
||||
object.tp_src = buffer.readUInt16BE(offset + OFFSETS.tpSrc)
|
||||
}
|
||||
if ((wildcards & WILDCARDS.tpDst) === 0) {
|
||||
object.tp_dst = buffer.readUInt16BE(offset + OFFSETS.tpDst)
|
||||
}
|
||||
|
||||
if ((wildcards & WILDCARDS.mplsLabel) === 0) {
|
||||
object.mpls_label = buffer.readUInt32BE(offset + OFFSETS.mplsLabel)
|
||||
}
|
||||
if ((wildcards & WILDCARDS.mplsTc) === 0) {
|
||||
object.mpls_tc = buffer.readUInt32BE(offset + OFFSETS.mplsTc)
|
||||
}
|
||||
|
||||
const metadataMask = [
|
||||
buffer.readUInt32BE(offset + OFFSETS.metadataMask),
|
||||
buffer.readUInt32BE(offset + OFFSETS.metadataMask + METADATA_LENGTH / 2),
|
||||
]
|
||||
if (!uIntHelper.isUInt64All(metadataMask)) {
|
||||
if (!uIntHelper.isUInt64None(metadataMask)) {
|
||||
object.metadata_mask = Buffer.alloc(METADATA_LENGTH)
|
||||
buffer.copy(
|
||||
object.metadata_mask,
|
||||
0,
|
||||
offset + OFFSETS.metadataMask,
|
||||
offset + OFFSETS.metadataMask + METADATA_LENGTH
|
||||
)
|
||||
}
|
||||
object.metadata = Buffer.alloc(METADATA_LENGTH)
|
||||
buffer.copy(
|
||||
object.metadata,
|
||||
0,
|
||||
offset + OFFSETS.metadata,
|
||||
offset + OFFSETS.metadata + METADATA_LENGTH
|
||||
)
|
||||
}
|
||||
|
||||
return object
|
||||
},
|
||||
}
|
||||
79
@xen-orchestra/openflow/src/openflow-11/struct/port.js
Normal file
79
@xen-orchestra/openflow/src/openflow-11/struct/port.js
Normal file
@@ -0,0 +1,79 @@
|
||||
import of from '../openflow-11'
|
||||
import addressParser from '../../util/addrress-parser'
|
||||
|
||||
// =============================================================================
|
||||
|
||||
const OFFSETS = of.offsets.port
|
||||
const PAD_LENGTH = 4
|
||||
const PAD2_LENGTH = 2
|
||||
|
||||
// =============================================================================
|
||||
|
||||
export default {
|
||||
pack: (object, buffer = undefined, offset = 0) => {
|
||||
buffer = buffer !== undefined ? buffer : Buffer.alloc(of.sizes.port)
|
||||
const {
|
||||
port_no: portNo,
|
||||
hw_addr: hwAddr,
|
||||
name,
|
||||
config,
|
||||
state,
|
||||
curr,
|
||||
advertised,
|
||||
supported,
|
||||
peer,
|
||||
curr_speed: currSpeed,
|
||||
max_speed: maxSpeed,
|
||||
} = object
|
||||
|
||||
buffer.writeUInt32BE(portNo, offset + OFFSETS.portNo)
|
||||
buffer.fill(0, offset + OFFSETS.pad, offset + OFFSETS.pad + PAD_LENGTH)
|
||||
addressParser.stringToEth(hwAddr, buffer, offset + OFFSETS.hwAddr)
|
||||
buffer.fill(0, offset + OFFSETS.pad, offset + OFFSETS.pad + PAD2_LENGTH)
|
||||
buffer.write(name, offset + OFFSETS.name, of.maxPortNameLen)
|
||||
if (name.length < of.maxPortNameLen) {
|
||||
buffer.fill(
|
||||
0,
|
||||
offset + OFFSETS.name + name.length,
|
||||
offset + OFFSETS.name + of.maxPortNameLen
|
||||
)
|
||||
}
|
||||
|
||||
buffer.writeUInt32BE(config, offset + OFFSETS.config)
|
||||
buffer.writeUInt32BE(state, offset + OFFSETS.state)
|
||||
buffer.writeUInt32BE(curr, offset + OFFSETS.curr)
|
||||
buffer.writeUInt32BE(advertised, offset + OFFSETS.advertised)
|
||||
buffer.writeUInt32BE(supported, offset + OFFSETS.supported)
|
||||
buffer.writeUInt32BE(peer, offset + OFFSETS.peer)
|
||||
buffer.writeUInt32BE(currSpeed, offset + OFFSETS.currSpeed)
|
||||
buffer.writeUInt32BE(maxSpeed, offset + OFFSETS.maxSpeed)
|
||||
return buffer
|
||||
},
|
||||
|
||||
unpack: (buffer, offset = 0) => {
|
||||
const body = {}
|
||||
|
||||
body.port_no = buffer.readUInt32BE(offset + OFFSETS.portNo)
|
||||
body.hw_addr = addressParser.ethToString(buffer, offset + OFFSETS.hwAddr)
|
||||
|
||||
const name = buffer.toString(
|
||||
'utf8',
|
||||
offset + OFFSETS.name,
|
||||
offset + OFFSETS.name + of.maxPortNameLen
|
||||
)
|
||||
body.name = name.substr(0, name.indexOf('\0')) // Remove useless 0 if name.length < of.maxPortNameLen
|
||||
|
||||
body.config = buffer.readUInt32BE(offset + OFFSETS.config)
|
||||
body.state = buffer.readUInt32BE(offset + OFFSETS.state)
|
||||
|
||||
body.curr = buffer.readUInt32BE(offset + OFFSETS.curr)
|
||||
body.advertised = buffer.readUInt32BE(offset + OFFSETS.advertised)
|
||||
body.supported = buffer.readUInt32BE(offset + OFFSETS.supported)
|
||||
body.peer = buffer.readUInt32BE(offset + OFFSETS.peer)
|
||||
|
||||
body.curr_speed = buffer.readUInt32BE(offset + OFFSETS.currSpeed)
|
||||
body.max_speed = buffer.readUInt32BE(offset + OFFSETS.maxSpeed)
|
||||
|
||||
return body
|
||||
},
|
||||
}
|
||||
45
@xen-orchestra/openflow/src/parse-socket.js
Normal file
45
@xen-orchestra/openflow/src/parse-socket.js
Normal file
@@ -0,0 +1,45 @@
|
||||
import assert from 'assert'
|
||||
|
||||
import of from './index'
|
||||
import scheme from './default-header-scheme'
|
||||
import { readChunk } from '@vates/read-chunk'
|
||||
|
||||
// =============================================================================
|
||||
|
||||
export default async function* parse(socket) {
|
||||
let buffer = Buffer.alloc(1024)
|
||||
let data
|
||||
|
||||
// Read the header
|
||||
while ((data = await readChunk(socket, scheme.size)) !== null) {
|
||||
// Read OpenFlow message size from its header
|
||||
const msgSize = data.readUInt16BE(scheme.offsets.length)
|
||||
data.copy(buffer, 0, 0, scheme.size)
|
||||
|
||||
if (buffer.length < msgSize) {
|
||||
buffer = resize(buffer, msgSize)
|
||||
}
|
||||
|
||||
// Read the rest of the openflow message
|
||||
if (msgSize > scheme.size) {
|
||||
data = await readChunk(socket, msgSize - scheme.size)
|
||||
assert.notStrictEqual(data, null)
|
||||
data.copy(buffer, scheme.size, 0, msgSize - scheme.size)
|
||||
}
|
||||
|
||||
yield of.unpack(buffer)
|
||||
}
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
function resize(buffer, size) {
|
||||
let newLength = buffer.length
|
||||
do {
|
||||
newLength *= 2
|
||||
} while (newLength < size)
|
||||
|
||||
const newBuffer = Buffer.alloc(newLength)
|
||||
buffer.copy(newBuffer)
|
||||
return newBuffer
|
||||
}
|
||||
64
@xen-orchestra/openflow/src/util/addrress-parser.js
Normal file
64
@xen-orchestra/openflow/src/util/addrress-parser.js
Normal file
@@ -0,0 +1,64 @@
|
||||
import assert from 'assert'
|
||||
import util from 'util'
|
||||
|
||||
// =============================================================================
|
||||
|
||||
export default {
|
||||
isEthMaskNone: (buffer, offset) =>
|
||||
buffer.readUInt32BE(offset) === 0x00000000 &&
|
||||
buffer.readUInt16BE(offset + 4) === 0x0000,
|
||||
|
||||
isEthMaskAll: (buffer, offset) =>
|
||||
buffer.readUInt32BE(offset) === 0xffffffff &&
|
||||
buffer.readUInt16BE(offset + 4) === 0xffff,
|
||||
|
||||
isIp4MaskNone: (buffer, offset) => buffer.readUInt32BE(offset) === 0x00000000,
|
||||
|
||||
isIp4MaskAll: (buffer, offset) => buffer.readUInt32BE(offset) === 0xffffffff,
|
||||
|
||||
ethToString: (buffer, offset) =>
|
||||
buffer.toString('hex', offset, offset + 1) +
|
||||
':' +
|
||||
buffer.toString('hex', offset + 1, offset + 2) +
|
||||
':' +
|
||||
buffer.toString('hex', offset + 2, offset + 3) +
|
||||
':' +
|
||||
buffer.toString('hex', offset + 3, offset + 4) +
|
||||
':' +
|
||||
buffer.toString('hex', offset + 4, offset + 5) +
|
||||
':' +
|
||||
buffer.toString('hex', offset + 5, offset + 6),
|
||||
|
||||
stringToEth: (string, buffer, offset) => {
|
||||
const eth = /^([0-9A-Fa-f]{2}):([0-9A-Fa-f]{2}):([0-9A-Fa-f]{2}):([0-9A-Fa-f]{2}):([0-9A-Fa-f]{2}):([0-9A-Fa-f]{2})$/.exec(
|
||||
string
|
||||
)
|
||||
assert(eth !== null)
|
||||
buffer.writeUInt8(parseInt(eth[1], 16), offset)
|
||||
buffer.writeUInt8(parseInt(eth[2], 16), offset + 1)
|
||||
buffer.writeUInt8(parseInt(eth[3], 16), offset + 2)
|
||||
buffer.writeUInt8(parseInt(eth[4], 16), offset + 3)
|
||||
buffer.writeUInt8(parseInt(eth[5], 16), offset + 4)
|
||||
buffer.writeUInt8(parseInt(eth[6], 16), offset + 5)
|
||||
},
|
||||
|
||||
ip4ToString: (buffer, offset) =>
|
||||
util.format(
|
||||
'%d.%d.%d.%d',
|
||||
buffer.readUInt8(offset),
|
||||
buffer.readUInt8(offset + 1),
|
||||
buffer.readUInt8(offset + 2),
|
||||
buffer.readUInt8(offset + 3)
|
||||
),
|
||||
|
||||
stringToip4: (string, buffer, offset) => {
|
||||
const ip = /^([1-9]?\d|1\d\d|2[0-4]\d|25[0-5])\.([1-9]?\d|1\d\d|2[0-4]\d|25[0-5])\.([1-9]?\d|1\d\d|2[0-4]\d|25[0-5])\.([1-9]?\d|1\d\d|2[0-4]\d|25[0-5])$/.exec(
|
||||
string
|
||||
)
|
||||
assert(ip !== null)
|
||||
buffer.writeUInt8(parseInt(ip[1], 10), offset)
|
||||
buffer.writeUInt8(parseInt(ip[2], 10), offset + 1)
|
||||
buffer.writeUInt8(parseInt(ip[3], 10), offset + 2)
|
||||
buffer.writeUInt8(parseInt(ip[4], 10), offset + 3)
|
||||
},
|
||||
}
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user