Compare commits


12 Commits

Author SHA1 Message Date
Florent BEAUCHAMP
365e44fbb9 use nbd for all exports 2023-07-27 08:37:13 +02:00
Florent BEAUCHAMP
b4f13838a6 wip 2023-07-26 17:10:21 +02:00
Julien Fontanet
14a0caa4c6 fix(xo-web/xoa/licenses): fix message *go TO* 2023-07-25 09:43:11 +02:00
Florent BEAUCHAMP
1c23bd5ff7 feat(read-chunk/readChunkStrict): attach read chunk to error if small text (#6940) 2023-07-20 17:01:26 +02:00
Julien Fontanet
49c161b17a fix(xo-server,xo-web): send version when probing NFS SR
Reported by @benjamreis
2023-07-20 16:46:18 +02:00
Gabriel Gunullu
18dce3fce6 test(fs): fix wrong encryption (#6945) 2023-07-20 16:32:09 +02:00
Julien Fontanet
d6fc86b6bc chore(xo-server-transport-xmpp): remove old dep node-xmpp-client
Possibly fixes #6942
2023-07-20 10:54:52 +02:00
Florent BEAUCHAMP
61d960d4b1 fix(vmware-explorer): handle snapshot of 1TB+ disks 2023-07-20 10:25:28 +02:00
Florent BEAUCHAMP
02d3465832 feat(vmware-explorer): don't transform stream for raw import in thick mode 2023-07-20 10:25:28 +02:00
Florent BEAUCHAMP
4bbadc9515 feat(vmware-explorer): improve import
- use one stream instead of per block queries if possible
- retry block reading if failing
- handle unaligned end block
2023-07-20 10:25:28 +02:00
Florent BEAUCHAMP
78586291ca fix(vmware-explorer): better disk size computation 2023-07-20 10:25:28 +02:00
Florent BEAUCHAMP
945dec94bf feat(vmware-explorer): retry connection to ESXi 2023-07-20 10:25:28 +02:00
40 changed files with 629 additions and 686 deletions

View File

@@ -0,0 +1,32 @@
import NbdClient from './index.mjs'

// Benchmark: read 1 GiB from an NBD export with chunk sizes growing from
// 16 KiB to 16 MiB, logging the throughput (MiB/s) reached for each size.
async function bench() {
  const client = new NbdClient({
    address: '172.16.210.14',
    port: 8077,
    exportname: 'bench_export',
  })
  await client.connect()
  console.log('connected', client.exportSize)
  for (let chunkSize = 16 * 1024; chunkSize < 16 * 1024 * 1024; chunkSize *= 2) {
    let i = 0
    const start = Date.now()
    for await (const block of client.readBlocks(chunkSize)) {
      i++
      // progress marker every 16 MiB read
      if ((i * chunkSize) % (16 * 1024 * 1024) === 0) {
        process.stdout.write('.')
      }
      // stop after 1 GiB for this chunk size
      if (i * chunkSize > 1024 * 1024 * 1024) break
    }
    // bytes read, converted to MiB, divided by elapsed seconds
    console.log(chunkSize, Math.round(((i * chunkSize) / 1024 / 1024) * 1000 / (Date.now() - start)))
  }
}

bench()

View File

@@ -307,11 +307,11 @@ export default class NbdClient {
})
}
async *readBlocks(indexGenerator) {
async *readBlocks(indexGenerator = 2*1024*1024) {
// default : read all blocks
if (indexGenerator === undefined) {
if (typeof indexGenerator === 'number') {
const exportSize = this.#exportSize
const chunkSize = 2 * 1024 * 1024
const chunkSize = indexGenerator
indexGenerator = function* () {
const nbBlocks = Math.ceil(Number(exportSize / BigInt(chunkSize)))
for (let index = 0; BigInt(index) < nbBlocks; index++) {
@@ -319,6 +319,7 @@ export default class NbdClient {
}
}
}
const readAhead = []
const readAheadMaxLength = this.#readAhead
const makeReadBlockPromise = (index, size) => {

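With this change, `readBlocks` accepts a chunk size directly instead of requiring a generator. A minimal usage sketch, assuming the client is imported from the package entry point; the connection settings below are illustrative, not from this diff:

```js
import NbdClient from '@vates/nbd-client'

async function main() {
  // illustrative connection settings
  const client = new NbdClient({
    address: '192.0.2.1',
    port: 10809,
    exportname: 'example',
  })
  await client.connect()
  // iterate over the export in 512 KiB chunks
  for await (const block of client.readBlocks(512 * 1024)) {
    // consume each chunk here, e.g. write it to a destination stream
    console.log(block.length)
  }
  await client.disconnect()
}

main()
```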
View File

@@ -1,6 +1,7 @@
'use strict'
const assert = require('assert')
const isUtf8 = require('isutf8')
/**
* Read a chunk of data from a stream.
@@ -81,6 +82,13 @@ exports.readChunkStrict = async function readChunkStrict(stream, size) {
if (size !== undefined && chunk.length !== size) {
const error = new Error(`stream has ended with not enough data (actual: ${chunk.length}, expected: ${size})`)
// Buffer.isUtf8 is too recent for now
// @todo: replace the external package by Buffer.isUtf8 when the supported Node version reaches 18
if (chunk.length < 1024 && isUtf8(chunk)) {
error.text = chunk.toString('utf8')
}
Object.defineProperties(error, {
chunk: {
value: chunk,

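A minimal sketch of how a caller can use the attached text, assuming the package is consumed as in this repository; the surrounding function is illustrative:

```js
'use strict'
const { readChunkStrict } = require('@vates/read-chunk')

async function readHeader(stream) {
  try {
    return await readChunkStrict(stream, 512)
  } catch (error) {
    // small UTF-8 chunks are now attached to the error to ease debugging
    if (error.text !== undefined) {
      console.warn('incomplete header, got:', error.text)
    }
    throw error
  }
}
```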
View File

@@ -102,12 +102,37 @@ describe('readChunkStrict', function () {
assert.strictEqual(error.chunk, undefined)
})
it('throws if stream ends with not enough data', async () => {
it('throws if stream ends with not enough data, utf8', async () => {
const error = await rejectionOf(readChunkStrict(makeStream(['foo', 'bar']), 10))
assert(error instanceof Error)
assert.strictEqual(error.message, 'stream has ended with not enough data (actual: 6, expected: 10)')
assert.strictEqual(error.text, 'foobar')
assert.deepEqual(error.chunk, Buffer.from('foobar'))
})
it('throws if stream ends with not enough data, non utf8 ', async () => {
const source = [Buffer.alloc(10, 128), Buffer.alloc(10, 128)]
const error = await rejectionOf(readChunkStrict(makeStream(source), 30))
assert(error instanceof Error)
assert.strictEqual(error.message, 'stream has ended with not enough data (actual: 20, expected: 30)')
assert.strictEqual(error.text, undefined)
assert.deepEqual(error.chunk, Buffer.concat(source))
})
it('throws if stream ends with not enough data, utf8 , long data', async () => {
const source = Buffer.from('a'.repeat(1500))
const error = await rejectionOf(readChunkStrict(makeStream([source]), 2000))
assert(error instanceof Error)
assert.strictEqual(error.message, `stream has ended with not enough data (actual: 1500, expected: 2000)`)
assert.strictEqual(error.text, undefined)
assert.deepEqual(error.chunk, source)
})
it('succeed', async () => {
const source = Buffer.from('a'.repeat(20))
const chunk = await readChunkStrict(makeStream([source]), 10)
assert.deepEqual(source.subarray(10), chunk)
})
})
describe('skip', function () {
@@ -134,6 +159,16 @@ describe('skip', function () {
it('returns less size if stream ends', async () => {
assert.deepEqual(await skip(makeStream('foo bar'), 10), 7)
})
it('put back if it read too much', async () => {
let source = makeStream(['foo', 'bar'])
await skip(source, 1) // read part of data chunk
const chunk = (await readChunkStrict(source, 2)).toString('utf-8')
assert.strictEqual(chunk, 'oo')
source = makeStream(['foo', 'bar'])
assert.strictEqual(await skip(source, 3), 3) // read aligned with data chunk
})
})
describe('skipStrict', function () {
@@ -144,4 +179,9 @@ describe('skipStrict', function () {
assert.strictEqual(error.message, 'stream has ended with not enough data (actual: 7, expected: 10)')
assert.deepEqual(error.bytesSkipped, 7)
})
it('succeed', async () => {
const source = makeStream(['foo', 'bar', 'baz'])
const res = await skipStrict(source, 4)
assert.strictEqual(res, undefined)
})
})

View File

@@ -33,5 +33,8 @@
},
"devDependencies": {
"test": "^3.2.1"
},
"dependencies": {
"isutf8": "^4.0.0"
}
}

View File

@@ -660,14 +660,13 @@ export class RemoteAdapter {
return path
}
async writeVhd(path, input, { checksum = true, validator = noop, writeBlockConcurrency, dedup = false } = {}) {
async writeVhd(path, input, { checksum = true, validator = noop, writeBlockConcurrency } = {}) {
const handler = this._handler
if (this.useVhdDirectory()) {
const dataPath = `${dirname(path)}/data/${uuidv4()}.vhd`
const size = await createVhdDirectoryFromStream(handler, dataPath, input, {
concurrency: writeBlockConcurrency,
compression: this.#getCompressionType(),
dedup,
async validator() {
await input.task
return validator.apply(this, arguments)

View File

@@ -123,19 +123,19 @@ export async function checkAliases(
) {
const aliasFound = []
for (const alias of aliasPaths) {
let target
try {
target = await resolveVhdAlias(handler, alias)
if (!isVhdFile(target)) {
logWarn('alias references non VHD target', { alias, target })
if (remove) {
logInfo('removing alias and non VHD target', { alias, target })
await handler.unlink(target)
await handler.unlink(alias)
}
continue
const target = await resolveVhdAlias(handler, alias)
if (!isVhdFile(target)) {
logWarn('alias references non VHD target', { alias, target })
if (remove) {
logInfo('removing alias and non VHD target', { alias, target })
await handler.unlink(target)
await handler.unlink(alias)
}
continue
}
try {
const { dispose } = await openVhd(handler, target)
try {
await dispose()

View File

@@ -17,7 +17,6 @@ const DEFAULT_XAPI_VM_SETTINGS = {
concurrency: 2,
copyRetention: 0,
deleteFirst: false,
dedup: false,
diskPerVmConcurrency: 0, // not limited by default
exportRetention: 0,
fullInterval: 0,

View File

@@ -160,7 +160,6 @@ export class IncrementalRemoteWriter extends MixinRemoteWriter(AbstractIncrement
)
metadataContent = {
dedup: settings.dedup,
jobId,
mode: job.mode,
scheduleId,
@@ -209,7 +208,6 @@ export class IncrementalRemoteWriter extends MixinRemoteWriter(AbstractIncrement
// no checksum for VHDs, because they will be invalidated by
// merges and chainings
checksum: false,
dedup: settings.dedup,
validator: tmpPath => checkVhd(handler, tmpPath),
writeBlockConcurrency: this._config.writeBlockConcurrency,
})

View File

@@ -45,34 +45,6 @@ When `useVhdDirectory` is enabled on the remote, the directory containing the VH
└─ <uuid>.vhd
```
#### vhd directory with deduplication
The difference with the non-dedup mode is that a hash is computed for each VHD block. The hash is split into 4-character tokens and the data is stored in xo-block-store/{token1}/.../{token7}/{token8}.source.
A hard link is then made from this source to the destination folder in <vdis>/<job UUID>/<VDI UUID>/blocks/{number}/{number} (a sketch of the path derivation follows the tree below).
```
<remote>
└─ xo-block-store
└─ {4 chars}
└─ ...
└─ {4 chars}.source
└─ xo-vm-backups
├─ index.json // TODO
└─ <VM UUID>
├─ cache.json.gz
├─ vdis
│ └─ <job UUID>
│ └─ <VDI UUID>
│ ├─ index.json // TODO
│ ├─ <YYYYMMDD>T<HHmmss>.alias.vhd // contains the relative path to a VHD directory
│ └─ data
│ ├─ <uuid>.vhd // VHD directory format is described in vhd-lib/Vhd/VhdDirectory.js
├─ <YYYYMMDD>T<HHmmss>.json // backup metadata
├─ <YYYYMMDD>T<HHmmss>.xva
└─ <YYYYMMDD>T<HHmmss>.xva.checksum
```
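For illustration, a minimal sketch of how the store path can be derived from a block's SHA-256 hash, mirroring the #computeDeduplicationPath helper visible further down in this diff:

```js
import { createHash } from 'node:crypto'

// Split the hex digest into 4-character path segments under
// xo-block-store; the last segment carries a .source suffix.
function computeDeduplicationPath(data) {
  const hash = createHash('sha256').update(data).digest('hex')
  let path = '/xo-block-store'
  for (let i = 0; i < hash.length; i++) {
    if (i % 4 === 0) {
      path += '/'
    }
    path += hash[i]
  }
  return `${path}.source`
}
// e.g. '/xo-block-store/09a3/cd9e/.../dd40.source'
```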
## Cache for a VM
In a VM directory, if the file `cache.json.gz` exists, it contains the metadata for all the backups for this VM.
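A minimal sketch of reading this cache, assuming it is a gzipped JSON document; the helper name and path handling are illustrative:

```js
import fs from 'node:fs/promises'
import { gunzipSync } from 'node:zlib'

// Read the per-VM backup metadata cache from a VM directory.
async function readVmBackupCache(vmDir) {
  const compressed = await fs.readFile(`${vmDir}/cache.json.gz`)
  return JSON.parse(gunzipSync(compressed).toString('utf8'))
}
```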

View File

@@ -1,23 +0,0 @@
# Deduplication
- This uses an additional inode (or equivalent on the FS) for each distinct block in the `xo-block-store` sub folder
- This will not work well with immutability/object lock
- Only the blocks of VHD directories are deduplicated
- Prerequisites: the FS must support hard links and extended attributes
- A key (full backup) does not take more space on the remote than a delta. It will take more inodes, and more time, since we'll have to read all the blocks.
When a new block is written to the remote, a hash is computed. If a file with this hash doesn't exist in `xo-block-store`, create it, then add the hash as an extended attribute.
A hard link, sharing data and extended attributes, is then created to the destination.
When deleting a block which has a hash extended attribute, a check is done on the xo-block-store. If there is no other link, the block is deleted. The directory containing it stays.
When merging blocks, the unlink method is called before overwriting an existing block.
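A hedged sketch of this write/delete protocol using plain fs calls; the paths and function names are illustrative, not the actual handler API:

```js
import fs from 'node:fs/promises'

async function writeDeduplicated(sourcePath, destPath, data) {
  try {
    // 'wx' makes the write fail if the source already exists in the store
    await fs.writeFile(sourcePath, data, { flag: 'wx' })
  } catch (error) {
    if (error.code !== 'EEXIST') throw error // EEXIST: block already stored
  }
  // the hard link shares data (and extended attributes) with the source
  await fs.link(sourcePath, destPath)
}

async function unlinkDeduplicated(sourcePath, destPath) {
  await fs.unlink(destPath)
  const { nlink } = await fs.stat(sourcePath)
  // delete the source only when no other backup still links to it
  if (nlink === 1) {
    await fs.unlink(sourcePath)
  }
}
```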
### troubleshooting
Since all the blocks are hard linked, you can convert a deduplicated remote to a non-deduplicated one by deleting the xo-block-store directory.
Two new methods have been added to the local FS handler:
- deduplicationGarbageCollector(), which should be called from the root of the FS: it will clean any block without other links, and any empty directory
- deduplicationStats(), which will compute the number of blocks in the store and how many times they are used

View File

@@ -16,7 +16,6 @@ function formatVmBackup(backup) {
}),
id: backup.id,
dedup: backup.dedup,
jobId: backup.jobId,
mode: backup.mode,
scheduleId: backup.scheduleId,

View File

@@ -34,7 +34,6 @@
"bind-property-descriptor": "^2.0.0",
"decorator-synchronized": "^0.6.0",
"execa": "^5.0.0",
"fs-extended-attributes": "^1.0.1",
"fs-extra": "^11.1.0",
"get-stream": "^6.0.0",
"limit-concurrency-decorator": "^0.5.0",

View File

@@ -268,9 +268,9 @@ export default class RemoteHandlerAbstract {
await this._mktree(normalizePath(dir), { mode })
}
async outputFile(file, data, { dedup = false, dirMode, flags = 'wx' } = {}) {
async outputFile(file, data, { dirMode, flags = 'wx' } = {}) {
const encryptedData = this.#encryptor.encryptData(data)
await this._outputFile(normalizePath(file), encryptedData, { dedup, dirMode, flags })
await this._outputFile(normalizePath(file), encryptedData, { dirMode, flags })
}
async read(file, buffer, position) {
@@ -319,8 +319,8 @@ export default class RemoteHandlerAbstract {
await timeout.call(this._rmdir(normalizePath(dir)).catch(ignoreEnoent), this._timeout)
}
async rmtree(dir, { dedup } = {}) {
await this._rmtree(normalizePath(dir), { dedup })
async rmtree(dir) {
await this._rmtree(normalizePath(dir))
}
// Asks the handler to sync the state of the effective remote with its'
@@ -397,10 +397,6 @@ export default class RemoteHandlerAbstract {
}
}
async checkSupport() {
return {}
}
async test() {
const SIZE = 1024 * 1024 * 10
const testFileName = normalizePath(`${Date.now()}.test`)
@@ -441,14 +437,14 @@ export default class RemoteHandlerAbstract {
await this._truncate(file, len)
}
async __unlink(file, { checksum = true, dedup = false } = {}) {
async __unlink(file, { checksum = true } = {}) {
file = normalizePath(file)
if (checksum) {
ignoreErrors.call(this._unlink(checksumFile(file)))
}
await this._unlink(file, { dedup }).catch(ignoreEnoent)
await this._unlink(file).catch(ignoreEnoent)
}
async write(file, buffer, position) {
@@ -564,16 +560,17 @@ export default class RemoteHandlerAbstract {
throw new Error('Not implemented')
}
async _outputFile(file, data, { dirMode, flags, dedup = false }) {
async _outputFile(file, data, { dirMode, flags }) {
try {
return await this._writeFile(file, data, { dedup, flags })
return await this._writeFile(file, data, { flags })
} catch (error) {
if (error.code !== 'ENOENT') {
throw error
}
}
await this._mktree(dirname(file), { mode: dirMode })
return this._outputFile(file, data, { dedup, flags })
return this._outputFile(file, data, { flags })
}
async _outputStream(path, input, { dirMode, validator }) {
@@ -616,7 +613,7 @@ export default class RemoteHandlerAbstract {
throw new Error('Not implemented')
}
async _rmtree(dir, { dedup } = {}) {
async _rmtree(dir) {
try {
return await this._rmdir(dir)
} catch (error) {
@@ -627,7 +624,7 @@ export default class RemoteHandlerAbstract {
const files = await this._list(dir)
await asyncEach(files, file =>
this._unlink(`${dir}/${file}`, { dedup }).catch(error => {
this._unlink(`${dir}/${file}`).catch(error => {
// Unlink dir behavior is not consistent across platforms
// https://github.com/nodejs/node-v0.x-archive/issues/5791
if (error.code === 'EISDIR' || error.code === 'EPERM') {
@@ -642,7 +639,7 @@ export default class RemoteHandlerAbstract {
// called to initialize the remote
async _sync() {}
async _unlink(file, opts) {
async _unlink(file) {
throw new Error('Not implemented')
}

View File

@@ -209,7 +209,7 @@ describe('encryption', () => {
// encrypt with a non default algorithm
const encryptor = _getEncryptor('aes-256-cbc', '73c1838d7d8a6088ca2317fb5f29cd91')
await fs.writeFile(`${dir}/encryption.json`, `{"algorithm": "aes-256-gmc"}`)
await fs.writeFile(`${dir}/encryption.json`, `{"algorithm": "aes-256-gcm"}`)
await fs.writeFile(`${dir}/metadata.json`, encryptor.encryptData(`{"random": "NOTSORANDOM"}`))
// remote is now non empty : can't modify key anymore

View File

@@ -19,8 +19,7 @@ try {
} catch (_) {}
export const getHandler = (remote, ...rest) => {
const { type } = parse(remote.url)
const Handler = HANDLERS[type]
const Handler = HANDLERS[parse(remote.url).type]
if (!Handler) {
throw new Error('Unhandled remote type')
}

View File

@@ -1,17 +1,10 @@
import df from '@sindresorhus/df'
import fs from 'fs-extra'
// import fsx from 'fs-extended-attributes'
import lockfile from 'proper-lockfile'
import { createLogger } from '@xen-orchestra/log'
import { asyncEach } from '@vates/async-each'
import { fromEvent, fromCallback, ignoreErrors, retry } from 'promise-toolbox'
import { synchronized } from 'decorator-synchronized'
import { fromEvent, retry } from 'promise-toolbox'
import RemoteHandlerAbstract from './abstract'
import { normalize as normalizePath } from './path'
import assert from 'node:assert'
import { createHash, randomBytes } from 'node:crypto'
const { info, warn } = createLogger('xo:fs:local')
@@ -44,10 +37,6 @@ export default class LocalHandler extends RemoteHandlerAbstract {
#addSyncStackTrace
#retriesOnEagain
#supportDedup
#dedupDirectory = '/xo-block-store'
#hashMethod = 'sha256'
#attributeKey = `user.hash.${this.#hashMethod}`
constructor(remote, opts = {}) {
super(remote)
@@ -205,267 +194,16 @@ export default class LocalHandler extends RemoteHandlerAbstract {
return this.#addSyncStackTrace(fs.truncate, this.getFilePath(file), len)
}
async #localUnlink(filePath) {
return await this.#addSyncStackTrace(retry, () => fs.unlink(filePath), this.#retriesOnEagain)
}
async _unlink(file, { dedup } = {}) {
async _unlink(file) {
const filePath = this.getFilePath(file)
let hash
// only try to read dedup source if we try to delete something deduplicated
if (dedup === true) {
try {
// get hash before deleting the file
hash = await this.#getExtendedAttribute(file, this.#attributeKey)
} catch (err) {
// whatever : fall back to normal delete
}
}
// delete file in place
await this.#localUnlink(filePath)
// implies we are on a deduplicated file
if (hash !== undefined) {
const dedupPath = this.getFilePath(this.#computeDeduplicationPath(hash))
await this.#removeExtendedAttribute(file, this.#attributeKey)
try {
const { nlink } = await fs.stat(dedupPath)
// get the number of copies still using this data
// delete source if it's alone
if (nlink === 1) {
await this.#localUnlink(dedupPath)
}
} catch (error) {
// no problem if another process deleted the source or if we unlink directly the source file
if (error.code !== 'ENOENT') {
throw error
}
}
}
return await this.#addSyncStackTrace(retry, () => fs.unlink(filePath), this.#retriesOnEagain)
}
_writeFd(file, buffer, position) {
return this.#addSyncStackTrace(fs.write, file.fd, buffer, 0, buffer.length, position)
}
#localWriteFile(file, data, { flags }) {
_writeFile(file, data, { flags }) {
return this.#addSyncStackTrace(fs.writeFile, this.getFilePath(file), data, { flag: flags })
}
async _writeFile(file, data, { flags, dedup }) {
if (dedup === true) {
// only compute support once, and only if needed
if (this.#supportDedup === undefined) {
const supported = await this.checkSupport()
this.#supportDedup = supported.hardLink === true && supported.extendedAttributes === true
}
if (this.#supportDedup) {
const hash = this.#hash(data)
// create the file (if not already present) in the store
const dedupPath = await this.#writeDeduplicationSource(hash, data)
// hard link to the target place
// this linked file will have the same extended attributes
// (used for unlink)
return this.#link(dedupPath, file)
}
}
// fallback
return this.#localWriteFile(file, data, { flags })
}
#hash(data) {
return createHash(this.#hashMethod).update(data).digest('hex')
}
async #getExtendedAttribute(file, attributeName) {
try {
return this._readFile(file + attributeName)
} catch (err) {
if (err.code === 'ENOENT') {
return
}
throw err
}
}
async #setExtendedAttribute(file, attributeName, value) {
return this._writeFile(file+attributeName, value)
}
async #removeExtendedAttribute(file, attributeName){
return this._unlink(file+attributeName)
}
/*
async #getExtendedAttribute(file, attributeName) {
return new Promise((resolve, reject) => {
fsx.get(this.getFilePath(file), attributeName, (err, res) => {
if (err) {
reject(err)
} else {
// res is a buffer
// it is null if the file doesn't have this attribute
if (res !== null) {
resolve(res.toString('utf-8'))
}
resolve(undefined)
}
})
})
}
async #setExtendedAttribute(file, attributeName, value) {
return new Promise((resolve, reject) => {
fsx.set(this.getFilePath(file), attributeName, value, (err, res) => {
if (err) {
reject(err)
} else {
resolve(res)
}
})
})
}
async #removeExtendedAttribute(file, attributeName){
}
*/
// create a hard link between two files
#link(source, dest) {
return fs.link(this.getFilePath(source), this.getFilePath(dest))
}
// split path to keep a sane number of files per directory
#computeDeduplicationPath(hash) {
assert.strictEqual(hash.length % 4, 0)
let path = this.#dedupDirectory
for (let i = 0; i < hash.length; i++) {
if (i % 4 === 0) {
path += '/'
}
path += hash[i]
}
path += '.source'
return path
}
async #writeDeduplicationSource(hash, data) {
const path = this.#computeDeduplicationPath(hash)
try {
// flags ensures it fails if it already exists
// _outputFile will create the directory tree
await this._outputFile(path, data, { flags: 'wx' })
} catch (error) {
// if it is already present: not a problem
if (error.code === 'EEXIST') {
// it should already have the extended attributes, nothing more to do
return path
}
throw error
}
try {
await this.#setExtendedAttribute(path, this.#attributeKey, hash)
} catch (error) {
if (error.code !== 'ENOENT') {
throw error
}
// if a concurrent process deleted the dedup source: recreate it
return this.#writeDeduplicationSource(hash, data)
}
return path
}
/**
* delete empty dirs
* delete source files that don't have any more links
*
* @returns Promise
*/
async deduplicationGarbageCollector(dir = this.#dedupDirectory, alreadyVisited = false) {
try {
await this._rmdir(dir)
return
} catch (error) {
if (error.code !== 'ENOTEMPTY') {
throw error
}
}
// the directory may not be empty after a first visit
if (alreadyVisited) {
return
}
const files = await this._list(dir)
await asyncEach(
files,
async file => {
const stat = await fs.stat(this.getFilePath(`${dir}/${file}`))
// have to check the stat to ensure we don't try to delete
// the directories : they don't have links
if (stat.isDirectory()) {
return this.deduplicationGarbageCollector(`${dir}/${file}`)
}
if (stat.nlink === 1) {
return fs.unlink(this.getFilePath(`${dir}/${file}`))
}
},
{ concurrency: 2 }
) // since we do a recursive traversal of a deep tree
return this.deduplicationGarbageCollector(dir, true)
}
async deduplicationStats(dir = this.#dedupDirectory) {
let nbSourceBlocks = 0
let nbBlocks = 0
try {
const files = await this._list(dir)
await asyncEach(
files,
async file => {
const stat = await fs.stat(this.getFilePath(`${dir}/${file}`))
if (stat.isDirectory()) {
const { nbSourceBlocks: nbSourceInChild, nbBlocks: nbBlockInChild } = await this.deduplicationStats(
`${dir}/${file}`
)
nbSourceBlocks += nbSourceInChild
nbBlocks += nbBlockInChild
} else {
nbSourceBlocks++
nbBlocks += stat.nlink - 1 // ignore current
}
},
{ concurrency: 2 }
)
} catch (err) {
if (err.code !== 'ENOENT') {
throw err
}
}
return { nbSourceBlocks, nbBlocks }
}
@synchronized()
async checkSupport() {
const supported = await super.checkSupport()
const sourceFileName = normalizePath(`${Date.now()}.sourcededup`)
const destFileName = normalizePath(`${Date.now()}.destdedup`)
try {
const SIZE = 1024 * 1024
const data = await fromCallback(randomBytes, SIZE)
const hash = this.#hash(data)
await this._outputFile(sourceFileName, data, { flags: 'wx', dedup: false })
await this.#setExtendedAttribute(sourceFileName, this.#attributeKey, hash)
await this.#link(sourceFileName, destFileName)
const linkedData = await this._readFile(destFileName)
const { nlink } = await fs.stat(this.getFilePath(destFileName))
// contains the right data and the link counter
supported.hardLink = nlink === 2 && linkedData.equals(data)
supported.extendedAttributes = hash === (await this.#getExtendedAttribute(sourceFileName, this.#attributeKey))
} catch (error) {
warn(`error while testing the dedup`, { error })
} finally {
ignoreErrors.call(this._unlink(sourceFileName))
ignoreErrors.call(this._unlink(destFileName))
}
return supported
}
}

View File

@@ -1,107 +0,0 @@
import { after, beforeEach, describe, it } from 'node:test'
import assert from 'node:assert'
import fs from 'node:fs/promises'
import { getSyncedHandler } from './index.js'
import { Disposable, pFromCallback } from 'promise-toolbox'
import tmp from 'tmp'
import execa from 'execa'
import { rimraf } from 'rimraf'
import { randomBytes } from 'node:crypto'
// https://xkcd.com/221/
const data =
'H2GbLa0F2J4LHFLRwLP9zN4dGWJpdx1T6eGWra8BRlV9fBpRGtWIOSKXjU8y7fnxAWVGWpbYPYCwRigvxRSTcuaQsCtwvDNKMmFwYpsGMS14akgBD3EpOMPpKIRRySOsOeknpr48oopO1n9eq0PxGbOcY4Q9aojRu9rn1SMNyjq7YGzwVQEm6twA3etKGSYGvPJVTs2riXm7u6BhBh9VZtQDxQEy5ttkHiZUpgLi6QshSpMjL7dHco8k6gzGcxfpoyS5IzaQeXqDOeRjE6HNn27oUXpze5xRYolQhxA7IqdfzcYwWTqlaZb7UBUZoFCiFs5Y6vPlQVZ2Aw5YganLV1ZcIz78j6TAtXJAfXrDhksm9UteQul8RYT0Ur8AJRYgiGXOsXrWWBKm3CzZci6paLZ2jBmGfgVuBJHlvgFIjOHiVozjulGD4SwKQ2MNqUOylv89NTP1BsJuZ7MC6YCm5yix7FswoE7Y2NhDFqzEQvseRQFyz52AsfuqRY7NruKHlO7LOSI932che2WzxBAwy78Sk1eRHQLsZ37dLB4UkFFIq6TvyjJKznTMAcx9HDOSrFeke6KfsDB1A4W3BAxJk40oAcFMeM72Lg97sJExMJRz1m1nGQJEiGCcnll9G6PqEfHjoOhdDLgN2xewUyvbuRuKEXXxD1H6Tz1iWReyRGSagQNLXvqkKoHoxu3bvSi8nWrbtEY6K2eHLeF5bYubYGXc5VsfiCQNPEzQV4ECzaPdolRtbpRFMcB5aWK70Oew3HJkEcN7IkcXI9vlJKnFvFMqGOHKujd4Tyjhvru2UFh0dAkEwojNzz7W0XlASiXRneea9FgiJNLcrXNtBkvIgw6kRrgbXI6DPJdWDpm3fmWS8EpOICH3aTiXRLQUDZsReAaOsfau1FNtP4JKTQpG3b9rKkO5G7vZEWqTi69mtPGWmyOU47WL1ifJtlzGiFbZ30pcHMc0u4uopHwEQq6ZwM5S6NHvioxihhHQHO8JU2xvcjg5OcTEsXtMwIapD3re'
const hash = '09a3cd9e135114cb870a0b5cf0dfd3f4be994662d0c715b65bcfc5e3b635dd40'
const dataPath = 'xo-block-store/09a3/cd9e/1351/14cb/870a/0b5c/f0df/d3f4/be99/4662/d0c7/15b6/5bcf/c5e3/b635/dd40.source'
let dir
describe('dedup tests', () => {
beforeEach(async () => {
dir = await pFromCallback(cb => tmp.dir(cb))
})
after(async () => {
await rimraf(dir)
})
it('works in general case ', async () => {
await Disposable.use(getSyncedHandler({ url: `file://${dir}` }, { dedup: true }), async handler => {
await handler.outputFile('in/a/sub/folder/file', data, { dedup: true })
assert.doesNotReject(handler.list('xo-block-store'))
assert.strictEqual((await handler.list('xo-block-store')).length, 1)
assert.strictEqual((await handler.list('in/a/sub/folder')).length, 1)
assert.strictEqual((await handler.readFile('in/a/sub/folder/file')).toString('utf-8'), data)
const value = (await execa('getfattr', ['-n', 'user.hash.sha256', '--only-value', dir + '/in/a/sub/folder/file']))
.stdout
assert.strictEqual(value, hash)
// the source file is created
assert.strictEqual((await handler.readFile(dataPath)).toString('utf-8'), data)
await handler.outputFile('in/anotherfolder/file', data, { dedup: true })
assert.strictEqual((await handler.list('in/anotherfolder')).length, 1)
assert.strictEqual((await handler.readFile('in/anotherfolder/file')).toString('utf-8'), data)
await handler.unlink('in/a/sub/folder/file', { dedup: true })
// source is still here
assert.strictEqual((await handler.readFile(dataPath)).toString('utf-8'), data)
assert.strictEqual((await handler.readFile('in/anotherfolder/file')).toString('utf-8'), data)
await handler.unlink('in/anotherfolder/file', { dedup: true })
// source should have been deleted
assert.strictEqual(
(
await handler.list(
'xo-block-store/09a3/cd9e/1351/14cb/870a/0b5c/f0df/d3f4/be99/4662/d0c7/15b6/5bcf/c5e3/b635'
)
).length,
0
)
assert.strictEqual((await handler.list('in/anotherfolder')).length, 0)
})
})
it('garbage collector and stats', async () => {
await Disposable.use(getSyncedHandler({ url: `file://${dir}` }, { dedup: true }), async handler => {
await handler.outputFile('in/anotherfolder/file', data, { dedup: true })
await handler.outputFile('in/anotherfolder/same', data, { dedup: true })
await handler.outputFile('in/a/sub/folder/file', randomBytes(1024), { dedup: true })
let stats = await handler.deduplicationStats()
assert.strictEqual(stats.nbBlocks, 3)
assert.strictEqual(stats.nbSourceBlocks, 2)
await fs.unlink(`${dir}/in/a/sub/folder/file`, { dedup: true })
assert.strictEqual((await handler.list('xo-block-store')).length, 2)
await handler.deduplicationGarbageCollector()
stats = await handler.deduplicationStats()
assert.strictEqual(stats.nbBlocks, 2)
assert.strictEqual(stats.nbSourceBlocks, 1)
assert.strictEqual((await handler.list('xo-block-store')).length, 1)
})
})
it('compute support', async () => {
await Disposable.use(getSyncedHandler({ url: `file://${dir}` }, { dedup: true }), async handler => {
const supported = await handler.checkSupport()
assert.strictEqual(supported.hardLink, true, 'support hard link is not present in local fs')
assert.strictEqual(supported.extendedAttributes, true, 'support extended attributes is not present in local fs')
})
})
it('handles edge cases : source deleted', async () => {
await Disposable.use(getSyncedHandler({ url: `file://${dir}` }, { dedup: true }), async handler => {
await handler.outputFile('in/a/sub/folder/edge', data, { dedup: true })
await handler.unlink(dataPath, { dedup: true })
// no error if the source is already deleted
await assert.doesNotReject(() => handler.unlink('in/a/sub/folder/edge', { dedup: true }))
})
})
it('handles edge cases : non deduplicated file ', async () => {
await Disposable.use(getSyncedHandler({ url: `file://${dir}` }, { dedup: true }), async handler => {
await handler.outputFile('in/a/sub/folder/edge', data, { dedup: false })
// no error if deleting a non dedup file with dedup flags
await assert.doesNotReject(() => handler.unlink('in/a/sub/folder/edge', { dedup: true }))
})
})
})

View File

@@ -228,11 +228,6 @@ export default class S3Handler extends RemoteHandlerAbstract {
},
})
async _writeFile(file, data, options) {
if (options?.dedup ?? false) {
throw new Error(
"S3 remotes don't support deduplication from XO, please use the deduplication of your S3 provider if any"
)
}
return this.#s3.send(
new PutObjectCommand({
...this.#createParams(file),

View File

@@ -48,7 +48,7 @@ export default class VhdEsxiCowd extends VhdAbstract {
// depending on the parameters we also look into the parent data
return (
this.#grainDirectory.readInt32LE(blockId * 4) !== 0 ||
this.#grainDirectory.readUInt32LE(blockId * 4) !== 0 ||
(this.#lookMissingBlockInParent && this.#parentVhd.containsBlock(blockId))
)
}
@@ -61,14 +61,14 @@ export default class VhdEsxiCowd extends VhdAbstract {
const buffer = await this.#read(0, 2048)
strictEqual(buffer.slice(0, 4).toString('ascii'), 'COWD')
strictEqual(buffer.readInt32LE(4), 1) // version
strictEqual(buffer.readInt32LE(8), 3) // flags
const numSectors = buffer.readInt32LE(12)
const grainSize = buffer.readInt32LE(16)
strictEqual(buffer.readUInt32LE(4), 1) // version
strictEqual(buffer.readUInt32LE(8), 3) // flags
const numSectors = buffer.readUInt32LE(12)
const grainSize = buffer.readUInt32LE(16)
strictEqual(grainSize, 1) // 1 grain should be 1 sector long
strictEqual(buffer.readInt32LE(20), 4) // grain directory position in sectors
strictEqual(buffer.readUInt32LE(20), 4) // grain directory position in sectors
const nbGrainDirectoryEntries = buffer.readInt32LE(24)
const nbGrainDirectoryEntries = buffer.readUInt32LE(24)
strictEqual(nbGrainDirectoryEntries, Math.ceil(numSectors / 4096))
const size = numSectors * 512
// a grain directory entry contains the address of a grain table
@@ -90,7 +90,7 @@ export default class VhdEsxiCowd extends VhdAbstract {
// we're lucky: a grain address can address exactly a full block
async readBlock(blockId) {
notEqual(this.#grainDirectory, undefined, 'grainDirectory is not loaded')
const sectorOffset = this.#grainDirectory.readInt32LE(blockId * 4)
const sectorOffset = this.#grainDirectory.readUInt32LE(blockId * 4)
const buffer = (await this.#parentVhd.readBlock(blockId)).buffer
@@ -137,7 +137,7 @@ export default class VhdEsxiCowd extends VhdAbstract {
}
for (let i = 0; i < graintable.length / 4; i++) {
const grainOffset = graintable.readInt32LE(i * 4)
const grainOffset = graintable.readUInt32LE(i * 4)
if (grainOffset === 0) {
// the content from parent : it is already in buffer
await changeRange()

View File

@@ -1,7 +1,7 @@
import _computeGeometryForSize from 'vhd-lib/_computeGeometryForSize.js'
import { createFooter, createHeader } from 'vhd-lib/_createFooterHeader.js'
import { DISK_TYPES, FOOTER_SIZE } from 'vhd-lib/_constants.js'
import { readChunk } from '@vates/read-chunk'
import { readChunkStrict, skipStrict } from '@vates/read-chunk'
import { Task } from '@vates/task'
import { unpackFooter, unpackHeader } from 'vhd-lib/Vhd/_utils.js'
import { VhdAbstract } from 'vhd-lib'
@@ -21,6 +21,10 @@ export default class VhdEsxiRaw extends VhdAbstract {
#header
#footer
#streamOffset = 0
#stream
#reading = false
static async open(esxi, datastore, path, opts) {
const vhd = new VhdEsxiRaw(esxi, datastore, path, opts)
await vhd.readHeaderAndFooter()
@@ -49,10 +53,10 @@ export default class VhdEsxiRaw extends VhdAbstract {
this.#header = unpackHeader(createHeader(length / VHD_BLOCK_LENGTH))
const geometry = _computeGeometryForSize(length)
const actualSize = geometry.actualSize
this.#footer = unpackFooter(
createFooter(actualSize, Math.floor(Date.now() / 1000), geometry, FOOTER_SIZE, DISK_TYPES.DYNAMIC)
// length can be smaller than disk capacity due to alignment to head/cylinder/sector
createFooter(length, Math.floor(Date.now() / 1000), geometry, FOOTER_SIZE, DISK_TYPES.DYNAMIC)
)
}
@@ -64,12 +68,65 @@ export default class VhdEsxiRaw extends VhdAbstract {
return this.#bat.has(blockId)
}
async readBlock(blockId) {
async #readChunk(start, length) {
if (this.#reading) {
throw new Error('reading must be done sequentially')
}
try {
this.#reading = true
if (this.#stream !== undefined) {
// stream is too far ahead or too far behind
if (this.#streamOffset > start || this.#streamOffset + VHD_BLOCK_LENGTH < start) {
this.#stream.destroy()
this.#stream = undefined
this.#streamOffset = 0
}
}
// no stream
if (this.#stream === undefined) {
const end = this.footer.currentSize - 1
const res = await this.#esxi.download(this.#datastore, this.#path, `${start}-${end}`)
this.#stream = res.body
this.#streamOffset = start
}
// stream a little behind
if (this.#streamOffset < start) {
await skipStrict(this.#stream, start - this.#streamOffset)
this.#streamOffset = start
}
// really read data
this.#streamOffset += length
const data = await readChunkStrict(this.#stream, length)
return data
} catch (error) {
error.start = start
error.length = length
error.streamLength = this.footer.currentSize
this.#stream?.destroy()
this.#stream = undefined
this.#streamOffset = 0
throw error
} finally {
this.#reading = false
}
}
async #readBlock(blockId) {
const start = blockId * VHD_BLOCK_LENGTH
const end = (blockId + 1) * VHD_BLOCK_LENGTH - 1
let length = VHD_BLOCK_LENGTH
let partial = false
if (start + length > this.footer.currentSize) {
length = this.footer.currentSize - start
partial = true
}
const data = await (await this.#esxi.download(this.#datastore, this.#path, `${start}-${end}`)).buffer()
let data = await this.#readChunk(start, length)
if (partial) {
data = Buffer.concat([data, Buffer.alloc(VHD_BLOCK_LENGTH - data.length)])
}
const bitmap = Buffer.alloc(512, 255)
return {
id: blockId,
@@ -79,28 +136,44 @@ export default class VhdEsxiRaw extends VhdAbstract {
}
}
async readBlock(blockId) {
let tries = 5
let lastError
while (tries > 0) {
try {
const res = await this.#readBlock(blockId)
return res
} catch (error) {
lastError = error
lastError.blockId = blockId
console.warn('got error, will retry in 2 seconds', lastError)
}
await new Promise(resolve => setTimeout(() => resolve(), 2000))
tries--
}
throw lastError
}
// this will read the whole disk once to check which blocks contain data; it can take a long time to execute depending on the network speed
async readBlockAllocationTable() {
if (!this.#thin) {
// fast path: if we do not use thin mode, the BAT is full
return
}
const res = await this.#esxi.download(this.#datastore, this.#path)
const length = res.headers.get('content-length')
const stream = res.body
const empty = Buffer.alloc(VHD_BLOCK_LENGTH, 0)
let pos = 0
this.#bat = new Set()
let nextChunkLength = Math.min(VHD_BLOCK_LENGTH, length)
Task.set('total', length / VHD_BLOCK_LENGTH)
let nextChunkLength = Math.min(VHD_BLOCK_LENGTH, this.footer.currentSize)
Task.set('total', this.footer.currentSize / VHD_BLOCK_LENGTH)
const progress = setInterval(() => {
Task.set('progress', Math.round((pos * 100) / length))
console.log('reading blocks', pos / VHD_BLOCK_LENGTH, '/', length / VHD_BLOCK_LENGTH)
Task.set('progress', Math.round((pos * 100) / this.footer.currentSize))
console.log('reading blocks', pos / VHD_BLOCK_LENGTH, '/', this.footer.currentSize / VHD_BLOCK_LENGTH)
}, 30 * 1000)
while (nextChunkLength > 0) {
try {
const chunk = await readChunk(stream, nextChunkLength)
const chunk = await this.#readChunk(pos, nextChunkLength)
let isEmpty
if (nextChunkLength === VHD_BLOCK_LENGTH) {
isEmpty = empty.equals(chunk)
@@ -112,15 +185,28 @@ export default class VhdEsxiRaw extends VhdAbstract {
this.#bat.add(pos / VHD_BLOCK_LENGTH)
}
pos += VHD_BLOCK_LENGTH
nextChunkLength = Math.min(VHD_BLOCK_LENGTH, length - pos)
nextChunkLength = Math.min(VHD_BLOCK_LENGTH, this.footer.currentSize - pos)
} catch (error) {
clearInterval(progress)
throw error
}
}
console.log('BAT reading done, remaining ', this.#bat.size, '/', Math.ceil(length / VHD_BLOCK_LENGTH))
console.log(
'BAT reading done, remaining ',
this.#bat.size,
'/',
Math.ceil(this.footer.currentSize / VHD_BLOCK_LENGTH)
)
clearInterval(progress)
}
rawContent() {
return this.#esxi.download(this.#datastore, this.#path).then(res => {
const stream = res.body
stream.length = this.footer.currentSize
return stream
})
}
}
/* eslint-enable no-console */

View File

@@ -1,4 +1,5 @@
import { Client } from '@vates/node-vsphere-soap'
import { createLogger } from '@xen-orchestra/log'
import { dirname } from 'node:path'
import { EventEmitter } from 'node:events'
import { strictEqual, notStrictEqual } from 'node:assert'
@@ -9,6 +10,8 @@ import parseVmdk from './parsers/vmdk.mjs'
import parseVmsd from './parsers/vmsd.mjs'
import parseVmx from './parsers/vmx.mjs'
const { warn } = createLogger('xo:vmware-explorer:esxi')
export default class Esxi extends EventEmitter {
#client
#cookies
@@ -64,7 +67,7 @@ export default class Esxi extends EventEmitter {
})
}
async download(dataStore, path, range) {
async #download(dataStore, path, range) {
strictEqual(this.#ready, true)
notStrictEqual(this.#dcPath, undefined)
const url = new URL('https://localhost')
@@ -102,6 +105,24 @@ export default class Esxi extends EventEmitter {
return res
}
async download(dataStore, path, range) {
let tries = 5
let lastError
while (tries > 0) {
try {
const res = await this.#download(dataStore, path, range)
return res
} catch (error) {
warn('got error, will retry in 2 seconds', { error })
lastError = error
}
await new Promise(resolve => setTimeout(() => resolve(), 2000))
tries--
}
throw lastError
}
// inspired from https://github.com/reedog117/node-vsphere-soap/blob/master/test/vsphere-soap.test.js#L95
async search(type, properties) {
// get property collector

View File

@@ -4,11 +4,12 @@
"version": "0.2.3",
"name": "@xen-orchestra/vmware-explorer",
"dependencies": {
"@vates/task": "^0.2.0",
"@vates/node-vsphere-soap": "^1.0.0",
"@vates/read-chunk": "^1.1.1",
"@vates/task": "^0.2.0",
"@xen-orchestra/log": "^0.6.0",
"lodash": "^4.17.21",
"node-fetch": "^3.3.0",
"@vates/node-vsphere-soap": "^1.0.0",
"vhd-lib": "^4.5.0"
},
"engines": {

View File

@@ -64,8 +64,12 @@ class Vdi {
})
}
async _getNbdClient(ref) {
const nbdInfos = await this.call('VDI.get_nbd_info', ref)
async _getNbdClient(ref) {
const nbdInfos = [{
address:'172.16.210.14',
port: 8077,
exportname: 'bench_export'
}]//await this.call('VDI.get_nbd_info', ref)
if (nbdInfos.length > 0) {
// a little bit of randomization to spread the load
const nbdInfo = nbdInfos[Math.floor(Math.random() * nbdInfos.length)]
@@ -94,13 +98,15 @@ class Vdi {
query.base = baseRef
}
let nbdClient, stream
try {
if (this._preferNbd) {
if (this._preferNbd || true) {
nbdClient = await this._getNbdClient(ref)
}
// the raw nbd export does not need to peek at the vhd source
if (nbdClient !== undefined && format === VDI_FORMAT_RAW) {
if (nbdClient !== undefined && format === VDI_FORMAT_RAW || true) {
stream = createNbdRawStream(nbdClient)
} else {
// raw export without nbd or vhd exports needs a resource stream

View File

@@ -17,6 +17,8 @@
- [REST API] Fix VDI export when NBD is enabled
- [XO Config Cloud Backup] Improve wording about passphrase (PR [#6938](https://github.com/vatesfr/xen-orchestra/pull/6938))
- [Pool] Fix IPv6 handling when adding hosts
- [New SR] Send provided NFS version to XAPI when probing a share
- [Backup/exports] Show more information on error `stream has ended with not enough data (actual: xxx, expected: 512)` (PR [#6940](https://github.com/vatesfr/xen-orchestra/pull/6940))
### Packages to release
@@ -38,10 +40,13 @@
- @vates/nbd-client major
- @vates/node-vsphere-soap major
- @xen-orchestra/backups minor
- @xen-orchestra/vmware-explorer minor
- @xen-orchestra/xapi major
- @vates/read-chunk minor
- complex-matcher patch
- xen-api patch
- xo-server patch
- xo-server-transport-xmpp patch
- xo-server-audit patch
- xo-web minor

View File

@@ -104,7 +104,7 @@ describe('VhdAbstract', async () => {
it('renames and unlink a VhdDirectory', async () => {
const initalSize = 4
const vhdDirectory = `${tempDir}/randomfile.dir`
await createRandomVhdDirectory(vhdDirectory, initalSize, { dedup: true })
await createRandomVhdDirectory(vhdDirectory, initalSize)
await Disposable.use(async function* () {
const handler = yield getSyncedHandler({ url: 'file:///' })
@@ -116,24 +116,11 @@ describe('VhdAbstract', async () => {
// it should clean an existing directory
await fs.mkdir(targetFileName)
await fs.writeFile(`${targetFileName}/dummy`, 'I exists')
await VhdAbstract.unlink(handler, `${targetFileName}`)
await VhdAbstract.unlink(handler, `${targetFileName}/dummy`)
assert.equal(await fs.exists(`${targetFileName}/dummy`), false)
})
})
it('unlinks a deduplicated VhdDirectory', async () => {
const initalSize = 4
const vhdDirectory = `${tempDir}/random.vhd`
await createRandomVhdDirectory(vhdDirectory, initalSize, { dedup: true })
await Disposable.use(async function* () {
const handler = yield getSyncedHandler({ url: 'file:///' })
await VhdAbstract.unlink(handler, vhdDirectory)
assert.equal(await fs.exists(vhdDirectory), false)
})
})
it('Creates, renames and unlink alias', async () => {
const initalSize = 4
const rawFileName = `${tempDir}/randomfile`

View File

@@ -206,14 +206,7 @@ exports.VhdAbstract = class VhdAbstract {
await handler.unlink(resolved)
} catch (err) {
if (err.code === 'EISDIR') {
// @todo : should we open it ?
const chunkFilters = await handler.readFile(resolved + '/chunk-filters.json').then(JSON.parse, error => {
if (error.code === 'ENOENT') {
return []
}
throw error
})
await handler.rmtree(resolved, { dedup: chunkFilters[1] === true })
await handler.rmtree(resolved)
} else {
throw err
}

View File

@@ -19,7 +19,6 @@ const NULL_COMPRESSOR = {
}
const COMPRESSORS = {
none: NULL_COMPRESSOR,
gzip: {
compress: (
gzip => buffer =>
@@ -79,7 +78,6 @@ exports.VhdDirectory = class VhdDirectory extends VhdAbstract {
#header
footer
#compressor
#dedup
get compressionType() {
return this.#compressor.id
@@ -104,9 +102,8 @@ exports.VhdDirectory = class VhdDirectory extends VhdAbstract {
this.#uncheckedBlockTable = blockTable
}
static async open(handler, path, { compression, flags = 'r+' } = {}) {
const dedup = path.endsWith('dedup.vhd')
const vhd = new VhdDirectory(handler, path, { compression, dedup, flags })
static async open(handler, path, { flags = 'r+' } = {}) {
const vhd = new VhdDirectory(handler, path, { flags })
// opening a file for reading does not trigger EISDIR as long as we don't really read from it:
// https://man7.org/linux/man-pages/man2/open.2.html
@@ -120,9 +117,9 @@ exports.VhdDirectory = class VhdDirectory extends VhdAbstract {
}
}
static async create(handler, path, { flags = 'wx+', compression, dedup } = {}) {
static async create(handler, path, { flags = 'wx+', compression } = {}) {
await handler.mktree(path)
const vhd = new VhdDirectory(handler, path, { flags, compression, dedup })
const vhd = new VhdDirectory(handler, path, { flags, compression })
return {
dispose: () => {},
value: vhd,
@@ -135,7 +132,6 @@ exports.VhdDirectory = class VhdDirectory extends VhdAbstract {
this._path = path
this._opts = opts
this.#compressor = getCompressor(opts?.compression)
this.#dedup = opts?.dedup ?? false
this.writeBlockAllocationTable = synchronized()(this.writeBlockAllocationTable)
}
@@ -162,7 +158,7 @@ exports.VhdDirectory = class VhdDirectory extends VhdAbstract {
}
}
async _writeChunk(partName, buffer, dedup = false) {
async _writeChunk(partName, buffer) {
assert.notStrictEqual(
this._opts?.flags,
'r',
@@ -172,7 +168,7 @@ exports.VhdDirectory = class VhdDirectory extends VhdAbstract {
// in case of VhdDirectory, we want to create the file if it does not exist
const flags = this._opts?.flags === 'r+' ? 'w' : this._opts?.flags
const compressed = await this.#compressor.compress(buffer)
return this._handler.outputFile(this.#getChunkPath(partName), compressed, { flags, dedup })
return this._handler.outputFile(this.#getChunkPath(partName), compressed, { flags })
}
// put blocks in subdirectories to limit the impact of directory listing
@@ -266,10 +262,6 @@ exports.VhdDirectory = class VhdDirectory extends VhdAbstract {
}
try {
const blockExists = this.containsBlock(blockId)
if (blockExists && this.#dedup) {
// this will trigger the dedup store cleaning if needed
await this._handler.unlink(this._getFullBlockPath(blockId), { dedup: true })
}
await this._handler.rename(childBlockPath, this._getFullBlockPath(blockId))
if (!blockExists) {
setBitmap(this.#blockTable, blockId)
@@ -293,7 +285,7 @@ exports.VhdDirectory = class VhdDirectory extends VhdAbstract {
}
async writeEntireBlock(block) {
await this._writeChunk(this.#getBlockPath(block.id), block.buffer, this.#dedup)
await this._writeChunk(this.#getBlockPath(block.id), block.buffer)
setBitmap(this.#blockTable, block.id)
}

View File

@@ -15,7 +15,7 @@ const { fuHeader, checksumStruct } = require('./_structs')
const assert = require('node:assert')
exports.createNbdRawStream = async function createRawStream(nbdClient) {
const stream = Readable.from(nbdClient.readBlocks())
const stream = Readable.from(nbdClient.readBlocks(524288))
stream.on('error', () => nbdClient.disconnect())
stream.on('end', () => nbdClient.disconnect())

View File

@@ -8,8 +8,8 @@ const { asyncEach } = require('@vates/async-each')
const { warn } = createLogger('vhd-lib:createVhdDirectoryFromStream')
const buildVhd = Disposable.wrap(async function* (handler, path, inputStream, { concurrency, compression, dedup }) {
const vhd = yield VhdDirectory.create(handler, path, { compression, dedup })
const buildVhd = Disposable.wrap(async function* (handler, path, inputStream, { concurrency, compression }) {
const vhd = yield VhdDirectory.create(handler, path, { compression })
await asyncEach(
parseVhdStream(inputStream),
async function (item) {
@@ -45,10 +45,10 @@ exports.createVhdDirectoryFromStream = async function createVhdDirectoryFromStre
handler,
path,
inputStream,
{ validator, concurrency = 16, compression, dedup } = {}
{ validator, concurrency = 16, compression } = {}
) {
try {
const size = await buildVhd(handler, path, inputStream, { concurrency, compression, dedup })
const size = await buildVhd(handler, path, inputStream, { concurrency, compression })
if (validator !== undefined) {
await validator.call(this, path)
}

View File

@@ -62,7 +62,7 @@ exports.recoverRawContent = async function recoverRawContent(vhdName, rawName, o
}
// @todo how can I call vhd-cli copy from here
async function convertToVhdDirectory(rawFileName, vhdFileName, path, { dedup = false } = {}) {
async function convertToVhdDirectory(rawFileName, vhdFileName, path) {
fs.mkdirp(path)
const srcVhd = await fs.open(vhdFileName, 'r')
@@ -95,17 +95,15 @@ async function convertToVhdDirectory(rawFileName, vhdFileName, path, { dedup = f
await fs.read(srcRaw, blockData, 0, blockData.length, offset)
await fs.writeFile(path + '/blocks/0/' + i, Buffer.concat([bitmap, blockData]))
}
await fs.writeFile(path + '/chunk-filters.json', JSON.stringify(['none', dedup]))
await fs.close(srcRaw)
}
exports.convertToVhdDirectory = convertToVhdDirectory
exports.createRandomVhdDirectory = async function createRandomVhdDirectory(path, sizeMB, { dedup = false } = {}) {
exports.createRandomVhdDirectory = async function createRandomVhdDirectory(path, sizeMB) {
fs.mkdirp(path)
const rawFileName = `${path}/temp.raw`
await createRandomFile(rawFileName, sizeMB)
const vhdFileName = `${path}/temp.vhd`
await convertFromRawToVhd(rawFileName, vhdFileName)
await convertToVhdDirectory(rawFileName, vhdFileName, path, { dedup })
await convertToVhdDirectory(rawFileName, vhdFileName, path)
}

View File

@@ -26,10 +26,10 @@
"preferGlobal": false,
"main": "dist/",
"engines": {
"node": ">=6"
"node": ">=10"
},
"dependencies": {
"node-xmpp-client": "^3.0.0",
"@xmpp/client": "^0.13.1",
"promise-toolbox": "^0.21.0"
},
"devDependencies": {

View File

@@ -1,5 +1,5 @@
import fromEvent from 'promise-toolbox/fromEvent'
import XmppClient from 'node-xmpp-client'
import { client, xml } from '@xmpp/client'
// ===================================================================
@@ -46,13 +46,16 @@ class TransportXmppPlugin {
this._client = null
}
configure(conf) {
this._conf = conf
this._conf.reconnect = true
configure({ host, jid, port, password }) {
this._conf = {
password,
service: Object.assign(new URL('xmpp://localhost'), { hostname: host, port }).href,
username: jid,
}
}
async load() {
this._client = new XmppClient(this._conf)
this._client = client(this._conf)
this._client.on('error', () => {})
await fromEvent(this._client.connection.socket, 'data')
@@ -71,12 +74,14 @@ class TransportXmppPlugin {
_sendToXmppClient({ to, message }) {
for (const receiver of to) {
this._client.send(
new XmppClient.Stanza('message', {
to: receiver,
type: 'chat',
})
.c('body')
.t(message)
xml(
'message',
{
to: receiver,
type: 'chat',
},
xml('body', {}, message)
)
)
}
}

View File

@@ -467,10 +467,11 @@ createZfs.resolve = {
// This function helps to detect all NFS shares (exports) on a NFS server
// Return a table of exports with their paths and ACLs
export async function probeNfs({ host, server }) {
export async function probeNfs({ host, nfsVersion, server }) {
const xapi = this.getXapi(host)
const deviceConfig = {
nfsversion: nfsVersion,
server,
}
@@ -501,6 +502,7 @@ export async function probeNfs({ host, server }) {
probeNfs.params = {
host: { type: 'string' },
nfsVersion: { type: 'string', optional: true },
server: { type: 'string' },
}
@@ -837,10 +839,11 @@ probeHbaExists.resolve = {
// This function helps to detect if this NFS SR already exists in XAPI
// It returns a table of SR UUID, empty if no existing connections
export async function probeNfsExists({ host, server, serverPath }) {
export async function probeNfsExists({ host, nfsVersion, server, serverPath }) {
const xapi = this.getXapi(host)
const deviceConfig = {
nfsversion: nfsVersion,
server,
serverpath: serverPath,
}
@@ -859,6 +862,7 @@ export async function probeNfsExists({ host, server, serverPath }) {
probeNfsExists.params = {
host: { type: 'string' },
nfsVersion: { type: 'string', optional: true },
server: { type: 'string' },
serverPath: { type: 'string' },
}

View File

@@ -4,7 +4,7 @@ import { fromEvent } from 'promise-toolbox'
import { createRunner } from '@xen-orchestra/backups/Backup.mjs'
import { Task } from '@xen-orchestra/mixins/Tasks.mjs'
import { v4 as generateUuid } from 'uuid'
import { VDI_FORMAT_VHD } from '@xen-orchestra/xapi'
import { VDI_FORMAT_RAW, VDI_FORMAT_VHD } from '@xen-orchestra/xapi'
import asyncMapSettled from '@xen-orchestra/async-map/legacy.js'
import Esxi from '@xen-orchestra/vmware-explorer/esxi.mjs'
import openDeltaVmdkasVhd from '@xen-orchestra/vmware-explorer/openDeltaVmdkAsVhd.mjs'
@@ -271,10 +271,16 @@ export default class MigrateVm {
}
parentVhd = vhd
}
// it can be empty if the VM doesn't have a snapshot and is running
if (vhd !== undefined) {
// it can be empty if the VM doesn't have a snapshot and is running
const stream = vhd.stream()
await vdi.$importContent(stream, { format: VDI_FORMAT_VHD })
if (thin) {
const stream = vhd.stream()
await vdi.$importContent(stream, { format: VDI_FORMAT_VHD })
} else {
// no transformation when there is no snapshot in thick mode
const stream = await vhd.rawContent()
await vdi.$importContent(stream, { format: VDI_FORMAT_RAW })
}
}
return { vdi, vhd }
})

View File

@@ -2513,7 +2513,7 @@ const messages = {
licensesBinding: 'Licenses binding',
notEnoughXcpngLicenses: 'Not enough XCP-ng licenses',
notBoundSelectLicense: 'Not bound (Plan (ID), expiration date)',
xcpngLicensesBindingAvancedView: "To bind an XCP-ng license, go the pool's Advanced tab.",
xcpngLicensesBindingAvancedView: "To bind an XCP-ng license, go to the pool's Advanced tab.",
xosanUnregisteredDisclaimer:
'You are not registered and therefore will not be able to create or manage your XOSAN SRs. {link}',
xosanSourcesDisclaimer:

View File

@@ -533,7 +533,8 @@ const xoItemToRender = {
<span>
<Icon icon='xo-cloud-config' /> <ShortDate timestamp={createdAt} />
</span>
),
)
,
// XO objects.
pool: props => <Pool {...props} />,
@@ -601,7 +602,6 @@ const xoItemToRender = {
</span>{' '}
<span className='tag tag-warning'>{backup.remote.name}</span>{' '}
{backup.size !== undefined && <span className='tag tag-info'>{formatSize(backup.size)}</span>}{' '}
{backup.dedup === true && <span className='tag tag-info'>deduplicated</span>}{' '}
<FormattedDate
value={new Date(backup.timestamp)}
month='long'

View File

@@ -2693,9 +2693,10 @@ export const fetchFiles = (remote, disk, partition, paths) =>
// -------------------------------------------------------------------
export const probeSrNfs = (host, server) => _call('sr.probeNfs', { host, server })
export const probeSrNfs = (host, server, nfsVersion) => _call('sr.probeNfs', { host, nfsVersion, server })
export const probeSrNfsExists = (host, server, serverPath) => _call('sr.probeNfsExists', { host, server, serverPath })
export const probeSrNfsExists = (host, server, serverPath, nfsVersion) =>
_call('sr.probeNfsExists', { host, nfsVersion, server, serverPath })
export const probeSrIscsiIqns = (host, target, port = undefined, chapUser = undefined, chapPassword) => {
const params = { host, target }

View File

@@ -467,11 +467,11 @@ export default class New extends Component {
_handleSearchServer = async () => {
const { password, port, server, username } = this.refs
const { host, type } = this.state
const { host, nfsVersion, type } = this.state
try {
if (type === 'nfs' || type === 'nfsiso') {
const paths = await probeSrNfs(host.id, server.value)
const paths = await probeSrNfs(host.id, server.value, nfsVersion !== '' ? nfsVersion : undefined)
this.setState({
usage: undefined,
paths,
@@ -500,12 +500,12 @@ export default class New extends Component {
_handleSrPathSelection = async path => {
const { server } = this.refs
const { host } = this.state
const { host, nfsVersion } = this.state
try {
this.setState(({ loading }) => ({ loading: loading + 1 }))
this.setState({
existingSrs: await probeSrNfsExists(host.id, server.value, path),
existingSrs: await probeSrNfsExists(host.id, server.value, path, nfsVersion !== '' ? nfsVersion : undefined),
path,
usage: true,
summary: true,

yarn.lock
View File

@@ -705,7 +705,7 @@
dependencies:
tslib "^2.5.0"
"@babel/cli@^7.0.0", "@babel/cli@^7.1.5", "@babel/cli@^7.13.16", "@babel/cli@^7.4.4", "@babel/cli@^7.7.0", "@babel/cli@^7.7.4":
"@babel/cli@^7.0.0", "@babel/cli@^7.1.5", "@babel/cli@^7.13.16", "@babel/cli@^7.16.0", "@babel/cli@^7.4.4", "@babel/cli@^7.7.0", "@babel/cli@^7.7.4":
version "7.22.9"
resolved "https://registry.yarnpkg.com/@babel/cli/-/cli-7.22.9.tgz#501b3614aeda7399371f6d5991404f069b059986"
integrity sha512-nb2O7AThqRo7/E53EGiuAkMaRbb7J5Qp3RvN+dmua1U+kydm0oznkhqbTEG15yk26G/C3yL6OdZjzgl+DMXVVA==
@@ -733,7 +733,7 @@
resolved "https://registry.yarnpkg.com/@babel/compat-data/-/compat-data-7.22.9.tgz#71cdb00a1ce3a329ce4cbec3a44f9fef35669730"
integrity sha512-5UamI7xkUcJ3i9qVDS+KFDEK8/7oJ55/sJMB1Ge7IEapr7KfdfV/HErR+koZwOfd+SgtFKOKRhRakdg++DcJpQ==
"@babel/core@^7.0.0", "@babel/core@^7.1.5", "@babel/core@^7.1.6", "@babel/core@^7.11.0", "@babel/core@^7.11.6", "@babel/core@^7.12.3", "@babel/core@^7.13.8", "@babel/core@^7.14.0", "@babel/core@^7.4.4", "@babel/core@^7.5.5", "@babel/core@^7.7.2", "@babel/core@^7.7.4", "@babel/core@^7.7.5", "@babel/core@^7.8.4":
"@babel/core@^7.0.0", "@babel/core@^7.1.5", "@babel/core@^7.1.6", "@babel/core@^7.11.0", "@babel/core@^7.11.6", "@babel/core@^7.12.3", "@babel/core@^7.13.8", "@babel/core@^7.14.0", "@babel/core@^7.16.5", "@babel/core@^7.4.4", "@babel/core@^7.5.5", "@babel/core@^7.7.2", "@babel/core@^7.7.4", "@babel/core@^7.7.5", "@babel/core@^7.8.4":
version "7.22.9"
resolved "https://registry.yarnpkg.com/@babel/core/-/core-7.22.9.tgz#bd96492c68822198f33e8a256061da3cf391f58f"
integrity sha512-G2EgeufBcYw27U4hhoIwFcgc1XU7TlXJ3mv04oOv1WCuo900U/anZSPzEqNjwdjgffkk2Gs0AN0dW1CKVLcG7w==
@@ -971,6 +971,18 @@
chalk "^2.0.0"
js-tokens "^4.0.0"
"@babel/node@^7.16.5":
version "7.22.6"
resolved "https://registry.yarnpkg.com/@babel/node/-/node-7.22.6.tgz#a47b4f150f06bad1808823c4519690ded6c93911"
integrity sha512-Lt6v+RUQOTsEOXLv+KfjogLFkFfsLPPSoXZqmbngfVatkWjQPnFGHO0xjFRcN6XEvm3vsnZn+AWQiRpgZFsdIA==
dependencies:
"@babel/register" "^7.22.5"
commander "^4.0.1"
core-js "^3.30.2"
node-environment-flags "^1.0.5"
regenerator-runtime "^0.13.11"
v8flags "^3.1.1"
"@babel/parser@^7.1.0", "@babel/parser@^7.14.7", "@babel/parser@^7.18.4", "@babel/parser@^7.20.15", "@babel/parser@^7.20.7", "@babel/parser@^7.21.3", "@babel/parser@^7.22.5", "@babel/parser@^7.22.7", "@babel/parser@^7.6.0", "@babel/parser@^7.9.6":
version "7.22.7"
resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.22.7.tgz#df8cf085ce92ddbdbf668a7f186ce848c9036cae"
@@ -1570,7 +1582,7 @@
dependencies:
"@babel/plugin-transform-react-jsx" "^7.22.5"
"@babel/plugin-transform-react-jsx@^7.22.5", "@babel/plugin-transform-react-jsx@^7.3.0":
"@babel/plugin-transform-react-jsx@^7.16.5", "@babel/plugin-transform-react-jsx@^7.22.5", "@babel/plugin-transform-react-jsx@^7.3.0":
version "7.22.5"
resolved "https://registry.yarnpkg.com/@babel/plugin-transform-react-jsx/-/plugin-transform-react-jsx-7.22.5.tgz#932c291eb6dd1153359e2a90cb5e557dcf068416"
integrity sha512-rog5gZaVbUip5iWDMTYbVM15XQq+RkUKhET/IHR6oizR+JEoN6CAfTTuHcK4vwUyzca30qqHqEpzBOnaRMWYMA==
@@ -1792,7 +1804,7 @@
"@babel/plugin-transform-react-jsx-development" "^7.22.5"
"@babel/plugin-transform-react-pure-annotations" "^7.22.5"
"@babel/register@^7.0.0", "@babel/register@^7.13.8":
"@babel/register@^7.0.0", "@babel/register@^7.13.8", "@babel/register@^7.22.5":
version "7.22.5"
resolved "https://registry.yarnpkg.com/@babel/register/-/register-7.22.5.tgz#e4d8d0f615ea3233a27b5c6ada6750ee59559939"
integrity sha512-vV6pm/4CijSQ8Y47RH5SopXzursN35RQINfGJkmOlcpAtGuf94miFvIPhCKGQN7WGIcsgG1BHEX2KVdTYwTwUQ==
@@ -4400,27 +4412,222 @@
resolved "https://registry.yarnpkg.com/@xmldom/xmldom/-/xmldom-0.8.9.tgz#b6ef7457e826be8049667ae673eda7876eb049be"
integrity sha512-4VSbbcMoxc4KLjb1gs96SRmi7w4h1SF+fCoiK0XaQX62buCc1G5d0DC5bJ9xJBNPDSVCmIrcl8BiYxzjrqaaJA==
"@xmpp/jid@^0.0.2":
version "0.0.2"
resolved "https://registry.yarnpkg.com/@xmpp/jid/-/jid-0.0.2.tgz#0d528ca9d58dafc833665564ffe62f332a3167f2"
integrity sha512-z8riWz40gZF1xg0QYi/dA+hYLb3LAkugvfWac49X4YU/+nGu1pstB7oR6G7PCTOzZaFlZDbu699fyfBlcJfgNw==
"@xmpp/streamparser@^0.0.6":
version "0.0.6"
resolved "https://registry.yarnpkg.com/@xmpp/streamparser/-/streamparser-0.0.6.tgz#118033ea9db7c86a1cb46103f269ebff79f6f1ea"
integrity sha512-Kt5kkH3b0YNAzfRGww60dXo78D+yVItTw3GvSgwOj6LMPpKevVWrRf0r76Dd+krBQ9Gr4SAnVi0kInGfEpOUTA==
"@xmpp/base64@^0.13.1":
version "0.13.1"
resolved "https://registry.yarnpkg.com/@xmpp/base64/-/base64-0.13.1.tgz#92cb55554cea301e5e157d0f4e957d3c3d6e9c3e"
integrity sha512-ifzj81zZc8uhL9Nl8us2NUDfLt3qsbHr8lwdKmrDMk/9unY8aIGjzHdNBJoFFyJe8GSo1NFq3mS7X+X0TwkQYw==
dependencies:
"@xmpp/xml" "^0.1.3"
inherits "^2.0.3"
ltx "^2.5.0"
base-64 "^1.0.0"
"@xmpp/xml@^0.1.3":
version "0.1.3"
resolved "https://registry.yarnpkg.com/@xmpp/xml/-/xml-0.1.3.tgz#1f14399e53e419688558698f6c62e71e39a86a6e"
integrity sha512-FI/C+isEGhL8JLXJLIEWPWIysLHnDAW27k993Iwly+rPz/A3kiQYGz3W/0jNJJjLbBbeZPhfjpSB9MXM8j2PDQ==
"@xmpp/client-core@^0.13.1":
version "0.13.1"
resolved "https://registry.yarnpkg.com/@xmpp/client-core/-/client-core-0.13.1.tgz#508986bf87560a5f34940977fc6e832c8f420667"
integrity sha512-ANVcqzgDCmmUj/R9pf5rJGH41mL16Bo+DRJ+2trKoRHe9p5s0p6IssjhJtTOSVx6oh2ilPXMB8qoMPjTGzY6cw==
dependencies:
inherits "^2.0.3"
ltx "^2.6.2"
"@xmpp/connection" "^0.13.1"
"@xmpp/jid" "^0.13.1"
"@xmpp/xml" "^0.13.1"
"@xmpp/client@^0.13.1":
version "0.13.1"
resolved "https://registry.yarnpkg.com/@xmpp/client/-/client-0.13.1.tgz#6c8db3cd308479ef17b8f78e2eda9a68b4ebd2d6"
integrity sha512-DA+pOkWliTKN5C0Bod4rqlZ4hj/CiqQDHRhQgpx7Y/69qsUwK8M/9C02qylpyZSL2TFGzOM6ZMhr/jlMCsL9jQ==
dependencies:
"@babel/cli" "^7.16.0"
"@babel/core" "^7.16.5"
"@babel/node" "^7.16.5"
"@babel/plugin-transform-react-jsx" "^7.16.5"
"@xmpp/client-core" "^0.13.1"
"@xmpp/iq" "^0.13.1"
"@xmpp/middleware" "^0.13.1"
"@xmpp/reconnect" "^0.13.1"
"@xmpp/resolve" "^0.13.1"
"@xmpp/resource-binding" "^0.13.1"
"@xmpp/sasl" "^0.13.1"
"@xmpp/sasl-anonymous" "^0.13.1"
"@xmpp/sasl-plain" "^0.13.1"
"@xmpp/sasl-scram-sha-1" "^0.13.1"
"@xmpp/session-establishment" "^0.13.1"
"@xmpp/starttls" "^0.13.1"
"@xmpp/stream-features" "^0.13.1"
"@xmpp/stream-management" "^0.13.1"
"@xmpp/tcp" "^0.13.1"
"@xmpp/tls" "^0.13.1"
"@xmpp/websocket" "^0.13.1"
babel-plugin-jsx-pragmatic "^1.0.2"
"@xmpp/connection-tcp@^0.13.1":
version "0.13.1"
resolved "https://registry.yarnpkg.com/@xmpp/connection-tcp/-/connection-tcp-0.13.1.tgz#07a48223ac0dc2cc97fa8adf85d98a73418568f9"
integrity sha512-yTVrj5o5rPVbZT5ql5ljzzIZHnLkCuyTNEQpiU9IYvfjWjy4+E2DreUnpRf3IAbpARkMoPq5uQJchH0RE3WBjg==
dependencies:
"@xmpp/connection" "^0.13.1"
"@xmpp/xml" "^0.13.1"
"@xmpp/connection@^0.13.1":
version "0.13.1"
resolved "https://registry.yarnpkg.com/@xmpp/connection/-/connection-0.13.1.tgz#1c71e2dcfa8a2e72dda4dbf73fe0b520f8971851"
integrity sha512-A8ojaVRrvGtvRTXcWiOJMnBPAytLFvsz18g/jO9PbnhzuqqeJ6LxmCtyaKqchMdX0lhuZpo0JUgCSPnZ68tXrQ==
dependencies:
"@xmpp/error" "^0.13.1"
"@xmpp/events" "^0.13.1"
"@xmpp/jid" "^0.13.1"
"@xmpp/xml" "^0.13.1"
"@xmpp/error@^0.13.1":
version "0.13.1"
resolved "https://registry.yarnpkg.com/@xmpp/error/-/error-0.13.1.tgz#fba9ddd33e34f77616635b87b67e974637b23995"
integrity sha512-tKecj36xIGLhLctdYhUOxWs+ZdiJpl0Tfp/GhfrUCKLHj/wq14d62SP9kxa0sDNKOY1uqRq2N9gWZBQHuP+r2Q==
"@xmpp/events@^0.13.0", "@xmpp/events@^0.13.1":
version "0.13.1"
resolved "https://registry.yarnpkg.com/@xmpp/events/-/events-0.13.1.tgz#8bfa57117bb8c21da87e62a1985d65b7b1c342c2"
integrity sha512-c538zWUoD7KfMzMWGHyJkXvRYE5exzVjK6NAsMtfNtbVqw9SXJJaGLvDvYSXOQmKQaZz5guUuIUGiHJbr7yjsA==
dependencies:
events "^3.3.0"
"@xmpp/id@^0.13.1":
version "0.13.1"
resolved "https://registry.yarnpkg.com/@xmpp/id/-/id-0.13.1.tgz#771f5cc64e402cab0994e6e4e05bfc635a300948"
integrity sha512-ivc7kxfk5sU6PspdQvglsibcWRCr40nbaPEvGYbXO8ymFN6qps91DPlEt0Cc0XJExq7PXo0Yt7DACfe8f7K03g==
"@xmpp/iq@^0.13.1":
version "0.13.1"
resolved "https://registry.yarnpkg.com/@xmpp/iq/-/iq-0.13.1.tgz#29e5e62f6bad7cd73020948dc682c3bb2dd6e2ec"
integrity sha512-YyJj6up2aFTobTUmjdX86vs0+/WIB8i88QQjDDlzSKdMDDXgrB8B8JAMlEBfAsruAv/ZIwUnE4/yqCeMAehTuA==
dependencies:
"@xmpp/events" "^0.13.1"
"@xmpp/id" "^0.13.1"
"@xmpp/middleware" "^0.13.1"
"@xmpp/xml" "^0.13.1"
"@xmpp/jid@^0.13.1":
version "0.13.1"
resolved "https://registry.yarnpkg.com/@xmpp/jid/-/jid-0.13.1.tgz#043cd7c491feeb6a7c9f7b1b3581f94939670717"
integrity sha512-E5ulk4gfPQwPY71TWXapiWzoxxAJz3LP0bDIUXIfgvlf1/2QKP3EcYQ7o+qmI0cLEZwWmwluRGouylqhyuwcAw==
"@xmpp/middleware@^0.13.1":
version "0.13.1"
resolved "https://registry.yarnpkg.com/@xmpp/middleware/-/middleware-0.13.1.tgz#fe64e7f5d12fb74254684d96e17777fb4e7a44ed"
integrity sha512-t7kws9KMgaQURCDMcPjJOm/sEcC2Gs2YtpE35NaTR87NSwr8yZ37ZJL5Kki3Z4qhL6nhMXJPAprc6uqBn5q3Og==
dependencies:
"@xmpp/error" "^0.13.1"
"@xmpp/jid" "^0.13.1"
"@xmpp/xml" "^0.13.1"
koa-compose "^4.1.0"
"@xmpp/reconnect@^0.13.1":
version "0.13.1"
resolved "https://registry.yarnpkg.com/@xmpp/reconnect/-/reconnect-0.13.1.tgz#c815ec749a5c142fb9255af951d64e1dbadc419b"
integrity sha512-m/j/mTU7b3cOXP78uGzBbihmJMuXCYcTcwsTHlexj6tj6CE/vpuLNgxvf6pPkO7B9lH0HfezqU7ExHpS+4Nfaw==
dependencies:
"@xmpp/events" "^0.13.1"
"@xmpp/resolve@^0.13.1":
version "0.13.1"
resolved "https://registry.yarnpkg.com/@xmpp/resolve/-/resolve-0.13.1.tgz#19f7a23983b78b72af5ce6606382428c4e7e7421"
integrity sha512-Lgsl6C/uJCxmYr0jWWOCJMqYvKi5WzN6loZwP7f6ov2nLMOMEZ7TSb66z393/7Pd0hy6DqZeggESMAFOkQH+vw==
dependencies:
"@xmpp/events" "^0.13.0"
"@xmpp/xml" "^0.13.0"
node-fetch "^2.6.6"
"@xmpp/resource-binding@^0.13.1":
version "0.13.1"
resolved "https://registry.yarnpkg.com/@xmpp/resource-binding/-/resource-binding-0.13.1.tgz#76a8248ced24bc24cd4dff0a031ed376eafeb54b"
integrity sha512-S6PGlfufDTTDlh21ynyJrGR0sMeEYIRq+BKUl4QhsR19BvP0RUW0t8Ypx1QwDY3++ihqRjvCllCmtmFMY1iJsQ==
dependencies:
"@xmpp/xml" "^0.13.1"
"@xmpp/sasl-anonymous@^0.13.1":
version "0.13.1"
resolved "https://registry.yarnpkg.com/@xmpp/sasl-anonymous/-/sasl-anonymous-0.13.1.tgz#21139dafe4dbc8fb626e974daf64af9c46e469c5"
integrity sha512-l0Bqmva7xw10p8MelD2bHO10LwCPz6CEd/t5xO+Kw98hjI9lX6k5cxW7frvdnxRwPxJbGTciTQKHokYWR4luaA==
dependencies:
sasl-anonymous "^0.1.0"
"@xmpp/sasl-plain@^0.13.1":
version "0.13.1"
resolved "https://registry.yarnpkg.com/@xmpp/sasl-plain/-/sasl-plain-0.13.1.tgz#ccdf9c73f61fb203ec7822ae55e2c707d3aed305"
integrity sha512-Xx4ay67Mg6aQFeelTZuY5QatP3cCJsArAuD0AozHKzjUWzyLqqydsDS+yFN23pxkOZPGgyYVebc4gKti4jZ+GA==
dependencies:
sasl-plain "^0.1.0"
"@xmpp/sasl-scram-sha-1@^0.13.1":
version "0.13.1"
resolved "https://registry.yarnpkg.com/@xmpp/sasl-scram-sha-1/-/sasl-scram-sha-1-0.13.1.tgz#139a74f176301513da987ddc3eb98f1e58c8d9ad"
integrity sha512-qWyR5+v10pykTxQnKfNVUnCnZisA/UmC4Po5EQSgA5dNRuzraqwk/bH5PVi9+M0OcbtdNs9wCO2Hv06YA9AjwA==
dependencies:
sasl-scram-sha-1 "^1.2.1"
"@xmpp/sasl@^0.13.1":
version "0.13.1"
resolved "https://registry.yarnpkg.com/@xmpp/sasl/-/sasl-0.13.1.tgz#ee8484dda611ccff9b39243109b78c47abd01584"
integrity sha512-ynhKsL43EtezqJ9s476leHzliMudCAFS4xNG5x4ZFHoc7Iz5J6p6jFI89LGgnk9DeIdk9A/CFrPWTdyjhvyiTQ==
dependencies:
"@xmpp/base64" "^0.13.1"
"@xmpp/error" "^0.13.1"
"@xmpp/xml" "^0.13.1"
saslmechanisms "^0.1.1"
"@xmpp/session-establishment@^0.13.1":
version "0.13.1"
resolved "https://registry.yarnpkg.com/@xmpp/session-establishment/-/session-establishment-0.13.1.tgz#5fc6e6e2d96d78c4646c227620cdab69046c42ae"
integrity sha512-uba6BZeeSJtbHtU+pCumSiX/zuc9hUdN5dVRNjvRjr/ZcXLMuC5MroRyrld+fm/rQYQLJjF4BcIaxvysXTCAGA==
dependencies:
"@xmpp/xml" "^0.13.1"
"@xmpp/starttls@^0.13.1":
version "0.13.1"
resolved "https://registry.yarnpkg.com/@xmpp/starttls/-/starttls-0.13.1.tgz#20d9c399ea822985e5f9ba5636a9fa1cc0600cb9"
integrity sha512-rQumwpbD5+yclcXgPNDF7Jg1mzDFejHKZehD6JRti+Emsxayst/qFDq3uMO3x6P+nKexL4mMoKUtWHlJM7BUGw==
dependencies:
"@xmpp/events" "^0.13.1"
"@xmpp/tls" "^0.13.1"
"@xmpp/xml" "^0.13.1"
"@xmpp/stream-features@^0.13.1":
version "0.13.1"
resolved "https://registry.yarnpkg.com/@xmpp/stream-features/-/stream-features-0.13.1.tgz#1a91aadaf027bfb1e4b5073bb0130ebad7f0e091"
integrity sha512-yZg+CXBRVXsIQzu4SI5UYlDZHmg3wY6YXy4MbeLiI4O8OQ/oCz6OHJlHKUnFl+cGmjDXvhN4Ga6pRhbEIIqM/g==
"@xmpp/stream-management@^0.13.1":
version "0.13.1"
resolved "https://registry.yarnpkg.com/@xmpp/stream-management/-/stream-management-0.13.1.tgz#643b94ed243b81f8a6f74937a6906a21dd8ce889"
integrity sha512-06dhJAlGn+MU5ESrvIUg5xOS7azVE0swq86cx4SCv7t5dWL1WBj4xg2qigLn1hMnFkDw0bO/SOikXTxqGii/hA==
dependencies:
"@xmpp/xml" "^0.13.1"
"@xmpp/tcp@^0.13.1":
version "0.13.1"
resolved "https://registry.yarnpkg.com/@xmpp/tcp/-/tcp-0.13.1.tgz#7a7a08791282a9c8a319cc9f6feee1b94c1bdde6"
integrity sha512-N/AQBT+6Updb/E8A1SYdMbIJGaRFG8+7+bkm9MLw44UsihA6Yg0fmvC02O+BjNg3tXGkcMYLhu/8NYpjK4NlQg==
dependencies:
"@xmpp/connection-tcp" "^0.13.1"
"@xmpp/tls@^0.13.1":
version "0.13.1"
resolved "https://registry.yarnpkg.com/@xmpp/tls/-/tls-0.13.1.tgz#bf3891a0fd381478cc90f88358b7e8b706209bd4"
integrity sha512-ecOmnrZmRbMMPDdvDNirw7sYQHt//YV7UJgfS4c9M+R5ljP2eUJiAiotEEykjKJ6CJPMMxdTnrLLP3ullsgfog==
dependencies:
"@xmpp/connection" "^0.13.1"
"@xmpp/connection-tcp" "^0.13.1"
"@xmpp/websocket@^0.13.1":
version "0.13.1"
resolved "https://registry.yarnpkg.com/@xmpp/websocket/-/websocket-0.13.1.tgz#a94196555244a0490fa58046cce4322f84fbf58e"
integrity sha512-UyMYyy/0Cm2UtVoAlhfV31u6LzGrBUU0h7I0qGCq1yYPQpscehNl8lXE4vmB8OfpeDvSZmvGk2vJAvGxzunoDQ==
dependencies:
"@xmpp/connection" "^0.13.1"
"@xmpp/xml" "^0.13.1"
ws "^8.4.0"
"@xmpp/xml@^0.13.0", "@xmpp/xml@^0.13.1":
version "0.13.1"
resolved "https://registry.yarnpkg.com/@xmpp/xml/-/xml-0.13.1.tgz#856b992dec1978fba89e06bd4718d91a31188916"
integrity sha512-GMfYB3PKY9QzsMnl3dPohgPBGd1JQTBanKOaZexJCSYJN2cdYLU2HGhjMtDlGSno6h9U+t0oO7r0igsJwyigwg==
dependencies:
ltx "^3.0.0"
"@xtuc/ieee754@^1.2.0":
version "1.2.0"
@@ -5356,6 +5563,13 @@ babel-plugin-jest-hoist@^29.5.0:
"@types/babel__core" "^7.1.14"
"@types/babel__traverse" "^7.0.6"
babel-plugin-jsx-pragmatic@^1.0.2:
version "1.0.2"
resolved "https://registry.yarnpkg.com/babel-plugin-jsx-pragmatic/-/babel-plugin-jsx-pragmatic-1.0.2.tgz#41e2beb8642235f34b2a7ab12ca39e07201b8e59"
integrity sha512-+qeGXSbHZwinZzO6R3wP+6XDKup83Pgg2B3TQt2zwfDdgC7NqT9Kd3ws7iqk53zAO/8iOIRU6VUyUzt2LDE3Eg==
dependencies:
babel-plugin-syntax-jsx "^6.0.0"
babel-plugin-polyfill-corejs2@^0.4.4:
version "0.4.4"
resolved "https://registry.yarnpkg.com/babel-plugin-polyfill-corejs2/-/babel-plugin-polyfill-corejs2-0.4.4.tgz#9f9a0e1cd9d645cc246a5e094db5c3aa913ccd2b"
@@ -5380,6 +5594,11 @@ babel-plugin-polyfill-regenerator@^0.5.1:
dependencies:
"@babel/helper-define-polyfill-provider" "^0.4.1"
babel-plugin-syntax-jsx@^6.0.0:
version "6.18.0"
resolved "https://registry.yarnpkg.com/babel-plugin-syntax-jsx/-/babel-plugin-syntax-jsx-6.18.0.tgz#0af32a9a6e13ca7a3fd5069e62d7b0f58d0d8946"
integrity sha512-qrPaCSo9c8RHNRHIotaufGbuOBN8rtdC4QrrFFc43vyWCCz7Kl7GL1PGaXtMGQZUXrkCjNEgxDfmAuAabr/rlw==
babel-plugin-transform-dev@^2.0.1:
version "2.0.1"
resolved "https://registry.yarnpkg.com/babel-plugin-transform-dev/-/babel-plugin-transform-dev-2.0.1.tgz#fec5bbfb6b9576cd8413df5bd0ae7aca32b0a2d4"
@@ -5468,16 +5687,16 @@ bach@^1.0.0:
async-settle "^1.0.0"
now-and-later "^2.0.0"
backoff@~2.3.0:
version "2.3.0"
resolved "https://registry.yarnpkg.com/backoff/-/backoff-2.3.0.tgz#ee7c7e38093f92e472859db635e7652454fc21ea"
integrity sha512-ljr33cUQ/vyXE/60QuRO+WKGW4PzQ5OTWNXPWQwOTx5gh43q0pZocaVyXoU2gvFtasMIdIohdm9s01qoT6IJBQ==
balanced-match@^1.0.0:
version "1.0.2"
resolved "https://registry.yarnpkg.com/balanced-match/-/balanced-match-1.0.2.tgz#e83e3a7e3f300b34cb9d87f615fa0cbf357690ee"
integrity sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==
base-64@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/base-64/-/base-64-1.0.0.tgz#09d0f2084e32a3fd08c2475b973788eee6ae8f4a"
integrity sha512-kwDPIFCGx0NZHog36dj+tHiwP4QMzsZ3AgMViUBKI0+V5n4U0ufTCUMhnQ04diaRI8EX/QcPfql7zlhZ7j4zgg==
base64-js@^1.0.2, base64-js@^1.3.1:
version "1.5.1"
resolved "https://registry.yarnpkg.com/base64-js/-/base64-js-1.5.1.tgz#1b1b440160a5bf7ad40b650f095963481903930a"
@@ -5581,6 +5800,11 @@ bindings@^1.5.0:
dependencies:
file-uri-to-path "1.0.0"
bitwise-xor@0.0.0:
version "0.0.0"
resolved "https://registry.yarnpkg.com/bitwise-xor/-/bitwise-xor-0.0.0.tgz#040a8172b5bb8cc562b0b7119f230b2a1a780e3d"
integrity sha512-3eOkZMBO04dRBn7551o6+IX9Ua7V+B/IubS7sffoa/VC3jdBM4YbuD+LjUNFojY7H+gptMUdTaQgHWTce4L3kw==
bl@^4.0.3, bl@^4.1.0:
version "4.1.0"
resolved "https://registry.yarnpkg.com/bl/-/bl-4.1.0.tgz#451535264182bec2fbbc83a62ab98cf11d9f7b3a"
@@ -5785,11 +6009,6 @@ browser-pack@^6.0.1, browser-pack@^6.0.2:
through2 "^2.0.0"
umd "^3.0.0"
browser-request@^0.3.3:
version "0.3.3"
resolved "https://registry.yarnpkg.com/browser-request/-/browser-request-0.3.3.tgz#9ece5b5aca89a29932242e18bf933def9876cc17"
integrity sha512-YyNI4qJJ+piQG6MMEuo7J3Bzaqssufx04zpEKYfSrl/1Op59HWali9zMtBpXnkmqMcOuWJPZvudrm9wISmnCbg==
browser-resolve@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/browser-resolve/-/browser-resolve-2.0.0.tgz#99b7304cb392f8d73dba741bb2d7da28c6d7842b"
@@ -7128,7 +7347,7 @@ core-js@^2.4.0, core-js@^2.5.0:
resolved "https://registry.yarnpkg.com/core-js/-/core-js-2.6.12.tgz#d9333dfa7b065e347cc5682219d6f690859cc2ec"
integrity sha512-Kb2wC0fvsWfQrgk8HU5lW6U/Lcs8+9aaYcy4ZFc6DDlo4nZ7n70dEgE5rtR0oG6ufKDUnrwfWL1mXR5ljDatrQ==
core-js@^3.6.4, core-js@^3.6.5:
core-js@^3.30.2, core-js@^3.6.4, core-js@^3.6.5:
version "3.31.1"
resolved "https://registry.yarnpkg.com/core-js/-/core-js-3.31.1.tgz#f2b0eea9be9da0def2c5fece71064a7e5d687653"
integrity sha512-2sKLtfq1eFST7l7v62zaqXacPc7uG8ZAya8ogijLhTtaKNcpzpB4TMoTw2Si+8GYKRwFPMMtUT0263QFWFfqyQ==
@@ -7187,7 +7406,7 @@ create-hash@^1.1.0, create-hash@^1.1.2, create-hash@^1.2.0:
ripemd160 "^2.0.1"
sha.js "^2.4.0"
create-hmac@^1.1.0, create-hmac@^1.1.4, create-hmac@^1.1.7:
create-hmac@^1.1.0, create-hmac@^1.1.3, create-hmac@^1.1.4, create-hmac@^1.1.7:
version "1.1.7"
resolved "https://registry.yarnpkg.com/create-hmac/-/create-hmac-1.1.7.tgz#69170c78b3ab957147b2b8b04572e47ead2243ff"
integrity sha512-MJG9liiZ+ogc4TzUwuvbER1JRdgvUFSB5+VR/g5h82fGaIRWMWddtKBHi7/sVhfjQZ6SehlyhvQYrcYkaUIpLg==
@@ -13737,11 +13956,6 @@ lodash._root@^3.0.0:
resolved "https://registry.yarnpkg.com/lodash._root/-/lodash._root-3.0.1.tgz#fba1c4524c19ee9a5f8136b4609f017cf4ded692"
integrity sha512-O0pWuFSK6x4EXhM1dhZ8gchNtG7JMqBtrHdoUFUWXD7dJnNSUze1GuyQr5sOs0aCvgGeI3o/OJW8f4ca7FDxmQ==
lodash.assign@^4.0.0:
version "4.2.0"
resolved "https://registry.yarnpkg.com/lodash.assign/-/lodash.assign-4.2.0.tgz#0d99f3ccd7a6d261d19bdaeb9245005d285808e7"
integrity sha512-hFuH8TY+Yji7Eja3mGiuAxBqLagejScbG8GbG0j6o9vzn0YL14My+ktnqtZgFTosKymC9/44wP6s7xyuLfnClw==
lodash.camelcase@^4.3.0:
version "4.3.0"
resolved "https://registry.yarnpkg.com/lodash.camelcase/-/lodash.camelcase-4.3.0.tgz#b28aa6288a2b9fc651035c7711f65ab6190331a6"
@@ -14027,12 +14241,10 @@ ltgt@^2.1.2:
resolved "https://registry.yarnpkg.com/ltgt/-/ltgt-2.2.1.tgz#f35ca91c493f7b73da0e07495304f17b31f87ee5"
integrity sha512-AI2r85+4MquTw9ZYqabu4nMwy9Oftlfa/e/52t9IjtfG+mGBbTNdAoZ3RQKLHR6r0wQnwZnPIEh/Ya6XTWAKNA==
ltx@^2.5.0, ltx@^2.6.2:
version "2.10.0"
resolved "https://registry.yarnpkg.com/ltx/-/ltx-2.10.0.tgz#0b794b898e01d9dcc61b54b160e78869003bbb20"
integrity sha512-RB4zR6Mrp/0wTNS9WxMvpgfht/7u/8QAC9DpPD19opL/4OASPa28uoliFqeDkLUU8pQ4aeAfATBZmz1aSAHkMw==
dependencies:
inherits "^2.0.4"
ltx@^3.0.0:
version "3.0.0"
resolved "https://registry.yarnpkg.com/ltx/-/ltx-3.0.0.tgz#f2a2260814165c5e28d455f9f7db2178ed295187"
integrity sha512-bu3/4/ApUmMqVNuIkHaRhqVtEi6didYcBDIF56xhPRCzVpdztCipZ62CUuaxMlMBUzaVL93+4LZRqe02fuAG6A==
magic-string@^0.30.0:
version "0.30.1"
@@ -14170,7 +14382,7 @@ math-random@^1.0.1:
resolved "https://registry.yarnpkg.com/math-random/-/math-random-1.0.4.tgz#5dd6943c938548267016d4e34f057583080c514c"
integrity sha512-rUxjysqif/BZQH2yhd5Aaq7vXMSx9NdEsQcyA07uEzIvxgI7zIr33gGsh+RU0/XjmQpCW7RsVof1vlkvQVCK5A==
md5.js@^1.3.3, md5.js@^1.3.4:
md5.js@^1.3.4:
version "1.3.5"
resolved "https://registry.yarnpkg.com/md5.js/-/md5.js-1.3.5.tgz#b5d07b8e3216e3e27cd728d72f70d1e6a342005f"
integrity sha512-xitP+WxNPcTTOgnTJcrhM0xvdPepipPSf3I8EIpGKeFLjt3PlJLIDG3u8EX53ZIubkb+5U2+3rELYpEhHhzdkg==
@@ -14828,6 +15040,14 @@ node-domexception@^1.0.0:
resolved "https://registry.yarnpkg.com/node-domexception/-/node-domexception-1.0.0.tgz#6888db46a1f71c0b76b3f7555016b63fe64766e5"
integrity sha512-/jKZoMpw0F8GRwl4/eLROPA3cfcXtLApP0QzLmUT/HuPCZWyB7IY9ZrMeKw2O/nFIqPQB3PVM9aYm0F312AXDQ==
node-environment-flags@^1.0.5:
version "1.0.6"
resolved "https://registry.yarnpkg.com/node-environment-flags/-/node-environment-flags-1.0.6.tgz#a30ac13621f6f7d674260a54dede048c3982c088"
integrity sha512-5Evy2epuL+6TM0lCQGpFIj6KwiEsGh1SrHUhTbNX+sLbBtjidPZFAnVK9y5yU1+h//RitLbRHTIMyxQPtxMdHw==
dependencies:
object.getownpropertydescriptors "^2.0.3"
semver "^5.7.0"
node-fetch@^1.0.1:
version "1.7.3"
resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-1.7.3.tgz#980f6f72d85211a5347c6b2bc18c5b84c3eb47ef"
@@ -14836,7 +15056,7 @@ node-fetch@^1.0.1:
encoding "^0.1.11"
is-stream "^1.0.1"
node-fetch@^2.6.7:
node-fetch@^2.6.6, node-fetch@^2.6.7:
version "2.6.12"
resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.6.12.tgz#02eb8e22074018e3d5a83016649d04df0e348fba"
integrity sha512-C/fGU2E8ToujUivIO0H+tpQ6HWo4eEmchoPIoXtxCrVghxdKq+QOHqEZW7tuP3KlV3bC8FRMO5nMCC7Zm1VP6g==
@@ -14927,38 +15147,6 @@ node-version@^1.0.0:
resolved "https://registry.yarnpkg.com/node-version/-/node-version-1.2.0.tgz#34fde3ffa8e1149bd323983479dda620e1b5060d"
integrity sha512-ma6oU4Sk0qOoKEAymVoTvk8EdXEobdS7m/mAGhDJ8Rouugho48crHBORAmy5BoOcv8wraPM6xumapQp5hl4iIQ==
node-xmpp-client@^3.0.0:
version "3.2.0"
resolved "https://registry.yarnpkg.com/node-xmpp-client/-/node-xmpp-client-3.2.0.tgz#af4527df0cc5abd2690cba2139cc1ecdc81ea189"
integrity sha512-UviV0XNomTPPAOnPeeFlZIg9eXN4+w1atj/NtoUKAQir/MTsO4Ve/nT6UdkZFusB2bOZ9REpGjvop4nhLidWYg==
dependencies:
browser-request "^0.3.3"
debug "^2.2.0"
md5.js "^1.3.3"
minimist "^1.2.0"
node-xmpp-core "^5.0.9"
request "^2.65.0"
ws "^1.1.1"
node-xmpp-core@^5.0.9:
version "5.0.9"
resolved "https://registry.yarnpkg.com/node-xmpp-core/-/node-xmpp-core-5.0.9.tgz#5c28c28edb1fb3f8beba2c6760777613f48f342a"
integrity sha512-F1ODQf95i3H9VLhDgAkbPti4T8uk8bgOeATQ0dSfAJVbvrJZYVGPC/SzGreYXgQmV65b7lxHqejzhSBtYpxc4Q==
dependencies:
"@xmpp/jid" "^0.0.2"
"@xmpp/streamparser" "^0.0.6"
"@xmpp/xml" "^0.1.3"
debug "^2.2.0"
inherits "^2.0.1"
lodash.assign "^4.0.0"
node-xmpp-tls-connect "^1.0.1"
reconnect-core "https://github.com/dodo/reconnect-core/tarball/merged"
node-xmpp-tls-connect@^1.0.1:
version "1.0.1"
resolved "https://registry.yarnpkg.com/node-xmpp-tls-connect/-/node-xmpp-tls-connect-1.0.1.tgz#91ace43ac26b138861b2be478df9df19d61dc5c3"
integrity sha512-tDN4ZBb8rDzlvZHQdYHW4NVPs4mZZYQPtv8anAb7ff8Un0mIid3cVxEfUr7COZfMeB5+YvSUz8Zutr1A+/v9LQ==
node-zone@^0.4.0:
version "0.4.0"
resolved "https://registry.yarnpkg.com/node-zone/-/node-zone-0.4.0.tgz#b058401a2e7c4bd34cb8cda8ee9d61360f620711"
@@ -15445,11 +15633,6 @@ optionator@^0.9.3:
prelude-ls "^1.2.1"
type-check "^0.4.0"
options@>=0.0.5:
version "0.0.6"
resolved "https://registry.yarnpkg.com/options/-/options-0.0.6.tgz#ec22d312806bb53e731773e7cdaefcf1c643128f"
integrity sha512-bOj3L1ypm++N+n7CEbbe473A414AB7z+amKYshRb//iuL3MpdDCLhPnw6aVTdKB9g5ZRVHIEp8eUln6L2NUStg==
ora@^5.4.1:
version "5.4.1"
resolved "https://registry.yarnpkg.com/ora/-/ora-5.4.1.tgz#1b2678426af4ac4a509008e5e4ac9e9959db9e18"
@@ -17681,12 +17864,6 @@ rechoir@^0.6.2:
dependencies:
resolve "^1.1.6"
"reconnect-core@https://github.com/dodo/reconnect-core/tarball/merged":
version "0.0.1"
resolved "https://github.com/dodo/reconnect-core/tarball/merged#b9daf2adc45b19a6cc5fd2f048f8d9406cece498"
dependencies:
backoff "~2.3.0"
redent@^3.0.0:
version "3.0.0"
resolved "https://registry.yarnpkg.com/redent/-/redent-3.0.0.tgz#e557b7998316bb53c9f1f56fa626352c6963059f"
@@ -17918,7 +18095,7 @@ replace-homedir@^1.0.0:
is-absolute "^1.0.0"
remove-trailing-separator "^1.1.0"
request@^2.65.0, request@^2.74.0, request@^2.87.0:
request@^2.74.0, request@^2.87.0:
version "2.88.2"
resolved "https://registry.yarnpkg.com/request/-/request-2.88.2.tgz#d73c918731cb5a87da047e207234146f664d12b3"
integrity sha512-MsvtOrfG9ZcrOwAW+Qi+F6HbD0CWXEh9ou77uOb7FM2WPhwT7smM833PzanhJLsgXjN89Ir6V2PczXNnMpwKhw==
@@ -18248,6 +18425,31 @@ safe-regex@^1.1.0:
resolved "https://registry.yarnpkg.com/safer-buffer/-/safer-buffer-2.1.2.tgz#44fa161b0187b9549dd84bb91802f9bd8385cd6a"
integrity sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==
sasl-anonymous@^0.1.0:
version "0.1.0"
resolved "https://registry.yarnpkg.com/sasl-anonymous/-/sasl-anonymous-0.1.0.tgz#f544c7e824df2a40d9ad4733829572cc8d9ed5a5"
integrity sha512-x+0sdsV0Gie2EexxAUsx6ZoB+X6OCthlNBvAQncQxreEWQJByAPntj0EAgTlJc2kZicoc+yFzeR6cl8VfsQGfA==
sasl-plain@^0.1.0:
version "0.1.0"
resolved "https://registry.yarnpkg.com/sasl-plain/-/sasl-plain-0.1.0.tgz#cf145e7c02222b64d60c0806d9cd2ae5380426cc"
integrity sha512-X8mCSfR8y0NryTu0tuVyr4IS2jBunBgyG+3a0gEEkd0nlHGiyqJhlc4EIkzmSwaa7F8S4yo+LS6Cu5qxRkJrmg==
sasl-scram-sha-1@^1.2.1:
version "1.2.1"
resolved "https://registry.yarnpkg.com/sasl-scram-sha-1/-/sasl-scram-sha-1-1.2.1.tgz#d88d51feaa0ff320d8eb1d6fc75657653f9dcd4b"
integrity sha512-o63gNo+EGsk1ML0bNeUAjRomIIcG7VaUyA+ffhd9MME5BjqVEpp42YkmBBZqzz1KmJG3YqpRLE4PfUe7FjexaA==
dependencies:
bitwise-xor "0.0.0"
create-hash "^1.1.0"
create-hmac "^1.1.3"
randombytes "^2.0.1"
saslmechanisms@^0.1.1:
version "0.1.1"
resolved "https://registry.yarnpkg.com/saslmechanisms/-/saslmechanisms-0.1.1.tgz#478be1429500fcfaa780be88b3343ced7d2a9182"
integrity sha512-pVlvK5ysevz8MzybRnDIa2YMxn0OJ7b9lDiWhMoaKPoJ7YkAg/7YtNjUgaYzElkwHxsw8dBMhaEn7UP6zxEwPg==
sass@^1.38.1:
version "1.63.6"
resolved "https://registry.yarnpkg.com/sass/-/sass-1.63.6.tgz#481610e612902e0c31c46b46cf2dad66943283ea"
@@ -18322,7 +18524,7 @@ semver-greatest-satisfied-range@^1.1.0:
dependencies:
sver-compat "^1.5.0"
"semver@2 || 3 || 4 || 5", semver@^5.1.0, semver@^5.5.0, semver@^5.6.0, semver@^5.7.1:
"semver@2 || 3 || 4 || 5", semver@^5.1.0, semver@^5.5.0, semver@^5.6.0, semver@^5.7.0, semver@^5.7.1:
version "5.7.2"
resolved "https://registry.yarnpkg.com/semver/-/semver-5.7.2.tgz#48d55db737c3287cd4835e17fa13feace1c41ef8"
integrity sha512-cBznnQ9KjJqU67B52RMC65CMarK2600WFnbkcaiwWq3xy/5haFJlshgnpjovMVJ+Hff49d8GEn0b87C5pDQ10g==
@@ -20220,11 +20422,6 @@ uid2@0.0.x:
resolved "https://registry.yarnpkg.com/uid2/-/uid2-0.0.4.tgz#033f3b1d5d32505f5ce5f888b9f3b667123c0a44"
integrity sha512-IevTus0SbGwQzYh3+fRsAMTVVPOoIVufzacXcHPmdlle1jUpq7BRL+mw3dgeLanvGZdwwbWhRV6XrcFNdBmjWA==
ultron@1.0.x:
version "1.0.2"
resolved "https://registry.yarnpkg.com/ultron/-/ultron-1.0.2.tgz#ace116ab557cd197386a4e88f4685378c8b2e4fa"
integrity sha512-QMpnpVtYaWEeY+MwKDN/UdKlE/LsFZXM5lO1u7GaZzNgmIbGixHEmVMIKT+vqYOALu3m5GYQy9kz4Xu4IVn7Ow==
umd@^3.0.0:
version "3.0.3"
resolved "https://registry.yarnpkg.com/umd/-/umd-3.0.3.tgz#aa9fe653c42b9097678489c01000acb69f0b26cf"
@@ -20591,7 +20788,7 @@ v8-to-istanbul@^9.0.1:
"@types/istanbul-lib-coverage" "^2.0.1"
convert-source-map "^1.6.0"
v8flags@^3.2.0:
v8flags@^3.1.1, v8flags@^3.2.0:
version "3.2.0"
resolved "https://registry.yarnpkg.com/v8flags/-/v8flags-3.2.0.tgz#b243e3b4dfd731fa774e7492128109a0fe66d656"
integrity sha512-mH8etigqMfiGWdeXpaaqGfs6BndypxusHHcv2qSHyZkGEznCd/qAXCWWRzeowtL54147cktFOC4P5y+kl8d8Jg==
@@ -21371,14 +21568,6 @@ write-file-atomic@^4.0.2:
imurmurhash "^0.1.4"
signal-exit "^3.0.7"
ws@^1.1.1:
version "1.1.5"
resolved "https://registry.yarnpkg.com/ws/-/ws-1.1.5.tgz#cbd9e6e75e09fc5d2c90015f21f0c40875e0dd51"
integrity sha512-o3KqipXNUdS7wpQzBHSe180lBGO60SoK0yVo3CYJgb2MkobuWuBX6dhkYP5ORCLd55y+SaflMOV5fqAB53ux4w==
dependencies:
options ">=0.0.5"
ultron "1.0.x"
ws@^6.2.1:
version "6.2.2"
resolved "https://registry.yarnpkg.com/ws/-/ws-6.2.2.tgz#dd5cdbd57a9979916097652d78f1cc5faea0c32e"
@@ -21391,7 +21580,7 @@ ws@^7, ws@^7.5.5:
resolved "https://registry.yarnpkg.com/ws/-/ws-7.5.9.tgz#54fa7db29f4c7cec68b1ddd3a89de099942bb591"
integrity sha512-F+P9Jil7UiSKSkppIiD94dN07AwvFixvLIj1Og1Rl9GGMuNipJnV9JzjD6XuqmAeiswGvUmNLjr5cFuXwNS77Q==
ws@^8.2.3, ws@^8.3.0, ws@^8.5.0:
ws@^8.2.3, ws@^8.3.0, ws@^8.4.0, ws@^8.5.0:
version "8.13.0"
resolved "https://registry.yarnpkg.com/ws/-/ws-8.13.0.tgz#9a9fb92f93cf41512a0735c8f4dd09b8a1211cd0"
integrity sha512-x9vcZYTrFPC7aSIbj7sRCYo7L/Xb8Iy+pW0ng0wt2vCJv7M9HOMy0UoN3rr+IFC7hb7vXoqS+P9ktyLLLhO+LA==
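
Aside: the lockfile churn above comes from swapping the unmaintained node-xmpp-client stack (and its transitive ws@1, ltx@2, ultron and options deps) for the modular @xmpp/client 0.13 packages. As a rough sketch of sending a notification with the new client — the service URL and credentials are placeholders, not values from xo-server-transport-xmpp itself:

const { client, xml } = require('@xmpp/client')

const xmpp = client({
  service: 'xmpps://xmpp.example.org:5223', // placeholder server
  username: 'xo', // placeholder credentials
  password: 'secret',
})

xmpp.on('error', console.error)
xmpp.on('online', async () => {
  // Sends <message type="chat" to="..."><body>...</body></message>
  await xmpp.send(xml('message', { type: 'chat', to: 'admin@example.org' }, xml('body', {}, 'backup finished')))
  await xmpp.stop()
})

xmpp.start().catch(console.error)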