Compare commits

...

20 Commits

Author SHA1 Message Date
Florent BEAUCHAMP
f134fd33bf feat(@vates/nbd-client): export a stream from a remote nbd export 2023-08-02 09:13:17 +02:00
Florent BEAUCHAMP
28794fa820 feat(@vates/nbd-client): make default iterator chunk size a parameter 2023-08-02 09:09:42 +02:00
Florent BEAUCHAMP
66847f04b4 fix(@vates/nbd-client): group socket reading 2023-08-02 09:08:36 +02:00
Florent BEAUCHAMP
1dbfc6d0a2 fix(nbd-client) better handling of last unaligned block 2023-08-02 09:07:39 +02:00
Florent BEAUCHAMP
12c2083651 fix: code cleanup 2023-07-27 11:05:36 +02:00
Florent BEAUCHAMP
b1ed98a8fd wip 2023-07-26 16:59:34 +02:00
Florent BEAUCHAMP
f495e7110d dumb implementation 2023-07-26 07:03:06 +02:00
Florent BEAUCHAMP
e9b92780b9 fix nraynaud 2023-07-25 09:50:40 +02:00
Florent BEAUCHAMP
d6ac1c2598 fixes 2023-07-25 09:49:31 +02:00
Florent BEAUCHAMP
7e1dd7c26f feat(vmware-explorer): handle sesparse files 2023-07-25 09:49:31 +02:00
Julien Fontanet
14a0caa4c6 fix(xo-web/xoa/licenses): fix message *go TO* 2023-07-25 09:43:11 +02:00
Florent BEAUCHAMP
1c23bd5ff7 feat(read-chunk/readChunkStrict): attach read chunk to error if small text (#6940) 2023-07-20 17:01:26 +02:00
Julien Fontanet
49c161b17a fix(xo-server,xo-web): send version when probing NFS SR
Reported by @benjamreis
2023-07-20 16:46:18 +02:00
Gabriel Gunullu
18dce3fce6 test(fs): fix wrong encryption (#6945) 2023-07-20 16:32:09 +02:00
Julien Fontanet
d6fc86b6bc chore(xo-server-transport-xmpp): remove old dep node-xmpp-client
Fix possibly #6942
2023-07-20 10:54:52 +02:00
Florent BEAUCHAMP
61d960d4b1 fix(vmware-explorer): handle snapshot of 1TB+ disks 2023-07-20 10:25:28 +02:00
Florent BEAUCHAMP
02d3465832 feat(vmware-explorer): don't transform stream for raw import in thick mode 2023-07-20 10:25:28 +02:00
Florent BEAUCHAMP
4bbadc9515 feat(vmware-explorer): improve import
- use one stream instead of per block queries if possible
- retry block reading if failing
- handle unaligned end block
2023-07-20 10:25:28 +02:00
Florent BEAUCHAMP
78586291ca fix(vmware-explorer): better disk size computation 2023-07-20 10:25:28 +02:00
Florent BEAUCHAMP
945dec94bf feat(vmware-explorer): retry connection to ESXi 2023-07-20 10:25:28 +02:00
20 changed files with 736 additions and 325 deletions

View File

@@ -21,6 +21,7 @@ import {
OPTS_MAGIC,
NBD_CMD_DISC,
} from './constants.mjs'
import { Readable } from 'node:stream'
const { warn } = createLogger('vates:nbd-client')
@@ -232,19 +233,20 @@ export default class NbdClient {
}
try {
this.#waitingForResponse = true
const magic = await this.#readInt32()
const buffer = await this.#read(8)
const magic = buffer.readInt32BE(0)
if (magic !== NBD_REPLY_MAGIC) {
throw new Error(`magic number for block answer is wrong : ${magic} ${NBD_REPLY_MAGIC}`)
}
const error = await this.#readInt32()
const error = buffer.readInt32BE(1)
if (error !== 0) {
// @todo use error code from constants.mjs
throw new Error(`GOT ERROR CODE : ${error}`)
}
const blockQueryId = await this.#readInt64()
const blockQueryId = buffer.readBigUInt64BE(4)
const query = this.#commandQueryBacklog.get(blockQueryId)
if (!query) {
throw new Error(` no query associated with id ${blockQueryId}`)
@@ -281,7 +283,13 @@ export default class NbdClient {
buffer.writeInt16BE(NBD_CMD_READ, 6) // we want to read a data block
buffer.writeBigUInt64BE(queryId, 8)
// byte offset in the raw disk
buffer.writeBigUInt64BE(BigInt(index) * BigInt(size), 16)
const offset = BigInt(index) * BigInt(size)
const remaining = this.#exportSize - offset
if (remaining < BigInt(size)) {
size = Number(remaining)
}
buffer.writeBigUInt64BE(offset, 16)
buffer.writeInt32BE(size, 24)
return new Promise((resolve, reject) => {
@@ -307,14 +315,15 @@ export default class NbdClient {
})
}
async *readBlocks(indexGenerator) {
async *readBlocks(indexGenerator = 2 * 1024 * 1024) {
// default : read all blocks
if (indexGenerator === undefined) {
const exportSize = this.#exportSize
const chunkSize = 2 * 1024 * 1024
if (typeof indexGenerator === 'number') {
const exportSize = Number(this.#exportSize)
const chunkSize = indexGenerator
indexGenerator = function* () {
const nbBlocks = Math.ceil(Number(exportSize / BigInt(chunkSize)))
for (let index = 0; BigInt(index) < nbBlocks; index++) {
const nbBlocks = Math.ceil(exportSize / chunkSize)
for (let index = 0; index < nbBlocks; index++) {
yield { index, size: chunkSize }
}
}
@@ -348,4 +357,15 @@ export default class NbdClient {
yield readAhead.shift()
}
}
stream(chunk_size) {
async function* iterator() {
for await (const chunk of this.readBlocks(chunk_size)) {
yield chunk
}
}
// create a readable stream instead of returning the iterator
// since iterators don't like unshift and partial reading
return Readable.from(iterator())
}
}

View File

@@ -1,6 +1,7 @@
'use strict'
const assert = require('assert')
const isUtf8 = require('isutf8')
/**
* Read a chunk of data from a stream.
@@ -81,6 +82,13 @@ exports.readChunkStrict = async function readChunkStrict(stream, size) {
if (size !== undefined && chunk.length !== size) {
const error = new Error(`stream has ended with not enough data (actual: ${chunk.length}, expected: ${size})`)
// Buffer.isUtf8 is too recent for now
// @todo : replace external package by Buffer.isUtf8 when the supported version of node reach 18
if (chunk.length < 1024 && isUtf8(chunk)) {
error.text = chunk.toString('utf8')
}
Object.defineProperties(error, {
chunk: {
value: chunk,

View File

@@ -102,12 +102,37 @@ describe('readChunkStrict', function () {
assert.strictEqual(error.chunk, undefined)
})
it('throws if stream ends with not enough data', async () => {
it('throws if stream ends with not enough data, utf8', async () => {
const error = await rejectionOf(readChunkStrict(makeStream(['foo', 'bar']), 10))
assert(error instanceof Error)
assert.strictEqual(error.message, 'stream has ended with not enough data (actual: 6, expected: 10)')
assert.strictEqual(error.text, 'foobar')
assert.deepEqual(error.chunk, Buffer.from('foobar'))
})
it('throws if stream ends with not enough data, non utf8 ', async () => {
const source = [Buffer.alloc(10, 128), Buffer.alloc(10, 128)]
const error = await rejectionOf(readChunkStrict(makeStream(source), 30))
assert(error instanceof Error)
assert.strictEqual(error.message, 'stream has ended with not enough data (actual: 20, expected: 30)')
assert.strictEqual(error.text, undefined)
assert.deepEqual(error.chunk, Buffer.concat(source))
})
it('throws if stream ends with not enough data, utf8 , long data', async () => {
const source = Buffer.from('a'.repeat(1500))
const error = await rejectionOf(readChunkStrict(makeStream([source]), 2000))
assert(error instanceof Error)
assert.strictEqual(error.message, `stream has ended with not enough data (actual: 1500, expected: 2000)`)
assert.strictEqual(error.text, undefined)
assert.deepEqual(error.chunk, source)
})
it('succeed', async () => {
const source = Buffer.from('a'.repeat(20))
const chunk = await readChunkStrict(makeStream([source]), 10)
assert.deepEqual(source.subarray(10), chunk)
})
})
describe('skip', function () {
@@ -134,6 +159,16 @@ describe('skip', function () {
it('returns less size if stream ends', async () => {
assert.deepEqual(await skip(makeStream('foo bar'), 10), 7)
})
it('put back if it read too much', async () => {
let source = makeStream(['foo', 'bar'])
await skip(source, 1) // read part of data chunk
const chunk = (await readChunkStrict(source, 2)).toString('utf-8')
assert.strictEqual(chunk, 'oo')
source = makeStream(['foo', 'bar'])
assert.strictEqual(await skip(source, 3), 3) // read aligned with data chunk
})
})
describe('skipStrict', function () {
@@ -144,4 +179,9 @@ describe('skipStrict', function () {
assert.strictEqual(error.message, 'stream has ended with not enough data (actual: 7, expected: 10)')
assert.deepEqual(error.bytesSkipped, 7)
})
it('succeed', async () => {
const source = makeStream(['foo', 'bar', 'baz'])
const res = await skipStrict(source, 4)
assert.strictEqual(res, undefined)
})
})

View File

@@ -33,5 +33,8 @@
},
"devDependencies": {
"test": "^3.2.1"
},
"dependencies": {
"isutf8": "^4.0.0"
}
}

View File

@@ -209,7 +209,7 @@ describe('encryption', () => {
// encrypt with a non default algorithm
const encryptor = _getEncryptor('aes-256-cbc', '73c1838d7d8a6088ca2317fb5f29cd91')
await fs.writeFile(`${dir}/encryption.json`, `{"algorithm": "aes-256-gmc"}`)
await fs.writeFile(`${dir}/encryption.json`, `{"algorithm": "aes-256-gcm"}`)
await fs.writeFile(`${dir}/metadata.json`, encryptor.encryptData(`{"random": "NOTSORANDOM"}`))
// remote is now non empty : can't modify key anymore

View File

@@ -48,7 +48,7 @@ export default class VhdEsxiCowd extends VhdAbstract {
// depending on the parameters we also look into the parent data
return (
this.#grainDirectory.readInt32LE(blockId * 4) !== 0 ||
this.#grainDirectory.readUInt32LE(blockId * 4) !== 0 ||
(this.#lookMissingBlockInParent && this.#parentVhd.containsBlock(blockId))
)
}
@@ -61,14 +61,14 @@ export default class VhdEsxiCowd extends VhdAbstract {
const buffer = await this.#read(0, 2048)
strictEqual(buffer.slice(0, 4).toString('ascii'), 'COWD')
strictEqual(buffer.readInt32LE(4), 1) // version
strictEqual(buffer.readInt32LE(8), 3) // flags
const numSectors = buffer.readInt32LE(12)
const grainSize = buffer.readInt32LE(16)
strictEqual(buffer.readUInt32LE(4), 1) // version
strictEqual(buffer.readUInt32LE(8), 3) // flags
const numSectors = buffer.readUInt32LE(12)
const grainSize = buffer.readUInt32LE(16)
strictEqual(grainSize, 1) // 1 grain should be 1 sector long
strictEqual(buffer.readInt32LE(20), 4) // grain directory position in sectors
strictEqual(buffer.readUInt32LE(20), 4) // grain directory position in sectors
const nbGrainDirectoryEntries = buffer.readInt32LE(24)
const nbGrainDirectoryEntries = buffer.readUInt32LE(24)
strictEqual(nbGrainDirectoryEntries, Math.ceil(numSectors / 4096))
const size = numSectors * 512
// a grain directory entry contains the address of a grain table
@@ -90,7 +90,7 @@ export default class VhdEsxiCowd extends VhdAbstract {
// we're lucky : a grain address can address exactly a full block
async readBlock(blockId) {
notEqual(this.#grainDirectory, undefined, 'grainDirectory is not loaded')
const sectorOffset = this.#grainDirectory.readInt32LE(blockId * 4)
const sectorOffset = this.#grainDirectory.readUInt32LE(blockId * 4)
const buffer = (await this.#parentVhd.readBlock(blockId)).buffer
@@ -137,7 +137,7 @@ export default class VhdEsxiCowd extends VhdAbstract {
}
for (let i = 0; i < graintable.length / 4; i++) {
const grainOffset = graintable.readInt32LE(i * 4)
const grainOffset = graintable.readUInt32LE(i * 4)
if (grainOffset === 0) {
// the content from parent : it is already in buffer
await changeRange()

View File

@@ -1,7 +1,7 @@
import _computeGeometryForSize from 'vhd-lib/_computeGeometryForSize.js'
import { createFooter, createHeader } from 'vhd-lib/_createFooterHeader.js'
import { DISK_TYPES, FOOTER_SIZE } from 'vhd-lib/_constants.js'
import { readChunk } from '@vates/read-chunk'
import { readChunkStrict, skipStrict } from '@vates/read-chunk'
import { Task } from '@vates/task'
import { unpackFooter, unpackHeader } from 'vhd-lib/Vhd/_utils.js'
import { VhdAbstract } from 'vhd-lib'
@@ -21,6 +21,10 @@ export default class VhdEsxiRaw extends VhdAbstract {
#header
#footer
#streamOffset = 0
#stream
#reading = false
static async open(esxi, datastore, path, opts) {
const vhd = new VhdEsxiRaw(esxi, datastore, path, opts)
await vhd.readHeaderAndFooter()
@@ -49,10 +53,10 @@ export default class VhdEsxiRaw extends VhdAbstract {
this.#header = unpackHeader(createHeader(length / VHD_BLOCK_LENGTH))
const geometry = _computeGeometryForSize(length)
const actualSize = geometry.actualSize
this.#footer = unpackFooter(
createFooter(actualSize, Math.floor(Date.now() / 1000), geometry, FOOTER_SIZE, DISK_TYPES.DYNAMIC)
// length can be smaller than disk capacity due to alignment to head/cylinder/sector
createFooter(length, Math.floor(Date.now() / 1000), geometry, FOOTER_SIZE, DISK_TYPES.DYNAMIC)
)
}
@@ -64,12 +68,65 @@ export default class VhdEsxiRaw extends VhdAbstract {
return this.#bat.has(blockId)
}
async readBlock(blockId) {
async #readChunk(start, length) {
if (this.#reading) {
throw new Error('reading must be done sequentially')
}
try {
this.#reading = true
if (this.#stream !== undefined) {
// stream is too far ahead or too far behind
if (this.#streamOffset > start || this.#streamOffset + VHD_BLOCK_LENGTH < start) {
this.#stream.destroy()
this.#stream = undefined
this.#streamOffset = 0
}
}
// no stream
if (this.#stream === undefined) {
const end = this.footer.currentSize - 1
const res = await this.#esxi.download(this.#datastore, this.#path, `${start}-${end}`)
this.#stream = res.body
this.#streamOffset = start
}
// stream a little behind
if (this.#streamOffset < start) {
await skipStrict(this.#stream, start - this.#streamOffset)
this.#streamOffset = start
}
// really read data
this.#streamOffset += length
const data = await readChunkStrict(this.#stream, length)
return data
} catch (error) {
error.start = start
error.length = length
error.streamLength = this.footer.currentSize
this.#stream?.destroy()
this.#stream = undefined
this.#streamOffset = 0
throw error
} finally {
this.#reading = false
}
}
async #readBlock(blockId) {
const start = blockId * VHD_BLOCK_LENGTH
const end = (blockId + 1) * VHD_BLOCK_LENGTH - 1
let length = VHD_BLOCK_LENGTH
let partial = false
if (start + length > this.footer.currentSize) {
length = this.footer.currentSize - start
partial = true
}
const data = await (await this.#esxi.download(this.#datastore, this.#path, `${start}-${end}`)).buffer()
let data = await this.#readChunk(start, length)
if (partial) {
data = Buffer.concat([data, Buffer.alloc(VHD_BLOCK_LENGTH - data.length)])
}
const bitmap = Buffer.alloc(512, 255)
return {
id: blockId,
@@ -79,28 +136,44 @@ export default class VhdEsxiRaw extends VhdAbstract {
}
}
async readBlock(blockId) {
let tries = 5
let lastError
while (tries > 0) {
try {
const res = await this.#readBlock(blockId)
return res
} catch (error) {
lastError = error
lastError.blockId = blockId
console.warn('got error , will retry in 2seconds', lastError)
}
await new Promise(resolve => setTimeout(() => resolve(), 2000))
tries--
}
throw lastError
}
// this will read all the disk once to check which block contains data, it can take a long time to execute depending on the network speed
async readBlockAllocationTable() {
if (!this.#thin) {
// fast path : if we do not use thin mode, the BAT is full
return
}
const res = await this.#esxi.download(this.#datastore, this.#path)
const length = res.headers.get('content-length')
const stream = res.body
const empty = Buffer.alloc(VHD_BLOCK_LENGTH, 0)
let pos = 0
this.#bat = new Set()
let nextChunkLength = Math.min(VHD_BLOCK_LENGTH, length)
Task.set('total', length / VHD_BLOCK_LENGTH)
let nextChunkLength = Math.min(VHD_BLOCK_LENGTH, this.footer.currentSize)
Task.set('total', this.footer.currentSize / VHD_BLOCK_LENGTH)
const progress = setInterval(() => {
Task.set('progress', Math.round((pos * 100) / length))
console.log('reading blocks', pos / VHD_BLOCK_LENGTH, '/', length / VHD_BLOCK_LENGTH)
Task.set('progress', Math.round((pos * 100) / this.footer.currentSize))
console.log('reading blocks', pos / VHD_BLOCK_LENGTH, '/', this.footer.currentSize / VHD_BLOCK_LENGTH)
}, 30 * 1000)
while (nextChunkLength > 0) {
try {
const chunk = await readChunk(stream, nextChunkLength)
const chunk = await this.#readChunk(pos, nextChunkLength)
let isEmpty
if (nextChunkLength === VHD_BLOCK_LENGTH) {
isEmpty = empty.equals(chunk)
@@ -112,15 +185,28 @@ export default class VhdEsxiRaw extends VhdAbstract {
this.#bat.add(pos / VHD_BLOCK_LENGTH)
}
pos += VHD_BLOCK_LENGTH
nextChunkLength = Math.min(VHD_BLOCK_LENGTH, length - pos)
nextChunkLength = Math.min(VHD_BLOCK_LENGTH, this.footer.currentSize - pos)
} catch (error) {
clearInterval(progress)
throw error
}
}
console.log('BAT reading done, remaining ', this.#bat.size, '/', Math.ceil(length / VHD_BLOCK_LENGTH))
console.log(
'BAT reading done, remaining ',
this.#bat.size,
'/',
Math.ceil(this.footer.currentSize / VHD_BLOCK_LENGTH)
)
clearInterval(progress)
}
rawContent() {
return this.#esxi.download(this.#datastore, this.#path).then(res => {
const stream = res.body
stream.length = this.footer.currentSize
return stream
})
}
}
/* eslint-enable no-console */

View File

@@ -1,18 +1,54 @@
import _computeGeometryForSize from 'vhd-lib/_computeGeometryForSize.js'
import { createFooter, createHeader } from 'vhd-lib/_createFooterHeader.js'
import { FOOTER_SIZE } from 'vhd-lib/_constants.js'
import { DISK_TYPES, FOOTER_SIZE } from 'vhd-lib/_constants.js'
import { notEqual, strictEqual } from 'node:assert'
import { unpackFooter, unpackHeader } from 'vhd-lib/Vhd/_utils.js'
import { VhdAbstract } from 'vhd-lib'
// from https://github.com/qemu/qemu/commit/98eb9733f4cf2eeab6d12db7e758665d2fd5367b#
// one big difference with the other versions of VMDK is that the grain tables are actually sparse, they are pre-allocated but not used in grain order,
// so we have to read the grain directory to know where to find the grain tables
function readInt64(buffer, index) {
const n = buffer.readBigInt64LE(index * 8 /* size of an int64 in bytes */)
if (n > Number.MAX_SAFE_INTEGER) {
const SE_SPARSE_DIR_NON_ALLOCATED = 0
const SE_SPARSE_DIR_ALLOCATED = 1
const SE_SPARSE_GRAIN_NON_ALLOCATED = 0 // check in parent
const SE_SPARSE_GRAIN_UNMAPPED = 1 // grain has been unmapped, but index of previous grain still readable for reclamation
const SE_SPARSE_GRAIN_ZERO = 2
const SE_SPARSE_GRAIN_ALLOCATED = 3
const VHD_BLOCK_SIZE_BYTES = 2 * 1024 * 1024
const GRAIN_SIZE_BYTES = 4 * 1024
const GRAIN_TABLE_COUNT = 4 * 1024
const ones = n => (1n << BigInt(n)) - 1n
function asNumber(n) {
if (n > Number.MAX_SAFE_INTEGER)
throw new Error(`can't handle ${n} ${Number.MAX_SAFE_INTEGER} ${n & 0x00000000ffffffffn}`)
return Number(n)
}
return +n
const readInt64 = (buffer, index) => asNumber(buffer.readBigInt64LE(index * 8))
/**
* @returns {{topNibble: number, low60: bigint}} topNibble is the first 4 bits of the 64 bits entry, indexPart is the remaining 60 bits
*/
function readTaggedEntry(buffer, index) {
const entry = buffer.readBigInt64LE(index * 8)
return { topNibble: Number(entry >> 60n), low60: entry & ones(60) }
}
function readSeSparseDir(buffer, index) {
const { topNibble, low60 } = readTaggedEntry(buffer, index)
return { type: topNibble, tableIndex: asNumber(low60) }
}
function readSeSparseTable(buffer, index) {
const { topNibble, low60 } = readTaggedEntry(buffer, index)
// https://lists.gnu.org/archive/html/qemu-block/2019-06/msg00934.html
const topIndexPart = low60 >> 48n // bring the top 12 bits down
const bottomIndexPart = (low60 & ones(48)) << 12n // bring the bottom 48 bits up
return { type: topNibble, grainIndex: asNumber(bottomIndexPart | topIndexPart) }
}
export default class VhdEsxiSeSparse extends VhdAbstract {
@@ -25,27 +61,22 @@ export default class VhdEsxiSeSparse extends VhdAbstract {
#header
#footer
#grainDirectory
// as we will read all grains with data, we load everything in memory
// in theory, that can be 512MB of data for a 2TB fully allocated disk
// but our use case is to transfer a relatively small diff
// and random access is expensive in HTTP, and migration is a one-time cost
// so let's go with the naive approach, and future me will have to handle a more
// clever approach if necessary
// grain at zero won't be stored
#grainIndex // Map blockId => []
#grainMap = new Map()
#grainSize
#grainTableSize
#grainTableOffset
#grainOffset
#grainDirOffsetBytes
#grainDirSizeBytes
#grainTableOffsetBytes
#grainOffsetBytes
static async open(esxi, datastore, path, parentVhd, opts) {
const vhd = new VhdEsxiSeSparse(esxi, datastore, path, parentVhd, opts)
await vhd.readHeaderAndFooter()
return vhd
}
get path() {
return this.#path
}
constructor(esxi, datastore, path, parentVhd, { lookMissingBlockInParent = true } = {}) {
super()
this.#esxi = esxi
@@ -63,156 +94,149 @@ export default class VhdEsxiSeSparse extends VhdAbstract {
return this.#footer
}
async #readGrain(start, length = 4 * 1024) {
return (await this.#esxi.download(this.#datastore, this.#path, `${start}-${start + length - 1}`)).buffer()
}
containsBlock(blockId) {
notEqual(this.#grainDirectory, undefined, "bat must be loaded to use contain blocks'")
// a grain table is 4096 entries of 4KB
// a grain table cover 8 vhd blocks
// grain table always exists in sesparse
// depending on the paramters we also look into the parent data
notEqual(this.#grainIndex, undefined, "bat must be loaded to use contain blocks'")
return (
this.#grainDirectory.readInt32LE(blockId * 4) !== 0 ||
this.#grainIndex.get(blockId) !== undefined ||
(this.#lookMissingBlockInParent && this.#parentVhd.containsBlock(blockId))
)
}
async #read(start, end) {
return (await this.#esxi.download(this.#datastore, this.#path, `${start}-${end}`)).buffer()
async #read(start, length) {
const buffer = await (
await this.#esxi.download(this.#datastore, this.#path, `${start}-${start + length - 1}`)
).buffer()
strictEqual(buffer.length, length)
return buffer
}
async readHeaderAndFooter() {
const buffer = await this.#read(0, 2048)
strictEqual(buffer.readBigInt64LE(0), 0xcafebaben)
const vmdkHeaderBuffer = await this.#read(0, 2048)
strictEqual(readInt64(buffer, 1), 0x200000001) // version 2.1
strictEqual(vmdkHeaderBuffer.readBigInt64LE(0), 0xcafebaben)
strictEqual(readInt64(vmdkHeaderBuffer, 1), 0x200000001) // version 2.1
const capacity = readInt64(buffer, 2)
const grain_size = readInt64(buffer, 3)
this.#grainDirOffsetBytes = readInt64(vmdkHeaderBuffer, 16) * 512
// console.log('grainDirOffsetBytes', this.#grainDirOffsetBytes)
this.#grainDirSizeBytes = readInt64(vmdkHeaderBuffer, 17) * 512
// console.log('grainDirSizeBytes', this.#grainDirSizeBytes)
const grain_tables_offset = readInt64(buffer, 18)
const grain_tables_size = readInt64(buffer, 19)
this.#grainOffset = readInt64(buffer, 24)
const grainSizeSectors = readInt64(vmdkHeaderBuffer, 3)
const grainSizeBytes = grainSizeSectors * 512 // 8 sectors = 4KB default
strictEqual(grainSizeBytes, GRAIN_SIZE_BYTES) // we only support default grain size
this.#grainSize = grain_size * 512 // 8 sectors / 4KB default
this.#grainTableOffset = grain_tables_offset * 512
this.#grainTableSize = grain_tables_size * 512
this.#grainTableOffsetBytes = readInt64(vmdkHeaderBuffer, 18) * 512
// console.log('grainTableOffsetBytes', this.#grainTableOffsetBytes)
const size = capacity * grain_size * 512
this.#header = unpackHeader(createHeader(Math.ceil(size / (4096 * 512))))
const geometry = _computeGeometryForSize(size)
const actualSize = geometry.actualSize
const grainTableCount = (readInt64(vmdkHeaderBuffer, 4) * 512) / 8 // count is the number of 64b entries in each tables
// console.log('grainTableCount', grainTableCount)
strictEqual(grainTableCount, GRAIN_TABLE_COUNT) // we only support tables of 4096 entries (default)
this.#grainOffsetBytes = readInt64(vmdkHeaderBuffer, 24) * 512
// console.log('grainOffsetBytes', this.#grainOffsetBytes)
const sizeBytes = readInt64(vmdkHeaderBuffer, 2) * 512
// console.log('sizeBytes', sizeBytes)
const nbBlocks = Math.ceil(sizeBytes / VHD_BLOCK_SIZE_BYTES)
this.#header = unpackHeader(createHeader(nbBlocks))
const geometry = _computeGeometryForSize(sizeBytes)
this.#footer = unpackFooter(
createFooter(actualSize, Math.floor(Date.now() / 1000), geometry, FOOTER_SIZE, this.#parentVhd.footer.diskType)
createFooter(sizeBytes, Math.floor(Date.now() / 1000), geometry, FOOTER_SIZE, DISK_TYPES.DYNAMIC)
)
}
async readBlockAllocationTable() {
const CHUNK_SIZE = 64 * 512
this.#grainIndex = new Map()
strictEqual(this.#grainTableSize % CHUNK_SIZE, 0)
for (let chunkIndex = 0, grainIndex = 0; chunkIndex < this.#grainTableSize / CHUNK_SIZE; chunkIndex++) {
process.stdin.write('.')
const start = chunkIndex * CHUNK_SIZE + this.#grainTableOffset
const end = start + 4096 * 8 - 1
const buffer = await this.#read(start, end)
for (let indexInChunk = 0; indexInChunk < 4096; indexInChunk++) {
const entry = buffer.readBigInt64LE(indexInChunk * 8)
switch (entry) {
case 0n: // not allocated, go to parent
break
case 1n: // unmapped
break
}
if (entry > 3n) {
this.#grainMap.set(grainIndex)
grainIndex++
}
}
}
// read grain directory and the grain tables
const nbBlocks = this.header.maxTableEntries
this.#grainDirectory = await this.#read(2048 /* header length */, 2048 + nbBlocks * 4 - 1)
}
// we're lucky : a grain address can address exactly a full block
async readBlock(blockId) {
notEqual(this.#grainDirectory, undefined, 'grainDirectory is not loaded')
const sectorOffset = this.#grainDirectory.readInt32LE(blockId * 4)
const buffer = (await this.#parentVhd.readBlock(blockId)).buffer
if (sectorOffset === 0) {
strictEqual(this.#lookMissingBlockInParent, true, "shouldn't have empty block in a delta alone")
return {
id: blockId,
bitmap: buffer.slice(0, 512),
data: buffer.slice(512),
buffer,
}
}
const offset = sectorOffset * 512
const graintable = await this.#read(offset, offset + 4096 * 4 /* grain table length */ - 1)
strictEqual(graintable.length, 4096 * 4)
// we have no guaranty that data are order or contiguous
// let's construct ranges to limit the number of queries
let rangeStart, offsetStart, offsetEnd
const changeRange = async (index, offset) => {
if (offsetStart !== undefined) {
// if there was a
if (offset === offsetEnd) {
offsetEnd++
return
}
const grains = await this.#read(offsetStart * 512, offsetEnd * 512 - 1)
grains.copy(buffer, (rangeStart + 1) /* block bitmap */ * 512)
}
if (offset) {
// we're at the beginning of a range present in the file
rangeStart = index
offsetStart = offset
offsetEnd = offset + 1
} else {
// we're at the beginning of a range from the parent or empty
rangeStart = undefined
offsetStart = undefined
offsetEnd = undefined
}
}
for (let i = 0; i < graintable.length / 4; i++) {
const grainOffset = graintable.readInt32LE(i * 4)
if (grainOffset === 0) {
await changeRange()
// from parent
const tableSizeBytes = GRAIN_TABLE_COUNT * 8
const grainDirBuffer = await this.#read(this.#grainDirOffsetBytes, this.#grainDirSizeBytes)
// read the grain dir ( first level )
for (let grainDirIndex = 0; grainDirIndex < grainDirBuffer.length / 8; grainDirIndex++) {
const { type: grainDirType, tableIndex } = readSeSparseDir(grainDirBuffer, grainDirIndex)
if (grainDirType === SE_SPARSE_DIR_NON_ALLOCATED) {
// no grain table allocated at all in this grain dir
continue
}
if (grainOffset === 1) {
await changeRange()
// this is a emptied grain, no data, don't look into parent
buffer.fill(0, (i + 1) /* block bitmap */ * 512)
strictEqual(grainDirType, SE_SPARSE_DIR_ALLOCATED)
// read the corresponding grain table ( second level )
const grainTableBuffer = await this.#read(
this.#grainTableOffsetBytes + tableIndex * tableSizeBytes,
tableSizeBytes
)
// offset in bytes if >0, grainType if <=0
let grainOffsets = []
let blockId = grainDirIndex * 8
const addGrain = val => {
grainOffsets.push(val)
// 4096 block of 4Kb per dir entry =>16MB/grain dir
// 1 block = 2MB
// 512 grain => 1 block
// 8 block per dir entry
if (grainOffsets.length === 512) {
this.#grainIndex.set(blockId, grainOffsets)
grainOffsets = []
blockId++
}
}
if (grainOffset > 1) {
// non empty grain
await changeRange(i, grainOffset)
for (let grainTableIndex = 0; grainTableIndex < grainTableBuffer.length / 8; grainTableIndex++) {
const { type: grainType, grainIndex } = readSeSparseTable(grainTableBuffer, grainTableIndex)
if (grainType === SE_SPARSE_GRAIN_ALLOCATED) {
// this is ok in 32 bits int with VMDK smaller than 2TB
const offsetByte = grainIndex * GRAIN_SIZE_BYTES + this.#grainOffsetBytes
addGrain(offsetByte)
} else {
// multiply by -1 to differentiate type and offset
// no offset can be zero
addGrain(-grainType)
}
}
await changeRange()
return {
strictEqual(grainOffsets.length, 0)
}
}
async readBlock(blockId) {
let changed = false
const parentBlock = await this.#parentVhd.readBlock(blockId)
const parentBuffer = parentBlock.buffer
const grainOffsets = this.#grainIndex.get(blockId) // may be undefined if the child contains block and lookMissingBlockInParent=true
const EMPTY_GRAIN = Buffer.alloc(GRAIN_SIZE_BYTES, 0)
for (const index in grainOffsets) {
const value = grainOffsets[index]
let data
if (value > 0) {
// it's the offset in byte of a grain type SE_SPARSE_GRAIN_ALLOCATED
data = await this.#read(value, GRAIN_SIZE_BYTES)
} else {
// back to the real grain type
const type = value * -1
switch (type) {
case SE_SPARSE_GRAIN_ZERO:
case SE_SPARSE_GRAIN_UNMAPPED:
data = EMPTY_GRAIN
break
case SE_SPARSE_GRAIN_NON_ALLOCATED:
/* from parent */
break
default:
throw new Error(`can't handle grain type ${type}`)
}
}
if (data) {
changed = true
data.copy(parentBuffer, index * GRAIN_SIZE_BYTES + 512 /* block bitmap */)
}
}
// no need to copy if data all come from parent
return changed
? {
id: blockId,
bitmap: buffer.slice(0, 512),
data: buffer.slice(512),
buffer,
}
bitmap: parentBuffer.slice(0, 512),
data: parentBuffer.slice(512),
buffer: parentBuffer,
}
: parentBlock
}
}

View File

@@ -1,4 +1,5 @@
import { Client } from '@vates/node-vsphere-soap'
import { createLogger } from '@xen-orchestra/log'
import { dirname } from 'node:path'
import { EventEmitter } from 'node:events'
import { strictEqual, notStrictEqual } from 'node:assert'
@@ -9,6 +10,8 @@ import parseVmdk from './parsers/vmdk.mjs'
import parseVmsd from './parsers/vmsd.mjs'
import parseVmx from './parsers/vmx.mjs'
const { warn } = createLogger('xo:vmware-explorer:esxi')
export default class Esxi extends EventEmitter {
#client
#cookies
@@ -64,7 +67,7 @@ export default class Esxi extends EventEmitter {
})
}
async download(dataStore, path, range) {
async #download(dataStore, path, range) {
strictEqual(this.#ready, true)
notStrictEqual(this.#dcPath, undefined)
const url = new URL('https://localhost')
@@ -102,6 +105,24 @@ export default class Esxi extends EventEmitter {
return res
}
async download(dataStore, path, range) {
let tries = 5
let lastError
while (tries > 0) {
try {
const res = await this.#download(dataStore, path, range)
return res
} catch (error) {
warn('got error , will retry in 2 seconds', { error })
lastError = error
}
await new Promise(resolve => setTimeout(() => resolve(), 2000))
tries--
}
throw lastError
}
// inspired from https://github.com/reedog117/node-vsphere-soap/blob/master/test/vsphere-soap.test.js#L95
async search(type, properties) {
// get property collector

View File

@@ -1,13 +1,10 @@
import VHDEsxiSeSparse from './VhdEsxiSeSparse.mjs'
import VhdEsxiCowd from './VhdEsxiCowd.mjs'
// import VhdEsxiSeSparse from "./VhdEsxiSeSparse.mjs";
export default async function openDeltaVmdkasVhd(esxi, datastore, path, parentVhd, opts) {
let vhd
if (path.endsWith('-sesparse.vmdk')) {
throw new Error(
`sesparse VMDK reading is not functional yet ${path}. For now, this VM can only be migrated if it doesn't have any snapshots and if it is halted.`
)
// vhd = new VhdEsxiSeSparse(esxi, datastore, path, parentVhd, opts)
vhd = new VHDEsxiSeSparse(esxi, datastore, path, parentVhd, opts)
} else {
if (path.endsWith('-delta.vmdk')) {
vhd = new VhdEsxiCowd(esxi, datastore, path, parentVhd, opts)

View File

@@ -4,11 +4,12 @@
"version": "0.2.3",
"name": "@xen-orchestra/vmware-explorer",
"dependencies": {
"@vates/task": "^0.2.0",
"@vates/node-vsphere-soap": "^1.0.0",
"@vates/read-chunk": "^1.1.1",
"@vates/task": "^0.2.0",
"@xen-orchestra/log": "^0.6.0",
"lodash": "^4.17.21",
"node-fetch": "^3.3.0",
"@vates/node-vsphere-soap": "^1.0.0",
"vhd-lib": "^4.5.0"
},
"engines": {

View File

@@ -8,6 +8,7 @@
> Users must be able to say: “Nice enhancement, I'm eager to test it”
- [Backup/Restore] Button to open the raw log in the REST API (PR [#6936](https://github.com/vatesfr/xen-orchestra/pull/6936))
- [Vmware/Import] Support esxi 6.5+ with snapshot (PR [#6909](https://github.com/vatesfr/xen-orchestra/pull/6909))
### Bug fixes
@@ -17,6 +18,8 @@
- [REST API] Fix VDI export when NBD is enabled
- [XO Config Cloud Backup] Improve wording about passphrase (PR [#6938](https://github.com/vatesfr/xen-orchestra/pull/6938))
- [Pool] Fix IPv6 handling when adding hosts
- [New SR] Send provided NFS version to XAPI when probing a share
- [Backup/exports] Show more information on error ` stream has ended with not enough data (actual: xxx, expected: 512)` (PR [#6940](https://github.com/vatesfr/xen-orchestra/pull/6940))
### Packages to release
@@ -38,10 +41,13 @@
- @vates/nbd-client major
- @vates/node-vsphere-soap major
- @xen-orchestra/backups minor
- @xen-orchestra/vmware-explorer minor
- @xen-orchestra/xapi major
- @vates/read-chunk minor
- complex-matcher patch
- xen-api patch
- xo-server patch
- xo-server minor
- xo-server-transport-xmpp patch
- xo-server-audit patch
- xo-web minor

View File

@@ -26,10 +26,10 @@
"preferGlobal": false,
"main": "dist/",
"engines": {
"node": ">=6"
"node": ">=10"
},
"dependencies": {
"node-xmpp-client": "^3.0.0",
"@xmpp/client": "^0.13.1",
"promise-toolbox": "^0.21.0"
},
"devDependencies": {

View File

@@ -1,5 +1,5 @@
import fromEvent from 'promise-toolbox/fromEvent'
import XmppClient from 'node-xmpp-client'
import { client, xml } from '@xmpp/client'
// ===================================================================
@@ -46,13 +46,16 @@ class TransportXmppPlugin {
this._client = null
}
configure(conf) {
this._conf = conf
this._conf.reconnect = true
configure({ host, jid, port, password }) {
this._conf = {
password,
service: Object.assign(new URL('xmpp://localhost'), { hostname: host, port }).href,
username: jid,
}
}
async load() {
this._client = new XmppClient(this._conf)
this._client = client(this._conf)
this._client.on('error', () => {})
await fromEvent(this._client.connection.socket, 'data')
@@ -71,12 +74,14 @@ class TransportXmppPlugin {
_sendToXmppClient({ to, message }) {
for (const receiver of to) {
this._client.send(
new XmppClient.Stanza('message', {
xml(
'message',
{
to: receiver,
type: 'chat',
})
.c('body')
.t(message)
},
xml('body', {}, message)
)
)
}
}

View File

@@ -467,10 +467,11 @@ createZfs.resolve = {
// This function helps to detect all NFS shares (exports) on a NFS server
// Return a table of exports with their paths and ACLs
export async function probeNfs({ host, server }) {
export async function probeNfs({ host, nfsVersion, server }) {
const xapi = this.getXapi(host)
const deviceConfig = {
nfsversion: nfsVersion,
server,
}
@@ -501,6 +502,7 @@ export async function probeNfs({ host, server }) {
probeNfs.params = {
host: { type: 'string' },
nfsVersion: { type: 'string', optional: true },
server: { type: 'string' },
}
@@ -837,10 +839,11 @@ probeHbaExists.resolve = {
// This function helps to detect if this NFS SR already exists in XAPI
// It returns a table of SR UUID, empty if no existing connections
export async function probeNfsExists({ host, server, serverPath }) {
export async function probeNfsExists({ host, nfsVersion, server, serverPath }) {
const xapi = this.getXapi(host)
const deviceConfig = {
nfsversion: nfsVersion,
server,
serverpath: serverPath,
}
@@ -859,6 +862,7 @@ export async function probeNfsExists({ host, server, serverPath }) {
probeNfsExists.params = {
host: { type: 'string' },
nfsVersion: { type: 'string', optional: true },
server: { type: 'string' },
serverPath: { type: 'string' },
}

View File

@@ -4,7 +4,7 @@ import { fromEvent } from 'promise-toolbox'
import { createRunner } from '@xen-orchestra/backups/Backup.mjs'
import { Task } from '@xen-orchestra/mixins/Tasks.mjs'
import { v4 as generateUuid } from 'uuid'
import { VDI_FORMAT_VHD } from '@xen-orchestra/xapi'
import { VDI_FORMAT_RAW, VDI_FORMAT_VHD } from '@xen-orchestra/xapi'
import asyncMapSettled from '@xen-orchestra/async-map/legacy.js'
import Esxi from '@xen-orchestra/vmware-explorer/esxi.mjs'
import openDeltaVmdkasVhd from '@xen-orchestra/vmware-explorer/openDeltaVmdkAsVhd.mjs'
@@ -271,10 +271,16 @@ export default class MigrateVm {
}
parentVhd = vhd
}
if (vhd !== undefined) {
// it can be empty if the VM don't have a snapshot and is running
if (vhd !== undefined) {
if (thin) {
const stream = vhd.stream()
await vdi.$importContent(stream, { format: VDI_FORMAT_VHD })
} else {
// no transformation when there is no snapshot in thick mode
const stream = await vhd.rawContent()
await vdi.$importContent(stream, { format: VDI_FORMAT_RAW })
}
}
return { vdi, vhd }
})

View File

@@ -2513,7 +2513,7 @@ const messages = {
licensesBinding: 'Licenses binding',
notEnoughXcpngLicenses: 'Not enough XCP-ng licenses',
notBoundSelectLicense: 'Not bound (Plan (ID), expiration date)',
xcpngLicensesBindingAvancedView: "To bind an XCP-ng license, go the pool's Advanced tab.",
xcpngLicensesBindingAvancedView: "To bind an XCP-ng license, go to the pool's Advanced tab.",
xosanUnregisteredDisclaimer:
'You are not registered and therefore will not be able to create or manage your XOSAN SRs. {link}',
xosanSourcesDisclaimer:

View File

@@ -2693,9 +2693,10 @@ export const fetchFiles = (remote, disk, partition, paths) =>
// -------------------------------------------------------------------
export const probeSrNfs = (host, server) => _call('sr.probeNfs', { host, server })
export const probeSrNfs = (host, server, nfsVersion) => _call('sr.probeNfs', { host, nfsVersion, server })
export const probeSrNfsExists = (host, server, serverPath) => _call('sr.probeNfsExists', { host, server, serverPath })
export const probeSrNfsExists = (host, server, serverPath, nfsVersion) =>
_call('sr.probeNfsExists', { host, nfsVersion, server, serverPath })
export const probeSrIscsiIqns = (host, target, port = undefined, chapUser = undefined, chapPassword) => {
const params = { host, target }

View File

@@ -467,11 +467,11 @@ export default class New extends Component {
_handleSearchServer = async () => {
const { password, port, server, username } = this.refs
const { host, type } = this.state
const { host, nfsVersion, type } = this.state
try {
if (type === 'nfs' || type === 'nfsiso') {
const paths = await probeSrNfs(host.id, server.value)
const paths = await probeSrNfs(host.id, server.value, nfsVersion !== '' ? nfsVersion : undefined)
this.setState({
usage: undefined,
paths,
@@ -500,12 +500,12 @@ export default class New extends Component {
_handleSrPathSelection = async path => {
const { server } = this.refs
const { host } = this.state
const { host, nfsVersion } = this.state
try {
this.setState(({ loading }) => ({ loading: loading + 1 }))
this.setState({
existingSrs: await probeSrNfsExists(host.id, server.value, path),
existingSrs: await probeSrNfsExists(host.id, server.value, path, nfsVersion !== '' ? nfsVersion : undefined),
path,
usage: true,
summary: true,

403
yarn.lock
View File

@@ -705,7 +705,7 @@
dependencies:
tslib "^2.5.0"
"@babel/cli@^7.0.0", "@babel/cli@^7.1.5", "@babel/cli@^7.13.16", "@babel/cli@^7.4.4", "@babel/cli@^7.7.0", "@babel/cli@^7.7.4":
"@babel/cli@^7.0.0", "@babel/cli@^7.1.5", "@babel/cli@^7.13.16", "@babel/cli@^7.16.0", "@babel/cli@^7.4.4", "@babel/cli@^7.7.0", "@babel/cli@^7.7.4":
version "7.22.9"
resolved "https://registry.yarnpkg.com/@babel/cli/-/cli-7.22.9.tgz#501b3614aeda7399371f6d5991404f069b059986"
integrity sha512-nb2O7AThqRo7/E53EGiuAkMaRbb7J5Qp3RvN+dmua1U+kydm0oznkhqbTEG15yk26G/C3yL6OdZjzgl+DMXVVA==
@@ -733,7 +733,7 @@
resolved "https://registry.yarnpkg.com/@babel/compat-data/-/compat-data-7.22.9.tgz#71cdb00a1ce3a329ce4cbec3a44f9fef35669730"
integrity sha512-5UamI7xkUcJ3i9qVDS+KFDEK8/7oJ55/sJMB1Ge7IEapr7KfdfV/HErR+koZwOfd+SgtFKOKRhRakdg++DcJpQ==
"@babel/core@^7.0.0", "@babel/core@^7.1.5", "@babel/core@^7.1.6", "@babel/core@^7.11.0", "@babel/core@^7.11.6", "@babel/core@^7.12.3", "@babel/core@^7.13.8", "@babel/core@^7.14.0", "@babel/core@^7.4.4", "@babel/core@^7.5.5", "@babel/core@^7.7.2", "@babel/core@^7.7.4", "@babel/core@^7.7.5", "@babel/core@^7.8.4":
"@babel/core@^7.0.0", "@babel/core@^7.1.5", "@babel/core@^7.1.6", "@babel/core@^7.11.0", "@babel/core@^7.11.6", "@babel/core@^7.12.3", "@babel/core@^7.13.8", "@babel/core@^7.14.0", "@babel/core@^7.16.5", "@babel/core@^7.4.4", "@babel/core@^7.5.5", "@babel/core@^7.7.2", "@babel/core@^7.7.4", "@babel/core@^7.7.5", "@babel/core@^7.8.4":
version "7.22.9"
resolved "https://registry.yarnpkg.com/@babel/core/-/core-7.22.9.tgz#bd96492c68822198f33e8a256061da3cf391f58f"
integrity sha512-G2EgeufBcYw27U4hhoIwFcgc1XU7TlXJ3mv04oOv1WCuo900U/anZSPzEqNjwdjgffkk2Gs0AN0dW1CKVLcG7w==
@@ -971,6 +971,18 @@
chalk "^2.0.0"
js-tokens "^4.0.0"
"@babel/node@^7.16.5":
version "7.22.6"
resolved "https://registry.yarnpkg.com/@babel/node/-/node-7.22.6.tgz#a47b4f150f06bad1808823c4519690ded6c93911"
integrity sha512-Lt6v+RUQOTsEOXLv+KfjogLFkFfsLPPSoXZqmbngfVatkWjQPnFGHO0xjFRcN6XEvm3vsnZn+AWQiRpgZFsdIA==
dependencies:
"@babel/register" "^7.22.5"
commander "^4.0.1"
core-js "^3.30.2"
node-environment-flags "^1.0.5"
regenerator-runtime "^0.13.11"
v8flags "^3.1.1"
"@babel/parser@^7.1.0", "@babel/parser@^7.14.7", "@babel/parser@^7.18.4", "@babel/parser@^7.20.15", "@babel/parser@^7.20.7", "@babel/parser@^7.21.3", "@babel/parser@^7.22.5", "@babel/parser@^7.22.7", "@babel/parser@^7.6.0", "@babel/parser@^7.9.6":
version "7.22.7"
resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.22.7.tgz#df8cf085ce92ddbdbf668a7f186ce848c9036cae"
@@ -1570,7 +1582,7 @@
dependencies:
"@babel/plugin-transform-react-jsx" "^7.22.5"
"@babel/plugin-transform-react-jsx@^7.22.5", "@babel/plugin-transform-react-jsx@^7.3.0":
"@babel/plugin-transform-react-jsx@^7.16.5", "@babel/plugin-transform-react-jsx@^7.22.5", "@babel/plugin-transform-react-jsx@^7.3.0":
version "7.22.5"
resolved "https://registry.yarnpkg.com/@babel/plugin-transform-react-jsx/-/plugin-transform-react-jsx-7.22.5.tgz#932c291eb6dd1153359e2a90cb5e557dcf068416"
integrity sha512-rog5gZaVbUip5iWDMTYbVM15XQq+RkUKhET/IHR6oizR+JEoN6CAfTTuHcK4vwUyzca30qqHqEpzBOnaRMWYMA==
@@ -1792,7 +1804,7 @@
"@babel/plugin-transform-react-jsx-development" "^7.22.5"
"@babel/plugin-transform-react-pure-annotations" "^7.22.5"
"@babel/register@^7.0.0", "@babel/register@^7.13.8":
"@babel/register@^7.0.0", "@babel/register@^7.13.8", "@babel/register@^7.22.5":
version "7.22.5"
resolved "https://registry.yarnpkg.com/@babel/register/-/register-7.22.5.tgz#e4d8d0f615ea3233a27b5c6ada6750ee59559939"
integrity sha512-vV6pm/4CijSQ8Y47RH5SopXzursN35RQINfGJkmOlcpAtGuf94miFvIPhCKGQN7WGIcsgG1BHEX2KVdTYwTwUQ==
@@ -4400,27 +4412,222 @@
resolved "https://registry.yarnpkg.com/@xmldom/xmldom/-/xmldom-0.8.9.tgz#b6ef7457e826be8049667ae673eda7876eb049be"
integrity sha512-4VSbbcMoxc4KLjb1gs96SRmi7w4h1SF+fCoiK0XaQX62buCc1G5d0DC5bJ9xJBNPDSVCmIrcl8BiYxzjrqaaJA==
"@xmpp/jid@^0.0.2":
version "0.0.2"
resolved "https://registry.yarnpkg.com/@xmpp/jid/-/jid-0.0.2.tgz#0d528ca9d58dafc833665564ffe62f332a3167f2"
integrity sha512-z8riWz40gZF1xg0QYi/dA+hYLb3LAkugvfWac49X4YU/+nGu1pstB7oR6G7PCTOzZaFlZDbu699fyfBlcJfgNw==
"@xmpp/streamparser@^0.0.6":
version "0.0.6"
resolved "https://registry.yarnpkg.com/@xmpp/streamparser/-/streamparser-0.0.6.tgz#118033ea9db7c86a1cb46103f269ebff79f6f1ea"
integrity sha512-Kt5kkH3b0YNAzfRGww60dXo78D+yVItTw3GvSgwOj6LMPpKevVWrRf0r76Dd+krBQ9Gr4SAnVi0kInGfEpOUTA==
"@xmpp/base64@^0.13.1":
version "0.13.1"
resolved "https://registry.yarnpkg.com/@xmpp/base64/-/base64-0.13.1.tgz#92cb55554cea301e5e157d0f4e957d3c3d6e9c3e"
integrity sha512-ifzj81zZc8uhL9Nl8us2NUDfLt3qsbHr8lwdKmrDMk/9unY8aIGjzHdNBJoFFyJe8GSo1NFq3mS7X+X0TwkQYw==
dependencies:
"@xmpp/xml" "^0.1.3"
inherits "^2.0.3"
ltx "^2.5.0"
base-64 "^1.0.0"
"@xmpp/xml@^0.1.3":
version "0.1.3"
resolved "https://registry.yarnpkg.com/@xmpp/xml/-/xml-0.1.3.tgz#1f14399e53e419688558698f6c62e71e39a86a6e"
integrity sha512-FI/C+isEGhL8JLXJLIEWPWIysLHnDAW27k993Iwly+rPz/A3kiQYGz3W/0jNJJjLbBbeZPhfjpSB9MXM8j2PDQ==
"@xmpp/client-core@^0.13.1":
version "0.13.1"
resolved "https://registry.yarnpkg.com/@xmpp/client-core/-/client-core-0.13.1.tgz#508986bf87560a5f34940977fc6e832c8f420667"
integrity sha512-ANVcqzgDCmmUj/R9pf5rJGH41mL16Bo+DRJ+2trKoRHe9p5s0p6IssjhJtTOSVx6oh2ilPXMB8qoMPjTGzY6cw==
dependencies:
inherits "^2.0.3"
ltx "^2.6.2"
"@xmpp/connection" "^0.13.1"
"@xmpp/jid" "^0.13.1"
"@xmpp/xml" "^0.13.1"
"@xmpp/client@^0.13.1":
version "0.13.1"
resolved "https://registry.yarnpkg.com/@xmpp/client/-/client-0.13.1.tgz#6c8db3cd308479ef17b8f78e2eda9a68b4ebd2d6"
integrity sha512-DA+pOkWliTKN5C0Bod4rqlZ4hj/CiqQDHRhQgpx7Y/69qsUwK8M/9C02qylpyZSL2TFGzOM6ZMhr/jlMCsL9jQ==
dependencies:
"@babel/cli" "^7.16.0"
"@babel/core" "^7.16.5"
"@babel/node" "^7.16.5"
"@babel/plugin-transform-react-jsx" "^7.16.5"
"@xmpp/client-core" "^0.13.1"
"@xmpp/iq" "^0.13.1"
"@xmpp/middleware" "^0.13.1"
"@xmpp/reconnect" "^0.13.1"
"@xmpp/resolve" "^0.13.1"
"@xmpp/resource-binding" "^0.13.1"
"@xmpp/sasl" "^0.13.1"
"@xmpp/sasl-anonymous" "^0.13.1"
"@xmpp/sasl-plain" "^0.13.1"
"@xmpp/sasl-scram-sha-1" "^0.13.1"
"@xmpp/session-establishment" "^0.13.1"
"@xmpp/starttls" "^0.13.1"
"@xmpp/stream-features" "^0.13.1"
"@xmpp/stream-management" "^0.13.1"
"@xmpp/tcp" "^0.13.1"
"@xmpp/tls" "^0.13.1"
"@xmpp/websocket" "^0.13.1"
babel-plugin-jsx-pragmatic "^1.0.2"
"@xmpp/connection-tcp@^0.13.1":
version "0.13.1"
resolved "https://registry.yarnpkg.com/@xmpp/connection-tcp/-/connection-tcp-0.13.1.tgz#07a48223ac0dc2cc97fa8adf85d98a73418568f9"
integrity sha512-yTVrj5o5rPVbZT5ql5ljzzIZHnLkCuyTNEQpiU9IYvfjWjy4+E2DreUnpRf3IAbpARkMoPq5uQJchH0RE3WBjg==
dependencies:
"@xmpp/connection" "^0.13.1"
"@xmpp/xml" "^0.13.1"
"@xmpp/connection@^0.13.1":
version "0.13.1"
resolved "https://registry.yarnpkg.com/@xmpp/connection/-/connection-0.13.1.tgz#1c71e2dcfa8a2e72dda4dbf73fe0b520f8971851"
integrity sha512-A8ojaVRrvGtvRTXcWiOJMnBPAytLFvsz18g/jO9PbnhzuqqeJ6LxmCtyaKqchMdX0lhuZpo0JUgCSPnZ68tXrQ==
dependencies:
"@xmpp/error" "^0.13.1"
"@xmpp/events" "^0.13.1"
"@xmpp/jid" "^0.13.1"
"@xmpp/xml" "^0.13.1"
"@xmpp/error@^0.13.1":
version "0.13.1"
resolved "https://registry.yarnpkg.com/@xmpp/error/-/error-0.13.1.tgz#fba9ddd33e34f77616635b87b67e974637b23995"
integrity sha512-tKecj36xIGLhLctdYhUOxWs+ZdiJpl0Tfp/GhfrUCKLHj/wq14d62SP9kxa0sDNKOY1uqRq2N9gWZBQHuP+r2Q==
"@xmpp/events@^0.13.0", "@xmpp/events@^0.13.1":
version "0.13.1"
resolved "https://registry.yarnpkg.com/@xmpp/events/-/events-0.13.1.tgz#8bfa57117bb8c21da87e62a1985d65b7b1c342c2"
integrity sha512-c538zWUoD7KfMzMWGHyJkXvRYE5exzVjK6NAsMtfNtbVqw9SXJJaGLvDvYSXOQmKQaZz5guUuIUGiHJbr7yjsA==
dependencies:
events "^3.3.0"
"@xmpp/id@^0.13.1":
version "0.13.1"
resolved "https://registry.yarnpkg.com/@xmpp/id/-/id-0.13.1.tgz#771f5cc64e402cab0994e6e4e05bfc635a300948"
integrity sha512-ivc7kxfk5sU6PspdQvglsibcWRCr40nbaPEvGYbXO8ymFN6qps91DPlEt0Cc0XJExq7PXo0Yt7DACfe8f7K03g==
"@xmpp/iq@^0.13.1":
version "0.13.1"
resolved "https://registry.yarnpkg.com/@xmpp/iq/-/iq-0.13.1.tgz#29e5e62f6bad7cd73020948dc682c3bb2dd6e2ec"
integrity sha512-YyJj6up2aFTobTUmjdX86vs0+/WIB8i88QQjDDlzSKdMDDXgrB8B8JAMlEBfAsruAv/ZIwUnE4/yqCeMAehTuA==
dependencies:
"@xmpp/events" "^0.13.1"
"@xmpp/id" "^0.13.1"
"@xmpp/middleware" "^0.13.1"
"@xmpp/xml" "^0.13.1"
"@xmpp/jid@^0.13.1":
version "0.13.1"
resolved "https://registry.yarnpkg.com/@xmpp/jid/-/jid-0.13.1.tgz#043cd7c491feeb6a7c9f7b1b3581f94939670717"
integrity sha512-E5ulk4gfPQwPY71TWXapiWzoxxAJz3LP0bDIUXIfgvlf1/2QKP3EcYQ7o+qmI0cLEZwWmwluRGouylqhyuwcAw==
"@xmpp/middleware@^0.13.1":
version "0.13.1"
resolved "https://registry.yarnpkg.com/@xmpp/middleware/-/middleware-0.13.1.tgz#fe64e7f5d12fb74254684d96e17777fb4e7a44ed"
integrity sha512-t7kws9KMgaQURCDMcPjJOm/sEcC2Gs2YtpE35NaTR87NSwr8yZ37ZJL5Kki3Z4qhL6nhMXJPAprc6uqBn5q3Og==
dependencies:
"@xmpp/error" "^0.13.1"
"@xmpp/jid" "^0.13.1"
"@xmpp/xml" "^0.13.1"
koa-compose "^4.1.0"
"@xmpp/reconnect@^0.13.1":
version "0.13.1"
resolved "https://registry.yarnpkg.com/@xmpp/reconnect/-/reconnect-0.13.1.tgz#c815ec749a5c142fb9255af951d64e1dbadc419b"
integrity sha512-m/j/mTU7b3cOXP78uGzBbihmJMuXCYcTcwsTHlexj6tj6CE/vpuLNgxvf6pPkO7B9lH0HfezqU7ExHpS+4Nfaw==
dependencies:
"@xmpp/events" "^0.13.1"
"@xmpp/resolve@^0.13.1":
version "0.13.1"
resolved "https://registry.yarnpkg.com/@xmpp/resolve/-/resolve-0.13.1.tgz#19f7a23983b78b72af5ce6606382428c4e7e7421"
integrity sha512-Lgsl6C/uJCxmYr0jWWOCJMqYvKi5WzN6loZwP7f6ov2nLMOMEZ7TSb66z393/7Pd0hy6DqZeggESMAFOkQH+vw==
dependencies:
"@xmpp/events" "^0.13.0"
"@xmpp/xml" "^0.13.0"
node-fetch "^2.6.6"
"@xmpp/resource-binding@^0.13.1":
version "0.13.1"
resolved "https://registry.yarnpkg.com/@xmpp/resource-binding/-/resource-binding-0.13.1.tgz#76a8248ced24bc24cd4dff0a031ed376eafeb54b"
integrity sha512-S6PGlfufDTTDlh21ynyJrGR0sMeEYIRq+BKUl4QhsR19BvP0RUW0t8Ypx1QwDY3++ihqRjvCllCmtmFMY1iJsQ==
dependencies:
"@xmpp/xml" "^0.13.1"
"@xmpp/sasl-anonymous@^0.13.1":
version "0.13.1"
resolved "https://registry.yarnpkg.com/@xmpp/sasl-anonymous/-/sasl-anonymous-0.13.1.tgz#21139dafe4dbc8fb626e974daf64af9c46e469c5"
integrity sha512-l0Bqmva7xw10p8MelD2bHO10LwCPz6CEd/t5xO+Kw98hjI9lX6k5cxW7frvdnxRwPxJbGTciTQKHokYWR4luaA==
dependencies:
sasl-anonymous "^0.1.0"
"@xmpp/sasl-plain@^0.13.1":
version "0.13.1"
resolved "https://registry.yarnpkg.com/@xmpp/sasl-plain/-/sasl-plain-0.13.1.tgz#ccdf9c73f61fb203ec7822ae55e2c707d3aed305"
integrity sha512-Xx4ay67Mg6aQFeelTZuY5QatP3cCJsArAuD0AozHKzjUWzyLqqydsDS+yFN23pxkOZPGgyYVebc4gKti4jZ+GA==
dependencies:
sasl-plain "^0.1.0"
"@xmpp/sasl-scram-sha-1@^0.13.1":
version "0.13.1"
resolved "https://registry.yarnpkg.com/@xmpp/sasl-scram-sha-1/-/sasl-scram-sha-1-0.13.1.tgz#139a74f176301513da987ddc3eb98f1e58c8d9ad"
integrity sha512-qWyR5+v10pykTxQnKfNVUnCnZisA/UmC4Po5EQSgA5dNRuzraqwk/bH5PVi9+M0OcbtdNs9wCO2Hv06YA9AjwA==
dependencies:
sasl-scram-sha-1 "^1.2.1"
"@xmpp/sasl@^0.13.1":
version "0.13.1"
resolved "https://registry.yarnpkg.com/@xmpp/sasl/-/sasl-0.13.1.tgz#ee8484dda611ccff9b39243109b78c47abd01584"
integrity sha512-ynhKsL43EtezqJ9s476leHzliMudCAFS4xNG5x4ZFHoc7Iz5J6p6jFI89LGgnk9DeIdk9A/CFrPWTdyjhvyiTQ==
dependencies:
"@xmpp/base64" "^0.13.1"
"@xmpp/error" "^0.13.1"
"@xmpp/xml" "^0.13.1"
saslmechanisms "^0.1.1"
"@xmpp/session-establishment@^0.13.1":
version "0.13.1"
resolved "https://registry.yarnpkg.com/@xmpp/session-establishment/-/session-establishment-0.13.1.tgz#5fc6e6e2d96d78c4646c227620cdab69046c42ae"
integrity sha512-uba6BZeeSJtbHtU+pCumSiX/zuc9hUdN5dVRNjvRjr/ZcXLMuC5MroRyrld+fm/rQYQLJjF4BcIaxvysXTCAGA==
dependencies:
"@xmpp/xml" "^0.13.1"
"@xmpp/starttls@^0.13.1":
version "0.13.1"
resolved "https://registry.yarnpkg.com/@xmpp/starttls/-/starttls-0.13.1.tgz#20d9c399ea822985e5f9ba5636a9fa1cc0600cb9"
integrity sha512-rQumwpbD5+yclcXgPNDF7Jg1mzDFejHKZehD6JRti+Emsxayst/qFDq3uMO3x6P+nKexL4mMoKUtWHlJM7BUGw==
dependencies:
"@xmpp/events" "^0.13.1"
"@xmpp/tls" "^0.13.1"
"@xmpp/xml" "^0.13.1"
"@xmpp/stream-features@^0.13.1":
version "0.13.1"
resolved "https://registry.yarnpkg.com/@xmpp/stream-features/-/stream-features-0.13.1.tgz#1a91aadaf027bfb1e4b5073bb0130ebad7f0e091"
integrity sha512-yZg+CXBRVXsIQzu4SI5UYlDZHmg3wY6YXy4MbeLiI4O8OQ/oCz6OHJlHKUnFl+cGmjDXvhN4Ga6pRhbEIIqM/g==
"@xmpp/stream-management@^0.13.1":
version "0.13.1"
resolved "https://registry.yarnpkg.com/@xmpp/stream-management/-/stream-management-0.13.1.tgz#643b94ed243b81f8a6f74937a6906a21dd8ce889"
integrity sha512-06dhJAlGn+MU5ESrvIUg5xOS7azVE0swq86cx4SCv7t5dWL1WBj4xg2qigLn1hMnFkDw0bO/SOikXTxqGii/hA==
dependencies:
"@xmpp/xml" "^0.13.1"
"@xmpp/tcp@^0.13.1":
version "0.13.1"
resolved "https://registry.yarnpkg.com/@xmpp/tcp/-/tcp-0.13.1.tgz#7a7a08791282a9c8a319cc9f6feee1b94c1bdde6"
integrity sha512-N/AQBT+6Updb/E8A1SYdMbIJGaRFG8+7+bkm9MLw44UsihA6Yg0fmvC02O+BjNg3tXGkcMYLhu/8NYpjK4NlQg==
dependencies:
"@xmpp/connection-tcp" "^0.13.1"
"@xmpp/tls@^0.13.1":
version "0.13.1"
resolved "https://registry.yarnpkg.com/@xmpp/tls/-/tls-0.13.1.tgz#bf3891a0fd381478cc90f88358b7e8b706209bd4"
integrity sha512-ecOmnrZmRbMMPDdvDNirw7sYQHt//YV7UJgfS4c9M+R5ljP2eUJiAiotEEykjKJ6CJPMMxdTnrLLP3ullsgfog==
dependencies:
"@xmpp/connection" "^0.13.1"
"@xmpp/connection-tcp" "^0.13.1"
"@xmpp/websocket@^0.13.1":
version "0.13.1"
resolved "https://registry.yarnpkg.com/@xmpp/websocket/-/websocket-0.13.1.tgz#a94196555244a0490fa58046cce4322f84fbf58e"
integrity sha512-UyMYyy/0Cm2UtVoAlhfV31u6LzGrBUU0h7I0qGCq1yYPQpscehNl8lXE4vmB8OfpeDvSZmvGk2vJAvGxzunoDQ==
dependencies:
"@xmpp/connection" "^0.13.1"
"@xmpp/xml" "^0.13.1"
ws "^8.4.0"
"@xmpp/xml@^0.13.0", "@xmpp/xml@^0.13.1":
version "0.13.1"
resolved "https://registry.yarnpkg.com/@xmpp/xml/-/xml-0.13.1.tgz#856b992dec1978fba89e06bd4718d91a31188916"
integrity sha512-GMfYB3PKY9QzsMnl3dPohgPBGd1JQTBanKOaZexJCSYJN2cdYLU2HGhjMtDlGSno6h9U+t0oO7r0igsJwyigwg==
dependencies:
ltx "^3.0.0"
"@xtuc/ieee754@^1.2.0":
version "1.2.0"
@@ -5356,6 +5563,13 @@ babel-plugin-jest-hoist@^29.5.0:
"@types/babel__core" "^7.1.14"
"@types/babel__traverse" "^7.0.6"
babel-plugin-jsx-pragmatic@^1.0.2:
version "1.0.2"
resolved "https://registry.yarnpkg.com/babel-plugin-jsx-pragmatic/-/babel-plugin-jsx-pragmatic-1.0.2.tgz#41e2beb8642235f34b2a7ab12ca39e07201b8e59"
integrity sha512-+qeGXSbHZwinZzO6R3wP+6XDKup83Pgg2B3TQt2zwfDdgC7NqT9Kd3ws7iqk53zAO/8iOIRU6VUyUzt2LDE3Eg==
dependencies:
babel-plugin-syntax-jsx "^6.0.0"
babel-plugin-polyfill-corejs2@^0.4.4:
version "0.4.4"
resolved "https://registry.yarnpkg.com/babel-plugin-polyfill-corejs2/-/babel-plugin-polyfill-corejs2-0.4.4.tgz#9f9a0e1cd9d645cc246a5e094db5c3aa913ccd2b"
@@ -5380,6 +5594,11 @@ babel-plugin-polyfill-regenerator@^0.5.1:
dependencies:
"@babel/helper-define-polyfill-provider" "^0.4.1"
babel-plugin-syntax-jsx@^6.0.0:
version "6.18.0"
resolved "https://registry.yarnpkg.com/babel-plugin-syntax-jsx/-/babel-plugin-syntax-jsx-6.18.0.tgz#0af32a9a6e13ca7a3fd5069e62d7b0f58d0d8946"
integrity sha512-qrPaCSo9c8RHNRHIotaufGbuOBN8rtdC4QrrFFc43vyWCCz7Kl7GL1PGaXtMGQZUXrkCjNEgxDfmAuAabr/rlw==
babel-plugin-transform-dev@^2.0.1:
version "2.0.1"
resolved "https://registry.yarnpkg.com/babel-plugin-transform-dev/-/babel-plugin-transform-dev-2.0.1.tgz#fec5bbfb6b9576cd8413df5bd0ae7aca32b0a2d4"
@@ -5468,16 +5687,16 @@ bach@^1.0.0:
async-settle "^1.0.0"
now-and-later "^2.0.0"
backoff@~2.3.0:
version "2.3.0"
resolved "https://registry.yarnpkg.com/backoff/-/backoff-2.3.0.tgz#ee7c7e38093f92e472859db635e7652454fc21ea"
integrity sha512-ljr33cUQ/vyXE/60QuRO+WKGW4PzQ5OTWNXPWQwOTx5gh43q0pZocaVyXoU2gvFtasMIdIohdm9s01qoT6IJBQ==
balanced-match@^1.0.0:
version "1.0.2"
resolved "https://registry.yarnpkg.com/balanced-match/-/balanced-match-1.0.2.tgz#e83e3a7e3f300b34cb9d87f615fa0cbf357690ee"
integrity sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==
base-64@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/base-64/-/base-64-1.0.0.tgz#09d0f2084e32a3fd08c2475b973788eee6ae8f4a"
integrity sha512-kwDPIFCGx0NZHog36dj+tHiwP4QMzsZ3AgMViUBKI0+V5n4U0ufTCUMhnQ04diaRI8EX/QcPfql7zlhZ7j4zgg==
base64-js@^1.0.2, base64-js@^1.3.1:
version "1.5.1"
resolved "https://registry.yarnpkg.com/base64-js/-/base64-js-1.5.1.tgz#1b1b440160a5bf7ad40b650f095963481903930a"
@@ -5581,6 +5800,11 @@ bindings@^1.5.0:
dependencies:
file-uri-to-path "1.0.0"
bitwise-xor@0.0.0:
version "0.0.0"
resolved "https://registry.yarnpkg.com/bitwise-xor/-/bitwise-xor-0.0.0.tgz#040a8172b5bb8cc562b0b7119f230b2a1a780e3d"
integrity sha512-3eOkZMBO04dRBn7551o6+IX9Ua7V+B/IubS7sffoa/VC3jdBM4YbuD+LjUNFojY7H+gptMUdTaQgHWTce4L3kw==
bl@^4.0.3, bl@^4.1.0:
version "4.1.0"
resolved "https://registry.yarnpkg.com/bl/-/bl-4.1.0.tgz#451535264182bec2fbbc83a62ab98cf11d9f7b3a"
@@ -5785,11 +6009,6 @@ browser-pack@^6.0.1, browser-pack@^6.0.2:
through2 "^2.0.0"
umd "^3.0.0"
browser-request@^0.3.3:
version "0.3.3"
resolved "https://registry.yarnpkg.com/browser-request/-/browser-request-0.3.3.tgz#9ece5b5aca89a29932242e18bf933def9876cc17"
integrity sha512-YyNI4qJJ+piQG6MMEuo7J3Bzaqssufx04zpEKYfSrl/1Op59HWali9zMtBpXnkmqMcOuWJPZvudrm9wISmnCbg==
browser-resolve@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/browser-resolve/-/browser-resolve-2.0.0.tgz#99b7304cb392f8d73dba741bb2d7da28c6d7842b"
@@ -7128,7 +7347,7 @@ core-js@^2.4.0, core-js@^2.5.0:
resolved "https://registry.yarnpkg.com/core-js/-/core-js-2.6.12.tgz#d9333dfa7b065e347cc5682219d6f690859cc2ec"
integrity sha512-Kb2wC0fvsWfQrgk8HU5lW6U/Lcs8+9aaYcy4ZFc6DDlo4nZ7n70dEgE5rtR0oG6ufKDUnrwfWL1mXR5ljDatrQ==
core-js@^3.6.4, core-js@^3.6.5:
core-js@^3.30.2, core-js@^3.6.4, core-js@^3.6.5:
version "3.31.1"
resolved "https://registry.yarnpkg.com/core-js/-/core-js-3.31.1.tgz#f2b0eea9be9da0def2c5fece71064a7e5d687653"
integrity sha512-2sKLtfq1eFST7l7v62zaqXacPc7uG8ZAya8ogijLhTtaKNcpzpB4TMoTw2Si+8GYKRwFPMMtUT0263QFWFfqyQ==
@@ -7187,7 +7406,7 @@ create-hash@^1.1.0, create-hash@^1.1.2, create-hash@^1.2.0:
ripemd160 "^2.0.1"
sha.js "^2.4.0"
create-hmac@^1.1.0, create-hmac@^1.1.4, create-hmac@^1.1.7:
create-hmac@^1.1.0, create-hmac@^1.1.3, create-hmac@^1.1.4, create-hmac@^1.1.7:
version "1.1.7"
resolved "https://registry.yarnpkg.com/create-hmac/-/create-hmac-1.1.7.tgz#69170c78b3ab957147b2b8b04572e47ead2243ff"
integrity sha512-MJG9liiZ+ogc4TzUwuvbER1JRdgvUFSB5+VR/g5h82fGaIRWMWddtKBHi7/sVhfjQZ6SehlyhvQYrcYkaUIpLg==
@@ -13737,11 +13956,6 @@ lodash._root@^3.0.0:
resolved "https://registry.yarnpkg.com/lodash._root/-/lodash._root-3.0.1.tgz#fba1c4524c19ee9a5f8136b4609f017cf4ded692"
integrity sha512-O0pWuFSK6x4EXhM1dhZ8gchNtG7JMqBtrHdoUFUWXD7dJnNSUze1GuyQr5sOs0aCvgGeI3o/OJW8f4ca7FDxmQ==
lodash.assign@^4.0.0:
version "4.2.0"
resolved "https://registry.yarnpkg.com/lodash.assign/-/lodash.assign-4.2.0.tgz#0d99f3ccd7a6d261d19bdaeb9245005d285808e7"
integrity sha512-hFuH8TY+Yji7Eja3mGiuAxBqLagejScbG8GbG0j6o9vzn0YL14My+ktnqtZgFTosKymC9/44wP6s7xyuLfnClw==
lodash.camelcase@^4.3.0:
version "4.3.0"
resolved "https://registry.yarnpkg.com/lodash.camelcase/-/lodash.camelcase-4.3.0.tgz#b28aa6288a2b9fc651035c7711f65ab6190331a6"
@@ -14027,12 +14241,10 @@ ltgt@^2.1.2:
resolved "https://registry.yarnpkg.com/ltgt/-/ltgt-2.2.1.tgz#f35ca91c493f7b73da0e07495304f17b31f87ee5"
integrity sha512-AI2r85+4MquTw9ZYqabu4nMwy9Oftlfa/e/52t9IjtfG+mGBbTNdAoZ3RQKLHR6r0wQnwZnPIEh/Ya6XTWAKNA==
ltx@^2.5.0, ltx@^2.6.2:
version "2.10.0"
resolved "https://registry.yarnpkg.com/ltx/-/ltx-2.10.0.tgz#0b794b898e01d9dcc61b54b160e78869003bbb20"
integrity sha512-RB4zR6Mrp/0wTNS9WxMvpgfht/7u/8QAC9DpPD19opL/4OASPa28uoliFqeDkLUU8pQ4aeAfATBZmz1aSAHkMw==
dependencies:
inherits "^2.0.4"
ltx@^3.0.0:
version "3.0.0"
resolved "https://registry.yarnpkg.com/ltx/-/ltx-3.0.0.tgz#f2a2260814165c5e28d455f9f7db2178ed295187"
integrity sha512-bu3/4/ApUmMqVNuIkHaRhqVtEi6didYcBDIF56xhPRCzVpdztCipZ62CUuaxMlMBUzaVL93+4LZRqe02fuAG6A==
magic-string@^0.30.0:
version "0.30.1"
@@ -14170,7 +14382,7 @@ math-random@^1.0.1:
resolved "https://registry.yarnpkg.com/math-random/-/math-random-1.0.4.tgz#5dd6943c938548267016d4e34f057583080c514c"
integrity sha512-rUxjysqif/BZQH2yhd5Aaq7vXMSx9NdEsQcyA07uEzIvxgI7zIr33gGsh+RU0/XjmQpCW7RsVof1vlkvQVCK5A==
md5.js@^1.3.3, md5.js@^1.3.4:
md5.js@^1.3.4:
version "1.3.5"
resolved "https://registry.yarnpkg.com/md5.js/-/md5.js-1.3.5.tgz#b5d07b8e3216e3e27cd728d72f70d1e6a342005f"
integrity sha512-xitP+WxNPcTTOgnTJcrhM0xvdPepipPSf3I8EIpGKeFLjt3PlJLIDG3u8EX53ZIubkb+5U2+3rELYpEhHhzdkg==
@@ -14828,6 +15040,14 @@ node-domexception@^1.0.0:
resolved "https://registry.yarnpkg.com/node-domexception/-/node-domexception-1.0.0.tgz#6888db46a1f71c0b76b3f7555016b63fe64766e5"
integrity sha512-/jKZoMpw0F8GRwl4/eLROPA3cfcXtLApP0QzLmUT/HuPCZWyB7IY9ZrMeKw2O/nFIqPQB3PVM9aYm0F312AXDQ==
node-environment-flags@^1.0.5:
version "1.0.6"
resolved "https://registry.yarnpkg.com/node-environment-flags/-/node-environment-flags-1.0.6.tgz#a30ac13621f6f7d674260a54dede048c3982c088"
integrity sha512-5Evy2epuL+6TM0lCQGpFIj6KwiEsGh1SrHUhTbNX+sLbBtjidPZFAnVK9y5yU1+h//RitLbRHTIMyxQPtxMdHw==
dependencies:
object.getownpropertydescriptors "^2.0.3"
semver "^5.7.0"
node-fetch@^1.0.1:
version "1.7.3"
resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-1.7.3.tgz#980f6f72d85211a5347c6b2bc18c5b84c3eb47ef"
@@ -14836,7 +15056,7 @@ node-fetch@^1.0.1:
encoding "^0.1.11"
is-stream "^1.0.1"
node-fetch@^2.6.7:
node-fetch@^2.6.6, node-fetch@^2.6.7:
version "2.6.12"
resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.6.12.tgz#02eb8e22074018e3d5a83016649d04df0e348fba"
integrity sha512-C/fGU2E8ToujUivIO0H+tpQ6HWo4eEmchoPIoXtxCrVghxdKq+QOHqEZW7tuP3KlV3bC8FRMO5nMCC7Zm1VP6g==
@@ -14927,38 +15147,6 @@ node-version@^1.0.0:
resolved "https://registry.yarnpkg.com/node-version/-/node-version-1.2.0.tgz#34fde3ffa8e1149bd323983479dda620e1b5060d"
integrity sha512-ma6oU4Sk0qOoKEAymVoTvk8EdXEobdS7m/mAGhDJ8Rouugho48crHBORAmy5BoOcv8wraPM6xumapQp5hl4iIQ==
node-xmpp-client@^3.0.0:
version "3.2.0"
resolved "https://registry.yarnpkg.com/node-xmpp-client/-/node-xmpp-client-3.2.0.tgz#af4527df0cc5abd2690cba2139cc1ecdc81ea189"
integrity sha512-UviV0XNomTPPAOnPeeFlZIg9eXN4+w1atj/NtoUKAQir/MTsO4Ve/nT6UdkZFusB2bOZ9REpGjvop4nhLidWYg==
dependencies:
browser-request "^0.3.3"
debug "^2.2.0"
md5.js "^1.3.3"
minimist "^1.2.0"
node-xmpp-core "^5.0.9"
request "^2.65.0"
ws "^1.1.1"
node-xmpp-core@^5.0.9:
version "5.0.9"
resolved "https://registry.yarnpkg.com/node-xmpp-core/-/node-xmpp-core-5.0.9.tgz#5c28c28edb1fb3f8beba2c6760777613f48f342a"
integrity sha512-F1ODQf95i3H9VLhDgAkbPti4T8uk8bgOeATQ0dSfAJVbvrJZYVGPC/SzGreYXgQmV65b7lxHqejzhSBtYpxc4Q==
dependencies:
"@xmpp/jid" "^0.0.2"
"@xmpp/streamparser" "^0.0.6"
"@xmpp/xml" "^0.1.3"
debug "^2.2.0"
inherits "^2.0.1"
lodash.assign "^4.0.0"
node-xmpp-tls-connect "^1.0.1"
reconnect-core "https://github.com/dodo/reconnect-core/tarball/merged"
node-xmpp-tls-connect@^1.0.1:
version "1.0.1"
resolved "https://registry.yarnpkg.com/node-xmpp-tls-connect/-/node-xmpp-tls-connect-1.0.1.tgz#91ace43ac26b138861b2be478df9df19d61dc5c3"
integrity sha512-tDN4ZBb8rDzlvZHQdYHW4NVPs4mZZYQPtv8anAb7ff8Un0mIid3cVxEfUr7COZfMeB5+YvSUz8Zutr1A+/v9LQ==
node-zone@^0.4.0:
version "0.4.0"
resolved "https://registry.yarnpkg.com/node-zone/-/node-zone-0.4.0.tgz#b058401a2e7c4bd34cb8cda8ee9d61360f620711"
@@ -15445,11 +15633,6 @@ optionator@^0.9.3:
prelude-ls "^1.2.1"
type-check "^0.4.0"
options@>=0.0.5:
version "0.0.6"
resolved "https://registry.yarnpkg.com/options/-/options-0.0.6.tgz#ec22d312806bb53e731773e7cdaefcf1c643128f"
integrity sha512-bOj3L1ypm++N+n7CEbbe473A414AB7z+amKYshRb//iuL3MpdDCLhPnw6aVTdKB9g5ZRVHIEp8eUln6L2NUStg==
ora@^5.4.1:
version "5.4.1"
resolved "https://registry.yarnpkg.com/ora/-/ora-5.4.1.tgz#1b2678426af4ac4a509008e5e4ac9e9959db9e18"
@@ -17681,12 +17864,6 @@ rechoir@^0.6.2:
dependencies:
resolve "^1.1.6"
"reconnect-core@https://github.com/dodo/reconnect-core/tarball/merged":
version "0.0.1"
resolved "https://github.com/dodo/reconnect-core/tarball/merged#b9daf2adc45b19a6cc5fd2f048f8d9406cece498"
dependencies:
backoff "~2.3.0"
redent@^3.0.0:
version "3.0.0"
resolved "https://registry.yarnpkg.com/redent/-/redent-3.0.0.tgz#e557b7998316bb53c9f1f56fa626352c6963059f"
@@ -17918,7 +18095,7 @@ replace-homedir@^1.0.0:
is-absolute "^1.0.0"
remove-trailing-separator "^1.1.0"
request@^2.65.0, request@^2.74.0, request@^2.87.0:
request@^2.74.0, request@^2.87.0:
version "2.88.2"
resolved "https://registry.yarnpkg.com/request/-/request-2.88.2.tgz#d73c918731cb5a87da047e207234146f664d12b3"
integrity sha512-MsvtOrfG9ZcrOwAW+Qi+F6HbD0CWXEh9ou77uOb7FM2WPhwT7smM833PzanhJLsgXjN89Ir6V2PczXNnMpwKhw==
@@ -18248,6 +18425,31 @@ safe-regex@^1.1.0:
resolved "https://registry.yarnpkg.com/safer-buffer/-/safer-buffer-2.1.2.tgz#44fa161b0187b9549dd84bb91802f9bd8385cd6a"
integrity sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==
sasl-anonymous@^0.1.0:
version "0.1.0"
resolved "https://registry.yarnpkg.com/sasl-anonymous/-/sasl-anonymous-0.1.0.tgz#f544c7e824df2a40d9ad4733829572cc8d9ed5a5"
integrity sha512-x+0sdsV0Gie2EexxAUsx6ZoB+X6OCthlNBvAQncQxreEWQJByAPntj0EAgTlJc2kZicoc+yFzeR6cl8VfsQGfA==
sasl-plain@^0.1.0:
version "0.1.0"
resolved "https://registry.yarnpkg.com/sasl-plain/-/sasl-plain-0.1.0.tgz#cf145e7c02222b64d60c0806d9cd2ae5380426cc"
integrity sha512-X8mCSfR8y0NryTu0tuVyr4IS2jBunBgyG+3a0gEEkd0nlHGiyqJhlc4EIkzmSwaa7F8S4yo+LS6Cu5qxRkJrmg==
sasl-scram-sha-1@^1.2.1:
version "1.2.1"
resolved "https://registry.yarnpkg.com/sasl-scram-sha-1/-/sasl-scram-sha-1-1.2.1.tgz#d88d51feaa0ff320d8eb1d6fc75657653f9dcd4b"
integrity sha512-o63gNo+EGsk1ML0bNeUAjRomIIcG7VaUyA+ffhd9MME5BjqVEpp42YkmBBZqzz1KmJG3YqpRLE4PfUe7FjexaA==
dependencies:
bitwise-xor "0.0.0"
create-hash "^1.1.0"
create-hmac "^1.1.3"
randombytes "^2.0.1"
saslmechanisms@^0.1.1:
version "0.1.1"
resolved "https://registry.yarnpkg.com/saslmechanisms/-/saslmechanisms-0.1.1.tgz#478be1429500fcfaa780be88b3343ced7d2a9182"
integrity sha512-pVlvK5ysevz8MzybRnDIa2YMxn0OJ7b9lDiWhMoaKPoJ7YkAg/7YtNjUgaYzElkwHxsw8dBMhaEn7UP6zxEwPg==
sass@^1.38.1:
version "1.63.6"
resolved "https://registry.yarnpkg.com/sass/-/sass-1.63.6.tgz#481610e612902e0c31c46b46cf2dad66943283ea"
@@ -18322,7 +18524,7 @@ semver-greatest-satisfied-range@^1.1.0:
dependencies:
sver-compat "^1.5.0"
"semver@2 || 3 || 4 || 5", semver@^5.1.0, semver@^5.5.0, semver@^5.6.0, semver@^5.7.1:
"semver@2 || 3 || 4 || 5", semver@^5.1.0, semver@^5.5.0, semver@^5.6.0, semver@^5.7.0, semver@^5.7.1:
version "5.7.2"
resolved "https://registry.yarnpkg.com/semver/-/semver-5.7.2.tgz#48d55db737c3287cd4835e17fa13feace1c41ef8"
integrity sha512-cBznnQ9KjJqU67B52RMC65CMarK2600WFnbkcaiwWq3xy/5haFJlshgnpjovMVJ+Hff49d8GEn0b87C5pDQ10g==
@@ -20220,11 +20422,6 @@ uid2@0.0.x:
resolved "https://registry.yarnpkg.com/uid2/-/uid2-0.0.4.tgz#033f3b1d5d32505f5ce5f888b9f3b667123c0a44"
integrity sha512-IevTus0SbGwQzYh3+fRsAMTVVPOoIVufzacXcHPmdlle1jUpq7BRL+mw3dgeLanvGZdwwbWhRV6XrcFNdBmjWA==
ultron@1.0.x:
version "1.0.2"
resolved "https://registry.yarnpkg.com/ultron/-/ultron-1.0.2.tgz#ace116ab557cd197386a4e88f4685378c8b2e4fa"
integrity sha512-QMpnpVtYaWEeY+MwKDN/UdKlE/LsFZXM5lO1u7GaZzNgmIbGixHEmVMIKT+vqYOALu3m5GYQy9kz4Xu4IVn7Ow==
umd@^3.0.0:
version "3.0.3"
resolved "https://registry.yarnpkg.com/umd/-/umd-3.0.3.tgz#aa9fe653c42b9097678489c01000acb69f0b26cf"
@@ -20591,7 +20788,7 @@ v8-to-istanbul@^9.0.1:
"@types/istanbul-lib-coverage" "^2.0.1"
convert-source-map "^1.6.0"
v8flags@^3.2.0:
v8flags@^3.1.1, v8flags@^3.2.0:
version "3.2.0"
resolved "https://registry.yarnpkg.com/v8flags/-/v8flags-3.2.0.tgz#b243e3b4dfd731fa774e7492128109a0fe66d656"
integrity sha512-mH8etigqMfiGWdeXpaaqGfs6BndypxusHHcv2qSHyZkGEznCd/qAXCWWRzeowtL54147cktFOC4P5y+kl8d8Jg==
@@ -21371,14 +21568,6 @@ write-file-atomic@^4.0.2:
imurmurhash "^0.1.4"
signal-exit "^3.0.7"
ws@^1.1.1:
version "1.1.5"
resolved "https://registry.yarnpkg.com/ws/-/ws-1.1.5.tgz#cbd9e6e75e09fc5d2c90015f21f0c40875e0dd51"
integrity sha512-o3KqipXNUdS7wpQzBHSe180lBGO60SoK0yVo3CYJgb2MkobuWuBX6dhkYP5ORCLd55y+SaflMOV5fqAB53ux4w==
dependencies:
options ">=0.0.5"
ultron "1.0.x"
ws@^6.2.1:
version "6.2.2"
resolved "https://registry.yarnpkg.com/ws/-/ws-6.2.2.tgz#dd5cdbd57a9979916097652d78f1cc5faea0c32e"
@@ -21391,7 +21580,7 @@ ws@^7, ws@^7.5.5:
resolved "https://registry.yarnpkg.com/ws/-/ws-7.5.9.tgz#54fa7db29f4c7cec68b1ddd3a89de099942bb591"
integrity sha512-F+P9Jil7UiSKSkppIiD94dN07AwvFixvLIj1Og1Rl9GGMuNipJnV9JzjD6XuqmAeiswGvUmNLjr5cFuXwNS77Q==
ws@^8.2.3, ws@^8.3.0, ws@^8.5.0:
ws@^8.2.3, ws@^8.3.0, ws@^8.4.0, ws@^8.5.0:
version "8.13.0"
resolved "https://registry.yarnpkg.com/ws/-/ws-8.13.0.tgz#9a9fb92f93cf41512a0735c8f4dd09b8a1211cd0"
integrity sha512-x9vcZYTrFPC7aSIbj7sRCYo7L/Xb8Iy+pW0ng0wt2vCJv7M9HOMy0UoN3rr+IFC7hb7vXoqS+P9ktyLLLhO+LA==