feat(remotes): AWS S3 backup storage (#5037)

Nicolas Raynaud 2020-07-30 16:47:04 +02:00 committed by GitHub
parent ffa431a3cd
commit 407586e2d5
15 changed files with 527 additions and 107 deletions

View File

@ -25,6 +25,7 @@
"@marsaud/smb2": "^0.15.0",
"@sindresorhus/df": "^3.1.1",
"@xen-orchestra/async-map": "^0.0.0",
"aws-sdk": "^2.686.0",
"decorator-synchronized": "^0.5.0",
"execa": "^4.0.2",
"fs-extra": "^9.0.0",

View File

@ -5,7 +5,7 @@ import getStream from 'get-stream'
import asyncMap from '@xen-orchestra/async-map'
import limit from 'limit-concurrency-decorator'
import path from 'path'
import path, { basename, dirname } from 'path'
import synchronized from 'decorator-synchronized'
import { fromCallback, fromEvent, ignoreErrors, timeout } from 'promise-toolbox'
import { parse } from 'xo-remote-parser'
@ -121,6 +121,7 @@ export default class RemoteHandlerAbstract {
await this.__closeFile(fd)
}
// TODO: remove method
async createOutputStream(
file: File,
{ checksum = false, ...options }: Object = {}
@ -221,19 +222,15 @@ export default class RemoteHandlerAbstract {
)
}
createWriteStream(
file: File,
options: { end?: number, flags?: string, start?: number } = {}
): Promise<LaxWritable> {
return timeout.call(
this._createWriteStream(
typeof file === 'string' ? normalizePath(file) : file,
{
flags: 'wx',
...options,
}
)
)
}
// write a stream to a file using a temporary file
async outputStream(
input: Readable | Promise<Readable>,
path: string,
{ checksum = true }: { checksum?: boolean } = {}
): Promise<void> {
path = normalizePath(path)
input = await input
return this._outputStream(input, path, { checksum })
}
// Free the resources possibly dedicated to put the remote at work, when it
@ -321,18 +318,6 @@ export default class RemoteHandlerAbstract {
return this._readFile(normalizePath(file), { flags })
}
async refreshChecksum(path: string): Promise<void> {
path = normalizePath(path)
const stream = (await this._createReadStream(path, { flags: 'r' })).pipe(
createChecksumStream()
)
stream.resume() // start reading the whole file
await this._outputFile(checksumFile(path), await stream.checksum, {
flags: 'wx',
})
}
async rename(
oldPath: string,
newPath: string,
@ -548,6 +533,22 @@ export default class RemoteHandlerAbstract {
return this._outputFile(file, data, options)
}
async _outputStream(input, path, { checksum }) {
const tmpPath = `${dirname(path)}/.${basename(path)}`
const output = await this.createOutputStream(tmpPath, { checksum })
try {
input.pipe(output)
await fromEvent(output, 'finish')
await output.checksumWritten
// $FlowFixMe
await input.task
await this.rename(tmpPath, path, { checksum })
} catch (error) {
await this.unlink(tmpPath, { checksum })
throw error
}
}
_read(
file: File,
buffer: Buffer,

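A quick usage sketch of the new outputStream() helper above — hedged: `handler` stands for any concrete RemoteHandlerAbstract subclass, the file names are made up, and the `<name>.checksum` sidecar convention is the one visible in s3.js further down:

import { createReadStream } from 'fs'

async function saveBackup(handler) {
  // accepts a Readable (or a promise of one); data is first written to a
  // hidden temporary file (`backups/.vm.xva`), then renamed on success,
  // so a crash never leaves a partial file under the final name
  await handler.outputStream(createReadStream('./vm.xva'), 'backups/vm.xva', {
    checksum: true, // the default; also writes a `backups/vm.xva.checksum` sidecar
  })
}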
View File

@ -42,18 +42,6 @@ describe('createOutputStream()', () => {
})
})
describe('createReadStream()', () => {
it(`throws in case of timeout`, async () => {
const testHandler = new TestHandler({
createReadStream: () => new Promise(() => {}),
})
const promise = testHandler.createReadStream('file')
jest.advanceTimersByTime(TIMEOUT)
await expect(promise).rejects.toThrowError(TimeoutError)
})
})
describe('getInfo()', () => {
it('throws in case of timeout', async () => {
const testHandler = new TestHandler({

View File

@ -2,7 +2,6 @@
import 'dotenv/config'
import asyncIteratorToStream from 'async-iterator-to-stream'
import getStream from 'get-stream'
import { forOwn, random } from 'lodash'
import { fromCallback } from 'promise-toolbox'
import { pipeline } from 'readable-stream'
@ -91,31 +90,6 @@ handlers.forEach(url => {
})
})
describe('#createReadStream()', () => {
beforeEach(() => handler.outputFile('file', TEST_DATA))
testWithFileDescriptor('file', 'r', async ({ file, flags }) => {
await expect(
await getStream.buffer(
await handler.createReadStream(file, { flags })
)
).toEqual(TEST_DATA)
})
})
describe('#createWriteStream()', () => {
testWithFileDescriptor('file', 'wx', async ({ file, flags }) => {
const stream = await handler.createWriteStream(file, { flags })
await fromCallback(pipeline, createTestDataStream(), stream)
await expect(await handler.readFile('file')).toEqual(TEST_DATA)
})
it('fails if parent dir is missing', async () => {
const error = await rejectionOf(handler.createWriteStream('dir/file'))
expect(error.code).toBe('ENOENT')
})
})
describe('#getInfo()', () => {
let info
beforeAll(async () => {

View File

@ -4,6 +4,7 @@ import execa from 'execa'
import type RemoteHandler from './abstract'
import RemoteHandlerLocal from './local'
import RemoteHandlerNfs from './nfs'
import RemoteHandlerS3 from './s3'
import RemoteHandlerSmb from './smb'
import RemoteHandlerSmbMount from './smb-mount'
@ -13,6 +14,7 @@ export type Remote = { url: string }
const HANDLERS = {
file: RemoteHandlerLocal,
nfs: RemoteHandlerNfs,
s3: RemoteHandlerS3,
}
try {

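A hedged sketch of the new registration in use: getHandler is assumed to be the factory this index exports, outputFile()/list() are assumed to be the abstract class's public wrappers around the _writeFile()/_list() hooks implemented in s3.js below, and the credentials are the dummy values from the xo-remote-parser test fixture:

import { getHandler } from '@xen-orchestra/fs'

async function demo() {
  const handler = getHandler({
    url: 's3://AKIAS:XSuBupZ0mJlu%2B@s3-us-west-2.amazonaws.com/test-bucket/dir',
  })
  // xo-remote-parser reports type 's3', so HANDLERS.s3 → RemoteHandlerS3
  await handler.outputFile('hello.txt', Buffer.from('hello')) // → PutObject
  console.log(await handler.list('/')) // immediate children under the dir prefix
}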
@xen-orchestra/fs/src/s3.js (new file, 253 lines)
View File

@ -0,0 +1,253 @@
import AWS from 'aws-sdk'
import { parse } from 'xo-remote-parser'
import RemoteHandlerAbstract from './abstract'
import { createChecksumStream } from './checksum'
// endpoints https://docs.aws.amazon.com/general/latest/gr/s3.html
export default class S3Handler extends RemoteHandlerAbstract {
constructor(remote, _opts) {
super(remote)
const { host, path, username, password } = parse(remote.url)
// https://www.zenko.io/blog/first-things-first-getting-started-scality-s3-server/
this._s3 = new AWS.S3({
accessKeyId: username,
apiVersion: '2006-03-01',
endpoint: host,
s3ForcePathStyle: true,
secretAccessKey: password,
signatureVersion: 'v4',
})
const splitPath = path.split('/').filter(s => s.length)
this._bucket = splitPath.shift()
this._dir = splitPath.join('/')
}
get type() {
return 's3'
}
_createParams(file) {
return { Bucket: this._bucket, Key: this._dir + file }
}
async _outputStream(input, path, { checksum }) {
let inputStream = input
if (checksum) {
const checksumStream = createChecksumStream()
const forwardError = error => {
checksumStream.emit('error', error)
}
input.pipe(checksumStream)
input.on('error', forwardError)
inputStream = checksumStream
}
const upload = this._s3.upload({
...this._createParams(path),
Body: inputStream,
})
await upload.promise()
if (checksum) {
const checksum = await inputStream.checksum
const params = {
...this._createParams(path + '.checksum'),
Body: checksum,
}
await this._s3.upload(params).promise()
}
await input.task
}
async _writeFile(file, data, options) {
return this._s3
.putObject({ ...this._createParams(file), Body: data })
.promise()
}
async _createReadStream(file, options) {
return this._s3.getObject(this._createParams(file)).createReadStream()
}
async _unlink(file) {
return this._s3.deleteObject(this._createParams(file)).promise()
}
async _list(dir) {
function splitPath(path) {
return path.split('/').filter(d => d.length)
}
const prefix = [this._dir, dir].join('/')
const splitPrefix = splitPath(prefix)
const request = this._s3.listObjectsV2({
Bucket: this._bucket,
Prefix: splitPrefix.join('/'),
})
const result = await request.promise()
const uniq = new Set()
for (const entry of result.Contents) {
const line = splitPath(entry.Key)
if (line.length > splitPrefix.length) {
uniq.add(line[splitPrefix.length])
}
}
return [...uniq]
}
async _rename(oldPath, newPath) {
const params = {
...this._createParams(newPath),
CopySource: `/${this._bucket}/${this._dir}${oldPath}`,
}
await this._s3.copyObject(params).promise()
await this._s3.deleteObject(this._createParams(oldPath)).promise()
}
async _getSize(file) {
if (typeof file !== 'string') {
file = file.fd
}
const result = await this._s3.headObject(this._createParams(file)).promise()
return +result.ContentLength
}
async _read(file, buffer, position = 0) {
if (typeof file !== 'string') {
file = file.fd
}
const params = this._createParams(file)
params.Range = `bytes=${position}-${position + buffer.length - 1}`
const result = await this._s3.getObject(params).promise()
result.Body.copy(buffer)
return { bytesRead: result.Body.length, buffer }
}
async _write(file, buffer, position) {
const MIN_FRAGMENT_SIZE = 1024 * 1024 * 5 // 5 MB
if (typeof file !== 'string') {
file = file.fd
}
const uploadParams = this._createParams(file)
const fileSize = +(await this._s3.headObject(uploadParams).promise())
.ContentLength
if (fileSize < MIN_FRAGMENT_SIZE) {
const resultBuffer = Buffer.alloc(
Math.max(fileSize, position + buffer.length)
)
const fileContent = (await this._s3.getObject(uploadParams).promise())
.Body
fileContent.copy(resultBuffer)
buffer.copy(resultBuffer, position)
await this._s3
.putObject({ ...uploadParams, Body: resultBuffer })
.promise()
return { buffer, bytesWritten: buffer.length }
} else {
// using this trick: https://stackoverflow.com/a/38089437/72637
// multipart fragments have a minimum size of 5 MB unless they are the last one
// splitting the file in 3 parts: [prefix, edit, suffix]
// if `prefix` is bigger than 5 MB, it will be sourced from uploadPartCopy()
// otherwise it will be downloaded and concatenated to `edit`
// `edit` will always be an upload part
// `suffix` will always be sourced from uploadPartCopy()
// (a standalone sketch of this arithmetic follows this file's diff)
const multipartParams = await this._s3
.createMultipartUpload(uploadParams)
.promise()
try {
const parts = []
const prefixSize = position
let suffixOffset = prefixSize + buffer.length
let suffixSize = Math.max(0, fileSize - suffixOffset)
let hasSuffix = suffixSize > 0
let editBuffer = buffer
let editBufferOffset = position
let partNumber = 1
const prefixRange = `bytes=0-${prefixSize - 1}`
if (prefixSize < MIN_FRAGMENT_SIZE) {
const downloadParams = { ...uploadParams, Range: prefixRange }
const prefixBuffer =
prefixSize > 0
? (await this._s3.getObject(downloadParams).promise()).Body
: Buffer.alloc(0)
editBuffer = Buffer.concat([prefixBuffer, buffer])
editBufferOffset = 0
} else {
const copyPrefixParams = {
...multipartParams,
PartNumber: partNumber++,
CopySource: `/${this._bucket}/${this._dir + file}`,
CopySourceRange: prefixRange,
}
const prefixPart = (
await this._s3.uploadPartCopy(copyPrefixParams).promise()
).CopyPartResult
parts.push({
ETag: prefixPart.ETag,
PartNumber: copyPrefixParams.PartNumber,
})
}
if (hasSuffix && editBuffer.length < MIN_FRAGMENT_SIZE) {
// the edit fragment is too short and is not the last fragment
// let's steal from the suffix fragment to reach the minimum size
// the suffix might be too short and itself entirely absorbed in the edit fragment, making it the last one.
const complementSize = Math.min(
MIN_FRAGMENT_SIZE - editBuffer.length,
suffixSize
)
const complementOffset = editBufferOffset + editBuffer.length
suffixOffset += complementSize
suffixSize -= complementSize
hasSuffix = suffixSize > 0
const complementRange = `bytes=${complementOffset}-${
complementOffset + complementSize - 1
}`
const downloadParams = { ...uploadParams, Range: complementRange }
const complementBuffer = (
await this._s3.getObject(downloadParams).promise()
).Body
editBuffer = Buffer.concat([editBuffer, complementBuffer])
}
const editParams = {
...multipartParams,
Body: editBuffer,
PartNumber: partNumber++,
}
const editPart = await this._s3.uploadPart(editParams).promise()
parts.push({ ETag: editPart.ETag, PartNumber: editParams.PartNumber })
if (hasSuffix) {
const suffixRange = `bytes=${suffixOffset}-${fileSize - 1}`
const copySuffixParams = {
...multipartParams,
PartNumber: partNumber++,
CopySource: `/${this._bucket}/${this._dir + file}`,
CopySourceRange: suffixRange,
}
const suffixPart = (
await this._s3.uploadPartCopy(copySuffixParams).promise()
).CopyPartResult
parts.push({
ETag: suffixPart.ETag,
PartNumber: copySuffixParams.PartNumber,
})
}
await this._s3
.completeMultipartUpload({
...multipartParams,
MultipartUpload: { Parts: parts },
})
.promise()
} catch (e) {
await this._s3.abortMultipartUpload(multipartParams).promise()
throw e
}
}
}
async _openFile(path, flags) {
return path
}
async _closeFile(fd) {}
}
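To make the [prefix, edit, suffix] arithmetic in _write() easier to follow, here is a standalone sketch of just the planning step — computeWritePlan is a hypothetical illustration, not part of this commit:

const MIN = 5 * 1024 * 1024 // S3 rejects non-terminal multipart parts below 5 MB

// given the current object size and the requested write, decide where
// each of the three fragments is sourced from
function computeWritePlan(fileSize, position, length) {
  // prefix ≥ 5 MB → server-side uploadPartCopy, else downloaded into the edit part
  const copyPrefix = position >= MIN
  let editSize = copyPrefix ? length : position + length
  let suffixOffset = position + length
  let suffixSize = Math.max(0, fileSize - suffixOffset)
  // a short edit part steals bytes from the suffix to reach the 5 MB minimum
  if (suffixSize > 0 && editSize < MIN) {
    const stolen = Math.min(MIN - editSize, suffixSize)
    editSize += stolen
    suffixOffset += stolen
    suffixSize -= stolen
  }
  return { copyPrefix, editSize, suffixOffset, suffixSize }
}

// e.g. patching 1 MB at offset 8 MB inside a 20 MB object:
computeWritePlan(20 * 2 ** 20, 8 * 2 ** 20, 2 ** 20)
// → the 8 MB prefix is copied server-side; the 1 MB edit steals 4 MB from
//   the suffix to reach 5 MB; the remaining 7 MB suffix is copied server-side
//   (the last part may be smaller than 5 MB)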

View File

@ -7,6 +7,8 @@
> Users must be able to say: “Nice enhancement, I'm eager to test it”
- [Remotes] Add AWS S3 as backup storage
### Bug fixes
> Users must be able to say: “I had this issue, happy to know it's fixed”
@ -30,5 +32,8 @@
>
> In case of conflict, the highest (lowest in previous list) `$version` wins.
xo-web patch
xo-server-sdn-controller patch
- xo-server-sdn-controller patch
- xo-remote-parser minor
- @xen-orchestra/fs minor
- xo-web minor
- xo-server minor

View File

@ -26,7 +26,8 @@
"node": ">=6"
},
"dependencies": {
"lodash": "^4.13.1"
"lodash": "^4.13.1",
"url-parse": "^1.4.7"
},
"devDependencies": {
"@babel/cli": "^7.0.0",

View File

@ -2,6 +2,7 @@ import filter from 'lodash/filter'
import map from 'lodash/map'
import trim from 'lodash/trim'
import trimStart from 'lodash/trimStart'
import Url from 'url-parse'
const NFS_RE = /^([^:]+):(?:(\d+):)?([^:]+)$/
const SMB_RE = /^([^:]+):(.+)@([^@]+)\\\\([^\0]+)(?:\0(.*))?$/
@ -39,6 +40,13 @@ export const parse = string => {
object.domain = domain
object.username = username
object.password = password
} else if (type === 's3') {
const parsed = new Url(string)
object.type = 's3'
object.host = parsed.host
object.path = parsed.pathname
object.username = parsed.username
object.password = decodeURIComponent(parsed.password)
}
return object
}
@ -60,6 +68,9 @@ export const format = ({
if (type === 'smb') {
string += `${username}:${password}@${domain}\\\\${host}`
}
if (type === 's3') {
string += `${username}:${encodeURIComponent(password)}@${host}`
}
path = sanitizePath(path)
if (type === 'smb') {
path = path.split('/')

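A round-trip sketch of the new s3 branch, using the fixture values from the test file below:

import { format, parse } from 'xo-remote-parser'

const url = 's3://AKIAS:XSuBupZ0mJlu%2B@s3-us-west-2.amazonaws.com/test-bucket/dir'
const parsed = parse(url)
// → { type: 's3', host: 's3-us-west-2.amazonaws.com', path: '/test-bucket/dir',
//     username: 'AKIAS', password: 'XSuBupZ0mJlu+' }
// the '+' survives because parse() decodes the password and format() re-encodes it
format(parsed) // → the original string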
View File

@ -44,6 +44,17 @@ const data = deepFreeze({
path: '/media/nfs',
},
},
S3: {
string:
's3://AKIAS:XSuBupZ0mJlu%2B@s3-us-west-2.amazonaws.com/test-bucket/dir',
object: {
type: 's3',
host: 's3-us-west-2.amazonaws.com',
path: '/test-bucket/dir',
username: 'AKIAS',
password: 'XSuBupZ0mJlu+',
},
},
})
const parseData = deepFreeze({

View File

@ -8,7 +8,7 @@ import defer from 'golike-defer'
import limitConcurrency from 'limit-concurrency-decorator'
import safeTimeout from 'strict-timeout/safe'
import { type Pattern, createPredicate } from 'value-matcher'
import { type Readable, PassThrough } from 'stream'
import { PassThrough } from 'stream'
import { AssertionError } from 'assert'
import { basename, dirname } from 'path'
import { decorateWith } from '@vates/decorate-with'
@ -29,13 +29,7 @@ import {
sum,
values,
} from 'lodash'
import {
CancelToken,
ignoreErrors,
pFinally,
pFromEvent,
timeout,
} from 'promise-toolbox'
import { CancelToken, ignoreErrors, pFinally, timeout } from 'promise-toolbox'
import Vhd, {
chainVhd,
checkVhdChain,
@ -326,31 +320,6 @@ const parseVmBackupId = (id: string) => {
}
}
// write a stream to a file using a temporary file
//
// TODO: merge into RemoteHandlerAbstract
const writeStream = async (
input: Readable | Promise<Readable>,
handler: RemoteHandler,
path: string,
{ checksum = true }: { checksum?: boolean } = {}
): Promise<void> => {
input = await input
const tmpPath = `${dirname(path)}/.${basename(path)}`
const output = await handler.createOutputStream(tmpPath, { checksum })
try {
input.pipe(output)
await pFromEvent(output, 'finish')
await output.checksumWritten
// $FlowFixMe
await input.task
await handler.rename(tmpPath, path, { checksum })
} catch (error) {
await handler.unlink(tmpPath, { checksum })
throw error
}
}
const wrapTask = async <T>(opts: any, task: Promise<T>): Promise<T> => {
const { data, logger, message, parentId, result } = opts
@ -1507,7 +1476,7 @@ export default class BackupNg {
parentId: taskId,
result: () => ({ size: xva.size }),
},
writeStream(fork, handler, dataFilename)
handler.outputStream(fork, dataFilename)
)
if (handler._getFilePath !== undefined) {
@ -1876,9 +1845,8 @@ export default class BackupNg {
}
// FIXME: should only be renamed after the metadata file has been written
await writeStream(
await handler.outputStream(
fork.streams[`${id}.vhd`](),
handler,
path,
{
// no checksum for VHDs, because they will be invalidated by

View File

@ -547,6 +547,7 @@ const messages = {
remoteTypeLocal: 'Local',
remoteTypeNfs: 'NFS',
remoteTypeSmb: 'SMB',
remoteTypeS3: 'Amazon Web Services S3 (beta)',
remoteType: 'Type',
remoteSmbWarningMessage:
'SMB remotes are meant to work with Windows Server. For other systems (Linux Samba, which means almost all NAS), please use NFS.',
@ -593,6 +594,8 @@ const messages = {
remoteSmbPlaceHolderDomain: 'Domain',
remoteSmbPlaceHolderAddressShare: '<address>\\\\<share>',
remoteSmbPlaceHolderOptions: 'Custom mount options',
remoteS3PlaceHolderBucket: 'AWS S3 bucket name',
remoteS3PlaceHolderDirectory: 'directory',
remotePlaceHolderPassword: 'Password (fill to edit)',
// ------ New Storage -----

View File

@ -294,6 +294,58 @@ const COLUMNS_SMB_REMOTE = [
COLUMN_PROXY,
]
const COLUMNS_S3_REMOTE = [
COLUMN_NAME,
{
itemRenderer: (remote, { formatMessage }) => (
<Text
data-element='host'
data-remote={remote}
onChange={_changeUrlElement}
placeholder='AWS endpoint'
value={remote.host}
/>
),
name: 'AWS S3 Endpoint',
},
{
itemRenderer: (remote, { formatMessage }) => (
<Text
data-element='path'
data-remote={remote}
onChange={_changeUrlElement}
placeholder='bucket placeholder'
value={remote.path}
/>
),
name: 'Bucket',
},
COLUMN_STATE,
{
itemRenderer: (remote, { formatMessage }) => (
<span>
<Text
data-element='username'
data-remote={remote}
onChange={_changeUrlElement}
value={remote.username}
/>
:
<Password
data-element='password'
data-remote={remote}
onChange={_changeUrlElement}
placeholder='Click to change Secret Key'
value=''
/>
</span>
),
name: 'Key',
},
COLUMN_SPEED,
COLUMN_PROXY,
]
const GROUPED_ACTIONS = [
{
handler: deleteRemotes,
@ -443,6 +495,23 @@ export default decorate([
/>
</div>
)}
{!isEmpty(state.remoteWithInfo.s3) && (
<div>
<h2>{_('remoteTypeS3')}</h2>
<SortedTable
collection={state.remoteWithInfo.s3}
columns={COLUMNS_S3_REMOTE}
data-editRemote={effects.editRemote}
data-formatMessage={formatMessage}
data-reset={effects.reset}
filters={FILTERS}
groupedActions={GROUPED_ACTIONS}
individualActions={INDIVIDUAL_ACTIONS}
stateUrlParam='s3'
/>
</div>
)}
<Remote formatMessage={formatMessage} key={state.formKey} />
</div>
),

View File

@ -18,6 +18,7 @@ const remoteTypes = {
file: 'remoteTypeLocal',
nfs: 'remoteTypeNfs',
smb: 'remoteTypeSmb',
s3: 'remoteTypeS3',
}
export default decorate([
@ -36,6 +37,8 @@ export default decorate([
proxyId: undefined,
type: undefined,
username: undefined,
directory: undefined,
bucket: undefined,
}),
effects: {
linkState,
@ -93,6 +96,8 @@ export default decorate([
path,
port,
proxyId,
bucket,
directory,
type = 'nfs',
username,
} = state
@ -103,6 +108,9 @@ export default decorate([
port,
type,
}
if (type === 's3') {
urlParams.path = bucket + '/' + directory
}
username && (urlParams.username = username)
password && (urlParams.password = password)
domain && (urlParams.domain = domain)
@ -142,6 +150,10 @@ export default decorate([
password = remote.password || '',
parsedPath,
path = parsedPath || '',
parsedBucket = parsedPath && parsedPath.split('/')[0],
bucket = parsedBucket || '',
parsedDirectory,
directory = parsedDirectory || '',
port = remote.port,
proxyId = remote.proxy,
type = remote.type || 'nfs',
@ -170,6 +182,11 @@ export default decorate([
{type === 'smb' && (
<em className='text-warning'>{_('remoteSmbWarningMessage')}</em>
)}
{type === 's3' && (
<em className='text-warning'>
Backup to Amazon S3 is a BETA feature
</em>
)}
</div>
<div className='form-group'>
<input
@ -332,6 +349,73 @@ export default decorate([
</div>
</fieldset>
)}
{type === 's3' && (
<fieldset className='form-group'>
<div className='input-group'>
<input
className='form-control'
name='host'
onChange={effects.linkState}
// pattern='^[^\\/]+\\[^\\/]+$'
placeholder='AWS S3 endpoint (e.g. s3.us-east-2.amazonaws.com)'
required
type='text'
value={host}
/>
</div>
<div className='input-group'>
<input
className='form-control'
name='bucket'
onChange={effects.linkState}
// https://stackoverflow.com/a/58248645/72637
pattern='(?!^(\d{1,3}\.){3}\d{1,3}$)(^[a-z0-9]([a-z0-9-]*(\.[a-z0-9])?)*$)'
placeholder={formatMessage(
messages.remoteS3PlaceHolderBucket
)}
required
type='text'
value={bucket}
/>
</div>
<div className='input-group form-group'>
<input
className='form-control'
name='directory'
onChange={effects.linkState}
pattern='^(([^/]+)+(/[^/]+)*)?$'
placeholder={formatMessage(
messages.remoteS3PlaceHolderDirectory
)}
required
type='text'
value={directory}
/>
</div>
<div className='input-group'>
<input
className='form-control'
name='username'
onChange={effects.linkState}
placeholder='Access key ID'
required
type='text'
value={username}
/>
</div>
<div className='input-group'>
<input
className='form-control'
name='password'
onChange={effects.linkState}
placeholder='Secret access key'
required
type='text'
value={password}
/>
</div>
</fieldset>
)}
<div className='form-group'>
<ActionButton
btnStyle='primary'

View File

@ -2843,6 +2843,21 @@ autoprefixer@^9.5.1, autoprefixer@^9.6.1:
postcss "^7.0.30"
postcss-value-parser "^4.1.0"
aws-sdk@^2.686.0:
version "2.687.0"
resolved "https://registry.yarnpkg.com/aws-sdk/-/aws-sdk-2.687.0.tgz#664019f43a697b9697a5b90789a5ecbbb9e2b90f"
integrity sha512-gUIL4YXWdg3dv1QJMs0PCLpaHRe4DtPIYgRh3PPLJHnbIETLQdiA98co6qthAXIIWKx2hET4d1BhnAlAp77zQw==
dependencies:
buffer "4.9.2"
events "1.1.1"
ieee754 "1.1.13"
jmespath "0.15.0"
querystring "0.2.0"
sax "1.2.1"
url "0.10.3"
uuid "3.3.2"
xml2js "0.4.19"
aws-sign2@~0.7.0:
version "0.7.0"
resolved "https://registry.yarnpkg.com/aws-sign2/-/aws-sign2-0.7.0.tgz#b46e890934a9591f2d2f6f86d7e6a9f1b3fe76a8"
@ -4333,7 +4348,7 @@ buffer-xor@^1.0.3:
resolved "https://registry.yarnpkg.com/buffer-xor/-/buffer-xor-1.0.3.tgz#26e61ed1422fb70dd42e6e36729ed51d855fe8d9"
integrity sha1-JuYe0UIvtw3ULm42cp7VHYVf6Nk=
buffer@^4.3.0:
buffer@4.9.2, buffer@^4.3.0:
version "4.9.2"
resolved "https://registry.yarnpkg.com/buffer/-/buffer-4.9.2.tgz#230ead344002988644841ab0244af8c44bbe3ef8"
integrity sha512-xq+q3SRMOxGivLhBNaUdC64hDTQwejJ+H0T/NB1XMtTVEwNTrfFF3gAxiyW0Bu/xWEGhjVKgUcMhCrUy2+uCWg==
@ -7274,7 +7289,7 @@ eventemitter3@^4.0.0:
resolved "https://registry.yarnpkg.com/eventemitter3/-/eventemitter3-4.0.4.tgz#b5463ace635a083d018bdc7c917b4c5f10a85384"
integrity sha512-rlaVLnVxtxvoyLsQQFBx53YmXHDxRIzzTLbdfxqi4yocpSjAxXwkU0cScM5JgSKMqEhrZpnvQ2D9gjylR0AimQ==
events@^1.0.2, events@^1.1.0:
events@1.1.1, events@^1.0.2, events@^1.1.0:
version "1.1.1"
resolved "https://registry.yarnpkg.com/events/-/events-1.1.1.tgz#9ebdb7635ad099c70dcc4c2a1f5004288e8bd924"
integrity sha1-nr23Y1rQmccNzEwqH1AEKI6L2SQ=
@ -9236,7 +9251,7 @@ icss-utils@^4.1.0:
dependencies:
postcss "^7.0.14"
ieee754@^1.1.4:
ieee754@1.1.13, ieee754@^1.1.4:
version "1.1.13"
resolved "https://registry.yarnpkg.com/ieee754/-/ieee754-1.1.13.tgz#ec168558e95aa181fd87d37f55c32bbcb6708b84"
integrity sha512-4vf7I2LYV/HaWerSo3XmlMkp5eZ83i+/CDluXi/IGTs/O1sejBNhTtnxzmRZfvOUqj7lZjqHkeTvpgSFDlWZTg==
@ -10837,6 +10852,10 @@ jest@^24.8.0:
import-local "^2.0.0"
jest-cli "^24.9.0"
jmespath@0.15.0:
version "0.15.0"
resolved "https://registry.yarnpkg.com/jmespath/-/jmespath-0.15.0.tgz#a3f222a9aae9f966f5d27c796510e28091764217"
integrity sha1-o/Iiqarp+Wb10nx5ZRDigJF2Qhc=
jest@^26.0.1:
version "26.0.1"
resolved "https://registry.yarnpkg.com/jest/-/jest-26.0.1.tgz#5c51a2e58dff7525b65f169721767173bf832694"
@ -15894,6 +15913,11 @@ sass-graph@2.2.5:
scss-tokenizer "^0.2.3"
yargs "^13.3.2"
sax@1.2.1:
version "1.2.1"
resolved "https://registry.yarnpkg.com/sax/-/sax-1.2.1.tgz#7b8e656190b228e81a66aea748480d828cd2d37a"
integrity sha1-e45lYZCyKOgaZq6nSEgNgozS03o=
sax@1.2.x, sax@>=0.6.0, sax@^1.2.4, sax@~1.2.4:
version "1.2.4"
resolved "https://registry.yarnpkg.com/sax/-/sax-1.2.4.tgz#2816234e2378bddc4e5354fab5caa895df7100d9"
@ -17951,7 +17975,7 @@ url-parse-lax@^3.0.0:
dependencies:
prepend-http "^2.0.0"
url-parse@^1.2.0, url-parse@^1.4.3:
url-parse@^1.2.0, url-parse@^1.4.3, url-parse@^1.4.7:
version "1.4.7"
resolved "https://registry.yarnpkg.com/url-parse/-/url-parse-1.4.7.tgz#a8a83535e8c00a316e403a5db4ac1b9b853ae278"
integrity sha512-d3uaVyzDB9tQoSXFvuSUNFibTd9zxd2bkVrDRvF5TmvWWQwqE4lgYJ5m+x1DbecWkw+LK4RNl2CU1hHuOKPVlg==
@ -17959,6 +17983,14 @@ url-parse@^1.2.0, url-parse@^1.4.3:
querystringify "^2.1.1"
requires-port "^1.0.0"
url@0.10.3:
version "0.10.3"
resolved "https://registry.yarnpkg.com/url/-/url-0.10.3.tgz#021e4d9c7705f21bbf37d03ceb58767402774c64"
integrity sha1-Ah5NnHcF8hu/N9A861h2dAJ3TGQ=
dependencies:
punycode "1.3.2"
querystring "0.2.0"
url@^0.11.0, url@~0.11.0:
version "0.11.0"
resolved "https://registry.yarnpkg.com/url/-/url-0.11.0.tgz#3838e97cfc60521eb73c525a8e55bfdd9e2e28f1"
@ -18026,6 +18058,11 @@ utils-merge@1.0.1, utils-merge@1.x.x:
resolved "https://registry.yarnpkg.com/utils-merge/-/utils-merge-1.0.1.tgz#9f95710f50a267947b2ccc124741c1028427e713"
integrity sha1-n5VxD1CiZ5R7LMwSR0HBAoQn5xM=
uuid@3.3.2:
version "3.3.2"
resolved "https://registry.yarnpkg.com/uuid/-/uuid-3.3.2.tgz#1b4af4955eb3077c501c23872fc6513811587131"
integrity sha512-yXJmeNaw3DnnKAOKJE51sL/ZaYfWJRl1pK9dr19YFCu0ObS231AB1/LbqTKRAQ5kw8A90rA6fr4riOUpTZvQZA==
uuid@^3.0.1, uuid@^3.3.2, uuid@^3.4.0:
version "3.4.0"
resolved "https://registry.yarnpkg.com/uuid/-/uuid-3.4.0.tgz#b23e4358afa8a202fe7a100af1f5f883f02007ee"
@ -18812,6 +18849,14 @@ xml-name-validator@^3.0.0:
resolved "https://registry.yarnpkg.com/xml-name-validator/-/xml-name-validator-3.0.0.tgz#6ae73e06de4d8c6e47f9fb181f78d648ad457c6a"
integrity sha512-A5CUptxDsvxKJEU3yO6DuWBSJz/qizqzJKOMIfUJHETbBw/sFaDxgd6fxm1ewUaM0jZ444Fc5vC5ROYurg/4Pw==
xml2js@0.4.19:
version "0.4.19"
resolved "https://registry.yarnpkg.com/xml2js/-/xml2js-0.4.19.tgz#686c20f213209e94abf0d1bcf1efaa291c7827a7"
integrity sha512-esZnJZJOiJR9wWKMyuvSE1y6Dq5LCuJanqhxslH2bxM6duahNZ+HMpCLhBQGZkbX6xRf8x1Y2eJlgt2q3qo49Q==
dependencies:
sax ">=0.6.0"
xmlbuilder "~9.0.1"
xml2js@0.4.x, xml2js@^0.4.19, xml2js@^0.4.23:
version "0.4.23"
resolved "https://registry.yarnpkg.com/xml2js/-/xml2js-0.4.23.tgz#a0c69516752421eb2ac758ee4d4ccf58843eac66"
@ -18830,6 +18875,10 @@ xmlbuilder@^11.0.0, xmlbuilder@~11.0.0:
resolved "https://registry.yarnpkg.com/xmlbuilder/-/xmlbuilder-11.0.1.tgz#be9bae1c8a046e76b31127726347d0ad7002beb3"
integrity sha512-fDlsI/kFEx7gLvbecc0/ohLG50fugQp8ryHzMTuW9vSa1GJ0XYWKnhsUx7oie3G98+r56aTQIUB4kht42R3JvA==
xmlbuilder@~9.0.1:
version "9.0.7"
resolved "https://registry.yarnpkg.com/xmlbuilder/-/xmlbuilder-9.0.7.tgz#132ee63d2ec5565c557e20f4c22df9aca686b10d"
integrity sha1-Ey7mPS7FVlxVfiD0wi35rKaGsQ0=
xmlchars@^2.2.0:
version "2.2.0"
resolved "https://registry.yarnpkg.com/xmlchars/-/xmlchars-2.2.0.tgz#060fe1bcb7f9c76fe2a17db86a9bc3ab894210cb"