diff --git a/@xen-orchestra/backups/_cleanVm.js b/@xen-orchestra/backups/_cleanVm.js index 960781743..8d5d63829 100644 --- a/@xen-orchestra/backups/_cleanVm.js +++ b/@xen-orchestra/backups/_cleanVm.js @@ -18,7 +18,10 @@ const handlerPath = require('@xen-orchestra/fs/path') // checking the size of a vhd directory is costly // 1 Http Query per 1000 blocks // we only check size of all the vhd are VhdFiles -function shouldComputeVhdsSize(vhds) { +function shouldComputeVhdsSize(handler, vhds) { + if (handler.isEncrypted) { + return false + } return vhds.every(vhd => vhd instanceof VhdFile) } @@ -26,7 +29,7 @@ const computeVhdsSize = (handler, vhdPaths) => Disposable.use( vhdPaths.map(vhdPath => openVhd(handler, vhdPath)), async vhds => { - if (shouldComputeVhdsSize(vhds)) { + if (shouldComputeVhdsSize(handler, vhds)) { const sizes = await asyncMap(vhds, vhd => vhd.getSize()) return sum(sizes) } @@ -486,7 +489,11 @@ exports.cleanVm = async function cleanVm( if (mode === 'full') { // a full backup : check size const linkedXva = resolve('/', vmDir, xva) - fileSystemSize = await handler.getSize(linkedXva) + try { + fileSystemSize = await handler.getSize(linkedXva) + } catch (error) { + // can fail with an encrypted remote + } } else if (mode === 'delta') { const linkedVhds = Object.keys(vhds).map(key => resolve('/', vmDir, vhds[key])) fileSystemSize = await computeVhdsSize(handler, linkedVhds) diff --git a/@xen-orchestra/backups/_isValidXva.js b/@xen-orchestra/backups/_isValidXva.js index c89a4406c..03a42e418 100644 --- a/@xen-orchestra/backups/_isValidXva.js +++ b/@xen-orchestra/backups/_isValidXva.js @@ -49,6 +49,11 @@ const isValidTar = async (handler, size, fd) => { // TODO: find an heuristic for compressed files async function isValidXva(path) { const handler = this._handler + + // the file size is larger when encrypted, and reading only part of an encrypted file is not implemented + if (handler.isEncrypted) { + return true + } try { const fd = await handler.openFile(path, 'r') try { @@ -66,7 +71,6 @@ async function isValidXva(path) { } } catch (error) { // never throw, log and report as valid to avoid side effects - console.error('isValidXva', path, error) return true } } diff --git a/@xen-orchestra/fs/docs/encryption.md b/@xen-orchestra/fs/docs/encryption.md new file mode 100644 index 000000000..a8179de52 --- /dev/null +++ b/@xen-orchestra/fs/docs/encryption.md @@ -0,0 +1,19 @@ +## metadata files + +- Older remotes don't have any metadata files +- Remotes used since 5.75 have two files: `encryption.json` and `metadata.json` + +The metadata files are checked by the `sync()` method. If the check fails, it MUST throw an error and unmount the remote. + +If the remote is empty, the `sync` method creates them. + +### encryption.json + +An unencrypted file containing the algorithm and parameters used for this remote. +It MUST NOT contain the key. + +### metadata.json + +An encrypted JSON file containing the settings of the remote. Today it contains only a random value ( `{random: }` ); it is used to check that the encryption key configured on the remote is valid, but in the future it will also be able to store remote settings to ease disaster recovery. + +If this file can't be read (decrypted, decompressed, ...), it means the encryption settings of the remote have changed. If the remote is empty, update the `encryption.json` and `metadata.json` files; otherwise raise an error.
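To make the check described in `encryption.md` concrete, here is a simplified sketch (not part of this diff) of what `RemoteHandlerAbstract#_checkMetadata` further down in this diff does with the two files; it assumes a handler whose `_readFile` returns raw data while `readFile` decrypts, and it leaves out the ENOENT handling of the real implementation:

```js
// Illustrative only: see RemoteHandlerAbstract#_checkMetadata in abstract.js for the real logic.
async function checkRemoteMetadata(handler) {
  // encryption.json is stored in clear text: it describes the algorithm but never contains the key
  const { algorithm } = JSON.parse(await handler._readFile('/encryption.json'))

  // metadata.json is encrypted: being able to decrypt and parse it proves that the configured
  // encryptionKey matches the one used to create the remote; with a wrong key this throws
  // (typically ERR_OSSL_EVP_BAD_DECRYPT) and sync() must forget the remote and report the error
  const metadata = JSON.parse(await handler.readFile('/metadata.json'))

  return { algorithm, metadata }
}
```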
diff --git a/@xen-orchestra/fs/package.json b/@xen-orchestra/fs/package.json index 0fd3d0492..c5eab26b3 100644 --- a/@xen-orchestra/fs/package.json +++ b/@xen-orchestra/fs/package.json @@ -28,6 +28,7 @@ "@vates/async-each": "^1.0.0", "@vates/coalesce-calls": "^0.1.0", "@vates/decorate-with": "^2.0.0", + "@vates/read-chunk": "^1.0.0", "@xen-orchestra/async-map": "^0.1.2", "@xen-orchestra/log": "^0.3.0", "bind-property-descriptor": "^2.0.0", @@ -39,6 +40,7 @@ "lodash": "^4.17.4", "promise-toolbox": "^0.21.0", "proper-lockfile": "^4.1.2", + "pumpify": "^2.0.1", "readable-stream": "^4.1.0", "through2": "^4.0.2", "xo-remote-parser": "^0.9.1" diff --git a/@xen-orchestra/fs/src/_encryptor.js b/@xen-orchestra/fs/src/_encryptor.js new file mode 100644 index 000000000..3361038e4 --- /dev/null +++ b/@xen-orchestra/fs/src/_encryptor.js @@ -0,0 +1,71 @@ +const { readChunk } = require('@vates/read-chunk') +const crypto = require('crypto') +const pumpify = require('pumpify') + +function getEncryptor(key) { + if (key === undefined) { + return { + id: 'NULL_ENCRYPTOR', + algorithm: 'none', + key: 'none', + ivLength: 0, + encryptData: buffer => buffer, + encryptStream: stream => stream, + decryptData: buffer => buffer, + decryptStream: stream => stream, + } + } + const algorithm = 'aes-256-cbc' + const ivLength = 16 + + function encryptStream(input) { + const iv = crypto.randomBytes(ivLength) + const cipher = crypto.createCipheriv(algorithm, Buffer.from(key), iv) + + const encrypted = pumpify(input, cipher) + encrypted.unshift(iv) + return encrypted + } + + async function decryptStream(encryptedStream) { + const iv = await readChunk(encryptedStream, ivLength) + const cipher = crypto.createDecipheriv(algorithm, Buffer.from(key), iv) + /** + * WARNING + * + * the encrypted size includes an initialization vector + a padding at the end + * we can't predict the decrypted size from the encrypted size alone + * thus, we can't set decrypted.length reliably + * + */ + return pumpify(encryptedStream, cipher) + } + + function encryptData(buffer) { + const iv = crypto.randomBytes(ivLength) + const cipher = crypto.createCipheriv(algorithm, Buffer.from(key), iv) + const encrypted = cipher.update(buffer) + return Buffer.concat([iv, encrypted, cipher.final()]) + } + + function decryptData(buffer) { + const iv = buffer.slice(0, ivLength) + const encrypted = buffer.slice(ivLength) + const decipher = crypto.createDecipheriv(algorithm, Buffer.from(key), iv) + const decrypted = decipher.update(encrypted) + return Buffer.concat([decrypted, decipher.final()]) + } + + return { + id: algorithm, + algorithm, + key, + ivLength, + encryptData, + encryptStream, + decryptData, + decryptStream, + } +} + +exports._getEncryptor = getEncryptor diff --git a/@xen-orchestra/fs/src/abstract.js b/@xen-orchestra/fs/src/abstract.js index 792466d4b..0a2cfd13c 100644 --- a/@xen-orchestra/fs/src/abstract.js +++ b/@xen-orchestra/fs/src/abstract.js @@ -1,4 +1,5 @@ import asyncMapSettled from '@xen-orchestra/async-map/legacy' +import assert from 'assert' import getStream from 'get-stream' import { coalesceCalls } from '@vates/coalesce-calls' import { createLogger } from '@xen-orchestra/log' @@ -6,13 +7,14 @@ import { fromCallback, fromEvent, ignoreErrors, timeout } from 'promise-toolbox' import { limitConcurrency } from 'limit-concurrency-decorator' import { parse } from 'xo-remote-parser' import { pipeline } from 'stream' -import { randomBytes } from 'crypto' +import { randomBytes, randomUUID } from 'crypto' import { synchronized } from 
'decorator-synchronized' import { basename, dirname, normalize as normalizePath } from './path' import { createChecksumStream, validChecksumOfReadStream } from './checksum' +import { _getEncryptor } from './_encryptor' -const { warn } = createLogger('@xen-orchestra:fs') +const { info, warn } = createLogger('@xen-orchestra:fs') const checksumFile = file => file + '.checksum' const computeRate = (hrtime, size) => { @@ -23,6 +25,9 @@ const computeRate = (hrtime, size) => { const DEFAULT_TIMEOUT = 6e5 // 10 min const DEFAULT_MAX_PARALLEL_OPERATIONS = 10 +const ENCRYPTION_DESC_FILENAME = 'encryption.json' +const ENCRYPTION_METADATA_FILENAME = 'metadata.json' + const ignoreEnoent = error => { if (error == null || error.code !== 'ENOENT') { throw error @@ -63,6 +68,7 @@ class PrefixWrapper { } export default class RemoteHandlerAbstract { + _encryptor constructor(remote, options = {}) { if (remote.url === 'test://') { this._remote = remote @@ -73,6 +79,7 @@ export default class RemoteHandlerAbstract { } } ;({ highWaterMark: this._highWaterMark, timeout: this._timeout = DEFAULT_TIMEOUT } = options) + this._encryptor = _getEncryptor(this._remote.encryptionKey) const sharedLimit = limitConcurrency(options.maxParallelOperations ?? DEFAULT_MAX_PARALLEL_OPERATIONS) this.closeFile = sharedLimit(this.closeFile) @@ -111,55 +118,51 @@ export default class RemoteHandlerAbstract { await this.__closeFile(fd) } - createReadStream(file, { checksum = false, ignoreMissingChecksum = false, ...options } = {}) { + async createReadStream(file, { checksum = false, ignoreMissingChecksum = false, ...options } = {}) { + if (options.end !== undefined || options.start !== undefined) { + assert.strictEqual(this.isEncrypted, false, `Can't read part of a file when encryption is active ${file}`) + } if (typeof file === 'string') { file = normalizePath(file) } - const path = typeof file === 'string' ? file : file.path - const streamP = timeout - .call(this._createReadStream(file, { ...options, highWaterMark: this._highWaterMark }), this._timeout) - .then(stream => { - // detect early errors - let promise = fromEvent(stream, 'readable') - // try to add the length prop if missing and not a range stream - if (stream.length === undefined && options.end === undefined && options.start === undefined) { - promise = Promise.all([ - promise, - ignoreErrors.call( - this._getSize(file).then(size => { - stream.length = size - }) - ), - ]) + let stream = await timeout.call( + this._createReadStream(file, { ...options, highWaterMark: this._highWaterMark }), + this._timeout + ) + + // detect early errors + await fromEvent(stream, 'readable') + + if (checksum) { + try { + const path = typeof file === 'string' ? 
file : file.path + const checksum = await this._readFile(checksumFile(path), { flags: 'r' }) + + const { length } = stream + stream = validChecksumOfReadStream(stream, String(checksum).trim()) + stream.length = length + } catch (error) { + if (!(ignoreMissingChecksum && error.code === 'ENOENT')) { + throw error } - - return promise.then(() => stream) - }) - - if (!checksum) { - return streamP + } } - // avoid a unhandled rejection warning - ignoreErrors.call(streamP) - - return this._readFile(checksumFile(path), { flags: 'r' }).then( - checksum => - streamP.then(stream => { - const { length } = stream - stream = validChecksumOfReadStream(stream, String(checksum).trim()) - stream.length = length - - return stream - }), - error => { - if (ignoreMissingChecksum && error && error.code === 'ENOENT') { - return streamP + if (this.isEncrypted) { + stream = this._encryptor.decryptStream(stream) + } else { + // try to add the length prop if missing and not a range stream + if (stream.length === undefined && options.end === undefined && options.start === undefined) { + try { + stream.length = await this._getSize(file) + } catch (error) { + // ignore errors } - throw error } - ) + } + + return stream } /** @@ -175,6 +178,8 @@ export default class RemoteHandlerAbstract { async outputStream(path, input, { checksum = true, dirMode, validator } = {}) { path = normalizePath(path) let checksumStream + + input = this._encryptor.encryptStream(input) if (checksum) { checksumStream = createChecksumStream() pipeline(input, checksumStream, noop) @@ -185,6 +190,8 @@ export default class RemoteHandlerAbstract { validator, }) if (checksum) { + // using _outputFile means the checksum will NOT be encrypted + // this is by design, to allow checking encrypted files without the key await this._outputFile(checksumFile(path), await checksumStream.checksum, { dirMode, flags: 'wx' }) } } @@ -204,8 +211,13 @@ export default class RemoteHandlerAbstract { return timeout.call(this._getInfo(), this._timeout) } + // when using encryption, the file size is aligned with the encryption block size (16 bytes) + // that means that the size will be 1 to 16 bytes more than the content size + the initialization vector length (16 bytes) async getSize(file) { - return timeout.call(this._getSize(typeof file === 'string' ? normalizePath(file) : file), this._timeout) + assert.strictEqual(this.isEncrypted, false, `Can't compute size of an encrypted file ${file}`) + + const size = await timeout.call(this._getSize(typeof file === 'string' ? normalizePath(file) : file), this._timeout) + return size - this._encryptor.ivLength } async list(dir, { filter, ignoreMissing = false, prependDir = false } = {}) { @@ -251,15 +263,18 @@ export default class RemoteHandlerAbstract { } async outputFile(file, data, { dirMode, flags = 'wx' } = {}) { - await this._outputFile(normalizePath(file), data, { dirMode, flags }) + const encryptedData = this._encryptor.encryptData(data) + await this._outputFile(normalizePath(file), encryptedData, { dirMode, flags }) } async read(file, buffer, position) { + assert.strictEqual(this.isEncrypted, false, `Can't read part of an encrypted file ${file}`) return this._read(typeof file === 'string' ? 
normalizePath(file) : file, buffer, position) } async readFile(file, { flags = 'r' } = {}) { - return this._readFile(normalizePath(file), { flags }) + const data = await this._readFile(normalizePath(file), { flags }) + return this._encryptor.decryptData(data) } async rename(oldPath, newPath, { checksum = false } = {}) { @@ -299,6 +314,61 @@ export default class RemoteHandlerAbstract { @synchronized() async sync() { await this._sync() + try { + await this._checkMetadata() + } catch (error) { + await this._forget() + throw error + } + } + + async _canWriteMetadata() { + const list = await this.list('/', { + filter: e => !e.startsWith('.') && e !== ENCRYPTION_DESC_FILENAME && e !== ENCRYPTION_METADATA_FILENAME, + }) + return list.length === 0 + } + + async _createMetadata() { + await Promise.all([ + this._writeFile( + normalizePath(ENCRYPTION_DESC_FILENAME), + JSON.stringify({ algorithm: this._encryptor.algorithm }), + { + flags: 'w', + } + ), // not encrypted + this.writeFile(ENCRYPTION_METADATA_FILENAME, `{"random":"${randomUUID()}"}`, { flags: 'w' }), // encrypted + ]) + } + + async _checkMetadata() { + try { + // this file is not encrypted + const data = await this._readFile(normalizePath(ENCRYPTION_DESC_FILENAME)) + JSON.parse(data) + } catch (error) { + if (error.code !== 'ENOENT') { + throw error + } + } + + try { + // this file is encrypted + const data = await this.readFile(ENCRYPTION_METADATA_FILENAME) + JSON.parse(data) + } catch (error) { + if (error.code === 'ENOENT' || (await this._canWriteMetadata())) { + info('will update metadata of this remote') + return this._createMetadata() + } + warn( + `The encryptionKey setting of this remote does not match the key used to create it. You won't be able to read any data from this remote`, + { error } + ) + // will probably be an ERR_OSSL_EVP_BAD_DECRYPT if the key is incorrect + throw error + } } async test() { @@ -352,11 +422,13 @@ export default class RemoteHandlerAbstract { } async write(file, buffer, position) { + assert.strictEqual(this.isEncrypted, false, `Can't write part of a file with encryption ${file}`) await this._write(typeof file === 'string' ? normalizePath(file) : file, buffer, position) } async writeFile(file, data, { flags = 'wx' } = {}) { - await this._writeFile(normalizePath(file), data, { flags }) + const encryptedData = this._encryptor.encryptData(data) + await this._writeFile(normalizePath(file), encryptedData, { flags }) } // Methods that can be called by private methods to avoid parallel limit on public methods @@ -564,6 +636,10 @@ export default class RemoteHandlerAbstract { async _writeFile(file, data, options) { throw new Error('Not implemented') } + + get isEncrypted() { + return this._encryptor.id !== 'NULL_ENCRYPTOR' + } } function createPrefixWrapperMethods() { diff --git a/packages/vhd-lib/Vhd/VhdFile.js b/packages/vhd-lib/Vhd/VhdFile.js index 00781830d..d32471d4e 100644 --- a/packages/vhd-lib/Vhd/VhdFile.js +++ b/packages/vhd-lib/Vhd/VhdFile.js @@ -83,6 +83,7 @@ exports.VhdFile = class VhdFile extends VhdAbstract { } static async open(handler, path, { flags, checkSecondFooter = true } = {}) { + assert(!handler.isEncrypted, `VHDFile implementation is not compatible with an encrypted remote`) const fd = await handler.openFile(path, flags ?? 
'r+') const vhd = new VhdFile(handler, fd) // openning a file for reading does not trigger EISDIR as long as we don't really read from it : diff --git a/packages/vhd-lib/aliases.js b/packages/vhd-lib/aliases.js index f43864c80..aced84af3 100644 --- a/packages/vhd-lib/aliases.js +++ b/packages/vhd-lib/aliases.js @@ -12,11 +12,14 @@ exports.resolveVhdAlias = async function resolveVhdAlias(handler, filename) { if (!isVhdAlias(filename)) { return filename } - const size = await handler.getSize(filename) - if (size > ALIAS_MAX_PATH_LENGTH) { - // seems reasonnable for a relative path - throw new Error(`The alias file ${filename} is too big (${size} bytes)`) + if (!handler.isEncrypted) { + const size = await handler.getSize(filename) + if (size > ALIAS_MAX_PATH_LENGTH) { + // seems reasonable for a relative path + throw new Error(`The alias file ${filename} is too big (${size} bytes)`) + } } + const aliasContent = (await handler.readFile(filename)).toString().trim() // also handle circular references and unreasonnably long chains if (isVhdAlias(aliasContent)) { diff --git a/packages/vhd-lib/openVhd.js b/packages/vhd-lib/openVhd.js index 8852cbafe..ce7632b9b 100644 --- a/packages/vhd-lib/openVhd.js +++ b/packages/vhd-lib/openVhd.js @@ -9,7 +9,8 @@ exports.openVhd = async function openVhd(handler, path, opts) { try { return await VhdFile.open(handler, resolved, opts) } catch (e) { - if (e.code !== 'EISDIR') { + // if the remote is encrypted, trying to open a VhdFile will throw an assertion error before checking if the path is a directory, therefore we should try to open a VhdDirectory anyway. + if (e.code !== 'EISDIR' && e.code !== 'ERR_ASSERTION') { throw e } return await VhdDirectory.open(handler, resolved, opts)
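To tie the pieces together, here is a small usage sketch (not part of this diff) of the `_getEncryptor` helper from `_encryptor.js`; it also illustrates the size arithmetic mentioned in the `getSize` comment of `abstract.js`: with `aes-256-cbc`, the stored data is the content rounded up to the next 16-byte block (1 to 16 bytes of PKCS#7 padding) plus the 16-byte initialization vector. The 32-byte key below is made up for the example; real handlers build the encryptor from `remote.encryptionKey`.

```js
// Usage sketch, assuming it runs inside @xen-orchestra/fs, since _encryptor looks like an internal module.
const { _getEncryptor } = require('./_encryptor')
const { randomBytes } = require('crypto')

const key = randomBytes(32) // aes-256-cbc requires a 256-bit key
const encryptor = _getEncryptor(key)

const data = Buffer.from('{"random":"example"}') // 20 bytes of content
const encrypted = encryptor.encryptData(data)

// 16-byte IV + content padded up to the next 16-byte block: 16 + 32 = 48 bytes,
// i.e. 17 to 32 bytes more than the plaintext, which is why getSize() refuses
// to report sizes on an encrypted remote
console.log(encrypted.length) // 48

// round trip: decryptData strips the IV and the padding
console.log(encryptor.decryptData(encrypted).equals(data)) // true
```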