feat: server side of backup encryption

Florent Beauchamp, 2022-08-23 12:04:16 +02:00 (committed by Julien Fontanet)
parent 74d15e1a92
commit 1297c925ad
9 changed files with 239 additions and 55 deletions

View File

@@ -18,7 +18,10 @@ const handlerPath = require('@xen-orchestra/fs/path')
 // checking the size of a vhd directory is costly
 // 1 Http Query per 1000 blocks
 // we only check size of all the vhd are VhdFiles
-function shouldComputeVhdsSize(vhds) {
+function shouldComputeVhdsSize(handler, vhds) {
+  if (handler.isEncrypted) {
+    return false
+  }
   return vhds.every(vhd => vhd instanceof VhdFile)
 }
@@ -26,7 +29,7 @@ const computeVhdsSize = (handler, vhdPaths) =>
   Disposable.use(
     vhdPaths.map(vhdPath => openVhd(handler, vhdPath)),
     async vhds => {
-      if (shouldComputeVhdsSize(vhds)) {
+      if (shouldComputeVhdsSize(handler, vhds)) {
        const sizes = await asyncMap(vhds, vhd => vhd.getSize())
        return sum(sizes)
      }
@@ -486,7 +489,11 @@ exports.cleanVm = async function cleanVm(
       if (mode === 'full') {
         // a full backup : check size
         const linkedXva = resolve('/', vmDir, xva)
+        try {
           fileSystemSize = await handler.getSize(linkedXva)
+        } catch (error) {
+          // can fail with an encrypted remote
+        }
       } else if (mode === 'delta') {
         const linkedVhds = Object.keys(vhds).map(key => resolve('/', vmDir, vhds[key]))
         fileSystemSize = await computeVhdsSize(handler, linkedVhds)

View File

@@ -49,6 +49,11 @@ const isValidTar = async (handler, size, fd) => {
 // TODO: find an heuristic for compressed files
 async function isValidXva(path) {
   const handler = this._handler
+  // the size is larger when encrypted + reading only part of an encrypted file is not implemented
+  if (handler.isEncrypted) {
+    return true
+  }
   try {
     const fd = await handler.openFile(path, 'r')
     try {
@@ -66,7 +71,6 @@ async function isValidXva(path) {
     }
   } catch (error) {
     // never throw, log and report as valid to avoid side effects
-    console.error('isValidXva', path, error)
     return true
   }
 }

View File

@@ -0,0 +1,19 @@
## metadata files

- Older remotes don't have any metadata files
- Remotes used since 5.75 have two files: `encryption.json` and `metadata.json`

The metadata files are checked by the `sync()` method. If the check fails, it MUST throw an error and unmount the remote.
If the remote is empty, the `sync` method creates them.

### encryption.json

A non-encrypted file containing the algorithm and parameters used for this remote.
It MUST NOT contain the key.

### metadata.json

An encrypted JSON file containing the settings of a remote. Today it only holds a placeholder (`{ "random": "<random uuid>" }`) and serves to check that the encryption key set on the remote is valid; in the future it will be able to store some remote settings to ease disaster recovery.
If this file can't be read (decrypted, decompressed, …), it means the remote's encryption settings have been updated. If the remote is empty, update the `encryption.json` and `metadata.json` files; otherwise raise an error.
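
For illustration, once the remote is initialized the two files might look like this (the UUID is only a placeholder here):

- `encryption.json`, stored in clear text: `{"algorithm":"aes-256-cbc"}`
- `metadata.json`, once decrypted with the remote's key: `{"random":"<some random uuid>"}`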

View File

@@ -28,6 +28,7 @@
     "@vates/async-each": "^1.0.0",
     "@vates/coalesce-calls": "^0.1.0",
     "@vates/decorate-with": "^2.0.0",
+    "@vates/read-chunk": "^1.0.0",
     "@xen-orchestra/async-map": "^0.1.2",
     "@xen-orchestra/log": "^0.3.0",
     "bind-property-descriptor": "^2.0.0",
@@ -39,6 +40,7 @@
     "lodash": "^4.17.4",
     "promise-toolbox": "^0.21.0",
     "proper-lockfile": "^4.1.2",
+    "pumpify": "^2.0.1",
     "readable-stream": "^4.1.0",
     "through2": "^4.0.2",
     "xo-remote-parser": "^0.9.1"

View File

@@ -0,0 +1,71 @@
const { readChunk } = require('@vates/read-chunk')
const crypto = require('crypto')
const pumpify = require('pumpify')

function getEncryptor(key) {
  if (key === undefined) {
    return {
      id: 'NULL_ENCRYPTOR',
      algorithm: 'none',
      key: 'none',
      ivLength: 0,
      encryptData: buffer => buffer,
      encryptStream: stream => stream,
      decryptData: buffer => buffer,
      decryptStream: stream => stream,
    }
  }
  const algorithm = 'aes-256-cbc'
  const ivLength = 16

  function encryptStream(input) {
    const iv = crypto.randomBytes(ivLength)
    const cipher = crypto.createCipheriv(algorithm, Buffer.from(key), iv)

    const encrypted = pumpify(input, cipher)
    encrypted.unshift(iv)
    return encrypted
  }

  async function decryptStream(encryptedStream) {
    const iv = await readChunk(encryptedStream, ivLength)
    const cipher = crypto.createDecipheriv(algorithm, Buffer.from(key), iv)
    /**
     * WARNING
     *
     * the encrypted data contains an initialization vector and a padding at the end,
     * so we can't predict the decrypted size from the encrypted size alone,
     * thus, we can't set decrypted.length reliably
     *
     */
    return pumpify(encryptedStream, cipher)
  }

  function encryptData(buffer) {
    const iv = crypto.randomBytes(ivLength)
    const cipher = crypto.createCipheriv(algorithm, Buffer.from(key), iv)
    const encrypted = cipher.update(buffer)
    return Buffer.concat([iv, encrypted, cipher.final()])
  }

  function decryptData(buffer) {
    const iv = buffer.slice(0, ivLength)
    const encrypted = buffer.slice(ivLength)
    const decipher = crypto.createDecipheriv(algorithm, Buffer.from(key), iv)
    const decrypted = decipher.update(encrypted)
    return Buffer.concat([decrypted, decipher.final()])
  }

  return {
    id: algorithm,
    algorithm,
    key,
    ivLength,
    encryptData,
    encryptStream,
    decryptData,
    decryptStream,
  }
}

exports._getEncryptor = getEncryptor
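
A minimal usage sketch of this module (not part of the commit): it only assumes what the file above shows, plus the fact that aes-256-cbc requires a 32-byte key; in XO the key would come from the remote's `encryptionKey` setting rather than `randomBytes`.

```js
// Sketch: round-trip a buffer through the encryptor shown above.
const { randomBytes } = require('crypto')
const { _getEncryptor } = require('./_encryptor')

const encryptor = _getEncryptor(randomBytes(32)) // hypothetical 256-bit key

const clear = Buffer.from('{"random":"00000000-0000-0000-0000-000000000000"}')
const encrypted = encryptor.encryptData(clear)
// layout: 16-byte IV + ciphertext padded to the next 16-byte boundary
console.log(encrypted.length) // 80: 16 + 64 for this 49-byte payload

// decryptData reads the IV back from the first 16 bytes
console.log(encryptor.decryptData(encrypted).equals(clear)) // true
```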

View File

@@ -1,4 +1,5 @@
 import asyncMapSettled from '@xen-orchestra/async-map/legacy'
+import assert from 'assert'
 import getStream from 'get-stream'
 import { coalesceCalls } from '@vates/coalesce-calls'
 import { createLogger } from '@xen-orchestra/log'
@@ -6,13 +7,14 @@ import { fromCallback, fromEvent, ignoreErrors, timeout } from 'promise-toolbox'
 import { limitConcurrency } from 'limit-concurrency-decorator'
 import { parse } from 'xo-remote-parser'
 import { pipeline } from 'stream'
-import { randomBytes } from 'crypto'
+import { randomBytes, randomUUID } from 'crypto'
 import { synchronized } from 'decorator-synchronized'

 import { basename, dirname, normalize as normalizePath } from './path'
 import { createChecksumStream, validChecksumOfReadStream } from './checksum'
+import { _getEncryptor } from './_encryptor'

-const { warn } = createLogger('@xen-orchestra:fs')
+const { info, warn } = createLogger('@xen-orchestra:fs')

 const checksumFile = file => file + '.checksum'
 const computeRate = (hrtime, size) => {
@@ -23,6 +25,9 @@ const computeRate = (hrtime, size) => {
 const DEFAULT_TIMEOUT = 6e5 // 10 min
 const DEFAULT_MAX_PARALLEL_OPERATIONS = 10

+const ENCRYPTION_DESC_FILENAME = 'encryption.json'
+const ENCRYPTION_METADATA_FILENAME = 'metadata.json'
+
 const ignoreEnoent = error => {
   if (error == null || error.code !== 'ENOENT') {
     throw error
@@ -63,6 +68,7 @@ class PrefixWrapper {
 }

 export default class RemoteHandlerAbstract {
+  _encryptor
   constructor(remote, options = {}) {
     if (remote.url === 'test://') {
       this._remote = remote
@@ -73,6 +79,7 @@ export default class RemoteHandlerAbstract {
       }
     }
     ;({ highWaterMark: this._highWaterMark, timeout: this._timeout = DEFAULT_TIMEOUT } = options)
+    this._encryptor = _getEncryptor(this._remote.encryptionKey)

     const sharedLimit = limitConcurrency(options.maxParallelOperations ?? DEFAULT_MAX_PARALLEL_OPERATIONS)
     this.closeFile = sharedLimit(this.closeFile)
@@ -111,55 +118,51 @@ export default class RemoteHandlerAbstract {
     await this.__closeFile(fd)
   }

-  createReadStream(file, { checksum = false, ignoreMissingChecksum = false, ...options } = {}) {
+  async createReadStream(file, { checksum = false, ignoreMissingChecksum = false, ...options } = {}) {
+    if (options.end !== undefined || options.start !== undefined) {
+      assert.strictEqual(this.isEncrypted, false, `Can't read part of a file when encryption is active ${file}`)
+    }
     if (typeof file === 'string') {
       file = normalizePath(file)
     }
-    const path = typeof file === 'string' ? file : file.path
-    const streamP = timeout
-      .call(this._createReadStream(file, { ...options, highWaterMark: this._highWaterMark }), this._timeout)
-      .then(stream => {
-        // detect early errors
-        let promise = fromEvent(stream, 'readable')
-        // try to add the length prop if missing and not a range stream
-        if (stream.length === undefined && options.end === undefined && options.start === undefined) {
-          promise = Promise.all([
-            promise,
-            ignoreErrors.call(
-              this._getSize(file).then(size => {
-                stream.length = size
-              })
-            ),
-          ])
-        }
-        return promise.then(() => stream)
-      })
-    if (!checksum) {
-      return streamP
-    }
-    // avoid a unhandled rejection warning
-    ignoreErrors.call(streamP)
-    return this._readFile(checksumFile(path), { flags: 'r' }).then(
-      checksum =>
-        streamP.then(stream => {
-          const { length } = stream
-          stream = validChecksumOfReadStream(stream, String(checksum).trim())
-          stream.length = length
-          return stream
-        }),
-      error => {
-        if (ignoreMissingChecksum && error && error.code === 'ENOENT') {
-          return streamP
-        }
-        throw error
-      }
-    )
+
+    let stream = await timeout.call(
+      this._createReadStream(file, { ...options, highWaterMark: this._highWaterMark }),
+      this._timeout
+    )
+
+    // detect early errors
+    await fromEvent(stream, 'readable')
+
+    if (checksum) {
+      try {
+        const path = typeof file === 'string' ? file : file.path
+        const checksum = await this._readFile(checksumFile(path), { flags: 'r' })
+        const { length } = stream
+        stream = validChecksumOfReadStream(stream, String(checksum).trim())
+        stream.length = length
+      } catch (error) {
+        if (!(ignoreMissingChecksum && error.code === 'ENOENT')) {
+          throw error
+        }
+      }
+    }
+
+    if (this.isEncrypted) {
+      stream = this._encryptor.decryptStream(stream)
+    } else {
+      // try to add the length prop if missing and not a range stream
+      if (stream.length === undefined && options.end === undefined && options.start === undefined) {
+        try {
+          stream.length = await this._getSize(file)
+        } catch (error) {
+          // ignore errors
+        }
+      }
+    }
+    return stream
   }

   /**
@@ -175,6 +178,8 @@ export default class RemoteHandlerAbstract {
   async outputStream(path, input, { checksum = true, dirMode, validator } = {}) {
     path = normalizePath(path)
     let checksumStream
+
+    input = this._encryptor.encryptStream(input)
     if (checksum) {
       checksumStream = createChecksumStream()
       pipeline(input, checksumStream, noop)
@@ -185,6 +190,8 @@ export default class RemoteHandlerAbstract {
       validator,
     })
     if (checksum) {
+      // using _outputFile means the checksum will NOT be encrypted
+      // this is by design, to allow checking encrypted files without the key
       await this._outputFile(checksumFile(path), await checksumStream.checksum, { dirMode, flags: 'wx' })
     }
   }
@@ -204,8 +211,13 @@ export default class RemoteHandlerAbstract {
     return timeout.call(this._getInfo(), this._timeout)
   }

+  // when using encryption, the file size is aligned with the encryption block size (16 bytes)
+  // that means the size will be 1 to 16 bytes more than the content size, plus the initialization vector length (16 bytes)
   async getSize(file) {
-    return timeout.call(this._getSize(typeof file === 'string' ? normalizePath(file) : file), this._timeout)
+    assert.strictEqual(this.isEncrypted, false, `Can't compute size of an encrypted file ${file}`)
+    const size = await timeout.call(this._getSize(typeof file === 'string' ? normalizePath(file) : file), this._timeout)
+    return size - this._encryptor.ivLength
   }

   async list(dir, { filter, ignoreMissing = false, prependDir = false } = {}) {
@@ -251,15 +263,18 @@ export default class RemoteHandlerAbstract {
   }

   async outputFile(file, data, { dirMode, flags = 'wx' } = {}) {
-    await this._outputFile(normalizePath(file), data, { dirMode, flags })
+    const encryptedData = this._encryptor.encryptData(data)
+    await this._outputFile(normalizePath(file), encryptedData, { dirMode, flags })
   }

   async read(file, buffer, position) {
+    assert.strictEqual(this.isEncrypted, false, `Can't read part of an encrypted file ${file}`)
     return this._read(typeof file === 'string' ? normalizePath(file) : file, buffer, position)
   }

   async readFile(file, { flags = 'r' } = {}) {
-    return this._readFile(normalizePath(file), { flags })
+    const data = await this._readFile(normalizePath(file), { flags })
+    return this._encryptor.decryptData(data)
   }

   async rename(oldPath, newPath, { checksum = false } = {}) {
@@ -299,6 +314,61 @@ export default class RemoteHandlerAbstract {
   @synchronized()
   async sync() {
     await this._sync()
+    try {
+      await this._checkMetadata()
+    } catch (error) {
+      await this._forget()
+      throw error
+    }
+  }
+
+  async _canWriteMetadata() {
+    const list = await this.list('/', {
+      filter: e => !e.startsWith('.') && e !== ENCRYPTION_DESC_FILENAME && e !== ENCRYPTION_METADATA_FILENAME,
+    })
+    return list.length === 0
+  }
+
+  async _createMetadata() {
+    await Promise.all([
+      this._writeFile(
+        normalizePath(ENCRYPTION_DESC_FILENAME),
+        JSON.stringify({ algorithm: this._encryptor.algorithm }),
+        {
+          flags: 'w',
+        }
+      ), // not encrypted
+      this.writeFile(ENCRYPTION_METADATA_FILENAME, `{"random":"${randomUUID()}"}`, { flags: 'w' }), // encrypted
+    ])
+  }
+
+  async _checkMetadata() {
+    try {
+      // this file is not encrypted
+      const data = await this._readFile(normalizePath(ENCRYPTION_DESC_FILENAME))
+      JSON.parse(data)
+    } catch (error) {
+      if (error.code !== 'ENOENT') {
+        throw error
+      }
+    }
+
+    try {
+      // this file is encrypted
+      const data = await this.readFile(ENCRYPTION_METADATA_FILENAME)
+      JSON.parse(data)
+    } catch (error) {
+      if (error.code === 'ENOENT' || (await this._canWriteMetadata())) {
+        info('will update metadata of this remote')
+        return this._createMetadata()
+      }
+      warn(
+        `The encryptionKey setting of this remote does not match the key used to create it. You won't be able to read any data from this remote`,
+        { error }
+      )
+      // will probably throw an ERR_OSSL_EVP_BAD_DECRYPT if the key is incorrect
+      throw error
+    }
   }

   async test() {
@@ -352,11 +422,13 @@ export default class RemoteHandlerAbstract {
   }

   async write(file, buffer, position) {
+    assert.strictEqual(this.isEncrypted, false, `Can't write part of a file with encryption ${file}`)
     await this._write(typeof file === 'string' ? normalizePath(file) : file, buffer, position)
   }

   async writeFile(file, data, { flags = 'wx' } = {}) {
-    await this._writeFile(normalizePath(file), data, { flags })
+    const encryptedData = this._encryptor.encryptData(data)
+    await this._writeFile(normalizePath(file), encryptedData, { flags })
   }

   // Methods that can be called by private methods to avoid parallel limit on public methods
@@ -564,6 +636,10 @@ export default class RemoteHandlerAbstract {
   async _writeFile(file, data, options) {
     throw new Error('Not implemented')
   }
+
+  get isEncrypted() {
+    return this._encryptor.id !== 'NULL_ENCRYPTOR'
+  }
 }

 function createPrefixWrapperMethods() {
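
To make the size comment above concrete, here is a rough sketch (not part of the commit) of the arithmetic it describes; it only assumes the standard behaviour of aes-256-cbc (PKCS#7 padding to 16-byte blocks) and the 16-byte IV that the encryptor prepends.

```js
// Sketch of the on-remote size of an encrypted file vs its content size.
const IV_LENGTH = 16
const BLOCK_SIZE = 16

function storedSize(contentSize) {
  // CBC padding always adds 1 to 16 bytes so the ciphertext is block aligned
  const padding = BLOCK_SIZE - (contentSize % BLOCK_SIZE)
  return IV_LENGTH + contentSize + padding
}

console.log(storedSize(1000)) // 1024: 16-byte IV + 1008 bytes of padded ciphertext
// the patched getSize() subtracts the IV, so it would report 1008,
// still 1 to 16 bytes more than the real content size (1000)
console.log(storedSize(1000) - IV_LENGTH) // 1008
```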

View File

@@ -83,6 +83,7 @@ exports.VhdFile = class VhdFile extends VhdAbstract {
   }

   static async open(handler, path, { flags, checkSecondFooter = true } = {}) {
+    assert(!handler.isEncrypted, `VHDFile implementation is not compatible with encrypted remote`)
     const fd = await handler.openFile(path, flags ?? 'r+')
     const vhd = new VhdFile(handler, fd)
     // openning a file for reading does not trigger EISDIR as long as we don't really read from it :

View File

@@ -12,11 +12,14 @@ exports.resolveVhdAlias = async function resolveVhdAlias(handler, filename) {
   if (!isVhdAlias(filename)) {
     return filename
   }
+  if (!handler.isEncrypted) {
     const size = await handler.getSize(filename)
     if (size > ALIAS_MAX_PATH_LENGTH) {
       // seems reasonnable for a relative path
       throw new Error(`The alias file ${filename} is too big (${size} bytes)`)
     }
+  }
   const aliasContent = (await handler.readFile(filename)).toString().trim()
   // also handle circular references and unreasonnably long chains
   if (isVhdAlias(aliasContent)) {

View File

@@ -9,7 +9,8 @@ exports.openVhd = async function openVhd(handler, path, opts) {
   try {
     return await VhdFile.open(handler, resolved, opts)
   } catch (e) {
-    if (e.code !== 'EISDIR') {
+    // if the remote is encrypted, trying to open a VhdFile throws an assertion error before checking whether the path is a directory, so we should try to open a VhdDirectory anyway
+    if (e.code !== 'EISDIR' && e.code !== 'ERR_ASSERTION') {
       throw e
     }
     return await VhdDirectory.open(handler, resolved, opts)