feat(vhd-lib): add VhdAbstract#{stream,rawContent}() methods (#5992)

This commit is contained in:
Florent BEAUCHAMP 2021-11-17 09:16:34 +01:00 committed by GitHub
parent 5c8ebce9eb
commit a4bb453401
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
5 changed files with 226 additions and 39 deletions

View File

@@ -1,13 +1,18 @@
import { createContentStream } from 'vhd-lib'
import { getHandler } from '@xen-orchestra/fs'
import { openVhd } from 'vhd-lib'
import { getSyncedHandler } from '@xen-orchestra/fs'
import { resolve } from 'path'
import { writeStream } from '../_utils'
import { Disposable } from 'promise-toolbox'
/**
 * CLI command: dump the raw (flat) content of a VHD to a stream.
 *
 * Usage: <input VHD> [<output raw>]
 *
 * @param {string[]} args - CLI arguments: args[0] = path to the input VHD,
 *   args[1] = destination passed to writeStream (stdout when absent).
 * @returns {Promise<string|void>} the usage string when arguments are
 *   missing or -h/--help is requested, otherwise resolves when the copy is done.
 */
export default async args => {
  if (args.length < 2 || args.some(_ => _ === '-h' || _ === '--help')) {
    // NOTE(review): `this` is undefined in a module-scope arrow function —
    // presumably the CLI runner binds `command`; confirm against the caller.
    return `Usage: ${this.command} <input VHD> [<output raw>]`
  }

  await Disposable.use(async function* () {
    // disposables must be yielded inside Disposable.use so they are
    // resolved now and released when the generator completes
    const handler = yield getSyncedHandler({ url: 'file:///' })
    const vhd = yield openVhd(handler, resolve(args[0]))
    // forward the destination (args[1]) to writeStream
    await writeStream(vhd.rawContent(), args[1])
  })
}

View File

@@ -7,8 +7,10 @@ import { getSyncedHandler } from '@xen-orchestra/fs'
import { Disposable, pFromCallback } from 'promise-toolbox'
import { openVhd } from '../index'
import { createRandomFile, convertFromRawToVhd, createRandomVhdDirectory } from '../tests/utils'
import { checkFile, createRandomFile, convertFromRawToVhd, createRandomVhdDirectory } from '../tests/utils'
import { VhdAbstract } from './VhdAbstract'
import { SECTOR_SIZE } from '../../dist/_constants'
import { BLOCK_UNUSED, FOOTER_SIZE, HEADER_SIZE } from '../_constants'
let tempDir
@@ -22,6 +24,15 @@ afterEach(async () => {
await pFromCallback(cb => rimraf(tempDir, cb))
})
/**
 * Collects a readable stream into a single Buffer.
 *
 * @param {stream.Readable} stream - stream emitting Buffer chunks
 * @returns {Promise<Buffer>} concatenation of all chunks; rejects on stream error
 */
const streamToBuffer = stream => {
  const chunks = []
  return new Promise((resolve, reject) => {
    // without this handler a failing stream would leave the promise pending forever
    stream.on('error', reject)
    // accumulate chunks and concat once at the end instead of re-concatenating
    // on every 'data' event (which is O(n²) in total bytes)
    stream.on('data', chunk => chunks.push(chunk))
    stream.on('end', () => resolve(Buffer.concat(chunks)))
  })
}
test('It creates an alias', async () => {
await Disposable.use(async function* () {
const handler = yield getSyncedHandler({ url: 'file://' + tempDir })
@@ -139,3 +150,104 @@ test('It create , rename and unlink alias', async () => {
expect(await fs.exists(aliasFileNameRenamed)).toEqual(false)
})
})
// Checks that VhdAbstract#stream() produces a valid dynamic VHD file:
// footer + header + BAT, then the allocated blocks, then the footer again,
// and that a block marked BLOCK_UNUSED in the BAT is omitted from the output.
test('it can create a vhd stream', async () => {
  const initialNbBlocks = 3
  // size passed to createRandomFile — presumably in MB, 2 MB per block; TODO confirm
  const initalSize = initialNbBlocks * 2
  const rawFileName = `${tempDir}/randomfile`
  await createRandomFile(rawFileName, initalSize)
  const vhdFileName = `${tempDir}/vhd.vhd`
  await convertFromRawToVhd(rawFileName, vhdFileName)
  const bat = Buffer.alloc(512)
  await Disposable.use(async function* () {
    const handler = yield getSyncedHandler({ url: 'file://' + tempDir })
    const vhd = yield openVhd(handler, 'vhd.vhd')
    // mark first block as unused by patching the on-disk BAT directly
    await handler.read('vhd.vhd', bat, vhd.header.tableOffset)
    bat.writeUInt32BE(BLOCK_UNUSED, 0)
    await handler.write('vhd.vhd', bat, vhd.header.tableOffset)
    // read our modified bat
    await vhd.readBlockAllocationTable()
    const stream = vhd.stream()
    // read all the stream into a buffer
    const buffer = await streamToBuffer(stream)
    const length = buffer.length
    // data blocks start right after footer, header and BAT
    const start = FOOTER_SIZE + HEADER_SIZE + vhd.batSize
    const footer = buffer.slice(0, 512)
    // 1 deleted block should not be in output
    expect(length).toEqual(start + (initialNbBlocks - 1) * vhd.fullBlockSize + FOOTER_SIZE)
    // blocks: each remaining block's data must match the source raw file
    const blockBuf = Buffer.alloc(vhd.sectorsPerBlock * SECTOR_SIZE, 0)
    for (let i = 1; i < initialNbBlocks; i++) {
      const blockDataStart = start + (i - 1) * vhd.fullBlockSize + 512 /* block bitmap */
      const blockDataEnd = blockDataStart + vhd.sectorsPerBlock * SECTOR_SIZE
      const content = buffer.slice(blockDataStart, blockDataEnd)
      await handler.read('randomfile', blockBuf, i * vhd.sectorsPerBlock * SECTOR_SIZE)
      expect(content).toEqual(blockBuf)
    }
    // footer: a VHD ends with a copy of the footer that opens it
    const endFooter = buffer.slice(length - 512)
    expect(footer).toEqual(endFooter)
    await handler.writeFile('out.vhd', buffer)
    // check that the vhd is still valid
    await checkFile(`${tempDir}/out.vhd`)
  })
})
// Checks that VhdAbstract#rawContent() streams the flat disk image:
// a deleted block reads back as zeroes, every other block matches the
// source raw file, and the last block is truncated to the disk size.
it('can stream content', async () => {
  const initalSizeMb = 5 // 2 blocks and a half (2 MB per block)
  const initialByteSize = initalSizeMb * 1024 * 1024
  const rawFileName = `${tempDir}/randomfile`
  await createRandomFile(rawFileName, initalSizeMb)
  const vhdFileName = `${tempDir}/vhd.vhd`
  await convertFromRawToVhd(rawFileName, vhdFileName)
  const bat = Buffer.alloc(512)
  await Disposable.use(async function* () {
    const handler = yield getSyncedHandler({ url: 'file://' + tempDir })
    const vhd = yield openVhd(handler, 'vhd.vhd')
    // mark first block as unused by patching the on-disk BAT directly
    await handler.read('vhd.vhd', bat, vhd.header.tableOffset)
    bat.writeUInt32BE(BLOCK_UNUSED, 0)
    await handler.write('vhd.vhd', bat, vhd.header.tableOffset)
    // read our modified block allocation table
    await vhd.readBlockAllocationTable()
    const stream = vhd.rawContent()
    const buffer = await streamToBuffer(stream)

    // qemu can modify size, to align it to geometry
    // check that data didn't change
    const blockDataLength = vhd.sectorsPerBlock * SECTOR_SIZE
    // number of blocks covering the raw file, last one possibly partial
    // (was previously an undefined reference to the other test's variable)
    const initialNbBlocks = Math.ceil(initialByteSize / blockDataLength)
    // first block was deleted, so it must read back as zeroes
    const EMPTY = Buffer.alloc(blockDataLength, 0)
    const firstBlock = buffer.slice(0, blockDataLength)
    // using buffer1 toEquals buffer2 make jest crash trying to stringify it on failure
    expect(firstBlock.equals(EMPTY)).toEqual(true)

    let remainingLength = initialByteSize - blockDataLength // already checked the first block
    for (let i = 1; i < initialNbBlocks; i++) {
      // last block will be truncated: compare only the bytes that exist
      // (previously `remainingLength - blockDataLength`, which under-read every block)
      const blockSize = Math.min(blockDataLength, remainingLength)
      const blockDataStart = i * blockDataLength
      const blockDataEnd = blockDataStart + blockSize
      const content = buffer.slice(blockDataStart, blockDataEnd)
      const blockBuf = Buffer.alloc(blockSize, 0)
      await handler.read('randomfile', blockBuf, i * blockDataLength)
      expect(content.equals(blockBuf)).toEqual(true)
      remainingLength -= blockSize
    }
  })
})

View File

@@ -1,9 +1,18 @@
import { computeBatSize, sectorsRoundUpNoZero, sectorsToBytes } from './_utils'
import { PLATFORM_NONE, SECTOR_SIZE, PLATFORM_W2KU, PARENT_LOCATOR_ENTRIES } from '../_constants'
import { resolveAlias, isVhdAlias } from '../_resolveAlias'
import {
PLATFORM_NONE,
SECTOR_SIZE,
PLATFORM_W2KU,
PARENT_LOCATOR_ENTRIES,
FOOTER_SIZE,
HEADER_SIZE,
BLOCK_UNUSED,
} from '../_constants'
import assert from 'assert'
import path from 'path'
import asyncIteratorToStream from 'async-iterator-to-stream'
import { checksumStruct, fuFooter, fuHeader } from '../_structs'
import { isVhdAlias, resolveAlias } from '../_resolveAlias'
export class VhdAbstract {
#header
@@ -212,4 +221,97 @@ export class VhdAbstract {
const relativePathToTarget = path.relative(aliasDir, path.resolve('/', targetPath))
await handler.writeFile(aliasPath, relativePathToTarget)
}
/**
 * Builds a readable stream serializing this VHD in dynamic-VHD file format:
 * footer, header, BAT, parent locators, allocated blocks, then the footer
 * again at the end of the file.
 *
 * The returned stream exposes the total byte size as `stream.length`.
 *
 * @returns {stream.Readable} stream of the full VHD file content
 */
stream() {
  const { footer, batSize } = this
  const { ...header } = this.header // shallow copy since we don't want to modify the current header
  const rawFooter = fuFooter.pack(footer)
  checksumStruct(rawFooter, fuFooter)

  // compute parent locator place and size
  // update them in header
  // update checksum in header
  let offset = FOOTER_SIZE + HEADER_SIZE + batSize
  for (let i = 0; i < PARENT_LOCATOR_ENTRIES; i++) {
    // copy each entry before mutating its offset, keeping the live header intact
    const { ...entry } = header.parentLocatorEntry[i]
    if (entry.platformDataSpace > 0) {
      entry.platformDataOffset = offset
      offset += entry.platformDataSpace
    }
    header.parentLocatorEntry[i] = entry
  }
  const rawHeader = fuHeader.pack(header)
  checksumStruct(rawHeader, fuHeader)

  // block data must be sector-aligned; platformDataSpace is presumably a
  // multiple of SECTOR_SIZE — this assert enforces it
  assert.strictEqual(offset % SECTOR_SIZE, 0)
  const bat = Buffer.allocUnsafe(batSize)
  let offsetSector = offset / SECTOR_SIZE
  const blockSizeInSectors = this.fullBlockSize / SECTOR_SIZE

  // compute BAT , blocks starts after parent locator entries
  for (let i = 0; i < header.maxTableEntries; i++) {
    if (this.containsBlock(i)) {
      // BAT entries are sector offsets, big-endian, 4 bytes each
      bat.writeUInt32BE(offsetSector, i * 4)
      offsetSector += blockSizeInSectors
    } else {
      bat.writeUInt32BE(BLOCK_UNUSED, i * 4)
    }
  }
  const fileSize = offsetSector * SECTOR_SIZE + FOOTER_SIZE /* the footer at the end */

  const self = this
  async function* iterator() {
    yield rawFooter
    yield rawHeader
    yield bat

    // yield parent locator entries
    for (let i = 0; i < PARENT_LOCATOR_ENTRIES; i++) {
      if (header.parentLocatorEntry[i].platformDataSpace > 0) {
        const parentLocator = await self.readParentLocator(i)
        // @ todo pad to platformDataSpace
        yield parentLocator.data
      }
    }

    // yield all blocks
    // since contains() can be costly for synthetic vhd, use the computed bat
    for (let i = 0; i < header.maxTableEntries; i++) {
      if (bat.readUInt32BE(i * 4) !== BLOCK_UNUSED) {
        const block = await self.readBlock(i)
        yield block.buffer
      }
    }
    // yield footer again
    yield rawFooter
  }

  const stream = asyncIteratorToStream(iterator())
  stream.length = fileSize
  return stream
}
rawContent() {
const { header, footer } = this
const { blockSize } = header
const self = this
async function* iterator() {
const nBlocks = header.maxTableEntries
let remainingSize = footer.currentSize
const EMPTY = Buffer.alloc(blockSize, 0)
for (let blockId = 0; blockId < nBlocks; ++blockId) {
let buffer = self.containsBlock(blockId) ? (await self.readBlock(blockId)).data : EMPTY
// the last block can be truncated since raw size is not a multiple of blockSize
buffer = remainingSize < blockSize ? buffer.slice(0, remainingSize) : buffer
remainingSize -= blockSize
yield buffer
}
}
const stream = asyncIteratorToStream(iterator())
stream.length = footer.currentSize
return stream
}
}

View File

@@ -1,31 +0,0 @@
import asyncIteratorToStream from 'async-iterator-to-stream'
import { VhdFile } from '.'

/**
 * Streams the flat (raw) content of the VHD at `path`: every block in order,
 * unallocated blocks replaced by zeroes, truncated to the virtual disk size.
 *
 * @param {object} handler - fs handler used to open and read the file
 * @param {string} path - path of the VHD file
 * @returns {stream.Readable} stream of the raw disk image
 */
export default asyncIteratorToStream(async function* (handler, path) {
  const fd = await handler.openFile(path, 'r')
  try {
    const vhd = new VhdFile(handler, fd)
    await vhd.readHeaderAndFooter()
    await vhd.readBlockAllocationTable()

    const { currentSize } = vhd.footer
    const { blockSize } = vhd.header
    const zeroes = Buffer.alloc(blockSize)
    // block data when allocated, shared zero buffer otherwise
    const blockOrZeroes = async i => (vhd.containsBlock(i) ? (await vhd.readBlock(i)).data : zeroes)

    const fullBlockCount = Math.floor(currentSize / blockSize)
    const tailBytes = currentSize % blockSize

    for (let i = 0; i < fullBlockCount; ++i) {
      yield await blockOrZeroes(i)
    }
    // raw size is not a multiple of blockSize: emit the truncated last block
    if (tailBytes !== 0) {
      yield (await blockOrZeroes(fullBlockCount)).slice(0, tailBytes)
    }
  } finally {
    // always release the file descriptor, even when reading fails
    await handler.closeFile(fd)
  }
})

View File

@@ -1,7 +1,6 @@
export { default as chainVhd } from './chain'
export { default as checkFooter } from './checkFooter'
export { default as checkVhdChain } from './checkChain'
export { default as createContentStream } from './createContentStream'
export { default as createReadableRawStream } from './createReadableRawStream'
export { default as createReadableSparseStream } from './createReadableSparseStream'
export { default as createSyntheticStream } from './createSyntheticStream'