Compare commits

21 Commits

xo-server-
...
xo-server-

| Author | SHA1 | Date |
|---|---|---|
| | 1d29348e30 | |
| | a24db3f896 | |
| | cffac27d0a | |
| | b207cbdd77 | |
| | 10baecefb9 | |
| | 42620323a9 | |
| | 4d91006994 | |
| | a81f0b9a93 | |
| | 2cee413ae1 | |
| | 53099eacc8 | |
| | b628c5c07e | |
| | 12889b6a09 | |
| | 0c23ca5b66 | |
| | d732ee3ade | |
| | 65cb0bc4cf | |
| | 1ba68a94e3 | |
| | 084430451a | |
| | 458a4d4efe | |
| | 62eeab2a74 | |
| | 790b43910d | |
| | ba65461c4d | |
@@ -92,6 +92,22 @@ export default class RemoteHandlerAbstract {
    await promise
  }

  async read (
    file: File,
    buffer: Buffer,
    position?: number
  ): Promise<{| bytesRead: number, buffer: Buffer |}> {
    return this._read(file, buffer, position)
  }

  _read (
    file: File,
    buffer: Buffer,
    position?: number
  ): Promise<{| bytesRead: number, buffer: Buffer |}> {
    throw new Error('Not implemented')
  }

  async readFile (file: string, options?: Object): Promise<Buffer> {
    return this._readFile(file, options)
  }

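The hunk above adds a generic `read (file, buffer, position)` method to `RemoteHandlerAbstract`, returning `{ bytesRead, buffer }` in the style of Node's `fs.read`. A minimal usage sketch (the handler instance and the path are placeholders for illustration; the real caller introduced by this compare is the `Vhd#_read` change further down):

```js
// Illustration only, not part of the diff: reading the first 512 bytes of a
// remote file through the new handler read() API.
async function readFirstSector (handler) {
  const { bytesRead, buffer } = await handler.read(
    'some-file.vhd', // hypothetical path on the remote
    Buffer.alloc(512), // the caller supplies the destination buffer
    0 // optional position, as with fs.read
  )
  console.log(bytesRead, buffer.length)
}
```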
@@ -50,6 +50,24 @@ export default class LocalHandler extends RemoteHandlerAbstract {
    await fs.writeFile(path, data, options)
  }

  async _read (file, buffer, position) {
    const needsClose = typeof file === 'string'
    file = needsClose ? await fs.open(this._getFilePath(file), 'r') : file.fd
    try {
      return await fs.read(
        file,
        buffer,
        0,
        buffer.length,
        position === undefined ? null : position
      )
    } finally {
      if (needsClose) {
        await fs.close(file)
      }
    }
  }

  async _readFile (file, options) {
    return fs.readFile(this._getFilePath(file), options)
  }

@@ -8,9 +8,13 @@
- Create new VDI from SR view not attached to any VM [#2229](https://github.com/vatesfr/xen-orchestra/issues/2229)
- [Patches] ignore XS upgrade in missing patches counter [#2866](https://github.com/vatesfr/xen-orchestra/issues/2866)
- [Health] List VM snapshots related to non-existing backup jobs/schedules [#2828](https://github.com/vatesfr/xen-orchestra/issues/2828)
- [Delta Backup NG logs] Display whether the export is a full or a delta [#2711](https://github.com/vatesfr/xen-orchestra/issues/2711)

### Bugs

- update the xentools search item to return the version number of installed xentools [#3015](https://github.com/vatesfr/xen-orchestra/issues/3015)
- Fix Nagios backup reports [#2991](https://github.com/vatesfr/xen-orchestra/issues/2991)

## **5.19.0** (2018-05-01)

### Enhancements

@@ -1,5 +1,8 @@
### Check list

> Check items when done or if not relevant

- [ ] PR reference the relevant issue (e.g. `Fixes #007`)
- [ ] if UI changes, a screenshot has been added to the PR
- [ ] CHANGELOG updated
- [ ] documentation updated

@@ -24,7 +24,6 @@
    "async-iterator-to-stream": "^1.0.2",
    "from2": "^2.3.0",
    "fs-extra": "^6.0.1",
    "get-stream": "^3.0.0",
    "limit-concurrency-decorator": "^0.4.0",
    "promise-toolbox": "^0.9.5",
    "struct-fu": "^1.2.0",
@@ -41,6 +40,7 @@
    "cross-env": "^5.1.3",
    "execa": "^0.10.0",
    "fs-promise": "^2.0.0",
    "get-stream": "^3.0.0",
    "index-modules": "^0.3.0",
    "rimraf": "^2.6.2",
    "tmp": "^0.0.33"

@@ -28,7 +28,7 @@ function createBAT (
) {
  let currentVhdPositionSector = firstBlockPosition / SECTOR_SIZE
  blockAddressList.forEach(blockPosition => {
    assert.strictEqual(blockPosition % 512, 0)
    assert.strictEqual(blockPosition % SECTOR_SIZE, 0)
    const vhdTableIndex = Math.floor(blockPosition / VHD_BLOCK_SIZE_BYTES)
    if (bat.readUInt32BE(vhdTableIndex * 4) === BLOCK_UNUSED) {
      bat.writeUInt32BE(currentVhdPositionSector, vhdTableIndex * 4)
@@ -57,7 +57,8 @@ export default asyncIteratorToStream(async function * (
  }

  const maxTableEntries = Math.ceil(diskSize / VHD_BLOCK_SIZE_BYTES) + 1
  const tablePhysicalSizeBytes = Math.ceil(maxTableEntries * 4 / 512) * 512
  const tablePhysicalSizeBytes =
    Math.ceil(maxTableEntries * 4 / SECTOR_SIZE) * SECTOR_SIZE

  const batPosition = FOOTER_SIZE + HEADER_SIZE
  const firstBlockPosition = batPosition + tablePhysicalSizeBytes
@@ -101,13 +102,14 @@ export default asyncIteratorToStream(async function * (
    if (currentVhdBlockIndex >= 0) {
      yield * yieldAndTrack(
        currentBlockWithBitmap,
        bat.readUInt32BE(currentVhdBlockIndex * 4) * 512
        bat.readUInt32BE(currentVhdBlockIndex * 4) * SECTOR_SIZE
      )
    }
    currentBlockWithBitmap = Buffer.alloc(bitmapSize + VHD_BLOCK_SIZE_BYTES)
    currentVhdBlockIndex = batIndex
  }
  const blockOffset = (next.offsetBytes / 512) % VHD_BLOCK_SIZE_SECTORS
  const blockOffset =
    (next.offsetBytes / SECTOR_SIZE) % VHD_BLOCK_SIZE_SECTORS
  for (let bitPos = 0; bitPos < VHD_BLOCK_SIZE_SECTORS / ratio; bitPos++) {
    setBitmap(currentBlockWithBitmap, blockOffset + bitPos)
  }

@@ -1,5 +1,4 @@
import assert from 'assert'
import getStream from 'get-stream'
import { fromEvent } from 'promise-toolbox'

import constantStream from './_constant-stream'
@@ -93,20 +92,14 @@ export default class Vhd {
  // Read functions.
  // =================================================================

  _readStream (start, n) {
    return this._handler.createReadStream(this._path, {
      start,
      end: start + n - 1, // end is inclusive
    })
  }

  _read (start, n) {
    return this._readStream(start, n)
      .then(getStream.buffer)
      .then(buf => {
        assert.equal(buf.length, n)
        return buf
      })
  async _read (start, n) {
    const { bytesRead, buffer } = await this._handler.read(
      this._path,
      Buffer.alloc(n),
      start
    )
    assert.equal(bytesRead, n)
    return buffer
  }

  containsBlock (id) {
@@ -336,11 +329,11 @@ export default class Vhd {
      `freeFirstBlockSpace: move first block ${firstSector} -> ${newFirstSector}`
    )
    // copy the first block at the end
    const stream = await this._readStream(
    const block = await this._read(
      sectorsToBytes(firstSector),
      fullBlockSize
    )
    await this._write(stream, sectorsToBytes(newFirstSector))
    await this._write(block, sectorsToBytes(newFirstSector))
    await this._setBatEntry(first, newFirstSector)
    await this.writeFooter(true)
    spaceNeededBytes -= this.fullBlockSize

@@ -1,6 +1,6 @@
{
  "name": "xo-server-backup-reports",
  "version": "0.12.0",
  "version": "0.12.1",
  "license": "AGPL-3.0",
  "description": "Backup reports plugin for XO-Server",
  "keywords": [

@@ -62,10 +62,12 @@ const formatSize = bytes =>
|
||||
})
|
||||
|
||||
const formatSpeed = (bytes, milliseconds) =>
|
||||
humanFormat(bytes * 1e3 / milliseconds, {
|
||||
scale: 'binary',
|
||||
unit: 'B/s',
|
||||
})
|
||||
milliseconds > 0
|
||||
? humanFormat(bytes * 1e3 / milliseconds, {
|
||||
scale: 'binary',
|
||||
unit: 'B/s',
|
||||
})
|
||||
: 'N/A'
|
||||
|
||||
const logError = e => {
|
||||
console.error('backup report error:', e)
|
||||
@@ -115,18 +117,19 @@ class BackupReportsXoPlugin {
|
||||
const log = await xo.getBackupNgLogs(runJobId)
|
||||
|
||||
const { reportWhen, mode } = log.data || {}
|
||||
if (reportWhen === 'never') {
|
||||
return
|
||||
}
|
||||
|
||||
const formatDate = createDateFormater(timezone)
|
||||
|
||||
if (log.status === 'success' && reportWhen === 'failure') {
|
||||
if (
|
||||
reportWhen === 'never' ||
|
||||
(log.status === 'success' && reportWhen === 'failure')
|
||||
) {
|
||||
return
|
||||
}
|
||||
|
||||
const jobName = (await xo.getJob(log.jobId, 'backup')).name
|
||||
if (log.result !== undefined) {
|
||||
const formatDate = createDateFormater(timezone)
|
||||
if (
|
||||
(log.status === 'failure' || log.status === 'skipped') &&
|
||||
log.result !== undefined
|
||||
) {
|
||||
let markdown = [
|
||||
`## Global status: ${log.status}`,
|
||||
'',
|
||||
@@ -239,11 +242,26 @@ class BackupReportsXoPlugin {
|
||||
}
|
||||
|
||||
forEach(subTaskLog.tasks, operationLog => {
|
||||
const size = operationLog.result.size
|
||||
if (operationLog.message === 'merge') {
|
||||
globalMergeSize += size
|
||||
const operationInfoText = []
|
||||
if (operationLog.status === 'success') {
|
||||
const size = operationLog.result.size
|
||||
if (operationLog.message === 'merge') {
|
||||
globalMergeSize += size
|
||||
} else {
|
||||
globalTransferSize += size
|
||||
}
|
||||
|
||||
operationInfoText.push(
|
||||
` - **Size**: ${formatSize(size)}`,
|
||||
` - **Speed**: ${formatSpeed(
|
||||
size,
|
||||
operationLog.end - operationLog.start
|
||||
)}`
|
||||
)
|
||||
} else {
|
||||
globalTransferSize += size
|
||||
operationInfoText.push(
|
||||
` - **Error**: ${get(operationLog.result, 'message')}`
|
||||
)
|
||||
}
|
||||
const operationText = [
|
||||
` - **${operationLog.message}** ${
|
||||
@@ -254,13 +272,7 @@ class BackupReportsXoPlugin {
|
||||
` - **Duration**: ${formatDuration(
|
||||
operationLog.end - operationLog.start
|
||||
)}`,
|
||||
operationLog.status === 'failure'
|
||||
? `- **Error**: ${get(operationLog.result, 'message')}`
|
||||
: ` - **Size**: ${formatSize(size)}`,
|
||||
` - **Speed**: ${formatSpeed(
|
||||
size,
|
||||
operationLog.end - operationLog.start
|
||||
)}`,
|
||||
...operationInfoText,
|
||||
].join('\n')
|
||||
if (get(subTaskLog, 'data.type') === 'remote') {
|
||||
remotesText.push(operationText)
|
||||
@@ -316,7 +328,7 @@ class BackupReportsXoPlugin {
|
||||
++nFailures
|
||||
failedVmsText.push(...text, '', '', ...subText, '')
|
||||
nagiosText.push(
|
||||
`[(Failed) ${
|
||||
`[${
|
||||
vm !== undefined ? vm.name_label : 'undefined'
|
||||
}: (failed)[${failedSubTasks.toString()}]]`
|
||||
)
|
||||
@@ -407,7 +419,7 @@ class BackupReportsXoPlugin {
|
||||
}),
|
||||
xo.sendPassiveCheck !== undefined &&
|
||||
xo.sendPassiveCheck({
|
||||
nagiosStatus,
|
||||
status: nagiosStatus,
|
||||
message: nagiosMarkdown,
|
||||
}),
|
||||
])
|
||||
|
||||
@@ -1,6 +1,6 @@
{
  "name": "xo-server-usage-report",
  "version": "0.4.2",
  "version": "0.5.0",
  "license": "AGPL-3.0",
  "description": "",
  "keywords": [

@@ -1,6 +1,6 @@
{
  "name": "xo-server",
  "version": "5.19.9",
  "version": "5.20.0",
  "license": "AGPL-3.0",
  "description": "Server part of Xen-Orchestra",
  "keywords": [

@@ -1,5 +1,12 @@
// FIXME so far, no acls for jobs

export function cancel ({ runId }) {
  return this.cancelJobRun(runId)
}

cancel.permission = 'admin'
cancel.description = 'Cancel a current run'

export async function getAll () {
  return /* await */ this.getAllJobs('call')
}

@@ -244,8 +244,9 @@ const TRANSFORMS = {
    }

    return {
      major,
      minor,
      major: +major,
      minor: +minor,
      version: +`${major}.${minor}`,
    }
  })()

@@ -1083,7 +1083,7 @@ export default class Xapi extends XapiBase {
          .once('finish', () => {
            transferSize += sizeStream.size
          })
        stream.task = sizeStream.task
        sizeStream.task = stream.task
        await this._importVdiContent(vdi, sizeStream, VDI_FORMAT_VHD)
      }
    }),

@@ -11,7 +11,7 @@ const getStatus = (
  status = error === undefined ? 'success' : 'failure'
) => (status === 'failure' && isSkippedError(error) ? 'skipped' : status)

const computeStatus = (status, tasks) => {
const computeStatusAndSortTasks = (status, tasks) => {
  if (status === 'failure' || tasks === undefined) {
    return status
  }
@@ -26,9 +26,27 @@ const computeStatus = (status, tasks) => {
    }
  }

  tasks.sort(taskTimeComparator)

  return status
}

const taskTimeComparator = ({ start: s1, end: e1 }, { start: s2, end: e2 }) => {
  if (e1 !== undefined) {
    if (e2 !== undefined) {
      // finished tasks are ordered by their end times
      return e1 - e2
    }
    // finished task before unfinished tasks
    return -1
  } else if (e2 === undefined) {
    // unfinished tasks are ordered by their start times
    return s1 - s2
  }
  // unfinished task after finished tasks
  return 1
}

export default {
  async getBackupNgLogs (runId?: string) {
    const { runningJobs } = this
@@ -56,7 +74,7 @@ export default {
      if (log !== undefined) {
        delete started[runJobId]
        log.end = time
        log.status = computeStatus(
        log.status = computeStatusAndSortTasks(
          getStatus((log.result = data.error)),
          log.tasks
        )
@@ -81,7 +99,7 @@ export default {
        // TODO: merge/transfer work-around
        delete started[taskId]
        log.end = time
        log.status = computeStatus(
        log.status = computeStatusAndSortTasks(
          getStatus((log.result = data.result), data.status),
          log.tasks
        )
@@ -107,7 +125,7 @@ export default {
      if (log !== undefined) {
        delete started[runCallId]
        log.end = time
        log.status = computeStatus(
        log.status = computeStatusAndSortTasks(
          getStatus((log.result = data.error)),
          log.tasks
        )

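The `taskTimeComparator` introduced above defines the order in which a log's tasks are returned: finished tasks first, sorted by end time, then unfinished tasks sorted by start time. A standalone sketch with made-up task data showing the resulting order:

```js
// Same comparator as in the hunk above; the task objects are made up.
const taskTimeComparator = ({ start: s1, end: e1 }, { start: s2, end: e2 }) => {
  if (e1 !== undefined) {
    if (e2 !== undefined) {
      return e1 - e2 // finished tasks are ordered by their end times
    }
    return -1 // finished task before unfinished tasks
  } else if (e2 === undefined) {
    return s1 - s2 // unfinished tasks are ordered by their start times
  }
  return 1 // unfinished task after finished tasks
}

const tasks = [
  { id: 'c', start: 30 }, // unfinished
  { id: 'a', start: 10, end: 50 }, // finished
  { id: 'b', start: 20, end: 40 }, // finished
  { id: 'd', start: 5 }, // unfinished
]
tasks.sort(taskTimeComparator)
console.log(tasks.map(t => t.id)) // [ 'b', 'a', 'd', 'c' ]
```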
@@ -1018,12 +1018,16 @@ export default class BackupNg {
          }
        })()

        const isFull = some(
          deltaExport.vdis,
          vdi => vdi.other_config['xo:base_delta'] === undefined
        )
        await waitAll(
          [
            ...remotes.map(
              wrapTaskFn(
                id => ({
                  data: { id, type: 'remote' },
                  data: { id, isFull, type: 'remote' },
                  logger,
                  message: 'export',
                  parentId: taskId,
@@ -1119,7 +1123,7 @@ export default class BackupNg {
            ...srs.map(
              wrapTaskFn(
                id => ({
                  data: { id, type: 'SR' },
                  data: { id, isFull, type: 'SR' },
                  logger,
                  message: 'export',
                  parentId: taskId,

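The `isFull` flag computed above marks a delta export as full when at least one exported VDI has no `xo:base_delta` reference, and it is attached to the remote and SR task data so the logs can display the export type. A tiny standalone sketch with made-up data (assuming lodash's `some`, which is what the surrounding code appears to use):

```js
// Made-up data for illustration: VDI "b" has no xo:base_delta entry, so the
// whole export is reported as full rather than delta.
import { some } from 'lodash'

const deltaExport = {
  vdis: {
    a: { other_config: { 'xo:base_delta': 'OpaqueRef:1234' } },
    b: { other_config: {} },
  },
}

const isFull = some(
  deltaExport.vdis,
  vdi => vdi.other_config['xo:base_delta'] === undefined
)
console.log(isFull) // true
```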
@@ -2,7 +2,7 @@
|
||||
|
||||
import type { Pattern } from 'value-matcher'
|
||||
|
||||
import { cancelable } from 'promise-toolbox'
|
||||
import { CancelToken } from 'promise-toolbox'
|
||||
import { map as mapToArray } from 'lodash'
|
||||
import { noSuchObject } from 'xo-common/api-errors'
|
||||
|
||||
@@ -121,6 +121,7 @@ export default class Jobs {
|
||||
_jobs: JobsDb
|
||||
_logger: Logger
|
||||
_runningJobs: { __proto__: null, [string]: string }
|
||||
_runs: { __proto__: null, [string]: () => void }
|
||||
|
||||
get runningJobs () {
|
||||
return this._runningJobs
|
||||
@@ -136,6 +137,7 @@ export default class Jobs {
|
||||
}))
|
||||
this._logger = undefined
|
||||
this._runningJobs = { __proto__: null }
|
||||
this._runs = { __proto__: null }
|
||||
|
||||
executors.call = executeCall
|
||||
|
||||
@@ -154,6 +156,13 @@ export default class Jobs {
|
||||
})
|
||||
}
|
||||
|
||||
cancelJobRun (id: string) {
|
||||
const run = this._runs[id]
|
||||
if (run !== undefined) {
|
||||
return run.cancel()
|
||||
}
|
||||
}
|
||||
|
||||
async getAllJobs (type?: string): Promise<Array<Job>> {
|
||||
// $FlowFixMe don't know what is the problem (JFT)
|
||||
const jobs = await this._jobs.get()
|
||||
@@ -205,7 +214,7 @@ export default class Jobs {
|
||||
return /* await */ this._jobs.remove(id)
|
||||
}
|
||||
|
||||
async _runJob (cancelToken: any, job: Job, schedule?: Schedule, data_?: any) {
|
||||
async _runJob (job: Job, schedule?: Schedule, data_?: any) {
|
||||
const { id } = job
|
||||
|
||||
const runningJobs = this._runningJobs
|
||||
@@ -244,6 +253,11 @@ export default class Jobs {
|
||||
|
||||
runningJobs[id] = runJobId
|
||||
|
||||
const runs = this._runs
|
||||
|
||||
const { cancel, token } = CancelToken.source()
|
||||
runs[runJobId] = { cancel }
|
||||
|
||||
let session
|
||||
try {
|
||||
const app = this._app
|
||||
@@ -252,7 +266,7 @@ export default class Jobs {
|
||||
|
||||
const status = await executor({
|
||||
app,
|
||||
cancelToken,
|
||||
cancelToken: token,
|
||||
data: data_,
|
||||
job,
|
||||
logger,
|
||||
@@ -275,15 +289,14 @@ export default class Jobs {
|
||||
throw error
|
||||
} finally {
|
||||
delete runningJobs[id]
|
||||
delete runs[runJobId]
|
||||
if (session !== undefined) {
|
||||
session.close()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@cancelable
|
||||
async runJobSequence (
|
||||
$cancelToken: any,
|
||||
idSequence: Array<string>,
|
||||
schedule?: Schedule,
|
||||
data?: any
|
||||
@@ -293,10 +306,7 @@ export default class Jobs {
|
||||
)
|
||||
|
||||
for (const job of jobs) {
|
||||
if ($cancelToken.requested) {
|
||||
break
|
||||
}
|
||||
await this._runJob($cancelToken, job, schedule, data)
|
||||
await this._runJob(job, schedule, data)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,6 +1,7 @@
import { createReadableSparseStream } from 'vhd-lib'

import { VMDKDirectParser, readVmdkGrainTable } from './vmdk-read'
import VMDKDirectParser from './vmdk-read'
import readVmdkGrainTable from './vmdk-read-table'

async function convertFromVMDK (vmdkReadStream, table) {
  const parser = new VMDKDirectParser(vmdkReadStream)

packages/xo-vmdk-to-vhd/src/vmdk-read-table.js (new file, 97 lines)
@@ -0,0 +1,97 @@
const SECTOR_SIZE = 512
const HEADER_SIZE = 512
const FOOTER_POSITION = -1024
const DISK_CAPACITY_OFFSET = 12
const GRAIN_SIZE_OFFSET = 20
const NUM_GTE_PER_GT_OFFSET = 44
const GRAIN_ADDRESS_OFFSET = 56
/**
 *
 * the grain table is the array of LBAs (in byte, not in sector) ordered by their position in the VDMK file
 * THIS CODE RUNS ON THE BROWSER
 */
export default async function readVmdkGrainTable (fileAccessor) {
  const getLongLong = (buffer, offset, name) => {
    if (buffer.length < offset + 8) {
      throw new Error(
        `buffer ${name} is too short, expecting ${offset + 8} minimum, got ${
          buffer.length
        }`
      )
    }
    const dataView = new DataView(buffer)
    const res = dataView.getUint32(offset, true)
    const highBits = dataView.getUint32(offset + 4, true)
    const MANTISSA_BITS_IN_DOUBLE = 53
    if (highBits >= Math.pow(2, MANTISSA_BITS_IN_DOUBLE - 32)) {
      throw new Error(
        'Unsupported file, high order bits are to high in field ' + name
      )
    }
    return res + highBits * Math.pow(2, 32)
  }
  let headerBuffer = await fileAccessor(0, HEADER_SIZE)
  let grainAddrBuffer = headerBuffer.slice(
    GRAIN_ADDRESS_OFFSET,
    GRAIN_ADDRESS_OFFSET + 8
  )
  if (
    new Int8Array(grainAddrBuffer).reduce((acc, val) => acc && val === -1, true)
  ) {
    headerBuffer = await fileAccessor(
      FOOTER_POSITION,
      FOOTER_POSITION + HEADER_SIZE
    )
    grainAddrBuffer = headerBuffer.slice(
      GRAIN_ADDRESS_OFFSET,
      GRAIN_ADDRESS_OFFSET + 8
    )
  }
  const grainDirPosBytes =
    getLongLong(grainAddrBuffer, 0, 'grain directory address') * SECTOR_SIZE
  const capacity =
    getLongLong(headerBuffer, DISK_CAPACITY_OFFSET, 'capacity') * SECTOR_SIZE
  const grainSize =
    getLongLong(headerBuffer, GRAIN_SIZE_OFFSET, 'grain size') * SECTOR_SIZE
  const grainCount = Math.ceil(capacity / grainSize)
  const numGTEsPerGT = getLongLong(
    headerBuffer,
    NUM_GTE_PER_GT_OFFSET,
    'num GTE per GT'
  )
  const grainTablePhysicalSize = numGTEsPerGT * 4
  const grainDirectoryEntries = Math.ceil(grainCount / numGTEsPerGT)
  const grainDirectoryPhysicalSize = grainDirectoryEntries * 4
  const grainDirBuffer = await fileAccessor(
    grainDirPosBytes,
    grainDirPosBytes + grainDirectoryPhysicalSize
  )
  const grainDir = new Uint32Array(grainDirBuffer)
  const cachedGrainTables = []
  for (let i = 0; i < grainDirectoryEntries; i++) {
    const grainTableAddr = grainDir[i] * SECTOR_SIZE
    if (grainTableAddr !== 0) {
      cachedGrainTables[i] = new Uint32Array(
        await fileAccessor(
          grainTableAddr,
          grainTableAddr + grainTablePhysicalSize
        )
      )
    }
  }
  const extractedGrainTable = []
  for (let i = 0; i < grainCount; i++) {
    const directoryEntry = Math.floor(i / numGTEsPerGT)
    const grainTable = cachedGrainTables[directoryEntry]
    if (grainTable !== undefined) {
      const grainAddr = grainTable[i % numGTEsPerGT]
      if (grainAddr !== 0) {
        extractedGrainTable.push([i, grainAddr])
      }
    }
  }
  extractedGrainTable.sort(
    ([i1, grainAddress1], [i2, grainAddress2]) => grainAddress1 - grainAddress2
  )
  return extractedGrainTable.map(([index, grainAddress]) => index * grainSize)
}
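The new `readVmdkGrainTable` only assumes a `fileAccessor (start, end)` callback resolving to an ArrayBuffer for the requested byte range, with negative offsets addressing the end of the file (see `FOOTER_POSITION`). Since the comment says this code runs in the browser, a possible accessor built on `File#slice` could look like the following sketch; the helper name and the `FileReader` wiring are illustrative assumptions, not part of this compare:

```js
// Hypothetical adapter, not part of the diff: expose a browser File object
// through the fileAccessor(start, end) contract used by readVmdkGrainTable.
import readVmdkGrainTable from './vmdk-read-table'

function makeFileAccessor (file) {
  // Blob#slice accepts negative offsets relative to the end of the file,
  // which matches the FOOTER_POSITION usage above.
  return (start, end) =>
    new Promise((resolve, reject) => {
      const reader = new FileReader()
      reader.onerror = reject
      reader.onload = () => resolve(reader.result) // an ArrayBuffer
      reader.readAsArrayBuffer(file.slice(start, end))
    })
}

// Usage sketch: `vmdkFile` would typically come from an <input type="file">.
// const grainTable = await readVmdkGrainTable(makeFileAccessor(vmdkFile))
```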
@@ -6,7 +6,7 @@ import { fromCallback as pFromCallback } from 'promise-toolbox'
import rimraf from 'rimraf'
import tmp from 'tmp'

import { VMDKDirectParser } from './vmdk-read'
import VMDKDirectParser from './vmdk-read'

jest.setTimeout(10000)

@@ -4,7 +4,9 @@ import zlib from 'zlib'
|
||||
|
||||
import { VirtualBuffer } from './virtual-buffer'
|
||||
|
||||
const sectorSize = 512
|
||||
const SECTOR_SIZE = 512
|
||||
const HEADER_SIZE = 512
|
||||
const VERSION_OFFSET = 4
|
||||
const compressionDeflate = 'COMPRESSION_DEFLATE'
|
||||
const compressionNone = 'COMPRESSION_NONE'
|
||||
const compressionMap = [compressionNone, compressionDeflate]
|
||||
@@ -119,7 +121,7 @@ function parseHeader (buffer) {
|
||||
}
|
||||
}
|
||||
async function readGrain (offsetSectors, buffer, compressed) {
|
||||
const offset = offsetSectors * sectorSize
|
||||
const offset = offsetSectors * SECTOR_SIZE
|
||||
const size = buffer.readUInt32LE(offset + 8)
|
||||
const grainBuffer = buffer.slice(offset + 12, offset + 12 + size)
|
||||
const grainContent = compressed
|
||||
@@ -130,7 +132,7 @@ async function readGrain (offsetSectors, buffer, compressed) {
|
||||
offsetSectors: offsetSectors,
|
||||
offset,
|
||||
lba,
|
||||
lbaBytes: lba * sectorSize,
|
||||
lbaBytes: lba * SECTOR_SIZE,
|
||||
size,
|
||||
buffer: grainBuffer,
|
||||
grain: grainContent,
|
||||
@@ -146,10 +148,10 @@ function tryToParseMarker (buffer) {
|
||||
}
|
||||
|
||||
function alignSectors (number) {
|
||||
return Math.ceil(number / sectorSize) * sectorSize
|
||||
return Math.ceil(number / SECTOR_SIZE) * SECTOR_SIZE
|
||||
}
|
||||
|
||||
export class VMDKDirectParser {
|
||||
export default class VMDKDirectParser {
|
||||
constructor (readStream) {
|
||||
this.virtualBuffer = new VirtualBuffer(readStream)
|
||||
this.header = null
|
||||
@@ -177,9 +179,9 @@ export class VMDKDirectParser {
|
||||
l2IsContiguous = l2IsContiguous && l1Entry - previousL1Entry === 4
|
||||
} else {
|
||||
l2IsContiguous =
|
||||
l1Entry * sectorSize === this.virtualBuffer.position ||
|
||||
l1Entry * sectorSize === this.virtualBuffer.position + 512
|
||||
l2Start = l1Entry * sectorSize
|
||||
l1Entry * SECTOR_SIZE === this.virtualBuffer.position ||
|
||||
l1Entry * SECTOR_SIZE === this.virtualBuffer.position + SECTOR_SIZE
|
||||
l2Start = l1Entry * SECTOR_SIZE
|
||||
}
|
||||
}
|
||||
if (!l2IsContiguous) {
|
||||
@@ -200,37 +202,29 @@ export class VMDKDirectParser {
|
||||
l2ByteSize,
|
||||
'L2 table ' + position
|
||||
)
|
||||
let grainsAreInAscendingOrder = true
|
||||
let previousL2Entry = 0
|
||||
let firstGrain = null
|
||||
for (let i = 0; i < l2entries; i++) {
|
||||
const l2Entry = l2Buffer.readUInt32LE(i * 4)
|
||||
if (i > 0 && previousL2Entry !== 0 && l2Entry !== 0) {
|
||||
grainsAreInAscendingOrder =
|
||||
grainsAreInAscendingOrder && previousL2Entry < l2Entry
|
||||
}
|
||||
previousL2Entry = l2Entry
|
||||
if (firstGrain === null) {
|
||||
firstGrain = l2Entry
|
||||
}
|
||||
}
|
||||
if (!grainsAreInAscendingOrder) {
|
||||
// TODO: here we could transform the file to a sparse VHD on the fly because we have the complete table
|
||||
throw new Error('Unsupported file format')
|
||||
}
|
||||
const freeSpace = firstGrain * sectorSize - this.virtualBuffer.position
|
||||
const freeSpace = firstGrain * SECTOR_SIZE - this.virtualBuffer.position
|
||||
if (freeSpace > 0) {
|
||||
await this.virtualBuffer.readChunk(freeSpace, 'freeSpace after L2')
|
||||
}
|
||||
}
|
||||
|
||||
async readHeader () {
|
||||
const headerBuffer = await this.virtualBuffer.readChunk(512, 'readHeader')
|
||||
const headerBuffer = await this.virtualBuffer.readChunk(
|
||||
HEADER_SIZE,
|
||||
'readHeader'
|
||||
)
|
||||
const magicString = headerBuffer.slice(0, 4).toString('ascii')
|
||||
if (magicString !== 'KDMV') {
|
||||
throw new Error('not a VMDK file')
|
||||
}
|
||||
const version = headerBuffer.readUInt32LE(4)
|
||||
const version = headerBuffer.readUInt32LE(VERSION_OFFSET)
|
||||
if (version !== 1 && version !== 3) {
|
||||
throw new Error(
|
||||
'unsupported VMDK version ' +
|
||||
@@ -240,7 +234,7 @@ export class VMDKDirectParser {
|
||||
}
|
||||
this.header = parseHeader(headerBuffer)
|
||||
// I think the multiplications are OK, because the descriptor is always at the beginning of the file
|
||||
const descriptorLength = this.header.descriptorSizeSectors * sectorSize
|
||||
const descriptorLength = this.header.descriptorSizeSectors * SECTOR_SIZE
|
||||
const descriptorBuffer = await this.virtualBuffer.readChunk(
|
||||
descriptorLength,
|
||||
'descriptor'
|
||||
@@ -251,16 +245,16 @@ export class VMDKDirectParser {
|
||||
this.header.grainDirectoryOffsetSectors !== -1 &&
|
||||
this.header.grainDirectoryOffsetSectors !== 0
|
||||
) {
|
||||
l1PositionBytes = this.header.grainDirectoryOffsetSectors * sectorSize
|
||||
l1PositionBytes = this.header.grainDirectoryOffsetSectors * SECTOR_SIZE
|
||||
}
|
||||
const endOfDescriptor = this.virtualBuffer.position
|
||||
if (
|
||||
l1PositionBytes !== null &&
|
||||
(l1PositionBytes === endOfDescriptor ||
|
||||
l1PositionBytes === endOfDescriptor + sectorSize)
|
||||
l1PositionBytes === endOfDescriptor + SECTOR_SIZE)
|
||||
) {
|
||||
if (l1PositionBytes === endOfDescriptor + sectorSize) {
|
||||
await this.virtualBuffer.readChunk(sectorSize, 'skipping L1 marker')
|
||||
if (l1PositionBytes === endOfDescriptor + SECTOR_SIZE) {
|
||||
await this.virtualBuffer.readChunk(SECTOR_SIZE, 'skipping L1 marker')
|
||||
}
|
||||
await this._readL1()
|
||||
}
|
||||
@@ -271,7 +265,7 @@ export class VMDKDirectParser {
|
||||
while (!this.virtualBuffer.isDepleted) {
|
||||
const position = this.virtualBuffer.position
|
||||
const sector = await this.virtualBuffer.readChunk(
|
||||
512,
|
||||
SECTOR_SIZE,
|
||||
'marker start ' + position
|
||||
)
|
||||
if (sector.length === 0) {
|
||||
@@ -281,14 +275,14 @@ export class VMDKDirectParser {
|
||||
if (marker.size === 0) {
|
||||
if (marker.value !== 0) {
|
||||
await this.virtualBuffer.readChunk(
|
||||
marker.value * sectorSize,
|
||||
marker.value * SECTOR_SIZE,
|
||||
'other marker value ' + this.virtualBuffer.position
|
||||
)
|
||||
}
|
||||
} else if (marker.size > 10) {
|
||||
const grainDiskSize = marker.size + 12
|
||||
const alignedGrainDiskSize = alignSectors(grainDiskSize)
|
||||
const remainOfBufferSize = alignedGrainDiskSize - sectorSize
|
||||
const remainOfBufferSize = alignedGrainDiskSize - SECTOR_SIZE
|
||||
const remainderOfGrainBuffer = await this.virtualBuffer.readChunk(
|
||||
remainOfBufferSize,
|
||||
'grain remainder ' + this.virtualBuffer.position
|
||||
@@ -305,60 +299,3 @@ export class VMDKDirectParser {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export async function readVmdkGrainTable (fileAccessor) {
|
||||
let headerBuffer = await fileAccessor(0, 512)
|
||||
let grainAddrBuffer = headerBuffer.slice(56, 56 + 8)
|
||||
if (
|
||||
new Int8Array(grainAddrBuffer).reduce((acc, val) => acc && val === -1, true)
|
||||
) {
|
||||
headerBuffer = await fileAccessor(-1024, -1024 + 512)
|
||||
grainAddrBuffer = headerBuffer.slice(56, 56 + 8)
|
||||
}
|
||||
const grainDirPosBytes =
|
||||
new DataView(grainAddrBuffer).getUint32(0, true) * 512
|
||||
const capacity =
|
||||
new DataView(headerBuffer.slice(12, 12 + 8)).getUint32(0, true) * 512
|
||||
const grainSize =
|
||||
new DataView(headerBuffer.slice(20, 20 + 8)).getUint32(0, true) * 512
|
||||
const grainCount = Math.ceil(capacity / grainSize)
|
||||
const numGTEsPerGT = new DataView(headerBuffer.slice(44, 44 + 8)).getUint32(
|
||||
0,
|
||||
true
|
||||
)
|
||||
const grainTablePhysicalSize = numGTEsPerGT * 4
|
||||
const grainDirectoryEntries = Math.ceil(grainCount / numGTEsPerGT)
|
||||
const grainDirectoryPhysicalSize = grainDirectoryEntries * 4
|
||||
const grainDirBuffer = await fileAccessor(
|
||||
grainDirPosBytes,
|
||||
grainDirPosBytes + grainDirectoryPhysicalSize
|
||||
)
|
||||
const grainDir = new Uint32Array(grainDirBuffer)
|
||||
const cachedGrainTables = []
|
||||
for (let i = 0; i < grainDirectoryEntries; i++) {
|
||||
const grainTableAddr = grainDir[i] * 512
|
||||
if (grainTableAddr !== 0) {
|
||||
cachedGrainTables[i] = new Uint32Array(
|
||||
await fileAccessor(
|
||||
grainTableAddr,
|
||||
grainTableAddr + grainTablePhysicalSize
|
||||
)
|
||||
)
|
||||
}
|
||||
}
|
||||
const extractedGrainTable = []
|
||||
for (let i = 0; i < grainCount; i++) {
|
||||
const directoryEntry = Math.floor(i / numGTEsPerGT)
|
||||
const grainTable = cachedGrainTables[directoryEntry]
|
||||
if (grainTable !== undefined) {
|
||||
const grainAddr = grainTable[i % numGTEsPerGT]
|
||||
if (grainAddr !== 0) {
|
||||
extractedGrainTable.push([i, grainAddr])
|
||||
}
|
||||
}
|
||||
}
|
||||
extractedGrainTable.sort(
|
||||
([i1, grainAddress1], [i2, grainAddress2]) => grainAddress1 - grainAddress2
|
||||
)
|
||||
return extractedGrainTable.map(([index, grainAddress]) => index * grainSize)
|
||||
}
|
||||
|
||||
@@ -1,7 +1,7 @@
{
  "private": false,
  "name": "xo-web",
  "version": "5.19.8",
  "version": "5.20.1",
  "license": "AGPL-3.0",
  "description": "Web interface client for Xen-Orchestra",
  "keywords": [

@@ -7,26 +7,22 @@ const call = fn => fn()
// callbacks have been correctly initialized when there are circular dependencies
const addSubscriptions = subscriptions => Component =>
  class SubscriptionWrapper extends React.PureComponent {
    constructor () {
      super()

      // provide all props since the beginning (better behavior with Freactal)
      const state = (this.state = {})
      Object.keys(subscriptions).forEach(key => {
        state[key] = undefined
      })
    }

    _unsubscribes = null

    componentWillMount () {
      const state = {}
      this._unsubscribes = map(
        typeof subscriptions === 'function'
          ? subscriptions(this.props)
          : subscriptions,
        (subscribe, prop) =>
          subscribe(value => this.setState({ [prop]: value }))
        (subscribe, prop) => {
          state[prop] = undefined
          return subscribe(value => this.setState({ [prop]: value }))
        }
      )

      // provide all props since the beginning (better behavior with Freactal)
      this.setState(state)
    }

    componentWillUnmount () {

@@ -188,6 +188,7 @@ const messages = {
  // ----- Forms -----
  formCancel: 'Cancel',
  formCreate: 'Create',
  formEdit: 'Edit',
  formReset: 'Reset',
  formSave: 'Save',
  add: 'Add',
@@ -261,6 +262,7 @@ const messages = {
  jobTransferredDataSpeed: 'Transfer speed:',
  operationSize: 'Size',
  operationSpeed: 'Speed',
  exportType: 'Type',
  jobMergedDataSize: 'Merge size:',
  jobMergedDataSpeed: 'Merge speed:',
  allJobCalls: 'All',

@@ -167,7 +167,10 @@ class ColumnHead extends Component {
})
class Checkbox extends Component {
  componentDidUpdate () {
    const { props: { indeterminate }, ref } = this
    const {
      props: { indeterminate },
      ref,
    } = this
    if (ref !== null) {
      ref.indeterminate = indeterminate
    }
@@ -487,8 +490,8 @@ export default class SortedTable extends Component {
        ) {
          this.setState({
            highlighted:
              (itemIndex + visibleItems.length + 1) % visibleItems.length ||
              0,
              (itemIndex + visibleItems.length + 1) %
                visibleItems.length || 0,
          })
        }
        break
@@ -500,8 +503,8 @@ export default class SortedTable extends Component {
        ) {
          this.setState({
            highlighted:
              (itemIndex + visibleItems.length - 1) % visibleItems.length ||
              0,
              (itemIndex + visibleItems.length - 1) %
                visibleItems.length || 0,
          })
        }
        break
@@ -893,7 +896,7 @@ export default class SortedTable extends Component {
            </span>
          )
        )}
        {nSelectedItems !== 0 && (
        {(nSelectedItems !== 0 || all) && (
          <div className='pull-right'>
            <ButtonGroup>
              {map(groupedActions, (props, key) => (

@@ -206,7 +206,7 @@ export default class Restore extends Component {

  render () {
    return (
      <Upgrade place='restoreBackup' available={2}>
      <Upgrade place='restoreBackup' available={4}>
        <div>
          <div className='mb-1'>
            <ActionButton

@@ -137,7 +137,7 @@ class JobsTable extends React.Component {
    },
    {
      handler: (job, { goTo }) => goTo(`/backup-ng/${job.id}/edit`),
      label: '',
      label: _('formEdit'),
      icon: 'edit',
      level: 'primary',
    },

@@ -115,7 +115,7 @@ const showTasks = log =>
      {log.id}
    </span>
    </span>,
    <LogAlertBody log={log} />
    <LogAlertBody id={log.id} />
  )

const LOG_INDIVIDUAL_ACTIONS = [

@@ -7,10 +7,10 @@ import renderXoItem, { renderXoItemFromId } from 'render-xo-item'
|
||||
import Select from 'form/select'
|
||||
import Tooltip from 'tooltip'
|
||||
import { addSubscriptions, formatSize, formatSpeed } from 'utils'
|
||||
import { filter, isEmpty, get, keyBy, map } from 'lodash'
|
||||
import { countBy, filter, get, keyBy, map } from 'lodash'
|
||||
import { FormattedDate } from 'react-intl'
|
||||
import { injectState, provideState } from '@julien-f/freactal'
|
||||
import { runBackupNgJob, subscribeRemotes } from 'xo'
|
||||
import { runBackupNgJob, subscribeBackupNgLogs, subscribeRemotes } from 'xo'
|
||||
|
||||
const TASK_STATUS = {
|
||||
failure: {
|
||||
@@ -77,40 +77,20 @@ const TASK_FILTER_OPTIONS = [
|
||||
{ label: 'taskSuccess', value: 'success' },
|
||||
]
|
||||
|
||||
const getFilteredTaskLogs = (logs, filterValue) =>
|
||||
filterValue === 'all'
|
||||
? logs
|
||||
: filter(logs, ({ status }) => status === filterValue)
|
||||
|
||||
const getInitialFilter = tasks => {
|
||||
const isEmptyFilter = filterValue =>
|
||||
isEmpty(getFilteredTaskLogs(tasks, filterValue))
|
||||
|
||||
if (!isEmptyFilter('pending')) {
|
||||
return PENDING_FILTER_OPTION
|
||||
}
|
||||
|
||||
if (!isEmptyFilter('failure')) {
|
||||
return FAILURE_FILTER_OPTION
|
||||
}
|
||||
|
||||
if (!isEmptyFilter('interrupted')) {
|
||||
return INTERRUPTED_FILTER_OPTION
|
||||
}
|
||||
|
||||
return ALL_FILTER_OPTION
|
||||
}
|
||||
|
||||
export default [
|
||||
addSubscriptions({
|
||||
addSubscriptions(({ id }) => ({
|
||||
remotes: cb =>
|
||||
subscribeRemotes(remotes => {
|
||||
cb(keyBy(remotes, 'id'))
|
||||
}),
|
||||
}),
|
||||
log: cb =>
|
||||
subscribeBackupNgLogs(logs => {
|
||||
cb(logs[id])
|
||||
}),
|
||||
})),
|
||||
provideState({
|
||||
initialState: ({ log }) => ({
|
||||
filter: getInitialFilter(log.tasks),
|
||||
initialState: () => ({
|
||||
filter: undefined,
|
||||
}),
|
||||
effects: {
|
||||
setFilter: (_, filter) => state => ({
|
||||
@@ -129,21 +109,47 @@ export default [
|
||||
},
|
||||
},
|
||||
computed: {
|
||||
filteredTaskLogs: ({ filter: { value } }, { log }) =>
|
||||
getFilteredTaskLogs(log.tasks, value),
|
||||
optionRenderer: (state, { log }) => ({ label, value }) => (
|
||||
filteredTaskLogs: (
|
||||
{ defaultFilter, filter: { value } = defaultFilter },
|
||||
{ log = {} }
|
||||
) =>
|
||||
value === 'all'
|
||||
? log.tasks
|
||||
: filter(log.tasks, ({ status }) => status === value),
|
||||
optionRenderer: ({ countByStatus }) => ({ label, value }) => (
|
||||
<span>
|
||||
{_(label)} ({getFilteredTaskLogs(log.tasks, value).length})
|
||||
{_(label)} ({countByStatus[value] || 0})
|
||||
</span>
|
||||
),
|
||||
countByStatus: (_, { log = {} }) => ({
|
||||
all: get(log.tasks, 'length'),
|
||||
...countBy(log.tasks, 'status'),
|
||||
}),
|
||||
defaultFilter: ({ countByStatus }) => {
|
||||
if (countByStatus.pending > 0) {
|
||||
return PENDING_FILTER_OPTION
|
||||
}
|
||||
|
||||
if (countByStatus.failure > 0) {
|
||||
return FAILURE_FILTER_OPTION
|
||||
}
|
||||
|
||||
if (countByStatus.interrupted > 0) {
|
||||
return INTERRUPTED_FILTER_OPTION
|
||||
}
|
||||
|
||||
return ALL_FILTER_OPTION
|
||||
},
|
||||
},
|
||||
}),
|
||||
injectState,
|
||||
({ log, remotes, state, effects }) =>
|
||||
log.result !== undefined ? (
|
||||
<span className={log.status === 'skipped' ? 'text-info' : 'text-danger'}>
|
||||
<Copiable tagName='p' data={JSON.stringify(log.result, null, 2)}>
|
||||
<Icon icon='alarm' /> {log.result.message}
|
||||
({ log = {}, remotes, state, effects }) => {
|
||||
const { status, result, scheduleId } = log
|
||||
return (status === 'failure' || status === 'skipped') &&
|
||||
result !== undefined ? (
|
||||
<span className={status === 'skipped' ? 'text-info' : 'text-danger'}>
|
||||
<Copiable tagName='p' data={JSON.stringify(result, null, 2)}>
|
||||
<Icon icon='alarm' /> {result.message}
|
||||
</Copiable>
|
||||
</span>
|
||||
) : (
|
||||
@@ -154,86 +160,147 @@ export default [
|
||||
optionRenderer={state.optionRenderer}
|
||||
options={TASK_FILTER_OPTIONS}
|
||||
required
|
||||
value={state.filter}
|
||||
value={state.filter || state.defaultFilter}
|
||||
valueKey='value'
|
||||
/>
|
||||
<br />
|
||||
<ul className='list-group'>
|
||||
{map(state.filteredTaskLogs, taskLog => (
|
||||
<li key={taskLog.data.id} className='list-group-item'>
|
||||
{renderXoItemFromId(taskLog.data.id)} ({taskLog.data.id.slice(
|
||||
4,
|
||||
8
|
||||
)}) <TaskStateInfos status={taskLog.status} />{' '}
|
||||
{log.scheduleId !== undefined &&
|
||||
taskLog.status === 'failure' && (
|
||||
<ActionButton
|
||||
handler={effects.restartVmJob}
|
||||
icon='run'
|
||||
size='small'
|
||||
tooltip={_('backupRestartVm')}
|
||||
data-vm={taskLog.data.id}
|
||||
/>
|
||||
)}
|
||||
<ul>
|
||||
{map(taskLog.tasks, subTaskLog => (
|
||||
<li key={subTaskLog.id}>
|
||||
{subTaskLog.message === 'snapshot' ? (
|
||||
<span>
|
||||
<Icon icon='task' /> {_('snapshotVmLabel')}
|
||||
</span>
|
||||
) : subTaskLog.data.type === 'remote' ? (
|
||||
<span>
|
||||
{get(remotes, subTaskLog.data.id) !== undefined
|
||||
? renderXoItem({
|
||||
type: 'remote',
|
||||
value: remotes[subTaskLog.data.id],
|
||||
})
|
||||
: _('errorNoSuchItem')}{' '}
|
||||
({subTaskLog.data.id.slice(4, 8)})
|
||||
</span>
|
||||
) : (
|
||||
<span>
|
||||
{renderXoItemFromId(subTaskLog.data.id)} ({subTaskLog.data.id.slice(
|
||||
4,
|
||||
8
|
||||
)})
|
||||
</span>
|
||||
)}{' '}
|
||||
<TaskStateInfos status={subTaskLog.status} />
|
||||
<ul>
|
||||
{map(subTaskLog.tasks, operationLog => (
|
||||
<li key={operationLog.id}>
|
||||
{map(state.filteredTaskLogs, taskLog => {
|
||||
let globalIsFull
|
||||
return (
|
||||
<li key={taskLog.data.id} className='list-group-item'>
|
||||
{renderXoItemFromId(taskLog.data.id)} ({taskLog.data.id.slice(
|
||||
4,
|
||||
8
|
||||
)}) <TaskStateInfos status={taskLog.status} />{' '}
|
||||
{scheduleId !== undefined &&
|
||||
taskLog.status === 'failure' && (
|
||||
<ActionButton
|
||||
handler={effects.restartVmJob}
|
||||
icon='run'
|
||||
size='small'
|
||||
tooltip={_('backupRestartVm')}
|
||||
data-vm={taskLog.data.id}
|
||||
/>
|
||||
)}
|
||||
<ul>
|
||||
{map(taskLog.tasks, subTaskLog => {
|
||||
const isFull = get(subTaskLog.data, 'isFull')
|
||||
if (isFull !== undefined && globalIsFull === undefined) {
|
||||
globalIsFull = isFull
|
||||
}
|
||||
return (
|
||||
<li key={subTaskLog.id}>
|
||||
{subTaskLog.message === 'snapshot' ? (
|
||||
<span>
|
||||
<Icon icon='task' /> {operationLog.message}
|
||||
</span>{' '}
|
||||
<TaskStateInfos status={operationLog.status} />
|
||||
<br />
|
||||
<TaskDate
|
||||
label='taskStart'
|
||||
value={operationLog.start}
|
||||
/>
|
||||
{operationLog.end !== undefined && (
|
||||
<div>
|
||||
<TaskDate
|
||||
label='taskEnd'
|
||||
value={operationLog.end}
|
||||
/>
|
||||
<Icon icon='task' /> {_('snapshotVmLabel')}
|
||||
</span>
|
||||
) : subTaskLog.data.type === 'remote' ? (
|
||||
<span>
|
||||
{get(remotes, subTaskLog.data.id) !== undefined
|
||||
? renderXoItem({
|
||||
type: 'remote',
|
||||
value: remotes[subTaskLog.data.id],
|
||||
})
|
||||
: _('errorNoSuchItem')}{' '}
|
||||
({subTaskLog.data.id.slice(4, 8)})
|
||||
</span>
|
||||
) : (
|
||||
<span>
|
||||
{renderXoItemFromId(subTaskLog.data.id)} ({subTaskLog.data.id.slice(
|
||||
4,
|
||||
8
|
||||
)})
|
||||
</span>
|
||||
)}{' '}
|
||||
<TaskStateInfos status={subTaskLog.status} />
|
||||
<ul>
|
||||
{map(subTaskLog.tasks, operationLog => (
|
||||
<li key={operationLog.id}>
|
||||
<span>
|
||||
<Icon icon='task' /> {operationLog.message}
|
||||
</span>{' '}
|
||||
<TaskStateInfos status={operationLog.status} />
|
||||
<br />
|
||||
{_.keyValue(
|
||||
<TaskDate
|
||||
label='taskStart'
|
||||
value={operationLog.start}
|
||||
/>
|
||||
{operationLog.end !== undefined && (
|
||||
<div>
|
||||
<TaskDate
|
||||
label='taskEnd'
|
||||
value={operationLog.end}
|
||||
/>
|
||||
<br />
|
||||
{_.keyValue(
|
||||
_('taskDuration'),
|
||||
<FormattedDuration
|
||||
duration={
|
||||
operationLog.end - operationLog.start
|
||||
}
|
||||
/>
|
||||
)}
|
||||
<br />
|
||||
{operationLog.status === 'failure' ? (
|
||||
<Copiable
|
||||
tagName='p'
|
||||
data={JSON.stringify(
|
||||
operationLog.result,
|
||||
null,
|
||||
2
|
||||
)}
|
||||
>
|
||||
{_.keyValue(
|
||||
_('taskError'),
|
||||
<span className='text-danger'>
|
||||
{operationLog.result.message}
|
||||
</span>
|
||||
)}
|
||||
</Copiable>
|
||||
) : (
|
||||
operationLog.result.size > 0 && (
|
||||
<div>
|
||||
{_.keyValue(
|
||||
_('operationSize'),
|
||||
formatSize(operationLog.result.size)
|
||||
)}
|
||||
<br />
|
||||
{_.keyValue(
|
||||
_('operationSpeed'),
|
||||
formatSpeed(
|
||||
operationLog.result.size,
|
||||
operationLog.end -
|
||||
operationLog.start
|
||||
)
|
||||
)}
|
||||
</div>
|
||||
)
|
||||
)}
|
||||
</div>
|
||||
)}
|
||||
</li>
|
||||
))}
|
||||
</ul>
|
||||
<TaskDate label='taskStart' value={subTaskLog.start} />
|
||||
{subTaskLog.end !== undefined && (
|
||||
<div>
|
||||
<TaskDate label='taskEnd' value={subTaskLog.end} />
|
||||
<br />
|
||||
{subTaskLog.message !== 'snapshot' &&
|
||||
_.keyValue(
|
||||
_('taskDuration'),
|
||||
<FormattedDuration
|
||||
duration={
|
||||
operationLog.end - operationLog.start
|
||||
}
|
||||
duration={subTaskLog.end - subTaskLog.start}
|
||||
/>
|
||||
)}
|
||||
<br />
|
||||
{operationLog.status === 'failure' ? (
|
||||
<br />
|
||||
{subTaskLog.status === 'failure' &&
|
||||
subTaskLog.result !== undefined && (
|
||||
<Copiable
|
||||
tagName='p'
|
||||
data={JSON.stringify(
|
||||
operationLog.result,
|
||||
subTaskLog.result,
|
||||
null,
|
||||
2
|
||||
)}
|
||||
@@ -241,147 +308,107 @@ export default [
|
||||
{_.keyValue(
|
||||
_('taskError'),
|
||||
<span className='text-danger'>
|
||||
{operationLog.result.message}
|
||||
{subTaskLog.result.message}
|
||||
</span>
|
||||
)}
|
||||
</Copiable>
|
||||
) : (
|
||||
<div>
|
||||
{_.keyValue(
|
||||
_('operationSize'),
|
||||
formatSize(operationLog.result.size)
|
||||
)}
|
||||
<br />
|
||||
{_.keyValue(
|
||||
_('operationSpeed'),
|
||||
formatSpeed(
|
||||
operationLog.result.size,
|
||||
operationLog.end - operationLog.start
|
||||
)
|
||||
)}
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
)}
|
||||
</li>
|
||||
))}
|
||||
</ul>
|
||||
<TaskDate label='taskStart' value={subTaskLog.start} />
|
||||
{subTaskLog.end !== undefined && (
|
||||
<div>
|
||||
<TaskDate label='taskEnd' value={subTaskLog.end} />
|
||||
<br />
|
||||
{subTaskLog.message !== 'snapshot' &&
|
||||
_.keyValue(
|
||||
_('taskDuration'),
|
||||
<FormattedDuration
|
||||
duration={subTaskLog.end - subTaskLog.start}
|
||||
/>
|
||||
)}
|
||||
<br />
|
||||
{subTaskLog.status === 'failure' &&
|
||||
subTaskLog.result !== undefined && (
|
||||
<Copiable
|
||||
tagName='p'
|
||||
data={JSON.stringify(subTaskLog.result, null, 2)}
|
||||
</div>
|
||||
)}
|
||||
</li>
|
||||
)
|
||||
})}
|
||||
</ul>
|
||||
<TaskDate label='taskStart' value={taskLog.start} />
|
||||
{taskLog.end !== undefined && (
|
||||
<div>
|
||||
<TaskDate label='taskEnd' value={taskLog.end} />
|
||||
<br />
|
||||
{_.keyValue(
|
||||
_('taskDuration'),
|
||||
<FormattedDuration
|
||||
duration={taskLog.end - taskLog.start}
|
||||
/>
|
||||
)}
|
||||
<br />
|
||||
{taskLog.result !== undefined ? (
|
||||
taskLog.result.message === UNHEALTHY_VDI_CHAIN_ERROR ? (
|
||||
<Tooltip content={_('clickForMoreInformation')}>
|
||||
<a
|
||||
className='text-info'
|
||||
href={UNHEALTHY_VDI_CHAIN_LINK}
|
||||
rel='noopener noreferrer'
|
||||
target='_blank'
|
||||
>
|
||||
<Icon icon='info' /> {_('unhealthyVdiChainError')}
|
||||
</a>
|
||||
</Tooltip>
|
||||
) : (
|
||||
<Copiable
|
||||
tagName='p'
|
||||
data={JSON.stringify(taskLog.result, null, 2)}
|
||||
>
|
||||
{_.keyValue(
|
||||
taskLog.status === 'skipped'
|
||||
? _('taskReason')
|
||||
: _('taskError'),
|
||||
<span
|
||||
className={
|
||||
taskLog.status === 'skipped'
|
||||
? 'text-info'
|
||||
: 'text-danger'
|
||||
}
|
||||
>
|
||||
{_.keyValue(
|
||||
_('taskError'),
|
||||
<span className='text-danger'>
|
||||
{subTaskLog.result.message}
|
||||
</span>
|
||||
)}
|
||||
</Copiable>
|
||||
{taskLog.result.message}
|
||||
</span>
|
||||
)}
|
||||
</Copiable>
|
||||
)
|
||||
) : (
|
||||
<div>
|
||||
{taskLog.transfer !== undefined && (
|
||||
<div>
|
||||
{_.keyValue(
|
||||
_('taskTransferredDataSize'),
|
||||
formatSize(taskLog.transfer.size)
|
||||
)}
|
||||
<br />
|
||||
{_.keyValue(
|
||||
_('taskTransferredDataSpeed'),
|
||||
formatSpeed(
|
||||
taskLog.transfer.size,
|
||||
taskLog.transfer.duration
|
||||
)
|
||||
)}
|
||||
</div>
|
||||
)}
|
||||
{taskLog.merge !== undefined && (
|
||||
<div>
|
||||
{_.keyValue(
|
||||
_('taskMergedDataSize'),
|
||||
formatSize(taskLog.merge.size)
|
||||
)}
|
||||
<br />
|
||||
{_.keyValue(
|
||||
_('taskMergedDataSpeed'),
|
||||
formatSpeed(
|
||||
taskLog.merge.size,
|
||||
taskLog.merge.duration
|
||||
)
|
||||
)}
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
)}
|
||||
</li>
|
||||
))}
|
||||
</ul>
|
||||
<TaskDate label='taskStart' value={taskLog.start} />
|
||||
{taskLog.end !== undefined && (
|
||||
<div>
|
||||
<TaskDate label='taskEnd' value={taskLog.end} />
|
||||
<br />
|
||||
{_.keyValue(
|
||||
_('taskDuration'),
|
||||
<FormattedDuration duration={taskLog.end - taskLog.start} />
|
||||
)}
|
||||
<br />
|
||||
{taskLog.result !== undefined ? (
|
||||
taskLog.result.message === UNHEALTHY_VDI_CHAIN_ERROR ? (
|
||||
<Tooltip content={_('clickForMoreInformation')}>
|
||||
<a
|
||||
className='text-info'
|
||||
href={UNHEALTHY_VDI_CHAIN_LINK}
|
||||
rel='noopener noreferrer'
|
||||
target='_blank'
|
||||
>
|
||||
<Icon icon='info' /> {_('unhealthyVdiChainError')}
|
||||
</a>
|
||||
</Tooltip>
|
||||
) : (
|
||||
<Copiable
|
||||
tagName='p'
|
||||
data={JSON.stringify(taskLog.result, null, 2)}
|
||||
>
|
||||
{_.keyValue(
|
||||
taskLog.status === 'skipped'
|
||||
? _('taskReason')
|
||||
: _('taskError'),
|
||||
<span
|
||||
className={
|
||||
taskLog.status === 'skipped'
|
||||
? 'text-info'
|
||||
: 'text-danger'
|
||||
}
|
||||
>
|
||||
{taskLog.result.message}
|
||||
</span>
|
||||
)}
|
||||
</Copiable>
|
||||
)
|
||||
) : (
|
||||
<div>
|
||||
{taskLog.transfer !== undefined && (
|
||||
<div>
|
||||
{_.keyValue(
|
||||
_('taskTransferredDataSize'),
|
||||
formatSize(taskLog.transfer.size)
|
||||
)}
|
||||
<br />
|
||||
{_.keyValue(
|
||||
_('taskTransferredDataSpeed'),
|
||||
formatSpeed(
|
||||
taskLog.transfer.size,
|
||||
taskLog.transfer.duration
|
||||
)
|
||||
)}
|
||||
</div>
|
||||
)}
|
||||
{taskLog.merge !== undefined && (
|
||||
<div>
|
||||
{_.keyValue(
|
||||
_('taskMergedDataSize'),
|
||||
formatSize(taskLog.merge.size)
|
||||
)}
|
||||
<br />
|
||||
{_.keyValue(
|
||||
_('taskMergedDataSpeed'),
|
||||
formatSpeed(
|
||||
taskLog.merge.size,
|
||||
taskLog.merge.duration
|
||||
)
|
||||
)}
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
)}
|
||||
</li>
|
||||
))}
|
||||
</div>
|
||||
)}
|
||||
{globalIsFull !== undefined &&
|
||||
_.keyValue(_('exportType'), globalIsFull ? 'full' : 'delta')}
|
||||
</li>
|
||||
)
|
||||
})}
|
||||
</ul>
|
||||
</div>
|
||||
),
|
||||
)
|
||||
},
|
||||
].reduceRight((value, decorator) => decorator(value))
|
||||
|
||||