feat(Backup NG): implement logs and reports (#2869)

This commit is contained in:
Julien Fontanet 2018-05-15 14:40:11 +02:00 committed by GitHub
parent fdde916388
commit ef942a6209
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
20 changed files with 1591 additions and 341 deletions

View File

@ -1,4 +1,12 @@
declare module 'lodash' {
declare export function forEach<K, V>(
object: { [K]: V },
iteratee: (V, K) => void
): void
declare export function groupBy<K, V>(
object: { [K]: V },
iteratee: K | ((V, K) => string)
): { [string]: V[] }
declare export function invert<K, V>(object: { [K]: V }): { [V]: K }
declare export function isEmpty(mixed): boolean
declare export function keyBy<T>(array: T[], iteratee: string): boolean

View File

@ -35,6 +35,7 @@
"node": ">=4"
},
"dependencies": {
"babel-runtime": "^6.26.0",
"human-format": "^0.10.0",
"lodash": "^4.13.1",
"moment-timezone": "^0.5.13"
@ -42,6 +43,7 @@
"devDependencies": {
"babel-cli": "^6.24.1",
"babel-plugin-lodash": "^3.3.2",
"babel-plugin-transform-runtime": "^6.23.0",
"babel-preset-env": "^1.5.2",
"cross-env": "^5.1.3",
"rimraf": "^2.6.1"
@ -56,7 +58,8 @@
},
"babel": {
"plugins": [
"lodash"
"lodash",
"transform-runtime"
],
"presets": [
[

View File

@ -1,6 +1,6 @@
import humanFormat from 'human-format'
import moment from 'moment-timezone'
import { forEach, startCase } from 'lodash'
import { find, forEach, get, startCase } from 'lodash'
import pkg from '../package'
@ -41,9 +41,9 @@ const DATE_FORMAT = 'dddd, MMMM Do YYYY, h:mm:ss a'
const createDateFormater = timezone =>
timezone !== undefined
? timestamp =>
moment(timestamp)
.tz(timezone)
.format(DATE_FORMAT)
moment(timestamp)
.tz(timezone)
.format(DATE_FORMAT)
: timestamp => moment(timestamp).format(DATE_FORMAT)
const formatDuration = milliseconds => moment.duration(milliseconds).humanize()
@ -66,6 +66,7 @@ const logError = e => {
console.error('backup report error:', e)
}
const NO_VMS_MATCH_THIS_PATTERN = 'no VMs match this pattern'
const NO_SUCH_OBJECT_ERROR = 'no such object'
const UNHEALTHY_VDI_CHAIN_ERROR = 'unhealthy VDI chain'
const UNHEALTHY_VDI_CHAIN_MESSAGE =
@ -94,14 +95,351 @@ class BackupReportsXoPlugin {
this._xo.removeListener('job:terminated', this._report)
}
_wrapper (status) {
return new Promise(resolve => resolve(this._listener(status))).catch(
logError
)
// Listener registered on 'job:terminated': dispatches 'backup' (Backup NG)
// jobs to the new listener, everything else to the legacy one. Wrapping in
// a Promise captures both synchronous throws and async rejections, which
// are logged via logError instead of propagating to the event emitter.
_wrapper (status, job, schedule) {
return new Promise(resolve =>
resolve(
job.type === 'backup'
? this._backupNgListener(status, job, schedule)
: this._listener(status, job, schedule)
)
).catch(logError)
}
// Builds and sends the report for one Backup NG run.
// runJobId: id of the run whose consolidated logs are fetched; the second
// argument (the job) is unused here; the schedule only provides timezone.
async _backupNgListener (runJobId, _, { timezone }) {
const xo = this._xo
const logs = await xo.getBackupNgLogs(runJobId)
// 'roots' holds the job-level entries; filtered by runJobId there is one.
const jobLog = logs['roots'][0]
// Children of the job entry: one task per VM backed up.
const vmsTaskLog = logs[jobLog.id]
const { reportWhen, mode } = jobLog.data || {}
if (reportWhen === 'never') {
return
}
const formatDate = createDateFormater(timezone)
const jobName = (await xo.getJob(jobLog.jobId, 'backup')).name
// Job-level error (e.g. the VM pattern matched nothing): short report,
// no per-VM sections.
if (jobLog.error !== undefined) {
const [globalStatus, icon] =
jobLog.error.message === NO_VMS_MATCH_THIS_PATTERN
? ['Skipped', ICON_SKIPPED]
: ['Failure', ICON_FAILURE]
let markdown = [
`## Global status: ${globalStatus}`,
'',
`- **mode**: ${mode}`,
`- **Start time**: ${formatDate(jobLog.start)}`,
`- **End time**: ${formatDate(jobLog.end)}`,
`- **Duration**: ${formatDuration(jobLog.duration)}`,
`- **Error**: ${jobLog.error.message}`,
'---',
'',
`*${pkg.name} v${pkg.version}*`,
]
markdown = markdown.join('\n')
return this._sendReport({
subject: `[Xen Orchestra] ${globalStatus} Backup report for ${jobName} ${icon}`,
markdown,
nagiosStatus: 2,
nagiosMarkdown: `[Xen Orchestra] [${globalStatus}] Backup report for ${jobName} - Error : ${
jobLog.error.message
}`,
})
}
// Accumulators for the per-VM sections and the global summary.
const failedVmsText = []
const skippedVmsText = []
const successfulVmsText = []
const nagiosText = []
let globalMergeSize = 0
let globalTransferSize = 0
let nFailures = 0
let nSkipped = 0
for (const vmTaskLog of vmsTaskLog || []) {
const vmTaskStatus = vmTaskLog.status
// NOTE(review): this `return` aborts the WHOLE report on the first
// successful VM when reportWhen === 'failure' — a `continue` (skip this
// VM only) looks like the intent; confirm.
if (vmTaskStatus === 'success' && reportWhen === 'failure') {
return
}
const vmId = vmTaskLog.data.id
let vm
try {
vm = xo.getObject(vmId)
} catch (e) {}
const text = [
`### ${vm !== undefined ? vm.name_label : 'VM not found'}`,
'',
`- **UUID**: ${vm !== undefined ? vm.uuid : vmId}`,
`- **Start time**: ${formatDate(vmTaskLog.start)}`,
`- **End time**: ${formatDate(vmTaskLog.end)}`,
`- **Duration**: ${formatDuration(vmTaskLog.duration)}`,
]
const failedSubTasks = []
const operationsText = []
const srsText = []
const remotesText = []
// Sub-tasks of this VM task: snapshot, then one export per remote/SR.
for (const subTaskLog of logs[vmTaskLog.taskId] || []) {
const { data, status, result, message } = subTaskLog
const icon =
subTaskLog.status === 'success' ? ICON_SUCCESS : ICON_FAILURE
const errorMessage = ` **Error**: ${get(result, 'message')}`
if (message === 'snapshot') {
operationsText.push(`- **Snapshot** ${icon}`)
if (status === 'failure') {
failedSubTasks.push('Snapshot')
operationsText.push('', errorMessage)
}
} else if (data.type === 'remote') {
const remoteId = data.id
const remote = await xo.getRemote(remoteId).catch(() => {})
remotesText.push(
`- **${
remote !== undefined ? remote.name : `Remote Not found`
}** (${remoteId}) ${icon}`
)
if (status === 'failure') {
failedSubTasks.push(remote !== undefined ? remote.name : remoteId)
remotesText.push('', errorMessage)
}
} else {
// anything else is treated as an SR export (data.type === 'SR')
const srId = data.id
let sr
try {
sr = xo.getObject(srId)
} catch (e) {}
const [srName, srUuid] =
sr !== undefined ? [sr.name_label, sr.uuid] : [`SR Not found`, srId]
srsText.push(`- **${srName}** (${srUuid}) ${icon}`)
if (status === 'failure') {
failedSubTasks.push(sr !== undefined ? sr.name_label : srId)
srsText.push('', errorMessage)
}
}
}
if (operationsText.length !== 0) {
operationsText.unshift(`#### Operations`, '')
}
if (srsText.length !== 0) {
srsText.unshift(`#### SRs`, '')
}
if (remotesText.length !== 0) {
remotesText.unshift(`#### remotes`, '')
}
const subText = [...operationsText, '', ...srsText, '', ...remotesText]
const result = vmTaskLog.result
if (vmTaskStatus === 'failure' && result !== undefined) {
const { message } = result
// "skipped" errors (e.g. unhealthy VDI chain) are reported separately
// from hard failures
if (isSkippedError(result)) {
++nSkipped
skippedVmsText.push(
...text,
`- **Reason**: ${
message === UNHEALTHY_VDI_CHAIN_ERROR
? UNHEALTHY_VDI_CHAIN_MESSAGE
: message
}`,
''
)
nagiosText.push(
`[(Skipped) ${
vm !== undefined ? vm.name_label : 'undefined'
} : ${message} ]`
)
} else {
++nFailures
failedVmsText.push(...text, `- **Error**: ${message}`, '')
nagiosText.push(
`[(Failed) ${
vm !== undefined ? vm.name_label : 'undefined'
} : ${message} ]`
)
}
} else {
// No VM-level error: pull transfer/merge sizes from the first sub-task
// that has them (the forEach callback returns false to stop early).
let transferSize, transferDuration, mergeSize, mergeDuration
forEach(logs[vmTaskLog.taskId], ({ taskId }) => {
if (transferSize !== undefined) {
return false
}
const transferTask = find(logs[taskId], { message: 'transfer' })
if (transferTask !== undefined) {
transferSize = transferTask.result.size
transferDuration = transferTask.end - transferTask.start
}
const mergeTask = find(logs[taskId], { message: 'merge' })
if (mergeTask !== undefined) {
mergeSize = mergeTask.result.size
mergeDuration = mergeTask.end - mergeTask.start
}
})
if (transferSize !== undefined) {
globalTransferSize += transferSize
text.push(
`- **Transfer size**: ${formatSize(transferSize)}`,
`- **Transfer speed**: ${formatSpeed(
transferSize,
transferDuration
)}`
)
}
if (mergeSize !== undefined) {
globalMergeSize += mergeSize
text.push(
`- **Merge size**: ${formatSize(mergeSize)}`,
`- **Merge speed**: ${formatSpeed(mergeSize, mergeDuration)}`
)
}
// a VM task can fail without a result (sub-task failures only)
if (vmTaskStatus === 'failure') {
++nFailures
failedVmsText.push(...text, '', '', ...subText, '')
nagiosText.push(
`[(Failed) ${
vm !== undefined ? vm.name_label : 'undefined'
}: (failed)[${failedSubTasks.toString()}]]`
)
} else {
successfulVmsText.push(...text, '', '', ...subText, '')
}
}
}
const globalSuccess = nFailures === 0 && nSkipped === 0
if (reportWhen === 'failure' && globalSuccess) {
return
}
const nVms = vmsTaskLog.length
const nSuccesses = nVms - nFailures - nSkipped
const globalStatus = globalSuccess
? `Success`
: nFailures !== 0 ? `Failure` : `Skipped`
let markdown = [
`## Global status: ${globalStatus}`,
'',
`- **mode**: ${mode}`,
`- **Start time**: ${formatDate(jobLog.start)}`,
`- **End time**: ${formatDate(jobLog.end)}`,
`- **Duration**: ${formatDuration(jobLog.duration)}`,
`- **Successes**: ${nSuccesses} / ${nVms}`,
]
if (globalTransferSize !== 0) {
markdown.push(`- **Transfer size**: ${formatSize(globalTransferSize)}`)
}
if (globalMergeSize !== 0) {
markdown.push(`- **Merge size**: ${formatSize(globalMergeSize)}`)
}
markdown.push('')
if (nFailures !== 0) {
markdown.push(
'---',
'',
`## ${nFailures} Failure${nFailures === 1 ? '' : 's'}`,
'',
...failedVmsText
)
}
if (nSkipped !== 0) {
markdown.push('---', '', `## ${nSkipped} Skipped`, '', ...skippedVmsText)
}
if (nSuccesses !== 0 && reportWhen !== 'failure') {
markdown.push(
'---',
'',
`## ${nSuccesses} Success${nSuccesses === 1 ? '' : 'es'}`,
'',
...successfulVmsText
)
}
markdown.push('---', '', `*${pkg.name} v${pkg.version}*`)
markdown = markdown.join('\n')
return this._sendReport({
markdown,
subject: `[Xen Orchestra] ${globalStatus} Backup report for ${jobName} ${
globalSuccess
? ICON_SUCCESS
: nFailures !== 0 ? ICON_FAILURE : ICON_SKIPPED
}`,
nagiosStatus: globalSuccess ? 0 : 2,
nagiosMarkdown: globalSuccess
? `[Xen Orchestra] [Success] Backup report for ${jobName}`
: `[Xen Orchestra] [${
nFailures !== 0 ? 'Failure' : 'Skipped'
}] Backup report for ${jobName} - VMs : ${nagiosText.join(' ')}`,
})
}
// Fans one report out over every available transport. Each method is
// checked with `!== undefined` because it may not be present on xo —
// presumably each is exposed by an optional plugin; confirm.
_sendReport ({ markdown, subject, nagiosStatus, nagiosMarkdown }) {
const xo = this._xo
return Promise.all([
xo.sendEmail !== undefined &&
xo.sendEmail({
to: this._mailsReceivers,
subject,
markdown,
}),
xo.sendToXmppClient !== undefined &&
xo.sendToXmppClient({
to: this._xmppReceivers,
message: markdown,
}),
xo.sendSlackMessage !== undefined &&
xo.sendSlackMessage({
message: markdown,
}),
// Nagios passive check gets the short plain-text variant
xo.sendPassiveCheck !== undefined &&
xo.sendPassiveCheck({
nagiosStatus,
message: nagiosMarkdown,
}),
])
}
_listener (status) {
const { calls } = status
const { calls, timezone, error } = status
const formatDate = createDateFormater(timezone)
if (status.error !== undefined) {
const [globalStatus, icon] =
error.message === NO_VMS_MATCH_THIS_PATTERN
? ['Skipped', ICON_SKIPPED]
: ['Failure', ICON_FAILURE]
let markdown = [
`## Global status: ${globalStatus}`,
'',
`- **Start time**: ${formatDate(status.start)}`,
`- **End time**: ${formatDate(status.end)}`,
`- **Duration**: ${formatDuration(status.end - status.start)}`,
`- **Error**: ${error.message}`,
'---',
'',
`*${pkg.name} v${pkg.version}*`,
]
markdown = markdown.join('\n')
return this._sendReport({
subject: `[Xen Orchestra] ${globalStatus} ${icon}`,
markdown,
nagiosStatus: 2,
nagiosMarkdown: `[Xen Orchestra] [${globalStatus}] Error : ${
error.message
}`,
})
}
const callIds = Object.keys(calls)
const nCalls = callIds.length
@ -139,8 +477,6 @@ class BackupReportsXoPlugin {
const skippedBackupsText = []
const successfulBackupText = []
const formatDate = createDateFormater(status.timezone)
forEach(calls, call => {
const { id = call.params.vm } = call.params
@ -226,9 +562,8 @@ class BackupReportsXoPlugin {
return
}
const { end, start } = status
const { tag } = oneCall.params
const duration = end - start
const duration = status.end - status.start
const nSuccesses = nCalls - nFailures - nSkipped
const globalStatus = globalSuccess
? `Success`
@ -238,8 +573,8 @@ class BackupReportsXoPlugin {
`## Global status: ${globalStatus}`,
'',
`- **Type**: ${formatMethod(method)}`,
`- **Start time**: ${formatDate(start)}`,
`- **End time**: ${formatDate(end)}`,
`- **Start time**: ${formatDate(status.start)}`,
`- **End time**: ${formatDate(status.end)}`,
`- **Duration**: ${formatDuration(duration)}`,
`- **Successes**: ${nSuccesses} / ${nCalls}`,
]
@ -285,37 +620,20 @@ class BackupReportsXoPlugin {
markdown = markdown.join('\n')
const xo = this._xo
return Promise.all([
xo.sendEmail !== undefined &&
xo.sendEmail({
to: this._mailsReceivers,
subject: `[Xen Orchestra] ${globalStatus} Backup report for ${tag} ${
globalSuccess
? ICON_SUCCESS
: nFailures !== 0 ? ICON_FAILURE : ICON_SKIPPED
}`,
markdown,
}),
xo.sendToXmppClient !== undefined &&
xo.sendToXmppClient({
to: this._xmppReceivers,
message: markdown,
}),
xo.sendSlackMessage !== undefined &&
xo.sendSlackMessage({
message: markdown,
}),
xo.sendPassiveCheck !== undefined &&
xo.sendPassiveCheck({
status: globalSuccess ? 0 : 2,
message: globalSuccess
? `[Xen Orchestra] [Success] Backup report for ${tag}`
: `[Xen Orchestra] [${
nFailures !== 0 ? 'Failure' : 'Skipped'
}] Backup report for ${tag} - VMs : ${nagiosText.join(' ')}`,
}),
])
return this._sendReport({
markdown,
subject: `[Xen Orchestra] ${globalStatus} Backup report for ${tag} ${
globalSuccess
? ICON_SUCCESS
: nFailures !== 0 ? ICON_FAILURE : ICON_SKIPPED
}`,
nagiosStatus: globalSuccess ? 0 : 2,
nagiosMarkdown: globalSuccess
? `[Xen Orchestra] [Success] Backup report for ${tag}`
: `[Xen Orchestra] [${
nFailures !== 0 ? 'Failure' : 'Skipped'
}] Backup report for ${tag} - VMs : ${nagiosText.join(' ')}`,
})
}
}

View File

@ -134,6 +134,14 @@ runJob.params = {
// -----------------------------------------------------------------------------
// API method: returns all consolidated Backup NG logs (no runId filter).
// `this` is the xo application object.
export function getAllLogs () {
return this.getBackupNgLogs()
}
getAllLogs.permission = 'admin'
// -----------------------------------------------------------------------------
// API method: deletes one VM backup by id (delegates to BackupNg).
export function deleteVmBackup ({ id }) {
return this.deleteVmBackupNg(id)
}

View File

@ -1,19 +1,5 @@
export async function get ({ namespace }) {
const logger = await this.getLogger(namespace)
return new Promise((resolve, reject) => {
const logs = {}
logger
.createReadStream()
.on('data', data => {
logs[data.key] = data.value
})
.on('end', () => {
resolve(logs)
})
.on('error', reject)
})
export function get ({ namespace }) {
return this.getLogs(namespace)
}
get.description = 'returns logs list for one namespace'

View File

@ -16,6 +16,11 @@ export default {
key: {
type: 'string',
},
type: {
default: 'call',
enum: ['backup', 'call'],
},
data: {},
},
required: ['event', 'userId', 'jobId', 'key'],
required: ['event', 'userId', 'jobId'],
}

View File

@ -0,0 +1,18 @@
// JSON schema (draft-04) for 'task.end' log entries: closes a task opened
// by a matching 'task.start' entry, referenced through taskId.
export default {
  $schema: 'http://json-schema.org/draft-04/schema#',
  type: 'object',
  properties: {
    event: {
      enum: ['task.end'],
    },
    taskId: {
      type: 'string',
      description: 'identifier of this task',
    },
    status: {
      enum: ['canceled', 'failure', 'success'],
    },
    // unconstrained: task return value, or a serialized error on failure
    result: {},
  },
  required: ['event', 'taskId', 'status'],
}

View File

@ -0,0 +1,15 @@
// JSON schema (draft-04) for 'task.start' log entries. Only 'event' is
// required: parentId (link to the parent task/job) and data are optional.
export default {
  $schema: 'http://json-schema.org/draft-04/schema#',
  type: 'object',
  properties: {
    event: {
      enum: ['task.start'],
    },
    parentId: {
      type: 'string',
      description: 'identifier of the parent task or job',
    },
    // unconstrained, free-form payload attached to the task
    data: {},
  },
  required: ['event'],
}

View File

@ -58,7 +58,7 @@ declare export class Xapi {
_updateObjectMapProperty(
object: XapiObject,
property: string,
entries: $Dict<string>
entries: $Dict<null | string>
): Promise<void>;
_setObjectProperties(
object: XapiObject,

View File

@ -6,7 +6,15 @@ import defer from 'golike-defer'
import { type Pattern, createPredicate } from 'value-matcher'
import { type Readable, PassThrough } from 'stream'
import { basename, dirname } from 'path'
import { isEmpty, last, mapValues, noop, values } from 'lodash'
import {
forEach,
groupBy,
isEmpty,
last,
mapValues,
noop,
values,
} from 'lodash'
import { timeout as pTimeout } from 'promise-toolbox'
import Vhd, {
chainVhd,
@ -33,10 +41,12 @@ import {
import { translateLegacyJob } from './migration'
type Mode = 'full' | 'delta'
type ReportWhen = 'always' | 'failure' | 'never'
type Settings = {|
deleteFirst?: boolean,
exportRetention?: number,
reportWhen?: ReportWhen,
snapshotRetention?: number,
vmTimeout?: number,
|}
@ -56,13 +66,6 @@ export type BackupJob = {|
vms: Pattern,
|}
type BackupResult = {|
mergeDuration: number,
mergeSize: number,
transferDuration: number,
transferSize: number,
|}
type MetadataBase = {|
_filename?: string,
jobId: string,
@ -87,6 +90,33 @@ type MetadataFull = {|
|}
type Metadata = MetadataDelta | MetadataFull
// Flattened view of a job run, as produced by getBackupNgLogs():
// built from a 'job.start' entry, completed by its 'job.end' entry.
type ConsolidatedJob = {|
duration?: number,
end?: number,
error?: Object,
id: string,
jobId: string,
mode: Mode,
start: number,
type: 'backup' | 'call',
userId: string,
|}
// A task attached to a parent job or task via parentId; built from a
// 'task.start' entry, completed by its 'task.end' entry.
type ConsolidatedTask = {|
data?: Object,
duration?: number,
end?: number,
parentId: string,
message: string,
result?: Object,
start: number,
status: 'canceled' | 'failure' | 'success',
taskId: string,
|}
// Tree keyed by parentId; job entries (no parent) live under 'roots'.
type ConsolidatedBackupNgLog = {
roots: Array<ConsolidatedJob>,
[parentId: string]: Array<ConsolidatedTask>,
}
const compareSnapshotTime = (a: Vm, b: Vm): number =>
a.snapshot_time < b.snapshot_time ? -1 : 1
@ -105,6 +135,7 @@ const getOldEntries = <T>(retention: number, entries?: T[]): T[] =>
// Fallback values for per-job/per-VM settings — presumably consulted by
// getSetting when no more specific value exists; confirm. vmTimeout 0
// means "no timeout": the executor only applies pTimeout when it is !== 0.
const defaultSettings: Settings = {
deleteFirst: false,
exportRetention: 0,
reportWhen: 'failure',
snapshotRetention: 0,
vmTimeout: 0,
}
@ -283,6 +314,77 @@ const writeStream = async (
}
}
// Wraps an already-started promise with 'task.start' / 'task.end' log
// entries. opts.result, when provided, overrides the logged result: it may
// be a constant or a function of the resolved value.
const wrapTask = async <T>(opts: any, task: Promise<T>): Promise<T> => {
const { data, logger, message, parentId, result } = opts
const taskId = logger.notice(message, {
event: 'task.start',
parentId,
data,
})
return task.then(
value => {
logger.notice(message, {
event: 'task.end',
result:
result === undefined
? value
: typeof result === 'function' ? result(value) : result,
status: 'success',
taskId,
})
// returning the (already fulfilled) promise keeps the original value
return task
},
// NOTE(review): this parameter shadows `result` destructured from opts;
// here it is the rejection reason. Returning the rejected `task`
// re-propagates the rejection to the caller after logging it.
result => {
logger.error(message, {
event: 'task.end',
result: serializeError(result),
status: 'failure',
taskId,
})
return task
}
)
}
// Like wrapTask but for a task *function*: returns a wrapper that logs
// 'task.start' before calling `task` and 'task.end' after it settles.
// opts may itself be a function, evaluated per call with the call's
// arguments (lets the task id/data depend on the arguments). The generated
// taskId is prepended to the wrapped function's arguments.
const wrapTaskFn = <T>(
opts: any,
task: (...any) => Promise<T>
): ((taskId: string, ...any) => Promise<T>) =>
async function () {
const { data, logger, message, parentId, result } =
typeof opts === 'function' ? opts.apply(this, arguments) : opts
const taskId = logger.notice(message, {
event: 'task.start',
parentId,
data,
})
try {
const value = await task.apply(this, [taskId, ...arguments])
logger.notice(message, {
event: 'task.end',
result:
result === undefined
? value
: typeof result === 'function' ? result(value) : result,
status: 'success',
taskId,
})
return value
} catch (result) {
logger.error(message, {
event: 'task.end',
result: serializeError(result),
status: 'failure',
taskId,
})
throw result
}
}
// File structure on remotes:
//
// <remote>
@ -316,6 +418,7 @@ export default class BackupNg {
getXapi: (id: string) => Xapi,
getJob: ((id: string, 'backup') => Promise<BackupJob>) &
((id: string, 'call') => Promise<CallJob>),
getLogs: (namespace: string) => Promise<{ [id: string]: Object }>,
updateJob: (($Shape<BackupJob>, ?boolean) => Promise<BackupJob>) &
(($Shape<CallJob>, ?boolean) => Promise<CallJob>),
removeJob: (id: string) => Promise<void>,
@ -349,82 +452,59 @@ export default class BackupNg {
}
const jobId = job.id
const scheduleId = schedule.id
const status: Object = {
calls: {},
runJobId,
start: Date.now(),
timezone: schedule.timezone,
}
const { calls } = status
await asyncMap(vms, async vm => {
const { uuid } = vm
const method = 'backup-ng'
const params = {
id: uuid,
tag: job.name,
}
const name = vm.name_label
const runCallId = logger.notice(
const { name_label: name, uuid } = vm
const taskId: string = logger.notice(
`Starting backup of ${name}. (${jobId})`,
{
event: 'jobCall.start',
method,
params,
runJobId,
event: 'task.start',
parentId: runJobId,
data: {
type: 'VM',
id: uuid,
},
}
)
const call: Object = (calls[runCallId] = {
method,
params,
start: Date.now(),
})
const vmCancel = cancelToken.fork()
try {
// $FlowFixMe injected $defer param
let p = this._backupVm(vmCancel.token, uuid, job, schedule)
let p = this._backupVm(
vmCancel.token,
uuid,
job,
schedule,
logger,
taskId
)
const vmTimeout: number = getSetting(
job.settings,
'vmTimeout',
uuid,
scheduleId
scheduleId,
logger,
taskId
)
if (vmTimeout !== 0) {
p = pTimeout.call(p, vmTimeout)
}
const returnedValue = await p
logger.notice(
`Backuping ${name} (${runCallId}) is a success. (${jobId})`,
{
event: 'jobCall.end',
runJobId,
runCallId,
returnedValue,
}
)
call.returnedValue = returnedValue
call.end = Date.now()
await p
logger.notice(`Backuping ${name} is a success. (${jobId})`, {
event: 'task.end',
taskId,
status: 'success',
})
} catch (error) {
vmCancel.cancel()
logger.notice(
`Backuping ${name} (${runCallId}) has failed. (${jobId})`,
{
event: 'jobCall.end',
runJobId,
runCallId,
error: Array.isArray(error)
? error.map(serializeError)
: serializeError(error),
}
)
call.error = error
call.end = Date.now()
logger.error(`Backuping ${name} has failed. (${jobId})`, {
event: 'task.end',
taskId,
status: 'failure',
result: Array.isArray(error)
? error.map(serializeError)
: serializeError(error),
})
}
})
status.end = Date.now()
return status
}
app.registerJobExecutor('backup', executor)
})
@ -618,8 +698,10 @@ export default class BackupNg {
$cancelToken: any,
vmUuid: string,
job: BackupJob,
schedule: Schedule
): Promise<BackupResult> {
schedule: Schedule,
logger: any,
taskId: string
): Promise<void> {
const app = this._app
const xapi = app.getXapi(vmUuid)
const vm: Vm = (xapi.getObject(vmUuid): any)
@ -660,10 +742,18 @@ export default class BackupNg {
await xapi._assertHealthyVdiChains(vm)
let snapshot: Vm = (await xapi._snapshotVm(
$cancelToken,
vm,
`[XO Backup ${job.name}] ${vm.name_label}`
let snapshot: Vm = (await wrapTask(
{
parentId: taskId,
logger,
message: 'snapshot',
result: _ => _.uuid,
},
xapi._snapshotVm(
$cancelToken,
vm,
`[XO Backup ${job.name}] ${vm.name_label}`
)
): any)
await xapi._updateObjectMapProperty(snapshot, 'other_config', {
'xo:backup:job': jobId,
@ -686,12 +776,7 @@ export default class BackupNg {
snapshot = ((await xapi.barrier(snapshot.$ref): any): Vm)
if (exportRetention === 0) {
return {
mergeDuration: 0,
mergeSize: 0,
transferDuration: 0,
transferSize: 0,
}
return
}
const remotes = unboxIds(job.remotes)
@ -746,93 +831,123 @@ export default class BackupNg {
const jsonMetadata = JSON.stringify(metadata)
const errors = []
await waitAll(
[
...remotes.map(async remoteId => {
const fork = forkExport()
const handler = await app.getRemoteHandler(remoteId)
const oldBackups: MetadataFull[] = (getOldEntries(
exportRetention,
await this._listVmBackups(
handler,
vm,
_ => _.mode === 'full' && _.scheduleId === scheduleId
)
): any)
const deleteFirst = getSetting(settings, 'deleteFirst', remoteId)
if (deleteFirst) {
await this._deleteFullVmBackups(handler, oldBackups)
}
await writeStream(fork, handler, dataFilename)
await handler.outputFile(metadataFilename, jsonMetadata)
if (!deleteFirst) {
await this._deleteFullVmBackups(handler, oldBackups)
}
}),
...srs.map(async srId => {
const fork = forkExport()
const xapi = app.getXapi(srId)
const sr = xapi.getObject(srId)
const oldVms = getOldEntries(
exportRetention,
listReplicatedVms(xapi, scheduleId, srId, vmUuid)
)
const deleteFirst = getSetting(settings, 'deleteFirst', srId)
if (deleteFirst) {
await this._deleteVms(xapi, oldVms)
}
const vm = await xapi.barrier(
await xapi._importVm($cancelToken, fork, sr, vm =>
xapi._setObjectProperties(vm, {
nameLabel: `${metadata.vm.name_label} (${safeDateFormat(
metadata.timestamp
)})`,
})
)
)
await Promise.all([
xapi.addTag(vm.$ref, 'Disaster Recovery'),
xapi._updateObjectMapProperty(vm, 'blocked_operations', {
start:
'Start operation for this vm is blocked, clone it if you want to use it.',
...remotes.map(
wrapTaskFn(
id => ({
data: { id, type: 'remote' },
logger,
message: 'export',
parentId: taskId,
}),
xapi._updateObjectMapProperty(vm, 'other_config', {
'xo:backup:sr': srId,
}),
])
async (taskId, remoteId) => {
const fork = forkExport()
if (!deleteFirst) {
await this._deleteVms(xapi, oldVms)
}
}),
const handler = await app.getRemoteHandler(remoteId)
const oldBackups: MetadataFull[] = (getOldEntries(
exportRetention,
await this._listVmBackups(
handler,
vm,
_ => _.mode === 'full' && _.scheduleId === scheduleId
)
): any)
const deleteFirst = getSetting(
settings,
'deleteFirst',
remoteId
)
if (deleteFirst) {
await this._deleteFullVmBackups(handler, oldBackups)
}
await wrapTask(
{
logger,
message: 'transfer',
parentId: taskId,
result: {
size: 0,
},
},
writeStream(fork, handler, dataFilename)
)
await handler.outputFile(metadataFilename, jsonMetadata)
if (!deleteFirst) {
await this._deleteFullVmBackups(handler, oldBackups)
}
}
)
),
...srs.map(
wrapTaskFn(
id => ({
data: { id, type: 'SR' },
logger,
message: 'export',
parentId: taskId,
}),
async (taskId, srId) => {
const fork = forkExport()
const xapi = app.getXapi(srId)
const sr = xapi.getObject(srId)
const oldVms = getOldEntries(
exportRetention,
listReplicatedVms(xapi, scheduleId, srId, vmUuid)
)
const deleteFirst = getSetting(settings, 'deleteFirst', srId)
if (deleteFirst) {
await this._deleteVms(xapi, oldVms)
}
const vm = await xapi.barrier(
await wrapTask(
{
logger,
message: 'transfer',
parentId: taskId,
result: {
size: 0,
},
},
xapi._importVm($cancelToken, fork, sr, vm =>
xapi._setObjectProperties(vm, {
nameLabel: `${metadata.vm.name_label} (${safeDateFormat(
metadata.timestamp
)})`,
})
)
)
)
await Promise.all([
xapi.addTag(vm.$ref, 'Disaster Recovery'),
xapi._updateObjectMapProperty(vm, 'blocked_operations', {
start:
'Start operation for this vm is blocked, clone it if you want to use it.',
}),
xapi._updateObjectMapProperty(vm, 'other_config', {
'xo:backup:sr': srId,
}),
])
if (!deleteFirst) {
await this._deleteVms(xapi, oldVms)
}
}
)
),
],
error => {
console.warn(error)
errors.push(error)
}
noop // errors are handled in logs
)
if (errors.length !== 0) {
throw errors
}
return {
mergeDuration: 0,
mergeSize: 0,
transferDuration: Date.now() - now,
transferSize: xva.size,
}
} else if (job.mode === 'delta') {
if (snapshotRetention === 0) {
// only keep the snapshot in case of success
@ -904,128 +1019,164 @@ export default class BackupNg {
}
})()
const mergeStart = 0
const mergeEnd = 0
let transferStart = 0
let transferEnd = 0
const errors = []
await waitAll(
[
...remotes.map(async remoteId => {
const fork = forkExport()
...remotes.map(
wrapTaskFn(
id => ({
data: { id, type: 'remote' },
logger,
message: 'export',
parentId: taskId,
}),
async (taskId, remoteId) => {
const fork = forkExport()
const handler = await app.getRemoteHandler(remoteId)
const handler = await app.getRemoteHandler(remoteId)
const oldBackups: MetadataDelta[] = (getOldEntries(
exportRetention,
await this._listVmBackups(
handler,
vm,
_ => _.mode === 'delta' && _.scheduleId === scheduleId
)
): any)
const oldBackups: MetadataDelta[] = (getOldEntries(
exportRetention,
await this._listVmBackups(
handler,
vm,
_ => _.mode === 'delta' && _.scheduleId === scheduleId
)
): any)
const deleteOldBackups = () =>
wrapTask(
{
logger,
message: 'merge',
parentId: taskId,
result: {
size: 0,
},
},
this._deleteDeltaVmBackups(handler, oldBackups)
)
const deleteFirst =
exportRetention > 1 &&
getSetting(settings, 'deleteFirst', remoteId)
if (deleteFirst) {
await this._deleteDeltaVmBackups(handler, oldBackups)
}
await asyncMap(
fork.vdis,
defer(async ($defer, vdi, id) => {
const path = `${vmDir}/${metadata.vhds[id]}`
const isDelta = vdi.other_config['xo:base_delta'] !== undefined
let parentPath
if (isDelta) {
const vdiDir = dirname(path)
const parent = (await handler.list(vdiDir))
.filter(isVhd)
.sort()
.pop()
parentPath = `${vdiDir}/${parent}`
const deleteFirst =
exportRetention > 1 &&
getSetting(settings, 'deleteFirst', remoteId)
if (deleteFirst) {
await deleteOldBackups()
}
await writeStream(fork.streams[`${id}.vhd`](), handler, path, {
// no checksum for VHDs, because they will be invalidated by
// merges and chainings
checksum: false,
})
$defer.onFailure.call(handler, 'unlink', path)
await wrapTask(
{
logger,
message: 'transfer',
parentId: taskId,
result: {
size: 0,
},
},
asyncMap(
fork.vdis,
defer(async ($defer, vdi, id) => {
const path = `${vmDir}/${metadata.vhds[id]}`
if (isDelta) {
await chainVhd(handler, parentPath, handler, path)
const isDelta =
vdi.other_config['xo:base_delta'] !== undefined
let parentPath
if (isDelta) {
const vdiDir = dirname(path)
const parent = (await handler.list(vdiDir))
.filter(isVhd)
.sort()
.pop()
parentPath = `${vdiDir}/${parent}`
}
await writeStream(
fork.streams[`${id}.vhd`](),
handler,
path,
{
// no checksum for VHDs, because they will be invalidated by
// merges and chainings
checksum: false,
}
)
$defer.onFailure.call(handler, 'unlink', path)
if (isDelta) {
await chainVhd(handler, parentPath, handler, path)
}
})
)
)
await handler.outputFile(metadataFilename, jsonMetadata)
if (!deleteFirst) {
await deleteOldBackups()
}
})
}
)
),
...srs.map(
wrapTaskFn(
id => ({
data: { id, type: 'SR' },
logger,
message: 'export',
parentId: taskId,
}),
async (taskId, srId) => {
const fork = forkExport()
await handler.outputFile(metadataFilename, jsonMetadata)
const xapi = app.getXapi(srId)
const sr = xapi.getObject(srId)
if (!deleteFirst) {
await this._deleteDeltaVmBackups(handler, oldBackups)
}
}),
...srs.map(async srId => {
const fork = forkExport()
const oldVms = getOldEntries(
exportRetention,
listReplicatedVms(xapi, scheduleId, srId, vmUuid)
)
const xapi = app.getXapi(srId)
const sr = xapi.getObject(srId)
const deleteFirst = getSetting(settings, 'deleteFirst', srId)
if (deleteFirst) {
await this._deleteVms(xapi, oldVms)
}
const oldVms = getOldEntries(
exportRetention,
listReplicatedVms(xapi, scheduleId, srId, vmUuid)
const { vm } = await wrapTask(
{
logger,
message: 'transfer',
parentId: taskId,
result: {
size: 0,
},
},
xapi.importDeltaVm(fork, {
disableStartAfterImport: false, // we'll take care of that
name_label: `${metadata.vm.name_label} (${safeDateFormat(
metadata.timestamp
)})`,
srId: sr.$id,
})
)
await Promise.all([
xapi.addTag(vm.$ref, 'Continuous Replication'),
xapi._updateObjectMapProperty(vm, 'blocked_operations', {
start:
'Start operation for this vm is blocked, clone it if you want to use it.',
}),
xapi._updateObjectMapProperty(vm, 'other_config', {
'xo:backup:sr': srId,
}),
])
if (!deleteFirst) {
await this._deleteVms(xapi, oldVms)
}
}
)
const deleteFirst = getSetting(settings, 'deleteFirst', srId)
if (deleteFirst) {
await this._deleteVms(xapi, oldVms)
}
transferStart = Math.min(transferStart, Date.now())
const { vm } = await xapi.importDeltaVm(fork, {
disableStartAfterImport: false, // we'll take care of that
name_label: `${metadata.vm.name_label} (${safeDateFormat(
metadata.timestamp
)})`,
srId: sr.$id,
})
transferEnd = Math.max(transferEnd, Date.now())
await Promise.all([
xapi.addTag(vm.$ref, 'Continuous Replication'),
xapi._updateObjectMapProperty(vm, 'blocked_operations', {
start:
'Start operation for this vm is blocked, clone it if you want to use it.',
}),
xapi._updateObjectMapProperty(vm, 'other_config', {
'xo:backup:sr': srId,
}),
])
if (!deleteFirst) {
await this._deleteVms(xapi, oldVms)
}
}),
),
],
error => {
console.warn(error)
errors.push(error)
}
noop // errors are handled in logs
)
if (errors.length !== 0) {
throw errors
}
return {
mergeDuration: mergeEnd - mergeStart,
mergeSize: 0,
transferDuration: transferEnd - transferStart,
transferSize: 0,
}
} else {
throw new Error(`no exporter for backup mode ${job.mode}`)
}
@ -1137,4 +1288,54 @@ export default class BackupNg {
return backups.sort(compareTimestamp)
}
// Consolidates the raw 'jobs'-namespace logs into a tree grouped by parent:
// 'backup'-type jobs end up under 'roots', tasks under their parentId.
// runId, when given, restricts the result to that single run.
async getBackupNgLogs (runId?: string): Promise<ConsolidatedBackupNgLog> {
const rawLogs = await this._app.getLogs('jobs')
const logs: $Dict<ConsolidatedJob & ConsolidatedTask> = {}
forEach(rawLogs, (log, id) => {
const { data, time, message } = log
const { event } = data
// event is spread into the consolidated entry below, so strip it first
delete data.event
switch (event) {
case 'job.start':
// only Backup NG jobs are kept (legacy 'call' jobs are ignored)
if (data.type === 'backup' && (runId === undefined || runId === id)) {
logs[id] = {
...data,
id,
start: time,
}
}
break
case 'job.end':
const job = logs[data.runJobId]
if (job !== undefined) {
job.end = time
job.duration = time - job.start
job.error = data.error
}
break
case 'task.start':
// tasks are kept only if their parent (job or task) was kept,
// which transitively applies the runId filter
if (logs[data.parentId] !== undefined) {
logs[id] = {
...data,
start: time,
message,
}
}
break
// no break needed: 'task.end' is the last case
case 'task.end':
const task = logs[data.taskId]
if (task !== undefined) {
task.status = data.status
task.taskId = data.taskId
task.result = data.result
task.end = time
task.duration = time - task.start
}
}
})
// jobs have no parentId, so they are grouped under 'roots'
return groupBy(logs, log => log.parentId || 'roots')
}
}

View File

@ -209,18 +209,32 @@ export default class Jobs {
throw new Error(`job ${id} is already running`)
}
const executor = this._executors[job.type]
const { type } = job
const executor = this._executors[type]
if (executor === undefined) {
throw new Error(`cannot run job ${id}: no executor for type ${job.type}`)
throw new Error(`cannot run job ${id}: no executor for type ${type}`)
}
let data
if (type === 'backup') {
// $FlowFixMe only defined for BackupJob
const settings = job.settings['']
data = {
// $FlowFixMe only defined for BackupJob
mode: job.mode,
reportWhen: (settings && settings.reportWhen) || 'failure',
}
}
const logger = this._logger
const runJobId = logger.notice(`Starting execution of ${id}.`, {
data,
event: 'job.start',
userId: job.userId,
jobId: id,
// $FlowFixMe only defined for CallJob
key: job.key,
type,
})
runningJobs[id] = runJobId
@ -231,7 +245,7 @@ export default class Jobs {
session = app.createUserConnection()
session.set('user_id', job.userId)
const status = await executor({
await executor({
app,
cancelToken,
job,
@ -245,8 +259,7 @@ export default class Jobs {
runJobId,
})
session.close()
app.emit('job:terminated', status)
app.emit('job:terminated', runJobId, job, schedule)
} catch (error) {
logger.error(`The execution of ${id} has failed.`, {
event: 'job.end',

View File

@ -32,11 +32,11 @@ export default class Logs {
const onData =
keep !== 0
? () => {
if (--keep === 0) {
stream.on('data', deleteEntry)
stream.removeListener('data', onData)
if (--keep === 0) {
stream.on('data', deleteEntry)
stream.removeListener('data', onData)
}
}
}
: deleteEntry
stream.on('data', onData)
@ -51,4 +51,22 @@ export default class Logs {
.getStore('logs')
.then(store => new LevelDbLogger(store, namespace))
}
async getLogs (namespace) {
const logger = await this.getLogger(namespace)
return new Promise((resolve, reject) => {
const logs = {}
logger
.createReadStream()
.on('data', data => {
logs[data.key] = data.value
})
.on('end', () => {
resolve(logs)
})
.on('error', reject)
})
}
}

View File

@ -288,6 +288,23 @@ const messages = {
jobFinished: 'Finished',
jobInterrupted: 'Interrupted',
jobStarted: 'Started',
jobFailed: 'Failed',
jobSkipped: 'Skipped',
jobSuccess: 'Successful',
allTasks: 'All',
taskStart: 'Start',
taskEnd: 'End',
taskDuration: 'Duration',
taskSuccess: 'Successful',
taskFailed: 'Failed',
taskSkipped: 'Skipped',
taskStarted: 'Started',
taskInterrupted: 'Interrupted',
taskTransferredDataSize: 'Transfer size',
taskTransferredDataSpeed: 'Transfer speed',
taskMergedDataSize: 'Merge size',
taskMergedDataSpeed: 'Merge speed',
taskError: 'Error',
saveBackupJob: 'Save',
deleteBackupSchedule: 'Remove backup job',
deleteBackupScheduleQuestion:
@ -326,6 +343,11 @@ const messages = {
runBackupNgJobConfirm: 'Are you sure you want to run {name} ({id})?',
// ------ New backup -----
newBackupAdvancedSettings: 'Advanced settings',
reportWhenAlways: 'Always',
reportWhenFailure: 'Failure',
reportWhenNever: 'Never',
reportWhen: 'Report when',
newBackupSelection: 'Select your backup type:',
smartBackupModeSelection: 'Select backup mode:',
normalBackup: 'Normal backup',
@ -1633,6 +1655,7 @@ const messages = {
logParams: 'Params',
logMessage: 'Message',
logError: 'Error',
logTitle: 'Logs',
logDisplayDetails: 'Display details',
logTime: 'Date',
logNoStackTrace: 'No stack trace',

View File

@ -1705,6 +1705,10 @@ export const subscribeBackupNgJobs = createSubscription(() =>
_call('backupNg.getAllJobs')
)
// Subscription to the consolidated Backup NG logs
// (server-side `backupNg.getAllLogs`).
export const subscribeBackupNgLogs = createSubscription(() =>
  _call('backupNg.getAllLogs')
)
// Creates a Backup NG job then refreshes the jobs subscription so the UI
// picks up the new job immediately.
export const createBackupNgJob = props =>
  _call('backupNg.createJob', props)::tap(subscribeBackupNgJobs.forceRefresh)

View File

@ -21,7 +21,7 @@ import {
subscribeSchedules,
} from 'xo'
import LogsTable from '../logs'
import LogsTable from '../logs/backup-ng-logs'
import Page from '../page'
import Edit from './edit'

View File

@ -3,6 +3,7 @@ import ActionButton from 'action-button'
import Icon from 'icon'
import React from 'react'
import renderXoItem, { renderXoItemFromId } from 'render-xo-item'
import Select from 'form/select'
import Tooltip from 'tooltip'
import Upgrade from 'xoa-upgrade'
import { addSubscriptions, resolveId, resolveIds } from 'utils'
@ -12,9 +13,10 @@ import {
find,
findKey,
flatten,
keyBy,
get,
includes,
isEmpty,
keyBy,
map,
some,
} from 'lodash'
@ -89,6 +91,23 @@ const getNewSchedules = schedules => {
return newSchedules
}
// Options for the "report when" setting.
//
// The `value`s are the raw identifiers stored in the job settings; they
// must be lower case to match the server side, which defaults to
// 'failure' and compares these values verbatim.
const REPORT_WHEN_FILTER_OPTIONS = [
  {
    label: 'reportWhenAlways',
    value: 'always',
  },
  {
    label: 'reportWhenFailure',
    value: 'failure',
  },
  {
    label: 'reportWhenNever',
    // FIX: was 'Never' — the capitalized value would never match a
    // lowercase 'never' comparison when the report is generated
    value: 'never',
  },
]
// Renders a <Select> option label through the i18n helper.
const getOptionRenderer = ({ label }) => <span>{_(label)}</span>
const getInitialState = () => ({
$pool: {},
backupMode: false,
@ -103,6 +122,7 @@ const getInitialState = () => ({
paramsUpdated: false,
powerState: 'All',
remotes: [],
reportWhen: 'failure',
schedules: [],
settings: {},
smartMode: false,
@ -136,6 +156,9 @@ export default [
schedules: getNewSchedules(state.newSchedules),
settings: {
...getNewSettings(state.newSchedules),
'': {
reportWhen: state.reportWhen,
},
},
remotes:
state.deltaMode || state.backupMode
@ -195,11 +218,16 @@ export default [
const oldSettings = props.job.settings
const settings = state.settings
if (!('' in oldSettings)) {
oldSettings[''] = {}
}
for (const id in oldSettings) {
const oldSetting = oldSettings[id]
const newSetting = settings[id]
if (!(id in settings)) {
if (id === '') {
oldSetting.reportWhen = state.reportWhen
} else if (!(id in settings)) {
delete oldSettings[id]
} else if (
oldSetting.snapshotRetention !== newSetting.snapshotRetention ||
@ -281,6 +309,9 @@ export default [
const remotes =
job.remotes !== undefined ? destructPattern(job.remotes) : []
const srs = job.srs !== undefined ? destructPattern(job.srs) : []
const globalSettings = job.settings['']
const settings = { ...job.settings }
delete settings['']
return {
...state,
@ -298,7 +329,8 @@ export default [
crMode: job.mode === 'delta' && !isEmpty(srs),
remotes,
srs,
settings: job.settings,
reportWhen: get(globalSettings, 'reportWhen') || 'failure',
settings,
schedules,
...destructVmsPattern(job.vms),
}
@ -455,6 +487,10 @@ export default [
return getInitialState()
},
setReportWhen: (_, { value }) => state => ({
...state,
reportWhen: value,
}),
},
computed: {
needUpdateParams: (state, { job, schedules }) =>
@ -698,6 +734,25 @@ export default [
</CardBlock>
</Card>
)}
<Card>
<CardHeader>{_('newBackupAdvancedSettings')}</CardHeader>
<CardBlock>
<FormGroup>
<label>
<strong>{_('reportWhen')}</strong>
</label>
<Select
labelKey='label'
onChange={effects.setReportWhen}
optionRenderer={getOptionRenderer}
options={REPORT_WHEN_FILTER_OPTIONS}
required
value={state.reportWhen}
valueKey='value'
/>
</FormGroup>
</CardBlock>
</Card>
</Col>
<Col mediumSize={6}>
<Schedules />

View File

@ -0,0 +1,199 @@
import _, { FormattedDuration } from 'intl'
import addSubscriptions from 'add-subscriptions'
import Icon from 'icon'
import NoObjects from 'no-objects'
import React from 'react'
import SortedTable from 'sorted-table'
import { alert } from 'modal'
import { Card, CardHeader, CardBlock } from 'card'
import { forEach, keyBy } from 'lodash'
import { FormattedDate } from 'react-intl'
import { get } from 'xo-defined'
import {
deleteJobsLogs,
subscribeBackupNgJobs,
subscribeBackupNgLogs,
} from 'xo'
import LogAlertBody from './log-alert-body'
import { isSkippedError, NO_VMS_MATCH_THIS_PATTERN } from './utils'
// Maps each consolidated run status to the Bootstrap tag class used to
// render it and the i18n key of its label.
const STATUS_LABELS = {
  failure: { className: 'danger', label: 'jobFailed' },
  skipped: { className: 'info', label: 'jobSkipped' },
  success: { className: 'success', label: 'jobSuccess' },
  started: { className: 'warning', label: 'jobStarted' },
  interrupted: { className: 'danger', label: 'jobInterrupted' },
}
// Columns of the Backup NG logs table. Each renderer receives the
// consolidated log entry (after `rowTransform`) and the table's `data-*`
// props (`jobs`, `logs`).
const LOG_COLUMNS = [
  {
    name: _('jobId'),
    // only a short slice of the UUID is displayed
    itemRenderer: log => log.jobId.slice(4, 8),
    sortCriteria: log => log.jobId,
  },
  {
    name: _('jobMode'),
    // NOTE(review): `data` appears to be set only for backup jobs — the
    // safe `get` hides its absence; confirm against getBackupNgLogs
    itemRenderer: log => get(() => log.data.mode),
    sortCriteria: log => get(() => log.data.mode),
  },
  {
    name: _('jobName'),
    // the job may have been deleted since the run, hence the safe `get`
    itemRenderer: (log, { jobs }) => get(() => jobs[log.jobId].name),
    sortCriteria: (log, { jobs }) => get(() => jobs[log.jobId].name),
  },
  {
    name: _('jobStart'),
    itemRenderer: log => (
      <FormattedDate
        value={new Date(log.start)}
        month='short'
        day='numeric'
        year='numeric'
        hour='2-digit'
        minute='2-digit'
        second='2-digit'
      />
    ),
    sortCriteria: log => log.start,
    sortOrder: 'desc',
  },
  {
    default: true,
    name: _('jobEnd'),
    // still-running runs have no `end` yet: render nothing
    itemRenderer: log =>
      log.end !== undefined && (
        <FormattedDate
          value={new Date(log.end)}
          month='short'
          day='numeric'
          year='numeric'
          hour='2-digit'
          minute='2-digit'
          second='2-digit'
        />
      ),
    // fall back to `start` so unfinished runs still sort sensibly
    sortCriteria: log => log.end || log.start,
    sortOrder: 'desc',
  },
  {
    name: _('jobDuration'),
    itemRenderer: log =>
      log.duration !== undefined && (
        <FormattedDuration duration={log.duration} />
      ),
    sortCriteria: log => log.duration,
  },
  {
    name: _('jobStatus'),
    // `status` is always set by `rowTransform`
    itemRenderer: log => {
      const { className, label } = STATUS_LABELS[log.status]
      return <span className={`tag tag-${className}`}>{_(label)}</span>
    },
  },
]
// Opens a modal displaying the detailed tasks of the given job run.
const showCalls = (log, { logs, jobs }) =>
  alert(
    _('jobModalTitle', { job: log.jobId.slice(4, 8) }),
    <LogAlertBody log={log} job={get(() => jobs[log.jobId])} logs={logs} />
  )
// Per-row action: display the run details in a modal.
const LOG_INDIVIDUAL_ACTIONS = [
  {
    handler: showCalls,
    icon: 'preview',
    label: _('logDisplayDetails'),
  },
]

// Bulk action on the selected rows: delete the corresponding log entries.
const LOG_ACTIONS = [
  {
    handler: deleteJobsLogs,
    icon: 'delete',
    label: _('remove'),
  },
]

// Quick filters offered by the table, keyed by i18n label.
const LOG_FILTERS = {
  jobFailed: 'status: failure',
  jobInterrupted: 'status: interrupted',
  jobSkipped: 'status: skipped',
  jobStarted: 'status: started',
  jobSuccess: 'status: success',
}
// Computes the displayed `status` of a job run from its consolidated log
// entry and the statuses of its tasks.
const rowTransform = (log, { logs, jobs }) => {
  let status
  if (log.end === undefined) {
    // no end event: the run is in progress if it is still the job's
    // current run, otherwise it was interrupted (e.g. server restart)
    status =
      log.id === get(() => jobs[log.jobId].runId) ? 'started' : 'interrupted'
  } else if (log.error !== undefined) {
    // a run-level error: "no VMs match" is merely a skip
    status =
      log.error.message === NO_VMS_MATCH_THIS_PATTERN ? 'skipped' : 'failure'
  } else {
    // no run-level error: derive the status from the tasks
    let failed = false
    let skipped = false
    forEach(logs[log.id], task => {
      if (task.status !== 'failure') {
        return
      }
      if (task.result === undefined || !isSkippedError(task.result)) {
        failed = true
        return false // a real failure: stop iterating
      }
      skipped = true
    })
    status = failed ? 'failure' : skipped ? 'skipped' : 'success'
  }

  return {
    ...log,
    status,
  }
}
// Backup NG logs table: subscribes to the consolidated logs and the jobs
// (keyed by id), then renders the top-level runs (`logs.roots`) in a
// sorted table.
export default [
  addSubscriptions({
    logs: subscribeBackupNgLogs,
    jobs: cb => subscribeBackupNgJobs(jobs => cb(keyBy(jobs, 'id'))),
  }),
  ({ logs, jobs }) => (
    <Card>
      <CardHeader>
        <Icon icon='log' /> {_('logTitle')}
      </CardHeader>
      <CardBlock>
        <NoObjects
          actions={LOG_ACTIONS}
          collection={get(() => logs['roots'])}
          columns={LOG_COLUMNS}
          component={SortedTable}
          data-jobs={jobs}
          data-logs={logs}
          emptyMessage={_('noLogs')}
          filters={LOG_FILTERS}
          individualActions={LOG_INDIVIDUAL_ACTIONS}
          rowTransform={rowTransform}
        />
      </CardBlock>
    </Card>
  ),
].reduceRight((value, decorator) => decorator(value))

View File

@ -131,6 +131,7 @@ const PREDICATES = {
success: () => call => call.end !== undefined && call.error === undefined,
}
const NO_OBJECTS_MATCH_THIS_PATTERN = 'no objects match this pattern'
const UNHEALTHY_VDI_CHAIN_ERROR = 'unhealthy VDI chain'
const NO_SUCH_OBJECT_ERROR = 'no such object'
const UNHEALTHY_VDI_CHAIN_LINK =
@ -173,7 +174,18 @@ class Log extends BaseComponent {
)
render () {
return (
const { error } = this.props.log
return error !== undefined ? (
<span
className={
error.message === NO_OBJECTS_MATCH_THIS_PATTERN
? 'text-info'
: 'text-danger'
}
>
<Icon icon='alarm' /> {error.message}
</span>
) : (
<div>
<Select
labelKey='label'
@ -442,6 +454,15 @@ export default [
entry.end = time
entry.duration = time - entry.start
entry.status = 'finished'
if (data.error !== undefined) {
entry.error = data.error
if (data.error.message === NO_OBJECTS_MATCH_THIS_PATTERN) {
entry.callSkipped = true
} else {
entry.hasErrors = true
}
}
} else if (data.event === 'jobCall.start') {
entry.calls[id] = {
callKey: id,

View File

@ -0,0 +1,348 @@
import _, { FormattedDuration } from 'intl'
import Copiable from 'copiable'
import Icon from 'icon'
import React from 'react'
import renderXoItem, { renderXoItemFromId } from 'render-xo-item'
import Select from 'form/select'
import Tooltip from 'tooltip'
import { addSubscriptions, formatSize, formatSpeed } from 'utils'
import { createSelector } from 'selectors'
import { find, filter, isEmpty, get, keyBy, map, forEach } from 'lodash'
import { FormattedDate } from 'react-intl'
import { injectState, provideState } from '@julien-f/freactal'
import { subscribeRemotes } from 'xo'
import {
isSkippedError,
NO_VMS_MATCH_THIS_PATTERN,
UNHEALTHY_VDI_CHAIN_ERROR,
} from './utils'
// Status of a VM-level task: a finished task is 'success', 'skipped'
// (when the error is a deliberate skip) or 'failure'; an unfinished one is
// 'started' while the job is still running, otherwise 'interrupted'.
const getTaskStatus = createSelector(
  taskLog => taskLog,
  isJobRunning => isJobRunning,
  ({ end, status, result }, isJobRunning) => {
    if (end === undefined) {
      return isJobRunning ? 'started' : 'interrupted'
    }
    if (status === 'success') {
      return 'success'
    }
    return result !== undefined && isSkippedError(result)
      ? 'skipped'
      : 'failure'
  }
)
// Status of a sub-task (snapshot/export): like getTaskStatus but a
// sub-task is never considered 'skipped'.
const getSubTaskStatus = createSelector(
  taskLog => taskLog,
  isJobRunning => isJobRunning,
  ({ end, status }, isJobRunning) => {
    if (end === undefined) {
      return isJobRunning ? 'started' : 'interrupted'
    }
    return status === 'success' ? 'success' : 'failure'
  }
)
// Icon and i18n label associated with each task status.
const TASK_STATUS = {
  failure: { icon: 'halted', label: 'taskFailed' },
  skipped: { icon: 'skipped', label: 'taskSkipped' },
  success: { icon: 'running', label: 'taskSuccess' },
  started: { icon: 'busy', label: 'taskStarted' },
  interrupted: { icon: 'halted', label: 'taskInterrupted' },
}
// Small status icon with a tooltip naming the task status.
const TaskStateInfos = ({ status }) => {
  const { icon, label } = TASK_STATUS[status]
  return (
    <Tooltip content={_(label)}>
      <Icon icon={icon} />
    </Tooltip>
  )
}
// Displays transfer/merge sizes and speeds for a VM task, extracted from
// its 'transfer' and 'merge' descendant tasks (`logs` is keyed by parent
// log id).
const VmTaskDataInfos = ({ logs, vmTaskId }) => {
  let transferSize, transferDuration, mergeSize, mergeDuration
  forEach(logs[vmTaskId], ({ taskId }) => {
    // returning false stops the lodash iteration: only the first sub-task
    // providing transfer data is taken into account
    if (transferSize !== undefined) {
      return false
    }

    const transferTask = find(logs[taskId], { message: 'transfer' })
    if (transferTask !== undefined) {
      transferSize = transferTask.result.size
      transferDuration = transferTask.end - transferTask.start
    }

    const mergeTask = find(logs[taskId], { message: 'merge' })
    if (mergeTask !== undefined) {
      mergeSize = mergeTask.result.size
      mergeDuration = mergeTask.end - mergeTask.start
    }
  })

  // nothing to display when no transfer happened
  if (transferSize === undefined) {
    return null
  }

  return (
    <div>
      {_.keyValue(_('taskTransferredDataSize'), formatSize(transferSize))}
      <br />
      {_.keyValue(
        _('taskTransferredDataSpeed'),
        formatSpeed(transferSize, transferDuration)
      )}
      {mergeSize !== undefined && (
        <div>
          {_.keyValue(_('taskMergedDataSize'), formatSize(mergeSize))}
          <br />
          {_.keyValue(
            _('taskMergedDataSpeed'),
            formatSpeed(mergeSize, mergeDuration)
          )}
        </div>
      )}
    </div>
  )
}
// Documentation page explaining the "VDI chain protection" skip.
const UNHEALTHY_VDI_CHAIN_LINK =
  'https://xen-orchestra.com/docs/backup_troubleshooting.html#vdi-chain-protection'

// Status filter options for the task list; the first three are referenced
// directly by getInitialFilter.
const ALL_FILTER_OPTION = { label: 'allTasks', value: 'all' }
const FAILURE_FILTER_OPTION = { label: 'taskFailed', value: 'failure' }
const STARTED_FILTER_OPTION = { label: 'taskStarted', value: 'started' }
const TASK_FILTER_OPTIONS = [
  ALL_FILTER_OPTION,
  FAILURE_FILTER_OPTION,
  STARTED_FILTER_OPTION,
  { label: 'taskInterrupted', value: 'interrupted' },
  { label: 'taskSkipped', value: 'skipped' },
  { label: 'taskSuccess', value: 'success' },
]
// Returns the task logs whose computed status matches `filterValue`
// ('all' disables filtering).
const getFilteredTaskLogs = (logs, isJobRunning, filterValue) => {
  if (filterValue === 'all') {
    return logs
  }
  return filter(logs, log => getTaskStatus(log, isJobRunning) === filterValue)
}
// Picks the filter initially selected in the task list: running tasks
// first, then failures, otherwise everything.
const getInitialFilter = (job, logs, log) => {
  const taskLogs = logs[log.id]
  const isJobRunning = get(job, 'runId') === log.id
  const hasTasksWithStatus = filterValue =>
    !isEmpty(getFilteredTaskLogs(taskLogs, isJobRunning, filterValue))

  if (hasTasksWithStatus('started')) {
    return STARTED_FILTER_OPTION
  }
  if (hasTasksWithStatus('failure')) {
    return FAILURE_FILTER_OPTION
  }
  return ALL_FILTER_OPTION
}
// Modal body detailing one job run: a status filter plus, for each VM
// task, its sub-tasks, timings, transfer/merge data and errors. When the
// run itself failed, only the run-level error is shown.
export default [
  // remotes are needed to render 'remote' targets by name
  addSubscriptions({
    remotes: cb =>
      subscribeRemotes(remotes => {
        cb(keyBy(remotes, 'id'))
      }),
  }),
  provideState({
    initialState: ({ job, logs, log }) => ({
      filter: getInitialFilter(job, logs, log),
    }),
    effects: {
      setFilter: (_, filter) => state => ({
        ...state,
        filter,
      }),
    },
    computed: {
      // the run is still in progress iff it is the job's current run
      isJobRunning: (_, { job, log }) => get(job, 'runId') === log.id,
      filteredTaskLogs: ({ filter: { value }, isJobRunning }, { log, logs }) =>
        getFilteredTaskLogs(logs[log.id], isJobRunning, value),
      // option label with the number of tasks it would display
      optionRenderer: ({ isJobRunning }, { log, logs }) => ({
        label,
        value,
      }) => (
        <span>
          {_(label)} ({
            getFilteredTaskLogs(logs[log.id], isJobRunning, value).length
          })
        </span>
      ),
    },
  }),
  injectState,
  ({ job, log, logs, remotes, state, effects }) =>
    log.error !== undefined ? (
      <span
        className={
          log.error.message === NO_VMS_MATCH_THIS_PATTERN
            ? 'text-info'
            : 'text-danger'
        }
      >
        <Copiable tagName='p' data={JSON.stringify(log.error, null, 2)}>
          <Icon icon='alarm' /> {log.error.message}
        </Copiable>
      </span>
    ) : (
      <div>
        <Select
          labelKey='label'
          onChange={effects.setFilter}
          optionRenderer={state.optionRenderer}
          options={TASK_FILTER_OPTIONS}
          required
          value={state.filter}
          valueKey='value'
        />
        <br />
        <ul className='list-group'>
          {map(state.filteredTaskLogs, vmTaskLog => (
            <li key={vmTaskLog.data.id} className='list-group-item'>
              {renderXoItemFromId(vmTaskLog.data.id)} ({vmTaskLog.data.id.slice(
                4,
                8
              )}){' '}
              <TaskStateInfos
                status={getTaskStatus(vmTaskLog, state.isJobRunning)}
              />
              <ul>
                {map(logs[vmTaskLog.taskId], subTaskLog => (
                  <li key={subTaskLog.taskId}>
                    {subTaskLog.message === 'snapshot' ? (
                      <span>
                        <Icon icon='task' /> {_('snapshotVmLabel')}
                      </span>
                    ) : subTaskLog.data.type === 'remote' ? (
                      <span>
                        {get(remotes, subTaskLog.data.id) !== undefined
                          ? renderXoItem({
                              type: 'remote',
                              value: remotes[subTaskLog.data.id],
                            })
                          : _('errorNoSuchItem')}{' '}
                        ({subTaskLog.data.id.slice(4, 8)})
                      </span>
                    ) : (
                      <span>
                        {renderXoItemFromId(subTaskLog.data.id)} ({subTaskLog.data.id.slice(
                          4,
                          8
                        )})
                      </span>
                    )}{' '}
                    <TaskStateInfos
                      status={getSubTaskStatus(subTaskLog, state.isJobRunning)}
                    />
                    <br />
                    {subTaskLog.status === 'failure' && (
                      <Copiable
                        tagName='p'
                        data={JSON.stringify(subTaskLog.result, null, 2)}
                      >
                        {_.keyValue(
                          _('taskError'),
                          <span className={'text-danger'}>
                            {subTaskLog.result.message}
                          </span>
                        )}
                      </Copiable>
                    )}
                  </li>
                ))}
              </ul>
              {_.keyValue(
                _('taskStart'),
                <FormattedDate
                  value={new Date(vmTaskLog.start)}
                  month='short'
                  day='numeric'
                  year='numeric'
                  hour='2-digit'
                  minute='2-digit'
                  second='2-digit'
                />
              )}
              {vmTaskLog.end !== undefined && (
                <div>
                  {_.keyValue(
                    _('taskEnd'),
                    <FormattedDate
                      value={new Date(vmTaskLog.end)}
                      month='short'
                      day='numeric'
                      year='numeric'
                      hour='2-digit'
                      minute='2-digit'
                      second='2-digit'
                    />
                  )}
                  <br />
                  {_.keyValue(
                    _('taskDuration'),
                    <FormattedDuration duration={vmTaskLog.duration} />
                  )}
                  <br />
                  {vmTaskLog.status === 'failure' &&
                  vmTaskLog.result !== undefined ? (
                    vmTaskLog.result.message === UNHEALTHY_VDI_CHAIN_ERROR ? (
                      <Tooltip content={_('clickForMoreInformation')}>
                        <a
                          className='text-info'
                          href={UNHEALTHY_VDI_CHAIN_LINK}
                          rel='noopener noreferrer'
                          target='_blank'
                        >
                          <Icon icon='info' /> {_('unhealthyVdiChainError')}
                        </a>
                      </Tooltip>
                    ) : (
                      <Copiable
                        tagName='p'
                        data={JSON.stringify(vmTaskLog.result, null, 2)}
                      >
                        {_.keyValue(
                          _('taskError'),
                          <span
                            className={
                              isSkippedError(vmTaskLog.result)
                                ? 'text-info'
                                : 'text-danger'
                            }
                          >
                            {vmTaskLog.result.message}
                          </span>
                        )}
                      </Copiable>
                    )
                  ) : (
                    <VmTaskDataInfos logs={logs} vmTaskId={vmTaskLog.taskId} />
                  )}
                </div>
              )}
            </li>
          ))}
        </ul>
      </div>
    ),
].reduceRight((value, decorator) => decorator(value))

View File

@ -0,0 +1,7 @@
// Error messages shared by the Backup NG log views.
export const NO_VMS_MATCH_THIS_PATTERN = 'no VMs match this pattern'
export const UNHEALTHY_VDI_CHAIN_ERROR = 'unhealthy VDI chain'

const NO_SUCH_OBJECT_ERROR = 'no such object'

// Messages meaning a VM backup was deliberately skipped rather than failed.
const SKIPPED_ERROR_MESSAGES = [UNHEALTHY_VDI_CHAIN_ERROR, NO_SUCH_OBJECT_ERROR]

// Whether the given error corresponds to a skipped (not failed) VM backup.
export const isSkippedError = error =>
  SKIPPED_ERROR_MESSAGES.includes(error.message)