feat(xo-server/getBackupNgLogs): implement debounce (#4509) (#4541)

Similar to #4509

Fixes xoa-support#1676

For now, the delay is set to 10s, which matches the duration used by xo-web's
subscription; this is enough to make the consolidation cost independent of the
number of clients.

In the future, this could be configurable, but we may simply do the
consolidation only once during the backup execution.
This commit is contained in:
Julien Fontanet
2019-09-23 16:20:17 +02:00
committed by GitHub
parent 925eca1463
commit a6d182e92d
2 changed files with 126 additions and 116 deletions

View File

@@ -8,6 +8,7 @@
> Users must be able to say: “Nice enhancement, I'm eager to test it”
- [Settings/Logs] Differentiate XS/XCP-ng errors from XO errors [#4101](https://github.com/vatesfr/xen-orchestra/issues/4101) (PR [#4385](https://github.com/vatesfr/xen-orchestra/pull/4385))
- [Backups] Improve performance by caching logs consolidation (PR [#4541](https://github.com/vatesfr/xen-orchestra/pull/4541))
### Bug fixes

View File

@@ -1,6 +1,8 @@
import ms from 'ms'
import { forEach, isEmpty, iteratee, sortedIndexBy } from 'lodash'
import { debounceWithKey } from '../_pDebounceWithKey'
const isSkippedError = error =>
error.message === 'no disks found' ||
error.message === 'no VMs match this pattern' ||
@@ -64,131 +66,138 @@ const taskTimeComparator = ({ start: s1, end: e1 }, { start: s2, end: e2 }) => {
// tasks?: Task[],
// }
export default {
async getBackupNgLogs(runId?: string) {
const [jobLogs, restoreLogs, restoreMetadataLogs] = await Promise.all([
this.getLogs('jobs'),
this.getLogs('restore'),
this.getLogs('metadataRestore'),
])
getBackupNgLogs: debounceWithKey(
async function getBackupNgLogs(runId?: string) {
const [jobLogs, restoreLogs, restoreMetadataLogs] = await Promise.all([
this.getLogs('jobs'),
this.getLogs('restore'),
this.getLogs('metadataRestore'),
])
const { runningJobs, runningRestores, runningMetadataRestores } = this
const consolidated = {}
const started = {}
const { runningJobs, runningRestores, runningMetadataRestores } = this
const consolidated = {}
const started = {}
const handleLog = ({ data, time, message }, id) => {
const { event } = data
if (event === 'job.start') {
if (
(data.type === 'backup' || data.key === undefined) &&
(runId === undefined || runId === id)
) {
const { scheduleId, jobId } = data
consolidated[id] = started[id] = {
const handleLog = ({ data, time, message }, id) => {
const { event } = data
if (event === 'job.start') {
if (
(data.type === 'backup' || data.key === undefined) &&
(runId === undefined || runId === id)
) {
const { scheduleId, jobId } = data
consolidated[id] = started[id] = {
data: data.data,
id,
jobId,
jobName: data.jobName,
message: 'backup',
scheduleId,
start: time,
status: runningJobs[jobId] === id ? 'pending' : 'interrupted',
}
}
} else if (event === 'job.end') {
const { runJobId } = data
const log = started[runJobId]
if (log !== undefined) {
delete started[runJobId]
log.end = time
log.status = computeStatusAndSortTasks(
getStatus((log.result = data.error)),
log.tasks
)
}
} else if (event === 'task.start') {
const task = {
data: data.data,
id,
jobId,
jobName: data.jobName,
message: 'backup',
scheduleId,
message,
start: time,
status: runningJobs[jobId] === id ? 'pending' : 'interrupted',
}
const { parentId } = data
let parent
if (parentId === undefined && (runId === undefined || runId === id)) {
// top level task
task.status =
(message === 'restore' && !runningRestores.has(id)) ||
(message === 'metadataRestore' &&
!runningMetadataRestores.has(id))
? 'interrupted'
: 'pending'
consolidated[id] = started[id] = task
} else if ((parent = started[parentId]) !== undefined) {
// sub-task for which the parent exists
task.status = parent.status
started[id] = task
;(parent.tasks || (parent.tasks = [])).push(task)
}
} else if (event === 'task.end') {
const { taskId } = data
const log = started[taskId]
if (log !== undefined) {
// TODO: merge/transfer work-around
delete started[taskId]
log.end = time
log.status = computeStatusAndSortTasks(
getStatus((log.result = data.result), data.status),
log.tasks
)
}
} else if (event === 'task.warning') {
const parent = started[data.taskId]
parent !== undefined &&
(parent.warnings || (parent.warnings = [])).push({
data: data.data,
message,
})
} else if (event === 'task.info') {
const parent = started[data.taskId]
parent !== undefined &&
(parent.infos || (parent.infos = [])).push({
data: data.data,
message,
})
} else if (event === 'jobCall.start') {
const parent = started[data.runJobId]
if (parent !== undefined) {
;(parent.tasks || (parent.tasks = [])).push(
(started[id] = {
data: {
type: 'VM',
id: data.params.id,
},
id,
start: time,
status: parent.status,
})
)
}
} else if (event === 'jobCall.end') {
const { runCallId } = data
const log = started[runCallId]
if (log !== undefined) {
delete started[runCallId]
log.end = time
log.status = computeStatusAndSortTasks(
getStatus((log.result = data.error)),
log.tasks
)
}
}
} else if (event === 'job.end') {
const { runJobId } = data
const log = started[runJobId]
if (log !== undefined) {
delete started[runJobId]
log.end = time
log.status = computeStatusAndSortTasks(
getStatus((log.result = data.error)),
log.tasks
)
}
} else if (event === 'task.start') {
const task = {
data: data.data,
id,
message,
start: time,
}
const { parentId } = data
let parent
if (parentId === undefined && (runId === undefined || runId === id)) {
// top level task
task.status =
(message === 'restore' && !runningRestores.has(id)) ||
(message === 'metadataRestore' && !runningMetadataRestores.has(id))
? 'interrupted'
: 'pending'
consolidated[id] = started[id] = task
} else if ((parent = started[parentId]) !== undefined) {
// sub-task for which the parent exists
task.status = parent.status
started[id] = task
;(parent.tasks || (parent.tasks = [])).push(task)
}
} else if (event === 'task.end') {
const { taskId } = data
const log = started[taskId]
if (log !== undefined) {
// TODO: merge/transfer work-around
delete started[taskId]
log.end = time
log.status = computeStatusAndSortTasks(
getStatus((log.result = data.result), data.status),
log.tasks
)
}
} else if (event === 'task.warning') {
const parent = started[data.taskId]
parent !== undefined &&
(parent.warnings || (parent.warnings = [])).push({
data: data.data,
message,
})
} else if (event === 'task.info') {
const parent = started[data.taskId]
parent !== undefined &&
(parent.infos || (parent.infos = [])).push({
data: data.data,
message,
})
} else if (event === 'jobCall.start') {
const parent = started[data.runJobId]
if (parent !== undefined) {
;(parent.tasks || (parent.tasks = [])).push(
(started[id] = {
data: {
type: 'VM',
id: data.params.id,
},
id,
start: time,
status: parent.status,
})
)
}
} else if (event === 'jobCall.end') {
const { runCallId } = data
const log = started[runCallId]
if (log !== undefined) {
delete started[runCallId]
log.end = time
log.status = computeStatusAndSortTasks(
getStatus((log.result = data.error)),
log.tasks
)
}
}
forEach(jobLogs, handleLog)
forEach(restoreLogs, handleLog)
forEach(restoreMetadataLogs, handleLog)
return runId === undefined ? consolidated : consolidated[runId]
},
10e3,
function keyFn(runId) {
return [this, runId]
}
forEach(jobLogs, handleLog)
forEach(restoreLogs, handleLog)
forEach(restoreMetadataLogs, handleLog)
return runId === undefined ? consolidated : consolidated[runId]
},
),
async getBackupNgLogsSorted({ after, before, filter, limit }) {
let logs = await this.getBackupNgLogs()