Compare commits

..

10 Commits

Author SHA1 Message Date
Julien Fontanet
766175b4a0 feat(xo-server): multi processes 2018-05-15 15:47:32 +02:00
Julien Fontanet
0e2270fb6e feat(xo-web): 5.19.2 2018-05-15 14:46:33 +02:00
Julien Fontanet
593493ec0c feat(xo-server): 5.19.4 2018-05-15 14:46:07 +02:00
Julien Fontanet
d92898a806 feat(xo-vmdk-to-vhd): 0.1.0 2018-05-15 14:45:19 +02:00
Julien Fontanet
7890e46551 feat(xo-server-backup-reports): 0.11.0 2018-05-15 14:42:32 +02:00
Julien Fontanet
ef942a6209 feat(Backup NG): implrtment logs and reports (#2869) 2018-05-15 14:40:11 +02:00
Nicolas Raynaud
fdde916388 feat(xo-web/vms-import): redirect to VM or home page (#2942)
If a single VM has been imported, redirect to its page.

If multiple VMs has been imported, redirect to the homepage with all other VMs filtered out.
2018-05-14 17:42:11 +02:00
Julien Fontanet
31314d201b fix(xo-server/backupNg/delta): await deletion/merge 2018-05-14 15:38:11 +02:00
Julien Fontanet
a29a949c51 fix(xo-server/backupNg/delta): deleteFirst iff retention > 1 2018-05-14 15:37:09 +02:00
Julien Fontanet
cc1ce8c5f8 chore: update yarn.lock 2018-05-14 13:53:03 +02:00
48 changed files with 2937 additions and 1048 deletions

View File

@@ -0,0 +1,3 @@
// Build this package's Babel configuration from the monorepo-shared
// factory, specialized via the local package.json.
const buildConfig = require('../../@xen-orchestra/babel-config')

module.exports = buildConfig(require('./package.json'))

View File

@@ -0,0 +1,24 @@
/benchmark/
/benchmarks/
*.bench.js
*.bench.js.map
/examples/
example.js
example.js.map
*.example.js
*.example.js.map
/fixture/
/fixtures/
*.fixture.js
*.fixture.js.map
*.fixtures.js
*.fixtures.js.map
/test/
/tests/
*.spec.js
*.spec.js.map
__snapshots__/

View File

@@ -0,0 +1,149 @@
# @xen-orchestra/log [![Build Status](https://travis-ci.org/vatesfr/xen-orchestra.png?branch=master)](https://travis-ci.org/vatesfr/xen-orchestra)
> ${pkg.description}
## Install
Installation of the [npm package](https://npmjs.org/package/@xen-orchestra/log):
```
> npm install --save @xen-orchestra/log
```
## Usage
Everywhere something should be logged:
```js
import { createLogger } from '@xen-orchestra/log'
const log = createLogger('xo-server-api')
log.warn('foo')
```
Then at application level you can choose how to handle these logs:
```js
import configure from '@xen-orchestra/log/configure'
import createConsoleTransport from '@xen-orchestra/log/transports/console'
import createEmailTransport from '@xen-orchestra/log/transports/email'
configure([
{
// if filter is a string, then it is pattern
// (https://github.com/visionmedia/debug#wildcards) which is
// matched against the namespace of the logs
filter: process.env.DEBUG,
transport: createConsoleTransport()
},
{
// only levels >= warn
level: 'warn',
    transport: createEmailTransport({
service: 'gmail',
auth: {
user: 'jane.smith@gmail.com',
pass: 'H&NbECcpXF|pyXe#%ZEb'
},
from: 'jane.smith@gmail.com',
to: [
'jane.smith@gmail.com',
'sam.doe@yahoo.com'
]
})
}
])
```
### Transports
#### Console
```js
import createConsoleTransport from '@xen-orchestra/log/transports/console'
configure(createConsoleTransport())
```
#### Email
Optional dependency:
```
> yarn add nodemailer pretty-format
```
Configuration:
```js
import createEmailTransport from '@xen-orchestra/log/transports/email'
configure(createEmailTransport({
service: 'gmail',
auth: {
user: 'jane.smith@gmail.com',
pass: 'H&NbECcpXF|pyXe#%ZEb'
},
from: 'jane.smith@gmail.com',
to: [
'jane.smith@gmail.com',
'sam.doe@yahoo.com'
]
}))
```
#### Syslog
Optional dependency:
```
> yarn add split-host syslog-client
```
Configuration:
```js
import createSyslogTransport from '@xen-orchestra/log/transports/syslog'
// By default, log to udp://localhost:514
configure(createSyslogTransport())
// But TCP, a different host, or a different port can be used
configure(createSyslogTransport('tcp://syslog.company.lan'))
```
## Development
```
# Install dependencies
> yarn
# Run the tests
> yarn test
# Continuously compile
> yarn dev
# Continuously run the tests
> yarn dev-test
# Build for production (automatically called by npm install)
> yarn build
```
## Contributions
Contributions are *very* welcome, either on the documentation or on
the code.
You may:
- report any [issue](https://github.com/vatesfr/xo-web/issues/)
you've encountered;
- fork and create a pull request.
## License
ISC © [Vates SAS](https://vates.fr)

View File

@@ -0,0 +1 @@
// Entry point for `@xen-orchestra/log/configure`: re-export the compiled build.
module.exports = require('./dist/configure')

View File

@@ -0,0 +1,52 @@
{
"private": true,
"name": "@xen-orchestra/log",
"version": "0.0.0",
"license": "ISC",
"description": "",
"keywords": [],
"homepage": "https://github.com/vatesfr/xen-orchestra/tree/master/packages/@xen-orchestra/log",
"bugs": "https://github.com/vatesfr/xen-orchestra/issues",
"repository": {
"type": "git",
"url": "https://github.com/vatesfr/xen-orchestra.git"
},
"author": {
"name": "Julien Fontanet",
"email": "julien.fontanet@vates.fr"
},
"preferGlobal": false,
"main": "dist/",
"bin": {},
"files": [
"dist/"
],
"browserslist": [
">2%"
],
"engines": {
"node": ">=4"
},
"dependencies": {
"@babel/polyfill": "7.0.0-beta.42",
"lodash": "^4.17.4",
"promise-toolbox": "^0.9.5"
},
"devDependencies": {
"@babel/cli": "7.0.0-beta.42",
"@babel/core": "7.0.0-beta.42",
"@babel/preset-env": "7.0.0-beta.42",
"@babel/preset-flow": "7.0.0-beta.42",
"babel-plugin-lodash": "^3.3.2",
"cross-env": "^5.1.3",
"rimraf": "^2.6.2"
},
"scripts": {
"build": "cross-env NODE_ENV=production babel --source-maps --out-dir=dist/ src/",
"clean": "rimraf dist/",
"dev": "cross-env NODE_ENV=development babel --watch --source-maps --out-dir=dist/ src/",
"prebuild": "yarn run clean",
"predev": "yarn run prebuild",
"prepublishOnly": "yarn run build"
}
}

View File

@@ -0,0 +1,105 @@
import createConsoleTransport from './transports/console'
import LEVELS, { resolve } from './levels'
import { compileGlobPattern } from './utils'
// ===================================================================
// Compile a transport configuration into a transport function.
//
// Accepted forms:
// - a function: used as is
// - an array: each entry is compiled, the result calls all of them
// - an object { filter?, level?, transport }: `transport` is only invoked
//   when the log level is >= `level`, or when `filter` (a predicate, or a
//   debug-style glob pattern matched against the namespace) accepts the log
const createTransport = config => {
  if (typeof config === 'function') {
    return config
  }

  if (Array.isArray(config)) {
    const compiled = config.map(createTransport)
    const n = compiled.length
    return function () {
      for (let i = 0; i < n; ++i) {
        compiled[i].apply(this, arguments)
      }
    }
  }

  let { filter, transport } = config
  const level = resolve(config.level)

  // build the acceptance predicate, if any
  let accept
  if (filter !== undefined) {
    if (typeof filter === 'string') {
      const re = compileGlobPattern(filter)
      filter = log => re.test(log.namespace)
    }
    accept = log => (level !== undefined && log.level >= level) || filter(log)
  } else if (level !== undefined) {
    accept = log => log.level >= level
  }

  if (accept !== undefined) {
    const inner = transport
    transport = function (log) {
      if (accept(log)) {
        return inner.apply(this, arguments)
      }
    }
  }

  return transport
}
// Default transport: log to the console anything at INFO level or above,
// plus any namespace enabled via the DEBUG or NODE_DEBUG env variables.
let transport = createTransport({
  filter: process.env.DEBUG || process.env.NODE_DEBUG,
  level: LEVELS.INFO,
  transport: createConsoleTransport(),
})
// Well-known key under which the active transport is published on `global`,
// so the logger module and this `configure` module share state even if
// multiple copies of the package are loaded.
const symbol =
  typeof Symbol !== 'undefined'
    ? Symbol.for('@xen-orchestra/log')
    : '@@@xen-orchestra/log'
global[symbol] = log => transport(log)
// Replace the global transport with one compiled from `config`
// (a function, an array of configs, or a { filter, level, transport } object).
export const configure = config => {
  transport = createTransport(config)
}
// -------------------------------------------------------------------
// Route uncaught exceptions, unhandled rejections, Node warnings and
// listener-less EventEmitter 'error' events to the given logger.
//
// Returns a function which undoes the patching.
export const catchGlobalErrors = logger => {
  // process-level handlers, keyed by event name
  const handlers = {
    uncaughtException: error => {
      logger.error('uncaught exception', { error })
    },
    unhandledRejection: error => {
      logger.warn('possibly unhandled rejection', { error })
    },
    warning: error => {
      logger.warn('Node warning', { error })
    },
  }
  for (const event of Object.keys(handlers)) {
    process.on(event, handlers[event])
  }

  // patch EventEmitter: log 'error' events that have no listener instead
  // of letting them throw
  const { prototype } = require('events')
  const { emit } = prototype
  function patchedEmit (event, error) {
    if (event === 'error' && this.listenerCount(event) === 0) {
      logger.error('unhandled error event', { error })
    } else {
      emit.apply(this, arguments)
    }
  }
  prototype.emit = patchedEmit

  return () => {
    for (const event of Object.keys(handlers)) {
      process.removeListener(event, handlers[event])
    }

    // only restore if nobody else patched emit in the meantime
    if (prototype.emit === patchedEmit) {
      prototype.emit = emit
    }
  }
}

View File

@@ -0,0 +1,65 @@
import createTransport from './transports/console'
import LEVELS from './levels'
// Well-known key under which the active transport is published on `global`;
// falls back to a plain string when Symbol is not available.
const symbol =
  typeof Symbol !== 'undefined'
    ? Symbol.for('@xen-orchestra/log')
    : '@@@xen-orchestra/log'
if (!(symbol in global)) {
  // the default behavior, without requiring `configure` is to avoid
  // logging anything unless it's a real error
  const transport = createTransport()
  global[symbol] = log => log.level > LEVELS.WARN && transport(log)
}
// -------------------------------------------------------------------
// Plain record describing a single log entry.
function Log (data, level, namespace, message, time) {
  this.data = data
  this.level = level
  this.namespace = namespace
  this.message = message
  this.time = time
}
// Logger bound to a namespace; one method per level (fatal, error, warn,
// info, debug) is generated on the prototype below.
function Logger (namespace) {
  this._namespace = namespace
  // bind all logging methods so they can be passed around detached
  for (const name in LEVELS) {
    const lowerCase = name.toLowerCase()
    this[lowerCase] = this[lowerCase].bind(this)
  }
}
const { prototype } = Logger
// Generate one method per level which forwards a Log entry to the
// globally registered transport.
for (const name in LEVELS) {
  const level = LEVELS[name]
  prototype[name.toLowerCase()] = function (message, data) {
    global[symbol](new Log(data, level, this._namespace, message, new Date()))
  }
}
// Wrap `fn` so that any synchronous exception or promise rejection is
// logged at warn level (with `message`) and then rethrown/propagated.
prototype.wrap = function (message, fn) {
  const logger = this
  const warnAndRethrow = error => {
    logger.warn(message, { error })
    throw error
  }
  return function () {
    try {
      const result = fn.apply(this, arguments)
      const then = result != null && result.then
      // FIX: the handler must be registered as the *rejection* handler;
      // it was previously passed as the fulfillment handler, which logged
      // a warning and threw the result on every successful async call.
      return typeof then === 'function'
        ? then.call(result, undefined, warnAndRethrow)
        : result
    } catch (error) {
      warnAndRethrow(error)
    }
  }
}
// Public factory: returns a Logger bound to the given namespace.
export const createLogger = namespace => new Logger(namespace)

View File

@@ -0,0 +1,24 @@
// Numeric log levels, stored in a prototype-less map.
// Values follow bunyan's convention:
// https://github.com/trentm/node-bunyan#levels
const LEVELS = Object.create(null)
export { LEVELS as default }

LEVELS.FATAL = 60 // the service/app is about to go down
LEVELS.ERROR = 50 // fatal for the current action only
LEVELS.WARN = 40 // something went wrong, but it is not fatal
LEVELS.INFO = 30 // detail on an unusual but normal operation
LEVELS.DEBUG = 20

// Reverse mapping: numeric value -> level name.
export const NAMES = Object.create(null)
for (const name of Object.keys(LEVELS)) {
  NAMES[LEVELS[name]] = name
}

// Accept either a level name (case-insensitive) or a numeric value and
// return the numeric value (undefined for an unknown name).
export const resolve = level =>
  typeof level === 'string' ? LEVELS[level.toUpperCase()] : level

// Both maps are part of the public API: make them immutable.
Object.freeze(LEVELS)
Object.freeze(NAMES)

View File

@@ -0,0 +1,32 @@
/* eslint-env jest */
import { forEach, isInteger } from 'lodash'
import LEVELS, { NAMES, resolve } from './levels'
// LEVELS maps level names (FATAL, ERROR, …) to numeric severities.
describe('LEVELS', () => {
  it('maps level names to their integer values', () => {
    forEach(LEVELS, (value, name) => {
      expect(isInteger(value)).toBe(true)
    })
  })
})
// NAMES is the inverse mapping: numeric severity -> name.
describe('NAMES', () => {
  it('maps level values to their names', () => {
    forEach(LEVELS, (value, name) => {
      expect(NAMES[value]).toBe(name)
    })
  })
})
// resolve() normalizes either representation to the numeric value.
describe('resolve()', () => {
  it('returns level values either from values or names', () => {
    forEach(LEVELS, value => {
      expect(resolve(value)).toBe(value)
    })
    forEach(NAMES, (name, value) => {
      // NAMES keys are strings, hence the `+value` numeric coercion
      expect(resolve(name)).toBe(+value)
    })
  })
})

View File

@@ -0,0 +1,20 @@
import LEVELS, { NAMES } from '../levels'

// Bind console methods (necessary for browsers)
const logToConsole = {
  debug: console.log.bind(console),
  info: console.info.bind(console),
  warn: console.warn.bind(console),
  error: console.error.bind(console),
}

const { ERROR, INFO, WARN } = LEVELS

// Pick the console method matching the log level, then print
// `time - namespace - [LEVEL] message`, followed by the data if any.
const consoleTransport = ({ data, level, namespace, message, time }) => {
  let fn
  if (level < INFO) {
    fn = logToConsole.debug
  } else if (level < WARN) {
    fn = logToConsole.info
  } else if (level < ERROR) {
    fn = logToConsole.warn
  } else {
    fn = logToConsole.error
  }

  fn('%s - %s - [%s] %s', time.toISOString(), namespace, NAMES[level], message)
  if (data != null) {
    fn(data)
  }
}

export default () => consoleTransport

View File

@@ -0,0 +1,68 @@
import prettyFormat from 'pretty-format' // eslint-disable-line node/no-extraneous-import
import { createTransport } from 'nodemailer' // eslint-disable-line node/no-extraneous-import
import { fromCallback } from 'promise-toolbox'
import { evalTemplate, required } from '../utils'
import { NAMES } from '../levels'
// Create an email transport: each log entry is sent as an individual mail,
// with a subject rendered from the `subject` template ({{level}},
// {{namespace}}, {{time}}, {{message}} placeholders) and the log data
// pretty-printed as the body.
//
// `from` and `to` are mandatory (the `required` default throws when they
// are omitted); all other options are forwarded to nodemailer.
export default ({
  // transport options (https://nodemailer.com/smtp/)
  auth,
  authMethod,
  host,
  ignoreTLS,
  port,
  proxy,
  requireTLS,
  secure,
  service,
  tls,
  // message options (https://nodemailer.com/message/)
  bcc,
  cc,
  from = required('from'),
  to = required('to'),
  subject = '[{{level}} - {{namespace}}] {{time}} {{message}}',
}) => {
  const transporter = createTransport(
    {
      auth,
      authMethod,
      host,
      ignoreTLS,
      port,
      proxy,
      requireTLS,
      secure,
      service,
      tls,
      // hardening: never let nodemailer read files or URLs referenced by
      // the message content
      disableFileAccess: true,
      disableUrlAccess: true,
    },
    {
      // message defaults merged into every sent mail
      bcc,
      cc,
      from,
      to,
    }
  )
  // the transport itself: one mail per log entry
  return log =>
    fromCallback(cb =>
      transporter.sendMail(
        {
          subject: evalTemplate(
            subject,
            key =>
              key === 'level'
                ? NAMES[log.level]
                : key === 'time' ? log.time.toISOString() : log[key]
          ),
          text: prettyFormat(log.data),
        },
        cb
      )
    )
}

View File

@@ -0,0 +1,42 @@
import splitHost from 'split-host' // eslint-disable-line node/no-extraneous-import node/no-missing-import
import { createClient, Facility, Severity, Transport } from 'syslog-client' // eslint-disable-line node/no-extraneous-import node/no-missing-import
import { fromCallback } from 'promise-toolbox'
import { startsWith } from 'lodash'
import LEVELS from '../levels'
// Map our log levels onto syslog severities.
// https://github.com/paulgrove/node-syslog-client#syslogseverity
const LEVEL_TO_SEVERITY = {
  [LEVELS.FATAL]: Severity.Critical,
  [LEVELS.ERROR]: Severity.Error,
  [LEVELS.WARN]: Severity.Warning,
  [LEVELS.INFO]: Severity.Informational,
  [LEVELS.DEBUG]: Severity.Debug,
}
const facility = Facility.User
// Create a syslog transport.
//
// `target` may be undefined (syslog-client default: udp, localhost:514) or
// a string such as `tcp://syslog.company.lan[:port]` / `udp://host[:port]`.
export default target => {
  const opts = {}
  if (target !== undefined) {
    if (startsWith(target, 'tcp://')) {
      target = target.slice(6)
      opts.transport = Transport.Tcp
    } else if (startsWith(target, 'udp://')) {
      target = target.slice(6)
      // FIX: `Transport.Ucp` does not exist — the UDP transport constant
      // is `Transport.Udp`
      opts.transport = Transport.Udp
    }
    ;({ host: target, port: opts.port } = splitHost(target))
  }
  const client = createClient(target, opts)
  return log =>
    fromCallback(cb =>
      // FIX: forward `cb` to `client.log`, otherwise the promise returned
      // by `fromCallback` never settles
      client.log(
        log.message,
        {
          facility,
          severity: LEVEL_TO_SEVERITY[log.level],
        },
        cb
      )
    )
}

View File

@@ -0,0 +1,62 @@
import escapeRegExp from 'lodash/escapeRegExp'
// ===================================================================
// Matches `{{key}}` placeholders.
const TPL_RE = /\{\{(.+?)\}\}/g

// Render a `{{key}}`-style template.
//
// `data` is either a lookup function (called with each key) or an object
// whose properties provide the replacement values.
export const evalTemplate = (tpl, data) => {
  const lookup =
    typeof data === 'function' ? (_, key) => data(key) : (_, key) => data[key]
  return tpl.replace(TPL_RE, lookup)
}
// -------------------------------------------------------------------
// Turn a single glob fragment into a regexp source: `*` means "any
// sequence of characters", everything else is matched literally.
const compileGlobPatternFragment = fragment =>
  fragment.split('*').map(escapeRegExp).join('.*')

// Compile a debug-style pattern list
// (https://github.com/visionmedia/debug#wildcards) into a RegExp:
// comma/whitespace-separated globs, with a `-` prefix for exclusions.
export const compileGlobPattern = pattern => {
  const excluded = []
  const included = []
  for (const fragment of pattern.split(/[\s,]+/)) {
    if (fragment[0] === '-') {
      excluded.push(fragment.slice(1))
    } else {
      included.push(fragment)
    }
  }

  let source = '^'
  if (excluded.length !== 0) {
    // negative lookahead rejects anything matching an excluded glob
    source += `(?!${excluded.map(compileGlobPatternFragment).join('|')})`
  }
  source +=
    included.length !== 0
      ? `(?:${included.map(compileGlobPatternFragment).join('|')})`
      : '.*'
  source += '$'
  return new RegExp(source)
}
// -------------------------------------------------------------------
// Placeholder default value: throws when a mandatory argument is omitted.
export function required (name) {
  throw new Error(`missing required arg ${name}`)
}
// -------------------------------------------------------------------
// Convert an Error into a plain JSON-friendly object: copies its
// enumerable own properties, plus message/name/stack which are usually
// non-enumerable and would otherwise be lost.
export function serializeError (error) {
  const serialized = { ...error }
  serialized.message = error.message
  serialized.name = error.name
  serialized.stack = error.stack
  return serialized
}

View File

@@ -0,0 +1,13 @@
/* eslint-env jest */
import { compileGlobPattern } from './utils'
// Sanity check for the debug-style pattern compiler: plain inclusion,
// `*` wildcard expansion and `-` exclusion.
describe('compileGlobPattern()', () => {
  it('works', () => {
    const re = compileGlobPattern('foo, ba*, -bar')
    expect(re.test('foo')).toBe(true)
    expect(re.test('bar')).toBe(false)
    expect(re.test('baz')).toBe(true)
    expect(re.test('qux')).toBe(false)
  })
})

View File

@@ -0,0 +1 @@
dist/transports

View File

@@ -1,4 +1,12 @@
declare module 'lodash' {
declare export function forEach<K, V>(
object: { [K]: V },
iteratee: (V, K) => void
): void
declare export function groupBy<K, V>(
object: { [K]: V },
iteratee: K | ((V, K) => string)
): { [string]: V[] }
declare export function invert<K, V>(object: { [K]: V }): { [V]: K }
declare export function isEmpty(mixed): boolean
declare export function keyBy<T>(array: T[], iteratee: string): boolean

View File

@@ -1,6 +1,6 @@
{
"name": "xo-server-backup-reports",
"version": "0.10.0",
"version": "0.11.0",
"license": "AGPL-3.0",
"description": "Backup reports plugin for XO-Server",
"keywords": [
@@ -35,6 +35,7 @@
"node": ">=4"
},
"dependencies": {
"babel-runtime": "^6.26.0",
"human-format": "^0.10.0",
"lodash": "^4.13.1",
"moment-timezone": "^0.5.13"
@@ -42,6 +43,7 @@
"devDependencies": {
"babel-cli": "^6.24.1",
"babel-plugin-lodash": "^3.3.2",
"babel-plugin-transform-runtime": "^6.23.0",
"babel-preset-env": "^1.5.2",
"cross-env": "^5.1.3",
"rimraf": "^2.6.1"
@@ -56,7 +58,8 @@
},
"babel": {
"plugins": [
"lodash"
"lodash",
"transform-runtime"
],
"presets": [
[

View File

@@ -1,6 +1,6 @@
import humanFormat from 'human-format'
import moment from 'moment-timezone'
import { forEach, startCase } from 'lodash'
import { find, forEach, get, startCase } from 'lodash'
import pkg from '../package'
@@ -41,9 +41,9 @@ const DATE_FORMAT = 'dddd, MMMM Do YYYY, h:mm:ss a'
const createDateFormater = timezone =>
timezone !== undefined
? timestamp =>
moment(timestamp)
.tz(timezone)
.format(DATE_FORMAT)
moment(timestamp)
.tz(timezone)
.format(DATE_FORMAT)
: timestamp => moment(timestamp).format(DATE_FORMAT)
const formatDuration = milliseconds => moment.duration(milliseconds).humanize()
@@ -66,6 +66,7 @@ const logError = e => {
console.error('backup report error:', e)
}
const NO_VMS_MATCH_THIS_PATTERN = 'no VMs match this pattern'
const NO_SUCH_OBJECT_ERROR = 'no such object'
const UNHEALTHY_VDI_CHAIN_ERROR = 'unhealthy VDI chain'
const UNHEALTHY_VDI_CHAIN_MESSAGE =
@@ -94,14 +95,351 @@ class BackupReportsXoPlugin {
this._xo.removeListener('job:terminated', this._report)
}
_wrapper (status) {
return new Promise(resolve => resolve(this._listener(status))).catch(
logError
)
// Entry point registered on job events: routes Backup NG jobs
// (job.type === 'backup') to the dedicated listener and legacy jobs to
// the historical one. Errors are logged via `logError` instead of
// being propagated.
_wrapper (status, job, schedule) {
  return new Promise(resolve =>
    resolve(
      job.type === 'backup'
        ? this._backupNgListener(status, job, schedule)
        : this._listener(status, job, schedule)
    )
  ).catch(logError)
}
async _backupNgListener (runJobId, _, { timezone }) {
const xo = this._xo
const logs = await xo.getBackupNgLogs(runJobId)
const jobLog = logs['roots'][0]
const vmsTaskLog = logs[jobLog.id]
const { reportWhen, mode } = jobLog.data || {}
if (reportWhen === 'never') {
return
}
const formatDate = createDateFormater(timezone)
const jobName = (await xo.getJob(jobLog.jobId, 'backup')).name
if (jobLog.error !== undefined) {
const [globalStatus, icon] =
jobLog.error.message === NO_VMS_MATCH_THIS_PATTERN
? ['Skipped', ICON_SKIPPED]
: ['Failure', ICON_FAILURE]
let markdown = [
`## Global status: ${globalStatus}`,
'',
`- **mode**: ${mode}`,
`- **Start time**: ${formatDate(jobLog.start)}`,
`- **End time**: ${formatDate(jobLog.end)}`,
`- **Duration**: ${formatDuration(jobLog.duration)}`,
`- **Error**: ${jobLog.error.message}`,
'---',
'',
`*${pkg.name} v${pkg.version}*`,
]
markdown = markdown.join('\n')
return this._sendReport({
subject: `[Xen Orchestra] ${globalStatus} Backup report for ${jobName} ${icon}`,
markdown,
nagiosStatus: 2,
nagiosMarkdown: `[Xen Orchestra] [${globalStatus}] Backup report for ${jobName} - Error : ${
jobLog.error.message
}`,
})
}
const failedVmsText = []
const skippedVmsText = []
const successfulVmsText = []
const nagiosText = []
let globalMergeSize = 0
let globalTransferSize = 0
let nFailures = 0
let nSkipped = 0
for (const vmTaskLog of vmsTaskLog || []) {
const vmTaskStatus = vmTaskLog.status
if (vmTaskStatus === 'success' && reportWhen === 'failure') {
return
}
const vmId = vmTaskLog.data.id
let vm
try {
vm = xo.getObject(vmId)
} catch (e) {}
const text = [
`### ${vm !== undefined ? vm.name_label : 'VM not found'}`,
'',
`- **UUID**: ${vm !== undefined ? vm.uuid : vmId}`,
`- **Start time**: ${formatDate(vmTaskLog.start)}`,
`- **End time**: ${formatDate(vmTaskLog.end)}`,
`- **Duration**: ${formatDuration(vmTaskLog.duration)}`,
]
const failedSubTasks = []
const operationsText = []
const srsText = []
const remotesText = []
for (const subTaskLog of logs[vmTaskLog.taskId] || []) {
const { data, status, result, message } = subTaskLog
const icon =
subTaskLog.status === 'success' ? ICON_SUCCESS : ICON_FAILURE
const errorMessage = ` **Error**: ${get(result, 'message')}`
if (message === 'snapshot') {
operationsText.push(`- **Snapshot** ${icon}`)
if (status === 'failure') {
failedSubTasks.push('Snapshot')
operationsText.push('', errorMessage)
}
} else if (data.type === 'remote') {
const remoteId = data.id
const remote = await xo.getRemote(remoteId).catch(() => {})
remotesText.push(
`- **${
remote !== undefined ? remote.name : `Remote Not found`
}** (${remoteId}) ${icon}`
)
if (status === 'failure') {
failedSubTasks.push(remote !== undefined ? remote.name : remoteId)
remotesText.push('', errorMessage)
}
} else {
const srId = data.id
let sr
try {
sr = xo.getObject(srId)
} catch (e) {}
const [srName, srUuid] =
sr !== undefined ? [sr.name_label, sr.uuid] : [`SR Not found`, srId]
srsText.push(`- **${srName}** (${srUuid}) ${icon}`)
if (status === 'failure') {
failedSubTasks.push(sr !== undefined ? sr.name_label : srId)
srsText.push('', errorMessage)
}
}
}
if (operationsText.length !== 0) {
operationsText.unshift(`#### Operations`, '')
}
if (srsText.length !== 0) {
srsText.unshift(`#### SRs`, '')
}
if (remotesText.length !== 0) {
remotesText.unshift(`#### remotes`, '')
}
const subText = [...operationsText, '', ...srsText, '', ...remotesText]
const result = vmTaskLog.result
if (vmTaskStatus === 'failure' && result !== undefined) {
const { message } = result
if (isSkippedError(result)) {
++nSkipped
skippedVmsText.push(
...text,
`- **Reason**: ${
message === UNHEALTHY_VDI_CHAIN_ERROR
? UNHEALTHY_VDI_CHAIN_MESSAGE
: message
}`,
''
)
nagiosText.push(
`[(Skipped) ${
vm !== undefined ? vm.name_label : 'undefined'
} : ${message} ]`
)
} else {
++nFailures
failedVmsText.push(...text, `- **Error**: ${message}`, '')
nagiosText.push(
`[(Failed) ${
vm !== undefined ? vm.name_label : 'undefined'
} : ${message} ]`
)
}
} else {
let transferSize, transferDuration, mergeSize, mergeDuration
forEach(logs[vmTaskLog.taskId], ({ taskId }) => {
if (transferSize !== undefined) {
return false
}
const transferTask = find(logs[taskId], { message: 'transfer' })
if (transferTask !== undefined) {
transferSize = transferTask.result.size
transferDuration = transferTask.end - transferTask.start
}
const mergeTask = find(logs[taskId], { message: 'merge' })
if (mergeTask !== undefined) {
mergeSize = mergeTask.result.size
mergeDuration = mergeTask.end - mergeTask.start
}
})
if (transferSize !== undefined) {
globalTransferSize += transferSize
text.push(
`- **Transfer size**: ${formatSize(transferSize)}`,
`- **Transfer speed**: ${formatSpeed(
transferSize,
transferDuration
)}`
)
}
if (mergeSize !== undefined) {
globalMergeSize += mergeSize
text.push(
`- **Merge size**: ${formatSize(mergeSize)}`,
`- **Merge speed**: ${formatSpeed(mergeSize, mergeDuration)}`
)
}
if (vmTaskStatus === 'failure') {
++nFailures
failedVmsText.push(...text, '', '', ...subText, '')
nagiosText.push(
`[(Failed) ${
vm !== undefined ? vm.name_label : 'undefined'
}: (failed)[${failedSubTasks.toString()}]]`
)
} else {
successfulVmsText.push(...text, '', '', ...subText, '')
}
}
}
const globalSuccess = nFailures === 0 && nSkipped === 0
if (reportWhen === 'failure' && globalSuccess) {
return
}
const nVms = vmsTaskLog.length
const nSuccesses = nVms - nFailures - nSkipped
const globalStatus = globalSuccess
? `Success`
: nFailures !== 0 ? `Failure` : `Skipped`
let markdown = [
`## Global status: ${globalStatus}`,
'',
`- **mode**: ${mode}`,
`- **Start time**: ${formatDate(jobLog.start)}`,
`- **End time**: ${formatDate(jobLog.end)}`,
`- **Duration**: ${formatDuration(jobLog.duration)}`,
`- **Successes**: ${nSuccesses} / ${nVms}`,
]
if (globalTransferSize !== 0) {
markdown.push(`- **Transfer size**: ${formatSize(globalTransferSize)}`)
}
if (globalMergeSize !== 0) {
markdown.push(`- **Merge size**: ${formatSize(globalMergeSize)}`)
}
markdown.push('')
if (nFailures !== 0) {
markdown.push(
'---',
'',
`## ${nFailures} Failure${nFailures === 1 ? '' : 's'}`,
'',
...failedVmsText
)
}
if (nSkipped !== 0) {
markdown.push('---', '', `## ${nSkipped} Skipped`, '', ...skippedVmsText)
}
if (nSuccesses !== 0 && reportWhen !== 'failure') {
markdown.push(
'---',
'',
`## ${nSuccesses} Success${nSuccesses === 1 ? '' : 'es'}`,
'',
...successfulVmsText
)
}
markdown.push('---', '', `*${pkg.name} v${pkg.version}*`)
markdown = markdown.join('\n')
return this._sendReport({
markdown,
subject: `[Xen Orchestra] ${globalStatus} Backup report for ${jobName} ${
globalSuccess
? ICON_SUCCESS
: nFailures !== 0 ? ICON_FAILURE : ICON_SKIPPED
}`,
nagiosStatus: globalSuccess ? 0 : 2,
nagiosMarkdown: globalSuccess
? `[Xen Orchestra] [Success] Backup report for ${jobName}`
: `[Xen Orchestra] [${
nFailures !== 0 ? 'Failure' : 'Skipped'
}] Backup report for ${jobName} - VMs : ${nagiosText.join(' ')}`,
})
}
// Broadcast a report through every available channel; each send is only
// attempted when the corresponding xo method exists (i.e. the matching
// plugin — email, XMPP, Slack, Nagios passive checks — is loaded).
_sendReport ({ markdown, subject, nagiosStatus, nagiosMarkdown }) {
  const xo = this._xo
  return Promise.all([
    xo.sendEmail !== undefined &&
      xo.sendEmail({
        to: this._mailsReceivers,
        subject,
        markdown,
      }),
    xo.sendToXmppClient !== undefined &&
      xo.sendToXmppClient({
        to: this._xmppReceivers,
        message: markdown,
      }),
    xo.sendSlackMessage !== undefined &&
      xo.sendSlackMessage({
        message: markdown,
      }),
    xo.sendPassiveCheck !== undefined &&
      xo.sendPassiveCheck({
        nagiosStatus,
        message: nagiosMarkdown,
      }),
  ])
}
_listener (status) {
const { calls } = status
const { calls, timezone, error } = status
const formatDate = createDateFormater(timezone)
if (status.error !== undefined) {
const [globalStatus, icon] =
error.message === NO_VMS_MATCH_THIS_PATTERN
? ['Skipped', ICON_SKIPPED]
: ['Failure', ICON_FAILURE]
let markdown = [
`## Global status: ${globalStatus}`,
'',
`- **Start time**: ${formatDate(status.start)}`,
`- **End time**: ${formatDate(status.end)}`,
`- **Duration**: ${formatDuration(status.end - status.start)}`,
`- **Error**: ${error.message}`,
'---',
'',
`*${pkg.name} v${pkg.version}*`,
]
markdown = markdown.join('\n')
return this._sendReport({
subject: `[Xen Orchestra] ${globalStatus} ${icon}`,
markdown,
nagiosStatus: 2,
nagiosMarkdown: `[Xen Orchestra] [${globalStatus}] Error : ${
error.message
}`,
})
}
const callIds = Object.keys(calls)
const nCalls = callIds.length
@@ -139,8 +477,6 @@ class BackupReportsXoPlugin {
const skippedBackupsText = []
const successfulBackupText = []
const formatDate = createDateFormater(status.timezone)
forEach(calls, call => {
const { id = call.params.vm } = call.params
@@ -226,9 +562,8 @@ class BackupReportsXoPlugin {
return
}
const { end, start } = status
const { tag } = oneCall.params
const duration = end - start
const duration = status.end - status.start
const nSuccesses = nCalls - nFailures - nSkipped
const globalStatus = globalSuccess
? `Success`
@@ -238,8 +573,8 @@ class BackupReportsXoPlugin {
`## Global status: ${globalStatus}`,
'',
`- **Type**: ${formatMethod(method)}`,
`- **Start time**: ${formatDate(start)}`,
`- **End time**: ${formatDate(end)}`,
`- **Start time**: ${formatDate(status.start)}`,
`- **End time**: ${formatDate(status.end)}`,
`- **Duration**: ${formatDuration(duration)}`,
`- **Successes**: ${nSuccesses} / ${nCalls}`,
]
@@ -285,37 +620,20 @@ class BackupReportsXoPlugin {
markdown = markdown.join('\n')
const xo = this._xo
return Promise.all([
xo.sendEmail !== undefined &&
xo.sendEmail({
to: this._mailsReceivers,
subject: `[Xen Orchestra] ${globalStatus} Backup report for ${tag} ${
globalSuccess
? ICON_SUCCESS
: nFailures !== 0 ? ICON_FAILURE : ICON_SKIPPED
}`,
markdown,
}),
xo.sendToXmppClient !== undefined &&
xo.sendToXmppClient({
to: this._xmppReceivers,
message: markdown,
}),
xo.sendSlackMessage !== undefined &&
xo.sendSlackMessage({
message: markdown,
}),
xo.sendPassiveCheck !== undefined &&
xo.sendPassiveCheck({
status: globalSuccess ? 0 : 2,
message: globalSuccess
? `[Xen Orchestra] [Success] Backup report for ${tag}`
: `[Xen Orchestra] [${
nFailures !== 0 ? 'Failure' : 'Skipped'
}] Backup report for ${tag} - VMs : ${nagiosText.join(' ')}`,
}),
])
return this._sendReport({
markdown,
subject: `[Xen Orchestra] ${globalStatus} Backup report for ${tag} ${
globalSuccess
? ICON_SUCCESS
: nFailures !== 0 ? ICON_FAILURE : ICON_SKIPPED
}`,
nagiosStatus: globalSuccess ? 0 : 2,
nagiosMarkdown: globalSuccess
? `[Xen Orchestra] [Success] Backup report for ${tag}`
: `[Xen Orchestra] [${
nFailures !== 0 ? 'Failure' : 'Skipped'
}] Backup report for ${tag} - VMs : ${nagiosText.join(' ')}`,
})
}
}

View File

@@ -1,31 +0,0 @@
#!/usr/bin/env node
'use strict'
// ===================================================================
// Better stack traces if possible.
require('../better-stacks')
// Use Bluebird for all promises as it provides better performance and
// less memory usage.
global.Promise = require('bluebird')
// Make unhandled rejected promises visible.
process.on('unhandledRejection', function (reason) {
console.warn('[Warn] Possibly unhandled rejection:', reason && reason.stack || reason)
})
;(function (EE) {
var proto = EE.prototype
var emit = proto.emit
proto.emit = function patchedError (event, error) {
if (event === 'error' && !this.listenerCount(event)) {
return console.warn('[Warn] Unhandled error event:', error && error.stack || error)
}
return emit.apply(this, arguments)
}
})(require('events').EventEmitter)
require('exec-promise')(require('../'))

View File

@@ -1,11 +0,0 @@
'use strict'
// ===================================================================
// Enable xo logs by default.
if (process.env.DEBUG === undefined) {
process.env.DEBUG = 'app-conf,xo:*,-xo:api'
}
// Import the real main module.
module.exports = require('./dist').default

View File

@@ -1,6 +1,6 @@
{
"name": "xo-server",
"version": "5.19.3",
"version": "5.19.4",
"license": "AGPL-3.0",
"description": "Server part of Xen-Orchestra",
"keywords": [
@@ -16,6 +16,9 @@
"url": "https://github.com/vatesfr/xen-orchestra.git"
},
"preferGlobal": true,
"bin": {
"xo-server": "dist/cli"
},
"files": [
"better-stacks.js",
"bin/",
@@ -119,7 +122,7 @@
"xo-collection": "^0.4.1",
"xo-common": "^0.1.1",
"xo-remote-parser": "^0.3",
"xo-vmdk-to-vhd": "0.0.12",
"xo-vmdk-to-vhd": "0.1.0",
"yazl": "^2.4.3"
},
"devDependencies": {

View File

@@ -134,6 +134,14 @@ runJob.params = {
// -----------------------------------------------------------------------------
// List all Backup NG logs (delegates to xo.getBackupNgLogs).
export function getAllLogs () {
  return this.getBackupNgLogs()
}
// restricted to administrators
getAllLogs.permission = 'admin'
// -----------------------------------------------------------------------------
// Delete a single VM backup by id (delegates to xo.deleteVmBackupNg).
export function deleteVmBackup ({ id }) {
  return this.deleteVmBackupNg(id)
}

View File

@@ -1,19 +1,5 @@
export async function get ({ namespace }) {
const logger = await this.getLogger(namespace)
return new Promise((resolve, reject) => {
const logs = {}
logger
.createReadStream()
.on('data', data => {
logs[data.key] = data.value
})
.on('end', () => {
resolve(logs)
})
.on('error', reject)
})
// Return the logs of the given namespace (delegates to xo.getLogs).
export function get ({ namespace }) {
  return this.getLogs(namespace)
}
get.description = 'returns logs list for one namespace'

176
packages/xo-server/src/cli.js Executable file
View File

@@ -0,0 +1,176 @@
#!/usr/bin/env node
const APP_NAME = 'xo-server'
// Enable xo logs by default.
if (process.env.DEBUG === undefined) {
  process.env.DEBUG = 'app-conf,xo:*,-xo:api'
}
// -------------------------------------------------------------------
// Console logging: warnings and above, plus any namespace enabled via DEBUG.
require('@xen-orchestra/log/configure').configure([
  {
    filter: process.env.DEBUG,
    level: 'warn',
    transport: require('@xen-orchestra/log/transports/console').default(),
  },
])
const { info, warn } = require('@xen-orchestra/log').createLogger('bootstrap')
// Make unhandled rejections and Node warnings visible in the logs.
process.on('unhandledRejection', reason => {
  warn('possibly unhandled rejection', reason)
})
process.on('warning', warning => {
  warn('Node warning', warning)
})
// Patch EventEmitter so that 'error' events without listeners are logged
// instead of throwing and crashing the process.
;(({ prototype }) => {
  const { emit } = prototype
  prototype.emit = function (event, error) {
    event === 'error' && !this.listenerCount(event)
      ? warn('unhandled error event', error)
      : emit.apply(this, arguments)
  }
})(require('events').EventEmitter)
// Use Bluebird for all promises as it provides better performance and
// less memory usage.
const Bluebird = require('bluebird')
Bluebird.config({
  longStackTraces: true,
  warnings: true,
})
global.Promise = Bluebird
// -------------------------------------------------------------------
// Parent-process entry point.
//
// Loads the configuration, binds the HTTP(S) server, drops privileges,
// starts the application and waits for it to stop.
//
// `args`: command line arguments (process.argv without node/script).
const main = async args => {
  // `--help` / `-h`: print usage and exit.
  if (args.includes('--help') || args.includes('-h')) {
    const { name, version } = require('../package.json')
    return console.log(`Usage: ${name} [--safe-mode]
${name} v${version}`)
  }

  info('starting')

  // Load and merge the application configuration files.
  const config = await require('app-conf').load(APP_NAME, {
    appDir: `${__dirname}/..`,
    ignoreUnknownFormats: true,
  })

  // Print a message if deprecated entries are specified.
  ;['users', 'servers'].forEach(entry => {
    if (entry in config) {
      warn(`${entry} configuration is deprecated`)
    }
  })

  // `stoppable` adds a `stop()` method which closes the server together
  // with its active connections.
  const httpServer = require('stoppable')(new (require('http-server-plus'))())

  const readFile = Bluebird.promisify(require('fs').readFile)
  // Bind every address/port of the configuration in parallel; a failed
  // bind is logged but does not prevent the others from succeeding.
  await Promise.all(
    config.http.listen.map(
      async ({
        certificate,
        // The properties was called `certificate` before.
        cert = certificate,
        key,
        ...opts
      }) => {
        // With a certificate/key pair the address is served over HTTPS.
        if (cert !== undefined && key !== undefined) {
          ;[opts.cert, opts.key] = await Promise.all([
            readFile(cert),
            readFile(key),
          ])
        }
        try {
          const niceAddress = await httpServer.listen(opts)
          info(`web server listening on ${niceAddress}`)
        } catch (error) {
          if (error.niceAddress !== undefined) {
            warn(`web server could not listen on ${error.niceAddress}`)
            const { code } = error
            if (code === 'EACCES') {
              warn(' access denied.')
              warn(' ports < 1024 are often reserved to privileges users.')
            } else if (code === 'EADDRINUSE') {
              warn(' address already in use.')
            }
          } else {
            warn('web server could not listen', error)
          }
        }
      }
    )
  )

  // Now the web server is listening, drop privileges.
  try {
    const { group, user } = config
    if (group !== undefined) {
      process.setgid(group)
      info('group changed to', group)
    }
    if (user !== undefined) {
      process.setuid(user)
      info('user changed to', user)
    }
  } catch (error) {
    warn('failed to change group/user', error)
  }

  // NOTE(review): a worker process is forked and sent a single empty
  // message, but it is never used nor stopped afterwards — presumably
  // work-in-progress for the multi-process feature; confirm intent.
  const child = require('child_process').fork(require.resolve('./worker.js'))
  child.send([''])

  const App = require('./xo').default
  const app = new App({
    appName: APP_NAME,
    config,
    httpServer,
    safeMode: require('lodash/includes')(args, '--safe-mode'),
  })

  // Register web server close on XO stop.
  app.on('stop', () => Bluebird.fromCallback(cb => httpServer.stop(cb)))

  await app.start()

  // Trigger a clean job.
  await app.clean()

  // Gracefully shutdown on signals.
  //
  // TODO: implements a timeout? (or maybe it is the services launcher
  // responsibility?)
  require('lodash/forEach')(['SIGINT', 'SIGTERM'], signal => {
    let alreadyCalled = false
    process.on(signal, () => {
      // A second signal forces an immediate exit.
      if (alreadyCalled) {
        warn('forced exit')
        process.exit(1)
      }
      alreadyCalled = true
      info(`${signal} caught, closing…`)
      app.stop()
    })
  })

  await require('event-to-promise')(app, 'stopped')
}
main(process.argv.slice(2)).then(
  () => info('bye :-)'),
  error => {
    // A number rejection is treated as an exit code, anything else as a
    // fatal error to log.
    if (typeof error === 'number') {
      process.exitCode = error
    } else {
      warn('fatal error', error)
    }
  }
)

View File

@@ -0,0 +1,348 @@
const compilePug = require('pug').compile
const createProxyServer = require('http-proxy').createServer
const JsonRpcPeer = require('json-rpc-peer')
const LocalStrategy = require('passport-local').Strategy
const parseCookies = require('cookie').parse
const Passport = require('passport')
const serveStatic = require('serve-static')
const WebSocket = require('ws')
const { fromCallback } = require('promise-toolbox')
const { invalidCredentials } = require('xo-common/api-errors')
const { readFile } = require('fs')
const proxyConsole = require('../proxy-console')
const { debug, warn } = require('@xen-orchestra/log').createLogger('front')
// Builds the Express application: security headers, optional
// HTTP→HTTPS redirection and the static mounts from the configuration.
function createExpressApp ({ http: config }, httpServer) {
  const app = require('express')()

  app.use(require('helmet')())

  if (config.redirectToHttps) {
    // Find an entry that can serve HTTPS (has a port and a certificate).
    const httpsEntry = config.listen.find(
      entry =>
        entry.port !== undefined &&
        (entry.cert !== undefined || entry.certificate !== undefined)
    )

    if (httpsEntry === undefined) {
      warn('could not setup HTTPs redirection: no HTTPs config found')
    } else {
      const { port } = httpsEntry
      app.use((req, res, next) => {
        if (!req.secure) {
          res.redirect(`https://${req.hostname}:${port}${req.originalUrl}`)
          return
        }
        next()
      })
    }
  }

  // Serve the configured static mounts (a mount may list several paths).
  for (const url of Object.keys(config.mounts)) {
    const paths = config.mounts[url]
    const list = Array.isArray(paths) ? paths : [paths]
    for (const path of list) {
      debug('Setting up %s → %s', url, path)
      app.use(url, serveStatic(path))
    }
  }

  return app
}
// Exposes the JSON-RPC API over a WebSocket endpoint at `/api/`.
//
// Each WebSocket connection gets its own abstract XO user connection
// and its own JSON-RPC peer.
function setUpApi (config, httpServer, xo) {
  const webSocketServer = new WebSocket.Server({
    noServer: true,
  })
  // Close the WebSocket server when the application stops.
  xo.on('stop', () => fromCallback(cb => webSocketServer.close(cb)))

  const onConnection = (socket, upgradeReq) => {
    const { remoteAddress } = upgradeReq.socket
    debug('+ WebSocket connection (%s)', remoteAddress)

    // Create the abstract XO object for this connection.
    const connection = xo.createUserConnection()
    connection.once('close', () => {
      socket.close()
    })

    // Create the JSON-RPC server for this connection.
    const jsonRpc = new JsonRpcPeer(message => {
      if (message.type === 'request') {
        return xo.callApiMethod(connection, message.method, message.params)
      }
    })
    connection.notify = jsonRpc.notify.bind(jsonRpc)

    // Close the XO connection with this WebSocket.
    socket.once('close', () => {
      debug('- WebSocket connection (%s)', remoteAddress)
      connection.close()
    })

    // Connect the WebSocket to the JSON-RPC server.
    socket.on('message', message => {
      jsonRpc.write(message)
    })

    // Send errors are logged, not propagated.
    const onSend = error => {
      if (error) {
        warn('WebSocket send:', error.stack)
      }
    }
    jsonRpc.on('data', data => {
      // The socket may have been closed during the API method
      // execution.
      if (socket.readyState === WebSocket.OPEN) {
        socket.send(data, onSend)
      }
    })
  }

  // Only handle HTTP upgrades targeting the API endpoint.
  httpServer.on('upgrade', (req, socket, head) => {
    if (req.url === '/api/') {
      webSocketServer.handleUpgrade(req, socket, head, ws =>
        onConnection(ws, req)
      )
    }
  })
}
// Proxies VM consoles over WebSocket at `/api/consoles/:id`.
//
// The request is authenticated from the `token` cookie and the user
// must have the `operate` permission on the target VM.
function setUpConsoleProxy (httpServer, xo) {
  const webSocketServer = new WebSocket.Server({
    noServer: true,
  })

  const CONSOLE_PROXY_PATH_RE = /^\/api\/consoles\/(.*)$/
  httpServer.on('upgrade', async (req, socket, head) => {
    const matches = CONSOLE_PROXY_PATH_RE.exec(req.url)
    if (!matches) {
      return
    }
    const [, id] = matches
    try {
      // TODO: factorize permissions checking in an Express middleware.
      {
        const { token } = parseCookies(req.headers.cookie)
        const user = await xo.authenticateUser({ token })
        if (!await xo.hasPermissions(user.id, [[id, 'operate']])) {
          throw invalidCredentials()
        }

        const { remoteAddress } = socket
        debug('+ Console proxy (%s - %s)', user.name, remoteAddress)
        socket.on('close', () => {
          debug('- Console proxy (%s - %s)', user.name, remoteAddress)
        })
      }

      const xapi = xo.getXapi(id, ['VM', 'VM-controller'])
      const vmConsole = xapi.getVmConsole(id)

      // FIXME: lost connection due to VM restart is not detected.
      webSocketServer.handleUpgrade(req, socket, head, connection => {
        proxyConsole(connection, vmConsole, xapi.sessionId)
      })
    } catch (error) {
      // Log and drop: an upgrade handler has no HTTP error channel.
      console.error((error && error.stack) || error)
    }
  })
}
// Sets up authentication on the Express app via Passport.
//
// Everything registered on `express` *before* the middleware installed
// here is not protected by the sign in.
async function setUpPassport (express, xo) {
  // necessary for connect-flash
  express.use(require('cookie-parser')())
  express.use(
    require('express-session')({
      resave: false,
      saveUninitialized: false,

      // TODO: should be in the config file.
      secret: 'CLWguhRZAZIXZcbrMzHCYmefxgweItKnS',
    })
  )

  // necessary for Passport to display error messages
  express.use(require('connect-flash')())

  // necessary for Passport to access the username and password from the sign
  // in form
  express.use(require('body-parser').urlencoded({ extended: false }))

  express.use(Passport.initialize())

  // Map of non-local strategy names to their display labels, used by
  // the sign in page (null prototype to avoid inherited keys).
  const strategies = { __proto__: null }
  xo.registerPassportStrategy = strategy => {
    Passport.use(strategy)

    const { name } = strategy
    if (name !== 'local') {
      strategies[name] = strategy.label || name
    }
  }

  // Registers the sign in form.
  const signInPage = compilePug(
    await fromCallback(cb => readFile(`${__dirname}/../signin.pug`, cb))
  )
  express.get('/signin', (req, res, next) => {
    res.send(
      signInPage({
        error: req.flash('error')[0],
        strategies,
      })
    )
  })

  express.get('/signout', (req, res) => {
    // Invalidate the session cookie and go back to the home page.
    res.clearCookie('token')
    res.redirect('/')
  })

  const SIGNIN_STRATEGY_RE = /^\/signin\/([^/]+)(\/callback)?(:?\?.*)?$/
  express.use(async (req, res, next) => {
    const { url } = req
    const matches = url.match(SIGNIN_STRATEGY_RE)

    // Sign-in (and OAuth callback) URLs are delegated to the matched
    // Passport strategy.
    if (matches !== null) {
      return Passport.authenticate(matches[1], async (err, user, info) => {
        if (err) {
          return next(err)
        }

        if (!user) {
          req.flash('error', info ? info.message : 'Invalid credentials')
          return res.redirect('/signin')
        }

        // The cookie will be set in via the next request because some
        // browsers do not save cookies on redirect.
        req.flash(
          'token',
          (await xo.createAuthenticationToken({ userId: user.id })).id
        )

        // The session is only persistent for internal provider and if 'Remember me' box is checked
        req.flash(
          'session-is-persistent',
          matches[1] === 'local' && req.body['remember-me'] === 'on'
        )

        res.redirect(req.flash('return-url')[0] || '/')
      })(req, res, next)
    }

    const token = req.flash('token')[0]
    if (token) {
      const isPersistent = req.flash('session-is-persistent')[0]
      if (isPersistent) {
        // Persistent cookie ? => 1 year
        res.cookie('token', token, { maxAge: 1000 * 60 * 60 * 24 * 365 })
      } else {
        // Non-persistent : external provider as Github, Twitter...
        res.cookie('token', token)
      }
      next()
    } else if (req.cookies.token) {
      next()
    } else if (
      // Static assets are served without authentication.
      /favicon|fontawesome|images|styles|\.(?:css|jpg|png)$/.test(url)
    ) {
      next()
    } else {
      // Remember where the user wanted to go, then ask them to sign in.
      req.flash('return-url', url)
      return res.redirect('/signin')
    }
  })

  // Install the local strategy.
  xo.registerPassportStrategy(
    new LocalStrategy(async (username, password, done) => {
      try {
        const user = await xo.authenticateUser({ username, password })
        done(null, user)
      } catch (error) {
        done(null, false, { message: error.message })
      }
    })
  )
}
// Sets up HTTP and WebSocket proxying for the prefixes declared in the
// `http.proxies` section of the configuration.
//
// Fix: prefixes are now sorted by *descending* length so that the most
// specific prefix wins — previously the ascending sort let a short
// prefix (e.g. `/a`) shadow a longer one (e.g. `/a/b`), making the
// longer route unreachable.
function setUpProxies ({ http: { proxies } }, httpServer, express, xo) {
  if (proxies === undefined) {
    return
  }

  const proxy = createProxyServer({
    ignorePath: true,
  }).on('error', error => console.error(error))

  // Longest prefixes first: the first match wins in the loops below.
  const prefixes = Object.keys(proxies).sort((a, b) => b.length - a.length)
  const n = prefixes.length

  // HTTP request proxy.
  express.use((req, res, next) => {
    const { url } = req

    for (let i = 0; i < n; ++i) {
      const prefix = prefixes[i]
      if (url.startsWith(prefix)) {
        const target = proxies[prefix]
        proxy.web(req, res, {
          target: target + url.slice(prefix.length),
        })
        return
      }
    }

    next()
  })

  // WebSocket proxy.
  //
  // NOTE(review): this WebSocket.Server is only used to be closed on
  // application stop — `proxy.ws` handles the upgrades itself; confirm
  // whether the server instance is actually needed.
  const webSocketServer = new WebSocket.Server({
    noServer: true,
  })
  xo.on('stop', () => fromCallback(cb => webSocketServer.close(cb)))

  httpServer.on('upgrade', (req, socket, head) => {
    const { url } = req

    for (let i = 0; i < n; ++i) {
      const prefix = prefixes[i]
      if (url.startsWith(prefix)) {
        const target = proxies[prefix]
        proxy.ws(req, socket, head, {
          target: target + url.slice(prefix.length),
        })
        return
      }
    }
  })
}
// Entry point of the web/front process: wires proxies, the JSON-RPC
// API, the console proxy and authentication onto the HTTP server.
//
// NOTE(review): `xo` is referenced below but never defined in this
// scope — presumably it should be a parameter or created here (this
// commit looks like work-in-progress); confirm before shipping.
export default async function main ({ config, httpServer, safeMode }) {
  const express = createExpressApp(config, httpServer)

  setUpProxies(config, httpServer, express, xo)
  setUpApi(config, httpServer, xo)

  // must be set up before the API
  setUpConsoleProxy(httpServer, xo)

  await setUpPassport(express, xo)

  // TODO: express.use(xo._handleHttpRequest.bind(xo))
}

View File

@@ -1,656 +0,0 @@
import appConf from 'app-conf'
import bind from 'lodash/bind'
import blocked from 'blocked'
import createExpress from 'express'
import createLogger from 'debug'
import has from 'lodash/has'
import helmet from 'helmet'
import includes from 'lodash/includes'
import proxyConsole from './proxy-console'
import serveStatic from 'serve-static'
import startsWith from 'lodash/startsWith'
import stoppable from 'stoppable'
import WebSocket from 'ws'
import { compile as compilePug } from 'pug'
import { createServer as createProxyServer } from 'http-proxy'
import { fromEvent } from 'promise-toolbox'
import { join as joinPath } from 'path'
import JsonRpcPeer from 'json-rpc-peer'
import { invalidCredentials } from 'xo-common/api-errors'
import { ensureDir, readdir, readFile } from 'fs-extra'
import WebServer from 'http-server-plus'
import Xo from './xo'
import {
forEach,
isArray,
isFunction,
mapToArray,
pFromCallback,
} from './utils'
import bodyParser from 'body-parser'
import connectFlash from 'connect-flash'
import cookieParser from 'cookie-parser'
import expressSession from 'express-session'
import passport from 'passport'
import { parse as parseCookies } from 'cookie'
import { Strategy as LocalStrategy } from 'passport-local'
// ===================================================================
const debug = createLogger('xo:main')
const warn = (...args) => {
console.warn('[Warn]', ...args)
}
// ===================================================================
const DEPRECATED_ENTRIES = ['users', 'servers']
async function loadConfiguration () {
const config = await appConf.load('xo-server', {
appDir: joinPath(__dirname, '..'),
ignoreUnknownFormats: true,
})
debug('Configuration loaded.')
// Print a message if deprecated entries are specified.
forEach(DEPRECATED_ENTRIES, entry => {
if (has(config, entry)) {
warn(`${entry} configuration is deprecated.`)
}
})
return config
}
// ===================================================================
function createExpressApp () {
const app = createExpress()
app.use(helmet())
// Registers the cookie-parser and express-session middlewares,
// necessary for connect-flash.
app.use(cookieParser())
app.use(
expressSession({
resave: false,
saveUninitialized: false,
// TODO: should be in the config file.
secret: 'CLWguhRZAZIXZcbrMzHCYmefxgweItKnS',
})
)
// Registers the connect-flash middleware, necessary for Passport to
// display error messages.
app.use(connectFlash())
// Registers the body-parser middleware, necessary for Passport to
// access the username and password from the sign in form.
app.use(bodyParser.urlencoded({ extended: false }))
// Registers Passport's middlewares.
app.use(passport.initialize())
return app
}
async function setUpPassport (express, xo) {
const strategies = { __proto__: null }
xo.registerPassportStrategy = strategy => {
passport.use(strategy)
const { name } = strategy
if (name !== 'local') {
strategies[name] = strategy.label || name
}
}
// Registers the sign in form.
const signInPage = compilePug(
await readFile(joinPath(__dirname, '..', 'signin.pug'))
)
express.get('/signin', (req, res, next) => {
res.send(
signInPage({
error: req.flash('error')[0],
strategies,
})
)
})
express.get('/signout', (req, res) => {
res.clearCookie('token')
res.redirect('/')
})
const SIGNIN_STRATEGY_RE = /^\/signin\/([^/]+)(\/callback)?(:?\?.*)?$/
express.use(async (req, res, next) => {
const { url } = req
const matches = url.match(SIGNIN_STRATEGY_RE)
if (matches) {
return passport.authenticate(matches[1], async (err, user, info) => {
if (err) {
return next(err)
}
if (!user) {
req.flash('error', info ? info.message : 'Invalid credentials')
return res.redirect('/signin')
}
// The cookie will be set in via the next request because some
// browsers do not save cookies on redirect.
req.flash(
'token',
(await xo.createAuthenticationToken({ userId: user.id })).id
)
// The session is only persistent for internal provider and if 'Remember me' box is checked
req.flash(
'session-is-persistent',
matches[1] === 'local' && req.body['remember-me'] === 'on'
)
res.redirect(req.flash('return-url')[0] || '/')
})(req, res, next)
}
const token = req.flash('token')[0]
if (token) {
const isPersistent = req.flash('session-is-persistent')[0]
if (isPersistent) {
// Persistent cookie ? => 1 year
res.cookie('token', token, { maxAge: 1000 * 60 * 60 * 24 * 365 })
} else {
// Non-persistent : external provider as Github, Twitter...
res.cookie('token', token)
}
next()
} else if (req.cookies.token) {
next()
} else if (
/favicon|fontawesome|images|styles|\.(?:css|jpg|png)$/.test(url)
) {
next()
} else {
req.flash('return-url', url)
return res.redirect('/signin')
}
})
// Install the local strategy.
xo.registerPassportStrategy(
new LocalStrategy(async (username, password, done) => {
try {
const user = await xo.authenticateUser({ username, password })
done(null, user)
} catch (error) {
done(null, false, { message: error.message })
}
})
)
}
// ===================================================================
async function registerPlugin (pluginPath, pluginName) {
const plugin = require(pluginPath)
const { description, version = 'unknown' } = (() => {
try {
return require(pluginPath + '/package.json')
} catch (_) {
return {}
}
})()
// Supports both “normal” CommonJS and Babel's ES2015 modules.
const {
default: factory = plugin,
configurationSchema,
configurationPresets,
testSchema,
} = plugin
// The default export can be either a factory or directly a plugin
// instance.
const instance = isFunction(factory)
? factory({
xo: this,
getDataDir: () => {
const dir = `${this._config.datadir}/${pluginName}`
return ensureDir(dir).then(() => dir)
},
})
: factory
await this.registerPlugin(
pluginName,
instance,
configurationSchema,
configurationPresets,
description,
testSchema,
version
)
}
const debugPlugin = createLogger('xo:plugin')
function registerPluginWrapper (pluginPath, pluginName) {
debugPlugin('register %s', pluginName)
return registerPlugin.call(this, pluginPath, pluginName).then(
() => {
debugPlugin(`successfully register ${pluginName}`)
},
error => {
debugPlugin(`failed register ${pluginName}`)
debugPlugin(error)
}
)
}
const PLUGIN_PREFIX = 'xo-server-'
const PLUGIN_PREFIX_LENGTH = PLUGIN_PREFIX.length
async function registerPluginsInPath (path) {
const files = await readdir(path).catch(error => {
if (error.code === 'ENOENT') {
return []
}
throw error
})
await Promise.all(
mapToArray(files, name => {
if (startsWith(name, PLUGIN_PREFIX)) {
return registerPluginWrapper.call(
this,
`${path}/${name}`,
name.slice(PLUGIN_PREFIX_LENGTH)
)
}
})
)
}
async function registerPlugins (xo) {
await Promise.all(
mapToArray(
[`${__dirname}/../node_modules/`, '/usr/local/lib/node_modules/'],
xo::registerPluginsInPath
)
)
}
// ===================================================================
async function makeWebServerListen (
webServer,
{
certificate,
// The properties was called `certificate` before.
cert = certificate,
key,
...opts
}
) {
if (cert && key) {
;[opts.cert, opts.key] = await Promise.all([readFile(cert), readFile(key)])
}
try {
const niceAddress = await webServer.listen(opts)
debug(`Web server listening on ${niceAddress}`)
} catch (error) {
if (error.niceAddress) {
warn(`Web server could not listen on ${error.niceAddress}`)
const { code } = error
if (code === 'EACCES') {
warn(' Access denied.')
warn(' Ports < 1024 are often reserved to privileges users.')
} else if (code === 'EADDRINUSE') {
warn(' Address already in use.')
}
} else {
warn('Web server could not listen:', error.message)
}
}
}
async function createWebServer ({ listen, listenOptions }) {
const webServer = stoppable(new WebServer())
await Promise.all(
mapToArray(listen, opts =>
makeWebServerListen(webServer, { ...listenOptions, ...opts })
)
)
return webServer
}
// ===================================================================
const setUpProxies = (express, opts, xo) => {
if (!opts) {
return
}
const proxy = createProxyServer({
ignorePath: true,
}).on('error', error => console.error(error))
// TODO: sort proxies by descending prefix length.
// HTTP request proxy.
express.use((req, res, next) => {
const { url } = req
for (const prefix in opts) {
if (startsWith(url, prefix)) {
const target = opts[prefix]
proxy.web(req, res, {
target: target + url.slice(prefix.length),
})
return
}
}
next()
})
// WebSocket proxy.
const webSocketServer = new WebSocket.Server({
noServer: true,
})
xo.on('stop', () => pFromCallback(cb => webSocketServer.close(cb)))
express.on('upgrade', (req, socket, head) => {
const { url } = req
for (const prefix in opts) {
if (startsWith(url, prefix)) {
const target = opts[prefix]
proxy.ws(req, socket, head, {
target: target + url.slice(prefix.length),
})
return
}
}
})
}
// ===================================================================
const setUpStaticFiles = (express, opts) => {
forEach(opts, (paths, url) => {
if (!isArray(paths)) {
paths = [paths]
}
forEach(paths, path => {
debug('Setting up %s → %s', url, path)
express.use(url, serveStatic(path))
})
})
}
// ===================================================================
const setUpApi = (webServer, xo, verboseLogsOnErrors) => {
const webSocketServer = new WebSocket.Server({
noServer: true,
})
xo.on('stop', () => pFromCallback(cb => webSocketServer.close(cb)))
const onConnection = (socket, upgradeReq) => {
const { remoteAddress } = upgradeReq.socket
debug('+ WebSocket connection (%s)', remoteAddress)
// Create the abstract XO object for this connection.
const connection = xo.createUserConnection()
connection.once('close', () => {
socket.close()
})
// Create the JSON-RPC server for this connection.
const jsonRpc = new JsonRpcPeer(message => {
if (message.type === 'request') {
return xo.callApiMethod(connection, message.method, message.params)
}
})
connection.notify = bind(jsonRpc.notify, jsonRpc)
// Close the XO connection with this WebSocket.
socket.once('close', () => {
debug('- WebSocket connection (%s)', remoteAddress)
connection.close()
})
// Connect the WebSocket to the JSON-RPC server.
socket.on('message', message => {
jsonRpc.write(message)
})
const onSend = error => {
if (error) {
warn('WebSocket send:', error.stack)
}
}
jsonRpc.on('data', data => {
// The socket may have been closed during the API method
// execution.
if (socket.readyState === WebSocket.OPEN) {
socket.send(data, onSend)
}
})
}
webServer.on('upgrade', (req, socket, head) => {
if (req.url === '/api/') {
webSocketServer.handleUpgrade(req, socket, head, ws =>
onConnection(ws, req)
)
}
})
}
// ===================================================================
const CONSOLE_PROXY_PATH_RE = /^\/api\/consoles\/(.*)$/
const setUpConsoleProxy = (webServer, xo) => {
const webSocketServer = new WebSocket.Server({
noServer: true,
})
xo.on('stop', () => pFromCallback(cb => webSocketServer.close(cb)))
webServer.on('upgrade', async (req, socket, head) => {
const matches = CONSOLE_PROXY_PATH_RE.exec(req.url)
if (!matches) {
return
}
const [, id] = matches
try {
// TODO: factorize permissions checking in an Express middleware.
{
const { token } = parseCookies(req.headers.cookie)
const user = await xo.authenticateUser({ token })
if (!await xo.hasPermissions(user.id, [[id, 'operate']])) {
throw invalidCredentials()
}
const { remoteAddress } = socket
debug('+ Console proxy (%s - %s)', user.name, remoteAddress)
socket.on('close', () => {
debug('- Console proxy (%s - %s)', user.name, remoteAddress)
})
}
const xapi = xo.getXapi(id, ['VM', 'VM-controller'])
const vmConsole = xapi.getVmConsole(id)
// FIXME: lost connection due to VM restart is not detected.
webSocketServer.handleUpgrade(req, socket, head, connection => {
proxyConsole(connection, vmConsole, xapi.sessionId)
})
} catch (error) {
console.error((error && error.stack) || error)
}
})
}
// ===================================================================
const USAGE = (({ name, version }) => `Usage: ${name} [--safe-mode]
${name} v${version}`)(require('../package.json'))
// ===================================================================
export default async function main (args) {
if (includes(args, '--help') || includes(args, '-h')) {
return USAGE
}
{
const debug = createLogger('xo:perf')
blocked(
ms => {
debug('blocked for %sms', ms | 0)
},
{
threshold: 50,
}
)
}
const config = await loadConfiguration()
const webServer = await createWebServer(config.http)
// Now the web server is listening, drop privileges.
try {
const { user, group } = config
if (group) {
process.setgid(group)
debug('Group changed to', group)
}
if (user) {
process.setuid(user)
debug('User changed to', user)
}
} catch (error) {
warn('Failed to change user/group:', error)
}
// Creates main object.
const xo = new Xo(config)
// Register web server close on XO stop.
xo.on('stop', () => pFromCallback(cb => webServer.stop(cb)))
// Connects to all registered servers.
await xo.start()
// Trigger a clean job.
await xo.clean()
// Express is used to manage non WebSocket connections.
const express = createExpressApp()
if (config.http.redirectToHttps) {
let port
forEach(config.http.listen, listen => {
if (listen.port && (listen.cert || listen.certificate)) {
port = listen.port
return false
}
})
if (port === undefined) {
warn('Could not setup HTTPs redirection: no HTTPs port found')
} else {
express.use((req, res, next) => {
if (req.secure) {
return next()
}
res.redirect(`https://${req.hostname}:${port}${req.originalUrl}`)
})
}
}
// Must be set up before the API.
setUpConsoleProxy(webServer, xo)
// Must be set up before the API.
express.use(bind(xo._handleHttpRequest, xo))
// Everything above is not protected by the sign in, allowing xo-cli
// to work properly.
await setUpPassport(express, xo)
// Attaches express to the web server.
webServer.on('request', express)
webServer.on('upgrade', (req, socket, head) => {
express.emit('upgrade', req, socket, head)
})
// Must be set up before the static files.
setUpApi(webServer, xo, config.verboseApiLogsOnErrors)
setUpProxies(express, config.http.proxies, xo)
setUpStaticFiles(express, config.http.mounts)
if (!includes(args, '--safe-mode')) {
await registerPlugins(xo)
}
// Gracefully shutdown on signals.
//
// TODO: implements a timeout? (or maybe it is the services launcher
// responsibility?)
forEach(['SIGINT', 'SIGTERM'], signal => {
let alreadyCalled = false
process.on(signal, () => {
if (alreadyCalled) {
warn('forced exit')
process.exit(1)
}
alreadyCalled = true
debug('%s caught, closing…', signal)
xo.stop()
})
})
await fromEvent(xo, 'stopped')
debug('bye :-)')
}

View File

@@ -16,6 +16,11 @@ export default {
key: {
type: 'string',
},
type: {
default: 'call',
enum: ['backup', 'call'],
},
data: {},
},
required: ['event', 'userId', 'jobId', 'key'],
required: ['event', 'userId', 'jobId'],
}

View File

@@ -0,0 +1,18 @@
// JSON schema of `task.end` log records: emitted when a (sub)task of a
// job run finishes, whatever its outcome.
export default {
  $schema: 'http://json-schema.org/draft-04/schema#',
  type: 'object',
  properties: {
    event: {
      enum: ['task.end'],
    },
    taskId: {
      type: 'string',
      description: 'identifier of this task',
    },
    // final state of the task
    status: {
      enum: ['canceled', 'failure', 'success'],
    },
    // value produced by the task (or a serialized error on failure)
    result: {},
  },
  required: ['event', 'taskId', 'status'],
}

View File

@@ -0,0 +1,15 @@
// JSON schema of `task.start` log records: emitted when a (sub)task of
// a job run begins.
export default {
  $schema: 'http://json-schema.org/draft-04/schema#',
  type: 'object',
  properties: {
    event: {
      enum: ['task.start'],
    },
    parentId: {
      type: 'string',
      description: 'identifier of the parent task or job',
    },
    // arbitrary payload attached to the task
    data: {},
  },
  required: ['event'],
}

View File

@@ -0,0 +1,3 @@
// Temporary worker stub: traces every IPC message received from the
// parent process (messages are `[action, ...args]` tuples).
process.on('message', message => {
  const [action, ...args] = message
  console.log(action, args)
})

View File

@@ -0,0 +1,143 @@
import blocked from 'blocked'
import { createLogger } from '@xen-orchestra/log'
import { fromEvent } from 'promise-toolbox'
import { ensureDir, readdir } from 'fs-extra'
import Xo from './xo'
// ===================================================================
const { debug } = createLogger('xo:main')
// ===================================================================
// Loads and registers a single xo-server plugin.
//
// Must be called with the Xo instance as `this`.
//
// `pluginPath` is the directory the plugin can be `require`d from and
// `pluginName` its short name (without the `xo-server-` prefix).
async function registerPlugin (pluginPath, pluginName) {
  const plugin = require(pluginPath)

  // Plugin metadata are optional: missing package.json is tolerated.
  const { description, version = 'unknown' } = (() => {
    try {
      return require(pluginPath + '/package.json')
    } catch (_) {
      return {}
    }
  })()

  // Supports both “normal” CommonJS and Babel's ES2015 modules.
  const {
    default: factory = plugin,
    configurationSchema,
    configurationPresets,
    testSchema,
  } = plugin

  // The default export can be either a factory or directly a plugin
  // instance.
  const instance =
    typeof factory === 'function'
      ? factory({
          xo: this,
          // Lazily creates (and resolves to) the directory where the
          // plugin may store its data.
          getDataDir: () => {
            const dir = `${this._config.datadir}/${pluginName}`
            return ensureDir(dir).then(() => dir)
          },
        })
      : factory

  await this.registerPlugin(
    pluginName,
    instance,
    configurationSchema,
    configurationPresets,
    description,
    testSchema,
    version
  )
}
// NOTE(review): elsewhere in this file the logger is destructured
// (`const { debug } = createLogger(...)`); here the logger object
// itself is called as a function — confirm that createLogger's return
// value is callable.
const debugPlugin = createLogger('xo:plugin')

// Wraps registerPlugin() with logging; registration failures are
// logged but never propagated so one broken plugin does not prevent
// the server from starting.
function registerPluginWrapper (pluginPath, pluginName) {
  debugPlugin('register %s', pluginName)

  return registerPlugin.call(this, pluginPath, pluginName).then(
    () => {
      debugPlugin(`successfully register ${pluginName}`)
    },
    error => {
      debugPlugin(`failed register ${pluginName}`)
      debugPlugin(error)
    }
  )
}
const PLUGIN_PREFIX = 'xo-server-'
const PLUGIN_PREFIX_LENGTH = PLUGIN_PREFIX.length

// Registers, in parallel, every `xo-server-*` plugin found in `path`.
//
// Must be called with the Xo instance as `this`. A missing directory
// is not an error: it simply yields no plugins.
async function registerPluginsInPath (path) {
  const files = await readdir(path).catch(error => {
    if (error.code === 'ENOENT') {
      return []
    }
    throw error
  })

  await Promise.all(
    files.map(name => {
      // Entries without the plugin prefix map to undefined, which
      // Promise.all ignores.
      if (name.startsWith(PLUGIN_PREFIX)) {
        return registerPluginWrapper.call(
          this,
          `${path}/${name}`,
          name.slice(PLUGIN_PREFIX_LENGTH)
        )
      }
    })
  )
}
// Registers plugins from the local node_modules and from the global
// one (the bind operator `::` passes `xo` as `this` to
// registerPluginsInPath).
async function registerPlugins (xo) {
  await Promise.all(
    [`${__dirname}/../node_modules/`, '/usr/local/lib/node_modules/'].map(
      xo::registerPluginsInPath
    )
  )
}
// ===================================================================
// Worker-process entry point: starts the Xo application, then waits
// for a 'STOP' message from the parent before shutting down.
//
// NOTE(review): `main` destructures its argument but is invoked below
// as `main()` with no argument, which throws immediately — presumably
// `config`/`safeMode` should come from an IPC message sent by the
// parent (see cli.js); confirm the intended protocol.
async function main ({ config, safeMode }) {
  {
    // Log a debug message whenever the event loop is blocked.
    //
    // NOTE(review): elsewhere the logger is destructured
    // (`const { debug } = createLogger(...)`); confirm the logger
    // object is callable as a function here.
    const debug = createLogger('xo:perf')
    blocked(ms => {
      debug('blocked for %sms', ms | 0)
    })
  }

  // Creates main object.
  const xo = new Xo(config)

  // Connects to all registered servers.
  await xo.start()

  // Trigger a clean job.
  await xo.clean()

  if (!safeMode) {
    await registerPlugins(xo)
  }

  // Wait for the parent process to ask us to stop.
  await new Promise(resolve => {
    const onMessage = message => {
      if (message[0] === 'STOP') {
        process.removeListener('message', onMessage)
        resolve()
      }
    }
    process.on('message', onMessage)
  })

  // NOTE(review): nothing here calls `xo.stop()`, so this wait may
  // never resolve — confirm.
  await fromEvent(xo, 'stopped')
}
main().then(
  () => process.send(['STOPPED']),
  error => process.send(['STOPPED_WITH_ERROR', error])
)

View File

@@ -58,7 +58,7 @@ declare export class Xapi {
_updateObjectMapProperty(
object: XapiObject,
property: string,
entries: $Dict<string>
entries: $Dict<null | string>
): Promise<void>;
_setObjectProperties(
object: XapiObject,

View File

@@ -6,7 +6,15 @@ import defer from 'golike-defer'
import { type Pattern, createPredicate } from 'value-matcher'
import { type Readable, PassThrough } from 'stream'
import { basename, dirname } from 'path'
import { isEmpty, last, mapValues, noop, values } from 'lodash'
import {
forEach,
groupBy,
isEmpty,
last,
mapValues,
noop,
values,
} from 'lodash'
import { timeout as pTimeout } from 'promise-toolbox'
import Vhd, {
chainVhd,
@@ -33,10 +41,12 @@ import {
import { translateLegacyJob } from './migration'
type Mode = 'full' | 'delta'
type ReportWhen = 'always' | 'failure' | 'never'
type Settings = {|
deleteFirst?: boolean,
exportRetention?: number,
reportWhen?: ReportWhen,
snapshotRetention?: number,
vmTimeout?: number,
|}
@@ -56,13 +66,6 @@ export type BackupJob = {|
vms: Pattern,
|}
type BackupResult = {|
mergeDuration: number,
mergeSize: number,
transferDuration: number,
transferSize: number,
|}
type MetadataBase = {|
_filename?: string,
jobId: string,
@@ -87,6 +90,33 @@ type MetadataFull = {|
|}
type Metadata = MetadataDelta | MetadataFull
type ConsolidatedJob = {|
duration?: number,
end?: number,
error?: Object,
id: string,
jobId: string,
mode: Mode,
start: number,
type: 'backup' | 'call',
userId: string,
|}
type ConsolidatedTask = {|
data?: Object,
duration?: number,
end?: number,
parentId: string,
message: string,
result?: Object,
start: number,
status: 'canceled' | 'failure' | 'success',
taskId: string,
|}
type ConsolidatedBackupNgLog = {
roots: Array<ConsolidatedJob>,
[parentId: string]: Array<ConsolidatedTask>,
}
const compareSnapshotTime = (a: Vm, b: Vm): number =>
a.snapshot_time < b.snapshot_time ? -1 : 1
@@ -105,6 +135,7 @@ const getOldEntries = <T>(retention: number, entries?: T[]): T[] =>
const defaultSettings: Settings = {
deleteFirst: false,
exportRetention: 0,
reportWhen: 'failure',
snapshotRetention: 0,
vmTimeout: 0,
}
@@ -283,6 +314,77 @@ const writeStream = async (
}
}
// Wraps a promise with task logging: emits a `task.start` record, then
// a matching `task.end` record with status `success` or `failure`.
//
// `opts`:
// - data: arbitrary payload attached to the `task.start` record
// - logger: its notice()/error() methods return the id of the created
//   log record
// - message: human readable label of the task
// - parentId: id of the parent task/job record, if any
// - result: overrides the logged result; when a function, it is applied
//   to the resolved value
//
// Returns a promise settling like `task`.
const wrapTask = async <T>(opts: any, task: Promise<T>): Promise<T> => {
  const { data, logger, message, parentId, result } = opts

  const taskId = logger.notice(message, {
    event: 'task.start',
    parentId,
    data,
  })

  return task.then(
    value => {
      logger.notice(message, {
        event: 'task.end',
        result:
          result === undefined
            ? value
            : typeof result === 'function' ? result(value) : result,
        status: 'success',
        taskId,
      })
      // Returning the original promise resolves with the same value.
      return task
    },
    // NOTE(review): this parameter shadows `result` from `opts`.
    result => {
      logger.error(message, {
        event: 'task.end',
        result: serializeError(result),
        status: 'failure',
        taskId,
      })
      // Returning the (rejected) original promise propagates the error.
      return task
    }
  )
}
const wrapTaskFn = <T>(
opts: any,
task: (...any) => Promise<T>
): ((taskId: string, ...any) => Promise<T>) =>
async function () {
const { data, logger, message, parentId, result } =
typeof opts === 'function' ? opts.apply(this, arguments) : opts
const taskId = logger.notice(message, {
event: 'task.start',
parentId,
data,
})
try {
const value = await task.apply(this, [taskId, ...arguments])
logger.notice(message, {
event: 'task.end',
result:
result === undefined
? value
: typeof result === 'function' ? result(value) : result,
status: 'success',
taskId,
})
return value
} catch (result) {
logger.error(message, {
event: 'task.end',
result: serializeError(result),
status: 'failure',
taskId,
})
throw result
}
}
// File structure on remotes:
//
// <remote>
@@ -316,6 +418,7 @@ export default class BackupNg {
getXapi: (id: string) => Xapi,
getJob: ((id: string, 'backup') => Promise<BackupJob>) &
((id: string, 'call') => Promise<CallJob>),
getLogs: (namespace: string) => Promise<{ [id: string]: Object }>,
updateJob: (($Shape<BackupJob>, ?boolean) => Promise<BackupJob>) &
(($Shape<CallJob>, ?boolean) => Promise<CallJob>),
removeJob: (id: string) => Promise<void>,
@@ -349,82 +452,59 @@ export default class BackupNg {
}
const jobId = job.id
const scheduleId = schedule.id
const status: Object = {
calls: {},
runJobId,
start: Date.now(),
timezone: schedule.timezone,
}
const { calls } = status
await asyncMap(vms, async vm => {
const { uuid } = vm
const method = 'backup-ng'
const params = {
id: uuid,
tag: job.name,
}
const name = vm.name_label
const runCallId = logger.notice(
const { name_label: name, uuid } = vm
const taskId: string = logger.notice(
`Starting backup of ${name}. (${jobId})`,
{
event: 'jobCall.start',
method,
params,
runJobId,
event: 'task.start',
parentId: runJobId,
data: {
type: 'VM',
id: uuid,
},
}
)
const call: Object = (calls[runCallId] = {
method,
params,
start: Date.now(),
})
const vmCancel = cancelToken.fork()
try {
// $FlowFixMe injected $defer param
let p = this._backupVm(vmCancel.token, uuid, job, schedule)
let p = this._backupVm(
vmCancel.token,
uuid,
job,
schedule,
logger,
taskId
)
const vmTimeout: number = getSetting(
job.settings,
'vmTimeout',
uuid,
scheduleId
scheduleId,
logger,
taskId
)
if (vmTimeout !== 0) {
p = pTimeout.call(p, vmTimeout)
}
const returnedValue = await p
logger.notice(
`Backuping ${name} (${runCallId}) is a success. (${jobId})`,
{
event: 'jobCall.end',
runJobId,
runCallId,
returnedValue,
}
)
call.returnedValue = returnedValue
call.end = Date.now()
await p
logger.notice(`Backuping ${name} is a success. (${jobId})`, {
event: 'task.end',
taskId,
status: 'success',
})
} catch (error) {
vmCancel.cancel()
logger.notice(
`Backuping ${name} (${runCallId}) has failed. (${jobId})`,
{
event: 'jobCall.end',
runJobId,
runCallId,
error: Array.isArray(error)
? error.map(serializeError)
: serializeError(error),
}
)
call.error = error
call.end = Date.now()
logger.error(`Backuping ${name} has failed. (${jobId})`, {
event: 'task.end',
taskId,
status: 'failure',
result: Array.isArray(error)
? error.map(serializeError)
: serializeError(error),
})
}
})
status.end = Date.now()
return status
}
app.registerJobExecutor('backup', executor)
})
@@ -618,8 +698,10 @@ export default class BackupNg {
$cancelToken: any,
vmUuid: string,
job: BackupJob,
schedule: Schedule
): Promise<BackupResult> {
schedule: Schedule,
logger: any,
taskId: string
): Promise<void> {
const app = this._app
const xapi = app.getXapi(vmUuid)
const vm: Vm = (xapi.getObject(vmUuid): any)
@@ -660,10 +742,18 @@ export default class BackupNg {
await xapi._assertHealthyVdiChains(vm)
let snapshot: Vm = (await xapi._snapshotVm(
$cancelToken,
vm,
`[XO Backup ${job.name}] ${vm.name_label}`
let snapshot: Vm = (await wrapTask(
{
parentId: taskId,
logger,
message: 'snapshot',
result: _ => _.uuid,
},
xapi._snapshotVm(
$cancelToken,
vm,
`[XO Backup ${job.name}] ${vm.name_label}`
)
): any)
await xapi._updateObjectMapProperty(snapshot, 'other_config', {
'xo:backup:job': jobId,
@@ -686,12 +776,7 @@ export default class BackupNg {
snapshot = ((await xapi.barrier(snapshot.$ref): any): Vm)
if (exportRetention === 0) {
return {
mergeDuration: 0,
mergeSize: 0,
transferDuration: 0,
transferSize: 0,
}
return
}
const remotes = unboxIds(job.remotes)
@@ -746,93 +831,123 @@ export default class BackupNg {
const jsonMetadata = JSON.stringify(metadata)
const errors = []
await waitAll(
[
...remotes.map(async remoteId => {
const fork = forkExport()
const handler = await app.getRemoteHandler(remoteId)
const oldBackups: MetadataFull[] = (getOldEntries(
exportRetention,
await this._listVmBackups(
handler,
vm,
_ => _.mode === 'full' && _.scheduleId === scheduleId
)
): any)
const deleteFirst = getSetting(settings, 'deleteFirst', remoteId)
if (deleteFirst) {
await this._deleteFullVmBackups(handler, oldBackups)
}
await writeStream(fork, handler, dataFilename)
await handler.outputFile(metadataFilename, jsonMetadata)
if (!deleteFirst) {
await this._deleteFullVmBackups(handler, oldBackups)
}
}),
...srs.map(async srId => {
const fork = forkExport()
const xapi = app.getXapi(srId)
const sr = xapi.getObject(srId)
const oldVms = getOldEntries(
exportRetention,
listReplicatedVms(xapi, scheduleId, srId, vmUuid)
)
const deleteFirst = getSetting(settings, 'deleteFirst', srId)
if (deleteFirst) {
await this._deleteVms(xapi, oldVms)
}
const vm = await xapi.barrier(
await xapi._importVm($cancelToken, fork, sr, vm =>
xapi._setObjectProperties(vm, {
nameLabel: `${metadata.vm.name_label} (${safeDateFormat(
metadata.timestamp
)})`,
})
)
)
await Promise.all([
xapi.addTag(vm.$ref, 'Disaster Recovery'),
xapi._updateObjectMapProperty(vm, 'blocked_operations', {
start:
'Start operation for this vm is blocked, clone it if you want to use it.',
...remotes.map(
wrapTaskFn(
id => ({
data: { id, type: 'remote' },
logger,
message: 'export',
parentId: taskId,
}),
xapi._updateObjectMapProperty(vm, 'other_config', {
'xo:backup:sr': srId,
}),
])
async (taskId, remoteId) => {
const fork = forkExport()
if (!deleteFirst) {
await this._deleteVms(xapi, oldVms)
}
}),
const handler = await app.getRemoteHandler(remoteId)
const oldBackups: MetadataFull[] = (getOldEntries(
exportRetention,
await this._listVmBackups(
handler,
vm,
_ => _.mode === 'full' && _.scheduleId === scheduleId
)
): any)
const deleteFirst = getSetting(
settings,
'deleteFirst',
remoteId
)
if (deleteFirst) {
await this._deleteFullVmBackups(handler, oldBackups)
}
await wrapTask(
{
logger,
message: 'transfer',
parentId: taskId,
result: {
size: 0,
},
},
writeStream(fork, handler, dataFilename)
)
await handler.outputFile(metadataFilename, jsonMetadata)
if (!deleteFirst) {
await this._deleteFullVmBackups(handler, oldBackups)
}
}
)
),
...srs.map(
wrapTaskFn(
id => ({
data: { id, type: 'SR' },
logger,
message: 'export',
parentId: taskId,
}),
async (taskId, srId) => {
const fork = forkExport()
const xapi = app.getXapi(srId)
const sr = xapi.getObject(srId)
const oldVms = getOldEntries(
exportRetention,
listReplicatedVms(xapi, scheduleId, srId, vmUuid)
)
const deleteFirst = getSetting(settings, 'deleteFirst', srId)
if (deleteFirst) {
await this._deleteVms(xapi, oldVms)
}
const vm = await xapi.barrier(
await wrapTask(
{
logger,
message: 'transfer',
parentId: taskId,
result: {
size: 0,
},
},
xapi._importVm($cancelToken, fork, sr, vm =>
xapi._setObjectProperties(vm, {
nameLabel: `${metadata.vm.name_label} (${safeDateFormat(
metadata.timestamp
)})`,
})
)
)
)
await Promise.all([
xapi.addTag(vm.$ref, 'Disaster Recovery'),
xapi._updateObjectMapProperty(vm, 'blocked_operations', {
start:
'Start operation for this vm is blocked, clone it if you want to use it.',
}),
xapi._updateObjectMapProperty(vm, 'other_config', {
'xo:backup:sr': srId,
}),
])
if (!deleteFirst) {
await this._deleteVms(xapi, oldVms)
}
}
)
),
],
error => {
console.warn(error)
errors.push(error)
}
noop // errors are handled in logs
)
if (errors.length !== 0) {
throw errors
}
return {
mergeDuration: 0,
mergeSize: 0,
transferDuration: Date.now() - now,
transferSize: xva.size,
}
} else if (job.mode === 'delta') {
if (snapshotRetention === 0) {
// only keep the snapshot in case of success
@@ -904,126 +1019,164 @@ export default class BackupNg {
}
})()
const mergeStart = 0
const mergeEnd = 0
let transferStart = 0
let transferEnd = 0
const errors = []
await waitAll(
[
...remotes.map(async remoteId => {
const fork = forkExport()
...remotes.map(
wrapTaskFn(
id => ({
data: { id, type: 'remote' },
logger,
message: 'export',
parentId: taskId,
}),
async (taskId, remoteId) => {
const fork = forkExport()
const handler = await app.getRemoteHandler(remoteId)
const handler = await app.getRemoteHandler(remoteId)
const oldBackups: MetadataDelta[] = (getOldEntries(
exportRetention,
await this._listVmBackups(
handler,
vm,
_ => _.mode === 'delta' && _.scheduleId === scheduleId
)
): any)
const oldBackups: MetadataDelta[] = (getOldEntries(
exportRetention,
await this._listVmBackups(
handler,
vm,
_ => _.mode === 'delta' && _.scheduleId === scheduleId
)
): any)
const deleteOldBackups = () =>
wrapTask(
{
logger,
message: 'merge',
parentId: taskId,
result: {
size: 0,
},
},
this._deleteDeltaVmBackups(handler, oldBackups)
)
const deleteFirst = getSetting(settings, 'deleteFirst', remoteId)
if (deleteFirst) {
this._deleteDeltaVmBackups(handler, oldBackups)
}
await asyncMap(
fork.vdis,
defer(async ($defer, vdi, id) => {
const path = `${vmDir}/${metadata.vhds[id]}`
const isDelta = vdi.other_config['xo:base_delta'] !== undefined
let parentPath
if (isDelta) {
const vdiDir = dirname(path)
const parent = (await handler.list(vdiDir))
.filter(isVhd)
.sort()
.pop()
parentPath = `${vdiDir}/${parent}`
const deleteFirst =
exportRetention > 1 &&
getSetting(settings, 'deleteFirst', remoteId)
if (deleteFirst) {
await deleteOldBackups()
}
await writeStream(fork.streams[`${id}.vhd`](), handler, path, {
// no checksum for VHDs, because they will be invalidated by
// merges and chainings
checksum: false,
})
$defer.onFailure.call(handler, 'unlink', path)
await wrapTask(
{
logger,
message: 'transfer',
parentId: taskId,
result: {
size: 0,
},
},
asyncMap(
fork.vdis,
defer(async ($defer, vdi, id) => {
const path = `${vmDir}/${metadata.vhds[id]}`
if (isDelta) {
await chainVhd(handler, parentPath, handler, path)
const isDelta =
vdi.other_config['xo:base_delta'] !== undefined
let parentPath
if (isDelta) {
const vdiDir = dirname(path)
const parent = (await handler.list(vdiDir))
.filter(isVhd)
.sort()
.pop()
parentPath = `${vdiDir}/${parent}`
}
await writeStream(
fork.streams[`${id}.vhd`](),
handler,
path,
{
// no checksum for VHDs, because they will be invalidated by
// merges and chainings
checksum: false,
}
)
$defer.onFailure.call(handler, 'unlink', path)
if (isDelta) {
await chainVhd(handler, parentPath, handler, path)
}
})
)
)
await handler.outputFile(metadataFilename, jsonMetadata)
if (!deleteFirst) {
await deleteOldBackups()
}
})
}
)
),
...srs.map(
wrapTaskFn(
id => ({
data: { id, type: 'SR' },
logger,
message: 'export',
parentId: taskId,
}),
async (taskId, srId) => {
const fork = forkExport()
await handler.outputFile(metadataFilename, jsonMetadata)
const xapi = app.getXapi(srId)
const sr = xapi.getObject(srId)
if (!deleteFirst) {
this._deleteDeltaVmBackups(handler, oldBackups)
}
}),
...srs.map(async srId => {
const fork = forkExport()
const oldVms = getOldEntries(
exportRetention,
listReplicatedVms(xapi, scheduleId, srId, vmUuid)
)
const xapi = app.getXapi(srId)
const sr = xapi.getObject(srId)
const deleteFirst = getSetting(settings, 'deleteFirst', srId)
if (deleteFirst) {
await this._deleteVms(xapi, oldVms)
}
const oldVms = getOldEntries(
exportRetention,
listReplicatedVms(xapi, scheduleId, srId, vmUuid)
const { vm } = await wrapTask(
{
logger,
message: 'transfer',
parentId: taskId,
result: {
size: 0,
},
},
xapi.importDeltaVm(fork, {
disableStartAfterImport: false, // we'll take care of that
name_label: `${metadata.vm.name_label} (${safeDateFormat(
metadata.timestamp
)})`,
srId: sr.$id,
})
)
await Promise.all([
xapi.addTag(vm.$ref, 'Continuous Replication'),
xapi._updateObjectMapProperty(vm, 'blocked_operations', {
start:
'Start operation for this vm is blocked, clone it if you want to use it.',
}),
xapi._updateObjectMapProperty(vm, 'other_config', {
'xo:backup:sr': srId,
}),
])
if (!deleteFirst) {
await this._deleteVms(xapi, oldVms)
}
}
)
const deleteFirst = getSetting(settings, 'deleteFirst', srId)
if (deleteFirst) {
await this._deleteVms(xapi, oldVms)
}
transferStart = Math.min(transferStart, Date.now())
const { vm } = await xapi.importDeltaVm(fork, {
disableStartAfterImport: false, // we'll take care of that
name_label: `${metadata.vm.name_label} (${safeDateFormat(
metadata.timestamp
)})`,
srId: sr.$id,
})
transferEnd = Math.max(transferEnd, Date.now())
await Promise.all([
xapi.addTag(vm.$ref, 'Continuous Replication'),
xapi._updateObjectMapProperty(vm, 'blocked_operations', {
start:
'Start operation for this vm is blocked, clone it if you want to use it.',
}),
xapi._updateObjectMapProperty(vm, 'other_config', {
'xo:backup:sr': srId,
}),
])
if (!deleteFirst) {
await this._deleteVms(xapi, oldVms)
}
}),
),
],
error => {
console.warn(error)
errors.push(error)
}
noop // errors are handled in logs
)
if (errors.length !== 0) {
throw errors
}
return {
mergeDuration: mergeEnd - mergeStart,
mergeSize: 0,
transferDuration: transferEnd - transferStart,
transferSize: 0,
}
} else {
throw new Error(`no exporter for backup mode ${job.mode}`)
}
@@ -1135,4 +1288,54 @@ export default class BackupNg {
return backups.sort(compareTimestamp)
}
async getBackupNgLogs (runId?: string): Promise<ConsolidatedBackupNgLog> {
const rawLogs = await this._app.getLogs('jobs')
const logs: $Dict<ConsolidatedJob & ConsolidatedTask> = {}
forEach(rawLogs, (log, id) => {
const { data, time, message } = log
const { event } = data
delete data.event
switch (event) {
case 'job.start':
if (data.type === 'backup' && (runId === undefined || runId === id)) {
logs[id] = {
...data,
id,
start: time,
}
}
break
case 'job.end':
const job = logs[data.runJobId]
if (job !== undefined) {
job.end = time
job.duration = time - job.start
job.error = data.error
}
break
case 'task.start':
if (logs[data.parentId] !== undefined) {
logs[id] = {
...data,
start: time,
message,
}
}
break
case 'task.end':
const task = logs[data.taskId]
if (task !== undefined) {
task.status = data.status
task.taskId = data.taskId
task.result = data.result
task.end = time
task.duration = time - task.start
}
}
})
return groupBy(logs, log => log.parentId || 'roots')
}
}

View File

@@ -209,18 +209,32 @@ export default class Jobs {
throw new Error(`job ${id} is already running`)
}
const executor = this._executors[job.type]
const { type } = job
const executor = this._executors[type]
if (executor === undefined) {
throw new Error(`cannot run job ${id}: no executor for type ${job.type}`)
throw new Error(`cannot run job ${id}: no executor for type ${type}`)
}
let data
if (type === 'backup') {
// $FlowFixMe only defined for BackupJob
const settings = job.settings['']
data = {
// $FlowFixMe only defined for BackupJob
mode: job.mode,
reportWhen: (settings && settings.reportWhen) || 'failure',
}
}
const logger = this._logger
const runJobId = logger.notice(`Starting execution of ${id}.`, {
data,
event: 'job.start',
userId: job.userId,
jobId: id,
// $FlowFixMe only defined for CallJob
key: job.key,
type,
})
runningJobs[id] = runJobId
@@ -231,7 +245,7 @@ export default class Jobs {
session = app.createUserConnection()
session.set('user_id', job.userId)
const status = await executor({
await executor({
app,
cancelToken,
job,
@@ -245,8 +259,7 @@ export default class Jobs {
runJobId,
})
session.close()
app.emit('job:terminated', status)
app.emit('job:terminated', runJobId, job, schedule)
} catch (error) {
logger.error(`The execution of ${id} has failed.`, {
event: 'job.end',

View File

@@ -32,11 +32,11 @@ export default class Logs {
const onData =
keep !== 0
? () => {
if (--keep === 0) {
stream.on('data', deleteEntry)
stream.removeListener('data', onData)
if (--keep === 0) {
stream.on('data', deleteEntry)
stream.removeListener('data', onData)
}
}
}
: deleteEntry
stream.on('data', onData)
@@ -51,4 +51,22 @@ export default class Logs {
.getStore('logs')
.then(store => new LevelDbLogger(store, namespace))
}
async getLogs (namespace) {
const logger = await this.getLogger(namespace)
return new Promise((resolve, reject) => {
const logs = {}
logger
.createReadStream()
.on('data', data => {
logs[data.key] = data.value
})
.on('end', () => {
resolve(logs)
})
.on('error', reject)
})
}
}

View File

@@ -1,6 +1,6 @@
{
"name": "xo-vmdk-to-vhd",
"version": "0.0.12",
"version": "0.1.0",
"license": "AGPL-3.0",
"description": "JS lib streaming a vmdk file to a vhd",
"keywords": [

View File

@@ -1,7 +1,7 @@
{
"private": false,
"name": "xo-web",
"version": "5.19.1",
"version": "5.19.2",
"license": "AGPL-3.0",
"description": "Web interface client for Xen-Orchestra",
"keywords": [
@@ -33,7 +33,7 @@
"@julien-f/freactal": "0.1.0",
"@nraynaud/novnc": "0.6.1",
"@xen-orchestra/cron": "^1.0.3",
"xo-vmdk-to-vhd": "0.0.12",
"xo-vmdk-to-vhd": "0.1.0",
"ansi_up": "^3.0.0",
"asap": "^2.0.6",
"babel-core": "^6.26.0",

View File

@@ -288,6 +288,23 @@ const messages = {
jobFinished: 'Finished',
jobInterrupted: 'Interrupted',
jobStarted: 'Started',
jobFailed: 'Failed',
jobSkipped: 'Skipped',
jobSuccess: 'Successful',
allTasks: 'All',
taskStart: 'Start',
taskEnd: 'End',
taskDuration: 'Duration',
taskSuccess: 'Successful',
taskFailed: 'Failed',
taskSkipped: 'Skipped',
taskStarted: 'Started',
taskInterrupted: 'Interrupted',
taskTransferredDataSize: 'Transfer size',
taskTransferredDataSpeed: 'Transfer speed',
taskMergedDataSize: 'Merge size',
taskMergedDataSpeed: 'Merge speed',
taskError: 'Error',
saveBackupJob: 'Save',
deleteBackupSchedule: 'Remove backup job',
deleteBackupScheduleQuestion:
@@ -326,6 +343,11 @@ const messages = {
runBackupNgJobConfirm: 'Are you sure you want to run {name} ({id})?',
// ------ New backup -----
newBackupAdvancedSettings: 'Advanced settings',
reportWhenAlways: 'Always',
reportWhenFailure: 'Failure',
reportWhenNever: 'Never',
reportWhen: 'Report when',
newBackupSelection: 'Select your backup type:',
smartBackupModeSelection: 'Select backup mode:',
normalBackup: 'Normal backup',
@@ -1633,6 +1655,7 @@ const messages = {
logParams: 'Params',
logMessage: 'Message',
logError: 'Error',
logTitle: 'Logs',
logDisplayDetails: 'Display details',
logTime: 'Date',
logNoStackTrace: 'No stack trace',

View File

@@ -1204,6 +1204,7 @@ export const importVm = (file, type = 'xva', data = undefined, sr) => {
throw res.status
}
success(_('vmImportSuccess'), name)
return res.json().then(body => body.result)
})
.catch(() => {
error(_('vmImportFailed'), name)
@@ -1704,6 +1705,10 @@ export const subscribeBackupNgJobs = createSubscription(() =>
_call('backupNg.getAllJobs')
)
export const subscribeBackupNgLogs = createSubscription(() =>
_call('backupNg.getAllLogs')
)
export const createBackupNgJob = props =>
_call('backupNg.createJob', props)::tap(subscribeBackupNgJobs.forceRefresh)

View File

@@ -21,7 +21,7 @@ import {
subscribeSchedules,
} from 'xo'
import LogsTable from '../logs'
import LogsTable from '../logs/backup-ng-logs'
import Page from '../page'
import Edit from './edit'

View File

@@ -3,6 +3,7 @@ import ActionButton from 'action-button'
import Icon from 'icon'
import React from 'react'
import renderXoItem, { renderXoItemFromId } from 'render-xo-item'
import Select from 'form/select'
import Tooltip from 'tooltip'
import Upgrade from 'xoa-upgrade'
import { addSubscriptions, resolveId, resolveIds } from 'utils'
@@ -12,9 +13,10 @@ import {
find,
findKey,
flatten,
keyBy,
get,
includes,
isEmpty,
keyBy,
map,
some,
} from 'lodash'
@@ -89,6 +91,23 @@ const getNewSchedules = schedules => {
return newSchedules
}
const REPORT_WHEN_FILTER_OPTIONS = [
{
label: 'reportWhenAlways',
value: 'always',
},
{
label: 'reportWhenFailure',
value: 'failure',
},
{
label: 'reportWhenNever',
value: 'Never',
},
]
const getOptionRenderer = ({ label }) => <span>{_(label)}</span>
const getInitialState = () => ({
$pool: {},
backupMode: false,
@@ -103,6 +122,7 @@ const getInitialState = () => ({
paramsUpdated: false,
powerState: 'All',
remotes: [],
reportWhen: 'failure',
schedules: [],
settings: {},
smartMode: false,
@@ -136,6 +156,9 @@ export default [
schedules: getNewSchedules(state.newSchedules),
settings: {
...getNewSettings(state.newSchedules),
'': {
reportWhen: state.reportWhen,
},
},
remotes:
state.deltaMode || state.backupMode
@@ -195,11 +218,16 @@ export default [
const oldSettings = props.job.settings
const settings = state.settings
if (!('' in oldSettings)) {
oldSettings[''] = {}
}
for (const id in oldSettings) {
const oldSetting = oldSettings[id]
const newSetting = settings[id]
if (!(id in settings)) {
if (id === '') {
oldSetting.reportWhen = state.reportWhen
} else if (!(id in settings)) {
delete oldSettings[id]
} else if (
oldSetting.snapshotRetention !== newSetting.snapshotRetention ||
@@ -281,6 +309,9 @@ export default [
const remotes =
job.remotes !== undefined ? destructPattern(job.remotes) : []
const srs = job.srs !== undefined ? destructPattern(job.srs) : []
const globalSettings = job.settings['']
const settings = { ...job.settings }
delete settings['']
return {
...state,
@@ -298,7 +329,8 @@ export default [
crMode: job.mode === 'delta' && !isEmpty(srs),
remotes,
srs,
settings: job.settings,
reportWhen: get(globalSettings, 'reportWhen') || 'failure',
settings,
schedules,
...destructVmsPattern(job.vms),
}
@@ -455,6 +487,10 @@ export default [
return getInitialState()
},
setReportWhen: (_, { value }) => state => ({
...state,
reportWhen: value,
}),
},
computed: {
needUpdateParams: (state, { job, schedules }) =>
@@ -698,6 +734,25 @@ export default [
</CardBlock>
</Card>
)}
<Card>
<CardHeader>{_('newBackupAdvancedSettings')}</CardHeader>
<CardBlock>
<FormGroup>
<label>
<strong>{_('reportWhen')}</strong>
</label>
<Select
labelKey='label'
onChange={effects.setReportWhen}
optionRenderer={getOptionRenderer}
options={REPORT_WHEN_FILTER_OPTIONS}
required
value={state.reportWhen}
valueKey='value'
/>
</FormGroup>
</CardBlock>
</Card>
</Col>
<Col mediumSize={6}>
<Schedules />

View File

@@ -0,0 +1,199 @@
import _, { FormattedDuration } from 'intl'
import addSubscriptions from 'add-subscriptions'
import Icon from 'icon'
import NoObjects from 'no-objects'
import React from 'react'
import SortedTable from 'sorted-table'
import { alert } from 'modal'
import { Card, CardHeader, CardBlock } from 'card'
import { forEach, keyBy } from 'lodash'
import { FormattedDate } from 'react-intl'
import { get } from 'xo-defined'
import {
deleteJobsLogs,
subscribeBackupNgJobs,
subscribeBackupNgLogs,
} from 'xo'
import LogAlertBody from './log-alert-body'
import { isSkippedError, NO_VMS_MATCH_THIS_PATTERN } from './utils'
const STATUS_LABELS = {
failure: {
className: 'danger',
label: 'jobFailed',
},
skipped: {
className: 'info',
label: 'jobSkipped',
},
success: {
className: 'success',
label: 'jobSuccess',
},
started: {
className: 'warning',
label: 'jobStarted',
},
interrupted: {
className: 'danger',
label: 'jobInterrupted',
},
}
const LOG_COLUMNS = [
{
name: _('jobId'),
itemRenderer: log => log.jobId.slice(4, 8),
sortCriteria: log => log.jobId,
},
{
name: _('jobMode'),
itemRenderer: log => get(() => log.data.mode),
sortCriteria: log => get(() => log.data.mode),
},
{
name: _('jobName'),
itemRenderer: (log, { jobs }) => get(() => jobs[log.jobId].name),
sortCriteria: (log, { jobs }) => get(() => jobs[log.jobId].name),
},
{
name: _('jobStart'),
itemRenderer: log => (
<FormattedDate
value={new Date(log.start)}
month='short'
day='numeric'
year='numeric'
hour='2-digit'
minute='2-digit'
second='2-digit'
/>
),
sortCriteria: log => log.start,
sortOrder: 'desc',
},
{
default: true,
name: _('jobEnd'),
itemRenderer: log =>
log.end !== undefined && (
<FormattedDate
value={new Date(log.end)}
month='short'
day='numeric'
year='numeric'
hour='2-digit'
minute='2-digit'
second='2-digit'
/>
),
sortCriteria: log => log.end || log.start,
sortOrder: 'desc',
},
{
name: _('jobDuration'),
itemRenderer: log =>
log.duration !== undefined && (
<FormattedDuration duration={log.duration} />
),
sortCriteria: log => log.duration,
},
{
name: _('jobStatus'),
itemRenderer: log => {
const { className, label } = STATUS_LABELS[log.status]
return <span className={`tag tag-${className}`}>{_(label)}</span>
},
},
]
const showCalls = (log, { logs, jobs }) =>
alert(
_('jobModalTitle', { job: log.jobId.slice(4, 8) }),
<LogAlertBody log={log} job={get(() => jobs[log.jobId])} logs={logs} />
)
const LOG_INDIVIDUAL_ACTIONS = [
{
handler: showCalls,
icon: 'preview',
label: _('logDisplayDetails'),
},
]
const LOG_ACTIONS = [
{
handler: deleteJobsLogs,
icon: 'delete',
label: _('remove'),
},
]
const LOG_FILTERS = {
jobFailed: 'status: failure',
jobInterrupted: 'status: interrupted',
jobSkipped: 'status: skipped',
jobStarted: 'status: started',
jobSuccess: 'status: success',
}
const rowTransform = (log, { logs, jobs }) => {
let status
if (log.end !== undefined) {
if (log.error !== undefined) {
status =
log.error.message === NO_VMS_MATCH_THIS_PATTERN ? 'skipped' : 'failure'
} else {
let hasError = false
let hasTaskSkipped = false
forEach(logs[log.id], ({ status, result }) => {
if (status !== 'failure') {
return
}
if (result === undefined || !isSkippedError(result)) {
hasError = true
return false
}
hasTaskSkipped = true
})
status = hasError ? 'failure' : hasTaskSkipped ? 'skipped' : 'success'
}
} else {
status =
log.id === get(() => jobs[log.jobId].runId) ? 'started' : 'interrupted'
}
return {
...log,
status,
}
}
export default [
addSubscriptions({
logs: subscribeBackupNgLogs,
jobs: cb => subscribeBackupNgJobs(jobs => cb(keyBy(jobs, 'id'))),
}),
({ logs, jobs }) => (
<Card>
<CardHeader>
<Icon icon='log' /> {_('logTitle')}
</CardHeader>
<CardBlock>
<NoObjects
actions={LOG_ACTIONS}
collection={get(() => logs['roots'])}
columns={LOG_COLUMNS}
component={SortedTable}
data-jobs={jobs}
data-logs={logs}
emptyMessage={_('noLogs')}
filters={LOG_FILTERS}
individualActions={LOG_INDIVIDUAL_ACTIONS}
rowTransform={rowTransform}
/>
</CardBlock>
</Card>
),
].reduceRight((value, decorator) => decorator(value))

View File

@@ -131,6 +131,7 @@ const PREDICATES = {
success: () => call => call.end !== undefined && call.error === undefined,
}
const NO_OBJECTS_MATCH_THIS_PATTERN = 'no objects match this pattern'
const UNHEALTHY_VDI_CHAIN_ERROR = 'unhealthy VDI chain'
const NO_SUCH_OBJECT_ERROR = 'no such object'
const UNHEALTHY_VDI_CHAIN_LINK =
@@ -173,7 +174,18 @@ class Log extends BaseComponent {
)
render () {
return (
const { error } = this.props.log
return error !== undefined ? (
<span
className={
error.message === NO_OBJECTS_MATCH_THIS_PATTERN
? 'text-info'
: 'text-danger'
}
>
<Icon icon='alarm' /> {error.message}
</span>
) : (
<div>
<Select
labelKey='label'
@@ -442,6 +454,15 @@ export default [
entry.end = time
entry.duration = time - entry.start
entry.status = 'finished'
if (data.error !== undefined) {
entry.error = data.error
if (data.error.message === NO_OBJECTS_MATCH_THIS_PATTERN) {
entry.callSkipped = true
} else {
entry.hasErrors = true
}
}
} else if (data.event === 'jobCall.start') {
entry.calls[id] = {
callKey: id,

View File

@@ -0,0 +1,348 @@
import _, { FormattedDuration } from 'intl'
import Copiable from 'copiable'
import Icon from 'icon'
import React from 'react'
import renderXoItem, { renderXoItemFromId } from 'render-xo-item'
import Select from 'form/select'
import Tooltip from 'tooltip'
import { addSubscriptions, formatSize, formatSpeed } from 'utils'
import { createSelector } from 'selectors'
import { find, filter, isEmpty, get, keyBy, map, forEach } from 'lodash'
import { FormattedDate } from 'react-intl'
import { injectState, provideState } from '@julien-f/freactal'
import { subscribeRemotes } from 'xo'
import {
isSkippedError,
NO_VMS_MATCH_THIS_PATTERN,
UNHEALTHY_VDI_CHAIN_ERROR,
} from './utils'
const getTaskStatus = createSelector(
taskLog => taskLog,
isJobRunning => isJobRunning,
({ end, status, result }, isJobRunning) =>
end !== undefined
? status === 'success'
? 'success'
: result !== undefined && isSkippedError(result) ? 'skipped' : 'failure'
: isJobRunning ? 'started' : 'interrupted'
)
const getSubTaskStatus = createSelector(
taskLog => taskLog,
isJobRunning => isJobRunning,
({ end, status, result }, isJobRunning) =>
end !== undefined
? status === 'success' ? 'success' : 'failure'
: isJobRunning ? 'started' : 'interrupted'
)
const TASK_STATUS = {
failure: {
icon: 'halted',
label: 'taskFailed',
},
skipped: {
icon: 'skipped',
label: 'taskSkipped',
},
success: {
icon: 'running',
label: 'taskSuccess',
},
started: {
icon: 'busy',
label: 'taskStarted',
},
interrupted: {
icon: 'halted',
label: 'taskInterrupted',
},
}
const TaskStateInfos = ({ status }) => {
const { icon, label } = TASK_STATUS[status]
return (
<Tooltip content={_(label)}>
<Icon icon={icon} />
</Tooltip>
)
}
const VmTaskDataInfos = ({ logs, vmTaskId }) => {
let transferSize, transferDuration, mergeSize, mergeDuration
forEach(logs[vmTaskId], ({ taskId }) => {
if (transferSize !== undefined) {
return false
}
const transferTask = find(logs[taskId], { message: 'transfer' })
if (transferTask !== undefined) {
transferSize = transferTask.result.size
transferDuration = transferTask.end - transferTask.start
}
const mergeTask = find(logs[taskId], { message: 'merge' })
if (mergeTask !== undefined) {
mergeSize = mergeTask.result.size
mergeDuration = mergeTask.end - mergeTask.start
}
})
if (transferSize === undefined) {
return null
}
return (
<div>
{_.keyValue(_('taskTransferredDataSize'), formatSize(transferSize))}
<br />
{_.keyValue(
_('taskTransferredDataSpeed'),
formatSpeed(transferSize, transferDuration)
)}
{mergeSize !== undefined && (
<div>
{_.keyValue(_('taskMergedDataSize'), formatSize(mergeSize))}
<br />
{_.keyValue(
_('taskMergedDataSpeed'),
formatSpeed(mergeSize, mergeDuration)
)}
</div>
)}
</div>
)
}
const UNHEALTHY_VDI_CHAIN_LINK =
'https://xen-orchestra.com/docs/backup_troubleshooting.html#vdi-chain-protection'
const ALL_FILTER_OPTION = { label: 'allTasks', value: 'all' }
const FAILURE_FILTER_OPTION = { label: 'taskFailed', value: 'failure' }
const STARTED_FILTER_OPTION = { label: 'taskStarted', value: 'started' }
const TASK_FILTER_OPTIONS = [
ALL_FILTER_OPTION,
FAILURE_FILTER_OPTION,
STARTED_FILTER_OPTION,
{ label: 'taskInterrupted', value: 'interrupted' },
{ label: 'taskSkipped', value: 'skipped' },
{ label: 'taskSuccess', value: 'success' },
]
const getFilteredTaskLogs = (logs, isJobRunning, filterValue) =>
filterValue === 'all'
? logs
: filter(logs, log => getTaskStatus(log, isJobRunning) === filterValue)
const getInitialFilter = (job, logs, log) => {
const isEmptyFilter = filterValue =>
isEmpty(
getFilteredTaskLogs(
logs[log.id],
get(job, 'runId') === log.id,
filterValue
)
)
if (!isEmptyFilter('started')) {
return STARTED_FILTER_OPTION
}
if (!isEmptyFilter('failure')) {
return FAILURE_FILTER_OPTION
}
return ALL_FILTER_OPTION
}
// This view is built by composing decorators over a render function: the
// array below is reduced right-to-left, so `addSubscriptions` wraps
// `provideState`, which wraps `injectState`, which wraps the final
// functional component.
export default [
  addSubscriptions({
    // Expose the backup remotes, indexed by id, as the `remotes` prop.
    remotes: cb =>
      subscribeRemotes(remotes => {
        cb(keyBy(remotes, 'id'))
      }),
  }),
  provideState({
    // Pre-select the most useful task filter for this log
    // (started > failure > all, see getInitialFilter).
    initialState: ({ job, logs, log }) => ({
      filter: getInitialFilter(job, logs, log),
    }),
    effects: {
      // `filter` is one of TASK_FILTER_OPTIONS, chosen from the <Select>.
      setFilter: (_, filter) => state => ({
        ...state,
        filter,
      }),
    },
    computed: {
      // The job is considered running when its current run is this log.
      isJobRunning: (_, { job, log }) => get(job, 'runId') === log.id,
      // Per-VM task logs restricted to the currently selected filter.
      filteredTaskLogs: ({ filter: { value }, isJobRunning }, { log, logs }) =>
        getFilteredTaskLogs(logs[log.id], isJobRunning, value),
      // Renders a filter option label together with the number of task
      // logs that would match it.
      optionRenderer: ({ isJobRunning }, { log, logs }) => ({
        label,
        value,
      }) => (
        <span>
          {_(label)} ({
            getFilteredTaskLogs(logs[log.id], isJobRunning, value).length
          })
        </span>
      ),
    },
  }),
  injectState,
  // Render either the job-level error (info-styled when the job simply
  // matched no VMs, danger-styled otherwise) or the task filter followed
  // by the per-VM task list with their sub-tasks and timing details.
  ({ job, log, logs, remotes, state, effects }) =>
    log.error !== undefined ? (
      <span
        className={
          log.error.message === NO_VMS_MATCH_THIS_PATTERN
            ? 'text-info'
            : 'text-danger'
        }
      >
        <Copiable tagName='p' data={JSON.stringify(log.error, null, 2)}>
          <Icon icon='alarm' /> {log.error.message}
        </Copiable>
      </span>
    ) : (
      <div>
        <Select
          labelKey='label'
          onChange={effects.setFilter}
          optionRenderer={state.optionRenderer}
          options={TASK_FILTER_OPTIONS}
          required
          value={state.filter}
          valueKey='value'
        />
        <br />
        <ul className='list-group'>
          {map(state.filteredTaskLogs, vmTaskLog => (
            <li key={vmTaskLog.data.id} className='list-group-item'>
              {renderXoItemFromId(vmTaskLog.data.id)} ({vmTaskLog.data.id.slice(
                4,
                8
              )}){' '}
              <TaskStateInfos
                status={getTaskStatus(vmTaskLog, state.isJobRunning)}
              />
              <ul>
                {map(logs[vmTaskLog.taskId], subTaskLog => (
                  <li key={subTaskLog.taskId}>
                    {subTaskLog.message === 'snapshot' ? (
                      <span>
                        <Icon icon='task' /> {_('snapshotVmLabel')}
                      </span>
                    ) : subTaskLog.data.type === 'remote' ? (
                      <span>
                        {get(remotes, subTaskLog.data.id) !== undefined
                          ? renderXoItem({
                              type: 'remote',
                              value: remotes[subTaskLog.data.id],
                            })
                          : _('errorNoSuchItem')}{' '}
                        ({subTaskLog.data.id.slice(4, 8)})
                      </span>
                    ) : (
                      <span>
                        {renderXoItemFromId(subTaskLog.data.id)} ({subTaskLog.data.id.slice(
                          4,
                          8
                        )})
                      </span>
                    )}{' '}
                    <TaskStateInfos
                      status={getSubTaskStatus(subTaskLog, state.isJobRunning)}
                    />
                    <br />
                    {subTaskLog.status === 'failure' && (
                      <Copiable
                        tagName='p'
                        data={JSON.stringify(subTaskLog.result, null, 2)}
                      >
                        {_.keyValue(
                          _('taskError'),
                          <span className={'text-danger'}>
                            {subTaskLog.result.message}
                          </span>
                        )}
                      </Copiable>
                    )}
                  </li>
                ))}
              </ul>
              {_.keyValue(
                _('taskStart'),
                <FormattedDate
                  value={new Date(vmTaskLog.start)}
                  month='short'
                  day='numeric'
                  year='numeric'
                  hour='2-digit'
                  minute='2-digit'
                  second='2-digit'
                />
              )}
              {vmTaskLog.end !== undefined && (
                <div>
                  {_.keyValue(
                    _('taskEnd'),
                    <FormattedDate
                      value={new Date(vmTaskLog.end)}
                      month='short'
                      day='numeric'
                      year='numeric'
                      hour='2-digit'
                      minute='2-digit'
                      second='2-digit'
                    />
                  )}
                  <br />
                  {_.keyValue(
                    _('taskDuration'),
                    <FormattedDuration duration={vmTaskLog.duration} />
                  )}
                  <br />
                  {vmTaskLog.status === 'failure' &&
                  vmTaskLog.result !== undefined ? (
                    vmTaskLog.result.message === UNHEALTHY_VDI_CHAIN_ERROR ? (
                      <Tooltip content={_('clickForMoreInformation')}>
                        <a
                          className='text-info'
                          href={UNHEALTHY_VDI_CHAIN_LINK}
                          rel='noopener noreferrer'
                          target='_blank'
                        >
                          <Icon icon='info' /> {_('unhealthyVdiChainError')}
                        </a>
                      </Tooltip>
                    ) : (
                      <Copiable
                        tagName='p'
                        data={JSON.stringify(vmTaskLog.result, null, 2)}
                      >
                        {_.keyValue(
                          _('taskError'),
                          <span
                            className={
                              isSkippedError(vmTaskLog.result)
                                ? 'text-info'
                                : 'text-danger'
                            }
                          >
                            {vmTaskLog.result.message}
                          </span>
                        )}
                      </Copiable>
                    )
                  ) : (
                    <VmTaskDataInfos logs={logs} vmTaskId={vmTaskLog.taskId} />
                  )}
                </div>
              )}
            </li>
          ))}
        </ul>
      </div>
    ),
].reduceRight((value, decorator) => decorator(value))

View File

@@ -0,0 +1,7 @@
// Error messages shared across the backup-NG views.
export const NO_VMS_MATCH_THIS_PATTERN = 'no VMs match this pattern'
export const UNHEALTHY_VDI_CHAIN_ERROR = 'unhealthy VDI chain'
const NO_SUCH_OBJECT_ERROR = 'no such object'
// Messages of errors that mean "skipped" rather than a hard failure.
const SKIPPED_MESSAGES = [UNHEALTHY_VDI_CHAIN_ERROR, NO_SUCH_OBJECT_ERROR]
// Whether this error marks the task as skipped instead of failed.
export const isSkippedError = error => SKIPPED_MESSAGES.includes(error.message)

View File

@@ -235,6 +235,10 @@ const parseFile = async (file, type, func) => {
}
}
// URL to redirect to after a successful VM import: the VM's own page
// when exactly one VM was imported, otherwise the home page with a
// filter restricted to the imported VMs' ids.
const getRedirectionUrl = vms => {
  if (vms.length === 1) {
    return `/vms/${vms[0]}`
  }
  const homeFilter = `id:|(${vms.join(' ')})`
  return `/home?s=${encodeURIComponent(homeFilter)}&t=VM`
}
export default class Import extends Component {
constructor (props) {
super(props)
@@ -397,7 +401,7 @@ export default class Import extends Component {
form='import-form'
handler={this._import}
icon='import'
redirectOnSuccess='/'
redirectOnSuccess={getRedirectionUrl}
type='submit'
>
{_('newImport')}

View File

@@ -3737,10 +3737,6 @@ defined@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/defined/-/defined-1.0.0.tgz#c98d9bcef75674188e110969151199e39b1fa693"
deflate-js@^0.2.3:
version "0.2.3"
resolved "https://registry.yarnpkg.com/deflate-js/-/deflate-js-0.2.3.tgz#f85abb58ebc5151a306147473d57c3e4f7e4426b"
degenerator@^1.0.4:
version "1.0.4"
resolved "https://registry.yarnpkg.com/degenerator/-/degenerator-1.0.4.tgz#fcf490a37ece266464d9cc431ab98c5819ced095"