Compare commits

..

10 Commits

Author SHA1 Message Date
Nicolas Raynaud
e5d711dd28 fix(fs/S3Handler#_write): work when file doesn't exist (#5561) 2021-02-16 13:45:01 +01:00
badrAZ
10b127ca55 feat(xo-web/backup): ability to force full backup per schedule (#5546)
Fixes #5541
2021-02-16 11:31:17 +01:00
Julien Fontanet
fb4dff4fca chore(xo-cli/USAGE): clearer JSON param explanation 2021-02-16 10:52:22 +01:00
badrAZ
ef25b364ec fix(xo-server/metadata-backups): fix interrupted status on backup running (#5573)
Introduced by 8a3ae59f77
2021-02-15 14:31:01 +01:00
Nicolas Raynaud
9394db986d fix(import/disk): allow uppercase extensions (#5574)
See https://xcp-ng.org/forum/topic/4216/cannot-import-a-large-vhd-using-import-disk-option
2021-02-15 12:05:49 +01:00
Rajaa.BARHTAOUI
9226c6cac1 fix(xo-server/api): don't log host.stats errors (#5553)
See xoa-support#3323

This avoids flooding the logs with ECONNREFUSED errors when the host's toolstack
is restarted
2021-02-12 11:29:16 +01:00
Julien Fontanet
283193e992 feat(complex-matcher): 0.7.0 2021-02-11 13:54:29 +01:00
Julien Fontanet
72f8a6d220 chore(complex-matcher): add test for non-ASCII raw strings
Related to f5e4fb49c
2021-02-11 13:53:54 +01:00
Albin Hedman
f5e4fb49c3 feat(complex-matcher): allow most letters to be unquoted (#5555) 2021-02-11 11:14:44 +01:00
Olivier Lambert
3cd15c783c fix(docs): update the deploy script to fix an issue (#5565) 2021-02-11 10:17:42 +01:00
23 changed files with 215 additions and 169 deletions

View File

@@ -376,7 +376,7 @@ export default class RemoteHandlerAbstract {
// Methods that can be called by private methods to avoid parallel limit on public methods
async __closeFile(fd: FileDescriptor): Promise<void> {
- await timeout.call(this._closeFile(fd), this._timeout)
+ await timeout.call(this._closeFile(fd.fd), this._timeout)
}
async __mkdir(dir: string, { mode }: { mode?: number } = {}): Promise<void> {

View File

@@ -348,11 +348,5 @@ handlers.forEach(url => {
}
)
})
- describe('#open()', () => {
- it('can do an open/close cycle without crashing', async () => {
- const file = await handler.openFile('write', 'w')
- expect(async () => handler.closeFile(file)).not.toThrow()
- })
- })
})
})

View File

@@ -18,7 +18,7 @@ export default class LocalHandler extends RemoteHandlerAbstract {
}
async _closeFile(fd) {
- return fs.close(fd.fd)
+ return fs.close(fd)
}
async _createReadStream(file, options) {
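
Taken together, the abstract-handler and LocalHandler hunks above move the unwrapping of the file-descriptor object into the base class, so concrete handlers receive the raw descriptor they opened. A minimal stand-alone sketch of that shape (class names and the wrapper layout are assumptions, not the real @xen-orchestra/fs API):

```js
// Hypothetical illustration: the base class unwraps { fd, path } once, so concrete
// handlers only ever see the raw descriptor.
class AbstractHandler {
  async closeFile(fd /* assumed shape: { fd: number, path: string } */) {
    await this._closeFile(fd.fd) // unwrap here, in one place
  }
}

class EchoHandler extends AbstractHandler {
  async _closeFile(rawFd) {
    console.log('closing raw descriptor', rawFd) // subclasses never see the wrapper
  }
}

new EchoHandler().closeFile({ fd: 7, path: '/tmp/example' })
```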

View File

@@ -152,10 +152,19 @@ export default class S3Handler extends RemoteHandlerAbstract {
file = file.fd
}
const uploadParams = this._createParams(file)
- const fileSize = +(await this._s3.headObject(uploadParams)).ContentLength
+ let fileSize
+ try {
+ fileSize = +(await this._s3.headObject(uploadParams)).ContentLength
+ } catch (e) {
+ if (e.code === 'NotFound') {
+ fileSize = 0
+ } else {
+ throw e
+ }
+ }
if (fileSize < MIN_PART_SIZE) {
const resultBuffer = Buffer.alloc(Math.max(fileSize, position + buffer.length))
- const fileContent = (await this._s3.getObject(uploadParams)).Body
+ const fileContent = fileSize !== 0 ? (await this._s3.getObject(uploadParams)).Body : Buffer.alloc(0)
fileContent.copy(resultBuffer)
buffer.copy(resultBuffer, position)
await this._s3.putObject({ ...uploadParams, Body: resultBuffer })
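
The hunk above is the heart of the fix: a `NotFound` error from `headObject` is now treated as a zero-length file instead of aborting the write. A rough stand-alone sketch of the same idea against the AWS SDK v2 promise API (the `writeAt` name and the bucket/key parameters are placeholders; the multipart branch used for large files is omitted):

```js
const AWS = require('aws-sdk')

const s3 = new AWS.S3()

// Write `buffer` at `position` inside a possibly non-existent small object.
async function writeAt(params, buffer, position) {
  let fileSize
  try {
    fileSize = +(await s3.headObject(params).promise()).ContentLength
  } catch (e) {
    if (e.code === 'NotFound') {
      fileSize = 0 // the object does not exist yet: start from an empty file
    } else {
      throw e
    }
  }
  const result = Buffer.alloc(Math.max(fileSize, position + buffer.length))
  if (fileSize !== 0) {
    const existing = (await s3.getObject(params).promise()).Body
    existing.copy(result)
  }
  buffer.copy(result, position)
  await s3.putObject({ ...params, Body: result }).promise()
}

// e.g. writeAt({ Bucket: 'backups', Key: 'disk.vhd' }, Buffer.from('data'), 1024)
```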

View File

@@ -10,6 +10,8 @@
- [Task] Display age and estimated duration (PR [#5530](https://github.com/vatesfr/xen-orchestra/pull/5530))
- [Proxy] Ask for a confirmation before upgrading a proxy with running backups (PR [#5533](https://github.com/vatesfr/xen-orchestra/pull/5533))
- [Backup/restore] Allow backup restore to any licence even if XOA isn't registered (PR [#5547](https://github.com/vatesfr/xen-orchestra/pull/5547))
+ - [Import] Ignore case when detecting file type (PR [#5574](https://github.com/vatesfr/xen-orchestra/pull/5574))
+ - [Backup] Ability to set a specific schedule to always run full backups [#5541](https://github.com/vatesfr/xen-orchestra/issues/5541) (PR [#5546](https://github.com/vatesfr/xen-orchestra/pull/5546))
### Bug fixes
@@ -17,6 +19,7 @@
- [VM/Snapshot export] Fix `Error: no available place in queue` on canceling an export via browser then starting a new one when the concurrency threshold is reached [#5535](https://github.com/vatesfr/xen-orchestra/issues/5535) (PR [#5538](https://github.com/vatesfr/xen-orchestra/pull/5538))
- [Servers] Hide pool's objects if its master is unreachable [#5475](https://github.com/vatesfr/xen-orchestra/issues/5475) (PR [#5526](https://github.com/vatesfr/xen-orchestra/pull/5526))
+ - [Host] Restart toolstack: fix `ECONNREFUSED` error (PR [#5553](https://github.com/vatesfr/xen-orchestra/pull/5553))
### Packages to release
@@ -35,7 +38,7 @@
>
> In case of conflict, the highest (lowest in previous list) `$version` wins.
- - @xen-orchestra/fs patch
+ - @xen-orchestra/fs minor
- xen-api patch
- xo-common minor
- xo-server minor

View File

@@ -61,7 +61,7 @@ Please only use this if you have issues with [the default way to deploy XOA](ins
Alternatively, you can deploy it by connecting to your XenServer host and executing the following:
```
- curl -sS https://xoa.io/deploy | bash
+ bash -c "$(curl -sS https://xoa.io/deploy)"
```
:::tip
@@ -78,7 +78,7 @@ curl: (35) error:1407742E:SSL routines:SSL23_GET_SERVER_HELLO:tlsv1 alert protoc
It means that the secure HTTPS protocol is not supported, you can bypass this using the unsecure command instead:
```
- curl -sS http://xoa.io/deploy | bash
+ bash -c "$(curl -sS http://xoa.io/deploy)"
```
:::

View File

@@ -318,7 +318,7 @@ XOSAN is a 100% software defined solution for XenServer hyperconvergence. You ca
You will need to be registered on our website in order to use Xen Orchestra. If you are not yet registered, [here is the way](https://xen-orchestra.com/#!/signup)
- SSH in your XenServer and use the command line `curl -sS https://xoa.io/deploy | bash` - it will deploy Xen Orchestra Appliance on your XenServer infrastructure which is required to use XOSAN.
+ SSH in your XenServer and use the command line `bash -c "$(curl -sS https://xoa.io/deploy)"` - it will deploy Xen Orchestra Appliance on your XenServer infrastructure which is required to use XOSAN.
> Note: You can also download the XVA file and follow [these instructions](https://xen-orchestra.com/docs/xoa.html#the-alternative).

View File

@@ -1,7 +1,7 @@
{
"private": false,
"name": "complex-matcher",
"version": "0.6.0",
"version": "0.7.0",
"license": "ISC",
"description": "",
"keywords": [],

View File

@@ -2,34 +2,19 @@ import { escapeRegExp, isPlainObject, some } from 'lodash'
// ===================================================================
- const RAW_STRING_CHARS = (() => {
- const chars = { __proto__: null }
- const add = (a, b = a) => {
- let i = a.charCodeAt(0)
- const j = b.charCodeAt(0)
- while (i <= j) {
- chars[String.fromCharCode(i++)] = true
- }
- }
- add('$')
- add('-')
- add('.')
- add('0', '9')
- add('_')
- add('A', 'Z')
- add('a', 'z')
- return chars
- })()
- const isRawString = string => {
- const { length } = string
- for (let i = 0; i < length; ++i) {
- if (!(string[i] in RAW_STRING_CHARS)) {
- return false
- }
- }
- return true
+ const RAW_STRING_SYMBOLS = {
+ __proto__: null,
+ _: true,
+ '-': true,
+ '.': true,
+ $: true,
+ }
+ const isRawStringChar = c =>
+ (c >= '0' && c <= '9') || c in RAW_STRING_SYMBOLS || !(c === c.toUpperCase() && c === c.toLowerCase())
+ const isRawString = string => [...string].every(isRawStringChar)
// -------------------------------------------------------------------
class Node {
@@ -459,7 +444,7 @@ const parser = P.grammar({
globPattern: new P((input, pos, end) => {
let value = ''
let c
- while (pos < end && ((c = input[pos]) === '*' || c in RAW_STRING_CHARS)) {
+ while (pos < end && ((c = input[pos]) === '*' || isRawStringChar(c))) {
++pos
value += c
}
@@ -486,7 +471,7 @@ const parser = P.grammar({
rawString: new P((input, pos, end) => {
let value = ''
let c
- while (pos < end && RAW_STRING_CHARS[(c = input[pos])]) {
+ while (pos < end && isRawStringChar((c = input[pos]))) {
++pos
value += c
}
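
The new `isRawStringChar` above replaces the explicit `A-Z`/`a-z` ranges with a case-mapping test, which is what lets "most letters" go unquoted. A tiny stand-alone illustration of that trick (not the library's export):

```js
// A character counts as a letter when changing its case produces a different
// string; this covers accented and other cased alphabets without listing ranges.
const isLetter = c => c.toUpperCase() !== c.toLowerCase()

console.log(isLetter('é')) // true
console.log(isLetter('Z')) // true
console.log(isLetter('3')) // false – digits are matched by the separate 0-9 check
console.log(isLetter('日')) // false – uncased scripts still need quoting
```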

View File

@@ -8,7 +8,9 @@ import {
NumberNode,
NumberOrStringNode,
parse,
+ Property,
setPropertyClause,
+ StringNode,
} from './'
it('getPropertyClausesStrings', () => {
@@ -42,6 +44,10 @@ describe('parse', () => {
expect(node.match('32')).toBe(true)
expect(node.toString()).toBe('"32"')
})
+ it('supports non-ASCII letters in raw strings', () => {
+ expect(parse('åäöé:ÅÄÖÉ')).toStrictEqual(new Property('åäöé', new StringNode('ÅÄÖÉ')))
+ })
})
describe('GlobPattern', () => {

View File

@@ -13,7 +13,7 @@ import { randomBytes } from 'crypto'
import Vhd, { chainVhd, createSyntheticStream, mergeVhd as vhdMerge } from './'
- import { SECTOR_SIZE } from './_constants'
+ import { SECTOR_SIZE } from './src/_constants'
let tempDir = null

View File

@@ -8,7 +8,7 @@ import { pipeline } from 'readable-stream'
import { createReadableRawStream, createReadableSparseStream } from './'
- import { createFooter } from './_createFooterHeader'
+ import { createFooter } from './src/_createFooterHeader'
let tempDir = null

View File

@@ -97,12 +97,11 @@ E.g., adding a new server:
The return value is the identifier of this new server in XO.
- Parameters (except `true` and `false` which are correctly parsed as
- booleans) are assumed to be strings, for other types, you may use JSON
- encoding by prefixing with `json:`:
+ Because command lines are usually untyped, parameters (except `true` and `false` which are considered as
+ booleans) are assumed as strings by default, other types must be encoded as JSON and prefixed by `json:`:
```
- > xo-cli foo.bar baz='json:[1, 2, 3]'
+ > xo-cli method string=foo number=json:42 array=json:'["item1", "item2"]'
```
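
The reworded paragraph and example above describe the decoding rule; here is a small sketch of that rule in JavaScript (an approximation for illustration, not xo-cli's actual parser):

```js
// Decode one `name=value` CLI argument the way the USAGE text describes:
// bare true/false become booleans, `json:`-prefixed values are JSON-decoded,
// everything else stays a string.
const parseParam = arg => {
  const i = arg.indexOf('=')
  const name = arg.slice(0, i)
  const raw = arg.slice(i + 1)
  let value
  if (raw === 'true' || raw === 'false') {
    value = raw === 'true'
  } else if (raw.startsWith('json:')) {
    value = JSON.parse(raw.slice('json:'.length))
  } else {
    value = raw
  }
  return [name, value]
}

console.log(parseParam('string=foo')) // [ 'string', 'foo' ]
console.log(parseParam('number=json:42')) // [ 'number', 42 ]
console.log(parseParam('array=json:["item1", "item2"]')) // [ 'array', [ 'item1', 'item2' ] ]
console.log(parseParam('enabled=true')) // [ 'enabled', true ]
```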
##### Configuration export

View File

@@ -79,12 +79,11 @@ E.g., adding a new server:
The return value is the identifier of this new server in XO.
- Parameters (except `true` and `false` which are correctly parsed as
- booleans) are assumed to be strings, for other types, you may use JSON
- encoding by prefixing with `json:`:
+ Because command lines are usually untyped, parameters (except `true` and `false` which are considered as
+ booleans) are assumed as strings by default, other types must be encoded as JSON and prefixed by `json:`:
```
- > xo-cli foo.bar baz='json:[1, 2, 3]'
+ > xo-cli method string=foo number=json:42 array=json:'["item1", "item2"]'
```
##### Configuration export

View File

@@ -342,10 +342,13 @@ export default class Api {
Date.now() - startTime
)}] =!> ${error}`
- // 2020-07-10: Work-around: many kinds of error can be triggered by this
- // method, which can generates a lot of logs due to the fact that xo-web
- // uses 5s active subscriptions to call it
- if (name !== 'pool.listMissingPatches') {
+ // 2020-07-10: Work-around: many kinds of error can be triggered by
+ // 'pool.listMissingPatches' method, which can generates a lot of logs due to the fact that xo-web
+ // uses 5s active subscriptions to call it.
+ // 2021-02-11: Work-around: ECONNREFUSED error can be triggered by
+ // 'host.stats' method because there is no connection to the host during a
+ // toolstack restart and xo-web may call it often
+ if (name !== 'pool.listMissingPatches' || name !== 'host.stats') {
this._logger.error(message, {
...data,
duration: Date.now() - startTime,
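
One remark on the guard in this hunk (a plain boolean-logic observation, not part of the commit): written with `||`, the condition is true for every method name, so neither call is actually filtered; excluding both requires `&&`:

```js
// De Morgan sanity check: skip logging only when the name is neither method.
const shouldLog = name => name !== 'pool.listMissingPatches' && name !== 'host.stats'

console.log(shouldLog('host.stats')) // false – not logged
console.log(shouldLog('pool.listMissingPatches')) // false – not logged
console.log(shouldLog('vm.start')) // true – still logged
```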

View File

@@ -816,109 +816,113 @@ export default class metadataBackup {
const metadataFolder = `${dir}/${path.join('/')}`
const { proxy, url, options } = await app.getRemoteWithCredentials(remoteId)
if (proxy !== undefined) {
let xapi
if (dir === DIR_XO_POOL_METADATA_BACKUPS) {
const poolUuid = path[1]
const { allowUnauthorized, host, password, username } = await app.getXenServer(
app.getXenServerIdByObject(poolUuid)
)
xapi = {
allowUnauthorized,
credentials: {
username,
password,
},
url: host,
}
}
const logsStream = await app.callProxyMethod(
proxy,
'backup.restoreMetadataBackup',
{
backupId: metadataFolder,
remote: { url, options },
xapi,
},
{
assertType: 'iterator',
}
)
let rootTaskId
const localTaskIds = { __proto__: null }
for await (const log of logsStream) {
const { event, message, taskId } = log
const common = {
data: log.data,
event: 'task.' + event,
result: log.result,
status: log.status,
}
if (event === 'start') {
const { parentId } = log
if (parentId === undefined) {
rootTaskId = localTaskIds[taskId] = logger.notice(message, common)
} else {
common.parentId = localTaskIds[parentId]
localTaskIds[taskId] = logger.notice(message, common)
}
} else {
const localTaskId = localTaskIds[taskId]
if (localTaskId === rootTaskId && dir === DIR_XO_CONFIG_BACKUPS && log.status === 'success') {
try {
await app.importConfig(log.result)
} catch (error) {
common.result = serializeError(error)
common.status = 'failure'
}
}
common.taskId = localTaskId
logger.notice(message, common)
}
}
return
}
const message = 'metadataRestore'
const handler = await app.getRemoteHandler(remoteId)
const taskId = logger.notice(message, {
event: 'task.start',
data: JSON.parse(String(await handler.readFile(`${metadataFolder}/metadata.json`))),
})
let rootTaskId
try {
this._runningMetadataRestores.add(taskId)
if (proxy !== undefined) {
let xapi
if (dir === DIR_XO_POOL_METADATA_BACKUPS) {
const poolUuid = path[1]
const { allowUnauthorized, host, password, username } = await app.getXenServer(
app.getXenServerIdByObject(poolUuid)
)
xapi = {
allowUnauthorized,
credentials: {
username,
password,
},
url: host,
}
}
let result
if (dir === DIR_XO_CONFIG_BACKUPS) {
result = await app.importConfig(await handler.readFile(`${metadataFolder}/data.json`))
} else {
result = await app
.getXapi(path[1])
.importPoolMetadata(await handler.createReadStream(`${metadataFolder}/data`), true)
const logsStream = await app.callProxyMethod(
proxy,
'backup.restoreMetadataBackup',
{
backupId: metadataFolder,
remote: { url, options },
xapi,
},
{
assertType: 'iterator',
}
)
const localTaskIds = { __proto__: null }
for await (const log of logsStream) {
const { event, message, taskId } = log
const common = {
data: log.data,
event: 'task.' + event,
result: log.result,
status: log.status,
}
if (event === 'start') {
const { parentId } = log
if (parentId === undefined) {
rootTaskId = localTaskIds[taskId] = logger.notice(message, common)
this._runningMetadataRestores.add(rootTaskId)
} else {
common.parentId = localTaskIds[parentId]
localTaskIds[taskId] = logger.notice(message, common)
}
} else {
const localTaskId = localTaskIds[taskId]
if (localTaskId === rootTaskId && dir === DIR_XO_CONFIG_BACKUPS && log.status === 'success') {
try {
await app.importConfig(log.result)
} catch (error) {
common.result = serializeError(error)
common.status = 'failure'
}
}
common.taskId = localTaskId
logger.notice(message, common)
}
}
return
}
logger.notice(message, {
event: 'task.end',
result,
status: 'success',
taskId,
const message = 'metadataRestore'
const handler = await app.getRemoteHandler(remoteId)
rootTaskId = logger.notice(message, {
event: 'task.start',
data: JSON.parse(String(await handler.readFile(`${metadataFolder}/metadata.json`))),
})
} catch (error) {
logger.error(message, {
event: 'task.end',
result: serializeError(error),
status: 'failure',
taskId,
})
throw error
try {
this._runningMetadataRestores.add(rootTaskId)
let result
if (dir === DIR_XO_CONFIG_BACKUPS) {
result = await app.importConfig(await handler.readFile(`${metadataFolder}/data.json`))
} else {
result = await app
.getXapi(path[1])
.importPoolMetadata(await handler.createReadStream(`${metadataFolder}/data`), true)
}
logger.notice(message, {
event: 'task.end',
result,
status: 'success',
taskId: rootTaskId,
})
} catch (error) {
logger.error(message, {
event: 'task.end',
result: serializeError(error),
status: 'failure',
taskId: rootTaskId,
})
throw error
}
} finally {
this._runningMetadataRestores.delete(taskId)
this._runningMetadataRestores.delete(rootTaskId)
}
}
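
The long hunk above restructures the restore flow, but its core mechanism is easier to see in isolation: task logs streamed back from a proxy carry the proxy's own task ids, so each entry is re-logged locally and the ids are remapped so that parent/child links survive. A self-contained sketch of that relay pattern (the logger and the log stream are stand-ins, not the xo-server APIs):

```js
// Fake local logger: returns a fresh local id for every task it records.
const logger = {
  _next: 1,
  notice(message, data) {
    const id = `local-${this._next++}`
    console.log(id, message, data)
    return id
  },
}

// Fake proxy stream: logs arrive with the proxy's own task ids.
async function* fakeProxyLogs() {
  yield { event: 'start', taskId: 'p1', message: 'restore' }
  yield { event: 'start', taskId: 'p2', parentId: 'p1', message: 'import' }
  yield { event: 'end', taskId: 'p2', status: 'success', message: 'import' }
  yield { event: 'end', taskId: 'p1', status: 'success', message: 'restore' }
}

async function relay(logsStream) {
  const localTaskIds = { __proto__: null }
  for await (const log of logsStream) {
    const { event, message, taskId, parentId } = log
    const common = { event: 'task.' + event, status: log.status }
    if (event === 'start') {
      if (parentId !== undefined) {
        common.parentId = localTaskIds[parentId] // translate the parent's id too
      }
      localTaskIds[taskId] = logger.notice(message, common)
    } else {
      common.taskId = localTaskIds[taskId]
      logger.notice(message, common)
    }
  }
}

relay(fakeProxyLogs())
```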

View File

@@ -60,7 +60,7 @@
"chartist-plugin-legend": "^0.6.1",
"chartist-plugin-tooltip": "0.0.11",
"classnames": "^2.2.3",
"complex-matcher": "^0.6.0",
"complex-matcher": "^0.7.0",
"copy-to-clipboard": "^3.0.8",
"d3": "^5.0.0",
"debounce-input-decorator": "^1.0.0",

View File

@@ -506,6 +506,7 @@ const messages = {
timeout: 'Timeout',
timeoutInfo: 'Number of hours after which a job is considered failed',
fullBackupInterval: 'Full backup interval',
+ forceFullBackup: 'Force full backup',
timeoutUnit: 'In hours',
dbAndDrRequireEnterprisePlan: 'Delta Backup and DR require an Enterprise plan',
crRequiresPremiumPlan: 'CR requires a Premium plan',

View File

@@ -34,7 +34,7 @@ import {
isSrWritable,
subscribeRemotes,
} from 'xo'
- import { flatten, includes, isEmpty, map, mapValues, max, omit, some } from 'lodash'
+ import { flatten, includes, isEmpty, map, mapValues, omit, some } from 'lodash'
import NewSchedule from './new-schedule'
import ReportWhen from './_reportWhen'
@@ -143,6 +143,7 @@ const normalizeSettings = ({ copyMode, exportMode, offlineBackupActive, settings
settings.map(setting =>
defined(setting.copyRetention, setting.exportRetention, setting.snapshotRetention) !== undefined
? {
+ ...setting,
copyRetention: copyMode ? setting.copyRetention : undefined,
exportRetention: exportMode ? setting.exportRetention : undefined,
snapshotRetention: snapshotMode && !offlineBackupActive ? setting.snapshotRetention : undefined,
@@ -160,8 +161,7 @@ const destructVmsPattern = pattern =>
}
// isRetentionLow returns the expected result when the 'fullInterval' is undefined.
- const isRetentionLow = (settings, retention) =>
- retention < RETENTION_LIMIT || settings.getIn(['', 'fullInterval']) < RETENTION_LIMIT
+ const isRetentionLow = (fullInterval, retention) => retention < RETENTION_LIMIT || fullInterval < RETENTION_LIMIT
const checkRetentions = (schedule, { copyMode, exportMode, snapshotMode }) =>
(!copyMode && !exportMode && !snapshotMode) ||
@@ -425,14 +425,20 @@ const New = decorate([
{ copyMode, exportMode, deltaMode, propSettings, settings = propSettings, snapshotMode },
{ intl: { formatMessage } }
) => {
- const modes = { copyMode, exportMode, snapshotMode }
+ const modes = { copyMode, deltaMode, exportMode, snapshotMode }
const schedule = await form({
defaultValue: storedSchedule,
render: props => (
<NewSchedule
missingRetentions={!checkRetentions(props.value, modes)}
modes={modes}
- showRetentionWarning={deltaMode && !isRetentionLow(settings, props.value.exportRetention)}
+ showRetentionWarning={
+ deltaMode &&
+ !isRetentionLow(
+ defined(props.value.fullInterval, settings.getIn(['', 'fullInterval'])),
+ props.value.exportRetention
+ )
+ }
{...props}
/>
),
@@ -466,7 +472,7 @@ const New = decorate([
},
saveSchedule: (
_,
- { copyRetention, cron, enabled = true, exportRetention, id, name, snapshotRetention, timezone }
+ { copyRetention, cron, enabled = true, exportRetention, fullInterval, id, name, snapshotRetention, timezone }
) => ({ propSettings, schedules, settings = propSettings }) => ({
schedules: {
...schedules,
@@ -480,8 +486,9 @@ const New = decorate([
},
},
settings: settings.set(id, {
- exportRetention,
copyRetention,
+ exportRetention,
+ fullInterval,
snapshotRetention,
}),
}),
@@ -617,12 +624,21 @@ const New = decorate([
get(() => hostsById[$container].version) || get(() => hostsById[poolsById[$container].master].version)
),
selectedVmIds: state => resolveIds(state.vms),
- showRetentionWarning: ({ deltaMode, propSettings, settings = propSettings, schedules }) =>
- deltaMode &&
- !isRetentionLow(
- settings,
- defined(max(Object.keys(schedules).map(key => settings.getIn([key, 'exportRetention']))), 0)
- ),
+ showRetentionWarning: ({ deltaMode, propSettings, settings = propSettings, schedules }) => {
+ if (!deltaMode) {
+ return false
+ }
+ const globalFullInterval = settings.getIn(['', 'fullInterval'])
+ return some(
+ Object.keys(schedules),
+ key =>
+ !isRetentionLow(
+ defined(settings.getIn([key, 'fullInterval']), globalFullInterval),
+ settings.getIn([key, 'exportRetention'])
+ )
+ )
+ },
srPredicate: ({ srs }) => sr => isSrWritable(sr) && !includes(srs, sr.id),
remotePredicate: ({ proxyId, remotes }) => remote => {
if (proxyId === null) {
@@ -634,6 +650,7 @@ const New = decorate([
Map(get(() => job.settings)).map(setting =>
defined(setting.copyRetention, setting.exportRetention, setting.snapshotRetention)
? {
+ ...setting,
copyRetention: defined(setting.copyRetention, DEFAULT_RETENTION),
exportRetention: defined(setting.exportRetention, DEFAULT_RETENTION),
snapshotRetention: defined(setting.snapshotRetention, DEFAULT_RETENTION),
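
The retention-warning changes above boil down to two rules: `isRetentionLow` now receives the interval directly, and each schedule's own `fullInterval` takes precedence over the job-wide one. A condensed sketch (the limit value and helper names are assumptions, not the component's real constants):

```js
// Assumed limit – the real constant lives in xo-web's backup form code.
const RETENTION_LIMIT = 50

// No warning when either the retention or the effective full-backup interval
// stays under the limit (long delta chains are what the warning is about).
const isRetentionLow = (fullInterval, retention) => retention < RETENTION_LIMIT || fullInterval < RETENTION_LIMIT

// Per-schedule fullInterval wins; otherwise fall back to the job-wide setting.
const effectiveFullInterval = (scheduleSetting = {}, globalSetting = {}) =>
  scheduleSetting.fullInterval !== undefined ? scheduleSetting.fullInterval : globalSetting.fullInterval

console.log(isRetentionLow(effectiveFullInterval({ fullInterval: 1 }, { fullInterval: 100 }), 60)) // true – no warning
console.log(isRetentionLow(effectiveFullInterval({}, { fullInterval: 100 }), 60)) // false – warn
```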

View File

@@ -15,6 +15,7 @@ import { FormGroup, Input } from './../utils'
const New = decorate([
provideState({
computed: {
+ forceFullBackup: (_, { value }) => value.fullInterval === 1,
formId: generateId,
idInputName: generateId,
},
@@ -51,6 +52,11 @@ const New = decorate([
name: value.trim() === '' ? null : value,
})
},
+ toggleForceFullBackup({ setSchedule }) {
+ setSchedule({
+ fullInterval: this.state.forceFullBackup ? undefined : 1,
+ })
+ },
},
}),
injectState,
@@ -97,6 +103,14 @@ const New = decorate([
<Number min='0' onChange={effects.setSnapshotRetention} value={schedule.snapshotRetention} required />
</FormGroup>
)}
+ {modes.deltaMode && (
+ <FormGroup>
+ <label>
+ <strong>{_('forceFullBackup')}</strong>{' '}
+ <input checked={state.forceFullBackup} onChange={effects.toggleForceFullBackup} type='checkbox' />
+ </label>
+ </FormGroup>
+ )}
<Scheduler onChange={effects.setCronTimezone} cronPattern={schedule.cron} timezone={schedule.timezone} />
<SchedulePreview cronPattern={schedule.cron} timezone={schedule.timezone} />
</CardBlock>
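
The checkbox added above is stored as a plain `fullInterval` value on the schedule's settings. A minimal sketch of the toggle's effect (semantics inferred from the `forceFullBackup` computed and the `toggleForceFullBackup` effect; not the actual xo-web state code):

```js
// fullInterval === 1 is read as "run a full backup on every execution of this
// schedule"; clearing the checkbox removes the override so the job-level
// interval applies again.
const toggleForceFullBackup = schedule =>
  schedule.fullInterval === 1 ? { ...schedule, fullInterval: undefined } : { ...schedule, fullInterval: 1 }

console.log(toggleForceFullBackup({ cron: '0 0 * * *' })) // { cron: '0 0 * * *', fullInterval: 1 }
console.log(toggleForceFullBackup({ cron: '0 0 * * *', fullInterval: 1 })) // { cron: '0 0 * * *', fullInterval: undefined }
```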

View File

@@ -78,6 +78,14 @@ export default decorate([
},
]
+ if (state.deltaMode) {
+ columns.push({
+ itemRenderer: schedule => (schedule.fullInterval === 1 ? _('stateEnabled') : _('stateDisabled')),
+ sortCriteria: 'fullInterval',
+ name: _('forceFullBackup'),
+ })
+ }
if (state.exportMode) {
columns.push({
itemRenderer: _ => _.exportRetention,

View File

@@ -37,7 +37,11 @@ const DiskImport = decorate([
const { name } = file
const extIndex = name.lastIndexOf('.')
let type
- if (extIndex >= 0 && (type = name.slice(extIndex + 1)) && (type === 'vmdk' || type === 'vhd')) {
+ if (
+ extIndex >= 0 &&
+ (type = name.slice(extIndex + 1).toLowerCase()) &&
+ (type === 'vmdk' || type === 'vhd')
+ ) {
let vmdkData
if (type === 'vmdk') {
const parsed = await readCapacityAndGrainTable(async (start, end) => {

View File

@@ -230,7 +230,7 @@ export default class Import extends Component {
let func
let type
- if (extIndex >= 0 && (type = name.slice(extIndex + 1)) && (func = FORMAT_TO_HANDLER[type])) {
+ if (extIndex >= 0 && (type = name.slice(extIndex + 1).toLowerCase()) && (func = FORMAT_TO_HANDLER[type])) {
push(parseFile(file, type, func))
}
})
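
Both import hunks apply the same fix: lower-case the file extension before matching it, so uppercase or mixed-case names are accepted. A compact stand-alone version of that check (the function name is made up):

```js
// Return 'vhd' or 'vmdk' for a supported disk image, undefined otherwise,
// regardless of the extension's case.
const getDiskType = name => {
  const i = name.lastIndexOf('.')
  if (i < 0) return undefined
  const ext = name.slice(i + 1).toLowerCase()
  return ext === 'vhd' || ext === 'vmdk' ? ext : undefined
}

console.log(getDiskType('WIN10.VHD')) // 'vhd'
console.log(getDiskType('template.Vmdk')) // 'vmdk'
console.log(getDiskType('notes.txt')) // undefined
```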