Compare commits


1 Commit

Author SHA1 Message Date
Florent Beauchamp
91f65048ca feat(plugins): auto all plugins in source use 2024-01-16 10:35:02 +00:00
307 changed files with 3948 additions and 9104 deletions

View File

@@ -48,7 +48,7 @@ module.exports = {
},
},
{
files: ['@xen-orchestra/{web-core,lite,web}/**/*.{vue,ts}'],
files: ['@xen-orchestra/lite/**/*.{vue,ts}'],
parserOptions: {
sourceType: 'module',
},
@@ -65,11 +65,10 @@ module.exports = {
typescript: true,
'eslint-import-resolver-custom-alias': {
alias: {
'@core': '../web-core/lib',
'@': './src',
},
extensions: ['.ts'],
packages: ['@xen-orchestra/lite', '@xen-orchestra/web'],
packages: ['@xen-orchestra/lite'],
},
},
},
@@ -80,25 +79,6 @@ module.exports = {
'vue/require-default-prop': 'off', // https://github.com/vuejs/eslint-plugin-vue/issues/2051
},
},
{
files: ['@xen-orchestra/{web-core,lite,web}/src/pages/**/*.vue'],
parserOptions: {
sourceType: 'module',
},
rules: {
'vue/multi-word-component-names': 'off',
},
},
{
files: ['@xen-orchestra/{web-core,lite,web}/typed-router.d.ts'],
parserOptions: {
sourceType: 'module',
},
rules: {
'eslint-comments/disable-enable-pair': 'off',
'eslint-comments/no-unlimited-disable': 'off',
},
},
],
parserOptions: {

View File

@@ -64,7 +64,7 @@ body:
id: error-message
attributes:
label: Error message
render: Text
render: Markdown
validations:
required: false
- type: textarea

View File

@@ -24,12 +24,8 @@ jobs:
cache: 'yarn'
- name: Install project dependencies
run: yarn
- name: Ensure yarn.lock is up-to-date
run: git diff --exit-code yarn.lock
- name: Build the project
run: yarn build
- name: Unit tests
run: yarn test-unit
- name: Lint tests
run: yarn test-lint
- name: Integration tests

.gitignore
View File

@@ -36,6 +36,3 @@ yarn-error.log.*
.nyc_output/
coverage/
.turbo/
# https://node-tap.org/dot-tap-folder/
.tap/

View File

@@ -34,6 +34,7 @@
},
"devDependencies": {
"sinon": "^17.0.1",
"tap": "^16.3.0",
"test": "^3.2.1"
}
}

View File

@@ -62,42 +62,6 @@ decorateClass(Foo, {
})
```
### `decorateObject(object, map)`
Decorates an object the same way `decorateClass()` decorates a class:
```js
import { decorateObject } from '@vates/decorate-with'
const object = {
get bar() {
// body
},
set bar(value) {
// body
},
baz() {
// body
},
}
decorateObject(object, {
// getter and/or setter
bar: {
// without arguments
get: lodash.memoize,
// with arguments
set: [lodash.debounce, 150],
},
// method (with or without arguments)
baz: lodash.curry,
})
```
### `perInstance(fn, ...args)`
Helper to decorate the method by instance instead of for the whole class.
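For illustration, a minimal sketch based on the `perInstance(fn, decorator, ...args)` signature visible in the `index.js` diff later in this comparison (the `Foo`/`bar` names and `lodash.memoize` are only illustrative):
```js
import lodash from 'lodash'
import { decorateClass, perInstance } from '@vates/decorate-with'

class Foo {
  bar() {
    // body
  }
}

decorateClass(Foo, {
  // lodash.memoize is applied once per Foo instance instead of once for the class,
  // so each instance gets its own memoization cache
  bar: [perInstance, lodash.memoize],
})
```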

View File

@@ -80,42 +80,6 @@ decorateClass(Foo, {
})
```
### `decorateObject(object, map)`
Decorates an object the same way `decorateClass()` decorates a class:
```js
import { decorateObject } from '@vates/decorate-with'
const object = {
get bar() {
// body
},
set bar(value) {
// body
},
baz() {
// body
},
}
decorateObject(object, {
// getter and/or setter
bar: {
// without arguments
get: lodash.memoize,
// with arguments
set: [lodash.debounce, 150],
},
// method (with or without arguments)
baz: lodash.curry,
})
```
### `perInstance(fn, ...args)`
Helper to decorate the method by instance instead of for the whole class.

View File

@@ -14,13 +14,10 @@ function applyDecorator(decorator, value) {
}
exports.decorateClass = exports.decorateMethodsWith = function decorateClass(klass, map) {
return decorateObject(klass.prototype, map)
}
function decorateObject(object, map) {
const { prototype } = klass
for (const name of Object.keys(map)) {
const decorator = map[name]
const descriptor = getOwnPropertyDescriptor(object, name)
const descriptor = getOwnPropertyDescriptor(prototype, name)
if (typeof decorator === 'function' || Array.isArray(decorator)) {
descriptor.value = applyDecorator(decorator, descriptor.value)
} else {
@@ -33,11 +30,10 @@ function decorateObject(object, map) {
}
}
defineProperty(object, name, descriptor)
defineProperty(prototype, name, descriptor)
}
return object
return klass
}
exports.decorateObject = decorateObject
exports.perInstance = function perInstance(fn, decorator, ...args) {
const map = new WeakMap()

View File

@@ -20,7 +20,7 @@
"url": "https://vates.fr"
},
"license": "ISC",
"version": "2.1.0",
"version": "2.0.0",
"engines": {
"node": ">=8.10"
},

View File

@@ -20,9 +20,6 @@ function assertListeners(t, event, listeners) {
}
t.beforeEach(function (t) {
// work around https://github.com/tapjs/tapjs/issues/998
t.context = {}
t.context.ee = new EventEmitter()
t.context.em = new EventListenersManager(t.context.ee)
})

View File

@@ -38,9 +38,9 @@
"version": "1.0.1",
"scripts": {
"postversion": "npm publish --access public",
"test": "tap --allow-incomplete-coverage"
"test": "tap --branches=72"
},
"devDependencies": {
"tap": "^18.7.0"
"tap": "^16.2.0"
}
}

View File

@@ -1,28 +0,0 @@
Mount a VHD generated by Xen Orchestra to the filesystem
### Library
```js
import { mount } from 'fuse-vhd'
// return a disposable, see promise-toolbox/Disposable
// unmount automatically when disposable is disposed
// in case of differencing VHD, it mounts the full chain
await mount(handler, diskId, mountPoint)
```
### cli
From the install folder:
```
cli.mjs <remoteUrl> <vhdPathInRemote> <mountPoint>
```
After installing the package:
```
xo-fuse-vhd <remoteUrl> <vhdPathInRemote> <mountPoint>
```
The remoteUrl can be found by using the CLI in `@xen-orchestra/fs`; for example, a local remote will have a URL like `file:///path/to/remote/root`
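As an illustration, a minimal sketch combining both packages (the remote URL and VHD path are placeholders, and importing from `@vates/fuse-vhd` is an assumption based on the package name):
```js
import Disposable from 'promise-toolbox/Disposable'
import { getSyncedHandler } from '@xen-orchestra/fs'
import { mount } from '@vates/fuse-vhd'

// mount a VHD stored on a local remote, read it, then unmount it
await Disposable.use(getSyncedHandler({ url: 'file:///path/to/remote/root' }), async handler => {
  const mounted = await mount(handler, 'xo-vm-backups/<vmUuid>/vdis/<jobId>/<vdiUuid>/<date>.vhd', '/mnt/vhd')
  try {
    // the (possibly differencing) VHD chain is now readable under /mnt/vhd
  } finally {
    await mounted.dispose() // unmounts the filesystem
  }
})
```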

View File

@@ -1,59 +0,0 @@
<!-- DO NOT EDIT MANUALLY, THIS FILE HAS BEEN GENERATED -->
# @vates/fuse-vhd
[![Package Version](https://badgen.net/npm/v/@vates/fuse-vhd)](https://npmjs.org/package/@vates/fuse-vhd) ![License](https://badgen.net/npm/license/@vates/fuse-vhd) [![PackagePhobia](https://badgen.net/bundlephobia/minzip/@vates/fuse-vhd)](https://bundlephobia.com/result?p=@vates/fuse-vhd) [![Node compatibility](https://badgen.net/npm/node/@vates/fuse-vhd)](https://npmjs.org/package/@vates/fuse-vhd)
## Install
Installation of the [npm package](https://npmjs.org/package/@vates/fuse-vhd):
```sh
npm install --save @vates/fuse-vhd
```
## Usage
Mount a VHD generated by Xen Orchestra to the filesystem
### Library
```js
import { mount } from 'fuse-vhd'
// return a disposable, see promise-toolbox/Disposable
// unmount automatically when disposable is disposed
// in case of differencing VHD, it mounts the full chain
await mount(handler, diskId, mountPoint)
```
### cli
From the install folder:
```
cli.mjs <remoteUrl> <vhdPathInRemote> <mountPoint>
```
After installing the package:
```
xo-fuse-vhd <remoteUrl> <vhdPathInRemote> <mountPoint>
```
The remoteUrl can be found by using the CLI in `@xen-orchestra/fs`; for example, a local remote will have a URL like `file:///path/to/remote/root`
## Contributions
Contributions are _very_ welcomed, either on the documentation or on
the code.
You may:
- report any [issue](https://github.com/vatesfr/xen-orchestra/issues)
you've encountered;
- fork and create a pull request.
## License
[ISC](https://spdx.org/licenses/ISC) © [Vates SAS](https://vates.fr)

View File

@@ -1,26 +0,0 @@
#!/usr/bin/env node
import Disposable from 'promise-toolbox/Disposable'
import { getSyncedHandler } from '@xen-orchestra/fs'
import { mount } from './index.mjs'
async function* main([remoteUrl, vhdPathInRemote, mountPoint]) {
if (mountPoint === undefined) {
throw new TypeError('missing arg: cli <remoteUrl> <vhdPathInRemote> <mountPoint>')
}
const handler = yield getSyncedHandler({ url: remoteUrl })
const mounted = await mount(handler, vhdPathInRemote, mountPoint)
let disposePromise
process.on('SIGINT', async () => {
// ensure single dispose
if (!disposePromise) {
disposePromise = mounted.dispose()
}
await disposePromise
process.exit()
})
}
Disposable.wrap(main)(process.argv.slice(2))

View File

@@ -58,7 +58,7 @@ export const mount = Disposable.factory(async function* mount(handler, diskPath,
},
})
return new Disposable(
() => fromCallback(cb => fuse.unmount(cb)),
fromCallback(cb => fuse.mount(cb))
() => fromCallback(() => fuse.unmount()),
fromCallback(() => fuse.mount())
)
})

View File

@@ -1,6 +1,6 @@
{
"name": "@vates/fuse-vhd",
"version": "2.1.0",
"version": "2.0.0",
"license": "ISC",
"private": false,
"homepage": "https://github.com/vatesfr/xen-orchestra/tree/master/@vates/fuse-vhd",
@@ -19,15 +19,11 @@
},
"main": "./index.mjs",
"dependencies": {
"@xen-orchestra/fs": "^4.1.4",
"fuse-native": "^2.2.6",
"lru-cache": "^7.14.0",
"promise-toolbox": "^0.21.0",
"vhd-lib": "^4.9.0"
},
"bin": {
"xo-fuse-vhd": "./cli.mjs"
},
"scripts": {
"postversion": "npm publish --access public"
}

View File

@@ -61,23 +61,22 @@ export default class MultiNbdClient {
async *readBlocks(indexGenerator) {
// default : read all blocks
const readAhead = []
const makeReadBlockPromise = (index, buffer, size) => {
// pass through any pre loaded buffer
const promise = buffer ? Promise.resolve(buffer) : this.readBlock(index, size)
const makeReadBlockPromise = (index, size) => {
const promise = this.readBlock(index, size)
// error is handled during unshift
promise.catch(() => {})
return promise
}
// read all blocks, but try to keep readAheadMaxLength promise waiting ahead
for (const { index, buffer, size } of indexGenerator()) {
for (const { index, size } of indexGenerator()) {
// stack readAheadMaxLength promises before starting to handle the results
if (readAhead.length === this.#readAhead) {
// any error will stop reading blocks
yield readAhead.shift()
}
readAhead.push(makeReadBlockPromise(index, buffer, size))
readAhead.push(makeReadBlockPromise(index, size))
}
while (readAhead.length > 0) {
yield readAhead.shift()

View File

@@ -24,14 +24,14 @@
"@xen-orchestra/async-map": "^0.1.2",
"@xen-orchestra/log": "^0.6.0",
"promise-toolbox": "^0.21.0",
"xen-api": "^2.0.1"
"xen-api": "^2.0.0"
},
"devDependencies": {
"tap": "^18.7.0",
"tap": "^16.3.0",
"tmp": "^0.2.1"
},
"scripts": {
"postversion": "npm publish --access public",
"test-integration": "tap --allow-incomplete-coverage"
"test-integration": "tap --lines 97 --functions 95 --branches 74 --statements 97 tests/*.integ.mjs"
}
}

View File

@@ -1,5 +1,5 @@
import { strict as assert } from 'node:assert'
import test from 'test'
import { describe, it } from 'tap/mocha'
import {
generateHotp,
@@ -11,8 +11,6 @@ import {
verifyTotp,
} from './index.mjs'
const { describe, it } = test
describe('generateSecret', function () {
it('generates a string of 32 chars', async function () {
const secret = generateSecret()

View File

@@ -31,9 +31,9 @@
},
"scripts": {
"postversion": "npm publish --access public",
"test": "node--test"
"test": "tap"
},
"devDependencies": {
"test": "^3.3.0"
"tap": "^16.3.0"
}
}

View File

@@ -1,7 +1,7 @@
'use strict'
const assert = require('assert/strict')
const { describe, it } = require('test')
const { describe, it } = require('tap').mocha
const { every, not, some } = require('./')

View File

@@ -32,9 +32,9 @@
},
"scripts": {
"postversion": "npm publish --access public",
"test": "node--test"
"test": "tap"
},
"devDependencies": {
"test": "^3.3.0"
"tap": "^16.0.1"
}
}

View File

@@ -1,7 +1,7 @@
'use strict'
const assert = require('assert/strict')
const { afterEach, describe, it } = require('test')
const { afterEach, describe, it } = require('tap').mocha
const { AlteredRecordError, AuditCore, MissingRecordError, NULL_ID, Storage } = require('.')

View File

@@ -13,10 +13,10 @@
},
"scripts": {
"postversion": "npm publish --access public",
"test": "node--test"
"test": "tap --lines 67 --functions 92 --branches 52 --statements 67"
},
"dependencies": {
"@vates/decorate-with": "^2.1.0",
"@vates/decorate-with": "^2.0.0",
"@xen-orchestra/log": "^0.6.0",
"golike-defer": "^0.5.1",
"object-hash": "^2.0.1"
@@ -28,6 +28,6 @@
"url": "https://vates.fr"
},
"devDependencies": {
"test": "^3.3.0"
"tap": "^16.0.1"
}
}

View File

@@ -7,8 +7,8 @@
"bugs": "https://github.com/vatesfr/xen-orchestra/issues",
"dependencies": {
"@xen-orchestra/async-map": "^0.1.2",
"@xen-orchestra/backups": "^0.44.6",
"@xen-orchestra/fs": "^4.1.4",
"@xen-orchestra/backups": "^0.44.3",
"@xen-orchestra/fs": "^4.1.3",
"filenamify": "^6.0.0",
"getopts": "^2.2.5",
"lodash": "^4.17.15",

View File

@@ -160,10 +160,10 @@ export class ImportVmBackup {
// update the stream with the negative vhd stream
stream = await negativeVhd.stream()
vdis[vdiRef].baseVdi = snapshotCandidate
} catch (error) {
} catch (err) {
// can be a broken VHD chain, a vhd chain with a key backup, ....
// not an unrecoverable error, don't dispose parentVhd, and fall back to a full restore
warn(`can't use differential restore`, { error })
warn(`can't use differential restore`, err)
disposableDescendants?.dispose()
}
}

View File

@@ -35,8 +35,6 @@ export const DIR_XO_CONFIG_BACKUPS = 'xo-config-backups'
export const DIR_XO_POOL_METADATA_BACKUPS = 'xo-pool-metadata-backups'
const IMMUTABILTY_METADATA_FILENAME = '/immutability.json'
const { debug, warn } = createLogger('xo:backups:RemoteAdapter')
const compareTimestamp = (a, b) => a.timestamp - b.timestamp
@@ -191,14 +189,13 @@ export class RemoteAdapter {
// check if we will be allowed to merge a vhd created in this adapter
// with the vhd at path `path`
async isMergeableParent(packedParentUid, path) {
return await Disposable.use(VhdSynthetic.fromVhdChain(this.handler, path), vhd => {
return await Disposable.use(openVhd(this.handler, path), vhd => {
// this baseUuid is not linked with this vhd
if (!vhd.footer.uuid.equals(packedParentUid)) {
return false
}
// check if all the chain is composed of vhd directory
const isVhdDirectory = vhd.checkVhdsClass(VhdDirectory)
const isVhdDirectory = vhd instanceof VhdDirectory
return isVhdDirectory
? this.useVhdDirectory() && this.#getCompressionType() === vhd.compressionType
: !this.useVhdDirectory()
@@ -752,37 +749,10 @@ export class RemoteAdapter {
}
async readVmBackupMetadata(path) {
let json
let isImmutable = false
let remoteIsImmutable = false
// if the remote is immutable, check if this metadata is also immutable
try {
// this file is not encrypted
await this._handler._readFile(IMMUTABILTY_METADATA_FILENAME)
remoteIsImmutable = true
} catch (error) {
if (error.code !== 'ENOENT') {
throw error
}
}
try {
// this will trigger an EPERM error if the file is immutable
json = await this.handler.readFile(path, { flag: 'r+' })
// s3 handler don't respect flags
} catch (err) {
// retry without triggering the immutability check, only on an immutable remote
if (err.code === 'EPERM' && remoteIsImmutable) {
isImmutable = true
json = await this._handler.readFile(path, { flag: 'r' })
} else {
throw err
}
}
// _filename is a private field used to compute the backup id
//
// it's enumerable to make it cacheable
const metadata = { ...JSON.parse(json), _filename: path, isImmutable }
const metadata = { ...JSON.parse(await this._handler.readFile(path)), _filename: path }
// backups created on XenServer < 7.1 via JSON in XML-RPC transports have boolean values encoded as integers, which make them unusable with more recent XAPIs
if (typeof metadata.vm.is_a_template === 'number') {

View File

@@ -79,16 +79,9 @@ export async function exportIncrementalVm(
$SR$uuid: vdi.$SR.uuid,
}
let changedBlocks
console.log('CBT ? ', vdi.cbt_enabled,vdiRef,baseVdi?.$ref)
if (vdi.cbt_enabled && baseVdi?.$ref) {
// @todo log errors and fallback to default mode
changedBlocks = await vdi.$listChangedBlock(baseVdi?.$ref)
}
streams[`${vdiRef}.vhd`] = await vdi.$exportContent({
baseRef: baseVdi?.$ref,
cancelToken,
changedBlocks,
format: 'vhd',
nbdConcurrency,
preferNbd,

View File

@@ -2,7 +2,6 @@ import { asyncEach } from '@vates/async-each'
import { decorateMethodsWith } from '@vates/decorate-with'
import { defer } from 'golike-defer'
import assert from 'node:assert'
import * as UUID from 'uuid'
import isVhdDifferencingDisk from 'vhd-lib/isVhdDifferencingDisk.js'
import mapValues from 'lodash/mapValues.js'
@@ -10,48 +9,11 @@ import { AbstractRemote } from './_AbstractRemote.mjs'
import { forkDeltaExport } from './_forkDeltaExport.mjs'
import { IncrementalRemoteWriter } from '../_writers/IncrementalRemoteWriter.mjs'
import { Task } from '../../Task.mjs'
import { Disposable } from 'promise-toolbox'
import { openVhd } from 'vhd-lib'
import { getVmBackupDir } from '../../_getVmBackupDir.mjs'
class IncrementalRemoteVmBackupRunner extends AbstractRemote {
_getRemoteWriter() {
return IncrementalRemoteWriter
}
async _selectBaseVm(metadata) {
// for each disk , get the parent
const baseUuidToSrcVdi = new Map()
// no previous backup for a base( =key) backup
if (metadata.isBase) {
return
}
await asyncEach(Object.entries(metadata.vdis), async ([id, vdi]) => {
const isDifferencing = metadata.isVhdDifferencing[`${id}.vhd`]
if (isDifferencing) {
const vmDir = getVmBackupDir(metadata.vm.uuid)
const path = `${vmDir}/${metadata.vhds[id]}`
// don't catch error : we can't recover if the source vhd are missing
await Disposable.use(openVhd(this._sourceRemoteAdapter._handler, path), vhd => {
baseUuidToSrcVdi.set(UUID.stringify(vhd.header.parentUuid), vdi.$snapshot_of$uuid)
})
}
})
const presentBaseVdis = new Map(baseUuidToSrcVdi)
await this._callWriters(
writer => presentBaseVdis.size !== 0 && writer.checkBaseVdis(presentBaseVdis),
'writer.checkBaseVdis()',
false
)
// check if the parent vdi are present in all the remotes
baseUuidToSrcVdi.forEach((srcVdiUuid, baseUuid) => {
if (!presentBaseVdis.has(baseUuid)) {
throw new Error(`Missing vdi ${baseUuid} which is a base for a delta`)
}
})
// yeah , let's go
}
async _run($defer) {
const transferList = await this._computeTransferList(({ mode }) => mode === 'delta')
await this._callWriters(async writer => {
@@ -64,7 +26,7 @@ class IncrementalRemoteVmBackupRunner extends AbstractRemote {
if (transferList.length > 0) {
for (const metadata of transferList) {
assert.strictEqual(metadata.mode, 'delta')
await this._selectBaseVm(metadata)
await this._callWriters(writer => writer.prepare({ isBase: metadata.isBase }), 'writer.prepare()')
const incrementalExport = await this._sourceRemoteAdapter.readIncrementalVmBackup(metadata, undefined, {
useChain: false,
@@ -88,17 +50,6 @@ class IncrementalRemoteVmBackupRunner extends AbstractRemote {
}),
'writer.transfer()'
)
// this will update parent name with the needed alias
await this._callWriters(
writer =>
writer.updateUuidAndChain({
isVhdDifferencing,
timestamp: metadata.timestamp,
vdis: incrementalExport.vdis,
}),
'writer.updateUuidAndChain()'
)
await this._callWriters(writer => writer.cleanup(), 'writer.cleanup()')
// for healthcheck
this._tags = metadata.vm.tags

View File

@@ -78,18 +78,6 @@ export const IncrementalXapi = class IncrementalXapiVmBackupRunner extends Abstr
'writer.transfer()'
)
// we want to control the uuid of the vhd in the chain
// and ensure they are correctly chained
await this._callWriters(
writer =>
writer.updateUuidAndChain({
isVhdDifferencing,
timestamp,
vdis: deltaExport.vdis,
}),
'writer.updateUuidAndChain()'
)
this._baseVm = exportedVm
if (baseVm !== undefined) {
@@ -145,7 +133,7 @@ export const IncrementalXapi = class IncrementalXapiVmBackupRunner extends Abstr
])
const srcVdi = srcVdis[snapshotOf]
if (srcVdi !== undefined) {
baseUuidToSrcVdi.set(baseUuid, srcVdi.uuid)
baseUuidToSrcVdi.set(baseUuid, srcVdi)
} else {
debug('ignore snapshot VDI because no longer present on VM', {
vdi: baseUuid,
@@ -166,18 +154,18 @@ export const IncrementalXapi = class IncrementalXapiVmBackupRunner extends Abstr
}
const fullVdisRequired = new Set()
baseUuidToSrcVdi.forEach((srcVdiUuid, baseUuid) => {
baseUuidToSrcVdi.forEach((srcVdi, baseUuid) => {
if (presentBaseVdis.has(baseUuid)) {
debug('found base VDI', {
base: baseUuid,
vdi: srcVdiUuid,
vdi: srcVdi.uuid,
})
} else {
debug('missing base VDI', {
base: baseUuid,
vdi: srcVdiUuid,
vdi: srcVdi.uuid,
})
fullVdisRequired.add(srcVdiUuid)
fullVdisRequired.add(srcVdi.uuid)
}
})

View File

@@ -193,17 +193,6 @@ export const AbstractXapi = class AbstractXapiVmBackupRunner extends Abstract {
const allSettings = this.job.settings
const baseSettings = this._baseSettings
const baseVmRef = this._baseVm?.$ref
if (this._settings.deltaComputeMode === 'CBT' && this._exportedVm?.$ref && this._exportedVm?.$ref != this._vm.$ref) {
console.log('WILL PURGE',this._exportedVm?.$ref)
const xapi = this._xapi
const vdiRefs = await this._xapi.VM_getDisks(this._exportedVm?.$ref)
await xapi.call('VM.destroy',this._exportedVm.$ref)
// @todo: ensure it is really the snapshot
for (const vdiRef of vdiRefs) {
// @todo handle error
await xapi.VDI_dataDestroy(vdiRef)
}
}
const snapshotsPerSchedule = groupBy(this._jobSnapshots, _ => _.other_config['xo:backup:schedule'])
const xapi = this._xapi
@@ -219,8 +208,6 @@ export const AbstractXapi = class AbstractXapiVmBackupRunner extends Abstract {
}
})
})
}
async copy() {
@@ -239,22 +226,6 @@ export const AbstractXapi = class AbstractXapiVmBackupRunner extends Abstract {
throw new Error('Not implemented')
}
async enableCbt() {
// for each disk of the VM , enable CBT
if (this._settings.deltaComputeMode !== 'CBT') {
return
}
const vm = this._vm
const xapi = this._xapi
console.log(vm.VBDs)
const vdiRefs = await vm.$getDisks(vm.VBDs)
for (const vdiRef of vdiRefs) {
// @todo handle error
await xapi.VDI_enableChangeBlockTracking(vdiRef)
}
// @todo : when do we disable CBT ?
}
async run($defer) {
const settings = this._settings
assert(
@@ -275,7 +246,7 @@ export const AbstractXapi = class AbstractXapiVmBackupRunner extends Abstract {
await this._cleanMetadata()
await this._removeUnusedSnapshots()
await this.enableCbt()
const vm = this._vm
const isRunning = vm.power_state === 'Running'
const startAfter = isRunning && (settings.offlineBackup ? 'backup' : settings.offlineSnapshot && 'snapshot')
@@ -296,7 +267,6 @@ export const AbstractXapi = class AbstractXapiVmBackupRunner extends Abstract {
await this._exportedVm.update_blocked_operations({ pool_migrate: reason, migrate_send: reason })
try {
await this._copy()
// @todo if CBT is enabled : should call vdi.datadestroy on snapshot here
} finally {
await this._exportedVm.update_blocked_operations({ pool_migrate, migrate_send })
}

View File

@@ -1,15 +1,17 @@
import assert from 'node:assert'
import mapValues from 'lodash/mapValues.js'
import ignoreErrors from 'promise-toolbox/ignoreErrors'
import { asyncEach } from '@vates/async-each'
import { asyncMap } from '@xen-orchestra/async-map'
import { chainVhd, openVhd } from 'vhd-lib'
import { chainVhd, checkVhdChain, openVhd, VhdAbstract } from 'vhd-lib'
import { createLogger } from '@xen-orchestra/log'
import { decorateClass } from '@vates/decorate-with'
import { defer } from 'golike-defer'
import { dirname, basename } from 'node:path'
import { dirname } from 'node:path'
import { formatFilenameDate } from '../../_filenameDate.mjs'
import { getOldEntries } from '../../_getOldEntries.mjs'
import { TAG_BASE_DELTA } from '../../_incrementalVm.mjs'
import { Task } from '../../Task.mjs'
import { MixinRemoteWriter } from './_MixinRemoteWriter.mjs'
@@ -21,45 +23,42 @@ import { Disposable } from 'promise-toolbox'
const { warn } = createLogger('xo:backups:DeltaBackupWriter')
export class IncrementalRemoteWriter extends MixinRemoteWriter(AbstractIncrementalWriter) {
#parentVdiPaths
#vhds
async checkBaseVdis(baseUuidToSrcVdi) {
this.#parentVdiPaths = {}
const { handler } = this._adapter
const adapter = this._adapter
const vdisDir = `${this._vmBackupDir}/vdis/${this._job.id}`
await asyncMap(baseUuidToSrcVdi, async ([baseUuid, srcVdiUuid]) => {
let parentDestPath
const vhdDir = `${vdisDir}/${srcVdiUuid}`
await asyncMap(baseUuidToSrcVdi, async ([baseUuid, srcVdi]) => {
let found = false
try {
const vhds = await handler.list(vhdDir, {
const vhds = await handler.list(`${vdisDir}/${srcVdi.uuid}`, {
filter: _ => _[0] !== '.' && _.endsWith('.vhd'),
ignoreMissing: true,
prependDir: true,
})
const packedBaseUuid = packUuid(baseUuid)
// the last one is probably the right one
for (let i = vhds.length - 1; i >= 0 && parentDestPath === undefined; i--) {
const path = vhds[i]
await asyncMap(vhds, async path => {
try {
if (await adapter.isMergeableParent(packedBaseUuid, path)) {
parentDestPath = path
}
await checkVhdChain(handler, path)
// Warning, this should not be written as found = found || await adapter.isMergeableParent(packedBaseUuid, path)
//
// since all the checks of a path are done in parallel, found would contain
// only the last answer of isMergeableParent, which is probably not the right one
// this led to the support tickets https://help.vates.fr/#ticket/zoom/4751, 4729, 4665 and 4300
const isMergeable = await adapter.isMergeableParent(packedBaseUuid, path)
found = found || isMergeable
} catch (error) {
warn('checkBaseVdis', { error })
await ignoreErrors.call(VhdAbstract.unlink(handler, path))
}
}
})
} catch (error) {
warn('checkBaseVdis', { error })
}
// no usable parent => the runner will have to decide to fall back to a full or stop backup
if (parentDestPath === undefined) {
if (!found) {
baseUuidToSrcVdi.delete(baseUuid)
} else {
this.#parentVdiPaths[vhdDir] = parentDestPath
}
})
}
@@ -124,44 +123,6 @@ export class IncrementalRemoteWriter extends MixinRemoteWriter(AbstractIncrement
}
}
async updateUuidAndChain({ isVhdDifferencing, vdis }) {
assert.notStrictEqual(
this.#vhds,
undefined,
'_transfer must be called before updateUuidAndChain for incremental backups'
)
const parentVdiPaths = this.#parentVdiPaths
const { handler } = this._adapter
const vhds = this.#vhds
await asyncEach(Object.entries(vdis), async ([id, vdi]) => {
const isDifferencing = isVhdDifferencing[`${id}.vhd`]
const path = `${this._vmBackupDir}/${vhds[id]}`
if (isDifferencing) {
assert.notStrictEqual(
parentVdiPaths,
'checkbasevdi must be called before updateUuidAndChain for incremental backups'
)
const parentPath = parentVdiPaths[dirname(path)]
// we are in a incremental backup
// we already computed the chain in checkBaseVdis
assert.notStrictEqual(parentPath, undefined, 'A differential VHD must have a parent')
// forbid any kind of loop
assert.ok(basename(parentPath) < basename(path), `vhd must be sorted to be chained`)
await chainVhd(handler, parentPath, handler, path)
}
// set the correct UUID in the VHD if needed
await Disposable.use(openVhd(handler, path), async vhd => {
if (!vhd.footer.uuid.equals(packUuid(vdi.uuid))) {
vhd.footer.uuid = packUuid(vdi.uuid)
await vhd.readBlockAllocationTable() // required by writeFooter()
await vhd.writeFooter()
}
})
})
}
async _deleteOldEntries() {
const adapter = this._adapter
const oldEntries = this._oldEntries
@@ -180,10 +141,14 @@ export class IncrementalRemoteWriter extends MixinRemoteWriter(AbstractIncrement
const jobId = job.id
const handler = adapter.handler
let metadataContent = await this._isAlreadyTransferred(timestamp)
if (metadataContent !== undefined) {
// @todo: should skip the backup while being careful not to leave the forked stream stuck
Task.info('This backup has already been transfered')
}
const basename = formatFilenameDate(timestamp)
// update this.#vhds before eventually skipping transfer, so that
// updateUuidAndChain has all the mandatory data
const vhds = (this.#vhds = mapValues(
const vhds = mapValues(
deltaExport.vdis,
vdi =>
`vdis/${jobId}/${
@@ -193,15 +158,7 @@ export class IncrementalRemoteWriter extends MixinRemoteWriter(AbstractIncrement
vdi.uuid
: vdi.$snapshot_of$uuid
}/${adapter.getVhdFileName(basename)}`
))
let metadataContent = await this._isAlreadyTransferred(timestamp)
if (metadataContent !== undefined) {
// skip the backup while being careful not to leave the forked stream stuck
Task.info('This backup has already been transfered')
Object.values(deltaExport.streams).forEach(stream => stream.destroy())
return { size: 0 }
}
)
metadataContent = {
isVhdDifferencing,
@@ -217,13 +174,38 @@ export class IncrementalRemoteWriter extends MixinRemoteWriter(AbstractIncrement
vm,
vmSnapshot,
}
const { size } = await Task.run({ name: 'transfer' }, async () => {
let transferSize = 0
await asyncEach(
Object.keys(deltaExport.vdis),
async id => {
Object.entries(deltaExport.vdis),
async ([id, vdi]) => {
const path = `${this._vmBackupDir}/${vhds[id]}`
const isDifferencing = isVhdDifferencing[`${id}.vhd`]
let parentPath
if (isDifferencing) {
const vdiDir = dirname(path)
parentPath = (
await handler.list(vdiDir, {
filter: filename => filename[0] !== '.' && filename.endsWith('.vhd'),
prependDir: true,
})
)
.sort()
.pop()
assert.notStrictEqual(
parentPath,
undefined,
`missing parent of ${id} in ${dirname(path)}, looking for ${vdi.other_config[TAG_BASE_DELTA]}`
)
parentPath = parentPath.slice(1) // remove leading slash
// TODO remove when this has been done before the export
await checkVhd(handler, parentPath)
}
// don't write it as transferSize += await async function
// since i += await asyncFun leads to a race condition
// as explained: https://eslint.org/docs/latest/rules/require-atomic-updates
@@ -235,6 +217,17 @@ export class IncrementalRemoteWriter extends MixinRemoteWriter(AbstractIncrement
writeBlockConcurrency: this._config.writeBlockConcurrency,
})
transferSize += transferSizeOneDisk
if (isDifferencing) {
await chainVhd(handler, parentPath, handler, path)
}
// set the correct UUID in the VHD
await Disposable.use(openVhd(handler, path), async vhd => {
vhd.footer.uuid = packUuid(vdi.uuid)
await vhd.readBlockAllocationTable() // required by writeFooter()
await vhd.writeFooter()
})
},
{
concurrency: settings.diskPerVmConcurrency,

View File

@@ -1,4 +1,3 @@
import assert from 'node:assert'
import { asyncMap, asyncMapSettled } from '@xen-orchestra/async-map'
import ignoreErrors from 'promise-toolbox/ignoreErrors'
import { formatDateTime } from '@xen-orchestra/xapi'
@@ -15,7 +14,6 @@ import find from 'lodash/find.js'
export class IncrementalXapiWriter extends MixinXapiWriter(AbstractIncrementalWriter) {
async checkBaseVdis(baseUuidToSrcVdi, baseVm) {
assert.notStrictEqual(baseVm, undefined)
const sr = this._sr
const replicatedVm = listReplicatedVms(sr.$xapi, this._job.id, sr.uuid, this._vmUuid).find(
vm => vm.other_config[TAG_COPY_SRC] === baseVm.uuid
@@ -38,9 +36,7 @@ export class IncrementalXapiWriter extends MixinXapiWriter(AbstractIncrementalWr
}
}
}
updateUuidAndChain() {
// nothing to do, the chaining is not modified in this case
}
prepare({ isFull }) {
// create the task related to this export and ensure all methods are called in this context
const task = new Task({

View File

@@ -5,10 +5,6 @@ export class AbstractIncrementalWriter extends AbstractWriter {
throw new Error('Not implemented')
}
updateUuidAndChain() {
throw new Error('Not implemented')
}
cleanup() {
throw new Error('Not implemented')
}

View File

@@ -113,13 +113,13 @@ export const MixinRemoteWriter = (BaseClass = Object) =>
)
}
async _isAlreadyTransferred(timestamp) {
_isAlreadyTransferred(timestamp) {
const vmUuid = this._vmUuid
const adapter = this._adapter
const backupDir = getVmBackupDir(vmUuid)
try {
const actualMetadata = JSON.parse(
await adapter._handler.readFile(`${backupDir}/${formatFilenameDate(timestamp)}.json`)
adapter._handler.readFile(`${backupDir}/${formatFilenameDate(timestamp)}.json`)
)
return actualMetadata
} catch (error) {}

View File

@@ -230,7 +230,6 @@ Settings are described in [`@xen-orchestra/backups/\_runners/VmsXapi.mjs`](http
- `checkBaseVdis(baseUuidToSrcVdi, baseVm)`
- `prepare({ isFull })`
- `transfer({ timestamp, deltaExport, sizeContainers })`
- `updateUuidAndChain({ isVhdDifferencing, vdis })`
- `cleanup()`
- `healthCheck()` // is not executed if no health check sr or tag doesn't match
- **Full**

View File

@@ -2,21 +2,8 @@ import mapValues from 'lodash/mapValues.js'
import { dirname } from 'node:path'
function formatVmBackup(backup) {
const { isVhdDifferencing, vmSnapshot } = backup
const { isVhdDifferencing } = backup
let differencingVhds
let dynamicVhds
// some backups don't use snapshots, therefore cannot be with memory
const withMemory = vmSnapshot !== undefined && vmSnapshot.suspend_VDI !== 'OpaqueRef:NULL'
// isVhdDifferencing is either undefined or an object
if (isVhdDifferencing !== undefined) {
differencingVhds = Object.values(isVhdDifferencing).filter(t => t).length
dynamicVhds = Object.values(isVhdDifferencing).filter(t => !t).length
if (withMemory) {
// the suspend VDI (memory) is always a dynamic VHD
dynamicVhds -= 1
}
}
return {
disks:
backup.vhds === undefined
@@ -31,7 +18,6 @@ function formatVmBackup(backup) {
}),
id: backup.id,
isImmutable: backup.isImmutable,
jobId: backup.jobId,
mode: backup.mode,
scheduleId: backup.scheduleId,
@@ -42,9 +28,9 @@ function formatVmBackup(backup) {
name_label: backup.vm.name_label,
},
differencingVhds,
dynamicVhds,
withMemory,
// isVhdDifferencing is either undefined or an object
differencingVhds: isVhdDifferencing && Object.values(isVhdDifferencing).filter(t => t).length,
dynamicVhds: isVhdDifferencing && Object.values(isVhdDifferencing).filter(t => !t).length,
}
}

View File

@@ -8,7 +8,7 @@
"type": "git",
"url": "https://github.com/vatesfr/xen-orchestra.git"
},
"version": "0.44.6",
"version": "0.44.3",
"engines": {
"node": ">=14.18"
},
@@ -22,13 +22,13 @@
"@vates/async-each": "^1.0.0",
"@vates/cached-dns.lookup": "^1.0.0",
"@vates/compose": "^2.1.0",
"@vates/decorate-with": "^2.1.0",
"@vates/decorate-with": "^2.0.0",
"@vates/disposable": "^0.1.5",
"@vates/fuse-vhd": "^2.1.0",
"@vates/fuse-vhd": "^2.0.0",
"@vates/nbd-client": "^3.0.0",
"@vates/parse-duration": "^0.1.1",
"@xen-orchestra/async-map": "^0.1.2",
"@xen-orchestra/fs": "^4.1.4",
"@xen-orchestra/fs": "^4.1.3",
"@xen-orchestra/log": "^0.6.0",
"@xen-orchestra/template": "^0.1.0",
"app-conf": "^2.3.0",
@@ -45,7 +45,7 @@
"tar": "^6.1.15",
"uuid": "^9.0.0",
"vhd-lib": "^4.9.0",
"xen-api": "^2.0.1",
"xen-api": "^2.0.0",
"yazl": "^2.5.1"
},
"devDependencies": {
@@ -56,7 +56,7 @@
"tmp": "^0.2.1"
},
"peerDependencies": {
"@xen-orchestra/xapi": "^4.2.0"
"@xen-orchestra/xapi": "^4.1.0"
},
"license": "AGPL-3.0-or-later",
"author": {

View File

@@ -18,7 +18,7 @@
"preferGlobal": true,
"dependencies": {
"golike-defer": "^0.5.1",
"xen-api": "^2.0.1"
"xen-api": "^2.0.0"
},
"scripts": {
"postversion": "npm publish"

View File

@@ -1,7 +1,7 @@
{
"private": false,
"name": "@xen-orchestra/fs",
"version": "4.1.4",
"version": "4.1.3",
"license": "AGPL-3.0-or-later",
"description": "The File System for Xen Orchestra backups.",
"homepage": "https://github.com/vatesfr/xen-orchestra/tree/master/@xen-orchestra/fs",
@@ -28,7 +28,7 @@
"@sindresorhus/df": "^3.1.1",
"@vates/async-each": "^1.0.0",
"@vates/coalesce-calls": "^0.1.0",
"@vates/decorate-with": "^2.1.0",
"@vates/decorate-with": "^2.0.0",
"@vates/read-chunk": "^1.2.0",
"@xen-orchestra/log": "^0.6.0",
"bind-property-descriptor": "^2.0.0",

View File

@@ -364,7 +364,7 @@ export default class RemoteHandlerAbstract {
let data
try {
// this file is not encrypted
data = await this._readFile(normalizePath(ENCRYPTION_DESC_FILENAME))
data = await this._readFile(normalizePath(ENCRYPTION_DESC_FILENAME), 'utf-8')
const json = JSON.parse(data)
encryptionAlgorithm = json.algorithm
} catch (error) {
@@ -377,7 +377,7 @@ export default class RemoteHandlerAbstract {
try {
this.#rawEncryptor = _getEncryptor(encryptionAlgorithm, this._remote.encryptionKey)
// this file is encrypted
const data = await this.__readFile(ENCRYPTION_METADATA_FILENAME)
const data = await this.__readFile(ENCRYPTION_METADATA_FILENAME, 'utf-8')
JSON.parse(data)
} catch (error) {
// can be ENOENT, a bad algorithm, or broken JSON (bad key or algorithm)

View File

@@ -171,12 +171,7 @@ export default class LocalHandler extends RemoteHandlerAbstract {
}
}
async _readFile(file, { flags, ...options } = {}) {
// contrary to createReadStream, readFile expects the singular `flag`
if (flags !== undefined) {
options.flag = flags
}
async _readFile(file, options) {
const filePath = this.getFilePath(file)
return await this.#addSyncStackTrace(retry, () => fs.readFile(filePath, options), this.#retriesOnEagain)
}

View File

@@ -20,7 +20,5 @@ export function split(path) {
return parts
}
// paths are made absolute, otherwise relative() would resolve them against the working directory
export const relativeFromFile = (file, path) => relative(dirname(normalize(file)), normalize(path))
export const relativeFromFile = (file, path) => relative(dirname(file), path)
export const resolveFromFile = (file, path) => resolve('/', dirname(file), path).slice(1)

View File

@@ -1,17 +0,0 @@
import { describe, it } from 'test'
import { strict as assert } from 'assert'
import { relativeFromFile } from './path.js'
describe('relativeFromFile()', function () {
for (const [title, args] of Object.entries({
'file absolute and path absolute': ['/foo/bar/file.vhd', '/foo/baz/path.vhd'],
'file relative and path absolute': ['foo/bar/file.vhd', '/foo/baz/path.vhd'],
'file absolute and path relative': ['/foo/bar/file.vhd', 'foo/baz/path.vhd'],
'file relative and path relative': ['foo/bar/file.vhd', 'foo/baz/path.vhd'],
})) {
it('works with ' + title, function () {
assert.equal(relativeFromFile(...args), '../baz/path.vhd')
})
}
})

View File

@@ -1,10 +0,0 @@
### make a remote immutable
Launch the `xo-immutable-remote` command. The configuration is stored in the config file.
This script must be kept running to make files immutable reliably.
### make files mutable
Launch the `xo-lift-remote-immutability` CLI. The configuration is stored in the config file.
If the config file has a `liftEvery` setting, this script will continue to run and check regularly whether there are files to update.

View File

@@ -1 +0,0 @@
../../scripts/npmignore

View File

@@ -1,41 +0,0 @@
<!-- DO NOT EDIT MANUALLY, THIS FILE HAS BEEN GENERATED -->
# @xen-orchestra/immutable-backups
[![Package Version](https://badgen.net/npm/v/@xen-orchestra/immutable-backups)](https://npmjs.org/package/@xen-orchestra/immutable-backups) ![License](https://badgen.net/npm/license/@xen-orchestra/immutable-backups) [![PackagePhobia](https://badgen.net/bundlephobia/minzip/@xen-orchestra/immutable-backups)](https://bundlephobia.com/result?p=@xen-orchestra/immutable-backups) [![Node compatibility](https://badgen.net/npm/node/@xen-orchestra/immutable-backups)](https://npmjs.org/package/@xen-orchestra/immutable-backups)
## Install
Installation of the [npm package](https://npmjs.org/package/@xen-orchestra/immutable-backups):
```sh
npm install --save @xen-orchestra/immutable-backups
```
## Usage
### make a remote immutable
Launch the `xo-immutable-remote` command. The configuration is stored in the config file.
This script must be kept running to make files immutable reliably.
### make files mutable
Launch the `xo-lift-remote-immutability` CLI. The configuration is stored in the config file.
If the config file has a `liftEvery` setting, this script will continue to run and check regularly whether there are files to update.
## Contributions
Contributions are _very_ welcomed, either on the documentation or on
the code.
You may:
- report any [issue](https://github.com/vatesfr/xen-orchestra/issues)
you've encountered;
- fork and create a pull request.
## License
[AGPL-3.0-or-later](https://spdx.org/licenses/AGPL-3.0-or-later) © [Vates SAS](https://vates.fr)

View File

@@ -1,10 +0,0 @@
import fs from 'node:fs/promises'
import { dirname, join } from 'node:path'
import isBackupMetadata from './isBackupMetadata.mjs'
export default async path => {
if (isBackupMetadata(path)) {
// wipe the VM metadata cache to force XO to update it
await fs.unlink(join(dirname(path), 'cache.json.gz'))
}
}

View File

@@ -1,4 +0,0 @@
import { dirname } from 'node:path'
// check if we are handling a file directly under a vhd directory (bat, header, footer, ...)
export default path => dirname(path).endsWith('.vhd')

View File

@@ -1,46 +0,0 @@
import { load } from 'app-conf'
import { homedir } from 'os'
import { join } from 'node:path'
import ms from 'ms'
const APP_NAME = 'xo-immutable-backups'
const APP_DIR = new URL('.', import.meta.url).pathname
export default async function loadConfig() {
const config = await load(APP_NAME, {
appDir: APP_DIR,
ignoreUnknownFormats: true,
})
if (config.remotes === undefined || config.remotes?.length < 1) {
throw new Error(
'No remotes are configured in the config file, please add at least one [remotes.<remoteid>] with a root property pointing to the absolute path of the remote to watch'
)
}
if (config.liftEvery) {
config.liftEvery = ms(config.liftEvery)
}
for (const [remoteId, { indexPath, immutabilityDuration, root }] of Object.entries(config.remotes)) {
if (!root) {
throw new Error(
`Remote ${remoteId} doesn't have a root property containing the absolute path to the root of a backup repository`
)
}
if (!immutabilityDuration) {
throw new Error(
`Remote ${remoteId} doesn't have an immutabilityDuration property indicating the minimal duration the backups should be protected by immutability`
)
}
if (ms(immutabilityDuration) < ms('1d')) {
throw new Error(
`Remote ${remoteId} immutability duration is smaller than the minimum allowed (1d), current: ${immutabilityDuration}`
)
}
if (!indexPath) {
const basePath = indexPath ?? process.env.XDG_DATA_HOME ?? join(homedir(), '.local', 'share')
const immutabilityIndexPath = join(basePath, APP_NAME, remoteId)
config.remotes[remoteId].indexPath = immutabilityIndexPath
}
config.remotes[remoteId].immutabilityDuration = ms(immutabilityDuration)
}
return config
}

View File

@@ -1,14 +0,0 @@
# how often the lift-immutability script will run to check if
# some files need to be made mutable
liftEvery = 1h
# you can add as many remotes as you want by changing the id (here: remote1)
#[remotes.remote1]
#root = "/mnt/ssd/vhdblock/" # the absolute path of the root of the backup repository
#immutabilityDuration = 7d # mandatory
# optional, defaults to false; if true, the index will be scanned and updated on start, which can be expensive
#rebuildIndexOnStart = true
# the index path is optional, default in XDG_DATA_HOME, or if this is not set, in ~/.local/share
#indexPath = "/var/lib/" # the application name immutable-backup will be appended automatically

View File

@@ -1,33 +0,0 @@
import { describe, it } from 'node:test'
import assert from 'node:assert/strict'
import fs from 'node:fs/promises'
import path from 'node:path'
import { tmpdir } from 'node:os'
import * as Directory from './directory.mjs'
import { rimraf } from 'rimraf'
describe('immutable-backups/file', async () => {
it('really lock a directory', async () => {
const dir = await fs.mkdtemp(path.join(tmpdir(), 'immutable-backups-tests'))
const dataDir = path.join(dir, 'data')
await fs.mkdir(dataDir)
const immutDir = path.join(dir, '.immutable')
const filePath = path.join(dataDir, 'test')
await fs.writeFile(filePath, 'data')
await Directory.makeImmutable(dataDir, immutDir)
assert.strictEqual(await Directory.isImmutable(dataDir), true)
await assert.rejects(() => fs.writeFile(filePath, 'data'))
await assert.rejects(() => fs.appendFile(filePath, 'data'))
await assert.rejects(() => fs.unlink(filePath))
await assert.rejects(() => fs.rename(filePath, filePath + 'copy'))
await assert.rejects(() => fs.writeFile(path.join(dataDir, 'test2'), 'data'))
await assert.rejects(() => fs.rename(dataDir, dataDir + 'copy'))
await Directory.liftImmutability(dataDir, immutDir)
assert.strictEqual(await Directory.isImmutable(dataDir), false)
await fs.writeFile(filePath, 'data')
await fs.appendFile(filePath, 'data')
await fs.unlink(filePath)
await fs.rename(dataDir, dataDir + 'copy')
await rimraf(dir)
})
})

View File

@@ -1,21 +0,0 @@
import execa from 'execa'
import { unindexFile, indexFile } from './fileIndex.mjs'
export async function makeImmutable(dirPath, immutabilityCachePath) {
if (immutabilityCachePath) {
await indexFile(dirPath, immutabilityCachePath)
}
await execa('chattr', ['+i', '-R', dirPath])
}
export async function liftImmutability(dirPath, immutabilityCachePath) {
if (immutabilityCachePath) {
await unindexFile(dirPath, immutabilityCachePath)
}
await execa('chattr', ['-i', '-R', dirPath])
}
export async function isImmutable(path) {
const { stdout } = await execa('lsattr', ['-d', path])
return stdout[4] === 'i'
}

View File

@@ -1,114 +0,0 @@
# Immutability
The goal is to make a remote that XO can write to, but not modify, during the immutability duration set on the remote. That way, it's not possible for XO to delete or encrypt any backup during this period. It protects your backups against ransomware, at least as long as the attacker does not have root access to the remote server.
We target the `governance` type of immutability: **the local root account of the remote server will be able to lift immutability**.
We rely on file system capabilities; they are tested when the protection process starts.
It is compatible with encryption at rest made by XO.
## Prerequisites
The commands must be run as root on the remote, or by a user with the `CAP_LINUX_IMMUTABLE` capability. On start, the protect process writes its status and the immutability duration into the remote's `immutability.json` file.
The `chattr` and `lsattr` commands must be installed on the system.
## Configuring
This package uses app-conf to store its config. The application name is `xo-immutable-backup`. A sample config file is provided in this package.
## Making a file immutable
When marking a file or a folder immutable, it creates an alias file at `<indexPath>/<DayOfFileCreation>/<sha256(fullpath)>`.
`indexPath` can be defined in the config file; otherwise `XDG_DATA_HOME` is used, and if that is not available it falls back to `~/.local/share`.
This index is used when lifting the immutability of the remote: it only looks at `<indexPath>/<DayOfFileCreation>/` folders that are old enough.
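For illustration, a minimal sketch of how such an index entry path can be derived, mirroring the `computeIndexFilePath()` helper from `fileIndex.mjs` shown later in this comparison (the function name here is only illustrative):
```js
import fs from 'node:fs/promises'
import { createHash } from 'node:crypto'
import { join } from 'node:path'

// derive <indexPath>/<DayOfFileCreation>/<sha256(fullpath)> for a protected file
async function indexEntryPath(filePath, indexPath) {
  const { birthtimeMs } = await fs.stat(filePath)
  const day = new Date(birthtimeMs).toISOString().split('T')[0] // e.g. 2024-01-16
  const hash = createHash('sha256').update(filePath).digest('hex')
  return join(indexPath, day, hash)
}
```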
## Real time protecting
On start, the watcher will create the index if it does not exist.
It will also run a check to ensure immutability can work on this remote and handle the easiest issues.
The watching process depends on the backup type, since we don't want to make temporary files and caches immutable.
It won't protect files during upload, only when the files have been completely written to disk. Real time, in this case, means "protecting critical files as soon as possible after they are uploaded".
This can be alleviated by:
- Coupling immutability with encryption to ensure the file is not modified
- Running health checks to ensure the data is exactly the snapshot data
List of protected files:
```js
const PATHS = [
// xo configuration backups
'xo-config-backups/*/*/data',
'xo-config-backups/*/*/data.json',
'xo-config-backups/*/*/metadata.json',
// pool backups
'xo-pool-metadata-backups/*/metadata.json',
'xo-pool-metadata-backups/*/data',
// vm backups , xo-vm-backups/<vmuuid>/
'xo-vm-backups/*/*.json',
'xo-vm-backups/*/*.xva',
'xo-vm-backups/*/*.xva.checksum',
// xo-vm-backups/<vmuuid>/vdis/<jobid>/<vdiUuid>
'xo-vm-backups/*/vdis/*/*/*.vhd', // can be an alias or a vhd file
// for vhd directory :
'xo-vm-backups/*/vdis/*/*/data/*.vhd/bat',
'xo-vm-backups/*/vdis/*/*/data/*.vhd/header',
'xo-vm-backups/*/vdis/*/*/data/*.vhd/footer',
]
```
## Releasing protection on old enough files on a remote
The watcher will periodically check if some files must be unlocked.
## Troubleshooting
### some files are still locked
Add the `rebuildIndexOnStart` option to the config file.
### make remote fully mutable again
- Update the immutability setting with a 0 duration
- Launch the `liftProtection` CLI
- Remove the `protectRemotes` service
### increasing the immutability duration
This will prolong the immutability of files, but won't protect files that are already out of immutability.
### reducing the immutability duration
Change the setting and launch the `liftProtection` CLI, or wait for the next planned execution.
### why are my incremental backups not marked as protected in XO?
For incremental backups to be marked as protected in XO, the entire chain must be under protection. To ensure at least 7 days of backups are protected, you need to set the immutability duration and retention to 14 days and the full backup interval to 7 days.
That means that if the last backup chain is complete (7 backups), it is completely under protection; if not, the preceding chain is also under protection. K are key backups, d are deltas.
```
Kd Kdddddd Kdddddd K # 8 backups protected, 2 chains
K Kdddddd Kdddddd Kd # 9 backups protected, 2 chains
Kdddddd Kdddddd Kdd # 10 backups protected, 2 chains
Kddddd Kdddddd Kddd # 11 backups protected, 2 chains
Kdddd Kdddddd Kdddd # 12 backups protected, 2 chains
Kddd Kdddddd Kddddd # 13 backups protected, 2 chains
Kdd Kdddddd Kdddddd # 7 backups protected, 1 chain since the preceding full is now mutable
Kd Kdddddd Kdddddd K # 8 backups protected, 2 chains
```
### Why doesn't the protect process start?
- it should be run as root or by a user with the `CAP_LINUX_IMMUTABLE` capability
- the underlying file system should support immutability, especially the `chattr` and `lsattr` commands
- logs are in journalctl

View File

@@ -1,29 +0,0 @@
import { describe, it } from 'node:test'
import assert from 'node:assert/strict'
import fs from 'node:fs/promises'
import path from 'node:path'
import * as File from './file.mjs'
import { tmpdir } from 'node:os'
import { rimraf } from 'rimraf'
describe('immutable-backups/file', async () => {
it('really lock a file', async () => {
const dir = await fs.mkdtemp(path.join(tmpdir(), 'immutable-backups-tests'))
const immutDir = path.join(dir, '.immutable')
const filePath = path.join(dir, 'test.ext')
await fs.writeFile(filePath, 'data')
assert.strictEqual(await File.isImmutable(filePath), false)
await File.makeImmutable(filePath, immutDir)
assert.strictEqual(await File.isImmutable(filePath), true)
await assert.rejects(() => fs.writeFile(filePath, 'data'))
await assert.rejects(() => fs.appendFile(filePath, 'data'))
await assert.rejects(() => fs.unlink(filePath))
await assert.rejects(() => fs.rename(filePath, filePath + 'copy'))
await File.liftImmutability(filePath, immutDir)
assert.strictEqual(await File.isImmutable(filePath), false)
await fs.writeFile(filePath, 'data')
await fs.appendFile(filePath, 'data')
await fs.unlink(filePath)
await rimraf(dir)
})
})

View File

@@ -1,24 +0,0 @@
import execa from 'execa'
import { unindexFile, indexFile } from './fileIndex.mjs'
// this work only on linux like systems
// this could work on windows : https://4sysops.com/archives/set-and-remove-the-read-only-file-attribute-with-powershell/
export async function makeImmutable(path, immutabilityCachePath) {
if (immutabilityCachePath) {
await indexFile(path, immutabilityCachePath)
}
await execa('chattr', ['+i', path])
}
export async function liftImmutability(filePath, immutabilityCachePath) {
if (immutabilityCachePath) {
await unindexFile(filePath, immutabilityCachePath)
}
await execa('chattr', ['-i', filePath])
}
export async function isImmutable(path) {
const { stdout } = await execa('lsattr', ['-d', path])
return stdout[4] === 'i'
}

View File

@@ -1,81 +0,0 @@
import { describe, it } from 'node:test'
import assert from 'node:assert/strict'
import fs from 'node:fs/promises'
import path from 'node:path'
import * as FileIndex from './fileIndex.mjs'
import * as Directory from './directory.mjs'
import { tmpdir } from 'node:os'
import { rimraf } from 'rimraf'
describe('immutable-backups/fileIndex', async () => {
it('index File changes', async () => {
const dir = await fs.mkdtemp(path.join(tmpdir(), 'immutable-backups-tests'))
const immutDir = path.join(dir, '.immutable')
const filePath = path.join(dir, 'test.ext')
await fs.writeFile(filePath, 'data')
await FileIndex.indexFile(filePath, immutDir)
await fs.mkdir(path.join(immutDir, 'NOTADATE'))
await fs.writeFile(path.join(immutDir, 'NOTADATE.file'), 'content')
let nb = 0
let index, target
for await ({ index, target } of FileIndex.listOlderTargets(immutDir, 0)) {
assert.strictEqual(true, false, 'Nothing should be eligible for deletion')
}
nb = 0
for await ({ index, target } of FileIndex.listOlderTargets(immutDir, -24 * 60 * 60 * 1000)) {
assert.strictEqual(target, filePath)
await fs.unlink(index)
nb++
}
assert.strictEqual(nb, 1)
await fs.rmdir(path.join(immutDir, 'NOTADATE'))
await fs.rm(path.join(immutDir, 'NOTADATE.file'))
for await ({ index, target } of FileIndex.listOlderTargets(immutDir, -24 * 60 * 60 * 1000)) {
// should remove the empty dir
assert.strictEqual(true, false, 'Nothing should have stayed here')
}
assert.strictEqual((await fs.readdir(immutDir)).length, 0)
await rimraf(dir)
})
it('fails correctly', async () => {
const dir = await fs.mkdtemp(path.join(tmpdir(), 'immutable-backups-tests'))
const immutDir = path.join(dir, '.immutable')
await fs.mkdir(immutDir)
const placeholderFile = path.join(dir, 'test.ext')
await fs.writeFile(placeholderFile, 'data')
await FileIndex.indexFile(placeholderFile, immutDir)
const filePath = path.join(dir, 'test2.ext')
await fs.writeFile(filePath, 'data')
await FileIndex.indexFile(filePath, immutDir)
await assert.rejects(() => FileIndex.indexFile(filePath, immutDir), { code: 'EEXIST' })
await Directory.makeImmutable(immutDir)
await assert.rejects(() => FileIndex.unindexFile(filePath, immutDir), { code: 'EPERM' })
await Directory.liftImmutability(immutDir)
await rimraf(dir)
})
it('handles bomb index files', async () => {
const dir = await fs.mkdtemp(path.join(tmpdir(), 'immutable-backups-tests'))
const immutDir = path.join(dir, '.immutable')
await fs.mkdir(immutDir)
const placeholderFile = path.join(dir, 'test.ext')
await fs.writeFile(placeholderFile, 'data')
await FileIndex.indexFile(placeholderFile, immutDir)
const indexDayDir = path.join(immutDir, '1980,11-28')
await fs.mkdir(indexDayDir)
await fs.writeFile(path.join(indexDayDir, 'big'), Buffer.alloc(2 * 1024 * 1024))
assert.rejects(async () => {
let index, target
for await ({ index, target } of FileIndex.listOlderTargets(immutDir, 0)) {
// should remove the empty dir
assert.strictEqual(true, false, `Nothing should have stayed here, got ${index} ${target}`)
}
})
await rimraf(dir)
})
})

View File

@@ -1,88 +0,0 @@
import { join } from 'node:path'
import { createHash } from 'node:crypto'
import fs from 'node:fs/promises'
import { dirname } from 'path'
const MAX_INDEX_FILE_SIZE = 1024 * 1024
function sha256(content) {
return createHash('sha256').update(content).digest('hex')
}
function formatDate(date) {
return date.toISOString().split('T')[0]
}
async function computeIndexFilePath(path, immutabilityIndexPath) {
const stat = await fs.stat(path)
const date = new Date(stat.birthtimeMs)
const day = formatDate(date)
const hash = sha256(path)
return join(immutabilityIndexPath, day, hash)
}
export async function indexFile(path, immutabilityIndexPath) {
const indexFilePath = await computeIndexFilePath(path, immutabilityIndexPath)
try {
await fs.writeFile(indexFilePath, path, { flag: 'wx' })
} catch (err) {
// missing dir: make it
if (err.code === 'ENOENT') {
await fs.mkdir(dirname(indexFilePath), { recursive: true })
await fs.writeFile(indexFilePath, path)
} else {
throw err
}
}
return indexFilePath
}
export async function unindexFile(path, immutabilityIndexPath) {
try {
const cacheFileName = await computeIndexFilePath(path, immutabilityIndexPath)
await fs.unlink(cacheFileName)
} catch (err) {
if (err.code !== 'ENOENT') {
throw err
}
}
}
export async function* listOlderTargets(immutabilityCachePath, immutabilityDuration) {
// walk all dir by day until the limit day
const limitDate = new Date(Date.now() - immutabilityDuration)
const limitDay = formatDate(limitDate)
const dir = await fs.opendir(immutabilityCachePath)
for await (const dirent of dir) {
if (dirent.isFile()) {
continue
}
// ensure we have a valid date
if (isNaN(new Date(dirent.name))) {
continue
}
// recent enough to be kept
if (dirent.name >= limitDay) {
continue
}
const subDirPath = join(immutabilityCachePath, dirent.name)
const subdir = await fs.opendir(subDirPath)
let nb = 0
for await (const hashFileEntry of subdir) {
const entryFullPath = join(subDirPath, hashFileEntry.name)
const { size } = await fs.stat(entryFullPath)
if (size > MAX_INDEX_FILE_SIZE) {
throw new Error(`Index file at ${entryFullPath} is too big, ${size} bytes `)
}
const targetPath = await fs.readFile(entryFullPath, { encoding: 'utf8' })
yield {
index: entryFullPath,
target: targetPath,
}
nb++
}
// cleanup older folder
if (nb === 0) {
await fs.rmdir(subDirPath)
}
}
}

View File

@@ -1 +0,0 @@
export default path => path.match(/xo-vm-backups\/[^/]+\/[^/]+\.json$/)

View File

@@ -1,37 +0,0 @@
#!/usr/bin/env node
import fs from 'node:fs/promises'
import * as Directory from './directory.mjs'
import { createLogger } from '@xen-orchestra/log'
import { listOlderTargets } from './fileIndex.mjs'
import cleanXoCache from './_cleanXoCache.mjs'
import loadConfig from './_loadConfig.mjs'
const { info, warn } = createLogger('xen-orchestra:immutable-backups:liftProtection')
async function liftRemoteImmutability(immutabilityCachePath, immutabilityDuration) {
for await (const { index, target } of listOlderTargets(immutabilityCachePath, immutabilityDuration)) {
await Directory.liftImmutability(target, immutabilityCachePath)
await fs.unlink(index)
await cleanXoCache(target)
}
}
async function liftImmutability(remotes) {
for (const [remoteId, { indexPath, immutabilityDuration }] of Object.entries(remotes)) {
liftRemoteImmutability(indexPath, immutabilityDuration).catch(err =>
warn('error during liftRemoteImmutability', { err, remoteId, indexPath, immutabilityDuration })
)
}
}
const { liftEvery, remotes } = await loadConfig()
if (liftEvery > 0) {
info('setup watcher for immutability lifting')
setInterval(async () => {
liftImmutability(remotes)
}, liftEvery)
} else {
liftImmutability(remotes)
}
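
The shape of the configuration consumed by loadConfig is not shown in this diff; a plausible example, with keys inferred from the code above and from watchRemote (values are illustrative only):

// hypothetical return value of loadConfig()
const config = {
  liftEvery: 60 * 60 * 1000, // run the lifting pass hourly; 0 or less runs it only once
  remotes: {
    'remote-1': {
      root: '/mnt/remote-1',                          // used by the remote watcher
      indexPath: '/var/lib/xo-immutability/remote-1', // where index entries are written
      immutabilityDuration: 14 * 24 * 60 * 60 * 1000, // how long files stay immutable (ms)
      rebuildIndexOnStart: false,                     // re-index existing files on startup
    },
  },
}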

View File

@@ -1,42 +0,0 @@
{
"private": false,
"name": "@xen-orchestra/immutable-backups",
"homepage": "https://github.com/vatesfr/xen-orchestra/tree/master/@xen-orchestra/immutable-backups",
"bugs": "https://github.com/vatesfr/xen-orchestra/issues",
"repository": {
"directory": "@xen-orchestra/immutable-backups",
"type": "git",
"url": "https://github.com/vatesfr/xen-orchestra.git"
},
"author": {
"name": "Vates SAS",
"url": "https://vates.fr"
},
"bin": {
"xo-immutable-remote": "./protectRemotes.mjs",
"xo-lift-remote-immutability": "./liftProtection.mjs"
},
"license": "AGPL-3.0-or-later",
"version": "1.0.1",
"engines": {
"node": ">=14.0.0"
},
"dependencies": {
"@vates/async-each": "^1.0.0",
"@xen-orchestra/backups": "^0.44.6",
"@xen-orchestra/log": "^0.6.0",
"app-conf": "^2.3.0",
"chokidar": "^3.5.3",
"execa": "^5.0.0",
"ms": "^2.1.3",
"vhd-lib": "^4.7.0"
},
"devDependencies": {
"rimraf": "^5.0.5",
"tap": "^18.6.1"
},
"scripts": {
"postversion": "npm publish --access public",
"test-integration": "tap *.integ.mjs"
}
}

View File

@@ -1,191 +0,0 @@
#!/usr/bin/env node
import fs from 'node:fs/promises'
import * as File from './file.mjs'
import * as Directory from './directory.mjs'
import assert from 'node:assert'
import { dirname, join, sep } from 'node:path'
import { createLogger } from '@xen-orchestra/log'
import chokidar from 'chokidar'
import { indexFile } from './fileIndex.mjs'
import cleanXoCache from './_cleanXoCache.mjs'
import loadConfig from './_loadConfig.mjs'
import isInVhdDirectory from './_isInVhdDirectory.mjs'
const { debug, info, warn } = createLogger('xen-orchestra:immutable-backups:remote')
async function test(remotePath, indexPath) {
await fs.readdir(remotePath)
const testPath = join(remotePath, '.test-immut')
// cleanup
try {
await File.liftImmutability(testPath, indexPath)
await fs.unlink(testPath)
} catch (err) {}
// can create, modify and delete a file
await fs.writeFile(testPath, `test immut ${new Date()}`)
await fs.writeFile(testPath, `test immut change 1 ${new Date()}`)
await fs.unlink(testPath)
// cannot modify or delete an immutable file
await fs.writeFile(testPath, `test immut ${new Date()}`)
await File.makeImmutable(testPath, indexPath)
await assert.rejects(fs.writeFile(testPath, `test immut change 2 ${new Date()}`), { code: 'EPERM' })
await assert.rejects(fs.unlink(testPath), { code: 'EPERM' })
// can modify and delete a file after lifting immutability
await File.liftImmutability(testPath, indexPath)
await fs.writeFile(testPath, `test immut change 3 ${new Date()}`)
await fs.unlink(testPath)
}
async function handleExistingFile(root, indexPath, path) {
try {
// a vhd block directory is completely immutable
if (isInVhdDirectory(path)) {
// this will trigger 3 times per vhd block
const dir = join(root, dirname(path))
if (Directory.isImmutable(dir)) {
await indexFile(dir, indexPath)
}
} else {
// other files are immutable on a per-file basis
const fullPath = join(root, path)
if (File.isImmutable(fullPath)) {
await indexFile(fullPath, indexPath)
}
}
} catch (error) {
if (error.code !== 'EEXIST') {
// other errors (for instance a symbolic link in the tree) are only logged
warn('handleExistingFile', { error })
}
}
}
async function handleNewFile(root, indexPath, pendingVhds, path) {
// with awaitWriteFinish we have complete files here
// we can make them immutable
if (isInVhdDirectory(path)) {
// watching a vhd block
// wait for header/footer and BAT before making this immutable recursively
const splitted = path.split(sep)
const vmUuid = splitted[1]
const vdiUuid = splitted[4]
const uniqPath = `${vmUuid}/${vdiUuid}`
const { existing } = pendingVhds.get(uniqPath) ?? {}
if (existing === undefined) {
pendingVhds.set(uniqPath, { existing: 1, lastModified: Date.now() })
} else {
// we already had two of the key files, and this is the last one
if (existing === 2) {
await Directory.makeImmutable(join(root, dirname(path)), indexPath)
pendingVhds.delete(uniqPath)
} else {
// wait for the remaining key files
pendingVhds.set(uniqPath, { existing: existing + 1, lastModified: Date.now() })
}
}
} else {
const fullFilePath = join(root, path)
await File.makeImmutable(fullFilePath, indexPath)
await cleanXoCache(fullFilePath)
}
}
export async function watchRemote(remoteId, { root, immutabilityDuration, rebuildIndexOnStart = false, indexPath }) {
// create index directory
await fs.mkdir(indexPath, { recursive: true })
// test if fs and index directories are well configured
await test(root, indexPath)
// store the duration and watch status in the immutability.json file of the remote
const settingPath = join(root, 'immutability.json')
try {
// this file is not indexed, so the immutability-lifting script won't make it mutable: lift it manually before rewriting it
await File.liftImmutability(settingPath)
} catch (error) {
// the file may not exist, which is not a problem
info('lifting immutability on current settings', { error })
}
await fs.writeFile(
settingPath,
JSON.stringify({
since: Date.now(),
immutable: true,
duration: immutabilityDuration,
})
)
// no index path in makeImmutable(): the immutability won't be lifted automatically
await File.makeImmutable(settingPath)
// we wait for footer/header AND BAT to be written before locking a vhd directory
// this map allows us to track the vhds whose metadata is still partial
const pendingVhds = new Map()
// periodically clean up the pending vhd map
setInterval(
() => {
pendingVhds.forEach(({ lastModified, existing }, path) => {
if (Date.now() - lastModified > 60 * 60 * 1000) {
pendingVhds.delete(path)
warn(`vhd at ${path} is incomplete since ${lastModified}`, { existing, lastModified, path })
}
})
},
60 * 60 * 1000
)
// watch the remote for any new backup file that needs to be protected
const PATHS = [
'xo-config-backups/*/*/data',
'xo-config-backups/*/*/data.json',
'xo-config-backups/*/*/metadata.json',
'xo-pool-metadata-backups/*/metadata.json',
'xo-pool-metadata-backups/*/data',
// xo-vm-backups/<vmuuid>/
'xo-vm-backups/*/*.json',
'xo-vm-backups/*/*.xva',
'xo-vm-backups/*/*.xva.checksum',
// xo-vm-backups/<vmuuid>/vdis/<jobid>/<vdiUuid>
'xo-vm-backups/*/vdis/*/*/*.vhd', // can be an alias or a vhd file
// for vhd directories:
'xo-vm-backups/*/vdis/*/*/data/*.vhd/bat',
'xo-vm-backups/*/vdis/*/*/data/*.vhd/header',
'xo-vm-backups/*/vdis/*/*/data/*.vhd/footer',
]
let ready = false
const watcher = chokidar.watch(PATHS, {
ignored: [
/(^|[/\\])\../, // ignore dotfiles
/\.lock$/,
],
cwd: root,
recursive: false, // a vhd directory can generate a lot of folders, don't let chokidar choke on this
ignoreInitial: !rebuildIndexOnStart,
depth: 7,
awaitWriteFinish: true,
})
// Add event listeners.
watcher
.on('add', async path => {
debug(`File ${path} has been added ${path.split('/').length}`)
if (ready) {
await handleNewFile(root, indexPath, pendingVhds, path)
} else {
await handleExistingFile(root, indexPath, path)
}
})
.on('error', error => warn(`Watcher error: ${error}`))
.on('ready', () => {
ready = true
info('Ready for changes')
})
}
const { remotes } = await loadConfig()
for (const [remoteId, remote] of Object.entries(remotes)) {
watchRemote(remoteId, remote).catch(err => warn('error during watchRemote', { err, remoteId, remote }))
}
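
One detail worth calling out in handleNewFile: the VM and VDI UUIDs are read from fixed positions in the watched path, which relies on chokidar reporting paths relative to the remote root. A small check of those indices, with hypothetical UUIDs:

import { dirname, sep } from 'node:path'

const relativePath = ['xo-vm-backups', 'vm-uuid-1234', 'vdis', 'job-uuid', 'vdi-uuid-5678', 'data', 'disk.vhd', 'bat'].join(sep)
const parts = relativePath.split(sep)
// parts[1] === 'vm-uuid-1234' (vmUuid), parts[4] === 'vdi-uuid-5678' (vdiUuid)
// once bat, header and footer have all been seen for this pair, the whole vhd directory is locked:
dirname(relativePath) // 'xo-vm-backups/vm-uuid-1234/vdis/job-uuid/vdi-uuid-5678/data/disk.vhd'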

View File

@@ -2,8 +2,8 @@
<html lang="en">
<head>
<meta charset="UTF-8" />
<link rel="icon" href="/favicon.svg" type="image/svg+xml" />
<link rel="manifest" href="/manifest.webmanifest" />
<link rel="icon" href="/favicon.ico" />
<link rel="manifest" href="/manifest.webmanifest">
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
<title>XO Lite</title>
</head>

View File

@@ -14,7 +14,6 @@
"type-check": "vue-tsc --build --force tsconfig.type-check.json"
},
"devDependencies": {
"@csstools/postcss-global-data": "^2.1.1",
"@fontsource/poppins": "^5.0.8",
"@fortawesome/fontawesome-svg-core": "^6.5.1",
"@fortawesome/free-regular-svg-icons": "^6.5.1",
@@ -23,16 +22,16 @@
"@intlify/unplugin-vue-i18n": "^2.0.0",
"@novnc/novnc": "^1.4.0",
"@tsconfig/node18": "^18.2.2",
"strip-json-comments": "^5.0.1",
"@types/d3-time-format": "^4.0.3",
"@types/file-saver": "^2.0.7",
"@types/lodash-es": "^4.17.12",
"@types/node": "^18.19.7",
"@vitejs/plugin-vue": "^5.0.3",
"@types/node": "^18.19.5",
"@vitejs/plugin-vue": "^5.0.2",
"@vue/tsconfig": "^0.5.1",
"@vueuse/core": "^10.7.1",
"@vueuse/math": "^10.7.1",
"@vueuse/shared": "^10.7.1",
"@xen-orchestra/web-core": "*",
"complex-matcher": "^0.7.1",
"d3-time-format": "^4.1.0",
"decorator-synchronized": "^0.6.0",
@@ -52,16 +51,15 @@
"pinia": "^2.1.7",
"placement.js": "^1.0.0-beta.5",
"postcss": "^8.4.33",
"postcss-color-function": "^4.1.0",
"postcss-custom-media": "^10.0.2",
"postcss-nested": "^6.0.1",
"typescript": "~5.3.3",
"typescript": "^5.3.3",
"vite": "^5.0.11",
"vue": "^3.4.13",
"vue": "^3.4.7",
"vue-echarts": "^6.6.8",
"vue-i18n": "^9.9.0",
"vue-router": "^4.2.5",
"vue-tsc": "^1.8.27",
"vue-tsc": "^1.8.22",
"zx": "^7.2.3"
},
"private": true,

View File

@@ -1,10 +1,6 @@
export default {
plugins: {
'@csstools/postcss-global-data': {
files: ['../web-core/lib/assets/css/.globals.pcss'],
},
'postcss-nested': {},
'postcss-custom-media': {},
'postcss-color-function': {},
},
}

Binary file not shown (new image, 4.2 KiB).

View File

@@ -16,6 +16,7 @@
</template>
<script lang="ts" setup>
import favicon from '@/assets/favicon.svg'
import AppHeader from '@/components/AppHeader.vue'
import AppLogin from '@/components/AppLogin.vue'
import AppNavigation from '@/components/AppNavigation.vue'
@@ -31,6 +32,14 @@ import { logicAnd } from '@vueuse/math'
import { computed } from 'vue'
import { useI18n } from 'vue-i18n'
let link = document.querySelector("link[rel~='icon']") as HTMLLinkElement | null
if (link == null) {
link = document.createElement('link')
link.rel = 'icon'
document.getElementsByTagName('head')[0].appendChild(link)
}
link.href = favicon
const xenApiStore = useXenApiStore()
const { pool } = usePoolCollection()
@@ -66,6 +75,10 @@ whenever(
useUnreachableHosts()
</script>
<style lang="postcss">
@import '@/assets/base.css';
</style>
<style lang="postcss" scoped>
.main {
overflow: auto;

View File

@@ -0,0 +1,2 @@
@custom-media --mobile (max-width: 1023px);
@custom-media --desktop (min-width: 1024px);

View File

@@ -0,0 +1,100 @@
@import 'reset.css';
@import 'theme.css';
@import '@fontsource/poppins/400.css';
@import '@fontsource/poppins/500.css';
@import '@fontsource/poppins/600.css';
@import '@fontsource/poppins/700.css';
@import '@fontsource/poppins/900.css';
@import '@fontsource/poppins/400-italic.css';
body {
min-height: 100vh;
font-size: 1.3rem;
text-rendering: optimizeLegibility;
-webkit-font-smoothing: antialiased;
-moz-osx-font-smoothing: grayscale;
color: var(--color-blue-scale-100);
}
a {
color: var(--color-extra-blue-base);
}
code,
code *,
pre {
font-family: SFMono-Regular, Menlo, Monaco, Consolas, 'Liberation Mono', 'Courier New', monospace;
}
.card-view {
padding: 1.2rem;
display: flex;
gap: 2rem;
}
.link {
text-decoration: underline;
color: var(--color-extra-blue-base);
cursor: pointer;
}
.link:hover {
color: var(--color-extra-blue-d20);
}
.link:active,
.link.router-link-active {
color: var(--color-extra-blue-d40);
}
.link.router-link-active {
text-decoration: underline;
}
.context-color-success {
color: var(--color-green-infra-base);
}
.context-color-error {
color: var(--color-red-vates-base);
}
.context-color-warning {
color: var(--color-orange-world-base);
}
.context-color-info {
color: var(--color-extra-blue-base);
}
.context-background-color-success {
background-color: var(--background-color-green-infra);
}
.context-background-color-error {
background-color: var(--background-color-red-vates);
}
.context-background-color-warning {
background-color: var(--background-color-orange-world);
}
.context-background-color-info {
background-color: var(--background-color-extra-blue);
}
.context-border-color-success {
border-color: var(--color-green-infra-base);
}
.context-border-color-error {
border-color: var(--color-red-vates-base);
}
.context-border-color-warning {
border-color: var(--color-orange-world-base);
}
.context-border-color-info {
border-color: var(--color-extra-blue-base);
}

File diff suppressed because one or more lines are too long

(new image, 10 KiB)

View File

@@ -3,10 +3,6 @@ html {
font-size: 10px;
}
body {
font-size: 1.6rem;
}
*,
*::before,
*::after {

View File

@@ -0,0 +1,87 @@
:root {
--color-logo: #282467;
--color-blue-scale-000: #000000;
--color-blue-scale-100: #1a1b38;
--color-blue-scale-200: #595a6f;
--color-blue-scale-300: #9899a5;
--color-blue-scale-400: #e5e5e7;
--color-blue-scale-500: #ffffff;
--color-extra-blue-l60: #d1cefb;
--color-extra-blue-l40: #bbb5f9;
--color-extra-blue-l20: #a39df8;
--color-extra-blue-base: #8f84ff;
--color-extra-blue-d20: #716ac6;
--color-extra-blue-d40: #554f94;
--color-extra-blue-d60: #383563;
--color-green-infra-l60: #b5dbca;
--color-green-infra-l40: #91c9b0;
--color-green-infra-l20: #70b795;
--color-green-infra-base: #55a57b;
--color-green-infra-d20: #438463;
--color-green-infra-d40: #32634a;
--color-green-infra-d60: #214231;
--color-orange-world-l60: #f2cda8;
--color-orange-world-l40: #ebb57d;
--color-orange-world-l20: #e59d56;
--color-orange-world-base: #ef7f18;
--color-orange-world-d20: #bf6612;
--color-orange-world-d40: #864f1f;
--color-orange-world-d60: #5a3514;
--color-red-vates-l60: #dda5a7;
--color-red-vates-l40: #ce787c;
--color-red-vates-l20: #bf4f51;
--color-red-vates-base: #be1621;
--color-red-vates-d20: #8e2221;
--color-red-vates-d40: #6a1919;
--color-red-vates-d60: #471010;
--color-grayscale-200: #585757;
--background-color-primary: #ffffff;
--background-color-secondary: #f6f6f7;
--background-color-extra-blue: #f4f3fe;
--background-color-green-infra: #ecf5f2;
--background-color-orange-world: #fbf2e9;
--background-color-red-vates: #f5e8e9;
--shadow-100: 0 0.1rem 0.1rem rgba(20, 20, 30, 0.06);
--shadow-200: 0 0.1rem 0.3rem rgba(20, 20, 30, 0.1), 0 0.2rem 0.1rem rgba(20, 20, 30, 0.06),
0 0.1rem 0.1rem rgba(20, 20, 30, 0.08);
--shadow-300: 0 0.3rem 0.5rem rgba(20, 20, 30, 0.1), 0 0.1rem 1.8rem rgba(20, 20, 30, 0.06),
0 0.6rem 1rem rgba(20, 20, 30, 0.08);
--shadow-400: 0 1.1rem 1.5rem rgba(20, 20, 30, 0.1), 0 0.9rem 4.6rem rgba(20, 20, 30, 0.06),
0 2.4rem 3.8rem rgba(20, 20, 30, 0.04);
}
:root.dark {
color-scheme: dark;
--color-logo: #e5e5e7;
--color-blue-scale-000: #ffffff;
--color-blue-scale-100: #e5e5e7;
--color-blue-scale-200: #9899a5;
--color-blue-scale-300: #595a6f;
--color-blue-scale-400: #1a1b38;
--color-blue-scale-500: #000000;
--background-color-primary: #14141d;
--background-color-secondary: #17182a;
--background-color-extra-blue: #35335d;
--background-color-green-infra: #243b3d;
--background-color-orange-world: #493328;
--background-color-red-vates: #3c1a28;
--shadow-100: 0 0.1rem 0.1rem rgba(20, 20, 30, 0.12);
--shadow-200: 0 0.1rem 0.3rem rgba(20, 20, 30, 0.2), 0 0.2rem 0.1rem rgba(20, 20, 30, 0.12),
0 0.1rem 0.1rem rgba(20, 20, 30, 0.16);
--shadow-300: 0 0.3rem 0.5rem rgba(20, 20, 30, 0.2), 0 0.1rem 1.8rem rgba(20, 20, 30, 0.12),
0 0.6rem 1rem rgba(20, 20, 30, 0.16);
--shadow-400: 0 1.1rem 1.5rem rgba(20, 20, 30, 0.2), 0 0.9rem 4.6rem rgba(20, 20, 30, 0.12),
0 2.4rem 3.8rem rgba(20, 20, 30, 0.08);
}

View File

@@ -51,14 +51,14 @@ const openSettings = () => router.push({ name: 'settings' })
display: flex;
align-items: center;
padding: 1rem;
color: var(--color-grey-100);
color: var(--color-blue-scale-100);
border: none;
border-radius: 0.8rem;
background-color: var(--background-color-secondary);
gap: 0.8rem;
&:disabled {
color: var(--color-grey-500);
color: var(--color-blue-scale-400);
}
&:not(:disabled) {
@@ -72,7 +72,7 @@ const openSettings = () => router.push({ name: 'settings' })
&:active,
&.active {
color: var(--color-purple-base);
color: var(--color-extra-blue-base);
}
}
}
@@ -86,6 +86,6 @@ const openSettings = () => router.push({ name: 'settings' })
}
.menu-item-logout {
color: var(--color-red-base);
color: var(--color-red-vates-base);
}
</style>

View File

@@ -46,7 +46,7 @@ const { trigger: navigationTrigger } = storeToRefs(navigationStore)
justify-content: space-between;
height: 5.5rem;
padding: 1rem;
border-bottom: 0.1rem solid var(--color-grey-500);
border-bottom: 0.1rem solid var(--color-blue-scale-400);
background-color: var(--background-color-secondary);
img {

View File

@@ -135,7 +135,7 @@ form {
background-color: var(--background-color-secondary);
.error {
color: var(--color-red-base);
color: var(--color-red-vates-base);
}
}
@@ -156,7 +156,7 @@ input {
max-width: 100%;
margin-bottom: 1rem;
padding: 1rem 1.5rem;
border: 1px solid var(--color-grey-500);
border: 1px solid var(--color-blue-scale-400);
border-radius: 0.8rem;
background-color: white;
}

View File

@@ -60,7 +60,7 @@ useEventListener(
}
code:not(.hljs-code) {
background-color: var(--background-color-purple-10);
background-color: var(--background-color-extra-blue);
padding: 0.3rem 0.6rem;
border-radius: 0.6rem;
}
@@ -81,12 +81,12 @@ useEventListener(
}
thead th {
border-bottom: 2px solid var(--color-grey-500);
border-bottom: 2px solid var(--color-blue-scale-400);
background-color: var(--background-color-secondary);
}
tbody td {
border-bottom: 1px solid var(--color-grey-500);
border-bottom: 1px solid var(--color-blue-scale-400);
}
}
@@ -103,11 +103,11 @@ useEventListener(
background-color: transparent;
&:hover {
color: var(--color-purple-base);
color: var(--color-extra-blue-base);
}
&:active {
color: var(--color-purple-d20);
color: var(--color-extra-blue-d20);
}
}
}

View File

@@ -45,7 +45,7 @@ whenever(isOpen, () => {
max-width: 37rem;
height: calc(100vh - 5.5rem);
padding: 0.5rem;
border-right: 1px solid var(--color-grey-500);
border-right: 1px solid var(--color-blue-scale-400);
background-color: var(--background-color-primary);
&.collapsible {

View File

@@ -41,9 +41,9 @@ watchEffect(() => {
display: inline-flex;
padding: 0.3125em 0.5em;
pointer-events: none;
color: var(--color-grey-600);
color: var(--color-blue-scale-500);
border-radius: 0.5em;
background-color: var(--color-grey-100);
background-color: var(--color-blue-scale-100);
z-index: 2;
}
@@ -145,6 +145,6 @@ watchEffect(() => {
content: '';
transform: rotate(45deg) skew(20deg, 20deg);
border-radius: 0.3125em;
background-color: var(--color-grey-100);
background-color: var(--color-blue-scale-100);
}
</style>

View File

@@ -54,14 +54,14 @@ const isIcon = (maybeIcon: any): maybeIcon is IconDefinition => typeof maybeIcon
align-items: stretch;
overflow: hidden;
padding: 0 0.7rem;
border: 1px solid var(--color-grey-500);
border: 1px solid var(--color-blue-scale-400);
border-radius: 0.8rem;
background-color: var(--color-grey-600);
background-color: var(--color-blue-scale-500);
box-shadow: var(--shadow-100);
gap: 0.1rem;
&:focus-within {
outline: 1px solid var(--color-purple-l40);
outline: 1px solid var(--color-extra-blue-l40);
}
}
@@ -71,7 +71,7 @@ const isIcon = (maybeIcon: any): maybeIcon is IconDefinition => typeof maybeIcon
}
.form-widget:hover .widget {
border-color: var(--color-purple-l60);
border-color: var(--color-extra-blue-l60);
}
.element {
@@ -93,8 +93,8 @@ const isIcon = (maybeIcon: any): maybeIcon is IconDefinition => typeof maybeIcon
font-size: inherit;
border: none;
outline: none;
color: var(--color-grey-100);
background-color: var(--color-grey-600);
color: var(--color-blue-scale-100);
background-color: var(--color-blue-scale-500);
flex: 1;
&:disabled {
@@ -134,7 +134,7 @@ const isIcon = (maybeIcon: any): maybeIcon is IconDefinition => typeof maybeIcon
&:disabled {
cursor: not-allowed;
color: var(--color-grey-200);
color: var(--color-blue-scale-200);
}
}
</style>

View File

@@ -25,7 +25,7 @@ defineProps<{
font-size: 1.3rem;
line-height: 150%;
margin: 0.5rem 0;
color: var(--color-red-base);
color: var(--color-red-vates-base);
& svg {
margin-right: 0.5rem;

View File

@@ -25,6 +25,6 @@
font-weight: 500;
font-size: 1.25em;
line-height: 150%;
color: var(--color-red-base);
color: var(--color-red-vates-base);
}
</style>

View File

@@ -27,6 +27,6 @@
font-weight: 500;
font-size: 2rem;
line-height: 150%;
color: var(--color-purple-base);
color: var(--color-extra-blue-base);
}
</style>

View File

@@ -85,7 +85,7 @@ const objectRoute = computed(() => {
<style lang="postcss" scoped>
.unknown {
color: var(--color-grey-300);
color: var(--color-blue-scale-300);
font-style: italic;
}
</style>

View File

@@ -33,7 +33,7 @@ const isRecordNotFound = computed(() => props.isReady && !props.uuidChecker(id.v
}
.spinner {
color: var(--color-purple-base);
color: var(--color-extra-blue-base);
display: flex;
margin: auto;
width: 10rem;

View File

@@ -26,7 +26,7 @@ import UiStatusPanel from '@/components/ui/UiStatusPanel.vue'
.contact {
font-weight: 400;
font-size: 20px;
color: var(--color-grey-100);
color: var(--color-blue-scale-100);
& a {
text-transform: lowercase;

View File

@@ -44,7 +44,7 @@ const masterSessionStorage = useSessionStorage('master', null)
<style lang="postcss" scoped>
.warning-not-current-pool {
color: var(--color-orange-base);
color: var(--color-orange-world-base);
cursor: pointer;
.wrapper {

View File

@@ -26,18 +26,18 @@ const className = computed(() => `state-${props.state.toLocaleLowerCase()}`)
<style lang="postcss" scoped>
.power-state-icon {
color: var(--color-purple-d60);
color: var(--color-extra-blue-d60);
&.state-running {
color: var(--color-green-base);
color: var(--color-green-infra-base);
}
&.state-paused {
color: var(--color-grey-300);
color: var(--color-blue-scale-300);
}
&.state-suspended {
color: var(--color-purple-d20);
color: var(--color-extra-blue-d20);
}
}
</style>

View File

@@ -37,7 +37,7 @@ const progress = computed(() => {
.progress-circle-fill {
animation: progress 1s ease-out forwards;
fill: none;
stroke: var(--color-green-base);
stroke: var(--color-green-infra-base);
stroke-width: 1.2;
stroke-linecap: round;
stroke-dasharray: v-bind(progress), 100;
@@ -46,13 +46,13 @@ const progress = computed(() => {
.progress-circle-background {
fill: none;
stroke-width: 1.2;
stroke: var(--color-grey-500);
stroke: var(--color-blue-scale-400);
}
.progress-circle-text {
font-size: 0.7rem;
font-weight: bold;
fill: var(--color-green-base);
fill: var(--color-green-infra-base);
text-anchor: middle;
alignment-baseline: middle;
}

View File

@@ -29,18 +29,18 @@ defineProps<{
align-items: center;
height: 6rem;
padding: 0 1.5rem;
border-bottom: 1px solid var(--color-grey-500);
border-bottom: 1px solid var(--color-blue-scale-400);
background-color: var(--background-color-primary);
gap: 0.8rem;
}
.icon {
font-size: 2.5rem;
color: var(--color-purple-base);
color: var(--color-extra-blue-base);
}
.title {
font-size: 2.5rem;
color: var(--color-grey-100);
color: var(--color-blue-scale-100);
}
</style>

View File

@@ -60,28 +60,28 @@ const computedData = computed(() => {
}
.progress-item:nth-child(1) {
--progress-bar-color: var(--color-purple-d60);
--progress-bar-color: var(--color-extra-blue-d60);
}
.progress-item:nth-child(2) {
--progress-bar-color: var(--color-purple-d40);
--progress-bar-color: var(--color-extra-blue-d40);
}
.progress-item:nth-child(3) {
--progress-bar-color: var(--color-purple-d20);
--progress-bar-color: var(--color-extra-blue-d20);
}
.progress-item {
--progress-bar-height: 1.2rem;
--progress-bar-color: var(--color-purple-l20);
--progress-bar-background-color: var(--color-grey-500);
--progress-bar-color: var(--color-extra-blue-l20);
--progress-bar-background-color: var(--color-blue-scale-400);
&.warning {
--progress-bar-color: var(--color-orange-base);
--progress-bar-color: var(--color-orange-world-base);
}
&.error {
--progress-bar-color: var(--color-red-base);
--progress-bar-color: var(--color-red-vates-base);
}
}
</style>

View File

@@ -25,12 +25,12 @@
th,
td {
padding: 0.3rem 0.6rem;
border-bottom: 0.1rem solid var(--color-grey-500);
border-bottom: 0.1rem solid var(--color-blue-scale-400);
vertical-align: center;
}
&:nth-child(odd) {
background-color: var(--background-color-purple-10);
background-color: var(--background-color-extra-blue);
}
}

View File

@@ -127,14 +127,14 @@ const openRawValueModal = (code: string) =>
align-items: center;
padding: 0.4rem 0.6rem;
cursor: pointer;
color: var(--color-grey-300);
color: var(--color-blue-scale-300);
border-radius: 0.4rem;
gap: 0.6rem;
&.active {
font-weight: 600;
cursor: default;
color: var(--color-green-l20);
color: var(--color-green-infra-l20);
}
}
}
@@ -157,7 +157,7 @@ const openRawValueModal = (code: string) =>
.help {
font-style: italic;
color: var(--color-grey-200);
color: var(--color-blue-scale-200);
}
.default-value {
@@ -168,12 +168,12 @@ const openRawValueModal = (code: string) =>
font-weight: 600;
font-style: normal;
opacity: 1;
color: var(--color-green-base);
color: var(--color-green-infra-base);
}
}
.v-model-indicator,
.context-indicator {
color: var(--color-green-base);
color: var(--color-green-infra-base);
}
</style>

View File

@@ -81,7 +81,7 @@ const isIndeterminate = computed(() => (type === 'checkbox' || type === 'toggle'
.input.indeterminate + .fake-checkbox > .icon {
opacity: 1;
color: var(--color-grey-300);
color: var(--color-blue-scale-300);
}
}
@@ -114,7 +114,7 @@ const isIndeterminate = computed(() => (type === 'checkbox' || type === 'toggle'
.fake-checkbox {
width: 2.5em;
--background-color: var(--color-grey-500);
--background-color: var(--color-blue-scale-400);
}
.icon {
@@ -128,7 +128,7 @@ const isIndeterminate = computed(() => (type === 'checkbox' || type === 'toggle'
.input.indeterminate + .fake-checkbox > .icon {
opacity: 1;
color: var(--color-grey-300);
color: var(--color-blue-scale-300);
transform: translateX(0);
}
}
@@ -143,7 +143,7 @@ const isIndeterminate = computed(() => (type === 'checkbox' || type === 'toggle'
.icon {
font-size: var(--checkbox-icon-size);
position: absolute;
color: var(--color-grey-600);
color: var(--color-blue-scale-500);
filter: drop-shadow(0 0.0625em 0.5em rgba(0, 0, 0, 0.1)) drop-shadow(0 0.1875em 0.1875em rgba(0, 0, 0, 0.06))
drop-shadow(0 0.1875em 0.25em rgba(0, 0, 0, 0.08));
@@ -162,44 +162,44 @@ const isIndeterminate = computed(() => (type === 'checkbox' || type === 'toggle'
background-color: var(--background-color);
box-shadow: var(--shadow-100);
--border-color: var(--color-grey-500);
--border-color: var(--color-blue-scale-400);
}
.input:disabled {
& + .fake-checkbox {
cursor: not-allowed;
--background-color: var(--background-color-secondary);
--border-color: var(--color-grey-500);
--border-color: var(--color-blue-scale-400);
}
&:checked + .fake-checkbox {
--border-color: transparent;
--background-color: var(--color-purple-l60);
--background-color: var(--color-extra-blue-l60);
}
}
.input:not(:disabled) {
&:hover + .fake-checkbox,
&:focus + .fake-checkbox {
--border-color: var(--color-purple-l40);
--border-color: var(--color-extra-blue-l40);
}
&:active + .fake-checkbox {
--border-color: var(--color-purple-l20);
--border-color: var(--color-extra-blue-l20);
}
&:checked + .fake-checkbox {
--border-color: transparent;
--background-color: var(--color-purple-base);
--background-color: var(--color-extra-blue-base);
}
&:checked:hover + .fake-checkbox,
&:checked:focus + .fake-checkbox {
--background-color: var(--color-purple-d20);
--background-color: var(--color-extra-blue-d20);
}
&:checked:active + .fake-checkbox {
--background-color: var(--color-purple-d40);
--background-color: var(--color-extra-blue-d40);
}
}
</style>

View File

@@ -144,14 +144,14 @@ defineExpose({
--after-width: v-bind('afterWidth || "1.625em"');
--caret-width: 1.5em;
--text-color: var(--color-grey-100);
--text-color: var(--color-blue-scale-100);
&.empty {
--text-color: var(--color-grey-300);
--text-color: var(--color-blue-scale-300);
}
&.disabled {
--text-color: var(--color-grey-500);
--text-color: var(--color-blue-scale-400);
}
}
@@ -189,7 +189,7 @@ defineExpose({
}
--background-color: var(--background-color-primary);
--border-color: var(--color-grey-500);
--border-color: var(--color-blue-scale-400);
&:disabled {
cursor: not-allowed;
@@ -199,63 +199,63 @@ defineExpose({
&:not(:disabled) {
&.info {
&:hover {
--border-color: var(--color-purple-l60);
--border-color: var(--color-extra-blue-l60);
}
&:active {
--border-color: var(--color-purple-l40);
--border-color: var(--color-extra-blue-l40);
}
&:focus {
--border-color: var(--color-purple-base);
--border-color: var(--color-extra-blue-base);
}
}
&.success {
--border-color: var(--color-green-base);
--border-color: var(--color-green-infra-base);
&:hover {
--border-color: var(--color-green-l60);
--border-color: var(--color-green-infra-l60);
}
&:active {
--border-color: var(--color-green-l40);
--border-color: var(--color-green-infra-l40);
}
&:focus {
--border-color: var(--color-green-base);
--border-color: var(--color-green-infra-base);
}
}
&.warning {
--border-color: var(--color-orange-base);
--border-color: var(--color-orange-world-base);
&:hover {
--border-color: var(--color-orange-l60);
--border-color: var(--color-orange-world-l60);
}
&:active {
--border-color: var(--color-orange-l40);
--border-color: var(--color-orange-world-l40);
}
&:focus {
--border-color: var(--color-orange-base);
--border-color: var(--color-orange-world-base);
}
}
&.error {
--border-color: var(--color-red-base);
--border-color: var(--color-red-vates-base);
&:hover {
--border-color: var(--color-red-l60);
--border-color: var(--color-red-vates-l60);
}
&:active {
--border-color: var(--color-red-l40);
--border-color: var(--color-red-vates-l40);
}
&:focus-within {
--border-color: var(--color-red-base);
--border-color: var(--color-red-vates-base);
}
}
}

View File

@@ -96,7 +96,7 @@ useContext(DisabledContext, () => props.disabled)
&.light {
font-size: 1.6rem;
color: var(--color-grey-300);
color: var(--color-blue-scale-300);
font-weight: 400;
}
@@ -104,7 +104,7 @@ useContext(DisabledContext, () => props.disabled)
font-size: 1.4rem;
text-transform: uppercase;
font-weight: 700;
color: var(--color-grey-100);
color: var(--color-blue-scale-100);
}
}
@@ -126,7 +126,7 @@ useContext(DisabledContext, () => props.disabled)
align-items: center;
gap: 0.5rem;
text-decoration: none;
color: var(--color-purple-base);
color: var(--color-extra-blue-base);
& > span {
text-decoration: underline;
@@ -134,14 +134,14 @@ useContext(DisabledContext, () => props.disabled)
}
.warning {
color: var(--color-orange-base);
color: var(--color-orange-world-base);
}
.error {
color: var(--color-red-base);
color: var(--color-red-vates-base);
}
.help {
color: var(--color-grey-300);
color: var(--color-blue-scale-300);
}
</style>

View File

@@ -53,7 +53,7 @@ whenever(
<style lang="postcss" scoped>
.collapsible {
padding: 1rem 1.5rem;
background-color: var(--background-color-purple-10);
background-color: var(--background-color-extra-blue);
border-radius: 0.8rem;
}
@@ -67,16 +67,16 @@ whenever(
display: flex;
align-items: center;
justify-content: space-between;
color: var(--color-purple-base);
color: var(--color-extra-blue-base);
border: none;
border-bottom: 1px solid var(--color-purple-base);
border-bottom: 1px solid var(--color-extra-blue-base);
width: 100%;
font-size: 2rem;
font-weight: 500;
padding-bottom: 1rem;
.collapsible & {
color: var(--color-grey-100);
color: var(--color-blue-scale-100);
padding-bottom: 0;
cursor: pointer;
}
@@ -87,6 +87,6 @@ whenever(
}
.collapse-icon {
color: var(--color-purple-base);
color: var(--color-extra-blue-base);
}
</style>

View File

@@ -65,7 +65,7 @@ const vmCount = computed(() => recordsByHostRef.value.get(props.hostOpaqueRef)?.
}
.master-icon {
color: var(--color-orange-base);
color: var(--color-orange-world-base);
}
.vm-count {
@@ -76,9 +76,9 @@ const vmCount = computed(() => recordsByHostRef.value.get(props.hostOpaqueRef)?.
justify-content: center;
width: var(--size);
height: var(--size);
color: var(--color-grey-600);
color: var(--color-blue-scale-500);
border-radius: calc(var(--size) / 2);
background-color: var(--color-purple-base);
background-color: var(--color-extra-blue-base);
--size: 2.3rem;
}
</style>

View File

@@ -23,6 +23,6 @@ const { records: hosts, isReady, hasError } = useHostCollection()
font-weight: 700;
font-size: 16px;
line-height: 150%;
color: var(--color-red-base);
color: var(--color-red-vates-base);
}
</style>

View File

@@ -40,27 +40,27 @@ const hasTooltip = computed(() => hasEllipsis(textElement.value))
.infra-item-label {
display: flex;
align-items: stretch;
color: var(--color-grey-100);
color: var(--color-blue-scale-100);
border-radius: 0.8rem;
background-color: var(--background-color-primary);
&:hover {
color: var(--color-grey-100);
color: var(--color-blue-scale-100);
background-color: var(--background-color-secondary);
}
&:active,
&.active {
color: var(--color-purple-base);
color: var(--color-extra-blue-base);
background-color: var(--background-color-primary);
}
&.exact-active {
color: var(--color-grey-100);
background-color: var(--background-color-purple-10);
color: var(--color-blue-scale-100);
background-color: var(--background-color-extra-blue);
.icon {
color: var(--color-purple-base);
color: var(--color-extra-blue-base);
}
}
}

View File

@@ -27,7 +27,7 @@ defineProps<{
}
.icon {
color: var(--color-grey-100);
color: var(--color-blue-scale-100);
}
.link-placeholder {
@@ -41,7 +41,7 @@ defineProps<{
.loader {
flex: 1;
animation: pulse alternate 1s infinite;
background-color: var(--background-color-purple-10);
background-color: var(--background-color-extra-blue);
}
@keyframes pulse {

Some files were not shown because too many files have changed in this diff.