Compare commits

...

36 Commits

Author SHA1 Message Date
Julien Fontanet
71d2c28899 WiP: should proxy 2022-11-29 10:31:07 +01:00
Julien Fontanet
18ece4b90c fix(xo-server/MigrateVm): fix uuid import
Introduced by 72c69d791

Fixes #6561
2022-11-29 10:30:09 +01:00
Florent Beauchamp
3862fb2664 fix(fs/rename): throw ENOENT when source file is missing 2022-11-28 17:33:57 +01:00
Florent BEAUCHAMP
72c69d791a feat(xo-server): implement warm migration backend (#6549) 2022-11-28 17:28:19 +01:00
Julien Fontanet
d6192a4a7a chore: remove unused travis-tests.js 2022-11-28 15:51:47 +01:00
Julien Fontanet
0f824ffa70 lint(vhd-lib): remove unused var and fix formatting
Introduced by f6c227e7f
2022-11-26 10:10:08 +01:00
Florent BEAUCHAMP
f6c227e7f5 feat(vhd-lib): merge resume can resume when rename fails (#6530) 2022-11-25 20:51:33 +01:00
Julien Fontanet
9d5bc8af6e feat: run-script.js now only shows output on error by default 2022-11-25 15:45:52 +01:00
Julien Fontanet
9480079770 feat: script test-unit now bails on first error 2022-11-25 15:45:08 +01:00
Julien Fontanet
54fe9147ac chore: only enable Babel debug on prod builds
The output was making test results hard to see.
2022-11-25 14:43:36 +01:00
Gabriel Gunullu
b6a0477232 feat(xo-server-transport-nagios): report backed up VM individually (#6534) 2022-11-25 14:36:41 +01:00
Julien Fontanet
c60644c578 chore(lite): merge lint with the root config 2022-11-25 11:23:04 +01:00
Thierry Goettelmann
abdce94c5f feat(lite): type check on test (#6547) 2022-11-25 11:19:58 +01:00
Mathieu
d7dee04013 feat(xo-web/settings/users): remove OTP of users in admin panel (#6541)
See https://xcp-ng.org/forum/topic/6521
2022-11-25 11:15:07 +01:00
Julien Fontanet
dfc62132b7 fix(xo-web/remote): prevent browser from autocompleting encryption key 2022-11-24 18:48:45 +01:00
Julien Fontanet
36f7f193aa feat: run linter in CI 2022-11-24 17:00:59 +01:00
Julien Fontanet
ca4a82ec38 fix: make test-lint script ignore xo-web
Too many errors in this legacy package.
2022-11-24 16:26:40 +01:00
Julien Fontanet
37aea1888d chore: fix lint issues 2022-11-24 16:26:40 +01:00
Julien Fontanet
92f3b4ddd7 chore(backups/RemoteAdapter): remove unused invalidateVmBackupListCache 2022-11-24 16:26:40 +01:00
Mathieu
647995428c feat(lite/pool/dashboard): top 5 RAM usage (#6419) 2022-11-24 15:57:11 +01:00
Mathieu
407e9c25f3 feat(xo-web/licenses): text to explicit where to bind xcp-ng licenses (#6551)
See zammad#11037
2022-11-24 15:42:16 +01:00
Julien Fontanet
1612ab7335 fix(backups-cli/clean-vms): remove incorrect console.log
Introduced by 94c755b10
2022-11-23 23:03:46 +01:00
Julien Fontanet
b952c36210 fix(vhd-lib/merge): VhdAbstract.rename → handler.rename
Missing change from c5b3acfce
2022-11-23 15:02:56 +01:00
Florent BEAUCHAMP
96b5cb2c61 feat(xo-vmdk-to-vhd): overprovision vmdk size to generate ova in one pass (#6487) 2022-11-23 14:48:18 +01:00
Florent Beauchamp
c5b3acfce2 fix(vhd-lib): remove unsafe VhdAbstract.rename implementation
actual implementation was deleting the target vhd even if the source did not exist, leading to potential data loss
2022-11-23 14:31:37 +01:00
Julien Fontanet
20a01bf266 feat(lint-staged): format all files with Prettier 2022-11-22 18:20:01 +01:00
Julien Fontanet
a33b88cf1c chore: format with Prettier 2022-11-22 17:30:14 +01:00
Julien Fontanet
09a2f45ada feat: run test script for all pkgs with changed files 2022-11-22 17:30:14 +01:00
Julien Fontanet
83a7dd7ea1 chore: remove custom scripts/lint-staged 2022-11-22 17:30:14 +01:00
Julien Fontanet
afc1b6a5c0 Revert "feat: run pre-commit script for all packages"
This reverts commit f5b91cd45d.
2022-11-22 17:30:14 +01:00
Thierry Goettelmann
7f4f860735 feat(lite/color mode): "auto" mode + "D" shortcut to toggle (#6536)
The shortcut is only enabled in dev environment
2022-11-22 15:35:31 +01:00
Julien Fontanet
d789e3aa0d chore: update to husky@8 2022-11-22 15:33:43 +01:00
Julien Fontanet
f5b91cd45d feat: run pre-commit script for all packages 2022-11-22 11:37:40 +01:00
Julien Fontanet
92ab4b3309 chore(lite): format with Prettier (#6545) 2022-11-22 11:33:03 +01:00
Florent Beauchamp
2c456e4c89 fix(vhd-lib): create directory for merged blocks 2022-11-22 11:05:51 +01:00
Florent Beauchamp
1460e63449 fix(vhd-lib): write state at the beginning 2022-11-22 11:05:51 +01:00
74 changed files with 1144 additions and 560 deletions

4
.husky/pre-commit Executable file
View File

@@ -0,0 +1,4 @@
#!/usr/bin/env sh
. "$(dirname -- "$0")/_/husky.sh"
npx lint-staged

View File

@@ -30,6 +30,7 @@ if (args.length === 0) {
${name} v${version}
`)
// eslint-disable-next-line n/no-process-exit
process.exit()
}

View File

@@ -5,7 +5,6 @@ const PRESETS_RE = /^@babel\/preset-.+$/
const NODE_ENV = process.env.NODE_ENV || 'development'
const __PROD__ = NODE_ENV === 'production'
const __TEST__ = NODE_ENV === 'test'
const configs = {
'@babel/plugin-proposal-decorators': {
@@ -15,7 +14,7 @@ const configs = {
proposal: 'minimal',
},
'@babel/preset-env': {
debug: !__TEST__,
debug: __PROD__,
// disabled until https://github.com/babel/babel/issues/8323 is resolved
// loose: true,

View File

@@ -22,7 +22,6 @@ export default async function cleanVms(args) {
await asyncMap(_, vmDir =>
Disposable.use(getSyncedHandler({ url: pathToFileURL(dirname(vmDir)).href }), async handler => {
console.log(handler, basename(vmDir))
try {
await new RemoteAdapter(handler).cleanVm(basename(vmDir), {
fixMetadata: fix,

View File

@@ -537,10 +537,6 @@ class RemoteAdapter {
}
}
async invalidateVmBackupListCache(vmUuid) {
await this.handler.unlink(this.#getVmBackupsCache(vmUuid))
}
async #getCachabledDataListVmBackups(dir) {
debug('generating cache', { path: dir })

View File

@@ -49,7 +49,6 @@ exports.FullBackupWriter = class FullBackupWriter extends MixinBackupWriter(Abst
const dataBasename = basename + '.xva'
const dataFilename = backupDir + '/' + dataBasename
const metadataFilename = `${backupDir}/${basename}.json`
const metadata = {
jobId: job.id,
mode: job.mode,

View File

@@ -284,15 +284,25 @@ export default class RemoteHandlerAbstract {
return this._encryptor.decryptData(data)
}
async rename(oldPath, newPath, { checksum = false } = {}) {
oldPath = normalizePath(oldPath)
newPath = normalizePath(newPath)
let p = timeout.call(this._rename(oldPath, newPath), this._timeout)
if (checksum) {
p = Promise.all([p, this._rename(checksumFile(oldPath), checksumFile(newPath))])
async #rename(oldPath, newPath, { checksum }, createTree = true) {
try {
let p = timeout.call(this._rename(oldPath, newPath), this._timeout)
if (checksum) {
p = Promise.all([p, this._rename(checksumFile(oldPath), checksumFile(newPath))])
}
await p
} catch (error) {
// ENOENT can be a missing target directory OR a missing source
if (error.code === 'ENOENT' && createTree) {
await this._mktree(dirname(newPath))
return this.#rename(oldPath, newPath, { checksum }, false)
}
throw error
}
return p
}
rename(oldPath, newPath, { checksum = false } = {}) {
return this.#rename(normalizePath(oldPath), normalizePath(newPath), { checksum })
}
async copy(oldPath, newPath, { checksum = false } = {}) {

View File

@@ -228,6 +228,17 @@ handlers.forEach(url => {
expect(await handler.list('.')).toEqual(['file2'])
expect(await handler.readFile(`file2`)).toEqual(TEST_DATA)
})
it(`should rename the file and create dest directory`, async () => {
await handler.outputFile('file', TEST_DATA)
await handler.rename('file', `sub/file2`)
expect(await handler.list('sub')).toEqual(['file2'])
expect(await handler.readFile(`sub/file2`)).toEqual(TEST_DATA)
})
it(`should fail with enoent if source file is missing`, async () => {
const error = await rejectionOf(handler.rename('file', `sub/file2`))
expect(error.code).toBe('ENOENT')
})
})
describe('#rmdir()', () => {

View File

@@ -5,6 +5,7 @@
- Invalidate sessionId token after logout (PR [#6480](https://github.com/vatesfr/xen-orchestra/pull/6480))
- Settings page (PR [#6418](https://github.com/vatesfr/xen-orchestra/pull/6418))
- Uncollapse hosts in the tree by default (PR [#6428](https://github.com/vatesfr/xen-orchestra/pull/6428))
- Display RAM usage in pool dashboard (PR [#6419](https://github.com/vatesfr/xen-orchestra/pull/6419))
## **0.1.0**

View File

@@ -105,7 +105,7 @@ Use the `busy` prop to display a loader icon.
</template>
<script lang="ts" setup>
import UiIcon from "@/components/ui/UiIcon.vue"
import UiIcon from "@/components/ui/UiIcon.vue";
import { faDisplay } from "@fortawesome/free-solid-svg-icons";
</script>
```

View File

@@ -7,8 +7,8 @@
"preview": "vite preview --port 4173",
"build-only": "GIT_HEAD=$(git rev-parse HEAD) vite build",
"deploy": "./scripts/deploy.sh",
"type-check": "vue-tsc --noEmit",
"lint": "eslint . --ext .vue,.js,.jsx,.cjs,.mjs,.ts,.tsx,.cts,.mts --fix --ignore-path .gitignore"
"test": "yarn run type-check",
"type-check": "vue-tsc --noEmit"
},
"dependencies": {
"@fortawesome/fontawesome-svg-core": "^6.1.1",
@@ -19,6 +19,7 @@
"@types/d3-time-format": "^4.0.0",
"@types/lodash-es": "^4.17.6",
"@vueuse/core": "^9.5.0",
"@vueuse/math": "^9.5.0",
"complex-matcher": "^0.7.0",
"d3-time-format": "^4.1.0",
"decorator-synchronized": "^0.6.0",

View File

@@ -32,6 +32,9 @@
</template>
<script lang="ts" setup>
import { useUiStore } from "@/stores/ui.store";
import { useActiveElement, useMagicKeys, whenever } from "@vueuse/core";
import { logicAnd } from "@vueuse/math";
import { difference } from "lodash";
import { computed, ref, watch, watchEffect } from "vue";
import favicon from "@/assets/favicon.svg";
@@ -58,13 +61,28 @@ link.href = favicon;
document.title = "XO Lite";
if (window.localStorage?.getItem("colorMode") !== "light") {
document.documentElement.classList.add("dark");
}
const xenApiStore = useXenApiStore();
const hostStore = useHostStore();
useChartTheme();
const uiStore = useUiStore();
if (import.meta.env.DEV) {
const activeElement = useActiveElement();
const { D } = useMagicKeys();
const canToggleDarkMode = computed(() => {
if (activeElement.value == null) {
return true;
}
return !["INPUT", "TEXTAREA"].includes(activeElement.value.tagName);
});
whenever(
logicAnd(D, canToggleDarkMode),
() => (uiStore.colorMode = uiStore.colorMode === "dark" ? "light" : "dark")
);
}
watchEffect(() => {
if (xenApiStore.isConnected) {

View File

@@ -11,7 +11,7 @@
</template>
<script lang="ts" setup>
import AccountButton from '@/components/AccountButton.vue'
import AccountButton from "@/components/AccountButton.vue";
</script>
<style lang="postcss" scoped>

View File

@@ -43,14 +43,14 @@
<template #buttons>
<UiButton transparent @click="addNewFilter">
{{ $t("add-or") }}
</UiButton>
{{ $t("add-or") }}
</UiButton>
<UiButton :disabled="!isFilterValid" type="submit">
{{ $t(editedFilter ? "update" : "add") }}
</UiButton>
<UiButton outlined @click="handleCancel">
{{ $t("cancel") }}
</UiButton>
{{ $t("cancel") }}
</UiButton>
</template>
</UiModal>
</template>

View File

@@ -41,8 +41,8 @@
<template #buttons>
<UiButton type="submit">{{ $t("add") }}</UiButton>
<UiButton outlined @click="handleCancel">
{{ $t("cancel") }}
</UiButton>
{{ $t("cancel") }}
</UiButton>
</template>
</UiModal>
</template>

View File

@@ -3,10 +3,10 @@
<div class="progress-bar">
<div class="progress-bar-fill" />
</div>
<div class="badge" v-if="label !== undefined">
<div class="legend" v-if="label !== undefined">
<span class="circle" />
{{ label }}
<UiBadge>{{ badgeLabel ?? progressWithUnit }}</UiBadge>
<UiBadge class="badge">{{ badgeLabel ?? progressWithUnit }}</UiBadge>
</div>
</div>
</template>
@@ -33,9 +33,14 @@ const progressWithUnit = computed(() => {
</script>
<style lang="postcss" scoped>
.badge {
.legend {
text-align: right;
margin: 1rem 0;
margin: 1.6em 0;
}
.badge {
font-size: 0.9em;
font-weight: 700;
}
.circle {

View File

@@ -12,7 +12,7 @@
:icon="faServer"
:route="{ name: 'host.dashboard', params: { uuid: host.uuid } }"
>
{{ host.name_label || '(Host)' }}
{{ host.name_label || "(Host)" }}
<template #actions>
<InfraAction
:icon="isExpanded ? faAngleDown : faAngleUp"

View File

@@ -12,7 +12,7 @@
:icon="faDisplay"
:route="{ name: 'vm.console', params: { uuid: vm.uuid } }"
>
{{ vm.name_label || '(VM)' }}
{{ vm.name_label || "(VM)" }}
<template #actions>
<InfraAction>
<PowerStateIcon :state="vm?.power_state" />

View File

@@ -0,0 +1,14 @@
<template>
<UiCard>
<UiTitle type="h4">{{ $t("ram-usage") }}</UiTitle>
<HostsRamUsage />
<VmsRamUsage />
</UiCard>
</template>
<script setup lang="ts">
import HostsRamUsage from "@/components/pool/dashboard/ramUsage/HostsRamUsage.vue";
import VmsRamUsage from "@/components/pool/dashboard/ramUsage/VmsRamUsage.vue";
import UiCard from "@/components/ui/UiCard.vue";
import UiTitle from "@/components/ui/UiTitle.vue";
</script>

View File

@@ -1,10 +1,10 @@
<template>
<UiCard>
<UiTitle type="h4">{{ $t("storage-usage") }}</UiTitle>
<UsageBar :data="srStore.isReady ? data.result : undefined" :nItems="5">
<UsageBar :data="srStore.isReady ? data.result : undefined" :nItems="N_ITEMS">
<template #header>
<span>{{ $t("storage") }}</span>
<span>{{ $t("top-#", { n: 5 }) }}</span>
<span>{{ $t("top-#", { n: N_ITEMS }) }}</span>
</template>
<template #footer v-if="showFooter">
<div class="footer-card">
@@ -37,6 +37,7 @@ import UiCard from "@/components/ui/UiCard.vue";
import UiTitle from "@/components/ui/UiTitle.vue";
import { formatSize, percent } from "@/libs/utils";
import { useSrStore } from "@/stores/storage.store";
import { N_ITEMS } from "@/views/pool/PoolDashboardView.vue";
const srStore = useSrStore();

View File

@@ -1,8 +1,8 @@
<template>
<UsageBar :data="statFetched ? data : undefined" :n-items="5">
<UsageBar :data="statFetched ? data : undefined" :n-items="N_ITEMS">
<template #header>
<span>{{ $t("hosts") }}</span>
<span>{{ $t("top-#", { n: 5 }) }}</span>
<span>{{ $t("top-#", { n: N_ITEMS }) }}</span>
</template>
</UsageBar>
</template>
@@ -13,6 +13,7 @@ import UsageBar from "@/components/UsageBar.vue";
import type { Stat } from "@/composables/fetch-stats.composable";
import { getAvgCpuUsage } from "@/libs/utils";
import type { HostStats } from "@/libs/xapi-stats";
import { N_ITEMS } from "@/views/pool/PoolDashboardView.vue";
const stats = inject<ComputedRef<Stat<HostStats>[]>>(
"hostStats",

View File

@@ -1,8 +1,8 @@
<template>
<UsageBar :data="statFetched ? data : undefined" :n-items="5">
<UsageBar :data="statFetched ? data : undefined" :n-items="N_ITEMS">
<template #header>
<span>{{ $t("vms") }}</span>
<span>{{ $t("top-#", { n: 5 }) }}</span>
<span>{{ $t("top-#", { n: N_ITEMS }) }}</span>
</template>
</UsageBar>
</template>
@@ -13,6 +13,7 @@ import UsageBar from "@/components/UsageBar.vue";
import type { Stat } from "@/composables/fetch-stats.composable";
import { getAvgCpuUsage } from "@/libs/utils";
import type { VmStats } from "@/libs/xapi-stats";
import { N_ITEMS } from "@/views/pool/PoolDashboardView.vue";
const stats = inject<ComputedRef<Stat<VmStats>[]>>(
"vmStats",

View File

@@ -0,0 +1,52 @@
<template>
<UsageBar :data="statFetched ? data : undefined" :n-items="N_ITEMS">
<template #header>
<span>{{ $t("hosts") }}</span>
<span>{{ $t("top-#", { n: N_ITEMS }) }}</span>
</template>
</UsageBar>
</template>
<script lang="ts" setup>
import { type ComputedRef, computed, inject } from "vue";
import UsageBar from "@/components/UsageBar.vue";
import type { Stat } from "@/composables/fetch-stats.composable";
import { formatSize, parseRamUsage } from "@/libs/utils";
import type { HostStats } from "@/libs/xapi-stats";
import { N_ITEMS } from "@/views/pool/PoolDashboardView.vue";
const stats = inject<ComputedRef<Stat<HostStats>[]>>(
"hostStats",
computed(() => [])
);
const data = computed(() => {
const result: {
id: string;
label: string;
value: number;
badgeLabel: string;
}[] = [];
stats.value.forEach((stat) => {
if (stat.stats === undefined) {
return;
}
const { percentUsed, total, used } = parseRamUsage(stat.stats);
result.push({
id: stat.id,
label: stat.name,
value: percentUsed,
badgeLabel: `${formatSize(used)}/${formatSize(total)}`,
});
});
return result;
});
const statFetched: ComputedRef<boolean> = computed(
() =>
statFetched.value ||
(stats.value.length > 0 && stats.value.length === data.value.length)
);
</script>

View File

@@ -0,0 +1,52 @@
<template>
<UsageBar :data="statFetched ? data : undefined" :n-items="N_ITEMS">
<template #header>
<span>{{ $t("vms") }}</span>
<span>{{ $t("top-#", { n: N_ITEMS }) }}</span>
</template>
</UsageBar>
</template>
<script lang="ts" setup>
import { type ComputedRef, computed, inject } from "vue";
import UsageBar from "@/components/UsageBar.vue";
import type { Stat } from "@/composables/fetch-stats.composable";
import { formatSize, parseRamUsage } from "@/libs/utils";
import type { VmStats } from "@/libs/xapi-stats";
import { N_ITEMS } from "@/views/pool/PoolDashboardView.vue";
const stats = inject<ComputedRef<Stat<VmStats>[]>>(
"vmStats",
computed(() => [])
);
const data = computed(() => {
const result: {
id: string;
label: string;
value: number;
badgeLabel: string;
}[] = [];
stats.value.forEach((stat) => {
if (stat.stats === undefined) {
return;
}
const { percentUsed, total, used } = parseRamUsage(stat.stats);
result.push({
id: stat.id,
label: stat.name,
value: percentUsed,
badgeLabel: `${formatSize(used)}/${formatSize(total)}`,
});
});
return result;
});
const statFetched: ComputedRef<boolean> = computed(
() =>
statFetched.value ||
(stats.value.length > 0 && stats.value.length === data.value.length)
);
</script>

View File

@@ -22,7 +22,7 @@ defineProps<{
font-size: 1.4rem;
font-weight: 500;
padding: 0 0.8rem;
height: 2.4rem;
height: 1.8em;
color: var(--color-blue-scale-500);
border-radius: 9.6rem;
background-color: var(--color-blue-scale-300);

View File

@@ -1,23 +1,22 @@
# useBusy composable
```vue
<template>
<span class="error" v-if="error">{{ error }}</span>
<button @click="run" :disabled="isBusy">Do something</button>
</template>
<script lang="ts" setup>
import useBusy from '@/composables/busy.composable';
import useBusy from "@/composables/busy.composable";
async function doSomething() {
try {
// Doing some async work
} catch (e) {
throw "Something bad happened";
}
async function doSomething() {
try {
// Doing some async work
} catch (e) {
throw "Something bad happened";
}
}
const { isBusy, error, run } = useBusy(doSomething)
const { isBusy, error, run } = useBusy(doSomething);
</script>
```

View File

@@ -13,19 +13,23 @@ const filteredCollection = myCollection.filter(predicate);
By default, when adding/removing filters, the URL will update automatically.
```typescript
addFilter('name:/^foo/i'); // Will update the URL with ?filter=name:/^foo/i
addFilter("name:/^foo/i"); // Will update the URL with ?filter=name:/^foo/i
```
### Change the URL query string parameter name
```typescript
const { /* ... */ } = useCollectionFilter({ queryStringParam: 'f' }); // ?f=name:/^foo/i
const {
/* ... */
} = useCollectionFilter({ queryStringParam: "f" }); // ?f=name:/^foo/i
```
### Disable the usage of URL query string
```typescript
const { /* ... */ } = useCollectionFilter({ queryStringParam: undefined });
const {
/* ... */
} = useCollectionFilter({ queryStringParam: undefined });
```
## Example of using the composable with the `CollectionFilter` component
@@ -38,32 +42,32 @@ const { /* ... */ } = useCollectionFilter({ queryStringParam: undefined });
@add-filter="addFilter"
@remove-filter="removeFilter"
/>
<div v-for="item in filteredCollection">...</div>
</template>
<script lang="ts" setup>
import CollectionFilter from "@/components/CollectionFilter.vue";
import useCollectionFilter from "@/composables/collection-filter.composable";
import { computed } from "vue";
import CollectionFilter from "@/components/CollectionFilter.vue";
import useCollectionFilter from "@/composables/collection-filter.composable";
import { computed } from "vue";
const collection = [
{ name: "Foo", age: 5, registered: true },
{ name: "Bar", age: 12, registered: false },
{ name: "Foo Bar", age: 2, registered: true },
{ name: "Bar Baz", age: 45, registered: false },
{ name: "Foo Baz", age: 32, registered: false },
{ name: "Foo Bar Baz", age: 32, registered: true },
];
const collection = [
{ name: "Foo", age: 5, registered: true },
{ name: "Bar", age: 12, registered: false },
{ name: "Foo Bar", age: 2, registered: true },
{ name: "Bar Baz", age: 45, registered: false },
{ name: "Foo Baz", age: 32, registered: false },
{ name: "Foo Bar Baz", age: 32, registered: true },
];
const availableFilters: AvailableFilter[] = [
{ property: "name", label: "Name", type: "string" },
{ property: "age", label: "Age", type: "number" },
{ property: "registered", label: "Registered", type: "boolean", icon: faKey },
];
const availableFilters: AvailableFilter[] = [
{ property: "name", label: "Name", type: "string" },
{ property: "age", label: "Age", type: "number" },
{ property: "registered", label: "Registered", type: "boolean", icon: faKey },
];
const { filters, addFilter, removeFilter, predicate } = useCollectionFilter();
const filteredCollection = computed(() => collection.filter(predicate));
const { filters, addFilter, removeFilter, predicate } = useCollectionFilter();
const filteredCollection = computed(() => collection.filter(predicate));
</script>
```

View File

@@ -2,14 +2,17 @@
```vue
<script lang="ts" setup>
import useFilteredCollection from './filtered-collection.composable';
import useFilteredCollection from "./filtered-collection.composable";
const players = [
{ name: "Foo", team: "Blue" },
{ name: "Bar", team: "Red" },
{ name: "Baz", team: "Blue" },
]
const bluePlayers = useFilteredCollection(players, (player) => player.team === "Blue");
const players = [
{ name: "Foo", team: "Blue" },
{ name: "Bar", team: "Red" },
{ name: "Baz", team: "Blue" },
];
const bluePlayers = useFilteredCollection(
players,
(player) => player.team === "Blue"
);
</script>
```

View File

@@ -5,27 +5,28 @@
<div v-for="item in items">
{{ item.name }} <button @click="openRemoveModal(item)">Delete</button>
</div>
<UiModal v-if="isRemoveModalOpen">
Are you sure you want to delete {{ removeModalPayload.name }}
<button @click="handleRemove">Yes</button> <button @click="closeRemoveModal">No</button>
<button @click="handleRemove">Yes</button>
<button @click="closeRemoveModal">No</button>
</UiModal>
</template>
<script lang="ts" setup>
import useModal from '@/composables/modal.composable';
import useModal from "@/composables/modal.composable";
const {
payload: removeModalPayload,
isOpen: isRemoveModalOpen,
open: openRemoveModal,
close: closeRemoveModal,
} = useModal()
async function handleRemove() {
await removeItem(removeModalPayload.id);
closeRemoveModal()
}
const {
payload: removeModalPayload,
isOpen: isRemoveModalOpen,
open: openRemoveModal,
close: closeRemoveModal,
} = useModal();
async function handleRemove() {
await removeItem(removeModalPayload.id);
closeRemoveModal();
}
</script>
```

View File

@@ -4,34 +4,30 @@
<template>
<table>
<thead>
<tr>
<th>
<input type="checkbox" v-model="areAllSelected">
</th>
<th>Name</th>
</tr>
<tr>
<th>
<input type="checkbox" v-model="areAllSelected" />
</th>
<th>Name</th>
</tr>
</thead>
<tbody>
<tr v-for="item in items">
<td>
<input type="checkbox" :value="item.id" v-model="selected" />
</td>
<td>{{ item.name }}</td>
</tr>
<tr v-for="item in items">
<td>
<input type="checkbox" :value="item.id" v-model="selected" />
</td>
<td>{{ item.name }}</td>
</tr>
</tbody>
</table>
<!-- You can use something else than a "Select All" checkbox -->
<button @click="areAllSelected = !areAllSelected">Toggle all selected</button>
</template>
<script lang="ts" setup>
import useMultiSelect from './multi-select.composable';
import useMultiSelect from "./multi-select.composable";
const {
selected,
areAllSelected,
} = useMultiSelect()
const { selected, areAllSelected } = useMultiSelect();
</script>
```

View File

@@ -123,3 +123,37 @@ export const buildXoObject = (
...record,
$ref: params.opaqueRef,
});
export function parseRamUsage(
{
memory,
memoryFree,
}: {
memory: number[];
memoryFree?: number[];
},
{ nSequence = 4 } = {}
) {
const _nSequence = Math.min(memory.length, nSequence);
let total = 0;
let used = 0;
memory = memory.slice(memory.length - _nSequence);
memoryFree = memoryFree?.slice(memoryFree.length - _nSequence);
memory.forEach((ram, key) => {
total += ram;
used += ram - (memoryFree?.[key] ?? 0);
});
const percentUsed = percent(used, total);
return {
// In case `memoryFree` is not given by the xapi,
// we won't be able to calculate the percentage of used memory properly.
percentUsed:
memoryFree === undefined || isNaN(percentUsed) ? 0 : percentUsed,
total: total / _nSequence,
used: memoryFree === undefined ? 0 : used / _nSequence,
};
}

View File

@@ -259,7 +259,7 @@ export type VmStats = {
w: Record<string, number[]>;
};
memory: number[];
memoryFree: number[];
memoryFree?: number[];
vifs: {
rx: Record<string, number[]>;
tx: Record<string, number[]>;

View File

@@ -15,7 +15,9 @@
"community-name": "{name} community",
"copy": "Copy",
"cpu-usage":"CPU usage",
"dark-mode": "Dark mode",
"theme-dark": "Dark",
"theme-light": "Light",
"theme-auto": "Auto",
"dashboard": "Dashboard",
"delete": "Delete",
"descending": "descending",
@@ -37,6 +39,7 @@
"or": "Or",
"password": "Password",
"property": "Property",
"ram-usage":"RAM usage",
"send-us-feedback": "Send us feedback",
"settings": "Settings",
"snapshot": "Snapshot",

View File

@@ -15,7 +15,9 @@
"community-name": "Communauté {name}",
"copy": "Copier",
"cpu-usage":"Utilisation CPU",
"dark-mode": "Mode sombre",
"theme-dark": "Sombre",
"theme-light": "Clair",
"theme-auto": "Auto",
"dashboard": "Tableau de bord",
"delete": "Supprimer",
"descending": "descendant",
@@ -37,6 +39,7 @@
"or": "Ou",
"password": "Mot de passe",
"property": "Propriété",
"ram-usage":"Utilisation de la RAM",
"send-us-feedback": "Envoyez-nous vos commentaires",
"settings": "Paramètres",
"snapshot": "Instantané",

View File

@@ -1,10 +1,14 @@
import { useColorMode } from "@vueuse/core";
import { defineStore } from "pinia";
import { ref } from "vue";
export const useUiStore = defineStore("ui", () => {
const currentHostOpaqueRef = ref();
const colorMode = useColorMode({ emitAuto: true, initialValue: "dark" });
return {
colorMode,
currentHostOpaqueRef,
};
});

View File

@@ -3,13 +3,18 @@
<PoolDashboardStatus class="item" />
<PoolDashboardStorageUsage class="item" />
<PoolDashboardCpuUsage class="item" />
<PoolDashboardRamUsage class="item" />
</div>
</template>
<script lang="ts">
export const N_ITEMS = 5;
</script>
<script lang="ts" setup>
import { differenceBy } from "lodash-es";
import { computed, onMounted, provide, watch } from "vue";
import PoolDashboardCpuUsage from "@/components/pool/dashboard/PoolDashboardCpuUsage.vue";
import PoolDashboardRamUsage from "@/components/pool/dashboard/PoolDashboardRamUsage.vue";
import PoolDashboardStatus from "@/components/pool/dashboard/PoolDashboardStatus.vue";
import PoolDashboardStorageUsage from "@/components/pool/dashboard/PoolDashboardStorageUsage.vue";
import useFetchStats from "@/composables/fetch-stats.composable";

View File

@@ -19,7 +19,9 @@
rel="noopener noreferrer"
href="https://xcp-ng.org/blog/"
>{{ $t("news-name", { name: "XCP-ng" }) }}</a
> - <a
>
-
<a
target="_blank"
rel="noopener noreferrer"
href="https://xen-orchestra.com/blog/"
@@ -35,7 +37,9 @@
rel="noopener noreferrer"
href="https://xcp-ng.org/forum"
>{{ $t("community-name", { name: "XCP-ng" }) }}</a
> - <a
>
-
<a
target="_blank"
rel="noopener noreferrer"
href="https://xcp-ng.org/forum/category/12/xen-orchestra"
@@ -50,14 +54,15 @@
<UiKeyValueList>
<UiKeyValueRow>
<template #key>{{ $t("appearance") }}</template>
<template #value
><FormLabel>
<FormToggle
:modelValue="darkMode"
@update:modelValue="setDarkMode"
/>{{ $t("dark-mode") }}</FormLabel
></template
>
<template #value>
<FormLabel>
<FormSelect v-model="colorMode">
<option value="auto">{{ $t("theme-auto") }}</option>
<option value="dark">{{ $t("theme-dark") }}</option>
<option value="light">{{ $t("theme-light") }}</option>
</FormSelect>
</FormLabel>
</template>
</UiKeyValueRow>
</UiKeyValueList>
</UiCard>
@@ -85,15 +90,16 @@
</template>
<script lang="ts" setup>
import { computed, watch } from "vue";
import FormSelect from "@/components/form/FormSelect.vue";
import { useUiStore } from "@/stores/ui.store";
import { storeToRefs } from "pinia";
import { watch } from "vue";
import { useI18n } from "vue-i18n";
import { locales } from "@/i18n";
import { faEarthAmericas, faGear } from "@fortawesome/free-solid-svg-icons";
import { useLocalStorage } from "@vueuse/core";
import FormWidget from "@/components/FormWidget.vue";
import TitleBar from "@/components/TitleBar.vue";
import FormLabel from "@/components/form/FormLabel.vue";
import FormToggle from "@/components/form/FormToggle.vue";
import UiCard from "@/components/ui/UiCard.vue";
import UiKeyValueList from "@/components/ui/UiKeyValueList.vue";
import UiKeyValueRow from "@/components/ui/UiKeyValueRow.vue";
@@ -105,12 +111,7 @@ const { locale } = useI18n();
watch(locale, (newLocale) => localStorage.setItem("lang", newLocale));
const colorMode = useLocalStorage<string>("colorMode", "dark");
const darkMode = computed(() => colorMode.value !== "light");
const setDarkMode = (enabled: boolean) => {
colorMode.value = enabled ? "dark" : "light";
document.documentElement.classList[enabled ? "add" : "remove"]("dark");
};
const { colorMode } = storeToRefs(useUiStore());
</script>
<style lang="postcss" scoped>

View File

@@ -1,9 +1,9 @@
'use strict'
const fromCallback = require('promise-toolbox/fromCallback')
// eslint-disable-next-line n/no-missing-require
// eslint-disable-next-line n/no-extraneous-require
const splitHost = require('split-host')
// eslint-disable-next-line n/no-missing-require
// eslint-disable-next-line n/no-extraneous-require
const { createClient, Facility, Severity, Transport } = require('syslog-client')
const LEVELS = require('../levels')

View File

@@ -8,12 +8,18 @@
> Users must be able to say: “Nice enhancement, I'm eager to test it”
- [Remotes] Prevent remote path from ending with `xo-vm-backups` as it's usually a mistake
- [OVA export] Speed up OVA generation by 2. Generated file will be bigger (as big as uncompressed XVA) (PR [#6487](https://github.com/vatesfr/xen-orchestra/pull/6487))
- [Settings/Users] Add `Remove` button to delete OTP of users from the admin panel [Forum#6521](https://xcp-ng.org/forum/topic/6521/remove-totp-on-a-user-account) (PR [#6541](https://github.com/vatesfr/xen-orchestra/pull/6541))
- [Plugin/transport-nagios] XO now reports backed up VMs individually with the VM name label used as *host* and backup job name used as *service*
### Bug fixes
> Users must be able to say: “I had this issue, happy to know it's fixed”
- [Dashboard/Health] Fix `Unknown SR` and `Unknown VDI` in Unhealthy VDIs (PR [#6519](https://github.com/vatesfr/xen-orchestra/pull/6519))
- [Delta Backup] Can now recover VHD merge when failed at the beginning
- [Delta Backup] Fix `ENOENT` errors when merging a VHD directory on non-S3 remote
- [Remote] Prevent the browser from auto-completing the encryption key field
### Packages to release
@@ -32,9 +38,13 @@
<!--packages-start-->
- @xen-orchestra/backups-cli major
- @xen-orchestra/fs minor
- @xen-orchestra/log minor
- vhd-lib minor
- xo-cli patch
- xo-server minor
- xo-server-transport-nagios major
- xo-vmdk-to-vhd minor
- xo-web minor
<!--packages-end-->

View File

@@ -20,7 +20,7 @@
"getopts": "^2.3.0",
"globby": "^13.1.1",
"handlebars": "^4.7.6",
"husky": "^4.2.5",
"husky": "^8.0.2",
"jest": "^29.0.3",
"lint-staged": "^13.0.3",
"lodash": "^4.17.4",
@@ -34,11 +34,6 @@
"node": ">=14",
"yarn": "^1.7.0"
},
"husky": {
"hooks": {
"pre-commit": "lint-staged && scripts/lint-staged.js"
}
},
"jest": {
"moduleNameMapper": {
"^(@vates/[^/]+)$": [
@@ -75,23 +70,30 @@
"testRegex": "\\.spec\\.js$"
},
"lint-staged": {
"*.{md,ts,ts}": "prettier --write"
"*": [
"scripts/run-changed-pkgs.js test",
"prettier --ignore-unknown --write"
],
"*.{{{,c,m}j,t}s{,x},vue}": [
"eslint --ignore-pattern '!*'",
"jest --testRegex='^(?!.*.integ.spec.js$).*.spec.js$' --findRelatedTests --passWithNoTests"
]
},
"private": true,
"scripts": {
"build": "scripts/run-script.js --parallel --concurrency 2 build",
"ci": "yarn && yarn build && yarn test-integration",
"ci": "yarn && yarn build && yarn test-lint && yarn test-integration",
"clean": "scripts/run-script.js --parallel clean",
"dev": "scripts/run-script.js --parallel dev",
"dev-test": "jest --bail --watch \"^(?!.*\\.integ\\.spec\\.js$)\"",
"docs:dev": "vuepress dev docs",
"docs:build": "vuepress build docs",
"prepare": "husky install",
"prettify": "prettier --ignore-path .gitignore --write '**/*.{cjs,js,jsx,md,mjs,ts,tsx}'",
"test": "npm run test-lint && npm run test-unit",
"test-integration": "jest \".integ\\.spec\\.js$\"",
"test-lint": "eslint --ignore-path .gitignore .",
"test-unit": "jest \"^(?!.*\\.integ\\.spec\\.js$)\" && scripts/run-script.js test",
"travis-tests": "scripts/travis-tests.js"
"test-lint": "eslint --ignore-path .gitignore --ignore-pattern packages/xo-web .",
"test-unit": "jest \"^(?!.*\\.integ\\.spec\\.js$)\" && scripts/run-script.js --bail test"
},
"workspaces": [
"@*/*",

View File

@@ -94,5 +94,5 @@ describe('setPropertyClause', () => {
})
it('toString', () => {
assert.equal(ast.toString(), pattern)
assert.equal(ast.toString(), pattern)
})

View File

@@ -2,7 +2,9 @@
// This file has been generated by [index-modules](https://npmjs.com/index-modules)
//
var d = Object.defineProperty
'use strict'
const d = Object.defineProperty
function de(o, n, v) {
d(o, n, { enumerable: true, value: v })
return v
@@ -17,7 +19,7 @@ function dl(o, n, g, a) {
})
}
function r(p) {
var v = require(p)
const v = require(p)
return v && v.__esModule
? v
: typeof v === 'object' || typeof v === 'function'
@@ -32,7 +34,7 @@ function e(p, i) {
}
d(exports, '__esModule', { value: true })
var defaults = de(exports, 'default', {})
const defaults = de(exports, 'default', {})
e('./check.js', 'check')
e('./compare.js', 'compare')
e('./copy.js', 'copy')

View File

@@ -95,15 +95,9 @@ test('It rename and unlink a VHDFile', async () => {
await convertFromRawToVhd(rawFileName, vhdFileName)
await Disposable.use(async function* () {
const handler = yield getSyncedHandler({ url: 'file:///' })
const { size } = await fs.stat(vhdFileName)
const targetFileName = `${tempDir}/renamed.vhd`
await VhdAbstract.rename(handler, vhdFileName, targetFileName)
await VhdAbstract.unlink(handler, vhdFileName)
expect(await fs.exists(vhdFileName)).toEqual(false)
const { size: renamedSize } = await fs.stat(targetFileName)
expect(size).toEqual(renamedSize)
await VhdAbstract.unlink(handler, targetFileName)
expect(await fs.exists(targetFileName)).toEqual(false)
})
})
@@ -122,12 +116,8 @@ test('It rename and unlink a VhdDirectory', async () => {
// it should clean an existing directory
await fs.mkdir(targetFileName)
await fs.writeFile(`${targetFileName}/dummy`, 'I exists')
await VhdAbstract.rename(handler, vhdDirectory, targetFileName)
expect(await fs.exists(vhdDirectory)).toEqual(false)
expect(await fs.exists(targetFileName)).toEqual(true)
await VhdAbstract.unlink(handler, `${targetFileName}/dummy`)
expect(await fs.exists(`${targetFileName}/dummy`)).toEqual(false)
await VhdAbstract.unlink(handler, targetFileName)
expect(await fs.exists(targetFileName)).toEqual(false)
})
})
@@ -138,7 +128,6 @@ test('It create , rename and unlink alias', async () => {
const vhdFileName = `${tempDir}/randomfile.vhd`
await convertFromRawToVhd(rawFileName, vhdFileName)
const aliasFileName = `${tempDir}/aliasFileName.alias.vhd`
const aliasFileNameRenamed = `${tempDir}/aliasFileNameRenamed.alias.vhd`
await Disposable.use(async function* () {
const handler = yield getSyncedHandler({ url: 'file:///' })
@@ -146,15 +135,9 @@ test('It create , rename and unlink alias', async () => {
expect(await fs.exists(aliasFileName)).toEqual(true)
expect(await fs.exists(vhdFileName)).toEqual(true)
await VhdAbstract.rename(handler, aliasFileName, aliasFileNameRenamed)
expect(await fs.exists(aliasFileName)).toEqual(false)
expect(await fs.exists(vhdFileName)).toEqual(true)
expect(await fs.exists(aliasFileNameRenamed)).toEqual(true)
await VhdAbstract.unlink(handler, aliasFileNameRenamed)
await VhdAbstract.unlink(handler, aliasFileName)
expect(await fs.exists(aliasFileName)).toEqual(false)
expect(await fs.exists(vhdFileName)).toEqual(false)
expect(await fs.exists(aliasFileNameRenamed)).toEqual(false)
})
})

View File

@@ -200,14 +200,6 @@ exports.VhdAbstract = class VhdAbstract {
}
}
static async rename(handler, sourcePath, targetPath) {
try {
// delete target if it already exists
await VhdAbstract.unlink(handler, targetPath)
} catch (e) {}
await handler.rename(sourcePath, targetPath)
}
static async unlink(handler, path) {
const resolved = await resolveVhdAlias(handler, path)
try {

View File

@@ -6,10 +6,10 @@ const fs = require('fs-extra')
const rimraf = require('rimraf')
const tmp = require('tmp')
const { getSyncedHandler } = require('@xen-orchestra/fs')
const { pFromCallback } = require('promise-toolbox')
const { pFromCallback, Disposable } = require('promise-toolbox')
const { VhdFile, chainVhd } = require('./index')
const { _cleanupVhds: cleanupVhds, mergeVhdChain } = require('./merge')
const { VhdFile, chainVhd, openVhd } = require('./index')
const { mergeVhdChain } = require('./merge')
const { checkFile, createRandomFile, convertFromRawToVhd } = require('./tests/utils')
@@ -163,6 +163,77 @@ test('it can resume a simple merge ', async () => {
}
})
test('it can resume a failed renaming ', async () => {
const mbOfFather = 8
const mbOfChildren = 4
const parentRandomFileName = `${tempDir}/randomfile`
await createRandomFile(`${tempDir}/randomfile`, mbOfFather)
await convertFromRawToVhd(`${tempDir}/randomfile`, `${tempDir}/parent.vhd`)
const parentVhd = new VhdFile(handler, 'parent.vhd')
await parentVhd.readHeaderAndFooter()
await createRandomFile(`${tempDir}/small_randomfile`, mbOfChildren)
await convertFromRawToVhd(`${tempDir}/small_randomfile`, `${tempDir}/child1.vhd`)
await chainVhd(handler, 'parent.vhd', handler, 'child1.vhd', true)
const childVhd = new VhdFile(handler, 'child1.vhd')
await childVhd.readHeaderAndFooter()
await handler.writeFile(
'.parent.vhd.merge.json',
JSON.stringify({
parent: {
header: parentVhd.header.checksum,
},
child: {
header: childVhd.header.checksum,
},
step: 'cleanupVhds',
})
)
// expect merge to succeed
await mergeVhdChain(handler, ['parent.vhd', 'child1.vhd'])
// parent have been renamed
expect(await fs.exists(`${tempDir}/parent.vhd`)).toBeFalsy()
expect(await fs.exists(`${tempDir}/.parent.vhd.merge.json`)).toBeFalsy()
Disposable.use(openVhd(handler, 'child1.vhd'), async mergedVhd => {
await mergedVhd.readBlockAllocationTable()
// the resume is at the step 'cleanupVhds': it should not have merged blocks and should still contain parent data
let offset = 0
const fd = await fs.open(parentRandomFileName, 'r')
for await (const block of mergedVhd.blocks()) {
const blockContent = block.data
const buffer = Buffer.alloc(blockContent.length)
await fs.read(fd, buffer, 0, buffer.length, offset)
expect(buffer.equals(blockContent)).toEqual(true)
offset += childVhd.header.blockSize
}
})
// merge succeed if renaming was already done
await handler.writeFile(
'.parent.vhd.merge.json',
JSON.stringify({
parent: {
header: parentVhd.header.checksum,
},
child: {
header: childVhd.header.checksum,
},
step: 'cleanupVhds',
})
)
await mergeVhdChain(handler, ['parent.vhd', 'child1.vhd'])
expect(await fs.exists(`${tempDir}/parent.vhd`)).toBeFalsy()
expect(await fs.exists(`${tempDir}/child1.vhd`)).toBeTruthy()
expect(await fs.exists(`${tempDir}/.parent.vhd.merge.json`)).toBeFalsy()
})
test('it can resume a multiple merge ', async () => {
const mbOfFather = 8
const mbOfChildren = 6
@@ -226,7 +297,11 @@ test('it can resume a multiple merge ', async () => {
})
)
// it should succeed
await mergeVhdChain(handler, ['parent.vhd', 'child.vhd', 'grandchild.vhd'])
await mergeVhdChain(handler, ['parent.vhd', 'child.vhd', 'grandchild.vhd'], { removeUnused: true })
expect(await fs.exists(`${tempDir}/parent.vhd`)).toBeFalsy()
expect(await fs.exists(`${tempDir}/child.vhd`)).toBeFalsy()
expect(await fs.exists(`${tempDir}/grandchild.vhd`)).toBeTruthy()
expect(await fs.exists(`${tempDir}/.parent.vhd.merge.json`)).toBeFalsy()
})
test('it merge multiple child in one pass ', async () => {
@@ -278,18 +353,3 @@ test('it merge multiple child in one pass ', async () => {
offset += parentVhd.header.blockSize
}
})
test('it cleans vhd mergedfiles', async () => {
await handler.writeFile('parent', 'parentData')
await handler.writeFile('child1', 'child1Data')
await handler.writeFile('child2', 'child2Data')
await handler.writeFile('child3', 'child3Data')
await cleanupVhds(handler, ['parent', 'child1', 'child2', 'child3'], { merge: true, removeUnused: true })
// only child3 should stay, with the data of parent
const [child3, ...other] = await handler.list('.')
expect(other.length).toEqual(0)
expect(child3).toEqual('child3')
expect((await handler.readFile('child3')).toString('utf8')).toEqual('parentData')
})

View File

@@ -41,91 +41,97 @@ const { warn } = createLogger('vhd-lib:merge')
// | |
// \_____________rename_____________/
// write the merge progress file at most every `delay` seconds
function makeThrottledWriter(handler, path, delay) {
let lastWrite = Date.now()
return async json => {
class Merger {
#chain
#childrenPaths
#handler
#isResuming = false
#lastStateWrittenAt = 0
#logInfo
#mergeBlockConcurrency
#onProgress
#parentPath
#removeUnused
#state
#statePath
constructor(handler, chain, { onProgress, logInfo, removeUnused, mergeBlockConcurrency }) {
this.#chain = chain
this.#handler = handler
this.#parentPath = chain[0]
this.#childrenPaths = chain.slice(1)
this.#logInfo = logInfo
this.#onProgress = onProgress
this.#removeUnused = removeUnused
this.#mergeBlockConcurrency = mergeBlockConcurrency
this.#statePath = dirname(this.#parentPath) + '/.' + basename(this.#parentPath) + '.merge.json'
}
async #writeState() {
await this.#handler.writeFile(this.#statePath, JSON.stringify(this.#state), { flags: 'w' }).catch(warn)
}
async #writeStateThrottled() {
const delay = 10e3
const now = Date.now()
if (now - lastWrite > delay) {
lastWrite = now
await handler.writeFile(path, JSON.stringify(json), { flags: 'w' }).catch(warn)
if (now - this.#lastStateWrittenAt > delay) {
this.#lastStateWrittenAt = now
await this.#writeState()
}
}
}
// make the rename / delete part of the merge process
// will fail if parent and children are in different remote
async function cleanupVhds(handler, chain, { logInfo = noop, removeUnused = false } = {}) {
const parent = chain[0]
const children = chain.slice(1, -1)
const mergeTargetChild = chain[chain.length - 1]
await VhdAbstract.rename(handler, parent, mergeTargetChild)
return asyncMap(children, child => {
logInfo(`the VHD child is already merged`, { child })
if (removeUnused) {
logInfo(`deleting merged VHD child`, { child })
return VhdAbstract.unlink(handler, child)
}
})
}
module.exports._cleanupVhds = cleanupVhds
// Merge a chain of VHDs into a single VHD
module.exports.mergeVhdChain = limitConcurrency(2)(async function mergeVhdChain(
handler,
chain,
{ onProgress = noop, logInfo = noop, removeUnused = false, mergeBlockConcurrency = 2 } = {}
) {
assert(chain.length >= 2)
const parentPath = chain[0]
const childrenPaths = chain.slice(1)
const mergeStatePath = dirname(parentPath) + '/.' + basename(parentPath) + '.merge.json'
return await Disposable.use(async function* () {
let mergeState
let isResuming = false
async merge() {
try {
const mergeStateContent = await handler.readFile(mergeStatePath)
mergeState = JSON.parse(mergeStateContent)
const mergeStateContent = await this.#handler.readFile(this.#statePath)
this.#state = JSON.parse(mergeStateContent)
// work-around a bug introduce in 97d94b795
//
// currentBlock could be `null` due to the JSON.stringify of a `NaN` value
if (mergeState.currentBlock === null) {
mergeState.currentBlock = 0
if (this.#state.currentBlock === null) {
this.#state.currentBlock = 0
}
this.#isResuming = true
} catch (error) {
if (error.code !== 'ENOENT') {
warn('problem while checking the merge state', { error })
}
}
/* eslint-disable no-fallthrough */
switch (this.#state?.step ?? 'mergeBlocks') {
case 'mergeBlocks':
await this.#step_mergeBlocks()
case 'cleanupVhds':
await this.#step_cleanVhds()
return this.#cleanup()
default:
warn(`Step ${this.#state.step} is unknown`, { state: this.#state })
}
/* eslint-enable no-fallthrough */
}
async *#openVhds() {
// during merging, the end footer of the parent can be overwritten by new blocks
// we should use it as a way to check vhd health
const parentVhd = yield openVhd(handler, parentPath, {
const parentVhd = yield openVhd(this.#handler, this.#parentPath, {
flags: 'r+',
checkSecondFooter: mergeState === undefined,
checkSecondFooter: this.#state === undefined,
})
let childVhd
const parentIsVhdDirectory = parentVhd instanceof VhdDirectory
let childIsVhdDirectory
if (childrenPaths.length !== 1) {
childVhd = yield VhdSynthetic.open(handler, childrenPaths)
if (this.#childrenPaths.length !== 1) {
childVhd = yield VhdSynthetic.open(this.#handler, this.#childrenPaths)
childIsVhdDirectory = childVhd.checkVhdsClass(VhdDirectory)
} else {
childVhd = yield openVhd(handler, childrenPaths[0])
childVhd = yield openVhd(this.#handler, this.#childrenPaths[0])
childIsVhdDirectory = childVhd instanceof VhdDirectory
}
// merging vhdFile must not be concurrently with the potential block reordering after a change
const concurrency = parentIsVhdDirectory && childIsVhdDirectory ? mergeBlockConcurrency : 1
if (mergeState === undefined) {
this.#mergeBlockConcurrency = parentIsVhdDirectory && childIsVhdDirectory ? this.#mergeBlockConcurrency : 1
if (this.#state === undefined) {
// merge should be along a vhd chain
assert.strictEqual(UUID.stringify(childVhd.header.parentUuid), UUID.stringify(parentVhd.footer.uuid))
const parentDiskType = parentVhd.footer.diskType
@@ -133,69 +139,86 @@ module.exports.mergeVhdChain = limitConcurrency(2)(async function mergeVhdChain(
assert.strictEqual(childVhd.footer.diskType, DISK_TYPES.DIFFERENCING)
assert.strictEqual(childVhd.header.blockSize, parentVhd.header.blockSize)
} else {
isResuming = true
// vhd should not have changed to resume
assert.strictEqual(parentVhd.header.checksum, mergeState.parent.header)
assert.strictEqual(childVhd.header.checksum, mergeState.child.header)
assert.strictEqual(parentVhd.header.checksum, this.#state.parent.header)
assert.strictEqual(childVhd.header.checksum, this.#state.child.header)
}
// Read allocation table of child/parent.
await Promise.all([parentVhd.readBlockAllocationTable(), childVhd.readBlockAllocationTable()])
return { childVhd, parentVhd }
}
async #step_mergeBlocks() {
const self = this
await Disposable.use(async function* () {
const { childVhd, parentVhd } = yield* self.#openVhds()
const { maxTableEntries } = childVhd.header
if (self.#state === undefined) {
await parentVhd.ensureBatSize(childVhd.header.maxTableEntries)
self.#state = {
child: { header: childVhd.header.checksum },
parent: { header: parentVhd.header.checksum },
currentBlock: 0,
mergedDataSize: 0,
step: 'mergeBlocks',
chain: self.#chain.map(vhdPath => handlerPath.relativeFromFile(self.#statePath, vhdPath)),
}
// finds first allocated block for the 2 following loops
while (self.#state.currentBlock < maxTableEntries && !childVhd.containsBlock(self.#state.currentBlock)) {
++self.#state.currentBlock
}
await self.#writeState()
}
await self.#mergeBlocks(parentVhd, childVhd)
await self.#updateHeaders(parentVhd, childVhd)
})
}
async #mergeBlocks(parentVhd, childVhd) {
const { maxTableEntries } = childVhd.header
if (mergeState === undefined) {
await parentVhd.ensureBatSize(childVhd.header.maxTableEntries)
mergeState = {
child: { header: childVhd.header.checksum },
parent: { header: parentVhd.header.checksum },
currentBlock: 0,
mergedDataSize: 0,
chain: chain.map(vhdPath => handlerPath.relativeFromFile(mergeStatePath, vhdPath)),
}
// finds first allocated block for the 2 following loops
while (mergeState.currentBlock < maxTableEntries && !childVhd.containsBlock(mergeState.currentBlock)) {
++mergeState.currentBlock
}
}
// counts number of allocated blocks
const toMerge = []
for (let block = mergeState.currentBlock; block < maxTableEntries; block++) {
for (let block = this.#state.currentBlock; block < maxTableEntries; block++) {
if (childVhd.containsBlock(block)) {
toMerge.push(block)
}
}
const nBlocks = toMerge.length
onProgress({ total: nBlocks, done: 0 })
this.#onProgress({ total: nBlocks, done: 0 })
const merging = new Set()
let counter = 0
const mergeStateWriter = makeThrottledWriter(handler, mergeStatePath, 10e3)
await asyncEach(
toMerge,
async blockId => {
merging.add(blockId)
mergeState.mergedDataSize += await parentVhd.mergeBlock(childVhd, blockId, isResuming)
this.#state.mergedDataSize += await parentVhd.mergeBlock(childVhd, blockId, this.#isResuming)
mergeState.currentBlock = Math.min(...merging)
this.#state.currentBlock = Math.min(...merging)
merging.delete(blockId)
onProgress({
this.#onProgress({
total: nBlocks,
done: counter + 1,
})
counter++
mergeStateWriter(mergeState)
this.#writeStateThrottled()
},
{
concurrency,
concurrency: this.#mergeBlockConcurrency,
}
)
onProgress({ total: nBlocks, done: nBlocks })
// ensure data size is correct
await this.#writeState()
this.#onProgress({ total: nBlocks, done: nBlocks })
}
async #updateHeaders(parentVhd, childVhd) {
// some blocks could have been created or moved in parent : write bat
await parentVhd.writeBlockAllocationTable()
@@ -211,19 +234,70 @@ module.exports.mergeVhdChain = limitConcurrency(2)(async function mergeVhdChain(
// necessary to update values and to recreate the footer after block
// creation
await parentVhd.writeFooter()
}
await cleanupVhds(handler, chain, { logInfo, removeUnused })
// make the rename / delete part of the merge process
// will fail if parent and children are in different remote
async #step_cleanVhds() {
assert.notEqual(this.#state, undefined)
this.#state.step = 'cleanupVhds'
await this.#writeState()
// should be a disposable
handler.unlink(mergeStatePath).catch(warn)
const chain = this.#chain
const handler = this.#handler
return mergeState.mergedDataSize
}).catch(error => {
const parent = chain[0]
const children = chain.slice(1, -1)
const mergeTargetChild = chain[chain.length - 1]
// in case it is an alias, renaming parent to mergeTargetChild will keep the real data
// of mergeTargetChild in the data folder
// mergeTargetChild is already in an incomplete state, its blocks have been transferred to parent
await VhdAbstract.unlink(handler, mergeTargetChild)
try {
await handler.rename(parent, mergeTargetChild)
} catch (error) {
// maybe the renaming was already successful during merge
if (error.code === 'ENOENT' && this.#isResuming) {
Disposable.use(openVhd(handler, mergeTargetChild), vhd => {
// we are sure that mergeTargetChild is the right one
assert.strictEqual(vhd.header.checksum, this.#state.parent.header)
})
this.#logInfo(`the VHD parent was already renamed`, { parent, mergeTargetChild })
}
}
await asyncMap(children, child => {
this.#logInfo(`the VHD child is already merged`, { child })
if (this.#removeUnused) {
this.#logInfo(`deleting merged VHD child`, { child })
return VhdAbstract.unlink(handler, child)
}
})
}
async #cleanup() {
const mergedSize = this.#state?.mergedDataSize ?? 0
await this.#handler.unlink(this.#statePath).catch(warn)
return mergedSize
}
}
module.exports.mergeVhdChain = limitConcurrency(2)(async function mergeVhdChain(
handler,
chain,
{ onProgress = noop, logInfo = noop, removeUnused = false, mergeBlockConcurrency = 2 } = {}
) {
const merger = new Merger(handler, chain, { onProgress, logInfo, removeUnused, mergeBlockConcurrency })
try {
return merger.merge()
} catch (error) {
try {
error.chain = chain
} finally {
// eslint-disable-next-line no-unsafe-finally
throw error
}
})
}
})

View File

@@ -0,0 +1,5 @@
'use strict'
// Disables ESLint for every file under this directory:
// '*' matches all paths, so nothing in this package is linted.
// NOTE(review): presumably added to exclude a legacy/self-linted package
// (e.g. xo-web or @xen-orchestra/lite) from the root lint run — confirm location.
module.exports = {
  ignorePatterns: ['*'],
}

View File

@@ -55,3 +55,4 @@ setTimeout(function () {
name: 'Steve',
})
}, 10)
/* eslint-enable no-console */

View File

@@ -1,3 +1,5 @@
/* eslint-disable no-console */
'use strict'
process.on('unhandledRejection', function (error) {
@@ -59,3 +61,5 @@ xo.open()
.then(function () {
return xo.close()
})
/* eslint-enable no-console */

View File

@@ -284,8 +284,6 @@ class BackupReportsXoPlugin {
getErrorMarkdown(log),
]
const nagiosText = []
// body
for (const status of STATUS) {
const tasks = tasksByStatus[status]
@@ -310,10 +308,6 @@ class BackupReportsXoPlugin {
const { title, body } = taskMarkdown
const subMarkdown = [...body, ...getWarningsMarkdown(task.warnings)]
if (task.status !== 'success') {
nagiosText.push(`[${task.status}] ${title}`)
}
for (const subTask of task.tasks ?? []) {
const taskMarkdown = await getMarkdown(subTask, { formatDate, xo })
if (taskMarkdown === undefined) {
@@ -335,10 +329,6 @@ class BackupReportsXoPlugin {
subject: `[Xen Orchestra] ${log.status} Metadata backup report for ${log.jobName} ${STATUS_ICON[log.status]}`,
markdown: toMarkdown(markdown),
success: log.status === 'success',
nagiosMarkdown:
log.status === 'success'
? `[Xen Orchestra] [Success] Metadata backup report for ${log.jobName}`
: `[Xen Orchestra] [${log.status}] Metadata backup report for ${log.jobName} - ${nagiosText.join(' ')}`,
})
}
@@ -369,9 +359,6 @@ class BackupReportsXoPlugin {
mailReceivers,
markdown: toMarkdown(markdown),
success: false,
nagiosMarkdown: `[Xen Orchestra] [${log.status}] Backup report for ${jobName}${
log.result?.message !== undefined ? ` - Error : ${log.result.message}` : ''
}`,
})
}
@@ -379,7 +366,6 @@ class BackupReportsXoPlugin {
const skippedVmsText = []
const successfulVmsText = []
const interruptedVmsText = []
const nagiosText = []
let globalMergeSize = 0
let globalTransferSize = 0
@@ -401,16 +387,13 @@ class BackupReportsXoPlugin {
if (type === 'SR') {
const { name_label: name, uuid } = xo.getObject(id)
failedTasksText.push(`### ${name}`, '', `- **UUID**: ${uuid}`)
nagiosText.push(`[(${type} failed) ${name} : ${taskLog.result.message} ]`)
} else {
const { name } = await xo.getRemote(id)
failedTasksText.push(`### ${name}`, '', `- **UUID**: ${id}`)
nagiosText.push(`[(${type} failed) ${name} : ${taskLog.result.message} ]`)
}
} catch (error) {
logger.warn(error)
failedTasksText.push(`### ${UNKNOWN_ITEM}`, '', `- **UUID**: ${id}`)
nagiosText.push(`[(${type} failed) ${id} : ${taskLog.result.message} ]`)
}
failedTasksText.push(
@@ -553,22 +536,17 @@ class BackupReportsXoPlugin {
: taskLog.result.message
}`
)
nagiosText.push(`[(Skipped) ${vm !== undefined ? vm.name_label : 'undefined'} : ${taskLog.result.message} ]`)
} else {
++nFailures
failedTasksText.push(...text, `- **Error**: ${taskLog.result.message}`)
nagiosText.push(`[(Failed) ${vm !== undefined ? vm.name_label : 'undefined'} : ${taskLog.result.message} ]`)
}
} else {
if (taskLog.status === 'failure') {
++nFailures
failedTasksText.push(...text, ...subText)
nagiosText.push(`[${vm !== undefined ? vm.name_label : 'undefined'}: (failed)[${failedSubTasks.toString()}]]`)
} else if (taskLog.status === 'interrupted') {
++nInterrupted
interruptedVmsText.push(...text, ...subText)
nagiosText.push(`[(Interrupted) ${vm !== undefined ? vm.name_label : 'undefined'}]`)
} else {
++nSuccesses
successfulVmsText.push(...text, ...subText)
@@ -614,16 +592,10 @@ class BackupReportsXoPlugin {
markdown: toMarkdown(markdown),
subject: `[Xen Orchestra] ${log.status} Backup report for ${jobName} ${STATUS_ICON[log.status]}`,
success: log.status === 'success',
nagiosMarkdown:
log.status === 'success'
? `[Xen Orchestra] [Success] Backup report for ${jobName}`
: `[Xen Orchestra] [${
nFailures !== 0 ? 'Failure' : 'Skipped'
}] Backup report for ${jobName} - VMs : ${nagiosText.join(' ')}`,
})
}
_sendReport({ mailReceivers, markdown, nagiosMarkdown, subject, success }) {
_sendReport({ mailReceivers, markdown, subject, success }) {
if (mailReceivers === undefined || mailReceivers.length === 0) {
mailReceivers = this._mailsReceivers
}
@@ -645,11 +617,6 @@ class BackupReportsXoPlugin {
xo.sendSlackMessage({
message: markdown,
}),
xo.sendPassiveCheck !== undefined &&
xo.sendPassiveCheck({
status: success ? 0 : 2,
message: nagiosMarkdown,
}),
xo.sendIcinga2Status !== undefined &&
xo.sendIcinga2Status({
status: success ? 'OK' : 'CRITICAL',
@@ -683,7 +650,6 @@ class BackupReportsXoPlugin {
subject: `[Xen Orchestra] ${globalStatus} ${icon}`,
markdown,
success: false,
nagiosMarkdown: `[Xen Orchestra] [${globalStatus}] Error : ${error.message}`,
})
}
@@ -720,7 +686,6 @@ class BackupReportsXoPlugin {
let nSkipped = 0
const failedBackupsText = []
const nagiosText = []
const skippedBackupsText = []
const successfulBackupText = []
@@ -754,13 +719,9 @@ class BackupReportsXoPlugin {
`- **Reason**: ${message === UNHEALTHY_VDI_CHAIN_ERROR ? UNHEALTHY_VDI_CHAIN_MESSAGE : message}`,
''
)
nagiosText.push(`[(Skipped) ${vm !== undefined ? vm.name_label : 'undefined'} : ${message} ]`)
} else {
++nFailures
failedBackupsText.push(...text, `- **Error**: ${message}`, '')
nagiosText.push(`[(Failed) ${vm !== undefined ? vm.name_label : 'undefined'} : ${message} ]`)
}
} else if (!reportOnFailure) {
const { returnedValue } = call
@@ -835,11 +796,6 @@ class BackupReportsXoPlugin {
globalSuccess ? ICON_SUCCESS : nFailures !== 0 ? ICON_FAILURE : ICON_SKIPPED
}`,
success: globalSuccess,
nagiosMarkdown: globalSuccess
? `[Xen Orchestra] [Success] Backup report for ${tag}`
: `[Xen Orchestra] [${
nFailures !== 0 ? 'Failure' : 'Skipped'
}] Backup report for ${tag} - VMs : ${nagiosText.join(' ')}`,
})
}
}

View File

@@ -1,3 +1,5 @@
/* eslint-disable no-console */
'use strict'
// This is one of the simplest xo-server's plugin than can be created.
@@ -78,3 +80,5 @@ exports.default = function (opts) {
},
}
}
/* eslint-enable no-console */

View File

@@ -23,17 +23,24 @@ export const configurationSchema = {
type: 'string',
description: 'The encryption key',
},
host: {
type: 'string',
description: 'The host name in Nagios',
},
service: {
type: 'string',
description: 'The service description in Nagios',
},
},
additionalProperties: false,
required: ['server', 'port', 'key', 'host', 'service'],
required: ['server', 'port', 'key'],
}
export const testSchema = {
type: 'object',
properties: {
host: {
description: 'Nagios host',
type: 'string',
},
service: {
description: 'Nagios service',
type: 'string',
},
},
required: ['host', 'service'],
}
// ===================================================================
@@ -102,15 +109,22 @@ class XoServerNagios {
this._unset()
}
test() {
return this._sendPassiveCheck({
message: 'The server-nagios plugin for Xen Orchestra server seems to be working fine, nicely done :)',
status: OK,
})
test({ host, service }) {
return this._sendPassiveCheck(
{
message: 'The server-nagios plugin for Xen Orchestra server seems to be working fine, nicely done :)',
status: OK,
},
host,
service
)
}
_sendPassiveCheck({ message, status }) {
_sendPassiveCheck({ message, status }, host, service) {
return new Promise((resolve, reject) => {
this._conf.host = host
this._conf.service = service
if (/\r|\n/.test(message)) {
warn('the message must not contain a line break', { message })
for (let i = 0, n = message.length; i < n; ++i) {

View File

@@ -6,6 +6,7 @@ function handleHook(type, data) {
const hooks = this._hooks[data.method]?.[type]
if (hooks !== undefined) {
return Promise.all(
// eslint-disable-next-line array-callback-return
hooks.map(({ url, waitForResponse = false }) => {
const promise = this._makeRequest(url, type, data).catch(error => {
log.error('web hook failed', {

View File

@@ -1,3 +1,31 @@
import humanFormat from 'human-format'
import ms from 'ms'
import { createLogger } from '@xen-orchestra/log'
const { warn } = createLogger('xo:server:handleBackupLog')
// Reports a single finished VM backup to Nagios as a passive check.
//
// Best effort: any failure is logged via `warn` and never interrupts the
// backup workflow.
//
// app          - xo-server application instance (provides sendPassiveCheck and getObject)
// jobName      - backup job name, used as the Nagios *service*
// vmBackupInfo - per-VM data collected from backup logs: { id, result, size, start, end }
async function sendToNagios(app, jobName, vmBackupInfo) {
  try {
    const messageToNagios = {
      id: vmBackupInfo.id,
      result: vmBackupInfo.result,
      // human-readable size and duration for the Nagios message
      size: humanFormat.bytes(vmBackupInfo.size),
      duration: ms(vmBackupInfo.end - vmBackupInfo.start),
    }
    await app.sendPassiveCheck(
      {
        message: JSON.stringify(messageToNagios),
        // 0 is the Nagios "OK" status code
        status: 0,
      },
      // the VM's name label is used as the Nagios *host*
      app.getObject(messageToNagios.id).name_label,
      jobName
    )
  } catch (error) {
    // never let Nagios reporting break the backup job itself
    warn('sendToNagios:', error)
  }
}
function forwardResult(log) {
if (log.status === 'failure') {
throw log.result
@@ -6,8 +34,40 @@ function forwardResult(log) {
}
// it records logs generated by `@xen-orchestra/backups/Task#run`
export const handleBackupLog = (log, { logger, localTaskIds, rootTaskId, runJobId = rootTaskId, handleRootTaskId }) => {
const { event, message, taskId } = log
export const handleBackupLog = (
log,
{ vmBackupInfo, app, jobName, logger, localTaskIds, rootTaskId, runJobId = rootTaskId, handleRootTaskId }
) => {
const { event, message, parentId, taskId } = log
if (app !== undefined && jobName !== undefined) {
if (event === 'start') {
if (log.data?.type === 'VM') {
vmBackupInfo.set('vm-' + taskId, {
id: log.data.id,
start: log.timestamp,
})
} else if (vmBackupInfo.has('vm-' + parentId) && log.message === 'export') {
vmBackupInfo.set('export-' + taskId, {
parentId: 'vm-' + parentId,
})
} else if (vmBackupInfo.has('export-' + parentId) && log.message === 'transfer') {
vmBackupInfo.set('transfer-' + taskId, {
parentId: 'export-' + parentId,
})
}
} else if (event === 'end') {
if (vmBackupInfo.has('vm-' + taskId)) {
const data = vmBackupInfo.get('vm-' + taskId)
data.result = log.status
data.end = log.timestamp
sendToNagios(app, jobName, data)
} else if (vmBackupInfo.has('transfer-' + taskId)) {
vmBackupInfo.get(vmBackupInfo.get(vmBackupInfo.get('transfer-' + taskId).parentId).parentId).size =
log.result.size
}
}
}
// If `runJobId` is defined, it means that the root task is already handled by `runJob`
if (runJobId !== undefined) {

View File

@@ -0,0 +1,28 @@
import splitHost from 'split-host'
// https://about.gitlab.com/blog/2021/01/27/we-need-to-talk-no-proxy/
// Decides whether requests to `host` should go through the HTTP(S) proxy,
// honoring the NO_PROXY / no_proxy convention.
//
// host - host (optionally with port / brackets for IPv6), e.g. 'example.org:1024'
// env  - object providing NO_PROXY / no_proxy (defaults to process.env)
//
// Returns true when the host should be proxied, false when it matches a
// no_proxy entry. Ports are ignored: only hostnames are compared.
export function shouldProxy(host, { NO_PROXY, no_proxy = NO_PROXY } = process.env) {
  // no list configured → always proxy
  if (no_proxy == null) {
    return true
  }

  // wildcard disables proxying entirely
  if (no_proxy === '*') {
    return false
  }

  const { hostname } = splitHost(host)

  for (const rawEntry of no_proxy.split(',')) {
    let entry = rawEntry.trim()

    // ignore empty entries (e.g. trailing commas)
    if (entry === '') {
      continue
    }

    // a leading dot means "this domain and all its subdomains"
    if (entry.startsWith('.')) {
      entry = entry.slice(1)
    }

    if (hostname.endsWith(splitHost(entry).hostname)) {
      return false
    }
  }

  return true
}

View File

@@ -0,0 +1,61 @@
import { shouldProxy } from './_shouldProxy.mjs'
import t from 'tap'
// Normalizes a value to an array: undefined → [], array → itself, anything else → singleton.
const ensureArray = value => {
  if (value === undefined) {
    return []
  }
  if (Array.isArray(value)) {
    return value
  }
  return [value]
}
;[
{
no_proxy: null,
ok: 'example.org',
},
{
no_proxy: '*',
nok: 'example.org',
},
{
no_proxy: 'example.org, example.com',
nok: ['example.org', 'example.org:1024', 'example.com'],
ok: 'example.net',
},
{
no_proxy: ['example.org', '.example.org'],
nok: ['example.org', 'example.org:1024', 'sub.example.org'],
ok: 'example.com',
},
// {
// no_proxy: 'example.org:1024',
// nok: ['example.org:1024', 'sub.example.org:1024'],
// ok: ['example.com', 'example.org'],
// },
{
no_proxy: '[::1]',
nok: ['[::1]', '[::1]:1024'],
ok: ['[::2]', '[0::1]'],
},
].forEach(({ no_proxy: noProxies, ok, nok }) => {
for (const no_proxy of ensureArray(noProxies)) {
const opts = { no_proxy }
t.test(String(no_proxy), function (t) {
ok = ensureArray(ok)
if (ok.length !== 0) {
t.test('should proxy', t => {
for (const host of ok) {
t.equal(shouldProxy(host, opts), true, host)
}
t.end()
})
}
nok = ensureArray(nok)
if (nok.length !== 0) {
t.test('should not proxy', t => {
for (const host of nok) {
t.equal(shouldProxy(host, opts), false, host)
}
t.end()
})
}
t.end()
})
}
})

View File

@@ -57,8 +57,10 @@ export async function copyVm({ vm, sr }) {
// full
{
// eslint-disable-next-line no-console
console.log('export full VM...')
const input = await srcXapi.VM_export(vm._xapiRef)
// eslint-disable-next-line no-console
console.log('import full VM...')
await tgtXapi.VM_destroy((await tgtXapi.importVm(input, { srId: sr })).$ref)
}

View File

@@ -596,6 +596,22 @@ migrate.resolve = {
migrationNetwork: ['migrationNetwork', 'network', 'administrate'],
}
// Warm-migrate a VM to another SR: replicate it while running, shut it down,
// replicate the final delta, then optionally start the copy and delete the
// source (delegated to `warmMigrateVm`).
export async function warmMigration({ vm, sr, startDestinationVm, deleteSourceVm }) {
  // the destructured names must match the `warmMigration.params` schema
  // below, otherwise the boolean flags sent by clients are silently lost
  await this.warmMigrateVm(vm, sr, startDestinationVm, deleteSourceVm)
}

warmMigration.permission = 'admin'

warmMigration.params = {
  vm: {
    type: 'string',
  },
  sr: {
    type: 'string',
  },
  startDestinationVm: { type: 'boolean' },
  deleteSourceVm: { type: 'boolean' },
}
// -------------------------------------------------------------------
export const set = defer(async function ($defer, params) {

View File

@@ -149,11 +149,15 @@ export default class BackupNg {
try {
if (!useXoProxy && backupsConfig.disableWorkers) {
const localTaskIds = { __proto__: null }
const vmBackupInfo = new Map()
return await Task.run(
{
name: 'backup run',
onLog: log =>
handleBackupLog(log, {
vmBackupInfo,
app: this._app,
jobName: job.name,
localTaskIds,
logger,
runJobId,
@@ -279,8 +283,12 @@ export default class BackupNg {
const localTaskIds = { __proto__: null }
let result
const vmBackupInfo = new Map()
for await (const log of logsStream) {
result = handleBackupLog(log, {
vmBackupInfo,
app: this._app,
jobName: job.name,
logger,
localTaskIds,
runJobId,
@@ -296,6 +304,7 @@ export default class BackupNg {
}
} else {
const localTaskIds = { __proto__: null }
const vmBackupInfo = new Map()
return await runBackupWorker(
{
config: backupsConfig,
@@ -306,6 +315,9 @@ export default class BackupNg {
},
log =>
handleBackupLog(log, {
vmBackupInfo,
app: this._app,
jobName: job.name,
logger,
localTaskIds,
runJobId,

View File

@@ -0,0 +1,109 @@
import { Backup } from '@xen-orchestra/backups/Backup.js'
import { v4 as generateUuid } from 'uuid'
export default class MigrateVm {
  constructor(app) {
    this._app = app
  }

  // Build the one-shot delta replication backup job used as the transport
  // for the warm migration.
  //
  // A `Backup` must be re-instantiated for each run: it is not reusable.
  #createWarmBackup(sourceVmId, srId, jobId) {
    const app = this._app
    const config = {
      snapshotNameLabelTpl: '[XO warm migration {job.name}] {vm.name_label}',
    }
    const job = {
      type: 'backup',
      id: jobId,
      mode: 'delta',
      vms: { id: sourceVmId },
      name: `Warm migration`,
      srs: { id: srId },
      settings: {
        '': {
          // mandatory for delta replication writer
          copyRetention: 1,
        },
      },
    }

    const schedule = { id: 'one-time' }
    // for now we only support this from the main OA, no proxy
    return new Backup({
      config,
      job,
      schedule,
      getAdapter: async remoteId => app.getBackupsRemoteAdapter(await app.getRemoteWithCredentials(remoteId)),

      // `@xen-orchestra/backups/Backup` expect that `getConnectedRecord` returns a promise
      getConnectedRecord: async (xapiType, uuid) => app.getXapiObject(uuid),
    })
  }

  /**
   * Warm-migrate a VM to another SR.
   *
   * 1. replicate the running VM to the target SR
   * 2. shut the source down and block it from being restarted
   * 3. replicate again to transfer the final delta
   * 4. unblock the replica, optionally start it and delete the source
   *
   * @param {string} sourceVmId - id of the VM to migrate
   * @param {string} srId - id of the destination SR
   * @param {boolean} [startDestVm=true] - start the replica once ready
   * @param {boolean} [deleteSource=false] - destroy the source VM afterwards
   *   (note: only honored when `startDestVm` is true — TODO confirm intended)
   */
  async warmMigrateVm(sourceVmId, srId, startDestVm = true, deleteSource = false) {
    // we'll use a one time use continuous replication job with the VM to migrate
    const jobId = generateUuid()
    const app = this._app
    const sourceVm = app.getXapiObject(sourceVmId)
    let backup = this.#createWarmBackup(sourceVmId, srId, jobId)
    await backup.run()
    const xapi = sourceVm.$xapi
    const ref = sourceVm.$ref

    // stop the source VM before the final transfer; fall back to a hard
    // shutdown when the guest does not cooperate
    try {
      await xapi.callAsync('VM.clean_shutdown', ref)
    } catch (error) {
      await xapi.callAsync('VM.hard_shutdown', ref)
    }
    // make it so it can't be restarted by error
    const message =
      'This VM has been migrated somewhere else and might not be up to date, check twice before starting it.'
    await sourceVm.update_blocked_operations({
      start: message,
      start_on: message,
    })

    // run the transfer again to transfer the changed parts
    // since the source is stopped, there won't be any new change after
    //
    // the same job id MUST be reused: the delta replication writer then
    // updates the existing replica (instead of creating a new full copy)
    // and the target lookup below finds the up-to-date VM
    backup = this.#createWarmBackup(sourceVmId, srId, jobId)
    await backup.run()

    // find the destination Vm
    const targets = Object.keys(
      app.getObjects({
        filter: obj => {
          return (
            'other' in obj &&
            obj.other['xo:backup:job'] === jobId &&
            obj.other['xo:backup:sr'] === srId &&
            obj.other['xo:backup:vm'] === sourceVm.uuid &&
            'start' in obj.blockedOperations
          )
        },
      })
    )
    if (targets.length === 0) {
      throw new Error(`Vm target of warm migration not found for ${sourceVmId} on SR ${srId} `)
    }
    if (targets.length > 1) {
      throw new Error(`Multiple target of warm migration found for ${sourceVmId} on SR ${srId} `)
    }
    const targetVm = app.getXapiObject(targets[0])

    // new vm is ready to start
    // delta replication writer as set this as blocked
    await targetVm.update_blocked_operations({ start: null, start_on: null })

    if (startDestVm) {
      // boot it
      await targetVm.$xapi.startVm(targetVm.$ref)
      // @todo wait for really started

      // delete source
      if (deleteSource) {
        // await the destruction so failures surface here instead of
        // becoming an unhandled rejection
        await sourceVm.$xapi.VM_destroy(sourceVm.$ref)
      } else {
        // @todo should we delete the snapshot if we keep the source vm ?
      }
    }
  }
}

View File

@@ -29,7 +29,8 @@ async function vmdkToVhd(vmdkReadStream, grainLogicalAddressList, grainFileOffse
export async function computeVmdkLength(diskName, vhdReadStream) {
let length = 0
for await (const b of await vhdToVMDKIterator(diskName, vhdReadStream)) {
const { iterator } = await vhdToVMDKIterator(diskName, vhdReadStream)
for await (const b of iterator) {
length += b.length
}
return length
@@ -43,13 +44,15 @@ export async function computeVmdkLength(diskName, vhdReadStream) {
* @returns a readable stream representing a VMDK file
*/
export async function vhdToVMDK(diskName, vhdReadStreamGetter, withLength = false) {
const { iterator, size } = await vhdToVMDKIterator(diskName, await vhdReadStreamGetter())
let length
const stream = await asyncIteratorToStream(iterator)
if (withLength) {
length = await computeVmdkLength(diskName, await vhdReadStreamGetter())
}
const iterable = await vhdToVMDKIterator(diskName, await vhdReadStreamGetter())
const stream = await asyncIteratorToStream(iterable)
if (withLength) {
if (size === undefined) {
length = await computeVmdkLength(diskName, await vhdReadStreamGetter())
} else {
length = size
}
stream.length = length
}
return stream
@@ -62,8 +65,15 @@ export async function vhdToVMDK(diskName, vhdReadStreamGetter, withLength = fals
* @returns a readable stream representing a VMDK file
*/
export async function vhdToVMDKIterator(diskName, vhdReadStream) {
const { blockSize, blocks, diskSize, geometry } = await parseVhdToBlocks(vhdReadStream)
return generateVmdkData(diskName, diskSize, blockSize, blocks, geometry)
const { blockSize, blockCount, blocks, diskSize, geometry } = await parseVhdToBlocks(vhdReadStream)
const vmdkTargetSize = blockSize * blockCount + 3 * 1024 * 1024 // header/footer/descriptor
const iterator = await generateVmdkData(diskName, diskSize, blockSize, blocks, geometry, vmdkTargetSize)
return {
iterator,
size: vmdkTargetSize,
}
}
export { ParsableFile, parseOVAFile, vmdkToVhd, writeOvaOn }

View File

@@ -32,17 +32,18 @@ export async function writeOvaOn(
// https://github.com/mafintosh/tar-stream/issues/24#issuecomment-558358268
async function pushDisk(disk) {
const size = await computeVmdkLength(disk.name, await disk.getStream())
let { iterator, size } = await vhdToVMDKIterator(disk.name, await disk.getStream())
if (size === undefined) {
size = await computeVmdkLength(disk.name, await disk.getStream())
}
disk.fileSize = size
const blockIterator = await vhdToVMDKIterator(disk.name, await disk.getStream())
return new Promise((resolve, reject) => {
const entry = pack.entry({ name: `${disk.name}.vmdk`, size: size }, err => {
const entry = pack.entry({ name: `${disk.name}.vmdk`, size }, err => {
if (err == null) {
return resolve()
} else return reject(err)
})
return writeDisk(entry, blockIterator).then(
return writeDisk(entry, iterator).then(
() => entry.end(),
e => reject(e)
)

View File

@@ -33,7 +33,8 @@ export async function generateVmdkData(
sectorsPerTrackCylinder: 63,
heads: 16,
cylinders: 10402,
}
},
targetSize
) {
const cid = Math.floor(Math.random() * Math.pow(2, 32))
const diskCapacitySectors = Math.ceil(diskCapacityBytes / SECTOR_SIZE)
@@ -150,10 +151,39 @@ ddb.geometry.cylinders = "${geometry.cylinders}"
}
}
function* padding() {
if (targetSize === undefined) {
return
}
let remaining = targetSize - streamPosition
remaining -= SECTOR_SIZE // MARKER_GT
remaining -= tableBuffer.length
remaining -= SECTOR_SIZE // MARKER_GD
remaining -= roundToSector(headerData.grainDirectoryEntries * 4)
remaining -= SECTOR_SIZE // MARKER_GT
remaining -= tableBuffer.length
remaining -= SECTOR_SIZE // MARKER_GD
remaining -= roundToSector(headerData.grainDirectoryEntries * 4)
remaining -= SECTOR_SIZE // MARKER_FOOTER
remaining -= SECTOR_SIZE // stream optimizedheader
remaining -= SECTOR_SIZE // MARKER_EOS
if (remaining < 0) {
throw new Error('vmdk is bigger than precalculed size ')
}
const size = 1024 * 1024
while (remaining > 0) {
const yieldSize = Math.min(size, remaining)
remaining -= yieldSize
yield track(Buffer.alloc(yieldSize))
}
}
async function* iterator() {
yield track(headerData.buffer)
yield track(descriptorBuffer)
yield* emitBlocks(grainSizeBytes, blockGenerator)
yield* padding()
yield track(createEmptyMarker(MARKER_GT))
let tableOffset = streamPosition
// grain tables
@@ -181,6 +211,5 @@ ddb.geometry.cylinders = "${geometry.cylinders}"
yield track(footer.buffer)
yield track(createEmptyMarker(MARKER_EOS))
}
return iterator()
}

View File

@@ -84,7 +84,7 @@ export default {
homeTemplatePage: 'Шаблоны',
// Original text: 'Storages'
homeSrPage: "Хранилища",
homeSrPage: 'Хранилища',
// Original text: "Dashboard"
dashboardPage: 'Контрольные панели',
@@ -144,7 +144,7 @@ export default {
aboutPage: 'О программе',
// Original text: 'About XO {xoaPlan}'
aboutXoaPlan: "О Xen Orchestra {xoaPlan}",
aboutXoaPlan: 'О Xen Orchestra {xoaPlan}',
// Original text: "New"
newMenu: 'Добавить',
@@ -399,10 +399,10 @@ export default {
highAvailability: 'Высокая доступность',
// Original text: 'Shared {type}'
srSharedType: "Совместное использование {type}",
srSharedType: 'Совместное использование {type}',
// Original text: 'Not shared {type}'
srNotSharedType: "Без совместного использования {type}",
srNotSharedType: 'Без совместного использования {type}',
// Original text: "Add"
add: 'Добавить',
@@ -561,10 +561,10 @@ export default {
unknownSchedule: 'Неизвестно',
// Original text: 'Web browser timezone'
timezonePickerUseLocalTime: "Часовой пояс WEB-браузера",
timezonePickerUseLocalTime: 'Часовой пояс WEB-браузера',
// Original text: 'Server timezone ({value})'
serverTimezoneOption: "Часовой пояс сервера ({value})",
serverTimezoneOption: 'Часовой пояс сервера ({value})',
// Original text: 'Cron Pattern:'
cronPattern: 'Cron-шаблон: ',
@@ -726,7 +726,8 @@ export default {
localRemoteWarningTitle: undefined,
// Original text: 'Warning: local remotes will use limited XOA disk space. Only for advanced users.'
localRemoteWarningMessage: 'Предупреждение: локальные удаленные устройства будут использовать ограниченное дисковое пространство XOA. Только для продвинутых пользователей.',
localRemoteWarningMessage:
'Предупреждение: локальные удаленные устройства будут использовать ограниченное дисковое пространство XOA. Только для продвинутых пользователей.',
// Original text: 'Warning: this feature works only with XenServer 6.5 or newer.'
backupVersionWarning: undefined,
@@ -2553,7 +2554,8 @@ export default {
noHostsAvailable: 'Нет доступных хостов',
// Original text: "VMs created from this resource set shall run on the following hosts."
availableHostsDescription: 'Виртуальные машины, созданные из этого набора ресурсов, должны работать на следующих хостах.',
availableHostsDescription:
'Виртуальные машины, созданные из этого набора ресурсов, должны работать на следующих хостах.',
// Original text: "Maximum CPUs"
maxCpus: 'Максимум CPUs',
@@ -2882,7 +2884,8 @@ export default {
deleteVmModalTitle: 'Удалить ВМ',
// Original text: "Are you sure you want to delete this VM? ALL VM DISKS WILL BE REMOVED"
deleteVmModalMessage: 'Вы уверены, что хотите удалить эту виртуальную машину? ВСЕ ДИСКИ ВИРТУАЛЬНОЙ МАШИНЫ БУДУТ УДАЛЕНЫ!',
deleteVmModalMessage:
'Вы уверены, что хотите удалить эту виртуальную машину? ВСЕ ДИСКИ ВИРТУАЛЬНОЙ МАШИНЫ БУДУТ УДАЛЕНЫ!',
// Original text: "Migrate VM"
migrateVmModalTitle: 'Переместить ВМ',

View File

@@ -19,6 +19,7 @@ const messages = {
errorUnknownItem: 'Unknown {type}',
generateNewMacAddress: 'Generate new MAC addresses',
memoryFree: '{memoryFree} RAM free',
notConfigured: 'Not configured',
utcDate: 'UTC date',
utcTime: 'UTC time',
date: 'Date',
@@ -2418,6 +2419,7 @@ const messages = {
licensesBinding: 'Licenses binding',
notEnoughXcpngLicenses: 'Not enough XCP-ng licenses',
notBoundSelectLicense: 'Not bound (Plan (ID), expiration date)',
xcpngLicensesBindingAvancedView: "To bind an XCP-ng license, go the pool's Advanced tab.",
xosanUnregisteredDisclaimer:
'You are not registered and therefore will not be able to create or manage your XOSAN SRs. {link}',
xosanSourcesDisclaimer:

View File

@@ -2860,9 +2860,9 @@ export const changePassword = (oldPassword, newPassword) =>
() => error(_('pwdChangeError'), _('pwdChangeErrorBody'))
)
const _setUserPreferences = preferences =>
const _setUserPreferences = (preferences, userId) =>
_call('user.set', {
id: xo.user.id,
id: userId ?? xo.user.id,
preferences,
})::tap(subscribeCurrentUser.forceRefresh)
@@ -2923,15 +2923,18 @@ export const addOtp = secret =>
noop
)
export const removeOtp = () =>
export const removeOtp = user =>
confirm({
title: _('removeOtpConfirm'),
body: _('removeOtpConfirmMessage'),
}).then(
() =>
_setUserPreferences({
otp: null,
}),
_setUserPreferences(
{
otp: null,
},
resolveId(user)
),
noop
)

View File

@@ -495,6 +495,7 @@ export default decorate([
<li>{_('remoteEncryptionBackupSize')}</li>
</ul>
<input
autoComplete='new-password'
className='form-control'
name='encryptionKey'
onChange={effects.linkState}

View File

@@ -1,7 +1,9 @@
import * as Editable from 'editable'
import _, { messages } from 'intl'
import ActionButton from 'action-button'
import Button from 'button'
import Component from 'base-component'
import Icon from 'icon'
import isEmpty from 'lodash/isEmpty'
import keyBy from 'lodash/keyBy'
import map from 'lodash/map'
@@ -14,7 +16,7 @@ import { get } from '@xen-orchestra/defined'
import { injectIntl } from 'react-intl'
import { Password, Select } from 'form'
import { createUser, deleteUser, deleteUsers, editUser, subscribeGroups, subscribeUsers } from 'xo'
import { createUser, deleteUser, deleteUsers, editUser, removeOtp, subscribeGroups, subscribeUsers } from 'xo'
const permissions = {
none: {
@@ -78,6 +80,17 @@ const USER_COLUMNS = [
itemRenderer: user =>
isEmpty(user.authProviders) && <Editable.Password onChange={password => editUser(user, { password })} value='' />,
},
{
name: 'OTP',
itemRenderer: user =>
user.preferences.otp !== undefined ? (
<Button btnStyle='danger' onClick={() => removeOtp(user)} size='small'>
<Icon icon='remove' /> {_('remove')}
</Button>
) : (
_('notConfigured')
),
},
]
const USER_ACTIONS = [

View File

@@ -323,6 +323,9 @@ export default class Licenses extends Component {
return (
<Container>
<Row className='text-info mb-1'>
<Icon icon='info' /> <i>{_('xcpngLicensesBindingAvancedView')}</i>
</Row>
<Row className='mb-1'>
<Col>
<a

View File

@@ -1,68 +0,0 @@
#!/usr/bin/env node
'use strict'
const formatFiles = files => {
run('./node_modules/.bin/prettier', ['--write'].concat(files))
}
const testFiles = files => {
run('./node_modules/.bin/eslint', ['--ignore-pattern', '!*'].concat(files))
run(
'./node_modules/.bin/jest',
['--testRegex=^(?!.*.integ.spec.js$).*.spec.js$', '--findRelatedTests', '--passWithNoTests'].concat(files)
)
}
// -----------------------------------------------------------------------------
const { execFileSync, spawnSync } = require('child_process')
const { readFileSync, writeFileSync } = require('fs')
const run = (command, args) => {
const { status } = spawnSync(command, args, { stdio: 'inherit' })
if (status !== 0) {
process.exit(status)
}
}
const gitDiff = (what, args = []) =>
execFileSync('git', ['diff-' + what, '--diff-filter=AM', '--ignore-submodules', '--name-only'].concat(args), {
encoding: 'utf8',
})
.split('\n')
.filter(_ => _ !== '')
const gitDiffFiles = (files = []) => gitDiff('files', files)
const gitDiffIndex = () => gitDiff('index', ['--cached', 'HEAD'])
// -----------------------------------------------------------------------------
const files = gitDiffIndex().filter(_ => _.endsWith('.cjs') || _.endsWith('.js') || _.endsWith('.mjs'))
if (files.length === 0) {
return
}
// save the list of files with unstaged changes
let unstaged = gitDiffFiles(files)
// format all files
formatFiles(files)
if (unstaged.length !== 0) {
// refresh the list of files with unstaged changes, maybe the
// changes have been reverted by the formatting
run('git', ['update-index', '-q', '--refresh'])
unstaged = gitDiffFiles(unstaged)
if (unstaged.length !== 0) {
const contents = unstaged.map(name => readFileSync(name))
process.on('exit', () => unstaged.map((name, i) => writeFileSync(name, contents[i])))
run('git', ['checkout'].concat(unstaged))
formatFiles(unstaged)
}
}
// add formatting changes so that even if the test fails, there won't be
// stylistic diffs between files and index
run('git', ['add'].concat(files))
testFiles(files)

28
scripts/run-changed-pkgs.js Executable file
View File

@@ -0,0 +1,28 @@
#!/usr/bin/env node
'use strict'

// Runs an npm script (via `npm run --if-present -w <pkg>`) in every
// workspace package containing at least one of the given files.
//
// Usage: run-changed-pkgs.js <script> [files...]

const { join, relative, sep } = require('path')

const [, , script, ...files] = process.argv

const pkgs = new Set()
const root = join(__dirname, '..')
for (const file of files) {
  const parts = relative(root, file).split(sep)

  // a file belongs to a package when it lives *inside* `packages/<name>/`
  // or a scoped workspace dir like `@xen-orchestra/<name>/`; the
  // `parts.length > 2` guard applies to both branches so that a file
  // sitting directly in the workspace dir is not mistaken for a package
  if (parts.length > 2 && (parts[0] === 'packages' || parts[0][0] === '@')) {
    pkgs.add(parts.slice(0, 2).join(sep))
  }
}

if (pkgs.size !== 0) {
  const args = ['run', '--if-present', script]
  for (const pkg of pkgs) {
    args.push('-w', pkg)
  }
  const { status } = require('child_process').spawnSync('npm', args, { stdio: 'inherit' })
  if (status !== 0) {
    process.exit(status)
  }
}

View File

@@ -11,16 +11,44 @@ const { getPackages } = require('./utils')
const { env } = process
// Spawns `command` through the shell; in verbose mode its output goes
// straight to the terminal, otherwise it is buffered and only replayed on
// stderr when the command fails.
async function run(command, opts, verbose) {
  const child = spawn(command, {
    ...opts,
    shell: true,
    stdio: verbose ? 'inherit' : 'pipe',
  })

  // buffer the whole output (stdout + stderr interleaved) when not verbose
  const buffered = []
  if (!verbose) {
    const collect = chunk => {
      buffered.push(chunk)
    }
    child.stderr.on('data', collect)
    child.stdout.on('data', collect)
  }

  const exitCode = await fromEvent(child, 'exit')
  if (exitCode !== 0) {
    // the command failed: dump whatever was captured…
    for (const chunk of buffered) {
      process.stderr.write(chunk)
    }
    // …and propagate the raw exit code (callers expect the code itself,
    // not an Error instance)
    throw exitCode
  }
}
// run a script for each package (also run pre and post)
//
// TODO: https://docs.npmjs.com/misc/scripts#environment
require('exec-promise')(args => {
const {
bail,
concurrency,
parallel,
verbose,
_: [script],
} = getopts(args, {
boolean: ['parallel'],
boolean: ['bail', 'parallel', 'verbose'],
string: ['concurrency'],
})
@@ -37,15 +65,18 @@ require('exec-promise')(args => {
env: Object.assign({}, env, {
PATH: `${dir}/node_modules/.bin${delimiter}${env.PATH}`,
}),
shell: true,
stdio: 'inherit',
}
return forEach.call([`pre${script}`, script, `post${script}`], script => {
const command = scripts[script]
if (command !== undefined) {
console.log(`* ${name}:${script} `, command)
return fromEvent(spawn(command, spawnOpts), 'exit').then(code => {
return run(command, spawnOpts, verbose).catch(code => {
if (code !== 0) {
if (bail) {
// eslint-disable-next-line no-throw-literal
throw `${name}:${script} Error: ` + code
}
++errors
console.log(`* ${name}:${script} Error:`, code)
}

View File

@@ -1,26 +0,0 @@
#!/usr/bin/env node
'use strict'
const { execFileSync, spawnSync } = require('child_process')
const run = (command, args) => spawnSync(command, args, { stdio: 'inherit' }).status
const getFiles = () =>
execFileSync('git', ['diff-index', '--diff-filter=AM', '--ignore-submodules', '--name-only', 'master'], {
encoding: 'utf8',
})
.split('\n')
.filter(_ => _ !== '')
// -----------------------------------------------------------------------------
// Travis vars : https://docs.travis-ci.com/user/environment-variables#default-environment-variables.
if (process.env.TRAVIS_PULL_REQUEST !== 'false') {
const files = getFiles().filter(_ => _.endsWith('.cjs') || _.endsWith('.js') || _.endsWith('.mjs'))
if (files.length !== 0) {
process.exit(run('./node_modules/.bin/jest', ['--findRelatedTests', '--passWithNoTests'].concat(files)))
}
} else {
process.exit(run('yarn', ['test-lint']) + run('yarn', ['test-unit']) + run('yarn', ['test-integration']))
}

View File

@@ -2952,11 +2952,6 @@
resolved "https://registry.yarnpkg.com/@types/node/-/node-16.18.3.tgz#d7f7ba828ad9e540270f01ce00d391c54e6e0abc"
integrity sha512-jh6m0QUhIRcZpNv7Z/rpN+ZWXOicUUQbSoWks7Htkbb9IjFQj4kzcX/xFCkjstCj5flMsN8FiSvt+q+Tcs4Llg==
"@types/parse-json@^4.0.0":
version "4.0.0"
resolved "https://registry.yarnpkg.com/@types/parse-json/-/parse-json-4.0.0.tgz#2f8bb441434d163b35fb8ffdccd7138927ffb8c0"
integrity sha512-//oorEZjL6sbPcKUaCdIGlIUeH26mgzimjBB77G6XRgnDl/L5wOnpyBGRe/Mmf5CVW3PwEBE1NjiMZ/ssFh4wA==
"@types/prettier@^2.1.5":
version "2.7.1"
resolved "https://registry.yarnpkg.com/@types/prettier/-/prettier-2.7.1.tgz#dfd20e2dc35f027cdd6c1908e80a5ddc7499670e"
@@ -3657,6 +3652,14 @@
"@vueuse/shared" "9.5.0"
vue-demi "*"
"@vueuse/math@^9.5.0":
version "9.5.0"
resolved "https://registry.yarnpkg.com/@vueuse/math/-/math-9.5.0.tgz#df20ce74031727a4eaef3cdbaa443bfda80fb3e1"
integrity sha512-dPr5CkxE4Oo+OEvTqPfAZ8Lv1AVbnLH2N5gJSm5EWykxGPLbSaimUIckqXXR8DDyvaWIV545tELekpFUHLoFmw==
dependencies:
"@vueuse/shared" "9.5.0"
vue-demi "*"
"@vueuse/metadata@9.5.0":
version "9.5.0"
resolved "https://registry.yarnpkg.com/@vueuse/metadata/-/metadata-9.5.0.tgz#b01c84230261ddee4d439ae5d9c21343dc5ae565"
@@ -6167,11 +6170,6 @@ commondir@^1.0.1:
resolved "https://registry.yarnpkg.com/commondir/-/commondir-1.0.1.tgz#ddd800da0c66127393cca5950ea968a3aaf1253b"
integrity sha512-W9pAhw0ja1Edb5GVdIF1mjZw/ASI0AlShXM83UUGe2DVr5TdAPEA1OA8m/g8zWp9x6On7gqufY+FatDbC3MDQg==
compare-versions@^3.6.0:
version "3.6.0"
resolved "https://registry.yarnpkg.com/compare-versions/-/compare-versions-3.6.0.tgz#1a5689913685e5a87637b8d3ffca75514ec41d62"
integrity sha512-W6Af2Iw1z4CB7q4uU4hv646dW9GQuBM+YpC0UvUCWSD8w90SJjp+ujJuXaEMtAXBtSqGfMPuFOVn4/+FlaqfBA==
compare-versions@^5.0.1:
version "5.0.1"
resolved "https://registry.yarnpkg.com/compare-versions/-/compare-versions-5.0.1.tgz#14c6008436d994c3787aba38d4087fabe858555e"
@@ -6449,17 +6447,6 @@ cosmiconfig@^5.0.0:
js-yaml "^3.13.1"
parse-json "^4.0.0"
cosmiconfig@^7.0.0:
version "7.1.0"
resolved "https://registry.yarnpkg.com/cosmiconfig/-/cosmiconfig-7.1.0.tgz#1443b9afa596b670082ea46cbd8f6a62b84635f6"
integrity sha512-AdmX6xUzdNASswsFtmwSt7Vj8po9IuqXm0UXz7QKPuEUmPB4XyjGfaAr2PSuELMwkRMVH1EpIkX5bTZGRB3eCA==
dependencies:
"@types/parse-json" "^4.0.0"
import-fresh "^3.2.1"
parse-json "^5.0.0"
path-type "^4.0.0"
yaml "^1.10.0"
create-ecdh@^4.0.0:
version "4.0.4"
resolved "https://registry.yarnpkg.com/create-ecdh/-/create-ecdh-4.0.4.tgz#d6e7f4bffa66736085a0762fd3a632684dabcc4e"
@@ -9287,13 +9274,6 @@ find-up@^5.0.0:
locate-path "^6.0.0"
path-exists "^4.0.0"
find-versions@^4.0.0:
version "4.0.0"
resolved "https://registry.yarnpkg.com/find-versions/-/find-versions-4.0.0.tgz#3c57e573bf97769b8cb8df16934b627915da4965"
integrity sha512-wgpWy002tA+wgmO27buH/9KzyEOQnKsG/R0yrcjPT9BOFm0zRBVQbZ95nRGXWMywS8YR5knRbpohio0bcJABxQ==
dependencies:
semver-regex "^3.1.2"
findit@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/findit/-/findit-2.0.0.tgz#6509f0126af4c178551cfa99394e032e13a4d56e"
@@ -10738,21 +10718,10 @@ human-signals@^3.0.1:
resolved "https://registry.yarnpkg.com/human-signals/-/human-signals-3.0.1.tgz#c740920859dafa50e5a3222da9d3bf4bb0e5eef5"
integrity sha512-rQLskxnM/5OCldHo+wNXbpVgDn5A17CUoKX+7Sokwaknlq7CdSnphy0W39GU8dw59XiCXmFXDg4fRuckQRKewQ==
husky@^4.2.5:
version "4.3.8"
resolved "https://registry.yarnpkg.com/husky/-/husky-4.3.8.tgz#31144060be963fd6850e5cc8f019a1dfe194296d"
integrity sha512-LCqqsB0PzJQ/AlCgfrfzRe3e3+NvmefAdKQhRYpxS4u6clblBoDdzzvHi8fmxKRzvMxPY/1WZWzomPZww0Anow==
dependencies:
chalk "^4.0.0"
ci-info "^2.0.0"
compare-versions "^3.6.0"
cosmiconfig "^7.0.0"
find-versions "^4.0.0"
opencollective-postinstall "^2.0.2"
pkg-dir "^5.0.0"
please-upgrade-node "^3.2.0"
slash "^3.0.0"
which-pm-runs "^1.0.0"
husky@^8.0.2:
version "8.0.2"
resolved "https://registry.yarnpkg.com/husky/-/husky-8.0.2.tgz#5816a60db02650f1f22c8b69b928fd6bcd77a236"
integrity sha512-Tkv80jtvbnkK3mYWxPZePGFpQ/tT3HNSs/sasF9P2YfkMezDl3ON37YN6jUUI4eTg5LcyVynlb6r4eyvOmspvg==
iconv-lite@0.4, iconv-lite@0.4.24, iconv-lite@^0.4.24:
version "0.4.24"
@@ -14797,7 +14766,7 @@ parse-json@^4.0.0:
error-ex "^1.3.1"
json-parse-better-errors "^1.0.1"
parse-json@^5.0.0, parse-json@^5.2.0:
parse-json@^5.2.0:
version "5.2.0"
resolved "https://registry.yarnpkg.com/parse-json/-/parse-json-5.2.0.tgz#c76fc66dee54231c962b22bcc8a72cf2f99753cd"
integrity sha512-ayCKvm/phCGxOkYRSCM82iDwct8/EonSEgCSxWxD7ve6jHggsFl4fZVQBPRNgQoKiuV/odhFrGzQXZwbifC8Rg==
@@ -15148,13 +15117,6 @@ pkg-dir@^4.1.0, pkg-dir@^4.2.0:
dependencies:
find-up "^4.0.0"
pkg-dir@^5.0.0:
version "5.0.0"
resolved "https://registry.yarnpkg.com/pkg-dir/-/pkg-dir-5.0.0.tgz#a02d6aebe6ba133a928f74aec20bafdfe6b8e760"
integrity sha512-NPE8TDbzl/3YQYY7CSS228s3g2ollTFnc+Qi3tqmqJp9Vg2ovUpixcJEo2HJScN2Ez+kEaal6y70c0ehqJBJeA==
dependencies:
find-up "^5.0.0"
placement.js@^1.0.0-beta.5:
version "1.0.0-beta.5"
resolved "https://registry.yarnpkg.com/placement.js/-/placement.js-1.0.0-beta.5.tgz#2aac6bd8e670729bbf26ad47f2f9656b19e037d5"
@@ -15165,13 +15127,6 @@ platform@^1.3.0, platform@^1.3.3:
resolved "https://registry.yarnpkg.com/platform/-/platform-1.3.6.tgz#48b4ce983164b209c2d45a107adb31f473a6e7a7"
integrity sha512-fnWVljUchTro6RiCFvCXBbNhJc2NijN7oIQxbwsyL0buWJPG85v81ehlHI9fXrJsMNgTofEoWIQeClKpgxFLrg==
please-upgrade-node@^3.2.0:
version "3.2.0"
resolved "https://registry.yarnpkg.com/please-upgrade-node/-/please-upgrade-node-3.2.0.tgz#aeddd3f994c933e4ad98b99d9a556efa0e2fe942"
integrity sha512-gQR3WpIgNIKwBMVLkpMUeR3e1/E1y42bqDQZfql+kDeXd8COYfM8PQA4X6y7a8u9Ua9FHmsrrmirW2vHs45hWg==
dependencies:
semver-compare "^1.0.0"
plugin-error@1.0.1, plugin-error@^1.0.1:
version "1.0.1"
resolved "https://registry.yarnpkg.com/plugin-error/-/plugin-error-1.0.1.tgz#77016bd8919d0ac377fdcdd0322328953ca5781c"
@@ -17297,11 +17252,6 @@ selfsigned@^1.10.8:
dependencies:
node-forge "^0.10.0"
semver-compare@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/semver-compare/-/semver-compare-1.0.0.tgz#0dee216a1c941ab37e9efb1788f6afc5ff5537fc"
integrity sha512-YM3/ITh2MJ5MtzaM429anh+x2jiLVjqILF4m4oyQB18W7Ggea7BfqdH/wGMK7dDiMghv/6WG7znWMwUDzJiXow==
semver-diff@^3.1.1:
version "3.1.1"
resolved "https://registry.yarnpkg.com/semver-diff/-/semver-diff-3.1.1.tgz#05f77ce59f325e00e2706afd67bb506ddb1ca32b"
@@ -17316,11 +17266,6 @@ semver-greatest-satisfied-range@^1.1.0:
dependencies:
sver-compat "^1.5.0"
semver-regex@^3.1.2:
version "3.1.4"
resolved "https://registry.yarnpkg.com/semver-regex/-/semver-regex-3.1.4.tgz#13053c0d4aa11d070a2f2872b6b1e3ae1e1971b4"
integrity sha512-6IiqeZNgq01qGf0TId0t3NvKzSvUsjcpdEO3AQNeIjR6A2+ckTnQlDpl4qu1bjRv0RzN3FP9hzFmws3lKqRWkA==
"semver@2 || 3 || 4 || 5", semver@^5.1.0, semver@^5.5.0, semver@^5.6.0, semver@^5.7.1:
version "5.7.1"
resolved "https://registry.yarnpkg.com/semver/-/semver-5.7.1.tgz#a954f931aeba508d307bbf069eff0c01c96116f7"
@@ -20023,11 +19968,6 @@ which-module@^2.0.0:
resolved "https://registry.yarnpkg.com/which-module/-/which-module-2.0.0.tgz#d9ef07dce77b9902b8a3a8fa4b31c3e3f7e6e87a"
integrity sha512-B+enWhmw6cjfVC7kS8Pj9pCrKSc5txArRyaYGe088shv/FGWH+0Rjx/xPgtsWfsUtS27FkP697E4DDhgrgoc0Q==
which-pm-runs@^1.0.0:
version "1.1.0"
resolved "https://registry.yarnpkg.com/which-pm-runs/-/which-pm-runs-1.1.0.tgz#35ccf7b1a0fce87bd8b92a478c9d045785d3bf35"
integrity sha512-n1brCuqClxfFfq/Rb0ICg9giSZqCS+pLtccdag6C2HyufBrh3fBOiy9nb6ggRMvWOVH5GrdJskj5iGTZNxd7SA==
which@^1.2.14, which@^1.2.9:
version "1.3.1"
resolved "https://registry.yarnpkg.com/which/-/which-1.3.1.tgz#a45043d54f5805316da8d62f9f50918d3da70b0a"