grafana/toolkit: improve CI task (#18189)

Ryan McKinley 2019-08-03 12:34:02 -07:00 committed by GitHub
parent 32d6740b8f
commit d8f86834d9
33 changed files with 991 additions and 297 deletions

View File

@ -2,5 +2,5 @@
"npmClient": "yarn",
"useWorkspaces": true,
"packages": ["packages/*"],
"version": "6.4.0-alpha.22"
"version": "6.4.0-alpha.44"
}

View File

@ -2,7 +2,7 @@
"author": "Grafana Labs",
"license": "Apache-2.0",
"name": "@grafana/data",
"version": "6.4.0-alpha.22",
"version": "6.4.0-alpha.44",
"description": "Grafana Data Library",
"keywords": [
"typescript"

View File

@ -2,7 +2,7 @@
"author": "Grafana Labs",
"license": "Apache-2.0",
"name": "@grafana/runtime",
"version": "6.4.0-alpha.22",
"version": "6.4.0-alpha.44",
"description": "Grafana Runtime Library",
"keywords": [
"grafana",

View File

@ -2,7 +2,7 @@
"author": "Grafana Labs",
"license": "Apache-2.0",
"name": "@grafana/toolkit",
"version": "6.4.0-alpha.22",
"version": "6.4.0-alpha.44",
"description": "Grafana Toolkit",
"keywords": [
"grafana",
@ -39,6 +39,7 @@
"@types/semver": "^6.0.0",
"@types/tmp": "^0.1.0",
"@types/webpack": "4.4.34",
"aws-sdk": "^2.495.0",
"@grafana/data": "^6.4.0-alpha",
"@grafana/ui": "^6.4.0-alpha",
"axios": "0.19.0",
@ -59,11 +60,15 @@
"jest": "24.8.0",
"jest-cli": "^24.8.0",
"jest-coverage-badges": "^1.1.2",
"jest-junit": "^6.4.0",
"lodash": "4.17.14",
"md5-file": "^4.0.0",
"mini-css-extract-plugin": "^0.7.0",
"node-sass": "^4.12.0",
"optimize-css-assets-webpack-plugin": "^5.0.3",
"ora": "^3.4.0",
"pixelmatch": "^5.0.2",
"pngjs": "^3.4.0",
"postcss-flexbugs-fixes": "4.1.0",
"postcss-loader": "3.0.0",
"postcss-preset-env": "6.6.0",

View File

@ -189,8 +189,11 @@ export const run = (includeInternalScripts = false) => {
program
.command('plugin:ci-report')
.description('Build a report for this whole process')
.option('--upload', 'upload packages also')
.action(async cmd => {
await execTask(ciPluginReportTask)({});
await execTask(ciPluginReportTask)({
upload: cmd.upload,
});
});
program.on('command:*', () => {

View File

@ -1,8 +1,9 @@
import { Task, TaskRunner } from './task';
import fs from 'fs';
// @ts-ignore
import execa = require('execa');
import path = require('path');
import fs = require('fs');
import glob = require('glob');
import { Linter, Configuration, RuleFailure } from 'tslint';
import * as prettier from 'prettier';

View File

@ -1,28 +1,43 @@
import { Task, TaskRunner } from './task';
import { pluginBuildRunner } from './plugin.build';
import { restoreCwd } from '../utils/cwd';
import { S3Client } from '../../plugins/aws';
import { getPluginJson } from '../../config/utils/pluginValidation';
import { PluginMeta } from '@grafana/ui';
// @ts-ignore
import execa = require('execa');
import path = require('path');
import fs = require('fs');
import { getPackageDetails } from '../utils/fileHelper';
import fs from 'fs';
import { getPackageDetails, findImagesInFolder, appendPluginHistory } from '../../plugins/utils';
import {
job,
getJobFolder,
writeJobStats,
getCiFolder,
agregateWorkflowInfo,
agregateCoverageInfo,
getPluginSourceInfo,
TestResultInfo,
agregateTestInfo,
} from './plugin/ci';
getPluginBuildInfo,
getBuildNumber,
getPullRequestNumber,
getCircleDownloadBaseURL,
} from '../../plugins/env';
import { agregateWorkflowInfo, agregateCoverageInfo, agregateTestInfo } from '../../plugins/workflow';
import {
PluginPackageDetails,
PluginBuildReport,
PluginHistory,
defaultPluginHistory,
TestResultsInfo,
PluginDevInfo,
PluginDevSummary,
DevSummary,
} from '../../plugins/types';
import { runEndToEndTests } from '../../plugins/e2e/launcher';
import { getEndToEndSettings } from '../../plugins/index';
export interface PluginCIOptions {
backend?: string;
full?: boolean;
upload?: boolean;
}
/**
@ -77,7 +92,8 @@ export const ciBuildPluginTask = new Task<PluginCIOptions>('Build Plugin', build
const buildPluginDocsRunner: TaskRunner<PluginCIOptions> = async () => {
const docsSrc = path.resolve(process.cwd(), 'docs');
if (!fs.existsSync(docsSrc)) {
throw new Error('Docs folder does not exist!');
console.log('No docs src');
return;
}
const start = Date.now();
@ -146,7 +162,7 @@ const packagePluginRunner: TaskRunner<PluginCIOptions> = async () => {
console.log('Save the source info in plugin.json');
const pluginJsonFile = path.resolve(distDir, 'plugin.json');
const pluginInfo = getPluginJson(pluginJsonFile);
(pluginInfo.info as any).source = await getPluginSourceInfo();
pluginInfo.info.build = await getPluginBuildInfo();
fs.writeFile(pluginJsonFile, JSON.stringify(pluginInfo, null, 2), err => {
if (err) {
throw new Error('Error writing: ' + pluginJsonFile);
@ -165,7 +181,7 @@ const packagePluginRunner: TaskRunner<PluginCIOptions> = async () => {
throw new Error('Invalid zip file: ' + zipFile);
}
const info: any = {
const info: PluginPackageDetails = {
plugin: await getPackageDetails(zipFile, distDir),
};
@ -220,8 +236,7 @@ export const ciPackagePluginTask = new Task<PluginCIOptions>('Bundle Plugin', pa
const testPluginRunner: TaskRunner<PluginCIOptions> = async ({ full }) => {
const start = Date.now();
const workDir = getJobFolder();
const pluginInfo = getPluginJson(`${process.cwd()}/ci/dist/plugin.json`);
const results: TestResultInfo = { job };
const results: TestResultsInfo = { job, passed: 0, failed: 0, screenshots: [] };
const args = {
withCredentials: true,
baseURL: process.env.BASE_URL || 'http://localhost:3000/',
@ -232,6 +247,14 @@ const testPluginRunner: TaskRunner<PluginCIOptions> = async ({ full }) => {
},
};
const settings = getEndToEndSettings();
await execa('rimraf', [settings.outputFolder]);
fs.mkdirSync(settings.outputFolder);
const tempDir = path.resolve(process.cwd(), 'e2e-temp');
await execa('rimraf', [tempDir]);
fs.mkdirSync(tempDir);
try {
const axios = require('axios');
const frontendSettings = await axios.get('api/frontend/settings', args);
@ -239,17 +262,37 @@ const testPluginRunner: TaskRunner<PluginCIOptions> = async ({ full }) => {
console.log('Grafana: ' + JSON.stringify(results.grafana, null, 2));
const pluginSettings = await axios.get(`api/plugins/${pluginInfo.id}/settings`, args);
console.log('Plugin Info: ' + JSON.stringify(pluginSettings.data, null, 2));
const loadedMetaRsp = await axios.get(`api/plugins/${settings.plugin.id}/settings`, args);
const loadedMeta: PluginMeta = loadedMetaRsp.data;
console.log('Plugin Info: ' + JSON.stringify(loadedMeta, null, 2));
if (loadedMeta.info.build) {
const currentHash = settings.plugin.info.build!.hash;
console.log('Check version: ', settings.plugin.info.build);
if (loadedMeta.info.build.hash !== currentHash) {
console.warn(`Testing wrong plugin version. Expected: ${currentHash}, found: ${loadedMeta.info.build.hash}`);
throw new Error('Wrong plugin version');
}
}
console.log('TODO Puppeteer Tests', workDir);
if (!fs.existsSync('e2e-temp')) {
fs.mkdirSync(tempDir);
}
results.status = 'TODO... puppeteer';
await execa('cp', [
'node_modules/@grafana/toolkit/src/plugins/e2e/commonPluginTests.ts',
path.resolve(tempDir, 'common.test.ts'),
]);
await runEndToEndTests(settings.outputFolder, results);
} catch (err) {
results.error = err;
results.status = 'EXCEPTION Thrown';
console.log('Test Error', err);
}
await execa('rimraf', [tempDir]);
// Now copy everything to work folder
await execa('cp', ['-rv', settings.outputFolder + '/.', workDir]);
results.screenshots = findImagesInFolder(workDir);
const f = path.resolve(workDir, 'results.json');
fs.writeFile(f, JSON.stringify(results, null, 2), err => {
@ -267,31 +310,117 @@ export const ciTestPluginTask = new Task<PluginCIOptions>('Test Plugin (e2e)', t
* 4. Report
*
* Create a report from all the previous steps
*
*/
const pluginReportRunner: TaskRunner<PluginCIOptions> = async () => {
const pluginReportRunner: TaskRunner<PluginCIOptions> = async ({ upload }) => {
const ciDir = path.resolve(process.cwd(), 'ci');
const packageInfo = require(path.resolve(ciDir, 'packages', 'info.json'));
const packageDir = path.resolve(ciDir, 'packages');
const packageInfo = require(path.resolve(packageDir, 'info.json')) as PluginPackageDetails;
console.log('Save the source info in plugin.json');
const pluginJsonFile = path.resolve(ciDir, 'dist', 'plugin.json');
const report = {
plugin: getPluginJson(pluginJsonFile),
console.log('Load info from: ' + pluginJsonFile);
const pluginMeta = getPluginJson(pluginJsonFile);
const report: PluginBuildReport = {
plugin: pluginMeta,
packages: packageInfo,
workflow: agregateWorkflowInfo(),
coverage: agregateCoverageInfo(),
tests: agregateTestInfo(),
artifactsBaseURL: await getCircleDownloadBaseURL(),
};
const pr = getPullRequestNumber();
if (pr) {
report.pullRequest = pr;
}
console.log('REPORT', report);
// Save the report to disk
const file = path.resolve(ciDir, 'report.json');
fs.writeFile(file, JSON.stringify(report, null, 2), err => {
if (err) {
throw new Error('Unable to write: ' + file);
}
});
console.log('TODO... notify some service');
console.log('Initalizing S3 Client');
const s3 = new S3Client();
const build = pluginMeta.info.build;
if (!build) {
throw new Error('Metadata missing build info');
}
const version = pluginMeta.info.version || 'unknown';
const branch = build.branch || 'unknown';
const buildNumber = getBuildNumber();
const root = `dev/${pluginMeta.id}`;
const dirKey = pr ? `${root}/pr/${pr}` : `${root}/branch/${branch}/${buildNumber}`;
const jobKey = `${dirKey}/index.json`;
if (await s3.exits(jobKey)) {
throw new Error('Job already registered: ' + jobKey);
}
console.log('Write Job', jobKey);
await s3.writeJSON(jobKey, report, {
Tagging: `version=${version}&type=${pluginMeta.type}`,
});
// Upload logo
const logo = await s3.uploadLogo(report.plugin.info, {
local: path.resolve(ciDir, 'dist'),
remote: root,
});
const latest: PluginDevInfo = {
pluginId: pluginMeta.id,
name: pluginMeta.name,
logo,
build: pluginMeta.info.build!,
version,
};
if (pr) {
latest.build.pr = pr;
} else {
latest.build.number = buildNumber;
const base = `${root}/branch/${branch}/`;
const historyKey = base + `history.json`;
console.log('Read', historyKey);
const history: PluginHistory = await s3.readJSON(historyKey, defaultPluginHistory);
appendPluginHistory(report, latest, history);
await s3.writeJSON(historyKey, history);
console.log('wrote history');
}
// Private things may want to upload
if (upload) {
s3.uploadPackages(packageInfo, {
local: packageDir,
remote: dirKey + '/packages',
});
s3.uploadTestFiles(report.tests, {
local: ciDir,
remote: dirKey,
});
}
console.log('Update Directory Indexes');
let indexKey = `${root}/index.json`;
const index: PluginDevSummary = await s3.readJSON(indexKey, { branch: {}, pr: {} });
if (pr) {
index.pr[pr] = latest;
} else {
index.branch[branch] = latest;
}
await s3.writeJSON(indexKey, index);
indexKey = `dev/index.json`;
const pluginIndex: DevSummary = await s3.readJSON(indexKey, {});
pluginIndex[pluginMeta.id] = latest;
await s3.writeJSON(indexKey, pluginIndex);
console.log('wrote index');
};
export const ciPluginReportTask = new Task<PluginCIOptions>('Generate Plugin Report', pluginReportRunner);
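
To make the resulting S3 layout easier to follow, here is an illustrative summary of the keys the report runner writes for a hypothetical plugin id my-datasource on branch master, build 57, pull request 123 (every key also receives the S3Client 'plugins/' prefix):

// Illustrative only; derived from pluginReportRunner above. Ids and numbers are hypothetical.
const root = 'dev/my-datasource';
const prReport = `${root}/pr/123/index.json`; // job report when a pull request number is detected
const branchReport = `${root}/branch/master/57/index.json`; // otherwise keyed by branch + build number
const branchHistory = `${root}/branch/master/history.json`; // PluginHistory, appended on every branch build
const pluginIndex = `${root}/index.json`; // PluginDevSummary: { branch: {...}, pr: {...} }
const devIndex = 'dev/index.json'; // DevSummary keyed by plugin id
// With --upload, packages also land under <dirKey>/packages/ and screenshots under <dirKey>/jobs/<job>/.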

View File

@ -1,214 +0,0 @@
import execa = require('execa');
import path = require('path');
import fs = require('fs');
export interface PluginSourceInfo {
time?: number;
repo?: string;
branch?: string;
hash?: string;
}
export interface JobInfo {
job?: string;
startTime: number;
endTime: number;
elapsed: number;
status?: string;
buildNumber?: number;
}
export interface WorkflowInfo extends JobInfo {
workflowId?: string;
jobs: JobInfo[];
user?: string;
repo?: string;
}
const getJobFromProcessArgv = () => {
const arg = process.argv[2];
if (arg && arg.startsWith('plugin:ci-')) {
const task = arg.substring('plugin:ci-'.length);
if ('build' === task) {
if ('--backend' === process.argv[3] && process.argv[4]) {
return task + '_' + process.argv[4];
}
return 'build_plugin';
}
return task;
}
return 'unknown_job';
};
export const job = process.env.CIRCLE_JOB || getJobFromProcessArgv();
export const getPluginSourceInfo = async (): Promise<PluginSourceInfo> => {
if (process.env.CIRCLE_SHA1) {
return Promise.resolve({
time: Date.now(),
repo: process.env.CIRCLE_REPOSITORY_URL,
branch: process.env.CIRCLE_BRANCH,
hash: process.env.CIRCLE_SHA1,
});
}
const exe = await execa('git', ['rev-parse', 'HEAD']);
return {
time: Date.now(),
hash: exe.stdout,
};
};
const getBuildNumber = (): number | undefined => {
if (process.env.CIRCLE_BUILD_NUM) {
return parseInt(process.env.CIRCLE_BUILD_NUM, 10);
}
return undefined;
};
export const getJobFolder = () => {
const dir = path.resolve(process.cwd(), 'ci', 'jobs', job);
if (!fs.existsSync(dir)) {
fs.mkdirSync(dir, { recursive: true });
}
return dir;
};
export const getCiFolder = () => {
const dir = path.resolve(process.cwd(), 'ci');
if (!fs.existsSync(dir)) {
fs.mkdirSync(dir, { recursive: true });
}
return dir;
};
export const writeJobStats = (startTime: number, workDir: string) => {
const endTime = Date.now();
const stats: JobInfo = {
job,
startTime,
endTime,
elapsed: endTime - startTime,
buildNumber: getBuildNumber(),
};
const f = path.resolve(workDir, 'job.json');
fs.writeFile(f, JSON.stringify(stats, null, 2), err => {
if (err) {
throw new Error('Unable to stats: ' + f);
}
});
};
export const agregateWorkflowInfo = (): WorkflowInfo => {
const now = Date.now();
const workflow: WorkflowInfo = {
jobs: [],
startTime: now,
endTime: now,
workflowId: process.env.CIRCLE_WORKFLOW_ID,
repo: process.env.CIRCLE_PROJECT_REPONAME,
user: process.env.CIRCLE_PROJECT_USERNAME,
buildNumber: getBuildNumber(),
elapsed: 0,
};
const jobsFolder = path.resolve(getCiFolder(), 'jobs');
if (fs.existsSync(jobsFolder)) {
const files = fs.readdirSync(jobsFolder);
if (files && files.length) {
files.forEach(file => {
const p = path.resolve(jobsFolder, file, 'job.json');
if (fs.existsSync(p)) {
const job = require(p) as JobInfo;
workflow.jobs.push(job);
if (job.startTime < workflow.startTime) {
workflow.startTime = job.startTime;
}
if (job.endTime > workflow.endTime) {
workflow.endTime = job.endTime;
}
} else {
console.log('Missing Job info: ', p);
}
});
} else {
console.log('NO JOBS IN: ', jobsFolder);
}
}
workflow.elapsed = workflow.endTime - workflow.startTime;
return workflow;
};
export interface CoverageDetails {
total: number;
covered: number;
skipped: number;
pct: number;
}
export interface CoverageInfo {
job: string;
summary: { [key: string]: CoverageDetails };
report?: string; // path to report
}
export const agregateCoverageInfo = (): CoverageInfo[] => {
const coverage: CoverageInfo[] = [];
const ciDir = getCiFolder();
const jobsFolder = path.resolve(ciDir, 'jobs');
if (fs.existsSync(jobsFolder)) {
const files = fs.readdirSync(jobsFolder);
if (files && files.length) {
files.forEach(file => {
const dir = path.resolve(jobsFolder, file, 'coverage');
if (fs.existsSync(dir)) {
const s = path.resolve(dir, 'coverage-summary.json');
const r = path.resolve(dir, 'lcov-report', 'index.html');
if (fs.existsSync(s)) {
const raw = require(s);
const info: CoverageInfo = {
job: file,
summary: raw.total,
};
if (fs.existsSync(r)) {
info.report = r.substring(ciDir.length);
}
coverage.push(info);
}
}
});
} else {
console.log('NO JOBS IN: ', jobsFolder);
}
}
return coverage;
};
export interface TestResultInfo {
job: string;
grafana?: any;
status?: string;
error?: string;
}
export const agregateTestInfo = (): TestResultInfo[] => {
const tests: TestResultInfo[] = [];
const ciDir = getCiFolder();
const jobsFolder = path.resolve(ciDir, 'jobs');
if (fs.existsSync(jobsFolder)) {
const files = fs.readdirSync(jobsFolder);
if (files && files.length) {
files.forEach(file => {
if (file.startsWith('test')) {
const summary = path.resolve(jobsFolder, file, 'results.json');
if (fs.existsSync(summary)) {
tests.push(require(summary) as TestResultInfo);
}
}
});
} else {
console.log('NO Jobs IN: ', jobsFolder);
}
}
return tests;
};

View File

@ -3,6 +3,7 @@ import * as fs from 'fs';
import chalk from 'chalk';
import { useSpinner } from '../utils/useSpinner';
import { Task, TaskRunner } from './task';
import escapeRegExp from 'lodash/escapeRegExp';
const path = require('path');
@ -49,7 +50,7 @@ const preparePackage = async (pkg: any) => {
});
};
const moveFiles = () => {
const copyFiles = () => {
const files = [
'README.md',
'CHANGELOG.md',
@ -59,6 +60,9 @@ const moveFiles = () => {
'src/config/tsconfig.plugin.json',
'src/config/tsconfig.plugin.local.json',
'src/config/tslint.plugin.json',
// plugin test file
'src/plugins/e2e/commonPluginTests.ts',
];
// @ts-ignore
return useSpinner<void>(`Moving ${files.join(', ')} files`, async () => {
@ -111,8 +115,23 @@ const toolkitBuildTaskRunner: TaskRunner<void> = async () => {
await preparePackage(pkg);
fs.mkdirSync('./dist/bin');
fs.mkdirSync('./dist/sass');
await moveFiles();
await copyFiles();
await copySassFiles();
// RYAN HACK HACK HACK
// when Dominik is back from vacation, we can find a better way
// This moves the index to the root so plugin e2e tests can import them
console.warn('hacking an index.js file for toolkit. Help!');
const index = `${distDir}/src/index.js`;
fs.readFile(index, 'utf8', (err, data) => {
const pattern = 'require("./';
const js = data.replace(new RegExp(escapeRegExp(pattern), 'g'), 'require("./src/');
fs.writeFile(`${distDir}/index.js`, js, err => {
if (err) {
throw new Error('Error writing index: ' + err);
}
});
});
};
export const toolkitBuildTask = new Task<void>('@grafana/toolkit build', toolkitBuildTaskRunner);
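
Concretely, the hack above copies dist/src/index.js to dist/index.js while re-rooting its relative requires so they resolve from the package root. An illustrative trace of the same transform on a single line (the module name is hypothetical):

// Illustrative only; the generated index actually contains many such requires.
const line = 'var e2e_1 = require("./e2e");'; // hypothetical tsc output in dist/src/index.js
const moved = line.replace(/require\("\.\//g, 'require("./src/');
// moved === 'var e2e_1 = require("./src/e2e");'  (what ends up in dist/index.js)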

View File

@ -1,5 +1,5 @@
import path = require('path');
import fs = require('fs');
import fs from 'fs';
const whitelistedJestConfigOverrides = ['snapshotSerializers', 'moduleNameMapper'];

View File

@ -42,7 +42,7 @@ const getModuleFiles = () => {
const getManualChunk = (id: string) => {
if (id.endsWith('module.ts') || id.endsWith('module.tsx')) {
const idx = id.indexOf(path.sep + 'src' + path.sep);
const idx = id.lastIndexOf(path.sep + 'src' + path.sep);
if (idx > 0) {
const name = id.substring(idx + 5, id.lastIndexOf('.'));
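
The switch from indexOf to lastIndexOf matters when the checkout path itself contains a src segment; an illustrative case on a POSIX path (so path.sep is '/'), with a hypothetical module id:

const id = '/home/dev/src/my-plugin/src/module.ts';
// indexOf('/src/')     ->  9 -> chunk name 'my-plugin/src/module' (picks up the parent folder)
// lastIndexOf('/src/') -> 23 -> chunk name 'module' (the plugin's own src folder, as intended)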

View File

@ -1,4 +1,6 @@
const fs = require('fs');
import fs from 'fs';
import path from 'path';
const MiniCssExtractPlugin = require('mini-css-extract-plugin');
const supportedExtensions = ['css', 'scss'];
@ -91,6 +93,7 @@ export const getStyleLoaders = () => {
},
];
const styleDir = path.resolve(process.cwd(), 'src', 'styles') + path.sep;
const rules = [
{
test: /(dark|light)\.css$/,
@ -103,12 +106,12 @@ export const getStyleLoaders = () => {
{
test: /\.css$/,
use: ['style-loader', ...cssLoaders, 'sass-loader'],
exclude: [`${process.cwd()}/src/styles/light.css`, `${process.cwd()}/src/styles/dark.css`],
exclude: [`${styleDir}light.css`, `${styleDir}dark.css`],
},
{
test: /\.scss$/,
use: ['style-loader', ...cssLoaders, 'sass-loader'],
exclude: [`${process.cwd()}/src/styles/light.scss`, `${process.cwd()}/src/styles/dark.scss`],
exclude: [`${styleDir}light.scss`, `${styleDir}dark.scss`],
},
];

View File

@ -4,5 +4,8 @@ export * from './install';
export * from './launcher';
export * from './login';
export * from './pageObjects';
export * from './pages';
export * from './pageInfo';
export * from './scenario';
import * as pages from './pages';
export { pages };

View File

@ -1,7 +1,7 @@
import { Page } from 'puppeteer-core';
import { constants } from './constants';
import { loginPage } from './start/loginPage';
import { loginPage } from './pages/loginPage';
export const login = async (page: Page) => {
await loginPage.init(page);

View File

@ -0,0 +1,2 @@
export * from './loginPage';
export * from './pluginsPage';

View File

@ -1,4 +1,4 @@
import { TestPage } from '../pages';
import { TestPage } from '../pageInfo';
import {
Selector,
InputPageObject,

View File

@ -0,0 +1,17 @@
import { TestPage } from '../pageInfo';
export interface PluginsPage {}
export const pluginsPage = new TestPage<PluginsPage>({
url: '/plugins',
pageObjects: {},
});
export function getPluginPage(id: string) {
return new TestPage<PluginsPage>({
url: `/plugins/${id}/`,
pageObjects: {
// TODO Find update/enable buttons
},
});
}

View File

@ -1 +1,6 @@
export * from './e2e';
// Namespace for Plugins
import * as plugins from './plugins';
export { plugins };

View File

@ -0,0 +1,183 @@
import AWS from 'aws-sdk';
import path from 'path';
import fs from 'fs';
import { PluginPackageDetails, ZipFileInfo, TestResultsInfo } from './types';
import defaults from 'lodash/defaults';
import clone from 'lodash/clone';
import { PluginMetaInfo } from '@grafana/ui';
interface UploadArgs {
local: string;
remote: string;
}
export class S3Client {
readonly bucket: string;
readonly prefix: string;
readonly s3: AWS.S3;
constructor(bucket?: string) {
this.bucket = bucket || 'grafana-experiments';
this.prefix = 'plugins/';
this.s3 = new AWS.S3({ apiVersion: '2006-03-01' });
this.s3.headBucket({ Bucket: this.bucket }, (err, data) => {
if (err) {
throw new Error('Unable to read: ' + this.bucket);
} else {
console.log('s3: ' + data);
}
});
}
private async uploadPackage(file: ZipFileInfo, folder: UploadArgs): Promise<string> {
const fpath = path.resolve(process.cwd(), folder.local, file.name);
return await this.uploadFile(fpath, folder.remote + '/' + file.name, file.md5);
}
async uploadPackages(packageInfo: PluginPackageDetails, folder: UploadArgs) {
await this.uploadPackage(packageInfo.plugin, folder);
if (packageInfo.docs) {
await this.uploadPackage(packageInfo.docs, folder);
}
}
async uploadTestFiles(tests: TestResultsInfo[], folder: UploadArgs) {
for (const test of tests) {
for (const s of test.screenshots) {
const img = path.resolve(folder.local, 'jobs', test.job, s);
await this.uploadFile(img, folder.remote + `/jobs/${test.job}/${s}`);
}
}
}
async uploadLogo(meta: PluginMetaInfo, folder: UploadArgs): Promise<string | undefined> {
const { logos } = meta;
if (logos && logos.large) {
const img = folder.local + '/' + logos.large;
const idx = img.lastIndexOf('.');
const name = 'logo' + img.substring(idx);
const key = folder.remote + '/' + name;
await this.uploadFile(img, key);
return name;
}
return undefined;
}
async uploadFile(fpath: string, path: string, md5?: string): Promise<string> {
if (!fs.existsSync(fpath)) {
return Promise.reject('File not found: ' + fpath);
}
console.log('Uploading: ' + fpath);
const stream = fs.createReadStream(fpath);
return new Promise((resolve, reject) => {
this.s3.putObject(
{
Key: this.prefix + path,
Bucket: this.bucket,
Body: stream,
ContentType: getContentTypeForFile(path),
},
(err, data) => {
if (err) {
reject(err);
} else {
if (md5 && md5 !== data.ETag && `"${md5}"` !== data.ETag) {
reject(`Upload ETag does not match MD5 (${md5} !== ${data.ETag})`);
} else {
resolve(data.ETag);
}
}
}
);
});
}
async exits(key: string): Promise<boolean> {
return new Promise((resolve, reject) => {
this.s3.getObject(
{
Bucket: this.bucket,
Key: this.prefix + key,
},
(err, data) => {
if (err) {
resolve(false);
} else {
resolve(true);
}
}
);
});
}
async readJSON<T>(key: string, defaultValue: T): Promise<T> {
return new Promise((resolve, reject) => {
this.s3.getObject(
{
Bucket: this.bucket,
Key: this.prefix + key,
},
(err, data) => {
if (err) {
resolve(clone(defaultValue));
} else {
try {
const v = JSON.parse(data.Body as string);
resolve(defaults(v, defaultValue));
} catch (e) {
console.log('ERROR', e);
reject('Error reading response');
}
}
}
);
});
}
async writeJSON(
key: string,
obj: {},
params?: Partial<AWS.S3.Types.PutObjectRequest>
): Promise<AWS.S3.Types.PutObjectOutput> {
return new Promise((resolve, reject) => {
this.s3.putObject(
{
...params,
Key: this.prefix + key,
Bucket: this.bucket,
Body: JSON.stringify(obj, null, 2), // Pretty print
ContentType: 'application/json',
},
(err, data) => {
if (err) {
reject(err);
} else {
resolve(data);
}
}
);
});
}
}
function getContentTypeForFile(name: string): string | undefined {
const idx = name.lastIndexOf('.');
if (idx > 0) {
const ext = name.substring(idx + 1).toLowerCase();
if (ext === 'zip') {
return 'application/zip';
}
if (ext === 'json') {
return 'application/json';
}
if (ext === 'svg') {
return 'image/svg+xml';
}
if (ext === 'png') {
return 'image/png';
}
}
return undefined;
}
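
A minimal usage sketch of the client above (illustrative, not part of the commit; it assumes AWS credentials are already present in the environment, which aws-sdk resolves on its own, and uses hypothetical keys):

import { S3Client } from './aws';
import { defaultPluginHistory } from './types';

async function example() {
  // Keys land under the 'plugins/' prefix of the 'grafana-experiments' bucket by default.
  const s3 = new S3Client();
  const jobKey = 'dev/my-datasource/pr/123/index.json'; // hypothetical plugin id and PR number
  if (!(await s3.exits(jobKey))) {
    await s3.writeJSON(jobKey, { anything: 'serializable' }, { Tagging: 'version=1.0.0&type=datasource' });
  }
  const history = await s3.readJSON('dev/my-datasource/branch/master/history.json', defaultPluginHistory);
  console.log('size frames recorded so far:', history.size.length);
}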

View File

@ -0,0 +1,23 @@
import { Browser, Page } from 'puppeteer-core';
import { e2eScenario, takeScreenShot, plugins, pages } from '@grafana/toolkit';
// ****************************************************************
// NOTE, This file is copied to plugins at runtime, it is not run locally
// ****************************************************************
const sleep = (milliseconds: number) => {
return new Promise(resolve => setTimeout(resolve, milliseconds));
};
e2eScenario('Common Plugin Test', 'should pass', async (browser: Browser, page: Page) => {
const settings = plugins.getEndToEndSettings();
const pluginPage = pages.getPluginPage(settings.plugin.id);
await pluginPage.init(page);
await pluginPage.navigateTo();
// TODO: find a better way to avoid the 'loading' page
await sleep(500);
const fileName = 'plugin-page';
await takeScreenShot(page, fileName);
});

View File

@ -0,0 +1,45 @@
import * as jestCLI from 'jest-cli';
import { TestResultsInfo } from '../types';
import fs from 'fs';
export async function runEndToEndTests(outputDirectory: string, results: TestResultsInfo): Promise<void> {
const setupPath = 'node_modules/@grafana/toolkit/src/e2e/install';
let ext = '.js';
if (!fs.existsSync(setupPath + ext)) {
ext = '.ts'; // When running yarn link
}
const jestConfig = {
preset: 'ts-jest',
verbose: false,
moduleDirectories: ['node_modules'], // add the plugin somehow?
moduleFileExtensions: ['ts', 'tsx', 'js', 'jsx', 'json'],
setupFiles: [],
setupFilesAfterEnv: [
'expect-puppeteer', // Setup Puppeteer
'<rootDir>/' + setupPath + ext, // Loads Chromimum
],
globals: { 'ts-jest': { isolatedModules: true } },
testMatch: [
'<rootDir>/e2e-temp/**/*.test.ts', // Copied from node_modules
'<rootDir>/e2e/test/**/*.test.ts',
],
reporters: [
'default',
['jest-junit', { outputDirectory }], // save junit.xml to folder
],
};
const cliConfig = {
config: JSON.stringify(jestConfig),
passWithNoTests: true,
};
// @ts-ignore
const runJest = () => jestCLI.runCLI(cliConfig, [process.cwd()]);
const jestOutput = await runJest();
results.passed = jestOutput.results.numPassedTests;
results.failed = jestOutput.results.numFailedTestSuites;
return;
}

View File

@ -0,0 +1,38 @@
import { PluginMeta } from '@grafana/ui';
import path from 'path';
import fs from 'fs';
import { constants } from '../../e2e/constants';
export interface Settings {
plugin: PluginMeta;
outputFolder: string;
}
let env: Settings | null = null;
export function getEndToEndSettings() {
if (env) {
return env;
}
let f = path.resolve(process.cwd(), 'ci', 'dist', 'plugin.json');
if (!fs.existsSync(f)) {
f = path.resolve(process.cwd(), 'dist', 'plugin.json');
if (!fs.existsSync(f)) {
f = path.resolve(process.cwd(), 'src', 'plugin.json');
}
}
const outputFolder = path.resolve(process.cwd(), 'e2e-results');
if (!fs.existsSync(outputFolder)) {
fs.mkdirSync(outputFolder, { recursive: true });
}
constants.screenShotsTruthDir = path.resolve(process.cwd(), 'e2e', 'truth');
constants.screenShotsOutputDir = outputFolder;
return (env = {
plugin: require(f) as PluginMeta,
outputFolder,
});
}

View File

@ -0,0 +1,113 @@
import execa from 'execa';
import path from 'path';
import fs from 'fs';
import { PluginBuildInfo } from '@grafana/ui';
import { JobInfo } from './types';
const getJobFromProcessArgv = () => {
const arg = process.argv[2];
if (arg && arg.startsWith('plugin:ci-')) {
const task = arg.substring('plugin:ci-'.length);
if ('build' === task) {
if ('--backend' === process.argv[3] && process.argv[4]) {
return task + '_' + process.argv[4];
}
return 'build_plugin';
}
return task;
}
return 'unknown_job';
};
export const job = process.env.CIRCLE_JOB || getJobFromProcessArgv();
export const getPluginBuildInfo = async (): Promise<PluginBuildInfo> => {
if (process.env.CIRCLE_SHA1) {
return Promise.resolve({
time: Date.now(),
repo: process.env.CIRCLE_REPOSITORY_URL,
branch: process.env.CIRCLE_BRANCH,
hash: process.env.CIRCLE_SHA1,
});
}
const branch = await execa('git', ['rev-parse', '--abbrev-ref', 'HEAD']);
const hash = await execa('git', ['rev-parse', 'HEAD']);
return {
time: Date.now(),
branch: branch.stdout,
hash: hash.stdout,
};
};
export const getBuildNumber = (): number | undefined => {
if (process.env.CIRCLE_BUILD_NUM) {
return parseInt(process.env.CIRCLE_BUILD_NUM, 10);
}
return undefined;
};
export const getPullRequestNumber = (): number | undefined => {
if (process.env.CIRCLE_PULL_REQUEST) {
const url = process.env.CIRCLE_PULL_REQUEST;
const idx = url.lastIndexOf('/') + 1;
return parseInt(url.substring(idx), 10);
}
return undefined;
};
export const getJobFolder = () => {
const dir = path.resolve(process.cwd(), 'ci', 'jobs', job);
if (!fs.existsSync(dir)) {
fs.mkdirSync(dir, { recursive: true });
}
return dir;
};
export const getCiFolder = () => {
const dir = path.resolve(process.cwd(), 'ci');
if (!fs.existsSync(dir)) {
fs.mkdirSync(dir, { recursive: true });
}
return dir;
};
export const writeJobStats = (startTime: number, workDir: string) => {
const endTime = Date.now();
const stats: JobInfo = {
job,
startTime,
endTime,
elapsed: endTime - startTime,
buildNumber: getBuildNumber(),
};
const f = path.resolve(workDir, 'job.json');
fs.writeFile(f, JSON.stringify(stats, null, 2), err => {
if (err) {
throw new Error('Unable to stats: ' + f);
}
});
};
export async function getCircleDownloadBaseURL(): Promise<string | undefined> {
try {
const axios = require('axios');
const buildNumber = getBuildNumber();
const repo = process.env.CIRCLE_PROJECT_REPONAME;
const user = process.env.CIRCLE_PROJECT_USERNAME;
let url = `https://circleci.com/api/v1.1/project/github/${user}/${repo}/latest/artifacts`;
const rsp = await axios.get(url);
for (const s of rsp.data) {
let idx = s.url.indexOf('-');
if (idx > 0) {
url = s.url.substring(idx);
idx = url.indexOf('circleci/plugin/ci');
if (idx > 0) {
url = url.substring(0, idx);
url = `https://${buildNumber}${url}circleci/plugin/ci`;
return url;
}
}
}
} catch {}
return undefined;
}
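
A hedged trace of the URL rewrite above, assuming CircleCI artifact URLs of the usual https://<buildNum>-<projectId>-gh.circle-artifacts.com/<node>/... shape (all values illustrative):

// Illustrative only; mirrors the string handling in getCircleDownloadBaseURL.
const artifactUrl = 'https://24-88881093-gh.circle-artifacts.com/0/circleci/plugin/ci/report.json';
const buildNumber = 57; // current build, from getBuildNumber()
let url = artifactUrl.substring(artifactUrl.indexOf('-')); // '-88881093-gh.circle-artifacts.com/0/...'
url = url.substring(0, url.indexOf('circleci/plugin/ci')); // '-88881093-gh.circle-artifacts.com/0/'
url = `https://${buildNumber}${url}circleci/plugin/ci`;
// url === 'https://57-88881093-gh.circle-artifacts.com/0/circleci/plugin/ci'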

View File

@ -0,0 +1,6 @@
export * from './aws';
export * from './env';
export * from './utils';
export * from './workflow';
export * from './types';
export * from './e2e/settings';

View File

@ -0,0 +1,110 @@
import { PluginMeta, PluginBuildInfo } from '@grafana/ui';
import { DataFrame } from '@grafana/data';
export interface PluginPackageDetails {
plugin: ZipFileInfo;
docs?: ZipFileInfo;
}
export interface PluginBuildReport {
plugin: PluginMeta;
packages: PluginPackageDetails;
workflow: WorkflowInfo;
coverage: CoverageInfo[];
tests: TestResultsInfo[];
pullRequest?: number;
artifactsBaseURL?: string;
}
export interface JobInfo {
job?: string;
startTime: number;
endTime: number;
elapsed: number;
status?: string;
buildNumber?: number;
}
export interface WorkflowInfo extends JobInfo {
workflowId?: string;
jobs: JobInfo[];
user?: string;
repo?: string;
}
export interface CoverageDetails {
total: number;
covered: number;
skipped: number;
pct: number;
}
export interface CoverageInfo {
job: string;
summary: { [key: string]: CoverageDetails };
report?: string; // path to report
}
export interface TestResultsInfo {
job: string;
grafana?: any;
error?: string;
passed: number;
failed: number;
screenshots: string[];
}
// Saved at the folder level
export interface PluginHistory {
last: {
info: PluginDevInfo;
report: PluginBuildReport;
};
size: DataFrame[]; // New frame for each package
coverage: DataFrame[]; // New frame for each job
timing: DataFrame[]; // New frame for each job/workflow
}
export interface PluginDevInfo {
pluginId: string;
name: string;
logo?: string; // usually logo.svg or logo.png
build: PluginBuildInfo;
version: string;
}
export interface DevSummary {
[key: string]: PluginDevInfo;
}
export interface PluginDevSummary {
branch: DevSummary;
pr: DevSummary;
}
export const defaultPluginHistory: PluginHistory = {
last: {
info: {} as PluginDevInfo,
report: {} as PluginBuildReport,
},
size: [],
coverage: [],
timing: [],
};
export interface CountAndSize {
count: number;
bytes: number;
}
export interface ExtensionSize {
[key: string]: CountAndSize;
}
export interface ZipFileInfo {
name: string;
size: number;
contents: ExtensionSize;
sha1?: string;
md5?: string;
}

View File

@ -1,15 +1,12 @@
import execa = require('execa');
import path = require('path');
import fs = require('fs');
import execa from 'execa';
import path from 'path';
import fs from 'fs';
import { PluginDevInfo, ExtensionSize, ZipFileInfo, PluginBuildReport, PluginHistory } from './types';
interface ExtensionBytes {
[key: string]: number;
}
const md5File = require('md5-file');
export function getFileSizeReportInFolder(dir: string, info?: ExtensionBytes): ExtensionBytes {
if (!info) {
info = {};
}
export function getFileSizeReportInFolder(dir: string, info?: ExtensionSize): ExtensionSize {
const acc: ExtensionSize = info ? info : {};
const files = fs.readdirSync(dir);
if (files) {
@ -19,25 +16,22 @@ export function getFileSizeReportInFolder(dir: string, info?: ExtensionBytes): E
if (stat.isDirectory()) {
getFileSizeReportInFolder(newbase, info);
} else {
let ext = '<other>';
let ext = '_none_';
const idx = file.lastIndexOf('.');
if (idx > 0) {
ext = file.substring(idx + 1).toLowerCase();
}
const current = info![ext] || 0;
info![ext] = current + stat.size;
const current = acc[ext];
if (current) {
current.count += 1;
current.bytes += stat.size;
} else {
acc[ext] = { bytes: stat.size, count: 1 };
}
}
});
}
return info;
}
interface ZipFileInfo {
name: string;
size: number;
contents: ExtensionBytes;
sha1?: string;
md5?: string;
return acc;
}
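
For reference, the reshaped report now tracks a file count alongside the byte total per extension; a hypothetical result for a small dist folder might look like:

// Hypothetical return value of getFileSizeReportInFolder('dist'); numbers are illustrative.
const sizes: ExtensionSize = {
  js: { count: 3, bytes: 482133 },
  css: { count: 1, bytes: 10240 },
  png: { count: 2, bytes: 35712 },
  _none_: { count: 1, bytes: 212 }, // files without an extension
};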
export async function getPackageDetails(zipFile: string, zipSrc: string, writeChecksum = true): Promise<ZipFileInfo> {
@ -62,11 +56,37 @@ export async function getPackageDetails(zipFile: string, zipSrc: string, writeCh
console.warn('Unable to read SHA1 Checksum');
}
try {
const exe = await execa('md5sum', [zipFile]);
const idx = exe.stdout.indexOf(' ');
info.md5 = exe.stdout.substring(0, idx);
info.md5 = md5File.sync(zipFile);
} catch {
console.warn('Unable to read MD5 Checksum');
}
return info;
}
export function findImagesInFolder(dir: string, prefix = '', append?: string[]): string[] {
const imgs = append || [];
const files = fs.readdirSync(dir);
if (files) {
files.forEach(file => {
if (file.endsWith('.png')) {
imgs.push(file);
}
});
}
return imgs;
}
export function appendPluginHistory(report: PluginBuildReport, info: PluginDevInfo, history: PluginHistory) {
history.last = {
info,
report,
};
if (!history.size) {
history.size = [];
}
console.log('TODO, append build stats to the last one');
}

View File

@ -0,0 +1,99 @@
import path from 'path';
import fs from 'fs';
import { JobInfo, WorkflowInfo, CoverageInfo, TestResultsInfo } from './types';
import { getBuildNumber, getCiFolder } from './env';
export const agregateWorkflowInfo = (): WorkflowInfo => {
const now = Date.now();
const workflow: WorkflowInfo = {
jobs: [],
startTime: now,
endTime: now,
workflowId: process.env.CIRCLE_WORKFLOW_ID,
repo: process.env.CIRCLE_PROJECT_REPONAME,
user: process.env.CIRCLE_PROJECT_USERNAME,
buildNumber: getBuildNumber(),
elapsed: 0,
};
const jobsFolder = path.resolve(getCiFolder(), 'jobs');
if (fs.existsSync(jobsFolder)) {
const files = fs.readdirSync(jobsFolder);
if (files && files.length) {
files.forEach(file => {
const p = path.resolve(jobsFolder, file, 'job.json');
if (fs.existsSync(p)) {
const job = require(p) as JobInfo;
workflow.jobs.push(job);
if (job.startTime < workflow.startTime) {
workflow.startTime = job.startTime;
}
if (job.endTime > workflow.endTime) {
workflow.endTime = job.endTime;
}
} else {
console.log('Missing Job info: ', p);
}
});
} else {
console.log('NO JOBS IN: ', jobsFolder);
}
}
workflow.elapsed = workflow.endTime - workflow.startTime;
return workflow;
};
export const agregateCoverageInfo = (): CoverageInfo[] => {
const coverage: CoverageInfo[] = [];
const ciDir = getCiFolder();
const jobsFolder = path.resolve(ciDir, 'jobs');
if (fs.existsSync(jobsFolder)) {
const files = fs.readdirSync(jobsFolder);
if (files && files.length) {
files.forEach(file => {
const dir = path.resolve(jobsFolder, file, 'coverage');
if (fs.existsSync(dir)) {
const s = path.resolve(dir, 'coverage-summary.json');
const r = path.resolve(dir, 'lcov-report', 'index.html');
if (fs.existsSync(s)) {
const raw = require(s);
const info: CoverageInfo = {
job: file,
summary: raw.total,
};
if (fs.existsSync(r)) {
info.report = r.substring(ciDir.length);
}
coverage.push(info);
}
}
});
} else {
console.log('NO JOBS IN: ', jobsFolder);
}
}
return coverage;
};
export const agregateTestInfo = (): TestResultsInfo[] => {
const tests: TestResultsInfo[] = [];
const ciDir = getCiFolder();
const jobsFolder = path.resolve(ciDir, 'jobs');
if (fs.existsSync(jobsFolder)) {
const files = fs.readdirSync(jobsFolder);
if (files && files.length) {
files.forEach(file => {
if (file.startsWith('test')) {
const summary = path.resolve(jobsFolder, file, 'results.json');
if (fs.existsSync(summary)) {
tests.push(require(summary) as TestResultsInfo);
}
}
});
} else {
console.log('NO Jobs IN: ', jobsFolder);
}
}
return tests;
};

View File

@ -6,7 +6,8 @@
"module": "commonjs",
"rootDirs": ["."],
"outDir": "dist/src",
"declaration": false,
"declaration": true,
"declarationDir": "dist",
"typeRoots": ["./node_modules/@types"],
"esModuleInterop": true,
"lib": ["es2015", "es2017.string", "dom"]

View File

@ -2,7 +2,7 @@
"author": "Grafana Labs",
"license": "Apache-2.0",
"name": "@grafana/ui",
"version": "6.4.0-alpha.22",
"version": "6.4.0-alpha.44",
"description": "Grafana Components Library",
"keywords": [
"grafana",

View File

@ -1,5 +1,5 @@
import React, { Component, createRef } from 'react';
import { omit } from 'lodash';
import omit from 'lodash/omit';
import { PopperController } from '../Tooltip/PopperController';
import { Popper } from '../Tooltip/Popper';
import { ColorPickerPopover, ColorPickerProps, ColorPickerChangeHandler } from './ColorPickerPopover';

View File

@ -83,7 +83,12 @@ export interface PluginBuildInfo {
branch?: string;
hash?: string;
number?: number;
pr?: string;
pr?: number;
}
export interface ScreenshotInfo {
name: string;
path: string;
}
export interface PluginMetaInfo {
@ -98,7 +103,7 @@ export interface PluginMetaInfo {
small: string;
};
build?: PluginBuildInfo;
screenshots: any[];
screenshots: ScreenshotInfo[];
updated: string;
version: string;
}

View File

@ -4190,6 +4190,21 @@ awesome-typescript-loader@5.2.1, awesome-typescript-loader@^5.2.1:
source-map-support "^0.5.3"
webpack-log "^1.2.0"
aws-sdk@^2.495.0:
version "2.495.0"
resolved "https://registry.yarnpkg.com/aws-sdk/-/aws-sdk-2.495.0.tgz#0b0ad8fcf581cb7bb858864fab88d461f0e67677"
integrity sha512-KG2nqF3biiAliMJpbavM0tLGzhcLkgJMHQ/q84+Wi5kc6+mjPSbtnctWYnvAFwoRiiygx82FA4Fx5ShnHOqinw==
dependencies:
buffer "4.9.1"
events "1.1.1"
ieee754 "1.1.8"
jmespath "0.15.0"
querystring "0.2.0"
sax "1.2.1"
url "0.10.3"
uuid "3.3.2"
xml2js "0.4.19"
aws-sign2@~0.7.0:
version "0.7.0"
resolved "https://registry.yarnpkg.com/aws-sign2/-/aws-sign2-0.7.0.tgz#b46e890934a9591f2d2f6f86d7e6a9f1b3fe76a8"
@ -4909,7 +4924,7 @@ buffer-xor@^1.0.3:
version "1.0.3"
resolved "https://registry.yarnpkg.com/buffer-xor/-/buffer-xor-1.0.3.tgz#26e61ed1422fb70dd42e6e36729ed51d855fe8d9"
buffer@^4.3.0:
buffer@4.9.1, buffer@^4.3.0:
version "4.9.1"
resolved "https://registry.yarnpkg.com/buffer/-/buffer-4.9.1.tgz#6d1bb601b07a4efced97094132093027c95bc298"
dependencies:
@ -5088,11 +5103,6 @@ caniuse-api@^3.0.0:
lodash.memoize "^4.1.2"
lodash.uniq "^4.5.0"
caniuse-db@1.0.30000772:
version "1.0.30000772"
resolved "https://registry.yarnpkg.com/caniuse-db/-/caniuse-db-1.0.30000772.tgz#51aae891768286eade4a3d8319ea76d6a01b512b"
integrity sha1-UarokXaChureSj2DGep21qAbUSs=
caniuse-lite@^1.0.0, caniuse-lite@^1.0.30000929, caniuse-lite@^1.0.30000947, caniuse-lite@^1.0.30000957, caniuse-lite@^1.0.30000963:
version "1.0.30000966"
resolved "https://registry.yarnpkg.com/caniuse-lite/-/caniuse-lite-1.0.30000966.tgz#f3c6fefacfbfbfb981df6dfa68f2aae7bff41b64"
@ -7635,6 +7645,11 @@ eventemitter3@^3.0.0, eventemitter3@^3.1.0:
version "3.1.2"
resolved "https://registry.yarnpkg.com/eventemitter3/-/eventemitter3-3.1.2.tgz#2d3d48f9c346698fce83a85d7d664e98535df6e7"
events@1.1.1:
version "1.1.1"
resolved "https://registry.yarnpkg.com/events/-/events-1.1.1.tgz#9ebdb7635ad099c70dcc4c2a1f5004288e8bd924"
integrity sha1-nr23Y1rQmccNzEwqH1AEKI6L2SQ=
events@^3.0.0:
version "3.0.0"
resolved "https://registry.yarnpkg.com/events/-/events-3.0.0.tgz#9a0a0dfaf62893d92b875b8f2698ca4114973e88"
@ -9334,6 +9349,11 @@ icss-utils@^4.1.0:
dependencies:
postcss "^7.0.14"
ieee754@1.1.8:
version "1.1.8"
resolved "https://registry.yarnpkg.com/ieee754/-/ieee754-1.1.8.tgz#be33d40ac10ef1926701f6f08a2d86fbfd1ad3e4"
integrity sha1-vjPUCsEO8ZJnAfbwii2G+/0a0+Q=
ieee754@^1.1.4:
version "1.1.13"
resolved "https://registry.yarnpkg.com/ieee754/-/ieee754-1.1.13.tgz#ec168558e95aa181fd87d37f55c32bbcb6708b84"
@ -10292,6 +10312,16 @@ jest-jasmine2@^24.8.0:
pretty-format "^24.8.0"
throat "^4.0.0"
jest-junit@^6.4.0:
version "6.4.0"
resolved "https://registry.yarnpkg.com/jest-junit/-/jest-junit-6.4.0.tgz#23e15c979fa6338afde46f2d2ac2a6b7e8cf0d9e"
integrity sha512-GXEZA5WBeUich94BARoEUccJumhCgCerg7mXDFLxWwI2P7wL3Z7sGWk+53x343YdBLjiMR9aD/gYMVKO+0pE4Q==
dependencies:
jest-validate "^24.0.0"
mkdirp "^0.5.1"
strip-ansi "^4.0.0"
xml "^1.0.1"
jest-leak-detector@^24.8.0:
version "24.8.0"
resolved "https://registry.yarnpkg.com/jest-leak-detector/-/jest-leak-detector-24.8.0.tgz#c0086384e1f650c2d8348095df769f29b48e6980"
@ -10442,7 +10472,7 @@ jest-util@^24.8.0:
slash "^2.0.0"
source-map "^0.6.0"
jest-validate@^24.8.0:
jest-validate@^24.0.0, jest-validate@^24.8.0:
version "24.8.0"
resolved "https://registry.yarnpkg.com/jest-validate/-/jest-validate-24.8.0.tgz#624c41533e6dfe356ffadc6e2423a35c2d3b4849"
dependencies:
@ -10479,6 +10509,11 @@ jest@24.8.0:
import-local "^2.0.0"
jest-cli "^24.8.0"
jmespath@0.15.0:
version "0.15.0"
resolved "https://registry.yarnpkg.com/jmespath/-/jmespath-0.15.0.tgz#a3f222a9aae9f966f5d27c796510e28091764217"
integrity sha1-o/Iiqarp+Wb10nx5ZRDigJF2Qhc=
jquery@3.4.1:
version "3.4.1"
resolved "https://registry.yarnpkg.com/jquery/-/jquery-3.4.1.tgz#714f1f8d9dde4bdfa55764ba37ef214630d80ef2"
@ -11449,6 +11484,11 @@ math-random@^1.0.1:
version "1.0.4"
resolved "https://registry.yarnpkg.com/math-random/-/math-random-1.0.4.tgz#5dd6943c938548267016d4e34f057583080c514c"
md5-file@^4.0.0:
version "4.0.0"
resolved "https://registry.yarnpkg.com/md5-file/-/md5-file-4.0.0.tgz#f3f7ba1e2dd1144d5bf1de698d0e5f44a4409584"
integrity sha512-UC0qFwyAjn4YdPpKaDNw6gNxRf7Mcx7jC1UGCY4boCzgvU2Aoc1mOGzTtrjjLKhM5ivsnhoKpQVxKPp+1j1qwg==
md5.js@^1.3.4:
version "1.3.5"
resolved "https://registry.yarnpkg.com/md5.js/-/md5.js-1.3.5.tgz#b5d07b8e3216e3e27cd728d72f70d1e6a342005f"
@ -13181,6 +13221,13 @@ pixelmatch@4.0.2:
dependencies:
pngjs "^3.0.0"
pixelmatch@^5.0.2:
version "5.0.2"
resolved "https://registry.yarnpkg.com/pixelmatch/-/pixelmatch-5.0.2.tgz#b1349c3b544e20107a4dd7e532b01291946258cd"
integrity sha512-b65UpTI40rGFY8QwN6IYuCbpmwAOL6M8d6voX4F3zR99UmDqh7r2QWLxoeHOazBRgEmDUdqNVESDREqFxQS7rQ==
dependencies:
pngjs "^3.4.0"
pkg-dir@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/pkg-dir/-/pkg-dir-2.0.0.tgz#f6d5d1109e19d63edf428e0bd57e12777615334b"
@ -13220,7 +13267,7 @@ pn@^1.1.0:
version "1.1.0"
resolved "https://registry.yarnpkg.com/pn/-/pn-1.1.0.tgz#e2f4cef0e219f463c179ab37463e4e1ecdccbafb"
pngjs@3.4.0, pngjs@^3.0.0:
pngjs@3.4.0, pngjs@^3.0.0, pngjs@^3.4.0:
version "3.4.0"
resolved "https://registry.yarnpkg.com/pngjs/-/pngjs-3.4.0.tgz#99ca7d725965fb655814eaf65f38f12bbdbf555f"
@ -15841,7 +15888,12 @@ sass-loader@7.1.0:
pify "^3.0.0"
semver "^5.5.0"
sax@^1.2.4, sax@~1.2.4:
sax@1.2.1:
version "1.2.1"
resolved "https://registry.yarnpkg.com/sax/-/sax-1.2.1.tgz#7b8e656190b228e81a66aea748480d828cd2d37a"
integrity sha1-e45lYZCyKOgaZq6nSEgNgozS03o=
sax@>=0.6.0, sax@^1.2.4, sax@~1.2.4:
version "1.2.4"
resolved "https://registry.yarnpkg.com/sax/-/sax-1.2.4.tgz#2816234e2378bddc4e5354fab5caa895df7100d9"
@ -17717,6 +17769,14 @@ url-template@^2.0.8:
resolved "https://registry.yarnpkg.com/url-template/-/url-template-2.0.8.tgz#fc565a3cccbff7730c775f5641f9555791439f21"
integrity sha1-/FZaPMy/93MMd19WQflVV5FDnyE=
url@0.10.3:
version "0.10.3"
resolved "https://registry.yarnpkg.com/url/-/url-0.10.3.tgz#021e4d9c7705f21bbf37d03ceb58767402774c64"
integrity sha1-Ah5NnHcF8hu/N9A861h2dAJ3TGQ=
dependencies:
punycode "1.3.2"
querystring "0.2.0"
url@^0.11.0:
version "0.11.0"
resolved "https://registry.yarnpkg.com/url/-/url-0.11.0.tgz#3838e97cfc60521eb73c525a8e55bfdd9e2e28f1"
@ -17792,7 +17852,7 @@ utils-merge@1.0.1:
version "1.0.1"
resolved "https://registry.yarnpkg.com/utils-merge/-/utils-merge-1.0.1.tgz#9f95710f50a267947b2ccc124741c1028427e713"
uuid@^3.0.1, uuid@^3.1.0, uuid@^3.3.2:
uuid@3.3.2, uuid@^3.0.1, uuid@^3.1.0, uuid@^3.3.2:
version "3.3.2"
resolved "https://registry.yarnpkg.com/uuid/-/uuid-3.3.2.tgz#1b4af4955eb3077c501c23872fc6513811587131"
@ -18364,6 +18424,24 @@ xml-name-validator@^3.0.0:
version "3.0.0"
resolved "https://registry.yarnpkg.com/xml-name-validator/-/xml-name-validator-3.0.0.tgz#6ae73e06de4d8c6e47f9fb181f78d648ad457c6a"
xml2js@0.4.19:
version "0.4.19"
resolved "https://registry.yarnpkg.com/xml2js/-/xml2js-0.4.19.tgz#686c20f213209e94abf0d1bcf1efaa291c7827a7"
integrity sha512-esZnJZJOiJR9wWKMyuvSE1y6Dq5LCuJanqhxslH2bxM6duahNZ+HMpCLhBQGZkbX6xRf8x1Y2eJlgt2q3qo49Q==
dependencies:
sax ">=0.6.0"
xmlbuilder "~9.0.1"
xml@^1.0.1:
version "1.0.1"
resolved "https://registry.yarnpkg.com/xml/-/xml-1.0.1.tgz#78ba72020029c5bc87b8a81a3cfcd74b4a2fc1e5"
integrity sha1-eLpyAgApxbyHuKgaPPzXS0ovweU=
xmlbuilder@~9.0.1:
version "9.0.7"
resolved "https://registry.yarnpkg.com/xmlbuilder/-/xmlbuilder-9.0.7.tgz#132ee63d2ec5565c557e20f4c22df9aca686b10d"
integrity sha1-Ey7mPS7FVlxVfiD0wi35rKaGsQ0=
xmlhttprequest@1:
version "1.8.0"
resolved "https://registry.yarnpkg.com/xmlhttprequest/-/xmlhttprequest-1.8.0.tgz#67fe075c5c24fef39f9d65f5f7b7fe75171968fc"