Chore: Remove console output from some jest tests (#45792)

* Chore: Remove console output from some jest tests

* Skip manual performance test
Authored by Ashley Harrison on 2022-02-24 10:49:48 +00:00; committed by GitHub
parent 5c6061acd2
commit d3700c4032
6 changed files with 14 additions and 59 deletions
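
For context, the commit applies one recurring pattern: silence expected console output inside the tests themselves, and restore the real console afterwards. Below is a minimal, self-contained sketch of that pattern, not the actual Grafana code; noisyFunction is a hypothetical stand-in for the modules touched in the diff.

// Hypothetical code under test, standing in for the real modules changed below.
function noisyFunction(): number {
  console.log('some debug output');
  console.warn('something looks off');
  return 42;
}

describe('silencing expected console output', () => {
  beforeEach(() => {
    // Replace console methods with no-op mocks so the jest output stays clean.
    jest.spyOn(console, 'log').mockImplementation();
    jest.spyOn(console, 'warn').mockImplementation(() => {});
  });

  afterEach(() => {
    // Put the real console back after every test.
    jest.restoreAllMocks();
  });

  it('still allows asserting on the suppressed output', () => {
    expect(noisyFunction()).toBe(42);
    expect(console.warn).toHaveBeenCalledWith('something looks off');
  });
});

Restoring in afterEach rather than afterAll keeps each test isolated, which is also why the first file in the diff switches from afterAll to afterEach.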

@@ -2,9 +2,11 @@ import { getStylesheetEntries, hasThemeStylesheets } from './loaders';
 describe('Loaders', () => {
   describe('stylesheet helpers', () => {
-    jest.spyOn(console, 'log').mockImplementation();
+    beforeEach(() => {
+      jest.spyOn(console, 'log').mockImplementation();
+    });
-    afterAll(() => {
+    afterEach(() => {
       jest.restoreAllMocks();
     });
@@ -23,12 +25,12 @@ describe('Loaders', () => {
     describe('hasThemeStylesheets', () => {
       it('throws when only one theme file is defined', () => {
-        jest.spyOn(console, 'error').mockImplementation();
+        const errorSpy = jest.spyOn(console, 'error').mockImplementation();
         const result = () => {
           hasThemeStylesheets(`${__dirname}/../mocks/stylesheetsSupport/missing-theme-file`);
         };
         expect(result).toThrow();
-        jest.restoreAllMocks();
+        errorSpy.mockRestore();
       });
       it('returns false when no theme files present', () => {

@@ -202,7 +202,8 @@ describe('nullInsertThreshold Transformer', () => {
     expect(result).toBe(df);
   });
-  test('perf stress test should be <= 10ms', () => {
+  // Leave this test skipped - it should be run manually
+  test.skip('perf stress test should be <= 10ms', () => {
     // 10 fields x 3,000 values with 50% skip (output = 10 fields x 6,000 values)
     let bigFrameA = genFrame();
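
As a side note on the test.skip change above: Jest still lists skipped tests in the run summary, and a manual run usually means temporarily switching the modifier. A tiny self-contained sketch of the two modifiers (hypothetical test names, not the Grafana suite):

// Jest reports skipped tests as "skipped" but never executes them.
test.skip('expensive perf check, run manually', () => {
  // heavy work would go here; not executed while skipped
  expect(1 + 1).toBe(2);
});

// Temporarily switching to test.only runs just this test within the file,
// which is handy for a one-off manual perf run. Revert before committing.
test.only('quick sanity check', () => {
  expect(2 + 2).toBe(4);
});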

@@ -113,31 +113,6 @@ describe('state functions', () => {
         '{"expr":"super{foo=\\"x/z\\"}","refId":"B"}],"range":{"from":"now-5h","to":"now"}}'
       );
     });
-    // TODO: remove in 9.0
-    it('returns url parameter value for a state object', () => {
-      const state = {
-        ...DEFAULT_EXPLORE_STATE,
-        datasource: 'foo',
-        queries: [
-          {
-            expr: 'metric{test="a/b"}',
-            refId: 'A',
-          },
-          {
-            expr: 'super{foo="x/z"}',
-            refId: 'B',
-          },
-        ],
-        range: {
-          from: 'now-5h',
-          to: 'now',
-        },
-      };
-      expect(serializeStateToUrlParam(state, true)).toBe(
-        '{"datasource":"foo","queries":[{"expr":"metric{test=\\"a/b\\"}","refId":"A"},{"expr":"super{foo=\\"x/z\\"}","refId":"B"}],"range":{"from":"now-5h","to":"now"}}'
-      );
-    });
   });
   describe('interplay', () => {
@@ -165,32 +140,6 @@ describe('state functions', () => {
       expect(state).toMatchObject(parsed);
     });
-    // TODO: remove in 9.0
-    it('can parse the compact serialized state into the original state', () => {
-      const state = {
-        ...DEFAULT_EXPLORE_STATE,
-        datasource: 'foo',
-        queries: [
-          {
-            expr: 'metric{test="a/b"}',
-            refId: 'A',
-          },
-          {
-            expr: 'super{foo="x/z"}',
-            refId: 'B',
-          },
-        ],
-        range: {
-          from: 'now - 5h',
-          to: 'now',
-        },
-        panelsState: undefined,
-      };
-      const serialized = serializeStateToUrlParam(state, true);
-      const parsed = parseUrlState(serialized);
-      expect(state).toMatchObject(parsed);
-    });
     it('can parse serialized panelsState into the original state', () => {
       const state = {
         ...DEFAULT_EXPLORE_STATE,
@@ -215,7 +164,7 @@ describe('state functions', () => {
           },
         },
       };
-      const serialized = serializeStateToUrlParam(state, true);
+      const serialized = serializeStateToUrlParam(state);
       const parsed = parseUrlState(serialized);
       expect(state).toMatchObject(parsed);
     });

@@ -13,7 +13,7 @@ const props = {
       { text: 'Loki', value: 'ds-loki' },
     ],
   },
-  variable: { ...initialDataSourceVariableModelState },
+  variable: { ...initialDataSourceVariableModelState, rootStateKey: 'foo' },
   onPropChange: jest.fn(),
   // connected actions

@@ -106,7 +106,6 @@ export function runWithRetry(
     timerID = setTimeout(
       () => {
         retries++;
-        console.log(`Attempt ${retries}`);
         run(errorData!.errors);
       },
       // We want to know how long to wait for the next retry. First time this will be 0.

@@ -382,6 +382,7 @@ describe('Language completion provider', () => {
   });
   it('returns a refresher on label context and unavailable metric', async () => {
+    jest.spyOn(console, 'warn').mockImplementation(() => {});
     const instance = new LanguageProvider(datasource);
     const value = Plain.deserialize('metric{}');
     const ed = new SlateEditor({ value });
@@ -394,6 +395,7 @@
     });
     expect(result.context).toBeUndefined();
     expect(result.suggestions).toEqual([]);
+    expect(console.warn).toHaveBeenCalledWith('Server did not return any values for selector = {__name__="metric"}');
   });
   it('returns label values on label context when given a metric and a label key', () => {
@@ -598,6 +600,7 @@
 });
 describe('disabled metrics lookup', () => {
   it('does not issue any metadata requests when lookup is disabled', async () => {
+    jest.spyOn(console, 'warn').mockImplementation(() => {});
     const datasource: PrometheusDatasource = {
       metadataRequest: jest.fn(() => ({ data: { data: ['foo', 'bar'] as string[] } })),
       getTimeRangeParams: jest.fn(() => ({ start: '0', end: '1' })),
@@ -619,6 +622,7 @@
     expect((datasource.metadataRequest as Mock).mock.calls.length).toBe(0);
     await instance.provideCompletionItems(args);
     expect((datasource.metadataRequest as Mock).mock.calls.length).toBe(0);
+    expect(console.warn).toHaveBeenCalledWith('Server did not return any values for selector = {}');
   });
   it('issues metadata requests when lookup is not disabled', async () => {
     const datasource: PrometheusDatasource = {