Mirror of https://github.com/grafana/grafana.git (synced 2025-02-25 18:55:37 -06:00)
Chore: InfluxDB unit test overhaul (#75436)
* Rename the mock function
* Move tests
* Refactor existing tests
* Add influxql_metadata_query tests
* Move to root
* Remove unnecessary file
* Adhoc test
* Remove unused parameter
* Tests for future
* Fix mocks
* Betterer
* Changes after review
@@ -6367,6 +6367,12 @@ exports[`better eslint`] = {
"public/app/plugins/datasource/influxdb/migrations.ts:5381": [
[0, 0, 0, "Unexpected any. Specify a different type.", "0"]
],
"public/app/plugins/datasource/influxdb/mocks.ts:5381": [
[0, 0, 0, "Do not use any type assertions.", "0"],
[0, 0, 0, "Do not use any type assertions.", "1"],
[0, 0, 0, "Do not use any type assertions.", "2"],
[0, 0, 0, "Do not use any type assertions.", "3"]
],
"public/app/plugins/datasource/influxdb/query_part.ts:5381": [
[0, 0, 0, "Unexpected any. Specify a different type.", "0"],
[0, 0, 0, "Unexpected any. Specify a different type.", "1"],
@@ -6390,24 +6396,6 @@ exports[`better eslint`] = {
[0, 0, 0, "Unexpected any. Specify a different type.", "0"],
[0, 0, 0, "Unexpected any. Specify a different type.", "1"]
],
"public/app/plugins/datasource/influxdb/specs/datasource.test.ts:5381": [
[0, 0, 0, "Unexpected any. Specify a different type.", "0"],
[0, 0, 0, "Unexpected any. Specify a different type.", "1"],
[0, 0, 0, "Unexpected any. Specify a different type.", "2"],
[0, 0, 0, "Unexpected any. Specify a different type.", "3"],
[0, 0, 0, "Unexpected any. Specify a different type.", "4"],
[0, 0, 0, "Unexpected any. Specify a different type.", "5"],
[0, 0, 0, "Unexpected any. Specify a different type.", "6"],
[0, 0, 0, "Unexpected any. Specify a different type.", "7"],
[0, 0, 0, "Unexpected any. Specify a different type.", "8"],
[0, 0, 0, "Unexpected any. Specify a different type.", "9"]
],
"public/app/plugins/datasource/influxdb/specs/mocks.ts:5381": [
[0, 0, 0, "Do not use any type assertions.", "0"],
[0, 0, 0, "Do not use any type assertions.", "1"],
[0, 0, 0, "Do not use any type assertions.", "2"],
[0, 0, 0, "Do not use any type assertions.", "3"]
],
"public/app/plugins/datasource/jaeger/CheatSheet.tsx:5381": [
[0, 0, 0, "Styles should be written using objects.", "0"],
[0, 0, 0, "Styles should be written using objects.", "1"]

@@ -12,11 +12,11 @@ jest.mock('../../../../../influxql_metadata_query', () => {
return {
__esModule: true,
getAllPolicies: jest.fn().mockReturnValueOnce(Promise.resolve(['default', 'autogen'])),
getFieldKeysForMeasurement: jest
getFieldKeys: jest
.fn()
.mockReturnValueOnce(Promise.resolve(['free', 'total']))
.mockReturnValueOnce(Promise.resolve([])),
getTagKeysForMeasurementAndTags: jest
getTagKeys: jest
.fn()
// first time we are called when the widget mounts,
// we respond by saying `cpu, host, device` are the real tags
@@ -31,7 +31,7 @@ jest.mock('../../../../../influxql_metadata_query', () => {
// it does not matter what we return, as long as it is
// promise-of-a-list-of-strings
.mockReturnValueOnce(Promise.resolve([])),
getAllMeasurementsForTags: jest
getAllMeasurements: jest
.fn()
// it does not matter what we return, as long as it is
// promise-of-a-list-of-strings
@@ -48,8 +48,8 @@ jest.mock('@grafana/runtime', () => {
});

beforeEach(() => {
(mockedMeta.getTagKeysForMeasurementAndTags as jest.Mock).mockClear();
(mockedMeta.getFieldKeysForMeasurement as jest.Mock).mockClear();
(mockedMeta.getTagKeys as jest.Mock).mockClear();
(mockedMeta.getFieldKeys as jest.Mock).mockClear();
});

const ONLY_TAGS = [
@@ -128,11 +128,11 @@ describe('InfluxDB InfluxQL Visual Editor field-filtering', () => {

await waitFor(() => {});

// when the editor-widget mounts, it calls getFieldKeysForMeasurement
expect(mockedMeta.getFieldKeysForMeasurement).toHaveBeenCalledTimes(1);
// when the editor-widget mounts, it calls getFieldKeys
expect(mockedMeta.getFieldKeys).toHaveBeenCalledTimes(1);

// when the editor-widget mounts, it calls getTagKeysForMeasurementAndTags
expect(mockedMeta.getTagKeysForMeasurementAndTags).toHaveBeenCalledTimes(1);
// when the editor-widget mounts, it calls getTagKeys
expect(mockedMeta.getTagKeys).toHaveBeenCalledTimes(1);

// now we click on the WHERE/host2 button
await userEvent.click(screen.getByRole('button', { name: 'host2' }));
@@ -145,7 +145,7 @@ describe('InfluxDB InfluxQL Visual Editor field-filtering', () => {
await userEvent.click(screen.getByRole('button', { name: 'cpudata' }));

// verify `getTagValues` was called once, and in the tags-param we did not receive `field1`
expect(mockedMeta.getAllMeasurementsForTags).toHaveBeenCalledTimes(1);
expect((mockedMeta.getAllMeasurementsForTags as jest.Mock).mock.calls[0][1]).toStrictEqual(ONLY_TAGS);
expect(mockedMeta.getAllMeasurements).toHaveBeenCalledTimes(1);
expect((mockedMeta.getAllMeasurements as jest.Mock).mock.calls[0][1]).toStrictEqual(ONLY_TAGS);
});
});

@@ -2,7 +2,7 @@ import { render, waitFor } from '@testing-library/react';
import React from 'react';

import InfluxDatasource from '../../../../../datasource';
import { getMockDS, getMockDSInstanceSettings } from '../../../../../specs/mocks';
import { getMockInfluxDS, getMockDSInstanceSettings } from '../../../../../mocks';
import { DEFAULT_POLICY, InfluxQuery } from '../../../../../types';

import { VisualInfluxQLEditor } from './VisualInfluxQLEditor';
@@ -39,7 +39,7 @@ jest.mock('./Seg', () => {
async function assertEditor(query: InfluxQuery, textContent: string) {
const onChange = jest.fn();
const onRunQuery = jest.fn();
const datasource: InfluxDatasource = getMockDS(getMockDSInstanceSettings());
const datasource: InfluxDatasource = getMockInfluxDS(getMockDSInstanceSettings());
datasource.metricFindQuery = () => Promise.resolve([]);
const { container } = render(
<VisualInfluxQLEditor query={query} datasource={datasource} onChange={onChange} onRunQuery={onRunQuery} />

@@ -6,10 +6,10 @@ import { InlineLabel, SegmentSection, useStyles2 } from '@grafana/ui';

import InfluxDatasource from '../../../../../datasource';
import {
getAllMeasurementsForTags,
getAllMeasurements,
getAllPolicies,
getFieldKeysForMeasurement,
getTagKeysForMeasurementAndTags,
getFieldKeys,
getTagKeys,
getTagValues,
} from '../../../../../influxql_metadata_query';
import {
@@ -53,13 +53,9 @@ export const VisualInfluxQLEditor = (props: Props): JSX.Element => {
const { measurement, policy } = query;

const allTagKeys = useMemo(async () => {
const tagKeys = (await getTagKeysForMeasurementAndTags(datasource, [], measurement, policy)).map(
(tag) => `${tag}::tag`
);
const tagKeys = (await getTagKeys(datasource, measurement, policy)).map((tag) => `${tag}::tag`);

const fieldKeys = (await getFieldKeysForMeasurement(datasource, measurement || '', policy)).map(
(field) => `${field}::field`
);
const fieldKeys = (await getFieldKeys(datasource, measurement || '', policy)).map((field) => `${field}::field`);

return new Set([...tagKeys, ...fieldKeys]);
}, [measurement, policy, datasource]);
@@ -69,9 +65,7 @@ export const VisualInfluxQLEditor = (props: Props): JSX.Element => {
[
'field_0',
() => {
return measurement !== undefined
? getFieldKeysForMeasurement(datasource, measurement, policy)
: Promise.resolve([]);
return measurement !== undefined ? getFieldKeys(datasource, measurement, policy) : Promise.resolve([]);
},
],
]);
@@ -80,7 +74,7 @@ export const VisualInfluxQLEditor = (props: Props): JSX.Element => {

// the following function is not complicated enough to memoize, but it's result
// is used in both memoized and un-memoized parts, so we have no choice
const getTagKeys = useMemo(
const getMemoizedTagKeys = useMemo(
() => async () => {
return [...(await allTagKeys)];
},
@@ -88,10 +82,10 @@ export const VisualInfluxQLEditor = (props: Props): JSX.Element => {
);

const groupByList = useMemo(() => {
const dynamicGroupByPartOptions = new Map([['tag_0', getTagKeys]]);
const dynamicGroupByPartOptions = new Map([['tag_0', getMemoizedTagKeys]]);

return makePartList(query.groupBy ?? [], dynamicGroupByPartOptions);
}, [getTagKeys, query.groupBy]);
}, [getMemoizedTagKeys, query.groupBy]);

const onAppliedChange = (newQuery: InfluxQuery) => {
props.onChange(newQuery);
@@ -123,11 +117,7 @@ export const VisualInfluxQLEditor = (props: Props): JSX.Element => {
getMeasurementOptions={(filter) =>
withTemplateVariableOptions(
allTagKeys.then((keys) =>
getAllMeasurementsForTags(
datasource,
filterTags(query.tags ?? [], keys),
filter === '' ? undefined : filter
)
getAllMeasurements(datasource, filterTags(query.tags ?? [], keys), filter === '' ? undefined : filter)
),
wrapRegex,
filter
@@ -141,7 +131,7 @@ export const VisualInfluxQLEditor = (props: Props): JSX.Element => {
<TagsSection
tags={query.tags ?? []}
onChange={handleTagsSectionChange}
getTagKeyOptions={getTagKeys}
getTagKeyOptions={getMemoizedTagKeys}
getTagValueOptions={(key) =>
withTemplateVariableOptions(
allTagKeys.then((keys) => getTagValues(datasource, filterTags(query.tags ?? [], keys), key)),
@@ -171,7 +161,7 @@ export const VisualInfluxQLEditor = (props: Props): JSX.Element => {
<SegmentSection label="GROUP BY" fill={true}>
<PartListSection
parts={groupByList}
getNewPartOptions={() => getNewGroupByPartOptions(query, getTagKeys)}
getNewPartOptions={() => getNewGroupByPartOptions(query, getMemoizedTagKeys)}
onChange={(partIndex, newParams) => {
const newQuery = changeGroupByPart(query, partIndex, newParams);
onAppliedChange(newQuery);

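For reference, a minimal sketch (not part of the diff) of how the renamed metadata helpers are called from the editor, assuming only the signatures shown above; the relative import paths assume the editor's directory, and 'cpu'/'host' are made-up examples:

import InfluxDatasource from '../../../../../datasource';
import { getAllMeasurements, getFieldKeys, getTagKeys, getTagValues } from '../../../../../influxql_metadata_query';

async function collectEditorOptions(datasource: InfluxDatasource, measurement: string, policy?: string) {
  // was getTagKeysForMeasurementAndTags(datasource, [], measurement, policy)
  const tagKeys = await getTagKeys(datasource, measurement, policy);
  // was getFieldKeysForMeasurement(datasource, measurement, policy)
  const fieldKeys = await getFieldKeys(datasource, measurement, policy);
  // was getAllMeasurementsForTags(datasource, tags, withMeasurementFilter)
  const measurements = await getAllMeasurements(datasource, []);
  // same exported name as before; 'host' is a hypothetical tag key
  const hostValues = await getTagValues(datasource, [], 'host');
  return { tagKeys, fieldKeys, measurements, hostValues };
}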
@@ -1,55 +0,0 @@
type FieldsDefinition = {
name: string;
// String type, usually something like 'string' or 'float'.
type: string;
};
type Measurements = { [measurement: string]: FieldsDefinition[] };
type FieldReturnValue = { text: string };

/**
* Datasource mock for influx. At the moment this only works for queries that should return measurements or their
* fields and no other functionality is implemented.
*/
export class InfluxDatasourceMock {
constructor(private measurements: Measurements) {}

metricFindQuery(query: string) {
if (isMeasurementsQuery(query)) {
return this.getMeasurements();
} else {
return this.getMeasurementFields(query);
}
}

private getMeasurements(): FieldReturnValue[] {
return Object.keys(this.measurements).map((key) => ({ text: key }));
}

private getMeasurementFields(query: string): FieldReturnValue[] {
const match = query.match(/SHOW FIELD KEYS FROM \"(.+)\"/);
if (!match) {
throw new Error(`Failed to match query="${query}"`);
}
const measurementName = match[1];
if (!measurementName) {
throw new Error(`Failed to match measurement name from query="${query}"`);
}

const fields = this.measurements[measurementName];
if (!fields) {
throw new Error(
`Failed to find measurement with name="${measurementName}" in measurements="[${Object.keys(
this.measurements
).join(', ')}]"`
);
}

return fields.map((field) => ({
text: field.name,
}));
}
}

function isMeasurementsQuery(query: string) {
return /SHOW MEASUREMENTS/.test(query);
}
public/app/plugins/datasource/influxdb/datasource.test.ts (new file, 318 lines)
@@ -0,0 +1,318 @@
import { lastValueFrom, of } from 'rxjs';

import { ScopedVars } from '@grafana/data';
import { BackendSrvRequest } from '@grafana/runtime/';
import config from 'app/core/config';

import { TemplateSrv } from '../../../features/templating/template_srv';

import { BROWSER_MODE_DISABLED_MESSAGE } from './constants';
import InfluxDatasource from './datasource';
import {
getMockDSInstanceSettings,
getMockInfluxDS,
mockBackendService,
mockInfluxFetchResponse,
mockInfluxQueryRequest,
mockInfluxQueryWithTemplateVars,
mockTemplateSrv,
} from './mocks';
import { InfluxQuery, InfluxVersion } from './types';

// we want only frontend mode in this file
config.featureToggles.influxdbBackendMigration = false;
const fetchMock = mockBackendService(mockInfluxFetchResponse());

describe('InfluxDataSource Frontend Mode', () => {
beforeEach(() => {
jest.clearAllMocks();
});

it('should throw an error if there is 200 response with error', async () => {
const ds = getMockInfluxDS();
fetchMock.mockImplementation(() => {
return of({
data: {
results: [
{
error: 'Query timeout',
},
],
},
});
});

try {
await lastValueFrom(ds.query(mockInfluxQueryRequest()));
} catch (err) {
if (err instanceof Error) {
expect(err.message).toBe('InfluxDB Error: Query timeout');
}
}
});

describe('outdated browser mode', () => {
it('should throw an error when querying data', async () => {
expect.assertions(1);
const instanceSettings = getMockDSInstanceSettings();
instanceSettings.access = 'direct';
const ds = getMockInfluxDS(instanceSettings);
try {
await lastValueFrom(ds.query(mockInfluxQueryRequest()));
} catch (err) {
if (err instanceof Error) {
expect(err.message).toBe(BROWSER_MODE_DISABLED_MESSAGE);
}
}
});
});

describe('metricFindQuery with HTTP GET', () => {
let ds: InfluxDatasource;
const query = 'SELECT max(value) FROM measurement WHERE $timeFilter';
const queryOptions = {
range: {
from: '2018-01-01T00:00:00Z',
to: '2018-01-02T00:00:00Z',
},
};

let requestQuery: string;
let requestMethod: string | undefined;
let requestData: string | null;
const fetchMockImpl = (req: BackendSrvRequest) => {
requestMethod = req.method;
requestQuery = req.params?.q;
requestData = req.data;
return of({
data: {
status: 'success',
results: [
{
series: [
{
name: 'measurement',
columns: ['name'],
values: [['cpu']],
},
],
},
],
},
});
};

beforeEach(async () => {
jest.clearAllMocks();
fetchMock.mockImplementation(fetchMockImpl);
});

it('should read the http method from jsonData', async () => {
ds = getMockInfluxDS(getMockDSInstanceSettings({ httpMode: 'GET' }));
await ds.metricFindQuery(query, queryOptions);
expect(requestMethod).toBe('GET');
ds = getMockInfluxDS(getMockDSInstanceSettings({ httpMode: 'POST' }));
await ds.metricFindQuery(query, queryOptions);
expect(requestMethod).toBe('POST');
});

it('should replace $timefilter', async () => {
ds = getMockInfluxDS(getMockDSInstanceSettings({ httpMode: 'GET' }));
await ds.metricFindQuery(query, queryOptions);
expect(requestQuery).toMatch('time >= 1514764800000ms and time <= 1514851200000ms');
ds = getMockInfluxDS(getMockDSInstanceSettings({ httpMode: 'POST' }));
await ds.metricFindQuery(query, queryOptions);
expect(requestQuery).toBeFalsy();
expect(requestData).toMatch('time%20%3E%3D%201514764800000ms%20and%20time%20%3C%3D%201514851200000ms');
});

it('should not have any data in request body if http mode is GET', async () => {
ds = getMockInfluxDS(getMockDSInstanceSettings({ httpMode: 'GET' }));
await ds.metricFindQuery(query, queryOptions);
expect(requestData).toBeNull();
});

it('should have data in request body if http mode is POST', async () => {
ds = getMockInfluxDS(getMockDSInstanceSettings({ httpMode: 'POST' }));
await ds.metricFindQuery(query, queryOptions);
expect(requestData).not.toBeNull();
expect(requestData).toMatch('q=SELECT');
});

it('parse response correctly', async () => {
ds = getMockInfluxDS(getMockDSInstanceSettings({ httpMode: 'GET' }));
let responseGet = await ds.metricFindQuery(query, queryOptions);
expect(responseGet).toEqual([{ text: 'cpu' }]);
ds = getMockInfluxDS(getMockDSInstanceSettings({ httpMode: 'POST' }));
let responsePost = await ds.metricFindQuery(query, queryOptions);
expect(responsePost).toEqual([{ text: 'cpu' }]);
});
});

describe('adhoc variables', () => {
const adhocFilters = [
{
key: 'adhoc_key',
operator: '=',
value: 'adhoc_val',
condition: '',
},
];
const mockTemplateService = new TemplateSrv();
mockTemplateService.getAdhocFilters = jest.fn((_: string) => adhocFilters);
let ds = getMockInfluxDS(getMockDSInstanceSettings(), mockTemplateService);
it('query should contain the ad-hoc variable', () => {
ds.query(mockInfluxQueryRequest());
const expected = encodeURIComponent(
'SELECT mean("value") FROM "cpu" WHERE time >= 0ms and time <= 10ms AND "adhoc_key" = \'adhoc_val\' GROUP BY time($__interval) fill(null)'
);
expect(fetchMock.mock.calls[0][0].data).toBe(`q=${expected}`);
});
});

describe('datasource contract', () => {
let ds: InfluxDatasource;
const metricFindQueryMock = jest.fn();
beforeEach(() => {
jest.clearAllMocks();
ds = getMockInfluxDS();
ds.metricFindQuery = metricFindQueryMock;
});

afterEach(() => {
jest.clearAllMocks();
});

it('should check the datasource has "getTagKeys" function defined', () => {
expect(Object.getOwnPropertyNames(Object.getPrototypeOf(ds))).toContain('getTagKeys');
});

it('should check the datasource has "getTagValues" function defined', () => {
expect(Object.getOwnPropertyNames(Object.getPrototypeOf(ds))).toContain('getTagValues');
});

it('should be able to call getTagKeys without specifying any parameter', () => {
ds.getTagKeys();
expect(metricFindQueryMock).toHaveBeenCalled();
});

it('should be able to call getTagValues without specifying anything but key', () => {
ds.getTagValues({ key: 'test', filters: [] });
expect(metricFindQueryMock).toHaveBeenCalled();
});
});

describe('variable interpolation', () => {
const text = 'interpolationText';
const text2 = 'interpolationText2';
const textWithoutFormatRegex = 'interpolationText,interpolationText2';
const textWithFormatRegex = 'interpolationText|interpolationText2';
const variableMap: Record<string, string> = {
$interpolationVar: text,
$interpolationVar2: text2,
};
const adhocFilters = [
{
key: 'adhoc',
operator: '=',
value: 'val',
condition: '',
},
];
const templateSrv = mockTemplateSrv(
jest.fn((_: string) => adhocFilters),
jest.fn((target?: string, scopedVars?: ScopedVars, format?: string | Function): string => {
if (!format) {
return variableMap[target!] || '';
}
if (format === 'regex') {
return textWithFormatRegex;
}
return textWithoutFormatRegex;
})
);
const ds = new InfluxDatasource(getMockDSInstanceSettings(), templateSrv);

function influxChecks(query: InfluxQuery) {
expect(templateSrv.replace).toBeCalledTimes(10);
expect(query.alias).toBe(text);
expect(query.measurement).toBe(textWithFormatRegex);
expect(query.policy).toBe(textWithFormatRegex);
expect(query.limit).toBe(textWithFormatRegex);
expect(query.slimit).toBe(textWithFormatRegex);
expect(query.tz).toBe(text);
expect(query.tags![0].value).toBe(textWithFormatRegex);
expect(query.groupBy![0].params![0]).toBe(textWithFormatRegex);
expect(query.select![0][0].params![0]).toBe(textWithFormatRegex);
expect(query.adhocFilters?.[0].key).toBe(adhocFilters[0].key);
}

describe('when interpolating query variables for dashboard->explore', () => {
it('should interpolate all variables with Flux mode', () => {
ds.version = InfluxVersion.Flux;
const fluxQuery = {
refId: 'x',
query: '$interpolationVar,$interpolationVar2',
};
const queries = ds.interpolateVariablesInQueries([fluxQuery], {
interpolationVar: { text: text, value: text },
interpolationVar2: { text: text2, value: text2 },
});
expect(templateSrv.replace).toBeCalledTimes(1);
expect(queries[0].query).toBe(textWithFormatRegex);
});

it('should interpolate all variables with InfluxQL mode', () => {
ds.version = InfluxVersion.InfluxQL;
const queries = ds.interpolateVariablesInQueries([mockInfluxQueryWithTemplateVars(adhocFilters)], {
interpolationVar: { text: text, value: text },
interpolationVar2: { text: text2, value: text2 },
});
influxChecks(queries[0]);
});
});

describe('when interpolating template variables', () => {
it('should apply all template variables with Flux mode', () => {
ds.version = InfluxVersion.Flux;
const fluxQuery = {
refId: 'x',
query: '$interpolationVar',
};
const query = ds.applyTemplateVariables(fluxQuery, {
interpolationVar: {
text: text,
value: text,
},
});
expect(templateSrv.replace).toBeCalledTimes(1);
expect(query.query).toBe(text);
});

it('should apply all template variables with InfluxQL mode', () => {
ds.version = InfluxVersion.InfluxQL;
ds.access = 'proxy';
config.featureToggles.influxdbBackendMigration = true;
const query = ds.applyTemplateVariables(mockInfluxQueryWithTemplateVars(adhocFilters), {
interpolationVar: { text: text, value: text },
interpolationVar2: { text: 'interpolationText2', value: 'interpolationText2' },
});
influxChecks(query);
});

it('should apply all scopedVars to tags', () => {
ds.version = InfluxVersion.InfluxQL;
ds.access = 'proxy';
config.featureToggles.influxdbBackendMigration = true;
const query = ds.applyTemplateVariables(mockInfluxQueryWithTemplateVars(adhocFilters), {
interpolationVar: { text: text, value: text },
interpolationVar2: { text: 'interpolationText2', value: 'interpolationText2' },
});
expect(query.tags?.length).toBeGreaterThan(0);
const value = query.tags?.[0].value;
const scopedVars = 'interpolationText|interpolationText2';
expect(value).toBe(scopedVars);
});
});
});
});
@@ -220,7 +220,7 @@ export default class InfluxDatasource extends DataSourceWithBackend<InfluxQuery,
}

if (this.isMigrationToggleOnAndIsAccessProxy()) {
query = this.applyVariables(query, scopedVars, rest);
query = this.applyVariables(query, rest);
if (query.adhocFilters?.length) {
const adhocFiltersToTags: InfluxQueryTag[] = (query.adhocFilters ?? []).map((af) => {
const { condition, ...asTag } = af;
@@ -258,12 +258,12 @@ export default class InfluxDatasource extends DataSourceWithBackend<InfluxQuery,
return {
...query,
datasource: this.getRef(),
...this.applyVariables(query, scopedVars, scopedVars),
...this.applyVariables(query, scopedVars),
};
});
}

applyVariables(query: InfluxQuery, scopedVars: ScopedVars, rest: ScopedVars) {
applyVariables(query: InfluxQuery, scopedVars: ScopedVars) {
const expandedQuery = { ...query };
if (query.groupBy) {
expandedQuery.groupBy = query.groupBy.map((groupBy) => {
@@ -301,7 +301,7 @@ export default class InfluxDatasource extends DataSourceWithBackend<InfluxQuery,
return {
...expandedQuery,
adhocFilters: this.templateSrv.getAdhocFilters(this.name) ?? [],
query: this.templateSrv.replace(query.query ?? '', rest, 'regex'), // The raw query text
query: this.templateSrv.replace(query.query ?? '', scopedVars, 'regex'), // The raw query text
alias: this.templateSrv.replace(query.alias ?? '', scopedVars),
limit: this.templateSrv.replace(query.limit?.toString() ?? '', scopedVars, 'regex'),
measurement: this.templateSrv.replace(query.measurement ?? '', scopedVars, 'regex'),

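A small sketch (not part of the diff) of the narrowed applyVariables call shown above, assuming the method remains callable on InfluxDatasource instances; the third ScopedVars argument is gone and the raw query text is now interpolated with the same scopedVars:

import { ScopedVars } from '@grafana/data';
import InfluxDatasource from './datasource';
import { InfluxQuery } from './types';

function interpolateQuery(ds: InfluxDatasource, query: InfluxQuery, scopedVars: ScopedVars): InfluxQuery {
  // before this change: ds.applyVariables(query, scopedVars, rest)
  return ds.applyVariables(query, scopedVars);
}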
@@ -1,18 +1,17 @@
import { of } from 'rxjs';

import { DataQueryRequest, dateTime, ScopedVars } from '@grafana/data/src';
import { FetchResponse } from '@grafana/runtime';
import { FetchResponse } from '@grafana/runtime/src';
import config from 'app/core/config';

import { InfluxQuery } from '../types';

import {
getMockDS,
getMockDSInstanceSettings,
getMockInfluxDS,
mockBackendService,
mockInfluxFetchResponse,
mockTemplateSrv,
} from './mocks';
import { InfluxQuery } from './types';

config.featureToggles.influxdbBackendMigration = true;
const fetchMock = mockBackendService(mockInfluxFetchResponse());
@@ -116,7 +115,7 @@ describe('InfluxDataSource Backend Mode', () => {
describe('adhoc filters', () => {
let fetchReq: { queries: InfluxQuery[] };
const ctx = {
ds: getMockDS(getMockDSInstanceSettings(), templateSrv),
ds: getMockInfluxDS(getMockDSInstanceSettings(), templateSrv),
};
beforeEach(async () => {
fetchMock.mockImplementation((req) => {
@@ -1,6 +1,6 @@
import { TemplateSrv } from 'app/features/templating/template_srv';

import InfluxQueryModel from '../influx_query_model';
import InfluxQueryModel from './influx_query_model';

describe('InfluxQuery', () => {
const templateSrv = { replace: (val) => val } as TemplateSrv;
@@ -1,6 +1,6 @@
import { produce } from 'immer';

import InfluxSeries from '../influx_series';
import InfluxSeries from './influx_series';

describe('when generating timeseries from influxdb response', () => {
describe('given multiple fields for series', () => {
@@ -0,0 +1,273 @@
import config from 'app/core/config';

import { getAllMeasurements, getAllPolicies, getFieldKeys, getTagKeys, getTagValues } from './influxql_metadata_query';
import { getMockInfluxDS } from './mocks';
import { InfluxQuery } from './types';

describe('influx_metadata_query', () => {
let query: string | undefined;
let target: InfluxQuery;
const mockMetricFindQuery = jest.fn();
const mockRunMetadataQuery = jest.fn();
mockMetricFindQuery.mockImplementation((q: string) => {
query = q;
return Promise.resolve([]);
});
mockRunMetadataQuery.mockImplementation((t: InfluxQuery) => {
target = t;
query = t.query;
return Promise.resolve([]);
});

const ds = getMockInfluxDS();
ds.metricFindQuery = mockMetricFindQuery;
ds.runMetadataQuery = mockRunMetadataQuery;

beforeEach(() => {
jest.clearAllMocks();
});

// This should be removed when backend mode is default
describe('backend mode disabled', () => {
beforeEach(() => {
config.featureToggles.influxdbBackendMigration = false;
});

function frontendModeChecks() {
expect(mockRunMetadataQuery).not.toHaveBeenCalled();
expect(mockMetricFindQuery).toHaveBeenCalled();
}

describe('getAllPolicies', () => {
it('should call metricFindQuery with SHOW RETENTION POLICIES', () => {
getAllPolicies(ds);
frontendModeChecks();
expect(query).toMatch('SHOW RETENTION POLICIES');
});
});

describe('getAllMeasurements', () => {
it('no tags specified', () => {
getAllMeasurements(ds, []);
frontendModeChecks();
expect(query).toBe('SHOW MEASUREMENTS LIMIT 100');
});

it('with tags', () => {
getAllMeasurements(ds, [{ key: 'key', value: 'val' }]);
frontendModeChecks();
expect(query).toMatch('SHOW MEASUREMENTS WHERE "key"');
});

it('with measurement filter', () => {
getAllMeasurements(ds, [{ key: 'key', value: 'val' }], 'measurementFilter');
frontendModeChecks();
expect(query).toMatch('SHOW MEASUREMENTS WITH MEASUREMENT =~ /(?i)measurementFilter/ WHERE "key"');
});
});

describe('getTagKeys', () => {
it('no tags specified', () => {
getTagKeys(ds);
frontendModeChecks();
expect(query).toBe('SHOW TAG KEYS');
});

it('with measurement', () => {
getTagKeys(ds, 'test_measurement');
frontendModeChecks();
expect(query).toBe('SHOW TAG KEYS FROM "test_measurement"');
});

it('with retention policy', () => {
getTagKeys(ds, 'test_measurement', 'rp');
frontendModeChecks();
expect(query).toBe('SHOW TAG KEYS FROM "rp"."test_measurement"');
});
});

describe('getTagValues', () => {
it('with key', () => {
getTagValues(ds, [], 'test_key');
frontendModeChecks();
expect(query).toBe('SHOW TAG VALUES WITH KEY = "test_key"');
});

it('with key ends with ::tag', () => {
getTagValues(ds, [], 'test_key::tag');
frontendModeChecks();
expect(query).toBe('SHOW TAG VALUES WITH KEY = "test_key"');
});

it('with key ends with ::field', async () => {
const result = await getTagValues(ds, [], 'test_key::field');
expect(result.length).toBe(0);
});

it('with tags', () => {
getTagValues(ds, [{ key: 'tagKey', value: 'tag_val' }], 'test_key');
frontendModeChecks();
expect(query).toBe('SHOW TAG VALUES WITH KEY = "test_key" WHERE "tagKey" = \'tag_val\'');
});

it('with measurement', () => {
getTagValues(ds, [{ key: 'tagKey', value: 'tag_val' }], 'test_key', 'test_measurement');
frontendModeChecks();
expect(query).toBe(
'SHOW TAG VALUES FROM "test_measurement" WITH KEY = "test_key" WHERE "tagKey" = \'tag_val\''
);
});

it('with retention policy', () => {
getTagValues(ds, [{ key: 'tagKey', value: 'tag_val' }], 'test_key', 'test_measurement', 'rp');
frontendModeChecks();
expect(query).toBe(
'SHOW TAG VALUES FROM "rp"."test_measurement" WITH KEY = "test_key" WHERE "tagKey" = \'tag_val\''
);
});
});

describe('getFieldKeys', () => {
it('with no retention policy', () => {
getFieldKeys(ds, 'test_measurement');
frontendModeChecks();
expect(query).toBe('SHOW FIELD KEYS FROM "test_measurement"');
});

it('with empty measurement', () => {
getFieldKeys(ds, '');
frontendModeChecks();
expect(query).toBe('SHOW FIELD KEYS');
});

it('with retention policy', () => {
getFieldKeys(ds, 'test_measurement', 'rp');
frontendModeChecks();
expect(query).toBe('SHOW FIELD KEYS FROM "rp"."test_measurement"');
});
});
});

describe('backend mode enabled', () => {
beforeEach(() => {
config.featureToggles.influxdbBackendMigration = true;
});

function backendModeChecks() {
expect(mockMetricFindQuery).not.toHaveBeenCalled();
expect(mockRunMetadataQuery).toHaveBeenCalled();
expect(target).toBeDefined();
expect(target.refId).toBe('metadataQuery');
expect(target.rawQuery).toBe(true);
}

describe('getAllPolicies', () => {
it('should call metricFindQuery with SHOW RETENTION POLICIES', () => {
getAllPolicies(ds);
backendModeChecks();
expect(query).toMatch('SHOW RETENTION POLICIES');
});
});

describe('getAllMeasurements', () => {
it('no tags specified', () => {
getAllMeasurements(ds, []);
backendModeChecks();
expect(query).toBe('SHOW MEASUREMENTS LIMIT 100');
});

it('with tags', () => {
getAllMeasurements(ds, [{ key: 'key', value: 'val' }]);
backendModeChecks();
expect(query).toMatch('SHOW MEASUREMENTS WHERE "key"');
});

it('with measurement filter', () => {
getAllMeasurements(ds, [{ key: 'key', value: 'val' }], 'measurementFilter');
backendModeChecks();
expect(query).toMatch('SHOW MEASUREMENTS WITH MEASUREMENT =~ /(?i)measurementFilter/ WHERE "key"');
});
});

describe('getTagKeys', () => {
it('no tags specified', () => {
getTagKeys(ds);
backendModeChecks();
expect(query).toBe('SHOW TAG KEYS');
});

it('with measurement', () => {
getTagKeys(ds, 'test_measurement');
backendModeChecks();
expect(query).toBe('SHOW TAG KEYS FROM "test_measurement"');
});

it('with retention policy', () => {
getTagKeys(ds, 'test_measurement', 'rp');
backendModeChecks();
expect(query).toBe('SHOW TAG KEYS FROM "rp"."test_measurement"');
});
});

describe('getTagValues', () => {
it('with key', () => {
getTagValues(ds, [], 'test_key');
backendModeChecks();
expect(query).toBe('SHOW TAG VALUES WITH KEY = "test_key"');
});

it('with key ends with ::tag', () => {
getTagValues(ds, [], 'test_key::tag');
backendModeChecks();
expect(query).toBe('SHOW TAG VALUES WITH KEY = "test_key"');
});

it('with key ends with ::field', async () => {
const result = await getTagValues(ds, [], 'test_key::field');
expect(result.length).toBe(0);
});

it('with tags', () => {
getTagValues(ds, [{ key: 'tagKey', value: 'tag_val' }], 'test_key');
backendModeChecks();
expect(query).toBe('SHOW TAG VALUES WITH KEY = "test_key" WHERE "tagKey" = \'tag_val\'');
});

it('with measurement', () => {
getTagValues(ds, [{ key: 'tagKey', value: 'tag_val' }], 'test_key', 'test_measurement');
backendModeChecks();
expect(query).toBe(
'SHOW TAG VALUES FROM "test_measurement" WITH KEY = "test_key" WHERE "tagKey" = \'tag_val\''
);
});

it('with retention policy', () => {
getTagValues(ds, [{ key: 'tagKey', value: 'tag_val' }], 'test_key', 'test_measurement', 'rp');
backendModeChecks();
expect(query).toBe(
'SHOW TAG VALUES FROM "rp"."test_measurement" WITH KEY = "test_key" WHERE "tagKey" = \'tag_val\''
);
});
});

describe('getFieldKeys', () => {
it('with no retention policy', () => {
getFieldKeys(ds, 'test_measurement');
backendModeChecks();
expect(query).toBe('SHOW FIELD KEYS FROM "test_measurement"');
});

it('with empty measurement', () => {
getFieldKeys(ds, '');
backendModeChecks();
expect(query).toBe('SHOW FIELD KEYS');
});

it('with retention policy', () => {
getFieldKeys(ds, 'test_measurement', 'rp');
backendModeChecks();
expect(query).toBe('SHOW FIELD KEYS FROM "rp"."test_measurement"');
});
});
});
});
@@ -49,7 +49,7 @@ export async function getAllPolicies(datasource: InfluxDatasource): Promise<stri
return data.map((item) => item.text);
}

export async function getAllMeasurementsForTags(
export async function getAllMeasurements(
datasource: InfluxDatasource,
tags: InfluxQueryTag[],
withMeasurementFilter?: string
@@ -58,9 +58,8 @@ export async function getAllMeasurementsForTags(
return data.map((item) => item.text);
}

export async function getTagKeysForMeasurementAndTags(
export async function getTagKeys(
datasource: InfluxDatasource,
tags: InfluxQueryTag[],
measurement?: string,
retentionPolicy?: string
): Promise<string[]> {
@@ -71,16 +70,17 @@ export async function getTagKeysForMeasurementAndTags(
export async function getTagValues(
datasource: InfluxDatasource,
tags: InfluxQueryTag[],
tagKey: string,
withKey: string,
measurement?: string,
retentionPolicy?: string
): Promise<string[]> {
if (tagKey.endsWith('::field')) {
if (withKey.endsWith('::field')) {
return [];
}
const data = await runExploreQuery({
type: 'TAG_VALUES',
withKey: tagKey,
tags,
withKey,
datasource,
measurement,
retentionPolicy,
@@ -88,7 +88,7 @@ export async function getTagValues(
return data.map((item) => item.text);
}

export async function getFieldKeysForMeasurement(
export async function getFieldKeys(
datasource: InfluxDatasource,
measurement: string,
retentionPolicy?: string

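A hedged usage sketch (not part of the diff) of the updated getTagValues signature above: the key argument is now named withKey, and keys ending in ::field short-circuit to an empty list; the tag keys and values below are invented examples:

import InfluxDatasource from './datasource';
import { getTagValues } from './influxql_metadata_query';

async function exampleTagValues(ds: InfluxDatasource) {
  // parameter order: (datasource, tags, withKey, measurement?, retentionPolicy?)
  const hosts = await getTagValues(ds, [{ key: 'datacenter', value: 'eu' }], 'host', 'cpu', 'autogen');
  const none = await getTagValues(ds, [], 'usage_idle::field'); // resolves to [] without running a query
  return { hosts, none };
}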
@@ -1,5 +1,5 @@
import { buildMetadataQuery } from './influxql_query_builder';
import { templateSrvStub as templateService } from './specs/mocks';
import { templateSrvStub as templateService } from './mocks';
import { DEFAULT_POLICY } from './types';

describe('influxql-query-builder', () => {

@@ -17,9 +17,10 @@ import {
VariableInterpolation,
} from '@grafana/runtime/src';

import { TemplateSrv } from '../../../../features/templating/template_srv';
import InfluxDatasource from '../datasource';
import { InfluxOptions, InfluxQuery, InfluxVersion } from '../types';
import { TemplateSrv } from '../../../features/templating/template_srv';

import InfluxDatasource from './datasource';
import { InfluxOptions, InfluxQuery, InfluxVersion } from './types';

const getAdhocFiltersMock = jest.fn().mockImplementation(() => []);
const replaceMock = jest.fn().mockImplementation((a: string, ...rest: unknown[]) => a);
@@ -54,14 +55,16 @@ export function mockBackendService(response: FetchResponse) {
return fetchMock;
}

export function getMockDS(
instanceSettings: DataSourceInstanceSettings<InfluxOptions>,
export function getMockInfluxDS(
instanceSettings: DataSourceInstanceSettings<InfluxOptions> = getMockDSInstanceSettings(),
templateSrv: TemplateSrv = templateSrvStub
): InfluxDatasource {
return new InfluxDatasource(instanceSettings, templateSrv);
}

export function getMockDSInstanceSettings(): DataSourceInstanceSettings<InfluxOptions> {
export function getMockDSInstanceSettings(
overrideJsonData?: Partial<InfluxOptions>
): DataSourceInstanceSettings<InfluxOptions> {
return {
id: 123,
url: 'proxied',
@@ -91,7 +94,12 @@ export function getMockDSInstanceSettings(): DataSourceInstanceSettings<InfluxOp
module: '',
baseUrl: '',
},
jsonData: { version: InfluxVersion.InfluxQL, httpMode: 'POST', dbName: 'site' },
jsonData: {
version: InfluxVersion.InfluxQL,
httpMode: 'POST',
dbName: 'site',
...(overrideJsonData ? overrideJsonData : {}),
},
};
}

@@ -221,12 +229,9 @@ export const mockInfluxRetentionPolicyResponse = [
},
];

export const mockInfluxDataRequest = (
targets: InfluxQuery[],
overrides?: Partial<DataQueryRequest>
): Partial<DataQueryRequest<InfluxQuery>> => {
const defaults: DataQueryRequest<InfluxQuery> = {
app: 'createDataRequest',
export const mockInfluxQueryRequest = (targets?: InfluxQuery[]): DataQueryRequest<InfluxQuery> => {
return {
app: 'explore',
interval: '1m',
intervalMs: 60000,
range: {
@@ -241,8 +246,78 @@ export const mockInfluxDataRequest = (
requestId: '',
scopedVars: {},
startTime: 0,
targets: targets,
targets: targets ?? mockTargets(),
timezone: '',
};
return Object.assign(defaults, overrides ?? {});
};

export const mockTargets = (): InfluxQuery[] => {
return [
{
refId: 'A',
datasource: {
type: 'influxdb',
uid: 'vA4bkHenk',
},
policy: 'default',
resultFormat: 'time_series',
orderByTime: 'ASC',
tags: [],
groupBy: [
{
type: 'time',
params: ['$__interval'],
},
{
type: 'fill',
params: ['null'],
},
],
select: [
[
{
type: 'field',
params: ['value'],
},
{
type: 'mean',
params: [],
},
],
],
measurement: 'cpu',
},
];
};

export const mockInfluxQueryWithTemplateVars = (adhocFilters: AdHocVariableFilter[]): InfluxQuery => ({
refId: 'x',
alias: '$interpolationVar',
measurement: '$interpolationVar',
policy: '$interpolationVar',
limit: '$interpolationVar',
slimit: '$interpolationVar',
tz: '$interpolationVar',
tags: [
{
key: 'cpu',
operator: '=~',
value: '/^$interpolationVar,$interpolationVar2$/',
},
],
groupBy: [
{
params: ['$interpolationVar'],
type: 'tag',
},
],
select: [
[
{
params: ['$interpolationVar'],
type: 'field',
},
],
],
adhocFilters,
});
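A short usage sketch (not part of the diff) of the renamed mock helpers, mirroring calls in the new tests: both parameters of getMockInfluxDS are now optional, getMockDSInstanceSettings accepts a partial jsonData override, and mockTemplateSrv takes an ad-hoc-filters mock plus a replace mock (the jest.fn bodies here are placeholders):

import { getMockDSInstanceSettings, getMockInfluxDS, mockTemplateSrv } from './mocks';

const dsDefault = getMockInfluxDS(); // defaults to getMockDSInstanceSettings() and the stub TemplateSrv
const dsGet = getMockInfluxDS(getMockDSInstanceSettings({ httpMode: 'GET' }));
const dsWithTemplateSrv = getMockInfluxDS(
  getMockDSInstanceSettings(),
  mockTemplateSrv(jest.fn(() => []), jest.fn((target: string) => target))
);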
@@ -1,4 +1,4 @@
import queryPart from '../query_part';
import queryPart from './query_part';

describe('InfluxQueryPart', () => {
describe('series with measurement only', () => {
@@ -7,8 +7,8 @@ import config from 'app/core/config';
import { backendSrv } from 'app/core/services/backend_srv'; // will use the version in __mocks__

import InfluxQueryModel from './influx_query_model';
import { getMockDSInstanceSettings, getMockInfluxDS } from './mocks';
import ResponseParser, { getSelectedParams } from './response_parser';
import { getMockDS, getMockDSInstanceSettings } from './specs/mocks';
import { InfluxQuery } from './types';

jest.mock('@grafana/runtime', () => ({
@@ -319,7 +319,7 @@ describe('influxdb response parser', () => {

describe('When issuing annotationQuery', () => {
const ctx = {
ds: getMockDS(getMockDSInstanceSettings()),
ds: getMockInfluxDS(getMockDSInstanceSettings()),
};

const fetchMock = jest.spyOn(backendSrv, 'fetch');

@@ -1,393 +0,0 @@
|
||||
import { lastValueFrom, of } from 'rxjs';
|
||||
import { TemplateSrvStub } from 'test/specs/helpers';
|
||||
|
||||
import { ScopedVars } from '@grafana/data/src';
|
||||
import { FetchResponse } from '@grafana/runtime';
|
||||
import config from 'app/core/config';
|
||||
import { backendSrv } from 'app/core/services/backend_srv'; // will use the version in __mocks__
|
||||
|
||||
import { BROWSER_MODE_DISABLED_MESSAGE } from '../constants';
|
||||
import InfluxDatasource from '../datasource';
|
||||
import { InfluxQuery, InfluxVersion } from '../types';
|
||||
|
||||
//@ts-ignore
|
||||
const templateSrv = new TemplateSrvStub();
|
||||
|
||||
jest.mock('@grafana/runtime', () => ({
|
||||
...(jest.requireActual('@grafana/runtime') as unknown as object),
|
||||
getBackendSrv: () => backendSrv,
|
||||
}));
|
||||
|
||||
describe('InfluxDataSource', () => {
|
||||
const ctx: any = {
|
||||
instanceSettings: { url: 'url', name: 'influxDb', jsonData: { httpMode: 'GET' } },
|
||||
};
|
||||
|
||||
const fetchMock = jest.spyOn(backendSrv, 'fetch');
|
||||
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks();
|
||||
ctx.instanceSettings.url = '/api/datasources/proxy/1';
|
||||
ctx.instanceSettings.access = 'proxy';
|
||||
ctx.ds = new InfluxDatasource(ctx.instanceSettings, templateSrv);
|
||||
});
|
||||
|
||||
describe('When issuing metricFindQuery', () => {
|
||||
const query = 'SELECT max(value) FROM measurement WHERE $timeFilter';
|
||||
const queryOptions = {
|
||||
range: {
|
||||
from: '2018-01-01T00:00:00Z',
|
||||
to: '2018-01-02T00:00:00Z',
|
||||
},
|
||||
};
|
||||
let requestQuery: any;
|
||||
let requestMethod: string | undefined;
|
||||
let requestData: any;
|
||||
let response: any;
|
||||
|
||||
beforeEach(async () => {
|
||||
fetchMock.mockImplementation((req) => {
|
||||
requestMethod = req.method;
|
||||
requestQuery = req.params?.q;
|
||||
requestData = req.data;
|
||||
return of({
|
||||
data: {
|
||||
status: 'success',
|
||||
results: [
|
||||
{
|
||||
series: [
|
||||
{
|
||||
name: 'measurement',
|
||||
columns: ['name'],
|
||||
values: [['cpu']],
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
},
|
||||
} as FetchResponse);
|
||||
});
|
||||
|
||||
response = await ctx.ds.metricFindQuery(query, queryOptions);
|
||||
});
|
||||
|
||||
it('should replace $timefilter', () => {
|
||||
expect(requestQuery).toMatch('time >= 1514764800000ms and time <= 1514851200000ms');
|
||||
});
|
||||
|
||||
it('should use the HTTP GET method', () => {
|
||||
expect(requestMethod).toBe('GET');
|
||||
});
|
||||
|
||||
it('should not have any data in request body', () => {
|
||||
expect(requestData).toBeNull();
|
||||
});
|
||||
|
||||
it('parse response correctly', () => {
|
||||
expect(response).toEqual([{ text: 'cpu' }]);
|
||||
});
|
||||
});
|
||||
|
||||
describe('When getting error on 200 after issuing a query', () => {
|
||||
const queryOptions = {
|
||||
range: {
|
||||
from: '2018-01-01T00:00:00Z',
|
||||
to: '2018-01-02T00:00:00Z',
|
||||
},
|
||||
rangeRaw: {
|
||||
from: '2018-01-01T00:00:00Z',
|
||||
to: '2018-01-02T00:00:00Z',
|
||||
},
|
||||
targets: [{}],
|
||||
timezone: 'UTC',
|
||||
scopedVars: {
|
||||
interval: { text: '1m', value: '1m' },
|
||||
__interval: { text: '1m', value: '1m' },
|
||||
__interval_ms: { text: 60000, value: 60000 },
|
||||
},
|
||||
};
|
||||
|
||||
it('throws an error', async () => {
|
||||
fetchMock.mockImplementation(() => {
|
||||
return of({
|
||||
data: {
|
||||
results: [
|
||||
{
|
||||
error: 'Query timeout',
|
||||
},
|
||||
],
|
||||
},
|
||||
} as FetchResponse);
|
||||
});
|
||||
|
||||
ctx.ds.retentionPolicies = [''];
|
||||
|
||||
try {
|
||||
await lastValueFrom(ctx.ds.query(queryOptions));
|
||||
} catch (err) {
|
||||
if (err instanceof Error) {
|
||||
expect(err.message).toBe('InfluxDB Error: Query timeout');
|
||||
}
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
describe('When getting a request after issuing a query using outdated Browser Mode', () => {
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks();
|
||||
ctx.instanceSettings.url = '/api/datasources/proxy/1';
|
||||
ctx.instanceSettings.access = 'direct';
|
||||
ctx.ds = new InfluxDatasource(ctx.instanceSettings, templateSrv);
|
||||
});
|
||||
|
||||
it('throws an error', async () => {
|
||||
try {
|
||||
await lastValueFrom(ctx.ds.query({}));
|
||||
} catch (err) {
|
||||
if (err instanceof Error) {
|
||||
expect(err.message).toBe(BROWSER_MODE_DISABLED_MESSAGE);
|
||||
}
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
describe('InfluxDataSource in POST query mode', () => {
|
||||
const ctx: any = {
|
||||
instanceSettings: { url: 'url', name: 'influxDb', jsonData: { httpMode: 'POST' } },
|
||||
};
|
||||
|
||||
beforeEach(() => {
|
||||
ctx.instanceSettings.url = '/api/datasources/proxy/1';
|
||||
ctx.ds = new InfluxDatasource(ctx.instanceSettings, templateSrv);
|
||||
});
|
||||
|
||||
describe('When issuing metricFindQuery', () => {
|
||||
const query = 'SELECT max(value) FROM measurement';
|
||||
const queryOptions = {};
|
||||
let requestMethod: string | undefined;
|
||||
let requestQueryParameter: Record<string, any> | undefined;
|
||||
let queryEncoded: any;
|
||||
let requestQuery: any;
|
||||
|
||||
beforeEach(async () => {
|
||||
fetchMock.mockImplementation((req) => {
|
||||
requestMethod = req.method;
|
||||
requestQueryParameter = req.params;
|
||||
requestQuery = req.data;
|
||||
return of({
|
||||
data: {
|
||||
results: [
|
||||
{
|
||||
series: [
|
||||
{
|
||||
name: 'measurement',
|
||||
columns: ['max'],
|
||||
values: [[1]],
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
},
|
||||
} as FetchResponse);
|
||||
});
|
||||
|
||||
queryEncoded = await ctx.ds.serializeParams({ q: query });
|
||||
await ctx.ds.metricFindQuery(query, queryOptions).then(() => {});
|
||||
});
|
||||
|
||||
it('should have the query form urlencoded', () => {
|
||||
expect(requestQuery).toBe(queryEncoded);
|
||||
});
|
||||
|
||||
it('should use the HTTP POST method', () => {
|
||||
expect(requestMethod).toBe('POST');
|
||||
});
|
||||
|
||||
it('should not have q as a query parameter', () => {
|
||||
expect(requestQueryParameter).not.toHaveProperty('q');
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
// Some functions are required by the parent datasource class to provide functionality
|
||||
// such as ad-hoc filters, which requires the definition of the getTagKeys, and getTagValues
|
||||
describe('Datasource contract', () => {
|
||||
const metricFindQueryMock = jest.fn();
|
||||
beforeEach(() => {
|
||||
ctx.ds.metricFindQuery = metricFindQueryMock;
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
jest.clearAllMocks();
|
||||
});
|
||||
|
||||
it('has function called getTagKeys', () => {
|
||||
expect(Object.getOwnPropertyNames(Object.getPrototypeOf(ctx.ds))).toContain('getTagKeys');
|
||||
});
|
||||
|
||||
it('has function called getTagValues', () => {
|
||||
expect(Object.getOwnPropertyNames(Object.getPrototypeOf(ctx.ds))).toContain('getTagValues');
|
||||
});
|
||||
|
||||
it('should be able to call getTagKeys without specifying any parameter', () => {
|
||||
ctx.ds.getTagKeys();
|
||||
expect(metricFindQueryMock).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should be able to call getTagValues without specifying anything but key', () => {
|
||||
ctx.ds.getTagValues({ key: 'test' });
|
||||
expect(metricFindQueryMock).toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
|
||||
describe('Variables should be interpolated correctly', () => {
|
||||
const instanceSettings: any = {};
|
||||
const text = 'interpolationText';
|
||||
const text2 = 'interpolationText2';
|
||||
const textWithoutFormatRegex = 'interpolationText,interpolationText2';
|
||||
const textWithFormatRegex = 'interpolationText|interpolationText2';
|
||||
const variableMap: Record<string, string> = {
|
||||
$interpolationVar: text,
|
||||
$interpolationVar2: text2,
|
||||
};
|
||||
const adhocFilters = [
|
||||
{
|
||||
key: 'adhoc',
|
||||
operator: '=',
|
||||
value: 'val',
|
||||
condition: '',
|
||||
},
|
||||
];
|
||||
const templateSrv: any = {
|
||||
getAdhocFilters: jest.fn((name: string) => {
|
||||
return adhocFilters;
|
||||
}),
|
||||
replace: jest.fn((target?: string, scopedVars?: ScopedVars, format?: string | Function): string => {
|
||||
if (!format) {
|
||||
return variableMap[target!] || '';
|
||||
}
|
||||
if (format === 'regex') {
|
||||
return textWithFormatRegex;
|
||||
}
|
||||
return textWithoutFormatRegex;
|
||||
}),
|
||||
};
|
||||
const ds = new InfluxDatasource(instanceSettings, templateSrv);
|
||||
|
||||
const influxQuery = {
|
||||
refId: 'x',
|
||||
alias: '$interpolationVar',
|
||||
measurement: '$interpolationVar',
|
||||
policy: '$interpolationVar',
|
||||
limit: '$interpolationVar',
|
||||
slimit: '$interpolationVar',
|
||||
tz: '$interpolationVar',
|
||||
tags: [
|
||||
{
|
||||
key: 'cpu',
|
||||
operator: '=~',
|
||||
value: '/^$interpolationVar,$interpolationVar2$/',
|
||||
},
|
||||
],
|
||||
groupBy: [
|
||||
{
|
||||
params: ['$interpolationVar'],
|
||||
type: 'tag',
|
||||
},
|
||||
],
|
||||
select: [
|
||||
[
|
||||
{
|
||||
params: ['$interpolationVar'],
|
||||
type: 'field',
|
||||
},
|
||||
],
|
||||
],
|
||||
adhocFilters,
|
||||
};
|
||||
|
||||
function influxChecks(query: InfluxQuery) {
|
||||
expect(templateSrv.replace).toBeCalledTimes(10);
|
||||
expect(query.alias).toBe(text);
|
||||
expect(query.measurement).toBe(textWithFormatRegex);
|
||||
expect(query.policy).toBe(textWithFormatRegex);
|
||||
expect(query.limit).toBe(textWithFormatRegex);
|
||||
expect(query.slimit).toBe(textWithFormatRegex);
|
||||
expect(query.tz).toBe(text);
|
||||
expect(query.tags![0].value).toBe(textWithFormatRegex);
|
||||
expect(query.groupBy![0].params![0]).toBe(textWithFormatRegex);
|
||||
expect(query.select![0][0].params![0]).toBe(textWithFormatRegex);
|
||||
expect(query.adhocFilters?.[0].key).toBe(adhocFilters[0].key);
|
||||
}
|
||||
|
||||
describe('when interpolating query variables for dashboard->explore', () => {
|
||||
it('should interpolate all variables with Flux mode', () => {
|
||||
ds.version = InfluxVersion.Flux;
|
||||
const fluxQuery = {
|
||||
refId: 'x',
|
||||
query: '$interpolationVar,$interpolationVar2',
|
||||
};
|
||||
const queries = ds.interpolateVariablesInQueries([fluxQuery], {
|
||||
interpolationVar: { text: text, value: text },
|
||||
interpolationVar2: { text: text2, value: text2 },
|
||||
});
|
||||
expect(templateSrv.replace).toBeCalledTimes(1);
|
||||
expect(queries[0].query).toBe(textWithFormatRegex);
|
||||
});
|
||||
|
||||
it('should interpolate all variables with InfluxQL mode', () => {
|
||||
ds.version = InfluxVersion.InfluxQL;
|
||||
const queries = ds.interpolateVariablesInQueries([influxQuery], {
|
||||
interpolationVar: { text: text, value: text },
|
||||
interpolationVar2: { text: text2, value: text2 },
|
||||
});
|
||||
influxChecks(queries[0]);
|
||||
});
|
||||
});
|
||||
|
||||
describe('when interpolating template variables', () => {
|
||||
it('should apply all template variables with Flux mode', () => {
|
||||
ds.version = InfluxVersion.Flux;
|
||||
const fluxQuery = {
|
||||
refId: 'x',
|
||||
query: '$interpolationVar',
|
||||
};
|
||||
const query = ds.applyTemplateVariables(fluxQuery, {
|
||||
interpolationVar: {
|
||||
text: text,
|
||||
value: text,
|
||||
},
|
||||
});
|
||||
expect(templateSrv.replace).toBeCalledTimes(1);
|
||||
expect(query.query).toBe(text);
|
||||
});
|
||||
|
||||
it('should apply all template variables with InfluxQL mode', () => {
|
||||
ds.version = ds.version = InfluxVersion.InfluxQL;
|
||||
ds.access = 'proxy';
|
||||
config.featureToggles.influxdbBackendMigration = true;
|
||||
const query = ds.applyTemplateVariables(influxQuery, {
|
||||
interpolationVar: { text: text, value: text },
|
||||
interpolationVar2: { text: 'interpolationText2', value: 'interpolationText2' },
|
||||
});
|
||||
influxChecks(query);
|
||||
});
|
||||
|
||||
it('should apply all scopedVars to tags', () => {
|
||||
ds.version = InfluxVersion.InfluxQL;
|
||||
ds.access = 'proxy';
|
||||
config.featureToggles.influxdbBackendMigration = true;
|
||||
const query = ds.applyTemplateVariables(influxQuery, {
|
||||
interpolationVar: { text: text, value: text },
|
||||
interpolationVar2: { text: 'interpolationText2', value: 'interpolationText2' },
|
||||
});
|
||||
if (!query.tags?.length) {
|
||||
throw new Error('Tags are not defined');
|
||||
}
|
||||
const value = query.tags[0].value;
|
||||
const scopedVars = 'interpolationText|interpolationText2';
|
||||
expect(value).toBe(scopedVars);
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||