InfluxDB: Fix applying ad-hoc filters when querying with backend mode (#74973)

* Add adhoc filters to tags when querying with backend mode
* betterer
* Remove unnecessary parameters

parent 53e5804049
commit 64dd8df441
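
The heart of the fix is a small mapping step in the datasource's variable-application path: when the backend migration toggle is on, each dashboard ad-hoc filter is converted into an InfluxQL tag and appended to query.tags before the query is sent to the backend, with the filter's condition field dropped. Below is a minimal, self-contained sketch of that mapping; the interface shapes are simplified stand-ins for Grafana's AdHocVariableFilter and the plugin's InfluxQueryTag, adhocFiltersToTags is a hypothetical helper named only for illustration, and the host = server-01 values are made up. The committed change inlines this logic next to the applyVariables call, as the diff below shows.

// Sketch only (not the committed code): how one ad-hoc filter becomes an InfluxQL tag.
// AdHocFilterLike and InfluxTagLike are simplified stand-ins for the real Grafana types;
// adhocFiltersToTags is a hypothetical helper used here for illustration.

interface AdHocFilterLike {
  key: string;
  operator: string;
  value: string;
  condition: string; // dashboard-level glue between filters; the tag model does not need it
}

interface InfluxTagLike {
  key: string;
  operator?: string;
  value?: string;
  condition?: string;
}

function adhocFiltersToTags(filters: AdHocFilterLike[]): InfluxTagLike[] {
  return filters.map((af) => {
    // Strip condition and keep key/operator/value, which the backend query
    // builder understands as an ordinary tag filter.
    const { condition, ...asTag } = af;
    return asTag;
  });
}

// Example: an ad-hoc filter host = server-01 ends up appended to query.tags as
// { key: 'host', operator: '=', value: 'server-01' }.
const extraTags = adhocFiltersToTags([{ key: 'host', operator: '=', value: 'server-01', condition: '' }]);

In the datasource change below, the same destructuring runs over query.adhocFilters and the result is spread onto query.tags, so the backend receives the ad-hoc filters as regular tag conditions.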
@@ -3710,7 +3710,9 @@ exports[`better eslint`] = {
   ],
   "public/app/plugins/datasource/influxdb/specs/mocks.ts:5381": [
     [0, 0, 0, "Do not use any type assertions.", "0"],
-    [0, 0, 0, "Do not use any type assertions.", "1"]
+    [0, 0, 0, "Do not use any type assertions.", "1"],
+    [0, 0, 0, "Do not use any type assertions.", "2"],
+    [0, 0, 0, "Do not use any type assertions.", "3"]
   ],
   "public/app/plugins/datasource/jaeger/datasource.ts:5381": [
     [0, 0, 0, "Unexpected any. Specify a different type.", "0"],

@@ -8,6 +8,8 @@ import {
   DataQueryError,
   DataQueryRequest,
   DataQueryResponse,
+  DataSourceGetTagKeysOptions,
+  DataSourceGetTagValuesOptions,
   DataSourceInstanceSettings,
   dateMath,
   DateTime,
@@ -20,8 +22,6 @@ import {
   TIME_SERIES_VALUE_FIELD_NAME,
   TimeSeries,
   toDataFrame,
-  DataSourceGetTagKeysOptions,
-  DataSourceGetTagValuesOptions,
 } from '@grafana/data';
 import {
   BackendDataSourceResponse,
@@ -42,7 +42,7 @@ import { buildMetadataQuery } from './influxql_query_builder';
 import { prepareAnnotation } from './migrations';
 import { buildRawQuery } from './queryUtils';
 import ResponseParser from './response_parser';
-import { DEFAULT_POLICY, InfluxOptions, InfluxQuery, InfluxVersion } from './types';
+import { DEFAULT_POLICY, InfluxOptions, InfluxQuery, InfluxQueryTag, InfluxVersion } from './types';

 export default class InfluxDatasource extends DataSourceWithBackend<InfluxQuery, InfluxOptions> {
   type: string;
@@ -214,6 +214,13 @@ export default class InfluxDatasource extends DataSourceWithBackend<InfluxQuery,

     if (this.isMigrationToggleOnAndIsAccessProxy()) {
       query = this.applyVariables(query, scopedVars, rest);
+      if (query.adhocFilters?.length) {
+        const adhocFiltersToTags: InfluxQueryTag[] = (query.adhocFilters ?? []).map((af) => {
+          const { condition, ...asTag } = af;
+          return asTag;
+        });
+        query.tags = [...(query.tags ?? []), ...adhocFiltersToTags];
+      }
     }

     return query;
@@ -352,8 +359,6 @@ export default class InfluxDatasource extends DataSourceWithBackend<InfluxQuery,
       type: 'TAG_KEYS',
       templateService: this.templateSrv,
       database: this.database,
-      measurement: '',
-      tags: [],
     });

     return this.metricFindQuery(query);
@@ -365,8 +370,6 @@ export default class InfluxDatasource extends DataSourceWithBackend<InfluxQuery,
       templateService: this.templateSrv,
       database: this.database,
       withKey: options.key,
-      measurement: '',
-      tags: [],
     });

     return this.metricFindQuery(query);

@@ -0,0 +1,143 @@
+import { of } from 'rxjs';
+
+import { DataQueryRequest, dateTime, ScopedVars } from '@grafana/data/src';
+import { FetchResponse } from '@grafana/runtime';
+import config from 'app/core/config';
+
+import { InfluxQuery } from '../types';
+
+import {
+  getMockDS,
+  getMockDSInstanceSettings,
+  mockBackendService,
+  mockInfluxFetchResponse,
+  mockTemplateSrv,
+} from './mocks';
+
+config.featureToggles.influxdbBackendMigration = true;
+const fetchMock = mockBackendService(mockInfluxFetchResponse());
+
+describe('InfluxDataSource Backend Mode', () => {
+  const text = 'interpolationText';
+  const text2 = 'interpolationText2';
+  const textWithoutFormatRegex = 'interpolationText,interpolationText2';
+  const textWithFormatRegex = 'interpolationText|interpolationText2';
+  const variableMap: Record<string, string> = {
+    $interpolationVar: text,
+    $interpolationVar2: text2,
+  };
+  const adhocFilters = [
+    {
+      key: 'adhoc',
+      operator: '=',
+      value: 'val',
+      condition: '',
+    },
+  ];
+  const templateSrv = mockTemplateSrv(
+    jest.fn(() => {
+      return adhocFilters;
+    }),
+    jest.fn((target?: string, scopedVars?: ScopedVars, format?: string | Function): string => {
+      if (!format) {
+        return variableMap[target!] || '';
+      }
+      if (format === 'regex') {
+        return textWithFormatRegex;
+      }
+      return textWithoutFormatRegex;
+    })
+  );
+
+  let queryOptions: DataQueryRequest<InfluxQuery>;
+  let influxQuery: InfluxQuery;
+  const now = dateTime('2023-09-16T21:26:00Z');
+
+  beforeEach(() => {
+    queryOptions = {
+      app: 'dashboard',
+      interval: '10',
+      intervalMs: 10,
+      requestId: 'A-testing',
+      startTime: 0,
+      range: {
+        from: dateTime(now).subtract(15, 'minutes'),
+        to: now,
+        raw: {
+          from: 'now-15m',
+          to: 'now',
+        },
+      },
+      rangeRaw: {
+        from: 'now-15m',
+        to: 'now',
+      },
+      targets: [],
+      timezone: 'UTC',
+      scopedVars: {
+        interval: { text: '1m', value: '1m' },
+        __interval: { text: '1m', value: '1m' },
+        __interval_ms: { text: 60000, value: 60000 },
+      },
+    };
+
+    influxQuery = {
+      refId: 'x',
+      alias: '$interpolationVar',
+      measurement: '$interpolationVar',
+      policy: '$interpolationVar',
+      limit: '$interpolationVar',
+      slimit: '$interpolationVar',
+      tz: '$interpolationVar',
+      tags: [
+        {
+          key: 'cpu',
+          operator: '=~',
+          value: '/^$interpolationVar,$interpolationVar2$/',
+        },
+      ],
+      groupBy: [
+        {
+          params: ['$interpolationVar'],
+          type: 'tag',
+        },
+      ],
+      select: [
+        [
+          {
+            params: ['$interpolationVar'],
+            type: 'field',
+          },
+        ],
+      ],
+    };
+  });
+
+  describe('adhoc filters', () => {
+    let fetchReq: { queries: InfluxQuery[] };
+    const ctx = {
+      ds: getMockDS(getMockDSInstanceSettings(), templateSrv),
+    };
+    beforeEach(async () => {
+      fetchMock.mockImplementation((req) => {
+        fetchReq = req.data;
+        return of(mockInfluxFetchResponse() as FetchResponse);
+      });
+      const req = {
+        ...queryOptions,
+        targets: [...queryOptions.targets, { ...influxQuery, adhocFilters }],
+      };
+      await ctx.ds.query(req);
+    });
+
+    it('should add adhocFilters to the tags in the query', () => {
+      expect(fetchMock).toHaveBeenCalled();
+      expect(fetchReq).not.toBeNull();
+      expect(fetchReq.queries.length).toBe(1);
+      expect(fetchReq.queries[0].tags).toBeDefined();
+      expect(fetchReq.queries[0].tags?.length).toBe(2);
+      expect(fetchReq.queries[0].tags?.[1].key).toBe(adhocFilters[0].key);
+      expect(fetchReq.queries[0].tags?.[1].value).toBe(adhocFilters[0].value);
+    });
+  });
+});

@@ -1,11 +1,25 @@
 import { of } from 'rxjs';

-import { DataSourceInstanceSettings, PluginType } from '@grafana/data/src';
-import { FetchResponse, getBackendSrv, setBackendSrv } from '@grafana/runtime/src';
+import {
+  AdHocVariableFilter,
+  DataQueryRequest,
+  DataSourceInstanceSettings,
+  dateTime,
+  FieldType,
+  PluginType,
+  ScopedVars,
+} from '@grafana/data/src';
+import {
+  BackendDataSourceResponse,
+  FetchResponse,
+  getBackendSrv,
+  setBackendSrv,
+  VariableInterpolation,
+} from '@grafana/runtime/src';

 import { TemplateSrv } from '../../../../features/templating/template_srv';
 import InfluxDatasource from '../datasource';
-import { InfluxOptions, InfluxVersion } from '../types';
+import { InfluxOptions, InfluxQuery, InfluxVersion } from '../types';

 const getAdhocFiltersMock = jest.fn().mockImplementation(() => []);
 const replaceMock = jest.fn().mockImplementation((a: string, ...rest: unknown[]) => a);
@@ -15,6 +29,21 @@ export const templateSrvStub = {
   replace: replaceMock,
 } as unknown as TemplateSrv;

+export function mockTemplateSrv(
+  getAdhocFiltersMock: (datasourceName: string) => AdHocVariableFilter[],
+  replaceMock: (
+    target?: string,
+    scopedVars?: ScopedVars,
+    format?: string | Function | undefined,
+    interpolations?: VariableInterpolation[]
+  ) => string
+): TemplateSrv {
+  return {
+    getAdhocFilters: getAdhocFiltersMock,
+    replace: replaceMock,
+  } as unknown as TemplateSrv;
+}
+
 export function mockBackendService(response: FetchResponse) {
   const fetchMock = jest.fn().mockReturnValue(of(response));
   const origBackendSrv = getBackendSrv();
@@ -22,10 +51,14 @@ export function mockBackendService(response: FetchResponse) {
     ...origBackendSrv,
     fetch: fetchMock,
   });
   return fetchMock;
 }

-export function getMockDS(instanceSettings: DataSourceInstanceSettings<InfluxOptions>): InfluxDatasource {
-  return new InfluxDatasource(instanceSettings, templateSrvStub);
+export function getMockDS(
+  instanceSettings: DataSourceInstanceSettings<InfluxOptions>,
+  templateSrv: TemplateSrv = templateSrvStub
+): InfluxDatasource {
+  return new InfluxDatasource(instanceSettings, templateSrv);
 }

 export function getMockDSInstanceSettings(): DataSourceInstanceSettings<InfluxOptions> {
@@ -61,3 +94,155 @@ export function getMockDSInstanceSettings(): DataSourceInstanceSettings<InfluxOp
     jsonData: { version: InfluxVersion.InfluxQL, httpMode: 'POST', dbName: 'site' },
   };
 }
+
+export const mockInfluxFetchResponse = (
+  overrides?: Partial<FetchResponse<BackendDataSourceResponse>>
+): FetchResponse<BackendDataSourceResponse> => {
+  return {
+    config: {
+      url: 'mock-response-url',
+    },
+    headers: new Headers(),
+    ok: false,
+    redirected: false,
+    status: 0,
+    statusText: '',
+    type: 'basic',
+    url: '',
+    data: {
+      results: {
+        A: {
+          status: 200,
+          frames: mockInfluxTSDBQueryResponse,
+        },
+        metadataQuery: {
+          status: 200,
+          frames: mockInfluxRetentionPolicyResponse,
+        },
+      },
+    },
+    ...overrides,
+  };
+};
+export const mockInfluxTSDBQueryResponse = [
+  {
+    schema: {
+      name: 'logs.host',
+      fields: [
+        {
+          name: 'time',
+          type: FieldType.time,
+        },
+        {
+          name: 'value',
+          type: FieldType.string,
+        },
+      ],
+    },
+    data: {
+      values: [
+        [1645208701000, 1645208702000],
+        ['cbfa07e0e3bb 1', 'cbfa07e0e3bb 2'],
+      ],
+    },
+  },
+  {
+    schema: {
+      name: 'logs.message',
+      fields: [
+        {
+          name: 'time',
+          type: FieldType.time,
+        },
+        {
+          name: 'value',
+          type: FieldType.string,
+        },
+      ],
+    },
+    data: {
+      values: [
+        [1645208701000, 1645208702000],
+        ['Station softwareupdated[447]: Adding client 1', 'Station softwareupdated[447]: Adding client 2'],
+      ],
+    },
+  },
+  {
+    schema: {
+      name: 'logs.path',
+      fields: [
+        {
+          name: 'time',
+          type: FieldType.time,
+        },
+        {
+          name: 'value',
+          type: FieldType.string,
+        },
+      ],
+    },
+    data: {
+      values: [
+        [1645208701000, 1645208702000],
+        ['/var/log/host/install.log 1', '/var/log/host/install.log 2'],
+      ],
+    },
+  },
+  {
+    schema: {
+      name: 'textColumn',
+      fields: [
+        {
+          name: 'time',
+          type: FieldType.time,
+        },
+        {
+          name: 'value',
+          type: FieldType.string,
+        },
+      ],
+    },
+    data: {
+      values: [
+        [1645208701000, 1645208702000],
+        ['text 1', 'text 2'],
+      ],
+    },
+  },
+];
+
+export const mockInfluxRetentionPolicyResponse = [
+  {
+    schema: {
+      refId: 'metadataQuery',
+      fields: [{ name: 'value', type: FieldType.string, typeInfo: { frame: 'string' } }],
+    },
+    data: { values: [['autogen', 'bar', '5m_avg', '1m_avg', 'default']] },
+  },
+];
+
+export const mockInfluxDataRequest = (
+  targets: InfluxQuery[],
+  overrides?: Partial<DataQueryRequest>
+): Partial<DataQueryRequest<InfluxQuery>> => {
+  const defaults: DataQueryRequest<InfluxQuery> = {
+    app: 'createDataRequest',
+    interval: '1m',
+    intervalMs: 60000,
+    range: {
+      from: dateTime(0),
+      to: dateTime(10),
+      raw: { from: dateTime(0), to: dateTime(10) },
+    },
+    rangeRaw: {
+      from: dateTime(0),
+      to: dateTime(10),
+    },
+    requestId: '',
+    scopedVars: {},
+    startTime: 0,
+    targets: targets,
+    timezone: '',
+  };
+  return Object.assign(defaults, overrides ?? {});
+};