Mirror of https://github.com/grafana/grafana.git, synced 2025-02-25 18:55:37 -06:00
influxdb: more robust query-has-variables check (#37493)
* influxdb: moved queryUtils file to better place
* influxdb: more robust query-has-variables check
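Only the queryUtils relocation is reproduced in the hunks below; the "more robust query-has-variables check" itself is not shown here. For orientation, one plausible shape for such a check is to render the whole query to raw InfluxQL text (via the buildRawQuery helper these hunks relocate) and then ask the template service whether that text references any dashboard variable, instead of inspecting individual query fields. The sketch below is illustrative only, not the PR's code; the queryHasVariables name, the import paths, and the availability of getTemplateSrv().containsTemplate from @grafana/runtime are assumptions.

import { getTemplateSrv } from '@grafana/runtime';
import { InfluxQuery } from './types';
import { buildRawQuery } from './queryUtils';

// Sketch only (assumed API): serialize the full query, then let the
// template service decide whether any template variable appears in it.
export function queryHasVariables(query: InfluxQuery): boolean {
  // buildRawQuery covers measurement, tags, selects, group-bys and limits
  // in a single string, so a variable hidden in any field is caught.
  return getTemplateSrv().containsTemplate(buildRawQuery(query));
}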
@@ -7,7 +7,7 @@ import { FluxQueryEditor } from './FluxQueryEditor';
import { RawInfluxQLEditor } from './RawInfluxQLEditor';
import { Editor as VisualInfluxQLEditor } from './VisualInfluxQLEditor/Editor';
import { QueryEditorModeSwitcher } from './QueryEditorModeSwitcher';
-import { buildRawQuery } from './queryUtils';
+import { buildRawQuery } from '../queryUtils';

type Props = QueryEditorProps<InfluxDatasource, InfluxQuery, InfluxOptions>;

@@ -22,7 +22,7 @@ import {
  removeGroupByPart,
  changeSelectPart,
  changeGroupByPart,
-} from '../queryUtils';
+} from '../../queryUtils';
import { FormatAsSection } from './FormatAsSection';
import { SectionLabel } from './SectionLabel';
import { SectionFill } from './SectionFill';

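Taken together with the whole-file removals further down, these two hunks are consistent with queryUtils.ts moving up one directory: a caller that used to sit next to it now reaches up one level ('./queryUtils' becomes '../queryUtils'), and a caller one directory deeper reaches up two ('../queryUtils' becomes '../../queryUtils'). A rough sketch of the assumed layout follows; the directory and file names are inferred from the imports, not stated by the diff.

influxdb/
  queryUtils.ts                // assumed new location of the moved module
  components/
    QueryEditor.tsx            // first hunk (assumed file): imports '../queryUtils'
    VisualInfluxQLEditor/
      Editor.tsx               // second hunk (assumed file): imports '../../queryUtils'
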
@@ -1,422 +0,0 @@
import { cloneDeep } from 'lodash';
import { InfluxQuery } from '../types';
import { buildRawQuery, normalizeQuery, changeSelectPart, changeGroupByPart } from './queryUtils';

describe('InfluxDB query utils', () => {
  describe('buildRawQuery', () => {
    it('should handle default query', () => {
      expect(
        buildRawQuery({
          refId: 'A',
          hide: false,
          policy: 'default',
          resultFormat: 'time_series',
          orderByTime: 'ASC',
          tags: [],
          groupBy: [
            {
              type: 'time',
              params: ['$__interval'],
            },
            {
              type: 'fill',
              params: ['null'],
            },
          ],
          select: [
            [
              {
                type: 'field',
                params: ['value'],
              },
              {
                type: 'mean',
                params: [],
              },
            ],
          ],
        })
      ).toBe('SELECT mean("value") FROM "measurement" WHERE $timeFilter GROUP BY time($__interval) fill(null)');
    });
    it('should handle small query', () => {
      expect(
        buildRawQuery({
          refId: 'A',
          select: [
            [
              {
                type: 'field',
                params: ['value'],
              },
            ],
          ],
          groupBy: [],
        })
      ).toBe('SELECT "value" FROM "measurement" WHERE $timeFilter');
    });
    it('should handle string limit/slimit', () => {
      expect(
        buildRawQuery({
          refId: 'A',
          select: [
            [
              {
                type: 'field',
                params: ['value'],
              },
            ],
          ],
          groupBy: [],
          limit: '12',
          slimit: '23',
        })
      ).toBe('SELECT "value" FROM "measurement" WHERE $timeFilter LIMIT 12 SLIMIT 23');
    });
    it('should handle number limit/slimit', () => {
      expect(
        buildRawQuery({
          refId: 'A',
          select: [
            [
              {
                type: 'field',
                params: ['value'],
              },
            ],
          ],
          groupBy: [],
          limit: 12,
          slimit: 23,
        })
      ).toBe('SELECT "value" FROM "measurement" WHERE $timeFilter LIMIT 12 SLIMIT 23');
    });
    it('should handle all the tag-operators', () => {
      expect(
        buildRawQuery({
          refId: 'A',
          select: [
            [
              {
                type: 'field',
                params: ['value'],
              },
            ],
          ],
          tags: [
            {
              key: 'cpu',
              operator: '=',
              value: 'cpu0',
            },
            {
              condition: 'AND',
              key: 'cpu',
              operator: '!=',
              value: 'cpu0',
            },
            {
              condition: 'AND',
              key: 'cpu',
              operator: '<>',
              value: 'cpu0',
            },
            {
              key: 'cpu',
              operator: '<',
              value: 'cpu0',
            },
            {
              condition: 'AND',
              key: 'cpu',
              operator: '>',
              value: 'cpu0',
            },
            {
              key: 'cpu',
              operator: '=~',
              value: '/cpu0/',
            },
            {
              condition: 'AND',
              key: 'cpu',
              operator: '!~',
              value: '/cpu0/',
            },
          ],
          groupBy: [],
        })
      ).toBe(
        `SELECT "value" FROM "measurement" WHERE ("cpu" = 'cpu0' AND "cpu" != 'cpu0' AND "cpu" <> 'cpu0' AND "cpu" < cpu0 AND "cpu" > cpu0 AND "cpu" =~ /cpu0/ AND "cpu" !~ /cpu0/) AND $timeFilter`
      );
    });
    it('should handle a complex query', () => {
      expect(
        buildRawQuery({
          alias: '',
          groupBy: [
            {
              params: ['$__interval'],
              type: 'time',
            },
            {
              params: ['cpu'],
              type: 'tag',
            },
            {
              params: ['host'],
              type: 'tag',
            },
            {
              params: ['none'],
              type: 'fill',
            },
          ],
          hide: false,
          measurement: 'cpu',
          orderByTime: 'DESC',
          policy: 'default',
          rawQuery: false,
          refId: 'A',
          resultFormat: 'time_series',
          select: [
            [
              {
                type: 'field',
                params: ['usage_idle'],
              },
              {
                type: 'mean',
                params: [],
              },
              {
                type: 'holt_winters_with_fit',
                params: ['30', '5'],
              },
            ],
            [
              {
                type: 'field',
                params: ['usage_guest'],
              },
              {
                type: 'median',
                params: [],
              },
            ],
          ],
          tags: [
            {
              key: 'cpu',
              operator: '=',
              value: 'cpu2',
            },
            {
              condition: 'OR',
              key: 'cpu',
              operator: '=',
              value: 'cpu3',
            },
            {
              condition: 'AND',
              key: 'cpu',
              operator: '=',
              value: 'cpu1',
            },
          ],
          limit: '12',
          slimit: '23',
          tz: 'UTC',
        })
      ).toBe(
        `SELECT holt_winters_with_fit(mean("usage_idle"), 30, 5), median("usage_guest") FROM "cpu" WHERE ("cpu" = 'cpu2' OR "cpu" = 'cpu3' AND "cpu" = 'cpu1') AND $timeFilter GROUP BY time($__interval), "cpu", "host" fill(none) ORDER BY time DESC LIMIT 12 SLIMIT 23 tz('UTC')`
      );
    });
  });

  describe('normalizeQuery', () => {
    it('should handle minimal query', () => {
      const query: InfluxQuery = {
        refId: 'A',
      };

      const queryClone = cloneDeep(query);

      expect(normalizeQuery(query)).toStrictEqual({
        refId: 'A',
        policy: 'default',
        resultFormat: 'time_series',
        orderByTime: 'ASC',
        tags: [],
        groupBy: [
          { type: 'time', params: ['$__interval'] },
          { type: 'fill', params: ['null'] },
        ],
        select: [
          [
            { type: 'field', params: ['value'] },
            { type: 'mean', params: [] },
          ],
        ],
      });

      // make sure the call did not mutate the input
      expect(query).toStrictEqual(queryClone);
    });

    it('should not change values if they already exist', () => {
      const query: InfluxQuery = {
        refId: 'A',
        groupBy: [],
        measurement: 'cpu',
        orderByTime: 'ASC',
        policy: 'default',
        resultFormat: 'table',
        select: [
          [
            {
              type: 'field',
              params: ['usage_idle'],
            },
          ],
        ],
        tags: [],
      };

      const queryClone = cloneDeep(query);

      const result = normalizeQuery(query);

      // i will check two things:
      // 1. that the function-call does not mutate the input
      expect(query).toStrictEqual(queryClone);

      // 2. that the returned object is the same object as the object i gave it.
      // (not just the same structure, literally the same object)
      expect(result === query).toBeTruthy();
    });
  });

  describe('changeSelectPart', () => {
    it('should handle a normal situation', () => {
      const query: InfluxQuery = {
        refId: 'A',
        select: [
          [
            {
              type: 'field',
              params: ['usage_idle'],
            },
            {
              type: 'math',
              params: [' / 5'],
            },
            {
              type: 'alias',
              params: ['test42'],
            },
          ],
          [
            {
              type: 'field',
              params: ['usage_guest'],
            },
            {
              type: 'math',
              params: ['*4'],
            },
            {
              type: 'alias',
              params: ['test43'],
            },
          ],
        ],
      };

      const queryClone = cloneDeep(query);
      const result = changeSelectPart(query, 1, 2, ['test55']);

      // make sure the input did not get mutated
      expect(query).toStrictEqual(queryClone);

      expect(result).toStrictEqual({
        refId: 'A',
        select: [
          [
            {
              type: 'field',
              params: ['usage_idle'],
            },
            {
              type: 'math',
              params: [' / 5'],
            },
            {
              type: 'alias',
              params: ['test42'],
            },
          ],
          [
            {
              type: 'field',
              params: ['usage_guest'],
            },
            {
              type: 'math',
              params: ['*4'],
            },
            {
              type: 'alias',
              params: ['test55'],
            },
          ],
        ],
      });
    });
  });

  describe('changeGroupByPart', () => {
    it('should handle a normal situation', () => {
      const query: InfluxQuery = {
        refId: 'A',
        groupBy: [
          {
            type: 'time',
            params: ['$__interval'],
          },
          {
            type: 'tag',
            params: ['host'],
          },
          {
            type: 'fill',
            params: ['none'],
          },
        ],
      };

      const queryClone = cloneDeep(query);
      const result = changeGroupByPart(query, 1, ['cpu']);

      // make sure the input did not get mutated
      expect(query).toStrictEqual(queryClone);

      expect(result).toStrictEqual({
        refId: 'A',
        groupBy: [
          {
            type: 'time',
            params: ['$__interval'],
          },
          {
            type: 'tag',
            params: ['cpu'],
          },
          {
            type: 'fill',
            params: ['none'],
          },
        ],
      });
    });
  });
});

@@ -1,91 +0,0 @@
import { cloneDeep } from 'lodash';
import InfluxQueryModel from '../influx_query_model';
import { InfluxQuery } from '../types';

// FIXME: these functions are a beginning of a refactoring of influx_query_model.ts
// into a simpler approach with full typescript types.
// later we should be able to migrate the unit-tests
// that relate to these functions here, and then perhaps even move the implementation
// to this place

export function buildRawQuery(query: InfluxQuery): string {
  const queryCopy = cloneDeep(query); // the query-model mutates the query
  const model = new InfluxQueryModel(queryCopy);
  return model.render(false);
}

export function normalizeQuery(query: InfluxQuery): InfluxQuery {
  // we return the original query if there is no need to update it
  if (
    query.policy !== undefined &&
    query.resultFormat !== undefined &&
    query.orderByTime !== undefined &&
    query.tags !== undefined &&
    query.groupBy !== undefined &&
    query.select !== undefined
  ) {
    return query;
  }

  // FIXME: we should move the whole normalizeQuery logic here,
  // and then have influxQueryModel call this function,
  // to concentrate the whole logic here

  const queryCopy = cloneDeep(query); // the query-model mutates the query
  return new InfluxQueryModel(queryCopy).target;
}

export function addNewSelectPart(query: InfluxQuery, type: string, index: number): InfluxQuery {
  const queryCopy = cloneDeep(query); // the query-model mutates the query
  const model = new InfluxQueryModel(queryCopy);
  model.addSelectPart(model.selectModels[index], type);
  return model.target;
}

export function removeSelectPart(query: InfluxQuery, partIndex: number, index: number): InfluxQuery {
  const queryCopy = cloneDeep(query); // the query-model mutates the query
  const model = new InfluxQueryModel(queryCopy);
  const selectModel = model.selectModels[index];
  model.removeSelectPart(selectModel, selectModel[partIndex]);
  return model.target;
}

export function changeSelectPart(
  query: InfluxQuery,
  listIndex: number,
  partIndex: number,
  newParams: string[]
): InfluxQuery {
  // we need to make shallow copy of `query.select` down to `query.select[listIndex][partIndex]`
  const newSel = [...(query.select ?? [])];
  newSel[listIndex] = [...newSel[listIndex]];
  newSel[listIndex][partIndex] = {
    ...newSel[listIndex][partIndex],
    params: newParams,
  };
  return { ...query, select: newSel };
}

export function addNewGroupByPart(query: InfluxQuery, type: string): InfluxQuery {
  const queryCopy = cloneDeep(query); // the query-model mutates the query
  const model = new InfluxQueryModel(queryCopy);
  model.addGroupBy(type);
  return model.target;
}

export function removeGroupByPart(query: InfluxQuery, partIndex: number): InfluxQuery {
  const queryCopy = cloneDeep(query); // the query-model mutates the query
  const model = new InfluxQueryModel(queryCopy);
  model.removeGroupByPart(model.groupByParts[partIndex], partIndex);
  return model.target;
}

export function changeGroupByPart(query: InfluxQuery, partIndex: number, newParams: string[]): InfluxQuery {
  // we need to make shallow copy of `query.groupBy` down to `query.groupBy[partIndex]`
  const newGroupBy = [...(query.groupBy ?? [])];
  newGroupBy[partIndex] = {
    ...newGroupBy[partIndex],
    params: newParams,
  };
  return { ...query, groupBy: newGroupBy };
}
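For reference, a minimal usage sketch of the relocated buildRawQuery helper, mirroring the 'should handle small query' test above. The import paths are illustrative only, since they depend on where the caller sits relative to the moved file.

import { buildRawQuery } from '../queryUtils';
import { InfluxQuery } from '../types';

const query: InfluxQuery = {
  refId: 'A',
  select: [[{ type: 'field', params: ['value'] }]],
  groupBy: [],
};

// Per the test above, this renders:
// SELECT "value" FROM "measurement" WHERE $timeFilter
const rawText = buildRawQuery(query);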