mirror of
https://github.com/grafana/grafana.git
synced 2025-02-25 18:55:37 -06:00
InfluxDB: backend migration (run query in explore) (#43352)
* InfluxDB backend migration * Multiple queries and more * Added types * Updated preferredVisualisationType * Updated model parser test to include limit,slimit,orderByTime * Added test for building query with limit, slimit * Added test for building query with limit, slimit, orderByTime and puts them in the correct order * Add test: Influxdb response parser should parse two responses with different refIDs * Moved methods to response parser * Add test to ensure ExecutedQueryString is populated * Move functions out of response parser class * Test for getSelectedParams * Merge cases * Change to const * Test get table columns correctly * Removed unnecessary fields * Test get table rows correctly * Removed getSeries function * Added test for preferredVisualisationType * Added test for executedQueryString * Modified response parser * Removed test * Improvements * Tests * Review changes * Feature flag rename and code gen
This commit is contained in:
@@ -1,4 +1,4 @@
|
||||
import { cloneDeep, extend, get, has, isString, map as _map, omit, pick, reduce } from 'lodash';
|
||||
import { cloneDeep, extend, get, groupBy, has, isString, map as _map, omit, pick, reduce } from 'lodash';
|
||||
import { lastValueFrom, Observable, of, throwError } from 'rxjs';
|
||||
import { catchError, map } from 'rxjs/operators';
|
||||
import { v4 as uuidv4 } from 'uuid';
|
||||
@@ -22,8 +22,8 @@ import {
|
||||
TIME_SERIES_TIME_FIELD_NAME,
|
||||
TIME_SERIES_VALUE_FIELD_NAME,
|
||||
TimeSeries,
|
||||
CoreApp,
|
||||
} from '@grafana/data';
|
||||
|
||||
import InfluxSeries from './influx_series';
|
||||
import InfluxQueryModel from './influx_query_model';
|
||||
import ResponseParser from './response_parser';
|
||||
@@ -32,6 +32,7 @@ import { InfluxOptions, InfluxQuery, InfluxVersion } from './types';
|
||||
import { getTemplateSrv, TemplateSrv } from 'app/features/templating/template_srv';
|
||||
import { FluxQueryEditor } from './components/FluxQueryEditor';
|
||||
import { buildRawQuery } from './queryUtils';
|
||||
import config from 'app/core/config';
|
||||
|
||||
// we detect the field type based on the value-array
|
||||
function getFieldType(values: unknown[]): FieldType {
|
||||
@@ -113,6 +114,7 @@ export default class InfluxDatasource extends DataSourceWithBackend<InfluxQuery,
|
||||
database: any;
|
||||
basicAuth: any;
|
||||
withCredentials: any;
|
||||
access: 'direct' | 'proxy';
|
||||
interval: any;
|
||||
responseParser: any;
|
||||
httpMode: string;
|
||||
@@ -135,6 +137,7 @@ export default class InfluxDatasource extends DataSourceWithBackend<InfluxQuery,
|
||||
this.database = instanceSettings.database;
|
||||
this.basicAuth = instanceSettings.basicAuth;
|
||||
this.withCredentials = instanceSettings.withCredentials;
|
||||
this.access = instanceSettings.access;
|
||||
const settingsData = instanceSettings.jsonData || ({} as InfluxOptions);
|
||||
this.interval = settingsData.timeInterval;
|
||||
this.httpMode = settingsData.httpMode || 'GET';
|
||||
@@ -150,17 +153,58 @@ export default class InfluxDatasource extends DataSourceWithBackend<InfluxQuery,
|
||||
}
|
||||
|
||||
query(request: DataQueryRequest<InfluxQuery>): Observable<DataQueryResponse> {
|
||||
// for not-flux queries we call `this.classicQuery`, and that
|
||||
// handles the is-hidden situation.
|
||||
// for the flux-case, we do the filtering here
|
||||
const filteredRequest = {
|
||||
...request,
|
||||
targets: request.targets.filter((t) => t.hide !== true),
|
||||
};
|
||||
|
||||
if (this.isFlux) {
|
||||
// for not-flux queries we call `this.classicQuery`, and that
|
||||
// handles the is-hidden situation.
|
||||
// for the flux-case, we do the filtering here
|
||||
const filteredRequest = {
|
||||
...request,
|
||||
targets: request.targets.filter((t) => t.hide !== true),
|
||||
};
|
||||
return super.query(filteredRequest);
|
||||
}
|
||||
|
||||
if (config.featureToggles.influxdbBackendMigration && this.access === 'proxy' && request.app === CoreApp.Explore) {
|
||||
return super.query(filteredRequest).pipe(
|
||||
map((res) => {
|
||||
if (res.error) {
|
||||
throw {
|
||||
message: 'InfluxDB Error: ' + res.error.message,
|
||||
res,
|
||||
};
|
||||
}
|
||||
|
||||
const seriesList: any[] = [];
|
||||
|
||||
const groupedFrames = groupBy(res.data, (x) => x.refId);
|
||||
if (Object.keys(groupedFrames).length > 0) {
|
||||
filteredRequest.targets.forEach((target) => {
|
||||
const filteredFrames = groupedFrames[target.refId] ?? [];
|
||||
switch (target.resultFormat) {
|
||||
case 'logs':
|
||||
case 'table':
|
||||
seriesList.push(
|
||||
this.responseParser.getTable(filteredFrames, target, {
|
||||
preferredVisualisationType: target.resultFormat,
|
||||
})
|
||||
);
|
||||
break;
|
||||
default: {
|
||||
for (let i = 0; i < filteredFrames.length; i++) {
|
||||
seriesList.push(filteredFrames[i]);
|
||||
}
|
||||
break;
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
return { data: seriesList };
|
||||
})
|
||||
);
|
||||
}
|
||||
|
||||
// Fallback to classic query support
|
||||
return this.classicQuery(request);
|
||||
}
|
||||
@@ -185,7 +229,7 @@ export default class InfluxDatasource extends DataSourceWithBackend<InfluxQuery,
|
||||
applyTemplateVariables(query: InfluxQuery, scopedVars: ScopedVars): Record<string, any> {
|
||||
// this only works in flux-mode, it should not be called in non-flux-mode
|
||||
if (!this.isFlux) {
|
||||
throw new Error('applyTemplateVariables called in influxql-mode. this should never happen');
|
||||
return query;
|
||||
}
|
||||
|
||||
// We want to interpolate these variables on backend
|
||||
|
||||
@@ -1,4 +1,7 @@
|
||||
import { each, isArray } from 'lodash';
|
||||
import { DataFrame, FieldType, QueryResultMeta } from '@grafana/data';
|
||||
import TableModel from 'app/core/table_model';
|
||||
import { each, groupBy, isArray } from 'lodash';
|
||||
import { InfluxQuery } from './types';
|
||||
|
||||
export default class ResponseParser {
|
||||
parse(query: string, results: { results: any }) {
|
||||
@@ -50,6 +53,106 @@ export default class ResponseParser {
|
||||
// order is insertion-order, so this should be ok.
|
||||
return Array.from(res).map((v) => ({ text: v }));
|
||||
}
|
||||
|
||||
getTable(dfs: DataFrame[], target: InfluxQuery, meta: QueryResultMeta): TableModel {
  let table = new TableModel();

  if (dfs.length > 0) {
    // Carry the executed query string from the first frame's meta into the
    // table meta so it can be shown in the query inspector.
    table.meta = {
      ...meta,
      executedQueryString: dfs[0].meta?.executedQueryString,
    };

    table.refId = target.refId;
    table = getTableCols(dfs, table, target);

    // If "group by tag(s)" was used, the value field carries labels:
    // group frames by their label values and emit one run of rows per group.
    // (Guarded with ?. so a frame with a single field falls through instead
    // of throwing here; was an unguarded index access typed as `any`.)
    if (dfs[0].fields[1]?.labels) {
      const framesByLabel = groupBy(dfs, (df: DataFrame) =>
        df.fields[1].labels ? Object.values(df.fields[1].labels!) : null
      );
      // groupBy keys are the comma-joined label values; keep keys and
      // groups in parallel arrays (insertion order is preserved).
      const labels = Object.keys(framesByLabel);
      const groupedFrames = Object.values(framesByLabel);

      for (let i = 0; i < groupedFrames.length; i++) {
        table = getTableRows(groupedFrames[i], table, labels[i].split(','));
      }
    } else {
      table = getTableRows(dfs, table, []);
    }
  }

  return table;
}
|
||||
}
|
||||
|
||||
function getTableCols(dfs: DataFrame[], table: TableModel, target: InfluxQuery): TableModel {
|
||||
const selectedParams = getSelectedParams(target);
|
||||
|
||||
dfs[0].fields.forEach((field) => {
|
||||
// Time col
|
||||
if (field.name === 'time') {
|
||||
table.columns.push({ text: 'Time', type: FieldType.time });
|
||||
}
|
||||
|
||||
// Group by (label) column(s)
|
||||
else if (field.name === 'value') {
|
||||
if (field.labels) {
|
||||
Object.keys(field.labels).forEach((key) => {
|
||||
table.columns.push({ text: key });
|
||||
});
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
// Select (metric) column(s)
|
||||
for (let i = 0; i < selectedParams.length; i++) {
|
||||
table.columns.push({ text: selectedParams[i] });
|
||||
}
|
||||
|
||||
return table;
|
||||
}
|
||||
|
||||
function getTableRows(dfs: DataFrame[], table: TableModel, labels: string[]): TableModel {
|
||||
const values = dfs[0].fields[0].values.toArray();
|
||||
|
||||
for (let i = 0; i < values.length; i++) {
|
||||
const time = values[i];
|
||||
const metrics = dfs.map((df: DataFrame) => {
|
||||
return df.fields[1].values.toArray()[i];
|
||||
});
|
||||
table.rows.push([time, ...labels, ...metrics]);
|
||||
}
|
||||
return table;
|
||||
}
|
||||
|
||||
export function getSelectedParams(target: InfluxQuery): string[] {
|
||||
let allParams: string[] = [];
|
||||
target.select?.forEach((select) => {
|
||||
const selector = select.filter((x) => x.type !== 'field');
|
||||
if (selector.length > 0) {
|
||||
allParams.push(selector[0].type);
|
||||
} else {
|
||||
if (select[0] && select[0].params && select[0].params[0]) {
|
||||
allParams.push(select[0].params[0].toString());
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
let uniqueParams: string[] = [];
|
||||
allParams.forEach((param) => {
|
||||
uniqueParams.push(incrementName(param, param, uniqueParams, 0));
|
||||
});
|
||||
|
||||
return uniqueParams;
|
||||
}
|
||||
|
||||
function incrementName(name: string, nameIncremenet: string, params: string[], index: number): string {
|
||||
if (params.indexOf(nameIncremenet) > -1) {
|
||||
index++;
|
||||
return incrementName(name, name + '_' + index, params, index);
|
||||
}
|
||||
return nameIncremenet;
|
||||
}
|
||||
|
||||
function addUnique(s: Set<string>, value: string | number) {
|
||||
|
||||
@@ -1,5 +1,7 @@
|
||||
import { size } from 'lodash';
|
||||
import ResponseParser from '../response_parser';
|
||||
import ResponseParser, { getSelectedParams } from '../response_parser';
|
||||
import InfluxQueryModel from '../influx_query_model';
|
||||
import { FieldType, MutableDataFrame } from '@grafana/data';
|
||||
|
||||
describe('influxdb response parser', () => {
|
||||
const parser = new ResponseParser();
|
||||
@@ -202,4 +204,82 @@ describe('influxdb response parser', () => {
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
// Verifies that getSelectedParams derives one column name per SELECT
// clause and de-duplicates repeated names with numeric suffixes.
describe('Should name the selected params correctly', () => {
  it('when there are no duplicates', () => {
    const query = new InfluxQueryModel({
      refId: 'A',
      select: [[{ type: 'field', params: ['usage_iowait'] }], [{ type: 'field', params: ['usage_idle'] }]],
    });

    const selectedParams = getSelectedParams(query.target);

    expect(selectedParams.length).toBe(2);
    expect(selectedParams[0]).toBe('usage_iowait');
    expect(selectedParams[1]).toBe('usage_idle');
  });

  it('when there are duplicates', () => {
    const query = new InfluxQueryModel({
      refId: 'A',
      select: [
        [{ type: 'field', params: ['usage_iowait'] }],
        [{ type: 'field', params: ['usage_iowait'] }],
        [{ type: 'field', params: ['usage_iowait'] }],
        [{ type: 'field', params: ['usage_idle'] }],
      ],
    });

    const selectedParams = getSelectedParams(query.target);

    expect(selectedParams.length).toBe(4);
    expect(selectedParams[0]).toBe('usage_iowait');
    // Repeated names get an incrementing suffix rather than colliding.
    expect(selectedParams[1]).toBe('usage_iowait_1');
    expect(selectedParams[2]).toBe('usage_iowait_2');
    expect(selectedParams[3]).toBe('usage_idle');
  });
});
|
||||
|
||||
// Verifies ResponseParser.getTable(): column construction, row
// flattening, and that frame meta (preferredVisualisationType,
// executedQueryString) is propagated onto the resulting TableModel.
describe('Should get the table', () => {
  const dataFrame = new MutableDataFrame({
    fields: [
      { name: 'time', type: FieldType.time, values: [1640257340000] },
      { name: 'value', type: FieldType.number, values: [3234232323] },
    ],
    meta: {
      executedQueryString: 'SELECT everything!',
    },
  });

  const query = new InfluxQueryModel({
    refId: 'A',
    select: [[{ type: 'field', params: ['usage_iowait'] }], [{ type: 'field', params: ['usage_idle'] }]],
  });

  const table = parser.getTable([dataFrame], query.target, {
    preferredVisualisationType: 'table',
  });

  it('columns correctly', () => {
    // Time column plus one column per selected param.
    expect(table.columns.length).toBe(3);
    expect(table.columns[0].text).toBe('Time');
    expect(table.columns[1].text).toBe('usage_iowait');
    expect(table.columns[2].text).toBe('usage_idle');
  });

  it('rows correctly', () => {
    // No group-by labels here: a row is [time, value].
    expect(table.rows.length).toBe(1);
    expect(table.rows[0].length).toBe(2);
    expect(table.rows[0][0]).toBe(1640257340000);
    expect(table.rows[0][1]).toBe(3234232323);
  });

  it('preferredVisualisationType correctly', () => {
    expect(table.meta?.preferredVisualisationType).toBe('table');
  });

  it('executedQueryString correctly', () => {
    // Comes from the first frame's meta, not from the query target.
    expect(table.meta?.executedQueryString).toBe('SELECT everything!');
  });
});
|
||||
});
|
||||
|
||||
Reference in New Issue
Block a user