InfluxDB: Backend parser compatibility with frontend parser (#69865)

* Reformatting and restructuring

* Update unit test

* Always send the default retention policy as first element

* Fix typo

* Update test

* Update test once more

* Field names start with capital letters

* Simplify the condition

* Case-insensitive checks

* Fix typo

* Update response_parser test

* Update imports
Author: ismail simsek, 2023-06-26 15:03:02 +03:00 (committed by GitHub)
parent e50cf55649
commit 903af7e29c
6 changed files with 66 additions and 72 deletions
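
What the change boils down to, per the commit messages above: the backend InfluxQL response parser now names its frame fields "Time" and "Value" (capitalized), matching the frames the frontend parser builds, and the frontend's column handling compares field names case-insensitively so either casing is accepted. Below is a minimal Go sketch of the frame shape the backend now emits; it is illustrative only and not taken from this diff, and the package name, helper, and sample values are assumptions.

package example

import (
    "time"

    "github.com/grafana/grafana-plugin-sdk-go/data"
)

// fptr is a small illustrative helper for building nullable float columns.
func fptr(v float64) *float64 { return &v }

// exampleFrame mirrors the shape produced by the backend parser after this
// change: a "Time" field plus a labeled "Value" field whose display name is
// set via DisplayNameFromDS.
func exampleFrame() *data.Frame {
    timeField := data.NewField("Time", nil, []time.Time{
        time.Date(1970, 1, 1, 0, 0, 0, 100000000, time.UTC),
    })
    valueField := data.NewField("Value", data.Labels{"datacenter": "America"}, []*float64{
        fptr(52.0),
    })
    valueField.SetConfig(&data.FieldConfig{DisplayNameFromDS: "cpu.mean { datacenter: America }"})
    return data.NewFrame("cpu.mean { datacenter: America }", timeField, valueField)
}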


@@ -4186,9 +4186,6 @@ exports[`better eslint`] = {
[0, 0, 0, "Unexpected any. Specify a different type.", "2"],
[0, 0, 0, "Do not use any type assertions.", "3"]
],
"public/app/plugins/datasource/influxdb/specs/response_parser.test.ts:5381": [
[0, 0, 0, "Unexpected any. Specify a different type.", "0"]
],
"public/app/plugins/datasource/jaeger/datasource.ts:5381": [
[0, 0, 0, "Unexpected any. Specify a different type.", "0"],
[0, 0, 0, "Unexpected any. Specify a different type.", "1"],


@@ -122,7 +122,7 @@ func transformRows(rows []Row, query Query) data.Frames {
}
}
field := data.NewField("value", nil, values)
field := data.NewField("Value", nil, values)
frames = append(frames, data.NewFrame(row.Name, field))
} else {
for colIndex, column := range row.Columns {
@@ -169,21 +169,21 @@ func transformRows(rows []Row, query Query) data.Frames {
name := string(formatFrameName(row, column, query, frameName[:]))
timeField := data.NewField("time", nil, timeArray)
timeField := data.NewField("Time", nil, timeArray)
if valType == "string" {
valueField := data.NewField("value", row.Tags, stringArray)
valueField := data.NewField("Value", row.Tags, stringArray)
valueField.SetConfig(&data.FieldConfig{DisplayNameFromDS: name})
frames = append(frames, newDataFrame(name, query.RawQuery, timeField, valueField))
} else if valType == "json.Number" {
valueField := data.NewField("value", row.Tags, floatArray)
valueField := data.NewField("Value", row.Tags, floatArray)
valueField.SetConfig(&data.FieldConfig{DisplayNameFromDS: name})
frames = append(frames, newDataFrame(name, query.RawQuery, timeField, valueField))
} else if valType == "bool" {
valueField := data.NewField("value", row.Tags, boolArray)
valueField := data.NewField("Value", row.Tags, boolArray)
valueField.SetConfig(&data.FieldConfig{DisplayNameFromDS: name})
frames = append(frames, newDataFrame(name, query.RawQuery, timeField, valueField))
} else if valType == "null" {
valueField := data.NewField("value", row.Tags, floatArray)
valueField := data.NewField("Value", row.Tags, floatArray)
valueField.SetConfig(&data.FieldConfig{DisplayNameFromDS: name})
frames = append(frames, newDataFrame(name, query.RawQuery, timeField, valueField))
}


@@ -70,12 +70,12 @@ func TestInfluxdbResponseParser(t *testing.T) {
labels, err := data.LabelsFromString("datacenter=America")
require.Nil(t, err)
floatField := data.NewField("value", labels, []*float64{
floatField := data.NewField("Value", labels, []*float64{
util.Pointer(222.0), util.Pointer(222.0), nil,
})
floatField.Config = &data.FieldConfig{DisplayNameFromDS: "cpu.mean { datacenter: America }"}
floatFrame := data.NewFrame("cpu.mean { datacenter: America }",
data.NewField("time", nil,
data.NewField("Time", nil,
[]time.Time{
time.Date(1970, 1, 1, 0, 0, 0, 111000000, time.UTC),
time.Date(1970, 1, 1, 0, 0, 0, 111000000, time.UTC),
@@ -86,12 +86,12 @@ func TestInfluxdbResponseParser(t *testing.T) {
floatFrame.Meta = &data.FrameMeta{ExecutedQueryString: "Test raw query"}
string_test := "/usr/path"
stringField := data.NewField("value", labels, []*string{
stringField := data.NewField("Value", labels, []*string{
nil, &string_test, &string_test,
})
stringField.Config = &data.FieldConfig{DisplayNameFromDS: "cpu.path { datacenter: America }"}
stringFrame := data.NewFrame("cpu.path { datacenter: America }",
data.NewField("time", nil,
data.NewField("Time", nil,
[]time.Time{
time.Date(1970, 1, 1, 0, 0, 0, 111000000, time.UTC),
time.Date(1970, 1, 1, 0, 0, 0, 111000000, time.UTC),
@@ -103,12 +103,12 @@ func TestInfluxdbResponseParser(t *testing.T) {
bool_true := true
bool_false := false
boolField := data.NewField("value", labels, []*bool{
boolField := data.NewField("Value", labels, []*bool{
nil, &bool_false, &bool_true,
})
boolField.Config = &data.FieldConfig{DisplayNameFromDS: "cpu.isActive { datacenter: America }"}
boolFrame := data.NewFrame("cpu.isActive { datacenter: America }",
data.NewField("time", nil,
data.NewField("Time", nil,
[]time.Time{
time.Date(1970, 1, 1, 0, 0, 0, 111000000, time.UTC),
time.Date(1970, 1, 1, 0, 0, 0, 111000000, time.UTC),
@@ -157,7 +157,7 @@ func TestInfluxdbResponseParser(t *testing.T) {
var queries []Query
queries = append(queries, Query{RefID: "metricFindQuery"})
newField := data.NewField("value", nil, []string{
newField := data.NewField("Value", nil, []string{
"cpu", "disk", "logs",
})
testFrame := data.NewFrame("cpu",
@@ -196,7 +196,7 @@ func TestInfluxdbResponseParser(t *testing.T) {
var queries []Query
queries = append(queries, Query{RawQuery: "SHOW TAG VALUES", RefID: "metricFindQuery"})
newField := data.NewField("value", nil, []string{
newField := data.NewField("Value", nil, []string{
"cpu-total", "cpu0", "cpu1",
})
testFrame := data.NewFrame("cpu",
@@ -296,12 +296,12 @@ func TestInfluxdbResponseParser(t *testing.T) {
query := &Query{}
newField := data.NewField("value", nil, []*float64{
newField := data.NewField("Value", nil, []*float64{
util.Pointer(50.0), nil, util.Pointer(52.0),
})
newField.Config = &data.FieldConfig{DisplayNameFromDS: "cpu.mean"}
testFrame := data.NewFrame("cpu.mean",
data.NewField("time", nil,
data.NewField("Time", nil,
[]time.Time{
time.Date(1970, 1, 1, 0, 0, 0, 100000000, time.UTC),
time.Date(1970, 1, 1, 0, 0, 0, 101000000, time.UTC),
@@ -345,12 +345,12 @@ func TestInfluxdbResponseParser(t *testing.T) {
query := &Query{}
newField := data.NewField("value", nil, []*float64{
newField := data.NewField("Value", nil, []*float64{
util.Pointer(50.0), util.Pointer(52.0),
})
newField.Config = &data.FieldConfig{DisplayNameFromDS: "cpu.mean"}
testFrame := data.NewFrame("cpu.mean",
data.NewField("time", nil,
data.NewField("Time", nil,
[]time.Time{
time.Date(1970, 1, 1, 0, 0, 0, 100000000, time.UTC),
time.Date(1970, 1, 1, 0, 0, 0, 102000000, time.UTC),
@@ -398,12 +398,12 @@ func TestInfluxdbResponseParser(t *testing.T) {
query := &Query{Alias: "series alias"}
labels, err := data.LabelsFromString("/cluster/name/=Cluster/, @cluster@name@=Cluster@, cluster-name=Cluster, datacenter=America, dc.region.name=Northeast")
require.Nil(t, err)
newField := data.NewField("value", labels, []*float64{
newField := data.NewField("Value", labels, []*float64{
util.Pointer(222.0),
})
newField.Config = &data.FieldConfig{DisplayNameFromDS: "series alias"}
testFrame := data.NewFrame("series alias",
data.NewField("time", nil,
data.NewField("Time", nil,
[]time.Time{
time.Date(1970, 1, 1, 0, 0, 0, 111000000, time.UTC),
}),
@@ -439,7 +439,7 @@ func TestInfluxdbResponseParser(t *testing.T) {
}
name = "alias sum"
testFrame.Name = name
newField = data.NewField("value", labels, []*float64{
newField = data.NewField("Value", labels, []*float64{
util.Pointer(333.0),
})
testFrame.Fields[1] = newField
@@ -453,7 +453,7 @@ func TestInfluxdbResponseParser(t *testing.T) {
frame = result.Responses["A"]
name = "alias America"
testFrame.Name = name
newField = data.NewField("value", labels, []*float64{
newField = data.NewField("Value", labels, []*float64{
util.Pointer(222.0),
})
testFrame.Fields[1] = newField
@@ -467,7 +467,7 @@ func TestInfluxdbResponseParser(t *testing.T) {
frame = result.Responses["A"]
name = "alias America/America"
testFrame.Name = name
newField = data.NewField("value", labels, []*float64{
newField = data.NewField("Value", labels, []*float64{
util.Pointer(222.0),
})
testFrame.Fields[1] = newField
@@ -663,12 +663,12 @@ func TestInfluxdbResponseParser(t *testing.T) {
queries = append(queries, *queryB)
labels, err := data.LabelsFromString("datacenter=America")
require.Nil(t, err)
newField := data.NewField("value", labels, []*float64{
newField := data.NewField("Value", labels, []*float64{
util.Pointer(222.0), util.Pointer(222.0), nil,
})
newField.Config = &data.FieldConfig{DisplayNameFromDS: "cpu.mean { datacenter: America }"}
testFrame := data.NewFrame("cpu.mean { datacenter: America }",
data.NewField("time", nil,
data.NewField("Time", nil,
[]time.Time{
time.Date(1970, 1, 1, 0, 0, 0, 111000000, time.UTC),
time.Date(1970, 1, 1, 0, 0, 0, 111000000, time.UTC),
@@ -792,7 +792,7 @@ func TestResponseParser_Parse_RetentionPolicy(t *testing.T) {
var queries []Query
queries = append(queries, Query{RefID: "metricFindQuery", RawQuery: "SHOW RETENTION POLICIES"})
policyFrame := data.NewFrame("",
data.NewField("value", nil, []string{
data.NewField("Value", nil, []string{
"bar", "autogen", "5m_avg", "1m_avg",
}),
)
@@ -824,10 +824,10 @@ func TestResponseParser_Parse(t *testing.T) {
]
}]}]}`,
f: func(t *testing.T, got *backend.QueryDataResponse) {
newField := data.NewField("value", nil, []*float64{nil, nil, util.Pointer(52.0)})
newField := data.NewField("Value", nil, []*float64{nil, nil, util.Pointer(52.0)})
newField.Config = &data.FieldConfig{DisplayNameFromDS: "cpu.mean"}
testFrame := data.NewFrame("cpu.mean",
data.NewField("time", nil,
data.NewField("Time", nil,
[]time.Time{
time.Date(1970, 1, 1, 0, 0, 0, 100000000, time.UTC),
time.Date(1970, 1, 1, 0, 0, 0, 101000000, time.UTC),
@@ -851,10 +851,10 @@ func TestResponseParser_Parse(t *testing.T) {
]
}]}]}`,
f: func(t *testing.T, got *backend.QueryDataResponse) {
newField := data.NewField("value", nil, []*float64{nil, nil, nil})
newField := data.NewField("Value", nil, []*float64{nil, nil, nil})
newField.Config = &data.FieldConfig{DisplayNameFromDS: "cpu.mean"}
testFrame := data.NewFrame("cpu.mean",
data.NewField("time", nil,
data.NewField("Time", nil,
[]time.Time{
time.Date(1970, 1, 1, 0, 0, 0, 100000000, time.UTC),
time.Date(1970, 1, 1, 0, 0, 0, 101000000, time.UTC),


@@ -163,31 +163,32 @@ export default class InfluxDatasource extends DataSourceWithBackend<InfluxQuery,
            };
          }
-          const seriesList: any[] = [];
          const groupedFrames = groupBy(res.data, (x) => x.refId);
-          if (Object.keys(groupedFrames).length > 0) {
-            filteredRequest.targets.forEach((target) => {
-              const filteredFrames = groupedFrames[target.refId] ?? [];
-              switch (target.resultFormat) {
-                case 'logs':
-                case 'table':
-                  seriesList.push(
-                    this.responseParser.getTable(filteredFrames, target, {
-                      preferredVisualisationType: target.resultFormat,
-                    })
-                  );
-                  break;
-                default: {
-                  for (let i = 0; i < filteredFrames.length; i++) {
-                    seriesList.push(filteredFrames[i]);
-                  }
-                  break;
-                }
-              }
-            });
-          }
+          if (Object.keys(groupedFrames).length === 0) {
+            return { data: [] };
+          }
+          const seriesList: any[] = [];
+          filteredRequest.targets.forEach((target) => {
+            const filteredFrames = groupedFrames[target.refId] ?? [];
+            switch (target.resultFormat) {
+              case 'logs':
+              case 'table':
+                seriesList.push(
+                  this.responseParser.getTable(filteredFrames, target, {
+                    preferredVisualisationType: target.resultFormat,
+                  })
+                );
+                break;
+              default: {
+                for (let i = 0; i < filteredFrames.length; i++) {
+                  seriesList.push(filteredFrames[i]);
+                }
+                break;
+              }
+            }
+          });
          return { data: seriesList };
        })
      );


@@ -1,18 +1,15 @@
import { size } from 'lodash';
import { of } from 'rxjs';
import { TemplateSrvStub } from 'test/specs/helpers';
import { AnnotationEvent, DataQueryRequest, FieldType, MutableDataFrame } from '@grafana/data';
import { FetchResponse } from '@grafana/runtime';
import config from 'app/core/config';
import { backendSrv } from 'app/core/services/backend_srv'; // will use the version in __mocks__
-import InfluxDatasource from '../datasource';
-import InfluxQueryModel from '../influx_query_model';
-import ResponseParser, { getSelectedParams } from '../response_parser';
-//@ts-ignore
-const templateSrv = new TemplateSrvStub();
+import InfluxQueryModel from './influx_query_model';
+import ResponseParser, { getSelectedParams } from './response_parser';
+import { getMockDS, getMockDSInstanceSettings } from './specs/mocks';
+import { InfluxQuery } from './types';
jest.mock('@grafana/runtime', () => ({
...jest.requireActual('@grafana/runtime'),
@@ -300,13 +297,14 @@ describe('influxdb response parser', () => {
});
describe('When issuing annotationQuery', () => {
-const ctx: any = {
-instanceSettings: { url: 'url', name: 'influxDb' },
+const ctx = {
+ds: getMockDS(getMockDSInstanceSettings()),
};
const fetchMock = jest.spyOn(backendSrv, 'fetch');
-const annotation = {
+const annotation: InfluxQuery = {
refId: 'A',
fromAnnotations: true,
name: 'Anno',
query: 'select * from logs where time >= now() - 15m and time <= now()',
@@ -424,8 +422,6 @@ describe('influxdb response parser', () => {
} as FetchResponse);
});
-ctx.ds = new InfluxDatasource(ctx.instanceSettings, templateSrv);
-ctx.ds.access = 'proxy';
config.featureToggles.influxdbBackendMigration = true;
response = await ctx.ds.annotationEvents(queryOptions, annotation);
});


@@ -183,12 +183,12 @@ function getTableCols(dfs: DataFrame[], table: TableModel, target: InfluxQuery):
dfs[0].fields.forEach((field) => {
// Time col
-if (field.name === 'time') {
+if (field.name.toLowerCase() === 'time') {
table.columns.push({ text: 'Time', type: FieldType.time });
}
// Group by (label) column(s)
-else if (field.name === 'value') {
+else if (field.name.toLowerCase() === 'value') {
if (field.labels) {
Object.keys(field.labels).forEach((key) => {
table.columns.push({ text: key });
@@ -269,12 +269,12 @@ export function getSelectedParams(target: InfluxQuery): string[] {
return uniqueParams;
}
-function incrementName(name: string, nameIncremenet: string, params: string[], index: number): string {
-if (params.indexOf(nameIncremenet) > -1) {
+function incrementName(name: string, nameIncrement: string, params: string[], index: number): string {
+if (params.indexOf(nameIncrement) > -1) {
index++;
return incrementName(name, name + '_' + index, params, index);
}
-return nameIncremenet;
+return nameIncrement;
}
function rawQuerySelectedFieldsInDataframe(query: string | undefined, dfs: DataFrame[]) {