Influxdb: Re-introduce backend migration feature toggle (#64829)

* Revert "Influxdb: Remove backend migration feature toggle (#61308)"

  This reverts commit 67c02f66.

* Put feature toggle back

Parent: ebb54aea8d
Commit: 0823672fce
@@ -5009,19 +5009,19 @@ exports[`better eslint`] = {
       [0, 0, 0, "Unexpected any. Specify a different type.", "0"]
     ],
     "public/app/plugins/datasource/influxdb/datasource.ts:5381": [
-      [0, 0, 0, "Unexpected any. Specify a different type.", "0"],
+      [0, 0, 0, "Do not use any type assertions.", "0"],
       [0, 0, 0, "Unexpected any. Specify a different type.", "1"],
       [0, 0, 0, "Unexpected any. Specify a different type.", "2"],
       [0, 0, 0, "Unexpected any. Specify a different type.", "3"],
       [0, 0, 0, "Unexpected any. Specify a different type.", "4"],
-      [0, 0, 0, "Do not use any type assertions.", "5"],
-      [0, 0, 0, "Unexpected any. Specify a different type.", "6"],
+      [0, 0, 0, "Unexpected any. Specify a different type.", "5"],
+      [0, 0, 0, "Do not use any type assertions.", "6"],
       [0, 0, 0, "Unexpected any. Specify a different type.", "7"],
       [0, 0, 0, "Unexpected any. Specify a different type.", "8"],
       [0, 0, 0, "Unexpected any. Specify a different type.", "9"],
       [0, 0, 0, "Unexpected any. Specify a different type.", "10"],
       [0, 0, 0, "Unexpected any. Specify a different type.", "11"],
-      [0, 0, 0, "Do not use any type assertions.", "12"],
+      [0, 0, 0, "Unexpected any. Specify a different type.", "12"],
       [0, 0, 0, "Unexpected any. Specify a different type.", "13"],
       [0, 0, 0, "Unexpected any. Specify a different type.", "14"],
       [0, 0, 0, "Unexpected any. Specify a different type.", "15"],
@@ -5031,13 +5031,18 @@ exports[`better eslint`] = {
       [0, 0, 0, "Unexpected any. Specify a different type.", "19"],
       [0, 0, 0, "Unexpected any. Specify a different type.", "20"],
       [0, 0, 0, "Unexpected any. Specify a different type.", "21"],
-      [0, 0, 0, "Unexpected any. Specify a different type.", "22"],
+      [0, 0, 0, "Do not use any type assertions.", "22"],
       [0, 0, 0, "Unexpected any. Specify a different type.", "23"],
       [0, 0, 0, "Unexpected any. Specify a different type.", "24"],
       [0, 0, 0, "Unexpected any. Specify a different type.", "25"],
       [0, 0, 0, "Unexpected any. Specify a different type.", "26"],
       [0, 0, 0, "Unexpected any. Specify a different type.", "27"],
-      [0, 0, 0, "Unexpected any. Specify a different type.", "28"]
+      [0, 0, 0, "Unexpected any. Specify a different type.", "28"],
+      [0, 0, 0, "Unexpected any. Specify a different type.", "29"],
+      [0, 0, 0, "Unexpected any. Specify a different type.", "30"],
+      [0, 0, 0, "Unexpected any. Specify a different type.", "31"],
+      [0, 0, 0, "Unexpected any. Specify a different type.", "32"],
+      [0, 0, 0, "Unexpected any. Specify a different type.", "33"]
     ],
     "public/app/plugins/datasource/influxdb/influx_query_model.ts:5381": [
       [0, 0, 0, "Unexpected any. Specify a different type.", "0"],
@@ -5062,6 +5067,27 @@ exports[`better eslint`] = {
       [0, 0, 0, "Unexpected any. Specify a different type.", "19"],
       [0, 0, 0, "Unexpected any. Specify a different type.", "20"]
     ],
+    "public/app/plugins/datasource/influxdb/influx_series.ts:5381": [
+      [0, 0, 0, "Unexpected any. Specify a different type.", "0"],
+      [0, 0, 0, "Unexpected any. Specify a different type.", "1"],
+      [0, 0, 0, "Unexpected any. Specify a different type.", "2"],
+      [0, 0, 0, "Unexpected any. Specify a different type.", "3"],
+      [0, 0, 0, "Unexpected any. Specify a different type.", "4"],
+      [0, 0, 0, "Unexpected any. Specify a different type.", "5"],
+      [0, 0, 0, "Unexpected any. Specify a different type.", "6"],
+      [0, 0, 0, "Unexpected any. Specify a different type.", "7"],
+      [0, 0, 0, "Unexpected any. Specify a different type.", "8"],
+      [0, 0, 0, "Unexpected any. Specify a different type.", "9"],
+      [0, 0, 0, "Unexpected any. Specify a different type.", "10"],
+      [0, 0, 0, "Unexpected any. Specify a different type.", "11"],
+      [0, 0, 0, "Unexpected any. Specify a different type.", "12"],
+      [0, 0, 0, "Unexpected any. Specify a different type.", "13"],
+      [0, 0, 0, "Unexpected any. Specify a different type.", "14"],
+      [0, 0, 0, "Unexpected any. Specify a different type.", "15"],
+      [0, 0, 0, "Unexpected any. Specify a different type.", "16"],
+      [0, 0, 0, "Unexpected any. Specify a different type.", "17"],
+      [0, 0, 0, "Unexpected any. Specify a different type.", "18"]
+    ],
     "public/app/plugins/datasource/influxdb/migrations.ts:5381": [
       [0, 0, 0, "Unexpected any. Specify a different type.", "0"]
     ],
@@ -93,6 +93,7 @@ Alpha features might be changed or removed without prior notice.
 | `traceqlSearch` | Enables the 'TraceQL Search' tab for the Tempo datasource which provides a UI to generate TraceQL queries |
 | `prometheusMetricEncyclopedia` | Replaces the Prometheus query builder metric select option with a paginated and filterable component |
 | `timeSeriesTable` | Enable time series table transformer & sparkline cell type |
+| `influxdbBackendMigration` | Query InfluxDB InfluxQL without the proxy |
 
 ## Development feature toggles
 
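Note: the row above documents an alpha toggle. On the frontend the same flag surfaces as an optional boolean on the generated `FeatureToggles` interface (see the change below), so code can gate on it with compile-time checking. A minimal, hypothetical usage sketch:

```ts
import { config } from '@grafana/runtime';

// featureToggles is typed by the generated FeatureToggles interface,
// so a typo in the flag name fails to compile instead of silently
// reading `undefined`. With no Expression in the registry entry below,
// the flag defaults to off.
if (config.featureToggles.influxdbBackendMigration) {
  // InfluxQL queries will be executed by the Grafana backend
}
```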
@@ -83,4 +83,5 @@ export interface FeatureToggles {
   traceqlSearch?: boolean;
   prometheusMetricEncyclopedia?: boolean;
   timeSeriesTable?: boolean;
+  influxdbBackendMigration?: boolean;
 }
@@ -265,7 +265,7 @@ var (
 			Name:        "cloudWatchCrossAccountQuerying",
 			Description: "Enables cross-account querying in CloudWatch datasources",
 			State:       FeatureStateStable,
-			Expression:  "true", //enabled by default
+			Expression:  "true", // enabled by default
 			Owner:       awsPluginsSquad,
 		},
 		{
@@ -382,7 +382,7 @@ var (
 			Name:         "logsSampleInExplore",
 			Description:  "Enables access to the logs sample feature in Explore",
 			State:        FeatureStateStable,
-			Expression:   "true", //turned on by default
+			Expression:   "true", // turned on by default
 			FrontendOnly: true,
 			Owner:        grafanaObservabilityLogsSquad,
 		},
@@ -434,5 +434,12 @@ var (
 			FrontendOnly: true,
 			Owner:        appO11ySquad,
 		},
+		{
+			Name:         "influxdbBackendMigration",
+			Description:  "Query InfluxDB InfluxQL without the proxy",
+			State:        FeatureStateAlpha,
+			FrontendOnly: true,
+			Owner:        grafanaObservabilityMetricsSquad,
+		},
 	}
 )
@@ -64,3 +64,4 @@ drawerDataSourcePicker,alpha,@grafana/grafana-bi-squad,false,false,false,true
 traceqlSearch,alpha,@grafana/observability-traces-and-profiling,false,false,false,true
 prometheusMetricEncyclopedia,alpha,@grafana/observability-metrics,false,false,false,true
 timeSeriesTable,alpha,@grafana/app-o11y,false,false,false,true
+influxdbBackendMigration,alpha,@grafana/observability-metrics,false,false,false,true
@@ -266,4 +266,8 @@ const (
 	// FlagTimeSeriesTable
 	// Enable time series table transformer & sparkline cell type
 	FlagTimeSeriesTable = "timeSeriesTable"
+
+	// FlagInfluxdbBackendMigration
+	// Query InfluxDB InfluxQL without the proxy
+	FlagInfluxdbBackendMigration = "influxdbBackendMigration"
 )
@@ -1,16 +1,23 @@
-import { extend, groupBy, has, isString, omit, pick, reduce } from 'lodash';
+import { cloneDeep, extend, groupBy, has, isString, map as _map, omit, pick, reduce } from 'lodash';
 import { lastValueFrom, merge, Observable, of, throwError } from 'rxjs';
 import { catchError, map } from 'rxjs/operators';
 
 import {
   AnnotationEvent,
+  ArrayVector,
+  DataFrame,
   DataQueryError,
   DataQueryRequest,
   DataQueryResponse,
   DataSourceInstanceSettings,
   dateMath,
+  FieldType,
   MetricFindValue,
+  QueryResultMeta,
   ScopedVars,
+  TIME_SERIES_TIME_FIELD_NAME,
+  TIME_SERIES_VALUE_FIELD_NAME,
+  TimeSeries,
   toDataFrame,
 } from '@grafana/data';
 import {
@@ -20,18 +27,91 @@ import {
   frameToMetricFindValue,
   getBackendSrv,
 } from '@grafana/runtime';
+import config from 'app/core/config';
 import { getTemplateSrv, TemplateSrv } from 'app/features/templating/template_srv';
 
 import { AnnotationEditor } from './components/AnnotationEditor';
 import { FluxQueryEditor } from './components/FluxQueryEditor';
 import { BROWSER_MODE_DISABLED_MESSAGE } from './constants';
 import InfluxQueryModel from './influx_query_model';
+import InfluxSeries from './influx_series';
 import { prepareAnnotation } from './migrations';
 import { buildRawQuery } from './queryUtils';
 import { InfluxQueryBuilder } from './query_builder';
 import ResponseParser from './response_parser';
 import { InfluxOptions, InfluxQuery, InfluxVersion } from './types';
 
+// we detect the field type based on the value-array
+function getFieldType(values: unknown[]): FieldType {
+  // the values-array may contain a lot of nulls.
+  // we need the first not-null item
+  const firstNotNull = values.find((v) => v !== null);
+
+  if (firstNotNull === undefined) {
+    // we could not find any not-null values
+    return FieldType.number;
+  }
+
+  const valueType = typeof firstNotNull;
+
+  switch (valueType) {
+    case 'string':
+      return FieldType.string;
+    case 'boolean':
+      return FieldType.boolean;
+    case 'number':
+      return FieldType.number;
+    default:
+      // this should never happen, influxql values
+      // can only be numbers, strings and booleans.
+      throw new Error(`InfluxQL: invalid value type ${valueType}`);
+  }
+}
+
+// this conversion function is specialized to work with the timeseries
+// data returned by InfluxDatasource.getTimeSeries()
+function timeSeriesToDataFrame(timeSeries: TimeSeries): DataFrame {
+  const times: number[] = [];
+  const values: unknown[] = [];
+
+  // the data we process here is not correctly typed.
+  // the typescript types say every data-point is number|null,
+  // but in fact it can be string or boolean too.
+  const points = timeSeries.datapoints;
+  for (const point of points) {
+    values.push(point[0]);
+    times.push(point[1] as number);
+  }
+
+  const timeField = {
+    name: TIME_SERIES_TIME_FIELD_NAME,
+    type: FieldType.time,
+    config: {},
+    values: new ArrayVector<number>(times),
+  };
+
+  const valueField = {
+    name: TIME_SERIES_VALUE_FIELD_NAME,
+    type: getFieldType(values),
+    config: {
+      displayNameFromDS: timeSeries.title,
+    },
+    values: new ArrayVector<unknown>(values),
+    labels: timeSeries.tags,
+  };
+
+  const fields = [timeField, valueField];
+
+  return {
+    name: timeSeries.target,
+    refId: timeSeries.refId,
+    meta: timeSeries.meta,
+    fields,
+    length: values.length,
+  };
+}
+
 export default class InfluxDatasource extends DataSourceWithBackend<InfluxQuery, InfluxOptions> {
   type: string;
   urls: string[];
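As an illustration of the two helpers added above, the sketch below (input values made up, shaped like the `TimeSeries` objects that `InfluxSeries.getTimeSeries()` produces later in this diff, i.e. `datapoints` as `[value, timestamp]` pairs) shows how a classic-format series becomes a `DataFrame`:

```ts
const ts: TimeSeries = {
  target: 'cpu.mean {server: server1}',
  title: 'cpu.mean {server: server1}',
  tags: { server: 'server1' },
  datapoints: [
    [10, 1431946625000], // [value, timestamp]
    [null, 1431946626000], // nulls are skipped when detecting the field type
    [20, 1431946627000],
  ],
};

const frame = timeSeriesToDataFrame(ts);
// frame.fields[0]: the time field (FieldType.time)
// frame.fields[1]: the value field; getFieldType() sees 10 (a number)
//                  as the first non-null value, so FieldType.number
// frame.length === 3
```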
@@ -103,62 +183,48 @@ export default class InfluxDatasource extends DataSourceWithBackend<InfluxQuery,
       return super.query(filteredRequest);
     }
 
-    if (filteredRequest.targets.some((target: InfluxQuery) => target.fromAnnotations)) {
-      const streams: Array<Observable<DataQueryResponse>> = [];
-
-      for (const target of filteredRequest.targets) {
-        if (target.query) {
-          streams.push(
-            new Observable((subscriber) => {
-              this.annotationEvents(filteredRequest, target)
-                .then((events) => subscriber.next({ data: [toDataFrame(events)] }))
-                .catch((ex) => subscriber.error(new Error(ex)))
-                .finally(() => subscriber.complete());
-            })
-          );
-        }
-      }
-
-      return merge(...streams);
-    }
-
-    return super.query(filteredRequest).pipe(
-      map((res) => {
-        if (res.error) {
-          throw {
-            message: 'InfluxDB Error: ' + res.error.message,
-            res,
-          };
-        }
-
-        const seriesList: any[] = [];
-
-        const groupedFrames = groupBy(res.data, (x) => x.refId);
-        if (Object.keys(groupedFrames).length > 0) {
-          filteredRequest.targets.forEach((target) => {
-            const filteredFrames = groupedFrames[target.refId] ?? [];
-            switch (target.resultFormat) {
-              case 'logs':
-              case 'table':
-                seriesList.push(
-                  this.responseParser.getTable(filteredFrames, target, {
-                    preferredVisualisationType: target.resultFormat,
-                  })
-                );
-                break;
-              default: {
-                for (let i = 0; i < filteredFrames.length; i++) {
-                  seriesList.push(filteredFrames[i]);
-                }
-                break;
-              }
-            }
-          });
-        }
-
-        return { data: seriesList };
-      })
-    );
+    if (this.isMigrationToggleOnAndIsAccessProxy()) {
+      return super.query(filteredRequest).pipe(
+        map((res) => {
+          if (res.error) {
+            throw {
+              message: 'InfluxDB Error: ' + res.error.message,
+              res,
+            };
+          }
+
+          const seriesList: any[] = [];
+
+          const groupedFrames = groupBy(res.data, (x) => x.refId);
+          if (Object.keys(groupedFrames).length > 0) {
+            filteredRequest.targets.forEach((target) => {
+              const filteredFrames = groupedFrames[target.refId] ?? [];
+              switch (target.resultFormat) {
+                case 'logs':
+                case 'table':
+                  seriesList.push(
+                    this.responseParser.getTable(filteredFrames, target, {
+                      preferredVisualisationType: target.resultFormat,
+                    })
+                  );
+                  break;
+                default: {
+                  for (let i = 0; i < filteredFrames.length; i++) {
+                    seriesList.push(filteredFrames[i]);
+                  }
+                  break;
+                }
+              }
+            });
+          }
+
+          return { data: seriesList };
+        })
+      );
+    }
 
+    // Fallback to classic query support
+    return this.classicQuery(request);
   }
 
   getQueryDisplayText(query: InfluxQuery) {
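The backend path above regroups the flat frame list returned by `/api/ds/query` by `refId` before deciding per target how to post-process it. A self-contained sketch of just that regrouping step (frame shape reduced to the one property the grouping uses):

```ts
import { groupBy } from 'lodash';

interface FrameLike {
  refId: string;
}

// One bucket per target refId, so each target can be handled according to
// its resultFormat ('logs'/'table' are re-parsed, the rest pass through).
function groupFramesByRefId<T extends FrameLike>(frames: T[]): Record<string, T[]> {
  return groupBy(frames, (f) => f.refId);
}

const grouped = groupFramesByRefId([{ refId: 'A' }, { refId: 'B' }, { refId: 'A' }]);
// grouped => { A: [{ refId: 'A' }, { refId: 'A' }], B: [{ refId: 'B' }] }
```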
@@ -189,11 +255,132 @@ export default class InfluxDatasource extends DataSourceWithBackend<InfluxQuery,
       };
     }
 
-    query = this.applyVariables(query, scopedVars, rest);
+    if (config.featureToggles.influxdbBackendMigration && this.access === 'proxy') {
+      query = this.applyVariables(query, scopedVars, rest);
+    }
+
     return query;
   }
 
+  /**
+   * The unchanged pre 7.1 query implementation
+   */
+  classicQuery(options: any): Observable<DataQueryResponse> {
+    // migrate annotations
+    if (options.targets.some((target: InfluxQuery) => target.fromAnnotations)) {
+      const streams: Array<Observable<DataQueryResponse>> = [];
+
+      for (const target of options.targets) {
+        if (target.query) {
+          streams.push(
+            new Observable((subscriber) => {
+              this.annotationEvents(options, target)
+                .then((events) => subscriber.next({ data: [toDataFrame(events)] }))
+                .catch((ex) => subscriber.error(new Error(ex)))
+                .finally(() => subscriber.complete());
+            })
+          );
+        }
+      }
+
+      return merge(...streams);
+    }
+
+    let timeFilter = this.getTimeFilter(options);
+    const scopedVars = options.scopedVars;
+    const targets = cloneDeep(options.targets);
+    const queryTargets: any[] = [];
+
+    let i, y;
+
+    let allQueries = _map(targets, (target) => {
+      if (target.hide) {
+        return '';
+      }
+
+      queryTargets.push(target);
+
+      // backward compatibility
+      scopedVars.interval = scopedVars.__interval;
+
+      return new InfluxQueryModel(target, this.templateSrv, scopedVars).render(true);
+    }).reduce((acc, current) => {
+      if (current !== '') {
+        acc += ';' + current;
+      }
+      return acc;
+    });
+
+    if (allQueries === '') {
+      return of({ data: [] });
+    }
+
+    // add global adhoc filters to timeFilter
+    const adhocFilters = this.templateSrv.getAdhocFilters(this.name);
+    const adhocFiltersFromDashboard = options.targets.flatMap((target: InfluxQuery) => target.adhocFilters ?? []);
+    if (adhocFilters?.length || adhocFiltersFromDashboard?.length) {
+      const ahFilters = adhocFilters?.length ? adhocFilters : adhocFiltersFromDashboard;
+      const tmpQuery = new InfluxQueryModel({ refId: 'A' }, this.templateSrv, scopedVars);
+      timeFilter += ' AND ' + tmpQuery.renderAdhocFilters(ahFilters);
+    }
+    // replace grafana variables
+    scopedVars.timeFilter = { value: timeFilter };
+
+    // replace templated variables
+    allQueries = this.templateSrv.replace(allQueries, scopedVars);
+
+    return this._seriesQuery(allQueries, options).pipe(
+      map((data: any) => {
+        if (!data || !data.results) {
+          return { data: [] };
+        }
+
+        const seriesList = [];
+        for (i = 0; i < data.results.length; i++) {
+          const result = data.results[i];
+          if (!result || !result.series) {
+            continue;
+          }
+
+          const target = queryTargets[i];
+          let alias = target.alias;
+          if (alias) {
+            alias = this.templateSrv.replace(target.alias, options.scopedVars);
+          }
+
+          const meta: QueryResultMeta = {
+            executedQueryString: data.executedQueryString,
+          };
+
+          const influxSeries = new InfluxSeries({
+            refId: target.refId,
+            series: data.results[i].series,
+            alias: alias,
+            meta,
+          });
+
+          switch (target.resultFormat) {
+            case 'logs':
+              meta.preferredVisualisationType = 'logs';
+            case 'table': {
+              seriesList.push(influxSeries.getTable());
+              break;
+            }
+            default: {
+              const timeSeries = influxSeries.getTimeSeries();
+              for (y = 0; y < timeSeries.length; y++) {
+                seriesList.push(timeSeriesToDataFrame(timeSeries[y]));
+              }
+              break;
+            }
+          }
+        }
+
+        return { data: seriesList };
+      })
+    );
+  }
+
   async annotationEvents(options: DataQueryRequest, annotation: InfluxQuery): Promise<AnnotationEvent[]> {
     if (this.isFlux) {
       return Promise.reject({
@@ -208,34 +395,50 @@ export default class InfluxDatasource extends DataSourceWithBackend<InfluxQuery,
       });
     }
 
-    // We want to send our query to the backend as a raw query
-    const target: InfluxQuery = {
-      refId: 'metricFindQuery',
-      datasource: this.getRef(),
-      query: this.templateSrv.replace(annotation.query, undefined, 'regex'),
-      rawQuery: true,
-    };
-
-    return lastValueFrom(
-      getBackendSrv()
-        .fetch<BackendDataSourceResponse>({
-          url: '/api/ds/query',
-          method: 'POST',
-          headers: this.getRequestHeaders(),
-          data: {
-            from: options.range.from.valueOf().toString(),
-            to: options.range.to.valueOf().toString(),
-            queries: [target],
-          },
-          requestId: annotation.name,
-        })
-        .pipe(
-          map(
-            async (res: FetchResponse<BackendDataSourceResponse>) =>
-              await this.responseParser.transformAnnotationResponse(annotation, res, target)
-          )
-        )
-    );
+    if (config.featureToggles.influxdbBackendMigration && this.access === 'proxy') {
+      // We want to send our query to the backend as a raw query
+      const target: InfluxQuery = {
+        refId: 'metricFindQuery',
+        datasource: this.getRef(),
+        query: this.templateSrv.replace(annotation.query, undefined, 'regex'),
+        rawQuery: true,
+      };
+
+      return lastValueFrom(
+        getBackendSrv()
+          .fetch<BackendDataSourceResponse>({
+            url: '/api/ds/query',
+            method: 'POST',
+            headers: this.getRequestHeaders(),
+            data: {
+              from: options.range.from.valueOf().toString(),
+              to: options.range.to.valueOf().toString(),
+              queries: [target],
+            },
+            requestId: annotation.name,
+          })
+          .pipe(
+            map(
+              async (res: FetchResponse<BackendDataSourceResponse>) =>
+                await this.responseParser.transformAnnotationResponse(annotation, res, target)
+            )
+          )
+      );
+    }
+
+    const timeFilter = this.getTimeFilter({ rangeRaw: options.range.raw, timezone: options.timezone });
+    let query = annotation.query.replace('$timeFilter', timeFilter);
+    query = this.templateSrv.replace(query, undefined, 'regex');
+
+    return lastValueFrom(this._seriesQuery(query, options)).then((data: any) => {
+      if (!data || !data.results || !data.results[0]) {
+        throw { message: 'No results in response from InfluxDB' };
+      }
+      return new InfluxSeries({
+        series: data.results[0].series,
+        annotation: annotation,
+      }).getAnnotations();
+    });
   }
 
   targetContainsTemplate(target: any) {
@@ -317,7 +520,7 @@ export default class InfluxDatasource extends DataSourceWithBackend<InfluxQuery,
   }
 
   async metricFindQuery(query: string, options?: any): Promise<MetricFindValue[]> {
-    if (this.isFlux) {
+    if (this.isFlux || this.isMigrationToggleOnAndIsAccessProxy()) {
       const target: InfluxQuery = {
         refId: 'metricFindQuery',
         query,
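With the toggle on, metric-find lookups reuse the raw-query path that Flux already used: the InfluxQL string is wrapped in a target and sent to the backend. A sketch of the payload shape, using the field names visible in this hunk (the query string itself is illustrative, and `rawQuery: true` mirrors the annotation path above):

```ts
// Illustrative target for a metric-find lookup routed to /api/ds/query.
const target = {
  refId: 'metricFindQuery',
  query: 'SHOW TAG KEYS FROM "cpu"', // hypothetical InfluxQL lookup
  rawQuery: true,
};
```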
@@ -516,4 +719,8 @@ export default class InfluxDatasource extends DataSourceWithBackend<InfluxQuery,
 
     return date.valueOf() + 'ms';
   }
+
+  isMigrationToggleOnAndIsAccessProxy() {
+    return config.featureToggles.influxdbBackendMigration && this.access === 'proxy';
+  }
 }
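The new helper above is the single gate for the migration: both conditions must hold. A standalone restatement, useful for seeing the truth table at a glance (the `access` parameter stands in for the datasource instance field):

```ts
import config from 'app/core/config';

// Backend migration requires BOTH the alpha toggle and proxy access;
// 'direct' (browser) access always falls back to the classic path.
function isMigrationOn(access: string): boolean {
  return Boolean(config.featureToggles.influxdbBackendMigration) && access === 'proxy';
}

// toggle on:  isMigrationOn('proxy') === true,  isMigrationOn('direct') === false
// toggle off: isMigrationOn('proxy') === false, isMigrationOn('direct') === false
```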
public/app/plugins/datasource/influxdb/influx_series.ts (new file, 218 lines)
@@ -0,0 +1,218 @@
+import { each, map, includes, flatten, keys } from 'lodash';
+
+import { FieldType, QueryResultMeta, TimeSeries, TableData } from '@grafana/data';
+import TableModel from 'app/core/TableModel';
+
+export default class InfluxSeries {
+  refId?: string;
+  series: any;
+  alias: any;
+  annotation: any;
+  meta?: QueryResultMeta;
+
+  constructor(options: { series: any; alias?: any; annotation?: any; meta?: QueryResultMeta; refId?: string }) {
+    this.series = options.series;
+    this.alias = options.alias;
+    this.annotation = options.annotation;
+    this.meta = options.meta;
+    this.refId = options.refId;
+  }
+
+  getTimeSeries(): TimeSeries[] {
+    const output: TimeSeries[] = [];
+    let i, j;
+
+    if (this.series.length === 0) {
+      return output;
+    }
+
+    each(this.series, (series) => {
+      const columns = series.columns.length;
+      const tags = map(series.tags, (value, key) => {
+        return key + ': ' + value;
+      });
+
+      for (j = 1; j < columns; j++) {
+        let seriesName = series.name;
+        const columnName = series.columns[j];
+        if (columnName !== 'value') {
+          seriesName = seriesName + '.' + columnName;
+        }
+
+        if (this.alias) {
+          seriesName = this._getSeriesName(series, j);
+        } else if (series.tags) {
+          seriesName = seriesName + ' {' + tags.join(', ') + '}';
+        }
+
+        const datapoints = [];
+        if (series.values) {
+          for (i = 0; i < series.values.length; i++) {
+            datapoints[i] = [series.values[i][j], series.values[i][0]];
+          }
+        }
+
+        output.push({
+          title: seriesName,
+          target: seriesName,
+          datapoints: datapoints,
+          tags: series.tags,
+          meta: this.meta,
+          refId: this.refId,
+        });
+      }
+    });
+
+    return output;
+  }
+
+  _getSeriesName(series: any, index: number) {
+    const regex = /\$(\w+)|\[\[([\s\S]+?)\]\]/g;
+    const segments = series.name.split('.');
+
+    return this.alias.replace(regex, (match: any, g1: any, g2: any) => {
+      const group = g1 || g2;
+      const segIndex = parseInt(group, 10);
+
+      if (group === 'm' || group === 'measurement') {
+        return series.name;
+      }
+      if (group === 'col') {
+        return series.columns[index];
+      }
+      if (!isNaN(segIndex)) {
+        return segments[segIndex] ?? match;
+      }
+      if (group.indexOf('tag_') !== 0) {
+        return match;
+      }
+
+      const tag = group.replace('tag_', '');
+      if (!series.tags) {
+        return match;
+      }
+      return series.tags[tag];
+    });
+  }
+
+  getAnnotations() {
+    const list: any[] = [];
+
+    each(this.series, (series) => {
+      let titleCol: any = null;
+      let timeCol: any = null;
+      let timeEndCol: any = null;
+      const tagsCol: any = [];
+      let textCol: any = null;
+
+      each(series.columns, (column, index) => {
+        if (column === 'time') {
+          timeCol = index;
+          return;
+        }
+        if (column === 'sequence_number') {
+          return;
+        }
+        if (column === this.annotation.titleColumn) {
+          titleCol = index;
+          return;
+        }
+        if (includes((this.annotation.tagsColumn || '').replace(' ', '').split(','), column)) {
+          tagsCol.push(index);
+          return;
+        }
+        if (column === this.annotation.textColumn) {
+          textCol = index;
+          return;
+        }
+        if (column === this.annotation.timeEndColumn) {
+          timeEndCol = index;
+          return;
+        }
+        // legacy case
+        if (!titleCol && textCol !== index) {
+          titleCol = index;
+        }
+      });
+
+      each(series.values, (value) => {
+        const data = {
+          annotation: this.annotation,
+          time: +new Date(value[timeCol]),
+          title: value[titleCol],
+          timeEnd: value[timeEndCol],
+          // Remove empty values, then split in different tags for comma separated values
+          tags: flatten(
+            tagsCol
+              .filter((t: any) => {
+                return value[t];
+              })
+              .map((t: any) => {
+                return value[t].split(',');
+              })
+          ),
+          text: value[textCol],
+        };
+
+        list.push(data);
+      });
+    });
+
+    return list;
+  }
+
+  getTable(): TableData {
+    const table = new TableModel();
+    let i, j;
+
+    table.refId = this.refId;
+    table.meta = this.meta;
+
+    if (this.series.length === 0) {
+      return table;
+    }
+
+    // the order is:
+    // - first the first item from the value-array (this is often (always?) the timestamp)
+    // - then all the tag-values
+    // - then the rest of the value-array
+    //
+    // we have to keep this order both in table.columns and table.rows
+
+    each(this.series, (series: any, seriesIndex: number) => {
+      if (seriesIndex === 0) {
+        const firstCol = series.columns[0];
+        // Check the first column's name, if it is `time`, we
+        // mark it as having the type time
+        const firstTableCol = firstCol === 'time' ? { text: 'Time', type: FieldType.time } : { text: firstCol };
+        table.columns.push(firstTableCol);
+        each(keys(series.tags), (key) => {
+          table.columns.push({ text: key });
+        });
+        for (j = 1; j < series.columns.length; j++) {
+          table.columns.push({ text: series.columns[j] });
+        }
+      }
+
+      if (series.values) {
+        for (i = 0; i < series.values.length; i++) {
+          const values = series.values[i];
+          const reordered = [values[0]];
+          if (series.tags) {
+            for (const key in series.tags) {
+              if (series.tags.hasOwnProperty(key)) {
+                reordered.push(series.tags[key]);
+              }
+            }
+          }
+          for (j = 1; j < values.length; j++) {
+            reordered.push(values[j]);
+          }
+          table.rows.push(reordered);
+        }
+      }
+    });
+
+    return table;
+  }
+}
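For reference, the alias patterns that `_getSeriesName()` resolves are `$m`/`$measurement` (series name), `$col` (column name), numeric segment indexes such as `$1` or `[[3]]` (dot-separated segments of the measurement name), and `$tag_<name>` (tag value). A short usage sketch matching the expectations in the new unit tests below (the import path assumes the file's location in this diff):

```ts
import InfluxSeries from 'app/plugins/datasource/influxdb/influx_series';

const series = new InfluxSeries({
  alias: 'alias: $m -> $tag_server ([[measurement]])',
  series: [
    {
      name: 'cpu',
      tags: { server: 'server1' },
      columns: ['time', 'mean'],
      values: [[1431946625000, 10]],
    },
  ],
});

// $m and [[measurement]] expand to 'cpu', $tag_server to 'server1':
series.getTimeSeries()[0].target; // 'alias: cpu -> server1 (cpu)'
```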
@@ -1,8 +1,9 @@
 import { lastValueFrom, of } from 'rxjs';
 import { TemplateSrvStub } from 'test/specs/helpers';
 
-import { ScopedVars } from '@grafana/data';
-import { FetchResponse, setBackendSrv } from '@grafana/runtime';
+import { ScopedVars } from '@grafana/data/src';
+import { FetchResponse } from '@grafana/runtime';
+import config from 'app/core/config';
 import { backendSrv } from 'app/core/services/backend_srv'; // will use the version in __mocks__
 
 import { BROWSER_MODE_DISABLED_MESSAGE } from '../constants';
@@ -11,6 +12,11 @@ import InfluxDatasource from '../datasource';
 //@ts-ignore
 const templateSrv = new TemplateSrvStub();
 
+jest.mock('@grafana/runtime', () => ({
+  ...(jest.requireActual('@grafana/runtime') as unknown as object),
+  getBackendSrv: () => backendSrv,
+}));
+
 describe('InfluxDataSource', () => {
   const ctx: any = {
     instanceSettings: { url: 'url', name: 'influxDb', jsonData: { httpMode: 'GET' } },
@@ -23,7 +29,6 @@ describe('InfluxDataSource', () => {
     ctx.instanceSettings.url = '/api/datasources/proxy/1';
     ctx.instanceSettings.access = 'proxy';
     ctx.ds = new InfluxDatasource(ctx.instanceSettings, templateSrv);
-    setBackendSrv(backendSrv);
   });
 
   describe('When issuing metricFindQuery', () => {
@@ -320,6 +325,7 @@ describe('InfluxDataSource', () => {
     it('should apply all template variables with InfluxQL mode', () => {
       ds.isFlux = false;
      ds.access = 'proxy';
+      config.featureToggles.influxdbBackendMigration = true;
       const query = ds.applyTemplateVariables(influxQuery, {
         interpolationVar: { text: text, value: text },
         interpolationVar2: { text: 'interpolationText2', value: 'interpolationText2' },
@@ -330,6 +336,7 @@ describe('InfluxDataSource', () => {
     it('should apply all scopedVars to tags', () => {
       ds.isFlux = false;
       ds.access = 'proxy';
+      config.featureToggles.influxdbBackendMigration = true;
       const query = ds.applyTemplateVariables(influxQuery, {
         interpolationVar: { text: text, value: text },
         interpolationVar2: { text: 'interpolationText2', value: 'interpolationText2' },
@@ -0,0 +1,371 @@
+import produce from 'immer';
+
+import InfluxSeries from '../influx_series';
+
+describe('when generating timeseries from influxdb response', () => {
+  describe('given multiple fields for series', () => {
+    const options = {
+      alias: '',
+      series: [
+        {
+          name: 'cpu',
+          tags: { app: 'test', server: 'server1' },
+          columns: ['time', 'mean', 'max', 'min'],
+          values: [
+            [1431946625000, 10, 11, 9],
+            [1431946626000, 20, 21, 19],
+          ],
+        },
+      ],
+    };
+    describe('and no alias', () => {
+      it('should generate multiple datapoints for each column', () => {
+        const series = new InfluxSeries(options);
+        const result = series.getTimeSeries();
+
+        expect(result.length).toBe(3);
+        expect(result[0].target).toBe('cpu.mean {app: test, server: server1}');
+        expect(result[0].datapoints[0][0]).toBe(10);
+        expect(result[0].datapoints[0][1]).toBe(1431946625000);
+        expect(result[0].datapoints[1][0]).toBe(20);
+        expect(result[0].datapoints[1][1]).toBe(1431946626000);
+
+        expect(result[1].target).toBe('cpu.max {app: test, server: server1}');
+        expect(result[1].datapoints[0][0]).toBe(11);
+        expect(result[1].datapoints[0][1]).toBe(1431946625000);
+        expect(result[1].datapoints[1][0]).toBe(21);
+        expect(result[1].datapoints[1][1]).toBe(1431946626000);
+
+        expect(result[2].target).toBe('cpu.min {app: test, server: server1}');
+        expect(result[2].datapoints[0][0]).toBe(9);
+        expect(result[2].datapoints[0][1]).toBe(1431946625000);
+        expect(result[2].datapoints[1][0]).toBe(19);
+        expect(result[2].datapoints[1][1]).toBe(1431946626000);
+      });
+    });
+
+    describe('and simple alias', () => {
+      it('should use alias', () => {
+        options.alias = 'new series';
+        const series = new InfluxSeries(options);
+        const result = series.getTimeSeries();
+
+        expect(result[0].target).toBe('new series');
+        expect(result[1].target).toBe('new series');
+        expect(result[2].target).toBe('new series');
+      });
+    });
+
+    describe('and alias patterns', () => {
+      it('should replace patterns', () => {
+        options.alias = 'alias: $m -> $tag_server ([[measurement]])';
+        const series = new InfluxSeries(options);
+        const result = series.getTimeSeries();
+
+        expect(result[0].target).toBe('alias: cpu -> server1 (cpu)');
+        expect(result[1].target).toBe('alias: cpu -> server1 (cpu)');
+        expect(result[2].target).toBe('alias: cpu -> server1 (cpu)');
+      });
+    });
+  });
+
+  describe('given measurement with default fieldname', () => {
+    const options = {
+      series: [
+        {
+          name: 'cpu',
+          tags: { app: 'test', server: 'server1' },
+          columns: ['time', 'value'],
+          values: [
+            ['2015-05-18T10:57:05Z', 10],
+            ['2015-05-18T10:57:06Z', 12],
+          ],
+        },
+        {
+          name: 'cpu',
+          tags: { app: 'test2', server: 'server2' },
+          columns: ['time', 'value'],
+          values: [
+            ['2015-05-18T10:57:05Z', 15],
+            ['2015-05-18T10:57:06Z', 16],
+          ],
+        },
+      ],
+    };
+
+    describe('and no alias', () => {
+      it('should generate label with no field', () => {
+        const series = new InfluxSeries(options);
+        const result = series.getTimeSeries();
+
+        expect(result[0].target).toBe('cpu {app: test, server: server1}');
+        expect(result[1].target).toBe('cpu {app: test2, server: server2}');
+      });
+    });
+  });
+
+  describe('given two series', () => {
+    const options = {
+      alias: '',
+      series: [
+        {
+          name: 'cpu',
+          tags: { app: 'test', server: 'server1' },
+          columns: ['time', 'mean'],
+          values: [
+            [1431946625000, 10],
+            [1431946626000, 12],
+          ],
+        },
+        {
+          name: 'cpu',
+          tags: { app: 'test2', server: 'server2' },
+          columns: ['time', 'mean'],
+          values: [
+            [1431946625000, 15],
+            [1431946626000, 16],
+          ],
+        },
+      ],
+    };
+
+    describe('and no alias', () => {
+      it('should generate two time series', () => {
+        const series = new InfluxSeries(options);
+        const result = series.getTimeSeries();
+
+        expect(result.length).toBe(2);
+        expect(result[0].target).toBe('cpu.mean {app: test, server: server1}');
+        expect(result[0].datapoints[0][0]).toBe(10);
+        expect(result[0].datapoints[0][1]).toBe(1431946625000);
+        expect(result[0].datapoints[1][0]).toBe(12);
+        expect(result[0].datapoints[1][1]).toBe(1431946626000);
+        expect(result[0].tags).toMatchObject({
+          app: 'test',
+          server: 'server1',
+        });
+
+        expect(result[1].target).toBe('cpu.mean {app: test2, server: server2}');
+        expect(result[1].datapoints[0][0]).toBe(15);
+        expect(result[1].datapoints[0][1]).toBe(1431946625000);
+        expect(result[1].datapoints[1][0]).toBe(16);
+        expect(result[1].datapoints[1][1]).toBe(1431946626000);
+        expect(result[1].tags).toMatchObject({
+          app: 'test2',
+          server: 'server2',
+        });
+      });
+    });
+
+    describe('and simple alias', () => {
+      it('should use alias', () => {
+        options.alias = 'new series';
+        const series = new InfluxSeries(options);
+        const result = series.getTimeSeries();
+
+        expect(result[0].target).toBe('new series');
+      });
+    });
+
+    describe('and alias patterns', () => {
+      it('should replace patterns', () => {
+        options.alias = 'alias: $m -> $tag_server ([[measurement]])';
+        const series = new InfluxSeries(options);
+        const result = series.getTimeSeries();
+
+        expect(result[0].target).toBe('alias: cpu -> server1 (cpu)');
+        expect(result[1].target).toBe('alias: cpu -> server2 (cpu)');
+      });
+    });
+  });
+
+  describe('given measurement with dots', () => {
+    const options = {
+      alias: '',
+      series: [
+        {
+          name: 'app.prod.server1.count',
+          tags: {},
+          columns: ['time', 'mean'],
+          values: [
+            [1431946625000, 10],
+            [1431946626000, 12],
+          ],
+        },
+      ],
+    };
+
+    it('should replace patterns', () => {
+      options.alias = 'alias: $1 -> [[3]]';
+      const series = new InfluxSeries(options);
+      const result = series.getTimeSeries();
+
+      expect(result[0].target).toBe('alias: prod -> count');
+    });
+
+    it('should handle too large indexes', () => {
+      options.alias = 'alias: $0 $1 $2 $3 $4 $5';
+      const series = new InfluxSeries(options);
+      const result = series.getTimeSeries();
+
+      expect(result[0].target).toBe('alias: app prod server1 count $4 $5');
+    });
+  });
+
+  describe('given table response', () => {
+    const options = {
+      alias: '',
+      series: [
+        {
+          name: 'app.prod.server1.count',
+          tags: { datacenter: 'Africa', server: 'server2' },
+          columns: ['time', 'value2', 'value'],
+          values: [
+            [1431946625000, 23, 10],
+            [1431946626000, 25, 12],
+          ],
+        },
+      ],
+    };
+
+    it('should return table', () => {
+      const series = new InfluxSeries(options);
+      const table = series.getTable();
+
+      expect(table.type).toBe('table');
+      expect(table.columns.length).toBe(5);
+      expect(table.columns[0].text).toEqual('Time');
+      expect(table.rows[0]).toEqual([1431946625000, 'Africa', 'server2', 23, 10]);
+    });
+  });
+
+  describe('given table response from SHOW CARDINALITY', () => {
+    const options = {
+      alias: '',
+      series: [
+        {
+          name: 'cpu',
+          columns: ['count'],
+          values: [[37]],
+        },
+      ],
+    };
+
+    it('should return table', () => {
+      const series = new InfluxSeries(options);
+      const table = series.getTable();
+
+      expect(table.type).toBe('table');
+      expect(table.columns.length).toBe(1);
+      expect(table.columns[0].text).toEqual('count');
+      expect(table.rows[0]).toEqual([37]);
+    });
+  });
+
+  describe('given annotation response', () => {
+    describe('with empty tagsColumn', () => {
+      const options = {
+        alias: '',
+        annotation: {},
+        series: [
+          {
+            name: 'logins.count',
+            tags: { datacenter: 'Africa', server: 'server2' },
+            columns: ['time', 'datacenter', 'hostname', 'source', 'value'],
+            values: [[1481549440372, 'America', '10.1.100.10', 'backend', 215.7432653659507]],
+          },
+        ],
+      };
+
+      it('should multiple tags', () => {
+        const series = new InfluxSeries(options);
+        const annotations = series.getAnnotations();
+
+        expect(annotations[0].tags.length).toBe(0);
+      });
+    });
+
+    describe('given annotation response', () => {
+      const options = {
+        alias: '',
+        annotation: {
+          tagsColumn: 'datacenter, source',
+        },
+        series: [
+          {
+            name: 'logins.count',
+            tags: { datacenter: 'Africa', server: 'server2' },
+            columns: ['time', 'datacenter', 'hostname', 'source', 'value'],
+            values: [[1481549440372, 'America', '10.1.100.10', 'backend', 215.7432653659507]],
+          },
+        ],
+      };
+
+      it('should multiple tags', () => {
+        const series = new InfluxSeries(options);
+        const annotations = series.getAnnotations();
+
+        expect(annotations[0].tags.length).toBe(2);
+        expect(annotations[0].tags[0]).toBe('America');
+        expect(annotations[0].tags[1]).toBe('backend');
+      });
+    });
+
+    describe('given a time-column in the json-response', () => {
+      const options = {
+        alias: '',
+        series: [
+          {
+            name: 'cpu',
+            tags: { cpu: 'cpu1' },
+            columns: ['time', 'usage_idle'],
+            values: [[1481549440372, 42]],
+          },
+        ],
+      };
+
+      it('the column-names should be correct if the time-column is not renamed', () => {
+        const series = new InfluxSeries(options);
+        const table = series.getTable();
+
+        expect(table.columns).toStrictEqual([
+          {
+            text: 'Time',
+            type: 'time',
+          },
+          {
+            text: 'cpu',
+          },
+          {
+            text: 'usage_idle',
+          },
+        ]);
+
+        expect(table.rows).toStrictEqual([[1481549440372, 'cpu1', 42]]);
+      });
+
+      it('the column-names should be correct if the time-column is renamed', () => {
+        const renamedOptions = produce(options, (draft) => {
+          // we rename the time-column to `zeit`
+          draft.series[0].columns[0] = 'zeit';
+        });
+        const series = new InfluxSeries(renamedOptions);
+        const table = series.getTable();
+
+        expect(table.columns).toStrictEqual([
+          {
+            text: 'zeit',
+          },
+          {
+            text: 'cpu',
+          },
+          {
+            text: 'usage_idle',
+          },
+        ]);
+
+        expect(table.rows).toStrictEqual([[1481549440372, 'cpu1', 42]]);
+      });
+    });
+  });
+});
@@ -4,6 +4,7 @@ import { TemplateSrvStub } from 'test/specs/helpers';
 
 import { AnnotationEvent, DataQueryRequest, FieldType, MutableDataFrame } from '@grafana/data';
 import { FetchResponse } from '@grafana/runtime';
+import config from 'app/core/config';
 import { backendSrv } from 'app/core/services/backend_srv'; // will use the version in __mocks__
 
 import InfluxDatasource from '../datasource';
@@ -425,6 +426,7 @@ describe('influxdb response parser', () => {
 
       ctx.ds = new InfluxDatasource(ctx.instanceSettings, templateSrv);
       ctx.ds.access = 'proxy';
+      config.featureToggles.influxdbBackendMigration = true;
       response = await ctx.ds.annotationEvents(queryOptions, annotation);
     });
 