Influxdb: Remove backend migration feature toggle (#61308)
* Remove influxdbBackendMigration feature toggle
* Remove code blocks related to influxdbBackendMigration feature toggle
* Fix annotations
* Remove commented lines
* Fix unit tests
This commit is contained in:
parent efed0151f8
commit 67c02f660a
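The toggle gated the InfluxQL-over-backend path in `public/app/plugins/datasource/influxdb/datasource.ts` behind two conditions: the flag being on and the datasource using proxy access. Below is a minimal before/after sketch of the pattern this commit removes, condensed from the hunks that follow; `access`, `query`, `scopedVars`, and `applyVariables` stand in for the class members.

```ts
import config from 'app/core/config';

// Stand-ins for the InfluxDatasource class members (sketch only).
declare let access: string;
declare let query: unknown;
declare let scopedVars: unknown;
declare function applyVariables(q: unknown, vars: unknown): unknown;

// Before: variable interpolation for the backend path ran only when the
// influxdbBackendMigration toggle was on and the datasource used proxy access.
if (config.featureToggles.influxdbBackendMigration && access === 'proxy') {
  query = applyVariables(query, scopedVars);
}

// After: the toggle is gone, so the backend path runs unconditionally.
query = applyVariables(query, scopedVars);
```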
@@ -5716,19 +5716,19 @@ exports[`better eslint`] = {
    [0, 0, 0, "Unexpected any. Specify a different type.", "0"]
  ],
  "public/app/plugins/datasource/influxdb/datasource.ts:5381": [
    [0, 0, 0, "Do not use any type assertions.", "0"],
    [0, 0, 0, "Unexpected any. Specify a different type.", "0"],
    [0, 0, 0, "Unexpected any. Specify a different type.", "1"],
    [0, 0, 0, "Unexpected any. Specify a different type.", "2"],
    [0, 0, 0, "Unexpected any. Specify a different type.", "3"],
    [0, 0, 0, "Unexpected any. Specify a different type.", "4"],
    [0, 0, 0, "Unexpected any. Specify a different type.", "5"],
    [0, 0, 0, "Do not use any type assertions.", "6"],
    [0, 0, 0, "Do not use any type assertions.", "5"],
    [0, 0, 0, "Unexpected any. Specify a different type.", "6"],
    [0, 0, 0, "Unexpected any. Specify a different type.", "7"],
    [0, 0, 0, "Unexpected any. Specify a different type.", "8"],
    [0, 0, 0, "Unexpected any. Specify a different type.", "9"],
    [0, 0, 0, "Unexpected any. Specify a different type.", "10"],
    [0, 0, 0, "Unexpected any. Specify a different type.", "11"],
    [0, 0, 0, "Unexpected any. Specify a different type.", "12"],
    [0, 0, 0, "Do not use any type assertions.", "12"],
    [0, 0, 0, "Unexpected any. Specify a different type.", "13"],
    [0, 0, 0, "Unexpected any. Specify a different type.", "14"],
    [0, 0, 0, "Unexpected any. Specify a different type.", "15"],
@@ -5738,18 +5738,13 @@ exports[`better eslint`] = {
    [0, 0, 0, "Unexpected any. Specify a different type.", "19"],
    [0, 0, 0, "Unexpected any. Specify a different type.", "20"],
    [0, 0, 0, "Unexpected any. Specify a different type.", "21"],
    [0, 0, 0, "Do not use any type assertions.", "22"],
    [0, 0, 0, "Unexpected any. Specify a different type.", "22"],
    [0, 0, 0, "Unexpected any. Specify a different type.", "23"],
    [0, 0, 0, "Unexpected any. Specify a different type.", "24"],
    [0, 0, 0, "Unexpected any. Specify a different type.", "25"],
    [0, 0, 0, "Unexpected any. Specify a different type.", "26"],
    [0, 0, 0, "Unexpected any. Specify a different type.", "27"],
    [0, 0, 0, "Unexpected any. Specify a different type.", "28"],
    [0, 0, 0, "Unexpected any. Specify a different type.", "29"],
    [0, 0, 0, "Unexpected any. Specify a different type.", "30"],
    [0, 0, 0, "Unexpected any. Specify a different type.", "31"],
    [0, 0, 0, "Unexpected any. Specify a different type.", "32"],
    [0, 0, 0, "Unexpected any. Specify a different type.", "33"]
    [0, 0, 0, "Unexpected any. Specify a different type.", "28"]
  ],
  "public/app/plugins/datasource/influxdb/influx_query_model.ts:5381": [
    [0, 0, 0, "Unexpected any. Specify a different type.", "0"],
@@ -5774,27 +5769,6 @@ exports[`better eslint`] = {
    [0, 0, 0, "Unexpected any. Specify a different type.", "19"],
    [0, 0, 0, "Unexpected any. Specify a different type.", "20"]
  ],
  "public/app/plugins/datasource/influxdb/influx_series.ts:5381": [
    [0, 0, 0, "Unexpected any. Specify a different type.", "0"],
    [0, 0, 0, "Unexpected any. Specify a different type.", "1"],
    [0, 0, 0, "Unexpected any. Specify a different type.", "2"],
    [0, 0, 0, "Unexpected any. Specify a different type.", "3"],
    [0, 0, 0, "Unexpected any. Specify a different type.", "4"],
    [0, 0, 0, "Unexpected any. Specify a different type.", "5"],
    [0, 0, 0, "Unexpected any. Specify a different type.", "6"],
    [0, 0, 0, "Unexpected any. Specify a different type.", "7"],
    [0, 0, 0, "Unexpected any. Specify a different type.", "8"],
    [0, 0, 0, "Unexpected any. Specify a different type.", "9"],
    [0, 0, 0, "Unexpected any. Specify a different type.", "10"],
    [0, 0, 0, "Unexpected any. Specify a different type.", "11"],
    [0, 0, 0, "Unexpected any. Specify a different type.", "12"],
    [0, 0, 0, "Unexpected any. Specify a different type.", "13"],
    [0, 0, 0, "Unexpected any. Specify a different type.", "14"],
    [0, 0, 0, "Unexpected any. Specify a different type.", "15"],
    [0, 0, 0, "Unexpected any. Specify a different type.", "16"],
    [0, 0, 0, "Unexpected any. Specify a different type.", "17"],
    [0, 0, 0, "Unexpected any. Specify a different type.", "18"]
  ],
  "public/app/plugins/datasource/influxdb/migrations.ts:5381": [
    [0, 0, 0, "Unexpected any. Specify a different type.", "0"]
  ],

@@ -62,7 +62,6 @@ Alpha features might be changed or removed without prior notice.
| `live-service-web-worker` | This will use a webworker thread to processes events rather than the main thread |
| `queryOverLive` | Use Grafana Live WebSocket to execute backend queries |
| `tempoApmTable` | Show APM table |
| `influxdbBackendMigration` | Query InfluxDB InfluxQL without the proxy |
| `publicDashboards` | Enables public access to dashboards |
| `lokiLive` | Support WebSocket streaming for loki (early prototype) |
| `lokiDataframeApi` | Use experimental loki api for WebSocket streaming (early prototype) |

@@ -30,7 +30,6 @@ export interface FeatureToggles {
  panelTitleSearch?: boolean;
  tempoApmTable?: boolean;
  prometheusAzureOverrideAudience?: boolean;
  influxdbBackendMigration?: boolean;
  showFeatureFlagsInUI?: boolean;
  publicDashboards?: boolean;
  publicDashboardsEmailSharing?: boolean;

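The generated `FeatureToggles` interface is what gives `config.featureToggles` its type on the frontend, so dropping the key also turns any leftover guard into a compile-time error rather than a silent `undefined`. A minimal sketch (hypothetical leftover code, not from this commit):

```ts
import config from 'app/core/config';

// With influxdbBackendMigration gone from the generated FeatureToggles
// interface, a stale guard like this no longer type-checks and gets
// flushed out during the cleanup:
if (config.featureToggles.influxdbBackendMigration) {
  // InfluxQL-over-backend code path
}
```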
@@ -83,12 +83,6 @@ var (
    Description: "Experimental. Allow override default AAD audience for Azure Prometheus endpoint",
    State: FeatureStateBeta,
  },
  {
    Name: "influxdbBackendMigration",
    Description: "Query InfluxDB InfluxQL without the proxy",
    State: FeatureStateAlpha,
    FrontendOnly: true,
  },
  {
    Name: "showFeatureFlagsInUI",
    Description: "Show feature flags in the settings UI",

@@ -63,10 +63,6 @@ const (
  // Experimental. Allow override default AAD audience for Azure Prometheus endpoint
  FlagPrometheusAzureOverrideAudience = "prometheusAzureOverrideAudience"

  // FlagInfluxdbBackendMigration
  // Query InfluxDB InfluxQL without the proxy
  FlagInfluxdbBackendMigration = "influxdbBackendMigration"

  // FlagShowFeatureFlagsInUI
  // Show feature flags in the settings UI
  FlagShowFeatureFlagsInUI = "showFeatureFlagsInUI"

@@ -1,23 +1,16 @@
import { cloneDeep, extend, groupBy, has, isString, map as _map, omit, pick, reduce } from 'lodash';
import { extend, groupBy, has, isString, omit, pick, reduce } from 'lodash';
import { lastValueFrom, merge, Observable, of, throwError } from 'rxjs';
import { catchError, map } from 'rxjs/operators';

import {
  AnnotationEvent,
  ArrayVector,
  DataFrame,
  DataQueryError,
  DataQueryRequest,
  DataQueryResponse,
  DataSourceInstanceSettings,
  dateMath,
  FieldType,
  MetricFindValue,
  QueryResultMeta,
  ScopedVars,
  TIME_SERIES_TIME_FIELD_NAME,
  TIME_SERIES_VALUE_FIELD_NAME,
  TimeSeries,
  toDataFrame,
} from '@grafana/data';
import {
@@ -27,91 +20,18 @@ import {
  frameToMetricFindValue,
  getBackendSrv,
} from '@grafana/runtime';
import config from 'app/core/config';
import { getTemplateSrv, TemplateSrv } from 'app/features/templating/template_srv';

import { AnnotationEditor } from './components/AnnotationEditor';
import { FluxQueryEditor } from './components/FluxQueryEditor';
import { BROWSER_MODE_DISABLED_MESSAGE } from './constants';
import InfluxQueryModel from './influx_query_model';
import InfluxSeries from './influx_series';
import { prepareAnnotation } from './migrations';
import { buildRawQuery } from './queryUtils';
import { InfluxQueryBuilder } from './query_builder';
import ResponseParser from './response_parser';
import { InfluxOptions, InfluxQuery, InfluxVersion } from './types';

// we detect the field type based on the value-array
function getFieldType(values: unknown[]): FieldType {
  // the values-array may contain a lot of nulls.
  // we need the first not-null item
  const firstNotNull = values.find((v) => v !== null);

  if (firstNotNull === undefined) {
    // we could not find any not-null values
    return FieldType.number;
  }

  const valueType = typeof firstNotNull;

  switch (valueType) {
    case 'string':
      return FieldType.string;
    case 'boolean':
      return FieldType.boolean;
    case 'number':
      return FieldType.number;
    default:
      // this should never happen, influxql values
      // can only be numbers, strings and booleans.
      throw new Error(`InfluxQL: invalid value type ${valueType}`);
  }
}

// this conversion function is specialized to work with the timeseries
// data returned by InfluxDatasource.getTimeSeries()
function timeSeriesToDataFrame(timeSeries: TimeSeries): DataFrame {
  const times: number[] = [];
  const values: unknown[] = [];

  // the data we process here is not correctly typed.
  // the typescript types say every data-point is number|null,
  // but in fact it can be string or boolean too.

  const points = timeSeries.datapoints;
  for (const point of points) {
    values.push(point[0]);
    times.push(point[1] as number);
  }

  const timeField = {
    name: TIME_SERIES_TIME_FIELD_NAME,
    type: FieldType.time,
    config: {},
    values: new ArrayVector<number>(times),
  };

  const valueField = {
    name: TIME_SERIES_VALUE_FIELD_NAME,
    type: getFieldType(values),
    config: {
      displayNameFromDS: timeSeries.title,
    },
    values: new ArrayVector<unknown>(values),
    labels: timeSeries.tags,
  };

  const fields = [timeField, valueField];

  return {
    name: timeSeries.target,
    refId: timeSeries.refId,
    meta: timeSeries.meta,
    fields,
    length: values.length,
  };
}

export default class InfluxDatasource extends DataSourceWithBackend<InfluxQuery, InfluxOptions> {
  type: string;
  urls: string[];
@@ -183,48 +103,62 @@ export default class InfluxDatasource extends DataSourceWithBackend<InfluxQuery,
      return super.query(filteredRequest);
    }

    if (this.isMigrationToggleOnAndIsAccessProxy()) {
      return super.query(filteredRequest).pipe(
        map((res) => {
          if (res.error) {
            throw {
              message: 'InfluxDB Error: ' + res.error.message,
              res,
            };
          }
    if (filteredRequest.targets.some((target: InfluxQuery) => target.fromAnnotations)) {
      const streams: Array<Observable<DataQueryResponse>> = [];

          const seriesList: any[] = [];
      for (const target of filteredRequest.targets) {
        if (target.query) {
          streams.push(
            new Observable((subscriber) => {
              this.annotationEvents(filteredRequest, target)
                .then((events) => subscriber.next({ data: [toDataFrame(events)] }))
                .catch((ex) => subscriber.error(new Error(ex)))
                .finally(() => subscriber.complete());
            })
          );
        }
      }

          const groupedFrames = groupBy(res.data, (x) => x.refId);
          if (Object.keys(groupedFrames).length > 0) {
            filteredRequest.targets.forEach((target) => {
              const filteredFrames = groupedFrames[target.refId] ?? [];
              switch (target.resultFormat) {
                case 'logs':
                case 'table':
                  seriesList.push(
                    this.responseParser.getTable(filteredFrames, target, {
                      preferredVisualisationType: target.resultFormat,
                    })
                  );
                  break;
                default: {
                  for (let i = 0; i < filteredFrames.length; i++) {
                    seriesList.push(filteredFrames[i]);
                  }
                  break;
                }
              }
            });
          }

          return { data: seriesList };
        })
      );
      return merge(...streams);
    }

    // Fallback to classic query support
    return this.classicQuery(request);
    return super.query(filteredRequest).pipe(
      map((res) => {
        if (res.error) {
          throw {
            message: 'InfluxDB Error: ' + res.error.message,
            res,
          };
        }

        const seriesList: any[] = [];

        const groupedFrames = groupBy(res.data, (x) => x.refId);
        if (Object.keys(groupedFrames).length > 0) {
          filteredRequest.targets.forEach((target) => {
            const filteredFrames = groupedFrames[target.refId] ?? [];
            switch (target.resultFormat) {
              case 'logs':
              case 'table':
                seriesList.push(
                  this.responseParser.getTable(filteredFrames, target, {
                    preferredVisualisationType: target.resultFormat,
                  })
                );
                break;
              default: {
                for (let i = 0; i < filteredFrames.length; i++) {
                  seriesList.push(filteredFrames[i]);
                }
                break;
              }
            }
          });
        }

        return { data: seriesList };
      })
    );
  }

  getQueryDisplayText(query: InfluxQuery) {
@@ -255,132 +189,11 @@ export default class InfluxDatasource extends DataSourceWithBackend<InfluxQuery,
    };
  }

    if (config.featureToggles.influxdbBackendMigration && this.access === 'proxy') {
      query = this.applyVariables(query, scopedVars, rest);
    }
    query = this.applyVariables(query, scopedVars, rest);

    return query;
  }

  /**
   * The unchanged pre 7.1 query implementation
   */
  classicQuery(options: any): Observable<DataQueryResponse> {
    // migrate annotations
    if (options.targets.some((target: InfluxQuery) => target.fromAnnotations)) {
      const streams: Array<Observable<DataQueryResponse>> = [];

      for (const target of options.targets) {
        if (target.query) {
          streams.push(
            new Observable((subscriber) => {
              this.annotationEvents(options, target)
                .then((events) => subscriber.next({ data: [toDataFrame(events)] }))
                .catch((ex) => subscriber.error(new Error(ex)))
                .finally(() => subscriber.complete());
            })
          );
        }
      }

      return merge(...streams);
    }

    let timeFilter = this.getTimeFilter(options);
    const scopedVars = options.scopedVars;
    const targets = cloneDeep(options.targets);
    const queryTargets: any[] = [];

    let i, y;

    let allQueries = _map(targets, (target) => {
      if (target.hide) {
        return '';
      }

      queryTargets.push(target);

      // backward compatibility
      scopedVars.interval = scopedVars.__interval;

      return new InfluxQueryModel(target, this.templateSrv, scopedVars).render(true);
    }).reduce((acc, current) => {
      if (current !== '') {
        acc += ';' + current;
      }
      return acc;
    });

    if (allQueries === '') {
      return of({ data: [] });
    }

    // add global adhoc filters to timeFilter
    const adhocFilters = this.templateSrv.getAdhocFilters(this.name);
    const adhocFiltersFromDashboard = options.targets.flatMap((target: InfluxQuery) => target.adhocFilters ?? []);
    if (adhocFilters?.length || adhocFiltersFromDashboard?.length) {
      const ahFilters = adhocFilters?.length ? adhocFilters : adhocFiltersFromDashboard;
      const tmpQuery = new InfluxQueryModel({ refId: 'A' }, this.templateSrv, scopedVars);
      timeFilter += ' AND ' + tmpQuery.renderAdhocFilters(ahFilters);
    }
    // replace grafana variables
    scopedVars.timeFilter = { value: timeFilter };

    // replace templated variables
    allQueries = this.templateSrv.replace(allQueries, scopedVars);

    return this._seriesQuery(allQueries, options).pipe(
      map((data: any) => {
        if (!data || !data.results) {
          return { data: [] };
        }

        const seriesList = [];
        for (i = 0; i < data.results.length; i++) {
          const result = data.results[i];
          if (!result || !result.series) {
            continue;
          }

          const target = queryTargets[i];
          let alias = target.alias;
          if (alias) {
            alias = this.templateSrv.replace(target.alias, options.scopedVars);
          }

          const meta: QueryResultMeta = {
            executedQueryString: data.executedQueryString,
          };

          const influxSeries = new InfluxSeries({
            refId: target.refId,
            series: data.results[i].series,
            alias: alias,
            meta,
          });

          switch (target.resultFormat) {
            case 'logs':
              meta.preferredVisualisationType = 'logs';
            case 'table': {
              seriesList.push(influxSeries.getTable());
              break;
            }
            default: {
              const timeSeries = influxSeries.getTimeSeries();
              for (y = 0; y < timeSeries.length; y++) {
                seriesList.push(timeSeriesToDataFrame(timeSeries[y]));
              }
              break;
            }
          }
        }

        return { data: seriesList };
      })
    );
  }

  async annotationEvents(options: DataQueryRequest, annotation: InfluxQuery): Promise<AnnotationEvent[]> {
    if (this.isFlux) {
      return Promise.reject({
@@ -395,50 +208,34 @@ export default class InfluxDatasource extends DataSourceWithBackend<InfluxQuery,
      });
    }

    if (config.featureToggles.influxdbBackendMigration && this.access === 'proxy') {
      // We want to send our query to the backend as a raw query
      const target: InfluxQuery = {
        refId: 'metricFindQuery',
        datasource: this.getRef(),
        query: this.templateSrv.replace(annotation.query, undefined, 'regex'),
        rawQuery: true,
      };
    // We want to send our query to the backend as a raw query
    const target: InfluxQuery = {
      refId: 'metricFindQuery',
      datasource: this.getRef(),
      query: this.templateSrv.replace(annotation.query, undefined, 'regex'),
      rawQuery: true,
    };

      return lastValueFrom(
        getBackendSrv()
          .fetch<BackendDataSourceResponse>({
            url: '/api/ds/query',
            method: 'POST',
            headers: this.getRequestHeaders(),
            data: {
              from: options.range.from.valueOf().toString(),
              to: options.range.to.valueOf().toString(),
              queries: [target],
            },
            requestId: annotation.name,
          })
          .pipe(
            map(
              async (res: FetchResponse<BackendDataSourceResponse>) =>
                await this.responseParser.transformAnnotationResponse(annotation, res, target)
            )
    return lastValueFrom(
      getBackendSrv()
        .fetch<BackendDataSourceResponse>({
          url: '/api/ds/query',
          method: 'POST',
          headers: this.getRequestHeaders(),
          data: {
            from: options.range.from.valueOf().toString(),
            to: options.range.to.valueOf().toString(),
            queries: [target],
          },
          requestId: annotation.name,
        })
        .pipe(
          map(
            async (res: FetchResponse<BackendDataSourceResponse>) =>
              await this.responseParser.transformAnnotationResponse(annotation, res, target)
          )
      );
    }

    const timeFilter = this.getTimeFilter({ rangeRaw: options.range.raw, timezone: options.timezone });
    let query = annotation.query.replace('$timeFilter', timeFilter);
    query = this.templateSrv.replace(query, undefined, 'regex');

    return lastValueFrom(this._seriesQuery(query, options)).then((data: any) => {
      if (!data || !data.results || !data.results[0]) {
        throw { message: 'No results in response from InfluxDB' };
      }
      return new InfluxSeries({
        series: data.results[0].series,
        annotation: annotation,
      }).getAnnotations();
    });
        )
    );
  }

  targetContainsTemplate(target: any) {
@@ -520,7 +317,7 @@ export default class InfluxDatasource extends DataSourceWithBackend<InfluxQuery,
  }

  async metricFindQuery(query: string, options?: any): Promise<MetricFindValue[]> {
    if (this.isFlux || this.isMigrationToggleOnAndIsAccessProxy()) {
    if (this.isFlux) {
      const target: InfluxQuery = {
        refId: 'metricFindQuery',
        query,
@@ -719,8 +516,4 @@ export default class InfluxDatasource extends DataSourceWithBackend<InfluxQuery,

    return date.valueOf() + 'ms';
  }

  isMigrationToggleOnAndIsAccessProxy() {
    return config.featureToggles.influxdbBackendMigration && this.access === 'proxy';
  }
}

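For reference, the `getFieldType` helper in the datasource.ts hunk above picks the value field's type from the first non-null entry and falls back to `number` when every value is null. A few illustrative calls (hypothetical usage; the function is module-private and not exported):

```ts
// Sketch only: assumes getFieldType were importable from datasource.ts.
getFieldType([null, 'a', 'b']); // FieldType.string — first non-null value is a string
getFieldType([true, false]);    // FieldType.boolean
getFieldType([1, 2, null]);     // FieldType.number
getFieldType([null, null]);     // FieldType.number — fallback when all values are null
```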
@@ -1,218 +0,0 @@
import { each, map, includes, flatten, keys } from 'lodash';

import { FieldType, QueryResultMeta, TimeSeries, TableData } from '@grafana/data';
import TableModel from 'app/core/TableModel';

export default class InfluxSeries {
  refId?: string;
  series: any;
  alias: any;
  annotation: any;
  meta?: QueryResultMeta;

  constructor(options: { series: any; alias?: any; annotation?: any; meta?: QueryResultMeta; refId?: string }) {
    this.series = options.series;
    this.alias = options.alias;
    this.annotation = options.annotation;
    this.meta = options.meta;
    this.refId = options.refId;
  }

  getTimeSeries(): TimeSeries[] {
    const output: TimeSeries[] = [];
    let i, j;

    if (this.series.length === 0) {
      return output;
    }

    each(this.series, (series) => {
      const columns = series.columns.length;
      const tags = map(series.tags, (value, key) => {
        return key + ': ' + value;
      });

      for (j = 1; j < columns; j++) {
        let seriesName = series.name;
        const columnName = series.columns[j];
        if (columnName !== 'value') {
          seriesName = seriesName + '.' + columnName;
        }

        if (this.alias) {
          seriesName = this._getSeriesName(series, j);
        } else if (series.tags) {
          seriesName = seriesName + ' {' + tags.join(', ') + '}';
        }

        const datapoints = [];
        if (series.values) {
          for (i = 0; i < series.values.length; i++) {
            datapoints[i] = [series.values[i][j], series.values[i][0]];
          }
        }

        output.push({
          title: seriesName,
          target: seriesName,
          datapoints: datapoints,
          tags: series.tags,
          meta: this.meta,
          refId: this.refId,
        });
      }
    });

    return output;
  }

  _getSeriesName(series: any, index: number) {
    const regex = /\$(\w+)|\[\[([\s\S]+?)\]\]/g;
    const segments = series.name.split('.');

    return this.alias.replace(regex, (match: any, g1: any, g2: any) => {
      const group = g1 || g2;
      const segIndex = parseInt(group, 10);

      if (group === 'm' || group === 'measurement') {
        return series.name;
      }
      if (group === 'col') {
        return series.columns[index];
      }
      if (!isNaN(segIndex)) {
        return segments[segIndex] ?? match;
      }
      if (group.indexOf('tag_') !== 0) {
        return match;
      }

      const tag = group.replace('tag_', '');
      if (!series.tags) {
        return match;
      }
      return series.tags[tag];
    });
  }

  getAnnotations() {
    const list: any[] = [];

    each(this.series, (series) => {
      let titleCol: any = null;
      let timeCol: any = null;
      let timeEndCol: any = null;
      const tagsCol: any = [];
      let textCol: any = null;

      each(series.columns, (column, index) => {
        if (column === 'time') {
          timeCol = index;
          return;
        }
        if (column === 'sequence_number') {
          return;
        }
        if (column === this.annotation.titleColumn) {
          titleCol = index;
          return;
        }
        if (includes((this.annotation.tagsColumn || '').replace(' ', '').split(','), column)) {
          tagsCol.push(index);
          return;
        }
        if (column === this.annotation.textColumn) {
          textCol = index;
          return;
        }
        if (column === this.annotation.timeEndColumn) {
          timeEndCol = index;
          return;
        }
        // legacy case
        if (!titleCol && textCol !== index) {
          titleCol = index;
        }
      });

      each(series.values, (value) => {
        const data = {
          annotation: this.annotation,
          time: +new Date(value[timeCol]),
          title: value[titleCol],
          timeEnd: value[timeEndCol],
          // Remove empty values, then split in different tags for comma separated values
          tags: flatten(
            tagsCol
              .filter((t: any) => {
                return value[t];
              })
              .map((t: any) => {
                return value[t].split(',');
              })
          ),
          text: value[textCol],
        };

        list.push(data);
      });
    });

    return list;
  }

  getTable(): TableData {
    const table = new TableModel();
    let i, j;

    table.refId = this.refId;
    table.meta = this.meta;

    if (this.series.length === 0) {
      return table;
    }

    // the order is:
    // - first the first item from the value-array (this is often (always?) the timestamp)
    // - then all the tag-values
    // - then the rest of the value-array
    //
    // we have to keep this order both in table.columns and table.rows

    each(this.series, (series: any, seriesIndex: number) => {
      if (seriesIndex === 0) {
        const firstCol = series.columns[0];
        // Check the first column's name, if it is `time`, we
        // mark it as having the type time
        const firstTableCol = firstCol === 'time' ? { text: 'Time', type: FieldType.time } : { text: firstCol };
        table.columns.push(firstTableCol);
        each(keys(series.tags), (key) => {
          table.columns.push({ text: key });
        });
        for (j = 1; j < series.columns.length; j++) {
          table.columns.push({ text: series.columns[j] });
        }
      }

      if (series.values) {
        for (i = 0; i < series.values.length; i++) {
          const values = series.values[i];
          const reordered = [values[0]];
          if (series.tags) {
            for (const key in series.tags) {
              if (series.tags.hasOwnProperty(key)) {
                reordered.push(series.tags[key]);
              }
            }
          }
          for (j = 1; j < values.length; j++) {
            reordered.push(values[j]);
          }
          table.rows.push(reordered);
        }
      }
    });

    return table;
  }
}

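The deleted `influx_series.ts` above carried the alias-expansion rules (`$m`/`[[measurement]]` → measurement name, `$col` → column name, `$tag_<key>` → tag value, numeric `$1`/`[[3]]` → dot-separated name segments) that the tests below exercise. A self-contained re-implementation sketch, for reference only (the `expandAlias` helper is hypothetical, not part of the codebase):

```ts
// Same pattern the removed InfluxSeries._getSeriesName used.
const ALIAS_REGEX = /\$(\w+)|\[\[([\s\S]+?)\]\]/g;

function expandAlias(
  alias: string,
  name: string, // series name, e.g. 'app.prod.server1.count'
  columns: string[], // result columns, e.g. ['time', 'mean']
  tags: Record<string, string> | undefined,
  index: number // index of the value column being named
): string {
  const segments = name.split('.');
  return alias.replace(ALIAS_REGEX, (match: string, g1?: string, g2?: string) => {
    const group = g1 ?? g2 ?? '';
    const segIndex = parseInt(group, 10);
    if (group === 'm' || group === 'measurement') {
      return name; // $m / [[measurement]] -> full series name
    }
    if (group === 'col') {
      return columns[index]; // $col -> current column name
    }
    if (!isNaN(segIndex)) {
      return segments[segIndex] ?? match; // $1 / [[3]] -> name segment, kept as-is when out of range
    }
    if (group.indexOf('tag_') !== 0) {
      return match; // unknown pattern stays untouched
    }
    return tags?.[group.replace('tag_', '')] ?? match; // $tag_server -> tag value
  });
}

// expandAlias('alias: $1 -> [[3]]', 'app.prod.server1.count', ['time', 'mean'], {}, 1)
// => 'alias: prod -> count'
```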
@@ -1,9 +1,8 @@
import { lastValueFrom, of } from 'rxjs';
import { TemplateSrvStub } from 'test/specs/helpers';

import { ScopedVars } from '@grafana/data/src';
import { FetchResponse } from '@grafana/runtime';
import config from 'app/core/config';
import { ScopedVars } from '@grafana/data';
import { FetchResponse, setBackendSrv } from '@grafana/runtime';
import { backendSrv } from 'app/core/services/backend_srv'; // will use the version in __mocks__

import { BROWSER_MODE_DISABLED_MESSAGE } from '../constants';
@@ -12,11 +11,6 @@ import InfluxDatasource from '../datasource';
//@ts-ignore
const templateSrv = new TemplateSrvStub();

jest.mock('@grafana/runtime', () => ({
  ...(jest.requireActual('@grafana/runtime') as unknown as object),
  getBackendSrv: () => backendSrv,
}));

describe('InfluxDataSource', () => {
  const ctx: any = {
    instanceSettings: { url: 'url', name: 'influxDb', jsonData: { httpMode: 'GET' } },
@@ -29,6 +23,7 @@ describe('InfluxDataSource', () => {
    ctx.instanceSettings.url = '/api/datasources/proxy/1';
    ctx.instanceSettings.access = 'proxy';
    ctx.ds = new InfluxDatasource(ctx.instanceSettings, templateSrv);
    setBackendSrv(backendSrv);
  });

  describe('When issuing metricFindQuery', () => {
@@ -325,7 +320,6 @@ describe('InfluxDataSource', () => {
    it('should apply all template variables with InfluxQL mode', () => {
      ds.isFlux = false;
      ds.access = 'proxy';
      config.featureToggles.influxdbBackendMigration = true;
      const query = ds.applyTemplateVariables(influxQuery, {
        interpolationVar: { text: text, value: text },
        interpolationVar2: { text: 'interpolationText2', value: 'interpolationText2' },
@@ -336,7 +330,6 @@ describe('InfluxDataSource', () => {
    it('should apply all scopedVars to tags', () => {
      ds.isFlux = false;
      ds.access = 'proxy';
      config.featureToggles.influxdbBackendMigration = true;
      const query = ds.applyTemplateVariables(influxQuery, {
        interpolationVar: { text: text, value: text },
        interpolationVar2: { text: 'interpolationText2', value: 'interpolationText2' },

@@ -1,371 +0,0 @@
import produce from 'immer';

import InfluxSeries from '../influx_series';

describe('when generating timeseries from influxdb response', () => {
  describe('given multiple fields for series', () => {
    const options = {
      alias: '',
      series: [
        {
          name: 'cpu',
          tags: { app: 'test', server: 'server1' },
          columns: ['time', 'mean', 'max', 'min'],
          values: [
            [1431946625000, 10, 11, 9],
            [1431946626000, 20, 21, 19],
          ],
        },
      ],
    };
    describe('and no alias', () => {
      it('should generate multiple datapoints for each column', () => {
        const series = new InfluxSeries(options);
        const result = series.getTimeSeries();

        expect(result.length).toBe(3);
        expect(result[0].target).toBe('cpu.mean {app: test, server: server1}');
        expect(result[0].datapoints[0][0]).toBe(10);
        expect(result[0].datapoints[0][1]).toBe(1431946625000);
        expect(result[0].datapoints[1][0]).toBe(20);
        expect(result[0].datapoints[1][1]).toBe(1431946626000);

        expect(result[1].target).toBe('cpu.max {app: test, server: server1}');
        expect(result[1].datapoints[0][0]).toBe(11);
        expect(result[1].datapoints[0][1]).toBe(1431946625000);
        expect(result[1].datapoints[1][0]).toBe(21);
        expect(result[1].datapoints[1][1]).toBe(1431946626000);

        expect(result[2].target).toBe('cpu.min {app: test, server: server1}');
        expect(result[2].datapoints[0][0]).toBe(9);
        expect(result[2].datapoints[0][1]).toBe(1431946625000);
        expect(result[2].datapoints[1][0]).toBe(19);
        expect(result[2].datapoints[1][1]).toBe(1431946626000);
      });
    });

    describe('and simple alias', () => {
      it('should use alias', () => {
        options.alias = 'new series';
        const series = new InfluxSeries(options);
        const result = series.getTimeSeries();

        expect(result[0].target).toBe('new series');
        expect(result[1].target).toBe('new series');
        expect(result[2].target).toBe('new series');
      });
    });

    describe('and alias patterns', () => {
      it('should replace patterns', () => {
        options.alias = 'alias: $m -> $tag_server ([[measurement]])';
        const series = new InfluxSeries(options);
        const result = series.getTimeSeries();

        expect(result[0].target).toBe('alias: cpu -> server1 (cpu)');
        expect(result[1].target).toBe('alias: cpu -> server1 (cpu)');
        expect(result[2].target).toBe('alias: cpu -> server1 (cpu)');
      });
    });
  });

  describe('given measurement with default fieldname', () => {
    const options = {
      series: [
        {
          name: 'cpu',
          tags: { app: 'test', server: 'server1' },
          columns: ['time', 'value'],
          values: [
            ['2015-05-18T10:57:05Z', 10],
            ['2015-05-18T10:57:06Z', 12],
          ],
        },
        {
          name: 'cpu',
          tags: { app: 'test2', server: 'server2' },
          columns: ['time', 'value'],
          values: [
            ['2015-05-18T10:57:05Z', 15],
            ['2015-05-18T10:57:06Z', 16],
          ],
        },
      ],
    };

    describe('and no alias', () => {
      it('should generate label with no field', () => {
        const series = new InfluxSeries(options);
        const result = series.getTimeSeries();

        expect(result[0].target).toBe('cpu {app: test, server: server1}');
        expect(result[1].target).toBe('cpu {app: test2, server: server2}');
      });
    });
  });

  describe('given two series', () => {
    const options = {
      alias: '',
      series: [
        {
          name: 'cpu',
          tags: { app: 'test', server: 'server1' },
          columns: ['time', 'mean'],
          values: [
            [1431946625000, 10],
            [1431946626000, 12],
          ],
        },
        {
          name: 'cpu',
          tags: { app: 'test2', server: 'server2' },
          columns: ['time', 'mean'],
          values: [
            [1431946625000, 15],
            [1431946626000, 16],
          ],
        },
      ],
    };

    describe('and no alias', () => {
      it('should generate two time series', () => {
        const series = new InfluxSeries(options);
        const result = series.getTimeSeries();

        expect(result.length).toBe(2);
        expect(result[0].target).toBe('cpu.mean {app: test, server: server1}');
        expect(result[0].datapoints[0][0]).toBe(10);
        expect(result[0].datapoints[0][1]).toBe(1431946625000);
        expect(result[0].datapoints[1][0]).toBe(12);
        expect(result[0].datapoints[1][1]).toBe(1431946626000);
        expect(result[0].tags).toMatchObject({
          app: 'test',
          server: 'server1',
        });

        expect(result[1].target).toBe('cpu.mean {app: test2, server: server2}');
        expect(result[1].datapoints[0][0]).toBe(15);
        expect(result[1].datapoints[0][1]).toBe(1431946625000);
        expect(result[1].datapoints[1][0]).toBe(16);
        expect(result[1].datapoints[1][1]).toBe(1431946626000);
        expect(result[1].tags).toMatchObject({
          app: 'test2',
          server: 'server2',
        });
      });
    });

    describe('and simple alias', () => {
      it('should use alias', () => {
        options.alias = 'new series';
        const series = new InfluxSeries(options);
        const result = series.getTimeSeries();

        expect(result[0].target).toBe('new series');
      });
    });

    describe('and alias patterns', () => {
      it('should replace patterns', () => {
        options.alias = 'alias: $m -> $tag_server ([[measurement]])';
        const series = new InfluxSeries(options);
        const result = series.getTimeSeries();

        expect(result[0].target).toBe('alias: cpu -> server1 (cpu)');
        expect(result[1].target).toBe('alias: cpu -> server2 (cpu)');
      });
    });
  });

  describe('given measurement with dots', () => {
    const options = {
      alias: '',
      series: [
        {
          name: 'app.prod.server1.count',
          tags: {},
          columns: ['time', 'mean'],
          values: [
            [1431946625000, 10],
            [1431946626000, 12],
          ],
        },
      ],
    };

    it('should replace patterns', () => {
      options.alias = 'alias: $1 -> [[3]]';
      const series = new InfluxSeries(options);
      const result = series.getTimeSeries();

      expect(result[0].target).toBe('alias: prod -> count');
    });

    it('should handle too large indexes', () => {
      options.alias = 'alias: $0 $1 $2 $3 $4 $5';
      const series = new InfluxSeries(options);
      const result = series.getTimeSeries();

      expect(result[0].target).toBe('alias: app prod server1 count $4 $5');
    });
  });

  describe('given table response', () => {
    const options = {
      alias: '',
      series: [
        {
          name: 'app.prod.server1.count',
          tags: { datacenter: 'Africa', server: 'server2' },
          columns: ['time', 'value2', 'value'],
          values: [
            [1431946625000, 23, 10],
            [1431946626000, 25, 12],
          ],
        },
      ],
    };

    it('should return table', () => {
      const series = new InfluxSeries(options);
      const table = series.getTable();

      expect(table.type).toBe('table');
      expect(table.columns.length).toBe(5);
      expect(table.columns[0].text).toEqual('Time');
      expect(table.rows[0]).toEqual([1431946625000, 'Africa', 'server2', 23, 10]);
    });
  });

  describe('given table response from SHOW CARDINALITY', () => {
    const options = {
      alias: '',
      series: [
        {
          name: 'cpu',
          columns: ['count'],
          values: [[37]],
        },
      ],
    };

    it('should return table', () => {
      const series = new InfluxSeries(options);
      const table = series.getTable();

      expect(table.type).toBe('table');
      expect(table.columns.length).toBe(1);
      expect(table.columns[0].text).toEqual('count');
      expect(table.rows[0]).toEqual([37]);
    });
  });

  describe('given annotation response', () => {
    describe('with empty tagsColumn', () => {
      const options = {
        alias: '',
        annotation: {},
        series: [
          {
            name: 'logins.count',
            tags: { datacenter: 'Africa', server: 'server2' },
            columns: ['time', 'datacenter', 'hostname', 'source', 'value'],
            values: [[1481549440372, 'America', '10.1.100.10', 'backend', 215.7432653659507]],
          },
        ],
      };

      it('should multiple tags', () => {
        const series = new InfluxSeries(options);
        const annotations = series.getAnnotations();

        expect(annotations[0].tags.length).toBe(0);
      });
    });

    describe('given annotation response', () => {
      const options = {
        alias: '',
        annotation: {
          tagsColumn: 'datacenter, source',
        },
        series: [
          {
            name: 'logins.count',
            tags: { datacenter: 'Africa', server: 'server2' },
            columns: ['time', 'datacenter', 'hostname', 'source', 'value'],
            values: [[1481549440372, 'America', '10.1.100.10', 'backend', 215.7432653659507]],
          },
        ],
      };

      it('should multiple tags', () => {
        const series = new InfluxSeries(options);
        const annotations = series.getAnnotations();

        expect(annotations[0].tags.length).toBe(2);
        expect(annotations[0].tags[0]).toBe('America');
        expect(annotations[0].tags[1]).toBe('backend');
      });
    });

    describe('given a time-column in the json-response', () => {
      const options = {
        alias: '',
        series: [
          {
            name: 'cpu',
            tags: { cpu: 'cpu1' },
            columns: ['time', 'usage_idle'],
            values: [[1481549440372, 42]],
          },
        ],
      };

      it('the column-names should be correct if the time-column is not renamed', () => {
        const series = new InfluxSeries(options);
        const table = series.getTable();

        expect(table.columns).toStrictEqual([
          {
            text: 'Time',
            type: 'time',
          },
          {
            text: 'cpu',
          },
          {
            text: 'usage_idle',
          },
        ]);

        expect(table.rows).toStrictEqual([[1481549440372, 'cpu1', 42]]);
      });

      it('the column-names should be correct if the time-column is renamed', () => {
        const renamedOptions = produce(options, (draft) => {
          // we rename the time-column to `zeit`
          draft.series[0].columns[0] = 'zeit';
        });
        const series = new InfluxSeries(renamedOptions);
        const table = series.getTable();

        expect(table.columns).toStrictEqual([
          {
            text: 'zeit',
          },
          {
            text: 'cpu',
          },
          {
            text: 'usage_idle',
          },
        ]);

        expect(table.rows).toStrictEqual([[1481549440372, 'cpu1', 42]]);
      });
    });
  });
});

@@ -4,7 +4,6 @@ import { TemplateSrvStub } from 'test/specs/helpers';

import { FieldType, MutableDataFrame } from '@grafana/data';
import { FetchResponse } from '@grafana/runtime';
import config from 'app/core/config';
import { backendSrv } from 'app/core/services/backend_srv'; // will use the version in __mocks__

import InfluxDatasource from '../datasource';
@@ -426,7 +425,6 @@ describe('influxdb response parser', () => {

    ctx.ds = new InfluxDatasource(ctx.instanceSettings, templateSrv);
    ctx.ds.access = 'proxy';
    config.featureToggles.influxdbBackendMigration = true;
    response = await ctx.ds.annotationEvents(queryOptions, annotation);
  });
