mirror of
https://github.com/grafana/grafana.git
synced 2024-11-25 18:30:41 -06:00
CloudWatch: Refactoring - decouple logs and metrics in datasource file (#55079)
* break out query execution related logic * betterer fixes * remove unused * cleanup * remove unused variables * remove not used file * fix broken test * pr feedback * add comments
This commit is contained in:
parent
1a0cbdeabe
commit
7bca193ecd
@ -5895,14 +5895,6 @@ exports[`better eslint`] = {
|
||||
"public/app/plugins/datasource/cloud-monitoring/types.ts:5381": [
|
||||
[0, 0, 0, "Unexpected any. Specify a different type.", "0"]
|
||||
],
|
||||
"public/app/plugins/datasource/cloudwatch/__mocks__/CloudWatchDataSource.ts:5381": [
|
||||
[0, 0, 0, "Unexpected any. Specify a different type.", "0"],
|
||||
[0, 0, 0, "Unexpected any. Specify a different type.", "1"],
|
||||
[0, 0, 0, "Unexpected any. Specify a different type.", "2"],
|
||||
[0, 0, 0, "Unexpected any. Specify a different type.", "3"],
|
||||
[0, 0, 0, "Unexpected any. Specify a different type.", "4"],
|
||||
[0, 0, 0, "Unexpected any. Specify a different type.", "5"]
|
||||
],
|
||||
"public/app/plugins/datasource/cloudwatch/__mocks__/monarch/Monaco.ts:5381": [
|
||||
[0, 0, 0, "Unexpected any. Specify a different type.", "0"]
|
||||
],
|
||||
@ -5947,10 +5939,7 @@ exports[`better eslint`] = {
|
||||
[0, 0, 0, "Do not use any type assertions.", "0"],
|
||||
[0, 0, 0, "Do not use any type assertions.", "1"]
|
||||
],
|
||||
"public/app/plugins/datasource/cloudwatch/datasource.d.ts:5381": [
|
||||
[0, 0, 0, "Unexpected any. Specify a different type.", "0"]
|
||||
],
|
||||
"public/app/plugins/datasource/cloudwatch/datasource.test.ts:5381": [
|
||||
"public/app/plugins/datasource/cloudwatch/datasource.ts:5381": [
|
||||
[0, 0, 0, "Unexpected any. Specify a different type.", "0"],
|
||||
[0, 0, 0, "Unexpected any. Specify a different type.", "1"],
|
||||
[0, 0, 0, "Unexpected any. Specify a different type.", "2"],
|
||||
@ -5959,42 +5948,7 @@ exports[`better eslint`] = {
|
||||
[0, 0, 0, "Unexpected any. Specify a different type.", "5"],
|
||||
[0, 0, 0, "Unexpected any. Specify a different type.", "6"],
|
||||
[0, 0, 0, "Unexpected any. Specify a different type.", "7"],
|
||||
[0, 0, 0, "Unexpected any. Specify a different type.", "8"],
|
||||
[0, 0, 0, "Unexpected any. Specify a different type.", "9"],
|
||||
[0, 0, 0, "Unexpected any. Specify a different type.", "10"],
|
||||
[0, 0, 0, "Unexpected any. Specify a different type.", "11"],
|
||||
[0, 0, 0, "Unexpected any. Specify a different type.", "12"]
|
||||
],
|
||||
"public/app/plugins/datasource/cloudwatch/datasource.ts:5381": [
|
||||
[0, 0, 0, "Unexpected any. Specify a different type.", "0"],
|
||||
[0, 0, 0, "Unexpected any. Specify a different type.", "1"],
|
||||
[0, 0, 0, "Do not use any type assertions.", "2"],
|
||||
[0, 0, 0, "Do not use any type assertions.", "3"],
|
||||
[0, 0, 0, "Do not use any type assertions.", "4"],
|
||||
[0, 0, 0, "Unexpected any. Specify a different type.", "5"],
|
||||
[0, 0, 0, "Do not use any type assertions.", "6"],
|
||||
[0, 0, 0, "Unexpected any. Specify a different type.", "7"],
|
||||
[0, 0, 0, "Unexpected any. Specify a different type.", "8"],
|
||||
[0, 0, 0, "Do not use any type assertions.", "9"],
|
||||
[0, 0, 0, "Unexpected any. Specify a different type.", "10"],
|
||||
[0, 0, 0, "Unexpected any. Specify a different type.", "11"],
|
||||
[0, 0, 0, "Unexpected any. Specify a different type.", "12"],
|
||||
[0, 0, 0, "Unexpected any. Specify a different type.", "13"],
|
||||
[0, 0, 0, "Do not use any type assertions.", "14"],
|
||||
[0, 0, 0, "Unexpected any. Specify a different type.", "15"],
|
||||
[0, 0, 0, "Unexpected any. Specify a different type.", "16"],
|
||||
[0, 0, 0, "Unexpected any. Specify a different type.", "17"],
|
||||
[0, 0, 0, "Unexpected any. Specify a different type.", "18"],
|
||||
[0, 0, 0, "Unexpected any. Specify a different type.", "19"],
|
||||
[0, 0, 0, "Unexpected any. Specify a different type.", "20"],
|
||||
[0, 0, 0, "Unexpected any. Specify a different type.", "21"],
|
||||
[0, 0, 0, "Unexpected any. Specify a different type.", "22"],
|
||||
[0, 0, 0, "Unexpected any. Specify a different type.", "23"],
|
||||
[0, 0, 0, "Do not use any type assertions.", "24"],
|
||||
[0, 0, 0, "Do not use any type assertions.", "25"],
|
||||
[0, 0, 0, "Do not use any type assertions.", "26"],
|
||||
[0, 0, 0, "Do not use any type assertions.", "27"],
|
||||
[0, 0, 0, "Unexpected any. Specify a different type.", "28"]
|
||||
[0, 0, 0, "Unexpected any. Specify a different type.", "8"]
|
||||
],
|
||||
"public/app/plugins/datasource/cloudwatch/guards.ts:5381": [
|
||||
[0, 0, 0, "Do not use any type assertions.", "0"]
|
||||
@ -6021,23 +5975,9 @@ exports[`better eslint`] = {
|
||||
"public/app/plugins/datasource/cloudwatch/metric-math/completion/CompletionItemProvider.test.ts:5381": [
|
||||
[0, 0, 0, "Unexpected any. Specify a different type.", "0"]
|
||||
],
|
||||
"public/app/plugins/datasource/cloudwatch/specs/datasource.test.ts:5381": [
|
||||
[0, 0, 0, "Unexpected any. Specify a different type.", "0"],
|
||||
[0, 0, 0, "Unexpected any. Specify a different type.", "1"],
|
||||
[0, 0, 0, "Unexpected any. Specify a different type.", "2"],
|
||||
[0, 0, 0, "Unexpected any. Specify a different type.", "3"],
|
||||
[0, 0, 0, "Unexpected any. Specify a different type.", "4"],
|
||||
[0, 0, 0, "Unexpected any. Specify a different type.", "5"],
|
||||
[0, 0, 0, "Unexpected any. Specify a different type.", "6"],
|
||||
[0, 0, 0, "Unexpected any. Specify a different type.", "7"],
|
||||
[0, 0, 0, "Unexpected any. Specify a different type.", "8"],
|
||||
[0, 0, 0, "Unexpected any. Specify a different type.", "9"],
|
||||
[0, 0, 0, "Unexpected any. Specify a different type.", "10"],
|
||||
[0, 0, 0, "Unexpected any. Specify a different type.", "11"],
|
||||
[0, 0, 0, "Unexpected any. Specify a different type.", "12"],
|
||||
[0, 0, 0, "Unexpected any. Specify a different type.", "13"],
|
||||
[0, 0, 0, "Unexpected any. Specify a different type.", "14"],
|
||||
[0, 0, 0, "Unexpected any. Specify a different type.", "15"]
|
||||
"public/app/plugins/datasource/cloudwatch/query-runner/CloudWatchLogsQueryRunner.ts:5381": [
|
||||
[0, 0, 0, "Do not use any type assertions.", "0"],
|
||||
[0, 0, 0, "Unexpected any. Specify a different type.", "1"]
|
||||
],
|
||||
"public/app/plugins/datasource/cloudwatch/types.ts:5381": [
|
||||
[0, 0, 0, "Unexpected any. Specify a different type.", "0"],
|
||||
|
@ -0,0 +1,41 @@
|
||||
import { of } from 'rxjs';
|
||||
|
||||
import { CustomVariableModel, DataQueryRequest } from '@grafana/data';
|
||||
import { getBackendSrv, setBackendSrv } from '@grafana/runtime';
|
||||
import { TemplateSrv } from 'app/features/templating/template_srv';
|
||||
|
||||
import { CloudWatchAnnotationQueryRunner } from '../query-runner/CloudWatchAnnotationQueryRunner';
|
||||
import { CloudWatchQuery } from '../types';
|
||||
|
||||
import { CloudWatchSettings, setupMockedTemplateService } from './CloudWatchDataSource';
|
||||
import { timeRange } from './timeRange';
|
||||
|
||||
export function setupMockedAnnotationQueryRunner({ variables }: { variables?: CustomVariableModel[] }) {
|
||||
let templateService = new TemplateSrv();
|
||||
if (variables) {
|
||||
templateService = setupMockedTemplateService(variables);
|
||||
}
|
||||
|
||||
const runner = new CloudWatchAnnotationQueryRunner(CloudWatchSettings, templateService);
|
||||
const fetchMock = jest.fn().mockReturnValue(of({}));
|
||||
|
||||
setBackendSrv({
|
||||
...getBackendSrv(),
|
||||
fetch: fetchMock,
|
||||
});
|
||||
|
||||
const request: DataQueryRequest<CloudWatchQuery> = {
|
||||
range: timeRange,
|
||||
rangeRaw: { from: '1483228800', to: '1483232400' },
|
||||
targets: [],
|
||||
requestId: '',
|
||||
interval: '',
|
||||
intervalMs: 0,
|
||||
scopedVars: {},
|
||||
timezone: '',
|
||||
app: '',
|
||||
startTime: 0,
|
||||
};
|
||||
|
||||
return { runner, fetchMock, templateService, request, timeRange };
|
||||
}
|
@ -1,62 +1,92 @@
|
||||
import { of } from 'rxjs';
|
||||
|
||||
import { dateTime } from '@grafana/data';
|
||||
import { setBackendSrv } from '@grafana/runtime';
|
||||
import {
|
||||
DataSourceInstanceSettings,
|
||||
DataSourcePluginMeta,
|
||||
PluginMetaInfo,
|
||||
PluginType,
|
||||
VariableHide,
|
||||
} from '@grafana/data';
|
||||
import { getBackendSrv, setBackendSrv } from '@grafana/runtime';
|
||||
import { getTimeSrv } from 'app/features/dashboard/services/TimeSrv';
|
||||
import { TemplateSrv } from 'app/features/templating/template_srv';
|
||||
import { initialCustomVariableModelState } from 'app/features/variables/custom/reducer';
|
||||
import { CustomVariableModel } from 'app/features/variables/types';
|
||||
|
||||
import { TemplateSrvMock } from '../../../../features/templating/template_srv.mock';
|
||||
import { CloudWatchDatasource } from '../datasource';
|
||||
import { CloudWatchJsonData } from '../types';
|
||||
|
||||
export function setupMockedTemplateService(variables: CustomVariableModel[]) {
|
||||
const templateService = new TemplateSrv();
|
||||
templateService.init(variables);
|
||||
templateService.getVariables = jest.fn().mockReturnValue(variables);
|
||||
return templateService;
|
||||
}
|
||||
|
||||
const info: PluginMetaInfo = {
|
||||
author: {
|
||||
name: '',
|
||||
},
|
||||
description: '',
|
||||
links: [],
|
||||
logos: {
|
||||
large: '',
|
||||
small: '',
|
||||
},
|
||||
screenshots: [],
|
||||
updated: '',
|
||||
version: '',
|
||||
};
|
||||
|
||||
export const meta: DataSourcePluginMeta<CloudWatchJsonData> = {
|
||||
id: '',
|
||||
name: '',
|
||||
type: PluginType.datasource,
|
||||
info,
|
||||
module: '',
|
||||
baseUrl: '',
|
||||
};
|
||||
|
||||
export const CloudWatchSettings: DataSourceInstanceSettings<CloudWatchJsonData> = {
|
||||
jsonData: { defaultRegion: 'us-west-1', tracingDatasourceUid: 'xray' },
|
||||
id: 0,
|
||||
uid: '',
|
||||
type: '',
|
||||
name: 'CloudWatch Test Datasource',
|
||||
meta,
|
||||
readOnly: false,
|
||||
access: 'direct',
|
||||
};
|
||||
|
||||
export function setupMockedDataSource({
|
||||
data = [],
|
||||
variables,
|
||||
mockGetVariableName = true,
|
||||
}: { data?: any; variables?: any; mockGetVariableName?: boolean } = {}) {
|
||||
let templateService = new TemplateSrvMock({
|
||||
region: 'templatedRegion',
|
||||
fields: 'templatedField',
|
||||
group: 'templatedGroup',
|
||||
}) as any;
|
||||
}: {
|
||||
variables?: CustomVariableModel[];
|
||||
mockGetVariableName?: boolean;
|
||||
} = {}) {
|
||||
let templateService = new TemplateSrv();
|
||||
if (variables) {
|
||||
templateService = new TemplateSrv();
|
||||
templateService.init(variables);
|
||||
templateService.getVariables = jest.fn().mockReturnValue(variables);
|
||||
templateService = setupMockedTemplateService(variables);
|
||||
if (mockGetVariableName) {
|
||||
templateService.getVariableName = (name: string) => name;
|
||||
}
|
||||
}
|
||||
|
||||
const datasource = new CloudWatchDatasource(
|
||||
{
|
||||
jsonData: { defaultRegion: 'us-west-1', tracingDatasourceUid: 'xray' },
|
||||
} as any,
|
||||
templateService,
|
||||
{
|
||||
timeRange() {
|
||||
const time = dateTime('2021-01-01T01:00:00Z');
|
||||
const range = {
|
||||
from: time.subtract(6, 'hour'),
|
||||
to: time,
|
||||
};
|
||||
|
||||
return {
|
||||
...range,
|
||||
raw: range,
|
||||
};
|
||||
},
|
||||
} as any
|
||||
);
|
||||
const timeSrv = getTimeSrv();
|
||||
const datasource = new CloudWatchDatasource(CloudWatchSettings, templateService, timeSrv);
|
||||
datasource.getVariables = () => ['test'];
|
||||
|
||||
datasource.getNamespaces = jest.fn().mockResolvedValue([]);
|
||||
datasource.getRegions = jest.fn().mockResolvedValue([]);
|
||||
datasource.defaultLogGroups = [];
|
||||
const fetchMock = jest.fn().mockReturnValue(of({ data }));
|
||||
setBackendSrv({ fetch: fetchMock } as any);
|
||||
datasource.logsQueryRunner.defaultLogGroups = [];
|
||||
const fetchMock = jest.fn().mockReturnValue(of({}));
|
||||
setBackendSrv({
|
||||
...getBackendSrv(),
|
||||
fetch: fetchMock,
|
||||
});
|
||||
|
||||
return { datasource, fetchMock, templateService };
|
||||
return { datasource, fetchMock, templateService, timeSrv };
|
||||
}
|
||||
|
||||
export const metricVariable: CustomVariableModel = {
|
||||
@ -180,7 +210,7 @@ export const regionVariable: CustomVariableModel = {
|
||||
multi: false,
|
||||
};
|
||||
|
||||
export const expressionVariable: CustomVariableModel = {
|
||||
export const fieldsVariable: CustomVariableModel = {
|
||||
...initialCustomVariableModelState,
|
||||
id: 'fields',
|
||||
name: 'fields',
|
||||
@ -192,3 +222,17 @@ export const expressionVariable: CustomVariableModel = {
|
||||
options: [{ value: 'templatedField', text: 'templatedField', selected: true }],
|
||||
multi: false,
|
||||
};
|
||||
|
||||
export const periodIntervalVariable: CustomVariableModel = {
|
||||
...initialCustomVariableModelState,
|
||||
id: 'period',
|
||||
name: 'period',
|
||||
index: 0,
|
||||
current: { value: '10m', text: '10m', selected: true },
|
||||
options: [{ value: '10m', text: '10m', selected: true }],
|
||||
multi: false,
|
||||
includeAll: false,
|
||||
query: '',
|
||||
hide: VariableHide.dontHide,
|
||||
type: 'custom',
|
||||
};
|
||||
|
@ -0,0 +1,62 @@
|
||||
import { of } from 'rxjs';
|
||||
|
||||
import { DataFrame } from '@grafana/data';
|
||||
import { BackendDataSourceResponse, getBackendSrv, setBackendSrv } from '@grafana/runtime';
|
||||
import { getTimeSrv } from 'app/features/dashboard/services/TimeSrv';
|
||||
import { TemplateSrv } from 'app/features/templating/template_srv';
|
||||
|
||||
import { CloudWatchLogsQueryRunner } from '../query-runner/CloudWatchLogsQueryRunner';
|
||||
import { CloudWatchLogsQueryStatus } from '../types';
|
||||
|
||||
import { CloudWatchSettings, setupMockedTemplateService } from './CloudWatchDataSource';
|
||||
|
||||
export function setupMockedLogsQueryRunner({
|
||||
data = {
|
||||
results: {},
|
||||
},
|
||||
variables,
|
||||
mockGetVariableName = true,
|
||||
}: { data?: BackendDataSourceResponse; variables?: any; mockGetVariableName?: boolean } = {}) {
|
||||
let templateService = new TemplateSrv();
|
||||
if (variables) {
|
||||
templateService = setupMockedTemplateService(variables);
|
||||
if (mockGetVariableName) {
|
||||
templateService.getVariableName = (name: string) => name;
|
||||
}
|
||||
}
|
||||
|
||||
const runner = new CloudWatchLogsQueryRunner(CloudWatchSettings, templateService, getTimeSrv());
|
||||
const fetchMock = jest.fn().mockReturnValue(of({ data }));
|
||||
setBackendSrv({
|
||||
...getBackendSrv(),
|
||||
fetch: fetchMock,
|
||||
});
|
||||
|
||||
return { runner, fetchMock, templateService };
|
||||
}
|
||||
|
||||
export function genMockFrames(numResponses: number): DataFrame[] {
|
||||
const recordIncrement = 50;
|
||||
const mockFrames: DataFrame[] = [];
|
||||
|
||||
for (let i = 0; i < numResponses; i++) {
|
||||
mockFrames.push({
|
||||
fields: [],
|
||||
meta: {
|
||||
custom: {
|
||||
Status: i === numResponses - 1 ? CloudWatchLogsQueryStatus.Complete : CloudWatchLogsQueryStatus.Running,
|
||||
},
|
||||
stats: [
|
||||
{
|
||||
displayName: 'Records scanned',
|
||||
value: (i + 1) * recordIncrement,
|
||||
},
|
||||
],
|
||||
},
|
||||
refId: 'A',
|
||||
length: 0,
|
||||
});
|
||||
}
|
||||
|
||||
return mockFrames;
|
||||
}
|
@ -0,0 +1,60 @@
|
||||
import { of, throwError } from 'rxjs';
|
||||
|
||||
import { CustomVariableModel, DataQueryError, DataQueryRequest, DataSourceInstanceSettings } from '@grafana/data';
|
||||
import { BackendDataSourceResponse, getBackendSrv, setBackendSrv } from '@grafana/runtime';
|
||||
import { TemplateSrv } from 'app/features/templating/template_srv';
|
||||
|
||||
import { CloudWatchMetricsQueryRunner } from '../query-runner/CloudWatchMetricsQueryRunner';
|
||||
import { CloudWatchJsonData, CloudWatchQuery } from '../types';
|
||||
|
||||
import { CloudWatchSettings, setupMockedTemplateService } from './CloudWatchDataSource';
|
||||
import { timeRange } from './timeRange';
|
||||
|
||||
export function setupMockedMetricsQueryRunner({
|
||||
data = {
|
||||
results: {},
|
||||
},
|
||||
variables,
|
||||
mockGetVariableName = true,
|
||||
throws = false,
|
||||
instanceSettings = CloudWatchSettings,
|
||||
}: {
|
||||
data?: BackendDataSourceResponse | DataQueryError;
|
||||
variables?: CustomVariableModel[];
|
||||
mockGetVariableName?: boolean;
|
||||
throws?: boolean;
|
||||
instanceSettings?: DataSourceInstanceSettings<CloudWatchJsonData>;
|
||||
} = {}) {
|
||||
let templateService = new TemplateSrv();
|
||||
if (variables) {
|
||||
templateService = setupMockedTemplateService(variables);
|
||||
if (mockGetVariableName) {
|
||||
templateService.getVariableName = (name: string) => name.replace('$', '');
|
||||
}
|
||||
}
|
||||
|
||||
const runner = new CloudWatchMetricsQueryRunner(instanceSettings, templateService);
|
||||
const fetchMock = throws
|
||||
? jest.fn().mockImplementation(() => throwError(data))
|
||||
: jest.fn().mockReturnValue(of({ data }));
|
||||
|
||||
setBackendSrv({
|
||||
...getBackendSrv(),
|
||||
fetch: fetchMock,
|
||||
});
|
||||
|
||||
const request: DataQueryRequest<CloudWatchQuery> = {
|
||||
range: timeRange,
|
||||
rangeRaw: { from: '1483228800', to: '1483232400' },
|
||||
targets: [],
|
||||
requestId: '',
|
||||
interval: '',
|
||||
intervalMs: 0,
|
||||
scopedVars: {},
|
||||
timezone: '',
|
||||
app: '',
|
||||
startTime: 0,
|
||||
};
|
||||
|
||||
return { runner, fetchMock, templateService, instanceSettings, request, timeRange };
|
||||
}
|
@ -0,0 +1,94 @@
|
||||
import { Observable, of } from 'rxjs';
|
||||
|
||||
import {
|
||||
DataFrame,
|
||||
dataFrameToJSON,
|
||||
MutableDataFrame,
|
||||
ArrayVector,
|
||||
DataSourceInstanceSettings,
|
||||
DataSourceJsonData,
|
||||
DataSourceRef,
|
||||
ScopedVars,
|
||||
DataSourceApi,
|
||||
DataQuery,
|
||||
DataQueryRequest,
|
||||
DataQueryResponse,
|
||||
} from '@grafana/data';
|
||||
import { GetDataSourceListFilters, setDataSourceSrv } from '@grafana/runtime';
|
||||
|
||||
import { CloudWatchDatasource } from '../datasource';
|
||||
import { CloudWatchLogsQueryStatus } from '../types';
|
||||
|
||||
import { meta, setupMockedDataSource } from './CloudWatchDataSource';
|
||||
|
||||
export function setupForLogs() {
|
||||
function envelope(frame: DataFrame) {
|
||||
return { data: { results: { a: { refId: 'a', frames: [dataFrameToJSON(frame)] } } } };
|
||||
}
|
||||
|
||||
const { datasource, fetchMock, timeSrv } = setupMockedDataSource();
|
||||
|
||||
const startQueryFrame = new MutableDataFrame({ fields: [{ name: 'queryId', values: ['queryid'] }] });
|
||||
fetchMock.mockReturnValueOnce(of(envelope(startQueryFrame)));
|
||||
|
||||
const logsFrame = new MutableDataFrame({
|
||||
fields: [
|
||||
{
|
||||
name: '@message',
|
||||
values: new ArrayVector(['something']),
|
||||
},
|
||||
{
|
||||
name: '@timestamp',
|
||||
values: new ArrayVector([1]),
|
||||
},
|
||||
{
|
||||
name: '@xrayTraceId',
|
||||
values: new ArrayVector(['1-613f0d6b-3e7cb34375b60662359611bd']),
|
||||
},
|
||||
],
|
||||
meta: { custom: { Status: CloudWatchLogsQueryStatus.Complete } },
|
||||
});
|
||||
|
||||
fetchMock.mockReturnValueOnce(of(envelope(logsFrame)));
|
||||
|
||||
setDataSourceSrv({
|
||||
async get() {
|
||||
const ds: DataSourceApi = {
|
||||
name: 'Xray',
|
||||
id: 0,
|
||||
type: '',
|
||||
uid: '',
|
||||
query: function (
|
||||
request: DataQueryRequest<DataQuery>
|
||||
): Observable<DataQueryResponse> | Promise<DataQueryResponse> {
|
||||
throw new Error('Function not implemented.');
|
||||
},
|
||||
testDatasource: function (): Promise<CloudWatchDatasource> {
|
||||
throw new Error('Function not implemented.');
|
||||
},
|
||||
meta: meta,
|
||||
getRef: function (): DataSourceRef {
|
||||
throw new Error('Function not implemented.');
|
||||
},
|
||||
};
|
||||
|
||||
return ds;
|
||||
},
|
||||
getList: function (
|
||||
filters?: GetDataSourceListFilters | undefined
|
||||
): Array<DataSourceInstanceSettings<DataSourceJsonData>> {
|
||||
throw new Error('Function not implemented.');
|
||||
},
|
||||
getInstanceSettings: function (
|
||||
ref?: string | DataSourceRef | null | undefined,
|
||||
scopedVars?: ScopedVars | undefined
|
||||
): DataSourceInstanceSettings<DataSourceJsonData> | undefined {
|
||||
throw new Error('Function not implemented.');
|
||||
},
|
||||
reload: function (): void {
|
||||
throw new Error('Function not implemented.');
|
||||
},
|
||||
});
|
||||
|
||||
return { datasource, fetchMock, timeSrv };
|
||||
}
|
@ -0,0 +1,6 @@
|
||||
import { dateTime, TimeRange } from '@grafana/data';
|
||||
|
||||
const start = 1483196400 * 1000;
|
||||
const from = dateTime(start);
|
||||
const to = dateTime(start + 3600 * 1000);
|
||||
export const timeRange: TimeRange = { from, to, raw: { from, to } };
|
@ -9,8 +9,6 @@ import { setupMockedDataSource } from '../__mocks__/CloudWatchDataSource';
|
||||
|
||||
import { ConfigEditor, Props } from './ConfigEditor';
|
||||
|
||||
const ds = setupMockedDataSource();
|
||||
|
||||
jest.mock('app/features/plugins/datasource_srv', () => ({
|
||||
getDatasourceSrv: () => ({
|
||||
loadDatasource: jest.fn().mockResolvedValue({
|
||||
@ -20,9 +18,11 @@ jest.mock('app/features/plugins/datasource_srv', () => ({
|
||||
value: 'ap-east-1',
|
||||
},
|
||||
]),
|
||||
describeLogGroups: jest.fn().mockResolvedValue(['logGroup-foo', 'logGroup-bar']),
|
||||
getActualRegion: jest.fn().mockReturnValue('ap-east-1'),
|
||||
getVariables: jest.fn().mockReturnValue([]),
|
||||
logsQueryRunner: {
|
||||
describeLogGroups: jest.fn().mockResolvedValue(['logGroup-foo', 'logGroup-bar']),
|
||||
},
|
||||
}),
|
||||
}),
|
||||
}));
|
||||
@ -31,10 +31,11 @@ jest.mock('./XrayLinkConfig', () => ({
|
||||
XrayLinkConfig: () => <></>,
|
||||
}));
|
||||
|
||||
const putMock = jest.fn();
|
||||
jest.mock('@grafana/runtime', () => ({
|
||||
...jest.requireActual('@grafana/runtime'),
|
||||
getBackendSrv: () => ({
|
||||
put: jest.fn().mockResolvedValue({ datasource: ds.datasource }),
|
||||
put: putMock,
|
||||
}),
|
||||
}));
|
||||
|
||||
@ -86,6 +87,7 @@ const setup = (propOverrides?: object) => {
|
||||
describe('Render', () => {
|
||||
beforeEach(() => {
|
||||
jest.resetAllMocks();
|
||||
putMock.mockImplementation(async () => ({ datasource: setupMockedDataSource().datasource }));
|
||||
});
|
||||
it('should render component', () => {
|
||||
const wrapper = setup();
|
||||
|
@ -25,13 +25,15 @@ describe('LogGroupSelector', () => {
|
||||
});
|
||||
|
||||
it('updates upstream query log groups on region change', async () => {
|
||||
ds.datasource.describeLogGroups = jest.fn().mockImplementation(async (params: DescribeLogGroupsRequest) => {
|
||||
if (params.region === 'region1') {
|
||||
return Promise.resolve(['log_group_1']);
|
||||
} else {
|
||||
return Promise.resolve(['log_group_2']);
|
||||
}
|
||||
});
|
||||
ds.datasource.logsQueryRunner.describeLogGroups = jest
|
||||
.fn()
|
||||
.mockImplementation(async (params: DescribeLogGroupsRequest) => {
|
||||
if (params.region === 'region1') {
|
||||
return Promise.resolve(['log_group_1']);
|
||||
} else {
|
||||
return Promise.resolve(['log_group_2']);
|
||||
}
|
||||
});
|
||||
const props = {
|
||||
...defaultProps,
|
||||
selectedLogGroups: ['log_group_1'],
|
||||
@ -48,13 +50,15 @@ describe('LogGroupSelector', () => {
|
||||
});
|
||||
|
||||
it('does not update upstream query log groups if saved is false', async () => {
|
||||
ds.datasource.describeLogGroups = jest.fn().mockImplementation(async (params: DescribeLogGroupsRequest) => {
|
||||
if (params.region === 'region1') {
|
||||
return Promise.resolve(['log_group_1']);
|
||||
} else {
|
||||
return Promise.resolve(['log_group_2']);
|
||||
}
|
||||
});
|
||||
ds.datasource.logsQueryRunner.describeLogGroups = jest
|
||||
.fn()
|
||||
.mockImplementation(async (params: DescribeLogGroupsRequest) => {
|
||||
if (params.region === 'region1') {
|
||||
return Promise.resolve(['log_group_1']);
|
||||
} else {
|
||||
return Promise.resolve(['log_group_2']);
|
||||
}
|
||||
});
|
||||
const props = {
|
||||
...defaultProps,
|
||||
selectedLogGroups: ['log_group_1'],
|
||||
@ -94,12 +98,14 @@ describe('LogGroupSelector', () => {
|
||||
];
|
||||
const testLimit = 10;
|
||||
|
||||
ds.datasource.describeLogGroups = jest.fn().mockImplementation(async (params: DescribeLogGroupsRequest) => {
|
||||
const theLogGroups = allLogGroups
|
||||
.filter((logGroupName) => logGroupName.startsWith(params.logGroupNamePrefix ?? ''))
|
||||
.slice(0, Math.max(params.limit ?? testLimit, testLimit));
|
||||
return Promise.resolve(theLogGroups);
|
||||
});
|
||||
ds.datasource.logsQueryRunner.describeLogGroups = jest
|
||||
.fn()
|
||||
.mockImplementation(async (params: DescribeLogGroupsRequest) => {
|
||||
const theLogGroups = allLogGroups
|
||||
.filter((logGroupName) => logGroupName.startsWith(params.logGroupNamePrefix ?? ''))
|
||||
.slice(0, Math.max(params.limit ?? testLimit, testLimit));
|
||||
return Promise.resolve(theLogGroups);
|
||||
});
|
||||
const props = {
|
||||
...defaultProps,
|
||||
};
|
||||
@ -123,7 +129,7 @@ describe('LogGroupSelector', () => {
|
||||
|
||||
it('should render template variables a selectable option', async () => {
|
||||
lodash.debounce = jest.fn().mockImplementation((fn) => fn);
|
||||
ds.datasource.describeLogGroups = jest.fn().mockResolvedValue([]);
|
||||
ds.datasource.logsQueryRunner.describeLogGroups = jest.fn().mockResolvedValue([]);
|
||||
const onChange = jest.fn();
|
||||
const props = {
|
||||
...defaultProps,
|
||||
|
@ -52,7 +52,7 @@ export const LogGroupSelector: React.FC<LogGroupSelectorProps> = ({
|
||||
return [];
|
||||
}
|
||||
try {
|
||||
const logGroups: string[] = await datasource.describeLogGroups({
|
||||
const logGroups: string[] = await datasource.logsQueryRunner.describeLogGroups({
|
||||
refId,
|
||||
region,
|
||||
logGroupNamePrefix,
|
||||
|
@ -38,7 +38,7 @@ describe('CloudWatchLogsQueryField', () => {
|
||||
it('loads defaultLogGroups', async () => {
|
||||
const onRunQuery = jest.fn();
|
||||
const ds = setupMockedDataSource();
|
||||
ds.datasource.defaultLogGroups = ['foo'];
|
||||
ds.datasource.logsQueryRunner.defaultLogGroups = ['foo'];
|
||||
|
||||
render(
|
||||
<CloudWatchLogsQueryField
|
||||
|
@ -70,7 +70,7 @@ export class CloudWatchLogsQueryField extends React.PureComponent<CloudWatchLogs
|
||||
const { query, datasource, onChange } = this.props;
|
||||
|
||||
if (onChange) {
|
||||
onChange({ ...query, logGroupNames: query.logGroupNames ?? datasource.defaultLogGroups });
|
||||
onChange({ ...query, logGroupNames: query.logGroupNames ?? datasource.logsQueryRunner.defaultLogGroups });
|
||||
}
|
||||
};
|
||||
|
||||
@ -136,7 +136,7 @@ export class CloudWatchLogsQueryField extends React.PureComponent<CloudWatchLogs
|
||||
inputEl={
|
||||
<LogGroupSelector
|
||||
region={region}
|
||||
selectedLogGroups={logGroupNames ?? datasource.defaultLogGroups}
|
||||
selectedLogGroups={logGroupNames ?? datasource.logsQueryRunner.defaultLogGroups}
|
||||
datasource={datasource}
|
||||
onChange={function (logGroups: string[]): void {
|
||||
onChange({ ...query, logGroupNames: logGroups });
|
||||
|
@ -1,2 +0,0 @@
|
||||
declare let CloudWatchDatasource: any;
|
||||
export default CloudWatchDatasource;
|
@ -1,56 +1,37 @@
|
||||
import { lastValueFrom, of } from 'rxjs';
|
||||
import { lastValueFrom } from 'rxjs';
|
||||
import { toArray } from 'rxjs/operators';
|
||||
|
||||
import {
|
||||
ArrayVector,
|
||||
DataFrame,
|
||||
dataFrameToJSON,
|
||||
DataQueryRequest,
|
||||
dateTime,
|
||||
Field,
|
||||
FieldType,
|
||||
LogLevel,
|
||||
LogRowModel,
|
||||
MutableDataFrame,
|
||||
} from '@grafana/data';
|
||||
import { setDataSourceSrv } from '@grafana/runtime';
|
||||
import { dateTime, Field } from '@grafana/data';
|
||||
|
||||
import {
|
||||
dimensionVariable,
|
||||
expressionVariable,
|
||||
labelsVariable,
|
||||
limitVariable,
|
||||
fieldsVariable,
|
||||
logGroupNamesVariable,
|
||||
metricVariable,
|
||||
namespaceVariable,
|
||||
setupMockedDataSource,
|
||||
regionVariable,
|
||||
} from './__mocks__/CloudWatchDataSource';
|
||||
import { setupForLogs } from './__mocks__/logsTestContext';
|
||||
import { validLogsQuery, validMetricsQuery } from './__mocks__/queries';
|
||||
import { LOGSTREAM_IDENTIFIER_INTERNAL, LOG_IDENTIFIER_INTERNAL } from './datasource';
|
||||
import {
|
||||
CloudWatchAnnotationQuery,
|
||||
CloudWatchLogsQueryStatus,
|
||||
CloudWatchMetricsQuery,
|
||||
CloudWatchQuery,
|
||||
MetricEditorMode,
|
||||
MetricQueryType,
|
||||
} from './types';
|
||||
|
||||
const mockTimeRange = {
|
||||
from: dateTime(1546372800000),
|
||||
to: dateTime(1546380000000),
|
||||
raw: {
|
||||
from: dateTime(1546372800000),
|
||||
to: dateTime(1546380000000),
|
||||
},
|
||||
};
|
||||
import { timeRange } from './__mocks__/timeRange';
|
||||
import { CloudWatchLogsQuery, CloudWatchMetricsQuery, CloudWatchQuery } from './types';
|
||||
|
||||
describe('datasource', () => {
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks();
|
||||
});
|
||||
describe('query', () => {
|
||||
it('should return error if log query and log groups is not specified', async () => {
|
||||
const { datasource } = setupMockedDataSource();
|
||||
const observable = datasource.query({ targets: [{ queryMode: 'Logs' as 'Logs' }] } as any);
|
||||
const observable = datasource.query({
|
||||
targets: [{ queryMode: 'Logs', id: '', refId: '', region: '' }],
|
||||
requestId: '',
|
||||
interval: '',
|
||||
intervalMs: 0,
|
||||
range: timeRange,
|
||||
scopedVars: {},
|
||||
timezone: '',
|
||||
app: '',
|
||||
startTime: 0,
|
||||
});
|
||||
|
||||
await expect(observable).toEmitValuesWith((received) => {
|
||||
const response = received[0];
|
||||
@ -60,7 +41,17 @@ describe('datasource', () => {
|
||||
|
||||
it('should return empty response if queries are hidden', async () => {
|
||||
const { datasource } = setupMockedDataSource();
|
||||
const observable = datasource.query({ targets: [{ queryMode: 'Logs' as 'Logs', hide: true }] } as any);
|
||||
const observable = datasource.query({
|
||||
targets: [{ queryMode: 'Logs', hide: true, id: '', refId: '', region: '' }],
|
||||
requestId: '',
|
||||
interval: '',
|
||||
intervalMs: 0,
|
||||
range: timeRange,
|
||||
scopedVars: {},
|
||||
timezone: '',
|
||||
app: '',
|
||||
startTime: 0,
|
||||
});
|
||||
|
||||
await expect(observable).toEmitValuesWith((received) => {
|
||||
const response = received[0];
|
||||
@ -82,31 +73,43 @@ describe('datasource', () => {
|
||||
});
|
||||
|
||||
it('should interpolate variables in the query', async () => {
|
||||
const { datasource, fetchMock } = setupMockedDataSource();
|
||||
const { datasource, fetchMock } = setupMockedDataSource({
|
||||
variables: [fieldsVariable, regionVariable],
|
||||
});
|
||||
await lastValueFrom(
|
||||
datasource
|
||||
.query({
|
||||
targets: [
|
||||
{
|
||||
id: '',
|
||||
refId: '',
|
||||
queryMode: 'Logs',
|
||||
region: '$region',
|
||||
expression: 'fields $fields',
|
||||
logGroupNames: ['/some/$group'],
|
||||
logGroupNames: ['/some/group'],
|
||||
},
|
||||
],
|
||||
} as any)
|
||||
requestId: '',
|
||||
interval: '',
|
||||
intervalMs: 0,
|
||||
range: timeRange,
|
||||
scopedVars: {},
|
||||
timezone: '',
|
||||
app: '',
|
||||
startTime: 0,
|
||||
})
|
||||
.pipe(toArray())
|
||||
);
|
||||
expect(fetchMock.mock.calls[0][0].data.queries[0]).toMatchObject({
|
||||
queryString: 'fields templatedField',
|
||||
logGroupNames: ['/some/templatedGroup'],
|
||||
logGroupNames: ['/some/group'],
|
||||
region: 'templatedRegion',
|
||||
});
|
||||
});
|
||||
|
||||
it('should interpolate multi-value template variable for log group names in the query', async () => {
|
||||
const { datasource, fetchMock } = setupMockedDataSource({
|
||||
variables: [expressionVariable, logGroupNamesVariable, regionVariable],
|
||||
variables: [fieldsVariable, logGroupNamesVariable, regionVariable],
|
||||
mockGetVariableName: false,
|
||||
});
|
||||
await lastValueFrom(
|
||||
@ -114,13 +117,23 @@ describe('datasource', () => {
|
||||
.query({
|
||||
targets: [
|
||||
{
|
||||
id: '',
|
||||
refId: '',
|
||||
queryMode: 'Logs',
|
||||
region: '$region',
|
||||
expression: 'fields $fields',
|
||||
logGroupNames: ['$groups'],
|
||||
},
|
||||
],
|
||||
} as any)
|
||||
requestId: '',
|
||||
interval: '',
|
||||
intervalMs: 0,
|
||||
range: timeRange,
|
||||
scopedVars: {},
|
||||
timezone: '',
|
||||
app: '',
|
||||
startTime: 0,
|
||||
})
|
||||
.pipe(toArray())
|
||||
);
|
||||
expect(fetchMock.mock.calls[0][0].data.queries[0]).toMatchObject({
|
||||
@ -131,16 +144,39 @@ describe('datasource', () => {
|
||||
});
|
||||
|
||||
it('should add links to log queries', async () => {
|
||||
const { datasource } = setupForLogs();
|
||||
const { datasource, timeSrv } = setupForLogs();
|
||||
timeSrv.timeRange = () => {
|
||||
const time = dateTime('2021-01-01T01:00:00Z');
|
||||
const range = {
|
||||
from: time.subtract(6, 'hour'),
|
||||
to: time,
|
||||
};
|
||||
|
||||
return {
|
||||
...range,
|
||||
raw: range,
|
||||
};
|
||||
};
|
||||
|
||||
const observable = datasource.query({
|
||||
targets: [
|
||||
{
|
||||
id: '',
|
||||
region: '',
|
||||
queryMode: 'Logs',
|
||||
logGroupNames: ['test'],
|
||||
refId: 'a',
|
||||
},
|
||||
],
|
||||
} as any);
|
||||
requestId: '',
|
||||
interval: '',
|
||||
intervalMs: 0,
|
||||
range: timeRange,
|
||||
scopedVars: {},
|
||||
timezone: '',
|
||||
app: '',
|
||||
startTime: 0,
|
||||
});
|
||||
|
||||
const emits = await lastValueFrom(observable.pipe(toArray()));
|
||||
expect(emits).toHaveLength(1);
|
||||
@ -163,209 +199,6 @@ describe('datasource', () => {
|
||||
},
|
||||
]);
|
||||
});
|
||||
|
||||
describe('debouncedCustomAlert', () => {
|
||||
const debouncedAlert = jest.fn();
|
||||
beforeEach(() => {
|
||||
const { datasource } = setupMockedDataSource({
|
||||
variables: [
|
||||
{ ...namespaceVariable, multi: true },
|
||||
{ ...metricVariable, multi: true },
|
||||
],
|
||||
});
|
||||
datasource.debouncedCustomAlert = debouncedAlert;
|
||||
datasource.performTimeSeriesQuery = jest.fn().mockResolvedValue([]);
|
||||
datasource.query({
|
||||
targets: [
|
||||
{
|
||||
queryMode: 'Metrics',
|
||||
id: '',
|
||||
region: 'us-east-2',
|
||||
namespace: namespaceVariable.id,
|
||||
metricName: metricVariable.id,
|
||||
period: '',
|
||||
alias: '',
|
||||
dimensions: {},
|
||||
matchExact: true,
|
||||
statistic: '',
|
||||
refId: '',
|
||||
expression: 'x * 2',
|
||||
metricQueryType: MetricQueryType.Search,
|
||||
metricEditorMode: MetricEditorMode.Code,
|
||||
},
|
||||
],
|
||||
} as any);
|
||||
});
|
||||
it('should show debounced alert for namespace and metric name', async () => {
|
||||
expect(debouncedAlert).toHaveBeenCalledWith(
|
||||
'CloudWatch templating error',
|
||||
'Multi template variables are not supported for namespace'
|
||||
);
|
||||
expect(debouncedAlert).toHaveBeenCalledWith(
|
||||
'CloudWatch templating error',
|
||||
'Multi template variables are not supported for metric name'
|
||||
);
|
||||
});
|
||||
|
||||
it('should not show debounced alert for region', async () => {
|
||||
expect(debouncedAlert).not.toHaveBeenCalledWith(
|
||||
'CloudWatch templating error',
|
||||
'Multi template variables are not supported for region'
|
||||
);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('filterMetricsQuery', () => {
|
||||
const datasource = setupMockedDataSource().datasource;
|
||||
let baseQuery: CloudWatchMetricsQuery;
|
||||
beforeEach(() => {
|
||||
baseQuery = {
|
||||
id: '',
|
||||
region: 'us-east-2',
|
||||
namespace: '',
|
||||
period: '',
|
||||
alias: '',
|
||||
metricName: '',
|
||||
dimensions: {},
|
||||
matchExact: true,
|
||||
statistic: '',
|
||||
expression: '',
|
||||
refId: '',
|
||||
};
|
||||
});
|
||||
|
||||
it('should error if invalid mode', async () => {
|
||||
expect(() => datasource.filterMetricQuery(baseQuery)).toThrowError('invalid metric editor mode');
|
||||
});
|
||||
|
||||
describe('metric search queries', () => {
|
||||
beforeEach(() => {
|
||||
baseQuery = {
|
||||
...baseQuery,
|
||||
namespace: 'AWS/EC2',
|
||||
metricName: 'CPUUtilization',
|
||||
statistic: 'Average',
|
||||
metricQueryType: MetricQueryType.Search,
|
||||
metricEditorMode: MetricEditorMode.Builder,
|
||||
};
|
||||
});
|
||||
|
||||
it('should not allow builder queries that dont have namespace, metric or statistic', async () => {
|
||||
expect(datasource.filterMetricQuery({ ...baseQuery, statistic: undefined })).toBeFalsy();
|
||||
expect(datasource.filterMetricQuery({ ...baseQuery, metricName: undefined })).toBeFalsy();
|
||||
expect(datasource.filterMetricQuery({ ...baseQuery, namespace: '' })).toBeFalsy();
|
||||
});
|
||||
|
||||
it('should allow builder queries that have namespace, metric or statistic', async () => {
|
||||
expect(datasource.filterMetricQuery(baseQuery)).toBeTruthy();
|
||||
});
|
||||
|
||||
it('should not allow code queries that dont have an expression', async () => {
|
||||
expect(
|
||||
datasource.filterMetricQuery({
|
||||
...baseQuery,
|
||||
expression: undefined,
|
||||
metricEditorMode: MetricEditorMode.Code,
|
||||
})
|
||||
).toBeFalsy();
|
||||
});
|
||||
|
||||
it('should allow code queries that have an expression', async () => {
|
||||
expect(
|
||||
datasource.filterMetricQuery({ ...baseQuery, expression: 'x * 2', metricEditorMode: MetricEditorMode.Code })
|
||||
).toBeTruthy();
|
||||
});
|
||||
});
|
||||
|
||||
describe('metric search expression queries', () => {
|
||||
beforeEach(() => {
|
||||
baseQuery = {
|
||||
...baseQuery,
|
||||
metricQueryType: MetricQueryType.Search,
|
||||
metricEditorMode: MetricEditorMode.Code,
|
||||
};
|
||||
});
|
||||
|
||||
it('should not allow queries that dont have an expression', async () => {
|
||||
const valid = datasource.filterMetricQuery(baseQuery);
|
||||
expect(valid).toBeFalsy();
|
||||
});
|
||||
|
||||
it('should allow queries that have an expression', async () => {
|
||||
baseQuery.expression = 'SUM([a,x])';
|
||||
const valid = datasource.filterMetricQuery(baseQuery);
|
||||
expect(valid).toBeTruthy();
|
||||
});
|
||||
});
|
||||
|
||||
describe('metric query queries', () => {
|
||||
beforeEach(() => {
|
||||
baseQuery = {
|
||||
...baseQuery,
|
||||
metricQueryType: MetricQueryType.Query,
|
||||
metricEditorMode: MetricEditorMode.Code,
|
||||
};
|
||||
});
|
||||
|
||||
it('should not allow queries that dont have a sql expresssion', async () => {
|
||||
const valid = datasource.filterMetricQuery(baseQuery);
|
||||
expect(valid).toBeFalsy();
|
||||
});
|
||||
|
||||
it('should allow queries that have a sql expresssion', async () => {
|
||||
baseQuery.sqlExpression = 'select SUM(CPUUtilization) from "AWS/EC2"';
|
||||
const valid = datasource.filterMetricQuery(baseQuery);
|
||||
expect(valid).toBeTruthy();
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('annotation query', () => {
|
||||
const query: DataQueryRequest<CloudWatchAnnotationQuery> = {
|
||||
range: mockTimeRange,
|
||||
rangeRaw: mockTimeRange.raw,
|
||||
targets: [
|
||||
{
|
||||
actionPrefix: '',
|
||||
alarmNamePrefix: '',
|
||||
datasource: { type: 'cloudwatch' },
|
||||
dimensions: { InstanceId: 'i-12345678' },
|
||||
matchExact: true,
|
||||
metricName: 'CPUUtilization',
|
||||
period: '300',
|
||||
prefixMatching: false,
|
||||
queryMode: 'Annotations',
|
||||
refId: 'Anno',
|
||||
namespace: `$${namespaceVariable.name}`,
|
||||
region: `$${regionVariable.name}`,
|
||||
statistic: 'Average',
|
||||
},
|
||||
],
|
||||
requestId: '',
|
||||
interval: '',
|
||||
intervalMs: 0,
|
||||
scopedVars: {},
|
||||
timezone: '',
|
||||
app: '',
|
||||
startTime: 0,
|
||||
};
|
||||
|
||||
it('should issue the correct query', async () => {
|
||||
const { datasource, fetchMock } = setupMockedDataSource({ variables: [namespaceVariable, regionVariable] });
|
||||
await expect(datasource.query(query)).toEmitValuesWith(() => {
|
||||
expect(fetchMock.mock.calls[0][0].data.queries[0]).toMatchObject(
|
||||
expect.objectContaining({
|
||||
region: regionVariable.current.value,
|
||||
namespace: namespaceVariable.current.value,
|
||||
metricName: query.targets[0].metricName,
|
||||
dimensions: { InstanceId: ['i-12345678'] },
|
||||
statistic: query.targets[0].statistic,
|
||||
period: query.targets[0].period,
|
||||
})
|
||||
);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('resource requests', () => {
|
||||
@ -389,298 +222,62 @@ describe('datasource', () => {
|
||||
});
|
||||
});
|
||||
|
||||
describe('performTimeSeriesQuery', () => {
|
||||
it('should return the same length of data as result', async () => {
|
||||
const { datasource } = setupMockedDataSource({
|
||||
data: {
|
||||
results: {
|
||||
a: { refId: 'a', series: [{ name: 'cpu', points: [1, 1] }], meta: {} },
|
||||
b: { refId: 'b', series: [{ name: 'memory', points: [2, 2] }], meta: {} },
|
||||
},
|
||||
},
|
||||
});
|
||||
describe('when interpolating variables', () => {
|
||||
it('should return an empty array if no queries are provided', () => {
|
||||
const { datasource } = setupMockedDataSource();
|
||||
|
||||
const observable = datasource.performTimeSeriesQuery(
|
||||
{
|
||||
queries: [
|
||||
{ datasourceId: 1, refId: 'a' },
|
||||
{ datasourceId: 1, refId: 'b' },
|
||||
],
|
||||
} as any,
|
||||
{ from: dateTime(), to: dateTime() } as any
|
||||
);
|
||||
|
||||
await expect(observable).toEmitValuesWith((received) => {
|
||||
const response = received[0];
|
||||
expect(response.data.length).toEqual(2);
|
||||
});
|
||||
expect(datasource.interpolateVariablesInQueries([], {})).toHaveLength(0);
|
||||
});
|
||||
|
||||
it('sets fields.config.interval based on period', async () => {
|
||||
const { datasource } = setupMockedDataSource({
|
||||
data: {
|
||||
results: {
|
||||
a: {
|
||||
refId: 'a',
|
||||
series: [{ name: 'cpu', points: [1, 2], meta: { custom: { period: 60 } } }],
|
||||
},
|
||||
b: {
|
||||
refId: 'b',
|
||||
series: [{ name: 'cpu', points: [1, 2], meta: { custom: { period: 120 } } }],
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
const observable = datasource.performTimeSeriesQuery(
|
||||
{
|
||||
queries: [{ datasourceId: 1, refId: 'a' }],
|
||||
} as any,
|
||||
{ from: dateTime(), to: dateTime() } as any
|
||||
);
|
||||
|
||||
await expect(observable).toEmitValuesWith((received) => {
|
||||
const response = received[0];
|
||||
expect(response.data[0].fields[0].config.interval).toEqual(60000);
|
||||
expect(response.data[1].fields[0].config.interval).toEqual(120000);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('describeLogGroup', () => {
|
||||
it('replaces region correctly in the query', async () => {
|
||||
const { datasource, fetchMock } = setupMockedDataSource();
|
||||
await datasource.describeLogGroups({ region: 'default' });
|
||||
expect(fetchMock.mock.calls[0][0].data.queries[0].region).toBe('us-west-1');
|
||||
|
||||
await datasource.describeLogGroups({ region: 'eu-east' });
|
||||
expect(fetchMock.mock.calls[1][0].data.queries[0].region).toBe('eu-east');
|
||||
});
|
||||
});
|
||||
|
||||
describe('getLogRowContext', () => {
|
||||
it('replaces parameters correctly in the query', async () => {
|
||||
const { datasource, fetchMock } = setupMockedDataSource();
|
||||
const row: LogRowModel = {
|
||||
entryFieldIndex: 0,
|
||||
rowIndex: 0,
|
||||
dataFrame: new MutableDataFrame({
|
||||
refId: 'B',
|
||||
fields: [
|
||||
{ name: 'ts', type: FieldType.time, values: [1] },
|
||||
{ name: LOG_IDENTIFIER_INTERNAL, type: FieldType.string, values: ['foo'], labels: {} },
|
||||
{ name: LOGSTREAM_IDENTIFIER_INTERNAL, type: FieldType.string, values: ['bar'], labels: {} },
|
||||
],
|
||||
}),
|
||||
entry: '4',
|
||||
labels: {},
|
||||
hasAnsi: false,
|
||||
hasUnescapedContent: false,
|
||||
raw: '4',
|
||||
logLevel: LogLevel.info,
|
||||
timeEpochMs: 4,
|
||||
timeEpochNs: '4000000',
|
||||
timeFromNow: '',
|
||||
timeLocal: '',
|
||||
timeUtc: '',
|
||||
uid: '1',
|
||||
it('should replace correct variables in CloudWatchLogsQuery', () => {
|
||||
const { datasource, templateService } = setupMockedDataSource();
|
||||
templateService.replace = jest.fn();
|
||||
const variableName = 'someVar';
|
||||
const logQuery: CloudWatchLogsQuery = {
|
||||
queryMode: 'Logs',
|
||||
expression: `$${variableName}`,
|
||||
region: `$${variableName}`,
|
||||
id: '',
|
||||
refId: '',
|
||||
};
|
||||
await datasource.getLogRowContext(row);
|
||||
expect(fetchMock.mock.calls[0][0].data.queries[0].endTime).toBe(4);
|
||||
expect(fetchMock.mock.calls[0][0].data.queries[0].region).toBe(undefined);
|
||||
|
||||
await datasource.getLogRowContext(row, { direction: 'FORWARD' }, { ...validLogsQuery, region: 'eu-east' });
|
||||
expect(fetchMock.mock.calls[1][0].data.queries[0].startTime).toBe(4);
|
||||
expect(fetchMock.mock.calls[1][0].data.queries[0].region).toBe('eu-east');
|
||||
datasource.interpolateVariablesInQueries([logQuery], {});
|
||||
|
||||
expect(templateService.replace).toHaveBeenCalledWith(`$${variableName}`, {});
|
||||
expect(templateService.replace).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
});
|
||||
|
||||
describe('template variable interpolation', () => {
|
||||
it('interpolates variables correctly', async () => {
|
||||
const { datasource, fetchMock } = setupMockedDataSource({
|
||||
variables: [namespaceVariable, metricVariable, labelsVariable, limitVariable],
|
||||
});
|
||||
datasource.handleMetricQueries(
|
||||
[
|
||||
{
|
||||
id: '',
|
||||
refId: 'a',
|
||||
region: 'us-east-2',
|
||||
namespace: '',
|
||||
period: '',
|
||||
alias: '',
|
||||
metricName: '',
|
||||
dimensions: {},
|
||||
matchExact: true,
|
||||
statistic: '',
|
||||
expression: '',
|
||||
metricQueryType: MetricQueryType.Query,
|
||||
metricEditorMode: MetricEditorMode.Code,
|
||||
sqlExpression: 'SELECT SUM($metric) FROM "$namespace" GROUP BY ${labels:raw} LIMIT $limit',
|
||||
},
|
||||
],
|
||||
{ range: { from: dateTime(), to: dateTime() } } as any
|
||||
);
|
||||
expect(fetchMock).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
data: expect.objectContaining({
|
||||
queries: expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
sqlExpression: `SELECT SUM(CPUUtilization) FROM "AWS/EC2" GROUP BY InstanceId,InstanceType LIMIT 100`,
|
||||
}),
|
||||
]),
|
||||
}),
|
||||
})
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('timezoneUTCOffset', () => {
|
||||
const testQuery = {
|
||||
id: '',
|
||||
refId: 'a',
|
||||
region: 'us-east-2',
|
||||
namespace: '',
|
||||
period: '',
|
||||
label: '${MAX_TIME_RELATIVE}',
|
||||
metricName: '',
|
||||
dimensions: {},
|
||||
matchExact: true,
|
||||
statistic: '',
|
||||
expression: '',
|
||||
metricQueryType: MetricQueryType.Query,
|
||||
metricEditorMode: MetricEditorMode.Code,
|
||||
sqlExpression: 'SELECT SUM($metric) FROM "$namespace" GROUP BY ${labels:raw} LIMIT $limit',
|
||||
};
|
||||
const testTable = [
|
||||
['Europe/Stockholm', '+0200'],
|
||||
['America/New_York', '-0400'],
|
||||
['Asia/Tokyo', '+0900'],
|
||||
['UTC', '+0000'],
|
||||
];
|
||||
describe.each(testTable)('should use the right time zone offset', (ianaTimezone, expectedOffset) => {
|
||||
const { datasource, fetchMock } = setupMockedDataSource();
|
||||
datasource.handleMetricQueries([testQuery], {
|
||||
range: { from: dateTime(), to: dateTime() },
|
||||
timezone: ianaTimezone,
|
||||
} as any);
|
||||
|
||||
expect(fetchMock).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
data: expect.objectContaining({
|
||||
queries: expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
timezoneUTCOffset: expectedOffset,
|
||||
}),
|
||||
]),
|
||||
}),
|
||||
})
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('interpolateMetricsQueryVariables', () => {
|
||||
it('interpolates dimensions correctly', () => {
|
||||
const testQuery = {
|
||||
id: 'a',
|
||||
refId: 'a',
|
||||
region: 'us-east-2',
|
||||
namespace: '',
|
||||
dimensions: { InstanceId: '$dimension' },
|
||||
it('should replace correct variables in CloudWatchMetricsQuery', () => {
|
||||
const { datasource, templateService } = setupMockedDataSource();
|
||||
templateService.replace = jest.fn();
|
||||
templateService.getVariableName = jest.fn();
|
||||
const variableName = 'someVar';
|
||||
const metricsQuery: CloudWatchMetricsQuery = {
|
||||
queryMode: 'Metrics',
|
||||
id: 'someId',
|
||||
refId: 'someRefId',
|
||||
expression: `$${variableName}`,
|
||||
region: `$${variableName}`,
|
||||
period: `$${variableName}`,
|
||||
alias: `$${variableName}`,
|
||||
metricName: `$${variableName}`,
|
||||
namespace: `$${variableName}`,
|
||||
dimensions: {
|
||||
[`$${variableName}`]: `$${variableName}`,
|
||||
},
|
||||
matchExact: false,
|
||||
statistic: '',
|
||||
sqlExpression: `$${variableName}`,
|
||||
};
|
||||
const ds = setupMockedDataSource({ variables: [dimensionVariable], mockGetVariableName: false });
|
||||
const result = ds.datasource.interpolateMetricsQueryVariables(testQuery, {
|
||||
dimension: { text: 'foo', value: 'foo' },
|
||||
});
|
||||
expect(result).toStrictEqual({
|
||||
alias: '',
|
||||
metricName: '',
|
||||
namespace: '',
|
||||
period: '',
|
||||
sqlExpression: '',
|
||||
dimensions: { InstanceId: ['foo'] },
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('convertMultiFiltersFormat', () => {
|
||||
const ds = setupMockedDataSource({ variables: [labelsVariable, dimensionVariable], mockGetVariableName: false });
|
||||
it('converts keys and values correctly', () => {
|
||||
const filters = { $dimension: ['b'], a: ['$labels', 'bar'] };
|
||||
const result = ds.datasource.convertMultiFilterFormat(filters);
|
||||
expect(result).toStrictEqual({
|
||||
env: ['b'],
|
||||
a: ['InstanceId', 'InstanceType', 'bar'],
|
||||
});
|
||||
});
|
||||
});
|
||||
datasource.interpolateVariablesInQueries([metricsQuery], {});
|
||||
|
||||
describe('getLogGroupFields', () => {
|
||||
it('passes region correctly', async () => {
|
||||
const { datasource, fetchMock } = setupMockedDataSource();
|
||||
fetchMock.mockReturnValueOnce(
|
||||
of({
|
||||
data: {
|
||||
results: {
|
||||
A: {
|
||||
frames: [
|
||||
dataFrameToJSON(
|
||||
new MutableDataFrame({
|
||||
fields: [
|
||||
{ name: 'key', values: [] },
|
||||
{ name: 'val', values: [] },
|
||||
],
|
||||
})
|
||||
),
|
||||
],
|
||||
},
|
||||
},
|
||||
},
|
||||
})
|
||||
);
|
||||
await datasource.getLogGroupFields({ region: 'us-west-1', logGroupName: 'test' });
|
||||
expect(fetchMock.mock.calls[0][0].data.queries[0].region).toBe('us-west-1');
|
||||
// We interpolate `expression`, `region`, `period`, `alias`, `metricName`, and `nameSpace` in CloudWatchMetricsQuery
|
||||
expect(templateService.replace).toHaveBeenCalledWith(`$${variableName}`, {});
|
||||
expect(templateService.replace).toHaveBeenCalledTimes(7);
|
||||
|
||||
expect(templateService.getVariableName).toHaveBeenCalledWith(`$${variableName}`);
|
||||
expect(templateService.getVariableName).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
function setupForLogs() {
|
||||
function envelope(frame: DataFrame) {
|
||||
return { data: { results: { a: { refId: 'a', frames: [dataFrameToJSON(frame)] } } } };
|
||||
}
|
||||
|
||||
const { datasource, fetchMock } = setupMockedDataSource();
|
||||
|
||||
const startQueryFrame = new MutableDataFrame({ fields: [{ name: 'queryId', values: ['queryid'] }] });
|
||||
fetchMock.mockReturnValueOnce(of(envelope(startQueryFrame)));
|
||||
|
||||
const logsFrame = new MutableDataFrame({
|
||||
fields: [
|
||||
{
|
||||
name: '@message',
|
||||
values: new ArrayVector(['something']),
|
||||
},
|
||||
{
|
||||
name: '@timestamp',
|
||||
values: new ArrayVector([1]),
|
||||
},
|
||||
{
|
||||
name: '@xrayTraceId',
|
||||
values: new ArrayVector(['1-613f0d6b-3e7cb34375b60662359611bd']),
|
||||
},
|
||||
],
|
||||
meta: { custom: { Status: CloudWatchLogsQueryStatus.Complete } },
|
||||
});
|
||||
|
||||
fetchMock.mockReturnValueOnce(of(envelope(logsFrame)));
|
||||
|
||||
setDataSourceSrv({
|
||||
async get() {
|
||||
return {
|
||||
name: 'Xray',
|
||||
};
|
||||
},
|
||||
} as any);
|
||||
|
||||
return { datasource, fetchMock };
|
||||
}
|
||||
|
File diff suppressed because it is too large
Load Diff
@ -109,8 +109,10 @@ async function runSuggestionTest(query: string, expectedItems: string[][]) {
|
||||
|
||||
function makeDatasource(): CloudWatchDatasource {
|
||||
return {
|
||||
getLogGroupFields(): Promise<GetLogGroupFieldsResponse> {
|
||||
return Promise.resolve({ logGroupFields: [{ name: 'field1' }, { name: '@message' }] });
|
||||
logsQueryRunner: {
|
||||
getLogGroupFields(): Promise<GetLogGroupFieldsResponse> {
|
||||
return Promise.resolve({ logGroupFields: [{ name: 'field1' }, { name: '@message' }] });
|
||||
},
|
||||
},
|
||||
} as any;
|
||||
}
|
||||
|
@ -48,7 +48,7 @@ export class CloudWatchLanguageProvider extends LanguageProvider {
|
||||
}
|
||||
|
||||
request = (url: string, params?: any): Promise<TSDBResponse> => {
|
||||
return lastValueFrom(this.datasource.awsRequest(url, params));
|
||||
return lastValueFrom(this.datasource.logsQueryRunner.awsRequest(url, params));
|
||||
};
|
||||
|
||||
start = () => {
|
||||
@ -145,7 +145,7 @@ export class CloudWatchLanguageProvider extends LanguageProvider {
|
||||
}
|
||||
|
||||
const results = await Promise.all(
|
||||
logGroups.map((logGroup) => this.datasource.getLogGroupFields({ logGroupName: logGroup, region }))
|
||||
logGroups.map((logGroup) => this.datasource.logsQueryRunner.getLogGroupFields({ logGroupName: logGroup, region }))
|
||||
);
|
||||
|
||||
const fields = [
|
||||
|
@ -0,0 +1,41 @@
|
||||
import { setupMockedAnnotationQueryRunner } from '../__mocks__/AnnotationQueryRunner';
|
||||
import { namespaceVariable, regionVariable } from '../__mocks__/CloudWatchDataSource';
|
||||
import { CloudWatchAnnotationQuery } from '../types';
|
||||
|
||||
describe('CloudWatchAnnotationQueryRunner', () => {
|
||||
const queries: CloudWatchAnnotationQuery[] = [
|
||||
{
|
||||
actionPrefix: '',
|
||||
alarmNamePrefix: '',
|
||||
datasource: { type: 'cloudwatch' },
|
||||
dimensions: { InstanceId: 'i-12345678' },
|
||||
matchExact: true,
|
||||
metricName: 'CPUUtilization',
|
||||
period: '300',
|
||||
prefixMatching: false,
|
||||
queryMode: 'Annotations',
|
||||
refId: 'Anno',
|
||||
namespace: `$${namespaceVariable.name}`,
|
||||
region: `$${regionVariable.name}`,
|
||||
statistic: 'Average',
|
||||
},
|
||||
];
|
||||
|
||||
it('should issue the correct query', async () => {
|
||||
const { runner, fetchMock, request } = setupMockedAnnotationQueryRunner({
|
||||
variables: [namespaceVariable, regionVariable],
|
||||
});
|
||||
await expect(runner.handleAnnotationQuery(queries, request)).toEmitValuesWith(() => {
|
||||
expect(fetchMock.mock.calls[0][0].data.queries[0]).toMatchObject(
|
||||
expect.objectContaining({
|
||||
region: regionVariable.current.value,
|
||||
namespace: namespaceVariable.current.value,
|
||||
metricName: queries[0].metricName,
|
||||
dimensions: { InstanceId: ['i-12345678'] },
|
||||
statistic: queries[0].statistic,
|
||||
period: queries[0].period,
|
||||
})
|
||||
);
|
||||
});
|
||||
});
|
||||
});
|
@ -0,0 +1,44 @@
|
||||
import { map, Observable } from 'rxjs';
|
||||
|
||||
import { DataQueryRequest, DataQueryResponse, DataSourceInstanceSettings } from '@grafana/data';
|
||||
import { toDataQueryResponse } from '@grafana/runtime';
|
||||
import { TemplateSrv } from 'app/features/templating/template_srv';
|
||||
|
||||
import { CloudWatchAnnotationQuery, CloudWatchJsonData, CloudWatchQuery } from '../types';
|
||||
|
||||
import { CloudWatchQueryRunner } from './CloudWatchQueryRunner';
|
||||
|
||||
// This class handles execution of CloudWatch annotation queries
|
||||
export class CloudWatchAnnotationQueryRunner extends CloudWatchQueryRunner {
|
||||
constructor(instanceSettings: DataSourceInstanceSettings<CloudWatchJsonData>, templateSrv: TemplateSrv) {
|
||||
super(instanceSettings, templateSrv);
|
||||
}
|
||||
|
||||
handleAnnotationQuery(
|
||||
queries: CloudWatchAnnotationQuery[],
|
||||
options: DataQueryRequest<CloudWatchQuery>
|
||||
): Observable<DataQueryResponse> {
|
||||
return this.awsRequest(this.dsQueryEndpoint, {
|
||||
from: options.range.from.valueOf().toString(),
|
||||
to: options.range.to.valueOf().toString(),
|
||||
queries: queries.map((query) => ({
|
||||
...query,
|
||||
statistic: this.templateSrv.replace(query.statistic),
|
||||
region: this.templateSrv.replace(this.getActualRegion(query.region)),
|
||||
namespace: this.templateSrv.replace(query.namespace),
|
||||
metricName: this.templateSrv.replace(query.metricName),
|
||||
dimensions: this.convertDimensionFormat(query.dimensions ?? {}, {}),
|
||||
period: query.period ?? '',
|
||||
actionPrefix: query.actionPrefix ?? '',
|
||||
alarmNamePrefix: query.alarmNamePrefix ?? '',
|
||||
type: 'annotationQuery',
|
||||
datasource: this.ref,
|
||||
})),
|
||||
}).pipe(
|
||||
map((r) => {
|
||||
const frames = toDataQueryResponse({ data: r }).data;
|
||||
return { data: frames };
|
||||
})
|
||||
);
|
||||
}
|
||||
}
|
@ -0,0 +1,325 @@
|
||||
import { interval, lastValueFrom, of } from 'rxjs';
|
||||
|
||||
import { LogRowModel, MutableDataFrame, FieldType, LogLevel, dataFrameToJSON, DataQueryErrorType } from '@grafana/data';
|
||||
import { BackendDataSourceResponse } from '@grafana/runtime';
|
||||
|
||||
import { genMockFrames, setupMockedLogsQueryRunner } from '../__mocks__/LogsQueryRunner';
|
||||
import { validLogsQuery } from '../__mocks__/queries';
|
||||
import { LogAction } from '../types';
|
||||
import * as rxjsUtils from '../utils/rxjs/increasingInterval';
|
||||
|
||||
import { LOG_IDENTIFIER_INTERNAL, LOGSTREAM_IDENTIFIER_INTERNAL } from './CloudWatchLogsQueryRunner';
|
||||
|
||||
describe('CloudWatchLogsQueryRunner', () => {
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks();
|
||||
});
|
||||
describe('describeLogGroup', () => {
|
||||
it('replaces region correctly in the query', async () => {
|
||||
const { runner, fetchMock } = setupMockedLogsQueryRunner();
|
||||
await runner.describeLogGroups({ region: 'default' });
|
||||
expect(fetchMock.mock.calls[0][0].data.queries[0].region).toBe('us-west-1');
|
||||
|
||||
await runner.describeLogGroups({ region: 'eu-east' });
|
||||
expect(fetchMock.mock.calls[1][0].data.queries[0].region).toBe('eu-east');
|
||||
});
|
||||
|
||||
it('should return log groups as an array of strings', async () => {
|
||||
const data: BackendDataSourceResponse = {
|
||||
results: {
|
||||
A: {
|
||||
frames: [
|
||||
{
|
||||
schema: {
|
||||
name: 'logGroups',
|
||||
refId: 'A',
|
||||
fields: [{ name: 'logGroupName', type: FieldType.string }],
|
||||
},
|
||||
data: {
|
||||
values: [
|
||||
[
|
||||
'/aws/containerinsights/dev303-workshop/application',
|
||||
'/aws/containerinsights/dev303-workshop/dataplane',
|
||||
'/aws/containerinsights/dev303-workshop/flowlogs',
|
||||
'/aws/containerinsights/dev303-workshop/host',
|
||||
'/aws/containerinsights/dev303-workshop/performance',
|
||||
'/aws/containerinsights/dev303-workshop/prometheus',
|
||||
'/aws/containerinsights/ecommerce-sockshop/application',
|
||||
'/aws/containerinsights/ecommerce-sockshop/dataplane',
|
||||
'/aws/containerinsights/ecommerce-sockshop/host',
|
||||
'/aws/containerinsights/ecommerce-sockshop/performance',
|
||||
'/aws/containerinsights/watchdemo-perf/application',
|
||||
'/aws/containerinsights/watchdemo-perf/dataplane',
|
||||
'/aws/containerinsights/watchdemo-perf/host',
|
||||
'/aws/containerinsights/watchdemo-perf/performance',
|
||||
'/aws/containerinsights/watchdemo-perf/prometheus',
|
||||
'/aws/containerinsights/watchdemo-prod-us-east-1/performance',
|
||||
'/aws/containerinsights/watchdemo-staging/application',
|
||||
'/aws/containerinsights/watchdemo-staging/dataplane',
|
||||
'/aws/containerinsights/watchdemo-staging/host',
|
||||
'/aws/containerinsights/watchdemo-staging/performance',
|
||||
'/aws/ecs/containerinsights/bugbash-ec2/performance',
|
||||
'/aws/ecs/containerinsights/ecs-demoworkshop/performance',
|
||||
'/aws/ecs/containerinsights/ecs-workshop-dev/performance',
|
||||
'/aws/eks/dev303-workshop/cluster',
|
||||
'/aws/events/cloudtrail',
|
||||
'/aws/events/ecs',
|
||||
'/aws/lambda/cwsyn-mycanary-fac97ded-f134-499a-9d71-4c3be1f63182',
|
||||
'/aws/lambda/cwsyn-watch-linkchecks-ef7ef273-5da2-4663-af54-d2f52d55b060',
|
||||
'/ecs/ecs-cwagent-daemon-service',
|
||||
'/ecs/ecs-demo-limitTask',
|
||||
'CloudTrail/DefaultLogGroup',
|
||||
'container-insights-prometheus-beta',
|
||||
'container-insights-prometheus-demo',
|
||||
],
|
||||
],
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
const { runner } = setupMockedLogsQueryRunner({ data });
|
||||
const expectedLogGroups = [
|
||||
'/aws/containerinsights/dev303-workshop/application',
|
||||
'/aws/containerinsights/dev303-workshop/dataplane',
|
||||
'/aws/containerinsights/dev303-workshop/flowlogs',
|
||||
'/aws/containerinsights/dev303-workshop/host',
|
||||
'/aws/containerinsights/dev303-workshop/performance',
|
||||
'/aws/containerinsights/dev303-workshop/prometheus',
|
||||
'/aws/containerinsights/ecommerce-sockshop/application',
|
||||
'/aws/containerinsights/ecommerce-sockshop/dataplane',
|
||||
'/aws/containerinsights/ecommerce-sockshop/host',
|
||||
'/aws/containerinsights/ecommerce-sockshop/performance',
|
||||
'/aws/containerinsights/watchdemo-perf/application',
|
||||
'/aws/containerinsights/watchdemo-perf/dataplane',
|
||||
'/aws/containerinsights/watchdemo-perf/host',
|
||||
'/aws/containerinsights/watchdemo-perf/performance',
|
||||
'/aws/containerinsights/watchdemo-perf/prometheus',
|
||||
'/aws/containerinsights/watchdemo-prod-us-east-1/performance',
|
||||
'/aws/containerinsights/watchdemo-staging/application',
|
||||
'/aws/containerinsights/watchdemo-staging/dataplane',
|
||||
'/aws/containerinsights/watchdemo-staging/host',
|
||||
'/aws/containerinsights/watchdemo-staging/performance',
|
||||
'/aws/ecs/containerinsights/bugbash-ec2/performance',
|
||||
'/aws/ecs/containerinsights/ecs-demoworkshop/performance',
|
||||
'/aws/ecs/containerinsights/ecs-workshop-dev/performance',
|
||||
'/aws/eks/dev303-workshop/cluster',
|
||||
'/aws/events/cloudtrail',
|
||||
'/aws/events/ecs',
|
||||
'/aws/lambda/cwsyn-mycanary-fac97ded-f134-499a-9d71-4c3be1f63182',
|
||||
'/aws/lambda/cwsyn-watch-linkchecks-ef7ef273-5da2-4663-af54-d2f52d55b060',
|
||||
'/ecs/ecs-cwagent-daemon-service',
|
||||
'/ecs/ecs-demo-limitTask',
|
||||
'CloudTrail/DefaultLogGroup',
|
||||
'container-insights-prometheus-beta',
|
||||
'container-insights-prometheus-demo',
|
||||
];
|
||||
|
||||
const logGroups = await runner.describeLogGroups({ region: 'default' });
|
||||
|
||||
expect(logGroups).toEqual(expectedLogGroups);
|
||||
});
|
||||
});
|
||||
|
||||
describe('getLogRowContext', () => {
  it('replaces parameters correctly in the query', async () => {
    const { runner, fetchMock } = setupMockedLogsQueryRunner();
    // Minimal log row carrying the internal log-group / log-stream identifier
    // fields that getLogRowContext reads to build its GetLogEvents request.
    const row: LogRowModel = {
      entryFieldIndex: 0,
      rowIndex: 0,
      dataFrame: new MutableDataFrame({
        refId: 'B',
        fields: [
          { name: 'ts', type: FieldType.time, values: [1] },
          { name: LOG_IDENTIFIER_INTERNAL, type: FieldType.string, values: ['foo'], labels: {} },
          { name: LOGSTREAM_IDENTIFIER_INTERNAL, type: FieldType.string, values: ['bar'], labels: {} },
        ],
      }),
      entry: '4',
      labels: {},
      hasAnsi: false,
      hasUnescapedContent: false,
      raw: '4',
      logLevel: LogLevel.info,
      timeEpochMs: 4,
      timeEpochNs: '4000000',
      timeFromNow: '',
      timeLocal: '',
      timeUtc: '',
      uid: '1',
    };
    // Default direction is BACKWARD: the row timestamp becomes endTime, and with
    // no query passed no region is set on the request.
    await runner.getLogRowContext(row);
    expect(fetchMock.mock.calls[0][0].data.queries[0].endTime).toBe(4);
    expect(fetchMock.mock.calls[0][0].data.queries[0].region).toBe(undefined);

    // FORWARD direction flips the bound to startTime; region is taken from the query.
    await runner.getLogRowContext(row, { direction: 'FORWARD' }, { ...validLogsQuery, region: 'eu-east' });
    expect(fetchMock.mock.calls[1][0].data.queries[0].startTime).toBe(4);
    expect(fetchMock.mock.calls[1][0].data.queries[0].region).toBe('eu-east');
  });
});
|
||||
|
||||
describe('getLogGroupFields', () => {
  it('passes region correctly', async () => {
    const { runner, fetchMock } = setupMockedLogsQueryRunner();
    // Respond with an empty key/val frame; the assertion only cares about the
    // region that ends up in the outgoing request payload.
    fetchMock.mockReturnValueOnce(
      of({
        data: {
          results: {
            A: {
              frames: [
                dataFrameToJSON(
                  new MutableDataFrame({
                    fields: [
                      { name: 'key', values: [] },
                      { name: 'val', values: [] },
                    ],
                  })
                ),
              ],
            },
          },
        },
      })
    );
    await runner.getLogGroupFields({ region: 'us-west-1', logGroupName: 'test' });
    expect(fetchMock.mock.calls[0][0].data.queries[0].region).toBe('us-west-1');
  });
});
|
||||
|
||||
describe('logs query', () => {
  beforeEach(() => {
    // Replace the back-off polling interval with a fixed fast interval so the
    // tests don't wait for the real increasing schedule.
    jest.spyOn(rxjsUtils, 'increasingInterval').mockImplementation(() => interval(100));
  });

  it('should stop querying when timed out', async () => {
    const { runner } = setupMockedLogsQueryRunner();
    const fakeFrames = genMockFrames(20);
    // Frames 1-3 repeat frame 0's "Records scanned" value, i.e. no progress.
    const initialRecordsMatched = fakeFrames[0].meta!.stats!.find((stat) => stat.displayName === 'Records scanned')!
      .value!;
    for (let i = 1; i < 4; i++) {
      fakeFrames[i].meta!.stats = [
        {
          displayName: 'Records scanned',
          value: initialRecordsMatched,
        },
      ];
    }

    // Frames 10+ also stall on frame 9's value, so consecutive failed attempts
    // accumulate until the timeout triggers.
    const finalRecordsMatched = fakeFrames[9].meta!.stats!.find((stat) => stat.displayName === 'Records scanned')!
      .value!;
    for (let i = 10; i < fakeFrames.length; i++) {
      fakeFrames[i].meta!.stats = [
        {
          displayName: 'Records scanned',
          value: finalRecordsMatched,
        },
      ];
    }

    let i = 0;
    // Serve one fake frame per GetQueryResults poll; other log actions return nothing.
    jest.spyOn(runner, 'makeLogActionRequest').mockImplementation((subtype: LogAction) => {
      if (subtype === 'GetQueryResults') {
        const mockObservable = of([fakeFrames[i]]);
        i++;
        return mockObservable;
      } else {
        return of([]);
      }
    });

    const iterations = 15;
    // Times out after 15 passes for consistent testing
    const timeoutFunc = () => {
      return i >= iterations;
    };
    const myResponse = await lastValueFrom(
      runner.logsQuery([{ queryId: 'fake-query-id', region: 'default', refId: 'A' }], timeoutFunc)
    );

    // On timeout the last emitted frame is marked Cancelled and a Timeout error
    // reporting the number of consecutive no-progress attempts is attached.
    const expectedData = [
      {
        ...fakeFrames[14],
        meta: {
          custom: {
            Status: 'Cancelled',
          },
          stats: fakeFrames[14].meta!.stats,
        },
      },
    ];

    expect(myResponse).toEqual({
      data: expectedData,
      key: 'test-key',
      state: 'Done',
      error: {
        type: DataQueryErrorType.Timeout,
        message: `error: query timed out after 5 attempts`,
      },
    });
    expect(i).toBe(iterations);
  });

  it('should continue querying as long as new data is being received', async () => {
    const { runner } = setupMockedLogsQueryRunner();
    // genMockFrames produces frames with growing "Records scanned" stats, so
    // every poll counts as progress and the 6s timeout is never hit.
    const fakeFrames = genMockFrames(15);

    let i = 0;
    jest.spyOn(runner, 'makeLogActionRequest').mockImplementation((subtype: LogAction) => {
      if (subtype === 'GetQueryResults') {
        const mockObservable = of([fakeFrames[i]]);
        i++;
        return mockObservable;
      } else {
        return of([]);
      }
    });

    const startTime = new Date();
    const timeoutFunc = () => {
      return Date.now() >= startTime.valueOf() + 6000;
    };
    const myResponse = await lastValueFrom(
      runner.logsQuery([{ queryId: 'fake-query-id', region: 'default', refId: 'A' }], timeoutFunc)
    );
    expect(myResponse).toEqual({
      data: [fakeFrames[fakeFrames.length - 1]],
      key: 'test-key',
      state: 'Done',
    });
    // All 15 frames were polled before the final frame completed the stream.
    expect(i).toBe(15);
  });

  it('should stop querying when results come back with status "Complete"', async () => {
    const { runner } = setupMockedLogsQueryRunner();
    const fakeFrames = genMockFrames(3);
    let i = 0;
    jest.spyOn(runner, 'makeLogActionRequest').mockImplementation((subtype: LogAction) => {
      if (subtype === 'GetQueryResults') {
        const mockObservable = of([fakeFrames[i]]);
        i++;
        return mockObservable;
      } else {
        return of([]);
      }
    });

    const startTime = new Date();
    const timeoutFunc = () => {
      return Date.now() >= startTime.valueOf() + 6000;
    };
    const myResponse = await lastValueFrom(
      runner.logsQuery([{ queryId: 'fake-query-id', region: 'default', refId: 'A' }], timeoutFunc)
    );

    // Polling stops as soon as the last frame reports a terminal status.
    expect(myResponse).toEqual({
      data: [fakeFrames[2]],
      key: 'test-key',
      state: 'Done',
    });
    expect(i).toBe(3);
  });
});
|
||||
});
|
@ -0,0 +1,473 @@
|
||||
import { isEmpty, set } from 'lodash';
|
||||
import {
|
||||
Observable,
|
||||
of,
|
||||
mergeMap,
|
||||
map,
|
||||
from,
|
||||
concatMap,
|
||||
finalize,
|
||||
repeat,
|
||||
scan,
|
||||
share,
|
||||
takeWhile,
|
||||
tap,
|
||||
zip,
|
||||
catchError,
|
||||
lastValueFrom,
|
||||
} from 'rxjs';
|
||||
|
||||
import {
|
||||
DataFrame,
|
||||
DataQueryError,
|
||||
DataQueryErrorType,
|
||||
DataQueryRequest,
|
||||
DataQueryResponse,
|
||||
DataSourceInstanceSettings,
|
||||
LoadingState,
|
||||
LogRowModel,
|
||||
rangeUtil,
|
||||
ScopedVars,
|
||||
} from '@grafana/data';
|
||||
import { BackendDataSourceResponse, config, FetchError, FetchResponse, toDataQueryResponse } from '@grafana/runtime';
|
||||
import { RowContextOptions } from '@grafana/ui/src/components/Logs/LogRowContextProvider';
|
||||
import { TimeSrv } from 'app/features/dashboard/services/TimeSrv';
|
||||
import { TemplateSrv } from 'app/features/templating/template_srv';
|
||||
|
||||
import {
|
||||
CloudWatchJsonData,
|
||||
CloudWatchLogsQuery,
|
||||
CloudWatchLogsQueryStatus,
|
||||
CloudWatchLogsRequest,
|
||||
CloudWatchQuery,
|
||||
DescribeLogGroupsRequest,
|
||||
GetLogEventsRequest,
|
||||
GetLogGroupFieldsRequest,
|
||||
GetLogGroupFieldsResponse,
|
||||
LogAction,
|
||||
StartQueryRequest,
|
||||
} from '../types';
|
||||
import { addDataLinksToLogsResponse } from '../utils/datalinks';
|
||||
import { runWithRetry } from '../utils/logsRetry';
|
||||
import { increasingInterval } from '../utils/rxjs/increasingInterval';
|
||||
|
||||
import { CloudWatchQueryRunner } from './CloudWatchQueryRunner';
|
||||
|
||||
// Internal field names injected into log data frames; getLogRowContext looks
// these fields up on a row's frame to recover the originating log group and
// log stream when fetching surrounding log context.
export const LOG_IDENTIFIER_INTERNAL = '__log__grafana_internal__';
export const LOGSTREAM_IDENTIFIER_INTERNAL = '__logstream__grafana_internal__';
|
||||
|
||||
// This class handles execution of CloudWatch logs query data queries
|
||||
export class CloudWatchLogsQueryRunner extends CloudWatchQueryRunner {
|
||||
logsTimeout: string;
|
||||
defaultLogGroups: string[];
|
||||
logQueries: Record<string, { id: string; region: string; statsQuery: boolean }> = {};
|
||||
tracingDataSourceUid?: string;
|
||||
|
||||
constructor(
|
||||
instanceSettings: DataSourceInstanceSettings<CloudWatchJsonData>,
|
||||
templateSrv: TemplateSrv,
|
||||
private readonly timeSrv: TimeSrv
|
||||
) {
|
||||
super(instanceSettings, templateSrv);
|
||||
|
||||
this.tracingDataSourceUid = instanceSettings.jsonData.tracingDatasourceUid;
|
||||
this.logsTimeout = instanceSettings.jsonData.logsTimeout || '15m';
|
||||
this.defaultLogGroups = instanceSettings.jsonData.defaultLogGroups || [];
|
||||
}
|
||||
|
||||
/**
|
||||
* Handle log query. The log query works by starting the query on the CloudWatch and then periodically polling for
|
||||
* results.
|
||||
* @param logQueries
|
||||
* @param options
|
||||
*/
|
||||
handleLogQueries = (
|
||||
logQueries: CloudWatchLogsQuery[],
|
||||
options: DataQueryRequest<CloudWatchQuery>
|
||||
): Observable<DataQueryResponse> => {
|
||||
const queryParams = logQueries.map((target: CloudWatchLogsQuery) => ({
|
||||
queryString: target.expression || '',
|
||||
refId: target.refId,
|
||||
logGroupNames: target.logGroupNames || this.defaultLogGroups,
|
||||
region: super.replaceVariableAndDisplayWarningIfMulti(
|
||||
this.getActualRegion(target.region),
|
||||
options.scopedVars,
|
||||
true,
|
||||
'region'
|
||||
),
|
||||
}));
|
||||
|
||||
const validLogQueries = queryParams.filter((item) => item.logGroupNames?.length);
|
||||
if (logQueries.length > validLogQueries.length) {
|
||||
return of({ data: [], error: { message: 'Log group is required' } });
|
||||
}
|
||||
|
||||
// No valid targets, return the empty result to save a round trip.
|
||||
if (isEmpty(validLogQueries)) {
|
||||
return of({ data: [], state: LoadingState.Done });
|
||||
}
|
||||
|
||||
const startTime = new Date();
|
||||
const timeoutFunc = () => {
|
||||
return Date.now() >= startTime.valueOf() + rangeUtil.intervalToMs(this.logsTimeout);
|
||||
};
|
||||
|
||||
return runWithRetry(
|
||||
(targets: StartQueryRequest[]) => {
|
||||
return this.makeLogActionRequest('StartQuery', targets, {
|
||||
makeReplacements: true,
|
||||
scopedVars: options.scopedVars,
|
||||
skipCache: true,
|
||||
});
|
||||
},
|
||||
queryParams,
|
||||
timeoutFunc
|
||||
).pipe(
|
||||
mergeMap(({ frames, error }: { frames: DataFrame[]; error?: DataQueryError }) =>
|
||||
// This queries for the results
|
||||
this.logsQuery(
|
||||
frames.map((dataFrame) => ({
|
||||
queryId: dataFrame.fields[0].values.get(0),
|
||||
region: dataFrame.meta?.custom?.['Region'] ?? 'default',
|
||||
refId: dataFrame.refId!,
|
||||
statsGroups: logQueries.find((target) => target.refId === dataFrame.refId)?.statsGroups,
|
||||
})),
|
||||
timeoutFunc
|
||||
).pipe(
|
||||
map((response: DataQueryResponse) => {
|
||||
if (!response.error && error) {
|
||||
response.error = error;
|
||||
}
|
||||
return response;
|
||||
})
|
||||
)
|
||||
),
|
||||
mergeMap((dataQueryResponse) => {
|
||||
return from(
|
||||
(async () => {
|
||||
await addDataLinksToLogsResponse(
|
||||
dataQueryResponse,
|
||||
options,
|
||||
this.timeSrv.timeRange(),
|
||||
this.replaceVariableAndDisplayWarningIfMulti.bind(this),
|
||||
this.expandVariableToArray.bind(this),
|
||||
this.getActualRegion.bind(this),
|
||||
this.tracingDataSourceUid
|
||||
);
|
||||
|
||||
return dataQueryResponse;
|
||||
})()
|
||||
);
|
||||
})
|
||||
);
|
||||
};
|
||||
|
||||
/**
|
||||
* Checks progress and polls data of a started logs query with some retry logic.
|
||||
* @param queryParams
|
||||
*/
|
||||
logsQuery(
|
||||
queryParams: Array<{
|
||||
queryId: string;
|
||||
refId: string;
|
||||
limit?: number;
|
||||
region: string;
|
||||
statsGroups?: string[];
|
||||
}>,
|
||||
timeoutFunc: () => boolean
|
||||
): Observable<DataQueryResponse> {
|
||||
this.logQueries = {};
|
||||
queryParams.forEach((param) => {
|
||||
this.logQueries[param.refId] = {
|
||||
id: param.queryId,
|
||||
region: param.region,
|
||||
statsQuery: (param.statsGroups?.length ?? 0) > 0 ?? false,
|
||||
};
|
||||
});
|
||||
|
||||
const dataFrames = increasingInterval({ startPeriod: 100, endPeriod: 1000, step: 300 }).pipe(
|
||||
concatMap((_) => this.makeLogActionRequest('GetQueryResults', queryParams, { skipCache: true })),
|
||||
repeat(),
|
||||
share()
|
||||
);
|
||||
|
||||
const initialValue: { failures: number; prevRecordsMatched: Record<string, number> } = {
|
||||
failures: 0,
|
||||
prevRecordsMatched: {},
|
||||
};
|
||||
const consecutiveFailedAttempts = dataFrames.pipe(
|
||||
scan(({ failures, prevRecordsMatched }, frames) => {
|
||||
failures++;
|
||||
for (const frame of frames) {
|
||||
const recordsMatched = frame.meta?.stats?.find((stat) => stat.displayName === 'Records scanned')?.value!;
|
||||
if (recordsMatched > (prevRecordsMatched[frame.refId!] ?? 0)) {
|
||||
failures = 0;
|
||||
}
|
||||
prevRecordsMatched[frame.refId!] = recordsMatched;
|
||||
}
|
||||
|
||||
return { failures, prevRecordsMatched };
|
||||
}, initialValue),
|
||||
map(({ failures }) => failures),
|
||||
share()
|
||||
);
|
||||
|
||||
const queryResponse: Observable<DataQueryResponse> = zip(dataFrames, consecutiveFailedAttempts).pipe(
|
||||
tap(([dataFrames]) => {
|
||||
for (const frame of dataFrames) {
|
||||
if (
|
||||
[
|
||||
CloudWatchLogsQueryStatus.Complete,
|
||||
CloudWatchLogsQueryStatus.Cancelled,
|
||||
CloudWatchLogsQueryStatus.Failed,
|
||||
].includes(frame.meta?.custom?.['Status']) &&
|
||||
this.logQueries.hasOwnProperty(frame.refId!)
|
||||
) {
|
||||
delete this.logQueries[frame.refId!];
|
||||
}
|
||||
}
|
||||
}),
|
||||
map(([dataFrames, failedAttempts]) => {
|
||||
if (timeoutFunc()) {
|
||||
for (const frame of dataFrames) {
|
||||
set(frame, 'meta.custom.Status', CloudWatchLogsQueryStatus.Cancelled);
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
data: dataFrames,
|
||||
key: 'test-key',
|
||||
state: dataFrames.every((dataFrame) =>
|
||||
[
|
||||
CloudWatchLogsQueryStatus.Complete,
|
||||
CloudWatchLogsQueryStatus.Cancelled,
|
||||
CloudWatchLogsQueryStatus.Failed,
|
||||
].includes(dataFrame.meta?.custom?.['Status'])
|
||||
)
|
||||
? LoadingState.Done
|
||||
: LoadingState.Loading,
|
||||
error: timeoutFunc()
|
||||
? {
|
||||
message: `error: query timed out after ${failedAttempts} attempts`,
|
||||
type: DataQueryErrorType.Timeout,
|
||||
}
|
||||
: undefined,
|
||||
};
|
||||
}),
|
||||
takeWhile(({ state }) => state !== LoadingState.Error && state !== LoadingState.Done, true)
|
||||
);
|
||||
|
||||
return withTeardown(queryResponse, () => this.stopQueries());
|
||||
}
|
||||
|
||||
stopQueries() {
|
||||
if (Object.keys(this.logQueries).length > 0) {
|
||||
this.makeLogActionRequest(
|
||||
'StopQuery',
|
||||
Object.values(this.logQueries).map((logQuery) => ({ queryId: logQuery.id, region: logQuery.region })),
|
||||
{
|
||||
makeReplacements: false,
|
||||
skipCache: true,
|
||||
}
|
||||
).pipe(
|
||||
finalize(() => {
|
||||
this.logQueries = {};
|
||||
})
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
makeLogActionRequest(
|
||||
subtype: LogAction,
|
||||
queryParams: CloudWatchLogsRequest[],
|
||||
options: {
|
||||
scopedVars?: ScopedVars;
|
||||
makeReplacements?: boolean;
|
||||
skipCache?: boolean;
|
||||
} = {
|
||||
makeReplacements: true,
|
||||
skipCache: false,
|
||||
}
|
||||
): Observable<DataFrame[]> {
|
||||
const range = this.timeSrv.timeRange();
|
||||
|
||||
const requestParams = {
|
||||
from: range.from.valueOf().toString(),
|
||||
to: range.to.valueOf().toString(),
|
||||
queries: queryParams.map((param: CloudWatchLogsRequest) => ({
|
||||
// eslint-ignore-next-line
|
||||
refId: (param as StartQueryRequest).refId || 'A',
|
||||
intervalMs: 1, // dummy
|
||||
maxDataPoints: 1, // dummy
|
||||
datasource: this.ref,
|
||||
type: 'logAction',
|
||||
subtype: subtype,
|
||||
...param,
|
||||
})),
|
||||
};
|
||||
|
||||
if (options.makeReplacements) {
|
||||
requestParams.queries.forEach((query: CloudWatchLogsRequest) => {
|
||||
const fieldsToReplace: Array<
|
||||
keyof (GetLogEventsRequest & StartQueryRequest & DescribeLogGroupsRequest & GetLogGroupFieldsRequest)
|
||||
> = ['queryString', 'logGroupNames', 'logGroupName', 'logGroupNamePrefix'];
|
||||
|
||||
// eslint-ignore-next-line
|
||||
const anyQuery: any = query;
|
||||
for (const fieldName of fieldsToReplace) {
|
||||
if (query.hasOwnProperty(fieldName)) {
|
||||
if (Array.isArray(anyQuery[fieldName])) {
|
||||
anyQuery[fieldName] = anyQuery[fieldName].flatMap((val: string) => {
|
||||
if (fieldName === 'logGroupNames') {
|
||||
return this.expandVariableToArray(val, options.scopedVars || {});
|
||||
}
|
||||
return this.replaceVariableAndDisplayWarningIfMulti(val, options.scopedVars, true, fieldName);
|
||||
});
|
||||
} else {
|
||||
anyQuery[fieldName] = this.replaceVariableAndDisplayWarningIfMulti(
|
||||
anyQuery[fieldName],
|
||||
options.scopedVars,
|
||||
true,
|
||||
fieldName
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (anyQuery.region) {
|
||||
anyQuery.region = this.replaceVariableAndDisplayWarningIfMulti(
|
||||
anyQuery.region,
|
||||
options.scopedVars,
|
||||
true,
|
||||
'region'
|
||||
);
|
||||
anyQuery.region = this.getActualRegion(anyQuery.region);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
const resultsToDataFrames = (
|
||||
val:
|
||||
| { data: BackendDataSourceResponse | undefined }
|
||||
| FetchResponse<BackendDataSourceResponse | undefined>
|
||||
| DataQueryError
|
||||
): DataFrame[] => toDataQueryResponse(val).data || [];
|
||||
let headers = {};
|
||||
if (options.skipCache) {
|
||||
headers = {
|
||||
'X-Cache-Skip': true,
|
||||
};
|
||||
}
|
||||
|
||||
return this.awsRequest(this.dsQueryEndpoint, requestParams, headers).pipe(
|
||||
map((response) => resultsToDataFrames({ data: response })),
|
||||
catchError((err: FetchError) => {
|
||||
if (config.featureToggles.datasourceQueryMultiStatus && err.status === 207) {
|
||||
throw err;
|
||||
}
|
||||
|
||||
if (err.status === 400) {
|
||||
throw err;
|
||||
}
|
||||
|
||||
if (err.data?.error) {
|
||||
throw err.data.error;
|
||||
} else if (err.data?.message) {
|
||||
// In PROD we do not supply .error
|
||||
throw err.data.message;
|
||||
}
|
||||
|
||||
throw err;
|
||||
})
|
||||
);
|
||||
}
|
||||
|
||||
getLogRowContext = async (
|
||||
row: LogRowModel,
|
||||
{ limit = 10, direction = 'BACKWARD' }: RowContextOptions = {},
|
||||
query?: CloudWatchLogsQuery
|
||||
): Promise<{ data: DataFrame[] }> => {
|
||||
let logStreamField = null;
|
||||
let logField = null;
|
||||
|
||||
for (const field of row.dataFrame.fields) {
|
||||
if (field.name === LOGSTREAM_IDENTIFIER_INTERNAL) {
|
||||
logStreamField = field;
|
||||
if (logField !== null) {
|
||||
break;
|
||||
}
|
||||
} else if (field.name === LOG_IDENTIFIER_INTERNAL) {
|
||||
logField = field;
|
||||
if (logStreamField !== null) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const requestParams: GetLogEventsRequest = {
|
||||
limit,
|
||||
startFromHead: direction !== 'BACKWARD',
|
||||
region: query?.region,
|
||||
logGroupName: parseLogGroupName(logField!.values.get(row.rowIndex)),
|
||||
logStreamName: logStreamField!.values.get(row.rowIndex),
|
||||
};
|
||||
|
||||
if (direction === 'BACKWARD') {
|
||||
requestParams.endTime = row.timeEpochMs;
|
||||
} else {
|
||||
requestParams.startTime = row.timeEpochMs;
|
||||
}
|
||||
|
||||
const dataFrames = await lastValueFrom(this.makeLogActionRequest('GetLogEvents', [requestParams]));
|
||||
|
||||
return {
|
||||
data: dataFrames,
|
||||
};
|
||||
};
|
||||
|
||||
async describeLogGroups(params: DescribeLogGroupsRequest): Promise<string[]> {
|
||||
const dataFrames = await lastValueFrom(this.makeLogActionRequest('DescribeLogGroups', [params]));
|
||||
|
||||
const logGroupNames = dataFrames[0]?.fields[0]?.values.toArray() ?? [];
|
||||
return logGroupNames;
|
||||
}
|
||||
|
||||
async describeAllLogGroups(params: DescribeLogGroupsRequest): Promise<string[]> {
|
||||
const dataFrames = await lastValueFrom(this.makeLogActionRequest('DescribeAllLogGroups', [params]));
|
||||
|
||||
const logGroupNames = dataFrames[0]?.fields[0]?.values.toArray() ?? [];
|
||||
return logGroupNames;
|
||||
}
|
||||
|
||||
async getLogGroupFields(params: GetLogGroupFieldsRequest): Promise<GetLogGroupFieldsResponse> {
|
||||
const dataFrames = await lastValueFrom(this.makeLogActionRequest('GetLogGroupFields', [params]));
|
||||
|
||||
const fieldNames = dataFrames[0].fields[0].values.toArray();
|
||||
const fieldPercentages = dataFrames[0].fields[1].values.toArray();
|
||||
const getLogGroupFieldsResponse = {
|
||||
logGroupFields: fieldNames.map((val, i) => ({ name: val, percent: fieldPercentages[i] })) ?? [],
|
||||
};
|
||||
|
||||
return getLogGroupFieldsResponse;
|
||||
}
|
||||
}
|
||||
|
||||
function withTeardown<T = DataQueryResponse>(observable: Observable<T>, onUnsubscribe: () => void): Observable<T> {
|
||||
return new Observable<T>((subscriber) => {
|
||||
const innerSub = observable.subscribe({
|
||||
next: (val) => subscriber.next(val),
|
||||
error: (err) => subscriber.next(err),
|
||||
complete: () => subscriber.complete(),
|
||||
});
|
||||
|
||||
return () => {
|
||||
innerSub.unsubscribe();
|
||||
onUnsubscribe();
|
||||
};
|
||||
});
|
||||
}
|
||||
|
||||
function parseLogGroupName(logIdentifier: string): string {
|
||||
const colonIndex = logIdentifier.lastIndexOf(':');
|
||||
return logIdentifier.slice(colonIndex + 1);
|
||||
}
|
@ -0,0 +1,862 @@
|
||||
import { of } from 'rxjs';
|
||||
|
||||
import { CustomVariableModel, getFrameDisplayName, VariableHide } from '@grafana/data';
|
||||
import { dateTime } from '@grafana/data/src/datetime/moment_wrapper';
|
||||
import { BackendDataSourceResponse } from '@grafana/runtime';
|
||||
import { initialVariableModelState } from 'app/features/variables/types';
|
||||
import * as redux from 'app/store/store';
|
||||
|
||||
import {
|
||||
namespaceVariable,
|
||||
metricVariable,
|
||||
labelsVariable,
|
||||
limitVariable,
|
||||
dimensionVariable,
|
||||
periodIntervalVariable,
|
||||
} from '../__mocks__/CloudWatchDataSource';
|
||||
import { setupMockedMetricsQueryRunner } from '../__mocks__/MetricsQueryRunner';
|
||||
import { MetricQueryType, MetricEditorMode, CloudWatchMetricsQuery, DataQueryError } from '../types';
|
||||
|
||||
describe('CloudWatchMetricsQueryRunner', () => {
|
||||
describe('performTimeSeriesQuery', () => {
  it('should return the same length of data as result', async () => {
    const { runner, timeRange } = setupMockedMetricsQueryRunner({
      data: {
        results: {
          a: { refId: 'a', series: [{ target: 'cpu', datapoints: [[1, 1]] }] },
          b: { refId: 'b', series: [{ target: 'memory', datapoints: [[2, 2]] }] },
        },
      },
    });

    const observable = runner.performTimeSeriesQuery(
      {
        queries: [
          { datasourceId: 1, refId: 'a' },
          { datasourceId: 1, refId: 'b' },
        ],
        from: '',
        to: '',
      },
      timeRange
    );

    // Two backend results => two data frames in the response.
    await expect(observable).toEmitValuesWith((received) => {
      const response = received[0];
      expect(response.data.length).toEqual(2);
    });
  });

  it('sets fields.config.interval based on period', async () => {
    const { runner, timeRange } = setupMockedMetricsQueryRunner({
      data: {
        results: {
          a: {
            refId: 'a',
            series: [{ target: 'cpu', datapoints: [[1, 2]], meta: { custom: { period: 60 } } }],
          },
          b: {
            refId: 'b',
            series: [{ target: 'cpu', datapoints: [[1, 2]], meta: { custom: { period: 120 } } }],
          },
        },
      },
    });

    const observable = runner.performTimeSeriesQuery(
      {
        queries: [{ datasourceId: 1, refId: 'a' }],
        from: '',
        to: '',
      },
      timeRange
    );

    // Period is reported in seconds; field config interval is milliseconds.
    await expect(observable).toEmitValuesWith((received) => {
      const response = received[0];
      expect(response.data[0].fields[0].config.interval).toEqual(60000);
      expect(response.data[1].fields[0].config.interval).toEqual(120000);
    });
  });

  describe('When performing CloudWatch metrics query', () => {
    const queries: CloudWatchMetricsQuery[] = [
      {
        id: '',
        metricQueryType: MetricQueryType.Search,
        metricEditorMode: MetricEditorMode.Builder,
        queryMode: 'Metrics',
        expression: '',
        refId: 'A',
        region: 'us-east-1',
        namespace: 'AWS/EC2',
        metricName: 'CPUUtilization',
        dimensions: {
          InstanceId: 'i-12345678',
        },
        statistic: 'Average',
        period: '300',
      },
    ];

    const data: BackendDataSourceResponse = {
      results: {
        A: {
          tables: [],
          error: '',
          refId: 'A',
          series: [
            {
              target: 'CPUUtilization_Average',
              datapoints: [
                [1, 1483228800000],
                [2, 1483229100000],
                [5, 1483229700000],
              ],
              tags: {
                InstanceId: 'i-12345678',
              },
            },
          ],
        },
      },
    };

    it('should generate the correct query', async () => {
      const { runner, fetchMock, request } = setupMockedMetricsQueryRunner({ data });

      // Note: scalar dimension values are expanded to arrays in the payload.
      await expect(runner.handleMetricQueries(queries, request)).toEmitValuesWith(() => {
        expect(fetchMock.mock.calls[0][0].data.queries).toMatchObject(
          expect.arrayContaining([
            expect.objectContaining({
              namespace: queries[0].namespace,
              metricName: queries[0].metricName,
              dimensions: { InstanceId: ['i-12345678'] },
              statistic: queries[0].statistic,
              period: queries[0].period,
            }),
          ])
        );
      });
    });

    it('should generate the correct query with interval variable', async () => {
      const queries: CloudWatchMetricsQuery[] = [
        {
          id: '',
          metricQueryType: MetricQueryType.Search,
          metricEditorMode: MetricEditorMode.Builder,
          queryMode: 'Metrics',
          refId: 'A',
          region: 'us-east-1',
          namespace: 'AWS/EC2',
          metricName: 'CPUUtilization',
          dimensions: {
            InstanceId: 'i-12345678',
          },
          statistic: 'Average',
          period: '[[period]]',
        },
      ];

      const { runner, fetchMock, request } = setupMockedMetricsQueryRunner({
        data,
        variables: [periodIntervalVariable],
      });

      // [[period]] resolves via the mocked interval variable to 600 seconds.
      await expect(runner.handleMetricQueries(queries, request)).toEmitValuesWith(() => {
        expect(fetchMock.mock.calls[0][0].data.queries[0].period).toEqual('600');
      });
    });

    it('should return series list', async () => {
      const { runner, request } = setupMockedMetricsQueryRunner({ data });

      await expect(runner.handleMetricQueries(queries, request)).toEmitValuesWith((received) => {
        const result = received[0];
        expect(getFrameDisplayName(result.data[0])).toBe(
          data.results.A.series?.length && data.results.A.series[0].target
        );
        expect(result.data[0].fields[1].values.buffer[0]).toBe(
          data.results.A.series?.length && data.results.A.series[0].datapoints[0][0]
        );
      });
    });

    describe('and throttling exception is thrown', () => {
      const partialQuery: CloudWatchMetricsQuery = {
        metricQueryType: MetricQueryType.Search,
        metricEditorMode: MetricEditorMode.Builder,
        queryMode: 'Metrics',
        namespace: 'AWS/EC2',
        metricName: 'CPUUtilization',
        dimensions: {
          InstanceId: 'i-12345678',
        },
        statistic: 'Average',
        period: '300',
        expression: '',
        id: '',
        region: '',
        refId: '',
      };

      // Five queries across three distinct regions (us-east-1, us-east-2, eu-north-1).
      const queries: CloudWatchMetricsQuery[] = [
        { ...partialQuery, refId: 'A', region: 'us-east-1' },
        { ...partialQuery, refId: 'B', region: 'us-east-2' },
        { ...partialQuery, refId: 'C', region: 'us-east-1' },
        { ...partialQuery, refId: 'D', region: 'us-east-2' },
        { ...partialQuery, refId: 'E', region: 'eu-north-1' },
      ];

      // Backend error payload: every refId fails with the same throttling error.
      const backendErrorResponse: DataQueryError<CloudWatchMetricsQuery> = {
        data: {
          message: 'Throttling: exception',
          results: {
            A: {
              frames: [],
              series: [],
              tables: [],
              error: 'Throttling: exception',
              refId: 'A',
              meta: {},
            },
            B: {
              frames: [],
              series: [],
              tables: [],
              error: 'Throttling: exception',
              refId: 'B',
              meta: {},
            },
            C: {
              frames: [],
              series: [],
              tables: [],
              error: 'Throttling: exception',
              refId: 'C',
              meta: {},
            },
            D: {
              frames: [],
              series: [],
              tables: [],
              error: 'Throttling: exception',
              refId: 'D',
              meta: {},
            },
            E: {
              frames: [],
              series: [],
              tables: [],
              error: 'Throttling: exception',
              refId: 'E',
              meta: {},
            },
          },
        },
      };

      beforeEach(() => {
        // Alerts are dispatched through the redux store; stub dispatch so the
        // debounced-alert spy below can observe calls without side effects.
        redux.setStore({
          ...redux.store,
          dispatch: jest.fn(),
        });
      });

      it('should display one alert error message per region+datasource combination', async () => {
        const { runner, request } = setupMockedMetricsQueryRunner({ data: backendErrorResponse, throws: true });
        const memoizedDebounceSpy = jest.spyOn(runner, 'debouncedAlert');

        // Three distinct regions => exactly three alerts, despite five failing queries.
        await expect(runner.handleMetricQueries(queries, request)).toEmitValuesWith(() => {
          expect(memoizedDebounceSpy).toHaveBeenCalledWith('CloudWatch Test Datasource', 'us-east-1');
          expect(memoizedDebounceSpy).toHaveBeenCalledWith('CloudWatch Test Datasource', 'us-east-2');
          expect(memoizedDebounceSpy).toHaveBeenCalledWith('CloudWatch Test Datasource', 'eu-north-1');
          expect(memoizedDebounceSpy).toBeCalledTimes(3);
        });
      });
    });
  });
});
|
||||
|
||||
describe('handleMetricQueries ', () => {
  const queries: CloudWatchMetricsQuery[] = [
    {
      id: '',
      metricQueryType: MetricQueryType.Search,
      metricEditorMode: MetricEditorMode.Builder,
      queryMode: 'Metrics',
      refId: 'A',
      region: 'us-east-1',
      namespace: 'AWS/ApplicationELB',
      metricName: 'TargetResponseTime',
      dimensions: {
        LoadBalancer: 'lb',
        TargetGroup: 'tg',
      },
      statistic: 'p90.00',
      period: '300s',
    },
  ];

  const data: BackendDataSourceResponse = {
    results: {
      A: {
        tables: [],
        error: '',
        refId: 'A',
        series: [
          {
            target: 'TargetResponseTime_p90.00',
            datapoints: [
              [1, 1483228800000],
              [2, 1483229100000],
              [5, 1483229700000],
            ],
            tags: {
              LoadBalancer: 'lb',
              TargetGroup: 'tg',
            },
          },
        ],
      },
    },
  };

  it('should return series list', async () => {
    const { runner, request } = setupMockedMetricsQueryRunner({ data });

    // Frame name and first data point must match the mocked backend series.
    await expect(runner.handleMetricQueries(queries, request)).toEmitValuesWith((received) => {
      const result = received[0];
      expect(getFrameDisplayName(result.data[0])).toBe(
        data.results.A.series?.length && data.results.A.series[0].target
      );
      expect(result.data[0].fields[1].values.buffer[0]).toBe(
        data.results.A.series?.length && data.results.A.series[0].datapoints[0][0]
      );
    });
  });
});
|
||||
|
||||
describe('template variable interpolation', () => {
  it('interpolates variables correctly', async () => {
    const { runner, fetchMock, request } = setupMockedMetricsQueryRunner({
      variables: [namespaceVariable, metricVariable, labelsVariable, limitVariable],
    });
    runner.handleMetricQueries(
      [
        {
          id: '',
          refId: 'a',
          region: 'us-east-2',
          namespace: '',
          period: '',
          alias: '',
          metricName: '',
          dimensions: {},
          matchExact: true,
          statistic: '',
          expression: '',
          metricQueryType: MetricQueryType.Query,
          metricEditorMode: MetricEditorMode.Code,
          // sqlExpression references all four mocked variables; the assertion
          // below checks they were all interpolated before the request was sent.
          sqlExpression: 'SELECT SUM($metric) FROM "$namespace" GROUP BY ${labels:raw} LIMIT $limit',
        },
      ],
      request
    );
    expect(fetchMock).toHaveBeenCalledWith(
      expect.objectContaining({
        data: expect.objectContaining({
          queries: expect.arrayContaining([
            expect.objectContaining({
              sqlExpression: `SELECT SUM(CPUUtilization) FROM "AWS/EC2" GROUP BY InstanceId,InstanceType LIMIT 100`,
            }),
          ]),
        }),
      })
    );
  });
  describe('When performing CloudWatch query with template variables', () => {
    const key = 'key';
    // var1 and var2: single-value custom variables.
    const var1: CustomVariableModel = {
      ...initialVariableModelState,
      id: 'var1',
      rootStateKey: key,
      name: 'var1',
      index: 0,
      current: { value: 'var1-foo', text: 'var1-foo', selected: true },
      options: [{ value: 'var1-foo', text: 'var1-foo', selected: true }],
      multi: false,
      includeAll: false,
      query: '',
      hide: VariableHide.dontHide,
      type: 'custom',
    };
    const var2: CustomVariableModel = {
      ...initialVariableModelState,
      id: 'var2',
      rootStateKey: key,
      name: 'var2',
      index: 1,
      current: { value: 'var2-foo', text: 'var2-foo', selected: true },
      options: [{ value: 'var2-foo', text: 'var2-foo', selected: true }],
      multi: false,
      includeAll: false,
      query: '',
      hide: VariableHide.dontHide,
      type: 'custom',
    };
    // var3 and var4: multi-value custom variables with two of three options selected.
    const var3: CustomVariableModel = {
      ...initialVariableModelState,
      id: 'var3',
      rootStateKey: key,
      name: 'var3',
      index: 2,
      current: { value: ['var3-foo', 'var3-baz'], text: 'var3-foo + var3-baz', selected: true },
      options: [
        { selected: true, value: 'var3-foo', text: 'var3-foo' },
        { selected: false, value: 'var3-bar', text: 'var3-bar' },
        { selected: true, value: 'var3-baz', text: 'var3-baz' },
      ],
      multi: true,
      includeAll: false,
      query: '',
      hide: VariableHide.dontHide,
      type: 'custom',
    };
    const var4: CustomVariableModel = {
      ...initialVariableModelState,
      id: 'var4',
      rootStateKey: key,
      name: 'var4',
      index: 3,
      options: [
        { selected: true, value: 'var4-foo', text: 'var4-foo' },
        { selected: false, value: 'var4-bar', text: 'var4-bar' },
        { selected: true, value: 'var4-baz', text: 'var4-baz' },
      ],
      current: { value: ['var4-foo', 'var4-baz'], text: 'var4-foo + var4-baz', selected: true },
      multi: true,
      includeAll: false,
      query: '',
      hide: VariableHide.dontHide,
      type: 'custom',
    };

    it('should generate the correct query for single template variable', async () => {
      const { runner, fetchMock, request } = setupMockedMetricsQueryRunner({ variables: [var1, var2, var3, var4] });
      const queries: CloudWatchMetricsQuery[] = [
        {
          id: '',
          metricQueryType: MetricQueryType.Search,
          metricEditorMode: MetricEditorMode.Builder,
          queryMode: 'Metrics',
          refId: 'A',
          region: 'us-east-1',
          namespace: 'TestNamespace',
          metricName: 'TestMetricName',
          dimensions: {
            dim2: '$var2',
          },
          statistic: 'Average',
          period: '300s',
        },
      ];
      await expect(runner.handleMetricQueries(queries, request)).toEmitValuesWith(() => {
        // A single-value variable expands to a one-element array in the dimensions.
        expect(fetchMock.mock.calls[0][0].data.queries[0].dimensions['dim2']).toStrictEqual(['var2-foo']);
      });
    });

    it('should generate the correct query in the case of one multilple template variables', async () => {
      const { runner, fetchMock, request } = setupMockedMetricsQueryRunner({ variables: [var1, var2, var3, var4] });
      const queries: CloudWatchMetricsQuery[] = [
        {
          id: '',
          metricQueryType: MetricQueryType.Search,
          metricEditorMode: MetricEditorMode.Builder,
          queryMode: 'Metrics',
          refId: 'A',
          region: 'us-east-1',
          namespace: 'TestNamespace',
          metricName: 'TestMetricName',
          dimensions: {
            dim1: '$var1',
            dim2: '$var2',
            dim3: '$var3',
          },
          statistic: 'Average',
          period: '300s',
        },
      ];

      await expect(
        runner.handleMetricQueries(queries, {
          ...request,
          scopedVars: {
            var1: { selected: true, value: 'var1-foo', text: '' },
            var2: { selected: true, value: 'var2-foo', text: '' },
          },
        })
      ).toEmitValuesWith(() => {
        expect(fetchMock.mock.calls[0][0].data.queries[0].dimensions['dim1']).toStrictEqual(['var1-foo']);
        expect(fetchMock.mock.calls[0][0].data.queries[0].dimensions['dim2']).toStrictEqual(['var2-foo']);
        // Multi variable var3 expands to all of its selected values.
        expect(fetchMock.mock.calls[0][0].data.queries[0].dimensions['dim3']).toStrictEqual(['var3-foo', 'var3-baz']);
      });
    });

    it('should generate the correct query in the case of multilple multi template variables', async () => {
      const { runner, fetchMock, request } = setupMockedMetricsQueryRunner({ variables: [var1, var2, var3, var4] });
      const queries: CloudWatchMetricsQuery[] = [
        {
          id: '',
          metricQueryType: MetricQueryType.Search,
          metricEditorMode: MetricEditorMode.Builder,
          queryMode: 'Metrics',
          refId: 'A',
          region: 'us-east-1',
          namespace: 'TestNamespace',
          metricName: 'TestMetricName',
          dimensions: {
            dim1: '$var1',
            dim3: '$var3',
            dim4: '$var4',
          },
          statistic: 'Average',
          period: '300s',
        },
      ];

      await expect(runner.handleMetricQueries(queries, request)).toEmitValuesWith(() => {
        expect(fetchMock.mock.calls[0][0].data.queries[0].dimensions['dim1']).toStrictEqual(['var1-foo']);
        expect(fetchMock.mock.calls[0][0].data.queries[0].dimensions['dim3']).toStrictEqual(['var3-foo', 'var3-baz']);
        expect(fetchMock.mock.calls[0][0].data.queries[0].dimensions['dim4']).toStrictEqual(['var4-foo', 'var4-baz']);
      });
    });

    it('should generate the correct query for multilple template variables, lack scopedVars', async () => {
      const { runner, fetchMock, request } = setupMockedMetricsQueryRunner({ variables: [var1, var2, var3, var4] });
      const queries: CloudWatchMetricsQuery[] = [
        {
          id: '',
          metricQueryType: MetricQueryType.Search,
          metricEditorMode: MetricEditorMode.Builder,
          queryMode: 'Metrics',
          refId: 'A',
          region: 'us-east-1',
          namespace: 'TestNamespace',
          metricName: 'TestMetricName',
          dimensions: {
            dim1: '$var1',
            dim2: '$var2',
            dim3: '$var3',
          },
          statistic: 'Average',
          period: '300',
        },
      ];

      // Only var1 is present in scopedVars; var2/var3 fall back to their
      // current dashboard values.
      await expect(
        runner.handleMetricQueries(queries, {
          ...request,
          scopedVars: {
            var1: { selected: true, value: 'var1-foo', text: '' },
          },
        })
      ).toEmitValuesWith(() => {
        expect(fetchMock.mock.calls[0][0].data.queries[0].dimensions['dim1']).toStrictEqual(['var1-foo']);
        expect(fetchMock.mock.calls[0][0].data.queries[0].dimensions['dim2']).toStrictEqual(['var2-foo']);
        expect(fetchMock.mock.calls[0][0].data.queries[0].dimensions['dim3']).toStrictEqual(['var3-foo', 'var3-baz']);
      });
    });
  });
});
|
||||
|
||||
describe('timezoneUTCOffset', () => {
|
||||
const testQuery = {
|
||||
id: '',
|
||||
refId: 'a',
|
||||
region: 'us-east-2',
|
||||
namespace: '',
|
||||
period: '',
|
||||
label: '${MAX_TIME_RELATIVE}',
|
||||
metricName: '',
|
||||
dimensions: {},
|
||||
matchExact: true,
|
||||
statistic: '',
|
||||
expression: '',
|
||||
metricQueryType: MetricQueryType.Query,
|
||||
metricEditorMode: MetricEditorMode.Code,
|
||||
sqlExpression: 'SELECT SUM($metric) FROM "$namespace" GROUP BY ${labels:raw} LIMIT $limit',
|
||||
};
|
||||
const testTable = [
|
||||
['Europe/Stockholm', '+0200'],
|
||||
['America/New_York', '-0400'],
|
||||
['Asia/Tokyo', '+0900'],
|
||||
['UTC', '+0000'],
|
||||
];
|
||||
describe.each(testTable)('should use the right time zone offset', (ianaTimezone, expectedOffset) => {
|
||||
const { runner, fetchMock, request } = setupMockedMetricsQueryRunner();
|
||||
runner.handleMetricQueries([testQuery], {
|
||||
...request,
|
||||
range: { ...request.range, from: dateTime(), to: dateTime() },
|
||||
timezone: ianaTimezone,
|
||||
});
|
||||
|
||||
expect(fetchMock).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
data: expect.objectContaining({
|
||||
queries: expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
timezoneUTCOffset: expectedOffset,
|
||||
}),
|
||||
]),
|
||||
}),
|
||||
})
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('debouncedCustomAlert', () => {
  const debouncedAlert = jest.fn();
  beforeEach(() => {
    // Both namespace and metric variables are made multi-value, which is
    // unsupported for those fields and should trigger the custom alert.
    const { runner, request } = setupMockedMetricsQueryRunner({
      variables: [
        { ...namespaceVariable, multi: true },
        { ...metricVariable, multi: true },
      ],
    });
    runner.debouncedCustomAlert = debouncedAlert;
    runner.performTimeSeriesQuery = jest.fn().mockResolvedValue([]);
    runner.handleMetricQueries(
      [
        {
          queryMode: 'Metrics',
          id: '',
          region: 'us-east-2',
          namespace: namespaceVariable.id,
          metricName: metricVariable.id,
          period: '',
          alias: '',
          dimensions: {},
          matchExact: true,
          statistic: '',
          refId: '',
          expression: 'x * 2',
          metricQueryType: MetricQueryType.Search,
          metricEditorMode: MetricEditorMode.Code,
        },
      ],
      request
    );
  });
  it('should show debounced alert for namespace and metric name', async () => {
    expect(debouncedAlert).toHaveBeenCalledWith(
      'CloudWatch templating error',
      'Multi template variables are not supported for namespace'
    );
    expect(debouncedAlert).toHaveBeenCalledWith(
      'CloudWatch templating error',
      'Multi template variables are not supported for metric name'
    );
  });

  it('should not show debounced alert for region', async () => {
    // Region interpolation allows multi variables, so no alert is expected for it.
    expect(debouncedAlert).not.toHaveBeenCalledWith(
      'CloudWatch templating error',
      'Multi template variables are not supported for region'
    );
  });
});
|
||||
|
||||
describe('interpolateMetricsQueryVariables', () => {
  it('interpolates dimensions correctly', () => {
    const testQuery = {
      id: 'a',
      refId: 'a',
      region: 'us-east-2',
      namespace: '',
      dimensions: { InstanceId: '$dimension' },
    };
    const { runner } = setupMockedMetricsQueryRunner({ variables: [dimensionVariable], mockGetVariableName: false });
    const result = runner.interpolateMetricsQueryVariables(testQuery, {
      dimension: { text: 'foo', value: 'foo' },
    });
    // Fields missing from the query interpolate to '' and the dimension value
    // expands to a one-element array.
    expect(result).toStrictEqual({
      alias: '',
      metricName: '',
      namespace: '',
      period: '',
      sqlExpression: '',
      dimensions: { InstanceId: ['foo'] },
    });
  });
});
|
||||
|
||||
describe('convertMultiFiltersFormat', () => {
  const { runner } = setupMockedMetricsQueryRunner({
    variables: [labelsVariable, dimensionVariable],
    mockGetVariableName: false,
  });
  it('converts keys and values correctly', () => {
    // Both a templated key ($dimension) and a templated value ($labels) are expanded.
    const filters = { $dimension: ['b'], a: ['$labels', 'bar'] };
    const result = runner.convertMultiFilterFormat(filters);
    expect(result).toStrictEqual({
      env: ['b'],
      a: ['InstanceId', 'InstanceType', 'bar'],
    });
  });
});
|
||||
|
||||
describe('filterMetricsQuery', () => {
  const runner = setupMockedMetricsQueryRunner().runner;
  let baseQuery: CloudWatchMetricsQuery;
  beforeEach(() => {
    // Intentionally missing metricQueryType/metricEditorMode so the base query
    // hits the "invalid metric editor mode" path unless a sub-suite sets them.
    baseQuery = {
      id: '',
      region: 'us-east-2',
      namespace: '',
      period: '',
      alias: '',
      metricName: '',
      dimensions: {},
      matchExact: true,
      statistic: '',
      expression: '',
      refId: '',
    };
  });

  it('should error if invalid mode', async () => {
    expect(() => runner.filterMetricQuery(baseQuery)).toThrowError('invalid metric editor mode');
  });

  describe('metric search queries', () => {
    beforeEach(() => {
      baseQuery = {
        ...baseQuery,
        namespace: 'AWS/EC2',
        metricName: 'CPUUtilization',
        statistic: 'Average',
        metricQueryType: MetricQueryType.Search,
        metricEditorMode: MetricEditorMode.Builder,
      };
    });

    it('should not allow builder queries that dont have namespace, metric or statistic', async () => {
      expect(runner.filterMetricQuery({ ...baseQuery, statistic: undefined })).toBeFalsy();
      expect(runner.filterMetricQuery({ ...baseQuery, metricName: undefined })).toBeFalsy();
      expect(runner.filterMetricQuery({ ...baseQuery, namespace: '' })).toBeFalsy();
    });

    it('should allow builder queries that have namespace, metric or statistic', async () => {
      expect(runner.filterMetricQuery(baseQuery)).toBeTruthy();
    });

    it('should not allow code queries that dont have an expression', async () => {
      expect(
        runner.filterMetricQuery({
          ...baseQuery,
          expression: undefined,
          metricEditorMode: MetricEditorMode.Code,
        })
      ).toBeFalsy();
    });

    it('should allow code queries that have an expression', async () => {
      expect(
        runner.filterMetricQuery({ ...baseQuery, expression: 'x * 2', metricEditorMode: MetricEditorMode.Code })
      ).toBeTruthy();
    });
  });

  describe('metric search expression queries', () => {
    beforeEach(() => {
      baseQuery = {
        ...baseQuery,
        metricQueryType: MetricQueryType.Search,
        metricEditorMode: MetricEditorMode.Code,
      };
    });

    it('should not allow queries that dont have an expression', async () => {
      const valid = runner.filterMetricQuery(baseQuery);
      expect(valid).toBeFalsy();
    });

    it('should allow queries that have an expression', async () => {
      baseQuery.expression = 'SUM([a,x])';
      const valid = runner.filterMetricQuery(baseQuery);
      expect(valid).toBeTruthy();
    });
  });

  describe('metric query queries', () => {
    beforeEach(() => {
      baseQuery = {
        ...baseQuery,
        metricQueryType: MetricQueryType.Query,
        metricEditorMode: MetricEditorMode.Code,
      };
    });

    it('should not allow queries that dont have a sql expresssion', async () => {
      const valid = runner.filterMetricQuery(baseQuery);
      expect(valid).toBeFalsy();
    });

    it('should allow queries that have a sql expresssion', async () => {
      baseQuery.sqlExpression = 'select SUM(CPUUtilization) from "AWS/EC2"';
      const valid = runner.filterMetricQuery(baseQuery);
      expect(valid).toBeTruthy();
    });
  });
});
|
||||
|
||||
describe('When query region is "default"', () => {
  it('should return the datasource region if empty or "default"', () => {
    const { runner, instanceSettings } = setupMockedMetricsQueryRunner();
    const defaultRegion = instanceSettings.jsonData.defaultRegion;

    expect(runner.getActualRegion()).toBe(defaultRegion);
    expect(runner.getActualRegion('')).toBe(defaultRegion);
    expect(runner.getActualRegion('default')).toBe(defaultRegion);
  });

  it('should return the specified region if specified', () => {
    const { runner } = setupMockedMetricsQueryRunner();

    expect(runner.getActualRegion('some-fake-region-1')).toBe('some-fake-region-1');
  });

  it('should query for the datasource region if empty or "default"', async () => {
    const { runner, instanceSettings, request } = setupMockedMetricsQueryRunner();
    // Spy on the backend call so we can inspect the region actually sent.
    const performTimeSeriesQueryMock = jest
      .spyOn(runner, 'performTimeSeriesQuery')
      .mockReturnValue(of({ data: [], error: undefined }));

    const queries: CloudWatchMetricsQuery[] = [
      {
        id: '',
        metricQueryType: MetricQueryType.Search,
        metricEditorMode: MetricEditorMode.Builder,
        queryMode: 'Metrics',
        refId: 'A',
        region: 'default',
        namespace: 'AWS/EC2',
        metricName: 'CPUUtilization',
        dimensions: {
          InstanceId: 'i-12345678',
        },
        statistic: 'Average',
        period: '300s',
      },
    ];

    await expect(runner.handleMetricQueries(queries, request)).toEmitValuesWith(() => {
      // 'default' must be resolved to the datasource's configured region before the request.
      expect(performTimeSeriesQueryMock.mock.calls[0][0].queries[0].region).toBe(
        instanceSettings.jsonData.defaultRegion
      );
    });
  });
});
|
||||
});
|
@ -0,0 +1,239 @@
|
||||
import { findLast, isEmpty } from 'lodash';
|
||||
import React from 'react';
|
||||
import { catchError, map, Observable, of, throwError } from 'rxjs';
|
||||
|
||||
import {
|
||||
DataFrame,
|
||||
DataQueryRequest,
|
||||
DataQueryResponse,
|
||||
DataSourceInstanceSettings,
|
||||
dateTimeFormat,
|
||||
FieldType,
|
||||
rangeUtil,
|
||||
ScopedVars,
|
||||
TimeRange,
|
||||
} from '@grafana/data';
|
||||
import { toDataQueryResponse } from '@grafana/runtime';
|
||||
import { notifyApp } from 'app/core/actions';
|
||||
import { createErrorNotification } from 'app/core/copy/appNotification';
|
||||
import { TemplateSrv } from 'app/features/templating/template_srv';
|
||||
import { store } from 'app/store/store';
|
||||
import { AppNotificationTimeout } from 'app/types';
|
||||
|
||||
import { ThrottlingErrorMessage } from '../components/ThrottlingErrorMessage';
|
||||
import memoizedDebounce from '../memoizedDebounce';
|
||||
import { migrateMetricQuery } from '../migrations/metricQueryMigrations';
|
||||
import {
|
||||
CloudWatchJsonData,
|
||||
CloudWatchMetricsQuery,
|
||||
CloudWatchQuery,
|
||||
DataQueryError,
|
||||
MetricEditorMode,
|
||||
MetricQuery,
|
||||
MetricQueryType,
|
||||
MetricRequest,
|
||||
} from '../types';
|
||||
|
||||
import { CloudWatchQueryRunner } from './CloudWatchQueryRunner';
|
||||
|
||||
const displayAlert = (datasourceName: string, region: string) =>
|
||||
store.dispatch(
|
||||
notifyApp(
|
||||
createErrorNotification(
|
||||
`CloudWatch request limit reached in ${region} for data source ${datasourceName}`,
|
||||
'',
|
||||
undefined,
|
||||
React.createElement(ThrottlingErrorMessage, { region }, null)
|
||||
)
|
||||
)
|
||||
);
|
||||
|
||||
// This class handles execution of CloudWatch metrics query data queries,
// i.e. the "Metrics" query mode: migration, template-variable interpolation,
// validation and dispatch of time series requests to the backend.
export class CloudWatchMetricsQueryRunner extends CloudWatchQueryRunner {
  // Debounced so repeated throttling errors for the same datasource/region
  // don't spam the UI with notifications.
  debouncedAlert: (datasourceName: string, region: string) => void = memoizedDebounce(
    displayAlert,
    AppNotificationTimeout.Error
  );

  constructor(instanceSettings: DataSourceInstanceSettings<CloudWatchJsonData>, templateSrv: TemplateSrv) {
    super(instanceSettings, templateSrv);
  }

  /**
   * Entry point for metrics queries: filters out invalid queries, migrates and
   * interpolates the rest, and forwards them to the backend. Returns an empty
   * result immediately when no query is valid.
   */
  handleMetricQueries = (
    metricQueries: CloudWatchMetricsQuery[],
    options: DataQueryRequest<CloudWatchQuery>
  ): Observable<DataQueryResponse> => {
    // UTC offset of the dashboard time zone in ±HHMM form (':' stripped).
    const timezoneUTCOffset = dateTimeFormat(Date.now(), {
      timeZone: options.timezone,
      format: 'Z',
    }).replace(':', '');

    const validMetricsQueries = metricQueries
      .filter(this.filterMetricQuery)
      .map((q: CloudWatchMetricsQuery): MetricQuery => {
        const migratedQuery = migrateMetricQuery(q);
        const migratedAndIterpolatedQuery = this.replaceMetricQueryVars(migratedQuery, options);

        return {
          timezoneUTCOffset,
          intervalMs: options.intervalMs,
          maxDataPoints: options.maxDataPoints,
          ...migratedAndIterpolatedQuery,
          type: 'timeSeriesQuery',
          datasource: this.ref,
        };
      });

    // No valid targets, return the empty result to save a round trip.
    if (isEmpty(validMetricsQueries)) {
      return of({ data: [] });
    }

    const request = {
      from: options?.range?.from.valueOf().toString(),
      to: options?.range?.to.valueOf().toString(),
      queries: validMetricsQueries,
    };

    return this.performTimeSeriesQuery(request, options.range);
  };

  /**
   * Interpolates the template-variable-bearing fields of a metrics query,
   * warning (via debounced alert) if a multi-value variable is used where
   * only a single value is supported. Used outside of query execution
   * (e.g. when queries are exported/interpolated for display).
   */
  interpolateMetricsQueryVariables(
    query: CloudWatchMetricsQuery,
    scopedVars: ScopedVars
  ): Pick<CloudWatchMetricsQuery, 'alias' | 'metricName' | 'namespace' | 'period' | 'dimensions' | 'sqlExpression'> {
    return {
      alias: this.replaceVariableAndDisplayWarningIfMulti(query.alias, scopedVars),
      metricName: this.replaceVariableAndDisplayWarningIfMulti(query.metricName, scopedVars),
      namespace: this.replaceVariableAndDisplayWarningIfMulti(query.namespace, scopedVars),
      period: this.replaceVariableAndDisplayWarningIfMulti(query.period, scopedVars),
      sqlExpression: this.replaceVariableAndDisplayWarningIfMulti(query.sqlExpression, scopedVars),
      dimensions: this.convertDimensionFormat(query.dimensions ?? {}, scopedVars),
    };
  }

  /**
   * Sends the prepared queries to the backend and converts the response into
   * data frames. Errors are post-processed: a backend-level message is
   * surfaced directly, per-frame errors are promoted to the error message,
   * and throttling errors additionally raise a debounced per-region alert.
   * Note: the TimeRange parameter is accepted but its from/to are unused here.
   */
  performTimeSeriesQuery(request: MetricRequest, { from, to }: TimeRange): Observable<DataQueryResponse> {
    return this.awsRequest(this.dsQueryEndpoint, request).pipe(
      map((res) => {
        const dataframes: DataFrame[] = toDataQueryResponse({ data: res }).data;
        if (!dataframes || dataframes.length <= 0) {
          return { data: [] };
        }

        // Surface at most one error; the last result with an error wins.
        const lastError = findLast(res.results, (v) => !!v.error);

        dataframes.forEach((frame) => {
          frame.fields.forEach((field) => {
            if (field.type === FieldType.time) {
              // field.config.interval is populated in order for Grafana to fill in null values at frame intervals
              field.config.interval = frame.meta?.custom?.period * 1000;
            }
          });
        });

        return {
          data: dataframes,
          error: lastError ? { message: lastError.error } : undefined,
        };
      }),
      catchError((err: DataQueryError<CloudWatchMetricsQuery>) => {
        const isFrameError = err.data?.results;

        // Error is not frame specific
        if (!isFrameError && err.data && err.data.message === 'Metric request error' && err.data.error) {
          err.message = err.data.error;
          return throwError(() => err);
        }

        // The error is either for a specific frame or for all the frames
        const results: Array<{ error?: string }> = Object.values(err.data?.results ?? {});
        const firstErrorResult = results.find((r) => r.error);
        if (firstErrorResult) {
          err.message = firstErrorResult.error;
        }

        if (results.some((r) => r.error && /^Throttling:.*/.test(r.error))) {
          const failedRedIds = Object.keys(err.data?.results ?? {});
          // Collect the distinct regions of the queries whose refIds failed.
          const regionsAffected = Object.values(request.queries).reduce(
            (res: string[], { refId, region }) =>
              (refId && !failedRedIds.includes(refId)) || res.includes(region) ? res : [...res, region],
            []
          );
          regionsAffected.forEach((region) => {
            const actualRegion = this.getActualRegion(region);
            if (actualRegion) {
              this.debouncedAlert(this.instanceSettings.name, actualRegion);
            }
          });
        }

        return throwError(() => err);
      })
    );
  }

  /**
   * Returns true when a metrics query is complete enough to execute.
   * Throws for unrecognized metricQueryType/metricEditorMode combinations.
   */
  filterMetricQuery(query: CloudWatchMetricsQuery): boolean {
    const { region, metricQueryType, metricEditorMode, expression, metricName, namespace, sqlExpression, statistic } =
      query;
    if (!region) {
      return false;
    }
    if (metricQueryType === MetricQueryType.Search && metricEditorMode === MetricEditorMode.Builder) {
      return !!namespace && !!metricName && !!statistic;
    } else if (metricQueryType === MetricQueryType.Search && metricEditorMode === MetricEditorMode.Code) {
      return !!expression;
    } else if (metricQueryType === MetricQueryType.Query) {
      // still TBD how to validate the visual query builder for SQL
      return !!sqlExpression;
    }

    throw new Error('invalid metric editor mode');
  }

  /**
   * Interpolates all template variables in a metrics query in place (the
   * query object is mutated and also returned). namespace and metricName
   * warn if a multi-value variable is used; region resolves 'default' to
   * the datasource region first.
   */
  replaceMetricQueryVars(
    query: CloudWatchMetricsQuery,
    options: DataQueryRequest<CloudWatchQuery>
  ): CloudWatchMetricsQuery {
    query.region = this.templateSrv.replace(this.getActualRegion(query.region), options.scopedVars);
    query.namespace = this.replaceVariableAndDisplayWarningIfMulti(
      query.namespace,
      options.scopedVars,
      true,
      'namespace'
    );
    query.metricName = this.replaceVariableAndDisplayWarningIfMulti(
      query.metricName,
      options.scopedVars,
      true,
      'metric name'
    );
    query.dimensions = this.convertDimensionFormat(query.dimensions ?? {}, options.scopedVars);
    query.statistic = this.templateSrv.replace(query.statistic, options.scopedVars);
    query.period = String(this.getPeriod(query, options)); // use string format for period in graph query, and alerting
    query.id = this.templateSrv.replace(query.id, options.scopedVars);
    query.expression = this.templateSrv.replace(query.expression, options.scopedVars);
    query.sqlExpression = this.templateSrv.replace(query.sqlExpression, options.scopedVars, 'raw');

    return query;
  }

  /**
   * Resolves the query's period to a number of seconds (as a string).
   * Accepts a plain number of seconds or an interval string (e.g. '5m');
   * 'auto'/empty is passed through unchanged for the backend to resolve.
   * The result is clamped to a minimum of 1 second.
   */
  getPeriod(target: CloudWatchMetricsQuery, options: DataQueryRequest<CloudWatchQuery>) {
    let period = this.templateSrv.replace(target.period, options.scopedVars);
    if (period && period.toLowerCase() !== 'auto') {
      let p: number;
      if (/^\d+$/.test(period)) {
        p = parseInt(period, 10);
      } else {
        p = rangeUtil.intervalToSeconds(period);
      }

      if (p < 1) {
        p = 1;
      }

      return String(p);
    }

    return period;
  }
}
|
@ -0,0 +1,122 @@
|
||||
import { Observable, map } from 'rxjs';
|
||||
|
||||
import { DataSourceInstanceSettings, DataSourceRef, getDataSourceRef, ScopedVars } from '@grafana/data';
|
||||
import { getBackendSrv } from '@grafana/runtime';
|
||||
import { notifyApp } from 'app/core/actions';
|
||||
import { createErrorNotification } from 'app/core/copy/appNotification';
|
||||
import { TemplateSrv } from 'app/features/templating/template_srv';
|
||||
import { store } from 'app/store/store';
|
||||
import { AppNotificationTimeout } from 'app/types';
|
||||
|
||||
import memoizedDebounce from '../memoizedDebounce';
|
||||
import { CloudWatchJsonData, Dimensions, MetricRequest, MultiFilters, TSDBResponse } from '../types';
|
||||
|
||||
export abstract class CloudWatchQueryRunner {
|
||||
templateSrv: TemplateSrv;
|
||||
ref: DataSourceRef;
|
||||
dsQueryEndpoint = '/api/ds/query';
|
||||
debouncedCustomAlert: (title: string, message: string) => void = memoizedDebounce(
|
||||
displayCustomError,
|
||||
AppNotificationTimeout.Error
|
||||
);
|
||||
|
||||
constructor(public instanceSettings: DataSourceInstanceSettings<CloudWatchJsonData>, templateSrv: TemplateSrv) {
|
||||
this.templateSrv = templateSrv;
|
||||
this.ref = getDataSourceRef(instanceSettings);
|
||||
}
|
||||
|
||||
awsRequest(url: string, data: MetricRequest, headers: Record<string, string> = {}): Observable<TSDBResponse> {
|
||||
const options = {
|
||||
method: 'POST',
|
||||
url,
|
||||
data,
|
||||
headers,
|
||||
};
|
||||
|
||||
return getBackendSrv()
|
||||
.fetch<TSDBResponse>(options)
|
||||
.pipe(map((result) => result.data));
|
||||
}
|
||||
|
||||
convertDimensionFormat(dimensions: Dimensions, scopedVars: ScopedVars): Dimensions {
|
||||
return Object.entries(dimensions).reduce((result, [key, value]) => {
|
||||
key = this.replaceVariableAndDisplayWarningIfMulti(key, scopedVars, true, 'dimension keys');
|
||||
|
||||
if (Array.isArray(value)) {
|
||||
return { ...result, [key]: value };
|
||||
}
|
||||
|
||||
if (!value) {
|
||||
return { ...result, [key]: null };
|
||||
}
|
||||
|
||||
const newValues = this.expandVariableToArray(value, scopedVars);
|
||||
return { ...result, [key]: newValues };
|
||||
}, {});
|
||||
}
|
||||
|
||||
// get the value for a given template variable
|
||||
expandVariableToArray(value: string, scopedVars: ScopedVars): string[] {
|
||||
const variableName = this.templateSrv.getVariableName(value);
|
||||
const valueVar = this.templateSrv.getVariables().find(({ name }) => {
|
||||
return name === variableName;
|
||||
});
|
||||
|
||||
if (variableName && valueVar) {
|
||||
const isMultiVariable =
|
||||
valueVar?.type === 'custom' || valueVar?.type === 'query' || valueVar?.type === 'datasource';
|
||||
if (isMultiVariable && valueVar.multi) {
|
||||
return this.templateSrv.replace(value, scopedVars, 'pipe').split('|');
|
||||
}
|
||||
return [this.templateSrv.replace(value, scopedVars)];
|
||||
}
|
||||
return [value];
|
||||
}
|
||||
|
||||
convertMultiFilterFormat(multiFilters: MultiFilters, fieldName?: string) {
|
||||
return Object.entries(multiFilters).reduce((result, [key, values]) => {
|
||||
const interpolatedKey = this.replaceVariableAndDisplayWarningIfMulti(key, {}, true, fieldName);
|
||||
if (!values) {
|
||||
return { ...result, [interpolatedKey]: null };
|
||||
}
|
||||
const initialVal: string[] = [];
|
||||
const newValues = values.reduce((result, value) => {
|
||||
const vals = this.expandVariableToArray(value, {});
|
||||
return [...result, ...vals];
|
||||
}, initialVal);
|
||||
return { ...result, [interpolatedKey]: newValues };
|
||||
}, {});
|
||||
}
|
||||
|
||||
replaceVariableAndDisplayWarningIfMulti(
|
||||
target?: string,
|
||||
scopedVars?: ScopedVars,
|
||||
displayErrorIfIsMultiTemplateVariable?: boolean,
|
||||
fieldName?: string
|
||||
) {
|
||||
if (displayErrorIfIsMultiTemplateVariable && !!target) {
|
||||
const variables = this.templateSrv.getVariables();
|
||||
const variable = variables.find(({ name }) => name === this.templateSrv.getVariableName(target));
|
||||
const isMultiVariable =
|
||||
variable?.type === 'custom' || variable?.type === 'query' || variable?.type === 'datasource';
|
||||
if (isMultiVariable && variable.multi) {
|
||||
this.debouncedCustomAlert(
|
||||
'CloudWatch templating error',
|
||||
`Multi template variables are not supported for ${fieldName || target}`
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
return this.templateSrv.replace(target, scopedVars);
|
||||
}
|
||||
|
||||
getActualRegion(region?: string) {
|
||||
if (region === 'default' || region === undefined || region === '') {
|
||||
return this.instanceSettings.jsonData.defaultRegion ?? '';
|
||||
}
|
||||
return region;
|
||||
}
|
||||
}
|
||||
|
||||
const displayCustomError = (title: string, message: string) =>
|
||||
store.dispatch(notifyApp(createErrorNotification(title, message)));
|
@ -1,914 +0,0 @@
|
||||
import { interval, lastValueFrom, of, throwError } from 'rxjs';
|
||||
import { createFetchResponse } from 'test/helpers/createFetchResponse';
|
||||
import { getTemplateSrvDependencies } from 'test/helpers/getTemplateSrvDependencies';
|
||||
|
||||
import {
|
||||
DataFrame,
|
||||
DataQueryErrorType,
|
||||
DataSourceInstanceSettings,
|
||||
dateMath,
|
||||
getFrameDisplayName,
|
||||
} from '@grafana/data';
|
||||
import { backendSrv } from 'app/core/services/backend_srv'; // will use the version in __mocks__
|
||||
import { TimeSrv } from 'app/features/dashboard/services/TimeSrv';
|
||||
import { TemplateSrv } from 'app/features/templating/template_srv';
|
||||
import * as redux from 'app/store/store';
|
||||
|
||||
import { convertToStoreState } from '../../../../../test/helpers/convertToStoreState';
|
||||
import { CustomVariableModel, initialVariableModelState, VariableHide } from '../../../../features/variables/types';
|
||||
import { CloudWatchDatasource } from '../datasource';
|
||||
import {
|
||||
CloudWatchJsonData,
|
||||
CloudWatchLogsQuery,
|
||||
CloudWatchLogsQueryStatus,
|
||||
CloudWatchMetricsQuery,
|
||||
LogAction,
|
||||
MetricEditorMode,
|
||||
MetricQueryType,
|
||||
} from '../types';
|
||||
import * as rxjsUtils from '../utils/rxjs/increasingInterval';
|
||||
|
||||
jest.mock('@grafana/runtime', () => ({
|
||||
...(jest.requireActual('@grafana/runtime') as unknown as object),
|
||||
getBackendSrv: () => backendSrv,
|
||||
}));
|
||||
|
||||
type Args = { response?: any; throws?: boolean; templateSrv?: TemplateSrv };
|
||||
|
||||
function getTestContext({ response = {}, throws = false, templateSrv = new TemplateSrv() }: Args = {}) {
|
||||
jest.clearAllMocks();
|
||||
|
||||
const fetchMock = jest.spyOn(backendSrv, 'fetch');
|
||||
|
||||
throws
|
||||
? fetchMock.mockImplementation(() => throwError(response))
|
||||
: fetchMock.mockImplementation(() => of(createFetchResponse(response)));
|
||||
|
||||
const instanceSettings = {
|
||||
jsonData: { defaultRegion: 'us-east-1' },
|
||||
name: 'TestDatasource',
|
||||
} as DataSourceInstanceSettings<CloudWatchJsonData>;
|
||||
|
||||
const timeSrv = {
|
||||
time: { from: '2016-12-31 15:00:00Z', to: '2016-12-31 16:00:00Z' },
|
||||
timeRange: () => {
|
||||
return {
|
||||
from: dateMath.parse(timeSrv.time.from, false),
|
||||
to: dateMath.parse(timeSrv.time.to, true),
|
||||
};
|
||||
},
|
||||
} as TimeSrv;
|
||||
|
||||
const ds = new CloudWatchDatasource(instanceSettings, templateSrv, timeSrv);
|
||||
|
||||
return { ds, fetchMock, instanceSettings };
|
||||
}
|
||||
|
||||
describe('CloudWatchDatasource', () => {
|
||||
const start = 1483196400 * 1000;
|
||||
const defaultTimeRange = { from: new Date(start), to: new Date(start + 3600 * 1000) };
|
||||
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks();
|
||||
});
|
||||
|
||||
describe('When getting log groups', () => {
|
||||
it('should return log groups as an array of strings', async () => {
|
||||
const response = {
|
||||
results: {
|
||||
A: {
|
||||
frames: [
|
||||
{
|
||||
schema: {
|
||||
name: 'logGroups',
|
||||
refId: 'A',
|
||||
fields: [{ name: 'logGroupName', type: 'string', typeInfo: { frame: 'string', nullable: true } }],
|
||||
},
|
||||
data: {
|
||||
values: [
|
||||
[
|
||||
'/aws/containerinsights/dev303-workshop/application',
|
||||
'/aws/containerinsights/dev303-workshop/dataplane',
|
||||
'/aws/containerinsights/dev303-workshop/flowlogs',
|
||||
'/aws/containerinsights/dev303-workshop/host',
|
||||
'/aws/containerinsights/dev303-workshop/performance',
|
||||
'/aws/containerinsights/dev303-workshop/prometheus',
|
||||
'/aws/containerinsights/ecommerce-sockshop/application',
|
||||
'/aws/containerinsights/ecommerce-sockshop/dataplane',
|
||||
'/aws/containerinsights/ecommerce-sockshop/host',
|
||||
'/aws/containerinsights/ecommerce-sockshop/performance',
|
||||
'/aws/containerinsights/watchdemo-perf/application',
|
||||
'/aws/containerinsights/watchdemo-perf/dataplane',
|
||||
'/aws/containerinsights/watchdemo-perf/host',
|
||||
'/aws/containerinsights/watchdemo-perf/performance',
|
||||
'/aws/containerinsights/watchdemo-perf/prometheus',
|
||||
'/aws/containerinsights/watchdemo-prod-us-east-1/performance',
|
||||
'/aws/containerinsights/watchdemo-staging/application',
|
||||
'/aws/containerinsights/watchdemo-staging/dataplane',
|
||||
'/aws/containerinsights/watchdemo-staging/host',
|
||||
'/aws/containerinsights/watchdemo-staging/performance',
|
||||
'/aws/ecs/containerinsights/bugbash-ec2/performance',
|
||||
'/aws/ecs/containerinsights/ecs-demoworkshop/performance',
|
||||
'/aws/ecs/containerinsights/ecs-workshop-dev/performance',
|
||||
'/aws/eks/dev303-workshop/cluster',
|
||||
'/aws/events/cloudtrail',
|
||||
'/aws/events/ecs',
|
||||
'/aws/lambda/cwsyn-mycanary-fac97ded-f134-499a-9d71-4c3be1f63182',
|
||||
'/aws/lambda/cwsyn-watch-linkchecks-ef7ef273-5da2-4663-af54-d2f52d55b060',
|
||||
'/ecs/ecs-cwagent-daemon-service',
|
||||
'/ecs/ecs-demo-limitTask',
|
||||
'CloudTrail/DefaultLogGroup',
|
||||
'container-insights-prometheus-beta',
|
||||
'container-insights-prometheus-demo',
|
||||
],
|
||||
],
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
const { ds } = getTestContext({ response });
|
||||
const expectedLogGroups = [
|
||||
'/aws/containerinsights/dev303-workshop/application',
|
||||
'/aws/containerinsights/dev303-workshop/dataplane',
|
||||
'/aws/containerinsights/dev303-workshop/flowlogs',
|
||||
'/aws/containerinsights/dev303-workshop/host',
|
||||
'/aws/containerinsights/dev303-workshop/performance',
|
||||
'/aws/containerinsights/dev303-workshop/prometheus',
|
||||
'/aws/containerinsights/ecommerce-sockshop/application',
|
||||
'/aws/containerinsights/ecommerce-sockshop/dataplane',
|
||||
'/aws/containerinsights/ecommerce-sockshop/host',
|
||||
'/aws/containerinsights/ecommerce-sockshop/performance',
|
||||
'/aws/containerinsights/watchdemo-perf/application',
|
||||
'/aws/containerinsights/watchdemo-perf/dataplane',
|
||||
'/aws/containerinsights/watchdemo-perf/host',
|
||||
'/aws/containerinsights/watchdemo-perf/performance',
|
||||
'/aws/containerinsights/watchdemo-perf/prometheus',
|
||||
'/aws/containerinsights/watchdemo-prod-us-east-1/performance',
|
||||
'/aws/containerinsights/watchdemo-staging/application',
|
||||
'/aws/containerinsights/watchdemo-staging/dataplane',
|
||||
'/aws/containerinsights/watchdemo-staging/host',
|
||||
'/aws/containerinsights/watchdemo-staging/performance',
|
||||
'/aws/ecs/containerinsights/bugbash-ec2/performance',
|
||||
'/aws/ecs/containerinsights/ecs-demoworkshop/performance',
|
||||
'/aws/ecs/containerinsights/ecs-workshop-dev/performance',
|
||||
'/aws/eks/dev303-workshop/cluster',
|
||||
'/aws/events/cloudtrail',
|
||||
'/aws/events/ecs',
|
||||
'/aws/lambda/cwsyn-mycanary-fac97ded-f134-499a-9d71-4c3be1f63182',
|
||||
'/aws/lambda/cwsyn-watch-linkchecks-ef7ef273-5da2-4663-af54-d2f52d55b060',
|
||||
'/ecs/ecs-cwagent-daemon-service',
|
||||
'/ecs/ecs-demo-limitTask',
|
||||
'CloudTrail/DefaultLogGroup',
|
||||
'container-insights-prometheus-beta',
|
||||
'container-insights-prometheus-demo',
|
||||
];
|
||||
|
||||
const logGroups = await ds.describeLogGroups({ region: 'default' });
|
||||
|
||||
expect(logGroups).toEqual(expectedLogGroups);
|
||||
});
|
||||
});
|
||||
|
||||
describe('When performing CloudWatch logs query', () => {
|
||||
beforeEach(() => {
|
||||
jest.spyOn(rxjsUtils, 'increasingInterval').mockImplementation(() => interval(100));
|
||||
});
|
||||
|
||||
it('should stop querying when timed out', async () => {
|
||||
const { ds } = getTestContext();
|
||||
const fakeFrames = genMockFrames(20);
|
||||
const initialRecordsMatched = fakeFrames[0].meta!.stats!.find((stat) => stat.displayName === 'Records scanned')!
|
||||
.value!;
|
||||
for (let i = 1; i < 4; i++) {
|
||||
fakeFrames[i].meta!.stats = [
|
||||
{
|
||||
displayName: 'Records scanned',
|
||||
value: initialRecordsMatched,
|
||||
},
|
||||
];
|
||||
}
|
||||
|
||||
const finalRecordsMatched = fakeFrames[9].meta!.stats!.find((stat) => stat.displayName === 'Records scanned')!
|
||||
.value!;
|
||||
for (let i = 10; i < fakeFrames.length; i++) {
|
||||
fakeFrames[i].meta!.stats = [
|
||||
{
|
||||
displayName: 'Records scanned',
|
||||
value: finalRecordsMatched,
|
||||
},
|
||||
];
|
||||
}
|
||||
|
||||
let i = 0;
|
||||
jest.spyOn(ds, 'makeLogActionRequest').mockImplementation((subtype: LogAction) => {
|
||||
if (subtype === 'GetQueryResults') {
|
||||
const mockObservable = of([fakeFrames[i]]);
|
||||
i++;
|
||||
return mockObservable;
|
||||
} else {
|
||||
return of([]);
|
||||
}
|
||||
});
|
||||
|
||||
const iterations = 15;
|
||||
// Times out after 15 passes for consistent testing
|
||||
const timeoutFunc = () => {
|
||||
return i >= iterations;
|
||||
};
|
||||
const myResponse = await lastValueFrom(
|
||||
ds.logsQuery([{ queryId: 'fake-query-id', region: 'default', refId: 'A' }], timeoutFunc)
|
||||
);
|
||||
|
||||
const expectedData = [
|
||||
{
|
||||
...fakeFrames[14],
|
||||
meta: {
|
||||
custom: {
|
||||
Status: 'Cancelled',
|
||||
},
|
||||
stats: fakeFrames[14].meta!.stats,
|
||||
},
|
||||
},
|
||||
];
|
||||
|
||||
expect(myResponse).toEqual({
|
||||
data: expectedData,
|
||||
key: 'test-key',
|
||||
state: 'Done',
|
||||
error: {
|
||||
type: DataQueryErrorType.Timeout,
|
||||
message: `error: query timed out after 5 attempts`,
|
||||
},
|
||||
});
|
||||
expect(i).toBe(iterations);
|
||||
});
|
||||
|
||||
it('should continue querying as long as new data is being received', async () => {
|
||||
const { ds } = getTestContext();
|
||||
const fakeFrames = genMockFrames(15);
|
||||
|
||||
let i = 0;
|
||||
jest.spyOn(ds, 'makeLogActionRequest').mockImplementation((subtype: LogAction) => {
|
||||
if (subtype === 'GetQueryResults') {
|
||||
const mockObservable = of([fakeFrames[i]]);
|
||||
i++;
|
||||
return mockObservable;
|
||||
} else {
|
||||
return of([]);
|
||||
}
|
||||
});
|
||||
|
||||
const startTime = new Date();
|
||||
const timeoutFunc = () => {
|
||||
return Date.now() >= startTime.valueOf() + 6000;
|
||||
};
|
||||
const myResponse = await lastValueFrom(
|
||||
ds.logsQuery([{ queryId: 'fake-query-id', region: 'default', refId: 'A' }], timeoutFunc)
|
||||
);
|
||||
expect(myResponse).toEqual({
|
||||
data: [fakeFrames[fakeFrames.length - 1]],
|
||||
key: 'test-key',
|
||||
state: 'Done',
|
||||
});
|
||||
expect(i).toBe(15);
|
||||
});
|
||||
|
||||
it('should stop querying when results come back with status "Complete"', async () => {
|
||||
const { ds } = getTestContext();
|
||||
const fakeFrames = genMockFrames(3);
|
||||
let i = 0;
|
||||
jest.spyOn(ds, 'makeLogActionRequest').mockImplementation((subtype: LogAction) => {
|
||||
if (subtype === 'GetQueryResults') {
|
||||
const mockObservable = of([fakeFrames[i]]);
|
||||
i++;
|
||||
return mockObservable;
|
||||
} else {
|
||||
return of([]);
|
||||
}
|
||||
});
|
||||
|
||||
const startTime = new Date();
|
||||
const timeoutFunc = () => {
|
||||
return Date.now() >= startTime.valueOf() + 6000;
|
||||
};
|
||||
const myResponse = await lastValueFrom(
|
||||
ds.logsQuery([{ queryId: 'fake-query-id', region: 'default', refId: 'A' }], timeoutFunc)
|
||||
);
|
||||
|
||||
expect(myResponse).toEqual({
|
||||
data: [fakeFrames[2]],
|
||||
key: 'test-key',
|
||||
state: 'Done',
|
||||
});
|
||||
expect(i).toBe(3);
|
||||
});
|
||||
});
|
||||
|
||||
describe('When performing CloudWatch metrics query', () => {
|
||||
const query: any = {
|
||||
range: defaultTimeRange,
|
||||
rangeRaw: { from: 1483228800, to: 1483232400 },
|
||||
targets: [
|
||||
{
|
||||
metricQueryType: MetricQueryType.Search,
|
||||
metricEditorMode: MetricEditorMode.Builder,
|
||||
type: 'Metrics',
|
||||
expression: '',
|
||||
refId: 'A',
|
||||
region: 'us-east-1',
|
||||
namespace: 'AWS/EC2',
|
||||
metricName: 'CPUUtilization',
|
||||
dimensions: {
|
||||
InstanceId: 'i-12345678',
|
||||
},
|
||||
statistic: 'Average',
|
||||
period: '300',
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
const response: any = {
|
||||
timings: [null],
|
||||
results: {
|
||||
A: {
|
||||
type: 'Metrics',
|
||||
error: '',
|
||||
refId: 'A',
|
||||
meta: {},
|
||||
series: [
|
||||
{
|
||||
name: 'CPUUtilization_Average',
|
||||
points: [
|
||||
[1, 1483228800000],
|
||||
[2, 1483229100000],
|
||||
[5, 1483229700000],
|
||||
],
|
||||
tags: {
|
||||
InstanceId: 'i-12345678',
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
it('should generate the correct query', async () => {
|
||||
const { ds, fetchMock } = getTestContext({ response });
|
||||
|
||||
await expect(ds.query(query)).toEmitValuesWith(() => {
|
||||
expect(fetchMock.mock.calls[0][0].data.queries).toMatchObject(
|
||||
expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
namespace: query.targets[0].namespace,
|
||||
metricName: query.targets[0].metricName,
|
||||
dimensions: { InstanceId: ['i-12345678'] },
|
||||
statistic: query.targets[0].statistic,
|
||||
period: query.targets[0].period,
|
||||
}),
|
||||
])
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
it('should generate the correct query with interval variable', async () => {
|
||||
const period: CustomVariableModel = {
|
||||
...initialVariableModelState,
|
||||
id: 'period',
|
||||
name: 'period',
|
||||
index: 0,
|
||||
current: { value: '10m', text: '10m', selected: true },
|
||||
options: [{ value: '10m', text: '10m', selected: true }],
|
||||
multi: false,
|
||||
includeAll: false,
|
||||
query: '',
|
||||
hide: VariableHide.dontHide,
|
||||
type: 'custom',
|
||||
};
|
||||
const templateSrv = new TemplateSrv();
|
||||
templateSrv.init([period]);
|
||||
|
||||
const query: any = {
|
||||
range: defaultTimeRange,
|
||||
rangeRaw: { from: 1483228800, to: 1483232400 },
|
||||
targets: [
|
||||
{
|
||||
metricQueryType: MetricQueryType.Search,
|
||||
metricEditorMode: MetricEditorMode.Builder,
|
||||
type: 'Metrics',
|
||||
refId: 'A',
|
||||
region: 'us-east-1',
|
||||
namespace: 'AWS/EC2',
|
||||
metricName: 'CPUUtilization',
|
||||
dimensions: {
|
||||
InstanceId: 'i-12345678',
|
||||
},
|
||||
statistic: 'Average',
|
||||
period: '[[period]]',
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
const { ds, fetchMock } = getTestContext({ response, templateSrv });
|
||||
|
||||
await expect(ds.query(query)).toEmitValuesWith(() => {
|
||||
expect(fetchMock.mock.calls[0][0].data.queries[0].period).toEqual('600');
|
||||
});
|
||||
});
|
||||
|
||||
it('should return series list', async () => {
|
||||
const { ds } = getTestContext({ response });
|
||||
|
||||
await expect(ds.query(query)).toEmitValuesWith((received) => {
|
||||
const result = received[0];
|
||||
expect(getFrameDisplayName(result.data[0])).toBe(response.results.A.series[0].name);
|
||||
expect(result.data[0].fields[1].values.buffer[0]).toBe(response.results.A.series[0].points[0][0]);
|
||||
});
|
||||
});
|
||||
|
||||
describe('and throttling exception is thrown', () => {
|
||||
const partialQuery = {
|
||||
metricQueryType: MetricQueryType.Search,
|
||||
metricEditorMode: MetricEditorMode.Builder,
|
||||
type: 'Metrics',
|
||||
namespace: 'AWS/EC2',
|
||||
metricName: 'CPUUtilization',
|
||||
dimensions: {
|
||||
InstanceId: 'i-12345678',
|
||||
},
|
||||
statistic: 'Average',
|
||||
period: '300',
|
||||
expression: '',
|
||||
};
|
||||
|
||||
const query: any = {
|
||||
range: defaultTimeRange,
|
||||
rangeRaw: { from: 1483228800, to: 1483232400 },
|
||||
targets: [
|
||||
{ ...partialQuery, refId: 'A', region: 'us-east-1' },
|
||||
{ ...partialQuery, refId: 'B', region: 'us-east-2' },
|
||||
{ ...partialQuery, refId: 'C', region: 'us-east-1' },
|
||||
{ ...partialQuery, refId: 'D', region: 'us-east-2' },
|
||||
{ ...partialQuery, refId: 'E', region: 'eu-north-1' },
|
||||
],
|
||||
};
|
||||
|
||||
const backendErrorResponse = {
|
||||
data: {
|
||||
message: 'Throttling: exception',
|
||||
results: {
|
||||
A: {
|
||||
error: 'Throttling: exception',
|
||||
refId: 'A',
|
||||
meta: {},
|
||||
},
|
||||
B: {
|
||||
error: 'Throttling: exception',
|
||||
refId: 'B',
|
||||
meta: {},
|
||||
},
|
||||
C: {
|
||||
error: 'Throttling: exception',
|
||||
refId: 'C',
|
||||
meta: {},
|
||||
},
|
||||
D: {
|
||||
error: 'Throttling: exception',
|
||||
refId: 'D',
|
||||
meta: {},
|
||||
},
|
||||
E: {
|
||||
error: 'Throttling: exception',
|
||||
refId: 'E',
|
||||
meta: {},
|
||||
},
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
beforeEach(() => {
|
||||
redux.setStore({
|
||||
dispatch: jest.fn(),
|
||||
} as any);
|
||||
});
|
||||
|
||||
it('should display one alert error message per region+datasource combination', async () => {
|
||||
const { ds } = getTestContext({ response: backendErrorResponse, throws: true });
|
||||
const memoizedDebounceSpy = jest.spyOn(ds, 'debouncedAlert');
|
||||
|
||||
await expect(ds.query(query)).toEmitValuesWith((received) => {
|
||||
expect(memoizedDebounceSpy).toHaveBeenCalledWith('TestDatasource', 'us-east-1');
|
||||
expect(memoizedDebounceSpy).toHaveBeenCalledWith('TestDatasource', 'us-east-2');
|
||||
expect(memoizedDebounceSpy).toHaveBeenCalledWith('TestDatasource', 'eu-north-1');
|
||||
expect(memoizedDebounceSpy).toBeCalledTimes(3);
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('When query region is "default"', () => {
|
||||
it('should return the datasource region if empty or "default"', () => {
|
||||
const { ds, instanceSettings } = getTestContext();
|
||||
const defaultRegion = instanceSettings.jsonData.defaultRegion;
|
||||
|
||||
expect(ds.getActualRegion()).toBe(defaultRegion);
|
||||
expect(ds.getActualRegion('')).toBe(defaultRegion);
|
||||
expect(ds.getActualRegion('default')).toBe(defaultRegion);
|
||||
});
|
||||
|
||||
it('should return the specified region if specified', () => {
|
||||
const { ds } = getTestContext();
|
||||
|
||||
expect(ds.getActualRegion('some-fake-region-1')).toBe('some-fake-region-1');
|
||||
});
|
||||
|
||||
it('should query for the datasource region if empty or "default"', async () => {
|
||||
const { ds, instanceSettings } = getTestContext();
|
||||
const performTimeSeriesQueryMock = jest.spyOn(ds, 'performTimeSeriesQuery').mockReturnValue(of({}));
|
||||
|
||||
const query: any = {
|
||||
range: defaultTimeRange,
|
||||
rangeRaw: { from: 1483228800, to: 1483232400 },
|
||||
targets: [
|
||||
{
|
||||
metricQueryType: MetricQueryType.Search,
|
||||
metricEditorMode: MetricEditorMode.Builder,
|
||||
type: 'Metrics',
|
||||
refId: 'A',
|
||||
region: 'default',
|
||||
namespace: 'AWS/EC2',
|
||||
metricName: 'CPUUtilization',
|
||||
dimensions: {
|
||||
InstanceId: 'i-12345678',
|
||||
},
|
||||
statistic: 'Average',
|
||||
period: '300s',
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
await expect(ds.query(query)).toEmitValuesWith(() => {
|
||||
expect(performTimeSeriesQueryMock.mock.calls[0][0].queries[0].region).toBe(
|
||||
instanceSettings.jsonData.defaultRegion
|
||||
);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('When interpolating variables', () => {
|
||||
it('should return an empty array if no queries are provided', () => {
|
||||
const templateSrv: any = { replace: jest.fn(), getVariables: () => [] };
|
||||
const { ds } = getTestContext({ templateSrv });
|
||||
|
||||
expect(ds.interpolateVariablesInQueries([], {})).toHaveLength(0);
|
||||
});
|
||||
|
||||
it('should replace correct variables in CloudWatchLogsQuery', () => {
|
||||
const templateSrv: any = { replace: jest.fn(), getVariables: () => [] };
|
||||
const { ds } = getTestContext({ templateSrv });
|
||||
const variableName = 'someVar';
|
||||
const logQuery: CloudWatchLogsQuery = {
|
||||
id: 'someId',
|
||||
refId: 'someRefId',
|
||||
queryMode: 'Logs',
|
||||
expression: `$${variableName}`,
|
||||
region: `$${variableName}`,
|
||||
};
|
||||
|
||||
ds.interpolateVariablesInQueries([logQuery], {});
|
||||
|
||||
// We interpolate `region` in CloudWatchLogsQuery
|
||||
expect(templateSrv.replace).toHaveBeenCalledWith(`$${variableName}`, {});
|
||||
expect(templateSrv.replace).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
|
||||
it('should replace correct variables in CloudWatchMetricsQuery', () => {
|
||||
const templateSrv: any = {
|
||||
replace: jest.fn(),
|
||||
getVariables: () => [],
|
||||
getVariableName: jest.fn((name: string) => name),
|
||||
};
|
||||
const { ds } = getTestContext({ templateSrv });
|
||||
const variableName = 'someVar';
|
||||
const logQuery: CloudWatchMetricsQuery = {
|
||||
queryMode: 'Metrics',
|
||||
id: 'someId',
|
||||
refId: 'someRefId',
|
||||
expression: `$${variableName}`,
|
||||
region: `$${variableName}`,
|
||||
period: `$${variableName}`,
|
||||
alias: `$${variableName}`,
|
||||
metricName: `$${variableName}`,
|
||||
namespace: `$${variableName}`,
|
||||
dimensions: {
|
||||
[`$${variableName}`]: `$${variableName}`,
|
||||
},
|
||||
matchExact: false,
|
||||
statistic: '',
|
||||
sqlExpression: `$${variableName}`,
|
||||
};
|
||||
|
||||
ds.interpolateVariablesInQueries([logQuery], {});
|
||||
|
||||
// We interpolate `expression`, `region`, `period`, `alias`, `metricName`, and `nameSpace` in CloudWatchMetricsQuery
|
||||
expect(templateSrv.replace).toHaveBeenCalledWith(`$${variableName}`, {});
|
||||
expect(templateSrv.replace).toHaveBeenCalledTimes(7);
|
||||
|
||||
expect(templateSrv.getVariableName).toHaveBeenCalledWith(`$${variableName}`);
|
||||
expect(templateSrv.getVariableName).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
});
|
||||
|
||||
describe('When performing CloudWatch query for extended statistic', () => {
|
||||
const query: any = {
|
||||
range: defaultTimeRange,
|
||||
rangeRaw: { from: 1483228800, to: 1483232400 },
|
||||
targets: [
|
||||
{
|
||||
metricQueryType: MetricQueryType.Search,
|
||||
metricEditorMode: MetricEditorMode.Builder,
|
||||
type: 'Metrics',
|
||||
refId: 'A',
|
||||
region: 'us-east-1',
|
||||
namespace: 'AWS/ApplicationELB',
|
||||
metricName: 'TargetResponseTime',
|
||||
dimensions: {
|
||||
LoadBalancer: 'lb',
|
||||
TargetGroup: 'tg',
|
||||
},
|
||||
statistic: 'p90.00',
|
||||
period: '300s',
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
const response: any = {
|
||||
timings: [null],
|
||||
results: {
|
||||
A: {
|
||||
error: '',
|
||||
refId: 'A',
|
||||
meta: {},
|
||||
series: [
|
||||
{
|
||||
name: 'TargetResponseTime_p90.00',
|
||||
points: [
|
||||
[1, 1483228800000],
|
||||
[2, 1483229100000],
|
||||
[5, 1483229700000],
|
||||
],
|
||||
tags: {
|
||||
LoadBalancer: 'lb',
|
||||
TargetGroup: 'tg',
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
it('should return series list', async () => {
|
||||
const { ds } = getTestContext({ response });
|
||||
|
||||
await expect(ds.query(query)).toEmitValuesWith((received) => {
|
||||
const result = received[0];
|
||||
expect(getFrameDisplayName(result.data[0])).toBe(response.results.A.series[0].name);
|
||||
expect(result.data[0].fields[1].values.buffer[0]).toBe(response.results.A.series[0].points[0][0]);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('When performing CloudWatch query with template variables', () => {
|
||||
let templateSrv: TemplateSrv;
|
||||
beforeEach(() => {
|
||||
const key = 'key';
|
||||
const var1: CustomVariableModel = {
|
||||
...initialVariableModelState,
|
||||
id: 'var1',
|
||||
rootStateKey: key,
|
||||
name: 'var1',
|
||||
index: 0,
|
||||
current: { value: 'var1-foo', text: 'var1-foo', selected: true },
|
||||
options: [{ value: 'var1-foo', text: 'var1-foo', selected: true }],
|
||||
multi: false,
|
||||
includeAll: false,
|
||||
query: '',
|
||||
hide: VariableHide.dontHide,
|
||||
type: 'custom',
|
||||
};
|
||||
const var2: CustomVariableModel = {
|
||||
...initialVariableModelState,
|
||||
id: 'var2',
|
||||
rootStateKey: key,
|
||||
name: 'var2',
|
||||
index: 1,
|
||||
current: { value: 'var2-foo', text: 'var2-foo', selected: true },
|
||||
options: [{ value: 'var2-foo', text: 'var2-foo', selected: true }],
|
||||
multi: false,
|
||||
includeAll: false,
|
||||
query: '',
|
||||
hide: VariableHide.dontHide,
|
||||
type: 'custom',
|
||||
};
|
||||
const var3: CustomVariableModel = {
|
||||
...initialVariableModelState,
|
||||
id: 'var3',
|
||||
rootStateKey: key,
|
||||
name: 'var3',
|
||||
index: 2,
|
||||
current: { value: ['var3-foo', 'var3-baz'], text: 'var3-foo + var3-baz', selected: true },
|
||||
options: [
|
||||
{ selected: true, value: 'var3-foo', text: 'var3-foo' },
|
||||
{ selected: false, value: 'var3-bar', text: 'var3-bar' },
|
||||
{ selected: true, value: 'var3-baz', text: 'var3-baz' },
|
||||
],
|
||||
multi: true,
|
||||
includeAll: false,
|
||||
query: '',
|
||||
hide: VariableHide.dontHide,
|
||||
type: 'custom',
|
||||
};
|
||||
const var4: CustomVariableModel = {
|
||||
...initialVariableModelState,
|
||||
id: 'var4',
|
||||
rootStateKey: key,
|
||||
name: 'var4',
|
||||
index: 3,
|
||||
options: [
|
||||
{ selected: true, value: 'var4-foo', text: 'var4-foo' },
|
||||
{ selected: false, value: 'var4-bar', text: 'var4-bar' },
|
||||
{ selected: true, value: 'var4-baz', text: 'var4-baz' },
|
||||
],
|
||||
current: { value: ['var4-foo', 'var4-baz'], text: 'var4-foo + var4-baz', selected: true },
|
||||
multi: true,
|
||||
includeAll: false,
|
||||
query: '',
|
||||
hide: VariableHide.dontHide,
|
||||
type: 'custom',
|
||||
};
|
||||
const variables = [var1, var2, var3, var4];
|
||||
const state = convertToStoreState(key, variables);
|
||||
templateSrv = new TemplateSrv(getTemplateSrvDependencies(state));
|
||||
templateSrv.init(variables);
|
||||
});
|
||||
|
||||
it('should generate the correct query for single template variable', async () => {
|
||||
const { ds, fetchMock } = getTestContext({ templateSrv });
|
||||
const query: any = {
|
||||
range: defaultTimeRange,
|
||||
rangeRaw: { from: 1483228800, to: 1483232400 },
|
||||
targets: [
|
||||
{
|
||||
metricQueryType: MetricQueryType.Search,
|
||||
metricEditorMode: MetricEditorMode.Builder,
|
||||
type: 'Metrics',
|
||||
refId: 'A',
|
||||
region: 'us-east-1',
|
||||
namespace: 'TestNamespace',
|
||||
metricName: 'TestMetricName',
|
||||
dimensions: {
|
||||
dim2: '[[var2]]',
|
||||
},
|
||||
statistic: 'Average',
|
||||
period: '300s',
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
await expect(ds.query(query)).toEmitValuesWith(() => {
|
||||
expect(fetchMock.mock.calls[0][0].data.queries[0].dimensions['dim2']).toStrictEqual(['var2-foo']);
|
||||
});
|
||||
});
|
||||
|
||||
it('should generate the correct query in the case of one multilple template variables', async () => {
|
||||
const { ds, fetchMock } = getTestContext({ templateSrv });
|
||||
const query: any = {
|
||||
range: defaultTimeRange,
|
||||
rangeRaw: { from: 1483228800, to: 1483232400 },
|
||||
targets: [
|
||||
{
|
||||
metricQueryType: MetricQueryType.Search,
|
||||
metricEditorMode: MetricEditorMode.Builder,
|
||||
type: 'Metrics',
|
||||
refId: 'A',
|
||||
region: 'us-east-1',
|
||||
namespace: 'TestNamespace',
|
||||
metricName: 'TestMetricName',
|
||||
dimensions: {
|
||||
dim1: '[[var1]]',
|
||||
dim2: '[[var2]]',
|
||||
dim3: '[[var3]]',
|
||||
},
|
||||
statistic: 'Average',
|
||||
period: '300s',
|
||||
},
|
||||
],
|
||||
scopedVars: {
|
||||
var1: { selected: true, value: 'var1-foo' },
|
||||
var2: { selected: true, value: 'var2-foo' },
|
||||
},
|
||||
};
|
||||
|
||||
await expect(ds.query(query)).toEmitValuesWith(() => {
|
||||
expect(fetchMock.mock.calls[0][0].data.queries[0].dimensions['dim1']).toStrictEqual(['var1-foo']);
|
||||
expect(fetchMock.mock.calls[0][0].data.queries[0].dimensions['dim2']).toStrictEqual(['var2-foo']);
|
||||
expect(fetchMock.mock.calls[0][0].data.queries[0].dimensions['dim3']).toStrictEqual(['var3-foo', 'var3-baz']);
|
||||
});
|
||||
});
|
||||
|
||||
it('should generate the correct query in the case of multilple multi template variables', async () => {
|
||||
const { ds, fetchMock } = getTestContext({ templateSrv });
|
||||
const query: any = {
|
||||
range: defaultTimeRange,
|
||||
rangeRaw: { from: 1483228800, to: 1483232400 },
|
||||
targets: [
|
||||
{
|
||||
metricQueryType: MetricQueryType.Search,
|
||||
metricEditorMode: MetricEditorMode.Builder,
|
||||
type: 'Metrics',
|
||||
refId: 'A',
|
||||
region: 'us-east-1',
|
||||
namespace: 'TestNamespace',
|
||||
metricName: 'TestMetricName',
|
||||
dimensions: {
|
||||
dim1: '[[var1]]',
|
||||
dim3: '[[var3]]',
|
||||
dim4: '[[var4]]',
|
||||
},
|
||||
statistic: 'Average',
|
||||
period: '300s',
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
await expect(ds.query(query)).toEmitValuesWith(() => {
|
||||
expect(fetchMock.mock.calls[0][0].data.queries[0].dimensions['dim1']).toStrictEqual(['var1-foo']);
|
||||
expect(fetchMock.mock.calls[0][0].data.queries[0].dimensions['dim3']).toStrictEqual(['var3-foo', 'var3-baz']);
|
||||
expect(fetchMock.mock.calls[0][0].data.queries[0].dimensions['dim4']).toStrictEqual(['var4-foo', 'var4-baz']);
|
||||
});
|
||||
});
|
||||
|
||||
it('should generate the correct query for multilple template variables, lack scopedVars', async () => {
|
||||
const { ds, fetchMock } = getTestContext({ templateSrv });
|
||||
const query: any = {
|
||||
range: defaultTimeRange,
|
||||
rangeRaw: { from: 1483228800, to: 1483232400 },
|
||||
targets: [
|
||||
{
|
||||
metricQueryType: MetricQueryType.Search,
|
||||
metricEditorMode: MetricEditorMode.Builder,
|
||||
type: 'Metrics',
|
||||
refId: 'A',
|
||||
region: 'us-east-1',
|
||||
namespace: 'TestNamespace',
|
||||
metricName: 'TestMetricName',
|
||||
dimensions: {
|
||||
dim1: '[[var1]]',
|
||||
dim2: '[[var2]]',
|
||||
dim3: '[[var3]]',
|
||||
},
|
||||
statistic: 'Average',
|
||||
period: '300',
|
||||
},
|
||||
],
|
||||
scopedVars: {
|
||||
var1: { selected: true, value: 'var1-foo' },
|
||||
},
|
||||
};
|
||||
|
||||
await expect(ds.query(query)).toEmitValuesWith(() => {
|
||||
expect(fetchMock.mock.calls[0][0].data.queries[0].dimensions['dim1']).toStrictEqual(['var1-foo']);
|
||||
expect(fetchMock.mock.calls[0][0].data.queries[0].dimensions['dim2']).toStrictEqual(['var2-foo']);
|
||||
expect(fetchMock.mock.calls[0][0].data.queries[0].dimensions['dim3']).toStrictEqual(['var3-foo', 'var3-baz']);
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
function genMockFrames(numResponses: number): DataFrame[] {
|
||||
const recordIncrement = 50;
|
||||
const mockFrames: DataFrame[] = [];
|
||||
|
||||
for (let i = 0; i < numResponses; i++) {
|
||||
mockFrames.push({
|
||||
fields: [],
|
||||
meta: {
|
||||
custom: {
|
||||
Status: i === numResponses - 1 ? CloudWatchLogsQueryStatus.Complete : CloudWatchLogsQueryStatus.Running,
|
||||
},
|
||||
stats: [
|
||||
{
|
||||
displayName: 'Records scanned',
|
||||
value: (i + 1) * recordIncrement,
|
||||
},
|
||||
],
|
||||
},
|
||||
refId: 'A',
|
||||
length: 0,
|
||||
});
|
||||
}
|
||||
|
||||
return mockFrames;
|
||||
}
|
@ -1,5 +1,5 @@
|
||||
import { AwsAuthDataSourceJsonData, AwsAuthDataSourceSecureJsonData } from '@grafana/aws-sdk';
|
||||
import { DataQuery, DataSourceRef, SelectableValue } from '@grafana/data';
|
||||
import { DataFrame, DataQuery, DataSourceRef, SelectableValue } from '@grafana/data';
|
||||
|
||||
import {
|
||||
QueryEditorArrayExpression,
|
||||
@ -244,6 +244,7 @@ export interface TSDBQueryResult<T = any> {
|
||||
refId: string;
|
||||
series: TSDBTimeSeries[];
|
||||
tables: Array<TSDBTable<T>>;
|
||||
frames: DataFrame[];
|
||||
|
||||
error?: string;
|
||||
meta?: any;
|
||||
@ -254,6 +255,15 @@ export interface TSDBTable<T = any> {
|
||||
rows: T[];
|
||||
}
|
||||
|
||||
export interface DataQueryError<CloudWatchMetricsQuery> {
|
||||
data?: {
|
||||
message?: string;
|
||||
error?: string;
|
||||
results: Record<string, TSDBQueryResult<CloudWatchMetricsQuery>>;
|
||||
};
|
||||
message?: string;
|
||||
}
|
||||
|
||||
export interface TSDBTimeSeries {
|
||||
name: string;
|
||||
points: TSDBTimePoint[];
|
||||
|
@ -19,7 +19,7 @@ ds.datasource.getRegions = jest.fn().mockResolvedValue([{ label: 'a', value: 'a'
|
||||
ds.datasource.getNamespaces = jest.fn().mockResolvedValue([{ label: 'b', value: 'b' }]);
|
||||
ds.datasource.getMetrics = jest.fn().mockResolvedValue([{ label: 'c', value: 'c' }]);
|
||||
ds.datasource.getDimensionKeys = jest.fn().mockResolvedValue([{ label: 'd', value: 'd' }]);
|
||||
ds.datasource.describeAllLogGroups = jest.fn().mockResolvedValue(['a', 'b']);
|
||||
ds.datasource.logsQueryRunner.describeAllLogGroups = jest.fn().mockResolvedValue(['a', 'b']);
|
||||
const getDimensionValues = jest.fn().mockResolvedValue([{ label: 'e', value: 'e' }]);
|
||||
const getEbsVolumeIds = jest.fn().mockResolvedValue([{ label: 'f', value: 'f' }]);
|
||||
const getEc2InstanceAttribute = jest.fn().mockResolvedValue([{ label: 'g', value: 'g' }]);
|
||||
|
@ -55,7 +55,10 @@ export class CloudWatchVariableSupport extends CustomVariableSupport<CloudWatchD
|
||||
}
|
||||
|
||||
async handleLogGroupsQuery({ region, logGroupPrefix }: VariableQuery) {
|
||||
const logGroups = await this.datasource.describeAllLogGroups({ region, logGroupNamePrefix: logGroupPrefix });
|
||||
const logGroups = await this.datasource.logsQueryRunner.describeAllLogGroups({
|
||||
region,
|
||||
logGroupNamePrefix: logGroupPrefix,
|
||||
});
|
||||
return logGroups.map((s) => ({
|
||||
text: s,
|
||||
value: s,
|
||||
|
Loading…
Reference in New Issue
Block a user