CloudWatch: Call query method from DataSourceWithBackend to support public dashboards (#77532)
* CloudWatch: call query method from DataSourceWithBackend to support public dashboards
* add test
* remove unneeded properties from test case
* update betterer
* add parens to group related logic
* remove unnecessary aliasing of variable
* use t.Cleanup
* remove redundant check
* add comment
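In short: each CloudWatch query runner now receives the `query` method that the datasource inherits from `DataSourceWithBackend` and calls it with an ordinary `DataQueryRequest`, instead of posting to `/api/ds/query` itself through `awsRequest`. Public dashboards rely on that shared `query` path, which is what the change enables. The sketch below illustrates only the shape of the pattern; `QueryFn` and `ExampleRunner` are made-up names, and the actual wiring between the CloudWatch datasource class and its runners is not part of this diff.

```ts
// Illustrative sketch only (assumed names, simplified types); the real classes live in
// the CloudWatch datasource plugin and are shown in the diff below.
import { Observable } from 'rxjs';
import { DataQuery, DataQueryRequest, DataQueryResponse } from '@grafana/data';

// The runner is handed the datasource's query function instead of an HTTP helper.
type QueryFn = (request: DataQueryRequest<DataQuery>) => Observable<DataQueryResponse>;

class ExampleRunner {
  constructor(private readonly queryFn: QueryFn) {}

  run(options: DataQueryRequest<DataQuery>, targets: DataQuery[]): Observable<DataQueryResponse> {
    // Mirror the runners in the diff: reuse the incoming request, swap in the prepared
    // targets, and let the datasource's query method handle transport and response parsing.
    return this.queryFn({ ...options, targets });
  }
}

// Hypothetical wiring: a datasource extending DataSourceWithBackend passes its own
// query method down, e.g. new ExampleRunner((request) => datasource.query(request)).
```

The diff below applies this pattern across the annotation, logs, and metrics query runners, their tests, and the shared CloudWatchRequest base class.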
@@ -22,11 +22,11 @@ describe('CloudWatchAnnotationQueryRunner', () => {
   ];

   it('should issue the correct query', async () => {
-    const { runner, fetchMock, request } = setupMockedAnnotationQueryRunner({
+    const { runner, queryMock, request } = setupMockedAnnotationQueryRunner({
       variables: [namespaceVariable, regionVariable],
     });
     await expect(runner.handleAnnotationQuery(queries, request)).toEmitValuesWith(() => {
-      expect(fetchMock.mock.calls[0][0].data.queries[0]).toMatchObject(
+      expect(queryMock.mock.calls[0][0].targets[0]).toMatchObject(
         expect.objectContaining({
           region: regionVariable.current.value,
           namespace: namespaceVariable.current.value,
@@ -1,7 +1,6 @@
-import { map, Observable } from 'rxjs';
+import { Observable } from 'rxjs';

 import { DataQueryRequest, DataQueryResponse, DataSourceInstanceSettings } from '@grafana/data';
-import { toDataQueryResponse } from '@grafana/runtime';
 import { TemplateSrv } from 'app/features/templating/template_srv';

 import { CloudWatchAnnotationQuery, CloudWatchJsonData, CloudWatchQuery } from '../types';
@@ -10,18 +9,21 @@ import { CloudWatchRequest } from './CloudWatchRequest';

 // This class handles execution of CloudWatch annotation queries
 export class CloudWatchAnnotationQueryRunner extends CloudWatchRequest {
-  constructor(instanceSettings: DataSourceInstanceSettings<CloudWatchJsonData>, templateSrv: TemplateSrv) {
-    super(instanceSettings, templateSrv);
+  constructor(
+    instanceSettings: DataSourceInstanceSettings<CloudWatchJsonData>,
+    templateSrv: TemplateSrv,
+    queryFn: (request: DataQueryRequest<CloudWatchQuery>) => Observable<DataQueryResponse>
+  ) {
+    super(instanceSettings, templateSrv, queryFn);
   }

   handleAnnotationQuery(
     queries: CloudWatchAnnotationQuery[],
     options: DataQueryRequest<CloudWatchQuery>
   ): Observable<DataQueryResponse> {
-    return this.awsRequest(this.dsQueryEndpoint, {
-      from: options.range.from.valueOf().toString(),
-      to: options.range.to.valueOf().toString(),
-      queries: queries.map((query) => ({
+    return this.query({
+      ...options,
+      targets: queries.map((query) => ({
         ...query,
         statistic: this.templateSrv.replace(query.statistic),
         region: this.templateSrv.replace(this.getActualRegion(query.region)),
@@ -34,11 +36,6 @@ export class CloudWatchAnnotationQueryRunner extends CloudWatchRequest {
         type: 'annotationQuery',
         datasource: this.ref,
       })),
-    }).pipe(
-      map((r) => {
-        const frames = toDataQueryResponse(r).data;
-        return { data: frames };
-      })
-    );
+    });
   }
 }
@@ -33,7 +33,7 @@ describe('CloudWatchLogsQueryRunner', () => {

   describe('getLogRowContext', () => {
     it('replaces parameters correctly in the query', async () => {
-      const { runner, fetchMock } = setupMockedLogsQueryRunner();
+      const { runner, queryMock } = setupMockedLogsQueryRunner();
       const row: LogRowModel = {
         entryFieldIndex: 0,
         rowIndex: 0,
@@ -59,16 +59,16 @@ describe('CloudWatchLogsQueryRunner', () => {
         uid: '1',
       };
       await runner.getLogRowContext(row);
-      expect(fetchMock.mock.calls[0][0].data.queries[0].endTime).toBe(4);
-      expect(fetchMock.mock.calls[0][0].data.queries[0].region).toBe(undefined);
+      expect(queryMock.mock.calls[0][0].targets[0].endTime).toBe(4);
+      expect(queryMock.mock.calls[0][0].targets[0].region).toBe('');

       await runner.getLogRowContext(
         row,
         { direction: LogRowContextQueryDirection.Forward },
         { ...validLogsQuery, region: 'eu-east' }
       );
-      expect(fetchMock.mock.calls[1][0].data.queries[0].startTime).toBe(4);
-      expect(fetchMock.mock.calls[1][0].data.queries[0].region).toBe('eu-east');
+      expect(queryMock.mock.calls[1][0].targets[0].startTime).toBe(4);
+      expect(queryMock.mock.calls[1][0].targets[0].region).toBe('eu-east');
     });
   });

@@ -292,8 +292,7 @@ describe('CloudWatchLogsQueryRunner', () => {

   describe('makeLogActionRequest', () => {
     it('should use the time range from the options if it is available', async () => {
-      const { runner } = setupMockedLogsQueryRunner();
-      const spy = jest.spyOn(runner, 'awsRequest');
+      const { runner, queryMock } = setupMockedLogsQueryRunner();
       const from = dateTime(0);
       const to = dateTime(1000);
       const options: DataQueryRequest<CloudWatchLogsQuery> = {
@@ -301,24 +300,24 @@ describe('CloudWatchLogsQueryRunner', () => {
         range: { from, to, raw: { from, to } },
       };
       await lastValueFrom(runner.makeLogActionRequest('StartQuery', [genMockCloudWatchLogsRequest()], options));
-      expect(spy).toHaveBeenNthCalledWith(1, '/api/ds/query', expect.objectContaining({ from: '0', to: '1000' }), {
-        'X-Cache-Skip': 'true',
-      });
+      expect(queryMock.mock.calls[0][0].skipQueryCache).toBe(true);
+      expect(queryMock.mock.calls[0][0]).toEqual(expect.objectContaining({ range: { from, to, raw: { from, to } } }));
     });

     it('should use the time range from the timeSrv if the time range in the options is not available', async () => {
       const timeSrv = getTimeSrv();
+      const from = dateTime(1111);
+      const to = dateTime(2222);
       timeSrv.timeRange = jest.fn().mockReturnValue({
-        from: dateTime(1111),
-        to: dateTime(2222),
-        raw: { from: dateTime(1111), to: dateTime(2222) },
+        from,
+        to,
+        raw: { from, to },
       });
-      const { runner } = setupMockedLogsQueryRunner({ timeSrv });
-      const spy = jest.spyOn(runner, 'awsRequest');
+      const { runner, queryMock } = setupMockedLogsQueryRunner({ timeSrv });
       await lastValueFrom(runner.makeLogActionRequest('StartQuery', [genMockCloudWatchLogsRequest()]));
-      expect(spy).toHaveBeenNthCalledWith(1, '/api/ds/query', expect.objectContaining({ from: '1111', to: '2222' }), {
-        'X-Cache-Skip': 'true',
-      });
+
+      expect(queryMock.mock.calls[0][0].skipQueryCache).toBe(true);
+      expect(queryMock.mock.calls[0][0]).toEqual(expect.objectContaining({ range: { from, to, raw: { from, to } } }));
     });
   });
 });
@@ -8,6 +8,7 @@ import {
   map,
   mergeMap,
   Observable,
+  of,
   repeat,
   scan,
   share,
@@ -29,7 +30,7 @@ import {
   LogRowModel,
   rangeUtil,
 } from '@grafana/data';
-import { BackendDataSourceResponse, config, FetchError, FetchResponse, toDataQueryResponse } from '@grafana/runtime';
+import { config, FetchError } from '@grafana/runtime';
 import { TimeSrv } from 'app/features/dashboard/services/TimeSrv';
 import { TemplateSrv } from 'app/features/templating/template_srv';

@@ -63,14 +64,66 @@ export class CloudWatchLogsQueryRunner extends CloudWatchRequest {
   constructor(
     instanceSettings: DataSourceInstanceSettings<CloudWatchJsonData>,
     templateSrv: TemplateSrv,
-    private readonly timeSrv: TimeSrv
+    private readonly timeSrv: TimeSrv,
+    queryFn: (request: DataQueryRequest<CloudWatchQuery>) => Observable<DataQueryResponse>
   ) {
-    super(instanceSettings, templateSrv);
+    super(instanceSettings, templateSrv, queryFn);

     this.tracingDataSourceUid = instanceSettings.jsonData.tracingDatasourceUid;
     this.logsTimeout = instanceSettings.jsonData.logsTimeout || '30m';
   }

+  /**
+   * Check if the query is complete and returns results if it is. Otherwise it will poll for results.
+   */
+  getQueryResults = ({
+    frames,
+    error,
+    logQueries,
+    timeoutFunc,
+  }: {
+    frames: DataFrame[];
+    logQueries: CloudWatchLogsQuery[];
+    timeoutFunc: () => boolean;
+    error?: DataQueryError;
+  }) => {
+    // If every frame is already finished, we can return the result as the
+    // query was run synchronously. Otherwise, we return `this.logsQuery`
+    // which will poll for the results.
+    if (
+      frames.every((frame) =>
+        [
+          CloudWatchLogsQueryStatus.Complete,
+          CloudWatchLogsQueryStatus.Cancelled,
+          CloudWatchLogsQueryStatus.Failed,
+        ].includes(frame.meta?.custom?.['Status'])
+      )
+    ) {
+      return of({
+        data: frames,
+        key: 'test-key',
+        state: LoadingState.Done,
+      });
+    }
+
+    return this.logsQuery(
+      frames.map((dataFrame) => ({
+        queryId: dataFrame.fields[0].values[0],
+        region: dataFrame.meta?.custom?.['Region'] ?? 'default',
+        refId: dataFrame.refId!,
+        statsGroups: logQueries.find((target) => target.refId === dataFrame.refId)?.statsGroups,
+      })),
+      timeoutFunc
+    ).pipe(
+      map((response: DataQueryResponse) => {
+        if (!response.error && error) {
+          response.error = error;
+        }
+        return response;
+      })
+    );
+  };
+
   /**
    * Handle log query. The log query works by starting the query on the CloudWatch and then periodically polling for
    * results.
@@ -118,30 +171,14 @@ export class CloudWatchLogsQueryRunner extends CloudWatchRequest {
     };

     return runWithRetry(
-      (targets: StartQueryRequest[]) => {
+      (targets) => {
         return this.makeLogActionRequest('StartQuery', targets, options);
       },
       startQueryRequests,
       timeoutFunc
     ).pipe(
       mergeMap(({ frames, error }: { frames: DataFrame[]; error?: DataQueryError }) =>
-        // This queries for the results
-        this.logsQuery(
-          frames.map((dataFrame) => ({
-            queryId: dataFrame.fields[0].values[0],
-            region: dataFrame.meta?.custom?.['Region'] ?? 'default',
-            refId: dataFrame.refId!,
-            statsGroups: logQueries.find((target) => target.refId === dataFrame.refId)?.statsGroups,
-          })),
-          timeoutFunc
-        ).pipe(
-          map((response: DataQueryResponse) => {
-            if (!response.error && error) {
-              response.error = error;
-            }
-            return response;
-          })
-        )
+        this.getQueryResults({ frames, logQueries, timeoutFunc, error })
       ),
       mergeMap((dataQueryResponse) => {
         return from(
@@ -277,32 +314,32 @@ export class CloudWatchLogsQueryRunner extends CloudWatchRequest {
   ): Observable<DataFrame[]> {
     const range = options?.range || this.timeSrv.timeRange();

-    const requestParams = {
-      from: range.from.valueOf().toString(),
-      to: range.to.valueOf().toString(),
-      queries: queryParams.map((param: CloudWatchLogsRequest) => ({
-        // eslint-ignore-next-line
-        refId: (param as StartQueryRequest).refId || 'A',
+    const requestParams: DataQueryRequest<CloudWatchLogsQuery> = {
+      ...options,
+      range,
+      skipQueryCache: true,
+      requestId: options?.requestId || '', // dummy
+      interval: options?.interval || '', // dummy
+      intervalMs: options?.intervalMs || 1, // dummy
+      scopedVars: options?.scopedVars || {}, // dummy
+      timezone: options?.timezone || '', // dummy
+      app: options?.app || '', // dummy
+      startTime: options?.startTime || 0, // dummy
+      targets: queryParams.map((param) => ({
+        ...param,
+        id: '',
+        queryMode: 'Logs',
+        refId: param.refId || 'A',
         intervalMs: 1, // dummy
         maxDataPoints: 1, // dummy
         datasource: this.ref,
         type: 'logAction',
         subtype: subtype,
-        ...param,
       })),
     };

-    const resultsToDataFrames = (
-      val:
-        | { data: BackendDataSourceResponse | undefined }
-        | FetchResponse<BackendDataSourceResponse | undefined>
-        | DataQueryError
-    ): DataFrame[] => toDataQueryResponse(val).data || [];
-
-    return this.awsRequest(this.dsQueryEndpoint, requestParams, {
-      'X-Cache-Skip': 'true',
-    }).pipe(
-      map((response) => resultsToDataFrames(response)),
+    return this.query(requestParams).pipe(
+      map((response) => response.data),
       catchError((err: FetchError) => {
         if (config.featureToggles.datasourceQueryMultiStatus && err.status === 207) {
           throw err;
@@ -347,9 +384,10 @@ export class CloudWatchLogsQueryRunner extends CloudWatchRequest {
     }

     const requestParams: GetLogEventsRequest = {
+      refId: query?.refId || 'A', // dummy
       limit,
       startFromHead: direction !== LogRowContextQueryDirection.Backward,
-      region: query?.region,
+      region: query?.region || '',
       logGroupName: parseLogGroupName(logField!.values[row.rowIndex]),
       logStreamName: logStreamField!.values[row.rowIndex],
     };
@@ -16,13 +16,13 @@ import {
   accountIdVariable,
 } from '../__mocks__/CloudWatchDataSource';
 import { setupMockedMetricsQueryRunner } from '../__mocks__/MetricsQueryRunner';
-import { validMetricSearchBuilderQuery } from '../__mocks__/queries';
+import { validMetricSearchBuilderQuery, validMetricSearchCodeQuery } from '../__mocks__/queries';
 import { MetricQueryType, MetricEditorMode, CloudWatchMetricsQuery, DataQueryError } from '../types';

 describe('CloudWatchMetricsQueryRunner', () => {
   describe('performTimeSeriesQuery', () => {
     it('should return the same length of data as result', async () => {
-      const { runner, timeRange } = setupMockedMetricsQueryRunner({
+      const { runner, timeRange, request } = setupMockedMetricsQueryRunner({
         data: {
           results: {
             a: { refId: 'a', series: [{ target: 'cpu', datapoints: [[1, 1]] }] },
@@ -33,12 +33,9 @@ describe('CloudWatchMetricsQueryRunner', () => {

       const observable = runner.performTimeSeriesQuery(
         {
-          queries: [
-            { datasourceId: 1, refId: 'a' },
-            { datasourceId: 1, refId: 'b' },
-          ],
-          from: '',
-          to: '',
+          ...request,
+          targets: [validMetricSearchCodeQuery, validMetricSearchCodeQuery],
+          range: timeRange,
         },
         timeRange
       );
@@ -50,7 +47,7 @@ describe('CloudWatchMetricsQueryRunner', () => {
     });

     it('sets fields.config.interval based on period', async () => {
-      const { runner, timeRange } = setupMockedMetricsQueryRunner({
+      const { runner, timeRange, request } = setupMockedMetricsQueryRunner({
         data: {
           results: {
             a: {
@@ -67,9 +64,9 @@ describe('CloudWatchMetricsQueryRunner', () => {

       const observable = runner.performTimeSeriesQuery(
         {
-          queries: [{ datasourceId: 1, refId: 'a' }],
-          from: '',
-          to: '',
+          ...request,
+          targets: [validMetricSearchCodeQuery, validMetricSearchCodeQuery],
+          range: timeRange,
         },
         timeRange
       );
@@ -125,10 +122,10 @@ describe('CloudWatchMetricsQueryRunner', () => {
     };

     it('should generate the correct query', async () => {
-      const { runner, fetchMock, request } = setupMockedMetricsQueryRunner({ data });
+      const { runner, queryMock, request } = setupMockedMetricsQueryRunner({ data });

       await expect(runner.handleMetricQueries(queries, request)).toEmitValuesWith(() => {
-        expect(fetchMock.mock.calls[0][0].data.queries).toMatchObject(
+        expect(queryMock.mock.calls[0][0].targets).toMatchObject(
           expect.arrayContaining([
             expect.objectContaining({
               namespace: queries[0].namespace,
@@ -161,13 +158,13 @@ describe('CloudWatchMetricsQueryRunner', () => {
         },
       ];

-      const { runner, fetchMock, request } = setupMockedMetricsQueryRunner({
+      const { runner, queryMock, request } = setupMockedMetricsQueryRunner({
         data,
         variables: [periodIntervalVariable],
       });

       await expect(runner.handleMetricQueries(queries, request)).toEmitValuesWith(() => {
-        expect(fetchMock.mock.calls[0][0].data.queries[0].period).toEqual('600');
+        expect(queryMock.mock.calls[0][0].targets[0].period).toEqual('600');
       });
     });

@@ -267,7 +264,7 @@ describe('CloudWatchMetricsQueryRunner', () => {
     });

     it('should display one alert error message per region+datasource combination', async () => {
-      const { runner, request } = setupMockedMetricsQueryRunner({ data: backendErrorResponse, throws: true });
+      const { runner, request } = setupMockedMetricsQueryRunner({ errorResponse: backendErrorResponse });
       const memoizedDebounceSpy = jest.spyOn(runner, 'debouncedAlert');

       await expect(runner.handleMetricQueries(queries, request)).toEmitValuesWith(() => {
@@ -360,7 +357,7 @@ describe('CloudWatchMetricsQueryRunner', () => {
     });

     it('interpolates variables correctly', async () => {
-      const { runner, fetchMock, request } = setupMockedMetricsQueryRunner({
+      const { runner, queryMock, request } = setupMockedMetricsQueryRunner({
         variables: [namespaceVariable, metricVariable, labelsVariable, limitVariable],
       });
       runner.handleMetricQueries(
@@ -384,15 +381,13 @@ describe('CloudWatchMetricsQueryRunner', () => {
         ],
         request
       );
-      expect(fetchMock).toHaveBeenCalledWith(
+      expect(queryMock).toHaveBeenCalledWith(
         expect.objectContaining({
-          data: expect.objectContaining({
-            queries: expect.arrayContaining([
-              expect.objectContaining({
-                sqlExpression: `SELECT SUM(CPUUtilization) FROM "AWS/EC2" GROUP BY InstanceId,InstanceType LIMIT 100`,
-              }),
-            ]),
-          }),
+          targets: expect.arrayContaining([
+            expect.objectContaining({
+              sqlExpression: `SELECT SUM(CPUUtilization) FROM "AWS/EC2" GROUP BY InstanceId,InstanceType LIMIT 100`,
+            }),
+          ]),
         })
       );
     });
@@ -464,7 +459,7 @@ describe('CloudWatchMetricsQueryRunner', () => {
     };

     it('should generate the correct query for single template variable', async () => {
-      const { runner, fetchMock, request } = setupMockedMetricsQueryRunner({ variables: [var1, var2, var3, var4] });
+      const { runner, queryMock, request } = setupMockedMetricsQueryRunner({ variables: [var1, var2, var3, var4] });
       const queries: CloudWatchMetricsQuery[] = [
         {
           id: '',
@@ -483,12 +478,12 @@ describe('CloudWatchMetricsQueryRunner', () => {
         },
       ];
       await expect(runner.handleMetricQueries(queries, request)).toEmitValuesWith(() => {
-        expect(fetchMock.mock.calls[0][0].data.queries[0].dimensions['dim2']).toStrictEqual(['var2-foo']);
+        expect(queryMock.mock.calls[0][0].targets[0].dimensions['dim2']).toStrictEqual(['var2-foo']);
       });
     });

     it('should generate the correct query in the case of one multiple template variables', async () => {
-      const { runner, fetchMock, request } = setupMockedMetricsQueryRunner({ variables: [var1, var2, var3, var4] });
+      const { runner, queryMock, request } = setupMockedMetricsQueryRunner({ variables: [var1, var2, var3, var4] });
       const queries: CloudWatchMetricsQuery[] = [
         {
           id: '',
@@ -518,14 +513,14 @@ describe('CloudWatchMetricsQueryRunner', () => {
           },
         })
       ).toEmitValuesWith(() => {
-        expect(fetchMock.mock.calls[0][0].data.queries[0].dimensions['dim1']).toStrictEqual(['var1-foo']);
-        expect(fetchMock.mock.calls[0][0].data.queries[0].dimensions['dim2']).toStrictEqual(['var2-foo']);
-        expect(fetchMock.mock.calls[0][0].data.queries[0].dimensions['dim3']).toStrictEqual(['var3-foo', 'var3-baz']);
+        expect(queryMock.mock.calls[0][0].targets[0].dimensions['dim1']).toStrictEqual(['var1-foo']);
+        expect(queryMock.mock.calls[0][0].targets[0].dimensions['dim2']).toStrictEqual(['var2-foo']);
+        expect(queryMock.mock.calls[0][0].targets[0].dimensions['dim3']).toStrictEqual(['var3-foo', 'var3-baz']);
       });
     });

     it('should generate the correct query in the case of multiple multi template variables', async () => {
-      const { runner, fetchMock, request } = setupMockedMetricsQueryRunner({ variables: [var1, var2, var3, var4] });
+      const { runner, queryMock, request } = setupMockedMetricsQueryRunner({ variables: [var1, var2, var3, var4] });
       const queries: CloudWatchMetricsQuery[] = [
         {
           id: '',
@@ -547,14 +542,14 @@ describe('CloudWatchMetricsQueryRunner', () => {
       ];

       await expect(runner.handleMetricQueries(queries, request)).toEmitValuesWith(() => {
-        expect(fetchMock.mock.calls[0][0].data.queries[0].dimensions['dim1']).toStrictEqual(['var1-foo']);
-        expect(fetchMock.mock.calls[0][0].data.queries[0].dimensions['dim3']).toStrictEqual(['var3-foo', 'var3-baz']);
-        expect(fetchMock.mock.calls[0][0].data.queries[0].dimensions['dim4']).toStrictEqual(['var4-foo', 'var4-baz']);
+        expect(queryMock.mock.calls[0][0].targets[0].dimensions['dim1']).toStrictEqual(['var1-foo']);
+        expect(queryMock.mock.calls[0][0].targets[0].dimensions['dim3']).toStrictEqual(['var3-foo', 'var3-baz']);
+        expect(queryMock.mock.calls[0][0].targets[0].dimensions['dim4']).toStrictEqual(['var4-foo', 'var4-baz']);
       });
     });

     it('should generate the correct query for multiple template variables, lack scopedVars', async () => {
-      const { runner, fetchMock, request } = setupMockedMetricsQueryRunner({ variables: [var1, var2, var3, var4] });
+      const { runner, queryMock, request } = setupMockedMetricsQueryRunner({ variables: [var1, var2, var3, var4] });
       const queries: CloudWatchMetricsQuery[] = [
         {
           id: '',
@@ -583,9 +578,9 @@ describe('CloudWatchMetricsQueryRunner', () => {
           },
         })
       ).toEmitValuesWith(() => {
-        expect(fetchMock.mock.calls[0][0].data.queries[0].dimensions['dim1']).toStrictEqual(['var1-foo']);
-        expect(fetchMock.mock.calls[0][0].data.queries[0].dimensions['dim2']).toStrictEqual(['var2-foo']);
-        expect(fetchMock.mock.calls[0][0].data.queries[0].dimensions['dim3']).toStrictEqual(['var3-foo', 'var3-baz']);
+        expect(queryMock.mock.calls[0][0].targets[0].dimensions['dim1']).toStrictEqual(['var1-foo']);
+        expect(queryMock.mock.calls[0][0].targets[0].dimensions['dim2']).toStrictEqual(['var2-foo']);
+        expect(queryMock.mock.calls[0][0].targets[0].dimensions['dim3']).toStrictEqual(['var3-foo', 'var3-baz']);
       });
     });
   });
@@ -622,22 +617,20 @@ describe('CloudWatchMetricsQueryRunner', () => {
       ['UTC', '+0000'],
     ];
     test.each(testTable)('should use the right time zone offset', (ianaTimezone, expectedOffset) => {
-      const { runner, fetchMock, request } = setupMockedMetricsQueryRunner();
+      const { runner, queryMock, request } = setupMockedMetricsQueryRunner();
       runner.handleMetricQueries([testQuery], {
         ...request,
         range: { ...request.range, from: dateTime(), to: dateTime() },
         timezone: ianaTimezone,
       });

-      expect(fetchMock).toHaveBeenCalledWith(
+      expect(queryMock).toHaveBeenCalledWith(
         expect.objectContaining({
-          data: expect.objectContaining({
-            queries: expect.arrayContaining([
-              expect.objectContaining({
-                timezoneUTCOffset: expectedOffset,
-              }),
-            ]),
-          }),
+          targets: expect.arrayContaining([
+            expect.objectContaining({
+              timezoneUTCOffset: expectedOffset,
+            }),
+          ]),
         })
       );
     });
@@ -879,7 +872,7 @@ describe('CloudWatchMetricsQueryRunner', () => {
       ];

       await expect(runner.handleMetricQueries(queries, request)).toEmitValuesWith(() => {
-        expect(performTimeSeriesQueryMock.mock.calls[0][0].queries[0].region).toBe(
+        expect(performTimeSeriesQueryMock.mock.calls[0][0].targets[0].region).toBe(
           instanceSettings.jsonData.defaultRegion
         );
       });
@@ -13,7 +13,6 @@ import {
   ScopedVars,
   TimeRange,
 } from '@grafana/data';
-import { toDataQueryResponse } from '@grafana/runtime';
 import { notifyApp } from 'app/core/actions';
 import { createErrorNotification } from 'app/core/copy/appNotification';
 import { TemplateSrv } from 'app/features/templating/template_srv';
@@ -23,14 +22,7 @@ import { AppNotificationTimeout } from 'app/types';

 import { ThrottlingErrorMessage } from '../components/Errors/ThrottlingErrorMessage';
 import memoizedDebounce from '../memoizedDebounce';
 import { migrateMetricQuery } from '../migrations/metricQueryMigrations';
-import {
-  CloudWatchJsonData,
-  CloudWatchMetricsQuery,
-  CloudWatchQuery,
-  DataQueryError,
-  MetricQuery,
-  MetricRequest,
-} from '../types';
+import { CloudWatchJsonData, CloudWatchMetricsQuery, CloudWatchQuery, DataQueryError } from '../types';
 import { filterMetricsQuery } from '../utils/utils';

 import { CloudWatchRequest } from './CloudWatchRequest';
@@ -53,8 +45,12 @@ export class CloudWatchMetricsQueryRunner extends CloudWatchRequest {
     AppNotificationTimeout.Error
   );

-  constructor(instanceSettings: DataSourceInstanceSettings<CloudWatchJsonData>, templateSrv: TemplateSrv) {
-    super(instanceSettings, templateSrv);
+  constructor(
+    instanceSettings: DataSourceInstanceSettings<CloudWatchJsonData>,
+    templateSrv: TemplateSrv,
+    queryFn: (request: DataQueryRequest<CloudWatchQuery>) => Observable<DataQueryResponse>
+  ) {
+    super(instanceSettings, templateSrv, queryFn);
   }

   handleMetricQueries = (
@@ -66,31 +62,28 @@ export class CloudWatchMetricsQueryRunner extends CloudWatchRequest {
       format: 'Z',
     }).replace(':', '');

-    const validMetricsQueries = metricQueries
-      .filter(this.filterMetricQuery)
-      .map((q: CloudWatchMetricsQuery): MetricQuery => {
-        const migratedQuery = migrateMetricQuery(q);
-        const migratedAndIterpolatedQuery = this.replaceMetricQueryVars(migratedQuery, options.scopedVars);
+    const validMetricsQueries = metricQueries.filter(this.filterMetricQuery).map((q) => {
+      const migratedQuery = migrateMetricQuery(q);
+      const migratedAndIterpolatedQuery = this.replaceMetricQueryVars(migratedQuery, options.scopedVars);

-        return {
-          timezoneUTCOffset,
-          intervalMs: options.intervalMs,
-          maxDataPoints: options.maxDataPoints,
-          ...migratedAndIterpolatedQuery,
-          type: 'timeSeriesQuery',
-          datasource: this.ref,
-        };
-      });
+      return {
+        timezoneUTCOffset,
+        intervalMs: options.intervalMs,
+        maxDataPoints: options.maxDataPoints,
+        ...migratedAndIterpolatedQuery,
+        type: 'timeSeriesQuery',
+        datasource: this.ref,
+      };
+    });

     // No valid targets, return the empty result to save a round trip.
     if (isEmpty(validMetricsQueries)) {
       return of({ data: [] });
     }

-    const request = {
-      from: options?.range?.from.valueOf().toString(),
-      to: options?.range?.to.valueOf().toString(),
-      queries: validMetricsQueries,
+    const request: DataQueryRequest<CloudWatchQuery> = {
+      ...options,
+      targets: validMetricsQueries,
     };

     return this.performTimeSeriesQuery(request, options.range);
@@ -114,15 +107,18 @@ export class CloudWatchMetricsQueryRunner extends CloudWatchRequest {
     };
   }

-  performTimeSeriesQuery(request: MetricRequest, { from, to }: TimeRange): Observable<DataQueryResponse> {
-    return this.awsRequest(this.dsQueryEndpoint, request).pipe(
+  performTimeSeriesQuery(
+    request: DataQueryRequest<CloudWatchQuery>,
+    { from, to }: TimeRange
+  ): Observable<DataQueryResponse> {
+    return this.query(request).pipe(
       map((res) => {
-        const dataframes: DataFrame[] = toDataQueryResponse(res).data;
+        const dataframes: DataFrame[] = res.data;
         if (!dataframes || dataframes.length <= 0) {
           return { data: [] };
         }

-        const lastError = findLast(res.data.results, (v) => !!v.error);
+        const lastError = findLast(res.data, (v) => !!v.error);

         dataframes.forEach((frame) => {
           frame.fields.forEach((field) => {
@@ -156,7 +152,7 @@ export class CloudWatchMetricsQueryRunner extends CloudWatchRequest {

         if (results.some((r) => r.error && /^Throttling:.*/.test(r.error))) {
           const failedRedIds = Object.keys(err.data?.results ?? {});
-          const regionsAffected = Object.values(request.queries).reduce(
+          const regionsAffected = Object.values(request.targets).reduce(
             (res: string[], { refId, region }) =>
               (refId && !failedRedIds.includes(refId)) || res.includes(region) ? res : [...res, region],
             []
@@ -1,6 +1,13 @@
-import { Observable } from 'rxjs';
+import { Observable, of } from 'rxjs';

-import { DataSourceInstanceSettings, DataSourceRef, getDataSourceRef, ScopedVars } from '@grafana/data';
+import {
+  DataQueryRequest,
+  DataQueryResponse,
+  DataSourceInstanceSettings,
+  DataSourceRef,
+  getDataSourceRef,
+  ScopedVars,
+} from '@grafana/data';
 import { BackendDataSourceResponse, FetchResponse, getBackendSrv } from '@grafana/runtime';
 import { notifyApp } from 'app/core/actions';
 import { createErrorNotification } from 'app/core/copy/appNotification';
@@ -9,12 +16,13 @@ import { store } from 'app/store/store';
 import { AppNotificationTimeout } from 'app/types';

 import memoizedDebounce from '../memoizedDebounce';
-import { CloudWatchJsonData, Dimensions, MetricRequest, MultiFilters } from '../types';
+import { CloudWatchJsonData, CloudWatchQuery, Dimensions, MetricRequest, MultiFilters } from '../types';

 export abstract class CloudWatchRequest {
   templateSrv: TemplateSrv;
   ref: DataSourceRef;
   dsQueryEndpoint = '/api/ds/query';
+  query: (request: DataQueryRequest<CloudWatchQuery>) => Observable<DataQueryResponse>;
   debouncedCustomAlert: (title: string, message: string) => void = memoizedDebounce(
     displayCustomError,
     AppNotificationTimeout.Error
@@ -22,10 +30,12 @@ export abstract class CloudWatchRequest {

   constructor(
     public instanceSettings: DataSourceInstanceSettings<CloudWatchJsonData>,
-    templateSrv: TemplateSrv
+    templateSrv: TemplateSrv,
+    queryFn: (request: DataQueryRequest<CloudWatchQuery>) => Observable<DataQueryResponse> = () => of({ data: [] })
   ) {
     this.templateSrv = templateSrv;
     this.ref = getDataSourceRef(instanceSettings);
+    this.query = queryFn;
   }

   awsRequest(