CloudWatch: Call query method from DataSourceWithBackend to support public dashboards (#77532)

* CloudWatch: call query method from DataSourceWithBackend to support public dashboards

* add test

* remove unneeded properties from test case

* update betterer

* add parens to group related logic

* remove unnecessary aliasing of variable

* use t.Cleanup

* remove redundant check

* add comment
Kevin Yu 2023-11-20 14:44:22 -08:00 committed by GitHub
parent efdfa29fe2
commit 7f7d912af7
GPG Key ID: 4AEE18F83AFDEB23
21 changed files with 354 additions and 225 deletions
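
In short, the CloudWatch query runners no longer build their own requests against /api/ds/query; each runner now receives the query method inherited from DataSourceWithBackend and delegates to it, which is what lets public-dashboard requests (and the new skipQueryCache flag) flow through the standard backend path. The sketch below shows the wiring pattern only; ExampleQuery, ExampleQueryRunner, and QueryFn are placeholder names, not the actual CloudWatch classes in the diffs that follow.

import { Observable } from 'rxjs';

import { DataQuery, DataQueryRequest, DataQueryResponse } from '@grafana/data';

// Placeholder query type standing in for CloudWatchQuery.
interface ExampleQuery extends DataQuery {
  queryString?: string;
}

type QueryFn = (request: DataQueryRequest<ExampleQuery>) => Observable<DataQueryResponse>;

// A runner no longer issues its own fetch; it receives the datasource's query
// function and only reshapes the request before delegating.
class ExampleQueryRunner {
  constructor(private queryFn: QueryFn) {}

  run(request: DataQueryRequest<ExampleQuery>): Observable<DataQueryResponse> {
    // Delegating to DataSourceWithBackend.query applies the shared plugin headers
    // (X-Dashboard-Uid, X-Panel-Id, and now X-Cache-Skip) in one place.
    return this.queryFn({ ...request, skipQueryCache: true });
  }
}

// In the datasource constructor, the inherited method is bound and passed in,
// mirroring the CloudWatch wiring shown further down:
//
//   this.metricsQueryRunner = new CloudWatchMetricsQueryRunner(
//     instanceSettings, templateSrv, super.query.bind(this)
//   );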

View File

@ -5427,9 +5427,6 @@ exports[`better eslint`] = {
[0, 0, 0, "Unexpected any. Specify a different type.", "0"],
[0, 0, 0, "Unexpected any. Specify a different type.", "1"]
],
"public/app/plugins/datasource/cloudwatch/query-runner/CloudWatchLogsQueryRunner.ts:5381": [
[0, 0, 0, "Do not use any type assertions.", "0"]
],
"public/app/plugins/datasource/cloudwatch/types.ts:5381": [
[0, 0, 0, "Unexpected any. Specify a different type.", "0"],
[0, 0, 0, "Unexpected any. Specify a different type.", "1"],

View File

@ -547,6 +547,7 @@ export interface DataQueryRequest<TQuery extends DataQuery = DataQuery> {
cacheTimeout?: string | null;
queryCachingTTL?: number | null;
skipQueryCache?: boolean;
rangeRaw?: RawTimeRange;
timeInfo?: string; // The query time description (blue text in the upper right)
panelId?: number;

View File

@ -350,6 +350,63 @@ describe('DataSourceWithBackend', () => {
});
});
test('check that queries can skip the query cache', () => {
const { mock, ds } = createMockDatasource();
ds.query({
maxDataPoints: 10,
intervalMs: 5000,
targets: [{ refId: 'A' }],
dashboardUID: 'dashA',
panelId: 123,
range: getDefaultTimeRange(),
skipQueryCache: true,
requestId: 'request-123',
interval: '5s',
scopedVars: {},
timezone: '',
app: '',
startTime: 0,
});
const args = mock.calls[0][0];
expect(mock.calls.length).toBe(1);
expect(args).toMatchInlineSnapshot(`
{
"data": {
"from": "1697133600000",
"queries": [
{
"applyTemplateVariablesCalled": true,
"datasource": {
"type": "dummy",
"uid": "abc",
},
"datasourceId": 1234,
"filters": undefined,
"intervalMs": 5000,
"maxDataPoints": 10,
"queryCachingTTL": undefined,
"refId": "A",
},
],
"to": "1697155200000",
},
"headers": {
"X-Cache-Skip": "true",
"X-Dashboard-Uid": "dashA",
"X-Datasource-Uid": "abc",
"X-Panel-Id": "123",
"X-Plugin-Id": "dummy",
},
"hideFromInspector": false,
"method": "POST",
"requestId": "request-123",
"url": "/api/ds/query?ds_type=dummy&requestId=request-123",
}
`);
});
describe('isExpressionReference', () => {
test('check all possible expression references', () => {
expect(isExpressionReference('__expr__')).toBeTruthy(); // New UID

View File

@ -82,6 +82,7 @@ enum PluginRequestHeaders {
PanelID = 'X-Panel-Id', // mainly useful for debugging slow queries
QueryGroupID = 'X-Query-Group-Id', // mainly useful to find related queries with query splitting
FromExpression = 'X-Grafana-From-Expr', // used by datasources to identify expression queries
SkipQueryCache = 'X-Cache-Skip', // used by datasources to skip the query cache
}
/**
@ -228,6 +229,9 @@ class DataSourceWithBackend<
if (request.queryGroupId) {
headers[PluginRequestHeaders.QueryGroupID] = `${request.queryGroupId}`;
}
if (request.skipQueryCache) {
headers[PluginRequestHeaders.SkipQueryCache] = 'true';
}
return getBackendSrv()
.fetch<BackendDataSourceResponse>({
url,

View File

@ -170,7 +170,11 @@ func (e *cloudWatchExecutor) QueryData(ctx context.Context, req *backend.QueryDa
_, fromAlert := req.Headers[ngalertmodels.FromAlertHeaderName]
fromExpression := req.GetHTTPHeader(query.HeaderFromExpression) != ""
isSyncLogQuery := (fromAlert || fromExpression) && model.QueryMode == logsQueryMode
// Public dashboard queries execute like alert queries, i.e. they execute on the backend; therefore, we need to handle them synchronously.
// Since `model.Type` is set during execution on the frontend by the query runner and isn't saved with the query, we check here whether the query
// is missing the `model.Type` property and whether it is a log query in order to determine if it is a public dashboard query.
fromPublicDashboard := (model.Type == "" && model.QueryMode == logsQueryMode)
isSyncLogQuery := ((fromAlert || fromExpression) && model.QueryMode == logsQueryMode) || fromPublicDashboard
if isSyncLogQuery {
return executeSyncLogQuery(ctx, e, req)
}

View File

@ -152,6 +152,42 @@ func Test_executeSyncLogQuery(t *testing.T) {
executeSyncLogQuery = origExecuteSyncLogQuery
})
t.Run("when query mode is 'Logs' and does not include type or subtype", func(t *testing.T) {
origExecuteSyncLogQuery := executeSyncLogQuery
syncCalled := false
executeSyncLogQuery = func(ctx context.Context, e *cloudWatchExecutor, req *backend.QueryDataRequest) (*backend.QueryDataResponse, error) {
syncCalled = true
return nil, nil
}
t.Cleanup(func() {
executeSyncLogQuery = origExecuteSyncLogQuery
})
cli = fakeCWLogsClient{queryResults: cloudwatchlogs.GetQueryResultsOutput{Status: aws.String("Complete")}}
im := datasource.NewInstanceManager(func(ctx context.Context, s backend.DataSourceInstanceSettings) (instancemgmt.Instance, error) {
return DataSource{Settings: models.CloudWatchSettings{AWSDatasourceSettings: awsds.AWSDatasourceSettings{Region: "instance manager's region"}}}, nil
})
sess := fakeSessionCache{}
executor := newExecutor(im, newTestConfig(), &sess, featuremgmt.WithFeatures())
_, err := executor.QueryData(context.Background(), &backend.QueryDataRequest{
PluginContext: backend.PluginContext{DataSourceInstanceSettings: &backend.DataSourceInstanceSettings{}},
Queries: []backend.DataQuery{
{
TimeRange: backend.TimeRange{From: time.Unix(0, 0), To: time.Unix(1, 0)},
JSON: json.RawMessage(`{
"queryMode": "Logs",
"region": "default",
"queryString": "fields @message"
}`),
},
},
})
assert.NoError(t, err)
assert.Equal(t, true, syncCalled)
})
}
func Test_executeSyncLogQuery_handles_RefId_from_input_queries(t *testing.T) {
origNewCWClient := NewCWClient

View File

@ -1,7 +1,6 @@
import { of } from 'rxjs';
import { CustomVariableModel, DataQueryRequest } from '@grafana/data';
import { getBackendSrv, setBackendSrv } from '@grafana/runtime';
import { TemplateSrv } from 'app/features/templating/template_srv';
import { CloudWatchAnnotationQueryRunner } from '../query-runner/CloudWatchAnnotationQueryRunner';
@ -16,13 +15,8 @@ export function setupMockedAnnotationQueryRunner({ variables }: { variables?: Cu
templateService = setupMockedTemplateService(variables);
}
const runner = new CloudWatchAnnotationQueryRunner(CloudWatchSettings, templateService);
const fetchMock = jest.fn().mockReturnValue(of({}));
setBackendSrv({
...getBackendSrv(),
fetch: fetchMock,
});
const queryMock = jest.fn().mockReturnValue(of({}));
const runner = new CloudWatchAnnotationQueryRunner(CloudWatchSettings, templateService, queryMock);
const request: DataQueryRequest<CloudWatchQuery> = {
range: TimeRangeMock,
@ -37,5 +31,5 @@ export function setupMockedAnnotationQueryRunner({ variables }: { variables?: Cu
startTime: 0,
};
return { runner, fetchMock, templateService, request, timeRange: TimeRangeMock };
return { runner, queryMock, templateService, request, timeRange: TimeRangeMock };
}

View File

@ -8,7 +8,7 @@ import {
PluginType,
VariableHide,
} from '@grafana/data';
import { getBackendSrv, setBackendSrv } from '@grafana/runtime';
import { getBackendSrv, setBackendSrv, DataSourceWithBackend } from '@grafana/runtime';
import { getTimeSrv } from 'app/features/dashboard/services/TimeSrv';
import { TemplateSrv } from 'app/features/templating/template_srv';
import { initialCustomVariableModelState } from 'app/features/variables/custom/reducer';
@ -16,6 +16,9 @@ import { initialCustomVariableModelState } from 'app/features/variables/custom/r
import { CloudWatchDatasource } from '../datasource';
import { CloudWatchJsonData } from '../types';
const queryMock = jest.fn().mockReturnValue(of({ data: [] }));
jest.spyOn(DataSourceWithBackend.prototype, 'query').mockImplementation((args) => queryMock(args));
export function setupMockedTemplateService(variables: CustomVariableModel[]) {
const templateService = new TemplateSrv();
templateService.init(variables);
@ -93,7 +96,7 @@ export function setupMockedDataSource({
get: getMock,
});
return { datasource, fetchMock, templateService, timeSrv };
return { datasource, fetchMock, queryMock, templateService, timeSrv };
}
export const metricVariable: CustomVariableModel = {

View File

@ -1,7 +1,7 @@
import { of } from 'rxjs';
import { CustomVariableModel, DataFrame, DataSourceInstanceSettings } from '@grafana/data';
import { BackendDataSourceResponse, getBackendSrv, setBackendSrv } from '@grafana/runtime';
import { BackendDataSourceResponse, toDataQueryResponse } from '@grafana/runtime';
import { getTimeSrv, TimeSrv } from 'app/features/dashboard/services/TimeSrv';
import { TemplateSrv } from 'app/features/templating/template_srv';
@ -33,14 +33,10 @@ export function setupMockedLogsQueryRunner({
}
}
const runner = new CloudWatchLogsQueryRunner(settings, templateService, timeSrv);
const fetchMock = jest.fn().mockReturnValue(of({ data }));
setBackendSrv({
...getBackendSrv(),
fetch: fetchMock,
});
const queryMock = jest.fn().mockReturnValue(of(toDataQueryResponse({ data })));
const runner = new CloudWatchLogsQueryRunner(settings, templateService, timeSrv, queryMock);
return { runner, fetchMock, templateService };
return { runner, queryMock, templateService };
}
export function genMockFrames(numResponses: number): DataFrame[] {

View File

@ -1,7 +1,7 @@
import { of, throwError } from 'rxjs';
import { CustomVariableModel, DataQueryError, DataQueryRequest, DataSourceInstanceSettings } from '@grafana/data';
import { BackendDataSourceResponse, getBackendSrv, setBackendSrv } from '@grafana/runtime';
import { BackendDataSourceResponse, toDataQueryResponse } from '@grafana/runtime';
import { TemplateSrv } from 'app/features/templating/template_srv';
import { CloudWatchMetricsQueryRunner } from '../query-runner/CloudWatchMetricsQueryRunner';
@ -16,13 +16,13 @@ export function setupMockedMetricsQueryRunner({
},
variables,
mockGetVariableName = true,
throws = false,
errorResponse,
instanceSettings = CloudWatchSettings,
}: {
data?: BackendDataSourceResponse | DataQueryError;
data?: BackendDataSourceResponse;
variables?: CustomVariableModel[];
mockGetVariableName?: boolean;
throws?: boolean;
errorResponse?: DataQueryError;
instanceSettings?: DataSourceInstanceSettings<CloudWatchJsonData>;
} = {}) {
let templateService = new TemplateSrv();
@ -33,15 +33,10 @@ export function setupMockedMetricsQueryRunner({
}
}
const runner = new CloudWatchMetricsQueryRunner(instanceSettings, templateService);
const fetchMock = throws
? jest.fn().mockImplementation(() => throwError(data))
: jest.fn().mockReturnValue(of({ data }));
setBackendSrv({
...getBackendSrv(),
fetch: fetchMock,
});
const queryMock = errorResponse
? jest.fn().mockImplementation(() => throwError(errorResponse))
: jest.fn().mockReturnValue(of(toDataQueryResponse({ data })));
const runner = new CloudWatchMetricsQueryRunner(instanceSettings, templateService, queryMock);
const request: DataQueryRequest<CloudWatchQuery> = {
range: TimeRangeMock,
@ -56,5 +51,5 @@ export function setupMockedMetricsQueryRunner({
startTime: 0,
};
return { runner, fetchMock, templateService, instanceSettings, request, timeRange: TimeRangeMock };
return { runner, queryMock, templateService, instanceSettings, request, timeRange: TimeRangeMock };
}

View File

@ -2,8 +2,8 @@ import { Observable, of } from 'rxjs';
import {
DataFrame,
createDataFrame,
dataFrameToJSON,
MutableDataFrame,
DataSourceInstanceSettings,
DataSourceJsonData,
DataSourceRef,
@ -14,7 +14,7 @@ import {
DataQueryResponse,
TestDataSourceResponse,
} from '@grafana/data';
import { GetDataSourceListFilters, setDataSourceSrv } from '@grafana/runtime';
import { GetDataSourceListFilters, setDataSourceSrv, toDataQueryResponse } from '@grafana/runtime';
import { CloudWatchLogsQueryStatus } from '../types';
@ -22,15 +22,15 @@ import { meta, setupMockedDataSource } from './CloudWatchDataSource';
export function setupForLogs() {
function envelope(frame: DataFrame) {
return { data: { results: { a: { refId: 'a', frames: [dataFrameToJSON(frame)] } } } };
return toDataQueryResponse({ data: { results: { a: { refId: 'a', frames: [dataFrameToJSON(frame)] } } } });
}
const { datasource, fetchMock, timeSrv } = setupMockedDataSource();
const { datasource, queryMock, timeSrv } = setupMockedDataSource();
const startQueryFrame = new MutableDataFrame({ fields: [{ name: 'queryId', values: ['queryid'] }] });
fetchMock.mockReturnValueOnce(of(envelope(startQueryFrame)));
const startQueryFrame: DataFrame = createDataFrame({ fields: [{ name: 'queryId', values: ['queryid'] }] });
queryMock.mockReturnValueOnce(of(envelope(startQueryFrame)));
const logsFrame = new MutableDataFrame({
const logsFrame: DataFrame = createDataFrame({
fields: [
{
name: '@message',
@ -48,7 +48,7 @@ export function setupForLogs() {
meta: { custom: { Status: CloudWatchLogsQueryStatus.Complete } },
});
fetchMock.mockReturnValueOnce(of(envelope(logsFrame)));
queryMock.mockReturnValueOnce(of(envelope(logsFrame)));
setDataSourceSrv({
async get() {
@ -89,5 +89,5 @@ export function setupForLogs() {
},
});
return { datasource, fetchMock, timeSrv };
return { datasource, queryMock, timeSrv };
}

View File

@ -28,7 +28,7 @@ describe('datasource', () => {
});
describe('query', () => {
it('should not run a query if log groups is not specified', async () => {
const { datasource, fetchMock } = setupMockedDataSource();
const { datasource, queryMock } = setupMockedDataSource();
await lastValueFrom(
datasource.query({
targets: [
@ -59,8 +59,8 @@ describe('datasource', () => {
})
);
expect(fetchMock.mock.calls[0][0].data.queries).toHaveLength(1);
expect(fetchMock.mock.calls[0][0].data.queries[0]).toMatchObject({
expect(queryMock.mock.calls[0][0].targets).toHaveLength(1);
expect(queryMock.mock.calls[0][0].targets[0]).toMatchObject({
queryString: 'some query string',
logGroupNames: ['/some/group'],
region: 'us-west-1',
@ -68,7 +68,7 @@ describe('datasource', () => {
});
it('should not run a query if query expression is not specified', async () => {
const { datasource, fetchMock } = setupMockedDataSource();
const { datasource, queryMock } = setupMockedDataSource();
await lastValueFrom(
datasource.query({
targets: [
@ -99,8 +99,8 @@ describe('datasource', () => {
})
);
expect(fetchMock.mock.calls[0][0].data.queries).toHaveLength(1);
expect(fetchMock.mock.calls[0][0].data.queries[0]).toMatchObject({
expect(queryMock.mock.calls[0][0].targets).toHaveLength(1);
expect(queryMock.mock.calls[0][0].targets[0]).toMatchObject({
queryString: 'some query string',
logGroupNames: ['/some/group'],
region: 'us-west-1',
@ -141,7 +141,7 @@ describe('datasource', () => {
});
it('should interpolate variables in the query', async () => {
const { datasource, fetchMock } = setupMockedDataSource({
const { datasource, queryMock } = setupMockedDataSource({
variables: [fieldsVariable, regionVariable],
});
await lastValueFrom(
@ -168,7 +168,7 @@ describe('datasource', () => {
})
.pipe(toArray())
);
expect(fetchMock.mock.calls[0][0].data.queries[0]).toMatchObject({
expect(queryMock.mock.calls[0][0].targets[0]).toMatchObject({
queryString: 'fields templatedField',
logGroupNames: ['/some/group'],
region: 'templatedRegion',
@ -176,7 +176,7 @@ describe('datasource', () => {
});
it('should interpolate multi-value template variable for log group names in the query', async () => {
const { datasource, fetchMock } = setupMockedDataSource({
const { datasource, queryMock } = setupMockedDataSource({
variables: [fieldsVariable, logGroupNamesVariable, regionVariable],
mockGetVariableName: false,
});
@ -204,7 +204,7 @@ describe('datasource', () => {
})
.pipe(toArray())
);
expect(fetchMock.mock.calls[0][0].data.queries[0]).toMatchObject({
expect(queryMock.mock.calls[0][0].targets[0]).toMatchObject({
queryString: 'fields templatedField',
logGroupNames: ['templatedGroup-1', 'templatedGroup-2'],
region: 'templatedRegion',

View File

@ -66,10 +66,19 @@ export class CloudWatchDatasource
this.languageProvider = new CloudWatchLogsLanguageProvider(this);
this.sqlCompletionItemProvider = new SQLCompletionItemProvider(this.resources, this.templateSrv);
this.metricMathCompletionItemProvider = new MetricMathCompletionItemProvider(this.resources, this.templateSrv);
this.metricsQueryRunner = new CloudWatchMetricsQueryRunner(instanceSettings, templateSrv);
this.metricsQueryRunner = new CloudWatchMetricsQueryRunner(instanceSettings, templateSrv, super.query.bind(this));
this.logsCompletionItemProvider = new LogsCompletionItemProvider(this.resources, this.templateSrv);
this.logsQueryRunner = new CloudWatchLogsQueryRunner(instanceSettings, templateSrv, timeSrv);
this.annotationQueryRunner = new CloudWatchAnnotationQueryRunner(instanceSettings, templateSrv);
this.logsQueryRunner = new CloudWatchLogsQueryRunner(
instanceSettings,
templateSrv,
timeSrv,
super.query.bind(this)
);
this.annotationQueryRunner = new CloudWatchAnnotationQueryRunner(
instanceSettings,
templateSrv,
super.query.bind(this)
);
this.variables = new CloudWatchVariableSupport(this.resources);
this.annotations = CloudWatchAnnotationSupport;
this.defaultLogGroups = instanceSettings.jsonData.defaultLogGroups;

View File

@ -22,11 +22,11 @@ describe('CloudWatchAnnotationQueryRunner', () => {
];
it('should issue the correct query', async () => {
const { runner, fetchMock, request } = setupMockedAnnotationQueryRunner({
const { runner, queryMock, request } = setupMockedAnnotationQueryRunner({
variables: [namespaceVariable, regionVariable],
});
await expect(runner.handleAnnotationQuery(queries, request)).toEmitValuesWith(() => {
expect(fetchMock.mock.calls[0][0].data.queries[0]).toMatchObject(
expect(queryMock.mock.calls[0][0].targets[0]).toMatchObject(
expect.objectContaining({
region: regionVariable.current.value,
namespace: namespaceVariable.current.value,

View File

@ -1,7 +1,6 @@
import { map, Observable } from 'rxjs';
import { Observable } from 'rxjs';
import { DataQueryRequest, DataQueryResponse, DataSourceInstanceSettings } from '@grafana/data';
import { toDataQueryResponse } from '@grafana/runtime';
import { TemplateSrv } from 'app/features/templating/template_srv';
import { CloudWatchAnnotationQuery, CloudWatchJsonData, CloudWatchQuery } from '../types';
@ -10,18 +9,21 @@ import { CloudWatchRequest } from './CloudWatchRequest';
// This class handles execution of CloudWatch annotation queries
export class CloudWatchAnnotationQueryRunner extends CloudWatchRequest {
constructor(instanceSettings: DataSourceInstanceSettings<CloudWatchJsonData>, templateSrv: TemplateSrv) {
super(instanceSettings, templateSrv);
constructor(
instanceSettings: DataSourceInstanceSettings<CloudWatchJsonData>,
templateSrv: TemplateSrv,
queryFn: (request: DataQueryRequest<CloudWatchQuery>) => Observable<DataQueryResponse>
) {
super(instanceSettings, templateSrv, queryFn);
}
handleAnnotationQuery(
queries: CloudWatchAnnotationQuery[],
options: DataQueryRequest<CloudWatchQuery>
): Observable<DataQueryResponse> {
return this.awsRequest(this.dsQueryEndpoint, {
from: options.range.from.valueOf().toString(),
to: options.range.to.valueOf().toString(),
queries: queries.map((query) => ({
return this.query({
...options,
targets: queries.map((query) => ({
...query,
statistic: this.templateSrv.replace(query.statistic),
region: this.templateSrv.replace(this.getActualRegion(query.region)),
@ -34,11 +36,6 @@ export class CloudWatchAnnotationQueryRunner extends CloudWatchRequest {
type: 'annotationQuery',
datasource: this.ref,
})),
}).pipe(
map((r) => {
const frames = toDataQueryResponse(r).data;
return { data: frames };
})
);
});
}
}

View File

@ -33,7 +33,7 @@ describe('CloudWatchLogsQueryRunner', () => {
describe('getLogRowContext', () => {
it('replaces parameters correctly in the query', async () => {
const { runner, fetchMock } = setupMockedLogsQueryRunner();
const { runner, queryMock } = setupMockedLogsQueryRunner();
const row: LogRowModel = {
entryFieldIndex: 0,
rowIndex: 0,
@ -59,16 +59,16 @@ describe('CloudWatchLogsQueryRunner', () => {
uid: '1',
};
await runner.getLogRowContext(row);
expect(fetchMock.mock.calls[0][0].data.queries[0].endTime).toBe(4);
expect(fetchMock.mock.calls[0][0].data.queries[0].region).toBe(undefined);
expect(queryMock.mock.calls[0][0].targets[0].endTime).toBe(4);
expect(queryMock.mock.calls[0][0].targets[0].region).toBe('');
await runner.getLogRowContext(
row,
{ direction: LogRowContextQueryDirection.Forward },
{ ...validLogsQuery, region: 'eu-east' }
);
expect(fetchMock.mock.calls[1][0].data.queries[0].startTime).toBe(4);
expect(fetchMock.mock.calls[1][0].data.queries[0].region).toBe('eu-east');
expect(queryMock.mock.calls[1][0].targets[0].startTime).toBe(4);
expect(queryMock.mock.calls[1][0].targets[0].region).toBe('eu-east');
});
});
@ -292,8 +292,7 @@ describe('CloudWatchLogsQueryRunner', () => {
describe('makeLogActionRequest', () => {
it('should use the time range from the options if it is available', async () => {
const { runner } = setupMockedLogsQueryRunner();
const spy = jest.spyOn(runner, 'awsRequest');
const { runner, queryMock } = setupMockedLogsQueryRunner();
const from = dateTime(0);
const to = dateTime(1000);
const options: DataQueryRequest<CloudWatchLogsQuery> = {
@ -301,24 +300,24 @@ describe('CloudWatchLogsQueryRunner', () => {
range: { from, to, raw: { from, to } },
};
await lastValueFrom(runner.makeLogActionRequest('StartQuery', [genMockCloudWatchLogsRequest()], options));
expect(spy).toHaveBeenNthCalledWith(1, '/api/ds/query', expect.objectContaining({ from: '0', to: '1000' }), {
'X-Cache-Skip': 'true',
});
expect(queryMock.mock.calls[0][0].skipQueryCache).toBe(true);
expect(queryMock.mock.calls[0][0]).toEqual(expect.objectContaining({ range: { from, to, raw: { from, to } } }));
});
it('should use the time range from the timeSrv if the time range in the options is not available', async () => {
const timeSrv = getTimeSrv();
const from = dateTime(1111);
const to = dateTime(2222);
timeSrv.timeRange = jest.fn().mockReturnValue({
from: dateTime(1111),
to: dateTime(2222),
raw: { from: dateTime(1111), to: dateTime(2222) },
from,
to,
raw: { from, to },
});
const { runner } = setupMockedLogsQueryRunner({ timeSrv });
const spy = jest.spyOn(runner, 'awsRequest');
const { runner, queryMock } = setupMockedLogsQueryRunner({ timeSrv });
await lastValueFrom(runner.makeLogActionRequest('StartQuery', [genMockCloudWatchLogsRequest()]));
expect(spy).toHaveBeenNthCalledWith(1, '/api/ds/query', expect.objectContaining({ from: '1111', to: '2222' }), {
'X-Cache-Skip': 'true',
});
expect(queryMock.mock.calls[0][0].skipQueryCache).toBe(true);
expect(queryMock.mock.calls[0][0]).toEqual(expect.objectContaining({ range: { from, to, raw: { from, to } } }));
});
});
});

View File

@ -8,6 +8,7 @@ import {
map,
mergeMap,
Observable,
of,
repeat,
scan,
share,
@ -29,7 +30,7 @@ import {
LogRowModel,
rangeUtil,
} from '@grafana/data';
import { BackendDataSourceResponse, config, FetchError, FetchResponse, toDataQueryResponse } from '@grafana/runtime';
import { config, FetchError } from '@grafana/runtime';
import { TimeSrv } from 'app/features/dashboard/services/TimeSrv';
import { TemplateSrv } from 'app/features/templating/template_srv';
@ -63,14 +64,66 @@ export class CloudWatchLogsQueryRunner extends CloudWatchRequest {
constructor(
instanceSettings: DataSourceInstanceSettings<CloudWatchJsonData>,
templateSrv: TemplateSrv,
private readonly timeSrv: TimeSrv
private readonly timeSrv: TimeSrv,
queryFn: (request: DataQueryRequest<CloudWatchQuery>) => Observable<DataQueryResponse>
) {
super(instanceSettings, templateSrv);
super(instanceSettings, templateSrv, queryFn);
this.tracingDataSourceUid = instanceSettings.jsonData.tracingDatasourceUid;
this.logsTimeout = instanceSettings.jsonData.logsTimeout || '30m';
}
/**
* Checks if the query is complete and returns results if it is. Otherwise, it polls for results.
*/
getQueryResults = ({
frames,
error,
logQueries,
timeoutFunc,
}: {
frames: DataFrame[];
logQueries: CloudWatchLogsQuery[];
timeoutFunc: () => boolean;
error?: DataQueryError;
}) => {
// If every frame is already finished, we can return the result directly, as the
// query was run synchronously. Otherwise, we return the observable from
// `this.logsQuery`, which polls for the results.
if (
frames.every((frame) =>
[
CloudWatchLogsQueryStatus.Complete,
CloudWatchLogsQueryStatus.Cancelled,
CloudWatchLogsQueryStatus.Failed,
].includes(frame.meta?.custom?.['Status'])
)
) {
return of({
data: frames,
key: 'test-key',
state: LoadingState.Done,
});
}
return this.logsQuery(
frames.map((dataFrame) => ({
queryId: dataFrame.fields[0].values[0],
region: dataFrame.meta?.custom?.['Region'] ?? 'default',
refId: dataFrame.refId!,
statsGroups: logQueries.find((target) => target.refId === dataFrame.refId)?.statsGroups,
})),
timeoutFunc
).pipe(
map((response: DataQueryResponse) => {
if (!response.error && error) {
response.error = error;
}
return response;
})
);
};
/**
* Handle log query. The log query works by starting the query on CloudWatch and then periodically polling for
* results.
@ -118,30 +171,14 @@ export class CloudWatchLogsQueryRunner extends CloudWatchRequest {
};
return runWithRetry(
(targets: StartQueryRequest[]) => {
(targets) => {
return this.makeLogActionRequest('StartQuery', targets, options);
},
startQueryRequests,
timeoutFunc
).pipe(
mergeMap(({ frames, error }: { frames: DataFrame[]; error?: DataQueryError }) =>
// This queries for the results
this.logsQuery(
frames.map((dataFrame) => ({
queryId: dataFrame.fields[0].values[0],
region: dataFrame.meta?.custom?.['Region'] ?? 'default',
refId: dataFrame.refId!,
statsGroups: logQueries.find((target) => target.refId === dataFrame.refId)?.statsGroups,
})),
timeoutFunc
).pipe(
map((response: DataQueryResponse) => {
if (!response.error && error) {
response.error = error;
}
return response;
})
)
this.getQueryResults({ frames, logQueries, timeoutFunc, error })
),
mergeMap((dataQueryResponse) => {
return from(
@ -277,32 +314,32 @@ export class CloudWatchLogsQueryRunner extends CloudWatchRequest {
): Observable<DataFrame[]> {
const range = options?.range || this.timeSrv.timeRange();
const requestParams = {
from: range.from.valueOf().toString(),
to: range.to.valueOf().toString(),
queries: queryParams.map((param: CloudWatchLogsRequest) => ({
// eslint-ignore-next-line
refId: (param as StartQueryRequest).refId || 'A',
const requestParams: DataQueryRequest<CloudWatchLogsQuery> = {
...options,
range,
skipQueryCache: true,
requestId: options?.requestId || '', // dummy
interval: options?.interval || '', // dummy
intervalMs: options?.intervalMs || 1, // dummy
scopedVars: options?.scopedVars || {}, // dummy
timezone: options?.timezone || '', // dummy
app: options?.app || '', // dummy
startTime: options?.startTime || 0, // dummy
targets: queryParams.map((param) => ({
...param,
id: '',
queryMode: 'Logs',
refId: param.refId || 'A',
intervalMs: 1, // dummy
maxDataPoints: 1, // dummy
datasource: this.ref,
type: 'logAction',
subtype: subtype,
...param,
})),
};
const resultsToDataFrames = (
val:
| { data: BackendDataSourceResponse | undefined }
| FetchResponse<BackendDataSourceResponse | undefined>
| DataQueryError
): DataFrame[] => toDataQueryResponse(val).data || [];
return this.awsRequest(this.dsQueryEndpoint, requestParams, {
'X-Cache-Skip': 'true',
}).pipe(
map((response) => resultsToDataFrames(response)),
return this.query(requestParams).pipe(
map((response) => response.data),
catchError((err: FetchError) => {
if (config.featureToggles.datasourceQueryMultiStatus && err.status === 207) {
throw err;
@ -347,9 +384,10 @@ export class CloudWatchLogsQueryRunner extends CloudWatchRequest {
}
const requestParams: GetLogEventsRequest = {
refId: query?.refId || 'A', // dummy
limit,
startFromHead: direction !== LogRowContextQueryDirection.Backward,
region: query?.region,
region: query?.region || '',
logGroupName: parseLogGroupName(logField!.values[row.rowIndex]),
logStreamName: logStreamField!.values[row.rowIndex],
};

View File

@ -16,13 +16,13 @@ import {
accountIdVariable,
} from '../__mocks__/CloudWatchDataSource';
import { setupMockedMetricsQueryRunner } from '../__mocks__/MetricsQueryRunner';
import { validMetricSearchBuilderQuery } from '../__mocks__/queries';
import { validMetricSearchBuilderQuery, validMetricSearchCodeQuery } from '../__mocks__/queries';
import { MetricQueryType, MetricEditorMode, CloudWatchMetricsQuery, DataQueryError } from '../types';
describe('CloudWatchMetricsQueryRunner', () => {
describe('performTimeSeriesQuery', () => {
it('should return the same length of data as result', async () => {
const { runner, timeRange } = setupMockedMetricsQueryRunner({
const { runner, timeRange, request } = setupMockedMetricsQueryRunner({
data: {
results: {
a: { refId: 'a', series: [{ target: 'cpu', datapoints: [[1, 1]] }] },
@ -33,12 +33,9 @@ describe('CloudWatchMetricsQueryRunner', () => {
const observable = runner.performTimeSeriesQuery(
{
queries: [
{ datasourceId: 1, refId: 'a' },
{ datasourceId: 1, refId: 'b' },
],
from: '',
to: '',
...request,
targets: [validMetricSearchCodeQuery, validMetricSearchCodeQuery],
range: timeRange,
},
timeRange
);
@ -50,7 +47,7 @@ describe('CloudWatchMetricsQueryRunner', () => {
});
it('sets fields.config.interval based on period', async () => {
const { runner, timeRange } = setupMockedMetricsQueryRunner({
const { runner, timeRange, request } = setupMockedMetricsQueryRunner({
data: {
results: {
a: {
@ -67,9 +64,9 @@ describe('CloudWatchMetricsQueryRunner', () => {
const observable = runner.performTimeSeriesQuery(
{
queries: [{ datasourceId: 1, refId: 'a' }],
from: '',
to: '',
...request,
targets: [validMetricSearchCodeQuery, validMetricSearchCodeQuery],
range: timeRange,
},
timeRange
);
@ -125,10 +122,10 @@ describe('CloudWatchMetricsQueryRunner', () => {
};
it('should generate the correct query', async () => {
const { runner, fetchMock, request } = setupMockedMetricsQueryRunner({ data });
const { runner, queryMock, request } = setupMockedMetricsQueryRunner({ data });
await expect(runner.handleMetricQueries(queries, request)).toEmitValuesWith(() => {
expect(fetchMock.mock.calls[0][0].data.queries).toMatchObject(
expect(queryMock.mock.calls[0][0].targets).toMatchObject(
expect.arrayContaining([
expect.objectContaining({
namespace: queries[0].namespace,
@ -161,13 +158,13 @@ describe('CloudWatchMetricsQueryRunner', () => {
},
];
const { runner, fetchMock, request } = setupMockedMetricsQueryRunner({
const { runner, queryMock, request } = setupMockedMetricsQueryRunner({
data,
variables: [periodIntervalVariable],
});
await expect(runner.handleMetricQueries(queries, request)).toEmitValuesWith(() => {
expect(fetchMock.mock.calls[0][0].data.queries[0].period).toEqual('600');
expect(queryMock.mock.calls[0][0].targets[0].period).toEqual('600');
});
});
@ -267,7 +264,7 @@ describe('CloudWatchMetricsQueryRunner', () => {
});
it('should display one alert error message per region+datasource combination', async () => {
const { runner, request } = setupMockedMetricsQueryRunner({ data: backendErrorResponse, throws: true });
const { runner, request } = setupMockedMetricsQueryRunner({ errorResponse: backendErrorResponse });
const memoizedDebounceSpy = jest.spyOn(runner, 'debouncedAlert');
await expect(runner.handleMetricQueries(queries, request)).toEmitValuesWith(() => {
@ -360,7 +357,7 @@ describe('CloudWatchMetricsQueryRunner', () => {
});
it('interpolates variables correctly', async () => {
const { runner, fetchMock, request } = setupMockedMetricsQueryRunner({
const { runner, queryMock, request } = setupMockedMetricsQueryRunner({
variables: [namespaceVariable, metricVariable, labelsVariable, limitVariable],
});
runner.handleMetricQueries(
@ -384,15 +381,13 @@ describe('CloudWatchMetricsQueryRunner', () => {
],
request
);
expect(fetchMock).toHaveBeenCalledWith(
expect(queryMock).toHaveBeenCalledWith(
expect.objectContaining({
data: expect.objectContaining({
queries: expect.arrayContaining([
targets: expect.arrayContaining([
expect.objectContaining({
sqlExpression: `SELECT SUM(CPUUtilization) FROM "AWS/EC2" GROUP BY InstanceId,InstanceType LIMIT 100`,
}),
]),
}),
})
);
});
@ -464,7 +459,7 @@ describe('CloudWatchMetricsQueryRunner', () => {
};
it('should generate the correct query for single template variable', async () => {
const { runner, fetchMock, request } = setupMockedMetricsQueryRunner({ variables: [var1, var2, var3, var4] });
const { runner, queryMock, request } = setupMockedMetricsQueryRunner({ variables: [var1, var2, var3, var4] });
const queries: CloudWatchMetricsQuery[] = [
{
id: '',
@ -483,12 +478,12 @@ describe('CloudWatchMetricsQueryRunner', () => {
},
];
await expect(runner.handleMetricQueries(queries, request)).toEmitValuesWith(() => {
expect(fetchMock.mock.calls[0][0].data.queries[0].dimensions['dim2']).toStrictEqual(['var2-foo']);
expect(queryMock.mock.calls[0][0].targets[0].dimensions['dim2']).toStrictEqual(['var2-foo']);
});
});
it('should generate the correct query in the case of one multiple template variables', async () => {
const { runner, fetchMock, request } = setupMockedMetricsQueryRunner({ variables: [var1, var2, var3, var4] });
const { runner, queryMock, request } = setupMockedMetricsQueryRunner({ variables: [var1, var2, var3, var4] });
const queries: CloudWatchMetricsQuery[] = [
{
id: '',
@ -518,14 +513,14 @@ describe('CloudWatchMetricsQueryRunner', () => {
},
})
).toEmitValuesWith(() => {
expect(fetchMock.mock.calls[0][0].data.queries[0].dimensions['dim1']).toStrictEqual(['var1-foo']);
expect(fetchMock.mock.calls[0][0].data.queries[0].dimensions['dim2']).toStrictEqual(['var2-foo']);
expect(fetchMock.mock.calls[0][0].data.queries[0].dimensions['dim3']).toStrictEqual(['var3-foo', 'var3-baz']);
expect(queryMock.mock.calls[0][0].targets[0].dimensions['dim1']).toStrictEqual(['var1-foo']);
expect(queryMock.mock.calls[0][0].targets[0].dimensions['dim2']).toStrictEqual(['var2-foo']);
expect(queryMock.mock.calls[0][0].targets[0].dimensions['dim3']).toStrictEqual(['var3-foo', 'var3-baz']);
});
});
it('should generate the correct query in the case of multiple multi template variables', async () => {
const { runner, fetchMock, request } = setupMockedMetricsQueryRunner({ variables: [var1, var2, var3, var4] });
const { runner, queryMock, request } = setupMockedMetricsQueryRunner({ variables: [var1, var2, var3, var4] });
const queries: CloudWatchMetricsQuery[] = [
{
id: '',
@ -547,14 +542,14 @@ describe('CloudWatchMetricsQueryRunner', () => {
];
await expect(runner.handleMetricQueries(queries, request)).toEmitValuesWith(() => {
expect(fetchMock.mock.calls[0][0].data.queries[0].dimensions['dim1']).toStrictEqual(['var1-foo']);
expect(fetchMock.mock.calls[0][0].data.queries[0].dimensions['dim3']).toStrictEqual(['var3-foo', 'var3-baz']);
expect(fetchMock.mock.calls[0][0].data.queries[0].dimensions['dim4']).toStrictEqual(['var4-foo', 'var4-baz']);
expect(queryMock.mock.calls[0][0].targets[0].dimensions['dim1']).toStrictEqual(['var1-foo']);
expect(queryMock.mock.calls[0][0].targets[0].dimensions['dim3']).toStrictEqual(['var3-foo', 'var3-baz']);
expect(queryMock.mock.calls[0][0].targets[0].dimensions['dim4']).toStrictEqual(['var4-foo', 'var4-baz']);
});
});
it('should generate the correct query for multiple template variables, lack scopedVars', async () => {
const { runner, fetchMock, request } = setupMockedMetricsQueryRunner({ variables: [var1, var2, var3, var4] });
const { runner, queryMock, request } = setupMockedMetricsQueryRunner({ variables: [var1, var2, var3, var4] });
const queries: CloudWatchMetricsQuery[] = [
{
id: '',
@ -583,9 +578,9 @@ describe('CloudWatchMetricsQueryRunner', () => {
},
})
).toEmitValuesWith(() => {
expect(fetchMock.mock.calls[0][0].data.queries[0].dimensions['dim1']).toStrictEqual(['var1-foo']);
expect(fetchMock.mock.calls[0][0].data.queries[0].dimensions['dim2']).toStrictEqual(['var2-foo']);
expect(fetchMock.mock.calls[0][0].data.queries[0].dimensions['dim3']).toStrictEqual(['var3-foo', 'var3-baz']);
expect(queryMock.mock.calls[0][0].targets[0].dimensions['dim1']).toStrictEqual(['var1-foo']);
expect(queryMock.mock.calls[0][0].targets[0].dimensions['dim2']).toStrictEqual(['var2-foo']);
expect(queryMock.mock.calls[0][0].targets[0].dimensions['dim3']).toStrictEqual(['var3-foo', 'var3-baz']);
});
});
});
@ -622,22 +617,20 @@ describe('CloudWatchMetricsQueryRunner', () => {
['UTC', '+0000'],
];
test.each(testTable)('should use the right time zone offset', (ianaTimezone, expectedOffset) => {
const { runner, fetchMock, request } = setupMockedMetricsQueryRunner();
const { runner, queryMock, request } = setupMockedMetricsQueryRunner();
runner.handleMetricQueries([testQuery], {
...request,
range: { ...request.range, from: dateTime(), to: dateTime() },
timezone: ianaTimezone,
});
expect(fetchMock).toHaveBeenCalledWith(
expect(queryMock).toHaveBeenCalledWith(
expect.objectContaining({
data: expect.objectContaining({
queries: expect.arrayContaining([
targets: expect.arrayContaining([
expect.objectContaining({
timezoneUTCOffset: expectedOffset,
}),
]),
}),
})
);
});
@ -879,7 +872,7 @@ describe('CloudWatchMetricsQueryRunner', () => {
];
await expect(runner.handleMetricQueries(queries, request)).toEmitValuesWith(() => {
expect(performTimeSeriesQueryMock.mock.calls[0][0].queries[0].region).toBe(
expect(performTimeSeriesQueryMock.mock.calls[0][0].targets[0].region).toBe(
instanceSettings.jsonData.defaultRegion
);
});

View File

@ -13,7 +13,6 @@ import {
ScopedVars,
TimeRange,
} from '@grafana/data';
import { toDataQueryResponse } from '@grafana/runtime';
import { notifyApp } from 'app/core/actions';
import { createErrorNotification } from 'app/core/copy/appNotification';
import { TemplateSrv } from 'app/features/templating/template_srv';
@ -23,14 +22,7 @@ import { AppNotificationTimeout } from 'app/types';
import { ThrottlingErrorMessage } from '../components/Errors/ThrottlingErrorMessage';
import memoizedDebounce from '../memoizedDebounce';
import { migrateMetricQuery } from '../migrations/metricQueryMigrations';
import {
CloudWatchJsonData,
CloudWatchMetricsQuery,
CloudWatchQuery,
DataQueryError,
MetricQuery,
MetricRequest,
} from '../types';
import { CloudWatchJsonData, CloudWatchMetricsQuery, CloudWatchQuery, DataQueryError } from '../types';
import { filterMetricsQuery } from '../utils/utils';
import { CloudWatchRequest } from './CloudWatchRequest';
@ -53,8 +45,12 @@ export class CloudWatchMetricsQueryRunner extends CloudWatchRequest {
AppNotificationTimeout.Error
);
constructor(instanceSettings: DataSourceInstanceSettings<CloudWatchJsonData>, templateSrv: TemplateSrv) {
super(instanceSettings, templateSrv);
constructor(
instanceSettings: DataSourceInstanceSettings<CloudWatchJsonData>,
templateSrv: TemplateSrv,
queryFn: (request: DataQueryRequest<CloudWatchQuery>) => Observable<DataQueryResponse>
) {
super(instanceSettings, templateSrv, queryFn);
}
handleMetricQueries = (
@ -66,9 +62,7 @@ export class CloudWatchMetricsQueryRunner extends CloudWatchRequest {
format: 'Z',
}).replace(':', '');
const validMetricsQueries = metricQueries
.filter(this.filterMetricQuery)
.map((q: CloudWatchMetricsQuery): MetricQuery => {
const validMetricsQueries = metricQueries.filter(this.filterMetricQuery).map((q) => {
const migratedQuery = migrateMetricQuery(q);
const migratedAndIterpolatedQuery = this.replaceMetricQueryVars(migratedQuery, options.scopedVars);
@ -87,10 +81,9 @@ export class CloudWatchMetricsQueryRunner extends CloudWatchRequest {
return of({ data: [] });
}
const request = {
from: options?.range?.from.valueOf().toString(),
to: options?.range?.to.valueOf().toString(),
queries: validMetricsQueries,
const request: DataQueryRequest<CloudWatchQuery> = {
...options,
targets: validMetricsQueries,
};
return this.performTimeSeriesQuery(request, options.range);
@ -114,15 +107,18 @@ export class CloudWatchMetricsQueryRunner extends CloudWatchRequest {
};
}
performTimeSeriesQuery(request: MetricRequest, { from, to }: TimeRange): Observable<DataQueryResponse> {
return this.awsRequest(this.dsQueryEndpoint, request).pipe(
performTimeSeriesQuery(
request: DataQueryRequest<CloudWatchQuery>,
{ from, to }: TimeRange
): Observable<DataQueryResponse> {
return this.query(request).pipe(
map((res) => {
const dataframes: DataFrame[] = toDataQueryResponse(res).data;
const dataframes: DataFrame[] = res.data;
if (!dataframes || dataframes.length <= 0) {
return { data: [] };
}
const lastError = findLast(res.data.results, (v) => !!v.error);
const lastError = findLast(res.data, (v) => !!v.error);
dataframes.forEach((frame) => {
frame.fields.forEach((field) => {
@ -156,7 +152,7 @@ export class CloudWatchMetricsQueryRunner extends CloudWatchRequest {
if (results.some((r) => r.error && /^Throttling:.*/.test(r.error))) {
const failedRedIds = Object.keys(err.data?.results ?? {});
const regionsAffected = Object.values(request.queries).reduce(
const regionsAffected = Object.values(request.targets).reduce(
(res: string[], { refId, region }) =>
(refId && !failedRedIds.includes(refId)) || res.includes(region) ? res : [...res, region],
[]

View File

@ -1,6 +1,13 @@
import { Observable } from 'rxjs';
import { Observable, of } from 'rxjs';
import { DataSourceInstanceSettings, DataSourceRef, getDataSourceRef, ScopedVars } from '@grafana/data';
import {
DataQueryRequest,
DataQueryResponse,
DataSourceInstanceSettings,
DataSourceRef,
getDataSourceRef,
ScopedVars,
} from '@grafana/data';
import { BackendDataSourceResponse, FetchResponse, getBackendSrv } from '@grafana/runtime';
import { notifyApp } from 'app/core/actions';
import { createErrorNotification } from 'app/core/copy/appNotification';
@ -9,12 +16,13 @@ import { store } from 'app/store/store';
import { AppNotificationTimeout } from 'app/types';
import memoizedDebounce from '../memoizedDebounce';
import { CloudWatchJsonData, Dimensions, MetricRequest, MultiFilters } from '../types';
import { CloudWatchJsonData, CloudWatchQuery, Dimensions, MetricRequest, MultiFilters } from '../types';
export abstract class CloudWatchRequest {
templateSrv: TemplateSrv;
ref: DataSourceRef;
dsQueryEndpoint = '/api/ds/query';
query: (request: DataQueryRequest<CloudWatchQuery>) => Observable<DataQueryResponse>;
debouncedCustomAlert: (title: string, message: string) => void = memoizedDebounce(
displayCustomError,
AppNotificationTimeout.Error
@ -22,10 +30,12 @@ export abstract class CloudWatchRequest {
constructor(
public instanceSettings: DataSourceInstanceSettings<CloudWatchJsonData>,
templateSrv: TemplateSrv
templateSrv: TemplateSrv,
queryFn: (request: DataQueryRequest<CloudWatchQuery>) => Observable<DataQueryResponse> = () => of({ data: [] })
) {
this.templateSrv = templateSrv;
this.ref = getDataSourceRef(instanceSettings);
this.query = queryFn;
}
awsRequest(

View File

@ -56,7 +56,7 @@ export interface CloudWatchSecureJsonData extends AwsAuthDataSourceSecureJsonDat
export type CloudWatchLogsRequest = GetLogEventsRequest | StartQueryRequest | QueryParam;
export interface GetLogEventsRequest {
export interface GetLogEventsRequest extends DataQuery {
/**
* The name of the log group.
*/
@ -85,7 +85,7 @@ export interface GetLogEventsRequest {
* If the value is true, the earliest log events are returned first. If the value is false, the latest log events are returned first. The default value is false. If you are using nextToken in this operation, you must specify true for startFromHead.
*/
startFromHead?: boolean;
region?: string;
region: string;
}
export interface TSDBResponse<T = any> {
@ -135,7 +135,7 @@ export interface LogGroupField {
percent?: number;
}
export interface StartQueryRequest {
export interface StartQueryRequest extends DataQuery {
/**
* The log group on which to perform the query. A StartQuery operation must include a logGroupNames or a logGroupName parameter, but not both.
*/
@ -157,7 +157,7 @@ export interface StartQueryRequest {
region: string;
}
export interface QueryParam {
export interface QueryParam extends DataQuery {
queryId: string;
refId: string;
limit?: number;