CloudWatch: fix log explorer context (#52118)
parent e1785f4eb4
commit 146c90d59e
@@ -34,8 +34,8 @@ type LogQueryJson struct {
 	SubType            string
 	Limit              *int64
 	Time               int64
-	StartTime          int64
-	EndTime            int64
+	StartTime          *int64
+	EndTime            *int64
 	LogGroupName       string
 	LogGroupNames      []string
 	LogGroupNamePrefix string
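Note: moving StartTime and EndTime to *int64 lets the backend tell a timestamp that was omitted from the query JSON apart from an explicit zero. A minimal sketch of that behavior, using an illustrative stand-in struct rather than the actual LogQueryJson:

```go
package main

import (
	"encoding/json"
	"fmt"
)

// timeRange is a stand-in that mirrors the pointer fields above; not the real struct.
type timeRange struct {
	StartTime *int64 `json:"startTime"`
	EndTime   *int64 `json:"endTime"`
}

func main() {
	var q timeRange
	// A log-context query may carry only one bound, e.g. just an end time.
	if err := json.Unmarshal([]byte(`{"endTime": 1}`), &q); err != nil {
		panic(err)
	}

	fmt.Println(q.StartTime == nil) // true: the key was absent, not zero
	fmt.Println(*q.EndTime)         // 1
}
```

With value-typed int64 fields, both the "absent" and the "explicit 0" cases decode to 0, and the handler cannot tell them apart.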
@@ -168,8 +168,13 @@ func (e *cloudWatchExecutor) handleGetLogEvents(ctx context.Context, logsClient
 	}
 	queryRequest.SetLogStreamName(parameters.LogStreamName)

-	queryRequest.SetStartTime(parameters.StartTime)
-	queryRequest.SetEndTime(parameters.EndTime)
+	if parameters.StartTime != nil && *parameters.StartTime != 0 {
+		queryRequest.SetStartTime(*parameters.StartTime)
+	}
+
+	if parameters.EndTime != nil && *parameters.EndTime != 0 {
+		queryRequest.SetEndTime(*parameters.EndTime)
+	}

 	logEvents, err := logsClient.GetLogEventsWithContext(ctx, queryRequest)
 	if err != nil {
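For context, a rough sketch of what the guarded setters produce, using the aws-sdk-go types the handler already works with; the literal group/stream names and times are made up for illustration:

```go
package main

import (
	"fmt"

	"github.com/aws/aws-sdk-go/aws"
	"github.com/aws/aws-sdk-go/service/cloudwatchlogs"
)

func main() {
	// Values as they might arrive from a log-context query: only an end time was supplied.
	var startTime *int64
	endTime := aws.Int64(1)

	req := &cloudwatchlogs.GetLogEventsInput{}
	req.SetLogGroupName("foo")
	req.SetLogStreamName("bar")

	// Mirror the guards above: only set a bound the caller actually provided.
	if startTime != nil && *startTime != 0 {
		req.SetStartTime(*startTime)
	}
	if endTime != nil && *endTime != 0 {
		req.SetEndTime(*endTime)
	}

	// StartTime stays nil and is omitted from the API request, instead of being
	// pinned to 0 (the Unix epoch) as it was before this change.
	fmt.Println(req.StartTime == nil, *req.EndTime) // true 1
}
```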
@@ -20,6 +20,92 @@ import (
 	"github.com/stretchr/testify/require"
 )

+func TestQuery_GetLogEvents(t *testing.T) {
+	origNewCWLogsClient := NewCWLogsClient
+	t.Cleanup(func() {
+		NewCWLogsClient = origNewCWLogsClient
+	})
+
+	var cli fakeCWLogsClient
+
+	NewCWLogsClient = func(sess *session.Session) cloudwatchlogsiface.CloudWatchLogsAPI {
+		return &cli
+	}
+	const refID = "A"
+
+	testCases := map[string]struct {
+		query         string
+		expectedInput []*cloudwatchlogs.GetLogEventsInput
+	}{
+		"Nil startTime": {
+			query: `{
+				"type": "logAction",
+				"subtype": "GetLogEvents",
+				"logGroupName": "foo",
+				"logStreamName": "bar",
+				"endTime": 1,
+				"startFromHead": false
+			}`,
+			expectedInput: []*cloudwatchlogs.GetLogEventsInput{
+				{
+					EndTime:       aws.Int64(1),
+					Limit:         aws.Int64(10),
+					LogGroupName:  aws.String("foo"),
+					LogStreamName: aws.String("bar"),
+					StartFromHead: aws.Bool(false),
+				},
+			},
+		},
+		"Nil endTime": {
+			query: `{
+				"type": "logAction",
+				"subtype": "GetLogEvents",
+				"logGroupName": "foo",
+				"logStreamName": "bar",
+				"startTime": 1,
+				"startFromHead": true
+			}`,
+			expectedInput: []*cloudwatchlogs.GetLogEventsInput{
+				{
+					StartTime:     aws.Int64(1),
+					Limit:         aws.Int64(10),
+					LogGroupName:  aws.String("foo"),
+					LogStreamName: aws.String("bar"),
+					StartFromHead: aws.Bool(true),
+				},
+			},
+		},
+	}
+
+	for name, test := range testCases {
+		t.Run(name, func(t *testing.T) {
+			cli = fakeCWLogsClient{}
+
+			im := datasource.NewInstanceManager(func(s backend.DataSourceInstanceSettings) (instancemgmt.Instance, error) {
+				return datasourceInfo{}, nil
+			})
+
+			executor := newExecutor(im, newTestConfig(), &fakeSessionCache{}, featuremgmt.WithFeatures())
+			_, err := executor.QueryData(context.Background(), &backend.QueryDataRequest{
+				PluginContext: backend.PluginContext{
+					DataSourceInstanceSettings: &backend.DataSourceInstanceSettings{},
+				},
+				Queries: []backend.DataQuery{
+					{
+						RefID:     refID,
+						TimeRange: backend.TimeRange{From: time.Unix(0, 0), To: time.Unix(1, 0)},
+						JSON:      json.RawMessage(test.query),
+					},
+				},
+			})
+
+			require.NoError(t, err)
+			require.Len(t, cli.calls.getEventsWithContext, 1)
+			assert.Equal(t, test.expectedInput, cli.calls.getEventsWithContext)
+		})
+	}
+}
+
 func TestQuery_DescribeLogGroups(t *testing.T) {
 	origNewCWLogsClient := NewCWLogsClient
 	t.Cleanup(func() {
@@ -30,6 +30,7 @@ type fakeCWLogsClient struct {

 type logsQueryCalls struct {
 	startQueryWithContext []*cloudwatchlogs.StartQueryInput
+	getEventsWithContext  []*cloudwatchlogs.GetLogEventsInput
 }

 func (m *fakeCWLogsClient) GetQueryResultsWithContext(ctx context.Context, input *cloudwatchlogs.GetQueryResultsInput, option ...request.Option) (*cloudwatchlogs.GetQueryResultsOutput, error) {
@@ -58,6 +59,14 @@ func (m *fakeCWLogsClient) GetLogGroupFieldsWithContext(ctx context.Context, inp
 	return &m.logGroupFields, nil
 }

+func (m *fakeCWLogsClient) GetLogEventsWithContext(ctx context.Context, input *cloudwatchlogs.GetLogEventsInput, option ...request.Option) (*cloudwatchlogs.GetLogEventsOutput, error) {
+	m.calls.getEventsWithContext = append(m.calls.getEventsWithContext, input)
+
+	return &cloudwatchlogs.GetLogEventsOutput{
+		Events: []*cloudwatchlogs.OutputLogEvent{},
+	}, nil
+}
+
 type fakeCWClient struct {
 	cloudwatchiface.CloudWatchAPI
 	cloudwatch.GetMetricDataOutput
@@ -1,7 +1,17 @@
 import { lastValueFrom, of } from 'rxjs';
 import { toArray } from 'rxjs/operators';

-import { ArrayVector, DataFrame, dataFrameToJSON, dateTime, Field, MutableDataFrame } from '@grafana/data';
+import {
+  ArrayVector,
+  DataFrame,
+  dataFrameToJSON,
+  dateTime,
+  Field,
+  FieldType,
+  LogLevel,
+  LogRowModel,
+  MutableDataFrame,
+} from '@grafana/data';
 import { setDataSourceSrv } from '@grafana/runtime';

 import {
@@ -16,6 +26,7 @@ import {
   regionVariable,
 } from './__mocks__/CloudWatchDataSource';
 import { validLogsQuery, validMetricsQuery } from './__mocks__/queries';
+import { LOGSTREAM_IDENTIFIER_INTERNAL, LOG_IDENTIFIER_INTERNAL } from './datasource';
 import {
   CloudWatchLogsQueryStatus,
   CloudWatchMetricsQuery,
@@ -389,6 +400,43 @@ describe('datasource', () => {
     });
   });

+  describe('getLogRowContext', () => {
+    it('replaces parameters correctly in the query', async () => {
+      const { datasource, fetchMock } = setupMockedDataSource();
+      const row: LogRowModel = {
+        entryFieldIndex: 0,
+        rowIndex: 0,
+        dataFrame: new MutableDataFrame({
+          refId: 'B',
+          fields: [
+            { name: 'ts', type: FieldType.time, values: [1] },
+            { name: LOG_IDENTIFIER_INTERNAL, type: FieldType.string, values: ['foo'], labels: {} },
+            { name: LOGSTREAM_IDENTIFIER_INTERNAL, type: FieldType.string, values: ['bar'], labels: {} },
+          ],
+        }),
+        entry: '4',
+        labels: {},
+        hasAnsi: false,
+        hasUnescapedContent: false,
+        raw: '4',
+        logLevel: LogLevel.info,
+        timeEpochMs: 4,
+        timeEpochNs: '4000000',
+        timeFromNow: '',
+        timeLocal: '',
+        timeUtc: '',
+        uid: '1',
+      };
+      await datasource.getLogRowContext(row);
+      expect(fetchMock.mock.calls[0][0].data.queries[0].endTime).toBe(4);
+      expect(fetchMock.mock.calls[0][0].data.queries[0].region).toBe(undefined);
+
+      await datasource.getLogRowContext(row, { direction: 'FORWARD' }, { ...validLogsQuery, region: 'eu-east' });
+      expect(fetchMock.mock.calls[1][0].data.queries[0].startTime).toBe(4);
+      expect(fetchMock.mock.calls[1][0].data.queries[0].region).toBe('eu-east');
+    });
+  });
+
   describe('template variable interpolation', () => {
     it('interpolates variables correctly', async () => {
       const { datasource, fetchMock } = setupMockedDataSource({
@@ -69,8 +69,8 @@ import { CloudWatchVariableSupport } from './variables';
 const DS_QUERY_ENDPOINT = '/api/ds/query';

 // Constants also defined in tsdb/cloudwatch/cloudwatch.go
-const LOG_IDENTIFIER_INTERNAL = '__log__grafana_internal__';
-const LOGSTREAM_IDENTIFIER_INTERNAL = '__logstream__grafana_internal__';
+export const LOG_IDENTIFIER_INTERNAL = '__log__grafana_internal__';
+export const LOGSTREAM_IDENTIFIER_INTERNAL = '__logstream__grafana_internal__';

 const displayAlert = (datasourceName: string, region: string) =>
   store.dispatch(
@@ -89,7 +89,7 @@ const displayCustomError = (title: string, message: string) =>

 export class CloudWatchDatasource
   extends DataSourceWithBackend<CloudWatchQuery, CloudWatchJsonData>
-  implements DataSourceWithLogsContextSupport
+  implements DataSourceWithLogsContextSupport<CloudWatchLogsQuery>
 {
   proxyUrl: any;
   defaultRegion: any;
@@ -488,7 +488,8 @@ export class CloudWatchDatasource

   getLogRowContext = async (
     row: LogRowModel,
-    { limit = 10, direction = 'BACKWARD' }: RowContextOptions = {}
+    { limit = 10, direction = 'BACKWARD' }: RowContextOptions = {},
+    query?: CloudWatchLogsQuery
   ): Promise<{ data: DataFrame[] }> => {
     let logStreamField = null;
     let logField = null;
@@ -510,6 +511,7 @@ export class CloudWatchDatasource
     const requestParams: GetLogEventsRequest = {
       limit,
       startFromHead: direction !== 'BACKWARD',
+      region: query?.region,
       logGroupName: parseLogGroupName(logField!.values.get(row.rowIndex)),
       logStreamName: logStreamField!.values.get(row.rowIndex),
     };
@@ -199,6 +199,7 @@ export interface GetLogEventsRequest {
    * If the value is true, the earliest log events are returned first. If the value is false, the latest log events are returned first. The default value is false. If you are using nextToken in this operation, you must specify true for startFromHead.
    */
   startFromHead?: boolean;
+  region?: string;
 }

 export interface GetQueryResultsResponse {