Datasource/CloudWatch: Prevents hidden dataframe fields from displaying in tables (#24580)
* Datasource/CloudWatch: Prevents hidden dataframe fields from displaying in tables
parent cd9cbe5e16
commit 277aee8642
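In short: the CloudWatch backend now returns its always-included @log/@logStream columns under internal aliases and marks them with a lowercase `custom.hidden` field config, and the table code paths skip any field carrying that flag. A minimal TypeScript sketch of the field shape involved (the frame construction here is illustrative, not taken from the diff):

```typescript
import { FieldType, toDataFrame } from '@grafana/data';

// Illustrative frame: the internal CloudWatch fields carry config.custom.hidden = true.
const frame = toDataFrame({
  fields: [
    { name: '@timestamp', type: FieldType.time, values: [1589891112000] },
    { name: '@message', type: FieldType.string, values: ['a log line'] },
    {
      name: '__log__grafana_internal__',
      type: FieldType.string,
      values: ['fakelog'],
      config: { custom: { hidden: true } },
    },
  ],
});

// Fields that should end up as visible table columns.
const visible = frame.fields.filter(f => !f.config.custom?.hidden).map(f => f.name);
console.log(visible); // -> ['@timestamp', '@message']
```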
@@ -57,9 +57,9 @@ export class FieldCache {
     return types && types.length > 0;
   }
 
-  getFirstFieldOfType(type: FieldType): FieldWithIndex | undefined {
+  getFirstFieldOfType(type: FieldType, includeHidden = false): FieldWithIndex | undefined {
     const fields = this.fieldByType[type];
-    const firstField = fields.find(field => !(field.config.custom && field.config.custom['Hidden']));
+    const firstField = fields.find(field => includeHidden || !field.config.custom?.hidden);
     return firstField;
   }
 
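For context, a sketch of how the new `includeHidden` flag might be exercised (assuming `FieldCache`, `FieldType`, and `toDataFrame` from `@grafana/data`; the data is made up):

```typescript
import { FieldCache, FieldType, toDataFrame } from '@grafana/data';

const cache = new FieldCache(
  toDataFrame({
    fields: [
      {
        name: '__logstream__grafana_internal__',
        type: FieldType.string,
        values: ['stream-a'],
        config: { custom: { hidden: true } },
      },
      { name: 'line', type: FieldType.string, values: ['a log line'] },
    ],
  })
);

// Default behaviour now skips hidden fields...
console.log(cache.getFirstFieldOfType(FieldType.string)?.name); // -> 'line'

// ...while includeHidden = true keeps the old behaviour of returning the first match.
console.log(cache.getFirstFieldOfType(FieldType.string, true)?.name); // -> '__logstream__grafana_internal__'
```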
@@ -96,6 +96,7 @@ export interface Column {
   text: string; // For a Column, the 'text' is the field name
   filterable?: boolean;
   unit?: string;
+  custom?: Record<string, any>;
 }
 
 export interface TableData extends QueryResultBase {
@@ -83,8 +83,10 @@ class UnThemedLogDetails extends PureComponent<Props> {
     return (
       row.dataFrame.fields
         .map((field, index) => ({ ...field, index }))
-        // Remove Id which we use for react key and entry field which we are showing as the log message.
-        .filter((field, index) => 'id' !== field.name && row.entryFieldIndex !== index)
+        // Remove Id which we use for react key and entry field which we are showing as the log message. Also remove hidden fields.
+        .filter(
+          (field, index) => !('id' === field.name || row.entryFieldIndex === index || field.config.custom?.hidden)
+        )
         // Filter out fields without values. For example in elastic the fields are parsed from the document which can
         // have different structure per row and so the dataframe is pretty sparse.
         .filter(field => {
@@ -7,6 +7,7 @@ export interface TableFieldOptions {
   width: number;
   align: FieldTextAlignment;
   displayMode: TableCellDisplayMode;
+  hidden?: boolean;
 }
 
 export enum TableCellDisplayMode {
@@ -38,10 +38,13 @@ export function getColumns(data: DataFrame, availableWidth: number, columnMinWid
   const columns: Column[] = [];
   let fieldCountWithoutWidth = data.fields.length;
 
-  for (let fieldIndex = 0; fieldIndex < data.fields.length; fieldIndex++) {
-    const field = data.fields[fieldIndex];
+  for (const [fieldIndex, field] of data.fields.entries()) {
     const fieldTableOptions = (field.config.custom || {}) as TableFieldOptions;
 
+    if (fieldTableOptions.hidden) {
+      continue;
+    }
+
     if (fieldTableOptions.width) {
       availableWidth -= fieldTableOptions.width;
       fieldCountWithoutWidth -= 1;
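A standalone sketch of the rule added above, using simplified stand-in types rather than the real `@grafana/ui` implementation: fields whose custom table options set `hidden` are skipped before they ever become columns.

```typescript
// Simplified stand-ins for Field / TableFieldOptions; illustrative only.
interface SketchField {
  name: string;
  config: { custom?: { hidden?: boolean; width?: number } };
}

function visibleColumns(fields: SketchField[]): string[] {
  const columns: string[] = [];
  for (const field of fields) {
    const fieldTableOptions = field.config.custom ?? {};
    if (fieldTableOptions.hidden) {
      // Same early exit as in the diff: the field never becomes a column.
      continue;
    }
    columns.push(field.name);
  }
  return columns;
}

console.log(
  visibleColumns([
    { name: 'time', config: {} },
    { name: '__log__grafana_internal__', config: { custom: { hidden: true } } },
    { name: 'line', config: { custom: {} } },
  ])
); // -> ['time', 'line']
```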
@@ -40,6 +40,10 @@ type DatasourceInfo struct {
 
 const CLOUDWATCH_TS_FORMAT = "2006-01-02 15:04:05.000"
 
+// Constants also defined in datasource/cloudwatch/datasource.ts
+const LOG_IDENTIFIER_INTERNAL = "__log__grafana_internal__"
+const LOGSTREAM_IDENTIFIER_INTERNAL = "__logstream__grafana_internal__"
+
 func (e *CloudWatchExecutor) getLogsClient(region string) (*cloudwatchlogs.CloudWatchLogs, error) {
 	e.mux.Lock()
 	defer e.mux.Unlock()
@@ -234,12 +234,16 @@ func (e *CloudWatchExecutor) executeStartQuery(ctx context.Context, logsClient c
 		return nil, fmt.Errorf("invalid time range: Start time must be before end time")
 	}
 
+	// The fields @log and @logStream are always included in the results of a user's query
+	// so that a row's context can be retrieved later if necessary.
+	// The usage of ltrim around the @log/@logStream fields is a necessary workaround, as without it,
+	// CloudWatch wouldn't consider a query using a non-alised @log/@logStream valid.
 	startQueryInput := &cloudwatchlogs.StartQueryInput{
 		StartTime:     aws.Int64(startTime.Unix()),
 		EndTime:       aws.Int64(endTime.Unix()),
 		Limit:         aws.Int64(parameters.Get("limit").MustInt64(1000)),
 		LogGroupNames: aws.StringSlice(parameters.Get("logGroupNames").MustStringArray()),
-		QueryString:   aws.String("fields @timestamp,@log,@logStream|" + parameters.Get("queryString").MustString("")),
+		QueryString:   aws.String("fields @timestamp,ltrim(@log) as " + LOG_IDENTIFIER_INTERNAL + ",ltrim(@logStream) as " + LOGSTREAM_IDENTIFIER_INTERNAL + "|" + parameters.Get("queryString").MustString("")),
 	}
 	return logsClient.StartQueryWithContext(ctx, startQueryInput)
 }
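A TypeScript sketch, mirroring the Go change above, of the Logs Insights query string the backend now issues; the constants match the ones introduced in this commit, while the user query value is made up.

```typescript
const LOG_IDENTIFIER_INTERNAL = '__log__grafana_internal__';
const LOGSTREAM_IDENTIFIER_INTERNAL = '__logstream__grafana_internal__';

// ltrim() works around CloudWatch rejecting a query that selects a non-aliased
// @log/@logStream, while the aliases keep the fields addressable later.
function buildInsightsQuery(userQuery: string): string {
  return (
    `fields @timestamp,ltrim(@log) as ${LOG_IDENTIFIER_INTERNAL},` +
    `ltrim(@logStream) as ${LOGSTREAM_IDENTIFIER_INTERNAL}|` +
    userQuery
  );
}

console.log(buildInsightsQuery('fields @message | limit 10'));
// fields @timestamp,ltrim(@log) as __log__grafana_internal__,ltrim(@logStream) as __logstream__grafana_internal__|fields @message | limit 10
```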
@@ -52,11 +52,11 @@ func logsResultsToDataframes(response *cloudwatchlogs.GetQueryResultsOutput) (*d
 
 		if *fieldName == "@timestamp" {
 			newFields[len(newFields)-1].SetConfig(&data.FieldConfig{Title: "Time"})
-		} else if *fieldName == "@logStream" || *fieldName == "@log" {
+		} else if *fieldName == LOGSTREAM_IDENTIFIER_INTERNAL || *fieldName == LOG_IDENTIFIER_INTERNAL {
 			newFields[len(newFields)-1].SetConfig(
 				&data.FieldConfig{
 					Custom: map[string]interface{}{
-						"Hidden": true,
+						"hidden": true,
 					},
 				},
 			)
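The point of the "Hidden" to "hidden" rename is that the Go-side custom config has to use the exact lowercase key the frontend reads. A tiny sketch of the check on the receiving end (the config object is assumed, not a wire-format dump):

```typescript
// Assumed shape of the serialized field config produced by the Go code above.
const internalFieldConfig: { custom?: Record<string, any> } = { custom: { hidden: true } };

// Frontend checks such as field.config.custom?.hidden read exactly this key,
// so the capitalized "Hidden" variant would never be picked up.
console.log(Boolean(internalFieldConfig.custom?.hidden)); // true
```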
@@ -39,6 +39,14 @@ func TestLogsResultsToDataframes(t *testing.T) {
 					Field: aws.String("@log"),
 					Value: aws.String("fakelog"),
 				},
+				&cloudwatchlogs.ResultField{
+					Field: aws.String(LOGSTREAM_IDENTIFIER_INTERNAL),
+					Value: aws.String("fakelogstream"),
+				},
+				&cloudwatchlogs.ResultField{
+					Field: aws.String(LOG_IDENTIFIER_INTERNAL),
+					Value: aws.String("fakelog"),
+				},
 			},
 			{
 				&cloudwatchlogs.ResultField{
@@ -61,6 +69,14 @@ func TestLogsResultsToDataframes(t *testing.T) {
 					Field: aws.String("@log"),
 					Value: aws.String("fakelog"),
 				},
+				&cloudwatchlogs.ResultField{
+					Field: aws.String(LOGSTREAM_IDENTIFIER_INTERNAL),
+					Value: aws.String("fakelogstream"),
+				},
+				&cloudwatchlogs.ResultField{
+					Field: aws.String(LOG_IDENTIFIER_INTERNAL),
+					Value: aws.String("fakelog"),
+				},
 			},
 			{
 				&cloudwatchlogs.ResultField{
@@ -83,6 +99,14 @@ func TestLogsResultsToDataframes(t *testing.T) {
 					Field: aws.String("@log"),
 					Value: aws.String("fakelog"),
 				},
+				&cloudwatchlogs.ResultField{
+					Field: aws.String(LOGSTREAM_IDENTIFIER_INTERNAL),
+					Value: aws.String("fakelogstream"),
+				},
+				&cloudwatchlogs.ResultField{
+					Field: aws.String(LOG_IDENTIFIER_INTERNAL),
+					Value: aws.String("fakelog"),
+				},
 			},
 		},
 		Status: aws.String("ok"),
@@ -114,20 +138,32 @@ func TestLogsResultsToDataframes(t *testing.T) {
 		aws.String("fakelogstream"),
 		aws.String("fakelogstream"),
 	})
-	logStreamField.SetConfig(&data.FieldConfig{
-		Custom: map[string]interface{}{
-			"Hidden": true,
-		},
-	})
 
 	logField := data.NewField("@log", nil, []*string{
 		aws.String("fakelog"),
 		aws.String("fakelog"),
 		aws.String("fakelog"),
 	})
-	logField.SetConfig(&data.FieldConfig{
+
+	hiddenLogStreamField := data.NewField(LOGSTREAM_IDENTIFIER_INTERNAL, nil, []*string{
+		aws.String("fakelogstream"),
+		aws.String("fakelogstream"),
+		aws.String("fakelogstream"),
+	})
+	hiddenLogStreamField.SetConfig(&data.FieldConfig{
 		Custom: map[string]interface{}{
-			"Hidden": true,
+			"hidden": true,
 		},
 	})
+
+	hiddenLogField := data.NewField(LOG_IDENTIFIER_INTERNAL, nil, []*string{
+		aws.String("fakelog"),
+		aws.String("fakelog"),
+		aws.String("fakelog"),
+	})
+	hiddenLogField.SetConfig(&data.FieldConfig{
+		Custom: map[string]interface{}{
+			"hidden": true,
+		},
+	})
 
@@ -138,6 +174,8 @@ func TestLogsResultsToDataframes(t *testing.T) {
 			lineField,
 			logStreamField,
 			logField,
+			hiddenLogStreamField,
+			hiddenLogField,
 		},
 		RefID: "",
 		Meta: &data.FrameMeta{
@@ -5,7 +5,7 @@ import { Column, TableData, QueryResultMeta } from '@grafana/data';
  * Extends the standard Column class with variables that get
  * mutated in the angular table panel.
  */
-interface MutableColumn extends Column {
+export interface MutableColumn extends Column {
   title?: string;
   sort?: boolean;
   desc?: boolean;
@@ -10,7 +10,7 @@ import {
   PreferredVisualisationType,
 } from '@grafana/data';
 import { ExploreItemState } from 'app/types/explore';
-import TableModel, { mergeTablesIntoModel } from 'app/core/table_model';
+import TableModel, { mergeTablesIntoModel, MutableColumn } from 'app/core/table_model';
 import { sortLogsResult, refreshIntervalToSortOrder } from 'app/core/utils/explore';
 import { dataFrameToLogsModel } from 'app/core/logs_model';
 import { getGraphSeriesModel } from 'app/plugins/panel/graph2/getGraphSeriesModel';
@@ -61,10 +61,11 @@ export class ResultProcessor {
     const fieldCount = fields.length;
     const rowCount = frame.length;
 
-    const columns = fields.map(field => ({
+    const columns: MutableColumn[] = fields.map(field => ({
       text: field.name,
       type: field.type,
       filterable: field.config.filterable,
+      custom: field.config.custom,
     }));
 
     const rows: any[][] = [];
@@ -45,11 +45,16 @@ import { from, empty, Observable } from 'rxjs';
 import { delay, expand, map, mergeMap, tap, finalize, catchError } from 'rxjs/operators';
 import { CloudWatchLanguageProvider } from './language_provider';
 
-const TSDB_QUERY_ENDPOINT = '/api/tsdb/query';
 import { VariableWithMultiSupport } from 'app/features/templating/types';
 import { RowContextOptions } from '@grafana/ui/src/components/Logs/LogRowContextProvider';
 import { AwsUrl, encodeUrl } from './aws_url';
 
+const TSDB_QUERY_ENDPOINT = '/api/tsdb/query';
+
+// Constants also defined in tsdb/cloudwatch/cloudwatch.go
+const LOG_IDENTIFIER_INTERNAL = '__log__grafana_internal__';
+const LOGSTREAM_IDENTIFIER_INTERNAL = '__logstream__grafana_internal__';
+
 const displayAlert = (datasourceName: string, region: string) =>
   store.dispatch(
     notifyApp(
@@ -348,12 +353,12 @@ export class CloudWatchDatasource extends DataSourceApi<CloudWatchQuery, CloudWa
     let logField = null;
 
     for (const field of row.dataFrame.fields) {
-      if (field.name === '@logStream') {
+      if (field.name === LOGSTREAM_IDENTIFIER_INTERNAL) {
        logStreamField = field;
         if (logField !== null) {
           break;
         }
-      } else if (field.name === '@log') {
+      } else if (field.name === LOG_IDENTIFIER_INTERNAL) {
         logField = field;
         if (logStreamField !== null) {
           break;