Loki: Add unpack query builder hint (#65608)

* add unpack hint

* use `hasOwnProperty`

* add unpack completion to monaco

* adjust test name

* fix tests

* fix tests
Sven Grossmann, 2023-03-30 15:18:02 +02:00, committed by GitHub
parent e758b017d0
commit d790cb1c09
12 changed files with 169 additions and 39 deletions

View File

@@ -353,25 +353,27 @@ describe('Query imports', () => {
it('identifies selectors with JSON parser data', async () => {
jest.spyOn(datasource, 'getDataSamples').mockResolvedValue([{}] as DataFrame[]);
extractLogParserFromDataFrameMock.mockReturnValueOnce({ hasLogfmt: false, hasJSON: true });
extractLogParserFromDataFrameMock.mockReturnValueOnce({ hasLogfmt: false, hasJSON: true, hasPack: false });
expect(await languageProvider.getParserAndLabelKeys('{place="luna"}')).toEqual({
extractedLabelKeys,
unwrapLabelKeys,
hasJSON: true,
hasLogfmt: false,
hasPack: false,
});
});
it('identifies selectors with Logfmt parser data', async () => {
jest.spyOn(datasource, 'getDataSamples').mockResolvedValue([{}] as DataFrame[]);
extractLogParserFromDataFrameMock.mockReturnValueOnce({ hasLogfmt: true, hasJSON: false });
extractLogParserFromDataFrameMock.mockReturnValueOnce({ hasLogfmt: true, hasJSON: false, hasPack: false });
expect(await languageProvider.getParserAndLabelKeys('{place="luna"}')).toEqual({
extractedLabelKeys,
unwrapLabelKeys,
hasJSON: false,
hasLogfmt: true,
hasPack: false,
});
});
@@ -384,6 +386,7 @@ describe('Query imports', () => {
unwrapLabelKeys: [],
hasJSON: false,
hasLogfmt: false,
hasPack: false,
});
expect(extractLogParserFromDataFrameMock).not.toHaveBeenCalled();
});

View File

@@ -462,21 +462,26 @@ export default class LokiLanguageProvider extends LanguageProvider {
return labelValues ?? [];
}
async getParserAndLabelKeys(
selector: string
): Promise<{ extractedLabelKeys: string[]; hasJSON: boolean; hasLogfmt: boolean; unwrapLabelKeys: string[] }> {
async getParserAndLabelKeys(selector: string): Promise<{
extractedLabelKeys: string[];
hasJSON: boolean;
hasLogfmt: boolean;
hasPack: boolean;
unwrapLabelKeys: string[];
}> {
const series = await this.datasource.getDataSamples({ expr: selector, refId: 'data-samples' });
if (!series.length) {
return { extractedLabelKeys: [], unwrapLabelKeys: [], hasJSON: false, hasLogfmt: false };
return { extractedLabelKeys: [], unwrapLabelKeys: [], hasJSON: false, hasLogfmt: false, hasPack: false };
}
const { hasLogfmt, hasJSON } = extractLogParserFromDataFrame(series[0]);
const { hasLogfmt, hasJSON, hasPack } = extractLogParserFromDataFrame(series[0]);
return {
extractedLabelKeys: extractLabelKeysFromDataFrame(series[0]),
unwrapLabelKeys: extractUnwrapLabelKeysFromDataFrame(series[0]),
hasJSON,
hasPack,
hasLogfmt,
};
}
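For orientation, here is a minimal sketch (not part of the change) of how a caller might act on the extended result; the selector, helper name, and return values are illustrative:

// Hypothetical consumer of getParserAndLabelKeys; hasPack is checked first because
// packed lines are valid JSON, so hasJSON is true for them as well.
async function pickSuggestedParser(provider: LokiLanguageProvider): Promise<string | undefined> {
  const keys = await provider.getParserAndLabelKeys('{job="grafana"}');
  if (keys.hasPack) {
    return 'unpack';
  }
  if (keys.hasJSON) {
    return 'json';
  }
  return keys.hasLogfmt ? 'logfmt' : undefined;
}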

View File

@@ -53,6 +53,7 @@ const parserAndLabelKeys = {
unwrapLabelKeys: ['unwrap', 'labels'],
hasJSON: true,
hasLogfmt: false,
hasPack: false,
};
describe('CompletionDataProvider', () => {

View File

@@ -184,6 +184,7 @@ describe('getCompletions', () => {
unwrapLabelKeys,
hasJSON: false,
hasLogfmt: false,
hasPack: false,
});
});
@@ -317,6 +318,7 @@ describe('getCompletions', () => {
unwrapLabelKeys,
hasJSON: true,
hasLogfmt: false,
hasPack: false,
});
const situation: Situation = { type: 'AFTER_SELECTOR', logQuery: '{job="grafana"}', afterPipe, hasSpace };
const completions = await getCompletions(situation, completionProvider);
@@ -334,6 +336,7 @@ describe('getCompletions', () => {
unwrapLabelKeys,
hasJSON: false,
hasLogfmt: true,
hasPack: false,
});
const situation: Situation = { type: 'AFTER_SELECTOR', logQuery: '', afterPipe, hasSpace: true };
const completions = await getCompletions(situation, completionProvider);
@@ -392,6 +395,7 @@ describe('getAfterSelectorCompletions', () => {
unwrapLabelKeys: [],
hasJSON: true,
hasLogfmt: false,
hasPack: false,
});
});
it('should remove trailing pipeline from logQuery', () => {
@@ -407,6 +411,21 @@ describe('getAfterSelectorCompletions', () => {
expect(parsersInSuggestions).toStrictEqual(['json (detected)', 'logfmt', 'pattern', 'regexp', 'unpack']);
});
it('should show detected unpack parser if query has no parser', async () => {
jest.spyOn(completionProvider, 'getParserAndLabelKeys').mockResolvedValue({
extractedLabelKeys: ['abc', 'def'],
unwrapLabelKeys: [],
hasJSON: true,
hasLogfmt: false,
hasPack: true,
});
const suggestions = await getAfterSelectorCompletions(`{job="grafana"} | `, true, true, completionProvider);
const parsersInSuggestions = suggestions
.filter((suggestion) => suggestion.type === 'PARSER')
.map((parser) => parser.label);
expect(parsersInSuggestions).toStrictEqual(['unpack (detected)', 'json', 'logfmt', 'pattern', 'regexp']);
});
it('should not show detected parser if query already has parser', async () => {
const suggestions = await getAfterSelectorCompletions(
`{job="grafana"} | logfmt | `,

View File

@@ -174,6 +174,7 @@ async function getParserCompletions(
prefix: string,
hasJSON: boolean,
hasLogfmt: boolean,
hasPack: boolean,
extractedLabelKeys: string[],
hasParserInQuery: boolean
) {
@@ -183,17 +184,27 @@ async function getParserCompletions(
const hasLevelInExtractedLabels = extractedLabelKeys.some((key) => key === 'level');
if (hasJSON) {
allParsers.delete('json');
// We show "detected" label only if there is no previous parser in the query
const extra = hasParserInQuery ? '' : ' (detected)';
completions.push({
type: 'PARSER',
label: `json${extra}`,
insertText: `${prefix}json`,
documentation: hasLevelInExtractedLabels
? 'Use it to get log-levels in the histogram'
: explainOperator(LokiOperationId.Json),
});
if (hasPack) {
allParsers.delete('unpack');
completions.push({
type: 'PARSER',
label: `unpack${extra}`,
insertText: `${prefix}unpack`,
documentation: explainOperator(LokiOperationId.Unpack),
});
} else {
allParsers.delete('json');
completions.push({
type: 'PARSER',
label: `json${extra}`,
insertText: `${prefix}json`,
documentation: hasLevelInExtractedLabels
? 'Use it to get log-levels in the histogram'
: explainOperator(LokiOperationId.Json),
});
}
}
if (hasLogfmt) {
@@ -234,7 +245,7 @@ export async function getAfterSelectorCompletions(
query = trimEnd(logQuery, '| ');
}
const { extractedLabelKeys, hasJSON, hasLogfmt } = await dataProvider.getParserAndLabelKeys(query);
const { extractedLabelKeys, hasJSON, hasLogfmt, hasPack } = await dataProvider.getParserAndLabelKeys(query);
const hasQueryParser = isQueryWithParser(query).queryWithParser;
const prefix = `${hasSpace ? '' : ' '}${afterPipe ? '' : '| '}`;
@@ -242,6 +253,7 @@ export async function getAfterSelectorCompletions(
prefix,
hasJSON,
hasLogfmt,
hasPack,
extractedLabelKeys,
hasQueryParser
);
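As a small illustration of how that prefix shapes the inserted text, the sketch below mirrors the template literal from the hunk above; the helper name is hypothetical:

// Hypothetical helper reproducing the prefix logic from getAfterSelectorCompletions.
const buildPrefix = (hasSpace: boolean, afterPipe: boolean): string =>
  `${hasSpace ? '' : ' '}${afterPipe ? '' : '| '}`;

buildPrefix(true, true) + 'unpack';   // 'unpack'    – cursor is already after '| '
buildPrefix(false, false) + 'unpack'; // ' | unpack' – the pipe still has to be inserted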

View File

@@ -604,6 +604,10 @@ export class LokiDatasource
expression = addParserToQuery(expression, 'json');
break;
}
case 'ADD_UNPACK_PARSER': {
expression = addParserToQuery(expression, 'unpack');
break;
}
case 'ADD_NO_PIPELINE_ERROR': {
expression = addNoPipelineErrorToQuery(expression);
break;
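For context, a rough sketch of the effect the new hint action is meant to have; the real formatting comes from addParserToQuery, so the helper below is only a naive approximation:

// Naive stand-in for addParserToQuery(expression, 'unpack'); illustrative only.
function addUnpackParserNaive(expression: string): string {
  return `${expression} | unpack`;
}
// addUnpackParserNaive('{job="grafana"}') -> '{job="grafana"} | unpack'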

View File

@@ -1,4 +1,4 @@
import { isLogLineJSON, isLogLineLogfmt } from './lineParser';
import { isLogLineJSON, isLogLineLogfmt, isLogLinePacked } from './lineParser';
describe('isLogLineJSON', () => {
test('should return false on empty line', () => {
@@ -35,3 +35,25 @@ describe('isLogLineLogfmt', () => {
expect(isLogLineLogfmt('{"foo": "bar", "baz": "41 + 1"}')).toBe(false);
});
});
describe('isLogLinePacked', () => {
test('should return false on empty line', () => {
expect(isLogLinePacked('')).toBe(false);
});
test('should return false on unknown line pattern', () => {
expect(isLogLinePacked('To Be or not to be')).toBe(false);
});
test('should return false on key value patterns', () => {
expect(isLogLinePacked('foo=bar baz="41 + 1')).toBe(false);
});
test('should return false on JSON log lines', () => {
expect(isLogLinePacked('{"foo": "bar", "baz": "41 + 1"}')).toBe(false);
});
test('should return true on packed log lines', () => {
expect(isLogLinePacked('{"foo": "bar", "_entry": "41 + 1"}')).toBe(true);
});
});

View File

@@ -16,3 +16,13 @@ const LOGFMT_REGEXP = /(?:^|\s)([\w\(\)\[\]\{\}]+)=(""|(?:".*?[^\\]"|[^"\s]\S*))
export function isLogLineLogfmt(line: string): boolean {
return LOGFMT_REGEXP.test(line);
}
export function isLogLinePacked(line: string): boolean {
let parsed;
try {
parsed = JSON.parse(line);
return parsed.hasOwnProperty('_entry');
} catch (error) {
return false;
}
}
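As a quick illustration: packed lines are the JSON objects produced by Promtail's `pack` stage, which stores the original log line under the `_entry` key. The sample inputs below are made up:

// Illustrative inputs; only JSON carrying an `_entry` key counts as packed.
isLogLinePacked('{"_entry": "GET /health 200", "pod": "web-1"}'); // true
isLogLinePacked('{"msg": "GET /health 200", "pod": "web-1"}');    // false – plain JSON
isLogLinePacked('msg="GET /health 200" pod=web-1');               // false – logfmt, JSON.parse throws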

View File

@@ -81,7 +81,7 @@ describe('getQueryHints', () => {
});
it('does not suggest parser when parser in query', () => {
expect(getQueryHints('{job="grafana" | json', [jsonAndLogfmtSeries])).not.toEqual(
expect(getQueryHints('{job="grafana"} | json', [jsonAndLogfmtSeries])).not.toEqual(
expect.arrayContaining([
expect.objectContaining({ type: 'ADD_JSON_PARSER' }),
expect.objectContaining({ type: 'ADD_LOGFMT_PARSER' }),
@@ -90,6 +90,39 @@ describe('getQueryHints', () => {
});
});
describe('when series with json and packed logs', () => {
const jsonAndPackSeries: DataFrame = {
name: 'logs',
length: 2,
fields: [
{
name: 'Line',
type: FieldType.string,
config: {},
values: new ArrayVector(['{"_entry": "bar", "bar": "baz"}']),
},
],
};
it('suggest unpack parser when no parser in query', () => {
expect(getQueryHints('{job="grafana"', [jsonAndPackSeries])).toEqual(
expect.arrayContaining([expect.objectContaining({ type: 'ADD_UNPACK_PARSER' })])
);
});
it('does not suggest json parser', () => {
expect(getQueryHints('{job="grafana"', [jsonAndPackSeries])).not.toEqual(
expect.arrayContaining([expect.objectContaining({ type: 'ADD_JSON_PARSER' })])
);
});
it('does not suggest unpack parser when unpack in query', () => {
expect(getQueryHints('{job="grafana"} | unpack', [jsonAndPackSeries])).not.toEqual(
expect.arrayContaining([expect.objectContaining({ type: 'ADD_UNPACK_PARSER' })])
);
});
});
describe('when series with level-like label', () => {
const createSeriesWithLabel = (labelName?: string): DataFrame => {
const labelVariable: { [key: string]: string } = { job: 'a' };
@@ -117,7 +150,6 @@ describe('getQueryHints', () => {
],
};
};
it('suggest level renaming when no level label', () => {
expect(getQueryHints('{job="grafana"', [createSeriesWithLabel('lvl')])).toEqual(
expect.arrayContaining([expect.objectContaining({ type: 'ADD_LEVEL_LABEL_FORMAT' })])

View File

@@ -23,20 +23,35 @@ export function getQueryHints(query: string, series: DataFrame[]): QueryHint[] {
const { queryWithParser, parserCount } = isQueryWithParser(query);
if (!queryWithParser) {
const { hasLogfmt, hasJSON } = extractLogParserFromDataFrame(series[0]);
const { hasLogfmt, hasJSON, hasPack } = extractLogParserFromDataFrame(series[0]);
if (hasJSON) {
hints.push({
type: 'ADD_JSON_PARSER',
label: 'Selected log stream selector has JSON formatted logs.',
fix: {
title: 'add json parser',
label: 'Consider using JSON parser.',
action: {
type: 'ADD_JSON_PARSER',
query,
if (hasPack) {
hints.push({
type: 'ADD_UNPACK_PARSER',
label: 'Selected log stream selector has packed logs.',
fix: {
title: 'add unpack parser',
label: 'Consider using unpack parser.',
action: {
type: 'ADD_UNPACK_PARSER',
query,
},
},
},
});
});
} else {
hints.push({
type: 'ADD_JSON_PARSER',
label: 'Selected log stream selector has JSON formatted logs.',
fix: {
title: 'add json parser',
label: 'Consider using JSON parser.',
action: {
type: 'ADD_JSON_PARSER',
query,
},
},
});
}
}
if (hasLogfmt) {

View File

@@ -84,17 +84,17 @@ describe('extractLevelLikeLabelFromDataFrame', () => {
describe('extractLogParserFromDataFrame', () => {
it('returns false by default', () => {
const input = cloneDeep(frame);
expect(extractLogParserFromDataFrame(input)).toEqual({ hasJSON: false, hasLogfmt: false });
expect(extractLogParserFromDataFrame(input)).toEqual({ hasJSON: false, hasLogfmt: false, hasPack: false });
});
it('identifies JSON', () => {
const input = cloneDeep(frame);
input.fields[2].values = new ArrayVector(['{"a":"b"}']);
expect(extractLogParserFromDataFrame(input)).toEqual({ hasJSON: true, hasLogfmt: false });
expect(extractLogParserFromDataFrame(input)).toEqual({ hasJSON: true, hasLogfmt: false, hasPack: false });
});
it('identifies logfmt', () => {
const input = cloneDeep(frame);
input.fields[2].values = new ArrayVector(['a=b']);
expect(extractLogParserFromDataFrame(input)).toEqual({ hasJSON: false, hasLogfmt: true });
expect(extractLogParserFromDataFrame(input)).toEqual({ hasJSON: false, hasLogfmt: true, hasPack: false });
});
});

View File

@@ -11,7 +11,7 @@ import {
} from '@grafana/data';
import { isBytesString } from './languageUtils';
import { isLogLineJSON, isLogLineLogfmt } from './lineParser';
import { isLogLineJSON, isLogLineLogfmt, isLogLinePacked } from './lineParser';
export function dataFrameHasLokiError(frame: DataFrame): boolean {
const labelSets: Labels[] = frame.fields.find((f) => f.name === 'labels')?.values.toArray() ?? [];
@@ -23,27 +23,34 @@ export function dataFrameHasLevelLabel(frame: DataFrame): boolean {
return labelSets.some((labels) => labels.level !== undefined);
}
export function extractLogParserFromDataFrame(frame: DataFrame): { hasLogfmt: boolean; hasJSON: boolean } {
export function extractLogParserFromDataFrame(frame: DataFrame): {
hasLogfmt: boolean;
hasJSON: boolean;
hasPack: boolean;
} {
const lineField = frame.fields.find((field) => field.type === FieldType.string);
if (lineField == null) {
return { hasJSON: false, hasLogfmt: false };
return { hasJSON: false, hasLogfmt: false, hasPack: false };
}
const logLines: string[] = lineField.values.toArray();
let hasJSON = false;
let hasLogfmt = false;
let hasPack = false;
logLines.forEach((line) => {
if (isLogLineJSON(line)) {
hasJSON = true;
hasPack = isLogLinePacked(line);
}
if (isLogLineLogfmt(line)) {
hasLogfmt = true;
}
});
return { hasLogfmt, hasJSON };
return { hasLogfmt, hasJSON, hasPack };
}
export function extractLabelKeysFromDataFrame(frame: DataFrame): string[] {
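Finally, a minimal sketch of the detection flow with hand-written sample lines standing in for the frame's string field; the values are illustrative:

import { isLogLineJSON, isLogLineLogfmt, isLogLinePacked } from './lineParser';

// Illustrative stand-in for lineField.values.toArray().
const sampleLines = [
  '{"_entry": "GET /health 200", "pod": "web-1"}', // packed JSON -> hasJSON and hasPack
  'msg="GET /health 200" pod=web-1',               // logfmt -> hasLogfmt
];

let hasJSON = false;
let hasLogfmt = false;
let hasPack = false;

sampleLines.forEach((line) => {
  if (isLogLineJSON(line)) {
    hasJSON = true;
    hasPack = isLogLinePacked(line);
  }
  if (isLogLineLogfmt(line)) {
    hasLogfmt = true;
  }
});
// With these lines: hasJSON === true, hasLogfmt === true, hasPack === true.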