Loki Query Editor: Add support for new logfmt features (#74619)

* Loki autocomplete: add IN_LOGFMT situation for log queries

* Loki autocomplete: add IN_LOGFMT situation for metric queries

* Loki autocomplete: improve handling of trailing pipes and spaces

* Loki autocomplete: add logfmt arguments completion

* Loki autocomplete: add flags support to IN_LOGFMT

* Loki autocomplete: extend IN_LOGFMT situation with labels and flag

* Loki autocomplete: return logQuery in IN_LOGFMT situation

* Loki autocomplete: offer label completions when IN_LOGFMT

* Query utils: update parser detection method

* Validation: update test

* Loki autocomplete: improve IN_LOGFMT detection when in metric query

* Loki autocomplete: improve logfmt suggestions

* Loki autocomplete: improve logfmt suggestions in different scenarios

* Loki autocomplete situation: refactor resolvers to support multiple paths

* Situation: add test case

* Loki autocomplete: allow user to use 2 flags

* Situation: change flag to flags

* Remove console log

* Validation: import test parser

* Completions: better handling of trailing comma scenario

* Upgrade lezer-logql

* Revert temporary imports

* Loki Query Builder: Add support for new logfmt features (#74858)

* Query builder: add params to logfmt definition

* Logfmt operation: add default params

* Query builder: update deprecated JsonExpression

* Operation utils: update logfmt renderer

* Query builder: parse LogfmtParser

* Query builder: parse LogfmtExpressionParser

* Remove console log

* Remove unused variable

* Remove extra character from render

* Update unit tests

* Fix unit tests

* Operations: remove restParams from logfmt booleans

* Parsing: group cases

* Formatting

* Formatting

* Update modifyQuery

* LogContextProvider: update with parser changes

* LogContextProvider: remove unnecessary type castings

It takes more energy to write `as unknown as LokiQuery` than to write a refId.
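
A minimal sketch of the pattern now used throughout the test fixtures (the LokiQuery interface here is a trimmed stand-in for the real type, which carries additional optional fields):

// Trimmed stand-in for the real LokiQuery interface (the actual type has more optional fields).
interface LokiQuery {
  expr: string;
  refId: string;
}

// Before: the fixture only type-checked because of a forced double cast.
const castQuery = { expr: '{bar="baz"} | logfmt' } as unknown as LokiQuery;

// After: supplying the required refId lets the object type-check as a LokiQuery directly.
const typedQuery: LokiQuery = { expr: '{bar="baz"} | logfmt', refId: 'A' };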

* Formatting

* Situation: use charAt instead of substring with endsWith

* Situation: explain logfmt suggestions

* Logfmt: improve flag suggestions

* Remove console log

* Completions: update test
Author: Matias Chomicki
Date: 2023-09-22 11:34:17 +02:00
Committed by: GitHub
Parent: c358135a63
Commit: 91ed2a6afe
18 changed files with 959 additions and 162 deletions
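
Before the diff, a hedged sketch of what the new logfmt support renders. The helper name renderLogfmt is illustrative only; the actual logic is the logfmt branch added to pipelineRenderer in operationUtils.ts below, with operation params ordered [strict, keepEmpty, ...labels]:

// Illustrative helper (hypothetical name) mirroring the logfmt case of pipelineRenderer.
function renderLogfmt(innerExpr: string, params: Array<string | number | boolean>): string {
  const [strict = false, keepEmpty = false, ...labels] = params;
  return `${innerExpr} | logfmt${strict ? ' --strict' : ''}${keepEmpty ? ' --keep-empty' : ''} ${labels.join(', ')}`.trim();
}

renderLogfmt('{app="grafana"}', [true, false]);
// => '{app="grafana"} | logfmt --strict'
renderLogfmt('{app="grafana"}', [true, true, 'label1', 'label2="renamed"']);
// => '{app="grafana"} | logfmt --strict --keep-empty label1, label2="renamed"'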

View File

@@ -247,7 +247,7 @@
"@grafana/faro-web-sdk": "1.1.2", "@grafana/faro-web-sdk": "1.1.2",
"@grafana/flamegraph": "workspace:*", "@grafana/flamegraph": "workspace:*",
"@grafana/google-sdk": "0.1.1", "@grafana/google-sdk": "0.1.1",
"@grafana/lezer-logql": "0.1.11", "@grafana/lezer-logql": "0.2.0",
"@grafana/lezer-traceql": "0.0.6", "@grafana/lezer-traceql": "0.0.6",
"@grafana/monaco-logql": "^0.0.7", "@grafana/monaco-logql": "^0.0.7",
"@grafana/runtime": "workspace:*", "@grafana/runtime": "workspace:*",

View File

@@ -77,12 +77,13 @@ describe('LogContextProvider', () => {
}, },
{ {
expr: '{bar="baz"}', expr: '{bar="baz"}',
} as LokiQuery refId: 'A',
}
); );
expect(logContextProvider.getInitContextFilters).toBeCalled(); expect(logContextProvider.getInitContextFilters).toBeCalled();
expect(logContextProvider.getInitContextFilters).toHaveBeenCalledWith( expect(logContextProvider.getInitContextFilters).toHaveBeenCalledWith(
{ bar: 'baz', foo: 'uniqueParsedLabel', xyz: 'abc' }, { bar: 'baz', foo: 'uniqueParsedLabel', xyz: 'abc' },
{ expr: '{bar="baz"}' } { expr: '{bar="baz"}', refId: 'A' }
); );
expect(logContextProvider.appliedContextFilters).toHaveLength(1); expect(logContextProvider.appliedContextFilters).toHaveLength(1);
}); });
@@ -135,7 +136,8 @@ describe('LogContextProvider', () => {
describe('query with no parser', () => { describe('query with no parser', () => {
const query = { const query = {
expr: '{bar="baz"}', expr: '{bar="baz"}',
} as LokiQuery; refId: 'A',
};
it('returns empty expression if no appliedContextFilters', async () => { it('returns empty expression if no appliedContextFilters', async () => {
logContextProvider.appliedContextFilters = []; logContextProvider.appliedContextFilters = [];
const result = await logContextProvider.prepareLogRowContextQueryTarget( const result = await logContextProvider.prepareLogRowContextQueryTarget(
@@ -176,7 +178,8 @@ describe('LogContextProvider', () => {
LogRowContextQueryDirection.Backward, LogRowContextQueryDirection.Backward,
{ {
expr: '{bar="baz"} | logfmt', expr: '{bar="baz"} | logfmt',
} as LokiQuery refId: 'A',
}
); );
expect(contextQuery.query.expr).toEqual('{bar="baz",xyz="abc"} | logfmt'); expect(contextQuery.query.expr).toEqual('{bar="baz",xyz="abc"} | logfmt');
@@ -194,7 +197,8 @@ describe('LogContextProvider', () => {
LogRowContextQueryDirection.Backward, LogRowContextQueryDirection.Backward,
{ {
expr: '{bar="baz"} | logfmt', expr: '{bar="baz"} | logfmt',
} as LokiQuery refId: 'A',
}
); );
expect(contextQuery.query.expr).toEqual('{bar="baz",xyz="abc"} | logfmt | foo=`uniqueParsedLabel`'); expect(contextQuery.query.expr).toEqual('{bar="baz",xyz="abc"} | logfmt | foo=`uniqueParsedLabel`');
@@ -212,7 +216,8 @@ describe('LogContextProvider', () => {
LogRowContextQueryDirection.Backward, LogRowContextQueryDirection.Backward,
{ {
expr: '{bar="baz"} | logfmt | json', expr: '{bar="baz"} | logfmt | json',
} as unknown as LokiQuery refId: 'A',
}
); );
expect(contextQuery.query.expr).toEqual(`{bar="baz"}`); expect(contextQuery.query.expr).toEqual(`{bar="baz"}`);
@@ -225,8 +230,9 @@ describe('LogContextProvider', () => {
10, 10,
LogRowContextQueryDirection.Backward, LogRowContextQueryDirection.Backward,
{ {
expr: '{bar="baz"} | logfmt | line_format = "foo"', expr: '{bar="baz"} | logfmt | line_format "foo"',
} as unknown as LokiQuery refId: 'A',
}
); );
expect(contextQuery.query.expr).toEqual(`{bar="baz"} | logfmt`); expect(contextQuery.query.expr).toEqual(`{bar="baz"} | logfmt`);
@@ -240,8 +246,9 @@ describe('LogContextProvider', () => {
10, 10,
LogRowContextQueryDirection.Backward, LogRowContextQueryDirection.Backward,
{ {
expr: '{bar="baz"} | logfmt | line_format = "foo"', expr: '{bar="baz"} | logfmt | line_format "foo"',
} as unknown as LokiQuery refId: 'A',
}
); );
expect(contextQuery.query.expr).toEqual(`{bar="baz"} | logfmt`); expect(contextQuery.query.expr).toEqual(`{bar="baz"} | logfmt`);
@@ -255,11 +262,12 @@ describe('LogContextProvider', () => {
10, 10,
LogRowContextQueryDirection.Backward, LogRowContextQueryDirection.Backward,
{ {
expr: '{bar="baz"} | logfmt | line_format = "foo"', expr: '{bar="baz"} | logfmt | line_format "foo"',
} as unknown as LokiQuery refId: 'A',
}
); );
expect(contextQuery.query.expr).toEqual(`{bar="baz"} | logfmt | line_format = "foo"`); expect(contextQuery.query.expr).toEqual(`{bar="baz"} | logfmt | line_format "foo"`);
}); });
it('should not apply line filters if flag is set', async () => { it('should not apply line filters if flag is set', async () => {
@@ -270,44 +278,48 @@ describe('LogContextProvider', () => {
10, 10,
LogRowContextQueryDirection.Backward, LogRowContextQueryDirection.Backward,
{ {
expr: '{bar="baz"} | logfmt | line_format = "foo" |= "bar"', expr: '{bar="baz"} | logfmt | line_format "foo" |= "bar"',
} as unknown as LokiQuery refId: 'A',
}
); );
expect(contextQuery.query.expr).toEqual(`{bar="baz"} | logfmt | line_format = "foo"`); expect(contextQuery.query.expr).toEqual(`{bar="baz"} | logfmt | line_format "foo"`);
contextQuery = await logContextProvider.prepareLogRowContextQueryTarget( contextQuery = await logContextProvider.prepareLogRowContextQueryTarget(
defaultLogRow, defaultLogRow,
10, 10,
LogRowContextQueryDirection.Backward, LogRowContextQueryDirection.Backward,
{ {
expr: '{bar="baz"} | logfmt | line_format = "foo" |~ "bar"', expr: '{bar="baz"} | logfmt | line_format "foo" |~ "bar"',
} as unknown as LokiQuery refId: 'A',
}
); );
expect(contextQuery.query.expr).toEqual(`{bar="baz"} | logfmt | line_format = "foo"`); expect(contextQuery.query.expr).toEqual(`{bar="baz"} | logfmt | line_format "foo"`);
contextQuery = await logContextProvider.prepareLogRowContextQueryTarget( contextQuery = await logContextProvider.prepareLogRowContextQueryTarget(
defaultLogRow, defaultLogRow,
10, 10,
LogRowContextQueryDirection.Backward, LogRowContextQueryDirection.Backward,
{ {
expr: '{bar="baz"} | logfmt | line_format = "foo" !~ "bar"', expr: '{bar="baz"} | logfmt | line_format "foo" !~ "bar"',
} as unknown as LokiQuery refId: 'A',
}
); );
expect(contextQuery.query.expr).toEqual(`{bar="baz"} | logfmt | line_format = "foo"`); expect(contextQuery.query.expr).toEqual(`{bar="baz"} | logfmt | line_format "foo"`);
contextQuery = await logContextProvider.prepareLogRowContextQueryTarget( contextQuery = await logContextProvider.prepareLogRowContextQueryTarget(
defaultLogRow, defaultLogRow,
10, 10,
LogRowContextQueryDirection.Backward, LogRowContextQueryDirection.Backward,
{ {
expr: '{bar="baz"} | logfmt | line_format = "foo" != "bar"', expr: '{bar="baz"} | logfmt | line_format "foo" != "bar"',
} as unknown as LokiQuery refId: 'A',
}
); );
expect(contextQuery.query.expr).toEqual(`{bar="baz"} | logfmt | line_format = "foo"`); expect(contextQuery.query.expr).toEqual(`{bar="baz"} | logfmt | line_format "foo"`);
}); });
it('should not apply line filters if nested between two operations', async () => { it('should not apply line filters if nested between two operations', async () => {
@@ -319,7 +331,8 @@ describe('LogContextProvider', () => {
LogRowContextQueryDirection.Backward, LogRowContextQueryDirection.Backward,
{ {
expr: '{bar="baz"} | logfmt | line_format "foo" |= "bar" | label_format a="baz"', expr: '{bar="baz"} | logfmt | line_format "foo" |= "bar" | label_format a="baz"',
} as unknown as LokiQuery refId: 'A',
}
); );
expect(contextQuery.query.expr).toEqual(`{bar="baz"} | logfmt | line_format "foo" | label_format a="baz"`); expect(contextQuery.query.expr).toEqual(`{bar="baz"} | logfmt | line_format "foo" | label_format a="baz"`);
@@ -334,7 +347,8 @@ describe('LogContextProvider', () => {
LogRowContextQueryDirection.Backward, LogRowContextQueryDirection.Backward,
{ {
expr: '{bar="baz"} | logfmt | line_format "foo" | bar > 1 | label_format a="baz"', expr: '{bar="baz"} | logfmt | line_format "foo" | bar > 1 | label_format a="baz"',
} as unknown as LokiQuery refId: 'A',
}
); );
expect(contextQuery.query.expr).toEqual(`{bar="baz"} | logfmt | line_format "foo" | label_format a="baz"`); expect(contextQuery.query.expr).toEqual(`{bar="baz"} | logfmt | line_format "foo" | label_format a="baz"`);
@@ -349,7 +363,8 @@ describe('LogContextProvider', () => {
LogRowContextQueryDirection.Backward, LogRowContextQueryDirection.Backward,
{ {
expr: '{bar="baz"} | logfmt | line_format "foo" | json | label_format a="baz"', expr: '{bar="baz"} | logfmt | line_format "foo" | json | label_format a="baz"',
} as unknown as LokiQuery refId: 'A',
}
); );
expect(contextQuery.query.expr).toEqual(`{bar="baz"}`); expect(contextQuery.query.expr).toEqual(`{bar="baz"}`);
@@ -358,9 +373,10 @@ describe('LogContextProvider', () => {
describe('getInitContextFiltersFromLabels', () => { describe('getInitContextFiltersFromLabels', () => {
describe('query with no parser', () => { describe('query with no parser', () => {
const queryWithoutParser = { const queryWithoutParser: LokiQuery = {
expr: '{bar="baz"}', expr: '{bar="baz"}',
} as LokiQuery; refId: 'A',
};
it('should correctly create contextFilters', async () => { it('should correctly create contextFilters', async () => {
const filters = await logContextProvider.getInitContextFilters(defaultLogRow.labels, queryWithoutParser); const filters = await logContextProvider.getInitContextFilters(defaultLogRow.labels, queryWithoutParser);
@@ -383,9 +399,10 @@ describe('LogContextProvider', () => {
}); });
describe('query with parser', () => { describe('query with parser', () => {
const queryWithParser = { const queryWithParser: LokiQuery = {
expr: '{bar="baz"} | logfmt', expr: '{bar="baz"} | logfmt',
} as LokiQuery; refId: 'A',
};
it('should correctly create contextFilters', async () => { it('should correctly create contextFilters', async () => {
const filters = await logContextProvider.getInitContextFilters(defaultLogRow.labels, queryWithParser); const filters = await logContextProvider.getInitContextFilters(defaultLogRow.labels, queryWithParser);
@@ -408,9 +425,10 @@ describe('LogContextProvider', () => {
}); });
describe('with preserved labels', () => { describe('with preserved labels', () => {
const queryWithParser = { const queryWithParser: LokiQuery = {
expr: '{bar="baz"} | logfmt', expr: '{bar="baz"} | logfmt',
} as LokiQuery; refId: 'A',
};
it('should correctly apply preserved labels', async () => { it('should correctly apply preserved labels', async () => {
window.localStorage.setItem( window.localStorage.setItem(
@@ -465,24 +483,24 @@ describe('LogContextProvider', () => {
describe('queryContainsValidPipelineStages', () => { describe('queryContainsValidPipelineStages', () => {
it('should return true if query contains a line_format stage', () => { it('should return true if query contains a line_format stage', () => {
expect( expect(
logContextProvider.queryContainsValidPipelineStages({ expr: '{foo="bar"} | line_format "foo"' } as LokiQuery) logContextProvider.queryContainsValidPipelineStages({ expr: '{foo="bar"} | line_format "foo"', refId: 'A' })
).toBe(true); ).toBe(true);
}); });
it('should return true if query contains a label_format stage', () => { it('should return true if query contains a label_format stage', () => {
expect( expect(
logContextProvider.queryContainsValidPipelineStages({ expr: '{foo="bar"} | label_format a="foo"' } as LokiQuery) logContextProvider.queryContainsValidPipelineStages({ expr: '{foo="bar"} | label_format a="foo"', refId: 'A' })
).toBe(true); ).toBe(true);
}); });
it('should return false if query contains a parser', () => { it('should return false if query contains a parser', () => {
expect(logContextProvider.queryContainsValidPipelineStages({ expr: '{foo="bar"} | json' } as LokiQuery)).toBe( expect(logContextProvider.queryContainsValidPipelineStages({ expr: '{foo="bar"} | json', refId: 'A' })).toBe(
false false
); );
}); });
it('should return false if query contains a line filter', () => { it('should return false if query contains a line filter', () => {
expect(logContextProvider.queryContainsValidPipelineStages({ expr: '{foo="bar"} |= "test"' } as LokiQuery)).toBe( expect(logContextProvider.queryContainsValidPipelineStages({ expr: '{foo="bar"} |= "test"', refId: 'A' })).toBe(
false false
); );
}); });
@@ -491,7 +509,8 @@ describe('LogContextProvider', () => {
expect( expect(
logContextProvider.queryContainsValidPipelineStages({ logContextProvider.queryContainsValidPipelineStages({
expr: '{foo="bar"} |= "test" | label_format a="foo"', expr: '{foo="bar"} |= "test" | label_format a="foo"',
} as LokiQuery) refId: 'A',
})
).toBe(true); ).toBe(true);
}); });
}); });

View File

@@ -14,7 +14,7 @@ import {
LogRowContextQueryDirection, LogRowContextQueryDirection,
LogRowContextOptions, LogRowContextOptions,
} from '@grafana/data'; } from '@grafana/data';
import { LabelParser, LabelFilter, LineFilters, PipelineStage } from '@grafana/lezer-logql'; import { LabelParser, LabelFilter, LineFilters, PipelineStage, Logfmt, Json } from '@grafana/lezer-logql';
import { Labels } from '@grafana/schema'; import { Labels } from '@grafana/schema';
import { notifyApp } from 'app/core/actions'; import { notifyApp } from 'app/core/actions';
import { createSuccessNotification } from 'app/core/copy/appNotification'; import { createSuccessNotification } from 'app/core/copy/appNotification';
@@ -249,6 +249,8 @@ export class LogContextProvider {
const allNodePositions = getNodePositionsFromQuery(origExpr, [ const allNodePositions = getNodePositionsFromQuery(origExpr, [
PipelineStage, PipelineStage,
LabelParser, LabelParser,
Logfmt,
Json,
LineFilters, LineFilters,
LabelFilter, LabelFilter,
]); ]);

View File

@@ -469,7 +469,7 @@ describe('getAfterSelectorCompletions', () => {
expect(parsersInSuggestions).toStrictEqual(['unpack (detected)', 'json', 'logfmt', 'pattern', 'regexp']); expect(parsersInSuggestions).toStrictEqual(['unpack (detected)', 'json', 'logfmt', 'pattern', 'regexp']);
}); });
it('should not show detected parser if query already has parser', async () => { it('should not show the detected parser if query already has parser', async () => {
const suggestions = await getAfterSelectorCompletions( const suggestions = await getAfterSelectorCompletions(
`{job="grafana"} | logfmt | `, `{job="grafana"} | logfmt | `,
true, true,
@@ -511,3 +511,346 @@ describe('getAfterSelectorCompletions', () => {
expect(labelFiltersInSuggestions.length).toBe(0); expect(labelFiltersInSuggestions.length).toBe(0);
}); });
}); });
describe('IN_LOGFMT completions', () => {
let datasource: LokiDatasource;
let languageProvider: LokiLanguageProvider;
let completionProvider: CompletionDataProvider;
beforeEach(() => {
datasource = createLokiDatasource();
languageProvider = new LokiLanguageProvider(datasource);
completionProvider = new CompletionDataProvider(languageProvider, {
current: history,
});
jest.spyOn(completionProvider, 'getParserAndLabelKeys').mockResolvedValue({
extractedLabelKeys: ['label1', 'label2'],
unwrapLabelKeys: [],
hasJSON: true,
hasLogfmt: false,
hasPack: false,
});
});
it('autocompleting logfmt should return flags, parsers, pipe operations, and labels', async () => {
const situation: Situation = {
type: 'IN_LOGFMT',
logQuery: `{job="grafana"} | logfmt`,
flags: false,
otherLabels: [],
};
expect(await getCompletions(situation, completionProvider)).toMatchInlineSnapshot(`
[
{
"documentation": "Strict parsing. The logfmt parser stops scanning the log line and returns early with an error when it encounters any poorly formatted key/value pair.",
"insertText": "--strict",
"label": "--strict",
"type": "FUNCTION",
},
{
"documentation": "Retain standalone keys with empty value. The logfmt parser retains standalone keys (keys without a value) as labels with value set to empty string.",
"insertText": "--keep-empty",
"label": "--keep-empty",
"type": "FUNCTION",
},
{
"documentation": "Operator docs",
"insertText": "| json",
"label": "json",
"type": "PARSER",
},
{
"documentation": "Operator docs",
"insertText": "| logfmt",
"label": "logfmt",
"type": "PARSER",
},
{
"documentation": "Operator docs",
"insertText": "| pattern",
"label": "pattern",
"type": "PARSER",
},
{
"documentation": "Operator docs",
"insertText": "| regexp",
"label": "regexp",
"type": "PARSER",
},
{
"documentation": "Operator docs",
"insertText": "| unpack",
"label": "unpack",
"type": "PARSER",
},
{
"documentation": "Operator docs",
"insertText": "| line_format "{{.$0}}"",
"isSnippet": true,
"label": "line_format",
"type": "PIPE_OPERATION",
},
{
"documentation": "Operator docs",
"insertText": "| label_format",
"isSnippet": true,
"label": "label_format",
"type": "PIPE_OPERATION",
},
{
"documentation": "Operator docs",
"insertText": "| unwrap",
"label": "unwrap",
"type": "PIPE_OPERATION",
},
{
"documentation": "Operator docs",
"insertText": "| decolorize",
"label": "decolorize",
"type": "PIPE_OPERATION",
},
{
"documentation": "Operator docs",
"insertText": "| drop",
"label": "drop",
"type": "PIPE_OPERATION",
},
{
"documentation": "Operator docs",
"insertText": "| keep",
"label": "keep",
"type": "PIPE_OPERATION",
},
{
"insertText": "label1",
"label": "label1",
"triggerOnInsert": false,
"type": "LABEL_NAME",
},
{
"insertText": "label2",
"label": "label2",
"triggerOnInsert": false,
"type": "LABEL_NAME",
},
]
`);
});
it('autocompleting logfmt with flags should return parser, pipe operations, and labels', async () => {
const situation: Situation = {
type: 'IN_LOGFMT',
logQuery: `{job="grafana"} | logfmt`,
flags: true,
otherLabels: [],
};
expect(await getCompletions(situation, completionProvider)).toMatchInlineSnapshot(`
[
{
"documentation": "Operator docs",
"insertText": "| json",
"label": "json",
"type": "PARSER",
},
{
"documentation": "Operator docs",
"insertText": "| logfmt",
"label": "logfmt",
"type": "PARSER",
},
{
"documentation": "Operator docs",
"insertText": "| pattern",
"label": "pattern",
"type": "PARSER",
},
{
"documentation": "Operator docs",
"insertText": "| regexp",
"label": "regexp",
"type": "PARSER",
},
{
"documentation": "Operator docs",
"insertText": "| unpack",
"label": "unpack",
"type": "PARSER",
},
{
"documentation": "Operator docs",
"insertText": "| line_format "{{.$0}}"",
"isSnippet": true,
"label": "line_format",
"type": "PIPE_OPERATION",
},
{
"documentation": "Operator docs",
"insertText": "| label_format",
"isSnippet": true,
"label": "label_format",
"type": "PIPE_OPERATION",
},
{
"documentation": "Operator docs",
"insertText": "| unwrap",
"label": "unwrap",
"type": "PIPE_OPERATION",
},
{
"documentation": "Operator docs",
"insertText": "| decolorize",
"label": "decolorize",
"type": "PIPE_OPERATION",
},
{
"documentation": "Operator docs",
"insertText": "| drop",
"label": "drop",
"type": "PIPE_OPERATION",
},
{
"documentation": "Operator docs",
"insertText": "| keep",
"label": "keep",
"type": "PIPE_OPERATION",
},
{
"insertText": "label1",
"label": "label1",
"triggerOnInsert": false,
"type": "LABEL_NAME",
},
{
"insertText": "label2",
"label": "label2",
"triggerOnInsert": false,
"type": "LABEL_NAME",
},
]
`);
});
it('autocompleting logfmt should exclude already used labels from the suggestions', async () => {
const situation: Situation = {
type: 'IN_LOGFMT',
logQuery: `{job="grafana"} | logfmt`,
flags: true,
otherLabels: ['label1', 'label2'],
};
expect(await getCompletions(situation, completionProvider)).toMatchInlineSnapshot(`
[
{
"documentation": "Operator docs",
"insertText": "| json",
"label": "json",
"type": "PARSER",
},
{
"documentation": "Operator docs",
"insertText": "| logfmt",
"label": "logfmt",
"type": "PARSER",
},
{
"documentation": "Operator docs",
"insertText": "| pattern",
"label": "pattern",
"type": "PARSER",
},
{
"documentation": "Operator docs",
"insertText": "| regexp",
"label": "regexp",
"type": "PARSER",
},
{
"documentation": "Operator docs",
"insertText": "| unpack",
"label": "unpack",
"type": "PARSER",
},
{
"documentation": "Operator docs",
"insertText": "| line_format "{{.$0}}"",
"isSnippet": true,
"label": "line_format",
"type": "PIPE_OPERATION",
},
{
"documentation": "Operator docs",
"insertText": "| label_format",
"isSnippet": true,
"label": "label_format",
"type": "PIPE_OPERATION",
},
{
"documentation": "Operator docs",
"insertText": "| unwrap",
"label": "unwrap",
"type": "PIPE_OPERATION",
},
{
"documentation": "Operator docs",
"insertText": "| decolorize",
"label": "decolorize",
"type": "PIPE_OPERATION",
},
{
"documentation": "Operator docs",
"insertText": "| drop",
"label": "drop",
"type": "PIPE_OPERATION",
},
{
"documentation": "Operator docs",
"insertText": "| keep",
"label": "keep",
"type": "PIPE_OPERATION",
},
]
`);
});
it('autocompleting logfmt without flags should only offer labels when the user has a trailing comma', async () => {
const situation: Situation = {
type: 'IN_LOGFMT',
logQuery: `{job="grafana"} | logfmt --strict label3,`,
flags: false,
otherLabels: ['label1'],
};
expect(await getCompletions(situation, completionProvider)).toMatchInlineSnapshot(`
[
{
"insertText": "label2",
"label": "label2",
"triggerOnInsert": false,
"type": "LABEL_NAME",
},
]
`);
});
it('autocompleting logfmt with flags should only offer labels when the user has a trailing comma', async () => {
const situation: Situation = {
type: 'IN_LOGFMT',
logQuery: `{job="grafana"} | logfmt --strict label3,`,
flags: true,
otherLabels: ['label1'],
};
expect(await getCompletions(situation, completionProvider)).toMatchInlineSnapshot(`
[
{
"insertText": "label2",
"label": "label2",
"triggerOnInsert": false,
"type": "LABEL_NAME",
},
]
`);
});
});

View File

@@ -98,6 +98,23 @@ const UNWRAP_FUNCTION_COMPLETIONS: Completion[] = [
}, },
]; ];
const LOGFMT_ARGUMENT_COMPLETIONS: Completion[] = [
{
type: 'FUNCTION',
label: '--strict',
documentation:
'Strict parsing. The logfmt parser stops scanning the log line and returns early with an error when it encounters any poorly formatted key/value pair.',
insertText: '--strict',
},
{
type: 'FUNCTION',
label: '--keep-empty',
documentation:
'Retain standalone keys with empty value. The logfmt parser retains standalone keys (keys without a value) as labels with value set to empty string.',
insertText: '--keep-empty',
},
];
const LINE_FILTER_COMPLETIONS = [ const LINE_FILTER_COMPLETIONS = [
{ {
operator: '|=', operator: '|=',
@@ -131,6 +148,55 @@ function getLineFilterCompletions(afterPipe: boolean): Completion[] {
); );
} }
function getPipeOperationsCompletions(prefix = ''): Completion[] {
const completions: Completion[] = [];
completions.push({
type: 'PIPE_OPERATION',
label: 'line_format',
insertText: `${prefix}line_format "{{.$0}}"`,
isSnippet: true,
documentation: explainOperator(LokiOperationId.LineFormat),
});
completions.push({
type: 'PIPE_OPERATION',
label: 'label_format',
insertText: `${prefix}label_format`,
isSnippet: true,
documentation: explainOperator(LokiOperationId.LabelFormat),
});
completions.push({
type: 'PIPE_OPERATION',
label: 'unwrap',
insertText: `${prefix}unwrap`,
documentation: explainOperator(LokiOperationId.Unwrap),
});
completions.push({
type: 'PIPE_OPERATION',
label: 'decolorize',
insertText: `${prefix}decolorize`,
documentation: explainOperator(LokiOperationId.Decolorize),
});
completions.push({
type: 'PIPE_OPERATION',
label: 'drop',
insertText: `${prefix}drop`,
documentation: explainOperator(LokiOperationId.Drop),
});
completions.push({
type: 'PIPE_OPERATION',
label: 'keep',
insertText: `${prefix}keep`,
documentation: explainOperator(LokiOperationId.Keep),
});
return completions;
}
async function getAllHistoryCompletions(dataProvider: CompletionDataProvider): Promise<Completion[]> { async function getAllHistoryCompletions(dataProvider: CompletionDataProvider): Promise<Completion[]> {
const history = await dataProvider.getHistory(); const history = await dataProvider.getHistory();
@@ -247,7 +313,8 @@ export async function getAfterSelectorCompletions(
const hasQueryParser = isQueryWithParser(query).queryWithParser; const hasQueryParser = isQueryWithParser(query).queryWithParser;
const prefix = `${hasSpace ? '' : ' '}${afterPipe ? '' : '| '}`; const prefix = `${hasSpace ? '' : ' '}${afterPipe ? '' : '| '}`;
const completions: Completion[] = await getParserCompletions(
const parserCompletions = await getParserCompletions(
prefix, prefix,
hasJSON, hasJSON,
hasLogfmt, hasLogfmt,
@@ -255,50 +322,9 @@ export async function getAfterSelectorCompletions(
extractedLabelKeys, extractedLabelKeys,
hasQueryParser hasQueryParser
); );
const pipeOperations = getPipeOperationsCompletions(prefix);
completions.push({ const completions = [...parserCompletions, ...pipeOperations];
type: 'PIPE_OPERATION',
label: 'line_format',
insertText: `${prefix}line_format "{{.$0}}"`,
isSnippet: true,
documentation: explainOperator(LokiOperationId.LineFormat),
});
completions.push({
type: 'PIPE_OPERATION',
label: 'label_format',
insertText: `${prefix}label_format`,
isSnippet: true,
documentation: explainOperator(LokiOperationId.LabelFormat),
});
completions.push({
type: 'PIPE_OPERATION',
label: 'unwrap',
insertText: `${prefix}unwrap`,
documentation: explainOperator(LokiOperationId.Unwrap),
});
completions.push({
type: 'PIPE_OPERATION',
label: 'decolorize',
insertText: `${prefix}decolorize`,
documentation: explainOperator(LokiOperationId.Decolorize),
});
completions.push({
type: 'PIPE_OPERATION',
label: 'drop',
insertText: `${prefix}drop`,
documentation: explainOperator(LokiOperationId.Drop),
});
completions.push({
type: 'PIPE_OPERATION',
label: 'keep',
insertText: `${prefix}keep`,
documentation: explainOperator(LokiOperationId.Keep),
});
// Let's show label options only if query has parser // Let's show label options only if query has parser
if (hasQueryParser) { if (hasQueryParser) {
@@ -322,6 +348,51 @@ export async function getAfterSelectorCompletions(
return [...lineFilters, ...completions]; return [...lineFilters, ...completions];
} }
export async function getLogfmtCompletions(
logQuery: string,
flags: boolean,
otherLabels: string[],
dataProvider: CompletionDataProvider
): Promise<Completion[]> {
const trailingComma = logQuery.trimEnd().endsWith(',');
if (trailingComma) {
// The user is typing a new label, so we remove the last comma
logQuery = trimEnd(logQuery, ', ');
}
const { extractedLabelKeys, hasJSON, hasLogfmt, hasPack } = await dataProvider.getParserAndLabelKeys(logQuery);
const hasQueryParser = isQueryWithParser(logQuery).queryWithParser;
let completions: Completion[] = [];
const parserCompletions = await getParserCompletions(
'| ',
hasJSON,
hasLogfmt,
hasPack,
extractedLabelKeys,
hasQueryParser
);
const pipeOperations = getPipeOperationsCompletions('| ');
if (!flags && !trailingComma) {
completions = [...completions, ...LOGFMT_ARGUMENT_COMPLETIONS, ...parserCompletions, ...pipeOperations];
} else if (!trailingComma) {
completions = [...completions, ...parserCompletions, ...pipeOperations];
}
const labelPrefix = otherLabels.length === 0 || trailingComma ? '' : ', ';
const labels = extractedLabelKeys.filter((label) => !otherLabels.includes(label));
const labelCompletions: Completion[] = labels.map((label) => ({
type: 'LABEL_NAME',
label,
insertText: labelPrefix + label,
triggerOnInsert: false,
}));
completions = [...completions, ...labelCompletions];
return completions;
}
async function getLabelValuesForMetricCompletions( async function getLabelValuesForMetricCompletions(
labelName: string, labelName: string,
betweenQuotes: boolean, betweenQuotes: boolean,
@@ -400,6 +471,8 @@ export async function getCompletions(
return [...FUNCTION_COMPLETIONS, ...AGGREGATION_COMPLETIONS]; return [...FUNCTION_COMPLETIONS, ...AGGREGATION_COMPLETIONS];
case 'AFTER_KEEP_AND_DROP': case 'AFTER_KEEP_AND_DROP':
return getAfterKeepAndDropCompletions(situation.logQuery, dataProvider); return getAfterKeepAndDropCompletions(situation.logQuery, dataProvider);
case 'IN_LOGFMT':
return getLogfmtCompletions(situation.logQuery, situation.flags, situation.otherLabels, dataProvider);
default: default:
throw new NeverCaseError(situation); throw new NeverCaseError(situation);
} }

View File

@@ -32,7 +32,7 @@ describe('situation', () => {
}); });
}); });
it('identifies EMPTY autocomplete situations', () => { it('identifies AT_ROOT autocomplete situations', () => {
assertSituation('s^', { assertSituation('s^', {
type: 'AT_ROOT', type: 'AT_ROOT',
}); });
@@ -84,13 +84,6 @@ describe('situation', () => {
logQuery: '{level="info"}', logQuery: '{level="info"}',
}); });
assertSituation('{level="info"} |= "a" | logfmt ^', {
type: 'AFTER_SELECTOR',
afterPipe: false,
hasSpace: true,
logQuery: '{level="info"} |= "a" | logfmt',
});
assertSituation('sum(count_over_time({place="luna"} | logfmt |^)) by (place)', { assertSituation('sum(count_over_time({place="luna"} | logfmt |^)) by (place)', {
type: 'AFTER_SELECTOR', type: 'AFTER_SELECTOR',
afterPipe: true, afterPipe: true,
@@ -99,6 +92,93 @@ describe('situation', () => {
}); });
}); });
it('identifies AFTER_LOGFMT autocomplete situations', () => {
assertSituation('{level="info"} | logfmt ^', {
type: 'IN_LOGFMT',
otherLabels: [],
flags: false,
logQuery: '{level="info"} | logfmt',
});
assertSituation('{level="info"} | logfmt --strict ^', {
type: 'IN_LOGFMT',
otherLabels: [],
flags: false,
logQuery: '{level="info"} | logfmt --strict',
});
assertSituation('{level="info"} | logfmt --strict --keep-empty^', {
type: 'IN_LOGFMT',
otherLabels: [],
flags: true,
logQuery: '{level="info"} | logfmt --strict --keep-empty',
});
assertSituation('{level="info"} | logfmt --strict label, label1="expression"^', {
type: 'IN_LOGFMT',
otherLabels: ['label', 'label1'],
flags: false,
logQuery: '{level="info"} | logfmt --strict label, label1="expression"',
});
assertSituation('{level="info"} | logfmt --strict label, label1="expression",^', {
type: 'IN_LOGFMT',
otherLabels: ['label', 'label1'],
flags: false,
logQuery: '{level="info"} | logfmt --strict label, label1="expression",',
});
assertSituation('count_over_time({level="info"} | logfmt ^', {
type: 'IN_LOGFMT',
otherLabels: [],
flags: false,
logQuery: '{level="info"} | logfmt',
});
assertSituation('count_over_time({level="info"} | logfmt ^)', {
type: 'IN_LOGFMT',
otherLabels: [],
flags: false,
logQuery: '{level="info"} | logfmt',
});
assertSituation('count_over_time({level="info"} | logfmt ^ [$__auto])', {
type: 'IN_LOGFMT',
otherLabels: [],
flags: false,
logQuery: '{level="info"} | logfmt',
});
assertSituation('count_over_time({level="info"} | logfmt --keep-empty^)', {
type: 'IN_LOGFMT',
otherLabels: [],
flags: false,
logQuery: '{level="info"} | logfmt --keep-empty',
});
assertSituation('count_over_time({level="info"} | logfmt --keep-empty label1, label2^)', {
type: 'IN_LOGFMT',
otherLabels: ['label1', 'label2'],
flags: false,
logQuery: '{level="info"} | logfmt --keep-empty label1, label2',
});
assertSituation('sum by (test) (count_over_time({level="info"} | logfmt ^))', {
type: 'IN_LOGFMT',
otherLabels: [],
flags: false,
logQuery: '{level="info"} | logfmt',
});
assertSituation('sum by (test) (count_over_time({level="info"} | logfmt label ^))', {
type: 'IN_LOGFMT',
otherLabels: ['label'],
flags: false,
logQuery: '{level="info"} | logfmt label',
});
assertSituation('sum by (test) (count_over_time({level="info"} | logfmt label,^))', {
type: 'IN_LOGFMT',
otherLabels: ['label'],
flags: false,
logQuery: '{level="info"} | logfmt label,',
});
assertSituation('sum by (test) (count_over_time({level="info"} | logfmt --strict ^))', {
type: 'IN_LOGFMT',
otherLabels: [],
flags: false,
logQuery: '{level="info"} | logfmt --strict',
});
});
it('identifies IN_AGGREGATION autocomplete situations', () => { it('identifies IN_AGGREGATION autocomplete situations', () => {
assertSituation('sum(^)', { assertSituation('sum(^)', {
type: 'IN_AGGREGATION', type: 'IN_AGGREGATION',

View File

@@ -14,6 +14,7 @@ import {
LogQL, LogQL,
LogRangeExpr, LogRangeExpr,
LogExpr, LogExpr,
Logfmt,
Identifier, Identifier,
Grouping, Grouping,
Expr, Expr,
@@ -24,9 +25,12 @@ import {
KeepLabelsExpr, KeepLabelsExpr,
DropLabels, DropLabels,
KeepLabels, KeepLabels,
ParserFlag,
LabelExtractionExpression,
LabelExtractionExpressionList,
} from '@grafana/lezer-logql'; } from '@grafana/lezer-logql';
import { getLogQueryFromMetricsQuery } from '../../../queryUtils'; import { getLogQueryFromMetricsQuery, getNodesFromQuery } from '../../../queryUtils';
type Direction = 'parent' | 'firstChild' | 'lastChild' | 'nextSibling'; type Direction = 'parent' | 'firstChild' | 'lastChild' | 'nextSibling';
type NodeType = number; type NodeType = number;
@@ -100,6 +104,12 @@ export type Situation =
| { | {
type: 'AT_ROOT'; type: 'AT_ROOT';
} }
| {
type: 'IN_LOGFMT';
otherLabels: string[];
flags: boolean;
logQuery: string;
}
| { | {
type: 'IN_RANGE'; type: 'IN_RANGE';
} }
@@ -136,7 +146,7 @@ export type Situation =
}; };
type Resolver = { type Resolver = {
path: NodeType[]; paths: NodeType[][];
fun: (node: SyntaxNode, text: string, pos: number) => Situation | null; fun: (node: SyntaxNode, text: string, pos: number) => Situation | null;
}; };
@@ -148,71 +158,72 @@ const ERROR_NODE_ID = 0;
const RESOLVERS: Resolver[] = [ const RESOLVERS: Resolver[] = [
{ {
path: [Selector], paths: [[Selector]],
fun: resolveSelector, fun: resolveSelector,
}, },
{ {
path: [ERROR_NODE_ID, Matchers, Selector], paths: [[ERROR_NODE_ID, Matchers, Selector]],
fun: resolveSelector, fun: resolveSelector,
}, },
{ {
path: [LogQL], paths: [
[LogQL],
[RangeAggregationExpr],
[ERROR_NODE_ID, LogRangeExpr, RangeAggregationExpr],
[ERROR_NODE_ID, LabelExtractionExpressionList],
[LogRangeExpr],
[ERROR_NODE_ID, LabelExtractionExpressionList],
[LabelExtractionExpressionList],
],
fun: resolveLogfmtParser,
},
{
paths: [[LogQL]],
fun: resolveTopLevel, fun: resolveTopLevel,
}, },
{ {
path: [String, Matcher], paths: [[String, Matcher]],
fun: resolveMatcher, fun: resolveMatcher,
}, },
{ {
path: [Grouping], paths: [[Grouping]],
fun: resolveLabelsForGrouping, fun: resolveLabelsForGrouping,
}, },
{ {
path: [LogRangeExpr], paths: [[LogRangeExpr]],
fun: resolveLogRange, fun: resolveLogRange,
}, },
{ {
path: [ERROR_NODE_ID, Matcher], paths: [[ERROR_NODE_ID, Matcher]],
fun: resolveMatcher, fun: resolveMatcher,
}, },
{ {
path: [ERROR_NODE_ID, Range], paths: [[ERROR_NODE_ID, Range]],
fun: resolveDurations, fun: resolveDurations,
}, },
{ {
path: [ERROR_NODE_ID, LogRangeExpr], paths: [[ERROR_NODE_ID, LogRangeExpr]],
fun: resolveLogRangeFromError, fun: resolveLogRangeFromError,
}, },
{ {
path: [ERROR_NODE_ID, LiteralExpr, MetricExpr, VectorAggregationExpr], paths: [[ERROR_NODE_ID, LiteralExpr, MetricExpr, VectorAggregationExpr]],
fun: () => ({ type: 'IN_AGGREGATION' }), fun: () => ({ type: 'IN_AGGREGATION' }),
}, },
{ {
path: [ERROR_NODE_ID, PipelineStage, PipelineExpr], paths: [[ERROR_NODE_ID, PipelineStage, PipelineExpr]],
fun: resolvePipeError, fun: resolvePipeError,
}, },
{ {
path: [ERROR_NODE_ID, UnwrapExpr], paths: [[ERROR_NODE_ID, UnwrapExpr], [UnwrapExpr]],
fun: resolveAfterUnwrap, fun: resolveAfterUnwrap,
}, },
{ {
path: [UnwrapExpr], paths: [
fun: resolveAfterUnwrap, [ERROR_NODE_ID, DropLabelsExpr],
}, [ERROR_NODE_ID, DropLabels],
{ [ERROR_NODE_ID, KeepLabelsExpr],
path: [ERROR_NODE_ID, DropLabelsExpr], [ERROR_NODE_ID, KeepLabels],
fun: resolveAfterKeepAndDrop, ],
},
{
path: [ERROR_NODE_ID, DropLabels],
fun: resolveAfterKeepAndDrop,
},
{
path: [ERROR_NODE_ID, KeepLabelsExpr],
fun: resolveAfterKeepAndDrop,
},
{
path: [ERROR_NODE_ID, KeepLabels],
fun: resolveAfterKeepAndDrop, fun: resolveAfterKeepAndDrop,
}, },
]; ];
@@ -413,6 +424,51 @@ function resolveMatcher(node: SyntaxNode, text: string, pos: number): Situation
}; };
} }
function resolveLogfmtParser(_: SyntaxNode, text: string, cursorPosition: number): Situation | null {
// We want to know if the cursor is after a log query with a logfmt parser.
// E.g. `{x="y"} | logfmt ^`
const tree = parser.parse(text);
// Adjust the cursor position if there are spaces at the end of the text.
const trimRightTextLen = text.substring(0, cursorPosition).trimEnd().length;
const position = trimRightTextLen < cursorPosition ? trimRightTextLen : cursorPosition;
const cursor = tree.cursorAt(position);
// Check if the user cursor is in any node that requires logfmt suggestions.
const expectedNodes = [Logfmt, ParserFlag, LabelExtractionExpression, LabelExtractionExpressionList];
let inLogfmt = false;
do {
const { node } = cursor;
if (!expectedNodes.includes(node.type.id)) {
continue;
}
if (cursor.from <= position && cursor.to >= position) {
inLogfmt = true;
break;
}
} while (cursor.next());
if (!inLogfmt) {
return null;
}
const flags = getNodesFromQuery(text, [ParserFlag]).length > 1;
const labelNodes = getNodesFromQuery(text, [LabelExtractionExpression]);
const otherLabels = labelNodes
.map((label: SyntaxNode) => label.getChild(Identifier))
.filter((label: SyntaxNode | null): label is SyntaxNode => label !== null)
.map((label: SyntaxNode) => getNodeText(label, text));
return {
type: 'IN_LOGFMT',
otherLabels,
flags,
logQuery: getLogQueryFromMetricsQuery(text).trim(),
};
}
function resolveTopLevel(node: SyntaxNode, text: string, pos: number): Situation | null { function resolveTopLevel(node: SyntaxNode, text: string, pos: number): Situation | null {
// we try a couply specific paths here. // we try a couply specific paths here.
// `{x="y"}` situation, with the cursor at the end // `{x="y"}` situation, with the cursor at the end
@@ -451,7 +507,10 @@ function resolveDurations(node: SyntaxNode, text: string, pos: number): Situatio
} }
function resolveLogRange(node: SyntaxNode, text: string, pos: number): Situation | null { function resolveLogRange(node: SyntaxNode, text: string, pos: number): Situation | null {
return resolveLogOrLogRange(node, text, pos, false); const partialQuery = text.substring(0, pos).trimEnd();
const afterPipe = partialQuery.endsWith('|');
return resolveLogOrLogRange(node, text, pos, afterPipe);
} }
function resolveLogRangeFromError(node: SyntaxNode, text: string, pos: number): Situation | null { function resolveLogRangeFromError(node: SyntaxNode, text: string, pos: number): Situation | null {
@@ -460,7 +519,10 @@ function resolveLogRangeFromError(node: SyntaxNode, text: string, pos: number):
return null; return null;
} }
return resolveLogOrLogRange(parent, text, pos, false); const partialQuery = text.substring(0, pos).trimEnd();
const afterPipe = partialQuery.endsWith('|');
return resolveLogOrLogRange(parent, text, pos, afterPipe);
} }
function resolveLogOrLogRange(node: SyntaxNode, text: string, pos: number, afterPipe: boolean): Situation | null { function resolveLogOrLogRange(node: SyntaxNode, text: string, pos: number, afterPipe: boolean): Situation | null {
@@ -476,7 +538,7 @@ function resolveLogOrLogRange(node: SyntaxNode, text: string, pos: number, after
return { return {
type: 'AFTER_SELECTOR', type: 'AFTER_SELECTOR',
afterPipe, afterPipe,
hasSpace: text.endsWith(' '), hasSpace: text.charAt(pos - 1) === ' ',
logQuery: getLogQueryFromMetricsQuery(text).trim(), logQuery: getLogQueryFromMetricsQuery(text).trim(),
}; };
} }
@@ -594,8 +656,13 @@ export function getSituation(text: string, pos: number): Situation | null {
} }
for (let resolver of RESOLVERS) { for (let resolver of RESOLVERS) {
if (isPathMatch(resolver.path, ids)) { for (let path of resolver.paths) {
return resolver.fun(currentNode, text, pos); if (isPathMatch(path, ids)) {
const situation = resolver.fun(currentNode, text, pos);
if (situation) {
return situation;
}
}
} }
} }
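
For orientation, a hedged end-to-end sketch of how the new IN_LOGFMT situation feeds completions, using the getSituation and getCompletions exports touched above (imports and the completion data provider setup are omitted; expected values follow the unit tests in this commit):

const text = '{job="grafana"} | logfmt --strict ';
const situation = getSituation(text, text.length);
// => { type: 'IN_LOGFMT', otherLabels: [], flags: false, logQuery: '{job="grafana"} | logfmt --strict' }
// (flags only flips to true once two parser flags are present)

// Passing the situation to getCompletions returns flag, parser, pipe-operation,
// and extracted-label suggestions, as captured by the inline snapshots above.
const completions = await getCompletions(situation!, completionDataProvider);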

View File

@@ -85,7 +85,7 @@ describe('Monaco Query Validation', () => {
{place="luna"} {place="luna"}
# this is a comment # this is a comment
| |
logfmt fail unpack fail
|= "a"`; |= "a"`;
const queryLines = query.split('\n'); const queryLines = query.split('\n');
expect(validateQuery(query, query, queryLines)).toEqual([ expect(validateQuery(query, query, queryLines)).toEqual([

View File

@@ -3,7 +3,6 @@ import { sortBy } from 'lodash';
import { import {
Identifier, Identifier,
JsonExpressionParser,
LabelFilter, LabelFilter,
LabelParser, LabelParser,
LineComment, LineComment,
@@ -17,6 +16,9 @@ import {
UnwrapExpr, UnwrapExpr,
String, String,
PipelineStage, PipelineStage,
LogfmtParser,
JsonExpressionParser,
LogfmtExpressionParser,
Expr, Expr,
} from '@grafana/lezer-logql'; } from '@grafana/lezer-logql';
@@ -315,9 +317,10 @@ function getMatcherInStreamPositions(query: string): NodePosition[] {
export function getParserPositions(query: string): NodePosition[] { export function getParserPositions(query: string): NodePosition[] {
const tree = parser.parse(query); const tree = parser.parse(query);
const positions: NodePosition[] = []; const positions: NodePosition[] = [];
const parserNodeTypes = [LabelParser, JsonExpressionParser, LogfmtParser, LogfmtExpressionParser];
tree.iterate({ tree.iterate({
enter: ({ type, node }): false | void => { enter: ({ type, node }): false | void => {
if (type.id === LabelParser || type.id === JsonExpressionParser) { if (parserNodeTypes.includes(type.id)) {
positions.push(NodePosition.fromNode(node)); positions.push(NodePosition.fromNode(node));
return false; return false;
} }

View File

@@ -342,6 +342,26 @@ describe('getParserFromQuery', () => {
parser parser
); );
}); });
it('supports json parser with arguments', () => {
// Redundant, but gives us a baseline
expect(getParserFromQuery('{job="grafana"} | json')).toBe('json');
expect(getParserFromQuery('{job="grafana"} | json field="otherField"')).toBe('json');
expect(getParserFromQuery('{job="grafana"} | json field="otherField", label="field2"')).toBe('json');
});
it('supports logfmt parser with arguments and flags', () => {
// Redundant, but gives us a baseline
expect(getParserFromQuery('{job="grafana"} | logfmt')).toBe('logfmt');
expect(getParserFromQuery('{job="grafana"} | logfmt --strict')).toBe('logfmt');
expect(getParserFromQuery('{job="grafana"} | logfmt --strict --keep-empty')).toBe('logfmt');
expect(getParserFromQuery('{job="grafana"} | logfmt field="otherField"')).toBe('logfmt');
expect(getParserFromQuery('{job="grafana"} | logfmt field="otherField", label')).toBe('logfmt');
expect(getParserFromQuery('{job="grafana"} | logfmt --strict field="otherField"')).toBe('logfmt');
expect(
getParserFromQuery('{job="grafana"} | logfmt --strict --keep-empty field="otherField", label="field2"')
).toBe('logfmt');
});
}); });
describe('requestSupportsSplitting', () => { describe('requestSupportsSplitting', () => {

View File

@@ -19,6 +19,8 @@ import {
Identifier, Identifier,
Range, Range,
formatLokiQuery, formatLokiQuery,
Logfmt,
Json,
} from '@grafana/lezer-logql'; } from '@grafana/lezer-logql';
import { reportInteraction } from '@grafana/runtime'; import { reportInteraction } from '@grafana/runtime';
import { DataQuery } from '@grafana/schema'; import { DataQuery } from '@grafana/schema';
@@ -193,13 +195,13 @@ export function isLogsQuery(query: string): boolean {
} }
export function isQueryWithParser(query: string): { queryWithParser: boolean; parserCount: number } { export function isQueryWithParser(query: string): { queryWithParser: boolean; parserCount: number } {
const nodes = getNodesFromQuery(query, [LabelParser, JsonExpressionParser]); const nodes = getNodesFromQuery(query, [LabelParser, JsonExpressionParser, Logfmt]);
const parserCount = nodes.length; const parserCount = nodes.length;
return { queryWithParser: parserCount > 0, parserCount }; return { queryWithParser: parserCount > 0, parserCount };
} }
export function getParserFromQuery(query: string): string | undefined { export function getParserFromQuery(query: string): string | undefined {
const parsers = getNodesFromQuery(query, [LabelParser, JsonExpressionParser]); const parsers = getNodesFromQuery(query, [LabelParser, Json, Logfmt]);
return parsers.length > 0 ? query.substring(parsers[0].from, parsers[0].to).trim() : undefined; return parsers.length > 0 ? query.substring(parsers[0].from, parsers[0].to).trim() : undefined;
} }

View File

@@ -49,6 +49,51 @@ describe('LokiQueryModeller', () => {
).toBe('{app="grafana"} | logfmt'); ).toBe('{app="grafana"} | logfmt');
}); });
it('Models a logfmt query with strict flag', () => {
expect(
modeller.renderQuery({
labels: [{ label: 'app', op: '=', value: 'grafana' }],
operations: [{ id: LokiOperationId.Logfmt, params: [true] }],
})
).toBe('{app="grafana"} | logfmt --strict');
});
it('Models a logfmt query with keep empty flag', () => {
expect(
modeller.renderQuery({
labels: [{ label: 'app', op: '=', value: 'grafana' }],
operations: [{ id: LokiOperationId.Logfmt, params: [false, true] }],
})
).toBe('{app="grafana"} | logfmt --keep-empty');
});
it('Models a logfmt query with multiple flags', () => {
expect(
modeller.renderQuery({
labels: [{ label: 'app', op: '=', value: 'grafana' }],
operations: [{ id: LokiOperationId.Logfmt, params: [true, true] }],
})
).toBe('{app="grafana"} | logfmt --strict --keep-empty');
});
it('Models a logfmt query with multiple flags and labels', () => {
expect(
modeller.renderQuery({
labels: [{ label: 'app', op: '=', value: 'grafana' }],
operations: [{ id: LokiOperationId.Logfmt, params: [true, true, 'label', 'label2="label3'] }],
})
).toBe('{app="grafana"} | logfmt --strict --keep-empty label, label2="label3');
});
it('Models a logfmt query with labels', () => {
expect(
modeller.renderQuery({
labels: [{ label: 'app', op: '=', value: 'grafana' }],
operations: [{ id: LokiOperationId.Logfmt, params: [false, false, 'label', 'label2="label3'] }],
})
).toBe('{app="grafana"} | logfmt label, label2="label3');
});
it('Can query with pipeline operation regexp', () => { it('Can query with pipeline operation regexp', () => {
expect( expect(
modeller.renderQuery({ modeller.renderQuery({

View File

@@ -1,4 +1,4 @@
import { QueryBuilderOperationDef } from '../../prometheus/querybuilder/shared/types'; import { QueryBuilderOperation, QueryBuilderOperationDef } from '../../prometheus/querybuilder/shared/types';
import { import {
createRangeOperation, createRangeOperation,
@@ -6,8 +6,10 @@ import {
getLineFilterRenderer, getLineFilterRenderer,
isConflictingFilter, isConflictingFilter,
labelFilterRenderer, labelFilterRenderer,
pipelineRenderer,
} from './operationUtils'; } from './operationUtils';
import { LokiVisualQueryOperationCategory } from './types'; import { getOperationDefinitions } from './operations';
import { LokiOperationId, LokiVisualQueryOperationCategory } from './types';
describe('createRangeOperation', () => { describe('createRangeOperation', () => {
it('should create basic range operation without possible grouping', () => { it('should create basic range operation without possible grouping', () => {
@@ -205,3 +207,19 @@ describe('isConflictingFilter', () => {
expect(isConflictingFilter(operation, queryOperations)).toBe(false); expect(isConflictingFilter(operation, queryOperations)).toBe(false);
}); });
}); });
describe('pipelineRenderer', () => {
let definitions: QueryBuilderOperationDef[];
beforeEach(() => {
definitions = getOperationDefinitions();
});
it('Correctly renders unpack expressions', () => {
const model: QueryBuilderOperation = {
id: LokiOperationId.Unpack,
params: [],
};
const definition = definitions.find((def) => def.id === LokiOperationId.Unpack);
expect(pipelineRenderer(model, definition!, '{}')).toBe('{} | unpack');
});
});

View File

@@ -184,7 +184,15 @@ export function isConflictingFilter(
} }
export function pipelineRenderer(model: QueryBuilderOperation, def: QueryBuilderOperationDef, innerExpr: string) { export function pipelineRenderer(model: QueryBuilderOperation, def: QueryBuilderOperationDef, innerExpr: string) {
return `${innerExpr} | ${model.id}`; switch (model.id) {
case LokiOperationId.Logfmt:
const [strict = false, keepEmpty = false, ...labels] = model.params;
return `${innerExpr} | logfmt${strict ? ' --strict' : ''}${keepEmpty ? ' --keep-empty' : ''} ${labels.join(
', '
)}`.trim();
default:
return `${innerExpr} | ${model.id}`;
}
} }
function isRangeVectorFunction(def: QueryBuilderOperationDef) { function isRangeVectorFunction(def: QueryBuilderOperationDef) {

View File

@@ -100,8 +100,33 @@ export function getOperationDefinitions(): QueryBuilderOperationDef[] {
{ {
id: LokiOperationId.Logfmt, id: LokiOperationId.Logfmt,
name: 'Logfmt', name: 'Logfmt',
params: [], params: [
defaultParams: [], {
name: 'Strict',
type: 'boolean',
optional: true,
description:
'With strict parsing enabled, the logfmt parser immediately stops scanning the log line and returns early with an error when it encounters any poorly formatted key/value pair.',
},
{
name: 'Keep empty',
type: 'boolean',
optional: true,
description:
'The logfmt parser retains standalone keys (keys without a value) as labels with their value set to an empty string.',
},
{
name: 'Expression',
type: 'string',
optional: true,
restParam: true,
minWidth: 18,
placeholder: 'field_name',
description:
'Using expressions with your logfmt parser will extract and rename (if provided) only the specified fields to labels. You can specify one or more expressions in this way.',
},
],
defaultParams: [false, false],
alternativesKey: 'format', alternativesKey: 'format',
category: LokiVisualQueryOperationCategory.Formats, category: LokiVisualQueryOperationCategory.Formats,
orderRank: LokiOperationOrder.Parsers, orderRank: LokiOperationOrder.Parsers,

View File

@@ -119,7 +119,7 @@ describe('buildVisualQueryFromString', () => {
], ],
operations: [ operations: [
{ id: LokiOperationId.LineFilterIpMatches, params: ['|=', '192.168.4.5/16'] }, { id: LokiOperationId.LineFilterIpMatches, params: ['|=', '192.168.4.5/16'] },
{ id: LokiOperationId.Logfmt, params: [] }, { id: LokiOperationId.Logfmt, params: [false, false] },
], ],
}) })
); );
@@ -209,7 +209,7 @@ describe('buildVisualQueryFromString', () => {
}, },
], ],
operations: [ operations: [
{ id: LokiOperationId.Logfmt, params: [] }, { id: LokiOperationId.Logfmt, params: [false, false] },
{ id: LokiOperationId.LabelFilterIpMatches, params: ['address', '=', '192.168.4.5/16'] }, { id: LokiOperationId.LabelFilterIpMatches, params: ['address', '=', '192.168.4.5/16'] },
], ],
}) })
@@ -260,7 +260,7 @@ describe('buildVisualQueryFromString', () => {
}, },
], ],
operations: [ operations: [
{ id: LokiOperationId.Logfmt, params: [] }, { id: LokiOperationId.Logfmt, params: [false, false] },
{ id: LokiOperationId.Unwrap, params: ['bytes_processed', ''] }, { id: LokiOperationId.Unwrap, params: ['bytes_processed', ''] },
{ id: LokiOperationId.SumOverTime, params: ['1m'] }, { id: LokiOperationId.SumOverTime, params: ['1m'] },
], ],
@@ -281,7 +281,7 @@ describe('buildVisualQueryFromString', () => {
}, },
], ],
operations: [ operations: [
{ id: LokiOperationId.Logfmt, params: [] }, { id: LokiOperationId.Logfmt, params: [false, false] },
{ id: LokiOperationId.Unwrap, params: ['duration', ''] }, { id: LokiOperationId.Unwrap, params: ['duration', ''] },
{ id: LokiOperationId.LabelFilterNoErrors, params: [] }, { id: LokiOperationId.LabelFilterNoErrors, params: [] },
{ id: LokiOperationId.SumOverTime, params: ['1m'] }, { id: LokiOperationId.SumOverTime, params: ['1m'] },
@@ -303,7 +303,7 @@ describe('buildVisualQueryFromString', () => {
}, },
], ],
operations: [ operations: [
{ id: LokiOperationId.Logfmt, params: [] }, { id: LokiOperationId.Logfmt, params: [false, false] },
{ id: LokiOperationId.Unwrap, params: ['duration', ''] }, { id: LokiOperationId.Unwrap, params: ['duration', ''] },
{ id: LokiOperationId.LabelFilter, params: ['label', '=', 'value'] }, { id: LokiOperationId.LabelFilter, params: ['label', '=', 'value'] },
{ id: LokiOperationId.SumOverTime, params: ['1m'] }, { id: LokiOperationId.SumOverTime, params: ['1m'] },
@@ -326,7 +326,7 @@ describe('buildVisualQueryFromString', () => {
}, },
], ],
operations: [ operations: [
{ id: LokiOperationId.Logfmt, params: [] }, { id: LokiOperationId.Logfmt, params: [false, false] },
{ id: LokiOperationId.Unwrap, params: ['label', 'duration'] }, { id: LokiOperationId.Unwrap, params: ['label', 'duration'] },
{ id: LokiOperationId.SumOverTime, params: ['5m'] }, { id: LokiOperationId.SumOverTime, params: ['5m'] },
], ],
@@ -360,7 +360,7 @@ describe('buildVisualQueryFromString', () => {
}, },
], ],
operations: [ operations: [
{ id: LokiOperationId.Logfmt, params: [] }, { id: LokiOperationId.Logfmt, params: [false, false] },
{ id: LokiOperationId.Decolorize, params: [] }, { id: LokiOperationId.Decolorize, params: [] },
{ id: LokiOperationId.LabelFilterNoErrors, params: [] }, { id: LokiOperationId.LabelFilterNoErrors, params: [] },
], ],
@@ -477,7 +477,7 @@ describe('buildVisualQueryFromString', () => {
}, },
], ],
operations: [ operations: [
{ id: LokiOperationId.Logfmt, params: [] }, { id: LokiOperationId.Logfmt, params: [false, false] },
{ id: LokiOperationId.LabelFilterNoErrors, params: [] }, { id: LokiOperationId.LabelFilterNoErrors, params: [] },
{ id: LokiOperationId.CountOverTime, params: ['5m'] }, { id: LokiOperationId.CountOverTime, params: ['5m'] },
{ id: LokiOperationId.Sum, params: [] }, { id: LokiOperationId.Sum, params: [] },
@@ -836,6 +836,53 @@ describe('buildVisualQueryFromString', () => {
}) })
); );
}); });
it('parses query with logfmt parser', () => {
expect(buildVisualQueryFromString('{label="value"} | logfmt')).toEqual(
noErrors({
labels: [
{
op: '=',
value: 'value',
label: 'label',
},
],
operations: [{ id: LokiOperationId.Logfmt, params: [false, false] }],
})
);
});
it('parses query with logfmt parser and flags', () => {
expect(buildVisualQueryFromString('{label="value"} | logfmt --keep-empty --strict')).toEqual(
noErrors({
labels: [
{
op: '=',
value: 'value',
label: 'label',
},
],
operations: [{ id: LokiOperationId.Logfmt, params: [true, true] }],
})
);
});
it('parses query with logfmt parser, flags, and labels', () => {
expect(
buildVisualQueryFromString('{label="value"} | logfmt --keep-empty --strict label1, label2, label3="label4"')
).toEqual(
noErrors({
labels: [
{
op: '=',
value: 'value',
label: 'label',
},
],
operations: [{ id: LokiOperationId.Logfmt, params: [true, true, 'label1', 'label2', 'label3="label4"'] }],
})
);
});
}); });
function noErrors(query: LokiVisualQuery) { function noErrors(query: LokiVisualQuery) {

View File

@@ -20,16 +20,18 @@ import {
Ip, Ip,
IpLabelFilter, IpLabelFilter,
Json, Json,
JsonExpression,
JsonExpressionParser, JsonExpressionParser,
KeepLabel, KeepLabel,
KeepLabels, KeepLabels,
KeepLabelsExpr, KeepLabelsExpr,
LabelExtractionExpression,
LabelFilter, LabelFilter,
LabelFormatMatcher, LabelFormatMatcher,
LabelParser, LabelParser,
LineFilter, LineFilter,
LineFormatExpr, LineFormatExpr,
LogfmtExpressionParser,
LogfmtParser,
LogRangeExpr, LogRangeExpr,
Matcher, Matcher,
MetricExpr, MetricExpr,
@@ -37,6 +39,7 @@ import {
On, On,
Or, Or,
parser, parser,
ParserFlag,
Range, Range,
RangeAggregationExpr, RangeAggregationExpr,
RangeOp, RangeOp,
@@ -80,6 +83,11 @@ interface ParsingError {
parentType?: string; parentType?: string;
} }
interface GetOperationResult {
operation?: QueryBuilderOperation;
error?: string;
}
export function buildVisualQueryFromString(expr: string): Context { export function buildVisualQueryFromString(expr: string): Context {
const replacedExpr = replaceVariables(expr); const replacedExpr = replaceVariables(expr);
const tree = parser.parse(replacedExpr); const tree = parser.parse(replacedExpr);
@@ -160,6 +168,18 @@ export function handleExpression(expr: string, node: SyntaxNode, context: Contex
break; break;
} }
case LogfmtParser:
case LogfmtExpressionParser: {
const { operation, error } = getLogfmtParser(expr, node);
if (operation) {
visQuery.operations.push(operation);
}
if (error) {
context.errors.push(createNotSupportedError(expr, node, error));
}
break;
}
case LineFormatExpr: { case LineFormatExpr: {
visQuery.operations.push(getLineFormat(expr, node)); visQuery.operations.push(getLineFormat(expr, node));
break; break;
@@ -250,7 +270,7 @@ function getLabel(expr: string, node: SyntaxNode): QueryBuilderLabelFilter {
}; };
} }
function getLineFilter(expr: string, node: SyntaxNode): { operation?: QueryBuilderOperation; error?: string } { function getLineFilter(expr: string, node: SyntaxNode): GetOperationResult {
const filter = getString(expr, node.getChild(Filter)); const filter = getString(expr, node.getChild(Filter));
const filterExpr = handleQuotes(getString(expr, node.getChild(String))); const filterExpr = handleQuotes(getString(expr, node.getChild(String)));
const ipLineFilter = node.getChild(FilterOp)?.getChild(Ip); const ipLineFilter = node.getChild(FilterOp)?.getChild(Ip);
@@ -299,14 +319,43 @@ function getJsonExpressionParser(expr: string, node: SyntaxNode): QueryBuilderOp
const parserNode = node.getChild(Json); const parserNode = node.getChild(Json);
const parser = getString(expr, parserNode); const parser = getString(expr, parserNode);
const params = [...getAllByType(expr, node, JsonExpression)]; const params = [...getAllByType(expr, node, LabelExtractionExpression)];
return { return {
id: parser, id: parser,
params, params,
}; };
} }
function getLabelFilter(expr: string, node: SyntaxNode): { operation?: QueryBuilderOperation; error?: string } { function getLogfmtParser(expr: string, node: SyntaxNode): GetOperationResult {
const flags: string[] = [];
const labels: string[] = [];
let error: string | undefined = undefined;
const offset = node.from;
node.toTree().iterate({
enter: (subNode) => {
if (subNode.type.id === ParserFlag) {
flags.push(expr.substring(subNode.from + offset, subNode.to + offset));
} else if (subNode.type.id === LabelExtractionExpression) {
labels.push(expr.substring(subNode.from + offset, subNode.to + offset));
} else if (subNode.type.id === ErrorId) {
error = `Unexpected string "${expr.substring(subNode.from + offset, subNode.to + offset)}"`;
}
},
});
const operation = {
id: LokiOperationId.Logfmt,
params: [flags.includes('--strict'), flags.includes('--keep-empty'), ...labels],
};
return {
operation,
error,
};
}
function getLabelFilter(expr: string, node: SyntaxNode): GetOperationResult {
// Check for nodes not supported in visual builder and return error // Check for nodes not supported in visual builder and return error
if (node.getChild(Or) || node.getChild(And) || node.getChild('Comma')) { if (node.getChild(Or) || node.getChild(And) || node.getChild('Comma')) {
return { return {
@@ -399,11 +448,7 @@ function getDecolorize(): QueryBuilderOperation {
}; };
} }
function handleUnwrapExpr( function handleUnwrapExpr(expr: string, node: SyntaxNode, context: Context): GetOperationResult {
expr: string,
node: SyntaxNode,
context: Context
): { operation?: QueryBuilderOperation; error?: string } {
const unwrapExprChild = node.getChild(UnwrapExpr); const unwrapExprChild = node.getChild(UnwrapExpr);
const labelFilterChild = node.getChild(LabelFilter); const labelFilterChild = node.getChild(LabelFilter);
const unwrapChild = node.getChild(Unwrap); const unwrapChild = node.getChild(Unwrap);

View File

@@ -3939,12 +3939,12 @@ __metadata:
languageName: node languageName: node
linkType: hard linkType: hard
"@grafana/lezer-logql@npm:0.1.11": "@grafana/lezer-logql@npm:0.2.0":
version: 0.1.11 version: 0.2.0
resolution: "@grafana/lezer-logql@npm:0.1.11" resolution: "@grafana/lezer-logql@npm:0.2.0"
peerDependencies: peerDependencies:
"@lezer/lr": ^1.0.0 "@lezer/lr": ^1.0.0
checksum: 6a624b9a8d31ff854fcf9708c35e6a7498e78c4bda884639681d0b6d0fffe5527fbaeab1198e5a7694f913181657334345f31156a4a15ff64e3019b30ba6ca2a checksum: 7f4382291f9f745b39fcd64aea146140723c5c30d1b86ba5418db2c3a5121bc12742c71129462bc0b78620f6a02598e1dafe555b437e4b4cacef7e2268a15b65
languageName: node languageName: node
linkType: hard linkType: hard
@@ -19697,7 +19697,7 @@ __metadata:
"@grafana/faro-web-sdk": 1.1.2 "@grafana/faro-web-sdk": 1.1.2
"@grafana/flamegraph": "workspace:*" "@grafana/flamegraph": "workspace:*"
"@grafana/google-sdk": 0.1.1 "@grafana/google-sdk": 0.1.1
"@grafana/lezer-logql": 0.1.11 "@grafana/lezer-logql": 0.2.0
"@grafana/lezer-traceql": 0.0.6 "@grafana/lezer-traceql": 0.0.6
"@grafana/monaco-logql": ^0.0.7 "@grafana/monaco-logql": ^0.0.7
"@grafana/runtime": "workspace:*" "@grafana/runtime": "workspace:*"