Tempo: Remove Loki tab (#84346)

* Update docs

* Remove loki tab from config settings

* Remove loki query field

* Remove loki search from ds, resultTransformer, tracking and tests

* Cleanup removal of loki search

* Remove loki section from query editor docs

* Remove search type
Joey 2024-03-13 14:22:20 +00:00 committed by GitHub
parent 06b7f6befa
commit 1f2e9a544d
36 changed files with 16 additions and 2658 deletions

View File

@@ -5558,38 +5558,12 @@ exports[`better eslint`] = {
[0, 0, 0, "Unexpected any. Specify a different type.", "1"],
[0, 0, 0, "Unexpected any. Specify a different type.", "2"]
],
"public/app/plugins/datasource/tempo/LokiSearch.tsx:5381": [
[0, 0, 0, "Do not use any type assertions.", "0"],
[0, 0, 0, "Do not use any type assertions.", "1"]
],
"public/app/plugins/datasource/tempo/ServiceGraphSection.tsx:5381": [
[0, 0, 0, "Do not use any type assertions.", "0"]
],
"public/app/plugins/datasource/tempo/_importedDependencies/components/AdHocFilter/AdHocFilterRenderer.tsx:5381": [
[0, 0, 0, "Unexpected any. Specify a different type.", "0"]
],
"public/app/plugins/datasource/tempo/_importedDependencies/datasources/loki/LanguageProvider.ts:5381": [
[0, 0, 0, "Unexpected any. Specify a different type.", "0"],
[0, 0, 0, "Unexpected any. Specify a different type.", "1"]
],
"public/app/plugins/datasource/tempo/_importedDependencies/datasources/loki/monaco-query-field/MonacoQueryField.tsx:5381": [
[0, 0, 0, "Styles should be written using objects.", "0"],
[0, 0, 0, "Styles should be written using objects.", "1"],
[0, 0, 0, "Use data-testid for E2E selectors instead of aria-label", "2"],
[0, 0, 0, "Unexpected any. Specify a different type.", "3"]
],
"public/app/plugins/datasource/tempo/_importedDependencies/datasources/loki/monaco-query-field/monaco-completion-provider/CompletionDataProvider.ts:5381": [
[0, 0, 0, "Unexpected any. Specify a different type.", "0"]
],
"public/app/plugins/datasource/tempo/_importedDependencies/datasources/loki/monaco-query-field/monaco-completion-provider/index.ts:5381": [
[0, 0, 0, "Do not use any type assertions.", "0"]
],
"public/app/plugins/datasource/tempo/_importedDependencies/datasources/loki/types.ts:5381": [
[0, 0, 0, "Unexpected any. Specify a different type.", "0"],
[0, 0, 0, "Unexpected any. Specify a different type.", "1"],
[0, 0, 0, "Unexpected any. Specify a different type.", "2"],
[0, 0, 0, "Unexpected any. Specify a different type.", "3"]
],
"public/app/plugins/datasource/tempo/_importedDependencies/datasources/prometheus/language_utils.ts:5381": [
[0, 0, 0, "Do not use any type assertions.", "0"]
],
@@ -5606,12 +5580,11 @@ exports[`better eslint`] = {
[0, 0, 0, "Do not use any type assertions.", "0"]
],
"public/app/plugins/datasource/tempo/datasource.ts:5381": [
[0, 0, 0, "Do not use any type assertions.", "0"],
[0, 0, 0, "Unexpected any. Specify a different type.", "1"],
[0, 0, 0, "Do not use any type assertions.", "2"],
[0, 0, 0, "Unexpected any. Specify a different type.", "0"],
[0, 0, 0, "Do not use any type assertions.", "1"],
[0, 0, 0, "Unexpected any. Specify a different type.", "2"],
[0, 0, 0, "Unexpected any. Specify a different type.", "3"],
[0, 0, 0, "Unexpected any. Specify a different type.", "4"],
[0, 0, 0, "Unexpected any. Specify a different type.", "5"]
[0, 0, 0, "Unexpected any. Specify a different type.", "4"]
],
"public/app/plugins/datasource/tempo/language_provider.ts:5381": [
[0, 0, 0, "Unexpected any. Specify a different type.", "0"]
@@ -6976,11 +6949,6 @@ exports[`no gf-form usage`] = {
[0, 0, 0, "gf-form usage has been deprecated. Use a component from @grafana/ui or custom CSS instead.", "5381"],
[0, 0, 0, "gf-form usage has been deprecated. Use a component from @grafana/ui or custom CSS instead.", "5381"]
],
"public/app/plugins/datasource/tempo/_importedDependencies/datasources/loki/LokiQueryField.tsx:5381": [
[0, 0, 0, "gf-form usage has been deprecated. Use a component from @grafana/ui or custom CSS instead.", "5381"],
[0, 0, 0, "gf-form usage has been deprecated. Use a component from @grafana/ui or custom CSS instead.", "5381"],
[0, 0, 0, "gf-form usage has been deprecated. Use a component from @grafana/ui or custom CSS instead.", "5381"]
],
"public/app/plugins/datasource/zipkin/QueryField.tsx:5381": [
[0, 0, 0, "gf-form usage has been deprecated. Use a component from @grafana/ui or custom CSS instead.", "5381"],
[0, 0, 0, "gf-form usage has been deprecated. Use a component from @grafana/ui or custom CSS instead.", "5381"]

View File

@@ -186,13 +186,6 @@ The **Search** setting configures [Tempo search](/docs/tempo/latest/configuratio
You can configure the **Hide search** setting to hide the search query option in **Explore** if search is not configured in the Tempo instance.
## Loki search
The **Loki search** setting configures the Loki search query type.
Configure the **Data source** setting to define which Loki instance you want to use to search traces.
You must configure [derived fields]({{< relref "../loki#configure-derived-fields" >}}) in the Loki instance.
## TraceID query
The **TraceID query** setting modifies how TraceID queries are run. The time range can be used when there are performance issues or timeouts since it will narrow down the search to the defined range. This setting is disabled by default.
@@ -266,8 +259,6 @@ datasources:
enabled: true
search:
hide: false
lokiSearch:
datasourceUid: 'loki'
traceQuery:
timeShiftEnabled: true
spanStartTimeShift: '1h'
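
For reference, here is a minimal sketch of the provisioned configuration after this change: the `lokiSearch` block is simply dropped and no replacement key is introduced. The `name`, `type`, and `jsonData` nesting are assumptions for illustration based on the usual Tempo provisioning layout; the `search` and `traceQuery` keys come from the remaining lines in the diff above.

```yaml
datasources:
  - name: Tempo        # assumed datasource name, for illustration only
    type: tempo        # assumed datasource type
    jsonData:
      search:
        hide: false    # set to true to hide the search query option in Explore
      traceQuery:
        timeShiftEnabled: true
        spanStartTimeShift: '1h'
```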

View File

@@ -99,13 +99,6 @@ To query a particular trace:
{{< figure src="/static/img/docs/tempo/query-editor-traceid.png" class="docs-image--no-shadow" max-width="750px" caption="Screenshot of the Tempo TraceID query type" >}}
## Query Loki for traces
To find traces to visualize, you can use the [Loki query editor]({{< relref "../../loki#loki-query-editor" >}}).
For results, you must configure [derived fields]({{< relref "../../loki#configure-derived-fields" >}}) in the Loki data source that point to this data source.
{{< figure src="/static/img/docs/tempo/query-editor-search.png" class="docs-image--no-shadow" max-width="750px" caption="Screenshot of the Tempo query editor showing the Loki Search tab" >}}
{{% docs/reference %}}
[explore]: "/docs/grafana/ -> /docs/grafana/<GRAFANA VERSION>/explore"
[explore]: "/docs/grafana-cloud/ -> /docs/grafana/<GRAFANA VERSION>/explore"

View File

@@ -70,9 +70,9 @@ export const defaultTempoQuery: Partial<TempoQuery> = {
};
/**
* search = Loki search, nativeSearch = Tempo search for backwards compatibility
* nativeSearch = Tempo search for backwards compatibility
*/
export type TempoQueryType = ('traceql' | 'traceqlSearch' | 'search' | 'serviceMap' | 'upload' | 'nativeSearch' | 'traceId' | 'clear');
export type TempoQueryType = ('traceql' | 'traceqlSearch' | 'serviceMap' | 'upload' | 'nativeSearch' | 'traceId' | 'clear');
/**
* The state of the TraceQL streaming search query

View File

@@ -27,7 +27,6 @@ const (
const (
TempoQueryTypeClear TempoQueryType = "clear"
TempoQueryTypeNativeSearch TempoQueryType = "nativeSearch"
TempoQueryTypeSearch TempoQueryType = "search"
TempoQueryTypeServiceMap TempoQueryType = "serviceMap"
TempoQueryTypeTraceId TempoQueryType = "traceId"
TempoQueryTypeTraceql TempoQueryType = "traceql"
@@ -142,7 +141,7 @@ type TempoQuery struct {
TableType *SearchTableType `json:"tableType,omitempty"`
}
// TempoQueryType search = Loki search, nativeSearch = Tempo search for backwards compatibility
// TempoQueryType nativeSearch = Tempo search for backwards compatibility
type TempoQueryType string
// TraceqlFilter defines model for TraceqlFilter.

View File

@@ -1,55 +0,0 @@
import React from 'react';
import useAsync from 'react-use/lib/useAsync';
import { InlineLabel } from '@grafana/ui';
import { LokiQueryField } from './_importedDependencies/datasources/loki/LokiQueryField';
import { LokiDatasource, LokiQuery } from './_importedDependencies/datasources/loki/types';
import { TempoQuery } from './types';
import { getDS } from './utils';
interface LokiSearchProps {
logsDatasourceUid?: string;
onChange: (value: LokiQuery) => void;
onRunQuery: () => void;
query: TempoQuery;
}
export function LokiSearch({ logsDatasourceUid, onChange, onRunQuery, query }: LokiSearchProps) {
const dsState = useAsync(() => getDS(logsDatasourceUid), [logsDatasourceUid]);
if (dsState.loading) {
return null;
}
const ds = dsState.value as LokiDatasource;
if (ds) {
return (
<>
<InlineLabel>Tempo uses {ds.name} to find traces.</InlineLabel>
<LokiQueryField
datasource={ds}
onChange={onChange}
onRunQuery={onRunQuery}
query={query.linkedQuery ?? ({ refId: 'linked' } as LokiQuery)}
history={[]}
/>
</>
);
}
if (!logsDatasourceUid) {
return <div className="text-warning">Please set up a Loki search datasource in the datasource settings.</div>;
}
if (logsDatasourceUid && !ds) {
return (
<div className="text-warning">
Loki search datasource is configured but the data source no longer exists. Please configure existing data source
to use the search.
</div>
);
}
return null;
}

View File

@@ -15,11 +15,9 @@ import {
withTheme2,
} from '@grafana/ui';
import { LokiSearch } from './LokiSearch';
import NativeSearch from './NativeSearch/NativeSearch';
import TraceQLSearch from './SearchTraceQLEditor/TraceQLSearch';
import { ServiceGraphSection } from './ServiceGraphSection';
import { LokiQuery } from './_importedDependencies/datasources/loki/types';
import { TempoQueryType } from './dataquery.gen';
import { TempoDatasource } from './datasource';
import { QueryEditor } from './traceql/QueryEditor';
@@ -55,18 +53,6 @@ class TempoQueryFieldComponent extends React.PureComponent<Props, State> {
}
}
onChangeLinkedQuery = (value: LokiQuery) => {
const { query, onChange } = this.props;
onChange({
...query,
linkedQuery: { ...value, refId: 'linked' },
});
};
onRunLinkedQuery = () => {
this.props.onRunQuery();
};
onClearResults = () => {
// Run clear query to clear results
const { onChange, query, onRunQuery } = this.props;
@@ -80,8 +66,6 @@ class TempoQueryFieldComponent extends React.PureComponent<Props, State> {
render() {
const { query, onChange, datasource, app } = this.props;
const logsDatasourceUid = datasource.getLokiSearchDS();
const graphDatasourceUid = datasource.serviceMap?.datasourceUid;
let queryTypeOptions: Array<SelectableValue<TempoQueryType>> = [
@@ -90,16 +74,6 @@ class TempoQueryFieldComponent extends React.PureComponent<Props, State> {
{ value: 'serviceMap', label: 'Service Graph' },
];
if (logsDatasourceUid) {
if (datasource?.search?.hide) {
// Place at beginning as Search if no native search
queryTypeOptions.unshift({ value: 'search', label: 'Search' });
} else {
// Place at end as Loki Search if native search is enabled
queryTypeOptions.push({ value: 'search', label: 'Loki Search' });
}
}
// Show the deprecated search option if any of the deprecated search fields are set
if (
query.spanName ||
@@ -172,14 +146,6 @@ class TempoQueryFieldComponent extends React.PureComponent<Props, State> {
</HorizontalGroup>
</InlineField>
</InlineFieldRow>
{query.queryType === 'search' && (
<LokiSearch
logsDatasourceUid={logsDatasourceUid}
query={query}
onRunQuery={this.onRunLinkedQuery}
onChange={this.onChangeLinkedQuery}
/>
)}
{query.queryType === 'nativeSearch' && (
<NativeSearch
datasource={this.props.datasource}

View File

@@ -1,265 +0,0 @@
import { LRUCache } from 'lru-cache';
import Prism from 'prismjs';
import { LanguageProvider, AbstractQuery, KeyValue } from '@grafana/data';
import { extractLabelMatchers, processLabels, toPromLikeExpr } from '../prometheus/language_utils';
import {
extractLabelKeysFromDataFrame,
extractLogParserFromDataFrame,
extractUnwrapLabelKeysFromDataFrame,
} from './responseUtils';
import syntax from './syntax';
import { ParserAndLabelKeysResult, LokiDatasource, LokiQuery, LokiQueryType } from './types';
const DEFAULT_MAX_LINES_SAMPLE = 10;
const NS_IN_MS = 1000000;
export default class LokiLanguageProvider extends LanguageProvider {
labelKeys: string[];
started = false;
datasource: LokiDatasource;
/**
* Cache for labels of series. This is a bit simplistic in the sense that it just counts responses each as a 1 and does
* not account for different size of a response. If that is needed a `length` function can be added in the options.
* 10 as a max size is totally arbitrary right now.
*/
private seriesCache = new LRUCache<string, Record<string, string[]>>({ max: 10 });
private labelsCache = new LRUCache<string, string[]>({ max: 10 });
constructor(datasource: LokiDatasource, initialValues?: any) {
super();
this.datasource = datasource;
this.labelKeys = [];
Object.assign(this, initialValues);
}
request = async (url: string, params?: any) => {
try {
return await this.datasource.metadataRequest(url, params);
} catch (error) {
console.error(error);
}
return undefined;
};
/**
* Initialize the language provider by fetching set of labels.
*/
start = () => {
if (!this.startTask) {
this.startTask = this.fetchLabels().then(() => {
this.started = true;
return [];
});
}
return this.startTask;
};
/**
* Returns the label keys that have been fetched.
* If labels have not been fetched yet, it will return an empty array.
* For updated labels (which should not happen often), use fetchLabels.
* It is quite complicated to know when to use fetchLabels and when to use getLabelKeys.
* We should consider simplifying this and use caching in the same way as with seriesCache and labelsCache
* and just always use fetchLabels.
* Caching should be thought out properly, so we are not fetching this often, as labelKeys should not be changing often.
*
* @returns {string[]} An array of label keys or an empty array if labels have not been fetched.
*/
getLabelKeys(): string[] {
return this.labelKeys;
}
importFromAbstractQuery(labelBasedQuery: AbstractQuery): LokiQuery {
return {
refId: labelBasedQuery.refId,
expr: toPromLikeExpr(labelBasedQuery),
// queryType: LokiQueryType.Range,
queryType: LokiQueryType.Range,
};
}
exportToAbstractQuery(query: LokiQuery): AbstractQuery {
const lokiQuery = query.expr;
if (!lokiQuery || lokiQuery.length === 0) {
return { refId: query.refId, labelMatchers: [] };
}
const tokens = Prism.tokenize(lokiQuery, syntax);
return {
refId: query.refId,
labelMatchers: extractLabelMatchers(tokens),
};
}
/**
* Fetch all label keys
* This asynchronous function returns all available label keys from the data source.
* It returns a promise that resolves to an array of strings containing the label keys.
*
* @returns A promise containing an array of label keys.
* @throws An error if the fetch operation fails.
*/
async fetchLabels(): Promise<string[]> {
const url = 'labels';
const timeRange = this.datasource.getTimeRangeParams();
const res = await this.request(url, timeRange);
if (Array.isArray(res)) {
const labels = res
.slice()
.sort()
.filter((label) => label !== '__name__');
this.labelKeys = labels;
return this.labelKeys;
}
return [];
}
/**
* Fetch series labels for a selector
*
* This method fetches labels for a given stream selector, such as `{job="grafana"}`.
* It returns a promise that resolves to a record mapping label names to their corresponding values.
*
* @param streamSelector - The stream selector for which you want to retrieve labels.
* @returns A promise containing a record of label names and their values.
* @throws An error if the fetch operation fails.
*/
fetchSeriesLabels = async (streamSelector: string): Promise<Record<string, string[]>> => {
const interpolatedMatch = this.datasource.interpolateString(streamSelector);
const url = 'series';
const { start, end } = this.datasource.getTimeRangeParams();
const cacheKey = this.generateCacheKey(url, start, end, interpolatedMatch);
let value = this.seriesCache.get(cacheKey);
if (!value) {
const params = { 'match[]': interpolatedMatch, start, end };
const data = await this.request(url, params);
const { values } = processLabels(data);
value = values;
this.seriesCache.set(cacheKey, value);
}
return value;
};
/**
* Fetch series for a selector. Use this for raw results. Use fetchSeriesLabels() to get labels.
* @param match
*/
fetchSeries = async (match: string): Promise<Array<Record<string, string>>> => {
const url = 'series';
const { start, end } = this.datasource.getTimeRangeParams();
const params = { 'match[]': match, start, end };
return await this.request(url, params);
};
// Cache key is a bit different here. We round up to a minute the intervals.
// The rounding may seem strange but makes relative intervals like now-1h less prone to need separate request every
// millisecond while still actually getting all the keys for the correct interval. This still can create problems
// when the user does not get the newest values for a minute if they are already cached.
private generateCacheKey(url: string, start: number, end: number, param: string): string {
return [url, this.roundTime(start), this.roundTime(end), param].join();
}
// Round nanoseconds epoch to nearest 5 minute interval
private roundTime(nanoseconds: number): number {
return nanoseconds ? Math.floor(nanoseconds / NS_IN_MS / 1000 / 60 / 5) : 0;
}
/**
* Fetch label values
*
* This asynchronous function fetches values associated with a specified label name.
* It returns a promise that resolves to an array of strings containing the label values.
*
* @param labelName - The name of the label for which you want to retrieve values.
* @param options - (Optional) An object containing additional options - currently only stream selector.
* @param options.streamSelector - (Optional) The stream selector to filter label values. If not provided, all label values are fetched.
* @returns A promise containing an array of label values.
* @throws An error if the fetch operation fails.
*/
async fetchLabelValues(labelName: string, options?: { streamSelector?: string }): Promise<string[]> {
const label = encodeURIComponent(this.datasource.interpolateString(labelName));
const streamParam = options?.streamSelector
? encodeURIComponent(this.datasource.interpolateString(options.streamSelector))
: undefined;
const url = `label/${label}/values`;
const rangeParams = this.datasource.getTimeRangeParams();
const { start, end } = rangeParams;
const params: KeyValue<string | number> = { start, end };
let paramCacheKey = label;
if (streamParam) {
params.query = streamParam;
paramCacheKey += streamParam;
}
const cacheKey = this.generateCacheKey(url, start, end, paramCacheKey);
let labelValues = this.labelsCache.get(cacheKey);
if (!labelValues) {
// Clear value when requesting new one. Empty object being truthy also makes sure we don't request twice.
this.labelsCache.set(cacheKey, []);
const res = await this.request(url, params);
if (Array.isArray(res)) {
labelValues = res.slice().sort();
this.labelsCache.set(cacheKey, labelValues);
}
}
return labelValues ?? [];
}
/**
* Get parser and label keys for a selector
*
* This asynchronous function is used to fetch parsers and label keys for a selected log stream based on sampled lines.
* It returns a promise that resolves to an object with the following properties:
*
* - `extractedLabelKeys`: An array of available label keys associated with the log stream.
* - `hasJSON`: A boolean indicating whether JSON parsing is available for the stream.
* - `hasLogfmt`: A boolean indicating whether Logfmt parsing is available for the stream.
* - `hasPack`: A boolean indicating whether Pack parsing is available for the stream.
* - `unwrapLabelKeys`: An array of label keys that can be used for unwrapping log data.
*
* @param streamSelector - The selector for the log stream you want to analyze.
* @param {Object} [options] - Optional parameters.
* @param {number} [options.maxLines] - The number of log lines requested when determining parsers and label keys.
* Smaller maxLines is recommended for improved query performance. The default count is 10.
* @returns A promise containing an object with parser and label key information.
* @throws An error if the fetch operation fails.
*/
async getParserAndLabelKeys(
streamSelector: string,
options?: { maxLines?: number }
): Promise<ParserAndLabelKeysResult> {
const series = await this.datasource.getDataSamples({
expr: streamSelector,
refId: 'data-samples',
maxLines: options?.maxLines || DEFAULT_MAX_LINES_SAMPLE,
});
if (!series.length) {
return { extractedLabelKeys: [], unwrapLabelKeys: [], hasJSON: false, hasLogfmt: false, hasPack: false };
}
const { hasLogfmt, hasJSON, hasPack } = extractLogParserFromDataFrame(series[0]);
return {
extractedLabelKeys: extractLabelKeysFromDataFrame(series[0]),
unwrapLabelKeys: extractUnwrapLabelKeysFromDataFrame(series[0]),
hasJSON,
hasPack,
hasLogfmt,
};
}
}

View File

@@ -1,90 +0,0 @@
import React, { ReactNode } from 'react';
import { QueryEditorProps } from '@grafana/data';
import { shouldRefreshLabels } from './languageUtils';
import { MonacoQueryFieldWrapper } from './monaco-query-field/MonacoQueryFieldWrapper';
import { LokiQuery, LokiOptions, LokiDatasource } from './types';
export interface LokiQueryFieldProps extends QueryEditorProps<LokiDatasource, LokiQuery, LokiOptions> {
ExtraFieldElement?: ReactNode;
placeholder?: string;
'data-testid'?: string;
}
interface LokiQueryFieldState {
labelsLoaded: boolean;
}
export class LokiQueryField extends React.PureComponent<LokiQueryFieldProps, LokiQueryFieldState> {
_isMounted = false;
constructor(props: LokiQueryFieldProps) {
super(props);
this.state = { labelsLoaded: false };
}
async componentDidMount() {
this._isMounted = true;
await this.props.datasource.languageProvider.start();
if (this._isMounted) {
this.setState({ labelsLoaded: true });
}
}
componentWillUnmount() {
this._isMounted = false;
}
componentDidUpdate(prevProps: LokiQueryFieldProps) {
const {
range,
datasource: { languageProvider },
} = this.props;
const refreshLabels = shouldRefreshLabels(range, prevProps.range);
// We want to refresh labels when range changes (we round up intervals to a minute)
if (refreshLabels) {
languageProvider.fetchLabels();
}
}
onChangeQuery = (value: string, override?: boolean) => {
// Send text change to parent
const { query, onChange, onRunQuery } = this.props;
if (onChange) {
const nextQuery = { ...query, expr: value };
onChange(nextQuery);
if (override && onRunQuery) {
onRunQuery();
}
}
};
render() {
const { ExtraFieldElement, query, datasource, history, onRunQuery } = this.props;
const placeholder = this.props.placeholder ?? 'Enter a Loki query (run with Shift+Enter)';
return (
<>
<div
className="gf-form-inline gf-form-inline--xs-view-flex-column flex-grow-1"
data-testid={this.props['data-testid']}
>
<div className="gf-form--grow flex-shrink-1 min-width-15">
<MonacoQueryFieldWrapper
datasource={datasource}
history={history ?? []}
onChange={this.onChangeQuery}
onRunQuery={onRunQuery}
initialValue={query.expr ?? ''}
placeholder={placeholder}
/>
</div>
</div>
{ExtraFieldElement}
</>
);
}
}

View File

@@ -1,65 +0,0 @@
// Code generated - EDITING IS FUTILE. DO NOT EDIT.
//
// Generated by:
// public/app/plugins/gen.go
// Using jennies:
// TSTypesJenny
// PluginTSTypesJenny
//
// Run 'make gen-cue' from repository root to regenerate.
import * as common from '@grafana/schema';
export enum QueryEditorMode {
Builder = 'builder',
Code = 'code',
}
export enum LokiQueryType {
Instant = 'instant',
Range = 'range',
Stream = 'stream',
}
export enum SupportingQueryType {
DataSample = 'dataSample',
LogsSample = 'logsSample',
LogsVolume = 'logsVolume',
}
export enum LokiQueryDirection {
Backward = 'backward',
Forward = 'forward',
}
export interface Loki extends common.DataQuery {
editorMode?: QueryEditorMode;
/**
* The LogQL query.
*/
expr: string;
/**
* @deprecated, now use queryType.
*/
instant?: boolean;
/**
* Used to override the name of the series.
*/
legendFormat?: string;
/**
* Used to limit the number of log rows returned.
*/
maxLines?: number;
/**
* @deprecated, now use queryType.
*/
range?: boolean;
/**
* @deprecated, now use step.
*/
resolution?: number;
/**
* Used to set step value for range queries.
*/
step?: string;
}

View File

@@ -1,90 +0,0 @@
import { TimeRange } from '@grafana/data';
function roundMsToMin(milliseconds: number): number {
return roundSecToMin(milliseconds / 1000);
}
function roundSecToMin(seconds: number): number {
return Math.floor(seconds / 60);
}
export function shouldRefreshLabels(range?: TimeRange, prevRange?: TimeRange): boolean {
if (range && prevRange) {
const sameMinuteFrom = roundMsToMin(range.from.valueOf()) === roundMsToMin(prevRange.from.valueOf());
const sameMinuteTo = roundMsToMin(range.to.valueOf()) === roundMsToMin(prevRange.to.valueOf());
// If both are same, don't need to refresh
return !(sameMinuteFrom && sameMinuteTo);
}
return false;
}
// Loki regular-expressions use the RE2 syntax (https://github.com/google/re2/wiki/Syntax),
// so every character that matches something in that list has to be escaped.
// the list of meta characters is: *+?()|\.[]{}^$
// we make a javascript regular expression that matches those characters:
const RE2_METACHARACTERS = /[*+?()|\\.\[\]{}^$]/g;
function escapeLokiRegexp(value: string): string {
return value.replace(RE2_METACHARACTERS, '\\$&');
}
// based on the openmetrics-documentation, the 3 symbols we have to handle are:
// - \n ... the newline character
// - \ ... the backslash character
// - " ... the double-quote character
export function escapeLabelValueInExactSelector(labelValue: string): string {
return labelValue.replace(/\\/g, '\\\\').replace(/\n/g, '\\n').replace(/"/g, '\\"');
}
export function unescapeLabelValue(labelValue: string): string {
return labelValue.replace(/\\n/g, '\n').replace(/\\"/g, '"').replace(/\\\\/g, '\\');
}
export function escapeLabelValueInRegexSelector(labelValue: string): string {
return escapeLabelValueInExactSelector(escapeLokiRegexp(labelValue));
}
export function escapeLabelValueInSelector(labelValue: string, selector?: string): string {
return isRegexSelector(selector)
? escapeLabelValueInRegexSelector(labelValue)
: escapeLabelValueInExactSelector(labelValue);
}
export function isRegexSelector(selector?: string) {
if (selector && (selector.includes('=~') || selector.includes('!~'))) {
return true;
}
return false;
}
export function isBytesString(string: string) {
const BYTES_KEYWORDS = [
'b',
'kib',
'Kib',
'kb',
'KB',
'mib',
'Mib',
'mb',
'MB',
'gib',
'Gib',
'gb',
'GB',
'tib',
'Tib',
'tb',
'TB',
'pib',
'Pib',
'pb',
'PB',
'eib',
'Eib',
'eb',
'EB',
];
const regex = new RegExp(`^(?:-?\\d+(?:\\.\\d+)?)(?:${BYTES_KEYWORDS.join('|')})$`);
const match = string.match(regex);
return !!match;
}

View File

@@ -1,28 +0,0 @@
export function isLogLineJSON(line: string): boolean {
let parsed;
try {
parsed = JSON.parse(line);
} catch (error) {}
// The JSON parser should only be used for log lines that are valid serialized JSON objects.
return typeof parsed === 'object';
}
// This matches:
// first a label from start of the string or first white space, then any word chars until "="
// second either an empty quotes, or anything that starts with quote and ends with unescaped quote,
// or any non whitespace chars that do not start with quote
const LOGFMT_REGEXP = /(?:^|\s)([\w\(\)\[\]\{\}]+)=(""|(?:".*?[^\\]"|[^"\s]\S*))/;
export function isLogLineLogfmt(line: string): boolean {
return LOGFMT_REGEXP.test(line);
}
export function isLogLinePacked(line: string): boolean {
let parsed;
try {
parsed = JSON.parse(line);
return parsed.hasOwnProperty('_entry');
} catch (error) {
return false;
}
}

View File

@@ -1,273 +0,0 @@
import { css } from '@emotion/css';
import { debounce } from 'lodash';
import React, { useRef, useEffect } from 'react';
import { useLatest } from 'react-use';
import { v4 as uuidv4 } from 'uuid';
import { GrafanaTheme2 } from '@grafana/data';
import { selectors } from '@grafana/e2e-selectors';
import { parser } from '@grafana/lezer-logql';
import { languageConfiguration, monarchlanguage } from '@grafana/monaco-logql';
import { useTheme2, ReactMonacoEditor, Monaco, monacoTypes, MonacoEditor } from '@grafana/ui';
import { Props } from './MonacoQueryFieldProps';
import { getOverrideServices } from './getOverrideServices';
import { getCompletionProvider, getSuggestOptions } from './monaco-completion-provider';
import { CompletionDataProvider } from './monaco-completion-provider/CompletionDataProvider';
import { placeHolderScopedVars, validateQuery } from './monaco-completion-provider/validation';
const options: monacoTypes.editor.IStandaloneEditorConstructionOptions = {
codeLens: false,
contextmenu: false,
// we need `fixedOverflowWidgets` because otherwise in grafana-dashboards
// the popup is clipped by the panel-visualizations.
fixedOverflowWidgets: true,
folding: false,
fontSize: 14,
lineDecorationsWidth: 8, // used as "padding-left"
lineNumbers: 'off',
minimap: { enabled: false },
overviewRulerBorder: false,
overviewRulerLanes: 0,
padding: {
// these numbers were picked so that visually this matches the previous version
// of the query-editor the best
top: 4,
bottom: 5,
},
renderLineHighlight: 'none',
scrollbar: {
vertical: 'hidden',
verticalScrollbarSize: 8, // used as "padding-right"
horizontal: 'hidden',
horizontalScrollbarSize: 0,
alwaysConsumeMouseWheel: false,
},
scrollBeyondLastLine: false,
suggest: getSuggestOptions(),
suggestFontSize: 12,
wordWrap: 'on',
};
// this number was chosen by testing various values. it might be necessary
// because of the width of the border, not sure.
//it needs to do 2 things:
// 1. when the editor is single-line, it should make the editor height be visually correct
// 2. when the editor is multi-line, the editor should not be "scrollable" (meaning,
// you do a scroll-movement in the editor, and it will scroll the content by a couple pixels
// up & down. this we want to avoid)
const EDITOR_HEIGHT_OFFSET = 2;
const LANG_ID = 'logql';
// we must only run the lang-setup code once
let LANGUAGE_SETUP_STARTED = false;
export const defaultWordPattern = /(-?\d*\.\d\w*)|([^`~!#%^&*()\-=+\[{\]}\\|;:'",.<>\/?\s]+)/g;
function ensureLogQL(monaco: Monaco) {
if (LANGUAGE_SETUP_STARTED === false) {
LANGUAGE_SETUP_STARTED = true;
monaco.languages.register({ id: LANG_ID });
monaco.languages.setMonarchTokensProvider(LANG_ID, monarchlanguage);
monaco.languages.setLanguageConfiguration(LANG_ID, {
...languageConfiguration,
wordPattern: /(-?\d*\.\d\w*)|([^`~!#%^&*()+\[{\]}\\|;:',.<>\/?\s]+)/g,
// Default: /(-?\d*\.\d\w*)|([^`~!#%^&*()\-=+\[{\]}\\|;:'",.<>\/?\s]+)/g
// Removed `"`, `=`, and `-`, from the exclusion list, so now the completion provider can decide to overwrite any matching words, or just insert text at the cursor
});
}
}
const getStyles = (theme: GrafanaTheme2, placeholder: string) => {
return {
container: css`
border-radius: ${theme.shape.radius.default};
border: 1px solid ${theme.components.input.borderColor};
width: 100%;
.monaco-editor .suggest-widget {
min-width: 50%;
}
`,
placeholder: css`
::after {
content: '${placeholder}';
font-family: ${theme.typography.fontFamilyMonospace};
opacity: 0.3;
}
`,
};
};
const MonacoQueryField = ({ history, onBlur, onRunQuery, initialValue, datasource, placeholder, onChange }: Props) => {
const id = uuidv4();
// we need only one instance of `overrideServices` during the lifetime of the react component
const overrideServicesRef = useRef(getOverrideServices());
const containerRef = useRef<HTMLDivElement>(null);
const langProviderRef = useLatest(datasource.languageProvider);
const historyRef = useLatest(history);
const onRunQueryRef = useLatest(onRunQuery);
const onBlurRef = useLatest(onBlur);
const autocompleteCleanupCallback = useRef<(() => void) | null>(null);
const theme = useTheme2();
const styles = getStyles(theme, placeholder);
useEffect(() => {
// when we unmount, we unregister the autocomplete-function, if it was registered
return () => {
autocompleteCleanupCallback.current?.();
};
}, []);
const setPlaceholder = (monaco: Monaco, editor: MonacoEditor) => {
const placeholderDecorators = [
{
range: new monaco.Range(1, 1, 1, 1),
options: {
className: styles.placeholder,
isWholeLine: true,
},
},
];
let decorators: string[] = [];
const checkDecorators: () => void = () => {
const model = editor.getModel();
if (!model) {
return;
}
const newDecorators = model.getValueLength() === 0 ? placeholderDecorators : [];
decorators = model.deltaDecorations(decorators, newDecorators);
};
checkDecorators();
editor.onDidChangeModelContent(checkDecorators);
};
const onTypeDebounced = debounce(async (query: string) => {
onChange(query);
}, 1000);
return (
<div
aria-label={selectors.components.QueryField.container}
className={styles.container}
// NOTE: we will be setting inline-style-width/height on this element
ref={containerRef}
>
<ReactMonacoEditor
overrideServices={overrideServicesRef.current}
options={options}
language={LANG_ID}
value={initialValue}
beforeMount={(monaco) => {
ensureLogQL(monaco);
}}
onMount={(editor, monaco) => {
// Monaco has a bug where it runs actions on all instances (https://github.com/microsoft/monaco-editor/issues/2947), so we ensure actions are executed on instance-level with this ContextKey.
const isEditorFocused = editor.createContextKey<boolean>('isEditorFocused' + id, false);
// we setup on-blur
editor.onDidBlurEditorWidget(() => {
isEditorFocused.set(false);
onBlurRef.current(editor.getValue());
});
editor.onDidChangeModelContent((e) => {
const model = editor.getModel();
if (!model) {
return;
}
const query = model.getValue();
const errors =
validateQuery(
query,
datasource.interpolateString(query, placeHolderScopedVars),
model.getLinesContent(),
parser
) || [];
const markers = errors.map(({ error, ...boundary }: any) => ({
message: `${
error ? `Error parsing "${error}"` : 'Parse error'
}. The query appears to be incorrect and could fail to be executed.`,
severity: monaco.MarkerSeverity.Error,
...boundary,
}));
onTypeDebounced(query);
monaco.editor.setModelMarkers(model, 'owner', markers);
});
const dataProvider = new CompletionDataProvider(langProviderRef.current, historyRef);
const completionProvider = getCompletionProvider(monaco, dataProvider);
// completion-providers in monaco are not registered directly to editor-instances,
// they are registered to languages. this makes it hard for us to have
// separate completion-providers for every query-field-instance
// (but we need that, because they might connect to different datasources).
// the trick we do is, we wrap the callback in a "proxy",
// and in the proxy, the first thing is, we check if we are called from
// "our editor instance", and if not, we just return nothing. if yes,
// we call the completion-provider.
const filteringCompletionProvider: monacoTypes.languages.CompletionItemProvider = {
...completionProvider,
provideCompletionItems: (model, position, context, token) => {
// if the model-id does not match, then this call is from a different editor-instance,
// not "our instance", so return nothing
if (editor.getModel()?.id !== model.id) {
return { suggestions: [] };
}
return completionProvider.provideCompletionItems(model, position, context, token);
},
};
const { dispose } = monaco.languages.registerCompletionItemProvider(LANG_ID, filteringCompletionProvider);
autocompleteCleanupCallback.current = dispose;
// this code makes the editor resize itself so that the content fits
// (it will grow taller when necessary)
// FIXME: maybe move this functionality into CodeEditor, like:
// <CodeEditor resizingMode="single-line"/>
const handleResize = () => {
const containerDiv = containerRef.current;
if (containerDiv !== null) {
const pixelHeight = editor.getContentHeight();
containerDiv.style.height = `${pixelHeight + EDITOR_HEIGHT_OFFSET}px`;
const pixelWidth = containerDiv.clientWidth;
editor.layout({ width: pixelWidth, height: pixelHeight });
}
};
editor.onDidContentSizeChange(handleResize);
handleResize();
// handle: shift + enter
// FIXME: maybe move this functionality into CodeEditor?
editor.addCommand(
monaco.KeyMod.Shift | monaco.KeyCode.Enter,
() => {
onRunQueryRef.current(editor.getValue());
},
'isEditorFocused' + id
);
editor.onDidFocusEditorText(() => {
isEditorFocused.set(true);
if (editor.getValue().trim() === '') {
editor.trigger('', 'editor.action.triggerSuggest', {});
}
});
setPlaceholder(monaco, editor);
}}
/>
</div>
);
};
// Default export for lazy load.
export default MonacoQueryField;

View File

@@ -1,13 +0,0 @@
import React, { Suspense } from 'react';
import { Props } from './MonacoQueryFieldProps';
const Field = React.lazy(() => import(/* webpackChunkName: "loki-query-field" */ './MonacoQueryField'));
export const MonacoQueryFieldLazy = (props: Props) => {
return (
<Suspense fallback={null}>
<Field {...props} />
</Suspense>
);
};

View File

@@ -1,17 +0,0 @@
import { HistoryItem } from '@grafana/data';
import { LokiDatasource, LokiQuery } from '../types';
// we need to store this in a separate file,
// because we have an async-wrapper around,
// the react-component, and it needs the same
// props as the sync-component.
export type Props = {
initialValue: string;
history: Array<HistoryItem<LokiQuery>>;
onRunQuery: (value: string) => void;
onBlur: (value: string) => void;
placeholder: string;
datasource: LokiDatasource;
onChange: (query: string) => void;
};

View File

@@ -1,27 +0,0 @@
import React, { useRef } from 'react';
import { MonacoQueryFieldLazy } from './MonacoQueryFieldLazy';
import { Props as MonacoProps } from './MonacoQueryFieldProps';
export type Props = Omit<MonacoProps, 'onRunQuery' | 'onBlur'> & {
onChange: (query: string) => void;
onRunQuery: () => void;
onQueryType?: (query: string) => void;
};
export const MonacoQueryFieldWrapper = (props: Props) => {
const lastRunValueRef = useRef<string | null>(null);
const { onRunQuery, onChange, ...rest } = props;
const handleRunQuery = (value: string) => {
lastRunValueRef.current = value;
onChange(value);
onRunQuery();
};
const handleBlur = (value: string) => {
onChange(value);
};
return <MonacoQueryFieldLazy onRunQuery={handleRunQuery} onBlur={handleBlur} onChange={onChange} {...rest} />;
};

View File

@@ -1,112 +0,0 @@
import { monacoTypes } from '@grafana/ui';
// this thing here is a workaround in a way.
// what we want to achieve, is that when the autocomplete-window
// opens, the "second, extra popup" with the extra help,
// also opens automatically.
// but there is no API to achieve it.
// the way to do it is to implement the `storageService`
// interface, and provide our custom implementation,
// which will default to `true` for the correct string-key.
// unfortunately, while the typescript-interface exists,
// it is not exported from monaco-editor,
// so we cannot rely on typescript to make sure
// we do it right. all we can do is to manually
// lookup the interface, and make sure we code our code right.
// our code is a "best effort" approach,
// i am not 100% how the `scope` and `target` things work,
// but so far it seems to work ok.
// i would use another approach, if there was one available.
function makeStorageService() {
// we need to return an object that fulfills this interface:
// https://github.com/microsoft/vscode/blob/ff1e16eebb93af79fd6d7af1356c4003a120c563/src/vs/platform/storage/common/storage.ts#L37
// unfortunately it is not export from monaco-editor
const strings = new Map<string, string>();
// we want this to be true by default
strings.set('expandSuggestionDocs', true.toString());
return {
// we do not implement the on* handlers
onDidChangeValue: (data: unknown): void => undefined,
onDidChangeTarget: (data: unknown): void => undefined,
onWillSaveState: (data: unknown): void => undefined,
get: (key: string, scope: unknown, fallbackValue?: string): string | undefined => {
return strings.get(key) ?? fallbackValue;
},
getBoolean: (key: string, scope: unknown, fallbackValue?: boolean): boolean | undefined => {
const val = strings.get(key);
if (val !== undefined) {
// the interface docs say the value will be converted
// to a boolean but do not specify how, so we improvise
return val === 'true';
} else {
return fallbackValue;
}
},
getNumber: (key: string, scope: unknown, fallbackValue?: number): number | undefined => {
const val = strings.get(key);
if (val !== undefined) {
return parseInt(val, 10);
} else {
return fallbackValue;
}
},
store: (
key: string,
value: string | boolean | number | undefined | null,
scope: unknown,
target: unknown
): void => {
// the interface docs say if the value is nullish, it should act as delete
if (value === null || value === undefined) {
strings.delete(key);
} else {
strings.set(key, value.toString());
}
},
remove: (key: string, scope: unknown): void => {
strings.delete(key);
},
keys: (scope: unknown, target: unknown): string[] => {
return Array.from(strings.keys());
},
logStorage: (): void => {
console.log('logStorage: not implemented');
},
migrate: (): Promise<void> => {
// we do not implement this
return Promise.resolve(undefined);
},
isNew: (scope: unknown): boolean => {
// we create a new storage for every session, we do not persist it,
// so we return `true`.
return true;
},
flush: (reason?: unknown): Promise<void> => {
// we do not implement this
return Promise.resolve(undefined);
},
};
}
let overrideServices: monacoTypes.editor.IEditorOverrideServices = {
storageService: makeStorageService(),
};
export function getOverrideServices(): monacoTypes.editor.IEditorOverrideServices {
// One instance of this for every query editor
return overrideServices;
}

View File

@@ -1,97 +0,0 @@
import { chain } from 'lodash';
import { HistoryItem } from '@grafana/data';
import { LokiQuery, ParserAndLabelKeysResult, LanguageProvider } from '../../types';
export function escapeLabelValueInExactSelector(labelValue: string): string {
return labelValue.replace(/\\/g, '\\\\').replace(/\n/g, '\\n').replace(/"/g, '\\"');
}
import { Label } from './situation';
interface HistoryRef {
current: Array<HistoryItem<LokiQuery>>;
}
export class CompletionDataProvider {
constructor(
private languageProvider: LanguageProvider,
private historyRef: HistoryRef = { current: [] }
) {
this.queryToLabelKeysCache = new Map();
}
private queryToLabelKeysCache: Map<string, ParserAndLabelKeysResult>;
private buildSelector(labels: Label[]): string {
const allLabelTexts = labels.map(
(label) => `${label.name}${label.op}"${escapeLabelValueInExactSelector(label.value)}"`
);
return `{${allLabelTexts.join(',')}}`;
}
getHistory() {
return chain(this.historyRef.current)
.map((history: HistoryItem<LokiQuery>) => history.query.expr)
.filter()
.uniq()
.value();
}
async getLabelNames(otherLabels: Label[] = []) {
if (otherLabels.length === 0) {
// if there is no filtering, we have to use a special endpoint
return this.languageProvider.getLabelKeys();
}
const data = await this.getSeriesLabels(otherLabels);
const possibleLabelNames = Object.keys(data); // all names from datasource
const usedLabelNames = new Set(otherLabels.map((l) => l.name)); // names used in the query
return possibleLabelNames.filter((label) => !usedLabelNames.has(label));
}
async getLabelValues(labelName: string, otherLabels: Label[]) {
if (otherLabels.length === 0) {
// if there is no filtering, we have to use a special endpoint
return await this.languageProvider.fetchLabelValues(labelName);
}
const data = await this.getSeriesLabels(otherLabels);
return data[labelName] ?? [];
}
/**
* Runs a Loki query to extract label keys from the result.
* The result is cached for the query string.
*
* Since various "situations" in the monaco code editor trigger this function, it is prone to being called multiple times for the same query
* Here is a lightweight and simple cache to avoid calling the backend multiple times for the same query.
*
* @param logQuery
*/
async getParserAndLabelKeys(logQuery: string): Promise<ParserAndLabelKeysResult> {
const EXTRACTED_LABEL_KEYS_MAX_CACHE_SIZE = 2;
const cachedLabelKeys = this.queryToLabelKeysCache.has(logQuery) ? this.queryToLabelKeysCache.get(logQuery) : null;
if (cachedLabelKeys) {
// cache hit! Serve stale result from cache
return cachedLabelKeys;
} else {
// If cache is larger than max size, delete the first (oldest) index
if (this.queryToLabelKeysCache.size >= EXTRACTED_LABEL_KEYS_MAX_CACHE_SIZE) {
// Make room in the cache for the fresh result by deleting the "first" index
const keys = this.queryToLabelKeysCache.keys();
const firstKey = keys.next().value;
this.queryToLabelKeysCache.delete(firstKey);
}
// Fetch a fresh result from the backend
const labelKeys = await this.languageProvider.getParserAndLabelKeys(logQuery);
// Add the result to the cache
this.queryToLabelKeysCache.set(logQuery, labelKeys);
return labelKeys;
}
}
async getSeriesLabels(labels: Label[]) {
return await this.languageProvider.fetchSeriesLabels(this.buildSelector(labels)).then((data: any) => data ?? {});
}
}

View File

@@ -1,22 +0,0 @@
// This helper class is used to make typescript warn you when you miss a case-block in a switch statement.
// For example:
//
// const x:'A'|'B'|'C' = 'A';
//
// switch(x) {
// case 'A':
// // something
// case 'B':
// // something
// default:
// throw new NeverCaseError(x);
// }
//
//
// TypeScript detects the missing case and displays an error.
export class NeverCaseError extends Error {
constructor(value: never) {
super(`Unexpected case in switch statement: ${JSON.stringify(value)}`);
}
}

View File

@@ -1,201 +0,0 @@
import type { Monaco, monacoTypes } from '@grafana/ui';
import { CompletionDataProvider } from './CompletionDataProvider';
import { NeverCaseError } from './NeverCaseError';
import { Situation, getSituation } from './situation';
type CompletionType =
| 'HISTORY'
| 'FUNCTION'
| 'DURATION'
| 'LABEL_NAME'
| 'LABEL_VALUE'
| 'PATTERN'
| 'PARSER'
| 'LINE_FILTER'
| 'PIPE_OPERATION';
type Completion = {
type: CompletionType;
label: string;
insertText: string;
detail?: string;
documentation?: string;
triggerOnInsert?: boolean;
isSnippet?: boolean;
};
const DURATION_COMPLETIONS: Completion[] = ['$__auto', '1m', '5m', '10m', '30m', '1h', '1d'].map((text) => ({
type: 'DURATION',
label: text,
insertText: text,
}));
const getCompletions = async (situation: Situation, dataProvider: CompletionDataProvider) => {
return DURATION_COMPLETIONS;
};
// from: monacoTypes.languages.CompletionItemInsertTextRule.InsertAsSnippet
const INSERT_AS_SNIPPET_ENUM_VALUE = 4;
export function getSuggestOptions(): monacoTypes.editor.ISuggestOptions {
return {
// monaco-editor sometimes provides suggestions automatically, i am not
// sure based on what, seems to be by analyzing the words already
// written.
// to try it out:
// - enter `go_goroutines{job~`
// - have the cursor at the end of the string
// - press ctrl-enter
// - you will get two suggestions
// those were not provided by grafana, they are offered automatically.
// i want to remove those. the only way i found is:
// - every suggestion-item has a `kind` attribute,
// that controls the icon to the left of the suggestion.
// - items auto-generated by monaco have `kind` set to `text`.
// - we make sure grafana-provided suggestions do not have `kind` set to `text`.
// - and then we tell monaco not to show suggestions of kind `text`
showWords: false,
};
}
function getMonacoCompletionItemKind(type: CompletionType, monaco: Monaco): monacoTypes.languages.CompletionItemKind {
switch (type) {
case 'DURATION':
return monaco.languages.CompletionItemKind.Unit;
case 'FUNCTION':
return monaco.languages.CompletionItemKind.Variable;
case 'HISTORY':
return monaco.languages.CompletionItemKind.Snippet;
case 'LABEL_NAME':
return monaco.languages.CompletionItemKind.Enum;
case 'LABEL_VALUE':
return monaco.languages.CompletionItemKind.EnumMember;
case 'PATTERN':
return monaco.languages.CompletionItemKind.Constructor;
case 'PARSER':
return monaco.languages.CompletionItemKind.Class;
case 'LINE_FILTER':
return monaco.languages.CompletionItemKind.TypeParameter;
case 'PIPE_OPERATION':
return monaco.languages.CompletionItemKind.Interface;
default:
throw new NeverCaseError(type as never);
}
}
export function getCompletionProvider(
monaco: Monaco,
dataProvider: CompletionDataProvider
): monacoTypes.languages.CompletionItemProvider {
const provideCompletionItems = (
model: monacoTypes.editor.ITextModel,
position: monacoTypes.Position
): monacoTypes.languages.ProviderResult<monacoTypes.languages.CompletionList> => {
const word = model.getWordAtPosition(position);
const wordUntil = model.getWordUntilPosition(position);
// documentation says `position` will be "adjusted" in `getOffsetAt`
// i don't know what that means, to be sure i clone it
const positionClone = {
column: position.column,
lineNumber: position.lineNumber,
};
const offset = model.getOffsetAt(positionClone);
const situation = getSituation(model.getValue(), offset);
const range = calculateRange(situation, word, wordUntil, monaco, position);
const completionsPromise = situation != null ? getCompletions(situation, dataProvider) : Promise.resolve([]);
return completionsPromise.then((items) => {
// monaco by default alphabetically orders the items.
// to stop it, we use a number-as-string sortkey,
// so that monaco keeps the order we use
const maxIndexDigits = items.length.toString().length;
const suggestions: monacoTypes.languages.CompletionItem[] = items.map((item, index) => ({
kind: getMonacoCompletionItemKind(item.type, monaco),
label: item.label,
insertText: item.insertText,
insertTextRules: item.isSnippet ? INSERT_AS_SNIPPET_ENUM_VALUE : undefined,
detail: item.detail,
documentation: item.documentation,
sortText: index.toString().padStart(maxIndexDigits, '0'), // to force the order we have
range: range,
command: item.triggerOnInsert
? {
id: 'editor.action.triggerSuggest',
title: '',
}
: undefined,
}));
return { suggestions };
});
};
return {
triggerCharacters: ['{', ',', '[', '(', '=', '~', ' ', '"', '|'],
provideCompletionItems,
};
}
export const calculateRange = (
situation: Situation | null,
word: monacoTypes.editor.IWordAtPosition | null,
wordUntil: monacoTypes.editor.IWordAtPosition,
monaco: Monaco,
position: monacoTypes.Position
): monacoTypes.Range => {
if (
situation &&
situation?.type === 'IN_LABEL_SELECTOR_WITH_LABEL_NAME' &&
'betweenQuotes' in situation &&
situation.betweenQuotes
) {
// Word until won't have second quote if they are between quotes
const indexOfFirstQuote = wordUntil?.word?.indexOf('"') ?? 0;
const indexOfLastQuote = word?.word?.lastIndexOf('"') ?? 0;
const indexOfEquals = word?.word.indexOf('=');
const indexOfLastEquals = word?.word.lastIndexOf('=');
// Just one equals "=" the cursor is somewhere within a label value
// e.g. value="labe^l-value" or value="^label-value" etc
// We want the word to include everything within the quotes, so the result from autocomplete overwrites the existing label value
if (
indexOfLastEquals === indexOfEquals &&
indexOfFirstQuote !== -1 &&
indexOfLastQuote !== -1 &&
indexOfLastEquals !== -1
) {
return word != null
? monaco.Range.lift({
startLineNumber: position.lineNumber,
endLineNumber: position.lineNumber,
startColumn: wordUntil.startColumn + indexOfFirstQuote + 1,
endColumn: wordUntil.startColumn + indexOfLastQuote,
})
: monaco.Range.fromPositions(position);
}
}
if (situation && situation.type === 'IN_LABEL_SELECTOR_WITH_LABEL_NAME') {
// Otherwise we want the range to be calculated as the cursor position, as we want to insert the autocomplete, instead of overwriting existing text
// The cursor position is the length of the wordUntil
return word != null
? monaco.Range.lift({
startLineNumber: position.lineNumber,
endLineNumber: position.lineNumber,
startColumn: wordUntil.endColumn,
endColumn: wordUntil.endColumn,
})
: monaco.Range.fromPositions(position);
}
// And for all other non-label cases, we want to use the word start and end column
return word != null
? monaco.Range.lift({
startLineNumber: position.lineNumber,
endLineNumber: position.lineNumber,
startColumn: word.startColumn,
endColumn: word.endColumn,
})
: monaco.Range.fromPositions(position);
};

View File

@@ -1,67 +0,0 @@
export type LabelOperator = '=' | '!=' | '=~' | '!~';
export type Label = {
name: string;
value: string;
op: LabelOperator;
};
export type Situation =
| {
type: 'EMPTY';
}
| {
type: 'AT_ROOT';
}
| {
type: 'IN_LOGFMT';
otherLabels: string[];
flags: boolean;
trailingSpace: boolean;
trailingComma: boolean;
logQuery: string;
}
| {
type: 'IN_RANGE';
}
| {
type: 'IN_AGGREGATION';
}
| {
type: 'IN_GROUPING';
logQuery: string;
}
| {
type: 'IN_LABEL_SELECTOR_NO_LABEL_NAME';
otherLabels: Label[];
}
| {
type: 'IN_LABEL_SELECTOR_WITH_LABEL_NAME';
labelName: string;
betweenQuotes: boolean;
otherLabels: Label[];
}
| {
type: 'AFTER_SELECTOR';
afterPipe: boolean;
hasSpace: boolean;
logQuery: string;
}
| {
type: 'AFTER_UNWRAP';
logQuery: string;
}
| {
type: 'AFTER_KEEP_AND_DROP';
logQuery: string;
};
/**
* THIS METHOD IS KNOWN TO BE INCOMPLETE due to the decoupling of the Tempo datasource from Grafana core:
* Incomplete support for LogQL autocomplete from 'public/app/plugins/datasource/loki/components/monaco-query-field/monaco-completion-provider/situation.ts';
*/
export const getSituation = (text: string, pos: number): Situation | null => {
return {
type: 'EMPTY',
};
};

View File

@@ -1,126 +0,0 @@
import { SyntaxNode } from '@lezer/common';
import { LRParser } from '@lezer/lr';
// import { ErrorId } from 'app/plugins/datasource/prometheus/querybuilder/shared/parsingUtils';
const ErrorId = 0;
interface ParserErrorBoundary {
startLineNumber: number;
startColumn: number;
endLineNumber: number;
endColumn: number;
error: string;
}
interface ParseError {
text: string;
node: SyntaxNode;
}
/**
* Conceived to work in combination with the MonacoQueryField component.
* Given an original query and its interpolated version, it will return an array of ParserErrorBoundary
* objects containing nodes which are actual errors. The interpolated version (even with placeholder variables)
* is required because variables look like errors for Lezer.
* @internal
*/
export function validateQuery(
query: string,
interpolatedQuery: string,
queryLines: string[],
parser: LRParser
): ParserErrorBoundary[] | false {
if (!query) {
return false;
}
/**
* To provide support to variable interpolation in query validation, we run the parser in the interpolated
* query. If there are errors there, we trace them back to the original unparsed query, so we can more
* accurately highlight the error in the query, since it's likely that the variable name and variable value
* have different lengths. With this, we also exclude irrelevant parser errors that are produced by
* lezer not understanding $variables and $__variables, which usually generate 2 or 3 error SyntaxNode.
*/
const interpolatedErrors: ParseError[] = parseQuery(interpolatedQuery, parser);
if (!interpolatedErrors.length) {
return false;
}
let parseErrors: ParseError[] = interpolatedErrors;
if (query !== interpolatedQuery) {
const queryErrors: ParseError[] = parseQuery(query, parser);
parseErrors = interpolatedErrors.flatMap(
(interpolatedError) =>
queryErrors.filter((queryError) => interpolatedError.text === queryError.text) || interpolatedError
);
}
return parseErrors.map((parseError) => findErrorBoundary(query, queryLines, parseError)).filter(isErrorBoundary);
}
function parseQuery(query: string, parser: LRParser) {
const parseErrors: ParseError[] = [];
const tree = parser.parse(query);
tree.iterate({
enter: (nodeRef): false | void => {
if (nodeRef.type.id === ErrorId) {
const node = nodeRef.node;
parseErrors.push({
node: node,
text: query.substring(node.from, node.to),
});
}
},
});
return parseErrors;
}
function findErrorBoundary(query: string, queryLines: string[], parseError: ParseError): ParserErrorBoundary | null {
if (queryLines.length === 1) {
const isEmptyString = parseError.node.from === parseError.node.to;
const errorNode = isEmptyString && parseError.node.parent ? parseError.node.parent : parseError.node;
const error = isEmptyString ? query.substring(errorNode.from, errorNode.to) : parseError.text;
return {
startLineNumber: 1,
startColumn: errorNode.from + 1,
endLineNumber: 1,
endColumn: errorNode.to + 1,
error,
};
}
let startPos = 0,
endPos = 0;
for (let line = 0; line < queryLines.length; line++) {
endPos = startPos + queryLines[line].length;
if (parseError.node.from > endPos) {
startPos += queryLines[line].length + 1;
continue;
}
return {
startLineNumber: line + 1,
startColumn: parseError.node.from - startPos + 1,
endLineNumber: line + 1,
endColumn: parseError.node.to - startPos + 1,
error: parseError.text,
};
}
return null;
}
function isErrorBoundary(boundary: ParserErrorBoundary | null): boundary is ParserErrorBoundary {
return boundary !== null;
}
export const placeHolderScopedVars = {
__interval: { text: '1s', value: '1s' },
__rate_interval: { text: '1s', value: '1s' },
__auto: { text: '1s', value: '1s' },
__interval_ms: { text: '1000', value: 1000 },
__range_ms: { text: '1000', value: 1000 },
__range_s: { text: '1', value: 1 },
__range: { text: '1s', value: '1s' },
};

View File

@@ -1,260 +0,0 @@
import {
DataFrame,
DataFrameType,
DataQueryResponse,
DataQueryResponseData,
Field,
FieldType,
isValidGoDuration,
Labels,
QueryResultMetaStat,
shallowCompare,
} from '@grafana/data';
import { isBytesString } from './languageUtils';
import { isLogLineJSON, isLogLineLogfmt, isLogLinePacked } from './lineParser';
export function dataFrameHasLokiError(frame: DataFrame): boolean {
const labelSets: Labels[] = frame.fields.find((f) => f.name === 'labels')?.values ?? [];
return labelSets.some((labels) => labels.__error__ !== undefined);
}
export function dataFrameHasLevelLabel(frame: DataFrame): boolean {
const labelSets: Labels[] = frame.fields.find((f) => f.name === 'labels')?.values ?? [];
return labelSets.some((labels) => labels.level !== undefined);
}
export function extractLogParserFromDataFrame(frame: DataFrame): {
hasLogfmt: boolean;
hasJSON: boolean;
hasPack: boolean;
} {
const lineField = frame.fields.find((field) => field.type === FieldType.string);
if (lineField == null) {
return { hasJSON: false, hasLogfmt: false, hasPack: false };
}
const logLines: string[] = lineField.values;
let hasJSON = false;
let hasLogfmt = false;
let hasPack = false;
logLines.forEach((line) => {
if (isLogLineJSON(line)) {
hasJSON = true;
hasPack = isLogLinePacked(line);
}
if (isLogLineLogfmt(line)) {
hasLogfmt = true;
}
});
return { hasLogfmt, hasJSON, hasPack };
}
export function extractLabelKeysFromDataFrame(frame: DataFrame): string[] {
const labelsArray: Array<{ [key: string]: string }> | undefined =
frame?.fields?.find((field) => field.name === 'labels')?.values ?? [];
if (!labelsArray?.length) {
return [];
}
return Object.keys(labelsArray[0]);
}
export function extractUnwrapLabelKeysFromDataFrame(frame: DataFrame): string[] {
const labelsArray: Array<{ [key: string]: string }> | undefined =
frame?.fields?.find((field) => field.name === 'labels')?.values ?? [];
if (!labelsArray?.length) {
return [];
}
// We look only at the first labels object, because we only want to consider labels that are present in every log line.
// possibleUnwrapLabels are labels whose value is 1. a number, OR 2. a valid Go duration, OR 3. a bytes string.
const possibleUnwrapLabels = Object.keys(labelsArray[0]).filter((key) => {
const value = labelsArray[0][key];
if (!value) {
return false;
}
return !isNaN(Number(value)) || isValidGoDuration(value) || isBytesString(value);
});
// Add only labels that are present in every line to unwrapLabels
return possibleUnwrapLabels.filter((label) => labelsArray.every((obj) => obj[label]));
}
export function extractHasErrorLabelFromDataFrame(frame: DataFrame): boolean {
const labelField = frame.fields.find((field) => field.name === 'labels' && field.type === FieldType.other);
if (labelField == null) {
return false;
}
const labels: Array<{ [key: string]: string }> = labelField.values;
return labels.some((label) => label['__error__']);
}
export function extractLevelLikeLabelFromDataFrame(frame: DataFrame): string | null {
const labelField = frame.fields.find((field) => field.name === 'labels' && field.type === FieldType.other);
if (labelField == null) {
return null;
}
// Depending on the number of labels, this can be a pretty heavy operation.
// Let's just look at the first 2 lines. If needed, we can introduce more later.
const labelsArray: Array<{ [key: string]: string }> = labelField.values.slice(0, 2);
let levelLikeLabel: string | null = null;
// Find first level-like label
for (let labels of labelsArray) {
const label = Object.keys(labels).find((label) => label === 'lvl' || label.includes('level'));
if (label) {
levelLikeLabel = label;
break;
}
}
return levelLikeLabel;
}
function shouldCombine(frame1: DataFrame, frame2: DataFrame): boolean {
if (frame1.refId !== frame2.refId) {
return false;
}
const frameType1 = frame1.meta?.type;
const frameType2 = frame2.meta?.type;
if (frameType1 !== frameType2) {
// we do not join things that have a different type
return false;
}
// metric range query data
if (frameType1 === DataFrameType.TimeSeriesMulti) {
const field1 = frame1.fields.find((f) => f.type === FieldType.number);
const field2 = frame2.fields.find((f) => f.type === FieldType.number);
if (field1 === undefined || field2 === undefined) {
// should never happen
return false;
}
return shallowCompare(field1.labels ?? {}, field2.labels ?? {});
}
// logs query data
// logs use a special attribute in the dataframe's "custom" section
// because we do not have a good "frametype" value for them yet.
const customType1 = frame1.meta?.custom?.frameType;
const customType2 = frame2.meta?.custom?.frameType;
if (customType1 === 'LabeledTimeValues' && customType2 === 'LabeledTimeValues') {
return true;
}
// should never reach here
return false;
}
export function combineResponses(currentResult: DataQueryResponse | null, newResult: DataQueryResponse) {
if (!currentResult) {
return cloneQueryResponse(newResult);
}
newResult.data.forEach((newFrame) => {
const currentFrame = currentResult.data.find((frame) => shouldCombine(frame, newFrame));
if (!currentFrame) {
currentResult.data.push(cloneDataFrame(newFrame));
return;
}
combineFrames(currentFrame, newFrame);
});
const mergedErrors = [...(currentResult.errors ?? []), ...(newResult.errors ?? [])];
// We make sure `.errors` stays undefined, instead of an empty array,
// when there are no errors.
if (mergedErrors.length > 0) {
currentResult.errors = mergedErrors;
}
// The `.error` attribute is obsolete now, but we have to maintain it,
// otherwise some Grafana parts do not behave well.
// We keep the old error if it exists, otherwise the new error, if any.
const mergedError = currentResult.error ?? newResult.error;
if (mergedError != null) {
currentResult.error = mergedError;
}
const mergedTraceIds = [...(currentResult.traceIds ?? []), ...(newResult.traceIds ?? [])];
if (mergedTraceIds.length > 0) {
currentResult.traceIds = mergedTraceIds;
}
return currentResult;
}
function combineFrames(dest: DataFrame, source: DataFrame) {
const totalFields = dest.fields.length;
for (let i = 0; i < totalFields; i++) {
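// [].concat.apply(a, b) behaves like a.concat(...b) here: the incoming (source) values end up before the existing (dest) values.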
dest.fields[i].values = [].concat.apply(source.fields[i].values, dest.fields[i].values);
if (source.fields[i].nanos) {
const nanos: number[] = dest.fields[i].nanos?.slice() || [];
dest.fields[i].nanos = source.fields[i].nanos?.concat(nanos);
}
}
dest.length += source.length;
dest.meta = {
...dest.meta,
stats: getCombinedMetadataStats(dest.meta?.stats ?? [], source.meta?.stats ?? []),
};
}
const TOTAL_BYTES_STAT = 'Summary: total bytes processed';
function getCombinedMetadataStats(
destStats: QueryResultMetaStat[],
sourceStats: QueryResultMetaStat[]
): QueryResultMetaStat[] {
// in the current approach, we only handle a single stat
const destStat = destStats.find((s) => s.displayName === TOTAL_BYTES_STAT);
const sourceStat = sourceStats.find((s) => s.displayName === TOTAL_BYTES_STAT);
if (sourceStat != null && destStat != null) {
return [{ value: sourceStat.value + destStat.value, displayName: TOTAL_BYTES_STAT, unit: destStat.unit }];
}
// Maybe only one of them exists.
const eitherStat = sourceStat ?? destStat;
if (eitherStat != null) {
return [eitherStat];
}
return [];
}
/**
* Deep clones a DataQueryResponse
*/
export function cloneQueryResponse(response: DataQueryResponse): DataQueryResponse {
const newResponse = {
...response,
data: response.data.map(cloneDataFrame),
};
return newResponse;
}
function cloneDataFrame(frame: DataQueryResponseData): DataQueryResponseData {
return {
...frame,
fields: frame.fields.map((field: Field) => ({
...field,
values: field.values,
})),
};
}
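A minimal sketch of the intended call pattern, assuming combineResponses is imported from this module (the './responseUtils' path is illustrative): accumulate partial responses, for example when a query is split into sequential chunks.
import { DataQueryResponse } from '@grafana/data';
import { combineResponses } from './responseUtils'; // illustrative path for the module above

function mergeChunkedResponses(chunks: DataQueryResponse[]): DataQueryResponse | null {
  let merged: DataQueryResponse | null = null;
  for (const chunk of chunks) {
    // The first call clones the chunk; subsequent calls append matching frames, stats, and errors in place.
    merged = combineResponses(merged, chunk);
  }
  return merged;
}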

View File

@@ -1,281 +0,0 @@
import { Grammar } from 'prismjs';
import { CompletionItem } from '@grafana/ui';
export const AGGREGATION_OPERATORS: CompletionItem[] = [
{
label: 'avg',
insertText: 'avg',
documentation: 'Calculate the average over dimensions',
},
{
label: 'bottomk',
insertText: 'bottomk',
documentation: 'Smallest k elements by sample value',
},
{
label: 'count',
insertText: 'count',
documentation: 'Count number of elements in the vector',
},
{
label: 'max',
insertText: 'max',
documentation: 'Select maximum over dimensions',
},
{
label: 'min',
insertText: 'min',
documentation: 'Select minimum over dimensions',
},
{
label: 'stddev',
insertText: 'stddev',
documentation: 'Calculate population standard deviation over dimensions',
},
{
label: 'stdvar',
insertText: 'stdvar',
documentation: 'Calculate population standard variance over dimensions',
},
{
label: 'sum',
insertText: 'sum',
documentation: 'Calculate sum over dimensions',
},
{
label: 'topk',
insertText: 'topk',
documentation: 'Largest k elements by sample value',
},
];
export const PIPE_PARSERS: CompletionItem[] = [
{
label: 'json',
insertText: 'json',
documentation: 'Extracting labels from the log line using json parser.',
},
{
label: 'regexp',
insertText: 'regexp ""',
documentation: 'Extracting labels from the log line using regexp parser.',
move: -1,
},
{
label: 'logfmt',
insertText: 'logfmt',
documentation: 'Extracting labels from the log line using logfmt parser.',
},
{
label: 'pattern',
insertText: 'pattern',
documentation: 'Extracting labels from the log line using pattern parser. Only available in Loki 2.3+.',
},
{
label: 'unpack',
insertText: 'unpack',
detail: 'unpack identifier',
documentation:
'Parses a JSON log line, unpacking all embedded labels in the pack stage. A special property "_entry" will also be used to replace the original log line. Only available in Loki 2.2+.',
},
];
export const PIPE_OPERATORS: CompletionItem[] = [
{
label: 'unwrap',
insertText: 'unwrap',
detail: 'unwrap identifier',
documentation: 'Take labels and use the values as sample data for metric aggregations.',
},
{
label: 'label_format',
insertText: 'label_format',
documentation: 'Use to rename, modify or add labels. For example, | label_format foo=bar .',
},
{
label: 'line_format',
insertText: 'line_format',
documentation: 'Rewrites log line content. For example, | line_format "{{.query}} {{.duration}}" .',
},
];
export const RANGE_VEC_FUNCTIONS = [
{
insertText: 'avg_over_time',
label: 'avg_over_time',
detail: 'avg_over_time(range-vector)',
documentation: 'The average of all values in the specified interval.',
},
{
insertText: 'bytes_over_time',
label: 'bytes_over_time',
detail: 'bytes_over_time(range-vector)',
documentation: 'Counts the amount of bytes used by each log stream for a given range',
},
{
insertText: 'bytes_rate',
label: 'bytes_rate',
detail: 'bytes_rate(range-vector)',
documentation: 'Calculates the number of bytes per second for each stream.',
},
{
insertText: 'first_over_time',
label: 'first_over_time',
detail: 'first_over_time(range-vector)',
documentation: 'The first of all values in the specified interval. Only available in Loki 2.3+.',
},
{
insertText: 'last_over_time',
label: 'last_over_time',
detail: 'last_over_time(range-vector)',
documentation: 'The last of all values in the specified interval. Only available in Loki 2.3+.',
},
{
insertText: 'sum_over_time',
label: 'sum_over_time',
detail: 'sum_over_time(range-vector)',
documentation: 'The sum of all values in the specified interval.',
},
{
insertText: 'count_over_time',
label: 'count_over_time',
detail: 'count_over_time(range-vector)',
documentation: 'The count of all values in the specified interval.',
},
{
insertText: 'max_over_time',
label: 'max_over_time',
detail: 'max_over_time(range-vector)',
documentation: 'The maximum of all values in the specified interval.',
},
{
insertText: 'min_over_time',
label: 'min_over_time',
detail: 'min_over_time(range-vector)',
documentation: 'The minimum of all values in the specified interval.',
},
{
insertText: 'quantile_over_time',
label: 'quantile_over_time',
detail: 'quantile_over_time(scalar, range-vector)',
documentation: 'The φ-quantile (0 ≤ φ ≤ 1) of the values in the specified interval.',
},
{
insertText: 'rate',
label: 'rate',
detail: 'rate(v range-vector)',
documentation: 'Calculates the number of entries per second.',
},
{
insertText: 'stddev_over_time',
label: 'stddev_over_time',
detail: 'stddev_over_time(range-vector)',
documentation: 'The population standard deviation of the values in the specified interval.',
},
{
insertText: 'stdvar_over_time',
label: 'stdvar_over_time',
detail: 'stdvar_over_time(range-vector)',
documentation: 'The population standard variance of the values in the specified interval.',
},
];
export const BUILT_IN_FUNCTIONS = [
{
insertText: 'vector',
label: 'vector',
detail: 'vector(scalar)',
documentation: 'Returns the scalar as a vector with no labels.',
},
];
export const FUNCTIONS = [...AGGREGATION_OPERATORS, ...RANGE_VEC_FUNCTIONS, ...BUILT_IN_FUNCTIONS];
// The Loki grammar is used for query highlighting in query previews outside of the code editor
export const lokiGrammar: Grammar = {
comment: {
pattern: /#.*/,
},
'context-aggregation': {
pattern: /((without|by)\s*)\([^)]*\)/, // by ()
lookbehind: true,
inside: {
'label-key': {
pattern: /[^(),\s][^,)]*[^),\s]*/,
alias: 'attr-name',
},
punctuation: /[()]/,
},
},
'context-labels': {
pattern: /\{[^}]*(?=}?)/,
greedy: true,
inside: {
comment: {
pattern: /#.*/,
},
'label-key': {
pattern: /[a-zA-Z_]\w*(?=\s*(=|!=|=~|!~))/,
alias: 'attr-name',
greedy: true,
},
'label-value': {
pattern: /"(?:\\.|[^\\"])*"/,
greedy: true,
alias: 'attr-value',
},
punctuation: /[{]/,
},
},
'context-pipe': {
pattern: /\s\|[^=~]\s?\w*/i,
inside: {
'pipe-operator': {
pattern: /\|/i,
alias: 'operator',
},
'pipe-operations': {
pattern: new RegExp(`${[...PIPE_PARSERS, ...PIPE_OPERATORS].map((f) => f.label).join('|')}`, 'i'),
alias: 'keyword',
},
},
},
function: new RegExp(`\\b(?:${FUNCTIONS.map((f) => f.label).join('|')})(?=\\s*\\()`, 'i'),
'context-range': [
{
pattern: /\[[^\]]*(?=\])/, // [1m]
inside: {
'range-duration': {
pattern: /\b\d+[smhdwy]\b/i,
alias: 'number',
},
},
},
{
pattern: /(offset\s+)\w+/, // offset 1m
lookbehind: true,
inside: {
'range-duration': {
pattern: /\b\d+[smhdwy]\b/i,
alias: 'number',
},
},
},
],
quote: {
pattern: /"(?:\\.|[^\\"])*"/,
alias: 'string',
greedy: true,
},
backticks: {
pattern: /`(?:\\.|[^\\`])*`/,
alias: 'string',
greedy: true,
},
number: /\b-?\d+((\.\d*)?([eE][+-]?\d+)?)?\b/,
operator: /\s?(\|[=~]?|!=?|<(?:=>?|<|>)?|>[>=]?)\s?/i,
punctuation: /[{}(),.]/,
};
export default lokiGrammar;
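A minimal sketch, assuming prismjs is available, of how the grammar above could be used to highlight a LogQL string outside of the code editor; the function name and import path are illustrative.
import Prism from 'prismjs';
import lokiGrammar from './syntax'; // illustrative path for the module above

export function highlightLokiExpr(expr: string): string {
  // Returns HTML with Prism token markup; the caller is responsible for rendering it safely.
  return Prism.highlight(expr, lokiGrammar, 'loki');
}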

View File

@@ -1,62 +1,3 @@
import { Observable } from 'rxjs';
import {
DataFrame,
DataQueryRequest,
DataQueryResponse,
DataSourceJsonData,
DataSourcePluginMeta,
DataSourceRef,
ScopedVars,
TestDataSourceResponse,
} from '@grafana/data';
import { BackendSrvRequest } from '@grafana/runtime';
import LokiLanguageProvider from './LanguageProvider';
import { Loki as LokiQueryFromSchema, LokiQueryType, SupportingQueryType, LokiQueryDirection } from './dataquery.gen';
export { LokiQueryType };
export enum LokiResultType {
Stream = 'streams',
Vector = 'vector',
Matrix = 'matrix',
}
export interface LokiQuery extends LokiQueryFromSchema {
direction?: LokiQueryDirection;
/** Used only to identify supporting queries, e.g. logs volume, logs sample and data sample */
supportingQueryType?: SupportingQueryType;
// CUE autogenerates `queryType` as `?string`, as that's how it is defined
// in the parent interface (in DataQuery).
// The temporary fix (until this gets improved in the codegen) is to
// override it here.
queryType?: LokiQueryType;
/**
* This is a property for the experimental query splitting feature.
* @experimental
*/
splitDuration?: string;
}
export interface LokiOptions extends DataSourceJsonData {
maxLines?: string;
derivedFields?: DerivedFieldConfig[];
alertmanager?: string;
keepCookies?: string[];
predefinedOperations?: string;
}
export type DerivedFieldConfig = {
matcherRegex: string;
name: string;
url?: string;
urlDisplayLabel?: string;
datasourceUid?: string;
matcherType?: 'label' | 'regex';
};
export interface QueryStats {
streams: number;
chunks: number;
@@ -65,33 +6,3 @@ export interface QueryStats {
// The error message displayed in the UI when we can't estimate the size of the query.
message?: string;
}
export type LokiDatasource = {
name: string;
id: number;
type: string;
uid: string;
query: (request: DataQueryRequest<any>) => Observable<DataQueryResponse> | Promise<DataQueryResponse>;
testDatasource: () => Promise<TestDataSourceResponse>;
meta: DataSourcePluginMeta<{}>;
getRef: () => DataSourceRef;
metadataRequest: (
url: string,
params?: Record<string, string | number>,
options?: Partial<BackendSrvRequest>
) => Promise<any>;
getTimeRangeParams: () => any;
interpolateString: (string: string, scopedVars?: ScopedVars) => string;
getDataSamples: (query: LokiQuery) => Promise<DataFrame[]>;
languageProvider: any;
};
export interface ParserAndLabelKeysResult {
extractedLabelKeys: string[];
hasJSON: boolean;
hasLogfmt: boolean;
hasPack: boolean;
unwrapLabelKeys: string[];
}
export type LanguageProvider = LokiLanguageProvider;
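For illustration only, a DerivedFieldConfig as it might appear in a Loki data source's jsonData, extracting a trace ID from log lines and linking it to a Tempo data source; the regex, uid, and label values are made up, and the import path is illustrative.
import { DerivedFieldConfig } from './types'; // illustrative path for the module above

const exampleDerivedField: DerivedFieldConfig = {
  name: 'traceID',
  matcherRegex: 'traceID=(\\w+)', // the first capture group becomes the field value
  matcherType: 'regex',
  datasourceUid: 'tempo-datasource-uid', // made-up uid of the Tempo data source to link to
  urlDisplayLabel: 'View trace',
};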

View File

@@ -22,7 +22,6 @@ import {
import { config } from '@grafana/runtime';
import { SecureSocksProxySettings, useStyles2, Divider, Stack } from '@grafana/ui';
import { LokiSearchSettings } from './LokiSearchSettings';
import { QuerySettings } from './QuerySettings';
import { ServiceGraphSettings } from './ServiceGraphSettings';
import { TraceQLSearchSettings } from './TraceQLSearchSettings';
@@ -104,19 +103,6 @@ export const ConfigEditor = ({ options, onOptionsChange }: Props) => {
<TraceQLSearchSettings options={options} onOptionsChange={onOptionsChange} />
</ConfigSubSection>
<ConfigSubSection
title="Loki search"
description={
<ConfigDescriptionLink
description="Select a Loki data source to search for traces. Derived fields must be configured in the Loki data source."
suffix="tempo/configure-tempo-data-source/#loki-search"
feature="Loki search"
/>
}
>
<LokiSearchSettings options={options} onOptionsChange={onOptionsChange} />
</ConfigSubSection>
<ConfigSubSection
title="TraceID query"
description={

View File

@@ -1,65 +0,0 @@
import React from 'react';
import {
DataSourceInstanceSettings,
DataSourcePluginOptionsEditorProps,
updateDatasourcePluginJsonDataOption,
} from '@grafana/data';
import { DataSourcePicker } from '@grafana/runtime';
import { Button, InlineField, InlineFieldRow, useStyles2 } from '@grafana/ui';
import { TempoJsonData } from '../types';
import { getStyles } from './QuerySettings';
interface Props extends DataSourcePluginOptionsEditorProps<TempoJsonData> {}
export function LokiSearchSettings({ options, onOptionsChange }: Props) {
const styles = useStyles2(getStyles);
// Default to the traces-to-logs data source if it is configured and Loki search was enabled,
// but only if jsonData.lokiSearch hasn't been set
const legacyDatasource =
options.jsonData.tracesToLogs?.lokiSearch !== false ? options.jsonData.tracesToLogs?.datasourceUid : undefined;
if (legacyDatasource && options.jsonData.lokiSearch === undefined) {
updateDatasourcePluginJsonDataOption({ onOptionsChange, options }, 'lokiSearch', {
datasourceUid: legacyDatasource,
});
}
return (
<div className={styles.container}>
<InlineFieldRow className={styles.row}>
<InlineField tooltip="The Loki data source to use for searching traces" label="Data source" labelWidth={26}>
<DataSourcePicker
inputId="loki-search-data-source-picker"
pluginId="loki"
current={options.jsonData.lokiSearch?.datasourceUid}
noDefault={true}
width={40}
onChange={(ds: DataSourceInstanceSettings) =>
updateDatasourcePluginJsonDataOption({ onOptionsChange, options }, 'lokiSearch', {
datasourceUid: ds.uid,
})
}
/>
</InlineField>
{options.jsonData.lokiSearch?.datasourceUid ? (
<Button
type={'button'}
variant={'secondary'}
size={'sm'}
fill={'text'}
onClick={() => {
updateDatasourcePluginJsonDataOption({ onOptionsChange, options }, 'lokiSearch', {
datasourceUid: undefined,
});
}}
>
Clear
</Button>
) : null}
</InlineFieldRow>
</div>
);
}

View File

@@ -53,8 +53,8 @@ composableKinds: DataQuery: {
tableType?: #SearchTableType
} @cuetsy(kind="interface") @grafana(TSVeneer="type")
// search = Loki search, nativeSearch = Tempo search for backwards compatibility
#TempoQueryType: "traceql" | "traceqlSearch" | "search" | "serviceMap" | "upload" | "nativeSearch" | "traceId" | "clear" @cuetsy(kind="type")
// nativeSearch = Tempo search for backwards compatibility
#TempoQueryType: "traceql" | "traceqlSearch" | "serviceMap" | "upload" | "nativeSearch" | "traceId" | "clear" @cuetsy(kind="type")
// The state of the TraceQL streaming search query
#SearchStreamingState: "pending" | "streaming" | "done" | "error" @cuetsy(kind="enum")

View File

@@ -68,9 +68,9 @@ export const defaultTempoQuery: Partial<TempoQuery> = {
};
/**
* search = Loki search, nativeSearch = Tempo search for backwards compatibility
* nativeSearch = Tempo search for backwards compatibility
*/
export type TempoQueryType = ('traceql' | 'traceqlSearch' | 'search' | 'serviceMap' | 'upload' | 'nativeSearch' | 'traceId' | 'clear');
export type TempoQueryType = ('traceql' | 'traceqlSearch' | 'serviceMap' | 'upload' | 'nativeSearch' | 'traceId' | 'clear');
/**
* The state of the TraceQL streaming search query

View File

@@ -82,10 +82,6 @@ describe('Tempo data source', () => {
return {
refId: 'x',
queryType: 'traceql',
linkedQuery: {
refId: 'linked',
expr: '{instance="$interpolationVar"}',
},
query: '$interpolationVarWithPipe',
spanName: '$interpolationVar',
serviceName: '$interpolationVar',
@@ -138,7 +134,6 @@ describe('Tempo data source', () => {
const ds = new TempoDatasource(defaultSettings, templateSrv);
const queries = ds.interpolateVariablesInQueries([getQuery()], {});
expect(queries[0].linkedQuery?.expr).toBe(`{instance=\"${text}\"}`);
expect(queries[0].query).toBe(textWithPipe);
expect(queries[0].serviceName).toBe(text);
expect(queries[0].spanName).toBe(text);
@@ -157,7 +152,6 @@ describe('Tempo data source', () => {
const resp = ds.applyTemplateVariables(getQuery(), {
interpolationVar: { text: scopedText, value: scopedText },
});
expect(resp.linkedQuery?.expr).toBe(`{instance=\"${scopedText}\"}`);
expect(resp.query).toBe(textWithPipe);
expect(resp.serviceName).toBe(scopedText);
expect(resp.spanName).toBe(scopedText);
@@ -294,7 +288,7 @@ describe('Tempo data source', () => {
const templateSrv = { replace: jest.fn().mockReturnValue(duration) } as unknown as TemplateSrv;
const ds = new TempoDatasource(defaultSettings, templateSrv);
const tempoQuery: TempoQuery = {
queryType: 'search',
queryType: 'nativeSearch',
refId: 'A',
query: '',
serviceName: 'frontend',
@@ -329,7 +323,7 @@ describe('Tempo data source', () => {
it('should include a default limit', () => {
const ds = new TempoDatasource(defaultSettings);
const tempoQuery: TempoQuery = {
queryType: 'search',
queryType: 'nativeSearch',
refId: 'A',
query: '',
search: '',
@@ -345,7 +339,7 @@ describe('Tempo data source', () => {
it('should include time range if provided', () => {
const ds = new TempoDatasource(defaultSettings);
const tempoQuery: TempoQuery = {
queryType: 'search',
queryType: 'nativeSearch',
refId: 'A',
query: '',
search: '',
@@ -381,55 +375,6 @@ describe('Tempo data source', () => {
);
});
it('should get loki search datasource', () => {
// 1. Get lokiSearch.datasource if present
const ds1 = new TempoDatasource({
...defaultSettings,
jsonData: {
lokiSearch: {
datasourceUid: 'loki-1',
},
},
});
const lokiDS1 = ds1.getLokiSearchDS();
expect(lokiDS1).toBe('loki-1');
// 2. Get traceToLogs.datasource
const ds2 = new TempoDatasource({
...defaultSettings,
jsonData: {
tracesToLogs: {
lokiSearch: true,
datasourceUid: 'loki-2',
},
},
});
const lokiDS2 = ds2.getLokiSearchDS();
expect(lokiDS2).toBe('loki-2');
// 3. Return undefined if neither is available
const ds3 = new TempoDatasource(defaultSettings);
const lokiDS3 = ds3.getLokiSearchDS();
expect(lokiDS3).toBe(undefined);
// 4. Return undefined if lokiSearch is undefined, even if traceToLogs is present
// since this indicates the user cleared the fallback setting
const ds4 = new TempoDatasource({
...defaultSettings,
jsonData: {
tracesToLogs: {
lokiSearch: true,
datasourceUid: 'loki-2',
},
lokiSearch: {
datasourceUid: undefined,
},
},
});
const lokiDS4 = ds4.getLokiSearchDS();
expect(lokiDS4).toBe(undefined);
});
describe('test the testDatasource function', () => {
it('should return a success msg if response.ok is true', async () => {
mockObservable = () => of({ ok: true });

View File

@@ -1,5 +1,5 @@
import { groupBy, identity, pick, pickBy, startCase } from 'lodash';
import { EMPTY, from, lastValueFrom, merge, Observable, of, throwError } from 'rxjs';
import { EMPTY, from, lastValueFrom, merge, Observable, of } from 'rxjs';
import { catchError, concatMap, map, mergeMap, toArray } from 'rxjs/operators';
import semver from 'semver';
@@ -10,7 +10,6 @@ import {
DataQueryRequest,
DataQueryResponse,
DataQueryResponseData,
DataSourceApi,
DataSourceGetTagValuesOptions,
DataSourceInstanceSettings,
dateTime,
@@ -38,7 +37,6 @@ import { BarGaugeDisplayMode, TableCellDisplayMode, VariableFormatID } from '@gr
import { generateQueryFromFilters } from './SearchTraceQLEditor/utils';
import { TempoVariableQuery, TempoVariableQueryType } from './VariableQueryEditor';
import { LokiOptions } from './_importedDependencies/datasources/loki/types';
import { PrometheusDatasource, PromQuery } from './_importedDependencies/datasources/prometheus/types';
import { TraceqlFilter, TraceqlSearchScope } from './dataquery.gen';
import {
@@ -60,7 +58,6 @@ import {
formatTraceQLResponse,
transformFromOTLP as transformFromOTEL,
transformTrace,
transformTraceList,
} from './resultTransformer';
import { doTempoChannelStream } from './streaming';
import { SearchQueryParams, TempoJsonData, TempoQuery } from './types';
@@ -107,9 +104,6 @@ export class TempoDatasource extends DataSourceWithBackend<TempoQuery, TempoJson
filters?: TraceqlFilter[];
};
nodeGraph?: NodeGraphOptions;
lokiSearch?: {
datasourceUid?: string;
};
traceQuery?: {
timeShiftEnabled?: boolean;
spanStartTimeShift?: string;
@@ -132,7 +126,6 @@ export class TempoDatasource extends DataSourceWithBackend<TempoQuery, TempoJson
this.serviceMap = instanceSettings.jsonData.serviceMap;
this.search = instanceSettings.jsonData.search;
this.nodeGraph = instanceSettings.jsonData.nodeGraph;
this.lokiSearch = instanceSettings.jsonData.lokiSearch;
this.traceQuery = instanceSettings.jsonData.traceQuery;
this.languageProvider = new TempoLanguageProvider(this);
@@ -272,52 +265,6 @@ export class TempoDatasource extends DataSourceWithBackend<TempoQuery, TempoJson
return of({ data: [], state: LoadingState.Done });
}
const logsDatasourceUid = this.getLokiSearchDS();
// Run search queries on linked datasource
if (logsDatasourceUid && targets.search?.length > 0) {
reportInteraction('grafana_traces_loki_search_queried', {
datasourceType: 'tempo',
app: options.app ?? '',
grafana_version: config.buildInfo.version,
hasLinkedQueryExpr:
targets.search[0].linkedQuery?.expr && targets.search[0].linkedQuery?.expr !== '' ? true : false,
});
const dsSrv = getDataSourceSrv();
subQueries.push(
from(dsSrv.get(logsDatasourceUid)).pipe(
mergeMap((linkedDatasource: DataSourceApi) => {
// Wrap linked query into a data request based on original request
const linkedRequest: DataQueryRequest = { ...options, targets: targets.search.map((t) => t.linkedQuery!) };
// Find trace matchers in the linked datasource's derived fields that point back to this Tempo datasource
const settings: DataSourceInstanceSettings<LokiOptions> = (linkedDatasource as TempoDatasource)
.instanceSettings;
const traceLinkMatcher: string[] =
settings.jsonData.derivedFields
?.filter((field) => field.datasourceUid === this.uid && field.matcherRegex)
.map((field) => field.matcherRegex) || [];
if (!traceLinkMatcher || traceLinkMatcher.length === 0) {
return throwError(
() =>
new Error(
'No Loki datasource configured for search. Set up Derived Fields for traces in a Loki datasource settings and link it to this Tempo datasource.'
)
);
} else {
const response = linkedDatasource.query(linkedRequest);
return from(response).pipe(
map((response) =>
response.error ? response : transformTraceList(response, this.uid, this.name, traceLinkMatcher)
)
);
}
})
)
);
}
if (targets.nativeSearch?.length) {
try {
reportInteraction('grafana_traces_search_queried', {
@@ -534,13 +481,6 @@ export class TempoDatasource extends DataSourceWithBackend<TempoQuery, TempoJson
applyVariables(query: TempoQuery, scopedVars: ScopedVars) {
const expandedQuery = { ...query };
if (query.linkedQuery) {
expandedQuery.linkedQuery = {
...query.linkedQuery,
expr: this.templateSrv.replace(query.linkedQuery?.expr ?? '', scopedVars),
};
}
if (query.filters) {
expandedQuery.filters = query.filters.map((filter) => {
const updatedFilter = {
@@ -867,15 +807,6 @@ export class TempoDatasource extends DataSourceWithBackend<TempoQuery, TempoJson
return searchQuery;
}
// Get linked loki search datasource. Fall back to legacy loki search/trace to logs config
getLokiSearchDS = (): string | undefined => {
const legacyLogsDatasourceUid =
this.tracesToLogs?.lokiSearch !== false && this.lokiSearch === undefined
? this.tracesToLogs?.datasourceUid
: undefined;
return this.lokiSearch?.datasourceUid ?? legacyLogsDatasourceUid;
};
}
function queryPrometheus(request: DataQueryRequest<PromQuery>, datasourceUid: string) {

View File

@@ -1,16 +1,8 @@
import { collectorTypes } from '@opentelemetry/exporter-collector';
import {
FieldType,
createDataFrame,
PluginType,
DataSourceInstanceSettings,
dateTime,
PluginMetaInfo,
} from '@grafana/data';
import { PluginType, DataSourceInstanceSettings, dateTime, PluginMetaInfo } from '@grafana/data';
import {
createTableFrame,
transformToOTLP,
transformFromOTLP,
createTableFrameFromSearch,
@@ -45,41 +37,6 @@ const defaultSettings: DataSourceInstanceSettings = {
jsonData: {},
};
describe('transformTraceList()', () => {
const lokiDataFrame = createDataFrame({
fields: [
{
name: 'ts',
type: FieldType.time,
values: ['2020-02-12T15:05:14.265Z', '2020-02-12T15:05:15.265Z', '2020-02-12T15:05:16.265Z'],
},
{
name: 'line',
type: FieldType.string,
values: [
't=2020-02-12T15:04:51+0000 lvl=info msg="Starting Grafana" logger=server',
't=2020-02-12T15:04:52+0000 lvl=info msg="Starting Grafana" logger=server traceID=asdfa1234',
't=2020-02-12T15:04:53+0000 lvl=info msg="Starting Grafana" logger=server traceID=asdf88',
],
},
],
meta: {
preferredVisualisationType: 'table',
},
});
test('extracts traceIDs from log lines', () => {
const frame = createTableFrame(lokiDataFrame, 't1', 'tempo', ['traceID=(\\w+)', 'traceID=(\\w\\w)']);
expect(frame.fields[0].name).toBe('Time');
expect(frame.fields[0].values[0]).toBe('2020-02-12T15:05:15.265Z');
expect(frame.fields[1].name).toBe('traceID');
expect(frame.fields[1].values[0]).toBe('asdfa1234');
// Second match in new line
expect(frame.fields[0].values[1]).toBe('2020-02-12T15:05:15.265Z');
expect(frame.fields[1].values[1]).toBe('as');
});
});
describe('transformToOTLP()', () => {
test('transforms dataframe to OTLP format', () => {
const otlp = transformToOTLP(otlpDataFrameToResponse);

View File

@@ -6,7 +6,6 @@ import {
createDataFrame,
createTheme,
DataFrame,
DataFrameDTO,
DataLink,
DataLinkConfigOrigin,
DataQueryResponse,
@@ -39,106 +38,6 @@ import {
TraceSearchMetadata,
} from './types';
export function createTableFrame(
logsFrame: DataFrame | DataFrameDTO,
datasourceUid: string,
datasourceName: string,
traceRegexs: string[]
): DataFrame {
const tableFrame = new MutableDataFrame({
fields: [
{
name: 'Time',
type: FieldType.time,
config: {
custom: {
width: 200,
},
},
values: [],
},
{
name: 'traceID',
type: FieldType.string,
config: {
displayNameFromDS: 'Trace ID',
custom: { width: 180 },
links: [
{
title: 'Click to open trace ${__value.raw}',
url: '',
internal: {
datasourceUid,
datasourceName,
query: {
query: '${__value.raw}',
},
},
},
],
},
values: [],
},
{
name: 'Message',
type: FieldType.string,
values: [],
},
],
meta: {
preferredVisualisationType: 'table',
},
});
if (!logsFrame || traceRegexs.length === 0) {
return tableFrame;
}
const timeField = logsFrame.fields.find((f) => f.type === FieldType.time);
// Going through all string fields to look for trace IDs
for (let field of logsFrame.fields) {
let hasMatch = false;
if (field.type === FieldType.string) {
const values = field.values!;
for (let i = 0; i < values.length; i++) {
const line = values[i];
if (line) {
for (let traceRegex of traceRegexs) {
const match = line.match(traceRegex);
if (match) {
const traceId = match[1];
const time = timeField ? timeField.values![i] : null;
tableFrame.fields[0].values.push(time);
tableFrame.fields[1].values.push(traceId);
tableFrame.fields[2].values.push(line);
hasMatch = true;
}
}
}
}
}
if (hasMatch) {
break;
}
}
return tableFrame;
}
export function transformTraceList(
response: DataQueryResponse,
datasourceId: string,
datasourceName: string,
traceRegexs: string[]
): DataQueryResponse {
response.data.forEach((data, index) => {
const frame = createTableFrame(data, datasourceId, datasourceName, traceRegexs);
response.data[index] = frame;
});
return response;
}
function getAttributeValue(value: collectorTypes.opentelemetryProto.common.v1.AnyValue): any {
if (value.stringValue) {
return value.stringValue;

View File

@@ -34,22 +34,6 @@ jest.mock('@grafana/runtime', () => {
spanName: '$var',
refId: 'A',
},
{
datasource: { type: 'tempo', uid: 'abc' },
queryType: 'search',
linkedQuery: {
expr: '{}',
},
refId: 'A',
},
{
datasource: { type: 'tempo', uid: 'abc' },
queryType: 'search',
linkedQuery: {
expr: '{$var}',
},
refId: 'A',
},
{
datasource: { type: 'tempo', uid: 'abc' },
queryType: 'serviceMap',
@@ -100,12 +84,10 @@ describe('on dashboard loaded', () => {
dashboard_id: 'dash',
org_id: 1,
traceql_query_count: 2,
search_query_count: 2,
service_map_query_count: 2,
upload_query_count: 1,
native_search_query_count: 3,
traceql_queries_with_template_variables_count: 1,
search_queries_with_template_variables_count: 1,
service_map_queries_with_template_variables_count: 1,
native_search_queries_with_template_variables_count: 1,
});

View File

@@ -9,12 +9,10 @@ type TempoOnDashboardLoadedTrackingEvent = {
dashboard_id?: string;
org_id?: number;
native_search_query_count: number;
search_query_count: number;
service_map_query_count: number;
traceql_query_count: number;
upload_query_count: number;
native_search_queries_with_template_variables_count: number;
search_queries_with_template_variables_count: number;
service_map_queries_with_template_variables_count: number;
traceql_queries_with_template_variables_count: number;
};
@@ -34,12 +32,10 @@ export const onDashboardLoadedHandler = ({
dashboard_id: dashboardId,
org_id: orgId,
native_search_query_count: 0,
search_query_count: 0,
service_map_query_count: 0,
traceql_query_count: 0,
upload_query_count: 0,
native_search_queries_with_template_variables_count: 0,
search_queries_with_template_variables_count: 0,
service_map_queries_with_template_variables_count: 0,
traceql_queries_with_template_variables_count: 0,
};
@@ -60,11 +56,6 @@ export const onDashboardLoadedHandler = ({
) {
stats.native_search_queries_with_template_variables_count++;
}
} else if (query.queryType === 'search') {
stats.search_query_count++;
if (query.linkedQuery && query.linkedQuery.expr && hasTemplateVariables(query.linkedQuery.expr)) {
stats.search_queries_with_template_variables_count++;
}
} else if (query.queryType === 'serviceMap') {
stats.service_map_query_count++;
if (query.serviceMapQuery && hasTemplateVariables(query.serviceMapQuery)) {

View File

@@ -1,7 +1,6 @@
import { DataSourceJsonData } from '@grafana/data/src';
import { NodeGraphOptions, TraceToLogsOptions } from '@grafana/o11y-ds-frontend';
import { LokiQuery } from './_importedDependencies/datasources/loki/types';
import { TempoQuery as TempoBase, TempoQueryType, TraceqlFilter } from './dataquery.gen';
export interface SearchQueryParams {
@@ -23,9 +22,6 @@ export interface TempoJsonData extends DataSourceJsonData {
filters?: TraceqlFilter[];
};
nodeGraph?: NodeGraphOptions;
lokiSearch?: {
datasourceUid?: string;
};
spanBar?: {
tag: string;
};
@@ -37,9 +33,6 @@ export interface TempoJsonData extends DataSourceJsonData {
}
export interface TempoQuery extends TempoBase {
// Query to find list of traces, e.g., via Loki
// Improvement: change this field to the schema type when LokiQuery exists in the schema
linkedQuery?: LokiQuery;
queryType: TempoQueryType;
}