Explore: Generic query import/export (#40987)
* Add basic implementation
* Split import/export query interface
* Rename abstract query type
* Rename abstract query type
* Split loki/prom parsing
* Update docs
* Test importing abstract queries to Elastic
* Test exporting abstract queries from Graphite
* Test Prom and Loki query import/export
* Give better control to import/export all queries to data sources
* Fix unit test
* Fix unit test
* Filter out non-existing labels when importing queries to Loki
* Fix relative imports, names and docs
* Fix import type
* Move toPromLike query to Prometheus code
* Dedup label operator mappings
* importAbstractQueries -> importFromAbstractQueries
* Fix unit tests
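In short, the commit replaces the datasource-specific importQueries hook with generic import/export interfaces built around an abstract, label-based query. The sketch below is added here only for orientation and is not part of the commit; translateQueries is a made-up helper, and the fallback behaviour is simplified.

// Illustrative sketch (not part of the diff): the intended flow using the
// interfaces and type guards introduced in the hunks below.
import { DataQuery, DataSourceApi, hasQueryExportSupport, hasQueryImportSupport } from '@grafana/data';

async function translateQueries(
  source: DataSourceApi,
  target: DataSourceApi,
  queries: DataQuery[]
): Promise<DataQuery[]> {
  if (hasQueryExportSupport(source) && hasQueryImportSupport(target)) {
    // The source reduces its queries to label matchers; the target rebuilds native queries from them.
    const abstractQueries = await source.exportToAbstractQueries(queries);
    return target.importFromAbstractQueries(abstractQueries);
  }
  // No generic support: keep only the refIds and let the target's query editor supply defaults.
  return queries.map(({ refId }) => ({ refId }));
}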
@@ -210,7 +210,7 @@ abstract class DataSourceApi<
   }
 
   /**
-   * Imports queries from a different datasource
+   * @deprecated use DataSourceWithQueryImportSupport and DataSourceWithQueryExportSupport
    */
   async importQueries?(queries: DataQuery[], originDataSource: DataSourceApi<DataQuery>): Promise<TQuery[]>;
 
@@ -52,3 +52,62 @@ export interface DataQuery {
    */
   datasource?: DataSourceRef | null;
 }
+
+/**
+ * Abstract representation of any label-based query
+ * @internal
+ */
+export interface AbstractQuery extends DataQuery {
+  labelMatchers: AbstractLabelMatcher[];
+}
+
+/**
+ * @internal
+ */
+export enum AbstractLabelOperator {
+  Equal = 'Equal',
+  NotEqual = 'NotEqual',
+  EqualRegEx = 'EqualRegEx',
+  NotEqualRegEx = 'NotEqualRegEx',
+}
+
+/**
+ * @internal
+ */
+export type AbstractLabelMatcher = {
+  name: string;
+  value: string;
+  operator: AbstractLabelOperator;
+};
+
+/**
+ * @internal
+ */
+export interface DataSourceWithQueryImportSupport<TQuery extends DataQuery> {
+  importFromAbstractQueries(labelBasedQuery: AbstractQuery[]): Promise<TQuery[]>;
+}
+
+/**
+ * @internal
+ */
+export interface DataSourceWithQueryExportSupport<TQuery extends DataQuery> {
+  exportToAbstractQueries(query: TQuery[]): Promise<AbstractQuery[]>;
+}
+
+/**
+ * @internal
+ */
+export const hasQueryImportSupport = <TQuery extends DataQuery>(
+  datasource: any
+): datasource is DataSourceWithQueryImportSupport<TQuery> => {
+  return (datasource as DataSourceWithQueryImportSupport<TQuery>).importFromAbstractQueries !== undefined;
+};
+
+/**
+ * @internal
+ */
+export const hasQueryExportSupport = <TQuery extends DataQuery>(
+  datasource: any
+): datasource is DataSourceWithQueryExportSupport<TQuery> => {
+  return (datasource as DataSourceWithQueryExportSupport<TQuery>).exportToAbstractQueries !== undefined;
+};
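For orientation only (not part of the diff): an abstract query is just a refId plus a list of label matchers, so a selector roughly equivalent to {label1="value1", label2!~"value2.*"} would be represented as the value below.

import { AbstractLabelOperator, AbstractQuery } from '@grafana/data';

// Example value using the types above; illustrative only.
const example: AbstractQuery = {
  refId: 'A',
  labelMatchers: [
    { name: 'label1', operator: AbstractLabelOperator.Equal, value: 'value1' },
    { name: 'label2', operator: AbstractLabelOperator.NotEqualRegEx, value: 'value2.*' },
  ],
};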
@@ -7,6 +7,8 @@ import {
   DataQueryResponse,
   DataSourceApi,
   hasLogsVolumeSupport,
+  hasQueryExportSupport,
+  hasQueryImportSupport,
   HistoryItem,
   LoadingState,
   PanelData,

@@ -265,6 +267,9 @@ export const importQueries = (
   if (sourceDataSource.meta?.id === targetDataSource.meta?.id) {
     // Keep same queries if same type of datasource, but delete datasource query property to prevent mismatch of new and old data source instance
     importedQueries = queries.map(({ datasource, ...query }) => query);
+  } else if (hasQueryExportSupport(sourceDataSource) && hasQueryImportSupport(targetDataSource)) {
+    const abstractQueries = await sourceDataSource.exportToAbstractQueries(queries);
+    importedQueries = await targetDataSource.importFromAbstractQueries(abstractQueries);
   } else if (targetDataSource.importQueries) {
     // Datasource-specific importers
     importedQueries = await targetDataSource.importQueries(queries, sourceDataSource);
@@ -6,17 +6,18 @@ import { BackendSrvRequest, getBackendSrv, getDataSourceSrv } from '@grafana/run
 import {
   DataFrame,
   DataLink,
-  DataQuery,
   DataQueryRequest,
   DataQueryResponse,
   DataSourceApi,
   DataSourceInstanceSettings,
   DataSourceWithLogsContextSupport,
+  DataSourceWithQueryImportSupport,
   DataSourceWithLogsVolumeSupport,
   DateTime,
   dateTime,
   Field,
   getDefaultTimeRange,
+  AbstractQuery,
   getLogLevelFromKey,
   LogLevel,
   LogRowModel,

@@ -63,7 +64,10 @@ const ELASTIC_META_FIELDS = [
 
 export class ElasticDatasource
   extends DataSourceApi<ElasticsearchQuery, ElasticsearchOptions>
-  implements DataSourceWithLogsContextSupport, DataSourceWithLogsVolumeSupport<ElasticsearchQuery> {
+  implements
+    DataSourceWithLogsContextSupport,
+    DataSourceWithQueryImportSupport<ElasticsearchQuery>,
+    DataSourceWithLogsVolumeSupport<ElasticsearchQuery> {
   basicAuth?: string;
   withCredentials?: boolean;
   url: string;

@@ -163,8 +167,8 @@ export class ElasticDatasource
     );
   }
 
-  async importQueries(queries: DataQuery[], originDataSource: DataSourceApi): Promise<ElasticsearchQuery[]> {
-    return this.languageProvider.importQueries(queries, originDataSource.meta.id);
+  async importFromAbstractQueries(abstractQueries: AbstractQuery[]): Promise<ElasticsearchQuery[]> {
+    return abstractQueries.map((abstractQuery) => this.languageProvider.importFromAbstractQuery(abstractQuery));
   }
 
   /**
@@ -1,7 +1,6 @@
 import LanguageProvider from './language_provider';
-import { PromQuery } from '../prometheus/types';
 import { ElasticDatasource } from './datasource';
-import { DataSourceInstanceSettings } from '@grafana/data';
+import { AbstractLabelOperator, AbstractQuery, DataSourceInstanceSettings } from '@grafana/data';
 import { ElasticsearchOptions, ElasticsearchQuery } from './types';
 import { TemplateSrv } from '../../../features/templating/template_srv';
 

@@ -27,145 +26,36 @@ const baseLogsQuery: Partial<ElasticsearchQuery> = {
   metrics: [{ type: 'logs', id: '1' }],
 };
 
-describe('transform prometheus query to elasticsearch query', () => {
-  it('With exact equals labels ( 2 labels ) and metric __name__', () => {
+describe('transform abstract query to elasticsearch query', () => {
+  it('With some labels', () => {
     const instance = new LanguageProvider(dataSource);
-    const promQuery: PromQuery = { refId: 'bar', expr: 'my_metric{label1="value1",label2="value2"}' };
-    const result = instance.importQueries([promQuery], 'prometheus');
-
-    expect(result).toEqual([
-      {
-        ...baseLogsQuery,
-        query: '__name__:"my_metric" AND label1:"value1" AND label2:"value2"',
-        refId: promQuery.refId,
-      },
-    ]);
-  });
-
-  it('With exact equals labels ( 1 labels ) and metric __name__', () => {
-    const instance = new LanguageProvider(dataSource);
-    const promQuery: PromQuery = { refId: 'bar', expr: 'my_metric{label1="value1"}' };
-    const result = instance.importQueries([promQuery], 'prometheus');
-
-    expect(result).toEqual([
-      {
-        ...baseLogsQuery,
-        query: '__name__:"my_metric" AND label1:"value1"',
-        refId: promQuery.refId,
-      },
-    ]);
-  });
-
-  it('With exact equals labels ( 1 labels )', () => {
-    const instance = new LanguageProvider(dataSource);
-    const promQuery: PromQuery = { refId: 'bar', expr: '{label1="value1"}' };
-    const result = instance.importQueries([promQuery], 'prometheus');
-
-    expect(result).toEqual([
-      {
-        ...baseLogsQuery,
-        query: 'label1:"value1"',
-        refId: promQuery.refId,
-      },
-    ]);
-  });
-
-  it('With no label and metric __name__', () => {
-    const instance = new LanguageProvider(dataSource);
-    const promQuery: PromQuery = { refId: 'bar', expr: 'my_metric{}' };
-    const result = instance.importQueries([promQuery], 'prometheus');
-
-    expect(result).toEqual([
-      {
-        ...baseLogsQuery,
-        query: '__name__:"my_metric"',
-        refId: promQuery.refId,
-      },
-    ]);
-  });
-
-  it('With no label and metric __name__ without bracket', () => {
-    const instance = new LanguageProvider(dataSource);
-    const promQuery: PromQuery = { refId: 'bar', expr: 'my_metric' };
-    const result = instance.importQueries([promQuery], 'prometheus');
-
-    expect(result).toEqual([
-      {
-        ...baseLogsQuery,
-        query: '__name__:"my_metric"',
-        refId: promQuery.refId,
-      },
-    ]);
-  });
-
-  it('With rate function and exact equals labels ( 2 labels ) and metric __name__', () => {
-    const instance = new LanguageProvider(dataSource);
-    const promQuery: PromQuery = { refId: 'bar', expr: 'rate(my_metric{label1="value1",label2="value2"}[5m])' };
-    const result = instance.importQueries([promQuery], 'prometheus');
-
-    expect(result).toEqual([
-      {
-        ...baseLogsQuery,
-        query: '__name__:"my_metric" AND label1:"value1" AND label2:"value2"',
-        refId: promQuery.refId,
-      },
-    ]);
-  });
-
-  it('With rate function and exact equals labels not equals labels regex and not regex labels and metric __name__', () => {
-    const instance = new LanguageProvider(dataSource);
-    const promQuery: PromQuery = {
-      refId: 'bar',
-      expr: 'rate(my_metric{label1="value1",label2!="value2",label3=~"value.+",label4!~".*tothemoon"}[5m])',
-    };
-    const result = instance.importQueries([promQuery], 'prometheus');
-
-    expect(result).toEqual([
-      {
-        ...baseLogsQuery,
-        query:
-          '__name__:"my_metric" AND label1:"value1" AND NOT label2:"value2" AND label3:/value.+/ AND NOT label4:/.*tothemoon/',
-        refId: promQuery.refId,
-      },
-    ]);
-  });
-});
-
-describe('transform malformed prometheus query to elasticsearch query', () => {
-  it('With only bracket', () => {
-    const instance = new LanguageProvider(dataSource);
-    const promQuery: PromQuery = { refId: 'bar', expr: '{' };
-    const result = instance.importQueries([promQuery], 'prometheus');
-
-    expect(result).toEqual([
-      {
-        ...baseLogsQuery,
-        query: '',
-        refId: promQuery.refId,
-      },
-    ]);
-  });
-
-  it('Empty query', async () => {
-    const instance = new LanguageProvider(dataSource);
-    const promQuery: PromQuery = { refId: 'bar', expr: '' };
-    const result = instance.importQueries([promQuery], 'prometheus');
-
-    expect(result).toEqual([
-      {
-        ...baseLogsQuery,
-        query: '',
-        refId: promQuery.refId,
-      },
-    ]);
-  });
-});
-
-describe('Unsupportated datasources', () => {
-  it('Generates a default query', async () => {
-    const instance = new LanguageProvider(dataSource);
-    const someQuery = { refId: 'bar' };
-    const result = instance.importQueries([someQuery], 'THIS DATASOURCE TYPE DOESNT EXIST');
-    expect(result).toEqual([{ refId: someQuery.refId }]);
+    const abstractQuery: AbstractQuery = {
+      refId: 'bar',
+      labelMatchers: [
+        { name: 'label1', operator: AbstractLabelOperator.Equal, value: 'value1' },
+        { name: 'label2', operator: AbstractLabelOperator.NotEqual, value: 'value2' },
+        { name: 'label3', operator: AbstractLabelOperator.EqualRegEx, value: 'value3' },
+        { name: 'label4', operator: AbstractLabelOperator.NotEqualRegEx, value: 'value4' },
+      ],
+    };
+    const result = instance.importFromAbstractQuery(abstractQuery);
+
+    expect(result).toEqual({
+      ...baseLogsQuery,
+      query: 'label1:"value1" AND NOT label2:"value2" AND label3:/value3/ AND NOT label4:/value4/',
+      refId: abstractQuery.refId,
+    });
+  });
+
+  it('Empty query', () => {
+    const instance = new LanguageProvider(dataSource);
+    const abstractQuery = { labelMatchers: [], refId: 'foo' };
+    const result = instance.importFromAbstractQuery(abstractQuery);
+
+    expect(result).toEqual({
+      ...baseLogsQuery,
+      query: '',
+      refId: abstractQuery.refId,
+    });
   });
 });
@@ -1,96 +1,7 @@
-import { ElasticsearchQuery } from './types';
-import { DataQuery, LanguageProvider } from '@grafana/data';
+import { AbstractLabelOperator, AbstractLabelMatcher, LanguageProvider, AbstractQuery } from '@grafana/data';
 
 import { ElasticDatasource } from './datasource';
-import { PromQuery } from '../prometheus/types';
+import { ElasticsearchQuery } from './types';
 
-import Prism, { Token } from 'prismjs';
-import grammar from '../prometheus/promql';
-
-function getNameLabelValue(promQuery: string, tokens: any): string {
-  let nameLabelValue = '';
-  for (let prop in tokens) {
-    if (typeof tokens[prop] === 'string') {
-      nameLabelValue = tokens[prop] as string;
-      break;
-    }
-  }
-  return nameLabelValue;
-}
-
-function extractPrometheusLabels(promQuery: string): string[][] {
-  const labels: string[][] = [];
-  if (!promQuery || promQuery.length === 0) {
-    return labels;
-  }
-  const tokens = Prism.tokenize(promQuery, grammar);
-  const nameLabelValue = getNameLabelValue(promQuery, tokens);
-  if (nameLabelValue && nameLabelValue.length > 0) {
-    labels.push(['__name__', '=', '"' + nameLabelValue + '"']);
-  }
-
-  for (let prop in tokens) {
-    if (tokens[prop] instanceof Token) {
-      let token: Token = tokens[prop] as Token;
-      if (token.type === 'context-labels') {
-        let labelKey = '';
-        let labelValue = '';
-        let labelOperator = '';
-        let contentTokens: any[] = token.content as any[];
-        for (let currentToken in contentTokens) {
-          if (typeof contentTokens[currentToken] === 'string') {
-            let currentStr: string;
-            currentStr = contentTokens[currentToken] as string;
-            if (currentStr === '=' || currentStr === '!=' || currentStr === '=~' || currentStr === '!~') {
-              labelOperator = currentStr;
-            }
-          } else if (contentTokens[currentToken] instanceof Token) {
-            switch (contentTokens[currentToken].type) {
-              case 'label-key':
-                labelKey = contentTokens[currentToken].content as string;
-                break;
-              case 'label-value':
-                labelValue = contentTokens[currentToken].content as string;
-                labels.push([labelKey, labelOperator, labelValue]);
-                break;
-            }
-          }
-        }
-      }
-    }
-  }
-  return labels;
-}
-
-function getElasticsearchQuery(prometheusLabels: string[][]): string {
-  let elasticsearchLuceneLabels = [];
-  for (let keyOperatorValue of prometheusLabels) {
-    switch (keyOperatorValue[1]) {
-      case '=': {
-        elasticsearchLuceneLabels.push(keyOperatorValue[0] + ':' + keyOperatorValue[2]);
-        break;
-      }
-      case '!=': {
-        elasticsearchLuceneLabels.push('NOT ' + keyOperatorValue[0] + ':' + keyOperatorValue[2]);
-        break;
-      }
-      case '=~': {
-        elasticsearchLuceneLabels.push(
-          keyOperatorValue[0] + ':/' + keyOperatorValue[2].substring(1, keyOperatorValue[2].length - 1) + '/'
-        );
-        break;
-      }
-      case '!~': {
-        elasticsearchLuceneLabels.push(
-          'NOT ' + keyOperatorValue[0] + ':/' + keyOperatorValue[2].substring(1, keyOperatorValue[2].length - 1) + '/'
-        );
-        break;
-      }
-    }
-  }
-  return elasticsearchLuceneLabels.join(' AND ');
-}
-
 export default class ElasticsearchLanguageProvider extends LanguageProvider {
   declare request: (url: string, params?: any) => Promise<any>;

@@ -105,31 +16,39 @@ export default class ElasticsearchLanguageProvider extends LanguageProvider {
   }
 
   /**
-   * The current implementation only supports switching from Prometheus/Loki queries.
-   * For them we transform the query to an ES Logs query since it's the behaviour most users expect.
-   * For every other datasource we just copy the refId and let the query editor initialize a default query.
-   * */
-  importQueries(queries: DataQuery[], datasourceType: string): ElasticsearchQuery[] {
-    if (datasourceType === 'prometheus' || datasourceType === 'loki') {
-      return queries.map((query) => {
-        let prometheusQuery = query as PromQuery;
-        const expr = getElasticsearchQuery(extractPrometheusLabels(prometheusQuery.expr));
-        return {
-          metrics: [
-            {
-              id: '1',
-              type: 'logs',
-            },
-          ],
-          query: expr,
-          refId: query.refId,
-        };
-      });
-    }
-    return queries.map((query) => {
-      return {
-        refId: query.refId,
-      };
-    });
+   * Queries are transformed to an ES Logs query since it's the behaviour most users expect.
+   **/
+  importFromAbstractQuery(abstractQuery: AbstractQuery): ElasticsearchQuery {
+    return {
+      metrics: [
+        {
+          id: '1',
+          type: 'logs',
+        },
+      ],
+      query: this.getElasticsearchQuery(abstractQuery.labelMatchers),
+      refId: abstractQuery.refId,
+    };
+  }
+
+  getElasticsearchQuery(labels: AbstractLabelMatcher[]): string {
+    return labels
+      .map((label) => {
+        switch (label.operator) {
+          case AbstractLabelOperator.Equal: {
+            return label.name + ':"' + label.value + '"';
+          }
+          case AbstractLabelOperator.NotEqual: {
+            return 'NOT ' + label.name + ':"' + label.value + '"';
+          }
+          case AbstractLabelOperator.EqualRegEx: {
+            return label.name + ':/' + label.value + '/';
+          }
+          case AbstractLabelOperator.NotEqualRegEx: {
+            return 'NOT ' + label.name + ':/' + label.value + '/';
+          }
+        }
+      })
+      .join(' AND ');
   }
 }
@@ -2,11 +2,12 @@ import { GraphiteDatasource } from './datasource';
 import { isArray } from 'lodash';
 
 import { TemplateSrv } from 'app/features/templating/template_srv';
-import { dateTime, getFrameDisplayName } from '@grafana/data';
+import { AbstractLabelMatcher, AbstractLabelOperator, dateTime, getFrameDisplayName } from '@grafana/data';
 import { backendSrv } from 'app/core/services/backend_srv'; // will use the version in __mocks__
 import { of } from 'rxjs';
 import { createFetchResponse } from 'test/helpers/createFetchResponse';
 import { DEFAULT_GRAPHITE_VERSION } from './versions';
+import { fromString } from './configuration/parseLokiLabelMappings';
 
 jest.mock('@grafana/runtime', () => ({
   ...((jest.requireActual('@grafana/runtime') as unknown) as object),

@@ -523,6 +524,80 @@ describe('graphiteDatasource', () => {
       expect(results).not.toBe(null);
     });
   });
+
+  describe('exporting to abstract query', () => {
+    async function assertQueryExport(target: string, labelMatchers: AbstractLabelMatcher[]): Promise<void> {
+      let abstractQueries = await ctx.ds.exportToAbstractQueries([
+        {
+          refId: 'A',
+          target,
+        },
+      ]);
+      expect(abstractQueries).toMatchObject([
+        {
+          refId: 'A',
+          labelMatchers: labelMatchers,
+        },
+      ]);
+    }
+
+    beforeEach(() => {
+      ctx.ds.getImportQueryConfiguration = jest.fn().mockReturnValue({
+        loki: {
+          mappings: ['servers.(cluster).(server).*'].map(fromString),
+        },
+      });
+
+      ctx.ds.createFuncInstance = jest.fn().mockImplementation((name: string) => ({
+        name,
+        params: [],
+        def: {
+          name,
+          params: [{ multiple: true }],
+        },
+        updateText: () => {},
+      }));
+    });
+
+    it('extracts metric name based on configuration', async () => {
+      await assertQueryExport('interpolate(alias(servers.west.001.cpu,1,2))', [
+        { name: 'cluster', operator: AbstractLabelOperator.Equal, value: 'west' },
+        { name: 'server', operator: AbstractLabelOperator.Equal, value: '001' },
+      ]);
+
+      await assertQueryExport('interpolate(alias(servers.east.001.request.POST.200,1,2))', [
+        { name: 'cluster', operator: AbstractLabelOperator.Equal, value: 'east' },
+        { name: 'server', operator: AbstractLabelOperator.Equal, value: '001' },
+      ]);
+
+      await assertQueryExport('interpolate(alias(servers.*.002.*,1,2))', [
+        { name: 'server', operator: AbstractLabelOperator.Equal, value: '002' },
+      ]);
+    });
+
+    it('extracts tags', async () => {
+      await assertQueryExport("interpolate(seriesByTag('cluster=west', 'server=002'), inf))", [
+        { name: 'cluster', operator: AbstractLabelOperator.Equal, value: 'west' },
+        { name: 'server', operator: AbstractLabelOperator.Equal, value: '002' },
+      ]);
+      await assertQueryExport("interpolate(seriesByTag('foo=bar', 'server=002'), inf))", [
+        { name: 'foo', operator: AbstractLabelOperator.Equal, value: 'bar' },
+        { name: 'server', operator: AbstractLabelOperator.Equal, value: '002' },
+      ]);
+    });
+
+    it('extracts regular expressions', async () => {
+      await assertQueryExport('interpolate(alias(servers.eas*.{001,002}.request.POST.200,1,2))', [
+        { name: 'cluster', operator: AbstractLabelOperator.EqualRegEx, value: '^eas.*' },
+        { name: 'server', operator: AbstractLabelOperator.EqualRegEx, value: '^(001|002)' },
+      ]);
+    });
+
+    it('does not extract metrics when the config does not match', async () => {
+      await assertQueryExport('interpolate(alias(test.west.001.cpu))', []);
+      await assertQueryExport('interpolate(alias(servers.west.001))', []);
+    });
+  });
 });
 
 function accessScenario(name: string, url: string, fn: any) {
@@ -7,7 +7,11 @@ import {
   DataQueryRequest,
   DataQueryResponse,
   DataSourceApi,
+  DataSourceWithQueryExportSupport,
   dateMath,
+  AbstractQuery,
+  AbstractLabelOperator,
+  AbstractLabelMatcher,
   MetricFindValue,
   QueryResultMetaStat,
   ScopedVars,

@@ -21,6 +25,7 @@ import { getTemplateSrv, TemplateSrv } from 'app/features/templating/template_sr
 // Types
 import {
   GraphiteLokiMapping,
+  GraphiteMetricLokiMatcher,
   GraphiteOptions,
   GraphiteQuery,
   GraphiteQueryImportConfiguration,

@@ -31,12 +36,29 @@ import { getRollupNotice, getRuntimeConsolidationNotice } from 'app/plugins/data
 import { getSearchFilterScopedVar } from '../../../features/variables/utils';
 import { DEFAULT_GRAPHITE_VERSION } from './versions';
 import { reduceError } from './utils';
+import { default as GraphiteQueryModel } from './graphite_query';
 
-export class GraphiteDatasource extends DataSourceApi<
-  GraphiteQuery,
-  GraphiteOptions,
-  GraphiteQueryImportConfiguration
-> {
+const GRAPHITE_TAG_COMPARATORS = {
+  '=': AbstractLabelOperator.Equal,
+  '!=': AbstractLabelOperator.NotEqual,
+  '=~': AbstractLabelOperator.EqualRegEx,
+  '!=~': AbstractLabelOperator.NotEqualRegEx,
+};
+
+/**
+ * Converts Graphite glob-like pattern to a regular expression
+ */
+function convertGlobToRegEx(text: string): string {
+  if (text.includes('*') || text.includes('{')) {
+    return '^' + text.replace(/\*/g, '.*').replace(/\{/g, '(').replace(/}/g, ')').replace(/,/g, '|');
+  } else {
+    return text;
+  }
+}
+
+export class GraphiteDatasource
+  extends DataSourceApi<GraphiteQuery, GraphiteOptions, GraphiteQueryImportConfiguration>
+  implements DataSourceWithQueryExportSupport<GraphiteQuery> {
   basicAuth: string;
   url: string;
   name: string;
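A quick note on the glob conversion above (added for illustration, not part of the diff): the behaviour shown in these comments matches what the Graphite datasource tests later in this commit assert.

// convertGlobToRegEx('eas*')      === '^eas.*'      // '*' becomes '.*', result is anchored
// convertGlobToRegEx('{001,002}') === '^(001|002)'  // brace lists become alternations
// convertGlobToRegEx('west')      === 'west'        // plain values are returned unchanged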
@@ -91,6 +113,67 @@ export class GraphiteDatasource extends DataSourceApi<
     };
   }
 
+  async exportToAbstractQueries(queries: GraphiteQuery[]): Promise<AbstractQuery[]> {
+    return queries.map((query) => this.exportToAbstractQuery(query));
+  }
+
+  exportToAbstractQuery(query: GraphiteQuery): AbstractQuery {
+    const graphiteQuery: GraphiteQueryModel = new GraphiteQueryModel(
+      this,
+      {
+        ...query,
+        target: query.target || '',
+        textEditor: false,
+      },
+      getTemplateSrv()
+    );
+    graphiteQuery.parseTarget();
+
+    let labels: AbstractLabelMatcher[] = [];
+    const config = this.getImportQueryConfiguration().loki;
+
+    if (graphiteQuery.seriesByTagUsed) {
+      graphiteQuery.tags.forEach((tag) => {
+        labels.push({
+          name: tag.key,
+          operator: GRAPHITE_TAG_COMPARATORS[tag.operator],
+          value: tag.value,
+        });
+      });
+    } else {
+      const targetNodes = graphiteQuery.segments.map((segment) => segment.value);
+      let mappings = config.mappings.filter((mapping) => mapping.matchers.length <= targetNodes.length);
+
+      for (let mapping of mappings) {
+        const matchers = mapping.matchers.concat();
+
+        matchers.every((matcher: GraphiteMetricLokiMatcher, index: number) => {
+          if (matcher.labelName) {
+            let value = (targetNodes[index] as string)!;
+
+            if (value === '*') {
+              return true;
+            }
+
+            const converted = convertGlobToRegEx(value);
+            labels.push({
+              name: matcher.labelName,
+              operator: converted !== value ? AbstractLabelOperator.EqualRegEx : AbstractLabelOperator.Equal,
+              value: converted,
+            });
+            return true;
+          }
+          return targetNodes[index] === matcher.value || matcher.value === '*';
+        });
+      }
+    }
+
+    return {
+      refId: query.refId,
+      labelMatchers: labels,
+    };
+  }
+
   query(options: DataQueryRequest<GraphiteQuery>): Observable<DataQueryResponse> {
     const graphOptions = {
       from: this.translateTime(options.range.raw.from, false, options.timezone),
@@ -1,16 +1,17 @@
 import { lastValueFrom, of, throwError } from 'rxjs';
 import { take } from 'rxjs/operators';
 import {
+  AbstractLabelOperator,
   AnnotationQueryRequest,
   CoreApp,
   DataFrame,
   dateTime,
   FieldCache,
-  TimeSeries,
-  toUtc,
+  FieldType,
   LogRowModel,
   MutableDataFrame,
-  FieldType,
+  TimeSeries,
+  toUtc,
 } from '@grafana/data';
 import { BackendSrvRequest, FetchResponse, config } from '@grafana/runtime';
 

@@ -1015,6 +1016,38 @@ describe('LokiDatasource', () => {
       });
     });
   });
+
+  describe('importing queries', () => {
+    it('keeps all labels when no labels are loaded', async () => {
+      const ds = createLokiDSForTests();
+      fetchMock.mockImplementation(() => of(createFetchResponse({ data: [] })));
+      const queries = await ds.importFromAbstractQueries([
+        {
+          refId: 'A',
+          labelMatchers: [
+            { name: 'foo', operator: AbstractLabelOperator.Equal, value: 'bar' },
+            { name: 'foo2', operator: AbstractLabelOperator.Equal, value: 'bar2' },
+          ],
+        },
+      ]);
+      expect(queries[0].expr).toBe('{foo="bar", foo2="bar2"}');
+    });
+
+    it('filters out non existing labels', async () => {
+      const ds = createLokiDSForTests();
+      fetchMock.mockImplementation(() => of(createFetchResponse({ data: ['foo'] })));
+      const queries = await ds.importFromAbstractQueries([
+        {
+          refId: 'A',
+          labelMatchers: [
+            { name: 'foo', operator: AbstractLabelOperator.Equal, value: 'bar' },
+            { name: 'foo2', operator: AbstractLabelOperator.Equal, value: 'bar2' },
+          ],
+        },
+      ]);
+      expect(queries[0].expr).toBe('{foo="bar"}');
+    });
+  });
 });
 
 function assertAdHocFilters(query: string, expectedResults: string, ds: LokiDatasource) {
@@ -10,7 +10,6 @@ import {
   AnnotationQueryRequest,
   DataFrame,
   DataFrameView,
-  DataQuery,
   DataQueryError,
   DataQueryRequest,
   DataQueryResponse,

@@ -18,9 +17,12 @@ import {
   DataSourceInstanceSettings,
   DataSourceWithLogsContextSupport,
   DataSourceWithLogsVolumeSupport,
+  DataSourceWithQueryExportSupport,
+  DataSourceWithQueryImportSupport,
   dateMath,
   DateTime,
   FieldCache,
+  AbstractQuery,
   FieldType,
   getLogLevelFromKey,
   Labels,

@@ -83,7 +85,11 @@ const DEFAULT_QUERY_PARAMS: Partial<LokiRangeQueryRequest> = {
 
 export class LokiDatasource
   extends DataSourceApi<LokiQuery, LokiOptions>
-  implements DataSourceWithLogsContextSupport, DataSourceWithLogsVolumeSupport<LokiQuery> {
+  implements
+    DataSourceWithLogsContextSupport,
+    DataSourceWithLogsVolumeSupport<LokiQuery>,
+    DataSourceWithQueryImportSupport<LokiQuery>,
+    DataSourceWithQueryExportSupport<LokiQuery> {
   private streams = new LiveStreams();
   languageProvider: LanguageProvider;
   maxLines: number;

@@ -366,8 +372,24 @@ export class LokiDatasource
     return { start: timeRange.from.valueOf() * NS_IN_MS, end: timeRange.to.valueOf() * NS_IN_MS };
   }
 
-  async importQueries(queries: DataQuery[], originDataSource: DataSourceApi): Promise<LokiQuery[]> {
-    return this.languageProvider.importQueries(queries, originDataSource);
+  async importFromAbstractQueries(abstractQueries: AbstractQuery[]): Promise<LokiQuery[]> {
+    await this.languageProvider.start();
+    const existingKeys = this.languageProvider.labelKeys;
+
+    if (existingKeys && existingKeys.length) {
+      abstractQueries = abstractQueries.map((abstractQuery) => {
+        abstractQuery.labelMatchers = abstractQuery.labelMatchers.filter((labelMatcher) => {
+          return existingKeys.includes(labelMatcher.name);
+        });
+        return abstractQuery;
+      });
+    }
+
+    return abstractQueries.map((abstractQuery) => this.languageProvider.importFromAbstractQuery(abstractQuery));
+  }
+
+  async exportToAbstractQueries(queries: LokiQuery[]): Promise<AbstractQuery[]> {
+    return queries.map((query) => this.languageProvider.exportToAbstractQuery(query));
   }
 
   async metadataRequest(url: string, params?: Record<string, string | number>) {
@@ -1,95 +0,0 @@
-import { default as GraphiteQueryModel } from '../../graphite/graphite_query';
-import { map } from 'lodash';
-import { LokiQuery } from '../types';
-import { GraphiteDatasource } from '../../graphite/datasource';
-import { getTemplateSrv } from '../../../../features/templating/template_srv';
-import { GraphiteMetricLokiMatcher, GraphiteQuery, GraphiteToLokiQueryImportConfiguration } from '../../graphite/types';
-
-const GRAPHITE_TO_LOKI_OPERATOR = {
-  '=': '=',
-  '!=': '!=',
-  '=~': '=~',
-  '!=~': '!~',
-};
-
-/**
- * Converts Graphite glob-like pattern to a regular expression
- */
-function convertGlobToRegEx(text: string): string {
-  if (text.includes('*') || text.includes('{')) {
-    return '^' + text.replace(/\*/g, '.*').replace(/\{/g, '(').replace(/}/g, ')').replace(/,/g, '|');
-  } else {
-    return text;
-  }
-}
-
-export default function fromGraphiteQueries(
-  graphiteQueries: GraphiteQuery[],
-  graphiteDataSource: GraphiteDatasource
-): LokiQuery[] {
-  return graphiteQueries.map((query) => {
-    const model: GraphiteQueryModel = new GraphiteQueryModel(
-      graphiteDataSource,
-      {
-        ...query,
-        target: query.target || '',
-        textEditor: false,
-      },
-      getTemplateSrv()
-    );
-    model.parseTarget();
-
-    return {
-      refId: query.refId,
-      expr: fromGraphite(model, graphiteDataSource.getImportQueryConfiguration().loki),
-    };
-  });
-}
-
-function fromGraphite(graphiteQuery: GraphiteQueryModel, config: GraphiteToLokiQueryImportConfiguration): string {
-  let matchingFound = false;
-  let labels: any = {};
-
-  if (graphiteQuery.seriesByTagUsed) {
-    matchingFound = true;
-    graphiteQuery.tags.forEach((tag) => {
-      labels[tag.key] = {
-        value: tag.value,
-        operator: GRAPHITE_TO_LOKI_OPERATOR[tag.operator],
-      };
-    });
-  } else {
-    const targetNodes = graphiteQuery.segments.map((segment) => segment.value);
-    let mappings = config.mappings.filter((mapping) => mapping.matchers.length <= targetNodes.length);
-
-    for (let mapping of mappings) {
-      const matchers = mapping.matchers.concat();
-
-      matchingFound = matchers.every((matcher: GraphiteMetricLokiMatcher, index: number) => {
-        if (matcher.labelName) {
-          let value = (targetNodes[index] as string)!;
-
-          if (value === '*') {
-            return true;
-          }
-
-          const converted = convertGlobToRegEx(value);
-          labels[matcher.labelName] = {
-            value: converted,
-            operator: converted !== value ? '=~' : '=',
-          };
-
-          return true;
-        }
-        return targetNodes[index] === matcher.value || matcher.value === '*';
-      });
-    }
-  }
-
-  let pairs = map(labels, (value, key) => `${key}${value.operator}"${value.value}"`);
-  if (matchingFound && pairs.length) {
-    return `{${pairs.join(', ')}}`;
-  } else {
-    return '';
-  }
-}
@@ -1,71 +0,0 @@
-import { fromString } from '../../graphite/configuration/parseLokiLabelMappings';
-import fromGraphiteQueries from './fromGraphite';
-import { GraphiteQuery } from '../../graphite/types';
-import { GraphiteDatasource } from '../../graphite/datasource';
-
-describe('importing from Graphite queries', () => {
-  let graphiteDatasourceMock: GraphiteDatasource;
-
-  function mockSettings(stringMappings: string[]) {
-    graphiteDatasourceMock = ({
-      getImportQueryConfiguration: () => ({
-        loki: {
-          mappings: stringMappings.map(fromString),
-        },
-      }),
-      createFuncInstance: (name: string) => ({
-        name,
-        params: [],
-        def: {
-          name,
-          params: [{ multiple: true }],
-        },
-        updateText: () => {},
-      }),
-    } as any) as GraphiteDatasource;
-  }
-
-  function mockGraphiteQuery(raw: string): GraphiteQuery {
-    return {
-      refId: 'A',
-      target: raw,
-    };
-  }
-
-  beforeEach(() => {});
-
-  it('test matching mappings', () => {
-    mockSettings(['servers.(cluster).(server).*']);
-    const lokiQueries = fromGraphiteQueries(
-      [
-        // metrics: captured
-        mockGraphiteQuery('interpolate(alias(servers.west.001.cpu,1,2))'),
-        mockGraphiteQuery('interpolate(alias(servers.east.001.request.POST.200,1,2))'),
-        mockGraphiteQuery('interpolate(alias(servers.*.002.*,1,2))'),
-        // tags: captured
-        mockGraphiteQuery("interpolate(seriesByTag('cluster=west', 'server=002'), inf))"),
-        mockGraphiteQuery("interpolate(seriesByTag('foo=bar', 'server=002'), inf))"),
-        // regexp
-        mockGraphiteQuery('interpolate(alias(servers.eas*.{001,002}.request.POST.200,1,2))'),
-        // not captured
-        mockGraphiteQuery('interpolate(alias(test.west.001.cpu))'),
-        mockGraphiteQuery('interpolate(alias(servers.west.001))'),
-      ],
-      graphiteDatasourceMock
-    );
-
-    expect(lokiQueries).toMatchObject([
-      { refId: 'A', expr: '{cluster="west", server="001"}' },
-      { refId: 'A', expr: '{cluster="east", server="001"}' },
-      { refId: 'A', expr: '{server="002"}' },
-
-      { refId: 'A', expr: '{cluster="west", server="002"}' },
-      { refId: 'A', expr: '{foo="bar", server="002"}' },
-
-      { refId: 'A', expr: '{cluster=~"^eas.*", server=~"^(001|002)"}' },
-
-      { refId: 'A', expr: '' },
-      { refId: 'A', expr: '' },
-    ]);
-  });
-});
@@ -5,7 +5,7 @@ import { TypeaheadInput } from '@grafana/ui';
 
 import { makeMockLokiDatasource } from './mocks';
 import LokiDatasource from './datasource';
-import { DataQuery, DataSourceApi } from '@grafana/data';
+import { AbstractLabelOperator } from '@grafana/data';
 
 jest.mock('app/store/store', () => ({
   store: {

@@ -245,52 +245,30 @@ describe('Request URL', () => {
 describe('Query imports', () => {
   const datasource = makeMockLokiDatasource({});
 
-  it('returns empty queries for unknown origin datasource', async () => {
-    const instance = new LanguageProvider(datasource);
-    const result = await instance.importQueries([{ refId: 'bar', expr: 'foo' } as DataQuery], {
-      meta: { id: 'unknown' },
-    } as DataSourceApi);
-    expect(result).toEqual([{ refId: 'bar', expr: '' }]);
-  });
-
-  describe('prometheus query imports', () => {
-    it('always results in range query type', async () => {
-      const instance = new LanguageProvider(datasource);
-      const result = await instance.importQueries(
-        [{ refId: 'bar', expr: '{job="grafana"}', instant: true, range: false } as DataQuery],
-        {
-          meta: { id: 'prometheus' },
-        } as DataSourceApi
-      );
-      expect(result).toEqual([{ refId: 'bar', expr: '{job="grafana"}', range: true }]);
-      expect(result).not.toHaveProperty('instant');
-    });
-
-    it('returns empty query from metric-only query', async () => {
-      const instance = new LanguageProvider(datasource);
-      const result = await instance.importPrometheusQuery('foo');
-      expect(result).toEqual('');
-    });
-
-    it('returns empty query from selector query if label is not available', async () => {
-      const datasourceWithLabels = makeMockLokiDatasource({ other: [] });
-      const instance = new LanguageProvider(datasourceWithLabels);
-      const result = await instance.importPrometheusQuery('{foo="bar"}');
-      expect(result).toEqual('{}');
-    });
-
-    it('returns selector query from selector query with common labels', async () => {
-      const datasourceWithLabels = makeMockLokiDatasource({ foo: [] });
-      const instance = new LanguageProvider(datasourceWithLabels);
-      const result = await instance.importPrometheusQuery('metric{foo="bar",baz="42"}');
-      expect(result).toEqual('{foo="bar"}');
-    });
-
-    it('returns selector query from selector query with all labels if logging label list is empty', async () => {
-      const datasourceWithLabels = makeMockLokiDatasource({});
-      const instance = new LanguageProvider(datasourceWithLabels);
-      const result = await instance.importPrometheusQuery('metric{foo="bar",baz="42"}');
-      expect(result).toEqual('{baz="42",foo="bar"}');
+  it('returns empty queries', async () => {
+    const instance = new LanguageProvider(datasource);
+    const result = await instance.importFromAbstractQuery({ refId: 'bar', labelMatchers: [] });
+    expect(result).toEqual({ refId: 'bar', expr: '', range: true });
+  });
+
+  describe('exporting to abstract query', () => {
+    it('exports labels', async () => {
+      const instance = new LanguageProvider(datasource);
+      const abstractQuery = instance.exportToAbstractQuery({
+        refId: 'bar',
+        expr: '{label1="value1", label2!="value2", label3=~"value3", label4!~"value4"}',
+        instant: true,
+        range: false,
+      });
+      expect(abstractQuery).toMatchObject({
+        refId: 'bar',
+        labelMatchers: [
+          { name: 'label1', operator: AbstractLabelOperator.Equal, value: 'value1' },
+          { name: 'label2', operator: AbstractLabelOperator.NotEqual, value: 'value2' },
+          { name: 'label3', operator: AbstractLabelOperator.EqualRegEx, value: 'value3' },
+          { name: 'label4', operator: AbstractLabelOperator.NotEqualRegEx, value: 'value4' },
+        ],
+      });
     });
   });
 });
@@ -4,24 +4,20 @@ import LRU from 'lru-cache';
 
 // Services & Utils
 import {
+  extractLabelMatchers,
   parseSelector,
-  labelRegexp,
-  selectorRegexp,
   processLabels,
+  toPromLikeQuery,
 } from 'app/plugins/datasource/prometheus/language_utils';
 import syntax, { FUNCTIONS, PIPE_PARSERS, PIPE_OPERATORS } from './syntax';
 
 // Types
 import { LokiQuery } from './types';
-import { dateTime, AbsoluteTimeRange, LanguageProvider, HistoryItem, DataQuery, DataSourceApi } from '@grafana/data';
-import { PromQuery } from '../prometheus/types';
-import { GraphiteQuery } from '../graphite/types';
+import { dateTime, AbsoluteTimeRange, LanguageProvider, HistoryItem, AbstractQuery } from '@grafana/data';
 
 import LokiDatasource from './datasource';
 import { CompletionItem, TypeaheadInput, TypeaheadOutput, CompletionItemGroup } from '@grafana/ui';
-import { Grammar } from 'prismjs';
-import fromGraphite from './importing/fromGraphite';
-import { GraphiteDatasource } from '../graphite/datasource';
+import Prism, { Grammar } from 'prismjs';
 
 const DEFAULT_KEYS = ['job', 'namespace'];
 const EMPTY_SELECTOR = '{}';

@@ -335,75 +331,20 @@ export default class LokiLanguageProvider extends LanguageProvider {
     return { context, suggestions };
   }
 
-  async importQueries(
-    queries: PromQuery[] | GraphiteQuery[] | DataQuery[],
-    originDataSource: DataSourceApi
-  ): Promise<LokiQuery[]> {
-    const datasourceType = originDataSource.meta.id;
-    if (datasourceType === 'prometheus') {
-      return Promise.all(
-        [...(queries as PromQuery[])].map(async (query) => {
-          const expr = await this.importPrometheusQuery(query.expr);
-          const { refId } = query;
-          return {
-            expr,
-            refId,
-            range: true,
-          };
-        })
-      );
-    }
-    if (datasourceType === 'graphite') {
-      return fromGraphite(queries, originDataSource as GraphiteDatasource);
-    }
-    // Return a cleaned LokiQuery
-    return queries.map((query) => ({
-      refId: query.refId,
-      expr: '',
-    }));
+  importFromAbstractQuery(labelBasedQuery: AbstractQuery): LokiQuery {
+    return toPromLikeQuery(labelBasedQuery);
   }
 
-  async importPrometheusQuery(query: string): Promise<string> {
-    if (!query) {
-      return '';
+  exportToAbstractQuery(query: LokiQuery): AbstractQuery {
+    const lokiQuery = query.expr;
+    if (!lokiQuery || lokiQuery.length === 0) {
+      return { refId: query.refId, labelMatchers: [] };
     }
-
-    // Consider only first selector in query
-    const selectorMatch = query.match(selectorRegexp);
-    if (!selectorMatch) {
-      return '';
-    }
-
-    const selector = selectorMatch[0];
-    const labels: { [key: string]: { value: any; operator: any } } = {};
-    selector.replace(labelRegexp, (_, key, operator, value) => {
-      labels[key] = { value, operator };
-      return '';
-    });
-
-    // Keep only labels that exist on origin and target datasource
-    await this.start(); // fetches all existing label keys
-    const existingKeys = this.labelKeys;
-    let labelsToKeep: { [key: string]: { value: any; operator: any } } = {};
-    if (existingKeys && existingKeys.length) {
-      // Check for common labels
-      for (const key in labels) {
-        if (existingKeys && existingKeys.includes(key)) {
-          // Should we check for label value equality here?
-          labelsToKeep[key] = labels[key];
-        }
-      }
-    } else {
-      // Keep all labels by default
-      labelsToKeep = labels;
-    }
-
-    const labelKeys = Object.keys(labelsToKeep).sort();
-    const cleanSelector = labelKeys
-      .map((key) => `${key}${labelsToKeep[key].operator}${labelsToKeep[key].value}`)
-      .join(',');
-
-    return ['{', cleanSelector, '}'].join('');
+    const tokens = Prism.tokenize(lokiQuery, syntax);
+    return {
+      refId: query.refId,
+      labelMatchers: extractLabelMatchers(tokens),
+    };
   }
 
   async getSeriesLabels(selector: string) {
@@ -9,8 +9,11 @@ import {
   DataQueryRequest,
   DataQueryResponse,
   DataSourceInstanceSettings,
+  DataSourceWithQueryExportSupport,
+  DataSourceWithQueryImportSupport,
   dateMath,
   DateTime,
+  AbstractQuery,
   LoadingState,
   rangeUtil,
   ScopedVars,

@@ -55,7 +58,9 @@ import PrometheusMetricFindQuery from './metric_find_query';
 export const ANNOTATION_QUERY_STEP_DEFAULT = '60s';
 const GET_AND_POST_METADATA_ENDPOINTS = ['api/v1/query', 'api/v1/query_range', 'api/v1/series', 'api/v1/labels'];
 
-export class PrometheusDatasource extends DataSourceWithBackend<PromQuery, PromOptions> {
+export class PrometheusDatasource
+  extends DataSourceWithBackend<PromQuery, PromOptions>
+  implements DataSourceWithQueryImportSupport<PromQuery>, DataSourceWithQueryExportSupport<PromQuery> {
   type: string;
   editorSrc: string;
   ruleMappings: { [index: string]: string };

@@ -170,6 +175,14 @@ export class PrometheusDatasource extends DataSourceWithBackend<PromQuery, PromO
     return getBackendSrv().fetch<T>(options);
   }
 
+  async importFromAbstractQueries(abstractQueries: AbstractQuery[]): Promise<PromQuery[]> {
+    return abstractQueries.map((abstractQuery) => this.languageProvider.importFromAbstractQuery(abstractQuery));
+  }
+
+  async exportToAbstractQueries(queries: PromQuery[]): Promise<AbstractQuery[]> {
+    return queries.map((query) => this.languageProvider.exportToAbstractQuery(query));
+  }
+
   // Use this for tab completion features, wont publish response to other components
   async metadataRequest<T = any>(url: string, params = {}) {
     // If URL includes endpoint that supports POST and GET method, try to use configured method. This might fail as POST is supported only in v2.10+.
@@ -2,7 +2,7 @@ import Plain from 'slate-plain-serializer';
 import { Editor as SlateEditor } from 'slate';
 import LanguageProvider from './language_provider';
 import { PrometheusDatasource } from './datasource';
-import { HistoryItem } from '@grafana/data';
+import { AbstractLabelOperator, HistoryItem } from '@grafana/data';
 import { PromQuery } from './types';
 import Mock = jest.Mock;
 import { SearchFunctionType } from '@grafana/ui';
@@ -594,6 +594,36 @@ describe('Language completion provider', () => {
       expect((datasource.metadataRequest as Mock).mock.calls.length).toBeGreaterThan(0);
     });
   });
+
+  describe('Query imports', () => {
+    it('returns empty queries', async () => {
+      const instance = new LanguageProvider(datasource);
+      const result = await instance.importFromAbstractQuery({ refId: 'bar', labelMatchers: [] });
+      expect(result).toEqual({ refId: 'bar', expr: '', range: true });
+    });
+
+    describe('exporting to abstract query', () => {
+      it('exports labels with metric name', async () => {
+        const instance = new LanguageProvider(datasource);
+        const abstractQuery = instance.exportToAbstractQuery({
+          refId: 'bar',
+          expr: 'metric_name{label1="value1", label2!="value2", label3=~"value3", label4!~"value4"}',
+          instant: true,
+          range: false,
+        });
+        expect(abstractQuery).toMatchObject({
+          refId: 'bar',
+          labelMatchers: [
+            { name: 'label1', operator: AbstractLabelOperator.Equal, value: 'value1' },
+            { name: 'label2', operator: AbstractLabelOperator.NotEqual, value: 'value2' },
+            { name: 'label3', operator: AbstractLabelOperator.EqualRegEx, value: 'value3' },
+            { name: 'label4', operator: AbstractLabelOperator.NotEqualRegEx, value: 'value4' },
+            { name: '__name__', operator: AbstractLabelOperator.Equal, value: 'metric_name' },
+          ],
+        });
+      });
+    });
+  });
 });
 
 const simpleMetricLabelsResponse = {
@@ -1,17 +1,27 @@
 import { once, chain, difference } from 'lodash';
 import LRU from 'lru-cache';
 import { Value } from 'slate';
+import Prism from 'prismjs';
+
-import { dateTime, HistoryItem, LanguageProvider } from '@grafana/data';
+import {
+  AbstractLabelMatcher,
+  AbstractLabelOperator,
+  AbstractQuery,
+  dateTime,
+  HistoryItem,
+  LanguageProvider,
+} from '@grafana/data';
 import { CompletionItem, CompletionItemGroup, SearchFunctionType, TypeaheadInput, TypeaheadOutput } from '@grafana/ui';
 
 import {
   addLimitInfo,
+  extractLabelMatchers,
   fixSummariesMetadata,
   parseSelector,
   processHistogramMetrics,
   processLabels,
   roundSecToMin,
+  toPromLikeQuery,
 } from './language_utils';
 import PromqlSyntax, { FUNCTIONS, RATE_RANGES } from './promql';
 
@@ -404,6 +414,32 @@ export default class PromQlLanguageProvider extends LanguageProvider {
     return { context, suggestions };
   };
 
+  importFromAbstractQuery(labelBasedQuery: AbstractQuery): PromQuery {
+    return toPromLikeQuery(labelBasedQuery);
+  }
+
+  exportToAbstractQuery(query: PromQuery): AbstractQuery {
+    const promQuery = query.expr;
+    if (!promQuery || promQuery.length === 0) {
+      return { refId: query.refId, labelMatchers: [] };
+    }
+    const tokens = Prism.tokenize(promQuery, PromqlSyntax);
+    const labelMatchers: AbstractLabelMatcher[] = extractLabelMatchers(tokens);
+    const nameLabelValue = getNameLabelValue(promQuery, tokens);
+    if (nameLabelValue && nameLabelValue.length > 0) {
+      labelMatchers.push({
+        name: '__name__',
+        operator: AbstractLabelOperator.Equal,
+        value: nameLabelValue,
+      });
+    }
+
+    return {
+      refId: query.refId,
+      labelMatchers,
+    };
+  }
+
   async getSeries(selector: string, withName?: boolean): Promise<Record<string, string[]>> {
     if (this.datasource.lookupsDisabled) {
       return {};
@@ -503,3 +539,14 @@ export default class PromQlLanguageProvider extends LanguageProvider {
     return DEFAULT_KEYS.reduce((acc, key, i) => ({ ...acc, [key]: values[i] }), {});
   });
 }
+
+function getNameLabelValue(promQuery: string, tokens: any): string {
+  let nameLabelValue = '';
+  for (let prop in tokens) {
+    if (typeof tokens[prop] === 'string') {
+      nameLabelValue = tokens[prop] as string;
+      break;
+    }
+  }
+  return nameLabelValue;
+}
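For orientation (not part of the diff): the two methods above give the Prometheus language provider a deliberately lossy round trip. exportToAbstractQuery keeps only the label matchers from the expression, with the metric name folded in as a __name__ matcher, and importFromAbstractQuery rebuilds a selector-only expression from those matchers. A rough sketch of the effect with illustrative values; languageProvider is assumed to be an instance wired to a Prometheus data source:

import { AbstractQuery } from '@grafana/data';
import PromQlLanguageProvider from './language_provider';
declare const languageProvider: PromQlLanguageProvider; // assumed to exist in the caller's scope

// Export keeps only the selector portion of the query...
const abstract: AbstractQuery = languageProvider.exportToAbstractQuery({
  refId: 'A',
  expr: 'metric_name{job="api"}',
  instant: false,
  range: true,
});
// abstract.labelMatchers ≈ [{ name: 'job', ... }, { name: '__name__', value: 'metric_name', ... }]

// ...so importing it back yields a plain selector, not the original expression:
const roundTripped = languageProvider.importFromAbstractQuery(abstract);
// roundTripped.expr ≈ '{job="api", __name__="metric_name"}'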
@@ -1,9 +1,11 @@
+import { AbstractLabelOperator, AbstractQuery } from '@grafana/data';
 import {
   escapeLabelValueInExactSelector,
   escapeLabelValueInRegexSelector,
   expandRecordingRules,
   fixSummariesMetadata,
   parseSelector,
+  toPromLikeQuery,
 } from './language_utils';
 
 describe('parseSelector()', () => {
@@ -219,3 +221,23 @@ describe('escapeLabelValueInRegexSelector()', () => {
     );
   });
 });
+
+describe('toPromLikeQuery', () => {
+  it('export abstract query to PromQL-like query', () => {
+    const abstractQuery: AbstractQuery = {
+      refId: 'bar',
+      labelMatchers: [
+        { name: 'label1', operator: AbstractLabelOperator.Equal, value: 'value1' },
+        { name: 'label2', operator: AbstractLabelOperator.NotEqual, value: 'value2' },
+        { name: 'label3', operator: AbstractLabelOperator.EqualRegEx, value: 'value3' },
+        { name: 'label4', operator: AbstractLabelOperator.NotEqualRegEx, value: 'value4' },
+      ],
+    };
+
+    expect(toPromLikeQuery(abstractQuery)).toMatchObject({
+      refId: 'bar',
+      expr: '{label1="value1", label2!="value2", label3=~"value3", label4!~"value4"}',
+      range: true,
+    });
+  });
+});
@@ -1,6 +1,9 @@
 import { PromMetricsMetadata, PromMetricsMetadataItem } from './types';
 import { addLabelToQuery } from './add_label_to_query';
 import { SUGGESTIONS_LIMIT } from './language_provider';
+import { DataQuery, AbstractQuery, AbstractLabelOperator, AbstractLabelMatcher } from '@grafana/data';
+import { Token } from 'prismjs';
+import { invert } from 'lodash';
 
 export const processHistogramMetrics = (metrics: string[]) => {
   const resultSet: Set<string> = new Set();
@@ -259,3 +262,80 @@ export function escapeLabelValueInExactSelector(labelValue: string): string {
 export function escapeLabelValueInRegexSelector(labelValue: string): string {
   return escapeLabelValueInExactSelector(escapePrometheusRegexp(labelValue));
 }
+
+const FromPromLikeMap: Record<string, AbstractLabelOperator> = {
+  '=': AbstractLabelOperator.Equal,
+  '!=': AbstractLabelOperator.NotEqual,
+  '=~': AbstractLabelOperator.EqualRegEx,
+  '!~': AbstractLabelOperator.NotEqualRegEx,
+};
+const ToPromLikeMap: Record<AbstractLabelOperator, string> = invert(FromPromLikeMap) as Record<
+  AbstractLabelOperator,
+  string
+>;
+
+export function toPromLikeQuery(labelBasedQuery: AbstractQuery): PromLikeQuery {
+  const expr = labelBasedQuery.labelMatchers
+    .map((selector: AbstractLabelMatcher) => {
+      const operator = ToPromLikeMap[selector.operator];
+      if (operator) {
+        return `${selector.name}${operator}"${selector.value}"`;
+      } else {
+        return '';
+      }
+    })
+    .filter((e: string) => e !== '')
+    .join(', ');
+
+  return {
+    refId: labelBasedQuery.refId,
+    expr: expr ? `{${expr}}` : '',
+    range: true,
+  };
+}
+
+export interface PromLikeQuery extends DataQuery {
+  expr: string;
+  range: boolean;
+}
+
+export function extractLabelMatchers(tokens: Array<string | Token>): AbstractLabelMatcher[] {
+  const labelMatchers: AbstractLabelMatcher[] = [];
+
+  for (let prop in tokens) {
+    if (tokens[prop] instanceof Token) {
+      let token: Token = tokens[prop] as Token;
+      if (token.type === 'context-labels') {
+        let labelKey = '';
+        let labelValue = '';
+        let labelOperator = '';
+        let contentTokens: any[] = token.content as any[];
+        for (let currentToken in contentTokens) {
+          if (typeof contentTokens[currentToken] === 'string') {
+            let currentStr: string;
+            currentStr = contentTokens[currentToken] as string;
+            if (currentStr === '=' || currentStr === '!=' || currentStr === '=~' || currentStr === '!~') {
+              labelOperator = currentStr;
+            }
+          } else if (contentTokens[currentToken] instanceof Token) {
+            switch (contentTokens[currentToken].type) {
+              case 'label-key':
+                labelKey = contentTokens[currentToken].content as string;
+                break;
+              case 'label-value':
+                labelValue = contentTokens[currentToken].content as string;
+                labelValue = labelValue.substring(1, labelValue.length - 1);
+                const labelComparator = FromPromLikeMap[labelOperator];
+                if (labelComparator) {
+                  labelMatchers.push({ name: labelKey, operator: labelComparator, value: labelValue });
+                }
+                break;
+            }
+          }
+        }
+      }
+    }
+  }
+
+  return labelMatchers;
+}
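A closing usage note (not part of the diff): the helpers above are also usable on their own. A small sketch of tokenizing a selector with the PromQL grammar and mapping the extracted matchers back to a selector-only expression; the variable names and the sample selector are illustrative:

import Prism from 'prismjs';
import { AbstractLabelMatcher } from '@grafana/data';
import PromqlSyntax from './promql'; // the grammar the language provider above uses
import { extractLabelMatchers, toPromLikeQuery } from './language_utils';

// Collect matchers from the {...} part of a selector.
const tokens = Prism.tokenize('up{instance!~"dev-.*"}', PromqlSyntax);
const matchers: AbstractLabelMatcher[] = extractLabelMatchers(tokens);
// matchers ≈ [{ name: 'instance', operator: AbstractLabelOperator.NotEqualRegEx, value: 'dev-.*' }]
// (the metric name itself is handled separately, via getNameLabelValue in the provider)

// Map the matchers back into a PromQL-like, selector-only query.
const query = toPromLikeQuery({ refId: 'A', labelMatchers: matchers });
// query.expr ≈ '{instance!~"dev-.*"}'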