Prometheus: Respect dashboard queries when querying ad hoc filter labels (#85674)

* use queries

* extract metric name from query

* unit tests
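
For context on the "extract metric name from query" step: the change below parses each dashboard query with the Prometheus query builder's `buildVisualQueryFromString` and appends one `match[]` selector per metric it finds, including metrics on the right-hand side of binary expressions. A minimal sketch of that idea, written against the plugin's source tree; `collectMatchParams` is an illustrative name, not a function added by this commit:

```ts
import { buildVisualQueryFromString } from './querybuilder/parsing';
import { PromQuery } from './types';

// Illustrative helper mirroring the logic this commit adds to fetchLabels():
// one `match[]` entry per metric referenced by the dashboard's queries.
export function collectMatchParams(queries: PromQuery[]): URLSearchParams {
  const params = new URLSearchParams();
  for (const q of queries) {
    const { query } = buildVisualQueryFromString(q.expr);
    params.append('match[]', query.metric);
    // Binary expressions such as `a + b` expose their other operands here.
    query.binaryQueries?.forEach((bq) => params.append('match[]', bq.query.metric));
  }
  return params;
}

// Example: collectMatchParams([{ refId: 'A', expr: 'rate(go_gc_pauses_seconds_bucket[5m])' }]).toString()
// yields 'match%5B%5D=go_gc_pauses_seconds_bucket'.
```
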
ismail simsek 2024-04-15 19:05:19 +02:00 committed by GitHub
parent 82f910c0c5
commit cc87281d71
3 changed files with 224 additions and 7 deletions

@@ -679,7 +679,7 @@ export class PrometheusDatasource
   // and in Tempo here grafana/public/app/plugins/datasource/tempo/QueryEditor/ServiceGraphSection.tsx
   async getTagKeys(options: DataSourceGetTagKeysOptions<PromQuery>): Promise<MetricFindValue[]> {
     if (!options || options.filters.length === 0) {
-      await this.languageProvider.fetchLabels(options.timeRange);
+      await this.languageProvider.fetchLabels(options.timeRange, options.queries);
       return this.languageProvider.getLabelKeys().map((k) => ({ value: k, text: k }));
     }
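
Reading the hunk above: when no ad hoc filters are selected yet, `getTagKeys` now forwards the dashboard's queries to `fetchLabels`. A hedged sketch of how that call might look from the data source side, assuming `ds` is an already-initialized `PrometheusDatasource`; the wrapper function and variable names are illustrative, not part of the commit:

```ts
import { getDefaultTimeRange } from '@grafana/data';

import { PrometheusDatasource } from './datasource';
import { PromQuery } from './types';

// Illustrative only: with no ad hoc filters selected, getTagKeys() forwards the
// dashboard queries, so the labels request is scoped by match[] selectors.
async function listAdHocFilterKeys(ds: PrometheusDatasource) {
  const queries: PromQuery[] = [{ refId: 'A', expr: 'go_goroutines' }];
  return ds.getTagKeys({ filters: [], timeRange: getDefaultTimeRange(), queries });
}
```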

@@ -4,7 +4,7 @@ import { Label } from './components/monaco-query-field/monaco-completion-provide
 import { PrometheusDatasource } from './datasource';
 import LanguageProvider from './language_provider';
 import { getClientCacheDurationInMinutes, getPrometheusTime, getRangeSnapInterval } from './language_utils';
-import { PrometheusCacheLevel } from './types';
+import { PrometheusCacheLevel, PromQuery } from './types';

 const now = new Date(1681300293392).getTime();
 const timeRangeDurationSeconds = 1;
@@ -306,6 +306,207 @@ describe('Language completion provider', () => {
     });
   });

+  describe('fetchLabels', () => {
+    const tr = getMockTimeRange();
+    const getParams = (requestSpy: ReturnType<typeof jest.spyOn>) => {
+      // Following is equal to `URLSearchParams().toString()`
+      return requestSpy.mock.calls[0][2]?.toString() ?? 'undefined';
+    };
+
+    describe('with POST', () => {
+      let languageProvider: LanguageProvider;
+      beforeEach(() => {
+        languageProvider = new LanguageProvider({
+          ...defaultDatasource,
+          httpMethod: 'POST',
+        } as PrometheusDatasource);
+      });
+
+      it('should send query metrics to the POST request', async () => {
+        const mockQueries: PromQuery[] = [
+          {
+            refId: 'C',
+            expr: 'go_gc_pauses_seconds_bucket',
+          },
+        ];
+        const fetchLabel = languageProvider.fetchLabels;
+        const requestSpy = jest.spyOn(languageProvider, 'request');
+        await fetchLabel(tr, mockQueries);
+        expect(requestSpy).toHaveBeenCalled();
+        const params = getParams(requestSpy);
+        expect(params).toMatch(encodeURI('match[]=go_gc_pauses_seconds_bucket'));
+      });
+
+      it('should send metrics from a complex query to the POST request', async () => {
+        const mockQueries: PromQuery[] = [
+          {
+            refId: 'C',
+            expr: 'histogram_quantile(0.95, sum(rate(go_gc_pauses_seconds_bucket[$__rate_interval])) by (le))',
+          },
+        ];
+        const fetchLabel = languageProvider.fetchLabels;
+        const requestSpy = jest.spyOn(languageProvider, 'request');
+        await fetchLabel(tr, mockQueries);
+        expect(requestSpy).toHaveBeenCalled();
+        const params = getParams(requestSpy);
+        expect(params).toMatch(encodeURI('match[]=go_gc_pauses_seconds_bucket'));
+      });
+
+      it('should send metrics from multiple queries to the POST request', async () => {
+        const mockQueries: PromQuery[] = [
+          {
+            refId: 'B',
+            expr: 'process_cpu_seconds_total',
+          },
+          {
+            refId: 'C',
+            expr: 'go_gc_pauses_seconds_bucket',
+          },
+        ];
+        const fetchLabel = languageProvider.fetchLabels;
+        const requestSpy = jest.spyOn(languageProvider, 'request');
+        await fetchLabel(tr, mockQueries);
+        expect(requestSpy).toHaveBeenCalled();
+        const params = getParams(requestSpy);
+        expect(params).toMatch(encodeURI('match[]=process_cpu_seconds_total&match[]=go_gc_pauses_seconds_bucket'));
+      });
+
+      it('should send metrics from a query that contains multiple metrics to the POST request', async () => {
+        const mockQueries: PromQuery[] = [
+          {
+            refId: 'B',
+            expr: 'process_cpu_seconds_total + go_gc_pauses_seconds_bucket',
+          },
+        ];
+        const fetchLabel = languageProvider.fetchLabels;
+        const requestSpy = jest.spyOn(languageProvider, 'request');
+        await fetchLabel(tr, mockQueries);
+        expect(requestSpy).toHaveBeenCalled();
+        const params = getParams(requestSpy);
+        expect(params).toMatch(encodeURI('match[]=process_cpu_seconds_total&match[]=go_gc_pauses_seconds_bucket'));
+      });
+
+      it('should send metrics from multiple queries that contain multiple metrics to the POST request', async () => {
+        const mockQueries: PromQuery[] = [
+          {
+            refId: 'A',
+            expr: 'histogram_quantile(0.95, sum(rate(process_max_fds[$__rate_interval])) by (le)) + go_gc_heap_frees_by_size_bytes_bucket',
+          },
+          {
+            refId: 'B',
+            expr: 'process_cpu_seconds_total + go_gc_pauses_seconds_bucket',
+          },
+        ];
+        const fetchLabel = languageProvider.fetchLabels;
+        const requestSpy = jest.spyOn(languageProvider, 'request');
+        await fetchLabel(tr, mockQueries);
+        expect(requestSpy).toHaveBeenCalled();
+        const params = getParams(requestSpy);
+        expect(params).toMatch(
+          encodeURI(
+            'match[]=process_max_fds&match[]=go_gc_heap_frees_by_size_bytes_bucket&match[]=process_cpu_seconds_total&match[]=go_gc_pauses_seconds_bucket'
+          )
+        );
+      });
+    });
+
+    describe('with GET', () => {
+      let languageProvider: LanguageProvider;
+      beforeEach(() => {
+        languageProvider = new LanguageProvider({
+          ...defaultDatasource,
+          httpMethod: 'GET',
+        } as PrometheusDatasource);
+      });
+
+      it('should send query metrics to the GET request', async () => {
+        const mockQueries: PromQuery[] = [
+          {
+            refId: 'C',
+            expr: 'go_gc_pauses_seconds_bucket',
+          },
+        ];
+        const fetchLabel = languageProvider.fetchLabels;
+        const requestSpy = jest.spyOn(languageProvider, 'request');
+        await fetchLabel(tr, mockQueries);
+        expect(requestSpy).toHaveBeenCalled();
+        expect(requestSpy.mock.calls[0][0]).toMatch(encodeURI('match[]=go_gc_pauses_seconds_bucket'));
+      });
+
+      it('should send metrics from a complex query to the GET request', async () => {
+        const mockQueries: PromQuery[] = [
+          {
+            refId: 'C',
+            expr: 'histogram_quantile(0.95, sum(rate(go_gc_pauses_seconds_bucket[$__rate_interval])) by (le))',
+          },
+        ];
+        const fetchLabel = languageProvider.fetchLabels;
+        const requestSpy = jest.spyOn(languageProvider, 'request');
+        await fetchLabel(tr, mockQueries);
+        expect(requestSpy).toHaveBeenCalled();
+        expect(requestSpy.mock.calls[0][0]).toMatch(encodeURI('match[]=go_gc_pauses_seconds_bucket'));
+      });
+
+      it('should send metrics from multiple queries to the GET request', async () => {
+        const mockQueries: PromQuery[] = [
+          {
+            refId: 'B',
+            expr: 'process_cpu_seconds_total',
+          },
+          {
+            refId: 'C',
+            expr: 'go_gc_pauses_seconds_bucket',
+          },
+        ];
+        const fetchLabel = languageProvider.fetchLabels;
+        const requestSpy = jest.spyOn(languageProvider, 'request');
+        await fetchLabel(tr, mockQueries);
+        expect(requestSpy).toHaveBeenCalled();
+        expect(requestSpy.mock.calls[0][0]).toMatch(
+          encodeURI('match[]=process_cpu_seconds_total&match[]=go_gc_pauses_seconds_bucket')
+        );
+      });
+
+      it('should send metrics from a query that contains multiple metrics to the GET request', async () => {
+        const mockQueries: PromQuery[] = [
+          {
+            refId: 'B',
+            expr: 'process_cpu_seconds_total + go_gc_pauses_seconds_bucket',
+          },
+        ];
+        const fetchLabel = languageProvider.fetchLabels;
+        const requestSpy = jest.spyOn(languageProvider, 'request');
+        await fetchLabel(tr, mockQueries);
+        expect(requestSpy).toHaveBeenCalled();
+        expect(requestSpy.mock.calls[0][0]).toMatch(
+          encodeURI('match[]=process_cpu_seconds_total&match[]=go_gc_pauses_seconds_bucket')
+        );
+      });
+
+      it('should send metrics from multiple queries that contain multiple metrics to the GET request', async () => {
+        const mockQueries: PromQuery[] = [
+          {
+            refId: 'A',
+            expr: 'histogram_quantile(0.95, sum(rate(process_max_fds[$__rate_interval])) by (le)) + go_gc_heap_frees_by_size_bytes_bucket',
+          },
+          {
+            refId: 'B',
+            expr: 'process_cpu_seconds_total + go_gc_pauses_seconds_bucket',
+          },
+        ];
+        const fetchLabel = languageProvider.fetchLabels;
+        const requestSpy = jest.spyOn(languageProvider, 'request');
+        await fetchLabel(tr, mockQueries);
+        expect(requestSpy).toHaveBeenCalled();
+        expect(requestSpy.mock.calls[0][0]).toMatch(
+          encodeURI(
+            'match[]=process_max_fds&match[]=go_gc_heap_frees_by_size_bytes_bucket&match[]=process_cpu_seconds_total&match[]=go_gc_pauses_seconds_bucket'
+          )
+        );
+      });
+    });
+  });
+
   describe('fetchLabelValues', () => {
     it('should interpolate variable in series', () => {
       const languageProvider = new LanguageProvider({

@@ -21,6 +21,7 @@ import {
   toPromLikeQuery,
 } from './language_utils';
 import PromqlSyntax from './promql';
+import { buildVisualQueryFromString } from './querybuilder/parsing';
 import { PrometheusCacheLevel, PromMetricsMetadata, PromQuery } from './types';

 const DEFAULT_KEYS = ['job', 'instance'];
@@ -204,21 +205,36 @@ export default class PromQlLanguageProvider extends LanguageProvider {
   /**
    * Fetches all label keys
    */
-  async fetchLabels(timeRange?: TimeRange): Promise<string[]> {
+  fetchLabels = async (timeRange?: TimeRange, queries?: PromQuery[]): Promise<string[]> => {
     if (timeRange) {
       this.timeRange = timeRange;
     }
-    const url = '/api/v1/labels';
-    const params = this.datasource.getAdjustedInterval(this.timeRange);
+    let url = '/api/v1/labels';
+    const timeParams = this.datasource.getAdjustedInterval(this.timeRange);
     this.labelFetchTs = Date.now().valueOf();

-    const res = await this.request(url, [], params, this.getDefaultCacheHeaders());
+    const searchParams = new URLSearchParams({ ...timeParams });
+    queries?.forEach((q) => {
+      const visualQuery = buildVisualQueryFromString(q.expr);
+      searchParams.append('match[]', visualQuery.query.metric);
+      if (visualQuery.query.binaryQueries) {
+        visualQuery.query.binaryQueries.forEach((bq) => {
+          searchParams.append('match[]', bq.query.metric);
+        });
+      }
+    });
+
+    if (this.datasource.httpMethod === 'GET') {
+      url += `?${searchParams.toString()}`;
+    }
+
+    const res = await this.request(url, [], searchParams, this.getDefaultCacheHeaders());
     if (Array.isArray(res)) {
       this.labelKeys = res.slice().sort();
     }
     return [];
-  }
+  };

   /**
    * Gets series values