Chore: Fix legend changing when using incremental querying (#93529)
* rename variables
* fix setting legend
* yarn prettier:write
* only update displayNameFromDS
parent d75fee5207
commit f8fd45892d
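The diff below touches three files, in order: QueryCache.test.ts, QueryCache.ts, and QueryCacheTestData.ts. Besides renaming the targSigs/TargetSig family to targetSignatures/TargetSignature, the behavioral change is that merging an incremental result into the cached frames now also copies the latest displayNameFromDS onto the cached field config, so the legend follows the current query options instead of whatever was cached first. A minimal sketch of that merge step; the standalone helper below is illustrative only, not code from this commit:

import { DataFrame } from '@grafana/data';

// Illustrative only: while appending incremental values to a cached frame,
// also carry over the newest displayNameFromDS so the legend stays current.
function syncDisplayNameFromDS(cachedFrame: DataFrame, respFrame: DataFrame): void {
  for (let i = 0; i < cachedFrame.fields.length; i++) {
    const cachedConfig = cachedFrame.fields[i].config;
    const respConfig = respFrame.fields[i].config;
    if (cachedConfig.displayNameFromDS !== respConfig.displayNameFromDS) {
      cachedConfig.displayNameFromDS = respConfig.displayNameFromDS;
    }
  }
}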
@@ -7,7 +7,11 @@ import { QueryEditorMode } from '../querybuilder/shared/types';
 import { PromQuery } from '../types';

 import { CacheRequestInfo, findDatapointStep, QueryCache } from './QueryCache';
-import { IncrementalStorageDataFrameScenarios, trimmedFirstPointInPromFrames } from './QueryCacheTestData';
+import {
+  differentDisplayNameFromDS,
+  trimmedFirstPointInPromFrames,
+  IncrementalStorageDataFrameScenarios,
+} from './QueryCacheTestData';

 // Will not interpolate vars!
 const interpolateStringTest = (query: PromQuery) => {
@@ -124,7 +128,7 @@ describe('QueryCache: Generic', function () {
       }),
       {
         requests: [], // unused
-        targSigs: cache,
+        targetSignatures: cache,
         shouldCache: true,
       },
       firstFrames
@@ -148,7 +152,7 @@ describe('QueryCache: Generic', function () {
       secondRequest,
       {
         requests: [], // unused
-        targSigs: cache,
+        targetSignatures: cache,
         shouldCache: true,
       },
       secondFrames
@@ -241,7 +245,7 @@ describe('QueryCache: Prometheus', function () {
       request,
       {
         requests: [], // unused
-        targSigs: targetSignatures,
+        targetSignatures: targetSignatures,
         shouldCache: true,
       },
       firstFrames
@@ -265,7 +269,7 @@ describe('QueryCache: Prometheus', function () {
       secondRequest,
       {
         requests: [], // unused
-        targSigs: targetSignatures,
+        targetSignatures: targetSignatures,
         shouldCache: true,
       },
       secondFrames
@@ -387,9 +391,9 @@ describe('QueryCache: Prometheus', function () {
       panelId: panelId,
     });

-    const requestInfo = {
+    const requestInfo: CacheRequestInfo<PromQuery> = {
       requests: [], // unused
-      targSigs: cache,
+      targetSignatures: cache,
       shouldCache: true,
     };
     const targetSignature = `1=1|${interval}|${JSON.stringify(request.rangeRaw ?? '')}`;
@@ -407,7 +411,7 @@ describe('QueryCache: Prometheus', function () {
       }),
       {
         requests: [], // unused
-        targSigs: cache,
+        targetSignatures: cache,
         shouldCache: true,
       },
       secondFrames
@@ -430,7 +434,7 @@ describe('QueryCache: Prometheus', function () {
       }),
       {
         requests: [], // unused
-        targSigs: cache,
+        targetSignatures: cache,
         shouldCache: true,
       },
       thirdFrames
@@ -537,7 +541,7 @@ describe('QueryCache: Prometheus', function () {
     request.targets[0].interval = '1m';
     const requestInfo: CacheRequestInfo<PromQuery> = {
       requests: [], // unused
-      targSigs: cache,
+      targetSignatures: cache,
       shouldCache: true,
     };
     const targetSignature = `1=1|${interval}|${JSON.stringify(request.rangeRaw ?? '')}`;
@@ -558,6 +562,87 @@ describe('QueryCache: Prometheus', function () {
     expect(cacheRequest.requests[0]).toBe(request);
     expect(cacheRequest.shouldCache).toBe(true);
   });
+
+  it('should always use the newest config information', () => {
+    const storage = new QueryCache<PromQuery>({
+      getTargetSignature: getPrometheusTargetSignature,
+      overlapString: '10m',
+    });
+
+    // Initial request with custom legend info {{org}}-customLegend
+    const firstFrames = differentDisplayNameFromDS.first.dataFrames as unknown as DataFrame[];
+
+    // Second request with legend __auto which results having no displayNameFromDS
+    const secondFrames = differentDisplayNameFromDS.second.dataFrames as unknown as DataFrame[];
+
+    const cache = new Map<string, string>();
+    const interval = 15000;
+
+    // start time of scenario
+    const firstFrom = dateTime(new Date(1726829205000));
+    const firstTo = dateTime(new Date(1726829832515));
+    const firstRange: TimeRange = {
+      from: firstFrom,
+      to: firstTo,
+      raw: {
+        from: 'now-1h',
+        to: 'now',
+      },
+    };
+
+    const secondFrom = dateTime(new Date(1726829220000));
+    const secondTo = dateTime(new Date(1726829903931));
+    const secondRange: TimeRange = {
+      from: secondFrom,
+      to: secondTo,
+      raw: {
+        from: 'now-1h',
+        to: 'now',
+      },
+    };
+
+    // Signifier definition
+    const dashboardId = `dashid`;
+    const panelId = 200;
+    const targetIdentity = `${dashboardId}|${panelId}|A`;
+
+    const request = mockPromRequest({
+      range: firstRange,
+      dashboardUID: dashboardId,
+      panelId: panelId,
+      app: 'first_app',
+    });
+
+    const requestInfo: CacheRequestInfo<PromQuery> = {
+      requests: [], // unused
+      targetSignatures: cache,
+      shouldCache: true,
+    };
+    const targetSignature = `1=1|${interval}|${JSON.stringify(request.rangeRaw ?? '')}`;
+    cache.set(targetIdentity, targetSignature);
+
+    const firstQueryResult = storage.procFrames(request, requestInfo, firstFrames);
+
+    expect(firstQueryResult[0].fields[1].config.displayNameFromDS).toBeDefined();
+    expect(firstQueryResult[0].fields[1].config.displayNameFromDS).toEqual('rutgerkerkhoffdevuseast-customLegend');
+
+    const secondQueryResult = storage.procFrames(
+      mockPromRequest({
+        range: secondRange,
+        dashboardUID: dashboardId,
+        panelId: panelId,
+        app: 'second_app',
+      }),
+      {
+        requests: [], // unused
+        targetSignatures: cache,
+        shouldCache: true,
+      },
+      secondFrames
+    );
+
+    expect(secondQueryResult[0].fields[1].config.displayNameFromDS).not.toBeDefined();
+  });
 });

 describe('findDataPointStep', () => {
@@ -23,7 +23,7 @@ type TargetIdent = string;

 // query + template variables + interval + raw time range
 // used for full target cache busting -> full range re-query
-type TargetSig = string;
+type TargetSignature = string;
 type TimestampMs = number;
 type SupportedQueryTypes = PromQuery;
 type ApplyInterpolation = (str: string, scopedVars?: ScopedVars) => string;
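For orientation, both the identity and the signature are plain strings. The identity stays fixed for a given panel target, while the signature folds in the interpolated query, the interval, and the raw time range, so a change to any of those busts that target's cache. Sketched below with the literal values the tests use ('1=1' is the test query expression; this snippet is illustrative only):

// Identity: stable per target, `${dashboardUID}|${panelId}|${refId}`
const dashboardUID = 'dashid';
const panelId = 200;
const refId = 'A';
const targetIdentity = `${dashboardUID}|${panelId}|${refId}`; // 'dashid|200|A'

// Signature: expected to change, interpolated expr + interval + raw range
const interval = 15000;
const rangeRaw = { from: 'now-1h', to: 'now' };
const targetSignature = `1=1|${interval}|${JSON.stringify(rangeRaw ?? '')}`;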
@@ -32,14 +32,14 @@ type ApplyInterpolation = (str: string, scopedVars?: ScopedVars) => string;
 export const defaultPrometheusQueryOverlapWindow = '10m';

 interface TargetCache {
-  sig: TargetSig;
+  signature: TargetSignature;
   prevTo: TimestampMs;
   frames: DataFrame[];
 }

 export interface CacheRequestInfo<T extends SupportedQueryTypes> {
   requests: Array<DataQueryRequest<T>>;
-  targSigs: Map<TargetIdent, TargetSig>;
+  targetSignatures: Map<TargetIdent, TargetSignature>;
   shouldCache: boolean;
 }

@@ -48,13 +48,13 @@ export interface CacheRequestInfo<T extends SupportedQueryTypes> {
 * This is the string used to uniquely identify a field within a "target"
 * @param field
 */
-export const getFieldIdent = (field: Field) => `${field.type}|${field.name}|${JSON.stringify(field.labels ?? '')}`;
+export const getFieldIdentity = (field: Field) => `${field.type}|${field.name}|${JSON.stringify(field.labels ?? '')}`;

 /**
 * NOMENCLATURE
 * Target: The request target (DataQueryRequest), i.e. a specific query reference within a panel
-* Ident: Identity: the string that is not expected to change
-* Sig: Signature: the string that is expected to change, upon which we wipe the cache fields
+* Identity: the string that is not expected to change
+* Signature: the string that is expected to change, upon which we wipe the cache fields
 */
 export class QueryCache<T extends SupportedQueryTypes> {
   private overlapWindowMs: number;
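Within a target, series are matched between cached and incoming frames by the identity of their second (non-time) field, which getFieldIdentity builds from the field's type, name, and labels. For the fixture added at the end of this commit, that identity looks like the following (illustrative snippet, field shape trimmed to what the identity uses):

// getFieldIdentity(field) => `${field.type}|${field.name}|${JSON.stringify(field.labels ?? '')}`
const valueField = {
  type: 'number',
  name: 'Value',
  labels: { org: 'rutgerkerkhoffdevuseast', stackId: '2791' },
};
const fieldIdentity = `${valueField.type}|${valueField.name}|${JSON.stringify(valueField.labels ?? '')}`;
// => 'number|Value|{"org":"rutgerkerkhoffdevuseast","stackId":"2791"}'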
@@ -97,20 +97,20 @@ export class QueryCache<T extends SupportedQueryTypes> {
     let doPartialQuery = shouldCache;
     let prevTo: TimestampMs | undefined = undefined;

-    // pre-compute reqTargSigs
-    const reqTargSigs = new Map<TargetIdent, TargetSig>();
-    request.targets.forEach((targ) => {
-      let targIdent = `${request.dashboardUID}|${request.panelId}|${targ.refId}`;
-      let targSig = this.getTargetSignature(request, targ); // ${request.maxDataPoints} ?
-      reqTargSigs.set(targIdent, targSig);
+    // pre-compute reqTargetSignatures
+    const reqTargetSignatures = new Map<TargetIdent, TargetSignature>();
+    request.targets.forEach((target) => {
+      let targetIdentity = `${request.dashboardUID}|${request.panelId}|${target.refId}`;
+      let targetSignature = this.getTargetSignature(request, target); // ${request.maxDataPoints} ?
+      reqTargetSignatures.set(targetIdentity, targetSignature);
     });

     // figure out if new query range or new target props trigger full cache invalidation & re-query
-    for (const [targIdent, targSig] of reqTargSigs) {
-      let cached = this.cache.get(targIdent);
-      let cachedSig = cached?.sig;
+    for (const [targetIdentity, targetSignature] of reqTargetSignatures) {
+      let cached = this.cache.get(targetIdentity);
+      let cachedSig = cached?.signature;

-      if (cachedSig !== targSig) {
+      if (cachedSig !== targetSignature) {
         doPartialQuery = false;
       } else {
         // only do partial queries when new request range follows prior request range (possibly with overlap)
@@ -142,14 +142,14 @@ export class QueryCache<T extends SupportedQueryTypes> {
         },
       };
     } else {
-      reqTargSigs.forEach((targSig, targIdent) => {
+      reqTargetSignatures.forEach((targSig, targIdent) => {
        this.cache.delete(targIdent);
       });
     }

     return {
       requests: [request],
-      targSigs: reqTargSigs,
+      targetSignatures: reqTargetSignatures,
       shouldCache,
     };
   }
@@ -168,13 +168,13 @@ export class QueryCache<T extends SupportedQueryTypes> {
     const respByTarget = new Map<TargetIdent, DataFrame[]>();

     respFrames.forEach((frame: DataFrame) => {
-      let targIdent = `${request.dashboardUID}|${request.panelId}|${frame.refId}`;
+      let targetIdent = `${request.dashboardUID}|${request.panelId}|${frame.refId}`;

-      let frames = respByTarget.get(targIdent);
+      let frames = respByTarget.get(targetIdent);

       if (!frames) {
         frames = [];
-        respByTarget.set(targIdent, frames);
+        respByTarget.set(targetIdent, frames);
       }

       frames.push(frame);
@@ -182,8 +182,8 @@ export class QueryCache<T extends SupportedQueryTypes> {

     let outFrames: DataFrame[] = [];

-    respByTarget.forEach((respFrames, targIdent) => {
-      let cachedFrames = (targIdent ? this.cache.get(targIdent)?.frames : null) ?? [];
+    respByTarget.forEach((respFrames, targetIdentity) => {
+      let cachedFrames = (targetIdentity ? this.cache.get(targetIdentity)?.frames : null) ?? [];

       respFrames.forEach((respFrame: DataFrame) => {
         // skip empty frames
@@ -193,9 +193,9 @@ export class QueryCache<T extends SupportedQueryTypes> {

         // frames are identified by their second (non-time) field's name + labels
         // TODO: maybe also frame.meta.type?
-        let respFrameIdent = getFieldIdent(respFrame.fields[1]);
+        let respFrameIdentity = getFieldIdentity(respFrame.fields[1]);

-        let cachedFrame = cachedFrames.find((cached) => getFieldIdent(cached.fields[1]) === respFrameIdent);
+        let cachedFrame = cachedFrames.find((cached) => getFieldIdentity(cached.fields[1]) === respFrameIdentity);

         if (!cachedFrame) {
           // append new unknown frames
@@ -213,6 +213,9 @@ export class QueryCache<T extends SupportedQueryTypes> {
           if (amendedTable) {
             for (let i = 0; i < amendedTable.length; i++) {
               cachedFrame.fields[i].values = amendedTable[i];
+              if (cachedFrame.fields[i].config.displayNameFromDS !== respFrame.fields[i].config.displayNameFromDS) {
+                cachedFrame.fields[i].config.displayNameFromDS = respFrame.fields[i].config.displayNameFromDS;
+              }
             }
             cachedFrame.length = cachedFrame.fields[0].values.length;
           }
@@ -239,8 +242,8 @@ export class QueryCache<T extends SupportedQueryTypes> {
         }
       });

-      this.cache.set(targIdent, {
-        sig: requestInfo.targSigs.get(targIdent)!,
+      this.cache.set(targetIdentity, {
+        signature: requestInfo.targetSignatures.get(targetIdentity)!,
         frames: nonEmptyCachedFrames,
         prevTo: newTo,
       });
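The unchanged logic around these hunks is what makes the querying incremental: when a target's cached signature still matches and the new range follows the previous one, only the tail of the range (widened by the overlap window, defaultPrometheusQueryOverlapWindow = '10m') is re-queried, and procFrames appends the response onto the cached frames. A rough sketch of that decision with simplified types; this paraphrases the idea and is not the exact implementation:

// Rough paraphrase of the partial re-query decision (millisecond timestamps assumed).
interface CachedTargetSketch {
  signature: string;
  prevTo: number; // end of the previously cached range
}

function getRequeryFromMs(
  cached: CachedTargetSketch | undefined,
  targetSignature: string,
  requestFromMs: number,
  requestToMs: number,
  overlapWindowMs: number
): number {
  // Signature changed (query / interval / raw range changed): re-query the full range.
  if (!cached || cached.signature !== targetSignature) {
    return requestFromMs;
  }
  // Only query partially when the new range follows the prior one (possibly overlapping).
  if (cached.prevTo >= requestFromMs && cached.prevTo <= requestToMs) {
    return Math.max(requestFromMs, cached.prevTo - overlapWindowMs);
  }
  return requestFromMs;
}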
@@ -1006,3 +1006,258 @@ export const IncrementalStorageDataFrameScenarios = {
     },
   },
 };
+
+export const differentDisplayNameFromDS = {
+  first: {
+    dataFrames: [
+      {
+        refId: 'A',
+        meta: {
+          type: 'timeseries-multi',
+          typeVersion: [0, 1],
+          custom: {
+            resultType: 'matrix',
+          },
+          executedQueryString:
+            'Expr: sum by (org, stackId)(count_over_time(hosted_grafana:grafana_datasource_loki_orgs_stacks_queries:count2m{org=~".*", stackId=~".*"}[5m]))\nStep: 5m0s',
+        },
+        fields: [
+          {
+            name: 'Time',
+            type: 'time',
+            typeInfo: {
+              frame: 'time.Time',
+            },
+            config: {
+              interval: 300000,
+            },
+            values: [1726829100000, 1726829400000, 1726829700000],
+            entities: {},
+          },
+          {
+            name: 'Value',
+            type: 'number',
+            typeInfo: {
+              frame: 'float64',
+            },
+            labels: {
+              org: 'rutgerkerkhoffdevuseast',
+              stackId: '2791',
+            },
+            config: {
+              displayNameFromDS: 'rutgerkerkhoffdevuseast-customLegend',
+            },
+            values: [3, 2, 3],
+            entities: {},
+          },
+        ],
+        length: 3,
+      },
+      {
+        refId: 'A',
+        meta: {
+          type: 'timeseries-multi',
+          typeVersion: [0, 1],
+          custom: {
+            resultType: 'matrix',
+          },
+        },
+        fields: [
+          {
+            name: 'Time',
+            type: 'time',
+            typeInfo: {
+              frame: 'time.Time',
+            },
+            config: {
+              interval: 300000,
+            },
+            values: [1726829100000, 1726829400000, 1726829700000],
+            entities: {},
+          },
+          {
+            name: 'Value',
+            type: 'number',
+            typeInfo: {
+              frame: 'float64',
+            },
+            labels: {
+              org: 'securityops',
+              stackId: '1533',
+            },
+            config: {
+              displayNameFromDS: 'securityops-customLegend',
+            },
+            values: [3, 2, 3],
+            entities: {},
+          },
+        ],
+        length: 3,
+      },
+      {
+        refId: 'A',
+        meta: {
+          type: 'timeseries-multi',
+          typeVersion: [0, 1],
+          custom: {
+            resultType: 'matrix',
+          },
+        },
+        fields: [
+          {
+            name: 'Time',
+            type: 'time',
+            typeInfo: {
+              frame: 'time.Time',
+            },
+            config: {
+              interval: 300000,
+            },
+            values: [1726829100000, 1726829400000, 1726829700000],
+            entities: {},
+          },
+          {
+            name: 'Value',
+            type: 'number',
+            typeInfo: {
+              frame: 'float64',
+            },
+            labels: {
+              org: 'stephaniehingtgen',
+              stackId: '3740',
+            },
+            config: {
+              displayNameFromDS: 'stephaniehingtgen-customLegend',
+            },
+            values: [3, 2, 3],
+            entities: {},
+          },
+        ],
+        length: 3,
+      },
+    ],
+  },
+  second: {
+    dataFrames: [
+      {
+        refId: 'A',
+        meta: {
+          type: 'timeseries-multi',
+          typeVersion: [0, 1],
+          custom: {
+            resultType: 'matrix',
+          },
+          executedQueryString:
+            'Expr: sum by (org, stackId)(count_over_time(hosted_grafana:grafana_datasource_loki_orgs_stacks_queries:count2m{org=~".*", stackId=~".*"}[5m]))\nStep: 5m0s',
+        },
+        fields: [
+          {
+            name: 'Time',
+            type: 'time',
+            typeInfo: {
+              frame: 'time.Time',
+            },
+            config: {
+              interval: 300000,
+            },
+            values: [1726829100000, 1726829400000, 1726829700000],
+            entities: {},
+          },
+          {
+            name: 'Value',
+            type: 'number',
+            typeInfo: {
+              frame: 'float64',
+            },
+            labels: {
+              org: 'rutgerkerkhoffdevuseast',
+              stackId: '2791',
+            },
+            config: {},
+            values: [3, 2, 3],
+            entities: {},
+          },
+        ],
+        length: 3,
+      },
+      {
+        refId: 'A',
+        meta: {
+          type: 'timeseries-multi',
+          typeVersion: [0, 1],
+          custom: {
+            resultType: 'matrix',
+          },
+        },
+        fields: [
+          {
+            name: 'Time',
+            type: 'time',
+            typeInfo: {
+              frame: 'time.Time',
+            },
+            config: {
+              interval: 300000,
+            },
+            values: [1726829100000, 1726829400000, 1726829700000],
+            entities: {},
+          },
+          {
+            name: 'Value',
+            type: 'number',
+            typeInfo: {
+              frame: 'float64',
+            },
+            labels: {
+              org: 'securityops',
+              stackId: '1533',
+            },
+            config: {},
+            values: [3, 2, 3],
+            entities: {},
+          },
+        ],
+        length: 3,
+      },
+      {
+        refId: 'A',
+        meta: {
+          type: 'timeseries-multi',
+          typeVersion: [0, 1],
+          custom: {
+            resultType: 'matrix',
+          },
+        },
+        fields: [
+          {
+            name: 'Time',
+            type: 'time',
+            typeInfo: {
+              frame: 'time.Time',
+            },
+            config: {
+              interval: 300000,
+            },
+            values: [1726829100000, 1726829400000, 1726829700000],
+            entities: {},
+          },
+          {
+            name: 'Value',
+            type: 'number',
+            typeInfo: {
+              frame: 'float64',
+            },
+            labels: {
+              org: 'stephaniehingtgen',
+              stackId: '3740',
+            },
+            config: {},
+            values: [3, 2, 3],
+            entities: {},
+          },
+        ],
+        length: 3,
+      },
+    ],
+  },
+};