Loki: Full range logs volume (#39327)

* Basic implementation of getLogsVolumeQuery method

* Add todos

* Add a switcher to automatically load logs volume

* De-scope dismissing logs volume panel

* De-scope logs volume query cancellation

* Remove todo

* Aggregate logs volume components in single panel

* Show logs volume only when it's available

* Aggregate logs volume by level

* Simplify aggregation

* Handle no logs volume data

* Add error handling

* Do not show the auto-load logs volume switcher when logs volume loading is not available

* Remove old logs volume graph

* Clean up

* Make getting data provider more generic

* Provide complete logs volume data (error, isLoading)

* Display more specific error message

* Add missing props to mocks

* Remove setRequest method

* Mark getQueryRelatedDataProviders as internal

* Add missing dataQueryRequest and add a todo

* Remove redundant loading state

* Do not mutate existing queries

* Apply fix for zooming-in from main

* Post-merge fixes

* Create collection for data provider results

* Use more generic names

* Move aggregation logic to Loki logs volume provider

* Move LogsVolume to common types

* Update tests

* Post-merge fixes

* Fix mapping related data values

* Simplify prop mappings

* Add docs

* Fix property name

* Clean-up

* Mark new types as internal

* Reduce number of providers to logs volume only

* Simplify data structure to DataQueryResponse

* Move Logs Volume panel to a separate component

* Test logsVolumeProvider.ts

* Add observable version of datasource mock

* Test getLogsVolumeDataProvider method

* Test LogsVolumePanel

* Test logs volume reducer

* Clean up

* Clean up

* Fix test

* Use sum by to read the level field directly

* Fix strict type errors

* Fix strict type errors

* Use "logs" instead of "unknown" if only one level was detected

* Add docs about logs volume

* Rename histogramRequest to logsVolumeRequest

* Use LogsVolumeContentWrapper for all content types

* Move `autoLoadLogsVolume` local storage handling

* Fix strict error

* Move getting autoLoadLogsVolume to initial state

* Cancel current logs volume subscription

* Test cancelling subscriptions

* Update docs/sources/datasources/loki.md

Co-authored-by: achatterjee-grafana <70489351+achatterjee-grafana@users.noreply.github.com>

* Update packages/grafana-data/src/types/explore.ts

Co-authored-by: achatterjee-grafana <70489351+achatterjee-grafana@users.noreply.github.com>

* Inline container styles

* Ensure logs volume is aggregated separately for each subscription

* Simplify logs volume provider

* Type-guard support for logs volume provider

* Simplify event handlers to avoid casting

* Clean up and docs

* Move auto-load switcher to logs volume panel

* Fix test

* Move DataSourceWithLogsVolumeSupport to avoid cross referencing

* Simplify interface

* Bring back old histogram and hide the new one behind a feature flag

* Add missing props to logs histogram panel

* Clean up the provider when it's not supported

* Simplify storing autoLoadLogsVolume

* Remove docs

* Update packages/grafana-data/src/types/logsVolume.ts

Co-authored-by: Andrej Ocenas <mr.ocenas@gmail.com>

* Skip dataframes without fields (instant queries)

* Revert styles changes

* Revert styles changes

* Add release tag

Co-authored-by: achatterjee-grafana <70489351+achatterjee-grafana@users.noreply.github.com>
Co-authored-by: Andrej Ocenas <mr.ocenas@gmail.com>
Piotr Jamróz authored on 2021-09-30 15:46:11 +02:00, committed by GitHub
parent b7a68a9516
commit 124e9daf26
22 changed files with 1004 additions and 57 deletions
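
The commit messages above refer to a DataSourceWithLogsVolumeSupport interface and type-guard support for it. Before the diffs, here is a minimal sketch of how a consumer such as Explore might detect and use the provider; the interface shape is taken from the LokiDatasource diff below, while hasLogsVolumeSupport and loadLogsVolume are assumed names used for illustration only, not the upstream API.

import { Observable } from 'rxjs';
import { DataQuery, DataQueryRequest, DataQueryResponse } from '@grafana/data';

// Interface shape inferred from the LokiDatasource diff below; the upstream definition may differ.
interface DataSourceWithLogsVolumeSupport<TQuery extends DataQuery> {
  getLogsVolumeDataProvider(request: DataQueryRequest<TQuery>): Observable<DataQueryResponse> | undefined;
}

// Hypothetical type guard: a datasource supports logs volume if it exposes the provider method.
function hasLogsVolumeSupport<TQuery extends DataQuery>(
  datasource: unknown
): datasource is DataSourceWithLogsVolumeSupport<TQuery> {
  return typeof (datasource as DataSourceWithLogsVolumeSupport<TQuery>)?.getLogsVolumeDataProvider === 'function';
}

// Usage sketch: a consumer asks the datasource for a provider and subscribes to it. The
// observable emits a Loading response first, then either Done with aggregated frames or an error.
function loadLogsVolume<TQuery extends DataQuery>(datasource: unknown, request: DataQueryRequest<TQuery>): void {
  if (!hasLogsVolumeSupport<TQuery>(datasource)) {
    return;
  }
  datasource.getLogsVolumeDataProvider(request)?.subscribe((response: DataQueryResponse) => {
    console.log(response.state, response.data.length);
  });
}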


@@ -0,0 +1,107 @@
import { MockObservableDataSourceApi } from '../../../../../test/mocks/datasource_srv';
import { createLokiLogsVolumeProvider } from './logsVolumeProvider';
import LokiDatasource from '../datasource';
import { DataQueryRequest, DataQueryResponse, FieldType, LoadingState, toDataFrame } from '@grafana/data';
import { LokiQuery } from '../types';
import { Observable } from 'rxjs';

function createFrame(labels: object, timestamps: number[], values: number[]) {
return toDataFrame({
fields: [
{ name: 'Time', type: FieldType.time, values: timestamps },
{
name: 'Number',
type: FieldType.number,
values,
labels,
},
],
});
}

function createExpectedFields(levelName: string, timestamps: number[], values: number[]) {
return [
{ name: 'Time', values: { buffer: timestamps } },
{
name: 'Value',
config: { displayNameFromDS: levelName },
values: { buffer: values },
},
];
}

describe('LokiLogsVolumeProvider', () => {
let volumeProvider: Observable<DataQueryResponse>,
datasource: MockObservableDataSourceApi,
request: DataQueryRequest<LokiQuery>;
function setup(datasourceSetup: () => void) {
datasourceSetup();
request = ({
targets: [{ expr: '{app="app01"}' }, { expr: '{app="app02"}' }],
} as unknown) as DataQueryRequest<LokiQuery>;
volumeProvider = createLokiLogsVolumeProvider((datasource as unknown) as LokiDatasource, request);
}

function setupMultipleResults() {
// level=unknown
const resultAFrame1 = createFrame({ app: 'app01' }, [100, 200, 300], [5, 5, 5]);
// level=error
const resultAFrame2 = createFrame({ app: 'app01', level: 'error' }, [100, 200, 300], [0, 1, 0]);
// level=unknown
const resultBFrame1 = createFrame({ app: 'app02' }, [100, 200, 300], [1, 2, 3]);
// level=error
const resultBFrame2 = createFrame({ app: 'app02', level: 'error' }, [100, 200, 300], [1, 1, 1]);
datasource = new MockObservableDataSourceApi('loki', [
{
data: [resultAFrame1, resultAFrame2],
},
{
data: [resultBFrame1, resultBFrame2],
},
]);
}

function setupErrorResponse() {
datasource = new MockObservableDataSourceApi('loki', [], undefined, 'Error message');
}

it('aggregates data frames by level', async () => {
setup(setupMultipleResults);
await expect(volumeProvider).toEmitValuesWith((received) => {
expect(received).toMatchObject([
{ state: LoadingState.Loading, error: undefined, data: [] },
{
state: LoadingState.Done,
error: undefined,
data: [
{
fields: createExpectedFields('unknown', [100, 200, 300], [6, 7, 8]),
},
{
fields: createExpectedFields('error', [100, 200, 300], [1, 2, 1]),
},
],
},
]);
});
});

it('returns error', async () => {
setup(setupErrorResponse);
await expect(volumeProvider).toEmitValuesWith((received) => {
expect(received).toMatchObject([
{ state: LoadingState.Loading, error: undefined, data: [] },
{
state: LoadingState.Error,
error: 'Error message',
data: [],
},
'Error message',
]);
});
});
});


@@ -0,0 +1,175 @@
import {
DataFrame,
DataQueryRequest,
DataQueryResponse,
FieldCache,
FieldColorModeId,
FieldConfig,
FieldType,
getLogLevelFromKey,
Labels,
LoadingState,
LogLevel,
MutableDataFrame,
toDataFrame,
} from '@grafana/data';
import { LokiQuery } from '../types';
import { Observable } from 'rxjs';
import { cloneDeep } from 'lodash';
import LokiDatasource, { isMetricsQuery } from '../datasource';
import { LogLevelColor } from '../../../../core/logs_model';
import { BarAlignment, GraphDrawStyle, StackingMode } from '@grafana/schema';
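
/**
 * Create an Observable that runs a logs-volume version of the given request
 * (non-metric targets rewritten to a `sum by (level) (count_over_time(...))` query),
 * emits a Loading state immediately, and completes with the raw frames aggregated per level.
 */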
export function createLokiLogsVolumeProvider(
datasource: LokiDatasource,
dataQueryRequest: DataQueryRequest<LokiQuery>
): Observable<DataQueryResponse> {
const logsVolumeRequest = cloneDeep(dataQueryRequest);
logsVolumeRequest.targets = logsVolumeRequest.targets
.filter((target) => target.expr && !isMetricsQuery(target.expr))
.map((target) => {
return {
...target,
expr: `sum by (level) (count_over_time(${target.expr}[$__interval]))`,
};
});
return new Observable((observer) => {
let rawLogsVolume: DataFrame[] = [];
observer.next({
state: LoadingState.Loading,
error: undefined,
data: [],
});
const subscription = datasource.query(logsVolumeRequest).subscribe({
complete: () => {
const aggregatedLogsVolume = aggregateRawLogsVolume(rawLogsVolume);
observer.next({
state: LoadingState.Done,
error: undefined,
data: aggregatedLogsVolume,
});
observer.complete();
},
next: (dataQueryResponse: DataQueryResponse) => {
rawLogsVolume = rawLogsVolume.concat(dataQueryResponse.data.map(toDataFrame));
},
error: (error) => {
observer.next({
state: LoadingState.Error,
error: error,
data: [],
});
observer.error(error);
},
});
return () => {
subscription?.unsubscribe();
};
});
}

/**
* Add up values for the same level and create a single data frame for each level
*/
function aggregateRawLogsVolume(rawLogsVolume: DataFrame[]): DataFrame[] {
const logsVolumeByLevelMap: { [level in LogLevel]?: DataFrame[] } = {};
let levels = 0;
rawLogsVolume.forEach((dataFrame) => {
let valueField;
try {
valueField = new FieldCache(dataFrame).getFirstFieldOfType(FieldType.number);
} catch {}
// If the value field doesn't exist, skip the frame (this may happen with instant queries)
if (!valueField) {
return;
}
const level: LogLevel = valueField.labels ? getLogLevelFromLabels(valueField.labels) : LogLevel.unknown;
if (!logsVolumeByLevelMap[level]) {
logsVolumeByLevelMap[level] = [];
levels++;
}
logsVolumeByLevelMap[level]!.push(dataFrame);
});
return Object.keys(logsVolumeByLevelMap).map((level: string) => {
return aggregateFields(logsVolumeByLevelMap[level as LogLevel]!, getFieldConfig(level as LogLevel, levels));
});
}
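
// Field config for one aggregated level: stacked bars in the level's color;
// if only a single unknown level was detected, label it "logs" instead of "unknown".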
function getFieldConfig(level: LogLevel, levels: number) {
const name = levels === 1 && level === LogLevel.unknown ? 'logs' : level;
const color = LogLevelColor[level];
return {
displayNameFromDS: name,
color: {
mode: FieldColorModeId.Fixed,
fixedColor: color,
},
custom: {
drawStyle: GraphDrawStyle.Bars,
barAlignment: BarAlignment.Center,
barWidthFactor: 0.9,
barMaxWidth: 5,
lineColor: color,
pointColor: color,
fillColor: color,
lineWidth: 1,
fillOpacity: 100,
stacking: {
mode: StackingMode.Normal,
group: 'A',
},
},
};
}

/**
* Create a new data frame with a single field and values created by adding field values
* from all provided data frames
*/
function aggregateFields(dataFrames: DataFrame[], config: FieldConfig): DataFrame {
const aggregatedDataFrame = new MutableDataFrame();
if (!dataFrames.length) {
return aggregatedDataFrame;
}
const totalLength = dataFrames[0].length;
const timeField = new FieldCache(dataFrames[0]).getFirstFieldOfType(FieldType.time);
if (!timeField) {
return aggregatedDataFrame;
}
aggregatedDataFrame.addField({ name: 'Time', type: FieldType.time }, totalLength);
aggregatedDataFrame.addField({ name: 'Value', type: FieldType.number, config }, totalLength);
dataFrames.forEach((dataFrame) => {
dataFrame.fields.forEach((field) => {
if (field.type === FieldType.number) {
for (let pointIndex = 0; pointIndex < totalLength; pointIndex++) {
const currentValue = aggregatedDataFrame.get(pointIndex).Value;
const valueToAdd = field.values.get(pointIndex);
const totalValue =
currentValue === null && valueToAdd === null ? null : (currentValue || 0) + (valueToAdd || 0);
aggregatedDataFrame.set(pointIndex, { Value: totalValue, Time: timeField.values.get(pointIndex) });
}
}
});
});
return aggregatedDataFrame;
}
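
// Look for a level-like label (level, lvl, loglevel) and map its value to a LogLevel,
// falling back to LogLevel.unknown when no such label is present.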
function getLogLevelFromLabels(labels: Labels): LogLevel {
const labelNames = ['level', 'lvl', 'loglevel'];
let levelLabel;
for (let labelName of labelNames) {
if (labelName in labels) {
levelLabel = labelName;
break;
}
}
return levelLabel ? getLogLevelFromKey(labels[levelLabel]) : LogLevel.unknown;
}
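
For orientation, the rewrite in createLokiLogsVolumeProvider above maps each logs query expression to a per-level count over the current interval. A minimal illustration, with a made-up selector:

// Hypothetical input expression; the template string matches the provider code above.
const expr = '{app="app01"}';
const logsVolumeExpr = `sum by (level) (count_over_time(${expr}[$__interval]))`;
// => 'sum by (level) (count_over_time({app="app01"}[$__interval]))'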


@@ -920,6 +920,38 @@ describe('LokiDatasource', () => {
expect(contextQuery.expr).not.toContain('uniqueParsedLabel');
});
});
describe('logs volume data provider', () => {
it('creates provider for logs query', () => {
const ds = createLokiDSForTests();
const options = getQueryOptions<LokiQuery>({
targets: [{ expr: '{label=value}', refId: 'A' }],
});
expect(ds.getLogsVolumeDataProvider(options)).toBeDefined();
});
it('does not create provider for metrics query', () => {
const ds = createLokiDSForTests();
const options = getQueryOptions<LokiQuery>({
targets: [{ expr: 'rate({label=value}[1m])', refId: 'A' }],
});
expect(ds.getLogsVolumeDataProvider(options)).not.toBeDefined();
});
it('creates provider if at least one query is a logs query', () => {
const ds = createLokiDSForTests();
const options = getQueryOptions<LokiQuery>({
targets: [
{ expr: 'rate({label=value}[1m])', refId: 'A' },
{ expr: '{label=value}', refId: 'B' },
],
});
expect(ds.getLogsVolumeDataProvider(options)).toBeDefined();
});
});
});
function assertAdHocFilters(query: string, expectedResults: string, ds: LokiDatasource) {


@@ -16,6 +16,7 @@ import {
DataQueryResponse,
DataSourceApi,
DataSourceInstanceSettings,
DataSourceWithLogsVolumeSupport,
dateMath,
DateTime,
FieldCache,
@@ -52,6 +53,7 @@ import { serializeParams } from '../../../core/utils/fetch';
import { RowContextOptions } from '@grafana/ui/src/components/Logs/LogRowContextProvider';
import syntax from './syntax';
import { DEFAULT_RESOLUTION } from './components/LokiOptionFields';
import { createLokiLogsVolumeProvider } from './dataProviders/logsVolumeProvider';
export type RangeQueryOptions = DataQueryRequest<LokiQuery> | AnnotationQueryRequest<LokiQuery>;
export const DEFAULT_MAX_LINES = 1000;
@@ -67,7 +69,9 @@ const DEFAULT_QUERY_PARAMS: Partial<LokiRangeQueryRequest> = {
query: '',
};
-export class LokiDatasource extends DataSourceApi<LokiQuery, LokiOptions> {
+export class LokiDatasource
+  extends DataSourceApi<LokiQuery, LokiOptions>
+  implements DataSourceWithLogsVolumeSupport<LokiQuery> {
private streams = new LiveStreams();
languageProvider: LanguageProvider;
maxLines: number;
@@ -102,6 +106,11 @@ export class LokiDatasource extends DataSourceApi<LokiQuery, LokiOptions> {
return getBackendSrv().fetch<Record<string, any>>(req);
}
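
/**
 * Returns a logs volume provider for the request, or undefined when none of the
 * targets is a logs query (metrics-only requests have no logs volume).
 */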
getLogsVolumeDataProvider(request: DataQueryRequest<LokiQuery>): Observable<DataQueryResponse> | undefined {
const isLogsVolumeAvailable = request.targets.some((target) => target.expr && !isMetricsQuery(target.expr));
return isLogsVolumeAvailable ? createLokiLogsVolumeProvider(this, request) : undefined;
}
query(options: DataQueryRequest<LokiQuery>): Observable<DataQueryResponse> {
const subQueries: Array<Observable<DataQueryResponse>> = [];
const scopedVars = {
@@ -703,7 +712,7 @@ export function lokiSpecialRegexEscape(value: any) {
* Checks if the query expression uses a function and so should return a time series instead of logs.
* Sometimes important to know that before we actually do the query.
*/
-function isMetricsQuery(query: string): boolean {
+export function isMetricsQuery(query: string): boolean {
const tokens = Prism.tokenize(query, syntax);
return tokens.some((t) => {
// Not sure in which cases it can be a string; maybe if nothing matched, which means it should not be a function