Mirror of https://github.com/grafana/grafana.git, synced 2025-02-25 18:55:37 -06:00
Logs: Merge Log Volumes by data source name (#65392)
* Merge log volume by data source name

* Fix creating response for multiple fallback volumes

* Fix unit tests

* Hide title if there's only one log volume visible

* Make hide title optional

* Remove redundant parentheses

* Do not use frame.name, so the visualization can pick displayNameFromDS from the field config

* Simplify setting aggregated data frame meta data

* Update public/app/features/logs/utils.ts

Co-authored-by: Giordano Ricci <me@giordanoricci.com>

* Fix legend toggling

* Ensure limited graph info is shown

* Always show the data source name

---------

Co-authored-by: Giordano Ricci <me@giordanoricci.com>
This commit is contained in:
parent 383148bcd1
commit 5c138e16d7
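In short, the panel list now groups incoming log-volume frames by meta.custom.datasourceName and merges each group into one frame per log level, summing values at matching timestamps. The following is a minimal sketch of that flow, not the actual Grafana code: the Frame type and sample data are made up for illustration, groupBy and mapValues are the lodash helpers used in the diff, and mergeVolumes only approximates the new mergeLogsVolumeDataFrames helper introduced below.

import { groupBy, mapValues } from 'lodash';

// Hypothetical, simplified stand-in for a @grafana/data DataFrame.
interface Frame {
  meta?: { custom?: { datasourceName?: string } };
  level: string;
  points: Array<[time: number, value: number]>;
}

// Approximation of mergeLogsVolumeDataFrames: sum values per timestamp, per level.
function mergeVolumes(frames: Frame[]): Frame[] {
  const byLevel = groupBy(frames, 'level');
  return Object.entries(byLevel).map(([level, group]) => {
    const totals = new Map<number, number>();
    group.forEach((frame) => {
      frame.points.forEach(([time, value]) => {
        totals.set(time, (totals.get(time) ?? 0) + value);
      });
    });
    return {
      meta: group[0].meta,
      level,
      points: Array.from(totals.entries()).sort((a, b) => a[0] - b[0]),
    };
  });
}

// As in LogsVolumePanelList: one merged set of frames per data source name.
const frames: Frame[] = [
  { meta: { custom: { datasourceName: 'Loki A' } }, level: 'info', points: [[1, 1], [3, 1]] },
  { meta: { custom: { datasourceName: 'Loki A' } }, level: 'info', points: [[1, 2], [2, 3]] },
];
const logVolumes = mapValues(groupBy(frames, 'meta.custom.datasourceName'), mergeVolumes);
// logVolumes['Loki A'][0].points => [[1, 3], [2, 3], [3, 1]]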
@@ -212,13 +212,12 @@ export const getLogsVolumeAbsoluteRange = (
   return dataFrames[0].meta?.custom?.absoluteRange || defaultRange;
 };
 
-export const getLogsVolumeDataSourceInfo = (dataFrames: DataFrame[]): { name: string; refId: string } | null => {
+export const getLogsVolumeDataSourceInfo = (dataFrames: DataFrame[]): { name: string } | null => {
   const customMeta = dataFrames[0]?.meta?.custom;
 
-  if (customMeta && customMeta.datasourceName && customMeta.sourceQuery?.refId) {
+  if (customMeta && customMeta.datasourceName) {
     return {
       name: customMeta.datasourceName,
-      refId: customMeta.sourceQuery.refId,
     };
   }
 
@@ -1,4 +1,5 @@
 import { css } from '@emotion/css';
+import { identity } from 'lodash';
 import React from 'react';
 
 import {
@@ -43,13 +44,15 @@ export function LogsVolumePanel(props: Props) {
   const logsVolumeData = props.logsVolumeData;
 
   const logsVolumeInfo = getLogsVolumeDataSourceInfo(logsVolumeData?.data);
-  let extraInfo = logsVolumeInfo ? `${logsVolumeInfo.refId} (${logsVolumeInfo.name})` : '';
+  let extraInfo = logsVolumeInfo ? `${logsVolumeInfo.name}` : '';
 
   if (isLogsVolumeLimited(logsVolumeData.data)) {
     extraInfo = [
       extraInfo,
       'This datasource does not support full-range histograms. The graph below is based on the logs seen in the response.',
-    ].join('. ');
+    ]
+      .filter(identity)
+      .join('. ');
   }
 
   const range = isLogsVolumeLimited(logsVolumeData.data)
@@ -1,5 +1,5 @@
 import { css } from '@emotion/css';
-import { groupBy } from 'lodash';
+import { groupBy, mapValues } from 'lodash';
 import React, { useMemo } from 'react';
 
 import {
@@ -15,6 +15,8 @@ import {
 } from '@grafana/data';
 import { Button, InlineField, useStyles2 } from '@grafana/ui';
 
+import { mergeLogsVolumeDataFrames } from '../logs/utils';
+
 import { LogsVolumePanel } from './LogsVolumePanel';
 import { SupplementaryResultError } from './SupplementaryResultError';
 
@@ -41,10 +43,12 @@ export const LogsVolumePanelList = ({
   splitOpen,
   timeZone,
 }: Props) => {
-  const logVolumes = useMemo(
-    () => groupBy(logsVolumeData?.data || [], 'meta.custom.sourceQuery.refId'),
-    [logsVolumeData]
-  );
+  const logVolumes: Record<string, DataFrame[]> = useMemo(() => {
+    const grouped = groupBy(logsVolumeData?.data || [], 'meta.custom.datasourceName');
+    return mapValues(grouped, (value) => {
+      return mergeLogsVolumeDataFrames(value);
+    });
+  }, [logsVolumeData]);
 
   const styles = useStyles2(getStyles);
 
@@ -201,6 +201,25 @@ describe('SupplementaryQueries utils', function () {
     const testProvider = await setup('no-data-providers', SupplementaryQueryType.LogsSample);
     await expect(testProvider).toBe(undefined);
   });
+  it('Creates single fallback result', async () => {
+    const testProvider = await setup('no-data-providers', SupplementaryQueryType.LogsVolume, [
+      'no-data-providers',
+      'no-data-providers-2',
+    ]);
+
+    await expect(testProvider).toEmitValuesWith((received) => {
+      expect(received).toMatchObject([
+        {
+          data: assertDataFromLogsResults(),
+          state: LoadingState.Done,
+        },
+        {
+          data: [...assertDataFromLogsResults(), ...assertDataFromLogsResults()],
+          state: LoadingState.Done,
+        },
+      ]);
+    });
+  });
 });
 
 describe('Mixed data source', function () {
@@ -3,6 +3,7 @@ import { distinct, from, mergeMap, Observable, of } from 'rxjs';
 import { scan } from 'rxjs/operators';
 
 import {
+  DataFrame,
   DataQuery,
   DataQueryRequest,
   DataQueryResponse,
@@ -84,9 +85,11 @@ const createFallbackLogVolumeProvider = (
       const bucketSize = exploreData.logsResult.bucketSize;
       const targetRefIds = queryTargets.map((query) => query.refId);
       const rowsByRefId = groupBy(exploreData.logsResult.rows, 'dataFrame.refId');
+      let allSeries: DataFrame[] = [];
       targetRefIds.forEach((refId) => {
         if (rowsByRefId[refId]?.length) {
           const series = makeDataFramesForLogs(rowsByRefId[refId], bucketSize);
+          allSeries = [...allSeries, ...series];
           const logVolumeCustomMetaData: LogsVolumeCustomMetaData = {
             logsVolumeType: LogsVolumeType.Limited,
             absoluteRange: exploreData.logsResult?.visibleRange!,
@@ -95,7 +98,7 @@
           };
 
           observer.next({
-            data: series.map((d) => {
+            data: allSeries.map((d) => {
              const custom = d.meta?.custom || {};
              return {
                ...d,
@@ -1,13 +1,24 @@
-import { Labels, LogLevel, LogsModel, LogRowModel, LogsSortOrder, MutableDataFrame } from '@grafana/data';
+import {
+  ArrayVector,
+  DataFrame,
+  FieldType,
+  Labels,
+  LogLevel,
+  LogRowModel,
+  LogsModel,
+  LogsSortOrder,
+  MutableDataFrame,
+} from '@grafana/data';
 
 import {
-  getLogLevel,
   calculateLogsLabelStats,
   calculateStats,
-  getLogLevelFromKey,
-  sortLogsResult,
   checkLogsError,
+  getLogLevel,
+  getLogLevelFromKey,
   logRowsToReadableJson,
+  mergeLogsVolumeDataFrames,
+  sortLogsResult,
 } from './utils';
 
 describe('getLoglevel()', () => {
@@ -261,3 +272,98 @@ describe('logRowsToReadableJson', () => {
     expect(result).toEqual([{ line: 'test entry', timestamp: '123456789', fields: { foo: 'bar', foo2: 'bar2' } }]);
   });
 });
+
+describe('mergeLogsVolumeDataFrames', () => {
+  function mockLogVolume(level: string, timestamps: number[], values: number[]): DataFrame {
+    const frame = new MutableDataFrame();
+    frame.addField({ name: 'Time', type: FieldType.time, values: timestamps });
+    frame.addField({ name: 'Value', type: FieldType.number, values, config: { displayNameFromDS: level } });
+    return frame;
+  }
+
+  it('merges log volumes', () => {
+    // timestamps: 1 2 3 4 5 6
+
+    // info 1:     1 - 1 - - -
+    // info 2:     2 3 - - - -
+    // total:      3 3 1 - - -
+    const infoVolume1 = mockLogVolume('info', [1, 3], [1, 1]);
+    const infoVolume2 = mockLogVolume('info', [1, 2], [2, 3]);
+
+    // debug 1:    - 2 3 - - -
+    // debug 2:    1 - - - 0 -
+    // total:      1 2 3 - 0 -
+    const debugVolume1 = mockLogVolume('debug', [2, 3], [2, 3]);
+    const debugVolume2 = mockLogVolume('debug', [1, 5], [1, 0]);
+
+    // error 1:    1 - - - - 1
+    // error 2:    1 - - - - -
+    // total:      2 - - - - 1
+    const errorVolume1 = mockLogVolume('error', [1, 6], [1, 1]);
+    const errorVolume2 = mockLogVolume('error', [1], [1]);
+
+    const merged = mergeLogsVolumeDataFrames([
+      infoVolume1,
+      infoVolume2,
+      debugVolume1,
+      debugVolume2,
+      errorVolume1,
+      errorVolume2,
+    ]);
+
+    expect(merged).toHaveLength(3);
+    expect(merged).toMatchObject([
+      {
+        fields: [
+          {
+            name: 'Time',
+            type: FieldType.time,
+            values: new ArrayVector([1, 2, 3]),
+          },
+          {
+            name: 'Value',
+            type: FieldType.number,
+            values: new ArrayVector([3, 3, 1]),
+            config: {
+              displayNameFromDS: 'info',
+            },
+          },
+        ],
+      },
+      {
+        fields: [
+          {
+            name: 'Time',
+            type: FieldType.time,
+            values: new ArrayVector([1, 2, 3, 5]),
+          },
+          {
+            name: 'Value',
+            type: FieldType.number,
+            values: new ArrayVector([1, 2, 3, 0]),
+            config: {
+              displayNameFromDS: 'debug',
+            },
+          },
+        ],
+      },
+      {
+        fields: [
+          {
+            name: 'Time',
+            type: FieldType.time,
+            values: new ArrayVector([1, 6]),
+          },
+          {
+            name: 'Value',
+            type: FieldType.number,
+            values: new ArrayVector([2, 1]),
+            config: {
+              displayNameFromDS: 'error',
+            },
+          },
+        ],
+      },
+    ]);
+  });
+});
|
@ -1,6 +1,18 @@
|
||||
import { countBy, chain } from 'lodash';
|
||||
|
||||
import { LogLevel, LogRowModel, LogLabelStatsModel, LogsModel, LogsSortOrder } from '@grafana/data';
|
||||
import {
|
||||
LogLevel,
|
||||
LogRowModel,
|
||||
LogLabelStatsModel,
|
||||
LogsModel,
|
||||
LogsSortOrder,
|
||||
DataFrame,
|
||||
FieldConfig,
|
||||
FieldCache,
|
||||
FieldType,
|
||||
MutableDataFrame,
|
||||
QueryResultMeta,
|
||||
} from '@grafana/data';
|
||||
|
||||
import { getDataframeFields } from './components/logParser';
|
||||
|
||||
@@ -149,3 +161,68 @@ export function logRowsToReadableJson(logs: LogRowModel[]) {
     };
   });
 }
+
+export const mergeLogsVolumeDataFrames = (dataFrames: DataFrame[]): DataFrame[] => {
+  if (dataFrames.length === 0) {
+    throw new Error('Cannot aggregate data frames: there must be at least one data frame to aggregate');
+  }
+
+  const aggregated: Record<string, Record<number, number>> = {};
+  const configs: Record<
+    string,
+    { meta?: QueryResultMeta; valueFieldConfig: FieldConfig; timeFieldConfig: FieldConfig }
+  > = {};
+  let results: DataFrame[] = [];
+
+  // collect and aggregate into aggregated object
+  dataFrames.forEach((dataFrame) => {
+    const fieldCache = new FieldCache(dataFrame);
+    const timeField = fieldCache.getFirstFieldOfType(FieldType.time);
+    const valueField = fieldCache.getFirstFieldOfType(FieldType.number);
+
+    if (!timeField) {
+      throw new Error('Missing time field');
+    }
+    if (!valueField) {
+      throw new Error('Missing value field');
+    }
+
+    const level = valueField.config.displayNameFromDS || dataFrame.name || 'logs';
+    const length = valueField.values.length;
+    configs[level] = {
+      meta: dataFrame.meta,
+      valueFieldConfig: valueField.config,
+      timeFieldConfig: timeField.config,
+    };
+
+    for (let pointIndex = 0; pointIndex < length; pointIndex++) {
+      const time: number = timeField.values.get(pointIndex);
+      const value: number = valueField.values.get(pointIndex);
+      aggregated[level] ??= {};
+      aggregated[level][time] = (aggregated[level][time] || 0) + value;
+    }
+  });
+
+  // convert aggregated into data frames
+  Object.keys(aggregated).forEach((level) => {
+    const levelDataFrame = new MutableDataFrame();
+    const { meta, timeFieldConfig, valueFieldConfig } = configs[level];
+    // Log Volume visualization uses the name when toggling the legend
+    levelDataFrame.name = level;
+    levelDataFrame.meta = meta;
+    levelDataFrame.addField({ name: 'Time', type: FieldType.time, config: timeFieldConfig });
+    levelDataFrame.addField({ name: 'Value', type: FieldType.number, config: valueFieldConfig });
+
+    for (const time in aggregated[level]) {
+      const value = aggregated[level][time];
+      levelDataFrame.add({
+        Time: Number(time),
+        Value: value,
+      });
+    }
+
+    results.push(levelDataFrame);
+  });
+
+  return results;
+};
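For reference, a minimal usage sketch of the new helper, following the semantics exercised by the test above. MutableDataFrame and FieldType come from @grafana/data; the import path for mergeLogsVolumeDataFrames is assumed from the relative '../logs/utils' import shown earlier and may differ in practice.

import { FieldType, MutableDataFrame } from '@grafana/data';
// Assumed path; the diff imports the helper as '../logs/utils'.
import { mergeLogsVolumeDataFrames } from 'app/features/logs/utils';

// Two partial 'error' volumes reported by the same data source.
const a = new MutableDataFrame();
a.addField({ name: 'Time', type: FieldType.time, values: [1, 2] });
a.addField({ name: 'Value', type: FieldType.number, values: [1, 4], config: { displayNameFromDS: 'error' } });

const b = new MutableDataFrame();
b.addField({ name: 'Time', type: FieldType.time, values: [2, 3] });
b.addField({ name: 'Value', type: FieldType.number, values: [2, 1], config: { displayNameFromDS: 'error' } });

// One frame per level comes back; values at matching timestamps are summed:
// Time: [1, 2, 3], Value: [1, 6, 1], displayNameFromDS: 'error'
const [merged] = mergeLogsVolumeDataFrames([a, b]);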