import _ from 'lodash';
import { colors, ansicolor } from '@grafana/ui';

import {
  Labels,
  LogLevel,
  DataFrame,
  findCommonLabels,
  findUniqueLabels,
  getLogLevel,
  FieldType,
  getLogLevelFromKey,
  LogRowModel,
  LogsModel,
  LogsMetaItem,
  LogsMetaKind,
  LogsDedupStrategy,
  GraphSeriesXY,
  dateTimeFormat,
  dateTimeFormatTimeAgo,
  NullValueMode,
  toDataFrame,
  FieldCache,
  FieldWithIndex,
  getFlotPairs,
  TimeZone,
  getDisplayProcessor,
  textUtil,
  dateTime,
  AbsoluteTimeRange,
  sortInAscendingOrder,
} from '@grafana/data';
import { getThemeColor } from 'app/core/utils/colors';

import { SIPrefix } from '@grafana/data/src/valueFormats/symbolFormatters';

export const LogLevelColor = {
  [LogLevel.critical]: colors[7],
  [LogLevel.warning]: colors[1],
  [LogLevel.error]: colors[4],
  [LogLevel.info]: colors[0],
  [LogLevel.debug]: colors[5],
  [LogLevel.trace]: colors[2],
  [LogLevel.unknown]: getThemeColor('#8e8e8e', '#dde4ed'),
};

const isoDateRegexp = /\d{4}-[01]\d-[0-3]\dT[0-2]\d:[0-5]\d:[0-6]\d[,\.]\d+([+-][0-2]\d:[0-5]\d|Z)/g;
function isDuplicateRow(row: LogRowModel, other: LogRowModel, strategy?: LogsDedupStrategy): boolean {
  switch (strategy) {
    case LogsDedupStrategy.exact:
      // Exact still strips dates
      return row.entry.replace(isoDateRegexp, '') === other.entry.replace(isoDateRegexp, '');

    case LogsDedupStrategy.numbers:
      return row.entry.replace(/\d/g, '') === other.entry.replace(/\d/g, '');

    case LogsDedupStrategy.signature:
      return row.entry.replace(/\w/g, '') === other.entry.replace(/\w/g, '');

    default:
      return false;
  }
}
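
// Illustrative sketch (made-up entry, not from real data) of how the strategies above
// normalize an entry before comparing:
//
//   entry:     "2021-03-23T03:17:55.000Z request took 42ms"
//   exact:     strips ISO timestamps      -> " request took 42ms"
//   numbers:   strips all digits          -> "--T::.Z request took ms"
//   signature: strips all word characters -> "--::.   " (only punctuation/whitespace left)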

export function dedupLogRows(rows: LogRowModel[], strategy?: LogsDedupStrategy): LogRowModel[] {
  if (strategy === LogsDedupStrategy.none) {
    return rows;
  }

  return rows.reduce((result: LogRowModel[], row: LogRowModel, index) => {
    const rowCopy = { ...row };
    const previous = result[result.length - 1];
    if (index > 0 && isDuplicateRow(row, previous, strategy)) {
      previous.duplicates!++;
    } else {
      rowCopy.duplicates = 0;
      result.push(rowCopy);
    }
    return result;
  }, []);
}

export function filterLogLevels(logRows: LogRowModel[], hiddenLogLevels: Set<LogLevel>): LogRowModel[] {
  if (hiddenLogLevels.size === 0) {
    return logRows;
  }

  return logRows.filter((row: LogRowModel) => {
    return !hiddenLogLevels.has(row.logLevel);
  });
}
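
// Sketch of the bucketing performed by makeSeriesForLogs (illustrative numbers): with
// bucketSize = 10_000 ms, a row at timeEpochMs = 1_600_000_007_000 lands in bucket
// Math.floor(1_600_000_007_000 / 10_000) * 10_000 = 1_600_000_000_000, so every row in
// the same 10s window increments a single datapoint for its log level.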

export function makeSeriesForLogs(sortedRows: LogRowModel[], bucketSize: number, timeZone: TimeZone): GraphSeriesXY[] {
  // Currently interval is rangeMs / resolution, which is too low for showing series as bars.
  // This should be solved higher up the chain, when queries are executed and the interval is calculated, but this is a temporary fix.

  // Graph time series by log level
  const seriesByLevel: any = {};
  const seriesList: any[] = [];

  for (const row of sortedRows) {
    let series = seriesByLevel[row.logLevel];

    if (!series) {
      seriesByLevel[row.logLevel] = series = {
        lastTs: null,
        datapoints: [],
        alias: row.logLevel,
        target: row.logLevel,
        color: LogLevelColor[row.logLevel],
      };

      seriesList.push(series);
    }

    // Align time to bucket size - Math.floor is used so the time of the bucket
    // is in the past (before Date.now()) and gets displayed on the graph
    const time = Math.floor(row.timeEpochMs / bucketSize) * bucketSize;

    // Entry for time
    if (time === series.lastTs) {
      series.datapoints[series.datapoints.length - 1][0]++;
    } else {
      series.datapoints.push([1, time]);
      series.lastTs = time;
    }

    // Add zero to other levels to aid stacking so each level series has the same number of points
    for (const other of seriesList) {
      if (other !== series && other.lastTs !== time) {
        other.datapoints.push([0, time]);
        other.lastTs = time;
      }
    }
  }

  return seriesList.map((series, i) => {
    series.datapoints.sort((a: number[], b: number[]) => a[1] - b[1]);

    // EEEP: converts GraphSeriesXY to DataFrame and back again!
    const data = toDataFrame(series);
    const fieldCache = new FieldCache(data);

    const timeField = fieldCache.getFirstFieldOfType(FieldType.time)!;
    timeField.display = getDisplayProcessor({
      field: timeField,
      timeZone,
    });

    const valueField = fieldCache.getFirstFieldOfType(FieldType.number)!;
    valueField.config = {
      ...valueField.config,
      color: series.color,
    };

    valueField.name = series.alias;
    const fieldDisplayProcessor = getDisplayProcessor({ field: valueField, timeZone });
    valueField.display = (value: any) => ({ ...fieldDisplayProcessor(value), color: series.color });

    const points = getFlotPairs({
      xField: timeField,
      yField: valueField,
      nullValueMode: NullValueMode.Null,
    });

    const graphSeries: GraphSeriesXY = {
      color: series.color,
      label: series.alias,
      data: points,
      isVisible: true,
      yAxis: {
        index: 1,
        min: 0,
        tickDecimals: 0,
      },
      seriesIndex: i,
      timeField,
      valueField,
      // For now set the time step to 0 and handle the bar width by setting
      // lineWidth instead of barWidth in the flot options
      timeStep: 0,
    };

    return graphSeries;
  });
}

function isLogsData(series: DataFrame) {
  return series.fields.some((f) => f.type === FieldType.time) && series.fields.some((f) => f.type === FieldType.string);
}

/**
 * Converts a list of dataFrames into a LogsModel, which consists of a separate array of log rows and a metrics
 * series. The metrics series can either already be included in the dataFrame or will be computed from the log rows.
 * @param dataFrame
 * @param intervalMs In case there are no metrics series, we use this for computing them from the log rows.
 */
export function dataFrameToLogsModel(
  dataFrame: DataFrame[],
  intervalMs: number | undefined,
  timeZone: TimeZone,
  absoluteRange?: AbsoluteTimeRange
): LogsModel {
  const { logSeries } = separateLogsAndMetrics(dataFrame);
  const logsModel = logSeriesToLogsModel(logSeries);

  // unification: Removed logic for using metrics data in LogsModel as with the unification changes this would result
  // in the incorrect data being used. Instead logs series are always derived from logs.
  if (logsModel) {
    // Create histogram metrics from logs using the interval as bucket size for the line count
    if (intervalMs && logsModel.rows.length > 0) {
      const sortedRows = logsModel.rows.sort(sortInAscendingOrder);
      const { visibleRange, bucketSize } = getSeriesProperties(sortedRows, intervalMs, absoluteRange);
      logsModel.visibleRange = visibleRange;
      logsModel.series = makeSeriesForLogs(sortedRows, bucketSize, timeZone);
    } else {
      logsModel.series = [];
    }
    return logsModel;
  }

  return {
    hasUniqueLabels: false,
    rows: [],
    meta: [],
    series: [],
  };
}
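
// Illustrative call site (variable names are hypothetical, not from this file): given the
// query response frames plus the request's interval, time zone, and range:
//
//   const logsModel = dataFrameToLogsModel(frames, intervalMs, timeZone, {
//     from: range.from.valueOf(),
//     to: range.to.valueOf(),
//   });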

/**
 * Returns a clamped time range and interval based on the visible logs and the given range.
 *
 * @param sortedRows Log rows from the query response
 * @param intervalMs Dynamic data interval based on available pixel width
 * @param absoluteRange Requested time range
 * @param pxPerBar Default: 20, buckets will be rendered as bars, assuming 10px per histogram bar plus some free space around it
 */
export function getSeriesProperties(
  sortedRows: LogRowModel[],
  intervalMs: number,
  absoluteRange?: AbsoluteTimeRange,
  pxPerBar = 20,
  minimumBucketSize = 1000
) {
  let visibleRange = absoluteRange;
  let resolutionIntervalMs = intervalMs;
  let bucketSize = Math.max(resolutionIntervalMs * pxPerBar, minimumBucketSize);
  // Clamp the time range to the visible logs, otherwise big parts of the graph might look empty
  if (absoluteRange) {
    const earliest = sortedRows[0].timeEpochMs;
    const latest = absoluteRange.to;
    const visibleRangeMs = latest - earliest;
    if (visibleRangeMs > 0) {
      // Adjust the interval bucket size for the potentially shorter visible range
      const clampingFactor = visibleRangeMs / (absoluteRange.to - absoluteRange.from);
      resolutionIntervalMs *= clampingFactor;
      // Minimum bucket size of 1s for nicer graphing
      bucketSize = Math.max(Math.ceil(resolutionIntervalMs * pxPerBar), minimumBucketSize);
      // makeSeriesForLogs() aligns datapoints with time buckets, so we do the same here to not cut off data
      const adjustedEarliest = Math.floor(earliest / bucketSize) * bucketSize;
      visibleRange = { from: adjustedEarliest, to: latest };
    }
  }
  return { bucketSize, visibleRange };
}
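
// Worked example (made-up numbers): with intervalMs = 200, pxPerBar = 20, a requested range of
// 0..100_000 ms, and the earliest visible row at 60_000 ms, the visible span is 40% of the range,
// so resolutionIntervalMs becomes 80 and bucketSize = max(ceil(80 * 20), 1000) = 1600 ms; the
// range start is then floored to a bucket boundary: Math.floor(60_000 / 1600) * 1600 = 59_200.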

function separateLogsAndMetrics(dataFrames: DataFrame[]) {
  const metricSeries: DataFrame[] = [];
  const logSeries: DataFrame[] = [];

  for (const dataFrame of dataFrames) {
    // We want to show meta stats even if no result was returned. That's why we also push data frames with no fields.
    if (isLogsData(dataFrame) || !dataFrame.fields.length) {
      logSeries.push(dataFrame);
      continue;
    }

    if (dataFrame.length > 0) {
      metricSeries.push(dataFrame);
    }
  }

  return { logSeries, metricSeries };
}

interface LogFields {
  series: DataFrame;

  timeField: FieldWithIndex;
  stringField: FieldWithIndex;
  timeNanosecondField?: FieldWithIndex;
  logLevelField?: FieldWithIndex;
  idField?: FieldWithIndex;
}

/**
 * Converts dataFrames into a LogsModel. This involves merging them into one list, sorting them and computing metadata
 * like common labels.
 */
export function logSeriesToLogsModel(logSeries: DataFrame[]): LogsModel | undefined {
  if (logSeries.length === 0) {
    return undefined;
  }
  const allLabels: Labels[] = [];

  // Find the fields we care about and collect all labels
  let allSeries: LogFields[] = [];

  // We are sometimes passing data frames with no fields because we want to calculate correct meta stats.
  // Therefore we need to filter out series with no fields. These series are used only for meta stats calculation.
  const seriesWithFields = logSeries.filter((series) => series.fields.length);

  if (seriesWithFields.length) {
    allSeries = seriesWithFields.map((series) => {
      const fieldCache = new FieldCache(series);
      const stringField = fieldCache.getFirstFieldOfType(FieldType.string);

      if (stringField?.labels) {
        allLabels.push(stringField.labels);
      }

      return {
        series,
        timeField: fieldCache.getFirstFieldOfType(FieldType.time),
        timeNanosecondField: fieldCache.hasFieldWithNameAndType('tsNs', FieldType.time)
          ? fieldCache.getFieldByName('tsNs')
          : undefined,
        stringField,
        logLevelField: fieldCache.getFieldByName('level'),
        idField: getIdField(fieldCache),
      } as LogFields;
    });
  }

  const commonLabels = allLabels.length > 0 ? findCommonLabels(allLabels) : {};

  const rows: LogRowModel[] = [];
  let hasUniqueLabels = false;

  for (const info of allSeries) {
    const { timeField, timeNanosecondField, stringField, logLevelField, idField, series } = info;
    const labels = stringField.labels;
    const uniqueLabels = findUniqueLabels(labels, commonLabels);
    if (Object.keys(uniqueLabels).length > 0) {
      hasUniqueLabels = true;
    }

    let seriesLogLevel: LogLevel | undefined = undefined;
    if (labels && Object.keys(labels).indexOf('level') !== -1) {
      seriesLogLevel = getLogLevelFromKey(labels['level']);
    }

    for (let j = 0; j < series.length; j++) {
      const ts = timeField.values.get(j);
      const time = dateTime(ts);
      const tsNs = timeNanosecondField ? timeNanosecondField.values.get(j) : undefined;
      const timeEpochNs = tsNs ? tsNs : time.valueOf() + '000000';
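      // No dedicated nanosecond field: appending six zeros to the millisecond epoch
      // string yields a nanosecond-scale timestamp with only millisecond precision.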

      // In edge cases, this can be undefined. If undefined, we want to replace it with an empty string.
      const messageValue: unknown = stringField.values.get(j) ?? '';
      // This should be a string but sometimes isn't (e.g. Elasticsearch) because the dataFrame is not strongly typed.
      const message: string = typeof messageValue === 'string' ? messageValue : JSON.stringify(messageValue);

      const hasAnsi = textUtil.hasAnsiCodes(message);

      const hasUnescapedContent = !!message.match(/\\n|\\t|\\r/);

      const searchWords = series.meta && series.meta.searchWords ? series.meta.searchWords : [];

      let logLevel = LogLevel.unknown;
      if (logLevelField && logLevelField.values.get(j)) {
        logLevel = getLogLevelFromKey(logLevelField.values.get(j));
      } else if (seriesLogLevel) {
        logLevel = seriesLogLevel;
      } else {
        logLevel = getLogLevel(message);
      }
      rows.push({
        entryFieldIndex: stringField.index,
        rowIndex: j,
        dataFrame: series,
        logLevel,
        timeFromNow: dateTimeFormatTimeAgo(ts),
        timeEpochMs: time.valueOf(),
        timeEpochNs,
        timeLocal: dateTimeFormat(ts, { timeZone: 'browser' }),
        timeUtc: dateTimeFormat(ts, { timeZone: 'utc' }),
        uniqueLabels,
        hasAnsi,
        hasUnescapedContent,
        searchWords,
        entry: hasAnsi ? ansicolor.strip(message) : message,
        raw: message,
        labels: stringField.labels || {},
        uid: idField ? idField.values.get(j) : j.toString(),
      });
    }
  }

  // Meta data to display in status
  const meta: LogsMetaItem[] = [];
  if (_.size(commonLabels) > 0) {
    meta.push({
      label: 'Common labels',
      value: commonLabels,
      kind: LogsMetaKind.LabelsMap,
    });
  }
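
  // Sum the configured line limit once per query (keyed by refId), since several data
  // frames returned by the same query share that query's meta.limit.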
  const limits = logSeries.filter((series) => series.meta && series.meta.limit);
  const limitValue = Object.values(
    limits.reduce((acc: any, elem: any) => {
      acc[elem.refId] = elem.meta.limit;
      return acc;
    }, {})
  ).reduce((acc: number, elem: any) => (acc += elem), 0);

  if (limits.length > 0) {
    meta.push({
      label: 'Limit',
      value: `${limitValue} (${rows.length} returned)`,
      kind: LogsMetaKind.String,
    });
  }

  // Hack to print loki stats in Explore. Should be using proper stats display via drawer in Explore (rework in 7.1)
  let totalBytes = 0;
  const queriesVisited: { [refId: string]: boolean } = {};
  // To add just 1 error message
  let errorMetaAdded = false;

  for (const series of logSeries) {
    const totalBytesKey = series.meta?.custom?.lokiQueryStatKey;
    const { refId } = series; // Stats are per query, keeping track by refId

    if (!errorMetaAdded && series.meta?.custom?.error) {
      meta.push({
        label: '',
        value: series.meta?.custom.error,
        kind: LogsMetaKind.Error,
      });
      errorMetaAdded = true;
    }

    if (refId && !queriesVisited[refId]) {
      if (totalBytesKey && series.meta?.stats) {
        const byteStat = series.meta.stats.find((stat) => stat.displayName === totalBytesKey);
        if (byteStat) {
          totalBytes += byteStat.value;
        }
      }

      queriesVisited[refId] = true;
    }
  }
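
  // SIPrefix('B') returns a value formatter from @grafana/data that scales the byte count
  // into text/suffix parts; e.g. 1_572_864 bytes should come out roughly as "1.57" + " MB"
  // (exact rounding depends on the formatter's defaults).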
  if (totalBytes > 0) {
    const { text, suffix } = SIPrefix('B')(totalBytes);
    meta.push({
      label: 'Total bytes processed',
      value: `${text} ${suffix}`,
      kind: LogsMetaKind.String,
    });
  }

  return {
    hasUniqueLabels,
    meta,
    rows,
  };
}

function getIdField(fieldCache: FieldCache): FieldWithIndex | undefined {
  const idFieldNames = ['id'];
  for (const fieldName of idFieldNames) {
    const idField = fieldCache.getFieldByName(fieldName);
    if (idField) {
      return idField;
    }
  }
  return undefined;
}