2023-03-07 08:00:11 -06:00
import { groupBy , size } from 'lodash' ;
2023-02-07 07:32:06 -06:00
import { from , isObservable , Observable } from 'rxjs' ;
2019-07-06 01:05:53 -05:00
2019-04-30 11:21:22 -05:00
import {
2021-06-01 02:28:25 -05:00
AbsoluteTimeRange ,
2019-07-01 14:00:29 -05:00
DataFrame ,
2021-06-01 02:28:25 -05:00
DataQuery ,
2021-11-02 05:53:47 -05:00
DataQueryRequest ,
DataQueryResponse ,
DataSourceApi ,
2022-07-06 01:49:45 -05:00
DataSourceJsonData ,
2021-06-01 02:28:25 -05:00
dateTimeFormat ,
dateTimeFormatTimeAgo ,
FieldCache ,
2021-09-13 04:11:49 -05:00
FieldColorModeId ,
2021-06-01 02:28:25 -05:00
FieldType ,
FieldWithIndex ,
2019-04-30 11:21:22 -05:00
findCommonLabels ,
findUniqueLabels ,
2023-03-03 09:02:14 -06:00
getTimeField ,
2021-06-01 02:28:25 -05:00
Labels ,
2021-11-02 05:53:47 -05:00
LoadingState ,
2021-06-01 02:28:25 -05:00
LogLevel ,
2019-05-22 16:10:05 -05:00
LogRowModel ,
2021-06-01 02:28:25 -05:00
LogsDedupStrategy ,
2019-05-22 16:10:05 -05:00
LogsMetaItem ,
LogsMetaKind ,
2021-06-01 02:28:25 -05:00
LogsModel ,
2023-03-07 08:00:11 -06:00
LogsVolumeCustomMetaData ,
2023-02-07 07:32:06 -06:00
LogsVolumeType ,
2021-04-21 05:02:34 -05:00
rangeUtil ,
2021-11-02 05:53:47 -05:00
ScopedVars ,
2023-03-03 09:02:14 -06:00
sortDataFrame ,
2021-06-01 02:28:25 -05:00
textUtil ,
2021-11-02 05:53:47 -05:00
TimeRange ,
2021-06-01 02:28:25 -05:00
toDataFrame ,
2022-02-02 03:54:33 -06:00
toUtc ,
2019-07-06 01:05:53 -05:00
} from '@grafana/data' ;
2021-05-24 05:28:10 -05:00
import { SIPrefix } from '@grafana/data/src/valueFormats/symbolFormatters' ;
2022-04-22 08:33:13 -05:00
import { BarAlignment , GraphDrawStyle , StackingMode } from '@grafana/schema' ;
import { ansicolor , colors } from '@grafana/ui' ;
import { getThemeColor } from 'app/core/utils/colors' ;
2020-04-20 00:37:38 -05:00
2022-09-30 05:16:47 -05:00
import { getLogLevel , getLogLevelFromKey , sortInAscendingOrder } from '../features/logs/utils' ;
2023-03-03 09:02:14 -06:00
2021-04-21 05:02:34 -05:00
// Meta label shown for the enforced line limit of a logs query.
export const LIMIT_LABEL = 'Line limit';
// Meta label shown for labels shared by every returned log row.
export const COMMON_LABELS = 'Common labels';
2018-10-30 10:14:01 -05:00
2018-11-06 05:00:05 -06:00
// Maps each log level to the palette color used when rendering its histogram series.
export const LogLevelColor = {
  [LogLevel.critical]: colors[7],
  [LogLevel.warning]: colors[1],
  [LogLevel.error]: colors[4],
  [LogLevel.info]: colors[0],
  [LogLevel.debug]: colors[5],
  [LogLevel.trace]: colors[2],
  // "unknown" has no palette slot; use a neutral grey per theme (light, dark).
  [LogLevel.unknown]: getThemeColor('#8e8e8e', '#bdc4cd'),
};
2021-11-10 04:20:30 -06:00
// Time-unit constants expressed in milliseconds; used for interval bucketing below.
const MILLISECOND = 1;
const SECOND = 1000 * MILLISECOND;
const MINUTE = 60 * SECOND;
const HOUR = 60 * MINUTE;
const DAY = 24 * HOUR;

// Matches ISO-8601 timestamps (fractional part separated by ',' or '.', followed by
// a numeric UTC offset or 'Z') so deduplication can ignore differing dates in entries.
const isoDateRegexp = /\d{4}-[01]\d-[0-3]\dT[0-2]\d:[0-5]\d:[0-6]\d[,\.]\d+([+-][0-2]\d:[0-5]\d|Z)/g;
2019-11-26 03:01:32 -06:00
function isDuplicateRow ( row : LogRowModel , other : LogRowModel , strategy? : LogsDedupStrategy ) : boolean {
2018-11-18 03:38:06 -06:00
switch ( strategy ) {
case LogsDedupStrategy . exact :
// Exact still strips dates
return row . entry . replace ( isoDateRegexp , '' ) === other . entry . replace ( isoDateRegexp , '' ) ;
case LogsDedupStrategy . numbers :
return row . entry . replace ( /\d/g , '' ) === other . entry . replace ( /\d/g , '' ) ;
case LogsDedupStrategy . signature :
return row . entry . replace ( /\w/g , '' ) === other . entry . replace ( /\w/g , '' ) ;
default :
return false ;
}
}
2019-11-26 03:01:32 -06:00
export function dedupLogRows ( rows : LogRowModel [ ] , strategy? : LogsDedupStrategy ) : LogRowModel [ ] {
2018-11-18 03:38:06 -06:00
if ( strategy === LogsDedupStrategy . none ) {
2019-11-01 10:38:34 -05:00
return rows ;
2018-11-18 03:38:06 -06:00
}
2019-11-01 10:38:34 -05:00
return rows . reduce ( ( result : LogRowModel [ ] , row : LogRowModel , index ) = > {
2019-03-11 05:48:14 -05:00
const rowCopy = { . . . row } ;
2018-11-18 03:38:06 -06:00
const previous = result [ result . length - 1 ] ;
if ( index > 0 && isDuplicateRow ( row , previous , strategy ) ) {
2019-11-26 03:01:32 -06:00
previous . duplicates ! ++ ;
2018-11-18 03:38:06 -06:00
} else {
2019-03-11 05:48:14 -05:00
rowCopy . duplicates = 0 ;
result . push ( rowCopy ) ;
2018-11-18 03:38:06 -06:00
}
return result ;
} , [ ] ) ;
}
2019-11-01 10:38:34 -05:00
export function filterLogLevels ( logRows : LogRowModel [ ] , hiddenLogLevels : Set < LogLevel > ) : LogRowModel [ ] {
2018-11-23 09:29:55 -06:00
if ( hiddenLogLevels . size === 0 ) {
2019-11-01 10:38:34 -05:00
return logRows ;
2018-11-23 09:29:55 -06:00
}
2019-11-01 10:38:34 -05:00
return logRows . filter ( ( row : LogRowModel ) = > {
return ! hiddenLogLevels . has ( row . logLevel ) ;
} ) ;
2018-11-23 09:29:55 -06:00
}
2022-07-06 01:49:45 -05:00
// Accumulator used while bucketing log rows into per-level histogram series.
interface Series {
  lastTs: number | null; // bucket timestamp most recently written for this level
  datapoints: Array<[number, number]>; // [count, bucketTimeMs] pairs
  target: LogLevel; // the log level this series represents
  color: string; // series color taken from LogLevelColor
}
2021-06-01 02:28:25 -05:00
export function makeDataFramesForLogs ( sortedRows : LogRowModel [ ] , bucketSize : number ) : DataFrame [ ] {
2018-12-06 05:12:43 -06:00
// currently interval is rangeMs / resolution, which is too low for showing series as bars.
2020-05-29 08:39:13 -05:00
// Should be solved higher up the chain when executing queries & interval calculated and not here but this is a temporary fix.
2018-12-06 05:12:43 -06:00
2018-11-08 07:24:54 -06:00
// Graph time series by log level
2022-07-06 01:49:45 -05:00
const seriesByLevel : Record < string , Series > = { } ;
const seriesList : Series [ ] = [ ] ;
2018-12-06 05:12:43 -06:00
2019-09-13 06:58:29 -05:00
for ( const row of sortedRows ) {
2018-12-08 13:15:02 -06:00
let series = seriesByLevel [ row . logLevel ] ;
if ( ! series ) {
seriesByLevel [ row . logLevel ] = series = {
lastTs : null ,
datapoints : [ ] ,
2019-11-07 05:37:46 -06:00
target : row.logLevel ,
2018-12-08 13:15:02 -06:00
color : LogLevelColor [ row . logLevel ] ,
} ;
2018-12-06 05:12:43 -06:00
2018-12-08 13:15:02 -06:00
seriesList . push ( series ) ;
}
2018-11-08 07:24:54 -06:00
2019-09-13 06:58:29 -05:00
// align time to bucket size - used Math.floor for calculation as time of the bucket
// must be in the past (before Date.now()) to be displayed on the graph
const time = Math . floor ( row . timeEpochMs / bucketSize ) * bucketSize ;
2018-12-06 05:12:43 -06:00
2018-11-08 07:24:54 -06:00
// Entry for time
2018-12-08 13:15:02 -06:00
if ( time === series . lastTs ) {
series . datapoints [ series . datapoints . length - 1 ] [ 0 ] ++ ;
2018-11-08 07:24:54 -06:00
} else {
2018-12-08 13:15:02 -06:00
series . datapoints . push ( [ 1 , time ] ) ;
series . lastTs = time ;
2018-11-08 07:24:54 -06:00
}
2018-12-08 13:15:02 -06:00
// add zero to other levels to aid stacking so each level series has same number of points
for ( const other of seriesList ) {
if ( other !== series && other . lastTs !== time ) {
other . datapoints . push ( [ 0 , time ] ) ;
other . lastTs = time ;
}
2018-11-08 07:24:54 -06:00
}
2018-12-08 13:15:02 -06:00
}
2019-11-07 05:37:46 -06:00
return seriesList . map ( ( series , i ) = > {
2020-04-08 03:07:12 -05:00
series . datapoints . sort ( ( a : number [ ] , b : number [ ] ) = > a [ 1 ] - b [ 1 ] ) ;
2018-12-08 13:15:02 -06:00
2019-08-15 11:18:51 -05:00
const data = toDataFrame ( series ) ;
2020-04-08 03:07:12 -05:00
const fieldCache = new FieldCache ( data ) ;
2019-08-13 00:32:43 -05:00
Chore: Fix all Typescript strict null errors (#26204)
* Chore: Fix typescript strict null errors
* Added new limit
* Fixed ts issue
* fixed tests
* trying to fix type inference
* Fixing more ts errors
* Revert tsconfig option
* Fix
* Fixed code
* More fixes
* fix tests
* Updated snapshot
* Chore: More ts strict null fixes
* More fixes in some really messed up azure config components
* More fixes, current count: 441
* 419
* More fixes
* Fixed invalid initial state in explore
* Fixing tests
* Fixed tests
* Explore fix
* More fixes
* Progress
* Sub 300
* Now at 218
* Progress
* Update
* Progress
* Updated tests
* at 159
* fixed tests
* Progress
* YAy blow 100! at 94
* 10,9,8,7,6,5,4,3,2,1... lift off
* Fixed tests
* Fixed more type errors
Co-authored-by: Ryan McKinley <ryantxu@gmail.com>
2020-07-10 05:46:59 -05:00
const valueField = fieldCache . getFirstFieldOfType ( FieldType . number ) ! ;
2020-04-08 03:07:12 -05:00
2021-06-01 02:28:25 -05:00
data . fields [ valueField . index ] . config . min = 0 ;
data . fields [ valueField . index ] . config . decimals = 0 ;
2021-09-13 04:11:49 -05:00
data . fields [ valueField . index ] . config . color = {
mode : FieldColorModeId.Fixed ,
fixedColor : series.color ,
} ;
2021-06-01 02:28:25 -05:00
data . fields [ valueField . index ] . config . custom = {
2021-08-25 11:59:03 -05:00
drawStyle : GraphDrawStyle.Bars ,
2021-06-01 02:28:25 -05:00
barAlignment : BarAlignment.Center ,
barWidthFactor : 0.9 ,
barMaxWidth : 5 ,
lineColor : series.color ,
pointColor : series.color ,
fillColor : series.color ,
lineWidth : 0 ,
fillOpacity : 100 ,
stacking : {
mode : StackingMode.Normal ,
group : 'A' ,
2019-08-13 00:32:43 -05:00
} ,
2019-02-11 03:01:43 -06:00
} ;
2019-08-13 00:32:43 -05:00
2021-06-01 02:28:25 -05:00
return data ;
2018-12-08 13:15:02 -06:00
} ) ;
2018-11-08 07:24:54 -06:00
}
2019-04-30 11:21:22 -05:00
2019-07-01 14:00:29 -05:00
function isLogsData ( series : DataFrame ) {
2021-01-20 00:59:48 -06:00
return series . fields . some ( ( f ) = > f . type === FieldType . time ) && series . fields . some ( ( f ) = > f . type === FieldType . string ) ;
2019-04-30 11:21:22 -05:00
}
2019-11-06 09:15:08 -06:00
/ * *
* Convert dataFrame into LogsModel which consists of creating separate array of log rows and metrics series . Metrics
* series can be either already included in the dataFrame or will be computed from the log rows .
* @param dataFrame
2023-01-24 12:10:27 -06:00
* @param intervalMs Optional . In case there are no metrics series , we use this for computing it from log rows .
* @param absoluteRange Optional . Used to store absolute range of executed queries in logs model . This is used for pagination .
* @param queries Optional . Used to store executed queries in logs model . This is used for pagination .
2019-11-06 09:15:08 -06:00
* /
2020-04-08 16:44:10 -05:00
export function dataFrameToLogsModel (
dataFrame : DataFrame [ ] ,
2023-01-24 12:10:27 -06:00
intervalMs? : number ,
2021-05-12 05:54:15 -05:00
absoluteRange? : AbsoluteTimeRange ,
queries? : DataQuery [ ]
2020-04-08 16:44:10 -05:00
) : LogsModel {
2020-07-09 09:14:55 -05:00
const { logSeries } = separateLogsAndMetrics ( dataFrame ) ;
2022-10-11 04:04:43 -05:00
const logsModel = logSeriesToLogsModel ( logSeries , queries ) ;
2019-11-06 09:15:08 -06:00
2019-04-30 11:21:22 -05:00
if ( logsModel ) {
2020-07-09 09:14:55 -05:00
// Create histogram metrics from logs using the interval as bucket size for the line count
if ( intervalMs && logsModel . rows . length > 0 ) {
const sortedRows = logsModel . rows . sort ( sortInAscendingOrder ) ;
2021-04-21 05:02:34 -05:00
const { visibleRange , bucketSize , visibleRangeMs , requestedRangeMs } = getSeriesProperties (
sortedRows ,
intervalMs ,
absoluteRange
) ;
2020-07-09 09:14:55 -05:00
logsModel . visibleRange = visibleRange ;
2023-03-07 08:00:11 -06:00
logsModel . bucketSize = bucketSize ;
2021-06-01 02:28:25 -05:00
logsModel . series = makeDataFramesForLogs ( sortedRows , bucketSize ) ;
2021-04-21 05:02:34 -05:00
if ( logsModel . meta ) {
logsModel . meta = adjustMetaInfo ( logsModel , visibleRangeMs , requestedRangeMs ) ;
}
2019-04-30 11:21:22 -05:00
} else {
2020-07-09 09:14:55 -05:00
logsModel . series = [ ] ;
2019-04-30 11:21:22 -05:00
}
2021-05-12 05:54:15 -05:00
logsModel . queries = queries ;
2019-04-30 11:21:22 -05:00
return logsModel ;
}
2019-05-10 05:45:26 -05:00
return {
hasUniqueLabels : false ,
rows : [ ] ,
meta : [ ] ,
series : [ ] ,
2021-05-12 05:54:15 -05:00
queries ,
2019-05-10 05:45:26 -05:00
} ;
2019-04-30 11:21:22 -05:00
}
2020-05-29 08:39:13 -05:00
/ * *
* Returns a clamped time range and interval based on the visible logs and the given range .
*
* @param sortedRows Log rows from the query response
2021-06-01 02:28:25 -05:00
* @param intervalMs Dynamic data interval based on available pixel width
2020-05-29 08:39:13 -05:00
* @param absoluteRange Requested time range
* @param pxPerBar Default : 20 , buckets will be rendered as bars , assuming 10 px per histogram bar plus some free space around it
* /
export function getSeriesProperties (
sortedRows : LogRowModel [ ] ,
intervalMs : number ,
2020-05-29 13:01:01 -05:00
absoluteRange? : AbsoluteTimeRange ,
2020-05-29 08:39:13 -05:00
pxPerBar = 20 ,
minimumBucketSize = 1000
) {
let visibleRange = absoluteRange ;
let resolutionIntervalMs = intervalMs ;
let bucketSize = Math . max ( resolutionIntervalMs * pxPerBar , minimumBucketSize ) ;
2021-04-21 05:02:34 -05:00
let visibleRangeMs ;
let requestedRangeMs ;
2020-05-29 08:39:13 -05:00
// Clamp time range to visible logs otherwise big parts of the graph might look empty
if ( absoluteRange ) {
2021-04-21 05:02:34 -05:00
const earliestTsLogs = sortedRows [ 0 ] . timeEpochMs ;
requestedRangeMs = absoluteRange . to - absoluteRange . from ;
visibleRangeMs = absoluteRange . to - earliestTsLogs ;
2020-05-29 08:39:13 -05:00
if ( visibleRangeMs > 0 ) {
// Adjust interval bucket size for potentially shorter visible range
2021-04-21 05:02:34 -05:00
const clampingFactor = visibleRangeMs / requestedRangeMs ;
2020-05-29 08:39:13 -05:00
resolutionIntervalMs *= clampingFactor ;
// Minimum bucketsize of 1s for nicer graphing
bucketSize = Math . max ( Math . ceil ( resolutionIntervalMs * pxPerBar ) , minimumBucketSize ) ;
// makeSeriesForLogs() aligns dataspoints with time buckets, so we do the same here to not cut off data
2021-04-21 05:02:34 -05:00
const adjustedEarliest = Math . floor ( earliestTsLogs / bucketSize ) * bucketSize ;
visibleRange = { from : adjustedEarliest , to : absoluteRange.to } ;
} else {
// We use visibleRangeMs to calculate range coverage of received logs. However, some data sources are rounding up range in requests. This means that received logs
// can (in edge cases) be outside of the requested range and visibleRangeMs < 0. In that case, we want to change visibleRangeMs to be 1 so we can calculate coverage.
visibleRangeMs = 1 ;
2020-05-29 08:39:13 -05:00
}
}
2021-04-21 05:02:34 -05:00
return { bucketSize , visibleRange , visibleRangeMs , requestedRangeMs } ;
2020-05-29 08:39:13 -05:00
}
2020-04-25 15:48:20 -05:00
function separateLogsAndMetrics ( dataFrames : DataFrame [ ] ) {
2019-11-06 09:15:08 -06:00
const metricSeries : DataFrame [ ] = [ ] ;
const logSeries : DataFrame [ ] = [ ] ;
2019-04-30 11:21:22 -05:00
2020-04-25 15:48:20 -05:00
for ( const dataFrame of dataFrames ) {
2020-10-30 03:12:57 -05:00
// We want to show meta stats even if no result was returned. That's why we are pushing also data frames with no fields.
2020-07-17 03:30:27 -05:00
if ( isLogsData ( dataFrame ) || ! dataFrame . fields . length ) {
2020-04-25 15:48:20 -05:00
logSeries . push ( dataFrame ) ;
2019-11-06 09:15:08 -06:00
continue ;
2019-04-30 11:21:22 -05:00
}
2019-11-06 09:15:08 -06:00
2020-04-25 15:48:20 -05:00
if ( dataFrame . length > 0 ) {
metricSeries . push ( dataFrame ) ;
}
2019-04-30 11:21:22 -05:00
}
2019-11-06 09:15:08 -06:00
return { logSeries , metricSeries } ;
}
2019-11-07 09:50:45 -06:00
// The fields of one log frame that row extraction cares about, resolved once per frame.
interface LogFields {
  series: DataFrame; // the frame these fields belong to
  timeField: FieldWithIndex; // required: row timestamps
  stringField: FieldWithIndex; // required: the log line text
  labelsField?: FieldWithIndex; // present only for 'LabeledTimeValues' frames
  timeNanosecondField?: FieldWithIndex; // optional 'tsNs' field with ns precision
  logLevelField?: FieldWithIndex; // optional 'level' field
  idField?: FieldWithIndex; // optional unique row id field
}
2022-04-08 02:57:06 -05:00
function getAllLabels ( fields : LogFields ) : Labels [ ] {
// there are two types of dataframes we handle:
// 1. labels are in a separate field (more efficient when labels change by every log-row)
// 2. labels are in in the string-field's `.labels` attribute
const { stringField , labelsField } = fields ;
2022-05-16 06:38:05 -05:00
if ( labelsField !== undefined ) {
return labelsField . values . toArray ( ) ;
} else {
return [ stringField . labels ? ? { } ] ;
}
2022-04-08 02:57:06 -05:00
}
function getLabelsForFrameRow ( fields : LogFields , index : number ) : Labels {
// there are two types of dataframes we handle.
// either labels-on-the-string-field, or labels-in-the-labels-field
const { stringField , labelsField } = fields ;
2022-05-16 06:38:05 -05:00
if ( labelsField !== undefined ) {
return labelsField . values . get ( index ) ;
} else {
return stringField . labels ? ? { } ;
}
2022-04-08 02:57:06 -05:00
}
2019-11-06 09:15:08 -06:00
/**
 * Converts dataFrames into LogsModel. This involves merging them into one list, sorting them and computing metadata
 * like common labels.
 *
 * @param logSeries Frames already identified as log frames (may include field-less frames kept for meta stats).
 * @param queries Executed queries; used to resolve the datasource type per row via refId.
 * @returns The logs model, or undefined when no series were given.
 */
export function logSeriesToLogsModel(logSeries: DataFrame[], queries: DataQuery[] = []): LogsModel | undefined {
  if (logSeries.length === 0) {
    return undefined;
  }

  const allLabels: Labels[][] = [];

  // Find the fields we care about and collect all labels
  let allSeries: LogFields[] = [];

  // We are sometimes passing data frames with no fields because we want to calculate correct meta stats.
  // Therefore we need to filter out series with no fields. These series are used only for meta stats calculation.
  const seriesWithFields = logSeries.filter((series) => series.fields.length);

  if (seriesWithFields.length) {
    seriesWithFields.forEach((series) => {
      const fieldCache = new FieldCache(series);
      const stringField = fieldCache.getFirstFieldOfType(FieldType.string);
      const timeField = fieldCache.getFirstFieldOfType(FieldType.time);
      // NOTE: this is experimental, please do not use in your code.
      // we will get this custom-frame-type into the "real" frame-type list soon,
      // but the name might change, so please do not use it until then.
      const labelsField =
        series.meta?.custom?.frameType === 'LabeledTimeValues' ? fieldCache.getFieldByName('labels') : undefined;
      // Frames lacking either a time or a string field cannot produce rows and are skipped.
      if (stringField !== undefined && timeField !== undefined) {
        const info = {
          series,
          timeField,
          labelsField,
          timeNanosecondField: fieldCache.getFieldByName('tsNs'),
          stringField,
          logLevelField: fieldCache.getFieldByName('level'),
          idField: getIdField(fieldCache),
        };

        allSeries.push(info);

        const labels = getAllLabels(info);
        if (labels.length > 0) {
          allLabels.push(labels);
        }
      }
    });
  }

  // Labels shared by every row across all frames; used to compute per-row unique labels.
  const flatAllLabels = allLabels.flat();
  const commonLabels = flatAllLabels.length > 0 ? findCommonLabels(flatAllLabels) : {};

  const rows: LogRowModel[] = [];
  let hasUniqueLabels = false;

  for (const info of allSeries) {
    const { timeField, timeNanosecondField, stringField, logLevelField, idField, series } = info;

    for (let j = 0; j < series.length; j++) {
      const ts = timeField.values.get(j);
      const time = toUtc(ts);
      const tsNs = timeNanosecondField ? timeNanosecondField.values.get(j) : undefined;
      // Without a 'tsNs' field, fake ns precision by appending zeros to the ms value.
      const timeEpochNs = tsNs ? tsNs : time.valueOf() + '000000';

      // In edge cases, this can be undefined. If undefined, we want to replace it with empty string.
      const messageValue: unknown = stringField.values.get(j) ?? '';
      // This should be string but sometimes isn't (eg elastic) because the dataFrame is not strongly typed.
      const message: string = typeof messageValue === 'string' ? messageValue : JSON.stringify(messageValue);

      const hasAnsi = textUtil.hasAnsiCodes(message);

      // Literal backslash-escapes in the text indicate content that may need unescaping on display.
      const hasUnescapedContent = !!message.match(/\\n|\\t|\\r/);

      // Data sources that set up searchWords on backend use meta.custom.searchWords
      // Data sources that set up searchWords trough frontend can use meta.searchWords
      const searchWords = series.meta?.custom?.searchWords ?? series.meta?.searchWords ?? [];
      const entry = hasAnsi ? ansicolor.strip(message) : message;

      const labels = getLabelsForFrameRow(info, j);
      const uniqueLabels = findUniqueLabels(labels, commonLabels);
      if (Object.keys(uniqueLabels).length > 0) {
        hasUniqueLabels = true;
      }

      // Level resolution order: explicit 'level' field, then a 'level' label, then detection from the entry text.
      let logLevel = LogLevel.unknown;
      const logLevelKey = (logLevelField && logLevelField.values.get(j)) || (labels && labels['level']);
      if (logLevelKey) {
        logLevel = getLogLevelFromKey(logLevelKey);
      } else {
        logLevel = getLogLevel(entry);
      }

      const datasourceType = queries.find((query) => query.refId === series.refId)?.datasource?.type;

      rows.push({
        entryFieldIndex: stringField.index,
        rowIndex: j,
        dataFrame: series,
        logLevel,
        timeFromNow: dateTimeFormatTimeAgo(ts),
        timeEpochMs: time.valueOf(),
        timeEpochNs,
        timeLocal: dateTimeFormat(ts, { timeZone: 'browser' }),
        timeUtc: dateTimeFormat(ts, { timeZone: 'utc' }),
        uniqueLabels,
        hasAnsi,
        hasUnescapedContent,
        searchWords,
        entry,
        raw: message,
        labels: labels || {},
        // Without an id field, fall back to the row index as uid.
        uid: idField ? idField.values.get(j) : j.toString(),
        datasourceType,
      });
    }
  }

  // Meta data to display in status
  const meta: LogsMetaItem[] = [];
  if (size(commonLabels) > 0) {
    meta.push({
      label: COMMON_LABELS,
      value: commonLabels,
      kind: LogsMetaKind.LabelsMap,
    });
  }

  // Sum line limits across queries, keeping only the last limit seen per refId.
  const limits = logSeries.filter((series) => series.meta && series.meta.limit);
  const lastLimitPerRef = limits.reduce<Record<string, number>>((acc, elem) => {
    acc[elem.refId ?? ''] = elem.meta?.limit ?? 0;
    return acc;
  }, {});

  const limitValue = Object.values(lastLimitPerRef).reduce((acc, elem) => (acc += elem), 0);

  if (limitValue > 0) {
    meta.push({
      label: LIMIT_LABEL,
      value: limitValue,
      kind: LogsMetaKind.Number,
    });
  }

  let totalBytes = 0;
  const queriesVisited: { [refId: string]: boolean } = {};
  // To add just 1 error message
  let errorMetaAdded = false;

  for (const series of logSeries) {
    // lokiQueryStatKey names the stat entry (if any) that carries processed bytes.
    const totalBytesKey = series.meta?.custom?.lokiQueryStatKey;
    const { refId } = series; // Stats are per query, keeping track by refId

    if (!errorMetaAdded && series.meta?.custom?.error) {
      meta.push({
        label: '',
        value: series.meta?.custom.error,
        kind: LogsMetaKind.Error,
      });
      errorMetaAdded = true;
    }

    if (refId && !queriesVisited[refId]) {
      if (totalBytesKey && series.meta?.stats) {
        const byteStat = series.meta.stats.find((stat) => stat.displayName === totalBytesKey);
        if (byteStat) {
          totalBytes += byteStat.value;
        }
      }
      queriesVisited[refId] = true;
    }
  }

  if (totalBytes > 0) {
    const { text, suffix } = SIPrefix('B')(totalBytes);
    meta.push({
      label: 'Total bytes processed',
      value: `${text} ${suffix}`,
      kind: LogsMetaKind.String,
    });
  }

  return {
    hasUniqueLabels,
    meta,
    rows,
  };
}
2019-09-30 07:44:15 -05:00
/**
 * Looks up a field usable as a unique row id. Currently only the name 'id'
 * is recognized; returns undefined when no candidate exists.
 */
function getIdField(fieldCache: FieldCache): FieldWithIndex | undefined {
  for (const candidateName of ['id']) {
    const candidate = fieldCache.getFieldByName(candidateName);
    if (candidate) {
      return candidate;
    }
  }
  return undefined;
}
2021-04-21 05:02:34 -05:00
// Used to add additional information to Line limit meta info
function adjustMetaInfo ( logsModel : LogsModel , visibleRangeMs? : number , requestedRangeMs? : number ) : LogsMetaItem [ ] {
let logsModelMeta = [ . . . logsModel . meta ! ] ;
const limitIndex = logsModelMeta . findIndex ( ( meta ) = > meta . label === LIMIT_LABEL ) ;
2021-05-03 05:55:53 -05:00
const limit = limitIndex >= 0 && logsModelMeta [ limitIndex ] ? . value ;
2021-04-21 05:02:34 -05:00
if ( limit && limit > 0 ) {
let metaLimitValue ;
if ( limit === logsModel . rows . length && visibleRangeMs && requestedRangeMs ) {
const coverage = ( ( visibleRangeMs / requestedRangeMs ) * 100 ) . toFixed ( 2 ) ;
metaLimitValue = ` ${ limit } reached, received logs cover ${ coverage } % ( ${ rangeUtil . msRangeToTimeString (
visibleRangeMs
) } ) of your selected time range ( $ { rangeUtil . msRangeToTimeString ( requestedRangeMs ) } ) ` ;
} else {
metaLimitValue = ` ${ limit } ( ${ logsModel . rows . length } returned) ` ;
}
logsModelMeta [ limitIndex ] = {
label : LIMIT_LABEL ,
value : metaLimitValue ,
kind : LogsMetaKind.String ,
} ;
}
return logsModelMeta ;
}
2021-11-02 05:53:47 -05:00
/ * *
* Returns field configuration used to render logs volume bars
* /
function getLogVolumeFieldConfig ( level : LogLevel , oneLevelDetected : boolean ) {
const name = oneLevelDetected && level === LogLevel . unknown ? 'logs' : level ;
const color = LogLevelColor [ level ] ;
return {
displayNameFromDS : name ,
color : {
mode : FieldColorModeId.Fixed ,
fixedColor : color ,
} ,
custom : {
drawStyle : GraphDrawStyle.Bars ,
barAlignment : BarAlignment.Center ,
lineColor : color ,
pointColor : color ,
fillColor : color ,
lineWidth : 1 ,
fillOpacity : 100 ,
stacking : {
mode : StackingMode.Normal ,
group : 'A' ,
} ,
} ,
} ;
}
2023-03-07 08:00:11 -06:00
const updateLogsVolumeConfig = (
dataFrame : DataFrame ,
extractLevel : ( dataFrame : DataFrame ) = > LogLevel ,
oneLevelDetected : boolean
) : DataFrame = > {
dataFrame . fields = dataFrame . fields . map ( ( field ) = > {
if ( field . type === FieldType . number ) {
field . config = {
. . . field . config ,
. . . getLogVolumeFieldConfig ( extractLevel ( dataFrame ) , oneLevelDetected ) ,
} ;
2021-11-02 05:53:47 -05:00
}
2023-03-07 08:00:11 -06:00
return field ;
2021-11-02 05:53:47 -05:00
} ) ;
2023-03-07 08:00:11 -06:00
return dataFrame ;
} ;
2021-11-02 05:53:47 -05:00
// Options for queryLogsVolume: how to derive a level per frame, which queries
// produced the volume request, and the requested time range.
type LogsVolumeQueryOptions<T extends DataQuery> = {
  extractLevel: (dataFrame: DataFrame) => LogLevel;
  targets: T[];
  range: TimeRange;
};
/ * *
* Creates an observable , which makes requests to get logs volume and aggregates results .
* /
2022-07-06 01:49:45 -05:00
export function queryLogsVolume < TQuery extends DataQuery , TOptions extends DataSourceJsonData > (
datasource : DataSourceApi < TQuery , TOptions > ,
logsVolumeRequest : DataQueryRequest < TQuery > ,
options : LogsVolumeQueryOptions < TQuery >
2021-11-02 05:53:47 -05:00
) : Observable < DataQueryResponse > {
2021-11-10 04:20:30 -06:00
const timespan = options . range . to . valueOf ( ) - options . range . from . valueOf ( ) ;
const intervalInfo = getIntervalInfo ( logsVolumeRequest . scopedVars , timespan ) ;
2022-09-13 06:27:16 -05:00
2021-11-02 05:53:47 -05:00
logsVolumeRequest . interval = intervalInfo . interval ;
logsVolumeRequest . scopedVars . __interval = { value : intervalInfo.interval , text : intervalInfo.interval } ;
2022-09-13 06:27:16 -05:00
2021-11-02 05:53:47 -05:00
if ( intervalInfo . intervalMs !== undefined ) {
logsVolumeRequest . intervalMs = intervalInfo . intervalMs ;
logsVolumeRequest . scopedVars . __interval_ms = { value : intervalInfo.intervalMs , text : intervalInfo.intervalMs } ;
}
2022-09-13 06:27:16 -05:00
logsVolumeRequest . hideFromInspector = true ;
2021-11-02 05:53:47 -05:00
return new Observable ( ( observer ) = > {
2023-03-07 08:00:11 -06:00
let logsVolumeData : DataFrame [ ] = [ ] ;
2021-11-02 05:53:47 -05:00
observer . next ( {
state : LoadingState.Loading ,
error : undefined ,
data : [ ] ,
} ) ;
2022-07-06 01:49:45 -05:00
const queryResponse = datasource . query ( logsVolumeRequest ) ;
const queryObservable = isObservable ( queryResponse ) ? queryResponse : from ( queryResponse ) ;
const subscription = queryObservable . subscribe ( {
2022-02-08 08:39:09 -06:00
complete : ( ) = > {
observer . complete ( ) ;
} ,
next : ( dataQueryResponse : DataQueryResponse ) = > {
2022-06-13 01:33:28 -05:00
const { error } = dataQueryResponse ;
if ( error !== undefined ) {
observer . next ( {
state : LoadingState.Error ,
error ,
data : [ ] ,
} ) ;
observer . error ( error ) ;
} else {
2023-03-07 08:00:11 -06:00
const framesByRefId = groupBy ( dataQueryResponse . data , 'refId' ) ;
logsVolumeData = dataQueryResponse . data . map ( ( dataFrame ) = > {
let sourceRefId = dataFrame . refId || '' ;
if ( sourceRefId . startsWith ( 'log-volume-' ) ) {
sourceRefId = sourceRefId . substr ( 'log-volume-' . length ) ;
}
const logsVolumeCustomMetaData : LogsVolumeCustomMetaData = {
logsVolumeType : LogsVolumeType.FullRange ,
absoluteRange : { from : options . range . from . valueOf ( ) , to : options.range.to.valueOf ( ) } ,
datasourceName : datasource.name ,
sourceQuery : options.targets.find ( ( dataQuery ) = > dataQuery . refId === sourceRefId ) ! ,
} ;
dataFrame . meta = {
. . . dataFrame . meta ,
2023-02-10 07:43:41 -06:00
custom : {
2023-03-07 08:00:11 -06:00
. . . dataFrame . meta ? . custom ,
. . . logsVolumeCustomMetaData ,
2023-02-10 07:43:41 -06:00
} ,
} ;
2023-03-07 08:00:11 -06:00
return updateLogsVolumeConfig ( dataFrame , options . extractLevel , framesByRefId [ dataFrame . refId ] . length === 1 ) ;
} ) ;
2023-02-10 07:43:41 -06:00
observer . next ( {
2023-03-07 08:00:11 -06:00
state : dataQueryResponse.state ,
2023-02-10 07:43:41 -06:00
error : undefined ,
2023-03-07 08:00:11 -06:00
data : logsVolumeData ,
2023-02-10 07:43:41 -06:00
} ) ;
2022-06-13 01:33:28 -05:00
}
2022-02-08 08:39:09 -06:00
} ,
error : ( error ) = > {
observer . next ( {
state : LoadingState.Error ,
error : error ,
2023-01-20 07:20:49 -06:00
data : [ ] ,
} ) ;
observer . error ( error ) ;
} ,
} ) ;
return ( ) = > {
subscription ? . unsubscribe ( ) ;
} ;
} ) ;
}
/**
 * Creates an observable, which makes requests to get logs sample.
 *
 * Emits Loading immediately, accumulates time-sorted frames from the
 * datasource, and emits Done with all collected frames on completion.
 *
 * @param datasource The datasource to query.
 * @param logsSampleRequest Request mutated in place with hideFromInspector.
 */
export function queryLogsSample<TQuery extends DataQuery, TOptions extends DataSourceJsonData>(
  datasource: DataSourceApi<TQuery, TOptions>,
  logsSampleRequest: DataQueryRequest<TQuery>
): Observable<DataQueryResponse> {
  logsSampleRequest.hideFromInspector = true;

  return new Observable((observer) => {
    // Accumulates frames across responses; emitted once on complete.
    let rawLogsSample: DataFrame[] = [];
    observer.next({
      state: LoadingState.Loading,
      error: undefined,
      data: [],
    });

    // datasource.query may return a promise or an observable; normalize to observable.
    const queryResponse = datasource.query(logsSampleRequest);
    const queryObservable = isObservable(queryResponse) ? queryResponse : from(queryResponse);
    const subscription = queryObservable.subscribe({
      complete: () => {
        observer.next({
          state: LoadingState.Done,
          error: undefined,
          data: rawLogsSample,
        });
        observer.complete();
      },
      next: (dataQueryResponse: DataQueryResponse) => {
        const { error } = dataQueryResponse;
        if (error !== undefined) {
          observer.next({
            state: LoadingState.Error,
            error,
            data: [],
          });
          observer.error(error);
        } else {
          // Sort each frame by its time field before keeping it.
          rawLogsSample = dataQueryResponse.data.map((dataFrame) => {
            const frame = toDataFrame(dataFrame);
            const { timeIndex } = getTimeField(frame);
            return sortDataFrame(frame, timeIndex);
          });
        }
      },
      error: (error) => {
        observer.next({
          state: LoadingState.Error,
          error: error,
          data: [],
        });
        observer.error(error);
      },
    });
    return () => {
      subscription?.unsubscribe();
    };
  });
}
2021-11-10 04:20:30 -06:00
function getIntervalInfo ( scopedVars : ScopedVars , timespanMs : number ) : { interval : string ; intervalMs? : number } {
2023-03-28 12:22:34 -05:00
if ( scopedVars . __interval_ms ) {
2021-11-02 05:53:47 -05:00
let intervalMs : number = scopedVars . __interval_ms . value ;
let interval = '' ;
2021-11-10 04:20:30 -06:00
// below 5 seconds we force the resolution to be per 1ms as interval in scopedVars is not less than 10ms
if ( timespanMs < SECOND * 5 ) {
intervalMs = MILLISECOND ;
interval = '1ms' ;
} else if ( intervalMs > HOUR ) {
2021-11-02 05:53:47 -05:00
intervalMs = DAY ;
interval = '1d' ;
} else if ( intervalMs > MINUTE ) {
intervalMs = HOUR ;
interval = '1h' ;
} else if ( intervalMs > SECOND ) {
intervalMs = MINUTE ;
interval = '1m' ;
} else {
intervalMs = SECOND ;
interval = '1s' ;
}
return { interval , intervalMs } ;
} else {
return { interval : '$__interval' } ;
}
}