import { size } from 'lodash';
import { BarAlignment, GraphDrawStyle, StackingMode } from '@grafana/schema';
import { ansicolor, colors } from '@grafana/ui';

import {
  AbsoluteTimeRange,
  DataFrame,
  DataQuery,
  dateTime,
  dateTimeFormat,
  dateTimeFormatTimeAgo,
  FieldCache,
  FieldColorModeId,
  FieldType,
  FieldWithIndex,
  findCommonLabels,
  findUniqueLabels,
  getLogLevel,
  getLogLevelFromKey,
  Labels,
  LogLevel,
  LogRowModel,
  LogsDedupStrategy,
  LogsMetaItem,
  LogsMetaKind,
  LogsModel,
  rangeUtil,
  sortInAscendingOrder,
  textUtil,
  toDataFrame,
} from '@grafana/data';
import { getThemeColor } from 'app/core/utils/colors';
import { SIPrefix } from '@grafana/data/src/valueFormats/symbolFormatters';

export const LIMIT_LABEL = 'Line limit';
export const COMMON_LABELS = 'Common labels';

export const LogLevelColor = {
  [LogLevel.critical]: colors[7],
  [LogLevel.warning]: colors[1],
  [LogLevel.error]: colors[4],
  [LogLevel.info]: colors[0],
  [LogLevel.debug]: colors[5],
  [LogLevel.trace]: colors[2],
  [LogLevel.unknown]: getThemeColor('#8e8e8e', '#dde4ed'),
};

const isoDateRegexp = /\d{4}-[01]\d-[0-3]\dT[0-2]\d:[0-5]\d:[0-6]\d[,\.]\d+([+-][0-2]\d:[0-5]\d|Z)/g;

function isDuplicateRow(row: LogRowModel, other: LogRowModel, strategy?: LogsDedupStrategy): boolean {
  switch (strategy) {
    case LogsDedupStrategy.exact:
      // Exact still strips dates
      return row.entry.replace(isoDateRegexp, '') === other.entry.replace(isoDateRegexp, '');

    case LogsDedupStrategy.numbers:
      return row.entry.replace(/\d/g, '') === other.entry.replace(/\d/g, '');

    case LogsDedupStrategy.signature:
      return row.entry.replace(/\w/g, '') === other.entry.replace(/\w/g, '');

    default:
      return false;
  }
}

export function dedupLogRows(rows: LogRowModel[], strategy?: LogsDedupStrategy): LogRowModel[] {
  if (strategy === LogsDedupStrategy.none) {
    return rows;
  }

  return rows.reduce((result: LogRowModel[], row: LogRowModel, index) => {
    const rowCopy = { ...row };
    const previous = result[result.length - 1];
    if (index > 0 && isDuplicateRow(row, previous, strategy)) {
      previous.duplicates!++;
    } else {
      rowCopy.duplicates = 0;
      result.push(rowCopy);
    }
    return result;
  }, []);
}
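
// Illustrative sketch (not part of the original module): how the dedup strategies
// compare. `numbers` strips digits before comparing entries, `signature` strips all
// word characters, and `exact` only strips ISO dates, so for two rows that differ
// only in a number:
//
//   const rows = [{ entry: 'request took 25ms' }, { entry: 'request took 26ms' }] as LogRowModel[];
//   dedupLogRows(rows, LogsDedupStrategy.numbers); // one row, duplicates: 1
//   dedupLogRows(rows, LogsDedupStrategy.exact);   // two rows, each with duplicates: 0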

export function filterLogLevels(logRows: LogRowModel[], hiddenLogLevels: Set<LogLevel>): LogRowModel[] {
  if (hiddenLogLevels.size === 0) {
    return logRows;
  }

  return logRows.filter((row: LogRowModel) => {
    return !hiddenLogLevels.has(row.logLevel);
  });
}

export function makeDataFramesForLogs(sortedRows: LogRowModel[], bucketSize: number): DataFrame[] {
  // Currently the interval is rangeMs / resolution, which is too low for showing series as bars.
  // This should be solved higher up the chain, when executing queries and calculating the interval,
  // not here; this is a temporary fix.

  // Graph time series by log level
  const seriesByLevel: any = {};
  const seriesList: any[] = [];

  for (const row of sortedRows) {
    let series = seriesByLevel[row.logLevel];
    if (!series) {
      seriesByLevel[row.logLevel] = series = {
        lastTs: null,
        datapoints: [],
        target: row.logLevel,
        color: LogLevelColor[row.logLevel],
      };
      seriesList.push(series);
    }

    // Align time to bucket size. Math.floor is used because the time of the bucket
    // must be in the past (before Date.now()) to be displayed on the graph.
    const time = Math.floor(row.timeEpochMs / bucketSize) * bucketSize;

    // Entry for time
    if (time === series.lastTs) {
      series.datapoints[series.datapoints.length - 1][0]++;
    } else {
      series.datapoints.push([1, time]);
      series.lastTs = time;
    }

    // Add a zero to the other levels to aid stacking, so each level series has the same number of points
    for (const other of seriesList) {
      if (other !== series && other.lastTs !== time) {
        other.datapoints.push([0, time]);
        other.lastTs = time;
      }
    }
  }

  return seriesList.map((series, i) => {
    series.datapoints.sort((a: number[], b: number[]) => a[1] - b[1]);

    const data = toDataFrame(series);
    const fieldCache = new FieldCache(data);
    const valueField = fieldCache.getFirstFieldOfType(FieldType.number)!;

    data.fields[valueField.index].config.min = 0;
    data.fields[valueField.index].config.decimals = 0;
    data.fields[valueField.index].config.color = {
      mode: FieldColorModeId.Fixed,
      fixedColor: series.color,
    };
    data.fields[valueField.index].config.custom = {
      drawStyle: GraphDrawStyle.Bars,
      barAlignment: BarAlignment.Center,
      barWidthFactor: 0.9,
      barMaxWidth: 5,
      lineColor: series.color,
      pointColor: series.color,
      fillColor: series.color,
      lineWidth: 0,
      fillOpacity: 100,
      stacking: {
        mode: StackingMode.Normal,
        group: 'A',
      },
    };

    return data;
  });
}
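
// Worked example (illustrative numbers only): with bucketSize = 10000 (10s), a row at
// timeEpochMs = 1618993615000 lands in the bucket
// Math.floor(1618993615000 / 10000) * 10000 = 1618993610000, and every other level's
// series receives a zero datapoint at that same bucket so the stacked bars line up.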

function isLogsData(series: DataFrame) {
  return series.fields.some((f) => f.type === FieldType.time) && series.fields.some((f) => f.type === FieldType.string);
}

/**
 * Converts dataFrames into a LogsModel, which consists of a separate array of log rows and metrics series.
 * Metrics series can either already be included in the dataFrames or will be computed from the log rows.
 * @param dataFrame
 * @param intervalMs In case there are no metrics series, we use this for computing them from log rows.
 */
export function dataFrameToLogsModel(
  dataFrame: DataFrame[],
  intervalMs: number | undefined,
  absoluteRange?: AbsoluteTimeRange,
  queries?: DataQuery[]
): LogsModel {
  const { logSeries } = separateLogsAndMetrics(dataFrame);
  const logsModel = logSeriesToLogsModel(logSeries);

  if (logsModel) {
    // Create histogram metrics from logs using the interval as bucket size for the line count
    if (intervalMs && logsModel.rows.length > 0) {
      const sortedRows = logsModel.rows.sort(sortInAscendingOrder);
      const { visibleRange, bucketSize, visibleRangeMs, requestedRangeMs } = getSeriesProperties(
        sortedRows,
        intervalMs,
        absoluteRange
      );
      logsModel.visibleRange = visibleRange;
      logsModel.series = makeDataFramesForLogs(sortedRows, bucketSize);

      if (logsModel.meta) {
        logsModel.meta = adjustMetaInfo(logsModel, visibleRangeMs, requestedRangeMs);
      }
    } else {
      logsModel.series = [];
    }
    logsModel.queries = queries;
    return logsModel;
  }

  return {
    hasUniqueLabels: false,
    rows: [],
    meta: [],
    series: [],
    queries,
  };
}
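
// Usage sketch (illustrative; `frames`, `range` and `queries` are assumed to come
// from a query response):
//
//   const logsModel = dataFrameToLogsModel(frames, 1000, range, queries);
//   logsModel.rows;   // merged, enriched log rows
//   logsModel.series; // histogram DataFrames, one per log level
//   logsModel.meta;   // common labels, line limit, etc.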

/**
 * Returns a clamped time range and interval based on the visible logs and the given range.
 *
 * @param sortedRows Log rows from the query response
 * @param intervalMs Dynamic data interval based on available pixel width
 * @param absoluteRange Requested time range
 * @param pxPerBar Default: 20, buckets will be rendered as bars, assuming 10px per histogram bar plus some free space around it
 */
export function getSeriesProperties(
  sortedRows: LogRowModel[],
  intervalMs: number,
  absoluteRange?: AbsoluteTimeRange,
  pxPerBar = 20,
  minimumBucketSize = 1000
) {
  let visibleRange = absoluteRange;
  let resolutionIntervalMs = intervalMs;
  let bucketSize = Math.max(resolutionIntervalMs * pxPerBar, minimumBucketSize);
  let visibleRangeMs;
  let requestedRangeMs;

  // Clamp the time range to the visible logs, otherwise big parts of the graph might look empty
  if (absoluteRange) {
    const earliestTsLogs = sortedRows[0].timeEpochMs;

    requestedRangeMs = absoluteRange.to - absoluteRange.from;
    visibleRangeMs = absoluteRange.to - earliestTsLogs;

    if (visibleRangeMs > 0) {
      // Adjust the interval bucket size for the potentially shorter visible range
      const clampingFactor = visibleRangeMs / requestedRangeMs;
      resolutionIntervalMs *= clampingFactor;

      // Minimum bucket size of 1s for nicer graphing
      bucketSize = Math.max(Math.ceil(resolutionIntervalMs * pxPerBar), minimumBucketSize);

      // makeDataFramesForLogs() aligns datapoints with time buckets, so we do the same here to not cut off data
      const adjustedEarliest = Math.floor(earliestTsLogs / bucketSize) * bucketSize;
      visibleRange = { from: adjustedEarliest, to: absoluteRange.to };
    } else {
      // We use visibleRangeMs to calculate the range coverage of received logs. However, some data sources
      // round up the range in requests, which means that received logs can (in edge cases) be outside of the
      // requested range and visibleRangeMs < 0. In that case, we set visibleRangeMs to 1 so we can still
      // calculate coverage.
      visibleRangeMs = 1;
    }
  }

  return { bucketSize, visibleRange, visibleRangeMs, requestedRangeMs };
}
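
// Worked example (illustrative numbers only): with intervalMs = 100 and pxPerBar = 20,
// the initial bucket size is max(100 * 20, 1000) = 2000ms. If the requested range is 1h
// but the earliest log is only 30min old, clampingFactor = 0.5, so the bucket size becomes
// Math.max(Math.ceil(100 * 0.5 * 20), 1000) = 1000ms, and the visible range starts at the
// earliest log timestamp floored to a bucket boundary.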

function separateLogsAndMetrics(dataFrames: DataFrame[]) {
  const metricSeries: DataFrame[] = [];
  const logSeries: DataFrame[] = [];

  for (const dataFrame of dataFrames) {
    // We want to show meta stats even if no result was returned, which is why we also push data frames with no fields.
    if (isLogsData(dataFrame) || !dataFrame.fields.length) {
      logSeries.push(dataFrame);
      continue;
    }

    if (dataFrame.length > 0) {
      metricSeries.push(dataFrame);
    }
  }

  return { logSeries, metricSeries };
}

interface LogFields {
  series: DataFrame;
  timeField: FieldWithIndex;
  stringField: FieldWithIndex;
  timeNanosecondField?: FieldWithIndex;
  logLevelField?: FieldWithIndex;
  idField?: FieldWithIndex;
}

/**
 * Converts dataFrames into a LogsModel. This involves merging them into one list, sorting them and computing
 * metadata like common labels.
 */
export function logSeriesToLogsModel(logSeries: DataFrame[]): LogsModel | undefined {
  if (logSeries.length === 0) {
    return undefined;
  }

  const allLabels: Labels[] = [];

  // Find the fields we care about and collect all labels
  let allSeries: LogFields[] = [];

  // We are sometimes passed data frames with no fields because we want to calculate correct meta stats.
  // Therefore we need to filter out series with no fields; those series are used only for meta stats calculation.
  const seriesWithFields = logSeries.filter((series) => series.fields.length);

  if (seriesWithFields.length) {
    allSeries = seriesWithFields.map((series) => {
      const fieldCache = new FieldCache(series);
      const stringField = fieldCache.getFirstFieldOfType(FieldType.string);
      if (stringField?.labels) {
        allLabels.push(stringField.labels);
      }
      return {
        series,
        timeField: fieldCache.getFirstFieldOfType(FieldType.time),
        timeNanosecondField: fieldCache.hasFieldWithNameAndType('tsNs', FieldType.time)
          ? fieldCache.getFieldByName('tsNs')
          : undefined,
        stringField,
        logLevelField: fieldCache.getFieldByName('level'),
        idField: getIdField(fieldCache),
      } as LogFields;
    });
  }

  const commonLabels = allLabels.length > 0 ? findCommonLabels(allLabels) : {};

  const rows: LogRowModel[] = [];
  let hasUniqueLabels = false;

  for (const info of allSeries) {
    const { timeField, timeNanosecondField, stringField, logLevelField, idField, series } = info;
    const labels = stringField.labels;
    const uniqueLabels = findUniqueLabels(labels, commonLabels);
    if (Object.keys(uniqueLabels).length > 0) {
      hasUniqueLabels = true;
    }

    let seriesLogLevel: LogLevel | undefined = undefined;
    if (labels && Object.keys(labels).indexOf('level') !== -1) {
      seriesLogLevel = getLogLevelFromKey(labels['level']);
    }

    for (let j = 0; j < series.length; j++) {
      const ts = timeField.values.get(j);
      const time = dateTime(ts);
      const tsNs = timeNanosecondField ? timeNanosecondField.values.get(j) : undefined;
      const timeEpochNs = tsNs ? tsNs : time.valueOf() + '000000';

      // In edge cases this can be undefined; if undefined, we want to replace it with an empty string.
      const messageValue: unknown = stringField.values.get(j) ?? '';
      // This should be a string but sometimes isn't (e.g. elastic) because the dataFrame is not strongly typed.
      const message: string = typeof messageValue === 'string' ? messageValue : JSON.stringify(messageValue);

      const hasAnsi = textUtil.hasAnsiCodes(message);
      const hasUnescapedContent = !!message.match(/\\n|\\t|\\r/);
      const searchWords = series.meta && series.meta.searchWords ? series.meta.searchWords : [];
      const entry = hasAnsi ? ansicolor.strip(message) : message;

      let logLevel = LogLevel.unknown;
      if (logLevelField && logLevelField.values.get(j)) {
        logLevel = getLogLevelFromKey(logLevelField.values.get(j));
      } else if (seriesLogLevel) {
        logLevel = seriesLogLevel;
      } else {
        logLevel = getLogLevel(entry);
      }

      rows.push({
        entryFieldIndex: stringField.index,
        rowIndex: j,
        dataFrame: series,
        logLevel,
        timeFromNow: dateTimeFormatTimeAgo(ts),
        timeEpochMs: time.valueOf(),
        timeEpochNs,
        timeLocal: dateTimeFormat(ts, { timeZone: 'browser' }),
        timeUtc: dateTimeFormat(ts, { timeZone: 'utc' }),
        uniqueLabels,
        hasAnsi,
        hasUnescapedContent,
        searchWords,
        entry,
        raw: message,
        labels: stringField.labels || {},
        uid: idField ? idField.values.get(j) : j.toString(),
      });
    }
  }

  // Meta data to display in status
  const meta: LogsMetaItem[] = [];
  if (size(commonLabels) > 0) {
    meta.push({
      label: COMMON_LABELS,
      value: commonLabels,
      kind: LogsMetaKind.LabelsMap,
    });
  }

  const limits = logSeries.filter((series) => series.meta && series.meta.limit);
  const limitValue = Object.values(
    limits.reduce((acc: any, elem: any) => {
      acc[elem.refId] = elem.meta.limit;
      return acc;
    }, {})
  ).reduce((acc: number, elem: any) => (acc += elem), 0) as number;

  if (limitValue > 0) {
    meta.push({
      label: LIMIT_LABEL,
      value: limitValue,
      kind: LogsMetaKind.Number,
    });
  }

  let totalBytes = 0;
  const queriesVisited: { [refId: string]: boolean } = {};
  // Add only one error message
  let errorMetaAdded = false;

  for (const series of logSeries) {
    const totalBytesKey = series.meta?.custom?.lokiQueryStatKey;
    const { refId } = series; // Stats are per query, keeping track by refId

    if (!errorMetaAdded && series.meta?.custom?.error) {
      meta.push({
        label: '',
        value: series.meta?.custom.error,
        kind: LogsMetaKind.Error,
      });
      errorMetaAdded = true;
    }

    if (refId && !queriesVisited[refId]) {
      if (totalBytesKey && series.meta?.stats) {
        const byteStat = series.meta.stats.find((stat) => stat.displayName === totalBytesKey);
        if (byteStat) {
          totalBytes += byteStat.value;
        }
      }
      queriesVisited[refId] = true;
    }
  }

  if (totalBytes > 0) {
    const { text, suffix } = SIPrefix('B')(totalBytes);
    meta.push({
      label: 'Total bytes processed',
      value: `${text} ${suffix}`,
      kind: LogsMetaKind.String,
    });
  }

  return {
    hasUniqueLabels,
    meta,
    rows,
  };
}
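
// Illustrative sketch: given two log series labeled {app: 'api', pod: 'a'} and
// {app: 'api', pod: 'b'}, findCommonLabels yields {app: 'api'} (surfaced as the
// COMMON_LABELS meta item) and each row keeps only its unique {pod: ...} label
// in uniqueLabels, which also sets hasUniqueLabels to true.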

function getIdField(fieldCache: FieldCache): FieldWithIndex | undefined {
  const idFieldNames = ['id'];
  for (const fieldName of idFieldNames) {
    const idField = fieldCache.getFieldByName(fieldName);
    if (idField) {
      return idField;
    }
  }
  return undefined;
}

// Used to add additional information to the Line limit meta info
function adjustMetaInfo(logsModel: LogsModel, visibleRangeMs?: number, requestedRangeMs?: number): LogsMetaItem[] {
  let logsModelMeta = [...logsModel.meta!];

  const limitIndex = logsModelMeta.findIndex((meta) => meta.label === LIMIT_LABEL);
  const limit = limitIndex >= 0 && logsModelMeta[limitIndex]?.value;

  if (limit && limit > 0) {
    let metaLimitValue;

    if (limit === logsModel.rows.length && visibleRangeMs && requestedRangeMs) {
      const coverage = ((visibleRangeMs / requestedRangeMs) * 100).toFixed(2);

      metaLimitValue = `${limit} reached, received logs cover ${coverage}% (${rangeUtil.msRangeToTimeString(
        visibleRangeMs
      )}) of your selected time range (${rangeUtil.msRangeToTimeString(requestedRangeMs)})`;
    } else {
      metaLimitValue = `${limit} (${logsModel.rows.length} returned)`;
    }

    logsModelMeta[limitIndex] = {
      label: LIMIT_LABEL,
      value: metaLimitValue,
      kind: LogsMetaKind.String,
    };
  }

  return logsModelMeta;
}
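
// Illustrative output (assumed numbers): when a limit of 1000 rows is fully returned and
// the received logs cover 30min of a 1h request, adjustMetaInfo rewrites the Line limit
// meta to something like "1000 reached, received logs cover 50.00% (30min) of your
// selected time range (1h)", with the exact duration text per rangeUtil.msRangeToTimeString.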