import { size } from 'lodash';
import { BarAlignment, GraphDrawStyle, StackingMode } from '@grafana/schema';
import { ansicolor, colors } from '@grafana/ui';

import {
  AbsoluteTimeRange,
  DataFrame,
  DataQuery,
  DataQueryRequest,
  DataQueryResponse,
  DataSourceApi,
  dateTime,
  dateTimeFormat,
  dateTimeFormatTimeAgo,
  FieldCache,
  FieldColorModeId,
  FieldConfig,
  FieldType,
  FieldWithIndex,
  findCommonLabels,
  findUniqueLabels,
  getLogLevel,
  getLogLevelFromKey,
  Labels,
  LoadingState,
  LogLevel,
  LogRowModel,
  LogsDedupStrategy,
  LogsMetaItem,
  LogsMetaKind,
  LogsModel,
  MutableDataFrame,
  rangeUtil,
  ScopedVars,
  sortInAscendingOrder,
  textUtil,
  TimeRange,
  toDataFrame,
} from '@grafana/data';
import { getThemeColor } from 'app/core/utils/colors';
import { SIPrefix } from '@grafana/data/src/valueFormats/symbolFormatters';
import { Observable, throwError, timeout } from 'rxjs';

export const LIMIT_LABEL = 'Line limit';
export const COMMON_LABELS = 'Common labels';

export const LogLevelColor = {
  [LogLevel.critical]: colors[7],
  [LogLevel.warning]: colors[1],
  [LogLevel.error]: colors[4],
  [LogLevel.info]: colors[0],
  [LogLevel.debug]: colors[5],
  [LogLevel.trace]: colors[2],
  [LogLevel.unknown]: getThemeColor('#8e8e8e', '#dde4ed'),
};

const SECOND = 1000;
const MINUTE = 60 * SECOND;
const HOUR = 60 * MINUTE;
const DAY = 24 * HOUR;

const isoDateRegexp = /\d{4}-[01]\d-[0-3]\dT[0-2]\d:[0-5]\d:[0-6]\d[,\.]\d+([+-][0-2]\d:[0-5]\d|Z)/g;

function isDuplicateRow(row: LogRowModel, other: LogRowModel, strategy?: LogsDedupStrategy): boolean {
  switch (strategy) {
    case LogsDedupStrategy.exact:
      // Exact still strips dates
      return row.entry.replace(isoDateRegexp, '') === other.entry.replace(isoDateRegexp, '');

    case LogsDedupStrategy.numbers:
      return row.entry.replace(/\d/g, '') === other.entry.replace(/\d/g, '');

    case LogsDedupStrategy.signature:
      return row.entry.replace(/\w/g, '') === other.entry.replace(/\w/g, '');

    default:
      return false;
  }
}

export function dedupLogRows(rows: LogRowModel[], strategy?: LogsDedupStrategy): LogRowModel[] {
  if (strategy === LogsDedupStrategy.none) {
    return rows;
  }

  return rows.reduce((result: LogRowModel[], row: LogRowModel, index) => {
    const rowCopy = { ...row };
    const previous = result[result.length - 1];
    if (index > 0 && isDuplicateRow(row, previous, strategy)) {
      previous.duplicates!++;
    } else {
      rowCopy.duplicates = 0;
      result.push(rowCopy);
    }
    return result;
  }, []);
}

export function filterLogLevels(logRows: LogRowModel[], hiddenLogLevels: Set<LogLevel>): LogRowModel[] {
  if (hiddenLogLevels.size === 0) {
    return logRows;
  }

  return logRows.filter((row: LogRowModel) => {
    return !hiddenLogLevels.has(row.logLevel);
  });
}
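
/*
 * Usage sketch (hypothetical `rows` input; not executed anywhere in this module):
 *
 *   const visible = filterLogLevels(rows, new Set([LogLevel.debug, LogLevel.trace]));
 *   // `numbers` treats lines that differ only in digits ("took 13ms" vs "took 250ms") as duplicates:
 *   const deduped = dedupLogRows(visible, LogsDedupStrategy.numbers);
 *   // each surviving row carries a `duplicates` count of the rows folded into it
 */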

export function makeDataFramesForLogs(sortedRows: LogRowModel[], bucketSize: number): DataFrame[] {
  // Currently interval is rangeMs / resolution, which is too low for showing series as bars.
  // This should be solved higher up the chain, when queries are executed and the interval is
  // calculated, but this works as a temporary fix.

  // Graph time series by log level
  const seriesByLevel: any = {};
  const seriesList: any[] = [];

  for (const row of sortedRows) {
    let series = seriesByLevel[row.logLevel];

    if (!series) {
      seriesByLevel[row.logLevel] = series = {
        lastTs: null,
        datapoints: [],
        target: row.logLevel,
        color: LogLevelColor[row.logLevel],
      };

      seriesList.push(series);
    }

    // Align time to bucket size; Math.floor is used so the bucket time lies in the past
    // (before Date.now()), otherwise the point would not be displayed on the graph.
    const time = Math.floor(row.timeEpochMs / bucketSize) * bucketSize;

    // Entry for time
    if (time === series.lastTs) {
      series.datapoints[series.datapoints.length - 1][0]++;
    } else {
      series.datapoints.push([1, time]);
      series.lastTs = time;
    }

    // Add a zero to the other levels to aid stacking, so each level series has the same number of points
    for (const other of seriesList) {
      if (other !== series && other.lastTs !== time) {
        other.datapoints.push([0, time]);
        other.lastTs = time;
      }
    }
  }

  return seriesList.map((series, i) => {
    series.datapoints.sort((a: number[], b: number[]) => a[1] - b[1]);

    const data = toDataFrame(series);
    const fieldCache = new FieldCache(data);

    const valueField = fieldCache.getFirstFieldOfType(FieldType.number)!;

    data.fields[valueField.index].config.min = 0;
    data.fields[valueField.index].config.decimals = 0;
    data.fields[valueField.index].config.color = {
      mode: FieldColorModeId.Fixed,
      fixedColor: series.color,
    };
    data.fields[valueField.index].config.custom = {
      drawStyle: GraphDrawStyle.Bars,
      barAlignment: BarAlignment.Center,
      barWidthFactor: 0.9,
      barMaxWidth: 5,
      lineColor: series.color,
      pointColor: series.color,
      fillColor: series.color,
      lineWidth: 0,
      fillOpacity: 100,
      stacking: {
        mode: StackingMode.Normal,
        group: 'A',
      },
    };

    return data;
  });
}
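
/*
 * Bucketing sketch (illustrative values, not taken from a real query): with
 * bucketSize = 60000 (1 minute), rows of the same level at 12:00:10, 12:00:40
 * and 12:01:05 become datapoints [[2, <12:00>], [1, <12:01>]], and a matching
 * [0, <t>] point is appended to every other level so the stacked bars align.
 */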

function isLogsData(series: DataFrame) {
  return series.fields.some((f) => f.type === FieldType.time) && series.fields.some((f) => f.type === FieldType.string);
}

/**
 * Converts dataFrames into a LogsModel, which consists of a separate array of log rows and metrics series.
 * Metrics series can either be already included in the dataFrames or will be computed from the log rows.
 * @param dataFrame
 * @param intervalMs In case there are no metrics series, we use this for computing them from log rows.
 */
export function dataFrameToLogsModel(
  dataFrame: DataFrame[],
  intervalMs: number | undefined,
  absoluteRange?: AbsoluteTimeRange,
  queries?: DataQuery[]
): LogsModel {
  const { logSeries } = separateLogsAndMetrics(dataFrame);
  const logsModel = logSeriesToLogsModel(logSeries);

  if (logsModel) {
    // Create histogram metrics from logs using the interval as bucket size for the line count
    if (intervalMs && logsModel.rows.length > 0) {
      const sortedRows = logsModel.rows.sort(sortInAscendingOrder);
      const { visibleRange, bucketSize, visibleRangeMs, requestedRangeMs } = getSeriesProperties(
        sortedRows,
        intervalMs,
        absoluteRange
      );
      logsModel.visibleRange = visibleRange;
      logsModel.series = makeDataFramesForLogs(sortedRows, bucketSize);

      if (logsModel.meta) {
        logsModel.meta = adjustMetaInfo(logsModel, visibleRangeMs, requestedRangeMs);
      }
    } else {
      logsModel.series = [];
    }
    logsModel.queries = queries;
    return logsModel;
  }

  return {
    hasUniqueLabels: false,
    rows: [],
    meta: [],
    series: [],
    queries,
  };
}
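
/*
 * Usage sketch (caller-side; `frames`, `range` and `request` are assumed values
 * from a log query response, not defined in this module):
 *
 *   const logsModel = dataFrameToLogsModel(frames, 200, range, request.targets);
 *   // logsModel.rows   -> individual log lines with level, labels and timestamps
 *   // logsModel.series -> histogram DataFrames bucketed from the 200ms interval
 */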

/**
 * Returns a clamped time range and interval based on the visible logs and the given range.
 *
 * @param sortedRows Log rows from the query response
 * @param intervalMs Dynamic data interval based on available pixel width
 * @param absoluteRange Requested time range
 * @param pxPerBar Default: 20, buckets will be rendered as bars, assuming 10px per histogram bar plus some free space around it
 */
export function getSeriesProperties(
  sortedRows: LogRowModel[],
  intervalMs: number,
  absoluteRange?: AbsoluteTimeRange,
  pxPerBar = 20,
  minimumBucketSize = 1000
) {
  let visibleRange = absoluteRange;
  let resolutionIntervalMs = intervalMs;
  let bucketSize = Math.max(resolutionIntervalMs * pxPerBar, minimumBucketSize);
  let visibleRangeMs;
  let requestedRangeMs;

  // Clamp time range to visible logs otherwise big parts of the graph might look empty
  if (absoluteRange) {
    const earliestTsLogs = sortedRows[0].timeEpochMs;

    requestedRangeMs = absoluteRange.to - absoluteRange.from;
    visibleRangeMs = absoluteRange.to - earliestTsLogs;

    if (visibleRangeMs > 0) {
      // Adjust interval bucket size for potentially shorter visible range
      const clampingFactor = visibleRangeMs / requestedRangeMs;
      resolutionIntervalMs *= clampingFactor;
      // Minimum bucket size of 1s for nicer graphing
      bucketSize = Math.max(Math.ceil(resolutionIntervalMs * pxPerBar), minimumBucketSize);
      // makeDataFramesForLogs() aligns datapoints with time buckets, so we do the same here to not cut off data
      const adjustedEarliest = Math.floor(earliestTsLogs / bucketSize) * bucketSize;
      visibleRange = { from: adjustedEarliest, to: absoluteRange.to };
    } else {
      // We use visibleRangeMs to calculate the range coverage of the received logs. However, some data sources
      // round up the range in requests, so in edge cases received logs can be outside of the requested range and
      // visibleRangeMs < 0. In that case, we set visibleRangeMs to 1 so we can still calculate coverage.
      visibleRangeMs = 1;
    }
  }

  return { bucketSize, visibleRange, visibleRangeMs, requestedRangeMs };
}
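
/*
 * Worked example of the clamping above (illustrative numbers): intervalMs = 100 and
 * pxPerBar = 20 give bucketSize = max(100 * 20, 1000) = 2000ms. If the request covered
 * 1h but the earliest returned log is only 15min old, clampingFactor = 0.25, so
 * resolutionIntervalMs shrinks to 25 and bucketSize becomes max(ceil(25 * 20), 1000) = 1000ms.
 */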

function separateLogsAndMetrics(dataFrames: DataFrame[]) {
  const metricSeries: DataFrame[] = [];
  const logSeries: DataFrame[] = [];

  for (const dataFrame of dataFrames) {
    // We want to show meta stats even if no result was returned. That's why we also push data frames with no fields.
    if (isLogsData(dataFrame) || !dataFrame.fields.length) {
      logSeries.push(dataFrame);
      continue;
    }

    if (dataFrame.length > 0) {
      metricSeries.push(dataFrame);
    }
  }

  return { logSeries, metricSeries };
}

interface LogFields {
  series: DataFrame;
  timeField: FieldWithIndex;
  stringField: FieldWithIndex;
  timeNanosecondField?: FieldWithIndex;
  logLevelField?: FieldWithIndex;
  idField?: FieldWithIndex;
}

/**
 * Converts dataFrames into LogsModel. This involves merging them into one list, sorting them and computing metadata
 * like common labels.
 */
export function logSeriesToLogsModel(logSeries: DataFrame[]): LogsModel | undefined {
  if (logSeries.length === 0) {
    return undefined;
  }

  const allLabels: Labels[] = [];

  // Find the fields we care about and collect all labels
  let allSeries: LogFields[] = [];

  // We are sometimes passing data frames with no fields because we want to calculate correct meta stats.
  // Therefore we need to filter out series with no fields. These series are used only for meta stats calculation.
  const seriesWithFields = logSeries.filter((series) => series.fields.length);

  if (seriesWithFields.length) {
    allSeries = seriesWithFields.map((series) => {
      const fieldCache = new FieldCache(series);
      const stringField = fieldCache.getFirstFieldOfType(FieldType.string);

      if (stringField?.labels) {
        allLabels.push(stringField.labels);
      }

      return {
        series,
        timeField: fieldCache.getFirstFieldOfType(FieldType.time),
        timeNanosecondField: fieldCache.hasFieldWithNameAndType('tsNs', FieldType.time)
          ? fieldCache.getFieldByName('tsNs')
          : undefined,
        stringField,
        logLevelField: fieldCache.getFieldByName('level'),
        idField: getIdField(fieldCache),
      } as LogFields;
    });
  }

  const commonLabels = allLabels.length > 0 ? findCommonLabels(allLabels) : {};

  const rows: LogRowModel[] = [];
  let hasUniqueLabels = false;

  for (const info of allSeries) {
    const { timeField, timeNanosecondField, stringField, logLevelField, idField, series } = info;
    const labels = stringField.labels;
    const uniqueLabels = findUniqueLabels(labels, commonLabels);
    if (Object.keys(uniqueLabels).length > 0) {
      hasUniqueLabels = true;
    }

    let seriesLogLevel: LogLevel | undefined = undefined;
    if (labels && Object.keys(labels).indexOf('level') !== -1) {
      seriesLogLevel = getLogLevelFromKey(labels['level']);
    }

    for (let j = 0; j < series.length; j++) {
      const ts = timeField.values.get(j);
      const time = dateTime(ts);
      const tsNs = timeNanosecondField ? timeNanosecondField.values.get(j) : undefined;
      const timeEpochNs = tsNs ? tsNs : time.valueOf() + '000000';

      // In edge cases, this can be undefined. If undefined, we want to replace it with empty string.
      const messageValue: unknown = stringField.values.get(j) ?? '';
      // This should be string but sometimes isn't (eg elastic) because the dataFrame is not strongly typed.
      const message: string = typeof messageValue === 'string' ? messageValue : JSON.stringify(messageValue);

      const hasAnsi = textUtil.hasAnsiCodes(message);

      const hasUnescapedContent = !!message.match(/\\n|\\t|\\r/);

      const searchWords = series.meta && series.meta.searchWords ? series.meta.searchWords : [];
      const entry = hasAnsi ? ansicolor.strip(message) : message;

      let logLevel = LogLevel.unknown;
      if (logLevelField && logLevelField.values.get(j)) {
        logLevel = getLogLevelFromKey(logLevelField.values.get(j));
      } else if (seriesLogLevel) {
        logLevel = seriesLogLevel;
      } else {
        logLevel = getLogLevel(entry);
      }

      rows.push({
        entryFieldIndex: stringField.index,
        rowIndex: j,
        dataFrame: series,
        logLevel,
        timeFromNow: dateTimeFormatTimeAgo(ts),
        timeEpochMs: time.valueOf(),
        timeEpochNs,
        timeLocal: dateTimeFormat(ts, { timeZone: 'browser' }),
        timeUtc: dateTimeFormat(ts, { timeZone: 'utc' }),
        uniqueLabels,
        hasAnsi,
        hasUnescapedContent,
        searchWords,
        entry,
        raw: message,
        labels: stringField.labels || {},
        uid: idField ? idField.values.get(j) : j.toString(),
      });
    }
  }

  // Meta data to display in status
  const meta: LogsMetaItem[] = [];
  if (size(commonLabels) > 0) {
    meta.push({
      label: COMMON_LABELS,
      value: commonLabels,
      kind: LogsMetaKind.LabelsMap,
    });
  }

  const limits = logSeries.filter((series) => series.meta && series.meta.limit);
  const limitValue = Object.values(
    limits.reduce((acc: any, elem: any) => {
      acc[elem.refId] = elem.meta.limit;
      return acc;
    }, {})
  ).reduce((acc: number, elem: any) => (acc += elem), 0) as number;

  if (limitValue > 0) {
    meta.push({
      label: LIMIT_LABEL,
      value: limitValue,
      kind: LogsMetaKind.Number,
    });
  }

  let totalBytes = 0;
  const queriesVisited: { [refId: string]: boolean } = {};

  // To add just one error message
  let errorMetaAdded = false;

  for (const series of logSeries) {
    const totalBytesKey = series.meta?.custom?.lokiQueryStatKey;
    const { refId } = series; // Stats are per query, keeping track by refId

    if (!errorMetaAdded && series.meta?.custom?.error) {
      meta.push({
        label: '',
        value: series.meta?.custom.error,
        kind: LogsMetaKind.Error,
      });
      errorMetaAdded = true;
    }

    if (refId && !queriesVisited[refId]) {
      if (totalBytesKey && series.meta?.stats) {
        const byteStat = series.meta.stats.find((stat) => stat.displayName === totalBytesKey);
        if (byteStat) {
          totalBytes += byteStat.value;
        }
      }
      queriesVisited[refId] = true;
    }
  }

  if (totalBytes > 0) {
    const { text, suffix } = SIPrefix('B')(totalBytes);
    meta.push({
      label: 'Total bytes processed',
      value: `${text} ${suffix}`,
      kind: LogsMetaKind.String,
    });
  }

  return {
    hasUniqueLabels,
    meta,
    rows,
  };
}

function getIdField(fieldCache: FieldCache): FieldWithIndex | undefined {
  const idFieldNames = ['id'];
  for (const fieldName of idFieldNames) {
    const idField = fieldCache.getFieldByName(fieldName);
    if (idField) {
      return idField;
    }
  }
  return undefined;
}

// Used to add additional information to Line limit meta info
function adjustMetaInfo(logsModel: LogsModel, visibleRangeMs?: number, requestedRangeMs?: number): LogsMetaItem[] {
  let logsModelMeta = [...logsModel.meta!];

  const limitIndex = logsModelMeta.findIndex((meta) => meta.label === LIMIT_LABEL);
  const limit = limitIndex >= 0 && logsModelMeta[limitIndex]?.value;

  if (limit && limit > 0) {
    let metaLimitValue;

    if (limit === logsModel.rows.length && visibleRangeMs && requestedRangeMs) {
      const coverage = ((visibleRangeMs / requestedRangeMs) * 100).toFixed(2);

      metaLimitValue = `${limit} reached, received logs cover ${coverage}% (${rangeUtil.msRangeToTimeString(
        visibleRangeMs
      )}) of your selected time range (${rangeUtil.msRangeToTimeString(requestedRangeMs)})`;
    } else {
      metaLimitValue = `${limit} (${logsModel.rows.length} returned)`;
    }

    logsModelMeta[limitIndex] = {
      label: LIMIT_LABEL,
      value: metaLimitValue,
      kind: LogsMetaKind.String,
    };
  }

  return logsModelMeta;
}

/**
 * Returns field configuration used to render logs volume bars
 */
function getLogVolumeFieldConfig(level: LogLevel, oneLevelDetected: boolean) {
  const name = oneLevelDetected && level === LogLevel.unknown ? 'logs' : level;
  const color = LogLevelColor[level];
  return {
    displayNameFromDS: name,
    color: {
      mode: FieldColorModeId.Fixed,
      fixedColor: color,
    },
    custom: {
      drawStyle: GraphDrawStyle.Bars,
      barAlignment: BarAlignment.Center,
      lineColor: color,
      pointColor: color,
      fillColor: color,
      lineWidth: 1,
      fillOpacity: 100,
      stacking: {
        mode: StackingMode.Normal,
        group: 'A',
      },
    },
  };
}

/**
 * Take multiple data frames, sum up values and group by level.
 * Return a list of data frames, each representing a single level.
 */
export function aggregateRawLogsVolume(
  rawLogsVolume: DataFrame[],
  extractLevel: (dataFrame: DataFrame) => LogLevel
): DataFrame[] {
  const logsVolumeByLevelMap: Partial<Record<LogLevel, DataFrame[]>> = {};
  rawLogsVolume.forEach((dataFrame) => {
    const level = extractLevel(dataFrame);
    if (!logsVolumeByLevelMap[level]) {
      logsVolumeByLevelMap[level] = [];
    }
    logsVolumeByLevelMap[level]!.push(dataFrame);
  });

  return Object.keys(logsVolumeByLevelMap).map((level: string) => {
    return aggregateFields(
      logsVolumeByLevelMap[level as LogLevel]!,
      getLogVolumeFieldConfig(level as LogLevel, Object.keys(logsVolumeByLevelMap).length === 1)
    );
  });
}
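
/*
 * Usage sketch (assumes each raw frame carries a `level` label on its string field;
 * the extractor below is a hypothetical example, not part of this module):
 *
 *   const perLevel = aggregateRawLogsVolume(rawFrames, (frame) => {
 *     const stringField = frame.fields.find((f) => f.type === FieldType.string);
 *     return getLogLevelFromKey(stringField?.labels?.level ?? '');
 *   });
 *   // one DataFrame per detected level, values summed point-by-point by aggregateFields()
 */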

/**
 * Aggregate multiple data frames into a single data frame by adding values.
 * Multiple data frames for the same level are passed here to get a single
 * data frame for a given level. Aggregation by level happens in aggregateRawLogsVolume()
 */
function aggregateFields(dataFrames: DataFrame[], config: FieldConfig): DataFrame {
  const aggregatedDataFrame = new MutableDataFrame();
  if (!dataFrames.length) {
    return aggregatedDataFrame;
  }

  const totalLength = dataFrames[0].length;
  const timeField = new FieldCache(dataFrames[0]).getFirstFieldOfType(FieldType.time);

  if (!timeField) {
    return aggregatedDataFrame;
  }

  aggregatedDataFrame.addField({ name: 'Time', type: FieldType.time }, totalLength);
  aggregatedDataFrame.addField({ name: 'Value', type: FieldType.number, config }, totalLength);

  dataFrames.forEach((dataFrame) => {
    dataFrame.fields.forEach((field) => {
      if (field.type === FieldType.number) {
        for (let pointIndex = 0; pointIndex < totalLength; pointIndex++) {
          const currentValue = aggregatedDataFrame.get(pointIndex).Value;
          const valueToAdd = field.values.get(pointIndex);
          const totalValue =
            currentValue === null && valueToAdd === null ? null : (currentValue || 0) + (valueToAdd || 0);
          aggregatedDataFrame.set(pointIndex, { Value: totalValue, Time: timeField.values.get(pointIndex) });
        }
      }
    });
  });

  return aggregatedDataFrame;
}

const LOGS_VOLUME_QUERY_DEFAULT_TIMEOUT = 60000;

type LogsVolumeQueryOptions<T extends DataQuery> = {
  timeout?: number;
  extractLevel: (dataFrame: DataFrame) => LogLevel;
  targets: T[];
  range: TimeRange;
};

/**
 * Creates an observable, which makes requests to get logs volume and aggregates results.
 */
export function queryLogsVolume<T extends DataQuery>(
  datasource: DataSourceApi<T, any, any>,
  logsVolumeRequest: DataQueryRequest<T>,
  options: LogsVolumeQueryOptions<T>
): Observable<DataQueryResponse> {
  const intervalInfo = getIntervalInfo(logsVolumeRequest.scopedVars);
  logsVolumeRequest.interval = intervalInfo.interval;
  logsVolumeRequest.scopedVars.__interval = { value: intervalInfo.interval, text: intervalInfo.interval };
  if (intervalInfo.intervalMs !== undefined) {
    logsVolumeRequest.intervalMs = intervalInfo.intervalMs;
    logsVolumeRequest.scopedVars.__interval_ms = { value: intervalInfo.intervalMs, text: intervalInfo.intervalMs };
  }

  return new Observable((observer) => {
    let rawLogsVolume: DataFrame[] = [];
    observer.next({
      state: LoadingState.Loading,
      error: undefined,
      data: [],
    });

    const subscription = (datasource.query(logsVolumeRequest) as Observable<DataQueryResponse>)
      .pipe(
        timeout({
          each: options.timeout || LOGS_VOLUME_QUERY_DEFAULT_TIMEOUT,
          with: () => throwError(new Error('Request timed-out. Please make your query more specific and try again.')),
        })
      )
      .subscribe({
        complete: () => {
          const aggregatedLogsVolume = aggregateRawLogsVolume(rawLogsVolume, options.extractLevel);
          if (aggregatedLogsVolume[0]) {
            aggregatedLogsVolume[0].meta = {
              custom: {
                targets: options.targets,
                absoluteRange: { from: options.range.from.valueOf(), to: options.range.to.valueOf() },
              },
            };
          }
          observer.next({
            state: LoadingState.Done,
            error: undefined,
            data: aggregatedLogsVolume,
          });
          observer.complete();
        },
        next: (dataQueryResponse: DataQueryResponse) => {
          rawLogsVolume = rawLogsVolume.concat(dataQueryResponse.data.map(toDataFrame));
        },
        error: (error) => {
          observer.next({
            state: LoadingState.Error,
            error: error,
            data: [],
          });
          observer.error(error);
        },
      });

    return () => {
      subscription?.unsubscribe();
    };
  });
}
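
/*
 * Usage sketch (datasource-side; `MyQuery` and `extractLevel` are hypothetical,
 * and the request wiring is an assumption, not prescribed by this module):
 *
 *   getLogsVolumeDataProvider(request: DataQueryRequest<MyQuery>): Observable<DataQueryResponse> {
 *     return queryLogsVolume(this, { ...request }, {
 *       extractLevel,
 *       range: request.range,
 *       targets: request.targets,
 *     });
 *   }
 */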

function getIntervalInfo(scopedVars: ScopedVars): { interval: string; intervalMs?: number } {
  if (scopedVars.__interval) {
    let intervalMs: number = scopedVars.__interval_ms.value;
    let interval = '';
    if (intervalMs > HOUR) {
      intervalMs = DAY;
      interval = '1d';
    } else if (intervalMs > MINUTE) {
      intervalMs = HOUR;
      interval = '1h';
    } else if (intervalMs > SECOND) {
      intervalMs = MINUTE;
      interval = '1m';
    } else {
      intervalMs = SECOND;
      interval = '1s';
    }

    return { interval, intervalMs };
  } else {
    return { interval: '$__interval' };
  }
}
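
/*
 * Snapping sketch (values follow directly from the thresholds above): an incoming
 * __interval_ms of 30000 (30s) snaps up to '1m', 300000 (5m) snaps up to '1h', and
 * anything above an hour becomes '1d', keeping logs-volume buckets coarse enough to
 * render as bars.
 */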