package loki

import (
	"encoding/json"
	"fmt"
	"hash/fnv"
	"sort"
	"strings"

	"github.com/grafana/grafana-plugin-sdk-go/data"
)

// we adjust the dataframes to be the way the frontend & alerting
// want them.
func adjustFrame(frame *data.Frame, query *lokiQuery, setMetricFrameName bool, logsDataplane bool) error {
	fields := frame.Fields

	if len(fields) < 2 {
		return fmt.Errorf("missing fields in frame")
	}

	// metric-fields have "timefield, valuefield"
	// logs-fields have "labelsfield, timefield, ..."
	secondField := fields[1]

	if secondField.Type() == data.FieldTypeFloat64 {
		return adjustMetricFrame(frame, query, setMetricFrameName)
	}
	return adjustLogsFrame(frame, query, logsDataplane)
}

func adjustMetricFrame(frame *data.Frame, query *lokiQuery, setFrameName bool) error {
	fields := frame.Fields
	// we check that the fields are of the correct type
	if len(fields) != 2 {
		return fmt.Errorf("invalid field length in metric frame. expected 2, got %d", len(fields))
	}

	timeField := fields[0]
	valueField := fields[1]

	if (timeField.Type() != data.FieldTypeTime) || (valueField.Type() != data.FieldTypeFloat64) {
		return fmt.Errorf("invalid field types in metric frame. expected time and float64, got %s and %s", timeField.Type(), valueField.Type())
	}

	labels := getFrameLabels(frame)

	isMetricRange := query.QueryType == QueryTypeRange

	name := formatName(labels, query)
	if setFrameName {
		frame.Name = name
	}

	if frame.Meta == nil {
		frame.Meta = &data.FrameMeta{}
	}

	frame.Meta.Stats = parseStats(frame.Meta.Custom)
	frame.Meta.Custom = nil

	if isMetricRange {
		frame.Meta.ExecutedQueryString = "Expr: " + query.Expr + "\n" + "Step: " + query.Step.String()
	} else {
		frame.Meta.ExecutedQueryString = "Expr: " + query.Expr
	}

	if isMetricRange {
		if timeField.Config == nil {
			timeField.Config = &data.FieldConfig{}
		}
		timeField.Config.Interval = float64(query.Step.Milliseconds())
	}

	if valueField.Config == nil {
		valueField.Config = &data.FieldConfig{}
	}
	valueField.Config.DisplayNameFromDS = name

	return nil
}
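
// As an illustration of adjustMetricFrame above (all values are made up): for a range query
// with Expr `sum(rate({job="loki"}[1m]))`, a Step of 30s and frame labels {job="loki"}, the
// frame ends up with Meta.ExecutedQueryString "Expr: sum(rate({job=\"loki\"}[1m]))\nStep: 30s",
// the time field's Interval set to 30000 (milliseconds) and the value field's
// DisplayNameFromDS set to `{job="loki"}` (when no legend format is set).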

func adjustLogsFrame(frame *data.Frame, query *lokiQuery, dataplane bool) error {
	if dataplane {
		return adjustDataplaneLogsFrame(frame, query)
	}
	return adjustLegacyLogsFrame(frame, query)
}
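
// The dataplane flag selects between the two logs-frame layouts produced below: the legacy
// layout (which marks itself via the custom "frameType" value "LabeledTimeValues") and the
// dataplane layout (which sets Meta.Type to data.FrameTypeLogLines).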

func adjustLegacyLogsFrame(frame *data.Frame, query *lokiQuery) error {
	// we check that the fields are of the correct type and length
	fields := frame.Fields
	if len(fields) != 4 {
		return fmt.Errorf("invalid field length in logs frame. expected 4, got %d", len(fields))
	}

	labelsField := fields[0]
	timeField := fields[1]
	lineField := fields[2]
	stringTimeField := fields[3]

	if (timeField.Type() != data.FieldTypeTime) || (lineField.Type() != data.FieldTypeString) || (labelsField.Type() != data.FieldTypeJSON) || (stringTimeField.Type() != data.FieldTypeString) {
		return fmt.Errorf("invalid field types in logs frame. expected time, string, json and string, got %s, %s, %s and %s", timeField.Type(), lineField.Type(), labelsField.Type(), stringTimeField.Type())
	}

	if (timeField.Len() != lineField.Len()) || (timeField.Len() != labelsField.Len()) || (timeField.Len() != stringTimeField.Len()) {
		return fmt.Errorf("mismatched field lengths in logs frame. expected all to be equal, got %d, %d, %d and %d", timeField.Len(), lineField.Len(), labelsField.Len(), stringTimeField.Len())
	}

	// this returns an error when the field lengths do not match
	_, err := frame.RowLen()
	if err != nil {
		return err
	}

	labelsField.Name = "labels"
	stringTimeField.Name = "tsNs"

	if frame.Meta == nil {
		frame.Meta = &data.FrameMeta{}
	}

	frame.Meta.Stats = parseStats(frame.Meta.Custom)

	// TODO: when we get a real frame-type in grafana-plugin-sdk-go,
	// move this to frame.Meta.FrameType
	frame.Meta.Custom = map[string]string{
		"frameType": "LabeledTimeValues",
	}

	frame.Meta.ExecutedQueryString = "Expr: " + query.Expr

	// we also need to send the nanosecond-precision timestamp to the browser:
	// timestamps usually become JavaScript Date objects there, which only have
	// millisecond precision, so we send a separate timestamp-as-string field too.
	// it is provided by the loki-json-parser code.
	idField, err := makeIdField(stringTimeField, lineField, labelsField, query.RefID)
	if err != nil {
		return err
	}
	frame.Fields = append(frame.Fields, idField)

	return nil
}
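
// The adjusted legacy logs frame keeps the incoming field order and ends up with five fields:
// the JSON labels field (renamed to "labels"), the time field, the line field, the
// nanosecond-timestamp string field (renamed to "tsNs") and the generated "id" field appended
// at the end.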

func adjustDataplaneLogsFrame(frame *data.Frame, query *lokiQuery) error {
	// we check that the fields are of the correct type and length
	fields := frame.Fields
	if len(fields) != 4 {
		return fmt.Errorf("invalid field length in logs frame. expected 4, got %d", len(fields))
	}

	labelsField := fields[0]
	timeField := fields[1]
	lineField := fields[2]
	stringTimeField := fields[3]

	if (timeField.Type() != data.FieldTypeTime) || (lineField.Type() != data.FieldTypeString) || (labelsField.Type() != data.FieldTypeJSON) || (stringTimeField.Type() != data.FieldTypeString) {
		return fmt.Errorf("invalid field types in logs frame. expected time, string, json and string, got %s, %s, %s and %s", timeField.Type(), lineField.Type(), labelsField.Type(), stringTimeField.Type())
	}

	if (timeField.Len() != lineField.Len()) || (timeField.Len() != labelsField.Len()) || (timeField.Len() != stringTimeField.Len()) {
		return fmt.Errorf("mismatched field lengths in logs frame. expected all to be equal, got %d, %d, %d and %d", timeField.Len(), lineField.Len(), labelsField.Len(), stringTimeField.Len())
	}

	// this returns an error when the field lengths do not match
	_, err := frame.RowLen()
	if err != nil {
		return err
	}

	timeField.Name = "timestamp"
	labelsField.Name = "attributes"
	lineField.Name = "body"

	if frame.Meta == nil {
		frame.Meta = &data.FrameMeta{}
	}

	frame.Meta.Stats = parseStats(frame.Meta.Custom)
	frame.Meta.Custom = nil
	frame.Meta.Type = data.FrameTypeLogLines
	frame.Meta.ExecutedQueryString = "Expr: " + query.Expr

	idField, err := makeIdField(stringTimeField, lineField, labelsField, query.RefID)
	if err != nil {
		return err
	}

	frame.Fields = data.Fields{labelsField, timeField, lineField, idField}

	return nil
}
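
// The adjusted dataplane logs frame is reordered to attributes (labels), timestamp, body (line)
// and id; the nanosecond-timestamp string field is only used to build the ids and is not part
// of the returned frame.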

func calculateCheckSum(time string, line string, labels []byte) (string, error) {
	input := []byte(line + "_")
	input = append(input, labels...)
	hash := fnv.New32()
	_, err := hash.Write(input)
	if err != nil {
		return "", err
	}
	return fmt.Sprintf("%s_%x", time, hash.Sum32()), nil
}
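
// exampleLogRowID is a minimal, illustrative sketch (not part of the original datasource code)
// showing the shape of the ids produced by calculateCheckSum: "<nanosecond timestamp>_<fnv32
// hash in hex>". The timestamp, line and labels here are made up.
func exampleLogRowID() (string, error) {
	// returns something like "1645029699385587000_d1b2c3a4" (the hash part depends on the input)
	return calculateCheckSum("1645029699385587000", `level=error msg="something failed"`, []byte(`{"job":"loki"}`))
}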

func makeIdField(stringTimeField *data.Field, lineField *data.Field, labelsField *data.Field, refId string) (*data.Field, error) {
	length := stringTimeField.Len()

	ids := make([]string, length)

	checksums := make(map[string]int)

	for i := 0; i < length; i++ {
		time := stringTimeField.At(i).(string)
		line := lineField.At(i).(string)
		labels := labelsField.At(i).(json.RawMessage)

		sum, err := calculateCheckSum(time, line, labels)
		if err != nil {
			return nil, err
		}

		sumCount := checksums[sum]
		idSuffix := ""
		if sumCount > 0 {
			// we had this checksum already, we need to do something to make it unique
			idSuffix = fmt.Sprintf("_%d", sumCount)
		}
		checksums[sum] = sumCount + 1

		ids[i] = sum + idSuffix
	}
	return data.NewField("id", nil, ids), nil
}
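
// Rows that hash to the same checksum (identical timestamp, line and labels) get a numeric
// suffix, so duplicates produce ids of the form "<sum>", "<sum>_1", "<sum>_2", keeping every
// id in the frame unique. The refId parameter is currently not part of the generated id.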

func formatNamePrometheusStyle(labels map[string]string) string {
	parts := make([]string, 0, len(labels))

	for k, v := range labels {
		parts = append(parts, fmt.Sprintf("%s=%q", k, v))
	}

	sort.Strings(parts)

	return fmt.Sprintf("{%s}", strings.Join(parts, ", "))
}
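
// examplePrometheusStyleName is a minimal, illustrative sketch (not part of the original
// datasource code): with these made-up labels, formatNamePrometheusStyle returns
// `{job="loki", level="error"}` (keys sorted alphabetically, values quoted).
func examplePrometheusStyleName() string {
	return formatNamePrometheusStyle(map[string]string{"level": "error", "job": "loki"})
}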

// If a legend format (a custom name or pattern used instead of the time series name) is set,
// use it to format the name.
func formatName(labels map[string]string, query *lokiQuery) string {
	if query.LegendFormat == "" {
		return formatNamePrometheusStyle(labels)
	}

	result := legendFormat.ReplaceAllFunc([]byte(query.LegendFormat), func(in []byte) []byte {
		labelName := strings.Replace(string(in), "{{", "", 1)
		labelName = strings.Replace(labelName, "}}", "", 1)
		labelName = strings.TrimSpace(labelName)
		if val, exists := labels[labelName]; exists {
			return []byte(val)
		}
		return []byte{}
	})

	return string(result)
}
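
// exampleLegendName is a minimal, illustrative sketch (not part of the original datasource
// code), assuming the legendFormat pattern defined elsewhere in this package matches
// "{{label}}" placeholders: with the made-up legend format "{{job}} - {{level}}" and these
// labels it returns "loki - error"; placeholders whose label is missing are replaced with an
// empty string.
func exampleLegendName() string {
	return formatName(
		map[string]string{"job": "loki", "level": "error"},
		&lokiQuery{LegendFormat: "{{job}} - {{level}}"},
	)
}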

func getFrameLabels(frame *data.Frame) map[string]string {
	labels := make(map[string]string)

	for _, field := range frame.Fields {
		for k, v := range field.Labels {
			labels[k] = v
		}
	}

	return labels
}
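
// parseStats reads the query statistics that Loki attaches to a response. The custom metadata
// is expected to be shaped roughly like the illustrative structure below (already decoded into
// maps); sections that are missing or of an unexpected type are simply skipped:
//
//	{
//	  "stats": {
//	    "summary":  { "bytesProcessedPerSecond": ..., "execTime": ..., ... },
//	    "store":    { "totalChunksRef": ..., "decompressedBytes": ..., ... },
//	    "ingester": { "totalReached": ..., "totalLinesSent": ..., ... }
//	  }
//	}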
func parseStats(frameMetaCustom interface{}) []data.QueryStat {
	customMap, ok := frameMetaCustom.(map[string]interface{})
	if !ok {
		return nil
	}
	rawStats, ok := customMap["stats"].(map[string]interface{})
	if !ok {
		return nil
	}

	var stats []data.QueryStat

	summary, ok := rawStats["summary"].(map[string]interface{})
	if ok {
		stats = append(stats,
			makeStat("Summary: bytes processed per second", summary["bytesProcessedPerSecond"], "Bps"),
			makeStat("Summary: lines processed per second", summary["linesProcessedPerSecond"], ""),
			makeStat("Summary: total bytes processed", summary["totalBytesProcessed"], "decbytes"),
			makeStat("Summary: total lines processed", summary["totalLinesProcessed"], ""),
			makeStat("Summary: exec time", summary["execTime"], "s"))
	}

	store, ok := rawStats["store"].(map[string]interface{})
	if ok {
		stats = append(stats,
			makeStat("Store: total chunks ref", store["totalChunksRef"], ""),
			makeStat("Store: total chunks downloaded", store["totalChunksDownloaded"], ""),
			makeStat("Store: chunks download time", store["chunksDownloadTime"], "s"),
			makeStat("Store: head chunk bytes", store["headChunkBytes"], "decbytes"),
			makeStat("Store: head chunk lines", store["headChunkLines"], ""),
			makeStat("Store: decompressed bytes", store["decompressedBytes"], "decbytes"),
			makeStat("Store: decompressed lines", store["decompressedLines"], ""),
			makeStat("Store: compressed bytes", store["compressedBytes"], "decbytes"),
			makeStat("Store: total duplicates", store["totalDuplicates"], ""))
	}

	ingester, ok := rawStats["ingester"].(map[string]interface{})
	if ok {
		stats = append(stats,
			makeStat("Ingester: total reached", ingester["totalReached"], ""),
			makeStat("Ingester: total chunks matched", ingester["totalChunksMatched"], ""),
			makeStat("Ingester: total batches", ingester["totalBatches"], ""),
			makeStat("Ingester: total lines sent", ingester["totalLinesSent"], ""),
			makeStat("Ingester: head chunk bytes", ingester["headChunkBytes"], "decbytes"),
			makeStat("Ingester: head chunk lines", ingester["headChunkLines"], ""),
			makeStat("Ingester: decompressed bytes", ingester["decompressedBytes"], "decbytes"),
			makeStat("Ingester: decompressed lines", ingester["decompressedLines"], ""),
			makeStat("Ingester: compressed bytes", ingester["compressedBytes"], "decbytes"),
			makeStat("Ingester: total duplicates", ingester["totalDuplicates"], ""))
	}

	return stats
}

func makeStat(name string, interfaceValue interface{}, unit string) data.QueryStat {
	var value float64
	switch v := interfaceValue.(type) {
	case float64:
		value = v
	case int:
		value = float64(v)
	}

	return data.QueryStat{
		FieldConfig: data.FieldConfig{
			DisplayName: name,
			Unit:        unit,
		},
		Value: value,
	}
}
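
// makeStat above tolerates both float64 (what encoding/json produces for numbers) and int
// values; anything else falls back to a zero value rather than failing the query.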