loki: use single-dataframe format on the backend (#47069)

grafana/grafana commit 68511e7712 (parent 201557c6fc)
@@ -2,6 +2,7 @@ package loki
 
 import (
     "fmt"
+    "hash/fnv"
     "sort"
     "strings"
     "time"
@@ -11,14 +12,42 @@ import (
 
 // we adjust the dataframes to be the way frontend & alerting
 // wants them.
-func adjustFrame(frame *data.Frame, query *lokiQuery) *data.Frame {
+func adjustFrame(frame *data.Frame, query *lokiQuery) error {
+    fields := frame.Fields
+
+    if len(fields) < 2 {
+        return fmt.Errorf("missing fields in frame")
+    }
+
+    // metric-fields have "timefield, valuefield"
+    // logs-fields have "labelsfield, timefield, ..."
+
+    secondField := fields[1]
+
+    if secondField.Type() == data.FieldTypeFloat64 {
+        return adjustMetricFrame(frame, query)
+    } else {
+        return adjustLogsFrame(frame, query)
+    }
+}
+
+func adjustMetricFrame(frame *data.Frame, query *lokiQuery) error {
+    fields := frame.Fields
+    // we check if the fields are of correct type
+    if len(fields) != 2 {
+        return fmt.Errorf("invalid fields in metric frame")
+    }
+
+    timeField := fields[0]
+    valueField := fields[1]
+
+    if (timeField.Type() != data.FieldTypeTime) || (valueField.Type() != data.FieldTypeFloat64) {
+        return fmt.Errorf("invalid fields in metric frame")
+    }
+
     labels := getFrameLabels(frame)
 
-    timeFields, nonTimeFields := partitionFields(frame)
-
-    isMetricFrame := nonTimeFields[0].Type() != data.FieldTypeString
-
-    isMetricRange := isMetricFrame && query.QueryType == QueryTypeRange
+    isMetricRange := query.QueryType == QueryTypeRange
 
     name := formatName(labels, query)
     frame.Name = name
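
The dispatch above needs no extra metadata: a metric frame arrives as "time, value" and a logs frame as "labels, time, line", so the type of the second field is enough to tell them apart. A standalone sketch of that check (illustrative only, not part of the commit; the frames are made-up examples built with the plugin SDK):

package main

import (
    "fmt"
    "time"

    "github.com/grafana/grafana-plugin-sdk-go/data"
)

// kind applies the same heuristic as adjustFrame:
// a float64 second field means a metric frame, anything else is logs.
func kind(frame *data.Frame) string {
    if frame.Fields[1].Type() == data.FieldTypeFloat64 {
        return "metric"
    }
    return "logs"
}

func main() {
    now := time.Now()

    metric := data.NewFrame("",
        data.NewField("time", nil, []time.Time{now}),
        data.NewField("value", nil, []float64{1.5}),
    )

    logs := data.NewFrame("",
        data.NewField("labels", nil, []string{`[["level","info"]]`}),
        data.NewField("time", nil, []time.Time{now}),
        data.NewField("line", nil, []string{"hello"}),
    )

    fmt.Println(kind(metric), kind(logs)) // metric logs
}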
@@ -33,48 +62,109 @@ func adjustFrame(frame *data.Frame, query *lokiQuery) *data.Frame {
         frame.Meta.ExecutedQueryString = "Expr: " + query.Expr
     }
 
-    for _, field := range timeFields {
-        field.Name = "time"
-
-        if isMetricRange {
-            if field.Config == nil {
-                field.Config = &data.FieldConfig{}
-            }
-            field.Config.Interval = float64(query.Step.Milliseconds())
-        }
-    }
-
-    for _, field := range nonTimeFields {
-        field.Name = "value"
-        if field.Config == nil {
-            field.Config = &data.FieldConfig{}
-        }
-        field.Config.DisplayNameFromDS = name
-    }
-
-    // for streams-dataframes, we need to send to the browser the nanosecond-precision timestamp too.
+    if isMetricRange {
+        if timeField.Config == nil {
+            timeField.Config = &data.FieldConfig{}
+        }
+        timeField.Config.Interval = float64(query.Step.Milliseconds())
+    }
+
+    if valueField.Config == nil {
+        valueField.Config = &data.FieldConfig{}
+    }
+    valueField.Config.DisplayNameFromDS = name
+
+    return nil
+}
+
+func adjustLogsFrame(frame *data.Frame, query *lokiQuery) error {
+    // we check if the fields are of correct type and length
+    fields := frame.Fields
+    if len(fields) != 3 {
+        return fmt.Errorf("invalid fields in logs frame")
+    }
+
+    labelsField := fields[0]
+    timeField := fields[1]
+    lineField := fields[2]
+
+    if (timeField.Type() != data.FieldTypeTime) || (lineField.Type() != data.FieldTypeString) || (labelsField.Type() != data.FieldTypeString) {
+        return fmt.Errorf("invalid fields in metric frame")
+    }
+
+    if (timeField.Len() != lineField.Len()) || (timeField.Len() != labelsField.Len()) {
+        return fmt.Errorf("invalid fields in metric frame")
+    }
+
+    if frame.Meta == nil {
+        frame.Meta = &data.FrameMeta{}
+    }
+
+    frame.Meta.ExecutedQueryString = "Expr: " + query.Expr
+
+    // we need to send to the browser the nanosecond-precision timestamp too.
     // usually timestamps become javascript-date-objects in the browser automatically, which only
     // have millisecond-precision.
     // so we send a separate timestamp-as-string field too.
-    if !isMetricFrame {
-        stringTimeField := makeStringTimeField(timeFields[0])
-        frame.Fields = append(frame.Fields, stringTimeField)
-    }
-
-    return frame
+    stringTimeField := makeStringTimeField(timeField)
+
+    idField, err := makeIdField(stringTimeField, lineField, labelsField, frame.RefID)
+    if err != nil {
+        return err
+    }
+
+    frame.Fields = append(frame.Fields, stringTimeField, idField)
+
+    return nil
 }
 
-func makeStringTimeField(field *data.Field) *data.Field {
-    length := field.Len()
+func makeStringTimeField(timeField *data.Field) *data.Field {
+    length := timeField.Len()
     stringTimestamps := make([]string, length)
 
     for i := 0; i < length; i++ {
-        if v, ok := field.ConcreteAt(i); ok {
-            nsNumber := v.(time.Time).UnixNano()
-            stringTimestamps[i] = fmt.Sprintf("%d", nsNumber)
-        }
+        nsNumber := timeField.At(i).(time.Time).UnixNano()
+        stringTimestamps[i] = fmt.Sprintf("%d", nsNumber)
     }
 
-    return data.NewField("tsNs", field.Labels.Copy(), stringTimestamps)
+    return data.NewField("tsNs", timeField.Labels.Copy(), stringTimestamps)
+}
+
+func calculateCheckSum(time string, line string, labels string) (string, error) {
+    input := []byte(line + "_" + labels)
+    hash := fnv.New32()
+    _, err := hash.Write(input)
+    if err != nil {
+        return "", err
+    }
+    return fmt.Sprintf("%s_%x", time, hash.Sum32()), nil
+}
+
+func makeIdField(stringTimeField *data.Field, lineField *data.Field, labelsField *data.Field, refId string) (*data.Field, error) {
+    length := stringTimeField.Len()
+
+    ids := make([]string, length)
+
+    checksums := make(map[string]int)
+
+    for i := 0; i < length; i++ {
+        time := stringTimeField.At(i).(string)
+        line := lineField.At(i).(string)
+        labels := labelsField.At(i).(string)
+
+        sum, err := calculateCheckSum(time, line, labels)
+        if err != nil {
+            return nil, err
+        }
+
+        sumCount := checksums[sum]
+
+        idSuffix := ""
+        if sumCount > 0 {
+            // we had this checksum already, we need to do something to make it unique
+            idSuffix = fmt.Sprintf("_%d", sumCount)
+        }
+
+        checksums[sum] = sumCount + 1
+
+        ids[i] = sum + idSuffix + "_" + refId
+    }
+
+    return data.NewField("id", nil, ids), nil
 }
 
 func formatNamePrometheusStyle(labels map[string]string) string {
@@ -119,18 +209,3 @@ func getFrameLabels(frame *data.Frame) map[string]string {
 
     return labels
 }
-
-func partitionFields(frame *data.Frame) ([]*data.Field, []*data.Field) {
-    var timeFields []*data.Field
-    var nonTimeFields []*data.Field
-
-    for _, field := range frame.Fields {
-        if field.Type() == data.FieldTypeTime {
-            timeFields = append(timeFields, field)
-        } else {
-            nonTimeFields = append(nonTimeFields, field)
-        }
-    }
-
-    return timeFields, nonTimeFields
-}
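
Taken together, calculateCheckSum and makeIdField above give every log row an id of the form <tsNs>_<fnv32 checksum of line + "_" + labels>, with a _<n> suffix when the same checksum repeats and the query's refId appended last. A standard-library-only sketch of the same scheme (illustrative; the helper name and sample values are invented, not from the commit):

package main

import (
    "fmt"
    "hash/fnv"
)

// rowId mirrors calculateCheckSum + makeIdField for a single row.
func rowId(tsNs, line, labels, refId string, seen map[string]int) string {
    hash := fnv.New32()
    hash.Write([]byte(line + "_" + labels)) // Write on a hash never fails
    sum := fmt.Sprintf("%s_%x", tsNs, hash.Sum32())

    suffix := ""
    if n := seen[sum]; n > 0 {
        // duplicate line+labels at the same timestamp: disambiguate with a counter
        suffix = fmt.Sprintf("_%d", n)
    }
    seen[sum]++

    return sum + suffix + "_" + refId
}

func main() {
    seen := map[string]int{}
    // two identical rows: the second id gets a "_1" before the refId
    fmt.Println(rowId("1641092705000000006", "line2", `{"level":"error"}`, "A", seen))
    fmt.Println(rowId("1641092705000000006", "line2", `{"level":"error"}`, "A", seen))
}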
@@ -37,7 +37,56 @@ func TestFormatName(t *testing.T) {
 }
 
 func TestAdjustFrame(t *testing.T) {
-    t.Run("response should be parsed normally", func(t *testing.T) {
+    t.Run("logs-frame metadata should be set correctly", func(t *testing.T) {
+        frame := data.NewFrame("",
+            data.NewField("labels", nil, []string{
+                `{"level":"info"}`,
+                `{"level":"error"}`,
+                `{"level":"error"}`,
+                `{"level":"info"}`,
+            }),
+            data.NewField("time", nil, []time.Time{
+                time.Date(2022, 1, 2, 3, 4, 5, 6, time.UTC),
+                time.Date(2022, 1, 2, 3, 5, 5, 6, time.UTC),
+                time.Date(2022, 1, 2, 3, 5, 5, 6, time.UTC),
+                time.Date(2022, 1, 2, 3, 6, 5, 6, time.UTC),
+            }),
+            data.NewField("line", nil, []string{"line1", "line2", "line2", "line3"}),
+        )
+
+        frame.RefID = "A"
+
+        query := &lokiQuery{
+            Expr:      `{type="important"}`,
+            QueryType: QueryTypeRange,
+        }
+
+        err := adjustFrame(frame, query)
+        require.NoError(t, err)
+
+        fields := frame.Fields
+
+        require.Equal(t, 5, len(fields))
+        tsNsField := fields[3]
+        require.Equal(t, "tsNs", tsNsField.Name)
+        require.Equal(t, data.FieldTypeString, tsNsField.Type())
+        require.Equal(t, 4, tsNsField.Len())
+        require.Equal(t, "1641092645000000006", tsNsField.At(0))
+        require.Equal(t, "1641092705000000006", tsNsField.At(1))
+        require.Equal(t, "1641092705000000006", tsNsField.At(2))
+        require.Equal(t, "1641092765000000006", tsNsField.At(3))
+
+        idField := fields[4]
+        require.Equal(t, "id", idField.Name)
+        require.Equal(t, data.FieldTypeString, idField.Type())
+        require.Equal(t, 4, idField.Len())
+        require.Equal(t, "1641092645000000006_a36f4e1b_A", idField.At(0))
+        require.Equal(t, "1641092705000000006_1d77c9ca_A", idField.At(1))
+        require.Equal(t, "1641092705000000006_1d77c9ca_1_A", idField.At(2))
+        require.Equal(t, "1641092765000000006_948c1a7d_A", idField.At(3))
+    })
+
+    t.Run("logs-frame id and string-time fields should be created", func(t *testing.T) {
         field1 := data.NewField("", nil, make([]time.Time, 0))
         field2 := data.NewField("", nil, make([]float64, 0))
         field2.Labels = data.Labels{"app": "Application", "tag2": "tag2"}
@@ -52,7 +101,8 @@ func TestAdjustFrame(t *testing.T) {
             Step: time.Second * 42,
         }
 
-        adjustFrame(frame, query)
+        err := adjustFrame(frame, query)
+        require.NoError(t, err)
 
         require.Equal(t, frame.Name, "legend Application")
         require.Equal(t, frame.Meta.ExecutedQueryString, "Expr: up(ALERTS)\nStep: 42s")
@@ -72,7 +122,8 @@ func TestAdjustFrame(t *testing.T) {
         frame := data.NewFrame("test", field1, field2)
         frame.SetMeta(&data.FrameMeta{Type: data.FrameTypeTimeSeriesMany})
 
-        adjustFrame(frame, query)
+        err := adjustFrame(frame, query)
+        require.NoError(t, err)
 
         // to keep the test simple, we assume the
         // first field is the time-field
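
The expected tsNs strings in the test are just UnixNano renderings of the input timestamps; one of them can be checked with a few lines of Go (illustrative, not from the commit):

package main

import (
    "fmt"
    "time"
)

func main() {
    // 2022-01-02 03:04:05.000000006 UTC, as used in the test above
    t := time.Date(2022, 1, 2, 3, 4, 5, 6, time.UTC)
    fmt.Println(t.UnixNano()) // 1641092645000000006
}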
@@ -2,11 +2,13 @@ package loki
 
 import (
     "fmt"
+    "sort"
     "time"
 
     "github.com/grafana/grafana-plugin-sdk-go/data"
     "github.com/grafana/loki/pkg/loghttp"
     "github.com/grafana/loki/pkg/logqlmodel/stats"
+    jsoniter "github.com/json-iterator/go"
 )
 
 func parseResponse(value *loghttp.QueryResponse, query *lokiQuery) (data.Frames, error) {
@@ -17,7 +19,10 @@ func parseResponse(value *loghttp.QueryResponse, query *lokiQuery) (data.Frames,
     }
 
     for _, frame := range frames {
-        adjustFrame(frame, query)
+        err = adjustFrame(frame, query)
+        if err != nil {
+            return nil, err
+        }
     }
 
     return frames, nil
@@ -31,7 +36,7 @@ func lokiResponseToDataFrames(value *loghttp.QueryResponse, query *lokiQuery) (d
     case loghttp.Vector:
         return lokiVectorToDataFrames(res, query, stats), nil
     case loghttp.Streams:
-        return lokiStreamsToDataFrames(res, query, stats), nil
+        return lokiStreamsToDataFrames(res, query, stats)
     default:
         return nil, fmt.Errorf("resultType %T not supported{", res)
     }
@@ -54,8 +59,8 @@ func lokiMatrixToDataFrames(matrix loghttp.Matrix, query *lokiQuery, stats []dat
         values = append(values, float64(k.Value))
     }
 
-    timeField := data.NewField("", nil, timeVector)
-    valueField := data.NewField("", tags, values)
+    timeField := data.NewField("time", nil, timeVector)
+    valueField := data.NewField("value", tags, values)
 
     frame := data.NewFrame("", timeField, valueField)
     frame.SetMeta(&data.FrameMeta{
@@ -79,8 +84,8 @@ func lokiVectorToDataFrames(vector loghttp.Vector, query *lokiQuery, stats []dat
     for k, v := range v.Metric {
         tags[string(k)] = string(v)
     }
-    timeField := data.NewField("", nil, timeVector)
-    valueField := data.NewField("", tags, values)
+    timeField := data.NewField("time", nil, timeVector)
+    valueField := data.NewField("value", tags, values)
 
     frame := data.NewFrame("", timeField, valueField)
     frame.SetMeta(&data.FrameMeta{
@@ -93,35 +98,61 @@ func lokiVectorToDataFrames(vector loghttp.Vector, query *lokiQuery, stats []dat
     return frames
 }
 
-func lokiStreamsToDataFrames(streams loghttp.Streams, query *lokiQuery, stats []data.QueryStat) data.Frames {
-    frames := data.Frames{}
+// we serialize the labels as an ordered list of pairs
+func labelsToString(labels data.Labels) (string, error) {
+    keys := make([]string, 0, len(labels))
+    for k := range labels {
+        keys = append(keys, k)
+    }
+    sort.Strings(keys)
+
+    labelArray := make([][2]string, 0, len(labels))
+
+    for _, k := range keys {
+        pair := [2]string{k, labels[k]}
+        labelArray = append(labelArray, pair)
+    }
+
+    bytes, err := jsoniter.Marshal(labelArray)
+    if err != nil {
+        return "", err
+    }
+
+    return string(bytes), nil
+}
+
+func lokiStreamsToDataFrames(streams loghttp.Streams, query *lokiQuery, stats []data.QueryStat) (data.Frames, error) {
+    var timeVector []time.Time
+    var values []string
+    var labelsVector []string
 
     for _, v := range streams {
-        tags := make(map[string]string, len(v.Labels))
-        timeVector := make([]time.Time, 0, len(v.Entries))
-        values := make([]string, 0, len(v.Entries))
-
-        for k, v := range v.Labels {
-            tags[k] = v
+        labelsText, err := labelsToString(v.Labels.Map())
+        if err != nil {
+            return nil, err
         }
 
         for _, k := range v.Entries {
             timeVector = append(timeVector, k.Timestamp.UTC())
             values = append(values, k.Line)
+            labelsVector = append(labelsVector, labelsText)
         }
-
-        timeField := data.NewField("", nil, timeVector)
-        valueField := data.NewField("", tags, values)
-
-        frame := data.NewFrame("", timeField, valueField)
-        frame.SetMeta(&data.FrameMeta{
-            Stats: stats,
-        })
-
-        frames = append(frames, frame)
     }
 
-    return frames
+    timeField := data.NewField("ts", nil, timeVector)
+    valueField := data.NewField("line", nil, values)
+    labelsField := data.NewField("labels", nil, labelsVector)
+    labelsField.Config = &data.FieldConfig{
+        // we should have a native json-field-type
+        Custom: map[string]interface{}{"json": true},
+    }
+
+    frame := data.NewFrame("", labelsField, timeField, valueField)
+    frame.SetMeta(&data.FrameMeta{
+        Stats: stats,
+    })
+
+    return data.Frames{frame}, nil
 }
 
 func parseStats(result stats.Result) []data.QueryStat {
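
Because labelsToString sorts the keys before building the pair list, a given label set always serializes to the same string. A dependency-free sketch of the same serialization (illustrative; it uses encoding/json where the backend uses jsoniter):

package main

import (
    "encoding/json"
    "fmt"
    "sort"
)

func labelsToString(labels map[string]string) (string, error) {
    // sort the keys so the output is deterministic
    keys := make([]string, 0, len(labels))
    for k := range labels {
        keys = append(keys, k)
    }
    sort.Strings(keys)

    pairs := make([][2]string, 0, len(labels))
    for _, k := range keys {
        pairs = append(pairs, [2]string{k, labels[k]})
    }

    bytes, err := json.Marshal(pairs)
    if err != nil {
        return "", err
    }
    return string(bytes), nil
}

func main() {
    text, err := labelsToString(map[string]string{"level": "info", "code": "41"})
    if err != nil {
        panic(err)
    }
    fmt.Println(text) // [["code","41"],["level","info"]]
}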
pkg/tsdb/loki/testdata/streams_simple.golden.txt (vendored, 152 lines changed)
File diff suppressed because one or more lines are too long

pkg/tsdb/loki/testdata/streams_simple.json (vendored, 12 lines changed)
@@ -5,8 +5,8 @@
     "result": [
         {
             "stream": {
-                "level": "error",
-                "location": "moon"
+                "code": "one\",",
+                "location": "moon🌙"
             },
             "values": [
                 [
@@ -17,8 +17,8 @@
         },
         {
             "stream": {
-                "level": "info",
-                "location": "moon"
+                "code": "\",two",
+                "location": "moon🌙"
             },
             "values": [
                 [
@@ -29,6 +29,10 @@
                 "1645030246277587968",
                 "log line info 2"
             ],
+            [
+                "1645030246277587968",
+                "log line info 2"
+            ],
             [
                 "1645030245539423744",
                 "log line info 3"
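
The updated fixture deliberately mixes in label values with embedded quotes and non-ASCII characters, plus a fully duplicated log row, to exercise the JSON label encoding and the duplicate-id suffixing. A small sketch of how such a value survives the ordered-pairs encoding (illustrative, not from the commit):

package main

import (
    "encoding/json"
    "fmt"
)

func main() {
    // a label value containing a quote and a comma, like the fixture's "one\","
    pairs := [][2]string{{"code", `one",`}, {"location", "moon🌙"}}

    bytes, err := json.Marshal(pairs)
    if err != nil {
        panic(err)
    }
    fmt.Println(string(bytes)) // [["code","one\","],["location","moon🌙"]]
}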
@@ -1,100 +1,95 @@
-import { ArrayVector, CoreApp, DataFrame, DataQueryRequest, DataQueryResponse, FieldType, toUtc } from '@grafana/data';
+import { ArrayVector, DataFrame, DataQueryResponse, FieldType } from '@grafana/data';
 import { cloneDeep } from 'lodash';
 
 import { transformBackendResult } from './backendResultTransformer';
-import { LokiQuery } from './types';
 
-const frame: DataFrame = {
-  name: 'frame1',
+const LOKI_EXPR = '{level="info"} |= "thing1"';
+const inputFrame: DataFrame = {
   refId: 'A',
   meta: {
-    executedQueryString: 'something1',
+    executedQueryString: LOKI_EXPR,
   },
   fields: [
     {
-      name: 'Time',
+      name: 'time',
       type: FieldType.time,
       config: {},
-      values: new ArrayVector([1645029699311, 1645029699312, 1645029699313]),
+      values: new ArrayVector([1645030244810, 1645030247027, 1645030246277, 1645030245539, 1645030244091]),
     },
     {
-      name: 'Value',
+      name: 'value',
+      type: FieldType.string,
+      config: {},
+      values: new ArrayVector(['line1', 'line2', 'line3', 'line4', 'line5']),
+    },
+    {
+      name: 'labels',
       type: FieldType.string,
-      labels: {
-        level: 'error',
-        location: 'moon',
-        protocol: 'http',
-      },
       config: {
-        displayNameFromDS: '{level="error", location="moon", protocol="http"}',
+        custom: {
+          json: true,
+        },
       },
-      values: new ArrayVector(['line1', 'line2', 'line3']),
+      values: new ArrayVector([
+        `[["level", "info"],["code", "41🌙"]]`,
+        `[["level", "error"],["code", "41🌙"]]`,
+        `[["level", "error"],["code", "43🌙"]]`,
+        `[["level", "error"],["code", "41🌙"]]`,
+        `[["level", "info"],["code", "41🌙"]]`,
+      ]),
     },
     {
       name: 'tsNs',
+      type: FieldType.time,
+      config: {},
+      values: new ArrayVector([
+        '1645030244810757120',
+        '1645030247027735040',
+        '1645030246277587968',
+        '1645030245539423744',
+        '1645030244091700992',
+      ]),
+    },
+    {
+      name: 'id',
       type: FieldType.string,
       config: {},
-      values: new ArrayVector(['1645029699311000500', '1645029699312000500', '1645029699313000500']),
+      values: new ArrayVector(['id1', 'id2', 'id3', 'id4', 'id5']),
     },
   ],
-  length: 3,
+  length: 5,
 };
 
-function makeRequest(expr: string): DataQueryRequest<LokiQuery> {
-  return {
-    requestId: 'test1',
-    interval: '1s',
-    intervalMs: 1000,
-    range: {
-      from: toUtc('2022-02-22T13:14:15'),
-      to: toUtc('2022-02-22T13:15:15'),
-      raw: {
-        from: toUtc('2022-02-22T13:14:15'),
-        to: toUtc('2022-02-22T13:15:15'),
-      },
-    },
-    scopedVars: {},
-    targets: [
-      {
-        refId: 'A',
-        expr,
-      },
-    ],
-    timezone: 'UTC',
-    app: CoreApp.Explore,
-    startTime: 0,
-  };
-}
-
 describe('loki backendResultTransformer', () => {
   it('processes a logs-dataframe correctly', () => {
-    const response: DataQueryResponse = { data: [cloneDeep(frame)] };
-    const request = makeRequest('{level="info"} |= "thing1"');
+    const response: DataQueryResponse = { data: [cloneDeep(inputFrame)] };
 
-    const expectedFrame = cloneDeep(frame);
+    const expectedFrame = cloneDeep(inputFrame);
     expectedFrame.meta = {
-      executedQueryString: 'something1',
+      ...expectedFrame.meta,
       preferredVisualisationType: 'logs',
       searchWords: ['thing1'],
       custom: {
         lokiQueryStatKey: 'Summary: total bytes processed',
       },
     };
-    expectedFrame.fields[2].type = FieldType.time;
-    expectedFrame.fields.push({
-      name: 'id',
-      type: FieldType.string,
-      config: {},
-      values: new ArrayVector([
-        '6b099923-25a6-5336-96fa-c84a14b7c351_A',
-        '0e1b7c47-a956-5cf2-a803-d487679745bd_A',
-        '6f9a840c-6a00-525b-9ed4-cceea29e62af_A',
-      ]),
-    });
+    expectedFrame.fields[2].type = FieldType.other;
+    expectedFrame.fields[2].values = new ArrayVector([
+      { level: 'info', code: '41🌙' },
+      { level: 'error', code: '41🌙' },
+      { level: 'error', code: '43🌙' },
+      { level: 'error', code: '41🌙' },
+      { level: 'info', code: '41🌙' },
+    ]);
 
     const expected: DataQueryResponse = { data: [expectedFrame] };
 
-    const result = transformBackendResult(response, request);
+    const result = transformBackendResult(response, [
+      {
+        refId: 'A',
+        expr: LOKI_EXPR,
+      },
+    ]);
     expect(result).toEqual(expected);
   });
 });
@@ -1,8 +1,15 @@
-import { DataQueryRequest, DataQueryResponse, DataFrame, isDataFrame, FieldType, QueryResultMeta } from '@grafana/data';
+import {
+  DataQueryResponse,
+  DataFrame,
+  isDataFrame,
+  FieldType,
+  QueryResultMeta,
+  ArrayVector,
+  Labels,
+} from '@grafana/data';
 import { LokiQuery, LokiQueryType } from './types';
 import { makeTableFrames } from './makeTableFrames';
 import { formatQuery, getHighlighterExpressionsFromQuery } from './query_utils';
-import { makeIdField } from './makeIdField';
 
 function isMetricFrame(frame: DataFrame): boolean {
   return frame.fields.every((field) => field.type === FieldType.time || field.type === FieldType.number);
@@ -19,6 +26,12 @@ function setFrameMeta(frame: DataFrame, meta: QueryResultMeta): DataFrame {
   };
 }
 
+function decodeLabelsInJson(text: string): Labels {
+  const array: Array<[string, string]> = JSON.parse(text);
+  // NOTE: maybe we should go with maps, those have guaranteed ordering
+  return Object.fromEntries(array);
+}
+
 function processStreamFrame(frame: DataFrame, query: LokiQuery | undefined): DataFrame {
   const meta: QueryResultMeta = {
     preferredVisualisationType: 'logs',
@@ -29,21 +42,36 @@ function processStreamFrame(frame: DataFrame, query: LokiQuery | undefined): Dat
     },
   };
   const newFrame = setFrameMeta(frame, meta);
-  const newFields = frame.fields.map((field) => {
-    // the nanosecond-timestamp field must have a type-time
-    if (field.name === 'tsNs') {
-      return {
-        ...field,
-        type: FieldType.time,
-      };
-    } else {
-      return field;
+  const newFields = newFrame.fields.map((field) => {
+    switch (field.name) {
+      case 'labels': {
+        // the labels, when coming from the server, are json-encoded.
+        // here we decode them if needed.
+        return field.config.custom.json
+          ? {
+              name: field.name,
+              type: FieldType.other,
+              config: field.config,
+              // we are parsing the labels the same way as streaming-dataframes do
+              values: new ArrayVector(field.values.toArray().map((text) => decodeLabelsInJson(text))),
+            }
+          : field;
+      }
+      case 'tsNs': {
+        // we need to switch the field-type to be `time`
+        return {
+          ...field,
+          type: FieldType.time,
+        };
+      }
+      default: {
+        // no modification needed
+        return field;
+      }
     }
   });
-
-  // we add a calculated id-field
-  newFields.push(makeIdField(frame));
-
   return {
     ...newFrame,
     fields: newFields,
@@ -96,10 +124,7 @@ function groupFrames(
   return { streamsFrames, metricInstantFrames, metricRangeFrames };
 }
 
-export function transformBackendResult(
-  response: DataQueryResponse,
-  request: DataQueryRequest<LokiQuery>
-): DataQueryResponse {
+export function transformBackendResult(response: DataQueryResponse, queries: LokiQuery[]): DataQueryResponse {
   const { data, ...rest } = response;
 
   // in the typescript type, data is an array of basically anything.
@@ -112,7 +137,7 @@ export function transformBackendResult(
     return d;
   });
 
-  const queryMap = new Map(request.targets.map((query) => [query.refId, query]));
+  const queryMap = new Map(queries.map((query) => [query.refId, query]));
 
   const { streamsFrames, metricInstantFrames, metricRangeFrames } = groupFrames(dataFrames, queryMap);
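
decodeLabelsInJson above is the frontend mirror of the backend's labelsToString: it parses the ordered-pairs JSON back into a labels object. The same round trip sketched in Go to match the other examples here (illustrative, not part of the commit):

package main

import (
    "encoding/json"
    "fmt"
)

func main() {
    // what the backend's labelsToString produced
    text := `[["code","41"],["level","info"]]`

    // decode back into key/value pairs, then into a map,
    // mirroring the frontend's decodeLabelsInJson
    var pairs [][2]string
    if err := json.Unmarshal([]byte(text), &pairs); err != nil {
        panic(err)
    }

    labels := make(map[string]string, len(pairs))
    for _, p := range pairs {
        labels[p[0]] = p[1]
    }

    fmt.Println(labels) // map[code:41 level:info]
}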
@@ -161,7 +161,14 @@ export class LokiDatasource
       ...request,
       targets: request.targets.map(getNormalizedLokiQuery),
     };
-    return super.query(fixedRequest).pipe(map((response) => transformBackendResult(response, fixedRequest)));
+
+    if (fixedRequest.liveStreaming) {
+      return this.runLiveQueryThroughBackend(fixedRequest);
+    } else {
+      return super
+        .query(fixedRequest)
+        .pipe(map((response) => transformBackendResult(response, fixedRequest.targets)));
+    }
   }
 
   const filteredTargets = request.targets
@@ -199,6 +206,27 @@ export class LokiDatasource
     return merge(...subQueries);
   }
 
+  runLiveQueryThroughBackend(request: DataQueryRequest<LokiQuery>): Observable<DataQueryResponse> {
+    // this only works in explore-mode, so variables don't need to be handled,
+    // and only for logs-queries, not metric queries
+    const logsQueries = request.targets.filter((query) => query.expr !== '' && !isMetricsQuery(query.expr));
+
+    if (logsQueries.length === 0) {
+      return of({
+        data: [],
+        state: LoadingState.Done,
+      });
+    }
+
+    const subQueries = logsQueries.map((query) => {
+      const maxDataPoints = query.maxLines || this.maxLines;
+      // FIXME: currently we are running it through the frontend still.
+      return this.runLiveQuery(query, maxDataPoints);
+    });
+
+    return merge(...subQueries);
+  }
+
   runInstantQuery = (
     target: LokiQuery,
     options: DataQueryRequest<LokiQuery>,
@@ -1,87 +0,0 @@
-import { ArrayVector, DataFrame, FieldType } from '@grafana/data';
-import { makeIdField } from './makeIdField';
-
-function makeFrame(timestamps: number[], values: string[], timestampNss: string[], refId?: string): DataFrame {
-  return {
-    name: 'frame',
-    refId,
-    meta: {
-      executedQueryString: 'something1',
-    },
-    fields: [
-      {
-        name: 'Time',
-        type: FieldType.time,
-        config: {},
-        values: new ArrayVector(timestamps),
-      },
-      {
-        name: 'Value',
-        type: FieldType.string,
-        config: {},
-        labels: {
-          foo: 'bar',
-        },
-        values: new ArrayVector(values),
-      },
-      {
-        name: 'tsNs',
-        type: FieldType.time,
-        config: {},
-        values: new ArrayVector(timestampNss),
-      },
-    ],
-    length: timestamps.length,
-  };
-}
-
-describe('loki makeIdField', () => {
-  it('should always generate unique ids for logs', () => {
-    const frame = makeFrame(
-      [1579857562021, 1579857562021, 1579857562021, 1579857562021],
-      [
-        't=2020-02-12T15:04:51+0000 lvl=info msg="Duplicated"',
-        't=2020-02-12T15:04:51+0000 lvl=info msg="Duplicated"',
-        't=2020-02-12T15:04:51+0000 lvl=info msg="Non-Duplicated"',
-        't=2020-02-12T15:04:51+0000 lvl=info msg="Duplicated"',
-      ],
-      ['1579857562021616000', '1579857562021616000', '1579857562021616000', '1579857562021616000']
-    );
-    expect(makeIdField(frame)).toEqual({
-      config: {},
-      name: 'id',
-      type: 'string',
-      values: new ArrayVector([
-        '75fceace-9f98-5134-b222-643fdcde2877',
-        '75fceace-9f98-5134-b222-643fdcde2877_1',
-        '4a081a89-040d-5f64-9477-a4d846ce9f6b',
-        '75fceace-9f98-5134-b222-643fdcde2877_2',
-      ]),
-    });
-  });
-
-  it('should append refId to the unique ids if refId is provided', () => {
-    const frame = makeFrame(
-      [1579857562021, 1579857562021, 1579857562021, 1579857562021],
-      [
-        't=2020-02-12T15:04:51+0000 lvl=info msg="Duplicated"',
-        't=2020-02-12T15:04:51+0000 lvl=info msg="Duplicated"',
-        't=2020-02-12T15:04:51+0000 lvl=info msg="Non-Duplicated"',
-        't=2020-02-12T15:04:51+0000 lvl=info msg="Duplicated"',
-      ],
-      ['1579857562021616000', '1579857562021616000', '1579857562021616000', '1579857562021616000'],
-      'X'
-    );
-    expect(makeIdField(frame)).toEqual({
-      config: {},
-      name: 'id',
-      type: 'string',
-      values: new ArrayVector([
-        '75fceace-9f98-5134-b222-643fdcde2877_X',
-        '75fceace-9f98-5134-b222-643fdcde2877_1_X',
-        '4a081a89-040d-5f64-9477-a4d846ce9f6b_X',
-        '75fceace-9f98-5134-b222-643fdcde2877_2_X',
-      ]),
-    });
-  });
-});
@@ -1,54 +0,0 @@
-import { v5 as uuidv5 } from 'uuid';
-
-import { ArrayVector, DataFrame, Field, FieldType, Labels } from '@grafana/data';
-
-const UUID_NAMESPACE = '6ec946da-0f49-47a8-983a-1d76d17e7c92';
-
-function createUid(text: string, usedUids: Map<string, number>, refId?: string): string {
-  const id = uuidv5(text, UUID_NAMESPACE);
-
-  // check how many times have we seen this id before,
-  // set the count to zero, if never.
-  const count = usedUids.get(id) ?? 0;
-
-  // if we have seen this id before, we need to make
-  // it unique by appending the seen-count
-  // (starts with 1, and goes up)
-  const uniqueId = count > 0 ? `${id}_${count}` : id;
-
-  // we increment the counter for this id, to be used when we are called the next time
-  usedUids.set(id, count + 1);
-
-  // we add refId to the end, if it is available
-  return refId !== undefined ? `${uniqueId}_${refId}` : uniqueId;
-}
-
-export function makeIdField(frame: DataFrame): Field {
-  const allLabels: Labels = {};
-
-  // collect labels from every field
-  frame.fields.forEach((field) => {
-    Object.assign(allLabels, field.labels);
-  });
-
-  const labelsString = Object.entries(allLabels)
-    .map(([key, val]) => `${key}="${val}"`)
-    .sort()
-    .join('');
-
-  const usedUids = new Map<string, number>();
-
-  const { length } = frame;
-
-  const uids: string[] = new Array(length);
-
-  // we need to go through the dataframe "row by row"
-  for (let i = 0; i < length; i++) {
-    const row = frame.fields.map((f) => String(f.values.get(i)));
-    const text = `${labelsString}_${row.join('_')}`;
-    const uid = createUid(text, usedUids, frame.refId);
-    uids[i] = uid;
-  }
-
-  return { name: 'id', type: FieldType.string, config: {}, values: new ArrayVector(uids) };
-}