Elasticsearch: Fix some of the tests duplicated from frontend (#64320)

This commit is contained in:
Ivana Huckova 2023-03-08 13:31:50 +01:00 committed by GitHub
parent 0c8876c3a2
commit 1cb39b3523
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23

View File

@@ -1,6 +1,7 @@
 package elasticsearch

 import (
+	"encoding/json"
	"fmt"
	"testing"
	"time"
@@ -1176,24 +1177,23 @@ func TestRawDocumentQuery(t *testing.T) {
	require.NoError(t, err)
	require.Len(t, result.response.Responses, 1)

-	// FIXME: the whole raw_document format is not implemented currently
-	// frames := result.response.Responses["A"].Frames
-	// require.Len(t, frames, 1)
-	// fields := frames[0].Fields
-	// require.Len(t, fields, 1)
-	// f := fields[0]
-	// require.Equal(t, data.FieldTypeJSON, f.Type())
-	// require.Equal(t, 2, f.Len())
-	// v := f.At(0).(json.RawMessage)
-	// var jsonData map[string]interface{}
-	// err = json.Unmarshal(v, &jsonData)
-	// require.NoError(t, err)
-	// require.Equal(t, "asd", jsonData["sourceProp"])
-	// require.Equal(t, "field", jsonData["fieldProp"])
+	frames := result.response.Responses["A"].Frames
+	require.Len(t, frames, 1)
+	fields := frames[0].Fields
+	require.Len(t, fields, 1)
+	f := fields[0]
+	require.Equal(t, data.FieldTypeNullableJSON, f.Type())
+	require.Equal(t, 2, f.Len())
+	v := f.At(0).(*json.RawMessage)
+	var jsonData map[string]interface{}
+	err = json.Unmarshal(*v, &jsonData)
+	require.NoError(t, err)
+	require.Equal(t, "asd", jsonData["sourceProp"])
+	require.Equal(t, "field", jsonData["fieldProp"])
 }

 func TestBucketScript(t *testing.T) {
@@ -1361,7 +1361,7 @@ func TestLogsAndCount(t *testing.T) {
	[
		{
			"refId": "A",
-			"metrics": [{ "type": "count", "id": "1" }],
+			"metrics": [{ "type": "logs"}],
			"bucketAggs": [
				{
					"type": "date_histogram",
@@ -1379,14 +1379,7 @@ func TestLogsAndCount(t *testing.T) {
	{
		"responses": [
			{
-				"aggregations": {
-					"2": {
-						"buckets": [
-							{ "doc_count": 10, "key": 1000 },
-							{ "doc_count": 15, "key": 2000 }
-						]
-					}
-				},
+				"aggregations": {},
				"hits": {
					"hits": [
						{
@@ -1397,7 +1390,7 @@ func TestLogsAndCount(t *testing.T) {
							"@timestamp": "2019-06-24T09:51:19.765Z",
							"host": "djisaodjsoad",
							"number": 1,
-							"message": "hello, i am a message",
+							"line": "hello, i am a message",
							"level": "debug",
							"fields": { "lvl": "debug" }
						},
@@ -1415,7 +1408,7 @@ func TestLogsAndCount(t *testing.T) {
							"@timestamp": "2019-06-24T09:52:19.765Z",
							"host": "dsalkdakdop",
							"number": 2,
-							"message": "hello, i am also message",
+							"line": "hello, i am also message",
							"level": "error",
							"fields": { "lvl": "info" }
						},
@@ -1522,7 +1515,6 @@ func TestLogsAndCount(t *testing.T) {
	})

	t.Run("level field", func(t *testing.T) {
-		// FIXME: config datasource with messageField=<unset>, levelField="level"
		result, err := queryDataTest(query, response)
		require.NoError(t, err)
@@ -1536,33 +1528,11 @@ func TestLogsAndCount(t *testing.T) {
			fieldMap[field.Name] = field
		}

-		// require.Contains(t, fieldMap, "level") // FIXME
-		// field := fieldMap["level"]
-		// requireStringAt(t, "debug", field, 0)
-		// requireStringAt(t, "error", field, 1)
-	})
-
-	t.Run("level field remap", func(t *testing.T) {
-		// FIXME: config datasource with messageField=<unset>, levelField="fields.lvl"
-		result, err := queryDataTest(query, response)
-		require.NoError(t, err)
-
-		require.Len(t, result.response.Responses, 1)
-		frames := result.response.Responses["A"].Frames
-		require.True(t, len(frames) > 0)
-		requireFrameLength(t, frames[0], 2)
-
-		fieldMap := make(map[string]*data.Field)
-		for _, field := range frames[0].Fields {
-			fieldMap[field.Name] = field
-		}
-
-		// require.Contains(t, fieldMap, "level") // FIXME
-		// field := fieldMap["level"]
-		// requireStringAt(t, "debug", field, 0)
-		// requireStringAt(t, "info", field, 1)
+		require.Contains(t, fieldMap, "level")
+		field := fieldMap["level"]
+		requireStringAt(t, "debug", field, 0)
+		requireStringAt(t, "error", field, 1)
	})
 }
@@ -1572,13 +1542,7 @@ func TestLogsEmptyResponse(t *testing.T) {
		{
			"refId": "A",
			"metrics": [{ "type": "logs", "id": "2" }],
-			"bucketAggs": [
-				{
-					"type": "date_histogram",
-					"settings": { "interval": "auto" },
-					"id": "1"
-				}
-			],
+			"bucketAggs": [],
			"key": "Q-1561369883389-0.7611823271062786-0",
			"query": "hello AND message"
		}
@@ -1590,38 +1554,17 @@ func TestLogsEmptyResponse(t *testing.T) {
		"responses": [
			{
				"hits": { "hits": [] },
-				"aggregations": {
-					"1": {
-						"buckets": [
-							{
-								"key_as_string": "1633676760000",
-								"key": 1633676760000,
-								"doc_count": 0
-							},
-							{
-								"key_as_string": "1633676770000",
-								"key": 1633676770000,
-								"doc_count": 0
-							},
-							{
-								"key_as_string": "1633676780000",
-								"key": 1633676780000,
-								"doc_count": 0
-							}
-						]
-					}
-				},
+				"aggregations": {},
				"status": 200
			}
		]
	}
	`)

-	// FIXME: config datasource with messageField="message", levelField="level"
	result, err := queryDataTest(query, response)
	require.NoError(t, err)

	require.Len(t, result.response.Responses, 1)
-	// frames := result.response.Responses["A"].Frames
-	// require.Len(t, frames, 2) // FIXME
+	frames := result.response.Responses["A"].Frames
+	require.Len(t, frames, 1)
 }