Elasticsearch: Fix some of the tests duplicated from frontend (#64320)
commit 1cb39b3523
parent 0c8876c3a2
@@ -1,6 +1,7 @@
 package elasticsearch
 
 import (
+	"encoding/json"
 	"fmt"
 	"testing"
 	"time"
@@ -1176,24 +1177,23 @@ func TestRawDocumentQuery(t *testing.T) {
 	require.NoError(t, err)
 
 	require.Len(t, result.response.Responses, 1)
-	// FIXME: the whole raw_document format is not implemented currently
-	// frames := result.response.Responses["A"].Frames
-	// require.Len(t, frames, 1)
-	// fields := frames[0].Fields
+	frames := result.response.Responses["A"].Frames
+	require.Len(t, frames, 1)
+	fields := frames[0].Fields
 
-	// require.Len(t, fields, 1)
-	// f := fields[0]
+	require.Len(t, fields, 1)
+	f := fields[0]
 
-	// require.Equal(t, data.FieldTypeJSON, f.Type())
-	// require.Equal(t, 2, f.Len())
+	require.Equal(t, data.FieldTypeNullableJSON, f.Type())
+	require.Equal(t, 2, f.Len())
 
-	// v := f.At(0).(json.RawMessage)
-	// var jsonData map[string]interface{}
-	// err = json.Unmarshal(v, &jsonData)
-	// require.NoError(t, err)
+	v := f.At(0).(*json.RawMessage)
+	var jsonData map[string]interface{}
+	err = json.Unmarshal(*v, &jsonData)
+	require.NoError(t, err)
 
-	// require.Equal(t, "asd", jsonData["sourceProp"])
-	// require.Equal(t, "field", jsonData["fieldProp"])
+	require.Equal(t, "asd", jsonData["sourceProp"])
+	require.Equal(t, "field", jsonData["fieldProp"])
 }
 
 func TestBucketScript(t *testing.T) {
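Note (not part of the diff): the re-enabled assertions depend on the raw-document frame carrying a nullable JSON field, which the grafana-plugin-sdk-go data package backs with []*json.RawMessage, so each row is read back as a *json.RawMessage that must be dereferenced before unmarshalling. A minimal sketch of that round trip, under the assumption that data.NewField accepts a []*json.RawMessage slice:

package main

import (
	"encoding/json"
	"fmt"

	"github.com/grafana/grafana-plugin-sdk-go/data"
)

func main() {
	// A nullable JSON field is backed by []*json.RawMessage,
	// so individual rows may be nil.
	doc := json.RawMessage(`{"sourceProp": "asd", "fieldProp": "field"}`)
	field := data.NewField("doc", nil, []*json.RawMessage{&doc, nil})

	fmt.Println(field.Type() == data.FieldTypeNullableJSON) // true

	// Reading a row back mirrors the test: type-assert to
	// *json.RawMessage, then unmarshal through the pointer.
	v := field.At(0).(*json.RawMessage)
	var m map[string]interface{}
	if err := json.Unmarshal(*v, &m); err != nil {
		panic(err)
	}
	fmt.Println(m["sourceProp"]) // asd
}

This is why the assertions now expect data.FieldTypeNullableJSON and dereference the pointer before unmarshalling, where the old commented-out code asserted data.FieldTypeJSON and a plain json.RawMessage.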
@@ -1361,7 +1361,7 @@ func TestLogsAndCount(t *testing.T) {
 	[
 		{
 			"refId": "A",
-			"metrics": [{ "type": "count", "id": "1" }],
+			"metrics": [{ "type": "logs"}],
 			"bucketAggs": [
 				{
 					"type": "date_histogram",
@@ -1379,14 +1379,7 @@ func TestLogsAndCount(t *testing.T) {
 	{
 		"responses": [
 			{
-				"aggregations": {
-					"2": {
-						"buckets": [
-							{ "doc_count": 10, "key": 1000 },
-							{ "doc_count": 15, "key": 2000 }
-						]
-					}
-				},
+				"aggregations": {},
 				"hits": {
 					"hits": [
 						{
@@ -1397,7 +1390,7 @@ func TestLogsAndCount(t *testing.T) {
 							"@timestamp": "2019-06-24T09:51:19.765Z",
 							"host": "djisaodjsoad",
 							"number": 1,
-							"message": "hello, i am a message",
+							"line": "hello, i am a message",
 							"level": "debug",
 							"fields": { "lvl": "debug" }
 						},
@@ -1415,7 +1408,7 @@ func TestLogsAndCount(t *testing.T) {
 							"@timestamp": "2019-06-24T09:52:19.765Z",
 							"host": "dsalkdakdop",
 							"number": 2,
-							"message": "hello, i am also message",
+							"line": "hello, i am also message",
 							"level": "error",
 							"fields": { "lvl": "info" }
 						},
@@ -1522,7 +1515,6 @@ func TestLogsAndCount(t *testing.T) {
 	})
 
 	t.Run("level field", func(t *testing.T) {
-		// FIXME: config datasource with messageField=<unset>, levelField="level"
 		result, err := queryDataTest(query, response)
 		require.NoError(t, err)
 
@@ -1536,33 +1528,11 @@ func TestLogsAndCount(t *testing.T) {
 			fieldMap[field.Name] = field
 		}
 
-		// require.Contains(t, fieldMap, "level") // FIXME
-		// field := fieldMap["level"]
+		require.Contains(t, fieldMap, "level")
+		field := fieldMap["level"]
 
-		// requireStringAt(t, "debug", field, 0)
-		// requireStringAt(t, "error", field, 1)
-	})
-
-	t.Run("level field remap", func(t *testing.T) {
-		// FIXME: config datasource with messageField=<unset>, levelField="fields.lvl"
-		result, err := queryDataTest(query, response)
-		require.NoError(t, err)
-
-		require.Len(t, result.response.Responses, 1)
-		frames := result.response.Responses["A"].Frames
-		require.True(t, len(frames) > 0)
-
-		requireFrameLength(t, frames[0], 2)
-		fieldMap := make(map[string]*data.Field)
-		for _, field := range frames[0].Fields {
-			fieldMap[field.Name] = field
-		}
-
-		// require.Contains(t, fieldMap, "level") // FIXME
-		// field := fieldMap["level"]
-
-		// requireStringAt(t, "debug", field, 0)
-		// requireStringAt(t, "info", field, 1)
+		requireStringAt(t, "debug", field, 0)
+		requireStringAt(t, "error", field, 1)
 	})
 }
 
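Note (not part of the diff): requireStringAt is a helper defined elsewhere in this test file and not shown in these hunks. A hypothetical sketch of what such a helper might look like, assuming log frame fields are built as nullable strings (*string):

// Hypothetical reconstruction of the requireStringAt helper used above;
// the real definition lives elsewhere in the test file and may differ.
// Assumes imports of "testing", the plugin SDK "data" package, and
// "github.com/stretchr/testify/require".
func requireStringAt(t *testing.T, expected string, field *data.Field, idx int) {
	t.Helper()
	v, ok := field.At(idx).(*string) // assumes a nullable string field
	require.True(t, ok)
	require.NotNil(t, v)
	require.Equal(t, expected, *v)
}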
@@ -1572,13 +1542,7 @@ func TestLogsEmptyResponse(t *testing.T) {
 		{
 			"refId": "A",
 			"metrics": [{ "type": "logs", "id": "2" }],
-			"bucketAggs": [
-				{
-					"type": "date_histogram",
-					"settings": { "interval": "auto" },
-					"id": "1"
-				}
-			],
+			"bucketAggs": [],
 			"key": "Q-1561369883389-0.7611823271062786-0",
 			"query": "hello AND message"
 		}
@@ -1590,38 +1554,17 @@ func TestLogsEmptyResponse(t *testing.T) {
 		"responses": [
 			{
 				"hits": { "hits": [] },
-				"aggregations": {
-					"1": {
-						"buckets": [
-							{
-								"key_as_string": "1633676760000",
-								"key": 1633676760000,
-								"doc_count": 0
-							},
-							{
-								"key_as_string": "1633676770000",
-								"key": 1633676770000,
-								"doc_count": 0
-							},
-							{
-								"key_as_string": "1633676780000",
-								"key": 1633676780000,
-								"doc_count": 0
-							}
-						]
-					}
-				},
+				"aggregations": {},
 				"status": 200
 			}
 		]
 	}
 	`)
 
-	// FIXME: config datasource with messageField="message", levelField="level"
 	result, err := queryDataTest(query, response)
 	require.NoError(t, err)
 
 	require.Len(t, result.response.Responses, 1)
-	// frames := result.response.Responses["A"].Frames
-	// require.Len(t, frames, 2) // FIXME
+	frames := result.response.Responses["A"].Frames
+	require.Len(t, frames, 1)
 }