Elasticsearch: Add query building for raw data and raw document queries to backend (#59741)
* Elasticsearch: Fix ordering in raw_document and add logic for raw_data
* Add comments
* Fix raw data request to use correct timefield
* Fix linting
* Add raw data as metric type
* Fix linting
* Hopefully fix lint
This commit is contained in:
parent d571376e8f, commit 0973501233
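Note (not part of the commit): a minimal, self-contained Go sketch of the sort clause that the raw_data / raw_document changes below end up sending, sorting descending on the configured time field with "_doc" as a secondary tie-breaker. The field name "testtime" is borrowed from the snapshot fixtures; in Grafana the value comes from e.client.GetTimeField().

package main

import (
	"encoding/json"
	"fmt"
)

func main() {
	// Time field name taken from the test fixtures; in Grafana it comes from
	// the data source configuration via e.client.GetTimeField().
	timeField := "testtime"

	// The builder keeps sort as a map (see the SearchRequestBuilder hunk
	// below); Elasticsearch's own examples use an array form instead.
	sort := map[string]interface{}{
		timeField: map[string]string{
			"order":         "desc",
			"unmapped_type": "boolean",
		},
		"_doc": map[string]string{
			"order": "desc",
		},
	}

	out, err := json.MarshalIndent(map[string]interface{}{"sort": sort}, "", "  ")
	if err != nil {
		panic(err)
	}
	// Prints a "sort" object matching the request snapshots below.
	fmt.Println(string(out))
}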
@@ -8,9 +8,10 @@ import (
 // SearchRequestBuilder represents a builder which can build a search request
 type SearchRequestBuilder struct {
 	interval intervalv2.Interval
 	index    string
 	size     int
+	// Currently sort is map, but based in examples it should be an array https://www.elastic.co/guide/en/elasticsearch/reference/current/sort-search-results.html
 	sort         map[string]interface{}
 	queryBuilder *QueryBuilder
 	aggBuilders  []AggBuilder
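Note (illustration only, not code from this commit): the struct comment above points at the array form of "sort" documented by Elasticsearch, which keeps clause priority explicit instead of relying on map ordering. A small self-contained Go sketch of that form, reusing the "testtime" field name from the fixtures:

package main

import (
	"encoding/json"
	"fmt"
)

func main() {
	// Array ("list of clauses") form of sort: time field first, then "_doc"
	// as an explicit tie-breaker. This is the shape the comment above says
	// the builder should eventually move to.
	sortClauses := []map[string]interface{}{
		{"testtime": map[string]string{"order": "desc", "unmapped_type": "boolean"}},
		{"_doc": map[string]string{"order": "desc"}},
	}

	out, err := json.Marshal(map[string]interface{}{"sort": sortClauses})
	if err != nil {
		panic(err)
	}
	fmt.Println(string(out))
}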
@@ -54,6 +54,7 @@ var metricAggType = map[string]string{
 	"serial_diff":   "Serial Difference",
 	"bucket_script": "Bucket Script",
 	"raw_document":  "Raw Document",
+	"raw_data":      "Raw Data",
 	"rate":          "Rate",
 }
@@ -74,6 +74,8 @@ func TestRequestSnapshots(t *testing.T) {
 		{name: "simple metric test", path: "metric_simple"},
 		{name: "complex metric test", path: "metric_complex"},
 		{name: "multi metric test", path: "metric_multi"},
+		{name: "raw data", path: "raw_data"},
+		{name: "raw document", path: "raw_document"},
 	}

 	queryHeader := []byte(`
@@ -0,0 +1,26 @@
+[
+  {
+    "metrics": [
+      {
+        "id": "1",
+        "type": "raw_data",
+        "settings": {
+          "size": "500"
+        }
+      }
+    ],
+    "query": "",
+    "refId": "A",
+    "datasource": {
+      "type": "elasticsearch",
+      "uid": "PE50363A9B6833EE7"
+    },
+    "alias": "",
+    "bucketAggs": [],
+    "timeField": "testtime",
+    "key": "Q-ee8fea91-a4c4-4ded-9827-b362476a4083-0",
+    "datasourceId": 39,
+    "intervalMs": 2000,
+    "maxDataPoints": 1318
+  }
+]
@@ -0,0 +1,30 @@
+{
+  "docvalue_fields": [
+    "testtime"
+  ],
+  "query": {
+    "bool": {
+      "filter": {
+        "range": {
+          "testtime": {
+            "format": "epoch_millis",
+            "gte": 1668422437218,
+            "lte": 1668422625668
+          }
+        }
+      }
+    }
+  },
+  "script_fields": {},
+  "size": 500,
+  "sort":
+  {
+    "testtime": {
+      "order": "desc",
+      "unmapped_type": "boolean"
+    },
+    "_doc": {
+      "order": "desc"
+    }
+  }
+}
@@ -0,0 +1,26 @@
+[
+  {
+    "metrics": [
+      {
+        "id": "1",
+        "type": "raw_document",
+        "settings": {
+          "size": "500"
+        }
+      }
+    ],
+    "query": "",
+    "refId": "A",
+    "datasource": {
+      "type": "elasticsearch",
+      "uid": "PE50363A9B6833EE7"
+    },
+    "alias": "",
+    "bucketAggs": [],
+    "timeField": "testtime",
+    "key": "Q-ee8fea91-a4c4-4ded-9827-b362476a4083-0",
+    "datasourceId": 39,
+    "intervalMs": 2000,
+    "maxDataPoints": 1318
+  }
+]
@@ -0,0 +1,30 @@
+{
+  "docvalue_fields": [
+    "testtime"
+  ],
+  "query": {
+    "bool": {
+      "filter": {
+        "range": {
+          "testtime": {
+            "format": "epoch_millis",
+            "gte": 1668422437218,
+            "lte": 1668422625668
+          }
+        }
+      }
+    }
+  },
+  "script_fields": {},
+  "size": 500,
+  "sort":
+  {
+    "testtime": {
+      "order": "desc",
+      "unmapped_type": "boolean"
+    },
+    "_doc": {
+      "order": "desc"
+    }
+  }
+}
@@ -78,7 +78,7 @@ func (e *timeSeriesQuery) processQuery(q *Query, ms *es.MultiSearchRequestBuilde
 	}

 	if len(q.BucketAggs) == 0 {
-		if len(q.Metrics) == 0 || q.Metrics[0].Type != "raw_document" {
+		if len(q.Metrics) == 0 || !(q.Metrics[0].Type == "raw_document" || q.Metrics[0].Type == "raw_data") {
 			result.Responses[q.RefID] = backend.DataResponse{
 				Error: fmt.Errorf("invalid query, missing metrics and aggregations"),
 			}
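Note (sketch only, not the Grafana code path): the rewritten condition above means a query with no bucket aggregations is accepted only when its first metric is a raw_document or raw_data metric; a tiny standalone Go illustration of that guard:

package main

import "fmt"

// validWithoutBucketAggs mirrors the guard above: with no bucket aggregations,
// only a leading raw_document or raw_data metric makes the query valid.
func validWithoutBucketAggs(metricTypes []string) bool {
	if len(metricTypes) == 0 {
		return false
	}
	return metricTypes[0] == "raw_document" || metricTypes[0] == "raw_data"
}

func main() {
	fmt.Println(validWithoutBucketAggs([]string{"raw_data"})) // true
	fmt.Println(validWithoutBucketAggs([]string{"avg"}))      // false
	fmt.Println(validWithoutBucketAggs(nil))                  // false: missing metrics and aggregations
}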
@@ -86,8 +86,9 @@ func (e *timeSeriesQuery) processQuery(q *Query, ms *es.MultiSearchRequestBuilde
 		}
 		metric := q.Metrics[0]
 		b.Size(metric.Settings.Get("size").MustInt(500))
-		b.SortDesc("@timestamp", "boolean")
-		b.AddDocValueField("@timestamp")
+		b.SortDesc(e.client.GetTimeField(), "boolean")
+		b.SortDesc("_doc", "")
+		b.AddDocValueField(e.client.GetTimeField())
 		return nil
 	}

@@ -432,9 +432,40 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
 			"metrics": [{ "id": "1", "type": "raw_document", "settings": {} }]
 		}`, from, to, 15*time.Second)
 		require.NoError(t, err)
-		// FIXME: { _doc: { order: 'desc' } } is missing
-		// sr := c.multisearchRequests[0].Requests[0]
-		// require.Equal(t, sr, `{"docvalue_fields":["@timestamp"],"query":{"bool":{"filter":{"range":{"@timestamp":{"format":"epoch_millis","gte":1526406600000,"lte":1526406900000}}}}},"script_fields":{},"size":500,"sort":[{"@timestamp":{"order":"desc","unmapped_type":"boolean"}}, {"_doc": {"order": "desc"}}]}`)
+
+		sr := c.multisearchRequests[0].Requests[0]
+		rangeFilter := sr.Query.Bool.Filters[0].(*es.RangeFilter)
+		require.Equal(t, rangeFilter.Key, c.timeField)
+		require.Equal(t, rangeFilter.Lte, toMs)
+		require.Equal(t, rangeFilter.Gte, fromMs)
+		require.Equal(t, rangeFilter.Format, es.DateFormatEpochMS)
+
+		require.Equal(t, sr.Size, 500)
+		require.Equal(t, sr.Sort["@timestamp"], map[string]string{"order": "desc", "unmapped_type": "boolean"})
+		require.Equal(t, sr.Sort["_doc"], map[string]string{"order": "desc"})
+		require.Equal(t, sr.CustomProps["script_fields"], map[string]interface{}{})
+	})
+
+	t.Run("With raw data metric query (from frontend tests)", func(t *testing.T) {
+		c := newFakeClient()
+		_, err := executeTsdbQuery(c, `{
+			"timeField": "@timestamp",
+			"bucketAggs": [],
+			"metrics": [{ "id": "1", "type": "raw_data", "settings": {} }]
+		}`, from, to, 15*time.Second)
+		require.NoError(t, err)
+
+		sr := c.multisearchRequests[0].Requests[0]
+		rangeFilter := sr.Query.Bool.Filters[0].(*es.RangeFilter)
+		require.Equal(t, rangeFilter.Key, c.timeField)
+		require.Equal(t, rangeFilter.Lte, toMs)
+		require.Equal(t, rangeFilter.Gte, fromMs)
+		require.Equal(t, rangeFilter.Format, es.DateFormatEpochMS)
+
+		require.Equal(t, sr.Size, 500)
+		require.Equal(t, sr.Sort["@timestamp"], map[string]string{"order": "desc", "unmapped_type": "boolean"})
+		require.Equal(t, sr.Sort["_doc"], map[string]string{"order": "desc"})
+		require.Equal(t, sr.CustomProps["script_fields"], map[string]interface{}{})
+	})

 	t.Run("With raw document metric size set", func(t *testing.T) {