package elasticsearch

import (
	"fmt"
	"strconv"

	"github.com/grafana/grafana/pkg/components/simplejson"
	"github.com/grafana/grafana/pkg/tsdb"
	"github.com/grafana/grafana/pkg/tsdb/elasticsearch/client"
)
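
// timeSeriesQuery holds everything needed to turn the time series queries of
// one tsdb request into a single Elasticsearch multi-search request.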
type timeSeriesQuery struct {
	client             es.Client
	tsdbQuery          *tsdb.TsdbQuery
	intervalCalculator tsdb.IntervalCalculator
}
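
// newTimeSeriesQuery is a package-level variable so the constructor can be
// swapped out, for example from tests.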
var newTimeSeriesQuery = func(client es.Client, tsdbQuery *tsdb.TsdbQuery, intervalCalculator tsdb.IntervalCalculator) *timeSeriesQuery {
	return &timeSeriesQuery{
		client:             client,
		tsdbQuery:          tsdbQuery,
		intervalCalculator: intervalCalculator,
	}
}
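
// execute parses the raw panel queries, builds one search per query into a
// single multi-search request, sends it to Elasticsearch and parses the
// responses back into tsdb query results.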
func (e *timeSeriesQuery) execute() (*tsdb.Response, error) {
	result := &tsdb.Response{}
	result.Results = make(map[string]*tsdb.QueryResult)

	tsQueryParser := newTimeSeriesQueryParser()
	queries, err := tsQueryParser.parse(e.tsdbQuery)
	if err != nil {
		return nil, err
	}

	ms := e.client.MultiSearch()

	from := fmt.Sprintf("%d", e.tsdbQuery.TimeRange.GetFromAsMsEpoch())
	to := fmt.Sprintf("%d", e.tsdbQuery.TimeRange.GetToAsMsEpoch())

	for _, q := range queries {
		minInterval, err := e.client.GetMinInterval(q.Interval)
		if err != nil {
			return nil, err
		}
		interval := e.intervalCalculator.Calculate(e.tsdbQuery.TimeRange, minInterval)
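
		// aggregation-only search: size 0, with the panel's time range
		// applied as an epoch-millisecond date range filter on the data
		// source's time field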
		b := ms.Search(interval)
		b.Size(0)
		filters := b.Query().Bool().Filter()
		filters.AddDateRangeFilter(e.client.GetTimeField(), to, from, es.DateFormatEpochMS)

		if q.RawQuery != "" {
			filters.AddQueryStringFilter(q.RawQuery, true)
		}
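
		// without bucket aggregations the query is only valid as a raw
		// document query: return the newest matching documents instead of
		// a time series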
		if len(q.BucketAggs) == 0 {
			if len(q.Metrics) == 0 || q.Metrics[0].Type != "raw_document" {
				result.Results[q.RefID] = &tsdb.QueryResult{
					RefId:       q.RefID,
					Error:       fmt.Errorf("invalid query, missing metrics and aggregations"),
					ErrorString: "invalid query, missing metrics and aggregations",
				}
				continue
			}
			metric := q.Metrics[0]
			b.Size(metric.Settings.Get("size").MustInt(500))
			b.SortDesc("@timestamp", "boolean")
			b.AddDocValueField("@timestamp")
			continue
		}

		aggBuilder := b.Agg()

		// nest the aggregations: each bucket aggregation is added as a
		// child of the previous one via the builder handed to its callback
		for _, bucketAgg := range q.BucketAggs {
			switch bucketAgg.Type {
			case dateHistType:
				aggBuilder = addDateHistogramAgg(aggBuilder, bucketAgg, from, to)
			case histogramType:
				aggBuilder = addHistogramAgg(aggBuilder, bucketAgg)
			case filtersType:
				aggBuilder = addFiltersAgg(aggBuilder, bucketAgg)
			case termsType:
				aggBuilder = addTermsAgg(aggBuilder, bucketAgg, q.Metrics)
			case geohashGridType:
				aggBuilder = addGeoHashGridAgg(aggBuilder, bucketAgg)
			}
		}
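
		// metric aggregations are added at the innermost level; pipeline
		// aggregations reference sibling metrics by bucket path, where the
		// special path "_count" selects the bucket's document count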
		for _, m := range q.Metrics {
			if m.Type == countType {
				continue
			}

			if isPipelineAgg(m.Type) {
				if isPipelineAggWithMultipleBucketPaths(m.Type) {
					if len(m.PipelineVariables) > 0 {
						bucketPaths := map[string]interface{}{}
						for name, pipelineAgg := range m.PipelineVariables {
							if _, err := strconv.Atoi(pipelineAgg); err == nil {
								var appliedAgg *MetricAgg
								for _, pipelineMetric := range q.Metrics {
									if pipelineMetric.ID == pipelineAgg {
										appliedAgg = pipelineMetric
										break
									}
								}
								if appliedAgg != nil {
									if appliedAgg.Type == countType {
										bucketPaths[name] = "_count"
									} else {
										bucketPaths[name] = pipelineAgg
									}
								}
							}
						}

						aggBuilder.Pipeline(m.ID, m.Type, bucketPaths, func(a *es.PipelineAggregation) {
							a.Settings = m.Settings.MustMap()
						})
					} else {
						continue
					}
				} else {
					if _, err := strconv.Atoi(m.PipelineAggregate); err == nil {
						var appliedAgg *MetricAgg
						for _, pipelineMetric := range q.Metrics {
							if pipelineMetric.ID == m.PipelineAggregate {
								appliedAgg = pipelineMetric
								break
							}
						}
						if appliedAgg != nil {
							bucketPath := m.PipelineAggregate
							if appliedAgg.Type == countType {
								bucketPath = "_count"
							}

							aggBuilder.Pipeline(m.ID, m.Type, bucketPath, func(a *es.PipelineAggregation) {
								a.Settings = m.Settings.MustMap()
							})
						}
					} else {
						continue
					}
				}
			} else {
				aggBuilder.Metric(m.ID, m.Type, m.Field, func(a *es.MetricAggregation) {
					a.Settings = m.Settings.MustMap()
				})
			}
		}
	}

	req, err := ms.Build()
	if err != nil {
		return nil, err
	}

	res, err := e.client.ExecuteMultisearch(req)
	if err != nil {
		return nil, err
	}

	rp := newResponseParser(res.Responses, queries)
	return rp.getTimeSeries()
}
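
// addDateHistogramAgg adds a date histogram bucket aggregation on the
// configured field, with extended bounds set to the panel's time range so
// empty buckets are still returned for the whole interval.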
func addDateHistogramAgg(aggBuilder es.AggBuilder, bucketAgg *BucketAgg, timeFrom, timeTo string) es.AggBuilder {
	aggBuilder.DateHistogram(bucketAgg.ID, bucketAgg.Field, func(a *es.DateHistogramAgg, b es.AggBuilder) {
		a.Interval = bucketAgg.Settings.Get("interval").MustString("auto")
		a.MinDocCount = bucketAgg.Settings.Get("min_doc_count").MustInt(0)
		a.ExtendedBounds = &es.ExtendedBounds{Min: timeFrom, Max: timeTo}
		a.Format = bucketAgg.Settings.Get("format").MustString(es.DateFormatEpochMS)

		if a.Interval == "auto" {
			a.Interval = "$__interval"
		}

		if offset, err := bucketAgg.Settings.Get("offset").String(); err == nil {
			a.Offset = offset
		}

		if missing, err := bucketAgg.Settings.Get("missing").String(); err == nil {
			a.Missing = &missing
		}

		aggBuilder = b
	})

	return aggBuilder
}
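
// addHistogramAgg adds a numeric histogram bucket aggregation with a default
// interval of 1000.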
func addHistogramAgg(aggBuilder es.AggBuilder, bucketAgg *BucketAgg) es.AggBuilder {
	aggBuilder.Histogram(bucketAgg.ID, bucketAgg.Field, func(a *es.HistogramAgg, b es.AggBuilder) {
		a.Interval = bucketAgg.Settings.Get("interval").MustInt(1000)
		a.MinDocCount = bucketAgg.Settings.Get("min_doc_count").MustInt(0)

		if missing, err := bucketAgg.Settings.Get("missing").Int(); err == nil {
			a.Missing = &missing
		}

		aggBuilder = b
	})

	return aggBuilder
}
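
// addTermsAgg adds a terms bucket aggregation. The size setting may arrive
// as a number or a string and falls back to 500; when the ordering
// references another metric by id, that metric is added at the same level
// so Elasticsearch can order the buckets by it.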
func addTermsAgg(aggBuilder es.AggBuilder, bucketAgg *BucketAgg, metrics []*MetricAgg) es.AggBuilder {
	aggBuilder.Terms(bucketAgg.ID, bucketAgg.Field, func(a *es.TermsAggregation, b es.AggBuilder) {
		if size, err := bucketAgg.Settings.Get("size").Int(); err == nil {
			a.Size = size
		} else if size, err := bucketAgg.Settings.Get("size").String(); err == nil {
			a.Size, err = strconv.Atoi(size)
			if err != nil {
				a.Size = 500
			}
		} else {
			a.Size = 500
		}
		if a.Size == 0 {
			a.Size = 500
		}

		if minDocCount, err := bucketAgg.Settings.Get("min_doc_count").Int(); err == nil {
			a.MinDocCount = &minDocCount
		}
		if missing, err := bucketAgg.Settings.Get("missing").String(); err == nil {
			a.Missing = &missing
		}

		if orderBy, err := bucketAgg.Settings.Get("orderBy").String(); err == nil {
			a.Order[orderBy] = bucketAgg.Settings.Get("order").MustString("desc")

			if _, err := strconv.Atoi(orderBy); err == nil {
				for _, m := range metrics {
					if m.ID == orderBy {
						b.Metric(m.ID, m.Type, m.Field, nil)
						break
					}
				}
			}
		}

		aggBuilder = b
	})

	return aggBuilder
}
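
// addFiltersAgg adds a filters bucket aggregation with one query string
// filter per configured filter; an empty label falls back to the query
// itself.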
func addFiltersAgg(aggBuilder es.AggBuilder, bucketAgg *BucketAgg) es.AggBuilder {
	filters := make(map[string]interface{})
	for _, filter := range bucketAgg.Settings.Get("filters").MustArray() {
		json := simplejson.NewFromAny(filter)
		query := json.Get("query").MustString()
		label := json.Get("label").MustString()
		if label == "" {
			label = query
		}
		filters[label] = &es.QueryStringFilter{Query: query, AnalyzeWildcard: true}
	}

	if len(filters) > 0 {
		aggBuilder.Filters(bucketAgg.ID, func(a *es.FiltersAggregation, b es.AggBuilder) {
			a.Filters = filters
			aggBuilder = b
		})
	}

	return aggBuilder
}
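
// addGeoHashGridAgg adds a geohash grid bucket aggregation with a default
// precision of 3.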
func addGeoHashGridAgg(aggBuilder es.AggBuilder, bucketAgg *BucketAgg) es.AggBuilder {
	aggBuilder.GeoHashGrid(bucketAgg.ID, bucketAgg.Field, func(a *es.GeoHashGridAggregation, b es.AggBuilder) {
		a.Precision = bucketAgg.Settings.Get("precision").MustInt(3)
		aggBuilder = b
	})

	return aggBuilder
}
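
// timeSeriesQueryParser converts the JSON model of a tsdb request into
// Query values.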
type timeSeriesQueryParser struct{}

func newTimeSeriesQueryParser() *timeSeriesQueryParser {
	return &timeSeriesQueryParser{}
}
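
// parse extracts one Query per tsdb sub-query from its JSON model. The time
// field is required; everything else falls back to defaults, and the
// interval is derived from the sub-query's IntervalMs.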
func (p *timeSeriesQueryParser) parse(tsdbQuery *tsdb.TsdbQuery) ([]*Query, error) {
	queries := make([]*Query, 0)
	for _, q := range tsdbQuery.Queries {
		model := q.Model
		timeField, err := model.Get("timeField").String()
		if err != nil {
			return nil, err
		}
		rawQuery := model.Get("query").MustString()
		bucketAggs, err := p.parseBucketAggs(model)
		if err != nil {
			return nil, err
		}
		metrics, err := p.parseMetrics(model)
		if err != nil {
			return nil, err
		}
		alias := model.Get("alias").MustString("")
		interval := strconv.FormatInt(q.IntervalMs, 10) + "ms"

		queries = append(queries, &Query{
			TimeField:  timeField,
			RawQuery:   rawQuery,
			BucketAggs: bucketAggs,
			Metrics:    metrics,
			Alias:      alias,
			Interval:   interval,
			RefID:      q.RefId,
		})
	}

	return queries, nil
}
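
// parseBucketAggs parses the bucket aggregations of a query model; type and
// id are required.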
func (p *timeSeriesQueryParser) parseBucketAggs(model *simplejson.Json) ([]*BucketAgg, error) {
	var err error
	var result []*BucketAgg
	for _, t := range model.Get("bucketAggs").MustArray() {
		aggJSON := simplejson.NewFromAny(t)
		agg := &BucketAgg{}

		agg.Type, err = aggJSON.Get("type").String()
		if err != nil {
			return nil, err
		}

		agg.ID, err = aggJSON.Get("id").String()
		if err != nil {
			return nil, err
		}

		agg.Field = aggJSON.Get("field").MustString()
		agg.Settings = simplejson.NewFromAny(aggJSON.Get("settings").MustMap())

		result = append(result, agg)
	}
	return result, nil
}
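
// parseMetrics parses the metric aggregations of a query model; type is
// required. For pipeline aggregations with multiple bucket paths, the
// pipeline variables are collected into a name -> metric id map.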
func (p *timeSeriesQueryParser) parseMetrics(model *simplejson.Json) ([]*MetricAgg, error) {
	var err error
	var result []*MetricAgg
	for _, t := range model.Get("metrics").MustArray() {
		metricJSON := simplejson.NewFromAny(t)
		metric := &MetricAgg{}

		metric.Field = metricJSON.Get("field").MustString()
		metric.Hide = metricJSON.Get("hide").MustBool(false)
		metric.ID = metricJSON.Get("id").MustString()
		metric.PipelineAggregate = metricJSON.Get("pipelineAgg").MustString()
		metric.Settings = simplejson.NewFromAny(metricJSON.Get("settings").MustMap())
		metric.Meta = simplejson.NewFromAny(metricJSON.Get("meta").MustMap())
		metric.Type, err = metricJSON.Get("type").String()
		if err != nil {
			return nil, err
		}

		if isPipelineAggWithMultipleBucketPaths(metric.Type) {
			metric.PipelineVariables = map[string]string{}
			pvArr := metricJSON.Get("pipelineVariables").MustArray()
			for _, v := range pvArr {
				kv := v.(map[string]interface{})
				metric.PipelineVariables[kv["name"].(string)] = kv["pipelineAgg"].(string)
			}
		}

		result = append(result, metric)
	}
	return result, nil
}