mirror of https://github.com/grafana/grafana.git (synced 2024-11-26 19:00:54 -06:00)

cleanup and add more test

This commit is contained in:
parent 4042e4b225
commit 06f7332156

pkg/tsdb/elasticsearch/elasticsearch_test.go (new file, 121 lines)
@@ -0,0 +1,121 @@
package elasticsearch

import (
    "github.com/grafana/grafana/pkg/components/simplejson"
    "time"
)

var avgWithMovingAvg = Query{
    TimeField: "timestamp",
    RawQuery:  "(test:query) AND (name:sample)",
    Interval:  time.Millisecond,
    BucketAggs: []*BucketAgg{{
        Field: "timestamp",
        ID:    "2",
        Type:  "date_histogram",
        Settings: simplejson.NewFromAny(map[string]interface{}{
            "interval":      "auto",
            "min_doc_count": 0,
            "trimEdges":     0,
        }),
    }},
    Metrics: []*Metric{{
        Field: "value",
        ID:    "1",
        Type:  "avg",
        Settings: simplejson.NewFromAny(map[string]interface{}{
            "script": map[string]string{
                "inline": "_value * 2",
            },
        }),
    }, {
        Field:             "1",
        ID:                "3",
        Type:              "moving_avg",
        PipelineAggregate: "1",
        Settings: simplejson.NewFromAny(map[string]interface{}{
            "minimize": false,
            "model":    "simple",
            "window":   5,
        }),
    }},
}

var wildcardsAndQuotes = Query{
    TimeField: "timestamp",
    RawQuery:  "scope:$location.leagueconnect.api AND name:*CreateRegistration AND name:\"*.201-responses.rate\"",
    Interval:  time.Millisecond,
    BucketAggs: []*BucketAgg{{
        Field:    "timestamp",
        ID:       "2",
        Type:     "date_histogram",
        Settings: simplejson.NewFromAny(map[string]interface{}{}),
    }},
    Metrics: []*Metric{{
        Field:    "value",
        ID:       "1",
        Type:     "sum",
        Settings: simplejson.NewFromAny(map[string]interface{}{}),
    }},
}

var termAggs = Query{
    TimeField: "timestamp",
    RawQuery:  "(scope:*.hmp.metricsd) AND (name_raw:builtin.general.*_instance_count)",
    Interval:  time.Millisecond,
    BucketAggs: []*BucketAgg{{
        Field: "name_raw",
        ID:    "4",
        Type:  "terms",
        Settings: simplejson.NewFromAny(map[string]interface{}{
            "order":   "desc",
            "orderBy": "_term",
            "size":    "10",
        }),
    }, {
        Field: "timestamp",
        ID:    "2",
        Type:  "date_histogram",
        Settings: simplejson.NewFromAny(map[string]interface{}{
            "interval":      "auto",
            "min_doc_count": 0,
            "trimEdges":     0,
        }),
    }},
    Metrics: []*Metric{{
        Field:    "value",
        ID:       "1",
        Type:     "sum",
        Settings: simplejson.NewFromAny(map[string]interface{}{}),
    }},
}

var filtersAggs = Query{
    TimeField: "time",
    RawQuery:  "*",
    Interval:  time.Millisecond,
    BucketAggs: []*BucketAgg{{
        ID:   "3",
        Type: "filters",
        Settings: simplejson.NewFromAny(map[string]interface{}{
            "filters": []interface{}{
                map[string]interface{}{"label": "hello", "query": "host:\"67.65.185.232\""},
            },
        }),
    }, {
        Field: "timestamp",
        ID:    "2",
        Type:  "date_histogram",
        Settings: simplejson.NewFromAny(map[string]interface{}{
            "interval":      "auto",
            "min_doc_count": 0,
            "trimEdges":     0,
        }),
    }},
    Metrics: []*Metric{{
        Field:             "bytesSent",
        ID:                "1",
        Type:              "count",
        PipelineAggregate: "select metric",
        Settings:          simplejson.NewFromAny(map[string]interface{}{}),
    }},
}
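An aside on how these fixtures relate to the JSON panel models they replace in the tests later in this diff: since Query, BucketAgg and Metric carry json struct tags, an equivalent value can be produced by unmarshalling the raw model. A minimal, self-contained sketch, with stand-in struct definitions trimmed to the fields visible in this commit:

package main

import (
    "encoding/json"
    "fmt"
)

// Trimmed stand-ins for the package's types; the real definitions
// appear later in this diff.
type BucketAgg struct {
    Field string `json:"field"`
    ID    string `json:"id"`
    Type  string `json:"type"`
}

type Query struct {
    TimeField  string       `json:"timeField"`
    RawQuery   string       `json:"query"`
    BucketAggs []*BucketAgg `json:"bucketAggs"`
}

func main() {
    model := `{
        "timeField": "timestamp",
        "query": "(test:query) AND (name:sample)",
        "bucketAggs": [{"field": "timestamp", "id": "2", "type": "date_histogram"}]
    }`
    var q Query
    if err := json.Unmarshal([]byte(model), &q); err != nil {
        panic(err)
    }
    fmt.Println(q.RawQuery, q.BucketAggs[0].Type) // (test:query) AND (name:sample) date_histogram
}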
@@ -20,9 +20,15 @@ func (qp *ElasticSearchQueryParser) Parse(model *simplejson.Json, dsInfo *models
     if err != nil {
         return nil, err
     }
-    rawQuery := model.Get("query").MustString("")
-    bucketAggs := model.Get("bucketAggs").MustArray()
-    metrics := model.Get("metrics").MustArray()
+    rawQuery := model.Get("query").MustString()
+    bucketAggs, err := qp.parseBucketAggs(model)
+    if err != nil {
+        return nil, err
+    }
+    metrics, err := qp.parseMetrics(model)
+    if err != nil {
+        return nil, err
+    }
     alias := model.Get("alias").MustString("")
     parsedInterval, err := tsdb.GetIntervalFrom(dsInfo, model, time.Millisecond)
     if err != nil {
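A hedged sketch, not part of this commit, of how the parser is now driven end to end. It assumes Parse returns (*Query, error), which matches how the old tests in this diff consumed it; the model keys mirror the parseBucketAggs/parseMetrics lookups added below:

func buildExampleQuery(dsInfo *models.DataSource) (*Query, error) {
    // The model mirrors what the frontend sends.
    model, err := simplejson.NewJson([]byte(`{
        "timeField": "timestamp",
        "query": "*",
        "bucketAggs": [{"id": "2", "type": "date_histogram", "field": "timestamp", "settings": {}}],
        "metrics": [{"id": "1", "type": "count", "field": "value", "settings": {}}]
    }`))
    if err != nil {
        return nil, err
    }
    parser := ElasticSearchQueryParser{}
    return parser.Parse(model, dsInfo)
}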
@@ -37,6 +43,57 @@ func (qp *ElasticSearchQueryParser) Parse(model *simplejson.Json, dsInfo *models
         parsedInterval}, nil
 }
 
+func (qp *ElasticSearchQueryParser) parseBucketAggs(model *simplejson.Json) ([]*BucketAgg, error) {
+    var err error
+    var result []*BucketAgg
+    for _, t := range model.Get("bucketAggs").MustArray() {
+        aggJson := simplejson.NewFromAny(t)
+        agg := &BucketAgg{}
+
+        agg.Type, err = aggJson.Get("type").String()
+        if err != nil {
+            return nil, err
+        }
+
+        agg.ID, err = aggJson.Get("id").String()
+        if err != nil {
+            return nil, err
+        }
+
+        agg.Field = aggJson.Get("field").MustString()
+        agg.Settings = simplejson.NewFromAny(aggJson.Get("settings").MustMap())
+
+        result = append(result, agg)
+    }
+    return result, nil
+}
+
+func (qp *ElasticSearchQueryParser) parseMetrics(model *simplejson.Json) ([]*Metric, error) {
+    var err error
+    var result []*Metric
+    for _, t := range model.Get("metrics").MustArray() {
+        metricJson := simplejson.NewFromAny(t)
+        metric := &Metric{}
+
+        metric.Field = metricJson.Get("field").MustString()
+        metric.Hide = metricJson.Get("hide").MustBool(false)
+        metric.ID, err = metricJson.Get("id").String()
+        if err != nil {
+            return nil, err
+        }
+
+        metric.PipelineAggregate = metricJson.Get("pipelineAgg").MustString()
+        metric.Settings = simplejson.NewFromAny(metricJson.Get("settings").MustMap())
+
+        metric.Type, err = metricJson.Get("type").String()
+        if err != nil {
+            return nil, err
+        }
+
+        result = append(result, metric)
+    }
+    return result, nil
+}
 func getRequestHeader(timeRange *tsdb.TimeRange, dsInfo *models.DataSource) *QueryHeader {
     var header QueryHeader
     esVersion := dsInfo.JsonData.Get("esVersion").MustInt()
@@ -47,7 +104,7 @@ func getRequestHeader(timeRange *tsdb.TimeRange, dsInfo *models.DataSource) *Que
     }
     header.SearchType = searchType
     header.IgnoreUnavailable = true
-    header.Index = getIndexList(dsInfo.Database, dsInfo.JsonData.Get("interval").MustString(""), timeRange)
+    header.Index = getIndexList(dsInfo.Database, dsInfo.JsonData.Get("interval").MustString(), timeRange)
 
     if esVersion >= 56 {
         header.MaxConcurrentShardRequests = dsInfo.JsonData.Get("maxConcurrentShardRequests").MustInt()
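One behavioral consequence of the stricter parsing is worth noting: the old MustArray-based path silently tolerated malformed entries, while parseBucketAggs now fails fast when "type" or "id" is missing, because simplejson's String() returns an error for absent keys. A sketch, assumed to sit alongside the parser in this package:

func exampleStrictParsing() {
    model, _ := simplejson.NewJson([]byte(`{"bucketAggs": [{"field": "timestamp"}]}`))
    parser := ElasticSearchQueryParser{}
    if _, err := parser.parseBucketAggs(model); err != nil {
        // Reached: the entry has no "type" key, so Get("type").String() errors.
        fmt.Println("malformed bucket agg rejected:", err)
    }
}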
@@ -7,6 +7,22 @@ import (
     "github.com/grafana/grafana/pkg/components/simplejson"
 )
 
+type BucketAgg struct {
+    Field    string           `json:"field"`
+    ID       string           `json:"id"`
+    Settings *simplejson.Json `json:"settings"`
+    Type     string           `json:"type"`
+}
+
+type Metric struct {
+    Field             string           `json:"field"`
+    Hide              bool             `json:"hide"`
+    ID                string           `json:"id"`
+    PipelineAggregate string           `json:"pipelineAgg"`
+    Settings          *simplejson.Json `json:"settings"`
+    Type              string           `json:"type"`
+}
+
 type QueryHeader struct {
     SearchType        string `json:"search_type"`
     IgnoreUnavailable bool   `json:"ignore_unavailable"`
@@ -44,16 +60,16 @@ type FiltersAgg struct {
     Filters map[string]interface{} `json:"filters"`
 }
 
-type TermsAggSetting struct {
+type TermsAgg struct {
     Field   string                 `json:"field"`
     Size    int                    `json:"size"`
     Order   map[string]interface{} `json:"order"`
     Missing string                 `json:"missing,omitempty"`
 }
 
-type TermsAgg struct {
-    Terms TermsAggSetting `json:"terms"`
-    Aggs  Aggs            `json:"aggs"`
+type TermsAggWrap struct {
+    Terms TermsAgg `json:"terms"`
+    Aggs  Aggs     `json:"aggs"`
 }
 
 type ExtendedBounds struct {
@@ -91,8 +107,6 @@ type BoolQuery struct {
     Filter []interface{} `json:"filter"`
 }
 
-type Metric map[string]interface{}
-
 type Responses struct {
     Responses []Response `json:"responses"`
 }
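Why the rename to TermsAggWrap matters for serialization: the wrapper nests the terms body and its child aggregations side by side, which is the request shape Elasticsearch expects. A self-contained sketch; Aggs is assumed to be map[string]interface{}, which its make()/index usage elsewhere in this diff suggests:

package main

import (
    "encoding/json"
    "fmt"
)

type Aggs map[string]interface{}

type TermsAgg struct {
    Field   string                 `json:"field"`
    Size    int                    `json:"size"`
    Order   map[string]interface{} `json:"order"`
    Missing string                 `json:"missing,omitempty"`
}

type TermsAggWrap struct {
    Terms TermsAgg `json:"terms"`
    Aggs  Aggs     `json:"aggs"`
}

func main() {
    wrap := TermsAggWrap{
        Terms: TermsAgg{Field: "name_raw", Size: 10, Order: map[string]interface{}{"_term": "desc"}},
        Aggs:  Aggs{},
    }
    out, _ := json.Marshal(wrap)
    fmt.Println(string(out))
    // {"terms":{"field":"name_raw","size":10,"order":{"_term":"desc"}},"aggs":{}}
}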
@@ -18,11 +18,11 @@ var rangeFilterSetting = RangeFilterSetting{Gte: "$timeFrom",
     Format: "epoch_millis"}
 
 type Query struct {
-    TimeField  string        `json:"timeField"`
-    RawQuery   string        `json:"query"`
-    BucketAggs []interface{} `json:"bucketAggs"`
-    Metrics    []interface{} `json:"metrics"`
-    Alias      string        `json:"Alias"`
+    TimeField  string       `json:"timeField"`
+    RawQuery   string       `json:"query"`
+    BucketAggs []*BucketAgg `json:"bucketAggs"`
+    Metrics    []*Metric    `json:"metrics"`
+    Alias      string       `json:"Alias"`
     Interval   time.Duration
 }
 
@@ -73,27 +73,17 @@ func (q *Query) renderReqQuery(req *Request) {
 func (q *Query) parseAggs(req *Request) error {
     aggs := make(Aggs)
     nestedAggs := aggs
-    for _, aggRaw := range q.BucketAggs {
+    for _, agg := range q.BucketAggs {
         esAggs := make(Aggs)
-        aggJson := simplejson.NewFromAny(aggRaw)
-        aggType, err := aggJson.Get("type").String()
-        if err != nil {
-            return err
-        }
-        id, err := aggJson.Get("id").String()
-        if err != nil {
-            return err
-        }
 
-        switch aggType {
+        switch agg.Type {
         case "date_histogram":
-            esAggs["date_histogram"] = q.getDateHistogramAgg(aggJson)
+            esAggs["date_histogram"] = q.getDateHistogramAgg(agg)
         case "histogram":
-            esAggs["histogram"] = q.getHistogramAgg(aggJson)
+            esAggs["histogram"] = q.getHistogramAgg(agg)
         case "filters":
-            esAggs["filters"] = q.getFilters(aggJson)
+            esAggs["filters"] = q.getFilters(agg)
         case "terms":
-            terms := q.getTerms(aggJson)
+            terms := q.getTerms(agg)
             esAggs["terms"] = terms.Terms
             esAggs["aggs"] = terms.Aggs
         case "geohash_grid":
@@ -105,59 +95,47 @@ func (q *Query) parseAggs(req *Request) error {
         }
 
         if aggs, ok := (nestedAggs["aggs"]).(Aggs); ok {
-            aggs[id] = esAggs
+            aggs[agg.ID] = esAggs
         }
         nestedAggs = esAggs
 
     }
     nestedAggs["aggs"] = make(Aggs)
 
-    for _, metricRaw := range q.Metrics {
-        metric := make(Metric)
-        metricJson := simplejson.NewFromAny(metricRaw)
+    for _, metric := range q.Metrics {
+        subAgg := make(Aggs)
 
-        id, err := metricJson.Get("id").String()
-        if err != nil {
-            return err
-        }
-        metricType, err := metricJson.Get("type").String()
-        if err != nil {
-            return err
-        }
-        if metricType == "count" {
+        if metric.Type == "count" {
             continue
         }
+        settings := metric.Settings.MustMap(make(map[string]interface{}))
 
-        settings := metricJson.Get("settings").MustMap(map[string]interface{}{})
-
-        if isPipelineAgg(metricType) {
-            pipelineAgg := metricJson.Get("pipelineAgg").MustString("")
-            if _, err := strconv.Atoi(pipelineAgg); err == nil {
-                settings["buckets_path"] = pipelineAgg
+        if isPipelineAgg(metric.Type) {
+            if _, err := strconv.Atoi(metric.PipelineAggregate); err == nil {
+                settings["buckets_path"] = metric.PipelineAggregate
             } else {
                 continue
             }
 
         } else {
-            settings["field"] = metricJson.Get("field").MustString()
+            settings["field"] = metric.Field
         }
 
-        metric[metricType] = settings
-        nestedAggs["aggs"].(Aggs)[id] = metric
+        subAgg[metric.Type] = settings
+        nestedAggs["aggs"].(Aggs)[metric.ID] = subAgg
     }
     req.Aggs = aggs["aggs"].(Aggs)
     return nil
 }
 
-func (q *Query) getDateHistogramAgg(model *simplejson.Json) *DateHistogramAgg {
+func (q *Query) getDateHistogramAgg(target *BucketAgg) *DateHistogramAgg {
     agg := &DateHistogramAgg{}
-    settings := simplejson.NewFromAny(model.Get("settings").Interface())
-    interval, err := settings.Get("interval").String()
+    interval, err := target.Settings.Get("interval").String()
     if err == nil {
         agg.Interval = interval
     }
     agg.Field = q.TimeField
-    agg.MinDocCount = settings.Get("min_doc_count").MustInt(0)
+    agg.MinDocCount = target.Settings.Get("min_doc_count").MustInt(0)
     agg.ExtendedBounds = ExtendedBounds{"$timeFrom", "$timeTo"}
     agg.Format = "epoch_millis"
 
@@ -165,66 +143,63 @@ func (q *Query) getDateHistogramAgg(model *simplejson.Json) *DateHistogramAgg {
         agg.Interval = "$__interval"
     }
 
-    missing, err := settings.Get("missing").String()
+    missing, err := target.Settings.Get("missing").String()
     if err == nil {
         agg.Missing = missing
     }
     return agg
 }
 
-func (q *Query) getHistogramAgg(model *simplejson.Json) *HistogramAgg {
+func (q *Query) getHistogramAgg(target *BucketAgg) *HistogramAgg {
     agg := &HistogramAgg{}
-    settings := simplejson.NewFromAny(model.Get("settings").Interface())
-    interval, err := settings.Get("interval").String()
+    interval, err := target.Settings.Get("interval").String()
     if err == nil {
         agg.Interval = interval
     }
-    field, err := model.Get("field").String()
-    if err == nil {
-        agg.Field = field
+
+    if target.Field != "" {
+        agg.Field = target.Field
     }
-    agg.MinDocCount = settings.Get("min_doc_count").MustInt(0)
-    missing, err := settings.Get("missing").String()
+    agg.MinDocCount = target.Settings.Get("min_doc_count").MustInt(0)
+    missing, err := target.Settings.Get("missing").String()
     if err == nil {
         agg.Missing = missing
     }
     return agg
 }
 
-func (q *Query) getFilters(model *simplejson.Json) *FiltersAgg {
+func (q *Query) getFilters(target *BucketAgg) *FiltersAgg {
     agg := &FiltersAgg{}
     agg.Filters = map[string]interface{}{}
-    settings := simplejson.NewFromAny(model.Get("settings").Interface())
 
-    for _, filter := range settings.Get("filters").MustArray() {
+    for _, filter := range target.Settings.Get("filters").MustArray() {
         filterJson := simplejson.NewFromAny(filter)
         query := filterJson.Get("query").MustString("")
         label := filterJson.Get("label").MustString("")
         if label == "" {
             label = query
         }
 
         agg.Filters[label] = newQueryStringFilter(true, query)
     }
     return agg
 }
 
-func (q *Query) getTerms(model *simplejson.Json) *TermsAgg {
-    agg := &TermsAgg{Aggs: make(Aggs)}
-    settings := simplejson.NewFromAny(model.Get("settings").Interface())
-    agg.Terms.Field = model.Get("field").MustString()
-    if settings == nil {
+func (q *Query) getTerms(target *BucketAgg) *TermsAggWrap {
+    agg := &TermsAggWrap{Aggs: make(Aggs)}
+    agg.Terms.Field = target.Field
+    if len(target.Settings.MustMap()) == 0 {
         return agg
     }
-    sizeStr := settings.Get("size").MustString("")
+    sizeStr := target.Settings.Get("size").MustString("")
     size, err := strconv.Atoi(sizeStr)
     if err != nil {
         size = 500
     }
     agg.Terms.Size = size
-    orderBy, err := settings.Get("orderBy").String()
+    orderBy, err := target.Settings.Get("orderBy").String()
     if err == nil {
         agg.Terms.Order = make(map[string]interface{})
-        agg.Terms.Order[orderBy] = settings.Get("order").MustString("")
+        agg.Terms.Order[orderBy] = target.Settings.Get("order").MustString("")
         if _, err := strconv.Atoi(orderBy); err != nil {
             for _, metricI := range q.Metrics {
                 metric := simplejson.NewFromAny(metricI)
@@ -242,7 +217,7 @@ func (q *Query) getTerms(model *simplejson.Json) *TermsAgg {
         }
     }
 
-    missing, err := settings.Get("missing").String()
+    missing, err := target.Settings.Get("missing").String()
     if err == nil {
         agg.Terms.Missing = missing
     }
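For orientation, this is the nested structure parseAggs produces, reproduced with plain maps. The IDs ("4" terms, "2" date_histogram, "1" sum) mirror the termAggs fixture and the expected JSON in the tests below:

package main

import (
    "encoding/json"
    "fmt"
)

func main() {
    aggs := map[string]interface{}{
        "4": map[string]interface{}{ // outer bucket agg: terms
            "terms": map[string]interface{}{"field": "name_raw", "size": 10},
            "aggs": map[string]interface{}{
                "2": map[string]interface{}{ // nested bucket agg: date_histogram
                    "date_histogram": map[string]interface{}{"field": "timestamp", "interval": "200ms"},
                    "aggs": map[string]interface{}{
                        // leaf metric, keyed by its ID
                        "1": map[string]interface{}{"sum": map[string]interface{}{"field": "value"}},
                    },
                },
            },
        },
    }
    out, _ := json.MarshalIndent(aggs, "", "  ")
    fmt.Println(string(out))
}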
@@ -40,5 +40,4 @@ func isPipelineAgg(metricType string) bool {
 func describeMetric(metricType, field string) string {
     text := metricAggType[metricType]
     return text + " " + field
-
 }
@@ -13,13 +13,8 @@ import (
     "testing"
 )
 
-func testElasticSearchResponse(requestJSON string, expectedElasticSearchRequestJSON string) {
+func testElasticSearchResponse(query Query, expectedElasticSearchRequestJSON string) {
     var queryExpectedJSONInterface, queryJSONInterface interface{}
-    parser := ElasticSearchQueryParser{}
-    model := &Query{}
-
-    err := json.Unmarshal([]byte(requestJSON), model)
-    So(err, ShouldBeNil)
     jsonDate, _ := simplejson.NewJson([]byte(`{"esVersion":2}`))
     dsInfo := &models.DataSource{
         Database: "grafana-test",
@@ -28,10 +23,8 @@ func testElasticSearchResponse(requestJSON string, expectedElasticSearchRequestJ
 
     testTimeRange := tsdb.NewTimeRange("5m", "now")
 
-    req, _ := simplejson.NewJson([]byte(requestJSON))
-    query, err := parser.Parse(req, dsInfo)
     s, err := query.Build(&tsdb.TsdbQuery{TimeRange: testTimeRange}, dsInfo)
-
     So(err, ShouldBeNil)
     queryJSON := strings.Split(s, "\n")[1]
     err = json.Unmarshal([]byte(queryJSON), &queryJSONInterface)
     So(err, ShouldBeNil)
@@ -62,53 +55,6 @@ func testElasticSearchResponse(requestJSON string, expectedElasticSearchRequestJ
 func TestElasticSearchQueryBuilder(t *testing.T) {
     Convey("Elasticsearch QueryBuilder query testing", t, func() {
         Convey("Build test average metric with moving average", func() {
-            var testElasticsearchModelRequestJSON = `
-            {
-                "bucketAggs": [
-                    {
-                        "field": "timestamp",
-                        "id": "2",
-                        "settings": {
-                            "interval": "auto",
-                            "min_doc_count": 0,
-                            "trimEdges": 0
-                        },
-                        "type": "date_histogram"
-                    }
-                ],
-                "dsType": "elasticsearch",
-                "metrics": [
-                    {
-                        "field": "value",
-                        "id": "1",
-                        "inlineScript": "_value * 2",
-                        "meta": {},
-                        "settings": {
-                            "script": {
-                                "inline": "_value * 2"
-                            }
-                        },
-                        "type": "avg"
-                    },
-                    {
-                        "field": "1",
-                        "id": "3",
-                        "meta": {},
-                        "pipelineAgg": "1",
-                        "settings": {
-                            "minimize": false,
-                            "model": "simple",
-                            "window": 5
-                        },
-                        "type": "moving_avg"
-                    }
-                ],
-                "query": "(test:query) AND (name:sample)",
-                "refId": "A",
-                "timeField": "timestamp"
-            }
-            `
-
             var expectedElasticsearchQueryJSON = `
             {
                 "size": 0,
@@ -167,32 +113,9 @@ func TestElasticSearchQueryBuilder(t *testing.T) {
                 }
             }`
 
-            testElasticSearchResponse(testElasticsearchModelRequestJSON, expectedElasticsearchQueryJSON)
+            testElasticSearchResponse(avgWithMovingAvg, expectedElasticsearchQueryJSON)
         })
         Convey("Test Wildcards and Quotes", func() {
-            testElasticsearchModelRequestJSON := `
-            {
-                "alias": "New",
-                "bucketAggs": [
-                    {
-                        "field": "timestamp",
-                        "id": "2",
-                        "type": "date_histogram"
-                    }
-                ],
-                "dsType": "elasticsearch",
-                "metrics": [
-                    {
-                        "type": "sum",
-                        "field": "value",
-                        "id": "1"
-                    }
-                ],
-                "query": "scope:$location.leagueconnect.api AND name:*CreateRegistration AND name:\"*.201-responses.rate\"",
-                "refId": "A",
-                "timeField": "timestamp"
-            }`
-
             expectedElasticsearchQueryJSON := `
             {
                 "size": 0,
@@ -239,65 +162,9 @@ func TestElasticSearchQueryBuilder(t *testing.T) {
                 }
             }`
 
-            testElasticSearchResponse(testElasticsearchModelRequestJSON, expectedElasticsearchQueryJSON)
+            testElasticSearchResponse(wildcardsAndQuotes, expectedElasticsearchQueryJSON)
         })
         Convey("Test Term Aggregates", func() {
-            testElasticsearchModelRequestJSON := `
-            {
-                "bucketAggs": [{
-                    "field": "name_raw",
-                    "id": "4",
-                    "settings": {
-                        "order": "desc",
-                        "orderBy": "_term",
-                        "size": "10"
-                    },
-                    "type": "terms"
-                }, {
-                    "field": "timestamp",
-                    "id": "2",
-                    "settings": {
-                        "interval": "1m",
-                        "min_doc_count": 0,
-                        "trimEdges": 0
-                    },
-                    "type": "date_histogram"
-                }],
-                "dsType": "elasticsearch",
-                "filters": [{
-                    "boolOp": "AND",
-                    "not": false,
-                    "type": "rfc190Scope",
-                    "value": "*.hmp.metricsd"
-                }, {
-                    "boolOp": "AND",
-                    "not": false,
-                    "type": "name_raw",
-                    "value": "builtin.general.*_instance_count"
-                }],
-                "metricObject": {},
-                "metrics": [{
-                    "field": "value",
-                    "id": "1",
-                    "meta": {},
-                    "options": {},
-                    "settings": {},
-                    "type": "sum"
-                }],
-                "mode": 0,
-                "numToGraph": 10,
-                "prependHostName": false,
-                "query": "(scope:*.hmp.metricsd) AND (name_raw:builtin.general.*_instance_count)",
-                "refId": "A",
-                "regexAlias": false,
-                "selectedApplication": "",
-                "selectedHost": "",
-                "selectedLocation": "",
-                "timeField": "timestamp",
-                "useFullHostName": "",
-                "useQuery": false
-            }`
-
             expectedElasticsearchQueryJSON := `
             {
                 "size": 0,
@@ -322,51 +189,12 @@ func TestElasticSearchQueryBuilder(t *testing.T) {
                 ]
             }
         },
-        "aggs": {"4":{"aggs":{"2":{"aggs":{"1":{"sum":{"field":"value"}}},"date_histogram":{"extended_bounds":{"max":"<TO_TIMESTAMP>","min":"<FROM_TIMESTAMP>"},"field":"timestamp","format":"epoch_millis","interval":"1m","min_doc_count":0}}},"terms":{"field":"name_raw","order":{"_term":"desc"},"size":10}}}
+        "aggs": {"4":{"aggs":{"2":{"aggs":{"1":{"sum":{"field":"value"}}},"date_histogram":{"extended_bounds":{"max":"<TO_TIMESTAMP>","min":"<FROM_TIMESTAMP>"},"field":"timestamp","format":"epoch_millis","interval":"200ms","min_doc_count":0}}},"terms":{"field":"name_raw","order":{"_term":"desc"},"size":10}}}
     }`
 
-            testElasticSearchResponse(testElasticsearchModelRequestJSON, expectedElasticsearchQueryJSON)
+            testElasticSearchResponse(termAggs, expectedElasticsearchQueryJSON)
         })
         Convey("Test Filters Aggregates", func() {
-            testElasticsearchModelRequestJSON := `
-            {
-                "bucketAggs": [
-                    {
-                        "id": "3",
-                        "settings": {
-                            "filters": [{
-                                "label": "hello",
-                                "query": "host:\"67.65.185.232\""
-                            }]
-                        },
-                        "type": "filters"
-                    },
-                    {
-                        "field": "time",
-                        "id": "2",
-                        "settings": {
-                            "interval": "auto",
-                            "min_doc_count": 0,
-                            "trimEdges": 0
-                        },
-                        "type": "date_histogram"
-                    }
-                ],
-                "metrics": [
-                    {
-                        "pipelineAgg": "select metric",
-                        "field": "bytesSent",
-                        "id": "1",
-                        "meta": {},
-                        "settings": {},
-                        "type": "count"
-                    }
-                ],
-                "query": "*",
-                "refId": "A",
-                "timeField": "time"
-            }`
-
             expectedElasticsearchQueryJSON := `{
                 "size": 0,
                 "query": {
@@ -422,7 +250,7 @@ func TestElasticSearchQueryBuilder(t *testing.T) {
             }
             `
 
-            testElasticSearchResponse(testElasticsearchModelRequestJSON, expectedElasticsearchQueryJSON)
+            testElasticSearchResponse(filtersAggs, expectedElasticsearchQueryJSON)
         })
     })
 }
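A hedged sketch of the flow the reworked helper exercises, using only signatures visible in this diff; the assumption that line 0 of Build's output is the request header follows from getRequestHeader above and the helper's Split(s, "\n")[1]:

func renderFixture(dsInfo *models.DataSource) (header, body string, err error) {
    timeRange := tsdb.NewTimeRange("5m", "now")
    s, err := avgWithMovingAvg.Build(&tsdb.TsdbQuery{TimeRange: timeRange}, dsInfo)
    if err != nil {
        return "", "", err
    }
    lines := strings.Split(s, "\n")
    // lines[0]: the QueryHeader rendered by getRequestHeader
    // lines[1]: the query body the tests compare against expected JSON
    return lines[0], lines[1], nil
}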
@@ -40,27 +40,26 @@ func (rp *ElasticsearchResponseParser) processBuckets(aggs map[string]interface{
     }
 
     if depth == maxDepth {
-        if aggDef.Get("type").MustString() == "date_histogram" {
+        if aggDef.Type == "date_histogram" {
             err = rp.processMetrics(esAgg, target, series, props)
             if err != nil {
                 return err
             }
         } else {
-            return fmt.Errorf("not support type:%s", aggDef.Get("type").MustString())
+            return fmt.Errorf("not support type:%s", aggDef.Type)
         }
     } else {
         for i, b := range esAgg.Get("buckets").MustArray() {
-            field := aggDef.Get("field").MustString()
             bucket := simplejson.NewFromAny(b)
             newProps := props
             if key, err := bucket.Get("key").String(); err == nil {
-                newProps[field] = key
+                newProps[aggDef.Field] = key
             } else {
                 props["filter"] = strconv.Itoa(i)
             }
 
             if key, err := bucket.Get("key_as_string").String(); err == nil {
-                props[field] = key
+                props[aggDef.Field] = key
             }
             rp.processBuckets(bucket.MustMap(), target, series, newProps, depth+1)
         }
@@ -72,17 +71,12 @@ func (rp *ElasticsearchResponseParser) processBuckets(aggs map[string]interface{
 }
 
 func (rp *ElasticsearchResponseParser) processMetrics(esAgg *simplejson.Json, target *Query, series *[]*tsdb.TimeSeries, props map[string]string) error {
-    for _, v := range target.Metrics {
-        metric := simplejson.NewFromAny(v)
-        if metric.Get("hide").MustBool(false) {
+    for _, metric := range target.Metrics {
+        if metric.Hide {
             continue
         }
 
-        metricId := metric.Get("id").MustString()
-        metricField := metric.Get("field").MustString()
-        metricType := metric.Get("type").MustString()
-
-        switch metricType {
+        switch metric.Type {
         case "count":
             newSeries := tsdb.TimeSeries{}
             for _, v := range esAgg.Get("buckets").MustArray() {
@@ -102,16 +96,16 @@ func (rp *ElasticsearchResponseParser) processMetrics(esAgg *simplejson.Json, ta
             }
 
             firstBucket := simplejson.NewFromAny(buckets[0])
-            percentiles := firstBucket.GetPath(metricId, "values").MustMap()
+            percentiles := firstBucket.GetPath(metric.ID, "values").MustMap()
 
             for percentileName := range percentiles {
                 newSeries := tsdb.TimeSeries{}
                 newSeries.Tags = props
                 newSeries.Tags["metric"] = "p" + percentileName
-                newSeries.Tags["field"] = metricField
+                newSeries.Tags["field"] = metric.Field
                 for _, v := range buckets {
                     bucket := simplejson.NewFromAny(v)
-                    value := castToNullFloat(bucket.GetPath(metricId, "values", percentileName))
+                    value := castToNullFloat(bucket.GetPath(metric.ID, "values", percentileName))
                     key := castToNullFloat(bucket.Get("key"))
                     newSeries.Points = append(newSeries.Points, tsdb.TimePoint{value, key})
                 }
@@ -120,20 +114,20 @@ func (rp *ElasticsearchResponseParser) processMetrics(esAgg *simplejson.Json, ta
         default:
             newSeries := tsdb.TimeSeries{}
             newSeries.Tags = props
-            newSeries.Tags["metric"] = metricType
-            newSeries.Tags["field"] = metricField
+            newSeries.Tags["metric"] = metric.Type
+            newSeries.Tags["field"] = metric.Field
             for _, v := range esAgg.Get("buckets").MustArray() {
                 bucket := simplejson.NewFromAny(v)
                 key := castToNullFloat(bucket.Get("key"))
-                valueObj, err := bucket.Get(metricId).Map()
+                valueObj, err := bucket.Get(metric.ID).Map()
                 if err != nil {
                     break
                 }
                 var value null.Float
                 if _, ok := valueObj["normalized_value"]; ok {
-                    value = castToNullFloat(bucket.GetPath(metricId, "normalized_value"))
+                    value = castToNullFloat(bucket.GetPath(metric.ID, "normalized_value"))
                 } else {
-                    value = castToNullFloat(bucket.GetPath(metricId, "value"))
+                    value = castToNullFloat(bucket.GetPath(metric.ID, "value"))
                 }
                 newSeries.Points = append(newSeries.Points, tsdb.TimePoint{value, key})
             }
@@ -196,10 +190,9 @@ func (rp *ElasticsearchResponseParser) getSeriesName(series *tsdb.TimeSeries, ta
     // todo, if field and pipelineAgg
     if field != "" && isPipelineAgg(metricType) {
         found := false
-        for _, targetMetricI := range target.Metrics {
-            targetMetric := simplejson.NewFromAny(targetMetricI)
-            if targetMetric.Get("id").MustString() == field {
-                metricName += " " + describeMetric(targetMetric.Get("type").MustString(), field)
+        for _, metric := range target.Metrics {
+            if metric.ID == field {
+                metricName += " " + describeMetric(metric.Type, field)
                 found = true
             }
         }
@@ -255,11 +248,10 @@ func castToNullFloat(j *simplejson.Json) null.Float {
     return null.NewFloat(0, false)
 }
 
-func findAgg(target *Query, aggId string) (*simplejson.Json, error) {
+func findAgg(target *Query, aggId string) (*BucketAgg, error) {
     for _, v := range target.BucketAggs {
-        aggDef := simplejson.NewFromAny(v)
-        if aggId == aggDef.Get("id").MustString() {
-            return aggDef, nil
+        if aggId == v.ID {
+            return v, nil
         }
     }
     return nil, errors.New("can't found aggDef, aggID:" + aggId)
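One Go subtlety in processBuckets above: newProps := props copies the map header, not the contents, so parent and child recursion levels share one underlying map unless it is copied explicitly. A standalone demonstration, not tied to this codebase:

package main

import "fmt"

func main() {
    props := map[string]string{"metric": "sum"}
    newProps := props // aliases props: both names refer to the same map
    newProps["field"] = "value"
    fmt.Println(props["field"]) // "value": props observes the write

    // A true copy requires an explicit loop.
    copied := make(map[string]string, len(props))
    for k, v := range props {
        copied[k] = v
    }
    copied["field"] = "other"
    fmt.Println(props["field"]) // still "value": props is unaffected
}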